1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
23 /* This file contains the data flow analysis pass of the compiler. It
24 computes data flow information which tells combine_instructions
25 which insns to consider combining and controls register allocation.
27 Additional data flow information that is too bulky to record is
28 generated during the analysis, and is used at that time to create
29 autoincrement and autodecrement addressing.
31 The first step is dividing the function into basic blocks.
32 find_basic_blocks does this. Then life_analysis determines
33 where each register is live and where it is dead.
35 ** find_basic_blocks **
37 find_basic_blocks divides the current function's rtl into basic
38 blocks and constructs the CFG. The blocks are recorded in the
39 basic_block_info array; the CFG exists in the edge structures
40 referenced by the blocks.
42 find_basic_blocks also finds any unreachable loops and deletes them.
46 life_analysis is called immediately after find_basic_blocks.
47 It uses the basic block information to determine where each
48 hard or pseudo register is live.
50 ** live-register info **
52 The information about where each register is live is in two parts:
53 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
55 basic_block->global_live_at_start has an element for each basic
56 block, and the element is a bit-vector with a bit for each hard or
57 pseudo register. The bit is 1 if the register is live at the
58 beginning of the basic block.
60 Two types of elements can be added to an insn's REG_NOTES.
61 A REG_DEAD note is added to an insn's REG_NOTES for any register
62 that meets both of two conditions: The value in the register is not
63 needed in subsequent insns and the insn does not replace the value in
64 the register (in the case of multi-word hard registers, the value in
65 each register must be replaced by the insn to avoid a REG_DEAD note).
67 In the vast majority of cases, an object in a REG_DEAD note will be
68 used somewhere in the insn. The (rare) exception to this is if an
69 insn uses a multi-word hard register and only some of the registers are
70 needed in subsequent insns. In that case, REG_DEAD notes will be
71 provided for those hard registers that are not subsequently needed.
72 Partial REG_DEAD notes of this type do not occur when an insn sets
73 only some of the hard registers used in such a multi-word operand;
74 omitting REG_DEAD notes for objects stored in an insn is optional and
75 the desire to do so does not justify the complexity of the partial
REG_DEAD notes.
78 REG_UNUSED notes are added for each register that is set by the insn
79 but is unused subsequently (if every register set by the insn is unused
80 and the insn does not reference memory or have some other side-effect,
81 the insn is deleted instead). If only part of a multi-word hard
82 register is used in a subsequent insn, REG_UNUSED notes are made for
83 the parts that will not be used.
85 To determine which registers are live after any insn, one can
86 start from the beginning of the basic block and scan insns, noting
87 which registers are set by each insn and which die there.
89 ** Other actions of life_analysis **
91 life_analysis sets up the LOG_LINKS fields of insns because the
92 information needed to do so is readily available.
94 life_analysis deletes insns whose only effect is to store a value
that is never used.
97 life_analysis notices cases where a reference to a register as
98 a memory address can be combined with a preceding or following
99 incrementation or decrementation of the register. The separate
100 instruction to increment or decrement is deleted and the address
101 is changed to a POST_INC or similar rtx.
103 Each time an incrementing or decrementing address is created,
104 a REG_INC element is added to the insn's REG_NOTES list.
106 life_analysis fills in certain vectors containing information about
107 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
108 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
110 life_analysis sets current_function_sp_is_unchanging if the function
111 doesn't modify the stack pointer. */
115 Split out from life_analysis:
116 - local property discovery (bb->local_live, bb->local_set)
117 - global property computation
119 - pre/post modify transformation
127 #include "hard-reg-set.h"
128 #include "basic-block.h"
129 #include "insn-config.h"
133 #include "function.h"
137 #include "insn-flags.h"
142 #include "splay-tree.h"
144 #define obstack_chunk_alloc xmalloc
145 #define obstack_chunk_free free
148 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
149 the stack pointer does not matter. The value is tested only in
150 functions that have frame pointers.
151 No definition is equivalent to always zero. */
152 #ifndef EXIT_IGNORE_STACK
153 #define EXIT_IGNORE_STACK 0
156 #ifndef HAVE_epilogue
157 #define HAVE_epilogue 0
159 #ifndef HAVE_prologue
160 #define HAVE_prologue 0
162 #ifndef HAVE_sibcall_epilogue
163 #define HAVE_sibcall_epilogue 0
167 #define LOCAL_REGNO(REGNO) 0
169 #ifndef EPILOGUE_USES
170 #define EPILOGUE_USES(REGNO) 0
173 /* The contents of the current function definition are allocated
174 in this obstack, and all are freed at the end of the function.
175 For top-level functions, this is temporary_obstack.
176 Separate obstacks are made for nested functions. */
178 extern struct obstack *function_obstack;
180 /* Number of basic blocks in the current function. */
184 /* Number of edges in the current function. */
188 /* The basic block array. */
190 varray_type basic_block_info;
192 /* The special entry and exit blocks. */
194 struct basic_block_def entry_exit_blocks[2]
199 NULL, /* local_set */
200 NULL, /* global_live_at_start */
201 NULL, /* global_live_at_end */
203 ENTRY_BLOCK, /* index */
205 -1, -1, /* eh_beg, eh_end */
213 NULL, /* local_set */
214 NULL, /* global_live_at_start */
215 NULL, /* global_live_at_end */
217 EXIT_BLOCK, /* index */
219 -1, -1, /* eh_beg, eh_end */
224 /* Nonzero if the second flow pass has completed. */
227 /* Maximum register number used in this function, plus one. */
231 /* Indexed by n, giving various register information */
233 varray_type reg_n_info;
235 /* Size of a regset for the current function,
236 in (1) bytes and (2) elements. */
241 /* Regset of regs live when calls to `setjmp'-like functions happen. */
242 /* ??? Does this exist only for the setjmp-clobbered warning message? */
244 regset regs_live_at_setjmp;
246 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
247 that have to go in the same hard reg.
248 The first two regs in the list are a pair, and the next two
249 are another pair, etc. */
252 /* Set of registers that may be eliminable. These are handled specially
253 in updating regs_ever_live. */
255 static HARD_REG_SET elim_reg_set;
257 /* The basic block structure for every insn, indexed by uid. */
259 varray_type basic_block_for_insn;
261 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
262 /* ??? Should probably be using LABEL_NUSES instead. It would take a
263 bit of surgery to be able to use or co-opt the routines in jump. */
265 static rtx label_value_list;
266 static rtx tail_recursion_label_list;
268 /* Holds information for tracking conditional register life information. */
269 struct reg_cond_life_info
271 /* An EXPR_LIST of conditions under which a register is dead. */
274 /* ??? Could store mask of bytes that are dead, so that we could finally
275 track lifetimes of multi-word registers accessed via subregs. */
278 /* For use in communicating between propagate_block and its subroutines.
279 Holds all information needed to compute life and def-use information. */
281 struct propagate_block_info
283 /* The basic block we're considering. */
286 /* Bit N is set if register N is conditionally or unconditionally live. */
289 /* Bit N is set if register N is set this insn. */
292 /* Element N is the next insn that uses (hard or pseudo) register N
293 within the current basic block; or zero, if there is no such insn. */
296 /* Contains a list of all the MEMs we are tracking for dead store
300 /* If non-null, record the set of registers set in the basic block. */
303 #ifdef HAVE_conditional_execution
304 /* Indexed by register number, holds a reg_cond_life_info for each
305 register that is not unconditionally live or dead. */
306 splay_tree reg_cond_dead;
308 /* Bit N is set if register N is in an expression in reg_cond_dead. */
312 /* Non-zero if the value of CC0 is live. */
315 /* Flags controling the set of information propagate_block collects. */
319 /* Store the data structures necessary for depth-first search. */
320 struct depth_first_search_dsS {
321 /* stack for backtracking during the algorithm */
324 /* number of edges in the stack. That is, positions 0, ..., sp-1
328 /* record of basic blocks already seen by depth-first search */
329 sbitmap visited_blocks;
331 typedef struct depth_first_search_dsS *depth_first_search_ds;
333 /* Forward declarations */
334 static int count_basic_blocks PARAMS ((rtx));
335 static void find_basic_blocks_1 PARAMS ((rtx));
336 static rtx find_label_refs PARAMS ((rtx, rtx));
337 static void clear_edges PARAMS ((void));
338 static void make_edges PARAMS ((rtx));
339 static void make_label_edge PARAMS ((sbitmap *, basic_block,
341 static void make_eh_edge PARAMS ((sbitmap *, eh_nesting_info *,
342 basic_block, rtx, int));
343 static void mark_critical_edges PARAMS ((void));
344 static void move_stray_eh_region_notes PARAMS ((void));
345 static void record_active_eh_regions PARAMS ((rtx));
347 static void commit_one_edge_insertion PARAMS ((edge));
349 static void delete_unreachable_blocks PARAMS ((void));
350 static void delete_eh_regions PARAMS ((void));
351 static int can_delete_note_p PARAMS ((rtx));
352 static void expunge_block PARAMS ((basic_block));
353 static int can_delete_label_p PARAMS ((rtx));
354 static int tail_recursion_label_p PARAMS ((rtx));
355 static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
357 static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
359 static int merge_blocks PARAMS ((edge,basic_block,basic_block));
360 static void try_merge_blocks PARAMS ((void));
361 static void tidy_fallthru_edges PARAMS ((void));
362 static int verify_wide_reg_1 PARAMS ((rtx *, void *));
363 static void verify_wide_reg PARAMS ((int, rtx, rtx));
364 static void verify_local_live_at_start PARAMS ((regset, basic_block));
365 static int set_noop_p PARAMS ((rtx));
366 static int noop_move_p PARAMS ((rtx));
367 static void delete_noop_moves PARAMS ((rtx));
368 static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
369 static void notice_stack_pointer_modification PARAMS ((rtx));
370 static void mark_reg PARAMS ((rtx, void *));
371 static void mark_regs_live_at_end PARAMS ((regset));
372 static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *));
373 static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
374 static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
375 static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
376 static int insn_dead_p PARAMS ((struct propagate_block_info *,
378 static int libcall_dead_p PARAMS ((struct propagate_block_info *,
380 static void mark_set_regs PARAMS ((struct propagate_block_info *,
382 static void mark_set_1 PARAMS ((struct propagate_block_info *,
383 enum rtx_code, rtx, rtx,
385 #ifdef HAVE_conditional_execution
386 static int mark_regno_cond_dead PARAMS ((struct propagate_block_info *,
388 static void free_reg_cond_life_info PARAMS ((splay_tree_value));
389 static int flush_reg_cond_reg_1 PARAMS ((splay_tree_node, void *));
390 static void flush_reg_cond_reg PARAMS ((struct propagate_block_info *,
392 static rtx ior_reg_cond PARAMS ((rtx, rtx));
393 static rtx not_reg_cond PARAMS ((rtx));
394 static rtx nand_reg_cond PARAMS ((rtx, rtx));
397 static void attempt_auto_inc PARAMS ((struct propagate_block_info *,
398 rtx, rtx, rtx, rtx, rtx));
399 static void find_auto_inc PARAMS ((struct propagate_block_info *,
401 static int try_pre_increment_1 PARAMS ((struct propagate_block_info *,
403 static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
405 static void mark_used_reg PARAMS ((struct propagate_block_info *,
407 static void mark_used_regs PARAMS ((struct propagate_block_info *,
409 void dump_flow_info PARAMS ((FILE *));
410 void debug_flow_info PARAMS ((void));
411 static void dump_edge_info PARAMS ((FILE *, edge, int));
413 static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
415 static void remove_fake_successors PARAMS ((basic_block));
416 static void flow_nodes_print PARAMS ((const char *, const sbitmap, FILE *));
417 static void flow_exits_print PARAMS ((const char *, const edge *, int, FILE *));
418 static void flow_loops_cfg_dump PARAMS ((const struct loops *, FILE *));
419 static int flow_loop_nested_p PARAMS ((struct loop *, struct loop *));
420 static int flow_loop_exits_find PARAMS ((const sbitmap, edge **));
421 static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
422 static int flow_depth_first_order_compute PARAMS ((int *, int *));
423 static void flow_dfs_compute_reverse_init
424 PARAMS ((depth_first_search_ds));
425 static void flow_dfs_compute_reverse_add_bb
426 PARAMS ((depth_first_search_ds, basic_block));
427 static basic_block flow_dfs_compute_reverse_execute
428 PARAMS ((depth_first_search_ds));
429 static void flow_dfs_compute_reverse_finish
430 PARAMS ((depth_first_search_ds));
431 static basic_block flow_loop_pre_header_find PARAMS ((basic_block, const sbitmap *));
432 static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
433 static void flow_loops_tree_build PARAMS ((struct loops *));
434 static int flow_loop_level_compute PARAMS ((struct loop *, int));
435 static int flow_loops_level_compute PARAMS ((struct loops *));
437 /* Find basic blocks of the current function.
438 F is the first insn of the function and NREGS the number of register
/* NOTE(review): this block is an incomplete extraction -- the original
   file's line numbers are fused into each line and several source lines
   (the return type, opening brace, local declarations, #endif, closing
   brace) are missing.  The comments below annotate only the visible
   logic; do not expect this fragment to compile as-is.  */
442 find_basic_blocks (f, nregs, file)
444 int nregs ATTRIBUTE_UNUSED;
445 FILE *file ATTRIBUTE_UNUSED;
449 /* Flush out existing data. */
450 if (basic_block_info != NULL)
456 /* Clear bb->aux on all extant basic blocks. We'll use this as a
457 tag for reuse during create_basic_block, just in case some pass
458 copies around basic block notes improperly. */
459 for (i = 0; i < n_basic_blocks; ++i)
460 BASIC_BLOCK (i)->aux = NULL;
462 VARRAY_FREE (basic_block_info);
/* First pass: count blocks so the table can be sized up front.  */
465 n_basic_blocks = count_basic_blocks (f);
467 /* Size the basic block table. The actual structures will be allocated
468 by find_basic_blocks_1, since we want to keep the structure pointers
469 stable across calls to find_basic_blocks. */
470 /* ??? This whole issue would be much simpler if we called find_basic_blocks
471 exactly once, and thereafter we don't have a single long chain of
472 instructions at all until close to the end of compilation when we
473 actually lay them out. */
475 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
/* Second pass: actually partition the insn chain into blocks.  */
477 find_basic_blocks_1 (f);
479 /* Record the block to which an insn belongs. */
480 /* ??? This should be done another way, by which (perhaps) a label is
481 tagged directly with the basic block that it starts. It is used for
482 more than that currently, but IMO that is the only valid use. */
484 max_uid = get_max_uid ();
486 /* Leave space for insns life_analysis makes in some cases for auto-inc.
487 These cases are rare, so we don't need too much space. */
488 max_uid += max_uid / 10;
491 compute_bb_for_insn (max_uid);
493 /* Discover the edges of our cfg. */
494 record_active_eh_regions (f);
495 make_edges (label_value_list);
497 /* Do very simple cleanup now, for the benefit of code that runs between
498 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
499 tidy_fallthru_edges ();
501 mark_critical_edges ();
503 #ifdef ENABLE_CHECKING
508 /* Count the basic blocks of the function. */
/* NOTE(review): incomplete extraction -- the return type line, braces,
   the `count++` site and several other lines are missing, and original
   file line numbers are fused into the text.  The visible logic counts
   a new block at each CODE_LABEL, and at each real insn that follows a
   jump, a barrier, or a call that created an abnormal edge.  */
511 count_basic_blocks (f)
515 register RTX_CODE prev_code;
516 register int count = 0;
518 int call_had_abnormal_edge = 0;
/* Start as if the previous insn were a jump so the first real insn
   opens a block.  */
520 prev_code = JUMP_INSN;
521 for (insn = f; insn; insn = NEXT_INSN (insn))
523 register RTX_CODE code = GET_CODE (insn);
525 if (code == CODE_LABEL
526 || (GET_RTX_CLASS (code) == 'i'
527 && (prev_code == JUMP_INSN
528 || prev_code == BARRIER
529 || (prev_code == CALL_INSN && call_had_abnormal_edge))))
532 /* Record whether this call created an edge. */
533 if (code == CALL_INSN)
535 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
/* No REG_EH_REGION note is treated as region 1 (may throw).  */
536 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
538 call_had_abnormal_edge = 0;
540 /* If there is an EH region or rethrow, we have an edge. */
541 if ((eh_region && region > 0)
542 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
543 call_had_abnormal_edge = 1;
544 else if (nonlocal_goto_handler_labels && region >= 0)
545 /* If there is a nonlocal goto label and the specified
546 region number isn't -1, we have an edge. (0 means
547 no throw, but might have a nonlocal goto). */
548 call_had_abnormal_edge = 1;
553 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
555 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
559 /* The rest of the compiler works a bit smoother when we don't have to
560 check for the edge case of do-nothing functions with no basic blocks. */
563 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
570 /* Scan a list of insns for labels referred to other than by jumps.
571 This is used to scan the alternatives of a call placeholder. */
/* NOTE(review): incomplete extraction -- the return type, parameter
   declarations, braces and several statements are missing.  Visible
   logic: walk insns starting at F, and for each REG_LABEL note whose
   label is not the EH stub label, not a tablejump label, and not a
   NOTE, prepend the label to LVL; presumably the accumulated list is
   returned -- the return statement is not visible here.  */
573 find_label_refs (f, lvl)
579 for (insn = f; insn; insn = NEXT_INSN (insn))
584 /* Make a list of all labels referred to other than by jumps
585 (which just don't have the REG_LABEL notes).
587 Make a special exception for labels followed by an ADDR*VEC,
588 as this would be a part of the tablejump setup code.
590 Make a special exception for the eh_return_stub_label, which
591 we know isn't part of any otherwise visible control flow. */
593 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
594 if (REG_NOTE_KIND (note) == REG_LABEL)
596 rtx lab = XEXP (note, 0), next;
598 if (lab == eh_return_stub_label)
600 else if ((next = next_nonnote_insn (lab)) != NULL
601 && GET_CODE (next) == JUMP_INSN
602 && (GET_CODE (PATTERN (next)) == ADDR_VEC
603 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
605 else if (GET_CODE (lab) == NOTE)
608 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
615 /* Find all basic blocks of the function whose first insn is F.
617 Collect and return a list of labels whose addresses are taken. This
618 will be used in make_edges for use with computed gotos. */
/* NOTE(review): incomplete extraction -- braces, the switch statement
   over insn codes, several case labels, and many statements are
   missing, and the original file line numbers are fused into the text.
   Visible logic: one pass over the insn chain, opening a block at each
   label or after each control transfer, closing it at jumps, barriers
   and abnormal-edge calls via create_basic_block.  */
621 find_basic_blocks_1 (f)
624 register rtx insn, next;
626 rtx bb_note = NULL_RTX;
627 rtx eh_list = NULL_RTX;
633 /* We process the instructions in a slightly different way than we did
634 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
635 closed out the previous block, so that it gets attached at the proper
636 place. Since this form should be equivalent to the previous,
637 count_basic_blocks continues to use the old form as a check. */
639 for (insn = f; insn; insn = next)
641 enum rtx_code code = GET_CODE (insn);
643 next = NEXT_INSN (insn);
649 int kind = NOTE_LINE_NUMBER (insn);
651 /* Keep a LIFO list of the currently active exception notes. */
652 if (kind == NOTE_INSN_EH_REGION_BEG)
653 eh_list = alloc_INSN_LIST (insn, eh_list);
654 else if (kind == NOTE_INSN_EH_REGION_END)
658 eh_list = XEXP (eh_list, 1);
659 free_INSN_LIST_node (t);
662 /* Look for basic block notes with which to keep the
663 basic_block_info pointers stable. Unthread the note now;
664 we'll put it back at the right place in create_basic_block.
665 Or not at all if we've already found a note in this block. */
666 else if (kind == NOTE_INSN_BASIC_BLOCK)
668 if (bb_note == NULL_RTX)
671 next = flow_delete_insn (insn);
677 /* A basic block starts at a label. If we've closed one off due
678 to a barrier or some such, no need to do it again. */
679 if (head != NULL_RTX)
681 /* While we now have edge lists with which other portions of
682 the compiler might determine a call ending a basic block
683 does not imply an abnormal edge, it will be a bit before
684 everything can be updated. So continue to emit a noop at
685 the end of such a block. */
686 if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
688 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
689 end = emit_insn_after (nop, end);
692 create_basic_block (i++, head, end, bb_note);
700 /* A basic block ends at a jump. */
701 if (head == NULL_RTX)
705 /* ??? Make a special check for table jumps. The way this
706 happens is truly and amazingly gross. We are about to
707 create a basic block that contains just a code label and
708 an addr*vec jump insn. Worse, an addr_diff_vec creates
709 its own natural loop.
711 Prevent this bit of brain damage, pasting things together
712 correctly in make_edges.
714 The correct solution involves emitting the table directly
715 on the tablejump instruction as a note, or JUMP_LABEL. */
717 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
718 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
726 goto new_bb_inclusive;
729 /* A basic block ends at a barrier. It may be that an unconditional
730 jump already closed the basic block -- no need to do it again. */
731 if (head == NULL_RTX)
734 /* While we now have edge lists with which other portions of the
735 compiler might determine a call ending a basic block does not
736 imply an abnormal edge, it will be a bit before everything can
737 be updated. So continue to emit a noop at the end of such a
739 if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
741 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
742 end = emit_insn_after (nop, end);
744 goto new_bb_exclusive;
748 /* Record whether this call created an edge. */
749 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
/* No REG_EH_REGION note is treated as region 1 (may throw).  */
750 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
751 int call_has_abnormal_edge = 0;
753 if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
755 /* Scan each of the alternatives for label refs. */
756 lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
757 lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
758 lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
759 /* Record its tail recursion label, if any. */
760 if (XEXP (PATTERN (insn), 3) != NULL_RTX)
761 trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
764 /* If there is an EH region or rethrow, we have an edge. */
765 if ((eh_list && region > 0)
766 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
767 call_has_abnormal_edge = 1;
768 else if (nonlocal_goto_handler_labels && region >= 0)
769 /* If there is a nonlocal goto label and the specified
770 region number isn't -1, we have an edge. (0 means
771 no throw, but might have a nonlocal goto). */
772 call_has_abnormal_edge = 1;
774 /* A basic block ends at a call that can either throw or
775 do a non-local goto. */
776 if (call_has_abnormal_edge)
779 if (head == NULL_RTX)
784 create_basic_block (i++, head, end, bb_note);
785 head = end = NULL_RTX;
793 if (GET_RTX_CLASS (code) == 'i')
795 if (head == NULL_RTX)
802 if (GET_RTX_CLASS (code) == 'i')
806 /* Make a list of all labels referred to other than by jumps
807 (which just don't have the REG_LABEL notes).
809 Make a special exception for labels followed by an ADDR*VEC,
810 as this would be a part of the tablejump setup code.
812 Make a special exception for the eh_return_stub_label, which
813 we know isn't part of any otherwise visible control flow. */
815 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
816 if (REG_NOTE_KIND (note) == REG_LABEL)
818 rtx lab = XEXP (note, 0), next;
820 if (lab == eh_return_stub_label)
822 else if ((next = next_nonnote_insn (lab)) != NULL
823 && GET_CODE (next) == JUMP_INSN
824 && (GET_CODE (PATTERN (next)) == ADDR_VEC
825 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
827 else if (GET_CODE (lab) == NOTE)
830 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
/* Close out any block still open at the end of the insn chain.  */
835 if (head != NULL_RTX)
836 create_basic_block (i++, head, end, bb_note);
838 flow_delete_insn (bb_note);
/* Sanity check against the count computed by count_basic_blocks.  */
840 if (i != n_basic_blocks)
843 label_value_list = lvl;
844 tail_recursion_label_list = trll;
847 /* Tidy the CFG by deleting unreachable code and whatnot. */
/* NOTE(review): the function signature for this body is missing from
   the extraction (presumably this is cleanup_cfg -- TODO confirm
   against the full file).  Only the visible cleanup calls are kept.  */
853 delete_unreachable_blocks ();
854 move_stray_eh_region_notes ();
855 record_active_eh_regions (f);
857 mark_critical_edges ();
859 /* Kill the data we won't maintain. */
860 free_EXPR_LIST_list (&label_value_list);
861 free_EXPR_LIST_list (&tail_recursion_label_list);
864 /* Create a new basic block consisting of the instructions between
865 HEAD and END inclusive. Reuses the note and basic block struct
866 in BB_NOTE, if any. */
/* NOTE(review): incomplete extraction -- the return type, parameter
   declarations other than the rtx line, braces, the if-condition head
   and the bb->head/bb->end assignments are missing.  Visible logic:
   either recycle the basic_block hanging off BB_NOTE, or allocate a
   fresh one on the function obstack and emit a new basic-block note.  */
869 create_basic_block (index, head, end, bb_note)
871 rtx head, end, bb_note;
876 && ! RTX_INTEGRATED_P (bb_note)
877 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
880 /* If we found an existing note, thread it back onto the chain. */
/* The note goes after a leading CODE_LABEL, otherwise before HEAD.  */
884 if (GET_CODE (head) == CODE_LABEL)
888 after = PREV_INSN (head);
892 if (after != bb_note && NEXT_INSN (after) != bb_note)
893 reorder_insns (bb_note, bb_note, after);
897 /* Otherwise we must create a note and a basic block structure.
898 Since we allow basic block structs in rtl, give the struct
899 the same lifetime by allocating it off the function obstack
900 rather than using malloc. */
902 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
903 memset (bb, 0, sizeof (*bb));
905 if (GET_CODE (head) == CODE_LABEL)
906 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
909 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
912 NOTE_BASIC_BLOCK (bb_note) = bb;
915 /* Always include the bb note in the block. */
916 if (NEXT_INSN (end) == bb_note)
922 BASIC_BLOCK (index) = bb;
924 /* Tag the block so that we know it has been used when considering
925 other basic block notes. */
929 /* Records the basic block struct in BB_FOR_INSN, for every instruction
930 indexed by INSN_UID. MAX is the size of the array. */
/* NOTE(review): incomplete extraction -- return type, braces and the
   inner loop head (walking from bb->head to bb->end) are missing.
   Visible logic: (re)build the uid -> basic_block map for all insns
   of every block.  */
933 compute_bb_for_insn (max)
938 if (basic_block_for_insn)
939 VARRAY_FREE (basic_block_for_insn);
940 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
942 for (i = 0; i < n_basic_blocks; ++i)
944 basic_block bb = BASIC_BLOCK (i);
951 int uid = INSN_UID (insn);
953 VARRAY_BB (basic_block_for_insn, uid) = bb;
956 insn = NEXT_INSN (insn);
961 /* Free the memory associated with the edge structures. */
/* NOTE(review): the signature (presumably clear_edges) and the edge
   free calls inside the loops are missing from this extraction; only
   the traversal skeleton and the final pointer resets are visible.  */
969 for (i = 0; i < n_basic_blocks; ++i)
971 basic_block bb = BASIC_BLOCK (i);
973 for (e = bb->succ; e ; e = n)
983 for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
989 ENTRY_BLOCK_PTR->succ = 0;
990 EXIT_BLOCK_PTR->pred = 0;
995 /* Identify the edges between basic blocks.
997 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
998 that are otherwise unreachable may be reachable with a non-local goto.
1000 BB_EH_END is an array indexed by basic block number in which we record
1001 the list of exception regions active at the end of the basic block. */
/* NOTE(review): incomplete extraction that also runs past the end of
   the visible chunk -- the function is truncated mid-body and many
   interior lines (braces, declarations, some statements) are missing.
   Visible logic: for each block, inspect its last insn and add
   fallthru, jump, tablejump, computed-goto, return, sibcall, EH and
   nonlocal-goto edges as appropriate.  */
1004 make_edges (label_value_list)
1005 rtx label_value_list;
1008 eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
1009 sbitmap *edge_cache = NULL;
1011 /* Assume no computed jump; revise as we create edges. */
1012 current_function_has_computed_jump = 0;
1014 /* Heavy use of computed goto in machine-generated code can lead to
1015 nearly fully-connected CFGs. In that case we spend a significant
1016 amount of time searching the edge lists for duplicates. */
1017 if (forced_labels || label_value_list)
1019 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
1020 sbitmap_vector_zero (edge_cache, n_basic_blocks);
1023 /* By nature of the way these get numbered, block 0 is always the entry. */
1024 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
1026 for (i = 0; i < n_basic_blocks; ++i)
1028 basic_block bb = BASIC_BLOCK (i);
1031 int force_fallthru = 0;
1033 /* Examine the last instruction of the block, and discover the
1034 ways we can leave the block. */
1037 code = GET_CODE (insn);
1040 if (code == JUMP_INSN)
1044 /* ??? Recognize a tablejump and do the right thing. */
1045 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1046 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1047 && GET_CODE (tmp) == JUMP_INSN
1048 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1049 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
/* ADDR_VEC keeps its labels in operand 0, ADDR_DIFF_VEC in operand 1.  */
1054 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1055 vec = XVEC (PATTERN (tmp), 0);
1057 vec = XVEC (PATTERN (tmp), 1);
1059 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1060 make_label_edge (edge_cache, bb,
1061 XEXP (RTVEC_ELT (vec, j), 0), 0);
1063 /* Some targets (eg, ARM) emit a conditional jump that also
1064 contains the out-of-range target. Scan for these and
1065 add an edge if necessary. */
1066 if ((tmp = single_set (insn)) != NULL
1067 && SET_DEST (tmp) == pc_rtx
1068 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1069 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
1070 make_label_edge (edge_cache, bb,
1071 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
1073 #ifdef CASE_DROPS_THROUGH
1074 /* Silly VAXen. The ADDR_VEC is going to be in the way of
1075 us naturally detecting fallthru into the next block. */
1080 /* If this is a computed jump, then mark it as reaching
1081 everything on the label_value_list and forced_labels list. */
1082 else if (computed_jump_p (insn))
1084 current_function_has_computed_jump = 1;
1086 for (x = label_value_list; x; x = XEXP (x, 1))
1087 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
1089 for (x = forced_labels; x; x = XEXP (x, 1))
1090 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
1093 /* A return insn creates an edge to the exit block. */
1094 else if (returnjump_p (insn))
1095 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
1097 /* Otherwise, we have a plain conditional or unconditional jump. */
1100 if (! JUMP_LABEL (insn))
1102 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
1106 /* If this is a sibling call insn, then this is in effect a
1107 combined call and return, and so we need an edge to the
1108 exit block. No need to worry about EH edges, since we
1109 wouldn't have created the sibling call in the first place. */
1111 if (code == CALL_INSN && SIBLING_CALL_P (insn))
1112 make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
1113 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1116 /* If this is a CALL_INSN, then mark it as reaching the active EH
1117 handler for this CALL_INSN. If we're handling asynchronous
1118 exceptions then any insn can reach any of the active handlers.
1120 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
1122 if (code == CALL_INSN || asynchronous_exceptions)
1124 /* Add any appropriate EH edges. We do this unconditionally
1125 since there may be a REG_EH_REGION or REG_EH_RETHROW note
1126 on the call, and this needn't be within an EH region. */
1127 make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);
1129 /* If we have asynchronous exceptions, do the same for *all*
1130 exception regions active in the block. */
1131 if (asynchronous_exceptions
1132 && bb->eh_beg != bb->eh_end)
1134 if (bb->eh_beg >= 0)
1135 make_eh_edge (edge_cache, eh_nest_info, bb,
1136 NULL_RTX, bb->eh_beg);
1138 for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
1139 if (GET_CODE (x) == NOTE
1140 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
1141 || NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
1143 int region = NOTE_EH_HANDLER (x);
1144 make_eh_edge (edge_cache, eh_nest_info, bb,
1149 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1151 /* ??? This could be made smarter: in some cases it's possible
1152 to tell that certain calls will not do a nonlocal goto.
1154 For example, if the nested functions that do the nonlocal
1155 gotos do not have their addresses taken, then only calls to
1156 those functions or to other nested functions that use them
1157 could possibly do nonlocal gotos. */
1158 /* We do know that a REG_EH_REGION note with a value less
1159 than 0 is guaranteed not to perform a non-local goto. */
1160 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1161 if (!note || INTVAL (XEXP (note, 0)) >= 0)
1162 for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
1163 make_label_edge (edge_cache, bb, XEXP (x, 0),
1164 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1168 /* We know something about the structure of the function __throw in
1169 libgcc2.c. It is the only function that ever contains eh_stub
1170 labels. It modifies its return address so that the last block
1171 returns to one of the eh_stub labels within it. So we have to
1172 make additional edges in the flow graph. */
1173 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
1174 make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);
1176 /* Find out if we can drop through to the next block. */
1177 insn = next_nonnote_insn (insn);
1178 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1179 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1180 else if (i + 1 < n_basic_blocks)
1182 rtx tmp = BLOCK_HEAD (i + 1);
1183 if (GET_CODE (tmp) == NOTE)
1184 tmp = next_nonnote_insn (tmp);
1185 if (force_fallthru || insn == tmp)
1186 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1190 free_eh_nesting_info (eh_nest_info);
1192 sbitmap_vector_free (edge_cache);
1195 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1196 about the edge that is accumulated between calls. */
/* NOTE(review): this is an elided listing -- local declarations and several
   statements between the numbered lines are missing from this view. */
1199 make_edge (edge_cache, src, dst, flags)
1200 sbitmap *edge_cache;
1201 basic_block src, dst;
1207 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1208 many edges to them, and we didn't allocate memory for it. */
/* The cache lets us answer "does SRC->DST already exist?" in O(1)
   instead of scanning SRC's successor list below. */
1209 use_edge_cache = (edge_cache
1210 && src != ENTRY_BLOCK_PTR
1211 && dst != EXIT_BLOCK_PTR);
1213 /* Make sure we don't add duplicate edges. */
1214 if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
1215 for (e = src->succ; e ; e = e->succ_next)
/* No duplicate found: allocate the edge and splice it onto the head of
   both SRC's successor list and DST's predecessor list. */
1222 e = (edge) xcalloc (1, sizeof (*e));
1225 e->succ_next = src->succ;
1226 e->pred_next = dst->pred;
/* Record the new edge in the cache (presumably guarded by
   use_edge_cache -- the guard is elided in this listing). */
1235 SET_BIT (edge_cache[src->index], dst->index);
1238 /* Create an edge from a basic block to a label. */
1241 make_label_edge (edge_cache, src, label, flags)
1242 sbitmap *edge_cache;
/* Silently ignore anything that is not a CODE_LABEL; only labels can
   head a basic block. (Early-return body elided.) */
1247 if (GET_CODE (label) != CODE_LABEL)
1250 /* If the label was never emitted, this insn is junk, but avoid a
1251 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1252 as a result of a syntax error and a diagnostic has already been
1255 if (INSN_UID (label) == 0)
/* Delegate to make_edge, targeting the block that contains LABEL. */
1258 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1261 /* Create the edges generated by INSN in REGION. */
1264 make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
1265 sbitmap *edge_cache;
1266 eh_nesting_info *eh_nest_info;
1271 handler_info **handler_list;
/* If INSN is a call, the EH edges are also abnormal-call edges. */
1274 is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1275 num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
/* Emit one abnormal EH edge per reachable handler.  The loop header
   over NUM entries is elided from this listing. */
1278 make_label_edge (edge_cache, src, handler_list[num]->handler_label,
1279 EDGE_ABNORMAL | EDGE_EH | is_call);
1283 /* EH_REGION notes appearing between basic blocks is ambiguous, and even
1284 dangerous if we intend to move basic blocks around. Move such notes
1285 into the following block. */
1288 move_stray_eh_region_notes ()
/* With fewer than two blocks there is no "between". */
1293 if (n_basic_blocks < 2)
/* Walk adjacent pairs (b1, b2) from the end of the function downward. */
1296 b2 = BASIC_BLOCK (n_basic_blocks - 1);
1297 for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
1299 rtx insn, next, list = NULL_RTX;
1301 b1 = BASIC_BLOCK (i);
/* Scan the gap between b1's end and b2's head, collecting any EH
   region begin/end notes found there. */
1302 for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
1304 next = NEXT_INSN (insn);
1305 if (GET_CODE (insn) == NOTE
1306 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1307 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1309 /* Unlink from the insn chain. */
1310 NEXT_INSN (PREV_INSN (insn)) = next;
1311 PREV_INSN (next) = PREV_INSN (insn);
/* Push onto LIST, reusing the NEXT_INSN field as the link. */
1314 NEXT_INSN (insn) = list;
1319 if (list == NULL_RTX)
1322 /* Find where to insert these things. */
/* Skip past a leading CODE_LABEL so the notes land inside b2 proper. */
1324 if (GET_CODE (insn) == CODE_LABEL)
1325 insn = NEXT_INSN (insn);
/* Re-insert the collected notes one at a time (enclosing loop and
   list-advance statements are elided in this listing). */
1329 next = NEXT_INSN (list);
1330 add_insn_after (list, insn);
1336 /* Recompute eh_beg/eh_end for each basic block. */
/* F is presumably the first insn of the function -- TODO confirm;
   the parameter declaration is elided from this listing. */
1339 record_active_eh_regions (f)
1342 rtx insn, eh_list = NULL_RTX;
1344 basic_block bb = BASIC_BLOCK (0);
/* EH_LIST is maintained as a stack of open NOTE_INSN_EH_REGION_BEG
   notes; its head is the innermost region active at the current insn. */
1346 for (insn = f; insn ; insn = NEXT_INSN (insn))
1348 if (bb->head == insn)
1349 bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1351 if (GET_CODE (insn) == NOTE)
1353 int kind = NOTE_LINE_NUMBER (insn);
1354 if (kind == NOTE_INSN_EH_REGION_BEG)
1355 eh_list = alloc_INSN_LIST (insn, eh_list);
1356 else if (kind == NOTE_INSN_EH_REGION_END)
/* Pop the innermost region and free its list node. */
1358 rtx t = XEXP (eh_list, 1);
1359 free_INSN_LIST_node (eh_list);
1364 if (bb->end == insn)
1366 bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
/* Advance to the next block; return once all blocks are processed
   (increment of I is elided in this listing). */
1368 if (i == n_basic_blocks)
1370 bb = BASIC_BLOCK (i);
1375 /* Identify critical edges and set the bits appropriately. */
/* A critical edge is one whose source has multiple successors AND whose
   destination has multiple predecessors; such edges cannot have code
   inserted on them without splitting. */
1378 mark_critical_edges ()
1380 int i, n = n_basic_blocks;
1383 /* We begin with the entry block. This is not terribly important now,
1384 but could be if a front end (Fortran) implemented alternate entry
1386 bb = ENTRY_BLOCK_PTR;
1393 /* (1) Critical edges must have a source with multiple successors. */
1394 if (bb->succ && bb->succ->succ_next)
1396 for (e = bb->succ; e ; e = e->succ_next)
1398 /* (2) Critical edges must have a destination with multiple
1399 predecessors. Note that we know there is at least one
1400 predecessor -- the edge we followed to get here. */
1401 if (e->dest->pred->pred_next)
1402 e->flags |= EDGE_CRITICAL;
1404 e->flags &= ~EDGE_CRITICAL;
/* Source has at most one successor: none of its edges are critical. */
1409 for (e = bb->succ; e ; e = e->succ_next)
1410 e->flags &= ~EDGE_CRITICAL;
/* Advance to the next block (loop control is elided in this listing). */
1415 bb = BASIC_BLOCK (i);
1419 /* Split a (typically critical) edge. Return the new block.
1420 Abort on abnormal edges.
1422 ??? The code generally expects to be called on critical edges.
1423 The case of a block ending in an unconditional jump to a
1424 block with multiple predecessors is not handled optimally. */
/* NOTE(review): heavily elided listing -- braces, declarations, aborts
   and several statements are missing between the numbered lines. */
1427 split_edge (edge_in)
1430 basic_block old_pred, bb, old_succ;
1435 /* Abnormal edges cannot be split. */
1436 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1439 old_pred = edge_in->src;
1440 old_succ = edge_in->dest;
1442 /* Remove the existing edge from the destination's pred list. */
1445 for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
1447 *pp = edge_in->pred_next;
1448 edge_in->pred_next = NULL;
1451 /* Create the new structures. */
1452 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
1453 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1456 memset (bb, 0, sizeof (*bb));
1458 /* ??? This info is likely going to be out of date very soon. */
/* Seed the new block's liveness from OLD_SUCC's live-at-start; both
   ends of BB get the same set since BB starts out empty. */
1459 if (old_succ->global_live_at_start)
1461 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
1462 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
1463 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1464 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
/* Wire the CFG: EDGE_IN now targets BB (redirection elided), and
   EDGE_OUT is a fallthru from BB to OLD_SUCC. */
1469 bb->succ = edge_out;
1470 bb->count = edge_in->count;
/* Splitting is exactly what un-criticalizes the incoming edge. */
1473 edge_in->flags &= ~EDGE_CRITICAL;
1475 edge_out->pred_next = old_succ->pred;
1476 edge_out->succ_next = NULL;
1478 edge_out->dest = old_succ;
1479 edge_out->flags = EDGE_FALLTHRU;
1480 edge_out->probability = REG_BR_PROB_BASE;
1481 edge_out->count = edge_in->count;
1483 old_succ->pred = edge_out;
1485 /* Tricky case -- if there existed a fallthru into the successor
1486 (and we're not it) we must add a new unconditional jump around
1487 the new block we're actually interested in.
1489 Further, if that edge is critical, this means a second new basic
1490 block must be created to hold it. In order to simplify correct
1491 insn placement, do this before we touch the existing basic block
1492 ordering for the block we were really wanting. */
1493 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
/* Search OLD_SUCC's other predecessors for a fallthru edge E. */
1496 for (e = edge_out->pred_next; e ; e = e->pred_next)
1497 if (e->flags & EDGE_FALLTHRU)
1502 basic_block jump_block;
1505 if ((e->flags & EDGE_CRITICAL) == 0
1506 && e->src != ENTRY_BLOCK_PTR)
1508 /* Non critical -- we can simply add a jump to the end
1509 of the existing predecessor. */
1510 jump_block = e->src;
1514 /* We need a new block to hold the jump. The simplest
1515 way to do the bulk of the work here is to recursively
1517 jump_block = split_edge (e);
1518 e = jump_block->succ;
1521 /* Now add the jump insn ... */
1522 pos = emit_jump_insn_after (gen_jump (old_succ->head),
1524 jump_block->end = pos;
1525 if (basic_block_for_insn)
1526 set_block_for_insn (pos, jump_block);
1527 emit_barrier_after (pos);
1529 /* ... let jump know that label is in use, ... */
1530 JUMP_LABEL (pos) = old_succ->head;
1531 ++LABEL_NUSES (old_succ->head);
1533 /* ... and clear fallthru on the outgoing edge. */
1534 e->flags &= ~EDGE_FALLTHRU;
1536 /* Continue splitting the interesting edge. */
1540 /* Place the new block just in front of the successor. */
/* Shift the block array up by one slot to open index J for BB. */
1541 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1542 if (old_succ == EXIT_BLOCK_PTR)
1543 j = n_basic_blocks - 1;
1545 j = old_succ->index;
1546 for (i = n_basic_blocks - 1; i > j; --i)
1548 basic_block tmp = BASIC_BLOCK (i - 1);
1549 BASIC_BLOCK (i) = tmp;
1552 BASIC_BLOCK (i) = bb;
1555 /* Create the basic block note.
1557 Where we place the note can have a noticable impact on the generated
1558 code. Consider this cfg:
1569 If we need to insert an insn on the edge from block 0 to block 1,
1570 we want to ensure the instructions we insert are outside of any
1571 loop notes that physically sit between block 0 and block 1. Otherwise
1572 we confuse the loop optimizer into thinking the loop is a phony. */
1573 if (old_succ != EXIT_BLOCK_PTR
1574 && PREV_INSN (old_succ->head)
1575 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
1576 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
1577 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1578 PREV_INSN (old_succ->head));
1579 else if (old_succ != EXIT_BLOCK_PTR)
1580 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1582 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
1583 NOTE_BASIC_BLOCK (bb_note) = bb;
1584 bb->head = bb->end = bb_note;
1586 /* Not quite simple -- for non-fallthru edges, we must adjust the
1587 predecessor's jump instruction to target our new block. */
1588 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1590 rtx tmp, insn = old_pred->end;
1591 rtx old_label = old_succ->head;
1592 rtx new_label = gen_label_rtx ();
1594 if (GET_CODE (insn) != JUMP_INSN)
1597 /* ??? Recognize a tablejump and adjust all matching cases. */
1598 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1599 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1600 && GET_CODE (tmp) == JUMP_INSN
1601 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1602 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
/* For ADDR_DIFF_VEC the label vector is operand 1, not 0. */
1607 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1608 vec = XVEC (PATTERN (tmp), 0);
1610 vec = XVEC (PATTERN (tmp), 1);
/* Retarget every table entry that pointed at OLD_LABEL, keeping
   the label use counts consistent. */
1612 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1613 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1615 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1616 --LABEL_NUSES (old_label);
1617 ++LABEL_NUSES (new_label);
1620 /* Handle casesi dispatch insns */
1621 if ((tmp = single_set (insn)) != NULL
1622 && SET_DEST (tmp) == pc_rtx
1623 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1624 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1625 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1627 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1629 --LABEL_NUSES (old_label);
1630 ++LABEL_NUSES (new_label);
1635 /* This would have indicated an abnormal edge. */
1636 if (computed_jump_p (insn))
1639 /* A return instruction can't be redirected. */
1640 if (returnjump_p (insn))
1643 /* If the insn doesn't go where we think, we're confused. */
1644 if (JUMP_LABEL (insn) != old_label)
1647 redirect_jump (insn, new_label, 0);
/* The new label becomes the head of BB, emitted before its bb note. */
1650 emit_label_before (new_label, bb_note);
1651 bb->head = new_label;
1657 /* Queue instructions for insertion on an edge between two basic blocks.
1658 The new instructions and basic blocks (if any) will not appear in the
1659 CFG until commit_edge_insertions is called. */
1662 insert_insn_on_edge (pattern, e)
1666 /* We cannot insert instructions on an abnormal critical edge.
1667 It will be easier to find the culprit if we die now. */
1668 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1669 == (EDGE_ABNORMAL|EDGE_CRITICAL))
/* Append PATTERN to any sequence already queued on E, using the
   emit machinery's sequence stack. */
1672 if (e->insns == NULL_RTX)
1675 push_to_sequence (e->insns);
1677 emit_insn (pattern);
/* Stash the (possibly grown) sequence back on the edge. */
1679 e->insns = get_insns ();
1683 /* Update the CFG for the instructions queued on edge E. */
/* NOTE(review): elided listing -- aborts, braces and several statements
   between the numbered lines are missing from this view. */
1686 commit_one_edge_insertion (e)
1689 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
1692 /* Pull the insns off the edge now since the edge might go away. */
1694 e->insns = NULL_RTX;
1696 /* Figure out where to put these things. If the destination has
1697 one predecessor, insert there. Except for the exit block. */
1698 if (e->dest->pred->pred_next == NULL
1699 && e->dest != EXIT_BLOCK_PTR)
1703 /* Get the location correct wrt a code label, and "nice" wrt
1704 a basic block note, and before everything else. */
1706 if (GET_CODE (tmp) == CODE_LABEL)
1707 tmp = NEXT_INSN (tmp)
1708 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1709 tmp = NEXT_INSN (tmp);
1710 if (tmp == bb->head)
/* Otherwise insert after the insn preceding TMP. */
1713 after = PREV_INSN (tmp);
1716 /* If the source has one successor and the edge is not abnormal,
1717 insert there. Except for the entry block. */
1718 else if ((e->flags & EDGE_ABNORMAL) == 0
1719 && e->src->succ->succ_next == NULL
1720 && e->src != ENTRY_BLOCK_PTR)
1723 /* It is possible to have a non-simple jump here. Consider a target
1724 where some forms of unconditional jumps clobber a register. This
1725 happens on the fr30 for example.
1727 We know this block has a single successor, so we can just emit
1728 the queued insns before the jump. */
1729 if (GET_CODE (bb->end) == JUMP_INSN)
1735 /* We'd better be fallthru, or we've lost track of what's what. */
1736 if ((e->flags & EDGE_FALLTHRU) == 0)
1743 /* Otherwise we must split the edge. */
1746 bb = split_edge (e);
1750 /* Now that we've found the spot, do the insertion. */
1752 /* Set the new block number for these insns, if structure is allocated. */
1753 if (basic_block_for_insn)
1756 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
1757 set_block_for_insn (i, bb);
/* Exactly one of BEFORE/AFTER was chosen above (branch elided). */
1762 emit_insns_before (insns, before);
1763 if (before == bb->head)
1766 last = prev_nonnote_insn (before);
1770 last = emit_insns_after (insns, after);
1771 if (after == bb->end)
/* If the inserted sequence ends in a return, the block now exits. */
1775 if (returnjump_p (last))
1777 /* ??? Remove all outgoing edges from BB and add one for EXIT.
1778 This is not currently a problem because this only happens
1779 for the (single) epilogue, which already has a fallthru edge
1783 if (e->dest != EXIT_BLOCK_PTR
1784 || e->succ_next != NULL
1785 || (e->flags & EDGE_FALLTHRU) == 0)
1787 e->flags &= ~EDGE_FALLTHRU;
1789 emit_barrier_after (last);
/* The old BEFORE insn is dead in this case; delete it. */
1793 flow_delete_insn (before);
1795 else if (GET_CODE (last) == JUMP_INSN)
1799 /* Update the CFG for all queued instructions. */
1802 commit_edge_insertions ()
1807 #ifdef ENABLE_CHECKING
1808 verify_flow_info ();
/* Walk every block starting at ENTRY, committing the queued insns on
   each outgoing edge.  NEXT is saved first because committing may
   restructure the edge list. */
1812 bb = ENTRY_BLOCK_PTR;
1817 for (e = bb->succ; e ; e = next)
1819 next = e->succ_next;
1821 commit_one_edge_insertion (e);
/* Advance to the next block; terminate past the last one. */
1824 if (++i >= n_basic_blocks)
1826 bb = BASIC_BLOCK (i);
1830 /* Delete all unreachable basic blocks. */
/* Standard worklist reachability: seed with ENTRY's successors, mark
   everything reachable via bb->aux, then delete unmarked blocks. */
1833 delete_unreachable_blocks ()
1835 basic_block *worklist, *tos;
1836 int deleted_handler;
/* TOS is the worklist stack pointer; at most N blocks can be queued. */
1841 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
1843 /* Use basic_block->aux as a marker. Clear them all. */
1845 for (i = 0; i < n; ++i)
1846 BASIC_BLOCK (i)->aux = NULL;
1848 /* Add our starting points to the worklist. Almost always there will
1849 be only one. It isn't inconcievable that we might one day directly
1850 support Fortran alternate entry points. */
1852 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
1856 /* Mark the block with a handy non-null value. */
1860 /* Iterate: find everything reachable from what we've already seen. */
1862 while (tos != worklist)
1864 basic_block b = *--tos;
1866 for (e = b->succ; e ; e = e->succ_next)
1874 /* Delete all unreachable basic blocks. Count down so that we don't
1875 interfere with the block renumbering that happens in flow_delete_block. */
1877 deleted_handler = 0;
1879 for (i = n - 1; i >= 0; --i)
1881 basic_block b = BASIC_BLOCK (i);
1884 /* This block was found. Tidy up the mark. */
/* Unmarked: unreachable.  flow_delete_block reports whether an EH
   handler was among the deleted insns. */
1887 deleted_handler |= flow_delete_block (b);
1890 tidy_fallthru_edges ();
1892 /* If we deleted an exception handler, we may have EH region begin/end
1893 blocks to remove as well. */
1894 if (deleted_handler)
1895 delete_eh_regions ();
1900 /* Find EH regions for which there is no longer a handler, and delete them. */
1903 delete_eh_regions ()
/* Refresh rethrow bookkeeping before testing rethrow_used below. */
1907 update_rethrow_references ();
1909 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1910 if (GET_CODE (insn) == NOTE)
1912 if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
1913 (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1915 int num = NOTE_EH_HANDLER (insn);
1916 /* A NULL handler indicates a region is no longer needed,
1917 as long as its rethrow label isn't used. */
1918 if (get_first_handler (num) == NULL && ! rethrow_used (num))
/* Kill the note in place rather than unlinking it. */
1920 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1921 NOTE_SOURCE_FILE (insn) = 0;
1927 /* Return true if NOTE is not one of the ones that must be kept paired,
1928 so that we may simply delete them. */
1931 can_delete_note_p (note)
/* Only already-deleted notes and basic-block notes are freely
   deletable; paired notes (loop, EH region, block begin/end) are not. */
1934 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
1935 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
1938 /* Unlink a chain of insns between START and FINISH, leaving notes
1939 that must be paired. */
1942 flow_delete_insn_chain (start, finish)
1945 /* Unchain the insns one by one. It would be quicker to delete all
1946 of these with a single unchaining, rather than one at a time, but
1947 we need to keep the NOTE's. */
/* Loop over [START, FINISH]; loop header is elided in this listing. */
1953 next = NEXT_INSN (start);
1954 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
1956 else if (GET_CODE (start) == CODE_LABEL
1957 && ! can_delete_label_p (start))
/* An undeletable label is demoted to a DELETED_LABEL note so its
   name survives for debug output. */
1959 const char *name = LABEL_NAME (start);
1960 PUT_CODE (start, NOTE);
1961 NOTE_LINE_NUMBER (start) = NOTE_INSN_DELETED_LABEL;
1962 NOTE_SOURCE_FILE (start) = name;
1965 next = flow_delete_insn (start);
1967 if (start == finish)
1973 /* Delete the insns in a (non-live) block. We physically delete every
1974 non-deleted-note insn, and update the flow graph appropriately.
1976 Return nonzero if we deleted an exception handler. */
1978 /* ??? Preserving all such notes strikes me as wrong. It would be nice
1979 to post-process the stream to remove empty blocks, loops, ranges, etc. */
1982 flow_delete_block (b)
1985 int deleted_handler = 0;
1988 /* If the head of this block is a CODE_LABEL, then it might be the
1989 label for an exception handler which can't be reached.
1991 We need to remove the label from the exception_handler_label list
1992 and remove the associated NOTE_INSN_EH_REGION_BEG and
1993 NOTE_INSN_EH_REGION_END notes. */
1997 never_reached_warning (insn);
1999 if (GET_CODE (insn) == CODE_LABEL)
2001 rtx x, *prev = &exception_handler_labels;
2003 for (x = exception_handler_labels; x; x = XEXP (x, 1))
2005 if (XEXP (x, 0) == insn)
2007 /* Found a match, splice this label out of the EH label list. */
2008 *prev = XEXP (x, 1);
2009 XEXP (x, 1) = NULL_RTX;
2010 XEXP (x, 0) = NULL_RTX;
2012 /* Remove the handler from all regions */
2013 remove_handler (insn);
2014 deleted_handler = 1;
/* No match yet: keep PREV pointing at the link to patch. */
2017 prev = &XEXP (x, 1);
2021 /* Include any jump table following the basic block. */
/* Same tablejump pattern as elsewhere in this file: the jump's label
   is immediately followed by an ADDR_VEC/ADDR_DIFF_VEC jump insn. */
2023 if (GET_CODE (end) == JUMP_INSN
2024 && (tmp = JUMP_LABEL (end)) != NULL_RTX
2025 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
2026 && GET_CODE (tmp) == JUMP_INSN
2027 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
2028 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
2031 /* Include any barrier that may follow the basic block. */
2032 tmp = next_nonnote_insn (end);
2033 if (tmp && GET_CODE (tmp) == BARRIER)
2036 /* Selectively delete the entire chain. */
2037 flow_delete_insn_chain (insn, end);
2039 /* Remove the edges into and out of this block. Note that there may
2040 indeed be edges in, if we are removing an unreachable loop. */
/* For each edge, unsplice it from the other endpoint's list too. */
2044 for (e = b->pred; e ; e = next)
2046 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
2049 next = e->pred_next;
2053 for (e = b->succ; e ; e = next)
2055 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
2058 next = e->succ_next;
2067 /* Remove the basic block from the array, and compact behind it. */
2070 return deleted_handler;
2073 /* Remove block B from the basic block array and compact behind it. */
/* NOTE(review): the function signature line is elided from this listing;
   this body shifts every block after B->index down one slot.  The
   per-block index update inside the loop is also elided. */
2079 int i, n = n_basic_blocks;
2081 for (i = b->index; i + 1 < n; ++i)
2083 basic_block x = BASIC_BLOCK (i + 1);
2084 BASIC_BLOCK (i) = x;
/* Shrink the varray; n_basic_blocks bookkeeping is elided here. */
2088 basic_block_info->num_elements--;
2092 /* Delete INSN by patching it out. Return the next insn. */
2095 flow_delete_insn (insn)
2098 rtx prev = PREV_INSN (insn);
2099 rtx next = NEXT_INSN (insn);
/* Detach INSN from the doubly-linked chain and mark it deleted. */
2102 PREV_INSN (insn) = NULL_RTX;
2103 NEXT_INSN (insn) = NULL_RTX;
2104 INSN_DELETED_P (insn) = 1;
2107 NEXT_INSN (prev) = next;
2109 PREV_INSN (next) = prev;
/* INSN was the last insn: update the stream's tail pointer. */
2111 set_last_insn (prev);
/* Dropping a label also drops any nonlocal-goto-handler reference. */
2113 if (GET_CODE (insn) == CODE_LABEL)
2114 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2116 /* If deleting a jump, decrement the use count of the label. Deleting
2117 the label itself should happen in the normal course of block merging. */
2118 if (GET_CODE (insn) == JUMP_INSN
2119 && JUMP_LABEL (insn)
2120 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
2121 LABEL_NUSES (JUMP_LABEL (insn))--;
2123 /* Also if deleting an insn that references a label. */
2124 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
2125 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2126 LABEL_NUSES (XEXP (note, 0))--;
2131 /* True if a given label can be deleted. */
2134 can_delete_label_p (label)
/* A label must be kept if it is explicitly preserved, forced, has its
   value taken, is an EH handler label, or is user-declared. */
2139 if (LABEL_PRESERVE_P (label))
2142 for (x = forced_labels; x ; x = XEXP (x, 1))
2143 if (label == XEXP (x, 0))
2145 for (x = label_value_list; x ; x = XEXP (x, 1))
2146 if (label == XEXP (x, 0))
2148 for (x = exception_handler_labels; x ; x = XEXP (x, 1))
2149 if (label == XEXP (x, 0))
2152 /* User declared labels must be preserved. */
2153 if (LABEL_NAME (label) != 0)
/* Nonzero if LABEL is on tail_recursion_label_list (return statements
   are elided from this listing). */
2160 tail_recursion_label_p (label)
2165 for (x = tail_recursion_label_list; x ; x = XEXP (x, 1))
2166 if (label == XEXP (x, 0))
2172 /* Blocks A and B are to be merged into a single block A. The insns
2173 are already contiguous, hence `nomove'. */
2176 merge_blocks_nomove (a, b)
2180 rtx b_head, b_end, a_end;
2181 rtx del_first = NULL_RTX, del_last = NULL_RTX;
/* DEL_FIRST..DEL_LAST accumulates the span of insns to be removed:
   B's label, B's bb note, and A's trailing jump. */
2184 /* If there was a CODE_LABEL beginning B, delete it. */
2187 if (GET_CODE (b_head) == CODE_LABEL
2189 /* Detect basic blocks with nothing but a label. This can happen
2190 in particular at the end of a function. */
2191 if (b_head == b_end)
2193 del_first = del_last = b_head;
2194 b_head = NEXT_INSN (b_head);
2197 /* Delete the basic block note. */
2198 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
2200 if (b_head == b_end)
2205 b_head = NEXT_INSN (b_head);
2208 /* If there was a jump out of A, delete it. */
2210 if (GET_CODE (a_end) == JUMP_INSN)
2214 prev = prev_nonnote_insn (a_end);
2221 /* If this was a conditional jump, we need to also delete
2222 the insn that set cc0. */
2223 if (prev && sets_cc0_p (prev))
2226 prev = prev_nonnote_insn (prev);
2235 else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
2236 del_first = NEXT_INSN (a_end);
2238 /* Delete everything marked above as well as crap that might be
2239 hanging out between the two blocks. */
2240 flow_delete_insn_chain (del_first, del_last);
2242 /* Normally there should only be one successor of A and that is B, but
2243 partway though the merge of blocks for conditional_execution we'll
2244 be merging a TEST block with THEN and ELSE successors. Free the
2245 whole lot of them and hope the caller knows what they're doing. */
2247 remove_edge (a->succ);
2249 /* Adjust the edges out of B for the new owner. */
2250 for (e = b->succ; e ; e = e->succ_next)
2254 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
2255 b->pred = b->succ = NULL;
2257 /* Reassociate the insns of B with A. */
2260 if (basic_block_for_insn)
2262 BLOCK_FOR_INSN (b_head) = a;
2263 while (b_head != b_end)
2265 b_head = NEXT_INSN (b_head);
2266 BLOCK_FOR_INSN (b_head) = a;
2276 /* Blocks A and B are to be merged into a single block. A has no incoming
2277 fallthru edge, so it can be moved before B without adding or modifying
2278 any jumps (aside from the jump from A to B). */
2281 merge_blocks_move_predecessor_nojumps (a, b)
2284 rtx start, end, barrier;
/* A is followed by a barrier; remove it before relocating A's insns. */
2290 barrier = next_nonnote_insn (end);
2291 if (GET_CODE (barrier) != BARRIER)
2293 flow_delete_insn (barrier);
2295 /* Move block and loop notes out of the chain so that we do not
2296 disturb their order.
2298 ??? A better solution would be to squeeze out all the non-nested notes
2299 and adjust the block trees appropriately. Even better would be to have
2300 a tighter connection between block trees and rtl so that this is not
2302 start = squeeze_notes (start, end);
2304 /* Scramble the insn chain. */
2305 if (end != PREV_INSN (b->head))
2306 reorder_insns (start, end, PREV_INSN (b->head));
2310 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2311 a->index, b->index);
2314 /* Swap the records for the two blocks around. Although we are deleting B,
2315 A is now where B was and we want to compact the BB array from where
2317 BASIC_BLOCK(a->index) = b;
2318 BASIC_BLOCK(b->index) = a;
/* Index fixups for the swap (B's index update is elided here). */
2320 a->index = b->index;
2323 /* Now blocks A and B are contiguous. Merge them. */
2324 merge_blocks_nomove (a, b);
2329 /* Blocks A and B are to be merged into a single block. B has no outgoing
2330 fallthru edge, so it can be moved after A without adding or modifying
2331 any jumps (aside from the jump from A to B). */
2334 merge_blocks_move_successor_nojumps (a, b)
2337 rtx start, end, barrier;
2341 barrier = NEXT_INSN (end);
2343 /* Recognize a jump table following block B. */
/* A dispatch table is a CODE_LABEL followed by an ADDR_VEC or
   ADDR_DIFF_VEC jump insn; it must travel with B. */
2344 if (GET_CODE (barrier) == CODE_LABEL
2345 && NEXT_INSN (barrier)
2346 && GET_CODE (NEXT_INSN (barrier)) == JUMP_INSN
2347 && (GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_VEC
2348 || GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_DIFF_VEC))
2350 end = NEXT_INSN (barrier);
2351 barrier = NEXT_INSN (end);
2354 /* There had better have been a barrier there. Delete it. */
2355 if (GET_CODE (barrier) != BARRIER)
2357 flow_delete_insn (barrier);
2359 /* Move block and loop notes out of the chain so that we do not
2360 disturb their order.
2362 ??? A better solution would be to squeeze out all the non-nested notes
2363 and adjust the block trees appropriately. Even better would be to have
2364 a tighter connection between block trees and rtl so that this is not
2366 start = squeeze_notes (start, end);
2368 /* Scramble the insn chain. */
2369 reorder_insns (start, end, a->end);
2371 /* Now blocks A and B are contiguous. Merge them. */
2372 merge_blocks_nomove (a, b);
2376 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2377 b->index, a->index);
2383 /* Attempt to merge basic blocks that are potentially non-adjacent.
2384 Return true iff the attempt succeeded. */
/* E is the edge from B to C (parameter declarations elided). */
2387 merge_blocks (e, b, c)
2391 /* If C has a tail recursion label, do not merge. There is no
2392 edge recorded from the call_placeholder back to this label, as
2393 that would make optimize_sibling_and_tail_recursive_calls more
2394 complex for no gain. */
2395 if (GET_CODE (c->head) == CODE_LABEL
2396 && tail_recursion_label_p (c->head))
2399 /* If B has a fallthru edge to C, no need to move anything. */
2400 if (e->flags & EDGE_FALLTHRU)
2402 merge_blocks_nomove (b, c);
2406 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2407 b->index, c->index);
/* Non-fallthru case: decide whether B can move before C, or C after
   B, without breaking any existing fallthru or EH region nesting. */
2416 int c_has_outgoing_fallthru;
2417 int b_has_incoming_fallthru;
2419 /* We must make sure to not munge nesting of exception regions,
2420 lexical blocks, and loop notes.
2422 The first is taken care of by requiring that the active eh
2423 region at the end of one block always matches the active eh
2424 region at the beginning of the next block.
2426 The later two are taken care of by squeezing out all the notes. */
2428 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2429 executed and we may want to treat blocks which have two out
2430 edges, one normal, one abnormal as only having one edge for
2431 block merging purposes. */
2433 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2434 if (tmp_edge->flags & EDGE_FALLTHRU)
2436 c_has_outgoing_fallthru = (tmp_edge != NULL);
2438 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2439 if (tmp_edge->flags & EDGE_FALLTHRU)
2441 b_has_incoming_fallthru = (tmp_edge != NULL);
2443 /* If B does not have an incoming fallthru, and the exception regions
2444 match, then it can be moved immediately before C without introducing
2447 C can not be the first block, so we do not have to worry about
2448 accessing a non-existent block. */
2449 d = BASIC_BLOCK (c->index - 1);
2450 if (! b_has_incoming_fallthru
2451 && d->eh_end == b->eh_beg
2452 && b->eh_end == c->eh_beg)
2453 return merge_blocks_move_predecessor_nojumps (b, c);
2455 /* Otherwise, we're going to try to move C after B. Make sure the
2456 exception regions match.
2458 If B is the last basic block, then we must not try to access the
2459 block structure for block B + 1. Luckily in that case we do not
2460 need to worry about matching exception regions. */
2461 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2462 if (b->eh_end == c->eh_beg
2463 && (d == NULL || c->eh_end == d->eh_beg))
2465 /* If C does not have an outgoing fallthru, then it can be moved
2466 immediately after B without introducing or modifying jumps. */
2467 if (! c_has_outgoing_fallthru)
2468 return merge_blocks_move_successor_nojumps (b, c);
2470 /* Otherwise, we'll need to insert an extra jump, and possibly
2471 a new block to contain it. */
2472 /* ??? Not implemented yet. */
2479 /* Top level driver for merge_blocks. */
/* NOTE(review): the function signature line is elided from this listing.
   Repeatedly collapses single-succ/single-pred block pairs. */
2486 /* Attempt to merge blocks as made possible by edge removal. If a block
2487 has only one successor, and the successor has only one predecessor,
2488 they may be combined. */
2490 for (i = 0; i < n_basic_blocks; )
2492 basic_block c, b = BASIC_BLOCK (i);
2495 /* A loop because chains of blocks might be combineable. */
/* Merge only when B's sole, non-EH successor edge leads to a real
   block C with a sole predecessor, and B's terminating jump (if any)
   has no side effects beyond the jump itself. */
2496 while ((s = b->succ) != NULL
2497 && s->succ_next == NULL
2498 && (s->flags & EDGE_EH) == 0
2499 && (c = s->dest) != EXIT_BLOCK_PTR
2500 && c->pred->pred_next == NULL
2501 /* If the jump insn has side effects, we can't kill the edge. */
2502 && (GET_CODE (b->end) != JUMP_INSN
2503 || onlyjump_p (b->end))
2504 && merge_blocks (s, b, c))
2507 /* Don't get confused by the index shift caused by deleting blocks. */
2512 /* The given edge should potentially be a fallthru edge. If that is in
2513 fact true, delete the jump and barriers that are in the way. */
2516 tidy_fallthru_edge (e, b, c)
2522 /* ??? In a late-running flow pass, other folks may have deleted basic
2523 blocks by nopping out blocks, leaving multiple BARRIERs between here
2524 and the target label. They ought to be chastized and fixed.
2526 We can also wind up with a sequence of undeletable labels between
2527 one block and the next.
2529 So search through a sequence of barriers, labels, and notes for
2530 the head of block C and assert that we really do fall through. */
2532 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2535 /* Remove what will soon cease being the jump insn from the source block.
2536 If block B consisted only of this single jump, turn it into a deleted
/* Q is presumably B's ending jump insn -- its assignment is elided
   from this listing. */
2539 if (GET_CODE (q) == JUMP_INSN
2541 && (any_uncondjump_p (q)
2542 || (b->succ == e && e->succ_next == NULL)))
2545 /* If this was a conditional jump, we need to also delete
2546 the insn that set cc0. */
2547 if (any_condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
/* Demote the jump to a deleted note (note-conversion lines partly
   elided here). */
2554 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2555 NOTE_SOURCE_FILE (q) = 0;
/* B now ends at the insn before the removed jump. */
2558 b->end = q = PREV_INSN (q);
2561 /* Selectively unlink the sequence. */
2562 if (q != PREV_INSN (c->head))
2563 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
/* With the jump and barriers gone, E really is a fallthru edge. */
2565 e->flags |= EDGE_FALLTHRU;
2568 /* Fix up edges that now fall through, or rather should now fall through
2569 but previously required a jump around now deleted blocks. Simplify
2570 the search by only examining blocks numerically adjacent, since this
2571 is how find_basic_blocks created them. */
2574 tidy_fallthru_edges ()
/* Walk each pair of adjacent blocks (B = block i-1, C = block i). */
2578 for (i = 1; i < n_basic_blocks; ++i)
2580 basic_block b = BASIC_BLOCK (i - 1);
2581 basic_block c = BASIC_BLOCK (i);
2584 /* We care about simple conditional or unconditional jumps with
2587 If we had a conditional branch to the next instruction when
2588 find_basic_blocks was called, then there will only be one
2589 out edge for the block which ended with the conditional
2590 branch (since we do not create duplicate edges).
2592 Furthermore, the edge will be marked as a fallthru because we
2593 merge the flags for the duplicate edges. So we do not want to
2594 check that the edge is not a FALLTHRU edge. */
2595 if ((s = b->succ) != NULL
2596 && s->succ_next == NULL
2598 /* If the jump insn has side effects, we can't tidy the edge. */
2599 && (GET_CODE (b->end) != JUMP_INSN
2600 || onlyjump_p (b->end)))
2601 tidy_fallthru_edge (s, b, c);
2605 /* Perform data flow analysis.
2606 F is the first insn of the function; FLAGS is a set of PROP_* flags
2607 to be used in accumulating flow info. */
2610 life_analysis (f, file, flags)
2615 #ifdef ELIMINABLE_REGS
2617 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2620 /* Record which registers will be eliminated. We use this in
2623 CLEAR_HARD_REG_SET (elim_reg_set);
2625 #ifdef ELIMINABLE_REGS
2626 for (i = 0; i < (int) (sizeof eliminables / sizeof eliminables[0]); i++)
2627 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
/* Without ELIMINABLE_REGS only the frame pointer can be eliminated. */
2629 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2633 flags &= ~(PROP_LOG_LINKS | PROP_AUTOINC);
2635 /* The post-reload life analysis have (on a global basis) the same
2636 registers live as was computed by reload itself; otherwise
2637 elimination offsets and such may be incorrect.
2639 Reload will make some registers as live even though they do not
2642 We don't want to create new auto-incs after reload, since they
2643 are unlikely to be useful and can cause problems with shared
2645 if (reload_completed)
2646 flags &= ~(PROP_REG_INFO | PROP_AUTOINC);
2648 /* We want alias analysis information for local dead store elimination. */
2649 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
2650 init_alias_analysis ();
2652 /* Always remove no-op moves. Do this before other processing so
2653 that we don't have to keep re-scanning them. */
2654 delete_noop_moves (f);
2656 /* Some targets can emit simpler epilogues if they know that sp was
2657 not ever modified during the function. After reload, of course,
2658 we've already emitted the epilogue so there's no sense searching. */
2659 if (! reload_completed)
2660 notice_stack_pointer_modification (f);
2662 /* Allocate and zero out data structures that will record the
2663 data from lifetime analysis. */
2664 allocate_reg_life_data ();
2665 allocate_bb_life_data ();
2667 /* Find the set of registers live on function exit. */
2668 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2670 /* "Update" life info from zero. It'd be nice to begin the
2671 relaxation with just the exit and noreturn blocks, but that set
2672 is not immediately handy. */
2674 if (flags & PROP_REG_INFO)
2675 memset (regs_ever_live, 0, sizeof(regs_ever_live));
2676 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
/* Clean up the alias analysis data if we set it up above. */
2679 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
2680 end_alias_analysis ();
2683 dump_flow_info (file);
/* Keep basic_block_info (argument 1) but release per-insn block data. */
2685 free_basic_block_vars (1);
2688 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2689 Search for REGNO. If found, abort if it is not wider than word_mode. */
2692 verify_wide_reg_1 (px, pregno)
/* PREGNO points to the register number being searched for. */
2697 unsigned int regno = *(int *) pregno;
2699 if (GET_CODE (x) == REG && REGNO (x) == regno)
/* Found a reference; it must be wider than a word or something is
   badly wrong upstream. */
2701 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2708 /* A subroutine of verify_local_live_at_start. Search through insns
2709 between HEAD and END looking for register REGNO. */
2712 verify_wide_reg (regno, head, end)
/* Walk forward from HEAD; verify_wide_reg_1 aborts if a reference to
   REGNO is found that is not wider than a word. */
2719 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2723 head = NEXT_INSN (head);
2726 /* We didn't find the register at all. Something's way screwy. */
2730 /* A subroutine of update_life_info. Verify that there are no untoward
2731 changes in live_at_start during a local update. */
2734 verify_local_live_at_start (new_live_at_start, bb)
2735 regset new_live_at_start;
2738 if (reload_completed)
2740 /* After reload, there are no pseudos, nor subregs of multi-word
2741 registers. The regsets should exactly match. */
2742 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2749 /* Find the set of changed registers. */
2750 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2752 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2754 /* No registers should die. */
2755 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2757 /* Verify that the now-live register is wider than word_mode. */
2758 verify_wide_reg (i, bb->head, bb->end);
2763 /* Updates life information starting with the basic blocks set in BLOCKS.
2764 If BLOCKS is null, consider it to be the universal set.
2766 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholeing,
2767 we are only expecting local modifications to basic blocks. If we find
2768 extra registers live at the beginning of a block, then we either killed
2769 useful data, or we have a broken split that wants data not provided.
2770 If we find registers removed from live_at_start, that means we have
2771 a broken peephole that is killing a register it shouldn't.
2773 ??? This is not true in one situation -- when a pre-reload splitter
2774 generates subregs of a multi-word pseudo, current life analysis will
2775 lose the kill. So we _can_ have a pseudo go live. How irritating.
2777 Including PROP_REG_INFO does not properly refresh regs_ever_live
2778 unless the caller resets it to zero. */
2781 update_life_info (blocks, extent, prop_flags)
2783 enum update_life_extent extent;
2787 regset_head tmp_head;
2790 tmp = INITIALIZE_REG_SET (tmp_head);
2792 /* For a global update, we go through the relaxation process again. */
2793 if (extent != UPDATE_LIFE_LOCAL)
2795 calculate_global_regs_live (blocks, blocks,
2796 prop_flags & PROP_SCAN_DEAD_CODE);
2798 /* If asked, remove notes from the blocks we'll update. */
2799 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2800 count_or_remove_death_notes (blocks, 1);
/* Re-run local propagation for each requested block, checking the
   result against the recorded live_at_start for local updates. */
2805 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2807 basic_block bb = BASIC_BLOCK (i);
2809 COPY_REG_SET (tmp, bb->global_live_at_end);
2810 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2812 if (extent == UPDATE_LIFE_LOCAL)
2813 verify_local_live_at_start (tmp, bb);
/* BLOCKS was null: process every block, in reverse order. */
2818 for (i = n_basic_blocks - 1; i >= 0; --i)
2820 basic_block bb = BASIC_BLOCK (i);
2822 COPY_REG_SET (tmp, bb->global_live_at_end);
2823 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2825 if (extent == UPDATE_LIFE_LOCAL)
2826 verify_local_live_at_start (tmp, bb);
2832 if (prop_flags & PROP_REG_INFO)
2834 /* The only pseudos that are live at the beginning of the function
2835 are those that were not set anywhere in the function. local-alloc
2836 doesn't know how to handle these correctly, so mark them as not
2837 local to any one basic block. */
2838 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2839 FIRST_PSEUDO_REGISTER, i,
2840 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2842 /* We have a problem with any pseudoreg that lives across the setjmp.
2843 ANSI says that if a user variable does not change in value between
2844 the setjmp and the longjmp, then the longjmp preserves it. This
2845 includes longjmp from a place where the pseudo appears dead.
2846 (In principle, the value still exists if it is in scope.)
2847 If the pseudo goes in a hard reg, some other value may occupy
2848 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2849 Conclusion: such a pseudo must not go in a hard reg. */
2850 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2851 FIRST_PSEUDO_REGISTER, i,
2853 if (regno_reg_rtx[i] != 0)
/* REG_LIVE_LENGTH of -1 tells the allocator not to put this
   pseudo in a hard register. */
2855 REG_LIVE_LENGTH (i) = -1;
2856 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
2862 /* Free the variables allocated by find_basic_blocks.
2864 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2867 free_basic_block_vars (keep_head_end_p)
2868 int keep_head_end_p;
/* The insn-to-block map is always released. */
2870 if (basic_block_for_insn)
2872 VARRAY_FREE (basic_block_for_insn);
2873 basic_block_for_insn = NULL;
2876 if (! keep_head_end_p)
2879 VARRAY_FREE (basic_block_info);
/* Clear the dangling fields of the synthetic entry/exit blocks. */
2882 ENTRY_BLOCK_PTR->aux = NULL;
2883 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2884 EXIT_BLOCK_PTR->aux = NULL;
2885 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2889 /* Return nonzero if the destination of SET equals the source. */
2894 rtx src = SET_SRC (set);
2895 rtx dst = SET_DEST (set);
/* A subreg-to-subreg copy is a no-op only if both sides select the
   same word of the same register. */
2897 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
2899 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
2901 src = SUBREG_REG (src);
2902 dst = SUBREG_REG (dst);
2905 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
2906 && REGNO (src) == REGNO (dst));
2909 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2915 rtx pat = PATTERN (insn);
2917 /* Insns carrying these notes are useful later on. */
2918 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2921 if (GET_CODE (pat) == SET && set_noop_p (pat))
2924 if (GET_CODE (pat) == PARALLEL)
2927 /* If nothing but SETs of registers to themselves,
2928 this insn can also be deleted. */
2929 for (i = 0; i < XVECLEN (pat, 0); i++)
2931 rtx tem = XVECEXP (pat, 0, i);
/* USE and CLOBBER elements do not disqualify the insn. */
2933 if (GET_CODE (tem) == USE
2934 || GET_CODE (tem) == CLOBBER)
2937 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2946 /* Delete any insns that copy a register to itself. */
2949 delete_noop_moves (f)
/* F is the first insn of the function; scan the whole chain. */
2953 for (insn = f; insn; insn = NEXT_INSN (insn))
2955 if (GET_CODE (insn) == INSN && noop_move_p (insn))
/* "Delete" by turning the insn into a NOTE_INSN_DELETED note
   rather than unlinking it from the chain. */
2957 PUT_CODE (insn, NOTE);
2958 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2959 NOTE_SOURCE_FILE (insn) = 0;
2964 /* Determine if the stack pointer is constant over the life of the function.
2965 Only useful before prologues have been emitted. */
2968 notice_stack_pointer_modification_1 (x, pat, data)
2970 rtx pat ATTRIBUTE_UNUSED;
2971 void *data ATTRIBUTE_UNUSED;
/* Called via note_stores: X is a destination being stored to. A direct
   store to sp, or a push/pop auto-modify of sp through a MEM address,
   means sp is not unchanging. */
2973 if (x == stack_pointer_rtx
2974 /* The stack pointer is only modified indirectly as the result
2975 of a push until later in flow. See the comments in rtl.texi
2976 regarding Embedded Side-Effects on Addresses. */
2977 || (GET_CODE (x) == MEM
2978 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2979 || GET_CODE (XEXP (x, 0)) == PRE_INC
2980 || GET_CODE (XEXP (x, 0)) == POST_DEC
2981 || GET_CODE (XEXP (x, 0)) == POST_INC)
2982 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2983 current_function_sp_is_unchanging = 0;
/* Scan all insns starting at F and clear current_function_sp_is_unchanging
   if any of them stores to the stack pointer. */
2987 notice_stack_pointer_modification (f)
2992 /* Assume that the stack pointer is unchanging if alloca hasn't
2994 current_function_sp_is_unchanging = !current_function_calls_alloca;
2995 if (! current_function_sp_is_unchanging)
2998 for (insn = f; insn; insn = NEXT_INSN (insn))
3002 /* Check if insn modifies the stack pointer. */
3003 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
/* Stop scanning as soon as a modification has been found. */
3005 if (! current_function_sp_is_unchanging)
3011 /* Mark a register in SET. Hard registers in large modes get all
3012 of their component registers set as well. */
3014 mark_reg (reg, xset)
3018 regset set = (regset) xset;
3019 int regno = REGNO (reg);
/* A BLKmode register has no meaningful component count. */
3021 if (GET_MODE (reg) == BLKmode)
3024 SET_REGNO_REG_SET (set, regno);
3025 if (regno < FIRST_PSEUDO_REGISTER)
/* Also mark each hard register occupied by this multi-reg value. */
3027 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
3029 SET_REGNO_REG_SET (set, regno + n);
3033 /* Mark those regs which are needed at the end of the function as live
3034 at the end of the last basic block. */
3036 mark_regs_live_at_end (set)
3041 /* If exiting needs the right stack value, consider the stack pointer
3042 live at the end of the function. */
3043 if ((HAVE_epilogue && reload_completed)
3044 || ! EXIT_IGNORE_STACK
3045 || (! FRAME_POINTER_REQUIRED
3046 && ! current_function_calls_alloca
3047 && flag_omit_frame_pointer)
3048 || current_function_sp_is_unchanging)
3050 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
3053 /* Mark the frame pointer if needed at the end of the function. If
3054 we end up eliminating it, it will be removed from the live list
3055 of each basic block by reload. */
3057 if (! reload_completed || frame_pointer_needed)
3059 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
3060 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3061 /* If they are different, also mark the hard frame pointer as live. */
3062 if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3063 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
3067 #ifdef PIC_OFFSET_TABLE_REGNUM
3068 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
3069 /* Many architectures have a GP register even without flag_pic.
3070 Assume the pic register is not in use, or will be handled by
3071 other means, if it is not fixed. */
3072 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3073 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
3077 /* Mark all global registers, and all registers used by the epilogue
3078 as being live at the end of the function since they may be
3079 referenced by our caller. */
3080 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3081 if (global_regs[i] || EPILOGUE_USES (i))
3082 SET_REGNO_REG_SET (set, i);
3084 /* Mark all call-saved registers that we actually used. */
3085 if (HAVE_epilogue && reload_completed)
3087 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3088 if (regs_ever_live[i] && ! call_used_regs[i] && ! LOCAL_REGNO (i))
3089 SET_REGNO_REG_SET (set, i);
3092 /* Mark function return value. */
3093 diddle_return_value (mark_reg, set);
3096 /* Callback function for for_each_successor_phi. DATA is a regset.
3097 Sets the SRC_REGNO, the regno of the phi alternative for phi node
3098 INSN, in the regset. */
3101 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
3102 rtx insn ATTRIBUTE_UNUSED;
3103 int dest_regno ATTRIBUTE_UNUSED;
/* Only SRC_REGNO matters here: mark it live in the regset DATA. */
3107 regset live = (regset) data;
3108 SET_REGNO_REG_SET (live, src_regno);
3112 /* Propagate global life info around the graph of basic blocks. Begin
3113 considering blocks with their corresponding bit set in BLOCKS_IN.
3114 If BLOCKS_IN is null, consider it the universal set.
3116 BLOCKS_OUT is set for every block that was changed. */
3119 calculate_global_regs_live (blocks_in, blocks_out, flags)
3120 sbitmap blocks_in, blocks_out;
3123 basic_block *queue, *qhead, *qtail, *qend;
3124 regset tmp, new_live_at_end;
3125 regset_head tmp_head;
3126 regset_head new_live_at_end_head;
3129 tmp = INITIALIZE_REG_SET (tmp_head);
3130 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
3132 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
3133 because the `head == tail' style test for an empty queue doesn't
3134 work with a full queue. */
3135 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
3137 qhead = qend = queue + n_basic_blocks + 2;
3139 /* Clear out the garbage that might be hanging out in bb->aux. */
3140 for (i = n_basic_blocks - 1; i >= 0; --i)
3141 BASIC_BLOCK (i)->aux = NULL;
3143 /* Queue the blocks set in the initial mask. Do this in reverse block
3144 number order so that we are more likely for the first round to do
3145 useful work. We use AUX non-null to flag that the block is queued. */
3148 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3150 basic_block bb = BASIC_BLOCK (i);
/* BLOCKS_IN null: queue every block. */
3157 for (i = 0; i < n_basic_blocks; ++i)
3159 basic_block bb = BASIC_BLOCK (i);
3166 sbitmap_zero (blocks_out);
/* Iterate to a fixed point: pop a block, recompute its live_at_end
   from its successors, rescan if needed, and requeue predecessors
   whose live_at_end may have changed. */
3168 while (qhead != qtail)
3170 int rescan, changed;
3179 /* Begin by propagating live_at_start from the successor blocks. */
3180 CLEAR_REG_SET (new_live_at_end);
3181 for (e = bb->succ; e ; e = e->succ_next)
3183 basic_block sb = e->dest;
3184 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3187 /* Force the stack pointer to be live -- which might not already be
3188 the case for blocks within infinite loops. */
3189 SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
3191 /* Regs used in phi nodes are not included in
3192 global_live_at_start, since they are live only along a
3193 particular edge. Set those regs that are live because of a
3194 phi node alternative corresponding to this particular block. */
3196 for_each_successor_phi (bb, &set_phi_alternative_reg,
3199 if (bb == ENTRY_BLOCK_PTR)
3201 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3205 /* On our first pass through this block, we'll go ahead and continue.
3206 Recognize first pass by local_set NULL. On subsequent passes, we
3207 get to skip out early if live_at_end wouldn't have changed. */
3209 if (bb->local_set == NULL)
3211 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3216 /* If any bits were removed from live_at_end, we'll have to
3217 rescan the block. This wouldn't be necessary if we had
3218 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3219 local_live is really dependent on live_at_end. */
3220 CLEAR_REG_SET (tmp);
3221 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3222 new_live_at_end, BITMAP_AND_COMPL);
3226 /* Find the set of changed bits. Take this opportunity
3227 to notice that this set is empty and early out. */
3228 CLEAR_REG_SET (tmp);
3229 changed = bitmap_operation (tmp, bb->global_live_at_end,
3230 new_live_at_end, BITMAP_XOR);
3234 /* If any of the changed bits overlap with local_set,
3235 we'll have to rescan the block. Detect overlap by
3236 the AND with ~local_set turning off bits. */
3237 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3242 /* Let our caller know that BB changed enough to require its
3243 death notes updated. */
3245 SET_BIT (blocks_out, bb->index);
3249 /* Add to live_at_start the set of all registers in
3250 new_live_at_end that aren't in the old live_at_end. */
3252 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3254 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3256 changed = bitmap_operation (bb->global_live_at_start,
3257 bb->global_live_at_start,
3264 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3266 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3267 into live_at_start. */
3268 propagate_block (bb, new_live_at_end, bb->local_set, flags);
3270 /* If live_at start didn't change, no need to go farther. */
3271 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3274 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3277 /* Queue all predecessors of BB so that we may re-examine
3278 their live_at_end. */
3279 for (e = bb->pred; e ; e = e->pred_next)
3281 basic_block pb = e->src;
3282 if (pb->aux == NULL)
/* Done: release the scratch regsets and the worklist. */
3293 FREE_REG_SET (new_live_at_end);
/* Free the local_set regsets only for the blocks we touched. */
3297 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3299 basic_block bb = BASIC_BLOCK (i);
3300 FREE_REG_SET (bb->local_set);
3305 for (i = n_basic_blocks - 1; i >= 0; --i)
3307 basic_block bb = BASIC_BLOCK (i);
3308 FREE_REG_SET (bb->local_set);
3315 /* Subroutines of life analysis. */
3317 /* Allocate the permanent data structures that represent the results
3318 of life analysis. Not static since used also for stupid life analysis. */
3321 allocate_bb_life_data ()
3325 for (i = 0; i < n_basic_blocks; i++)
3327 basic_block bb = BASIC_BLOCK (i);
/* Regsets live on the function obstack so they survive the pass. */
3329 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3330 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
/* The synthetic entry and exit blocks need only one regset each. */
3333 ENTRY_BLOCK_PTR->global_live_at_end
3334 = OBSTACK_ALLOC_REG_SET (function_obstack);
3335 EXIT_BLOCK_PTR->global_live_at_start
3336 = OBSTACK_ALLOC_REG_SET (function_obstack);
3338 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
/* Allocate and reset the per-register data collected by propagate_block. */
3342 allocate_reg_life_data ()
3346 max_regno = max_reg_num ();
3348 /* Recalculate the register space, in case it has grown. Old style
3349 vector oriented regsets would set regset_{size,bytes} here also. */
3350 allocate_reg_info (max_regno, FALSE, FALSE);
3352 /* Reset all the data we'll collect in propagate_block and its
3354 for (i = 0; i < max_regno; i++)
3358 REG_N_DEATHS (i) = 0;
3359 REG_N_CALLS_CROSSED (i) = 0;
3360 REG_LIVE_LENGTH (i) = 0;
3361 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3365 /* Delete dead instructions for propagate_block. */
3368 propagate_block_delete_insn (bb, insn)
3372 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
3374 /* If the insn referred to a label, and that label was attached to
3375 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
3376 pretty much mandatory to delete it, because the ADDR_VEC may be
3377 referencing labels that no longer exist. */
3381 rtx label = XEXP (inote, 0);
3384 if (LABEL_NUSES (label) == 1
3385 && (next = next_nonnote_insn (label)) != NULL
3386 && GET_CODE (next) == JUMP_INSN
3387 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3388 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3390 rtx pat = PATTERN (next);
3391 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3392 int len = XVECLEN (pat, diff_vec_p);
/* Drop the use count of every label the jump table references
   before deleting the table itself. */
3395 for (i = 0; i < len; i++)
3396 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3398 flow_delete_insn (next);
/* Keep BB's end pointer valid if we're deleting its last insn. */
3402 if (bb->end == insn)
3403 bb->end = PREV_INSN (insn);
3404 flow_delete_insn (insn);
3407 /* Delete dead libcalls for propagate_block. Return the insn
3408 before the libcall. */
3411 propagate_block_delete_libcall (bb, insn, note)
/* NOTE is the REG_RETVAL note of INSN; its operand is the first insn
   of the libcall sequence. */
3415 rtx first = XEXP (note, 0);
3416 rtx before = PREV_INSN (first);
3418 if (insn == bb->end)
3421 flow_delete_insn_chain (first, insn);
3425 /* Update the life-status of regs for one insn. Return the previous insn. */
3428 propagate_one_insn (pbi, insn)
3429 struct propagate_block_info *pbi;
3432 rtx prev = PREV_INSN (insn);
3433 int flags = pbi->flags;
3434 int insn_is_dead = 0;
3435 int libcall_is_dead = 0;
/* Notes, barriers etc. carry no life information. */
3439 if (! INSN_P (insn))
3442 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3443 if (flags & PROP_SCAN_DEAD_CODE)
3445 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0,
3447 libcall_is_dead = (insn_is_dead && note != 0
3448 && libcall_dead_p (pbi, note, insn));
3451 /* We almost certainly don't want to delete prologue or epilogue
3452 instructions. Warn about probable compiler lossage. */
3455 && (((HAVE_epilogue || HAVE_prologue)
3456 && prologue_epilogue_contains (insn))
3457 || (HAVE_sibcall_epilogue
3458 && sibcall_epilogue_contains (insn)))
3459 && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
3461 if (flags & PROP_KILL_DEAD_CODE)
3463 warning ("ICE: would have deleted prologue/epilogue insn");
3464 if (!inhibit_warnings)
/* Whatever the dead-code scan said, keep this insn alive. */
3467 libcall_is_dead = insn_is_dead = 0;
3470 /* If an instruction consists of just dead store(s) on final pass,
3472 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3474 /* Record sets. Do this even for dead instructions, since they
3475 would have killed the values if they hadn't been deleted. */
3476 mark_set_regs (pbi, PATTERN (insn), insn);
3478 /* CC0 is now known to be dead. Either this insn used it,
3479 in which case it doesn't anymore, or clobbered it,
3480 so the next insn can't use it. */
3483 if (libcall_is_dead)
/* Delete the whole libcall sequence and resume after it. */
3485 prev = propagate_block_delete_libcall (pbi->bb, insn, note);
3486 insn = NEXT_INSN (prev);
3489 propagate_block_delete_insn (pbi->bb, insn);
3494 /* See if this is an increment or decrement that can be merged into
3495 a following memory address. */
3498 register rtx x = single_set (insn);
3500 /* Does this instruction increment or decrement a register? */
3501 if ((flags & PROP_AUTOINC)
3503 && GET_CODE (SET_DEST (x)) == REG
3504 && (GET_CODE (SET_SRC (x)) == PLUS
3505 || GET_CODE (SET_SRC (x)) == MINUS)
3506 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3507 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3508 /* Ok, look for a following memory ref we can combine with.
3509 If one is found, change the memory ref to a PRE_INC
3510 or PRE_DEC, cancel this insn, and return 1.
3511 Return 0 if nothing has been done. */
3512 && try_pre_increment_1 (pbi, insn))
3515 #endif /* AUTO_INC_DEC */
3517 CLEAR_REG_SET (pbi->new_set);
3519 /* If this is not the final pass, and this insn is copying the value of
3520 a library call and it's dead, don't scan the insns that perform the
3521 library call, so that the call's arguments are not marked live. */
3522 if (libcall_is_dead)
3524 /* Record the death of the dest reg. */
3525 mark_set_regs (pbi, PATTERN (insn), insn);
/* Skip backwards over the whole libcall sequence. */
3527 insn = XEXP (note, 0);
3528 return PREV_INSN (insn);
3530 else if (GET_CODE (PATTERN (insn)) == SET
3531 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3532 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3533 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3534 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3535 /* We have an insn to pop a constant amount off the stack.
3536 (Such insns use PLUS regardless of the direction of the stack,
3537 and any insn to adjust the stack by a constant is always a pop.)
3538 These insns, if not dead stores, have no effect on life. */
3542 /* Any regs live at the time of a call instruction must not go
3543 in a register clobbered by calls. Find all regs now live and
3544 record this for them. */
3546 if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
3547 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3548 { REG_N_CALLS_CROSSED (i)++; });
3550 /* Record sets. Do this even for dead instructions, since they
3551 would have killed the values if they hadn't been deleted. */
3552 mark_set_regs (pbi, PATTERN (insn), insn);
3554 if (GET_CODE (insn) == CALL_INSN)
/* If the call is conditionally executed, its clobbers are too. */
3560 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3561 cond = COND_EXEC_TEST (PATTERN (insn));
3563 /* Non-constant calls clobber memory. */
3564 if (! CONST_CALL_P (insn))
3565 free_EXPR_LIST_list (&pbi->mem_set_list);
3567 /* There may be extra registers to be clobbered. */
3568 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3570 note = XEXP (note, 1))
3571 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3572 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
3573 cond, insn, pbi->flags);
3575 /* Calls change all call-used and global registers. */
3576 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3577 if (call_used_regs[i] && ! global_regs[i]
3580 /* We do not want REG_UNUSED notes for these registers. */
3581 mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
3583 pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
3587 /* If an insn doesn't use CC0, it becomes dead since we assume
3588 that every insn clobbers it. So show it dead here;
3589 mark_used_regs will set it live if it is referenced. */
3594 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
3596 /* Sometimes we may have inserted something before INSN (such as a move)
3597 when we make an auto-inc. So ensure we will scan those insns. */
3599 prev = PREV_INSN (insn);
3602 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3608 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3609 cond = COND_EXEC_TEST (PATTERN (insn));
3611 /* Calls use their arguments. */
3612 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3614 note = XEXP (note, 1))
3615 if (GET_CODE (XEXP (note, 0)) == USE)
3616 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
3619 /* The stack ptr is used (honorarily) by a CALL insn. */
3620 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
3622 /* Calls may also reference any of the global registers,
3623 so they are made live. */
3624 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3626 mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
3631 /* On final pass, update counts of how many insns in which each reg
3633 if (flags & PROP_REG_INFO)
3634 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3635 { REG_LIVE_LENGTH (i)++; });
3640 /* Initialize a propagate_block_info struct for public consumption.
3641 Note that the structure itself is opaque to this file, but that
3642 the user can use the regsets provided here. */
3644 struct propagate_block_info *
3645 init_propagate_block_info (bb, live, local_set, flags)
3651 struct propagate_block_info *pbi = xmalloc (sizeof(*pbi));
3654 pbi->reg_live = live;
3655 pbi->mem_set_list = NULL_RTX;
3656 pbi->local_set = local_set;
/* reg_next_use is only needed when building LOG_LINKS or auto-incs. */
3660 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3661 pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
3663 pbi->reg_next_use = NULL;
3665 pbi->new_set = BITMAP_XMALLOC ();
3667 #ifdef HAVE_conditional_execution
3668 pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
3669 free_reg_cond_life_info);
3670 pbi->reg_cond_reg = BITMAP_XMALLOC ();
3672 /* If this block ends in a conditional branch, for each register live
3673 from one side of the branch and not the other, record the register
3674 as conditionally dead. */
3675 if ((flags & (PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE))
3676 && GET_CODE (bb->end) == JUMP_INSN
3677 && any_condjump_p (bb->end))
3679 regset_head diff_head;
3680 regset diff = INITIALIZE_REG_SET (diff_head);
3681 basic_block bb_true, bb_false;
3682 rtx cond_true, cond_false, set_src;
3685 /* Identify the successor blocks. */
3686 bb_true = bb->succ->dest;
3687 if (bb->succ->succ_next != NULL)
3689 bb_false = bb->succ->succ_next->dest;
/* Make bb_true the branch-taken target and bb_false the
   fallthrough, swapping if needed. */
3691 if (bb->succ->flags & EDGE_FALLTHRU)
3693 basic_block t = bb_false;
3697 else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
3702 /* This can happen with a conditional jump to the next insn. */
3703 if (JUMP_LABEL (bb->end) != bb_true->head)
3706 /* Simplest way to do nothing. */
3710 /* Extract the condition from the branch. */
3711 set_src = SET_SRC (pc_set (bb->end));
3712 cond_true = XEXP (set_src, 0);
3713 cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
3714 GET_MODE (cond_true), XEXP (cond_true, 0),
3715 XEXP (cond_true, 1));
/* If the branch is taken on the false arm, swap the conditions. */
3716 if (GET_CODE (XEXP (set_src, 1)) == PC)
3719 cond_false = cond_true;
3723 /* Compute which registers lead different lives in the successors. */
3724 if (bitmap_operation (diff, bb_true->global_live_at_start,
3725 bb_false->global_live_at_start, BITMAP_XOR))
3727 if (GET_CODE (XEXP (cond_true, 0)) != REG)
3729 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond_true, 0)));
3731 /* For each such register, mark it conditionally dead. */
3732 EXECUTE_IF_SET_IN_REG_SET
3735 struct reg_cond_life_info *rcli;
3738 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
3740 if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
3744 rcli->condition = alloc_EXPR_LIST (0, cond, NULL_RTX);
3746 splay_tree_insert (pbi->reg_cond_dead, i,
3747 (splay_tree_value) rcli);
3751 FREE_REG_SET (diff);
3755 /* If this block has no successors, any stores to the frame that aren't
3756 used later in the block are dead. So make a pass over the block
3757 recording any such that are made and show them dead at the end. We do
3758 a very conservative and simple job here. */
3760 && (flags & PROP_SCAN_DEAD_CODE)
3761 && (bb->succ == NULL
3762 || (bb->succ->succ_next == NULL
3763 && bb->succ->dest == EXIT_BLOCK_PTR)))
3766 for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
3767 if (GET_CODE (insn) == INSN
3768 && GET_CODE (PATTERN (insn)) == SET
3769 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
3771 rtx mem = SET_DEST (PATTERN (insn));
/* Only frame-pointer-relative stores are considered; anything
   else might alias live memory. */
3773 if (XEXP (mem, 0) == frame_pointer_rtx
3774 || (GET_CODE (XEXP (mem, 0)) == PLUS
3775 && XEXP (XEXP (mem, 0), 0) == frame_pointer_rtx
3776 && GET_CODE (XEXP (XEXP (mem, 0), 1)) == CONST_INT))
3777 pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
3784 /* Release a propagate_block_info struct. */
/* NOTE(review): this listing has gaps (missing braces/lines); comments added
   here describe only the statements that are visible.  */
3787 free_propagate_block_info (pbi)
3788 struct propagate_block_info *pbi;
/* Free the chain of tracked memory stores used for dead-store detection.  */
3790 free_EXPR_LIST_list (&pbi->mem_set_list);
3792 BITMAP_XFREE (pbi->new_set);
3794 #ifdef HAVE_conditional_execution
/* Tear down conditional-liveness bookkeeping: the splay tree mapping regno
   to its death condition, and the set of condition registers.  */
3795 splay_tree_delete (pbi->reg_cond_dead);
3796 BITMAP_XFREE (pbi->reg_cond_reg);
/* reg_next_use is only allocated for some propagation flags, hence the
   guard before freeing.  */
3799 if (pbi->reg_next_use)
3800 free (pbi->reg_next_use);
3805 /* Compute the registers live at the beginning of a basic block BB from
3806 those live at the end.
3808 When called, REG_LIVE contains those live at the end. On return, it
3809 contains those live at the beginning.
3811 LOCAL_SET, if non-null, will be set with all registers killed by
3812 this basic block. */
/* NOTE(review): lines are missing from this listing; only the visible
   statements are documented.  */
3815 propagate_block (bb, live, local_set, flags)
3821 struct propagate_block_info *pbi;
/* Build the per-block scan state (liveness set, mem_set_list, etc.).  */
3824 pbi = init_propagate_block_info (bb, live, local_set, flags);
3826 if (flags & PROP_REG_INFO)
3830 /* Process the regs live at the end of the block.
3831 Mark them as not local to any one basic block. */
3832 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
3833 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
3836 /* Scan the block an insn at a time from end to beginning. */
3838 for (insn = bb->end; ; insn = prev)
3840 /* If this is a call to `setjmp' et al, warn if any
3841 non-volatile datum is live. */
3842 if ((flags & PROP_REG_INFO)
3843 && GET_CODE (insn) == NOTE
3844 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3845 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live)
/* propagate_one_insn updates liveness for INSN and yields the previous
   insn, so the loop walks backwards through the block.  */
3847 prev = propagate_one_insn (pbi, insn);
/* Terminate after the block head has been processed.  */
3849 if (insn == bb->head)
3853 free_propagate_block_info (pbi);
3856 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3857 (SET expressions whose destinations are registers dead after the insn).
3858 NEEDED is the regset that says which regs are alive after the insn.
3860 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3862 If X is the entire body of an insn, NOTES contains the reg notes
3863 pertaining to the insn. */
/* NOTE(review): this listing is missing lines (braces, returns); comments
   describe only the visible statements.  */
3866 insn_dead_p (pbi, x, call_ok, notes)
3867 struct propagate_block_info *pbi;
3870 rtx notes ATTRIBUTE_UNUSED;
3872 enum rtx_code code = GET_CODE (x);
3875 /* If flow is invoked after reload, we must take existing AUTO_INC
3876 expresions into account. */
3877 if (reload_completed)
/* Walk the REG_NOTES chain looking for REG_INC side effects.  */
3879 for ( ; notes; notes = XEXP (notes, 1))
3881 if (REG_NOTE_KIND (notes) == REG_INC)
3883 int regno = REGNO (XEXP (notes, 0));
3885 /* Don't delete insns to set global regs. */
3886 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3887 || REGNO_REG_SET_P (pbi->reg_live, regno))
3894 /* If setting something that's a reg or part of one,
3895 see if that register's altered value will be live. */
3899 rtx r = SET_DEST (x);
/* A SET of cc0 is dead exactly when cc0 is not subsequently used.  */
3902 if (GET_CODE (r) == CC0)
3903 return ! pbi->cc0_live;
3906 /* A SET that is a subroutine call cannot be dead. */
3907 if (GET_CODE (SET_SRC (x)) == CALL)
3913 /* Don't eliminate loads from volatile memory or volatile asms. */
3914 else if (volatile_refs_p (SET_SRC (x)))
3917 if (GET_CODE (r) == MEM)
/* A volatile store can never be removed.  */
3921 if (MEM_VOLATILE_P (r))
3924 /* Walk the set of memory locations we are currently tracking
3925 and see if one is an identical match to this memory location.
3926 If so, this memory write is dead (remember, we're walking
3927 backwards from the end of the block to the start). */
3928 temp = pbi->mem_set_list;
3931 if (rtx_equal_p (XEXP (temp, 0), r))
3933 temp = XEXP (temp, 1);
/* Strip wrappers to reach the underlying register, if any.  */
3938 while (GET_CODE (r) == SUBREG
3939 || GET_CODE (r) == STRICT_LOW_PART
3940 || GET_CODE (r) == ZERO_EXTRACT)
3943 if (GET_CODE (r) == REG)
3945 int regno = REGNO (r);
/* If the destination register is still live, the store is needed.  */
3948 if (REGNO_REG_SET_P (pbi->reg_live, regno))
3951 /* If this is a hard register, verify that subsequent
3952 words are not needed. */
3953 if (regno < FIRST_PSEUDO_REGISTER)
3955 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3958 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
3962 /* Don't delete insns to set global regs. */
3963 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3966 /* Make sure insns to set the stack pointer aren't deleted. */
3967 if (regno == STACK_POINTER_REGNUM)
3970 /* Make sure insns to set the frame pointer aren't deleted. */
3971 if (regno == FRAME_POINTER_REGNUM
3972 && (! reload_completed || frame_pointer_needed))
3974 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3975 if (regno == HARD_FRAME_POINTER_REGNUM
3976 && (! reload_completed || frame_pointer_needed))
3980 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3981 /* Make sure insns to set arg pointer are never deleted
3982 (if the arg pointer isn't fixed, there will be a USE
3983 for it, so we can treat it normally). */
3984 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3988 #ifdef PIC_OFFSET_TABLE_REGNUM
3989 /* Before reload, do not allow sets of the pic register
3990 to be deleted. Reload can insert references to
3991 constant pool memory anywhere in the function, making
3992 the PIC register live where it wasn't before. */
3993 if (regno == PIC_OFFSET_TABLE_REGNUM && fixed_regs[regno]
3994 && ! reload_completed)
3998 /* Otherwise, the set is dead. */
4004 /* If performing several activities, insn is dead if each activity
4005 is individually dead. Also, CLOBBERs and USEs can be ignored; a
4006 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
4008 else if (code == PARALLEL)
4010 int i = XVECLEN (x, 0);
/* Recurse on each element; any live element keeps the insn.  */
4012 for (i--; i >= 0; i--)
4013 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
4014 && GET_CODE (XVECEXP (x, 0, i)) != USE
4015 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
4021 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
4022 is not necessarily true for hard registers. */
4023 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
4024 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
4025 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
4028 /* We do not check other CLOBBER or USE here. An insn consisting of just
4029 a CLOBBER or just a USE should not be deleted. */
4033 /* If INSN is the last insn in a libcall, and assuming INSN is dead,
4034 return 1 if the entire library call is dead.
4035 This is true if INSN copies a register (hard or pseudo)
4036 and if the hard return reg of the call insn is dead.
4037 (The caller should have tested the destination of the SET inside
4038 INSN already for death.)
4040 If this insn doesn't just copy a register, then we don't
4041 have an ordinary libcall. In that case, cse could not have
4042 managed to substitute the source for the dest later on,
4043 so we can assume the libcall is dead.
4045 PBI is the block info giving pseudoregs live before this insn.
4046 NOTE is the REG_RETVAL note of the insn. */
/* NOTE(review): listing has missing lines; comments cover visible code.  */
4049 libcall_dead_p (pbi, note, insn)
4050 struct propagate_block_info *pbi;
4054 rtx x = single_set (insn);
4058 register rtx r = SET_SRC (x);
/* Only a plain register copy qualifies as an ordinary libcall tail.  */
4059 if (GET_CODE (r) == REG)
4061 rtx call = XEXP (note, 0);
4065 /* Find the call insn. */
4066 while (call != insn && GET_CODE (call) != CALL_INSN)
4067 call = NEXT_INSN (call);
4069 /* If there is none, do nothing special,
4070 since ordinary death handling can understand these insns. */
4074 /* See if the hard reg holding the value is dead.
4075 If this is a PARALLEL, find the call within it. */
4076 call_pat = PATTERN (call);
4077 if (GET_CODE (call_pat) == PARALLEL)
/* Search the PARALLEL for the SET whose source is the CALL itself.  */
4079 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
4080 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
4081 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
4084 /* This may be a library call that is returning a value
4085 via invisible pointer. Do nothing special, since
4086 ordinary death handling can understand these insns. */
4090 call_pat = XVECEXP (call_pat, 0, i);
/* CALL_OK is passed as 1: a CALL inside the pattern is permitted here.  */
4093 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
4099 /* Return 1 if register REGNO was used before it was set, i.e. if it is
4100 live at function entry. Don't count global register variables, variables
4101 in registers that can be used for function arg passing, or variables in
4102 fixed hard registers. */
/* NOTE(review): the early-return body for the excluded cases is missing
   from this listing; presumably it returns 0 — confirm against flow.c.  */
4105 regno_uninitialized (regno)
4108 if (n_basic_blocks == 0
4109 || (regno < FIRST_PSEUDO_REGISTER
4110 && (global_regs[regno]
4111 || fixed_regs[regno]
4112 || FUNCTION_ARG_REGNO_P (regno))))
/* Live-at-entry is read from the live-at-start set of basic block 0.  */
4115 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
4118 /* 1 if register REGNO was alive at a place where `setjmp' was called
4119 and was set more than once or is an argument.
4120 Such regs may be clobbered by `longjmp'. */
/* NOTE(review): the body of the n_basic_blocks == 0 case is absent from
   this listing.  */
4123 regno_clobbered_at_setjmp (regno)
4126 if (n_basic_blocks == 0)
/* Clobbered iff (multiply set, or live at entry) and live at a setjmp.  */
4129 return ((REG_N_SETS (regno) > 1
4130 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
4131 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
4134 /* INSN references memory, possibly using autoincrement addressing modes.
4135 Find any entries on the mem_set_list that need to be invalidated due
4136 to an address change. */
/* NOTE(review): braces and some lines are missing from this listing.  */
4139 invalidate_mems_from_autoinc (pbi, insn)
4140 struct propagate_block_info *pbi;
4143 rtx note = REG_NOTES (insn);
/* Scan REG_NOTES for REG_INC notes (autoincremented registers).  */
4144 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
4146 if (REG_NOTE_KIND (note) == REG_INC)
4148 rtx temp = pbi->mem_set_list;
4149 rtx prev = NULL_RTX;
4154 next = XEXP (temp, 1);
/* Any tracked store whose address mentions the incremented register
   is no longer a valid redundancy candidate.  */
4155 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
4157 /* Splice temp out of list. */
4159 XEXP (prev, 1) = next;
4161 pbi->mem_set_list = next;
4162 free_EXPR_LIST_node (temp);
4172 /* Process the registers that are set within X. Their bits are set to
4173 1 in the regset DEAD, because they are dead prior to this insn.
4175 If INSN is nonzero, it is the insn being processed.
4177 FLAGS is the set of operations to perform. */
/* NOTE(review): listing has missing lines (braces, defaults); comments
   describe only visible statements.  */
4180 mark_set_regs (pbi, x, insn)
4181 struct propagate_block_info *pbi;
4184 rtx cond = NULL_RTX;
/* First handle implicit sets recorded as REG_INC notes on the insn.  */
4189 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
4191 if (REG_NOTE_KIND (link) == REG_INC)
4192 mark_set_1 (pbi, SET, XEXP (link, 0),
4193 (GET_CODE (x) == COND_EXEC
4194 ? COND_EXEC_TEST (x) : NULL_RTX),
/* Dispatch on the top-level rtx code of the pattern.  */
4198 switch (code = GET_CODE (x))
4202 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
/* COND_EXEC: remember the guard condition and process the guarded body.  */
4206 cond = COND_EXEC_TEST (x);
4207 x = COND_EXEC_CODE (x);
/* PARALLEL: process each element, tracking per-element COND_EXEC.  */
4213 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
4215 rtx sub = XVECEXP (x, 0, i);
4216 switch (code = GET_CODE (sub))
4219 if (cond != NULL_RTX)
4222 cond = COND_EXEC_TEST (sub);
4223 sub = COND_EXEC_CODE (sub);
4224 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
4230 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
4245 /* Process a single SET rtx, X. */
/* NOTE(review): this listing omits many lines of this large function
   (braces, else-arms, defaults); added comments cover only what is
   visible and hedge where the gaps matter.  */
4248 mark_set_1 (pbi, code, reg, cond, insn, flags)
4249 struct propagate_block_info *pbi;
4251 rtx reg, cond, insn;
4254 int regno_first = -1, regno_last = -1;
4258 /* Some targets place small structures in registers for
4259 return values of functions. We have to detect this
4260 case specially here to get correct flow information. */
4261 if (GET_CODE (reg) == PARALLEL
4262 && GET_MODE (reg) == BLKmode)
/* Recurse on each piece of the BLKmode PARALLEL return value.  */
4264 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
4265 mark_set_1 (pbi, code, XVECEXP (reg, 0, i), cond, insn, flags);
4269 /* Modifying just one hardware register of a multi-reg value or just a
4270 byte field of a register does not mean the value from before this insn
4271 is now dead. Of course, if it was dead after it's unused now. */
4273 switch (GET_CODE (reg))
4277 case STRICT_LOW_PART:
4278 /* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
4280 reg = XEXP (reg, 0);
4281 while (GET_CODE (reg) == SUBREG
4282 || GET_CODE (reg) == ZERO_EXTRACT
4283 || GET_CODE (reg) == SIGN_EXTRACT
4284 || GET_CODE (reg) == STRICT_LOW_PART);
4285 if (GET_CODE (reg) == MEM)
/* Partial set: the old value may still be live afterwards.  */
4287 not_dead = REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
/* Plain REG: compute the hard-register range [regno_first, regno_last].  */
4291 regno_last = regno_first = REGNO (reg);
4292 if (regno_first < FIRST_PSEUDO_REGISTER)
4293 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
4297 if (GET_CODE (SUBREG_REG (reg)) == REG)
4299 enum machine_mode outer_mode = GET_MODE (reg);
4300 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
4302 /* Identify the range of registers affected. This is moderately
4303 tricky for hard registers. See alter_subreg. */
4305 regno_last = regno_first = REGNO (SUBREG_REG (reg));
4306 if (regno_first < FIRST_PSEUDO_REGISTER)
4308 #ifdef ALTER_HARD_SUBREG
4309 regno_first = ALTER_HARD_SUBREG (outer_mode, SUBREG_WORD (reg),
4310 inner_mode, regno_first);
4312 regno_first += SUBREG_WORD (reg);
4314 regno_last = (regno_first
4315 + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
4317 /* Since we've just adjusted the register number ranges, make
4318 sure REG matches. Otherwise some_was_live will be clear
4319 when it shouldn't have been, and we'll create incorrect
4320 REG_UNUSED notes. */
4321 reg = gen_rtx_REG (outer_mode, regno_first);
4325 /* If the number of words in the subreg is less than the number
4326 of words in the full register, we have a well-defined partial
4327 set. Otherwise the high bits are undefined.
4329 This is only really applicable to pseudos, since we just took
4330 care of multi-word hard registers. */
4331 if (((GET_MODE_SIZE (outer_mode)
4332 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
4333 < ((GET_MODE_SIZE (inner_mode)
4334 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
4335 not_dead = REGNO_REG_SET_P (pbi->reg_live, regno_first);
4337 reg = SUBREG_REG (reg);
4341 reg = SUBREG_REG (reg);
4348 /* If this set is a MEM, then it kills any aliased writes.
4349 If this set is a REG, then it kills any MEMs which use the reg. */
4350 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
4352 if (GET_CODE (reg) == MEM || GET_CODE (reg) == REG)
4354 rtx temp = pbi->mem_set_list;
4355 rtx prev = NULL_RTX;
4360 next = XEXP (temp, 1);
4361 if ((GET_CODE (reg) == MEM
4362 && output_dependence (XEXP (temp, 0), reg))
4363 || (GET_CODE (reg) == REG
4364 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
4366 /* Splice this entry out of the list. */
4368 XEXP (prev, 1) = next;
4370 pbi->mem_set_list = next;
4371 free_EXPR_LIST_node (temp);
4379 /* If the memory reference had embedded side effects (autoincrement
4380 address modes. Then we may need to kill some entries on the
4382 if (insn && GET_CODE (reg) == MEM)
4383 invalidate_mems_from_autoinc (pbi, insn);
4385 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
4386 /* ??? With more effort we could track conditional memory life. */
4388 /* We do not know the size of a BLKmode store, so we do not track
4389 them for redundant store elimination. */
4390 && GET_MODE (reg) != BLKmode
4391 /* There are no REG_INC notes for SP, so we can't assume we'll see
4392 everything that invalidates it. To be safe, don't eliminate any
4393 stores though SP; none of them should be redundant anyway. */
4394 && ! reg_mentioned_p (stack_pointer_rtx, reg))
4395 pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
/* Register destination, excluding the frame/arg pointers that must never
   be considered dead while they are needed.  */
4398 if (GET_CODE (reg) == REG
4399 && ! (regno_first == FRAME_POINTER_REGNUM
4400 && (! reload_completed || frame_pointer_needed))
4401 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4402 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
4403 && (! reload_completed || frame_pointer_needed))
4405 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4406 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
4410 int some_was_live = 0, some_was_dead = 0;
/* Record liveness before the set and mark the regs as killed here.  */
4412 for (i = regno_first; i <= regno_last; ++i)
4414 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
4416 SET_REGNO_REG_SET (pbi->local_set, i);
4417 if (code != CLOBBER)
4418 SET_REGNO_REG_SET (pbi->new_set, i);
4420 some_was_live |= needed_regno;
4421 some_was_dead |= ! needed_regno;
4424 #ifdef HAVE_conditional_execution
4425 /* Consider conditional death in deciding that the register needs
4427 if (some_was_live && ! not_dead
4428 /* The stack pointer is never dead. Well, not strictly true,
4429 but it's very difficult to tell from here. Hopefully
4430 combine_stack_adjustments will fix up the most egregious
4432 && regno_first != STACK_POINTER_REGNUM)
4434 for (i = regno_first; i <= regno_last; ++i)
4435 if (! mark_regno_cond_dead (pbi, i, cond))
4440 /* Additional data to record if this is the final pass. */
4441 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
4442 | PROP_DEATH_NOTES | PROP_AUTOINC))
4445 register int blocknum = pbi->bb->index;
4448 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4450 y = pbi->reg_next_use[regno_first];
4452 /* The next use is no longer next, since a store intervenes. */
4453 for (i = regno_first; i <= regno_last; ++i)
4454 pbi->reg_next_use[i] = 0;
4457 if (flags & PROP_REG_INFO)
4459 for (i = regno_first; i <= regno_last; ++i)
4461 /* Count (weighted) references, stores, etc. This counts a
4462 register twice if it is modified, but that is correct. */
4463 REG_N_SETS (i) += 1;
4464 REG_N_REFS (i) += (optimize_size ? 1
4465 : pbi->bb->loop_depth + 1);
4467 /* The insns where a reg is live are normally counted
4468 elsewhere, but we want the count to include the insn
4469 where the reg is set, and the normal counting mechanism
4470 would not count it. */
4471 REG_LIVE_LENGTH (i) += 1;
4474 /* If this is a hard reg, record this function uses the reg. */
4475 if (regno_first < FIRST_PSEUDO_REGISTER)
4477 for (i = regno_first; i <= regno_last; i++)
4478 regs_ever_live[i] = 1;
4482 /* Keep track of which basic blocks each reg appears in. */
4483 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
4484 REG_BASIC_BLOCK (regno_first) = blocknum;
4485 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
4486 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
4490 if (! some_was_dead)
4492 if (flags & PROP_LOG_LINKS)
4494 /* Make a logical link from the next following insn
4495 that uses this register, back to this insn.
4496 The following insns have already been processed.
4498 We don't build a LOG_LINK for hard registers containing
4499 in ASM_OPERANDs. If these registers get replaced,
4500 we might wind up changing the semantics of the insn,
4501 even if reload can make what appear to be valid
4502 assignments later. */
4503 if (y && (BLOCK_NUM (y) == blocknum)
4504 && (regno_first >= FIRST_PSEUDO_REGISTER
4505 || asm_noperands (PATTERN (y)) < 0))
4506 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4511 else if (! some_was_live)
4513 if (flags & PROP_REG_INFO)
4514 REG_N_DEATHS (regno_first) += 1;
4516 if (flags & PROP_DEATH_NOTES)
4518 /* Note that dead stores have already been deleted
4519 when possible. If we get here, we have found a
4520 dead store that cannot be eliminated (because the
4521 same insn does something useful). Indicate this
4522 by marking the reg being set as dying here. */
4524 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4529 if (flags & PROP_DEATH_NOTES)
4531 /* This is a case where we have a multi-word hard register
4532 and some, but not all, of the words of the register are
4533 needed in subsequent insns. Write REG_UNUSED notes
4534 for those parts that were not needed. This case should
4537 for (i = regno_first; i <= regno_last; ++i)
4538 if (! REGNO_REG_SET_P (pbi->reg_live, i))
4540 = alloc_EXPR_LIST (REG_UNUSED,
4541 gen_rtx_REG (reg_raw_mode[i], i),
4547 /* Mark the register as being dead. */
4550 /* The stack pointer is never dead. Well, not strictly true,
4551 but it's very difficult to tell from here. Hopefully
4552 combine_stack_adjustments will fix up the most egregious
4554 && regno_first != STACK_POINTER_REGNUM)
4556 for (i = regno_first; i <= regno_last; ++i)
4557 CLEAR_REGNO_REG_SET (pbi->reg_live, i);
4560 else if (GET_CODE (reg) == REG)
/* Destination excluded above (frame/arg pointer): only clear next-use.  */
4562 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4563 pbi->reg_next_use[regno_first] = 0;
4566 /* If this is the last pass and this is a SCRATCH, show it will be dying
4567 here and count it. */
4568 else if (GET_CODE (reg) == SCRATCH)
4570 if (flags & PROP_DEATH_NOTES)
4572 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4576 #ifdef HAVE_conditional_execution
4577 /* Mark REGNO conditionally dead. Return true if the register is
4578 now unconditionally dead. */
/* NOTE(review): braces/returns are missing from this listing; comments
   describe the visible logic only.  */
4581 mark_regno_cond_dead (pbi, regno, cond)
4582 struct propagate_block_info *pbi;
4586 /* If this is a store to a predicate register, the value of the
4587 predicate is changing, we don't know that the predicate as seen
4588 before is the same as that seen after. Flush all dependent
4589 conditions from reg_cond_dead. This will make all such
4590 conditionally live registers unconditionally live. */
4591 if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
4592 flush_reg_cond_reg (pbi, regno);
4594 /* If this is an unconditional store, remove any conditional
4595 life that may have existed. */
4596 if (cond == NULL_RTX)
4597 splay_tree_remove (pbi->reg_cond_dead, regno);
4600 splay_tree_node node;
4601 struct reg_cond_life_info *rcli;
4604 /* Otherwise this is a conditional set. Record that fact.
4605 It may have been conditionally used, or there may be a
4606 subsequent set with a complimentary condition. */
4608 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
4611 /* The register was unconditionally live previously.
4612 Record the current condition as the condition under
4613 which it is dead. */
4614 rcli = (struct reg_cond_life_info *)
4615 xmalloc (sizeof (*rcli));
4616 rcli->condition = alloc_EXPR_LIST (0, cond, NULL_RTX);
4617 splay_tree_insert (pbi->reg_cond_dead, regno,
4618 (splay_tree_value) rcli);
/* Remember the condition register so later stores to it flush us.  */
4620 SET_REGNO_REG_SET (pbi->reg_cond_reg,
4621 REGNO (XEXP (cond, 0)));
4623 /* Not unconditionaly dead. */
4628 /* The register was conditionally live previously.
4629 Add the new condition to the old. */
4630 rcli = (struct reg_cond_life_info *) node->value;
4631 ncond = rcli->condition;
4632 ncond = ior_reg_cond (ncond, cond);
4634 /* If the register is now unconditionally dead,
4635 remove the entry in the splay_tree. */
4636 if (ncond == const1_rtx)
4637 splay_tree_remove (pbi->reg_cond_dead, regno);
4640 rcli->condition = ncond;
4642 SET_REGNO_REG_SET (pbi->reg_cond_reg,
4643 REGNO (XEXP (cond, 0)));
4645 /* Not unconditionaly dead. */
4654 /* Called from splay_tree_delete for pbi->reg_cond_life. */
/* Splay-tree value destructor: frees the EXPR_LIST condition chain and
   (presumably, in the missing lines) the rcli struct itself.  */
4657 free_reg_cond_life_info (value)
4658 splay_tree_value value;
4660 struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
4661 free_EXPR_LIST_list (&rcli->condition);
4665 /* Helper function for flush_reg_cond_reg. */
/* NOTE(review): listing is missing lines (loop header, returns); comments
   describe the visible statements only.  */
4668 flush_reg_cond_reg_1 (node, data)
4669 splay_tree_node node;
4672 struct reg_cond_life_info *rcli;
4673 int *xdata = (int *) data;
4674 unsigned int regno = xdata[0];
4677 /* Don't need to search if last flushed value was farther on in
4678 the in-order traversal. */
4679 if (xdata[1] >= (int) node->key)
4682 /* Splice out portions of the expression that refer to regno. */
4683 rcli = (struct reg_cond_life_info *) node->value;
4684 c = *(prev = &rcli->condition);
/* Remove each sub-condition whose tested register is REGNO.  */
4687 if (regno == REGNO (XEXP (XEXP (c, 0), 0)))
4689 rtx next = XEXP (c, 1);
4690 free_EXPR_LIST_node (c);
4694 c = *(prev = &XEXP (c, 1));
4697 /* If the entire condition is now NULL, signal the node to be removed. */
4698 if (! rcli->condition)
4700 xdata[1] = node->key;
4707 /* Flush all (sub) expressions referring to REGNO from REG_COND_LIVE. */
4710 flush_reg_cond_reg (pbi, regno)
4711 struct propagate_block_info *pbi;
/* flush_reg_cond_reg_1 returns -1 (aborting the walk) when a node's whole
   condition became empty; remove that node and restart the traversal.  */
4718 while (splay_tree_foreach (pbi->reg_cond_dead,
4719 flush_reg_cond_reg_1, pair) == -1)
4720 splay_tree_remove (pbi->reg_cond_dead, pair[1]);
/* REGNO is no longer used as a condition register.  */
4722 CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
4725 /* Logical arithmetic on predicate conditions. IOR, NOT and NAND.
4726 We actually use EXPR_LIST to chain the sub-expressions together
4727 instead of IOR because it's easier to manipulate and we have
4728 the lists.c functions to reuse nodes.
4730 Return a new rtl expression as appropriate. */
/* NOTE(review): some lines (returns, braces) are missing from this
   listing.  */
4733 ior_reg_cond (old, x)
4736 enum rtx_code x_code;
4740 /* We expect these conditions to be of the form (eq reg 0). */
4741 x_code = GET_CODE (x);
4742 if (GET_RTX_CLASS (x_code) != '<'
4743 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4744 || XEXP (x, 1) != const0_rtx)
4747 /* Search the expression for an existing sub-expression of X_REG. */
4748 for (c = old; c ; c = XEXP (c, 1))
4750 rtx y = XEXP (c, 0);
4751 if (REGNO (XEXP (y, 0)) == REGNO (x_reg))
4753 /* If we find X already present in OLD, we need do nothing. */
4754 if (GET_CODE (y) == x_code)
4757 /* If we find X being a compliment of a condition in OLD,
4758 then the entire condition is true. */
4759 if (GET_CODE (y) == reverse_condition (x_code))
4764 /* Otherwise just add to the chain. */
4765 return alloc_EXPR_LIST (0, x, old);
/* NOTE(review): the function header for this fragment is missing from the
   listing; by position between ior_reg_cond and nand_reg_cond this is
   presumably not_reg_cond — confirm against flow.c.  It negates a
   condition of the form (eq reg 0) by reversing the comparison code.  */
4772 enum rtx_code x_code;
4775 /* We expect these conditions to be of the form (eq reg 0). */
4776 x_code = GET_CODE (x);
4777 if (GET_RTX_CLASS (x_code) != '<'
4778 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4779 || XEXP (x, 1) != const0_rtx)
/* Build a single-element EXPR_LIST holding the reversed comparison.  */
4782 return alloc_EXPR_LIST (0, gen_rtx_fmt_ee (reverse_condition (x_code),
4783 VOIDmode, x_reg, const0_rtx),
/* NAND of predicate conditions; see the block comment above ior_reg_cond.
   NOTE(review): some lines (returns, braces) are missing from this
   listing.  */
4788 nand_reg_cond (old, x)
4791 enum rtx_code x_code;
4795 /* We expect these conditions to be of the form (eq reg 0). */
4796 x_code = GET_CODE (x);
4797 if (GET_RTX_CLASS (x_code) != '<'
4798 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4799 || XEXP (x, 1) != const0_rtx)
4802 /* Search the expression for an existing sub-expression of X_REG. */
4804 for (c = *(prev = &old); c ; c = *(prev = &XEXP (c, 1)))
4806 rtx y = XEXP (c, 0);
4807 if (REGNO (XEXP (y, 0)) == REGNO (x_reg))
4809 /* If we find X already present in OLD, then we need to
4811 if (GET_CODE (y) == x_code)
/* Splice the matching node out of the chain and free it.  */
4813 *prev = XEXP (c, 1);
4814 free_EXPR_LIST_node (c);
4815 return old ? old : const0_rtx;
4818 /* If we find X being a compliment of a condition in OLD,
4819 then we need do nothing. */
4820 if (GET_CODE (y) == reverse_condition (x_code))
4825 /* Otherwise, by implication, the register in question is now live for
4826 the inverse of the condition X. */
4827 return alloc_EXPR_LIST (0, gen_rtx_fmt_ee (reverse_condition (x_code),
4828 VOIDmode, x_reg, const0_rtx),
4831 #endif /* HAVE_conditional_execution */
4835 /* Try to substitute the auto-inc expression INC as the address inside
4836 MEM which occurs in INSN. Currently, the address of MEM is an expression
4837 involving INCR_REG, and INCR is the next use of INCR_REG; it is an insn
4838 that has a single set whose source is a PLUS of INCR_REG and something
/* NOTE(review): many lines of this function are missing from the listing;
   comments cover only the visible statements.  */
4842 attempt_auto_inc (pbi, inc, insn, mem, incr, incr_reg)
4843 struct propagate_block_info *pbi;
4844 rtx inc, insn, mem, incr, incr_reg;
4846 int regno = REGNO (incr_reg);
4847 rtx set = single_set (incr);
4848 rtx q = SET_DEST (set);
4849 rtx y = SET_SRC (set);
/* Which operand of the PLUS is INCR_REG (the other is the increment).  */
4850 int opnum = XEXP (y, 0) == incr_reg ? 0 : 1;
4852 /* Make sure this reg appears only once in this insn. */
4853 if (count_occurrences (PATTERN (insn), incr_reg, 1) != 1)
4856 if (dead_or_set_p (incr, incr_reg)
4857 /* Mustn't autoinc an eliminable register. */
4858 && (regno >= FIRST_PSEUDO_REGISTER
4859 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
4861 /* This is the simple case. Try to make the auto-inc. If
4862 we can't, we are done. Otherwise, we will do any
4863 needed updates below. */
4864 if (! validate_change (insn, &XEXP (mem, 0), inc, 0))
4867 else if (GET_CODE (q) == REG
4868 /* PREV_INSN used here to check the semi-open interval
4870 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4871 /* We must also check for sets of q as q may be
4872 a call clobbered hard register and there may
4873 be a call between PREV_INSN (insn) and incr. */
4874 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4876 /* We have *p followed sometime later by q = p+size.
4877 Both p and q must be live afterward,
4878 and q is not used between INSN and its assignment.
4879 Change it to q = p, ...*q..., q = q+size.
4880 Then fall into the usual case. */
/* Emit q = p into a sequence so it can be inserted before INSN.  */
4885 emit_move_insn (q, incr_reg);
4886 insns = get_insns ();
4889 if (basic_block_for_insn)
4890 for (temp = insns; temp; temp = NEXT_INSN (temp))
4891 set_block_for_insn (temp, pbi->bb);
4893 /* If we can't make the auto-inc, or can't make the
4894 replacement into Y, exit. There's no point in making
4895 the change below if we can't do the auto-inc and doing
4896 so is not correct in the pre-inc case. */
/* Validate both changes as a group; abandon everything on failure.  */
4899 validate_change (insn, &XEXP (mem, 0), inc, 1);
4900 validate_change (incr, &XEXP (y, opnum), q, 1);
4901 if (! apply_change_group ())
4904 /* We now know we'll be doing this change, so emit the
4905 new insn(s) and do the updates. */
4906 emit_insns_before (insns, insn);
4908 if (pbi->bb->head == insn)
4909 pbi->bb->head = insns;
4911 /* INCR will become a NOTE and INSN won't contain a
4912 use of INCR_REG. If a use of INCR_REG was just placed in
4913 the insn before INSN, make that the next use.
4914 Otherwise, invalidate it. */
4915 if (GET_CODE (PREV_INSN (insn)) == INSN
4916 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4917 && SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg)
4918 pbi->reg_next_use[regno] = PREV_INSN (insn);
4920 pbi->reg_next_use[regno] = 0;
4925 /* REGNO is now used in INCR which is below INSN, but
4926 it previously wasn't live here. If we don't mark
4927 it as live, we'll put a REG_DEAD note for it
4928 on this insn, which is incorrect. */
4929 SET_REGNO_REG_SET (pbi->reg_live, regno);
4931 /* If there are any calls between INSN and INCR, show
4932 that REGNO now crosses them. */
4933 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4934 if (GET_CODE (temp) == CALL_INSN)
4935 REG_N_CALLS_CROSSED (regno)++;
4940 /* If we haven't returned, it means we were able to make the
4941 auto-inc, so update the status. First, record that this insn
4942 has an implicit side effect. */
/* Attach a REG_INC note so later passes see the embedded increment.  */
4945 = alloc_EXPR_LIST (REG_INC, incr_reg, REG_NOTES (insn));
4947 /* Modify the old increment-insn to simply copy
4948 the already-incremented value of our register. */
4949 if (! validate_change (incr, &SET_SRC (set), incr_reg, 0))
4952 /* If that makes it a no-op (copying the register into itself) delete
4953 it so it won't appear to be a "use" and a "set" of this
4955 if (REGNO (SET_DEST (set)) == REGNO (incr_reg))
4957 /* If the original source was dead, it's dead now. */
4960 while (note = find_reg_note (incr, REG_DEAD, NULL_RTX))
4962 remove_note (incr, note);
4963 if (XEXP (note, 0) != incr_reg)
4964 CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
/* Turn the now-redundant increment insn into a deleted note.  */
4967 PUT_CODE (incr, NOTE);
4968 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4969 NOTE_SOURCE_FILE (incr) = 0;
4972 if (regno >= FIRST_PSEUDO_REGISTER)
4974 /* Count an extra reference to the reg. When a reg is
4975 incremented, spilling it is worse, so we want to make
4976 that less likely. */
4977 REG_N_REFS (regno) += (optimize_size ? 1 : pbi->bb->loop_depth + 1);
4979 /* Count the increment as a setting of the register,
4980 even though it isn't a SET in rtl. */
4981 REG_N_SETS (regno)++;
4985 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
/* NOTE(review): lines are missing from this listing (returns, braces,
   #ifdef AUTO_INC_DEC header); comments cover only what is visible.  */
4989 find_auto_inc (pbi, x, insn)
4990 struct propagate_block_info *pbi;
4994 rtx addr = XEXP (x, 0);
4995 HOST_WIDE_INT offset = 0;
4996 rtx set, y, incr, inc_val;
4998 int size = GET_MODE_SIZE (GET_MODE (x));
/* Jumps are not candidates for auto-inc rewriting.  */
5000 if (GET_CODE (insn) == JUMP_INSN)
5003 /* Here we detect use of an index register which might be good for
5004 postincrement, postdecrement, preincrement, or predecrement. */
5006 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5007 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
5009 if (GET_CODE (addr) != REG)
5012 regno = REGNO (addr);
5014 /* Is the next use an increment that might make auto-increment? */
5015 incr = pbi->reg_next_use[regno];
/* Only consider an increment within the same basic block.  */
5016 if (incr == 0 || BLOCK_NUM (incr) != BLOCK_NUM (insn))
5018 set = single_set (incr);
5019 if (set == 0 || GET_CODE (set) != SET)
5023 if (GET_CODE (y) != PLUS)
/* Pick out the increment amount from whichever PLUS operand is not
   the address register.  */
5026 if (REG_P (XEXP (y, 0)) && REGNO (XEXP (y, 0)) == REGNO (addr))
5027 inc_val = XEXP (y, 1);
5028 else if (REG_P (XEXP (y, 1)) && REGNO (XEXP (y, 1)) == REGNO (addr))
5029 inc_val = XEXP (y, 0);
5033 if (GET_CODE (inc_val) == CONST_INT)
/* Match the constant increment against the addressing modes the target
   supports, keyed on the (offset, size) relationship.  */
5035 if (HAVE_POST_INCREMENT
5036 && (INTVAL (inc_val) == size && offset == 0))
5037 attempt_auto_inc (pbi, gen_rtx_POST_INC (Pmode, addr), insn, x,
5039 else if (HAVE_POST_DECREMENT
5040 && (INTVAL (inc_val) == - size && offset == 0))
5041 attempt_auto_inc (pbi, gen_rtx_POST_DEC (Pmode, addr), insn, x,
5043 else if (HAVE_PRE_INCREMENT
5044 && (INTVAL (inc_val) == size && offset == size))
5045 attempt_auto_inc (pbi, gen_rtx_PRE_INC (Pmode, addr), insn, x,
5047 else if (HAVE_PRE_DECREMENT
5048 && (INTVAL (inc_val) == - size && offset == - size))
5049 attempt_auto_inc (pbi, gen_rtx_PRE_DEC (Pmode, addr), insn, x,
5051 else if (HAVE_POST_MODIFY_DISP && offset == 0)
5052 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
5053 gen_rtx_PLUS (Pmode,
5056 insn, x, incr, addr);
/* Register increment: usable only if the increment register is not
   modified between INSN and the increment insn.  */
5058 else if (GET_CODE (inc_val) == REG
5059 && ! reg_set_between_p (inc_val, PREV_INSN (insn),
5063 if (HAVE_POST_MODIFY_REG && offset == 0)
5064 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
5065 gen_rtx_PLUS (Pmode,
5068 insn, x, incr, addr);
5072 #endif /* AUTO_INC_DEC */
/* Mark the register REG (used in INSN, possibly under condition COND) as
   live in PBI->reg_live, updating reg-info statistics and REG_DEAD notes.
   NOTE(review): lines are elided from this excerpt (embedded line numbers
   jump); comments below cover only the visible logic.  */
5075 mark_used_reg (pbi, reg, cond, insn)
5076      struct propagate_block_info *pbi;
5078      rtx cond ATTRIBUTE_UNUSED;
5081   int regno = REGNO (reg);
5082   int some_was_live = REGNO_REG_SET_P (pbi->reg_live, regno);
5083   int some_was_dead = ! some_was_live;
5087   /* A hard reg in a wide mode may really be multiple registers.
5088      If so, mark all of them just like the first.  */
5089   if (regno < FIRST_PSEUDO_REGISTER)
5091       n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5094 	  int needed_regno = REGNO_REG_SET_P (pbi->reg_live, regno + n);
5095 	  some_was_live |= needed_regno;
5096 	  some_was_dead |= ! needed_regno;
5100   if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
5102       /* Record where each reg is used, so when the reg is set we know
5103 	 the next insn that uses it.  */
5104       pbi->reg_next_use[regno] = insn;
5107   if (pbi->flags & PROP_REG_INFO)
5109       if (regno < FIRST_PSEUDO_REGISTER)
5111 	  /* If this is a register we are going to try to eliminate,
5112 	     don't mark it live here.  If we are successful in
5113 	     eliminating it, it need not be live unless it is used for
5114 	     pseudos, in which case it will have been set live when it
5115 	     was allocated to the pseudos.  If the register will not
5116 	     be eliminated, reload will set it live at that point.
5118 	     Otherwise, record that this function uses this register.  */
5119 	  /* ??? The PPC backend tries to "eliminate" on the pic
5120 	     register to itself.  This should be fixed.  In the mean
5121 	     time, hack around it.  */
5123 	  if (! (TEST_HARD_REG_BIT (elim_reg_set, regno)
5124 		 && (regno == FRAME_POINTER_REGNUM
5125 		     || regno == ARG_POINTER_REGNUM)))
5127 	      int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5129 		regs_ever_live[regno + --n] = 1;
5135 	  /* Keep track of which basic block each reg appears in.  */
5137 	  register int blocknum = pbi->bb->index;
5138 	  if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
5139 	    REG_BASIC_BLOCK (regno) = blocknum;
5140 	  else if (REG_BASIC_BLOCK (regno) != blocknum)
5141 	    REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
5143 	  /* Count (weighted) number of uses of each reg.  */
5144 	  REG_N_REFS (regno) += (optimize_size ? 1
5145 				 : pbi->bb->loop_depth + 1);
5149   /* Find out if any of the register was set this insn.  */
5150   some_not_set = ! REGNO_REG_SET_P (pbi->new_set, regno);
5151   if (regno < FIRST_PSEUDO_REGISTER)
5153       n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5155 	some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, regno + n);
5158   /* Record and count the insns in which a reg dies.  If it is used in
5159      this insn and was dead below the insn then it dies in this insn.
5160      If it was set in this insn, we do not make a REG_DEAD note;
5161      likewise if we already made such a note.  */
5162   if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
5166       /* Check for the case where the register dying partially
5167 	 overlaps the register set by this insn.  */
5168       if (regno < FIRST_PSEUDO_REGISTER
5169 	  && HARD_REGNO_NREGS (regno, GET_MODE (reg)) > 1)
5171 	  n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5173 	    some_was_live |= REGNO_REG_SET_P (pbi->new_set, regno + n);
5176       /* If none of the words in X is needed, make a REG_DEAD note.
5177 	 Otherwise, we must make partial REG_DEAD notes.  */
5178       if (! some_was_live)
5180 	  if ((pbi->flags & PROP_DEATH_NOTES)
5181 	      && ! find_regno_note (insn, REG_DEAD, regno))
5183 	      = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
5185 	  if (pbi->flags & PROP_REG_INFO)
5186 	    REG_N_DEATHS (regno)++;
5190 	  /* Don't make a REG_DEAD note for a part of a register
5191 	     that is set in the insn.  */
5193 	  n = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
5194 	  for (; n >= regno; n--)
5195 	    if (! REGNO_REG_SET_P (pbi->reg_live, n)
5196 		&& ! dead_or_set_regno_p (insn, n))
5198 		= alloc_EXPR_LIST (REG_DEAD,
5199 				   gen_rtx_REG (reg_raw_mode[n], n),
/* Mark the register (and, for a multi-word hard reg, all its words) as
   live from here upward.  */
5204   SET_REGNO_REG_SET (pbi->reg_live, regno);
5205   if (regno < FIRST_PSEUDO_REGISTER)
5207       n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5209 	SET_REGNO_REG_SET (pbi->reg_live, regno + n);
5212 #ifdef HAVE_conditional_execution
5213   /* If this is a conditional use, record that fact.  If it is later
5214      conditionally set, we'll know to kill the register.  */
5215   if (cond != NULL_RTX)
5217       splay_tree_node node;
5218       struct reg_cond_life_info *rcli;
/* Three cases keyed on the reg_cond_dead splay tree lookup: the register
   was unconditionally live, conditionally live, or not live at all.  */
5223       node = splay_tree_lookup (pbi->reg_cond_dead, regno);
5226 	      /* The register was unconditionally live previously.
5227 		 No need to do anything.  */
5231 	      /* The register was conditionally live previously.
5232 		 Subtract the new life cond from the old death cond.  */
5233 	      rcli = (struct reg_cond_life_info *) node->value;
5234 	      ncond = rcli->condition;
5235 	      ncond = nand_reg_cond (ncond, cond);
5237 	      /* If the register is now unconditionally live, remove the
5238 		 entry in the splay_tree.  */
5239 	      if (ncond == const0_rtx)
5241 		  rcli->condition = NULL_RTX;
5242 		  splay_tree_remove (pbi->reg_cond_dead, regno);
5245 		rcli->condition = ncond;
5250 	  /* The register was not previously live at all.  Record
5251 	     the condition under which it is still dead.  */
5252 	  rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
5253 	  rcli->condition = not_reg_cond (cond);
5254 	  splay_tree_insert (pbi->reg_cond_dead, regno,
5255 			     (splay_tree_value) rcli);
5258   else if (some_was_live)
5260       splay_tree_node node;
5261       struct reg_cond_life_info *rcli;
5263       node = splay_tree_lookup (pbi->reg_cond_dead, regno);
5266 	  /* The register was conditionally live previously, but is now
5267 	     unconditionally so.  Remove it from the conditionally dead
5268 	     list, so that a conditional set won't cause us to think
5270 	  rcli = (struct reg_cond_life_info *) node->value;
5271 	  rcli->condition = NULL_RTX;
5272 	  splay_tree_remove (pbi->reg_cond_dead, regno);
5279 /* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
5280    This is done assuming the registers needed from X are those that
5281    have 1-bits in PBI->REG_LIVE.
5283    INSN is the containing instruction.  If INSN is dead, this function
/* NOTE(review): this excerpt is missing lines (embedded line numbers jump,
   e.g. the switch head and several case labels are absent); comments below
   annotate only the visible fragments of the big dispatch on GET_CODE.  */
5287 mark_used_regs (pbi, x, cond, insn)
5288      struct propagate_block_info *pbi;
5291   register RTX_CODE code;
5293   int flags = pbi->flags;
5296   code = GET_CODE (x);
5316       /* If we are clobbering a MEM, mark any registers inside the address
/* Presumably the CLOBBER case (case label elided).  */
5318       if (GET_CODE (XEXP (x, 0)) == MEM)
5319 	mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
/* Presumably the MEM case: maintain the dead-store candidate list.  */
5323       /* Don't bother watching stores to mems if this is not the
5324 	 final pass.  We'll not be deleting dead stores this round.  */
5325       if (optimize && (flags & PROP_SCAN_DEAD_CODE))
5327 	  /* Invalidate the data for the last MEM stored, but only if MEM is
5328 	     something that can be stored into.  */
5329 	  if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
5330 	      && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
5331 	    ; /* needn't clear the memory set list */
5334 	      rtx temp = pbi->mem_set_list;
5335 	      rtx prev = NULL_RTX;
/* Walk mem_set_list, splicing out any entry that this load may depend on
   (anti_dependence), since the store is then not provably dead.  */
5340 		  next = XEXP (temp, 1);
5341 		  if (anti_dependence (XEXP (temp, 0), x))
5343 		      /* Splice temp out of the list.  */
5345 			XEXP (prev, 1) = next;
5347 			pbi->mem_set_list = next;
5348 		      free_EXPR_LIST_node (temp);
5356 	  /* If the memory reference had embedded side effects (autoincrement
5357 	     address modes.  Then we may need to kill some entries on the
5360 	    invalidate_mems_from_autoinc (pbi, insn);
5364       if (flags & PROP_AUTOINC)
5365 	find_auto_inc (pbi, x, insn);
/* Presumably the SUBREG case.  */
5370 #ifdef CLASS_CANNOT_CHANGE_MODE
5371       if (GET_CODE (SUBREG_REG (x)) == REG
5372 	  && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
5373 	  && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x),
5374 					 GET_MODE (SUBREG_REG (x))))
5375 	REG_CHANGES_MODE (REGNO (SUBREG_REG (x))) = 1;
5378       /* While we're here, optimize this case.  */
5380       if (GET_CODE (x) != REG)
5385       /* See a register other than being set => mark it as needed.  */
5386       mark_used_reg (pbi, x, cond, insn);
/* Presumably the SET case.  */
5391 	register rtx testreg = SET_DEST (x);
5394 	/* If storing into MEM, don't show it as being used.  But do
5395 	   show the address as being used.  */
5396 	if (GET_CODE (testreg) == MEM)
5399 	    if (flags & PROP_AUTOINC)
5400 	      find_auto_inc (pbi, testreg, insn);
5402 	    mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
5403 	    mark_used_regs (pbi, SET_SRC (x), cond, insn);
5407 	/* Storing in STRICT_LOW_PART is like storing in a reg
5408 	   in that this SET might be dead, so ignore it in TESTREG.
5409 	   but in some other ways it is like using the reg.
5411 	   Storing in a SUBREG or a bit field is like storing the entire
5412 	   register in that if the register's value is not used
5413 	   then this SET is not needed.  */
5414 	while (GET_CODE (testreg) == STRICT_LOW_PART
5415 	       || GET_CODE (testreg) == ZERO_EXTRACT
5416 	       || GET_CODE (testreg) == SIGN_EXTRACT
5417 	       || GET_CODE (testreg) == SUBREG)
5419 #ifdef CLASS_CANNOT_CHANGE_MODE
5420 	    if (GET_CODE (testreg) == SUBREG
5421 		&& GET_CODE (SUBREG_REG (testreg)) == REG
5422 		&& REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
5423 		&& CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (SUBREG_REG (testreg)),
5424 					       GET_MODE (testreg)))
5425 	      REG_CHANGES_MODE (REGNO (SUBREG_REG (testreg))) = 1;
5428 	    /* Modifying a single register in an alternate mode
5429 	       does not use any of the old value.  But these other
5430 	       ways of storing in a register do use the old value.  */
5431 	    if (GET_CODE (testreg) == SUBREG
5432 		&& !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5437 	    testreg = XEXP (testreg, 0);
5440 	/* If this is a store into a register, recursively scan the
5441 	   value being stored.  */
5443 	if ((GET_CODE (testreg) == PARALLEL
5444 	     && GET_MODE (testreg) == BLKmode)
5445 	    || (GET_CODE (testreg) == REG
5446 		&& (regno = REGNO (testreg),
5447 		    ! (regno == FRAME_POINTER_REGNUM
5448 		       && (! reload_completed || frame_pointer_needed)))
5449 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5450 		&& ! (regno == HARD_FRAME_POINTER_REGNUM
5451 		      && (! reload_completed || frame_pointer_needed))
5453 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5454 		&& ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
5459 	    mark_used_regs (pbi, SET_DEST (x), cond, insn);
5460 	    mark_used_regs (pbi, SET_SRC (x), cond, insn);
5467     case UNSPEC_VOLATILE:
5471       /* Traditional and volatile asm instructions must be considered to use
5472 	 and clobber all hard registers, all pseudo-registers and all of
5473 	 memory.  So must TRAP_IF and UNSPEC_VOLATILE operations.
5475 	 Consider for instance a volatile asm that changes the fpu rounding
5476 	 mode.  An insn should not be moved across this even if it only uses
5477 	 pseudo-regs because it might give an incorrectly rounded result.
5479 	 ?!? Unfortunately, marking all hard registers as live causes massive
5480 	 problems for the register allocator and marking all pseudos as live
5481 	 creates mountains of uninitialized variable warnings.
5483 	 So for now, just clear the memory set list and mark any regs
5484 	 we can find in ASM_OPERANDS as used.  */
5485       if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
5486 	free_EXPR_LIST_list (&pbi->mem_set_list);
5488       /* For all ASM_OPERANDS, we must traverse the vector of input operands.
5489 	 We can not just fall through here since then we would be confused
5490 	 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
5491 	 traditional asms unlike their normal usage.  */
5492       if (code == ASM_OPERANDS)
5496 	  for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
5497 	    mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
/* Presumably the COND_EXEC case: scan the test unconditionally, then
   re-enter with COND set to the execution condition.  */
5503       if (cond != NULL_RTX)
5506       mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
5508       cond = COND_EXEC_TEST (x);
5509       x = COND_EXEC_CODE (x);
5513       /* We _do_not_ want to scan operands of phi nodes.  Operands of
5514 	 a phi function are evaluated only when control reaches this
5515 	 block along a particular edge.  Therefore, regs that appear
5516 	 as arguments to phi should not be added to the global live at
5524   /* Recursively scan the operands of this expression.  */
5527     register const char *fmt = GET_RTX_FORMAT (code);
5530     for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5534 	    /* Tail recursive case: save a function call level.  */
5540 	    mark_used_regs (pbi, XEXP (x, i), cond, insn);
5542 	else if (fmt[i] == 'E')
5545 	    for (j = 0; j < XVECLEN (x, i); j++)
5546 	      mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
/* INSN is an add/sub of a constant to a register.  If the register's next
   use (in the same basic block) can absorb the increment as a pre-inc or
   pre-dec addressing mode, rewrite that use and delete INSN.
   NOTE(review): lines are elided from this excerpt; the return value and
   some guards are not visible.  */
5555 try_pre_increment_1 (pbi, insn)
5556      struct propagate_block_info *pbi;
5559   /* Find the next use of this reg.  If in same basic block,
5560      make it do pre-increment or pre-decrement if appropriate.  */
5561   rtx x = single_set (insn);
/* AMOUNT is signed: positive for PLUS, negative otherwise (MINUS).  */
5562   HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
5563 			  * INTVAL (XEXP (SET_SRC (x), 1)));
5564   int regno = REGNO (SET_DEST (x));
5565   rtx y = pbi->reg_next_use[regno];
5567       && BLOCK_NUM (y) == BLOCK_NUM (insn)
5568       /* Don't do this if the reg dies, or gets set in y; a standard addressing
5569 	 mode would be better.  */
5570       && ! dead_or_set_p (y, SET_DEST (x))
5571       && try_pre_increment (y, SET_DEST (x), amount))
5573       /* We have found a suitable auto-increment
5574 	 and already changed insn Y to do it.
5575 	 So flush this increment-instruction.  */
/* The increment insn is deleted by turning it into a NOTE in place.  */
5576       PUT_CODE (insn, NOTE);
5577       NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
5578       NOTE_SOURCE_FILE (insn) = 0;
5579       /* Count a reference to this reg for the increment
5580 	 insn we are deleting.  When a reg is incremented.
5581 	 spilling it is worse, so we want to make that
5583       if (regno >= FIRST_PSEUDO_REGISTER)
5585 	  REG_N_REFS (regno) += (optimize_size ? 1
5586 				 : pbi->bb->loop_depth + 1);
5587 	  REG_N_SETS (regno)++;
5594 /* Try to change INSN so that it does pre-increment or pre-decrement
5595    addressing on register REG in order to add AMOUNT to REG.
5596    AMOUNT is negative for pre-decrement.
5597    Returns 1 if the change could be made.
5598    This checks all about the validity of the result of modifying INSN.  */
/* NOTE(review): lines are elided (variable declarations for use/pre_ok/
   post_ok/do_post and several returns are not visible in this excerpt).  */
5601 try_pre_increment (insn, reg, amount)
5603      HOST_WIDE_INT amount;
5607   /* Nonzero if we can try to make a pre-increment or pre-decrement.
5608      For example, addl $4,r1; movl (r1),... can become movl +(r1),...  */
5610   /* Nonzero if we can try to make a post-increment or post-decrement.
5611      For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
5612      It is possible for both PRE_OK and POST_OK to be nonzero if the machine
5613      supports both pre-inc and post-inc, or both pre-dec and post-dec.  */
5616   /* Nonzero if the opportunity actually requires post-inc or post-dec.  */
5619   /* From the sign of increment, see which possibilities are conceivable
5620      on this target machine.  */
5621   if (HAVE_PRE_INCREMENT && amount > 0)
5623   if (HAVE_POST_INCREMENT && amount > 0)
5626   if (HAVE_PRE_DECREMENT && amount < 0)
5628   if (HAVE_POST_DECREMENT && amount < 0)
5631   if (! (pre_ok || post_ok))
5634   /* It is not safe to add a side effect to a jump insn
5635      because if the incremented register is spilled and must be reloaded
5636      there would be no way to store the incremented value back in memory.  */
5638   if (GET_CODE (insn) == JUMP_INSN)
/* Look for REG used as a plain address (pre case) or as address plus
   -AMOUNT (post case); find_use_as_address returns the MEM, 0 if absent,
   or (rtx) 1 if REG is used some other way.  */
5643   use = find_use_as_address (PATTERN (insn), reg, 0);
5644   if (post_ok && (use == 0 || use == (rtx) 1))
5646       use = find_use_as_address (PATTERN (insn), reg, -amount);
5650   if (use == 0 || use == (rtx) 1)
/* The access size must match the increment magnitude exactly.  */
5653   if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
5656   /* See if this combination of instruction and addressing mode exists.  */
5657   if (! validate_change (insn, &XEXP (use, 0),
5658 			 gen_rtx_fmt_e (amount > 0
5659 					? (do_post ? POST_INC : PRE_INC)
5660 					: (do_post ? POST_DEC : PRE_DEC),
5664   /* Record that this insn now has an implicit side effect on X.  */
5665   REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
5669 #endif /* AUTO_INC_DEC */
5671 /* Find the place in the rtx X where REG is used as a memory address.
5672    Return the MEM rtx that so uses it.
5673    If PLUSCONST is nonzero, search instead for a memory address equivalent to
5674    (plus REG (const_int PLUSCONST)).
5676    If such an address does not appear, return 0.
5677    If REG appears more than once, or is used other than in such an address,
/* NOTE(review): the tail of the head comment and some lines of the body are
   elided; the sentinel (rtx) 1 visibly means "REG used in a disqualifying
   way", matching the caller in try_pre_increment.  */
5681 find_use_as_address (x, reg, plusconst)
5684      HOST_WIDE_INT plusconst;
5686   enum rtx_code code = GET_CODE (x);
5687   const char *fmt = GET_RTX_FORMAT (code);
5689   register rtx value = 0;
/* Direct hit: (mem REG) with no required displacement.  */
5692   if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
/* Displaced hit: (mem (plus REG (const_int PLUSCONST))).  */
5695   if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
5696       && XEXP (XEXP (x, 0), 0) == reg
5697       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5698       && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
5701   if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
5703       /* If REG occurs inside a MEM used in a bit-field reference,
5704 	 that is unacceptable.  */
5705       if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
5706 	return (rtx) (HOST_WIDE_INT) 1;
5710     return (rtx) (HOST_WIDE_INT) 1;
/* Recurse over operands, accumulating at most one valid use; multiple
   uses (or an invalid one in a subexpression) yield the sentinel.  */
5712   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5716 	  tem = find_use_as_address (XEXP (x, i), reg, plusconst);
5720 	    return (rtx) (HOST_WIDE_INT) 1;
5722       else if (fmt[i] == 'E')
5725 	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5727 	      tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
5731 		return (rtx) (HOST_WIDE_INT) 1;
5739 /* Write information about registers and basic blocks into FILE.
5740    This is part of making a debugging dump.  */
/* Print register set R to stream OUTF, one regno per entry, with the hard
   register's name in brackets for hard regs.  NOTE(review): lines elided.  */
5743 dump_regset (r, outf)
5750     fputs (" (nil)", outf);
5754   EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
5756 	   fprintf (outf, " %d", i);
5757 	   if (i < FIRST_PSEUDO_REGISTER)
5758 	     fprintf (outf, " [%s]",
/* Body fragment of debug_regset: dump R to stderr (for use from a
   debugger); its definition line is elided in this excerpt.  */
5767   dump_regset (r, stderr);
5768   putc ('\n', stderr);
/* Dump per-pseudo register statistics and the basic-block/edge structure
   of the current function to FILE.  NOTE(review): lines elided throughout
   this excerpt.  */
5772 dump_flow_info (file)
5776   static const char * const reg_class_names[] = REG_CLASS_NAMES;
5778   fprintf (file, "%d registers.\n", max_regno);
/* Per-pseudo summary: refs, live length, home block, sets, deaths,
   calls crossed, size, preferred/alternate class, pointer flag.  */
5779   for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
5782 	enum reg_class class, altclass;
5783 	fprintf (file, "\nRegister %d used %d times across %d insns",
5784 		 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
5785 	if (REG_BASIC_BLOCK (i) >= 0)
5786 	  fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
5788 	  fprintf (file, "; set %d time%s", REG_N_SETS (i),
5789 		   (REG_N_SETS (i) == 1) ? "" : "s");
5790 	if (REG_USERVAR_P (regno_reg_rtx[i]))
5791 	  fprintf (file, "; user var");
5792 	if (REG_N_DEATHS (i) != 1)
5793 	  fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
5794 	if (REG_N_CALLS_CROSSED (i) == 1)
5795 	  fprintf (file, "; crosses 1 call");
5796 	else if (REG_N_CALLS_CROSSED (i))
5797 	  fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
5798 	if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
5799 	  fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
5800 	class = reg_preferred_class (i);
5801 	altclass = reg_alternate_class (i);
5802 	if (class != GENERAL_REGS || altclass != ALL_REGS)
5804 	    if (altclass == ALL_REGS || class == ALL_REGS)
5805 	      fprintf (file, "; pref %s", reg_class_names[(int) class]);
5806 	    else if (altclass == NO_REGS)
5807 	      fprintf (file, "; %s or none", reg_class_names[(int) class]);
5809 	      fprintf (file, "; pref %s, else %s",
5810 		       reg_class_names[(int) class],
5811 		       reg_class_names[(int) altclass]);
5813 	if (REGNO_POINTER_FLAG (i))
5814 	  fprintf (file, "; pointer");
5815 	fprintf (file, ".\n");
/* Per-block summary: head/end insn uids, loop depth, count, and the
   predecessor/successor edges plus live-at-start/end register sets.  */
5818   fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
5819   for (i = 0; i < n_basic_blocks; i++)
5821       register basic_block bb = BASIC_BLOCK (i);
5824       fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d, count %d.\n",
5825 	       i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth, bb->count);
5827       fprintf (file, "Predecessors: ");
5828       for (e = bb->pred; e ; e = e->pred_next)
5829 	dump_edge_info (file, e, 0);
5831       fprintf (file, "\nSuccessors: ");
5832       for (e = bb->succ; e ; e = e->succ_next)
5833 	dump_edge_info (file, e, 1);
5835       fprintf (file, "\nRegisters live at start:");
5836       dump_regset (bb->global_live_at_start, file);
5838       fprintf (file, "\nRegisters live at end:");
5839       dump_regset (bb->global_live_at_end, file);
/* Body fragment of debug_flow_info (definition line elided): dump to
   stderr for debugger use.  */
5850   dump_flow_info (stderr);
/* Print one edge E to FILE.  DO_SUCC nonzero means E is being printed
   from the successor side, so show its destination; otherwise its source.
   NOTE(review): lines elided in this excerpt.  */
5854 dump_edge_info (file, e, do_succ)
5859   basic_block side = (do_succ ? e->dest : e->src);
5861   if (side == ENTRY_BLOCK_PTR)
5862     fputs (" ENTRY", file);
5863   else if (side == EXIT_BLOCK_PTR)
5864     fputs (" EXIT", file);
5866     fprintf (file, " %d", side->index);
5869     fprintf (file, " count:%d", e->count);
/* Decode e->flags bit by bit into symbolic names; unknown bits are
   printed numerically.  */
5873       static const char * const bitnames[] = {
5874 	"fallthru", "crit", "ab", "abcall", "eh", "fake"
5877       int i, flags = e->flags;
5881       for (i = 0; flags; i++)
5882 	if (flags & (1 << i))
5888 	    if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5889 	      fputs (bitnames[i], file);
5891 	      fprintf (file, "%d", i);
5899 /* Print out one basic block with live information at start and end.  */
/* NOTE(review): the function header of dump_bb and several lines are
   elided; visible body prints block summary, predecessors, live-at-start,
   the insns, live-at-end, and successors to OUTF.  */
5909   fprintf (outf, ";; Basic block %d, loop depth %d, count %d",
5910 	   bb->index, bb->loop_depth, bb->count);
5911   if (bb->eh_beg != -1 || bb->eh_end != -1)
5912     fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
5915   fputs (";; Predecessors: ", outf);
5916   for (e = bb->pred; e ; e = e->pred_next)
5917     dump_edge_info (outf, e, 0);
5920   fputs (";; Registers live at start:", outf);
5921   dump_regset (bb->global_live_at_start, outf);
/* Print every insn of the block; LAST is one past bb->end.  */
5924   for (insn = bb->head, last = NEXT_INSN (bb->end);
5926        insn = NEXT_INSN (insn))
5927     print_rtl_single (outf, insn);
5929   fputs (";; Registers live at end:", outf);
5930   dump_regset (bb->global_live_at_end, outf);
5933   fputs (";; Successors: ", outf);
5934   for (e = bb->succ; e; e = e->succ_next)
5935     dump_edge_info (outf, e, 1);
/* Body fragments of the debugger helpers debug_bb (dump BB to stderr)
   and debug_bb_n (dump block number N to stderr); their definition lines
   are elided in this excerpt.  */
5943   dump_bb (bb, stderr);
5950   dump_bb (BASIC_BLOCK(n), stderr);
5953 /* Like print_rtl, but also print out live information for the start of each
/* NOTE(review): lines elided in this excerpt (loop heads, frees, and the
   head-comment tail are missing).  Prints the insn chain starting at
   RTX_FIRST to OUTF, annotating basic-block starts/ends with live regs.  */
5957 print_rtl_with_bb (outf, rtx_first)
5961   register rtx tmp_rtx;
5964     fprintf (outf, "(nil)\n");
5968       enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5969       int max_uid = get_max_uid ();
/* Three uid-indexed maps: block starting at uid, block ending at uid,
   and whether a uid's insn is in zero, one, or multiple blocks.  */
5970       basic_block *start = (basic_block *)
5971 	xcalloc (max_uid, sizeof (basic_block));
5972       basic_block *end = (basic_block *)
5973 	xcalloc (max_uid, sizeof (basic_block));
5974       enum bb_state *in_bb_p = (enum bb_state *)
5975 	xcalloc (max_uid, sizeof (enum bb_state));
5977       for (i = n_basic_blocks - 1; i >= 0; i--)
5979 	  basic_block bb = BASIC_BLOCK (i);
5982 	  start[INSN_UID (bb->head)] = bb;
5983 	  end[INSN_UID (bb->end)] = bb;
5984 	  for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5986 	      enum bb_state state = IN_MULTIPLE_BB;
5987 	      if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
5989 	      in_bb_p[INSN_UID(x)] = state;
5996       for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
6001 	  if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
6003 	      fprintf (outf, ";; Start of basic block %d, registers live:",
6005 	      dump_regset (bb->global_live_at_start, outf);
6009 	  if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
6010 	      && GET_CODE (tmp_rtx) != NOTE
6011 	      && GET_CODE (tmp_rtx) != BARRIER)
6012 	    fprintf (outf, ";; Insn is not within a basic block\n");
6013 	  else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
6014 	    fprintf (outf, ";; Insn is in multiple basic blocks\n");
6016 	  did_output = print_rtl_single (outf, tmp_rtx);
6018 	  if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
6020 	      fprintf (outf, ";; End of basic block %d, registers live:\n",
6022 	      dump_regset (bb->global_live_at_end, outf);
/* Finally dump any insns queued on the epilogue delay list.  */
6035   if (current_function_epilogue_delay_list != 0)
6037       fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
6038       for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
6039 	   tmp_rtx = XEXP (tmp_rtx, 1))
6040 	print_rtl_single (outf, XEXP (tmp_rtx, 0));
6044 /* Compute dominator relationships using new flow graph structures.  */
/* Iterative worklist dataflow: DOMINATORS[b] (and optionally
   POST_DOMINATORS[b]) are sbitmaps over block indices.  Starts from the
   maximal solution (all ones) and intersects over preds (resp. succs)
   until a fixed point.  The aux field doubles as an "on worklist" flag
   and as a marker for ENTRY/EXIT neighbors.  NOTE(review): lines are
   elided (worklist wrap-around and re-queue bodies are missing).  */
6046 compute_flow_dominators (dominators, post_dominators)
6047      sbitmap *dominators;
6048      sbitmap *post_dominators;
6051   sbitmap *temp_bitmap;
6053   basic_block *worklist, *workend, *qin, *qout;
6056   /* Allocate a worklist array/queue.  Entries are only added to the
6057      list if they were not already on the list.  So the size is
6058      bounded by the number of basic blocks.  */
6059   worklist = (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);
6060   workend = &worklist[n_basic_blocks];
6062   temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
6063   sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
6067       /* The optimistic setting of dominators requires us to put every
6068 	 block on the work list initially.  */
6069       qin = qout = worklist;
6070       for (bb = 0; bb < n_basic_blocks; bb++)
6072 	  *qin++ = BASIC_BLOCK (bb);
6073 	  BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
6075       qlen = n_basic_blocks;
6078       /* We want a maximal solution, so initially assume everything dominates
6080       sbitmap_vector_ones (dominators, n_basic_blocks);
6082       /* Mark successors of the entry block so we can identify them below.  */
6083       for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6084 	e->dest->aux = ENTRY_BLOCK_PTR;
6086       /* Iterate until the worklist is empty.  */
6089 	  /* Take the first entry off the worklist.  */
6090 	  basic_block b = *qout++;
6091 	  if (qout >= workend)
/* NOTE(review): bb is presumably set to b->index here (line elided);
   temp_bitmap[bb] below is indexed with it.  */
6097 	  /* Compute the intersection of the dominators of all the
6100 	     If one of the predecessor blocks is the ENTRY block, then the
6101 	     intersection of the dominators of the predecessor blocks is
6102 	     defined as the null set.  We can identify such blocks by the
6103 	     special value in the AUX field in the block structure.  */
6104 	  if (b->aux == ENTRY_BLOCK_PTR)
6106 	      /* Do not clear the aux field for blocks which are
6107 		 successors of the ENTRY block.  That way we never add
6108 		 them to the worklist again.
6110 		 The intersect of dominators of the preds of this block is
6111 		 defined as the null set.  */
6112 	      sbitmap_zero (temp_bitmap[bb]);
6116 	      /* Clear the aux field of this block so it can be added to
6117 		 the worklist again if necessary.  */
6119 	      sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
6122 	  /* Make sure each block always dominates itself.  */
6123 	  SET_BIT (temp_bitmap[bb], bb);
6125 	  /* If the out state of this block changed, then we need to
6126 	     add the successors of this block to the worklist if they
6127 	     are not already on the worklist.  */
6128 	  if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
6130 	      for (e = b->succ; e; e = e->succ_next)
6132 		  if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
/* Second pass: identical structure in the reverse graph computes
   post-dominators (intersect over successors, seed from EXIT).  */
6146   if (post_dominators)
6148       /* The optimistic setting of dominators requires us to put every
6149 	 block on the work list initially.  */
6150       qin = qout = worklist;
6151       for (bb = 0; bb < n_basic_blocks; bb++)
6153 	  *qin++ = BASIC_BLOCK (bb);
6154 	  BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
6156       qlen = n_basic_blocks;
6159       /* We want a maximal solution, so initially assume everything post
6160 	 dominates everything else.  */
6161       sbitmap_vector_ones (post_dominators, n_basic_blocks);
6163       /* Mark predecessors of the exit block so we can identify them below.  */
6164       for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
6165 	e->src->aux = EXIT_BLOCK_PTR;
6167       /* Iterate until the worklist is empty.  */
6170 	  /* Take the first entry off the worklist.  */
6171 	  basic_block b = *qout++;
6172 	  if (qout >= workend)
6178 	  /* Compute the intersection of the post dominators of all the
6181 	     If one of the successor blocks is the EXIT block, then the
6182 	     intersection of the dominators of the successor blocks is
6183 	     defined as the null set.  We can identify such blocks by the
6184 	     special value in the AUX field in the block structure.  */
6185 	  if (b->aux == EXIT_BLOCK_PTR)
6187 	      /* Do not clear the aux field for blocks which are
6188 		 predecessors of the EXIT block.  That way we we never
6189 		 add them to the worklist again.
6191 		 The intersect of dominators of the succs of this block is
6192 		 defined as the null set.  */
6193 	      sbitmap_zero (temp_bitmap[bb]);
6197 	      /* Clear the aux field of this block so it can be added to
6198 		 the worklist again if necessary.  */
6200 	      sbitmap_intersection_of_succs (temp_bitmap[bb],
6201 					     post_dominators, bb);
6204 	  /* Make sure each block always post dominates itself.  */
6205 	  SET_BIT (temp_bitmap[bb], bb);
6207 	  /* If the out state of this block changed, then we need to
6208 	     add the successors of this block to the worklist if they
6209 	     are not already on the worklist.  */
6210 	  if (sbitmap_a_and_b (post_dominators[bb],
6211 			       post_dominators[bb],
6214 	      for (e = b->pred; e; e = e->pred_next)
6216 		  if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
6234 /* Given DOMINATORS, compute the immediate dominators into IDOM.  If a
6235    block dominates only itself, its entry remains as INVALID_BLOCK.  */
/* Classic reduction: idom(b) is the sole member left in dom(b) - {b}
   after removing every dominator's own strict dominators.
   NOTE(review): lines elided in this excerpt.  */
6238 compute_immediate_dominators (idom, dominators)
6240      sbitmap *dominators;
6245   tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
6247   /* Begin with tmp(n) = dom(n) - { n }.  */
6248   for (b = n_basic_blocks; --b >= 0; )
6250       sbitmap_copy (tmp[b], dominators[b]);
6251       RESET_BIT (tmp[b], b);
6254   /* Subtract out all of our dominator's dominators.  */
6255   for (b = n_basic_blocks; --b >= 0; )
6257       sbitmap tmp_b = tmp[b];
6260       for (s = n_basic_blocks; --s >= 0; )
6261 	if (TEST_BIT (tmp_b, s))
6262 	  sbitmap_difference (tmp_b, tmp_b, tmp[s]);
6265   /* Find the one bit set in the bitmap and put it in the output array.  */
6266   for (b = n_basic_blocks; --b >= 0; )
6269       EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
6272   sbitmap_vector_free (tmp);
6275 /* Given POSTDOMINATORS, compute the immediate postdominators into
6276    IDOM.  If a block is only dominated by itself, its entry remains as
/* The reduction is symmetric, so simply reuse the dominator version on
   the post-dominator bitmaps.  */
6280 compute_immediate_postdominators (idom, postdominators)
6282      sbitmap *postdominators;
6284   compute_immediate_dominators (idom, postdominators);
6287 /* Recompute register set/reference counts immediately prior to register
6290    This avoids problems with set/reference counts changing to/from values
6291    which have special meanings to the register allocators.
6293    Additionally, the reference counts are the primary component used by the
6294    register allocators to prioritize pseudos for allocation to hard regs.
6295    More accurate reference counts generally lead to better register allocation.
6297    F is the first insn to be scanned.
6299    LOOP_STEP denotes how much loop_depth should be incremented per
6300    loop nesting level in order to increase the ref count more for
6301    references in a loop.
6303    It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
6304    possibly other information which is used by the register allocators.  */
/* Both parameters are unused here; the work is delegated to a local
   life-info update with PROP_REG_INFO after reallocating the tables.  */
6307 recompute_reg_usage (f, loop_step)
6308      rtx f ATTRIBUTE_UNUSED;
6309      int loop_step ATTRIBUTE_UNUSED;
6311   allocate_reg_life_data ();
6312   update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
6315 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
6316    blocks.  If BLOCKS is NULL, assume the universal set.  Returns a count
6317    of the number of registers that died.  */
/* NOTE(review): the function header line, the KILL test, and the count
   accumulation lines are elided in this excerpt; visible code walks each
   selected block's insns and splices matching notes out of REG_NOTES.  */
6320 count_or_remove_death_notes (blocks, kill)
6326   for (i = n_basic_blocks - 1; i >= 0; --i)
6331       if (blocks && ! TEST_BIT (blocks, i))
6334       bb = BASIC_BLOCK (i);
6336       for (insn = bb->head; ; insn = NEXT_INSN (insn))
6340 	      rtx *pprev = &REG_NOTES (insn);
6345 		  switch (REG_NOTE_KIND (link))
6348 		      if (GET_CODE (XEXP (link, 0)) == REG)
6350 			  rtx reg = XEXP (link, 0);
/* A hard reg counts as HARD_REGNO_NREGS deaths; a pseudo as one.  */
6353 			  if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
6356 			    n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
/* When killing, unlink the note in place and free it; *pprev keeps the
   list consistent without a separate "prev" pointer.  */
6364 		      rtx next = XEXP (link, 1);
6365 		      free_EXPR_LIST_node (link);
6366 		      *pprev = link = next;
6372 		      pprev = &XEXP (link, 1);
6379 	  if (insn == bb->end)
6387 /* Record INSN's block as BB. */
6390 set_block_for_insn (insn, bb)
6394 size_t uid = INSN_UID (insn);
6395 if (uid >= basic_block_for_insn->num_elements)
6399 /* Add one-eighth the size so we don't keep calling xrealloc. */
6400 new_size = uid + (uid + 7) / 8;
6402 VARRAY_GROW (basic_block_for_insn, new_size);
6404 VARRAY_BB (basic_block_for_insn, uid) = bb;
6407 /* Record INSN's block number as BB. */
6408 /* ??? This has got to go. */
6411 set_block_num (insn, bb)
6415 set_block_for_insn (insn, BASIC_BLOCK (bb));
6418 /* Verify the CFG consistency. This function check some CFG invariants and
6419 aborts when something is wrong. Hope that this function will help to
6420 convert many optimization passes to preserve CFG consistency.
6422 Currently it does the following checks:
6424 - test head/end pointers
6425 - overlapping of basic blocks
6426 - edge list correctness
6427 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
6428 - tails of basic blocks (ensure that boundary is necessary)
6429 - scans body of the basic block for JUMP_INSN, CODE_LABEL
6430 and NOTE_INSN_BASIC_BLOCK
6431 - check that all insns are in the basic blocks
6432 (except the switch handling code, barriers and notes)
6433 - check that all returns are followed by barriers
6435 In the future it can be extended to check a lot of other stuff as well
6436 (reachability of basic blocks, life information, etc. etc.). */
6441 const int max_uid = get_max_uid ();
6442 const rtx rtx_first = get_insns ();
6443 rtx last_head = get_last_insn ();
6444 basic_block *bb_info;
6446 int i, last_bb_num_seen, num_bb_notes, err = 0;
6448 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
6450 for (i = n_basic_blocks - 1; i >= 0; i--)
6452 basic_block bb = BASIC_BLOCK (i);
6453 rtx head = bb->head;
6456 /* Verify the end of the basic block is in the INSN chain. */
6457 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
6462 error ("End insn %d for block %d not found in the insn stream.",
6463 INSN_UID (end), bb->index);
6467 /* Work backwards from the end to the head of the basic block
6468 to verify the head is in the RTL chain. */
6469 for ( ; x != NULL_RTX; x = PREV_INSN (x))
6471 /* While walking over the insn chain, verify insns appear
6472 in only one basic block and initialize the BB_INFO array
6473 used by other passes. */
6474 if (bb_info[INSN_UID (x)] != NULL)
6476 error ("Insn %d is in multiple basic blocks (%d and %d)",
6477 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
6480 bb_info[INSN_UID (x)] = bb;
6487 error ("Head insn %d for block %d not found in the insn stream.",
6488 INSN_UID (head), bb->index);
6495 /* Now check the basic blocks (boundaries etc.) */
6496 for (i = n_basic_blocks - 1; i >= 0; i--)
6498 basic_block bb = BASIC_BLOCK (i);
6499 /* Check correctness of edge lists */
6507 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
6509 fprintf (stderr, "Predecessor: ");
6510 dump_edge_info (stderr, e, 0);
6511 fprintf (stderr, "\nSuccessor: ");
6512 dump_edge_info (stderr, e, 1);
6516 if (e->dest != EXIT_BLOCK_PTR)
6518 edge e2 = e->dest->pred;
6519 while (e2 && e2 != e)
6523 error ("Basic block %i edge lists are corrupted", bb->index);
6535 error ("Basic block %d pred edge is corrupted", bb->index);
6536 fputs ("Predecessor: ", stderr);
6537 dump_edge_info (stderr, e, 0);
6538 fputs ("\nSuccessor: ", stderr);
6539 dump_edge_info (stderr, e, 1);
6540 fputc ('\n', stderr);
6543 if (e->src != ENTRY_BLOCK_PTR)
6545 edge e2 = e->src->succ;
6546 while (e2 && e2 != e)
6550 error ("Basic block %i edge lists are corrupted", bb->index);
6557 /* OK pointers are correct. Now check the header of basic
6558 block. It ought to contain optional CODE_LABEL followed
6559 by NOTE_BASIC_BLOCK. */
6561 if (GET_CODE (x) == CODE_LABEL)
6565 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
6571 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
6573 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
6580 /* Do checks for empty blocks here */
6587 if (NOTE_INSN_BASIC_BLOCK_P (x))
6589 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
6590 INSN_UID (x), bb->index);
6597 if (GET_CODE (x) == JUMP_INSN
6598 || GET_CODE (x) == CODE_LABEL
6599 || GET_CODE (x) == BARRIER)
6601 error ("In basic block %d:", bb->index);
6602 fatal_insn ("Flow control insn inside a basic block", x);
6610 last_bb_num_seen = -1;
6615 if (NOTE_INSN_BASIC_BLOCK_P (x))
6617 basic_block bb = NOTE_BASIC_BLOCK (x);
6619 if (bb->index != last_bb_num_seen + 1)
6620 fatal ("Basic blocks not numbered consecutively");
6621 last_bb_num_seen = bb->index;
6624 if (!bb_info[INSN_UID (x)])
6626 switch (GET_CODE (x))
6633 /* An addr_vec is placed outside any basic block. */
6635 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
6636 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
6637 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
6642 /* But in any case, non-deletable labels can appear anywhere. */
6646 fatal_insn ("Insn outside basic block", x);
6651 && GET_CODE (x) == JUMP_INSN
6652 && returnjump_p (x) && ! condjump_p (x)
6653 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
6654 fatal_insn ("Return not followed by barrier", x);
6659 if (num_bb_notes != n_basic_blocks)
6660 fatal ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
6661 num_bb_notes, n_basic_blocks);
6670 /* Functions to access an edge list with a vector representation.
6671 Enough data is kept such that given an index number, the
6672 pred and succ that edge represents can be determined, or
6673 given a pred and a succ, its index number can be returned.
6674 This allows algorithms which consume a lot of memory to
6675 represent the normally full matrix of edge (pred,succ) with a
6676 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6677 wasted space in the client code due to sparse flow graphs. */
6679 /* This function initializes the edge list. Basically the entire
6680 flowgraph is processed, and all edges are assigned a number,
6681 and the data structure is filled in. */
6685 struct edge_list *elist;
6691 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6695 /* Determine the number of edges in the flow graph by counting successor
6696 edges on each basic block. */
6697 for (x = 0; x < n_basic_blocks; x++)
6699 basic_block bb = BASIC_BLOCK (x);
6701 for (e = bb->succ; e; e = e->succ_next)
6704 /* Don't forget successors of the entry block. */
6705 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6708 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6709 elist->num_blocks = block_count;
6710 elist->num_edges = num_edges;
6711 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6715 /* Follow successors of the entry block, and register these edges. */
6716 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6718 elist->index_to_edge[num_edges] = e;
6722 for (x = 0; x < n_basic_blocks; x++)
6724 basic_block bb = BASIC_BLOCK (x);
6726 /* Follow all successors of blocks, and register these edges. */
6727 for (e = bb->succ; e; e = e->succ_next)
6729 elist->index_to_edge[num_edges] = e;
6736 /* This function frees the memory associated with an edge list. */
6738 free_edge_list (elist)
6739 struct edge_list *elist;
6743 free (elist->index_to_edge);
6748 /* This function provides debug output showing an edge list. */
6750 print_edge_list (f, elist)
6752 struct edge_list *elist;
6755 fprintf(f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6756 elist->num_blocks - 2, elist->num_edges);
6758 for (x = 0; x < elist->num_edges; x++)
6760 fprintf (f, " %-4d - edge(", x);
6761 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6762 fprintf (f,"entry,");
6764 fprintf (f,"%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6766 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6767 fprintf (f,"exit)\n");
6769 fprintf (f,"%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6773 /* This function provides an internal consistency check of an edge list,
6774 verifying that all edges are present, and that there are no
6777 verify_edge_list (f, elist)
6779 struct edge_list *elist;
6781 int x, pred, succ, index;
6784 for (x = 0; x < n_basic_blocks; x++)
6786 basic_block bb = BASIC_BLOCK (x);
6788 for (e = bb->succ; e; e = e->succ_next)
6790 pred = e->src->index;
6791 succ = e->dest->index;
6792 index = EDGE_INDEX (elist, e->src, e->dest);
6793 if (index == EDGE_INDEX_NO_EDGE)
6795 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6798 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6799 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6800 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6801 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6802 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6803 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6806 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6808 pred = e->src->index;
6809 succ = e->dest->index;
6810 index = EDGE_INDEX (elist, e->src, e->dest);
6811 if (index == EDGE_INDEX_NO_EDGE)
6813 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6816 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6817 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6818 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6819 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6820 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6821 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6823 /* We've verified that all the edges are in the list; now let's make sure
6824 there are no spurious edges in the list. */
6826 for (pred = 0 ; pred < n_basic_blocks; pred++)
6827 for (succ = 0 ; succ < n_basic_blocks; succ++)
6829 basic_block p = BASIC_BLOCK (pred);
6830 basic_block s = BASIC_BLOCK (succ);
6834 for (e = p->succ; e; e = e->succ_next)
6840 for (e = s->pred; e; e = e->pred_next)
6846 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6847 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6848 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6850 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6851 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6852 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6853 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6854 BASIC_BLOCK (succ)));
6856 for (succ = 0 ; succ < n_basic_blocks; succ++)
6858 basic_block p = ENTRY_BLOCK_PTR;
6859 basic_block s = BASIC_BLOCK (succ);
6863 for (e = p->succ; e; e = e->succ_next)
6869 for (e = s->pred; e; e = e->pred_next)
6875 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6876 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6877 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6879 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6880 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6881 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6882 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6883 BASIC_BLOCK (succ)));
6885 for (pred = 0 ; pred < n_basic_blocks; pred++)
6887 basic_block p = BASIC_BLOCK (pred);
6888 basic_block s = EXIT_BLOCK_PTR;
6892 for (e = p->succ; e; e = e->succ_next)
6898 for (e = s->pred; e; e = e->pred_next)
6904 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6905 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6906 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6908 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6909 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6910 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6911 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6916 /* This routine will determine what, if any, edge there is between
6917 a specified predecessor and successor. */
6919 find_edge_index (edge_list, pred, succ)
6920 struct edge_list *edge_list;
6921 basic_block pred, succ;
6924 for (x = 0; x < NUM_EDGES (edge_list); x++)
6926 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6927 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6930 return (EDGE_INDEX_NO_EDGE);
6933 /* This function will remove an edge from the flow graph. */
6938 edge last_pred = NULL;
6939 edge last_succ = NULL;
6941 basic_block src, dest;
6944 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6950 last_succ->succ_next = e->succ_next;
6952 src->succ = e->succ_next;
6954 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6960 last_pred->pred_next = e->pred_next;
6962 dest->pred = e->pred_next;
6968 /* This routine will remove any fake successor edges for a basic block.
6969 When the edge is removed, it is also removed from whatever predecessor
6972 remove_fake_successors (bb)
6976 for (e = bb->succ; e ; )
6980 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6985 /* This routine will remove all fake edges from the flow graph. If
6986 we remove all fake successors, it will automatically remove all
6987 fake predecessors. */
6989 remove_fake_edges ()
6993 for (x = 0; x < n_basic_blocks; x++)
6994 remove_fake_successors (BASIC_BLOCK (x));
6996 /* We've handled all successors except the entry block's. */
6997 remove_fake_successors (ENTRY_BLOCK_PTR);
7000 /* This function will add a fake edge between any block which has no
7001 successors, and the exit block. Some data flow equations require these
7004 add_noreturn_fake_exit_edges ()
7008 for (x = 0; x < n_basic_blocks; x++)
7009 if (BASIC_BLOCK (x)->succ == NULL)
7010 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
7013 /* This function adds a fake edge between any infinite loops to the
7014 exit block. Some optimizations require a path from each node to
7017 See also Morgan, Figure 3.10, pp. 82-83.
7019 The current implementation is ugly, not attempting to minimize the
7020 number of inserted fake edges. To reduce the number of fake edges
7021 to insert, add fake edges from _innermost_ loops containing only
7022 nodes not reachable from the exit block. */
7024 connect_infinite_loops_to_exit ()
7026 basic_block unvisited_block;
7028 /* Perform depth-first search in the reverse graph to find nodes
7029 reachable from the exit block. */
7030 struct depth_first_search_dsS dfs_ds;
7032 flow_dfs_compute_reverse_init (&dfs_ds);
7033 flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
7035 /* Repeatedly add fake edges, updating the unreachable nodes. */
7038 unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
7039 if (!unvisited_block)
7041 make_edge (NULL, unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
7042 flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
7045 flow_dfs_compute_reverse_finish (&dfs_ds);
7050 /* Redirect an edge's successor from one block to another. */
7052 redirect_edge_succ (e, new_succ)
7054 basic_block new_succ;
7058 /* Disconnect the edge from the old successor block. */
7059 for (pe = &e->dest->pred; *pe != e ; pe = &(*pe)->pred_next)
7061 *pe = (*pe)->pred_next;
7063 /* Reconnect the edge to the new successor block. */
7064 e->pred_next = new_succ->pred;
7069 /* Redirect an edge's predecessor from one block to another. */
7071 redirect_edge_pred (e, new_pred)
7073 basic_block new_pred;
7077 /* Disconnect the edge from the old predecessor block. */
7078 for (pe = &e->src->succ; *pe != e ; pe = &(*pe)->succ_next)
7080 *pe = (*pe)->succ_next;
7082 /* Reconnect the edge to the new predecessor block. */
7083 e->succ_next = new_pred->succ;
7088 /* Dump the list of basic blocks in the bitmap NODES. */
7090 flow_nodes_print (str, nodes, file)
7092 const sbitmap nodes;
7097 fprintf (file, "%s { ", str);
7098 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
7099 fputs ("}\n", file);
7103 /* Dump the list of exiting edges in the array EDGES. */
7105 flow_exits_print (str, edges, num_edges, file)
7113 fprintf (file, "%s { ", str);
7114 for (i = 0; i < num_edges; i++)
7115 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
7116 fputs ("}\n", file);
7120 /* Dump loop related CFG information. */
7122 flow_loops_cfg_dump (loops, file)
7123 const struct loops *loops;
7128 if (! loops->num || ! file || ! loops->cfg.dom)
7131 for (i = 0; i < n_basic_blocks; i++)
7135 fprintf (file, ";; %d succs { ", i);
7136 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
7137 fprintf (file, "%d ", succ->dest->index);
7138 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
7142 /* Dump the DFS node order. */
7143 if (loops->cfg.dfs_order)
7145 fputs (";; DFS order: ", file);
7146 for (i = 0; i < n_basic_blocks; i++)
7147 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
7150 /* Dump the reverse completion node order. */
7151 if (loops->cfg.rc_order)
7153 fputs (";; RC order: ", file);
7154 for (i = 0; i < n_basic_blocks; i++)
7155 fprintf (file, "%d ", loops->cfg.rc_order[i]);
7161 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
7163 flow_loop_nested_p (outer, loop)
7167 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
7171 /* Dump the loop information specified by LOOPS to the stream FILE. */
7173 flow_loops_dump (loops, file, verbose)
7174 const struct loops *loops;
7181 num_loops = loops->num;
7182 if (! num_loops || ! file)
7185 fprintf (file, ";; %d loops found, %d levels\n",
7186 num_loops, loops->levels);
7188 for (i = 0; i < num_loops; i++)
7190 struct loop *loop = &loops->array[i];
7192 fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
7193 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
7194 loop->header->index, loop->latch->index,
7195 loop->pre_header ? loop->pre_header->index : -1,
7196 loop->depth, loop->level,
7197 (long) (loop->outer ? (loop->outer - loops->array) : -1));
7198 fprintf (file, ";; %d", loop->num_nodes);
7199 flow_nodes_print (" nodes", loop->nodes, file);
7200 fprintf (file, ";; %d", loop->num_exits);
7201 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
7207 for (j = 0; j < i; j++)
7209 struct loop *oloop = &loops->array[j];
7211 if (loop->header == oloop->header)
7216 smaller = loop->num_nodes < oloop->num_nodes;
7218 /* If the union of LOOP and OLOOP is different than
7219 the larger of LOOP and OLOOP then LOOP and OLOOP
7220 must be disjoint. */
7221 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
7222 smaller ? oloop : loop);
7224 ";; loop header %d shared by loops %d, %d %s\n",
7225 loop->header->index, i, j,
7226 disjoint ? "disjoint" : "nested");
7233 /* Print diagnostics to compare our concept of a loop with
7234 what the loop notes say. */
7235 if (GET_CODE (PREV_INSN (loop->first->head)) != NOTE
7236 || NOTE_LINE_NUMBER (PREV_INSN (loop->first->head))
7237 != NOTE_INSN_LOOP_BEG)
7238 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
7239 INSN_UID (PREV_INSN (loop->first->head)));
7240 if (GET_CODE (NEXT_INSN (loop->last->end)) != NOTE
7241 || NOTE_LINE_NUMBER (NEXT_INSN (loop->last->end))
7242 != NOTE_INSN_LOOP_END)
7243 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
7244 INSN_UID (NEXT_INSN (loop->last->end)));
7249 flow_loops_cfg_dump (loops, file);
7253 /* Free all the memory allocated for LOOPS. */
7255 flow_loops_free (loops)
7256 struct loops *loops;
7265 /* Free the loop descriptors. */
7266 for (i = 0; i < loops->num; i++)
7268 struct loop *loop = &loops->array[i];
7271 sbitmap_free (loop->nodes);
7275 free (loops->array);
7276 loops->array = NULL;
7279 sbitmap_vector_free (loops->cfg.dom);
7280 if (loops->cfg.dfs_order)
7281 free (loops->cfg.dfs_order);
7283 sbitmap_free (loops->shared_headers);
7288 /* Find the exits from the loop using the bitmap of loop nodes NODES
7289 and store in EXITS array. Return the number of exits from the
7292 flow_loop_exits_find (nodes, exits)
7293 const sbitmap nodes;
7302 /* Check all nodes within the loop to see if there are any
7303 successors not in the loop. Note that a node may have multiple
7306 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
7307 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
7309 basic_block dest = e->dest;
7311 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
7319 *exits = (edge *) xmalloc (num_exits * sizeof (edge *));
7321 /* Store all exiting edges into an array. */
7323 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
7324 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
7326 basic_block dest = e->dest;
7328 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
7329 (*exits)[num_exits++] = e;
7337 /* Find the nodes contained within the loop with header HEADER and
7338 latch LATCH and store in NODES. Return the number of nodes within
7341 flow_loop_nodes_find (header, latch, nodes)
7350 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
7353 /* Start with only the loop header in the set of loop nodes. */
7354 sbitmap_zero (nodes);
7355 SET_BIT (nodes, header->index);
7357 header->loop_depth++;
7359 /* Push the loop latch on to the stack. */
7360 if (! TEST_BIT (nodes, latch->index))
7362 SET_BIT (nodes, latch->index);
7363 latch->loop_depth++;
7365 stack[sp++] = latch;
7374 for (e = node->pred; e; e = e->pred_next)
7376 basic_block ancestor = e->src;
7378 /* If the ancestor is not marked as part of the loop, add it to
7379 the set of loop nodes and push it on to the stack. */
7380 if (ancestor != ENTRY_BLOCK_PTR
7381 && ! TEST_BIT (nodes, ancestor->index))
7383 SET_BIT (nodes, ancestor->index);
7384 ancestor->loop_depth++;
7386 stack[sp++] = ancestor;
7395 /* Compute the depth first search order and store in the array
7396 DFS_ORDER if non-zero, marking the nodes visited in VISITED. If
7397 RC_ORDER is non-zero, return the reverse completion number for each
7398 node. Returns the number of nodes visited. A depth first search
7399 tries to get as far away from the starting point as quickly as
7402 flow_depth_first_order_compute (dfs_order, rc_order)
7409 int rcnum = n_basic_blocks - 1;
7412 /* Allocate stack for back-tracking up CFG. */
7413 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
7416 /* Allocate bitmap to track nodes that have been visited. */
7417 visited = sbitmap_alloc (n_basic_blocks);
7419 /* None of the nodes in the CFG have been visited yet. */
7420 sbitmap_zero (visited);
7422 /* Push the first edge on to the stack. */
7423 stack[sp++] = ENTRY_BLOCK_PTR->succ;
7431 /* Look at the edge on the top of the stack. */
7436 /* Check if the edge destination has been visited yet. */
7437 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
7439 /* Mark that we have visited the destination. */
7440 SET_BIT (visited, dest->index);
7443 dfs_order[dfsnum++] = dest->index;
7447 /* Since the DEST node has been visited for the first
7448 time, check its successors. */
7449 stack[sp++] = dest->succ;
7453 /* There are no successors for the DEST node so assign
7454 its reverse completion number. */
7456 rc_order[rcnum--] = dest->index;
7461 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
7463 /* There are no more successors for the SRC node
7464 so assign its reverse completion number. */
7466 rc_order[rcnum--] = src->index;
7470 stack[sp - 1] = e->succ_next;
7477 sbitmap_free (visited);
7479 /* The number of nodes visited should not be greater than
7481 if (dfsnum > n_basic_blocks)
7484 /* There are some nodes left in the CFG that are unreachable. */
7485 if (dfsnum < n_basic_blocks)
7491 /* Compute the depth first search order on the _reverse_ graph and
7492 store in the array DFS_ORDER, marking the nodes visited in VISITED.
7493 Returns the number of nodes visited.
7495 The computation is split into three pieces:
7497 flow_dfs_compute_reverse_init () creates the necessary data
7500 flow_dfs_compute_reverse_add_bb () adds a basic block to the data
7501 structures. The block will start the search.
7503 flow_dfs_compute_reverse_execute () continues (or starts) the
7504 search using the block on the top of the stack, stopping when the
7507 flow_dfs_compute_reverse_finish () destroys the necessary data
7510 Thus, the user will probably call ..._init(), call ..._add_bb() to
7511 add a beginning basic block to the stack, call ..._execute(),
7512 possibly add another bb to the stack and again call ..._execute(),
7513 ..., and finally call _finish(). */
7515 /* Initialize the data structures used for depth-first search on the
7516 reverse graph. If INITIALIZE_STACK is nonzero, the exit block is
7517 added to the basic block stack. DATA is the current depth-first
7518 search context. If INITIALIZE_STACK is non-zero, there is an
7519 element on the stack. */
7522 flow_dfs_compute_reverse_init (data)
7523 depth_first_search_ds data;
7525 /* Allocate stack for back-tracking up CFG. */
7527 (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK+1))
7528 * sizeof (basic_block));
7531 /* Allocate bitmap to track nodes that have been visited. */
7532 data->visited_blocks
7533 = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));
7535 /* None of the nodes in the CFG have been visited yet. */
7536 sbitmap_zero (data->visited_blocks);
7541 /* Add the specified basic block to the top of the dfs data
7542 structures. When the search continues, it will start at the
7546 flow_dfs_compute_reverse_add_bb (data, bb)
7547 depth_first_search_ds data;
7550 data->stack[data->sp++] = bb;
7554 /* Continue the depth-first search through the reverse graph starting
7555 with the block at the stack's top and ending when the stack is
7556 empty. Visited nodes are marked. Returns an unvisited basic
7557 block, or NULL if there is none available. */
7559 flow_dfs_compute_reverse_execute (data)
7560 depth_first_search_ds data;
7566 while (data->sp > 0)
7568 bb = data->stack[--data->sp];
7570 /* Mark that we have visited this node. */
7571 if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK+1)))
7573 SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK+1));
7575 /* Perform depth-first search on adjacent vertices. */
7576 for (e = bb->pred; e; e = e->pred_next)
7577 flow_dfs_compute_reverse_add_bb (data, e->src);
7581 /* Determine if there are unvisited basic blocks. */
7582 for (i = n_basic_blocks - (INVALID_BLOCK+1); --i >= 0; )
7583 if (!TEST_BIT (data->visited_blocks, i))
7584 return BASIC_BLOCK (i + (INVALID_BLOCK+1));
7588 /* Destroy the data structures needed for depth-first search on the
7592 flow_dfs_compute_reverse_finish (data)
7593 depth_first_search_ds data;
7596 sbitmap_free (data->visited_blocks);
7600 /* Return the block for the pre-header of the loop with header
7601 HEADER where DOM specifies the dominator information. Return NULL if
7602 there is no pre-header. */
7604 flow_loop_pre_header_find (header, dom)
7608 basic_block pre_header;
7611 /* If block p is a predecessor of the header and is the only block
7612 that the header does not dominate, then it is the pre-header. */
7614 for (e = header->pred; e; e = e->pred_next)
7616 basic_block node = e->src;
7618 if (node != ENTRY_BLOCK_PTR
7619 && ! TEST_BIT (dom[node->index], header->index))
7621 if (pre_header == NULL)
7625 /* There are multiple edges into the header from outside
7626 the loop so there is no pre-header block. */
7636 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
7637 previously added. The insertion algorithm assumes that the loops
7638 are added in the order found by a depth first search of the CFG. */
7640 flow_loop_tree_node_add (prevloop, loop)
7641 struct loop *prevloop;
7645 if (flow_loop_nested_p (prevloop, loop))
7647 prevloop->inner = loop;
7648 loop->outer = prevloop;
7652 while (prevloop->outer)
7654 if (flow_loop_nested_p (prevloop->outer, loop))
7656 prevloop->next = loop;
7657 loop->outer = prevloop->outer;
7660 prevloop = prevloop->outer;
7663 prevloop->next = loop;
7668 /* Build the loop hierarchy tree for LOOPS. */
7670 flow_loops_tree_build (loops)
7671 struct loops *loops;
7676 num_loops = loops->num;
7680 /* Root the loop hierarchy tree with the first loop found.
7681 Since we used a depth first search this should be the
7683 loops->tree = &loops->array[0];
7684 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
7686 /* Add the remaining loops to the tree. */
7687 for (i = 1; i < num_loops; i++)
7688 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
7692 /* Helper function to compute loop nesting depth and enclosed loop level
7693 for the natural loop specified by LOOP at the loop depth DEPTH.
7694 Returns the loop level. */
7696 flow_loop_level_compute (loop, depth)
7706 /* Traverse loop tree assigning depth and computing level as the
7707 maximum level of all the inner loops of this loop. The loop
7708 level is equivalent to the height of the loop in the loop tree
7709 and corresponds to the number of enclosed loop levels (including
7711 for (inner = loop->inner; inner; inner = inner->next)
7715 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
7720 loop->level = level;
7721 loop->depth = depth;
7726 /* Compute the loop nesting depth and enclosed loop level for the loop
7727 hierarchy tree specified by LOOPS. Return the maximum enclosed loop
7731 flow_loops_level_compute (loops)
7732 struct loops *loops;
7738 /* Traverse all the outer level loops. */
7739 for (loop = loops->tree; loop; loop = loop->next)
7741 level = flow_loop_level_compute (loop, 1);
7749 /* Find all the natural loops in the function and save in LOOPS structure
7750 and recalculate loop_depth information in basic block structures.
7751 Return the number of natural loops found. */
7754 flow_loops_find (loops)
7755 struct loops *loops;
7767 loops->array = NULL;
7772 /* Taking care of this degenerate case makes the rest of
7773 this code simpler. */
7774 if (n_basic_blocks == 0)
7777 /* Compute the dominators. */
7778 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
7779 compute_flow_dominators (dom, NULL);
7781 /* Count the number of loop edges (back edges). This should be the
7782 same as the number of natural loops. Also clear the loop_depth
7783 and as we work from inner->outer in a loop nest we call
7784 find_loop_nodes_find which will increment loop_depth for nodes
7785 within the current loop, which happens to enclose inner loops. */
7788 for (b = 0; b < n_basic_blocks; b++)
7790 BASIC_BLOCK (b)->loop_depth = 0;
7791 for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
7793 basic_block latch = e->src;
7795 /* Look for back edges where a predecessor is dominated
7796 by this block. A natural loop has a single entry
7797 node (header) that dominates all the nodes in the
7798 loop. It also has single back edge to the header
7799 from a latch node. Note that multiple natural loops
7800 may share the same header. */
7801 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
7808 /* Compute depth first search order of the CFG so that outer
7809 natural loops will be found before inner natural loops. */
7810 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
7811 rc_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
7812 flow_depth_first_order_compute (dfs_order, rc_order);
7814 /* Allocate loop structures. */
7816 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
7818 headers = sbitmap_alloc (n_basic_blocks);
7819 sbitmap_zero (headers);
7821 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
7822 sbitmap_zero (loops->shared_headers);
7824 /* Find and record information about all the natural loops
7827 for (b = 0; b < n_basic_blocks; b++)
7831 /* Search the nodes of the CFG in DFS order so that we can find
7832 outer loops first. */
7833 header = BASIC_BLOCK (rc_order[b]);
7835 /* Look for all the possible latch blocks for this header. */
7836 for (e = header->pred; e; e = e->pred_next)
7838 basic_block latch = e->src;
7840 /* Look for back edges where a predecessor is dominated
7841 by this block. A natural loop has a single entry
7842 node (header) that dominates all the nodes in the
7843 loop. It also has single back edge to the header
7844 from a latch node. Note that multiple natural loops
7845 may share the same header. */
7846 if (latch != ENTRY_BLOCK_PTR
7847 && TEST_BIT (dom[latch->index], header->index))
7851 loop = loops->array + num_loops;
7853 loop->header = header;
7854 loop->latch = latch;
7855 loop->num = num_loops;
7857 /* Keep track of blocks that are loop headers so
7858 that we can tell which loops should be merged. */
7859 if (TEST_BIT (headers, header->index))
7860 SET_BIT (loops->shared_headers, header->index);
7861 SET_BIT (headers, header->index);
7863 /* Find nodes contained within the loop. */
7864 loop->nodes = sbitmap_alloc (n_basic_blocks);
7866 = flow_loop_nodes_find (header, latch, loop->nodes);
7868 /* Compute first and last blocks within the loop.
7869 These are often the same as the loop header and
7870 loop latch respectively, but this is not always
7873 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
7875 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
7877 /* Find edges which exit the loop. Note that a node
7878 may have several exit edges. */
7880 = flow_loop_exits_find (loop->nodes, &loop->exits);
7882 /* Look to see if the loop has a pre-header node. */
7884 = flow_loop_pre_header_find (header, dom);
7891 /* Natural loops with shared headers may either be disjoint or
7892 nested. Disjoint loops with shared headers cannot be inner
7893 loops and should be merged. For now just mark loops that share
7895 for (i = 0; i < num_loops; i++)
7896 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
7897 loops->array[i].shared = 1;
7899 sbitmap_free (headers);
7902 loops->num = num_loops;
7904 /* Save CFG derived information to avoid recomputing it. */
7905 loops->cfg.dom = dom;
7906 loops->cfg.dfs_order = dfs_order;
7907 loops->cfg.rc_order = rc_order;
7909 /* Build the loop hierarchy tree. */
7910 flow_loops_tree_build (loops);
7912 /* Assign the loop nesting depth and enclosed loop level for each
7914 loops->levels = flow_loops_level_compute (loops);
7920 /* Return non-zero if edge E enters header of LOOP from outside of LOOP. */
/* NOTE(review): this chunk is an elided view of the file — the function's
   return-type line, the `edge e;' parameter declaration, the consequent of
   the `if' below, and the surrounding braces (original lines 7921-7922,
   7925-7926, 7928, 7931) are not visible here.  The code is left
   byte-identical; confirm the missing lines against the full file.  */
7923 flow_loop_outside_edge_p (loop, e)
7924 const struct loop *loop;
/* Sanity check: E must target the loop header.  The consequent of this
   test is elided from view — presumably it aborts; TODO confirm.  */
7927 if (e->dest != loop->header)
/* E comes from outside LOOP when its source is the function's entry
   block, or is a block not recorded in LOOP's node bitmap.  */
7929 return (e->src == ENTRY_BLOCK_PTR)
7930 || ! TEST_BIT (loop->nodes, e->src->index);
7934 /* Clear LOG_LINKS fields of insns in a chain.
7935 Also clear the global_live_at_{start,end} fields of the basic block
/* NOTE(review): the remainder of this header comment, the local variable
   declarations, the body of the first loop (presumably clearing
   LOG_LINKS (i) — elided from view; confirm against the full file), and
   the enclosing braces are missing from this chunk.  Code kept
   byte-identical.  */
7939 clear_log_links (insns)
/* Walk the insn chain starting at INSNS; the per-insn work (original
   lines 7946-7947) is not visible here.  */
7945 for (i = insns; i; i = NEXT_INSN (i))
/* Null out the cached liveness bitmaps on every basic block so they can
   be recomputed; the bitmap storage itself is not freed here (at least
   not on the visible lines — TODO confirm ownership in the full file).  */
7949 for (b = 0; b < n_basic_blocks; b++)
7951 basic_block bb = BASIC_BLOCK (b);
7953 bb->global_live_at_start = NULL;
7954 bb->global_live_at_end = NULL;
/* The synthetic entry/exit blocks only have the one liveness set each
   that borders real code; clear those too.  */
7957 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
7958 EXIT_BLOCK_PTR->global_live_at_start = NULL;
7961 /* Given a register bitmap, turn on the bits in a HARD_REG_SET that
7962 correspond to the hard registers, if any, set in that map. This
7963 could be done far more efficiently by having all sorts of special-cases
7964 with moving single words, but probably isn't worth the trouble. */
7967 reg_set_to_hard_reg_set (to, from)
7973 EXECUTE_IF_SET_IN_BITMAP
7976 if (i >= FIRST_PSEUDO_REGISTER)
7978 SET_HARD_REG_BIT (*to, i);