1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
23 /* This file contains the data flow analysis pass of the compiler. It
24 computes data flow information which tells combine_instructions
25 which insns to consider combining and controls register allocation.
27 Additional data flow information that is too bulky to record is
28 generated during the analysis, and is used at that time to create
29 autoincrement and autodecrement addressing.
31 The first step is dividing the function into basic blocks.
32 find_basic_blocks does this. Then life_analysis determines
33 where each register is live and where it is dead.
35 ** find_basic_blocks **
37 find_basic_blocks divides the current function's rtl into basic
38 blocks and constructs the CFG. The blocks are recorded in the
39 basic_block_info array; the CFG exists in the edge structures
40 referenced by the blocks.
42 find_basic_blocks also finds any unreachable loops and deletes them.
46 life_analysis is called immediately after find_basic_blocks.
47 It uses the basic block information to determine where each
48 hard or pseudo register is live.
50 ** live-register info **
52 The information about where each register is live is in two parts:
53 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
55 basic_block->global_live_at_start has an element for each basic
56 block, and the element is a bit-vector with a bit for each hard or
57 pseudo register. The bit is 1 if the register is live at the
58 beginning of the basic block.
60 Two types of elements can be added to an insn's REG_NOTES.
61 A REG_DEAD note is added to an insn's REG_NOTES for any register
62 that meets both of two conditions: The value in the register is not
63 needed in subsequent insns and the insn does not replace the value in
64 the register (in the case of multi-word hard registers, the value in
65 each register must be replaced by the insn to avoid a REG_DEAD note).
67 In the vast majority of cases, an object in a REG_DEAD note will be
68 used somewhere in the insn. The (rare) exception to this is if an
69 insn uses a multi-word hard register and only some of the registers are
70 needed in subsequent insns. In that case, REG_DEAD notes will be
71 provided for those hard registers that are not subsequently needed.
72 Partial REG_DEAD notes of this type do not occur when an insn sets
73 only some of the hard registers used in such a multi-word operand;
74 omitting REG_DEAD notes for objects stored in an insn is optional and
75 the desire to do so does not justify the complexity of the partial
78 REG_UNUSED notes are added for each register that is set by the insn
79 but is unused subsequently (if every register set by the insn is unused
80 and the insn does not reference memory or have some other side-effect,
81 the insn is deleted instead). If only part of a multi-word hard
82 register is used in a subsequent insn, REG_UNUSED notes are made for
83 the parts that will not be used.
85 To determine which registers are live after any insn, one can
86 start from the beginning of the basic block and scan insns, noting
87 which registers are set by each insn and which die there.
89 ** Other actions of life_analysis **
91 life_analysis sets up the LOG_LINKS fields of insns because the
92 information needed to do so is readily available.
94 life_analysis deletes insns whose only effect is to store a value
97 life_analysis notices cases where a reference to a register as
98 a memory address can be combined with a preceding or following
99 incrementation or decrementation of the register. The separate
100 instruction to increment or decrement is deleted and the address
101 is changed to a POST_INC or similar rtx.
103 Each time an incrementing or decrementing address is created,
104 a REG_INC element is added to the insn's REG_NOTES list.
106 life_analysis fills in certain vectors containing information about
107 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
108 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
110 life_analysis sets current_function_sp_is_unchanging if the function
111 doesn't modify the stack pointer. */
115 Split out from life_analysis:
116 - local property discovery (bb->local_live, bb->local_set)
117 - global property computation
119 - pre/post modify transformation
127 #include "hard-reg-set.h"
128 #include "basic-block.h"
129 #include "insn-config.h"
133 #include "function.h"
137 #include "insn-flags.h"
141 #include "splay-tree.h"
143 #define obstack_chunk_alloc xmalloc
144 #define obstack_chunk_free free
147 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
148 the stack pointer does not matter. The value is tested only in
149 functions that have frame pointers.
150 No definition is equivalent to always zero. */
151 #ifndef EXIT_IGNORE_STACK
152 #define EXIT_IGNORE_STACK 0
155 #ifndef HAVE_epilogue
156 #define HAVE_epilogue 0
158 #ifndef HAVE_prologue
159 #define HAVE_prologue 0
161 #ifndef HAVE_sibcall_epilogue
162 #define HAVE_sibcall_epilogue 0
165 /* The contents of the current function definition are allocated
166 in this obstack, and all are freed at the end of the function.
167 For top-level functions, this is temporary_obstack.
168 Separate obstacks are made for nested functions. */
170 extern struct obstack *function_obstack;
172 /* Number of basic blocks in the current function. */
176 /* Number of edges in the current function. */
180 /* The basic block array. */
182 varray_type basic_block_info;
184 /* The special entry and exit blocks. */
186 struct basic_block_def entry_exit_blocks[2]
191 NULL, /* local_set */
192 NULL, /* global_live_at_start */
193 NULL, /* global_live_at_end */
195 ENTRY_BLOCK, /* index */
197 -1, -1, /* eh_beg, eh_end */
205 NULL, /* local_set */
206 NULL, /* global_live_at_start */
207 NULL, /* global_live_at_end */
209 EXIT_BLOCK, /* index */
211 -1, -1, /* eh_beg, eh_end */
216 /* Nonzero if the second flow pass has completed. */
219 /* Maximum register number used in this function, plus one. */
223 /* Indexed by n, giving various register information */
225 varray_type reg_n_info;
227 /* Size of a regset for the current function,
228 in (1) bytes and (2) elements. */
233 /* Regset of regs live when calls to `setjmp'-like functions happen. */
234 /* ??? Does this exist only for the setjmp-clobbered warning message? */
236 regset regs_live_at_setjmp;
238 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
239 that have to go in the same hard reg.
240 The first two regs in the list are a pair, and the next two
241 are another pair, etc. */
244 /* Set of registers that may be eliminable. These are handled specially
245 in updating regs_ever_live. */
247 static HARD_REG_SET elim_reg_set;
249 /* The basic block structure for every insn, indexed by uid. */
251 varray_type basic_block_for_insn;
253 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
254 /* ??? Should probably be using LABEL_NUSES instead. It would take a
255 bit of surgery to be able to use or co-opt the routines in jump. */
257 static rtx label_value_list;
258 static rtx tail_recursion_label_list;
260 /* Holds information for tracking conditional register life information. */
261 struct reg_cond_life_info
/* Entries of this type are stored, keyed by register number, in the
   splay tree propagate_block_info.reg_cond_dead below (only when
   HAVE_conditional_execution is defined).  */
263 /* An EXPR_LIST of conditions under which a register is dead. */
266 /* ??? Could store mask of bytes that are dead, so that we could finally
267 track lifetimes of multi-word registers accessed via subregs. */
270 /* For use in communicating between propagate_block and its subroutines.
271 Holds all information needed to compute life and def-use information. */
/* One of these is built per basic block being scanned; the regsets below
   are bit-vectors indexed by (hard or pseudo) register number.  */
273 struct propagate_block_info
275 /* The basic block we're considering. */
278 /* Bit N is set if register N is conditionally or unconditionally live. */
281 /* Bit N is set if register N is set this insn. */
284 /* Element N is the next insn that uses (hard or pseudo) register N
285 within the current basic block; or zero, if there is no such insn. */
288 /* Contains a list of all the MEMs we are tracking for dead store
292 /* If non-null, record the set of registers set in the basic block. */
295 #ifdef HAVE_conditional_execution
296 /* Indexed by register number, holds a reg_cond_life_info for each
297 register that is not unconditionally live or dead. */
298 splay_tree reg_cond_dead;
300 /* Bit N is set if register N is in an expression in reg_cond_dead. */
304 /* Non-zero if the value of CC0 is live. */
307 /* Flags controlling the set of information propagate_block collects. */
311 /* Forward declarations */
312 static int count_basic_blocks PARAMS ((rtx));
313 static void find_basic_blocks_1 PARAMS ((rtx));
314 static rtx find_label_refs PARAMS ((rtx, rtx));
315 static void clear_edges PARAMS ((void));
316 static void make_edges PARAMS ((rtx));
317 static void make_label_edge PARAMS ((sbitmap *, basic_block,
319 static void make_eh_edge PARAMS ((sbitmap *, eh_nesting_info *,
320 basic_block, rtx, int));
321 static void mark_critical_edges PARAMS ((void));
322 static void move_stray_eh_region_notes PARAMS ((void));
323 static void record_active_eh_regions PARAMS ((rtx));
325 static void commit_one_edge_insertion PARAMS ((edge));
327 static void delete_unreachable_blocks PARAMS ((void));
328 static void delete_eh_regions PARAMS ((void));
329 static int can_delete_note_p PARAMS ((rtx));
330 static void expunge_block PARAMS ((basic_block));
331 static int can_delete_label_p PARAMS ((rtx));
332 static int tail_recursion_label_p PARAMS ((rtx));
333 static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
335 static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
337 static int merge_blocks PARAMS ((edge,basic_block,basic_block));
338 static void try_merge_blocks PARAMS ((void));
339 static void tidy_fallthru_edges PARAMS ((void));
340 static int verify_wide_reg_1 PARAMS ((rtx *, void *));
341 static void verify_wide_reg PARAMS ((int, rtx, rtx));
342 static void verify_local_live_at_start PARAMS ((regset, basic_block));
343 static int set_noop_p PARAMS ((rtx));
344 static int noop_move_p PARAMS ((rtx));
345 static void delete_noop_moves PARAMS ((rtx));
346 static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
347 static void notice_stack_pointer_modification PARAMS ((rtx));
348 static void mark_reg PARAMS ((rtx, void *));
349 static void mark_regs_live_at_end PARAMS ((regset));
350 static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *));
351 static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
352 static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
353 static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
354 static int insn_dead_p PARAMS ((struct propagate_block_info *,
356 static int libcall_dead_p PARAMS ((struct propagate_block_info *,
358 static void mark_set_regs PARAMS ((struct propagate_block_info *,
360 static void mark_set_1 PARAMS ((struct propagate_block_info *,
361 enum rtx_code, rtx, rtx,
363 #ifdef HAVE_conditional_execution
364 static int mark_regno_cond_dead PARAMS ((struct propagate_block_info *,
366 static void free_reg_cond_life_info PARAMS ((splay_tree_value));
367 static int flush_reg_cond_reg_1 PARAMS ((splay_tree_node, void *));
368 static void flush_reg_cond_reg PARAMS ((struct propagate_block_info *,
370 static rtx ior_reg_cond PARAMS ((rtx, rtx));
371 static rtx not_reg_cond PARAMS ((rtx));
372 static rtx nand_reg_cond PARAMS ((rtx, rtx));
375 static void find_auto_inc PARAMS ((struct propagate_block_info *,
377 static int try_pre_increment_1 PARAMS ((struct propagate_block_info *,
379 static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
381 static void mark_used_reg PARAMS ((struct propagate_block_info *,
383 static void mark_used_regs PARAMS ((struct propagate_block_info *,
385 void dump_flow_info PARAMS ((FILE *));
386 void debug_flow_info PARAMS ((void));
387 static void dump_edge_info PARAMS ((FILE *, edge, int));
389 static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
391 static void remove_fake_successors PARAMS ((basic_block));
392 static void flow_nodes_print PARAMS ((const char *, const sbitmap, FILE *));
393 static void flow_exits_print PARAMS ((const char *, const edge *, int, FILE *));
394 static void flow_loops_cfg_dump PARAMS ((const struct loops *, FILE *));
395 static int flow_loop_nested_p PARAMS ((struct loop *, struct loop *));
396 static int flow_loop_exits_find PARAMS ((const sbitmap, edge **));
397 static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
398 static int flow_depth_first_order_compute PARAMS ((int *));
399 static basic_block flow_loop_pre_header_find PARAMS ((basic_block, const sbitmap *));
400 static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
401 static void flow_loops_tree_build PARAMS ((struct loops *));
402 static int flow_loop_level_compute PARAMS ((struct loop *, int));
403 static int flow_loops_level_compute PARAMS ((struct loops *));
405 /* Find basic blocks of the current function.
406 F is the first insn of the function and NREGS the number of register
/* NREGS and FILE are accepted for interface compatibility only; both are
   marked ATTRIBUTE_UNUSED below.  The pass (re)builds n_basic_blocks,
   basic_block_info, the insn->block map, and the CFG edges.  */
410 find_basic_blocks (f, nregs, file)
412 int nregs ATTRIBUTE_UNUSED;
413 FILE *file ATTRIBUTE_UNUSED;
417 /* Flush out existing data. */
418 if (basic_block_info != NULL)
424 /* Clear bb->aux on all extant basic blocks. We'll use this as a
425 tag for reuse during create_basic_block, just in case some pass
426 copies around basic block notes improperly. */
427 for (i = 0; i < n_basic_blocks; ++i)
428 BASIC_BLOCK (i)->aux = NULL;
430 VARRAY_FREE (basic_block_info);
/* Re-count blocks from scratch; find_basic_blocks_1 cross-checks this. */
433 n_basic_blocks = count_basic_blocks (f);
435 /* Size the basic block table. The actual structures will be allocated
436 by find_basic_blocks_1, since we want to keep the structure pointers
437 stable across calls to find_basic_blocks. */
438 /* ??? This whole issue would be much simpler if we called find_basic_blocks
439 exactly once, and thereafter we don't have a single long chain of
440 instructions at all until close to the end of compilation when we
441 actually lay them out. */
443 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
445 find_basic_blocks_1 (f);
447 /* Record the block to which an insn belongs. */
448 /* ??? This should be done another way, by which (perhaps) a label is
449 tagged directly with the basic block that it starts. It is used for
450 more than that currently, but IMO that is the only valid use. */
452 max_uid = get_max_uid ();
454 /* Leave space for insns life_analysis makes in some cases for auto-inc.
455 These cases are rare, so we don't need too much space. */
456 max_uid += max_uid / 10;
459 compute_bb_for_insn (max_uid);
461 /* Discover the edges of our cfg. */
462 record_active_eh_regions (f);
463 make_edges (label_value_list);
465 /* Do very simple cleanup now, for the benefit of code that runs between
466 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
467 tidy_fallthru_edges ();
469 mark_critical_edges ();
471 #ifdef ENABLE_CHECKING
476 /* Count the basic blocks of the function. */
/* Return the number of basic blocks in the insn chain starting at F.
   A new block begins at a CODE_LABEL, or at the first real insn after a
   JUMP_INSN, a BARRIER, or a CALL_INSN that had an abnormal edge.  */
479 count_basic_blocks (f)
483 register RTX_CODE prev_code;
484 register int count = 0;
486 int call_had_abnormal_edge = 0;
/* Pretend the previous insn was a jump so the first real insn opens
   a block.  */
488 prev_code = JUMP_INSN;
489 for (insn = f; insn; insn = NEXT_INSN (insn))
491 register RTX_CODE code = GET_CODE (insn);
493 if (code == CODE_LABEL
494 || (GET_RTX_CLASS (code) == 'i'
495 && (prev_code == JUMP_INSN
496 || prev_code == BARRIER
497 || (prev_code == CALL_INSN && call_had_abnormal_edge))))
500 /* Record whether this call created an edge. */
501 if (code == CALL_INSN)
503 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
/* No REG_EH_REGION note defaults the region to 1, i.e. "may throw".  */
504 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
506 call_had_abnormal_edge = 0;
508 /* If there is an EH region or rethrow, we have an edge. */
509 if ((eh_region && region > 0)
510 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
511 call_had_abnormal_edge = 1;
512 else if (nonlocal_goto_handler_labels && region >= 0)
513 /* If there is a nonlocal goto label and the specified
514 region number isn't -1, we have an edge. (0 means
515 no throw, but might have a nonlocal goto). */
516 call_had_abnormal_edge = 1;
521 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
523 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
527 /* The rest of the compiler works a bit smoother when we don't have to
528 check for the edge case of do-nothing functions with no basic blocks. */
/* An empty function gets one dummy (use (const_int 0)) insn so that at
   least one block exists.  */
531 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
538 /* Scan a list of insns for labels referred to other than by jumps.
539 This is used to scan the alternatives of a call placeholder. */
/* Returns LVL extended (as an EXPR_LIST) with each such label found;
   the caller threads the result through successive calls.  */
540 static rtx find_label_refs (f, lvl)
546 for (insn = f; insn; insn = NEXT_INSN (insn))
547 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
551 /* Make a list of all labels referred to other than by jumps
552 (which just don't have the REG_LABEL notes).
554 Make a special exception for labels followed by an ADDR*VEC,
555 as this would be a part of the tablejump setup code.
557 Make a special exception for the eh_return_stub_label, which
558 we know isn't part of any otherwise visible control flow. */
560 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
561 if (REG_NOTE_KIND (note) == REG_LABEL)
563 rtx lab = XEXP (note, 0), next;
/* Skip the eh return stub, tablejump dispatch labels, and labels
   that have already been deleted (turned into NOTEs).  */
565 if (lab == eh_return_stub_label)
567 else if ((next = next_nonnote_insn (lab)) != NULL
568 && GET_CODE (next) == JUMP_INSN
569 && (GET_CODE (PATTERN (next)) == ADDR_VEC
570 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
572 else if (GET_CODE (lab) == NOTE)
575 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
582 /* Find all basic blocks of the function whose first insn is F.
584 Collect and return a list of labels whose addresses are taken. This
585 will be used in make_edges for use with computed gotos. */
/* Single pass over the insn chain: opens a block at each label or first
   insn after a block-ender, closes it at jumps, barriers, and abnormal
   calls, and calls create_basic_block for each.  Side effects: sets
   label_value_list and tail_recursion_label_list (see the tail of the
   function).  */
588 find_basic_blocks_1 (f)
591 register rtx insn, next;
593 rtx bb_note = NULL_RTX;
594 rtx eh_list = NULL_RTX;
600 /* We process the instructions in a slightly different way than we did
601 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
602 closed out the previous block, so that it gets attached at the proper
603 place. Since this form should be equivalent to the previous,
604 count_basic_blocks continues to use the old form as a check. */
606 for (insn = f; insn; insn = next)
608 enum rtx_code code = GET_CODE (insn);
610 next = NEXT_INSN (insn);
616 int kind = NOTE_LINE_NUMBER (insn);
618 /* Keep a LIFO list of the currently active exception notes. */
619 if (kind == NOTE_INSN_EH_REGION_BEG)
620 eh_list = alloc_INSN_LIST (insn, eh_list);
621 else if (kind == NOTE_INSN_EH_REGION_END)
625 eh_list = XEXP (eh_list, 1);
626 free_INSN_LIST_node (t);
629 /* Look for basic block notes with which to keep the
630 basic_block_info pointers stable. Unthread the note now;
631 we'll put it back at the right place in create_basic_block.
632 Or not at all if we've already found a note in this block. */
633 else if (kind == NOTE_INSN_BASIC_BLOCK)
635 if (bb_note == NULL_RTX)
638 next = flow_delete_insn (insn);
644 /* A basic block starts at a label. If we've closed one off due
645 to a barrier or some such, no need to do it again. */
646 if (head != NULL_RTX)
648 /* While we now have edge lists with which other portions of
649 the compiler might determine a call ending a basic block
650 does not imply an abnormal edge, it will be a bit before
651 everything can be updated. So continue to emit a noop at
652 the end of such a block. */
653 if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
655 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
656 end = emit_insn_after (nop, end);
659 create_basic_block (i++, head, end, bb_note);
667 /* A basic block ends at a jump. */
668 if (head == NULL_RTX)
672 /* ??? Make a special check for table jumps. The way this
673 happens is truly and amazingly gross. We are about to
674 create a basic block that contains just a code label and
675 an addr*vec jump insn. Worse, an addr_diff_vec creates
676 its own natural loop.
678 Prevent this bit of brain damage, pasting things together
679 correctly in make_edges.
681 The correct solution involves emitting the table directly
682 on the tablejump instruction as a note, or JUMP_LABEL. */
684 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
685 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
693 goto new_bb_inclusive;
696 /* A basic block ends at a barrier. It may be that an unconditional
697 jump already closed the basic block -- no need to do it again. */
698 if (head == NULL_RTX)
701 /* While we now have edge lists with which other portions of the
702 compiler might determine a call ending a basic block does not
703 imply an abnormal edge, it will be a bit before everything can
704 be updated. So continue to emit a noop at the end of such a
706 if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
708 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
709 end = emit_insn_after (nop, end);
711 goto new_bb_exclusive;
/* CALL_INSN handling.  Mirrors the logic in count_basic_blocks: the
   two must agree on which calls end a block.  */
715 /* Record whether this call created an edge. */
716 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
/* Missing REG_EH_REGION note defaults the region to 1 ("may throw"). */
717 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
718 int call_has_abnormal_edge = 0;
720 if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
722 /* Scan each of the alternatives for label refs. */
723 lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
724 lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
725 lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
726 /* Record its tail recursion label, if any. */
727 if (XEXP (PATTERN (insn), 3) != NULL_RTX)
728 trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
731 /* If there is an EH region or rethrow, we have an edge. */
732 if ((eh_list && region > 0)
733 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
734 call_has_abnormal_edge = 1;
735 else if (nonlocal_goto_handler_labels && region >= 0)
736 /* If there is a nonlocal goto label and the specified
737 region number isn't -1, we have an edge. (0 means
738 no throw, but might have a nonlocal goto). */
739 call_has_abnormal_edge = 1;
741 /* A basic block ends at a call that can either throw or
742 do a non-local goto. */
743 if (call_has_abnormal_edge)
746 if (head == NULL_RTX)
751 create_basic_block (i++, head, end, bb_note);
752 head = end = NULL_RTX;
760 if (GET_RTX_CLASS (code) == 'i')
762 if (head == NULL_RTX)
769 if (GET_RTX_CLASS (code) == 'i')
773 /* Make a list of all labels referred to other than by jumps
774 (which just don't have the REG_LABEL notes).
776 Make a special exception for labels followed by an ADDR*VEC,
777 as this would be a part of the tablejump setup code.
779 Make a special exception for the eh_return_stub_label, which
780 we know isn't part of any otherwise visible control flow. */
782 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
783 if (REG_NOTE_KIND (note) == REG_LABEL)
785 rtx lab = XEXP (note, 0), next;
787 if (lab == eh_return_stub_label)
789 else if ((next = next_nonnote_insn (lab)) != NULL
790 && GET_CODE (next) == JUMP_INSN
791 && (GET_CODE (PATTERN (next)) == ADDR_VEC
792 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
794 else if (GET_CODE (lab) == NOTE)
797 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
/* Close out any block still open at the end of the chain, and sanity
   check against count_basic_blocks' answer.  */
802 if (head != NULL_RTX)
803 create_basic_block (i++, head, end, bb_note);
805 flow_delete_insn (bb_note);
807 if (i != n_basic_blocks)
810 label_value_list = lvl;
811 tail_recursion_label_list = trll;
814 /* Tidy the CFG by deleting unreachable code and whatnot. */
/* NOTE(review): the function header for this body is not visible in this
   excerpt; judging by the comment above and the free of label_value_list /
   tail_recursion_label_list, this appears to be the CFG cleanup entry
   point run after find_basic_blocks -- confirm against the full file.  */
820 delete_unreachable_blocks ();
821 move_stray_eh_region_notes ();
822 record_active_eh_regions (f);
824 mark_critical_edges ();
826 /* Kill the data we won't maintain. */
827 free_EXPR_LIST_list (&label_value_list);
828 free_EXPR_LIST_list (&tail_recursion_label_list);
831 /* Create a new basic block consisting of the instructions between
832 HEAD and END inclusive. Reuses the note and basic block struct
833 in BB_NOTE, if any. */
/* The new block is stored at BASIC_BLOCK (INDEX).  A NOTE_INSN_BASIC_BLOCK
   is guaranteed to exist inside the block on return, either the reused
   BB_NOTE moved into place or a freshly emitted one.  */
836 create_basic_block (index, head, end, bb_note)
838 rtx head, end, bb_note;
843 && ! RTX_INTEGRATED_P (bb_note)
844 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
847 /* If we found an existing note, thread it back onto the chain. */
851 if (GET_CODE (head) == CODE_LABEL
855 after = PREV_INSN (head);
859 if (after != bb_note && NEXT_INSN (after) != bb_note)
860 reorder_insns (bb_note, bb_note, after);
864 /* Otherwise we must create a note and a basic block structure.
865 Since we allow basic block structs in rtl, give the struct
866 the same lifetime by allocating it off the function obstack
867 rather than using malloc. */
869 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
870 memset (bb, 0, sizeof (*bb));
/* The note goes after a leading label, otherwise before the first insn,
   so the label stays first in the block.  */
872 if (GET_CODE (head) == CODE_LABEL)
873 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
876 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
879 NOTE_BASIC_BLOCK (bb_note) = bb;
882 /* Always include the bb note in the block. */
883 if (NEXT_INSN (end) == bb_note)
889 BASIC_BLOCK (index) = bb;
891 /* Tag the block so that we know it has been used when considering
892 other basic block notes. */
896 /* Records the basic block struct in BB_FOR_INSN, for every instruction
897 indexed by INSN_UID. MAX is the size of the array. */
/* Rebuilds the global varray basic_block_for_insn from scratch, freeing
   any previous contents first.  */
900 compute_bb_for_insn (max)
905 if (basic_block_for_insn)
906 VARRAY_FREE (basic_block_for_insn);
907 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
909 for (i = 0; i < n_basic_blocks; ++i)
911 basic_block bb = BASIC_BLOCK (i);
918 int uid = INSN_UID (insn);
920 VARRAY_BB (basic_block_for_insn, uid) = bb;
923 insn = NEXT_INSN (insn);
928 /* Free the memory associated with the edge structures. */
/* Walks every block's successor list (plus the entry block's), frees
   each edge, then nulls the entry/exit list heads.  */
936 for (i = 0; i < n_basic_blocks; ++i)
938 basic_block bb = BASIC_BLOCK (i);
940 for (e = bb->succ; e ; e = n)
950 for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
956 ENTRY_BLOCK_PTR->succ = 0;
957 EXIT_BLOCK_PTR->pred = 0;
962 /* Identify the edges between basic blocks.
964 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
965 that are otherwise unreachable may be reachable with a non-local goto.
967 BB_EH_END is an array indexed by basic block number in which we record
968 the list of exception regions active at the end of the basic block. */
/* NOTE(review): the comment above looks stale -- the only parameter is
   LABEL_VALUE_LIST (labels whose addresses are taken, used for computed
   gotos); NONLOCAL_LABEL_LIST and BB_EH_END are not parameters here.
   Confirm against the full file before relying on it.  */
971 make_edges (label_value_list)
972 rtx label_value_list;
975 eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
976 sbitmap *edge_cache = NULL;
978 /* Assume no computed jump; revise as we create edges. */
979 current_function_has_computed_jump = 0;
981 /* Heavy use of computed goto in machine-generated code can lead to
982 nearly fully-connected CFGs. In that case we spend a significant
983 amount of time searching the edge lists for duplicates. */
/* The cache is an n_basic_blocks x n_basic_blocks bitmap: bit (src,dst)
   set means the edge already exists, letting make_edge skip the list
   scan.  Only allocated when computed gotos are possible.  */
984 if (forced_labels || label_value_list)
986 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
987 sbitmap_vector_zero (edge_cache, n_basic_blocks);
990 /* By nature of the way these get numbered, block 0 is always the entry. */
991 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
993 for (i = 0; i < n_basic_blocks; ++i)
995 basic_block bb = BASIC_BLOCK (i);
998 int force_fallthru = 0;
1000 /* Examine the last instruction of the block, and discover the
1001 ways we can leave the block. */
1004 code = GET_CODE (insn);
1007 if (code == JUMP_INSN)
1011 /* ??? Recognize a tablejump and do the right thing. */
1012 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1013 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1014 && GET_CODE (tmp) == JUMP_INSN
1015 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1016 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
/* Tablejump: add a label edge for every entry in the dispatch
   table.  ADDR_DIFF_VEC keeps its table in operand 1.  */
1021 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1022 vec = XVEC (PATTERN (tmp), 0);
1024 vec = XVEC (PATTERN (tmp), 1);
1026 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1027 make_label_edge (edge_cache, bb,
1028 XEXP (RTVEC_ELT (vec, j), 0), 0);
1030 /* Some targets (eg, ARM) emit a conditional jump that also
1031 contains the out-of-range target. Scan for these and
1032 add an edge if necessary. */
1033 if ((tmp = single_set (insn)) != NULL
1034 && SET_DEST (tmp) == pc_rtx
1035 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1036 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
1037 make_label_edge (edge_cache, bb,
1038 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
1040 #ifdef CASE_DROPS_THROUGH
1041 /* Silly VAXen. The ADDR_VEC is going to be in the way of
1042 us naturally detecting fallthru into the next block. */
1047 /* If this is a computed jump, then mark it as reaching
1048 everything on the label_value_list and forced_labels list. */
1049 else if (computed_jump_p (insn))
1051 current_function_has_computed_jump = 1;
1053 for (x = label_value_list; x; x = XEXP (x, 1))
1054 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL)
1056 for (x = forced_labels; x; x = XEXP (x, 1))
1057 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
1060 /* A return jump creates an edge to the exit block. */
1061 else if (returnjump_p (insn))
1062 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
1064 /* Otherwise, we have a plain conditional or unconditional jump. */
1067 if (! JUMP_LABEL (insn))
1069 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
1073 /* If this is a sibling call insn, then this is in effect a
1074 combined call and return, and so we need an edge to the
1075 exit block. No need to worry about EH edges, since we
1076 wouldn't have created the sibling call in the first place. */
1078 if (code == CALL_INSN && SIBLING_CALL_P (insn))
1079 make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
1080 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1083 /* If this is a CALL_INSN, then mark it as reaching the active EH
1084 handler for this CALL_INSN. If we're handling asynchronous
1085 exceptions then any insn can reach any of the active handlers.
1087 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
1089 if (code == CALL_INSN || asynchronous_exceptions)
1091 /* Add any appropriate EH edges. We do this unconditionally
1092 since there may be a REG_EH_REGION or REG_EH_RETHROW note
1093 on the call, and this needn't be within an EH region. */
1094 make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);
1096 /* If we have asynchronous exceptions, do the same for *all*
1097 exception regions active in the block. */
1098 if (asynchronous_exceptions
1099 && bb->eh_beg != bb->eh_end)
1101 if (bb->eh_beg >= 0)
1102 make_eh_edge (edge_cache, eh_nest_info, bb,
1103 NULL_RTX, bb->eh_beg)
1105 for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
1106 if (GET_CODE (x) == NOTE
1107 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
1108 || NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
1110 int region = NOTE_EH_HANDLER (x);
1111 make_eh_edge (edge_cache, eh_nest_info, bb,
1116 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1118 /* ??? This could be made smarter: in some cases it's possible
1119 to tell that certain calls will not do a nonlocal goto.
1121 For example, if the nested functions that do the nonlocal
1122 gotos do not have their addresses taken, then only calls to
1123 those functions or to other nested functions that use them
1124 could possibly do nonlocal gotos. */
1125 /* We do know that a REG_EH_REGION note with a value less
1126 than 0 is guaranteed not to perform a non-local goto. */
1127 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1128 if (!note || INTVAL (XEXP (note, 0)) >= 0)
1129 for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
1130 make_label_edge (edge_cache, bb, XEXP (x, 0),
1131 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1135 /* We know something about the structure of the function __throw in
1136 libgcc2.c. It is the only function that ever contains eh_stub
1137 labels. It modifies its return address so that the last block
1138 returns to one of the eh_stub labels within it. So we have to
1139 make additional edges in the flow graph. */
1140 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
1141 make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);
1143 /* Find out if we can drop through to the next block. */
1144 insn = next_nonnote_insn (insn);
1145 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1146 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1147 else if (i + 1 < n_basic_blocks)
1149 rtx tmp = BLOCK_HEAD (i + 1);
1150 if (GET_CODE (tmp) == NOTE)
1151 tmp = next_nonnote_insn (tmp);
1152 if (force_fallthru || insn == tmp)
1153 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1157 free_eh_nesting_info (eh_nest_info);
1159 sbitmap_vector_free (edge_cache);
1162 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1163 about the edge that is accumulated between calls. */
1166 make_edge (edge_cache, src, dst, flags)
1167 sbitmap *edge_cache;
1168 basic_block src, dst;
1174 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1175 many edges to them, and we didn't allocate memory for it. */
1176 use_edge_cache = (edge_cache
1177 && src != ENTRY_BLOCK_PTR
1178 && dst != EXIT_BLOCK_PTR);
1180 /* Make sure we don't add duplicate edges. If the edge already exists,
1181 the flags are presumably OR'd into it rather than creating a second
1182 edge -- NOTE(review): the merge statement itself is not visible here. */
1181 if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
1182 for (e = src->succ; e ; e = e->succ_next)
1189 e = (edge) xcalloc (1, sizeof (*e));
1192 /* Push the new edge onto the front of both the source's successor
1193 list and the destination's predecessor list. */
1192 e->succ_next = src->succ;
1193 e->pred_next = dst->pred;
1202 /* Record the new edge in the cache so later duplicate checks are O(1). */
1202 SET_BIT (edge_cache[src->index], dst->index);
1205 /* Create an edge from a basic block to a label. Resolves the label to
1206 its containing basic block and delegates to make_edge. */
1208 make_label_edge (edge_cache, src, label, flags)
1209 sbitmap *edge_cache;
1214 if (GET_CODE (label) != CODE_LABEL)
1217 /* If the label was never emitted, this insn is junk, but avoid a
1218 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1219 as a result of a syntax error and a diagnostic has already been
1222 printed. */
1222 if (INSN_UID (label) == 0)
1225 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1228 /* Create the edges generated by INSN in REGION. An edge is made from
1229 SRC to the label of each handler reachable from REGION. INSN may be
1230 NULL_RTX when edges are made for a region rather than a specific insn. */
1231 make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
1232 sbitmap *edge_cache;
1233 eh_nesting_info *eh_nest_info;
1238 handler_info **handler_list;
1241 /* Mark the edges abnormal-call only when the trigger is a CALL_INSN. */
1241 is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1242 num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
1245 make_label_edge (edge_cache, src, handler_list[num]->handler_label,
1246 EDGE_ABNORMAL | EDGE_EH | is_call);
1250 /* EH_REGION notes appearing between basic blocks is ambiguous, and even
1251 dangerous if we intend to move basic blocks around. Move such notes
1252 into the following block. */
1255 move_stray_eh_region_notes ()
1260 if (n_basic_blocks < 2)
1263 /* Walk block pairs (b1, b2) from the end of the function backwards;
1264 b2 is always the block that follows b1. */
1263 b2 = BASIC_BLOCK (n_basic_blocks - 1);
1264 for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
1266 rtx insn, next, list = NULL_RTX;
1268 b1 = BASIC_BLOCK (i);
1269 /* Scan the insns that sit between the end of b1 and the head of b2,
1270 collecting any EH region begin/end notes found there. */
1269 for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
1271 next = NEXT_INSN (insn);
1272 if (GET_CODE (insn) == NOTE
1273 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1274 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1276 /* Unlink from the insn chain. */
1277 NEXT_INSN (PREV_INSN (insn)) = next;
1278 PREV_INSN (next) = PREV_INSN (insn);
1281 /* Thread the collected note onto the local LIST (built in reverse). */
1281 NEXT_INSN (insn) = list;
1286 if (list == NULL_RTX)
1289 /* Find where to insert these things. Skip past b2's leading label,
1290 if any, so the notes land inside the block proper. */
1291 if (GET_CODE (insn) == CODE_LABEL)
1292 insn = NEXT_INSN (insn);
1296 next = NEXT_INSN (list);
1297 add_insn_after (list, insn);
1303 /* Recompute eh_beg/eh_end for each basic block by scanning the insn
1304 stream F once, maintaining EH_LIST as a stack (an INSN_LIST) of the
1305 currently-open EH regions. */
1306 record_active_eh_regions (f)
1309 rtx insn, eh_list = NULL_RTX;
1311 basic_block bb = BASIC_BLOCK (0);
1313 for (insn = f; insn ; insn = NEXT_INSN (insn))
1315 /* At a block head, record the innermost active region (-1 if none). */
1315 if (bb->head == insn)
1316 bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1318 if (GET_CODE (insn) == NOTE)
1320 int kind = NOTE_LINE_NUMBER (insn);
1321 if (kind == NOTE_INSN_EH_REGION_BEG)
1322 eh_list = alloc_INSN_LIST (insn, eh_list);
1323 else if (kind == NOTE_INSN_EH_REGION_END)
1325 /* Pop the innermost region and free the list node. */
1325 rtx t = XEXP (eh_list, 1);
1326 free_INSN_LIST_node (eh_list);
1331 /* At a block end, record the region active there and advance to the
1332 next block; stop once all blocks have been processed. */
1331 if (bb->end == insn)
1333 bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1335 if (i == n_basic_blocks)
1337 bb = BASIC_BLOCK (i);
1342 /* Identify critical edges and set the bits appropriately. An edge is
1343 critical iff its source has multiple successors AND its destination
1344 has multiple predecessors; such edges cannot have code inserted on
1345 them without splitting. */
1345 mark_critical_edges ()
1347 int i, n = n_basic_blocks;
1350 /* We begin with the entry block. This is not terribly important now,
1351 but could be if a front end (Fortran) implemented alternate entry
1352 points. */
1353 bb = ENTRY_BLOCK_PTR;
1360 /* (1) Critical edges must have a source with multiple successors. */
1361 if (bb->succ && bb->succ->succ_next)
1363 for (e = bb->succ; e ; e = e->succ_next)
1365 /* (2) Critical edges must have a destination with multiple
1366 predecessors. Note that we know there is at least one
1367 predecessor -- the edge we followed to get here. */
1368 if (e->dest->pred->pred_next)
1369 e->flags |= EDGE_CRITICAL;
1371 e->flags &= ~EDGE_CRITICAL;
1376 /* Source has a single successor: none of its out-edges is critical. */
1376 for (e = bb->succ; e ; e = e->succ_next)
1377 e->flags &= ~EDGE_CRITICAL;
1382 bb = BASIC_BLOCK (i);
1386 /* Split a (typically critical) edge. Return the new block.
1387 Abort on abnormal edges.
1389 ??? The code generally expects to be called on critical edges.
1390 The case of a block ending in an unconditional jump to a
1391 block with multiple predecessors is not handled optimally. */
1394 split_edge (edge_in)
1397 basic_block old_pred, bb, old_succ;
1402 /* Abnormal edges cannot be split. */
1403 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1406 old_pred = edge_in->src;
1407 old_succ = edge_in->dest;
1409 /* Remove the existing edge from the destination's pred list. */
1412 for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
1414 *pp = edge_in->pred_next;
1415 edge_in->pred_next = NULL;
1418 /* Create the new structures: the new basic block BB and the edge
1419 EDGE_OUT that will connect BB to the old successor. */
1419 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
1420 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1423 memset (bb, 0, sizeof (*bb));
1425 /* ??? This info is likely going to be out of date very soon. */
1426 if (old_succ->global_live_at_start)
1428 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
1429 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
1430 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1431 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
1436 /* Wire the new block: edge_in now targets BB (elided above,
1437 presumably), and BB's single successor is EDGE_OUT. */
1436 bb->succ = edge_out;
1437 bb->count = edge_in->count;
1440 /* The incoming edge is no longer critical once split. */
1440 edge_in->flags &= ~EDGE_CRITICAL;
1442 edge_out->pred_next = old_succ->pred;
1443 edge_out->succ_next = NULL;
1445 edge_out->dest = old_succ;
1446 /* BB falls through into the old successor unconditionally. */
1446 edge_out->flags = EDGE_FALLTHRU;
1447 edge_out->probability = REG_BR_PROB_BASE;
1448 edge_out->count = edge_in->count;
1450 old_succ->pred = edge_out;
1452 /* Tricky case -- if there existed a fallthru into the successor
1453 (and we're not it) we must add a new unconditional jump around
1454 the new block we're actually interested in.
1456 Further, if that edge is critical, this means a second new basic
1457 block must be created to hold it. In order to simplify correct
1458 insn placement, do this before we touch the existing basic block
1459 ordering for the block we were really wanting. */
1460 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1463 for (e = edge_out->pred_next; e ; e = e->pred_next)
1464 if (e->flags & EDGE_FALLTHRU)
1469 basic_block jump_block;
1472 if ((e->flags & EDGE_CRITICAL) == 0
1473 && e->src != ENTRY_BLOCK_PTR)
1475 /* Non critical -- we can simply add a jump to the end
1476 of the existing predecessor. */
1477 jump_block = e->src;
1481 /* We need a new block to hold the jump. The simplest
1482 way to do the bulk of the work here is to recursively
1483 call ourselves. */
1484 jump_block = split_edge (e);
1485 e = jump_block->succ;
1488 /* Now add the jump insn ... */
1489 pos = emit_jump_insn_after (gen_jump (old_succ->head),
1491 jump_block->end = pos;
1492 if (basic_block_for_insn)
1493 set_block_for_insn (pos, jump_block);
1494 emit_barrier_after (pos);
1496 /* ... let jump know that label is in use, ... */
1497 JUMP_LABEL (pos) = old_succ->head;
1498 ++LABEL_NUSES (old_succ->head);
1500 /* ... and clear fallthru on the outgoing edge. */
1501 e->flags &= ~EDGE_FALLTHRU;
1503 /* Continue splitting the interesting edge. */
1507 /* Place the new block just in front of the successor: grow the
1508 block array and shift everything from index J upwards by one. */
1508 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1509 if (old_succ == EXIT_BLOCK_PTR)
1510 j = n_basic_blocks - 1;
1512 j = old_succ->index;
1513 for (i = n_basic_blocks - 1; i > j; --i)
1515 basic_block tmp = BASIC_BLOCK (i - 1);
1516 BASIC_BLOCK (i) = tmp;
1519 BASIC_BLOCK (i) = bb;
1522 /* Create the basic block note.
1524 Where we place the note can have a noticable impact on the generated
1525 code. Consider this cfg:
1536 If we need to insert an insn on the edge from block 0 to block 1,
1537 we want to ensure the instructions we insert are outside of any
1538 loop notes that physically sit between block 0 and block 1. Otherwise
1539 we confuse the loop optimizer into thinking the loop is a phony. */
1540 if (old_succ != EXIT_BLOCK_PTR
1541 && PREV_INSN (old_succ->head)
1542 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
1543 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
1544 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1545 PREV_INSN (old_succ->head));
1546 else if (old_succ != EXIT_BLOCK_PTR)
1547 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1549 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
1550 NOTE_BASIC_BLOCK (bb_note) = bb;
1551 /* The new block initially contains only its basic-block note. */
1551 bb->head = bb->end = bb_note;
1553 /* Not quite simple -- for non-fallthru edges, we must adjust the
1554 predecessor's jump instruction to target our new block. */
1555 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1557 rtx tmp, insn = old_pred->end;
1558 rtx old_label = old_succ->head;
1559 rtx new_label = gen_label_rtx ();
1561 if (GET_CODE (insn) != JUMP_INSN)
1564 /* ??? Recognize a tablejump and adjust all matching cases. */
1565 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1566 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1567 && GET_CODE (tmp) == JUMP_INSN
1568 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1569 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1574 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1575 vec = XVEC (PATTERN (tmp), 0);
1577 vec = XVEC (PATTERN (tmp), 1);
1579 /* Retarget every table entry that referenced the old label. */
1579 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1580 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1582 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1583 --LABEL_NUSES (old_label);
1584 ++LABEL_NUSES (new_label);
1587 /* Handle casesi dispatch insns */
1588 if ((tmp = single_set (insn)) != NULL
1589 && SET_DEST (tmp) == pc_rtx
1590 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1591 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1592 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1594 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1596 --LABEL_NUSES (old_label);
1597 ++LABEL_NUSES (new_label);
1602 /* This would have indicated an abnormal edge. */
1603 if (computed_jump_p (insn))
1606 /* A return instruction can't be redirected. */
1607 if (returnjump_p (insn))
1610 /* If the insn doesn't go where we think, we're confused. */
1611 if (JUMP_LABEL (insn) != old_label)
1614 redirect_jump (insn, new_label, 0);
1617 /* The new block now begins with the new label the jump targets. */
1617 emit_label_before (new_label, bb_note);
1618 bb->head = new_label;
1624 /* Queue instructions for insertion on an edge between two basic blocks.
1625 The new instructions and basic blocks (if any) will not appear in the
1626 CFG until commit_edge_insertions is called. */
1629 insert_insn_on_edge (pattern, e)
1633 /* We cannot insert instructions on an abnormal critical edge.
1634 It will be easier to find the culprit if we die now. */
1635 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1636 == (EDGE_ABNORMAL|EDGE_CRITICAL))
1639 /* Append PATTERN to the sequence already queued on E (starting a
1640 fresh sequence if none is pending). */
1639 if (e->insns == NULL_RTX)
1642 push_to_sequence (e->insns);
1644 emit_insn (pattern);
1646 e->insns = get_insns ();
1650 /* Update the CFG for the instructions queued on edge E: pick an
1651 insertion point (destination head, source end, or a freshly split
1652 block) and emit the queued sequence there. */
1653 commit_one_edge_insertion (e)
1656 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
1659 /* Pull the insns off the edge now since the edge might go away. */
1661 e->insns = NULL_RTX;
1663 /* Figure out where to put these things. If the destination has
1664 one predecessor, insert there. Except for the exit block. */
1665 if (e->dest->pred->pred_next == NULL
1666 && e->dest != EXIT_BLOCK_PTR)
1670 /* Get the location correct wrt a code label, and "nice" wrt
1671 a basic block note, and before everything else. */
1673 if (GET_CODE (tmp) == CODE_LABEL)
1674 tmp = NEXT_INSN (tmp);
1675 if (GET_CODE (tmp) == NOTE
1676 && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
1677 tmp = NEXT_INSN (tmp);
1678 if (tmp == bb->head)
1681 after = PREV_INSN (tmp);
1684 /* If the source has one successor and the edge is not abnormal,
1685 insert there. Except for the entry block. */
1686 else if ((e->flags & EDGE_ABNORMAL) == 0
1687 && e->src->succ->succ_next == NULL
1688 && e->src != ENTRY_BLOCK_PTR)
1691 /* It is possible to have a non-simple jump here. Consider a target
1692 where some forms of unconditional jumps clobber a register. This
1693 happens on the fr30 for example.
1695 We know this block has a single successor, so we can just emit
1696 the queued insns before the jump. */
1697 if (GET_CODE (bb->end) == JUMP_INSN)
1703 /* We'd better be fallthru, or we've lost track of what's what. */
1704 if ((e->flags & EDGE_FALLTHRU) == 0)
1711 /* Otherwise we must split the edge. */
1714 bb = split_edge (e);
1718 /* Now that we've found the spot, do the insertion. */
1720 /* Set the new block number for these insns, if structure is allocated. */
1721 if (basic_block_for_insn)
1724 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
1725 set_block_for_insn (i, bb);
1730 /* Emit before BEFORE or after AFTER, updating bb->head/end as needed
1731 (the updates themselves are presumably in the elided lines). */
1730 emit_insns_before (insns, before);
1731 if (before == bb->head)
1734 last = prev_nonnote_insn (before);
1738 last = emit_insns_after (insns, after);
1739 if (after == bb->end)
1743 if (returnjump_p (last))
1745 /* ??? Remove all outgoing edges from BB and add one for EXIT.
1746 This is not currently a problem because this only happens
1747 for the (single) epilogue, which already has a fallthru edge
1748 to EXIT. */
1751 if (e->dest != EXIT_BLOCK_PTR
1752 || e->succ_next != NULL
1753 || (e->flags & EDGE_FALLTHRU) == 0)
1755 e->flags &= ~EDGE_FALLTHRU;
1757 emit_barrier_after (last);
1761 flow_delete_insn (before);
1763 else if (GET_CODE (last) == JUMP_INSN)
1767 /* Update the CFG for all queued instructions: walk every block
1768 starting at ENTRY and commit the pending insertions on each of its
1769 outgoing edges. */
1770 commit_edge_insertions ()
1775 #ifdef ENABLE_CHECKING
1776 verify_flow_info ();
1780 bb = ENTRY_BLOCK_PTR;
1785 /* Cache the next pointer; commit_one_edge_insertion may restructure
1786 the edge list (e.g. by splitting the edge). */
1785 for (e = bb->succ; e ; e = next)
1787 next = e->succ_next;
1789 commit_one_edge_insertion (e);
1792 if (++i >= n_basic_blocks)
1794 bb = BASIC_BLOCK (i);
1798 /* Delete all unreachable basic blocks, found by a worklist-based
1799 forward reachability walk from the entry block's successors. */
1801 delete_unreachable_blocks ()
1803 basic_block *worklist, *tos;
1804 int deleted_handler;
1809 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
1811 /* Use basic_block->aux as a marker. Clear them all. */
1813 for (i = 0; i < n; ++i)
1814 BASIC_BLOCK (i)->aux = NULL;
1816 /* Add our starting points to the worklist. Almost always there will
1817 be only one. It isn't inconcievable that we might one day directly
1818 support Fortran alternate entry points. */
1820 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
1824 /* Mark the block with a handy non-null value. */
1828 /* Iterate: find everything reachable from what we've already seen. */
1830 while (tos != worklist)
1832 basic_block b = *--tos;
1834 for (e = b->succ; e ; e = e->succ_next)
1842 /* Delete all unreachable basic blocks. Count down so that we don't
1843 interfere with the block renumbering that happens in flow_delete_block. */
1845 deleted_handler = 0;
1847 for (i = n - 1; i >= 0; --i)
1849 basic_block b = BASIC_BLOCK (i);
1852 /* This block was found. Tidy up the mark. */
1855 deleted_handler |= flow_delete_block (b);
1858 tidy_fallthru_edges ();
1860 /* If we deleted an exception handler, we may have EH region begin/end
1861 blocks to remove as well. */
1862 if (deleted_handler)
1863 delete_eh_regions ();
1868 /* Find EH regions for which there is no longer a handler, and delete
1869 them by turning their region notes into NOTE_INSN_DELETED. */
1871 delete_eh_regions ()
1875 update_rethrow_references ();
1877 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1878 if (GET_CODE (insn) == NOTE)
1880 if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
1881 (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1883 int num = NOTE_EH_HANDLER (insn);
1884 /* A NULL handler indicates a region is no longer needed,
1885 as long as its rethrow label isn't used. */
1886 if (get_first_handler (num) == NULL && ! rethrow_used (num))
1888 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1889 NOTE_SOURCE_FILE (insn) = 0;
1895 /* Return true if NOTE is not one of the ones that must be kept paired,
1896 so that we may simply delete them. Only already-deleted notes and
1897 basic-block notes qualify. */
1899 can_delete_note_p (note)
1902 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
1903 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
1906 /* Unlink a chain of insns between START and FINISH, leaving notes
1907 that must be paired. */
1910 flow_delete_insn_chain (start, finish)
1913 /* Unchain the insns one by one. It would be quicker to delete all
1914 of these with a single unchaining, rather than one at a time, but
1915 we need to keep the NOTE's. */
1921 next = NEXT_INSN (start);
1922 /* Keep paired notes in place; skip them without deleting. */
1922 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
1924 else if (GET_CODE (start) == CODE_LABEL
1925 && ! can_delete_label_p (start))
1927 /* An undeletable label is demoted to a deleted-label note so its
1928 name survives for debugging, but it no longer acts as a label. */
1927 const char *name = LABEL_NAME (start);
1928 PUT_CODE (start, NOTE);
1929 NOTE_LINE_NUMBER (start) = NOTE_INSN_DELETED_LABEL;
1930 NOTE_SOURCE_FILE (start) = name;
1933 next = flow_delete_insn (start);
1935 if (start == finish)
1941 /* Delete the insns in a (non-live) block. We physically delete every
1942 non-deleted-note insn, and update the flow graph appropriately.
1944 Return nonzero if we deleted an exception handler. */
1946 /* ??? Preserving all such notes strikes me as wrong. It would be nice
1947 to post-process the stream to remove empty blocks, loops, ranges, etc. */
1950 flow_delete_block (b)
1953 int deleted_handler = 0;
1956 /* If the head of this block is a CODE_LABEL, then it might be the
1957 label for an exception handler which can't be reached.
1959 We need to remove the label from the exception_handler_label list
1960 and remove the associated NOTE_INSN_EH_REGION_BEG and
1961 NOTE_INSN_EH_REGION_END notes. */
1965 never_reached_warning (insn);
1967 if (GET_CODE (insn) == CODE_LABEL)
1969 rtx x, *prev = &exception_handler_labels;
1971 for (x = exception_handler_labels; x; x = XEXP (x, 1))
1973 if (XEXP (x, 0) == insn)
1975 /* Found a match, splice this label out of the EH label list. */
1976 *prev = XEXP (x, 1);
1977 XEXP (x, 1) = NULL_RTX;
1978 XEXP (x, 0) = NULL_RTX;
1980 /* Remove the handler from all regions */
1981 remove_handler (insn);
1982 deleted_handler = 1;
1985 prev = &XEXP (x, 1);
1989 /* Include any jump table following the basic block. */
1991 if (GET_CODE (end) == JUMP_INSN
1992 && (tmp = JUMP_LABEL (end)) != NULL_RTX
1993 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1994 && GET_CODE (tmp) == JUMP_INSN
1995 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1996 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1999 /* Include any barrier that may follow the basic block. */
2000 tmp = next_nonnote_insn (end);
2001 if (tmp && GET_CODE (tmp) == BARRIER)
2004 /* Selectively delete the entire chain. */
2005 flow_delete_insn_chain (insn, end);
2007 /* Remove the edges into and out of this block. Note that there may
2008 indeed be edges in, if we are removing an unreachable loop. */
2012 for (e = b->pred; e ; e = next)
2014 /* Splice E out of its source's successor list. */
2014 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
2017 next = e->pred_next;
2021 for (e = b->succ; e ; e = next)
2023 /* Splice E out of its destination's predecessor list. */
2023 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
2026 next = e->succ_next;
2035 /* Remove the basic block from the array, and compact behind it. */
2038 return deleted_handler;
2041 /* Remove block B from the basic block array and compact behind it.
2042 NOTE(review): the function name line is not visible here; in this
2043 era of flow.c this helper is presumably expunge_block -- confirm. */
2047 int i, n = n_basic_blocks;
2049 /* Shift every later block down one slot, renumbering as we go
2050 (the index update itself is in the elided lines, presumably). */
2049 for (i = b->index; i + 1 < n; ++i)
2051 basic_block x = BASIC_BLOCK (i + 1);
2052 BASIC_BLOCK (i) = x;
2056 basic_block_info->num_elements--;
2060 /* Delete INSN by patching it out of the insn chain. Return the next
2061 insn. Also maintains label use counts and the nonlocal-goto list. */
2063 flow_delete_insn (insn)
2066 rtx prev = PREV_INSN (insn);
2067 rtx next = NEXT_INSN (insn);
2070 PREV_INSN (insn) = NULL_RTX;
2071 NEXT_INSN (insn) = NULL_RTX;
2072 INSN_DELETED_P (insn) = 1;
2075 NEXT_INSN (prev) = next;
2077 PREV_INSN (next) = prev;
2079 /* Deleting the last insn: keep the chain's tail pointer valid. */
2079 set_last_insn (prev);
2081 if (GET_CODE (insn) == CODE_LABEL)
2082 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2084 /* If deleting a jump, decrement the use count of the label. Deleting
2085 the label itself should happen in the normal course of block merging. */
2086 if (GET_CODE (insn) == JUMP_INSN
2087 && JUMP_LABEL (insn)
2088 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
2089 LABEL_NUSES (JUMP_LABEL (insn))--;
2091 /* Also if deleting an insn that references a label. */
2092 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
2093 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2094 LABEL_NUSES (XEXP (note, 0))--;
2099 /* True if a given label can be deleted: it must not be preserved,
2100 forced, value-listed, an EH handler label, or user-declared. */
2102 can_delete_label_p (label)
2107 if (LABEL_PRESERVE_P (label))
2110 for (x = forced_labels; x ; x = XEXP (x, 1))
2111 if (label == XEXP (x, 0))
2113 for (x = label_value_list; x ; x = XEXP (x, 1))
2114 if (label == XEXP (x, 0))
2116 for (x = exception_handler_labels; x ; x = XEXP (x, 1))
2117 if (label == XEXP (x, 0))
2120 /* User declared labels must be preserved. */
2121 if (LABEL_NAME (label) != 0)
2127 /* True if LABEL appears on the tail recursion label list. */
2128 tail_recursion_label_p (label)
2133 for (x = tail_recursion_label_list; x ; x = XEXP (x, 1))
2134 if (label == XEXP (x, 0))
2140 /* Blocks A and B are to be merged into a single block A. The insns
2141 are already contiguous, hence `nomove'. */
2144 merge_blocks_nomove (a, b)
2148 rtx b_head, b_end, a_end;
2149 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2152 /* If there was a CODE_LABEL beginning B, delete it. */
2155 if (GET_CODE (b_head) == CODE_LABEL
2157 /* Detect basic blocks with nothing but a label. This can happen
2158 in particular at the end of a function. */
2159 if (b_head == b_end)
2161 del_first = del_last = b_head;
2162 b_head = NEXT_INSN (b_head);
2165 /* Delete the basic block note. */
2166 if (GET_CODE (b_head) == NOTE
2167 && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
2169 if (b_head == b_end)
2174 b_head = NEXT_INSN (b_head);
2177 /* If there was a jump out of A, delete it. */
2179 if (GET_CODE (a_end) == JUMP_INSN)
2183 prev = prev_nonnote_insn (a_end);
2190 /* If this was a conditional jump, we need to also delete
2191 the insn that set cc0. */
2192 if (prev && sets_cc0_p (prev))
2195 prev = prev_nonnote_insn (prev);
2205 /* Delete everything marked above as well as crap that might be
2206 hanging out between the two blocks. */
2207 flow_delete_insn_chain (del_first, del_last);
2209 /* Normally there should only be one successor of A and that is B, but
2210 partway though the merge of blocks for conditional_execution we'll
2211 be merging a TEST block with THEN and ELSE successors. Free the
2212 whole lot of them and hope the caller knows what they're doing. */
2214 remove_edge (a->succ);
2216 /* Adjust the edges out of B for the new owner. */
2217 for (e = b->succ; e ; e = e->succ_next)
2221 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
2222 b->pred = b->succ = NULL;
2224 /* Reassociate the insns of B with A. */
2227 if (basic_block_for_insn)
2229 BLOCK_FOR_INSN (b_head) = a;
2230 while (b_head != b_end)
2232 b_head = NEXT_INSN (b_head);
2233 BLOCK_FOR_INSN (b_head) = a;
2243 /* Blocks A and B are to be merged into a single block. A has no incoming
2244 fallthru edge, so it can be moved before B without adding or modifying
2245 any jumps (aside from the jump from A to B). */
2248 merge_blocks_move_predecessor_nojumps (a, b)
2251 rtx start, end, barrier;
2257 /* A must be followed by a barrier, which we remove before moving. */
2257 barrier = next_nonnote_insn (end);
2258 if (GET_CODE (barrier) != BARRIER)
2260 flow_delete_insn (barrier);
2262 /* Move block and loop notes out of the chain so that we do not
2263 disturb their order.
2265 ??? A better solution would be to squeeze out all the non-nested notes
2266 and adjust the block trees appropriately. Even better would be to have
2267 a tighter connection between block trees and rtl so that this is not
2268 necessary. */
2269 start = squeeze_notes (start, end);
2271 /* Scramble the insn chain. */
2272 if (end != PREV_INSN (b->head))
2273 reorder_insns (start, end, PREV_INSN (b->head));
2277 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2278 a->index, b->index);
2281 /* Swap the records for the two blocks around. Although we are deleting B,
2282 A is now where B was and we want to compact the BB array from where
2283 A used to be. */
2284 BASIC_BLOCK(a->index) = b;
2285 BASIC_BLOCK(b->index) = a;
2287 a->index = b->index;
2290 /* Now blocks A and B are contiguous. Merge them. */
2291 merge_blocks_nomove (a, b);
2296 /* Blocks A and B are to be merged into a single block. B has no outgoing
2297 fallthru edge, so it can be moved after A without adding or modifying
2298 any jumps (aside from the jump from A to B). */
2301 merge_blocks_move_successor_nojumps (a, b)
2304 rtx start, end, barrier;
2308 barrier = NEXT_INSN (end);
2310 /* Recognize a jump table following block B. */
2311 if (GET_CODE (barrier) == CODE_LABEL
2312 && NEXT_INSN (barrier)
2313 && GET_CODE (NEXT_INSN (barrier)) == JUMP_INSN
2314 && (GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_VEC
2315 || GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_DIFF_VEC))
2317 /* The table travels with the block: extend END past it. */
2317 end = NEXT_INSN (barrier);
2318 barrier = NEXT_INSN (end);
2321 /* There had better have been a barrier there. Delete it. */
2322 if (GET_CODE (barrier) != BARRIER)
2324 flow_delete_insn (barrier);
2326 /* Move block and loop notes out of the chain so that we do not
2327 disturb their order.
2329 ??? A better solution would be to squeeze out all the non-nested notes
2330 and adjust the block trees appropriately. Even better would be to have
2331 a tighter connection between block trees and rtl so that this is not
2332 necessary. */
2333 start = squeeze_notes (start, end);
2335 /* Scramble the insn chain. */
2336 reorder_insns (start, end, a->end);
2338 /* Now blocks A and B are contiguous. Merge them. */
2339 merge_blocks_nomove (a, b);
2343 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2344 b->index, a->index);
2350 /* Attempt to merge basic blocks that are potentially non-adjacent.
2351 Return true iff the attempt succeeded. E is the edge from B to C. */
2354 merge_blocks (e, b, c)
2358 /* If C has a tail recursion label, do not merge. There is no
2359 edge recorded from the call_placeholder back to this label, as
2360 that would make optimize_sibling_and_tail_recursive_calls more
2361 complex for no gain. */
2362 if (GET_CODE (c->head) == CODE_LABEL
2363 && tail_recursion_label_p (c->head))
2366 /* If B has a fallthru edge to C, no need to move anything. */
2367 if (e->flags & EDGE_FALLTHRU)
2369 merge_blocks_nomove (b, c);
2373 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2374 b->index, c->index);
2383 int c_has_outgoing_fallthru;
2384 int b_has_incoming_fallthru;
2386 /* We must make sure to not munge nesting of exception regions,
2387 lexical blocks, and loop notes.
2389 The first is taken care of by requiring that the active eh
2390 region at the end of one block always matches the active eh
2391 region at the beginning of the next block.
2393 The later two are taken care of by squeezing out all the notes. */
2395 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2396 executed and we may want to treat blocks which have two out
2397 edges, one normal, one abnormal as only having one edge for
2398 block merging purposes. */
2400 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2401 if (tmp_edge->flags & EDGE_FALLTHRU)
2403 c_has_outgoing_fallthru = (tmp_edge != NULL);
2405 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2406 if (tmp_edge->flags & EDGE_FALLTHRU)
2408 b_has_incoming_fallthru = (tmp_edge != NULL);
2410 /* If B does not have an incoming fallthru, and the exception regions
2411 match, then it can be moved immediately before C without introducing
2412 or modifying jumps.
2414 C can not be the first block, so we do not have to worry about
2415 accessing a non-existent block. */
2416 d = BASIC_BLOCK (c->index - 1);
2417 if (! b_has_incoming_fallthru
2418 && d->eh_end == b->eh_beg
2419 && b->eh_end == c->eh_beg)
2420 return merge_blocks_move_predecessor_nojumps (b, c);
2422 /* Otherwise, we're going to try to move C after B. Make sure the
2423 exception regions match.
2425 If B is the last basic block, then we must not try to access the
2426 block structure for block B + 1. Luckily in that case we do not
2427 need to worry about matching exception regions. */
2428 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2429 if (b->eh_end == c->eh_beg
2430 && (d == NULL || c->eh_end == d->eh_beg))
2432 /* If C does not have an outgoing fallthru, then it can be moved
2433 immediately after B without introducing or modifying jumps. */
2434 if (! c_has_outgoing_fallthru)
2435 return merge_blocks_move_successor_nojumps (b, c);
2437 /* Otherwise, we'll need to insert an extra jump, and possibly
2438 a new block to contain it. */
2439 /* ??? Not implemented yet. */
2446 /* Top level driver for merge_blocks. NOTE(review): the function name
2447 line is not visible in this extract -- presumably try_merge_blocks;
2448 confirm against the full source. */
2453 /* Attempt to merge blocks as made possible by edge removal. If a block
2454 has only one successor, and the successor has only one predecessor,
2455 they may be combined. */
2457 for (i = 0; i < n_basic_blocks; )
2459 basic_block c, b = BASIC_BLOCK (i);
2462 /* A loop because chains of blocks might be combineable. */
2463 while ((s = b->succ) != NULL
2464 && s->succ_next == NULL
2465 && (s->flags & EDGE_EH) == 0
2466 && (c = s->dest) != EXIT_BLOCK_PTR
2467 && c->pred->pred_next == NULL
2468 /* If the jump insn has side effects, we can't kill the edge. */
2469 && (GET_CODE (b->end) != JUMP_INSN
2470 || onlyjump_p (b->end))
2471 && merge_blocks (s, b, c))
2474 /* Don't get confused by the index shift caused by deleting blocks. */
2479 /* The given edge should potentially be a fallthru edge. If that is in
2480 fact true, delete the jump and barriers that are in the way. */
2483 tidy_fallthru_edge (e, b, c)
2489 /* ??? In a late-running flow pass, other folks may have deleted basic
2490 blocks by nopping out blocks, leaving multiple BARRIERs between here
2491 and the target label. They ought to be chastized and fixed.
2493 We can also wind up with a sequence of undeletable labels between
2494 one block and the next.
2496 So search through a sequence of barriers, labels, and notes for
2497 the head of block C and assert that we really do fall through. */
2499 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2502 /* Remove what will soon cease being the jump insn from the source block.
2503 If block B consisted only of this single jump, turn it into a deleted
2504 note. */
2506 if (GET_CODE (q) == JUMP_INSN
2508 && (any_uncondjump_p (q)
2509 || (b->succ == e && e->succ_next == NULL)))
2512 /* If this was a conditional jump, we need to also delete
2513 the insn that set cc0. */
2514 if (any_condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2521 /* Demote the jump to a deleted note rather than removing it. */
2521 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2522 NOTE_SOURCE_FILE (q) = 0;
2525 b->end = q = PREV_INSN (q);
2528 /* Selectively unlink the sequence. */
2529 if (q != PREV_INSN (c->head))
2530 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2532 e->flags |= EDGE_FALLTHRU;
2535 /* Fix up edges that now fall through, or rather should now fall through
2536 but previously required a jump around now deleted blocks. Simplify
2537 the search by only examining blocks numerically adjacent, since this
2538 is how find_basic_blocks created them. */
2541 tidy_fallthru_edges ()
/* Walk adjacent block pairs (B = I-1, C = I). */
2545 for (i = 1; i < n_basic_blocks; ++i)
2547 basic_block b = BASIC_BLOCK (i - 1);
2548 basic_block c = BASIC_BLOCK (i);
2551 /* We care about simple conditional or unconditional jumps with
2554 If we had a conditional branch to the next instruction when
2555 find_basic_blocks was called, then there will only be one
2556 out edge for the block which ended with the conditional
2557 branch (since we do not create duplicate edges).
2559 Furthermore, the edge will be marked as a fallthru because we
2560 merge the flags for the duplicate edges. So we do not want to
2561 check that the edge is not a FALLTHRU edge. */
2562 if ((s = b->succ) != NULL
2563 && s->succ_next == NULL
2565 /* If the jump insn has side effects, we can't tidy the edge. */
2566 && (GET_CODE (b->end) != JUMP_INSN
2567 || onlyjump_p (b->end)))
2568 tidy_fallthru_edge (s, b, c);
2572 /* Perform data flow analysis.
2573 F is the first insn of the function; FLAGS is a set of PROP_* flags
2574 to be used in accumulating flow info. */
2577 life_analysis (f, file, flags)
2582 #ifdef ELIMINABLE_REGS
2584 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2587 /* Record which registers will be eliminated. We use this in
2590 CLEAR_HARD_REG_SET (elim_reg_set);
2592 #ifdef ELIMINABLE_REGS
2593 for (i = 0; i < (int) (sizeof eliminables / sizeof eliminables[0]); i++)
2594 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2596 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
/* Restrict FLAGS to the minimal useful subset here.  */
2600 flags &= PROP_DEATH_NOTES | PROP_REG_INFO;
2602 /* The post-reload life analysis must have (on a global basis) the same
2603 registers live as were computed by reload itself, since register
2604 elimination has already happened; otherwise offsets and such may be
2604 incorrect.
2606 Reload marks some registers live even though they do not
2607 appear in the rtl. */
2608 if (reload_completed)
2609 flags &= ~PROP_REG_INFO;
2611 /* We want alias analysis information for local dead store elimination. */
2612 if (flags & PROP_SCAN_DEAD_CODE)
2613 init_alias_analysis ();
2615 /* Always remove no-op moves. Do this before other processing so
2616 that we don't have to keep re-scanning them. */
2617 delete_noop_moves (f);
2619 /* Some targets can emit simpler epilogues if they know that sp was
2620 not ever modified during the function. After reload, of course,
2621 we've already emitted the epilogue so there's no sense searching. */
2622 if (! reload_completed)
2623 notice_stack_pointer_modification (f);
2625 /* Allocate and zero out data structures that will record the
2626 data from lifetime analysis. */
2627 allocate_reg_life_data ();
2628 allocate_bb_life_data ();
2630 /* Find the set of registers live on function exit. */
2631 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2633 /* "Update" life info from zero. It'd be nice to begin the
2634 relaxation with just the exit and noreturn blocks, but that set
2635 is not immediately handy. */
/* Caller of update_life_info must zero regs_ever_live itself when
   asking for PROP_REG_INFO -- see the comment above update_life_info. */
2637 if (flags & PROP_REG_INFO)
2638 memset (regs_ever_live, 0, sizeof(regs_ever_live));
2639 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
2642 if (flags & PROP_SCAN_DEAD_CODE)
2643 end_alias_analysis ();
2646 dump_flow_info (file);
/* Keep basic_block_info (argument 1) but free the per-insn map etc. */
2648 free_basic_block_vars (1);
2651 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2652 Search for REGNO. If found, abort if it is not wider than word_mode. */
2655 verify_wide_reg_1 (px, pregno)
/* PREGNO points at the register number we are hunting for. */
2660 unsigned int regno = *(int *) pregno;
2662 if (GET_CODE (x) == REG && REGNO (x) == regno)
/* A multi-word pseudo must appear in a mode wider than a word;
   anything narrower means the split lost the kill -- abort. */
2664 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2671 /* A subroutine of verify_local_live_at_start. Search through insns
2672 between HEAD and END looking for register REGNO; verify_wide_reg_1
2672 aborts if it is found in a mode no wider than a word. */
2675 verify_wide_reg (regno, head, end)
/* Fixed: "(R)no" was a mis-encoded "&regno" (the '&reg' sequence had
   been corrupted into U+00AE).  verify_wide_reg_1 reads its DATA
   argument as a pointer to int, so the address of REGNO is required. */
2681 if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
2682 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2686 head = NEXT_INSN (head);
2689 /* We didn't find the register at all. Something's way screwy. */
2693 /* A subroutine of update_life_info. Verify that there are no untoward
2694 changes in live_at_start during a local update. */
2697 verify_local_live_at_start (new_live_at_start, bb)
2698 regset new_live_at_start;
2701 if (reload_completed)
2703 /* After reload, there are no pseudos, nor subregs of multi-word
2704 registers. The regsets should exactly match. */
2705 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2712 /* Find the set of changed registers. */
/* XOR leaves exactly the registers whose liveness differs. */
2713 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2715 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2717 /* No registers should die. */
2718 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2720 /* Verify that the now-live register is wider than word_mode. */
2721 verify_wide_reg (i, bb->head, bb->end);
2726 /* Updates life information starting with the basic blocks set in BLOCKS.
2727 If BLOCKS is null, consider it to be the universal set.
2729 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholeing,
2730 we are only expecting local modifications to basic blocks. If we find
2731 extra registers live at the beginning of a block, then we either killed
2732 useful data, or we have a broken split that wants data not provided.
2733 If we find registers removed from live_at_start, that means we have
2734 a broken peephole that is killing a register it shouldn't.
2736 ??? This is not true in one situation -- when a pre-reload splitter
2737 generates subregs of a multi-word pseudo, current life analysis will
2738 lose the kill. So we _can_ have a pseudo go live. How irritating.
2740 Including PROP_REG_INFO does not properly refresh regs_ever_live
2741 unless the caller resets it to zero. */
2744 update_life_info (blocks, extent, prop_flags)
2746 enum update_life_extent extent;
2750 regset_head tmp_head;
2753 tmp = INITIALIZE_REG_SET (tmp_head);
2755 /* For a global update, we go through the relaxation process again. */
2756 if (extent != UPDATE_LIFE_LOCAL)
2758 calculate_global_regs_live (blocks, blocks,
2759 prop_flags & PROP_SCAN_DEAD_CODE);
2761 /* If asked, remove notes from the blocks we'll update. */
2762 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2763 count_or_remove_death_notes (blocks, 1);
/* BLOCKS non-null: only re-propagate the requested blocks. */
2768 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2770 basic_block bb = BASIC_BLOCK (i);
/* Work on a copy of live_at_end; propagate_block mutates it into
   live_at_start. */
2772 COPY_REG_SET (tmp, bb->global_live_at_end);
2773 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2775 if (extent == UPDATE_LIFE_LOCAL)
2776 verify_local_live_at_start (tmp, bb);
/* BLOCKS null: process every block, in reverse order. */
2781 for (i = n_basic_blocks - 1; i >= 0; --i)
2783 basic_block bb = BASIC_BLOCK (i);
2785 COPY_REG_SET (tmp, bb->global_live_at_end);
2786 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2788 if (extent == UPDATE_LIFE_LOCAL)
2789 verify_local_live_at_start (tmp, bb);
2795 if (prop_flags & PROP_REG_INFO)
2797 /* The only pseudos that are live at the beginning of the function
2798 are those that were not set anywhere in the function. local-alloc
2799 doesn't know how to handle these correctly, so mark them as not
2800 local to any one basic block. */
2801 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2802 FIRST_PSEUDO_REGISTER, i,
2803 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2805 /* We have a problem with any pseudoreg that lives across the setjmp.
2806 ANSI says that if a user variable does not change in value between
2807 the setjmp and the longjmp, then the longjmp preserves it. This
2808 includes longjmp from a place where the pseudo appears dead.
2809 (In principle, the value still exists if it is in scope.)
2810 If the pseudo goes in a hard reg, some other value may occupy
2811 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2812 Conclusion: such a pseudo must not go in a hard reg. */
2813 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2814 FIRST_PSEUDO_REGISTER, i,
2816 if (regno_reg_rtx[i] != 0)
/* A live length of -1 tells the allocator to keep it in memory. */
2818 REG_LIVE_LENGTH (i) = -1;
2819 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
2825 /* Free the variables allocated by find_basic_blocks.
2827 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2830 free_basic_block_vars (keep_head_end_p)
2831 int keep_head_end_p;
/* The insn -> block map is always released. */
2833 if (basic_block_for_insn)
2835 VARRAY_FREE (basic_block_for_insn);
2836 basic_block_for_insn = NULL;
2839 if (! keep_head_end_p)
2842 VARRAY_FREE (basic_block_info);
/* Clear the dangling entry/exit block pointers as well. */
2845 ENTRY_BLOCK_PTR->aux = NULL;
2846 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2847 EXIT_BLOCK_PTR->aux = NULL;
2848 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2852 /* Return nonzero if the destination of SET equals the source. */
2857 rtx src = SET_SRC (set);
2858 rtx dst = SET_DEST (set);
/* Look through matching SUBREGs: (subreg R n) = (subreg R n) is a
   no-op iff the words match and the inner registers are identical. */
2860 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
2862 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
2864 src = SUBREG_REG (src);
2865 dst = SUBREG_REG (dst);
2868 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
2869 && REGNO (src) == REGNO (dst));
2872 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2878 rtx pat = PATTERN (insn);
2880 /* Insns carrying these notes are useful later on. */
2881 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
/* A single no-op SET makes the whole insn a no-op. */
2884 if (GET_CODE (pat) == SET && set_noop_p (pat))
2887 if (GET_CODE (pat) == PARALLEL)
2890 /* If nothing but SETs of registers to themselves,
2891 this insn can also be deleted. */
2892 for (i = 0; i < XVECLEN (pat, 0); i++)
2894 rtx tem = XVECEXP (pat, 0, i);
/* USE and CLOBBER elements are harmless and ignored here. */
2896 if (GET_CODE (tem) == USE
2897 || GET_CODE (tem) == CLOBBER)
2900 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2909 /* Delete any insns that copy a register to itself.
2909 F is the first insn of the function. */
2912 delete_noop_moves (f)
2916 for (insn = f; insn; insn = NEXT_INSN (insn))
2918 if (GET_CODE (insn) == INSN && noop_move_p (insn))
/* Deletion here is done by turning the insn into a note rather
   than unlinking it, so the insn chain stays intact. */
2920 PUT_CODE (insn, NOTE);
2921 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2922 NOTE_SOURCE_FILE (insn) = 0;
2927 /* Determine if the stack pointer is constant over the life of the function.
2928 Only useful before prologues have been emitted. */
/* note_stores callback: clear current_function_sp_is_unchanging if X
   (a store destination) modifies the stack pointer. */
2931 notice_stack_pointer_modification_1 (x, pat, data)
2933 rtx pat ATTRIBUTE_UNUSED;
2934 void *data ATTRIBUTE_UNUSED;
2936 if (x == stack_pointer_rtx
2937 /* The stack pointer is only modified indirectly as the result
2938 of a push until later in flow. See the comments in rtl.texi
2939 regarding Embedded Side-Effects on Addresses. */
2940 || (GET_CODE (x) == MEM
2941 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2942 || GET_CODE (XEXP (x, 0)) == PRE_INC
2943 || GET_CODE (XEXP (x, 0)) == POST_DEC
2944 || GET_CODE (XEXP (x, 0)) == POST_INC)
2945 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2946 current_function_sp_is_unchanging = 0;
/* Scan all insns from F for stores to the stack pointer, setting
   current_function_sp_is_unchanging accordingly. */
2950 notice_stack_pointer_modification (f)
2955 /* Assume that the stack pointer is unchanging if alloca hasn't
2957 current_function_sp_is_unchanging = !current_function_calls_alloca;
2958 if (! current_function_sp_is_unchanging)
2961 for (insn = f; insn; insn = NEXT_INSN (insn))
2963 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2965 /* Check if insn modifies the stack pointer. */
2966 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
/* Stop at the first modification found. */
2968 if (! current_function_sp_is_unchanging)
2974 /* Mark a register in SET. Hard registers in large modes get all
2975 of their component registers set as well. */
2977 mark_reg (reg, xset)
2981 regset set = (regset) xset;
2982 int regno = REGNO (reg);
/* BLKmode registers have no meaningful component decomposition. */
2984 if (GET_MODE (reg) == BLKmode)
2987 SET_REGNO_REG_SET (set, regno);
2988 if (regno < FIRST_PSEUDO_REGISTER)
2990 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
/* Also mark each of the N component hard registers. */
2992 SET_REGNO_REG_SET (set, regno + n);
2996 /* Mark those regs which are needed at the end of the function as live
2997 at the end of the last basic block. */
2999 mark_regs_live_at_end (set)
3004 /* If exiting needs the right stack value, consider the stack pointer
3005 live at the end of the function. */
3006 if ((HAVE_epilogue && reload_completed)
3007 || ! EXIT_IGNORE_STACK
3008 || (! FRAME_POINTER_REQUIRED
3009 && ! current_function_calls_alloca
3010 && flag_omit_frame_pointer)
3011 || current_function_sp_is_unchanging)
3013 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
3016 /* Mark the frame pointer if needed at the end of the function. If
3017 we end up eliminating it, it will be removed from the live list
3018 of each basic block by reload. */
3020 if (! reload_completed || frame_pointer_needed)
3022 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
3023 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3024 /* If they are different, also mark the hard frame pointer as live */
3025 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
3029 #ifdef PIC_OFFSET_TABLE_REGNUM
3030 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
3031 /* Many architectures have a GP register even without flag_pic.
3032 Assume the pic register is not in use, or will be handled by
3033 other means, if it is not fixed. */
3034 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3035 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
3039 /* Mark all global registers, and all registers used by the epilogue
3040 as being live at the end of the function since they may be
3041 referenced by our caller. */
3042 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3044 #ifdef EPILOGUE_USES
3045 || EPILOGUE_USES (i)
3048 SET_REGNO_REG_SET (set, i);
3050 /* Mark all call-saved registers that we actually used. */
3051 if (HAVE_epilogue && reload_completed)
3053 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3054 if (! call_used_regs[i] && regs_ever_live[i])
3055 SET_REGNO_REG_SET (set, i);
3058 /* Mark function return value. */
3059 diddle_return_value (mark_reg, set);
3062 /* Callback function for for_each_successor_phi. DATA is a regset.
3063 Sets the SRC_REGNO, the regno of the phi alternative for phi node
3064 INSN, in the regset. */
3067 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
3068 rtx insn ATTRIBUTE_UNUSED;
3069 int dest_regno ATTRIBUTE_UNUSED;
/* Only the source regno matters: it becomes live along this edge. */
3073 regset live = (regset) data;
3074 SET_REGNO_REG_SET (live, src_regno);
3078 /* Propagate global life info around the graph of basic blocks. Begin
3079 considering blocks with their corresponding bit set in BLOCKS_IN.
3080 If BLOCKS_IN is null, consider it the universal set.
3082 BLOCKS_OUT is set for every block that was changed. */
3085 calculate_global_regs_live (blocks_in, blocks_out, flags)
3086 sbitmap blocks_in, blocks_out;
3089 basic_block *queue, *qhead, *qtail, *qend;
3090 regset tmp, new_live_at_end;
3091 regset_head tmp_head;
3092 regset_head new_live_at_end_head;
3095 tmp = INITIALIZE_REG_SET (tmp_head);
3096 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
3098 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
3099 because the `head == tail' style test for an empty queue doesn't
3100 work with a full queue. */
3101 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
3103 qhead = qend = queue + n_basic_blocks + 2;
3105 /* Clear out the garbage that might be hanging out in bb->aux. */
3106 for (i = n_basic_blocks - 1; i >= 0; --i)
3107 BASIC_BLOCK (i)->aux = NULL;
3109 /* Queue the blocks set in the initial mask. Do this in reverse block
3110 number order so that we are more likely for the first round to do
3111 useful work. We use AUX non-null to flag that the block is queued. */
3114 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3116 basic_block bb = BASIC_BLOCK (i);
/* BLOCKS_IN null: queue every block. */
3123 for (i = 0; i < n_basic_blocks; ++i)
3125 basic_block bb = BASIC_BLOCK (i);
3132 sbitmap_zero (blocks_out);
/* Iterate until the worklist is empty (a standard backward dataflow
   relaxation). */
3134 while (qhead != qtail)
3136 int rescan, changed;
3145 /* Begin by propagating live_at_start from the successor blocks. */
3146 CLEAR_REG_SET (new_live_at_end)
3147 for (e = bb->succ; e ; e = e->succ_next)
3149 basic_block sb = e->dest;
3150 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3153 /* Force the stack pointer to be live -- which might not already be
3154 the case for blocks within infinite loops. */
3155 SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
3157 /* Regs used in phi nodes are not included in
3158 global_live_at_start, since they are live only along a
3159 particular edge. Set those regs that are live because of a
3160 phi node alternative corresponding to this particular block. */
3162 for_each_successor_phi (bb, &set_phi_alternative_reg,
3165 if (bb == ENTRY_BLOCK_PTR)
3167 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3171 /* On our first pass through this block, we'll go ahead and continue.
3172 Recognize first pass by local_set NULL. On subsequent passes, we
3173 get to skip out early if live_at_end wouldn't have changed. */
3175 if (bb->local_set == NULL)
3177 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3182 /* If any bits were removed from live_at_end, we'll have to
3183 rescan the block. This wouldn't be necessary if we had
3184 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3185 local_live is really dependent on live_at_end. */
3186 CLEAR_REG_SET (tmp);
3187 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3188 new_live_at_end, BITMAP_AND_COMPL);
3192 /* Find the set of changed bits. Take this opportunity
3193 to notice that this set is empty and early out. */
3194 CLEAR_REG_SET (tmp);
3195 changed = bitmap_operation (tmp, bb->global_live_at_end,
3196 new_live_at_end, BITMAP_XOR);
3200 /* If any of the changed bits overlap with local_set,
3201 we'll have to rescan the block. Detect overlap by
3202 the AND with ~local_set turning off bits. */
3203 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3208 /* Let our caller know that BB changed enough to require its
3209 death notes updated. */
3211 SET_BIT (blocks_out, bb->index);
3215 /* Add to live_at_start the set of all registers in
3216 new_live_at_end that aren't in the old live_at_end. */
3218 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3220 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3222 changed = bitmap_operation (bb->global_live_at_start,
3223 bb->global_live_at_start,
3230 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3232 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3233 into live_at_start. */
3234 propagate_block (bb, new_live_at_end, bb->local_set, flags);
3236 /* If live_at start didn't change, no need to go farther. */
3237 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3240 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3243 /* Queue all predecessors of BB so that we may re-examine
3244 their live_at_end. */
3245 for (e = bb->pred; e ; e = e->pred_next)
3247 basic_block pb = e->src;
3248 if (pb->aux == NULL)
3259 FREE_REG_SET (new_live_at_end);
/* BLOCKS_OUT non-null: free local_set only for the changed blocks. */
3263 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3265 basic_block bb = BASIC_BLOCK (i);
3266 FREE_REG_SET (bb->local_set);
/* Otherwise free local_set for every block. */
3271 for (i = n_basic_blocks - 1; i >= 0; --i)
3273 basic_block bb = BASIC_BLOCK (i);
3274 FREE_REG_SET (bb->local_set);
3281 /* Subroutines of life analysis. */
3283 /* Allocate the permanent data structures that represent the results
3284 of life analysis. Not static since used also for stupid life analysis. */
3287 allocate_bb_life_data ()
3291 for (i = 0; i < n_basic_blocks; i++)
3293 basic_block bb = BASIC_BLOCK (i);
/* Per-block live-register sets, allocated on the function obstack. */
3295 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3296 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
/* The ENTRY/EXIT dummy blocks each need only one of the two sets. */
3299 ENTRY_BLOCK_PTR->global_live_at_end
3300 = OBSTACK_ALLOC_REG_SET (function_obstack);
3301 EXIT_BLOCK_PTR->global_live_at_start
3302 = OBSTACK_ALLOC_REG_SET (function_obstack);
3304 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
/* (Re)allocate and reset the per-register statistics gathered by
   propagate_block and its subroutines. */
3308 allocate_reg_life_data ()
3312 max_regno = max_reg_num ();
3314 /* Recalculate the register space, in case it has grown. Old style
3315 vector oriented regsets would set regset_{size,bytes} here also. */
3316 allocate_reg_info (max_regno, FALSE, FALSE);
3318 /* Reset all the data we'll collect in propagate_block and its
3320 for (i = 0; i < max_regno; i++)
3324 REG_N_DEATHS (i) = 0;
3325 REG_N_CALLS_CROSSED (i) = 0;
3326 REG_LIVE_LENGTH (i) = 0;
3327 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3331 /* Delete dead instructions for propagate_block. */
3334 propagate_block_delete_insn (bb, insn)
3338 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
3340 /* If the insn referred to a label, and that label was attached to
3341 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
3342 pretty much mandatory to delete it, because the ADDR_VEC may be
3343 referencing labels that no longer exist. */
3347 rtx label = XEXP (inote, 0);
/* Only do this when INSN held the sole remaining use of the label and
   the label heads a jump table. */
3350 if (LABEL_NUSES (label) == 1
3351 && (next = next_nonnote_insn (label)) != NULL
3352 && GET_CODE (next) == JUMP_INSN
3353 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3354 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3356 rtx pat = PATTERN (next);
3357 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3358 int len = XVECLEN (pat, diff_vec_p);
/* Drop the use count of every label the dispatch table references. */
3361 for (i = 0; i < len; i++)
3362 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3364 flow_delete_insn (next);
/* Keep BB->end valid if we are deleting the block's last insn. */
3368 if (bb->end == insn)
3369 bb->end = PREV_INSN (insn);
3370 flow_delete_insn (insn);
3373 /* Delete dead libcalls for propagate_block. Return the insn
3374 before the libcall. NOTE is the REG_RETVAL note on INSN, whose
3374 operand points at the first insn of the libcall sequence. */
3377 propagate_block_delete_libcall (bb, insn, note)
3381 rtx first = XEXP (note, 0);
3382 rtx before = PREV_INSN (first);
/* Keep BB->end valid when the libcall ends the block. */
3384 if (insn == bb->end)
3387 flow_delete_insn_chain (first, insn);
3391 /* Update the life-status of regs for one insn. Return the previous insn. */
3394 propagate_one_insn (pbi, insn)
3395 struct propagate_block_info *pbi;
3398 rtx prev = PREV_INSN (insn);
3399 int flags = pbi->flags;
3400 int insn_is_dead = 0;
3401 int libcall_is_dead = 0;
/* Notes, barriers etc. carry no life information. */
3405 if (! INSN_P (insn))
3408 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3409 if (flags & PROP_SCAN_DEAD_CODE)
3411 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0,
3413 libcall_is_dead = (insn_is_dead && note != 0
3414 && libcall_dead_p (pbi, PATTERN (insn),
3418 /* We almost certainly don't want to delete prologue or epilogue
3419 instructions. Warn about probable compiler lossage. */
3422 && (((HAVE_epilogue || HAVE_prologue)
3423 && prologue_epilogue_contains (insn))
3424 || (HAVE_sibcall_epilogue
3425 && sibcall_epilogue_contains (insn))))
3427 if (flags & PROP_KILL_DEAD_CODE)
3429 warning ("ICE: would have deleted prologue/epilogue insn");
3430 if (!inhibit_warnings)
/* Force the insn to be kept alive. */
3433 libcall_is_dead = insn_is_dead = 0;
3436 /* If an instruction consists of just dead store(s) on final pass,
3438 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3440 /* Record sets. Do this even for dead instructions, since they
3441 would have killed the values if they hadn't been deleted. */
3442 mark_set_regs (pbi, PATTERN (insn), insn);
3444 /* CC0 is now known to be dead. Either this insn used it,
3445 in which case it doesn't anymore, or clobbered it,
3446 so the next insn can't use it. */
3449 if (libcall_is_dead)
/* Delete the whole libcall sequence and resume after it. */
3451 prev = propagate_block_delete_libcall (pbi->bb, insn, note);
3452 insn = NEXT_INSN (prev);
3455 propagate_block_delete_insn (pbi->bb, insn);
3460 /* See if this is an increment or decrement that can be merged into
3461 a following memory address. */
3464 register rtx x = single_set (insn);
3466 /* Does this instruction increment or decrement a register? */
3467 if (!reload_completed
3468 && (flags & PROP_AUTOINC)
3470 && GET_CODE (SET_DEST (x)) == REG
3471 && (GET_CODE (SET_SRC (x)) == PLUS
3472 || GET_CODE (SET_SRC (x)) == MINUS)
3473 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3474 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3475 /* Ok, look for a following memory ref we can combine with.
3476 If one is found, change the memory ref to a PRE_INC
3477 or PRE_DEC, cancel this insn, and return 1.
3478 Return 0 if nothing has been done. */
3479 && try_pre_increment_1 (pbi, insn))
3482 #endif /* AUTO_INC_DEC */
3484 CLEAR_REG_SET (pbi->new_set);
3486 /* If this is not the final pass, and this insn is copying the value of
3487 a library call and it's dead, don't scan the insns that perform the
3488 library call, so that the call's arguments are not marked live. */
3489 if (libcall_is_dead)
3491 /* Record the death of the dest reg. */
3492 mark_set_regs (pbi, PATTERN (insn), insn);
3494 insn = XEXP (note, 0);
3495 return PREV_INSN (insn);
3497 else if (GET_CODE (PATTERN (insn)) == SET
3498 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3499 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3500 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3501 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3502 /* We have an insn to pop a constant amount off the stack.
3503 (Such insns use PLUS regardless of the direction of the stack,
3504 and any insn to adjust the stack by a constant is always a pop.)
3505 These insns, if not dead stores, have no effect on life. */
3509 /* Any regs live at the time of a call instruction must not go
3510 in a register clobbered by calls. Find all regs now live and
3511 record this for them. */
3513 if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
3514 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3515 { REG_N_CALLS_CROSSED (i)++; });
3517 /* Record sets. Do this even for dead instructions, since they
3518 would have killed the values if they hadn't been deleted. */
3519 mark_set_regs (pbi, PATTERN (insn), insn);
3521 if (GET_CODE (insn) == CALL_INSN)
/* A conditionally-executed call only conditionally clobbers. */
3527 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3528 cond = COND_EXEC_TEST (PATTERN (insn));
3530 /* Non-constant calls clobber memory. */
3531 if (! CONST_CALL_P (insn))
3532 free_EXPR_LIST_list (&pbi->mem_set_list);
3534 /* There may be extra registers to be clobbered. */
3535 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3537 note = XEXP (note, 1))
3538 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3539 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
3540 cond, insn, pbi->flags);
3542 /* Calls change all call-used and global registers. */
3543 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3544 if (call_used_regs[i] && ! global_regs[i]
3547 /* We do not want REG_UNUSED notes for these registers. */
3548 mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
3550 pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
3554 /* If an insn doesn't use CC0, it becomes dead since we assume
3555 that every insn clobbers it. So show it dead here;
3556 mark_used_regs will set it live if it is referenced. */
3561 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
3563 /* Sometimes we may have inserted something before INSN (such as a move)
3564 when we make an auto-inc. So ensure we will scan those insns. */
3566 prev = PREV_INSN (insn);
3569 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3575 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3576 cond = COND_EXEC_TEST (PATTERN (insn));
3578 /* Calls use their arguments. */
3579 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3581 note = XEXP (note, 1))
3582 if (GET_CODE (XEXP (note, 0)) == USE)
3583 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
3586 /* The stack ptr is used (honorarily) by a CALL insn. */
3587 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
3589 /* Calls may also reference any of the global registers,
3590 so they are made live. */
3591 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3593 mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
3598 /* On final pass, update counts of how many insns in which each reg
3600 if (flags & PROP_REG_INFO)
3601 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3602 { REG_LIVE_LENGTH (i)++; });
3607 /* Initialize a propagate_block_info struct for public consumption.
3608 Note that the structure itself is opaque to this file, but that
3609 the user can use the regsets provided here. */
3611 struct propagate_block_info *
3612 init_propagate_block_info (bb, live, local_set, flags)
3618 struct propagate_block_info *pbi = xmalloc (sizeof(*pbi));
3621 pbi->reg_live = live;
3622 pbi->mem_set_list = NULL_RTX;
3623 pbi->local_set = local_set;
/* reg_next_use is only needed when building log links or auto-incs. */
3627 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3628 pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
3630 pbi->reg_next_use = NULL;
3632 pbi->new_set = BITMAP_XMALLOC ();
3634 #ifdef HAVE_conditional_execution
3635 pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
3636 free_reg_cond_life_info);
3637 pbi->reg_cond_reg = BITMAP_XMALLOC ();
3639 /* If this block ends in a conditional branch, for each register live
3640 from one side of the branch and not the other, record the register
3641 as conditionally dead. */
3642 if (GET_CODE (bb->end) == JUMP_INSN
3643 && any_condjump_p (bb->end))
3645 regset_head diff_head;
3646 regset diff = INITIALIZE_REG_SET (diff_head);
3647 basic_block bb_true, bb_false;
3648 rtx cond_true, cond_false;
3651 /* Identify the successor blocks. */
3652 bb_true = bb->succ->dest;
3653 if (bb->succ->succ_next != NULL)
3655 bb_false = bb->succ->succ_next->dest;
/* Swap so that bb_true is the branch-taken target and bb_false is
   the fallthrough target. */
3657 if (bb->succ->flags & EDGE_FALLTHRU)
3659 basic_block t = bb_false;
3663 else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
3668 /* This can happen with a conditional jump to the next insn. */
3669 if (JUMP_LABEL (bb->end) != bb_true->head)
3672 /* Simplest way to do nothing. */
3676 /* Extract the condition from the branch. */
3677 cond_true = XEXP (SET_SRC (PATTERN (bb->end)), 0);
3678 cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
3679 GET_MODE (cond_true), XEXP (cond_true, 0),
3680 XEXP (cond_true, 1));
/* If the jump falls through on "true", the sense is inverted. */
3681 if (GET_CODE (XEXP (SET_SRC (PATTERN (bb->end)), 1)) == PC)
3684 cond_false = cond_true;
3688 /* Compute which registers lead different lives in the successors. */
3689 if (bitmap_operation (diff, bb_true->global_live_at_start,
3690 bb_false->global_live_at_start, BITMAP_XOR))
/* Only a condition on a REG can be tracked. */
3692 if (GET_CODE (XEXP (cond_true, 0)) != REG)
3694 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond_true, 0)));
3696 /* For each such register, mark it conditionally dead. */
3697 EXECUTE_IF_SET_IN_REG_SET
3700 struct reg_cond_life_info *rcli;
3703 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
/* Record the condition under which the register is dead. */
3705 if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
3709 rcli->condition = alloc_EXPR_LIST (0, cond, NULL_RTX);
3711 splay_tree_insert (pbi->reg_cond_dead, i,
3712 (splay_tree_value) rcli);
3716 FREE_REG_SET (diff);
3723 /* Release a propagate_block_info struct and everything it owns.
3723 The regsets passed in by the caller are NOT freed here. */
3726 free_propagate_block_info (pbi)
3727 struct propagate_block_info *pbi;
3729 free_EXPR_LIST_list (&pbi->mem_set_list);
3731 BITMAP_XFREE (pbi->new_set);
3733 #ifdef HAVE_conditional_execution
3734 splay_tree_delete (pbi->reg_cond_dead);
3735 BITMAP_XFREE (pbi->reg_cond_reg);
/* reg_next_use is only allocated under PROP_LOG_LINKS | PROP_AUTOINC. */
3738 if (pbi->reg_next_use)
3739 free (pbi->reg_next_use);
3744 /* Compute the registers live at the beginning of a basic block BB from
3745 those live at the end.
3747 When called, REG_LIVE contains those live at the end. On return, it
3748 contains those live at the beginning.
3750 LOCAL_SET, if non-null, will be set with all registers killed by
3751 this basic block. */
3754 propagate_block (bb, live, local_set, flags)
3760 struct propagate_block_info *pbi;
3763 pbi = init_propagate_block_info (bb, live, local_set, flags);
3765 if (flags & PROP_REG_INFO)
3769 /* Process the regs live at the end of the block.
3770 Mark them as not local to any one basic block. */
3771 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
3772 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
3775 /* Scan the block an insn at a time from end to beginning. */
3777 for (insn = bb->end; ; insn = prev)
3779 /* If this is a call to `setjmp' et al, warn if any
3780 non-volatile datum is live. */
3781 if ((flags & PROP_REG_INFO)
3782 && GET_CODE (insn) == NOTE
3783 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3784 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
/* propagate_one_insn returns the next insn to process (the one
   before INSN, possibly adjusted for deletions/insertions). */
3786 prev = propagate_one_insn (pbi, insn);
3788 if (insn == bb->head)
3792 free_propagate_block_info (pbi);
3795 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3796 (SET expressions whose destinations are registers dead after the insn).
3797 NEEDED is the regset that says which regs are alive after the insn.
3799 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3801 If X is the entire body of an insn, NOTES contains the reg notes
3802 pertaining to the insn. */
/* NOTE(review): this excerpt is missing lines (returns, braces), so
   several decisions below appear without their result statements.  */
3805 insn_dead_p (pbi, x, call_ok, notes)
3806 struct propagate_block_info *pbi;
3809 rtx notes ATTRIBUTE_UNUSED;
3811 enum rtx_code code = GET_CODE (x);
3814 /* If flow is invoked after reload, we must take existing AUTO_INC
3815 expresions into account. */
3816 if (reload_completed)
3818 for ( ; notes; notes = XEXP (notes, 1))
3820 if (REG_NOTE_KIND (notes) == REG_INC)
3822 int regno = REGNO (XEXP (notes, 0));
3824 /* Don't delete insns to set global regs. */
3825 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3826 || REGNO_REG_SET_P (pbi->reg_live, regno))
3833 /* If setting something that's a reg or part of one,
3834 see if that register's altered value will be live. */
3838 rtx r = SET_DEST (x);
/* A set of the condition-code register is dead only if nothing below
   reads cc0.  */
3841 if (GET_CODE (r) == CC0)
3842 return ! pbi->cc0_live;
3845 /* A SET that is a subroutine call cannot be dead. */
3846 if (GET_CODE (SET_SRC (x)) == CALL)
3852 /* Don't eliminate loads from volatile memory or volatile asms. */
3853 else if (volatile_refs_p (SET_SRC (x)))
3856 if (GET_CODE (r) == MEM)
3860 if (MEM_VOLATILE_P (r))
3863 /* Walk the set of memory locations we are currently tracking
3864 and see if one is an identical match to this memory location.
3865 If so, this memory write is dead (remember, we're walking
3866 backwards from the end of the block to the start). */
3867 temp = pbi->mem_set_list;
3870 if (rtx_equal_p (XEXP (temp, 0), r))
3872 temp = XEXP (temp, 1);
/* Strip wrappers to reach the underlying REG, if any.  */
3877 while (GET_CODE (r) == SUBREG
3878 || GET_CODE (r) == STRICT_LOW_PART
3879 || GET_CODE (r) == ZERO_EXTRACT)
3882 if (GET_CODE (r) == REG)
3884 int regno = REGNO (r);
/* The destination itself is still live after this insn.  */
3887 if (REGNO_REG_SET_P (pbi->reg_live, regno))
3890 /* If this is a hard register, verify that subsequent
3891 words are not needed. */
3892 if (regno < FIRST_PSEUDO_REGISTER)
3894 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3897 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
3901 /* Don't delete insns to set global regs. */
3902 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3905 /* Make sure insns to set the stack pointer aren't deleted. */
3906 if (regno == STACK_POINTER_REGNUM)
3909 /* Make sure insns to set the frame pointer aren't deleted. */
3910 if (regno == FRAME_POINTER_REGNUM
3911 && (! reload_completed || frame_pointer_needed))
3913 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3914 if (regno == HARD_FRAME_POINTER_REGNUM
3915 && (! reload_completed || frame_pointer_needed))
3919 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3920 /* Make sure insns to set arg pointer are never deleted
3921 (if the arg pointer isn't fixed, there will be a USE
3922 for it, so we can treat it normally). */
3923 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3927 /* Otherwise, the set is dead. */
3933 /* If performing several activities, insn is dead if each activity
3934 is individually dead. Also, CLOBBERs and USEs can be ignored; a
3935 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
3937 else if (code == PARALLEL)
3939 int i = XVECLEN (x, 0);
/* Recurse over each element; any live SET keeps the whole insn.  */
3941 for (i--; i >= 0; i--)
3942 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3943 && GET_CODE (XVECEXP (x, 0, i)) != USE
3944 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
3950 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3951 is not necessarily true for hard registers. */
3952 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3953 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3954 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
3957 /* We do not check other CLOBBER or USE here. An insn consisting of just
3958 a CLOBBER or just a USE should not be deleted. */
3962 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3963 return 1 if the entire library call is dead.
3964 This is true if X copies a register (hard or pseudo)
3965 and if the hard return reg of the call insn is dead.
3966 (The caller should have tested the destination of X already for death.)
3968 If this insn doesn't just copy a register, then we don't
3969 have an ordinary libcall. In that case, cse could not have
3970 managed to substitute the source for the dest later on,
3971 so we can assume the libcall is dead.
3973 NEEDED is the bit vector of pseudoregs live before this insn.
3974 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
/* NOTE(review): lines are missing from this excerpt (braces, some
   returns); read alongside the full source.  */
3977 libcall_dead_p (pbi, x, note, insn)
3978 struct propagate_block_info *pbi;
3983 register RTX_CODE code = GET_CODE (x);
3987 register rtx r = SET_SRC (x);
/* Only a plain register copy marks an ordinary libcall tail.  */
3988 if (GET_CODE (r) == REG)
/* The REG_RETVAL note points back at the first insn of the libcall
   sequence; scan forward from there for the CALL_INSN.  */
3990 rtx call = XEXP (note, 0);
3994 /* Find the call insn. */
3995 while (call != insn && GET_CODE (call) != CALL_INSN)
3996 call = NEXT_INSN (call);
3998 /* If there is none, do nothing special,
3999 since ordinary death handling can understand these insns. */
4003 /* See if the hard reg holding the value is dead.
4004 If this is a PARALLEL, find the call within it. */
4005 call_pat = PATTERN (call);
4006 if (GET_CODE (call_pat) == PARALLEL)
4008 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
4009 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
4010 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
4013 /* This may be a library call that is returning a value
4014 via invisible pointer. Do nothing special, since
4015 ordinary death handling can understand these insns. */
4019 call_pat = XVECEXP (call_pat, 0, i);
/* CALL_OK is passed as 1: the embedded CALL must not by itself make
   the insn needed.  */
4022 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
4028 /* Return 1 if register REGNO was used before it was set, i.e. if it is
4029 live at function entry. Don't count global register variables, variables
4030 in registers that can be used for function arg passing, or variables in
4031 fixed hard registers. */
4034 regno_uninitialized (regno)
/* With no basic blocks, or for a hard reg that is global/fixed/usable
   for argument passing, never report "uninitialized".  */
4037 if (n_basic_blocks == 0
4038 || (regno < FIRST_PSEUDO_REGISTER
4039 && (global_regs[regno]
4040 || fixed_regs[regno]
4041 || FUNCTION_ARG_REGNO_P (regno))))
/* Otherwise REGNO is "uninitialized" exactly when it is live on entry
   to the first basic block.  */
4044 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
4047 /* 1 if register REGNO was alive at a place where `setjmp' was called
4048 and was set more than once or is an argument.
4049 Such regs may be clobbered by `longjmp'. */
4052 regno_clobbered_at_setjmp (regno)
/* No flow information exists when there are no basic blocks.  */
4055 if (n_basic_blocks == 0)
/* Live-at-entry counts as an extra "set" (an incoming argument), hence
   the disjunction with REG_N_SETS > 1.  regs_live_at_setjmp is
   accumulated during propagate_block at each setjmp note.  */
4058 return ((REG_N_SETS (regno) > 1
4059 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
4060 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
4063 /* INSN references memory, possibly using autoincrement addressing modes.
4064 Find any entries on the mem_set_list that need to be invalidated due
4065 to an address change. */
/* NOTE(review): the initializer on `note' below is immediately
   overwritten by the for-loop initializer -- harmless but redundant;
   could also be an artifact of missing lines in this excerpt.  */
4068 invalidate_mems_from_autoinc (pbi, insn)
4069 struct propagate_block_info *pbi;
4072 rtx note = REG_NOTES (insn);
/* Each REG_INC note names a register modified by an autoinc address.  */
4073 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
4075 if (REG_NOTE_KIND (note) == REG_INC)
4077 rtx temp = pbi->mem_set_list;
4078 rtx prev = NULL_RTX;
4083 next = XEXP (temp, 1);
/* Any tracked store whose address mentions the incremented register is
   no longer a valid redundancy candidate; drop it.  */
4084 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
4086 /* Splice temp out of list. */
4088 XEXP (prev, 1) = next;
4090 pbi->mem_set_list = next;
4091 free_EXPR_LIST_node (temp);
4101 /* Process the registers that are set within X. Their bits are set to
4102 1 in the regset DEAD, because they are dead prior to this insn.
4104 If INSN is nonzero, it is the insn being processed.
4106 FLAGS is the set of operations to perform. */
4109 mark_set_regs (pbi, x, insn)
4110 struct propagate_block_info *pbi;
4113 rtx cond = NULL_RTX;
/* Dispatch on the top-level form of the pattern.  */
4117 switch (code = GET_CODE (x))
4121 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
/* COND_EXEC: remember the guard condition and process the guarded body.  */
4125 cond = COND_EXEC_TEST (x);
4126 x = COND_EXEC_CODE (x);
/* PARALLEL: handle each SET/CLOBBER element, possibly itself guarded.  */
4132 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
4134 rtx sub = XVECEXP (x, 0, i);
4135 switch (code = GET_CODE (sub))
/* Nested COND_EXEC inside an already-conditional pattern is not
   expected here (cond must still be NULL).  */
4138 if (cond != NULL_RTX)
4141 cond = COND_EXEC_TEST (sub);
4142 sub = COND_EXEC_CODE (sub);
4143 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
4149 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
4164 /* Process a single SET rtx, X. */
/* Worker for mark_set_regs.  CODE is SET or CLOBBER, REG is the
   destination, COND the COND_EXEC guard (or NULL), INSN the containing
   insn, FLAGS the PROP_* activity mask.  Marks the destination
   registers dead in pbi->reg_live (they are dead *before* this insn in
   the backward scan), maintains the tracked-store list, and records
   REG_UNUSED notes / register statistics on the final pass.
   NOTE(review): many lines (braces, declarations, some statements) are
   missing from this excerpt.  */
4167 mark_set_1 (pbi, code, reg, cond, insn, flags)
4168 struct propagate_block_info *pbi;
4170 rtx reg, cond, insn;
4173 int regno_first = -1, regno_last = -1;
4177 /* Some targets place small structures in registers for
4178 return values of functions. We have to detect this
4179 case specially here to get correct flow information. */
4180 if (GET_CODE (reg) == PARALLEL
4181 && GET_MODE (reg) == BLKmode)
4183 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
4184 mark_set_1 (pbi, code, XVECEXP (reg, 0, i), cond, insn, flags);
4188 /* Modifying just one hardware register of a multi-reg value or just a
4189 byte field of a register does not mean the value from before this insn
4190 is now dead. Of course, if it was dead after it's unused now. */
4192 switch (GET_CODE (reg))
4196 case STRICT_LOW_PART:
4197 /* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
4199 reg = XEXP (reg, 0);
/* Strip any chain of partial-write wrappers to reach the REG or MEM.  */
4200 while (GET_CODE (reg) == SUBREG
4201 || GET_CODE (reg) == ZERO_EXTRACT
4202 || GET_CODE (reg) == SIGN_EXTRACT
4203 || GET_CODE (reg) == STRICT_LOW_PART);
4204 if (GET_CODE (reg) == MEM)
/* A partial write leaves the old value partly live, so the register is
   not dead if it was live afterward.  */
4206 not_dead = REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
4210 regno_last = regno_first = REGNO (reg);
4211 if (regno_first < FIRST_PSEUDO_REGISTER)
4212 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
4216 if (GET_CODE (SUBREG_REG (reg)) == REG)
4218 enum machine_mode outer_mode = GET_MODE (reg);
4219 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
4221 /* Identify the range of registers affected. This is moderately
4222 tricky for hard registers. See alter_subreg. */
4224 regno_last = regno_first = REGNO (SUBREG_REG (reg));
4225 if (regno_first < FIRST_PSEUDO_REGISTER)
4227 #ifdef ALTER_HARD_SUBREG
4228 regno_first = ALTER_HARD_SUBREG (outer_mode, SUBREG_WORD (reg),
4229 inner_mode, regno_first);
4231 regno_first += SUBREG_WORD (reg);
4233 regno_last = (regno_first
4234 + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
4236 /* Since we've just adjusted the register number ranges, make
4237 sure REG matches. Otherwise some_was_live will be clear
4238 when it shouldn't have been, and we'll create incorrect
4239 REG_UNUSED notes. */
4240 reg = gen_rtx_REG (outer_mode, regno_first);
4244 /* If the number of words in the subreg is less than the number
4245 of words in the full register, we have a well-defined partial
4246 set. Otherwise the high bits are undefined.
4248 This is only really applicable to pseudos, since we just took
4249 care of multi-word hard registers. */
4250 if (((GET_MODE_SIZE (outer_mode)
4251 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
4252 < ((GET_MODE_SIZE (inner_mode)
4253 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
4254 not_dead = REGNO_REG_SET_P (pbi->reg_live, regno_first);
4256 reg = SUBREG_REG (reg);
4260 reg = SUBREG_REG (reg);
4267 /* If this set is a MEM, then it kills any aliased writes.
4268 If this set is a REG, then it kills any MEMs which use the reg. */
4269 if (flags & PROP_SCAN_DEAD_CODE)
4271 if (GET_CODE (reg) == MEM || GET_CODE (reg) == REG)
4273 rtx temp = pbi->mem_set_list;
4274 rtx prev = NULL_RTX;
4279 next = XEXP (temp, 1);
4280 if ((GET_CODE (reg) == MEM
4281 && output_dependence (XEXP (temp, 0), reg))
4282 || (GET_CODE (reg) == REG
4283 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
4285 /* Splice this entry out of the list. */
4287 XEXP (prev, 1) = next;
4289 pbi->mem_set_list = next;
4290 free_EXPR_LIST_node (temp);
4298 /* If the memory reference had embedded side effects (autoincrement
4299 address modes. Then we may need to kill some entries on the
4301 if (insn && GET_CODE (reg) == MEM)
4302 invalidate_mems_from_autoinc (pbi, insn);
/* Record this store so a later (earlier in insn order) identical store
   can be recognized as dead.  */
4304 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
4305 /* ??? With more effort we could track conditional memory life. */
4307 /* We do not know the size of a BLKmode store, so we do not track
4308 them for redundant store elimination. */
4309 && GET_MODE (reg) != BLKmode
4310 /* There are no REG_INC notes for SP, so we can't assume we'll see
4311 everything that invalidates it. To be safe, don't eliminate any
4312 stores though SP; none of them should be redundant anyway. */
4313 && ! reg_mentioned_p (stack_pointer_rtx, reg))
4314 pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
/* Main register case: excludes the (hard) frame pointer and fixed arg
   pointer, whose sets must never be treated as killing them.  */
4317 if (GET_CODE (reg) == REG
4318 && ! (regno_first == FRAME_POINTER_REGNUM
4319 && (! reload_completed || frame_pointer_needed))
4320 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4321 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
4322 && (! reload_completed || frame_pointer_needed))
4324 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4325 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
4329 int some_was_live = 0, some_was_dead = 0;
4331 for (i = regno_first; i <= regno_last; ++i)
4333 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
4335 SET_REGNO_REG_SET (pbi->local_set, i);
4336 if (code != CLOBBER)
4337 SET_REGNO_REG_SET (pbi->new_set, i);
4339 some_was_live |= needed_regno;
4340 some_was_dead |= ! needed_regno;
4343 #ifdef HAVE_conditional_execution
4344 /* Consider conditional death in deciding that the register needs
4346 if (some_was_live && ! not_dead
4347 /* The stack pointer is never dead. Well, not strictly true,
4348 but it's very difficult to tell from here. Hopefully
4349 combine_stack_adjustments will fix up the most egregious
4351 && regno_first != STACK_POINTER_REGNUM)
4353 for (i = regno_first; i <= regno_last; ++i)
4354 if (! mark_regno_cond_dead (pbi, i, cond))
4359 /* Additional data to record if this is the final pass. */
4360 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
4361 | PROP_DEATH_NOTES | PROP_AUTOINC))
4364 register int blocknum = pbi->bb->index;
4367 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
/* Y remembers the old next-use before we clear it, for the LOG_LINK
   creation below.  */
4369 y = pbi->reg_next_use[regno_first];
4371 /* The next use is no longer next, since a store intervenes. */
4372 for (i = regno_first; i <= regno_last; ++i)
4373 pbi->reg_next_use[i] = 0;
4376 if (flags & PROP_REG_INFO)
4378 for (i = regno_first; i <= regno_last; ++i)
4380 /* Count (weighted) references, stores, etc. This counts a
4381 register twice if it is modified, but that is correct. */
4382 REG_N_SETS (i) += 1;
4383 REG_N_REFS (i) += (optimize_size ? 1
4384 : pbi->bb->loop_depth + 1);
4386 /* The insns where a reg is live are normally counted
4387 elsewhere, but we want the count to include the insn
4388 where the reg is set, and the normal counting mechanism
4389 would not count it. */
4390 REG_LIVE_LENGTH (i) += 1;
4393 /* If this is a hard reg, record this function uses the reg. */
4394 if (regno_first < FIRST_PSEUDO_REGISTER)
4396 for (i = regno_first; i <= regno_last; i++)
4397 regs_ever_live[i] = 1;
4401 /* Keep track of which basic blocks each reg appears in. */
4402 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
4403 REG_BASIC_BLOCK (regno_first) = blocknum;
4404 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
4405 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
/* All words were live after the insn: the set feeds a later use.  */
4409 if (! some_was_dead)
4411 if (flags & PROP_LOG_LINKS)
4413 /* Make a logical link from the next following insn
4414 that uses this register, back to this insn.
4415 The following insns have already been processed.
4417 We don't build a LOG_LINK for hard registers containing
4418 in ASM_OPERANDs. If these registers get replaced,
4419 we might wind up changing the semantics of the insn,
4420 even if reload can make what appear to be valid
4421 assignments later. */
4422 if (y && (BLOCK_NUM (y) == blocknum)
4423 && (regno_first >= FIRST_PSEUDO_REGISTER
4424 || asm_noperands (PATTERN (y)) < 0))
4425 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
/* No word was live after the insn: the whole set is unused.  */
4430 else if (! some_was_live)
4432 if (flags & PROP_REG_INFO)
4433 REG_N_DEATHS (regno_first) += 1;
4435 if (flags & PROP_DEATH_NOTES)
4437 /* Note that dead stores have already been deleted
4438 when possible. If we get here, we have found a
4439 dead store that cannot be eliminated (because the
4440 same insn does something useful). Indicate this
4441 by marking the reg being set as dying here. */
4443 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4448 if (flags & PROP_DEATH_NOTES)
4450 /* This is a case where we have a multi-word hard register
4451 and some, but not all, of the words of the register are
4452 needed in subsequent insns. Write REG_UNUSED notes
4453 for those parts that were not needed. This case should
4456 for (i = regno_first; i <= regno_last; ++i)
4457 if (! REGNO_REG_SET_P (pbi->reg_live, i))
4459 = alloc_EXPR_LIST (REG_UNUSED,
4460 gen_rtx_REG (reg_raw_mode[i], i),
4466 /* Mark the register as being dead. */
4469 /* The stack pointer is never dead. Well, not strictly true,
4470 but it's very difficult to tell from here. Hopefully
4471 combine_stack_adjustments will fix up the most egregious
4473 && regno_first != STACK_POINTER_REGNUM)
4475 for (i = regno_first; i <= regno_last; ++i)
4476 CLEAR_REGNO_REG_SET (pbi->reg_live, i);
/* Destination is the frame/arg pointer (excluded above): only drop the
   stale next-use record.  */
4479 else if (GET_CODE (reg) == REG)
4481 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4482 pbi->reg_next_use[regno_first] = 0;
4485 /* If this is the last pass and this is a SCRATCH, show it will be dying
4486 here and count it. */
4487 else if (GET_CODE (reg) == SCRATCH)
4489 if (flags & PROP_DEATH_NOTES)
4491 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4495 #ifdef HAVE_conditional_execution
4496 /* Mark REGNO conditionally dead. Return true if the register is
4497 now unconditionally dead. */
/* COND is the COND_EXEC guard under which the set occurs, or NULL_RTX
   for an unconditional set.  Conditional death state is kept in the
   pbi->reg_cond_dead splay tree, keyed by register number.  */
4500 mark_regno_cond_dead (pbi, regno, cond)
4501 struct propagate_block_info *pbi;
4505 /* If this is a store to a predicate register, the value of the
4506 predicate is changing, we don't know that the predicate as seen
4507 before is the same as that seen after. Flush all dependant
4508 conditions from reg_cond_dead. This will make all such
4509 conditionally live registers unconditionally live. */
4510 if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
4511 flush_reg_cond_reg (pbi, regno);
4513 /* If this is an unconditional store, remove any conditional
4514 life that may have existed. */
4515 if (cond == NULL_RTX)
4516 splay_tree_remove (pbi->reg_cond_dead, regno);
4519 splay_tree_node node;
4520 struct reg_cond_life_info *rcli;
4523 /* Otherwise this is a conditional set. Record that fact.
4524 It may have been conditionally used, or there may be a
4525 subsequent set with a complimentary condition. */
4527 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
4530 /* The register was unconditionally live previously.
4531 Record the current condition as the condition under
4532 which it is dead. */
4533 rcli = (struct reg_cond_life_info *)
4534 xmalloc (sizeof (*rcli));
4535 rcli->condition = alloc_EXPR_LIST (0, cond, NULL_RTX);
4536 splay_tree_insert (pbi->reg_cond_dead, regno,
4537 (splay_tree_value) rcli);
/* Remember the predicate register so a later store to it flushes
   this entry (see flush_reg_cond_reg).  */
4539 SET_REGNO_REG_SET (pbi->reg_cond_reg,
4540 REGNO (XEXP (cond, 0)));
4542 /* Not unconditionaly dead. */
4547 /* The register was conditionally live previously.
4548 Add the new condition to the old. */
4549 rcli = (struct reg_cond_life_info *) node->value;
4550 ncond = rcli->condition;
4551 ncond = ior_reg_cond (ncond, cond);
4553 /* If the register is now unconditionally dead,
4554 remove the entry in the splay_tree. */
/* ior_reg_cond returns const1_rtx when the combined condition covers
   all cases (a condition and its complement).  */
4555 if (ncond == const1_rtx)
4556 splay_tree_remove (pbi->reg_cond_dead, regno);
4559 rcli->condition = ncond;
4561 SET_REGNO_REG_SET (pbi->reg_cond_reg,
4562 REGNO (XEXP (cond, 0)));
4564 /* Not unconditionaly dead. */
4573 /* Called from splay_tree_delete for pbi->reg_cond_life. */
/* Value-destructor for the conditional-death splay tree: releases the
   EXPR_LIST condition chain, then (presumably, in the lines missing
   from this excerpt) the reg_cond_life_info struct itself.  */
4576 free_reg_cond_life_info (value)
4577 splay_tree_value value;
4579 struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
4580 free_EXPR_LIST_list (&rcli->condition);
4584 /* Helper function for flush_reg_cond_reg. */
/* splay_tree_foreach callback.  DATA is an int pair: [0] the register
   number being flushed, [1] the key of the last node already handled.
   Removes from each node's condition chain every term that tests that
   register; when a chain empties, records the key so the caller can
   delete the node.  */
4587 flush_reg_cond_reg_1 (node, data)
4588 splay_tree_node node;
4591 struct reg_cond_life_info *rcli;
4592 int *xdata = (int *) data;
4593 unsigned int regno = xdata[0];
4596 /* Don't need to search if last flushed value was farther on in
4597 the in-order traversal. */
4598 if (xdata[1] >= (int) node->key)
4601 /* Splice out portions of the expression that refer to regno. */
4602 rcli = (struct reg_cond_life_info *) node->value;
4603 c = *(prev = &rcli->condition);
/* Each list element's XEXP(c,0) is a comparison whose operand 0 is the
   predicate register being tested.  */
4606 if (regno == REGNO (XEXP (XEXP (c, 0), 0)))
4608 rtx next = XEXP (c, 1);
4609 free_EXPR_LIST_node (c);
4613 c = *(prev = &XEXP (c, 1));
4616 /* If the entire condition is now NULL, signal the node to be removed. */
4617 if (! rcli->condition)
/* Stash the key for the caller; it restarts the walk after removal.  */
4619 xdata[1] = node->key;
4626 /* Flush all (sub) expressions referring to REGNO from REG_COND_LIVE. */
/* Repeatedly walks the reg_cond_dead tree; each -1 result from the
   helper names (via pair[1]) one node whose condition became empty,
   which is deleted before the walk resumes.  Finally clears REGNO from
   the predicate-register bitmap.  */
4629 flush_reg_cond_reg (pbi, regno)
4630 struct propagate_block_info *pbi;
4637 while (splay_tree_foreach (pbi->reg_cond_dead,
4638 flush_reg_cond_reg_1, pair) == -1)
4639 splay_tree_remove (pbi->reg_cond_dead, pair[1]);
4641 CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
4644 /* Logical arithmetic on predicate conditions. IOR, NOT and NAND.
4645 We actually use EXPR_LIST to chain the sub-expressions together
4646 instead of IOR because it's easier to manipulate and we have
4647 the lists.c functions to reuse nodes.
4649 Return a new rtl expression as appropriate. */
/* ior_reg_cond: OLD is an EXPR_LIST chain of (cmp reg 0) terms; X is a
   single such comparison.  Returns OLD augmented with X, OLD unchanged
   if X is already present, or (per the lines missing here) const1_rtx
   when X complements an existing term -- see mark_regno_cond_dead's
   const1_rtx check.  */
4652 ior_reg_cond (old, x)
4655 enum rtx_code x_code;
4659 /* We expect these conditions to be of the form (eq reg 0). */
4660 x_code = GET_CODE (x);
4661 if (GET_RTX_CLASS (x_code) != '<'
4662 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4663 || XEXP (x, 1) != const0_rtx)
4666 /* Search the expression for an existing sub-expression of X_REG. */
4667 for (c = old; c ; c = XEXP (c, 1))
4669 rtx y = XEXP (c, 0);
4670 if (REGNO (XEXP (y, 0)) == REGNO (x_reg))
4672 /* If we find X already present in OLD, we need do nothing. */
4673 if (GET_CODE (y) == x_code)
4676 /* If we find X being a compliment of a condition in OLD,
4677 then the entire condition is true. */
4678 if (GET_CODE (y) == reverse_condition (x_code))
4683 /* Otherwise just add to the chain. */
4684 return alloc_EXPR_LIST (0, x, old);
/* NOTE(review): the function header lines are missing from this
   excerpt; by position and behavior this is presumably not_reg_cond(x):
   return a one-element EXPR_LIST holding the reversed comparison of X,
   i.e. the logical NOT of a (cmp reg 0) predicate condition.  */
4691 enum rtx_code x_code;
4694 /* We expect these conditions to be of the form (eq reg 0). */
4695 x_code = GET_CODE (x);
4696 if (GET_RTX_CLASS (x_code) != '<'
4697 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4698 || XEXP (x, 1) != const0_rtx)
/* Build (reverse_cmp reg 0) and wrap it in a fresh chain element.  */
4701 return alloc_EXPR_LIST (0, gen_rtx_fmt_ee (reverse_condition (x_code),
4702 VOIDmode, x_reg, const0_rtx),
/* nand_reg_cond: combine predicate-condition chain OLD with the
   negation of comparison X.  If X appears in OLD it is spliced out
   (const0_rtx when the chain empties); if X's complement appears, OLD
   is unchanged; otherwise the reversed comparison is prepended.  */
4707 nand_reg_cond (old, x)
4710 enum rtx_code x_code;
4714 /* We expect these conditions to be of the form (eq reg 0). */
4715 x_code = GET_CODE (x);
4716 if (GET_RTX_CLASS (x_code) != '<'
4717 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4718 || XEXP (x, 1) != const0_rtx)
4721 /* Search the expression for an existing sub-expression of X_REG. */
/* PREV tracks the link to patch when an element is removed.  */
4723 for (c = *(prev = &old); c ; c = *(prev = &XEXP (c, 1)))
4725 rtx y = XEXP (c, 0);
4726 if (REGNO (XEXP (y, 0)) == REGNO (x_reg))
4728 /* If we find X already present in OLD, then we need to
4730 if (GET_CODE (y) == x_code)
4732 *prev = XEXP (c, 1);
4733 free_EXPR_LIST_node (c);
4734 return old ? old : const0_rtx;
4737 /* If we find X being a compliment of a condition in OLD,
4738 then we need do nothing. */
4739 if (GET_CODE (y) == reverse_condition (x_code))
4744 /* Otherwise, by implication, the register in question is now live for
4745 the inverse of the condition X. */
4746 return alloc_EXPR_LIST (0, gen_rtx_fmt_ee (reverse_condition (x_code),
4747 VOIDmode, x_reg, const0_rtx),
4750 #endif /* HAVE_conditional_execution */
4754 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
/* ...addressing mode (the rest of this comment, and many body lines,
   are missing from this excerpt).  Uses pbi->reg_next_use to find a
   later insn that increments the address register; if the increment
   matches a supported PRE/POST INC/DEC form, rewrites X in place,
   attaches a REG_INC note, and turns the increment insn into a plain
   copy or a deleted note.  */
4758 find_auto_inc (pbi, x, insn)
4759 struct propagate_block_info *pbi;
4763 rtx addr = XEXP (x, 0);
4764 HOST_WIDE_INT offset = 0;
4767 /* Here we detect use of an index register which might be good for
4768 postincrement, postdecrement, preincrement, or predecrement. */
/* Peel a constant displacement: (plus reg const) -> reg, OFFSET=const.  */
4770 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4771 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
4773 if (GET_CODE (addr) == REG)
4776 register int size = GET_MODE_SIZE (GET_MODE (x));
4779 int regno = REGNO (addr);
4781 /* Is the next use an increment that might make auto-increment? */
/* The next use must be a single SET in the same block, of the form
   addr = addr + const, where const/offset match one of the four
   supported autoinc shapes, and ADDR must appear exactly once in INSN
   as an address.  */
4782 if ((incr = pbi->reg_next_use[regno]) != 0
4783 && (set = single_set (incr)) != 0
4784 && GET_CODE (set) == SET
4785 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
4786 /* Can't add side effects to jumps; if reg is spilled and
4787 reloaded, there's no way to store back the altered value. */
4788 && GET_CODE (insn) != JUMP_INSN
4789 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
4790 && XEXP (y, 0) == addr
4791 && GET_CODE (XEXP (y, 1)) == CONST_INT
4792 && ((HAVE_POST_INCREMENT
4793 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
4794 || (HAVE_POST_DECREMENT
4795 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
4796 || (HAVE_PRE_INCREMENT
4797 && (INTVAL (XEXP (y, 1)) == size && offset == size))
4798 || (HAVE_PRE_DECREMENT
4799 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
4800 /* Make sure this reg appears only once in this insn. */
4801 && (use = find_use_as_address (PATTERN (insn), addr, offset),
4802 use != 0 && use != (rtx) 1))
4804 rtx q = SET_DEST (set);
/* OFFSET nonzero means the pre- forms; zero means post-.  */
4805 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
4806 ? (offset ? PRE_INC : POST_INC)
4807 : (offset ? PRE_DEC : POST_DEC));
4809 if (dead_or_set_p (incr, addr)
4810 /* Mustn't autoinc an eliminable register. */
4811 && (regno >= FIRST_PSEUDO_REGISTER
4812 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
4814 /* This is the simple case. Try to make the auto-inc. If
4815 we can't, we are done. Otherwise, we will do any
4816 needed updates below. */
4817 if (! validate_change (insn, &XEXP (x, 0),
4818 gen_rtx_fmt_e (inc_code, Pmode, addr),
4822 else if (GET_CODE (q) == REG
4823 /* PREV_INSN used here to check the semi-open interval
4825 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4826 /* We must also check for sets of q as q may be
4827 a call clobbered hard register and there may
4828 be a call between PREV_INSN (insn) and incr. */
4829 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4831 /* We have *p followed sometime later by q = p+size.
4832 Both p and q must be live afterward,
4833 and q is not used between INSN and its assignment.
4834 Change it to q = p, ...*q..., q = q+size.
4835 Then fall into the usual case. */
4839 emit_move_insn (q, addr);
4840 insns = get_insns ();
/* Keep the insn->block mapping consistent for the new copy insns.  */
4843 if (basic_block_for_insn)
4844 for (temp = insns; temp; temp = NEXT_INSN (temp))
4845 set_block_for_insn (temp, pbi->bb);
4847 /* If we can't make the auto-inc, or can't make the
4848 replacement into Y, exit. There's no point in making
4849 the change below if we can't do the auto-inc and doing
4850 so is not correct in the pre-inc case. */
4852 validate_change (insn, &XEXP (x, 0),
4853 gen_rtx_fmt_e (inc_code, Pmode, q),
4855 validate_change (incr, &XEXP (y, 0), q, 1);
4856 if (! apply_change_group ())
4859 /* We now know we'll be doing this change, so emit the
4860 new insn(s) and do the updates. */
4861 emit_insns_before (insns, insn);
4863 if (pbi->bb->head == insn)
4864 pbi->bb->head = insns;
4866 /* INCR will become a NOTE and INSN won't contain a
4867 use of ADDR. If a use of ADDR was just placed in
4868 the insn before INSN, make that the next use.
4869 Otherwise, invalidate it. */
4870 if (GET_CODE (PREV_INSN (insn)) == INSN
4871 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4872 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
4873 pbi->reg_next_use[regno] = PREV_INSN (insn);
4875 pbi->reg_next_use[regno] = 0;
4880 /* REGNO is now used in INCR which is below INSN, but it
4881 previously wasn't live here. If we don't mark it as
4882 live, we'll put a REG_DEAD note for it on this insn,
4883 which is incorrect. */
4884 SET_REGNO_REG_SET (pbi->reg_live, regno);
4886 /* If there are any calls between INSN and INCR, show
4887 that REGNO now crosses them. */
4888 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4889 if (GET_CODE (temp) == CALL_INSN)
4890 REG_N_CALLS_CROSSED (regno)++;
4895 /* If we haven't returned, it means we were able to make the
4896 auto-inc, so update the status. First, record that this insn
4897 has an implicit side effect. */
4900 = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
4902 /* Modify the old increment-insn to simply copy
4903 the already-incremented value of our register. */
4904 if (! validate_change (incr, &SET_SRC (set), addr, 0))
4907 /* If that makes it a no-op (copying the register into itself) delete
4908 it so it won't appear to be a "use" and a "set" of this
4910 if (SET_DEST (set) == addr)
4912 /* If the original source was dead, it's dead now. */
4913 rtx note = find_reg_note (incr, REG_DEAD, NULL_RTX);
4914 if (note && XEXP (note, 0) != addr)
4915 CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
/* Delete INCR by demoting it to a NOTE_INSN_DELETED.  */
4917 PUT_CODE (incr, NOTE);
4918 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4919 NOTE_SOURCE_FILE (incr) = 0;
4922 if (regno >= FIRST_PSEUDO_REGISTER)
4924 /* Count an extra reference to the reg. When a reg is
4925 incremented, spilling it is worse, so we want to make
4926 that less likely. */
4927 REG_N_REFS (regno) += (optimize_size ? 1
4928 : pbi->bb->loop_depth + 1);
4930 /* Count the increment as a setting of the register,
4931 even though it isn't a SET in rtl. */
4932 REG_N_SETS (regno)++;
4937 #endif /* AUTO_INC_DEC */
4940 mark_used_reg (pbi, reg, cond, insn)
4941 struct propagate_block_info *pbi;
4943 rtx cond ATTRIBUTE_UNUSED;
4946 int regno = REGNO (reg);
4947 int some_was_live = REGNO_REG_SET_P (pbi->reg_live, regno);
4948 int some_was_dead = ! some_was_live;
4952 /* A hard reg in a wide mode may really be multiple registers.
4953 If so, mark all of them just like the first. */
4954 if (regno < FIRST_PSEUDO_REGISTER)
4956 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4959 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, regno + n);
4960 some_was_live |= needed_regno;
4961 some_was_dead |= ! needed_regno;
4965 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4967 /* Record where each reg is used, so when the reg is set we know
4968 the next insn that uses it. */
4969 pbi->reg_next_use[regno] = insn;
4972 if (pbi->flags & PROP_REG_INFO)
4974 if (regno < FIRST_PSEUDO_REGISTER)
4976 /* If this is a register we are going to try to eliminate,
4977 don't mark it live here. If we are successful in
4978 eliminating it, it need not be live unless it is used for
4979 pseudos, in which case it will have been set live when it
4980 was allocated to the pseudos. If the register will not
4981 be eliminated, reload will set it live at that point.
4983 Otherwise, record that this function uses this register. */
4984 /* ??? The PPC backend tries to "eliminate" on the pic
4985 register to itself. This should be fixed. In the mean
4986 time, hack around it. */
4988 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno)
4989 && (regno == FRAME_POINTER_REGNUM
4990 || regno == ARG_POINTER_REGNUM)))
4992 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4994 regs_ever_live[regno + --n] = 1;
5000 /* Keep track of which basic block each reg appears in. */
5002 register int blocknum = pbi->bb->index;
5003 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
5004 REG_BASIC_BLOCK (regno) = blocknum;
5005 else if (REG_BASIC_BLOCK (regno) != blocknum)
5006 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
5008 /* Count (weighted) number of uses of each reg. */
5009 REG_N_REFS (regno) += (optimize_size ? 1
5010 : pbi->bb->loop_depth + 1);
5014 /* Find out if any of the register was set this insn. */
5015 some_not_set = ! REGNO_REG_SET_P (pbi->new_set, regno);
5016 if (regno < FIRST_PSEUDO_REGISTER)
5018 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5020 some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, regno + n);
5023 /* Record and count the insns in which a reg dies. If it is used in
5024 this insn and was dead below the insn then it dies in this insn.
5025 If it was set in this insn, we do not make a REG_DEAD note;
5026 likewise if we already made such a note. */
5027 if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
5031 /* Check for the case where the register dying partially
5032 overlaps the register set by this insn. */
5033 if (regno < FIRST_PSEUDO_REGISTER
5034 && HARD_REGNO_NREGS (regno, GET_MODE (reg)) > 1)
5036 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5038 some_was_live |= REGNO_REG_SET_P (pbi->new_set, regno + n);
5041 /* If none of the words in X is needed, make a REG_DEAD note.
5042 Otherwise, we must make partial REG_DEAD notes. */
5043 if (! some_was_live)
5045 if ((pbi->flags & PROP_DEATH_NOTES)
5046 && ! find_regno_note (insn, REG_DEAD, regno))
5048 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
5050 if (pbi->flags & PROP_REG_INFO)
5051 REG_N_DEATHS (regno)++;
5055 /* Don't make a REG_DEAD note for a part of a register
5056 that is set in the insn. */
5058 n = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
5059 for (; n >= regno; n--)
5060 if (! REGNO_REG_SET_P (pbi->reg_live, n)
5061 && ! dead_or_set_regno_p (insn, n))
5063 = alloc_EXPR_LIST (REG_DEAD,
5064 gen_rtx_REG (reg_raw_mode[n], n),
5069 SET_REGNO_REG_SET (pbi->reg_live, regno);
5070 if (regno < FIRST_PSEUDO_REGISTER)
5072 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5074 SET_REGNO_REG_SET (pbi->reg_live, regno + n);
5077 #ifdef HAVE_conditional_execution
5078 /* If this is a conditional use, record that fact. If it is later
5079 conditionally set, we'll know to kill the register. */
5080 if (cond != NULL_RTX)
5082 splay_tree_node node;
5083 struct reg_cond_life_info *rcli;
5088 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
5091 /* The register was unconditionally live previously.
5092 No need to do anything. */
5096 /* The register was conditionally live previously.
5097 Subtract the new life cond from the old death cond. */
5098 rcli = (struct reg_cond_life_info *) node->value;
5099 ncond = rcli->condition;
5100 ncond = nand_reg_cond (ncond, cond);
5102 /* If the register is now unconditionally live, remove the
5103 entry in the splay_tree. */
5104 if (ncond == const0_rtx)
5106 rcli->condition = NULL_RTX;
5107 splay_tree_remove (pbi->reg_cond_dead, regno);
5110 rcli->condition = ncond;
5115 /* The register was not previously live at all. Record
5116 the condition under which it is still dead. */
5117 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
5118 rcli->condition = not_reg_cond (cond);
5119 splay_tree_insert (pbi->reg_cond_dead, regno,
5120 (splay_tree_value) rcli);
5123 else if (some_was_live)
5125 splay_tree_node node;
5126 struct reg_cond_life_info *rcli;
5128 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
5131 /* The register was conditionally live previously, but is now
5132 unconditionally so. Remove it from the conditionally dead
5133 list, so that a conditional set won't cause us to think
5135 rcli = (struct reg_cond_life_info *) node->value;
5136 rcli->condition = NULL_RTX;
5137 splay_tree_remove (pbi->reg_cond_dead, regno);
5144 /* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
5145 This is done assuming the registers needed from X are those that
5146 have 1-bits in PBI->REG_LIVE.
5148 INSN is the containing instruction. If INSN is dead, this function
/* Scan expression X recursively and mark each register used in it as live,
   dispatching single regs to mark_used_reg.  COND is the COND_EXEC condition
   in force (NULL_RTX if unconditional); INSN is the containing insn.
   NOTE(review): this numbered listing is elided -- intermediate source lines
   (braces, case labels) are missing, so structure below is incomplete.  */
5152 mark_used_regs (pbi, x, cond, insn)
5153 struct propagate_block_info *pbi;
5156 register RTX_CODE code;
5158 int flags = pbi->flags;
5161 code = GET_CODE (x);
5181 /* If we are clobbering a MEM, mark any registers inside the address
5183 if (GET_CODE (XEXP (x, 0)) == MEM)
5184 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
5188 /* Don't bother watching stores to mems if this is not the
5189 final pass. We'll not be deleting dead stores this round. */
5190 if (flags & PROP_SCAN_DEAD_CODE)
5192 /* Invalidate the data for the last MEM stored, but only if MEM is
5193 something that can be stored into. */
5194 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
5195 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
5196 ; /* needn't clear the memory set list */
5199 rtx temp = pbi->mem_set_list;
5200 rtx prev = NULL_RTX;
5205 next = XEXP (temp, 1);
5206 if (anti_dependence (XEXP (temp, 0), x))
5208 /* Splice temp out of the list. */
5210 XEXP (prev, 1) = next;
5212 pbi->mem_set_list = next;
5213 free_EXPR_LIST_node (temp);
5221 /* If the memory reference had embedded side effects (autoincrement
5222 address modes. Then we may need to kill some entries on the
5225 invalidate_mems_from_autoinc (pbi, insn);
5229 if (flags & PROP_AUTOINC)
5230 find_auto_inc (pbi, x, insn);
/* A paradoxical or size-changing SUBREG of a pseudo is recorded so the
   allocator knows the reg is accessed in more than one size.  */
5235 if (GET_CODE (SUBREG_REG (x)) == REG
5236 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
5237 && (GET_MODE_SIZE (GET_MODE (x))
5238 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
5239 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
5241 /* While we're here, optimize this case. */
5243 if (GET_CODE (x) != REG)
5248 /* See a register other than being set => mark it as needed. */
5249 mark_used_reg (pbi, x, cond, insn);
/* SET handling: the destination is usually not a "use", except for the
   address inside a MEM destination and partial-register stores below.  */
5254 register rtx testreg = SET_DEST (x);
5257 /* If storing into MEM, don't show it as being used. But do
5258 show the address as being used. */
5259 if (GET_CODE (testreg) == MEM)
5262 if (flags & PROP_AUTOINC)
5263 find_auto_inc (pbi, testreg, insn);
5265 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
5266 mark_used_regs (pbi, SET_SRC (x), cond, insn);
5270 /* Storing in STRICT_LOW_PART is like storing in a reg
5271 in that this SET might be dead, so ignore it in TESTREG.
5272 but in some other ways it is like using the reg.
5274 Storing in a SUBREG or a bit field is like storing the entire
5275 register in that if the register's value is not used
5276 then this SET is not needed. */
5277 while (GET_CODE (testreg) == STRICT_LOW_PART
5278 || GET_CODE (testreg) == ZERO_EXTRACT
5279 || GET_CODE (testreg) == SIGN_EXTRACT
5280 || GET_CODE (testreg) == SUBREG)
5282 if (GET_CODE (testreg) == SUBREG
5283 && GET_CODE (SUBREG_REG (testreg)) == REG
5284 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
5285 && (GET_MODE_SIZE (GET_MODE (testreg))
5286 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
5287 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
5289 /* Modifying a single register in an alternate mode
5290 does not use any of the old value. But these other
5291 ways of storing in a register do use the old value. */
5292 if (GET_CODE (testreg) == SUBREG
5293 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5298 testreg = XEXP (testreg, 0);
5301 /* If this is a store into a register, recursively scan the
5302 value being stored. */
5304 if ((GET_CODE (testreg) == PARALLEL
5305 && GET_MODE (testreg) == BLKmode)
5306 || (GET_CODE (testreg) == REG
5307 && (regno = REGNO (testreg),
5308 ! (regno == FRAME_POINTER_REGNUM
5309 && (! reload_completed || frame_pointer_needed)))
5310 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5311 && ! (regno == HARD_FRAME_POINTER_REGNUM
5312 && (! reload_completed || frame_pointer_needed))
5314 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5315 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
5320 mark_used_regs (pbi, SET_DEST (x), cond, insn);
5321 mark_used_regs (pbi, SET_SRC (x), cond, insn);
5328 case UNSPEC_VOLATILE:
5332 /* Traditional and volatile asm instructions must be considered to use
5333 and clobber all hard registers, all pseudo-registers and all of
5334 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
5336 Consider for instance a volatile asm that changes the fpu rounding
5337 mode. An insn should not be moved across this even if it only uses
5338 pseudo-regs because it might give an incorrectly rounded result.
5340 ?!? Unfortunately, marking all hard registers as live causes massive
5341 problems for the register allocator and marking all pseudos as live
5342 creates mountains of uninitialized variable warnings.
5344 So for now, just clear the memory set list and mark any regs
5345 we can find in ASM_OPERANDS as used. */
5346 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
5347 free_EXPR_LIST_list (&pbi->mem_set_list);
5349 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
5350 We can not just fall through here since then we would be confused
5351 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
5352 traditional asms unlike their normal usage. */
5353 if (code == ASM_OPERANDS)
5357 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
5358 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
/* COND_EXEC: scan the test unconditionally, then scan the guarded body
   with COND set to the test.  */
5364 if (cond != NULL_RTX)
5367 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
5369 cond = COND_EXEC_TEST (x);
5370 x = COND_EXEC_CODE (x);
5374 /* We _do_not_ want to scan operands of phi nodes. Operands of
5375 a phi function are evaluated only when control reaches this
5376 block along a particular edge. Therefore, regs that appear
5377 as arguments to phi should not be added to the global live at
5385 /* Recursively scan the operands of this expression. */
5388 register const char *fmt = GET_RTX_FORMAT (code);
5391 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5395 /* Tail recursive case: save a function call level. */
5401 mark_used_regs (pbi, XEXP (x, i), cond, insn);
5403 else if (fmt[i] == 'E')
5406 for (j = 0; j < XVECLEN (x, i); j++)
5407 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
/* Given INSN, an add-constant-to-register insn, try to fold the increment
   into the next use of the register in the same basic block as an
   auto-increment addressing mode, deleting INSN on success.
   NOTE(review): listing is elided -- some lines are missing here.  */
5416 try_pre_increment_1 (pbi, insn)
5417 struct propagate_block_info *pbi;
5420 /* Find the next use of this reg. If in same basic block,
5421 make it do pre-increment or pre-decrement if appropriate. */
5422 rtx x = single_set (insn);
5423 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
5424 * INTVAL (XEXP (SET_SRC (x), 1)));
5425 int regno = REGNO (SET_DEST (x));
5426 rtx y = pbi->reg_next_use[regno];
5428 && BLOCK_NUM (y) == BLOCK_NUM (insn)
5429 /* Don't do this if the reg dies, or gets set in y; a standard addressing
5430 mode would be better. */
5431 && ! dead_or_set_p (y, SET_DEST (x))
5432 && try_pre_increment (y, SET_DEST (x), amount))
5434 /* We have found a suitable auto-increment
5435 and already changed insn Y to do it.
5436 So flush this increment-instruction.  */
5437 PUT_CODE (insn, NOTE);
5438 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
5439 NOTE_SOURCE_FILE (insn) = 0;
5440 /* Count a reference to this reg for the increment
5441 insn we are deleting. When a reg is incremented,
5442 spilling it is worse, so we want to make that
5444 if (regno >= FIRST_PSEUDO_REGISTER)
5446 REG_N_REFS (regno) += (optimize_size ? 1
5447 : pbi->bb->loop_depth + 1);
5448 REG_N_SETS (regno)++;
5455 /* Try to change INSN so that it does pre-increment or pre-decrement
5456 addressing on register REG in order to add AMOUNT to REG.
5457 AMOUNT is negative for pre-decrement.
5458 Returns 1 if the change could be made.
5459 This checks all about the validity of the result of modifying INSN. */
5462 try_pre_increment (insn, reg, amount)
5464 HOST_WIDE_INT amount;
5468 /* Nonzero if we can try to make a pre-increment or pre-decrement.
5469 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
5471 /* Nonzero if we can try to make a post-increment or post-decrement.
5472 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
5473 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
5474 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
5477 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
5480 /* From the sign of increment, see which possibilities are conceivable
5481 on this target machine. */
5482 if (HAVE_PRE_INCREMENT && amount > 0)
5484 if (HAVE_POST_INCREMENT && amount > 0)
5487 if (HAVE_PRE_DECREMENT && amount < 0)
5489 if (HAVE_POST_DECREMENT && amount < 0)
5492 if (! (pre_ok || post_ok))
5495 /* It is not safe to add a side effect to a jump insn
5496 because if the incremented register is spilled and must be reloaded
5497 there would be no way to store the incremented value back in memory. */
5499 if (GET_CODE (insn) == JUMP_INSN)
/* Locate the single use of REG as an address; find_use_as_address returns
   0 for "no use" and (rtx) 1 for "unsuitable use" (see its contract).  */
5504 use = find_use_as_address (PATTERN (insn), reg, 0);
5505 if (post_ok && (use == 0 || use == (rtx) 1))
5507 use = find_use_as_address (PATTERN (insn), reg, -amount);
5511 if (use == 0 || use == (rtx) 1)
/* The increment must exactly match the access size of the MEM.  */
5514 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
5517 /* See if this combination of instruction and addressing mode exists. */
5518 if (! validate_change (insn, &XEXP (use, 0),
5519 gen_rtx_fmt_e (amount > 0
5520 ? (do_post ? POST_INC : PRE_INC)
5521 : (do_post ? POST_DEC : PRE_DEC),
5525 /* Record that this insn now has an implicit side effect on X. */
5526 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
5530 #endif /* AUTO_INC_DEC */
5532 /* Find the place in the rtx X where REG is used as a memory address.
5533 Return the MEM rtx that so uses it.
5534 If PLUSCONST is nonzero, search instead for a memory address equivalent to
5535 (plus REG (const_int PLUSCONST)).
5537 If such an address does not appear, return 0.
5538 If REG appears more than once, or is used other than in such an address,
   return (rtx) 1 as an "unsuitable use" marker (see the returns below).  */
5542 find_use_as_address (x, reg, plusconst)
5545 HOST_WIDE_INT plusconst;
5547 enum rtx_code code = GET_CODE (x);
5548 const char *fmt = GET_RTX_FORMAT (code);
5550 register rtx value = 0;
/* Direct match: (mem (reg)) when no offset is requested.  */
5553 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
/* Offset match: (mem (plus (reg) (const_int PLUSCONST))).  */
5556 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
5557 && XEXP (XEXP (x, 0), 0) == reg
5558 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5559 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
5562 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
5564 /* If REG occurs inside a MEM used in a bit-field reference,
5565 that is unacceptable. */
5566 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
5567 return (rtx) (HOST_WIDE_INT) 1;
5571 return (rtx) (HOST_WIDE_INT) 1;
/* Recurse over operands; more than one qualifying use is also rejected.  */
5573 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5577 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
5581 return (rtx) (HOST_WIDE_INT) 1;
5583 else if (fmt[i] == 'E')
5586 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5588 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
5592 return (rtx) (HOST_WIDE_INT) 1;
5600 /* Write information about registers and basic blocks into FILE.
5601 This is part of making a debugging dump. */
/* Dump register set R to OUTF, one " %d" per member; hard regs also get
   their name in brackets.  Prints " (nil)" for a null set.  */
5604 dump_regset (r, outf)
5611 fputs (" (nil)", outf);
5615 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
5617 fprintf (outf, " %d", i);
5618 if (i < FIRST_PSEUDO_REGISTER)
5619 fprintf (outf, " [%s]",
/* NOTE(review): function header elided from this listing; presumably this
   is debug_regset (r) -- dump R to stderr for use from a debugger.  */
5628 dump_regset (r, stderr);
5629 putc ('\n', stderr);
/* Dump per-pseudo register statistics and the CFG (blocks, edges, live
   sets) to FILE, for debugging dumps.  */
5633 dump_flow_info (file)
5637 static const char * const reg_class_names[] = REG_CLASS_NAMES;
5639 fprintf (file, "%d registers.\n", max_regno);
5640 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
5643 enum reg_class class, altclass;
5644 fprintf (file, "\nRegister %d used %d times across %d insns",
5645 i, REG_N_REFS (i), REG_LIVE_LENGTH (i))
5646 if (REG_BASIC_BLOCK (i) >= 0)
5647 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
5649 fprintf (file, "; set %d time%s", REG_N_SETS (i),
5650 (REG_N_SETS (i) == 1) ? "" : "s");
5651 if (REG_USERVAR_P (regno_reg_rtx[i]))
5652 fprintf (file, "; user var");
5653 if (REG_N_DEATHS (i) != 1)
5654 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
5655 if (REG_N_CALLS_CROSSED (i) == 1)
5656 fprintf (file, "; crosses 1 call");
5657 else if (REG_N_CALLS_CROSSED (i))
5658 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
5659 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
5660 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
/* Report preferred/alternate allocation classes when non-default.  */
5661 class = reg_preferred_class (i);
5662 altclass = reg_alternate_class (i);
5663 if (class != GENERAL_REGS || altclass != ALL_REGS)
5665 if (altclass == ALL_REGS || class == ALL_REGS)
5666 fprintf (file, "; pref %s", reg_class_names[(int) class]);
5667 else if (altclass == NO_REGS)
5668 fprintf (file, "; %s or none", reg_class_names[(int) class]);
5670 fprintf (file, "; pref %s, else %s",
5671 reg_class_names[(int) class],
5672 reg_class_names[(int) altclass]);
5674 if (REGNO_POINTER_FLAG (i))
5675 fprintf (file, "; pointer");
5676 fprintf (file, ".\n");
/* Second half: the basic-block structure and per-block live sets.  */
5679 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
5680 for (i = 0; i < n_basic_blocks; i++)
5682 register basic_block bb = BASIC_BLOCK (i);
5685 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d, count %d.\n",
5686 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth, bb->count);
5688 fprintf (file, "Predecessors: ");
5689 for (e = bb->pred; e ; e = e->pred_next)
5690 dump_edge_info (file, e, 0);
5692 fprintf (file, "\nSuccessors: ");
5693 for (e = bb->succ; e ; e = e->succ_next)
5694 dump_edge_info (file, e, 1);
5696 fprintf (file, "\nRegisters live at start:");
5697 dump_regset (bb->global_live_at_start, file);
5699 fprintf (file, "\nRegisters live at end:");
5700 dump_regset (bb->global_live_at_end, file);
/* NOTE(review): header elided; presumably debug_flow_info (void) -- dump
   flow info to stderr for interactive debugging.  */
5711 dump_flow_info (stderr);
/* Print a description of edge E to FILE.  DO_SUCC nonzero means print the
   destination side of the edge, zero means the source side.  */
5715 dump_edge_info (file, e, do_succ)
5720 basic_block side = (do_succ ? e->dest : e->src);
5722 if (side == ENTRY_BLOCK_PTR)
5723 fputs (" ENTRY", file);
5724 else if (side == EXIT_BLOCK_PTR)
5725 fputs (" EXIT", file);
5727 fprintf (file, " %d", side->index);
5730 fprintf (file, " count:%d", e->count);
/* Decode e->flags bit by bit into symbolic names; unknown bits are
   printed numerically.  */
5734 static const char * const bitnames[] = {
5735 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5738 int i, flags = e->flags;
5742 for (i = 0; flags; i++)
5743 if (flags & (1 << i))
5749 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5750 fputs (bitnames[i], file);
5752 fprintf (file, "%d", i);
5760 /* Print out one basic block with live information at start and end. */
/* NOTE(review): the function header is elided from this listing;
   presumably dump_bb (bb, outf).  */
5770 fprintf (outf, ";; Basic block %d, loop depth %d, count %d",
5771 bb->index, bb->loop_depth, bb->count);
5772 if (bb->eh_beg != -1 || bb->eh_end != -1)
5773 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
5776 fputs (";; Predecessors: ", outf);
5777 for (e = bb->pred; e ; e = e->pred_next)
5778 dump_edge_info (outf, e, 0);
5781 fputs (";; Registers live at start:", outf);
5782 dump_regset (bb->global_live_at_start, outf);
/* Print every insn from the block head through the block end.  */
5785 for (insn = bb->head, last = NEXT_INSN (bb->end);
5787 insn = NEXT_INSN (insn))
5788 print_rtl_single (outf, insn);
5790 fputs (";; Registers live at end:", outf);
5791 dump_regset (bb->global_live_at_end, outf);
5794 fputs (";; Successors: ", outf);
5795 for (e = bb->succ; e; e = e->succ_next)
5796 dump_edge_info (outf, e, 1);
/* NOTE(review): headers elided; these are the bodies of the debugger
   helpers debug_bb (bb) and debug_bb_n (n) respectively.  */
5804 dump_bb (bb, stderr);
5811 dump_bb (BASIC_BLOCK(n), stderr);
5814 /* Like print_rtl, but also print out live information for the start of each
/* Dump the insn chain starting at RTX_FIRST to OUTF, annotating each basic
   block boundary with its index and live register sets, and flagging insns
   outside any block or shared between blocks.  */
5818 print_rtl_with_bb (outf, rtx_first)
5822 register rtx tmp_rtx;
5825 fprintf (outf, "(nil)\n");
/* Build uid-indexed maps: which block starts/ends at each insn, and
   whether each insn belongs to zero, one, or multiple blocks.  */
5829 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5830 int max_uid = get_max_uid ();
5831 basic_block *start = (basic_block *)
5832 xcalloc (max_uid, sizeof (basic_block));
5833 basic_block *end = (basic_block *)
5834 xcalloc (max_uid, sizeof (basic_block));
5835 enum bb_state *in_bb_p = (enum bb_state *)
5836 xcalloc (max_uid, sizeof (enum bb_state));
5838 for (i = n_basic_blocks - 1; i >= 0; i--)
5840 basic_block bb = BASIC_BLOCK (i);
5843 start[INSN_UID (bb->head)] = bb;
5844 end[INSN_UID (bb->end)] = bb;
5845 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5847 enum bb_state state = IN_MULTIPLE_BB;
5848 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
5850 in_bb_p[INSN_UID(x)] = state;
/* Walk the whole insn chain, emitting block annotations around insns.  */
5857 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5862 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5864 fprintf (outf, ";; Start of basic block %d, registers live:",
5866 dump_regset (bb->global_live_at_start, outf);
5870 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
5871 && GET_CODE (tmp_rtx) != NOTE
5872 && GET_CODE (tmp_rtx) != BARRIER)
5873 fprintf (outf, ";; Insn is not within a basic block\n");
5874 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
5875 fprintf (outf, ";; Insn is in multiple basic blocks\n");
5877 did_output = print_rtl_single (outf, tmp_rtx);
5879 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
5881 fprintf (outf, ";; End of basic block %d, registers live:\n",
5883 dump_regset (bb->global_live_at_end, outf);
5896 if (current_function_epilogue_delay_list != 0)
5898 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
5899 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
5900 tmp_rtx = XEXP (tmp_rtx, 1))
5901 print_rtl_single (outf, XEXP (tmp_rtx, 0));
5905 /* Compute dominator relationships using new flow graph structures. */
/* Iterative worklist dataflow: DOMINATORS[b] and POST_DOMINATORS[b] are
   sbitmaps over block indices; either argument may be skipped (the
   post-dominator half runs only if POST_DOMINATORS is nonzero).  */
5907 compute_flow_dominators (dominators, post_dominators)
5908 sbitmap *dominators;
5909 sbitmap *post_dominators;
5912 sbitmap *temp_bitmap;
5914 basic_block *worklist, *workend, *qin, *qout;
5917 /* Allocate a worklist array/queue. Entries are only added to the
5918 list if they were not already on the list. So the size is
5919 bounded by the number of basic blocks. */
5920 worklist = (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);
5921 workend = &worklist[n_basic_blocks];
5923 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5924 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5928 /* The optimistic setting of dominators requires us to put every
5929 block on the work list initially. */
5930 qin = qout = worklist;
5931 for (bb = 0; bb < n_basic_blocks; bb++)
5933 *qin++ = BASIC_BLOCK (bb);
5934 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5936 qlen = n_basic_blocks;
5939 /* We want a maximal solution, so initially assume everything dominates
5941 sbitmap_vector_ones (dominators, n_basic_blocks);
5943 /* Mark successors of the entry block so we can identify them below. */
5944 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5945 e->dest->aux = ENTRY_BLOCK_PTR;
5947 /* Iterate until the worklist is empty. */
5950 /* Take the first entry off the worklist. */
5951 basic_block b = *qout++;
5952 if (qout >= workend)
5958 /* Compute the intersection of the dominators of all the
5961 If one of the predecessor blocks is the ENTRY block, then the
5962 intersection of the dominators of the predecessor blocks is
5963 defined as the null set. We can identify such blocks by the
5964 special value in the AUX field in the block structure. */
5965 if (b->aux == ENTRY_BLOCK_PTR)
5967 /* Do not clear the aux field for blocks which are
5968 successors of the ENTRY block. That way we never
5969 add them to the worklist again.
5971 The intersect of dominators of the preds of this block is
5972 defined as the null set. */
5973 sbitmap_zero (temp_bitmap[bb]);
5977 /* Clear the aux field of this block so it can be added to
5978 the worklist again if necessary. */
5980 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
5983 /* Make sure each block always dominates itself. */
5984 SET_BIT (temp_bitmap[bb], bb);
5986 /* If the out state of this block changed, then we need to
5987 add the successors of this block to the worklist if they
5988 are not already on the worklist. */
5989 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
5991 for (e = b->succ; e; e = e->succ_next)
5993 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
/* Second pass: post-dominators, symmetric to the above but walking the
   CFG backwards from the EXIT block.  */
6007 if (post_dominators)
6009 /* The optimistic setting of dominators requires us to put every
6010 block on the work list initially. */
6011 qin = qout = worklist;
6012 for (bb = 0; bb < n_basic_blocks; bb++)
6014 *qin++ = BASIC_BLOCK (bb);
6015 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
6017 qlen = n_basic_blocks;
6020 /* We want a maximal solution, so initially assume everything post
6021 dominates everything else. */
6022 sbitmap_vector_ones (post_dominators, n_basic_blocks);
6024 /* Mark predecessors of the exit block so we can identify them below. */
6025 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
6026 e->src->aux = EXIT_BLOCK_PTR;
6028 /* Iterate until the worklist is empty. */
6031 /* Take the first entry off the worklist. */
6032 basic_block b = *qout++;
6033 if (qout >= workend)
6039 /* Compute the intersection of the post dominators of all the
6042 If one of the successor blocks is the EXIT block, then the
6043 intersection of the dominators of the successor blocks is
6044 defined as the null set. We can identify such blocks by the
6045 special value in the AUX field in the block structure. */
6046 if (b->aux == EXIT_BLOCK_PTR)
6048 /* Do not clear the aux field for blocks which are
6049 predecessors of the EXIT block. That way we never
6050 add them to the worklist again.
6052 The intersect of dominators of the succs of this block is
6053 defined as the null set. */
6054 sbitmap_zero (temp_bitmap[bb]);
6058 /* Clear the aux field of this block so it can be added to
6059 the worklist again if necessary. */
6061 sbitmap_intersection_of_succs (temp_bitmap[bb],
6062 post_dominators, bb);
6065 /* Make sure each block always post dominates itself. */
6066 SET_BIT (temp_bitmap[bb], bb);
6068 /* If the out state of this block changed, then we need to
6069 add the predecessors of this block to the worklist if they
6070 are not already on the worklist. */
6071 if (sbitmap_a_and_b (post_dominators[bb],
6072 post_dominators[bb],
6075 for (e = b->pred; e; e = e->pred_next)
6077 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
6095 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
/* For each block b, the immediate dominator is the unique element left in
   dom(b) - {b} after removing every dominator's own dominators.  */
6098 compute_immediate_dominators (idom, dominators)
6100 sbitmap *dominators;
6105 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
6107 /* Begin with tmp(n) = dom(n) - { n }. */
6108 for (b = n_basic_blocks; --b >= 0; )
6110 sbitmap_copy (tmp[b], dominators[b]);
6111 RESET_BIT (tmp[b], b);
6114 /* Subtract out all of our dominator's dominators. */
6115 for (b = n_basic_blocks; --b >= 0; )
6117 sbitmap tmp_b = tmp[b];
6120 for (s = n_basic_blocks; --s >= 0; )
6121 if (TEST_BIT (tmp_b, s))
6122 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
6125 /* Find the one bit set in the bitmap and put it in the output array. */
6126 for (b = n_basic_blocks; --b >= 0; )
6129 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
6132 sbitmap_vector_free (tmp);
6135 /* Recompute register set/reference counts immediately prior to register
6138 This avoids problems with set/reference counts changing to/from values
6139 which have special meanings to the register allocators.
6141 Additionally, the reference counts are the primary component used by the
6142 register allocators to prioritize pseudos for allocation to hard regs.
6143 More accurate reference counts generally lead to better register allocation.
6145 F is the first insn to be scanned.
6147 LOOP_STEP denotes how much loop_depth should be incremented per
6148 loop nesting level in order to increase the ref count more for
6149 references in a loop.
6151 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
6152 possibly other information which is used by the register allocators. */
/* Note: both parameters are now unused; the work is delegated entirely
   to update_life_info with PROP_REG_INFO.  */
6155 recompute_reg_usage (f, loop_step)
6156 rtx f ATTRIBUTE_UNUSED;
6157 int loop_step ATTRIBUTE_UNUSED;
6159 allocate_reg_life_data ();
6160 update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
6163 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
6164 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
6165 of the number of registers that died. */
/* KILL nonzero means actually delete the notes; otherwise only count.
   NOTE(review): listing is elided -- counting/removal branches are
   partially missing below.  */
6168 count_or_remove_death_notes (blocks, kill)
6174 for (i = n_basic_blocks - 1; i >= 0; --i)
6179 if (blocks && ! TEST_BIT (blocks, i))
6182 bb = BASIC_BLOCK (i);
/* Walk every real insn in the block and scan its note list.  */
6184 for (insn = bb->head; ; insn = NEXT_INSN (insn))
6186 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
6188 rtx *pprev = &REG_NOTES (insn);
6193 switch (REG_NOTE_KIND (link))
6196 if (GET_CODE (XEXP (link, 0)) == REG)
6198 rtx reg = XEXP (link, 0);
/* A hard reg may span several registers; count each one.  */
6201 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
6204 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
/* KILL case: splice the note out of the list and free it.  */
6212 rtx next = XEXP (link, 1);
6213 free_EXPR_LIST_node (link);
6214 *pprev = link = next;
6220 pprev = &XEXP (link, 1);
6227 if (insn == bb->end)
6235 /* Record INSN's block as BB. */
/* Grows the uid-indexed basic_block_for_insn varray on demand before
   storing the mapping.  */
6238 set_block_for_insn (insn, bb)
6242 size_t uid = INSN_UID (insn);
6243 if (uid >= basic_block_for_insn->num_elements)
6247 /* Add one-eighth the size so we don't keep calling xrealloc. */
6248 new_size = uid + (uid + 7) / 8;
6250 VARRAY_GROW (basic_block_for_insn, new_size);
6252 VARRAY_BB (basic_block_for_insn, uid) = bb;
6255 /* Record INSN's block number as BB. */
6256 /* ??? This has got to go. */
/* Thin wrapper: translate the block index BB to a basic_block pointer
   and delegate to set_block_for_insn.  */
6259 set_block_num (insn, bb)
6263 set_block_for_insn (insn, BASIC_BLOCK (bb));
6266 /* Verify the CFG consistency. This function checks some CFG invariants and
6267 aborts when something is wrong. The hope is that this function will help
6268 convert many optimization passes to preserve CFG consistency.
6270 Currently it does the following checks:
6272 - test head/end pointers
6273 - overlapping of basic blocks
6274 - edge list correctness
6275 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
6276 - tails of basic blocks (ensure that boundary is necessary)
6277 - scans body of the basic block for JUMP_INSN, CODE_LABEL
6278 and NOTE_INSN_BASIC_BLOCK
6279 - check that all insns are in the basic blocks
6280 (except the switch handling code, barriers and notes)
6281 - check that all returns are followed by barriers
6283 In the future it can be extended to check a lot of other stuff as well
6284 (reachability of basic blocks, life information, etc. etc.). */
6289 const int max_uid = get_max_uid ();
6290 const rtx rtx_first = get_insns ();
6291 basic_block *bb_info;
6293 int i, last_bb_num_seen, num_bb_notes, err = 0;
6295 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
6297 /* First pass check head/end pointers and set bb_info array used by
6299 for (i = n_basic_blocks - 1; i >= 0; i--)
6301 basic_block bb = BASIC_BLOCK (i);
6303 /* Check the head pointer and make sure that it is pointing into
6305 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
6310 error ("Head insn %d for block %d not found in the insn stream.",
6311 INSN_UID (bb->head), bb->index);
6315 /* Check the end pointer and make sure that it is pointing into
6317 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
6319 if (bb_info[INSN_UID (x)] != NULL)
6321 error ("Insn %d is in multiple basic blocks (%d and %d)",
6322 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
6325 bb_info[INSN_UID (x)] = bb;
6332 error ("End insn %d for block %d not found in the insn stream.",
6333 INSN_UID (bb->end), bb->index);
6338 /* Now check the basic blocks (boundaries etc.) */
6339 for (i = n_basic_blocks - 1; i >= 0; i--)
6341 basic_block bb = BASIC_BLOCK (i);
6342 /* Check corectness of edge lists */
6350 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
6352 fprintf (stderr, "Predecessor: ");
6353 dump_edge_info (stderr, e, 0);
6354 fprintf (stderr, "\nSuccessor: ");
6355 dump_edge_info (stderr, e, 1);
6359 if (e->dest != EXIT_BLOCK_PTR)
6361 edge e2 = e->dest->pred;
6362 while (e2 && e2 != e)
6366 error ("Basic block %i edge lists are corrupted", bb->index);
6378 error ("Basic block %d pred edge is corrupted", bb->index);
6379 fputs ("Predecessor: ", stderr);
6380 dump_edge_info (stderr, e, 0);
6381 fputs ("\nSuccessor: ", stderr);
6382 dump_edge_info (stderr, e, 1);
6383 fputc ('\n', stderr);
6386 if (e->src != ENTRY_BLOCK_PTR)
6388 edge e2 = e->src->succ;
6389 while (e2 && e2 != e)
6393 error ("Basic block %i edge lists are corrupted", bb->index);
6400 /* OK pointers are correct. Now check the header of basic
6401 block. It ought to contain optional CODE_LABEL followed
6402 by NOTE_BASIC_BLOCK. */
6404 if (GET_CODE (x) == CODE_LABEL)
6408 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
6414 if (GET_CODE (x) != NOTE
6415 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
6416 || NOTE_BASIC_BLOCK (x) != bb)
6418 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
6425 /* Do checks for empty blocks here */
6432 if (GET_CODE (x) == NOTE
6433 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
6435 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
6436 INSN_UID (x), bb->index);
6443 if (GET_CODE (x) == JUMP_INSN
6444 || GET_CODE (x) == CODE_LABEL
6445 || GET_CODE (x) == BARRIER)
6447 error ("In basic block %d:", bb->index);
6448 fatal_insn ("Flow control insn inside a basic block", x);
6456 last_bb_num_seen = -1;
6461 if (GET_CODE (x) == NOTE
6462 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
6464 basic_block bb = NOTE_BASIC_BLOCK (x);
6466 if (bb->index != last_bb_num_seen + 1)
6467 fatal ("Basic blocks not numbered consecutively");
6468 last_bb_num_seen = bb->index;
6471 if (!bb_info[INSN_UID (x)])
6473 switch (GET_CODE (x))
6480 /* An addr_vec is placed outside any basic block. */
6482 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
6483 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
6484 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
6489 /* But in any case, non-deletable labels can appear anywhere. */
6493 fatal_insn ("Insn outside basic block", x);
6497 if (GET_RTX_CLASS (GET_CODE (x)) == 'i'
6498 && GET_CODE (x) == JUMP_INSN
6499 && returnjump_p (x) && ! condjump_p (x)
6500 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
6501 fatal_insn ("Return not followed by barrier", x);
6506 if (num_bb_notes != n_basic_blocks)
6507 fatal ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
6508 num_bb_notes, n_basic_blocks);
6517 /* Functions to access an edge list with a vector representation.
6518 Enough data is kept such that given an index number, the
6519 pred and succ that edge represents can be determined, or
6520 given a pred and a succ, its index number can be returned.
6521 This allows algorithms which consume a lot of memory to
6522 represent the normally full matrix of edge (pred,succ) with a
6523 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6524 wasted space in the client code due to sparse flow graphs. */
6526 /* This function initializes the edge list. Basically the entire
6527 flowgraph is processed, and all edges are assigned a number,
6528 and the data structure is filled in. */
6532 struct edge_list *elist;
6538 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6542 /* Determine the number of edges in the flow graph by counting successor
6543 edges on each basic block. */
6544 for (x = 0; x < n_basic_blocks; x++)
6546 basic_block bb = BASIC_BLOCK (x);
6548 for (e = bb->succ; e; e = e->succ_next)
6551 /* Don't forget successors of the entry block. */
6552 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6555 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6556 elist->num_blocks = block_count;
6557 elist->num_edges = num_edges;
6558 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6562 /* Follow successors of the entry block, and register these edges. */
6563 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6565 elist->index_to_edge[num_edges] = e;
6569 for (x = 0; x < n_basic_blocks; x++)
6571 basic_block bb = BASIC_BLOCK (x);
6573 /* Follow all successors of blocks, and register these edges. */
6574 for (e = bb->succ; e; e = e->succ_next)
6576 elist->index_to_edge[num_edges] = e;
6583 /* This function free's memory associated with an edge list. */
6585 free_edge_list (elist)
6586 struct edge_list *elist;
6590 free (elist->index_to_edge);
6595 /* This function provides debug output showing an edge list. */
6597 print_edge_list (f, elist)
6599 struct edge_list *elist;
6602 fprintf(f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6603 elist->num_blocks - 2, elist->num_edges);
6605 for (x = 0; x < elist->num_edges; x++)
6607 fprintf (f, " %-4d - edge(", x);
6608 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6609 fprintf (f,"entry,");
6611 fprintf (f,"%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6613 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6614 fprintf (f,"exit)\n");
6616 fprintf (f,"%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6620 /* This function provides an internal consistancy check of an edge list,
6621 verifying that all edges are present, and that there are no
6624 verify_edge_list (f, elist)
6626 struct edge_list *elist;
6628 int x, pred, succ, index;
6631 for (x = 0; x < n_basic_blocks; x++)
6633 basic_block bb = BASIC_BLOCK (x);
6635 for (e = bb->succ; e; e = e->succ_next)
6637 pred = e->src->index;
6638 succ = e->dest->index;
6639 index = EDGE_INDEX (elist, e->src, e->dest);
6640 if (index == EDGE_INDEX_NO_EDGE)
6642 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6645 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6646 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6647 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6648 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6649 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6650 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6653 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6655 pred = e->src->index;
6656 succ = e->dest->index;
6657 index = EDGE_INDEX (elist, e->src, e->dest);
6658 if (index == EDGE_INDEX_NO_EDGE)
6660 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6663 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6664 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6665 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6666 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6667 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6668 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6670 /* We've verified that all the edges are in the list, no lets make sure
6671 there are no spurious edges in the list. */
6673 for (pred = 0 ; pred < n_basic_blocks; pred++)
6674 for (succ = 0 ; succ < n_basic_blocks; succ++)
6676 basic_block p = BASIC_BLOCK (pred);
6677 basic_block s = BASIC_BLOCK (succ);
6681 for (e = p->succ; e; e = e->succ_next)
6687 for (e = s->pred; e; e = e->pred_next)
6693 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6694 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6695 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6697 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6698 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6699 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6700 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6701 BASIC_BLOCK (succ)));
6703 for (succ = 0 ; succ < n_basic_blocks; succ++)
6705 basic_block p = ENTRY_BLOCK_PTR;
6706 basic_block s = BASIC_BLOCK (succ);
6710 for (e = p->succ; e; e = e->succ_next)
6716 for (e = s->pred; e; e = e->pred_next)
6722 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6723 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6724 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6726 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6727 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6728 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6729 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6730 BASIC_BLOCK (succ)));
6732 for (pred = 0 ; pred < n_basic_blocks; pred++)
6734 basic_block p = BASIC_BLOCK (pred);
6735 basic_block s = EXIT_BLOCK_PTR;
6739 for (e = p->succ; e; e = e->succ_next)
6745 for (e = s->pred; e; e = e->pred_next)
6751 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6752 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6753 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6755 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6756 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6757 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6758 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6763 /* This routine will determine what, if any, edge there is between
6764 a specified predecessor and successor. */
6767 find_edge_index (edge_list, pred, succ)
6768 struct edge_list *edge_list;
6769 basic_block pred, succ;
6772 for (x = 0; x < NUM_EDGES (edge_list); x++)
6774 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6775 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6778 return (EDGE_INDEX_NO_EDGE);
6781 /* This function will remove an edge from the flow graph. */
6786 edge last_pred = NULL;
6787 edge last_succ = NULL;
6789 basic_block src, dest;
6792 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6798 last_succ->succ_next = e->succ_next;
6800 src->succ = e->succ_next;
6802 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6808 last_pred->pred_next = e->pred_next;
6810 dest->pred = e->pred_next;
6816 /* This routine will remove any fake successor edges for a basic block.
6817 When the edge is removed, it is also removed from whatever predecessor
6820 remove_fake_successors (bb)
6824 for (e = bb->succ; e ; )
6828 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6833 /* This routine will remove all fake edges from the flow graph. If
6834 we remove all fake successors, it will automatically remove all
6835 fake predecessors. */
6837 remove_fake_edges ()
6841 for (x = 0; x < n_basic_blocks; x++)
6842 remove_fake_successors (BASIC_BLOCK (x));
6844 /* We've handled all successors except the entry block's. */
6845 remove_fake_successors (ENTRY_BLOCK_PTR);
6848 /* This functions will add a fake edge between any block which has no
6849 successors, and the exit block. Some data flow equations require these
6852 add_noreturn_fake_exit_edges ()
6856 for (x = 0; x < n_basic_blocks; x++)
6857 if (BASIC_BLOCK (x)->succ == NULL)
6858 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
6861 /* Redirect an edge's successor from one block to another. */
6864 redirect_edge_succ (e, new_succ)
6866 basic_block new_succ;
6870 /* Disconnect the edge from the old successor block. */
6871 for (pe = &e->dest->pred; *pe != e ; pe = &(*pe)->pred_next)
6873 *pe = (*pe)->pred_next;
6875 /* Reconnect the edge to the new successor block. */
6876 e->pred_next = new_succ->pred;
6881 /* Redirect an edge's predecessor from one block to another. */
6884 redirect_edge_pred (e, new_pred)
6886 basic_block new_pred;
6890 /* Disconnect the edge from the old predecessor block. */
6891 for (pe = &e->src->succ; *pe != e ; pe = &(*pe)->succ_next)
6893 *pe = (*pe)->succ_next;
6895 /* Reconnect the edge to the new predecessor block. */
6896 e->succ_next = new_pred->succ;
6901 /* Dump the list of basic blocks in the bitmap NODES. */
6903 flow_nodes_print (str, nodes, file)
6905 const sbitmap nodes;
6910 fprintf (file, "%s { ", str);
6911 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
6912 fputs ("}\n", file);
6916 /* Dump the list of exiting edges in the array EDGES. */
6918 flow_exits_print (str, edges, num_edges, file)
6926 fprintf (file, "%s { ", str);
6927 for (i = 0; i < num_edges; i++)
6928 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
6929 fputs ("}\n", file);
6933 /* Dump loop related CFG information. */
6935 flow_loops_cfg_dump (loops, file)
6936 const struct loops *loops;
6941 if (! loops->num || ! file || ! loops->cfg.dom)
6944 for (i = 0; i < n_basic_blocks; i++)
6948 fprintf (file, ";; %d succs { ", i);
6949 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
6950 fprintf (file, "%d ", succ->dest->index);
6951 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
6955 /* Dump the DFS node order. */
6956 if (loops->cfg.dfs_order)
6958 fputs (";; DFS order: ", file);
6959 for (i = 0; i < n_basic_blocks; i++)
6960 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
6966 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
6968 flow_loop_nested_p (outer, loop)
6972 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
6976 /* Dump the loop information specified by LOOPS to the stream FILE. */
6978 flow_loops_dump (loops, file, verbose)
6979 const struct loops *loops;
6986 num_loops = loops->num;
6987 if (! num_loops || ! file)
6990 fprintf (file, ";; %d loops found, %d levels\n",
6991 num_loops, loops->levels);
6993 for (i = 0; i < num_loops; i++)
6995 struct loop *loop = &loops->array[i];
6997 fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
6998 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
6999 loop->header->index, loop->latch->index,
7000 loop->pre_header ? loop->pre_header->index : -1,
7001 loop->depth, loop->level,
7002 (long) (loop->outer ? (loop->outer - loops->array) : -1));
7003 fprintf (file, ";; %d", loop->num_nodes);
7004 flow_nodes_print (" nodes", loop->nodes, file);
7005 fprintf (file, ";; %d", loop->num_exits);
7006 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
7012 for (j = 0; j < i; j++)
7014 struct loop *oloop = &loops->array[j];
7016 if (loop->header == oloop->header)
7021 smaller = loop->num_nodes < oloop->num_nodes;
7023 /* If the union of LOOP and OLOOP is different than
7024 the larger of LOOP and OLOOP then LOOP and OLOOP
7025 must be disjoint. */
7026 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
7027 smaller ? oloop : loop);
7028 fprintf (file, ";; loop header %d shared by loops %d, %d %s\n",
7029 loop->header->index, i, j,
7030 disjoint ? "disjoint" : "nested");
7037 /* Print diagnostics to compare our concept of a loop with
7038 what the loop notes say. */
7039 if (GET_CODE (PREV_INSN (loop->first->head)) != NOTE
7040 || NOTE_LINE_NUMBER (PREV_INSN (loop->first->head))
7041 != NOTE_INSN_LOOP_BEG)
7042 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
7043 INSN_UID (PREV_INSN (loop->first->head)));
7044 if (GET_CODE (NEXT_INSN (loop->last->end)) != NOTE
7045 || NOTE_LINE_NUMBER (NEXT_INSN (loop->last->end))
7046 != NOTE_INSN_LOOP_END)
7047 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
7048 INSN_UID (NEXT_INSN (loop->last->end)));
7053 flow_loops_cfg_dump (loops, file);
7057 /* Free all the memory allocated for LOOPS. */
7059 flow_loops_free (loops)
7060 struct loops *loops;
7069 /* Free the loop descriptors. */
7070 for (i = 0; i < loops->num; i++)
7072 struct loop *loop = &loops->array[i];
7075 sbitmap_free (loop->nodes);
7079 free (loops->array);
7080 loops->array = NULL;
7083 sbitmap_vector_free (loops->cfg.dom);
7084 if (loops->cfg.dfs_order)
7085 free (loops->cfg.dfs_order);
7087 sbitmap_free (loops->shared_headers);
7092 /* Find the exits from the loop using the bitmap of loop nodes NODES
7093 and store in EXITS array. Return the number of exits from the
7096 flow_loop_exits_find (nodes, exits)
7097 const sbitmap nodes;
7106 /* Check all nodes within the loop to see if there are any
7107 successors not in the loop. Note that a node may have multiple
7110 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
7111 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
7113 basic_block dest = e->dest;
7115 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
7123 *exits = (edge *) xmalloc (num_exits * sizeof (edge *));
7125 /* Store all exiting edges into an array. */
7127 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
7128 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
7130 basic_block dest = e->dest;
7132 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
7133 (*exits)[num_exits++] = e;
7141 /* Find the nodes contained within the loop with header HEADER and
7142 latch LATCH and store in NODES. Return the number of nodes within
7145 flow_loop_nodes_find (header, latch, nodes)
7154 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
7157 /* Start with only the loop header in the set of loop nodes. */
7158 sbitmap_zero (nodes);
7159 SET_BIT (nodes, header->index);
7161 header->loop_depth++;
7163 /* Push the loop latch on to the stack. */
7164 if (! TEST_BIT (nodes, latch->index))
7166 SET_BIT (nodes, latch->index);
7167 latch->loop_depth++;
7169 stack[sp++] = latch;
7178 for (e = node->pred; e; e = e->pred_next)
7180 basic_block ancestor = e->src;
7182 /* If each ancestor not marked as part of loop, add to set of
7183 loop nodes and push on to stack. */
7184 if (ancestor != ENTRY_BLOCK_PTR
7185 && ! TEST_BIT (nodes, ancestor->index))
7187 SET_BIT (nodes, ancestor->index);
7188 ancestor->loop_depth++;
7190 stack[sp++] = ancestor;
7199 /* Compute the depth first search order and store in the array
7200 DFS_ORDER, marking the nodes visited in VISITED. Returns the
7201 number of nodes visited. */
7203 flow_depth_first_order_compute (dfs_order)
7212 /* Allocate stack for back-tracking up CFG. */
7213 stack = (edge *) xmalloc (n_basic_blocks * sizeof (edge));
7216 /* Allocate bitmap to track nodes that have been visited. */
7217 visited = sbitmap_alloc (n_basic_blocks);
7219 /* None of the nodes in the CFG have been visited yet. */
7220 sbitmap_zero (visited);
7222 /* Start with the first successor edge from the entry block. */
7223 e = ENTRY_BLOCK_PTR->succ;
7226 basic_block src = e->src;
7227 basic_block dest = e->dest;
7229 /* Mark that we have visited this node. */
7230 if (src != ENTRY_BLOCK_PTR)
7231 SET_BIT (visited, src->index);
7233 /* If this node has not been visited before, push the current
7234 edge on to the stack and proceed with the first successor
7235 edge of this node. */
7236 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
7244 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
7247 /* DEST has no successors (for example, a non-returning
7248 function is called) so do not push the current edge
7249 but carry on with its next successor. */
7250 dfs_order[dest->index] = n_basic_blocks - ++dfsnum;
7251 SET_BIT (visited, dest->index);
7254 while (! e->succ_next && src != ENTRY_BLOCK_PTR)
7256 dfs_order[src->index] = n_basic_blocks - ++dfsnum;
7258 /* Pop edge off stack. */
7266 sbitmap_free (visited);
7268 /* The number of nodes visited should not be greater than
7270 if (dfsnum > n_basic_blocks)
7273 /* There are some nodes left in the CFG that are unreachable. */
7274 if (dfsnum < n_basic_blocks)
7280 /* Return the block for the pre-header of the loop with header
7281 HEADER where DOM specifies the dominator information. Return NULL if
7282 there is no pre-header. */
7284 flow_loop_pre_header_find (header, dom)
7288 basic_block pre_header;
7291 /* If block p is a predecessor of the header and is the only block
7292 that the header does not dominate, then it is the pre-header. */
7294 for (e = header->pred; e; e = e->pred_next)
7296 basic_block node = e->src;
7298 if (node != ENTRY_BLOCK_PTR
7299 && ! TEST_BIT (dom[node->index], header->index))
7301 if (pre_header == NULL)
7305 /* There are multiple edges into the header from outside
7306 the loop so there is no pre-header block. */
7316 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
7317 previously added. The insertion algorithm assumes that the loops
7318 are added in the order found by a depth first search of the CFG. */
7320 flow_loop_tree_node_add (prevloop, loop)
7321 struct loop *prevloop;
7325 if (flow_loop_nested_p (prevloop, loop))
7327 prevloop->inner = loop;
7328 loop->outer = prevloop;
7332 while (prevloop->outer)
7334 if (flow_loop_nested_p (prevloop->outer, loop))
7336 prevloop->next = loop;
7337 loop->outer = prevloop->outer;
7340 prevloop = prevloop->outer;
7343 prevloop->next = loop;
7348 /* Build the loop hierarchy tree for LOOPS. */
7350 flow_loops_tree_build (loops)
7351 struct loops *loops;
7356 num_loops = loops->num;
7360 /* Root the loop hierarchy tree with the first loop found.
7361 Since we used a depth first search this should be the
7363 loops->tree = &loops->array[0];
7364 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
7366 /* Add the remaining loops to the tree. */
7367 for (i = 1; i < num_loops; i++)
7368 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
7372 /* Helper function to compute loop nesting depth and enclosed loop level
7373 for the natural loop specified by LOOP at the loop depth DEPTH.
7374 Returns the loop level. */
7376 flow_loop_level_compute (loop, depth)
7386 /* Traverse loop tree assigning depth and computing level as the
7387 maximum level of all the inner loops of this loop. The loop
7388 level is equivalent to the height of the loop in the loop tree
7389 and corresponds to the number of enclosed loop levels (including
7391 for (inner = loop->inner; inner; inner = inner->next)
7395 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
7400 loop->level = level;
7401 loop->depth = depth;
7406 /* Compute the loop nesting depth and enclosed loop level for the loop
7407 hierarchy tree specfied by LOOPS. Return the maximum enclosed loop
7411 flow_loops_level_compute (loops)
7412 struct loops *loops;
7418 /* Traverse all the outer level loops. */
7419 for (loop = loops->tree; loop; loop = loop->next)
7421 level = flow_loop_level_compute (loop, 1);
7429 /* Find all the natural loops in the function and save in LOOPS structure
7430 and recalculate loop_depth information in basic block structures.
7431 Return the number of natural loops found. */
7434 flow_loops_find (loops)
7435 struct loops *loops;
7446 loops->array = NULL;
7450 /* Taking care of this degenerate case makes the rest of
7451 this code simpler. */
7452 if (n_basic_blocks == 0)
7455 /* Compute the dominators. */
7456 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
7457 compute_flow_dominators (dom, NULL);
7459 /* Count the number of loop edges (back edges). This should be the
7460 same as the number of natural loops. Also clear the loop_depth
7461 and as we work from inner->outer in a loop nest we call
7462 find_loop_nodes_find which will increment loop_depth for nodes
7463 within the current loop, which happens to enclose inner loops. */
7466 for (b = 0; b < n_basic_blocks; b++)
7468 BASIC_BLOCK (b)->loop_depth = 0;
7469 for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
7471 basic_block latch = e->src;
7473 /* Look for back edges where a predecessor is dominated
7474 by this block. A natural loop has a single entry
7475 node (header) that dominates all the nodes in the
7476 loop. It also has single back edge to the header
7477 from a latch node. Note that multiple natural loops
7478 may share the same header. */
7479 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
7486 /* Compute depth first search order of the CFG so that outer
7487 natural loops will be found before inner natural loops. */
7488 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
7489 flow_depth_first_order_compute (dfs_order);
7491 /* Allocate loop structures. */
7493 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
7495 headers = sbitmap_alloc (n_basic_blocks);
7496 sbitmap_zero (headers);
7498 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
7499 sbitmap_zero (loops->shared_headers);
7501 /* Find and record information about all the natural loops
7504 for (b = 0; b < n_basic_blocks; b++)
7508 /* Search the nodes of the CFG in DFS order that we can find
7509 outer loops first. */
7510 header = BASIC_BLOCK (dfs_order[b]);
7512 /* Look for all the possible latch blocks for this header. */
7513 for (e = header->pred; e; e = e->pred_next)
7515 basic_block latch = e->src;
7517 /* Look for back edges where a predecessor is dominated
7518 by this block. A natural loop has a single entry
7519 node (header) that dominates all the nodes in the
7520 loop. It also has single back edge to the header
7521 from a latch node. Note that multiple natural loops
7522 may share the same header. */
7523 if (latch != ENTRY_BLOCK_PTR
7524 && TEST_BIT (dom[latch->index], header->index))
7528 loop = loops->array + num_loops;
7530 loop->header = header;
7531 loop->latch = latch;
7533 /* Keep track of blocks that are loop headers so
7534 that we can tell which loops should be merged. */
7535 if (TEST_BIT (headers, header->index))
7536 SET_BIT (loops->shared_headers, header->index);
7537 SET_BIT (headers, header->index);
7539 /* Find nodes contained within the loop. */
7540 loop->nodes = sbitmap_alloc (n_basic_blocks);
7542 = flow_loop_nodes_find (header, latch, loop->nodes);
7544 /* Compute first and last blocks within the loop.
7545 These are often the same as the loop header and
7546 loop latch respectively, but this is not always
7549 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
7551 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
7553 /* Find edges which exit the loop. Note that a node
7554 may have several exit edges. */
7556 = flow_loop_exits_find (loop->nodes, &loop->exits);
7558 /* Look to see if the loop has a pre-header node. */
7560 = flow_loop_pre_header_find (header, dom);
7567 /* Natural loops with shared headers may either be disjoint or
7568 nested. Disjoint loops with shared headers cannot be inner
7569 loops and should be merged. For now just mark loops that share
7571 for (i = 0; i < num_loops; i++)
7572 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
7573 loops->array[i].shared = 1;
7575 sbitmap_free (headers);
7578 loops->num = num_loops;
7580 /* Save CFG derived information to avoid recomputing it. */
7581 loops->cfg.dom = dom;
7582 loops->cfg.dfs_order = dfs_order;
7584 /* Build the loop hierarchy tree. */
7585 flow_loops_tree_build (loops);
7587 /* Assign the loop nesting depth and enclosed loop level for each
7589 loops->levels = flow_loops_level_compute (loops);
7595 /* Return non-zero if edge E enters header of LOOP from outside of LOOP. */
7598 flow_loop_outside_edge_p (loop, e)
7599 const struct loop *loop;
7602 if (e->dest != loop->header)
7604 return (e->src == ENTRY_BLOCK_PTR)
7605 || ! TEST_BIT (loop->nodes, e->src->index);
7609 /* Clear LOG_LINKS fields of insns in a chain. */
7612 clear_log_links (insns)
7616 for (i = insns; i; i = NEXT_INSN (i))
7617 if (GET_RTX_CLASS (GET_CODE (i)) == 'i')
7621 /* Given a register bitmap, turn on the bits in a HARD_REG_SET that
7622 correspond to the hard registers, if any, set in that map. This
7623 could be done far more efficiently by having all sorts of special-cases
7624 with moving single words, but probably isn't worth the trouble. */
7627 reg_set_to_hard_reg_set (to, from)
7633 EXECUTE_IF_SET_IN_BITMAP
7636 if (i >= FIRST_PSEUDO_REGISTER)
7638 SET_HARD_REG_BIT (*to, i);