1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
23 /* This file contains the data flow analysis pass of the compiler. It
24 computes data flow information which tells combine_instructions
25 which insns to consider combining and controls register allocation.
27 Additional data flow information that is too bulky to record is
28 generated during the analysis, and is used at that time to create
29 autoincrement and autodecrement addressing.
31 The first step is dividing the function into basic blocks.
32 find_basic_blocks does this. Then life_analysis determines
33 where each register is live and where it is dead.
35 ** find_basic_blocks **
37 find_basic_blocks divides the current function's rtl into basic
38 blocks and constructs the CFG. The blocks are recorded in the
39 basic_block_info array; the CFG exists in the edge structures
40 referenced by the blocks.
42 find_basic_blocks also finds any unreachable loops and deletes them.
46 life_analysis is called immediately after find_basic_blocks.
47 It uses the basic block information to determine where each
48 hard or pseudo register is live.
50 ** live-register info **
52 The information about where each register is live is in two parts:
53 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
55 basic_block->global_live_at_start has an element for each basic
56 block, and the element is a bit-vector with a bit for each hard or
57 pseudo register. The bit is 1 if the register is live at the
58 beginning of the basic block.
60 Two types of elements can be added to an insn's REG_NOTES.
61 A REG_DEAD note is added to an insn's REG_NOTES for any register
62 that meets both of two conditions: The value in the register is not
63 needed in subsequent insns and the insn does not replace the value in
64 the register (in the case of multi-word hard registers, the value in
65 each register must be replaced by the insn to avoid a REG_DEAD note).
67 In the vast majority of cases, an object in a REG_DEAD note will be
68 used somewhere in the insn. The (rare) exception to this is if an
69 insn uses a multi-word hard register and only some of the registers are
70 needed in subsequent insns. In that case, REG_DEAD notes will be
71 provided for those hard registers that are not subsequently needed.
72 Partial REG_DEAD notes of this type do not occur when an insn sets
73 only some of the hard registers used in such a multi-word operand;
74 omitting REG_DEAD notes for objects stored in an insn is optional and
75 the desire to do so does not justify the complexity of the partial
78 REG_UNUSED notes are added for each register that is set by the insn
79 but is unused subsequently (if every register set by the insn is unused
80 and the insn does not reference memory or have some other side-effect,
81 the insn is deleted instead). If only part of a multi-word hard
82 register is used in a subsequent insn, REG_UNUSED notes are made for
83 the parts that will not be used.
85 To determine which registers are live after any insn, one can
86 start from the beginning of the basic block and scan insns, noting
87 which registers are set by each insn and which die there.
89 ** Other actions of life_analysis **
91 life_analysis sets up the LOG_LINKS fields of insns because the
92 information needed to do so is readily available.
94 life_analysis deletes insns whose only effect is to store a value that is never used.
97 life_analysis notices cases where a reference to a register as
98 a memory address can be combined with a preceding or following
99 incrementation or decrementation of the register. The separate
100 instruction to increment or decrement is deleted and the address
101 is changed to a POST_INC or similar rtx.
103 Each time an incrementing or decrementing address is created,
104 a REG_INC element is added to the insn's REG_NOTES list.
106 life_analysis fills in certain vectors containing information about
107 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
108 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
110 life_analysis sets current_function_sp_is_unchanging if the function
111 doesn't modify the stack pointer. */
115 Split out from life_analysis:
116 - local property discovery (bb->local_live, bb->local_set)
117 - global property computation
119 - pre/post modify transformation
127 #include "basic-block.h"
128 #include "insn-config.h"
130 #include "hard-reg-set.h"
133 #include "function.h"
137 #include "insn-flags.h"
141 #include "splay-tree.h"
143 #define obstack_chunk_alloc xmalloc
144 #define obstack_chunk_free free
147 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
148 the stack pointer does not matter. The value is tested only in
149 functions that have frame pointers.
150 No definition is equivalent to always zero. */
151 #ifndef EXIT_IGNORE_STACK
152 #define EXIT_IGNORE_STACK 0
155 #ifndef HAVE_epilogue
156 #define HAVE_epilogue 0
158 #ifndef HAVE_prologue
159 #define HAVE_prologue 0
161 #ifndef HAVE_sibcall_epilogue
162 #define HAVE_sibcall_epilogue 0
/* NOTE(review): this extract is sampled — the embedded original line numbers
   are non-contiguous, so several declarations below (e.g. the initializers of
   entry_exit_blocks and the bodies of struct reg_cond_life_info and
   struct propagate_block_info) are incomplete fragments.  Code is preserved
   byte-for-byte; only comments were touched.  */
165 /* The contents of the current function definition are allocated
166 in this obstack, and all are freed at the end of the function.
167 For top-level functions, this is temporary_obstack.
168 Separate obstacks are made for nested functions. */
170 extern struct obstack *function_obstack;
172 /* Number of basic blocks in the current function. */
176 /* Number of edges in the current function. */
180 /* The basic block array. */
182 varray_type basic_block_info;
184 /* The special entry and exit blocks. */
186 struct basic_block_def entry_exit_blocks[2]
191 NULL, /* local_set */
192 NULL, /* global_live_at_start */
193 NULL, /* global_live_at_end */
195 ENTRY_BLOCK, /* index */
197 -1, -1 /* eh_beg, eh_end */
204 NULL, /* local_set */
205 NULL, /* global_live_at_start */
206 NULL, /* global_live_at_end */
208 EXIT_BLOCK, /* index */
210 -1, -1 /* eh_beg, eh_end */
214 /* Nonzero if the second flow pass has completed. */
217 /* Maximum register number used in this function, plus one. */
221 /* Indexed by n, giving various register information */
223 varray_type reg_n_info;
225 /* Size of a regset for the current function,
226 in (1) bytes and (2) elements. */
231 /* Regset of regs live when calls to `setjmp'-like functions happen. */
232 /* ??? Does this exist only for the setjmp-clobbered warning message? */
234 regset regs_live_at_setjmp;
236 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
237 that have to go in the same hard reg.
238 The first two regs in the list are a pair, and the next two
239 are another pair, etc. */
242 /* Set of registers that may be eliminable. These are handled specially
243 in updating regs_ever_live. */
245 static HARD_REG_SET elim_reg_set;
247 /* The basic block structure for every insn, indexed by uid. */
249 varray_type basic_block_for_insn;
251 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
252 /* ??? Should probably be using LABEL_NUSES instead. It would take a
253 bit of surgery to be able to use or co-opt the routines in jump. */
255 static rtx label_value_list;
257 /* Holds information for tracking conditional register life information. */
258 struct reg_cond_life_info
260 /* An EXPR_LIST of conditions under which a register is dead. */
263 /* ??? Could store mask of bytes that are dead, so that we could finally
264 track lifetimes of multi-word registers accessed via subregs. */
267 /* For use in communicating between propagate_block and its subroutines.
268 Holds all information needed to compute life and def-use information. */
270 struct propagate_block_info
272 /* The basic block we're considering. */
275 /* Bit N is set if register N is conditionally or unconditionally live. */
278 /* Bit N is set if register N is set this insn. */
281 /* Element N is the next insn that uses (hard or pseudo) register N
282 within the current basic block; or zero, if there is no such insn. */
285 /* Contains a list of all the MEMs we are tracking for dead store
289 /* If non-null, record the set of registers set in the basic block. */
292 #ifdef HAVE_conditional_execution
293 /* Indexed by register number, holds a reg_cond_life_info for each
294 register that is not unconditionally live or dead. */
295 splay_tree reg_cond_dead;
297 /* Bit N is set if register N is in an expression in reg_cond_dead. */
301 /* Non-zero if the value of CC0 is live. */
304 /* Flags controlling the set of information propagate_block collects. */
/* Forward declarations for the statics defined later in this file.  */
308 /* Forward declarations */
309 static int count_basic_blocks PARAMS ((rtx));
310 static rtx find_basic_blocks_1 PARAMS ((rtx));
311 static void clear_edges PARAMS ((void));
312 static void make_edges PARAMS ((rtx));
313 static void make_label_edge PARAMS ((sbitmap *, basic_block,
315 static void make_eh_edge PARAMS ((sbitmap *, eh_nesting_info *,
316 basic_block, rtx, int));
317 static void mark_critical_edges PARAMS ((void));
318 static void move_stray_eh_region_notes PARAMS ((void));
319 static void record_active_eh_regions PARAMS ((rtx));
321 static void commit_one_edge_insertion PARAMS ((edge));
323 static void delete_unreachable_blocks PARAMS ((void));
324 static void delete_eh_regions PARAMS ((void));
325 static int can_delete_note_p PARAMS ((rtx));
326 static void expunge_block PARAMS ((basic_block));
327 static int can_delete_label_p PARAMS ((rtx));
328 static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
330 static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
332 static int merge_blocks PARAMS ((edge,basic_block,basic_block));
333 static void try_merge_blocks PARAMS ((void));
334 static void tidy_fallthru_edges PARAMS ((void));
335 static int verify_wide_reg_1 PARAMS ((rtx *, void *));
336 static void verify_wide_reg PARAMS ((int, rtx, rtx));
337 static void verify_local_live_at_start PARAMS ((regset, basic_block));
338 static int set_noop_p PARAMS ((rtx));
339 static int noop_move_p PARAMS ((rtx));
340 static void delete_noop_moves PARAMS ((rtx));
341 static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
342 static void notice_stack_pointer_modification PARAMS ((rtx));
343 static void mark_reg PARAMS ((rtx, void *));
344 static void mark_regs_live_at_end PARAMS ((regset));
345 static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *));
346 static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
347 static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
348 static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
349 static int insn_dead_p PARAMS ((struct propagate_block_info *,
351 static int libcall_dead_p PARAMS ((struct propagate_block_info *,
353 static void mark_set_regs PARAMS ((struct propagate_block_info *,
355 static void mark_set_1 PARAMS ((struct propagate_block_info *,
356 enum rtx_code, rtx, rtx,
358 #ifdef HAVE_conditional_execution
359 static int mark_regno_cond_dead PARAMS ((struct propagate_block_info *,
361 static void free_reg_cond_life_info PARAMS ((splay_tree_value));
362 static int flush_reg_cond_reg_1 PARAMS ((splay_tree_node, void *));
363 static void flush_reg_cond_reg PARAMS ((struct propagate_block_info *,
365 static rtx ior_reg_cond PARAMS ((rtx, rtx));
366 static rtx not_reg_cond PARAMS ((rtx));
367 static rtx nand_reg_cond PARAMS ((rtx, rtx));
370 static void find_auto_inc PARAMS ((struct propagate_block_info *,
372 static int try_pre_increment_1 PARAMS ((struct propagate_block_info *,
374 static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
376 static void mark_used_reg PARAMS ((struct propagate_block_info *,
378 static void mark_used_regs PARAMS ((struct propagate_block_info *,
380 void dump_flow_info PARAMS ((FILE *));
381 void debug_flow_info PARAMS ((void));
382 static void dump_edge_info PARAMS ((FILE *, edge, int));
384 static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
386 static void remove_fake_successors PARAMS ((basic_block));
387 static void flow_nodes_print PARAMS ((const char *, const sbitmap, FILE *));
388 static void flow_exits_print PARAMS ((const char *, const edge *, int, FILE *));
389 static void flow_loops_cfg_dump PARAMS ((const struct loops *, FILE *));
390 static int flow_loop_nested_p PARAMS ((struct loop *, struct loop *));
391 static int flow_loop_exits_find PARAMS ((const sbitmap, edge **));
392 static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
393 static int flow_depth_first_order_compute PARAMS ((int *));
394 static basic_block flow_loop_pre_header_find PARAMS ((basic_block, const sbitmap *));
395 static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
396 static void flow_loops_tree_build PARAMS ((struct loops *));
397 static int flow_loop_level_compute PARAMS ((struct loop *, int));
398 static int flow_loops_level_compute PARAMS ((struct loops *));
/* NOTE(review): incomplete fragment — the embedded original line numbers skip,
   so the return type, braces and several statements of this definition are
   missing from this extract.  Code preserved byte-for-byte.
   Visible flow: free stale basic_block_info, recount blocks, re-run
   find_basic_blocks_1, build the insn->block map, then discover edges.  */
400 /* Find basic blocks of the current function.
401 F is the first insn of the function and NREGS the number of register
405 find_basic_blocks (f, nregs, file)
407 int nregs ATTRIBUTE_UNUSED;
408 FILE *file ATTRIBUTE_UNUSED;
412 /* Flush out existing data. */
413 if (basic_block_info != NULL)
419 /* Clear bb->aux on all extant basic blocks. We'll use this as a
420 tag for reuse during create_basic_block, just in case some pass
421 copies around basic block notes improperly. */
422 for (i = 0; i < n_basic_blocks; ++i)
423 BASIC_BLOCK (i)->aux = NULL;
425 VARRAY_FREE (basic_block_info);
428 n_basic_blocks = count_basic_blocks (f);
430 /* Size the basic block table. The actual structures will be allocated
431 by find_basic_blocks_1, since we want to keep the structure pointers
432 stable across calls to find_basic_blocks. */
433 /* ??? This whole issue would be much simpler if we called find_basic_blocks
434 exactly once, and thereafter we don't have a single long chain of
435 instructions at all until close to the end of compilation when we
436 actually lay them out. */
438 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
440 label_value_list = find_basic_blocks_1 (f);
442 /* Record the block to which an insn belongs. */
443 /* ??? This should be done another way, by which (perhaps) a label is
444 tagged directly with the basic block that it starts. It is used for
445 more than that currently, but IMO that is the only valid use. */
447 max_uid = get_max_uid ();
449 /* Leave space for insns life_analysis makes in some cases for auto-inc.
450 These cases are rare, so we don't need too much space. */
451 max_uid += max_uid / 10;
454 compute_bb_for_insn (max_uid);
456 /* Discover the edges of our cfg. */
457 record_active_eh_regions (f);
458 make_edges (label_value_list);
460 /* Do very simple cleanup now, for the benefit of code that runs between
461 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
462 tidy_fallthru_edges ();
464 mark_critical_edges ();
466 #ifdef ENABLE_CHECKING
/* NOTE(review): incomplete fragment — braces and several statements are elided
   in this extract (e.g. the increment of `count' and the NOTE handling bodies).
   Code preserved byte-for-byte.  A block boundary is counted at each
   CODE_LABEL, and at each insn following a jump, barrier, or a call that had
   an abnormal edge.  */
471 /* Count the basic blocks of the function. */
474 count_basic_blocks (f)
478 register RTX_CODE prev_code;
479 register int count = 0;
481 int call_had_abnormal_edge = 0;
/* Seed with JUMP_INSN so the very first real insn starts a block.  */
483 prev_code = JUMP_INSN;
484 for (insn = f; insn; insn = NEXT_INSN (insn))
486 register RTX_CODE code = GET_CODE (insn);
488 if (code == CODE_LABEL
489 || (GET_RTX_CLASS (code) == 'i'
490 && (prev_code == JUMP_INSN
491 || prev_code == BARRIER
492 || (prev_code == CALL_INSN && call_had_abnormal_edge))))
495 /* Record whether this call created an edge. */
496 if (code == CALL_INSN)
498 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
499 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
501 call_had_abnormal_edge = 0;
503 /* If there is an EH region or rethrow, we have an edge. */
504 if ((eh_region && region > 0)
505 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
506 call_had_abnormal_edge = 1;
507 else if (nonlocal_goto_handler_labels && region >= 0)
508 /* If there is a nonlocal goto label and the specified
509 region number isn't -1, we have an edge. (0 means
510 no throw, but might have a nonlocal goto). */
511 call_had_abnormal_edge = 1;
516 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
518 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
522 /* The rest of the compiler works a bit smoother when we don't have to
523 check for the edge case of do-nothing functions with no basic blocks. */
526 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
/* NOTE(review): incomplete fragment — the switch statement, many braces and
   several case labels are elided in this extract (the original line numbers
   skip).  Code preserved byte-for-byte.  Visible flow: walk the insn chain,
   track active EH regions as a LIFO list, open/close basic blocks at labels,
   jumps, barriers and abnormal-edge calls, and collect REG_LABEL labels into
   label_value_list for computed-goto edges.  */
533 /* Find all basic blocks of the function whose first insn is F.
535 Collect and return a list of labels whose addresses are taken. This
536 will be used in make_edges for use with computed gotos. */
539 find_basic_blocks_1 (f)
542 register rtx insn, next;
544 rtx bb_note = NULL_RTX;
545 rtx eh_list = NULL_RTX;
546 rtx label_value_list = NULL_RTX;
550 /* We process the instructions in a slightly different way than we did
551 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
552 closed out the previous block, so that it gets attached at the proper
553 place. Since this form should be equivalent to the previous,
554 count_basic_blocks continues to use the old form as a check. */
556 for (insn = f; insn; insn = next)
558 enum rtx_code code = GET_CODE (insn);
560 next = NEXT_INSN (insn);
566 int kind = NOTE_LINE_NUMBER (insn);
568 /* Keep a LIFO list of the currently active exception notes. */
569 if (kind == NOTE_INSN_EH_REGION_BEG)
570 eh_list = alloc_INSN_LIST (insn, eh_list);
571 else if (kind == NOTE_INSN_EH_REGION_END)
575 eh_list = XEXP (eh_list, 1);
576 free_INSN_LIST_node (t);
579 /* Look for basic block notes with which to keep the
580 basic_block_info pointers stable. Unthread the note now;
581 we'll put it back at the right place in create_basic_block.
582 Or not at all if we've already found a note in this block. */
583 else if (kind == NOTE_INSN_BASIC_BLOCK)
585 if (bb_note == NULL_RTX)
588 next = flow_delete_insn (insn);
594 /* A basic block starts at a label. If we've closed one off due
595 to a barrier or some such, no need to do it again. */
596 if (head != NULL_RTX)
598 /* While we now have edge lists with which other portions of
599 the compiler might determine a call ending a basic block
600 does not imply an abnormal edge, it will be a bit before
601 everything can be updated. So continue to emit a noop at
602 the end of such a block. */
603 if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
605 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
606 end = emit_insn_after (nop, end);
609 create_basic_block (i++, head, end, bb_note);
617 /* A basic block ends at a jump. */
618 if (head == NULL_RTX)
622 /* ??? Make a special check for table jumps. The way this
623 happens is truly and amazingly gross. We are about to
624 create a basic block that contains just a code label and
625 an addr*vec jump insn. Worse, an addr_diff_vec creates
626 its own natural loop.
628 Prevent this bit of brain damage, pasting things together
629 correctly in make_edges.
631 The correct solution involves emitting the table directly
632 on the tablejump instruction as a note, or JUMP_LABEL. */
634 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
635 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
643 goto new_bb_inclusive;
646 /* A basic block ends at a barrier. It may be that an unconditional
647 jump already closed the basic block -- no need to do it again. */
648 if (head == NULL_RTX)
651 /* While we now have edge lists with which other portions of the
652 compiler might determine a call ending a basic block does not
653 imply an abnormal edge, it will be a bit before everything can
654 be updated. So continue to emit a noop at the end of such a
656 if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
658 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
659 end = emit_insn_after (nop, end);
661 goto new_bb_exclusive;
665 /* Record whether this call created an edge. */
666 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
667 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
668 int call_has_abnormal_edge = 0;
670 /* If there is an EH region or rethrow, we have an edge. */
671 if ((eh_list && region > 0)
672 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
673 call_has_abnormal_edge = 1;
674 else if (nonlocal_goto_handler_labels && region >= 0)
675 /* If there is a nonlocal goto label and the specified
676 region number isn't -1, we have an edge. (0 means
677 no throw, but might have a nonlocal goto). */
678 call_has_abnormal_edge = 1;
680 /* A basic block ends at a call that can either throw or
681 do a non-local goto. */
682 if (call_has_abnormal_edge)
685 if (head == NULL_RTX)
690 create_basic_block (i++, head, end, bb_note);
691 head = end = NULL_RTX;
699 if (GET_RTX_CLASS (code) == 'i')
701 if (head == NULL_RTX)
708 if (GET_RTX_CLASS (code) == 'i')
712 /* Make a list of all labels referred to other than by jumps
713 (which just don't have the REG_LABEL notes).
715 Make a special exception for labels followed by an ADDR*VEC,
716 as this would be a part of the tablejump setup code.
718 Make a special exception for the eh_return_stub_label, which
719 we know isn't part of any otherwise visible control flow. */
721 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
722 if (REG_NOTE_KIND (note) == REG_LABEL)
724 rtx lab = XEXP (note, 0), next;
726 if (lab == eh_return_stub_label)
728 else if ((next = next_nonnote_insn (lab)) != NULL
729 && GET_CODE (next) == JUMP_INSN
730 && (GET_CODE (PATTERN (next)) == ADDR_VEC
731 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
735 = alloc_EXPR_LIST (0, XEXP (note, 0), label_value_list);
/* Close out any block still open at end of function, and sanity-check
   the count against count_basic_blocks.  */
740 if (head != NULL_RTX)
741 create_basic_block (i++, head, end, bb_note);
743 flow_delete_insn (bb_note);
745 if (i != n_basic_blocks)
748 return label_value_list;
/* NOTE(review): the function signature for this body was elided by the
   extract (original lines 752-756 are missing) — presumably this is the
   body of cleanup_cfg; verify against the full source.  Code preserved
   byte-for-byte.  */
751 /* Tidy the CFG by deleting unreachable code and whatnot. */
757 delete_unreachable_blocks ();
758 move_stray_eh_region_notes ();
759 record_active_eh_regions (f);
761 mark_critical_edges ();
763 /* Kill the data we won't maintain. */
764 label_value_list = NULL_RTX;
/* NOTE(review): incomplete fragment — return type, braces and several
   statements are elided in this extract.  Code preserved byte-for-byte.
   Visible flow: reuse the basic_block struct attached to BB_NOTE when
   possible, otherwise allocate a fresh one on the function obstack and emit
   a new NOTE_INSN_BASIC_BLOCK just inside the block head.  */
767 /* Create a new basic block consisting of the instructions between
768 HEAD and END inclusive. Reuses the note and basic block struct
769 in BB_NOTE, if any. */
772 create_basic_block (index, head, end, bb_note)
774 rtx head, end, bb_note;
779 && ! RTX_INTEGRATED_P (bb_note)
780 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
783 /* If we found an existing note, thread it back onto the chain. */
787 if (GET_CODE (head) == CODE_LABEL)
791 after = PREV_INSN (head);
795 if (after != bb_note && NEXT_INSN (after) != bb_note)
796 reorder_insns (bb_note, bb_note, after);
800 /* Otherwise we must create a note and a basic block structure.
801 Since we allow basic block structs in rtl, give the struct
802 the same lifetime by allocating it off the function obstack
803 rather than using malloc. */
805 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
806 memset (bb, 0, sizeof (*bb));
808 if (GET_CODE (head) == CODE_LABEL)
809 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
812 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
815 NOTE_BASIC_BLOCK (bb_note) = bb;
818 /* Always include the bb note in the block. */
819 if (NEXT_INSN (end) == bb_note)
825 BASIC_BLOCK (index) = bb;
827 /* Tag the block so that we know it has been used when considering
828 other basic block notes. */
/* NOTE(review): incomplete fragment — return type, braces and the inner
   insn-walk loop header are elided in this extract.  Code preserved
   byte-for-byte.  Rebuilds the basic_block_for_insn varray mapping each
   insn uid to its containing block.  */
832 /* Records the basic block struct in BB_FOR_INSN, for every instruction
833 indexed by INSN_UID. MAX is the size of the array. */
836 compute_bb_for_insn (max)
841 if (basic_block_for_insn)
842 VARRAY_FREE (basic_block_for_insn);
843 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
845 for (i = 0; i < n_basic_blocks; ++i)
847 basic_block bb = BASIC_BLOCK (i);
854 int uid = INSN_UID (insn);
856 VARRAY_BB (basic_block_for_insn, uid) = bb;
859 insn = NEXT_INSN (insn);
/* NOTE(review): the function signature for this body was elided by the
   extract (original lines 865-871 are missing) — presumably this is the
   body of clear_edges; verify against the full source.  Frees every
   successor edge list, including the entry block's, then clears the
   entry/exit pointers.  Code preserved byte-for-byte.  */
864 /* Free the memory associated with the edge structures. */
872 for (i = 0; i < n_basic_blocks; ++i)
874 basic_block bb = BASIC_BLOCK (i);
876 for (e = bb->succ; e ; e = n)
886 for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
892 ENTRY_BLOCK_PTR->succ = 0;
893 EXIT_BLOCK_PTR->pred = 0;
/* NOTE(review): incomplete fragment — return type, many braces, local
   declarations and several statements are elided in this extract.  Code
   preserved byte-for-byte.  Visible flow: optionally build an sbitmap edge
   cache, add the entry fallthru edge, then for each block examine its last
   insn to add jump/tablejump/computed-goto/return edges, EH and nonlocal-goto
   edges for calls, and finally fallthru edges between adjacent blocks.  */
898 /* Identify the edges between basic blocks.
900 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
901 that are otherwise unreachable may be reachable with a non-local goto.
903 BB_EH_END is an array indexed by basic block number in which we record
904 the list of exception regions active at the end of the basic block. */
907 make_edges (label_value_list)
908 rtx label_value_list;
911 eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
912 sbitmap *edge_cache = NULL;
914 /* Assume no computed jump; revise as we create edges. */
915 current_function_has_computed_jump = 0;
917 /* Heavy use of computed goto in machine-generated code can lead to
918 nearly fully-connected CFGs. In that case we spend a significant
919 amount of time searching the edge lists for duplicates. */
920 if (forced_labels || label_value_list)
922 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
923 sbitmap_vector_zero (edge_cache, n_basic_blocks);
926 /* By nature of the way these get numbered, block 0 is always the entry. */
927 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
929 for (i = 0; i < n_basic_blocks; ++i)
931 basic_block bb = BASIC_BLOCK (i);
934 int force_fallthru = 0;
936 /* Examine the last instruction of the block, and discover the
937 ways we can leave the block. */
940 code = GET_CODE (insn);
943 if (code == JUMP_INSN)
947 /* ??? Recognize a tablejump and do the right thing. */
948 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
949 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
950 && GET_CODE (tmp) == JUMP_INSN
951 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
952 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
957 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
958 vec = XVEC (PATTERN (tmp), 0);
960 vec = XVEC (PATTERN (tmp), 1);
962 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
963 make_label_edge (edge_cache, bb,
964 XEXP (RTVEC_ELT (vec, j), 0), 0);
966 /* Some targets (eg, ARM) emit a conditional jump that also
967 contains the out-of-range target. Scan for these and
968 add an edge if necessary. */
969 if ((tmp = single_set (insn)) != NULL
970 && SET_DEST (tmp) == pc_rtx
971 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
972 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
973 make_label_edge (edge_cache, bb,
974 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
976 #ifdef CASE_DROPS_THROUGH
977 /* Silly VAXen. The ADDR_VEC is going to be in the way of
978 us naturally detecting fallthru into the next block. */
983 /* If this is a computed jump, then mark it as reaching
984 everything on the label_value_list and forced_labels list. */
985 else if (computed_jump_p (insn))
987 current_function_has_computed_jump = 1;
989 for (x = label_value_list; x; x = XEXP (x, 1))
990 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
992 for (x = forced_labels; x; x = XEXP (x, 1))
993 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
996 /* Returns create an exit out. */
997 else if (returnjump_p (insn))
998 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
1000 /* Otherwise, we have a plain conditional or unconditional jump. */
1003 if (! JUMP_LABEL (insn))
1005 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
1009 /* If this is a sibling call insn, then this is in effect a
1010 combined call and return, and so we need an edge to the
1011 exit block. No need to worry about EH edges, since we
1012 wouldn't have created the sibling call in the first place. */
1014 if (code == CALL_INSN && SIBLING_CALL_P (insn))
1015 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
1018 /* If this is a CALL_INSN, then mark it as reaching the active EH
1019 handler for this CALL_INSN. If we're handling asynchronous
1020 exceptions then any insn can reach any of the active handlers.
1022 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
1024 if (code == CALL_INSN || asynchronous_exceptions)
1026 /* Add any appropriate EH edges. We do this unconditionally
1027 since there may be a REG_EH_REGION or REG_EH_RETHROW note
1028 on the call, and this needn't be within an EH region. */
1029 make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);
1031 /* If we have asynchronous exceptions, do the same for *all*
1032 exception regions active in the block. */
1033 if (asynchronous_exceptions
1034 && bb->eh_beg != bb->eh_end)
1036 if (bb->eh_beg >= 0)
1037 make_eh_edge (edge_cache, eh_nest_info, bb,
1038 NULL_RTX, bb->eh_beg);
1040 for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
1041 if (GET_CODE (x) == NOTE
1042 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
1043 || NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
1045 int region = NOTE_EH_HANDLER (x);
1046 make_eh_edge (edge_cache, eh_nest_info, bb,
1051 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1053 /* ??? This could be made smarter: in some cases it's possible
1054 to tell that certain calls will not do a nonlocal goto.
1056 For example, if the nested functions that do the nonlocal
1057 gotos do not have their addresses taken, then only calls to
1058 those functions or to other nested functions that use them
1059 could possibly do nonlocal gotos. */
1060 /* We do know that a REG_EH_REGION note with a value less
1061 than 0 is guaranteed not to perform a non-local goto. */
1062 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1063 if (!note || INTVAL (XEXP (note, 0)) >= 0)
1064 for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
1065 make_label_edge (edge_cache, bb, XEXP (x, 0),
1066 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1070 /* We know something about the structure of the function __throw in
1071 libgcc2.c. It is the only function that ever contains eh_stub
1072 labels. It modifies its return address so that the last block
1073 returns to one of the eh_stub labels within it. So we have to
1074 make additional edges in the flow graph. */
1075 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
1076 make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);
1078 /* Find out if we can drop through to the next block. */
1079 insn = next_nonnote_insn (insn);
1080 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1081 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1082 else if (i + 1 < n_basic_blocks)
1084 rtx tmp = BLOCK_HEAD (i + 1);
1085 if (GET_CODE (tmp) == NOTE)
1086 tmp = next_nonnote_insn (tmp);
1087 if (force_fallthru || insn == tmp)
1088 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1092 free_eh_nesting_info (eh_nest_info);
1094 sbitmap_vector_free (edge_cache);
/* NOTE(review): incomplete fragment — return type, braces, locals and the
   duplicate-edge early-return inside the scan loop are elided in this
   extract.  Code preserved byte-for-byte.  Allocates a new edge with
   xcalloc, links it onto both blocks' lists, and records it in the cache
   when one is in use.  */
1097 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1098 about the edge that is accumulated between calls. */
1101 make_edge (edge_cache, src, dst, flags)
1102 sbitmap *edge_cache;
1103 basic_block src, dst;
1109 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1110 many edges to them, and we didn't allocate memory for it. */
1111 use_edge_cache = (edge_cache
1112 && src != ENTRY_BLOCK_PTR
1113 && dst != EXIT_BLOCK_PTR);
1115 /* Make sure we don't add duplicate edges. */
1116 if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
1117 for (e = src->succ; e ; e = e->succ_next)
1124 e = (edge) xcalloc (1, sizeof (*e));
1127 e->succ_next = src->succ;
1128 e->pred_next = dst->pred;
1137 SET_BIT (edge_cache[src->index], dst->index);
/* NOTE(review): fragmentary listing -- embedded line numbers show gaps,
   so statements are missing between some lines; code kept verbatim.  */
1140 /* Create an edge from a basic block to a label. */
1143 make_label_edge (edge_cache, src, label, flags)
1144 sbitmap *edge_cache;
/* Guard: only genuine CODE_LABELs can be edge targets.  */
1149 if (GET_CODE (label) != CODE_LABEL)
1152 /* If the label was never emitted, this insn is junk, but avoid a
1153 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1154 as a result of a syntax error and a diagnostic has already been
1157 if (INSN_UID (label) == 0)
/* Delegate to make_edge, resolving the label to its containing block.  */
1160 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
/* NOTE(review): fragmentary listing -- embedded line numbers show gaps,
   so statements are missing between some lines; code kept verbatim.  */
1163 /* Create the edges generated by INSN in REGION. */
1166 make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
1167 sbitmap *edge_cache;
1168 eh_nesting_info *eh_nest_info;
1173 handler_info **handler_list;
/* CALL_INSNs get the additional EDGE_ABNORMAL_CALL flag bit.  */
1176 is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1177 num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
/* Make an abnormal EH edge to each reachable handler's label
   (a loop over NUM handlers; loop header lost in this fragment).  */
1180 make_label_edge (edge_cache, src, handler_list[num]->handler_label,
1181 EDGE_ABNORMAL | EDGE_EH | is_call);
/* NOTE(review): fragmentary listing -- embedded line numbers show gaps,
   so statements are missing between some lines; code kept verbatim.  */
1185 /* EH_REGION notes appearing between basic blocks is ambiguous, and even
1186 dangerous if we intend to move basic blocks around. Move such notes
1187 into the following block. */
1190 move_stray_eh_region_notes ()
/* Nothing to do with fewer than two blocks -- no "between" exists.  */
1195 if (n_basic_blocks < 2)
/* Walk adjacent block pairs (b1, b2) from the end of the function
   backwards, examining the insns strictly between them.  */
1198 b2 = BASIC_BLOCK (n_basic_blocks - 1);
1199 for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
1201 rtx insn, next, list = NULL_RTX;
1203 b1 = BASIC_BLOCK (i);
1204 for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
1206 next = NEXT_INSN (insn);
1207 if (GET_CODE (insn) == NOTE
1208 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1209 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
/* Detach the stray EH region note from the insn chain and collect
   it on a local LIST threaded through NEXT_INSN.  */
1211 /* Unlink from the insn chain. */
1212 NEXT_INSN (PREV_INSN (insn)) = next;
1213 PREV_INSN (next) = PREV_INSN (insn);
1216 NEXT_INSN (insn) = list;
1221 if (list == NULL_RTX)
/* Re-insert the collected notes inside B2, after its label (and,
   presumably, after its basic-block note -- lines missing here).  */
1224 /* Find where to insert these things. */
1226 if (GET_CODE (insn) == CODE_LABEL)
1227 insn = NEXT_INSN (insn);
1231 next = NEXT_INSN (list);
1232 add_insn_after (list, insn);
/* NOTE(review): fragmentary listing -- embedded line numbers show gaps,
   so statements are missing between some lines; code kept verbatim.  */
1238 /* Recompute eh_beg/eh_end for each basic block. */
1241 record_active_eh_regions (f)
/* EH_LIST is a stack (INSN_LIST) of currently-open EH region notes;
   the innermost active region is at its head.  */
1244 rtx insn, eh_list = NULL_RTX;
1246 basic_block bb = BASIC_BLOCK (0);
1248 for (insn = f; insn ; insn = NEXT_INSN (insn))
/* At a block head, record the innermost active region (-1 if none).  */
1250 if (bb->head == insn)
1251 bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1253 if (GET_CODE (insn) == NOTE)
1255 int kind = NOTE_LINE_NUMBER (insn);
1256 if (kind == NOTE_INSN_EH_REGION_BEG)
1257 eh_list = alloc_INSN_LIST (insn, eh_list);
1258 else if (kind == NOTE_INSN_EH_REGION_END)
/* Pop the region stack, freeing the list node.  */
1260 rtx t = XEXP (eh_list, 1);
1261 free_INSN_LIST_node (eh_list);
/* At the block end, record the region active there and advance to
   the next block (stopping after the last one).  */
1266 if (bb->end == insn)
1268 bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1270 if (i == n_basic_blocks)
1272 bb = BASIC_BLOCK (i);
/* NOTE(review): fragmentary listing -- embedded line numbers show gaps,
   so statements are missing between some lines; code kept verbatim.  */
1277 /* Identify critical edges and set the bits appropriately. */
1280 mark_critical_edges ()
1282 int i, n = n_basic_blocks;
1285 /* We begin with the entry block. This is not terribly important now,
1286 but could be if a front end (Fortran) implemented alternate entry
1288 bb = ENTRY_BLOCK_PTR;
/* An edge is critical iff its source has multiple successors AND its
   destination has multiple predecessors.  */
1295 /* (1) Critical edges must have a source with multiple successors. */
1296 if (bb->succ && bb->succ->succ_next)
1298 for (e = bb->succ; e ; e = e->succ_next)
1300 /* (2) Critical edges must have a destination with multiple
1301 predecessors. Note that we know there is at least one
1302 predecessor -- the edge we followed to get here. */
1303 if (e->dest->pred->pred_next)
1304 e->flags |= EDGE_CRITICAL;
1306 e->flags &= ~EDGE_CRITICAL;
/* Source has at most one successor: none of its edges are critical.  */
1311 for (e = bb->succ; e ; e = e->succ_next)
1312 e->flags &= ~EDGE_CRITICAL;
1317 bb = BASIC_BLOCK (i);
/* NOTE(review): fragmentary listing -- embedded line numbers show gaps,
   so statements are missing between some lines; code kept verbatim.  */
1321 /* Split a (typically critical) edge. Return the new block.
1322 Abort on abnormal edges.
1324 ??? The code generally expects to be called on critical edges.
1325 The case of a block ending in an unconditional jump to a
1326 block with multiple predecessors is not handled optimally. */
1329 split_edge (edge_in)
1332 basic_block old_pred, bb, old_succ;
1337 /* Abnormal edges cannot be split. */
1338 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1341 old_pred = edge_in->src;
1342 old_succ = edge_in->dest;
/* Splice EDGE_IN out of OLD_SUCC's predecessor list; it will be
   re-pointed at the new block.  */
1344 /* Remove the existing edge from the destination's pred list. */
1347 for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
1349 *pp = edge_in->pred_next;
1350 edge_in->pred_next = NULL;
/* Allocate the new basic block and the new outgoing edge.  */
1353 /* Create the new structures. */
1354 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
1355 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1358 memset (bb, 0, sizeof (*bb));
/* Copy liveness from the successor's block-start set; the new block
   holds no insns yet, so start == end liveness.  */
1360 /* ??? This info is likely going to be out of date very soon. */
1361 if (old_succ->global_live_at_start)
1363 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
1364 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
1365 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1366 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
/* Wire the edges: EDGE_IN now targets BB; EDGE_OUT is a fallthru
   from BB to OLD_SUCC.  EDGE_IN is no longer critical since BB has
   a single predecessor.  */
1371 bb->succ = edge_out;
1374 edge_in->flags &= ~EDGE_CRITICAL;
1376 edge_out->pred_next = old_succ->pred;
1377 edge_out->succ_next = NULL;
1379 edge_out->dest = old_succ;
1380 edge_out->flags = EDGE_FALLTHRU;
1381 edge_out->probability = REG_BR_PROB_BASE;
1383 old_succ->pred = edge_out;
1385 /* Tricky case -- if there existed a fallthru into the successor
1386 (and we're not it) we must add a new unconditional jump around
1387 the new block we're actually interested in.
1389 Further, if that edge is critical, this means a second new basic
1390 block must be created to hold it. In order to simplify correct
1391 insn placement, do this before we touch the existing basic block
1392 ordering for the block we were really wanting. */
1393 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1396 for (e = edge_out->pred_next; e ; e = e->pred_next)
1397 if (e->flags & EDGE_FALLTHRU)
1402 basic_block jump_block;
1405 if ((e->flags & EDGE_CRITICAL) == 0
1406 && e->src != ENTRY_BLOCK_PTR)
1408 /* Non critical -- we can simply add a jump to the end
1409 of the existing predecessor. */
1410 jump_block = e->src;
1414 /* We need a new block to hold the jump. The simplest
1415 way to do the bulk of the work here is to recursively
/* NOTE(review): recursive call handles the critical fallthru edge.  */
1417 jump_block = split_edge (e);
1418 e = jump_block->succ;
1421 /* Now add the jump insn ... */
1422 pos = emit_jump_insn_after (gen_jump (old_succ->head),
1424 jump_block->end = pos;
1425 if (basic_block_for_insn)
1426 set_block_for_insn (pos, jump_block);
1427 emit_barrier_after (pos);
1429 /* ... let jump know that label is in use, ... */
1430 JUMP_LABEL (pos) = old_succ->head;
1431 ++LABEL_NUSES (old_succ->head);
1433 /* ... and clear fallthru on the outgoing edge. */
1434 e->flags &= ~EDGE_FALLTHRU;
1436 /* Continue splitting the interesting edge. */
/* Insert BB into the block array just before OLD_SUCC (or at the
   end when OLD_SUCC is the exit block), shifting later blocks up.  */
1440 /* Place the new block just in front of the successor. */
1441 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1442 if (old_succ == EXIT_BLOCK_PTR)
1443 j = n_basic_blocks - 1;
1445 j = old_succ->index;
1446 for (i = n_basic_blocks - 1; i > j; --i)
1448 basic_block tmp = BASIC_BLOCK (i - 1);
1449 BASIC_BLOCK (i) = tmp;
1452 BASIC_BLOCK (i) = bb;
1455 /* Create the basic block note.
1457 Where we place the note can have a noticable impact on the generated
1458 code. Consider this cfg:
1469 If we need to insert an insn on the edge from block 0 to block 1,
1470 we want to ensure the instructions we insert are outside of any
1471 loop notes that physically sit between block 0 and block 1. Otherwise
1472 we confuse the loop optimizer into thinking the loop is a phony. */
1473 if (old_succ != EXIT_BLOCK_PTR
1474 && PREV_INSN (old_succ->head)
1475 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
1476 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
1477 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1478 PREV_INSN (old_succ->head));
1479 else if (old_succ != EXIT_BLOCK_PTR)
1480 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1482 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
1483 NOTE_BASIC_BLOCK (bb_note) = bb;
1484 bb->head = bb->end = bb_note;
1486 /* Not quite simple -- for non-fallthru edges, we must adjust the
1487 predecessor's jump instruction to target our new block. */
1488 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1490 rtx tmp, insn = old_pred->end;
1491 rtx old_label = old_succ->head;
1492 rtx new_label = gen_label_rtx ();
1494 if (GET_CODE (insn) != JUMP_INSN)
1497 /* ??? Recognize a tablejump and adjust all matching cases. */
1498 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1499 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1500 && GET_CODE (tmp) == JUMP_INSN
1501 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1502 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
/* ADDR_DIFF_VEC keeps its labels in operand 1, ADDR_VEC in 0.  */
1507 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1508 vec = XVEC (PATTERN (tmp), 0);
1510 vec = XVEC (PATTERN (tmp), 1);
1512 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1513 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1515 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1516 --LABEL_NUSES (old_label);
1517 ++LABEL_NUSES (new_label);
1520 /* Handle casesi dispatch insns */
1521 if ((tmp = single_set (insn)) != NULL
1522 && SET_DEST (tmp) == pc_rtx
1523 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1524 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1525 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1527 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1529 --LABEL_NUSES (old_label);
1530 ++LABEL_NUSES (new_label);
1535 /* This would have indicated an abnormal edge. */
1536 if (computed_jump_p (insn))
1539 /* A return instruction can't be redirected. */
1540 if (returnjump_p (insn))
1543 /* If the insn doesn't go where we think, we're confused. */
1544 if (JUMP_LABEL (insn) != old_label)
1547 redirect_jump (insn, new_label);
/* The new label becomes the head of BB, placed before its bb note.  */
1550 emit_label_before (new_label, bb_note);
1551 bb->head = new_label;
/* NOTE(review): fragmentary listing -- embedded line numbers show gaps,
   so statements are missing between some lines; code kept verbatim.  */
1557 /* Queue instructions for insertion on an edge between two basic blocks.
1558 The new instructions and basic blocks (if any) will not appear in the
1559 CFG until commit_edge_insertions is called. */
1562 insert_insn_on_edge (pattern, e)
1566 /* We cannot insert instructions on an abnormal critical edge.
1567 It will be easier to find the culprit if we die now. */
1568 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1569 == (EDGE_ABNORMAL|EDGE_CRITICAL))
/* Append PATTERN to the edge's pending insn sequence, starting a new
   sequence if this is the first insertion on E.  */
1572 if (e->insns == NULL_RTX)
1575 push_to_sequence (e->insns);
1577 emit_insn (pattern);
1579 e->insns = get_insns ();
/* NOTE(review): fragmentary listing -- embedded line numbers show gaps,
   so statements are missing between some lines; code kept verbatim.  */
1583 /* Update the CFG for the instructions queued on edge E. */
1586 commit_one_edge_insertion (e)
1589 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp;
1592 /* Pull the insns off the edge now since the edge might go away. */
1594 e->insns = NULL_RTX;
/* Case 1: destination has a single predecessor (and is not EXIT) --
   insert at the head of the destination block.  */
1596 /* Figure out where to put these things. If the destination has
1597 one predecessor, insert there. Except for the exit block. */
1598 if (e->dest->pred->pred_next == NULL
1599 && e->dest != EXIT_BLOCK_PTR)
1603 /* Get the location correct wrt a code label, and "nice" wrt
1604 a basic block note, and before everything else. */
1606 if (GET_CODE (tmp) == CODE_LABEL)
1607 tmp = NEXT_INSN (tmp);
1608 if (GET_CODE (tmp) == NOTE
1609 && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
1610 tmp = NEXT_INSN (tmp);
1611 if (tmp == bb->head)
1614 after = PREV_INSN (tmp);
/* Case 2: source has a single non-abnormal successor (and is not
   ENTRY) -- insert at the end of the source block.  */
1617 /* If the source has one successor and the edge is not abnormal,
1618 insert there. Except for the entry block. */
1619 else if ((e->flags & EDGE_ABNORMAL) == 0
1620 && e->src->succ->succ_next == NULL
1621 && e->src != ENTRY_BLOCK_PTR)
1624 /* It is possible to have a non-simple jump here. Consider a target
1625 where some forms of unconditional jumps clobber a register. This
1626 happens on the fr30 for example.
1628 We know this block has a single successor, so we can just emit
1629 the queued insns before the jump. */
1630 if (GET_CODE (bb->end) == JUMP_INSN)
1636 /* We'd better be fallthru, or we've lost track of what's what. */
1637 if ((e->flags & EDGE_FALLTHRU) == 0)
/* Case 3: neither end works -- split the edge and insert into the
   freshly created block.  */
1644 /* Otherwise we must split the edge. */
1647 bb = split_edge (e);
1651 /* Now that we've found the spot, do the insertion. */
1653 /* Set the new block number for these insns, if structure is allocated. */
1654 if (basic_block_for_insn)
1657 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
1658 set_block_for_insn (i, bb)
1663 emit_insns_before (insns, before);
1664 if (before == bb->head)
1669 rtx last = emit_insns_after (insns, after);
1670 if (after == bb->end)
/* If the inserted sequence ends in a return jump, the edge must have
   been the fallthru to EXIT; clear the fallthru flag and add a
   barrier after the jump.  */
1674 if (GET_CODE (last) == JUMP_INSN)
1676 if (returnjump_p (last))
1678 /* ??? Remove all outgoing edges from BB and add one
1679 for EXIT. This is not currently a problem because
1680 this only happens for the (single) epilogue, which
1681 already has a fallthru edge to EXIT. */
1684 if (e->dest != EXIT_BLOCK_PTR
1685 || e->succ_next != NULL
1686 || (e->flags & EDGE_FALLTHRU) == 0)
1688 e->flags &= ~EDGE_FALLTHRU;
1690 emit_barrier_after (last);
/* NOTE(review): fragmentary listing -- embedded line numbers show gaps,
   so statements are missing between some lines; code kept verbatim.  */
1699 /* Update the CFG for all queued instructions. */
1702 commit_edge_insertions ()
1707 #ifdef ENABLE_CHECKING
1708 verify_flow_info ();
/* Walk every block starting at ENTRY; NEXT is captured first because
   commit_one_edge_insertion may splice the current edge away.  */
1712 bb = ENTRY_BLOCK_PTR;
1717 for (e = bb->succ; e ; e = next)
1719 next = e->succ_next;
1721 commit_one_edge_insertion (e);
1724 if (++i >= n_basic_blocks)
1726 bb = BASIC_BLOCK (i);
/* NOTE(review): fragmentary listing -- embedded line numbers show gaps,
   so statements are missing between some lines; code kept verbatim.  */
1730 /* Delete all unreachable basic blocks. */
1733 delete_unreachable_blocks ()
1735 basic_block *worklist, *tos;
1736 int deleted_handler;
/* Worklist-based reachability: TOS is the stack top within WORKLIST.  */
1741 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
1743 /* Use basic_block->aux as a marker. Clear them all. */
1745 for (i = 0; i < n; ++i)
1746 BASIC_BLOCK (i)->aux = NULL;
1748 /* Add our starting points to the worklist. Almost always there will
1749 be only one. It isn't inconcievable that we might one day directly
1750 support Fortran alternate entry points. */
1752 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
1756 /* Mark the block with a handy non-null value. */
1760 /* Iterate: find everything reachable from what we've already seen. */
1762 while (tos != worklist)
1764 basic_block b = *--tos;
1766 for (e = b->succ; e ; e = e->succ_next)
1774 /* Delete all unreachable basic blocks. Count down so that we don't
1775 interfere with the block renumbering that happens in flow_delete_block. */
1777 deleted_handler = 0;
1779 for (i = n - 1; i >= 0; --i)
1781 basic_block b = BASIC_BLOCK (i);
1784 /* This block was found. Tidy up the mark. */
/* flow_delete_block returns nonzero if it removed an EH handler;
   accumulate that so the EH regions can be cleaned afterwards.  */
1787 deleted_handler |= flow_delete_block (b);
1790 tidy_fallthru_edges ();
1792 /* If we deleted an exception handler, we may have EH region begin/end
1793 blocks to remove as well. */
1794 if (deleted_handler)
1795 delete_eh_regions ();
/* NOTE(review): fragmentary listing -- embedded line numbers show gaps,
   so statements are missing between some lines; code kept verbatim.  */
1800 /* Find EH regions for which there is no longer a handler, and delete them. */
1803 delete_eh_regions ()
1807 update_rethrow_references ();
1809 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1810 if (GET_CODE (insn) == NOTE)
1812 if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
1813 (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1815 int num = NOTE_EH_HANDLER (insn);
1816 /* A NULL handler indicates a region is no longer needed,
1817 as long as its rethrow label isn't used. */
1818 if (get_first_handler (num) == NULL && ! rethrow_used (num))
/* Turn the dead region note into a deleted note in place.  */
1820 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1821 NOTE_SOURCE_FILE (insn) = 0;
/* NOTE(review): fragmentary listing -- some declaration lines are
   missing; code kept verbatim.  */
1827 /* Return true if NOTE is not one of the ones that must be kept paired,
1828 so that we may simply delete them. */
1831 can_delete_note_p (note)
/* Only already-deleted notes and basic-block notes are freely deletable.  */
1834 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
1835 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
/* NOTE(review): fragmentary listing -- embedded line numbers show gaps,
   so statements are missing between some lines; code kept verbatim.  */
1838 /* Unlink a chain of insns between START and FINISH, leaving notes
1839 that must be paired. */
1842 flow_delete_insn_chain (start, finish)
1845 /* Unchain the insns one by one. It would be quicker to delete all
1846 of these with a single unchaining, rather than one at a time, but
1847 we need to keep the NOTE's. */
1853 next = NEXT_INSN (start);
/* Skip paired notes and undeletable labels; delete everything else.  */
1854 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
1856 else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
1859 next = flow_delete_insn (start);
1861 if (start == finish)
/* NOTE(review): fragmentary listing -- embedded line numbers show gaps,
   so statements are missing between some lines; code kept verbatim.  */
1867 /* Delete the insns in a (non-live) block. We physically delete every
1868 non-deleted-note insn, and update the flow graph appropriately.
1870 Return nonzero if we deleted an exception handler. */
1872 /* ??? Preserving all such notes strikes me as wrong. It would be nice
1873 to post-process the stream to remove empty blocks, loops, ranges, etc. */
1876 flow_delete_block (b)
1879 int deleted_handler = 0;
1882 /* If the head of this block is a CODE_LABEL, then it might be the
1883 label for an exception handler which can't be reached.
1885 We need to remove the label from the exception_handler_label list
1886 and remove the associated NOTE_INSN_EH_REGION_BEG and
1887 NOTE_INSN_EH_REGION_END notes. */
1891 never_reached_warning (insn);
1893 if (GET_CODE (insn) == CODE_LABEL)
1895 rtx x, *prev = &exception_handler_labels;
1897 for (x = exception_handler_labels; x; x = XEXP (x, 1))
1899 if (XEXP (x, 0) == insn)
1901 /* Found a match, splice this label out of the EH label list. */
1902 *prev = XEXP (x, 1);
1903 XEXP (x, 1) = NULL_RTX;
1904 XEXP (x, 0) = NULL_RTX;
1906 /* Remove the handler from all regions */
1907 remove_handler (insn);
1908 deleted_handler = 1;
1911 prev = &XEXP (x, 1);
1914 /* This label may be referenced by code solely for its value, or
1915 referenced by static data, or something. We have determined
1916 that it is not reachable, but cannot delete the label itself.
1917 Save code space and continue to delete the balance of the block,
1918 along with properly updating the cfg. */
1919 if (!can_delete_label_p (insn))
1921 /* If we've only got one of these, skip the whole deleting
1924 goto no_delete_insns;
1925 insn = NEXT_INSN (insn);
1929 /* Include any jump table following the basic block. */
1931 if (GET_CODE (end) == JUMP_INSN
1932 && (tmp = JUMP_LABEL (end)) != NULL_RTX
1933 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1934 && GET_CODE (tmp) == JUMP_INSN
1935 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1936 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1939 /* Include any barrier that may follow the basic block. */
1940 tmp = next_nonnote_insn (end);
1941 if (tmp && GET_CODE (tmp) == BARRIER)
1944 /* Selectively delete the entire chain. */
1945 flow_delete_insn_chain (insn, end);
/* Detach B from the CFG: unlink each incoming edge from its source's
   successor list and each outgoing edge from its dest's pred list.  */
1949 /* Remove the edges into and out of this block. Note that there may
1950 indeed be edges in, if we are removing an unreachable loop. */
1954 for (e = b->pred; e ; e = next)
1956 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
1959 next = e->pred_next;
1963 for (e = b->succ; e ; e = next)
1965 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
1968 next = e->succ_next;
1977 /* Remove the basic block from the array, and compact behind it. */
1980 return deleted_handler;
/* NOTE(review): the function header line is missing from this fragment;
   presumably this is expunge_block -- TODO confirm against full source.  */
1983 /* Remove block B from the basic block array and compact behind it. */
1989 int i, n = n_basic_blocks;
/* Shift every later block down one slot (renumbering lines are
   missing here) and shrink the varray's element count.  */
1991 for (i = b->index; i + 1 < n; ++i)
1993 basic_block x = BASIC_BLOCK (i + 1);
1994 BASIC_BLOCK (i) = x;
1998 basic_block_info->num_elements--;
/* NOTE(review): fragmentary listing -- embedded line numbers show gaps,
   so statements are missing between some lines; code kept verbatim.  */
2002 /* Delete INSN by patching it out. Return the next insn. */
2005 flow_delete_insn (insn)
2008 rtx prev = PREV_INSN (insn);
2009 rtx next = NEXT_INSN (insn);
/* Sever INSN from the chain and mark it deleted.  */
2012 PREV_INSN (insn) = NULL_RTX;
2013 NEXT_INSN (insn) = NULL_RTX;
2014 INSN_DELETED_P (insn) = 1;
2017 NEXT_INSN (prev) = next;
2019 PREV_INSN (next) = prev;
/* If INSN was the last insn, PREV becomes the new last insn.  */
2021 set_last_insn (prev);
2023 if (GET_CODE (insn) == CODE_LABEL)
2024 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2026 /* If deleting a jump, decrement the use count of the label. Deleting
2027 the label itself should happen in the normal course of block merging. */
2028 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
2029 LABEL_NUSES (JUMP_LABEL (insn))--;
2031 /* Also if deleting an insn that references a label. */
2032 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX)
2033 LABEL_NUSES (XEXP (note, 0))--;
/* NOTE(review): fragmentary listing -- embedded line numbers show gaps,
   so statements are missing between some lines; code kept verbatim.  */
2038 /* True if a given label can be deleted. */
2041 can_delete_label_p (label)
/* A label is undeletable if preserved, forced, value-referenced,
   an EH handler label, or user-declared.  */
2046 if (LABEL_PRESERVE_P (label))
2049 for (x = forced_labels; x ; x = XEXP (x, 1))
2050 if (label == XEXP (x, 0))
2052 for (x = label_value_list; x ; x = XEXP (x, 1))
2053 if (label == XEXP (x, 0))
2055 for (x = exception_handler_labels; x ; x = XEXP (x, 1))
2056 if (label == XEXP (x, 0))
2059 /* User declared labels must be preserved. */
2060 if (LABEL_NAME (label) != 0)
/* NOTE(review): fragmentary listing -- embedded line numbers show gaps,
   so statements are missing between some lines; code kept verbatim.  */
2066 /* Blocks A and B are to be merged into a single block A. The insns
2067 are already contiguous, hence `nomove'. */
2070 merge_blocks_nomove (a, b)
2074 rtx b_head, b_end, a_end;
2075 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2078 /* If there was a CODE_LABEL beginning B, delete it. */
2081 if (GET_CODE (b_head) == CODE_LABEL)
2083 /* Detect basic blocks with nothing but a label. This can happen
2084 in particular at the end of a function. */
2085 if (b_head == b_end)
2087 del_first = del_last = b_head;
2088 b_head = NEXT_INSN (b_head);
2091 /* Delete the basic block note. */
2092 if (GET_CODE (b_head) == NOTE
2093 && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
2095 if (b_head == b_end)
2100 b_head = NEXT_INSN (b_head);
2103 /* If there was a jump out of A, delete it. */
2105 if (GET_CODE (a_end) == JUMP_INSN)
2109 prev = prev_nonnote_insn (a_end);
2116 /* If this was a conditional jump, we need to also delete
2117 the insn that set cc0. */
/* NOTE(review): cc0-target handling; likely inside #ifdef HAVE_cc0 in
   the full source -- TODO confirm.  */
2118 if (prev && sets_cc0_p (prev))
2121 prev = prev_nonnote_insn (prev)
2131 /* Delete everything marked above as well as crap that might be
2132 hanging out between the two blocks. */
2133 flow_delete_insn_chain (del_first, del_last);
2135 /* Normally there should only be one successor of A and that is B, but
2136 partway though the merge of blocks for conditional_execution we'll
2137 be merging a TEST block with THEN and ELSE successors. Free the
2138 whole lot of them and hope the caller knows what they're doing. */
2140 remove_edge (a->succ);
2142 /* Adjust the edges out of B for the new owner. */
2143 for (e = b->succ; e ; e = e->succ_next)
2147 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
2148 b->pred = b->succ = NULL;
/* Re-home B's insns in A when the insn->block map is allocated.  */
2150 /* Reassociate the insns of B with A. */
2153 if (basic_block_for_insn)
2155 BLOCK_FOR_INSN (b_head) = a;
2156 while (b_head != b_end)
2158 b_head = NEXT_INSN (b_head);
2159 BLOCK_FOR_INSN (b_head) = a;
/* NOTE(review): fragmentary listing -- embedded line numbers show gaps,
   so statements are missing between some lines; code kept verbatim.  */
2169 /* Blocks A and B are to be merged into a single block. A has no incoming
2170 fallthru edge, so it can be moved before B without adding or modifying
2171 any jumps (aside from the jump from A to B). */
2174 merge_blocks_move_predecessor_nojumps (a, b)
2177 rtx start, end, barrier;
2183 /* We want to delete the BARRIER after the end of the insns we are
2184 going to move. If we don't find a BARRIER, then do nothing. This
2185 can happen in some cases if we have labels we can not delete.
2187 Similarly, do nothing if we can not delete the label at the start
2188 of the target block. */
2189 barrier = next_nonnote_insn (end);
2190 if (GET_CODE (barrier) != BARRIER
2191 || (GET_CODE (b->head) == CODE_LABEL
2192 && ! can_delete_label_p (b->head)))
2195 flow_delete_insn (barrier);
2197 /* Move block and loop notes out of the chain so that we do not
2198 disturb their order.
2200 ??? A better solution would be to squeeze out all the non-nested notes
2201 and adjust the block trees appropriately. Even better would be to have
2202 a tighter connection between block trees and rtl so that this is not
2204 start = squeeze_notes (start, end);
/* Physically move A's insns to just before B's head.  */
2206 /* Scramble the insn chain. */
2207 if (end != PREV_INSN (b->head))
2208 reorder_insns (start, end, PREV_INSN (b->head));
2212 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2213 a->index, b->index);
/* Swap A and B in the block array so A occupies B's old slot;
   the array is then compacted from where B (now in A's slot) was.  */
2216 /* Swap the records for the two blocks around. Although we are deleting B,
2217 A is now where B was and we want to compact the BB array from where
2219 BASIC_BLOCK(a->index) = b;
2220 BASIC_BLOCK(b->index) = a;
2222 a->index = b->index;
2225 /* Now blocks A and B are contiguous. Merge them. */
2226 merge_blocks_nomove (a, b);
/* NOTE(review): fragmentary listing -- embedded line numbers show gaps,
   so statements are missing between some lines; code kept verbatim.  */
2231 /* Blocks A and B are to be merged into a single block. B has no outgoing
2232 fallthru edge, so it can be moved after A without adding or modifying
2233 any jumps (aside from the jump from A to B). */
2236 merge_blocks_move_successor_nojumps (a, b)
2239 rtx start, end, barrier;
2244 /* We want to delete the BARRIER after the end of the insns we are
2245 going to move. If we don't find a BARRIER, then do nothing. This
2246 can happen in some cases if we have labels we can not delete.
2248 Similarly, do nothing if we can not delete the label at the start
2249 of the target block. */
2250 barrier = next_nonnote_insn (end);
2251 if (GET_CODE (barrier) != BARRIER
2252 || (GET_CODE (b->head) == CODE_LABEL
2253 && ! can_delete_label_p (b->head)))
2256 flow_delete_insn (barrier);
2258 /* Move block and loop notes out of the chain so that we do not
2259 disturb their order.
2261 ??? A better solution would be to squeeze out all the non-nested notes
2262 and adjust the block trees appropriately. Even better would be to have
2263 a tighter connection between block trees and rtl so that this is not
2265 start = squeeze_notes (start, end);
/* Physically move B's insns to just after A's end, then merge.  */
2267 /* Scramble the insn chain. */
2268 reorder_insns (start, end, a->end);
2270 /* Now blocks A and B are contiguous. Merge them. */
2271 merge_blocks_nomove (a, b);
2275 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2276 b->index, a->index);
/* NOTE(review): fragmentary listing -- embedded line numbers show gaps,
   so statements are missing between some lines; code kept verbatim.  */
2282 /* Attempt to merge basic blocks that are potentially non-adjacent.
2283 Return true iff the attempt succeeded. */
2286 merge_blocks (e, b, c)
2290 /* If B has a fallthru edge to C, no need to move anything. */
2291 if (e->flags & EDGE_FALLTHRU)
2293 /* If a label still appears somewhere and we cannot delete the label,
2294 then we cannot merge the blocks. The edge was tidied already. */
2296 rtx insn, stop = NEXT_INSN (c->head);
2297 for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
2298 if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
2301 merge_blocks_nomove (b, c);
2305 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2306 b->index, c->index);
/* Non-fallthru case: decide whether B can move before C, or C after B.  */
2315 int c_has_outgoing_fallthru;
2316 int b_has_incoming_fallthru;
2318 /* We must make sure to not munge nesting of exception regions,
2319 lexical blocks, and loop notes.
2321 The first is taken care of by requiring that the active eh
2322 region at the end of one block always matches the active eh
2323 region at the beginning of the next block.
2325 The later two are taken care of by squeezing out all the notes. */
2327 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2328 executed and we may want to treat blocks which have two out
2329 edges, one normal, one abnormal as only having one edge for
2330 block merging purposes. */
2332 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2333 if (tmp_edge->flags & EDGE_FALLTHRU)
2335 c_has_outgoing_fallthru = (tmp_edge != NULL);
2337 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2338 if (tmp_edge->flags & EDGE_FALLTHRU)
2340 b_has_incoming_fallthru = (tmp_edge != NULL);
2342 /* If B does not have an incoming fallthru, and the exception regions
2343 match, then it can be moved immediately before C without introducing
2346 C can not be the first block, so we do not have to worry about
2347 accessing a non-existent block. */
2348 d = BASIC_BLOCK (c->index - 1);
2349 if (! b_has_incoming_fallthru
2350 && d->eh_end == b->eh_beg
2351 && b->eh_end == c->eh_beg)
2352 return merge_blocks_move_predecessor_nojumps (b, c);
2354 /* Otherwise, we're going to try to move C after B. Make sure the
2355 exception regions match.
2357 If B is the last basic block, then we must not try to access the
2358 block structure for block B + 1. Luckily in that case we do not
2359 need to worry about matching exception regions. */
2360 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2361 if (b->eh_end == c->eh_beg
2362 && (d == NULL || c->eh_end == d->eh_beg))
2364 /* If C does not have an outgoing fallthru, then it can be moved
2365 immediately after B without introducing or modifying jumps. */
2366 if (! c_has_outgoing_fallthru)
2367 return merge_blocks_move_successor_nojumps (b, c);
2369 /* Otherwise, we'll need to insert an extra jump, and possibly
2370 a new block to contain it. */
2371 /* ??? Not implemented yet. */
/* NOTE(review): the function header line is missing from this fragment;
   presumably this is try_merge_blocks -- TODO confirm against full source.  */
2378 /* Top level driver for merge_blocks. */
2385 /* Attempt to merge blocks as made possible by edge removal. If a block
2386 has only one successor, and the successor has only one predecessor,
2387 they may be combined. */
2389 for (i = 0; i < n_basic_blocks; )
2391 basic_block c, b = BASIC_BLOCK (i);
2394 /* A loop because chains of blocks might be combineable. */
/* Merge B with its sole successor C when: the edge is not EH, C is
   not EXIT, C has a single predecessor, and B's ending jump (if any)
   has no side effects beyond the jump itself.  */
2395 while ((s = b->succ) != NULL
2396 && s->succ_next == NULL
2397 && (s->flags & EDGE_EH) == 0
2398 && (c = s->dest) != EXIT_BLOCK_PTR
2399 && c->pred->pred_next == NULL
2400 /* If the jump insn has side effects, we can't kill the edge. */
2401 && (GET_CODE (b->end) != JUMP_INSN
2402 || onlyjump_p (b->end))
2403 && merge_blocks (s, b, c))
2406 /* Don't get confused by the index shift caused by deleting blocks. */
/* NOTE(review): fragmentary listing -- embedded line numbers show gaps,
   so statements are missing between some lines; code kept verbatim.  */
2411 /* The given edge should potentially be a fallthru edge. If that is in
2412 fact true, delete the jump and barriers that are in the way. */
2415 tidy_fallthru_edge (e, b, c)
2421 /* ??? In a late-running flow pass, other folks may have deleted basic
2422 blocks by nopping out blocks, leaving multiple BARRIERs between here
2423 and the target label. They ought to be chastized and fixed.
2425 We can also wind up with a sequence of undeletable labels between
2426 one block and the next.
2428 So search through a sequence of barriers, labels, and notes for
2429 the head of block C and assert that we really do fall through. */
2431 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2434 /* Remove what will soon cease being the jump insn from the source block.
2435 If block B consisted only of this single jump, turn it into a deleted
2438 if (GET_CODE (q) == JUMP_INSN
2439 && (simplejump_p (q)
2440 || (b->succ == e && e->succ_next == NULL)))
2443 /* If this was a conditional jump, we need to also delete
2444 the insn that set cc0. */
2445 if (! simplejump_p (q) && condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
/* Degenerate case: B held only this jump -- turn it into a deleted
   note so B keeps at least one insn.  */
2452 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2453 NOTE_SOURCE_FILE (q) = 0;
2456 b->end = q = PREV_INSN (q);
2459 /* Selectively unlink the sequence. */
2460 if (q != PREV_INSN (c->head))
2461 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2463 e->flags |= EDGE_FALLTHRU;
/* NOTE(review): fragmentary listing -- embedded line numbers show gaps,
   so statements are missing between some lines; code kept verbatim.  */
2466 /* Fix up edges that now fall through, or rather should now fall through
2467 but previously required a jump around now deleted blocks. Simplify
2468 the search by only examining blocks numerically adjacent, since this
2469 is how find_basic_blocks created them. */
2472 tidy_fallthru_edges ()
/* Examine each numerically adjacent pair (B = block i-1, C = block i).  */
2476 for (i = 1; i < n_basic_blocks; ++i)
2478 basic_block b = BASIC_BLOCK (i - 1);
2479 basic_block c = BASIC_BLOCK (i);
2482 /* We care about simple conditional or unconditional jumps with
2485 If we had a conditional branch to the next instruction when
2486 find_basic_blocks was called, then there will only be one
2487 out edge for the block which ended with the conditional
2488 branch (since we do not create duplicate edges).
2490 Furthermore, the edge will be marked as a fallthru because we
2491 merge the flags for the duplicate edges. So we do not want to
2492 check that the edge is not a FALLTHRU edge. */
2493 if ((s = b->succ) != NULL
2494 && s->succ_next == NULL
2496 /* If the jump insn has side effects, we can't tidy the edge. */
2497 && (GET_CODE (b->end) != JUMP_INSN
2498 || onlyjump_p (b->end))
2499 tidy_fallthru_edge (s, b, c);
2503 /* Perform data flow analysis.
2504 F is the first insn of the function; FLAGS is a set of PROP_* flags
2505 to be used in accumulating flow info. */
2508 life_analysis (f, file, flags)
2513 #ifdef ELIMINABLE_REGS
2515   static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2518   /* Record which registers will be eliminated.  We use this in
2521   CLEAR_HARD_REG_SET (elim_reg_set);
2523 #ifdef ELIMINABLE_REGS
2524   for (i = 0; i < (int) (sizeof eliminables / sizeof eliminables[0]); i++)
2525     SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
/* Without ELIMINABLE_REGS, only the frame pointer is eliminable.  */
2527   SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2531     flags &= PROP_DEATH_NOTES | PROP_REG_INFO;
2533   /* The post-reload life analysis must keep (on a global basis) the same
2534      registers live as were computed by reload itself, notably after
2535      register elimination; otherwise offsets and such may be incorrect.
2537      Reload will mark some registers as live even though they do not
2538      appear in the rtl.  */
2539   if (reload_completed)
2540     flags &= ~PROP_REG_INFO;
2542   /* We want alias analysis information for local dead store elimination.  */
2543   if (flags & PROP_SCAN_DEAD_CODE)
2544     init_alias_analysis ();
2546   /* Always remove no-op moves.  Do this before other processing so
2547      that we don't have to keep re-scanning them.  */
2548   delete_noop_moves (f);
2550   /* Some targets can emit simpler epilogues if they know that sp was
2551      not ever modified during the function.  After reload, of course,
2552      we've already emitted the epilogue so there's no sense searching.  */
2553   if (! reload_completed)
2554     notice_stack_pointer_modification (f);
2556   /* Allocate and zero out data structures that will record the
2557      data from lifetime analysis.  */
2558   allocate_reg_life_data ();
2559   allocate_bb_life_data ();
2561   /* Find the set of registers live on function exit.  */
2562   mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2564   /* "Update" life info from zero.  It'd be nice to begin the
2565      relaxation with just the exit and noreturn blocks, but that set
2566      is not immediately handy.  */
2568   if (flags & PROP_REG_INFO)
2569     memset (regs_ever_live, 0, sizeof(regs_ever_live));
2570   update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
2573   if (flags & PROP_SCAN_DEAD_CODE)
2574     end_alias_analysis ();
2577     dump_flow_info (file);
/* Keep basic_block_info (arg 1 = keep head/end), free the rest.  */
2579   free_basic_block_vars (1);
2582 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2583 Search for REGNO. If found, abort if it is not wider than word_mode. */
2586 verify_wide_reg_1 (px, pregno)
/* PREGNO points at the register number being searched for.  */
2591   unsigned int regno = *(int *) pregno;
2593   if (GET_CODE (x) == REG && REGNO (x) == regno)
/* Per the comment above: a match narrower than a word is an error.  */
2595       if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2602 /* A subroutine of verify_local_live_at_start. Search through insns
2603 between HEAD and END looking for register REGNO. */
2606 verify_wide_reg (regno, head, end)
/* Scan each real insn between HEAD and END for a reference to REGNO;
   verify_wide_reg_1 aborts if the reference is not wider than a word.  */
2612       if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
/* Fixed mojibake: the third argument had been corrupted to "(R)no";
   for_each_rtx's opaque data pointer must be &regno.  */
2613 	  && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2617       head = NEXT_INSN (head);
2620   /* We didn't find the register at all.  Something's way screwy.  */
2624 /* A subroutine of update_life_info. Verify that there are no untoward
2625 changes in live_at_start during a local update. */
2628 verify_local_live_at_start (new_live_at_start, bb)
2629      regset new_live_at_start;
2632   if (reload_completed)
2634       /* After reload, there are no pseudos, nor subregs of multi-word
2635 	 registers.  The regsets should exactly match.  */
2636       if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2643       /* Find the set of changed registers.  */
2644       XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2646       EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2648 	  /* No registers should die.  */
2649 	  if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2651 	  /* Verify that the now-live register is wider than word_mode.  */
2652 	  verify_wide_reg (i, bb->head, bb->end);
2657 /* Updates life information starting with the basic blocks set in BLOCKS.
2658 If BLOCKS is null, consider it to be the universal set.
2660 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholeing,
2661 we are only expecting local modifications to basic blocks. If we find
2662 extra registers live at the beginning of a block, then we either killed
2663 useful data, or we have a broken split that wants data not provided.
2664 If we find registers removed from live_at_start, that means we have
2665 a broken peephole that is killing a register it shouldn't.
2667 ??? This is not true in one situation -- when a pre-reload splitter
2668 generates subregs of a multi-word pseudo, current life analysis will
2669 lose the kill. So we _can_ have a pseudo go live. How irritating.
2671 Including PROP_REG_INFO does not properly refresh regs_ever_live
2672 unless the caller resets it to zero. */
2675 update_life_info (blocks, extent, prop_flags)
2677      enum update_life_extent extent;
2681   regset_head tmp_head;
2684   tmp = INITIALIZE_REG_SET (tmp_head);
2686   /* For a global update, we go through the relaxation process again.  */
2687   if (extent != UPDATE_LIFE_LOCAL)
2689       calculate_global_regs_live (blocks, blocks,
2690 				  prop_flags & PROP_SCAN_DEAD_CODE);
2692       /* If asked, remove notes from the blocks we'll update.  */
2693       if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2694 	count_or_remove_death_notes (blocks, 1);
/* With an explicit BLOCKS set, only rescan the listed blocks; ...  */
2699       EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2701 	  basic_block bb = BASIC_BLOCK (i);
2703 	  COPY_REG_SET (tmp, bb->global_live_at_end);
2704 	  propagate_block (bb, tmp, (regset) NULL, prop_flags);
2706 	  if (extent == UPDATE_LIFE_LOCAL)
2707 	    verify_local_live_at_start (tmp, bb);
/* ... otherwise (BLOCKS null) rescan every block, last to first.  */
2712       for (i = n_basic_blocks - 1; i >= 0; --i)
2714 	  basic_block bb = BASIC_BLOCK (i);
2716 	  COPY_REG_SET (tmp, bb->global_live_at_end);
2717 	  propagate_block (bb, tmp, (regset) NULL, prop_flags);
2719 	  if (extent == UPDATE_LIFE_LOCAL)
2720 	    verify_local_live_at_start (tmp, bb);
2726   if (prop_flags & PROP_REG_INFO)
2728       /* The only pseudos that are live at the beginning of the function
2729 	 are those that were not set anywhere in the function.  local-alloc
2730 	 doesn't know how to handle these correctly, so mark them as not
2731 	 local to any one basic block.  */
2732       EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2733 				 FIRST_PSEUDO_REGISTER, i,
2734 				 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2736       /* We have a problem with any pseudoreg that lives across the setjmp.
2737 	 ANSI says that if a user variable does not change in value between
2738 	 the setjmp and the longjmp, then the longjmp preserves it.  This
2739 	 includes longjmp from a place where the pseudo appears dead.
2740 	 (In principle, the value still exists if it is in scope.)
2741 	 If the pseudo goes in a hard reg, some other value may occupy
2742 	 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2743 	 Conclusion: such a pseudo must not go in a hard reg.  */
2744       EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2745 				 FIRST_PSEUDO_REGISTER, i,
2747 	  if (regno_reg_rtx[i] != 0)
/* LIVE_LENGTH of -1 tells the allocator to keep this pseudo in memory.  */
2749 	      REG_LIVE_LENGTH (i) = -1;
2750 	      REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
2756 /* Free the variables allocated by find_basic_blocks.
2758 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2761 free_basic_block_vars (keep_head_end_p)
2762      int keep_head_end_p;
2764   if (basic_block_for_insn)
2766       VARRAY_FREE (basic_block_for_insn);
2767       basic_block_for_insn = NULL;
2770   if (! keep_head_end_p)
2773       VARRAY_FREE (basic_block_info);
/* Also clear the fake entry/exit blocks' per-pass fields.  */
2776       ENTRY_BLOCK_PTR->aux = NULL;
2777       ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2778       EXIT_BLOCK_PTR->aux = NULL;
2779       EXIT_BLOCK_PTR->global_live_at_start = NULL;
2783 /* Return nonzero if the destination of SET equals the source. */
2788   rtx src = SET_SRC (set);
2789   rtx dst = SET_DEST (set);
/* A subreg-to-subreg copy is a no-op only if both sides select the
   same word of their underlying registers.  */
2791   if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
2793       if (SUBREG_WORD (src) != SUBREG_WORD (dst))
2795       src = SUBREG_REG (src);
2796       dst = SUBREG_REG (dst);
/* No-op iff both operands are the same register.  */
2799   return (GET_CODE (src) == REG && GET_CODE (dst) == REG
2800 	  && REGNO (src) == REGNO (dst));
2803 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2809   rtx pat = PATTERN (insn);
2811   /* Insns carrying these notes are useful later on.  */
2812   if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2815   if (GET_CODE (pat) == SET && set_noop_p (pat))
2818   if (GET_CODE (pat) == PARALLEL)
2821       /* If nothing but SETs of registers to themselves,
2822 	 this insn can also be deleted.  */
2823       for (i = 0; i < XVECLEN (pat, 0); i++)
2825 	  rtx tem = XVECEXP (pat, 0, i);
/* USE and CLOBBER elements don't disqualify the insn.  */
2827 	  if (GET_CODE (tem) == USE
2828 	      || GET_CODE (tem) == CLOBBER)
2831 	  if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2840 /* Delete any insns that copy a register to itself. */
2843 delete_noop_moves (f)
2847   for (insn = f; insn; insn = NEXT_INSN (insn))
2849       if (GET_CODE (insn) == INSN && noop_move_p (insn))
/* "Delete" by turning the insn into a deleted note in place.  */
2851 	  PUT_CODE (insn, NOTE);
2852 	  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2853 	  NOTE_SOURCE_FILE (insn) = 0;
2858 /* Determine if the stack pointer is constant over the life of the function.
2859 Only useful before prologues have been emitted. */
2862 notice_stack_pointer_modification_1 (x, pat, data)
2864      rtx pat ATTRIBUTE_UNUSED;
2865      void *data ATTRIBUTE_UNUSED;
/* note_stores callback: clear the sp-is-unchanging flag on any store
   to the stack pointer, including a push expressed as a MEM with a
   pre/post inc/dec address on sp.  */
2867   if (x == stack_pointer_rtx
2868       /* The stack pointer is only modified indirectly as the result
2869 	 of a push until later in flow.  See the comments in rtl.texi
2870 	 regarding Embedded Side-Effects on Addresses.  */
2871       || (GET_CODE (x) == MEM
2872 	  && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2873 	      || GET_CODE (XEXP (x, 0)) == PRE_INC
2874 	      || GET_CODE (XEXP (x, 0)) == POST_DEC
2875 	      || GET_CODE (XEXP (x, 0)) == POST_INC)
2876 	  && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2877     current_function_sp_is_unchanging = 0;
2881 notice_stack_pointer_modification (f)
2886   /* Assume that the stack pointer is unchanging if alloca hasn't
2888   current_function_sp_is_unchanging = !current_function_calls_alloca;
2889   if (! current_function_sp_is_unchanging)
2892   for (insn = f; insn; insn = NEXT_INSN (insn))
/* Only real insns ('i' class) can modify the stack pointer.  */
2894       if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2896 	  /* Check if insn modifies the stack pointer.  */
2897 	  note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
/* Once a modification is found, no point scanning further.  */
2899 	  if (! current_function_sp_is_unchanging)
2905 /* Mark a register in SET. Hard registers in large modes get all
2906 of their component registers set as well. */
2908 mark_reg (reg, xset)
2912   regset set = (regset) xset;
2913   int regno = REGNO (reg);
/* NOTE(review): BLKmode appears to be handled specially before the
   normal path below — elided here; confirm against full source.  */
2915   if (GET_MODE (reg) == BLKmode)
2918   SET_REGNO_REG_SET (set, regno);
2919   if (regno < FIRST_PSEUDO_REGISTER)
/* Hard regs in multi-word modes: mark every component register.  */
2921       int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2923 	SET_REGNO_REG_SET (set, regno + n);
2927 /* Mark those regs which are needed at the end of the function as live
2928 at the end of the last basic block. */
2930 mark_regs_live_at_end (set)
2935   /* If exiting needs the right stack value, consider the stack pointer
2936      live at the end of the function.  */
2937   if ((HAVE_epilogue && reload_completed)
2938       || ! EXIT_IGNORE_STACK
2939       || (! FRAME_POINTER_REQUIRED
2940 	  && ! current_function_calls_alloca
2941 	  && flag_omit_frame_pointer)
2942       || current_function_sp_is_unchanging)
2944       SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2947   /* Mark the frame pointer if needed at the end of the function.  If
2948      we end up eliminating it, it will be removed from the live list
2949      of each basic block by reload.  */
2951   if (! reload_completed || frame_pointer_needed)
2953       SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2954 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2955       /* If they are different, also mark the hard frame pointer as live */
2956       SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
2960 #ifdef PIC_OFFSET_TABLE_REGNUM
2961 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
2962   /* Many architectures have a GP register even without flag_pic.
2963      Assume the pic register is not in use, or will be handled by
2964      other means, if it is not fixed.  */
2965   if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
2966     SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
2970   /* Mark all global registers, and all registers used by the epilogue
2971      as being live at the end of the function since they may be
2972      referenced by our caller.  */
2973   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2975 #ifdef EPILOGUE_USES
2976 	|| EPILOGUE_USES (i)
2979       SET_REGNO_REG_SET (set, i);
2981   /* Mark all call-saved registers that we actually used.  */
2982   if (HAVE_epilogue && reload_completed)
2984       for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2985 	if (! call_used_regs[i] && regs_ever_live[i])
2986 	  SET_REGNO_REG_SET (set, i);
2989   /* Mark function return value.  */
2990   diddle_return_value (mark_reg, set);
2993 /* Callback function for for_each_successor_phi. DATA is a regset.
2994 Sets the SRC_REGNO, the regno of the phi alternative for phi node
2995 INSN, in the regset. */
2998 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
2999      rtx insn ATTRIBUTE_UNUSED;
3000      int dest_regno ATTRIBUTE_UNUSED;
/* DATA is really the regset to update (see the comment above).  */
3004   regset live = (regset) data;
3005   SET_REGNO_REG_SET (live, src_regno);
3009 /* Propagate global life info around the graph of basic blocks. Begin
3010 considering blocks with their corresponding bit set in BLOCKS_IN.
3011 If BLOCKS_IN is null, consider it the universal set.
3013 BLOCKS_OUT is set for every block that was changed. */
3016 calculate_global_regs_live (blocks_in, blocks_out, flags)
3017      sbitmap blocks_in, blocks_out;
3020   basic_block *queue, *qhead, *qtail, *qend;
3021   regset tmp, new_live_at_end;
3022   regset_head tmp_head;
3023   regset_head new_live_at_end_head;
3026   tmp = INITIALIZE_REG_SET (tmp_head);
3027   new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
3029   /* Create a worklist.  Allocate an extra slot for ENTRY_BLOCK, and one
3030      because the `head == tail' style test for an empty queue doesn't
3031      work with a full queue.  */
3032   queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
/* Queue starts empty with head/tail at the end of the buffer.  */
3034   qhead = qend = queue + n_basic_blocks + 2;
3036   /* Clear out the garbage that might be hanging out in bb->aux.  */
3037   for (i = n_basic_blocks - 1; i >= 0; --i)
3038     BASIC_BLOCK (i)->aux = NULL;
3040   /* Queue the blocks set in the initial mask.  Do this in reverse block
3041      number order so that we are more likely for the first round to do
3042      useful work.  We use AUX non-null to flag that the block is queued.  */
3045       EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3047 	  basic_block bb = BASIC_BLOCK (i);
/* BLOCKS_IN null means the universal set: queue every block.  */
3054       for (i = 0; i < n_basic_blocks; ++i)
3056 	  basic_block bb = BASIC_BLOCK (i);
3063     sbitmap_zero (blocks_out);
/* Iterate the worklist to a fixed point.  */
3065   while (qhead != qtail)
3067       int rescan, changed;
3076       /* Begin by propagating live_at_start from the successor blocks.  */
3077       CLEAR_REG_SET (new_live_at_end);
3078       for (e = bb->succ; e ; e = e->succ_next)
3080 	  basic_block sb = e->dest;
3081 	  IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3084       /* Force the stack pointer to be live -- which might not already be
3085 	 the case for blocks within infinite loops.  */
3086       SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
3088       /* Regs used in phi nodes are not included in
3089 	 global_live_at_start, since they are live only along a
3090 	 particular edge.  Set those regs that are live because of a
3091 	 phi node alternative corresponding to this particular block.  */
3093 	for_each_successor_phi (bb, &set_phi_alternative_reg,
/* The entry block has no insns to scan; just record its live-at-end.  */
3096       if (bb == ENTRY_BLOCK_PTR)
3098 	  COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3102       /* On our first pass through this block, we'll go ahead and continue.
3103 	 Recognize first pass by local_set NULL.  On subsequent passes, we
3104 	 get to skip out early if live_at_end wouldn't have changed.  */
3106       if (bb->local_set == NULL)
3108 	  bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3113 	  /* If any bits were removed from live_at_end, we'll have to
3114 	     rescan the block.  This wouldn't be necessary if we had
3115 	     precalculated local_live, however with PROP_SCAN_DEAD_CODE
3116 	     local_live is really dependent on live_at_end.  */
3117 	  CLEAR_REG_SET (tmp);
3118 	  rescan = bitmap_operation (tmp, bb->global_live_at_end,
3119 				     new_live_at_end, BITMAP_AND_COMPL);
3123 	      /* Find the set of changed bits.  Take this opportunity
3124 		 to notice that this set is empty and early out.  */
3125 	      CLEAR_REG_SET (tmp);
3126 	      changed = bitmap_operation (tmp, bb->global_live_at_end,
3127 					  new_live_at_end, BITMAP_XOR);
3131 	      /* If any of the changed bits overlap with local_set,
3132 		 we'll have to rescan the block.  Detect overlap by
3133 		 the AND with ~local_set turning off bits.  */
3134 	      rescan = bitmap_operation (tmp, tmp, bb->local_set,
3139 	  /* Let our caller know that BB changed enough to require its
3140 	     death notes updated.  */
3142 	    SET_BIT (blocks_out, bb->index);
3146 	  /* Add to live_at_start the set of all registers in
3147 	     new_live_at_end that aren't in the old live_at_end.  */
3149 	  bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3151 	  COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3153 	  changed = bitmap_operation (bb->global_live_at_start,
3154 				      bb->global_live_at_start,
/* Rescan path: recompute live_at_start from scratch.  */
3161 	  COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3163 	  /* Rescan the block insn by insn to turn (a copy of) live_at_end
3164 	     into live_at_start.  */
3165 	  propagate_block (bb, new_live_at_end, bb->local_set, flags);
3167 	  /* If live_at_start didn't change, no need to go farther.  */
3168 	  if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3171 	  COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3174       /* Queue all predecessors of BB so that we may re-examine
3175 	 their live_at_end.  */
3176       for (e = bb->pred; e ; e = e->pred_next)
3178 	  basic_block pb = e->src;
3179 	  if (pb->aux == NULL)
3190   FREE_REG_SET (new_live_at_end);
/* Free the scratch local_set of every block we touched.  */
3194       EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3196 	  basic_block bb = BASIC_BLOCK (i);
3197 	  FREE_REG_SET (bb->local_set);
3202       for (i = n_basic_blocks - 1; i >= 0; --i)
3204 	  basic_block bb = BASIC_BLOCK (i);
3205 	  FREE_REG_SET (bb->local_set);
3212 /* Subroutines of life analysis. */
3214 /* Allocate the permanent data structures that represent the results
3215 of life analysis. Not static since used also for stupid life analysis. */
3218 allocate_bb_life_data ()
3222   for (i = 0; i < n_basic_blocks; i++)
3224       basic_block bb = BASIC_BLOCK (i);
3226       bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3227       bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
/* The fake entry/exit blocks only need one regset each.  */
3230   ENTRY_BLOCK_PTR->global_live_at_end
3231     = OBSTACK_ALLOC_REG_SET (function_obstack);
3232   EXIT_BLOCK_PTR->global_live_at_start
3233     = OBSTACK_ALLOC_REG_SET (function_obstack);
3235   regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
3239 allocate_reg_life_data ()
3243   max_regno = max_reg_num ();
3245   /* Recalculate the register space, in case it has grown.  Old style
3246      vector oriented regsets would set regset_{size,bytes} here also.  */
3247   allocate_reg_info (max_regno, FALSE, FALSE);
3249   /* Reset all the data we'll collect in propagate_block and its
3251   for (i = 0; i < max_regno; i++)
3255       REG_N_DEATHS (i) = 0;
3256       REG_N_CALLS_CROSSED (i) = 0;
3257       REG_LIVE_LENGTH (i) = 0;
3258       REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3262 /* Delete dead instructions for propagate_block. */
3265 propagate_block_delete_insn (bb, insn)
3269   rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
3271   /* If the insn referred to a label, and that label was attached to
3272      an ADDR_VEC, it's safe to delete the ADDR_VEC.  In fact, it's
3273      pretty much mandatory to delete it, because the ADDR_VEC may be
3274      referencing labels that no longer exist.  */
3278       rtx label = XEXP (inote, 0);
/* Only delete the jump table when ours is the label's sole use.  */
3281       if (LABEL_NUSES (label) == 1
3282 	  && (next = next_nonnote_insn (label)) != NULL
3283 	  && GET_CODE (next) == JUMP_INSN
3284 	  && (GET_CODE (PATTERN (next)) == ADDR_VEC
3285 	      || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3287 	  rtx pat = PATTERN (next);
3288 	  int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3289 	  int len = XVECLEN (pat, diff_vec_p);
/* Decrement the use count of every label the table references.  */
3292 	  for (i = 0; i < len; i++)
3293 	    LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3295 	  flow_delete_insn (next);
/* Keep bb->end valid if we are deleting the block's last insn.  */
3299   if (bb->end == insn)
3300     bb->end = PREV_INSN (insn);
3301   flow_delete_insn (insn);
3304 /* Delete dead libcalls for propagate_block. Return the insn
3305 before the libcall. */
3308 propagate_block_delete_libcall (bb, insn, note)
/* NOTE is the REG_RETVAL note; its operand is the libcall's first insn.  */
3312   rtx first = XEXP (note, 0);
3313   rtx before = PREV_INSN (first);
3315   if (insn == bb->end)
3318   flow_delete_insn_chain (first, insn);
3322 /* Update the life-status of regs for one insn. Return the previous insn. */
3325 propagate_one_insn (pbi, insn)
3326      struct propagate_block_info *pbi;
3329   rtx prev = PREV_INSN (insn);
3330   int flags = pbi->flags;
3331   int insn_is_dead = 0;
3332   int libcall_is_dead = 0;
/* Notes, labels etc. carry no life information.  */
3336   if (! INSN_P (insn))
3339   note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3340   if (flags & PROP_SCAN_DEAD_CODE)
3342       insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0,
/* A dead libcall result insn lets us delete the whole libcall sequence.  */
3344       libcall_is_dead = (insn_is_dead && note != 0
3345 			 && libcall_dead_p (pbi, PATTERN (insn),
3349   /* We almost certainly don't want to delete prologue or epilogue
3350      instructions.  Warn about probable compiler losage.  */
3353       && (((HAVE_epilogue || HAVE_prologue)
3354 	   && prologue_epilogue_contains (insn))
3355 	  || (HAVE_sibcall_epilogue
3356 	      && sibcall_epilogue_contains (insn))))
3358       if (flags & PROP_KILL_DEAD_CODE)
3360 	  warning ("ICE: would have deleted prologue/epilogue insn");
3361 	  if (!inhibit_warnings)
/* Force the insn to be treated as live rather than deleting it.  */
3364       libcall_is_dead = insn_is_dead = 0;
3367   /* If an instruction consists of just dead store(s) on final pass,
3369   if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3371       /* Record sets.  Do this even for dead instructions, since they
3372 	 would have killed the values if they hadn't been deleted.  */
3373       mark_set_regs (pbi, PATTERN (insn), insn);
3375       /* CC0 is now known to be dead.  Either this insn used it,
3376 	 in which case it doesn't anymore, or clobbered it,
3377 	 so the next insn can't use it.  */
3380       if (libcall_is_dead)
3382 	  prev = propagate_block_delete_libcall (pbi->bb, insn, note);
3383 	  insn = NEXT_INSN (prev);
3386 	propagate_block_delete_insn (pbi->bb, insn);
3391   /* See if this is an increment or decrement that can be merged into
3392      a following memory address.  */
3395     register rtx x = single_set (insn);
3397     /* Does this instruction increment or decrement a register?  */
3398     if (!reload_completed
3399 	&& (flags & PROP_AUTOINC)
3401 	&& GET_CODE (SET_DEST (x)) == REG
3402 	&& (GET_CODE (SET_SRC (x)) == PLUS
3403 	    || GET_CODE (SET_SRC (x)) == MINUS)
3404 	&& XEXP (SET_SRC (x), 0) == SET_DEST (x)
3405 	&& GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3406 	/* Ok, look for a following memory ref we can combine with.
3407 	   If one is found, change the memory ref to a PRE_INC
3408 	   or PRE_DEC, cancel this insn, and return 1.
3409 	   Return 0 if nothing has been done.  */
3410 	&& try_pre_increment_1 (pbi, insn))
3413 #endif /* AUTO_INC_DEC */
3415   CLEAR_REG_SET (pbi->new_set);
3417   /* If this is not the final pass, and this insn is copying the value of
3418      a library call and it's dead, don't scan the insns that perform the
3419      library call, so that the call's arguments are not marked live.  */
3420   if (libcall_is_dead)
3422       /* Record the death of the dest reg.  */
3423       mark_set_regs (pbi, PATTERN (insn), insn);
/* Skip back over the whole libcall sequence in one step.  */
3425       insn = XEXP (note, 0);
3426       return PREV_INSN (insn);
3428   else if (GET_CODE (PATTERN (insn)) == SET
3429 	   && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3430 	   && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3431 	   && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3432 	   && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3433     /* We have an insn to pop a constant amount off the stack.
3434        (Such insns use PLUS regardless of the direction of the stack,
3435        and any insn to adjust the stack by a constant is always a pop.)
3436        These insns, if not dead stores, have no effect on life.  */
3440       /* Any regs live at the time of a call instruction must not go
3441 	 in a register clobbered by calls.  Find all regs now live and
3442 	 record this for them.  */
3444       if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
3445 	EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3446 				   { REG_N_CALLS_CROSSED (i)++; });
3448       /* Record sets.  Do this even for dead instructions, since they
3449 	 would have killed the values if they hadn't been deleted.  */
3450       mark_set_regs (pbi, PATTERN (insn), insn);
3452       if (GET_CODE (insn) == CALL_INSN)
/* For a conditionally-executed call, clobbers are conditional too.  */
3458 	  if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3459 	    cond = COND_EXEC_TEST (PATTERN (insn));
3461 	  /* Non-constant calls clobber memory.  */
3462 	  if (! CONST_CALL_P (insn))
3463 	    free_EXPR_LIST_list (&pbi->mem_set_list);
3465 	  /* There may be extra registers to be clobbered.  */
3466 	  for (note = CALL_INSN_FUNCTION_USAGE (insn);
3468 	       note = XEXP (note, 1))
3469 	    if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3470 	      mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
3471 			  cond, insn, pbi->flags);
3473 	  /* Calls change all call-used and global registers.  */
3474 	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3475 	    if (call_used_regs[i] && ! global_regs[i]
3478 		/* We do not want REG_UNUSED notes for these registers.  */
3479 		mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
3481 			    pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
3485       /* If an insn doesn't use CC0, it becomes dead since we assume
3486 	 that every insn clobbers it.  So show it dead here;
3487 	 mark_used_regs will set it live if it is referenced.  */
3492       mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
3494       /* Sometimes we may have inserted something before INSN (such as a move)
3495 	 when we make an auto-inc.  So ensure we will scan those insns.  */
3497       prev = PREV_INSN (insn);
3500       if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3506 	  if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3507 	    cond = COND_EXEC_TEST (PATTERN (insn));
3509 	  /* Calls use their arguments.  */
3510 	  for (note = CALL_INSN_FUNCTION_USAGE (insn);
3512 	       note = XEXP (note, 1))
3513 	    if (GET_CODE (XEXP (note, 0)) == USE)
3514 	      mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
3517 	  /* The stack ptr is used (honorarily) by a CALL insn.  */
3518 	  SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
3520 	  /* Calls may also reference any of the global registers,
3521 	     so they are made live.  */
3522 	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3524 	      mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
3529   /* On final pass, update counts of how many insns in which each reg
3531   if (flags & PROP_REG_INFO)
3532     EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3533 			       { REG_LIVE_LENGTH (i)++; });
3538 /* Initialize a propagate_block_info struct for public consumption.
3539 Note that the structure itself is opaque to this file, but that
3540 the user can use the regsets provided here. */
3542 struct propagate_block_info *
3543 init_propagate_block_info (bb, live, local_set, flags)
3549   struct propagate_block_info *pbi = xmalloc (sizeof(*pbi));
3552   pbi->reg_live = live;
3553   pbi->mem_set_list = NULL_RTX;
3554   pbi->local_set = local_set;
/* reg_next_use is only needed for log-links and auto-inc detection.  */
3558   if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3559     pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
3561     pbi->reg_next_use = NULL;
3563   pbi->new_set = BITMAP_XMALLOC ();
3565 #ifdef HAVE_conditional_execution
3566   pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
3567 				       free_reg_cond_life_info);
3568   pbi->reg_cond_reg = BITMAP_XMALLOC ();
3570   /* If this block ends in a conditional branch, for each register live
3571      from one side of the branch and not the other, record the register
3572      as conditionally dead.  */
3573   if (GET_CODE (bb->end) == JUMP_INSN
3574       && condjump_p (bb->end)
3575       && ! simplejump_p (bb->end))
3577       regset_head diff_head;
3578       regset diff = INITIALIZE_REG_SET (diff_head);
3579       basic_block bb_true, bb_false;
3580       rtx cond_true, cond_false;
3583       /* Identify the successor blocks.  */
3584       bb_true = bb->succ->dest;
3585       if (bb->succ->succ_next != NULL)
3587 	  bb_false = bb->succ->succ_next->dest;
/* Ensure bb_true is the branch target and bb_false the fallthru.  */
3589 	  if (bb->succ->flags & EDGE_FALLTHRU)
3591 	      basic_block t = bb_false;
3595 	  else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
3600 	  /* This can happen with a conditional jump to the next insn.  */
3601 	  if (JUMP_LABEL (bb->end) != bb_true->head)
3604 	  /* Simplest way to do nothing.  */
3608       /* Extract the condition from the branch.  */
3609       cond_true = XEXP (SET_SRC (PATTERN (bb->end)), 0);
3610       cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
3611 				   GET_MODE (cond_true), XEXP (cond_true, 0),
3612 				   XEXP (cond_true, 1));
/* If the branch pattern's "true" arm is PC, the senses are swapped.  */
3613       if (GET_CODE (XEXP (SET_SRC (PATTERN (bb->end)), 1)) == PC)
3616 	  cond_false = cond_true;
3620       /* Compute which registers lead different lives in the successors.  */
3621       if (bitmap_operation (diff, bb_true->global_live_at_start,
3622 			    bb_false->global_live_at_start, BITMAP_XOR))
3624 	  if (GET_CODE (XEXP (cond_true, 0)) != REG)
3626 	  SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond_true, 0)));
3628 	  /* For each such register, mark it conditionally dead.  */
3629 	  EXECUTE_IF_SET_IN_REG_SET
3632 	      struct reg_cond_life_info *rcli;
3635 	      rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
/* Dead on the path where it is NOT live at the successor's start.  */
3637 	      if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
3641 	      rcli->condition = alloc_EXPR_LIST (0, cond, NULL_RTX);
3643 	      splay_tree_insert (pbi->reg_cond_dead, i,
3644 				 (splay_tree_value) rcli);
3648       FREE_REG_SET (diff);
3655 /* Release a propagate_block_info struct. */
3658 free_propagate_block_info (pbi)
3659      struct propagate_block_info *pbi;
3661   free_EXPR_LIST_list (&pbi->mem_set_list);
3663   BITMAP_XFREE (pbi->new_set);
3665 #ifdef HAVE_conditional_execution
3666   splay_tree_delete (pbi->reg_cond_dead);
3667   BITMAP_XFREE (pbi->reg_cond_reg);
/* reg_next_use is only allocated under PROP_LOG_LINKS/PROP_AUTOINC.  */
3670   if (pbi->reg_next_use)
3671     free (pbi->reg_next_use);
3676 /* Compute the registers live at the beginning of a basic block BB from
3677 those live at the end.
3679 When called, REG_LIVE contains those live at the end. On return, it
3680 contains those live at the beginning.
3682 LOCAL_SET, if non-null, will be set with all registers killed by
3683 this basic block. */
3686 propagate_block (bb, live, local_set, flags)
3692   struct propagate_block_info *pbi;
3695   pbi = init_propagate_block_info (bb, live, local_set, flags);
3697   if (flags & PROP_REG_INFO)
3701       /* Process the regs live at the end of the block.
3702 	 Mark them as not local to any one basic block.  */
3703       EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
3704 				 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
3707   /* Scan the block an insn at a time from end to beginning.  */
3709   for (insn = bb->end; ; insn = prev)
3711       /* If this is a call to `setjmp' et al, warn if any
3712 	 non-volatile datum is live.  */
3713       if ((flags & PROP_REG_INFO)
3714 	  && GET_CODE (insn) == NOTE
3715 	  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3716 	IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
3718       prev = propagate_one_insn (pbi, insn);
/* Stop after processing the block's first insn.  */
3720       if (insn == bb->head)
3724   free_propagate_block_info (pbi);
3727 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3728 (SET expressions whose destinations are registers dead after the insn).
3729 NEEDED is the regset that says which regs are alive after the insn.
3731 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3733 If X is the entire body of an insn, NOTES contains the reg notes
3734 pertaining to the insn.
   NOTE(review): the NEEDED regset described above is now carried in
   PBI->reg_live; there is no separate NEEDED parameter.  */
3737 insn_dead_p (pbi, x, call_ok, notes)
3738 struct propagate_block_info *pbi;
3741 rtx notes ATTRIBUTE_UNUSED;
3743 enum rtx_code code = GET_CODE (x);
3746 /* If flow is invoked after reload, we must take existing AUTO_INC
3747 expressions into account.  */
3748 if (reload_completed)
3750 for ( ; notes; notes = XEXP (notes, 1))
3752 if (REG_NOTE_KIND (notes) == REG_INC)
3754 int regno = REGNO (XEXP (notes, 0));
3756 /* Don't delete insns to set global regs. */
3757 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3758 || REGNO_REG_SET_P (pbi->reg_live, regno))
3765 /* If setting something that's a reg or part of one,
3766 see if that register's altered value will be live. */
3770 rtx r = SET_DEST (x);
3773 if (GET_CODE (r) == CC0)
3774 return ! pbi->cc0_live;
3777 /* A SET that is a subroutine call cannot be dead. */
3778 if (GET_CODE (SET_SRC (x)) == CALL)
3784 /* Don't eliminate loads from volatile memory or volatile asms. */
3785 else if (volatile_refs_p (SET_SRC (x)))
3788 if (GET_CODE (r) == MEM)
3792 if (MEM_VOLATILE_P (r))
3795 /* Walk the set of memory locations we are currently tracking
3796 and see if one is an identical match to this memory location.
3797 If so, this memory write is dead (remember, we're walking
3798 backwards from the end of the block to the start).  */
3799 temp = pbi->mem_set_list;
3802 if (rtx_equal_p (XEXP (temp, 0), r))
3804 temp = XEXP (temp, 1);
3809 while (GET_CODE (r) == SUBREG
3810 || GET_CODE (r) == STRICT_LOW_PART
3811 || GET_CODE (r) == ZERO_EXTRACT)
3814 if (GET_CODE (r) == REG)
3816 int regno = REGNO (r);
3819 if (REGNO_REG_SET_P (pbi->reg_live, regno))
3822 /* If this is a hard register, verify that subsequent
3823 words are not needed. */
3824 if (regno < FIRST_PSEUDO_REGISTER)
3826 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3829 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
3833 /* Don't delete insns to set global regs. */
3834 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3837 /* Make sure insns to set the stack pointer aren't deleted. */
3838 if (regno == STACK_POINTER_REGNUM)
3841 /* Make sure insns to set the frame pointer aren't deleted. */
3842 if (regno == FRAME_POINTER_REGNUM
3843 && (! reload_completed || frame_pointer_needed))
3845 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3846 if (regno == HARD_FRAME_POINTER_REGNUM
3847 && (! reload_completed || frame_pointer_needed))
3851 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3852 /* Make sure insns to set arg pointer are never deleted
3853 (if the arg pointer isn't fixed, there will be a USE
3854 for it, so we can treat it normally). */
3855 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3859 /* Otherwise, the set is dead. */
3865 /* If performing several activities, insn is dead if each activity
3866 is individually dead. Also, CLOBBERs and USEs can be ignored; a
3867 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
3869 else if (code == PARALLEL)
3871 int i = XVECLEN (x, 0);
3873 for (i--; i >= 0; i--)
3874 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3875 && GET_CODE (XVECEXP (x, 0, i)) != USE
3876 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
3882 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3883 is not necessarily true for hard registers. */
3884 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3885 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3886 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
3889 /* We do not check other CLOBBER or USE here. An insn consisting of just
3890 a CLOBBER or just a USE should not be deleted. */
3894 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3895 return 1 if the entire library call is dead.
3896 This is true if X copies a register (hard or pseudo)
3897 and if the hard return reg of the call insn is dead.
3898 (The caller should have tested the destination of X already for death.)
3900 If this insn doesn't just copy a register, then we don't
3901 have an ordinary libcall. In that case, cse could not have
3902 managed to substitute the source for the dest later on,
3903 so we can assume the libcall is dead.
3905 NEEDED is the bit vector of pseudoregs live before this insn.
3906 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself.
   NOTE(review): as in insn_dead_p, the NEEDED vector described above
   is carried in PBI rather than passed separately.  */
3909 libcall_dead_p (pbi, x, note, insn)
3910 struct propagate_block_info *pbi;
3915 register RTX_CODE code = GET_CODE (x);
3919 register rtx r = SET_SRC (x);
3920 if (GET_CODE (r) == REG)
3922 rtx call = XEXP (note, 0);
3926 /* Find the call insn. */
3927 while (call != insn && GET_CODE (call) != CALL_INSN)
3928 call = NEXT_INSN (call);
3930 /* If there is none, do nothing special,
3931 since ordinary death handling can understand these insns. */
3935 /* See if the hard reg holding the value is dead.
3936 If this is a PARALLEL, find the call within it. */
3937 call_pat = PATTERN (call);
3938 if (GET_CODE (call_pat) == PARALLEL)
3940 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3941 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
3942 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
3945 /* This may be a library call that is returning a value
3946 via invisible pointer. Do nothing special, since
3947 ordinary death handling can understand these insns. */
3951 call_pat = XVECEXP (call_pat, 0, i);
3954 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
3960 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3961 live at function entry. Don't count global register variables, variables
3962 in registers that can be used for function arg passing, or variables in
3963 fixed hard registers. */
3966 regno_uninitialized (regno)
     /* With no basic blocks there is no life info; excluded hard regs
        (global, fixed, or argument-passing) are never reported.  */
3969 if (n_basic_blocks == 0
3970 || (regno < FIRST_PSEUDO_REGISTER
3971 && (global_regs[regno]
3972 || fixed_regs[regno]
3973 || FUNCTION_ARG_REGNO_P (regno))))
     /* Live on entry to the first block == used before set.  */
3976 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3979 /* 1 if register REGNO was alive at a place where `setjmp' was called
3980 and was set more than once or is an argument.
3981 Such regs may be clobbered by `longjmp'. */
3984 regno_clobbered_at_setjmp (regno)
3987 if (n_basic_blocks == 0)
     /* "Is an argument" is detected as being live on function entry.  */
3990 return ((REG_N_SETS (regno) > 1
3991 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3992 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
3995 /* INSN references memory, possibly using autoincrement addressing modes.
3996 Find any entries on the mem_set_list that need to be invalidated due
3997 to an address change. */
4000 invalidate_mems_from_autoinc (pbi, insn)
4001 struct propagate_block_info *pbi;
     /* NOTE(review): this initializer is redundant -- the for loop below
        reassigns NOTE before it is read.  */
4004 rtx note = REG_NOTES (insn);
4005 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
4007 if (REG_NOTE_KIND (note) == REG_INC)
4009 rtx temp = pbi->mem_set_list;
4010 rtx prev = NULL_RTX;
4015 next = XEXP (temp, 1);
     /* An auto-inc of a register invalidates any tracked store whose
        address mentions that register.  */
4016 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
4018 /* Splice temp out of list. */
4020 XEXP (prev, 1) = next;
4022 pbi->mem_set_list = next;
4023 free_EXPR_LIST_node (temp);
4033 /* Process the registers that are set within X. Their bits are set to
4034 1 in the regset DEAD, because they are dead prior to this insn.
4036 If INSN is nonzero, it is the insn being processed.
4038 FLAGS is the set of operations to perform.
   Handles plain SETs, COND_EXEC-wrapped sets, and PARALLELs of either;
   the actual per-destination work is done by mark_set_1.  */
4041 mark_set_regs (pbi, x, insn)
4042 struct propagate_block_info *pbi;
4045 rtx cond = NULL_RTX;
4049 switch (code = GET_CODE (x))
4053 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
     /* COND_EXEC: remember the predicate and process the guarded body.  */
4057 cond = COND_EXEC_TEST (x);
4058 x = COND_EXEC_CODE (x);
4064 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
4066 rtx sub = XVECEXP (x, 0, i);
4067 switch (code = GET_CODE (sub))
4070 if (cond != NULL_RTX)
4073 cond = COND_EXEC_TEST (sub);
4074 sub = COND_EXEC_CODE (sub);
4075 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
4081 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
4096 /* Process a single SET rtx, X.
   CODE is SET or CLOBBER; REG is the destination; COND is the COND_EXEC
   predicate or NULL_RTX for an unconditional set.  Updates liveness in
   PBI->reg_live, the tracked-store list, register statistics, and emits
   LOG_LINKS / REG_UNUSED notes as requested by FLAGS.  */
4099 mark_set_1 (pbi, code, reg, cond, insn, flags)
4100 struct propagate_block_info *pbi;
4102 rtx reg, cond, insn;
4105 int regno_first = -1, regno_last = -1;
4109 /* Some targets place small structures in registers for
4110 return values of functions. We have to detect this
4111 case specially here to get correct flow information. */
4112 if (GET_CODE (reg) == PARALLEL
4113 && GET_MODE (reg) == BLKmode)
4115 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
4116 mark_set_1 (pbi, code, XVECEXP (reg, 0, i), cond, insn, flags);
4120 /* Modifying just one hardware register of a multi-reg value or just a
4121 byte field of a register does not mean the value from before this insn
4122 is now dead. Of course, if it was dead after it's unused now. */
4124 switch (GET_CODE (reg))
4128 case STRICT_LOW_PART:
4129 /* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
4131 reg = XEXP (reg, 0);
4132 while (GET_CODE (reg) == SUBREG
4133 || GET_CODE (reg) == ZERO_EXTRACT
4134 || GET_CODE (reg) == SIGN_EXTRACT
4135 || GET_CODE (reg) == STRICT_LOW_PART);
4136 if (GET_CODE (reg) == MEM)
     /* A partial set leaves the register live if it was live before.  */
4138 not_dead = REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
4142 regno_last = regno_first = REGNO (reg);
4143 if (regno_first < FIRST_PSEUDO_REGISTER)
4144 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
4148 if (GET_CODE (SUBREG_REG (reg)) == REG)
4150 enum machine_mode outer_mode = GET_MODE (reg);
4151 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
4153 /* Identify the range of registers affected. This is moderately
4154 tricky for hard registers. See alter_subreg. */
4156 regno_last = regno_first = REGNO (SUBREG_REG (reg));
4157 if (regno_first < FIRST_PSEUDO_REGISTER)
4159 #ifdef ALTER_HARD_SUBREG
4160 regno_first = ALTER_HARD_SUBREG (outer_mode, SUBREG_WORD (reg),
4161 inner_mode, regno_first);
4163 regno_first += SUBREG_WORD (reg);
4165 regno_last = (regno_first
4166 + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
4168 /* Since we've just adjusted the register number ranges, make
4169 sure REG matches. Otherwise some_was_live will be clear
4170 when it shouldn't have been, and we'll create incorrect
4171 REG_UNUSED notes. */
4172 reg = gen_rtx_REG (outer_mode, regno_first);
4176 /* If the number of words in the subreg is less than the number
4177 of words in the full register, we have a well-defined partial
4178 set. Otherwise the high bits are undefined.
4180 This is only really applicable to pseudos, since we just took
4181 care of multi-word hard registers. */
4182 if (((GET_MODE_SIZE (outer_mode)
4183 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
4184 < ((GET_MODE_SIZE (inner_mode)
4185 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
4186 not_dead = REGNO_REG_SET_P (pbi->reg_live, regno_first)
4188 reg = SUBREG_REG (reg);
4192 reg = SUBREG_REG (reg);
4199 /* If this set is a MEM, then it kills any aliased writes.
4200 If this set is a REG, then it kills any MEMs which use the reg. */
4201 if (flags & PROP_SCAN_DEAD_CODE)
4203 if (GET_CODE (reg) == MEM || GET_CODE (reg) == REG)
4205 rtx temp = pbi->mem_set_list;
4206 rtx prev = NULL_RTX;
4211 next = XEXP (temp, 1);
4212 if ((GET_CODE (reg) == MEM
4213 && output_dependence (XEXP (temp, 0), reg))
4214 || (GET_CODE (reg) == REG
4215 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
4217 /* Splice this entry out of the list. */
4219 XEXP (prev, 1) = next;
4221 pbi->mem_set_list = next;
4222 free_EXPR_LIST_node (temp);
4230 /* If the memory reference had embedded side effects (autoincrement
4231 address modes. Then we may need to kill some entries on the
4233 if (insn && GET_CODE (reg) == MEM)
4234 invalidate_mems_from_autoinc (pbi, insn);
4236 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
4237 /* We do not know the size of a BLKmode store, so we do not track
4238 them for redundant store elimination. */
4239 && GET_MODE (reg) != BLKmode
4240 /* There are no REG_INC notes for SP, so we can't assume we'll see
4241 everything that invalidates it. To be safe, don't eliminate any
4242 stores though SP; none of them should be redundant anyway. */
4243 && ! reg_mentioned_p (stack_pointer_rtx, reg))
4244 pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
     /* Exclude the frame/arg pointers: sets of those must never look dead
        while the pointer is in use.  */
4247 if (GET_CODE (reg) == REG
4248 && ! (regno_first == FRAME_POINTER_REGNUM
4249 && (! reload_completed || frame_pointer_needed))
4250 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4251 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
4252 && (! reload_completed || frame_pointer_needed))
4254 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4255 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
4259 int some_was_live = 0, some_was_dead = 0;
4261 for (i = regno_first; i <= regno_last; ++i)
4263 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
4265 SET_REGNO_REG_SET (pbi->local_set, i);
4266 if (code != CLOBBER)
4267 SET_REGNO_REG_SET (pbi->new_set, i);
4269 some_was_live |= needed_regno;
4270 some_was_dead |= ! needed_regno;
4273 #ifdef HAVE_conditional_execution
4274 /* Consider conditional death in deciding that the register needs
4276 if (some_was_live && ! not_dead
4277 /* The stack pointer is never dead. Well, not strictly true,
4278 but it's very difficult to tell from here. Hopefully
4279 combine_stack_adjustments will fix up the most egregious
4281 && regno_first != STACK_POINTER_REGNUM)
4283 for (i = regno_first; i <= regno_last; ++i)
4284 if (! mark_regno_cond_dead (pbi, i, cond))
4289 /* Additional data to record if this is the final pass. */
4290 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
4291 | PROP_DEATH_NOTES | PROP_AUTOINC))
4294 register int blocknum = pbi->bb->index;
4297 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4299 y = pbi->reg_next_use[regno_first];
4301 /* The next use is no longer next, since a store intervenes. */
4302 for (i = regno_first; i <= regno_last; ++i)
4303 pbi->reg_next_use[i] = 0;
4306 if (flags & PROP_REG_INFO)
4308 for (i = regno_first; i <= regno_last; ++i)
4310 /* Count (weighted) references, stores, etc. This counts a
4311 register twice if it is modified, but that is correct. */
4312 REG_N_SETS (i) += 1;
4313 REG_N_REFS (i) += (optimize_size ? 1
4314 : pbi->bb->loop_depth + 1);
4316 /* The insns where a reg is live are normally counted
4317 elsewhere, but we want the count to include the insn
4318 where the reg is set, and the normal counting mechanism
4319 would not count it. */
4320 REG_LIVE_LENGTH (i) += 1;
4323 /* If this is a hard reg, record this function uses the reg. */
4324 if (regno_first < FIRST_PSEUDO_REGISTER)
4326 for (i = regno_first; i <= regno_last; i++)
4327 regs_ever_live[i] = 1;
4331 /* Keep track of which basic blocks each reg appears in. */
4332 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
4333 REG_BASIC_BLOCK (regno_first) = blocknum;
4334 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
4335 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
4339 if (! some_was_dead)
4341 if (flags & PROP_LOG_LINKS)
4343 /* Make a logical link from the next following insn
4344 that uses this register, back to this insn.
4345 The following insns have already been processed.
4347 We don't build a LOG_LINK for hard registers containing
4348 in ASM_OPERANDs. If these registers get replaced,
4349 we might wind up changing the semantics of the insn,
4350 even if reload can make what appear to be valid
4351 assignments later. */
4352 if (y && (BLOCK_NUM (y) == blocknum)
4353 && (regno_first >= FIRST_PSEUDO_REGISTER
4354 || asm_noperands (PATTERN (y)) < 0))
4355 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4360 else if (! some_was_live)
4362 if (flags & PROP_REG_INFO)
4363 REG_N_DEATHS (regno_first) += 1;
4365 if (flags & PROP_DEATH_NOTES)
4367 /* Note that dead stores have already been deleted
4368 when possible. If we get here, we have found a
4369 dead store that cannot be eliminated (because the
4370 same insn does something useful). Indicate this
4371 by marking the reg being set as dying here. */
4373 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4378 if (flags & PROP_DEATH_NOTES)
4380 /* This is a case where we have a multi-word hard register
4381 and some, but not all, of the words of the register are
4382 needed in subsequent insns. Write REG_UNUSED notes
4383 for those parts that were not needed. This case should
4386 for (i = regno_first; i <= regno_last; ++i)
4387 if (! REGNO_REG_SET_P (pbi->reg_live, i))
4389 = alloc_EXPR_LIST (REG_UNUSED,
4390 gen_rtx_REG (reg_raw_mode[i], i),
4396 /* Mark the register as being dead. */
4399 /* The stack pointer is never dead. Well, not strictly true,
4400 but it's very difficult to tell from here. Hopefully
4401 combine_stack_adjustments will fix up the most egregious
4403 && regno_first != STACK_POINTER_REGNUM)
4405 for (i = regno_first; i <= regno_last; ++i)
4406 CLEAR_REGNO_REG_SET (pbi->reg_live, i);
4409 else if (GET_CODE (reg) == REG)
4411 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4412 pbi->reg_next_use[regno_first] = 0;
4415 /* If this is the last pass and this is a SCRATCH, show it will be dying
4416 here and count it. */
4417 else if (GET_CODE (reg) == SCRATCH)
4419 if (flags & PROP_DEATH_NOTES)
4421 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
#ifdef HAVE_conditional_execution
4426 /* Mark REGNO conditionally dead. Return true if the register is
4427 now unconditionally dead. */
4430 mark_regno_cond_dead (pbi, regno, cond)
4431 struct propagate_block_info *pbi;
4435 /* If this is a store to a predicate register, the value of the
4436 predicate is changing, we don't know that the predicate as seen
4437 before is the same as that seen after. Flush all dependent
4438 conditions from reg_cond_dead. This will make all such
4439 conditionally live registers unconditionally live. */
4440 if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
4441 flush_reg_cond_reg (pbi, regno);
4443 /* If this is an unconditional store, remove any conditional
4444 life that may have existed. */
4445 if (cond == NULL_RTX)
4446 splay_tree_remove (pbi->reg_cond_dead, regno);
4449 splay_tree_node node;
4450 struct reg_cond_life_info *rcli;
4453 /* Otherwise this is a conditional set. Record that fact.
4454 It may have been conditionally used, or there may be a
4455 subsequent set with a complementary condition. */
4457 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
4460 /* The register was unconditionally live previously.
4461 Record the current condition as the condition under
4462 which it is dead. */
4463 rcli = (struct reg_cond_life_info *)
4464 xmalloc (sizeof (*rcli));
4465 rcli->condition = alloc_EXPR_LIST (0, cond, NULL_RTX);
4466 splay_tree_insert (pbi->reg_cond_dead, regno,
4467 (splay_tree_value) rcli);
4469 SET_REGNO_REG_SET (pbi->reg_cond_reg,
4470 REGNO (XEXP (cond, 0)));
4472 /* Not unconditionally dead.  */
4477 /* The register was conditionally live previously.
4478 Add the new condition to the old. */
4479 rcli = (struct reg_cond_life_info *) node->value;
4480 ncond = rcli->condition;
4481 ncond = ior_reg_cond (ncond, cond);
4483 /* If the register is now unconditionally dead,
4484 remove the entry in the splay_tree. */
4485 if (ncond == const1_rtx)
4486 splay_tree_remove (pbi->reg_cond_dead, regno);
4489 rcli->condition = ncond;
4491 SET_REGNO_REG_SET (pbi->reg_cond_reg,
4492 REGNO (XEXP (cond, 0)));
4494 /* Not unconditionally dead.  */
4503 /* Called from splay_tree_delete for pbi->reg_cond_dead.
   NOTE(review): the original comment said "reg_cond_life", but the only
   splay tree visible in this file is reg_cond_dead.  Frees the condition
   list carried by a reg_cond_life_info node.  */
4506 free_reg_cond_life_info (value)
4507 splay_tree_value value;
4509 struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
4510 free_EXPR_LIST_list (&rcli->condition);
4514 /* Helper function for flush_reg_cond_reg.
   Called via splay_tree_foreach; DATA is a two-int array: DATA[0] is the
   register number being flushed, DATA[1] the key of the last node removed
   (so earlier nodes can be skipped).  */
4517 flush_reg_cond_reg_1 (node, data)
4518 splay_tree_node node;
4521 struct reg_cond_life_info *rcli;
4522 int *xdata = (int *) data;
4523 unsigned int regno = xdata[0];
4526 /* Don't need to search if last flushed value was farther on in
4527 the in-order traversal. */
4528 if (xdata[1] >= (int) node->key)
4531 /* Splice out portions of the expression that refer to regno. */
4532 rcli = (struct reg_cond_life_info *) node->value;
4533 c = *(prev = &rcli->condition);
4536 if (regno == REGNO (XEXP (XEXP (c, 0), 0)))
4538 rtx next = XEXP (c, 1);
4539 free_EXPR_LIST_node (c);
4543 c = *(prev = &XEXP (c, 1));
4546 /* If the entire condition is now NULL, signal the node to be removed. */
4547 if (! rcli->condition)
4549 xdata[1] = node->key;
4556 /* Flush all (sub) expressions referring to REGNO from REG_COND_LIVE.
   Repeatedly walks reg_cond_dead until no node asks to be removed (the
   foreach callback returns -1 to signal a removal), then clears REGNO
   from the reg_cond_reg set.  */
4559 flush_reg_cond_reg (pbi, regno)
4560 struct propagate_block_info *pbi;
4567 while (splay_tree_foreach (pbi->reg_cond_dead,
4568 flush_reg_cond_reg_1, pair) == -1)
4569 splay_tree_remove (pbi->reg_cond_dead, pair[1]);
4571 CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
4574 /* Logical arithmetic on predicate conditions. IOR, NOT and NAND.
4575 We actually use EXPR_LIST to chain the sub-expressions together
4576 instead of IOR because it's easier to manipulate and we have
4577 the lists.c functions to reuse nodes.
4579 Return a new rtl expression as appropriate.
   ior_reg_cond: OR condition X into the EXPR_LIST chain OLD.  */
4582 ior_reg_cond (old, x)
4585 enum rtx_code x_code;
4589 /* We expect these conditions to be of the form (eq reg 0). */
4590 x_code = GET_CODE (x);
4591 if (GET_RTX_CLASS (x_code) != '<'
4592 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4593 || XEXP (x, 1) != const0_rtx)
4596 /* Search the expression for an existing sub-expression of X_REG. */
4597 for (c = old; c ; c = XEXP (c, 1))
4599 rtx y = XEXP (c, 0);
4600 if (REGNO (XEXP (y, 0)) == REGNO (x_reg))
4602 /* If we find X already present in OLD, we need do nothing. */
4603 if (GET_CODE (y) == x_code)
4606 /* If we find X being a complement of a condition in OLD,
4607 then the entire condition is true. */
4608 if (GET_CODE (y) == reverse_condition (x_code))
4613 /* Otherwise just add to the chain. */
4614 return alloc_EXPR_LIST (0, x, old);
/* NOTE(review): body fragment -- presumably of not_reg_cond (the NOT of
   the IOR/NOT/NAND trio described above); its signature line is not
   visible here.  Returns a one-element condition chain holding the
   reversed comparison of X.  */
4621 enum rtx_code x_code;
4624 /* We expect these conditions to be of the form (eq reg 0). */
4625 x_code = GET_CODE (x);
4626 if (GET_RTX_CLASS (x_code) != '<'
4627 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4628 || XEXP (x, 1) != const0_rtx)
4631 return alloc_EXPR_LIST (0, gen_rtx_fmt_ee (reverse_condition (x_code),
4632 VOIDmode, x_reg, const0_rtx),
/* NAND condition X against the EXPR_LIST chain OLD, returning the
   updated chain (const0_rtx when the chain becomes empty).  */
4637 nand_reg_cond (old, x)
4640 enum rtx_code x_code;
4644 /* We expect these conditions to be of the form (eq reg 0). */
4645 x_code = GET_CODE (x);
4646 if (GET_RTX_CLASS (x_code) != '<'
4647 || GET_CODE (x_reg = XEXP (x, 0)) != REG
4648 || XEXP (x, 1) != const0_rtx)
4651 /* Search the expression for an existing sub-expression of X_REG. */
4653 for (c = *(prev = &old); c ; c = *(prev = &XEXP (c, 1)))
4655 rtx y = XEXP (c, 0);
4656 if (REGNO (XEXP (y, 0)) == REGNO (x_reg))
4658 /* If we find X already present in OLD, then we need to
4660 if (GET_CODE (y) == x_code)
4662 *prev = XEXP (c, 1);
4663 free_EXPR_LIST_node (c);
4664 return old ? old : const0_rtx;
4667 /* If we find X being a complement of a condition in OLD,
4668 then we need do nothing. */
4669 if (GET_CODE (y) == reverse_condition (x_code))
4674 /* Otherwise, by implication, the register in question is now live for
4675 the inverse of the condition X. */
4676 return alloc_EXPR_LIST (0, gen_rtx_fmt_ee (reverse_condition (x_code),
4677 VOIDmode, x_reg, const0_rtx),
4680 #endif /* HAVE_conditional_execution */
4684 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
   addressing mode, by combining it with a later increment insn found via
   PBI->reg_next_use.  On success, rewrites X's address in place, updates
   REG_INC notes, liveness, and register statistics, and neutralizes or
   deletes the increment insn.  */
4688 find_auto_inc (pbi, x, insn)
4689 struct propagate_block_info *pbi;
4693 rtx addr = XEXP (x, 0);
4694 HOST_WIDE_INT offset = 0;
4697 /* Here we detect use of an index register which might be good for
4698 postincrement, postdecrement, preincrement, or predecrement. */
4700 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4701 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
4703 if (GET_CODE (addr) == REG)
4706 register int size = GET_MODE_SIZE (GET_MODE (x));
4709 int regno = REGNO (addr);
4711 /* Is the next use an increment that might make auto-increment? */
4712 if ((incr = pbi->reg_next_use[regno]) != 0
4713 && (set = single_set (incr)) != 0
4714 && GET_CODE (set) == SET
4715 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
4716 /* Can't add side effects to jumps; if reg is spilled and
4717 reloaded, there's no way to store back the altered value. */
4718 && GET_CODE (insn) != JUMP_INSN
4719 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
4720 && XEXP (y, 0) == addr
4721 && GET_CODE (XEXP (y, 1)) == CONST_INT
4722 && ((HAVE_POST_INCREMENT
4723 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
4724 || (HAVE_POST_DECREMENT
4725 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
4726 || (HAVE_PRE_INCREMENT
4727 && (INTVAL (XEXP (y, 1)) == size && offset == size))
4728 || (HAVE_PRE_DECREMENT
4729 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
4730 /* Make sure this reg appears only once in this insn. */
4731 && (use = find_use_as_address (PATTERN (insn), addr, offset),
4732 use != 0 && use != (rtx) 1))
4734 rtx q = SET_DEST (set);
4735 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
4736 ? (offset ? PRE_INC : POST_INC)
4737 : (offset ? PRE_DEC : POST_DEC));
4739 if (dead_or_set_p (incr, addr)
4740 /* Mustn't autoinc an eliminable register. */
4741 && (regno >= FIRST_PSEUDO_REGISTER
4742 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
4744 /* This is the simple case. Try to make the auto-inc. If
4745 we can't, we are done. Otherwise, we will do any
4746 needed updates below. */
4747 if (! validate_change (insn, &XEXP (x, 0),
4748 gen_rtx_fmt_e (inc_code, Pmode, addr),
4752 else if (GET_CODE (q) == REG
4753 /* PREV_INSN used here to check the semi-open interval
4755 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4756 /* We must also check for sets of q as q may be
4757 a call clobbered hard register and there may
4758 be a call between PREV_INSN (insn) and incr. */
4759 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4761 /* We have *p followed sometime later by q = p+size.
4762 Both p and q must be live afterward,
4763 and q is not used between INSN and its assignment.
4764 Change it to q = p, ...*q..., q = q+size.
4765 Then fall into the usual case. */
4769 emit_move_insn (q, addr);
4770 insns = get_insns ();
4773 if (basic_block_for_insn)
4774 for (temp = insns; temp; temp = NEXT_INSN (temp))
4775 set_block_for_insn (temp, pbi->bb);
4777 /* If we can't make the auto-inc, or can't make the
4778 replacement into Y, exit. There's no point in making
4779 the change below if we can't do the auto-inc and doing
4780 so is not correct in the pre-inc case. */
4782 validate_change (insn, &XEXP (x, 0),
4783 gen_rtx_fmt_e (inc_code, Pmode, q),
4785 validate_change (incr, &XEXP (y, 0), q, 1);
4786 if (! apply_change_group ())
4789 /* We now know we'll be doing this change, so emit the
4790 new insn(s) and do the updates. */
4791 emit_insns_before (insns, insn);
4793 if (pbi->bb->head == insn)
4794 pbi->bb->head = insns;
4796 /* INCR will become a NOTE and INSN won't contain a
4797 use of ADDR. If a use of ADDR was just placed in
4798 the insn before INSN, make that the next use.
4799 Otherwise, invalidate it. */
4800 if (GET_CODE (PREV_INSN (insn)) == INSN
4801 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4802 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
4803 pbi->reg_next_use[regno] = PREV_INSN (insn);
4805 pbi->reg_next_use[regno] = 0;
4810 /* REGNO is now used in INCR which is below INSN, but it
4811 previously wasn't live here. If we don't mark it as
4812 live, we'll put a REG_DEAD note for it on this insn,
4813 which is incorrect. */
4814 SET_REGNO_REG_SET (pbi->reg_live, regno);
4816 /* If there are any calls between INSN and INCR, show
4817 that REGNO now crosses them. */
4818 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4819 if (GET_CODE (temp) == CALL_INSN)
4820 REG_N_CALLS_CROSSED (regno)++;
4825 /* If we haven't returned, it means we were able to make the
4826 auto-inc, so update the status. First, record that this insn
4827 has an implicit side effect. */
4830 = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
4832 /* Modify the old increment-insn to simply copy
4833 the already-incremented value of our register. */
4834 if (! validate_change (incr, &SET_SRC (set), addr, 0))
4837 /* If that makes it a no-op (copying the register into itself) delete
4838 it so it won't appear to be a "use" and a "set" of this
4840 if (SET_DEST (set) == addr)
4842 /* If the original source was dead, it's dead now. */
4843 rtx note = find_reg_note (incr, REG_DEAD, NULL_RTX);
4844 if (note && XEXP (note, 0) != addr)
4845 CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
4847 PUT_CODE (incr, NOTE);
4848 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4849 NOTE_SOURCE_FILE (incr) = 0;
4852 if (regno >= FIRST_PSEUDO_REGISTER)
4854 /* Count an extra reference to the reg. When a reg is
4855 incremented, spilling it is worse, so we want to make
4856 that less likely. */
4857 REG_N_REFS (regno) += (optimize_size ? 1
4858 : pbi->bb->loop_depth + 1);
4860 /* Count the increment as a setting of the register,
4861 even though it isn't a SET in rtl. */
4862 REG_N_SETS (regno)++;
4867 #endif /* AUTO_INC_DEC */
/* Record a use of register REG in INSN: update reg_next_use and the
   register statistics, mark REG (and all words of a multi-word hard reg)
   live in PBI->reg_live, emit REG_DEAD notes where the use makes the
   register die here, and track conditional life when COND is non-null.  */
4870 mark_used_reg (pbi, reg, cond, insn)
4871 struct propagate_block_info *pbi;
4873 rtx cond ATTRIBUTE_UNUSED;
4876 int regno = REGNO (reg);
4877 int some_was_live = REGNO_REG_SET_P (pbi->reg_live, regno);
4878 int some_was_dead = ! some_was_live;
4882 /* A hard reg in a wide mode may really be multiple registers.
4883 If so, mark all of them just like the first. */
4884 if (regno < FIRST_PSEUDO_REGISTER)
4886 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4889 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, regno + n);
4890 some_was_live |= needed_regno;
4891 some_was_dead |= ! needed_regno;
4895 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4897 /* Record where each reg is used, so when the reg is set we know
4898 the next insn that uses it. */
4899 pbi->reg_next_use[regno] = insn;
4902 if (pbi->flags & PROP_REG_INFO)
4904 if (regno < FIRST_PSEUDO_REGISTER)
4906 /* If this is a register we are going to try to eliminate,
4907 don't mark it live here. If we are successful in
4908 eliminating it, it need not be live unless it is used for
4909 pseudos, in which case it will have been set live when it
4910 was allocated to the pseudos. If the register will not
4911 be eliminated, reload will set it live at that point.
4913 Otherwise, record that this function uses this register. */
4914 /* ??? The PPC backend tries to "eliminate" on the pic
4915 register to itself. This should be fixed. In the mean
4916 time, hack around it. */
4918 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno)
4919 && (regno == FRAME_POINTER_REGNUM
4920 || regno == ARG_POINTER_REGNUM)))
4922 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4924 regs_ever_live[regno + --n] = 1;
4930 /* Keep track of which basic block each reg appears in. */
4932 register int blocknum = pbi->bb->index;
4933 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4934 REG_BASIC_BLOCK (regno) = blocknum;
4935 else if (REG_BASIC_BLOCK (regno) != blocknum)
4936 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4938 /* Count (weighted) number of uses of each reg. */
4939 REG_N_REFS (regno) += (optimize_size ? 1
4940 : pbi->bb->loop_depth + 1);
4944 /* Find out if any of the register was set this insn. */
4945 some_not_set = ! REGNO_REG_SET_P (pbi->new_set, regno);
4946 if (regno < FIRST_PSEUDO_REGISTER)
4948 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4950 some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, regno + n);
4953 /* Record and count the insns in which a reg dies. If it is used in
4954 this insn and was dead below the insn then it dies in this insn.
4955 If it was set in this insn, we do not make a REG_DEAD note;
4956 likewise if we already made such a note. */
4957 if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
4961 /* Check for the case where the register dying partially
4962 overlaps the register set by this insn. */
4963 if (regno < FIRST_PSEUDO_REGISTER
4964 && HARD_REGNO_NREGS (regno, GET_MODE (reg)) > 1)
4966 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4968 some_was_live |= REGNO_REG_SET_P (pbi->new_set, regno + n);
4971 /* If none of the words in X is needed, make a REG_DEAD note.
4972 Otherwise, we must make partial REG_DEAD notes. */
4973 if (! some_was_live)
4975 if ((pbi->flags & PROP_DEATH_NOTES)
4976 && ! find_regno_note (insn, REG_DEAD, regno))
4978 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
4980 if (pbi->flags & PROP_REG_INFO)
4981 REG_N_DEATHS (regno)++;
4985 /* Don't make a REG_DEAD note for a part of a register
4986 that is set in the insn. */
4988 n = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
4989 for (; n >= regno; n--)
4990 if (! REGNO_REG_SET_P (pbi->reg_live, n)
4991 && ! dead_or_set_regno_p (insn, n))
4993 = alloc_EXPR_LIST (REG_DEAD,
4994 gen_rtx_REG (reg_raw_mode[n], n),
4999 SET_REGNO_REG_SET (pbi->reg_live, regno);
5000 if (regno < FIRST_PSEUDO_REGISTER)
5002 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5004 SET_REGNO_REG_SET (pbi->reg_live, regno + n);
5007 #ifdef HAVE_conditional_execution
5008 /* If this is a conditional use, record that fact. If it is later
5009 conditionally set, we'll know to kill the register. */
5010 if (cond != NULL_RTX)
5012 splay_tree_node node;
5013 struct reg_cond_life_info *rcli;
5018 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
5021 /* The register was unconditionally live previously.
5022 No need to do anything. */
5026 /* The register was conditionally live previously.
5027 Subtract the new life cond from the old death cond. */
5028 rcli = (struct reg_cond_life_info *) node->value;
5029 ncond = rcli->condition;
5030 ncond = nand_reg_cond (ncond, cond);
5032 /* If the register is now unconditionally live, remove the
5033 entry in the splay_tree. */
5034 if (ncond == const0_rtx)
5036 rcli->condition = NULL_RTX;
5037 splay_tree_remove (pbi->reg_cond_dead, regno);
5040 rcli->condition = ncond;
5045 /* The register was not previously live at all. Record
5046 the condition under which it is still dead. */
5047 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
5048 rcli->condition = not_reg_cond (cond);
5049 splay_tree_insert (pbi->reg_cond_dead, regno,
5050 (splay_tree_value) rcli);
5056 /* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
5057 This is done assuming the registers needed from X are those that
5058 have 1-bits in PBI->REG_LIVE.
5060 INSN is the containing instruction. If INSN is dead, this function
/* Recursively scan rtx X, recording every register use into PBI's live
   set (via mark_used_reg) and maintaining PBI's dead-store bookkeeping.
   COND is the conditional-execution guard (or NULL_RTX); INSN is the
   containing instruction.
   NOTE(review): this listing is fragmentary -- source lines are elided
   between the numbered lines, so adjacent lines here are not necessarily
   adjacent in the original file.  */
5064 mark_used_regs (pbi, x, cond, insn)
5065 struct propagate_block_info *pbi;
5068 register RTX_CODE code;
5070 int flags = pbi->flags;
5073 code = GET_CODE (x);
5093 /* If we are clobbering a MEM, mark any registers inside the address
5095 if (GET_CODE (XEXP (x, 0)) == MEM)
5096 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
5100 /* Don't bother watching stores to mems if this is not the
5101 final pass. We'll not be deleting dead stores this round. */
5102 if (flags & PROP_SCAN_DEAD_CODE)
5104 /* Invalidate the data for the last MEM stored, but only if MEM is
5105 something that can be stored into. */
5106 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
5107 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
5108 ; /* needn't clear the memory set list */
/* Walk the mem_set_list, splicing out any entry that may conflict
   (anti-dependence) with this use of X.  */
5111 rtx temp = pbi->mem_set_list;
5112 rtx prev = NULL_RTX;
5117 next = XEXP (temp, 1);
5118 if (anti_dependence (XEXP (temp, 0), x))
5120 /* Splice temp out of the list. */
5122 XEXP (prev, 1) = next;
5124 pbi->mem_set_list = next;
5125 free_EXPR_LIST_node (temp);
5133 /* If the memory reference had embedded side effects (autoincrement
5134 address modes), then we may need to kill some entries on the
5137 invalidate_mems_from_autoinc (pbi, insn);
5141 if (flags & PROP_AUTOINC)
5142 find_auto_inc (pbi, x, insn);
/* A paradoxical/size-changing SUBREG of a pseudo: remember that the
   pseudo is accessed in a different size.  */
5147 if (GET_CODE (SUBREG_REG (x)) == REG
5148 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
5149 && (GET_MODE_SIZE (GET_MODE (x))
5150 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
5151 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
5153 /* While we're here, optimize this case. */
5155 if (GET_CODE (x) != REG)
5160 /* See a register other than being set => mark it as needed. */
5161 mark_used_reg (pbi, x, cond, insn);
5166 register rtx testreg = SET_DEST (x);
5169 /* If storing into MEM, don't show it as being used. But do
5170 show the address as being used. */
5171 if (GET_CODE (testreg) == MEM)
5174 if (flags & PROP_AUTOINC)
5175 find_auto_inc (pbi, testreg, insn);
5177 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
5178 mark_used_regs (pbi, SET_SRC (x), cond, insn);
5182 /* Storing in STRICT_LOW_PART is like storing in a reg
5183 in that this SET might be dead, so ignore it in TESTREG.
5184 but in some other ways it is like using the reg.
5186 Storing in a SUBREG or a bit field is like storing the entire
5187 register in that if the register's value is not used
5188 then this SET is not needed. */
5189 while (GET_CODE (testreg) == STRICT_LOW_PART
5190 || GET_CODE (testreg) == ZERO_EXTRACT
5191 || GET_CODE (testreg) == SIGN_EXTRACT
5192 || GET_CODE (testreg) == SUBREG)
5194 if (GET_CODE (testreg) == SUBREG
5195 && GET_CODE (SUBREG_REG (testreg)) == REG
5196 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
5197 && (GET_MODE_SIZE (GET_MODE (testreg))
5198 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
5199 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
5201 /* Modifying a single register in an alternate mode
5202 does not use any of the old value. But these other
5203 ways of storing in a register do use the old value. */
5204 if (GET_CODE (testreg) == SUBREG
5205 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5210 testreg = XEXP (testreg, 0);
5213 /* If this is a store into a register, recursively scan the
5214 value being stored. */
5216 if ((GET_CODE (testreg) == PARALLEL
5217 && GET_MODE (testreg) == BLKmode)
5218 || (GET_CODE (testreg) == REG
5219 && (regno = REGNO (testreg),
5220 ! (regno == FRAME_POINTER_REGNUM
5221 && (! reload_completed || frame_pointer_needed)))
5222 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5223 && ! (regno == HARD_FRAME_POINTER_REGNUM
5224 && (! reload_completed || frame_pointer_needed))
5226 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5227 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
5232 mark_used_regs (pbi, SET_DEST (x), cond, insn);
5233 mark_used_regs (pbi, SET_SRC (x), cond, insn);
5240 case UNSPEC_VOLATILE:
5244 /* Traditional and volatile asm instructions must be considered to use
5245 and clobber all hard registers, all pseudo-registers and all of
5246 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
5248 Consider for instance a volatile asm that changes the fpu rounding
5249 mode. An insn should not be moved across this even if it only uses
5250 pseudo-regs because it might give an incorrectly rounded result.
5252 ?!? Unfortunately, marking all hard registers as live causes massive
5253 problems for the register allocator and marking all pseudos as live
5254 creates mountains of uninitialized variable warnings.
5256 So for now, just clear the memory set list and mark any regs
5257 we can find in ASM_OPERANDS as used. */
5258 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
5259 free_EXPR_LIST_list (&pbi->mem_set_list);
5261 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
5262 We can not just fall through here since then we would be confused
5263 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
5264 traditional asms unlike their normal usage. */
5265 if (code == ASM_OPERANDS)
5269 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
5270 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
/* COND_EXEC: scan the test unconditionally, then scan the guarded
   body with the test as the new condition.  */
5276 if (cond != NULL_RTX)
5279 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
5281 cond = COND_EXEC_TEST (x);
5282 x = COND_EXEC_CODE (x);
5286 /* We _do_not_ want to scan operands of phi nodes. Operands of
5287 a phi function are evaluated only when control reaches this
5288 block along a particular edge. Therefore, regs that appear
5289 as arguments to phi should not be added to the global live at
5297 /* Recursively scan the operands of this expression. */
5300 register const char *fmt = GET_RTX_FORMAT (code);
5303 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5307 /* Tail recursive case: save a function call level. */
5313 mark_used_regs (pbi, XEXP (x, i), cond, insn);
5315 else if (fmt[i] == 'E')
5318 for (j = 0; j < XVECLEN (x, i); j++)
5319 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
/* Given INSN, an add-constant-to-register instruction, try to convert
   the *next* use of that register (tracked in pbi->reg_next_use) into a
   pre-increment/pre-decrement address, then delete INSN by turning it
   into a NOTE_INSN_DELETED.
   NOTE(review): fragmentary listing -- lines are elided between the
   numbered lines.  */
5328 try_pre_increment_1 (pbi, insn)
5329 struct propagate_block_info *pbi;
5332 /* Find the next use of this reg. If in same basic block,
5333 make it do pre-increment or pre-decrement if appropriate. */
5334 rtx x = single_set (insn);
/* AMOUNT is positive for PLUS, negative otherwise (i.e. a decrement).  */
5335 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
5336 * INTVAL (XEXP (SET_SRC (x), 1)));
5337 int regno = REGNO (SET_DEST (x));
5338 rtx y = pbi->reg_next_use[regno];
5340 && BLOCK_NUM (y) == BLOCK_NUM (insn)
5341 /* Don't do this if the reg dies, or gets set in y; a standard addressing
5342 mode would be better. */
5343 && ! dead_or_set_p (y, SET_DEST (x))
5344 && try_pre_increment (y, SET_DEST (x), amount))
5346 /* We have found a suitable auto-increment
5347 and already changed insn Y to do it.
5348 So flush this increment-instruction. */
5349 PUT_CODE (insn, NOTE);
5350 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
5351 NOTE_SOURCE_FILE (insn) = 0;
5352 /* Count a reference to this reg for the increment
5353 insn we are deleting. When a reg is incremented,
5354 spilling it is worse, so we want to make that
5356 if (regno >= FIRST_PSEUDO_REGISTER)
5358 REG_N_REFS (regno) += (optimize_size ? 1
5359 : pbi->bb->loop_depth + 1);
5360 REG_N_SETS (regno)++;
5367 /* Try to change INSN so that it does pre-increment or pre-decrement
5368 addressing on register REG in order to add AMOUNT to REG.
5369 AMOUNT is negative for pre-decrement.
5370 Returns 1 if the change could be made.
5371 This checks all about the validity of the result of modifying INSN. */
/* Try to change INSN so that it does pre/post-increment (or decrement)
   addressing on REG, adding AMOUNT to it (AMOUNT < 0 for decrement).
   Returns nonzero if the change was validated.
   NOTE(review): fragmentary listing -- lines are elided between the
   numbered lines.  */
5374 try_pre_increment (insn, reg, amount)
5376 HOST_WIDE_INT amount;
5380 /* Nonzero if we can try to make a pre-increment or pre-decrement.
5381 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
5383 /* Nonzero if we can try to make a post-increment or post-decrement.
5384 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
5385 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
5386 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
5389 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
5392 /* From the sign of increment, see which possibilities are conceivable
5393 on this target machine. */
5394 if (HAVE_PRE_INCREMENT && amount > 0)
5396 if (HAVE_POST_INCREMENT && amount > 0)
5399 if (HAVE_PRE_DECREMENT && amount < 0)
5401 if (HAVE_POST_DECREMENT && amount < 0)
5404 if (! (pre_ok || post_ok))
5407 /* It is not safe to add a side effect to a jump insn
5408 because if the incremented register is spilled and must be reloaded
5409 there would be no way to store the incremented value back in memory. */
5411 if (GET_CODE (insn) == JUMP_INSN)
/* First look for a plain (mem (reg)) use (pre form); if post is allowed
   and that failed, look for (mem (plus reg -amount)).  */
5416 use = find_use_as_address (PATTERN (insn), reg, 0);
5417 if (post_ok && (use == 0 || use == (rtx) 1))
5419 use = find_use_as_address (PATTERN (insn), reg, -amount);
5423 if (use == 0 || use == (rtx) 1)
/* The access size must match the increment amount exactly.  */
5426 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
5429 /* See if this combination of instruction and addressing mode exists. */
5430 if (! validate_change (insn, &XEXP (use, 0),
5431 gen_rtx_fmt_e (amount > 0
5432 ? (do_post ? POST_INC : PRE_INC)
5433 : (do_post ? POST_DEC : PRE_DEC),
5437 /* Record that this insn now has an implicit side effect on X. */
5438 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
5442 #endif /* AUTO_INC_DEC */
5444 /* Find the place in the rtx X where REG is used as a memory address.
5445 Return the MEM rtx that so uses it.
5446 If PLUSCONST is nonzero, search instead for a memory address equivalent to
5447 (plus REG (const_int PLUSCONST)).
5449 If such an address does not appear, return 0.
5450 If REG appears more than once, or is used other than in such an address,
/* Find the place in rtx X where REG is used as a memory address and
   return that MEM.  With nonzero PLUSCONST, search instead for an
   address of the form (plus REG (const_int PLUSCONST)).  Returns 0 if
   no such use appears; returns (rtx) 1 as a sentinel for a disqualifying
   use (bit-field reference, multiple uses, etc.).
   NOTE(review): fragmentary listing -- lines are elided between the
   numbered lines.  */
5454 find_use_as_address (x, reg, plusconst)
5457 HOST_WIDE_INT plusconst;
5459 enum rtx_code code = GET_CODE (x);
5460 const char *fmt = GET_RTX_FORMAT (code);
5462 register rtx value = 0;
5465 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
5468 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
5469 && XEXP (XEXP (x, 0), 0) == reg
5470 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5471 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
5474 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
5476 /* If REG occurs inside a MEM used in a bit-field reference,
5477 that is unacceptable. */
5478 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
5479 return (rtx) (HOST_WIDE_INT) 1;
5483 return (rtx) (HOST_WIDE_INT) 1;
/* Recurse over operands; any second hit disqualifies the whole rtx.  */
5485 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5489 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
5493 return (rtx) (HOST_WIDE_INT) 1;
5495 else if (fmt[i] == 'E')
5498 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5500 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
5504 return (rtx) (HOST_WIDE_INT) 1;
5512 /* Write information about registers and basic blocks into FILE.
5513 This is part of making a debugging dump. */
/* Print register set R on stream OUTF, one " %d" per set bit, with the
   hard-register name in brackets for regnos below FIRST_PSEUDO_REGISTER.
   Prints " (nil)" for a null set.  (Fragmentary listing.)  */
5516 dump_regset (r, outf)
5523 fputs (" (nil)", outf);
5527 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
5529 fprintf (outf, " %d", i);
5530 if (i < FIRST_PSEUDO_REGISTER)
5531 fprintf (outf, " [%s]",
/* Body of debug_regset (header elided in this listing): dump R to
   stderr for use from a debugger.  */
5540 dump_regset (r, stderr);
5541 putc ('\n', stderr);
/* Write per-pseudo register statistics (refs, sets, deaths, calls
   crossed, preferred classes) and then the basic-block/edge structure
   of the CFG to FILE, for debugging dumps.
   NOTE(review): fragmentary listing -- lines are elided between the
   numbered lines.  */
5545 dump_flow_info (file)
5549 static const char * const reg_class_names[] = REG_CLASS_NAMES;
5551 fprintf (file, "%d registers.\n", max_regno);
5552 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
5555 enum reg_class class, altclass;
5556 fprintf (file, "\nRegister %d used %d times across %d insns",
5557 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
5558 if (REG_BASIC_BLOCK (i) >= 0)
5559 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
5561 fprintf (file, "; set %d time%s", REG_N_SETS (i),
5562 (REG_N_SETS (i) == 1) ? "" : "s");
5563 if (REG_USERVAR_P (regno_reg_rtx[i]))
5564 fprintf (file, "; user var");
5565 if (REG_N_DEATHS (i) != 1)
5566 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
5567 if (REG_N_CALLS_CROSSED (i) == 1)
5568 fprintf (file, "; crosses 1 call");
5569 else if (REG_N_CALLS_CROSSED (i))
5570 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
5571 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
5572 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
/* Report the register-class preference only when it is non-default.  */
5573 class = reg_preferred_class (i);
5574 altclass = reg_alternate_class (i);
5575 if (class != GENERAL_REGS || altclass != ALL_REGS)
5577 if (altclass == ALL_REGS || class == ALL_REGS)
5578 fprintf (file, "; pref %s", reg_class_names[(int) class]);
5579 else if (altclass == NO_REGS)
5580 fprintf (file, "; %s or none", reg_class_names[(int) class]);
5582 fprintf (file, "; pref %s, else %s",
5583 reg_class_names[(int) class],
5584 reg_class_names[(int) altclass]);
5586 if (REGNO_POINTER_FLAG (i))
5587 fprintf (file, "; pointer");
5588 fprintf (file, ".\n");
/* Now the CFG: one stanza per basic block with its edges and live sets.  */
5591 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
5592 for (i = 0; i < n_basic_blocks; i++)
5594 register basic_block bb = BASIC_BLOCK (i);
5597 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d.\n",
5598 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
5600 fprintf (file, "Predecessors: ");
5601 for (e = bb->pred; e ; e = e->pred_next)
5602 dump_edge_info (file, e, 0);
5604 fprintf (file, "\nSuccessors: ");
5605 for (e = bb->succ; e ; e = e->succ_next)
5606 dump_edge_info (file, e, 1);
5608 fprintf (file, "\nRegisters live at start:");
5609 dump_regset (bb->global_live_at_start, file);
5611 fprintf (file, "\nRegisters live at end:");
5612 dump_regset (bb->global_live_at_end, file);
/* Body of debug_flow_info (header elided): dump flow info to stderr.  */
5623 dump_flow_info (stderr);
/* Print edge E on FILE: the block on the far side (dest if DO_SUCC,
   src otherwise, or ENTRY/EXIT) followed by the decoded flag names.
   NOTE(review): fragmentary listing -- lines are elided between the
   numbered lines.  */
5627 dump_edge_info (file, e, do_succ)
5632 basic_block side = (do_succ ? e->dest : e->src);
5634 if (side == ENTRY_BLOCK_PTR)
5635 fputs (" ENTRY", file);
5636 else if (side == EXIT_BLOCK_PTR)
5637 fputs (" EXIT", file);
5639 fprintf (file, " %d", side->index);
/* Symbolic names for the low-order edge flag bits, in bit order.  */
5643 static const char * const bitnames[] = {
5644 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5647 int i, flags = e->flags;
5651 for (i = 0; flags; i++)
5652 if (flags & (1 << i))
/* Bits beyond the name table are printed numerically.  */
5658 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5659 fputs (bitnames[i], file);
5661 fprintf (file, "%d", i);
5669 /* Print out one basic block with live information at start and end. */
/* Body of dump_bb (header elided in this listing): print one basic
   block BB on OUTF -- predecessors, live-at-start regs, the insns of
   the block, live-at-end regs, and successors.  */
5679 fprintf (outf, ";; Basic block %d, loop depth %d",
5680 bb->index, bb->loop_depth);
5681 if (bb->eh_beg != -1 || bb->eh_end != -1)
5682 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
5685 fputs (";; Predecessors: ", outf);
5686 for (e = bb->pred; e ; e = e->pred_next)
5687 dump_edge_info (outf, e, 0);
5690 fputs (";; Registers live at start:", outf);
5691 dump_regset (bb->global_live_at_start, outf);
/* Print every insn from the block head up to (and including) its end.  */
5694 for (insn = bb->head, last = NEXT_INSN (bb->end);
5696 insn = NEXT_INSN (insn))
5697 print_rtl_single (outf, insn);
5699 fputs (";; Registers live at end:", outf);
5700 dump_regset (bb->global_live_at_end, outf);
5703 fputs (";; Successors: ", outf);
5704 for (e = bb->succ; e; e = e->succ_next)
5705 dump_edge_info (outf, e, 1);
/* Body of debug_bb (header elided): dump BB to stderr.  */
5713 dump_bb (bb, stderr);
/* Body of debug_bb_n (header elided): dump block number N to stderr.  */
5720 dump_bb (BASIC_BLOCK(n), stderr);
5723 /* Like print_rtl, but also print out live information for the start of each
/* Like print_rtl, but annotate the insn stream with basic-block
   boundaries and live-register sets.  Builds START/END/IN_BB_P maps
   indexed by INSN_UID, then walks the insn chain printing block
   start/end banners around each insn.
   NOTE(review): fragmentary listing -- lines are elided between the
   numbered lines.  */
5727 print_rtl_with_bb (outf, rtx_first)
5731 register rtx tmp_rtx;
5734 fprintf (outf, "(nil)\n");
5738 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5739 int max_uid = get_max_uid ();
/* Per-UID maps: which block starts/ends at this insn, and membership.  */
5740 basic_block *start = (basic_block *)
5741 xcalloc (max_uid, sizeof (basic_block));
5742 basic_block *end = (basic_block *)
5743 xcalloc (max_uid, sizeof (basic_block));
5744 enum bb_state *in_bb_p = (enum bb_state *)
5745 xcalloc (max_uid, sizeof (enum bb_state));
5747 for (i = n_basic_blocks - 1; i >= 0; i--)
5749 basic_block bb = BASIC_BLOCK (i);
5752 start[INSN_UID (bb->head)] = bb;
5753 end[INSN_UID (bb->end)] = bb;
5754 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
/* An insn seen in two blocks is flagged IN_MULTIPLE_BB.  */
5756 enum bb_state state = IN_MULTIPLE_BB;
5757 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
5759 in_bb_p[INSN_UID(x)] = state;
5766 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5771 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5773 fprintf (outf, ";; Start of basic block %d, registers live:",
5775 dump_regset (bb->global_live_at_start, outf);
5779 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
5780 && GET_CODE (tmp_rtx) != NOTE
5781 && GET_CODE (tmp_rtx) != BARRIER)
5782 fprintf (outf, ";; Insn is not within a basic block\n");
5783 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
5784 fprintf (outf, ";; Insn is in multiple basic blocks\n");
5786 did_output = print_rtl_single (outf, tmp_rtx);
5788 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
5790 fprintf (outf, ";; End of basic block %d, registers live:\n",
5792 dump_regset (bb->global_live_at_end, outf);
5805 if (current_function_epilogue_delay_list != 0)
5807 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
5808 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
5809 tmp_rtx = XEXP (tmp_rtx, 1))
5810 print_rtl_single (outf, XEXP (tmp_rtx, 0));
5814 /* Compute dominator relationships using new flow graph structures. */
/* Iteratively compute DOMINATORS (and, if non-null, POST_DOMINATORS)
   as sbitmap vectors indexed by block number, using a worklist
   fixed-point over the CFG.  The AUX field of each block is used both
   as an "on the worklist" marker and to tag ENTRY/EXIT neighbors.
   NOTE(review): fragmentary listing -- lines are elided between the
   numbered lines.  */
5816 compute_flow_dominators (dominators, post_dominators)
5817 sbitmap *dominators;
5818 sbitmap *post_dominators;
5821 sbitmap *temp_bitmap;
5823 basic_block *worklist, *workend, *qin, *qout;
5826 /* Allocate a worklist array/queue. Entries are only added to the
5827 list if they were not already on the list. So the size is
5828 bounded by the number of basic blocks. */
5829 worklist = (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);
5830 workend = &worklist[n_basic_blocks];
5832 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5833 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5837 /* The optimistic setting of dominators requires us to put every
5838 block on the work list initially. */
5839 qin = qout = worklist;
5840 for (bb = 0; bb < n_basic_blocks; bb++)
5842 *qin++ = BASIC_BLOCK (bb);
5843 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5845 qlen = n_basic_blocks;
5848 /* We want a maximal solution, so initially assume everything dominates
5850 sbitmap_vector_ones (dominators, n_basic_blocks);
5852 /* Mark successors of the entry block so we can identify them below. */
5853 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5854 e->dest->aux = ENTRY_BLOCK_PTR;
5856 /* Iterate until the worklist is empty. */
5859 /* Take the first entry off the worklist. */
5860 basic_block b = *qout++;
/* The queue is circular; wrap QOUT back to the start of the array.  */
5861 if (qout >= workend)
5867 /* Compute the intersection of the dominators of all the
5870 If one of the predecessor blocks is the ENTRY block, then the
5871 intersection of the dominators of the predecessor blocks is
5872 defined as the null set. We can identify such blocks by the
5873 special value in the AUX field in the block structure. */
5874 if (b->aux == ENTRY_BLOCK_PTR)
5876 /* Do not clear the aux field for blocks which are
5877 successors of the ENTRY block. That way we never
5878 add them to the worklist again.
5880 The intersect of dominators of the preds of this block is
5881 defined as the null set. */
5882 sbitmap_zero (temp_bitmap[bb]);
5886 /* Clear the aux field of this block so it can be added to
5887 the worklist again if necessary. */
5889 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
5892 /* Make sure each block always dominates itself. */
5893 SET_BIT (temp_bitmap[bb], bb);
5895 /* If the out state of this block changed, then we need to
5896 add the successors of this block to the worklist if they
5897 are not already on the worklist. */
5898 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
5900 for (e = b->succ; e; e = e->succ_next)
5902 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
/* Second phase: post-dominators, same algorithm mirrored over
   successor edges, with EXIT playing the role of ENTRY.  */
5916 if (post_dominators)
5918 /* The optimistic setting of dominators requires us to put every
5919 block on the work list initially. */
5920 qin = qout = worklist;
5921 for (bb = 0; bb < n_basic_blocks; bb++)
5923 *qin++ = BASIC_BLOCK (bb);
5924 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5926 qlen = n_basic_blocks;
5929 /* We want a maximal solution, so initially assume everything post
5930 dominates everything else. */
5931 sbitmap_vector_ones (post_dominators, n_basic_blocks);
5933 /* Mark predecessors of the exit block so we can identify them below. */
5934 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5935 e->src->aux = EXIT_BLOCK_PTR;
5937 /* Iterate until the worklist is empty. */
5940 /* Take the first entry off the worklist. */
5941 basic_block b = *qout++;
5942 if (qout >= workend)
5948 /* Compute the intersection of the post dominators of all the
5951 If one of the successor blocks is the EXIT block, then the
5952 intersection of the dominators of the successor blocks is
5953 defined as the null set. We can identify such blocks by the
5954 special value in the AUX field in the block structure. */
5955 if (b->aux == EXIT_BLOCK_PTR)
5957 /* Do not clear the aux field for blocks which are
5958 predecessors of the EXIT block. That way we never
5959 add them to the worklist again.
5961 The intersect of dominators of the succs of this block is
5962 defined as the null set. */
5963 sbitmap_zero (temp_bitmap[bb]);
5967 /* Clear the aux field of this block so it can be added to
5968 the worklist again if necessary. */
5970 sbitmap_intersection_of_succs (temp_bitmap[bb],
5971 post_dominators, bb);
5974 /* Make sure each block always post dominates itself. */
5975 SET_BIT (temp_bitmap[bb], bb);
5977 /* If the out state of this block changed, then we need to
5978 add the predecessors of this block to the worklist if they
5979 are not already on the worklist. */
5980 if (sbitmap_a_and_b (post_dominators[bb],
5981 post_dominators[bb],
5984 for (e = b->pred; e; e = e->pred_next)
5986 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
6004 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
/* Given the full DOMINATORS sbitmap vector, compute the immediate
   dominator of each block into the IDOM array: strip each block from
   its own dominator set, subtract out dominators-of-dominators, and
   the single remaining bit is the immediate dominator.
   NOTE(review): fragmentary listing -- lines are elided between the
   numbered lines.  */
6007 compute_immediate_dominators (idom, dominators)
6009 sbitmap *dominators;
6014 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
6016 /* Begin with tmp(n) = dom(n) - { n }. */
6017 for (b = n_basic_blocks; --b >= 0; )
6019 sbitmap_copy (tmp[b], dominators[b]);
6020 RESET_BIT (tmp[b], b);
6023 /* Subtract out all of our dominator's dominators. */
6024 for (b = n_basic_blocks; --b >= 0; )
6026 sbitmap tmp_b = tmp[b];
6029 for (s = n_basic_blocks; --s >= 0; )
6030 if (TEST_BIT (tmp_b, s))
6031 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
6034 /* Find the one bit set in the bitmap and put it in the output array. */
6035 for (b = n_basic_blocks; --b >= 0; )
6038 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
6041 sbitmap_vector_free (tmp);
6044 /* Recompute register set/reference counts immediately prior to register
6047 This avoids problems with set/reference counts changing to/from values
6048 which have special meanings to the register allocators.
6050 Additionally, the reference counts are the primary component used by the
6051 register allocators to prioritize pseudos for allocation to hard regs.
6052 More accurate reference counts generally lead to better register allocation.
6054 F is the first insn to be scanned.
6056 LOOP_STEP denotes how much loop_depth should be incremented per
6057 loop nesting level in order to increase the ref count more for
6058 references in a loop.
6060 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
6061 possibly other information which is used by the register allocators. */
/* Recompute register set/reference counts (PROP_REG_INFO) by re-running
   local life analysis.  F and LOOP_STEP are unused here (kept for the
   historical interface).  */
6064 recompute_reg_usage (f, loop_step)
6065 rtx f ATTRIBUTE_UNUSED;
6066 int loop_step ATTRIBUTE_UNUSED;
6068 allocate_reg_life_data ();
6069 update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
6072 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
6073 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
6074 of the number of registers that died. */
/* Count (and, when KILL, remove) the REG_DEAD/REG_UNUSED notes in the
   blocks selected by BLOCKS (NULL means all blocks).  Returns the count
   of registers that died.
   NOTE(review): fragmentary listing -- lines are elided between the
   numbered lines.  */
6077 count_or_remove_death_notes (blocks, kill)
6083 for (i = n_basic_blocks - 1; i >= 0; --i)
6088 if (blocks && ! TEST_BIT (blocks, i))
6091 bb = BASIC_BLOCK (i);
6093 for (insn = bb->head; ; insn = NEXT_INSN (insn))
6095 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
6097 rtx *pprev = &REG_NOTES (insn);
6102 switch (REG_NOTE_KIND (link))
6105 if (GET_CODE (XEXP (link, 0)) == REG)
6107 rtx reg = XEXP (link, 0);
/* Hard registers may span several words; count each one.  */
6110 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
6113 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
/* KILL path: unlink and free the note, keeping PPREV valid.  */
6121 rtx next = XEXP (link, 1);
6122 free_EXPR_LIST_node (link);
6123 *pprev = link = next;
6129 pprev = &XEXP (link, 1);
6136 if (insn == bb->end)
6144 /* Record INSN's block as BB. */
/* Record INSN's containing basic block as BB in the
   basic_block_for_insn varray, growing the varray if INSN_UID is past
   its current size.  */
6147 set_block_for_insn (insn, bb)
6151 size_t uid = INSN_UID (insn);
6152 if (uid >= basic_block_for_insn->num_elements)
6156 /* Add one-eighth the size so we don't keep calling xrealloc. */
6157 new_size = uid + (uid + 7) / 8;
6159 VARRAY_GROW (basic_block_for_insn, new_size);
6161 VARRAY_BB (basic_block_for_insn, uid) = bb;
6164 /* Record INSN's block number as BB. */
6165 /* ??? This has got to go. */
/* Record INSN's block *number* as BB; thin wrapper over
   set_block_for_insn.  */
6168 set_block_num (insn, bb)
6172 set_block_for_insn (insn, BASIC_BLOCK (bb));
6175 /* Verify CFG consistency. This function checks some CFG invariants and
6176 aborts when something is wrong. The hope is that it will help
6177 convert many optimization passes to keep the CFG consistent.
6179 Currently it performs the following checks:
6181 - test head/end pointers
6182 - overlapping of basic blocks
6183 - edge list correctness
6184 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
6185 - tails of basic blocks (ensure that the boundary is necessary)
6186 - scans body of the basic block for JUMP_INSN, CODE_LABEL
6187 and NOTE_INSN_BASIC_BLOCK
6188 - check that all insns are in the basic blocks
6189 (except the switch handling code, barriers and notes)
6190 - check that all returns are followed by barriers
6192 In the future it can be extended to check a lot of other things as well
6193 (reachability of basic blocks, life information, etc.). */
/* Body of verify_flow_info (header elided in this listing).  Checks the
   CFG invariants described in the comment above and calls error/fatal
   on inconsistencies.
   NOTE(review): fragmentary listing -- lines are elided between the
   numbered lines.  */
6198 const int max_uid = get_max_uid ();
6199 const rtx rtx_first = get_insns ();
6200 basic_block *bb_info;
6202 int i, last_bb_num_seen, num_bb_notes, err = 0;
6204 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
6206 /* First pass check head/end pointers and set bb_info array used by
6208 for (i = n_basic_blocks - 1; i >= 0; i--)
6210 basic_block bb = BASIC_BLOCK (i);
6212 /* Check the head pointer and make sure that it is pointing into
6214 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
6219 error ("Head insn %d for block %d not found in the insn stream.",
6220 INSN_UID (bb->head), bb->index);
6224 /* Check the end pointer and make sure that it is pointing into
6226 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
6228 if (bb_info[INSN_UID (x)] != NULL)
6230 error ("Insn %d is in multiple basic blocks (%d and %d)",
6231 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
6234 bb_info[INSN_UID (x)] = bb;
6241 error ("End insn %d for block %d not found in the insn stream.",
6242 INSN_UID (bb->end), bb->index);
6247 /* Now check the basic blocks (boundaries etc.) */
6248 for (i = n_basic_blocks - 1; i >= 0; i--)
6250 basic_block bb = BASIC_BLOCK (i);
6251 /* Check correctness of edge lists */
6259 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
6261 fprintf (stderr, "Predecessor: ");
6262 dump_edge_info (stderr, e, 0);
6263 fprintf (stderr, "\nSuccessor: ");
6264 dump_edge_info (stderr, e, 1);
/* Each succ edge must also appear on its dest's pred list.  */
6268 if (e->dest != EXIT_BLOCK_PTR)
6270 edge e2 = e->dest->pred;
6271 while (e2 && e2 != e)
6275 error ("Basic block %i edge lists are corrupted", bb->index);
6287 error ("Basic block %d pred edge is corrupted", bb->index);
6288 fputs ("Predecessor: ", stderr);
6289 dump_edge_info (stderr, e, 0);
6290 fputs ("\nSuccessor: ", stderr);
6291 dump_edge_info (stderr, e, 1);
6292 fputc ('\n', stderr);
/* Symmetrically, each pred edge must appear on its src's succ list.  */
6295 if (e->src != ENTRY_BLOCK_PTR)
6297 edge e2 = e->src->succ;
6298 while (e2 && e2 != e)
6302 error ("Basic block %i edge lists are corrupted", bb->index);
6309 /* OK pointers are correct. Now check the header of basic
6310 block. It ought to contain optional CODE_LABEL followed
6311 by NOTE_BASIC_BLOCK. */
6313 if (GET_CODE (x) == CODE_LABEL)
6317 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
6323 if (GET_CODE (x) != NOTE
6324 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
6325 || NOTE_BASIC_BLOCK (x) != bb)
6327 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
6334 /* Do checks for empty blocks here */
6341 if (GET_CODE (x) == NOTE
6342 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
6344 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
6345 INSN_UID (x), bb->index);
6352 if (GET_CODE (x) == JUMP_INSN
6353 || GET_CODE (x) == CODE_LABEL
6354 || GET_CODE (x) == BARRIER)
6356 error ("In basic block %d:", bb->index);
6357 fatal_insn ("Flow control insn inside a basic block", x);
/* Final pass over the whole insn chain: numbering, stray insns,
   and return/barrier pairing.  */
6365 last_bb_num_seen = -1;
6370 if (GET_CODE (x) == NOTE
6371 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
6373 basic_block bb = NOTE_BASIC_BLOCK (x);
6375 if (bb->index != last_bb_num_seen + 1)
6376 fatal ("Basic blocks not numbered consecutively");
6377 last_bb_num_seen = bb->index;
6380 if (!bb_info[INSN_UID (x)])
6382 switch (GET_CODE (x))
6389 /* An addr_vec is placed outside any basic block. */
6391 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
6392 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
6393 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
6398 /* But in any case, non-deletable labels can appear anywhere. */
6402 fatal_insn ("Insn outside basic block", x);
6406 if (GET_RTX_CLASS (GET_CODE (x)) == 'i'
6407 && GET_CODE (x) == JUMP_INSN
6408 && returnjump_p (x) && ! condjump_p (x)
6409 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
6410 fatal_insn ("Return not followed by barrier", x);
6415 if (num_bb_notes != n_basic_blocks)
6416 fatal ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
6417 num_bb_notes, n_basic_blocks);
6426 /* Functions to access an edge list with a vector representation.
6427 Enough data is kept such that given an index number, the
6428 pred and succ that edge represents can be determined, or
6429 given a pred and a succ, its index number can be returned.
6430 This allows algorithms which consume a lot of memory to
6431 represent the normally full matrix of edge (pred,succ) with a
6432 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6433 wasted space in the client code due to sparse flow graphs. */
6435 /* This function initializes the edge list. Basically the entire
6436 flowgraph is processed, and all edges are assigned a number,
6437 and the data structure is filled in. */
/* NOTE(review): this extract elides lines (function header, braces,
   some declarations); annotations below cover only the visible code. */
6441 struct edge_list *elist;
6447 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
/* First pass: count every edge so the index vector can be sized. */
6451 /* Determine the number of edges in the flow graph by counting successor
6452 edges on each basic block. */
6453 for (x = 0; x < n_basic_blocks; x++)
6455 basic_block bb = BASIC_BLOCK (x);
6457 for (e = bb->succ; e; e = e->succ_next)
6460 /* Don't forget successors of the entry block. */
6461 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6464 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6465 elist->num_blocks = block_count;
6466 elist->num_edges = num_edges;
6467 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
/* Second pass: num_edges is presumably reset to 0 on an elided line and
   reused as the running index while registering edges -- TODO confirm. */
6471 /* Follow successors of the entry block, and register these edges. */
6472 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6474 elist->index_to_edge[num_edges] = e;
6478 for (x = 0; x < n_basic_blocks; x++)
6480 basic_block bb = BASIC_BLOCK (x);
6482 /* Follow all successors of blocks, and register these edges. */
6483 for (e = bb->succ; e; e = e->succ_next)
6485 elist->index_to_edge[num_edges] = e;
6492 /* This function frees memory associated with an edge list. */
6494 free_edge_list (elist)
6495 struct edge_list *elist;
/* Frees the index vector; the elist struct itself is presumably freed on
   an elided line -- TODO confirm against the full source. */
6499 free (elist->index_to_edge);
6504 /* This function provides debug output showing an edge list. */
6506 print_edge_list (f, elist)
6508 struct edge_list *elist;
/* Header line: num_blocks includes entry and exit, hence the "- 2". */
6511 fprintf(f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6512 elist->num_blocks - 2, elist->num_edges);
/* One line per edge: "index - edge(pred,succ)", spelling out the
   synthetic entry/exit blocks by name instead of index. */
6514 for (x = 0; x < elist->num_edges; x++)
6516 fprintf (f, " %-4d - edge(", x);
6517 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6518 fprintf (f,"entry,");
6520 fprintf (f,"%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6522 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6523 fprintf (f,"exit)\n");
6525 fprintf (f,"%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6529 /* This function provides an internal consistency check of an edge list,
6530 verifying that all edges are present, and that there are no
6531 spurious edges. Diagnostics are written to stream F. */
6533 verify_edge_list (f, elist)
6535 struct edge_list *elist;
6537 int x, pred, succ, index;
/* Part 1: every CFG edge must have an index, and the index must map
   back to the same pred/succ pair. */
6540 for (x = 0; x < n_basic_blocks; x++)
6542 basic_block bb = BASIC_BLOCK (x);
6544 for (e = bb->succ; e; e = e->succ_next)
6546 pred = e->src->index;
6547 succ = e->dest->index;
6548 index = EDGE_INDEX (elist, e->src, e->dest);
6549 if (index == EDGE_INDEX_NO_EDGE)
6551 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6554 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6555 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6556 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6557 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6558 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6559 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
/* Same check for the entry block's successor edges. */
6562 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6564 pred = e->src->index;
6565 succ = e->dest->index;
6566 index = EDGE_INDEX (elist, e->src, e->dest);
6567 if (index == EDGE_INDEX_NO_EDGE)
6569 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6572 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6573 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6574 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6575 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6576 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6577 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6579 /* We've verified that all the edges are in the list, now let's make sure
6580 there are no spurious edges in the list. */
/* Part 2: for every (pred, succ) pair -- including pairs involving the
   entry and exit blocks below -- an index must exist iff a real CFG edge
   exists (checked in both the succ and pred lists; found_edge is set on
   elided lines). */
6582 for (pred = 0 ; pred < n_basic_blocks; pred++)
6583 for (succ = 0 ; succ < n_basic_blocks; succ++)
6585 basic_block p = BASIC_BLOCK (pred);
6586 basic_block s = BASIC_BLOCK (succ);
6590 for (e = p->succ; e; e = e->succ_next)
6596 for (e = s->pred; e; e = e->pred_next)
6602 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6603 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6604 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6606 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6607 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6608 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6609 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6610 BASIC_BLOCK (succ)));
/* Pairs (entry, succ). */
6612 for (succ = 0 ; succ < n_basic_blocks; succ++)
6614 basic_block p = ENTRY_BLOCK_PTR;
6615 basic_block s = BASIC_BLOCK (succ);
6619 for (e = p->succ; e; e = e->succ_next)
6625 for (e = s->pred; e; e = e->pred_next)
6631 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6632 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6633 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6635 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6636 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6637 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6638 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6639 BASIC_BLOCK (succ)));
/* Pairs (pred, exit). */
6641 for (pred = 0 ; pred < n_basic_blocks; pred++)
6643 basic_block p = BASIC_BLOCK (pred);
6644 basic_block s = EXIT_BLOCK_PTR;
6648 for (e = p->succ; e; e = e->succ_next)
6654 for (e = s->pred; e; e = e->pred_next)
6660 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6661 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6662 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6664 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6665 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6666 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6667 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6672 /* This routine will determine what, if any, edge there is between
6673 a specified predecessor and successor. */
6676 find_edge_index (edge_list, pred, succ)
6677 struct edge_list *edge_list;
6678 basic_block pred, succ;
/* Linear scan over all indexed edges; the matching index is returned
   on an elided line inside the loop. */
6681 for (x = 0; x < NUM_EDGES (edge_list); x++)
6683 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6684 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6687 return (EDGE_INDEX_NO_EDGE);
6690 /* This function will remove an edge from the flow graph. */
/* Unlinks E from its source's successor list and its destination's
   predecessor list; both are singly linked, hence the trailing-pointer
   scans below. E itself is presumably freed on an elided line. */
6695 edge last_pred = NULL;
6696 edge last_succ = NULL;
6698 basic_block src, dest;
/* Find the node preceding E in src's successor list. */
6701 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6707 last_succ->succ_next = e->succ_next;
6709 src->succ = e->succ_next; /* E was the head of the list. */
/* Likewise for dest's predecessor list. */
6711 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6717 last_pred->pred_next = e->pred_next;
6719 dest->pred = e->pred_next; /* E was the head of the list. */
6725 /* This routine will remove any fake successor edges for a basic block.
6726 When the edge is removed, it is also removed from whatever predecessor
6727 block it points to. */
6729 remove_fake_successors (bb)
/* Walk BB's successor list; the next pointer is saved (elided) before a
   fake edge is removed so iteration survives the unlink. */
6733 for (e = bb->succ; e ; )
6737 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6742 /* This routine will remove all fake edges from the flow graph. If
6743 we remove all fake successors, it will automatically remove all
6744 fake predecessors. */
6746 remove_fake_edges ()
6750 for (x = 0; x < n_basic_blocks; x++)
6751 remove_fake_successors (BASIC_BLOCK (x));
6753 /* We've handled all successors except the entry block's. */
6754 remove_fake_successors (ENTRY_BLOCK_PTR);
6757 /* This function will add a fake edge between any block which has no
6758 successors, and the exit block. Some data flow equations require these
6759 edges to exist. */
6761 add_noreturn_fake_exit_edges ()
6765 for (x = 0; x < n_basic_blocks; x++)
6766 if (BASIC_BLOCK (x)->succ == NULL)
6767 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE)
6770 /* Dump the list of basic blocks in the bitmap NODES to FILE,
   prefixed by STR and wrapped in braces. */
6772 flow_nodes_print (str, nodes, file)
6774 const sbitmap nodes;
6779 fprintf (file, "%s { ", str);
6780 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
6781 fputs ("}\n", file);
6785 /* Dump the list of exiting edges in the array EDGES (NUM_EDGES long)
   to FILE, each printed as "src->dest". */
6787 flow_exits_print (str, edges, num_edges, file)
6795 fprintf (file, "%s { ", str);
6796 for (i = 0; i < num_edges; i++)
6797 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
6798 fputs ("}\n", file);
6802 /* Dump loop related CFG information. */
6804 flow_loops_cfg_dump (loops, file)
6805 const struct loops *loops;
/* Nothing to do without loops, a stream, or dominator info. */
6810 if (! loops->num || ! file || ! loops->cfg.dom)
/* Per block: successor indices, then the block's dominator set. */
6813 for (i = 0; i < n_basic_blocks; i++)
6817 fprintf (file, ";; %d succs { ", i);
6818 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
6819 fprintf (file, "%d ", succ->dest->index);
6820 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
6824 /* Dump the DFS node order. */
6825 if (loops->cfg.dfs_order)
6827 fputs (";; DFS order: ", file);
6828 for (i = 0; i < n_basic_blocks; i++)
6829 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
6835 /* Return non-zero if the nodes of LOOP are a subset of OUTER,
   i.e. LOOP is nested inside OUTER. */
6837 flow_loop_nested_p (outer, loop)
6841 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
6845 /* Dump the loop information specified by LOOPS to the stream FILE,
   with extra diagnostics when VERBOSE is set. */
6847 flow_loops_dump (loops, file, verbose)
6848 const struct loops *loops;
6855 num_loops = loops->num;
6856 if (! num_loops || ! file)
6859 fprintf (file, ";; %d loops found, %d levels\n",
6860 num_loops, loops->levels);
6862 for (i = 0; i < num_loops; i++)
6864 struct loop *loop = &loops->array[i];
/* Summary line: insn range, header/latch/pre-header indices, nesting
   depth/level, and the index of the enclosing loop (-1 if none). */
6866 fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
6867 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
6868 loop->header->index, loop->latch->index,
6869 loop->pre_header ? loop->pre_header->index : -1,
6870 loop->depth, loop->level,
6871 (long) (loop->outer ? (loop->outer - loops->array) : -1));
6872 fprintf (file, ";; %d", loop->num_nodes);
6873 flow_nodes_print (" nodes", loop->nodes, file);
6874 fprintf (file, ";; %d", loop->num_exits);
6875 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
/* Report loops sharing a header; disjointness is decided by checking
   whether the smaller loop's nodes are a subset of the larger's. */
6881 for (j = 0; j < i; j++)
6883 struct loop *oloop = &loops->array[j];
6885 if (loop->header == oloop->header)
6890 smaller = loop->num_nodes < oloop->num_nodes;
6892 /* If the union of LOOP and OLOOP is different than
6893 the larger of LOOP and OLOOP then LOOP and OLOOP
6894 must be disjoint. */
6895 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
6896 smaller ? oloop : loop);
6897 fprintf (file, ";; loop header %d shared by loops %d, %d %s\n",
6898 loop->header->index, i, j,
6899 disjoint ? "disjoint" : "nested");
6906 /* Print diagnostics to compare our concept of a loop with
6907 what the loop notes say. */
6908 if (GET_CODE (PREV_INSN (loop->first->head)) != NOTE
6909 || NOTE_LINE_NUMBER (PREV_INSN (loop->first->head))
6910 != NOTE_INSN_LOOP_BEG
6911 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
6912 INSN_UID (PREV_INSN (loop->first->head)));
6913 if (GET_CODE (NEXT_INSN (loop->last->end)) != NOTE
6914 || NOTE_LINE_NUMBER (NEXT_INSN (loop->last->end))
6915 != NOTE_INSN_LOOP_END
6916 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
6917 INSN_UID (NEXT_INSN (loop->last->end)));
6922 flow_loops_cfg_dump (loops, file);
6926 /* Free all the memory allocated for LOOPS. */
6928 flow_loops_free (loops)
6929 struct loops *loops;
6938 /* Free the loop descriptors. */
6939 for (i = 0; i < loops->num; i++)
6941 struct loop *loop = &loops->array[i];
6944 sbitmap_free (loop->nodes);
/* loop->exits is presumably freed on an elided line -- TODO confirm. */
6948 free (loops->array);
6949 loops->array = NULL;
/* Release the cached CFG-derived data saved by flow_loops_find. */
6952 sbitmap_vector_free (loops->cfg.dom);
6953 if (loops->cfg.dfs_order)
6954 free (loops->cfg.dfs_order);
6956 sbitmap_free (loops->shared_headers);
6961 /* Find the exits from the loop using the bitmap of loop nodes NODES
6962 and store in EXITS array. Return the number of exits from the
6963 loop. */
6965 flow_loop_exits_find (nodes, exits)
6966 const sbitmap nodes;
/* Pass 1: count exit edges so the array can be sized. */
6975 /* Check all nodes within the loop to see if there are any
6976 successors not in the loop. Note that a node may have multiple
6977 exit edges. */
6979 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6980 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6982 basic_block dest = e->dest;
6984 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
/* NOTE(review): sizeof (edge *) happens to equal sizeof (edge) since
   `edge' is itself a pointer typedef, but sizeof (edge) would be the
   correct spelling for an array of edge. */
6992 *exits = (edge *) xmalloc (num_exits * sizeof (edge *));
6994 /* Store all exiting edges into an array. */
/* num_exits is presumably reset to 0 (elided) and reused as an index. */
6996 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6997 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6999 basic_block dest = e->dest;
7001 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
7002 (*exits)[num_exits++] = e;
7010 /* Find the nodes contained within the loop with header HEADER and
7011 latch LATCH and store in NODES. Return the number of nodes within
7012 the loop. */
7014 flow_loop_nodes_find (header, latch, nodes)
/* Worklist backward reachability: start from the latch and walk
   predecessors until the header is reached; everything visited is in
   the loop. loop_depth counters are bumped as a side effect. */
7023 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
7026 /* Start with only the loop header in the set of loop nodes. */
7027 sbitmap_zero (nodes);
7028 SET_BIT (nodes, header->index);
7030 header->loop_depth++;
7032 /* Push the loop latch on to the stack. */
7033 if (! TEST_BIT (nodes, latch->index))
7035 SET_BIT (nodes, latch->index);
7036 latch->loop_depth++;
7038 stack[sp++] = latch;
/* Pop loop (elided) processes each node's predecessors. */
7047 for (e = node->pred; e; e = e->pred_next)
7049 basic_block ancestor = e->src;
7051 /* If each ancestor not marked as part of loop, add to set of
7052 loop nodes and push on to stack. */
7053 if (ancestor != ENTRY_BLOCK_PTR
7054 && ! TEST_BIT (nodes, ancestor->index))
7056 SET_BIT (nodes, ancestor->index);
7057 ancestor->loop_depth++;
7059 stack[sp++] = ancestor;
7068 /* Compute the depth first search order and store in the array
7069 DFS_ORDER, marking the nodes visited in VISITED. Returns the
7070 number of nodes visited. */
7072 flow_depth_first_order_compute (dfs_order)
/* Iterative DFS over CFG edges; dfs_order[b] receives the block's
   position in reverse completion order (n_basic_blocks - dfsnum). */
7081 /* Allocate stack for back-tracking up CFG. */
7082 stack = (edge *) xmalloc (n_basic_blocks * sizeof (edge));
7085 /* Allocate bitmap to track nodes that have been visited. */
7086 visited = sbitmap_alloc (n_basic_blocks);
7088 /* None of the nodes in the CFG have been visited yet. */
7089 sbitmap_zero (visited);
7091 /* Start with the first successor edge from the entry block. */
7092 e = ENTRY_BLOCK_PTR->succ;
7095 basic_block src = e->src;
7096 basic_block dest = e->dest;
7098 /* Mark that we have visited this node. */
7099 if (src != ENTRY_BLOCK_PTR)
7100 SET_BIT (visited, src->index);
7102 /* If this node has not been visited before, push the current
7103 edge on to the stack and proceed with the first successor
7104 edge of this node. */
7105 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
7113 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
7116 /* DEST has no successors (for example, a non-returning
7117 function is called) so do not push the current edge
7118 but carry on with its next successor. */
7119 dfs_order[dest->index] = n_basic_blocks - ++dfsnum;
7120 SET_BIT (visited, dest->index);
/* Backtrack: number SRC once all its successor edges are exhausted. */
7123 while (! e->succ_next && src != ENTRY_BLOCK_PTR)
7125 dfs_order[src->index] = n_basic_blocks - ++dfsnum;
7127 /* Pop edge off stack. */
7135 sbitmap_free (visited);
7137 /* The number of nodes visited should not be greater than
7138 n_basic_blocks. */
7139 if (dfsnum > n_basic_blocks)
7142 /* There are some nodes left in the CFG that are unreachable. */
7143 if (dfsnum < n_basic_blocks)
7149 /* Return the block for the pre-header of the loop with header
7150 HEADER where DOM specifies the dominator information. Return NULL if
7151 there is no pre-header. */
7153 flow_loop_pre_header_find (header, dom)
7157 basic_block pre_header;
7160 /* If block p is a predecessor of the header and is the only block
7161 that the header does not dominate, then it is the pre-header. */
7163 for (e = header->pred; e; e = e->pred_next)
7165 basic_block node = e->src;
/* A predecessor not dominated by the header enters the loop from
   outside; exactly one such predecessor means it is the pre-header. */
7167 if (node != ENTRY_BLOCK_PTR
7168 && ! TEST_BIT (dom[node->index], header->index))
7170 if (pre_header == NULL)
7174 /* There are multiple edges into the header from outside
7175 the loop so there is no pre-header block. */
7185 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
7186 previously added. The insertion algorithm assumes that the loops
7187 are added in the order found by a depth first search of the CFG. */
7189 flow_loop_tree_node_add (prevloop, loop)
7190 struct loop *prevloop;
/* Case 1: LOOP nests directly inside the previous loop. */
7194 if (flow_loop_nested_p (prevloop, loop))
7196 prevloop->inner = loop;
7197 loop->outer = prevloop;
/* Case 2: climb outward until an ancestor enclosing LOOP is found;
   LOOP becomes a sibling at that level. */
7201 while (prevloop->outer)
7203 if (flow_loop_nested_p (prevloop->outer, loop))
7205 prevloop->next = loop;
7206 loop->outer = prevloop->outer;
7209 prevloop = prevloop->outer;
/* Case 3: no enclosing ancestor; LOOP is a new top-level sibling. */
7212 prevloop->next = loop;
7217 /* Build the loop hierarchy tree for LOOPS. */
7219 flow_loops_tree_build (loops)
7220 struct loops *loops;
7225 num_loops = loops->num;
7229 /* Root the loop hierarchy tree with the first loop found.
7230 Since we used a depth first search this should be the
7231 outermost loop. */
7232 loops->tree = &loops->array[0];
7233 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
7235 /* Add the remaining loops to the tree. */
7236 for (i = 1; i < num_loops; i++)
7237 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
7241 /* Helper function to compute loop nesting depth and enclosed loop level
7242 for the natural loop specified by LOOP at the loop depth DEPTH.
7243 Returns the loop level. */
7245 flow_loop_level_compute (loop, depth)
7255 /* Traverse loop tree assigning depth and computing level as the
7256 maximum level of all the inner loops of this loop. The loop
7257 level is equivalent to the height of the loop in the loop tree
7258 and corresponds to the number of enclosed loop levels (including
7259 itself). */
7260 for (inner = loop->inner; inner; inner = inner->next)
/* Recurse one level deeper; the max over inner loops is kept (elided). */
7264 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
7269 loop->level = level;
7270 loop->depth = depth;
7275 /* Compute the loop nesting depth and enclosed loop level for the loop
7276 hierarchy tree specified by LOOPS. Return the maximum enclosed loop
7277 level. */
7280 flow_loops_level_compute (loops)
7281 struct loops *loops;
7287 /* Traverse all the outer level loops. */
7288 for (loop = loops->tree; loop; loop = loop->next)
7290 level = flow_loop_level_compute (loop, 1);
7298 /* Find all the natural loops in the function and save in LOOPS structure
7299 and recalculate loop_depth information in basic block structures.
7300 Return the number of natural loops found. */
7303 flow_loops_find (loops)
7304 struct loops *loops;
7315 loops->array = NULL;
7319 /* Taking care of this degenerate case makes the rest of
7320 this code simpler. */
7321 if (n_basic_blocks == 0)
7324 /* Compute the dominators. */
7325 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
7326 compute_flow_dominators (dom, NULL);
7328 /* Count the number of loop edges (back edges). This should be the
7329 same as the number of natural loops. Also clear the loop_depth
7330 and as we work from inner->outer in a loop nest we call
7331 find_loop_nodes_find which will increment loop_depth for nodes
7332 within the current loop, which happens to enclose inner loops. */
7335 for (b = 0; b < n_basic_blocks; b++)
7337 BASIC_BLOCK (b)->loop_depth = 0;
7338 for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
7340 basic_block latch = e->src;
7342 /* Look for back edges where a predecessor is dominated
7343 by this block. A natural loop has a single entry
7344 node (header) that dominates all the nodes in the
7345 loop. It also has single back edge to the header
7346 from a latch node. Note that multiple natural loops
7347 may share the same header. */
7348 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
7355 /* Compute depth first search order of the CFG so that outer
7356 natural loops will be found before inner natural loops. */
7357 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
7358 flow_depth_first_order_compute (dfs_order);
7360 /* Allocate loop structures. */
7362 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
7364 headers = sbitmap_alloc (n_basic_blocks);
7365 sbitmap_zero (headers);
7367 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
7368 sbitmap_zero (loops->shared_headers);
7370 /* Find and record information about all the natural loops
7371 in the CFG. */
/* num_loops is presumably reset to 0 (elided) and reused as the index
   of the next loop descriptor to fill in -- TODO confirm. */
7373 for (b = 0; b < n_basic_blocks; b++)
7377 /* Search the nodes of the CFG in DFS order so that we can find
7378 outer loops first. */
7379 header = BASIC_BLOCK (dfs_order[b]);
7381 /* Look for all the possible latch blocks for this header. */
7382 for (e = header->pred; e; e = e->pred_next)
7384 basic_block latch = e->src;
7386 /* Look for back edges where a predecessor is dominated
7387 by this block. A natural loop has a single entry
7388 node (header) that dominates all the nodes in the
7389 loop. It also has single back edge to the header
7390 from a latch node. Note that multiple natural loops
7391 may share the same header. */
7392 if (latch != ENTRY_BLOCK_PTR
7393 && TEST_BIT (dom[latch->index], header->index))
7397 loop = loops->array + num_loops;
7399 loop->header = header;
7400 loop->latch = latch;
7402 /* Keep track of blocks that are loop headers so
7403 that we can tell which loops should be merged. */
7404 if (TEST_BIT (headers, header->index))
7405 SET_BIT (loops->shared_headers, header->index);
7406 SET_BIT (headers, header->index);
7408 /* Find nodes contained within the loop. */
7409 loop->nodes = sbitmap_alloc (n_basic_blocks);
7411 = flow_loop_nodes_find (header, latch, loop->nodes);
7413 /* Compute first and last blocks within the loop.
7414 These are often the same as the loop header and
7415 loop latch respectively, but this is not always
7416 the case. */
7418 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
7420 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
7422 /* Find edges which exit the loop. Note that a node
7423 may have several exit edges. */
7425 = flow_loop_exits_find (loop->nodes, &loop->exits);
7427 /* Look to see if the loop has a pre-header node. */
7429 = flow_loop_pre_header_find (header, dom);
7436 /* Natural loops with shared headers may either be disjoint or
7437 nested. Disjoint loops with shared headers cannot be inner
7438 loops and should be merged. For now just mark loops that share
7439 headers. */
7440 for (i = 0; i < num_loops; i++)
7441 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
7442 loops->array[i].shared = 1;
7444 sbitmap_free (headers);
7447 loops->num = num_loops;
7449 /* Save CFG derived information to avoid recomputing it. */
7450 loops->cfg.dom = dom;
7451 loops->cfg.dfs_order = dfs_order;
7453 /* Build the loop hierarchy tree. */
7454 flow_loops_tree_build (loops);
7456 /* Assign the loop nesting depth and enclosed loop level for each
7457 loop. */
7458 loops->levels = flow_loops_level_compute (loops);
7464 /* Return non-zero if edge E enters header of LOOP from outside of LOOP. */
7466 flow_loop_outside_edge_p (loop, e)
7467 const struct loop *loop;
/* Edges not targeting the header trigger an elided abort/return path. */
7470 if (e->dest != loop->header)
/* Outside means: comes from the entry block, or from a block not in
   the loop's node bitmap. */
7472 return (e->src == ENTRY_BLOCK_PTR)
7473 || ! TEST_BIT (loop->nodes, e->src->index);
7477 /* Clear LOG_LINKS fields of insns in a chain. */
7479 clear_log_links (insns)
7483 for (i = insns; i; i = NEXT_INSN (i))
7484 if (GET_RTX_CLASS (GET_CODE (i)) == 'i')