/* Data flow analysis for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file contains the data flow analysis pass of the compiler.  It
   computes data flow information which tells combine_instructions
   which insns to consider combining and controls register allocation.

   Additional data flow information that is too bulky to record is
   generated during the analysis, and is used at that time to create
   autoincrement and autodecrement addressing.

   The first step is dividing the function into basic blocks.
   find_basic_blocks does this.  Then life_analysis determines
   where each register is live and where it is dead.

   ** find_basic_blocks **

   find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.

   find_basic_blocks also finds any unreachable loops and deletes them.
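
   For example (an illustrative sketch, not code from this file; the
   `process' hook is hypothetical), a pass can walk the successor edges
   of block I using the edge structures described above:

	basic_block bb = BASIC_BLOCK (i);
	edge e;

	for (e = bb->succ; e; e = e->succ_next)
	  process (e->src, e->dest, e->flags);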

   ** life_analysis **

   life_analysis is called immediately after find_basic_blocks.
   It uses the basic block information to determine where each
   hard or pseudo register is live.

   ** live-register info **

   The information about where each register is live is in two parts:
   the REG_NOTES of insns, and the vector basic_block->global_live_at_start.

   basic_block->global_live_at_start has an element for each basic
   block, and the element is a bit-vector with a bit for each hard or
   pseudo register.  The bit is 1 if the register is live at the
   beginning of the basic block.
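
   For example (illustrative; REGNO_REG_SET_P is one of the regset
   macros from basic-block.h), a pass can test whether pseudo 100 is
   live on entry to block 3 with:

	if (REGNO_REG_SET_P (BASIC_BLOCK (3)->global_live_at_start, 100))
	  ...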

   Two types of elements can be added to an insn's REG_NOTES.
   A REG_DEAD note is added to an insn's REG_NOTES for any register
   that meets both of two conditions:  The value in the register is not
   needed in subsequent insns and the insn does not replace the value in
   the register (in the case of multi-word hard registers, the value in
   each register must be replaced by the insn to avoid a REG_DEAD note).
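
   For example (an illustrative dump; the insn numbers are made up),
   an add whose first operand dies in the insn carries the note like so:

	(insn 42 41 43 (set (reg:SI 102)
		(plus:SI (reg:SI 100) (reg:SI 101))) -1 (nil)
	    (expr_list:REG_DEAD (reg:SI 100)
		(nil))
	    (nil))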

   In the vast majority of cases, an object in a REG_DEAD note will be
   used somewhere in the insn.  The (rare) exception to this is if an
   insn uses a multi-word hard register and only some of the registers are
   needed in subsequent insns.  In that case, REG_DEAD notes will be
   provided for those hard registers that are not subsequently needed.
   Partial REG_DEAD notes of this type do not occur when an insn sets
   only some of the hard registers used in such a multi-word operand;
   omitting REG_DEAD notes for objects stored in an insn is optional and
   the desire to do so does not justify the complexity of the partial
   REG_DEAD notes.

   REG_UNUSED notes are added for each register that is set by the insn
   but is unused subsequently (if every register set by the insn is unused
   and the insn does not reference memory or have some other side-effect,
   the insn is deleted instead).  If only part of a multi-word hard
   register is used in a subsequent insn, REG_UNUSED notes are made for
   the parts that will not be used.

   To determine which registers are live after any insn, one can
   start from the beginning of the basic block and scan insns, noting
   which registers are set by each insn and which die there.
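
   As an illustrative sketch (pseudo-code, not code from this file),
   that scan is:

	live = copy of bb->global_live_at_start
	for each INSN in the block, in order:
	    live -= every register in a REG_DEAD note on INSN
	    live += every register set by INSN
	    live -= every register in a REG_UNUSED note on INSN
	    (now `live' is exactly the set of registers live after INSN)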

   ** Other actions of life_analysis **

   life_analysis sets up the LOG_LINKS fields of insns because the
   information needed to do so is readily available.

   life_analysis deletes insns whose only effect is to store a value
   that is never used.

   life_analysis notices cases where a reference to a register as
   a memory address can be combined with a preceding or following
   incrementation or decrementation of the register.  The separate
   instruction to increment or decrement is deleted and the address
   is changed to a POST_INC or similar rtx.

   Each time an incrementing or decrementing address is created,
   a REG_INC element is added to the insn's REG_NOTES list.
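
   For example (illustrative, on a target with post-increment
   addressing), the pair

	(set (mem:SI (reg:SI 100)) (reg:SI 101))
	(set (reg:SI 100) (plus:SI (reg:SI 100) (const_int 4)))

   becomes the single insn

	(set (mem:SI (post_inc:SI (reg:SI 100))) (reg:SI 101))

   with a REG_INC note for (reg:SI 100) added to its REG_NOTES.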

   life_analysis fills in certain vectors containing information about
   register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
   REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.

   life_analysis sets current_function_sp_is_unchanging if the function
   doesn't modify the stack pointer.  */
/* TODO:

   Split out from life_analysis:
	- local property discovery (bb->local_live, bb->local_set)
	- global property computation
	- log links creation
	- pre/post modify transformation
*/
#include "config.h"
#include "system.h"
#include "tree.h"
#include "rtl.h"
#include "basic-block.h"
#include "insn-config.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "recog.h"
#include "insn-flags.h"
#include "expr.h"

#include "obstack.h"
#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
/* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
   the stack pointer does not matter.  The value is tested only in
   functions that have frame pointers.
   No definition is equivalent to always zero.  */
#ifndef EXIT_IGNORE_STACK
#define EXIT_IGNORE_STACK 0
#endif

#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif

#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif

#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif
/* The contents of the current function definition are allocated
   in this obstack, and all are freed at the end of the function.
   For top-level functions, this is temporary_obstack.
   Separate obstacks are made for nested functions.  */

extern struct obstack *function_obstack;
/* Number of basic blocks in the current function.  */

int n_basic_blocks;

/* Number of edges in the current function.  */

int n_edges;

/* The basic block array.  */

varray_type basic_block_info;

/* The special entry and exit blocks.  */
struct basic_block_def entry_exit_blocks[2]
= {{ NULL,			/* head */
     NULL,			/* end */
     NULL,			/* pred */
     NULL,			/* succ */
     NULL,			/* local_set */
     NULL,			/* global_live_at_start */
     NULL,			/* global_live_at_end */
     NULL,			/* aux */
     ENTRY_BLOCK,		/* index */
     0,				/* loop_depth */
     -1, -1			/* eh_beg, eh_end */
   },
   {
     NULL,			/* head */
     NULL,			/* end */
     NULL,			/* pred */
     NULL,			/* succ */
     NULL,			/* local_set */
     NULL,			/* global_live_at_start */
     NULL,			/* global_live_at_end */
     NULL,			/* aux */
     EXIT_BLOCK,		/* index */
     0,				/* loop_depth */
     -1, -1			/* eh_beg, eh_end */
   }};
/* Nonzero if the second flow pass has completed.  */
int flow2_completed;

/* Maximum register number used in this function, plus one.  */

int max_regno;

/* Indexed by n, giving various register information */

varray_type reg_n_info;

/* Size of the reg_n_info table.  */

unsigned int reg_n_max;

/* Size of a regset for the current function,
   in (1) bytes and (2) elements.  */

int regset_bytes;
int regset_size;

/* Regset of regs live when calls to `setjmp'-like functions happen.  */
/* ??? Does this exist only for the setjmp-clobbered warning message?  */

regset regs_live_at_setjmp;
/* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
   that have to go in the same hard reg.
   The first two regs in the list are a pair, and the next two
   are another pair, etc.  */
rtx regs_may_share;

/* Set of registers that may be eliminable.  These are handled specially
   in updating regs_ever_live.  */

static HARD_REG_SET elim_reg_set;
/* The basic block structure for every insn, indexed by uid.  */

varray_type basic_block_for_insn;

/* The labels mentioned in non-jump rtl.  Valid during find_basic_blocks.  */
/* ??? Should probably be using LABEL_NUSES instead.  It would take a
   bit of surgery to be able to use or co-opt the routines in jump.  */

static rtx label_value_list;
/* For use in communicating between propagate_block and its subroutines.
   Holds all information needed to compute life and def-use information.  */

struct propagate_block_info
{
  /* The basic block we're considering.  */
  basic_block bb;

  /* Bit N is set if register N is conditionally or unconditionally live.  */
  regset reg_live;

  /* Bit N is set if register N is unconditionally dead this insn.  */
  regset new_dead;

  /* Bit N is set if register N is live this insn.  */
  regset new_live;

  /* Element N is the next insn that uses (hard or pseudo) register N
     within the current basic block; or zero, if there is no such insn.  */
  rtx *reg_next_use;

  /* Contains a list of all the MEMs we are tracking for dead store
     elimination.  */
  rtx mem_set_list;

  /* If non-null, record the set of registers set in the basic block.  */
  regset local_set;

  /* Non-zero if the value of CC0 is live.  */
  int cc0_live;

  /* Flags controlling the set of information propagate_block collects.  */
  int flags;
};
/* Forward declarations */
static int count_basic_blocks		PARAMS ((rtx));
static rtx find_basic_blocks_1		PARAMS ((rtx));
static void clear_edges			PARAMS ((void));
static void make_edges			PARAMS ((rtx));
static void make_label_edge		PARAMS ((sbitmap *, basic_block,
						 rtx, int));
static void make_eh_edge		PARAMS ((sbitmap *, eh_nesting_info *,
						 basic_block, rtx, int));
static void mark_critical_edges		PARAMS ((void));
static void move_stray_eh_region_notes	PARAMS ((void));
static void record_active_eh_regions	PARAMS ((rtx));

static void commit_one_edge_insertion	PARAMS ((edge));

static void delete_unreachable_blocks	PARAMS ((void));
static void delete_eh_regions		PARAMS ((void));
static int can_delete_note_p		PARAMS ((rtx));
static int delete_block			PARAMS ((basic_block));
static void expunge_block		PARAMS ((basic_block));
static int can_delete_label_p		PARAMS ((rtx));
static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
							  basic_block));
static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
							basic_block));
static int merge_blocks			PARAMS ((edge,basic_block,basic_block));
static void try_merge_blocks		PARAMS ((void));
static void tidy_fallthru_edges		PARAMS ((void));
static int verify_wide_reg_1		PARAMS ((rtx *, void *));
static void verify_wide_reg		PARAMS ((int, rtx, rtx));
static void verify_local_live_at_start	PARAMS ((regset, basic_block));
static int set_noop_p			PARAMS ((rtx));
static int noop_move_p			PARAMS ((rtx));
static void delete_noop_moves		PARAMS ((rtx));
static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
static void notice_stack_pointer_modification PARAMS ((rtx));
static void mark_reg			PARAMS ((rtx, void *));
static void mark_regs_live_at_end	PARAMS ((regset));
static int set_phi_alternative_reg      PARAMS ((rtx, int, int, void *));
static void calculate_global_regs_live	PARAMS ((sbitmap, sbitmap, int));
static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
static void propagate_block		PARAMS ((basic_block, regset,
						 regset, int));
static int insn_dead_p			PARAMS ((struct propagate_block_info *,
						 rtx, int, rtx));
static int libcall_dead_p		PARAMS ((struct propagate_block_info *,
						 rtx, rtx, rtx));
static void mark_set_regs		PARAMS ((struct propagate_block_info *,
						 rtx, rtx));
static void mark_set_1			PARAMS ((struct propagate_block_info *,
						 rtx, rtx, rtx));
static int mark_set_reg			PARAMS ((struct propagate_block_info *,
						 rtx, rtx, int *, int *));
static void find_auto_inc		PARAMS ((struct propagate_block_info *,
						 rtx, rtx));
static int try_pre_increment_1		PARAMS ((struct propagate_block_info *,
						 rtx));
static int try_pre_increment		PARAMS ((rtx, rtx, HOST_WIDE_INT));
static void mark_used_reg		PARAMS ((struct propagate_block_info *,
						 rtx, rtx, rtx));
static void mark_used_regs		PARAMS ((struct propagate_block_info *,
						 rtx, rtx, rtx));
void dump_flow_info			PARAMS ((FILE *));
void debug_flow_info			PARAMS ((void));
static void dump_edge_info		PARAMS ((FILE *, edge, int));

static void count_reg_sets_1		PARAMS ((rtx, int));
static void count_reg_sets		PARAMS ((rtx, int));
static void count_reg_references	PARAMS ((rtx, int));
static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
						  rtx));
static void remove_fake_successors	PARAMS ((basic_block));
static void flow_nodes_print		PARAMS ((const char *, const sbitmap, FILE *));
static void flow_exits_print		PARAMS ((const char *, const edge *, int, FILE *));
static void flow_loops_cfg_dump		PARAMS ((const struct loops *, FILE *));
static int flow_loop_nested_p		PARAMS ((struct loop *, struct loop *));
static int flow_loop_exits_find		PARAMS ((const sbitmap, edge **));
static int flow_loop_nodes_find		PARAMS ((basic_block, basic_block, sbitmap));
static int flow_depth_first_order_compute PARAMS ((int *));
static basic_block flow_loop_pre_header_find PARAMS ((basic_block, const sbitmap *));
static void flow_loop_tree_node_add	PARAMS ((struct loop *, struct loop *));
static void flow_loops_tree_build	PARAMS ((struct loops *));
static int flow_loop_level_compute	PARAMS ((struct loop *, int));
static int flow_loops_level_compute	PARAMS ((struct loops *));
/* Find basic blocks of the current function.
   F is the first insn of the function and NREGS the number of register
   numbers in use.  */

void
find_basic_blocks (f, nregs, file)
     rtx f;
     int nregs ATTRIBUTE_UNUSED;
     FILE *file ATTRIBUTE_UNUSED;
{
  int max_uid;

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      int i;

      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
	 tag for reuse during create_basic_block, just in case some pass
	 copies around basic block notes improperly.  */
      for (i = 0; i < n_basic_blocks; ++i)
	BASIC_BLOCK (i)->aux = NULL;

      VARRAY_FREE (basic_block_info);
    }

  n_basic_blocks = count_basic_blocks (f);

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");

  label_value_list = find_basic_blocks_1 (f);

  /* Record the block to which an insn belongs.  */
  /* ??? This should be done another way, by which (perhaps) a label is
     tagged directly with the basic block that it starts.  It is used for
     more than that currently, but IMO that is the only valid use.  */

  max_uid = get_max_uid ();
#ifdef AUTO_INC_DEC
  /* Leave space for insns life_analysis makes in some cases for auto-inc.
     These cases are rare, so we don't need too much space.  */
  max_uid += max_uid / 10;
#endif

  compute_bb_for_insn (max_uid);

  /* Discover the edges of our cfg.  */

  record_active_eh_regions (f);
  make_edges (label_value_list);

  /* Do very simple cleanup now, for the benefit of code that runs between
     here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
  tidy_fallthru_edges ();

  mark_critical_edges ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
/* Count the basic blocks of the function.  */

static int
count_basic_blocks (f)
     rtx f;
{
  register rtx insn;
  register RTX_CODE prev_code;
  register int count = 0;
  int eh_region = 0;
  int call_had_abnormal_edge = 0;

  prev_code = JUMP_INSN;
  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      register RTX_CODE code = GET_CODE (insn);

      if (code == CODE_LABEL
	  || (GET_RTX_CLASS (code) == 'i'
	      && (prev_code == JUMP_INSN
		  || prev_code == BARRIER
		  || (prev_code == CALL_INSN && call_had_abnormal_edge))))
	count++;

      /* Record whether this call created an edge.  */
      if (code == CALL_INSN)
	{
	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	  int region = (note ? INTVAL (XEXP (note, 0)) : 1);

	  call_had_abnormal_edge = 0;

	  /* If there is an EH region or rethrow, we have an edge.  */
	  if ((eh_region && region > 0)
	      || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
	    call_had_abnormal_edge = 1;
	  else if (nonlocal_goto_handler_labels && region >= 0)
	    /* If there is a nonlocal goto label and the specified
	       region number isn't -1, we have an edge. (0 means
	       no throw, but might have a nonlocal goto).  */
	    call_had_abnormal_edge = 1;
	}

      if (code != NOTE)
	prev_code = code;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	++eh_region;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
	--eh_region;
    }

  /* The rest of the compiler works a bit smoother when we don't have to
     check for the edge case of do-nothing functions with no basic blocks.  */
  if (count == 0)
    {
      emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
      count = 1;
    }

  return count;
}
/* Find all basic blocks of the function whose first insn is F.

   Collect and return a list of labels whose addresses are taken.  This
   will be used in make_edges for use with computed gotos.  */

static rtx
find_basic_blocks_1 (f)
     rtx f;
{
  register rtx insn, next;
  int i = 0;
  rtx bb_note = NULL_RTX;
  rtx eh_list = NULL_RTX;
  rtx label_value_list = NULL_RTX;
  rtx head = NULL_RTX;
  rtx end = NULL_RTX;

  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     count_basic_blocks continues to use the old form as a check.  */

  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      next = NEXT_INSN (insn);

      switch (code)
	{
	case NOTE:
	  {
	    int kind = NOTE_LINE_NUMBER (insn);

	    /* Keep a LIFO list of the currently active exception notes.  */
	    if (kind == NOTE_INSN_EH_REGION_BEG)
	      eh_list = alloc_INSN_LIST (insn, eh_list);
	    else if (kind == NOTE_INSN_EH_REGION_END)
	      {
		rtx t = eh_list;

		eh_list = XEXP (eh_list, 1);
		free_INSN_LIST_node (t);
	      }

	    /* Look for basic block notes with which to keep the
	       basic_block_info pointers stable.  Unthread the note now;
	       we'll put it back at the right place in create_basic_block.
	       Or not at all if we've already found a note in this block.  */
	    else if (kind == NOTE_INSN_BASIC_BLOCK)
	      {
		if (bb_note == NULL_RTX)
		  bb_note = insn;

		next = flow_delete_insn (insn);
	      }
	    break;
	  }

	case CODE_LABEL:
	  /* A basic block starts at a label.  If we've closed one off due
	     to a barrier or some such, no need to do it again.  */
	  if (head != NULL_RTX)
	    {
	      /* While we now have edge lists with which other portions of
		 the compiler might determine a call ending a basic block
		 does not imply an abnormal edge, it will be a bit before
		 everything can be updated.  So continue to emit a noop at
		 the end of such a block.  */
	      if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
		{
		  rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
		  end = emit_insn_after (nop, end);
		}

	      create_basic_block (i++, head, end, bb_note);
	      bb_note = NULL_RTX;
	    }
	  head = end = insn;
	  break;

	case JUMP_INSN:
	  /* A basic block ends at a jump.  */
	  if (head == NULL_RTX)
	    head = insn;
	  else
	    {
	      /* ??? Make a special check for table jumps.  The way this
		 happens is truly and amazingly gross.  We are about to
		 create a basic block that contains just a code label and
		 an addr*vec jump insn.  Worse, an addr_diff_vec creates
		 its own natural loop.

		 Prevent this bit of brain damage, pasting things together
		 correctly in make_edges.

		 The correct solution involves emitting the table directly
		 on the tablejump instruction as a note, or JUMP_LABEL.  */

	      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
		  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
		{
		  head = end = NULL;
		  n_basic_blocks--;
		  break;
		}
	    }
	  end = insn;
	  goto new_bb_inclusive;

	case BARRIER:
	  /* A basic block ends at a barrier.  It may be that an unconditional
	     jump already closed the basic block -- no need to do it again.  */
	  if (head == NULL_RTX)
	    break;

	  /* While we now have edge lists with which other portions of the
	     compiler might determine a call ending a basic block does not
	     imply an abnormal edge, it will be a bit before everything can
	     be updated.  So continue to emit a noop at the end of such a
	     block.  */
	  if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
	    {
	      rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
	      end = emit_insn_after (nop, end);
	    }
	  goto new_bb_exclusive;

	case CALL_INSN:
	  {
	    /* Record whether this call created an edge.  */
	    rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	    int region = (note ? INTVAL (XEXP (note, 0)) : 1);
	    int call_has_abnormal_edge = 0;

	    /* If there is an EH region or rethrow, we have an edge.  */
	    if ((eh_list && region > 0)
		|| find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
	      call_has_abnormal_edge = 1;
	    else if (nonlocal_goto_handler_labels && region >= 0)
	      /* If there is a nonlocal goto label and the specified
		 region number isn't -1, we have an edge. (0 means
		 no throw, but might have a nonlocal goto).  */
	      call_has_abnormal_edge = 1;

	    /* A basic block ends at a call that can either throw or
	       do a non-local goto.  */
	    if (call_has_abnormal_edge)
	      {
	      new_bb_inclusive:
		if (head == NULL_RTX)
		  head = insn;
		end = insn;

	      new_bb_exclusive:
		create_basic_block (i++, head, end, bb_note);
		head = end = NULL_RTX;
		bb_note = NULL_RTX;
		break;
	      }
	  }
	  /* FALLTHRU */

	default:
	  if (GET_RTX_CLASS (code) == 'i')
	    {
	      if (head == NULL_RTX)
		head = insn;
	      end = insn;
	    }
	  break;
	}

      if (GET_RTX_CLASS (code) == 'i')
	{
	  rtx note;

	  /* Make a list of all labels referred to other than by jumps
	     (which just don't have the REG_LABEL notes).

	     Make a special exception for labels followed by an ADDR*VEC,
	     as this would be a part of the tablejump setup code.

	     Make a special exception for the eh_return_stub_label, which
	     we know isn't part of any otherwise visible control flow.  */

	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_LABEL)
	      {
		rtx lab = XEXP (note, 0), next;

		if (lab == eh_return_stub_label)
		  ;
		else if ((next = next_nonnote_insn (lab)) != NULL
			 && GET_CODE (next) == JUMP_INSN
			 && (GET_CODE (PATTERN (next)) == ADDR_VEC
			     || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		  ;
		else
		  label_value_list
		    = alloc_EXPR_LIST (0, XEXP (note, 0), label_value_list);
	      }
	}
    }

  if (head != NULL_RTX)
    create_basic_block (i++, head, end, bb_note);

  if (i != n_basic_blocks)
    abort ();

  return label_value_list;
}
/* Tidy the CFG by deleting unreachable code and whatnot.  */

void
cleanup_cfg (f)
     rtx f;
{
  delete_unreachable_blocks ();
  move_stray_eh_region_notes ();
  record_active_eh_regions (f);
  try_merge_blocks ();
  mark_critical_edges ();

  /* Kill the data we won't maintain.  */
  label_value_list = NULL_RTX;
}
/* Create a new basic block consisting of the instructions between
   HEAD and END inclusive.  Reuses the note and basic block struct
   in BB_NOTE, if any.  */

void
create_basic_block (index, head, end, bb_note)
     int index;
     rtx head, end, bb_note;
{
  basic_block bb;

  if (bb_note
      && ! RTX_INTEGRATED_P (bb_note)
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */
      if (GET_CODE (head) == CODE_LABEL)
	add_insn_after (bb_note, head);
      else
	{
	  add_insn_before (bb_note, head);
	  head = bb_note;
	}
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.
	 Since we allow basic block structs in rtl, give the struct
	 the same lifetime by allocating it off the function obstack
	 rather than using malloc.  */

      bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
      memset (bb, 0, sizeof (*bb));

      if (GET_CODE (head) == CODE_LABEL)
	bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
      else
	{
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	}
      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  bb->head = head;
  bb->end = end;
  bb->index = index;
  BASIC_BLOCK (index) = bb;

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;
}
/* Records the basic block struct in BB_FOR_INSN, for every instruction
   indexed by INSN_UID.  MAX is the size of the array.  */

void
compute_bb_for_insn (max)
     int max;
{
  int i;

  if (basic_block_for_insn)
    VARRAY_FREE (basic_block_for_insn);
  VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, end;

      end = bb->end;
      insn = bb->head;
      while (1)
	{
	  int uid = INSN_UID (insn);
	  if (uid < max)
	    VARRAY_BB (basic_block_for_insn, uid) = bb;
	  if (insn == end)
	    break;
	  insn = NEXT_INSN (insn);
	}
    }
}
/* Free the memory associated with the edge structures.  */

static void
clear_edges ()
{
  int i;
  edge n, e;

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);

      for (e = bb->succ; e ; e = n)
	{
	  n = e->succ_next;
	  free (e);
	}

      bb->succ = 0;
      bb->pred = 0;
    }

  for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
    {
      n = e->succ_next;
      free (e);
    }

  ENTRY_BLOCK_PTR->succ = 0;
  EXIT_BLOCK_PTR->pred = 0;

  n_edges = 0;
}
/* Identify the edges between basic blocks.

   NONLOCAL_LABEL_LIST is a list of non-local labels in the function.  Blocks
   that are otherwise unreachable may be reachable with a non-local goto.

   BB_EH_END is an array indexed by basic block number in which we record
   the list of exception regions active at the end of the basic block.  */

static void
make_edges (label_value_list)
     rtx label_value_list;
{
  int i;
  eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
  sbitmap *edge_cache = NULL;

  /* Assume no computed jump; revise as we create edges.  */
  current_function_has_computed_jump = 0;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || label_value_list)
    {
      edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
      sbitmap_vector_zero (edge_cache, n_basic_blocks);
    }

  /* By nature of the way these get numbered, block 0 is always the entry.  */
  make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, x;
      enum rtx_code code;
      int force_fallthru = 0;

      /* Examine the last instruction of the block, and discover the
	 ways we can leave the block.  */

      insn = bb->end;
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
	{
	  rtx tmp;

	  /* ??? Recognize a tablejump and do the right thing.  */
	  if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	      && GET_CODE (tmp) == JUMP_INSN
	      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
		  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	    {
	      rtvec vec;
	      int j;

	      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
		vec = XVEC (PATTERN (tmp), 0);
	      else
		vec = XVEC (PATTERN (tmp), 1);

	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
		make_label_edge (edge_cache, bb,
				 XEXP (RTVEC_ELT (vec, j), 0), 0);

	      /* Some targets (eg, ARM) emit a conditional jump that also
		 contains the out-of-range target.  Scan for these and
		 add an edge if necessary.  */
	      if ((tmp = single_set (insn)) != NULL
		  && SET_DEST (tmp) == pc_rtx
		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
		make_label_edge (edge_cache, bb,
				 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);

#ifdef CASE_DROPS_THROUGH
	      /* Silly VAXen.  The ADDR_VEC is going to be in the way of
		 us naturally detecting fallthru into the next block.  */
	      force_fallthru = 1;
#endif
	    }

	  /* If this is a computed jump, then mark it as reaching
	     everything on the label_value_list and forced_labels list.  */
	  else if (computed_jump_p (insn))
	    {
	      current_function_has_computed_jump = 1;

	      for (x = label_value_list; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);

	      for (x = forced_labels; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
	    }

	  /* Returns create an exit out.  */
	  else if (returnjump_p (insn))
	    make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

	  /* Otherwise, we have a plain conditional or unconditional jump.  */
	  else
	    {
	      if (! JUMP_LABEL (insn))
		abort ();
	      make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
	    }
	}

      /* If this is a sibling call insn, then this is in effect a
	 combined call and return, and so we need an edge to the
	 exit block.  No need to worry about EH edges, since we
	 wouldn't have created the sibling call in the first place.  */

      if (code == CALL_INSN && SIBLING_CALL_P (insn))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

      /* If this is a CALL_INSN, then mark it as reaching the active EH
	 handler for this CALL_INSN.  If we're handling asynchronous
	 exceptions then any insn can reach any of the active handlers.

	 Also mark the CALL_INSN as reaching any nonlocal goto handler.  */

      if (code == CALL_INSN || asynchronous_exceptions)
	{
	  /* Add any appropriate EH edges.  We do this unconditionally
	     since there may be a REG_EH_REGION or REG_EH_RETHROW note
	     on the call, and this needn't be within an EH region.  */
	  make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);

	  /* If we have asynchronous exceptions, do the same for *all*
	     exception regions active in the block.  */
	  if (asynchronous_exceptions
	      && bb->eh_beg != bb->eh_end)
	    {
	      if (bb->eh_beg >= 0)
		make_eh_edge (edge_cache, eh_nest_info, bb,
			      NULL_RTX, bb->eh_beg);

	      for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
		if (GET_CODE (x) == NOTE
		    && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
			|| NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
		  {
		    int region = NOTE_EH_HANDLER (x);
		    make_eh_edge (edge_cache, eh_nest_info, bb,
				  NULL_RTX, region);
		  }
	    }

	  if (code == CALL_INSN && nonlocal_goto_handler_labels)
	    {
	      /* ??? This could be made smarter: in some cases it's possible
		 to tell that certain calls will not do a nonlocal goto.

		 For example, if the nested functions that do the nonlocal
		 gotos do not have their addresses taken, then only calls to
		 those functions or to other nested functions that use them
		 could possibly do nonlocal gotos.  */
	      /* We do know that a REG_EH_REGION note with a value less
		 than 0 is guaranteed not to perform a non-local goto.  */
	      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	      if (!note || INTVAL (XEXP (note, 0)) >= 0)
		for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
		  make_label_edge (edge_cache, bb, XEXP (x, 0),
				   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
	    }
	}

      /* We know something about the structure of the function __throw in
	 libgcc2.c.  It is the only function that ever contains eh_stub
	 labels.  It modifies its return address so that the last block
	 returns to one of the eh_stub labels within it.  So we have to
	 make additional edges in the flow graph.  */
      if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
	make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);

      /* Find out if we can drop through to the next block.  */
      insn = next_nonnote_insn (insn);
      if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (i + 1 < n_basic_blocks)
	{
	  rtx tmp = BLOCK_HEAD (i + 1);
	  if (GET_CODE (tmp) == NOTE)
	    tmp = next_nonnote_insn (tmp);
	  if (force_fallthru || insn == tmp)
	    make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
	}
    }

  free_eh_nesting_info (eh_nest_info);
  if (edge_cache)
    sbitmap_vector_free (edge_cache);
}
/* Create an edge between two basic blocks.  FLAGS are auxiliary information
   about the edge that is accumulated between calls.  */

void
make_edge (edge_cache, src, dst, flags)
     sbitmap *edge_cache;
     basic_block src, dst;
     int flags;
{
  int use_edge_cache;
  edge e;

  /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
     many edges to them, and we didn't allocate memory for it.  */
  use_edge_cache = (edge_cache
		    && src != ENTRY_BLOCK_PTR
		    && dst != EXIT_BLOCK_PTR);

  /* Make sure we don't add duplicate edges.  */
  if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
    for (e = src->succ; e ; e = e->succ_next)
      if (e->dest == dst)
	{
	  e->flags |= flags;
	  return;
	}

  e = (edge) xcalloc (1, sizeof (*e));
  n_edges++;

  e->succ_next = src->succ;
  e->pred_next = dst->pred;
  e->src = src;
  e->dest = dst;
  e->flags = flags;

  src->succ = e;
  dst->pred = e;

  if (use_edge_cache)
    SET_BIT (edge_cache[src->index], dst->index);
}
/* Create an edge from a basic block to a label.  */

static void
make_label_edge (edge_cache, src, label, flags)
     sbitmap *edge_cache;
     basic_block src;
     rtx label;
     int flags;
{
  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}
/* Create the edges generated by INSN in REGION.  */

static void
make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
     sbitmap *edge_cache;
     eh_nesting_info *eh_nest_info;
     basic_block src;
     rtx insn;
     int region;
{
  handler_info **handler_list;
  int num, is_call;

  is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
  num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
  while (--num >= 0)
    {
      make_label_edge (edge_cache, src, handler_list[num]->handler_label,
		       EDGE_ABNORMAL | EDGE_EH | is_call);
    }
}
/* EH_REGION notes appearing between basic blocks are ambiguous, and even
   dangerous if we intend to move basic blocks around.  Move such notes
   into the following block.  */

static void
move_stray_eh_region_notes ()
{
  int i;
  basic_block b1, b2;

  if (n_basic_blocks < 2)
    return;

  b2 = BASIC_BLOCK (n_basic_blocks - 1);
  for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
    {
      rtx insn, next, list = NULL_RTX;

      b1 = BASIC_BLOCK (i);
      for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (GET_CODE (insn) == NOTE
	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	    {
	      /* Unlink from the insn chain.  */
	      NEXT_INSN (PREV_INSN (insn)) = next;
	      PREV_INSN (next) = PREV_INSN (insn);

	      /* Queue it.  */
	      NEXT_INSN (insn) = list;
	      list = insn;
	    }
	}

      if (list == NULL_RTX)
	continue;

      /* Find where to insert these things.  */
      insn = b2->head;
      if (GET_CODE (insn) == CODE_LABEL)
	insn = NEXT_INSN (insn);

      while (list)
	{
	  next = NEXT_INSN (list);
	  add_insn_after (list, insn);
	  list = next;
	}
    }
}
/* Recompute eh_beg/eh_end for each basic block.  */

static void
record_active_eh_regions (f)
     rtx f;
{
  rtx insn, eh_list = NULL_RTX;
  int i = 0;
  basic_block bb = BASIC_BLOCK (0);

  for (insn = f; insn ; insn = NEXT_INSN (insn))
    {
      if (bb->head == insn)
	bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);

      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG)
	    eh_list = alloc_INSN_LIST (insn, eh_list);
	  else if (kind == NOTE_INSN_EH_REGION_END)
	    {
	      rtx t = XEXP (eh_list, 1);
	      free_INSN_LIST_node (eh_list);
	      eh_list = t;
	    }
	}

      if (bb->end == insn)
	{
	  bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
	  i += 1;
	  if (i == n_basic_blocks)
	    break;
	  bb = BASIC_BLOCK (i);
	}
    }
}
/* Identify critical edges and set the bits appropriately.  */

static void
mark_critical_edges ()
{
  int i, n = n_basic_blocks;
  basic_block bb;

  /* We begin with the entry block.  This is not terribly important now,
     but could be if a front end (Fortran) implemented alternate entry
     points.  */
  bb = ENTRY_BLOCK_PTR;
  i = -1;

  while (1)
    {
      edge e;

      /* (1) Critical edges must have a source with multiple successors.  */
      if (bb->succ && bb->succ->succ_next)
	{
	  for (e = bb->succ; e ; e = e->succ_next)
	    {
	      /* (2) Critical edges must have a destination with multiple
		 predecessors.  Note that we know there is at least one
		 predecessor -- the edge we followed to get here.  */
	      if (e->dest->pred->pred_next)
		e->flags |= EDGE_CRITICAL;
	      else
		e->flags &= ~EDGE_CRITICAL;
	    }
	}
      else
	{
	  for (e = bb->succ; e ; e = e->succ_next)
	    e->flags &= ~EDGE_CRITICAL;
	}

      if (++i >= n)
	break;
      bb = BASIC_BLOCK (i);
    }
}
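
/* For illustration: in the cfg below, the edge A->C is critical (A has
   multiple successors and C has multiple predecessors), so an insn
   placed on it requires the new block created by split_edge below.
   The edges A->B and B->C are not critical.

		A
	       / \
	      B   |
	       \ /
		C
*/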
/* Split a (typically critical) edge.  Return the new block.
   Abort on abnormal edges.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

basic_block
split_edge (edge_in)
     edge edge_in;
{
  basic_block old_pred, bb, old_succ;
  edge edge_out;
  rtx bb_note;
  int i, j;

  /* Abnormal edges cannot be split.  */
  if ((edge_in->flags & EDGE_ABNORMAL) != 0)
    abort ();

  old_pred = edge_in->src;
  old_succ = edge_in->dest;

  /* Remove the existing edge from the destination's pred list.  */
  {
    edge *pp;
    for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
      continue;
    *pp = edge_in->pred_next;
    edge_in->pred_next = NULL;
  }

  /* Create the new structures.  */
  bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
  edge_out = (edge) xcalloc (1, sizeof (*edge_out));
  n_edges++;

  memset (bb, 0, sizeof (*bb));
  bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
  bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);

  /* ??? This info is likely going to be out of date very soon.  */
  if (old_succ->global_live_at_start)
    {
      COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
      COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
    }
  else
    {
      CLEAR_REG_SET (bb->global_live_at_start);
      CLEAR_REG_SET (bb->global_live_at_end);
    }

  /* Wire them up.  */
  bb->pred = edge_in;
  bb->succ = edge_out;

  edge_in->dest = bb;
  edge_in->flags &= ~EDGE_CRITICAL;

  edge_out->pred_next = old_succ->pred;
  edge_out->succ_next = NULL;
  edge_out->src = bb;
  edge_out->dest = old_succ;
  edge_out->flags = EDGE_FALLTHRU;
  edge_out->probability = REG_BR_PROB_BASE;

  old_succ->pred = edge_out;

  /* Tricky case -- if there existed a fallthru into the successor
     (and we're not it) we must add a new unconditional jump around
     the new block we're actually interested in.

     Further, if that edge is critical, this means a second new basic
     block must be created to hold it.  In order to simplify correct
     insn placement, do this before we touch the existing basic block
     ordering for the block we were really wanting.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e;
      for (e = edge_out->pred_next; e ; e = e->pred_next)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      if (e)
	{
	  basic_block jump_block;
	  rtx pos;

	  if ((e->flags & EDGE_CRITICAL) == 0
	      && e->src != ENTRY_BLOCK_PTR)
	    {
	      /* Non critical -- we can simply add a jump to the end
		 of the existing predecessor.  */
	      jump_block = e->src;
	    }
	  else
	    {
	      /* We need a new block to hold the jump.  The simplest
	         way to do the bulk of the work here is to recursively
	         call ourselves.  */
	      jump_block = split_edge (e);
	      e = jump_block->succ;
	    }

	  /* Now add the jump insn ... */
	  pos = emit_jump_insn_after (gen_jump (old_succ->head),
				      jump_block->end);
	  jump_block->end = pos;
	  if (basic_block_for_insn)
	    set_block_for_insn (pos, jump_block);
	  emit_barrier_after (pos);

	  /* ... let jump know that label is in use, ... */
	  JUMP_LABEL (pos) = old_succ->head;
	  ++LABEL_NUSES (old_succ->head);

	  /* ... and clear fallthru on the outgoing edge.  */
	  e->flags &= ~EDGE_FALLTHRU;

	  /* Continue splitting the interesting edge.  */
	}
    }

  /* Place the new block just in front of the successor.  */
  VARRAY_GROW (basic_block_info, ++n_basic_blocks);
  if (old_succ == EXIT_BLOCK_PTR)
    j = n_basic_blocks - 1;
  else
    j = old_succ->index;
  for (i = n_basic_blocks - 1; i > j; --i)
    {
      basic_block tmp = BASIC_BLOCK (i - 1);
      BASIC_BLOCK (i) = tmp;
      tmp->index = i;
    }
  BASIC_BLOCK (i) = bb;
  bb->index = i;

  /* Create the basic block note.

     Where we place the note can have a noticeable impact on the generated
     code.  Consider this cfg:

			E
			|
			0
		       / \
		   +->1-->2--->E
		   |  |
		   +--+

     If we need to insert an insn on the edge from block 0 to block 1,
     we want to ensure the instructions we insert are outside of any
     loop notes that physically sit between block 0 and block 1.  Otherwise
     we confuse the loop optimizer into thinking the loop is a phony.  */
  if (old_succ != EXIT_BLOCK_PTR
      && PREV_INSN (old_succ->head)
      && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
      && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
				PREV_INSN (old_succ->head));
  else if (old_succ != EXIT_BLOCK_PTR)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
  else
    bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
  NOTE_BASIC_BLOCK (bb_note) = bb;
  bb->head = bb->end = bb_note;

  /* Not quite simple -- for non-fallthru edges, we must adjust the
     predecessor's jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      rtx tmp, insn = old_pred->end;
      rtx old_label = old_succ->head;
      rtx new_label = gen_label_rtx ();

      if (GET_CODE (insn) != JUMP_INSN)
	abort ();

      /* ??? Recognize a tablejump and adjust all matching cases.  */
      if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	  && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	  && GET_CODE (tmp) == JUMP_INSN
	  && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	      || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	{
	  rtvec vec;
	  int j;

	  if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
	    vec = XVEC (PATTERN (tmp), 0);
	  else
	    vec = XVEC (PATTERN (tmp), 1);

	  for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	    if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	      {
		RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
		--LABEL_NUSES (old_label);
		++LABEL_NUSES (new_label);
	      }

	  /* Handle casesi dispatch insns */
	  if ((tmp = single_set (insn)) != NULL
	      && SET_DEST (tmp) == pc_rtx
	      && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	      && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	      && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
	    {
	      XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
							   new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}
      else
	{
	  /* This would have indicated an abnormal edge.  */
	  if (computed_jump_p (insn))
	    abort ();

	  /* A return instruction can't be redirected.  */
	  if (returnjump_p (insn))
	    abort ();

	  /* If the insn doesn't go where we think, we're confused.  */
	  if (JUMP_LABEL (insn) != old_label)
	    abort ();

	  redirect_jump (insn, new_label);
	}

      emit_label_before (new_label, bb_note);
      bb->head = new_label;
    }

  return bb;
}
/* Queue instructions for insertion on an edge between two basic blocks.
   The new instructions and basic blocks (if any) will not appear in the
   CFG until commit_edge_insertions is called.  */

void
insert_insn_on_edge (pattern, e)
     rtx pattern;
     edge e;
{
  /* We cannot insert instructions on an abnormal critical edge.
     It will be easier to find the culprit if we die now.  */
  if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
      == (EDGE_ABNORMAL|EDGE_CRITICAL))
    abort ();

  if (e->insns == NULL_RTX)
    start_sequence ();
  else
    push_to_sequence (e->insns);

  emit_insn (pattern);

  e->insns = get_insns ();
  end_sequence ();
}
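
/* For example (illustrative; DEST and SRC are hypothetical rtx's), a
   pass that must materialize a register copy on edge E without
   disturbing either endpoint block can queue it, and later flush all
   queued insns in one pass:

	insert_insn_on_edge (gen_move_insn (dest, src), e);
	...
	commit_edge_insertions ();
*/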
/* Update the CFG for the instructions queued on edge E.  */

static void
commit_one_edge_insertion (e)
     edge e;
{
  rtx before = NULL_RTX, after = NULL_RTX, insns, tmp;
  basic_block bb;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns;
  e->insns = NULL_RTX;

  /* Figure out where to put these things.  If the destination has
     one predecessor, insert there.  Except for the exit block.  */
  if (e->dest->pred->pred_next == NULL
      && e->dest != EXIT_BLOCK_PTR)
    {
      bb = e->dest;

      /* Get the location correct wrt a code label, and "nice" wrt
	 a basic block note, and before everything else.  */
      tmp = bb->head;
      if (GET_CODE (tmp) == CODE_LABEL)
	tmp = NEXT_INSN (tmp);
      if (GET_CODE (tmp) == NOTE
	  && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
	tmp = NEXT_INSN (tmp);
      if (tmp == bb->head)
	before = tmp;
      else
	after = PREV_INSN (tmp);
    }

  /* If the source has one successor and the edge is not abnormal,
     insert there.  Except for the entry block.  */
  else if ((e->flags & EDGE_ABNORMAL) == 0
	   && e->src->succ->succ_next == NULL
	   && e->src != ENTRY_BLOCK_PTR)
    {
      bb = e->src;
      /* It is possible to have a non-simple jump here.  Consider a target
	 where some forms of unconditional jumps clobber a register.  This
	 happens on the fr30 for example.

	 We know this block has a single successor, so we can just emit
	 the queued insns before the jump.  */
      if (GET_CODE (bb->end) == JUMP_INSN)
	before = bb->end;
      else
	{
	  /* We'd better be fallthru, or we've lost track of what's what.  */
	  if ((e->flags & EDGE_FALLTHRU) == 0)
	    abort ();

	  after = bb->end;
	}
    }

  /* Otherwise we must split the edge.  */
  else
    {
      bb = split_edge (e);
      after = bb->end;
    }

  /* Now that we've found the spot, do the insertion.  */

  /* Set the new block number for these insns, if structure is allocated.  */
  if (basic_block_for_insn)
    {
      rtx i;
      for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
	set_block_for_insn (i, bb);
    }

  if (before)
    {
      emit_insns_before (insns, before);
      if (before == bb->head)
	bb->head = insns;
    }
  else
    {
      rtx last = emit_insns_after (insns, after);
      if (after == bb->end)
	{
	  bb->end = last;

	  if (GET_CODE (last) == JUMP_INSN)
	    {
	      if (returnjump_p (last))
		{
		  /* ??? Remove all outgoing edges from BB and add one
		     for EXIT.  This is not currently a problem because
		     this only happens for the (single) epilogue, which
		     already has a fallthru edge to EXIT.  */

		  e = bb->succ;
		  if (e->dest != EXIT_BLOCK_PTR
		      || e->succ_next != NULL
		      || (e->flags & EDGE_FALLTHRU) == 0)
		    abort ();
		  e->flags &= ~EDGE_FALLTHRU;

		  emit_barrier_after (last);
		}
	      else
		abort ();
	    }
	}
    }
}
/* Update the CFG for all queued instructions.  */

void
commit_edge_insertions ()
{
  int i;
  basic_block bb;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  i = -1;
  bb = ENTRY_BLOCK_PTR;
  while (1)
    {
      edge e, next;

      for (e = bb->succ; e ; e = next)
	{
	  next = e->succ_next;
	  if (e->insns)
	    commit_one_edge_insertion (e);
	}

      if (++i >= n_basic_blocks)
	break;
      bb = BASIC_BLOCK (i);
    }
}
/* Delete all unreachable basic blocks.   */

static void
delete_unreachable_blocks ()
{
  basic_block *worklist, *tos;
  int deleted_handler;
  edge e;
  int i, n;

  n = n_basic_blocks;
  tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);

  /* Use basic_block->aux as a marker.  Clear them all.  */

  for (i = 0; i < n; ++i)
    BASIC_BLOCK (i)->aux = NULL;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */

  for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
    {
      *tos++ = e->dest;

      /* Mark the block with a handy non-null value.  */
      e->dest->aux = e;
    }

  /* Iterate: find everything reachable from what we've already seen.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;

      for (e = b->succ; e ; e = e->succ_next)
	if (!e->dest->aux)
	  {
	    *tos++ = e->dest;
	    e->dest->aux = e;
	  }
    }

  /* Delete all unreachable basic blocks.  Count down so that we don't
     interfere with the block renumbering that happens in delete_block.  */

  deleted_handler = 0;

  for (i = n - 1; i >= 0; --i)
    {
      basic_block b = BASIC_BLOCK (i);

      if (b->aux != NULL)
	/* This block was found.  Tidy up the mark.  */
	b->aux = NULL;
      else
	deleted_handler |= delete_block (b);
    }

  tidy_fallthru_edges ();

  /* If we deleted an exception handler, we may have EH region begin/end
     blocks to remove as well.  */
  if (deleted_handler)
    delete_eh_regions ();

  free (worklist);
}
/* Find EH regions for which there is no longer a handler, and delete them.  */

static void
delete_eh_regions ()
{
  rtx insn;

  update_rethrow_references ();

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
	    (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	  {
	    int num = NOTE_EH_HANDLER (insn);
	    /* A NULL handler indicates a region is no longer needed,
	       as long as its rethrow label isn't used.  */
	    if (get_first_handler (num) == NULL && ! rethrow_used (num))
	      {
		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		NOTE_SOURCE_FILE (insn) = 0;
	      }
	  }
      }
}
/* Return true if NOTE is not one of the ones that must be kept paired,
   so that we may simply delete them.  */

static int
can_delete_note_p (note)
     rtx note;
{
  return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
	  || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
}

/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  */

void
flow_delete_insn_chain (start, finish)
     rtx start, finish;
{
  /* Unchain the insns one by one.  It would be quicker to delete all
     of these with a single unchaining, rather than one at a time, but
     we need to keep the NOTE's.  */

  rtx next;

  while (1)
    {
      next = NEXT_INSN (start);
      if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
	;
      else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
	;
      else
	next = flow_delete_insn (start);

      if (start == finish)
	break;
      start = next;
    }
}
/* Delete the insns in a (non-live) block.  We physically delete every
   non-deleted-note insn, and update the flow graph appropriately.

   Return nonzero if we deleted an exception handler.  */

/* ??? Preserving all such notes strikes me as wrong.  It would be nice
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */

static int
delete_block (b)
     basic_block b;
{
  int deleted_handler = 0;
  rtx insn, end, tmp;

  /* If the head of this block is a CODE_LABEL, then it might be the
     label for an exception handler which can't be reached.

     We need to remove the label from the exception_handler_label list
     and remove the associated NOTE_INSN_EH_REGION_BEG and
     NOTE_INSN_EH_REGION_END notes.  */

  insn = b->head;

  never_reached_warning (insn);

  if (GET_CODE (insn) == CODE_LABEL)
    {
      rtx x, *prev = &exception_handler_labels;

      for (x = exception_handler_labels; x; x = XEXP (x, 1))
	{
	  if (XEXP (x, 0) == insn)
	    {
	      /* Found a match, splice this label out of the EH label list.  */
	      *prev = XEXP (x, 1);
	      XEXP (x, 1) = NULL_RTX;
	      XEXP (x, 0) = NULL_RTX;

	      /* Remove the handler from all regions */
	      remove_handler (insn);
	      deleted_handler = 1;
	      break;
	    }
	  prev = &XEXP (x, 1);
	}

      /* This label may be referenced by code solely for its value, or
	 referenced by static data, or something.  We have determined
	 that it is not reachable, but cannot delete the label itself.
	 Save code space and continue to delete the balance of the block,
	 along with properly updating the cfg.  */
      if (!can_delete_label_p (insn))
	{
	  /* If we've only got one of these, skip the whole deleting
	     insns thing.  */
	  if (insn == b->end)
	    goto no_delete_insns;
	  insn = NEXT_INSN (insn);
	}
    }

  /* Include any jump table following the basic block.  */
  end = b->end;
  if (GET_CODE (end) == JUMP_INSN
      && (tmp = JUMP_LABEL (end)) != NULL_RTX
      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
      && GET_CODE (tmp) == JUMP_INSN
      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
    end = tmp;

  /* Include any barrier that may follow the basic block.  */
  tmp = next_nonnote_insn (end);
  if (tmp && GET_CODE (tmp) == BARRIER)
    end = tmp;

  /* Selectively delete the entire chain.  */
  flow_delete_insn_chain (insn, end);

 no_delete_insns:

  /* Remove the edges into and out of this block.  Note that there may
     indeed be edges in, if we are removing an unreachable loop.  */
  {
    edge e, next, *q;

    for (e = b->pred; e ; e = next)
      {
	for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
	  continue;
	*q = e->succ_next;
	next = e->pred_next;
	n_edges--;
	free (e);
      }
    for (e = b->succ; e ; e = next)
      {
	for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
	  continue;
	*q = e->pred_next;
	next = e->succ_next;
	n_edges--;
	free (e);
      }

    b->pred = NULL;
    b->succ = NULL;
  }

  /* Remove the basic block from the array, and compact behind it.  */
  expunge_block (b);

  return deleted_handler;
}
/* Remove block B from the basic block array and compact behind it.  */

static void
expunge_block (b)
     basic_block b;
{
  int i, n = n_basic_blocks;

  for (i = b->index; i + 1 < n; ++i)
    {
      basic_block x = BASIC_BLOCK (i + 1);
      BASIC_BLOCK (i) = x;
      x->index = i;
    }

  basic_block_info->num_elements--;
  n_basic_blocks--;
}
/* Delete INSN by patching it out.  Return the next insn.  */

rtx
flow_delete_insn (insn)
     rtx insn;
{
  rtx prev = PREV_INSN (insn);
  rtx next = NEXT_INSN (insn);
  rtx note;

  PREV_INSN (insn) = NULL_RTX;
  NEXT_INSN (insn) = NULL_RTX;

  if (prev)
    NEXT_INSN (prev) = next;
  if (next)
    PREV_INSN (next) = prev;
  else
    set_last_insn (prev);

  if (GET_CODE (insn) == CODE_LABEL)
    remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
    LABEL_NUSES (JUMP_LABEL (insn))--;

  /* Also if deleting an insn that references a label.  */
  else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX)
    LABEL_NUSES (XEXP (note, 0))--;

  return next;
}
/* True if a given label can be deleted.  */

static int
can_delete_label_p (label)
     rtx label;
{
  rtx x;

  if (LABEL_PRESERVE_P (label))
    return 0;

  for (x = forced_labels; x ; x = XEXP (x, 1))
    if (label == XEXP (x, 0))
      return 0;
  for (x = label_value_list; x ; x = XEXP (x, 1))
    if (label == XEXP (x, 0))
      return 0;
  for (x = exception_handler_labels; x ; x = XEXP (x, 1))
    if (label == XEXP (x, 0))
      return 0;

  /* User declared labels must be preserved.  */
  if (LABEL_NAME (label) != 0)
    return 0;

  return 1;
}
/* Blocks A and B are to be merged into a single block A.  The insns
   are already contiguous, hence `nomove'.  */

void
merge_blocks_nomove (a, b)
     basic_block a, b;
{
  edge e;
  rtx b_head, b_end, a_end;
  rtx del_first = NULL_RTX, del_last = NULL_RTX;
  int b_empty = 0;

  /* If there was a CODE_LABEL beginning B, delete it.  */
  b_head = b->head;
  b_end = b->end;
  if (GET_CODE (b_head) == CODE_LABEL)
    {
      /* Detect basic blocks with nothing but a label.  This can happen
	 in particular at the end of a function.  */
      if (b_head == b_end)
	b_empty = 1;
      del_first = del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* Delete the basic block note.  */
  if (GET_CODE (b_head) == NOTE
      && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
    {
      if (b_head == b_end)
	b_empty = 1;
      if (! del_last)
	del_first = b_head;
      del_last = b_head;
      b_head = NEXT_INSN (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  a_end = a->end;
  if (GET_CODE (a_end) == JUMP_INSN)
    {
      rtx prev;

      prev = prev_nonnote_insn (a_end);
      if (!prev)
	prev = a->head;

      del_first = a_end;

#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (prev && sets_cc0_p (prev))
	{
	  rtx tmp = prev;
	  prev = prev_nonnote_insn (prev);
	  if (!prev)
	    prev = a->head;
	  del_first = tmp;
	}
#endif

      a_end = prev;
    }

  /* Delete everything marked above as well as crap that might be
     hanging out between the two blocks.  */
  flow_delete_insn_chain (del_first, del_last);

  /* Normally there should only be one successor of A and that is B, but
     partway through the merge of blocks for conditional_execution we'll
     be merging a TEST block with THEN and ELSE successors.  Free the
     whole lot of them and hope the caller knows what they're doing.  */
  while (a->succ)
    remove_edge (a->succ);

  /* Adjust the edges out of B for the new owner.  */
  for (e = b->succ; e ; e = e->succ_next)
    e->src = a;
  a->succ = b->succ;

  /* B hasn't quite yet ceased to exist.  Attempt to prevent mishap.  */
  b->pred = b->succ = NULL;

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      if (basic_block_for_insn)
	{
	  BLOCK_FOR_INSN (b_head) = a;
	  while (b_head != b_end)
	    {
	      b_head = NEXT_INSN (b_head);
	      BLOCK_FOR_INSN (b_head) = a;
	    }
	}
      a_end = b_end;
    }
  a->end = a_end;

  expunge_block (b);
}
/* Blocks A and B are to be merged into a single block.  A has no incoming
   fallthru edge, so it can be moved before B without adding or modifying
   any jumps (aside from the jump from A to B).  */

static int
merge_blocks_move_predecessor_nojumps (a, b)
     basic_block a, b;
{
  rtx start, end, barrier;
  int index;

  start = a->head;
  end = a->end;

  /* We want to delete the BARRIER after the end of the insns we are
     going to move.  If we don't find a BARRIER, then do nothing.  This
     can happen in some cases if we have labels we can not delete.

     Similarly, do nothing if we can not delete the label at the start
     of the target block.  */
  barrier = next_nonnote_insn (end);
  if (GET_CODE (barrier) != BARRIER
      || (GET_CODE (b->head) == CODE_LABEL
	  && ! can_delete_label_p (b->head)))
    return 0;

  flow_delete_insn (barrier);

  /* Move block and loop notes out of the chain so that we do not
     disturb their order.

     ??? A better solution would be to squeeze out all the non-nested notes
     and adjust the block trees appropriately.  Even better would be to have
     a tighter connection between block trees and rtl so that this is not
     necessary.  */
  start = squeeze_notes (start, end);

  /* Scramble the insn chain.  */
  if (end != PREV_INSN (b->head))
    reorder_insns (start, end, PREV_INSN (b->head));

  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
	       a->index, b->index);
    }

  /* Swap the records for the two blocks around.  Although we are deleting B,
     A is now where B was and we want to compact the BB array from where
     A used to be.  */
  BASIC_BLOCK (a->index) = b;
  BASIC_BLOCK (b->index) = a;
  index = a->index;
  a->index = b->index;
  b->index = index;

  /* Now blocks A and B are contiguous.  Merge them.  */
  merge_blocks_nomove (a, b);

  return 1;
}
/* Blocks A and B are to be merged into a single block.  B has no outgoing
   fallthru edge, so it can be moved after A without adding or modifying
   any jumps (aside from the jump from A to B).  */

static int
merge_blocks_move_successor_nojumps (a, b)
     basic_block a, b;
{
  rtx start, end, barrier;

  start = b->head;
  end = b->end;

  /* We want to delete the BARRIER after the end of the insns we are
     going to move.  If we don't find a BARRIER, then do nothing.  This
     can happen in some cases if we have labels we can not delete.

     Similarly, do nothing if we can not delete the label at the start
     of the target block.  */
  barrier = next_nonnote_insn (end);
  if (GET_CODE (barrier) != BARRIER
      || (GET_CODE (b->head) == CODE_LABEL
	  && ! can_delete_label_p (b->head)))
    return 0;

  flow_delete_insn (barrier);

  /* Move block and loop notes out of the chain so that we do not
     disturb their order.

     ??? A better solution would be to squeeze out all the non-nested notes
     and adjust the block trees appropriately.  Even better would be to have
     a tighter connection between block trees and rtl so that this is not
     necessary.  */
  start = squeeze_notes (start, end);

  /* Scramble the insn chain.  */
  reorder_insns (start, end, a->end);

  /* Now blocks A and B are contiguous.  Merge them.  */
  merge_blocks_nomove (a, b);

  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
	       b->index, a->index);
    }

  return 1;
}
2262 /* Attempt to merge basic blocks that are potentially non-adjacent.
2263 Return true iff the attempt succeeded. */
2266 merge_blocks (e, b, c)
2270 /* If B has a fallthru edge to C, no need to move anything. */
2271 if (e->flags & EDGE_FALLTHRU)
2273 /* If a label still appears somewhere and we cannot delete the label,
2274 then we cannot merge the blocks. The edge was tidied already. */
2276 rtx insn, stop = NEXT_INSN (c->head);
2277 for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
2278 if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
2281 merge_blocks_nomove (b, c);
2285 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2286 b->index, c->index);
2295 int c_has_outgoing_fallthru;
2296 int b_has_incoming_fallthru;
  /* We must be careful not to munge the nesting of exception regions,
     lexical blocks, and loop notes.
2301 The first is taken care of by requiring that the active eh
2302 region at the end of one block always matches the active eh
2303 region at the beginning of the next block.
     The latter two are taken care of by squeezing out all the notes.  */
2307 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2308 executed and we may want to treat blocks which have two out
2309 edges, one normal, one abnormal as only having one edge for
2310 block merging purposes. */
2312 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2313 if (tmp_edge->flags & EDGE_FALLTHRU)
2315 c_has_outgoing_fallthru = (tmp_edge != NULL);
2317 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2318 if (tmp_edge->flags & EDGE_FALLTHRU)
2320 b_has_incoming_fallthru = (tmp_edge != NULL);
  /* If B does not have an incoming fallthru, and the exception regions
     match, then it can be moved immediately before C without introducing
     or modifying jumps.

     C cannot be the first block, so we do not have to worry about
     accessing a non-existent block.  */
2328 d = BASIC_BLOCK (c->index - 1);
2329 if (! b_has_incoming_fallthru
2330 && d->eh_end == b->eh_beg
2331 && b->eh_end == c->eh_beg)
2332 return merge_blocks_move_predecessor_nojumps (b, c);
2334 /* Otherwise, we're going to try to move C after B. Make sure the
2335 exception regions match.
2337 If B is the last basic block, then we must not try to access the
2338 block structure for block B + 1. Luckily in that case we do not
2339 need to worry about matching exception regions. */
2340 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2341 if (b->eh_end == c->eh_beg
2342 && (d == NULL || c->eh_end == d->eh_beg))
2344 /* If C does not have an outgoing fallthru, then it can be moved
2345 immediately after B without introducing or modifying jumps. */
2346 if (! c_has_outgoing_fallthru)
2347 return merge_blocks_move_successor_nojumps (b, c);
2349 /* Otherwise, we'll need to insert an extra jump, and possibly
2350 a new block to contain it. */
2351 /* ??? Not implemented yet. */
2358 /* Top level driver for merge_blocks. */
2365 /* Attempt to merge blocks as made possible by edge removal. If a block
2366 has only one successor, and the successor has only one predecessor,
2367 they may be combined. */
2369 for (i = 0; i < n_basic_blocks; )
2371 basic_block c, b = BASIC_BLOCK (i);
      /* A loop because chains of blocks might be combinable.  */
2375 while ((s = b->succ) != NULL
2376 && s->succ_next == NULL
2377 && (s->flags & EDGE_EH) == 0
2378 && (c = s->dest) != EXIT_BLOCK_PTR
2379 && c->pred->pred_next == NULL
2380 /* If the jump insn has side effects, we can't kill the edge. */
2381 && (GET_CODE (b->end) != JUMP_INSN
2382 || onlyjump_p (b->end))
2383 && merge_blocks (s, b, c))
2386 /* Don't get confused by the index shift caused by deleting blocks. */
2391 /* The given edge should potentially be a fallthru edge. If that is in
2392 fact true, delete the jump and barriers that are in the way. */
2395 tidy_fallthru_edge (e, b, c)
2401 /* ??? In a late-running flow pass, other folks may have deleted basic
2402 blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label.  They ought to be chastised and fixed.
2405 We can also wind up with a sequence of undeletable labels between
2406 one block and the next.
2408 So search through a sequence of barriers, labels, and notes for
2409 the head of block C and assert that we really do fall through. */
2411 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2414 /* Remove what will soon cease being the jump insn from the source block.
     If block B consisted only of this single jump, turn it into a deleted
     note.  */
2418 if (GET_CODE (q) == JUMP_INSN)
2421 /* If this was a conditional jump, we need to also delete
2422 the insn that set cc0. */
2423 if (! simplejump_p (q) && condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2430 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2431 NOTE_SOURCE_FILE (q) = 0;
2434 b->end = q = PREV_INSN (q);
2437 /* Selectively unlink the sequence. */
2438 if (q != PREV_INSN (c->head))
2439 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2441 e->flags |= EDGE_FALLTHRU;
2444 /* Fix up edges that now fall through, or rather should now fall through
2445 but previously required a jump around now deleted blocks. Simplify
2446 the search by only examining blocks numerically adjacent, since this
2447 is how find_basic_blocks created them. */
2450 tidy_fallthru_edges ()
2454 for (i = 1; i < n_basic_blocks; ++i)
2456 basic_block b = BASIC_BLOCK (i - 1);
2457 basic_block c = BASIC_BLOCK (i);
      /* We care about simple conditional or unconditional jumps with
	 a single successor.
2463 If we had a conditional branch to the next instruction when
2464 find_basic_blocks was called, then there will only be one
2465 out edge for the block which ended with the conditional
2466 branch (since we do not create duplicate edges).
2468 Furthermore, the edge will be marked as a fallthru because we
2469 merge the flags for the duplicate edges. So we do not want to
2470 check that the edge is not a FALLTHRU edge. */
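      /* An illustrative case (hypothetical rtl): a conditional branch
	 to the immediately following insn,

		(jump_insn (set (pc)
			(if_then_else (eq ...) (label_ref L) (pc))))
	     L: (code_label ...)

	 produced a single out edge marked FALLTHRU, so the test below
	 deliberately does not exclude FALLTHRU edges.  */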
2471 if ((s = b->succ) != NULL
2472 && s->succ_next == NULL
2474 /* If the jump insn has side effects, we can't tidy the edge. */
2475 && (GET_CODE (b->end) != JUMP_INSN
2476 || onlyjump_p (b->end)))
2477 tidy_fallthru_edge (s, b, c);
2481 /* Discover and record the loop depth at the head of each basic block. */
2484 calculate_loop_depth (dump)
2489 /* The loop infrastructure does the real job for us. */
2490 flow_loops_find (&loops);
2493 flow_loops_dump (&loops, dump, 0);
2495 flow_loops_free (&loops);
2498 /* Perform data flow analysis.
   F is the first insn of the function and NREGS the number of register
   numbers in use.  */
2503 life_analysis (f, nregs, file, remove_dead_code)
2507 int remove_dead_code;
2509 #ifdef ELIMINABLE_REGS
  static struct { int from, to; } eliminables[] = ELIMINABLE_REGS;
2516 /* Dead code elimination changes basic block structure and therefore
2517 breaks the SSA phi representation. Particularly, a phi node
2518 can have an alternative value for each incoming block, referenced
2519 by the block number. Removing dead code can bump entire blocks
2520 and therefore cause blocks to be renumbered, invalidating the
2521 numbering of phi alternatives. */
2522 if (remove_dead_code && in_ssa_form)
  /* Record which registers will be eliminated.  We use this in
     mark_used_regs.  */
2528 CLEAR_HARD_REG_SET (elim_reg_set);
2530 #ifdef ELIMINABLE_REGS
2531 for (i = 0; i < (int) (sizeof eliminables / sizeof eliminables[0]); i++)
2532 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2534 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2537 /* We want alias analysis information for local dead store elimination. */
2538 init_alias_analysis ();
2541 flags = PROP_DEATH_NOTES | PROP_REG_INFO;
2545 if (! remove_dead_code)
2546 flags &= ~(PROP_SCAN_DEAD_CODE | PROP_KILL_DEAD_CODE);
  /* The post-reload life analysis must have (on a global basis) the same
     registers live as were computed by reload itself; otherwise elimination
     offsets and such may be incorrect.

     Reload will mark some registers as live even though they do not
     appear in the rtl.  */
2555 if (reload_completed)
2556 flags &= ~PROP_REG_INFO;
2560 /* Always remove no-op moves. Do this before other processing so
2561 that we don't have to keep re-scanning them. */
2562 delete_noop_moves (f);
2564 /* Some targets can emit simpler epilogues if they know that sp was
2565 not ever modified during the function. After reload, of course,
2566 we've already emitted the epilogue so there's no sense searching. */
2567 if (! reload_completed)
2568 notice_stack_pointer_modification (f);
2570 /* Allocate and zero out data structures that will record the
2571 data from lifetime analysis. */
2572 allocate_reg_life_data ();
2573 allocate_bb_life_data ();
2574 all_blocks = sbitmap_alloc (n_basic_blocks);
2575 sbitmap_ones (all_blocks);
2577 /* Find the set of registers live on function exit. */
2578 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2580 /* "Update" life info from zero. It'd be nice to begin the
2581 relaxation with just the exit and noreturn blocks, but that set
2582 is not immediately handy. */
2584 if (flags & PROP_REG_INFO)
2585 memset (regs_ever_live, 0, sizeof(regs_ever_live));
2586 update_life_info (all_blocks, UPDATE_LIFE_GLOBAL, flags);
2589 sbitmap_free (all_blocks);
2590 end_alias_analysis ();
2593 dump_flow_info (file);
2595 free_basic_block_vars (1);
2598 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2599 Search for REGNO. If found, abort if it is not wider than word_mode. */
2602 verify_wide_reg_1 (px, pregno)
2607 unsigned int regno = *(int *) pregno;
2609 if (GET_CODE (x) == REG && REGNO (x) == regno)
2611 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2618 /* A subroutine of verify_local_live_at_start. Search through insns
2619 between HEAD and END looking for register REGNO. */
2622 verify_wide_reg (regno, head, end)
2628 if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
      && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2633 head = NEXT_INSN (head);
2636 /* We didn't find the register at all. Something's way screwy. */
2640 /* A subroutine of update_life_info. Verify that there are no untoward
2641 changes in live_at_start during a local update. */
2644 verify_local_live_at_start (new_live_at_start, bb)
2645 regset new_live_at_start;
2648 if (reload_completed)
2650 /* After reload, there are no pseudos, nor subregs of multi-word
2651 registers. The regsets should exactly match. */
2652 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2659 /* Find the set of changed registers. */
2660 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2662 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2664 /* No registers should die. */
2665 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2667 /* Verify that the now-live register is wider than word_mode. */
2668 verify_wide_reg (i, bb->head, bb->end);
2673 /* Updates life information starting with the basic blocks set in BLOCKS.
2675 If LOCAL_ONLY, such as after splitting or peepholeing, we are only
2676 expecting local modifications to basic blocks. If we find extra
2677 registers live at the beginning of a block, then we either killed
2678 useful data, or we have a broken split that wants data not provided.
2679 If we find registers removed from live_at_start, that means we have
2680 a broken peephole that is killing a register it shouldn't.
2682 ??? This is not true in one situation -- when a pre-reload splitter
2683 generates subregs of a multi-word pseudo, current life analysis will
2684 lose the kill. So we _can_ have a pseudo go live. How irritating.
2686 BLOCK_FOR_INSN is assumed to be correct.
2688 Including PROP_REG_INFO does not properly refresh regs_ever_live
2689 unless the caller resets it to zero. */
2692 update_life_info (blocks, extent, prop_flags)
2694 enum update_life_extent extent;
2698 regset_head tmp_head;
2701 tmp = INITIALIZE_REG_SET (tmp_head);
2703 /* For a global update, we go through the relaxation process again. */
2704 if (extent != UPDATE_LIFE_LOCAL)
2706 calculate_global_regs_live (blocks, blocks,
2707 prop_flags & PROP_SCAN_DEAD_CODE);
2709 /* If asked, remove notes from the blocks we'll update. */
2710 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2711 count_or_remove_death_notes (blocks, 1);
2714 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2716 basic_block bb = BASIC_BLOCK (i);
2718 COPY_REG_SET (tmp, bb->global_live_at_end);
2719 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2721 if (extent == UPDATE_LIFE_LOCAL)
2722 verify_local_live_at_start (tmp, bb);
2727 if (prop_flags & PROP_REG_INFO)
2729 /* The only pseudos that are live at the beginning of the function
2730 are those that were not set anywhere in the function. local-alloc
2731 doesn't know how to handle these correctly, so mark them as not
2732 local to any one basic block. */
2733 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2734 FIRST_PSEUDO_REGISTER, i,
2735 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2737 /* We have a problem with any pseudoreg that lives across the setjmp.
2738 ANSI says that if a user variable does not change in value between
2739 the setjmp and the longjmp, then the longjmp preserves it. This
2740 includes longjmp from a place where the pseudo appears dead.
2741 (In principle, the value still exists if it is in scope.)
2742 If the pseudo goes in a hard reg, some other value may occupy
2743 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2744 Conclusion: such a pseudo must not go in a hard reg. */
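    /* A hypothetical source fragment showing the hazard:

		jmp_buf env;
		int v = 1;
		if (setjmp (env) == 0)
		  {
		    v = 2;
		    longjmp (env, 1);
		  }
		return v;

       If V lived in a hard reg that is reused while V looks dead, the
       longjmp would resume with a clobbered value; hence the
       REG_LIVE_LENGTH / REG_BASIC_BLOCK markings below.  */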
2745 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2746 FIRST_PSEUDO_REGISTER, i,
2748 if (regno_reg_rtx[i] != 0)
2750 REG_LIVE_LENGTH (i) = -1;
2751 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
2757 /* Free the variables allocated by find_basic_blocks.
2759 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2762 free_basic_block_vars (keep_head_end_p)
2763 int keep_head_end_p;
2765 if (basic_block_for_insn)
2767 VARRAY_FREE (basic_block_for_insn);
2768 basic_block_for_insn = NULL;
2771 if (! keep_head_end_p)
2774 VARRAY_FREE (basic_block_info);
2777 ENTRY_BLOCK_PTR->aux = NULL;
2778 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2779 EXIT_BLOCK_PTR->aux = NULL;
2780 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2784 /* Return nonzero if the destination of SET equals the source. */
2789 rtx src = SET_SRC (set);
2790 rtx dst = SET_DEST (set);
2792 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
2794 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
2796 src = SUBREG_REG (src);
2797 dst = SUBREG_REG (dst);
2800 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
2801 && REGNO (src) == REGNO (dst));
/* Return nonzero if an insn consists only of SETs, each of which only sets a
   value to itself.  */
2810 rtx pat = PATTERN (insn);
2812 /* Insns carrying these notes are useful later on. */
2813 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2816 if (GET_CODE (pat) == SET && set_noop_p (pat))
2819 if (GET_CODE (pat) == PARALLEL)
2822 /* If nothing but SETs of registers to themselves,
2823 this insn can also be deleted. */
2824 for (i = 0; i < XVECLEN (pat, 0); i++)
2826 rtx tem = XVECEXP (pat, 0, i);
2828 if (GET_CODE (tem) == USE
2829 || GET_CODE (tem) == CLOBBER)
2832 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2841 /* Delete any insns that copy a register to itself. */
2844 delete_noop_moves (f)
2848 for (insn = f; insn; insn = NEXT_INSN (insn))
2850 if (GET_CODE (insn) == INSN && noop_move_p (insn))
2852 PUT_CODE (insn, NOTE);
2853 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2854 NOTE_SOURCE_FILE (insn) = 0;
2859 /* Determine if the stack pointer is constant over the life of the function.
2860 Only useful before prologues have been emitted. */
2863 notice_stack_pointer_modification_1 (x, pat, data)
2865 rtx pat ATTRIBUTE_UNUSED;
2866 void *data ATTRIBUTE_UNUSED;
2868 if (x == stack_pointer_rtx
2869 /* The stack pointer is only modified indirectly as the result
2870 of a push until later in flow. See the comments in rtl.texi
2871 regarding Embedded Side-Effects on Addresses. */
2872 || (GET_CODE (x) == MEM
2873 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2874 || GET_CODE (XEXP (x, 0)) == PRE_INC
2875 || GET_CODE (XEXP (x, 0)) == POST_DEC
2876 || GET_CODE (XEXP (x, 0)) == POST_INC)
2877 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2878 current_function_sp_is_unchanging = 0;
2882 notice_stack_pointer_modification (f)
  /* Assume that the stack pointer is unchanging if alloca hasn't
     been used.  */
2889 current_function_sp_is_unchanging = !current_function_calls_alloca;
2890 if (! current_function_sp_is_unchanging)
2893 for (insn = f; insn; insn = NEXT_INSN (insn))
2895 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2897 /* Check if insn modifies the stack pointer. */
2898 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
2900 if (! current_function_sp_is_unchanging)
2906 /* Mark a register in SET. Hard registers in large modes get all
2907 of their component registers set as well. */
2909 mark_reg (reg, xset)
2913 regset set = (regset) xset;
2914 int regno = REGNO (reg);
2916 if (GET_MODE (reg) == BLKmode)
2919 SET_REGNO_REG_SET (set, regno);
2920 if (regno < FIRST_PSEUDO_REGISTER)
    {
      int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
      while (--n > 0)
	SET_REGNO_REG_SET (set, regno + n);
    }
2928 /* Mark those regs which are needed at the end of the function as live
2929 at the end of the last basic block. */
2931 mark_regs_live_at_end (set)
2936 /* If exiting needs the right stack value, consider the stack pointer
2937 live at the end of the function. */
2938 if ((HAVE_epilogue && reload_completed)
2939 || ! EXIT_IGNORE_STACK
2940 || (! FRAME_POINTER_REQUIRED
2941 && ! current_function_calls_alloca
2942 && flag_omit_frame_pointer)
2943 || current_function_sp_is_unchanging)
2945 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2948 /* Mark the frame pointer if needed at the end of the function. If
2949 we end up eliminating it, it will be removed from the live list
2950 of each basic block by reload. */
2952 if (! reload_completed || frame_pointer_needed)
2954 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2955 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      /* If they are different, also mark the hard frame pointer as live.  */
2957 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
2961 #ifdef PIC_OFFSET_TABLE_REGNUM
2962 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
2963 /* Many architectures have a GP register even without flag_pic.
2964 Assume the pic register is not in use, or will be handled by
2965 other means, if it is not fixed. */
2966 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
2967 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
2971 /* Mark all global registers, and all registers used by the epilogue
2972 as being live at the end of the function since they may be
2973 referenced by our caller. */
2974 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2976 #ifdef EPILOGUE_USES
2977 || EPILOGUE_USES (i)
2980 SET_REGNO_REG_SET (set, i);
  /* Mark all call-saved registers that we actually used.  */
2983 if (HAVE_epilogue && reload_completed)
2985 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2986 if (! call_used_regs[i] && regs_ever_live[i])
2987 SET_REGNO_REG_SET (set, i);
2990 /* Mark function return value. */
2991 diddle_return_value (mark_reg, set);
2994 /* Callback function for for_each_successor_phi. DATA is a regset.
2995 Sets the SRC_REGNO, the regno of the phi alternative for phi node
2996 INSN, in the regset. */
2999 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
3000 rtx insn ATTRIBUTE_UNUSED;
3001 int dest_regno ATTRIBUTE_UNUSED;
3005 regset live = (regset) data;
3006 SET_REGNO_REG_SET (live, src_regno);
3010 /* Propagate global life info around the graph of basic blocks. Begin
3011 considering blocks with their corresponding bit set in BLOCKS_IN.
3012 BLOCKS_OUT is set for every block that was changed. */
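/* In conventional dataflow notation (a reference sketch, not taken
   from the original comments), the fixed point computed below is

	LIVE_OUT(b) = union of LIVE_IN(s) over each successor s of b
	LIVE_IN(b)  = USE(b) | (LIVE_OUT(b) & ~DEF(b))

   where propagate_block supplies the per-block transfer function.  */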
3015 calculate_global_regs_live (blocks_in, blocks_out, flags)
3016 sbitmap blocks_in, blocks_out;
3019 basic_block *queue, *qhead, *qtail, *qend;
3020 regset tmp, new_live_at_end;
3021 regset_head tmp_head;
3022 regset_head new_live_at_end_head;
3025 tmp = INITIALIZE_REG_SET (tmp_head);
3026 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
3028 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
3029 because the `head == tail' style test for an empty queue doesn't
3030 work with a full queue. */
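  /* An illustrative sketch of the convention (assumption: qhead and
     qtail wrap around the array): with exactly N slots, a circular
     queue holding N elements would have qhead == qtail, which is
     indistinguishable from empty; over-allocating guarantees the
     queue can never fill up.  */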
3031 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
3033 qhead = qend = queue + n_basic_blocks + 2;
3035 /* Clear out the garbage that might be hanging out in bb->aux. */
3036 for (i = n_basic_blocks - 1; i >= 0; --i)
3037 BASIC_BLOCK (i)->aux = NULL;
  /* Queue the blocks set in the initial mask.  Do this in reverse block
     number order so that the first round is more likely to do useful
     work.  We use AUX non-null to flag that the block is queued.  */
3042 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3044 basic_block bb = BASIC_BLOCK (i);
3049 sbitmap_zero (blocks_out);
3051 while (qhead != qtail)
3053 int rescan, changed;
      /* Begin by propagating live_at_start from the successor blocks.  */
3063 CLEAR_REG_SET (new_live_at_end);
3064 for (e = bb->succ; e ; e = e->succ_next)
3066 basic_block sb = e->dest;
3067 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3070 /* Regs used in phi nodes are not included in
3071 global_live_at_start, since they are live only along a
3072 particular edge. Set those regs that are live because of a
3073 phi node alternative corresponding to this particular block. */
3074 for_each_successor_phi (bb, &set_phi_alternative_reg,
3077 if (bb == ENTRY_BLOCK_PTR)
3079 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3083 /* On our first pass through this block, we'll go ahead and continue.
3084 Recognize first pass by local_set NULL. On subsequent passes, we
3085 get to skip out early if live_at_end wouldn't have changed. */
3087 if (bb->local_set == NULL)
3089 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
	  /* If any bits were removed from live_at_end, we'll have to
	     rescan the block.  This wouldn't be necessary if we had
	     precalculated local_live; however, with PROP_SCAN_DEAD_CODE,
	     local_live really depends on live_at_end.  */
3098 CLEAR_REG_SET (tmp);
3099 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3100 new_live_at_end, BITMAP_AND_COMPL);
	      /* Find the set of changed bits.  Take this opportunity
		 to notice when this set is empty and exit early.  */
3106 CLEAR_REG_SET (tmp);
3107 changed = bitmap_operation (tmp, bb->global_live_at_end,
3108 new_live_at_end, BITMAP_XOR);
3112 /* If any of the changed bits overlap with local_set,
3113 we'll have to rescan the block. Detect overlap by
3114 the AND with ~local_set turning off bits. */
3115 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3120 /* Let our caller know that BB changed enough to require its
3121 death notes updated. */
3122 SET_BIT (blocks_out, bb->index);
3126 /* Add to live_at_start the set of all registers in
3127 new_live_at_end that aren't in the old live_at_end. */
3129 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3131 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3133 changed = bitmap_operation (bb->global_live_at_start,
3134 bb->global_live_at_start,
3141 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3143 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3144 into live_at_start. */
3145 propagate_block (bb, new_live_at_end, bb->local_set, flags);
      /* If live_at_start didn't change, no need to go further.  */
3148 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3151 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3154 /* Queue all predecessors of BB so that we may re-examine
3155 their live_at_end. */
3156 for (e = bb->pred; e ; e = e->pred_next)
3158 basic_block pb = e->src;
3159 if (pb->aux == NULL)
3170 FREE_REG_SET (new_live_at_end);
3172 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3174 basic_block bb = BASIC_BLOCK (i);
3175 FREE_REG_SET (bb->local_set);
3181 /* Subroutines of life analysis. */
3183 /* Allocate the permanent data structures that represent the results
3184 of life analysis. Not static since used also for stupid life analysis. */
3187 allocate_bb_life_data ()
3191 for (i = 0; i < n_basic_blocks; i++)
3193 basic_block bb = BASIC_BLOCK (i);
3195 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3196 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3199 ENTRY_BLOCK_PTR->global_live_at_end
3200 = OBSTACK_ALLOC_REG_SET (function_obstack);
3201 EXIT_BLOCK_PTR->global_live_at_start
3202 = OBSTACK_ALLOC_REG_SET (function_obstack);
3204 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
3208 allocate_reg_life_data ()
3212 /* Recalculate the register space, in case it has grown. Old style
3213 vector oriented regsets would set regset_{size,bytes} here also. */
3214 allocate_reg_info (max_regno, FALSE, FALSE);
  /* Reset all the data we'll collect in propagate_block and its
     subroutines.  */
3218 for (i = 0; i < max_regno; i++)
3222 REG_N_DEATHS (i) = 0;
3223 REG_N_CALLS_CROSSED (i) = 0;
3224 REG_LIVE_LENGTH (i) = 0;
3225 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3229 /* Delete dead instructions for propagate_block. */
3232 propagate_block_delete_insn (bb, insn)
3236 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
3238 /* If the insn referred to a label, and that label was attached to
3239 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
3240 pretty much mandatory to delete it, because the ADDR_VEC may be
3241 referencing labels that no longer exist. */
3245 rtx label = XEXP (inote, 0);
3248 if (LABEL_NUSES (label) == 1
3249 && (next = next_nonnote_insn (label)) != NULL
3250 && GET_CODE (next) == JUMP_INSN
3251 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3252 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3254 rtx pat = PATTERN (next);
3255 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3256 int len = XVECLEN (pat, diff_vec_p);
3259 for (i = 0; i < len; i++)
3260 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3262 flow_delete_insn (next);
3266 if (bb->end == insn)
3267 bb->end = PREV_INSN (insn);
3268 flow_delete_insn (insn);
3271 /* Delete dead libcalls for propagate_block. Return the insn
3272 before the libcall. */
3275 propagate_block_delete_libcall (bb, insn, note)
3279 rtx first = XEXP (note, 0);
3280 rtx before = PREV_INSN (first);
3282 if (insn == bb->end)
3285 flow_delete_insn_chain (first, insn);
3289 /* Compute the registers live at the beginning of a basic block BB from
3290 those live at the end.
3292 When called, REG_LIVE contains those live at the end. On return, it
3293 contains those live at the beginning.
3295 LOCAL_SET, if non-null, will be set with all registers killed by
3296 this basic block. */
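/* A worked example (hypothetical): with live_at_end = {r1}, the
   backward scan over

	(set (reg r1) (const_int 0))
	(set (reg r2) (reg r1))

   first processes the copy (r2 set and dead above it, r1 used and thus
   live above it), keeping {r1}; then the clear of r1 kills it, so the
   computed live-at-start set is empty.  */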
3299 propagate_block (bb, live, local_set, flags)
3305 struct propagate_block_info pbi;
3307 regset_head new_live_head, new_dead_head;
3310 pbi.reg_live = live;
3311 pbi.mem_set_list = NULL_RTX;
3312 pbi.local_set = local_set;
3316 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3317 pbi.reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
3319 pbi.reg_next_use = NULL;
3321 pbi.new_live = INITIALIZE_REG_SET (new_live_head);
3322 pbi.new_dead = INITIALIZE_REG_SET (new_dead_head);
3324 if (flags & PROP_REG_INFO)
3328 /* Process the regs live at the end of the block.
3329 Mark them as not local to any one basic block. */
3330 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
3332 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
3336 /* Scan the block an insn at a time from end to beginning. */
3338 for (insn = bb->end; ; insn = prev)
3340 prev = PREV_INSN (insn);
3342 if (GET_CODE (insn) == NOTE)
3344 /* If this is a call to `setjmp' et al,
3345 warn if any non-volatile datum is live. */
3347 if ((flags & PROP_REG_INFO)
3348 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3349 IOR_REG_SET (regs_live_at_setjmp, pbi.reg_live);
3352 /* Update the life-status of regs for this insn.
3353 First DEAD gets which regs are set in this insn
3354 then LIVE gets which regs are used in this insn.
3355 Then the regs live before the insn
3356 are those live after, with DEAD regs turned off,
3357 and then LIVE regs turned on. */
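      /* As a reference sketch (restating the rule above in set
	 notation):

		live_before = (live_after & ~DEAD) | LIVE

	 which is exactly the AND_COMPL_REG_SET / IOR_REG_SET pair
	 applied to pbi.reg_live at the bottom of this loop.  */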
3359 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3362 rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3363 int insn_is_dead = 0;
3364 int libcall_is_dead = 0;
3366 if (flags & PROP_SCAN_DEAD_CODE)
3368 insn_is_dead = insn_dead_p (&pbi, PATTERN (insn), 0,
3370 libcall_is_dead = (insn_is_dead && note != 0
3371 && libcall_dead_p (&pbi, PATTERN (insn),
3375 /* We almost certainly don't want to delete prologue or epilogue
	     instructions.  Warn about probable compiler lossage.  */
3379 && (((HAVE_epilogue || HAVE_prologue)
3380 && prologue_epilogue_contains (insn))
3381 || (HAVE_sibcall_epilogue
3382 && sibcall_epilogue_contains (insn))))
3384 if (flags & PROP_KILL_DEAD_CODE)
3386 warning ("ICE: would have deleted prologue/epilogue insn");
3387 if (!inhibit_warnings)
3390 libcall_is_dead = insn_is_dead = 0;
	  /* If an instruction consists of just dead store(s) on final pass,
	     delete it.  */
3395 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3397 if (libcall_is_dead)
3399 prev = propagate_block_delete_libcall (bb, insn, note);
3400 insn = NEXT_INSN (prev);
3403 propagate_block_delete_insn (bb, insn);
3405 /* CC0 is now known to be dead. Either this insn used it,
3406 in which case it doesn't anymore, or clobbered it,
3407 so the next insn can't use it. */
3413 /* See if this is an increment or decrement that can be
3414 merged into a following memory address. */
3417 register rtx x = single_set (insn);
3419 /* Does this instruction increment or decrement a register? */
3420 if (!reload_completed
3421 && (flags & PROP_AUTOINC)
3423 && GET_CODE (SET_DEST (x)) == REG
3424 && (GET_CODE (SET_SRC (x)) == PLUS
3425 || GET_CODE (SET_SRC (x)) == MINUS)
3426 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3427 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3428 /* Ok, look for a following memory ref we can combine with.
3429 If one is found, change the memory ref to a PRE_INC
3430 or PRE_DEC, cancel this insn, and return 1.
3431 Return 0 if nothing has been done. */
3432 && try_pre_increment_1 (&pbi, insn))
3435 #endif /* AUTO_INC_DEC */
3437 CLEAR_REG_SET (pbi.new_live);
3438 CLEAR_REG_SET (pbi.new_dead);
3440 /* If this is not the final pass, and this insn is copying the
3441 value of a library call and it's dead, don't scan the
3442 insns that perform the library call, so that the call's
3443 arguments are not marked live. */
3444 if (libcall_is_dead)
3446 /* Record the death of the dest reg. */
3447 mark_set_regs (&pbi, PATTERN (insn), insn);
3449 insn = XEXP (note, 0);
3450 prev = PREV_INSN (insn);
3452 else if (GET_CODE (PATTERN (insn)) == SET
3453 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3454 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3455 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3456 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3457 /* We have an insn to pop a constant amount off the stack.
3458 (Such insns use PLUS regardless of the direction of the stack,
3459 and any insn to adjust the stack by a constant is always a pop.)
3460 These insns, if not dead stores, have no effect on life. */
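	  /* E.g. (illustrative), popping 8 bytes off the stack:

		(set (reg sp) (plus (reg sp) (const_int 8)))

	     Note the PLUS appears regardless of stack direction, as
	     the comment above says.  */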
3464 /* Any regs live at the time of a call instruction
3465 must not go in a register clobbered by calls.
3466 Find all regs now live and record this for them. */
3468 if (GET_CODE (insn) == CALL_INSN
3469 && (flags & PROP_REG_INFO))
3470 EXECUTE_IF_SET_IN_REG_SET (pbi.reg_live, 0, i,
3471 { REG_N_CALLS_CROSSED (i)++; });
3473 /* Record sets. Do this even for dead instructions,
	     since they would have killed the values if they hadn't
	     been deleted.  */
3476 mark_set_regs (&pbi, PATTERN (insn), insn);
3478 /* If an insn doesn't use CC0, it becomes dead since we
3479 assume that every insn clobbers it. So show it dead here;
3480 mark_used_regs will set it live if it is referenced. */
3485 mark_used_regs (&pbi, PATTERN (insn), NULL_RTX, insn);
3487 /* Sometimes we may have inserted something before INSN
3488 (such as a move) when we make an auto-inc. So ensure
3489 we will scan those insns. */
3491 prev = PREV_INSN (insn);
3494 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3500 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3501 cond = COND_EXEC_TEST (PATTERN (insn));
3503 /* Non-constant calls clobber memory. */
3504 if (! CONST_CALL_P (insn))
3505 free_EXPR_LIST_list (&pbi.mem_set_list);
3507 /* There may be extra registers to be clobbered. */
3508 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3510 note = XEXP (note, 1))
3511 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3512 mark_set_1 (&pbi, XEXP (XEXP (note, 0), 0),
3515 /* Calls change all call-used and global registers. */
3516 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3517 if (call_used_regs[i] && ! global_regs[i]
3521 mark_set_reg (&pbi, gen_rtx_REG (reg_raw_mode[i], i),
3522 cond, &dummy, &dummy);
3525 /* Calls use their arguments. */
3526 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3528 note = XEXP (note, 1))
3529 if (GET_CODE (XEXP (note, 0)) == USE)
3530 mark_used_regs (&pbi, XEXP (XEXP (note, 0), 0),
3533 /* The stack ptr is used (honorarily) by a CALL insn. */
3534 SET_REGNO_REG_SET (pbi.new_live, STACK_POINTER_REGNUM);
3536 /* Calls may also reference any of the global registers,
3537 so they are made live. */
3538 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3540 mark_used_reg (&pbi, gen_rtx_REG (reg_raw_mode[i], i),
3545 /* Update reg_live for the registers killed and used. */
3546 AND_COMPL_REG_SET (pbi.reg_live, pbi.new_dead);
3547 IOR_REG_SET (pbi.reg_live, pbi.new_live);
3549 /* On final pass, update counts of how many insns in which
3550 each reg is live. */
3551 if (flags & PROP_REG_INFO)
3552 EXECUTE_IF_SET_IN_REG_SET (pbi.reg_live, 0, i,
3553 { REG_LIVE_LENGTH (i)++; });
3556 if (insn == bb->head)
3560 FREE_REG_SET (pbi.new_live);
3561 FREE_REG_SET (pbi.new_dead);
3562 free_EXPR_LIST_list (&pbi.mem_set_list);
3564 if (pbi.reg_next_use)
3565 free (pbi.reg_next_use);
3569 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3570 (SET expressions whose destinations are registers dead after the insn).
3571 NEEDED is the regset that says which regs are alive after the insn.
3573 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3575 If X is the entire body of an insn, NOTES contains the reg notes
3576 pertaining to the insn. */
3579 insn_dead_p (pbi, x, call_ok, notes)
3580 struct propagate_block_info *pbi;
3583 rtx notes ATTRIBUTE_UNUSED;
3585 enum rtx_code code = GET_CODE (x);
3588 /* If flow is invoked after reload, we must take existing AUTO_INC
     expressions into account.  */
3590 if (reload_completed)
3592 for ( ; notes; notes = XEXP (notes, 1))
3594 if (REG_NOTE_KIND (notes) == REG_INC)
3596 int regno = REGNO (XEXP (notes, 0));
3598 /* Don't delete insns to set global regs. */
3599 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3600 || REGNO_REG_SET_P (pbi->reg_live, regno))
3607 /* If setting something that's a reg or part of one,
3608 see if that register's altered value will be live. */
3612 rtx r = SET_DEST (x);
3615 if (GET_CODE (r) == CC0)
3616 return ! pbi->cc0_live;
3619 /* A SET that is a subroutine call cannot be dead. */
3620 if (GET_CODE (SET_SRC (x)) == CALL)
3626 /* Don't eliminate loads from volatile memory or volatile asms. */
3627 else if (volatile_refs_p (SET_SRC (x)))
3630 if (GET_CODE (r) == MEM)
3634 if (MEM_VOLATILE_P (r))
3637 /* Walk the set of memory locations we are currently tracking
3638 and see if one is an identical match to this memory location.
3639 If so, this memory write is dead (remember, we're walking
	     backwards from the end of the block to the start).  */
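	  /* E.g. (illustrative), with the backward scan having already
	     recorded

		(set (mem (reg 100)) (reg 2))

	     on mem_set_list (via mark_set_1), a store

		(set (mem (reg 100)) (reg 1))

	     from earlier in the insn stream, with no intervening read
	     or address change, matches it here and is therefore dead.  */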
3641 temp = pbi->mem_set_list;
3644 if (rtx_equal_p (XEXP (temp, 0), r))
3646 temp = XEXP (temp, 1);
3651 while (GET_CODE (r) == SUBREG
3652 || GET_CODE (r) == STRICT_LOW_PART
3653 || GET_CODE (r) == ZERO_EXTRACT)
3656 if (GET_CODE (r) == REG)
3658 int regno = REGNO (r);
3661 if (REGNO_REG_SET_P (pbi->reg_live, regno))
3664 /* If this is a hard register, verify that subsequent
3665 words are not needed. */
3666 if (regno < FIRST_PSEUDO_REGISTER)
	    {
	      int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
	      while (--n > 0)
		if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
		  return 0;
	    }
3675 /* Don't delete insns to set global regs. */
3676 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3679 /* Make sure insns to set the stack pointer aren't deleted. */
3680 if (regno == STACK_POINTER_REGNUM)
3683 /* Make sure insns to set the frame pointer aren't deleted. */
3684 if (regno == FRAME_POINTER_REGNUM
3685 && (! reload_completed || frame_pointer_needed))
3687 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3688 if (regno == HARD_FRAME_POINTER_REGNUM
3689 && (! reload_completed || frame_pointer_needed))
3693 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3694 /* Make sure insns to set arg pointer are never deleted
3695 (if the arg pointer isn't fixed, there will be a USE
3696 for it, so we can treat it normally). */
3697 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3701 /* Otherwise, the set is dead. */
3707 /* If performing several activities, insn is dead if each activity
3708 is individually dead. Also, CLOBBERs and USEs can be ignored; a
3709 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
3711 else if (code == PARALLEL)
3713 int i = XVECLEN (x, 0);
3715 for (i--; i >= 0; i--)
3716 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3717 && GET_CODE (XVECEXP (x, 0, i)) != USE
3718 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
3724 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3725 is not necessarily true for hard registers. */
3726 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3727 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3728 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
3731 /* We do not check other CLOBBER or USE here. An insn consisting of just
3732 a CLOBBER or just a USE should not be deleted. */
3736 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3737 return 1 if the entire library call is dead.
3738 This is true if X copies a register (hard or pseudo)
3739 and if the hard return reg of the call insn is dead.
3740 (The caller should have tested the destination of X already for death.)
3742 If this insn doesn't just copy a register, then we don't
3743 have an ordinary libcall. In that case, cse could not have
3744 managed to substitute the source for the dest later on,
3745 so we can assume the libcall is dead.
3747 NEEDED is the bit vector of pseudoregs live before this insn.
3748 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
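/* An illustrative (hypothetical) libcall sequence:

	(insn 10 ...)				;; REG_LIBCALL -> 14
	(call_insn 12 ...)			;; sets (reg:SI 0)
	(insn 14 (set (reg 100) (reg:SI 0)))	;; REG_RETVAL -> 10

   If insn 14 is dead and hard reg 0 is dead after the call, the entire
   sequence does no useful work and may be deleted.  */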
3751 libcall_dead_p (pbi, x, note, insn)
3752 struct propagate_block_info *pbi;
3757 register RTX_CODE code = GET_CODE (x);
3761 register rtx r = SET_SRC (x);
3762 if (GET_CODE (r) == REG)
3764 rtx call = XEXP (note, 0);
3768 /* Find the call insn. */
3769 while (call != insn && GET_CODE (call) != CALL_INSN)
3770 call = NEXT_INSN (call);
3772 /* If there is none, do nothing special,
3773 since ordinary death handling can understand these insns. */
3777 /* See if the hard reg holding the value is dead.
3778 If this is a PARALLEL, find the call within it. */
3779 call_pat = PATTERN (call);
3780 if (GET_CODE (call_pat) == PARALLEL)
3782 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3783 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
3784 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
3787 /* This may be a library call that is returning a value
3788 via invisible pointer. Do nothing special, since
3789 ordinary death handling can understand these insns. */
3793 call_pat = XVECEXP (call_pat, 0, i);
3796 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
3802 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3803 live at function entry. Don't count global register variables, variables
3804 in registers that can be used for function arg passing, or variables in
3805 fixed hard registers. */
3808 regno_uninitialized (regno)
3811 if (n_basic_blocks == 0
3812 || (regno < FIRST_PSEUDO_REGISTER
3813 && (global_regs[regno]
3814 || fixed_regs[regno]
3815 || FUNCTION_ARG_REGNO_P (regno))))
3818 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3821 /* 1 if register REGNO was alive at a place where `setjmp' was called
3822 and was set more than once or is an argument.
3823 Such regs may be clobbered by `longjmp'. */
3826 regno_clobbered_at_setjmp (regno)
3829 if (n_basic_blocks == 0)
3832 return ((REG_N_SETS (regno) > 1
3833 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3834 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
3837 /* INSN references memory, possibly using autoincrement addressing modes.
3838 Find any entries on the mem_set_list that need to be invalidated due
3839 to an address change. */
3841 invalidate_mems_from_autoinc (pbi, insn)
3842 struct propagate_block_info *pbi;
  rtx note;
3846 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3848 if (REG_NOTE_KIND (note) == REG_INC)
3850 rtx temp = pbi->mem_set_list;
3851 rtx prev = NULL_RTX;
3856 next = XEXP (temp, 1);
3857 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
3859 /* Splice temp out of list. */
3861 XEXP (prev, 1) = next;
3863 pbi->mem_set_list = next;
3864 free_EXPR_LIST_node (temp);
3874 /* Process the registers that are set within X. Their bits are set to
3875 1 in the regset DEAD, because they are dead prior to this insn.
3877 If INSN is nonzero, it is the insn being processed.
3879 FLAGS is the set of operations to perform. */
3882 mark_set_regs (pbi, x, insn)
3883 struct propagate_block_info *pbi;
3886 rtx cond = NULL_RTX;
3889 switch (GET_CODE (x))
3893 mark_set_1 (pbi, SET_DEST (x), cond, insn);
3897 cond = COND_EXEC_TEST (x);
3898 x = COND_EXEC_CODE (x);
3904 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3906 rtx sub = XVECEXP (x, 0, i);
3907 switch (GET_CODE (sub))
3910 if (cond != NULL_RTX)
3913 cond = COND_EXEC_TEST (sub);
3914 sub = COND_EXEC_CODE (sub);
3915 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
3921 mark_set_1 (pbi, SET_DEST (sub), cond, insn);
3936 /* Process a single SET rtx, X. */
3939 mark_set_1 (pbi, reg, cond, insn)
3940 struct propagate_block_info *pbi;
3941 rtx reg, cond, insn;
3943 register int regno = -1;
3944 int flags = pbi->flags;
3946 /* Some targets place small structures in registers for
3947 return values of functions. We have to detect this
3948 case specially here to get correct flow information. */
3949 if (GET_CODE (reg) == PARALLEL
3950 && GET_MODE (reg) == BLKmode)
3954 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
3955 mark_set_1 (pbi, XVECEXP (reg, 0, i), cond, insn);
3959 /* Modifying just one hardware register of a multi-reg value or just a
3960 byte field of a register does not mean the value from before this insn
3961 is now dead. But it does mean liveness of that register at the end of
3962 the block is significant.
3964 Within mark_set_1, however, we treat it as if the register is indeed
3965 modified. mark_used_regs will, however, also treat this register as
3966 being used. Thus, we treat these insns as setting a new value for the
     register as a function of its old value.  This causes LOG_LINKS to be
3968 made appropriately and this will help combine.
3970 ??? This is all done incorrectly. We should not be setting bits in
3971 new_dead for these registers, since, as we just explained, they are
3972 not dead. We should be setting bits in local_set, and updating
3973 LOG_LINKS, but that is different. */
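  /* E.g. (illustrative):

	(set (strict_low_part (subreg:QI (reg:SI 100) 0)) ...)

     stores only the low byte; the upper bits of reg 100 keep their old
     value, so the insn effectively both uses and sets the register.  */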
3975 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
3976 || GET_CODE (reg) == SIGN_EXTRACT
3977 || GET_CODE (reg) == STRICT_LOW_PART)
3978 reg = XEXP (reg, 0);
3980 /* If this set is a MEM, then it kills any aliased writes.
3981 If this set is a REG, then it kills any MEMs which use the reg. */
3982 if (flags & PROP_SCAN_DEAD_CODE)
3984 if (GET_CODE (reg) == MEM || GET_CODE (reg) == REG)
3986 rtx temp = pbi->mem_set_list;
3987 rtx prev = NULL_RTX;
3992 next = XEXP (temp, 1);
3993 if ((GET_CODE (reg) == MEM
3994 && output_dependence (XEXP (temp, 0), reg))
3995 || (GET_CODE (reg) == REG
3996 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
3998 /* Splice this entry out of the list. */
4000 XEXP (prev, 1) = next;
4002 pbi->mem_set_list = next;
4003 free_EXPR_LIST_node (temp);
	  /* If the memory reference had embedded side effects (autoincrement
	     address modes), then we may need to kill some entries on the
	     memory set list.  */
4014 if (insn && GET_CODE (reg) == MEM)
4015 invalidate_mems_from_autoinc (pbi, insn);
4017 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
4018 /* We do not know the size of a BLKmode store, so we do not track
4019 them for redundant store elimination. */
4020 && GET_MODE (reg) != BLKmode
4021 /* There are no REG_INC notes for SP, so we can't assume we'll see
4022 everything that invalidates it. To be safe, don't eliminate any
	     stores through SP; none of them should be redundant anyway.  */
4024 && ! reg_mentioned_p (stack_pointer_rtx, reg))
4025 pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
4028 if (GET_CODE (reg) == REG
4029 && (regno = REGNO (reg),
4030 ! (regno == FRAME_POINTER_REGNUM
4031 && (! reload_completed || frame_pointer_needed)))
4032 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4033 && ! (regno == HARD_FRAME_POINTER_REGNUM
4034 && (! reload_completed || frame_pointer_needed))
4036 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4037 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4041 int some_was_live, some_was_dead;
      /* Perform the pbi data structure update.  */
4044 if (! mark_set_reg (pbi, reg, cond, &some_was_live, &some_was_dead))
4047 /* Additional data to record if this is the final pass. */
4048 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
4049 | PROP_DEATH_NOTES | PROP_AUTOINC))
4052 register int blocknum = pbi->bb->index;
4055 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4056 y = pbi->reg_next_use[regno];
	  /* If this is a hard reg, record that this function uses the reg.  */
4060 if (regno < FIRST_PSEUDO_REGISTER)
4063 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
4065 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4066 for (i = regno; i < endregno; i++)
		  /* The next use is no longer "next", since a store
		     intervenes.  */
4070 pbi->reg_next_use[i] = 0;
4073 if (flags & PROP_REG_INFO)
4074 for (i = regno; i < endregno; i++)
4076 regs_ever_live[i] = 1;
	      /* The next use is no longer "next", since a store
		 intervenes.  */
4084 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4085 pbi->reg_next_use[regno] = 0;
4087 /* Keep track of which basic blocks each reg appears in. */
4089 if (flags & PROP_REG_INFO)
4091 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4092 REG_BASIC_BLOCK (regno) = blocknum;
4093 else if (REG_BASIC_BLOCK (regno) != blocknum)
4094 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4096 /* Count (weighted) references, stores, etc. This counts a
4097 register twice if it is modified, but that is correct. */
4098 REG_N_SETS (regno)++;
4099 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4101 /* The insns where a reg is live are normally counted
4102 elsewhere, but we want the count to include the insn
4103 where the reg is set, and the normal counting mechanism
4104 would not count it. */
4105 REG_LIVE_LENGTH (regno)++;
4109 if (! some_was_dead)
4111 if (flags & PROP_LOG_LINKS)
4113 /* Make a logical link from the next following insn
4114 that uses this register, back to this insn.
4115 The following insns have already been processed.
		 We don't build a LOG_LINK for hard registers contained
		 in ASM_OPERANDs.  If these registers get replaced,
4119 we might wind up changing the semantics of the insn,
4120 even if reload can make what appear to be valid
4121 assignments later. */
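	      /* E.g. (illustrative): if this insn (say insn 20) sets
		 (reg 100) and the next use seen by this backward scan
		 is insn 25 in the same block, LOG_LINKS (25) gains a
		 link back to insn 20, which combine later treats as a
		 candidate pair.  */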
4122 if (y && (BLOCK_NUM (y) == blocknum)
4123 && (regno >= FIRST_PSEUDO_REGISTER
4124 || asm_noperands (PATTERN (y)) < 0))
4125 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4128 else if (! some_was_live)
4130 if (flags & PROP_REG_INFO)
4131 REG_N_DEATHS (REGNO (reg))++;
4133 if (flags & PROP_DEATH_NOTES)
4135 /* Note that dead stores have already been deleted
4136 when possible. If we get here, we have found a
4137 dead store that cannot be eliminated (because the
4138 same insn does something useful). Indicate this
4139 by marking the reg being set as dying here. */
4141 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4146 if (flags & PROP_DEATH_NOTES)
4148 /* This is a case where we have a multi-word hard register
4149 and some, but not all, of the words of the register are
4150 needed in subsequent insns. Write REG_UNUSED notes
	     for those parts that were not needed.  This case should
	     be rare.  */
4156 for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
4158 if (! REGNO_REG_SET_P (pbi->reg_live, regno + i))
4162 gen_rtx_REG (reg_raw_mode[regno + i], regno + i),
4168 else if (GET_CODE (reg) == REG)
4170 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4171 pbi->reg_next_use[regno] = 0;
4174 /* If this is the last pass and this is a SCRATCH, show it will be dying
4175 here and count it. */
4176 else if (GET_CODE (reg) == SCRATCH)
4178 if (flags & PROP_DEATH_NOTES)
4180 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4184 /* Update data structures for a (possibly conditional) store into REG.
4185 Return true if REG is now unconditionally dead. */
4188 mark_set_reg (pbi, reg, cond, p_some_was_live, p_some_was_dead)
4189 struct propagate_block_info *pbi;
4191 rtx cond ATTRIBUTE_UNUSED;
4192 int *p_some_was_live, *p_some_was_dead;
4194 int regno = REGNO (reg);
4195 int some_was_live = REGNO_REG_SET_P (pbi->reg_live, regno);
4196 int some_was_dead = ! some_was_live;
4198 /* Mark it as a significant register for this basic block. */
  if (pbi->local_set)
    SET_REGNO_REG_SET (pbi->local_set, regno);
4202 /* A hard reg in a wide mode may really be multiple registers.
4203 If so, mark all of them just like the first. */
4204 if (regno < FIRST_PSEUDO_REGISTER)
    {
      int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
      while (--n > 0)
	{
	  int regno_n = regno + n;
	  int needed_regno = REGNO_REG_SET_P (pbi->reg_live, regno_n);
	  if (pbi->local_set)
	    SET_REGNO_REG_SET (pbi->local_set, regno_n);
	  some_was_live |= needed_regno;
	  some_was_dead |= ! needed_regno;
	}
    }
4219 *p_some_was_live = some_was_live;
4220 *p_some_was_dead = some_was_dead;
4222 /* The stack pointer is never dead. Well, not strictly true, but it's
4223 very difficult to tell from here. Hopefully combine_stack_adjustments
4224 will fix up the most egregious errors. */
4225 if (regno == STACK_POINTER_REGNUM)
4228 /* Mark it as dead before this insn. */
4229 SET_REGNO_REG_SET (pbi->new_dead, regno);
4230 if (regno < FIRST_PSEUDO_REGISTER)
    {
      int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
      while (--n > 0)
	SET_REGNO_REG_SET (pbi->new_dead, regno + n);
    }
4237 /* Unconditionally dead. */
/* X is a MEM found in INSN.  See if we can convert it into an auto-increment
   address.  */
4247 find_auto_inc (pbi, x, insn)
4248 struct propagate_block_info *pbi;
4252 rtx addr = XEXP (x, 0);
4253 HOST_WIDE_INT offset = 0;
4256 /* Here we detect use of an index register which might be good for
4257 postincrement, postdecrement, preincrement, or predecrement. */
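  /* For example (illustrative, on a target with HAVE_POST_INCREMENT):

	(set ... (mem:SI (reg 100)) ...)		;; use of *p
	(set (reg 100) (plus (reg 100) (const_int 4)))	;; p += 4

     may be rewritten so that the MEM becomes
     (mem:SI (post_inc (reg 100))) and the separate increment insn is
     deleted.  */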
4259 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4260 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
4262 if (GET_CODE (addr) == REG)
4265 register int size = GET_MODE_SIZE (GET_MODE (x));
4268 int regno = REGNO (addr);
4270 /* Is the next use an increment that might make auto-increment? */
4271 if ((incr = pbi->reg_next_use[regno]) != 0
4272 && (set = single_set (incr)) != 0
4273 && GET_CODE (set) == SET
4274 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
4275 /* Can't add side effects to jumps; if reg is spilled and
4276 reloaded, there's no way to store back the altered value. */
4277 && GET_CODE (insn) != JUMP_INSN
4278 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
4279 && XEXP (y, 0) == addr
4280 && GET_CODE (XEXP (y, 1)) == CONST_INT
4281 && ((HAVE_POST_INCREMENT
4282 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
4283 || (HAVE_POST_DECREMENT
4284 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
4285 || (HAVE_PRE_INCREMENT
4286 && (INTVAL (XEXP (y, 1)) == size && offset == size))
4287 || (HAVE_PRE_DECREMENT
4288 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
4289 /* Make sure this reg appears only once in this insn. */
4290 && (use = find_use_as_address (PATTERN (insn), addr, offset),
4291 use != 0 && use != (rtx) 1))
4293 rtx q = SET_DEST (set);
4294 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
4295 ? (offset ? PRE_INC : POST_INC)
4296 : (offset ? PRE_DEC : POST_DEC));
4298 if (dead_or_set_p (incr, addr)
4299 /* Mustn't autoinc an eliminable register. */
4300 && (regno >= FIRST_PSEUDO_REGISTER
4301 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
4303 /* This is the simple case. Try to make the auto-inc. If
4304 we can't, we are done. Otherwise, we will do any
4305 needed updates below. */
4306 if (! validate_change (insn, &XEXP (x, 0),
4307 gen_rtx_fmt_e (inc_code, Pmode, addr),
4311 else if (GET_CODE (q) == REG
		       /* PREV_INSN used here to check the semi-open interval
			  [insn,incr).  */
4314 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4315 /* We must also check for sets of q as q may be
4316 a call clobbered hard register and there may
4317 be a call between PREV_INSN (insn) and incr. */
4318 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4320 /* We have *p followed sometime later by q = p+size.
4321 Both p and q must be live afterward,
4322 and q is not used between INSN and its assignment.
4323 Change it to q = p, ...*q..., q = q+size.
4324 Then fall into the usual case. */
4329 emit_move_insn (q, addr);
4330 insns = get_insns ();
4333 bb = BLOCK_FOR_INSN (insn);
4334 for (temp = insns; temp; temp = NEXT_INSN (temp))
4335 set_block_for_insn (temp, bb);
4337 /* If we can't make the auto-inc, or can't make the
4338 replacement into Y, exit. There's no point in making
4339 the change below if we can't do the auto-inc and doing
4340 so is not correct in the pre-inc case. */
4342 validate_change (insn, &XEXP (x, 0),
4343 gen_rtx_fmt_e (inc_code, Pmode, q),
4345 validate_change (incr, &XEXP (y, 0), q, 1);
4346 if (! apply_change_group ())
4349 /* We now know we'll be doing this change, so emit the
4350 new insn(s) and do the updates. */
4351 emit_insns_before (insns, insn);
4353 if (BLOCK_FOR_INSN (insn)->head == insn)
4354 BLOCK_FOR_INSN (insn)->head = insns;
4356 /* INCR will become a NOTE and INSN won't contain a
4357 use of ADDR. If a use of ADDR was just placed in
4358 the insn before INSN, make that the next use.
4359 Otherwise, invalidate it. */
4360 if (GET_CODE (PREV_INSN (insn)) == INSN
4361 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4362 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
4363 pbi->reg_next_use[regno] = PREV_INSN (insn);
4365 pbi->reg_next_use[regno] = 0;
4370 /* REGNO is now used in INCR which is below INSN, but it
4371 previously wasn't live here. If we don't mark it as
4372 live, we'll put a REG_DEAD note for it on this insn,
4373 which is incorrect. */
4374 SET_REGNO_REG_SET (pbi->reg_live, regno);
4376 /* If there are any calls between INSN and INCR, show
4377 that REGNO now crosses them. */
4378 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4379 if (GET_CODE (temp) == CALL_INSN)
4380 REG_N_CALLS_CROSSED (regno)++;
4385 /* If we haven't returned, it means we were able to make the
4386 auto-inc, so update the status. First, record that this insn
4387 has an implicit side effect. */
4390 = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
4392 /* Modify the old increment-insn to simply copy
4393 the already-incremented value of our register. */
4394 if (! validate_change (incr, &SET_SRC (set), addr, 0))
4397 /* If that makes it a no-op (copying the register into itself) delete
	 it so it won't appear to be a "use" and a "set" of this
	 register.  */
4400 if (SET_DEST (set) == addr)
4402 /* If the original source was dead, it's dead now. */
4403 rtx note = find_reg_note (incr, REG_DEAD, NULL_RTX);
4404 if (note && XEXP (note, 0) != addr)
4405 SET_REGNO_REG_SET (pbi->new_dead, REGNO (XEXP (note, 0)));
4407 PUT_CODE (incr, NOTE);
4408 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4409 NOTE_SOURCE_FILE (incr) = 0;
4412 if (regno >= FIRST_PSEUDO_REGISTER)
4414 /* Count an extra reference to the reg. When a reg is
4415 incremented, spilling it is worse, so we want to make
4416 that less likely. */
4417 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4419 /* Count the increment as a setting of the register,
4420 even though it isn't a SET in rtl. */
4421 REG_N_SETS (regno)++;
4426 #endif /* AUTO_INC_DEC */
4429 mark_used_reg (pbi, reg, cond, insn)
4430 struct propagate_block_info *pbi;
4432 rtx cond ATTRIBUTE_UNUSED;
4435 int regno = REGNO (reg);
4436 int some_was_live = REGNO_REG_SET_P (pbi->reg_live, regno);
4437 int some_was_dead = ! some_was_live;
4439 SET_REGNO_REG_SET (pbi->new_live, regno);
4441 /* A hard reg in a wide mode may really be multiple registers.
4442 If so, mark all of them just like the first. */
4443 if (regno < FIRST_PSEUDO_REGISTER)
4445 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4448 int regno_n = regno + n;
4449 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, regno_n);
4451 SET_REGNO_REG_SET (pbi->new_live, regno_n);
4452 some_was_live |= needed_regno;
4453 some_was_dead |= ! needed_regno;
4457 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4459 /* Record where each reg is used, so when the reg is set we know
4460 the next insn that uses it. */
4461 pbi->reg_next_use[regno] = insn;
4464 if (pbi->flags & PROP_REG_INFO)
4466 if (regno < FIRST_PSEUDO_REGISTER)
4468 /* If this is a register we are going to try to eliminate,
4469 don't mark it live here. If we are successful in
4470 eliminating it, it need not be live unless it is used for
4471 pseudos, in which case it will have been set live when it
4472 was allocated to the pseudos. If the register will not
4473 be eliminated, reload will set it live at that point.
4475 Otherwise, record that this function uses this register. */
4477 if (! TEST_HARD_REG_BIT (elim_reg_set, regno))
4479 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4483 regs_ever_live[regno + --n] = 1;
4489 /* Keep track of which basic block each reg appears in. */
4491 register int blocknum = pbi->bb->index;
4492 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4493 REG_BASIC_BLOCK (regno) = blocknum;
4494 else if (REG_BASIC_BLOCK (regno) != blocknum)
4495 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4497 /* Count (weighted) number of uses of each reg. */
4498 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
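/* For instance, a use at loop_depth 2 adds 3 to REG_N_REFS, so a
   register used inside nested loops is counted as more valuable to
   keep in a hard register. */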
4502 /* Record and count the insns in which a reg dies. If it is used in
4503 this insn and was dead below the insn then it dies in this insn.
4504 If it was set in this insn, we do not make a REG_DEAD note;
4505 likewise if we already made such a note.
4507 ??? This could be done better. In new_dead we have a record of
4508 which registers are set or clobbered this insn (which in itself is
4509 slightly incorrect, see the commentary near strict_low_part in
4510 mark_set_1), which should be the set of registers that we do not
4511 wish to create death notes for under the above rule. Note that
4512 we have not yet processed the call-clobbered/call-used registers,
4513 and they do not quite follow the above rule, since we do want death
4514 notes for call-clobbered register arguments. Which begs the whole
4515 question of whether we should in fact have death notes for registers
4516 used and clobbered (but not set) in the same insn. The only useful
4517 thing we ought to be getting from dead_or_set_p is detection of
4518 duplicate death notes. */
4520 if ((pbi->flags & PROP_DEATH_NOTES)
4522 && ! dead_or_set_p (insn, reg))
4526 /* Check for the case where the register dying partially
4527 overlaps the register set by this insn. */
4528 if (regno < FIRST_PSEUDO_REGISTER
4529 && HARD_REGNO_NREGS (regno, GET_MODE (reg)) > 1)
4531 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4533 some_was_live |= dead_or_set_regno_p (insn, regno + n);
4536 /* If none of the words in X is needed, make a REG_DEAD note.
4537 Otherwise, we must make partial REG_DEAD notes. */
4538 if (! some_was_live)
4541 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
4542 REG_N_DEATHS (regno)++;
4546 /* Don't make a REG_DEAD note for a part of a register
4547 that is set in the insn. */
4549 n = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
4550 for (; n >= regno; n--)
4551 if (!REGNO_REG_SET_P (pbi->reg_live, n)
4552 && ! dead_or_set_regno_p (insn, n))
4554 = alloc_EXPR_LIST (REG_DEAD,
4555 gen_rtx_REG (reg_raw_mode[n], n),
4561 /* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
4562 This is done assuming the registers needed from X are those that
4563 have 1-bits in PBI->REG_LIVE.
INSN is the containing instruction. If INSN is dead, this function
is not called. */
4569 mark_used_regs (pbi, x, cond, insn)
4570 struct propagate_block_info *pbi;
4573 register RTX_CODE code;
4575 int flags = pbi->flags;
4578 code = GET_CODE (x);
/* If we are clobbering a MEM, mark any registers inside the address
   as being used. */
4600 if (GET_CODE (XEXP (x, 0)) == MEM)
4601 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
4605 /* Don't bother watching stores to mems if this is not the
4606 final pass. We'll not be deleting dead stores this round. */
4607 if (flags & PROP_SCAN_DEAD_CODE)
4609 /* Invalidate the data for the last MEM stored, but only if MEM is
4610 something that can be stored into. */
4611 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
4612 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
4613 ; /* needn't clear the memory set list */
4616 rtx temp = pbi->mem_set_list;
4617 rtx prev = NULL_RTX;
4622 next = XEXP (temp, 1);
4623 if (anti_dependence (XEXP (temp, 0), x))
4625 /* Splice temp out of the list. */
4627 XEXP (prev, 1) = next;
4629 pbi->mem_set_list = next;
4630 free_EXPR_LIST_node (temp);
/* If the memory reference had embedded side effects (autoincrement
   address modes), then we may need to kill some entries on the
   memory set list. */
4642 invalidate_mems_from_autoinc (pbi, insn);
4646 if (flags & PROP_AUTOINC)
4647 find_auto_inc (pbi, x, insn);
4652 if (GET_CODE (SUBREG_REG (x)) == REG
4653 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
4654 && (GET_MODE_SIZE (GET_MODE (x))
4655 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
4656 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
4658 /* While we're here, optimize this case. */
4660 if (GET_CODE (x) != REG)
/* Seeing a register here other than it being set means it is used;
   mark it as needed. */
4666 mark_used_reg (pbi, x, cond, insn);
4671 register rtx testreg = SET_DEST (x);
4674 /* If storing into MEM, don't show it as being used. But do
4675 show the address as being used. */
4676 if (GET_CODE (testreg) == MEM)
4679 if (flags & PROP_AUTOINC)
4680 find_auto_inc (pbi, testreg, insn);
4682 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
4683 mark_used_regs (pbi, SET_SRC (x), cond, insn);
4687 /* Storing in STRICT_LOW_PART is like storing in a reg
in that this SET might be dead, so ignore it in TESTREG,
but in some other ways it is like using the reg.
4691 Storing in a SUBREG or a bit field is like storing the entire
4692 register in that if the register's value is not used
4693 then this SET is not needed. */
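/* For example, (set (strict_low_part (subreg:QI (reg:SI 100) 0))
   (reg:QI 101)) replaces only the low byte; the remaining bits of
   reg 100 keep their old value, so the old value is still live.
   (The register numbers here are purely illustrative.)  */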
4694 while (GET_CODE (testreg) == STRICT_LOW_PART
4695 || GET_CODE (testreg) == ZERO_EXTRACT
4696 || GET_CODE (testreg) == SIGN_EXTRACT
4697 || GET_CODE (testreg) == SUBREG)
4699 if (GET_CODE (testreg) == SUBREG
4700 && GET_CODE (SUBREG_REG (testreg)) == REG
4701 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
4702 && (GET_MODE_SIZE (GET_MODE (testreg))
4703 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
4704 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
4706 /* Modifying a single register in an alternate mode
4707 does not use any of the old value. But these other
4708 ways of storing in a register do use the old value. */
4709 if (GET_CODE (testreg) == SUBREG
4710 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4715 testreg = XEXP (testreg, 0);
4718 /* If this is a store into a register, recursively scan the
4719 value being stored. */
4721 if ((GET_CODE (testreg) == PARALLEL
4722 && GET_MODE (testreg) == BLKmode)
4723 || (GET_CODE (testreg) == REG
4724 && (regno = REGNO (testreg),
4725 ! (regno == FRAME_POINTER_REGNUM
4726 && (! reload_completed || frame_pointer_needed)))
4727 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4728 && ! (regno == HARD_FRAME_POINTER_REGNUM
4729 && (! reload_completed || frame_pointer_needed))
4731 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4732 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4737 mark_used_regs (pbi, SET_DEST (x), cond, insn);
4738 mark_used_regs (pbi, SET_SRC (x), cond, insn);
4745 case UNSPEC_VOLATILE:
4749 /* Traditional and volatile asm instructions must be considered to use
4750 and clobber all hard registers, all pseudo-registers and all of
4751 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
4753 Consider for instance a volatile asm that changes the fpu rounding
4754 mode. An insn should not be moved across this even if it only uses
4755 pseudo-regs because it might give an incorrectly rounded result.
4757 ?!? Unfortunately, marking all hard registers as live causes massive
4758 problems for the register allocator and marking all pseudos as live
4759 creates mountains of uninitialized variable warnings.
4761 So for now, just clear the memory set list and mark any regs
4762 we can find in ASM_OPERANDS as used. */
4763 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
4764 free_EXPR_LIST_list (&pbi->mem_set_list);
4766 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
We cannot just fall through here since then we would be confused
by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
a traditional asm unlike its normal usage.
4770 if (code == ASM_OPERANDS)
4774 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
4775 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
4781 if (cond != NULL_RTX)
4784 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
4786 cond = COND_EXEC_TEST (x);
4787 x = COND_EXEC_CODE (x);
4791 /* We _do_not_ want to scan operands of phi nodes. Operands of
4792 a phi function are evaluated only when control reaches this
4793 block along a particular edge. Therefore, regs that appear
as arguments to phi should not be added to the global live at
start. */
4802 /* Recursively scan the operands of this expression. */
4805 register const char *fmt = GET_RTX_FORMAT (code);
4808 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4812 /* Tail recursive case: save a function call level. */
4818 mark_used_regs (pbi, XEXP (x, i), cond, insn);
4820 else if (fmt[i] == 'E')
4823 for (j = 0; j < XVECLEN (x, i); j++)
4824 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
4833 try_pre_increment_1 (pbi, insn)
4834 struct propagate_block_info *pbi;
4837 /* Find the next use of this reg. If in same basic block,
4838 make it do pre-increment or pre-decrement if appropriate. */
4839 rtx x = single_set (insn);
4840 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4841 * INTVAL (XEXP (SET_SRC (x), 1)));
4842 int regno = REGNO (SET_DEST (x));
4843 rtx y = pbi->reg_next_use[regno];
4845 && BLOCK_NUM (y) == BLOCK_NUM (insn)
4846 /* Don't do this if the reg dies, or gets set in y; a standard addressing
4847 mode would be better. */
4848 && ! dead_or_set_p (y, SET_DEST (x))
4849 && try_pre_increment (y, SET_DEST (x), amount))
4851 /* We have found a suitable auto-increment
4852 and already changed insn Y to do it.
4853 So flush this increment-instruction. */
4854 PUT_CODE (insn, NOTE);
4855 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4856 NOTE_SOURCE_FILE (insn) = 0;
4857 /* Count a reference to this reg for the increment
insn we are deleting. When a reg is incremented,
spilling it is worse, so we want to make that less likely. */
4861 if (regno >= FIRST_PSEUDO_REGISTER)
4863 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4864 REG_N_SETS (regno)++;
4871 /* Try to change INSN so that it does pre-increment or pre-decrement
4872 addressing on register REG in order to add AMOUNT to REG.
4873 AMOUNT is negative for pre-decrement.
4874 Returns 1 if the change could be made.
4875 This checks all about the validity of the result of modifying INSN. */
4878 try_pre_increment (insn, reg, amount)
4880 HOST_WIDE_INT amount;
4884 /* Nonzero if we can try to make a pre-increment or pre-decrement.
4885 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
4887 /* Nonzero if we can try to make a post-increment or post-decrement.
4888 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4889 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4890 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
4893 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
4896 /* From the sign of increment, see which possibilities are conceivable
4897 on this target machine. */
4898 if (HAVE_PRE_INCREMENT && amount > 0)
4900 if (HAVE_POST_INCREMENT && amount > 0)
4903 if (HAVE_PRE_DECREMENT && amount < 0)
4905 if (HAVE_POST_DECREMENT && amount < 0)
4908 if (! (pre_ok || post_ok))
4911 /* It is not safe to add a side effect to a jump insn
4912 because if the incremented register is spilled and must be reloaded
4913 there would be no way to store the incremented value back in memory. */
4915 if (GET_CODE (insn) == JUMP_INSN)
4920 use = find_use_as_address (PATTERN (insn), reg, 0);
4921 if (post_ok && (use == 0 || use == (rtx) 1))
4923 use = find_use_as_address (PATTERN (insn), reg, -amount);
4927 if (use == 0 || use == (rtx) 1)
4930 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4933 /* See if this combination of instruction and addressing mode exists. */
4934 if (! validate_change (insn, &XEXP (use, 0),
4935 gen_rtx_fmt_e (amount > 0
4936 ? (do_post ? POST_INC : PRE_INC)
4937 : (do_post ? POST_DEC : PRE_DEC),
4941 /* Record that this insn now has an implicit side effect on X. */
4942 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4946 #endif /* AUTO_INC_DEC */
4948 /* Find the place in the rtx X where REG is used as a memory address.
4949 Return the MEM rtx that so uses it.
4950 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4951 (plus REG (const_int PLUSCONST)).
4953 If such an address does not appear, return 0.
If REG appears more than once, or is used other than in such an address,
return (rtx) 1. */
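/* For example (an illustrative pattern only): searching
     (set (reg 100) (mem (plus (reg 101) (const_int 4))))
   for reg 101 with PLUSCONST == 4 returns the inner MEM, while with
   PLUSCONST == 0 it returns (rtx) 1, since reg 101 is used, but not
   as the bare address of a MEM. */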
4958 find_use_as_address (x, reg, plusconst)
4961 HOST_WIDE_INT plusconst;
4963 enum rtx_code code = GET_CODE (x);
4964 const char *fmt = GET_RTX_FORMAT (code);
4966 register rtx value = 0;
4969 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4972 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4973 && XEXP (XEXP (x, 0), 0) == reg
4974 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4975 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4978 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4980 /* If REG occurs inside a MEM used in a bit-field reference,
4981 that is unacceptable. */
4982 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4983 return (rtx) (HOST_WIDE_INT) 1;
4987 return (rtx) (HOST_WIDE_INT) 1;
4989 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4993 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4997 return (rtx) (HOST_WIDE_INT) 1;
4999 else if (fmt[i] == 'E')
5002 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5004 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
5008 return (rtx) (HOST_WIDE_INT) 1;
5016 /* Write information about registers and basic blocks into FILE.
5017 This is part of making a debugging dump. */
5020 dump_regset (r, outf)
5027 fputs (" (nil)", outf);
5031 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
5033 fprintf (outf, " %d", i);
5034 if (i < FIRST_PSEUDO_REGISTER)
5035 fprintf (outf, " [%s]",
5044 dump_regset (r, stderr);
5045 putc ('\n', stderr);
5049 dump_flow_info (file)
5053 static const char * const reg_class_names[] = REG_CLASS_NAMES;
5055 fprintf (file, "%d registers.\n", max_regno);
5056 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
5059 enum reg_class class, altclass;
5060 fprintf (file, "\nRegister %d used %d times across %d insns",
5061 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
5062 if (REG_BASIC_BLOCK (i) >= 0)
5063 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
5065 fprintf (file, "; set %d time%s", REG_N_SETS (i),
5066 (REG_N_SETS (i) == 1) ? "" : "s");
5067 if (REG_USERVAR_P (regno_reg_rtx[i]))
5068 fprintf (file, "; user var");
5069 if (REG_N_DEATHS (i) != 1)
5070 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
5071 if (REG_N_CALLS_CROSSED (i) == 1)
5072 fprintf (file, "; crosses 1 call");
5073 else if (REG_N_CALLS_CROSSED (i))
5074 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
5075 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
5076 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
5077 class = reg_preferred_class (i);
5078 altclass = reg_alternate_class (i);
5079 if (class != GENERAL_REGS || altclass != ALL_REGS)
5081 if (altclass == ALL_REGS || class == ALL_REGS)
5082 fprintf (file, "; pref %s", reg_class_names[(int) class]);
5083 else if (altclass == NO_REGS)
5084 fprintf (file, "; %s or none", reg_class_names[(int) class]);
5086 fprintf (file, "; pref %s, else %s",
5087 reg_class_names[(int) class],
5088 reg_class_names[(int) altclass]);
5090 if (REGNO_POINTER_FLAG (i))
5091 fprintf (file, "; pointer");
5092 fprintf (file, ".\n");
5095 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
5096 for (i = 0; i < n_basic_blocks; i++)
5098 register basic_block bb = BASIC_BLOCK (i);
5101 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d.\n",
5102 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
5104 fprintf (file, "Predecessors: ");
5105 for (e = bb->pred; e ; e = e->pred_next)
5106 dump_edge_info (file, e, 0);
5108 fprintf (file, "\nSuccessors: ");
5109 for (e = bb->succ; e ; e = e->succ_next)
5110 dump_edge_info (file, e, 1);
5112 fprintf (file, "\nRegisters live at start:");
5113 dump_regset (bb->global_live_at_start, file);
5115 fprintf (file, "\nRegisters live at end:");
5116 dump_regset (bb->global_live_at_end, file);
5127 dump_flow_info (stderr);
5131 dump_edge_info (file, e, do_succ)
5136 basic_block side = (do_succ ? e->dest : e->src);
5138 if (side == ENTRY_BLOCK_PTR)
5139 fputs (" ENTRY", file);
5140 else if (side == EXIT_BLOCK_PTR)
5141 fputs (" EXIT", file);
5143 fprintf (file, " %d", side->index);
5147 static const char * const bitnames[] = {
5148 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5151 int i, flags = e->flags;
5155 for (i = 0; flags; i++)
5156 if (flags & (1 << i))
5162 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5163 fputs (bitnames[i], file);
5165 fprintf (file, "%d", i);
5173 /* Print out one basic block with live information at start and end. */
5183 fprintf (outf, ";; Basic block %d, loop depth %d",
5184 bb->index, bb->loop_depth);
5185 if (bb->eh_beg != -1 || bb->eh_end != -1)
5186 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
5189 fputs (";; Predecessors: ", outf);
5190 for (e = bb->pred; e ; e = e->pred_next)
5191 dump_edge_info (outf, e, 0);
5194 fputs (";; Registers live at start:", outf);
5195 dump_regset (bb->global_live_at_start, outf);
5198 for (insn = bb->head, last = NEXT_INSN (bb->end);
5200 insn = NEXT_INSN (insn))
5201 print_rtl_single (outf, insn);
5203 fputs (";; Registers live at end:", outf);
5204 dump_regset (bb->global_live_at_end, outf);
5207 fputs (";; Successors: ", outf);
5208 for (e = bb->succ; e; e = e->succ_next)
5209 dump_edge_info (outf, e, 1);
5217 dump_bb (bb, stderr);
5224 dump_bb (BASIC_BLOCK(n), stderr);
/* Like print_rtl, but also print out live information for the start of each
basic block. */
5231 print_rtl_with_bb (outf, rtx_first)
5235 register rtx tmp_rtx;
5238 fprintf (outf, "(nil)\n");
5242 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5243 int max_uid = get_max_uid ();
5244 basic_block *start = (basic_block *)
5245 xcalloc (max_uid, sizeof (basic_block));
5246 basic_block *end = (basic_block *)
5247 xcalloc (max_uid, sizeof (basic_block));
5248 enum bb_state *in_bb_p = (enum bb_state *)
5249 xcalloc (max_uid, sizeof (enum bb_state));
5251 for (i = n_basic_blocks - 1; i >= 0; i--)
5253 basic_block bb = BASIC_BLOCK (i);
5256 start[INSN_UID (bb->head)] = bb;
5257 end[INSN_UID (bb->end)] = bb;
5258 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5260 enum bb_state state = IN_MULTIPLE_BB;
5261 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
5263 in_bb_p[INSN_UID(x)] = state;
5270 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5275 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5277 fprintf (outf, ";; Start of basic block %d, registers live:",
5279 dump_regset (bb->global_live_at_start, outf);
5283 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
5284 && GET_CODE (tmp_rtx) != NOTE
5285 && GET_CODE (tmp_rtx) != BARRIER)
5286 fprintf (outf, ";; Insn is not within a basic block\n");
5287 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
5288 fprintf (outf, ";; Insn is in multiple basic blocks\n");
5290 did_output = print_rtl_single (outf, tmp_rtx);
5292 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
5294 fprintf (outf, ";; End of basic block %d, registers live:\n",
5296 dump_regset (bb->global_live_at_end, outf);
5309 if (current_function_epilogue_delay_list != 0)
5311 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
5312 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
5313 tmp_rtx = XEXP (tmp_rtx, 1))
5314 print_rtl_single (outf, XEXP (tmp_rtx, 0));
5318 /* Compute dominator relationships using new flow graph structures. */
5320 compute_flow_dominators (dominators, post_dominators)
5321 sbitmap *dominators;
5322 sbitmap *post_dominators;
5325 sbitmap *temp_bitmap;
5327 basic_block *worklist, *workend, *qin, *qout;
5330 /* Allocate a worklist array/queue. Entries are only added to the
5331 list if they were not already on the list. So the size is
5332 bounded by the number of basic blocks. */
5333 worklist = (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);
5334 workend = &worklist[n_basic_blocks];
5336 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5337 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5341 /* The optimistic setting of dominators requires us to put every
5342 block on the work list initially. */
5343 qin = qout = worklist;
5344 for (bb = 0; bb < n_basic_blocks; bb++)
5346 *qin++ = BASIC_BLOCK (bb);
5347 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5349 qlen = n_basic_blocks;
/* We want a maximal solution, so initially assume everything dominates
everything else. */
5354 sbitmap_vector_ones (dominators, n_basic_blocks);
5356 /* Mark successors of the entry block so we can identify them below. */
5357 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5358 e->dest->aux = ENTRY_BLOCK_PTR;
5360 /* Iterate until the worklist is empty. */
5363 /* Take the first entry off the worklist. */
5364 basic_block b = *qout++;
5365 if (qout >= workend)
/* Compute the intersection of the dominators of all the
predecessor blocks.
5374 If one of the predecessor blocks is the ENTRY block, then the
5375 intersection of the dominators of the predecessor blocks is
5376 defined as the null set. We can identify such blocks by the
5377 special value in the AUX field in the block structure. */
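/* In other words, this loop solves the standard dominator dataflow
   equation (notation only, not code):

     dom (b) = { b } union ( intersect over p in pred (b) of dom (p) )

   starting from the optimistic assumption dom (b) = all blocks. */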
5378 if (b->aux == ENTRY_BLOCK_PTR)
5380 /* Do not clear the aux field for blocks which are
successors of the ENTRY block. That way we never
5382 add them to the worklist again.
5384 The intersect of dominators of the preds of this block is
5385 defined as the null set. */
5386 sbitmap_zero (temp_bitmap[bb]);
5390 /* Clear the aux field of this block so it can be added to
5391 the worklist again if necessary. */
5393 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
5396 /* Make sure each block always dominates itself. */
5397 SET_BIT (temp_bitmap[bb], bb);
5399 /* If the out state of this block changed, then we need to
5400 add the successors of this block to the worklist if they
5401 are not already on the worklist. */
5402 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
5404 for (e = b->succ; e; e = e->succ_next)
5406 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
5420 if (post_dominators)
5422 /* The optimistic setting of dominators requires us to put every
5423 block on the work list initially. */
5424 qin = qout = worklist;
5425 for (bb = 0; bb < n_basic_blocks; bb++)
5427 *qin++ = BASIC_BLOCK (bb);
5428 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5430 qlen = n_basic_blocks;
5433 /* We want a maximal solution, so initially assume everything post
5434 dominates everything else. */
5435 sbitmap_vector_ones (post_dominators, n_basic_blocks);
5437 /* Mark predecessors of the exit block so we can identify them below. */
5438 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5439 e->src->aux = EXIT_BLOCK_PTR;
5441 /* Iterate until the worklist is empty. */
5444 /* Take the first entry off the worklist. */
5445 basic_block b = *qout++;
5446 if (qout >= workend)
/* Compute the intersection of the post dominators of all the
successor blocks.
5455 If one of the successor blocks is the EXIT block, then the
5456 intersection of the dominators of the successor blocks is
5457 defined as the null set. We can identify such blocks by the
5458 special value in the AUX field in the block structure. */
5459 if (b->aux == EXIT_BLOCK_PTR)
5461 /* Do not clear the aux field for blocks which are
predecessors of the EXIT block. That way we never
5463 add them to the worklist again.
5465 The intersect of dominators of the succs of this block is
5466 defined as the null set. */
5467 sbitmap_zero (temp_bitmap[bb]);
5471 /* Clear the aux field of this block so it can be added to
5472 the worklist again if necessary. */
5474 sbitmap_intersection_of_succs (temp_bitmap[bb],
5475 post_dominators, bb);
5478 /* Make sure each block always post dominates itself. */
5479 SET_BIT (temp_bitmap[bb], bb);
/* If the out state of this block changed, then we need to
   add the predecessors of this block to the worklist if they
   are not already on the worklist. */
5484 if (sbitmap_a_and_b (post_dominators[bb],
5485 post_dominators[bb],
5488 for (e = b->pred; e; e = e->pred_next)
5490 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
5508 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
5511 compute_immediate_dominators (idom, dominators)
5513 sbitmap *dominators;
5518 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5520 /* Begin with tmp(n) = dom(n) - { n }. */
5521 for (b = n_basic_blocks; --b >= 0; )
5523 sbitmap_copy (tmp[b], dominators[b]);
5524 RESET_BIT (tmp[b], b);
5527 /* Subtract out all of our dominator's dominators. */
5528 for (b = n_basic_blocks; --b >= 0; )
5530 sbitmap tmp_b = tmp[b];
5533 for (s = n_basic_blocks; --s >= 0; )
5534 if (TEST_BIT (tmp_b, s))
5535 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
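/* At this point each tmp[b] should contain exactly B's immediate
   dominator. A small worked example (hypothetical block numbers):
   if dom (3) = {0, 1, 3} and dom (1) = {0, 1}, then tmp (3) starts
   as {0, 1}, and subtracting tmp (1) = {0} leaves {1}, so block 1
   is the immediate dominator of block 3. */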
5538 /* Find the one bit set in the bitmap and put it in the output array. */
5539 for (b = n_basic_blocks; --b >= 0; )
5542 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
5545 sbitmap_vector_free (tmp);
5548 /* Count for a single SET rtx, X. */
5551 count_reg_sets_1 (x, loop_depth)
5556 register rtx reg = SET_DEST (x);
5558 /* Find the register that's set/clobbered. */
5559 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
5560 || GET_CODE (reg) == SIGN_EXTRACT
5561 || GET_CODE (reg) == STRICT_LOW_PART)
5562 reg = XEXP (reg, 0);
5564 if (GET_CODE (reg) == PARALLEL
5565 && GET_MODE (reg) == BLKmode)
5568 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
5569 count_reg_sets_1 (XVECEXP (reg, 0, i), loop_depth);
5573 if (GET_CODE (reg) == REG)
5575 regno = REGNO (reg);
5576 if (regno >= FIRST_PSEUDO_REGISTER)
5578 /* Count (weighted) references, stores, etc. This counts a
5579 register twice if it is modified, but that is correct. */
5580 REG_N_SETS (regno)++;
5581 REG_N_REFS (regno) += loop_depth + 1;
5586 /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
5587 REG_N_REFS by the current loop depth for each SET or CLOBBER found. */
5590 count_reg_sets (x, loop_depth)
5594 register RTX_CODE code = GET_CODE (x);
5596 if (code == SET || code == CLOBBER)
5597 count_reg_sets_1 (x, loop_depth);
5598 else if (code == PARALLEL)
5601 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5603 code = GET_CODE (XVECEXP (x, 0, i));
5604 if (code == SET || code == CLOBBER)
5605 count_reg_sets_1 (XVECEXP (x, 0, i), loop_depth);
/* Increment REG_N_REFS by the current loop depth for each register reference
found in X. */
5614 count_reg_references (x, loop_depth)
5618 register RTX_CODE code;
5621 code = GET_CODE (x);
/* If we are clobbering a MEM, mark any registers inside the address
   as being used. */
5643 if (GET_CODE (XEXP (x, 0)) == MEM)
5644 count_reg_references (XEXP (XEXP (x, 0), 0), loop_depth);
5648 /* While we're here, optimize this case. */
/* In case the SUBREG is not of a register, don't optimize. */
5652 if (GET_CODE (x) != REG)
5654 count_reg_references (x, loop_depth);
5658 /* ... fall through ... */
5661 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
5662 REG_N_REFS (REGNO (x)) += loop_depth + 1;
5667 register rtx testreg = SET_DEST (x);
5670 /* If storing into MEM, don't show it as being used. But do
5671 show the address as being used. */
5672 if (GET_CODE (testreg) == MEM)
5674 count_reg_references (XEXP (testreg, 0), loop_depth);
5675 count_reg_references (SET_SRC (x), loop_depth);
5679 /* Storing in STRICT_LOW_PART is like storing in a reg
in that this SET might be dead, so ignore it in TESTREG,
but in some other ways it is like using the reg.
5683 Storing in a SUBREG or a bit field is like storing the entire
5684 register in that if the register's value is not used
5685 then this SET is not needed. */
5686 while (GET_CODE (testreg) == STRICT_LOW_PART
5687 || GET_CODE (testreg) == ZERO_EXTRACT
5688 || GET_CODE (testreg) == SIGN_EXTRACT
5689 || GET_CODE (testreg) == SUBREG)
5691 /* Modifying a single register in an alternate mode
5692 does not use any of the old value. But these other
5693 ways of storing in a register do use the old value. */
5694 if (GET_CODE (testreg) == SUBREG
5695 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5700 testreg = XEXP (testreg, 0);
5703 /* If this is a store into a register,
5704 recursively scan the value being stored. */
5706 if ((GET_CODE (testreg) == PARALLEL
5707 && GET_MODE (testreg) == BLKmode)
5708 || GET_CODE (testreg) == REG)
5710 count_reg_references (SET_SRC (x), loop_depth);
5712 count_reg_references (SET_DEST (x), loop_depth);
5722 /* Recursively scan the operands of this expression. */
5725 register const char *fmt = GET_RTX_FORMAT (code);
5728 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5732 /* Tail recursive case: save a function call level. */
5738 count_reg_references (XEXP (x, i), loop_depth);
5740 else if (fmt[i] == 'E')
5743 for (j = 0; j < XVECLEN (x, i); j++)
5744 count_reg_references (XVECEXP (x, i, j), loop_depth);
5750 /* Recompute register set/reference counts immediately prior to register
5753 This avoids problems with set/reference counts changing to/from values
5754 which have special meanings to the register allocators.
5756 Additionally, the reference counts are the primary component used by the
5757 register allocators to prioritize pseudos for allocation to hard regs.
5758 More accurate reference counts generally lead to better register allocation.
5760 F is the first insn to be scanned.
5762 LOOP_STEP denotes how much loop_depth should be incremented per
5763 loop nesting level in order to increase the ref count more for
5764 references in a loop.
5766 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
5767 possibly other information which is used by the register allocators. */
5770 recompute_reg_usage (f, loop_step)
5771 rtx f ATTRIBUTE_UNUSED;
5772 int loop_step ATTRIBUTE_UNUSED;
5779 /* Clear out the old data. */
5780 max_reg = max_reg_num ();
5781 for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
/* Scan each insn in the chain and count how many times each register is
   set/used. */
5789 for (index = 0; index < n_basic_blocks; index++)
5791 basic_block bb = BASIC_BLOCK (index);
5792 loop_depth = bb->loop_depth;
5793 for (insn = bb->head; insn; insn = NEXT_INSN (insn))
5795 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5799 /* This call will increment REG_N_SETS for each SET or CLOBBER
5800 of a register in INSN. It will also increment REG_N_REFS
5801 by the loop depth for each set of a register in INSN. */
5802 count_reg_sets (PATTERN (insn), loop_depth);
5804 /* count_reg_sets does not detect autoincrement address modes, so
5805 detect them here by looking at the notes attached to INSN. */
5806 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
5808 if (REG_NOTE_KIND (links) == REG_INC)
5809 /* Count (weighted) references, stores, etc. This
counts a register twice if it is modified, but
that is correct. */
5812 REG_N_SETS (REGNO (XEXP (links, 0)))++;
5815 /* This call will increment REG_N_REFS by the current loop depth
5816 for each reference to a register in INSN. */
5817 count_reg_references (PATTERN (insn), loop_depth);
5819 /* count_reg_references will not include counts for arguments to
5820 function calls, so detect them here by examining the
5821 CALL_INSN_FUNCTION_USAGE data. */
5822 if (GET_CODE (insn) == CALL_INSN)
5826 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5828 note = XEXP (note, 1))
5829 if (GET_CODE (XEXP (note, 0)) == USE)
5830 count_reg_references (XEXP (XEXP (note, 0), 0),
5834 if (insn == bb->end)
5840 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
5841 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
5842 of the number of registers that died. */
5845 count_or_remove_death_notes (blocks, kill)
5851 for (i = n_basic_blocks - 1; i >= 0; --i)
5856 if (blocks && ! TEST_BIT (blocks, i))
5859 bb = BASIC_BLOCK (i);
5861 for (insn = bb->head; ; insn = NEXT_INSN (insn))
5863 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
rtx *pprev = &REG_NOTES (insn);
5870 switch (REG_NOTE_KIND (link))
5873 if (GET_CODE (XEXP (link, 0)) == REG)
5875 rtx reg = XEXP (link, 0);
5878 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
5881 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
5889 rtx next = XEXP (link, 1);
5890 free_EXPR_LIST_node (link);
5891 *pprev = link = next;
5897 pprev = &XEXP (link, 1);
5904 if (insn == bb->end)
5912 /* Record INSN's block as BB. */
5915 set_block_for_insn (insn, bb)
5919 size_t uid = INSN_UID (insn);
5920 if (uid >= basic_block_for_insn->num_elements)
5924 /* Add one-eighth the size so we don't keep calling xrealloc. */
5925 new_size = uid + (uid + 7) / 8;
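/* For example, uid == 64 gives new_size == 64 + 71/8 == 72 with
   integer division, i.e. a growth step of roughly one-eighth. */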
5927 VARRAY_GROW (basic_block_for_insn, new_size);
5929 VARRAY_BB (basic_block_for_insn, uid) = bb;
5932 /* Record INSN's block number as BB. */
5933 /* ??? This has got to go. */
5936 set_block_num (insn, bb)
5940 set_block_for_insn (insn, BASIC_BLOCK (bb));
/* Verify CFG consistency. This function checks some CFG invariants
and aborts when something is wrong. The hope is that it will help
convert many optimization passes to ones that preserve a consistent CFG.

Currently it does the following checks:
5949 - test head/end pointers
5950 - overlapping of basic blocks
- edge list correctness
5952 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
- tails of basic blocks (ensure that the boundary is necessary)
5954 - scans body of the basic block for JUMP_INSN, CODE_LABEL
5955 and NOTE_INSN_BASIC_BLOCK
5956 - check that all insns are in the basic blocks
5957 (except the switch handling code, barriers and notes)
5958 - check that all returns are followed by barriers
In the future it can be extended to check a lot of other stuff as well
5961 (reachability of basic blocks, life information, etc. etc.). */
5966 const int max_uid = get_max_uid ();
5967 const rtx rtx_first = get_insns ();
5968 basic_block *bb_info;
5972 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
/* First pass check head/end pointers and set bb_info array used by
   later passes. */
5976 for (i = n_basic_blocks - 1; i >= 0; i--)
5978 basic_block bb = BASIC_BLOCK (i);
/* Check the head pointer and make sure that it is pointing into
   the insn list. */
5982 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
5987 error ("Head insn %d for block %d not found in the insn stream.",
5988 INSN_UID (bb->head), bb->index);
/* Check the end pointer and make sure that it is pointing into
   the insn list. */
5994 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5996 if (bb_info[INSN_UID (x)] != NULL)
5998 error ("Insn %d is in multiple basic blocks (%d and %d)",
5999 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
6002 bb_info[INSN_UID (x)] = bb;
6009 error ("End insn %d for block %d not found in the insn stream.",
6010 INSN_UID (bb->end), bb->index);
6015 /* Now check the basic blocks (boundaries etc.) */
6016 for (i = n_basic_blocks - 1; i >= 0; i--)
6018 basic_block bb = BASIC_BLOCK (i);
/* Check correctness of edge lists. */
6027 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
6029 fprintf (stderr, "Predecessor: ");
6030 dump_edge_info (stderr, e, 0);
6031 fprintf (stderr, "\nSuccessor: ");
6032 dump_edge_info (stderr, e, 1);
6036 if (e->dest != EXIT_BLOCK_PTR)
6038 edge e2 = e->dest->pred;
6039 while (e2 && e2 != e)
6043 error ("Basic block %i edge lists are corrupted", bb->index);
6055 error ("Basic block %d pred edge is corrupted", bb->index);
6056 fputs ("Predecessor: ", stderr);
6057 dump_edge_info (stderr, e, 0);
6058 fputs ("\nSuccessor: ", stderr);
6059 dump_edge_info (stderr, e, 1);
6060 fputc ('\n', stderr);
6063 if (e->src != ENTRY_BLOCK_PTR)
6065 edge e2 = e->src->succ;
6066 while (e2 && e2 != e)
6070 error ("Basic block %i edge lists are corrupted", bb->index);
/* The pointers are correct. Now check the header of the basic
block. It ought to contain an optional CODE_LABEL followed
by NOTE_BASIC_BLOCK. */
6081 if (GET_CODE (x) == CODE_LABEL)
6085 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
6091 if (GET_CODE (x) != NOTE
6092 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
6093 || NOTE_BASIC_BLOCK (x) != bb)
6095 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
6102 /* Do checks for empty blocks here */
6109 if (GET_CODE (x) == NOTE
6110 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
6112 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
6113 INSN_UID (x), bb->index);
6120 if (GET_CODE (x) == JUMP_INSN
6121 || GET_CODE (x) == CODE_LABEL
6122 || GET_CODE (x) == BARRIER)
6124 error ("In basic block %d:", bb->index);
6125 fatal_insn ("Flow control insn inside a basic block", x);
6136 if (!bb_info[INSN_UID (x)])
6138 switch (GET_CODE (x))
/* An addr_vec is placed outside any basic block. */
6147 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
6148 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
6149 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
6154 /* But in any case, non-deletable labels can appear anywhere. */
6158 fatal_insn ("Insn outside basic block", x);
6162 if (GET_RTX_CLASS (GET_CODE (x)) == 'i'
6163 && GET_CODE (x) == JUMP_INSN
6164 && returnjump_p (x) && ! condjump_p (x)
6165 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
6166 fatal_insn ("Return not followed by barrier", x);
6178 /* Functions to access an edge list with a vector representation.
6179 Enough data is kept such that given an index number, the
pred and succ that edge represents can be determined, or
given a pred and a succ, its index number can be returned.
This allows algorithms which consume a lot of memory to
6183 represent the normally full matrix of edge (pred,succ) with a
6184 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6185 wasted space in the client code due to sparse flow graphs. */
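/* A minimal usage sketch (assuming the create_edge_list entry point
   defined below and the NUM_EDGES / INDEX_EDGE accessors from
   basic-block.h):

     struct edge_list *elist = create_edge_list ();
     int i;
     for (i = 0; i < NUM_EDGES (elist); i++)
       {
         edge e = INDEX_EDGE (elist, i);
         fprintf (stderr, "%d -> %d\n", e->src->index, e->dest->index);
       }
     free_edge_list (elist);  */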
/* This function initializes the edge list. Basically the entire
flowgraph is processed, and all edges are assigned a number,
and the data structure is filled in. */
6193 struct edge_list *elist;
6199 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6203 /* Determine the number of edges in the flow graph by counting successor
6204 edges on each basic block. */
6205 for (x = 0; x < n_basic_blocks; x++)
6207 basic_block bb = BASIC_BLOCK (x);
6209 for (e = bb->succ; e; e = e->succ_next)
6212 /* Don't forget successors of the entry block. */
6213 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6216 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6217 elist->num_blocks = block_count;
6218 elist->num_edges = num_edges;
6219 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6223 /* Follow successors of the entry block, and register these edges. */
6224 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6226 elist->index_to_edge[num_edges] = e;
6230 for (x = 0; x < n_basic_blocks; x++)
6232 basic_block bb = BASIC_BLOCK (x);
6234 /* Follow all successors of blocks, and register these edges. */
6235 for (e = bb->succ; e; e = e->succ_next)
6237 elist->index_to_edge[num_edges] = e;
/* This function frees the memory associated with an edge list. */
6246 free_edge_list (elist)
6247 struct edge_list *elist;
6251 free (elist->index_to_edge);
6256 /* This function provides debug output showing an edge list. */
6258 print_edge_list (f, elist)
6260 struct edge_list *elist;
fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6264 elist->num_blocks - 2, elist->num_edges);
6266 for (x = 0; x < elist->num_edges; x++)
6268 fprintf (f, " %-4d - edge(", x);
6269 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6270 fprintf (f,"entry,");
6272 fprintf (f,"%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6274 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6275 fprintf (f,"exit)\n");
6277 fprintf (f,"%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
/* This function provides an internal consistency check of an edge list,
verifying that all edges are present, and that there are no extra edges. */
6285 verify_edge_list (f, elist)
6287 struct edge_list *elist;
6289 int x, pred, succ, index;
6292 for (x = 0; x < n_basic_blocks; x++)
6294 basic_block bb = BASIC_BLOCK (x);
6296 for (e = bb->succ; e; e = e->succ_next)
6298 pred = e->src->index;
6299 succ = e->dest->index;
6300 index = EDGE_INDEX (elist, e->src, e->dest);
6301 if (index == EDGE_INDEX_NO_EDGE)
fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
6306 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6307 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6308 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6309 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6310 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6311 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6314 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6316 pred = e->src->index;
6317 succ = e->dest->index;
6318 index = EDGE_INDEX (elist, e->src, e->dest);
6319 if (index == EDGE_INDEX_NO_EDGE)
fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
6324 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6325 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6326 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6327 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6328 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6329 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
/* We've verified that all the edges are in the list; now let's make sure
there are no spurious edges in the list. */
6334 for (pred = 0 ; pred < n_basic_blocks; pred++)
6335 for (succ = 0 ; succ < n_basic_blocks; succ++)
6337 basic_block p = BASIC_BLOCK (pred);
6338 basic_block s = BASIC_BLOCK (succ);
6342 for (e = p->succ; e; e = e->succ_next)
6348 for (e = s->pred; e; e = e->pred_next)
6354 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6355 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6356 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6358 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6359 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6360 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6361 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6362 BASIC_BLOCK (succ)));
6364 for (succ = 0 ; succ < n_basic_blocks; succ++)
6366 basic_block p = ENTRY_BLOCK_PTR;
6367 basic_block s = BASIC_BLOCK (succ);
6371 for (e = p->succ; e; e = e->succ_next)
6377 for (e = s->pred; e; e = e->pred_next)
6383 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6384 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6385 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6387 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6388 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6389 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6390 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6391 BASIC_BLOCK (succ)));
6393 for (pred = 0 ; pred < n_basic_blocks; pred++)
6395 basic_block p = BASIC_BLOCK (pred);
6396 basic_block s = EXIT_BLOCK_PTR;
6400 for (e = p->succ; e; e = e->succ_next)
6406 for (e = s->pred; e; e = e->pred_next)
6412 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6413 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6414 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6416 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6417 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6418 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6419 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6424 /* This routine will determine what, if any, edge there is between
6425 a specified predecessor and successor. */
6428 find_edge_index (edge_list, pred, succ)
6429 struct edge_list *edge_list;
6430 basic_block pred, succ;
6433 for (x = 0; x < NUM_EDGES (edge_list); x++)
6435 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6436 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6439 return (EDGE_INDEX_NO_EDGE);
6442 /* This function will remove an edge from the flow graph. */
6447 edge last_pred = NULL;
6448 edge last_succ = NULL;
6450 basic_block src, dest;
6453 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6459 last_succ->succ_next = e->succ_next;
6461 src->succ = e->succ_next;
6463 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6469 last_pred->pred_next = e->pred_next;
6471 dest->pred = e->pred_next;
/* This routine will remove any fake successor edges for a basic block.
When the edge is removed, it is also removed from whatever predecessor
list it is in. */
6481 remove_fake_successors (bb)
6485 for (e = bb->succ; e ; )
6489 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6494 /* This routine will remove all fake edges from the flow graph. If
6495 we remove all fake successors, it will automatically remove all
6496 fake predecessors. */
6498 remove_fake_edges ()
6502 for (x = 0; x < n_basic_blocks; x++)
6503 remove_fake_successors (BASIC_BLOCK (x));
6505 /* We've handled all successors except the entry block's. */
6506 remove_fake_successors (ENTRY_BLOCK_PTR);
/* This function will add a fake edge between any block which has no
successors, and the exit block. Some data flow equations require these
edges to exist. */
6513 add_noreturn_fake_exit_edges ()
6517 for (x = 0; x < n_basic_blocks; x++)
6518 if (BASIC_BLOCK (x)->succ == NULL)
6519 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
6522 /* Dump the list of basic blocks in the bitmap NODES. */
6524 flow_nodes_print (str, nodes, file)
6526 const sbitmap nodes;
6531 fprintf (file, "%s { ", str);
6532 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
6533 fputs ("}\n", file);
6537 /* Dump the list of exiting edges in the array EDGES. */
6539 flow_exits_print (str, edges, num_edges, file)
6547 fprintf (file, "%s { ", str);
6548 for (i = 0; i < num_edges; i++)
6549 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
6550 fputs ("}\n", file);
6554 /* Dump loop related CFG information. */
6556 flow_loops_cfg_dump (loops, file)
6557 const struct loops *loops;
6562 if (! loops->num || ! file || ! loops->cfg.dom)
6565 for (i = 0; i < n_basic_blocks; i++)
6569 fprintf (file, ";; %d succs { ", i);
6570 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
6571 fprintf (file, "%d ", succ->dest->index);
6572 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
6576 /* Dump the DFS node order. */
6577 if (loops->cfg.dfs_order)
6579 fputs (";; DFS order: ", file);
6580 for (i = 0; i < n_basic_blocks; i++)
6581 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
6587 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
6589 flow_loop_nested_p (outer, loop)
6593 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
6597 /* Dump the loop information specified by LOOPS to the stream FILE. */
6599 flow_loops_dump (loops, file, verbose)
6600 const struct loops *loops;
6607 num_loops = loops->num;
6608 if (! num_loops || ! file)
6611 fprintf (file, ";; %d loops found, %d levels\n",
6612 num_loops, loops->levels);
6614 for (i = 0; i < num_loops; i++)
6616 struct loop *loop = &loops->array[i];
6618 fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
6619 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
6620 loop->header->index, loop->latch->index,
6621 loop->pre_header ? loop->pre_header->index : -1,
6622 loop->depth, loop->level,
6623 (long) (loop->outer ? (loop->outer - loops->array) : -1));
6624 fprintf (file, ";; %d", loop->num_nodes);
6625 flow_nodes_print (" nodes", loop->nodes, file);
6626 fprintf (file, ";; %d", loop->num_exits);
6627 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
6633 for (j = 0; j < i; j++)
6635 struct loop *oloop = &loops->array[j];
6637 if (loop->header == oloop->header)
6642 smaller = loop->num_nodes < oloop->num_nodes;
/* If the union of LOOP and OLOOP is different from
the larger of LOOP and OLOOP, then LOOP and OLOOP
must be disjoint. */
6647 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
6648 smaller ? oloop : loop);
6649 fprintf (file, ";; loop header %d shared by loops %d, %d %s\n",
6650 loop->header->index, i, j,
6651 disjoint ? "disjoint" : "nested");
6658 /* Print diagnostics to compare our concept of a loop with
6659 what the loop notes say. */
6660 if (GET_CODE (PREV_INSN (loop->first->head)) != NOTE
6661 || NOTE_LINE_NUMBER (PREV_INSN (loop->first->head))
6662 != NOTE_INSN_LOOP_BEG)
6663 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
6664 INSN_UID (PREV_INSN (loop->first->head)));
6665 if (GET_CODE (NEXT_INSN (loop->last->end)) != NOTE
6666 || NOTE_LINE_NUMBER (NEXT_INSN (loop->last->end))
6667 != NOTE_INSN_LOOP_END)
6668 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
6669 INSN_UID (NEXT_INSN (loop->last->end)));
6674 flow_loops_cfg_dump (loops, file);
6678 /* Free all the memory allocated for LOOPS. */
6680 flow_loops_free (loops)
6681 struct loops *loops;
6690 /* Free the loop descriptors. */
6691 for (i = 0; i < loops->num; i++)
6693 struct loop *loop = &loops->array[i];
6696 sbitmap_free (loop->nodes);
6700 free (loops->array);
6701 loops->array = NULL;
6704 sbitmap_vector_free (loops->cfg.dom);
6705 if (loops->cfg.dfs_order)
6706 free (loops->cfg.dfs_order);
6708 sbitmap_free (loops->shared_headers);
6713 /* Find the exits from the loop using the bitmap of loop nodes NODES
and store in EXITS array. Return the number of exits from the
loop. */
6717 flow_loop_exits_find (nodes, exits)
6718 const sbitmap nodes;
6727 /* Check all nodes within the loop to see if there are any
successors not in the loop. Note that a node may have multiple
exiting edges. */
6731 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6732 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6734 basic_block dest = e->dest;
6736 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6744 *exits = (edge *) xmalloc (num_exits * sizeof (edge *));
6746 /* Store all exiting edges into an array. */
6748 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6749 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6751 basic_block dest = e->dest;
6753 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6754 (*exits)[num_exits++] = e;
6762 /* Find the nodes contained within the loop with header HEADER and
latch LATCH and store in NODES. Return the number of nodes within
the loop. */
6766 flow_loop_nodes_find (header, latch, nodes)
6775 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
6778 /* Start with only the loop header in the set of loop nodes. */
6779 sbitmap_zero (nodes);
6780 SET_BIT (nodes, header->index);
6782 header->loop_depth++;
6784 /* Push the loop latch on to the stack. */
6785 if (! TEST_BIT (nodes, latch->index))
6787 SET_BIT (nodes, latch->index);
6788 latch->loop_depth++;
6790 stack[sp++] = latch;
6799 for (e = node->pred; e; e = e->pred_next)
6801 basic_block ancestor = e->src;
/* If this ancestor has not already been marked as part of the
loop, add it to the set of loop nodes and push it on to the stack. */
6805 if (ancestor != ENTRY_BLOCK_PTR
6806 && ! TEST_BIT (nodes, ancestor->index))
6808 SET_BIT (nodes, ancestor->index);
6809 ancestor->loop_depth++;
6811 stack[sp++] = ancestor;
6820 /* Compute the depth first search order and store in the array
6821 DFS_ORDER, marking the nodes visited in VISITED. Returns the
6822 number of nodes visited. */
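/* For example, in a hypothetical diamond CFG with edges 0->1, 0->2,
   1->3 and 2->3, block 3 finishes first and is numbered 3, while
   block 0 finishes last and is numbered 0; smaller numbers therefore
   mean "completed later", i.e. a reverse postorder of the CFG. */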
6824 flow_depth_first_order_compute (dfs_order)
6833 /* Allocate stack for back-tracking up CFG. */
6834 stack = (edge *) xmalloc (n_basic_blocks * sizeof (edge));
6837 /* Allocate bitmap to track nodes that have been visited. */
6838 visited = sbitmap_alloc (n_basic_blocks);
6840 /* None of the nodes in the CFG have been visited yet. */
6841 sbitmap_zero (visited);
6843 /* Start with the first successor edge from the entry block. */
6844 e = ENTRY_BLOCK_PTR->succ;
6847 basic_block src = e->src;
6848 basic_block dest = e->dest;
6850 /* Mark that we have visited this node. */
6851 if (src != ENTRY_BLOCK_PTR)
6852 SET_BIT (visited, src->index);
6854 /* If this node has not been visited before, push the current
6855 edge on to the stack and proceed with the first successor
6856 edge of this node. */
6857 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6865 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6868 /* DEST has no successors (for example, a non-returning
6869 function is called) so do not push the current edge
6870 but carry on with its next successor. */
6871 dfs_order[dest->index] = n_basic_blocks - ++dfsnum;
6872 SET_BIT (visited, dest->index);
6875 while (! e->succ_next && src != ENTRY_BLOCK_PTR)
6877 dfs_order[src->index] = n_basic_blocks - ++dfsnum;
6879 /* Pop edge off stack. */
6887 sbitmap_free (visited);
/* The number of nodes visited should not be greater than
   n_basic_blocks. */
6891 if (dfsnum > n_basic_blocks)
6894 /* There are some nodes left in the CFG that are unreachable. */
6895 if (dfsnum < n_basic_blocks)
6901 /* Return the block for the pre-header of the loop with header
6902 HEADER where DOM specifies the dominator information. Return NULL if
6903 there is no pre-header. */
6905 flow_loop_pre_header_find (header, dom)
6909 basic_block pre_header;
6912 /* If block p is a predecessor of the header and is the only block
6913 that the header does not dominate, then it is the pre-header. */
6915 for (e = header->pred; e; e = e->pred_next)
6917 basic_block node = e->src;
6919 if (node != ENTRY_BLOCK_PTR
6920 && ! TEST_BIT (dom[node->index], header->index))
6922 if (pre_header == NULL)
6926 /* There are multiple edges into the header from outside
6927 the loop so there is no pre-header block. */
6937 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
6938 previously added. The insertion algorithm assumes that the loops
6939 are added in the order found by a depth first search of the CFG. */
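/* For example (hypothetical loops): if the DFS finds loop L1 first,
   then L2 nested inside L1, then L3 also inside L1 but disjoint from
   L2, successive calls produce L1->inner == L2, L2->next == L3, and
   both L2->outer and L3->outer == L1. */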
6941 flow_loop_tree_node_add (prevloop, loop)
6942 struct loop *prevloop;
6946 if (flow_loop_nested_p (prevloop, loop))
6948 prevloop->inner = loop;
6949 loop->outer = prevloop;
6953 while (prevloop->outer)
6955 if (flow_loop_nested_p (prevloop->outer, loop))
6957 prevloop->next = loop;
6958 loop->outer = prevloop->outer;
6961 prevloop = prevloop->outer;
6964 prevloop->next = loop;
6969 /* Build the loop hierarchy tree for LOOPS. */
6971 flow_loops_tree_build (loops)
6972 struct loops *loops;
6977 num_loops = loops->num;
6981 /* Root the loop hierarchy tree with the first loop found.
Since we used a depth first search this should be the
outermost loop. */
6984 loops->tree = &loops->array[0];
6985 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
6987 /* Add the remaining loops to the tree. */
6988 for (i = 1; i < num_loops; i++)
6989 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
/* Helper function to compute loop nesting depth and enclosed loop level
   for the natural loop specified by LOOP at the loop depth DEPTH.
   Returns the loop level.  */
static int
flow_loop_level_compute (loop, depth)
     struct loop *loop;
     int depth;
{
  struct loop *inner;
  int level = 1;

  if (! loop)
    return 0;

  /* Traverse loop tree assigning depth and computing level as the
     maximum level of all the inner loops of this loop.  The loop
     level is equivalent to the height of the loop in the loop tree
     and corresponds to the number of enclosed loop levels (including
     itself).  */
  for (inner = loop->inner; inner; inner = inner->next)
    {
      int ilevel = flow_loop_level_compute (inner, depth + 1) + 1;

      if (ilevel > level)
        level = ilevel;
    }
  loop->level = level;
  loop->depth = depth;
  return level;
}
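/* Worked example (not from the original source): for a loop A at
   depth 1 containing inner loops B and C, where C in turn contains D,
   the recursion assigns depths A = 1, B = C = 2, D = 3, and levels
   D = 1, B = 1, C = 2 and A = MAX (level (B) + 1, level (C) + 1) = 3,
   i.e. the height of each loop in the loop tree.  */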
/* Compute the loop nesting depth and enclosed loop level for the loop
   hierarchy tree specified by LOOPS.  Return the maximum enclosed loop
   level.  */
static int
flow_loops_level_compute (loops)
     struct loops *loops;
{
  struct loop *loop;
  int level;
  int levels = 0;

  /* Traverse all the outer level loops.  */
  for (loop = loops->tree; loop; loop = loop->next)
    {
      level = flow_loop_level_compute (loop, 1);
      if (level > levels)
        levels = level;
    }
  return levels;
}
/* Find all the natural loops in the function, save them in the LOOPS
   structure, and recalculate the loop_depth information in the basic
   block structures.  Return the number of natural loops found.  */
int
flow_loops_find (loops)
     struct loops *loops;
{
  int i;
  int b;
  int num_loops;
  edge e;
  sbitmap headers;
  sbitmap *dom;
  int *dfs_order;

  loops->num = 0;
  loops->array = NULL;
  loops->tree = NULL;
  dfs_order = NULL;

  /* Taking care of this degenerate case makes the rest of
     this code simpler.  */
  if (n_basic_blocks == 0)
    return 0;

  /* Compute the dominators.  */
  dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
  compute_flow_dominators (dom, NULL);

  /* Count the number of loop edges (back edges); this should equal
     the number of natural loops.  Also clear each block's loop_depth:
     flow_loop_nodes_find increments loop_depth for every node within
     the loop it is given, and since outer loops enclose the nodes of
     their inner loops, a block ends up with a loop_depth equal to the
     number of loops enclosing it.  */
  num_loops = 0;
  for (b = 0; b < n_basic_blocks; b++)
    {
      BASIC_BLOCK (b)->loop_depth = 0;
      for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
        {
          basic_block latch = e->src;

          /* Look for back edges where a predecessor is dominated
             by this block.  A natural loop has a single entry
             node (header) that dominates all the nodes in the
             loop.  It also has a single back edge to the header
             from a latch node.  Note that multiple natural loops
             may share the same header.  */
          if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
            num_loops++;
        }
    }

  if (num_loops)
    {
      /* Compute depth first search order of the CFG so that outer
         natural loops will be found before inner natural loops.  */
      dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
      flow_depth_first_order_compute (dfs_order);

      /* Allocate loop structures.  */
      loops->array
        = (struct loop *) xcalloc (num_loops, sizeof (struct loop));

      headers = sbitmap_alloc (n_basic_blocks);
      sbitmap_zero (headers);

      loops->shared_headers = sbitmap_alloc (n_basic_blocks);
      sbitmap_zero (loops->shared_headers);

      /* Find and record information about all the natural loops
         in the CFG.  */
      num_loops = 0;
      for (b = 0; b < n_basic_blocks; b++)
        {
          basic_block header;

          /* Search the nodes of the CFG in DFS order so that we
             find outer loops first.  */
          header = BASIC_BLOCK (dfs_order[b]);

          /* Look for all the possible latch blocks for this header.  */
          for (e = header->pred; e; e = e->pred_next)
            {
              basic_block latch = e->src;

              /* Look for back edges where a predecessor is dominated
                 by this block.  A natural loop has a single entry
                 node (header) that dominates all the nodes in the
                 loop.  It also has a single back edge to the header
                 from a latch node.  Note that multiple natural loops
                 may share the same header.  */
              if (latch != ENTRY_BLOCK_PTR
                  && TEST_BIT (dom[latch->index], header->index))
                {
                  struct loop *loop;

                  loop = loops->array + num_loops;

                  loop->header = header;
                  loop->latch = latch;

                  /* Keep track of blocks that are loop headers so
                     that we can tell which loops should be merged.  */
                  if (TEST_BIT (headers, header->index))
                    SET_BIT (loops->shared_headers, header->index);
                  SET_BIT (headers, header->index);

                  /* Find nodes contained within the loop.  */
                  loop->nodes = sbitmap_alloc (n_basic_blocks);
                  loop->num_nodes
                    = flow_loop_nodes_find (header, latch, loop->nodes);

                  /* Compute first and last blocks within the loop.
                     These are often the same as the loop header and
                     loop latch respectively, but this is not always
                     the case.  */
                  loop->first
                    = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
                  loop->last
                    = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));

                  /* Find edges which exit the loop.  Note that a node
                     may have several exit edges.  */
                  loop->num_exits
                    = flow_loop_exits_find (loop->nodes, &loop->exits);

                  /* Look to see if the loop has a pre-header node.  */
                  loop->pre_header
                    = flow_loop_pre_header_find (header, dom);

                  num_loops++;
                }
            }
        }

      /* Natural loops with shared headers may either be disjoint or
         nested.  Disjoint loops with shared headers cannot be inner
         loops and should be merged.  For now just mark loops that share
         headers.  */
      for (i = 0; i < num_loops; i++)
        if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
          loops->array[i].shared = 1;

      sbitmap_free (headers);
    }

  loops->num = num_loops;

  /* Save CFG derived information to avoid recomputing it.  */
  loops->cfg.dom = dom;
  loops->cfg.dfs_order = dfs_order;

  /* Build the loop hierarchy tree.  */
  flow_loops_tree_build (loops);

  /* Assign the loop nesting depth and enclosed loop level for each
     loop.  */
  loops->levels = flow_loops_level_compute (loops);

  return num_loops;
}
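/* Illustrative usage (not from the original source; only
   flow_loops_find and the struct loops members are taken from this
   file):

        struct loops loops;
        int i, num;

        num = flow_loops_find (&loops);
        for (i = 0; i < num; i++)
          {
            struct loop *loop = loops.array + i;
            ... examine loop->header, loop->latch, loop->nodes,
                loop->depth, loop->pre_header ...
          }

   The dominator vector and DFS order are saved in loops.cfg so that
   subsequent consumers need not recompute them.  */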
/* Return non-zero if edge E enters the header of LOOP from outside
   of LOOP.  */
int
flow_loop_outside_edge_p (loop, e)
     const struct loop *loop;
     edge e;
{
  if (e->dest != loop->header)
    abort ();
  return (e->src == ENTRY_BLOCK_PTR)
    || ! TEST_BIT (loop->nodes, e->src->index);
}
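/* Illustrative usage (not from the original source): since every
   predecessor edge of a loop header has the header as its
   destination, the edges entering LOOP from outside can be counted
   with

        int n = 0;
        edge e;
        for (e = loop->header->pred; e; e = e->pred_next)
          if (flow_loop_outside_edge_p (loop, e))
            n++;

   When flow_loop_pre_header_find found a pre-header, this count is
   normally 1.  */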
/* Clear the LOG_LINKS fields of the insns in the chain INSNS.  */
void
clear_log_links (insns)
     rtx insns;
{
  rtx i;

  for (i = insns; i; i = NEXT_INSN (i))
    if (GET_RTX_CLASS (GET_CODE (i)) == 'i')
      LOG_LINKS (i) = 0;
}
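/* Illustrative usage (not from the original source): a pass that has
   invalidated the use-def information recorded in LOG_LINKS can
   discard it for the whole function with

        clear_log_links (get_insns ());

   where get_insns returns the head of the current function's insn
   chain.  */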