1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
23 /* This file contains the data flow analysis pass of the compiler. It
24 computes data flow information which tells combine_instructions
25 which insns to consider combining and controls register allocation.
27 Additional data flow information that is too bulky to record is
28 generated during the analysis, and is used at that time to create
29 autoincrement and autodecrement addressing.
31 The first step is dividing the function into basic blocks.
32 find_basic_blocks does this. Then life_analysis determines
33 where each register is live and where it is dead.
35 ** find_basic_blocks **
37 find_basic_blocks divides the current function's rtl into basic
38 blocks and constructs the CFG. The blocks are recorded in the
39 basic_block_info array; the CFG exists in the edge structures
40 referenced by the blocks.
42 find_basic_blocks also finds any unreachable loops and deletes them.
46 life_analysis is called immediately after find_basic_blocks.
47 It uses the basic block information to determine where each
48 hard or pseudo register is live.
50 ** live-register info **
52 The information about where each register is live is in two parts:
53 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
55 basic_block->global_live_at_start has an element for each basic
56 block, and the element is a bit-vector with a bit for each hard or
57 pseudo register. The bit is 1 if the register is live at the
58 beginning of the basic block.
60 Two types of elements can be added to an insn's REG_NOTES.
61 A REG_DEAD note is added to an insn's REG_NOTES for any register
62 that meets both of two conditions: The value in the register is not
63 needed in subsequent insns and the insn does not replace the value in
64 the register (in the case of multi-word hard registers, the value in
65 each register must be replaced by the insn to avoid a REG_DEAD note).
67 In the vast majority of cases, an object in a REG_DEAD note will be
68 used somewhere in the insn. The (rare) exception to this is if an
69 insn uses a multi-word hard register and only some of the registers are
70 needed in subsequent insns. In that case, REG_DEAD notes will be
71 provided for those hard registers that are not subsequently needed.
72 Partial REG_DEAD notes of this type do not occur when an insn sets
73 only some of the hard registers used in such a multi-word operand;
74 omitting REG_DEAD notes for objects stored in an insn is optional and
75 the desire to do so does not justify the complexity of the partial
78 REG_UNUSED notes are added for each register that is set by the insn
79 but is unused subsequently (if every register set by the insn is unused
80 and the insn does not reference memory or have some other side-effect,
81 the insn is deleted instead). If only part of a multi-word hard
82 register is used in a subsequent insn, REG_UNUSED notes are made for
83 the parts that will not be used.
85 To determine which registers are live after any insn, one can
86 start from the beginning of the basic block and scan insns, noting
87 which registers are set by each insn and which die there.
89 ** Other actions of life_analysis **
91 life_analysis sets up the LOG_LINKS fields of insns because the
92 information needed to do so is readily available.
94 life_analysis deletes insns whose only effect is to store a value
97 life_analysis notices cases where a reference to a register as
98 a memory address can be combined with a preceding or following
99 incrementation or decrementation of the register. The separate
100 instruction to increment or decrement is deleted and the address
101 is changed to a POST_INC or similar rtx.
103 Each time an incrementing or decrementing address is created,
104 a REG_INC element is added to the insn's REG_NOTES list.
106 life_analysis fills in certain vectors containing information about
107 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
108 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
110 life_analysis sets current_function_sp_is_unchanging if the function
111 doesn't modify the stack pointer. */
115 Split out from life_analysis:
116 - local property discovery (bb->local_live, bb->local_set)
117 - global property computation
119 - pre/post modify transformation
127 #include "basic-block.h"
128 #include "insn-config.h"
130 #include "hard-reg-set.h"
133 #include "function.h"
137 #include "insn-flags.h"
142 #define obstack_chunk_alloc xmalloc
143 #define obstack_chunk_free free
146 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
147 the stack pointer does not matter. The value is tested only in
148 functions that have frame pointers.
149 No definition is equivalent to always zero. */
150 #ifndef EXIT_IGNORE_STACK
151 #define EXIT_IGNORE_STACK 0
154 #ifndef HAVE_epilogue
155 #define HAVE_epilogue 0
157 #ifndef HAVE_prologue
158 #define HAVE_prologue 0
160 #ifndef HAVE_sibcall_epilogue
161 #define HAVE_sibcall_epilogue 0
164 /* The contents of the current function definition are allocated
165 in this obstack, and all are freed at the end of the function.
166 For top-level functions, this is temporary_obstack.
167 Separate obstacks are made for nested functions. */
169 extern struct obstack *function_obstack;
171 /* Number of basic blocks in the current function. */
175 /* Number of edges in the current function. */
179 /* The basic block array. */
181 varray_type basic_block_info;
183 /* The special entry and exit blocks. */
185 struct basic_block_def entry_exit_blocks[2]
190 NULL, /* local_set */
191 NULL, /* global_live_at_start */
192 NULL, /* global_live_at_end */
194 ENTRY_BLOCK, /* index */
196 -1, -1 /* eh_beg, eh_end */
203 NULL, /* local_set */
204 NULL, /* global_live_at_start */
205 NULL, /* global_live_at_end */
207 EXIT_BLOCK, /* index */
209 -1, -1 /* eh_beg, eh_end */
213 /* Nonzero if the second flow pass has completed. */
216 /* Maximum register number used in this function, plus one. */
220 /* Indexed by n, giving various register information */
222 varray_type reg_n_info;
224 /* Size of a regset for the current function,
225 in (1) bytes and (2) elements. */
230 /* Regset of regs live when calls to `setjmp'-like functions happen. */
231 /* ??? Does this exist only for the setjmp-clobbered warning message? */
233 regset regs_live_at_setjmp;
235 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
236 that have to go in the same hard reg.
237 The first two regs in the list are a pair, and the next two
238 are another pair, etc. */
241 /* Set of registers that may be eliminable. These are handled specially
242 in updating regs_ever_live. */
244 static HARD_REG_SET elim_reg_set;
246 /* The basic block structure for every insn, indexed by uid. */
248 varray_type basic_block_for_insn;
250 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
251 /* ??? Should probably be using LABEL_NUSES instead. It would take a
252 bit of surgery to be able to use or co-opt the routines in jump. */
254 static rtx label_value_list;
256 /* For use in communicating between propagate_block and its subroutines.
257 Holds all information needed to compute life and def-use information. */
259 struct propagate_block_info
261 /* The basic block we're considering. */
264 /* Bit N is set if register N is conditionally or unconditionally live. */
267 /* Bit N is set if register N is unconditionally dead this insn. */
270 /* Bit N is set if register N is live this insn. */
273 /* Element N is the next insn that uses (hard or pseudo) register N
274 within the current basic block; or zero, if there is no such insn. */
277 /* Contains a list of all the MEMs we are tracking for dead store
281 /* If non-null, record the set of registers set in the basic block. */
284 /* Non-zero if the value of CC0 is live. */
287 /* Flags controling the set of information propagate_block collects. */
291 /* Forward declarations */
292 static int count_basic_blocks PARAMS ((rtx));
293 static rtx find_basic_blocks_1 PARAMS ((rtx));
294 static void clear_edges PARAMS ((void));
295 static void make_edges PARAMS ((rtx));
296 static void make_label_edge PARAMS ((sbitmap *, basic_block,
298 static void make_eh_edge PARAMS ((sbitmap *, eh_nesting_info *,
299 basic_block, rtx, int));
300 static void mark_critical_edges PARAMS ((void));
301 static void move_stray_eh_region_notes PARAMS ((void));
302 static void record_active_eh_regions PARAMS ((rtx));
304 static void commit_one_edge_insertion PARAMS ((edge));
306 static void delete_unreachable_blocks PARAMS ((void));
307 static void delete_eh_regions PARAMS ((void));
308 static int can_delete_note_p PARAMS ((rtx));
309 static void expunge_block PARAMS ((basic_block));
310 static int can_delete_label_p PARAMS ((rtx));
311 static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
313 static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
315 static int merge_blocks PARAMS ((edge,basic_block,basic_block));
316 static void try_merge_blocks PARAMS ((void));
317 static void tidy_fallthru_edges PARAMS ((void));
318 static int verify_wide_reg_1 PARAMS ((rtx *, void *));
319 static void verify_wide_reg PARAMS ((int, rtx, rtx));
320 static void verify_local_live_at_start PARAMS ((regset, basic_block));
321 static int set_noop_p PARAMS ((rtx));
322 static int noop_move_p PARAMS ((rtx));
323 static void delete_noop_moves PARAMS ((rtx));
324 static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
325 static void notice_stack_pointer_modification PARAMS ((rtx));
326 static void mark_reg PARAMS ((rtx, void *));
327 static void mark_regs_live_at_end PARAMS ((regset));
328 static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *));
329 static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
330 static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
331 static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
332 static int insn_dead_p PARAMS ((struct propagate_block_info *,
334 static int libcall_dead_p PARAMS ((struct propagate_block_info *,
336 static void mark_set_regs PARAMS ((struct propagate_block_info *,
338 static void mark_set_1 PARAMS ((struct propagate_block_info *,
339 enum rtx_code, rtx, rtx,
342 static void find_auto_inc PARAMS ((struct propagate_block_info *,
344 static int try_pre_increment_1 PARAMS ((struct propagate_block_info *,
346 static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
348 static void mark_used_reg PARAMS ((struct propagate_block_info *,
350 static void mark_used_regs PARAMS ((struct propagate_block_info *,
352 void dump_flow_info PARAMS ((FILE *));
353 void debug_flow_info PARAMS ((void));
354 static void dump_edge_info PARAMS ((FILE *, edge, int));
356 static void count_reg_sets_1 PARAMS ((rtx, int));
357 static void count_reg_sets PARAMS ((rtx, int));
358 static void count_reg_references PARAMS ((rtx, int));
359 static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
361 static void remove_fake_successors PARAMS ((basic_block));
362 static void flow_nodes_print PARAMS ((const char *, const sbitmap, FILE *));
363 static void flow_exits_print PARAMS ((const char *, const edge *, int, FILE *));
364 static void flow_loops_cfg_dump PARAMS ((const struct loops *, FILE *));
365 static int flow_loop_nested_p PARAMS ((struct loop *, struct loop *));
366 static int flow_loop_exits_find PARAMS ((const sbitmap, edge **));
367 static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
368 static int flow_depth_first_order_compute PARAMS ((int *));
369 static basic_block flow_loop_pre_header_find PARAMS ((basic_block, const sbitmap *));
370 static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
371 static void flow_loops_tree_build PARAMS ((struct loops *));
372 static int flow_loop_level_compute PARAMS ((struct loop *, int));
373 static int flow_loops_level_compute PARAMS ((struct loops *));
375 /* Find basic blocks of the current function.
376 F is the first insn of the function and NREGS the number of register
380 find_basic_blocks (f, nregs, file)
382 int nregs ATTRIBUTE_UNUSED;
383 FILE *file ATTRIBUTE_UNUSED;
387 /* Flush out existing data. */
388 if (basic_block_info != NULL)
394 /* Clear bb->aux on all extant basic blocks. We'll use this as a
395 tag for reuse during create_basic_block, just in case some pass
396 copies around basic block notes improperly. */
397 for (i = 0; i < n_basic_blocks; ++i)
398 BASIC_BLOCK (i)->aux = NULL;
400 VARRAY_FREE (basic_block_info);
403 n_basic_blocks = count_basic_blocks (f);
405 /* Size the basic block table. The actual structures will be allocated
406 by find_basic_blocks_1, since we want to keep the structure pointers
407 stable across calls to find_basic_blocks. */
408 /* ??? This whole issue would be much simpler if we called find_basic_blocks
409 exactly once, and thereafter we don't have a single long chain of
410 instructions at all until close to the end of compilation when we
411 actually lay them out. */
413 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
415 label_value_list = find_basic_blocks_1 (f);
417 /* Record the block to which an insn belongs. */
418 /* ??? This should be done another way, by which (perhaps) a label is
419 tagged directly with the basic block that it starts. It is used for
420 more than that currently, but IMO that is the only valid use. */
422 max_uid = get_max_uid ();
424 /* Leave space for insns life_analysis makes in some cases for auto-inc.
425 These cases are rare, so we don't need too much space. */
426 max_uid += max_uid / 10;
429 compute_bb_for_insn (max_uid);
431 /* Discover the edges of our cfg. */
432 record_active_eh_regions (f);
433 make_edges (label_value_list);
435 /* Do very simple cleanup now, for the benefit of code that runs between
436 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
437 tidy_fallthru_edges ();
439 mark_critical_edges ();
441 #ifdef ENABLE_CHECKING
446 /* Count the basic blocks of the function. */
449 count_basic_blocks (f)
453 register RTX_CODE prev_code;
454 register int count = 0;
456 int call_had_abnormal_edge = 0;
458 prev_code = JUMP_INSN;
459 for (insn = f; insn; insn = NEXT_INSN (insn))
461 register RTX_CODE code = GET_CODE (insn);
463 if (code == CODE_LABEL
464 || (GET_RTX_CLASS (code) == 'i'
465 && (prev_code == JUMP_INSN
466 || prev_code == BARRIER
467 || (prev_code == CALL_INSN && call_had_abnormal_edge))))
470 /* Record whether this call created an edge. */
471 if (code == CALL_INSN)
473 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
474 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
476 call_had_abnormal_edge = 0;
478 /* If there is an EH region or rethrow, we have an edge. */
479 if ((eh_region && region > 0)
480 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
481 call_had_abnormal_edge = 1;
482 else if (nonlocal_goto_handler_labels && region >= 0)
483 /* If there is a nonlocal goto label and the specified
484 region number isn't -1, we have an edge. (0 means
485 no throw, but might have a nonlocal goto). */
486 call_had_abnormal_edge = 1;
491 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
493 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
497 /* The rest of the compiler works a bit smoother when we don't have to
498 check for the edge case of do-nothing functions with no basic blocks. */
501 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
508 /* Find all basic blocks of the function whose first insn is F.
510 Collect and return a list of labels whose addresses are taken. This
511 will be used in make_edges for use with computed gotos. */
514 find_basic_blocks_1 (f)
517 register rtx insn, next;
519 rtx bb_note = NULL_RTX;
520 rtx eh_list = NULL_RTX;
521 rtx label_value_list = NULL_RTX;
525 /* We process the instructions in a slightly different way than we did
526 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
527 closed out the previous block, so that it gets attached at the proper
528 place. Since this form should be equivalent to the previous,
529 count_basic_blocks continues to use the old form as a check. */
531 for (insn = f; insn; insn = next)
533 enum rtx_code code = GET_CODE (insn);
535 next = NEXT_INSN (insn);
541 int kind = NOTE_LINE_NUMBER (insn);
543 /* Keep a LIFO list of the currently active exception notes. */
544 if (kind == NOTE_INSN_EH_REGION_BEG)
545 eh_list = alloc_INSN_LIST (insn, eh_list);
546 else if (kind == NOTE_INSN_EH_REGION_END)
550 eh_list = XEXP (eh_list, 1);
551 free_INSN_LIST_node (t);
554 /* Look for basic block notes with which to keep the
555 basic_block_info pointers stable. Unthread the note now;
556 we'll put it back at the right place in create_basic_block.
557 Or not at all if we've already found a note in this block. */
558 else if (kind == NOTE_INSN_BASIC_BLOCK)
560 if (bb_note == NULL_RTX)
563 next = flow_delete_insn (insn);
569 /* A basic block starts at a label. If we've closed one off due
570 to a barrier or some such, no need to do it again. */
571 if (head != NULL_RTX)
573 /* While we now have edge lists with which other portions of
574 the compiler might determine a call ending a basic block
575 does not imply an abnormal edge, it will be a bit before
576 everything can be updated. So continue to emit a noop at
577 the end of such a block. */
578 if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
580 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
581 end = emit_insn_after (nop, end);
584 create_basic_block (i++, head, end, bb_note);
592 /* A basic block ends at a jump. */
593 if (head == NULL_RTX)
597 /* ??? Make a special check for table jumps. The way this
598 happens is truly and amazingly gross. We are about to
599 create a basic block that contains just a code label and
600 an addr*vec jump insn. Worse, an addr_diff_vec creates
601 its own natural loop.
603 Prevent this bit of brain damage, pasting things together
604 correctly in make_edges.
606 The correct solution involves emitting the table directly
607 on the tablejump instruction as a note, or JUMP_LABEL. */
609 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
610 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
618 goto new_bb_inclusive;
621 /* A basic block ends at a barrier. It may be that an unconditional
622 jump already closed the basic block -- no need to do it again. */
623 if (head == NULL_RTX)
626 /* While we now have edge lists with which other portions of the
627 compiler might determine a call ending a basic block does not
628 imply an abnormal edge, it will be a bit before everything can
629 be updated. So continue to emit a noop at the end of such a
631 if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
633 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
634 end = emit_insn_after (nop, end);
636 goto new_bb_exclusive;
640 /* Record whether this call created an edge. */
641 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
642 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
643 int call_has_abnormal_edge = 0;
645 /* If there is an EH region or rethrow, we have an edge. */
646 if ((eh_list && region > 0)
647 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
648 call_has_abnormal_edge = 1;
649 else if (nonlocal_goto_handler_labels && region >= 0)
650 /* If there is a nonlocal goto label and the specified
651 region number isn't -1, we have an edge. (0 means
652 no throw, but might have a nonlocal goto). */
653 call_has_abnormal_edge = 1;
655 /* A basic block ends at a call that can either throw or
656 do a non-local goto. */
657 if (call_has_abnormal_edge)
660 if (head == NULL_RTX)
665 create_basic_block (i++, head, end, bb_note);
666 head = end = NULL_RTX;
674 if (GET_RTX_CLASS (code) == 'i')
676 if (head == NULL_RTX)
683 if (GET_RTX_CLASS (code) == 'i')
687 /* Make a list of all labels referred to other than by jumps
688 (which just don't have the REG_LABEL notes).
690 Make a special exception for labels followed by an ADDR*VEC,
691 as this would be a part of the tablejump setup code.
693 Make a special exception for the eh_return_stub_label, which
694 we know isn't part of any otherwise visible control flow. */
696 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
697 if (REG_NOTE_KIND (note) == REG_LABEL)
699 rtx lab = XEXP (note, 0), next;
701 if (lab == eh_return_stub_label)
703 else if ((next = next_nonnote_insn (lab)) != NULL
704 && GET_CODE (next) == JUMP_INSN
705 && (GET_CODE (PATTERN (next)) == ADDR_VEC
706 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
710 = alloc_EXPR_LIST (0, XEXP (note, 0), label_value_list);
715 if (head != NULL_RTX)
716 create_basic_block (i++, head, end, bb_note);
718 if (i != n_basic_blocks)
721 return label_value_list;
724 /* Tidy the CFG by deleting unreachable code and whatnot. */
730 delete_unreachable_blocks ();
731 move_stray_eh_region_notes ();
732 record_active_eh_regions (f);
734 mark_critical_edges ();
736 /* Kill the data we won't maintain. */
737 label_value_list = NULL_RTX;
740 /* Create a new basic block consisting of the instructions between
741 HEAD and END inclusive. Reuses the note and basic block struct
742 in BB_NOTE, if any. */
745 create_basic_block (index, head, end, bb_note)
747 rtx head, end, bb_note;
752 && ! RTX_INTEGRATED_P (bb_note)
753 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
756 /* If we found an existing note, thread it back onto the chain. */
758 if (GET_CODE (head) == CODE_LABEL)
759 add_insn_after (bb_note, head);
762 add_insn_before (bb_note, head);
768 /* Otherwise we must create a note and a basic block structure.
769 Since we allow basic block structs in rtl, give the struct
770 the same lifetime by allocating it off the function obstack
771 rather than using malloc. */
773 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
774 memset (bb, 0, sizeof (*bb));
776 if (GET_CODE (head) == CODE_LABEL)
777 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
780 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
783 NOTE_BASIC_BLOCK (bb_note) = bb;
786 /* Always include the bb note in the block. */
787 if (NEXT_INSN (end) == bb_note)
793 BASIC_BLOCK (index) = bb;
795 /* Tag the block so that we know it has been used when considering
796 other basic block notes. */
800 /* Records the basic block struct in BB_FOR_INSN, for every instruction
801 indexed by INSN_UID. MAX is the size of the array. */
804 compute_bb_for_insn (max)
809 if (basic_block_for_insn)
810 VARRAY_FREE (basic_block_for_insn);
811 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
813 for (i = 0; i < n_basic_blocks; ++i)
815 basic_block bb = BASIC_BLOCK (i);
822 int uid = INSN_UID (insn);
824 VARRAY_BB (basic_block_for_insn, uid) = bb;
827 insn = NEXT_INSN (insn);
832 /* Free the memory associated with the edge structures. */
840 for (i = 0; i < n_basic_blocks; ++i)
842 basic_block bb = BASIC_BLOCK (i);
844 for (e = bb->succ; e ; e = n)
854 for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
860 ENTRY_BLOCK_PTR->succ = 0;
861 EXIT_BLOCK_PTR->pred = 0;
866 /* Identify the edges between basic blocks.
868 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
869 that are otherwise unreachable may be reachable with a non-local goto.
871 BB_EH_END is an array indexed by basic block number in which we record
872 the list of exception regions active at the end of the basic block. */
875 make_edges (label_value_list)
876 rtx label_value_list;
879 eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
880 sbitmap *edge_cache = NULL;
882 /* Assume no computed jump; revise as we create edges. */
883 current_function_has_computed_jump = 0;
885 /* Heavy use of computed goto in machine-generated code can lead to
886 nearly fully-connected CFGs. In that case we spend a significant
887 amount of time searching the edge lists for duplicates. */
888 if (forced_labels || label_value_list)
890 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
891 sbitmap_vector_zero (edge_cache, n_basic_blocks);
894 /* By nature of the way these get numbered, block 0 is always the entry. */
895 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
897 for (i = 0; i < n_basic_blocks; ++i)
899 basic_block bb = BASIC_BLOCK (i);
902 int force_fallthru = 0;
904 /* Examine the last instruction of the block, and discover the
905 ways we can leave the block. */
908 code = GET_CODE (insn);
911 if (code == JUMP_INSN)
915 /* ??? Recognize a tablejump and do the right thing. */
916 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
917 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
918 && GET_CODE (tmp) == JUMP_INSN
919 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
920 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
925 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
926 vec = XVEC (PATTERN (tmp), 0);
928 vec = XVEC (PATTERN (tmp), 1);
930 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
931 make_label_edge (edge_cache, bb,
932 XEXP (RTVEC_ELT (vec, j), 0), 0);
934 /* Some targets (eg, ARM) emit a conditional jump that also
935 contains the out-of-range target. Scan for these and
936 add an edge if necessary. */
937 if ((tmp = single_set (insn)) != NULL
938 && SET_DEST (tmp) == pc_rtx
939 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
940 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
941 make_label_edge (edge_cache, bb,
942 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
944 #ifdef CASE_DROPS_THROUGH
945 /* Silly VAXen. The ADDR_VEC is going to be in the way of
946 us naturally detecting fallthru into the next block. */
951 /* If this is a computed jump, then mark it as reaching
952 everything on the label_value_list and forced_labels list. */
953 else if (computed_jump_p (insn))
955 current_function_has_computed_jump = 1;
957 for (x = label_value_list; x; x = XEXP (x, 1))
958 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
960 for (x = forced_labels; x; x = XEXP (x, 1))
961 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
964 /* Returns create an exit out. */
965 else if (returnjump_p (insn))
966 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
968 /* Otherwise, we have a plain conditional or unconditional jump. */
971 if (! JUMP_LABEL (insn))
973 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
977 /* If this is a sibling call insn, then this is in effect a
978 combined call and return, and so we need an edge to the
979 exit block. No need to worry about EH edges, since we
980 wouldn't have created the sibling call in the first place. */
982 if (code == CALL_INSN && SIBLING_CALL_P (insn))
983 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
986 /* If this is a CALL_INSN, then mark it as reaching the active EH
987 handler for this CALL_INSN. If we're handling asynchronous
988 exceptions then any insn can reach any of the active handlers.
990 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
992 if (code == CALL_INSN || asynchronous_exceptions)
994 /* Add any appropriate EH edges. We do this unconditionally
995 since there may be a REG_EH_REGION or REG_EH_RETHROW note
996 on the call, and this needn't be within an EH region. */
997 make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);
999 /* If we have asynchronous exceptions, do the same for *all*
1000 exception regions active in the block. */
1001 if (asynchronous_exceptions
1002 && bb->eh_beg != bb->eh_end)
1004 if (bb->eh_beg >= 0)
1005 make_eh_edge (edge_cache, eh_nest_info, bb,
1006 NULL_RTX, bb->eh_beg);
1008 for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
1009 if (GET_CODE (x) == NOTE
1010 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
1011 || NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
1013 int region = NOTE_EH_HANDLER (x);
1014 make_eh_edge (edge_cache, eh_nest_info, bb,
1019 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1021 /* ??? This could be made smarter: in some cases it's possible
1022 to tell that certain calls will not do a nonlocal goto.
1024 For example, if the nested functions that do the nonlocal
1025 gotos do not have their addresses taken, then only calls to
1026 those functions or to other nested functions that use them
1027 could possibly do nonlocal gotos. */
1028 /* We do know that a REG_EH_REGION note with a value less
1029 than 0 is guaranteed not to perform a non-local goto. */
1030 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1031 if (!note || INTVAL (XEXP (note, 0)) >= 0)
1032 for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
1033 make_label_edge (edge_cache, bb, XEXP (x, 0),
1034 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1038 /* We know something about the structure of the function __throw in
1039 libgcc2.c. It is the only function that ever contains eh_stub
1040 labels. It modifies its return address so that the last block
1041 returns to one of the eh_stub labels within it. So we have to
1042 make additional edges in the flow graph. */
1043 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
1044 make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);
1046 /* Find out if we can drop through to the next block. */
1047 insn = next_nonnote_insn (insn);
1048 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1049 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1050 else if (i + 1 < n_basic_blocks)
1052 rtx tmp = BLOCK_HEAD (i + 1);
1053 if (GET_CODE (tmp) == NOTE)
1054 tmp = next_nonnote_insn (tmp);
1055 if (force_fallthru || insn == tmp)
1056 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1060 free_eh_nesting_info (eh_nest_info);
1062 sbitmap_vector_free (edge_cache);
1065 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1066 about the edge that is accumulated between calls. */
1069 make_edge (edge_cache, src, dst, flags)
1070 sbitmap *edge_cache;
1071 basic_block src, dst;
1077 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1078 many edges to them, and we didn't allocate memory for it. */
1079 use_edge_cache = (edge_cache
1080 && src != ENTRY_BLOCK_PTR
1081 && dst != EXIT_BLOCK_PTR);
1083 /* Make sure we don't add duplicate edges. */
1084 if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
1085 for (e = src->succ; e ; e = e->succ_next)
1092 e = (edge) xcalloc (1, sizeof (*e));
1095 e->succ_next = src->succ;
1096 e->pred_next = dst->pred;
1105 SET_BIT (edge_cache[src->index], dst->index);
1108 /* Create an edge from a basic block to a label. */
1111 make_label_edge (edge_cache, src, label, flags)
1112 sbitmap *edge_cache;
1117 if (GET_CODE (label) != CODE_LABEL)
1120 /* If the label was never emitted, this insn is junk, but avoid a
1121 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1122 as a result of a syntax error and a diagnostic has already been
1125 if (INSN_UID (label) == 0)
1128 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1131 /* Create the edges generated by INSN in REGION. */
1134 make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
1135 sbitmap *edge_cache;
1136 eh_nesting_info *eh_nest_info;
1141 handler_info **handler_list;
1144 is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1145 num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
1148 make_label_edge (edge_cache, src, handler_list[num]->handler_label,
1149 EDGE_ABNORMAL | EDGE_EH | is_call);
1153 /* EH_REGION notes appearing between basic blocks is ambiguous, and even
1154 dangerous if we intend to move basic blocks around. Move such notes
1155 into the following block. */
1158 move_stray_eh_region_notes ()
1163 if (n_basic_blocks < 2)
1166 b2 = BASIC_BLOCK (n_basic_blocks - 1);
1167 for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
1169 rtx insn, next, list = NULL_RTX;
1171 b1 = BASIC_BLOCK (i);
1172 for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
1174 next = NEXT_INSN (insn);
1175 if (GET_CODE (insn) == NOTE
1176 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1177 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1179 /* Unlink from the insn chain. */
1180 NEXT_INSN (PREV_INSN (insn)) = next;
1181 PREV_INSN (next) = PREV_INSN (insn);
1184 NEXT_INSN (insn) = list;
1189 if (list == NULL_RTX)
1192 /* Find where to insert these things. */
1194 if (GET_CODE (insn) == CODE_LABEL)
1195 insn = NEXT_INSN (insn);
1199 next = NEXT_INSN (list);
1200 add_insn_after (list, insn);
1206 /* Recompute eh_beg/eh_end for each basic block. */
1209 record_active_eh_regions (f)
1212 rtx insn, eh_list = NULL_RTX;
1214 basic_block bb = BASIC_BLOCK (0);
1216 for (insn = f; insn ; insn = NEXT_INSN (insn))
1218 if (bb->head == insn)
1219 bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1221 if (GET_CODE (insn) == NOTE)
1223 int kind = NOTE_LINE_NUMBER (insn);
1224 if (kind == NOTE_INSN_EH_REGION_BEG)
1225 eh_list = alloc_INSN_LIST (insn, eh_list);
1226 else if (kind == NOTE_INSN_EH_REGION_END)
1228 rtx t = XEXP (eh_list, 1);
1229 free_INSN_LIST_node (eh_list);
1234 if (bb->end == insn)
1236 bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1238 if (i == n_basic_blocks)
1240 bb = BASIC_BLOCK (i);
1245 /* Identify critical edges and set the bits appropriately. */
1248 mark_critical_edges ()
1250 int i, n = n_basic_blocks;
1253 /* We begin with the entry block. This is not terribly important now,
1254 but could be if a front end (Fortran) implemented alternate entry
1256 bb = ENTRY_BLOCK_PTR;
1263 /* (1) Critical edges must have a source with multiple successors. */
1264 if (bb->succ && bb->succ->succ_next)
1266 for (e = bb->succ; e ; e = e->succ_next)
1268 /* (2) Critical edges must have a destination with multiple
1269 predecessors. Note that we know there is at least one
1270 predecessor -- the edge we followed to get here. */
1271 if (e->dest->pred->pred_next)
1272 e->flags |= EDGE_CRITICAL;
1274 e->flags &= ~EDGE_CRITICAL;
1279 for (e = bb->succ; e ; e = e->succ_next)
1280 e->flags &= ~EDGE_CRITICAL;
1285 bb = BASIC_BLOCK (i);
1289 /* Split a (typically critical) edge. Return the new block.
1290 Abort on abnormal edges.
1292 ??? The code generally expects to be called on critical edges.
1293 The case of a block ending in an unconditional jump to a
1294 block with multiple predecessors is not handled optimally. */
1297 split_edge (edge_in)
1300 basic_block old_pred, bb, old_succ;
1305 /* Abnormal edges cannot be split. */
1306 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1309 old_pred = edge_in->src;
1310 old_succ = edge_in->dest;
1312 /* Remove the existing edge from the destination's pred list. */
1315 for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
1317 *pp = edge_in->pred_next;
1318 edge_in->pred_next = NULL;
1321 /* Create the new structures. */
1322 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
1323 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1326 memset (bb, 0, sizeof (*bb));
1327 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
1328 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
1330 /* ??? This info is likely going to be out of date very soon. */
1331 if (old_succ->global_live_at_start)
1333 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1334 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
1338 CLEAR_REG_SET (bb->global_live_at_start);
1339 CLEAR_REG_SET (bb->global_live_at_end);
1344 bb->succ = edge_out;
1347 edge_in->flags &= ~EDGE_CRITICAL;
1349 edge_out->pred_next = old_succ->pred;
1350 edge_out->succ_next = NULL;
1352 edge_out->dest = old_succ;
1353 edge_out->flags = EDGE_FALLTHRU;
1354 edge_out->probability = REG_BR_PROB_BASE;
1356 old_succ->pred = edge_out;
1358 /* Tricky case -- if there existed a fallthru into the successor
1359 (and we're not it) we must add a new unconditional jump around
1360 the new block we're actually interested in.
1362 Further, if that edge is critical, this means a second new basic
1363 block must be created to hold it. In order to simplify correct
1364 insn placement, do this before we touch the existing basic block
1365 ordering for the block we were really wanting. */
1366 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1369 for (e = edge_out->pred_next; e ; e = e->pred_next)
1370 if (e->flags & EDGE_FALLTHRU)
1375 basic_block jump_block;
1378 if ((e->flags & EDGE_CRITICAL) == 0
1379 && e->src != ENTRY_BLOCK_PTR)
1381 /* Non critical -- we can simply add a jump to the end
1382 of the existing predecessor. */
1383 jump_block = e->src;
1387 /* We need a new block to hold the jump. The simplest
1388 way to do the bulk of the work here is to recursively
1390 jump_block = split_edge (e);
1391 e = jump_block->succ;
1394 /* Now add the jump insn ... */
1395 pos = emit_jump_insn_after (gen_jump (old_succ->head),
1397 jump_block->end = pos;
1398 if (basic_block_for_insn)
1399 set_block_for_insn (pos, jump_block);
1400 emit_barrier_after (pos);
1402 /* ... let jump know that label is in use, ... */
1403 JUMP_LABEL (pos) = old_succ->head;
1404 ++LABEL_NUSES (old_succ->head);
1406 /* ... and clear fallthru on the outgoing edge. */
1407 e->flags &= ~EDGE_FALLTHRU;
1409 /* Continue splitting the interesting edge. */
1413 /* Place the new block just in front of the successor. */
1414 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1415 if (old_succ == EXIT_BLOCK_PTR)
1416 j = n_basic_blocks - 1;
1418 j = old_succ->index;
1419 for (i = n_basic_blocks - 1; i > j; --i)
1421 basic_block tmp = BASIC_BLOCK (i - 1);
1422 BASIC_BLOCK (i) = tmp;
1425 BASIC_BLOCK (i) = bb;
1428 /* Create the basic block note.
1430 Where we place the note can have a noticable impact on the generated
1431 code. Consider this cfg:
1442 If we need to insert an insn on the edge from block 0 to block 1,
1443 we want to ensure the instructions we insert are outside of any
1444 loop notes that physically sit between block 0 and block 1. Otherwise
1445 we confuse the loop optimizer into thinking the loop is a phony. */
1446 if (old_succ != EXIT_BLOCK_PTR
1447 && PREV_INSN (old_succ->head)
1448 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
1449 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
1450 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1451 PREV_INSN (old_succ->head));
1452 else if (old_succ != EXIT_BLOCK_PTR)
1453 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1455 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
1456 NOTE_BASIC_BLOCK (bb_note) = bb;
1457 bb->head = bb->end = bb_note;
1459 /* Not quite simple -- for non-fallthru edges, we must adjust the
1460 predecessor's jump instruction to target our new block. */
1461 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1463 rtx tmp, insn = old_pred->end;
1464 rtx old_label = old_succ->head;
1465 rtx new_label = gen_label_rtx ();
1467 if (GET_CODE (insn) != JUMP_INSN)
1470 /* ??? Recognize a tablejump and adjust all matching cases. */
1471 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1472 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1473 && GET_CODE (tmp) == JUMP_INSN
1474 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1475 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1480 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1481 vec = XVEC (PATTERN (tmp), 0);
1483 vec = XVEC (PATTERN (tmp), 1);
1485 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1486 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1488 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1489 --LABEL_NUSES (old_label);
1490 ++LABEL_NUSES (new_label);
1493 /* Handle casesi dispatch insns */
1494 if ((tmp = single_set (insn)) != NULL
1495 && SET_DEST (tmp) == pc_rtx
1496 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1497 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1498 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1500 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1502 --LABEL_NUSES (old_label);
1503 ++LABEL_NUSES (new_label);
1508 /* This would have indicated an abnormal edge. */
1509 if (computed_jump_p (insn))
1512 /* A return instruction can't be redirected. */
1513 if (returnjump_p (insn))
1516 /* If the insn doesn't go where we think, we're confused. */
1517 if (JUMP_LABEL (insn) != old_label)
1520 redirect_jump (insn, new_label);
1523 emit_label_before (new_label, bb_note);
1524 bb->head = new_label;
1530 /* Queue instructions for insertion on an edge between two basic blocks.
1531 The new instructions and basic blocks (if any) will not appear in the
1532 CFG until commit_edge_insertions is called. */
1535 insert_insn_on_edge (pattern, e)
1539 /* We cannot insert instructions on an abnormal critical edge.
1540 It will be easier to find the culprit if we die now. */
1541 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1542 == (EDGE_ABNORMAL|EDGE_CRITICAL))
1545 if (e->insns == NULL_RTX)
1548 push_to_sequence (e->insns);
1550 emit_insn (pattern);
1552 e->insns = get_insns ();
1556 /* Update the CFG for the instructions queued on edge E. */
1559 commit_one_edge_insertion (e)
1562 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp;
1565 /* Pull the insns off the edge now since the edge might go away. */
1567 e->insns = NULL_RTX;
1569 /* Figure out where to put these things. If the destination has
1570 one predecessor, insert there. Except for the exit block. */
1571 if (e->dest->pred->pred_next == NULL
1572 && e->dest != EXIT_BLOCK_PTR)
1576 /* Get the location correct wrt a code label, and "nice" wrt
1577 a basic block note, and before everything else. */
1579 if (GET_CODE (tmp) == CODE_LABEL)
1580 tmp = NEXT_INSN (tmp);
1581 if (GET_CODE (tmp) == NOTE
1582 && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
1583 tmp = NEXT_INSN (tmp);
1584 if (tmp == bb->head)
1587 after = PREV_INSN (tmp);
1590 /* If the source has one successor and the edge is not abnormal,
1591 insert there. Except for the entry block. */
1592 else if ((e->flags & EDGE_ABNORMAL) == 0
1593 && e->src->succ->succ_next == NULL
1594 && e->src != ENTRY_BLOCK_PTR)
1597 /* It is possible to have a non-simple jump here. Consider a target
1598 where some forms of unconditional jumps clobber a register. This
1599 happens on the fr30 for example.
1601 We know this block has a single successor, so we can just emit
1602 the queued insns before the jump. */
1603 if (GET_CODE (bb->end) == JUMP_INSN)
1609 /* We'd better be fallthru, or we've lost track of what's what. */
1610 if ((e->flags & EDGE_FALLTHRU) == 0)
1617 /* Otherwise we must split the edge. */
1620 bb = split_edge (e);
1624 /* Now that we've found the spot, do the insertion. */
1626 /* Set the new block number for these insns, if structure is allocated. */
1627 if (basic_block_for_insn)
1630 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
1631 set_block_for_insn (i, bb);
1636 emit_insns_before (insns, before);
1637 if (before == bb->head)
1642 rtx last = emit_insns_after (insns, after);
1643 if (after == bb->end)
1647 if (GET_CODE (last) == JUMP_INSN)
1649 if (returnjump_p (last))
1651 /* ??? Remove all outgoing edges from BB and add one
1652 for EXIT. This is not currently a problem because
1653 this only happens for the (single) epilogue, which
1654 already has a fallthru edge to EXIT. */
1657 if (e->dest != EXIT_BLOCK_PTR
1658 || e->succ_next != NULL
1659 || (e->flags & EDGE_FALLTHRU) == 0)
1661 e->flags &= ~EDGE_FALLTHRU;
1663 emit_barrier_after (last);
1672 /* Update the CFG for all queued instructions. */
1675 commit_edge_insertions ()
1680 #ifdef ENABLE_CHECKING
1681 verify_flow_info ();
1685 bb = ENTRY_BLOCK_PTR;
1690 for (e = bb->succ; e ; e = next)
1692 next = e->succ_next;
1694 commit_one_edge_insertion (e);
1697 if (++i >= n_basic_blocks)
1699 bb = BASIC_BLOCK (i);
1703 /* Delete all unreachable basic blocks. */
1706 delete_unreachable_blocks ()
1708 basic_block *worklist, *tos;
1709 int deleted_handler;
1714 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
1716 /* Use basic_block->aux as a marker. Clear them all. */
1718 for (i = 0; i < n; ++i)
1719 BASIC_BLOCK (i)->aux = NULL;
1721 /* Add our starting points to the worklist. Almost always there will
1722 be only one. It isn't inconcievable that we might one day directly
1723 support Fortran alternate entry points. */
1725 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
1729 /* Mark the block with a handy non-null value. */
1733 /* Iterate: find everything reachable from what we've already seen. */
1735 while (tos != worklist)
1737 basic_block b = *--tos;
1739 for (e = b->succ; e ; e = e->succ_next)
1747 /* Delete all unreachable basic blocks. Count down so that we don't
1748 interfere with the block renumbering that happens in flow_delete_block. */
1750 deleted_handler = 0;
1752 for (i = n - 1; i >= 0; --i)
1754 basic_block b = BASIC_BLOCK (i);
1757 /* This block was found. Tidy up the mark. */
1760 deleted_handler |= flow_delete_block (b);
1763 tidy_fallthru_edges ();
1765 /* If we deleted an exception handler, we may have EH region begin/end
1766 blocks to remove as well. */
1767 if (deleted_handler)
1768 delete_eh_regions ();
1773 /* Find EH regions for which there is no longer a handler, and delete them. */
1776 delete_eh_regions ()
1780 update_rethrow_references ();
1782 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1783 if (GET_CODE (insn) == NOTE)
1785 if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
1786 (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1788 int num = NOTE_EH_HANDLER (insn);
1789 /* A NULL handler indicates a region is no longer needed,
1790 as long as its rethrow label isn't used. */
1791 if (get_first_handler (num) == NULL && ! rethrow_used (num))
1793 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1794 NOTE_SOURCE_FILE (insn) = 0;
1800 /* Return true if NOTE is not one of the ones that must be kept paired,
1801 so that we may simply delete them. */
1804 can_delete_note_p (note)
1807 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
1808 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
1811 /* Unlink a chain of insns between START and FINISH, leaving notes
1812 that must be paired. */
1815 flow_delete_insn_chain (start, finish)
1818 /* Unchain the insns one by one. It would be quicker to delete all
1819 of these with a single unchaining, rather than one at a time, but
1820 we need to keep the NOTE's. */
1826 next = NEXT_INSN (start);
1827 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
1829 else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
1832 next = flow_delete_insn (start);
1834 if (start == finish)
1840 /* Delete the insns in a (non-live) block. We physically delete every
1841 non-deleted-note insn, and update the flow graph appropriately.
1843 Return nonzero if we deleted an exception handler. */
1845 /* ??? Preserving all such notes strikes me as wrong. It would be nice
1846 to post-process the stream to remove empty blocks, loops, ranges, etc. */
1849 flow_delete_block (b)
1852 int deleted_handler = 0;
1855 /* If the head of this block is a CODE_LABEL, then it might be the
1856 label for an exception handler which can't be reached.
1858 We need to remove the label from the exception_handler_label list
1859 and remove the associated NOTE_INSN_EH_REGION_BEG and
1860 NOTE_INSN_EH_REGION_END notes. */
1864 never_reached_warning (insn);
1866 if (GET_CODE (insn) == CODE_LABEL)
1868 rtx x, *prev = &exception_handler_labels;
1870 for (x = exception_handler_labels; x; x = XEXP (x, 1))
1872 if (XEXP (x, 0) == insn)
1874 /* Found a match, splice this label out of the EH label list. */
1875 *prev = XEXP (x, 1);
1876 XEXP (x, 1) = NULL_RTX;
1877 XEXP (x, 0) = NULL_RTX;
1879 /* Remove the handler from all regions */
1880 remove_handler (insn);
1881 deleted_handler = 1;
1884 prev = &XEXP (x, 1);
1887 /* This label may be referenced by code solely for its value, or
1888 referenced by static data, or something. We have determined
1889 that it is not reachable, but cannot delete the label itself.
1890 Save code space and continue to delete the balance of the block,
1891 along with properly updating the cfg. */
1892 if (!can_delete_label_p (insn))
1894 /* If we've only got one of these, skip the whole deleting
1897 goto no_delete_insns;
1898 insn = NEXT_INSN (insn);
1902 /* Include any jump table following the basic block. */
1904 if (GET_CODE (end) == JUMP_INSN
1905 && (tmp = JUMP_LABEL (end)) != NULL_RTX
1906 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1907 && GET_CODE (tmp) == JUMP_INSN
1908 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1909 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1912 /* Include any barrier that may follow the basic block. */
1913 tmp = next_nonnote_insn (end);
1914 if (tmp && GET_CODE (tmp) == BARRIER)
1917 /* Selectively delete the entire chain. */
1918 flow_delete_insn_chain (insn, end);
1922 /* Remove the edges into and out of this block. Note that there may
1923 indeed be edges in, if we are removing an unreachable loop. */
1927 for (e = b->pred; e ; e = next)
1929 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
1932 next = e->pred_next;
1936 for (e = b->succ; e ; e = next)
1938 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
1941 next = e->succ_next;
1950 /* Remove the basic block from the array, and compact behind it. */
1953 return deleted_handler;
1956 /* Remove block B from the basic block array and compact behind it. */
1962 int i, n = n_basic_blocks;
1964 for (i = b->index; i + 1 < n; ++i)
1966 basic_block x = BASIC_BLOCK (i + 1);
1967 BASIC_BLOCK (i) = x;
1971 basic_block_info->num_elements--;
1975 /* Delete INSN by patching it out. Return the next insn. */
1978 flow_delete_insn (insn)
1981 rtx prev = PREV_INSN (insn);
1982 rtx next = NEXT_INSN (insn);
1985 PREV_INSN (insn) = NULL_RTX;
1986 NEXT_INSN (insn) = NULL_RTX;
1989 NEXT_INSN (prev) = next;
1991 PREV_INSN (next) = prev;
1993 set_last_insn (prev);
1995 if (GET_CODE (insn) == CODE_LABEL)
1996 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
1998 /* If deleting a jump, decrement the use count of the label. Deleting
1999 the label itself should happen in the normal course of block merging. */
2000 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
2001 LABEL_NUSES (JUMP_LABEL (insn))--;
2003 /* Also if deleting an insn that references a label. */
2004 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX)
2005 LABEL_NUSES (XEXP (note, 0))--;
2010 /* True if a given label can be deleted. */
2013 can_delete_label_p (label)
2018 if (LABEL_PRESERVE_P (label))
2021 for (x = forced_labels; x ; x = XEXP (x, 1))
2022 if (label == XEXP (x, 0))
2024 for (x = label_value_list; x ; x = XEXP (x, 1))
2025 if (label == XEXP (x, 0))
2027 for (x = exception_handler_labels; x ; x = XEXP (x, 1))
2028 if (label == XEXP (x, 0))
2031 /* User declared labels must be preserved. */
2032 if (LABEL_NAME (label) != 0)
2038 /* Blocks A and B are to be merged into a single block A. The insns
2039 are already contiguous, hence `nomove'. */
2042 merge_blocks_nomove (a, b)
2046 rtx b_head, b_end, a_end;
2047 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2050 /* If there was a CODE_LABEL beginning B, delete it. */
2053 if (GET_CODE (b_head) == CODE_LABEL)
2055 /* Detect basic blocks with nothing but a label. This can happen
2056 in particular at the end of a function. */
2057 if (b_head == b_end)
2059 del_first = del_last = b_head;
2060 b_head = NEXT_INSN (b_head);
2063 /* Delete the basic block note. */
2064 if (GET_CODE (b_head) == NOTE
2065 && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
2067 if (b_head == b_end)
2072 b_head = NEXT_INSN (b_head);
2075 /* If there was a jump out of A, delete it. */
2077 if (GET_CODE (a_end) == JUMP_INSN)
2081 prev = prev_nonnote_insn (a_end);
2088 /* If this was a conditional jump, we need to also delete
2089 the insn that set cc0. */
2090 if (prev && sets_cc0_p (prev))
2093 prev = prev_nonnote_insn (prev);
2103 /* Delete everything marked above as well as crap that might be
2104 hanging out between the two blocks. */
2105 flow_delete_insn_chain (del_first, del_last);
2107 /* Normally there should only be one successor of A and that is B, but
2108 partway though the merge of blocks for conditional_execution we'll
2109 be merging a TEST block with THEN and ELSE successors. Free the
2110 whole lot of them and hope the caller knows what they're doing. */
2112 remove_edge (a->succ);
2114 /* Adjust the edges out of B for the new owner. */
2115 for (e = b->succ; e ; e = e->succ_next)
2119 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
2120 b->pred = b->succ = NULL;
2122 /* Reassociate the insns of B with A. */
2125 if (basic_block_for_insn)
2127 BLOCK_FOR_INSN (b_head) = a;
2128 while (b_head != b_end)
2130 b_head = NEXT_INSN (b_head);
2131 BLOCK_FOR_INSN (b_head) = a;
2141 /* Blocks A and B are to be merged into a single block. A has no incoming
2142 fallthru edge, so it can be moved before B without adding or modifying
2143 any jumps (aside from the jump from A to B). */
2146 merge_blocks_move_predecessor_nojumps (a, b)
2149 rtx start, end, barrier;
2155 /* We want to delete the BARRIER after the end of the insns we are
2156 going to move. If we don't find a BARRIER, then do nothing. This
2157 can happen in some cases if we have labels we can not delete.
2159 Similarly, do nothing if we can not delete the label at the start
2160 of the target block. */
2161 barrier = next_nonnote_insn (end);
2162 if (GET_CODE (barrier) != BARRIER
2163 || (GET_CODE (b->head) == CODE_LABEL
2164 && ! can_delete_label_p (b->head)))
2167 flow_delete_insn (barrier);
2169 /* Move block and loop notes out of the chain so that we do not
2170 disturb their order.
2172 ??? A better solution would be to squeeze out all the non-nested notes
2173 and adjust the block trees appropriately. Even better would be to have
2174 a tighter connection between block trees and rtl so that this is not
2176 start = squeeze_notes (start, end);
2178 /* Scramble the insn chain. */
2179 if (end != PREV_INSN (b->head))
2180 reorder_insns (start, end, PREV_INSN (b->head));
2184 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2185 a->index, b->index);
2188 /* Swap the records for the two blocks around. Although we are deleting B,
2189 A is now where B was and we want to compact the BB array from where
2191 BASIC_BLOCK(a->index) = b;
2192 BASIC_BLOCK(b->index) = a;
2194 a->index = b->index;
2197 /* Now blocks A and B are contiguous. Merge them. */
2198 merge_blocks_nomove (a, b);
2203 /* Blocks A and B are to be merged into a single block. B has no outgoing
2204 fallthru edge, so it can be moved after A without adding or modifying
2205 any jumps (aside from the jump from A to B). */
2208 merge_blocks_move_successor_nojumps (a, b)
2211 rtx start, end, barrier;
2216 /* We want to delete the BARRIER after the end of the insns we are
2217 going to move. If we don't find a BARRIER, then do nothing. This
2218 can happen in some cases if we have labels we can not delete.
2220 Similarly, do nothing if we can not delete the label at the start
2221 of the target block. */
2222 barrier = next_nonnote_insn (end);
2223 if (GET_CODE (barrier) != BARRIER
2224 || (GET_CODE (b->head) == CODE_LABEL
2225 && ! can_delete_label_p (b->head)))
2228 flow_delete_insn (barrier);
2230 /* Move block and loop notes out of the chain so that we do not
2231 disturb their order.
2233 ??? A better solution would be to squeeze out all the non-nested notes
2234 and adjust the block trees appropriately. Even better would be to have
2235 a tighter connection between block trees and rtl so that this is not
2237 start = squeeze_notes (start, end);
2239 /* Scramble the insn chain. */
2240 reorder_insns (start, end, a->end);
2242 /* Now blocks A and B are contiguous. Merge them. */
2243 merge_blocks_nomove (a, b);
2247 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2248 b->index, a->index);
2254 /* Attempt to merge basic blocks that are potentially non-adjacent.
2255 Return true iff the attempt succeeded. */
2258 merge_blocks (e, b, c)
2262 /* If B has a fallthru edge to C, no need to move anything. */
2263 if (e->flags & EDGE_FALLTHRU)
2265 /* If a label still appears somewhere and we cannot delete the label,
2266 then we cannot merge the blocks. The edge was tidied already. */
2268 rtx insn, stop = NEXT_INSN (c->head);
2269 for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
2270 if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
2273 merge_blocks_nomove (b, c);
2277 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2278 b->index, c->index);
2287 int c_has_outgoing_fallthru;
2288 int b_has_incoming_fallthru;
2290 /* We must make sure to not munge nesting of exception regions,
2291 lexical blocks, and loop notes.
2293 The first is taken care of by requiring that the active eh
2294 region at the end of one block always matches the active eh
2295 region at the beginning of the next block.
2297 The later two are taken care of by squeezing out all the notes. */
2299 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2300 executed and we may want to treat blocks which have two out
2301 edges, one normal, one abnormal as only having one edge for
2302 block merging purposes. */
2304 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2305 if (tmp_edge->flags & EDGE_FALLTHRU)
2307 c_has_outgoing_fallthru = (tmp_edge != NULL);
2309 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2310 if (tmp_edge->flags & EDGE_FALLTHRU)
2312 b_has_incoming_fallthru = (tmp_edge != NULL);
2314 /* If B does not have an incoming fallthru, and the exception regions
2315 match, then it can be moved immediately before C without introducing
2318 C can not be the first block, so we do not have to worry about
2319 accessing a non-existent block. */
2320 d = BASIC_BLOCK (c->index - 1);
2321 if (! b_has_incoming_fallthru
2322 && d->eh_end == b->eh_beg
2323 && b->eh_end == c->eh_beg)
2324 return merge_blocks_move_predecessor_nojumps (b, c);
2326 /* Otherwise, we're going to try to move C after B. Make sure the
2327 exception regions match.
2329 If B is the last basic block, then we must not try to access the
2330 block structure for block B + 1. Luckily in that case we do not
2331 need to worry about matching exception regions. */
2332 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2333 if (b->eh_end == c->eh_beg
2334 && (d == NULL || c->eh_end == d->eh_beg))
2336 /* If C does not have an outgoing fallthru, then it can be moved
2337 immediately after B without introducing or modifying jumps. */
2338 if (! c_has_outgoing_fallthru)
2339 return merge_blocks_move_successor_nojumps (b, c);
2341 /* Otherwise, we'll need to insert an extra jump, and possibly
2342 a new block to contain it. */
2343 /* ??? Not implemented yet. */
2350 /* Top level driver for merge_blocks. */
2357 /* Attempt to merge blocks as made possible by edge removal. If a block
2358 has only one successor, and the successor has only one predecessor,
2359 they may be combined. */
2361 for (i = 0; i < n_basic_blocks; )
2363 basic_block c, b = BASIC_BLOCK (i);
2366 /* A loop because chains of blocks might be combineable. */
2367 while ((s = b->succ) != NULL
2368 && s->succ_next == NULL
2369 && (s->flags & EDGE_EH) == 0
2370 && (c = s->dest) != EXIT_BLOCK_PTR
2371 && c->pred->pred_next == NULL
2372 /* If the jump insn has side effects, we can't kill the edge. */
2373 && (GET_CODE (b->end) != JUMP_INSN
2374 || onlyjump_p (b->end))
2375 && merge_blocks (s, b, c))
2378 /* Don't get confused by the index shift caused by deleting blocks. */
2383 /* The given edge should potentially be a fallthru edge. If that is in
2384 fact true, delete the jump and barriers that are in the way. */
2387 tidy_fallthru_edge (e, b, c)
2393 /* ??? In a late-running flow pass, other folks may have deleted basic
2394 blocks by nopping out blocks, leaving multiple BARRIERs between here
2395 and the target label. They ought to be chastized and fixed.
2397 We can also wind up with a sequence of undeletable labels between
2398 one block and the next.
2400 So search through a sequence of barriers, labels, and notes for
2401 the head of block C and assert that we really do fall through. */
2403 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2406 /* Remove what will soon cease being the jump insn from the source block.
2407 If block B consisted only of this single jump, turn it into a deleted
2410 if (GET_CODE (q) == JUMP_INSN)
2413 /* If this was a conditional jump, we need to also delete
2414 the insn that set cc0. */
2415 if (! simplejump_p (q) && condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2422 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2423 NOTE_SOURCE_FILE (q) = 0;
2426 b->end = q = PREV_INSN (q);
2429 /* Selectively unlink the sequence. */
2430 if (q != PREV_INSN (c->head))
2431 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2433 e->flags |= EDGE_FALLTHRU;
2436 /* Fix up edges that now fall through, or rather should now fall through
2437 but previously required a jump around now deleted blocks. Simplify
2438 the search by only examining blocks numerically adjacent, since this
2439 is how find_basic_blocks created them. */
2442 tidy_fallthru_edges ()
2446 for (i = 1; i < n_basic_blocks; ++i)
2448 basic_block b = BASIC_BLOCK (i - 1);
2449 basic_block c = BASIC_BLOCK (i);
2452 /* We care about simple conditional or unconditional jumps with
2455 If we had a conditional branch to the next instruction when
2456 find_basic_blocks was called, then there will only be one
2457 out edge for the block which ended with the conditional
2458 branch (since we do not create duplicate edges).
2460 Furthermore, the edge will be marked as a fallthru because we
2461 merge the flags for the duplicate edges. So we do not want to
2462 check that the edge is not a FALLTHRU edge. */
2463 if ((s = b->succ) != NULL
2464 && s->succ_next == NULL
2466 /* If the jump insn has side effects, we can't tidy the edge. */
2467 && (GET_CODE (b->end) != JUMP_INSN
2468 || onlyjump_p (b->end)))
2469 tidy_fallthru_edge (s, b, c);
2473 /* Discover and record the loop depth at the head of each basic block. */
2476 calculate_loop_depth (dump)
2481 /* The loop infrastructure does the real job for us. */
2482 flow_loops_find (&loops);
2485 flow_loops_dump (&loops, dump, 0);
2487 flow_loops_free (&loops);
2490 /* Perform data flow analysis.
2491 F is the first insn of the function; FLAGS is a set of PROP_* flags
2492 to be used in accumulating flow info. */
2495 life_analysis (f, file, flags)
2500 #ifdef ELIMINABLE_REGS
2502 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2506 /* Record which registers will be eliminated. We use this in
2509 CLEAR_HARD_REG_SET (elim_reg_set);
2511 #ifdef ELIMINABLE_REGS
2512 for (i = 0; i < (int) (sizeof eliminables / sizeof eliminables[0]); i++)
2513 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2515 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2519 flags &= PROP_DEATH_NOTES | PROP_REG_INFO;
2521 /* The post-reload life analysis have (on a global basis) the same
2522 registers live as was computed by reload itself. elimination
2523 Otherwise offsets and such may be incorrect.
2525 Reload will make some registers as live even though they do not
2526 appear in the rtl. */
2527 if (reload_completed)
2528 flags &= ~PROP_REG_INFO;
2530 /* We want alias analysis information for local dead store elimination. */
2531 if (flags & PROP_SCAN_DEAD_CODE)
2532 init_alias_analysis ();
2534 max_regno = max_reg_num ();
2536 /* Always remove no-op moves. Do this before other processing so
2537 that we don't have to keep re-scanning them. */
2538 delete_noop_moves (f);
2540 /* Some targets can emit simpler epilogues if they know that sp was
2541 not ever modified during the function. After reload, of course,
2542 we've already emitted the epilogue so there's no sense searching. */
2543 if (! reload_completed)
2544 notice_stack_pointer_modification (f);
2546 /* Allocate and zero out data structures that will record the
2547 data from lifetime analysis. */
2548 allocate_reg_life_data ();
2549 allocate_bb_life_data ();
2550 all_blocks = sbitmap_alloc (n_basic_blocks);
2551 sbitmap_ones (all_blocks);
2553 /* Find the set of registers live on function exit. */
2554 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2556 /* "Update" life info from zero. It'd be nice to begin the
2557 relaxation with just the exit and noreturn blocks, but that set
2558 is not immediately handy. */
2560 if (flags & PROP_REG_INFO)
2561 memset (regs_ever_live, 0, sizeof(regs_ever_live));
2562 update_life_info (all_blocks, UPDATE_LIFE_GLOBAL, flags);
2565 sbitmap_free (all_blocks);
2567 if (flags & PROP_SCAN_DEAD_CODE)
2568 end_alias_analysis ();
2571 dump_flow_info (file);
2573 free_basic_block_vars (1);
2576 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2577 Search for REGNO. If found, abort if it is not wider than word_mode. */
2580 verify_wide_reg_1 (px, pregno)
2585 unsigned int regno = *(int *) pregno;
2587 if (GET_CODE (x) == REG && REGNO (x) == regno)
2589 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2596 /* A subroutine of verify_local_live_at_start. Search through insns
2597 between HEAD and END looking for register REGNO. */
2600 verify_wide_reg (regno, head, end)
2606 if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
2607 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, ®no))
2611 head = NEXT_INSN (head);
2614 /* We didn't find the register at all. Something's way screwy. */
2618 /* A subroutine of update_life_info. Verify that there are no untoward
2619 changes in live_at_start during a local update. */
2622 verify_local_live_at_start (new_live_at_start, bb)
2623 regset new_live_at_start;
2626 if (reload_completed)
2628 /* After reload, there are no pseudos, nor subregs of multi-word
2629 registers. The regsets should exactly match. */
2630 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2637 /* Find the set of changed registers. */
2638 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2640 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2642 /* No registers should die. */
2643 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2645 /* Verify that the now-live register is wider than word_mode. */
2646 verify_wide_reg (i, bb->head, bb->end);
2651 /* Updates life information starting with the basic blocks set in BLOCKS.
2653 If LOCAL_ONLY, such as after splitting or peepholeing, we are only
2654 expecting local modifications to basic blocks. If we find extra
2655 registers live at the beginning of a block, then we either killed
2656 useful data, or we have a broken split that wants data not provided.
2657 If we find registers removed from live_at_start, that means we have
2658 a broken peephole that is killing a register it shouldn't.
2660 ??? This is not true in one situation -- when a pre-reload splitter
2661 generates subregs of a multi-word pseudo, current life analysis will
2662 lose the kill. So we _can_ have a pseudo go live. How irritating.
2664 Including PROP_REG_INFO does not properly refresh regs_ever_live
2665 unless the caller resets it to zero. */
2668 update_life_info (blocks, extent, prop_flags)
2670 enum update_life_extent extent;
2674 regset_head tmp_head;
2677 tmp = INITIALIZE_REG_SET (tmp_head);
2679 /* For a global update, we go through the relaxation process again. */
2680 if (extent != UPDATE_LIFE_LOCAL)
2682 calculate_global_regs_live (blocks, blocks,
2683 prop_flags & PROP_SCAN_DEAD_CODE);
2685 /* If asked, remove notes from the blocks we'll update. */
2686 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2687 count_or_remove_death_notes (blocks, 1);
2690 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2692 basic_block bb = BASIC_BLOCK (i);
2694 COPY_REG_SET (tmp, bb->global_live_at_end);
2695 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2697 if (extent == UPDATE_LIFE_LOCAL)
2698 verify_local_live_at_start (tmp, bb);
2703 if (prop_flags & PROP_REG_INFO)
2705 /* The only pseudos that are live at the beginning of the function
2706 are those that were not set anywhere in the function. local-alloc
2707 doesn't know how to handle these correctly, so mark them as not
2708 local to any one basic block. */
2709 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2710 FIRST_PSEUDO_REGISTER, i,
2711 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2713 /* We have a problem with any pseudoreg that lives across the setjmp.
2714 ANSI says that if a user variable does not change in value between
2715 the setjmp and the longjmp, then the longjmp preserves it. This
2716 includes longjmp from a place where the pseudo appears dead.
2717 (In principle, the value still exists if it is in scope.)
2718 If the pseudo goes in a hard reg, some other value may occupy
2719 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2720 Conclusion: such a pseudo must not go in a hard reg. */
2721 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2722 FIRST_PSEUDO_REGISTER, i,
2724 if (regno_reg_rtx[i] != 0)
2726 REG_LIVE_LENGTH (i) = -1;
2727 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
2733 /* Free the variables allocated by find_basic_blocks.
2735 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2738 free_basic_block_vars (keep_head_end_p)
2739 int keep_head_end_p;
2741 if (basic_block_for_insn)
2743 VARRAY_FREE (basic_block_for_insn);
2744 basic_block_for_insn = NULL;
2747 if (! keep_head_end_p)
2750 VARRAY_FREE (basic_block_info);
2753 ENTRY_BLOCK_PTR->aux = NULL;
2754 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2755 EXIT_BLOCK_PTR->aux = NULL;
2756 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2760 /* Return nonzero if the destination of SET equals the source. */
2765 rtx src = SET_SRC (set);
2766 rtx dst = SET_DEST (set);
2768 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
2770 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
2772 src = SUBREG_REG (src);
2773 dst = SUBREG_REG (dst);
2776 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
2777 && REGNO (src) == REGNO (dst));
2780 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2786 rtx pat = PATTERN (insn);
2788 /* Insns carrying these notes are useful later on. */
2789 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2792 if (GET_CODE (pat) == SET && set_noop_p (pat))
2795 if (GET_CODE (pat) == PARALLEL)
2798 /* If nothing but SETs of registers to themselves,
2799 this insn can also be deleted. */
2800 for (i = 0; i < XVECLEN (pat, 0); i++)
2802 rtx tem = XVECEXP (pat, 0, i);
2804 if (GET_CODE (tem) == USE
2805 || GET_CODE (tem) == CLOBBER)
2808 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2817 /* Delete any insns that copy a register to itself. */
2820 delete_noop_moves (f)
2824 for (insn = f; insn; insn = NEXT_INSN (insn))
2826 if (GET_CODE (insn) == INSN && noop_move_p (insn))
2828 PUT_CODE (insn, NOTE);
2829 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2830 NOTE_SOURCE_FILE (insn) = 0;
2835 /* Determine if the stack pointer is constant over the life of the function.
2836 Only useful before prologues have been emitted. */
2839 notice_stack_pointer_modification_1 (x, pat, data)
2841 rtx pat ATTRIBUTE_UNUSED;
2842 void *data ATTRIBUTE_UNUSED;
2844 if (x == stack_pointer_rtx
2845 /* The stack pointer is only modified indirectly as the result
2846 of a push until later in flow. See the comments in rtl.texi
2847 regarding Embedded Side-Effects on Addresses. */
2848 || (GET_CODE (x) == MEM
2849 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2850 || GET_CODE (XEXP (x, 0)) == PRE_INC
2851 || GET_CODE (XEXP (x, 0)) == POST_DEC
2852 || GET_CODE (XEXP (x, 0)) == POST_INC)
2853 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2854 current_function_sp_is_unchanging = 0;
2858 notice_stack_pointer_modification (f)
2863 /* Assume that the stack pointer is unchanging if alloca hasn't
2865 current_function_sp_is_unchanging = !current_function_calls_alloca;
2866 if (! current_function_sp_is_unchanging)
2869 for (insn = f; insn; insn = NEXT_INSN (insn))
2871 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2873 /* Check if insn modifies the stack pointer. */
2874 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
2876 if (! current_function_sp_is_unchanging)
2882 /* Mark a register in SET. Hard registers in large modes get all
2883 of their component registers set as well. */
2885 mark_reg (reg, xset)
2889 regset set = (regset) xset;
2890 int regno = REGNO (reg);
2892 if (GET_MODE (reg) == BLKmode)
2895 SET_REGNO_REG_SET (set, regno);
2896 if (regno < FIRST_PSEUDO_REGISTER)
2898 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2900 SET_REGNO_REG_SET (set, regno + n);
2904 /* Mark those regs which are needed at the end of the function as live
2905 at the end of the last basic block. */
2907 mark_regs_live_at_end (set)
2912 /* If exiting needs the right stack value, consider the stack pointer
2913 live at the end of the function. */
2914 if ((HAVE_epilogue && reload_completed)
2915 || ! EXIT_IGNORE_STACK
2916 || (! FRAME_POINTER_REQUIRED
2917 && ! current_function_calls_alloca
2918 && flag_omit_frame_pointer)
2919 || current_function_sp_is_unchanging)
2921 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2924 /* Mark the frame pointer if needed at the end of the function. If
2925 we end up eliminating it, it will be removed from the live list
2926 of each basic block by reload. */
2928 if (! reload_completed || frame_pointer_needed)
2930 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2931 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2932 /* If they are different, also mark the hard frame pointer as live */
2933 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
2937 #ifdef PIC_OFFSET_TABLE_REGNUM
2938 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
2939 /* Many architectures have a GP register even without flag_pic.
2940 Assume the pic register is not in use, or will be handled by
2941 other means, if it is not fixed. */
2942 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
2943 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
2947 /* Mark all global registers, and all registers used by the epilogue
2948 as being live at the end of the function since they may be
2949 referenced by our caller. */
2950 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2952 #ifdef EPILOGUE_USES
2953 || EPILOGUE_USES (i)
2956 SET_REGNO_REG_SET (set, i);
2958 /* Mark all call-saved registers that we actaully used. */
2959 if (HAVE_epilogue && reload_completed)
2961 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2962 if (! call_used_regs[i] && regs_ever_live[i])
2963 SET_REGNO_REG_SET (set, i);
2966 /* Mark function return value. */
2967 diddle_return_value (mark_reg, set);
2970 /* Callback function for for_each_successor_phi. DATA is a regset.
2971 Sets the SRC_REGNO, the regno of the phi alternative for phi node
2972 INSN, in the regset. */
2975 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
2976 rtx insn ATTRIBUTE_UNUSED;
2977 int dest_regno ATTRIBUTE_UNUSED;
2981 regset live = (regset) data;
2982 SET_REGNO_REG_SET (live, src_regno);
2986 /* Propagate global life info around the graph of basic blocks. Begin
2987 considering blocks with their corresponding bit set in BLOCKS_IN.
2988 BLOCKS_OUT is set for every block that was changed. */
2991 calculate_global_regs_live (blocks_in, blocks_out, flags)
2992 sbitmap blocks_in, blocks_out;
2995 basic_block *queue, *qhead, *qtail, *qend;
2996 regset tmp, new_live_at_end;
2997 regset_head tmp_head;
2998 regset_head new_live_at_end_head;
3001 tmp = INITIALIZE_REG_SET (tmp_head);
3002 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
3004 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
3005 because the `head == tail' style test for an empty queue doesn't
3006 work with a full queue. */
3007 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
3009 qhead = qend = queue + n_basic_blocks + 2;
3011 /* Clear out the garbage that might be hanging out in bb->aux. */
3012 for (i = n_basic_blocks - 1; i >= 0; --i)
3013 BASIC_BLOCK (i)->aux = NULL;
3015 /* Queue the blocks set in the initial mask. Do this in reverse block
3016 number order so that we are more likely for the first round to do
3017 useful work. We use AUX non-null to flag that the block is queued. */
3018 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3020 basic_block bb = BASIC_BLOCK (i);
3025 sbitmap_zero (blocks_out);
3027 while (qhead != qtail)
3029 int rescan, changed;
3038 /* Begin by propogating live_at_start from the successor blocks. */
3039 CLEAR_REG_SET (new_live_at_end);
3040 for (e = bb->succ; e ; e = e->succ_next)
3042 basic_block sb = e->dest;
3043 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3046 /* Regs used in phi nodes are not included in
3047 global_live_at_start, since they are live only along a
3048 particular edge. Set those regs that are live because of a
3049 phi node alternative corresponding to this particular block. */
3050 for_each_successor_phi (bb, &set_phi_alternative_reg,
3053 if (bb == ENTRY_BLOCK_PTR)
3055 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3059 /* On our first pass through this block, we'll go ahead and continue.
3060 Recognize first pass by local_set NULL. On subsequent passes, we
3061 get to skip out early if live_at_end wouldn't have changed. */
3063 if (bb->local_set == NULL)
3065 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3070 /* If any bits were removed from live_at_end, we'll have to
3071 rescan the block. This wouldn't be necessary if we had
3072 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3073 local_live is really dependant on live_at_end. */
3074 CLEAR_REG_SET (tmp);
3075 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3076 new_live_at_end, BITMAP_AND_COMPL);
3080 /* Find the set of changed bits. Take this opportunity
3081 to notice that this set is empty and early out. */
3082 CLEAR_REG_SET (tmp);
3083 changed = bitmap_operation (tmp, bb->global_live_at_end,
3084 new_live_at_end, BITMAP_XOR);
3088 /* If any of the changed bits overlap with local_set,
3089 we'll have to rescan the block. Detect overlap by
3090 the AND with ~local_set turning off bits. */
3091 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3096 /* Let our caller know that BB changed enough to require its
3097 death notes updated. */
3098 SET_BIT (blocks_out, bb->index);
3102 /* Add to live_at_start the set of all registers in
3103 new_live_at_end that aren't in the old live_at_end. */
3105 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3107 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3109 changed = bitmap_operation (bb->global_live_at_start,
3110 bb->global_live_at_start,
3117 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3119 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3120 into live_at_start. */
3121 propagate_block (bb, new_live_at_end, bb->local_set, flags);
3123 /* If live_at start didn't change, no need to go farther. */
3124 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3127 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3130 /* Queue all predecessors of BB so that we may re-examine
3131 their live_at_end. */
3132 for (e = bb->pred; e ; e = e->pred_next)
3134 basic_block pb = e->src;
3135 if (pb->aux == NULL)
3146 FREE_REG_SET (new_live_at_end);
3148 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3150 basic_block bb = BASIC_BLOCK (i);
3151 FREE_REG_SET (bb->local_set);
3157 /* Subroutines of life analysis. */
3159 /* Allocate the permanent data structures that represent the results
3160 of life analysis. Not static since used also for stupid life analysis. */
3163 allocate_bb_life_data ()
3167 for (i = 0; i < n_basic_blocks; i++)
3169 basic_block bb = BASIC_BLOCK (i);
3171 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3172 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3175 ENTRY_BLOCK_PTR->global_live_at_end
3176 = OBSTACK_ALLOC_REG_SET (function_obstack);
3177 EXIT_BLOCK_PTR->global_live_at_start
3178 = OBSTACK_ALLOC_REG_SET (function_obstack);
3180 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
3184 allocate_reg_life_data ()
3188 /* Recalculate the register space, in case it has grown. Old style
3189 vector oriented regsets would set regset_{size,bytes} here also. */
3190 allocate_reg_info (max_regno, FALSE, FALSE);
3192 /* Reset all the data we'll collect in propagate_block and its
3194 for (i = 0; i < max_regno; i++)
3198 REG_N_DEATHS (i) = 0;
3199 REG_N_CALLS_CROSSED (i) = 0;
3200 REG_LIVE_LENGTH (i) = 0;
3201 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3205 /* Delete dead instructions for propagate_block. */
3208 propagate_block_delete_insn (bb, insn)
3212 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
3214 /* If the insn referred to a label, and that label was attached to
3215 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
3216 pretty much mandatory to delete it, because the ADDR_VEC may be
3217 referencing labels that no longer exist. */
3221 rtx label = XEXP (inote, 0);
3224 if (LABEL_NUSES (label) == 1
3225 && (next = next_nonnote_insn (label)) != NULL
3226 && GET_CODE (next) == JUMP_INSN
3227 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3228 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3230 rtx pat = PATTERN (next);
3231 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3232 int len = XVECLEN (pat, diff_vec_p);
3235 for (i = 0; i < len; i++)
3236 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3238 flow_delete_insn (next);
3242 if (bb->end == insn)
3243 bb->end = PREV_INSN (insn);
3244 flow_delete_insn (insn);
3247 /* Delete dead libcalls for propagate_block. Return the insn
3248 before the libcall. */
3251 propagate_block_delete_libcall (bb, insn, note)
3255 rtx first = XEXP (note, 0);
3256 rtx before = PREV_INSN (first);
3258 if (insn == bb->end)
3261 flow_delete_insn_chain (first, insn);
3265 /* Update the life-status of regs for one insn. Return the previous insn. */
3268 propagate_one_insn (pbi, insn)
3269 struct propagate_block_info *pbi;
3272 rtx prev = PREV_INSN (insn);
3273 int flags = pbi->flags;
3274 int insn_is_dead = 0;
3275 int libcall_is_dead = 0;
3279 if (! INSN_P (insn))
3282 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3283 if (flags & PROP_SCAN_DEAD_CODE)
3285 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0,
3287 libcall_is_dead = (insn_is_dead && note != 0
3288 && libcall_dead_p (pbi, PATTERN (insn),
3292 /* We almost certainly don't want to delete prologue or epilogue
3293 instructions. Warn about probable compiler losage. */
3296 && (((HAVE_epilogue || HAVE_prologue)
3297 && prologue_epilogue_contains (insn))
3298 || (HAVE_sibcall_epilogue
3299 && sibcall_epilogue_contains (insn))))
3301 if (flags & PROP_KILL_DEAD_CODE)
3303 warning ("ICE: would have deleted prologue/epilogue insn");
3304 if (!inhibit_warnings)
3307 libcall_is_dead = insn_is_dead = 0;
3310 /* If an instruction consists of just dead store(s) on final pass,
3312 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3314 if (libcall_is_dead)
3316 prev = propagate_block_delete_libcall (pbi->bb, insn, note);
3317 insn = NEXT_INSN (prev);
3320 propagate_block_delete_insn (pbi->bb, insn);
3322 /* CC0 is now known to be dead. Either this insn used it,
3323 in which case it doesn't anymore, or clobbered it,
3324 so the next insn can't use it. */
3330 /* See if this is an increment or decrement that can be merged into
3331 a following memory address. */
3334 register rtx x = single_set (insn);
3336 /* Does this instruction increment or decrement a register? */
3337 if (!reload_completed
3338 && (flags & PROP_AUTOINC)
3340 && GET_CODE (SET_DEST (x)) == REG
3341 && (GET_CODE (SET_SRC (x)) == PLUS
3342 || GET_CODE (SET_SRC (x)) == MINUS)
3343 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3344 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3345 /* Ok, look for a following memory ref we can combine with.
3346 If one is found, change the memory ref to a PRE_INC
3347 or PRE_DEC, cancel this insn, and return 1.
3348 Return 0 if nothing has been done. */
3349 && try_pre_increment_1 (pbi, insn))
3352 #endif /* AUTO_INC_DEC */
3354 CLEAR_REG_SET (pbi->new_live);
3355 CLEAR_REG_SET (pbi->new_dead);
3357 /* If this is not the final pass, and this insn is copying the value of
3358 a library call and it's dead, don't scan the insns that perform the
3359 library call, so that the call's arguments are not marked live. */
3360 if (libcall_is_dead)
3362 /* Record the death of the dest reg. */
3363 mark_set_regs (pbi, PATTERN (insn), insn);
3365 insn = XEXP (note, 0);
3366 return PREV_INSN (insn);
3368 else if (GET_CODE (PATTERN (insn)) == SET
3369 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3370 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3371 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3372 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3373 /* We have an insn to pop a constant amount off the stack.
3374 (Such insns use PLUS regardless of the direction of the stack,
3375 and any insn to adjust the stack by a constant is always a pop.)
3376 These insns, if not dead stores, have no effect on life. */
3380 /* Any regs live at the time of a call instruction must not go
3381 in a register clobbered by calls. Find all regs now live and
3382 record this for them. */
3384 if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
3385 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3386 { REG_N_CALLS_CROSSED (i)++; });
3388 /* Record sets. Do this even for dead instructions, since they
3389 would have killed the values if they hadn't been deleted. */
3390 mark_set_regs (pbi, PATTERN (insn), insn);
3392 if (GET_CODE (insn) == CALL_INSN)
3398 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3399 cond = COND_EXEC_TEST (PATTERN (insn));
3401 /* Non-constant calls clobber memory. */
3402 if (! CONST_CALL_P (insn))
3403 free_EXPR_LIST_list (&pbi->mem_set_list);
3405 /* There may be extra registers to be clobbered. */
3406 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3408 note = XEXP (note, 1))
3409 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3410 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
3411 cond, insn, pbi->flags);
3413 /* Calls change all call-used and global registers. */
3414 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3415 if (call_used_regs[i] && ! global_regs[i]
3418 /* We do not want REG_UNUSED notes for these registers. */
3419 mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
3420 cond, insn, pbi->flags & ~PROP_DEATH_NOTES);
3424 /* If an insn doesn't use CC0, it becomes dead since we assume
3425 that every insn clobbers it. So show it dead here;
3426 mark_used_regs will set it live if it is referenced. */
3431 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
3433 /* Sometimes we may have inserted something before INSN (such as a move)
3434 when we make an auto-inc. So ensure we will scan those insns. */
3436 prev = PREV_INSN (insn);
3439 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3445 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3446 cond = COND_EXEC_TEST (PATTERN (insn));
3448 /* Calls use their arguments. */
3449 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3451 note = XEXP (note, 1))
3452 if (GET_CODE (XEXP (note, 0)) == USE)
3453 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
3456 /* The stack ptr is used (honorarily) by a CALL insn. */
3457 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
3459 /* Calls may also reference any of the global registers,
3460 so they are made live. */
3461 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3463 mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
3468 /* Update reg_live for the registers killed and used. */
3469 AND_COMPL_REG_SET (pbi->reg_live, pbi->new_dead);
3470 IOR_REG_SET (pbi->reg_live, pbi->new_live);
3472 /* On final pass, update counts of how many insns in which each reg
3474 if (flags & PROP_REG_INFO)
3475 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3476 { REG_LIVE_LENGTH (i)++; });
3481 /* Initialize a propagate_block_info struct for public consumption.
3482 Note that the structure itself is opaque to this file, but that
3483 the user can use the regsets provided here. */
3485 struct propagate_block_info *
3486 init_propagate_block_info (bb, live, local_set, flags)
3492 struct propagate_block_info *pbi = xmalloc (sizeof(*pbi));
3495 pbi->reg_live = live;
3496 pbi->mem_set_list = NULL_RTX;
3497 pbi->local_set = local_set;
3501 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3502 pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
3504 pbi->reg_next_use = NULL;
3506 pbi->new_live = BITMAP_XMALLOC ();
3507 pbi->new_dead = BITMAP_XMALLOC ();
3512 /* Release a propagate_block_info struct. */
3515 free_propagate_block_info (pbi)
3516 struct propagate_block_info *pbi;
3518 free_EXPR_LIST_list (&pbi->mem_set_list);
3520 BITMAP_XFREE (pbi->new_live);
3521 BITMAP_XFREE (pbi->new_dead);
3523 if (pbi->reg_next_use)
3524 free (pbi->reg_next_use);
3529 /* Compute the registers live at the beginning of a basic block BB from
3530 those live at the end.
3532 When called, REG_LIVE contains those live at the end. On return, it
3533 contains those live at the beginning.
3535 LOCAL_SET, if non-null, will be set with all registers killed by
3536 this basic block. */
3539 propagate_block (bb, live, local_set, flags)
3545 struct propagate_block_info *pbi;
3548 pbi = init_propagate_block_info (bb, live, local_set, flags);
3550 if (flags & PROP_REG_INFO)
3554 /* Process the regs live at the end of the block.
3555 Mark them as not local to any one basic block. */
3556 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
3557 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
3560 /* Scan the block an insn at a time from end to beginning. */
3562 for (insn = bb->end; ; insn = prev)
3564 /* If this is a call to `setjmp' et al, warn if any
3565 non-volatile datum is live. */
3566 if ((flags & PROP_REG_INFO)
3567 && GET_CODE (insn) == NOTE
3568 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3569 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
3571 prev = propagate_one_insn (pbi, insn);
3573 if (insn == bb->head)
3577 free_propagate_block_info (pbi);
3580 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3581 (SET expressions whose destinations are registers dead after the insn).
3582 NEEDED is the regset that says which regs are alive after the insn.
3584 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3586 If X is the entire body of an insn, NOTES contains the reg notes
3587 pertaining to the insn. */
3590 insn_dead_p (pbi, x, call_ok, notes)
3591 struct propagate_block_info *pbi;
3594 rtx notes ATTRIBUTE_UNUSED;
3596 enum rtx_code code = GET_CODE (x);
3599 /* If flow is invoked after reload, we must take existing AUTO_INC
3600 expresions into account. */
3601 if (reload_completed)
3603 for ( ; notes; notes = XEXP (notes, 1))
3605 if (REG_NOTE_KIND (notes) == REG_INC)
3607 int regno = REGNO (XEXP (notes, 0));
3609 /* Don't delete insns to set global regs. */
3610 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3611 || REGNO_REG_SET_P (pbi->reg_live, regno))
3618 /* If setting something that's a reg or part of one,
3619 see if that register's altered value will be live. */
3623 rtx r = SET_DEST (x);
3626 if (GET_CODE (r) == CC0)
3627 return ! pbi->cc0_live;
3630 /* A SET that is a subroutine call cannot be dead. */
3631 if (GET_CODE (SET_SRC (x)) == CALL)
3637 /* Don't eliminate loads from volatile memory or volatile asms. */
3638 else if (volatile_refs_p (SET_SRC (x)))
3641 if (GET_CODE (r) == MEM)
3645 if (MEM_VOLATILE_P (r))
3648 /* Walk the set of memory locations we are currently tracking
3649 and see if one is an identical match to this memory location.
3650 If so, this memory write is dead (remember, we're walking
3651 backwards from the end of the block to the start. */
3652 temp = pbi->mem_set_list;
3655 if (rtx_equal_p (XEXP (temp, 0), r))
3657 temp = XEXP (temp, 1);
3662 while (GET_CODE (r) == SUBREG
3663 || GET_CODE (r) == STRICT_LOW_PART
3664 || GET_CODE (r) == ZERO_EXTRACT)
3667 if (GET_CODE (r) == REG)
3669 int regno = REGNO (r);
3672 if (REGNO_REG_SET_P (pbi->reg_live, regno))
3675 /* If this is a hard register, verify that subsequent
3676 words are not needed. */
3677 if (regno < FIRST_PSEUDO_REGISTER)
3679 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3682 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
3686 /* Don't delete insns to set global regs. */
3687 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3690 /* Make sure insns to set the stack pointer aren't deleted. */
3691 if (regno == STACK_POINTER_REGNUM)
3694 /* Make sure insns to set the frame pointer aren't deleted. */
3695 if (regno == FRAME_POINTER_REGNUM
3696 && (! reload_completed || frame_pointer_needed))
3698 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3699 if (regno == HARD_FRAME_POINTER_REGNUM
3700 && (! reload_completed || frame_pointer_needed))
3704 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3705 /* Make sure insns to set arg pointer are never deleted
3706 (if the arg pointer isn't fixed, there will be a USE
3707 for it, so we can treat it normally). */
3708 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3712 /* Otherwise, the set is dead. */
3718 /* If performing several activities, insn is dead if each activity
3719 is individually dead. Also, CLOBBERs and USEs can be ignored; a
3720 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
3722 else if (code == PARALLEL)
3724 int i = XVECLEN (x, 0);
3726 for (i--; i >= 0; i--)
3727 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3728 && GET_CODE (XVECEXP (x, 0, i)) != USE
3729 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
3735 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3736 is not necessarily true for hard registers. */
3737 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3738 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3739 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
3742 /* We do not check other CLOBBER or USE here. An insn consisting of just
3743 a CLOBBER or just a USE should not be deleted. */
3747 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3748 return 1 if the entire library call is dead.
3749 This is true if X copies a register (hard or pseudo)
3750 and if the hard return reg of the call insn is dead.
3751 (The caller should have tested the destination of X already for death.)
3753 If this insn doesn't just copy a register, then we don't
3754 have an ordinary libcall. In that case, cse could not have
3755 managed to substitute the source for the dest later on,
3756 so we can assume the libcall is dead.
3758 NEEDED is the bit vector of pseudoregs live before this insn.
3759 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
3762 libcall_dead_p (pbi, x, note, insn)
3763 struct propagate_block_info *pbi;
3768 register RTX_CODE code = GET_CODE (x);
3772 register rtx r = SET_SRC (x);
3773 if (GET_CODE (r) == REG)
3775 rtx call = XEXP (note, 0);
3779 /* Find the call insn. */
3780 while (call != insn && GET_CODE (call) != CALL_INSN)
3781 call = NEXT_INSN (call);
3783 /* If there is none, do nothing special,
3784 since ordinary death handling can understand these insns. */
3788 /* See if the hard reg holding the value is dead.
3789 If this is a PARALLEL, find the call within it. */
3790 call_pat = PATTERN (call);
3791 if (GET_CODE (call_pat) == PARALLEL)
3793 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3794 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
3795 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
3798 /* This may be a library call that is returning a value
3799 via invisible pointer. Do nothing special, since
3800 ordinary death handling can understand these insns. */
3804 call_pat = XVECEXP (call_pat, 0, i);
3807 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
3813 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3814 live at function entry. Don't count global register variables, variables
3815 in registers that can be used for function arg passing, or variables in
3816 fixed hard registers. */
3819 regno_uninitialized (regno)
3822 if (n_basic_blocks == 0
3823 || (regno < FIRST_PSEUDO_REGISTER
3824 && (global_regs[regno]
3825 || fixed_regs[regno]
3826 || FUNCTION_ARG_REGNO_P (regno))))
3829 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3832 /* 1 if register REGNO was alive at a place where `setjmp' was called
3833 and was set more than once or is an argument.
3834 Such regs may be clobbered by `longjmp'. */
3837 regno_clobbered_at_setjmp (regno)
3840 if (n_basic_blocks == 0)
3843 return ((REG_N_SETS (regno) > 1
3844 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3845 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
3848 /* INSN references memory, possibly using autoincrement addressing modes.
3849 Find any entries on the mem_set_list that need to be invalidated due
3850 to an address change. */
3852 invalidate_mems_from_autoinc (pbi, insn)
3853 struct propagate_block_info *pbi;
3856 rtx note = REG_NOTES (insn);
3857 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3859 if (REG_NOTE_KIND (note) == REG_INC)
3861 rtx temp = pbi->mem_set_list;
3862 rtx prev = NULL_RTX;
3867 next = XEXP (temp, 1);
3868 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
3870 /* Splice temp out of list. */
3872 XEXP (prev, 1) = next;
3874 pbi->mem_set_list = next;
3875 free_EXPR_LIST_node (temp);
3885 /* Process the registers that are set within X. Their bits are set to
3886 1 in the regset DEAD, because they are dead prior to this insn.
3888 If INSN is nonzero, it is the insn being processed.
3890 FLAGS is the set of operations to perform. */
3893 mark_set_regs (pbi, x, insn)
3894 struct propagate_block_info *pbi;
3897 rtx cond = NULL_RTX;
3901 switch (code = GET_CODE (x))
3905 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
3909 cond = COND_EXEC_TEST (x);
3910 x = COND_EXEC_CODE (x);
3916 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3918 rtx sub = XVECEXP (x, 0, i);
3919 switch (code = GET_CODE (sub))
3922 if (cond != NULL_RTX)
3925 cond = COND_EXEC_TEST (sub);
3926 sub = COND_EXEC_CODE (sub);
3927 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
3933 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
3948 /* Process a single SET rtx, X. */
3951 mark_set_1 (pbi, code, reg, cond, insn, flags)
3952 struct propagate_block_info *pbi;
3954 rtx reg, cond, insn;
3957 int regno_first = -1, regno_last = -1;
3960 /* Some targets place small structures in registers for
3961 return values of functions. We have to detect this
3962 case specially here to get correct flow information. */
3963 if (GET_CODE (reg) == PARALLEL
3964 && GET_MODE (reg) == BLKmode)
3966 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
3967 mark_set_1 (pbi, code, XVECEXP (reg, 0, i), cond, insn, flags);
3971 /* Modifying just one hardware register of a multi-reg value or just a
3972 byte field of a register does not mean the value from before this insn
3973 is now dead. But it does mean liveness of that register at the end of
3974 the block is significant.
3976 Within mark_set_1, however, we treat it as if the register is indeed
3977 modified. mark_used_regs will, however, also treat this register as
3978 being used. Thus, we treat these insns as setting a new value for the
3979 register as a function of its old value. This cases LOG_LINKS to be
3980 made appropriately and this will help combine.
3982 ??? This is all done incorrectly. We should not be setting bits in
3983 new_dead for these registers, since, as we just explained, they are
3984 not dead. We should be setting bits in local_set, and updating
3985 LOG_LINKS, but that is different. */
3987 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
3988 || GET_CODE (reg) == SIGN_EXTRACT
3989 || GET_CODE (reg) == STRICT_LOW_PART)
3990 reg = XEXP (reg, 0);
3992 /* If this set is a MEM, then it kills any aliased writes.
3993 If this set is a REG, then it kills any MEMs which use the reg. */
3994 if (flags & PROP_SCAN_DEAD_CODE)
3996 if (GET_CODE (reg) == MEM || GET_CODE (reg) == REG)
3998 rtx temp = pbi->mem_set_list;
3999 rtx prev = NULL_RTX;
4004 next = XEXP (temp, 1);
4005 if ((GET_CODE (reg) == MEM
4006 && output_dependence (XEXP (temp, 0), reg))
4007 || (GET_CODE (reg) == REG
4008 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
4010 /* Splice this entry out of the list. */
4012 XEXP (prev, 1) = next;
4014 pbi->mem_set_list = next;
4015 free_EXPR_LIST_node (temp);
4023 /* If the memory reference had embedded side effects (autoincrement
4024 address modes. Then we may need to kill some entries on the
4026 if (insn && GET_CODE (reg) == MEM)
4027 invalidate_mems_from_autoinc (pbi, insn);
4029 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
4030 /* We do not know the size of a BLKmode store, so we do not track
4031 them for redundant store elimination. */
4032 && GET_MODE (reg) != BLKmode
4033 /* There are no REG_INC notes for SP, so we can't assume we'll see
4034 everything that invalidates it. To be safe, don't eliminate any
4035 stores though SP; none of them should be redundant anyway. */
4036 && ! reg_mentioned_p (stack_pointer_rtx, reg))
4037 pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
4040 if (GET_CODE (reg) == REG
4041 && (regno_first = REGNO (reg),
4042 ! (regno_first == FRAME_POINTER_REGNUM
4043 && (! reload_completed || frame_pointer_needed)))
4044 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4045 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
4046 && (! reload_completed || frame_pointer_needed))
4048 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4049 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
4053 int some_was_live = 0, some_was_dead = 0;
4055 if (regno_first < FIRST_PSEUDO_REGISTER)
4056 regno_last = (regno_first
4057 + HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1);
4059 regno_last = regno_first;
4061 for (i = regno_first; i <= regno_last; ++i)
4063 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
4065 SET_REGNO_REG_SET (pbi->local_set, i);
4067 some_was_live |= needed_regno;
4068 some_was_dead |= ! needed_regno;
4071 /* Additional data to record if this is the final pass. */
4072 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
4073 | PROP_DEATH_NOTES | PROP_AUTOINC))
4076 register int blocknum = pbi->bb->index;
4079 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4081 y = pbi->reg_next_use[regno_first];
4083 /* The next use is no longer next, since a store intervenes. */
4084 for (i = regno_first; i <= regno_last; ++i)
4085 pbi->reg_next_use[i] = 0;
4088 if (flags & PROP_REG_INFO)
4090 for (i = regno_first; i <= regno_last; ++i)
4092 /* Count (weighted) references, stores, etc. This counts a
4093 register twice if it is modified, but that is correct. */
4094 REG_N_SETS (i) += 1;
4095 REG_N_REFS (i) += (optimize_size ? 1
4096 : pbi->bb->loop_depth + 1);
4098 /* The insns where a reg is live are normally counted
4099 elsewhere, but we want the count to include the insn
4100 where the reg is set, and the normal counting mechanism
4101 would not count it. */
4102 REG_LIVE_LENGTH (i) += 1;
4105 /* If this is a hard reg, record this function uses the reg. */
4106 if (regno_first < FIRST_PSEUDO_REGISTER)
4108 for (i = regno_first; i <= regno_last; i++)
4109 regs_ever_live[i] = 1;
4113 /* Keep track of which basic blocks each reg appears in. */
4114 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
4115 REG_BASIC_BLOCK (regno_first) = blocknum;
4116 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
4117 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
4121 if (! some_was_dead)
4123 if (flags & PROP_LOG_LINKS)
4125 /* Make a logical link from the next following insn
4126 that uses this register, back to this insn.
4127 The following insns have already been processed.
4129 We don't build a LOG_LINK for hard registers containing
4130 in ASM_OPERANDs. If these registers get replaced,
4131 we might wind up changing the semantics of the insn,
4132 even if reload can make what appear to be valid
4133 assignments later. */
4134 if (y && (BLOCK_NUM (y) == blocknum)
4135 && (regno_first >= FIRST_PSEUDO_REGISTER
4136 || asm_noperands (PATTERN (y)) < 0))
4137 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4140 else if (! some_was_live)
4142 if (flags & PROP_REG_INFO)
4143 REG_N_DEATHS (regno_first) += 1;
4145 if (flags & PROP_DEATH_NOTES)
4147 /* Note that dead stores have already been deleted
4148 when possible. If we get here, we have found a
4149 dead store that cannot be eliminated (because the
4150 same insn does something useful). Indicate this
4151 by marking the reg being set as dying here. */
4153 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4158 if (flags & PROP_DEATH_NOTES)
4160 /* This is a case where we have a multi-word hard register
4161 and some, but not all, of the words of the register are
4162 needed in subsequent insns. Write REG_UNUSED notes
4163 for those parts that were not needed. This case should
4166 for (i = regno_first; i <= regno_last; ++i)
4167 if (! REGNO_REG_SET_P (pbi->reg_live, i))
4169 = alloc_EXPR_LIST (REG_UNUSED,
4170 gen_rtx_REG (reg_raw_mode[i], i),
4176 /* Mark the register as being dead. */
4178 /* The stack pointer is never dead. Well, not strictly true,
4179 but it's very difficult to tell from here. Hopefully
4180 combine_stack_adjustments will fix up the most egregious
4182 && regno_first != STACK_POINTER_REGNUM)
4184 for (i = regno_first; i <= regno_last; ++i)
4185 SET_REGNO_REG_SET (pbi->new_dead, i);
4188 else if (GET_CODE (reg) == REG)
4190 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4191 pbi->reg_next_use[regno_first] = 0;
4194 /* If this is the last pass and this is a SCRATCH, show it will be dying
4195 here and count it. */
4196 else if (GET_CODE (reg) == SCRATCH)
4198 if (flags & PROP_DEATH_NOTES)
4200 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4206 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
4210 find_auto_inc (pbi, x, insn)
4211 struct propagate_block_info *pbi;
4215 rtx addr = XEXP (x, 0);
4216 HOST_WIDE_INT offset = 0;
4219 /* Here we detect use of an index register which might be good for
4220 postincrement, postdecrement, preincrement, or predecrement. */
4222 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4223 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
4225 if (GET_CODE (addr) == REG)
4228 register int size = GET_MODE_SIZE (GET_MODE (x));
4231 int regno = REGNO (addr);
4233 /* Is the next use an increment that might make auto-increment? */
4234 if ((incr = pbi->reg_next_use[regno]) != 0
4235 && (set = single_set (incr)) != 0
4236 && GET_CODE (set) == SET
4237 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
4238 /* Can't add side effects to jumps; if reg is spilled and
4239 reloaded, there's no way to store back the altered value. */
4240 && GET_CODE (insn) != JUMP_INSN
4241 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
4242 && XEXP (y, 0) == addr
4243 && GET_CODE (XEXP (y, 1)) == CONST_INT
4244 && ((HAVE_POST_INCREMENT
4245 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
4246 || (HAVE_POST_DECREMENT
4247 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
4248 || (HAVE_PRE_INCREMENT
4249 && (INTVAL (XEXP (y, 1)) == size && offset == size))
4250 || (HAVE_PRE_DECREMENT
4251 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
4252 /* Make sure this reg appears only once in this insn. */
4253 && (use = find_use_as_address (PATTERN (insn), addr, offset),
4254 use != 0 && use != (rtx) 1))
4256 rtx q = SET_DEST (set);
4257 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
4258 ? (offset ? PRE_INC : POST_INC)
4259 : (offset ? PRE_DEC : POST_DEC));
4261 if (dead_or_set_p (incr, addr)
4262 /* Mustn't autoinc an eliminable register. */
4263 && (regno >= FIRST_PSEUDO_REGISTER
4264 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
4266 /* This is the simple case. Try to make the auto-inc. If
4267 we can't, we are done. Otherwise, we will do any
4268 needed updates below. */
4269 if (! validate_change (insn, &XEXP (x, 0),
4270 gen_rtx_fmt_e (inc_code, Pmode, addr),
4274 else if (GET_CODE (q) == REG
4275 /* PREV_INSN used here to check the semi-open interval
4277 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4278 /* We must also check for sets of q as q may be
4279 a call clobbered hard register and there may
4280 be a call between PREV_INSN (insn) and incr. */
4281 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4283 /* We have *p followed sometime later by q = p+size.
4284 Both p and q must be live afterward,
4285 and q is not used between INSN and its assignment.
4286 Change it to q = p, ...*q..., q = q+size.
4287 Then fall into the usual case. */
4291 emit_move_insn (q, addr);
4292 insns = get_insns ();
4295 if (basic_block_for_insn)
4296 for (temp = insns; temp; temp = NEXT_INSN (temp))
4297 set_block_for_insn (temp, pbi->bb);
4299 /* If we can't make the auto-inc, or can't make the
4300 replacement into Y, exit. There's no point in making
4301 the change below if we can't do the auto-inc and doing
4302 so is not correct in the pre-inc case. */
4304 validate_change (insn, &XEXP (x, 0),
4305 gen_rtx_fmt_e (inc_code, Pmode, q),
4307 validate_change (incr, &XEXP (y, 0), q, 1);
4308 if (! apply_change_group ())
4311 /* We now know we'll be doing this change, so emit the
4312 new insn(s) and do the updates. */
4313 emit_insns_before (insns, insn);
4315 if (pbi->bb->head == insn)
4316 pbi->bb->head = insns;
4318 /* INCR will become a NOTE and INSN won't contain a
4319 use of ADDR. If a use of ADDR was just placed in
4320 the insn before INSN, make that the next use.
4321 Otherwise, invalidate it. */
4322 if (GET_CODE (PREV_INSN (insn)) == INSN
4323 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4324 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
4325 pbi->reg_next_use[regno] = PREV_INSN (insn);
4327 pbi->reg_next_use[regno] = 0;
4332 /* REGNO is now used in INCR which is below INSN, but it
4333 previously wasn't live here. If we don't mark it as
4334 live, we'll put a REG_DEAD note for it on this insn,
4335 which is incorrect. */
4336 SET_REGNO_REG_SET (pbi->reg_live, regno);
4338 /* If there are any calls between INSN and INCR, show
4339 that REGNO now crosses them. */
4340 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4341 if (GET_CODE (temp) == CALL_INSN)
4342 REG_N_CALLS_CROSSED (regno)++;
4347 /* If we haven't returned, it means we were able to make the
4348 auto-inc, so update the status. First, record that this insn
4349 has an implicit side effect. */
4352 = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
4354 /* Modify the old increment-insn to simply copy
4355 the already-incremented value of our register. */
4356 if (! validate_change (incr, &SET_SRC (set), addr, 0))
4359 /* If that makes it a no-op (copying the register into itself) delete
4360 it so it won't appear to be a "use" and a "set" of this
4362 if (SET_DEST (set) == addr)
4364 /* If the original source was dead, it's dead now. */
4365 rtx note = find_reg_note (incr, REG_DEAD, NULL_RTX);
4366 if (note && XEXP (note, 0) != addr)
4367 SET_REGNO_REG_SET (pbi->new_dead, REGNO (XEXP (note, 0)));
4369 PUT_CODE (incr, NOTE);
4370 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4371 NOTE_SOURCE_FILE (incr) = 0;
4374 if (regno >= FIRST_PSEUDO_REGISTER)
4376 /* Count an extra reference to the reg. When a reg is
4377 incremented, spilling it is worse, so we want to make
4378 that less likely. */
4379 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4381 /* Count the increment as a setting of the register,
4382 even though it isn't a SET in rtl. */
4383 REG_N_SETS (regno)++;
4388 #endif /* AUTO_INC_DEC */
4391 mark_used_reg (pbi, reg, cond, insn)
4392 struct propagate_block_info *pbi;
4394 rtx cond ATTRIBUTE_UNUSED;
4397 int regno = REGNO (reg);
4398 int some_was_live = REGNO_REG_SET_P (pbi->reg_live, regno);
4399 int some_was_dead = ! some_was_live;
4401 SET_REGNO_REG_SET (pbi->new_live, regno);
4403 /* A hard reg in a wide mode may really be multiple registers.
4404 If so, mark all of them just like the first. */
4405 if (regno < FIRST_PSEUDO_REGISTER)
4407 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4410 int regno_n = regno + n;
4411 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, regno_n);
4413 SET_REGNO_REG_SET (pbi->new_live, regno_n);
4414 some_was_live |= needed_regno;
4415 some_was_dead |= ! needed_regno;
4419 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4421 /* Record where each reg is used, so when the reg is set we know
4422 the next insn that uses it. */
4423 pbi->reg_next_use[regno] = insn;
4426 if (pbi->flags & PROP_REG_INFO)
4428 if (regno < FIRST_PSEUDO_REGISTER)
4430 /* If this is a register we are going to try to eliminate,
4431 don't mark it live here. If we are successful in
4432 eliminating it, it need not be live unless it is used for
4433 pseudos, in which case it will have been set live when it
4434 was allocated to the pseudos. If the register will not
4435 be eliminated, reload will set it live at that point.
4437 Otherwise, record that this function uses this register. */
4438 /* ??? The PPC backend tries to "eliminate" on the pic
4439 register to itself. This should be fixed. In the mean
4440 time, hack around it. */
4442 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno)
4443 && (regno == FRAME_POINTER_REGNUM
4444 || regno == ARG_POINTER_REGNUM)))
4446 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4448 regs_ever_live[regno + --n] = 1;
4454 /* Keep track of which basic block each reg appears in. */
4456 register int blocknum = pbi->bb->index;
4457 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4458 REG_BASIC_BLOCK (regno) = blocknum;
4459 else if (REG_BASIC_BLOCK (regno) != blocknum)
4460 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4462 /* Count (weighted) number of uses of each reg. */
4463 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4467 /* Record and count the insns in which a reg dies. If it is used in
4468 this insn and was dead below the insn then it dies in this insn.
4469 If it was set in this insn, we do not make a REG_DEAD note;
4470 likewise if we already made such a note.
4472 ??? This could be done better. In new_dead we have a record of
4473 which registers are set or clobbered this insn (which in itself is
4474 slightly incorrect, see the commentary near strict_low_part in
4475 mark_set_1), which should be the set of registers that we do not
4476 wish to create death notes for under the above rule. Note that
4477 we have not yet processed the call-clobbered/call-used registers,
4478 and they do not quite follow the above rule, since we do want death
4479 notes for call-clobbered register arguments. Which begs the whole
4480 question of whether we should in fact have death notes for registers
4481 used and clobbered (but not set) in the same insn. The only useful
4482 thing we ought to be getting from dead_or_set_p is detection of
4483 duplicate death notes. */
4485 if ((pbi->flags & PROP_DEATH_NOTES)
4487 && ! dead_or_set_p (insn, reg))
4491 /* Check for the case where the register dying partially
4492 overlaps the register set by this insn. */
4493 if (regno < FIRST_PSEUDO_REGISTER
4494 && HARD_REGNO_NREGS (regno, GET_MODE (reg)) > 1)
4496 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4498 some_was_live |= dead_or_set_regno_p (insn, regno + n);
4501 /* If none of the words in X is needed, make a REG_DEAD note.
4502 Otherwise, we must make partial REG_DEAD notes. */
4503 if (! some_was_live)
4506 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
4507 REG_N_DEATHS (regno)++;
4511 /* Don't make a REG_DEAD note for a part of a register
4512 that is set in the insn. */
4514 n = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
4515 for (; n >= regno; n--)
4516 if (!REGNO_REG_SET_P (pbi->reg_live, n)
4517 && ! dead_or_set_regno_p (insn, n))
4519 = alloc_EXPR_LIST (REG_DEAD,
4520 gen_rtx_REG (reg_raw_mode[n], n),
4526 /* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
4527 This is done assuming the registers needed from X are those that
4528 have 1-bits in PBI->REG_LIVE.
4530 INSN is the containing instruction. If INSN is dead, this function
4534 mark_used_regs (pbi, x, cond, insn)
4535 struct propagate_block_info *pbi;
4538 register RTX_CODE code;
4540 int flags = pbi->flags;
4543 code = GET_CODE (x);
4563 /* If we are clobbering a MEM, mark any registers inside the address
4565 if (GET_CODE (XEXP (x, 0)) == MEM)
4566 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
4570 /* Don't bother watching stores to mems if this is not the
4571 final pass. We'll not be deleting dead stores this round. */
4572 if (flags & PROP_SCAN_DEAD_CODE)
4574 /* Invalidate the data for the last MEM stored, but only if MEM is
4575 something that can be stored into. */
4576 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
4577 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
4578 ; /* needn't clear the memory set list */
4581 rtx temp = pbi->mem_set_list;
4582 rtx prev = NULL_RTX;
4587 next = XEXP (temp, 1);
4588 if (anti_dependence (XEXP (temp, 0), x))
4590 /* Splice temp out of the list. */
4592 XEXP (prev, 1) = next;
4594 pbi->mem_set_list = next;
4595 free_EXPR_LIST_node (temp);
4603 /* If the memory reference had embedded side effects (autoincrement
4604 address modes. Then we may need to kill some entries on the
4607 invalidate_mems_from_autoinc (pbi, insn);
4611 if (flags & PROP_AUTOINC)
4612 find_auto_inc (pbi, x, insn);
4617 if (GET_CODE (SUBREG_REG (x)) == REG
4618 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
4619 && (GET_MODE_SIZE (GET_MODE (x))
4620 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
4621 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
4623 /* While we're here, optimize this case. */
4625 if (GET_CODE (x) != REG)
4630 /* See a register other than being set => mark it as needed. */
4631 mark_used_reg (pbi, x, cond, insn);
4636 register rtx testreg = SET_DEST (x);
4639 /* If storing into MEM, don't show it as being used. But do
4640 show the address as being used. */
4641 if (GET_CODE (testreg) == MEM)
4644 if (flags & PROP_AUTOINC)
4645 find_auto_inc (pbi, testreg, insn);
4647 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
4648 mark_used_regs (pbi, SET_SRC (x), cond, insn);
4652 /* Storing in STRICT_LOW_PART is like storing in a reg
4653 in that this SET might be dead, so ignore it in TESTREG.
4654 but in some other ways it is like using the reg.
4656 Storing in a SUBREG or a bit field is like storing the entire
4657 register in that if the register's value is not used
4658 then this SET is not needed. */
4659 while (GET_CODE (testreg) == STRICT_LOW_PART
4660 || GET_CODE (testreg) == ZERO_EXTRACT
4661 || GET_CODE (testreg) == SIGN_EXTRACT
4662 || GET_CODE (testreg) == SUBREG)
4664 if (GET_CODE (testreg) == SUBREG
4665 && GET_CODE (SUBREG_REG (testreg)) == REG
4666 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
4667 && (GET_MODE_SIZE (GET_MODE (testreg))
4668 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
4669 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
4671 /* Modifying a single register in an alternate mode
4672 does not use any of the old value. But these other
4673 ways of storing in a register do use the old value. */
4674 if (GET_CODE (testreg) == SUBREG
4675 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4680 testreg = XEXP (testreg, 0);
4683 /* If this is a store into a register, recursively scan the
4684 value being stored. */
4686 if ((GET_CODE (testreg) == PARALLEL
4687 && GET_MODE (testreg) == BLKmode)
4688 || (GET_CODE (testreg) == REG
4689 && (regno = REGNO (testreg),
4690 ! (regno == FRAME_POINTER_REGNUM
4691 && (! reload_completed || frame_pointer_needed)))
4692 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4693 && ! (regno == HARD_FRAME_POINTER_REGNUM
4694 && (! reload_completed || frame_pointer_needed))
4696 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4697 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4702 mark_used_regs (pbi, SET_DEST (x), cond, insn);
4703 mark_used_regs (pbi, SET_SRC (x), cond, insn);
4710 case UNSPEC_VOLATILE:
4714 /* Traditional and volatile asm instructions must be considered to use
4715 and clobber all hard registers, all pseudo-registers and all of
4716 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
4718 Consider for instance a volatile asm that changes the fpu rounding
4719 mode. An insn should not be moved across this even if it only uses
4720 pseudo-regs because it might give an incorrectly rounded result.
4722 ?!? Unfortunately, marking all hard registers as live causes massive
4723 problems for the register allocator and marking all pseudos as live
4724 creates mountains of uninitialized variable warnings.
4726 So for now, just clear the memory set list and mark any regs
4727 we can find in ASM_OPERANDS as used. */
4728 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
4729 free_EXPR_LIST_list (&pbi->mem_set_list);
4731 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
4732 We can not just fall through here since then we would be confused
4733 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
4734 traditional asms unlike their normal usage. */
4735 if (code == ASM_OPERANDS)
4739 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
4740 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
4746 if (cond != NULL_RTX)
4749 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
4751 cond = COND_EXEC_TEST (x);
4752 x = COND_EXEC_CODE (x);
4756 /* We _do_not_ want to scan operands of phi nodes. Operands of
4757 a phi function are evaluated only when control reaches this
4758 block along a particular edge. Therefore, regs that appear
4759 as arguments to phi should not be added to the global live at
4767 /* Recursively scan the operands of this expression. */
4770 register const char *fmt = GET_RTX_FORMAT (code);
4773 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4777 /* Tail recursive case: save a function call level. */
4783 mark_used_regs (pbi, XEXP (x, i), cond, insn);
4785 else if (fmt[i] == 'E')
4788 for (j = 0; j < XVECLEN (x, i); j++)
4789 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
4798 try_pre_increment_1 (pbi, insn)
4799 struct propagate_block_info *pbi;
4802 /* Find the next use of this reg. If in same basic block,
4803 make it do pre-increment or pre-decrement if appropriate. */
4804 rtx x = single_set (insn);
4805 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4806 * INTVAL (XEXP (SET_SRC (x), 1)));
4807 int regno = REGNO (SET_DEST (x));
4808 rtx y = pbi->reg_next_use[regno];
4810 && BLOCK_NUM (y) == BLOCK_NUM (insn)
4811 /* Don't do this if the reg dies, or gets set in y; a standard addressing
4812 mode would be better. */
4813 && ! dead_or_set_p (y, SET_DEST (x))
4814 && try_pre_increment (y, SET_DEST (x), amount))
4816 /* We have found a suitable auto-increment
4817 and already changed insn Y to do it.
4818 So flush this increment-instruction. */
4819 PUT_CODE (insn, NOTE);
4820 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4821 NOTE_SOURCE_FILE (insn) = 0;
4822 /* Count a reference to this reg for the increment
4823 insn we are deleting. When a reg is incremented.
4824 spilling it is worse, so we want to make that
4826 if (regno >= FIRST_PSEUDO_REGISTER)
4828 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4829 REG_N_SETS (regno)++;
4836 /* Try to change INSN so that it does pre-increment or pre-decrement
4837 addressing on register REG in order to add AMOUNT to REG.
4838 AMOUNT is negative for pre-decrement.
4839 Returns 1 if the change could be made.
4840 This checks all about the validity of the result of modifying INSN. */
4843 try_pre_increment (insn, reg, amount)
4845 HOST_WIDE_INT amount;
4849 /* Nonzero if we can try to make a pre-increment or pre-decrement.
4850 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
4852 /* Nonzero if we can try to make a post-increment or post-decrement.
4853 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4854 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4855 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
4858 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
4861 /* From the sign of increment, see which possibilities are conceivable
4862 on this target machine. */
4863 if (HAVE_PRE_INCREMENT && amount > 0)
4865 if (HAVE_POST_INCREMENT && amount > 0)
4868 if (HAVE_PRE_DECREMENT && amount < 0)
4870 if (HAVE_POST_DECREMENT && amount < 0)
4873 if (! (pre_ok || post_ok))
4876 /* It is not safe to add a side effect to a jump insn
4877 because if the incremented register is spilled and must be reloaded
4878 there would be no way to store the incremented value back in memory. */
4880 if (GET_CODE (insn) == JUMP_INSN)
4885 use = find_use_as_address (PATTERN (insn), reg, 0);
4886 if (post_ok && (use == 0 || use == (rtx) 1))
4888 use = find_use_as_address (PATTERN (insn), reg, -amount);
4892 if (use == 0 || use == (rtx) 1)
4895 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4898 /* See if this combination of instruction and addressing mode exists. */
4899 if (! validate_change (insn, &XEXP (use, 0),
4900 gen_rtx_fmt_e (amount > 0
4901 ? (do_post ? POST_INC : PRE_INC)
4902 : (do_post ? POST_DEC : PRE_DEC),
4906 /* Record that this insn now has an implicit side effect on X. */
4907 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4911 #endif /* AUTO_INC_DEC */
4913 /* Find the place in the rtx X where REG is used as a memory address.
4914 Return the MEM rtx that so uses it.
4915 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4916 (plus REG (const_int PLUSCONST)).
4918 If such an address does not appear, return 0.
4919 If REG appears more than once, or is used other than in such an address,
4923 find_use_as_address (x, reg, plusconst)
4926 HOST_WIDE_INT plusconst;
4928 enum rtx_code code = GET_CODE (x);
4929 const char *fmt = GET_RTX_FORMAT (code);
4931 register rtx value = 0;
4934 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4937 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4938 && XEXP (XEXP (x, 0), 0) == reg
4939 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4940 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4943 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4945 /* If REG occurs inside a MEM used in a bit-field reference,
4946 that is unacceptable. */
4947 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4948 return (rtx) (HOST_WIDE_INT) 1;
4952 return (rtx) (HOST_WIDE_INT) 1;
4954 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4958 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4962 return (rtx) (HOST_WIDE_INT) 1;
4964 else if (fmt[i] == 'E')
4967 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4969 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4973 return (rtx) (HOST_WIDE_INT) 1;
4981 /* Write information about registers and basic blocks into FILE.
4982 This is part of making a debugging dump. */
4985 dump_regset (r, outf)
4992 fputs (" (nil)", outf);
4996 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
4998 fprintf (outf, " %d", i);
4999 if (i < FIRST_PSEUDO_REGISTER)
5000 fprintf (outf, " [%s]",
5009 dump_regset (r, stderr);
5010 putc ('\n', stderr);
5014 dump_flow_info (file)
5018 static const char * const reg_class_names[] = REG_CLASS_NAMES;
5020 fprintf (file, "%d registers.\n", max_regno);
5021 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
5024 enum reg_class class, altclass;
5025 fprintf (file, "\nRegister %d used %d times across %d insns",
5026 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
5027 if (REG_BASIC_BLOCK (i) >= 0)
5028 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
5030 fprintf (file, "; set %d time%s", REG_N_SETS (i),
5031 (REG_N_SETS (i) == 1) ? "" : "s");
5032 if (REG_USERVAR_P (regno_reg_rtx[i]))
5033 fprintf (file, "; user var");
5034 if (REG_N_DEATHS (i) != 1)
5035 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
5036 if (REG_N_CALLS_CROSSED (i) == 1)
5037 fprintf (file, "; crosses 1 call");
5038 else if (REG_N_CALLS_CROSSED (i))
5039 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
5040 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
5041 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
5042 class = reg_preferred_class (i);
5043 altclass = reg_alternate_class (i);
5044 if (class != GENERAL_REGS || altclass != ALL_REGS)
5046 if (altclass == ALL_REGS || class == ALL_REGS)
5047 fprintf (file, "; pref %s", reg_class_names[(int) class]);
5048 else if (altclass == NO_REGS)
5049 fprintf (file, "; %s or none", reg_class_names[(int) class]);
5051 fprintf (file, "; pref %s, else %s",
5052 reg_class_names[(int) class],
5053 reg_class_names[(int) altclass]);
5055 if (REGNO_POINTER_FLAG (i))
5056 fprintf (file, "; pointer");
5057 fprintf (file, ".\n");
5060 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
5061 for (i = 0; i < n_basic_blocks; i++)
5063 register basic_block bb = BASIC_BLOCK (i);
5066 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d.\n",
5067 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
5069 fprintf (file, "Predecessors: ");
5070 for (e = bb->pred; e ; e = e->pred_next)
5071 dump_edge_info (file, e, 0);
5073 fprintf (file, "\nSuccessors: ");
5074 for (e = bb->succ; e ; e = e->succ_next)
5075 dump_edge_info (file, e, 1);
5077 fprintf (file, "\nRegisters live at start:");
5078 dump_regset (bb->global_live_at_start, file);
5080 fprintf (file, "\nRegisters live at end:");
5081 dump_regset (bb->global_live_at_end, file);
5092 dump_flow_info (stderr);
5096 dump_edge_info (file, e, do_succ)
5101 basic_block side = (do_succ ? e->dest : e->src);
5103 if (side == ENTRY_BLOCK_PTR)
5104 fputs (" ENTRY", file);
5105 else if (side == EXIT_BLOCK_PTR)
5106 fputs (" EXIT", file);
5108 fprintf (file, " %d", side->index);
5112 static const char * const bitnames[] = {
5113 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5116 int i, flags = e->flags;
5120 for (i = 0; flags; i++)
5121 if (flags & (1 << i))
5127 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5128 fputs (bitnames[i], file);
5130 fprintf (file, "%d", i);
5138 /* Print out one basic block with live information at start and end. */
5148 fprintf (outf, ";; Basic block %d, loop depth %d",
5149 bb->index, bb->loop_depth);
5150 if (bb->eh_beg != -1 || bb->eh_end != -1)
5151 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
5154 fputs (";; Predecessors: ", outf);
5155 for (e = bb->pred; e ; e = e->pred_next)
5156 dump_edge_info (outf, e, 0);
5159 fputs (";; Registers live at start:", outf);
5160 dump_regset (bb->global_live_at_start, outf);
5163 for (insn = bb->head, last = NEXT_INSN (bb->end);
5165 insn = NEXT_INSN (insn))
5166 print_rtl_single (outf, insn);
5168 fputs (";; Registers live at end:", outf);
5169 dump_regset (bb->global_live_at_end, outf);
5172 fputs (";; Successors: ", outf);
5173 for (e = bb->succ; e; e = e->succ_next)
5174 dump_edge_info (outf, e, 1);
5182 dump_bb (bb, stderr);
5189 dump_bb (BASIC_BLOCK(n), stderr);
5192 /* Like print_rtl, but also print out live information for the start of each
5196 print_rtl_with_bb (outf, rtx_first)
5200 register rtx tmp_rtx;
5203 fprintf (outf, "(nil)\n");
5207 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5208 int max_uid = get_max_uid ();
5209 basic_block *start = (basic_block *)
5210 xcalloc (max_uid, sizeof (basic_block));
5211 basic_block *end = (basic_block *)
5212 xcalloc (max_uid, sizeof (basic_block));
5213 enum bb_state *in_bb_p = (enum bb_state *)
5214 xcalloc (max_uid, sizeof (enum bb_state));
5216 for (i = n_basic_blocks - 1; i >= 0; i--)
5218 basic_block bb = BASIC_BLOCK (i);
5221 start[INSN_UID (bb->head)] = bb;
5222 end[INSN_UID (bb->end)] = bb;
5223 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5225 enum bb_state state = IN_MULTIPLE_BB;
5226 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
5228 in_bb_p[INSN_UID(x)] = state;
5235 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5240 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5242 fprintf (outf, ";; Start of basic block %d, registers live:",
5244 dump_regset (bb->global_live_at_start, outf);
5248 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
5249 && GET_CODE (tmp_rtx) != NOTE
5250 && GET_CODE (tmp_rtx) != BARRIER)
5251 fprintf (outf, ";; Insn is not within a basic block\n");
5252 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
5253 fprintf (outf, ";; Insn is in multiple basic blocks\n");
5255 did_output = print_rtl_single (outf, tmp_rtx);
5257 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
5259 fprintf (outf, ";; End of basic block %d, registers live:\n",
5261 dump_regset (bb->global_live_at_end, outf);
5274 if (current_function_epilogue_delay_list != 0)
5276 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
5277 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
5278 tmp_rtx = XEXP (tmp_rtx, 1))
5279 print_rtl_single (outf, XEXP (tmp_rtx, 0));
5283 /* Compute dominator relationships using new flow graph structures. */
5285 compute_flow_dominators (dominators, post_dominators)
5286 sbitmap *dominators;
5287 sbitmap *post_dominators;
5290 sbitmap *temp_bitmap;
5292 basic_block *worklist, *workend, *qin, *qout;
5295 /* Allocate a worklist array/queue. Entries are only added to the
5296 list if they were not already on the list. So the size is
5297 bounded by the number of basic blocks. */
5298 worklist = (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);
5299 workend = &worklist[n_basic_blocks];
5301 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5302 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5306 /* The optimistic setting of dominators requires us to put every
5307 block on the work list initially. */
5308 qin = qout = worklist;
5309 for (bb = 0; bb < n_basic_blocks; bb++)
5311 *qin++ = BASIC_BLOCK (bb);
5312 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5314 qlen = n_basic_blocks;
5317 /* We want a maximal solution, so initially assume everything dominates
5319 sbitmap_vector_ones (dominators, n_basic_blocks);
5321 /* Mark successors of the entry block so we can identify them below. */
5322 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5323 e->dest->aux = ENTRY_BLOCK_PTR;
5325 /* Iterate until the worklist is empty. */
5328 /* Take the first entry off the worklist. */
5329 basic_block b = *qout++;
5330 if (qout >= workend)
5336 /* Compute the intersection of the dominators of all the
5339 If one of the predecessor blocks is the ENTRY block, then the
5340 intersection of the dominators of the predecessor blocks is
5341 defined as the null set. We can identify such blocks by the
5342 special value in the AUX field in the block structure. */
5343 if (b->aux == ENTRY_BLOCK_PTR)
5345 /* Do not clear the aux field for blocks which are
5346 successors of the ENTRY block. That way we we never
5347 add them to the worklist again.
5349 The intersect of dominators of the preds of this block is
5350 defined as the null set. */
5351 sbitmap_zero (temp_bitmap[bb]);
5355 /* Clear the aux field of this block so it can be added to
5356 the worklist again if necessary. */
5358 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
5361 /* Make sure each block always dominates itself. */
5362 SET_BIT (temp_bitmap[bb], bb);
5364 /* If the out state of this block changed, then we need to
5365 add the successors of this block to the worklist if they
5366 are not already on the worklist. */
5367 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
5369 for (e = b->succ; e; e = e->succ_next)
5371 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
5385 if (post_dominators)
5387 /* The optimistic setting of dominators requires us to put every
5388 block on the work list initially. */
5389 qin = qout = worklist;
5390 for (bb = 0; bb < n_basic_blocks; bb++)
5392 *qin++ = BASIC_BLOCK (bb);
5393 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5395 qlen = n_basic_blocks;
5398 /* We want a maximal solution, so initially assume everything post
5399 dominates everything else. */
5400 sbitmap_vector_ones (post_dominators, n_basic_blocks);
5402 /* Mark predecessors of the exit block so we can identify them below. */
5403 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5404 e->src->aux = EXIT_BLOCK_PTR;
5406 /* Iterate until the worklist is empty. */
5409 /* Take the first entry off the worklist. */
5410 basic_block b = *qout++;
5411 if (qout >= workend)
5417 /* Compute the intersection of the post dominators of all the
5420 If one of the successor blocks is the EXIT block, then the
5421 intersection of the dominators of the successor blocks is
5422 defined as the null set. We can identify such blocks by the
5423 special value in the AUX field in the block structure. */
5424 if (b->aux == EXIT_BLOCK_PTR)
5426 /* Do not clear the aux field for blocks which are
5427 predecessors of the EXIT block. That way we we never
5428 add them to the worklist again.
5430 The intersect of dominators of the succs of this block is
5431 defined as the null set. */
5432 sbitmap_zero (temp_bitmap[bb]);
5436 /* Clear the aux field of this block so it can be added to
5437 the worklist again if necessary. */
5439 sbitmap_intersection_of_succs (temp_bitmap[bb],
5440 post_dominators, bb);
5443 /* Make sure each block always post dominates itself. */
5444 SET_BIT (temp_bitmap[bb], bb);
5446 /* If the out state of this block changed, then we need to
5447 add the successors of this block to the worklist if they
5448 are not already on the worklist. */
5449 if (sbitmap_a_and_b (post_dominators[bb],
5450 post_dominators[bb],
5453 for (e = b->pred; e; e = e->pred_next)
5455 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
5473 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
5476 compute_immediate_dominators (idom, dominators)
5478 sbitmap *dominators;
5483 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5485 /* Begin with tmp(n) = dom(n) - { n }. */
5486 for (b = n_basic_blocks; --b >= 0; )
5488 sbitmap_copy (tmp[b], dominators[b]);
5489 RESET_BIT (tmp[b], b);
5492 /* Subtract out all of our dominator's dominators. */
5493 for (b = n_basic_blocks; --b >= 0; )
5495 sbitmap tmp_b = tmp[b];
5498 for (s = n_basic_blocks; --s >= 0; )
5499 if (TEST_BIT (tmp_b, s))
5500 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
5503 /* Find the one bit set in the bitmap and put it in the output array. */
5504 for (b = n_basic_blocks; --b >= 0; )
5507 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
5510 sbitmap_vector_free (tmp);
5513 /* Count for a single SET rtx, X. */
5516 count_reg_sets_1 (x, loop_depth)
5521 register rtx reg = SET_DEST (x);
5523 /* Find the register that's set/clobbered. */
5524 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
5525 || GET_CODE (reg) == SIGN_EXTRACT
5526 || GET_CODE (reg) == STRICT_LOW_PART)
5527 reg = XEXP (reg, 0);
5529 if (GET_CODE (reg) == PARALLEL
5530 && GET_MODE (reg) == BLKmode)
5533 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
5534 count_reg_sets_1 (XVECEXP (reg, 0, i), loop_depth);
5538 if (GET_CODE (reg) == REG)
5540 regno = REGNO (reg);
5541 if (regno >= FIRST_PSEUDO_REGISTER)
5543 /* Count (weighted) references, stores, etc. This counts a
5544 register twice if it is modified, but that is correct. */
5545 REG_N_SETS (regno)++;
5546 REG_N_REFS (regno) += loop_depth + 1;
5551 /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
5552 REG_N_REFS by the current loop depth for each SET or CLOBBER found. */
5555 count_reg_sets (x, loop_depth)
5559 register RTX_CODE code = GET_CODE (x);
5561 if (code == SET || code == CLOBBER)
5562 count_reg_sets_1 (x, loop_depth);
5563 else if (code == PARALLEL)
5566 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5568 code = GET_CODE (XVECEXP (x, 0, i));
5569 if (code == SET || code == CLOBBER)
5570 count_reg_sets_1 (XVECEXP (x, 0, i), loop_depth);
5575 /* Increment REG_N_REFS by the current loop depth each register reference
5579 count_reg_references (x, loop_depth)
5583 register RTX_CODE code;
5586 code = GET_CODE (x);
5606 /* If we are clobbering a MEM, mark any registers inside the address
5608 if (GET_CODE (XEXP (x, 0)) == MEM)
5609 count_reg_references (XEXP (XEXP (x, 0), 0), loop_depth);
5613 /* While we're here, optimize this case. */
5616 /* In case the SUBREG is not of a register, don't optimize */
5617 if (GET_CODE (x) != REG)
5619 count_reg_references (x, loop_depth);
5623 /* ... fall through ... */
5626 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
5627 REG_N_REFS (REGNO (x)) += loop_depth + 1;
5632 register rtx testreg = SET_DEST (x);
5635 /* If storing into MEM, don't show it as being used. But do
5636 show the address as being used. */
5637 if (GET_CODE (testreg) == MEM)
5639 count_reg_references (XEXP (testreg, 0), loop_depth);
5640 count_reg_references (SET_SRC (x), loop_depth);
5644 /* Storing in STRICT_LOW_PART is like storing in a reg
5645 in that this SET might be dead, so ignore it in TESTREG.
5646 but in some other ways it is like using the reg.
5648 Storing in a SUBREG or a bit field is like storing the entire
5649 register in that if the register's value is not used
5650 then this SET is not needed. */
5651 while (GET_CODE (testreg) == STRICT_LOW_PART
5652 || GET_CODE (testreg) == ZERO_EXTRACT
5653 || GET_CODE (testreg) == SIGN_EXTRACT
5654 || GET_CODE (testreg) == SUBREG)
5656 /* Modifying a single register in an alternate mode
5657 does not use any of the old value. But these other
5658 ways of storing in a register do use the old value. */
5659 if (GET_CODE (testreg) == SUBREG
5660 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5665 testreg = XEXP (testreg, 0);
5668 /* If this is a store into a register,
5669 recursively scan the value being stored. */
5671 if ((GET_CODE (testreg) == PARALLEL
5672 && GET_MODE (testreg) == BLKmode)
5673 || GET_CODE (testreg) == REG)
5675 count_reg_references (SET_SRC (x), loop_depth);
5677 count_reg_references (SET_DEST (x), loop_depth);
5687 /* Recursively scan the operands of this expression. */
5690 register const char *fmt = GET_RTX_FORMAT (code);
5693 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5697 /* Tail recursive case: save a function call level. */
5703 count_reg_references (XEXP (x, i), loop_depth);
5705 else if (fmt[i] == 'E')
5708 for (j = 0; j < XVECLEN (x, i); j++)
5709 count_reg_references (XVECEXP (x, i, j), loop_depth);
5715 /* Recompute register set/reference counts immediately prior to register
5718 This avoids problems with set/reference counts changing to/from values
5719 which have special meanings to the register allocators.
5721 Additionally, the reference counts are the primary component used by the
5722 register allocators to prioritize pseudos for allocation to hard regs.
5723 More accurate reference counts generally lead to better register allocation.
5725 F is the first insn to be scanned.
5727 LOOP_STEP denotes how much loop_depth should be incremented per
5728 loop nesting level in order to increase the ref count more for
5729 references in a loop.
5731 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
5732 possibly other information which is used by the register allocators. */
5735 recompute_reg_usage (f, loop_step)
5736 rtx f ATTRIBUTE_UNUSED;
5737 int loop_step ATTRIBUTE_UNUSED;
5744 /* Clear out the old data. */
5745 max_reg = max_reg_num ();
5746 for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
5752 /* Scan each insn in the chain and count how many times each register is
5754 for (index = 0; index < n_basic_blocks; index++)
5756 basic_block bb = BASIC_BLOCK (index);
5757 loop_depth = bb->loop_depth;
5758 for (insn = bb->head; insn; insn = NEXT_INSN (insn))
5760 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5764 /* This call will increment REG_N_SETS for each SET or CLOBBER
5765 of a register in INSN. It will also increment REG_N_REFS
5766 by the loop depth for each set of a register in INSN. */
5767 count_reg_sets (PATTERN (insn), loop_depth);
5769 /* count_reg_sets does not detect autoincrement address modes, so
5770 detect them here by looking at the notes attached to INSN. */
5771 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
5773 if (REG_NOTE_KIND (links) == REG_INC)
5774 /* Count (weighted) references, stores, etc. This
5775 counts a register twice if it is modified, but
5777 REG_N_SETS (REGNO (XEXP (links, 0)))++;
5780 /* This call will increment REG_N_REFS by the current loop depth
5781 for each reference to a register in INSN. */
5782 count_reg_references (PATTERN (insn), loop_depth);
5784 /* count_reg_references will not include counts for arguments to
5785 function calls, so detect them here by examining the
5786 CALL_INSN_FUNCTION_USAGE data. */
5787 if (GET_CODE (insn) == CALL_INSN)
5791 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5793 note = XEXP (note, 1))
5794 if (GET_CODE (XEXP (note, 0)) == USE)
5795 count_reg_references (XEXP (XEXP (note, 0), 0),
5799 if (insn == bb->end)
5805 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
5806 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
5807 of the number of registers that died. */
5810 count_or_remove_death_notes (blocks, kill)
5816 for (i = n_basic_blocks - 1; i >= 0; --i)
5821 if (blocks && ! TEST_BIT (blocks, i))
5824 bb = BASIC_BLOCK (i);
5826 for (insn = bb->head; ; insn = NEXT_INSN (insn))
5828 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5830 rtx *pprev = ®_NOTES (insn);
5835 switch (REG_NOTE_KIND (link))
5838 if (GET_CODE (XEXP (link, 0)) == REG)
5840 rtx reg = XEXP (link, 0);
5843 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
5846 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
5854 rtx next = XEXP (link, 1);
5855 free_EXPR_LIST_node (link);
5856 *pprev = link = next;
5862 pprev = &XEXP (link, 1);
5869 if (insn == bb->end)
5877 /* Record INSN's block as BB. */
5880 set_block_for_insn (insn, bb)
5884 size_t uid = INSN_UID (insn);
5885 if (uid >= basic_block_for_insn->num_elements)
5889 /* Add one-eighth the size so we don't keep calling xrealloc. */
5890 new_size = uid + (uid + 7) / 8;
5892 VARRAY_GROW (basic_block_for_insn, new_size);
5894 VARRAY_BB (basic_block_for_insn, uid) = bb;
5897 /* Record INSN's block number as BB. */
5898 /* ??? This has got to go. */
5901 set_block_num (insn, bb)
5905 set_block_for_insn (insn, BASIC_BLOCK (bb));
5908 /* Verify the CFG consistency. This function check some CFG invariants and
5909 aborts when something is wrong. Hope that this function will help to
5910 convert many optimization passes to preserve CFG consistent.
5912 Currently it does following checks:
5914 - test head/end pointers
5915 - overlapping of basic blocks
5916 - edge list corectness
5917 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
5918 - tails of basic blocks (ensure that boundary is necesary)
5919 - scans body of the basic block for JUMP_INSN, CODE_LABEL
5920 and NOTE_INSN_BASIC_BLOCK
5921 - check that all insns are in the basic blocks
5922 (except the switch handling code, barriers and notes)
5923 - check that all returns are followed by barriers
5925 In future it can be extended check a lot of other stuff as well
5926 (reachability of basic blocks, life information, etc. etc.). */
5931 const int max_uid = get_max_uid ();
5932 const rtx rtx_first = get_insns ();
5933 basic_block *bb_info;
5937 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
5939 /* First pass check head/end pointers and set bb_info array used by
5941 for (i = n_basic_blocks - 1; i >= 0; i--)
5943 basic_block bb = BASIC_BLOCK (i);
5945 /* Check the head pointer and make sure that it is pointing into
5947 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
5952 error ("Head insn %d for block %d not found in the insn stream.",
5953 INSN_UID (bb->head), bb->index);
5957 /* Check the end pointer and make sure that it is pointing into
5959 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5961 if (bb_info[INSN_UID (x)] != NULL)
5963 error ("Insn %d is in multiple basic blocks (%d and %d)",
5964 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
5967 bb_info[INSN_UID (x)] = bb;
5974 error ("End insn %d for block %d not found in the insn stream.",
5975 INSN_UID (bb->end), bb->index);
5980 /* Now check the basic blocks (boundaries etc.) */
5981 for (i = n_basic_blocks - 1; i >= 0; i--)
5983 basic_block bb = BASIC_BLOCK (i);
5984 /* Check corectness of edge lists */
5992 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
5994 fprintf (stderr, "Predecessor: ");
5995 dump_edge_info (stderr, e, 0);
5996 fprintf (stderr, "\nSuccessor: ");
5997 dump_edge_info (stderr, e, 1);
6001 if (e->dest != EXIT_BLOCK_PTR)
6003 edge e2 = e->dest->pred;
6004 while (e2 && e2 != e)
6008 error ("Basic block %i edge lists are corrupted", bb->index);
6020 error ("Basic block %d pred edge is corrupted", bb->index);
6021 fputs ("Predecessor: ", stderr);
6022 dump_edge_info (stderr, e, 0);
6023 fputs ("\nSuccessor: ", stderr);
6024 dump_edge_info (stderr, e, 1);
6025 fputc ('\n', stderr);
6028 if (e->src != ENTRY_BLOCK_PTR)
6030 edge e2 = e->src->succ;
6031 while (e2 && e2 != e)
6035 error ("Basic block %i edge lists are corrupted", bb->index);
6042 /* OK pointers are correct. Now check the header of basic
6043 block. It ought to contain optional CODE_LABEL followed
6044 by NOTE_BASIC_BLOCK. */
6046 if (GET_CODE (x) == CODE_LABEL)
6050 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
6056 if (GET_CODE (x) != NOTE
6057 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
6058 || NOTE_BASIC_BLOCK (x) != bb)
6060 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
6067 /* Do checks for empty blocks here */
6074 if (GET_CODE (x) == NOTE
6075 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
6077 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
6078 INSN_UID (x), bb->index);
6085 if (GET_CODE (x) == JUMP_INSN
6086 || GET_CODE (x) == CODE_LABEL
6087 || GET_CODE (x) == BARRIER)
6089 error ("In basic block %d:", bb->index);
6090 fatal_insn ("Flow control insn inside a basic block", x);
6101 if (!bb_info[INSN_UID (x)])
6103 switch (GET_CODE (x))
6110 /* An addr_vec is placed outside any block block. */
6112 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
6113 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
6114 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
6119 /* But in any case, non-deletable labels can appear anywhere. */
6123 fatal_insn ("Insn outside basic block", x);
6127 if (GET_RTX_CLASS (GET_CODE (x)) == 'i'
6128 && GET_CODE (x) == JUMP_INSN
6129 && returnjump_p (x) && ! condjump_p (x)
6130 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
6131 fatal_insn ("Return not followed by barrier", x);
6143 /* Functions to access an edge list with a vector representation.
6144 Enough data is kept such that given an index number, the
6145 pred and succ that edge reprsents can be determined, or
6146 given a pred and a succ, it's index number can be returned.
6147 This allows algorithms which comsume a lot of memory to
6148 represent the normally full matrix of edge (pred,succ) with a
6149 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6150 wasted space in the client code due to sparse flow graphs. */
6152 /* This functions initializes the edge list. Basically the entire
6153 flowgraph is processed, and all edges are assigned a number,
6154 and the data structure is filed in. */
6158 struct edge_list *elist;
6164 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6168 /* Determine the number of edges in the flow graph by counting successor
6169 edges on each basic block. */
6170 for (x = 0; x < n_basic_blocks; x++)
6172 basic_block bb = BASIC_BLOCK (x);
6174 for (e = bb->succ; e; e = e->succ_next)
6177 /* Don't forget successors of the entry block. */
6178 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6181 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6182 elist->num_blocks = block_count;
6183 elist->num_edges = num_edges;
6184 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6188 /* Follow successors of the entry block, and register these edges. */
6189 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6191 elist->index_to_edge[num_edges] = e;
6195 for (x = 0; x < n_basic_blocks; x++)
6197 basic_block bb = BASIC_BLOCK (x);
6199 /* Follow all successors of blocks, and register these edges. */
6200 for (e = bb->succ; e; e = e->succ_next)
6202 elist->index_to_edge[num_edges] = e;
6209 /* This function free's memory associated with an edge list. */
6211 free_edge_list (elist)
6212 struct edge_list *elist;
6216 free (elist->index_to_edge);
6221 /* This function provides debug output showing an edge list. */
6223 print_edge_list (f, elist)
6225 struct edge_list *elist;
6228 fprintf(f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6229 elist->num_blocks - 2, elist->num_edges);
6231 for (x = 0; x < elist->num_edges; x++)
6233 fprintf (f, " %-4d - edge(", x);
6234 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6235 fprintf (f,"entry,");
6237 fprintf (f,"%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6239 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6240 fprintf (f,"exit)\n");
6242 fprintf (f,"%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6246 /* This function provides an internal consistancy check of an edge list,
6247 verifying that all edges are present, and that there are no
6250 verify_edge_list (f, elist)
6252 struct edge_list *elist;
6254 int x, pred, succ, index;
6257 for (x = 0; x < n_basic_blocks; x++)
6259 basic_block bb = BASIC_BLOCK (x);
6261 for (e = bb->succ; e; e = e->succ_next)
6263 pred = e->src->index;
6264 succ = e->dest->index;
6265 index = EDGE_INDEX (elist, e->src, e->dest);
6266 if (index == EDGE_INDEX_NO_EDGE)
6268 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6271 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6272 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6273 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6274 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6275 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6276 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6279 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6281 pred = e->src->index;
6282 succ = e->dest->index;
6283 index = EDGE_INDEX (elist, e->src, e->dest);
6284 if (index == EDGE_INDEX_NO_EDGE)
6286 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6289 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6290 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6291 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6292 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6293 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6294 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6296 /* We've verified that all the edges are in the list, no lets make sure
6297 there are no spurious edges in the list. */
6299 for (pred = 0 ; pred < n_basic_blocks; pred++)
6300 for (succ = 0 ; succ < n_basic_blocks; succ++)
6302 basic_block p = BASIC_BLOCK (pred);
6303 basic_block s = BASIC_BLOCK (succ);
6307 for (e = p->succ; e; e = e->succ_next)
6313 for (e = s->pred; e; e = e->pred_next)
6319 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6320 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6321 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6323 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6324 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6325 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6326 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6327 BASIC_BLOCK (succ)));
6329 for (succ = 0 ; succ < n_basic_blocks; succ++)
6331 basic_block p = ENTRY_BLOCK_PTR;
6332 basic_block s = BASIC_BLOCK (succ);
6336 for (e = p->succ; e; e = e->succ_next)
6342 for (e = s->pred; e; e = e->pred_next)
6348 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6349 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6350 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6352 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6353 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6354 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6355 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6356 BASIC_BLOCK (succ)));
6358 for (pred = 0 ; pred < n_basic_blocks; pred++)
6360 basic_block p = BASIC_BLOCK (pred);
6361 basic_block s = EXIT_BLOCK_PTR;
6365 for (e = p->succ; e; e = e->succ_next)
6371 for (e = s->pred; e; e = e->pred_next)
6377 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6378 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6379 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6381 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6382 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6383 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6384 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6389 /* This routine will determine what, if any, edge there is between
6390 a specified predecessor and successor. */
6393 find_edge_index (edge_list, pred, succ)
6394 struct edge_list *edge_list;
6395 basic_block pred, succ;
6398 for (x = 0; x < NUM_EDGES (edge_list); x++)
6400 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6401 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6404 return (EDGE_INDEX_NO_EDGE);
6407 /* This function will remove an edge from the flow graph. */
6412 edge last_pred = NULL;
6413 edge last_succ = NULL;
6415 basic_block src, dest;
6418 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6424 last_succ->succ_next = e->succ_next;
6426 src->succ = e->succ_next;
6428 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6434 last_pred->pred_next = e->pred_next;
6436 dest->pred = e->pred_next;
6442 /* This routine will remove any fake successor edges for a basic block.
6443 When the edge is removed, it is also removed from whatever predecessor
6446 remove_fake_successors (bb)
6450 for (e = bb->succ; e ; )
6454 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6459 /* This routine will remove all fake edges from the flow graph. If
6460 we remove all fake successors, it will automatically remove all
6461 fake predecessors. */
6463 remove_fake_edges ()
6467 for (x = 0; x < n_basic_blocks; x++)
6468 remove_fake_successors (BASIC_BLOCK (x));
6470 /* We've handled all successors except the entry block's. */
6471 remove_fake_successors (ENTRY_BLOCK_PTR);
6474 /* This functions will add a fake edge between any block which has no
6475 successors, and the exit block. Some data flow equations require these
6478 add_noreturn_fake_exit_edges ()
6482 for (x = 0; x < n_basic_blocks; x++)
6483 if (BASIC_BLOCK (x)->succ == NULL)
6484 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
6487 /* Dump the list of basic blocks in the bitmap NODES. */
6489 flow_nodes_print (str, nodes, file)
6491 const sbitmap nodes;
6496 fprintf (file, "%s { ", str);
6497 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
6498 fputs ("}\n", file);
6502 /* Dump the list of exiting edges in the array EDGES. */
6504 flow_exits_print (str, edges, num_edges, file)
6512 fprintf (file, "%s { ", str);
6513 for (i = 0; i < num_edges; i++)
6514 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
6515 fputs ("}\n", file);
6519 /* Dump loop related CFG information. */
6521 flow_loops_cfg_dump (loops, file)
6522 const struct loops *loops;
6527 if (! loops->num || ! file || ! loops->cfg.dom)
6530 for (i = 0; i < n_basic_blocks; i++)
6534 fprintf (file, ";; %d succs { ", i);
6535 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
6536 fprintf (file, "%d ", succ->dest->index);
6537 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
6541 /* Dump the DFS node order. */
6542 if (loops->cfg.dfs_order)
6544 fputs (";; DFS order: ", file);
6545 for (i = 0; i < n_basic_blocks; i++)
6546 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
6552 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
6554 flow_loop_nested_p (outer, loop)
6558 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
6562 /* Dump the loop information specified by LOOPS to the stream FILE. */
6564 flow_loops_dump (loops, file, verbose)
6565 const struct loops *loops;
6572 num_loops = loops->num;
6573 if (! num_loops || ! file)
6576 fprintf (file, ";; %d loops found, %d levels\n",
6577 num_loops, loops->levels);
6579 for (i = 0; i < num_loops; i++)
6581 struct loop *loop = &loops->array[i];
6583 fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
6584 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
6585 loop->header->index, loop->latch->index,
6586 loop->pre_header ? loop->pre_header->index : -1,
6587 loop->depth, loop->level,
6588 (long) (loop->outer ? (loop->outer - loops->array) : -1));
6589 fprintf (file, ";; %d", loop->num_nodes);
6590 flow_nodes_print (" nodes", loop->nodes, file);
6591 fprintf (file, ";; %d", loop->num_exits);
6592 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
6598 for (j = 0; j < i; j++)
6600 struct loop *oloop = &loops->array[j];
6602 if (loop->header == oloop->header)
6607 smaller = loop->num_nodes < oloop->num_nodes;
6609 /* If the union of LOOP and OLOOP is different than
6610 the larger of LOOP and OLOOP then LOOP and OLOOP
6611 must be disjoint. */
6612 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
6613 smaller ? oloop : loop);
6614 fprintf (file, ";; loop header %d shared by loops %d, %d %s\n",
6615 loop->header->index, i, j,
6616 disjoint ? "disjoint" : "nested");
6623 /* Print diagnostics to compare our concept of a loop with
6624 what the loop notes say. */
6625 if (GET_CODE (PREV_INSN (loop->first->head)) != NOTE
6626 || NOTE_LINE_NUMBER (PREV_INSN (loop->first->head))
6627 != NOTE_INSN_LOOP_BEG)
6628 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
6629 INSN_UID (PREV_INSN (loop->first->head)));
6630 if (GET_CODE (NEXT_INSN (loop->last->end)) != NOTE
6631 || NOTE_LINE_NUMBER (NEXT_INSN (loop->last->end))
6632 != NOTE_INSN_LOOP_END)
6633 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
6634 INSN_UID (NEXT_INSN (loop->last->end)));
6639 flow_loops_cfg_dump (loops, file);
6643 /* Free all the memory allocated for LOOPS. */
6645 flow_loops_free (loops)
6646 struct loops *loops;
6655 /* Free the loop descriptors. */
6656 for (i = 0; i < loops->num; i++)
6658 struct loop *loop = &loops->array[i];
6661 sbitmap_free (loop->nodes);
6665 free (loops->array);
6666 loops->array = NULL;
6669 sbitmap_vector_free (loops->cfg.dom);
6670 if (loops->cfg.dfs_order)
6671 free (loops->cfg.dfs_order);
6673 sbitmap_free (loops->shared_headers);
6678 /* Find the exits from the loop using the bitmap of loop nodes NODES
6679 and store in EXITS array. Return the number of exits from the
6682 flow_loop_exits_find (nodes, exits)
6683 const sbitmap nodes;
6692 /* Check all nodes within the loop to see if there are any
6693 successors not in the loop. Note that a node may have multiple
6696 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6697 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6699 basic_block dest = e->dest;
6701 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6709 *exits = (edge *) xmalloc (num_exits * sizeof (edge *));
6711 /* Store all exiting edges into an array. */
6713 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6714 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6716 basic_block dest = e->dest;
6718 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6719 (*exits)[num_exits++] = e;
6727 /* Find the nodes contained within the loop with header HEADER and
6728 latch LATCH and store in NODES. Return the number of nodes within
6731 flow_loop_nodes_find (header, latch, nodes)
6740 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
6743 /* Start with only the loop header in the set of loop nodes. */
6744 sbitmap_zero (nodes);
6745 SET_BIT (nodes, header->index);
6747 header->loop_depth++;
6749 /* Push the loop latch on to the stack. */
6750 if (! TEST_BIT (nodes, latch->index))
6752 SET_BIT (nodes, latch->index);
6753 latch->loop_depth++;
6755 stack[sp++] = latch;
6764 for (e = node->pred; e; e = e->pred_next)
6766 basic_block ancestor = e->src;
6768 /* If each ancestor not marked as part of loop, add to set of
6769 loop nodes and push on to stack. */
6770 if (ancestor != ENTRY_BLOCK_PTR
6771 && ! TEST_BIT (nodes, ancestor->index))
6773 SET_BIT (nodes, ancestor->index);
6774 ancestor->loop_depth++;
6776 stack[sp++] = ancestor;
6785 /* Compute the depth first search order and store in the array
6786 DFS_ORDER, marking the nodes visited in VISITED. Returns the
6787 number of nodes visited. */
6789 flow_depth_first_order_compute (dfs_order)
6798 /* Allocate stack for back-tracking up CFG. */
6799 stack = (edge *) xmalloc (n_basic_blocks * sizeof (edge));
6802 /* Allocate bitmap to track nodes that have been visited. */
6803 visited = sbitmap_alloc (n_basic_blocks);
6805 /* None of the nodes in the CFG have been visited yet. */
6806 sbitmap_zero (visited);
6808 /* Start with the first successor edge from the entry block. */
6809 e = ENTRY_BLOCK_PTR->succ;
6812 basic_block src = e->src;
6813 basic_block dest = e->dest;
6815 /* Mark that we have visited this node. */
6816 if (src != ENTRY_BLOCK_PTR)
6817 SET_BIT (visited, src->index);
6819 /* If this node has not been visited before, push the current
6820 edge on to the stack and proceed with the first successor
6821 edge of this node. */
6822 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6830 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6833 /* DEST has no successors (for example, a non-returning
6834 function is called) so do not push the current edge
6835 but carry on with its next successor. */
6836 dfs_order[dest->index] = n_basic_blocks - ++dfsnum;
6837 SET_BIT (visited, dest->index);
6840 while (! e->succ_next && src != ENTRY_BLOCK_PTR)
6842 dfs_order[src->index] = n_basic_blocks - ++dfsnum;
6844 /* Pop edge off stack. */
6852 sbitmap_free (visited);
6854 /* The number of nodes visited should not be greater than
6856 if (dfsnum > n_basic_blocks)
6859 /* There are some nodes left in the CFG that are unreachable. */
6860 if (dfsnum < n_basic_blocks)
6866 /* Return the block for the pre-header of the loop with header
6867 HEADER where DOM specifies the dominator information. Return NULL if
6868 there is no pre-header. */
6870 flow_loop_pre_header_find (header, dom)
6874 basic_block pre_header;
6877 /* If block p is a predecessor of the header and is the only block
6878 that the header does not dominate, then it is the pre-header. */
6880 for (e = header->pred; e; e = e->pred_next)
6882 basic_block node = e->src;
6884 if (node != ENTRY_BLOCK_PTR
6885 && ! TEST_BIT (dom[node->index], header->index))
6887 if (pre_header == NULL)
6891 /* There are multiple edges into the header from outside
6892 the loop so there is no pre-header block. */
6902 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
6903 previously added. The insertion algorithm assumes that the loops
6904 are added in the order found by a depth first search of the CFG. */
6906 flow_loop_tree_node_add (prevloop, loop)
6907 struct loop *prevloop;
6911 if (flow_loop_nested_p (prevloop, loop))
6913 prevloop->inner = loop;
6914 loop->outer = prevloop;
6918 while (prevloop->outer)
6920 if (flow_loop_nested_p (prevloop->outer, loop))
6922 prevloop->next = loop;
6923 loop->outer = prevloop->outer;
6926 prevloop = prevloop->outer;
6929 prevloop->next = loop;
6934 /* Build the loop hierarchy tree for LOOPS. */
6936 flow_loops_tree_build (loops)
6937 struct loops *loops;
6942 num_loops = loops->num;
6946 /* Root the loop hierarchy tree with the first loop found.
6947 Since we used a depth first search this should be the
6949 loops->tree = &loops->array[0];
6950 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
6952 /* Add the remaining loops to the tree. */
6953 for (i = 1; i < num_loops; i++)
6954 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
6958 /* Helper function to compute loop nesting depth and enclosed loop level
6959 for the natural loop specified by LOOP at the loop depth DEPTH.
6960 Returns the loop level. */
6962 flow_loop_level_compute (loop, depth)
6972 /* Traverse loop tree assigning depth and computing level as the
6973 maximum level of all the inner loops of this loop. The loop
6974 level is equivalent to the height of the loop in the loop tree
6975 and corresponds to the number of enclosed loop levels (including
6977 for (inner = loop->inner; inner; inner = inner->next)
6981 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
6986 loop->level = level;
6987 loop->depth = depth;
6992 /* Compute the loop nesting depth and enclosed loop level for the loop
6993 hierarchy tree specfied by LOOPS. Return the maximum enclosed loop
6997 flow_loops_level_compute (loops)
6998 struct loops *loops;
7004 /* Traverse all the outer level loops. */
7005 for (loop = loops->tree; loop; loop = loop->next)
7007 level = flow_loop_level_compute (loop, 1);
7015 /* Find all the natural loops in the function and save in LOOPS structure
7016 and recalculate loop_depth information in basic block structures.
7017 Return the number of natural loops found. */
7020 flow_loops_find (loops)
7021 struct loops *loops;
7032 loops->array = NULL;
7036 /* Taking care of this degenerate case makes the rest of
7037 this code simpler. */
7038 if (n_basic_blocks == 0)
7041 /* Compute the dominators. */
7042 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
7043 compute_flow_dominators (dom, NULL);
7045 /* Count the number of loop edges (back edges). This should be the
7046 same as the number of natural loops. Also clear the loop_depth
7047 and as we work from inner->outer in a loop nest we call
7048 find_loop_nodes_find which will increment loop_depth for nodes
7049 within the current loop, which happens to enclose inner loops. */
7052 for (b = 0; b < n_basic_blocks; b++)
7054 BASIC_BLOCK (b)->loop_depth = 0;
7055 for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
7057 basic_block latch = e->src;
7059 /* Look for back edges where a predecessor is dominated
7060 by this block. A natural loop has a single entry
7061 node (header) that dominates all the nodes in the
7062 loop. It also has single back edge to the header
7063 from a latch node. Note that multiple natural loops
7064 may share the same header. */
7065 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
7072 /* Compute depth first search order of the CFG so that outer
7073 natural loops will be found before inner natural loops. */
7074 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
7075 flow_depth_first_order_compute (dfs_order);
7077 /* Allocate loop structures. */
7079 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
7081 headers = sbitmap_alloc (n_basic_blocks);
7082 sbitmap_zero (headers);
7084 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
7085 sbitmap_zero (loops->shared_headers);
7087 /* Find and record information about all the natural loops
7090 for (b = 0; b < n_basic_blocks; b++)
7094 /* Search the nodes of the CFG in DFS order that we can find
7095 outer loops first. */
7096 header = BASIC_BLOCK (dfs_order[b]);
7098 /* Look for all the possible latch blocks for this header. */
7099 for (e = header->pred; e; e = e->pred_next)
7101 basic_block latch = e->src;
7103 /* Look for back edges where a predecessor is dominated
7104 by this block. A natural loop has a single entry
7105 node (header) that dominates all the nodes in the
7106 loop. It also has single back edge to the header
7107 from a latch node. Note that multiple natural loops
7108 may share the same header. */
7109 if (latch != ENTRY_BLOCK_PTR
7110 && TEST_BIT (dom[latch->index], header->index))
7114 loop = loops->array + num_loops;
7116 loop->header = header;
7117 loop->latch = latch;
7119 /* Keep track of blocks that are loop headers so
7120 that we can tell which loops should be merged. */
7121 if (TEST_BIT (headers, header->index))
7122 SET_BIT (loops->shared_headers, header->index);
7123 SET_BIT (headers, header->index);
7125 /* Find nodes contained within the loop. */
7126 loop->nodes = sbitmap_alloc (n_basic_blocks);
7128 = flow_loop_nodes_find (header, latch, loop->nodes);
7130 /* Compute first and last blocks within the loop.
7131 These are often the same as the loop header and
7132 loop latch respectively, but this is not always
7135 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
7137 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
7139 /* Find edges which exit the loop. Note that a node
7140 may have several exit edges. */
7142 = flow_loop_exits_find (loop->nodes, &loop->exits);
7144 /* Look to see if the loop has a pre-header node. */
7146 = flow_loop_pre_header_find (header, dom);
7153 /* Natural loops with shared headers may either be disjoint or
7154 nested. Disjoint loops with shared headers cannot be inner
7155 loops and should be merged. For now just mark loops that share
7157 for (i = 0; i < num_loops; i++)
7158 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
7159 loops->array[i].shared = 1;
7161 sbitmap_free (headers);
7164 loops->num = num_loops;
7166 /* Save CFG derived information to avoid recomputing it. */
7167 loops->cfg.dom = dom;
7168 loops->cfg.dfs_order = dfs_order;
7170 /* Build the loop hierarchy tree. */
7171 flow_loops_tree_build (loops);
7173 /* Assign the loop nesting depth and enclosed loop level for each
7175 loops->levels = flow_loops_level_compute (loops);
7181 /* Return non-zero if edge E enters header of LOOP from outside of LOOP. */
7183 flow_loop_outside_edge_p (loop, e)
7184 const struct loop *loop;
7187 if (e->dest != loop->header)
7189 return (e->src == ENTRY_BLOCK_PTR)
7190 || ! TEST_BIT (loop->nodes, e->src->index);
7194 /* Clear LOG_LINKS fields of insns in a chain. */
7196 clear_log_links (insns)
7200 for (i = insns; i; i = NEXT_INSN (i))
7201 if (GET_RTX_CLASS (GET_CODE (i)) == 'i')