/* Data flow analysis for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file contains the data flow analysis pass of the compiler.  It
   computes data flow information which tells combine_instructions
   which insns to consider combining and controls register allocation.

   Additional data flow information that is too bulky to record is
   generated during the analysis, and is used at that time to create
   autoincrement and autodecrement addressing.

   The first step is dividing the function into basic blocks.
   find_basic_blocks does this.  Then life_analysis determines
   where each register is live and where it is dead.

   ** find_basic_blocks **

   find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.
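   As a quick illustration (a sketch, not code from this file): a pass
   can walk the successor edges of a block BB with the idiom used
   throughout this file,

	edge e;
	for (e = bb->succ; e; e = e->succ_next)
	  visit (e->dest);

   where visit stands for whatever per-successor work the pass does.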
   find_basic_blocks also finds any unreachable loops and deletes them.

   ** life_analysis **

   life_analysis is called immediately after find_basic_blocks.
   It uses the basic block information to determine where each
   hard or pseudo register is live.

   ** live-register info **

   The information about where each register is live is in two parts:
   the REG_NOTES of insns, and the vector basic_block->global_live_at_start.

   basic_block->global_live_at_start has an element for each basic
   block, and the element is a bit-vector with a bit for each hard or
   pseudo register.  The bit is 1 if the register is live at the
   beginning of the basic block.
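   For instance, using the regset accessors, a pass can test whether
   register number REGNO is live on entry to block BB with

	if (REGNO_REG_SET_P (bb->global_live_at_start, regno))
	  ...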
   Two types of elements can be added to an insn's REG_NOTES.
   A REG_DEAD note is added to an insn's REG_NOTES for any register
   that meets both of two conditions:  The value in the register is not
   needed in subsequent insns and the insn does not replace the value in
   the register (in the case of multi-word hard registers, the value in
   each register must be replaced by the insn to avoid a REG_DEAD note).

   In the vast majority of cases, an object in a REG_DEAD note will be
   used somewhere in the insn.  The (rare) exception to this is if an
   insn uses a multi-word hard register and only some of the registers are
   needed in subsequent insns.  In that case, REG_DEAD notes will be
   provided for those hard registers that are not subsequently needed.
   Partial REG_DEAD notes of this type do not occur when an insn sets
   only some of the hard registers used in such a multi-word operand;
   omitting REG_DEAD notes for objects stored in an insn is optional and
   the desire to do so does not justify the complexity of the partial
   REG_DEAD note mechanism.

   REG_UNUSED notes are added for each register that is set by the insn
   but is unused subsequently (if every register set by the insn is unused
   and the insn does not reference memory or have some other side-effect,
   the insn is deleted instead).  If only part of a multi-word hard
   register is used in a subsequent insn, REG_UNUSED notes are made for
   the parts that will not be used.
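   As an illustration (not code from this file), the common idiom for
   asking whether register number REGNO dies in insn INSN, or is set
   there but never used afterward, is to search the insn's notes:

	if (find_regno_note (insn, REG_DEAD, regno))
	  ...
	if (find_regno_note (insn, REG_UNUSED, regno))
	  ...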
   To determine which registers are live after any insn, one can
   start from the beginning of the basic block and scan insns, noting
   which registers are set by each insn and which die there.
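   In outline (a sketch only; the actual scan in propagate_block runs
   backwards):

	live = copy of bb->global_live_at_start;
	for each insn in the block, from bb->head onward:
	  clear the bits of registers mentioned in REG_DEAD notes;
	  set the bits of registers set by the insn, except those
	    mentioned in REG_UNUSED notes;

   after an insn is processed, LIVE is the set of registers live
   immediately after that insn.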
   ** Other actions of life_analysis **

   life_analysis sets up the LOG_LINKS fields of insns because the
   information needed to do so is readily available.

   life_analysis deletes insns whose only effect is to store a value
   that is never used.

   life_analysis notices cases where a reference to a register as
   a memory address can be combined with a preceding or following
   incrementation or decrementation of the register.  The separate
   instruction to increment or decrement is deleted and the address
   is changed to a POST_INC or similar rtx.

   Each time an incrementing or decrementing address is created,
   a REG_INC element is added to the insn's REG_NOTES list.
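   For example, assuming the stored mode is four bytes wide, the pair

	(set (mem (reg 100)) (reg 101))
	(set (reg 100) (plus (reg 100) (const_int 4)))

   can be collapsed into the single insn

	(set (mem (post_inc (reg 100))) (reg 101))

   which then carries a REG_INC note for (reg 100).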
   life_analysis fills in certain vectors containing information about
   register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
   REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.

   life_analysis sets current_function_sp_is_unchanging if the function
   doesn't modify the stack pointer.  */

/* TODO:

   Split out from life_analysis:
	- local property discovery (bb->local_live, bb->local_set)
	- global property computation
	- log links creation
	- pre/post modify transformation
*/
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "basic-block.h"
#include "insn-config.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "recog.h"
#include "insn-flags.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

/* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
   the stack pointer does not matter.  The value is tested only in
   functions that have frame pointers.
   No definition is equivalent to always zero.  */
#ifndef EXIT_IGNORE_STACK
#define EXIT_IGNORE_STACK 0
#endif

#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif
#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif
/* The contents of the current function definition are allocated
   in this obstack, and all are freed at the end of the function.
   For top-level functions, this is temporary_obstack.
   Separate obstacks are made for nested functions.  */

extern struct obstack *function_obstack;

/* Number of basic blocks in the current function.  */

int n_basic_blocks;

/* Number of edges in the current function.  */

int n_edges;

/* The basic block array.  */

varray_type basic_block_info;

/* The special entry and exit blocks.  */
struct basic_block_def entry_exit_blocks[2]
= {{ NULL,			/* head */
     NULL,			/* end */
     NULL,			/* pred */
     NULL,			/* succ */
     NULL,			/* local_set */
     NULL,			/* global_live_at_start */
     NULL,			/* global_live_at_end */
     NULL,			/* aux */
     ENTRY_BLOCK,		/* index */
     0,				/* loop_depth */
     -1, -1			/* eh_beg, eh_end */
   },
   { NULL,			/* head */
     NULL,			/* end */
     NULL,			/* pred */
     NULL,			/* succ */
     NULL,			/* local_set */
     NULL,			/* global_live_at_start */
     NULL,			/* global_live_at_end */
     NULL,			/* aux */
     EXIT_BLOCK,		/* index */
     0,				/* loop_depth */
     -1, -1			/* eh_beg, eh_end */
   }};
/* Nonzero if the second flow pass has completed.  */
int flow2_completed;

/* Maximum register number used in this function, plus one.  */

int max_regno;

/* Indexed by n, giving various register information */

varray_type reg_n_info;

/* Size of the reg_n_info table.  */

unsigned int reg_n_max;

/* Element N is the next insn that uses (hard or pseudo) register number N
   within the current basic block; or zero, if there is no such insn.
   This is valid only during the final backward scan in propagate_block.  */

static rtx *reg_next_use;

/* Size of a regset for the current function,
   in (1) bytes and (2) elements.  */

int regset_bytes;
int regset_size;

/* Regset of regs live when calls to `setjmp'-like functions happen.  */
/* ??? Does this exist only for the setjmp-clobbered warning message?  */

regset regs_live_at_setjmp;

/* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
   that have to go in the same hard reg.
   The first two regs in the list are a pair, and the next two
   are another pair, etc.  */
rtx regs_may_share;

/* Depth within loops of basic block being scanned for lifetime analysis,
   plus one.  This is the weight attached to references to registers.  */

static int loop_depth;
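/* For example, with this weighting a reference to a register at the top
   level of the function counts 1 toward REG_N_REFS, while the same
   reference inside one loop counts 2.  (Illustrative; the counting is
   done in the scanning routines below.)  */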
/* During propagate_block, this is non-zero if the value of CC0 is live.  */

static int cc0_live;

/* During propagate_block, this contains a list of all the MEMs we are
   tracking for dead store elimination.  */

static rtx mem_set_list;

/* Set of registers that may be eliminable.  These are handled specially
   in updating regs_ever_live.  */

static HARD_REG_SET elim_reg_set;

/* The basic block structure for every insn, indexed by uid.  */

varray_type basic_block_for_insn;

/* The labels mentioned in non-jump rtl.  Valid during find_basic_blocks.  */
/* ??? Should probably be using LABEL_NUSES instead.  It would take a
   bit of surgery to be able to use or co-opt the routines in jump.  */

static rtx label_value_list;
/* Forward declarations */
static int count_basic_blocks		PARAMS ((rtx));
static rtx find_basic_blocks_1		PARAMS ((rtx));
static void clear_edges			PARAMS ((void));
static void make_edges			PARAMS ((rtx));
static void make_label_edge		PARAMS ((sbitmap *, basic_block,
						 rtx, int));
static void make_eh_edge		PARAMS ((sbitmap *, eh_nesting_info *,
						 basic_block, rtx, int));
static void mark_critical_edges		PARAMS ((void));
static void move_stray_eh_region_notes	PARAMS ((void));
static void record_active_eh_regions	PARAMS ((rtx));

static void commit_one_edge_insertion	PARAMS ((edge));

static void delete_unreachable_blocks	PARAMS ((void));
static void delete_eh_regions		PARAMS ((void));
static int can_delete_note_p		PARAMS ((rtx));
static int delete_block			PARAMS ((basic_block));
static void expunge_block		PARAMS ((basic_block));
static int can_delete_label_p		PARAMS ((rtx));
static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
							  basic_block));
static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
							basic_block));
static void merge_blocks_nomove		PARAMS ((basic_block, basic_block));
static int merge_blocks			PARAMS ((edge,basic_block,basic_block));
static void try_merge_blocks		PARAMS ((void));
static void tidy_fallthru_edge		PARAMS ((edge,basic_block,basic_block));
static void tidy_fallthru_edges		PARAMS ((void));
static int verify_wide_reg_1		PARAMS ((rtx *, void *));
static void verify_wide_reg		PARAMS ((int, rtx, rtx));
static void verify_local_live_at_start	PARAMS ((regset, basic_block));
static int set_noop_p			PARAMS ((rtx));
static int noop_move_p			PARAMS ((rtx));
static void delete_noop_moves		PARAMS ((rtx));
static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
static void notice_stack_pointer_modification PARAMS ((rtx));
static void mark_reg			PARAMS ((rtx, void *));
static void mark_regs_live_at_end	PARAMS ((regset));
static void calculate_global_regs_live	PARAMS ((sbitmap, sbitmap, int));
static void propagate_block		PARAMS ((basic_block, regset,
						 regset, int));
static int insn_dead_p			PARAMS ((rtx, regset, int, rtx));
static int libcall_dead_p		PARAMS ((rtx, regset, rtx, rtx));
static void mark_set_regs		PARAMS ((regset, regset, rtx,
						 rtx, rtx));
static void mark_set_1			PARAMS ((regset, regset, rtx,
						 rtx, rtx));
static void find_auto_inc		PARAMS ((regset, rtx, rtx));
static int try_pre_increment_1		PARAMS ((rtx));
static int try_pre_increment		PARAMS ((rtx, rtx, HOST_WIDE_INT));
static void mark_used_regs		PARAMS ((regset, regset, rtx, int, rtx));
void dump_flow_info			PARAMS ((FILE *));
void debug_flow_info			PARAMS ((void));
static void dump_edge_info		PARAMS ((FILE *, edge, int));

static void count_reg_sets_1		PARAMS ((rtx));
static void count_reg_sets		PARAMS ((rtx));
static void count_reg_references	PARAMS ((rtx));
static void invalidate_mems_from_autoinc PARAMS ((rtx));
static void remove_fake_successors	PARAMS ((basic_block));
static void flow_nodes_print		PARAMS ((const char *, const sbitmap, FILE *));
static void flow_exits_print		PARAMS ((const char *, const edge *, int, FILE *));
static void flow_loops_cfg_dump		PARAMS ((const struct loops *, FILE *));
static int flow_loop_nested_p		PARAMS ((struct loop *, struct loop *));
static int flow_loop_exits_find		PARAMS ((const sbitmap, edge **));
static int flow_loop_nodes_find		PARAMS ((basic_block, basic_block, sbitmap));
static int flow_depth_first_order_compute PARAMS ((int *));
static basic_block flow_loop_pre_header_find PARAMS ((basic_block, const sbitmap *));
static void flow_loop_tree_node_add	PARAMS ((struct loop *, struct loop *));
static void flow_loops_tree_build	PARAMS ((struct loops *));
static int flow_loop_level_compute	PARAMS ((struct loop *, int));
static int flow_loops_level_compute	PARAMS ((struct loops *));

/* This function is always defined so it can be called from the
   debugger, and it is declared extern so we don't get warnings about
   it being unused.  */
void verify_flow_info			PARAMS ((void));
int flow_loop_outside_edge_p		PARAMS ((const struct loop *, edge));
void clear_log_links			PARAMS ((rtx));
/* Find basic blocks of the current function.
   F is the first insn of the function and NREGS the number of register
   numbers in use.  */

void
find_basic_blocks (f, nregs, file)
     rtx f;
     int nregs ATTRIBUTE_UNUSED;
     FILE *file ATTRIBUTE_UNUSED;
{
  int max_uid;

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      int i;

      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
	 tag for reuse during create_basic_block, just in case some pass
	 copies around basic block notes improperly.  */
      for (i = 0; i < n_basic_blocks; ++i)
	BASIC_BLOCK (i)->aux = NULL;

      VARRAY_FREE (basic_block_info);
    }

  n_basic_blocks = count_basic_blocks (f);

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");

  label_value_list = find_basic_blocks_1 (f);

  /* Record the block to which an insn belongs.  */
  /* ??? This should be done another way, by which (perhaps) a label is
     tagged directly with the basic block that it starts.  It is used for
     more than that currently, but IMO that is the only valid use.  */

  max_uid = get_max_uid ();
#ifdef AUTO_INC_DEC
  /* Leave space for insns life_analysis makes in some cases for auto-inc.
     These cases are rare, so we don't need too much space.  */
  max_uid += max_uid / 10;
#endif

  compute_bb_for_insn (max_uid);

  /* Discover the edges of our cfg.  */

  record_active_eh_regions (f);
  make_edges (label_value_list);

  /* Do very simple cleanup now, for the benefit of code that runs between
     here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
  tidy_fallthru_edges ();

  mark_critical_edges ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
/* Count the basic blocks of the function.  */

static int
count_basic_blocks (f)
     rtx f;
{
  register rtx insn;
  register RTX_CODE prev_code;
  register int count = 0;
  int eh_region = 0;
  int call_had_abnormal_edge = 0;
  rtx prev_call = NULL_RTX;

  prev_code = JUMP_INSN;
  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      register RTX_CODE code = GET_CODE (insn);

      if (code == CODE_LABEL
	  || (GET_RTX_CLASS (code) == 'i'
	      && (prev_code == JUMP_INSN
		  || prev_code == BARRIER
		  || (prev_code == CALL_INSN && call_had_abnormal_edge))))
	count++;

      /* Record whether this call created an edge.  */
      if (code == CALL_INSN)
	{
	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	  int region = (note ? INTVAL (XEXP (note, 0)) : 1);

	  call_had_abnormal_edge = 0;

	  /* If there is an EH region or rethrow, we have an edge.  */
	  if ((eh_region && region > 0)
	      || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
	    call_had_abnormal_edge = 1;
	  else
	    {
	      /* If there is a nonlocal goto label and the specified
		 region number isn't -1, we have an edge.  (0 means
		 no throw, but might have a nonlocal goto).  */
	      if (nonlocal_goto_handler_labels && region >= 0)
		call_had_abnormal_edge = 1;
	    }

	  prev_call = insn;
	}
      else if (code != NOTE)
	prev_call = NULL_RTX;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	++eh_region;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
	--eh_region;

      if (code != NOTE)
	prev_code = code;
    }

  /* The rest of the compiler works a bit more smoothly when we don't have to
     check for the edge case of do-nothing functions with no basic blocks.  */
  if (count == 0)
    {
      emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
      count = 1;
    }

  return count;
}
/* Find all basic blocks of the function whose first insn is F.

   Collect and return a list of labels whose addresses are taken.  This
   will be used in make_edges for use with computed gotos.  */

static rtx
find_basic_blocks_1 (f)
     rtx f;
{
  register rtx insn, next;
  int call_has_abnormal_edge = 0;
  int i = 0;
  rtx bb_note = NULL_RTX;
  rtx eh_list = NULL_RTX;
  rtx label_value_list = NULL_RTX;
  rtx head = NULL_RTX;
  rtx end = NULL_RTX;
  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     count_basic_blocks continues to use the old form as a check.  */
530 for (insn = f; insn; insn = next)
532 enum rtx_code code = GET_CODE (insn);
534 next = NEXT_INSN (insn);
536 if (code == CALL_INSN)
538 /* Record whether this call created an edge. */
539 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
540 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
541 call_has_abnormal_edge = 0;
543 /* If there is an EH region or rethrow, we have an edge. */
544 if ((eh_list && region > 0)
545 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
546 call_has_abnormal_edge = 1;
549 /* If there is a nonlocal goto label and the specified
550 region number isn't -1, we have an edge. (0 means
551 no throw, but might have a nonlocal goto). */
552 if (nonlocal_goto_handler_labels && region >= 0)
553 call_has_abnormal_edge = 1;
561 int kind = NOTE_LINE_NUMBER (insn);
563 /* Keep a LIFO list of the currently active exception notes. */
564 if (kind == NOTE_INSN_EH_REGION_BEG)
565 eh_list = alloc_INSN_LIST (insn, eh_list);
566 else if (kind == NOTE_INSN_EH_REGION_END)
569 eh_list = XEXP (eh_list, 1);
570 free_INSN_LIST_node (t);
	  /* Look for basic block notes with which to keep the
	     basic_block_info pointers stable.  Unthread the note now;
	     we'll put it back at the right place in create_basic_block.
	     Or not at all if we've already found a note in this block.  */
	  else if (kind == NOTE_INSN_BASIC_BLOCK)
	    {
	      if (bb_note == NULL_RTX)
		bb_note = insn;

	      next = flow_delete_insn (insn);
	    }
588 /* A basic block starts at a label. If we've closed one off due
589 to a barrier or some such, no need to do it again. */
590 if (head != NULL_RTX)
592 /* While we now have edge lists with which other portions of
593 the compiler might determine a call ending a basic block
594 does not imply an abnormal edge, it will be a bit before
595 everything can be updated. So continue to emit a noop at
596 the end of such a block. */
597 if (GET_CODE (end) == CALL_INSN
598 && ! SIBLING_CALL_P (end))
600 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
601 end = emit_insn_after (nop, end);
604 create_basic_block (i++, head, end, bb_note);
611 /* A basic block ends at a jump. */
612 if (head == NULL_RTX)
616 /* ??? Make a special check for table jumps. The way this
617 happens is truly and amazingly gross. We are about to
618 create a basic block that contains just a code label and
619 an addr*vec jump insn. Worse, an addr_diff_vec creates
620 its own natural loop.
622 Prevent this bit of brain damage, pasting things together
623 correctly in make_edges.
625 The correct solution involves emitting the table directly
626 on the tablejump instruction as a note, or JUMP_LABEL. */
628 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
629 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
637 goto new_bb_inclusive;
640 /* A basic block ends at a barrier. It may be that an unconditional
641 jump already closed the basic block -- no need to do it again. */
642 if (head == NULL_RTX)
645 /* While we now have edge lists with which other portions of the
646 compiler might determine a call ending a basic block does not
647 imply an abnormal edge, it will be a bit before everything can
648 be updated. So continue to emit a noop at the end of such a
650 if (GET_CODE (end) == CALL_INSN
651 && ! SIBLING_CALL_P (end))
653 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
654 end = emit_insn_after (nop, end);
656 goto new_bb_exclusive;
659 /* A basic block ends at a call that can either throw or
660 do a non-local goto. */
661 if (call_has_abnormal_edge)
664 if (head == NULL_RTX)
669 create_basic_block (i++, head, end, bb_note);
670 head = end = NULL_RTX;
677 if (GET_RTX_CLASS (code) == 'i')
679 if (head == NULL_RTX)
686 if (GET_RTX_CLASS (code) == 'i')
690 /* Make a list of all labels referred to other than by jumps
691 (which just don't have the REG_LABEL notes).
693 Make a special exception for labels followed by an ADDR*VEC,
694 as this would be a part of the tablejump setup code.
696 Make a special exception for the eh_return_stub_label, which
697 we know isn't part of any otherwise visible control flow. */
699 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
700 if (REG_NOTE_KIND (note) == REG_LABEL)
702 rtx lab = XEXP (note, 0), next;
		if (lab == eh_return_stub_label)
		  ;
706 else if ((next = next_nonnote_insn (lab)) != NULL
707 && GET_CODE (next) == JUMP_INSN
708 && (GET_CODE (PATTERN (next)) == ADDR_VEC
709 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		  ;
		else
		  label_value_list
		    = alloc_EXPR_LIST (0, XEXP (note, 0), label_value_list);
718 if (head != NULL_RTX)
719 create_basic_block (i++, head, end, bb_note);
  if (i != n_basic_blocks)
    abort ();
724 return label_value_list;
/* Tidy the CFG by deleting unreachable code and whatnot.  */

void
cleanup_cfg (f)
     rtx f;
{
  delete_unreachable_blocks ();
  move_stray_eh_region_notes ();
  record_active_eh_regions (f);
  try_merge_blocks ();
  mark_critical_edges ();

  /* Kill the data we won't maintain.  */
  label_value_list = NULL_RTX;
}
/* Create a new basic block consisting of the instructions between
   HEAD and END inclusive.  Reuses the note and basic block struct
   in BB_NOTE, if any.  */

void
create_basic_block (index, head, end, bb_note)
     int index;
     rtx head, end, bb_note;
{
  basic_block bb;

  if (bb_note
      && ! RTX_INTEGRATED_P (bb_note)
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */
      if (GET_CODE (head) == CODE_LABEL)
	add_insn_after (bb_note, head);
      else
	{
	  add_insn_before (bb_note, head);
	  head = bb_note;
	}
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.
	 Since we allow basic block structs in rtl, give the struct
	 the same lifetime by allocating it off the function obstack
	 rather than using malloc.  */

      bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
      memset (bb, 0, sizeof (*bb));

      if (GET_CODE (head) == CODE_LABEL)
	bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
      else
	{
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	}
      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  bb->head = head;
  bb->end = end;
  bb->index = index;
  BASIC_BLOCK (index) = bb;

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;
}
/* Records the basic block struct in BB_FOR_INSN, for every instruction
   indexed by INSN_UID.  MAX is the size of the array.  */

void
compute_bb_for_insn (max)
     int max;
{
  int i;

  if (basic_block_for_insn)
    VARRAY_FREE (basic_block_for_insn);
  VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, end;

      insn = bb->head;
      end = bb->end;

      while (insn)
	{
	  int uid = INSN_UID (insn);
	  if (uid < max)
	    VARRAY_BB (basic_block_for_insn, uid) = bb;
	  if (insn == end)
	    break;
	  insn = NEXT_INSN (insn);
	}
    }
}
/* Free the memory associated with the edge structures.  */

static void
clear_edges ()
{
  int i;
  edge n, e;

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);

      for (e = bb->succ; e ; e = n)
	{
	  n = e->succ_next;
	  free (e);
	}

      bb->succ = 0;
      bb->pred = 0;
    }

  for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
    {
      n = e->succ_next;
      free (e);
    }

  ENTRY_BLOCK_PTR->succ = 0;
  EXIT_BLOCK_PTR->pred = 0;

  n_edges = 0;
}
/* Identify the edges between basic blocks.

   NONLOCAL_LABEL_LIST is a list of non-local labels in the function.  Blocks
   that are otherwise unreachable may be reachable with a non-local goto.

   BB_EH_END is an array indexed by basic block number in which we record
   the list of exception regions active at the end of the basic block.  */

static void
make_edges (label_value_list)
     rtx label_value_list;
{
  int i;
  eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
  sbitmap *edge_cache = NULL;
885 /* Assume no computed jump; revise as we create edges. */
886 current_function_has_computed_jump = 0;
888 /* Heavy use of computed goto in machine-generated code can lead to
889 nearly fully-connected CFGs. In that case we spend a significant
890 amount of time searching the edge lists for duplicates. */
891 if (forced_labels || label_value_list)
893 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
894 sbitmap_vector_zero (edge_cache, n_basic_blocks);
897 /* By nature of the way these get numbered, block 0 is always the entry. */
898 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
900 for (i = 0; i < n_basic_blocks; ++i)
902 basic_block bb = BASIC_BLOCK (i);
      rtx insn, x;
      enum rtx_code code;
      int force_fallthru = 0;
907 /* Examine the last instruction of the block, and discover the
908 ways we can leave the block. */
      insn = bb->end;
      code = GET_CODE (insn);
914 if (code == JUMP_INSN)
918 /* ??? Recognize a tablejump and do the right thing. */
919 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
920 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
921 && GET_CODE (tmp) == JUMP_INSN
922 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
923 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
928 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
929 vec = XVEC (PATTERN (tmp), 0);
	      else
		vec = XVEC (PATTERN (tmp), 1);
933 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
934 make_label_edge (edge_cache, bb,
935 XEXP (RTVEC_ELT (vec, j), 0), 0);
937 /* Some targets (eg, ARM) emit a conditional jump that also
938 contains the out-of-range target. Scan for these and
939 add an edge if necessary. */
940 if ((tmp = single_set (insn)) != NULL
941 && SET_DEST (tmp) == pc_rtx
942 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
943 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
944 make_label_edge (edge_cache, bb,
945 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
#ifdef CASE_DROPS_THROUGH
	      /* Silly VAXen.  The ADDR_VEC is going to be in the way of
		 us naturally detecting fallthru into the next block.  */
	      force_fallthru = 1;
#endif
954 /* If this is a computed jump, then mark it as reaching
955 everything on the label_value_list and forced_labels list. */
956 else if (computed_jump_p (insn))
958 current_function_has_computed_jump = 1;
960 for (x = label_value_list; x; x = XEXP (x, 1))
961 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
963 for (x = forced_labels; x; x = XEXP (x, 1))
964 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
	  /* A return insn creates an edge to the exit block.  */
968 else if (returnjump_p (insn))
969 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
	  /* Otherwise, we have a plain conditional or unconditional jump.  */
	  else
	    {
	      if (! JUMP_LABEL (insn))
		abort ();
	      make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
	    }
980 /* If this is a sibling call insn, then this is in effect a
981 combined call and return, and so we need an edge to the
982 exit block. No need to worry about EH edges, since we
983 wouldn't have created the sibling call in the first place. */
985 if (code == CALL_INSN && SIBLING_CALL_P (insn))
986 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
989 /* If this is a CALL_INSN, then mark it as reaching the active EH
990 handler for this CALL_INSN. If we're handling asynchronous
991 exceptions then any insn can reach any of the active handlers.
993 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
995 if (code == CALL_INSN || asynchronous_exceptions)
997 /* Add any appropriate EH edges. We do this unconditionally
998 since there may be a REG_EH_REGION or REG_EH_RETHROW note
999 on the call, and this needn't be within an EH region. */
1000 make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);
1002 /* If we have asynchronous exceptions, do the same for *all*
1003 exception regions active in the block. */
1004 if (asynchronous_exceptions
1005 && bb->eh_beg != bb->eh_end)
1007 if (bb->eh_beg >= 0)
1008 make_eh_edge (edge_cache, eh_nest_info, bb,
1009 NULL_RTX, bb->eh_beg);
1011 for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
1012 if (GET_CODE (x) == NOTE
1013 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
1014 || NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
1016 int region = NOTE_EH_HANDLER (x);
		    make_eh_edge (edge_cache, eh_nest_info, bb,
				  NULL_RTX, region);
1022 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1024 /* ??? This could be made smarter: in some cases it's possible
1025 to tell that certain calls will not do a nonlocal goto.
1027 For example, if the nested functions that do the nonlocal
1028 gotos do not have their addresses taken, then only calls to
1029 those functions or to other nested functions that use them
1030 could possibly do nonlocal gotos. */
1031 /* We do know that a REG_EH_REGION note with a value less
1032 than 0 is guaranteed not to perform a non-local goto. */
1033 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1034 if (!note || INTVAL (XEXP (note, 0)) >= 0)
1035 for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
1036 make_label_edge (edge_cache, bb, XEXP (x, 0),
1037 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1041 /* We know something about the structure of the function __throw in
1042 libgcc2.c. It is the only function that ever contains eh_stub
1043 labels. It modifies its return address so that the last block
1044 returns to one of the eh_stub labels within it. So we have to
1045 make additional edges in the flow graph. */
1046 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
1047 make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);
1049 /* Find out if we can drop through to the next block. */
1050 insn = next_nonnote_insn (insn);
1051 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1052 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1053 else if (i + 1 < n_basic_blocks)
1055 rtx tmp = BLOCK_HEAD (i + 1);
1056 if (GET_CODE (tmp) == NOTE)
1057 tmp = next_nonnote_insn (tmp);
1058 if (force_fallthru || insn == tmp)
1059 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
  free_eh_nesting_info (eh_nest_info);
  if (edge_cache)
    sbitmap_vector_free (edge_cache);
}
/* Create an edge between two basic blocks.  FLAGS are auxiliary information
   about the edge that is accumulated between calls.  */

void
make_edge (edge_cache, src, dst, flags)
     sbitmap *edge_cache;
     basic_block src, dst;
     int flags;
{
  int use_edge_cache;
  edge e;

  /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
     many edges to them, and we didn't allocate memory for it.  */
  use_edge_cache = (edge_cache
		    && src != ENTRY_BLOCK_PTR
		    && dst != EXIT_BLOCK_PTR);

  /* Make sure we don't add duplicate edges.  */
  if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
    for (e = src->succ; e ; e = e->succ_next)
      if (e->dest == dst)
	{
	  e->flags |= flags;
	  return;
	}

  e = (edge) xcalloc (1, sizeof (*e));
  n_edges++;

  e->succ_next = src->succ;
  e->pred_next = dst->pred;
  e->src = src;
  e->dest = dst;
  e->flags = flags;

  src->succ = e;
  dst->pred = e;

  if (use_edge_cache)
    SET_BIT (edge_cache[src->index], dst->index);
}
/* Create an edge from a basic block to a label.  */

static void
make_label_edge (edge_cache, src, label, flags)
     sbitmap *edge_cache;
     basic_block src;
     rtx label;
     int flags;
{
  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}
/* Create the edges generated by INSN in REGION.  */

static void
make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
     sbitmap *edge_cache;
     eh_nesting_info *eh_nest_info;
     basic_block src;
     rtx insn;
     int region;
{
  handler_info **handler_list;
  int num, is_call;

  is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
  num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
  while (--num >= 0)
    make_label_edge (edge_cache, src, handler_list[num]->handler_label,
		     EDGE_ABNORMAL | EDGE_EH | is_call);
}
/* EH_REGION notes appearing between basic blocks are ambiguous, and even
   dangerous if we intend to move basic blocks around.  Move such notes
   into the following block.  */

static void
move_stray_eh_region_notes ()
{
  int i;
  basic_block b1, b2;

  if (n_basic_blocks < 2)
    return;
1169 b2 = BASIC_BLOCK (n_basic_blocks - 1);
1170 for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
1172 rtx insn, next, list = NULL_RTX;
1174 b1 = BASIC_BLOCK (i);
1175 for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
1177 next = NEXT_INSN (insn);
1178 if (GET_CODE (insn) == NOTE
1179 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1180 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1182 /* Unlink from the insn chain. */
1183 NEXT_INSN (PREV_INSN (insn)) = next;
1184 PREV_INSN (next) = PREV_INSN (insn);
	      /* Queue it.  */
	      NEXT_INSN (insn) = list;
	      list = insn;
	    }
	}

      if (list == NULL_RTX)
	continue;

      /* Find where to insert these things.  */
      insn = b2->head;
1197 if (GET_CODE (insn) == CODE_LABEL)
1198 insn = NEXT_INSN (insn);
      while (list)
	{
	  next = NEXT_INSN (list);
	  add_insn_after (list, insn);
	  list = next;
	}
    }
}
/* Recompute eh_beg/eh_end for each basic block.  */

static void
record_active_eh_regions (f)
     rtx f;
{
  rtx insn, eh_list = NULL_RTX;
  int i = 0;
  basic_block bb = BASIC_BLOCK (0);

  for (insn = f; insn ; insn = NEXT_INSN (insn))
    {
      if (bb->head == insn)
	bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);

      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG)
	    eh_list = alloc_INSN_LIST (insn, eh_list);
	  else if (kind == NOTE_INSN_EH_REGION_END)
	    {
	      rtx t = XEXP (eh_list, 1);
	      free_INSN_LIST_node (eh_list);
	      eh_list = t;
	    }
	}

      if (bb->end == insn)
	{
	  bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
	  if (++i == n_basic_blocks)
	    break;
	  bb = BASIC_BLOCK (i);
	}
    }
}
/* Identify critical edges and set the bits appropriately.  */

static void
mark_critical_edges ()
{
  int i, n = n_basic_blocks;
  basic_block bb;

  /* We begin with the entry block.  This is not terribly important now,
     but could be if a front end (Fortran) implemented alternate entry
     points.  */
  bb = ENTRY_BLOCK_PTR;
  i = -1;

  while (1)
    {
      edge e;

      /* (1) Critical edges must have a source with multiple successors.  */
      if (bb->succ && bb->succ->succ_next)
	{
	  for (e = bb->succ; e ; e = e->succ_next)
	    {
	      /* (2) Critical edges must have a destination with multiple
		 predecessors.  Note that we know there is at least one
		 predecessor -- the edge we followed to get here.  */
	      if (e->dest->pred->pred_next)
		e->flags |= EDGE_CRITICAL;
	      else
		e->flags &= ~EDGE_CRITICAL;
	    }
	}
      else
	{
	  for (e = bb->succ; e ; e = e->succ_next)
	    e->flags &= ~EDGE_CRITICAL;
	}

      if (++i >= n)
	break;
      bb = BASIC_BLOCK (i);
    }
}
/* Split a (typically critical) edge.  Return the new block.
   Abort on abnormal edges.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

basic_block
split_edge (edge_in)
     edge edge_in;
{
  basic_block old_pred, bb, old_succ;
  edge edge_out;
  rtx bb_note;
  int i, j;
  /* Abnormal edges cannot be split.  */
  if ((edge_in->flags & EDGE_ABNORMAL) != 0)
    abort ();
1312 old_pred = edge_in->src;
1313 old_succ = edge_in->dest;
  /* Remove the existing edge from the destination's pred list.  */
  {
    edge *pp;
    for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
      continue;
    *pp = edge_in->pred_next;
    edge_in->pred_next = NULL;
  }
1324 /* Create the new structures. */
1325 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
1326 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1329 memset (bb, 0, sizeof (*bb));
1330 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
1331 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
1333 /* ??? This info is likely going to be out of date very soon. */
1334 if (old_succ->global_live_at_start)
1336 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1337 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
  else
    {
      CLEAR_REG_SET (bb->global_live_at_start);
      CLEAR_REG_SET (bb->global_live_at_end);
    }

  /* Wire them up.  */
  bb->pred = edge_in;
  bb->succ = edge_out;

  edge_in->dest = bb;
  edge_in->flags &= ~EDGE_CRITICAL;
1352 edge_out->pred_next = old_succ->pred;
1353 edge_out->succ_next = NULL;
  edge_out->src = bb;
  edge_out->dest = old_succ;
1356 edge_out->flags = EDGE_FALLTHRU;
1357 edge_out->probability = REG_BR_PROB_BASE;
1359 old_succ->pred = edge_out;
1361 /* Tricky case -- if there existed a fallthru into the successor
1362 (and we're not it) we must add a new unconditional jump around
1363 the new block we're actually interested in.
1365 Further, if that edge is critical, this means a second new basic
1366 block must be created to hold it. In order to simplify correct
1367 insn placement, do this before we touch the existing basic block
1368 ordering for the block we were really wanting. */
1369 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1372 for (e = edge_out->pred_next; e ; e = e->pred_next)
1373 if (e->flags & EDGE_FALLTHRU)
1378 basic_block jump_block;
1381 if ((e->flags & EDGE_CRITICAL) == 0
1382 && e->src != ENTRY_BLOCK_PTR)
1384 /* Non critical -- we can simply add a jump to the end
1385 of the existing predecessor. */
1386 jump_block = e->src;
1390 /* We need a new block to hold the jump. The simplest
1391 way to do the bulk of the work here is to recursively
1393 jump_block = split_edge (e);
1394 e = jump_block->succ;
1397 /* Now add the jump insn ... */
1398 pos = emit_jump_insn_after (gen_jump (old_succ->head),
1400 jump_block->end = pos;
1401 if (basic_block_for_insn)
1402 set_block_for_insn (pos, jump_block);
1403 emit_barrier_after (pos);
1405 /* ... let jump know that label is in use, ... */
1406 JUMP_LABEL (pos) = old_succ->head;
1407 ++LABEL_NUSES (old_succ->head);
1409 /* ... and clear fallthru on the outgoing edge. */
1410 e->flags &= ~EDGE_FALLTHRU;
1412 /* Continue splitting the interesting edge. */
1416 /* Place the new block just in front of the successor. */
1417 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1418 if (old_succ == EXIT_BLOCK_PTR)
1419 j = n_basic_blocks - 1;
1421 j = old_succ->index;
1422 for (i = n_basic_blocks - 1; i > j; --i)
1424 basic_block tmp = BASIC_BLOCK (i - 1);
1425 BASIC_BLOCK (i) = tmp;
1428 BASIC_BLOCK (i) = bb;
1431 /* Create the basic block note.
     Where we place the note can have a noticeable impact on the generated
1434 code. Consider this cfg:
1445 If we need to insert an insn on the edge from block 0 to block 1,
1446 we want to ensure the instructions we insert are outside of any
1447 loop notes that physically sit between block 0 and block 1. Otherwise
1448 we confuse the loop optimizer into thinking the loop is a phony. */
1449 if (old_succ != EXIT_BLOCK_PTR
1450 && PREV_INSN (old_succ->head)
1451 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
1452 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
1453 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1454 PREV_INSN (old_succ->head));
1455 else if (old_succ != EXIT_BLOCK_PTR)
1456 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1458 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
1459 NOTE_BASIC_BLOCK (bb_note) = bb;
1460 bb->head = bb->end = bb_note;
1462 /* Not quite simple -- for non-fallthru edges, we must adjust the
1463 predecessor's jump instruction to target our new block. */
1464 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1466 rtx tmp, insn = old_pred->end;
1467 rtx old_label = old_succ->head;
1468 rtx new_label = gen_label_rtx ();
1470 if (GET_CODE (insn) != JUMP_INSN)
1473 /* ??? Recognize a tablejump and adjust all matching cases. */
1474 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1475 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1476 && GET_CODE (tmp) == JUMP_INSN
1477 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1478 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1483 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1484 vec = XVEC (PATTERN (tmp), 0);
1486 vec = XVEC (PATTERN (tmp), 1);
1488 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1489 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1491 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1492 --LABEL_NUSES (old_label);
1493 ++LABEL_NUSES (new_label);
1496 /* Handle casesi dispatch insns */
1497 if ((tmp = single_set (insn)) != NULL
1498 && SET_DEST (tmp) == pc_rtx
1499 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1500 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1501 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1503 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1505 --LABEL_NUSES (old_label);
1506 ++LABEL_NUSES (new_label);
1511 /* This would have indicated an abnormal edge. */
1512 if (computed_jump_p (insn))
1515 /* A return instruction can't be redirected. */
1516 if (returnjump_p (insn))
1519 /* If the insn doesn't go where we think, we're confused. */
1520 if (JUMP_LABEL (insn) != old_label)
1523 redirect_jump (insn, new_label);
1526 emit_label_before (new_label, bb_note);
1527 bb->head = new_label;
/* Queue instructions for insertion on an edge between two basic blocks.
   The new instructions and basic blocks (if any) will not appear in the
   CFG until commit_edge_insertions is called.  */

void
insert_insn_on_edge (pattern, e)
     rtx pattern;
     edge e;
{
  /* We cannot insert instructions on an abnormal critical edge.
     It will be easier to find the culprit if we die now.  */
  if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
      == (EDGE_ABNORMAL|EDGE_CRITICAL))
    abort ();

  if (e->insns == NULL_RTX)
    start_sequence ();
  else
    push_to_sequence (e->insns);

  emit_insn (pattern);

  e->insns = get_insns ();
  end_sequence ();
}
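/* For example (an illustrative caller, not part of this file): a pass
   can queue a register-to-register copy on edge E with

	insert_insn_on_edge (gen_move_insn (dest_reg, src_reg), e);

   and later make all queued insns appear in the insn stream and the
   CFG by calling commit_edge_insertions ().  */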
/* Update the CFG for the instructions queued on edge E.  */

static void
commit_one_edge_insertion (e)
     edge e;
{
  rtx before = NULL_RTX, after = NULL_RTX, insns, tmp;
  basic_block bb;
  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns;
  e->insns = NULL_RTX;
1572 /* Figure out where to put these things. If the destination has
1573 one predecessor, insert there. Except for the exit block. */
1574 if (e->dest->pred->pred_next == NULL
1575 && e->dest != EXIT_BLOCK_PTR)
1579 /* Get the location correct wrt a code label, and "nice" wrt
1580 a basic block note, and before everything else. */
1582 if (GET_CODE (tmp) == CODE_LABEL)
1583 tmp = NEXT_INSN (tmp);
1584 if (GET_CODE (tmp) == NOTE
1585 && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
1586 tmp = NEXT_INSN (tmp);
1587 if (tmp == bb->head)
	before = tmp;
      else
	after = PREV_INSN (tmp);
1593 /* If the source has one successor and the edge is not abnormal,
1594 insert there. Except for the entry block. */
1595 else if ((e->flags & EDGE_ABNORMAL) == 0
1596 && e->src->succ->succ_next == NULL
1597 && e->src != ENTRY_BLOCK_PTR)
1600 /* It is possible to have a non-simple jump here. Consider a target
1601 where some forms of unconditional jumps clobber a register. This
1602 happens on the fr30 for example.
1604 We know this block has a single successor, so we can just emit
1605 the queued insns before the jump. */
1606 if (GET_CODE (bb->end) == JUMP_INSN)
	  /* We'd better be fallthru, or we've lost track of what's what.  */
	  if ((e->flags & EDGE_FALLTHRU) == 0)
	    abort ();
1620 /* Otherwise we must split the edge. */
1623 bb = split_edge (e);
1627 /* Now that we've found the spot, do the insertion. */
1629 /* Set the new block number for these insns, if structure is allocated. */
1630 if (basic_block_for_insn)
1633 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
1634 set_block_for_insn (i, bb);
1639 emit_insns_before (insns, before);
1640 if (before == bb->head)
1645 rtx last = emit_insns_after (insns, after);
1646 if (after == bb->end)
1650 if (GET_CODE (last) == JUMP_INSN)
1652 if (returnjump_p (last))
1654 /* ??? Remove all outgoing edges from BB and add one
1655 for EXIT. This is not currently a problem because
1656 this only happens for the (single) epilogue, which
1657 already has a fallthru edge to EXIT. */
1660 if (e->dest != EXIT_BLOCK_PTR
1661 || e->succ_next != NULL
1662 || (e->flags & EDGE_FALLTHRU) == 0)
		abort ();
	      e->flags &= ~EDGE_FALLTHRU;
1666 emit_barrier_after (last);
/* Update the CFG for all queued instructions.  */

void
commit_edge_insertions ()
{
  int i;
  basic_block bb;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  i = -1;
  bb = ENTRY_BLOCK_PTR;
  while (1)
    {
      edge e, next;

      for (e = bb->succ; e ; e = next)
	{
	  next = e->succ_next;
	  if (e->insns)
	    commit_one_edge_insertion (e);
	}

      if (++i >= n_basic_blocks)
	break;
      bb = BASIC_BLOCK (i);
    }
}
/* Delete all unreachable basic blocks.  */

static void
delete_unreachable_blocks ()
{
  basic_block *worklist, *tos;
  int deleted_handler;
  edge e;
  int i, n;

  n = n_basic_blocks;
  tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
1719 /* Use basic_block->aux as a marker. Clear them all. */
1721 for (i = 0; i < n; ++i)
1722 BASIC_BLOCK (i)->aux = NULL;
  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */
  for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
    {
      *tos++ = e->dest;

      /* Mark the block with a handy non-null value.  */
      e->dest->aux = e;
    }
1736 /* Iterate: find everything reachable from what we've already seen. */
1738 while (tos != worklist)
1740 basic_block b = *--tos;
      for (e = b->succ; e ; e = e->succ_next)
	if (! e->dest->aux)
	  {
	    *tos++ = e->dest;
	    e->dest->aux = e;
	  }
    }
1750 /* Delete all unreachable basic blocks. Count down so that we don't
1751 interfere with the block renumbering that happens in delete_block. */
1753 deleted_handler = 0;
1755 for (i = n - 1; i >= 0; --i)
1757 basic_block b = BASIC_BLOCK (i);
      if (b->aux != NULL)
	{
	  /* This block was found.  Tidy up the mark.  */
	  b->aux = NULL;
	}
      else
	deleted_handler |= delete_block (b);
1766 tidy_fallthru_edges ();
1768 /* If we deleted an exception handler, we may have EH region begin/end
1769 blocks to remove as well. */
1770 if (deleted_handler)
    delete_eh_regions ();

  free (worklist);
}
/* Find EH regions for which there is no longer a handler, and delete them.  */

static void
delete_eh_regions ()
{
  rtx insn;

  update_rethrow_references ();
1785 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1786 if (GET_CODE (insn) == NOTE)
1788 if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
1789 (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1791 int num = NOTE_EH_HANDLER (insn);
1792 /* A NULL handler indicates a region is no longer needed,
1793 as long as its rethrow label isn't used. */
1794 if (get_first_handler (num) == NULL && ! rethrow_used (num))
1796 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1797 NOTE_SOURCE_FILE (insn) = 0;
/* Return true if NOTE is not one of the ones that must be kept paired,
   so that we may simply delete them.  */

static int
can_delete_note_p (note)
     rtx note;
{
  return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
	  || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
}
/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  */

void
flow_delete_insn_chain (start, finish)
     rtx start, finish;
{
  /* Unchain the insns one by one.  It would be quicker to delete all
     of these with a single unchaining, rather than one at a time, but
     we need to keep the NOTE's.  */

  rtx next;

  while (1)
    {
      next = NEXT_INSN (start);
      if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
	;
      else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
	;
      else
	next = flow_delete_insn (start);

      if (start == finish)
	break;
      start = next;
    }
}
/* Delete the insns in a (non-live) block.  We physically delete every
   non-deleted-note insn, and update the flow graph appropriately.

   Return nonzero if we deleted an exception handler.  */

/* ??? Preserving all such notes strikes me as wrong.  It would be nice
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */

static int
delete_block (b)
     basic_block b;
{
  int deleted_handler = 0;
  rtx insn, end;
1858 /* If the head of this block is a CODE_LABEL, then it might be the
1859 label for an exception handler which can't be reached.
1861 We need to remove the label from the exception_handler_label list
1862 and remove the associated NOTE_INSN_EH_REGION_BEG and
1863 NOTE_INSN_EH_REGION_END notes. */
  insn = b->head;

  never_reached_warning (insn);
1869 if (GET_CODE (insn) == CODE_LABEL)
1871 rtx x, *prev = &exception_handler_labels;
1873 for (x = exception_handler_labels; x; x = XEXP (x, 1))
1875 if (XEXP (x, 0) == insn)
1877 /* Found a match, splice this label out of the EH label list. */
1878 *prev = XEXP (x, 1);
1879 XEXP (x, 1) = NULL_RTX;
1880 XEXP (x, 0) = NULL_RTX;
1882 /* Remove the handler from all regions */
1883 remove_handler (insn);
	      deleted_handler = 1;
	      break;
	    }
	  prev = &XEXP (x, 1);
	}
1890 /* This label may be referenced by code solely for its value, or
1891 referenced by static data, or something. We have determined
1892 that it is not reachable, but cannot delete the label itself.
1893 Save code space and continue to delete the balance of the block,
1894 along with properly updating the cfg. */
      if (! can_delete_label_p (insn))
	{
	  /* If we've only got one of these, skip the whole deleting
	     business below.  */
	  if (b->head == b->end)
	    goto no_delete_insns;
	  insn = NEXT_INSN (insn);
	}
    }
1905 /* Selectively unlink the insn chain. Include any BARRIER that may
1906 follow the basic block. */
  end = next_nonnote_insn (b->end);
  if (! end || GET_CODE (end) != BARRIER)
    end = b->end;
  flow_delete_insn_chain (insn, end);

 no_delete_insns:
  /* Remove the edges into and out of this block.  Note that there may
     indeed be edges in, if we are removing an unreachable loop.  */
  {
    edge e, next, *q;

    for (e = b->pred; e ; e = next)
      {
	for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
	  continue;
	*q = e->succ_next;
	next = e->pred_next;
	n_edges--;
	free (e);
      }
    for (e = b->succ; e ; e = next)
      {
	for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
	  continue;
	*q = e->pred_next;
	next = e->succ_next;
	n_edges--;
	free (e);
      }

    b->pred = NULL;
    b->succ = NULL;
  }
  /* Remove the basic block from the array, and compact behind it.  */
  expunge_block (b);

  return deleted_handler;
}
/* Remove block B from the basic block array and compact behind it.  */

static void
expunge_block (b)
     basic_block b;
{
  int i, n = n_basic_blocks;

  for (i = b->index; i + 1 < n; ++i)
    {
      basic_block x = BASIC_BLOCK (i + 1);
      BASIC_BLOCK (i) = x;
      x->index = i;
    }

  basic_block_info->num_elements--;
  n_basic_blocks--;
}
/* Delete INSN by patching it out.  Return the next insn.  */

rtx
flow_delete_insn (insn)
     rtx insn;
{
  rtx prev = PREV_INSN (insn);
  rtx next = NEXT_INSN (insn);

  PREV_INSN (insn) = NULL_RTX;
  NEXT_INSN (insn) = NULL_RTX;

  if (prev)
    NEXT_INSN (prev) = next;
  if (next)
    PREV_INSN (next) = prev;
  else
    set_last_insn (prev);

  if (GET_CODE (insn) == CODE_LABEL)
    remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
    LABEL_NUSES (JUMP_LABEL (insn))--;

  return next;
}
/* True if a given label can be deleted.  */

static int
can_delete_label_p (label)
     rtx label;
{
  rtx x;

  if (LABEL_PRESERVE_P (label))
    return 0;

  for (x = forced_labels; x ; x = XEXP (x, 1))
    if (label == XEXP (x, 0))
      return 0;
  for (x = label_value_list; x ; x = XEXP (x, 1))
    if (label == XEXP (x, 0))
      return 0;
  for (x = exception_handler_labels; x ; x = XEXP (x, 1))
    if (label == XEXP (x, 0))
      return 0;

  /* User declared labels must be preserved.  */
  if (LABEL_NAME (label) != 0)
    return 0;

  return 1;
}
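/* An illustration of the transformation done by the next function
   (hypothetical layout): given an insn stream

	... | B | ... | A | jump-to-B | ...

   where nothing falls through into A, the insns of A can be lifted out
   and placed immediately before B; A and B are then contiguous and
   merge_blocks_nomove can combine them.  */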
/* Blocks A and B are to be merged into a single block.  A has no incoming
   fallthru edge, so it can be moved before B without adding or modifying
   any jumps (aside from the jump from A to B).  */

static int
merge_blocks_move_predecessor_nojumps (a, b)
     basic_block a, b;
{
  rtx start, end, barrier;
  int index;

  start = a->head;
  end = a->end;
2039 /* We want to delete the BARRIER after the end of the insns we are
2040 going to move. If we don't find a BARRIER, then do nothing. This
2041 can happen in some cases if we have labels we can not delete.
2043 Similarly, do nothing if we can not delete the label at the start
2044 of the target block. */
  barrier = next_nonnote_insn (end);
  if (GET_CODE (barrier) != BARRIER
      || (GET_CODE (b->head) == CODE_LABEL
	  && ! can_delete_label_p (b->head)))
    return 0;

  flow_delete_insn (barrier);
2053 /* Move block and loop notes out of the chain so that we do not
2054 disturb their order.
2056 ??? A better solution would be to squeeze out all the non-nested notes
2057 and adjust the block trees appropriately. Even better would be to have
2058 a tighter connection between block trees and rtl so that this is not
2060 start = squeeze_notes (start, end);
2062 /* Scramble the insn chain. */
2063 if (end != PREV_INSN (b->head))
2064 reorder_insns (start, end, PREV_INSN (b->head));
  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
	     a->index, b->index);
  /* Swap the records for the two blocks around.  Although we are deleting B,
     A is now where B was and we want to compact the BB array from where
     A used to be.  */
  BASIC_BLOCK (a->index) = b;
  BASIC_BLOCK (b->index) = a;
  index = a->index;
  a->index = b->index;
  b->index = index;
  /* Now blocks A and B are contiguous.  Merge them.  */
  merge_blocks_nomove (a, b);

  return 1;
}
/* Blocks A and B are to be merged into a single block.  B has no outgoing
   fallthru edge, so it can be moved after A without adding or modifying
   any jumps (aside from the jump from A to B).  */

static int
merge_blocks_move_successor_nojumps (a, b)
     basic_block a, b;
{
  rtx start, end, barrier;

  start = b->head;
  end = b->end;
2100 /* We want to delete the BARRIER after the end of the insns we are
2101 going to move. If we don't find a BARRIER, then do nothing. This
2102 can happen in some cases if we have labels we can not delete.
2104 Similarly, do nothing if we can not delete the label at the start
2105 of the target block. */
  barrier = next_nonnote_insn (end);
  if (GET_CODE (barrier) != BARRIER
      || (GET_CODE (b->head) == CODE_LABEL
	  && ! can_delete_label_p (b->head)))
    return 0;

  flow_delete_insn (barrier);
2114 /* Move block and loop notes out of the chain so that we do not
2115 disturb their order.
2117 ??? A better solution would be to squeeze out all the non-nested notes
2118 and adjust the block trees appropriately. Even better would be to have
2119 a tighter connection between block trees and rtl so that this is not
2121 start = squeeze_notes (start, end);
2123 /* Scramble the insn chain. */
2124 reorder_insns (start, end, a->end);
2126 /* Now blocks A and B are contiguous. Merge them. */
2127 merge_blocks_nomove (a, b);
  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
	     b->index, a->index);

  return 1;
}
/* Blocks A and B are to be merged into a single block.  The insns
   are already contiguous, hence `nomove'.  */

static void
merge_blocks_nomove (a, b)
     basic_block a, b;
{
  edge e;
  rtx b_head = b->head, b_end = b->end, a_end = a->end;
  int b_empty = 0;
2149 /* If there was a CODE_LABEL beginning B, delete it. */
2152 if (GET_CODE (b_head) == CODE_LABEL)
2154 /* Detect basic blocks with nothing but a label. This can happen
2155 in particular at the end of a function. */
      if (b_head == b_end)
	b_empty = 1;
      b_head = flow_delete_insn (b_head);
2161 /* Delete the basic block note. */
2162 if (GET_CODE (b_head) == NOTE
2163 && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
      if (b_head == b_end)
	b_empty = 1;
      b_head = flow_delete_insn (b_head);
2170 /* If there was a jump out of A, delete it. */
2172 if (GET_CODE (a_end) == JUMP_INSN)
2176 prev = prev_nonnote_insn (a_end);
2181 /* If this was a conditional jump, we need to also delete
2182 the insn that set cc0. */
2184 if (prev && sets_cc0_p (prev))
2187 prev = prev_nonnote_insn (prev);
2190 flow_delete_insn (tmp);
2194 /* Note that a->head != a->end, since we should have at least a
2195 bb note plus the jump, so prev != insn. */
2196 flow_delete_insn (a_end);
2200 /* By definition, there should only be one successor of A, and that is
     B.  Free that edge struct.  */
  n_edges--;
  free (a->succ);

  /* Adjust the edges out of B for the new owner.  */
  for (e = b->succ; e ; e = e->succ_next)
    e->src = a;
  a->succ = b->succ;
2210 /* Reassociate the insns of B with A. */
2213 BLOCK_FOR_INSN (b_head) = a;
2214 while (b_head != b_end)
2216 b_head = NEXT_INSN (b_head);
2217 BLOCK_FOR_INSN (b_head) = a;
  /* Compact the basic block array.  */
  expunge_block (b);
}
/* Attempt to merge basic blocks that are potentially non-adjacent.
   Return true iff the attempt succeeded.  */

static int
merge_blocks (e, b, c)
     edge e;
     basic_block b, c;
{
2235 /* If B has a fallthru edge to C, no need to move anything. */
2236 if (e->flags & EDGE_FALLTHRU)
2238 /* If a label still appears somewhere and we cannot delete the label,
2239 then we cannot merge the blocks. The edge was tidied already. */
2241 rtx insn, stop = NEXT_INSN (c->head);
2242 for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
2243 if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
2246 merge_blocks_nomove (b, c);
      if (rtl_dump_file)
	fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
		 b->index, c->index);

      return 1;
    }
  else
    {
      edge tmp_edge;
      basic_block d;
      int c_has_outgoing_fallthru;
2261 int b_has_incoming_fallthru;
2263 /* We must make sure to not munge nesting of exception regions,
2264 lexical blocks, and loop notes.
2266 The first is taken care of by requiring that the active eh
2267 region at the end of one block always matches the active eh
2268 region at the beginning of the next block.
2270 The latter two are taken care of by squeezing out all the notes. */
2272 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2273 executed and we may want to treat blocks which have two out
2274 edges, one normal, one abnormal as only having one edge for
2275 block merging purposes. */
2277 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2278 if (tmp_edge->flags & EDGE_FALLTHRU)
2279 break;
2280 c_has_outgoing_fallthru = (tmp_edge != NULL);
2282 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2283 if (tmp_edge->flags & EDGE_FALLTHRU)
2284 break;
2285 b_has_incoming_fallthru = (tmp_edge != NULL);
2287 /* If B does not have an incoming fallthru, and the exception regions
2288 match, then it can be moved immediately before C without introducing
2289 or modifying jumps.
2291 C can not be the first block, so we do not have to worry about
2292 accessing a non-existent block. */
2293 d = BASIC_BLOCK (c->index - 1);
2294 if (! b_has_incoming_fallthru
2295 && d->eh_end == b->eh_beg
2296 && b->eh_end == c->eh_beg)
2297 return merge_blocks_move_predecessor_nojumps (b, c);
2299 /* Otherwise, we're going to try to move C after B. Make sure the
2300 exception regions match.
2302 If B is the last basic block, then we must not try to access the
2303 block structure for block B + 1. Luckily in that case we do not
2304 need to worry about matching exception regions. */
2305 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2306 if (b->eh_end == c->eh_beg
2307 && (d == NULL || c->eh_end == d->eh_beg))
2309 /* If C does not have an outgoing fallthru, then it can be moved
2310 immediately after B without introducing or modifying jumps. */
2311 if (! c_has_outgoing_fallthru)
2312 return merge_blocks_move_successor_nojumps (b, c);
2314 /* Otherwise, we'll need to insert an extra jump, and possibly
2315 a new block to contain it. */
2316 /* ??? Not implemented yet. */
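/* To summarize: merge_blocks tries three strategies in order.  If E is
   already a fallthru edge from B to C, the blocks are merged in place.
   Otherwise, if B has no incoming fallthru edge and the exception
   regions line up, B is moved immediately before C; failing that, if C
   has no outgoing fallthru edge and the exception regions line up, C is
   moved immediately after B.  The remaining case would require
   inserting a new jump and is not handled above.  */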
2323 /* Top level driver for merge_blocks. */
2330 /* Attempt to merge blocks as made possible by edge removal. If a block
2331 has only one successor, and the successor has only one predecessor,
2332 they may be combined. */
2334 for (i = 0; i < n_basic_blocks; )
2336 basic_block c, b = BASIC_BLOCK (i);
2339 /* A loop because chains of blocks might be combinable. */
2340 while ((s = b->succ) != NULL
2341 && s->succ_next == NULL
2342 && (s->flags & EDGE_EH) == 0
2343 && (c = s->dest) != EXIT_BLOCK_PTR
2344 && c->pred->pred_next == NULL
2345 /* If the jump insn has side effects, we can't kill the edge. */
2346 && (GET_CODE (b->end) != JUMP_INSN
2347 || onlyjump_p (b->end))
2348 && merge_blocks (s, b, c))
2351 /* Don't get confused by the index shift caused by deleting blocks. */
2356 /* The given edge should potentially be a fallthru edge. If that is in
2357 fact true, delete the jump and barriers that are in the way. */
2360 tidy_fallthru_edge (e, b, c)
2366 /* ??? In a late-running flow pass, other folks may have deleted basic
2367 blocks by nopping out blocks, leaving multiple BARRIERs between here
2368 and the target label. They ought to be chastised and fixed.
2370 We can also wind up with a sequence of undeletable labels between
2371 one block and the next.
2373 So search through a sequence of barriers, labels, and notes for
2374 the head of block C and assert that we really do fall through. */
2376 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2379 /* Remove what will soon cease being the jump insn from the source block.
2380 If block B consisted only of this single jump, turn it into a deleted
2381 note. */
2382 q = b->end;
2383 if (GET_CODE (q) == JUMP_INSN)
2386 /* If this was a conditional jump, we need to also delete
2387 the insn that set cc0. */
2388 if (! simplejump_p (q) && condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2395 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2396 NOTE_SOURCE_FILE (q) = 0;
2399 b->end = q = PREV_INSN (q);
2402 /* Selectively unlink the sequence. */
2403 if (q != PREV_INSN (c->head))
2404 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2406 e->flags |= EDGE_FALLTHRU;
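/* For example, given an insn chain such as (registers and labels here
   are illustrative)

	(jump_insn ... (label_ref L))	<- end of block B
	(barrier)
	(code_label L)			<- head of block C

   where C numerically follows B, the jump is deleted (or B's remnant is
   turned into a NOTE_INSN_DELETED), the intervening barrier is unlinked,
   and the edge from B to C is marked EDGE_FALLTHRU.  */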
2409 /* Fix up edges that now fall through, or rather should now fall through
2410 but previously required a jump around now deleted blocks. Simplify
2411 the search by only examining blocks numerically adjacent, since this
2412 is how find_basic_blocks created them. */
2415 tidy_fallthru_edges ()
2419 for (i = 1; i < n_basic_blocks; ++i)
2421 basic_block b = BASIC_BLOCK (i - 1);
2422 basic_block c = BASIC_BLOCK (i);
2425 /* We care about simple conditional or unconditional jumps with
2426 a single successor.
2428 If we had a conditional branch to the next instruction when
2429 find_basic_blocks was called, then there will only be one
2430 out edge for the block which ended with the conditional
2431 branch (since we do not create duplicate edges).
2433 Furthermore, the edge will be marked as a fallthru because we
2434 merge the flags for the duplicate edges. So we do not want to
2435 check that the edge is not a FALLTHRU edge. */
2436 if ((s = b->succ) != NULL
2437 && s->succ_next == NULL
2438 && s->dest == c
2439 /* If the jump insn has side effects, we can't tidy the edge. */
2440 && (GET_CODE (b->end) != JUMP_INSN
2441 || onlyjump_p (b->end)))
2442 tidy_fallthru_edge (s, b, c);
2446 /* Discover and record the loop depth at the head of each basic block. */
2449 calculate_loop_depth (dump)
2454 /* The loop infrastructure does the real job for us. */
2455 flow_loops_find (&loops);
2458 flow_loops_dump (&loops, dump, 0);
2460 flow_loops_free (&loops);
2463 /* Perform data flow analysis.
2464 F is the first insn of the function and NREGS the number of register numbers
2465 in use. */
2468 life_analysis (f, nregs, file, remove_dead_code)
2472 int remove_dead_code;
2474 #ifdef ELIMINABLE_REGS
2476 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2477 #endif
2481 /* Record which registers will be eliminated. We use this in
2482 mark_used_regs. */
2484 CLEAR_HARD_REG_SET (elim_reg_set);
2486 #ifdef ELIMINABLE_REGS
2487 for (i = 0; i < (int) (sizeof eliminables / sizeof eliminables[0]); i++)
2488 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2489 #else
2490 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2491 #endif
2493 /* We want alias analysis information for local dead store elimination. */
2494 init_alias_analysis ();
2497 flags = PROP_DEATH_NOTES | PROP_REG_INFO;
2501 if (! remove_dead_code)
2502 flags &= ~(PROP_SCAN_DEAD_CODE | PROP_KILL_DEAD_CODE);
2505 /* The post-reload life analysis has (on a global basis) the same
2506 registers live as were computed by reload itself; otherwise register
2507 elimination offsets and such may be incorrect.
2509 Reload will mark some registers as live even though they do not
2510 appear in the rtl. */
2511 if (reload_completed)
2512 flags &= ~PROP_REG_INFO;
2516 /* Always remove no-op moves. Do this before other processing so
2517 that we don't have to keep re-scanning them. */
2518 delete_noop_moves (f);
2520 /* Some targets can emit simpler epilogues if they know that sp was
2521 not ever modified during the function. After reload, of course,
2522 we've already emitted the epilogue so there's no sense searching. */
2523 if (! reload_completed)
2524 notice_stack_pointer_modification (f);
2526 /* Allocate and zero out data structures that will record the
2527 data from lifetime analysis. */
2528 allocate_reg_life_data ();
2529 allocate_bb_life_data ();
2530 reg_next_use = (rtx *) xcalloc (nregs, sizeof (rtx));
2531 all_blocks = sbitmap_alloc (n_basic_blocks);
2532 sbitmap_ones (all_blocks);
2534 /* Find the set of registers live on function exit. */
2535 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2537 /* "Update" life info from zero. It'd be nice to begin the
2538 relaxation with just the exit and noreturn blocks, but that set
2539 is not immediately handy. */
2541 if (flags & PROP_REG_INFO)
2542 memset (regs_ever_live, 0, sizeof(regs_ever_live));
2543 update_life_info (all_blocks, UPDATE_LIFE_GLOBAL, flags);
2546 sbitmap_free (all_blocks);
2547 free (reg_next_use);
2548 reg_next_use = NULL;
2549 end_alias_analysis ();
2552 dump_flow_info (file);
2554 free_basic_block_vars (1);
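/* A hypothetical caller (the exact arguments depend on the pass) would
   run the analysis over the whole function along these lines:

	life_analysis (get_insns (), max_reg_num (), rtl_dump_file, 1);

   i.e. starting from the first insn, covering every register number in
   use, dumping to the rtl dump file, and allowing dead code removal.  */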
2557 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2558 Search for REGNO. If found, abort if it is not wider than word_mode. */
2561 verify_wide_reg_1 (px, pregno)
2565 rtx x = *px;
2566 int regno = *(int *) pregno;
2568 if (GET_CODE (x) == REG && REGNO (x) == regno)
2570 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2571 abort ();
2572 return 1;
2577 /* A subroutine of verify_local_live_at_start. Search through insns
2578 between HEAD and END looking for register REGNO. */
2581 verify_wide_reg (regno, head, end)
2587 if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
2588 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2589 return;
2590 if (head == end)
2591 break;
2592 head = NEXT_INSN (head);
2595 /* We didn't find the register at all. Something's way screwy. */
2596 abort ();
2599 /* A subroutine of update_life_info. Verify that there are no untoward
2600 changes in live_at_start during a local update. */
2603 verify_local_live_at_start (new_live_at_start, bb)
2604 regset new_live_at_start;
2607 if (reload_completed)
2609 /* After reload, there are no pseudos, nor subregs of multi-word
2610 registers. The regsets should exactly match. */
2611 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2612 abort ();
2618 /* Find the set of changed registers. */
2619 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2621 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2623 /* No registers should die. */
2624 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2625 abort ();
2626 /* Verify that the now-live register is wider than word_mode. */
2627 verify_wide_reg (i, bb->head, bb->end);
2632 /* Updates life information starting with the basic blocks set in BLOCKS.
2634 If LOCAL_ONLY, such as after splitting or peepholeing, we are only
2635 expecting local modifications to basic blocks. If we find extra
2636 registers live at the beginning of a block, then we either killed
2637 useful data, or we have a broken split that wants data not provided.
2638 If we find registers removed from live_at_start, that means we have
2639 a broken peephole that is killing a register it shouldn't.
2641 ??? This is not true in one situation -- when a pre-reload splitter
2642 generates subregs of a multi-word pseudo, current life analysis will
2643 lose the kill. So we _can_ have a pseudo go live. How irritating.
2645 BLOCK_FOR_INSN is assumed to be correct.
2647 PROP_FLAGS should not contain PROP_LOG_LINKS unless the caller sets
2648 up reg_next_use. Including PROP_REG_INFO does not properly refresh
2649 regs_ever_live unless the caller resets it to zero. */
2652 update_life_info (blocks, extent, prop_flags)
2654 enum update_life_extent extent;
2658 regset_head tmp_head;
2661 tmp = INITIALIZE_REG_SET (tmp_head);
2663 /* For a global update, we go through the relaxation process again. */
2664 if (extent != UPDATE_LIFE_LOCAL)
2666 calculate_global_regs_live (blocks, blocks,
2667 prop_flags & PROP_SCAN_DEAD_CODE);
2669 /* If asked, remove notes from the blocks we'll update. */
2670 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2671 count_or_remove_death_notes (blocks, 1);
2674 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2676 basic_block bb = BASIC_BLOCK (i);
2678 COPY_REG_SET (tmp, bb->global_live_at_end);
2679 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2681 if (extent == UPDATE_LIFE_LOCAL)
2682 verify_local_live_at_start (tmp, bb);
2687 if (prop_flags & PROP_REG_INFO)
2689 /* The only pseudos that are live at the beginning of the function
2690 are those that were not set anywhere in the function. local-alloc
2691 doesn't know how to handle these correctly, so mark them as not
2692 local to any one basic block. */
2693 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2694 FIRST_PSEUDO_REGISTER, i,
2695 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2697 /* We have a problem with any pseudoreg that lives across the setjmp.
2698 ANSI says that if a user variable does not change in value between
2699 the setjmp and the longjmp, then the longjmp preserves it. This
2700 includes longjmp from a place where the pseudo appears dead.
2701 (In principle, the value still exists if it is in scope.)
2702 If the pseudo goes in a hard reg, some other value may occupy
2703 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2704 Conclusion: such a pseudo must not go in a hard reg. */
2705 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2706 FIRST_PSEUDO_REGISTER, i,
2708 if (regno_reg_rtx[i] != 0)
2710 REG_LIVE_LENGTH (i) = -1;
2711 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
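/* As a hedged sketch of a caller: a pass that has just rewritten the
   insns of basic block N and wants local life info refreshed might do

	sbitmap blocks = sbitmap_alloc (n_basic_blocks);
	sbitmap_zero (blocks);
	SET_BIT (blocks, N);
	update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
	sbitmap_free (blocks);

   PROP_LOG_LINKS is deliberately omitted, per the comment above, since
   reg_next_use is not set up by such a caller.  */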
2717 /* Free the variables allocated by find_basic_blocks.
2719 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2722 free_basic_block_vars (keep_head_end_p)
2723 int keep_head_end_p;
2725 if (basic_block_for_insn)
2727 VARRAY_FREE (basic_block_for_insn);
2728 basic_block_for_insn = NULL;
2731 if (! keep_head_end_p)
2734 VARRAY_FREE (basic_block_info);
2737 ENTRY_BLOCK_PTR->aux = NULL;
2738 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2739 EXIT_BLOCK_PTR->aux = NULL;
2740 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2744 /* Return nonzero if the destination of SET equals the source. */
2749 rtx src = SET_SRC (set);
2750 rtx dst = SET_DEST (set);
2752 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
2754 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
2755 return 0;
2756 src = SUBREG_REG (src);
2757 dst = SUBREG_REG (dst);
2760 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
2761 && REGNO (src) == REGNO (dst));
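/* For instance, (set (reg:SI 58) (reg:SI 58)) is a no-op, as is
   (set (subreg:SI (reg:DI 60) 0) (subreg:SI (reg:DI 60) 0)); but
   (set (subreg:SI (reg:DI 60) 0) (subreg:SI (reg:DI 60) 1)) is not,
   since the SUBREG_WORDs differ.  (Register numbers are illustrative.)  */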
2764 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2765 value to itself. */
2770 rtx pat = PATTERN (insn);
2772 /* Insns carrying these notes are useful later on. */
2773 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2774 return 0;
2776 if (GET_CODE (pat) == SET && set_noop_p (pat))
2777 return 1;
2779 if (GET_CODE (pat) == PARALLEL)
2782 /* If nothing but SETs of registers to themselves,
2783 this insn can also be deleted. */
2784 for (i = 0; i < XVECLEN (pat, 0); i++)
2786 rtx tem = XVECEXP (pat, 0, i);
2788 if (GET_CODE (tem) == USE
2789 || GET_CODE (tem) == CLOBBER)
2790 continue;
2792 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2793 return 0;
2801 /* Delete any insns that copy a register to itself. */
2804 delete_noop_moves (f)
2808 for (insn = f; insn; insn = NEXT_INSN (insn))
2810 if (GET_CODE (insn) == INSN && noop_move_p (insn))
2812 PUT_CODE (insn, NOTE);
2813 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2814 NOTE_SOURCE_FILE (insn) = 0;
2819 /* Determine if the stack pointer is constant over the life of the function.
2820 Only useful before prologues have been emitted. */
2823 notice_stack_pointer_modification_1 (x, pat, data)
2825 rtx pat ATTRIBUTE_UNUSED;
2826 void *data ATTRIBUTE_UNUSED;
2828 if (x == stack_pointer_rtx
2829 /* The stack pointer is only modified indirectly as the result
2830 of a push until later in flow. See the comments in rtl.texi
2831 regarding Embedded Side-Effects on Addresses. */
2832 || (GET_CODE (x) == MEM
2833 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2834 || GET_CODE (XEXP (x, 0)) == PRE_INC
2835 || GET_CODE (XEXP (x, 0)) == POST_DEC
2836 || GET_CODE (XEXP (x, 0)) == POST_INC)
2837 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2838 current_function_sp_is_unchanging = 0;
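/* For example, note_stores hands this function the destination of a
   push such as (set (mem:SI (pre_dec:SI (reg:SI 7))) (reg:SI 0)); the
   PRE_DEC of the stack pointer buried inside the MEM address is what
   the second arm of the test above catches.  (Register numbers are
   illustrative.)  */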
2842 notice_stack_pointer_modification (f)
2847 /* Assume that the stack pointer is unchanging if alloca hasn't
2848 been used. */
2849 current_function_sp_is_unchanging = !current_function_calls_alloca;
2850 if (! current_function_sp_is_unchanging)
2851 return;
2853 for (insn = f; insn; insn = NEXT_INSN (insn))
2855 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2857 /* Check if insn modifies the stack pointer. */
2858 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
2860 if (! current_function_sp_is_unchanging)
2861 return;
2866 /* Mark a register in SET. Hard registers in large modes get all
2867 of their component registers set as well. */
2869 mark_reg (reg, xset)
2873 regset set = (regset) xset;
2874 int regno = REGNO (reg);
2876 if (GET_MODE (reg) == BLKmode)
2877 abort ();
2879 SET_REGNO_REG_SET (set, regno);
2880 if (regno < FIRST_PSEUDO_REGISTER)
2882 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2883 while (--n > 0)
2884 SET_REGNO_REG_SET (set, regno + n);
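/* E.g. on a 32-bit target, marking a DImode value that lives in hard
   reg N sets both bit N and bit N+1 of the regset, since
   HARD_REGNO_NREGS yields two component registers.  */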
2888 /* Mark those regs which are needed at the end of the function as live
2889 at the end of the last basic block. */
2891 mark_regs_live_at_end (set)
2896 /* If exiting needs the right stack value, consider the stack pointer
2897 live at the end of the function. */
2898 if ((HAVE_epilogue && reload_completed)
2899 || ! EXIT_IGNORE_STACK
2900 || (! FRAME_POINTER_REQUIRED
2901 && ! current_function_calls_alloca
2902 && flag_omit_frame_pointer)
2903 || current_function_sp_is_unchanging)
2905 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2908 /* Mark the frame pointer if needed at the end of the function. If
2909 we end up eliminating it, it will be removed from the live list
2910 of each basic block by reload. */
2912 if (! reload_completed || frame_pointer_needed)
2914 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2915 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2916 /* If they are different, also mark the hard frame pointer as live */
2917 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
2918 #endif
2921 #ifdef PIC_OFFSET_TABLE_REGNUM
2922 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
2923 /* Many architectures have a GP register even without flag_pic.
2924 Assume the pic register is not in use, or will be handled by
2925 other means, if it is not fixed. */
2926 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
2927 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
2928 #endif
2929 #endif
2931 /* Mark all global registers, and all registers used by the epilogue
2932 as being live at the end of the function since they may be
2933 referenced by our caller. */
2934 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2935 if (global_regs[i]
2936 #ifdef EPILOGUE_USES
2937 || EPILOGUE_USES (i)
2938 #endif
2939 )
2940 SET_REGNO_REG_SET (set, i);
2942 /* Mark all call-saved registers that we actually used. */
2943 if (HAVE_epilogue && reload_completed)
2945 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2946 if (! call_used_regs[i] && regs_ever_live[i])
2947 SET_REGNO_REG_SET (set, i);
2950 /* Mark function return value. */
2951 diddle_return_value (mark_reg, set);
2954 /* Propagate global life info around the graph of basic blocks. Begin
2955 considering blocks with their corresponding bit set in BLOCKS_IN.
2956 BLOCKS_OUT is set for every block that was changed. */
2959 calculate_global_regs_live (blocks_in, blocks_out, flags)
2960 sbitmap blocks_in, blocks_out;
2963 basic_block *queue, *qhead, *qtail, *qend;
2964 regset tmp, new_live_at_end;
2965 regset_head tmp_head;
2966 regset_head new_live_at_end_head;
2969 tmp = INITIALIZE_REG_SET (tmp_head);
2970 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
2972 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
2973 because the `head == tail' style test for an empty queue doesn't
2974 work with a full queue. */
2975 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
2977 qhead = qend = queue + n_basic_blocks + 2;
2979 /* Clear out the garbage that might be hanging out in bb->aux. */
2980 for (i = n_basic_blocks - 1; i >= 0; --i)
2981 BASIC_BLOCK (i)->aux = NULL;
2983 /* Queue the blocks set in the initial mask. Do this in reverse block
2984 number order so that we are more likely for the first round to do
2985 useful work. We use AUX non-null to flag that the block is queued. */
2986 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
2988 basic_block bb = BASIC_BLOCK (i);
2993 sbitmap_zero (blocks_out);
2995 while (qhead != qtail)
2997 int rescan, changed;
3006 /* Begin by propagating live_at_start from the successor blocks. */
3007 CLEAR_REG_SET (new_live_at_end);
3008 for (e = bb->succ; e ; e = e->succ_next)
3010 basic_block sb = e->dest;
3011 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3014 if (bb == ENTRY_BLOCK_PTR)
3016 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3020 /* On our first pass through this block, we'll go ahead and continue.
3021 Recognize first pass by local_set NULL. On subsequent passes, we
3022 get to skip out early if live_at_end wouldn't have changed. */
3024 if (bb->local_set == NULL)
3026 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3027 rescan = 1;
3031 /* If any bits were removed from live_at_end, we'll have to
3032 rescan the block. This wouldn't be necessary if we had
3033 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3034 local_live is really dependent on live_at_end. */
3035 CLEAR_REG_SET (tmp);
3036 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3037 new_live_at_end, BITMAP_AND_COMPL);
3041 /* Find the set of changed bits. Take this opportunity
3042 to notice that this set is empty and early out. */
3043 CLEAR_REG_SET (tmp);
3044 changed = bitmap_operation (tmp, bb->global_live_at_end,
3045 new_live_at_end, BITMAP_XOR);
3049 /* If any of the changed bits overlap with local_set,
3050 we'll have to rescan the block. Detect overlap by
3051 the AND with ~local_set turning off bits. */
3052 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3053 BITMAP_AND_COMPL);
3057 /* Let our caller know that BB changed enough to require its
3058 death notes updated. */
3059 SET_BIT (blocks_out, bb->index);
3063 /* Add to live_at_start the set of all registers in
3064 new_live_at_end that aren't in the old live_at_end. */
3066 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3067 BITMAP_AND_COMPL);
3068 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3070 changed = bitmap_operation (bb->global_live_at_start,
3071 bb->global_live_at_start,
3072 tmp, BITMAP_IOR);
3078 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3080 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3081 into live_at_start. */
3082 propagate_block (bb, new_live_at_end, bb->local_set, flags);
3084 /* If live_at_start didn't change, no need to go farther. */
3085 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3086 continue;
3088 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3091 /* Queue all predecessors of BB so that we may re-examine
3092 their live_at_end. */
3093 for (e = bb->pred; e ; e = e->pred_next)
3095 basic_block pb = e->src;
3096 if (pb->aux == NULL)
3107 FREE_REG_SET (new_live_at_end);
3109 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3111 basic_block bb = BASIC_BLOCK (i);
3112 FREE_REG_SET (bb->local_set);
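/* The relaxation above is standard iterative backward dataflow:

	live_at_end (B)   = union of live_at_start (S) over successors S
	live_at_start (B) = local_live (B) | (live_at_end (B) & ~local_set (B))

   The following is a minimal, self-contained sketch of the same fixed
   point computation on a toy three-block CFG, with regsets reduced to
   bit masks.  It uses simple round-robin iteration where the code above
   uses a worklist; the example CFG and all names are hypothetical.  */
#if 0
#include <stdio.h>

#define NBLOCKS 3

static const unsigned succ[NBLOCKS] = { 0x6, 0x4, 0x0 }; /* 0->1,2; 1->2 */
static const unsigned use[NBLOCKS] = { 0x1, 0x2, 0x5 };  /* used before set */
static const unsigned def[NBLOCKS] = { 0x2, 0x4, 0x0 };  /* set in block */

int
main ()
{
  unsigned live_in[NBLOCKS], live_out[NBLOCKS];
  int b, s, changed;

  for (b = 0; b < NBLOCKS; b++)
    live_in[b] = live_out[b] = 0;

  /* Iterate to a fixed point, scanning blocks in reverse order so that
     most information flows backward in a single pass.  */
  do
    {
      changed = 0;
      for (b = NBLOCKS - 1; b >= 0; b--)
	{
	  unsigned out = 0, in;

	  for (s = 0; s < NBLOCKS; s++)
	    if (succ[b] & (1u << s))
	      out |= live_in[s];
	  live_out[b] = out;

	  in = use[b] | (out & ~def[b]);
	  if (in != live_in[b])
	    live_in[b] = in, changed = 1;
	}
    }
  while (changed);

  for (b = 0; b < NBLOCKS; b++)
    printf ("block %d: live_in %#x live_out %#x\n",
	    b, live_in[b], live_out[b]);
  return 0;
}
#endif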
3118 /* Subroutines of life analysis. */
3120 /* Allocate the permanent data structures that represent the results
3121 of life analysis. Not static since used also for stupid life analysis. */
3124 allocate_bb_life_data ()
3128 for (i = 0; i < n_basic_blocks; i++)
3130 basic_block bb = BASIC_BLOCK (i);
3132 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3133 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3136 ENTRY_BLOCK_PTR->global_live_at_end
3137 = OBSTACK_ALLOC_REG_SET (function_obstack);
3138 EXIT_BLOCK_PTR->global_live_at_start
3139 = OBSTACK_ALLOC_REG_SET (function_obstack);
3141 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
3145 allocate_reg_life_data ()
3149 /* Recalculate the register space, in case it has grown. Old style
3150 vector oriented regsets would set regset_{size,bytes} here also. */
3151 allocate_reg_info (max_regno, FALSE, FALSE);
3153 /* Reset all the data we'll collect in propagate_block and its
3154 subroutines. */
3155 for (i = 0; i < max_regno; i++)
3159 REG_N_DEATHS (i) = 0;
3160 REG_N_CALLS_CROSSED (i) = 0;
3161 REG_LIVE_LENGTH (i) = 0;
3162 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3166 /* Compute the registers live at the beginning of a basic block
3167 from those live at the end.
3169 When called, OLD contains those live at the end.
3170 On return, it contains those live at the beginning.
3171 BB is the basic block, whose head and end insns delimit the scan.
3173 FLAGS selects the PROP_* operations to perform. On a final pass
3174 (one that acts on the life info rather than just computing it),
3175 we delete dead stores, set up the logical links and dead-variables
3176 lists of instructions, and merge instructions for autoincrement and
3177 autodecrement addresses.
3179 SIGNIFICANT is nonzero only the first time for each basic block.
3180 If it is nonzero, it points to a regset in which we store
3181 a 1 for each register that is set within the block. */
3186 propagate_block (bb, old, significant, flags)
3195 regset_head live_head;
3197 regset_head dead_head;
3199 /* Find the loop depth for this block. Ignore loop level changes in the
3200 middle of the basic block -- for register allocation purposes, the
3201 important uses will be in the blocks wholly contained within the loop,
3202 not in the loop pre-header or post-trailer. */
3203 loop_depth = bb->loop_depth;
3205 dead = INITIALIZE_REG_SET (dead_head);
3206 live = INITIALIZE_REG_SET (live_head);
3210 if (flags & PROP_REG_INFO)
3214 /* Process the regs live at the end of the block.
3215 Mark them as not local to any one basic block. */
3216 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3218 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
3222 /* Scan the block an insn at a time from end to beginning. */
3224 for (insn = bb->end; ; insn = prev)
3226 prev = PREV_INSN (insn);
3228 if (GET_CODE (insn) == NOTE)
3230 /* If this is a call to `setjmp' et al,
3231 warn if any non-volatile datum is live. */
3233 if ((flags & PROP_REG_INFO)
3234 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3235 IOR_REG_SET (regs_live_at_setjmp, old);
3238 /* Update the life-status of regs for this insn.
3239 First DEAD gets which regs are set in this insn
3240 then LIVE gets which regs are used in this insn.
3241 Then the regs live before the insn
3242 are those live after, with DEAD regs turned off,
3243 and then LIVE regs turned on. */
3245 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3248 rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3249 int insn_is_dead = 0;
3250 int libcall_is_dead = 0;
3252 if (flags & PROP_SCAN_DEAD_CODE)
3254 insn_is_dead = insn_dead_p (PATTERN (insn), old, 0,
3255 REG_NOTES (insn));
3256 libcall_is_dead = (insn_is_dead && note != 0
3257 && libcall_dead_p (PATTERN (insn), old,
3258 note, insn));
3261 /* We almost certainly don't want to delete prologue or epilogue
3262 instructions. Warn about probable compiler lossage. */
3265 && (((HAVE_epilogue || HAVE_prologue)
3266 && prologue_epilogue_contains (insn))
3267 || (HAVE_sibcall_epilogue
3268 && sibcall_epilogue_contains (insn))))
3270 if (flags & PROP_KILL_DEAD_CODE)
3272 warning ("ICE: would have deleted prologue/epilogue insn");
3273 if (!inhibit_warnings)
3274 abort ();
3276 libcall_is_dead = insn_is_dead = 0;
3279 /* If an instruction consists of just dead store(s) on final pass,
3280 "delete" it by turning it into a NOTE of type NOTE_INSN_DELETED.
3281 We could really delete it with delete_insn, but that
3282 can cause trouble for first or last insn in a basic block. */
3283 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3286 /* If the insn referred to a label, note that the label is
3287 going away. */
3288 for (inote = REG_NOTES (insn); inote; inote = XEXP (inote, 1))
3290 if (REG_NOTE_KIND (inote) == REG_LABEL)
3292 rtx label = XEXP (inote, 0);
3294 LABEL_NUSES (label)--;
3296 /* If this label was attached to an ADDR_VEC, it's
3297 safe to delete the ADDR_VEC. In fact, it's pretty
3298 much mandatory to delete it, because the ADDR_VEC may
3299 be referencing labels that no longer exist. */
3300 if (LABEL_NUSES (label) == 0
3301 && (next = next_nonnote_insn (label)) != NULL
3302 && GET_CODE (next) == JUMP_INSN
3303 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3304 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3306 rtx pat = PATTERN (next);
3307 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3308 int len = XVECLEN (pat, diff_vec_p);
3310 for (i = 0; i < len; i++)
3311 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3312 PUT_CODE (next, NOTE);
3313 NOTE_LINE_NUMBER (next) = NOTE_INSN_DELETED;
3314 NOTE_SOURCE_FILE (next) = 0;
3319 PUT_CODE (insn, NOTE);
3320 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
3321 NOTE_SOURCE_FILE (insn) = 0;
3323 /* CC0 is now known to be dead. Either this insn used it,
3324 in which case it doesn't anymore, or clobbered it,
3325 so the next insn can't use it. */
3328 /* If this insn is copying the return value from a library call,
3329 delete the entire library call. */
3330 if (libcall_is_dead)
3332 rtx first = XEXP (note, 0);
3334 while (INSN_DELETED_P (first))
3335 first = NEXT_INSN (first);
3340 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3341 NOTE_SOURCE_FILE (p) = 0;
3347 CLEAR_REG_SET (dead);
3348 CLEAR_REG_SET (live);
3350 /* See if this is an increment or decrement that can be
3351 merged into a following memory address. */
3352 #ifdef AUTO_INC_DEC
3354 register rtx x = single_set (insn);
3356 /* Does this instruction increment or decrement a register? */
3357 if (!reload_completed
3358 && (flags & PROP_AUTOINC)
3359 && x != 0
3360 && GET_CODE (SET_DEST (x)) == REG
3361 && (GET_CODE (SET_SRC (x)) == PLUS
3362 || GET_CODE (SET_SRC (x)) == MINUS)
3363 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3364 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3365 /* Ok, look for a following memory ref we can combine with.
3366 If one is found, change the memory ref to a PRE_INC
3367 or PRE_DEC, cancel this insn, and return 1.
3368 Return 0 if nothing has been done. */
3369 && try_pre_increment_1 (insn))
3372 #endif /* AUTO_INC_DEC */
3374 /* If this is not the final pass, and this insn is copying the
3375 value of a library call and it's dead, don't scan the
3376 insns that perform the library call, so that the call's
3377 arguments are not marked live. */
3378 if (libcall_is_dead)
3380 /* Mark the dest reg as `significant'. */
3381 mark_set_regs (old, dead, PATTERN (insn), NULL_RTX,
3382 significant, flags);
3384 insn = XEXP (note, 0);
3385 prev = PREV_INSN (insn);
3387 else if (GET_CODE (PATTERN (insn)) == SET
3388 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3389 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3390 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3391 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3392 /* We have an insn to pop a constant amount off the stack.
3393 (Such insns use PLUS regardless of the direction of the stack,
3394 and any insn to adjust the stack by a constant is always a pop.)
3395 These insns, if not dead stores, have no effect on life. */
3399 /* Any regs live at the time of a call instruction
3400 must not go in a register clobbered by calls.
3401 Find all regs now live and record this for them. */
3403 if (GET_CODE (insn) == CALL_INSN
3404 && (flags & PROP_REG_INFO))
3405 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3407 REG_N_CALLS_CROSSED (i)++;
3410 /* LIVE gets the regs used in INSN;
3411 DEAD gets those set by it. Dead insns don't make anything
3412 live. */
3414 mark_set_regs (old, dead, PATTERN (insn),
3415 insn, significant, flags);
3417 /* If an insn doesn't use CC0, it becomes dead since we
3418 assume that every insn clobbers it. So show it dead here;
3419 mark_used_regs will set it live if it is referenced. */
3423 mark_used_regs (old, live, PATTERN (insn), flags, insn);
3425 /* Sometimes we may have inserted something before INSN (such as
3426 a move) when we make an auto-inc. So ensure we will scan
3427 those insns. */
3429 prev = PREV_INSN (insn);
3432 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3438 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3439 note;
3440 note = XEXP (note, 1))
3441 if (GET_CODE (XEXP (note, 0)) == USE)
3442 mark_used_regs (old, live, XEXP (XEXP (note, 0), 0),
3445 /* Each call clobbers all call-clobbered regs that are not
3446 global or fixed. Note that the function-value reg is a
3447 call-clobbered reg, and mark_set_regs has already had
3448 a chance to handle it. */
3450 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3451 if (call_used_regs[i] && ! global_regs[i]
3452 && ! fixed_regs[i])
3454 SET_REGNO_REG_SET (dead, i);
3455 if (significant)
3456 SET_REGNO_REG_SET (significant, i);
3459 /* The stack ptr is used (honorarily) by a CALL insn. */
3460 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
3462 /* Calls may also reference any of the global registers,
3463 so they are made live. */
3464 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3465 if (global_regs[i])
3466 mark_used_regs (old, live,
3467 gen_rtx_REG (reg_raw_mode[i], i),
3468 flags, insn);
3470 /* Calls also clobber memory. */
3471 free_EXPR_LIST_list (&mem_set_list);
3474 /* Update OLD for the registers used or set. */
3475 AND_COMPL_REG_SET (old, dead);
3476 IOR_REG_SET (old, live);
3480 /* On final pass, update counts of how many insns in which
3481 each reg is live. */
3482 if (flags & PROP_REG_INFO)
3483 EXECUTE_IF_SET_IN_REG_SET (old, 0, i, { REG_LIVE_LENGTH (i)++; });
3486 if (insn == bb->head)
3487 break;
3490 FREE_REG_SET (dead);
3491 FREE_REG_SET (live);
3492 free_EXPR_LIST_list (&mem_set_list);
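/* As a concrete instance of the per-insn update performed above:
   scanning (set (reg 1) (plus (reg 2) (reg 3))) yields DEAD = {r1} and
   LIVE = {r2, r3}, so the set live before the insn is the set live
   after it, minus r1, plus r2 and r3.  (Register numbers are
   illustrative.)  */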
3495 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3496 (SET expressions whose destinations are registers dead after the insn).
3497 NEEDED is the regset that says which regs are alive after the insn.
3499 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3501 If X is the entire body of an insn, NOTES contains the reg notes
3502 pertaining to the insn. */
3505 insn_dead_p (x, needed, call_ok, notes)
3509 rtx notes ATTRIBUTE_UNUSED;
3511 enum rtx_code code = GET_CODE (x);
3514 /* If flow is invoked after reload, we must take existing AUTO_INC
3515 expressions into account. */
3516 if (reload_completed)
3518 for ( ; notes; notes = XEXP (notes, 1))
3520 if (REG_NOTE_KIND (notes) == REG_INC)
3522 int regno = REGNO (XEXP (notes, 0));
3524 /* Don't delete insns to set global regs. */
3525 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3526 || REGNO_REG_SET_P (needed, regno))
3527 return 0;
3533 /* If setting something that's a reg or part of one,
3534 see if that register's altered value will be live. */
3538 rtx r = SET_DEST (x);
3541 if (GET_CODE (r) == CC0)
3545 /* A SET that is a subroutine call cannot be dead. */
3546 if (GET_CODE (SET_SRC (x)) == CALL)
3547 return 0;
3552 /* Don't eliminate loads from volatile memory or volatile asms. */
3553 else if (volatile_refs_p (SET_SRC (x)))
3554 return 0;
3556 if (GET_CODE (r) == MEM)
3560 if (MEM_VOLATILE_P (r))
3561 return 0;
3563 /* Walk the set of memory locations we are currently tracking
3564 and see if one is an identical match to this memory location.
3565 If so, this memory write is dead (remember, we're walking
3566 backwards from the end of the block to the start). */
3567 temp = mem_set_list;
3570 if (rtx_equal_p (XEXP (temp, 0), r))
3571 return 1;
3572 temp = XEXP (temp, 1);
3577 while (GET_CODE (r) == SUBREG
3578 || GET_CODE (r) == STRICT_LOW_PART
3579 || GET_CODE (r) == ZERO_EXTRACT)
3580 r = XEXP (r, 0);
3582 if (GET_CODE (r) == REG)
3584 int regno = REGNO (r);
3587 if (REGNO_REG_SET_P (needed, regno))
3588 return 0;
3590 /* If this is a hard register, verify that subsequent
3591 words are not needed. */
3592 if (regno < FIRST_PSEUDO_REGISTER)
3594 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3597 if (REGNO_REG_SET_P (needed, regno+n))
3598 return 0;
3601 /* Don't delete insns to set global regs. */
3602 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3603 return 0;
3605 /* Make sure insns to set the stack pointer aren't deleted. */
3606 if (regno == STACK_POINTER_REGNUM)
3607 return 0;
3609 /* Make sure insns to set the frame pointer aren't deleted. */
3610 if (regno == FRAME_POINTER_REGNUM
3611 && (! reload_completed || frame_pointer_needed))
3612 return 0;
3613 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3614 if (regno == HARD_FRAME_POINTER_REGNUM
3615 && (! reload_completed || frame_pointer_needed))
3616 return 0;
3617 #endif
3619 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3620 /* Make sure insns to set arg pointer are never deleted
3621 (if the arg pointer isn't fixed, there will be a USE
3622 for it, so we can treat it normally). */
3623 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3624 return 0;
3625 #endif
3627 /* Otherwise, the set is dead. */
3628 return 1;
3633 /* If performing several activities, insn is dead if each activity
3634 is individually dead. Also, CLOBBERs and USEs can be ignored; a
3635 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
3636 needed. */
3637 else if (code == PARALLEL)
3639 int i = XVECLEN (x, 0);
3641 for (i--; i >= 0; i--)
3642 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3643 && GET_CODE (XVECEXP (x, 0, i)) != USE
3644 && ! insn_dead_p (XVECEXP (x, 0, i), needed, call_ok, NULL_RTX))
3650 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3651 is not necessarily true for hard registers. */
3652 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3653 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3654 && ! REGNO_REG_SET_P (needed, REGNO (XEXP (x, 0))))
3657 /* We do not check other CLOBBER or USE here. An insn consisting of just
3658 a CLOBBER or just a USE should not be deleted. */
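/* For example, if pseudo 100 has no bit set in NEEDED, then the body
   (set (reg:SI 100) (const_int 0)) is dead by the tests above, whereas
   a set of the stack pointer, the (needed) frame pointer, or a global
   register never is.  (The pseudo number is illustrative.)  */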
3662 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3663 return 1 if the entire library call is dead.
3664 This is true if X copies a register (hard or pseudo)
3665 and if the hard return reg of the call insn is dead.
3666 (The caller should have tested the destination of X already for death.)
3668 If this insn doesn't just copy a register, then we don't
3669 have an ordinary libcall. In that case, cse could not have
3670 managed to substitute the source for the dest later on,
3671 so we can assume the libcall is dead.
3673 NEEDED is the bit vector of pseudoregs live before this insn.
3674 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
3677 libcall_dead_p (x, needed, note, insn)
3683 register RTX_CODE code = GET_CODE (x);
3687 register rtx r = SET_SRC (x);
3688 if (GET_CODE (r) == REG)
3690 rtx call = XEXP (note, 0);
3694 /* Find the call insn. */
3695 while (call != insn && GET_CODE (call) != CALL_INSN)
3696 call = NEXT_INSN (call);
3698 /* If there is none, do nothing special,
3699 since ordinary death handling can understand these insns. */
3703 /* See if the hard reg holding the value is dead.
3704 If this is a PARALLEL, find the call within it. */
3705 call_pat = PATTERN (call);
3706 if (GET_CODE (call_pat) == PARALLEL)
3708 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3709 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
3710 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
3713 /* This may be a library call that is returning a value
3714 via invisible pointer. Do nothing special, since
3715 ordinary death handling can understand these insns. */
3719 call_pat = XVECEXP (call_pat, 0, i);
3722 return insn_dead_p (call_pat, needed, 1, REG_NOTES (call));
3728 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3729 live at function entry. Don't count global register variables, variables
3730 in registers that can be used for function arg passing, or variables in
3731 fixed hard registers. */
3734 regno_uninitialized (regno)
3737 if (n_basic_blocks == 0
3738 || (regno < FIRST_PSEUDO_REGISTER
3739 && (global_regs[regno]
3740 || fixed_regs[regno]
3741 || FUNCTION_ARG_REGNO_P (regno))))
3744 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3747 /* 1 if register REGNO was alive at a place where `setjmp' was called
3748 and was set more than once or is an argument.
3749 Such regs may be clobbered by `longjmp'. */
3752 regno_clobbered_at_setjmp (regno)
3755 if (n_basic_blocks == 0)
3758 return ((REG_N_SETS (regno) > 1
3759 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3760 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
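/* At the source level the hazard looks like this (illustrative only):

	if (setjmp (env) == 0)
	  {
	    x = f ();		-- X set again after the setjmp
	    longjmp (env, 1);
	  }
	use (x);

   If X lives in a hard register, the longjmp may restore that register
   to its setjmp-time value, losing the later store; so such pseudos are
   flagged here and kept out of hard registers.  */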
3763 /* INSN references memory, possibly using autoincrement addressing modes.
3764 Find any entries on the mem_set_list that need to be invalidated due
3765 to an address change. */
3767 invalidate_mems_from_autoinc (insn)
3770 rtx note;
3771 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3773 if (REG_NOTE_KIND (note) == REG_INC)
3775 rtx temp = mem_set_list;
3776 rtx prev = NULL_RTX;
3781 next = XEXP (temp, 1);
3782 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
3784 /* Splice temp out of list. */
3786 XEXP (prev, 1) = next;
3788 mem_set_list = next;
3789 free_EXPR_LIST_node (temp);
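/* For instance, if INSN carries a REG_INC note for (reg 3), then a
   tracked store to (mem (plus (reg 3) (const_int 8))) mentions reg 3,
   so it is spliced out of mem_set_list above: after the increment the
   address no longer names the same location.  (Numbers are
   illustrative.)  */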
3799 /* Process the registers that are set within X. Their bits are set to
3800 1 in the regset DEAD, because they are dead prior to this insn.
3802 If INSN is nonzero, it is the insn being processed.
3804 FLAGS is the set of operations to perform. */
3807 mark_set_regs (needed, dead, x, insn, significant, flags)
3815 register RTX_CODE code = GET_CODE (x);
3817 if (code == SET || code == CLOBBER)
3818 mark_set_1 (needed, dead, x, insn, significant, flags);
3819 else if (code == PARALLEL)
3822 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3824 code = GET_CODE (XVECEXP (x, 0, i));
3825 if (code == SET || code == CLOBBER)
3826 mark_set_1 (needed, dead, XVECEXP (x, 0, i), insn,
3827 significant, flags);
3832 /* Process a single SET rtx, X. */
3835 mark_set_1 (needed, dead, x, insn, significant, flags)
3843 register int regno = -1;
3844 register rtx reg = SET_DEST (x);
3846 /* Some targets place small structures in registers for
3847 return values of functions. We have to detect this
3848 case specially here to get correct flow information. */
3849 if (GET_CODE (reg) == PARALLEL
3850 && GET_MODE (reg) == BLKmode)
3854 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
3855 mark_set_1 (needed, dead, XVECEXP (reg, 0, i), insn,
3856 significant, flags);
3860 /* Modifying just one hardware register of a multi-reg value
3861 or just a byte field of a register
3862 does not mean the value from before this insn is now dead.
3863 But it does mean liveness of that register at the end of the block
3864 is significant.
3866 Within mark_set_1, however, we treat it as if the register is
3867 indeed modified. mark_used_regs will, however, also treat this
3868 register as being used. Thus, we treat these insns as setting a
3869 new value for the register as a function of its old value. This
3870 causes LOG_LINKS to be made appropriately and this will help combine. */
3872 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
3873 || GET_CODE (reg) == SIGN_EXTRACT
3874 || GET_CODE (reg) == STRICT_LOW_PART)
3875 reg = XEXP (reg, 0);
3877 /* If this set is a MEM, then it kills any aliased writes.
3878 If this set is a REG, then it kills any MEMs which use the reg. */
3879 if (flags & PROP_SCAN_DEAD_CODE)
3881 if (GET_CODE (reg) == MEM
3882 || GET_CODE (reg) == REG)
3884 rtx temp = mem_set_list;
3885 rtx prev = NULL_RTX;
3890 next = XEXP (temp, 1);
3891 if ((GET_CODE (reg) == MEM
3892 && output_dependence (XEXP (temp, 0), reg))
3893 || (GET_CODE (reg) == REG
3894 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
3896 /* Splice this entry out of the list. */
3898 XEXP (prev, 1) = next;
3900 mem_set_list = next;
3901 free_EXPR_LIST_node (temp);
3909 /* If the memory reference had embedded side effects (autoincrement
3910 address modes), then we may need to kill some entries on the
3911 memory set list. */
3912 if (insn && GET_CODE (reg) == MEM)
3913 invalidate_mems_from_autoinc (insn);
3915 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
3916 /* We do not know the size of a BLKmode store, so we do not track
3917 them for redundant store elimination. */
3918 && GET_MODE (reg) != BLKmode
3919 /* There are no REG_INC notes for SP, so we can't assume we'll see
3920 everything that invalidates it. To be safe, don't eliminate any
3921 stores through SP; none of them should be redundant anyway. */
3922 && ! reg_mentioned_p (stack_pointer_rtx, reg))
3923 mem_set_list = alloc_EXPR_LIST (0, reg, mem_set_list);
3926 if (GET_CODE (reg) == REG
3927 && (regno = REGNO (reg),
3928 ! (regno == FRAME_POINTER_REGNUM
3929 && (! reload_completed || frame_pointer_needed)))
3930 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3931 && ! (regno == HARD_FRAME_POINTER_REGNUM
3932 && (! reload_completed || frame_pointer_needed))
3933 #endif
3934 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3935 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3936 #endif
3937 && ! (regno < FIRST_PSEUDO_REGISTER && global_regs[regno]))
3938 /* && regno != STACK_POINTER_REGNUM) -- let's try without this. */
3940 int some_needed = REGNO_REG_SET_P (needed, regno);
3941 int some_not_needed = ! some_needed;
3943 /* Mark it as a significant register for this basic block. */
3945 SET_REGNO_REG_SET (significant, regno);
3947 /* Mark it as dead before this insn. */
3948 SET_REGNO_REG_SET (dead, regno);
3950 /* A hard reg in a wide mode may really be multiple registers.
3951 If so, mark all of them just like the first. */
3952 if (regno < FIRST_PSEUDO_REGISTER)
3956 /* Nothing below is needed for the stack pointer; get out asap.
3957 Eg, log links aren't needed, since combine won't use them. */
3958 if (regno == STACK_POINTER_REGNUM)
3961 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
3962 while (--n > 0)
3963 {
3964 int regno_n = regno + n;
3965 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
3967 SET_REGNO_REG_SET (significant, regno_n);
3969 SET_REGNO_REG_SET (dead, regno_n);
3970 some_needed |= needed_regno;
3971 some_not_needed |= ! needed_regno;
3972 }
3975 /* Additional data to record if this is the final pass. */
3976 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
3977 | PROP_DEATH_NOTES | PROP_AUTOINC))
3980 register int blocknum = BLOCK_NUM (insn);
3983 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3984 y = reg_next_use[regno];
3986 /* If this is a hard reg, record this function uses the reg. */
3988 if (regno < FIRST_PSEUDO_REGISTER)
3991 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
3993 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3994 for (i = regno; i < endregno; i++)
3996 /* The next use is no longer "next", since a store
3997 intervenes. */
3998 reg_next_use[i] = 0;
4001 if (flags & PROP_REG_INFO)
4002 for (i = regno; i < endregno; i++)
4004 regs_ever_live[i] = 1;
4010 /* The next use is no longer "next", since a store
4011 intervenes. */
4012 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4013 reg_next_use[regno] = 0;
4015 /* Keep track of which basic blocks each reg appears in. */
4017 if (flags & PROP_REG_INFO)
4019 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4020 REG_BASIC_BLOCK (regno) = blocknum;
4021 else if (REG_BASIC_BLOCK (regno) != blocknum)
4022 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4024 /* Count (weighted) references, stores, etc. This counts a
4025 register twice if it is modified, but that is correct. */
4026 REG_N_SETS (regno)++;
4027 REG_N_REFS (regno) += loop_depth + 1;
4029 /* The insns where a reg is live are normally counted
4030 elsewhere, but we want the count to include the insn
4031 where the reg is set, and the normal counting mechanism
4032 would not count it. */
4033 REG_LIVE_LENGTH (regno)++;
4037 if (! some_not_needed)
4039 if (flags & PROP_LOG_LINKS)
4041 /* Make a logical link from the next following insn
4042 that uses this register, back to this insn.
4043 The following insns have already been processed.
4045 We don't build a LOG_LINK for hard registers contained
4046 in ASM_OPERANDs. If these registers get replaced,
4047 we might wind up changing the semantics of the insn,
4048 even if reload can make what appear to be valid
4049 assignments later. */
4050 if (y && (BLOCK_NUM (y) == blocknum)
4051 && (regno >= FIRST_PSEUDO_REGISTER
4052 || asm_noperands (PATTERN (y)) < 0))
4053 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4056 else if (! some_needed)
4058 if (flags & PROP_REG_INFO)
4059 REG_N_DEATHS (REGNO (reg))++;
4061 if (flags & PROP_DEATH_NOTES)
4063 /* Note that dead stores have already been deleted
4064 when possible. If we get here, we have found a
4065 dead store that cannot be eliminated (because the
4066 same insn does something useful). Indicate this
4067 by marking the reg being set as dying here. */
4068 REG_NOTES (insn)
4069 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4074 if (flags & PROP_DEATH_NOTES)
4076 /* This is a case where we have a multi-word hard register
4077 and some, but not all, of the words of the register are
4078 needed in subsequent insns. Write REG_UNUSED notes
4079 for those parts that were not needed. This case should
4080 be rare. */
4084 for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
4086 if (!REGNO_REG_SET_P (needed, regno + i))
4090 gen_rtx_REG (reg_raw_mode[regno + i], regno + i),
4096 else if (GET_CODE (reg) == REG)
4098 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4099 reg_next_use[regno] = 0;
4102 /* If this is the last pass and this is a SCRATCH, show it will be dying
4103 here and count it. */
4104 else if (GET_CODE (reg) == SCRATCH)
4106 if (flags & PROP_DEATH_NOTES)
4107 REG_NOTES (insn)
4108 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4114 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
4115 reference. */
4116 #ifdef AUTO_INC_DEC
4118 find_auto_inc (needed, x, insn)
4123 rtx addr = XEXP (x, 0);
4124 HOST_WIDE_INT offset = 0;
4127 /* Here we detect use of an index register which might be good for
4128 postincrement, postdecrement, preincrement, or predecrement. */
4130 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4131 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
4133 if (GET_CODE (addr) == REG)
4136 register int size = GET_MODE_SIZE (GET_MODE (x));
4139 int regno = REGNO (addr);
4141 /* Is the next use an increment that might make auto-increment? */
4142 if ((incr = reg_next_use[regno]) != 0
4143 && (set = single_set (incr)) != 0
4144 && GET_CODE (set) == SET
4145 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
4146 /* Can't add side effects to jumps; if reg is spilled and
4147 reloaded, there's no way to store back the altered value. */
4148 && GET_CODE (insn) != JUMP_INSN
4149 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
4150 && XEXP (y, 0) == addr
4151 && GET_CODE (XEXP (y, 1)) == CONST_INT
4152 && ((HAVE_POST_INCREMENT
4153 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
4154 || (HAVE_POST_DECREMENT
4155 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
4156 || (HAVE_PRE_INCREMENT
4157 && (INTVAL (XEXP (y, 1)) == size && offset == size))
4158 || (HAVE_PRE_DECREMENT
4159 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
4160 /* Make sure this reg appears only once in this insn. */
4161 && (use = find_use_as_address (PATTERN (insn), addr, offset),
4162 use != 0 && use != (rtx) 1))
4164 rtx q = SET_DEST (set);
4165 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
4166 ? (offset ? PRE_INC : POST_INC)
4167 : (offset ? PRE_DEC : POST_DEC));
4169 if (dead_or_set_p (incr, addr))
4171 /* This is the simple case. Try to make the auto-inc. If
4172 we can't, we are done. Otherwise, we will do any
4173 needed updates below. */
4174 if (! validate_change (insn, &XEXP (x, 0),
4175 gen_rtx_fmt_e (inc_code, Pmode, addr),
4179 else if (GET_CODE (q) == REG
4180 /* PREV_INSN used here to check the semi-open interval
4182 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4183 /* We must also check for sets of q as q may be
4184 a call clobbered hard register and there may
4185 be a call between PREV_INSN (insn) and incr. */
4186 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4188 /* We have *p followed sometime later by q = p+size.
4189 Both p and q must be live afterward,
4190 and q is not used between INSN and its assignment.
4191 Change it to q = p, ...*q..., q = q+size.
4192 Then fall into the usual case. */
4197 emit_move_insn (q, addr);
4198 insns = get_insns ();
4201 bb = BLOCK_FOR_INSN (insn);
4202 for (temp = insns; temp; temp = NEXT_INSN (temp))
4203 set_block_for_insn (temp, bb);
4205 /* If we can't make the auto-inc, or can't make the
4206 replacement into Y, exit. There's no point in making
4207 the change below if we can't do the auto-inc and doing
4208 so is not correct in the pre-inc case. */
4210 validate_change (insn, &XEXP (x, 0),
4211 gen_rtx_fmt_e (inc_code, Pmode, q),
4213 validate_change (incr, &XEXP (y, 0), q, 1);
4214 if (! apply_change_group ())
4217 /* We now know we'll be doing this change, so emit the
4218 new insn(s) and do the updates. */
4219 emit_insns_before (insns, insn);
4221 if (BLOCK_FOR_INSN (insn)->head == insn)
4222 BLOCK_FOR_INSN (insn)->head = insns;
4224 /* INCR will become a NOTE and INSN won't contain a
4225 use of ADDR. If a use of ADDR was just placed in
4226 the insn before INSN, make that the next use.
4227 Otherwise, invalidate it. */
4228 if (GET_CODE (PREV_INSN (insn)) == INSN
4229 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4230 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
4231 reg_next_use[regno] = PREV_INSN (insn);
4233 reg_next_use[regno] = 0;
4238 /* REGNO is now used in INCR which is below INSN, but
4239 it previously wasn't live here. If we don't mark
4240 it as needed, we'll put a REG_DEAD note for it
4241 on this insn, which is incorrect. */
4242 SET_REGNO_REG_SET (needed, regno);
4244 /* If there are any calls between INSN and INCR, show
4245 that REGNO now crosses them. */
4246 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4247 if (GET_CODE (temp) == CALL_INSN)
4248 REG_N_CALLS_CROSSED (regno)++;
4253 /* If we haven't returned, it means we were able to make the
4254 auto-inc, so update the status. First, record that this insn
4255 has an implicit side effect. */
4257 REG_NOTES (insn)
4258 = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
4260 /* Modify the old increment-insn to simply copy
4261 the already-incremented value of our register. */
4262 if (! validate_change (incr, &SET_SRC (set), addr, 0))
4265 /* If that makes it a no-op (copying the register into itself) delete
4266 it so it won't appear to be a "use" and a "set" of this
4267 register. */
4268 if (SET_DEST (set) == addr)
4270 PUT_CODE (incr, NOTE);
4271 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4272 NOTE_SOURCE_FILE (incr) = 0;
4275 if (regno >= FIRST_PSEUDO_REGISTER)
4277 /* Count an extra reference to the reg. When a reg is
4278 incremented, spilling it is worse, so we want to make
4279 that less likely. */
4280 REG_N_REFS (regno) += loop_depth + 1;
4282 /* Count the increment as a setting of the register,
4283 even though it isn't a SET in rtl. */
4284 REG_N_SETS (regno)++;
4289 #endif /* AUTO_INC_DEC */
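/* In the simple case above, with a four-byte SImode access and
   illustrative register numbers, the pair

	... (mem:SI (reg:SI 3)) ...
	(set (reg:SI 3) (plus:SI (reg:SI 3) (const_int 4)))

   becomes

	... (mem:SI (post_inc:SI (reg:SI 3))) ...

   with a REG_INC note added for reg 3 and the separate increment insn
   turned into a deleted note.  */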
4291 /* Scan expression X and store a 1-bit in LIVE for each reg it uses.
4292 This is done assuming the registers needed from X
4293 are those that have 1-bits in NEEDED.
4295 FLAGS is the set of enabled operations.
4297 INSN is the containing instruction. If INSN is dead, this function is not
4298 called. */
4301 mark_used_regs (needed, live, x, flags, insn)
4308 register RTX_CODE code;
4313 code = GET_CODE (x);
4333 /* If we are clobbering a MEM, mark any registers inside the address
4334 as being used. */
4335 if (GET_CODE (XEXP (x, 0)) == MEM)
4336 mark_used_regs (needed, live, XEXP (XEXP (x, 0), 0), flags, insn);
4340 /* Don't bother watching stores to mems if this is not the
4341 final pass. We'll not be deleting dead stores this round. */
4342 if (flags & PROP_SCAN_DEAD_CODE)
4344 /* Invalidate the data for the last MEM stored, but only if MEM is
4345 something that can be stored into. */
4346 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
4347 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
4348 ; /* needn't clear the memory set list */
4351 rtx temp = mem_set_list;
4352 rtx prev = NULL_RTX;
4357 next = XEXP (temp, 1);
4358 if (anti_dependence (XEXP (temp, 0), x))
4360 /* Splice temp out of the list. */
4362 XEXP (prev, 1) = next;
4364 mem_set_list = next;
4365 free_EXPR_LIST_node (temp);
4373 /* If the memory reference had embedded side effects (autoincrement
4374 address modes), then we may need to kill some entries on the
4375 memory set list. */
4376 if (insn)
4377 invalidate_mems_from_autoinc (insn);
4381 if (flags & PROP_AUTOINC)
4382 find_auto_inc (needed, x, insn);
4387 if (GET_CODE (SUBREG_REG (x)) == REG
4388 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
4389 && (GET_MODE_SIZE (GET_MODE (x))
4390 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
4391 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
4393 /* While we're here, optimize this case. */
4394 x = SUBREG_REG (x);
4396 /* In case the SUBREG is not of a register, don't optimize */
4397 if (GET_CODE (x) != REG)
4399 mark_used_regs (needed, live, x, flags, insn);
4403 /* ... fall through ... */
4406 /* We see a register being used (other than being set)
4407 => mark it as needed. */
4411 int some_needed = REGNO_REG_SET_P (needed, regno);
4412 int some_not_needed = ! some_needed;
4414 SET_REGNO_REG_SET (live, regno);
4416 /* A hard reg in a wide mode may really be multiple registers.
4417 If so, mark all of them just like the first. */
4418 if (regno < FIRST_PSEUDO_REGISTER)
4422 /* For stack ptr or fixed arg pointer,
4423 nothing below can be necessary, so waste no more time. */
4424 if (regno == STACK_POINTER_REGNUM
4425 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4426 || (regno == HARD_FRAME_POINTER_REGNUM
4427 && (! reload_completed || frame_pointer_needed))
4428 #endif
4429 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4430 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4431 #endif
4432 || (regno == FRAME_POINTER_REGNUM
4433 && (! reload_completed || frame_pointer_needed)))
4435 /* If this is a register we are going to try to eliminate,
4436 don't mark it live here. If we are successful in
4437 eliminating it, it need not be live unless it is used for
4438 pseudos, in which case it will have been set live when
4439 it was allocated to the pseudos. If the register will not
4440 be eliminated, reload will set it live at that point. */
4442 if ((flags & PROP_REG_INFO)
4443 && ! TEST_HARD_REG_BIT (elim_reg_set, regno))
4444 regs_ever_live[regno] = 1;
4447 /* No death notes for global register variables;
4448 their values are live after this function exits. */
4449 if (global_regs[regno])
4451 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4452 reg_next_use[regno] = insn;
4456 n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4459 int regno_n = regno + n;
4460 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
4462 SET_REGNO_REG_SET (live, regno_n);
4463 some_needed |= needed_regno;
4464 some_not_needed |= ! needed_regno;
4468 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4470 /* Record where each reg is used, so when the reg
4471 is set we know the next insn that uses it. */
4473 reg_next_use[regno] = insn;
4475 if (flags & PROP_REG_INFO)
4477 if (regno < FIRST_PSEUDO_REGISTER)
4479 /* If a hard reg is being used,
4480 record that this function does use it. */
4482 i = HARD_REGNO_NREGS (regno, GET_MODE (x));
4486 regs_ever_live[regno + --i] = 1;
4491 /* Keep track of which basic block each reg appears in. */
4493 register int blocknum = BLOCK_NUM (insn);
4495 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4496 REG_BASIC_BLOCK (regno) = blocknum;
4497 else if (REG_BASIC_BLOCK (regno) != blocknum)
4498 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4500 /* Count (weighted) number of uses of each reg. */
4502 REG_N_REFS (regno) += loop_depth + 1;
4506 /* Record and count the insns in which a reg dies.
4507 If it is used in this insn and was dead below the insn
4508 then it dies in this insn. If it was set in this insn,
4509 we do not make a REG_DEAD note; likewise if we already
4510 made such a note. */
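/* A hypothetical example of the partial-death case handled below: if an
   insn uses the two-word hard register pair starting at reg 2, and reg 3
   is still needed (or set by the insn) while reg 2 is not, then a partial
   REG_DEAD note is made for reg 2 alone in its single-word mode.  */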
4512 if (flags & PROP_DEATH_NOTES)
4515 && ! dead_or_set_p (insn, x)
4517 && (regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
4521 /* Check for the case where the register dying partially
4522 overlaps the register set by this insn. */
4523 if (regno < FIRST_PSEUDO_REGISTER
4524 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
4526 int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4528 some_needed |= dead_or_set_regno_p (insn, regno + n);
4531 /* If none of the words in X is needed, make a REG_DEAD
4532 note. Otherwise, we must make partial REG_DEAD notes. */
4536 = alloc_EXPR_LIST (REG_DEAD, x, REG_NOTES (insn));
4537 REG_N_DEATHS (regno)++;
4543 /* Don't make a REG_DEAD note for a part of a register
4544 that is set in the insn. */
4546 for (i = HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1;
4548 if (!REGNO_REG_SET_P (needed, regno + i)
4549 && ! dead_or_set_regno_p (insn, regno + i))
4552 (REG_DEAD, gen_rtx_REG (reg_raw_mode[regno + i],
4563 register rtx testreg = SET_DEST (x);
4566 /* If storing into MEM, don't show it as being used. But do
4567 show the address as being used. */
4568 if (GET_CODE (testreg) == MEM)
4571 if (flags & PROP_AUTOINC)
4572 find_auto_inc (needed, testreg, insn);
4574 mark_used_regs (needed, live, XEXP (testreg, 0), flags, insn);
4575 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4579 /* Storing in STRICT_LOW_PART is like storing in a reg
4580 in that this SET might be dead, so ignore it in TESTREG,
4581 but in some other ways it is like using the reg.
4583 Storing in a SUBREG or a bit field is like storing the entire
4584 register in that if the register's value is not used
4585 then this SET is not needed. */
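/* An illustrative (assumed) example:

	(set (strict_low_part (subreg:QI (reg:SI 65) 0)) (reg:QI 66))

   may be a dead SET if (reg 65) is never used afterwards, yet it also
   reads the old value of the untouched upper bytes of (reg 65), which
   is why such stores are treated partly as uses of the register.  */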
4586 while (GET_CODE (testreg) == STRICT_LOW_PART
4587 || GET_CODE (testreg) == ZERO_EXTRACT
4588 || GET_CODE (testreg) == SIGN_EXTRACT
4589 || GET_CODE (testreg) == SUBREG)
4591 if (GET_CODE (testreg) == SUBREG
4592 && GET_CODE (SUBREG_REG (testreg)) == REG
4593 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
4594 && (GET_MODE_SIZE (GET_MODE (testreg))
4595 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
4596 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
4598 /* Modifying a single register in an alternate mode
4599 does not use any of the old value. But these other
4600 ways of storing in a register do use the old value. */
4601 if (GET_CODE (testreg) == SUBREG
4602 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4607 testreg = XEXP (testreg, 0);
4610 /* If this is a store into a register,
4611 recursively scan the value being stored. */
4613 if ((GET_CODE (testreg) == PARALLEL
4614 && GET_MODE (testreg) == BLKmode)
4615 || (GET_CODE (testreg) == REG
4616 && (regno = REGNO (testreg), ! (regno == FRAME_POINTER_REGNUM
4617 && (! reload_completed || frame_pointer_needed)))
4618 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4619 && ! (regno == HARD_FRAME_POINTER_REGNUM
4620 && (! reload_completed || frame_pointer_needed))
4622 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4623 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4626 /* We used to exclude global_regs here, but that seems wrong.
4627 Storing in them is like storing in mem. */
4629 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4631 mark_used_regs (needed, live, SET_DEST (x), flags, insn);
4638 case UNSPEC_VOLATILE:
4642 /* Traditional and volatile asm instructions must be considered to use
4643 and clobber all hard registers, all pseudo-registers and all of
4644 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
4646 Consider for instance a volatile asm that changes the fpu rounding
4647 mode. An insn should not be moved across this even if it only uses
4648 pseudo-regs because it might give an incorrectly rounded result.
4650 ?!? Unfortunately, marking all hard registers as live causes massive
4651 problems for the register allocator and marking all pseudos as live
4652 creates mountains of uninitialized variable warnings.
4654 So for now, just clear the memory set list and mark any regs
4655 we can find in ASM_OPERANDS as used. */
4656 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
4657 free_EXPR_LIST_list (&mem_set_list);
4659 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
4660 We cannot just fall through here since then we would be confused
4661 by the ASM_INPUT rtx inside ASM_OPERANDS, which, unlike its normal
4662 usage, does not indicate a traditional asm.
4663 if (code == ASM_OPERANDS)
4667 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
4668 mark_used_regs (needed, live, ASM_OPERANDS_INPUT (x, j),
4679 /* Recursively scan the operands of this expression. */
4682 register const char *fmt = GET_RTX_FORMAT (code);
4685 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4689 /* Tail recursive case: save a function call level. */
4695 mark_used_regs (needed, live, XEXP (x, i), flags, insn);
4697 else if (fmt[i] == 'E')
4700 for (j = 0; j < XVECLEN (x, i); j++)
4701 mark_used_regs (needed, live, XVECEXP (x, i, j), flags, insn);
4710 try_pre_increment_1 (insn)
4713 /* Find the next use of this reg. If in same basic block,
4714 make it do pre-increment or pre-decrement if appropriate. */
4715 rtx x = single_set (insn);
4716 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4717 * INTVAL (XEXP (SET_SRC (x), 1)));
4718 int regno = REGNO (SET_DEST (x));
4719 rtx y = reg_next_use[regno];
4721 && BLOCK_NUM (y) == BLOCK_NUM (insn)
4722 /* Don't do this if the reg dies, or gets set in y; a standard addressing
4723 mode would be better. */
4724 && ! dead_or_set_p (y, SET_DEST (x))
4725 && try_pre_increment (y, SET_DEST (x), amount))
4727 /* We have found a suitable auto-increment
4728 and already changed insn Y to do it.
4729 So flush this increment-instruction. */
4730 PUT_CODE (insn, NOTE);
4731 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4732 NOTE_SOURCE_FILE (insn) = 0;
4733 /* Count a reference to this reg for the increment
4734 insn we are deleting. When a reg is incremented,
4735 spilling it is worse, so we want to make that less likely. */
4737 if (regno >= FIRST_PSEUDO_REGISTER)
4739 REG_N_REFS (regno) += loop_depth + 1;
4740 REG_N_SETS (regno)++;
4747 /* Try to change INSN so that it does pre-increment or pre-decrement
4748 addressing on register REG in order to add AMOUNT to REG.
4749 AMOUNT is negative for pre-decrement.
4750 Returns 1 if the change could be made.
4751 This checks all aspects of the validity of the result of modifying INSN. */
4754 try_pre_increment (insn, reg, amount)
4756 HOST_WIDE_INT amount;
4760 /* Nonzero if we can try to make a pre-increment or pre-decrement.
4761 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
4763 /* Nonzero if we can try to make a post-increment or post-decrement.
4764 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4765 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4766 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
4769 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
4772 /* From the sign of increment, see which possibilities are conceivable
4773 on this target machine. */
4774 if (HAVE_PRE_INCREMENT && amount > 0)
4776 if (HAVE_POST_INCREMENT && amount > 0)
4779 if (HAVE_PRE_DECREMENT && amount < 0)
4781 if (HAVE_POST_DECREMENT && amount < 0)
4784 if (! (pre_ok || post_ok))
4787 /* It is not safe to add a side effect to a jump insn
4788 because if the incremented register is spilled and must be reloaded
4789 there would be no way to store the incremented value back in memory. */
4791 if (GET_CODE (insn) == JUMP_INSN)
4796 use = find_use_as_address (PATTERN (insn), reg, 0);
4797 if (post_ok && (use == 0 || use == (rtx) 1))
4799 use = find_use_as_address (PATTERN (insn), reg, -amount);
4803 if (use == 0 || use == (rtx) 1)
4806 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4809 /* See if this combination of instruction and addressing mode exists. */
4810 if (! validate_change (insn, &XEXP (use, 0),
4811 gen_rtx_fmt_e (amount > 0
4812 ? (do_post ? POST_INC : PRE_INC)
4813 : (do_post ? POST_DEC : PRE_DEC),
4817 /* Record that this insn now has an implicit side effect on REG. */
4818 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4822 #endif /* AUTO_INC_DEC */
4824 /* Find the place in the rtx X where REG is used as a memory address.
4825 Return the MEM rtx that so uses it.
4826 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4827 (plus REG (const_int PLUSCONST)).
4829 If such an address does not appear, return 0.
4830 If REG appears more than once, or is used other than in such an address, return (rtx) 1. */
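/* For example (illustrative only), with PLUSCONST == 4 the expression

	(set (reg 66) (mem (plus (reg 65) (const_int 4))))

   yields the inner MEM, whereas a pattern mentioning (reg 65) a second
   time, or outside such an address, yields (rtx) 1 instead.  */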
4834 find_use_as_address (x, reg, plusconst)
4837 HOST_WIDE_INT plusconst;
4839 enum rtx_code code = GET_CODE (x);
4840 const char *fmt = GET_RTX_FORMAT (code);
4842 register rtx value = 0;
4845 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4848 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4849 && XEXP (XEXP (x, 0), 0) == reg
4850 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4851 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4854 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4856 /* If REG occurs inside a MEM used in a bit-field reference,
4857 that is unacceptable. */
4858 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4859 return (rtx) (HOST_WIDE_INT) 1;
4863 return (rtx) (HOST_WIDE_INT) 1;
4865 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4869 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4873 return (rtx) (HOST_WIDE_INT) 1;
4875 else if (fmt[i] == 'E')
4878 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4880 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4884 return (rtx) (HOST_WIDE_INT) 1;
4892 /* Write information about registers and basic blocks into FILE.
4893 This is part of making a debugging dump. */
4896 dump_regset (r, outf)
4903 fputs (" (nil)", outf);
4907 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
4909 fprintf (outf, " %d", i);
4910 if (i < FIRST_PSEUDO_REGISTER)
4911 fprintf (outf, " [%s]",
4920 dump_regset (r, stderr);
4921 putc ('\n', stderr);
4925 dump_flow_info (file)
4929 static const char * const reg_class_names[] = REG_CLASS_NAMES;
4931 fprintf (file, "%d registers.\n", max_regno);
4932 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4935 enum reg_class class, altclass;
4936 fprintf (file, "\nRegister %d used %d times across %d insns",
4937 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
4938 if (REG_BASIC_BLOCK (i) >= 0)
4939 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
4941 fprintf (file, "; set %d time%s", REG_N_SETS (i),
4942 (REG_N_SETS (i) == 1) ? "" : "s");
4943 if (REG_USERVAR_P (regno_reg_rtx[i]))
4944 fprintf (file, "; user var");
4945 if (REG_N_DEATHS (i) != 1)
4946 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
4947 if (REG_N_CALLS_CROSSED (i) == 1)
4948 fprintf (file, "; crosses 1 call");
4949 else if (REG_N_CALLS_CROSSED (i))
4950 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
4951 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
4952 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
4953 class = reg_preferred_class (i);
4954 altclass = reg_alternate_class (i);
4955 if (class != GENERAL_REGS || altclass != ALL_REGS)
4957 if (altclass == ALL_REGS || class == ALL_REGS)
4958 fprintf (file, "; pref %s", reg_class_names[(int) class]);
4959 else if (altclass == NO_REGS)
4960 fprintf (file, "; %s or none", reg_class_names[(int) class]);
4962 fprintf (file, "; pref %s, else %s",
4963 reg_class_names[(int) class],
4964 reg_class_names[(int) altclass]);
4966 if (REGNO_POINTER_FLAG (i))
4967 fprintf (file, "; pointer");
4968 fprintf (file, ".\n");
4971 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
4972 for (i = 0; i < n_basic_blocks; i++)
4974 register basic_block bb = BASIC_BLOCK (i);
4977 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d.\n",
4978 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
4980 fprintf (file, "Predecessors: ");
4981 for (e = bb->pred; e ; e = e->pred_next)
4982 dump_edge_info (file, e, 0);
4984 fprintf (file, "\nSuccessors: ");
4985 for (e = bb->succ; e ; e = e->succ_next)
4986 dump_edge_info (file, e, 1);
4988 fprintf (file, "\nRegisters live at start:");
4989 dump_regset (bb->global_live_at_start, file);
4991 fprintf (file, "\nRegisters live at end:");
4992 dump_regset (bb->global_live_at_end, file);
5003 dump_flow_info (stderr);
5007 dump_edge_info (file, e, do_succ)
5012 basic_block side = (do_succ ? e->dest : e->src);
5014 if (side == ENTRY_BLOCK_PTR)
5015 fputs (" ENTRY", file);
5016 else if (side == EXIT_BLOCK_PTR)
5017 fputs (" EXIT", file);
5019 fprintf (file, " %d", side->index);
5023 static const char * const bitnames[] = {
5024 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5027 int i, flags = e->flags;
5031 for (i = 0; flags; i++)
5032 if (flags & (1 << i))
5038 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5039 fputs (bitnames[i], file);
5041 fprintf (file, "%d", i);
5049 /* Print out one basic block with live information at start and end. */
5059 fprintf (outf, ";; Basic block %d, loop depth %d",
5060 bb->index, bb->loop_depth - 1);
5061 if (bb->eh_beg != -1 || bb->eh_end != -1)
5062 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
5065 fputs (";; Predecessors: ", outf);
5066 for (e = bb->pred; e ; e = e->pred_next)
5067 dump_edge_info (outf, e, 0);
5070 fputs (";; Registers live at start:", outf);
5071 dump_regset (bb->global_live_at_start, outf);
5074 for (insn = bb->head, last = NEXT_INSN (bb->end);
5076 insn = NEXT_INSN (insn))
5077 print_rtl_single (outf, insn);
5079 fputs (";; Registers live at end:", outf);
5080 dump_regset (bb->global_live_at_end, outf);
5083 fputs (";; Successors: ", outf);
5084 for (e = bb->succ; e; e = e->succ_next)
5085 dump_edge_info (outf, e, 1);
5093 dump_bb (bb, stderr);
5100 dump_bb (BASIC_BLOCK(n), stderr);
5103 /* Like print_rtl, but also print out live information for the start of each basic block. */
5107 print_rtl_with_bb (outf, rtx_first)
5111 register rtx tmp_rtx;
5114 fprintf (outf, "(nil)\n");
5118 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5119 int max_uid = get_max_uid ();
5120 basic_block *start = (basic_block *)
5121 xcalloc (max_uid, sizeof (basic_block));
5122 basic_block *end = (basic_block *)
5123 xcalloc (max_uid, sizeof (basic_block));
5124 enum bb_state *in_bb_p = (enum bb_state *)
5125 xcalloc (max_uid, sizeof (enum bb_state));
5127 for (i = n_basic_blocks - 1; i >= 0; i--)
5129 basic_block bb = BASIC_BLOCK (i);
5132 start[INSN_UID (bb->head)] = bb;
5133 end[INSN_UID (bb->end)] = bb;
5134 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5136 enum bb_state state = IN_MULTIPLE_BB;
5137 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
5139 in_bb_p[INSN_UID(x)] = state;
5146 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5151 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5153 fprintf (outf, ";; Start of basic block %d, registers live:",
5155 dump_regset (bb->global_live_at_start, outf);
5159 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
5160 && GET_CODE (tmp_rtx) != NOTE
5161 && GET_CODE (tmp_rtx) != BARRIER)
5162 fprintf (outf, ";; Insn is not within a basic block\n");
5163 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
5164 fprintf (outf, ";; Insn is in multiple basic blocks\n");
5166 did_output = print_rtl_single (outf, tmp_rtx);
5168 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
5169 fprintf (outf, ";; End of basic block %d\n", bb->index);
5180 if (current_function_epilogue_delay_list != 0)
5182 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
5183 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
5184 tmp_rtx = XEXP (tmp_rtx, 1))
5185 print_rtl_single (outf, XEXP (tmp_rtx, 0));
5189 /* Compute dominator relationships using new flow graph structures. */
5191 compute_flow_dominators (dominators, post_dominators)
5192 sbitmap *dominators;
5193 sbitmap *post_dominators;
5196 sbitmap *temp_bitmap;
5198 basic_block *worklist, *tos;
5200 /* Allocate a worklist array/queue. Entries are only added to the
5201 list if they were not already on the list. So the size is
5202 bounded by the number of basic blocks. */
5203 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block)
5206 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5207 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5211 /* The optimistic setting of dominators requires us to put every
5212 block on the work list initially. */
5213 for (bb = 0; bb < n_basic_blocks; bb++)
5215 *tos++ = BASIC_BLOCK (bb);
5216 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5219 /* We want a maximal solution, so initially assume everything dominates everything else. */
5221 sbitmap_vector_ones (dominators, n_basic_blocks);
5223 /* Mark successors of the entry block so we can identify them below. */
5224 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5225 e->dest->aux = ENTRY_BLOCK_PTR;
5227 /* Iterate until the worklist is empty. */
5228 while (tos != worklist)
5230 /* Take the first entry off the worklist. */
5231 basic_block b = *--tos;
5234 /* Compute the intersection of the dominators of all the predecessor blocks.
5237 If one of the predecessor blocks is the ENTRY block, then the
5238 intersection of the dominators of the predecessor blocks is
5239 defined as the null set. We can identify such blocks by the
5240 special value in the AUX field in the block structure. */
5241 if (b->aux == ENTRY_BLOCK_PTR)
5243 /* Do not clear the aux field for blocks which are
5244 successors of the ENTRY block. That way we never
5245 add them to the worklist again.
5247 The intersect of dominators of the preds of this block is
5248 defined as the null set. */
5249 sbitmap_zero (temp_bitmap[bb]);
5253 /* Clear the aux field of this block so it can be added to
5254 the worklist again if necessary. */
5256 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
5259 /* Make sure each block always dominates itself. */
5260 SET_BIT (temp_bitmap[bb], bb);
5262 /* If the out state of this block changed, then we need to
5263 add the successors of this block to the worklist if they
5264 are not already on the worklist. */
5265 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
5267 for (e = b->succ; e; e = e->succ_next)
5269 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
5279 if (post_dominators)
5281 /* The optimistic setting of dominators requires us to put every
5282 block on the work list initially. */
5283 for (bb = 0; bb < n_basic_blocks; bb++)
5285 *tos++ = BASIC_BLOCK (bb);
5286 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5289 /* We want a maximal solution, so initially assume everything post
5290 dominates everything else. */
5291 sbitmap_vector_ones (post_dominators, n_basic_blocks);
5293 /* Mark predecessors of the exit block so we can identify them below. */
5294 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5295 e->src->aux = EXIT_BLOCK_PTR;
5297 /* Iterate until the worklist is empty. */
5298 while (tos != worklist)
5300 /* Take the first entry off the worklist. */
5301 basic_block b = *--tos;
5304 /* Compute the intersection of the post dominators of all the successor blocks.
5307 If one of the successor blocks is the EXIT block, then the
5308 intersection of the dominators of the successor blocks is
5309 defined as the null set. We can identify such blocks by the
5310 special value in the AUX field in the block structure. */
5311 if (b->aux == EXIT_BLOCK_PTR)
5313 /* Do not clear the aux field for blocks which are
5314 predecessors of the EXIT block. That way we never
5315 add them to the worklist again.
5317 The intersect of dominators of the succs of this block is
5318 defined as the null set. */
5319 sbitmap_zero (temp_bitmap[bb]);
5323 /* Clear the aux field of this block so it can be added to
5324 the worklist again if necessary. */
5326 sbitmap_intersection_of_succs (temp_bitmap[bb],
5327 post_dominators, bb);
5330 /* Make sure each block always post dominates itself. */
5331 SET_BIT (temp_bitmap[bb], bb);
5333 /* If the out state of this block changed, then we need to
5334 add the predecessors of this block to the worklist if they
5335 are not already on the worklist. */
5336 if (sbitmap_a_and_b (post_dominators[bb],
5337 post_dominators[bb],
5340 for (e = b->pred; e; e = e->pred_next)
5342 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
5354 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
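/* A small worked example (not in the original sources): for the diamond
   CFG 0 -> 1, 0 -> 2, 1 -> 3, 2 -> 3 we have dom(3) = {0, 3}, so
   tmp[3] starts as {0}; tmp[0] is empty, so nothing is subtracted and
   the single remaining bit gives idom[3] = 0, likewise idom[1] =
   idom[2] = 0.  */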
5357 compute_immediate_dominators (idom, dominators)
5359 sbitmap *dominators;
5364 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5366 /* Begin with tmp(n) = dom(n) - { n }. */
5367 for (b = n_basic_blocks; --b >= 0; )
5369 sbitmap_copy (tmp[b], dominators[b]);
5370 RESET_BIT (tmp[b], b);
5373 /* Subtract out all of our dominator's dominators. */
5374 for (b = n_basic_blocks; --b >= 0; )
5376 sbitmap tmp_b = tmp[b];
5379 for (s = n_basic_blocks; --s >= 0; )
5380 if (TEST_BIT (tmp_b, s))
5381 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
5384 /* Find the one bit set in the bitmap and put it in the output array. */
5385 for (b = n_basic_blocks; --b >= 0; )
5388 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
5391 sbitmap_vector_free (tmp);
5394 /* Count for a single SET rtx, X. */
5397 count_reg_sets_1 (x)
5401 register rtx reg = SET_DEST (x);
5403 /* Find the register that's set/clobbered. */
5404 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
5405 || GET_CODE (reg) == SIGN_EXTRACT
5406 || GET_CODE (reg) == STRICT_LOW_PART)
5407 reg = XEXP (reg, 0);
5409 if (GET_CODE (reg) == PARALLEL
5410 && GET_MODE (reg) == BLKmode)
5413 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
5414 count_reg_sets_1 (XVECEXP (reg, 0, i));
5418 if (GET_CODE (reg) == REG)
5420 regno = REGNO (reg);
5421 if (regno >= FIRST_PSEUDO_REGISTER)
5423 /* Count (weighted) references, stores, etc. This counts a
5424 register twice if it is modified, but that is correct. */
5425 REG_N_SETS (regno)++;
5426 REG_N_REFS (regno) += loop_depth + 1;
5431 /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
5432 REG_N_REFS by the current loop depth for each SET or CLOBBER found. */
5438 register RTX_CODE code = GET_CODE (x);
5440 if (code == SET || code == CLOBBER)
5441 count_reg_sets_1 (x);
5442 else if (code == PARALLEL)
5445 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5447 code = GET_CODE (XVECEXP (x, 0, i));
5448 if (code == SET || code == CLOBBER)
5449 count_reg_sets_1 (XVECEXP (x, 0, i));
5454 /* Increment REG_N_REFS by the current loop depth for each register reference found in X. */
5458 count_reg_references (x)
5461 register RTX_CODE code;
5464 code = GET_CODE (x);
5484 /* If we are clobbering a MEM, mark any registers inside the address as being used. */
5486 if (GET_CODE (XEXP (x, 0)) == MEM)
5487 count_reg_references (XEXP (XEXP (x, 0), 0));
5491 /* While we're here, optimize this case. */
5494 /* In case the SUBREG is not of a register, don't optimize. */
5495 if (GET_CODE (x) != REG)
5497 count_reg_references (x);
5501 /* ... fall through ... */
5504 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
5505 REG_N_REFS (REGNO (x)) += loop_depth + 1;
5510 register rtx testreg = SET_DEST (x);
5513 /* If storing into MEM, don't show it as being used. But do
5514 show the address as being used. */
5515 if (GET_CODE (testreg) == MEM)
5517 count_reg_references (XEXP (testreg, 0));
5518 count_reg_references (SET_SRC (x));
5522 /* Storing in STRICT_LOW_PART is like storing in a reg
5523 in that this SET might be dead, so ignore it in TESTREG,
5524 but in some other ways it is like using the reg.
5526 Storing in a SUBREG or a bit field is like storing the entire
5527 register in that if the register's value is not used
5528 then this SET is not needed. */
5529 while (GET_CODE (testreg) == STRICT_LOW_PART
5530 || GET_CODE (testreg) == ZERO_EXTRACT
5531 || GET_CODE (testreg) == SIGN_EXTRACT
5532 || GET_CODE (testreg) == SUBREG)
5534 /* Modifying a single register in an alternate mode
5535 does not use any of the old value. But these other
5536 ways of storing in a register do use the old value. */
5537 if (GET_CODE (testreg) == SUBREG
5538 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5543 testreg = XEXP (testreg, 0);
5546 /* If this is a store into a register,
5547 recursively scan the value being stored. */
5549 if ((GET_CODE (testreg) == PARALLEL
5550 && GET_MODE (testreg) == BLKmode)
5551 || GET_CODE (testreg) == REG)
5553 count_reg_references (SET_SRC (x));
5555 count_reg_references (SET_DEST (x));
5565 /* Recursively scan the operands of this expression. */
5568 register const char *fmt = GET_RTX_FORMAT (code);
5571 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5575 /* Tail recursive case: save a function call level. */
5581 count_reg_references (XEXP (x, i));
5583 else if (fmt[i] == 'E')
5586 for (j = 0; j < XVECLEN (x, i); j++)
5587 count_reg_references (XVECEXP (x, i, j));
5593 /* Recompute register set/reference counts immediately prior to register allocation.
5596 This avoids problems with set/reference counts changing to/from values
5597 which have special meanings to the register allocators.
5599 Additionally, the reference counts are the primary component used by the
5600 register allocators to prioritize pseudos for allocation to hard regs.
5601 More accurate reference counts generally lead to better register allocation.
5603 F is the first insn to be scanned.
5605 LOOP_STEP denotes how much loop_depth should be incremented per
5606 loop nesting level in order to increase the ref count more for
5607 references in a loop.
5609 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
5610 possibly other information which is used by the register allocators. */
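/* An illustrative note on the weighting (assumed numbers): a single
   reference at loop nesting depth 2 adds loop_depth + 1 == 3 to
   REG_N_REFS, while one at depth 0 adds just 1, so pseudos referenced
   inside loops look more attractive to the allocators.  */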
5613 recompute_reg_usage (f, loop_step)
5614 rtx f ATTRIBUTE_UNUSED;
5615 int loop_step ATTRIBUTE_UNUSED;
5621 /* Clear out the old data. */
5622 max_reg = max_reg_num ();
5623 for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
5629 /* Scan each insn in the chain and count how many times each register is set/used. */
5631 for (index = 0; index < n_basic_blocks; index++)
5633 basic_block bb = BASIC_BLOCK (index);
5634 loop_depth = bb->loop_depth;
5635 for (insn = bb->head; insn; insn = NEXT_INSN (insn))
5637 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5641 /* This call will increment REG_N_SETS for each SET or CLOBBER
5642 of a register in INSN. It will also increment REG_N_REFS
5643 by the loop depth for each set of a register in INSN. */
5644 count_reg_sets (PATTERN (insn));
5646 /* count_reg_sets does not detect autoincrement address modes, so
5647 detect them here by looking at the notes attached to INSN. */
5648 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
5650 if (REG_NOTE_KIND (links) == REG_INC)
5651 /* Count (weighted) references, stores, etc. This counts a
5652 register twice if it is modified, but that is correct. */
5653 REG_N_SETS (REGNO (XEXP (links, 0)))++;
5656 /* This call will increment REG_N_REFS by the current loop depth for
5657 each reference to a register in INSN. */
5658 count_reg_references (PATTERN (insn));
5660 /* count_reg_references will not include counts for arguments to
5661 function calls, so detect them here by examining the
5662 CALL_INSN_FUNCTION_USAGE data. */
5663 if (GET_CODE (insn) == CALL_INSN)
5667 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5669 note = XEXP (note, 1))
5670 if (GET_CODE (XEXP (note, 0)) == USE)
5671 count_reg_references (XEXP (XEXP (note, 0), 0));
5674 if (insn == bb->end)
5680 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
5681 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
5682 of the number of registers that died. */
5685 count_or_remove_death_notes (blocks, kill)
5691 for (i = n_basic_blocks - 1; i >= 0; --i)
5696 if (blocks && ! TEST_BIT (blocks, i))
5699 bb = BASIC_BLOCK (i);
5701 for (insn = bb->head; ; insn = NEXT_INSN (insn))
5703 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5705 rtx *pprev = &REG_NOTES (insn);
5710 switch (REG_NOTE_KIND (link))
5713 if (GET_CODE (XEXP (link, 0)) == REG)
5715 rtx reg = XEXP (link, 0);
5718 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
5721 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
5729 rtx next = XEXP (link, 1);
5730 free_EXPR_LIST_node (link);
5731 *pprev = link = next;
5737 pprev = &XEXP (link, 1);
5744 if (insn == bb->end)
5752 /* Record INSN's block as BB. */
5755 set_block_for_insn (insn, bb)
5759 size_t uid = INSN_UID (insn);
5760 if (uid >= basic_block_for_insn->num_elements)
5764 /* Add one-eighth the size so we don't keep calling xrealloc. */
5765 new_size = uid + (uid + 7) / 8;
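/* E.g. for uid == 100 this grows the array to 100 + 107/8 == 113
   entries, amortizing the cost of repeated growth.  */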
5767 VARRAY_GROW (basic_block_for_insn, new_size);
5769 VARRAY_BB (basic_block_for_insn, uid) = bb;
5772 /* Record INSN's block number as BB. */
5773 /* ??? This has got to go. */
5776 set_block_num (insn, bb)
5780 set_block_for_insn (insn, BASIC_BLOCK (bb));
5783 /* Verify CFG consistency. This function checks some CFG invariants and
5784 aborts when something is wrong. The hope is that this function will help
5785 to convert many optimization passes to preserve a consistent CFG.
5787 Currently it does the following checks:
5789 - test head/end pointers
5790 - overlapping of basic blocks
5791 - edge list correctness
5792 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
5793 - tails of basic blocks (ensure that the boundary is necessary)
5794 - scans body of the basic block for JUMP_INSN, CODE_LABEL
5795 and NOTE_INSN_BASIC_BLOCK
5796 - check that all insns are in the basic blocks
5797 (except the switch handling code, barriers and notes)
5798 - check that all returns are followed by barriers
5800 In the future it can be extended to check a lot of other things as well
5801 (reachability of basic blocks, life information, etc. etc.). */
5806 const int max_uid = get_max_uid ();
5807 const rtx rtx_first = get_insns ();
5808 basic_block *bb_info;
5812 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
5814 /* First pass: check head/end pointers and set the bb_info array used by later passes. */
5816 for (i = n_basic_blocks - 1; i >= 0; i--)
5818 basic_block bb = BASIC_BLOCK (i);
5820 /* Check the head pointer and make sure that it is pointing into the insn list. */
5822 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
5827 error ("Head insn %d for block %d not found in the insn stream.",
5828 INSN_UID (bb->head), bb->index);
5832 /* Check the end pointer and make sure that it is pointing into the insn list, too. */
5834 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5836 if (bb_info[INSN_UID (x)] != NULL)
5838 error ("Insn %d is in multiple basic blocks (%d and %d)",
5839 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
5842 bb_info[INSN_UID (x)] = bb;
5849 error ("End insn %d for block %d not found in the insn stream.",
5850 INSN_UID (bb->end), bb->index);
5855 /* Now check the basic blocks (boundaries etc.) */
5856 for (i = n_basic_blocks - 1; i >= 0; i--)
5858 basic_block bb = BASIC_BLOCK (i);
5859 /* Check correctness of edge lists. */
5867 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
5869 fprintf (stderr, "Predecessor: ");
5870 dump_edge_info (stderr, e, 0);
5871 fprintf (stderr, "\nSuccessor: ");
5872 dump_edge_info (stderr, e, 1);
5876 if (e->dest != EXIT_BLOCK_PTR)
5878 edge e2 = e->dest->pred;
5879 while (e2 && e2 != e)
5883 error ("Basic block %i edge lists are corrupted", bb->index);
5895 error ("Basic block %d pred edge is corrupted", bb->index);
5896 fputs ("Predecessor: ", stderr);
5897 dump_edge_info (stderr, e, 0);
5898 fputs ("\nSuccessor: ", stderr);
5899 dump_edge_info (stderr, e, 1);
5900 fputc ('\n', stderr);
5903 if (e->src != ENTRY_BLOCK_PTR)
5905 edge e2 = e->src->succ;
5906 while (e2 && e2 != e)
5910 error ("Basic block %i edge lists are corrupted", bb->index);
5917 /* OK; the pointers are correct. Now check the header of the basic
5918 block. It ought to contain an optional CODE_LABEL followed
5919 by NOTE_BASIC_BLOCK. */
5921 if (GET_CODE (x) == CODE_LABEL)
5925 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
5931 if (GET_CODE (x) != NOTE
5932 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
5933 || NOTE_BASIC_BLOCK (x) != bb)
5935 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
5942 /* Do checks for empty blocks here */
5949 if (GET_CODE (x) == NOTE
5950 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
5952 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
5953 INSN_UID (x), bb->index);
5960 if (GET_CODE (x) == JUMP_INSN
5961 || GET_CODE (x) == CODE_LABEL
5962 || GET_CODE (x) == BARRIER)
5964 error ("In basic block %d:", bb->index);
5965 fatal_insn ("Flow control insn inside a basic block", x);
5976 if (!bb_info[INSN_UID (x)])
5978 switch (GET_CODE (x))
5985 /* An addr_vec is placed outside any basic block. */
5987 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
5988 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
5989 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
5994 /* But in any case, non-deletable labels can appear anywhere. */
5998 fatal_insn ("Insn outside basic block", x);
6002 if (GET_RTX_CLASS (GET_CODE (x)) == 'i'
6003 && GET_CODE (x) == JUMP_INSN
6004 && returnjump_p (x) && ! condjump_p (x)
6005 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
6006 fatal_insn ("Return not followed by barrier", x);
6018 /* Functions to access an edge list with a vector representation.
6019 Enough data is kept such that given an index number, the
6020 pred and succ that edge represents can be determined, or
6021 given a pred and a succ, its index number can be returned.
6022 This allows algorithms which consume a lot of memory to
6023 represent the normally full matrix of edge (pred,succ) with a
6024 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6025 wasted space in the client code due to sparse flow graphs. */
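/* A minimal usage sketch (illustrative, not from the original sources),
   where mark_edge stands for whatever per-edge action a client needs;
   the point is one vector slot per edge rather than a pred x succ matrix:

	struct edge_list *elist = create_edge_list ();
	int ix = EDGE_INDEX (elist, e->src, e->dest);

	if (ix != EDGE_INDEX_NO_EDGE)
	  mark_edge (ix);

	free_edge_list (elist);  */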
6027 /* This function initializes the edge list. Basically the entire
6028 flowgraph is processed, and all edges are assigned a number,
6029 and the data structure is filled in.
6033 struct edge_list *elist;
6039 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6043 /* Determine the number of edges in the flow graph by counting successor
6044 edges on each basic block. */
6045 for (x = 0; x < n_basic_blocks; x++)
6047 basic_block bb = BASIC_BLOCK (x);
6049 for (e = bb->succ; e; e = e->succ_next)
6052 /* Don't forget successors of the entry block. */
6053 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6056 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6057 elist->num_blocks = block_count;
6058 elist->num_edges = num_edges;
6059 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6063 /* Follow successors of the entry block, and register these edges. */
6064 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6066 elist->index_to_edge[num_edges] = e;
6070 for (x = 0; x < n_basic_blocks; x++)
6072 basic_block bb = BASIC_BLOCK (x);
6074 /* Follow all successors of blocks, and register these edges. */
6075 for (e = bb->succ; e; e = e->succ_next)
6077 elist->index_to_edge[num_edges] = e;
6084 /* This function frees the memory associated with an edge list. */
6086 free_edge_list (elist)
6087 struct edge_list *elist;
6091 free (elist->index_to_edge);
6096 /* This function provides debug output showing an edge list. */
6098 print_edge_list (f, elist)
6100 struct edge_list *elist;
6103 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6104 elist->num_blocks - 2, elist->num_edges);
6106 for (x = 0; x < elist->num_edges; x++)
6108 fprintf (f, " %-4d - edge(", x);
6109 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6110 fprintf (f,"entry,");
6112 fprintf (f,"%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6114 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6115 fprintf (f,"exit)\n");
6117 fprintf (f,"%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6121 /* This function provides an internal consistency check of an edge list,
6122 verifying that all edges are present, and that there are no spurious edges. */
6125 verify_edge_list (f, elist)
6127 struct edge_list *elist;
6129 int x, pred, succ, index;
6132 for (x = 0; x < n_basic_blocks; x++)
6134 basic_block bb = BASIC_BLOCK (x);
6136 for (e = bb->succ; e; e = e->succ_next)
6138 pred = e->src->index;
6139 succ = e->dest->index;
6140 index = EDGE_INDEX (elist, e->src, e->dest);
6141 if (index == EDGE_INDEX_NO_EDGE)
6143 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6146 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6147 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6148 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6149 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6150 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6151 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6154 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6156 pred = e->src->index;
6157 succ = e->dest->index;
6158 index = EDGE_INDEX (elist, e->src, e->dest);
6159 if (index == EDGE_INDEX_NO_EDGE)
6161 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6164 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6165 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6166 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6167 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6168 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6169 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6171 /* We've verified that all the edges are in the list, now let's make sure
6172 there are no spurious edges in the list. */
6174 for (pred = 0 ; pred < n_basic_blocks; pred++)
6175 for (succ = 0 ; succ < n_basic_blocks; succ++)
6177 basic_block p = BASIC_BLOCK (pred);
6178 basic_block s = BASIC_BLOCK (succ);
6182 for (e = p->succ; e; e = e->succ_next)
6188 for (e = s->pred; e; e = e->pred_next)
6194 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6195 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6196 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6198 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6199 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6200 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6201 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6202 BASIC_BLOCK (succ)));
6204 for (succ = 0 ; succ < n_basic_blocks; succ++)
6206 basic_block p = ENTRY_BLOCK_PTR;
6207 basic_block s = BASIC_BLOCK (succ);
6211 for (e = p->succ; e; e = e->succ_next)
6217 for (e = s->pred; e; e = e->pred_next)
6223 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6224 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6225 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6227 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6228 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6229 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6230 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6231 BASIC_BLOCK (succ)));
6233 for (pred = 0 ; pred < n_basic_blocks; pred++)
6235 basic_block p = BASIC_BLOCK (pred);
6236 basic_block s = EXIT_BLOCK_PTR;
6240 for (e = p->succ; e; e = e->succ_next)
6246 for (e = s->pred; e; e = e->pred_next)
6252 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6253 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6254 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6256 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6257 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6258 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6259 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6264 /* This routine will determine what, if any, edge there is between
6265 a specified predecessor and successor. */
6268 find_edge_index (edge_list, pred, succ)
6269 struct edge_list *edge_list;
6270 basic_block pred, succ;
6273 for (x = 0; x < NUM_EDGES (edge_list); x++)
6275 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6276 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6279 return (EDGE_INDEX_NO_EDGE);
6282 /* This function will remove an edge from the flow graph. */
6287 edge last_pred = NULL;
6288 edge last_succ = NULL;
6290 basic_block src, dest;
6293 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6299 last_succ->succ_next = e->succ_next;
6301 src->succ = e->succ_next;
6303 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6309 last_pred->pred_next = e->pred_next;
6311 dest->pred = e->pred_next;
6317 /* This routine will remove any fake successor edges for a basic block.
6318 When the edge is removed, it is also removed from whatever predecessor list it is in. */
6321 remove_fake_successors (bb)
6325 for (e = bb->succ; e ; )
6329 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6334 /* This routine will remove all fake edges from the flow graph. If
6335 we remove all fake successors, it will automatically remove all
6336 fake predecessors. */
6338 remove_fake_edges ()
6342 for (x = 0; x < n_basic_blocks; x++)
6343 remove_fake_successors (BASIC_BLOCK (x));
6345 /* We've handled all successors except the entry block's. */
6346 remove_fake_successors (ENTRY_BLOCK_PTR);
6349 /* This function will add a fake edge between any block which has no
6350 successors, and the exit block. Some data flow equations require these edges to exist. */
6353 add_noreturn_fake_exit_edges ()
6357 for (x = 0; x < n_basic_blocks; x++)
6358 if (BASIC_BLOCK (x)->succ == NULL)
6359 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
6362 /* Dump the list of basic blocks in the bitmap NODES. */
6364 flow_nodes_print (str, nodes, file)
6366 const sbitmap nodes;
6371 fprintf (file, "%s { ", str);
6372 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
6373 fputs ("}\n", file);
6377 /* Dump the list of exiting edges in the array EDGES. */
6379 flow_exits_print (str, edges, num_edges, file)
6387 fprintf (file, "%s { ", str);
6388 for (i = 0; i < num_edges; i++)
6389 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
6390 fputs ("}\n", file);
6394 /* Dump loop related CFG information. */
6396 flow_loops_cfg_dump (loops, file)
6397 const struct loops *loops;
6402 if (! loops->num || ! file || ! loops->cfg.dom)
6405 for (i = 0; i < n_basic_blocks; i++)
6409 fprintf (file, ";; %d succs { ", i);
6410 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
6411 fprintf (file, "%d ", succ->dest->index);
6412 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
6416 /* Dump the DFS node order. */
6417 if (loops->cfg.dfs_order)
6419 fputs (";; DFS order: ", file);
6420 for (i = 0; i < n_basic_blocks; i++)
6421 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
6427 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
6429 flow_loop_nested_p (outer, loop)
6433 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
6437 /* Dump the loop information specified by LOOPS to the stream FILE. */
6439 flow_loops_dump (loops, file, verbose)
6440 const struct loops *loops;
6447 num_loops = loops->num;
6448 if (! num_loops || ! file)
6451 fprintf (file, ";; %d loops found, %d levels\n",
6452 num_loops, loops->levels);
6454 for (i = 0; i < num_loops; i++)
6456 struct loop *loop = &loops->array[i];
6458 fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
6459 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
6460 loop->header->index, loop->latch->index,
6461 loop->pre_header ? loop->pre_header->index : -1,
6462 loop->depth, loop->level,
6463 (long) (loop->outer ? (loop->outer - loops->array) : -1));
6464 fprintf (file, ";; %d", loop->num_nodes);
6465 flow_nodes_print (" nodes", loop->nodes, file);
6466 fprintf (file, ";; %d", loop->num_exits);
6467 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
6473 for (j = 0; j < i; j++)
6475 struct loop *oloop = &loops->array[j];
6477 if (loop->header == oloop->header)
6482 smaller = loop->num_nodes < oloop->num_nodes;
6484 /* If the union of LOOP and OLOOP is different than
6485 the larger of LOOP and OLOOP then LOOP and OLOOP
6486 must be disjoint. */
6487 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
6488 smaller ? oloop : loop);
6489 fprintf (file, ";; loop header %d shared by loops %d, %d %s\n",
6490 loop->header->index, i, j,
6491 disjoint ? "disjoint" : "nested");
6498 /* Print diagnostics to compare our concept of a loop with
6499 what the loop notes say. */
6500 if (GET_CODE (PREV_INSN (loop->first->head)) != NOTE
6501 || NOTE_LINE_NUMBER (PREV_INSN (loop->first->head))
6502 != NOTE_INSN_LOOP_BEG)
6503 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
6504 INSN_UID (PREV_INSN (loop->first->head)));
6505 if (GET_CODE (NEXT_INSN (loop->last->end)) != NOTE
6506 || NOTE_LINE_NUMBER (NEXT_INSN (loop->last->end))
6507 != NOTE_INSN_LOOP_END)
6508 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
6509 INSN_UID (NEXT_INSN (loop->last->end)));
6514 flow_loops_cfg_dump (loops, file);
6518 /* Free all the memory allocated for LOOPS. */
6520 flow_loops_free (loops)
6521 struct loops *loops;
6530 /* Free the loop descriptors. */
6531 for (i = 0; i < loops->num; i++)
6533 struct loop *loop = &loops->array[i];
6536 sbitmap_free (loop->nodes);
6540 free (loops->array);
6541 loops->array = NULL;
6544 sbitmap_vector_free (loops->cfg.dom);
6545 if (loops->cfg.dfs_order)
6546 free (loops->cfg.dfs_order);
6548 sbitmap_free (loops->shared_headers);
6553 /* Find the exits from the loop using the bitmap of loop nodes NODES
6554 and store in EXITS array. Return the number of exits from the loop. */
6557 flow_loop_exits_find (nodes, exits)
6558 const sbitmap nodes;
6567 /* Check all nodes within the loop to see if there are any
6568 successors not in the loop. Note that a node may have multiple exit edges. */
6571 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6572 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6574 basic_block dest = e->dest;
6576 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6584 *exits = (edge *) xmalloc (num_exits * sizeof (edge *));
6586 /* Store all exiting edges into an array. */
6588 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6589 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6591 basic_block dest = e->dest;
6593 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6594 (*exits)[num_exits++] = e;
6602 /* Find the nodes contained within the loop with header HEADER and
6603 latch LATCH and store in NODES. Return the number of nodes within the loop. */
6606 flow_loop_nodes_find (header, latch, nodes)
6615 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
6618 /* Start with only the loop header in the set of loop nodes. */
6619 sbitmap_zero (nodes);
6620 SET_BIT (nodes, header->index);
6622 header->loop_depth++;
6624 /* Push the loop latch on to the stack. */
6625 if (! TEST_BIT (nodes, latch->index))
6627 SET_BIT (nodes, latch->index);
6628 latch->loop_depth++;
6630 stack[sp++] = latch;
6639 for (e = node->pred; e; e = e->pred_next)
6641 basic_block ancestor = e->src;
6643 /* If this ancestor has not been marked as part of the loop, add it
6644 to the set of loop nodes and push it onto the stack. */
6645 if (ancestor != ENTRY_BLOCK_PTR
6646 && ! TEST_BIT (nodes, ancestor->index))
6648 SET_BIT (nodes, ancestor->index);
6649 ancestor->loop_depth++;
6651 stack[sp++] = ancestor;
6660 /* Compute the depth first search order and store in the array
6661 DFS_ORDER, marking the nodes visited in VISITED. Returns the
6662 number of nodes visited. */
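/* Illustrative note (assumed semantics): blocks are numbered in reverse
   completion order of the search, so for 0 -> {1, 2}, 1 -> 3, 2 -> 3
   block 0 receives the smallest dfs_order value and block 3 a larger
   value than either of its predecessors; headers of outer loops thus
   appear before the headers of the loops they enclose.  */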
6664 flow_depth_first_order_compute (dfs_order)
6673 /* Allocate stack for back-tracking up CFG. */
6674 stack = (edge *) xmalloc (n_basic_blocks * sizeof (edge));
6677 /* Allocate bitmap to track nodes that have been visited. */
6678 visited = sbitmap_alloc (n_basic_blocks);
6680 /* None of the nodes in the CFG have been visited yet. */
6681 sbitmap_zero (visited);
6683 /* Start with the first successor edge from the entry block. */
6684 e = ENTRY_BLOCK_PTR->succ;
6687 basic_block src = e->src;
6688 basic_block dest = e->dest;
6690 /* Mark that we have visited this node. */
6691 if (src != ENTRY_BLOCK_PTR)
6692 SET_BIT (visited, src->index);
6694 /* If this node has not been visited before, push the current
6695 edge on to the stack and proceed with the first successor
6696 edge of this node. */
6697 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6705 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6708 /* DEST has no successors (for example, a non-returning
6709 function is called) so do not push the current edge
6710 but carry on with its next successor. */
6711 dfs_order[dest->index] = n_basic_blocks - ++dfsnum;
6712 SET_BIT (visited, dest->index);
6715 while (! e->succ_next && src != ENTRY_BLOCK_PTR)
6717 dfs_order[src->index] = n_basic_blocks - ++dfsnum;
6719 /* Pop edge off stack. */
6727 sbitmap_free (visited);
6729 /* The number of nodes visited should not be greater than n_basic_blocks. */
6731 if (dfsnum > n_basic_blocks)
6734 /* There are some nodes left in the CFG that are unreachable. */
6735 if (dfsnum < n_basic_blocks)
6741 /* Return the block for the pre-header of the loop with header
6742 HEADER where DOM specifies the dominator information. Return NULL if
6743 there is no pre-header. */
6745 flow_loop_pre_header_find (header, dom)
6749 basic_block pre_header;
6752 /* If block p is a predecessor of the header and is the only block
6753 that the header does not dominate, then it is the pre-header. */
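/* Illustrative example: if the loop header is block 2 whose only
   predecessors are block 1 (which the header does not dominate) and the
   latch block 4 (which it does), then block 1 is the pre-header; a
   second undominated predecessor would mean there is none.  */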
6755 for (e = header->pred; e; e = e->pred_next)
6757 basic_block node = e->src;
6759 if (node != ENTRY_BLOCK_PTR
6760 && ! TEST_BIT (dom[node->index], header->index))
6762 if (pre_header == NULL)
6766 /* There are multiple edges into the header from outside
6767 the loop so there is no pre-header block. */
6777 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
6778 previously added. The insertion algorithm assumes that the loops
6779 are added in the order found by a depth first search of the CFG. */
6781 flow_loop_tree_node_add (prevloop, loop)
6782 struct loop *prevloop;
6786 if (flow_loop_nested_p (prevloop, loop))
6788 prevloop->inner = loop;
6789 loop->outer = prevloop;
6793 while (prevloop->outer)
6795 if (flow_loop_nested_p (prevloop->outer, loop))
6797 prevloop->next = loop;
6798 loop->outer = prevloop->outer;
6801 prevloop = prevloop->outer;
6804 prevloop->next = loop;
6809 /* Build the loop hierarchy tree for LOOPS. */
6811 flow_loops_tree_build (loops)
6812 struct loops *loops;
6817 num_loops = loops->num;
6821 /* Root the loop hierarchy tree with the first loop found.
6822 Since we used a depth first search this should be the outermost loop. */
6824 loops->tree = &loops->array[0];
6825 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
6827 /* Add the remaining loops to the tree. */
6828 for (i = 1; i < num_loops; i++)
6829 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
6833 /* Helper function to compute loop nesting depth and enclosed loop level
6834 for the natural loop specified by LOOP at the loop depth DEPTH.
6835 Returns the loop level. */
6837 flow_loop_level_compute (loop, depth)
6847 /* Traverse loop tree assigning depth and computing level as the
6848 maximum level of all the inner loops of this loop. The loop
6849 level is equivalent to the height of the loop in the loop tree
6850 and corresponds to the number of enclosed loop levels (including itself). */
6852 for (inner = loop->inner; inner; inner = inner->next)
6856 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
6861 loop->level = level;
6862 loop->depth = depth;
6867 /* Compute the loop nesting depth and enclosed loop level for the loop
6868 hierarchy tree specified by LOOPS. Return the maximum enclosed loop level. */
6872 flow_loops_level_compute (loops)
6873 struct loops *loops;
6879 /* Traverse all the outer level loops. */
6880 for (loop = loops->tree; loop; loop = loop->next)
6882 level = flow_loop_level_compute (loop, 1);
6890 /* Find all the natural loops in the function and save in LOOPS structure
6891 and recalculate loop_depth information in basic block structures.
6892 Return the number of natural loops found. */
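/* Illustrative example: in the CFG 0 -> 1, 1 -> 2, 2 -> 1, 2 -> 3,
   block 1 dominates block 2, so the edge 2 -> 1 is a back edge; block 1
   is the loop header, block 2 the latch, {1, 2} the loop nodes, and
   2 -> 3 the single exit edge.  */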
6895 flow_loops_find (loops)
6896 struct loops *loops;
6907 loops->array = NULL;
6911 /* Taking care of this degenerate case makes the rest of
6912 this code simpler. */
6913 if (n_basic_blocks == 0)
6916 /* Compute the dominators. */
6917 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
6918 compute_flow_dominators (dom, NULL);
6920 /* Count the number of loop edges (back edges). This should be the
6921 same as the number of natural loops. Also clear the loop_depth
6922 fields; as we work from inner->outer in a loop nest we call
6923 flow_loop_nodes_find, which will increment loop_depth for nodes
6924 within the current loop, which happens to enclose inner loops.
6927 for (b = 0; b < n_basic_blocks; b++)
6929 BASIC_BLOCK (b)->loop_depth = 0;
6930 for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
6932 basic_block latch = e->src;
6934 /* Look for back edges where a predecessor is dominated
6935 by this block. A natural loop has a single entry
6936 node (header) that dominates all the nodes in the
6937 loop. It also has a single back edge to the header
6938 from a latch node. Note that multiple natural loops
6939 may share the same header. */
6940 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
6947 /* Compute depth first search order of the CFG so that outer
6948 natural loops will be found before inner natural loops. */
6949 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
6950 flow_depth_first_order_compute (dfs_order);
6952 /* Allocate loop structures. */
6954 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
6956 headers = sbitmap_alloc (n_basic_blocks);
6957 sbitmap_zero (headers);
6959 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
6960 sbitmap_zero (loops->shared_headers);
6962 /* Find and record information about all the natural loops in the CFG. */
6965 for (b = 0; b < n_basic_blocks; b++)
6969 /* Search the nodes of the CFG in DFS order so that we can find
6970 outer loops first. */
6971 header = BASIC_BLOCK (dfs_order[b]);
6973 /* Look for all the possible latch blocks for this header. */
6974 for (e = header->pred; e; e = e->pred_next)
6976 basic_block latch = e->src;
6978 /* Look for back edges where a predecessor is dominated
6979 by this block. A natural loop has a single entry
6980 node (header) that dominates all the nodes in the
6981 loop. It also has a single back edge to the header
6982 from a latch node. Note that multiple natural loops
6983 may share the same header. */
6984 if (latch != ENTRY_BLOCK_PTR
6985 && TEST_BIT (dom[latch->index], header->index))
6989 loop = loops->array + num_loops;
6991 loop->header = header;
6992 loop->latch = latch;
6994 /* Keep track of blocks that are loop headers so
6995 that we can tell which loops should be merged. */
6996 if (TEST_BIT (headers, header->index))
6997 SET_BIT (loops->shared_headers, header->index);
6998 SET_BIT (headers, header->index);
7000 /* Find nodes contained within the loop. */
7001 loop->nodes = sbitmap_alloc (n_basic_blocks);
7003 = flow_loop_nodes_find (header, latch, loop->nodes);
7005 /* Compute first and last blocks within the loop.
7006 These are often the same as the loop header and
7007 loop latch respectively, but this is not always the case. */
7010 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
7012 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
7014 /* Find edges which exit the loop. Note that a node
7015 may have several exit edges. */
7017 = flow_loop_exits_find (loop->nodes, &loop->exits);
7019 /* Look to see if the loop has a pre-header node. */
7021 = flow_loop_pre_header_find (header, dom);
7028 /* Natural loops with shared headers may either be disjoint or
7029 nested. Disjoint loops with shared headers cannot be inner
7030 loops and should be merged. For now just mark loops that share headers. */
7032 for (i = 0; i < num_loops; i++)
7033 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
7034 loops->array[i].shared = 1;
7036 sbitmap_free (headers);
7039 loops->num = num_loops;
7041 /* Save CFG derived information to avoid recomputing it. */
7042 loops->cfg.dom = dom;
7043 loops->cfg.dfs_order = dfs_order;
7045 /* Build the loop hierarchy tree. */
7046 flow_loops_tree_build (loops);
7048 /* Assign the loop nesting depth and enclosed loop level for each loop. */
7050 loops->levels = flow_loops_level_compute (loops);
7056 /* Return non-zero if edge E enters header of LOOP from outside of LOOP. */
7058 flow_loop_outside_edge_p (loop, e)
7059 const struct loop *loop;
7062 if (e->dest != loop->header)
7064 return (e->src == ENTRY_BLOCK_PTR)
7065 || ! TEST_BIT (loop->nodes, e->src->index);
7069 /* Clear LOG_LINKS fields of insns in a chain. */
7071 clear_log_links (insns)
7075 for (i = insns; i; i = NEXT_INSN (i))
7076 if (GET_RTX_CLASS (GET_CODE (i)) == 'i')