1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 88, 92-98, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 /* This file contains the data flow analysis pass of the compiler. It
23 computes data flow information which tells combine_instructions
24 which insns to consider combining and controls register allocation.
26 Additional data flow information that is too bulky to record is
27 generated during the analysis, and is used at that time to create
28 autoincrement and autodecrement addressing.
30 The first step is dividing the function into basic blocks.
31 find_basic_blocks does this. Then life_analysis determines
32 where each register is live and where it is dead.
34 ** find_basic_blocks **
36 find_basic_blocks divides the current function's rtl into basic
37 blocks and constructs the CFG. The blocks are recorded in the
38 basic_block_info array; the CFG exists in the edge structures
39 referenced by the blocks.
41 find_basic_blocks also finds any unreachable loops and deletes them.
45 life_analysis is called immediately after find_basic_blocks.
46 It uses the basic block information to determine where each
47 hard or pseudo register is live.
49 ** live-register info **
51 The information about where each register is live is in two parts:
52 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
54 basic_block->global_live_at_start has an element for each basic
55 block, and the element is a bit-vector with a bit for each hard or
56 pseudo register. The bit is 1 if the register is live at the
57 beginning of the basic block.
59 Two types of elements can be added to an insn's REG_NOTES.
60 A REG_DEAD note is added to an insn's REG_NOTES for any register
61 that meets both of two conditions: The value in the register is not
62 needed in subsequent insns and the insn does not replace the value in
63 the register (in the case of multi-word hard registers, the value in
64 each register must be replaced by the insn to avoid a REG_DEAD note).
66 In the vast majority of cases, an object in a REG_DEAD note will be
67 used somewhere in the insn. The (rare) exception to this is if an
68 insn uses a multi-word hard register and only some of the registers are
69 needed in subsequent insns. In that case, REG_DEAD notes will be
70 provided for those hard registers that are not subsequently needed.
71 Partial REG_DEAD notes of this type do not occur when an insn sets
72 only some of the hard registers used in such a multi-word operand;
73 omitting REG_DEAD notes for objects stored in an insn is optional and
74 the desire to do so does not justify the complexity of the partial
77 REG_UNUSED notes are added for each register that is set by the insn
78 but is unused subsequently (if every register set by the insn is unused
79 and the insn does not reference memory or have some other side-effect,
80 the insn is deleted instead). If only part of a multi-word hard
81 register is used in a subsequent insn, REG_UNUSED notes are made for
82 the parts that will not be used.
84 To determine which registers are live after any insn, one can
85 start from the beginning of the basic block and scan insns, noting
86 which registers are set by each insn and which die there.
88 ** Other actions of life_analysis **
90 life_analysis sets up the LOG_LINKS fields of insns because the
91 information needed to do so is readily available.
93 life_analysis deletes insns whose only effect is to store a value that is never used.
96 life_analysis notices cases where a reference to a register as
97 a memory address can be combined with a preceding or following
98 incrementation or decrementation of the register. The separate
99 instruction to increment or decrement is deleted and the address
100 is changed to a POST_INC or similar rtx.
102 Each time an incrementing or decrementing address is created,
103 a REG_INC element is added to the insn's REG_NOTES list.
105 life_analysis fills in certain vectors containing information about
106 register usage: reg_n_refs, reg_n_deaths, reg_n_sets, reg_live_length,
107 reg_n_calls_crossed and reg_basic_block.
109 life_analysis sets current_function_sp_is_unchanging if the function
110 doesn't modify the stack pointer. */
114 Split out from life_analysis:
115 - local property discovery (bb->local_live, bb->local_set)
116 - global property computation
118 - pre/post modify transformation
126 #include "basic-block.h"
127 #include "insn-config.h"
129 #include "hard-reg-set.h"
132 #include "function.h"
136 #include "insn-flags.h"
140 #define obstack_chunk_alloc xmalloc
141 #define obstack_chunk_free free
144 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
145 the stack pointer does not matter. The value is tested only in
146 functions that have frame pointers.
147 No definition is equivalent to always zero. */
148 #ifndef EXIT_IGNORE_STACK
149 #define EXIT_IGNORE_STACK 0
152 #ifndef HAVE_epilogue
153 #define HAVE_epilogue 0
156 #ifndef HAVE_prologue
157 #define HAVE_prologue 0
160 /* The contents of the current function definition are allocated
161 in this obstack, and all are freed at the end of the function.
162 For top-level functions, this is temporary_obstack.
163 Separate obstacks are made for nested functions. */
165 extern struct obstack *function_obstack;
167 /* Number of basic blocks in the current function. */
171 /* Number of edges in the current function. */
175 /* The basic block array. */
177 varray_type basic_block_info;
179 /* The special entry and exit blocks. */
181 struct basic_block_def entry_exit_blocks[2] =
188 NULL, /* local_set */
189 NULL, /* global_live_at_start */
190 NULL, /* global_live_at_end */
192 ENTRY_BLOCK, /* index */
194 -1, -1 /* eh_beg, eh_end */
201 NULL, /* local_set */
202 NULL, /* global_live_at_start */
203 NULL, /* global_live_at_end */
205 EXIT_BLOCK, /* index */
207 -1, -1 /* eh_beg, eh_end */
211 /* Nonzero if the second flow pass has completed. */
214 /* Maximum register number used in this function, plus one. */
218 /* Indexed by n, giving various register information */
220 varray_type reg_n_info;
222 /* Size of the reg_n_info table. */
224 unsigned int reg_n_max;
226 /* Element N is the next insn that uses (hard or pseudo) register number N
227 within the current basic block; or zero, if there is no such insn.
228 This is valid only during the final backward scan in propagate_block. */
230 static rtx *reg_next_use;
232 /* Size of a regset for the current function,
233 in (1) bytes and (2) elements. */
238 /* Regset of regs live when calls to `setjmp'-like functions happen. */
239 /* ??? Does this exist only for the setjmp-clobbered warning message? */
241 regset regs_live_at_setjmp;
243 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
244 that have to go in the same hard reg.
245 The first two regs in the list are a pair, and the next two
246 are another pair, etc. */
249 /* Depth within loops of basic block being scanned for lifetime analysis,
250 plus one. This is the weight attached to references to registers. */
252 static int loop_depth;
254 /* During propagate_block, this is non-zero if the value of CC0 is live. */
258 /* During propagate_block, this contains a list of all the MEMs we are
259 tracking for dead store elimination. */
261 static rtx mem_set_list;
263 /* Set of registers that may be eliminable. These are handled specially
264 in updating regs_ever_live. */
266 static HARD_REG_SET elim_reg_set;
268 /* The basic block structure for every insn, indexed by uid. */
270 varray_type basic_block_for_insn;
272 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
273 /* ??? Should probably be using LABEL_NUSES instead. It would take a
274 bit of surgery to be able to use or co-opt the routines in jump. */
276 static rtx label_value_list;
278 /* INSN_VOLATILE (insn) is 1 if the insn refers to anything volatile. */
280 #define INSN_VOLATILE(INSN) bitmap_bit_p (uid_volatile, INSN_UID (INSN))
281 #define SET_INSN_VOLATILE(INSN) bitmap_set_bit (uid_volatile, INSN_UID (INSN))
282 static bitmap uid_volatile;
284 /* Forward declarations */
285 static int count_basic_blocks PROTO((rtx));
286 static rtx find_basic_blocks_1 PROTO((rtx));
287 static void create_basic_block PROTO((int, rtx, rtx, rtx));
288 static void clear_edges PROTO((void));
289 static void make_edges PROTO((rtx));
290 static void make_edge PROTO((sbitmap *, basic_block,
292 static void make_label_edge PROTO((sbitmap *, basic_block,
294 static void make_eh_edge PROTO((sbitmap *, eh_nesting_info *,
295 basic_block, rtx, int));
296 static void mark_critical_edges PROTO((void));
297 static void move_stray_eh_region_notes PROTO((void));
298 static void record_active_eh_regions PROTO((rtx));
300 static void commit_one_edge_insertion PROTO((edge));
302 static void delete_unreachable_blocks PROTO((void));
303 static void delete_eh_regions PROTO((void));
304 static int can_delete_note_p PROTO((rtx));
305 static int delete_block PROTO((basic_block));
306 static void expunge_block PROTO((basic_block));
307 static rtx flow_delete_insn PROTO((rtx));
308 static int can_delete_label_p PROTO((rtx));
309 static int merge_blocks_move_predecessor_nojumps PROTO((basic_block,
311 static int merge_blocks_move_successor_nojumps PROTO((basic_block,
313 static void merge_blocks_nomove PROTO((basic_block, basic_block));
314 static int merge_blocks PROTO((edge,basic_block,basic_block));
315 static void try_merge_blocks PROTO((void));
316 static void tidy_fallthru_edge PROTO((edge,basic_block,basic_block));
317 static void calculate_loop_depth PROTO((rtx));
319 static int verify_wide_reg_1 PROTO((rtx *, void *));
320 static void verify_wide_reg PROTO((int, rtx, rtx));
321 static void verify_local_live_at_start PROTO((regset, basic_block));
322 static int set_noop_p PROTO((rtx));
323 static int noop_move_p PROTO((rtx));
324 static void notice_stack_pointer_modification PROTO ((rtx, rtx, void *));
325 static void record_volatile_insns PROTO((rtx));
326 static void mark_reg PROTO((regset, rtx));
327 static void mark_regs_live_at_end PROTO((regset));
328 static void life_analysis_1 PROTO((rtx, int, int));
329 static void calculate_global_regs_live PROTO((sbitmap, sbitmap, int));
330 static void propagate_block PROTO((regset, rtx, rtx,
332 static int insn_dead_p PROTO((rtx, regset, int, rtx));
333 static int libcall_dead_p PROTO((rtx, regset, rtx, rtx));
334 static void mark_set_regs PROTO((regset, regset, rtx,
336 static void mark_set_1 PROTO((regset, regset, rtx,
339 static void find_auto_inc PROTO((regset, rtx, rtx));
340 static int try_pre_increment_1 PROTO((rtx));
341 static int try_pre_increment PROTO((rtx, rtx, HOST_WIDE_INT));
343 static void mark_used_regs PROTO((regset, regset, rtx, int, rtx));
344 void dump_flow_info PROTO((FILE *));
345 void debug_flow_info PROTO((void));
346 static void dump_edge_info PROTO((FILE *, edge, int));
348 static void count_reg_sets_1 PROTO ((rtx));
349 static void count_reg_sets PROTO ((rtx));
350 static void count_reg_references PROTO ((rtx));
351 static void invalidate_mems_from_autoinc PROTO ((rtx));
352 static void remove_edge PROTO ((edge));
353 static void remove_fake_successors PROTO ((basic_block));
355 /* This function is always defined so it can be called from the
356 debugger, and it is declared extern so we don't get warnings about
358 void verify_flow_info PROTO ((void));
361 /* Find basic blocks of the current function.
362 F is the first insn of the function and NREGS the number of register
366 find_basic_blocks (f, nregs, file, do_cleanup)
368 int nregs ATTRIBUTE_UNUSED;
369 FILE *file ATTRIBUTE_UNUSED;
374 /* Flush out existing data. */
375 if (basic_block_info != NULL)
381 /* Clear bb->aux on all extant basic blocks. We'll use this as a
382 tag for reuse during create_basic_block, just in case some pass
383 copies around basic block notes improperly. */
384 for (i = 0; i < n_basic_blocks; ++i)
385 BASIC_BLOCK (i)->aux = NULL;
387 VARRAY_FREE (basic_block_info);
390 n_basic_blocks = count_basic_blocks (f);
392 /* Size the basic block table. The actual structures will be allocated
393 by find_basic_blocks_1, since we want to keep the structure pointers
394 stable across calls to find_basic_blocks. */
395 /* ??? This whole issue would be much simpler if we called find_basic_blocks
396 exactly once, and thereafter we don't have a single long chain of
397 instructions at all until close to the end of compilation when we
398 actually lay them out. */
400 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
402 label_value_list = find_basic_blocks_1 (f);
404 /* Record the block to which an insn belongs. */
405 /* ??? This should be done another way, by which (perhaps) a label is
406 tagged directly with the basic block that it starts. It is used for
407 more than that currently, but IMO that is the only valid use. */
409 max_uid = get_max_uid ();
411 /* Leave space for insns life_analysis makes in some cases for auto-inc.
412 These cases are rare, so we don't need too much space. */
413 max_uid += max_uid / 10;
416 compute_bb_for_insn (max_uid);
418 /* Discover the edges of our cfg. */
420 record_active_eh_regions (f);
421 make_edges (label_value_list);
423 /* Delete unreachable blocks, then merge blocks when possible. */
427 delete_unreachable_blocks ();
428 move_stray_eh_region_notes ();
429 record_active_eh_regions (f);
433 /* Mark critical edges. */
435 mark_critical_edges ();
437 /* Discover the loop depth at the start of each basic block to aid
438 register allocation. */
439 calculate_loop_depth (f);
441 /* Kill the data we won't maintain. */
442 label_value_list = NULL_RTX;
444 #ifdef ENABLE_CHECKING
449 /* Count the basic blocks of the function. */
452 count_basic_blocks (f)
456 register RTX_CODE prev_code;
457 register int count = 0;
459 int call_had_abnormal_edge = 0;
460 rtx prev_call = NULL_RTX;
462 prev_code = JUMP_INSN;
463 for (insn = f; insn; insn = NEXT_INSN (insn))
465 register RTX_CODE code = GET_CODE (insn);
467 if (code == CODE_LABEL
468 || (GET_RTX_CLASS (code) == 'i'
469 && (prev_code == JUMP_INSN
470 || prev_code == BARRIER
471 || (prev_code == CALL_INSN && call_had_abnormal_edge))))
475 /* If the previous insn was a call that did not create an
476 abnormal edge, we want to add a nop so that the CALL_INSN
477 itself is not at basic_block_end. This allows us to
478 easily distinguish between normal calls and those which
479 create abnormal edges in the flow graph. */
481 if (count > 0 && prev_call != 0 && !call_had_abnormal_edge)
483 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
484 emit_insn_after (nop, prev_call);
488 /* Record whether this call created an edge. */
489 if (code == CALL_INSN)
491 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
492 int region = (note ? XWINT (XEXP (note, 0), 0) : 1);
494 call_had_abnormal_edge = 0;
496 /* If there is a specified EH region, we have an edge. */
497 if (eh_region && region > 0)
498 call_had_abnormal_edge = 1;
501 /* If there is a nonlocal goto label and the specified
502 region number isn't -1, we have an edge. (0 means
503 no throw, but might have a nonlocal goto). */
504 if (nonlocal_goto_handler_labels && region >= 0)
505 call_had_abnormal_edge = 1;
508 else if (code != NOTE)
509 prev_call = NULL_RTX;
513 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
515 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
520 /* The rest of the compiler works a bit smoother when we don't have to
521 check for the edge case of do-nothing functions with no basic blocks. */
524 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
531 /* Find all basic blocks of the function whose first insn is F.
533 Collect and return a list of labels whose addresses are taken. This
534 will be used in make_edges for use with computed gotos. */
537 find_basic_blocks_1 (f)
540 register rtx insn, next;
541 int call_has_abnormal_edge = 0;
543 rtx bb_note = NULL_RTX;
544 rtx eh_list = NULL_RTX;
545 rtx label_value_list = NULL_RTX;
549 /* We process the instructions in a slightly different way than we did
550 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
551 closed out the previous block, so that it gets attached at the proper
552 place. Since this form should be equivalent to the previous,
553 count_basic_blocks continues to use the old form as a check. */
555 for (insn = f; insn; insn = next)
557 enum rtx_code code = GET_CODE (insn);
559 next = NEXT_INSN (insn);
561 if (code == CALL_INSN)
563 /* Record whether this call created an edge. */
564 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
565 int region = (note ? XWINT (XEXP (note, 0), 0) : 1);
566 call_has_abnormal_edge = 0;
568 /* If there is an EH region, we have an edge. */
569 if (eh_list && region > 0)
570 call_has_abnormal_edge = 1;
573 /* If there is a nonlocal goto label and the specified
574 region number isn't -1, we have an edge. (0 means
575 no throw, but might have a nonlocal goto). */
576 if (nonlocal_goto_handler_labels && region >= 0)
577 call_has_abnormal_edge = 1;
585 int kind = NOTE_LINE_NUMBER (insn);
587 /* Keep a LIFO list of the currently active exception notes. */
588 if (kind == NOTE_INSN_EH_REGION_BEG)
589 eh_list = alloc_INSN_LIST (insn, eh_list);
590 else if (kind == NOTE_INSN_EH_REGION_END)
593 eh_list = XEXP (eh_list, 1);
594 free_INSN_LIST_node (t);
597 /* Look for basic block notes with which to keep the
598 basic_block_info pointers stable. Unthread the note now;
599 we'll put it back at the right place in create_basic_block.
600 Or not at all if we've already found a note in this block. */
601 else if (kind == NOTE_INSN_BASIC_BLOCK)
603 if (bb_note == NULL_RTX)
605 next = flow_delete_insn (insn);
612 /* A basic block starts at a label. If we've closed one off due
613 to a barrier or some such, no need to do it again. */
614 if (head != NULL_RTX)
616 /* While we now have edge lists with which other portions of
617 the compiler might determine a call ending a basic block
618 does not imply an abnormal edge, it will be a bit before
619 everything can be updated. So continue to emit a noop at
620 the end of such a block. */
621 if (GET_CODE (end) == CALL_INSN)
623 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
624 end = emit_insn_after (nop, end);
627 create_basic_block (i++, head, end, bb_note);
634 /* A basic block ends at a jump. */
635 if (head == NULL_RTX)
639 /* ??? Make a special check for table jumps. The way this
640 happens is truly and amazingly gross. We are about to
641 create a basic block that contains just a code label and
642 an addr*vec jump insn. Worse, an addr_diff_vec creates
643 its own natural loop.
645 Prevent this bit of brain damage, pasting things together
646 correctly in make_edges.
648 The correct solution involves emitting the table directly
649 on the tablejump instruction as a note, or JUMP_LABEL. */
651 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
652 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
660 goto new_bb_inclusive;
663 /* A basic block ends at a barrier. It may be that an unconditional
664 jump already closed the basic block -- no need to do it again. */
665 if (head == NULL_RTX)
668 /* While we now have edge lists with which other portions of the
669 compiler might determine a call ending a basic block does not
670 imply an abnormal edge, it will be a bit before everything can
671 be updated. So continue to emit a noop at the end of such a
673 if (GET_CODE (end) == CALL_INSN)
675 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
676 end = emit_insn_after (nop, end);
678 goto new_bb_exclusive;
681 /* A basic block ends at a call that can either throw or
682 do a non-local goto. */
683 if (call_has_abnormal_edge)
686 if (head == NULL_RTX)
691 create_basic_block (i++, head, end, bb_note);
692 head = end = NULL_RTX;
699 if (GET_RTX_CLASS (code) == 'i')
701 if (head == NULL_RTX)
708 if (GET_RTX_CLASS (code) == 'i')
712 /* Make a list of all labels referred to other than by jumps
713 (which just don't have the REG_LABEL notes).
715 Make a special exception for labels followed by an ADDR*VEC,
716 as this would be a part of the tablejump setup code.
718 Make a special exception for the eh_return_stub_label, which
719 we know isn't part of any otherwise visible control flow. */
721 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
722 if (REG_NOTE_KIND (note) == REG_LABEL)
724 rtx lab = XEXP (note, 0), next;
726 if (lab == eh_return_stub_label)
728 else if ((next = next_nonnote_insn (lab)) != NULL
729 && GET_CODE (next) == JUMP_INSN
730 && (GET_CODE (PATTERN (next)) == ADDR_VEC
731 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
735 = alloc_EXPR_LIST (0, XEXP (note, 0), label_value_list);
740 if (head != NULL_RTX)
741 create_basic_block (i++, head, end, bb_note);
743 if (i != n_basic_blocks)
746 return label_value_list;
749 /* Create a new basic block consisting of the instructions between
750 HEAD and END inclusive. Reuses the note and basic block struct
751 in BB_NOTE, if any. */
754 create_basic_block (index, head, end, bb_note)
756 rtx head, end, bb_note;
761 && ! RTX_INTEGRATED_P (bb_note)
762 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
765 /* If we found an existing note, thread it back onto the chain. */
767 if (GET_CODE (head) == CODE_LABEL)
768 add_insn_after (bb_note, head);
771 add_insn_before (bb_note, head);
777 /* Otherwise we must create a note and a basic block structure.
778 Since we allow basic block structs in rtl, give the struct
779 the same lifetime by allocating it off the function obstack
780 rather than using malloc. */
782 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
783 memset (bb, 0, sizeof (*bb));
785 if (GET_CODE (head) == CODE_LABEL)
786 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
789 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
792 NOTE_BASIC_BLOCK (bb_note) = bb;
795 /* Always include the bb note in the block. */
796 if (NEXT_INSN (end) == bb_note)
802 BASIC_BLOCK (index) = bb;
804 /* Tag the block so that we know it has been used when considering
805 other basic block notes. */
809 /* Records the basic block struct in BB_FOR_INSN, for every instruction
810 indexed by INSN_UID. MAX is the size of the array. */
813 compute_bb_for_insn (max)
818 if (basic_block_for_insn)
819 VARRAY_FREE (basic_block_for_insn);
820 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
822 for (i = 0; i < n_basic_blocks; ++i)
824 basic_block bb = BASIC_BLOCK (i);
831 int uid = INSN_UID (insn);
833 VARRAY_BB (basic_block_for_insn, uid) = bb;
836 insn = NEXT_INSN (insn);
841 /* Free the memory associated with the edge structures. */
849 for (i = 0; i < n_basic_blocks; ++i)
851 basic_block bb = BASIC_BLOCK (i);
853 for (e = bb->succ; e ; e = n)
863 for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
869 ENTRY_BLOCK_PTR->succ = 0;
870 EXIT_BLOCK_PTR->pred = 0;
875 /* Identify the edges between basic blocks.
877 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
878 that are otherwise unreachable may be reachable with a non-local goto.
880 BB_EH_END is an array indexed by basic block number in which we record
881 the list of exception regions active at the end of the basic block. */
884 make_edges (label_value_list)
885 rtx label_value_list;
888 eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
889 sbitmap *edge_cache = NULL;
891 /* Assume no computed jump; revise as we create edges. */
892 current_function_has_computed_jump = 0;
894 /* Heavy use of computed goto in machine-generated code can lead to
895 nearly fully-connected CFGs. In that case we spend a significant
896 amount of time searching the edge lists for duplicates. */
897 if (forced_labels || label_value_list)
899 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
900 sbitmap_vector_zero (edge_cache, n_basic_blocks);
903 /* By nature of the way these get numbered, block 0 is always the entry. */
904 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
906 for (i = 0; i < n_basic_blocks; ++i)
908 basic_block bb = BASIC_BLOCK (i);
911 int force_fallthru = 0;
913 /* Examine the last instruction of the block, and discover the
914 ways we can leave the block. */
917 code = GET_CODE (insn);
920 if (code == JUMP_INSN)
924 /* ??? Recognize a tablejump and do the right thing. */
925 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
926 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
927 && GET_CODE (tmp) == JUMP_INSN
928 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
929 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
934 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
935 vec = XVEC (PATTERN (tmp), 0);
937 vec = XVEC (PATTERN (tmp), 1);
939 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
940 make_label_edge (edge_cache, bb,
941 XEXP (RTVEC_ELT (vec, j), 0), 0);
943 /* Some targets (eg, ARM) emit a conditional jump that also
944 contains the out-of-range target. Scan for these and
945 add an edge if necessary. */
946 if ((tmp = single_set (insn)) != NULL
947 && SET_DEST (tmp) == pc_rtx
948 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
949 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
950 make_label_edge (edge_cache, bb,
951 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
953 #ifdef CASE_DROPS_THROUGH
954 /* Silly VAXen. The ADDR_VEC is going to be in the way of
955 us naturally detecting fallthru into the next block. */
960 /* If this is a computed jump, then mark it as reaching
961 everything on the label_value_list and forced_labels list. */
962 else if (computed_jump_p (insn))
964 current_function_has_computed_jump = 1;
966 for (x = label_value_list; x; x = XEXP (x, 1))
967 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
969 for (x = forced_labels; x; x = XEXP (x, 1))
970 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
973 /* Returns create an exit out. */
974 else if (returnjump_p (insn))
975 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
977 /* Otherwise, we have a plain conditional or unconditional jump. */
980 if (! JUMP_LABEL (insn))
982 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
986 /* If this is a CALL_INSN, then mark it as reaching the active EH
987 handler for this CALL_INSN. If we're handling asynchronous
988 exceptions then any insn can reach any of the active handlers.
990 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
992 if (code == CALL_INSN || asynchronous_exceptions)
994 /* If there's an EH region active at the end of a block,
995 add the appropriate edges. */
997 make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);
999 /* If we have asynchronous exceptions, do the same for *all*
1000 exception regions active in the block. */
1001 if (asynchronous_exceptions
1002 && bb->eh_beg != bb->eh_end)
1004 if (bb->eh_beg >= 0)
1005 make_eh_edge (edge_cache, eh_nest_info, bb,
1006 NULL_RTX, bb->eh_beg);
1008 for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
1009 if (GET_CODE (x) == NOTE
1010 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
1011 || NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
1013 int region = NOTE_EH_HANDLER (x);
1014 make_eh_edge (edge_cache, eh_nest_info, bb,
1019 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1021 /* ??? This could be made smarter: in some cases it's possible
1022 to tell that certain calls will not do a nonlocal goto.
1024 For example, if the nested functions that do the nonlocal
1025 gotos do not have their addresses taken, then only calls to
1026 those functions or to other nested functions that use them
1027 could possibly do nonlocal gotos. */
1028 /* We do know that a REG_EH_REGION note with a value less
1029 than 0 is guaranteed not to perform a non-local goto. */
1030 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1031 if (!note || XINT (XEXP (note, 0), 0) >= 0)
1032 for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
1033 make_label_edge (edge_cache, bb, XEXP (x, 0),
1034 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1038 /* We know something about the structure of the function __throw in
1039 libgcc2.c. It is the only function that ever contains eh_stub
1040 labels. It modifies its return address so that the last block
1041 returns to one of the eh_stub labels within it. So we have to
1042 make additional edges in the flow graph. */
1043 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
1044 make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);
1046 /* Find out if we can drop through to the next block. */
1047 insn = next_nonnote_insn (insn);
1048 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1049 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1050 else if (i + 1 < n_basic_blocks)
1052 rtx tmp = BLOCK_HEAD (i + 1);
1053 if (GET_CODE (tmp) == NOTE)
1054 tmp = next_nonnote_insn (tmp);
1055 if (force_fallthru || insn == tmp)
1056 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1060 free_eh_nesting_info (eh_nest_info);
1062 sbitmap_vector_free (edge_cache);
1065 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1066 about the edge that is accumulated between calls. */
1069 make_edge (edge_cache, src, dst, flags)
1070 sbitmap *edge_cache;
1071 basic_block src, dst;
1077 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1078 many edges to them, and we didn't allocate memory for it. */
1079 use_edge_cache = (edge_cache
1080 && src != ENTRY_BLOCK_PTR
1081 && dst != EXIT_BLOCK_PTR);
1083 /* Make sure we don't add duplicate edges. */
1084 if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
1085 for (e = src->succ; e ; e = e->succ_next)
1092 e = (edge) xcalloc (1, sizeof (*e));
1095 e->succ_next = src->succ;
1096 e->pred_next = dst->pred;
1105 SET_BIT (edge_cache[src->index], dst->index);
1108 /* Create an edge from a basic block to a label. */
1111 make_label_edge (edge_cache, src, label, flags)
1112 sbitmap *edge_cache;
1117 if (GET_CODE (label) != CODE_LABEL)
1120 /* If the label was never emitted, this insn is junk, but avoid a
1121 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1122 as a result of a syntax error and a diagnostic has already been
1125 if (INSN_UID (label) == 0)
1128 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1131 /* Create the edges generated by INSN in REGION. */
1134 make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
1135 sbitmap *edge_cache;
1136 eh_nesting_info *eh_nest_info;
1141 handler_info **handler_list;
1144 is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1145 num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
1148 make_label_edge (edge_cache, src, handler_list[num]->handler_label,
1149 EDGE_ABNORMAL | EDGE_EH | is_call);
1153 /* EH_REGION notes appearing between basic blocks is ambiguous, and even
1154 dangerous if we intend to move basic blocks around. Move such notes
1155 into the following block. */
1158 move_stray_eh_region_notes ()
/* With fewer than two blocks there is no "between" to clean up.  */
1163 if (n_basic_blocks < 2)
/* Walk adjacent block pairs (b1, b2) from the end of the function
   backward; b2 trails b1 via the loop increment.  */
1166 b2 = BASIC_BLOCK (n_basic_blocks - 1);
1167 for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
1169 rtx insn, next, list = NULL_RTX;
1171 b1 = BASIC_BLOCK (i);
/* Scan the insns strictly between the end of b1 and the head of b2.  */
1172 for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
1174 next = NEXT_INSN (insn);
1175 if (GET_CODE (insn) == NOTE
1176 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1177 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1179 /* Unlink from the insn chain. */
1180 NEXT_INSN (PREV_INSN (insn)) = next;
1181 PREV_INSN (next) = PREV_INSN (insn);
/* Queue the stray note on LIST, chained through NEXT_INSN.  */
1184 NEXT_INSN (insn) = list;
1189 if (list == NULL_RTX)
1192 /* Find where to insert these things. */
/* Skip past a leading CODE_LABEL so notes land inside the block.  */
1194 if (GET_CODE (insn) == CODE_LABEL)
1195 insn = NEXT_INSN (insn);
1199 next = NEXT_INSN (list);
1200 add_insn_after (list, insn);
1206 /* Recompute eh_beg/eh_end for each basic block. */
1209 record_active_eh_regions (f)
/* EH_LIST is a stack (INSN_LIST) of the currently-open EH region
   notes, innermost first.  */
1212 rtx insn, eh_list = NULL_RTX;
1214 basic_block bb = BASIC_BLOCK (0);
1216 for (insn = f; insn ; insn = NEXT_INSN (insn))
/* At a block head, record the innermost active region (or -1).  */
1218 if (bb->head == insn)
1219 bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1221 if (GET_CODE (insn) == NOTE)
1223 int kind = NOTE_LINE_NUMBER (insn);
1224 if (kind == NOTE_INSN_EH_REGION_BEG)
1225 eh_list = alloc_INSN_LIST (insn, eh_list);
1226 else if (kind == NOTE_INSN_EH_REGION_END)
/* Pop the region stack and free the popped node.  */
1228 rtx t = XEXP (eh_list, 1);
1229 free_INSN_LIST_node (eh_list);
/* At a block end, record the still-active innermost region (or -1),
   then advance to the next block (loop-exit lines elided here).  */
1234 if (bb->end == insn)
1236 bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1238 if (i == n_basic_blocks)
1240 bb = BASIC_BLOCK (i);
1245 /* Identify critical edges and set the bits appropriately. */
1248 mark_critical_edges ()
1250 int i, n = n_basic_blocks;
1253 /* We begin with the entry block. This is not terribly important now,
1254 but could be if a front end (Fortran) implemented alternate entry
1256 bb = ENTRY_BLOCK_PTR;
1263 /* (1) Critical edges must have a source with multiple successors. */
1264 if (bb->succ && bb->succ->succ_next)
1266 for (e = bb->succ; e ; e = e->succ_next)
1268 /* (2) Critical edges must have a destination with multiple
1269 predecessors. Note that we know there is at least one
1270 predecessor -- the edge we followed to get here. */
1271 if (e->dest->pred->pred_next)
1272 e->flags |= EDGE_CRITICAL;
1274 e->flags &= ~EDGE_CRITICAL;
/* Single successor: none of this block's out-edges can be critical,
   so clear the bit on all of them.  */
1279 for (e = bb->succ; e ; e = e->succ_next)
1280 e->flags &= ~EDGE_CRITICAL;
1285 bb = BASIC_BLOCK (i);
1289 /* Split a (typically critical) edge. Return the new block.
1290 Abort on abnormal edges.
1292 ??? The code generally expects to be called on critical edges.
1293 The case of a block ending in an unconditional jump to a
1294 block with multiple predecessors is not handled optimally. */
/* NOTE(review): many interior lines of this function are elided in
   this extract; visible statements are not contiguous.  */
1297 split_edge (edge_in)
1300 basic_block old_pred, bb, old_succ;
1305 /* Abnormal edges cannot be split. */
1306 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1309 old_pred = edge_in->src;
1310 old_succ = edge_in->dest;
1312 /* Remove the existing edge from the destination's pred list. */
1315 for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
1317 *pp = edge_in->pred_next;
1318 edge_in->pred_next = NULL;
1321 /* Create the new structures. */
1322 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
1323 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1326 memset (bb, 0, sizeof (*bb));
1327 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
1328 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
1330 /* ??? This info is likely going to be out of date very soon. */
/* Seed the new block's liveness from the successor's live-at-start
   (both start and end, since the block is initially empty).  */
1331 if (old_succ->global_live_at_start)
1333 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1334 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
1338 CLEAR_REG_SET (bb->global_live_at_start);
1339 CLEAR_REG_SET (bb->global_live_at_end);
/* Wire up the new block: its single out-edge falls through to the
   old successor with full branch probability.  */
1344 bb->succ = edge_out;
1347 edge_in->flags &= ~EDGE_CRITICAL;
1349 edge_out->pred_next = old_succ->pred;
1350 edge_out->succ_next = NULL;
1352 edge_out->dest = old_succ;
1353 edge_out->flags = EDGE_FALLTHRU;
1354 edge_out->probability = REG_BR_PROB_BASE;
1356 old_succ->pred = edge_out;
1358 /* Tricky case -- if there existed a fallthru into the successor
1359 (and we're not it) we must add a new unconditional jump around
1360 the new block we're actually interested in.
1362 Further, if that edge is critical, this means a second new basic
1363 block must be created to hold it. In order to simplify correct
1364 insn placement, do this before we touch the existing basic block
1365 ordering for the block we were really wanting. */
1366 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1369 for (e = edge_out->pred_next; e ; e = e->pred_next)
1370 if (e->flags & EDGE_FALLTHRU)
1375 basic_block jump_block;
1378 if ((e->flags & EDGE_CRITICAL) == 0)
1380 /* Non critical -- we can simply add a jump to the end
1381 of the existing predecessor. */
1382 jump_block = e->src;
1386 /* We need a new block to hold the jump. The simplest
1387 way to do the bulk of the work here is to recursively
1389 jump_block = split_edge (e);
1390 e = jump_block->succ;
1393 /* Now add the jump insn ... */
1394 pos = emit_jump_insn_after (gen_jump (old_succ->head),
1396 jump_block->end = pos;
1397 emit_barrier_after (pos);
1399 /* ... let jump know that label is in use, ... */
1400 JUMP_LABEL (pos) = old_succ->head;
1401 ++LABEL_NUSES (old_succ->head);
1403 /* ... and clear fallthru on the outgoing edge. */
1404 e->flags &= ~EDGE_FALLTHRU;
1406 /* Continue splitting the interesting edge. */
1410 /* Place the new block just in front of the successor. */
1411 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1412 if (old_succ == EXIT_BLOCK_PTR)
1413 j = n_basic_blocks - 1;
1415 j = old_succ->index;
/* Shift blocks J..N-2 up by one to open slot J for the new block.  */
1416 for (i = n_basic_blocks - 1; i > j; --i)
1418 basic_block tmp = BASIC_BLOCK (i - 1);
1419 BASIC_BLOCK (i) = tmp;
1422 BASIC_BLOCK (i) = bb;
1425 /* Create the basic block note. */
1426 if (old_succ != EXIT_BLOCK_PTR)
1427 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1429 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
1430 NOTE_BASIC_BLOCK (bb_note) = bb;
1431 bb->head = bb->end = bb_note;
1433 /* Not quite simple -- for non-fallthru edges, we must adjust the
1434 predecessor's jump instruction to target our new block. */
1435 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1437 rtx tmp, insn = old_pred->end;
1438 rtx old_label = old_succ->head;
1439 rtx new_label = gen_label_rtx ();
1441 if (GET_CODE (insn) != JUMP_INSN)
1444 /* ??? Recognize a tablejump and adjust all matching cases. */
1445 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1446 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1447 && GET_CODE (tmp) == JUMP_INSN
1448 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1449 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1454 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1455 vec = XVEC (PATTERN (tmp), 0);
1457 vec = XVEC (PATTERN (tmp), 1);
/* Retarget every table entry that referenced the old label, keeping
   the label use counts consistent.  */
1459 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1460 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1462 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1463 --LABEL_NUSES (old_label);
1464 ++LABEL_NUSES (new_label);
1467 /* Handle casesi dispatch insns */
1468 if ((tmp = single_set (insn)) != NULL
1469 && SET_DEST (tmp) == pc_rtx
1470 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1471 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1472 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1474 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1476 --LABEL_NUSES (old_label);
1477 ++LABEL_NUSES (new_label);
1482 /* This would have indicated an abnormal edge. */
1483 if (computed_jump_p (insn))
1486 /* A return instruction can't be redirected. */
1487 if (returnjump_p (insn))
1490 /* If the insn doesn't go where we think, we're confused. */
1491 if (JUMP_LABEL (insn) != old_label)
1494 redirect_jump (insn, new_label);
/* The redirected jump now needs a real label at the new block.  */
1497 emit_label_before (new_label, bb_note);
1498 bb->head = new_label;
1504 /* Queue instructions for insertion on an edge between two basic blocks.
1505 The new instructions and basic blocks (if any) will not appear in the
1506 CFG until commit_edge_insertions is called. */
1509 insert_insn_on_edge (pattern, e)
1513 /* We cannot insert instructions on an abnormal critical edge.
1514 It will be easier to find the culprit if we die now. */
1515 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1516 == (EDGE_ABNORMAL|EDGE_CRITICAL))
/* Append PATTERN to the edge's pending sequence: start a fresh
   sequence if none exists, else continue the saved one.  */
1519 if (e->insns == NULL_RTX)
1522 push_to_sequence (e->insns);
1524 emit_insn (pattern);
1526 e->insns = get_insns ();
1530 /* Update the CFG for the instructions queued on edge E. */
/* NOTE(review): interior lines are elided in this extract.  */
1533 commit_one_edge_insertion (e)
1536 rtx before = NULL_RTX, after = NULL_RTX, tmp;
1539 /* Figure out where to put these things. If the destination has
1540 one predecessor, insert there. Except for the exit block. */
1541 if (e->dest->pred->pred_next == NULL
1542 && e->dest != EXIT_BLOCK_PTR)
1546 /* Get the location correct wrt a code label, and "nice" wrt
1547 a basic block note, and before everything else. */
1549 if (GET_CODE (tmp) == CODE_LABEL)
1550 tmp = NEXT_INSN (tmp);
1551 if (GET_CODE (tmp) == NOTE
1552 && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
1553 tmp = NEXT_INSN (tmp);
1554 if (tmp == bb->head)
1557 after = PREV_INSN (tmp);
1560 /* If the source has one successor and the edge is not abnormal,
1561 insert there. Except for the entry block. */
1562 else if ((e->flags & EDGE_ABNORMAL) == 0
1563 && e->src->succ->succ_next == NULL
1564 && e->src != ENTRY_BLOCK_PTR)
1567 if (GET_CODE (bb->end) == JUMP_INSN)
1569 /* ??? Is it possible to wind up with non-simple jumps? Perhaps
1570 a jump with delay slots already filled? */
1571 if (! simplejump_p (bb->end))
1578 /* We'd better be fallthru, or we've lost track of what's what. */
1579 if ((e->flags & EDGE_FALLTHRU) == 0)
1586 /* Otherwise we must split the edge. */
/* Neither endpoint is a safe insertion site: split E and insert into
   the resulting new block.  */
1589 bb = split_edge (e);
1593 /* Now that we've found the spot, do the insertion. */
/* Clear the queue before emitting so the edge is not re-committed.  */
1595 e->insns = NULL_RTX;
1597 /* Set the new block number for these insns, if structure is allocated. */
1598 if (basic_block_for_insn)
1601 for (i = tmp; i != NULL_RTX; i = NEXT_INSN (i))
1602 set_block_for_insn (i, bb);
/* Emit before BEFORE or after AFTER, updating bb->head / bb->end
   when the insertion touches the block boundary.  */
1607 emit_insns_before (tmp, before);
1608 if (before == bb->head)
1613 tmp = emit_insns_after (tmp, after);
1614 if (after == bb->end)
1619 /* Update the CFG for all queued instructions. */
1622 commit_edge_insertions ()
/* Start at the entry block and walk every block's successor list.  */
1628 bb = ENTRY_BLOCK_PTR;
/* Cache NEXT first: committing an insertion may restructure the
   successor list (e.g. via split_edge).  */
1633 for (e = bb->succ; e ; e = next)
1635 next = e->succ_next;
1637 commit_one_edge_insertion (e);
1640 if (++i >= n_basic_blocks)
1642 bb = BASIC_BLOCK (i);
1646 /* Delete all unreachable basic blocks. */
1649 delete_unreachable_blocks ()
1651 basic_block *worklist, *tos;
1652 int deleted_handler;
/* TOS is the stack pointer into WORKLIST; empty when tos == worklist.  */
1657 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
1659 /* Use basic_block->aux as a marker. Clear them all. */
1661 for (i = 0; i < n; ++i)
1662 BASIC_BLOCK (i)->aux = NULL;
1664 /* Add our starting points to the worklist. Almost always there will
1665 be only one. It isn't inconcievable that we might one day directly
1666 support Fortran alternate entry points. */
1668 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
1672 /* Mark the block with a handy non-null value. */
1676 /* Iterate: find everything reachable from what we've already seen. */
1678 while (tos != worklist)
1680 basic_block b = *--tos;
1682 for (e = b->succ; e ; e = e->succ_next)
1690 /* Delete all unreachable basic blocks. Count down so that we don't
1691 interfere with the block renumbering that happens in delete_block. */
1693 deleted_handler = 0;
1695 for (i = n - 1; i >= 0; --i)
1697 basic_block b = BASIC_BLOCK (i);
1700 /* This block was found. Tidy up the mark. */
/* delete_block returns nonzero if an EH handler went away; OR the
   results so the EH cleanup below runs at most once.  */
1703 deleted_handler |= delete_block (b);
1706 /* Fix up edges that now fall through, or rather should now fall through
1707 but previously required a jump around now deleted blocks. Simplify
1708 the search by only examining blocks numerically adjacent, since this
1709 is how find_basic_blocks created them. */
1711 for (i = 1; i < n_basic_blocks; ++i)
1713 basic_block b = BASIC_BLOCK (i - 1);
1714 basic_block c = BASIC_BLOCK (i);
1717 /* We care about simple conditional or unconditional jumps with
1720 If we had a conditional branch to the next instruction when
1721 find_basic_blocks was called, then there will only be one
1722 out edge for the block which ended with the conditional
1723 branch (since we do not create duplicate edges).
1725 Furthermore, the edge will be marked as a fallthru because we
1726 merge the flags for the duplicate edges. So we do not want to
1727 check that the edge is not a FALLTHRU edge. */
1728 if ((s = b->succ) != NULL
1729 && s->succ_next == NULL
1731 /* If the jump insn has side effects, we can't tidy the edge. */
1732 && (GET_CODE (b->end) != JUMP_INSN
1733 || onlyjump_p (b->end)))
1734 tidy_fallthru_edge (s, b, c);
1737 /* If we deleted an exception handler, we may have EH region begin/end
1738 blocks to remove as well. */
1739 if (deleted_handler)
1740 delete_eh_regions ();
1745 /* Find EH regions for which there is no longer a handler, and delete them. */
1748 delete_eh_regions ()
/* Refresh rethrow bookkeeping before testing rethrow_used below.  */
1752 update_rethrow_references ();
1754 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1755 if (GET_CODE (insn) == NOTE)
1757 if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
1758 (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1760 int num = NOTE_EH_HANDLER (insn);
1761 /* A NULL handler indicates a region is no longer needed,
1762 as long as it isn't the target of a rethrow. */
1763 if (get_first_handler (num) == NULL && ! rethrow_used (num))
/* Turn both BEG and END notes of the dead region into deleted
   notes rather than unlinking them.  */
1765 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1766 NOTE_SOURCE_FILE (insn) = 0;
1772 /* Return true if NOTE is not one of the ones that must be kept paired,
1773 so that we may simply delete them. */
1776 can_delete_note_p (note)
/* Only already-deleted notes and basic-block notes are safe to drop;
   everything else (e.g. EH region pairs) must be preserved.  */
1779 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
1780 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
1783 /* Unlink a chain of insns between START and FINISH, leaving notes
1784 that must be paired. */
1787 flow_delete_insn_chain (start, finish)
1790 /* Unchain the insns one by one. It would be quicker to delete all
1791 of these with a single unchaining, rather than one at a time, but
1792 we need to keep the NOTE's. */
1798 next = NEXT_INSN (start);
/* Paired notes and undeletable labels are skipped, not removed.  */
1799 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
1801 else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
1804 next = flow_delete_insn (start);
/* FINISH is inclusive: stop after deleting (or skipping) it.  */
1806 if (start == finish)
1812 /* Delete the insns in a (non-live) block. We physically delete every
1813 non-deleted-note insn, and update the flow graph appropriately.
1815 Return nonzero if we deleted an exception handler. */
1817 /* ??? Preserving all such notes strikes me as wrong. It would be nice
1818 to post-process the stream to remove empty blocks, loops, ranges, etc. */
/* NOTE(review): the function signature and several interior lines are
   elided in this extract; the return value matches the comment above.  */
1824 int deleted_handler = 0;
1827 /* If the head of this block is a CODE_LABEL, then it might be the
1828 label for an exception handler which can't be reached.
1830 We need to remove the label from the exception_handler_label list
1831 and remove the associated NOTE_INSN_EH_REGION_BEG and
1832 NOTE_INSN_EH_REGION_END notes. */
1836 never_reached_warning (insn);
1838 if (GET_CODE (insn) == CODE_LABEL)
1840 rtx x, *prev = &exception_handler_labels;
1842 for (x = exception_handler_labels; x; x = XEXP (x, 1))
1844 if (XEXP (x, 0) == insn)
1846 /* Found a match, splice this label out of the EH label list. */
1847 *prev = XEXP (x, 1);
1848 XEXP (x, 1) = NULL_RTX;
1849 XEXP (x, 0) = NULL_RTX;
1851 /* Remove the handler from all regions */
1852 remove_handler (insn);
1853 deleted_handler = 1;
/* Not a match: advance the splice pointer.  */
1856 prev = &XEXP (x, 1);
1859 /* This label may be referenced by code solely for its value, or
1860 referenced by static data, or something. We have determined
1861 that it is not reachable, but cannot delete the label itself.
1862 Save code space and continue to delete the balance of the block,
1863 along with properly updating the cfg. */
1864 if (!can_delete_label_p (insn))
1866 /* If we've only got one of these, skip the whole deleting
1869 goto no_delete_insns;
1870 insn = NEXT_INSN (insn);
1874 /* Selectively unlink the insn chain. Include any BARRIER that may
1875 follow the basic block. */
1876 end = next_nonnote_insn (b->end);
1877 if (!end || GET_CODE (end) != BARRIER)
1879 flow_delete_insn_chain (insn, end);
1883 /* Remove the edges into and out of this block. Note that there may
1884 indeed be edges in, if we are removing an unreachable loop. */
/* For each in-edge, splice it out of the source's successor list.  */
1888 for (e = b->pred; e ; e = next)
1890 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
1893 next = e->pred_next;
/* Likewise, splice each out-edge from the dest's predecessor list.  */
1897 for (e = b->succ; e ; e = next)
1899 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
1902 next = e->succ_next;
1911 /* Remove the basic block from the array, and compact behind it. */
1914 return deleted_handler;
1917 /* Remove block B from the basic block array and compact behind it. */
/* NOTE(review): the function name/signature is elided in this extract
   (takes basic_block B, per the comment above) -- TODO confirm.  */
1923 int i, n = n_basic_blocks;
/* Shift every later block down one slot; elided lines presumably also
   renumber each shifted block's index.  */
1925 for (i = b->index; i + 1 < n; ++i)
1927 basic_block x = BASIC_BLOCK (i + 1);
1928 BASIC_BLOCK (i) = x;
1932 basic_block_info->num_elements--;
1936 /* Delete INSN by patching it out. Return the next insn. */
1939 flow_delete_insn (insn)
1942 rtx prev = PREV_INSN (insn);
1943 rtx next = NEXT_INSN (insn);
/* Detach INSN from the doubly-linked insn chain.  */
1945 PREV_INSN (insn) = NULL_RTX;
1946 NEXT_INSN (insn) = NULL_RTX;
1949 NEXT_INSN (prev) = next;
1951 PREV_INSN (next) = prev;
/* INSN was the last insn: move the function's last-insn pointer back.  */
1953 set_last_insn (prev);
1955 if (GET_CODE (insn) == CODE_LABEL)
1956 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
1958 /* If deleting a jump, decrement the use count of the label. Deleting
1959 the label itself should happen in the normal course of block merging. */
1960 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
1961 LABEL_NUSES (JUMP_LABEL (insn))--;
1966 /* True if a given label can be deleted. */
1969 can_delete_label_p (label)
/* A label must be kept if it is explicitly preserved, ...  */
1974 if (LABEL_PRESERVE_P (label))
/* ... appears in the forced-labels, label-value, or EH-handler
   lists, ...  */
1977 for (x = forced_labels; x ; x = XEXP (x, 1))
1978 if (label == XEXP (x, 0))
1980 for (x = label_value_list; x ; x = XEXP (x, 1))
1981 if (label == XEXP (x, 0))
1983 for (x = exception_handler_labels; x ; x = XEXP (x, 1))
1984 if (label == XEXP (x, 0))
1987 /* User declared labels must be preserved. */
1988 if (LABEL_NAME (label) != 0)
1994 /* Blocks A and B are to be merged into a single block. A has no incoming
1995 fallthru edge, so it can be moved before B without adding or modifying
1996 any jumps (aside from the jump from A to B). */
1999 merge_blocks_move_predecessor_nojumps (a, b)
2002 rtx start, end, barrier;
2008 /* We want to delete the BARRIER after the end of the insns we are
2009 going to move. If we don't find a BARRIER, then do nothing. This
2010 can happen in some cases if we have labels we can not delete.
2012 Similarly, do nothing if we can not delete the label at the start
2013 of the target block. */
2014 barrier = next_nonnote_insn (end);
2015 if (GET_CODE (barrier) != BARRIER
2016 || (GET_CODE (b->head) == CODE_LABEL
2017 && ! can_delete_label_p (b->head)))
2020 flow_delete_insn (barrier);
2022 /* Move block and loop notes out of the chain so that we do not
2023 disturb their order.
2025 ??? A better solution would be to squeeze out all the non-nested notes
2026 and adjust the block trees appropriately. Even better would be to have
2027 a tighter connection between block trees and rtl so that this is not
2029 start = squeeze_notes (start, end);
2031 /* Scramble the insn chain. */
/* Only move if A's insns are not already immediately before B.  */
2032 if (end != PREV_INSN (b->head))
2033 reorder_insns (start, end, PREV_INSN (b->head))
2056 /* Blocks A and B are to be merged into a single block. B has no outgoing
2057 fallthru edge, so it can be moved after A without adding or modifying
2058 any jumps (aside from the jump from A to B). */
2061 merge_blocks_move_successor_nojumps (a, b)
2064 rtx start, end, barrier;
2069 /* We want to delete the BARRIER after the end of the insns we are
2070 going to move. If we don't find a BARRIER, then do nothing. This
2071 can happen in some cases if we have labels we can not delete.
2073 Similarly, do nothing if we can not delete the label at the start
2074 of the target block. */
2075 barrier = next_nonnote_insn (end);
2076 if (GET_CODE (barrier) != BARRIER
2077 || (GET_CODE (b->head) == CODE_LABEL
2078 && ! can_delete_label_p (b->head)))
2081 flow_delete_insn (barrier);
2083 /* Move block and loop notes out of the chain so that we do not
2084 disturb their order.
2086 ??? A better solution would be to squeeze out all the non-nested notes
2087 and adjust the block trees appropriately. Even better would be to have
2088 a tighter connection between block trees and rtl so that this is not
2090 start = squeeze_notes (start, end);
2092 /* Scramble the insn chain. */
/* Move B's insns to sit directly after A's end, then merge.  */
2093 reorder_insns (start, end, a->end);
2095 /* Now blocks A and B are contiguous. Merge them. */
2096 merge_blocks_nomove (a, b);
2100 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2101 b->index, a->index);
2107 /* Blocks A and B are to be merged into a single block. The insns
2108 are already contiguous, hence `nomove'. */
2111 merge_blocks_nomove (a, b)
2115 rtx b_head, b_end, a_end;
2118 /* If there was a CODE_LABEL beginning B, delete it. */
2121 if (GET_CODE (b_head) == CODE_LABEL
2123 /* Detect basic blocks with nothing but a label. This can happen
2124 in particular at the end of a function. */
2125 if (b_head == b_end)
2127 b_head = flow_delete_insn (b_head);
2130 /* Delete the basic block note. */
2131 if (GET_CODE (b_head) == NOTE
2132 && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
2134 if (b_head == b_end)
2136 b_head = flow_delete_insn (b_head);
2139 /* If there was a jump out of A, delete it. */
2141 if (GET_CODE (a_end) == JUMP_INSN)
2145 prev = prev_nonnote_insn (a_end);
2150 /* If this was a conditional jump, we need to also delete
2151 the insn that set cc0. */
/* cc0 targets only: the cc0 setter directly precedes the jump.  */
2153 if (prev && sets_cc0_p (prev))
2156 prev = prev_nonnote_insn (prev);
2159 flow_delete_insn (tmp);
2163 /* Note that a->head != a->end, since we should have at least a
2164 bb note plus the jump, so prev != insn. */
2165 flow_delete_insn (a_end);
2169 /* By definition, there should only be one successor of A, and that is
2170 B. Free that edge struct. */
2174 /* Adjust the edges out of B for the new owner. */
2175 for (e = b->succ; e ; e = e->succ_next)
2179 /* Reassociate the insns of B with A. */
2182 BLOCK_FOR_INSN (b_head) = a;
2183 while (b_head != b_end)
2185 b_head = NEXT_INSN (b_head);
2186 BLOCK_FOR_INSN (b_head) = a;
2192 /* Compact the basic block array. */
2196 /* Attempt to merge basic blocks that are potentially non-adjacent.
2197 Return true iff the attempt succeeded. */
2200 merge_blocks (e, b, c)
2204 /* If B has a fallthru edge to C, no need to move anything. */
2205 if (e->flags & EDGE_FALLTHRU)
2207 /* If a label still appears somewhere and we cannot delete the label,
2208 then we cannot merge the blocks. The edge was tidied already. */
/* Scan the insns between B's end and C's head for undeletable labels.  */
2210 rtx insn, stop = NEXT_INSN (c->head);
2211 for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
2212 if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
2215 merge_blocks_nomove (b, c);
2219 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2220 b->index, c->index);
2229 int c_has_outgoing_fallthru;
2230 int b_has_incoming_fallthru;
2232 /* We must make sure to not munge nesting of exception regions,
2233 lexical blocks, and loop notes.
2235 The first is taken care of by requiring that the active eh
2236 region at the end of one block always matches the active eh
2237 region at the beginning of the next block.
2239 The later two are taken care of by squeezing out all the notes. */
2241 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2242 executed and we may want to treat blocks which have two out
2243 edges, one normal, one abnormal as only having one edge for
2244 block merging purposes. */
2246 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2247 if (tmp_edge->flags & EDGE_FALLTHRU)
2249 c_has_outgoing_fallthru = (tmp_edge != NULL);
2251 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2252 if (tmp_edge->flags & EDGE_FALLTHRU)
2254 b_has_incoming_fallthru = (tmp_edge != NULL);
2256 /* If B does not have an incoming fallthru, and the exception regions
2257 match, then it can be moved immediately before C without introducing
2260 C can not be the first block, so we do not have to worry about
2261 accessing a non-existent block. */
2262 d = BASIC_BLOCK (c->index - 1);
2263 if (! b_has_incoming_fallthru
2264 && d->eh_end == b->eh_beg
2265 && b->eh_end == c->eh_beg)
2266 return merge_blocks_move_predecessor_nojumps (b, c);
2268 /* Otherwise, we're going to try to move C after B. Make sure the
2269 exception regions match.
2271 If B is the last basic block, then we must not try to access the
2272 block structure for block B + 1. Luckily in that case we do not
2273 need to worry about matching exception regions. */
2274 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2275 if (b->eh_end == c->eh_beg
2276 && (d == NULL || c->eh_end == d->eh_beg))
2278 /* If C does not have an outgoing fallthru, then it can be moved
2279 immediately after B without introducing or modifying jumps. */
2280 if (! c_has_outgoing_fallthru)
2281 return merge_blocks_move_successor_nojumps (b, c);
2283 /* Otherwise, we'll need to insert an extra jump, and possibly
2284 a new block to contain it. */
2285 /* ??? Not implemented yet. */
2292 /* Top level driver for merge_blocks. */
/* NOTE(review): the function name/signature is elided in this extract
   -- TODO confirm (likely the merge-blocks driver).  */
2299 /* Attempt to merge blocks as made possible by edge removal. If a block
2300 has only one successor, and the successor has only one predecessor,
2301 they may be combined. */
2303 for (i = 0; i < n_basic_blocks; )
2305 basic_block c, b = BASIC_BLOCK (i);
2308 /* A loop because chains of blocks might be combineable. */
2309 while ((s = b->succ) != NULL
2310 && s->succ_next == NULL
2311 && (s->flags & EDGE_EH) == 0
2312 && (c = s->dest) != EXIT_BLOCK_PTR
2313 && c->pred->pred_next == NULL
2314 /* If the jump insn has side effects, we can't kill the edge. */
2315 && (GET_CODE (b->end) != JUMP_INSN
2316 || onlyjump_p (b->end))
2317 && merge_blocks (s, b, c))
2320 /* Don't get confused by the index shift caused by deleting blocks. */
2325 /* The given edge should potentially a fallthru edge. If that is in
2326 fact true, delete the unconditional jump and barriers that are in
2330 tidy_fallthru_edge (e, b, c)
2336 /* ??? In a late-running flow pass, other folks may have deleted basic
2337 blocks by nopping out blocks, leaving multiple BARRIERs between here
2338 and the target label. They ought to be chastized and fixed.
2340 We can also wind up with a sequence of undeletable labels between
2341 one block and the next.
2343 So search through a sequence of barriers, labels, and notes for
2344 the head of block C and assert that we really do fall through. */
2346 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2349 /* Remove what will soon cease being the jump insn from the source block.
2350 If block B consisted only of this single jump, turn it into a deleted
2353 if (GET_CODE (q) == JUMP_INSN)
2356 /* If this was a conditional jump, we need to also delete
2357 the insn that set cc0. */
2358 if (! simplejump_p (q) && condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
/* The jump is the whole block: keep a placeholder deleted note so B
   does not become empty.  */
2365 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2366 NOTE_SOURCE_FILE (q) = 0;
/* Back B's end up past the removed jump.  */
2369 b->end = q = PREV_INSN (q);
2372 /* Selectively unlink the sequence. */
2373 if (q != PREV_INSN (c->head))
2374 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
/* B now genuinely falls through to C.  */
2376 e->flags |= EDGE_FALLTHRU;
2379 /* Discover and record the loop depth at the head of each basic block. */
2382 calculate_loop_depth (insns)
/* DEPTH starts at 1 so code outside any loop has depth 1, and depth 0
   signals the bookkeeping error mentioned below.  */
2387 int i = 0, depth = 1;
2389 bb = BASIC_BLOCK (i);
2390 for (insn = insns; insn ; insn = NEXT_INSN (insn))
/* Record the current nesting depth at each block head, in order.  */
2392 if (insn == bb->head)
2394 bb->loop_depth = depth;
2395 if (++i >= n_basic_blocks)
2397 bb = BASIC_BLOCK (i);
/* Track nesting via LOOP_BEG / LOOP_END notes.  */
2400 if (GET_CODE (insn) == NOTE)
2402 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2404 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
2407 /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error. */
2414 /* Perform data flow analysis.
2415 F is the first insn of the function and NREGS the number of register numbers
2419 life_analysis (f, nregs, file, remove_dead_code)
2423 int remove_dead_code;
2425 #ifdef ELIMINABLE_REGS
/* Static: the target's ELIMINABLE_REGS initializer is constant.  */
2427 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2431 /* Record which registers will be eliminated. We use this in
2434 CLEAR_HARD_REG_SET (elim_reg_set);
2436 #ifdef ELIMINABLE_REGS
2437 for (i = 0; i < sizeof eliminables / sizeof eliminables[0]; i++)
2438 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
/* No ELIMINABLE_REGS: only the frame pointer can be eliminated.  */
2440 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2443 /* Allocate a bitmap to be filled in by record_volatile_insns. */
2444 uid_volatile = BITMAP_XMALLOC ();
2446 /* We want alias analysis information for local dead store elimination. */
2447 init_alias_analysis ();
2450 if (! remove_dead_code)
2451 flags &= ~(PROP_SCAN_DEAD_CODE | PROP_KILL_DEAD_CODE);
2452 life_analysis_1 (f, nregs, flags);
2454 if (! reload_completed)
2455 mark_constant_function ();
2457 end_alias_analysis ();
2460 dump_flow_info (file);
/* Release per-pass resources; basic_block_info itself is kept.  */
2462 BITMAP_XFREE (uid_volatile);
2463 free_basic_block_vars (1);
2466 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2467 Search for REGNO. If found, abort if it is not wider than word_mode. */
2470 verify_wide_reg_1 (px, pregno)
/* PREGNO points at the register number being searched for.  */
2475 int regno = *(int *) pregno;
2477 if (GET_CODE (x) == REG && REGNO (x) == regno)
/* Found REGNO in a narrow (<= word) mode: that violates the caller's
   expectation, so the elided branch presumably aborts -- TODO confirm.  */
2479 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2486 /* A subroutine of verify_local_live_at_start. Search through insns
2487 between HEAD and END looking for register REGNO. */
2490 verify_wide_reg (regno, head, end)
/* Walk each real insn's pattern; verify_wide_reg_1 checks any REG
   matching REGNO (it reads *(int *) of the third argument, so pass
   the address of REGNO).  The "&regno" below repairs a character
   mangling ("&reg" had been corrupted into a registered-sign).  */
2496 if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
2497 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2501 head = NEXT_INSN (head);
2504 /* We didn't find the register at all. Something's way screwy. */
2508 /* A subroutine of update_life_info. Verify that there are no untoward
2509 changes in live_at_start during a local update. */
2512 verify_local_live_at_start (new_live_at_start, bb)
2513 regset new_live_at_start;
2516 if (reload_completed)
2518 /* After reload, there are no pseudos, nor subregs of multi-word
2519 registers. The regsets should exactly match. */
2520 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2527 /* Find the set of changed registers. */
/* XOR leaves exactly the registers whose liveness differs.  */
2528 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2530 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2532 /* No registers should die. */
/* A changed register still set in the old live set means it died --
   the elided branch presumably aborts -- TODO confirm.  */
2533 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2535 /* Verify that the now-live register is wider than word_mode. */
2536 verify_wide_reg (i, bb->head, bb->end);
2541 /* Updates death notes starting with the basic blocks set in BLOCKS.
2543 If LOCAL_ONLY, such as after splitting or peepholeing, we are only
2544 expecting local modifications to basic blocks. If we find extra
2545 registers live at the beginning of a block, then we either killed
2546 useful data, or we have a broken split that wants data not provided.
2547 If we find registers removed from live_at_start, that means we have
2548 a broken peephole that is killing a register it shouldn't.
2550 ??? This is not true in one situation -- when a pre-reload splitter
2551 generates subregs of a multi-word pseudo, current life analysis will
2552 lose the kill. So we _can_ have a pseudo go live. How irritating.
2554 BLOCK_FOR_INSN is assumed to be correct.
2556 ??? PROP_FLAGS should not contain PROP_LOG_LINKS. Need to set up
2557 reg_next_use for that. Including PROP_REG_INFO does not refresh
2558 regs_ever_live unless the caller resets it to zero. */
2561 update_life_info (blocks, extent, prop_flags)
2563 enum update_life_extent extent;
/* Scratch regset reused for each block's live-at-end copy.  */
2569 tmp = ALLOCA_REG_SET ();
2571 /* For a global update, we go through the relaxation process again. */
2572 if (extent != UPDATE_LIFE_LOCAL)
2574 calculate_global_regs_live (blocks, blocks,
2575 prop_flags & PROP_SCAN_DEAD_CODE);
2577 /* If asked, remove notes from the blocks we'll update. */
2578 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2579 count_or_remove_death_notes (blocks, 1);
2582 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2584 basic_block bb = BASIC_BLOCK (i);
/* Propagate liveness backward through the block; TMP ends up holding
   the recomputed live-at-start set.  */
2586 COPY_REG_SET (tmp, bb->global_live_at_end);
2587 propagate_block (tmp, bb->head, bb->end, (regset) NULL, i,
2590 if (extent == UPDATE_LIFE_LOCAL)
2591 verify_local_live_at_start (tmp, bb);
2597 /* Free the variables allocated by find_basic_blocks.
2599 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2602 free_basic_block_vars (keep_head_end_p)
2603 int keep_head_end_p;
2605 if (basic_block_for_insn)
2607 VARRAY_FREE (basic_block_for_insn);
2608 basic_block_for_insn = NULL;
2611 if (! keep_head_end_p)
2614 VARRAY_FREE (basic_block_info);
/* Clear the per-function data hung off the ENTRY and EXIT blocks so
   nothing dangles into freed storage.  */
2617 ENTRY_BLOCK_PTR->aux = NULL;
2618 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2619 EXIT_BLOCK_PTR->aux = NULL;
2620 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2624 /* Return nonzero if the destination of SET equals the source. */
2629 rtx src = SET_SRC (set);
2630 rtx dst = SET_DEST (set);
/* A direct REG-to-same-REG copy is a no-op.  */
2631 if (GET_CODE (src) == REG && GET_CODE (dst) == REG
2632 && REGNO (src) == REGNO (dst))
/* Otherwise, the only remaining no-op form is matching SUBREGs of the
   same register at the same word offset.  */
2634 if (GET_CODE (src) != SUBREG || GET_CODE (dst) != SUBREG
2635 || SUBREG_WORD (src) != SUBREG_WORD (dst))
/* Strip the SUBREGs and compare the inner registers.  */
2637 src = SUBREG_REG (src);
2638 dst = SUBREG_REG (dst);
2639 if (GET_CODE (src) == REG && GET_CODE (dst) == REG
2640 && REGNO (src) == REGNO (dst))
2645 /* Return nonzero if an insn consists only of SETs, each of which only sets a
value to itself (i.e. the insn is a no-op move).  */
2651 rtx pat = PATTERN (insn);
2653 /* Insns carrying these notes are useful later on. */
2654 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2657 if (GET_CODE (pat) == SET && set_noop_p (pat))
2660 if (GET_CODE (pat) == PARALLEL)
2663 /* If nothing but SETs of registers to themselves,
2664 this insn can also be deleted. */
2665 for (i = 0; i < XVECLEN (pat, 0); i++)
2667 rtx tem = XVECEXP (pat, 0, i);
/* USEs and CLOBBERs inside the PARALLEL do not disqualify the insn.  */
2669 if (GET_CODE (tem) == USE
2670 || GET_CODE (tem) == CLOBBER)
2673 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
/* Callback for note_stores: clear current_function_sp_is_unchanging
   when the stored-to location X modifies the stack pointer, either
   directly or via a push/pop addressing mode.  */
2683 notice_stack_pointer_modification (x, pat, data)
2685 rtx pat ATTRIBUTE_UNUSED;
2686 void *data ATTRIBUTE_UNUSED;
2688 if (x == stack_pointer_rtx
2689 /* The stack pointer is only modified indirectly as the result
2690 of a push until later in flow. See the comments in rtl.texi
2691 regarding Embedded Side-Effects on Addresses. */
2692 || (GET_CODE (x) == MEM
2693 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2694 || GET_CODE (XEXP (x, 0)) == PRE_INC
2695 || GET_CODE (XEXP (x, 0)) == POST_DEC
2696 || GET_CODE (XEXP (x, 0)) == POST_INC)
2697 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2698 current_function_sp_is_unchanging = 0;
2701 /* Record which insns refer to any volatile memory
2702 or for any reason can't be deleted just because they are dead stores.
2703 Also, delete any insns that copy a register to itself.
2704 And see if the stack pointer is modified. */
/* F is the first insn of the current function; the whole insn chain is
   scanned linearly.  */
2706 record_volatile_insns (f)
2710 for (insn = f; insn; insn = NEXT_INSN (insn))
2712 enum rtx_code code1 = GET_CODE (insn);
/* Calls are always volatile: they can never be deleted as dead.  */
2713 if (code1 == CALL_INSN)
2714 SET_INSN_VOLATILE (insn);
2715 else if (code1 == INSN || code1 == JUMP_INSN)
2717 if (GET_CODE (PATTERN (insn)) != USE
2718 && volatile_refs_p (PATTERN (insn)))
2719 SET_INSN_VOLATILE (insn);
2721 /* A SET that makes space on the stack cannot be dead.
2722 (Such SETs occur only for allocating variable-size data,
2723 so they will always have a PLUS or MINUS according to the
2724 direction of stack growth.)
2725 Even if this function never uses this stack pointer value,
2726 signal handlers do! */
2727 else if (code1 == INSN && GET_CODE (PATTERN (insn)) == SET
2728 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
2729 #ifdef STACK_GROWS_DOWNWARD
2730 && GET_CODE (SET_SRC (PATTERN (insn))) == MINUS
2732 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
2734 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx)
2735 SET_INSN_VOLATILE (insn);
2737 /* Delete (in effect) any obvious no-op moves. */
/* Deletion is done by turning the insn into a NOTE_INSN_DELETED note
   rather than unlinking it, so block head/end pointers stay valid.  */
2738 else if (noop_move_p (insn))
2740 PUT_CODE (insn, NOTE);
2741 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2742 NOTE_SOURCE_FILE (insn) = 0;
2746 /* Check if insn modifies the stack pointer. */
2747 if ( current_function_sp_is_unchanging
2748 && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2749 note_stores (PATTERN (insn),
2750 notice_stack_pointer_modification,
2755 /* Mark a register in SET. Hard registers in large modes get all
2756 of their component registers set as well. */
2762 int regno = REGNO (reg);
2764 SET_REGNO_REG_SET (set, regno);
2765 if (regno < FIRST_PSEUDO_REGISTER)
2767 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
/* NOTE(review): this appears to run inside a loop over the N component
   hard registers -- confirm the loop bounds against the full source.  */
2769 SET_REGNO_REG_SET (set, regno + n);
2773 /* Mark those regs which are needed at the end of the function as live
2774 at the end of the last basic block. */
/* SET is the regset to fill in (the EXIT block's live-at-start set).  */
2776 mark_regs_live_at_end (set)
2782 /* If exiting needs the right stack value, consider the stack pointer
2783 live at the end of the function. */
2784 if ((HAVE_epilogue && reload_completed)
2785 || ! EXIT_IGNORE_STACK
2786 || (! FRAME_POINTER_REQUIRED
2787 && ! current_function_calls_alloca
2788 && flag_omit_frame_pointer)
2789 || current_function_sp_is_unchanging)
2791 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2794 /* Mark the frame pointer if needed at the end of the function. If
2795 we end up eliminating it, it will be removed from the live list
2796 of each basic block by reload. */
2798 if (! reload_completed || frame_pointer_needed)
2800 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2801 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2802 /* If they are different, also mark the hard frame pointer as live */
2803 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
2807 #ifdef PIC_OFFSET_TABLE_REGNUM
2808 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
2809 /* Many architectures have a GP register even without flag_pic.
2810 Assume the pic register is not in use, or will be handled by
2811 other means, if it is not fixed. */
2812 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
2813 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
2817 /* Mark all global registers, and all registers used by the epilogue
2818 as being live at the end of the function since they may be
2819 referenced by our caller. */
2820 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2822 #ifdef EPILOGUE_USES
2823 || EPILOGUE_USES (i)
2826 SET_REGNO_REG_SET (set, i);
2828 /* Mark all call-saved registers that we actually used. */
2829 if (HAVE_epilogue && reload_completed)
2831 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2832 if (! call_used_regs[i] && regs_ever_live[i])
2833 SET_REGNO_REG_SET (set, i);
2836 /* Mark function return value. */
2837 /* ??? Only do this after reload. Consider a non-void function that
2838 omits a return statement. Across that edge we'll have the return
2839 register live, and no set for it. Thus the return register will
2840 be live back through the CFG to the entry, and thus we die. A
2841 possible solution is to emit a clobber at exits without returns. */
2843 type = TREE_TYPE (DECL_RESULT (current_function_decl));
2844 if (reload_completed
2845 && type != void_type_node)
/* Struct-returning functions really return a pointer to the struct.  */
2849 if (current_function_returns_struct
2850 || current_function_returns_pcc_struct)
2851 type = build_pointer_type (type);
2853 #ifdef FUNCTION_OUTGOING_VALUE
2854 outgoing = FUNCTION_OUTGOING_VALUE (type, current_function_decl);
2856 outgoing = FUNCTION_VALUE (type, current_function_decl);
2859 if (GET_CODE (outgoing) == REG)
2860 mark_reg (set, outgoing);
2861 else if (GET_CODE (outgoing) == PARALLEL)
2863 int len = XVECLEN (outgoing, 0);
2865 /* Check for a NULL entry, used to indicate that the parameter
2866 goes on the stack and in registers. */
2867 i = (XEXP (XVECEXP (outgoing, 0, 0), 0) ? 0 : 1);
2869 for ( ; i < len; ++i)
2871 rtx r = XVECEXP (outgoing, 0, i);
2872 if (GET_CODE (r) == REG)
2881 /* Determine which registers are live at the start of each
2882 basic block of the function whose first insn is F.
2883 NREGS is the number of registers used in F.
2884 We allocate the vector basic_block_live_at_start
2885 and the regsets that it points to, and fill them with the data.
2886 regset_size and regset_bytes are also set here. */
/* FLAGS is a mask of PROP_* operation bits (see update_life_info).  */
2889 life_analysis_1 (f, nregs, flags)
2894 char save_regs_ever_live[FIRST_PSEUDO_REGISTER];
2899 /* Allocate and zero out many data structures
2900 that will record the data from lifetime analysis. */
2902 allocate_reg_life_data ();
2903 allocate_bb_life_data ();
2905 reg_next_use = (rtx *) xcalloc (nregs, sizeof (rtx));
2907 /* Assume that the stack pointer is unchanging if alloca hasn't been used.
2908 This will be cleared by record_volatile_insns if it encounters an insn
2909 which modifies the stack pointer. */
2910 current_function_sp_is_unchanging = !current_function_calls_alloca;
2911 record_volatile_insns (f);
2913 /* Find the set of registers live on function exit. Do this before
2914 zeroing regs_ever_live, as we use that data post-reload. */
2915 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2917 /* The post-reload life analysis have (on a global basis) the same
2918 registers live as was computed by reload itself.
2919 Otherwise offsets and such may be incorrect.
2921 Reload will mark some registers as live even though they do not
2922 appear in the rtl. */
2923 if (reload_completed)
2924 memcpy (save_regs_ever_live, regs_ever_live, sizeof (regs_ever_live));
2925 memset (regs_ever_live, 0, sizeof regs_ever_live);
2927 /* Compute register life at block boundaries. It'd be nice to
2928 begin with just the exit and noreturn blocks, but that set
2929 is not immediately handy. */
2932 blocks = sbitmap_alloc (n_basic_blocks);
2933 sbitmap_ones (blocks);
2934 calculate_global_regs_live (blocks, blocks, flags & PROP_SCAN_DEAD_CODE);
2935 sbitmap_free (blocks);
2938 /* The only pseudos that are live at the beginning of the function are
2939 those that were not set anywhere in the function. local-alloc doesn't
2940 know how to handle these correctly, so mark them as not local to any
one basic block.  */
2943 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2944 FIRST_PSEUDO_REGISTER, i,
2945 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2947 /* Now the life information is accurate. Make one more pass over each
2948 basic block to delete dead stores, create autoincrement addressing
2949 and record how many times each register is used, is set, or dies. */
/* TMP is a scratch regset consumed by propagate_block below.  */
2952 tmp = ALLOCA_REG_SET ();
2954 for (i = n_basic_blocks - 1; i >= 0; --i)
2956 basic_block bb = BASIC_BLOCK (i);
2958 COPY_REG_SET (tmp, bb->global_live_at_end);
2959 propagate_block (tmp, bb->head, bb->end, (regset) NULL, i, flags);
2965 /* We have a problem with any pseudoreg that lives across the setjmp.
2966 ANSI says that if a user variable does not change in value between
2967 the setjmp and the longjmp, then the longjmp preserves it. This
2968 includes longjmp from a place where the pseudo appears dead.
2969 (In principle, the value still exists if it is in scope.)
2970 If the pseudo goes in a hard reg, some other value may occupy
2971 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2972 Conclusion: such a pseudo must not go in a hard reg. */
2973 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2974 FIRST_PSEUDO_REGISTER, i,
2976 if (regno_reg_rtx[i] != 0)
2978 REG_LIVE_LENGTH (i) = -1;
2979 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
2983 /* Restore regs_ever_live that was provided by reload. */
2984 if (reload_completed)
2985 memcpy (regs_ever_live, save_regs_ever_live, sizeof (regs_ever_live));
2988 free (reg_next_use);
2989 reg_next_use = NULL;
2992 /* Propagate global life info around the graph of basic blocks. Begin
2993 considering blocks with their corresponding bit set in BLOCKS_IN.
2994 BLOCKS_OUT is set for every block that was changed. */
/* FLAGS is the PROP_* mask passed through to propagate_block.  This is
   a standard backward dataflow fixed-point iteration over a worklist.  */
2997 calculate_global_regs_live (blocks_in, blocks_out, flags)
2998 sbitmap blocks_in, blocks_out;
3001 basic_block *queue, *qhead, *qtail, *qend;
3002 regset tmp, new_live_at_end;
3005 tmp = ALLOCA_REG_SET ();
3006 new_live_at_end = ALLOCA_REG_SET ();
3008 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
3009 because the `head == tail' style test for an empty queue doesn't
3010 work with a full queue. */
3011 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
3013 qhead = qend = queue + n_basic_blocks + 2;
3015 /* Queue the blocks set in the initial mask. Do this in reverse block
3016 number order so that we are more likely for the first round to do
3017 useful work. We use AUX non-null to flag that the block is queued. */
3018 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3020 basic_block bb = BASIC_BLOCK (i);
3025 sbitmap_zero (blocks_out);
/* Iterate until the worklist is empty, i.e. a fixed point is reached.  */
3027 while (qhead != qtail)
3029 int rescan, changed;
3038 /* Begin by propagating live_at_start from the successor blocks. */
3039 CLEAR_REG_SET (new_live_at_end);
3040 for (e = bb->succ; e ; e = e->succ_next)
3042 basic_block sb = e->dest;
3043 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
/* The entry block has no insns of its own; just record the result.  */
3046 if (bb == ENTRY_BLOCK_PTR)
3048 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3052 /* On our first pass through this block, we'll go ahead and continue.
3053 Recognize first pass by local_set NULL. On subsequent passes, we
3054 get to skip out early if live_at_end wouldn't have changed. */
3056 if (bb->local_set == NULL)
3058 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3063 /* If any bits were removed from live_at_end, we'll have to
3064 rescan the block. This wouldn't be necessary if we had
3065 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3066 local_live is really dependent on live_at_end. */
3067 CLEAR_REG_SET (tmp);
3068 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3069 new_live_at_end, BITMAP_AND_COMPL);
3073 /* Find the set of changed bits. Take this opportunity
3074 to notice that this set is empty and early out. */
3075 CLEAR_REG_SET (tmp);
3076 changed = bitmap_operation (tmp, bb->global_live_at_end,
3077 new_live_at_end, BITMAP_XOR);
3081 /* If any of the changed bits overlap with local_set,
3082 we'll have to rescan the block. Detect overlap by
3083 the AND with ~local_set turning off bits. */
3084 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3089 /* Let our caller know that BB changed enough to require its
3090 death notes updated. */
3091 SET_BIT (blocks_out, bb->index);
3095 /* Add to live_at_start the set of all registers in
3096 new_live_at_end that aren't in the old live_at_end. */
3098 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3100 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3102 changed = bitmap_operation (bb->global_live_at_start,
3103 bb->global_live_at_start,
3110 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3112 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3113 into live_at_start. */
3114 propagate_block (new_live_at_end, bb->head, bb->end,
3115 bb->local_set, bb->index, flags);
3117 /* If live_at_start didn't change, no need to go farther. */
3118 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3121 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3124 /* Queue all predecessors of BB so that we may re-examine
3125 their live_at_end. */
3126 for (e = bb->pred; e ; e = e->pred_next)
3128 basic_block pb = e->src;
3129 if (pb->aux == NULL)
/* Clean up the scratch regsets and any per-block local_set data.  */
3140 FREE_REG_SET (new_live_at_end);
3142 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3144 basic_block bb = BASIC_BLOCK (i);
3145 FREE_REG_SET (bb->local_set);
3151 /* Subroutines of life analysis. */
3153 /* Allocate the permanent data structures that represent the results
3154 of life analysis. Not static since used also for stupid life analysis. */
3157 allocate_bb_life_data ()
/* Give every real basic block fresh live-at-start/live-at-end regsets,
   allocated on the function obstack.  */
3161 for (i = 0; i < n_basic_blocks; i++)
3163 basic_block bb = BASIC_BLOCK (i);
3165 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3166 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
/* The ENTRY and EXIT blocks only need the sets facing the function
   body (live-at-end of ENTRY, live-at-start of EXIT).  */
3169 ENTRY_BLOCK_PTR->global_live_at_end
3170 = OBSTACK_ALLOC_REG_SET (function_obstack);
3171 EXIT_BLOCK_PTR->global_live_at_start
3172 = OBSTACK_ALLOC_REG_SET (function_obstack);
3174 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
/* Allocate and reset the per-register data collected by life analysis
   (death counts, call-crossing counts, live lengths, home block).  */
3178 allocate_reg_life_data ()
3182 /* Recalculate the register space, in case it has grown. Old style
3183 vector oriented regsets would set regset_{size,bytes} here also. */
3184 allocate_reg_info (max_regno, FALSE, FALSE);
3186 /* Reset all the data we'll collect in propagate_block and its
subroutines.  */
3188 for (i = 0; i < max_regno; i++)
3192 REG_N_DEATHS (i) = 0;
3193 REG_N_CALLS_CROSSED (i) = 0;
3194 REG_LIVE_LENGTH (i) = 0;
3195 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3199 /* Compute the registers live at the beginning of a basic block
3200 from those live at the end.
3202 When called, OLD contains those live at the end.
3203 On return, it contains those live at the beginning.
3204 FIRST and LAST are the first and last insns of the basic block.
3206 FINAL is nonzero if we are doing the final pass which is not
3207 for computing the life info (since that has already been done)
3208 but for acting on it. On this pass, we delete dead stores,
3209 set up the logical links and dead-variables lists of instructions,
3210 and merge instructions for autoincrement and autodecrement addresses.
3212 SIGNIFICANT is nonzero only the first time for each basic block.
3213 If it is nonzero, it points to a regset in which we store
3214 a 1 for each register that is set within the block.
3216 BNUM is the number of the basic block. */
/* FLAGS is the PROP_* operation mask.  The block is scanned backwards,
   one insn at a time, updating OLD as the running live set.  */
3219 propagate_block (old, first, last, significant, bnum, flags)
3220 register regset old;
3232 /* Find the loop depth for this block. Ignore loop level changes in the
3233 middle of the basic block -- for register allocation purposes, the
3234 important uses will be in the blocks wholly contained within the loop
3235 not in the loop pre-header or post-trailer. */
3236 loop_depth = BASIC_BLOCK (bnum)->loop_depth;
/* DEAD and LIVE are per-insn scratch sets: regs set by the insn and
   regs used by it, respectively.  */
3238 dead = ALLOCA_REG_SET ();
3239 live = ALLOCA_REG_SET ();
3243 if (flags & PROP_REG_INFO)
3247 /* Process the regs live at the end of the block.
3248 Mark them as not local to any one basic block. */
3249 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3251 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
3255 /* Scan the block an insn at a time from end to beginning. */
3257 for (insn = last; ; insn = prev)
3259 prev = PREV_INSN (insn);
3261 if (GET_CODE (insn) == NOTE)
3263 /* If this is a call to `setjmp' et al,
3264 warn if any non-volatile datum is live. */
3266 if ((flags & PROP_REG_INFO)
3267 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3268 IOR_REG_SET (regs_live_at_setjmp, old);
3271 /* Update the life-status of regs for this insn.
3272 First DEAD gets which regs are set in this insn
3273 then LIVE gets which regs are used in this insn.
3274 Then the regs live before the insn
3275 are those live after, with DEAD regs turned off,
3276 and then LIVE regs turned on. */
3278 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
/* NOTE is the REG_RETVAL note if this insn ends a libcall sequence.  */
3281 rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3282 int insn_is_dead = 0;
3283 int libcall_is_dead = 0;
3285 if (flags & PROP_SCAN_DEAD_CODE)
3287 insn_is_dead = (insn_dead_p (PATTERN (insn), old, 0, REG_NOTES (insn))
3288 /* Don't delete something that refers to volatile storage! */
3289 && ! INSN_VOLATILE (insn));
3290 libcall_is_dead = (insn_is_dead && note != 0
3291 && libcall_dead_p (PATTERN (insn), old, note, insn));
3294 /* We almost certainly don't want to delete prologue or epilogue
3295 instructions. Warn about probable compiler losage. */
3296 if ((flags & PROP_KILL_DEAD_CODE)
3299 && (HAVE_epilogue || HAVE_prologue)
3300 && prologue_epilogue_contains (insn))
3302 warning ("ICE: would have deleted prologue/epilogue insn");
3304 libcall_is_dead = insn_is_dead = 0;
3307 /* If an instruction consists of just dead store(s) on final pass,
3308 "delete" it by turning it into a NOTE of type NOTE_INSN_DELETED.
3309 We could really delete it with delete_insn, but that
3310 can cause trouble for first or last insn in a basic block. */
3311 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3314 /* If the insn referred to a label, note that the label is
going away.  */
3316 for (inote = REG_NOTES (insn); inote; inote = XEXP (inote, 1))
3318 if (REG_NOTE_KIND (inote) == REG_LABEL)
3320 rtx label = XEXP (inote, 0);
3322 LABEL_NUSES (label)--;
3324 /* If this label was attached to an ADDR_VEC, it's
3325 safe to delete the ADDR_VEC. In fact, it's pretty much
3326 mandatory to delete it, because the ADDR_VEC may
3327 be referencing labels that no longer exist. */
3328 if (LABEL_NUSES (label) == 0
3329 && (next = next_nonnote_insn (label)) != NULL
3330 && GET_CODE (next) == JUMP_INSN
3331 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3332 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3334 rtx pat = PATTERN (next);
3335 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3336 int len = XVECLEN (pat, diff_vec_p);
/* Drop a use count for every label the jump table references, then
   turn the jump table itself into a deleted note.  */
3338 for (i = 0; i < len; i++)
3339 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3340 PUT_CODE (next, NOTE);
3341 NOTE_LINE_NUMBER (next) = NOTE_INSN_DELETED;
3342 NOTE_SOURCE_FILE (next) = 0;
3347 PUT_CODE (insn, NOTE);
3348 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
3349 NOTE_SOURCE_FILE (insn) = 0;
3351 /* CC0 is now known to be dead. Either this insn used it,
3352 in which case it doesn't anymore, or clobbered it,
3353 so the next insn can't use it. */
3356 /* If this insn is copying the return value from a library call,
3357 delete the entire library call. */
3358 if (libcall_is_dead)
3360 rtx first = XEXP (note, 0);
/* Skip over any insns of the libcall sequence already deleted.  */
3362 while (INSN_DELETED_P (first))
3363 first = NEXT_INSN (first);
3368 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3369 NOTE_SOURCE_FILE (p) = 0;
3375 CLEAR_REG_SET (dead);
3376 CLEAR_REG_SET (live);
3378 /* See if this is an increment or decrement that can be
3379 merged into a following memory address. */
3382 register rtx x = single_set (insn);
3384 /* Does this instruction increment or decrement a register? */
3385 if (!reload_completed
3386 && (flags & PROP_AUTOINC)
3388 && GET_CODE (SET_DEST (x)) == REG
3389 && (GET_CODE (SET_SRC (x)) == PLUS
3390 || GET_CODE (SET_SRC (x)) == MINUS)
3391 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3392 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3393 /* Ok, look for a following memory ref we can combine with.
3394 If one is found, change the memory ref to a PRE_INC
3395 or PRE_DEC, cancel this insn, and return 1.
3396 Return 0 if nothing has been done. */
3397 && try_pre_increment_1 (insn))
3400 #endif /* AUTO_INC_DEC */
3402 /* If this is not the final pass, and this insn is copying the
3403 value of a library call and it's dead, don't scan the
3404 insns that perform the library call, so that the call's
3405 arguments are not marked live. */
3406 if (libcall_is_dead)
3408 /* Mark the dest reg as `significant'. */
3409 mark_set_regs (old, dead, PATTERN (insn), NULL_RTX,
3410 significant, flags)
3412 insn = XEXP (note, 0);
3413 prev = PREV_INSN (insn);
3415 else if (GET_CODE (PATTERN (insn)) == SET
3416 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3417 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3418 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3419 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3420 /* We have an insn to pop a constant amount off the stack.
3421 (Such insns use PLUS regardless of the direction of the stack,
3422 and any insn to adjust the stack by a constant is always a pop.)
3423 These insns, if not dead stores, have no effect on life. */
3427 /* Any regs live at the time of a call instruction
3428 must not go in a register clobbered by calls.
3429 Find all regs now live and record this for them. */
3431 if (GET_CODE (insn) == CALL_INSN
3432 && (flags & PROP_REG_INFO))
3433 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3435 REG_N_CALLS_CROSSED (i)++;
3438 /* LIVE gets the regs used in INSN;
3439 DEAD gets those set by it. Dead insns don't make anything
3442 mark_set_regs (old, dead, PATTERN (insn),
3443 insn, significant, flags);
3445 /* If an insn doesn't use CC0, it becomes dead since we
3446 assume that every insn clobbers it. So show it dead here;
3447 mark_used_regs will set it live if it is referenced. */
3451 mark_used_regs (old, live, PATTERN (insn), flags, insn);
3453 /* Sometimes we may have inserted something before INSN (such as
3454 a move) when we make an auto-inc. So ensure we will scan
3457 prev = PREV_INSN (insn);
3460 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
/* Walk CALL_INSN_FUNCTION_USAGE to mark argument registers used.  */
3466 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3468 note = XEXP (note, 1))
3469 if (GET_CODE (XEXP (note, 0)) == USE)
3470 mark_used_regs (old, live, XEXP (XEXP (note, 0), 0),
3473 /* Each call clobbers all call-clobbered regs that are not
3474 global or fixed. Note that the function-value reg is a
3475 call-clobbered reg, and mark_set_regs has already had
3476 a chance to handle it. */
3478 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3479 if (call_used_regs[i] && ! global_regs[i]
3482 SET_REGNO_REG_SET (dead, i);
3484 SET_REGNO_REG_SET (significant, i);
3487 /* The stack ptr is used (honorarily) by a CALL insn. */
3488 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
3490 /* Calls may also reference any of the global registers,
3491 so they are made live. */
3492 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3494 mark_used_regs (old, live,
3495 gen_rtx_REG (reg_raw_mode[i], i),
3498 /* Calls also clobber memory. */
3499 free_EXPR_LIST_list (&mem_set_list);
3502 /* Update OLD for the registers used or set. */
3503 AND_COMPL_REG_SET (old, dead);
3504 IOR_REG_SET (old, live);
3508 /* On final pass, update counts of how many insns each reg is live
at.  */
3510 if (flags & PROP_REG_INFO)
3511 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3512 { REG_LIVE_LENGTH (i)++; });
/* Release the scratch sets and the tracked-memory list.  */
3519 FREE_REG_SET (dead);
3520 FREE_REG_SET (live);
3521 free_EXPR_LIST_list (&mem_set_list);
3524 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3525 (SET expressions whose destinations are registers dead after the insn).
3526 NEEDED is the regset that says which regs are alive after the insn.
3528 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3530 If X is the entire body of an insn, NOTES contains the reg notes
3531 pertaining to the insn. */
3534 insn_dead_p (x, needed, call_ok, notes)
3538 rtx notes ATTRIBUTE_UNUSED;
3540 enum rtx_code code = GET_CODE (x);
3543 /* If flow is invoked after reload, we must take existing AUTO_INC
3544 expressions into account. */
3545 if (reload_completed)
3547 for ( ; notes; notes = XEXP (notes, 1))
3549 if (REG_NOTE_KIND (notes) == REG_INC)
3551 int regno = REGNO (XEXP (notes, 0));
3553 /* Don't delete insns to set global regs. */
3554 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3555 || REGNO_REG_SET_P (needed, regno))
3562 /* If setting something that's a reg or part of one,
3563 see if that register's altered value will be live. */
3567 rtx r = SET_DEST (x);
3569 /* A SET that is a subroutine call cannot be dead. */
3570 if (! call_ok && GET_CODE (SET_SRC (x)) == CALL)
3574 if (GET_CODE (r) == CC0)
3578 if (GET_CODE (r) == MEM && ! MEM_VOLATILE_P (r))
3581 /* Walk the set of memory locations we are currently tracking
3582 and see if one is an identical match to this memory location.
3583 If so, this memory write is dead (remember, we're walking
3584 backwards from the end of the block to the start. */
3585 temp = mem_set_list;
3588 if (rtx_equal_p (XEXP (temp, 0), r))
3590 temp = XEXP (temp, 1);
/* Strip wrappers to reach the underlying register, if any.  */
3594 while (GET_CODE (r) == SUBREG || GET_CODE (r) == STRICT_LOW_PART
3595 || GET_CODE (r) == ZERO_EXTRACT)
3598 if (GET_CODE (r) == REG)
3600 int regno = REGNO (r);
3602 /* Don't delete insns to set global regs. */
3603 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3604 /* Make sure insns to set frame pointer aren't deleted. */
3605 || (regno == FRAME_POINTER_REGNUM
3606 && (! reload_completed || frame_pointer_needed))
3607 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3608 || (regno == HARD_FRAME_POINTER_REGNUM
3609 && (! reload_completed || frame_pointer_needed))
3611 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3612 /* Make sure insns to set arg pointer are never deleted
3613 (if the arg pointer isn't fixed, there will be a USE for
3614 it, so we can treat it normally). */
3615 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3617 || REGNO_REG_SET_P (needed, regno))
3620 /* If this is a hard register, verify that subsequent words are
not needed either.  */
3622 if (regno < FIRST_PSEUDO_REGISTER)
3624 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3627 if (REGNO_REG_SET_P (needed, regno+n))
3635 /* If performing several activities,
3636 insn is dead if each activity is individually dead.
3637 Also, CLOBBERs and USEs can be ignored; a CLOBBER or USE
3638 that's inside a PARALLEL doesn't make the insn worth keeping. */
3639 else if (code == PARALLEL)
3641 int i = XVECLEN (x, 0);
3643 for (i--; i >= 0; i--)
3644 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3645 && GET_CODE (XVECEXP (x, 0, i)) != USE
3646 && ! insn_dead_p (XVECEXP (x, 0, i), needed, call_ok, NULL_RTX))
3652 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3653 is not necessarily true for hard registers. */
3654 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3655 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3656 && ! REGNO_REG_SET_P (needed, REGNO (XEXP (x, 0))))
3659 /* We do not check other CLOBBER or USE here. An insn consisting of just
3660 a CLOBBER or just a USE should not be deleted. */
3664 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3665 return 1 if the entire library call is dead.
3666 This is true if X copies a register (hard or pseudo)
3667 and if the hard return reg of the call insn is dead.
3668 (The caller should have tested the destination of X already for death.)
3670 If this insn doesn't just copy a register, then we don't
3671 have an ordinary libcall. In that case, cse could not have
3672 managed to substitute the source for the dest later on,
3673 so we can assume the libcall is dead.
3675 NEEDED is the bit vector of pseudoregs live before this insn.
3676 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
3679 libcall_dead_p (x, needed, note, insn)
3685 register RTX_CODE code = GET_CODE (x);
3689 register rtx r = SET_SRC (x);
3690 if (GET_CODE (r) == REG)
/* The REG_RETVAL note points back at the first insn of the libcall
   sequence; scan forward from there for the CALL_INSN.  */
3692 rtx call = XEXP (note, 0);
3696 /* Find the call insn. */
3697 while (call != insn && GET_CODE (call) != CALL_INSN)
3698 call = NEXT_INSN (call);
3700 /* If there is none, do nothing special,
3701 since ordinary death handling can understand these insns. */
3705 /* See if the hard reg holding the value is dead.
3706 If this is a PARALLEL, find the call within it. */
3707 call_pat = PATTERN (call);
3708 if (GET_CODE (call_pat) == PARALLEL)
3710 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3711 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
3712 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
3715 /* This may be a library call that is returning a value
3716 via invisible pointer. Do nothing special, since
3717 ordinary death handling can understand these insns. */
3721 call_pat = XVECEXP (call_pat, 0, i);
/* The call is dead iff its SET (the value copy) is dead; CALL_OK is
   passed as 1 since a libcall's CALL is expected here.  */
3724 return insn_dead_p (call_pat, needed, 1, REG_NOTES (call));
3730 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3731 live at function entry. Don't count global register variables, variables
3732 in registers that can be used for function arg passing, or variables in
3733 fixed hard registers. */
3736 regno_uninitialized (regno)
/* With no basic blocks there is no life info to consult; also exclude
   hard registers that are global, fixed, or argument-passing regs.  */
3739 if (n_basic_blocks == 0
3740 || (regno < FIRST_PSEUDO_REGISTER
3741 && (global_regs[regno]
3742 || fixed_regs[regno]
3743 || FUNCTION_ARG_REGNO_P (regno))))
/* Live at the start of the first block means live at function entry.  */
3746 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3749 /* 1 if register REGNO was alive at a place where `setjmp' was called
3750 and was set more than once or is an argument.
3751 Such regs may be clobbered by `longjmp'. */
/* NOTE(review): numbered listing with gaps; dropped lines include the
   parameter declaration and braces.  Code lines kept verbatim.  */
3754 regno_clobbered_at_setjmp (regno)
3757 if (n_basic_blocks == 0)
/* "set more than once or is an argument": live at function entry
   counts as the extra set.  */
3760 return ((REG_N_SETS (regno) > 1
3761 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3762 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
3765 /* INSN references memory, possibly using autoincrement addressing modes.
3766 Find any entries on the mem_set_list that need to be invalidated due
3767 to an address change. */
/* NOTE(review): numbered listing with gaps; braces and the splice-bookkeeping
   lines are among the dropped lines.  Code lines kept verbatim.  */
3769 invalidate_mems_from_autoinc (insn)
/* NOTE(review): this initializer is redundant -- the for loop below
   immediately re-assigns `note' from the same expression.  */
3772 rtx note = REG_NOTES (insn);
3773 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3775 if (REG_NOTE_KIND (note) == REG_INC)
3777 rtx temp = mem_set_list;
3778 rtx prev = NULL_RTX;
3783 next = XEXP (temp, 1);
/* Drop any recorded store whose address mentions the auto-incremented reg.  */
3784 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
3786 /* Splice temp out of list. */
3788 XEXP (prev, 1) = next;
3790 mem_set_list = next;
3791 free_EXPR_LIST_node (temp);
3801 /* Process the registers that are set within X. Their bits are set to
3802 1 in the regset DEAD, because they are dead prior to this insn.
3804 If INSN is nonzero, it is the insn being processed.
3806 FLAGS is the set of operations to perform. */
/* NOTE(review): numbered listing with gaps; braces and declarations were
   dropped.  Dispatcher: single SET/CLOBBER goes straight to mark_set_1,
   a PARALLEL has each SET/CLOBBER element handled individually.  */
3809 mark_set_regs (needed, dead, x, insn, significant, flags)
3817 register RTX_CODE code = GET_CODE (x);
3819 if (code == SET || code == CLOBBER)
3820 mark_set_1 (needed, dead, x, insn, significant, flags);
3821 else if (code == PARALLEL)
3824 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3826 code = GET_CODE (XVECEXP (x, 0, i));
3827 if (code == SET || code == CLOBBER)
3828 mark_set_1 (needed, dead, XVECEXP (x, 0, i), insn,
3829 significant, flags);
3834 /* Process a single SET rtx, X. */
/* NOTE(review): numbered listing with gaps -- braces, declarations and some
   statements were dropped during extraction; surviving code lines are kept
   verbatim.  Dangling comments (whose closing lines were dropped) have been
   re-closed below with text reconstructed from context -- verify against the
   original flow.c.  */
3837 mark_set_1 (needed, dead, x, insn, significant, flags)
3845 register int regno = -1;
3846 register rtx reg = SET_DEST (x);
3848 /* Some targets place small structures in registers for
3849 return values of functions. We have to detect this
3850 case specially here to get correct flow information. */
3851 if (GET_CODE (reg) == PARALLEL
3852 && GET_MODE (reg) == BLKmode)
3856 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
3857 mark_set_1 (needed, dead, XVECEXP (reg, 0, i), insn,
3858 significant, flags);
3862 /* Modifying just one hardware register of a multi-reg value
3863 or just a byte field of a register
3864 does not mean the value from before this insn is now dead.
3865 But it does mean liveness of that register at the end of the block
3868 Within mark_set_1, however, we treat it as if the register is
3869 indeed modified. mark_used_regs will, however, also treat this
3870 register as being used. Thus, we treat these insns as setting a
3871 new value for the register as a function of its old value. This
3872 causes LOG_LINKS to be made appropriately and this will help combine. */
3874 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
3875 || GET_CODE (reg) == SIGN_EXTRACT
3876 || GET_CODE (reg) == STRICT_LOW_PART)
3877 reg = XEXP (reg, 0);
3879 /* If this set is a MEM, then it kills any aliased writes.
3880 If this set is a REG, then it kills any MEMs which use the reg. */
3881 if (flags & PROP_SCAN_DEAD_CODE)
3883 if (GET_CODE (reg) == MEM
3884 || GET_CODE (reg) == REG)
3886 rtx temp = mem_set_list;
3887 rtx prev = NULL_RTX;
3892 next = XEXP (temp, 1);
3893 if ((GET_CODE (reg) == MEM
3894 && output_dependence (XEXP (temp, 0), reg))
3895 || (GET_CODE (reg) == REG
3896 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
3898 /* Splice this entry out of the list. */
3900 XEXP (prev, 1) = next;
3902 mem_set_list = next;
3903 free_EXPR_LIST_node (temp);
3911 /* If the memory reference had embedded side effects (autoincrement
3912 address modes), then we may need to kill some entries on the
memory set list.  */
3914 if (insn && GET_CODE (reg) == MEM)
3915 invalidate_mems_from_autoinc (insn);
3917 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
3918 /* We do not know the size of a BLKmode store, so we do not track
3919 them for redundant store elimination. */
3920 && GET_MODE (reg) != BLKmode
3921 /* There are no REG_INC notes for SP, so we can't assume we'll see
3922 everything that invalidates it. To be safe, don't eliminate any
3923 stores through SP; none of them should be redundant anyway. */
3924 && ! reg_mentioned_p (stack_pointer_rtx, reg))
3925 mem_set_list = alloc_EXPR_LIST (0, reg, mem_set_list);
/* A REG destination -- but skip the (hard or soft) frame pointer, the arg
   pointer when fixed, and global register variables: their liveness is
   handled specially.  */
3928 if (GET_CODE (reg) == REG
3929 && (regno = REGNO (reg),
3930 ! (regno == FRAME_POINTER_REGNUM
3931 && (! reload_completed || frame_pointer_needed)))
3932 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3933 && ! (regno == HARD_FRAME_POINTER_REGNUM
3934 && (! reload_completed || frame_pointer_needed))
3936 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3937 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3939 && ! (regno < FIRST_PSEUDO_REGISTER && global_regs[regno]))
3940 /* && regno != STACK_POINTER_REGNUM) -- let's try without this. */
3942 int some_needed = REGNO_REG_SET_P (needed, regno);
3943 int some_not_needed = ! some_needed;
3945 /* Mark it as a significant register for this basic block. */
3947 SET_REGNO_REG_SET (significant, regno);
3949 /* Mark it as dead before this insn. */
3950 SET_REGNO_REG_SET (dead, regno);
3952 /* A hard reg in a wide mode may really be multiple registers.
3953 If so, mark all of them just like the first. */
3954 if (regno < FIRST_PSEUDO_REGISTER)
3958 /* Nothing below is needed for the stack pointer; get out asap.
3959 Eg, log links aren't needed, since combine won't use them. */
3960 if (regno == STACK_POINTER_REGNUM)
3963 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
3966 int regno_n = regno + n;
3967 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
3969 SET_REGNO_REG_SET (significant, regno_n);
3971 SET_REGNO_REG_SET (dead, regno_n);
3972 some_needed |= needed_regno;
3973 some_not_needed |= ! needed_regno;
3977 /* Additional data to record if this is the final pass. */
3978 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
3979 | PROP_DEATH_NOTES | PROP_AUTOINC))
3982 register int blocknum = BLOCK_NUM (insn);
3985 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3986 y = reg_next_use[regno];
3988 /* If this is a hard reg, record this function uses the reg. */
3990 if (regno < FIRST_PSEUDO_REGISTER)
3993 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
3995 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3996 for (i = regno; i < endregno; i++)
3998 /* The next use is no longer "next", since a store
intervenes.  */
4000 reg_next_use[i] = 0;
4003 if (flags & PROP_REG_INFO)
4004 for (i = regno; i < endregno; i++)
4006 regs_ever_live[i] = 1;
4012 /* The next use is no longer "next", since a store
intervenes.  */
4014 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4015 reg_next_use[regno] = 0;
4017 /* Keep track of which basic blocks each reg appears in. */
4019 if (flags & PROP_REG_INFO)
4021 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4022 REG_BASIC_BLOCK (regno) = blocknum;
4023 else if (REG_BASIC_BLOCK (regno) != blocknum)
4024 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4026 /* Count (weighted) references, stores, etc. This counts a
4027 register twice if it is modified, but that is correct. */
4028 REG_N_SETS (regno)++;
4029 REG_N_REFS (regno) += loop_depth;
4031 /* The insns where a reg is live are normally counted
4032 elsewhere, but we want the count to include the insn
4033 where the reg is set, and the normal counting mechanism
4034 would not count it. */
4035 REG_LIVE_LENGTH (regno)++;
4039 if (! some_not_needed)
4041 if (flags & PROP_LOG_LINKS)
4043 /* Make a logical link from the next following insn
4044 that uses this register, back to this insn.
4045 The following insns have already been processed.
4047 We don't build a LOG_LINK for hard registers contained
4048 in ASM_OPERANDs. If these registers get replaced,
4049 we might wind up changing the semantics of the insn,
4050 even if reload can make what appear to be valid
4051 assignments later. */
4052 if (y && (BLOCK_NUM (y) == blocknum)
4053 && (regno >= FIRST_PSEUDO_REGISTER
4054 || asm_noperands (PATTERN (y)) < 0))
4055 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4058 else if (! some_needed)
4060 if (flags & PROP_REG_INFO)
4061 REG_N_DEATHS (REGNO (reg))++;
4063 if (flags & PROP_DEATH_NOTES)
4065 /* Note that dead stores have already been deleted
4066 when possible. If we get here, we have found a
4067 dead store that cannot be eliminated (because the
4068 same insn does something useful). Indicate this
4069 by marking the reg being set as dying here. */
4071 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4076 if (flags & PROP_DEATH_NOTES)
4078 /* This is a case where we have a multi-word hard register
4079 and some, but not all, of the words of the register are
4080 needed in subsequent insns. Write REG_UNUSED notes
4081 for those parts that were not needed. This case should
be rare.  */
4086 for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
4088 if (!REGNO_REG_SET_P (needed, regno + i))
4092 gen_rtx_REG (reg_raw_mode[regno + i], regno + i),
4098 else if (GET_CODE (reg) == REG)
4100 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4101 reg_next_use[regno] = 0;
4104 /* If this is the last pass and this is a SCRATCH, show it will be dying
4105 here and count it. */
4106 else if (GET_CODE (reg) == SCRATCH)
4108 if (flags & PROP_DEATH_NOTES)
4110 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4116 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
reference.  */
/* NOTE(review): numbered listing with gaps -- braces, declarations and some
   statements were dropped; code lines kept verbatim.  Dangling comments have
   been re-closed with text reconstructed from context -- verify against the
   original flow.c.  */
4120 find_auto_inc (needed, x, insn)
4125 rtx addr = XEXP (x, 0);
4126 HOST_WIDE_INT offset = 0;
4129 /* Here we detect use of an index register which might be good for
4130 postincrement, postdecrement, preincrement, or predecrement. */
4132 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4133 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
4135 if (GET_CODE (addr) == REG)
4138 register int size = GET_MODE_SIZE (GET_MODE (x));
4141 int regno = REGNO (addr);
4143 /* Is the next use an increment that might make auto-increment? */
4144 if ((incr = reg_next_use[regno]) != 0
4145 && (set = single_set (incr)) != 0
4146 && GET_CODE (set) == SET
4147 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
4148 /* Can't add side effects to jumps; if reg is spilled and
4149 reloaded, there's no way to store back the altered value. */
4150 && GET_CODE (insn) != JUMP_INSN
4151 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
4152 && XEXP (y, 0) == addr
4153 && GET_CODE (XEXP (y, 1)) == CONST_INT
4154 && ((HAVE_POST_INCREMENT
4155 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
4156 || (HAVE_POST_DECREMENT
4157 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
4158 || (HAVE_PRE_INCREMENT
4159 && (INTVAL (XEXP (y, 1)) == size && offset == size))
4160 || (HAVE_PRE_DECREMENT
4161 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
4162 /* Make sure this reg appears only once in this insn. */
4163 && (use = find_use_as_address (PATTERN (insn), addr, offset),
4164 use != 0 && use != (rtx) 1))
4166 rtx q = SET_DEST (set);
4167 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
4168 ? (offset ? PRE_INC : POST_INC)
4169 : (offset ? PRE_DEC : POST_DEC));
4171 if (dead_or_set_p (incr, addr))
4173 /* This is the simple case. Try to make the auto-inc. If
4174 we can't, we are done. Otherwise, we will do any
4175 needed updates below. */
4176 if (! validate_change (insn, &XEXP (x, 0),
4177 gen_rtx_fmt_e (inc_code, Pmode, addr),
4181 else if (GET_CODE (q) == REG
4182 /* PREV_INSN used here to check the semi-open interval
[insn, incr).  */
4184 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4185 /* We must also check for sets of q as q may be
4186 a call clobbered hard register and there may
4187 be a call between PREV_INSN (insn) and incr. */
4188 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4190 /* We have *p followed sometime later by q = p+size.
4191 Both p and q must be live afterward,
4192 and q is not used between INSN and its assignment.
4193 Change it to q = p, ...*q..., q = q+size.
4194 Then fall into the usual case. */
4199 emit_move_insn (q, addr);
4200 insns = get_insns ();
4203 bb = BLOCK_FOR_INSN (insn);
4204 for (temp = insns; temp; temp = NEXT_INSN (temp))
4205 set_block_for_insn (temp, bb);
4207 /* If we can't make the auto-inc, or can't make the
4208 replacement into Y, exit. There's no point in making
4209 the change below if we can't do the auto-inc and doing
4210 so is not correct in the pre-inc case. */
4212 validate_change (insn, &XEXP (x, 0),
4213 gen_rtx_fmt_e (inc_code, Pmode, q),
4215 validate_change (incr, &XEXP (y, 0), q, 1);
4216 if (! apply_change_group ())
4219 /* We now know we'll be doing this change, so emit the
4220 new insn(s) and do the updates. */
4221 emit_insns_before (insns, insn);
4223 if (BLOCK_FOR_INSN (insn)->head == insn)
4224 BLOCK_FOR_INSN (insn)->head = insns;
4226 /* INCR will become a NOTE and INSN won't contain a
4227 use of ADDR. If a use of ADDR was just placed in
4228 the insn before INSN, make that the next use.
4229 Otherwise, invalidate it. */
4230 if (GET_CODE (PREV_INSN (insn)) == INSN
4231 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4232 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
4233 reg_next_use[regno] = PREV_INSN (insn);
4235 reg_next_use[regno] = 0;
4240 /* REGNO is now used in INCR which is below INSN, but
4241 it previously wasn't live here. If we don't mark
4242 it as needed, we'll put a REG_DEAD note for it
4243 on this insn, which is incorrect. */
4244 SET_REGNO_REG_SET (needed, regno);
4246 /* If there are any calls between INSN and INCR, show
4247 that REGNO now crosses them. */
4248 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4249 if (GET_CODE (temp) == CALL_INSN)
4250 REG_N_CALLS_CROSSED (regno)++;
4255 /* If we haven't returned, it means we were able to make the
4256 auto-inc, so update the status. First, record that this insn
4257 has an implicit side effect. */
4260 = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
4262 /* Modify the old increment-insn to simply copy
4263 the already-incremented value of our register. */
4264 if (! validate_change (incr, &SET_SRC (set), addr, 0))
4267 /* If that makes it a no-op (copying the register into itself) delete
4268 it so it won't appear to be a "use" and a "set" of this
register.  */
4270 if (SET_DEST (set) == addr)
4272 PUT_CODE (incr, NOTE);
4273 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4274 NOTE_SOURCE_FILE (incr) = 0;
4277 if (regno >= FIRST_PSEUDO_REGISTER)
4279 /* Count an extra reference to the reg. When a reg is
4280 incremented, spilling it is worse, so we want to make
4281 that less likely. */
4282 REG_N_REFS (regno) += loop_depth;
4284 /* Count the increment as a setting of the register,
4285 even though it isn't a SET in rtl. */
4286 REG_N_SETS (regno)++;
4291 #endif /* AUTO_INC_DEC */
4293 /* Scan expression X and store a 1-bit in LIVE for each reg it uses.
4294 This is done assuming the registers needed from X
4295 are those that have 1-bits in NEEDED.
4297 FLAGS is the set of enabled operations.
4299 INSN is the containing instruction. If INSN is dead, this function is not
called.  */
/* NOTE(review): numbered listing with gaps -- braces, declarations, `case'
   labels and some statements were dropped; code lines kept verbatim.
   Dangling comments have been re-closed with text reconstructed from
   context -- verify against the original flow.c.  */
4303 mark_used_regs (needed, live, x, flags, insn)
4310 register RTX_CODE code;
4315 code = GET_CODE (x);
4335 /* If we are clobbering a MEM, mark any registers inside the address
as being used.  */
4337 if (GET_CODE (XEXP (x, 0)) == MEM)
4338 mark_used_regs (needed, live, XEXP (XEXP (x, 0), 0), flags, insn);
4342 /* Don't bother watching stores to mems if this is not the
4343 final pass. We'll not be deleting dead stores this round. */
4344 if (flags & PROP_SCAN_DEAD_CODE)
4346 /* Invalidate the data for the last MEM stored, but only if MEM is
4347 something that can be stored into. */
4348 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
4349 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
4350 ; /* needn't clear the memory set list */
4353 rtx temp = mem_set_list;
4354 rtx prev = NULL_RTX;
4359 next = XEXP (temp, 1);
4360 if (anti_dependence (XEXP (temp, 0), x))
4362 /* Splice temp out of the list. */
4364 XEXP (prev, 1) = next;
4366 mem_set_list = next;
4367 free_EXPR_LIST_node (temp);
4375 /* If the memory reference had embedded side effects (autoincrement
4376 address modes), then we may need to kill some entries on the
memory set list.  */
4379 invalidate_mems_from_autoinc (insn);
4383 if (flags & PROP_AUTOINC)
4384 find_auto_inc (needed, x, insn);
/* A paradoxical or narrowing SUBREG of a pseudo: record the size change.  */
4389 if (GET_CODE (SUBREG_REG (x)) == REG
4390 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
4391 && (GET_MODE_SIZE (GET_MODE (x))
4392 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
4393 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
4395 /* While we're here, optimize this case. */
4398 /* In case the SUBREG is not of a register, don't optimize */
4399 if (GET_CODE (x) != REG)
4401 mark_used_regs (needed, live, x, flags, insn);
4405 /* ... fall through ... */
4408 /* See a register other than being set
4409 => mark it as needed. */
4413 int some_needed = REGNO_REG_SET_P (needed, regno);
4414 int some_not_needed = ! some_needed;
4416 SET_REGNO_REG_SET (live, regno);
4418 /* A hard reg in a wide mode may really be multiple registers.
4419 If so, mark all of them just like the first. */
4420 if (regno < FIRST_PSEUDO_REGISTER)
4424 /* For stack ptr or fixed arg pointer,
4425 nothing below can be necessary, so waste no more time. */
4426 if (regno == STACK_POINTER_REGNUM
4427 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4428 || (regno == HARD_FRAME_POINTER_REGNUM
4429 && (! reload_completed || frame_pointer_needed))
4431 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4432 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4434 || (regno == FRAME_POINTER_REGNUM
4435 && (! reload_completed || frame_pointer_needed)))
4437 /* If this is a register we are going to try to eliminate,
4438 don't mark it live here. If we are successful in
4439 eliminating it, it need not be live unless it is used for
4440 pseudos, in which case it will have been set live when
4441 it was allocated to the pseudos. If the register will not
4442 be eliminated, reload will set it live at that point. */
4444 if (! TEST_HARD_REG_BIT (elim_reg_set, regno))
4445 regs_ever_live[regno] = 1;
4448 /* No death notes for global register variables;
4449 their values are live after this function exits. */
4450 if (global_regs[regno])
4452 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4453 reg_next_use[regno] = insn;
4457 n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4460 int regno_n = regno + n;
4461 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
4463 SET_REGNO_REG_SET (live, regno_n);
4464 some_needed |= needed_regno;
4465 some_not_needed |= ! needed_regno;
4469 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4471 /* Record where each reg is used, so when the reg
4472 is set we know the next insn that uses it. */
4474 reg_next_use[regno] = insn;
4476 if (flags & PROP_REG_INFO)
4478 if (regno < FIRST_PSEUDO_REGISTER)
4480 /* If a hard reg is being used,
4481 record that this function does use it. */
4483 i = HARD_REGNO_NREGS (regno, GET_MODE (x));
4487 regs_ever_live[regno + --i] = 1;
4492 /* Keep track of which basic block each reg appears in. */
4494 register int blocknum = BLOCK_NUM (insn);
4496 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4497 REG_BASIC_BLOCK (regno) = blocknum;
4498 else if (REG_BASIC_BLOCK (regno) != blocknum)
4499 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4501 /* Count (weighted) number of uses of each reg. */
4503 REG_N_REFS (regno) += loop_depth;
4507 /* Record and count the insns in which a reg dies.
4508 If it is used in this insn and was dead below the insn
4509 then it dies in this insn. If it was set in this insn,
4510 we do not make a REG_DEAD note; likewise if we already
4511 made such a note. */
4513 if (flags & PROP_DEATH_NOTES)
4516 && ! dead_or_set_p (insn, x)
4518 && (regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
4522 /* Check for the case where the register dying partially
4523 overlaps the register set by this insn. */
4524 if (regno < FIRST_PSEUDO_REGISTER
4525 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
4527 int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4529 some_needed |= dead_or_set_regno_p (insn, regno + n);
4532 /* If none of the words in X is needed, make a REG_DEAD
4533 note. Otherwise, we must make partial REG_DEAD notes. */
4537 = alloc_EXPR_LIST (REG_DEAD, x, REG_NOTES (insn));
4538 REG_N_DEATHS (regno)++;
4544 /* Don't make a REG_DEAD note for a part of a register
4545 that is set in the insn. */
4547 for (i = HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1;
4549 if (!REGNO_REG_SET_P (needed, regno + i)
4550 && ! dead_or_set_regno_p (insn, regno + i))
4553 (REG_DEAD, gen_rtx_REG (reg_raw_mode[regno + i],
4564 register rtx testreg = SET_DEST (x);
4567 /* If storing into MEM, don't show it as being used. But do
4568 show the address as being used. */
4569 if (GET_CODE (testreg) == MEM)
4572 if (flags & PROP_AUTOINC)
4573 find_auto_inc (needed, testreg, insn);
4575 mark_used_regs (needed, live, XEXP (testreg, 0), flags, insn);
4576 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4580 /* Storing in STRICT_LOW_PART is like storing in a reg
4581 in that this SET might be dead, so ignore it in TESTREG,
4582 but in some other ways it is like using the reg.
4584 Storing in a SUBREG or a bit field is like storing the entire
4585 register in that if the register's value is not used
4586 then this SET is not needed. */
4587 while (GET_CODE (testreg) == STRICT_LOW_PART
4588 || GET_CODE (testreg) == ZERO_EXTRACT
4589 || GET_CODE (testreg) == SIGN_EXTRACT
4590 || GET_CODE (testreg) == SUBREG)
4592 if (GET_CODE (testreg) == SUBREG
4593 && GET_CODE (SUBREG_REG (testreg)) == REG
4594 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
4595 && (GET_MODE_SIZE (GET_MODE (testreg))
4596 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
4597 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
4599 /* Modifying a single register in an alternate mode
4600 does not use any of the old value. But these other
4601 ways of storing in a register do use the old value. */
4602 if (GET_CODE (testreg) == SUBREG
4603 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4608 testreg = XEXP (testreg, 0);
4611 /* If this is a store into a register,
4612 recursively scan the value being stored. */
4614 if ((GET_CODE (testreg) == PARALLEL
4615 && GET_MODE (testreg) == BLKmode)
4616 || (GET_CODE (testreg) == REG
4617 && (regno = REGNO (testreg), ! (regno == FRAME_POINTER_REGNUM
4618 && (! reload_completed || frame_pointer_needed)))
4619 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4620 && ! (regno == HARD_FRAME_POINTER_REGNUM
4621 && (! reload_completed || frame_pointer_needed))
4623 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4624 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4627 /* We used to exclude global_regs here, but that seems wrong.
4628 Storing in them is like storing in mem. */
4630 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4632 mark_used_regs (needed, live, SET_DEST (x), flags, insn);
4639 /* ??? This info should have been gotten from mark_regs_live_at_end,
4640 as applied to the EXIT block, and propagated along the edge that
4641 connects this block to the EXIT. */
4645 case UNSPEC_VOLATILE:
4649 /* Traditional and volatile asm instructions must be considered to use
4650 and clobber all hard registers, all pseudo-registers and all of
4651 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
4653 Consider for instance a volatile asm that changes the fpu rounding
4654 mode. An insn should not be moved across this even if it only uses
4655 pseudo-regs because it might give an incorrectly rounded result.
4657 ?!? Unfortunately, marking all hard registers as live causes massive
4658 problems for the register allocator and marking all pseudos as live
4659 creates mountains of uninitialized variable warnings.
4661 So for now, just clear the memory set list and mark any regs
4662 we can find in ASM_OPERANDS as used. */
4663 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
4664 free_EXPR_LIST_list (&mem_set_list);
4666 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
4667 We can not just fall through here since then we would be confused
4668 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
4669 traditional asms unlike their normal usage. */
4670 if (code == ASM_OPERANDS)
4674 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
4675 mark_used_regs (needed, live, ASM_OPERANDS_INPUT (x, j),
4686 /* Recursively scan the operands of this expression. */
4689 register const char *fmt = GET_RTX_FORMAT (code);
4692 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4696 /* Tail recursive case: save a function call level. */
4702 mark_used_regs (needed, live, XEXP (x, i), flags, insn);
4704 else if (fmt[i] == 'E')
4707 for (j = 0; j < XVECLEN (x, i); j++)
4708 mark_used_regs (needed, live, XVECEXP (x, i, j), flags, insn);
/* NOTE(review): the header comment for this function was lost in extraction.
   From the body: INSN adds a constant to a register; try to fold that
   increment into a pre-inc/pre-dec addressing mode in the next insn that
   uses the register (via try_pre_increment below), then delete INSN.  */
4717 try_pre_increment_1 (insn)
4720 /* Find the next use of this reg. If in same basic block,
4721 make it do pre-increment or pre-decrement if appropriate. */
4722 rtx x = single_set (insn);
4723 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4724 * INTVAL (XEXP (SET_SRC (x), 1)));
4725 int regno = REGNO (SET_DEST (x));
4726 rtx y = reg_next_use[regno];
4728 && BLOCK_NUM (y) == BLOCK_NUM (insn)
4729 /* Don't do this if the reg dies, or gets set in y; a standard addressing
4730 mode would be better. */
4731 && ! dead_or_set_p (y, SET_DEST (x))
4732 && try_pre_increment (y, SET_DEST (x), amount))
4734 /* We have found a suitable auto-increment
4735 and already changed insn Y to do it.
4736 So flush this increment-instruction. */
4737 PUT_CODE (insn, NOTE);
4738 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4739 NOTE_SOURCE_FILE (insn) = 0;
4740 /* Count a reference to this reg for the increment
4741 insn we are deleting. When a reg is incremented,
4742 spilling it is worse, so we want to make that
less likely.  */
4744 if (regno >= FIRST_PSEUDO_REGISTER)
4746 REG_N_REFS (regno) += loop_depth;
4747 REG_N_SETS (regno)++;
4754 /* Try to change INSN so that it does pre-increment or pre-decrement
4755 addressing on register REG in order to add AMOUNT to REG.
4756 AMOUNT is negative for pre-decrement.
4757 Returns 1 if the change could be made.
4758 This checks all about the validity of the result of modifying INSN. */
/* NOTE(review): numbered listing with gaps -- declarations, braces and the
   assignments to pre_ok/post_ok/do_post were dropped; code kept verbatim.  */
4761 try_pre_increment (insn, reg, amount)
4763 HOST_WIDE_INT amount;
4767 /* Nonzero if we can try to make a pre-increment or pre-decrement.
4768 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
4770 /* Nonzero if we can try to make a post-increment or post-decrement.
4771 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4772 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4773 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
4776 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
4779 /* From the sign of increment, see which possibilities are conceivable
4780 on this target machine. */
4781 if (HAVE_PRE_INCREMENT && amount > 0)
4783 if (HAVE_POST_INCREMENT && amount > 0)
4786 if (HAVE_PRE_DECREMENT && amount < 0)
4788 if (HAVE_POST_DECREMENT && amount < 0)
4791 if (! (pre_ok || post_ok))
4794 /* It is not safe to add a side effect to a jump insn
4795 because if the incremented register is spilled and must be reloaded
4796 there would be no way to store the incremented value back in memory. */
4798 if (GET_CODE (insn) == JUMP_INSN)
/* First look for a plain (mem (reg)) use (post form), then for
   (mem (plus reg -amount)) (pre form).  */
4803 use = find_use_as_address (PATTERN (insn), reg, 0);
4804 if (post_ok && (use == 0 || use == (rtx) 1))
4806 use = find_use_as_address (PATTERN (insn), reg, -amount);
4810 if (use == 0 || use == (rtx) 1)
/* The increment must match the access size exactly.  */
4813 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4816 /* See if this combination of instruction and addressing mode exists. */
4817 if (! validate_change (insn, &XEXP (use, 0),
4818 gen_rtx_fmt_e (amount > 0
4819 ? (do_post ? POST_INC : PRE_INC)
4820 : (do_post ? POST_DEC : PRE_DEC),
4824 /* Record that this insn now has an implicit side effect on X. */
4825 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4829 #endif /* AUTO_INC_DEC */
4831 /* Find the place in the rtx X where REG is used as a memory address.
4832 Return the MEM rtx that so uses it.
4833 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4834 (plus REG (const_int PLUSCONST)).
4836 If such an address does not appear, return 0.
4837 If REG appears more than once, or is used other than in such an address,
return (rtx) 1.  */
/* NOTE(review): numbered listing with gaps -- declarations, braces and the
   accumulation of `value' across operands were dropped; code kept verbatim.  */
4841 find_use_as_address (x, reg, plusconst)
4844 HOST_WIDE_INT plusconst;
4846 enum rtx_code code = GET_CODE (x);
4847 const char *fmt = GET_RTX_FORMAT (code);
4849 register rtx value = 0;
4852 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4855 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4856 && XEXP (XEXP (x, 0), 0) == reg
4857 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4858 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4861 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4863 /* If REG occurs inside a MEM used in a bit-field reference,
4864 that is unacceptable. */
4865 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4866 return (rtx) (HOST_WIDE_INT) 1;
4870 return (rtx) (HOST_WIDE_INT) 1;
/* Recurse over operands; more than one hit collapses to (rtx) 1.  */
4872 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4876 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4880 return (rtx) (HOST_WIDE_INT) 1;
4885 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4887 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4891 return (rtx) (HOST_WIDE_INT) 1;
4899 /* Write information about registers and basic blocks into FILE.
4900 This is part of making a debugging dump. */
/* NOTE(review): numbered listing with gaps -- declarations, braces and some
   statements were dropped during extraction; code lines kept verbatim.  */
4903 dump_flow_info (file)
4907 static const char * const reg_class_names[] = REG_CLASS_NAMES;
/* Per-pseudo summary: uses, live length, block, sets, deaths, calls
   crossed, size, preferred/alternate class, pointer flag.  */
4909 fprintf (file, "%d registers.\n", max_regno);
4910 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4913 enum reg_class class, altclass;
4914 fprintf (file, "\nRegister %d used %d times across %d insns",
4915 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
4916 if (REG_BASIC_BLOCK (i) >= 0)
4917 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
4919 fprintf (file, "; set %d time%s", REG_N_SETS (i),
4920 (REG_N_SETS (i) == 1) ? "" : "s");
4921 if (REG_USERVAR_P (regno_reg_rtx[i]))
4922 fprintf (file, "; user var");
4923 if (REG_N_DEATHS (i) != 1)
4924 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
4925 if (REG_N_CALLS_CROSSED (i) == 1)
4926 fprintf (file, "; crosses 1 call");
4927 else if (REG_N_CALLS_CROSSED (i))
4928 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
4929 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
4930 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
4931 class = reg_preferred_class (i);
4932 altclass = reg_alternate_class (i);
4933 if (class != GENERAL_REGS || altclass != ALL_REGS)
4935 if (altclass == ALL_REGS || class == ALL_REGS)
4936 fprintf (file, "; pref %s", reg_class_names[(int) class]);
4937 else if (altclass == NO_REGS)
4938 fprintf (file, "; %s or none", reg_class_names[(int) class]);
4940 fprintf (file, "; pref %s, else %s",
4941 reg_class_names[(int) class],
4942 reg_class_names[(int) altclass]);
4944 if (REGNO_POINTER_FLAG (i))
4945 fprintf (file, "; pointer");
4946 fprintf (file, ".\n");
/* Per-basic-block summary: insn range, pred/succ edges, live regsets.  */
4949 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
4950 for (i = 0; i < n_basic_blocks; i++)
4952 register basic_block bb = BASIC_BLOCK (i);
4956 fprintf (file, "\nBasic block %d: first insn %d, last %d.\n",
4957 i, INSN_UID (bb->head), INSN_UID (bb->end));
4959 fprintf (file, "Predecessors: ");
4960 for (e = bb->pred; e ; e = e->pred_next)
4961 dump_edge_info (file, e, 0);
4963 fprintf (file, "\nSuccessors: ");
4964 for (e = bb->succ; e ; e = e->succ_next)
4965 dump_edge_info (file, e, 1);
4967 fprintf (file, "\nRegisters live at start:");
4968 if (bb->global_live_at_start)
4970 for (regno = 0; regno < max_regno; regno++)
4971 if (REGNO_REG_SET_P (bb->global_live_at_start, regno))
4972 fprintf (file, " %d", regno);
4975 fprintf (file, " n/a");
4977 fprintf (file, "\nRegisters live at end:");
4978 if (bb->global_live_at_end)
4980 for (regno = 0; regno < max_regno; regno++)
4981 if (REGNO_REG_SET_P (bb->global_live_at_end, regno))
4982 fprintf (file, " %d", regno);
4985 fprintf (file, " n/a");
4996 dump_flow_info (stderr);
/* Print a one-line description of edge E to FILE.  DO_SUCC nonzero
   means describe the destination side of the edge; zero means the
   source side.  ENTRY and EXIT blocks are printed by name, other
   blocks by index, followed by the names of any edge flags set.
   NOTE(review): this chunk is fragmentary -- original line numbers
   are fused into the text and some lines are missing.  */
5000 dump_edge_info (file, e, do_succ)
5005 basic_block side = (do_succ ? e->dest : e->src);
5007 if (side == ENTRY_BLOCK_PTR)
5008 fputs (" ENTRY", file);
5009 else if (side == EXIT_BLOCK_PTR)
5010 fputs (" EXIT", file);
5012 fprintf (file, " %d", side->index);
/* Names of the edge flag bits, indexed by bit position.  */
5016 static const char * const bitnames[] = {
5017 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5020 int i, flags = e->flags;
/* Walk the set bits of FLAGS; print a known bit by name, any other
   bit by its raw number.  */
5024 for (i = 0; flags; i++)
5025 if (flags & (1 << i))
5031 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5032 fputs (bitnames[i], file);
5034 fprintf (file, "%d", i);
5042 /* Like print_rtl, but also print out live information for the start of each
   basic block.  OUTF is the output stream; RTX_FIRST heads the insn
   chain to print.  NOTE(review): fragmentary chunk -- original line
   numbers are fused into the text and some lines are missing.  */
5046 print_rtl_with_bb (outf, rtx_first)
5050 register rtx tmp_rtx;
5053 fprintf (outf, "(nil)\n");
5057 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5058 int max_uid = get_max_uid ();
/* Per-insn-uid maps: the block starting at an insn, the block ending
   at an insn, and whether each insn lies in zero, one, or several
   blocks.  */
5059 basic_block *start = (basic_block *)
5060 xcalloc (max_uid, sizeof (basic_block));
5061 basic_block *end = (basic_block *)
5062 xcalloc (max_uid, sizeof (basic_block));
5063 enum bb_state *in_bb_p = (enum bb_state *)
5064 xcalloc (max_uid, sizeof (enum bb_state));
5066 for (i = n_basic_blocks - 1; i >= 0; i--)
5068 basic_block bb = BASIC_BLOCK (i);
5071 start[INSN_UID (bb->head)] = bb;
5072 end[INSN_UID (bb->end)] = bb;
5073 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5075 enum bb_state state = IN_MULTIPLE_BB;
5076 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
5078 in_bb_p[INSN_UID(x)] = state;
/* Walk the whole insn chain, announcing block starts (with the
   global_live_at_start register set) and block ends around each
   printed insn.  */
5085 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5090 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5092 fprintf (outf, ";; Start of basic block %d, registers live:",
5095 EXECUTE_IF_SET_IN_REG_SET (bb->global_live_at_start, 0, i,
5097 fprintf (outf, " %d", i);
5098 if (i < FIRST_PSEUDO_REGISTER)
5099 fprintf (outf, " [%s]",
5105 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
5106 && GET_CODE (tmp_rtx) != NOTE
5107 && GET_CODE (tmp_rtx) != BARRIER
5109 fprintf (outf, ";; Insn is not within a basic block\n");
5110 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
5111 fprintf (outf, ";; Insn is in multiple basic blocks\n");
5113 did_output = print_rtl_single (outf, tmp_rtx);
5115 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
5116 fprintf (outf, ";; End of basic block %d\n", bb->index);
/* Epilogue delay slot insns live outside the normal chain; print
   them separately.  */
5127 if (current_function_epilogue_delay_list != 0)
5129 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
5130 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
5131 tmp_rtx = XEXP (tmp_rtx, 1))
5132 print_rtl_single (outf, XEXP (tmp_rtx, 0));
5136 /* Compute dominator relationships using new flow graph structures. */
/* Iterative bitmap dataflow.  DOMINATORS[b] (and, if non-null,
   POST_DOMINATORS[b]) is seeded to the universal set and refined by
   intersecting over predecessors (resp. successors) until a worklist
   of changed blocks drains.  The AUX field of each block doubles as
   the on-worklist marker and as a flag for ENTRY-successor (resp.
   EXIT-predecessor) blocks.  NOTE(review): fragmentary chunk; `bb'
   used below is presumably the index of worklist block B -- its
   declaration is not visible here.  */
5138 compute_flow_dominators (dominators, post_dominators)
5139 sbitmap *dominators;
5140 sbitmap *post_dominators;
5143 sbitmap *temp_bitmap;
5145 basic_block *worklist, *tos;
5147 /* Allocate a worklist array/queue. Entries are only added to the
5148 list if they were not already on the list. So the size is
5149 bounded by the number of basic blocks. */
5150 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block)
5153 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5154 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5158 /* The optimistic setting of dominators requires us to put every
5159 block on the work list initially. */
5160 for (bb = 0; bb < n_basic_blocks; bb++)
5162 *tos++ = BASIC_BLOCK (bb);
5163 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5166 /* We want a maximal solution, so initially assume everything dominates
5168 sbitmap_vector_ones (dominators, n_basic_blocks);
5170 /* Mark successors of the entry block so we can identify them below. */
5171 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5172 e->dest->aux = ENTRY_BLOCK_PTR;
5174 /* Iterate until the worklist is empty. */
5175 while (tos != worklist)
5177 /* Take the first entry off the worklist. */
5178 basic_block b = *--tos;
5181 /* Compute the intersection of the dominators of all the
5184 If one of the predecessor blocks is the ENTRY block, then the
5185 intersection of the dominators of the predecessor blocks is
5186 defined as the null set. We can identify such blocks by the
5187 special value in the AUX field in the block structure. */
5188 if (b->aux == ENTRY_BLOCK_PTR)
5190 /* Do not clear the aux field for blocks which are
5191 successors of the ENTRY block. That way we never
5192 add them to the worklist again.
5194 The intersect of dominators of the preds of this block is
5195 defined as the null set. */
5196 sbitmap_zero (temp_bitmap[bb]);
5200 /* Clear the aux field of this block so it can be added to
5201 the worklist again if necessary. */
5203 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
5206 /* Make sure each block always dominates itself. */
5207 SET_BIT (temp_bitmap[bb], bb);
5209 /* If the out state of this block changed, then we need to
5210 add the successors of this block to the worklist if they
5211 are not already on the worklist. */
5212 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
5214 for (e = b->succ; e; e = e->succ_next)
5216 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
/* Second, symmetric pass: post-dominators, requested only when the
   caller passed a non-null POST_DOMINATORS vector.  Roles of
   ENTRY/EXIT and pred/succ are exchanged.  */
5226 if (post_dominators)
5228 /* The optimistic setting of dominators requires us to put every
5229 block on the work list initially. */
5230 for (bb = 0; bb < n_basic_blocks; bb++)
5232 *tos++ = BASIC_BLOCK (bb);
5233 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5236 /* We want a maximal solution, so initially assume everything post
5237 dominates everything else. */
5238 sbitmap_vector_ones (post_dominators, n_basic_blocks);
5240 /* Mark predecessors of the exit block so we can identify them below. */
5241 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5242 e->src->aux = EXIT_BLOCK_PTR;
5244 /* Iterate until the worklist is empty. */
5245 while (tos != worklist)
5247 /* Take the first entry off the worklist. */
5248 basic_block b = *--tos;
5251 /* Compute the intersection of the post dominators of all the
5254 If one of the successor blocks is the EXIT block, then the
5255 intersection of the dominators of the successor blocks is
5256 defined as the null set. We can identify such blocks by the
5257 special value in the AUX field in the block structure. */
5258 if (b->aux == EXIT_BLOCK_PTR)
5260 /* Do not clear the aux field for blocks which are
5261 predecessors of the EXIT block. That way we never
5262 add them to the worklist again.
5264 The intersect of dominators of the succs of this block is
5265 defined as the null set. */
5266 sbitmap_zero (temp_bitmap[bb])
5270 /* Clear the aux field of this block so it can be added to
5271 the worklist again if necessary. */
5273 sbitmap_intersection_of_succs (temp_bitmap[bb],
5274 post_dominators, bb);
5277 /* Make sure each block always post dominates itself. */
5278 SET_BIT (temp_bitmap[bb], bb);
5280 /* If the out state of this block changed, then we need to
5281 add the successors of this block to the worklist if they
5282 are not already on the worklist. */
5283 if (sbitmap_a_and_b (post_dominators[bb],
5284 post_dominators[bb],
5287 for (e = b->pred; e; e = e->pred_next)
5289 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
5301 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
/* Classic reduction: start from tmp(n) = dom(n) - {n}, then remove
   from each tmp(b) everything dominated by another member of tmp(b);
   the single remaining bit is b's immediate dominator.  */
5304 compute_immediate_dominators (idom, dominators)
5306 sbitmap *dominators;
5311 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5313 /* Begin with tmp(n) = dom(n) - { n }. */
5314 for (b = n_basic_blocks; --b >= 0; )
5316 sbitmap_copy (tmp[b], dominators[b]);
5317 RESET_BIT (tmp[b], b);
5320 /* Subtract out all of our dominator's dominators. */
5321 for (b = n_basic_blocks; --b >= 0; )
5323 sbitmap tmp_b = tmp[b];
5326 for (s = n_basic_blocks; --s >= 0; )
5327 if (TEST_BIT (tmp_b, s))
5328 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
5331 /* Find the one bit set in the bitmap and put it in the output array. */
5332 for (b = n_basic_blocks; --b >= 0; )
5335 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
5338 sbitmap_vector_free (tmp);
5341 /* Count for a single SET rtx, X. */
/* Strips SUBREG/ZERO_EXTRACT/SIGN_EXTRACT/STRICT_LOW_PART wrappers
   from SET_DEST (X), recurses through a BLKmode PARALLEL destination,
   and for a pseudo register bumps REG_N_SETS and adds the current
   loop_depth to REG_N_REFS.  Hard registers are deliberately not
   counted here.  */
5344 count_reg_sets_1 (x)
5348 register rtx reg = SET_DEST (x);
5350 /* Find the register that's set/clobbered. */
5351 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
5352 || GET_CODE (reg) == SIGN_EXTRACT
5353 || GET_CODE (reg) == STRICT_LOW_PART)
5354 reg = XEXP (reg, 0);
5356 if (GET_CODE (reg) == PARALLEL
5357 && GET_MODE (reg) == BLKmode)
5360 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
5361 count_reg_sets_1 (XVECEXP (reg, 0, i));
5365 if (GET_CODE (reg) == REG)
5367 regno = REGNO (reg);
5368 if (regno >= FIRST_PSEUDO_REGISTER)
5370 /* Count (weighted) references, stores, etc. This counts a
5371 register twice if it is modified, but that is correct. */
5372 REG_N_SETS (regno)++;
5374 REG_N_REFS (regno) += loop_depth;
5379 /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
5380 REG_N_REFS by the current loop depth for each SET or CLOBBER found. */
/* Dispatches to count_reg_sets_1 for a bare SET/CLOBBER, or for each
   SET/CLOBBER member of a PARALLEL.  NOTE(review): the function
   definition line itself is missing from this fragmentary chunk;
   presumably `count_reg_sets (x)' -- confirm against the original.  */
5386 register RTX_CODE code = GET_CODE (x);
5388 if (code == SET || code == CLOBBER)
5389 count_reg_sets_1 (x);
5390 else if (code == PARALLEL)
5393 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5395 code = GET_CODE (XVECEXP (x, 0, i));
5396 if (code == SET || code == CLOBBER)
5397 count_reg_sets_1 (XVECEXP (x, 0, i));
5402 /* Increment REG_N_REFS by the current loop depth each register reference
   found in X.  Recursive; mirrors the structure of mark_used_regs but
   only maintains reference counts for pseudo registers.  */
5406 count_reg_references (x)
5409 register RTX_CODE code;
5412 code = GET_CODE (x);
/* (Fragmentary switch over CODE follows: leaf and special rtx codes
   are handled case by case, everything else falls through to the
   generic operand scan at the bottom.)  */
5432 /* If we are clobbering a MEM, mark any registers inside the address
5434 if (GET_CODE (XEXP (x, 0)) == MEM)
5435 count_reg_references (XEXP (XEXP (x, 0), 0));
5439 /* While we're here, optimize this case. */
5442 /* In case the SUBREG is not of a register, don't optimize */
5443 if (GET_CODE (x) != REG)
5445 count_reg_references (x);
5449 /* ... fall through ... */
/* A plain REG: only pseudos are counted, weighted by loop depth.  */
5452 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
5453 REG_N_REFS (REGNO (x)) += loop_depth;
5458 register rtx testreg = SET_DEST (x);
5461 /* If storing into MEM, don't show it as being used. But do
5462 show the address as being used. */
5463 if (GET_CODE (testreg) == MEM)
5465 count_reg_references (XEXP (testreg, 0));
5466 count_reg_references (SET_SRC (x));
5470 /* Storing in STRICT_LOW_PART is like storing in a reg
5471 in that this SET might be dead, so ignore it in TESTREG.
5472 but in some other ways it is like using the reg.
5474 Storing in a SUBREG or a bit field is like storing the entire
5475 register in that if the register's value is not used
5476 then this SET is not needed. */
5477 while (GET_CODE (testreg) == STRICT_LOW_PART
5478 || GET_CODE (testreg) == ZERO_EXTRACT
5479 || GET_CODE (testreg) == SIGN_EXTRACT
5480 || GET_CODE (testreg) == SUBREG)
5482 /* Modifying a single register in an alternate mode
5483 does not use any of the old value. But these other
5484 ways of storing in a register do use the old value. */
5485 if (GET_CODE (testreg) == SUBREG
5486 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5491 testreg = XEXP (testreg, 0);
5494 /* If this is a store into a register,
5495 recursively scan the value being stored. */
5497 if ((GET_CODE (testreg) == PARALLEL
5498 && GET_MODE (testreg) == BLKmode)
5499 || GET_CODE (testreg) == REG)
5501 count_reg_references (SET_SRC (x));
5503 count_reg_references (SET_DEST (x));
5513 /* Recursively scan the operands of this expression. */
5516 register const char *fmt = GET_RTX_FORMAT (code);
5519 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5523 /* Tail recursive case: save a function call level. */
5529 count_reg_references (XEXP (x, i));
5531 else if (fmt[i] == 'E')
5534 for (j = 0; j < XVECLEN (x, i); j++)
5535 count_reg_references (XVECEXP (x, i, j));
5541 /* Recompute register set/reference counts immediately prior to register
5544 This avoids problems with set/reference counts changing to/from values
5545 which have special meanings to the register allocators.
5547 Additionally, the reference counts are the primary component used by the
5548 register allocators to prioritize pseudos for allocation to hard regs.
5549 More accurate reference counts generally lead to better register allocation.
5551 F is the first insn to be scanned.
5552 LOOP_STEP denotes how much loop_depth should be incremented per
5553 loop nesting level in order to increase the ref count more for references
5556 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
5557 possibly other information which is used by the register allocators. */
5560 recompute_reg_usage (f, loop_step)
5567 /* Clear out the old data. */
5568 max_reg = max_reg_num ();
5569 for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
5575 /* Scan each insn in the chain and count how many times each register is
   set/used, tracking loop depth via LOOP_BEG/LOOP_END notes so inner
   references weigh more.  */
5578 for (insn = f; insn; insn = NEXT_INSN (insn))
5580 /* Keep track of loop depth. */
5581 if (GET_CODE (insn) == NOTE)
5583 /* Look for loop boundaries. */
5584 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
5585 loop_depth -= loop_step;
5586 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
5587 loop_depth += loop_step;
5589 /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error.
5590 Abort now rather than setting register status incorrectly. */
5591 if (loop_depth == 0)
5594 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5598 /* This call will increment REG_N_SETS for each SET or CLOBBER
5599 of a register in INSN. It will also increment REG_N_REFS
5600 by the loop depth for each set of a register in INSN. */
5601 count_reg_sets (PATTERN (insn));
5603 /* count_reg_sets does not detect autoincrement address modes, so
5604 detect them here by looking at the notes attached to INSN. */
5605 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
5607 if (REG_NOTE_KIND (links) == REG_INC)
5608 /* Count (weighted) references, stores, etc. This counts a
5609 register twice if it is modified, but that is correct. */
5610 REG_N_SETS (REGNO (XEXP (links, 0)))++;
5613 /* This call will increment REG_N_REFS by the current loop depth for
5614 each reference to a register in INSN. */
5615 count_reg_references (PATTERN (insn));
5617 /* count_reg_references will not include counts for arguments to
5618 function calls, so detect them here by examining the
5619 CALL_INSN_FUNCTION_USAGE data. */
5620 if (GET_CODE (insn) == CALL_INSN)
5624 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5626 note = XEXP (note, 1))
5627 if (GET_CODE (XEXP (note, 0)) == USE)
5628 count_reg_references (XEXP (XEXP (note, 0), 0));
5634 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
5635 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
5636 of the number of registers that died. */
/* KILL nonzero means the matching notes are unlinked and freed as
   well as counted; a hard-register death counts once per hard reg
   spanned (HARD_REGNO_NREGS).  */
5639 count_or_remove_death_notes (blocks, kill)
5645 for (i = n_basic_blocks - 1; i >= 0; --i)
5650 if (blocks && ! TEST_BIT (blocks, i))
5653 bb = BASIC_BLOCK (i);
5655 for (insn = bb->head; ; insn = NEXT_INSN (insn))
5657 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
/* NOTE(review): the next line's `®_NOTES' is character mojibake;
   the original text is `&REG_NOTES (insn)' -- repair when restoring
   this file.  */
5659 rtx *pprev = ®_NOTES (insn);
5664 switch (REG_NOTE_KIND (link))
5667 if (GET_CODE (XEXP (link, 0)) == REG)
5669 rtx reg = XEXP (link, 0);
5672 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
5675 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
/* When KILL is set, splice the note out of the chain and free it;
   otherwise advance PPREV past it.  */
5683 rtx next = XEXP (link, 1);
5684 free_EXPR_LIST_node (link);
5685 *pprev = link = next;
5691 pprev = &XEXP (link, 1);
5698 if (insn == bb->end)
5706 /* Record INSN's block as BB. */
/* Grows the basic_block_for_insn varray on demand so INSN_UID (insn)
   always has a slot.  */
5709 set_block_for_insn (insn, bb)
5713 size_t uid = INSN_UID (insn);
5714 if (uid >= basic_block_for_insn->num_elements)
5718 /* Add one-eighth the size so we don't keep calling xrealloc. */
5719 new_size = uid + (uid + 7) / 8;
5721 VARRAY_GROW (basic_block_for_insn, new_size);
5723 VARRAY_BB (basic_block_for_insn, uid) = bb;
5726 /* Record INSN's block number as BB. */
5727 /* ??? This has got to go. */
/* Thin wrapper: translates a block index into the block pointer and
   delegates to set_block_for_insn.  */
5730 set_block_num (insn, bb)
5734 set_block_for_insn (insn, BASIC_BLOCK (bb));
5737 /* Verify the CFG consistency. This function check some CFG invariants and
5738 aborts when something is wrong. Hope that this function will help to
5739 convert many optimization passes to preserve CFG consistent.
5741 Currently it does following checks:
5743 - test head/end pointers
5744 - overlapping of basic blocks
5745 - edge list correctness
5746 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
5747 - tails of basic blocks (ensure that boundary is necessary)
5748 - scans body of the basic block for JUMP_INSN, CODE_LABEL
5749 and NOTE_INSN_BASIC_BLOCK
5750 - check that all insns are in the basic blocks
5751 (except the switch handling code, barriers and notes)
5753 In future it can be extended check a lot of other stuff as well
5754 (reachability of basic blocks, life information, etc. etc.). */
5759 const int max_uid = get_max_uid ();
5760 const rtx rtx_first = get_insns ();
5761 basic_block *bb_info;
5765 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
5767 /* First pass check head/end pointers and set bb_info array used by
5769 for (i = n_basic_blocks - 1; i >= 0; i--)
5771 basic_block bb = BASIC_BLOCK (i);
5773 /* Check the head pointer and make sure that it is pointing into
5775 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
5780 error ("Head insn %d for block %d not found in the insn stream.",
5781 INSN_UID (bb->head), bb->index);
5785 /* Check the end pointer and make sure that it is pointing into
5787 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5789 if (bb_info[INSN_UID (x)] != NULL)
5791 error ("Insn %d is in multiple basic blocks (%d and %d)",
5792 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
5795 bb_info[INSN_UID (x)] = bb;
5802 error ("End insn %d for block %d not found in the insn stream.",
5803 INSN_UID (bb->end), bb->index);
5808 /* Now check the basic blocks (boundaries etc.) */
5809 for (i = n_basic_blocks - 1; i >= 0; i--)
5811 basic_block bb = BASIC_BLOCK (i);
5812 /* Check correctness of edge lists */
/* Each successor edge must name BB as its source and appear on its
   destination's predecessor list; mirror check below for preds.  */
5820 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
5822 fprintf (stderr, "Predecessor: ");
5823 dump_edge_info (stderr, e, 0);
5824 fprintf (stderr, "\nSuccessor: ");
5825 dump_edge_info (stderr, e, 1);
5829 if (e->dest != EXIT_BLOCK_PTR)
5831 edge e2 = e->dest->pred;
5832 while (e2 && e2 != e)
5836 error ("Basic block %i edge lists are corrupted", bb->index);
5848 error ("Basic block %d pred edge is corrupted", bb->index);
5849 fputs ("Predecessor: ", stderr);
5850 dump_edge_info (stderr, e, 0);
5851 fputs ("\nSuccessor: ", stderr);
5852 dump_edge_info (stderr, e, 1);
5853 fputc ('\n', stderr);
5856 if (e->src != ENTRY_BLOCK_PTR)
5858 edge e2 = e->src->succ;
5859 while (e2 && e2 != e)
5863 error ("Basic block %i edge lists are corrupted", bb->index);
5870 /* OK pointers are correct. Now check the header of basic
5871 block. It ought to contain optional CODE_LABEL followed
5872 by NOTE_BASIC_BLOCK. */
5874 if (GET_CODE (x) == CODE_LABEL)
5878 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
5884 if (GET_CODE (x) != NOTE
5885 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
5886 || NOTE_BASIC_BLOCK (x) != bb)
5888 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
5895 /* Do checks for empty blocks here */
/* Scan the block body: no stray basic-block notes, and no control
   flow insns (those must end a block, not sit inside one).  */
5902 if (GET_CODE (x) == NOTE
5903 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
5905 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
5906 INSN_UID (x), bb->index);
5913 if (GET_CODE (x) == JUMP_INSN
5914 || GET_CODE (x) == CODE_LABEL
5915 || GET_CODE (x) == BARRIER)
5917 error ("In basic block %d:", bb->index);
5918 fatal_insn ("Flow control insn inside a basic block", x);
/* Finally, any insn outside every block must be one of the few
   legitimately floating kinds (jump tables, certain labels).  */
5929 if (!bb_info[INSN_UID (x)])
5931 switch (GET_CODE (x))
5938 /* An addr_vec is placed outside any block.  */
5940 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
5941 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
5942 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
5947 /* But in any case, non-deletable labels can appear anywhere. */
5951 fatal_insn ("Insn outside basic block", x);
5965 /* Functions to access an edge list with a vector representation.
5966 Enough data is kept such that given an index number, the
5967 pred and succ that edge represents can be determined, or
5968 given a pred and a succ, its index number can be returned.
5969 This allows algorithms which consume a lot of memory to
5970 represent the normally full matrix of edge (pred,succ) with a
5971 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
5972 wasted space in the client code due to sparse flow graphs. */
5974 /* This function initializes the edge list. Basically the entire
5975 flowgraph is processed, and all edges are assigned a number,
5976 and the data structure is filled in. */
/* Two passes: first count every successor edge (including those of
   the ENTRY block) to size the vector, then walk the same edges
   again recording each one in index_to_edge.  */
5980 struct edge_list *elist;
5986 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
5990 /* Determine the number of edges in the flow graph by counting successor
5991 edges on each basic block. */
5992 for (x = 0; x < n_basic_blocks; x++)
5994 basic_block bb = BASIC_BLOCK (x);
5996 for (e = bb->succ; e; e = e->succ_next)
5999 /* Don't forget successors of the entry block. */
6000 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6003 elist = xmalloc (sizeof (struct edge_list));
6004 elist->num_blocks = block_count;
6005 elist->num_edges = num_edges;
6006 elist->index_to_edge = xmalloc (sizeof (edge) * num_edges);
6010 /* Follow successors of the entry block, and register these edges. */
6011 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6013 elist->index_to_edge[num_edges] = e;
6017 for (x = 0; x < n_basic_blocks; x++)
6019 basic_block bb = BASIC_BLOCK (x);
6021 /* Follow all successors of blocks, and register these edges. */
6022 for (e = bb->succ; e; e = e->succ_next)
6024 elist->index_to_edge[num_edges] = e;
6031 /* This function frees the memory associated with an edge list.  */
6033 free_edge_list (elist)
6034 struct edge_list *elist;
6038 free (elist->index_to_edge);
6043 /* This function provides debug output showing an edge list.  F is the
   output stream; each edge is printed as index - (pred, succ), with
   the entry and exit blocks named rather than numbered.  */
6045 print_edge_list (f, elist)
6047 struct edge_list *elist;
6050 fprintf(f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6051 elist->num_blocks - 2, elist->num_edges);
6053 for (x = 0; x < elist->num_edges; x++)
6055 fprintf (f, " %-4d - edge(", x);
6056 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6057 fprintf (f,"entry,");
6059 fprintf (f,"%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6061 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6062 fprintf (f,"exit)\n");
6064 fprintf (f,"%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6068 /* This function provides an internal consistency check of an edge list,
6069 verifying that all edges are present, and that there are no
   spurious entries.  Discrepancies are reported to stream F.  */
6072 verify_edge_list (f, elist)
6074 struct edge_list *elist;
6076 int x, pred, succ, index;
/* Forward direction: every CFG edge must be findable in the list and
   must map back to the same pred/succ pair.  */
6079 for (x = 0; x < n_basic_blocks; x++)
6081 basic_block bb = BASIC_BLOCK (x);
6083 for (e = bb->succ; e; e = e->succ_next)
6085 pred = e->src->index;
6086 succ = e->dest->index;
6087 index = EDGE_INDEX (elist, e->src, e->dest);
6088 if (index == EDGE_INDEX_NO_EDGE)
6090 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6093 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6094 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6095 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6096 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6097 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6098 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
/* Same check for the successors of the ENTRY block.  */
6101 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6103 pred = e->src->index;
6104 succ = e->dest->index;
6105 index = EDGE_INDEX (elist, e->src, e->dest);
6106 if (index == EDGE_INDEX_NO_EDGE)
6108 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6111 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6112 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6113 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6114 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6115 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6116 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6118 /* We've verified that all the edges are in the list, now let's make sure
6119 there are no spurious edges in the list. */
/* Reverse direction: for every (pred, succ) block pair -- including
   pairs involving ENTRY and EXIT -- the list must have an index
   exactly when the CFG has the edge.  */
6121 for (pred = 0 ; pred < n_basic_blocks; pred++)
6122 for (succ = 0 ; succ < n_basic_blocks; succ++)
6124 basic_block p = BASIC_BLOCK (pred);
6125 basic_block s = BASIC_BLOCK (succ);
6129 for (e = p->succ; e; e = e->succ_next)
6135 for (e = s->pred; e; e = e->pred_next)
6141 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6142 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6143 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6145 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6146 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6147 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6148 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6149 BASIC_BLOCK (succ)));
6151 for (succ = 0 ; succ < n_basic_blocks; succ++)
6153 basic_block p = ENTRY_BLOCK_PTR;
6154 basic_block s = BASIC_BLOCK (succ);
6158 for (e = p->succ; e; e = e->succ_next)
6164 for (e = s->pred; e; e = e->pred_next)
6170 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6171 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6172 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6174 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6175 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6176 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6177 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6178 BASIC_BLOCK (succ)));
6180 for (pred = 0 ; pred < n_basic_blocks; pred++)
6182 basic_block p = BASIC_BLOCK (pred);
6183 basic_block s = EXIT_BLOCK_PTR;
6187 for (e = p->succ; e; e = e->succ_next)
6193 for (e = s->pred; e; e = e->pred_next)
6199 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6200 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6201 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6203 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6204 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6205 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6206 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6211 /* This routine will determine what, if any, edge there is between
6212 a specified predecessor and successor.  Returns the edge's index in
   EDGE_LIST, or EDGE_INDEX_NO_EDGE if no such edge exists.  Linear
   scan over the whole list.  */
6215 find_edge_index (edge_list, pred, succ)
6216 struct edge_list *edge_list;
6217 basic_block pred, succ;
6220 for (x = 0; x < NUM_EDGES (edge_list); x++)
6222 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6223 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6226 return (EDGE_INDEX_NO_EDGE);
6229 /* This function will remove an edge from the flow graph.  The edge is
   unlinked from both its source's successor list and its
   destination's predecessor list.  */
6234 edge last_pred = NULL;
6235 edge last_succ = NULL;
6237 basic_block src, dest;
/* Find E's position in the source's successor chain and splice it
   out (LAST_SUCC is the node just before E, or NULL if E is first).  */
6240 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6246 last_succ->succ_next = e->succ_next;
6248 src->succ = e->succ_next;
/* Likewise for the destination's predecessor chain.  */
6250 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6256 last_pred->pred_next = e->pred_next;
6258 dest->pred = e->pred_next;
6264 /* This routine will remove any fake successor edges for a basic block.
6265 When the edge is removed, it is also removed from whatever predecessor
   list it is on.  Fake edges carry the EDGE_FAKE flag.  */
6268 remove_fake_successors (bb)
6272 for (e = bb->succ; e ; )
6276 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6281 /* This routine will remove all fake edges from the flow graph. If
6282 we remove all fake successors, it will automatically remove all
6283 fake predecessors. */
6285 remove_fake_edges ()
6289 for (x = 0; x < n_basic_blocks; x++)
6290 remove_fake_successors (BASIC_BLOCK (x));
6292 /* We've handled all successors except the entry block's. */
6293 remove_fake_successors (ENTRY_BLOCK_PTR);
6296 /* This functions will add a fake edge between any block which has no
6297 successors, and the exit block. Some data flow equations require these
6300 add_noreturn_fake_exit_edges ()
6304 for (x = 0; x < n_basic_blocks; x++)
6305 if (BASIC_BLOCK (x)->succ == NULL)
6306 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);