1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 88, 92-98, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 /* This file contains the data flow analysis pass of the compiler. It
23 computes data flow information which tells combine_instructions
24 which insns to consider combining and controls register allocation.
26 Additional data flow information that is too bulky to record is
27 generated during the analysis, and is used at that time to create
28 autoincrement and autodecrement addressing.
30 The first step is dividing the function into basic blocks.
31 find_basic_blocks does this. Then life_analysis determines
32 where each register is live and where it is dead.
34 ** find_basic_blocks **
36 find_basic_blocks divides the current function's rtl into basic
37 blocks and constructs the CFG. The blocks are recorded in the
38 basic_block_info array; the CFG exists in the edge structures
39 referenced by the blocks.
41 find_basic_blocks also finds any unreachable loops and deletes them.
45 life_analysis is called immediately after find_basic_blocks.
46 It uses the basic block information to determine where each
47 hard or pseudo register is live.
49 ** live-register info **
51 The information about where each register is live is in two parts:
52 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
54 basic_block->global_live_at_start has an element for each basic
55 block, and the element is a bit-vector with a bit for each hard or
56 pseudo register. The bit is 1 if the register is live at the
57 beginning of the basic block.
59 Two types of elements can be added to an insn's REG_NOTES.
60 A REG_DEAD note is added to an insn's REG_NOTES for any register
61 that meets both of two conditions: The value in the register is not
62 needed in subsequent insns and the insn does not replace the value in
63 the register (in the case of multi-word hard registers, the value in
64 each register must be replaced by the insn to avoid a REG_DEAD note).
66 In the vast majority of cases, an object in a REG_DEAD note will be
67 used somewhere in the insn. The (rare) exception to this is if an
68 insn uses a multi-word hard register and only some of the registers are
69 needed in subsequent insns. In that case, REG_DEAD notes will be
70 provided for those hard registers that are not subsequently needed.
71 Partial REG_DEAD notes of this type do not occur when an insn sets
72 only some of the hard registers used in such a multi-word operand;
73 omitting REG_DEAD notes for objects stored in an insn is optional and
74 the desire to do so does not justify the complexity of the partial
77 REG_UNUSED notes are added for each register that is set by the insn
78 but is unused subsequently (if every register set by the insn is unused
79 and the insn does not reference memory or have some other side-effect,
80 the insn is deleted instead). If only part of a multi-word hard
81 register is used in a subsequent insn, REG_UNUSED notes are made for
82 the parts that will not be used.
84 To determine which registers are live after any insn, one can
85 start from the beginning of the basic block and scan insns, noting
86 which registers are set by each insn and which die there.
88 ** Other actions of life_analysis **
90 life_analysis sets up the LOG_LINKS fields of insns because the
91 information needed to do so is readily available.
93 life_analysis deletes insns whose only effect is to store a value
96 life_analysis notices cases where a reference to a register as
97 a memory address can be combined with a preceding or following
98 incrementation or decrementation of the register. The separate
99 instruction to increment or decrement is deleted and the address
100 is changed to a POST_INC or similar rtx.
102 Each time an incrementing or decrementing address is created,
103 a REG_INC element is added to the insn's REG_NOTES list.
105 life_analysis fills in certain vectors containing information about
106 register usage: reg_n_refs, reg_n_deaths, reg_n_sets, reg_live_length,
107 reg_n_calls_crosses and reg_basic_block.
109 life_analysis sets current_function_sp_is_unchanging if the function
110 doesn't modify the stack pointer. */
114 Split out from life_analysis:
115 - local property discovery (bb->local_live, bb->local_set)
116 - global property computation
118 - pre/post modify transformation
126 #include "basic-block.h"
127 #include "insn-config.h"
129 #include "hard-reg-set.h"
132 #include "function.h"
136 #include "insn-flags.h"
140 #define obstack_chunk_alloc xmalloc
141 #define obstack_chunk_free free
144 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
145 the stack pointer does not matter. The value is tested only in
146 functions that have frame pointers.
147 No definition is equivalent to always zero. */
148 #ifndef EXIT_IGNORE_STACK
149 #define EXIT_IGNORE_STACK 0
152 #ifndef HAVE_epilogue
153 #define HAVE_epilogue 0
156 #ifndef HAVE_prologue
157 #define HAVE_prologue 0
161 #define CLEAN_ALLOCA alloca (0)
167 /* The contents of the current function definition are allocated
168 in this obstack, and all are freed at the end of the function.
169 For top-level functions, this is temporary_obstack.
170 Separate obstacks are made for nested functions. */
172 extern struct obstack *function_obstack;
174 /* Number of basic blocks in the current function. */
178 /* Number of edges in the current function. */
182 /* The basic block array. */
184 varray_type basic_block_info;
186 /* The special entry and exit blocks. */
188 struct basic_block_def entry_exit_blocks[2] =
195 NULL, /* local_set */
196 NULL, /* global_live_at_start */
197 NULL, /* global_live_at_end */
199 ENTRY_BLOCK, /* index */
201 -1, -1 /* eh_beg, eh_end */
208 NULL, /* local_set */
209 NULL, /* global_live_at_start */
210 NULL, /* global_live_at_end */
212 EXIT_BLOCK, /* index */
214 -1, -1 /* eh_beg, eh_end */
218 /* Nonzero if the second flow pass has completed. */
221 /* Maximum register number used in this function, plus one. */
225 /* Indexed by n, giving various register information */
227 varray_type reg_n_info;
229 /* Size of the reg_n_info table. */
231 unsigned int reg_n_max;
233 /* Element N is the next insn that uses (hard or pseudo) register number N
234 within the current basic block; or zero, if there is no such insn.
235 This is valid only during the final backward scan in propagate_block. */
237 static rtx *reg_next_use;
239 /* Size of a regset for the current function,
240 in (1) bytes and (2) elements. */
245 /* Regset of regs live when calls to `setjmp'-like functions happen. */
246 /* ??? Does this exist only for the setjmp-clobbered warning message? */
248 regset regs_live_at_setjmp;
250 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
251 that have to go in the same hard reg.
252 The first two regs in the list are a pair, and the next two
253 are another pair, etc. */
256 /* Depth within loops of basic block being scanned for lifetime analysis,
257 plus one. This is the weight attached to references to registers. */
259 static int loop_depth;
261 /* During propagate_block, this is non-zero if the value of CC0 is live. */
265 /* During propagate_block, this contains a list of all the MEMs we are
266 tracking for dead store elimination. */
268 static rtx mem_set_list;
270 /* Set of registers that may be eliminable. These are handled specially
271 in updating regs_ever_live. */
273 static HARD_REG_SET elim_reg_set;
275 /* The basic block structure for every insn, indexed by uid. */
277 varray_type basic_block_for_insn;
279 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
280 /* ??? Should probably be using LABEL_NUSES instead. It would take a
281 bit of surgery to be able to use or co-opt the routines in jump. */
283 static rtx label_value_list;
285 /* INSN_VOLATILE (insn) is 1 if the insn refers to anything volatile. */
287 #define INSN_VOLATILE(INSN) bitmap_bit_p (uid_volatile, INSN_UID (INSN))
288 #define SET_INSN_VOLATILE(INSN) bitmap_set_bit (uid_volatile, INSN_UID (INSN))
289 static bitmap uid_volatile;
291 /* Forward declarations */
292 static int count_basic_blocks PROTO((rtx));
293 static rtx find_basic_blocks_1 PROTO((rtx));
294 static void create_basic_block PROTO((int, rtx, rtx, rtx));
295 static void clear_edges PROTO((void));
296 static void make_edges PROTO((rtx));
297 static void make_edge PROTO((sbitmap *, basic_block,
299 static void make_label_edge PROTO((sbitmap *, basic_block,
301 static void make_eh_edge PROTO((sbitmap *, eh_nesting_info *,
302 basic_block, rtx, int));
303 static void mark_critical_edges PROTO((void));
304 static void move_stray_eh_region_notes PROTO((void));
305 static void record_active_eh_regions PROTO((rtx));
307 static void commit_one_edge_insertion PROTO((edge));
309 static void delete_unreachable_blocks PROTO((void));
310 static void delete_eh_regions PROTO((void));
311 static int can_delete_note_p PROTO((rtx));
312 static int delete_block PROTO((basic_block));
313 static void expunge_block PROTO((basic_block));
314 static rtx flow_delete_insn PROTO((rtx));
315 static int can_delete_label_p PROTO((rtx));
316 static int merge_blocks_move_predecessor_nojumps PROTO((basic_block,
318 static int merge_blocks_move_successor_nojumps PROTO((basic_block,
320 static void merge_blocks_nomove PROTO((basic_block, basic_block));
321 static int merge_blocks PROTO((edge,basic_block,basic_block));
322 static void try_merge_blocks PROTO((void));
323 static void tidy_fallthru_edge PROTO((edge,basic_block,basic_block));
324 static void calculate_loop_depth PROTO((rtx));
326 static int verify_wide_reg_1 PROTO((rtx *, void *));
327 static void verify_wide_reg PROTO((int, rtx, rtx));
328 static void verify_local_live_at_start PROTO((regset, basic_block));
329 static int set_noop_p PROTO((rtx));
330 static int noop_move_p PROTO((rtx));
331 static void notice_stack_pointer_modification PROTO ((rtx, rtx, void *));
332 static void record_volatile_insns PROTO((rtx));
333 static void mark_reg PROTO((regset, rtx));
334 static void mark_regs_live_at_end PROTO((regset));
335 static void life_analysis_1 PROTO((rtx, int, int));
336 static void calculate_global_regs_live PROTO((sbitmap, sbitmap, int));
337 static void propagate_block PROTO((regset, rtx, rtx,
339 static int insn_dead_p PROTO((rtx, regset, int, rtx));
340 static int libcall_dead_p PROTO((rtx, regset, rtx, rtx));
341 static void mark_set_regs PROTO((regset, regset, rtx,
343 static void mark_set_1 PROTO((regset, regset, rtx,
346 static void find_auto_inc PROTO((regset, rtx, rtx));
347 static int try_pre_increment_1 PROTO((rtx));
348 static int try_pre_increment PROTO((rtx, rtx, HOST_WIDE_INT));
350 static void mark_used_regs PROTO((regset, regset, rtx, int, rtx));
351 void dump_flow_info PROTO((FILE *));
352 static void dump_edge_info PROTO((FILE *, edge, int));
354 static int_list_ptr alloc_int_list_node PROTO ((int_list_block **));
355 static int_list_ptr add_int_list_node PROTO ((int_list_block **,
358 static void add_pred_succ PROTO ((int, int, int_list_ptr *,
359 int_list_ptr *, int *, int *));
361 static void count_reg_sets_1 PROTO ((rtx));
362 static void count_reg_sets PROTO ((rtx));
363 static void count_reg_references PROTO ((rtx));
364 static void invalidate_mems_from_autoinc PROTO ((rtx));
365 static void remove_edge PROTO ((edge));
366 static void remove_fake_successors PROTO ((basic_block));
368 /* This function is always defined so it can be called from the
369 debugger, and it is declared extern so we don't get warnings about
371 void verify_flow_info PROTO ((void));
374 /* Find basic blocks of the current function.
375 F is the first insn of the function and NREGS the number of register
379 find_basic_blocks (f, nregs, file, do_cleanup)
381 int nregs ATTRIBUTE_UNUSED;
382 FILE *file ATTRIBUTE_UNUSED;
387 /* Flush out existing data. */
388 if (basic_block_info != NULL)
394 /* Clear bb->aux on all extant basic blocks. We'll use this as a
395 tag for reuse during create_basic_block, just in case some pass
396 copies around basic block notes improperly. */
397 for (i = 0; i < n_basic_blocks; ++i)
398 BASIC_BLOCK (i)->aux = NULL;
400 VARRAY_FREE (basic_block_info);
403 n_basic_blocks = count_basic_blocks (f);
405 /* Size the basic block table. The actual structures will be allocated
406 by find_basic_blocks_1, since we want to keep the structure pointers
407 stable across calls to find_basic_blocks. */
408 /* ??? This whole issue would be much simpler if we called find_basic_blocks
409 exactly once, and thereafter we don't have a single long chain of
410 instructions at all until close to the end of compilation when we
411 actually lay them out. */
413 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
415 label_value_list = find_basic_blocks_1 (f);
417 /* Record the block to which an insn belongs. */
418 /* ??? This should be done another way, by which (perhaps) a label is
419 tagged directly with the basic block that it starts. It is used for
420 more than that currently, but IMO that is the only valid use. */
422 max_uid = get_max_uid ();
424 /* Leave space for insns life_analysis makes in some cases for auto-inc.
425 These cases are rare, so we don't need too much space. */
426 max_uid += max_uid / 10;
429 compute_bb_for_insn (max_uid);
431 /* Discover the edges of our cfg. */
433 record_active_eh_regions (f);
434 make_edges (label_value_list);
436 /* Delete unreachable blocks, then merge blocks when possible. */
440 delete_unreachable_blocks ();
441 move_stray_eh_region_notes ();
442 record_active_eh_regions (f);
446 /* Mark critical edges. */
448 mark_critical_edges ();
450 /* Discover the loop depth at the start of each basic block to aid
451 register allocation. */
452 calculate_loop_depth (f);
454 /* Kill the data we won't maintain. */
455 label_value_list = NULL_RTX;
457 #ifdef ENABLE_CHECKING
462 /* Count the basic blocks of the function. */
465 count_basic_blocks (f)
469 register RTX_CODE prev_code;
470 register int count = 0;
472 int call_had_abnormal_edge = 0;
473 rtx prev_call = NULL_RTX;
475 prev_code = JUMP_INSN;
476 for (insn = f; insn; insn = NEXT_INSN (insn))
478 register RTX_CODE code = GET_CODE (insn);
480 if (code == CODE_LABEL
481 || (GET_RTX_CLASS (code) == 'i'
482 && (prev_code == JUMP_INSN
483 || prev_code == BARRIER
484 || (prev_code == CALL_INSN && call_had_abnormal_edge))))
488 /* If the previous insn was a call that did not create an
489 abnormal edge, we want to add a nop so that the CALL_INSN
490 itself is not at basic_block_end. This allows us to
491 easily distinguish between normal calls and those which
492 create abnormal edges in the flow graph. */
494 if (count > 0 && prev_call != 0 && !call_had_abnormal_edge)
496 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
497 emit_insn_after (nop, prev_call);
501 /* Record whether this call created an edge. */
502 if (code == CALL_INSN)
504 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
505 int region = (note ? XWINT (XEXP (note, 0), 0) : 1);
507 call_had_abnormal_edge = 0;
509 /* If there is a specified EH region, we have an edge. */
510 if (eh_region && region > 0)
511 call_had_abnormal_edge = 1;
514 /* If there is a nonlocal goto label and the specified
515 region number isn't -1, we have an edge. (0 means
516 no throw, but might have a nonlocal goto). */
517 if (nonlocal_goto_handler_labels && region >= 0)
518 call_had_abnormal_edge = 1;
521 else if (code != NOTE)
522 prev_call = NULL_RTX;
526 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
528 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
533 /* The rest of the compiler works a bit smoother when we don't have to
534 check for the edge case of do-nothing functions with no basic blocks. */
537 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
544 /* Find all basic blocks of the function whose first insn is F.
546 Collect and return a list of labels whose addresses are taken. This
547 will be used in make_edges for use with computed gotos. */
550 find_basic_blocks_1 (f)
553 register rtx insn, next;
554 int call_has_abnormal_edge = 0;
556 rtx bb_note = NULL_RTX;
557 rtx eh_list = NULL_RTX;
558 rtx label_value_list = NULL_RTX;
562 /* We process the instructions in a slightly different way than we did
563 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
564 closed out the previous block, so that it gets attached at the proper
565 place. Since this form should be equivalent to the previous,
566 count_basic_blocks continues to use the old form as a check. */
568 for (insn = f; insn; insn = next)
570 enum rtx_code code = GET_CODE (insn);
572 next = NEXT_INSN (insn);
574 if (code == CALL_INSN)
576 /* Record whether this call created an edge. */
577 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
578 int region = (note ? XWINT (XEXP (note, 0), 0) : 1);
579 call_has_abnormal_edge = 0;
581 /* If there is an EH region, we have an edge. */
582 if (eh_list && region > 0)
583 call_has_abnormal_edge = 1;
586 /* If there is a nonlocal goto label and the specified
587 region number isn't -1, we have an edge. (0 means
588 no throw, but might have a nonlocal goto). */
589 if (nonlocal_goto_handler_labels && region >= 0)
590 call_has_abnormal_edge = 1;
598 int kind = NOTE_LINE_NUMBER (insn);
600 /* Keep a LIFO list of the currently active exception notes. */
601 if (kind == NOTE_INSN_EH_REGION_BEG)
602 eh_list = alloc_INSN_LIST (insn, eh_list);
603 else if (kind == NOTE_INSN_EH_REGION_END)
606 eh_list = XEXP (eh_list, 1);
607 free_INSN_LIST_node (t);
610 /* Look for basic block notes with which to keep the
611 basic_block_info pointers stable. Unthread the note now;
612 we'll put it back at the right place in create_basic_block.
613 Or not at all if we've already found a note in this block. */
614 else if (kind == NOTE_INSN_BASIC_BLOCK)
616 if (bb_note == NULL_RTX)
618 next = flow_delete_insn (insn);
625 /* A basic block starts at a label. If we've closed one off due
626 to a barrier or some such, no need to do it again. */
627 if (head != NULL_RTX)
629 /* While we now have edge lists with which other portions of
630 the compiler might determine a call ending a basic block
631 does not imply an abnormal edge, it will be a bit before
632 everything can be updated. So continue to emit a noop at
633 the end of such a block. */
634 if (GET_CODE (end) == CALL_INSN)
636 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
637 end = emit_insn_after (nop, end);
640 create_basic_block (i++, head, end, bb_note);
647 /* A basic block ends at a jump. */
648 if (head == NULL_RTX)
652 /* ??? Make a special check for table jumps. The way this
653 happens is truly and amazingly gross. We are about to
654 create a basic block that contains just a code label and
655 an addr*vec jump insn. Worse, an addr_diff_vec creates
656 its own natural loop.
658 Prevent this bit of brain damage, pasting things together
659 correctly in make_edges.
661 The correct solution involves emitting the table directly
662 on the tablejump instruction as a note, or JUMP_LABEL. */
664 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
665 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
673 goto new_bb_inclusive;
676 /* A basic block ends at a barrier. It may be that an unconditional
677 jump already closed the basic block -- no need to do it again. */
678 if (head == NULL_RTX)
681 /* While we now have edge lists with which other portions of the
682 compiler might determine a call ending a basic block does not
683 imply an abnormal edge, it will be a bit before everything can
684 be updated. So continue to emit a noop at the end of such a
686 if (GET_CODE (end) == CALL_INSN)
688 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
689 end = emit_insn_after (nop, end);
691 goto new_bb_exclusive;
694 /* A basic block ends at a call that can either throw or
695 do a non-local goto. */
696 if (call_has_abnormal_edge)
699 if (head == NULL_RTX)
704 create_basic_block (i++, head, end, bb_note);
705 head = end = NULL_RTX;
712 if (GET_RTX_CLASS (code) == 'i')
714 if (head == NULL_RTX)
721 if (GET_RTX_CLASS (code) == 'i')
725 /* Make a list of all labels referred to other than by jumps
726 (which just don't have the REG_LABEL notes).
728 Make a special exception for labels followed by an ADDR*VEC,
729 as this would be a part of the tablejump setup code.
731 Make a special exception for the eh_return_stub_label, which
732 we know isn't part of any otherwise visible control flow. */
734 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
735 if (REG_NOTE_KIND (note) == REG_LABEL)
737 rtx lab = XEXP (note, 0), next;
739 if (lab == eh_return_stub_label)
741 else if ((next = next_nonnote_insn (lab)) != NULL
742 && GET_CODE (next) == JUMP_INSN
743 && (GET_CODE (PATTERN (next)) == ADDR_VEC
744 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
748 = alloc_EXPR_LIST (0, XEXP (note, 0), label_value_list);
753 if (head != NULL_RTX)
754 create_basic_block (i++, head, end, bb_note);
756 if (i != n_basic_blocks)
759 return label_value_list;
762 /* Create a new basic block consisting of the instructions between
763 HEAD and END inclusive. Reuses the note and basic block struct
764 in BB_NOTE, if any. */
767 create_basic_block (index, head, end, bb_note)
769 rtx head, end, bb_note;
774 && ! RTX_INTEGRATED_P (bb_note)
775 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
778 /* If we found an existing note, thread it back onto the chain. */
780 if (GET_CODE (head) == CODE_LABEL)
781 add_insn_after (bb_note, head);
784 add_insn_before (bb_note, head);
790 /* Otherwise we must create a note and a basic block structure.
791 Since we allow basic block structs in rtl, give the struct
792 the same lifetime by allocating it off the function obstack
793 rather than using malloc. */
795 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
796 memset (bb, 0, sizeof (*bb));
798 if (GET_CODE (head) == CODE_LABEL)
799 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
802 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
805 NOTE_BASIC_BLOCK (bb_note) = bb;
808 /* Always include the bb note in the block. */
809 if (NEXT_INSN (end) == bb_note)
815 BASIC_BLOCK (index) = bb;
817 /* Tag the block so that we know it has been used when considering
818 other basic block notes. */
822 /* Records the basic block struct in BB_FOR_INSN, for every instruction
823 indexed by INSN_UID. MAX is the size of the array. */
826 compute_bb_for_insn (max)
831 if (basic_block_for_insn)
832 VARRAY_FREE (basic_block_for_insn);
833 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
835 for (i = 0; i < n_basic_blocks; ++i)
837 basic_block bb = BASIC_BLOCK (i);
844 int uid = INSN_UID (insn);
846 VARRAY_BB (basic_block_for_insn, uid) = bb;
849 insn = NEXT_INSN (insn);
854 /* Free the memory associated with the edge structures. */
862 for (i = 0; i < n_basic_blocks; ++i)
864 basic_block bb = BASIC_BLOCK (i);
866 for (e = bb->succ; e ; e = n)
876 for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
882 ENTRY_BLOCK_PTR->succ = 0;
883 EXIT_BLOCK_PTR->pred = 0;
888 /* Identify the edges between basic blocks.
890 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
891 that are otherwise unreachable may be reachable with a non-local goto.
893 BB_EH_END is an array indexed by basic block number in which we record
894 the list of exception regions active at the end of the basic block. */
897 make_edges (label_value_list)
898 rtx label_value_list;
901 eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
902 sbitmap *edge_cache = NULL;
904 /* Assume no computed jump; revise as we create edges. */
905 current_function_has_computed_jump = 0;
907 /* Heavy use of computed goto in machine-generated code can lead to
908 nearly fully-connected CFGs. In that case we spend a significant
909 amount of time searching the edge lists for duplicates. */
910 if (forced_labels || label_value_list)
912 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
913 sbitmap_vector_zero (edge_cache, n_basic_blocks);
916 /* By nature of the way these get numbered, block 0 is always the entry. */
917 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
919 for (i = 0; i < n_basic_blocks; ++i)
921 basic_block bb = BASIC_BLOCK (i);
924 int force_fallthru = 0;
926 /* Examine the last instruction of the block, and discover the
927 ways we can leave the block. */
930 code = GET_CODE (insn);
933 if (code == JUMP_INSN)
937 /* ??? Recognize a tablejump and do the right thing. */
938 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
939 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
940 && GET_CODE (tmp) == JUMP_INSN
941 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
942 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
947 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
948 vec = XVEC (PATTERN (tmp), 0);
950 vec = XVEC (PATTERN (tmp), 1);
952 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
953 make_label_edge (edge_cache, bb,
954 XEXP (RTVEC_ELT (vec, j), 0), 0);
956 /* Some targets (eg, ARM) emit a conditional jump that also
957 contains the out-of-range target. Scan for these and
958 add an edge if necessary. */
959 if ((tmp = single_set (insn)) != NULL
960 && SET_DEST (tmp) == pc_rtx
961 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
962 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
963 make_label_edge (edge_cache, bb,
964 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
966 #ifdef CASE_DROPS_THROUGH
967 /* Silly VAXen. The ADDR_VEC is going to be in the way of
968 us naturally detecting fallthru into the next block. */
973 /* If this is a computed jump, then mark it as reaching
974 everything on the label_value_list and forced_labels list. */
975 else if (computed_jump_p (insn))
977 current_function_has_computed_jump = 1;
979 for (x = label_value_list; x; x = XEXP (x, 1))
980 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
982 for (x = forced_labels; x; x = XEXP (x, 1))
983 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
986 /* Returns create an exit out. */
987 else if (returnjump_p (insn))
988 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
990 /* Otherwise, we have a plain conditional or unconditional jump. */
993 if (! JUMP_LABEL (insn))
995 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
999 /* If this is a CALL_INSN, then mark it as reaching the active EH
1000 handler for this CALL_INSN. If we're handling asynchronous
1001 exceptions then any insn can reach any of the active handlers.
1003 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
1005 if (code == CALL_INSN || asynchronous_exceptions)
1007 /* If there's an EH region active at the end of a block,
1008 add the appropriate edges. */
1009 if (bb->eh_end >= 0)
1010 make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);
1012 /* If we have asynchronous exceptions, do the same for *all*
1013 exception regions active in the block. */
1014 if (asynchronous_exceptions
1015 && bb->eh_beg != bb->eh_end)
1017 if (bb->eh_beg >= 0)
1018 make_eh_edge (edge_cache, eh_nest_info, bb,
1019 NULL_RTX, bb->eh_beg);
1021 for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
1022 if (GET_CODE (x) == NOTE
1023 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
1024 || NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
1026 int region = NOTE_EH_HANDLER (x);
1027 make_eh_edge (edge_cache, eh_nest_info, bb,
1032 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1034 /* ??? This could be made smarter: in some cases it's possible
1035 to tell that certain calls will not do a nonlocal goto.
1037 For example, if the nested functions that do the nonlocal
1038 gotos do not have their addresses taken, then only calls to
1039 those functions or to other nested functions that use them
1040 could possibly do nonlocal gotos. */
1041 /* We do know that a REG_EH_REGION note with a value less
1042 than 0 is guaranteed not to perform a non-local goto. */
1043 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1044 if (!note || XINT (XEXP (note, 0), 0) >= 0)
1045 for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
1046 make_label_edge (edge_cache, bb, XEXP (x, 0),
1047 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1051 /* We know something about the structure of the function __throw in
1052 libgcc2.c. It is the only function that ever contains eh_stub
1053 labels. It modifies its return address so that the last block
1054 returns to one of the eh_stub labels within it. So we have to
1055 make additional edges in the flow graph. */
1056 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
1057 make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);
1059 /* Find out if we can drop through to the next block. */
1060 insn = next_nonnote_insn (insn);
1061 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1062 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1063 else if (i + 1 < n_basic_blocks)
1065 rtx tmp = BLOCK_HEAD (i + 1);
1066 if (GET_CODE (tmp) == NOTE)
1067 tmp = next_nonnote_insn (tmp);
1068 if (force_fallthru || insn == tmp)
1069 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1073 free_eh_nesting_info (eh_nest_info);
1075 sbitmap_vector_free (edge_cache);
1078 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1079 about the edge that is accumulated between calls. */
1082 make_edge (edge_cache, src, dst, flags)
1083 sbitmap *edge_cache;
1084 basic_block src, dst;
1090 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1091 many edges to them, and we didn't allocate memory for it. */
1092 use_edge_cache = (edge_cache
1093 && src != ENTRY_BLOCK_PTR
1094 && dst != EXIT_BLOCK_PTR);
1096 /* Make sure we don't add duplicate edges. */
1097 if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
1098 for (e = src->succ; e ; e = e->succ_next)
1105 e = (edge) xcalloc (1, sizeof (*e));
1108 e->succ_next = src->succ;
1109 e->pred_next = dst->pred;
1118 SET_BIT (edge_cache[src->index], dst->index);
1121 /* Create an edge from a basic block to a label. */
1124 make_label_edge (edge_cache, src, label, flags)
1125 sbitmap *edge_cache;
1130 if (GET_CODE (label) != CODE_LABEL)
1133 /* If the label was never emitted, this insn is junk, but avoid a
1134 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1135 as a result of a syntax error and a diagnostic has already been
1138 if (INSN_UID (label) == 0)
1141 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1144 /* Create the edges generated by INSN in REGION. */
1147 make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
1148 sbitmap *edge_cache;
1149 eh_nesting_info *eh_nest_info;
1154 handler_info **handler_list;
1157 is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1158 num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
1161 make_label_edge (edge_cache, src, handler_list[num]->handler_label,
1162 EDGE_ABNORMAL | EDGE_EH | is_call);
1166 /* EH_REGION notes appearing between basic blocks is ambiguous, and even
1167 dangerous if we intend to move basic blocks around. Move such notes
1168 into the following block. */
1171 move_stray_eh_region_notes ()
1176 if (n_basic_blocks < 2)
1179 b2 = BASIC_BLOCK (n_basic_blocks - 1);
1180 for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
1182 rtx insn, next, list = NULL_RTX;
1184 b1 = BASIC_BLOCK (i);
1185 for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
1187 next = NEXT_INSN (insn);
1188 if (GET_CODE (insn) == NOTE
1189 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1190 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1192 /* Unlink from the insn chain. */
1193 NEXT_INSN (PREV_INSN (insn)) = next;
1194 PREV_INSN (next) = PREV_INSN (insn);
1197 NEXT_INSN (insn) = list;
1202 if (list == NULL_RTX)
1205 /* Find where to insert these things. */
1207 if (GET_CODE (insn) == CODE_LABEL)
1208 insn = NEXT_INSN (insn);
1212 next = NEXT_INSN (list);
1213 add_insn_after (list, insn);
1219 /* Recompute eh_beg/eh_end for each basic block. */
1222 record_active_eh_regions (f)
1225 rtx insn, eh_list = NULL_RTX;
1227 basic_block bb = BASIC_BLOCK (0);
1229 for (insn = f; insn ; insn = NEXT_INSN (insn))
1231 if (bb->head == insn)
1232 bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1234 if (GET_CODE (insn) == NOTE)
1236 int kind = NOTE_LINE_NUMBER (insn);
1237 if (kind == NOTE_INSN_EH_REGION_BEG)
1238 eh_list = alloc_INSN_LIST (insn, eh_list);
1239 else if (kind == NOTE_INSN_EH_REGION_END)
1241 rtx t = XEXP (eh_list, 1);
1242 free_INSN_LIST_node (eh_list);
1247 if (bb->end == insn)
1249 bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1251 if (i == n_basic_blocks)
1253 bb = BASIC_BLOCK (i);
1258 /* Identify critical edges and set the bits appropriately. */
1261 mark_critical_edges ()
1263 int i, n = n_basic_blocks;
1266 /* We begin with the entry block. This is not terribly important now,
1267 but could be if a front end (Fortran) implemented alternate entry
1269 bb = ENTRY_BLOCK_PTR;
1276 /* (1) Critical edges must have a source with multiple successors. */
1277 if (bb->succ && bb->succ->succ_next)
1279 for (e = bb->succ; e ; e = e->succ_next)
1281 /* (2) Critical edges must have a destination with multiple
1282 predecessors. Note that we know there is at least one
1283 predecessor -- the edge we followed to get here. */
1284 if (e->dest->pred->pred_next)
1285 e->flags |= EDGE_CRITICAL;
1287 e->flags &= ~EDGE_CRITICAL;
1292 for (e = bb->succ; e ; e = e->succ_next)
1293 e->flags &= ~EDGE_CRITICAL;
1298 bb = BASIC_BLOCK (i);
1302 /* Split a (typically critical) edge. Return the new block.
1303 Abort on abnormal edges.
1305 ??? The code generally expects to be called on critical edges.
1306 The case of a block ending in an unconditional jump to a
1307 block with multiple predecessors is not handled optimally. */
1310 split_edge (edge_in)
1313 basic_block old_pred, bb, old_succ;
1318 /* Abnormal edges cannot be split. */
1319 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1322 old_pred = edge_in->src;
1323 old_succ = edge_in->dest;
1325 /* Remove the existing edge from the destination's pred list. */
1328 for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
1330 *pp = edge_in->pred_next;
1331 edge_in->pred_next = NULL;
1334 /* Create the new structures. */
1335 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
1336 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1339 memset (bb, 0, sizeof (*bb));
1340 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
1341 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
1343 /* ??? This info is likely going to be out of date very soon. */
1344 if (old_succ->global_live_at_start)
1346 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1347 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
1351 CLEAR_REG_SET (bb->global_live_at_start);
1352 CLEAR_REG_SET (bb->global_live_at_end);
1357 bb->succ = edge_out;
1360 edge_in->flags &= ~EDGE_CRITICAL;
1362 edge_out->pred_next = old_succ->pred;
1363 edge_out->succ_next = NULL;
1365 edge_out->dest = old_succ;
1366 edge_out->flags = EDGE_FALLTHRU;
1367 edge_out->probability = REG_BR_PROB_BASE;
1369 old_succ->pred = edge_out;
1371 /* Tricky case -- if there existed a fallthru into the successor
1372 (and we're not it) we must add a new unconditional jump around
1373 the new block we're actually interested in.
1375 Further, if that edge is critical, this means a second new basic
1376 block must be created to hold it. In order to simplify correct
1377 insn placement, do this before we touch the existing basic block
1378 ordering for the block we were really wanting. */
1379 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1382 for (e = edge_out->pred_next; e ; e = e->pred_next)
1383 if (e->flags & EDGE_FALLTHRU)
1388 basic_block jump_block;
1391 if ((e->flags & EDGE_CRITICAL) == 0)
1393 /* Non critical -- we can simply add a jump to the end
1394 of the existing predecessor. */
1395 jump_block = e->src;
1399 /* We need a new block to hold the jump. The simplest
1400 way to do the bulk of the work here is to recursively
1402 jump_block = split_edge (e);
1403 e = jump_block->succ;
1406 /* Now add the jump insn ... */
1407 pos = emit_jump_insn_after (gen_jump (old_succ->head),
1409 jump_block->end = pos;
1410 emit_barrier_after (pos);
1412 /* ... let jump know that label is in use, ... */
1413 JUMP_LABEL (pos) = old_succ->head;
1414 ++LABEL_NUSES (old_succ->head);
1416 /* ... and clear fallthru on the outgoing edge. */
1417 e->flags &= ~EDGE_FALLTHRU;
1419 /* Continue splitting the interesting edge. */
1423 /* Place the new block just in front of the successor. */
1424 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1425 if (old_succ == EXIT_BLOCK_PTR)
1426 j = n_basic_blocks - 1;
1428 j = old_succ->index;
1429 for (i = n_basic_blocks - 1; i > j; --i)
1431 basic_block tmp = BASIC_BLOCK (i - 1);
1432 BASIC_BLOCK (i) = tmp;
1435 BASIC_BLOCK (i) = bb;
1438 /* Create the basic block note. */
1439 if (old_succ != EXIT_BLOCK_PTR)
1440 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1442 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
1443 NOTE_BASIC_BLOCK (bb_note) = bb;
1444 bb->head = bb->end = bb_note;
1446 /* Not quite simple -- for non-fallthru edges, we must adjust the
1447 predecessor's jump instruction to target our new block. */
1448 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1450 rtx tmp, insn = old_pred->end;
1451 rtx old_label = old_succ->head;
1452 rtx new_label = gen_label_rtx ();
1454 if (GET_CODE (insn) != JUMP_INSN)
1457 /* ??? Recognize a tablejump and adjust all matching cases. */
1458 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1459 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1460 && GET_CODE (tmp) == JUMP_INSN
1461 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1462 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1467 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1468 vec = XVEC (PATTERN (tmp), 0);
1470 vec = XVEC (PATTERN (tmp), 1);
1472 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1473 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1475 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1476 --LABEL_NUSES (old_label);
1477 ++LABEL_NUSES (new_label);
1480 /* Handle casesi dispatch insns */
1481 if ((tmp = single_set (insn)) != NULL
1482 && SET_DEST (tmp) == pc_rtx
1483 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1484 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1485 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1487 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1489 --LABEL_NUSES (old_label);
1490 ++LABEL_NUSES (new_label);
1495 /* This would have indicated an abnormal edge. */
1496 if (computed_jump_p (insn))
1499 /* A return instruction can't be redirected. */
1500 if (returnjump_p (insn))
1503 /* If the insn doesn't go where we think, we're confused. */
1504 if (JUMP_LABEL (insn) != old_label)
1507 redirect_jump (insn, new_label);
1510 emit_label_before (new_label, bb_note);
1511 bb->head = new_label;
1517 /* Queue instructions for insertion on an edge between two basic blocks.
1518 The new instructions and basic blocks (if any) will not appear in the
1519 CFG until commit_edge_insertions is called. */
1522 insert_insn_on_edge (pattern, e)
1526 /* We cannot insert instructions on an abnormal critical edge.
1527 It will be easier to find the culprit if we die now. */
1528 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1529 == (EDGE_ABNORMAL|EDGE_CRITICAL))
1532 if (e->insns == NULL_RTX)
1535 push_to_sequence (e->insns);
1537 emit_insn (pattern);
1539 e->insns = get_insns ();
1543 /* Update the CFG for the instructions queued on edge E. */
1546 commit_one_edge_insertion (e)
1549 rtx before = NULL_RTX, after = NULL_RTX, tmp;
1552 /* Figure out where to put these things. If the destination has
1553 one predecessor, insert there. Except for the exit block. */
1554 if (e->dest->pred->pred_next == NULL
1555 && e->dest != EXIT_BLOCK_PTR)
1559 /* Get the location correct wrt a code label, and "nice" wrt
1560 a basic block note, and before everything else. */
1562 if (GET_CODE (tmp) == CODE_LABEL)
1563 tmp = NEXT_INSN (tmp);
1564 if (GET_CODE (tmp) == NOTE
1565 && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
1566 tmp = NEXT_INSN (tmp);
1567 if (tmp == bb->head)
1570 after = PREV_INSN (tmp);
1573 /* If the source has one successor and the edge is not abnormal,
1574 insert there. Except for the entry block. */
1575 else if ((e->flags & EDGE_ABNORMAL) == 0
1576 && e->src->succ->succ_next == NULL
1577 && e->src != ENTRY_BLOCK_PTR)
1580 if (GET_CODE (bb->end) == JUMP_INSN)
1582 /* ??? Is it possible to wind up with non-simple jumps? Perhaps
1583 a jump with delay slots already filled? */
1584 if (! simplejump_p (bb->end))
1591 /* We'd better be fallthru, or we've lost track of what's what. */
1592 if ((e->flags & EDGE_FALLTHRU) == 0)
1599 /* Otherwise we must split the edge. */
1602 bb = split_edge (e);
1606 /* Now that we've found the spot, do the insertion. */
1608 e->insns = NULL_RTX;
1610 /* Set the new block number for these insns, if structure is allocated. */
1611 if (basic_block_for_insn)
1614 for (i = tmp; i != NULL_RTX; i = NEXT_INSN (i))
1615 set_block_for_insn (i, bb);
1620 emit_insns_before (tmp, before);
1621 if (before == bb->head)
1626 tmp = emit_insns_after (tmp, after);
1627 if (after == bb->end)
1632 /* Update the CFG for all queued instructions. */
1635 commit_edge_insertions ()
1641 bb = ENTRY_BLOCK_PTR;
1646 for (e = bb->succ; e ; e = next)
1648 next = e->succ_next;
1650 commit_one_edge_insertion (e);
1653 if (++i >= n_basic_blocks)
1655 bb = BASIC_BLOCK (i);
1659 /* Delete all unreachable basic blocks. */
1662 delete_unreachable_blocks ()
1664 basic_block *worklist, *tos;
1665 int deleted_handler;
1670 tos = worklist = (basic_block *) alloca (sizeof (basic_block) * n);
1672 /* Use basic_block->aux as a marker. Clear them all. */
1674 for (i = 0; i < n; ++i)
1675 BASIC_BLOCK (i)->aux = NULL;
1677 /* Add our starting points to the worklist. Almost always there will
1678 be only one. It isn't inconcievable that we might one day directly
1679 support Fortran alternate entry points. */
1681 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
1685 /* Mark the block with a handy non-null value. */
1689 /* Iterate: find everything reachable from what we've already seen. */
1691 while (tos != worklist)
1693 basic_block b = *--tos;
1695 for (e = b->succ; e ; e = e->succ_next)
1703 /* Delete all unreachable basic blocks. Count down so that we don't
1704 interfere with the block renumbering that happens in delete_block. */
1706 deleted_handler = 0;
1708 for (i = n - 1; i >= 0; --i)
1710 basic_block b = BASIC_BLOCK (i);
1713 /* This block was found. Tidy up the mark. */
1716 deleted_handler |= delete_block (b);
1719 /* Fix up edges that now fall through, or rather should now fall through
1720 but previously required a jump around now deleted blocks. Simplify
1721 the search by only examining blocks numerically adjacent, since this
1722 is how find_basic_blocks created them. */
1724 for (i = 1; i < n_basic_blocks; ++i)
1726 basic_block b = BASIC_BLOCK (i - 1);
1727 basic_block c = BASIC_BLOCK (i);
1730 /* We care about simple conditional or unconditional jumps with
1733 If we had a conditional branch to the next instruction when
1734 find_basic_blocks was called, then there will only be one
1735 out edge for the block which ended with the conditional
1736 branch (since we do not create duplicate edges).
1738 Furthermore, the edge will be marked as a fallthru because we
1739 merge the flags for the duplicate edges. So we do not want to
1740 check that the edge is not a FALLTHRU edge. */
1741 if ((s = b->succ) != NULL
1742 && s->succ_next == NULL
1744 /* If the jump insn has side effects, we can't tidy the edge. */
1745 && (GET_CODE (b->end) != JUMP_INSN
1746 || onlyjump_p (b->end)))
1747 tidy_fallthru_edge (s, b, c);
1750 /* If we deleted an exception handler, we may have EH region begin/end
1751 blocks to remove as well. */
1752 if (deleted_handler)
1753 delete_eh_regions ();
1756 /* Find EH regions for which there is no longer a handler, and delete them. */
1759 delete_eh_regions ()
1763 update_rethrow_references ();
1765 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1766 if (GET_CODE (insn) == NOTE)
1768 if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
1769 (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1771 int num = NOTE_EH_HANDLER (insn);
1772 /* A NULL handler indicates a region is no longer needed,
1773 as long as it isn't the target of a rethrow. */
1774 if (get_first_handler (num) == NULL && ! rethrow_used (num))
1776 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1777 NOTE_SOURCE_FILE (insn) = 0;
1783 /* Return true if NOTE is not one of the ones that must be kept paired,
1784 so that we may simply delete them. */
1787 can_delete_note_p (note)
1790 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
1791 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
1794 /* Unlink a chain of insns between START and FINISH, leaving notes
1795 that must be paired. */
1798 flow_delete_insn_chain (start, finish)
1801 /* Unchain the insns one by one. It would be quicker to delete all
1802 of these with a single unchaining, rather than one at a time, but
1803 we need to keep the NOTE's. */
1809 next = NEXT_INSN (start);
1810 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
1812 else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
1815 next = flow_delete_insn (start);
1817 if (start == finish)
1823 /* Delete the insns in a (non-live) block. We physically delete every
1824 non-deleted-note insn, and update the flow graph appropriately.
1826 Return nonzero if we deleted an exception handler. */
1828 /* ??? Preserving all such notes strikes me as wrong. It would be nice
1829 to post-process the stream to remove empty blocks, loops, ranges, etc. */
1835 int deleted_handler = 0;
1838 /* If the head of this block is a CODE_LABEL, then it might be the
1839 label for an exception handler which can't be reached.
1841 We need to remove the label from the exception_handler_label list
1842 and remove the associated NOTE_INSN_EH_REGION_BEG and
1843 NOTE_INSN_EH_REGION_END notes. */
1847 never_reached_warning (insn);
1849 if (GET_CODE (insn) == CODE_LABEL)
1851 rtx x, *prev = &exception_handler_labels;
1853 for (x = exception_handler_labels; x; x = XEXP (x, 1))
1855 if (XEXP (x, 0) == insn)
1857 /* Found a match, splice this label out of the EH label list. */
1858 *prev = XEXP (x, 1);
1859 XEXP (x, 1) = NULL_RTX;
1860 XEXP (x, 0) = NULL_RTX;
1862 /* Remove the handler from all regions */
1863 remove_handler (insn);
1864 deleted_handler = 1;
1867 prev = &XEXP (x, 1);
1870 /* This label may be referenced by code solely for its value, or
1871 referenced by static data, or something. We have determined
1872 that it is not reachable, but cannot delete the label itself.
1873 Save code space and continue to delete the balance of the block,
1874 along with properly updating the cfg. */
1875 if (!can_delete_label_p (insn))
1877 /* If we've only got one of these, skip the whole deleting
1880 goto no_delete_insns;
1881 insn = NEXT_INSN (insn);
1885 /* Selectively unlink the insn chain. Include any BARRIER that may
1886 follow the basic block. */
1887 end = next_nonnote_insn (b->end);
1888 if (!end || GET_CODE (end) != BARRIER)
1890 flow_delete_insn_chain (insn, end);
1894 /* Remove the edges into and out of this block. Note that there may
1895 indeed be edges in, if we are removing an unreachable loop. */
1899 for (e = b->pred; e ; e = next)
1901 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
1904 next = e->pred_next;
1908 for (e = b->succ; e ; e = next)
1910 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
1913 next = e->succ_next;
1922 /* Remove the basic block from the array, and compact behind it. */
1925 return deleted_handler;
1928 /* Remove block B from the basic block array and compact behind it. */
1934 int i, n = n_basic_blocks;
1936 for (i = b->index; i + 1 < n; ++i)
1938 basic_block x = BASIC_BLOCK (i + 1);
1939 BASIC_BLOCK (i) = x;
1943 basic_block_info->num_elements--;
1947 /* Delete INSN by patching it out. Return the next insn. */
1950 flow_delete_insn (insn)
1953 rtx prev = PREV_INSN (insn);
1954 rtx next = NEXT_INSN (insn);
1956 PREV_INSN (insn) = NULL_RTX;
1957 NEXT_INSN (insn) = NULL_RTX;
1960 NEXT_INSN (prev) = next;
1962 PREV_INSN (next) = prev;
1964 set_last_insn (prev);
1966 if (GET_CODE (insn) == CODE_LABEL)
1967 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
1969 /* If deleting a jump, decrement the use count of the label. Deleting
1970 the label itself should happen in the normal course of block merging. */
1971 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
1972 LABEL_NUSES (JUMP_LABEL (insn))--;
1977 /* True if a given label can be deleted. */
1980 can_delete_label_p (label)
1985 if (LABEL_PRESERVE_P (label))
1988 for (x = forced_labels; x ; x = XEXP (x, 1))
1989 if (label == XEXP (x, 0))
1991 for (x = label_value_list; x ; x = XEXP (x, 1))
1992 if (label == XEXP (x, 0))
1994 for (x = exception_handler_labels; x ; x = XEXP (x, 1))
1995 if (label == XEXP (x, 0))
1998 /* User declared labels must be preserved. */
1999 if (LABEL_NAME (label) != 0)
2005 /* Blocks A and B are to be merged into a single block. A has no incoming
2006 fallthru edge, so it can be moved before B without adding or modifying
2007 any jumps (aside from the jump from A to B). */
2010 merge_blocks_move_predecessor_nojumps (a, b)
2013 rtx start, end, barrier;
2019 /* We want to delete the BARRIER after the end of the insns we are
2020 going to move. If we don't find a BARRIER, then do nothing. This
2021 can happen in some cases if we have labels we can not delete.
2023 Similarly, do nothing if we can not delete the label at the start
2024 of the target block. */
2025 barrier = next_nonnote_insn (end);
2026 if (GET_CODE (barrier) != BARRIER
2027 || (GET_CODE (b->head) == CODE_LABEL
2028 && ! can_delete_label_p (b->head)))
2031 flow_delete_insn (barrier);
2033 /* Move block and loop notes out of the chain so that we do not
2034 disturb their order.
2036 ??? A better solution would be to squeeze out all the non-nested notes
2037 and adjust the block trees appropriately. Even better would be to have
2038 a tighter connection between block trees and rtl so that this is not
2040 start = squeeze_notes (start, end);
2042 /* Scramble the insn chain. */
2043 if (end != PREV_INSN (b->head))
2044 reorder_insns (start, end, PREV_INSN (b->head));
2048 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2049 a->index, b->index);
2052 /* Swap the records for the two blocks around. Although we are deleting B,
2053 A is now where B was and we want to compact the BB array from where
2055 BASIC_BLOCK(a->index) = b;
2056 BASIC_BLOCK(b->index) = a;
2058 a->index = b->index;
2061 /* Now blocks A and B are contiguous. Merge them. */
2062 merge_blocks_nomove (a, b);
2067 /* Blocks A and B are to be merged into a single block. B has no outgoing
2068 fallthru edge, so it can be moved after A without adding or modifying
2069 any jumps (aside from the jump from A to B). */
2072 merge_blocks_move_successor_nojumps (a, b)
2075 rtx start, end, barrier;
2080 /* We want to delete the BARRIER after the end of the insns we are
2081 going to move. If we don't find a BARRIER, then do nothing. This
2082 can happen in some cases if we have labels we can not delete.
2084 Similarly, do nothing if we can not delete the label at the start
2085 of the target block. */
2086 barrier = next_nonnote_insn (end);
2087 if (GET_CODE (barrier) != BARRIER
2088 || (GET_CODE (b->head) == CODE_LABEL
2089 && ! can_delete_label_p (b->head)))
2092 flow_delete_insn (barrier);
2094 /* Move block and loop notes out of the chain so that we do not
2095 disturb their order.
2097 ??? A better solution would be to squeeze out all the non-nested notes
2098 and adjust the block trees appropriately. Even better would be to have
2099 a tighter connection between block trees and rtl so that this is not
2101 start = squeeze_notes (start, end);
2103 /* Scramble the insn chain. */
2104 reorder_insns (start, end, a->end);
2106 /* Now blocks A and B are contiguous. Merge them. */
2107 merge_blocks_nomove (a, b);
2111 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2112 b->index, a->index);
2118 /* Blocks A and B are to be merged into a single block. The insns
2119 are already contiguous, hence `nomove'. */
2122 merge_blocks_nomove (a, b)
2126 rtx b_head, b_end, a_end;
2129 /* If there was a CODE_LABEL beginning B, delete it. */
2132 if (GET_CODE (b_head) == CODE_LABEL)
2134 /* Detect basic blocks with nothing but a label. This can happen
2135 in particular at the end of a function. */
2136 if (b_head == b_end)
2138 b_head = flow_delete_insn (b_head);
2141 /* Delete the basic block note. */
2142 if (GET_CODE (b_head) == NOTE
2143 && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
2145 if (b_head == b_end)
2147 b_head = flow_delete_insn (b_head);
2150 /* If there was a jump out of A, delete it. */
2152 if (GET_CODE (a_end) == JUMP_INSN)
2156 prev = prev_nonnote_insn (a_end);
2161 /* If this was a conditional jump, we need to also delete
2162 the insn that set cc0. */
2164 if (prev && sets_cc0_p (prev))
2167 prev = prev_nonnote_insn (prev);
2170 flow_delete_insn (tmp);
2174 /* Note that a->head != a->end, since we should have at least a
2175 bb note plus the jump, so prev != insn. */
2176 flow_delete_insn (a_end);
2180 /* By definition, there should only be one successor of A, and that is
2181 B. Free that edge struct. */
2185 /* Adjust the edges out of B for the new owner. */
2186 for (e = b->succ; e ; e = e->succ_next)
2190 /* Reassociate the insns of B with A. */
2193 BLOCK_FOR_INSN (b_head) = a;
2194 while (b_head != b_end)
2196 b_head = NEXT_INSN (b_head);
2197 BLOCK_FOR_INSN (b_head) = a;
2203 /* Compact the basic block array. */
2207 /* Attempt to merge basic blocks that are potentially non-adjacent.
2208 Return true iff the attempt succeeded. */
2211 merge_blocks (e, b, c)
2215 /* If B has a fallthru edge to C, no need to move anything. */
2216 if (e->flags & EDGE_FALLTHRU)
2218 /* If a label still appears somewhere and we cannot delete the label,
2219 then we cannot merge the blocks. The edge was tidied already. */
2221 rtx insn, stop = NEXT_INSN (c->head);
2222 for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
2223 if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
2226 merge_blocks_nomove (b, c);
2230 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2231 b->index, c->index);
2240 int c_has_outgoing_fallthru;
2241 int b_has_incoming_fallthru;
2243 /* We must make sure to not munge nesting of exception regions,
2244 lexical blocks, and loop notes.
2246 The first is taken care of by requiring that the active eh
2247 region at the end of one block always matches the active eh
2248 region at the beginning of the next block.
2250 The later two are taken care of by squeezing out all the notes. */
2252 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2253 executed and we may want to treat blocks which have two out
2254 edges, one normal, one abnormal as only having one edge for
2255 block merging purposes. */
2257 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2258 if (tmp_edge->flags & EDGE_FALLTHRU)
2260 c_has_outgoing_fallthru = (tmp_edge != NULL);
2262 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2263 if (tmp_edge->flags & EDGE_FALLTHRU)
2265 b_has_incoming_fallthru = (tmp_edge != NULL);
2267 /* If B does not have an incoming fallthru, and the exception regions
2268 match, then it can be moved immediately before C without introducing
2271 C can not be the first block, so we do not have to worry about
2272 accessing a non-existent block. */
2273 d = BASIC_BLOCK (c->index - 1);
2274 if (! b_has_incoming_fallthru
2275 && d->eh_end == b->eh_beg
2276 && b->eh_end == c->eh_beg)
2277 return merge_blocks_move_predecessor_nojumps (b, c);
2279 /* Otherwise, we're going to try to move C after B. Make sure the
2280 exception regions match.
2282 If B is the last basic block, then we must not try to access the
2283 block structure for block B + 1. Luckily in that case we do not
2284 need to worry about matching exception regions. */
2285 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2286 if (b->eh_end == c->eh_beg
2287 && (d == NULL || c->eh_end == d->eh_beg))
2289 /* If C does not have an outgoing fallthru, then it can be moved
2290 immediately after B without introducing or modifying jumps. */
2291 if (! c_has_outgoing_fallthru)
2292 return merge_blocks_move_successor_nojumps (b, c);
2294 /* Otherwise, we'll need to insert an extra jump, and possibly
2295 a new block to contain it. */
2296 /* ??? Not implemented yet. */
2303 /* Top level driver for merge_blocks. */
2310 /* Attempt to merge blocks as made possible by edge removal. If a block
2311 has only one successor, and the successor has only one predecessor,
2312 they may be combined. */
2314 for (i = 0; i < n_basic_blocks; )
2316 basic_block c, b = BASIC_BLOCK (i);
2319 /* A loop because chains of blocks might be combineable. */
2320 while ((s = b->succ) != NULL
2321 && s->succ_next == NULL
2322 && (s->flags & EDGE_EH) == 0
2323 && (c = s->dest) != EXIT_BLOCK_PTR
2324 && c->pred->pred_next == NULL
2325 /* If the jump insn has side effects, we can't kill the edge. */
2326 && (GET_CODE (b->end) != JUMP_INSN
2327 || onlyjump_p (b->end))
2328 && merge_blocks (s, b, c))
2331 /* Don't get confused by the index shift caused by deleting blocks. */
2336 /* The given edge should potentially a fallthru edge. If that is in
2337 fact true, delete the unconditional jump and barriers that are in
2341 tidy_fallthru_edge (e, b, c)
2347 /* ??? In a late-running flow pass, other folks may have deleted basic
2348 blocks by nopping out blocks, leaving multiple BARRIERs between here
2349 and the target label. They ought to be chastized and fixed.
2351 We can also wind up with a sequence of undeletable labels between
2352 one block and the next.
2354 So search through a sequence of barriers, labels, and notes for
2355 the head of block C and assert that we really do fall through. */
2357 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2360 /* Remove what will soon cease being the jump insn from the source block.
2361 If block B consisted only of this single jump, turn it into a deleted
2364 if (GET_CODE (q) == JUMP_INSN)
2367 /* If this was a conditional jump, we need to also delete
2368 the insn that set cc0. */
2369 if (! simplejump_p (q) && condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2376 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2377 NOTE_SOURCE_FILE (q) = 0;
2380 b->end = q = PREV_INSN (q);
2383 /* Selectively unlink the sequence. */
2384 if (q != PREV_INSN (c->head))
2385 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2387 e->flags |= EDGE_FALLTHRU;
2390 /* Discover and record the loop depth at the head of each basic block. */
2393 calculate_loop_depth (insns)
2398 int i = 0, depth = 1;
2400 bb = BASIC_BLOCK (i);
2401 for (insn = insns; insn ; insn = NEXT_INSN (insn))
2403 if (insn == bb->head)
2405 bb->loop_depth = depth;
2406 if (++i >= n_basic_blocks)
2408 bb = BASIC_BLOCK (i);
2411 if (GET_CODE (insn) == NOTE)
2413 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2415 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
2418 /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error. */
2425 /* Perform data flow analysis.
2426 F is the first insn of the function and NREGS the number of register numbers
2430 life_analysis (f, nregs, file, remove_dead_code)
2434 int remove_dead_code;
2436 #ifdef ELIMINABLE_REGS
2438 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2442 /* Record which registers will be eliminated. We use this in
2445 CLEAR_HARD_REG_SET (elim_reg_set);
2447 #ifdef ELIMINABLE_REGS
2448 for (i = 0; i < sizeof eliminables / sizeof eliminables[0]; i++)
2449 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2451 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2454 /* Allocate a bitmap to be filled in by record_volatile_insns. */
2455 uid_volatile = BITMAP_ALLOCA ();
2457 /* We want alias analysis information for local dead store elimination. */
2458 init_alias_analysis ();
2461 if (! remove_dead_code)
2462 flags &= ~(PROP_SCAN_DEAD_CODE | PROP_KILL_DEAD_CODE);
2463 life_analysis_1 (f, nregs, flags);
2465 if (! reload_completed)
2466 mark_constant_function ();
2468 end_alias_analysis ();
2471 dump_flow_info (file);
2473 BITMAP_FREE (uid_volatile);
2474 free_basic_block_vars (1);
2477 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2478 Search for REGNO. If found, abort if it is not wider than word_mode. */
2481 verify_wide_reg_1 (px, pregno)
2486 int regno = *(int *) pregno;
2488 if (GET_CODE (x) == REG && REGNO (x) == regno)
2490 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2497 /* A subroutine of verify_local_live_at_start. Search through insns
2498 between HEAD and END looking for register REGNO. */
2501 verify_wide_reg (regno, head, end)
2507 if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
2508 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, ®no))
2512 head = NEXT_INSN (head);
2515 /* We didn't find the register at all. Something's way screwy. */
2519 /* A subroutine of update_life_info. Verify that there are no untoward
2520 changes in live_at_start during a local update. */
2523 verify_local_live_at_start (new_live_at_start, bb)
2524 regset new_live_at_start;
2527 if (reload_completed)
2529 /* After reload, there are no pseudos, nor subregs of multi-word
2530 registers. The regsets should exactly match. */
2531 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2538 /* Find the set of changed registers. */
2539 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2541 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2543 /* No registers should die. */
2544 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2546 /* Verify that the now-live register is wider than word_mode. */
2547 verify_wide_reg (i, bb->head, bb->end);
2552 /* Updates death notes starting with the basic blocks set in BLOCKS.
2554 If LOCAL_ONLY, such as after splitting or peepholeing, we are only
2555 expecting local modifications to basic blocks. If we find extra
2556 registers live at the beginning of a block, then we either killed
2557 useful data, or we have a broken split that wants data not provided.
2558 If we find registers removed from live_at_start, that means we have
2559 a broken peephole that is killing a register it shouldn't.
2561 ??? This is not true in one situation -- when a pre-reload splitter
2562 generates subregs of a multi-word pseudo, current life analysis will
2563 lose the kill. So we _can_ have a pseudo go live. How irritating.
2565 BLOCK_FOR_INSN is assumed to be correct.
2567 ??? PROP_FLAGS should not contain PROP_LOG_LINKS. Need to set up
2568 reg_next_use for that. Including PROP_REG_INFO does not refresh
2569 regs_ever_live unless the caller resets it to zero. */
2572 update_life_info (blocks, extent, prop_flags)
2574 enum update_life_extent extent;
2580 tmp = ALLOCA_REG_SET ();
2582 /* For a global update, we go through the relaxation process again. */
2583 if (extent != UPDATE_LIFE_LOCAL)
2585 calculate_global_regs_live (blocks, blocks,
2586 prop_flags & PROP_SCAN_DEAD_CODE);
2588 /* If asked, remove notes from the blocks we'll update. */
2589 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2590 count_or_remove_death_notes (blocks, 1);
2593 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2595 basic_block bb = BASIC_BLOCK (i);
2597 COPY_REG_SET (tmp, bb->global_live_at_end);
2598 propagate_block (tmp, bb->head, bb->end, (regset) NULL, i,
2601 if (extent == UPDATE_LIFE_LOCAL)
2602 verify_local_live_at_start (tmp, bb);
2610 /* Free the variables allocated by find_basic_blocks.
2612 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2615 free_basic_block_vars (keep_head_end_p)
2616 int keep_head_end_p;
2618 if (basic_block_for_insn)
2620 VARRAY_FREE (basic_block_for_insn);
2621 basic_block_for_insn = NULL;
2624 if (! keep_head_end_p)
2627 VARRAY_FREE (basic_block_info);
2630 ENTRY_BLOCK_PTR->aux = NULL;
2631 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2632 EXIT_BLOCK_PTR->aux = NULL;
2633 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2637 /* Return nonzero if the destination of SET equals the source. */
2642 rtx src = SET_SRC (set);
2643 rtx dst = SET_DEST (set);
2644 if (GET_CODE (src) == REG && GET_CODE (dst) == REG
2645 && REGNO (src) == REGNO (dst))
2647 if (GET_CODE (src) != SUBREG || GET_CODE (dst) != SUBREG
2648 || SUBREG_WORD (src) != SUBREG_WORD (dst))
2650 src = SUBREG_REG (src);
2651 dst = SUBREG_REG (dst);
2652 if (GET_CODE (src) == REG && GET_CODE (dst) == REG
2653 && REGNO (src) == REGNO (dst))
2658 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2664 rtx pat = PATTERN (insn);
2666 /* Insns carrying these notes are useful later on. */
2667 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2670 if (GET_CODE (pat) == SET && set_noop_p (pat))
2673 if (GET_CODE (pat) == PARALLEL)
2676 /* If nothing but SETs of registers to themselves,
2677 this insn can also be deleted. */
2678 for (i = 0; i < XVECLEN (pat, 0); i++)
2680 rtx tem = XVECEXP (pat, 0, i);
2682 if (GET_CODE (tem) == USE
2683 || GET_CODE (tem) == CLOBBER)
2686 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2696 notice_stack_pointer_modification (x, pat, data)
2698 rtx pat ATTRIBUTE_UNUSED;
2699 void *data ATTRIBUTE_UNUSED;
2701 if (x == stack_pointer_rtx
2702 /* The stack pointer is only modified indirectly as the result
2703 of a push until later in flow. See the comments in rtl.texi
2704 regarding Embedded Side-Effects on Addresses. */
2705 || (GET_CODE (x) == MEM
2706 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2707 || GET_CODE (XEXP (x, 0)) == PRE_INC
2708 || GET_CODE (XEXP (x, 0)) == POST_DEC
2709 || GET_CODE (XEXP (x, 0)) == POST_INC)
2710 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2711 current_function_sp_is_unchanging = 0;
2714 /* Record which insns refer to any volatile memory
2715 or for any reason can't be deleted just because they are dead stores.
2716 Also, delete any insns that copy a register to itself.
2717 And see if the stack pointer is modified. */
2719 record_volatile_insns (f)
2723 for (insn = f; insn; insn = NEXT_INSN (insn))
2725 enum rtx_code code1 = GET_CODE (insn);
2726 if (code1 == CALL_INSN)
2727 SET_INSN_VOLATILE (insn);
2728 else if (code1 == INSN || code1 == JUMP_INSN)
2730 if (GET_CODE (PATTERN (insn)) != USE
2731 && volatile_refs_p (PATTERN (insn)))
2732 SET_INSN_VOLATILE (insn);
2734 /* A SET that makes space on the stack cannot be dead.
2735 (Such SETs occur only for allocating variable-size data,
2736 so they will always have a PLUS or MINUS according to the
2737 direction of stack growth.)
2738 Even if this function never uses this stack pointer value,
2739 signal handlers do! */
2740 else if (code1 == INSN && GET_CODE (PATTERN (insn)) == SET
2741 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
2742 #ifdef STACK_GROWS_DOWNWARD
2743 && GET_CODE (SET_SRC (PATTERN (insn))) == MINUS
2745 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
2747 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx)
2748 SET_INSN_VOLATILE (insn);
2750 /* Delete (in effect) any obvious no-op moves. */
2751 else if (noop_move_p (insn))
2753 PUT_CODE (insn, NOTE);
2754 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2755 NOTE_SOURCE_FILE (insn) = 0;
2759 /* Check if insn modifies the stack pointer. */
2760 if ( current_function_sp_is_unchanging
2761 && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2762 note_stores (PATTERN (insn),
2763 notice_stack_pointer_modification,
2768 /* Mark a register in SET. Hard registers in large modes get all
2769 of their component registers set as well. */
2775 int regno = REGNO (reg);
2777 SET_REGNO_REG_SET (set, regno);
2778 if (regno < FIRST_PSEUDO_REGISTER)
2780 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2782 SET_REGNO_REG_SET (set, regno + n);
2786 /* Mark those regs which are needed at the end of the function as live
2787 at the end of the last basic block. */
2789 mark_regs_live_at_end (set)
2795 /* If exiting needs the right stack value, consider the stack pointer
2796 live at the end of the function. */
2797 if ((HAVE_epilogue && reload_completed)
2798 || ! EXIT_IGNORE_STACK
2799 || (! FRAME_POINTER_REQUIRED
2800 && ! current_function_calls_alloca
2801 && flag_omit_frame_pointer)
2802 || current_function_sp_is_unchanging)
2804 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2807 /* Mark the frame pointer if needed at the end of the function. If
2808 we end up eliminating it, it will be removed from the live list
2809 of each basic block by reload. */
2811 if (! reload_completed || frame_pointer_needed)
2813 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2814 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2815 /* If they are different, also mark the hard frame pointer as live */
2816 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
2820 #ifdef PIC_OFFSET_TABLE_REGNUM
2821 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
2822 /* Many architectures have a GP register even without flag_pic.
2823 Assume the pic register is not in use, or will be handled by
2824 other means, if it is not fixed. */
2825 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
2826 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
2830 /* Mark all global registers, and all registers used by the epilogue
2831 as being live at the end of the function since they may be
2832 referenced by our caller. */
2833 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2835 #ifdef EPILOGUE_USES
2836 || EPILOGUE_USES (i)
2839 SET_REGNO_REG_SET (set, i);
2841 /* Mark all call-saved registers that we actaully used. */
2842 if (HAVE_epilogue && reload_completed)
2844 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2845 if (! call_used_regs[i] && regs_ever_live[i])
2846 SET_REGNO_REG_SET (set, i);
2849 /* Mark function return value. */
2850 /* ??? Only do this after reload. Consider a non-void function that
2851 omits a return statement. Across that edge we'll have the return
2852 register live, and no set for it. Thus the return register will
2853 be live back through the CFG to the entry, and thus we die. A
2854 possible solution is to emit a clobber at exits without returns. */
2856 type = TREE_TYPE (DECL_RESULT (current_function_decl));
2857 if (reload_completed
2858 && type != void_type_node)
2862 if (current_function_returns_struct
2863 || current_function_returns_pcc_struct)
2864 type = build_pointer_type (type);
2866 #ifdef FUNCTION_OUTGOING_VALUE
2867 outgoing = FUNCTION_OUTGOING_VALUE (type, current_function_decl);
2869 outgoing = FUNCTION_VALUE (type, current_function_decl);
2872 if (GET_CODE (outgoing) == REG)
2873 mark_reg (set, outgoing);
2874 else if (GET_CODE (outgoing) == PARALLEL)
2876 int len = XVECLEN (outgoing, 0);
2878 /* Check for a NULL entry, used to indicate that the parameter
2879 goes on the stack and in registers. */
2880 i = (XEXP (XVECEXP (outgoing, 0, 0), 0) ? 0 : 1);
2882 for ( ; i < len; ++i)
2884 rtx r = XVECEXP (outgoing, 0, i);
2885 if (GET_CODE (r) == REG)
2894 /* Determine which registers are live at the start of each
2895 basic block of the function whose first insn is F.
2896 NREGS is the number of registers used in F.
2897 We allocate the vector basic_block_live_at_start
2898 and the regsets that it points to, and fill them with the data.
2899 regset_size and regset_bytes are also set here. */
2902 life_analysis_1 (f, nregs, flags)
2907 char save_regs_ever_live[FIRST_PSEUDO_REGISTER];
2912 /* Allocate and zero out many data structures
2913 that will record the data from lifetime analysis. */
2915 allocate_reg_life_data ();
2916 allocate_bb_life_data ();
2918 reg_next_use = (rtx *) alloca (nregs * sizeof (rtx));
2919 memset (reg_next_use, 0, nregs * sizeof (rtx));
2921 /* Assume that the stack pointer is unchanging if alloca hasn't been used.
2922 This will be cleared by record_volatile_insns if it encounters an insn
2923 which modifies the stack pointer. */
2924 current_function_sp_is_unchanging = !current_function_calls_alloca;
2925 record_volatile_insns (f);
2927 /* Find the set of registers live on function exit. Do this before
2928 zeroing regs_ever_live, as we use that data post-reload. */
2929 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2931 /* The post-reload life analysis have (on a global basis) the same
2932 registers live as was computed by reload itself. elimination
2933 Otherwise offsets and such may be incorrect.
2935 Reload will make some registers as live even though they do not
2936 appear in the rtl. */
2937 if (reload_completed)
2938 memcpy (save_regs_ever_live, regs_ever_live, sizeof (regs_ever_live));
2939 memset (regs_ever_live, 0, sizeof regs_ever_live);
2941 /* Compute register life at block boundaries. It'd be nice to
2942 begin with just the exit and noreturn blocks, but that set
2943 is not immediately handy. */
2946 blocks = sbitmap_alloc (n_basic_blocks);
2947 sbitmap_ones (blocks);
2948 calculate_global_regs_live (blocks, blocks, flags & PROP_SCAN_DEAD_CODE);
2951 /* The only pseudos that are live at the beginning of the function are
2952 those that were not set anywhere in the function. local-alloc doesn't
2953 know how to handle these correctly, so mark them as not local to any
2956 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2957 FIRST_PSEUDO_REGISTER, i,
2958 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2960 /* Now the life information is accurate. Make one more pass over each
2961 basic block to delete dead stores, create autoincrement addressing
2962 and record how many times each register is used, is set, or dies. */
2965 tmp = ALLOCA_REG_SET ();
2967 for (i = n_basic_blocks - 1; i >= 0; --i)
2969 basic_block bb = BASIC_BLOCK (i);
2971 COPY_REG_SET (tmp, bb->global_live_at_end);
2972 propagate_block (tmp, bb->head, bb->end, (regset) NULL, i, flags);
2980 /* We have a problem with any pseudoreg that lives across the setjmp.
2981 ANSI says that if a user variable does not change in value between
2982 the setjmp and the longjmp, then the longjmp preserves it. This
2983 includes longjmp from a place where the pseudo appears dead.
2984 (In principle, the value still exists if it is in scope.)
2985 If the pseudo goes in a hard reg, some other value may occupy
2986 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2987 Conclusion: such a pseudo must not go in a hard reg. */
2988 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2989 FIRST_PSEUDO_REGISTER, i,
2991 if (regno_reg_rtx[i] != 0)
2993 REG_LIVE_LENGTH (i) = -1;
2994 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
2998 /* Restore regs_ever_live that was provided by reload. */
2999 if (reload_completed)
3000 memcpy (regs_ever_live, save_regs_ever_live, sizeof (regs_ever_live));
3002 reg_next_use = NULL;
3005 /* Propagate global life info around the graph of basic blocks. Begin
3006 considering blocks with their corresponding bit set in BLOCKS_IN.
3007 BLOCKS_OUT is set for every block that was changed. */
3010 calculate_global_regs_live (blocks_in, blocks_out, flags)
3011 sbitmap blocks_in, blocks_out;
3014 basic_block *queue, *qhead, *qtail, *qend;
3015 regset tmp, new_live_at_end;
3018 tmp = ALLOCA_REG_SET ();
3019 new_live_at_end = ALLOCA_REG_SET ();
3021 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
3022 because the `head == tail' style test for an empty queue doesn't
3023 work with a full queue. */
3024 queue = (basic_block *) alloca ((n_basic_blocks + 2) * sizeof (*queue));
3026 qhead = qend = queue + n_basic_blocks + 2;
3028 /* Queue the blocks set in the initial mask. Do this in reverse block
3029 number order so that we are more likely for the first round to do
3030 useful work. We use AUX non-null to flag that the block is queued. */
3031 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3033 basic_block bb = BASIC_BLOCK (i);
3038 sbitmap_zero (blocks_out);
3040 while (qhead != qtail)
3042 int rescan, changed;
3051 /* Begin by propogating live_at_start from the successor blocks. */
3052 CLEAR_REG_SET (new_live_at_end);
3053 for (e = bb->succ; e ; e = e->succ_next)
3055 basic_block sb = e->dest;
3056 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3059 if (bb == ENTRY_BLOCK_PTR)
3061 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3065 /* On our first pass through this block, we'll go ahead and continue.
3066 Recognize first pass by local_set NULL. On subsequent passes, we
3067 get to skip out early if live_at_end wouldn't have changed. */
3069 if (bb->local_set == NULL)
3071 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3076 /* If any bits were removed from live_at_end, we'll have to
3077 rescan the block. This wouldn't be necessary if we had
3078 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3079 local_live is really dependant on live_at_end. */
3080 CLEAR_REG_SET (tmp);
3081 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3082 new_live_at_end, BITMAP_AND_COMPL);
3086 /* Find the set of changed bits. Take this opportunity
3087 to notice that this set is empty and early out. */
3088 CLEAR_REG_SET (tmp);
3089 changed = bitmap_operation (tmp, bb->global_live_at_end,
3090 new_live_at_end, BITMAP_XOR);
3094 /* If any of the changed bits overlap with local_set,
3095 we'll have to rescan the block. Detect overlap by
3096 the AND with ~local_set turning off bits. */
3097 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3102 /* Let our caller know that BB changed enough to require its
3103 death notes updated. */
3104 SET_BIT (blocks_out, bb->index);
3108 /* Add to live_at_start the set of all registers in
3109 new_live_at_end that aren't in the old live_at_end. */
3111 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3113 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3115 changed = bitmap_operation (bb->global_live_at_start,
3116 bb->global_live_at_start,
3123 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3125 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3126 into live_at_start. */
3127 propagate_block (new_live_at_end, bb->head, bb->end,
3128 bb->local_set, i, flags);
3130 /* If live_at start didn't change, no need to go farther. */
3131 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3134 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3137 /* Queue all predecessors of BB so that we may re-examine
3138 their live_at_end. */
3139 for (e = bb->pred; e ; e = e->pred_next)
3141 basic_block pb = e->src;
3142 if (pb->aux == NULL)
3153 FREE_REG_SET (new_live_at_end);
3155 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3157 basic_block bb = BASIC_BLOCK (i);
3158 FREE_REG_SET (bb->local_set);
3162 /* Subroutines of life analysis. */
3164 /* Allocate the permanent data structures that represent the results
3165 of life analysis. Not static since used also for stupid life analysis. */
3168 allocate_bb_life_data ()
3172 for (i = 0; i < n_basic_blocks; i++)
3174 basic_block bb = BASIC_BLOCK (i);
3176 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3177 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3180 ENTRY_BLOCK_PTR->global_live_at_end
3181 = OBSTACK_ALLOC_REG_SET (function_obstack);
3182 EXIT_BLOCK_PTR->global_live_at_start
3183 = OBSTACK_ALLOC_REG_SET (function_obstack);
3185 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
3189 allocate_reg_life_data ()
3193 /* Recalculate the register space, in case it has grown. Old style
3194 vector oriented regsets would set regset_{size,bytes} here also. */
3195 allocate_reg_info (max_regno, FALSE, FALSE);
3197 /* Reset all the data we'll collect in propagate_block and its
3199 for (i = 0; i < max_regno; i++)
3203 REG_N_DEATHS (i) = 0;
3204 REG_N_CALLS_CROSSED (i) = 0;
3205 REG_LIVE_LENGTH (i) = 0;
3206 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3210 /* Compute the registers live at the beginning of a basic block
3211 from those live at the end.
3213 When called, OLD contains those live at the end.
3214 On return, it contains those live at the beginning.
3215 FIRST and LAST are the first and last insns of the basic block.
3217 FINAL is nonzero if we are doing the final pass which is not
3218 for computing the life info (since that has already been done)
3219 but for acting on it. On this pass, we delete dead stores,
3220 set up the logical links and dead-variables lists of instructions,
3221 and merge instructions for autoincrement and autodecrement addresses.
3223 SIGNIFICANT is nonzero only the first time for each basic block.
3224 If it is nonzero, it points to a regset in which we store
3225 a 1 for each register that is set within the block.
3227 BNUM is the number of the basic block. */
3230 propagate_block (old, first, last, significant, bnum, flags)
3231 register regset old;
3243 /* Find the loop depth for this block. Ignore loop level changes in the
3244 middle of the basic block -- for register allocation purposes, the
3245 important uses will be in the blocks wholely contained within the loop
3246 not in the loop pre-header or post-trailer. */
3247 loop_depth = BASIC_BLOCK (bnum)->loop_depth;
3249 dead = ALLOCA_REG_SET ();
3250 live = ALLOCA_REG_SET ();
3254 if (flags & PROP_REG_INFO)
3258 /* Process the regs live at the end of the block.
3259 Mark them as not local to any one basic block. */
3260 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3262 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
3266 /* Scan the block an insn at a time from end to beginning. */
3268 for (insn = last; ; insn = prev)
3270 prev = PREV_INSN (insn);
3272 if (GET_CODE (insn) == NOTE)
3274 /* If this is a call to `setjmp' et al,
3275 warn if any non-volatile datum is live. */
3277 if ((flags & PROP_REG_INFO)
3278 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3279 IOR_REG_SET (regs_live_at_setjmp, old);
3282 /* Update the life-status of regs for this insn.
3283 First DEAD gets which regs are set in this insn
3284 then LIVE gets which regs are used in this insn.
3285 Then the regs live before the insn
3286 are those live after, with DEAD regs turned off,
3287 and then LIVE regs turned on. */
3289 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3292 rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3293 int insn_is_dead = 0;
3294 int libcall_is_dead = 0;
3296 if (flags & PROP_SCAN_DEAD_CODE)
3298 insn_is_dead = (insn_dead_p (PATTERN (insn), old, 0, REG_NOTES (insn))
3299 /* Don't delete something that refers to volatile storage! */
3300 && ! INSN_VOLATILE (insn));
3301 libcall_is_dead = (insn_is_dead && note != 0
3302 && libcall_dead_p (PATTERN (insn), old, note, insn));
3305 /* We almost certainly don't want to delete prologue or epilogue
3306 instructions. Warn about probable compiler losage. */
3307 if ((flags & PROP_KILL_DEAD_CODE)
3310 && (HAVE_epilogue || HAVE_prologue)
3311 && prologue_epilogue_contains (insn))
3313 warning ("ICE: would have deleted prologue/epilogue insn");
3315 libcall_is_dead = insn_is_dead = 0;
3318 /* If an instruction consists of just dead store(s) on final pass,
3319 "delete" it by turning it into a NOTE of type NOTE_INSN_DELETED.
3320 We could really delete it with delete_insn, but that
3321 can cause trouble for first or last insn in a basic block. */
3322 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3325 /* If the insn referred to a label, note that the label is
3327 for (inote = REG_NOTES (insn); inote; inote = XEXP (inote, 1))
3329 if (REG_NOTE_KIND (inote) == REG_LABEL)
3331 rtx label = XEXP (inote, 0);
3333 LABEL_NUSES (label)--;
3335 /* If this label was attached to an ADDR_VEC, it's
3336 safe to delete the ADDR_VEC. In fact, it's pretty much
3337 mandatory to delete it, because the ADDR_VEC may
3338 be referencing labels that no longer exist. */
3339 if (LABEL_NUSES (label) == 0
3340 && (next = next_nonnote_insn (label)) != NULL
3341 && GET_CODE (next) == JUMP_INSN
3342 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3343 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3345 rtx pat = PATTERN (next);
3346 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3347 int len = XVECLEN (pat, diff_vec_p);
3349 for (i = 0; i < len; i++)
3350 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3351 PUT_CODE (next, NOTE);
3352 NOTE_LINE_NUMBER (next) = NOTE_INSN_DELETED;
3353 NOTE_SOURCE_FILE (next) = 0;
3358 PUT_CODE (insn, NOTE);
3359 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
3360 NOTE_SOURCE_FILE (insn) = 0;
3362 /* CC0 is now known to be dead. Either this insn used it,
3363 in which case it doesn't anymore, or clobbered it,
3364 so the next insn can't use it. */
3367 /* If this insn is copying the return value from a library call,
3368 delete the entire library call. */
3369 if (libcall_is_dead)
3371 rtx first = XEXP (note, 0);
3373 while (INSN_DELETED_P (first))
3374 first = NEXT_INSN (first);
3379 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3380 NOTE_SOURCE_FILE (p) = 0;
3386 CLEAR_REG_SET (dead);
3387 CLEAR_REG_SET (live);
3389 /* See if this is an increment or decrement that can be
3390 merged into a following memory address. */
3393 register rtx x = single_set (insn);
3395 /* Does this instruction increment or decrement a register? */
3396 if (!reload_completed
3397 && (flags & PROP_AUTOINC)
3399 && GET_CODE (SET_DEST (x)) == REG
3400 && (GET_CODE (SET_SRC (x)) == PLUS
3401 || GET_CODE (SET_SRC (x)) == MINUS)
3402 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3403 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3404 /* Ok, look for a following memory ref we can combine with.
3405 If one is found, change the memory ref to a PRE_INC
3406 or PRE_DEC, cancel this insn, and return 1.
3407 Return 0 if nothing has been done. */
3408 && try_pre_increment_1 (insn))
3411 #endif /* AUTO_INC_DEC */
3413 /* If this is not the final pass, and this insn is copying the
3414 value of a library call and it's dead, don't scan the
3415 insns that perform the library call, so that the call's
3416 arguments are not marked live. */
3417 if (libcall_is_dead)
3419 /* Mark the dest reg as `significant'. */
3420 mark_set_regs (old, dead, PATTERN (insn), NULL_RTX,
3421 significant, flags);
3423 insn = XEXP (note, 0);
3424 prev = PREV_INSN (insn);
3426 else if (GET_CODE (PATTERN (insn)) == SET
3427 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3428 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3429 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3430 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3431 /* We have an insn to pop a constant amount off the stack.
3432 (Such insns use PLUS regardless of the direction of the stack,
3433 and any insn to adjust the stack by a constant is always a pop.)
3434 These insns, if not dead stores, have no effect on life. */
3438 /* Any regs live at the time of a call instruction
3439 must not go in a register clobbered by calls.
3440 Find all regs now live and record this for them. */
3442 if (GET_CODE (insn) == CALL_INSN
3443 && (flags & PROP_REG_INFO))
3444 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3446 REG_N_CALLS_CROSSED (i)++;
3449 /* LIVE gets the regs used in INSN;
3450 DEAD gets those set by it. Dead insns don't make anything
3453 mark_set_regs (old, dead, PATTERN (insn),
3454 insn, significant, flags);
3456 /* If an insn doesn't use CC0, it becomes dead since we
3457 assume that every insn clobbers it. So show it dead here;
3458 mark_used_regs will set it live if it is referenced. */
3462 mark_used_regs (old, live, PATTERN (insn), flags, insn);
3464 /* Sometimes we may have inserted something before INSN (such as
3465 a move) when we make an auto-inc. So ensure we will scan
3468 prev = PREV_INSN (insn);
3471 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3477 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3479 note = XEXP (note, 1))
3480 if (GET_CODE (XEXP (note, 0)) == USE)
3481 mark_used_regs (old, live, XEXP (XEXP (note, 0), 0),
3484 /* Each call clobbers all call-clobbered regs that are not
3485 global or fixed. Note that the function-value reg is a
3486 call-clobbered reg, and mark_set_regs has already had
3487 a chance to handle it. */
3489 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3490 if (call_used_regs[i] && ! global_regs[i]
3493 SET_REGNO_REG_SET (dead, i);
3495 SET_REGNO_REG_SET (significant, i);
3498 /* The stack ptr is used (honorarily) by a CALL insn. */
3499 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
3501 /* Calls may also reference any of the global registers,
3502 so they are made live. */
3503 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3505 mark_used_regs (old, live,
3506 gen_rtx_REG (reg_raw_mode[i], i),
3509 /* Calls also clobber memory. */
3510 free_EXPR_LIST_list (&mem_set_list);
3513 /* Update OLD for the registers used or set. */
3514 AND_COMPL_REG_SET (old, dead);
3515 IOR_REG_SET (old, live);
3519 /* On final pass, update counts of how many insns each reg is live
3521 if (flags & PROP_REG_INFO)
3522 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3523 { REG_LIVE_LENGTH (i)++; });
3530 FREE_REG_SET (dead);
3531 FREE_REG_SET (live);
3532 free_EXPR_LIST_list (&mem_set_list);
3535 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3536 (SET expressions whose destinations are registers dead after the insn).
3537 NEEDED is the regset that says which regs are alive after the insn.
3539 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3541 If X is the entire body of an insn, NOTES contains the reg notes
3542 pertaining to the insn. */
3545 insn_dead_p (x, needed, call_ok, notes)
3549 rtx notes ATTRIBUTE_UNUSED;
3551 enum rtx_code code = GET_CODE (x);
3554 /* If flow is invoked after reload, we must take existing AUTO_INC
3555 expresions into account. */
3556 if (reload_completed)
3558 for ( ; notes; notes = XEXP (notes, 1))
3560 if (REG_NOTE_KIND (notes) == REG_INC)
3562 int regno = REGNO (XEXP (notes, 0));
3564 /* Don't delete insns to set global regs. */
3565 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3566 || REGNO_REG_SET_P (needed, regno))
3573 /* If setting something that's a reg or part of one,
3574 see if that register's altered value will be live. */
3578 rtx r = SET_DEST (x);
3580 /* A SET that is a subroutine call cannot be dead. */
3581 if (! call_ok && GET_CODE (SET_SRC (x)) == CALL)
3585 if (GET_CODE (r) == CC0)
3589 if (GET_CODE (r) == MEM && ! MEM_VOLATILE_P (r))
3592 /* Walk the set of memory locations we are currently tracking
3593 and see if one is an identical match to this memory location.
3594 If so, this memory write is dead (remember, we're walking
3595 backwards from the end of the block to the start. */
3596 temp = mem_set_list;
3599 if (rtx_equal_p (XEXP (temp, 0), r))
3601 temp = XEXP (temp, 1);
3605 while (GET_CODE (r) == SUBREG || GET_CODE (r) == STRICT_LOW_PART
3606 || GET_CODE (r) == ZERO_EXTRACT)
3609 if (GET_CODE (r) == REG)
3611 int regno = REGNO (r);
3613 /* Don't delete insns to set global regs. */
3614 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3615 /* Make sure insns to set frame pointer aren't deleted. */
3616 || (regno == FRAME_POINTER_REGNUM
3617 && (! reload_completed || frame_pointer_needed))
3618 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3619 || (regno == HARD_FRAME_POINTER_REGNUM
3620 && (! reload_completed || frame_pointer_needed))
3622 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3623 /* Make sure insns to set arg pointer are never deleted
3624 (if the arg pointer isn't fixed, there will be a USE for
3625 it, so we can treat it normally). */
3626 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3628 || REGNO_REG_SET_P (needed, regno))
3631 /* If this is a hard register, verify that subsequent words are
3633 if (regno < FIRST_PSEUDO_REGISTER)
3635 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3638 if (REGNO_REG_SET_P (needed, regno+n))
3646 /* If performing several activities,
3647 insn is dead if each activity is individually dead.
3648 Also, CLOBBERs and USEs can be ignored; a CLOBBER or USE
3649 that's inside a PARALLEL doesn't make the insn worth keeping. */
3650 else if (code == PARALLEL)
3652 int i = XVECLEN (x, 0);
3654 for (i--; i >= 0; i--)
3655 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3656 && GET_CODE (XVECEXP (x, 0, i)) != USE
3657 && ! insn_dead_p (XVECEXP (x, 0, i), needed, call_ok, NULL_RTX))
3663 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3664 is not necessarily true for hard registers. */
3665 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3666 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3667 && ! REGNO_REG_SET_P (needed, REGNO (XEXP (x, 0))))
3670 /* We do not check other CLOBBER or USE here. An insn consisting of just
3671 a CLOBBER or just a USE should not be deleted. */
3675 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3676 return 1 if the entire library call is dead.
3677 This is true if X copies a register (hard or pseudo)
3678 and if the hard return reg of the call insn is dead.
3679 (The caller should have tested the destination of X already for death.)
3681 If this insn doesn't just copy a register, then we don't
3682 have an ordinary libcall. In that case, cse could not have
3683 managed to substitute the source for the dest later on,
3684 so we can assume the libcall is dead.
3686 NEEDED is the bit vector of pseudoregs live before this insn.
3687 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
3690 libcall_dead_p (x, needed, note, insn)
3696 register RTX_CODE code = GET_CODE (x);
3700 register rtx r = SET_SRC (x);
3701 if (GET_CODE (r) == REG)
3703 rtx call = XEXP (note, 0);
3707 /* Find the call insn. */
3708 while (call != insn && GET_CODE (call) != CALL_INSN)
3709 call = NEXT_INSN (call);
3711 /* If there is none, do nothing special,
3712 since ordinary death handling can understand these insns. */
3716 /* See if the hard reg holding the value is dead.
3717 If this is a PARALLEL, find the call within it. */
3718 call_pat = PATTERN (call);
3719 if (GET_CODE (call_pat) == PARALLEL)
3721 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3722 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
3723 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
3726 /* This may be a library call that is returning a value
3727 via invisible pointer. Do nothing special, since
3728 ordinary death handling can understand these insns. */
3732 call_pat = XVECEXP (call_pat, 0, i);
3735 return insn_dead_p (call_pat, needed, 1, REG_NOTES (call));
3741 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3742 live at function entry. Don't count global register variables, variables
3743 in registers that can be used for function arg passing, or variables in
3744 fixed hard registers. */
3747 regno_uninitialized (regno)
3750 if (n_basic_blocks == 0
3751 || (regno < FIRST_PSEUDO_REGISTER
3752 && (global_regs[regno]
3753 || fixed_regs[regno]
3754 || FUNCTION_ARG_REGNO_P (regno))))
3757 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3760 /* 1 if register REGNO was alive at a place where `setjmp' was called
3761 and was set more than once or is an argument.
3762 Such regs may be clobbered by `longjmp'. */
3765 regno_clobbered_at_setjmp (regno)
3768 if (n_basic_blocks == 0)
3771 return ((REG_N_SETS (regno) > 1
3772 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3773 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
3776 /* INSN references memory, possibly using autoincrement addressing modes.
3777 Find any entries on the mem_set_list that need to be invalidated due
3778 to an address change. */
3780 invalidate_mems_from_autoinc (insn)
3783 rtx note = REG_NOTES (insn);
3784 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3786 if (REG_NOTE_KIND (note) == REG_INC)
3788 rtx temp = mem_set_list;
3789 rtx prev = NULL_RTX;
3794 next = XEXP (temp, 1);
3795 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
3797 /* Splice temp out of list. */
3799 XEXP (prev, 1) = next;
3801 mem_set_list = next;
3802 free_EXPR_LIST_node (temp);
3812 /* Process the registers that are set within X. Their bits are set to
3813 1 in the regset DEAD, because they are dead prior to this insn.
3815 If INSN is nonzero, it is the insn being processed.
3817 FLAGS is the set of operations to perform. */
3820 mark_set_regs (needed, dead, x, insn, significant, flags)
3828 register RTX_CODE code = GET_CODE (x);
3830 if (code == SET || code == CLOBBER)
3831 mark_set_1 (needed, dead, x, insn, significant, flags);
3832 else if (code == PARALLEL)
3835 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3837 code = GET_CODE (XVECEXP (x, 0, i));
3838 if (code == SET || code == CLOBBER)
3839 mark_set_1 (needed, dead, XVECEXP (x, 0, i), insn,
3840 significant, flags);
3845 /* Process a single SET rtx, X. */
3848 mark_set_1 (needed, dead, x, insn, significant, flags)
3856 register int regno = -1;
3857 register rtx reg = SET_DEST (x);
3859 /* Some targets place small structures in registers for
3860 return values of functions. We have to detect this
3861 case specially here to get correct flow information. */
3862 if (GET_CODE (reg) == PARALLEL
3863 && GET_MODE (reg) == BLKmode)
3867 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
3868 mark_set_1 (needed, dead, XVECEXP (reg, 0, i), insn,
3869 significant, flags);
3873 /* Modifying just one hardware register of a multi-reg value
3874 or just a byte field of a register
3875 does not mean the value from before this insn is now dead.
3876 But it does mean liveness of that register at the end of the block
3879 Within mark_set_1, however, we treat it as if the register is
3880 indeed modified. mark_used_regs will, however, also treat this
3881 register as being used. Thus, we treat these insns as setting a
3882 new value for the register as a function of its old value. This
3883 cases LOG_LINKS to be made appropriately and this will help combine. */
3885 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
3886 || GET_CODE (reg) == SIGN_EXTRACT
3887 || GET_CODE (reg) == STRICT_LOW_PART)
3888 reg = XEXP (reg, 0);
3890 /* If this set is a MEM, then it kills any aliased writes.
3891 If this set is a REG, then it kills any MEMs which use the reg. */
3892 if (flags & PROP_SCAN_DEAD_CODE)
3894 if (GET_CODE (reg) == MEM
3895 || GET_CODE (reg) == REG)
3897 rtx temp = mem_set_list;
3898 rtx prev = NULL_RTX;
3903 next = XEXP (temp, 1);
3904 if ((GET_CODE (reg) == MEM
3905 && output_dependence (XEXP (temp, 0), reg))
3906 || (GET_CODE (reg) == REG
3907 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
3909 /* Splice this entry out of the list. */
3911 XEXP (prev, 1) = next;
3913 mem_set_list = next;
3914 free_EXPR_LIST_node (temp);
3922 /* If the memory reference had embedded side effects (autoincrement
3923 address modes. Then we may need to kill some entries on the
3925 if (insn && GET_CODE (reg) == MEM)
3926 invalidate_mems_from_autoinc (insn);
3928 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
3929 /* We do not know the size of a BLKmode store, so we do not track
3930 them for redundant store elimination. */
3931 && GET_MODE (reg) != BLKmode
3932 /* There are no REG_INC notes for SP, so we can't assume we'll see
3933 everything that invalidates it. To be safe, don't eliminate any
3934 stores though SP; none of them should be redundant anyway. */
3935 && ! reg_mentioned_p (stack_pointer_rtx, reg))
3936 mem_set_list = alloc_EXPR_LIST (0, reg, mem_set_list);
3939 if (GET_CODE (reg) == REG
3940 && (regno = REGNO (reg),
3941 ! (regno == FRAME_POINTER_REGNUM
3942 && (! reload_completed || frame_pointer_needed)))
3943 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3944 && ! (regno == HARD_FRAME_POINTER_REGNUM
3945 && (! reload_completed || frame_pointer_needed))
3947 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3948 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3950 && ! (regno < FIRST_PSEUDO_REGISTER && global_regs[regno]))
3951 /* && regno != STACK_POINTER_REGNUM) -- let's try without this. */
3953 int some_needed = REGNO_REG_SET_P (needed, regno);
3954 int some_not_needed = ! some_needed;
3956 /* Mark it as a significant register for this basic block. */
3958 SET_REGNO_REG_SET (significant, regno);
3960 /* Mark it as dead before this insn. */
3961 SET_REGNO_REG_SET (dead, regno);
3963 /* A hard reg in a wide mode may really be multiple registers.
3964 If so, mark all of them just like the first. */
3965 if (regno < FIRST_PSEUDO_REGISTER)
3969 /* Nothing below is needed for the stack pointer; get out asap.
3970 Eg, log links aren't needed, since combine won't use them. */
3971 if (regno == STACK_POINTER_REGNUM)
3974 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
3977 int regno_n = regno + n;
3978 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
3980 SET_REGNO_REG_SET (significant, regno_n);
3982 SET_REGNO_REG_SET (dead, regno_n);
3983 some_needed |= needed_regno;
3984 some_not_needed |= ! needed_regno;
3988 /* Additional data to record if this is the final pass. */
3989 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
3990 | PROP_DEATH_NOTES | PROP_AUTOINC))
3993 register int blocknum = BLOCK_NUM (insn);
3996 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3997 y = reg_next_use[regno];
3999 /* If this is a hard reg, record this function uses the reg. */
4001 if (regno < FIRST_PSEUDO_REGISTER)
4004 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
4006 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4007 for (i = regno; i < endregno; i++)
4009 /* The next use is no longer "next", since a store
4011 reg_next_use[i] = 0;
4014 if (flags & PROP_REG_INFO)
4015 for (i = regno; i < endregno; i++)
4017 regs_ever_live[i] = 1;
4023 /* The next use is no longer "next", since a store
4025 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4026 reg_next_use[regno] = 0;
4028 /* Keep track of which basic blocks each reg appears in. */
4030 if (flags & PROP_REG_INFO)
4032 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4033 REG_BASIC_BLOCK (regno) = blocknum;
4034 else if (REG_BASIC_BLOCK (regno) != blocknum)
4035 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4037 /* Count (weighted) references, stores, etc. This counts a
4038 register twice if it is modified, but that is correct. */
4039 REG_N_SETS (regno)++;
4040 REG_N_REFS (regno) += loop_depth;
4042 /* The insns where a reg is live are normally counted
4043 elsewhere, but we want the count to include the insn
4044 where the reg is set, and the normal counting mechanism
4045 would not count it. */
4046 REG_LIVE_LENGTH (regno)++;
4050 if (! some_not_needed)
4052 if (flags & PROP_LOG_LINKS)
4054 /* Make a logical link from the next following insn
4055 that uses this register, back to this insn.
4056 The following insns have already been processed.
4058 We don't build a LOG_LINK for hard registers containing
4059 in ASM_OPERANDs. If these registers get replaced,
4060 we might wind up changing the semantics of the insn,
4061 even if reload can make what appear to be valid
4062 assignments later. */
4063 if (y && (BLOCK_NUM (y) == blocknum)
4064 && (regno >= FIRST_PSEUDO_REGISTER
4065 || asm_noperands (PATTERN (y)) < 0))
4066 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4069 else if (! some_needed)
4071 if (flags & PROP_REG_INFO)
4072 REG_N_DEATHS (REGNO (reg))++;
4074 if (flags & PROP_DEATH_NOTES)
4076 /* Note that dead stores have already been deleted
4077 when possible. If we get here, we have found a
4078 dead store that cannot be eliminated (because the
4079 same insn does something useful). Indicate this
4080 by marking the reg being set as dying here. */
4082 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4087 if (flags & PROP_DEATH_NOTES)
4089 /* This is a case where we have a multi-word hard register
4090 and some, but not all, of the words of the register are
4091 needed in subsequent insns. Write REG_UNUSED notes
4092 for those parts that were not needed. This case should
4097 for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
4099 if (!REGNO_REG_SET_P (needed, regno + i))
4103 gen_rtx_REG (reg_raw_mode[regno + i], regno + i),
4109 else if (GET_CODE (reg) == REG)
4111 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4112 reg_next_use[regno] = 0;
4115 /* If this is the last pass and this is a SCRATCH, show it will be dying
4116 here and count it. */
4117 else if (GET_CODE (reg) == SCRATCH)
4119 if (flags & PROP_DEATH_NOTES)
4121 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4127 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
4131 find_auto_inc (needed, x, insn)
4136 rtx addr = XEXP (x, 0);
4137 HOST_WIDE_INT offset = 0;
4140 /* Here we detect use of an index register which might be good for
4141 postincrement, postdecrement, preincrement, or predecrement. */
4143 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4144 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
4146 if (GET_CODE (addr) == REG)
4149 register int size = GET_MODE_SIZE (GET_MODE (x));
4152 int regno = REGNO (addr);
4154 /* Is the next use an increment that might make auto-increment? */
4155 if ((incr = reg_next_use[regno]) != 0
4156 && (set = single_set (incr)) != 0
4157 && GET_CODE (set) == SET
4158 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
4159 /* Can't add side effects to jumps; if reg is spilled and
4160 reloaded, there's no way to store back the altered value. */
4161 && GET_CODE (insn) != JUMP_INSN
4162 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
4163 && XEXP (y, 0) == addr
4164 && GET_CODE (XEXP (y, 1)) == CONST_INT
4165 && ((HAVE_POST_INCREMENT
4166 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
4167 || (HAVE_POST_DECREMENT
4168 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
4169 || (HAVE_PRE_INCREMENT
4170 && (INTVAL (XEXP (y, 1)) == size && offset == size))
4171 || (HAVE_PRE_DECREMENT
4172 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
4173 /* Make sure this reg appears only once in this insn. */
4174 && (use = find_use_as_address (PATTERN (insn), addr, offset),
4175 use != 0 && use != (rtx) 1))
4177 rtx q = SET_DEST (set);
4178 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
4179 ? (offset ? PRE_INC : POST_INC)
4180 : (offset ? PRE_DEC : POST_DEC));
4182 if (dead_or_set_p (incr, addr))
4184 /* This is the simple case. Try to make the auto-inc. If
4185 we can't, we are done. Otherwise, we will do any
4186 needed updates below. */
4187 if (! validate_change (insn, &XEXP (x, 0),
4188 gen_rtx_fmt_e (inc_code, Pmode, addr),
4192 else if (GET_CODE (q) == REG
4193 /* PREV_INSN used here to check the semi-open interval
4195 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4196 /* We must also check for sets of q as q may be
4197 a call clobbered hard register and there may
4198 be a call between PREV_INSN (insn) and incr. */
4199 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4201 /* We have *p followed sometime later by q = p+size.
4202 Both p and q must be live afterward,
4203 and q is not used between INSN and its assignment.
4204 Change it to q = p, ...*q..., q = q+size.
4205 Then fall into the usual case. */
4210 emit_move_insn (q, addr);
4211 insns = get_insns ();
4214 bb = BLOCK_FOR_INSN (insn);
4215 for (temp = insns; temp; temp = NEXT_INSN (temp))
4216 set_block_for_insn (temp, bb);
4218 /* If we can't make the auto-inc, or can't make the
4219 replacement into Y, exit. There's no point in making
4220 the change below if we can't do the auto-inc and doing
4221 so is not correct in the pre-inc case. */
4223 validate_change (insn, &XEXP (x, 0),
4224 gen_rtx_fmt_e (inc_code, Pmode, q),
4226 validate_change (incr, &XEXP (y, 0), q, 1);
4227 if (! apply_change_group ())
4230 /* We now know we'll be doing this change, so emit the
4231 new insn(s) and do the updates. */
4232 emit_insns_before (insns, insn);
4234 if (BLOCK_FOR_INSN (insn)->head == insn)
4235 BLOCK_FOR_INSN (insn)->head = insns;
4237 /* INCR will become a NOTE and INSN won't contain a
4238 use of ADDR. If a use of ADDR was just placed in
4239 the insn before INSN, make that the next use.
4240 Otherwise, invalidate it. */
4241 if (GET_CODE (PREV_INSN (insn)) == INSN
4242 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4243 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
4244 reg_next_use[regno] = PREV_INSN (insn);
4246 reg_next_use[regno] = 0;
4251 /* REGNO is now used in INCR which is below INSN, but
4252 it previously wasn't live here. If we don't mark
4253 it as needed, we'll put a REG_DEAD note for it
4254 on this insn, which is incorrect. */
4255 SET_REGNO_REG_SET (needed, regno);
4257 /* If there are any calls between INSN and INCR, show
4258 that REGNO now crosses them. */
4259 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4260 if (GET_CODE (temp) == CALL_INSN)
4261 REG_N_CALLS_CROSSED (regno)++;
4266 /* If we haven't returned, it means we were able to make the
4267 auto-inc, so update the status. First, record that this insn
4268 has an implicit side effect. */
4271 = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
4273 /* Modify the old increment-insn to simply copy
4274 the already-incremented value of our register. */
4275 if (! validate_change (incr, &SET_SRC (set), addr, 0))
4278 /* If that makes it a no-op (copying the register into itself) delete
4279 it so it won't appear to be a "use" and a "set" of this
4281 if (SET_DEST (set) == addr)
4283 PUT_CODE (incr, NOTE);
4284 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4285 NOTE_SOURCE_FILE (incr) = 0;
4288 if (regno >= FIRST_PSEUDO_REGISTER)
4290 /* Count an extra reference to the reg. When a reg is
4291 incremented, spilling it is worse, so we want to make
4292 that less likely. */
4293 REG_N_REFS (regno) += loop_depth;
4295 /* Count the increment as a setting of the register,
4296 even though it isn't a SET in rtl. */
4297 REG_N_SETS (regno)++;
4302 #endif /* AUTO_INC_DEC */
4304 /* Scan expression X and store a 1-bit in LIVE for each reg it uses.
4305 This is done assuming the registers needed from X
4306 are those that have 1-bits in NEEDED.
4308 FLAGS is the set of enabled operations.
4310 INSN is the containing instruction. If INSN is dead, this function is not
4314 mark_used_regs (needed, live, x, flags, insn)
4321 register RTX_CODE code;
4326 code = GET_CODE (x);
4346 /* If we are clobbering a MEM, mark any registers inside the address
4348 if (GET_CODE (XEXP (x, 0)) == MEM)
4349 mark_used_regs (needed, live, XEXP (XEXP (x, 0), 0), flags, insn);
4353 /* Don't bother watching stores to mems if this is not the
4354 final pass. We'll not be deleting dead stores this round. */
4355 if (flags & PROP_SCAN_DEAD_CODE)
4357 /* Invalidate the data for the last MEM stored, but only if MEM is
4358 something that can be stored into. */
4359 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
4360 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
4361 ; /* needn't clear the memory set list */
4364 rtx temp = mem_set_list;
4365 rtx prev = NULL_RTX;
4370 next = XEXP (temp, 1);
4371 if (anti_dependence (XEXP (temp, 0), x))
4373 /* Splice temp out of the list. */
4375 XEXP (prev, 1) = next;
4377 mem_set_list = next;
4378 free_EXPR_LIST_node (temp);
4386 /* If the memory reference had embedded side effects (autoincrement
4387 address modes. Then we may need to kill some entries on the
4390 invalidate_mems_from_autoinc (insn);
4394 if (flags & PROP_AUTOINC)
4395 find_auto_inc (needed, x, insn);
4400 if (GET_CODE (SUBREG_REG (x)) == REG
4401 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
4402 && (GET_MODE_SIZE (GET_MODE (x))
4403 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
4404 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
4406 /* While we're here, optimize this case. */
4409 /* In case the SUBREG is not of a register, don't optimize */
4410 if (GET_CODE (x) != REG)
4412 mark_used_regs (needed, live, x, flags, insn);
4416 /* ... fall through ... */
4419 /* See a register other than being set
4420 => mark it as needed. */
4424 int some_needed = REGNO_REG_SET_P (needed, regno);
4425 int some_not_needed = ! some_needed;
4427 SET_REGNO_REG_SET (live, regno);
4429 /* A hard reg in a wide mode may really be multiple registers.
4430 If so, mark all of them just like the first. */
4431 if (regno < FIRST_PSEUDO_REGISTER)
4435 /* For stack ptr or fixed arg pointer,
4436 nothing below can be necessary, so waste no more time. */
4437 if (regno == STACK_POINTER_REGNUM
4438 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4439 || (regno == HARD_FRAME_POINTER_REGNUM
4440 && (! reload_completed || frame_pointer_needed))
4442 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4443 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4445 || (regno == FRAME_POINTER_REGNUM
4446 && (! reload_completed || frame_pointer_needed)))
4448 /* If this is a register we are going to try to eliminate,
4449 don't mark it live here. If we are successful in
4450 eliminating it, it need not be live unless it is used for
4451 pseudos, in which case it will have been set live when
4452 it was allocated to the pseudos. If the register will not
4453 be eliminated, reload will set it live at that point. */
4455 if (! TEST_HARD_REG_BIT (elim_reg_set, regno))
4456 regs_ever_live[regno] = 1;
4459 /* No death notes for global register variables;
4460 their values are live after this function exits. */
4461 if (global_regs[regno])
4463 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4464 reg_next_use[regno] = insn;
4468 n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4471 int regno_n = regno + n;
4472 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
4474 SET_REGNO_REG_SET (live, regno_n);
4475 some_needed |= needed_regno;
4476 some_not_needed |= ! needed_regno;
4480 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4482 /* Record where each reg is used, so when the reg
4483 is set we know the next insn that uses it. */
4485 reg_next_use[regno] = insn;
4487 if (flags & PROP_REG_INFO)
4489 if (regno < FIRST_PSEUDO_REGISTER)
4491 /* If a hard reg is being used,
4492 record that this function does use it. */
4494 i = HARD_REGNO_NREGS (regno, GET_MODE (x));
4498 regs_ever_live[regno + --i] = 1;
4503 /* Keep track of which basic block each reg appears in. */
4505 register int blocknum = BLOCK_NUM (insn);
4507 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4508 REG_BASIC_BLOCK (regno) = blocknum;
4509 else if (REG_BASIC_BLOCK (regno) != blocknum)
4510 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4512 /* Count (weighted) number of uses of each reg. */
4514 REG_N_REFS (regno) += loop_depth;
4518 /* Record and count the insns in which a reg dies.
4519 If it is used in this insn and was dead below the insn
4520 then it dies in this insn. If it was set in this insn,
4521 we do not make a REG_DEAD note; likewise if we already
4522 made such a note. */
4524 if (flags & PROP_DEATH_NOTES)
4527 && ! dead_or_set_p (insn, x)
4529 && (regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
4533 /* Check for the case where the register dying partially
4534 overlaps the register set by this insn. */
4535 if (regno < FIRST_PSEUDO_REGISTER
4536 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
4538 int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4540 some_needed |= dead_or_set_regno_p (insn, regno + n);
4543 /* If none of the words in X is needed, make a REG_DEAD
4544 note. Otherwise, we must make partial REG_DEAD notes. */
4548 = alloc_EXPR_LIST (REG_DEAD, x, REG_NOTES (insn));
4549 REG_N_DEATHS (regno)++;
4555 /* Don't make a REG_DEAD note for a part of a register
4556 that is set in the insn. */
4558 for (i = HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1;
4560 if (!REGNO_REG_SET_P (needed, regno + i)
4561 && ! dead_or_set_regno_p (insn, regno + i))
4564 (REG_DEAD, gen_rtx_REG (reg_raw_mode[regno + i],
4575 register rtx testreg = SET_DEST (x);
4578 /* If storing into MEM, don't show it as being used. But do
4579 show the address as being used. */
4580 if (GET_CODE (testreg) == MEM)
4583 if (flags & PROP_AUTOINC)
4584 find_auto_inc (needed, testreg, insn);
4586 mark_used_regs (needed, live, XEXP (testreg, 0), flags, insn);
4587 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4591 /* Storing in STRICT_LOW_PART is like storing in a reg
4592 in that this SET might be dead, so ignore it in TESTREG.
4593 but in some other ways it is like using the reg.
4595 Storing in a SUBREG or a bit field is like storing the entire
4596 register in that if the register's value is not used
4597 then this SET is not needed. */
4598 while (GET_CODE (testreg) == STRICT_LOW_PART
4599 || GET_CODE (testreg) == ZERO_EXTRACT
4600 || GET_CODE (testreg) == SIGN_EXTRACT
4601 || GET_CODE (testreg) == SUBREG)
4603 if (GET_CODE (testreg) == SUBREG
4604 && GET_CODE (SUBREG_REG (testreg)) == REG
4605 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
4606 && (GET_MODE_SIZE (GET_MODE (testreg))
4607 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
4608 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
4610 /* Modifying a single register in an alternate mode
4611 does not use any of the old value. But these other
4612 ways of storing in a register do use the old value. */
4613 if (GET_CODE (testreg) == SUBREG
4614 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4619 testreg = XEXP (testreg, 0);
4622 /* If this is a store into a register,
4623 recursively scan the value being stored. */
4625 if ((GET_CODE (testreg) == PARALLEL
4626 && GET_MODE (testreg) == BLKmode)
4627 || (GET_CODE (testreg) == REG
4628 && (regno = REGNO (testreg), ! (regno == FRAME_POINTER_REGNUM
4629 && (! reload_completed || frame_pointer_needed)))
4630 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4631 && ! (regno == HARD_FRAME_POINTER_REGNUM
4632 && (! reload_completed || frame_pointer_needed))
4634 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4635 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4638 /* We used to exclude global_regs here, but that seems wrong.
4639 Storing in them is like storing in mem. */
4641 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4643 mark_used_regs (needed, live, SET_DEST (x), flags, insn);
4650 /* ??? This info should have been gotten from mark_regs_live_at_end,
4651 as applied to the EXIT block, and propagated along the edge that
4652 connects this block to the EXIT. */
4656 case UNSPEC_VOLATILE:
4660 /* Traditional and volatile asm instructions must be considered to use
4661 and clobber all hard registers, all pseudo-registers and all of
4662 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
4664 Consider for instance a volatile asm that changes the fpu rounding
4665 mode. An insn should not be moved across this even if it only uses
4666 pseudo-regs because it might give an incorrectly rounded result.
4668 ?!? Unfortunately, marking all hard registers as live causes massive
4669 problems for the register allocator and marking all pseudos as live
4670 creates mountains of uninitialized variable warnings.
4672 So for now, just clear the memory set list and mark any regs
4673 we can find in ASM_OPERANDS as used. */
4674 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
4675 free_EXPR_LIST_list (&mem_set_list);
4677 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
4678 We can not just fall through here since then we would be confused
4679 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
4680 traditional asms unlike their normal usage. */
4681 if (code == ASM_OPERANDS)
4685 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
4686 mark_used_regs (needed, live, ASM_OPERANDS_INPUT (x, j),
4697 /* Recursively scan the operands of this expression. */
4700 register const char *fmt = GET_RTX_FORMAT (code);
4703 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4707 /* Tail recursive case: save a function call level. */
4713 mark_used_regs (needed, live, XEXP (x, i), flags, insn);
4715 else if (fmt[i] == 'E')
4718 for (j = 0; j < XVECLEN (x, i); j++)
4719 mark_used_regs (needed, live, XVECEXP (x, i, j), flags, insn);
4728 try_pre_increment_1 (insn)
4731 /* Find the next use of this reg. If in same basic block,
4732 make it do pre-increment or pre-decrement if appropriate. */
4733 rtx x = single_set (insn);
4734 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4735 * INTVAL (XEXP (SET_SRC (x), 1)));
4736 int regno = REGNO (SET_DEST (x));
4737 rtx y = reg_next_use[regno];
4739 && BLOCK_NUM (y) == BLOCK_NUM (insn)
4740 /* Don't do this if the reg dies, or gets set in y; a standard addressing
4741 mode would be better. */
4742 && ! dead_or_set_p (y, SET_DEST (x))
4743 && try_pre_increment (y, SET_DEST (x), amount))
4745 /* We have found a suitable auto-increment
4746 and already changed insn Y to do it.
4747 So flush this increment-instruction. */
4748 PUT_CODE (insn, NOTE);
4749 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4750 NOTE_SOURCE_FILE (insn) = 0;
4751 /* Count a reference to this reg for the increment
4752 insn we are deleting. When a reg is incremented.
4753 spilling it is worse, so we want to make that
4755 if (regno >= FIRST_PSEUDO_REGISTER)
4757 REG_N_REFS (regno) += loop_depth;
4758 REG_N_SETS (regno)++;
4765 /* Try to change INSN so that it does pre-increment or pre-decrement
4766 addressing on register REG in order to add AMOUNT to REG.
4767 AMOUNT is negative for pre-decrement.
4768 Returns 1 if the change could be made.
4769 This checks all about the validity of the result of modifying INSN. */
4772 try_pre_increment (insn, reg, amount)
4774 HOST_WIDE_INT amount;
4778 /* Nonzero if we can try to make a pre-increment or pre-decrement.
4779 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
4781 /* Nonzero if we can try to make a post-increment or post-decrement.
4782 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4783 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4784 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
4787 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
4790 /* From the sign of increment, see which possibilities are conceivable
4791 on this target machine. */
4792 if (HAVE_PRE_INCREMENT && amount > 0)
4794 if (HAVE_POST_INCREMENT && amount > 0)
4797 if (HAVE_PRE_DECREMENT && amount < 0)
4799 if (HAVE_POST_DECREMENT && amount < 0)
4802 if (! (pre_ok || post_ok))
4805 /* It is not safe to add a side effect to a jump insn
4806 because if the incremented register is spilled and must be reloaded
4807 there would be no way to store the incremented value back in memory. */
4809 if (GET_CODE (insn) == JUMP_INSN)
4814 use = find_use_as_address (PATTERN (insn), reg, 0);
4815 if (post_ok && (use == 0 || use == (rtx) 1))
4817 use = find_use_as_address (PATTERN (insn), reg, -amount);
4821 if (use == 0 || use == (rtx) 1)
4824 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4827 /* See if this combination of instruction and addressing mode exists. */
4828 if (! validate_change (insn, &XEXP (use, 0),
4829 gen_rtx_fmt_e (amount > 0
4830 ? (do_post ? POST_INC : PRE_INC)
4831 : (do_post ? POST_DEC : PRE_DEC),
4835 /* Record that this insn now has an implicit side effect on X. */
4836 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4840 #endif /* AUTO_INC_DEC */
4842 /* Find the place in the rtx X where REG is used as a memory address.
4843 Return the MEM rtx that so uses it.
4844 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4845 (plus REG (const_int PLUSCONST)).
4847 If such an address does not appear, return 0.
4848 If REG appears more than once, or is used other than in such an address,
4852 find_use_as_address (x, reg, plusconst)
4855 HOST_WIDE_INT plusconst;
4857 enum rtx_code code = GET_CODE (x);
4858 const char *fmt = GET_RTX_FORMAT (code);
4860 register rtx value = 0;
4863 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4866 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4867 && XEXP (XEXP (x, 0), 0) == reg
4868 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4869 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4872 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4874 /* If REG occurs inside a MEM used in a bit-field reference,
4875 that is unacceptable. */
4876 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4877 return (rtx) (HOST_WIDE_INT) 1;
4881 return (rtx) (HOST_WIDE_INT) 1;
4883 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4887 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4891 return (rtx) (HOST_WIDE_INT) 1;
4896 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4898 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4902 return (rtx) (HOST_WIDE_INT) 1;
4910 /* Write information about registers and basic blocks into FILE.
4911 This is part of making a debugging dump. */
4914 dump_flow_info (file)
4918 static const char * const reg_class_names[] = REG_CLASS_NAMES;
4920 fprintf (file, "%d registers.\n", max_regno);
4921 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4924 enum reg_class class, altclass;
4925 fprintf (file, "\nRegister %d used %d times across %d insns",
4926 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
4927 if (REG_BASIC_BLOCK (i) >= 0)
4928 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
4930 fprintf (file, "; set %d time%s", REG_N_SETS (i),
4931 (REG_N_SETS (i) == 1) ? "" : "s");
4932 if (REG_USERVAR_P (regno_reg_rtx[i]))
4933 fprintf (file, "; user var");
4934 if (REG_N_DEATHS (i) != 1)
4935 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
4936 if (REG_N_CALLS_CROSSED (i) == 1)
4937 fprintf (file, "; crosses 1 call");
4938 else if (REG_N_CALLS_CROSSED (i))
4939 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
4940 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
4941 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
4942 class = reg_preferred_class (i);
4943 altclass = reg_alternate_class (i);
4944 if (class != GENERAL_REGS || altclass != ALL_REGS)
4946 if (altclass == ALL_REGS || class == ALL_REGS)
4947 fprintf (file, "; pref %s", reg_class_names[(int) class]);
4948 else if (altclass == NO_REGS)
4949 fprintf (file, "; %s or none", reg_class_names[(int) class]);
4951 fprintf (file, "; pref %s, else %s",
4952 reg_class_names[(int) class],
4953 reg_class_names[(int) altclass]);
4955 if (REGNO_POINTER_FLAG (i))
4956 fprintf (file, "; pointer");
4957 fprintf (file, ".\n");
4960 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
4961 for (i = 0; i < n_basic_blocks; i++)
4963 register basic_block bb = BASIC_BLOCK (i);
4967 fprintf (file, "\nBasic block %d: first insn %d, last %d.\n",
4968 i, INSN_UID (bb->head), INSN_UID (bb->end));
4970 fprintf (file, "Predecessors: ");
4971 for (e = bb->pred; e ; e = e->pred_next)
4972 dump_edge_info (file, e, 0);
4974 fprintf (file, "\nSuccessors: ");
4975 for (e = bb->succ; e ; e = e->succ_next)
4976 dump_edge_info (file, e, 1);
4978 fprintf (file, "\nRegisters live at start:");
4979 if (bb->global_live_at_start)
4981 for (regno = 0; regno < max_regno; regno++)
4982 if (REGNO_REG_SET_P (bb->global_live_at_start, regno))
4983 fprintf (file, " %d", regno);
4986 fprintf (file, " n/a");
4988 fprintf (file, "\nRegisters live at end:");
4989 if (bb->global_live_at_end)
4991 for (regno = 0; regno < max_regno; regno++)
4992 if (REGNO_REG_SET_P (bb->global_live_at_end, regno))
4993 fprintf (file, " %d", regno);
4996 fprintf (file, " n/a");
5007 dump_flow_info (stderr);
5011 dump_edge_info (file, e, do_succ)
5016 basic_block side = (do_succ ? e->dest : e->src);
5018 if (side == ENTRY_BLOCK_PTR)
5019 fputs (" ENTRY", file);
5020 else if (side == EXIT_BLOCK_PTR)
5021 fputs (" EXIT", file);
5023 fprintf (file, " %d", side->index);
5027 static const char * const bitnames[] = {
5028 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5031 int i, flags = e->flags;
5035 for (i = 0; flags; i++)
5036 if (flags & (1 << i))
5042 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5043 fputs (bitnames[i], file);
5045 fprintf (file, "%d", i);
5053 /* Like print_rtl, but also print out live information for the start of each
5057 print_rtl_with_bb (outf, rtx_first)
5061 register rtx tmp_rtx;
5064 fprintf (outf, "(nil)\n");
5068 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5069 int max_uid = get_max_uid ();
5070 basic_block *start = (basic_block *)
5071 alloca (max_uid * sizeof (basic_block));
5072 basic_block *end = (basic_block *)
5073 alloca (max_uid * sizeof (basic_block));
5074 enum bb_state *in_bb_p = (enum bb_state *)
5075 alloca (max_uid * sizeof (enum bb_state));
5077 memset (start, 0, max_uid * sizeof (basic_block));
5078 memset (end, 0, max_uid * sizeof (basic_block));
5079 memset (in_bb_p, 0, max_uid * sizeof (enum bb_state));
5081 for (i = n_basic_blocks - 1; i >= 0; i--)
5083 basic_block bb = BASIC_BLOCK (i);
5086 start[INSN_UID (bb->head)] = bb;
5087 end[INSN_UID (bb->end)] = bb;
5088 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5090 enum bb_state state = IN_MULTIPLE_BB;
5091 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
5093 in_bb_p[INSN_UID(x)] = state;
5100 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5105 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5107 fprintf (outf, ";; Start of basic block %d, registers live:",
5110 EXECUTE_IF_SET_IN_REG_SET (bb->global_live_at_start, 0, i,
5112 fprintf (outf, " %d", i);
5113 if (i < FIRST_PSEUDO_REGISTER)
5114 fprintf (outf, " [%s]",
5120 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
5121 && GET_CODE (tmp_rtx) != NOTE
5122 && GET_CODE (tmp_rtx) != BARRIER
5124 fprintf (outf, ";; Insn is not within a basic block\n");
5125 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
5126 fprintf (outf, ";; Insn is in multiple basic blocks\n");
5128 did_output = print_rtl_single (outf, tmp_rtx);
5130 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
5131 fprintf (outf, ";; End of basic block %d\n", bb->index);
5138 if (current_function_epilogue_delay_list != 0)
5140 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
5141 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
5142 tmp_rtx = XEXP (tmp_rtx, 1))
5143 print_rtl_single (outf, XEXP (tmp_rtx, 0));
5148 /* Integer list support. */
5150 /* Allocate a node from list *HEAD_PTR. */
5153 alloc_int_list_node (head_ptr)
5154 int_list_block **head_ptr;
5156 struct int_list_block *first_blk = *head_ptr;
5158 if (first_blk == NULL || first_blk->nodes_left <= 0)
5160 first_blk = (struct int_list_block *) xmalloc (sizeof (struct int_list_block));
5161 first_blk->nodes_left = INT_LIST_NODES_IN_BLK;
5162 first_blk->next = *head_ptr;
5163 *head_ptr = first_blk;
5166 first_blk->nodes_left--;
5167 return &first_blk->nodes[first_blk->nodes_left];
5170 /* Pointer to head of predecessor/successor block list. */
5171 static int_list_block *pred_int_list_blocks;
5173 /* Add a new node to integer list LIST with value VAL.
5174 LIST is a pointer to a list object to allow for different implementations.
5175 If *LIST is initially NULL, the list is empty.
5176 The caller must not care whether the element is added to the front or
5177 to the end of the list (to allow for different implementations). */
5180 add_int_list_node (blk_list, list, val)
5181 int_list_block **blk_list;
5185 int_list_ptr p = alloc_int_list_node (blk_list);
5193 /* Free the blocks of lists at BLK_LIST. */
5196 free_int_list (blk_list)
5197 int_list_block **blk_list;
5199 int_list_block *p, *next;
5201 for (p = *blk_list; p != NULL; p = next)
5207 /* Mark list as empty for the next function we compile. */
5211 /* Predecessor/successor computation. */
5213 /* Mark PRED_BB a precessor of SUCC_BB,
5214 and conversely SUCC_BB a successor of PRED_BB. */
5217 add_pred_succ (pred_bb, succ_bb, s_preds, s_succs, num_preds, num_succs)
5220 int_list_ptr *s_preds;
5221 int_list_ptr *s_succs;
5225 if (succ_bb != EXIT_BLOCK)
5227 add_int_list_node (&pred_int_list_blocks, &s_preds[succ_bb], pred_bb);
5228 num_preds[succ_bb]++;
5230 if (pred_bb != ENTRY_BLOCK)
5232 add_int_list_node (&pred_int_list_blocks, &s_succs[pred_bb], succ_bb);
5233 num_succs[pred_bb]++;
5237 /* Convert edge lists into pred/succ lists for backward compatibility. */
5240 compute_preds_succs (s_preds, s_succs, num_preds, num_succs)
5241 int_list_ptr *s_preds;
5242 int_list_ptr *s_succs;
5246 int i, n = n_basic_blocks;
5249 memset (s_preds, 0, n_basic_blocks * sizeof (int_list_ptr));
5250 memset (s_succs, 0, n_basic_blocks * sizeof (int_list_ptr));
5251 memset (num_preds, 0, n_basic_blocks * sizeof (int));
5252 memset (num_succs, 0, n_basic_blocks * sizeof (int));
5254 for (i = 0; i < n; ++i)
5256 basic_block bb = BASIC_BLOCK (i);
5258 for (e = bb->succ; e ; e = e->succ_next)
5259 add_pred_succ (i, e->dest->index, s_preds, s_succs,
5260 num_preds, num_succs);
5263 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
5264 add_pred_succ (ENTRY_BLOCK, e->dest->index, s_preds, s_succs,
5265 num_preds, num_succs);
5269 dump_bb_data (file, preds, succs, live_info)
5271 int_list_ptr *preds;
5272 int_list_ptr *succs;
5278 fprintf (file, "BB data\n\n");
5279 for (bb = 0; bb < n_basic_blocks; bb++)
5281 fprintf (file, "BB %d, start %d, end %d\n", bb,
5282 INSN_UID (BLOCK_HEAD (bb)), INSN_UID (BLOCK_END (bb)));
5283 fprintf (file, " preds:");
5284 for (p = preds[bb]; p != NULL; p = p->next)
5286 int pred_bb = INT_LIST_VAL (p);
5287 if (pred_bb == ENTRY_BLOCK)
5288 fprintf (file, " entry");
5290 fprintf (file, " %d", pred_bb);
5292 fprintf (file, "\n");
5293 fprintf (file, " succs:");
5294 for (p = succs[bb]; p != NULL; p = p->next)
5296 int succ_bb = INT_LIST_VAL (p);
5297 if (succ_bb == EXIT_BLOCK)
5298 fprintf (file, " exit");
5300 fprintf (file, " %d", succ_bb);
5305 fprintf (file, "\nRegisters live at start:");
5306 for (regno = 0; regno < max_regno; regno++)
5307 if (REGNO_REG_SET_P (BASIC_BLOCK (bb)->global_live_at_start, regno))
5308 fprintf (file, " %d", regno);
5309 fprintf (file, "\n");
5311 fprintf (file, "\n");
5313 fprintf (file, "\n");
5316 /* Free basic block data storage. */
5321 free_int_list (&pred_int_list_blocks);
5324 /* Compute dominator relationships. */
5326 compute_dominators (dominators, post_dominators, s_preds, s_succs)
5327 sbitmap *dominators;
5328 sbitmap *post_dominators;
5329 int_list_ptr *s_preds;
5330 int_list_ptr *s_succs;
5332 int bb, changed, passes;
5333 sbitmap *temp_bitmap;
5335 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5336 sbitmap_vector_ones (dominators, n_basic_blocks);
5337 sbitmap_vector_ones (post_dominators, n_basic_blocks);
5338 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5340 sbitmap_zero (dominators[0]);
5341 SET_BIT (dominators[0], 0);
5343 sbitmap_zero (post_dominators[n_basic_blocks - 1]);
5344 SET_BIT (post_dominators[n_basic_blocks - 1], 0);
5351 for (bb = 1; bb < n_basic_blocks; bb++)
5353 sbitmap_intersect_of_predecessors (temp_bitmap[bb], dominators,
5355 SET_BIT (temp_bitmap[bb], bb);
5356 changed |= sbitmap_a_and_b (dominators[bb],
5359 sbitmap_intersect_of_successors (temp_bitmap[bb], post_dominators,
5361 SET_BIT (temp_bitmap[bb], bb);
5362 changed |= sbitmap_a_and_b (post_dominators[bb],
5363 post_dominators[bb],
5372 /* Compute dominator relationships using new flow graph structures. */
5374 compute_flow_dominators (dominators, post_dominators)
5375 sbitmap *dominators;
5376 sbitmap *post_dominators;
5378 int bb, changed, passes;
5379 sbitmap *temp_bitmap;
5381 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5382 sbitmap_vector_ones (dominators, n_basic_blocks);
5383 sbitmap_vector_ones (post_dominators, n_basic_blocks);
5384 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5386 sbitmap_zero (dominators[0]);
5387 SET_BIT (dominators[0], 0);
5389 sbitmap_zero (post_dominators[n_basic_blocks - 1]);
5390 SET_BIT (post_dominators[n_basic_blocks - 1], 0);
5397 for (bb = 1; bb < n_basic_blocks; bb++)
5399 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
5400 SET_BIT (temp_bitmap[bb], bb);
5401 changed |= sbitmap_a_and_b (dominators[bb],
5404 sbitmap_intersection_of_succs (temp_bitmap[bb], post_dominators, bb);
5405 SET_BIT (temp_bitmap[bb], bb);
5406 changed |= sbitmap_a_and_b (post_dominators[bb],
5407 post_dominators[bb],
5416 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
5419 compute_immediate_dominators (idom, dominators)
5421 sbitmap *dominators;
5426 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5428 /* Begin with tmp(n) = dom(n) - { n }. */
5429 for (b = n_basic_blocks; --b >= 0; )
5431 sbitmap_copy (tmp[b], dominators[b]);
5432 RESET_BIT (tmp[b], b);
5435 /* Subtract out all of our dominator's dominators. */
5436 for (b = n_basic_blocks; --b >= 0; )
5438 sbitmap tmp_b = tmp[b];
5441 for (s = n_basic_blocks; --s >= 0; )
5442 if (TEST_BIT (tmp_b, s))
5443 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
5446 /* Find the one bit set in the bitmap and put it in the output array. */
5447 for (b = n_basic_blocks; --b >= 0; )
5450 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
5453 sbitmap_vector_free (tmp);
5456 /* Count for a single SET rtx, X. */
5459 count_reg_sets_1 (x)
5463 register rtx reg = SET_DEST (x);
5465 /* Find the register that's set/clobbered. */
5466 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
5467 || GET_CODE (reg) == SIGN_EXTRACT
5468 || GET_CODE (reg) == STRICT_LOW_PART)
5469 reg = XEXP (reg, 0);
5471 if (GET_CODE (reg) == PARALLEL
5472 && GET_MODE (reg) == BLKmode)
5475 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
5476 count_reg_sets_1 (XVECEXP (reg, 0, i));
5480 if (GET_CODE (reg) == REG)
5482 regno = REGNO (reg);
5483 if (regno >= FIRST_PSEUDO_REGISTER)
5485 /* Count (weighted) references, stores, etc. This counts a
5486 register twice if it is modified, but that is correct. */
5487 REG_N_SETS (regno)++;
5489 REG_N_REFS (regno) += loop_depth;
5494 /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
5495 REG_N_REFS by the current loop depth for each SET or CLOBBER found. */
5501 register RTX_CODE code = GET_CODE (x);
5503 if (code == SET || code == CLOBBER)
5504 count_reg_sets_1 (x);
5505 else if (code == PARALLEL)
5508 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5510 code = GET_CODE (XVECEXP (x, 0, i));
5511 if (code == SET || code == CLOBBER)
5512 count_reg_sets_1 (XVECEXP (x, 0, i));
5517 /* Increment REG_N_REFS by the current loop depth each register reference
5521 count_reg_references (x)
5524 register RTX_CODE code;
5527 code = GET_CODE (x);
5547 /* If we are clobbering a MEM, mark any registers inside the address
5549 if (GET_CODE (XEXP (x, 0)) == MEM)
5550 count_reg_references (XEXP (XEXP (x, 0), 0));
5554 /* While we're here, optimize this case. */
5557 /* In case the SUBREG is not of a register, don't optimize */
5558 if (GET_CODE (x) != REG)
5560 count_reg_references (x);
5564 /* ... fall through ... */
5567 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
5568 REG_N_REFS (REGNO (x)) += loop_depth;
5573 register rtx testreg = SET_DEST (x);
5576 /* If storing into MEM, don't show it as being used. But do
5577 show the address as being used. */
5578 if (GET_CODE (testreg) == MEM)
5580 count_reg_references (XEXP (testreg, 0));
5581 count_reg_references (SET_SRC (x));
5585 /* Storing in STRICT_LOW_PART is like storing in a reg
5586 in that this SET might be dead, so ignore it in TESTREG.
5587 but in some other ways it is like using the reg.
5589 Storing in a SUBREG or a bit field is like storing the entire
5590 register in that if the register's value is not used
5591 then this SET is not needed. */
5592 while (GET_CODE (testreg) == STRICT_LOW_PART
5593 || GET_CODE (testreg) == ZERO_EXTRACT
5594 || GET_CODE (testreg) == SIGN_EXTRACT
5595 || GET_CODE (testreg) == SUBREG)
5597 /* Modifying a single register in an alternate mode
5598 does not use any of the old value. But these other
5599 ways of storing in a register do use the old value. */
5600 if (GET_CODE (testreg) == SUBREG
5601 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5606 testreg = XEXP (testreg, 0);
5609 /* If this is a store into a register,
5610 recursively scan the value being stored. */
5612 if ((GET_CODE (testreg) == PARALLEL
5613 && GET_MODE (testreg) == BLKmode)
5614 || GET_CODE (testreg) == REG)
5616 count_reg_references (SET_SRC (x));
5618 count_reg_references (SET_DEST (x));
5628 /* Recursively scan the operands of this expression. */
5631 register const char *fmt = GET_RTX_FORMAT (code);
5634 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5638 /* Tail recursive case: save a function call level. */
5644 count_reg_references (XEXP (x, i));
5646 else if (fmt[i] == 'E')
5649 for (j = 0; j < XVECLEN (x, i); j++)
5650 count_reg_references (XVECEXP (x, i, j));
5656 /* Recompute register set/reference counts immediately prior to register
5659 This avoids problems with set/reference counts changing to/from values
5660 which have special meanings to the register allocators.
5662 Additionally, the reference counts are the primary component used by the
5663 register allocators to prioritize pseudos for allocation to hard regs.
5664 More accurate reference counts generally lead to better register allocation.
5666 F is the first insn to be scanned.
5667 LOOP_STEP denotes how much loop_depth should be incremented per
5668 loop nesting level in order to increase the ref count more for references
5671 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
5672 possibly other information which is used by the register allocators. */
5675 recompute_reg_usage (f, loop_step)
5682 /* Clear out the old data. */
5683 max_reg = max_reg_num ();
5684 for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
5690 /* Scan each insn in the chain and count how many times each register is
5693 for (insn = f; insn; insn = NEXT_INSN (insn))
5695 /* Keep track of loop depth. */
5696 if (GET_CODE (insn) == NOTE)
5698 /* Look for loop boundaries. */
5699 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
5700 loop_depth -= loop_step;
5701 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
5702 loop_depth += loop_step;
5704 /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error.
5705 Abort now rather than setting register status incorrectly. */
5706 if (loop_depth == 0)
5709 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5713 /* This call will increment REG_N_SETS for each SET or CLOBBER
5714 of a register in INSN. It will also increment REG_N_REFS
5715 by the loop depth for each set of a register in INSN. */
5716 count_reg_sets (PATTERN (insn));
5718 /* count_reg_sets does not detect autoincrement address modes, so
5719 detect them here by looking at the notes attached to INSN. */
5720 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
5722 if (REG_NOTE_KIND (links) == REG_INC)
5723 /* Count (weighted) references, stores, etc. This counts a
5724 register twice if it is modified, but that is correct. */
5725 REG_N_SETS (REGNO (XEXP (links, 0)))++;
5728 /* This call will increment REG_N_REFS by the current loop depth for
5729 each reference to a register in INSN. */
5730 count_reg_references (PATTERN (insn));
5732 /* count_reg_references will not include counts for arguments to
5733 function calls, so detect them here by examining the
5734 CALL_INSN_FUNCTION_USAGE data. */
5735 if (GET_CODE (insn) == CALL_INSN)
5739 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5741 note = XEXP (note, 1))
5742 if (GET_CODE (XEXP (note, 0)) == USE)
5743 count_reg_references (XEXP (XEXP (note, 0), 0));
5749 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
5750 blocks. Returns a count of the number of registers that died. */
5753 count_or_remove_death_notes (blocks, kill)
5759 for (i = n_basic_blocks - 1; i >= 0; --i)
5764 if (! TEST_BIT (blocks, i))
5767 bb = BASIC_BLOCK (i);
5769 for (insn = bb->head; ; insn = NEXT_INSN (insn))
5771 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5773 rtx *pprev = ®_NOTES (insn);
5778 switch (REG_NOTE_KIND (link))
5781 if (GET_CODE (XEXP (link, 0)) == REG)
5783 rtx reg = XEXP (link, 0);
5786 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
5789 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
5797 rtx next = XEXP (link, 1);
5798 free_EXPR_LIST_node (link);
5799 *pprev = link = next;
5805 pprev = &XEXP (link, 1);
5812 if (insn == bb->end)
5820 /* Record INSN's block as BB. */
5823 set_block_for_insn (insn, bb)
5827 size_t uid = INSN_UID (insn);
5828 if (uid >= basic_block_for_insn->num_elements)
5832 /* Add one-eighth the size so we don't keep calling xrealloc. */
5833 new_size = uid + (uid + 7) / 8;
5835 VARRAY_GROW (basic_block_for_insn, new_size);
5837 VARRAY_BB (basic_block_for_insn, uid) = bb;
5840 /* Record INSN's block number as BB. */
5841 /* ??? This has got to go. */
5844 set_block_num (insn, bb)
5848 set_block_for_insn (insn, BASIC_BLOCK (bb));
5851 /* Verify the CFG consistency. This function check some CFG invariants and
5852 aborts when something is wrong. Hope that this function will help to
5853 convert many optimization passes to preserve CFG consistent.
5855 Currently it does following checks:
5857 - test head/end pointers
5858 - overlapping of basic blocks
5859 - edge list corectness
5860 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
5861 - tails of basic blocks (ensure that boundary is necesary)
5862 - scans body of the basic block for JUMP_INSN, CODE_LABEL
5863 and NOTE_INSN_BASIC_BLOCK
5864 - check that all insns are in the basic blocks
5865 (except the switch handling code, barriers and notes)
5867 In future it can be extended check a lot of other stuff as well
5868 (reachability of basic blocks, life information, etc. etc.). */
5873 const int max_uid = get_max_uid ();
5874 const rtx rtx_first = get_insns ();
5875 basic_block *bb_info;
5879 bb_info = (basic_block *) alloca (max_uid * sizeof (basic_block));
5880 memset (bb_info, 0, max_uid * sizeof (basic_block));
5882 /* First pass check head/end pointers and set bb_info array used by
5884 for (i = n_basic_blocks - 1; i >= 0; i--)
5886 basic_block bb = BASIC_BLOCK (i);
5888 /* Check the head pointer and make sure that it is pointing into
5890 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
5895 error ("Head insn %d for block %d not found in the insn stream.",
5896 INSN_UID (bb->head), bb->index);
5900 /* Check the end pointer and make sure that it is pointing into
5902 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5904 if (bb_info[INSN_UID (x)] != NULL)
5906 error ("Insn %d is in multiple basic blocks (%d and %d)",
5907 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
5910 bb_info[INSN_UID (x)] = bb;
5917 error ("End insn %d for block %d not found in the insn stream.",
5918 INSN_UID (bb->end), bb->index);
5923 /* Now check the basic blocks (boundaries etc.) */
5924 for (i = n_basic_blocks - 1; i >= 0; i--)
5926 basic_block bb = BASIC_BLOCK (i);
5927 /* Check corectness of edge lists */
5935 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
5937 fprintf (stderr, "Predecessor: ");
5938 dump_edge_info (stderr, e, 0);
5939 fprintf (stderr, "\nSuccessor: ");
5940 dump_edge_info (stderr, e, 1);
5944 if (e->dest != EXIT_BLOCK_PTR)
5946 edge e2 = e->dest->pred;
5947 while (e2 && e2 != e)
5951 error ("Basic block %i edge lists are corrupted", bb->index);
5963 error ("Basic block %d pred edge is corrupted", bb->index);
5964 fputs ("Predecessor: ", stderr);
5965 dump_edge_info (stderr, e, 0);
5966 fputs ("\nSuccessor: ", stderr);
5967 dump_edge_info (stderr, e, 1);
5968 fputc ('\n', stderr);
5971 if (e->src != ENTRY_BLOCK_PTR)
5973 edge e2 = e->src->succ;
5974 while (e2 && e2 != e)
5978 error ("Basic block %i edge lists are corrupted", bb->index);
5985 /* OK pointers are correct. Now check the header of basic
5986 block. It ought to contain optional CODE_LABEL followed
5987 by NOTE_BASIC_BLOCK. */
5989 if (GET_CODE (x) == CODE_LABEL)
5993 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
5999 if (GET_CODE (x) != NOTE
6000 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
6001 || NOTE_BASIC_BLOCK (x) != bb)
6003 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
6010 /* Do checks for empty blocks here */
6017 if (GET_CODE (x) == NOTE
6018 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
6020 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
6021 INSN_UID (x), bb->index);
6028 if (GET_CODE (x) == JUMP_INSN
6029 || GET_CODE (x) == CODE_LABEL
6030 || GET_CODE (x) == BARRIER)
6032 error ("In basic block %d:", bb->index);
6033 fatal_insn ("Flow control insn inside a basic block", x);
6044 if (!bb_info[INSN_UID (x)])
6046 switch (GET_CODE (x))
6053 /* An addr_vec is placed outside any block block. */
6055 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
6056 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
6057 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
6062 /* But in any case, non-deletable labels can appear anywhere. */
6066 fatal_insn ("Insn outside basic block", x);
6077 /* Functions to access an edge list with a vector representation.
6078 Enough data is kept such that given an index number, the
6079 pred and succ that edge reprsents can be determined, or
6080 given a pred and a succ, it's index number can be returned.
6081 This allows algorithms which comsume a lot of memory to
6082 represent the normally full matrix of edge (pred,succ) with a
6083 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6084 wasted space in the client code due to sparse flow graphs. */
6086 /* This functions initializes the edge list. Basically the entire
6087 flowgraph is processed, and all edges are assigned a number,
6088 and the data structure is filed in. */
6092 struct edge_list *elist;
6098 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6102 /* Determine the number of edges in the flow graph by counting successor
6103 edges on each basic block. */
6104 for (x = 0; x < n_basic_blocks; x++)
6106 basic_block bb = BASIC_BLOCK (x);
6108 for (e = bb->succ; e; e = e->succ_next)
6111 /* Don't forget successors of the entry block. */
6112 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6115 elist = xmalloc (sizeof (struct edge_list));
6116 elist->num_blocks = block_count;
6117 elist->num_edges = num_edges;
6118 elist->index_to_edge = xmalloc (sizeof (edge) * num_edges);
6122 /* Follow successors of the entry block, and register these edges. */
6123 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6125 elist->index_to_edge[num_edges] = e;
6129 for (x = 0; x < n_basic_blocks; x++)
6131 basic_block bb = BASIC_BLOCK (x);
6133 /* Follow all successors of blocks, and register these edges. */
6134 for (e = bb->succ; e; e = e->succ_next)
6136 elist->index_to_edge[num_edges] = e;
6143 /* This function free's memory associated with an edge list. */
6145 free_edge_list (elist)
6146 struct edge_list *elist;
6150 free (elist->index_to_edge);
6155 /* This function provides debug output showing an edge list. */
6157 print_edge_list (f, elist)
6159 struct edge_list *elist;
6162 fprintf(f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6163 elist->num_blocks - 2, elist->num_edges);
6165 for (x = 0; x < elist->num_edges; x++)
6167 fprintf (f, " %-4d - edge(", x);
6168 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6169 fprintf (f,"entry,");
6171 fprintf (f,"%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6173 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6174 fprintf (f,"exit)\n");
6176 fprintf (f,"%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6180 /* This function provides an internal consistancy check of an edge list,
6181 verifying that all edges are present, and that there are no
6184 verify_edge_list (f, elist)
6186 struct edge_list *elist;
6188 int x, pred, succ, index;
6191 for (x = 0; x < n_basic_blocks; x++)
6193 basic_block bb = BASIC_BLOCK (x);
6195 for (e = bb->succ; e; e = e->succ_next)
6197 pred = e->src->index;
6198 succ = e->dest->index;
6199 index = EDGE_INDEX (elist, e->src, e->dest);
6200 if (index == EDGE_INDEX_NO_EDGE)
6202 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6205 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6206 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6207 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6208 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6209 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6210 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6213 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6215 pred = e->src->index;
6216 succ = e->dest->index;
6217 index = EDGE_INDEX (elist, e->src, e->dest);
6218 if (index == EDGE_INDEX_NO_EDGE)
6220 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6223 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6224 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6225 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6226 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6227 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6228 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6230 /* We've verified that all the edges are in the list, no lets make sure
6231 there are no spurious edges in the list. */
6233 for (pred = 0 ; pred < n_basic_blocks; pred++)
6234 for (succ = 0 ; succ < n_basic_blocks; succ++)
6236 basic_block p = BASIC_BLOCK (pred);
6237 basic_block s = BASIC_BLOCK (succ);
6241 for (e = p->succ; e; e = e->succ_next)
6247 for (e = s->pred; e; e = e->pred_next)
6253 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6254 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6255 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6257 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6258 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6259 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6260 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6261 BASIC_BLOCK (succ)));
6263 for (succ = 0 ; succ < n_basic_blocks; succ++)
6265 basic_block p = ENTRY_BLOCK_PTR;
6266 basic_block s = BASIC_BLOCK (succ);
6270 for (e = p->succ; e; e = e->succ_next)
6276 for (e = s->pred; e; e = e->pred_next)
6282 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6283 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6284 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6286 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6287 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6288 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6289 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6290 BASIC_BLOCK (succ)));
6292 for (pred = 0 ; pred < n_basic_blocks; pred++)
6294 basic_block p = BASIC_BLOCK (pred);
6295 basic_block s = EXIT_BLOCK_PTR;
6299 for (e = p->succ; e; e = e->succ_next)
6305 for (e = s->pred; e; e = e->pred_next)
6311 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6312 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6313 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6315 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6316 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6317 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6318 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6323 /* This routine will determine what, if any, edge there is between
6324 a specified predecessor and successor. */
6327 find_edge_index (edge_list, pred, succ)
6328 struct edge_list *edge_list;
6329 basic_block pred, succ;
6332 for (x = 0; x < NUM_EDGES (edge_list); x++)
6334 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6335 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6338 return (EDGE_INDEX_NO_EDGE);
6341 /* This function will remove an edge from the flow graph. */
6346 edge last_pred = NULL;
6347 edge last_succ = NULL;
6349 basic_block src, dest;
6352 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6358 last_succ->succ_next = e->succ_next;
6360 src->succ = e->succ_next;
6362 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6368 last_pred->pred_next = e->pred_next;
6370 dest->pred = e->pred_next;
6376 /* This routine will remove any fake successor edges for a basic block.
6377 When the edge is removed, it is also removed from whatever predecessor
6380 remove_fake_successors (bb)
6384 for (e = bb->succ; e ; )
6388 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6393 /* This routine will remove all fake edges from the flow graph. If
6394 we remove all fake successors, it will automatically remove all
6395 fake predecessors. */
6397 remove_fake_edges ()
6401 for (x = 0; x < n_basic_blocks; x++)
6402 remove_fake_successors (BASIC_BLOCK (x));
6404 /* We've handled all successors except the entry block's. */
6405 remove_fake_successors (ENTRY_BLOCK_PTR);
6408 /* This functions will add a fake edge between any block which has no
6409 successors, and the exit block. Some data flow equations require these
6412 add_noreturn_fake_exit_edges ()
6416 for (x = 0; x < n_basic_blocks; x++)
6417 if (BASIC_BLOCK (x)->succ == NULL)
6418 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);