1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 88, 92-98, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 /* This file contains the data flow analysis pass of the compiler. It
23 computes data flow information which tells combine_instructions
24 which insns to consider combining and controls register allocation.
26 Additional data flow information that is too bulky to record is
27 generated during the analysis, and is used at that time to create
28 autoincrement and autodecrement addressing.
30 The first step is dividing the function into basic blocks.
31 find_basic_blocks does this. Then life_analysis determines
32 where each register is live and where it is dead.
34 ** find_basic_blocks **
36 find_basic_blocks divides the current function's rtl into basic
37 blocks and constructs the CFG. The blocks are recorded in the
38 basic_block_info array; the CFG exists in the edge structures
39 referenced by the blocks.
41 find_basic_blocks also finds any unreachable loops and deletes them.
45 life_analysis is called immediately after find_basic_blocks.
46 It uses the basic block information to determine where each
47 hard or pseudo register is live.
49 ** live-register info **
51 The information about where each register is live is in two parts:
52 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
54 basic_block->global_live_at_start has an element for each basic
55 block, and the element is a bit-vector with a bit for each hard or
56 pseudo register. The bit is 1 if the register is live at the
57 beginning of the basic block.
59 Two types of elements can be added to an insn's REG_NOTES.
60 A REG_DEAD note is added to an insn's REG_NOTES for any register
61 that meets both of two conditions: The value in the register is not
62 needed in subsequent insns and the insn does not replace the value in
63 the register (in the case of multi-word hard registers, the value in
64 each register must be replaced by the insn to avoid a REG_DEAD note).
66 In the vast majority of cases, an object in a REG_DEAD note will be
67 used somewhere in the insn. The (rare) exception to this is if an
68 insn uses a multi-word hard register and only some of the registers are
69 needed in subsequent insns. In that case, REG_DEAD notes will be
70 provided for those hard registers that are not subsequently needed.
71 Partial REG_DEAD notes of this type do not occur when an insn sets
72 only some of the hard registers used in such a multi-word operand;
73 omitting REG_DEAD notes for objects stored in an insn is optional and
74 the desire to do so does not justify the complexity of the partial
77 REG_UNUSED notes are added for each register that is set by the insn
78 but is unused subsequently (if every register set by the insn is unused
79 and the insn does not reference memory or have some other side-effect,
80 the insn is deleted instead). If only part of a multi-word hard
81 register is used in a subsequent insn, REG_UNUSED notes are made for
82 the parts that will not be used.
84 To determine which registers are live after any insn, one can
85 start from the beginning of the basic block and scan insns, noting
86 which registers are set by each insn and which die there.
88 ** Other actions of life_analysis **
90 life_analysis sets up the LOG_LINKS fields of insns because the
91 information needed to do so is readily available.
93 life_analysis deletes insns whose only effect is to store a value
96 life_analysis notices cases where a reference to a register as
97 a memory address can be combined with a preceding or following
98 incrementation or decrementation of the register. The separate
99 instruction to increment or decrement is deleted and the address
100 is changed to a POST_INC or similar rtx.
102 Each time an incrementing or decrementing address is created,
103 a REG_INC element is added to the insn's REG_NOTES list.
105 life_analysis fills in certain vectors containing information about
106 register usage: reg_n_refs, reg_n_deaths, reg_n_sets, reg_live_length,
107 reg_n_calls_crossed and reg_basic_block.
109 life_analysis sets current_function_sp_is_unchanging if the function
110 doesn't modify the stack pointer. */
114 Split out from life_analysis:
115 - local property discovery (bb->local_live, bb->local_set)
116 - global property computation
118 - pre/post modify transformation
126 #include "basic-block.h"
127 #include "insn-config.h"
129 #include "hard-reg-set.h"
132 #include "function.h"
136 #include "insn-flags.h"
140 #define obstack_chunk_alloc xmalloc
141 #define obstack_chunk_free free
144 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
145 the stack pointer does not matter. The value is tested only in
146 functions that have frame pointers.
147 No definition is equivalent to always zero. */
148 #ifndef EXIT_IGNORE_STACK
149 #define EXIT_IGNORE_STACK 0
/* Default HAVE_epilogue / HAVE_prologue to zero so they can be tested
   unconditionally on targets whose insn-flags.h does not define them.  */
152 #ifndef HAVE_epilogue
153 #define HAVE_epilogue 0
156 #ifndef HAVE_prologue
157 #define HAVE_prologue 0
/* NOTE(review): alloca (0) is presumably used to flush storage obtained
   with the C alloca emulation -- confirm on targets without builtin
   alloca.  */
161 #define CLEAN_ALLOCA alloca (0)
167 /* The contents of the current function definition are allocated
168 in this obstack, and all are freed at the end of the function.
169 For top-level functions, this is temporary_obstack.
170 Separate obstacks are made for nested functions. */
172 extern struct obstack *function_obstack;
174 /* Number of basic blocks in the current function. */
178 /* Number of edges in the current function. */
182 /* The basic block array. */
184 varray_type basic_block_info;
186 /* The special entry and exit blocks. */
/* NOTE(review): the initializers below appear abbreviated in this listing
   (the embedded numbering jumps); only the commented fields are visible.  */
188 struct basic_block_def entry_exit_blocks[2] =
195 NULL, /* local_set */
196 NULL, /* global_live_at_start */
197 NULL, /* global_live_at_end */
199 ENTRY_BLOCK, /* index */
201 -1, -1 /* eh_beg, eh_end */
208 NULL, /* local_set */
209 NULL, /* global_live_at_start */
210 NULL, /* global_live_at_end */
212 EXIT_BLOCK, /* index */
214 -1, -1 /* eh_beg, eh_end */
218 /* Nonzero if the second flow pass has completed. */
221 /* Maximum register number used in this function, plus one. */
225 /* Indexed by n, giving various register information */
227 varray_type reg_n_info;
229 /* Size of the reg_n_info table. */
231 unsigned int reg_n_max;
233 /* Element N is the next insn that uses (hard or pseudo) register number N
234 within the current basic block; or zero, if there is no such insn.
235 This is valid only during the final backward scan in propagate_block. */
237 static rtx *reg_next_use;
239 /* Size of a regset for the current function,
240 in (1) bytes and (2) elements. */
245 /* Regset of regs live when calls to `setjmp'-like functions happen. */
246 /* ??? Does this exist only for the setjmp-clobbered warning message? */
248 regset regs_live_at_setjmp;
250 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
251 that have to go in the same hard reg.
252 The first two regs in the list are a pair, and the next two
253 are another pair, etc. */
256 /* Depth within loops of basic block being scanned for lifetime analysis,
257 plus one. This is the weight attached to references to registers. */
259 static int loop_depth;
261 /* During propagate_block, this is non-zero if the value of CC0 is live. */
265 /* During propagate_block, this contains a list of all the MEMs we are
266 tracking for dead store elimination. */
268 static rtx mem_set_list;
270 /* Set of registers that may be eliminable. These are handled specially
271 in updating regs_ever_live. */
273 static HARD_REG_SET elim_reg_set;
275 /* The basic block structure for every insn, indexed by uid. */
277 varray_type basic_block_for_insn;
279 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
280 /* ??? Should probably be using LABEL_NUSES instead. It would take a
281 bit of surgery to be able to use or co-opt the routines in jump. */
283 static rtx label_value_list;
285 /* INSN_VOLATILE (insn) is 1 if the insn refers to anything volatile. */
287 #define INSN_VOLATILE(INSN) bitmap_bit_p (uid_volatile, INSN_UID (INSN))
288 #define SET_INSN_VOLATILE(INSN) bitmap_set_bit (uid_volatile, INSN_UID (INSN))
/* Bitmap backing the two macros above, indexed by INSN_UID.  */
289 static bitmap uid_volatile;
291 /* Forward declarations */
292 static int count_basic_blocks PROTO((rtx));
293 static rtx find_basic_blocks_1 PROTO((rtx));
294 static void create_basic_block PROTO((int, rtx, rtx, rtx));
295 static void clear_edges PROTO((void));
296 static void make_edges PROTO((rtx));
297 static void make_edge PROTO((sbitmap *, basic_block,
299 static void make_label_edge PROTO((sbitmap *, basic_block,
301 static void make_eh_edge PROTO((sbitmap *, eh_nesting_info *,
302 basic_block, rtx, int));
303 static void mark_critical_edges PROTO((void));
304 static void move_stray_eh_region_notes PROTO((void));
305 static void record_active_eh_regions PROTO((rtx));
307 static void commit_one_edge_insertion PROTO((edge));
309 static void delete_unreachable_blocks PROTO((void));
310 static void delete_eh_regions PROTO((void));
311 static int can_delete_note_p PROTO((rtx));
312 static int delete_block PROTO((basic_block));
313 static void expunge_block PROTO((basic_block));
314 static rtx flow_delete_insn PROTO((rtx));
315 static int can_delete_label_p PROTO((rtx));
316 static int merge_blocks_move_predecessor_nojumps PROTO((basic_block,
318 static int merge_blocks_move_successor_nojumps PROTO((basic_block,
320 static void merge_blocks_nomove PROTO((basic_block, basic_block));
321 static int merge_blocks PROTO((edge,basic_block,basic_block));
322 static void try_merge_blocks PROTO((void));
323 static void tidy_fallthru_edge PROTO((edge,basic_block,basic_block));
324 static void calculate_loop_depth PROTO((rtx));
326 static int verify_wide_reg_1 PROTO((rtx *, void *));
327 static void verify_wide_reg PROTO((int, rtx, rtx));
328 static void verify_local_live_at_start PROTO((regset, basic_block));
329 static int set_noop_p PROTO((rtx));
330 static int noop_move_p PROTO((rtx));
331 static void notice_stack_pointer_modification PROTO ((rtx, rtx));
332 static void record_volatile_insns PROTO((rtx));
333 static void mark_reg PROTO((regset, rtx));
334 static void mark_regs_live_at_end PROTO((regset));
335 static void life_analysis_1 PROTO((rtx, int, int));
336 static void calculate_global_regs_live PROTO((sbitmap, sbitmap, int));
337 static void propagate_block PROTO((regset, rtx, rtx,
339 static int insn_dead_p PROTO((rtx, regset, int, rtx));
340 static int libcall_dead_p PROTO((rtx, regset, rtx, rtx));
341 static void mark_set_regs PROTO((regset, regset, rtx,
343 static void mark_set_1 PROTO((regset, regset, rtx,
346 static void find_auto_inc PROTO((regset, rtx, rtx));
347 static int try_pre_increment_1 PROTO((rtx));
348 static int try_pre_increment PROTO((rtx, rtx, HOST_WIDE_INT));
350 static void mark_used_regs PROTO((regset, regset, rtx, int, rtx));
351 void dump_flow_info PROTO((FILE *));
352 static void dump_edge_info PROTO((FILE *, edge, int));
354 static int_list_ptr alloc_int_list_node PROTO ((int_list_block **));
355 static int_list_ptr add_int_list_node PROTO ((int_list_block **,
358 static void add_pred_succ PROTO ((int, int, int_list_ptr *,
359 int_list_ptr *, int *, int *));
361 static void count_reg_sets_1 PROTO ((rtx));
362 static void count_reg_sets PROTO ((rtx));
363 static void count_reg_references PROTO ((rtx));
/* Duplicate forward declaration of notice_stack_pointer_modification
   (already declared above) removed here.  */
365 static void invalidate_mems_from_autoinc PROTO ((rtx));
366 static void remove_edge PROTO ((edge));
367 static void remove_fake_successors PROTO ((basic_block));
369 /* This function is always defined so it can be called from the
370 debugger, and it is declared extern so we don't get warnings about
372 void verify_flow_info PROTO ((void));
374 /* Flags for propagate_block. */
376 #define PROP_DEATH_NOTES 1 /* Create DEAD and UNUSED notes. */
377 #define PROP_LOG_LINKS 2 /* Create LOG_LINKS. */
378 #define PROP_REG_INFO 4 /* Update regs_ever_live et al. */
379 #define PROP_KILL_DEAD_CODE 8 /* Remove dead code. */
380 #define PROP_SCAN_DEAD_CODE 16 /* Scan for dead code. */
381 #define PROP_AUTOINC 32 /* Create autoinc mem references. */
382 #define PROP_FINAL 63 /* All of the above. */
384 /* Find basic blocks of the current function.
385 F is the first insn of the function and NREGS the number of register
/* NOTE(review): the embedded numbering in this listing jumps, so some
   original lines of this function appear to be elided; the comments below
   cover only what is visible.  */
389 find_basic_blocks (f, nregs, file, do_cleanup)
391 int nregs ATTRIBUTE_UNUSED;
392 FILE *file ATTRIBUTE_UNUSED;
397 /* Flush out existing data. */
398 if (basic_block_info != NULL)
404 /* Clear bb->aux on all extant basic blocks. We'll use this as a
405 tag for reuse during create_basic_block, just in case some pass
406 copies around basic block notes improperly. */
407 for (i = 0; i < n_basic_blocks; ++i)
408 BASIC_BLOCK (i)->aux = NULL;
410 VARRAY_FREE (basic_block_info);
413 n_basic_blocks = count_basic_blocks (f);
415 /* Size the basic block table. The actual structures will be allocated
416 by find_basic_blocks_1, since we want to keep the structure pointers
417 stable across calls to find_basic_blocks. */
418 /* ??? This whole issue would be much simpler if we called find_basic_blocks
419 exactly once, and thereafter we don't have a single long chain of
420 instructions at all until close to the end of compilation when we
421 actually lay them out. */
423 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
425 label_value_list = find_basic_blocks_1 (f);
427 /* Record the block to which an insn belongs. */
428 /* ??? This should be done another way, by which (perhaps) a label is
429 tagged directly with the basic block that it starts. It is used for
430 more than that currently, but IMO that is the only valid use. */
432 max_uid = get_max_uid ();
434 /* Leave space for insns life_analysis makes in some cases for auto-inc.
435 These cases are rare, so we don't need too much space. */
436 max_uid += max_uid / 10;
439 compute_bb_for_insn (max_uid);
441 /* Discover the edges of our cfg. */
443 record_active_eh_regions (f);
444 make_edges (label_value_list);
446 /* Delete unreachable blocks, then merge blocks when possible. */
450 delete_unreachable_blocks ();
451 move_stray_eh_region_notes ();
/* Blocks were deleted and EH notes moved above, so recompute each
   block's eh_beg/eh_end.  */
452 record_active_eh_regions (f);
456 /* Mark critical edges. */
458 mark_critical_edges ();
460 /* Discover the loop depth at the start of each basic block to aid
461 register allocation. */
462 calculate_loop_depth (f);
464 /* Kill the data we won't maintain. */
465 label_value_list = NULL_RTX;
467 #ifdef ENABLE_CHECKING
472 /* Count the basic blocks of the function. */
475 count_basic_blocks (f)
479 register RTX_CODE prev_code;
480 register int count = 0;
482 int call_had_abnormal_edge = 0;
483 rtx prev_call = NULL_RTX;
/* Seed prev_code with JUMP_INSN so the very first real insn is counted
   as starting a new basic block.  */
485 prev_code = JUMP_INSN;
486 for (insn = f; insn; insn = NEXT_INSN (insn))
488 register RTX_CODE code = GET_CODE (insn);
/* A basic block begins at a label, or at the first insn following a
   jump, a barrier, or a call that created an abnormal edge.  */
490 if (code == CODE_LABEL
491 || (GET_RTX_CLASS (code) == 'i'
492 && (prev_code == JUMP_INSN
493 || prev_code == BARRIER
494 || (prev_code == CALL_INSN && call_had_abnormal_edge))))
498 /* If the previous insn was a call that did not create an
499 abnormal edge, we want to add a nop so that the CALL_INSN
500 itself is not at basic_block_end. This allows us to
501 easily distinguish between normal calls and those which
502 create abnormal edges in the flow graph. */
504 if (count > 0 && prev_call != 0 && !call_had_abnormal_edge)
506 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
507 emit_insn_after (nop, prev_call);
511 /* Record whether this call created an edge. */
512 if (code == CALL_INSN)
514 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
/* A call without a REG_EH_REGION note defaults to region 1, which the
   tests below treat as "may have an abnormal edge".  */
515 int region = (note ? XWINT (XEXP (note, 0), 0) : 1);
517 call_had_abnormal_edge = 0;
519 /* If there is a specified EH region, we have an edge. */
520 if (eh_region && region > 0)
521 call_had_abnormal_edge = 1;
524 /* If there is a nonlocal goto label and the specified
525 region number isn't -1, we have an edge. (0 means
526 no throw, but might have a nonlocal goto). */
527 if (nonlocal_goto_handler_labels && region >= 0)
528 call_had_abnormal_edge = 1;
/* Any non-note insn other than a call clears the pending call.  */
531 else if (code != NOTE)
532 prev_call = NULL_RTX;
536 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
538 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
543 /* The rest of the compiler works a bit smoother when we don't have to
544 check for the edge case of do-nothing functions with no basic blocks. */
547 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
554 /* Find all basic blocks of the function whose first insn is F.
556 Collect and return a list of labels whose addresses are taken. This
557 will be used in make_edges for use with computed gotos. */
560 find_basic_blocks_1 (f)
563 register rtx insn, next;
564 int call_has_abnormal_edge = 0;
566 rtx bb_note = NULL_RTX;
567 rtx eh_list = NULL_RTX;
568 rtx label_value_list = NULL_RTX;
572 /* We process the instructions in a slightly different way than we did
573 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
574 closed out the previous block, so that it gets attached at the proper
575 place. Since this form should be equivalent to the previous,
576 count_basic_blocks continues to use the old form as a check. */
578 for (insn = f; insn; insn = next)
580 enum rtx_code code = GET_CODE (insn);
582 next = NEXT_INSN (insn);
584 if (code == CALL_INSN)
586 /* Record whether this call created an edge. */
587 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
/* Absent a REG_EH_REGION note, assume region 1 (call may throw).  */
588 int region = (note ? XWINT (XEXP (note, 0), 0) : 1);
589 call_has_abnormal_edge = 0;
591 /* If there is an EH region, we have an edge. */
592 if (eh_list && region > 0)
593 call_has_abnormal_edge = 1;
596 /* If there is a nonlocal goto label and the specified
597 region number isn't -1, we have an edge. (0 means
598 no throw, but might have a nonlocal goto). */
599 if (nonlocal_goto_handler_labels && region >= 0)
600 call_has_abnormal_edge = 1;
608 int kind = NOTE_LINE_NUMBER (insn);
610 /* Keep a LIFO list of the currently active exception notes. */
611 if (kind == NOTE_INSN_EH_REGION_BEG)
612 eh_list = alloc_INSN_LIST (insn, eh_list);
613 else if (kind == NOTE_INSN_EH_REGION_END)
616 eh_list = XEXP (eh_list, 1);
617 free_INSN_LIST_node (t);
620 /* Look for basic block notes with which to keep the
621 basic_block_info pointers stable. Unthread the note now;
622 we'll put it back at the right place in create_basic_block.
623 Or not at all if we've already found a note in this block. */
624 else if (kind == NOTE_INSN_BASIC_BLOCK)
626 if (bb_note == NULL_RTX)
628 next = flow_delete_insn (insn);
635 /* A basic block starts at a label. If we've closed one off due
636 to a barrier or some such, no need to do it again. */
637 if (head != NULL_RTX)
639 /* While we now have edge lists with which other portions of
640 the compiler might determine a call ending a basic block
641 does not imply an abnormal edge, it will be a bit before
642 everything can be updated. So continue to emit a noop at
643 the end of such a block. */
644 if (GET_CODE (end) == CALL_INSN)
646 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
647 end = emit_insn_after (nop, end);
650 create_basic_block (i++, head, end, bb_note);
657 /* A basic block ends at a jump. */
658 if (head == NULL_RTX)
662 /* ??? Make a special check for table jumps. The way this
663 happens is truly and amazingly gross. We are about to
664 create a basic block that contains just a code label and
665 an addr*vec jump insn. Worse, an addr_diff_vec creates
666 its own natural loop.
668 Prevent this bit of brain damage, pasting things together
669 correctly in make_edges.
671 The correct solution involves emitting the table directly
672 on the tablejump instruction as a note, or JUMP_LABEL. */
674 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
675 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
683 goto new_bb_inclusive;
686 /* A basic block ends at a barrier. It may be that an unconditional
687 jump already closed the basic block -- no need to do it again. */
688 if (head == NULL_RTX)
691 /* While we now have edge lists with which other portions of the
692 compiler might determine a call ending a basic block does not
693 imply an abnormal edge, it will be a bit before everything can
694 be updated. So continue to emit a noop at the end of such a
696 if (GET_CODE (end) == CALL_INSN)
698 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
699 end = emit_insn_after (nop, end);
701 goto new_bb_exclusive;
704 /* A basic block ends at a call that can either throw or
705 do a non-local goto. */
706 if (call_has_abnormal_edge)
709 if (head == NULL_RTX)
714 create_basic_block (i++, head, end, bb_note);
715 head = end = NULL_RTX;
722 if (GET_RTX_CLASS (code) == 'i')
724 if (head == NULL_RTX)
731 if (GET_RTX_CLASS (code) == 'i')
735 /* Make a list of all labels referred to other than by jumps
736 (which just don't have the REG_LABEL notes).
738 Make a special exception for labels followed by an ADDR*VEC,
739 as this would be a part of the tablejump setup code.
741 Make a special exception for the eh_return_stub_label, which
742 we know isn't part of any otherwise visible control flow. */
744 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
745 if (REG_NOTE_KIND (note) == REG_LABEL)
747 rtx lab = XEXP (note, 0), next;
749 if (lab == eh_return_stub_label)
751 else if ((next = next_nonnote_insn (lab)) != NULL
752 && GET_CODE (next) == JUMP_INSN
753 && (GET_CODE (PATTERN (next)) == ADDR_VEC
754 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
758 = alloc_EXPR_LIST (0, XEXP (note, 0), label_value_list);
/* Close out the final block if the function did not end with a jump
   or barrier.  */
763 if (head != NULL_RTX)
764 create_basic_block (i++, head, end, bb_note);
/* Sanity check: must agree with count_basic_blocks.  */
766 if (i != n_basic_blocks)
769 return label_value_list;
772 /* Create a new basic block consisting of the instructions between
773 HEAD and END inclusive. Reuses the note and basic block struct
774 in BB_NOTE, if any. */
777 create_basic_block (index, head, end, bb_note)
779 rtx head, end, bb_note;
784 && ! RTX_INTEGRATED_P (bb_note)
785 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
788 /* If we found an existing note, thread it back onto the chain. */
/* Place the note after a leading label so the label remains first in
   the block; otherwise put it before HEAD.  */
790 if (GET_CODE (head) == CODE_LABEL)
791 add_insn_after (bb_note, head);
794 add_insn_before (bb_note, head);
800 /* Otherwise we must create a note and a basic block structure.
801 Since we allow basic block structs in rtl, give the struct
802 the same lifetime by allocating it off the function obstack
803 rather than using malloc. */
805 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
806 memset (bb, 0, sizeof (*bb));
808 if (GET_CODE (head) == CODE_LABEL)
809 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
812 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
/* Point the note back at its block so a later pass can recover BB.  */
815 NOTE_BASIC_BLOCK (bb_note) = bb;
818 /* Always include the bb note in the block. */
819 if (NEXT_INSN (end) == bb_note)
825 BASIC_BLOCK (index) = bb;
827 /* Tag the block so that we know it has been used when considering
828 other basic block notes. */
832 /* Records the basic block struct in BB_FOR_INSN, for every instruction
833 indexed by INSN_UID. MAX is the size of the array. */
836 compute_bb_for_insn (max)
841 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
/* Walk each block's insn chain and record the owning block per uid.  */
843 for (i = 0; i < n_basic_blocks; ++i)
845 basic_block bb = BASIC_BLOCK (i);
852 int uid = INSN_UID (insn);
854 VARRAY_BB (basic_block_for_insn, uid) = bb;
857 insn = NEXT_INSN (insn);
862 /* Free the memory associated with the edge structures. */
/* Release every block's successor edges, then the entry block's, and
   finally detach the entry/exit edge lists.  */
870 for (i = 0; i < n_basic_blocks; ++i)
872 basic_block bb = BASIC_BLOCK (i);
874 for (e = bb->succ; e ; e = n)
884 for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
890 ENTRY_BLOCK_PTR->succ = 0;
891 EXIT_BLOCK_PTR->pred = 0;
896 /* Identify the edges between basic blocks.
898 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
899 that are otherwise unreachable may be reachable with a non-local goto.
901 BB_EH_END is an array indexed by basic block number in which we record
902 the list of exception regions active at the end of the basic block. */
905 make_edges (label_value_list)
906 rtx label_value_list;
909 eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
910 sbitmap *edge_cache = NULL;
912 /* Assume no computed jump; revise as we create edges. */
913 current_function_has_computed_jump = 0;
915 /* Heavy use of computed goto in machine-generated code can lead to
916 nearly fully-connected CFGs. In that case we spend a significant
917 amount of time searching the edge lists for duplicates. */
/* Only allocate the NxN edge-presence cache when computed gotos are
   possible (forced labels or address-taken labels exist).  */
918 if (forced_labels || label_value_list)
920 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
921 sbitmap_vector_zero (edge_cache, n_basic_blocks);
924 /* By nature of the way these get numbered, block 0 is always the entry. */
925 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
927 for (i = 0; i < n_basic_blocks; ++i)
929 basic_block bb = BASIC_BLOCK (i);
932 int force_fallthru = 0;
934 /* Examine the last instruction of the block, and discover the
935 ways we can leave the block. */
938 code = GET_CODE (insn);
941 if (code == JUMP_INSN)
945 /* ??? Recognize a tablejump and do the right thing. */
946 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
947 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
948 && GET_CODE (tmp) == JUMP_INSN
949 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
950 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
/* ADDR_VEC stores its label vector in operand 0, ADDR_DIFF_VEC in
   operand 1; add an edge for every label in the jump table.  */
955 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
956 vec = XVEC (PATTERN (tmp), 0);
958 vec = XVEC (PATTERN (tmp), 1);
960 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
961 make_label_edge (edge_cache, bb,
962 XEXP (RTVEC_ELT (vec, j), 0), 0);
964 /* Some targets (eg, ARM) emit a conditional jump that also
965 contains the out-of-range target. Scan for these and
966 add an edge if necessary. */
967 if ((tmp = single_set (insn)) != NULL
968 && SET_DEST (tmp) == pc_rtx
969 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
970 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
971 make_label_edge (edge_cache, bb,
972 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
974 #ifdef CASE_DROPS_THROUGH
975 /* Silly VAXen. The ADDR_VEC is going to be in the way of
976 us naturally detecting fallthru into the next block. */
981 /* If this is a computed jump, then mark it as reaching
982 everything on the label_value_list and forced_labels list. */
983 else if (computed_jump_p (insn))
985 current_function_has_computed_jump = 1;
987 for (x = label_value_list; x; x = XEXP (x, 1))
988 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
990 for (x = forced_labels; x; x = XEXP (x, 1))
991 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
994 /* Returns create an exit out. */
995 else if (returnjump_p (insn))
996 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
998 /* Otherwise, we have a plain conditional or unconditional jump. */
1001 if (! JUMP_LABEL (insn))
1003 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
1007 /* If this is a CALL_INSN, then mark it as reaching the active EH
1008 handler for this CALL_INSN. If we're handling asynchronous
1009 exceptions then any insn can reach any of the active handlers.
1011 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
1013 if (code == CALL_INSN || asynchronous_exceptions)
1015 /* If there's an EH region active at the end of a block,
1016 add the appropriate edges. */
1017 if (bb->eh_end >= 0)
1018 make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);
1020 /* If we have asynchronous exceptions, do the same for *all*
1021 exception regions active in the block. */
1022 if (asynchronous_exceptions
1023 && bb->eh_beg != bb->eh_end)
1025 if (bb->eh_beg >= 0)
1026 make_eh_edge (edge_cache, eh_nest_info, bb,
1027 NULL_RTX, bb->eh_beg);
/* Also handle regions opened or closed in the middle of the block.  */
1029 for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
1030 if (GET_CODE (x) == NOTE
1031 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
1032 || NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
1034 int region = NOTE_EH_HANDLER (x);
1035 make_eh_edge (edge_cache, eh_nest_info, bb,
1040 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1042 /* ??? This could be made smarter: in some cases it's possible
1043 to tell that certain calls will not do a nonlocal goto.
1045 For example, if the nested functions that do the nonlocal
1046 gotos do not have their addresses taken, then only calls to
1047 those functions or to other nested functions that use them
1048 could possibly do nonlocal gotos. */
1049 /* We do know that a REG_EH_REGION note with a value less
1050 than 0 is guaranteed not to perform a non-local goto. */
1051 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1052 if (!note || XINT (XEXP (note, 0), 0) >= 0)
1053 for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
1054 make_label_edge (edge_cache, bb, XEXP (x, 0),
1055 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1059 /* We know something about the structure of the function __throw in
1060 libgcc2.c. It is the only function that ever contains eh_stub
1061 labels. It modifies its return address so that the last block
1062 returns to one of the eh_stub labels within it. So we have to
1063 make additional edges in the flow graph. */
1064 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
1065 make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);
1067 /* Find out if we can drop through to the next block. */
1068 insn = next_nonnote_insn (insn);
1069 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1070 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1071 else if (i + 1 < n_basic_blocks)
1073 rtx tmp = BLOCK_HEAD (i + 1);
1074 if (GET_CODE (tmp) == NOTE)
1075 tmp = next_nonnote_insn (tmp);
/* Fall through if the next non-note insn is the next block's head.  */
1076 if (force_fallthru || insn == tmp)
1077 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1081 free_eh_nesting_info (eh_nest_info);
1083 sbitmap_vector_free (edge_cache);
1086 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1087 about the edge that is accumulated between calls. */
1090 make_edge (edge_cache, src, dst, flags)
1091 sbitmap *edge_cache;
1092 basic_block src, dst;
1098 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1099 many edges to them, and we didn't allocate memory for it. */
1100 use_edge_cache = (edge_cache
1101 && src != ENTRY_BLOCK_PTR
1102 && dst != EXIT_BLOCK_PTR);
1104 /* Make sure we don't add duplicate edges. */
/* Without a usable cache, or when the cache says SRC->DST may already
   exist, fall back to scanning SRC's successor list.  */
1105 if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
1106 for (e = src->succ; e ; e = e->succ_next)
/* Allocate a zeroed edge and link it onto both blocks' edge lists.  */
1113 e = (edge) xcalloc (1, sizeof (*e));
1116 e->succ_next = src->succ;
1117 e->pred_next = dst->pred;
/* Remember the new edge in the cache for future duplicate checks.  */
1126 SET_BIT (edge_cache[src->index], dst->index);
1129 /* Create an edge from a basic block to a label. */
1132 make_label_edge (edge_cache, src, label, flags)
1133 sbitmap *edge_cache;
/* Ignore anything that is not actually a CODE_LABEL.  */
1138 if (GET_CODE (label) != CODE_LABEL)
1141 /* If the label was never emitted, this insn is junk, but avoid a
1142 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1143 as a result of a syntax error and a diagnostic has already been
1146 if (INSN_UID (label) == 0)
1149 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1152 /* Create the edges generated by INSN in REGION. */
1155 make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
1156 sbitmap *edge_cache;
1157 eh_nesting_info *eh_nest_info;
1162 handler_info **handler_list;
/* Calls additionally get EDGE_ABNORMAL_CALL; other (or null) insns
   get only the EH flags.  */
1165 is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1166 num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
/* Make an abnormal EH edge to each reachable handler's label.  */
1169 make_label_edge (edge_cache, src, handler_list[num]->handler_label,
1170 EDGE_ABNORMAL | EDGE_EH | is_call);
1174 /* EH_REGION notes appearing between basic blocks is ambiguous, and even
1175    dangerous if we intend to move basic blocks around.  Move such notes
1176    into the following block.  */
1179 move_stray_eh_region_notes ()
1184   if (n_basic_blocks < 2)
  /* Walk pairs of numerically adjacent blocks from the end of the
     function backwards; B1 precedes B2.  */
1187   b2 = BASIC_BLOCK (n_basic_blocks - 1);
1188   for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
1190       rtx insn, next, list = NULL_RTX;
1192       b1 = BASIC_BLOCK (i);
  /* Collect stray EH region notes found in the gap between B1 and B2.  */
1193       for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
1195 	  next = NEXT_INSN (insn);
1196 	  if (GET_CODE (insn) == NOTE
1197 	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1198 		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1200 	      /* Unlink from the insn chain.  */
1201 	      NEXT_INSN (PREV_INSN (insn)) = next;
1202 	      PREV_INSN (next) = PREV_INSN (insn);
  /* Push onto the pending LIST (threaded through NEXT_INSN).  */
1205 	      NEXT_INSN (insn) = list;
1210       if (list == NULL_RTX)
1213       /* Find where to insert these things: inside B2, past any leading
	 CODE_LABEL.  */
1215       if (GET_CODE (insn) == CODE_LABEL)
1216 	insn = NEXT_INSN (insn);
1220 	  next = NEXT_INSN (list);
1221 	  add_insn_after (list, insn);
1227 /* Recompute eh_beg/eh_end for each basic block.  F is the first insn of
     the function.  Maintains EH_LIST as a stack of currently-open
     NOTE_INSN_EH_REGION_BEG notes while scanning the insn stream.  */
1230 record_active_eh_regions (f)
1233   rtx insn, eh_list = NULL_RTX;
1235   basic_block bb = BASIC_BLOCK (0);
1237   for (insn = f; insn ; insn = NEXT_INSN (insn))
  /* At a block head, the innermost open region (or -1) is its eh_beg.  */
1239       if (bb->head == insn)
1240 	bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1242       if (GET_CODE (insn) == NOTE)
1244 	  int kind = NOTE_LINE_NUMBER (insn);
1245 	  if (kind == NOTE_INSN_EH_REGION_BEG)
1246 	    eh_list = alloc_INSN_LIST (insn, eh_list);
1247 	  else if (kind == NOTE_INSN_EH_REGION_END)
  /* Pop the stack on a region-end note.  */
1249 	      rtx t = XEXP (eh_list, 1);
1250 	      free_INSN_LIST_node (eh_list);
  /* At a block end, record the innermost open region and advance BB.  */
1255       if (bb->end == insn)
1257 	  bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1259 	  if (i == n_basic_blocks)
1261 	  bb = BASIC_BLOCK (i);
1266 /* Identify critical edges and set the bits appropriately.  An edge is
     critical iff its source has multiple successors AND its destination
     has multiple predecessors.  */
1269 mark_critical_edges ()
1271   int i, n = n_basic_blocks;
1274   /* We begin with the entry block.  This is not terribly important now,
1275      but could be if a front end (Fortran) implemented alternate entry
     points.  */
1277   bb = ENTRY_BLOCK_PTR;
1284       /* (1) Critical edges must have a source with multiple successors.  */
1285       if (bb->succ && bb->succ->succ_next)
1287 	  for (e = bb->succ; e ; e = e->succ_next)
1289 	      /* (2) Critical edges must have a destination with multiple
1290 		 predecessors.  Note that we know there is at least one
1291 		 predecessor -- the edge we followed to get here.  */
1292 	      if (e->dest->pred->pred_next)
1293 		e->flags |= EDGE_CRITICAL;
1295 		e->flags &= ~EDGE_CRITICAL;
  /* Single-successor source: none of its out-edges can be critical.  */
1300 	  for (e = bb->succ; e ; e = e->succ_next)
1301 	    e->flags &= ~EDGE_CRITICAL;
1306       bb = BASIC_BLOCK (i);
1310 /* Split a (typically critical) edge.  Return the new block.
1311    Abort on abnormal edges.
1313    ??? The code generally expects to be called on critical edges.
1314    The case of a block ending in an unconditional jump to a
1315    block with multiple predecessors is not handled optimally.  */
1318 split_edge (edge_in)
1321   basic_block old_pred, bb, old_succ;
1326   /* Abnormal edges cannot be split.  */
1327   if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1330   old_pred = edge_in->src;
1331   old_succ = edge_in->dest;
1333   /* Remove the existing edge from the destination's pred list.  */
1336     for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
1338     *pp = edge_in->pred_next;
1339     edge_in->pred_next = NULL;
1342   /* Create the new structures: the new block BB and its single
     outgoing edge EDGE_OUT to OLD_SUCC.  */
1343   bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
1344   edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1347   memset (bb, 0, sizeof (*bb));
1348   bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
1349   bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
1351   /* ??? This info is likely going to be out of date very soon.  */
1352   if (old_succ->global_live_at_start)
1354       COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1355       COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
1359       CLEAR_REG_SET (bb->global_live_at_start);
1360       CLEAR_REG_SET (bb->global_live_at_end);
1365   bb->succ = edge_out;
  /* After the split, the incoming edge is by construction no longer
     critical.  */
1368   edge_in->flags &= ~EDGE_CRITICAL;
1370   edge_out->pred_next = old_succ->pred;
1371   edge_out->succ_next = NULL;
1373   edge_out->dest = old_succ;
1374   edge_out->flags = EDGE_FALLTHRU;
1375   edge_out->probability = REG_BR_PROB_BASE;
1377   old_succ->pred = edge_out;
1379   /* Tricky case -- if there existed a fallthru into the successor
1380      (and we're not it) we must add a new unconditional jump around
1381      the new block we're actually interested in.
1383      Further, if that edge is critical, this means a second new basic
1384      block must be created to hold it.  In order to simplify correct
1385      insn placement, do this before we touch the existing basic block
1386      ordering for the block we were really wanting.  */
1387   if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1390       for (e = edge_out->pred_next; e ; e = e->pred_next)
1391 	if (e->flags & EDGE_FALLTHRU)
1396 	  basic_block jump_block;
1399 	  if ((e->flags & EDGE_CRITICAL) == 0)
1401 	      /* Non critical -- we can simply add a jump to the end
1402 		 of the existing predecessor.  */
1403 	      jump_block = e->src;
1407 	      /* We need a new block to hold the jump.  The simplest
1408 	         way to do the bulk of the work here is to recursively
	         call ourselves on that (non-abnormal) edge.  */
1410 	      jump_block = split_edge (e);
1411 	      e = jump_block->succ;
1414 	  /* Now add the jump insn ...  */
1415 	  pos = emit_jump_insn_after (gen_jump (old_succ->head),
1417 	  jump_block->end = pos;
1418 	  emit_barrier_after (pos);
1420 	  /* ... let jump know that label is in use, ...  */
1421 	  JUMP_LABEL (pos) = old_succ->head;
1422 	  ++LABEL_NUSES (old_succ->head);
1424 	  /* ... and clear fallthru on the outgoing edge.  */
1425 	  e->flags &= ~EDGE_FALLTHRU;
1427 	  /* Continue splitting the interesting edge.  */
1431   /* Place the new block just in front of the successor.  Grow the
     block array and shift everything at index J or above up by one.  */
1432   VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1433   if (old_succ == EXIT_BLOCK_PTR)
1434     j = n_basic_blocks - 1;
1436     j = old_succ->index;
1437   for (i = n_basic_blocks - 1; i > j; --i)
1439       basic_block tmp = BASIC_BLOCK (i - 1);
1440       BASIC_BLOCK (i) = tmp;
1443   BASIC_BLOCK (i) = bb;
1446   /* Create the basic block note.  */
1447   if (old_succ != EXIT_BLOCK_PTR)
1448     bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1450     bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
1451   NOTE_BASIC_BLOCK (bb_note) = bb;
1452   bb->head = bb->end = bb_note;
1454   /* Not quite simple -- for non-fallthru edges, we must adjust the
1455      predecessor's jump instruction to target our new block.  */
1456   if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1458       rtx tmp, insn = old_pred->end;
1459       rtx old_label = old_succ->head;
1460       rtx new_label = gen_label_rtx ();
1462       if (GET_CODE (insn) != JUMP_INSN)
1465       /* ??? Recognize a tablejump and adjust all matching cases.  */
1466       if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1467 	  && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1468 	  && GET_CODE (tmp) == JUMP_INSN
1469 	  && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1470 	      || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1475 	  if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1476 	    vec = XVEC (PATTERN (tmp), 0);
1478 	    vec = XVEC (PATTERN (tmp), 1);
  /* Retarget every dispatch-table entry that referenced OLD_LABEL,
     keeping the label use counts consistent.  */
1480 	  for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1481 	    if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1483 		RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1484 		--LABEL_NUSES (old_label);
1485 		++LABEL_NUSES (new_label);
1490 	  /* This would have indicated an abnormal edge.  */
1491 	  if (computed_jump_p (insn))
1494 	  /* A return instruction can't be redirected.  */
1495 	  if (returnjump_p (insn))
1498 	  /* If the insn doesn't go where we think, we're confused.  */
1499 	  if (JUMP_LABEL (insn) != old_label)
1502 	  redirect_jump (insn, new_label);
  /* The new block now begins at NEW_LABEL, placed before its bb note.  */
1505       emit_label_before (new_label, bb_note);
1506       bb->head = new_label;
1512 /* Queue instructions for insertion on an edge between two basic blocks.
1513    The new instructions and basic blocks (if any) will not appear in the
1514    CFG until commit_edge_insertions is called.  PATTERN is appended to
     any insns already queued on E.  */
1517 insert_insn_on_edge (pattern, e)
1521   /* We cannot insert instructions on an abnormal critical edge.
1522      It will be easier to find the culprit if we die now.  */
1523   if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1524       == (EDGE_ABNORMAL|EDGE_CRITICAL))
  /* Build the queued insns as an rtl sequence hung off e->insns.  */
1527   if (e->insns == NULL_RTX)
1530     push_to_sequence (e->insns);
1532   emit_insn (pattern);
1534   e->insns = get_insns ();
1538 /* Update the CFG for the instructions queued on edge E.  Chooses an
     insertion point (destination head, source end, or a freshly split
     block) and splices in e->insns there.  */
1541 commit_one_edge_insertion (e)
1544   rtx before = NULL_RTX, after = NULL_RTX, tmp;
1547   /* Figure out where to put these things.  If the destination has
1548      one predecessor, insert there.  Except for the exit block.  */
1549   if (e->dest->pred->pred_next == NULL
1550       && e->dest != EXIT_BLOCK_PTR)
1554       /* Get the location correct wrt a code label, and "nice" wrt
1555 	 a basic block note, and before everything else.  */
1557       if (GET_CODE (tmp) == CODE_LABEL)
1558 	tmp = NEXT_INSN (tmp);
1559       if (GET_CODE (tmp) == NOTE
1560 	  && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
1561 	tmp = NEXT_INSN (tmp);
1562       if (tmp == bb->head)
1565 	after = PREV_INSN (tmp);
1568   /* If the source has one successor and the edge is not abnormal,
1569      insert there.  Except for the entry block.  */
1570   else if ((e->flags & EDGE_ABNORMAL) == 0
1571 	   && e->src->succ->succ_next == NULL
1572 	   && e->src != ENTRY_BLOCK_PTR)
  /* A block ending in a jump gets the insns before the jump; otherwise
     they go after the block's last insn (the fallthru case).  */
1575       if (GET_CODE (bb->end) == JUMP_INSN)
1577 	  /* ??? Is it possible to wind up with non-simple jumps?  Perhaps
1578 	     a jump with delay slots already filled?  */
1579 	  if (! simplejump_p (bb->end))
1586 	  /* We'd better be fallthru, or we've lost track of what's what.  */
1587 	  if ((e->flags & EDGE_FALLTHRU) == 0)
1594   /* Otherwise we must split the edge.  */
1597       bb = split_edge (e);
1601   /* Now that we've found the spot, do the insertion.  */
1603   e->insns = NULL_RTX;
1605   /* Set the new block number for these insns, if structure is allocated.  */
1606   if (basic_block_for_insn)
1609       for (i = tmp; i != NULL_RTX; i = NEXT_INSN (i))
1610 	set_block_for_insn (i, bb);
  /* Splice the queued insns in, keeping bb->head/bb->end current.  */
1615       emit_insns_before (tmp, before);
1616       if (before == bb->head)
1621       tmp = emit_insns_after (tmp, after);
1622       if (after == bb->end)
1627 /* Update the CFG for all queued instructions.  Walks every block
     starting at the entry block and commits each successor edge's
     queued insns via commit_one_edge_insertion.  */
1630 commit_edge_insertions ()
1636   bb = ENTRY_BLOCK_PTR;
1641       for (e = bb->succ; e ; e = next)
  /* Fetch the next edge first: committing may rewrite the edge list.  */
1643 	  next = e->succ_next;
1645 	    commit_one_edge_insertion (e);
1648       if (++i >= n_basic_blocks)
1650       bb = BASIC_BLOCK (i);
1654 /* Delete all unreachable basic blocks.  Marks reachable blocks with a
     worklist walk from the entry block, deletes the rest, then tidies
     edges that now fall through.  */
1657 delete_unreachable_blocks ()
1659   basic_block *worklist, *tos;
1660   int deleted_handler;
1665   tos = worklist = (basic_block *) alloca (sizeof (basic_block) * n);
1667   /* Use basic_block->aux as a marker.  Clear them all.  */
1669   for (i = 0; i < n; ++i)
1670     BASIC_BLOCK (i)->aux = NULL;
1672   /* Add our starting points to the worklist.  Almost always there will
1673      be only one.  It isn't inconcievable that we might one day directly
1674      support Fortran alternate entry points.  */
1676   for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
1680       /* Mark the block with a handy non-null value.  */
1684   /* Iterate: find everything reachable from what we've already seen.  */
1686   while (tos != worklist)
1688       basic_block b = *--tos;
1690       for (e = b->succ; e ; e = e->succ_next)
1698   /* Delete all unreachable basic blocks.  Count down so that we don't
1699      interfere with the block renumbering that happens in delete_block.  */
1701   deleted_handler = 0;
1703   for (i = n - 1; i >= 0; --i)
1705       basic_block b = BASIC_BLOCK (i);
1708 	/* This block was found.  Tidy up the mark.  */
1711 	deleted_handler |= delete_block (b);
1714   /* Fix up edges that now fall through, or rather should now fall through
1715      but previously required a jump around now deleted blocks.  Simplify
1716      the search by only examining blocks numerically adjacent, since this
1717      is how find_basic_blocks created them.  */
1719   for (i = 1; i < n_basic_blocks; ++i)
1721       basic_block b = BASIC_BLOCK (i - 1);
1722       basic_block c = BASIC_BLOCK (i);
1725       /* We care about simple conditional or unconditional jumps with
	 a single successor.
1728 	 If we had a conditional branch to the next instruction when
1729 	 find_basic_blocks was called, then there will only be one
1730 	 out edge for the block which ended with the conditional
1731 	 branch (since we do not create duplicate edges).
1733 	 Furthermore, the edge will be marked as a fallthru because we
1734 	 merge the flags for the duplicate edges.  So we do not want to
1735 	 check that the edge is not a FALLTHRU edge.  */
1736       if ((s = b->succ) != NULL
1737 	  && s->succ_next == NULL
1739 	  /* If the jump insn has side effects, we can't tidy the edge.  */
1740 	  && (GET_CODE (b->end) != JUMP_INSN
1741 	      || onlyjump_p (b->end)))
1742 	tidy_fallthru_edge (s, b, c);
1745   /* If we deleted an exception handler, we may have EH region begin/end
1746      blocks to remove as well.  */
1747   if (deleted_handler)
1748     delete_eh_regions ();
1751 /* Find EH regions for which there is no longer a handler, and delete them.
     The begin/end notes are neutralized by turning them into deleted notes
     rather than unlinked, so pairing is preserved.  */
1754 delete_eh_regions ()
1758   update_rethrow_references ();
1760   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1761     if (GET_CODE (insn) == NOTE)
1763 	if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
1764 	    (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1766 	    int num = NOTE_EH_HANDLER (insn);
1767 	    /* A NULL handler indicates a region is no longer needed,
1768 	       as long as it isn't the target of a rethrow.  */
1769 	    if (get_first_handler (num) == NULL && ! rethrow_used (num))
1771 		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1772 		NOTE_SOURCE_FILE (insn) = 0;
1778 /* Return true if NOTE is not one of the ones that must be kept paired,
1779    so that we may simply delete them.  Only already-deleted notes and
     basic-block notes qualify.  */
1782 can_delete_note_p (note)
1785   return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
1786 	  || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
1789 /* Unlink a chain of insns between START and FINISH, leaving notes
1790    that must be paired.  FINISH is inclusive.  */
1793 flow_delete_insn_chain (start, finish)
1796   /* Unchain the insns one by one.  It would be quicker to delete all
1797      of these with a single unchaining, rather than one at a time, but
1798      we need to keep the NOTE's.  */
1804       next = NEXT_INSN (start);
  /* Paired notes and undeletable labels are skipped, not deleted.  */
1805       if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
1807       else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
1810 	next = flow_delete_insn (start);
1812       if (start == finish)
1818 /* Delete the insns in a (non-live) block.  We physically delete every
1819    non-deleted-note insn, and update the flow graph appropriately.
1821    Return nonzero if we deleted an exception handler.  */
1823 /* ??? Preserving all such notes strikes me as wrong.  It would be nice
1824    to post-process the stream to remove empty blocks, loops, ranges, etc.  */
1830   int deleted_handler = 0;
1833   /* If the head of this block is a CODE_LABEL, then it might be the
1834      label for an exception handler which can't be reached.
1836      We need to remove the label from the exception_handler_label list
1837      and remove the associated NOTE_INSN_EH_REGION_BEG and
1838      NOTE_INSN_EH_REGION_END notes.  */
1842   never_reached_warning (insn);
1844   if (GET_CODE (insn) == CODE_LABEL)
1846       rtx x, *prev = &exception_handler_labels;
1848       for (x = exception_handler_labels; x; x = XEXP (x, 1))
1850 	  if (XEXP (x, 0) == insn)
1852 	      /* Found a match, splice this label out of the EH label list.  */
1853 	      *prev = XEXP (x, 1);
1854 	      XEXP (x, 1) = NULL_RTX;
1855 	      XEXP (x, 0) = NULL_RTX;
1857 	      /* Remove the handler from all regions */
1858 	      remove_handler (insn);
1859 	      deleted_handler = 1;
1862 	  prev = &XEXP (x, 1);
1865       /* This label may be referenced by code solely for its value, or
1866 	 referenced by static data, or something.  We have determined
1867 	 that it is not reachable, but cannot delete the label itself.
1868 	 Save code space and continue to delete the balance of the block,
1869 	 along with properly updating the cfg.  */
1870       if (!can_delete_label_p (insn))
1872 	  /* If we've only got one of these, skip the whole deleting
	     business below.  */
1875 	    goto no_delete_insns;
1876 	  insn = NEXT_INSN (insn);
1880   /* Selectively unlink the insn chain.  Include any BARRIER that may
1881      follow the basic block.  */
1882   end = next_nonnote_insn (b->end);
1883   if (!end || GET_CODE (end) != BARRIER)
1885   flow_delete_insn_chain (insn, end);
1889   /* Remove the edges into and out of this block.  Note that there may
1890      indeed be edges in, if we are removing an unreachable loop.  */
  /* For each incoming edge, unlink it from the source's successor list.  */
1894     for (e = b->pred; e ; e = next)
1896 	for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
1899 	next = e->pred_next;
  /* Likewise, unlink each outgoing edge from its destination's
     predecessor list.  */
1903     for (e = b->succ; e ; e = next)
1905 	for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
1908 	next = e->succ_next;
1917   /* Remove the basic block from the array, and compact behind it.  */
1920   return deleted_handler;
1923 /* Remove block B from the basic block array and compact behind it.
     Every following block shifts down one slot (its index is updated
     elsewhere as part of this shift).  */
1929   int i, n = n_basic_blocks;
1931   for (i = b->index; i + 1 < n; ++i)
1933       basic_block x = BASIC_BLOCK (i + 1);
1934       BASIC_BLOCK (i) = x;
1938   basic_block_info->num_elements--;
1942 /* Delete INSN by patching it out.  Return the next insn.  The insn is
     unlinked from the doubly-linked insn chain and its own links are
     cleared.  */
1945 flow_delete_insn (insn)
1948   rtx prev = PREV_INSN (insn);
1949   rtx next = NEXT_INSN (insn);
1951   PREV_INSN (insn) = NULL_RTX;
1952   NEXT_INSN (insn) = NULL_RTX;
1955     NEXT_INSN (prev) = next;
1957     PREV_INSN (next) = prev;
  /* If INSN was the last insn of the function, back up the global
     last-insn pointer.  */
1959     set_last_insn (prev);
1961   if (GET_CODE (insn) == CODE_LABEL)
1962     remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
1964   /* If deleting a jump, decrement the use count of the label.  Deleting
1965      the label itself should happen in the normal course of block merging.  */
1966   if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
1967     LABEL_NUSES (JUMP_LABEL (insn))--;
1972 /* True if a given label can be deleted.  A label must be kept if it is
     marked preserved, appears on the forced_labels, label_value_list or
     exception_handler_labels lists, or carries a user-declared name.  */
1975 can_delete_label_p (label)
1980   if (LABEL_PRESERVE_P (label))
1983   for (x = forced_labels; x ; x = XEXP (x, 1))
1984     if (label == XEXP (x, 0))
1986   for (x = label_value_list; x ; x = XEXP (x, 1))
1987     if (label == XEXP (x, 0))
1989   for (x = exception_handler_labels; x ; x = XEXP (x, 1))
1990     if (label == XEXP (x, 0))
1993   /* User declared labels must be preserved.  */
1994   if (LABEL_NAME (label) != 0)
2000 /* Blocks A and B are to be merged into a single block.  A has no incoming
2001    fallthru edge, so it can be moved before B without adding or modifying
2002    any jumps (aside from the jump from A to B).  */
2005 merge_blocks_move_predecessor_nojumps (a, b)
2008   rtx start, end, insertpoint, barrier;
2012   insertpoint = PREV_INSN (b->head);
2014   /* We want to delete the BARRIER after the end of the insns we are
2015      going to move.  If we don't find a BARRIER, then do nothing.  This
2016      can happen in some cases if we have labels we can not delete.
2018      Similarly, do nothing if we can not delete the label at the start
2019      of the target block.  */
2020   barrier = next_nonnote_insn (end);
2021   if (GET_CODE (barrier) != BARRIER
2022       || (GET_CODE (b->head) == CODE_LABEL
2023 	  && ! can_delete_label_p (b->head)))
2026   flow_delete_insn (barrier);
2028   /* Move block and loop notes out of the chain so that we do not
2029      disturb their order.
2031      ??? A better solution would be to squeeze out all the non-nested notes
2032      and adjust the block trees appropriately.  Even better would be to have
2033      a tighter connection between block trees and rtl so that this is not
     necessary.  */
2035   start = squeeze_notes (start, end);
2037   /* Scramble the insn chain: physically relocate A's insns to just
     before B's head.  */
2038   reorder_insns (start, end, insertpoint);
2040   /* Now blocks A and B are contiguous.  Merge them.  */
2041   merge_blocks_nomove (a, b);
2045       fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2046 	       a->index, b->index);
2052 /* Blocks A and B are to be merged into a single block.  B has no outgoing
2053    fallthru edge, so it can be moved after A without adding or modifying
2054    any jumps (aside from the jump from A to B).  */
2057 merge_blocks_move_successor_nojumps (a, b)
2060   rtx start, end, insertpoint, barrier;
2064   insertpoint = a->end;
2066   /* We want to delete the BARRIER after the end of the insns we are
2067      going to move.  If we don't find a BARRIER, then do nothing.  This
2068      can happen in some cases if we have labels we can not delete.
2070      Similarly, do nothing if we can not delete the label at the start
2071      of the target block.  */
2072   barrier = next_nonnote_insn (end);
2073   if (GET_CODE (barrier) != BARRIER
2074       || (GET_CODE (b->head) == CODE_LABEL
2075 	  && ! can_delete_label_p (b->head)))
2078   flow_delete_insn (barrier);
2080   /* Move block and loop notes out of the chain so that we do not
2081      disturb their order.
2083      ??? A better solution would be to squeeze out all the non-nested notes
2084      and adjust the block trees appropriately.  Even better would be to have
2085      a tighter connection between block trees and rtl so that this is not
     necessary.  */
2087   start = squeeze_notes (start, end);
2089   /* Scramble the insn chain: physically relocate B's insns to just
     after A's end.  */
2090   reorder_insns (start, end, insertpoint);
2092   /* Now blocks A and B are contiguous.  Merge them.  */
2093   merge_blocks_nomove (a, b);
2097       fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2098 	       b->index, a->index);
2104 /* Blocks A and B are to be merged into a single block.  The insns
2105    are already contiguous, hence `nomove'.  Deletes B's label and bb
     note, A's trailing jump, then absorbs B's insns and out-edges into A.  */
2108 merge_blocks_nomove (a, b)
2112   rtx b_head, b_end, a_end;
2115   /* If there was a CODE_LABEL beginning B, delete it.  */
2118   if (GET_CODE (b_head) == CODE_LABEL)
2120       /* Detect basic blocks with nothing but a label.  This can happen
2121 	 in particular at the end of a function.  */
2122       if (b_head == b_end)
2124       b_head = flow_delete_insn (b_head);
2127   /* Delete the basic block note.  */
2128   if (GET_CODE (b_head) == NOTE
2129       && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
2131       if (b_head == b_end)
2133       b_head = flow_delete_insn (b_head);
2136   /* If there was a jump out of A, delete it.  */
2138   if (GET_CODE (a_end) == JUMP_INSN)
2142       prev = prev_nonnote_insn (a_end);
2147       /* If this was a conditional jump, we need to also delete
2148 	 the insn that set cc0.  */
2150       if (prev && sets_cc0_p (prev))
2153 	  prev = prev_nonnote_insn (prev);
2156 	  flow_delete_insn (tmp);
2160       /* Note that a->head != a->end, since we should have at least a
2161 	 bb note plus the jump, so prev != insn.  */
2162       flow_delete_insn (a_end);
2166   /* By definition, there should only be one successor of A, and that is
2167      B.  Free that edge struct.  */
2171   /* Adjust the edges out of B for the new owner.  */
2172   for (e = b->succ; e ; e = e->succ_next)
2176   /* Reassociate the insns of B with A.  */
2179       BLOCK_FOR_INSN (b_head) = a;
2180       while (b_head != b_end)
2182 	  b_head = NEXT_INSN (b_head);
2183 	  BLOCK_FOR_INSN (b_head) = a;
2189   /* Compact the basic block array.  */
2193 /* Attempt to merge basic blocks that are potentially non-adjacent.
2194    Return true iff the attempt succeeded.  E is the edge between B and C.  */
2197 merge_blocks (e, b, c)
2201   /* If B has a fallthru edge to C, no need to move anything.  */
2202   if (e->flags & EDGE_FALLTHRU)
2204       /* If a label still appears somewhere and we cannot delete the label,
2205 	 then we cannot merge the blocks.  The edge was tidied already.  */
2207       rtx insn, stop = NEXT_INSN (c->head);
2208       for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
2209 	if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
2212       merge_blocks_nomove (b, c);
2216 	fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2217 		 b->index, c->index);
  /* Non-fallthru case: one of the blocks must be physically relocated.  */
2226       int c_has_outgoing_fallthru;
2227       int b_has_incoming_fallthru;
2229       /* We must make sure to not munge nesting of exception regions,
2230 	 lexical blocks, and loop notes.
2232 	 The first is taken care of by requiring that the active eh
2233 	 region at the end of one block always matches the active eh
2234 	 region at the beginning of the next block.
2236 	 The later two are taken care of by squeezing out all the notes.  */
2238       /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2239 	 executed and we may want to treat blocks which have two out
2240 	 edges, one normal, one abnormal as only having one edge for
2241 	 block merging purposes.  */
2243       for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2244 	if (tmp_edge->flags & EDGE_FALLTHRU)
2246       c_has_outgoing_fallthru = (tmp_edge != NULL);
2248       for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2249 	if (tmp_edge->flags & EDGE_FALLTHRU)
2251       b_has_incoming_fallthru = (tmp_edge != NULL);
2253       /* If B does not have an incoming fallthru, and the exception regions
2254 	 match, then it can be moved immediately before C without introducing
	 or modifying jumps.
2257 	 C can not be the first block, so we do not have to worry about
2258 	 accessing a non-existent block.  */
2259       d = BASIC_BLOCK (c->index - 1);
2260       if (! b_has_incoming_fallthru
2261 	  && d->eh_end == b->eh_beg
2262 	  && b->eh_end == c->eh_beg)
2263 	return merge_blocks_move_predecessor_nojumps (b, c);
2265       /* Otherwise, we're going to try to move C after B.  Make sure the
2266 	 exception regions match.
2268 	 If B is the last basic block, then we must not try to access the
2269 	 block structure for block B + 1.  Luckily in that case we do not
2270 	 need to worry about matching exception regions.  */
2271       d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2272       if (b->eh_end == c->eh_beg
2273 	  && (d == NULL || c->eh_end == d->eh_beg))
2275 	  /* If C does not have an outgoing fallthru, then it can be moved
2276 	     immediately after B without introducing or modifying jumps.  */
2277 	  if (! c_has_outgoing_fallthru)
2278 	    return merge_blocks_move_successor_nojumps (b, c);
2280 	  /* Otherwise, we'll need to insert an extra jump, and possibly
2281 	     a new block to contain it.  */
2282 	  /* ??? Not implemented yet.  */
2289 /* Top level driver for merge_blocks.  */
2296   /* Attempt to merge blocks as made possible by edge removal.  If a block
2297      has only one successor, and the successor has only one predecessor,
2298      they may be combined.  */
2300   for (i = 0; i < n_basic_blocks; )
2302       basic_block c, b = BASIC_BLOCK (i);
2305       /* A loop because chains of blocks might be combineable.  Merge B
	 with its sole non-EH successor C as long as the merge succeeds.  */
2306       while ((s = b->succ) != NULL
2307 	     && s->succ_next == NULL
2308 	     && (s->flags & EDGE_EH) == 0
2309 	     && (c = s->dest) != EXIT_BLOCK_PTR
2310 	     && c->pred->pred_next == NULL
2311 	     /* If the jump insn has side effects, we can't kill the edge.  */
2312 	     && (GET_CODE (b->end) != JUMP_INSN
2313 		 || onlyjump_p (b->end))
2314 	     && merge_blocks (s, b, c))
2317       /* Don't get confused by the index shift caused by deleting blocks.  */
2322 /* The given edge should potentially be a fallthru edge.  If that is in
2323    fact true, delete the unconditional jump and barriers that are in
     the way, making E a true fallthru edge from B to C.  */
2327 tidy_fallthru_edge (e, b, c)
2333   /* ??? In a late-running flow pass, other folks may have deleted basic
2334      blocks by nopping out blocks, leaving multiple BARRIERs between here
2335      and the target label.  They ought to be chastized and fixed.
2337      We can also wind up with a sequence of undeletable labels between
2338      one block and the next.
2340      So search through a sequence of barriers, labels, and notes for
2341      the head of block C and assert that we really do fall through.  */
2343   if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2346   /* Remove what will soon cease being the jump insn from the source block.
2347      If block B consisted only of this single jump, turn it into a deleted
     note.  */
2350   if (GET_CODE (q) == JUMP_INSN)
2353       /* If this was a conditional jump, we need to also delete
2354 	 the insn that set cc0.  */
2355       if (! simplejump_p (q) && condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2362 	  NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2363 	  NOTE_SOURCE_FILE (q) = 0;
2366 	b->end = q = PREV_INSN (q);
2369   /* Selectively unlink the sequence between B's new end and C's head.  */
2370   if (q != PREV_INSN (c->head))
2371     flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2373   e->flags |= EDGE_FALLTHRU;
2376 /* Discover and record the loop depth at the head of each basic block.
     INSNS is the first insn of the function; depth is tracked by counting
     LOOP_BEG/LOOP_END notes while scanning.  */
2379 calculate_loop_depth (insns)
  /* Depth starts at 1 so that a depth of 0 can later be recognized as a
     bookkeeping error (see the comment at the end).  */
2384   int i = 0, depth = 1;
2386   bb = BASIC_BLOCK (i);
2387   for (insn = insns; insn ; insn = NEXT_INSN (insn))
2389       if (insn == bb->head)
2391 	  bb->loop_depth = depth;
2392 	  if (++i >= n_basic_blocks)
2394 	  bb = BASIC_BLOCK (i);
2397       if (GET_CODE (insn) == NOTE)
2399 	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2401 	  else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
2404 	 If we have LOOP_DEPTH == 0, there has been a bookkeeping error.  */
2411 /* Perform data flow analysis.
2412    F is the first insn of the function and NREGS the number of register numbers
     in use.  FILE, when non-null, receives the dump output.  */
2416 life_analysis (f, nregs, file, remove_dead_code)
2420      int remove_dead_code;
2422 #ifdef ELIMINABLE_REGS
2424   static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2428   /* Record which registers will be eliminated.  We use this in
     life analysis below.  */
2431   CLEAR_HARD_REG_SET (elim_reg_set);
2433 #ifdef ELIMINABLE_REGS
2434   for (i = 0; i < sizeof eliminables / sizeof eliminables[0]; i++)
2435     SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2437   SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2440   /* Allocate a bitmap to be filled in by record_volatile_insns.  */
2441   uid_volatile = BITMAP_ALLOCA ();
2443   /* We want alias analysis information for local dead store elimination.  */
2444   init_alias_analysis ();
2447   if (! remove_dead_code)
2448     flags &= ~(PROP_SCAN_DEAD_CODE | PROP_KILL_DEAD_CODE);
2449   life_analysis_1 (f, nregs, flags);
2451   if (! reload_completed)
2452     mark_constant_function ();
2454   end_alias_analysis ();
2457     dump_flow_info (file);
2459   BITMAP_FREE (uid_volatile);
2460   free_basic_block_vars (1);
2463 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2464    Search for REGNO.  If found, abort if it is not wider than word_mode.
     PX points at the rtx to examine, PREGNO at the register number.  */
2467 verify_wide_reg_1 (px, pregno)
2472   int regno = *(int *) pregno;
2474   if (GET_CODE (x) == REG && REGNO (x) == regno)
2476       if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2483 /* A subroutine of verify_local_live_at_start.  Search through insns
2484    between HEAD and END looking for register REGNO.  Aborts (via
     verify_wide_reg_1) if the register is found but is not wider than
     a word, or (below) if it is never found at all.  */
2487 verify_wide_reg (regno, head, end)
  /* Fixed mis-encoded "&regno" (was corrupted to the (R) glyph, an HTML
     "&reg;" entity artifact): pass the address of REGNO as the callback's
     user data.  */
2493       if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
2494 	  && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2498       head = NEXT_INSN (head);
2501   /* We didn't find the register at all.  Something's way screwy.  */
2505 /* A subroutine of update_life_info.  Verify that there are no untoward
2506    changes in live_at_start during a local update.  NEW_LIVE_AT_START is
     the recomputed set for block BB; it is consumed (XORed) by the check.  */
2509 verify_local_live_at_start (new_live_at_start, bb)
2510      regset new_live_at_start;
2513   if (reload_completed)
2515       /* After reload, there are no pseudos, nor subregs of multi-word
2516 	 registers.  The regsets should exactly match.  */
2517       if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2524       /* Find the set of changed registers.  */
2525       XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2527       EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2529 	  /* No registers should die.  */
2530 	  if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2532 	  /* Verify that the now-live register is wider than word_mode.  */
2533 	  verify_wide_reg (i, bb->head, bb->end);
2538 /* Updates death notes starting with the basic blocks set in BLOCKS.
2540    If LOCAL_ONLY, such as after splitting or peepholeing, we are only
2541    expecting local modifications to basic blocks.  If we find extra
2542    registers live at the beginning of a block, then we either killed
2543    useful data, or we have a broken split that wants data not provided.
2544    If we find registers removed from live_at_start, that means we have
2545    a broken peephole that is killing a register it shouldn't.
2547    ??? This is not true in one situation -- when a pre-reload splitter
2548    generates subregs of a multi-word pseudo, current life analysis will
2549    lose the kill.  So we _can_ have a pseudo go live.  How irritating.  */
2552 update_life_info (blocks, extent)
2554      enum update_life_extent extent;
2559   compute_bb_for_insn (get_max_uid ());
2560   tmp = ALLOCA_REG_SET ();
2562   /* For a global update, we go through the relaxation process again.  */
2563   if (extent != UPDATE_LIFE_LOCAL)
2565       calculate_global_regs_live (blocks, blocks, 0);
2567       /* If asked, remove notes from the blocks we'll update.  */
2568       if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2569 	count_or_remove_death_notes (blocks, 1);
  /* Re-propagate liveness backwards through each requested block,
     regenerating the death notes.  */
2572   EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2574       basic_block bb = BASIC_BLOCK (i);
2576       COPY_REG_SET (tmp, bb->global_live_at_end);
2577       propagate_block (tmp, bb->head, bb->end, (regset) NULL, i,
  /* For local updates, sanity-check that live_at_start did not change
     in a way a split/peephole must not cause.  */
2580       if (extent == UPDATE_LIFE_LOCAL)
2581 	verify_local_live_at_start (tmp, bb);
2587   free_basic_block_vars (1);
2590 /* Free the variables allocated by find_basic_blocks.
2592    KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed.  */
2595 free_basic_block_vars (keep_head_end_p)
2596 int keep_head_end_p;
/* The insn-to-block map is always released, regardless of the flag.  */
2598 if (basic_block_for_insn)
2600 VARRAY_FREE (basic_block_for_insn);
2601 basic_block_for_insn = NULL;
2604 if (! keep_head_end_p)
2607 VARRAY_FREE (basic_block_info);
/* Clear the ENTRY/EXIT sentinel blocks' aux and live-regset pointers so
   stale data cannot leak into a later analysis pass.  */
2610 ENTRY_BLOCK_PTR->aux = NULL;
2611 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2612 EXIT_BLOCK_PTR->aux = NULL;
2613 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2617 /* Return nonzero if the destination of SET equals the source.  */
2622 rtx src = SET_SRC (set);
2623 rtx dst = SET_DEST (set);
/* Direct reg-to-same-reg copy is a no-op.  */
2624 if (GET_CODE (src) == REG && GET_CODE (dst) == REG
2625 && REGNO (src) == REGNO (dst))
/* Otherwise, only a SUBREG-to-SUBREG copy of the same word of the same
   register can be a no-op; anything else is a real move.  */
2627 if (GET_CODE (src) != SUBREG || GET_CODE (dst) != SUBREG
2628 || SUBREG_WORD (src) != SUBREG_WORD (dst))
2630 src = SUBREG_REG (src);
2631 dst = SUBREG_REG (dst);
2632 if (GET_CODE (src) == REG && GET_CODE (dst) == REG
2633 && REGNO (src) == REGNO (dst))
2638 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2644 rtx pat = PATTERN (insn);
/* Keep insns with REG_EQUAL notes: later passes (e.g. cse/combine) can
   exploit the equivalence even if the move itself looks redundant.  */
2646 /* Insns carrying these notes are useful later on.  */
2647 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2650 if (GET_CODE (pat) == SET && set_noop_p (pat))
2653 if (GET_CODE (pat) == PARALLEL)
2656 /* If nothing but SETs of registers to themselves,
2657 this insn can also be deleted.  */
2658 for (i = 0; i < XVECLEN (pat, 0); i++)
2660 rtx tem = XVECEXP (pat, 0, i);
/* USEs and CLOBBERs inside the PARALLEL do not disqualify it.  */
2662 if (GET_CODE (tem) == USE
2663 || GET_CODE (tem) == CLOBBER)
2666 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
/* note_stores callback: clear current_function_sp_is_unchanging if X is
   (or auto-modifies through) the stack pointer.  PAT is unused.  */
2676 notice_stack_pointer_modification (x, pat)
2678 rtx pat ATTRIBUTE_UNUSED;
2680 if (x == stack_pointer_rtx
2681 /* The stack pointer is only modified indirectly as the result
2682 of a push until later in flow.  See the comments in rtl.texi
2683 regarding Embedded Side-Effects on Addresses.  */
2684 || (GET_CODE (x) == MEM
2685 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2686 || GET_CODE (XEXP (x, 0)) == PRE_INC
2687 || GET_CODE (XEXP (x, 0)) == POST_DEC
2688 || GET_CODE (XEXP (x, 0)) == POST_INC)
2689 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2690 current_function_sp_is_unchanging = 0;
2693 /* Record which insns refer to any volatile memory
2694 or for any reason can't be deleted just because they are dead stores.
2695 Also, delete any insns that copy a register to itself.
2696 And see if the stack pointer is modified.  */
2698 record_volatile_insns (f)
/* Walk every insn in the function, starting from F.  */
2702 for (insn = f; insn; insn = NEXT_INSN (insn))
2704 enum rtx_code code1 = GET_CODE (insn);
/* Calls are never deletable as dead stores.  */
2705 if (code1 == CALL_INSN)
2706 SET_INSN_VOLATILE (insn);
2707 else if (code1 == INSN || code1 == JUMP_INSN)
2709 if (GET_CODE (PATTERN (insn)) != USE
2710 && volatile_refs_p (PATTERN (insn)))
2711 SET_INSN_VOLATILE (insn);
2713 /* A SET that makes space on the stack cannot be dead.
2714 (Such SETs occur only for allocating variable-size data,
2715 so they will always have a PLUS or MINUS according to the
2716 direction of stack growth.)
2717 Even if this function never uses this stack pointer value,
2718 signal handlers do!  */
2719 else if (code1 == INSN && GET_CODE (PATTERN (insn)) == SET
2720 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
2721 #ifdef STACK_GROWS_DOWNWARD
2722 && GET_CODE (SET_SRC (PATTERN (insn))) == MINUS
/* NOTE(review): the #else branch for upward-growing stacks appears
   here in the full file; it is elided from this view.  */
2724 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
2726 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx)
2727 SET_INSN_VOLATILE (insn);
2729 /* Delete (in effect) any obvious no-op moves.  */
/* "Deletion" is done by rewriting the insn into a NOTE_INSN_DELETED
   note, which is safe even for the first/last insn of a block.  */
2730 else if (noop_move_p (insn))
2732 PUT_CODE (insn, NOTE);
2733 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2734 NOTE_SOURCE_FILE (insn) = 0;
2738 /* Check if insn modifies the stack pointer.  */
2739 if ( current_function_sp_is_unchanging
2740 && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2741 note_stores (PATTERN (insn), notice_stack_pointer_modification);
2745 /* Mark a register in SET.  Hard registers in large modes get all
2746    of their component registers set as well.  */
2752 int regno = REGNO (reg);
2754 SET_REGNO_REG_SET (set, regno);
/* For a hard register, also mark each following hard register that the
   value occupies in this mode (loop header elided in this view).  */
2755 if (regno < FIRST_PSEUDO_REGISTER)
2757 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2759 SET_REGNO_REG_SET (set, regno + n);
2763 /* Mark those regs which are needed at the end of the function as live
2764    at the end of the last basic block.  */
2766 mark_regs_live_at_end (set)
2772 /* If exiting needs the right stack value, consider the stack pointer
2773 live at the end of the function.  */
2774 if ((HAVE_epilogue && reload_completed)
2775 || ! EXIT_IGNORE_STACK
2776 || (! FRAME_POINTER_REQUIRED
2777 && ! current_function_calls_alloca
2778 && flag_omit_frame_pointer)
2779 || current_function_sp_is_unchanging)
2781 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2784 /* Mark the frame pointer if needed at the end of the function.  If
2785 we end up eliminating it, it will be removed from the live list
2786 of each basic block by reload.  */
2788 if (! reload_completed || frame_pointer_needed)
2790 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2791 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2792 /* If they are different, also mark the hard frame pointer as live */
2793 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
2797 #ifdef PIC_OFFSET_TABLE_REGNUM
2798 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
2799 /* Many architectures have a GP register even without flag_pic.
2800 Assume the pic register is not in use, or will be handled by
2801 other means, if it is not fixed.  */
2802 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
2803 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
2807 /* Mark all global registers, and all registers used by the epilogue
2808 as being live at the end of the function since they may be
2809 referenced by our caller.  */
2810 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
/* The condition head (global_regs[i] test) is elided in this view.  */
2812 #ifdef EPILOGUE_USES
2813 || EPILOGUE_USES (i)
2816 SET_REGNO_REG_SET (set, i);
2818 /* Mark all call-saved registers that we actually used.  */
2819 if (HAVE_epilogue && reload_completed)
2821 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2822 if (! call_used_regs[i] && regs_ever_live[i])
2823 SET_REGNO_REG_SET (set, i);
2826 /* Mark function return value.  */
2827 /* ??? Only do this after reload.  Consider a non-void function that
2828 omits a return statement.  Across that edge we'll have the return
2829 register live, and no set for it.  Thus the return register will
2830 be live back through the CFG to the entry, and thus we die.  A
2831 possible solution is to emit a clobber at exits without returns.  */
2833 type = TREE_TYPE (DECL_RESULT (current_function_decl));
2834 if (reload_completed
2835 && type != void_type_node)
/* Structure returns go through a pointer, so the marked value is the
   pointer, not the aggregate itself.  */
2839 if (current_function_returns_struct
2840 || current_function_returns_pcc_struct)
2841 type = build_pointer_type (type);
2843 #ifdef FUNCTION_OUTGOING_VALUE
2844 outgoing = FUNCTION_OUTGOING_VALUE (type, current_function_decl);
2846 outgoing = FUNCTION_VALUE (type, current_function_decl);
2849 if (GET_CODE (outgoing) == REG)
2850 mark_reg (set, outgoing);
2851 else if (GET_CODE (outgoing) == PARALLEL)
2853 int len = XVECLEN (outgoing, 0);
2855 /* Check for a NULL entry, used to indicate that the parameter
2856 goes on the stack and in registers.  */
2857 i = (XEXP (XVECEXP (outgoing, 0, 0), 0) ? 0 : 1)
2859 for ( ; i < len; ++i)
2861 rtx r = XVECEXP (outgoing, 0, i);
2862 if (GET_CODE (r) == REG)
2871 /* Determine which registers are live at the start of each
2872 basic block of the function whose first insn is F.
2873 NREGS is the number of registers used in F.
2874 We allocate the vector basic_block_live_at_start
2875 and the regsets that it points to, and fill them with the data.
2876 regset_size and regset_bytes are also set here.  */
2879 life_analysis_1 (f, nregs, flags)
2884 char save_regs_ever_live[FIRST_PSEUDO_REGISTER];
2889 /* Allocate and zero out many data structures
2890 that will record the data from lifetime analysis.  */
2892 allocate_reg_life_data ();
2893 allocate_bb_life_data ();
/* reg_next_use: per-register pointer to the next (later) insn that
   uses the register, maintained while scanning blocks backwards.  */
2895 reg_next_use = (rtx *) alloca (nregs * sizeof (rtx));
2896 memset (reg_next_use, 0, nregs * sizeof (rtx));
2898 /* Assume that the stack pointer is unchanging if alloca hasn't been used.
2899 This will be cleared by record_volatile_insns if it encounters an insn
2900 which modifies the stack pointer.  */
2901 current_function_sp_is_unchanging = !current_function_calls_alloca;
2902 record_volatile_insns (f);
2904 /* Find the set of registers live on function exit.  Do this before
2905 zeroing regs_ever_live, as we use that data post-reload.  */
2906 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2908 /* The post-reload life analysis must have (on a global basis) the same
2909 registers live as were computed by reload itself; otherwise elimination
2910 offsets and such may be incorrect.
2912 Reload will make some registers as live even though they do not
2913 appear in the rtl.  */
2914 if (reload_completed)
2915 memcpy (save_regs_ever_live, regs_ever_live, sizeof (regs_ever_live));
2916 memset (regs_ever_live, 0, sizeof regs_ever_live);
2918 /* Compute register life at block boundaries.  It'd be nice to
2919 begin with just the exit and noreturn blocks, but that set
2920 is not immediately handy.  */
2923 blocks = sbitmap_alloc (n_basic_blocks);
2924 sbitmap_ones (blocks);
2925 calculate_global_regs_live (blocks, blocks, flags & PROP_SCAN_DEAD_CODE);
2928 /* The only pseudos that are live at the beginning of the function are
2929 those that were not set anywhere in the function.  local-alloc doesn't
2930 know how to handle these correctly, so mark them as not local to any
2933 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2934 FIRST_PSEUDO_REGISTER, i,
2935 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2937 /* Now the life information is accurate.  Make one more pass over each
2938 basic block to delete dead stores, create autoincrement addressing
2939 and record how many times each register is used, is set, or dies.  */
2942 tmp = ALLOCA_REG_SET ();
/* Scan blocks in reverse order; propagate_block works backwards from
   live_at_end, so TMP is re-seeded from each block's live_at_end.  */
2944 for (i = n_basic_blocks - 1; i >= 0; --i)
2946 basic_block bb = BASIC_BLOCK (i);
2948 COPY_REG_SET (tmp, bb->global_live_at_end);
2949 propagate_block (tmp, bb->head, bb->end, (regset) NULL, i, flags);
2957 /* We have a problem with any pseudoreg that lives across the setjmp.
2958 ANSI says that if a user variable does not change in value between
2959 the setjmp and the longjmp, then the longjmp preserves it.  This
2960 includes longjmp from a place where the pseudo appears dead.
2961 (In principle, the value still exists if it is in scope.)
2962 If the pseudo goes in a hard reg, some other value may occupy
2963 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2964 Conclusion: such a pseudo must not go in a hard reg.  */
2965 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2966 FIRST_PSEUDO_REGISTER, i,
2968 if (regno_reg_rtx[i] != 0)
/* LIVE_LENGTH of -1 tells the allocator never to put this pseudo in a
   hard register.  */
2970 REG_LIVE_LENGTH (i) = -1;
2971 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
2975 /* Restore regs_ever_live that was provided by reload.  */
2976 if (reload_completed)
2977 memcpy (regs_ever_live, save_regs_ever_live, sizeof (regs_ever_live));
2979 reg_next_use = NULL;
2982 /* Propagate global life info around the graph of basic blocks.  Begin
2983 considering blocks with their corresponding bit set in BLOCKS_IN.
2984 BLOCKS_OUT is set for every block that was changed.  */
2987 calculate_global_regs_live (blocks_in, blocks_out, flags)
2988 sbitmap blocks_in, blocks_out;
2991 basic_block *queue, *qhead, *qtail, *qend;
2992 regset tmp, new_live_at_end;
2995 tmp = ALLOCA_REG_SET ();
2996 new_live_at_end = ALLOCA_REG_SET ();
2998 /* Create a worklist.  Allocate an extra slot for ENTRY_BLOCK, and one
2999 because the `head == tail' style test for an empty queue doesn't
3000 work with a full queue.  */
3001 queue = (basic_block *) alloca ((n_basic_blocks + 2) * sizeof (*queue));
3003 qhead = qend = queue + n_basic_blocks + 2;
3005 /* Queue the blocks set in the initial mask.  Do this in reverse block
3006 number order so that we are more likely for the first round to do
3007 useful work.  We use AUX non-null to flag that the block is queued.  */
3008 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3010 basic_block bb = BASIC_BLOCK (i);
3015 sbitmap_zero (blocks_out);
/* Standard iterative dataflow: pop a block, recompute its live sets,
   and requeue its predecessors whenever live_at_start changes.  */
3017 while (qhead != qtail)
3019 int rescan, changed;
3028 /* Begin by propagating live_at_start from the successor blocks.  */
3029 CLEAR_REG_SET (new_live_at_end);
3030 for (e = bb->succ; e ; e = e->succ_next)
3032 basic_block sb = e->dest;
3033 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
/* ENTRY has no insns of its own; just record the union and move on.  */
3036 if (bb == ENTRY_BLOCK_PTR)
3038 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3042 /* On our first pass through this block, we'll go ahead and continue.
3043 Recognize first pass by local_set NULL.  On subsequent passes, we
3044 get to skip out early if live_at_end wouldn't have changed.  */
3046 if (bb->local_set == NULL)
3048 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3053 /* If any bits were removed from live_at_end, we'll have to
3054 rescan the block.  This wouldn't be necessary if we had
3055 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3056 local_live is really dependent on live_at_end.  */
3057 CLEAR_REG_SET (tmp);
3058 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3059 new_live_at_end, BITMAP_AND_COMPL);
3063 /* Find the set of changed bits.  Take this opportunity
3064 to notice that this set is empty and early out.  */
3065 CLEAR_REG_SET (tmp);
3066 changed = bitmap_operation (tmp, bb->global_live_at_end,
3067 new_live_at_end, BITMAP_XOR);
3071 /* If any of the changed bits overlap with local_set,
3072 we'll have to rescan the block.  Detect overlap by
3073 the AND with ~local_set turning off bits.  */
3074 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3079 /* Let our caller know that BB changed enough to require its
3080 death notes updated.  */
3081 SET_BIT (blocks_out, bb->index);
3085 /* Add to live_at_start the set of all registers in
3086 new_live_at_end that aren't in the old live_at_end.  */
3088 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3090 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3092 changed = bitmap_operation (bb->global_live_at_start,
3093 bb->global_live_at_start,
3100 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3102 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3103 into live_at_start.  */
3104 propagate_block (new_live_at_end, bb->head, bb->end,
3105 bb->local_set, i, flags);
3107 /* If live_at start didn't change, no need to go farther.  */
3108 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3111 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3114 /* Queue all predecessors of BB so that we may re-examine
3115 their live_at_end.  */
3116 for (e = bb->pred; e ; e = e->pred_next)
3118 basic_block pb = e->src;
3119 if (pb->aux == NULL)
3130 FREE_REG_SET (new_live_at_end);
/* Release the per-block scratch local_set regsets for changed blocks.  */
3132 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3134 basic_block bb = BASIC_BLOCK (i);
3135 FREE_REG_SET (bb->local_set);
3139 /* Subroutines of life analysis.  */
3141 /* Allocate the permanent data structures that represent the results
3142 of life analysis.  Not static since used also for stupid life analysis.  */
3145 allocate_bb_life_data ()
/* One live_at_start/live_at_end regset pair per real basic block,
   allocated on the function obstack so they persist with the CFG.  */
3149 for (i = 0; i < n_basic_blocks; i++)
3151 basic_block bb = BASIC_BLOCK (i);
3153 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3154 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
/* The ENTRY and EXIT sentinels only need the regset facing the real
   blocks: ENTRY's live_at_end and EXIT's live_at_start.  */
3157 ENTRY_BLOCK_PTR->global_live_at_end
3158 = OBSTACK_ALLOC_REG_SET (function_obstack);
3159 EXIT_BLOCK_PTR->global_live_at_start
3160 = OBSTACK_ALLOC_REG_SET (function_obstack);
3162 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
/* Allocate (or grow) the per-register life-analysis arrays and reset
   the REG_N_SETS counters that will be recomputed by this pass.  */
3166 allocate_reg_life_data ()
3170 /* Recalculate the register space, in case it has grown.  Old style
3171 vector oriented regsets would set regset_{size,bytes} here also.  */
3172 allocate_reg_info (max_regno, FALSE, FALSE);
3174 /* Because both reg_scan and flow_analysis want to set up the REG_N_SETS
3175 information, explicitly reset it here.  The allocation should have
3176 already happened on the previous reg_scan pass.  Make sure in case
3177 some more registers were allocated.  */
3178 for (i = 0; i < max_regno; i++)
3182 /* Compute the registers live at the beginning of a basic block
3183 from those live at the end.
3185 When called, OLD contains those live at the end.
3186 On return, it contains those live at the beginning.
3187 FIRST and LAST are the first and last insns of the basic block.
3189 FINAL is nonzero if we are doing the final pass which is not
3190 for computing the life info (since that has already been done)
3191 but for acting on it.  On this pass, we delete dead stores,
3192 set up the logical links and dead-variables lists of instructions,
3193 and merge instructions for autoincrement and autodecrement addresses.
3195 SIGNIFICANT is nonzero only the first time for each basic block.
3196 If it is nonzero, it points to a regset in which we store
3197 a 1 for each register that is set within the block.
3199 BNUM is the number of the basic block.  */
3202 propagate_block (old, first, last, significant, bnum, flags)
3203 register regset old;
3215 /* Find the loop depth for this block.  Ignore loop level changes in the
3216 middle of the basic block -- for register allocation purposes, the
3217 important uses will be in the blocks wholly contained within the loop
3218 not in the loop pre-header or post-trailer.  */
3219 loop_depth = BASIC_BLOCK (bnum)->loop_depth;
/* DEAD collects registers set by the current insn; LIVE collects
   registers used by it.  Both are reset for every insn.  */
3221 dead = ALLOCA_REG_SET ();
3222 live = ALLOCA_REG_SET ();
3226 if (flags & PROP_REG_INFO)
3230 /* Process the regs live at the end of the block.
3231 Mark them as not local to any one basic block.  */
3232 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3234 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
3238 /* Scan the block an insn at a time from end to beginning.  */
3240 for (insn = last; ; insn = prev)
3242 prev = PREV_INSN (insn);
3244 if (GET_CODE (insn) == NOTE)
3246 /* If this is a call to `setjmp' et al,
3247 warn if any non-volatile datum is live.  */
3249 if ((flags & PROP_REG_INFO)
3250 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3251 IOR_REG_SET (regs_live_at_setjmp, old);
3254 /* Update the life-status of regs for this insn.
3255 First DEAD gets which regs are set in this insn
3256 then LIVE gets which regs are used in this insn.
3257 Then the regs live before the insn
3258 are those live after, with DEAD regs turned off,
3259 and then LIVE regs turned on.  */
3261 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3264 rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3265 int insn_is_dead = 0;
3266 int libcall_is_dead = 0;
3268 if (flags & PROP_SCAN_DEAD_CODE)
3270 insn_is_dead = (insn_dead_p (PATTERN (insn), old, 0, REG_NOTES (insn))
3271 /* Don't delete something that refers to volatile storage!  */
3272 && ! INSN_VOLATILE (insn));
3273 libcall_is_dead = (insn_is_dead && note != 0
3274 && libcall_dead_p (PATTERN (insn), old, note, insn));
3277 /* We almost certainly don't want to delete prologue or epilogue
3278 instructions.  Warn about probable compiler lossage.  */
3279 if ((flags & PROP_KILL_DEAD_CODE)
3282 && (HAVE_epilogue || HAVE_prologue)
3283 && prologue_epilogue_contains (insn))
3285 warning ("ICE: would have deleted prologue/epilogue insn");
3287 libcall_is_dead = insn_is_dead = 0;
3290 /* If an instruction consists of just dead store(s) on final pass,
3291 "delete" it by turning it into a NOTE of type NOTE_INSN_DELETED.
3292 We could really delete it with delete_insn, but that
3293 can cause trouble for first or last insn in a basic block.  */
3294 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3296 PUT_CODE (insn, NOTE);
3297 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
3298 NOTE_SOURCE_FILE (insn) = 0;
3300 /* CC0 is now known to be dead.  Either this insn used it,
3301 in which case it doesn't anymore, or clobbered it,
3302 so the next insn can't use it.  */
3305 /* If this insn is copying the return value from a library call,
3306 delete the entire library call.  */
3307 if (libcall_is_dead)
/* The REG_RETVAL note points at the first insn of the libcall
   sequence; turn every insn of the sequence into a deleted note.  */
3309 rtx first = XEXP (note, 0);
3311 while (INSN_DELETED_P (first))
3312 first = NEXT_INSN (first);
3317 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3318 NOTE_SOURCE_FILE (p) = 0;
3324 CLEAR_REG_SET (dead);
3325 CLEAR_REG_SET (live);
3327 /* See if this is an increment or decrement that can be
3328 merged into a following memory address.  */
3331 register rtx x = single_set (insn);
3333 /* Does this instruction increment or decrement a register?  */
3334 if (!reload_completed
3335 && (flags & PROP_AUTOINC)
3337 && GET_CODE (SET_DEST (x)) == REG
3338 && (GET_CODE (SET_SRC (x)) == PLUS
3339 || GET_CODE (SET_SRC (x)) == MINUS)
3340 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3341 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3342 /* Ok, look for a following memory ref we can combine with.
3343 If one is found, change the memory ref to a PRE_INC
3344 or PRE_DEC, cancel this insn, and return 1.
3345 Return 0 if nothing has been done.  */
3346 && try_pre_increment_1 (insn))
3349 #endif /* AUTO_INC_DEC */
3351 /* If this is not the final pass, and this insn is copying the
3352 value of a library call and it's dead, don't scan the
3353 insns that perform the library call, so that the call's
3354 arguments are not marked live.  */
3355 if (libcall_is_dead)
3357 /* Mark the dest reg as `significant'.  */
3358 mark_set_regs (old, dead, PATTERN (insn), NULL_RTX,
3359 significant, flags);
/* Skip the scan back to the first insn of the libcall.  */
3361 insn = XEXP (note, 0);
3362 prev = PREV_INSN (insn);
3364 else if (GET_CODE (PATTERN (insn)) == SET
3365 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3366 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3367 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3368 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3369 /* We have an insn to pop a constant amount off the stack.
3370 (Such insns use PLUS regardless of the direction of the stack,
3371 and any insn to adjust the stack by a constant is always a pop.)
3372 These insns, if not dead stores, have no effect on life.  */
3376 /* Any regs live at the time of a call instruction
3377 must not go in a register clobbered by calls.
3378 Find all regs now live and record this for them.  */
3380 if (GET_CODE (insn) == CALL_INSN
3381 && (flags & PROP_REG_INFO))
3382 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3384 REG_N_CALLS_CROSSED (i)++;
3387 /* LIVE gets the regs used in INSN;
3388 DEAD gets those set by it.  Dead insns don't make anything
3391 mark_set_regs (old, dead, PATTERN (insn),
3392 insn, significant, flags);
3394 /* If an insn doesn't use CC0, it becomes dead since we
3395 assume that every insn clobbers it.  So show it dead here;
3396 mark_used_regs will set it live if it is referenced.  */
3400 mark_used_regs (old, live, PATTERN (insn), flags, insn);
3402 /* Sometimes we may have inserted something before INSN (such as
3403 a move) when we make an auto-inc.  So ensure we will scan
3406 prev = PREV_INSN (insn);
3409 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
/* Mark registers USEd by the call's FUNCTION_USAGE list as live.  */
3415 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3417 note = XEXP (note, 1))
3418 if (GET_CODE (XEXP (note, 0)) == USE)
3419 mark_used_regs (old, live, XEXP (XEXP (note, 0), 0),
3422 /* Each call clobbers all call-clobbered regs that are not
3423 global or fixed.  Note that the function-value reg is a
3424 call-clobbered reg, and mark_set_regs has already had
3425 a chance to handle it.  */
3427 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3428 if (call_used_regs[i] && ! global_regs[i]
3431 SET_REGNO_REG_SET (dead, i);
3433 SET_REGNO_REG_SET (significant, i);
3436 /* The stack ptr is used (honorarily) by a CALL insn.  */
3437 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
3439 /* Calls may also reference any of the global registers,
3440 so they are made live.  */
3441 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3443 mark_used_regs (old, live,
3444 gen_rtx_REG (reg_raw_mode[i], i),
3447 /* Calls also clobber memory.  */
3448 free_EXPR_LIST_list (&mem_set_list);
3451 /* Update OLD for the registers used or set.  */
3452 AND_COMPL_REG_SET (old, dead);
3453 IOR_REG_SET (old, live);
3457 /* On final pass, update counts of how many insns each reg is live
3459 if (flags & PROP_REG_INFO)
3460 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3461 { REG_LIVE_LENGTH (i)++; });
3468 FREE_REG_SET (dead);
3469 FREE_REG_SET (live);
3470 free_EXPR_LIST_list (&mem_set_list);
3473 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3474 (SET expressions whose destinations are registers dead after the insn).
3475 NEEDED is the regset that says which regs are alive after the insn.
3477 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3479 If X is the entire body of an insn, NOTES contains the reg notes
3480 pertaining to the insn.  */
3483 insn_dead_p (x, needed, call_ok, notes)
3487 rtx notes ATTRIBUTE_UNUSED;
3489 enum rtx_code code = GET_CODE (x);
3492 /* If flow is invoked after reload, we must take existing AUTO_INC
3493 expressions into account.  */
3494 if (reload_completed)
3496 for ( ; notes; notes = XEXP (notes, 1))
3498 if (REG_NOTE_KIND (notes) == REG_INC)
3500 int regno = REGNO (XEXP (notes, 0));
3502 /* Don't delete insns to set global regs.  */
3503 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3504 || REGNO_REG_SET_P (needed, regno))
3511 /* If setting something that's a reg or part of one,
3512 see if that register's altered value will be live.  */
3516 rtx r = SET_DEST (x);
3518 /* A SET that is a subroutine call cannot be dead.  */
3519 if (! call_ok && GET_CODE (SET_SRC (x)) == CALL)
/* DEST of CC0 is special-cased: whether the condition-code store is
   dead is tracked separately (surrounding logic elided here).  */
3523 if (GET_CODE (r) == CC0)
3527 if (GET_CODE (r) == MEM && ! MEM_VOLATILE_P (r))
3530 /* Walk the set of memory locations we are currently tracking
3531 and see if one is an identical match to this memory location.
3532 If so, this memory write is dead (remember, we're walking
3533 backwards from the end of the block to the start.  */
3534 temp = mem_set_list;
3537 if (rtx_equal_p (XEXP (temp, 0), r))
3539 temp = XEXP (temp, 1);
/* Strip wrappers to reach the underlying REG, if any.  */
3543 while (GET_CODE (r) == SUBREG || GET_CODE (r) == STRICT_LOW_PART
3544 || GET_CODE (r) == ZERO_EXTRACT
3547 if (GET_CODE (r) == REG)
3549 int regno = REGNO (r);
3551 /* Don't delete insns to set global regs.  */
3552 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3553 /* Make sure insns to set frame pointer aren't deleted.  */
3554 || (regno == FRAME_POINTER_REGNUM
3555 && (! reload_completed || frame_pointer_needed))
3556 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3557 || (regno == HARD_FRAME_POINTER_REGNUM
3558 && (! reload_completed || frame_pointer_needed))
3560 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3561 /* Make sure insns to set arg pointer are never deleted
3562 (if the arg pointer isn't fixed, there will be a USE for
3563 it, so we can treat it normally).  */
3564 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3566 || REGNO_REG_SET_P (needed, regno))
3569 /* If this is a hard register, verify that subsequent words are
3571 if (regno < FIRST_PSEUDO_REGISTER)
3573 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3576 if (REGNO_REG_SET_P (needed, regno+n))
3584 /* If performing several activities,
3585 insn is dead if each activity is individually dead.
3586 Also, CLOBBERs and USEs can be ignored; a CLOBBER or USE
3587 that's inside a PARALLEL doesn't make the insn worth keeping.  */
3588 else if (code == PARALLEL)
3590 int i = XVECLEN (x, 0);
3592 for (i--; i >= 0; i--)
3593 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3594 && GET_CODE (XVECEXP (x, 0, i)) != USE
3595 && ! insn_dead_p (XVECEXP (x, 0, i), needed, call_ok, NULL_RTX))
3601 /* A CLOBBER of a pseudo-register that is dead serves no purpose.  That
3602 is not necessarily true for hard registers.  */
3603 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3604 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3605 && ! REGNO_REG_SET_P (needed, REGNO (XEXP (x, 0))))
3608 /* We do not check other CLOBBER or USE here.  An insn consisting of just
3609 a CLOBBER or just a USE should not be deleted.  */
3613 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3614 return 1 if the entire library call is dead.
3615 This is true if X copies a register (hard or pseudo)
3616 and if the hard return reg of the call insn is dead.
3617 (The caller should have tested the destination of X already for death.)
3619 If this insn doesn't just copy a register, then we don't
3620 have an ordinary libcall.  In that case, cse could not have
3621 managed to substitute the source for the dest later on,
3622 so we can assume the libcall is dead.
3624 NEEDED is the bit vector of pseudoregs live before this insn.
3625 NOTE is the REG_RETVAL note of the insn.  INSN is the insn itself.  */
3628 libcall_dead_p (x, needed, note, insn)
3634 register RTX_CODE code = GET_CODE (x);
3638 register rtx r = SET_SRC (x);
3639 if (GET_CODE (r) == REG)
/* The REG_RETVAL note points at the first insn of the libcall
   sequence; scan forward from it to find the CALL_INSN.  */
3641 rtx call = XEXP (note, 0);
3645 /* Find the call insn.  */
3646 while (call != insn && GET_CODE (call) != CALL_INSN)
3647 call = NEXT_INSN (call);
3649 /* If there is none, do nothing special,
3650 since ordinary death handling can understand these insns.  */
3654 /* See if the hard reg holding the value is dead.
3655 If this is a PARALLEL, find the call within it.  */
3656 call_pat = PATTERN (call);
3657 if (GET_CODE (call_pat) == PARALLEL)
3659 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3660 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
3661 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
3664 /* This may be a library call that is returning a value
3665 via invisible pointer.  Do nothing special, since
3666 ordinary death handling can understand these insns.  */
3670 call_pat = XVECEXP (call_pat, 0, i);
/* CALL_OK is 1 here: a libcall's own CALL does not keep it alive.  */
3673 return insn_dead_p (call_pat, needed, 1, REG_NOTES (call));
3679 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3680 live at function entry.  Don't count global register variables, variables
3681 in registers that can be used for function arg passing, or variables in
3682 fixed hard registers.  */
3685 regno_uninitialized (regno)
/* With no CFG (n_basic_blocks == 0) we cannot answer; excluded hard
   registers also report "initialized".  */
3688 if (n_basic_blocks == 0
3689 || (regno < FIRST_PSEUDO_REGISTER
3690 && (global_regs[regno]
3691 || fixed_regs[regno]
3692 || FUNCTION_ARG_REGNO_P (regno))))
/* Live at the start of block 0 == live at function entry == used
   before set.  */
3695 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3698 /* 1 if register REGNO was alive at a place where `setjmp' was called
3699 and was set more than once or is an argument.
3700 Such regs may be clobbered by `longjmp'.  */
3703 regno_clobbered_at_setjmp (regno)
3706 if (n_basic_blocks == 0)
/* "Set more than once" or "live at entry" (i.e. an argument), combined
   with being live across a setjmp, marks the register as at risk.  */
3709 return ((REG_N_SETS (regno) > 1
3710 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3711 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
3714 /* INSN references memory, possibly using autoincrement addressing modes.
3715 Find any entries on the mem_set_list that need to be invalidated due
3716 to an address change.  */
3718 invalidate_mems_from_autoinc (insn)
/* NOTE is initialized by the for loop below; the previous declaration
   redundantly initialized it from REG_NOTES (insn) only to have the
   loop header overwrite it immediately.  */
3721 rtx note;
3722 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3724 if (REG_NOTE_KIND (note) == REG_INC)
3726 rtx temp = mem_set_list;
3727 rtx prev = NULL_RTX;
/* Walk mem_set_list; drop every tracked MEM whose address mentions the
   auto-incremented register, since its address is now stale.  */
3732 next = XEXP (temp, 1);
3733 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
3735 /* Splice temp out of list.  */
3737 XEXP (prev, 1) = next;
3739 mem_set_list = next;
3740 free_EXPR_LIST_node (temp);
3750 /* Process the registers that are set within X. Their bits are set to
3751 1 in the regset DEAD, because they are dead prior to this insn.
3753 If INSN is nonzero, it is the insn being processed.
3755 FLAGS is the set of operations to perform. */
3758 mark_set_regs (needed, dead, x, insn, significant, flags)
3766 register RTX_CODE code = GET_CODE (x);
/* A lone SET or CLOBBER is handled directly ...  */
3768 if (code == SET || code == CLOBBER)
3769 mark_set_1 (needed, dead, x, insn, significant, flags);
/* ... while a PARALLEL may contain several; process each element that
   is itself a SET or CLOBBER.  */
3770 else if (code == PARALLEL)
3773 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3775 code = GET_CODE (XVECEXP (x, 0, i));
3776 if (code == SET || code == CLOBBER)
3777 mark_set_1 (needed, dead, XVECEXP (x, 0, i), insn,
3778 significant, flags);
3783 /* Process a single SET rtx, X. */
/* NOTE(review): this listing omits interleaved source lines (braces, some
   declarations and control-flow statements); comments describe intent only
   and should be verified against the complete source.  */
3786 mark_set_1 (needed, dead, x, insn, significant, flags)
3794 register int regno = -1;
3795 register rtx reg = SET_DEST (x);
3797 /* Some targets place small structures in registers for
3798 return values of functions. We have to detect this
3799 case specially here to get correct flow information. */
3800 if (GET_CODE (reg) == PARALLEL
3801 && GET_MODE (reg) == BLKmode)
/* Recurse on each element of the PARALLEL destination.  */
3805 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
3806 mark_set_1 (needed, dead, XVECEXP (reg, 0, i), insn,
3807 significant, flags);
3811 /* Modifying just one hardware register of a multi-reg value
3812 or just a byte field of a register
3813 does not mean the value from before this insn is now dead.
3814 But it does mean liveness of that register at the end of the block
3817 Within mark_set_1, however, we treat it as if the register is
3818 indeed modified. mark_used_regs will, however, also treat this
3819 register as being used. Thus, we treat these insns as setting a
3820 new value for the register as a function of its old value. This
3821 causes LOG_LINKS to be made appropriately and this will help combine. */
3823 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
3824 || GET_CODE (reg) == SIGN_EXTRACT
3825 || GET_CODE (reg) == STRICT_LOW_PART)
3826 reg = XEXP (reg, 0);
3828 /* If this set is a MEM, then it kills any aliased writes.
3829 If this set is a REG, then it kills any MEMs which use the reg. */
3830 if (flags & PROP_SCAN_DEAD_CODE)
3832 if (GET_CODE (reg) == MEM
3833 || GET_CODE (reg) == REG)
3835 rtx temp = mem_set_list;
3836 rtx prev = NULL_RTX;
/* Walk mem_set_list, splicing out recorded stores this set invalidates:
   output-dependent MEMs, or MEMs whose address uses the set register.  */
3841 next = XEXP (temp, 1);
3842 if ((GET_CODE (reg) == MEM
3843 && output_dependence (XEXP (temp, 0), reg))
3844 || (GET_CODE (reg) == REG
3845 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
3847 /* Splice this entry out of the list. */
3849 XEXP (prev, 1) = next;
3851 mem_set_list = next;
3852 free_EXPR_LIST_node (temp);
3860 /* If the memory reference had embedded side effects (autoincrement
3861 address modes), then we may need to kill some entries on the
3863 if (insn && GET_CODE (reg) == MEM)
3864 invalidate_mems_from_autoinc (insn);
/* Record this store so a later identical store can be recognized as
   redundant.  */
3866 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
3867 /* We do not know the size of a BLKmode store, so we do not track
3868 them for redundant store elimination. */
3869 && GET_MODE (reg) != BLKmode
3870 /* There are no REG_INC notes for SP, so we can't assume we'll see
3871 everything that invalidates it. To be safe, don't eliminate any
3872 stores through SP; none of them should be redundant anyway. */
3873 && ! reg_mentioned_p (stack_pointer_rtx, reg))
3874 mem_set_list = alloc_EXPR_LIST (0, reg, mem_set_list);
/* From here on: a genuine register destination.  Exclude the (hard or
   soft) frame pointer and the arg pointer while they are still needed,
   and global register variables -- those are handled specially.  */
3877 if (GET_CODE (reg) == REG
3878 && (regno = REGNO (reg),
3879 ! (regno == FRAME_POINTER_REGNUM
3880 && (! reload_completed || frame_pointer_needed)))
3881 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3882 && ! (regno == HARD_FRAME_POINTER_REGNUM
3883 && (! reload_completed || frame_pointer_needed))
3885 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3886 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3888 && ! (regno < FIRST_PSEUDO_REGISTER && global_regs[regno]))
3889 /* && regno != STACK_POINTER_REGNUM) -- let's try without this. */
3891 int some_needed = REGNO_REG_SET_P (needed, regno);
3892 int some_not_needed = ! some_needed;
3894 /* Mark it as a significant register for this basic block. */
3896 SET_REGNO_REG_SET (significant, regno);
3898 /* Mark it as dead before this insn. */
3899 SET_REGNO_REG_SET (dead, regno);
3901 /* A hard reg in a wide mode may really be multiple registers.
3902 If so, mark all of them just like the first. */
3903 if (regno < FIRST_PSEUDO_REGISTER)
3907 /* Nothing below is needed for the stack pointer; get out asap.
3908 Eg, log links aren't needed, since combine won't use them. */
3909 if (regno == STACK_POINTER_REGNUM)
3912 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
/* Mark each additional hard reg of the multi-word value, tracking
   whether every word, some words, or no words are still needed.  */
3915 int regno_n = regno + n;
3916 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
3918 SET_REGNO_REG_SET (significant, regno_n);
3920 SET_REGNO_REG_SET (dead, regno_n);
3921 some_needed |= needed_regno;
3922 some_not_needed |= ! needed_regno;
3926 /* Additional data to record if this is the final pass. */
3927 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
3928 | PROP_DEATH_NOTES | PROP_AUTOINC))
3931 register int blocknum = BLOCK_NUM (insn);
3934 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3935 y = reg_next_use[regno];
3937 /* If this is a hard reg, record this function uses the reg. */
3939 if (regno < FIRST_PSEUDO_REGISTER)
3942 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
3944 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3945 for (i = regno; i < endregno; i++)
3947 /* The next use is no longer "next", since a store
3949 reg_next_use[i] = 0;
3952 if (flags & PROP_REG_INFO)
3953 for (i = regno; i < endregno; i++)
3955 regs_ever_live[i] = 1;
/* Otherwise: a pseudo register -- analogous bookkeeping.  */
3961 /* The next use is no longer "next", since a store
3963 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3964 reg_next_use[regno] = 0;
3966 /* Keep track of which basic blocks each reg appears in. */
3968 if (flags & PROP_REG_INFO)
3970 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
3971 REG_BASIC_BLOCK (regno) = blocknum;
3972 else if (REG_BASIC_BLOCK (regno) != blocknum)
3973 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
3975 /* Count (weighted) references, stores, etc. This counts a
3976 register twice if it is modified, but that is correct. */
3977 REG_N_SETS (regno)++;
3978 REG_N_REFS (regno) += loop_depth;
3980 /* The insns where a reg is live are normally counted
3981 elsewhere, but we want the count to include the insn
3982 where the reg is set, and the normal counting mechanism
3983 would not count it. */
3984 REG_LIVE_LENGTH (regno)++;
/* Every word of the destination is needed afterward: record a LOG_LINK
   from the next use of the reg back to this setter, for combine.  */
3988 if (! some_not_needed)
3990 if (flags & PROP_LOG_LINKS)
3992 /* Make a logical link from the next following insn
3993 that uses this register, back to this insn.
3994 The following insns have already been processed.
3996 We don't build a LOG_LINK for hard registers containing
3997 in ASM_OPERANDs. If these registers get replaced,
3998 we might wind up changing the semantics of the insn,
3999 even if reload can make what appear to be valid
4000 assignments later. */
4001 if (y && (BLOCK_NUM (y) == blocknum)
4002 && (regno >= FIRST_PSEUDO_REGISTER
4003 || asm_noperands (PATTERN (y)) < 0))
4004 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
/* No word of the destination is needed afterward: unused store.  */
4007 else if (! some_needed)
4009 if (flags & PROP_REG_INFO)
4010 REG_N_DEATHS (REGNO (reg))++;
4012 if (flags & PROP_DEATH_NOTES)
4014 /* Note that dead stores have already been deleted
4015 when possible. If we get here, we have found a
4016 dead store that cannot be eliminated (because the
4017 same insn does something useful). Indicate this
4018 by marking the reg being set as dying here. */
4020 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
/* Mixed case: only some words of a multi-word hard reg are needed.  */
4025 if (flags & PROP_DEATH_NOTES)
4027 /* This is a case where we have a multi-word hard register
4028 and some, but not all, of the words of the register are
4029 needed in subsequent insns. Write REG_UNUSED notes
4030 for those parts that were not needed. This case should
4035 for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
4037 if (!REGNO_REG_SET_P (needed, regno + i))
4041 gen_rtx_REG (reg_raw_mode[regno + i], regno + i),
/* The destination is one of the excluded registers (frame/arg pointer,
   global reg variable): just invalidate its next-use information.  */
4047 else if (GET_CODE (reg) == REG)
4049 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4050 reg_next_use[regno] = 0;
4053 /* If this is the last pass and this is a SCRATCH, show it will be dying
4054 here and count it. */
4055 else if (GET_CODE (reg) == SCRATCH)
4057 if (flags & PROP_DEATH_NOTES)
4059 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4065 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
/* NOTE(review): this listing omits interleaved lines (the rest of this
   header comment, declarations, braces); this function appears to sit
   inside #ifdef AUTO_INC_DEC -- see the #endif below.  */
4069 find_auto_inc (needed, x, insn)
4074 rtx addr = XEXP (x, 0);
4075 HOST_WIDE_INT offset = 0;
4078 /* Here we detect use of an index register which might be good for
4079 postincrement, postdecrement, preincrement, or predecrement. */
/* Canonicalize (plus REG const) into ADDR = REG, OFFSET = const.  */
4081 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4082 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
4084 if (GET_CODE (addr) == REG)
4087 register int size = GET_MODE_SIZE (GET_MODE (x));
4090 int regno = REGNO (addr);
4092 /* Is the next use an increment that might make auto-increment? */
4093 if ((incr = reg_next_use[regno]) != 0
4094 && (set = single_set (incr)) != 0
4095 && GET_CODE (set) == SET
4096 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
4097 /* Can't add side effects to jumps; if reg is spilled and
4098 reloaded, there's no way to store back the altered value. */
4099 && GET_CODE (insn) != JUMP_INSN
4100 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
4101 && XEXP (y, 0) == addr
4102 && GET_CODE (XEXP (y, 1)) == CONST_INT
/* The increment amount and the MEM's offset must together match one of
   the four auto-modify addressing forms the target supports.  */
4103 && ((HAVE_POST_INCREMENT
4104 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
4105 || (HAVE_POST_DECREMENT
4106 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
4107 || (HAVE_PRE_INCREMENT
4108 && (INTVAL (XEXP (y, 1)) == size && offset == size))
4109 || (HAVE_PRE_DECREMENT
4110 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
4111 /* Make sure this reg appears only once in this insn. */
4112 && (use = find_use_as_address (PATTERN (insn), addr, offset),
4113 use != 0 && use != (rtx) 1))
4115 rtx q = SET_DEST (set);
4116 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
4117 ? (offset ? PRE_INC : POST_INC)
4118 : (offset ? PRE_DEC : POST_DEC));
4120 if (dead_or_set_p (incr, addr))
4122 /* This is the simple case. Try to make the auto-inc. If
4123 we can't, we are done. Otherwise, we will do any
4124 needed updates below. */
4125 if (! validate_change (insn, &XEXP (x, 0),
4126 gen_rtx_fmt_e (inc_code, Pmode, addr),
4130 else if (GET_CODE (q) == REG
4131 /* PREV_INSN used here to check the semi-open interval
4133 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4134 /* We must also check for sets of q as q may be
4135 a call clobbered hard register and there may
4136 be a call between PREV_INSN (insn) and incr. */
4137 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4139 /* We have *p followed sometime later by q = p+size.
4140 Both p and q must be live afterward,
4141 and q is not used between INSN and its assignment.
4142 Change it to q = p, ...*q..., q = q+size.
4143 Then fall into the usual case. */
/* Emit the copy q = p -- presumably inside start_sequence /
   end_sequence; the listing omits those lines.  */
4148 emit_move_insn (q, addr);
4149 insns = get_insns ();
/* Keep the insn-to-block map consistent for the new insns.  */
4152 bb = BLOCK_FOR_INSN (insn);
4153 for (temp = insns; temp; temp = NEXT_INSN (temp))
4154 set_block_for_insn (temp, bb);
4156 /* If we can't make the auto-inc, or can't make the
4157 replacement into Y, exit. There's no point in making
4158 the change below if we can't do the auto-inc and doing
4159 so is not correct in the pre-inc case. */
4161 validate_change (insn, &XEXP (x, 0),
4162 gen_rtx_fmt_e (inc_code, Pmode, q),
4164 validate_change (incr, &XEXP (y, 0), q, 1);
4165 if (! apply_change_group ())
4168 /* We now know we'll be doing this change, so emit the
4169 new insn(s) and do the updates. */
4170 emit_insns_before (insns, insn);
/* If INSN headed its basic block, the new copy becomes the head.  */
4172 if (BLOCK_FOR_INSN (insn)->head == insn)
4173 BLOCK_FOR_INSN (insn)->head = insns;
4175 /* INCR will become a NOTE and INSN won't contain a
4176 use of ADDR. If a use of ADDR was just placed in
4177 the insn before INSN, make that the next use.
4178 Otherwise, invalidate it. */
4179 if (GET_CODE (PREV_INSN (insn)) == INSN
4180 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4181 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
4182 reg_next_use[regno] = PREV_INSN (insn);
4184 reg_next_use[regno] = 0;
4189 /* REGNO is now used in INCR which is below INSN, but
4190 it previously wasn't live here. If we don't mark
4191 it as needed, we'll put a REG_DEAD note for it
4192 on this insn, which is incorrect. */
4193 SET_REGNO_REG_SET (needed, regno);
4195 /* If there are any calls between INSN and INCR, show
4196 that REGNO now crosses them. */
4197 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4198 if (GET_CODE (temp) == CALL_INSN)
4199 REG_N_CALLS_CROSSED (regno)++;
4204 /* If we haven't returned, it means we were able to make the
4205 auto-inc, so update the status. First, record that this insn
4206 has an implicit side effect. */
4209 = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
4211 /* Modify the old increment-insn to simply copy
4212 the already-incremented value of our register. */
4213 if (! validate_change (incr, &SET_SRC (set), addr, 0))
4216 /* If that makes it a no-op (copying the register into itself) delete
4217 it so it won't appear to be a "use" and a "set" of this
4219 if (SET_DEST (set) == addr)
/* Turn INCR into a deleted-insn note rather than unlinking it.  */
4221 PUT_CODE (incr, NOTE);
4222 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4223 NOTE_SOURCE_FILE (incr) = 0;
4226 if (regno >= FIRST_PSEUDO_REGISTER)
4228 /* Count an extra reference to the reg. When a reg is
4229 incremented, spilling it is worse, so we want to make
4230 that less likely. */
4231 REG_N_REFS (regno) += loop_depth;
4233 /* Count the increment as a setting of the register,
4234 even though it isn't a SET in rtl. */
4235 REG_N_SETS (regno)++;
4240 #endif /* AUTO_INC_DEC */
4242 /* Scan expression X and store a 1-bit in LIVE for each reg it uses.
4243 This is done assuming the registers needed from X
4244 are those that have 1-bits in NEEDED.
4246 FLAGS is the set of enabled operations.
4248 INSN is the containing instruction. If INSN is dead, this function is not
/* NOTE(review): this listing omits interleaved lines (the `switch' on
   CODE, its `case' labels, braces, and declarations); comments marking
   "presumably the X case" are inferred and should be verified.  */
4252 mark_used_regs (needed, live, x, flags, insn)
4259 register RTX_CODE code;
4264 code = GET_CODE (x);
/* Presumably the CLOBBER case of the dispatch on CODE.  */
4284 /* If we are clobbering a MEM, mark any registers inside the address
4286 if (GET_CODE (XEXP (x, 0)) == MEM)
4287 mark_used_regs (needed, live, XEXP (XEXP (x, 0), 0), flags, insn);
/* Presumably the MEM case.  */
4291 /* Don't bother watching stores to mems if this is not the
4292 final pass. We'll not be deleting dead stores this round. */
4293 if (flags & PROP_SCAN_DEAD_CODE)
4295 /* Invalidate the data for the last MEM stored, but only if MEM is
4296 something that can be stored into. */
4297 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
4298 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
4299 ; /* needn't clear the memory set list */
4302 rtx temp = mem_set_list;
4303 rtx prev = NULL_RTX;
/* Splice out recorded stores that this load may read from
   (anti-dependence), since the store is observed here.  */
4308 next = XEXP (temp, 1);
4309 if (anti_dependence (XEXP (temp, 0), x))
4311 /* Splice temp out of the list. */
4313 XEXP (prev, 1) = next;
4315 mem_set_list = next;
4316 free_EXPR_LIST_node (temp);
4324 /* If the memory reference had embedded side effects (autoincrement
4325 address modes), then we may need to kill some entries on the
4328 invalidate_mems_from_autoinc (insn);
4332 if (flags & PROP_AUTOINC)
4333 find_auto_inc (needed, x, insn);
/* Presumably the SUBREG case.  */
4338 if (GET_CODE (SUBREG_REG (x)) == REG
4339 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
4340 && (GET_MODE_SIZE (GET_MODE (x))
4341 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
4342 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
4344 /* While we're here, optimize this case. */
4347 /* In case the SUBREG is not of a register, don't optimize */
4348 if (GET_CODE (x) != REG)
4350 mark_used_regs (needed, live, x, flags, insn);
4354 /* ... fall through ... */
4357 /* See a register other than being set
4358 => mark it as needed. */
4362 int some_needed = REGNO_REG_SET_P (needed, regno);
4363 int some_not_needed = ! some_needed;
4365 SET_REGNO_REG_SET (live, regno);
4367 /* A hard reg in a wide mode may really be multiple registers.
4368 If so, mark all of them just like the first. */
4369 if (regno < FIRST_PSEUDO_REGISTER)
4373 /* For stack ptr or fixed arg pointer,
4374 nothing below can be necessary, so waste no more time. */
4375 if (regno == STACK_POINTER_REGNUM
4376 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4377 || (regno == HARD_FRAME_POINTER_REGNUM
4378 && (! reload_completed || frame_pointer_needed))
4380 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4381 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4383 || (regno == FRAME_POINTER_REGNUM
4384 && (! reload_completed || frame_pointer_needed)))
4386 /* If this is a register we are going to try to eliminate,
4387 don't mark it live here. If we are successful in
4388 eliminating it, it need not be live unless it is used for
4389 pseudos, in which case it will have been set live when
4390 it was allocated to the pseudos. If the register will not
4391 be eliminated, reload will set it live at that point. */
4393 if (! TEST_HARD_REG_BIT (elim_reg_set, regno))
4394 regs_ever_live[regno] = 1;
4397 /* No death notes for global register variables;
4398 their values are live after this function exits. */
4399 if (global_regs[regno])
4401 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4402 reg_next_use[regno] = insn;
/* Mark the remaining words of a multi-word hard register.  */
4406 n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4409 int regno_n = regno + n;
4410 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
4412 SET_REGNO_REG_SET (live, regno_n);
4413 some_needed |= needed_regno;
4414 some_not_needed |= ! needed_regno;
4418 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4420 /* Record where each reg is used, so when the reg
4421 is set we know the next insn that uses it. */
4423 reg_next_use[regno] = insn;
4425 if (flags & PROP_REG_INFO)
4427 if (regno < FIRST_PSEUDO_REGISTER)
4429 /* If a hard reg is being used,
4430 record that this function does use it. */
4432 i = HARD_REGNO_NREGS (regno, GET_MODE (x));
4436 regs_ever_live[regno + --i] = 1;
4441 /* Keep track of which basic block each reg appears in. */
4443 register int blocknum = BLOCK_NUM (insn);
4445 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4446 REG_BASIC_BLOCK (regno) = blocknum;
4447 else if (REG_BASIC_BLOCK (regno) != blocknum)
4448 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4450 /* Count (weighted) number of uses of each reg. */
4452 REG_N_REFS (regno) += loop_depth;
4456 /* Record and count the insns in which a reg dies.
4457 If it is used in this insn and was dead below the insn
4458 then it dies in this insn. If it was set in this insn,
4459 we do not make a REG_DEAD note; likewise if we already
4460 made such a note. */
4462 if (flags & PROP_DEATH_NOTES)
4465 && ! dead_or_set_p (insn, x)
4467 && (regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
4471 /* Check for the case where the register dying partially
4472 overlaps the register set by this insn. */
4473 if (regno < FIRST_PSEUDO_REGISTER
4474 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
4476 int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4478 some_needed |= dead_or_set_regno_p (insn, regno + n);
4481 /* If none of the words in X is needed, make a REG_DEAD
4482 note. Otherwise, we must make partial REG_DEAD notes. */
4486 = alloc_EXPR_LIST (REG_DEAD, x, REG_NOTES (insn));
4487 REG_N_DEATHS (regno)++;
4493 /* Don't make a REG_DEAD note for a part of a register
4494 that is set in the insn. */
4496 for (i = HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1;
4498 if (!REGNO_REG_SET_P (needed, regno + i)
4499 && ! dead_or_set_regno_p (insn, regno + i))
4502 (REG_DEAD, gen_rtx_REG (reg_raw_mode[regno + i],
/* Presumably the SET case.  */
4513 register rtx testreg = SET_DEST (x);
4516 /* If storing into MEM, don't show it as being used. But do
4517 show the address as being used. */
4518 if (GET_CODE (testreg) == MEM)
4521 if (flags & PROP_AUTOINC)
4522 find_auto_inc (needed, testreg, insn);
4524 mark_used_regs (needed, live, XEXP (testreg, 0), flags, insn);
4525 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4529 /* Storing in STRICT_LOW_PART is like storing in a reg
4530 in that this SET might be dead, so ignore it in TESTREG.
4531 but in some other ways it is like using the reg.
4533 Storing in a SUBREG or a bit field is like storing the entire
4534 register in that if the register's value is not used
4535 then this SET is not needed. */
4536 while (GET_CODE (testreg) == STRICT_LOW_PART
4537 || GET_CODE (testreg) == ZERO_EXTRACT
4538 || GET_CODE (testreg) == SIGN_EXTRACT
4539 || GET_CODE (testreg) == SUBREG)
4541 if (GET_CODE (testreg) == SUBREG
4542 && GET_CODE (SUBREG_REG (testreg)) == REG
4543 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
4544 && (GET_MODE_SIZE (GET_MODE (testreg))
4545 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
4546 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
4548 /* Modifying a single register in an alternate mode
4549 does not use any of the old value. But these other
4550 ways of storing in a register do use the old value. */
4551 if (GET_CODE (testreg) == SUBREG
4552 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4557 testreg = XEXP (testreg, 0);
4560 /* If this is a store into a register,
4561 recursively scan the value being stored. */
4563 if ((GET_CODE (testreg) == PARALLEL
4564 && GET_MODE (testreg) == BLKmode)
4565 || (GET_CODE (testreg) == REG
4566 && (regno = REGNO (testreg), ! (regno == FRAME_POINTER_REGNUM
4567 && (! reload_completed || frame_pointer_needed)))
4568 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4569 && ! (regno == HARD_FRAME_POINTER_REGNUM
4570 && (! reload_completed || frame_pointer_needed))
4572 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4573 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4576 /* We used to exclude global_regs here, but that seems wrong.
4577 Storing in them is like storing in mem. */
4579 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4581 mark_used_regs (needed, live, SET_DEST (x), flags, insn);
4588 /* ??? This info should have been gotten from mark_regs_live_at_end,
4589 as applied to the EXIT block, and propagated along the edge that
4590 connects this block to the EXIT. */
4594 case UNSPEC_VOLATILE:
4598 /* Traditional and volatile asm instructions must be considered to use
4599 and clobber all hard registers, all pseudo-registers and all of
4600 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
4602 Consider for instance a volatile asm that changes the fpu rounding
4603 mode. An insn should not be moved across this even if it only uses
4604 pseudo-regs because it might give an incorrectly rounded result.
4606 ?!? Unfortunately, marking all hard registers as live causes massive
4607 problems for the register allocator and marking all pseudos as live
4608 creates mountains of uninitialized variable warnings.
4610 So for now, just clear the memory set list and mark any regs
4611 we can find in ASM_OPERANDS as used. */
4612 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
4613 free_EXPR_LIST_list (&mem_set_list);
4615 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
4616 We can not just fall through here since then we would be confused
4617 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
4618 traditional asms unlike their normal usage. */
4619 if (code == ASM_OPERANDS)
4623 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
4624 mark_used_regs (needed, live, ASM_OPERANDS_INPUT (x, j),
/* Default: generic recursion over the rtx's operand format string.  */
4635 /* Recursively scan the operands of this expression. */
4638 register const char *fmt = GET_RTX_FORMAT (code);
4641 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4645 /* Tail recursive case: save a function call level. */
4651 mark_used_regs (needed, live, XEXP (x, i), flags, insn);
4653 else if (fmt[i] == 'E')
4656 for (j = 0; j < XVECLEN (x, i); j++)
4657 mark_used_regs (needed, live, XVECEXP (x, i, j), flags, insn);
/* NOTE(review): the header comment and declarations for this function are
   omitted from this listing.  INSN appears to be an add of a constant to a
   register (reg = reg +/- const) -- verify the caller's contract.  */
4666 try_pre_increment_1 (insn)
4669 /* Find the next use of this reg. If in same basic block,
4670 make it do pre-increment or pre-decrement if appropriate. */
4671 rtx x = single_set (insn);
/* AMOUNT is the signed increment: positive for PLUS, negative otherwise.  */
4672 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4673 * INTVAL (XEXP (SET_SRC (x), 1)));
4674 int regno = REGNO (SET_DEST (x));
4675 rtx y = reg_next_use[regno];
4677 && BLOCK_NUM (y) == BLOCK_NUM (insn)
4678 /* Don't do this if the reg dies, or gets set in y; a standard addressing
4679 mode would be better. */
4680 && ! dead_or_set_p (y, SET_DEST (x))
4681 && try_pre_increment (y, SET_DEST (x), amount))
4683 /* We have found a suitable auto-increment
4684 and already changed insn Y to do it.
4685 So flush this increment-instruction. */
4686 PUT_CODE (insn, NOTE);
4687 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4688 NOTE_SOURCE_FILE (insn) = 0;
4689 /* Count a reference to this reg for the increment
4690 insn we are deleting. When a reg is incremented,
4691 spilling it is worse, so we want to make that
4693 if (regno >= FIRST_PSEUDO_REGISTER)
4695 REG_N_REFS (regno) += loop_depth;
4696 REG_N_SETS (regno)++;
4703 /* Try to change INSN so that it does pre-increment or pre-decrement
4704 addressing on register REG in order to add AMOUNT to REG.
4705 AMOUNT is negative for pre-decrement.
4706 Returns 1 if the change could be made.
4707 This checks all about the validity of the result of modifying INSN. */
/* NOTE(review): this listing omits interleaved lines (declarations of
   pre_ok/post_ok/do_post, braces, early returns).  */
4710 try_pre_increment (insn, reg, amount)
4712 HOST_WIDE_INT amount;
4716 /* Nonzero if we can try to make a pre-increment or pre-decrement.
4717 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
4719 /* Nonzero if we can try to make a post-increment or post-decrement.
4720 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4721 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4722 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
4725 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
4728 /* From the sign of increment, see which possibilities are conceivable
4729 on this target machine. */
4730 if (HAVE_PRE_INCREMENT && amount > 0)
4732 if (HAVE_POST_INCREMENT && amount > 0)
4735 if (HAVE_PRE_DECREMENT && amount < 0)
4737 if (HAVE_POST_DECREMENT && amount < 0)
4740 if (! (pre_ok || post_ok))
4743 /* It is not safe to add a side effect to a jump insn
4744 because if the incremented register is spilled and must be reloaded
4745 there would be no way to store the incremented value back in memory. */
4747 if (GET_CODE (insn) == JUMP_INSN)
/* Look first for a plain (mem (reg)) use, which suits post-modify; if
   that fails and post-modify is allowed, look for the pre-modify form
   (mem (plus reg -amount)).  */
4752 use = find_use_as_address (PATTERN (insn), reg, 0);
4753 if (post_ok && (use == 0 || use == (rtx) 1))
4755 use = find_use_as_address (PATTERN (insn), reg, -amount);
/* find_use_as_address returns 0 for "no such use" and (rtx) 1 for
   "used unsuitably or more than once" -- both mean failure here.  */
4759 if (use == 0 || use == (rtx) 1)
4762 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4765 /* See if this combination of instruction and addressing mode exists. */
4766 if (! validate_change (insn, &XEXP (use, 0),
4767 gen_rtx_fmt_e (amount > 0
4768 ? (do_post ? POST_INC : PRE_INC)
4769 : (do_post ? POST_DEC : PRE_DEC),
4773 /* Record that this insn now has an implicit side effect on X. */
4774 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn))
4780 /* Find the place in the rtx X where REG is used as a memory address.
4781 Return the MEM rtx that so uses it.
4782 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4783 (plus REG (const_int PLUSCONST)).
4785 If such an address does not appear, return 0.
4786 If REG appears more than once, or is used other than in such an address,
/* ... presumably "return (rtx) 1" -- the tail of this comment and the
   function header lines are omitted from this listing.  */
4790 find_use_as_address (x, reg, plusconst)
4793 HOST_WIDE_INT plusconst;
4795 enum rtx_code code = GET_CODE (x);
4796 const char *fmt = GET_RTX_FORMAT (code);
4798 register rtx value = 0;
/* (mem (reg)) with no offset required: direct hit.  */
4801 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
/* (mem (plus (reg) (const_int PLUSCONST))): offset hit.  */
4804 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4805 && XEXP (XEXP (x, 0), 0) == reg
4806 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4807 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4810 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4812 /* If REG occurs inside a MEM used in a bit-field reference,
4813 that is unacceptable. */
4814 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4815 return (rtx) (HOST_WIDE_INT) 1;
/* Any other bare occurrence of REG is an unsuitable use.  */
4819 return (rtx) (HOST_WIDE_INT) 1;
/* Recurse over the operands; more than one qualifying use of REG,
   or any unsuitable use below, propagates failure ((rtx) 1).  */
4821 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4825 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4829 return (rtx) (HOST_WIDE_INT) 1;
4834 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4836 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4840 return (rtx) (HOST_WIDE_INT) 1;
4848 /* Write information about registers and basic blocks into FILE.
4849 This is part of making a debugging dump. */
/* NOTE(review): this listing omits interleaved lines (parameter and local
   declarations, braces); comments describe intent only.  */
4852 dump_flow_info (file)
4856 static const char * const reg_class_names[] = REG_CLASS_NAMES;
4858 fprintf (file, "%d registers.\n", max_regno);
/* One line of statistics per pseudo register that is actually used.  */
4859 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4862 enum reg_class class, altclass;
4863 fprintf (file, "\nRegister %d used %d times across %d insns",
4864 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
4865 if (REG_BASIC_BLOCK (i) >= 0)
4866 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
4868 fprintf (file, "; set %d time%s", REG_N_SETS (i),
4869 (REG_N_SETS (i) == 1) ? "" : "s");
4870 if (REG_USERVAR_P (regno_reg_rtx[i]))
4871 fprintf (file, "; user var");
4872 if (REG_N_DEATHS (i) != 1)
4873 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
4874 if (REG_N_CALLS_CROSSED (i) == 1)
4875 fprintf (file, "; crosses 1 call");
4876 else if (REG_N_CALLS_CROSSED (i))
4877 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
4878 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
4879 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
/* Report preferred/alternate register classes only when informative.  */
4880 class = reg_preferred_class (i);
4881 altclass = reg_alternate_class (i);
4882 if (class != GENERAL_REGS || altclass != ALL_REGS)
4884 if (altclass == ALL_REGS || class == ALL_REGS)
4885 fprintf (file, "; pref %s", reg_class_names[(int) class]);
4886 else if (altclass == NO_REGS)
4887 fprintf (file, "; %s or none", reg_class_names[(int) class]);
4889 fprintf (file, "; pref %s, else %s",
4890 reg_class_names[(int) class],
4891 reg_class_names[(int) altclass]);
4893 if (REGNO_POINTER_FLAG (i))
4894 fprintf (file, "; pointer");
4895 fprintf (file, ".\n");
/* Then dump each basic block: insn bounds, CFG edges, live regsets.  */
4898 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
4899 for (i = 0; i < n_basic_blocks; i++)
4901 register basic_block bb = BASIC_BLOCK (i);
4905 fprintf (file, "\nBasic block %d: first insn %d, last %d.\n",
4906 i, INSN_UID (bb->head), INSN_UID (bb->end));
4908 fprintf (file, "Predecessors: ");
4909 for (e = bb->pred; e ; e = e->pred_next)
4910 dump_edge_info (file, e, 0);
4912 fprintf (file, "\nSuccessors: ");
4913 for (e = bb->succ; e ; e = e->succ_next)
4914 dump_edge_info (file, e, 1);
4916 fprintf (file, "\nRegisters live at start:");
4917 if (bb->global_live_at_start)
4919 for (regno = 0; regno < max_regno; regno++)
4920 if (REGNO_REG_SET_P (bb->global_live_at_start, regno))
4921 fprintf (file, " %d", regno);
4924 fprintf (file, " n/a");
4926 fprintf (file, "\nRegisters live at end:");
4927 if (bb->global_live_at_end)
4929 for (regno = 0; regno < max_regno; regno++)
4930 if (REGNO_REG_SET_P (bb->global_live_at_end, regno))
4931 fprintf (file, " %d", regno);
4934 fprintf (file, " n/a");
/* Dump edge E to FILE: print the block on the far side of the edge (its
   dest when DO_SUCC is nonzero, else its src), then the names of any
   flag bits set on the edge.
   NOTE(review): this listing omits interleaved lines (the header comment,
   parameter declarations, braces).  */
4943 dump_edge_info (file, e, do_succ)
4948 basic_block side = (do_succ ? e->dest : e->src);
4950 if (side == ENTRY_BLOCK_PTR)
4951 fputs (" ENTRY", file);
4952 else if (side == EXIT_BLOCK_PTR)
4953 fputs (" EXIT", file);
4955 fprintf (file, " %d", side->index);
/* Names indexed by bit position within e->flags.  */
4959 static const char * const bitnames[] = {
4960 "fallthru", "crit", "ab", "abcall", "eh", "fake"
4963 int i, flags = e->flags;
/* Walk set bits low to high; flags is presumably cleared bit by bit
   inside the loop (the listing omits those lines).  */
4967 for (i = 0; flags; i++)
4968 if (flags & (1 << i))
/* Bits beyond the name table print as their numeric bit index.  */
4974 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
4975 fputs (bitnames[i], file);
4977 fprintf (file, "%d", i);
4985 /* Like print_rtl, but also print out live information for the start of each
   basic block, and annotate each insn with the basic block(s) it belongs
   to.  NOTE(review): several interior lines are elided in this excerpt.  */
4989 print_rtl_with_bb (outf, rtx_first)
4993 register rtx tmp_rtx;
4996 fprintf (outf, "(nil)\n");
/* Per-insn-UID classification: outside any block, in exactly one
   block, or (erroneously) in more than one.  */
5000 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5001 int max_uid = get_max_uid ();
/* START[uid]/END[uid] record which block, if any, begins/ends at the
   insn with that UID.  Allocated on the stack; sized by MAX_UID.  */
5002 basic_block *start = (basic_block *)
5003 alloca (max_uid * sizeof (basic_block));
5004 basic_block *end = (basic_block *)
5005 alloca (max_uid * sizeof (basic_block));
5006 enum bb_state *in_bb_p = (enum bb_state *)
5007 alloca (max_uid * sizeof (enum bb_state));
5009 memset (start, 0, max_uid * sizeof (basic_block));
5010 memset (end, 0, max_uid * sizeof (basic_block));
5011 memset (in_bb_p, 0, max_uid * sizeof (enum bb_state));
/* First pass: fill the three maps by walking every basic block.  */
5013 for (i = n_basic_blocks - 1; i >= 0; i--)
5015 basic_block bb = BASIC_BLOCK (i);
5018 start[INSN_UID (bb->head)] = bb;
5019 end[INSN_UID (bb->end)] = bb;
5020 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5022 enum bb_state state = IN_MULTIPLE_BB;
5023 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
5025 in_bb_p[INSN_UID(x)] = state;
/* Second pass: print each insn, with block-start/end banners and
   live-register info where a block begins.  */
5032 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5037 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5039 fprintf (outf, ";; Start of basic block %d, registers live:",
5042 EXECUTE_IF_SET_IN_REG_SET (bb->global_live_at_start, 0, i,
5044 fprintf (outf, " %d", i);
/* Hard registers also get their symbolic name.  */
5045 if (i < FIRST_PSEUDO_REGISTER)
5046 fprintf (outf, " [%s]",
/* Notes and barriers legitimately live outside blocks; anything else
   outside a block is worth flagging in the dump.  */
5052 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
5053 && GET_CODE (tmp_rtx) != NOTE
5054 && GET_CODE (tmp_rtx) != BARRIER
5056 fprintf (outf, ";; Insn is not within a basic block\n");
5057 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
5058 fprintf (outf, ";; Insn is in multiple basic blocks\n");
5060 did_output = print_rtl_single (outf, tmp_rtx);
5062 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
5063 fprintf (outf, ";; End of basic block %d\n", bb->index);
/* Epilogue delay-slot insns are not in the main chain; dump them
   separately.  */
5070 if (current_function_epilogue_delay_list != 0)
5072 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
5073 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
5074 tmp_rtx = XEXP (tmp_rtx, 1))
5075 print_rtl_single (outf, XEXP (tmp_rtx, 0));
5080 /* Integer list support. */
5082 /* Allocate a node from list *HEAD_PTR. */
5085 alloc_int_list_node (head_ptr)
5086 int_list_block **head_ptr;
5088 struct int_list_block *first_blk = *head_ptr;
/* Nodes are carved out of fixed-size blocks; grab a fresh block when
   there is none yet or the current one is exhausted.  */
5090 if (first_blk == NULL || first_blk->nodes_left <= 0)
5092 first_blk = (struct int_list_block *) xmalloc (sizeof (struct int_list_block));
5093 first_blk->nodes_left = INT_LIST_NODES_IN_BLK;
/* Push the new block onto the front of the block chain.  */
5094 first_blk->next = *head_ptr;
5095 *head_ptr = first_blk;
/* Hand out nodes from the back of the block toward the front.  */
5098 first_blk->nodes_left--;
5099 return &first_blk->nodes[first_blk->nodes_left];
5102 /* Pointer to head of predecessor/successor block list. */
5103 static int_list_block *pred_int_list_blocks;
5105 /* Add a new node to integer list LIST with value VAL.
5106 LIST is a pointer to a list object to allow for different implementations.
5107 If *LIST is initially NULL, the list is empty.
5108 The caller must not care whether the element is added to the front or
5109 to the end of the list (to allow for different implementations). */
5112 add_int_list_node (blk_list, list, val)
5113 int_list_block **blk_list;
/* Get storage for the node from the block allocator; the lines that
   fill in the node and link it are elided in this excerpt.  */
5117 int_list_ptr p = alloc_int_list_node (blk_list);
5125 /* Free the blocks of lists at BLK_LIST. */
5128 free_int_list (blk_list)
5129 int_list_block **blk_list;
5131 int_list_block *p, *next;
/* Save each block's successor before the block itself is released.  */
5133 for (p = *blk_list; p != NULL; p = next)
5139 /* Mark list as empty for the next function we compile. */
5143 /* Predecessor/successor computation. */
5145 /* Mark PRED_BB a predecessor of SUCC_BB,
5146 and conversely SUCC_BB a successor of PRED_BB. */
5149 add_pred_succ (pred_bb, succ_bb, s_preds, s_succs, num_preds, num_succs)
5152 int_list_ptr *s_preds;
5153 int_list_ptr *s_succs;
/* The exit block keeps no predecessor list of its own.  */
5157 if (succ_bb != EXIT_BLOCK)
5159 add_int_list_node (&pred_int_list_blocks, &s_preds[succ_bb], pred_bb);
5160 num_preds[succ_bb]++;
/* Likewise, the entry block keeps no successor list.  */
5162 if (pred_bb != ENTRY_BLOCK)
5164 add_int_list_node (&pred_int_list_blocks, &s_succs[pred_bb], succ_bb);
5165 num_succs[pred_bb]++;
5169 /* Convert edge lists into pred/succ lists for backward compatibility. */
5172 compute_preds_succs (s_preds, s_succs, num_preds, num_succs)
5173 int_list_ptr *s_preds;
5174 int_list_ptr *s_succs;
5178 int i, n = n_basic_blocks;
/* Start from empty lists and zero counts for every block.  */
5181 memset (s_preds, 0, n_basic_blocks * sizeof (int_list_ptr));
5182 memset (s_succs, 0, n_basic_blocks * sizeof (int_list_ptr));
5183 memset (num_preds, 0, n_basic_blocks * sizeof (int));
5184 memset (num_succs, 0, n_basic_blocks * sizeof (int));
/* Translate every successor edge of every real block.  */
5186 for (i = 0; i < n; ++i)
5188 basic_block bb = BASIC_BLOCK (i);
5190 for (e = bb->succ; e ; e = e->succ_next)
5191 add_pred_succ (i, e->dest->index, s_preds, s_succs,
5192 num_preds, num_succs);
/* The entry block's successors are not covered by the loop above.  */
5195 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
5196 add_pred_succ (ENTRY_BLOCK, e->dest->index, s_preds, s_succs,
5197 num_preds, num_succs);
/* Dump to FILE, for each basic block, its head/end insn UIDs, its
   predecessor and successor lists, and (when LIVE_INFO is set) the
   registers live at block start.  */
5201 dump_bb_data (file, preds, succs, live_info)
5203 int_list_ptr *preds;
5204 int_list_ptr *succs;
5210 fprintf (file, "BB data\n\n");
5211 for (bb = 0; bb < n_basic_blocks; bb++)
5213 fprintf (file, "BB %d, start %d, end %d\n", bb,
5214 INSN_UID (BLOCK_HEAD (bb)), INSN_UID (BLOCK_END (bb)));
5215 fprintf (file, " preds:");
5216 for (p = preds[bb]; p != NULL; p = p->next)
5218 int pred_bb = INT_LIST_VAL (p);
/* The entry pseudo-block prints symbolically, real blocks by index.  */
5219 if (pred_bb == ENTRY_BLOCK)
5220 fprintf (file, " entry");
5222 fprintf (file, " %d", pred_bb);
5224 fprintf (file, "\n");
5225 fprintf (file, " succs:");
5226 for (p = succs[bb]; p != NULL; p = p->next)
5228 int succ_bb = INT_LIST_VAL (p);
/* Likewise for the exit pseudo-block.  */
5229 if (succ_bb == EXIT_BLOCK)
5230 fprintf (file, " exit");
5232 fprintf (file, " %d", succ_bb);
/* Live-at-start registers, printed as raw register numbers.  */
5237 fprintf (file, "\nRegisters live at start:");
5238 for (regno = 0; regno < max_regno; regno++)
5239 if (REGNO_REG_SET_P (BASIC_BLOCK (bb)->global_live_at_start, regno))
5240 fprintf (file, " %d", regno);
5241 fprintf (file, "\n");
5243 fprintf (file, "\n");
5245 fprintf (file, "\n");
5248 /* Free basic block data storage. */
/* NOTE(review): the enclosing function's header is elided in this
   excerpt; this releases the shared pred/succ integer-list blocks.  */
5253 free_int_list (&pred_int_list_blocks);
5256 /* Compute dominator relationships. */
/* Iterative dataflow: DOMINATORS and POST_DOMINATORS are per-block
   bitmaps, refined until a fixed point using the pred/succ lists
   S_PREDS/S_SUCCS.  */
5258 compute_dominators (dominators, post_dominators, s_preds, s_succs)
5259 sbitmap *dominators;
5260 sbitmap *post_dominators;
5261 int_list_ptr *s_preds;
5262 int_list_ptr *s_succs;
5264 int bb, changed, passes;
5265 sbitmap *temp_bitmap;
/* Start with "everything dominates everything" and intersect down.  */
5267 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5268 sbitmap_vector_ones (dominators, n_basic_blocks);
5269 sbitmap_vector_ones (post_dominators, n_basic_blocks);
5270 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
/* Block 0 is dominated only by itself.  */
5272 sbitmap_zero (dominators[0]);
5273 SET_BIT (dominators[0], 0);
/* NOTE(review): the last block's post-dominator set gets bit 0 here,
   not its own index — looks suspicious but is preserved as found;
   confirm against the repository history before changing.  */
5275 sbitmap_zero (post_dominators[n_basic_blocks - 1]);
5276 SET_BIT (post_dominators[n_basic_blocks - 1], 0);
/* Refine each block's set as the intersection of its predecessors'
   sets plus itself; CHANGED tracks convergence.  */
5283 for (bb = 1; bb < n_basic_blocks; bb++)
5285 sbitmap_intersect_of_predecessors (temp_bitmap[bb], dominators,
5287 SET_BIT (temp_bitmap[bb], bb);
5288 changed |= sbitmap_a_and_b (dominators[bb],
/* Post-dominators use the dual computation over successors.  */
5291 sbitmap_intersect_of_successors (temp_bitmap[bb], post_dominators,
5293 SET_BIT (temp_bitmap[bb], bb);
5294 changed |= sbitmap_a_and_b (post_dominators[bb],
5295 post_dominators[bb],
5304 /* Compute dominator relationships using new flow graph structures. */
/* Same iterative algorithm as compute_dominators, but walking the
   edge-based CFG (sbitmap_intersection_of_preds/succs) instead of
   the compatibility pred/succ lists.  */
5306 compute_flow_dominators (dominators, post_dominators)
5307 sbitmap *dominators;
5308 sbitmap *post_dominators;
5310 int bb, changed, passes;
5311 sbitmap *temp_bitmap;
/* Start from the universal set and intersect down to a fixed point.  */
5313 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5314 sbitmap_vector_ones (dominators, n_basic_blocks);
5315 sbitmap_vector_ones (post_dominators, n_basic_blocks);
5316 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
/* Block 0 is dominated only by itself.  */
5318 sbitmap_zero (dominators[0]);
5319 SET_BIT (dominators[0], 0);
/* NOTE(review): as in compute_dominators, bit 0 (not the block's own
   index) is set in the last block's post-dominator set — preserved as
   found; verify before changing.  */
5321 sbitmap_zero (post_dominators[n_basic_blocks - 1]);
5322 SET_BIT (post_dominators[n_basic_blocks - 1], 0);
5329 for (bb = 1; bb < n_basic_blocks; bb++)
5331 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
5332 SET_BIT (temp_bitmap[bb], bb);
5333 changed |= sbitmap_a_and_b (dominators[bb],
5336 sbitmap_intersection_of_succs (temp_bitmap[bb], post_dominators, bb);
5337 SET_BIT (temp_bitmap[bb], bb);
5338 changed |= sbitmap_a_and_b (post_dominators[bb],
5339 post_dominators[bb],
5348 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
5351 compute_immediate_dominators (idom, dominators)
5353 sbitmap *dominators;
5358 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5360 /* Begin with tmp(n) = dom(n) - { n }. */
5361 for (b = n_basic_blocks; --b >= 0; )
5363 sbitmap_copy (tmp[b], dominators[b]);
5364 RESET_BIT (tmp[b], b);
5367 /* Subtract out all of our dominator's dominators. */
5368 for (b = n_basic_blocks; --b >= 0; )
5370 sbitmap tmp_b = tmp[b];
/* For every strict dominator S of B, remove S's own strict
   dominators; what survives is B's immediate dominator.  */
5373 for (s = n_basic_blocks; --s >= 0; )
5374 if (TEST_BIT (tmp_b, s))
5375 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
5378 /* Find the one bit set in the bitmap and put it in the output array. */
5379 for (b = n_basic_blocks; --b >= 0; )
5382 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
5385 sbitmap_vector_free (tmp);
5388 /* Count for a single SET rtx, X. */
5391 count_reg_sets_1 (x)
5395 register rtx reg = SET_DEST (x);
5397 /* Find the register that's set/clobbered. */
5398 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
5399 || GET_CODE (reg) == SIGN_EXTRACT
5400 || GET_CODE (reg) == STRICT_LOW_PART)
5401 reg = XEXP (reg, 0);
/* A BLKmode PARALLEL destination sets several registers at once;
   count each element recursively.  */
5403 if (GET_CODE (reg) == PARALLEL
5404 && GET_MODE (reg) == BLKmode)
5407 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
5408 count_reg_sets_1 (XVECEXP (reg, 0, i));
/* Only pseudo registers are tracked; hard-register sets are not
   counted here.  */
5412 if (GET_CODE (reg) == REG)
5414 regno = REGNO (reg);
5415 if (regno >= FIRST_PSEUDO_REGISTER)
5417 /* Count (weighted) references, stores, etc. This counts a
5418 register twice if it is modified, but that is correct. */
5419 REG_N_SETS (regno)++;
/* References are weighted by the current loop nesting depth.  */
5421 REG_N_REFS (regno) += loop_depth;
5426 /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
5427 REG_N_REFS by the current loop depth for each SET or CLOBBER found. */
/* NOTE(review): the function header line is elided in this excerpt.  */
5433 register RTX_CODE code = GET_CODE (x);
5435 if (code == SET || code == CLOBBER)
5436 count_reg_sets_1 (x);
/* A PARALLEL may contain several SETs/CLOBBERs; count each one.  */
5437 else if (code == PARALLEL)
5440 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5442 code = GET_CODE (XVECEXP (x, 0, i));
5443 if (code == SET || code == CLOBBER)
5444 count_reg_sets_1 (XVECEXP (x, 0, i));
5449 /* Increment REG_N_REFS by the current loop depth each register reference
   found in X.  NOTE(review): the switch statement and several case
   labels of this function are elided in this excerpt.  */
5453 count_reg_references (x)
5456 register RTX_CODE code;
5459 code = GET_CODE (x);
5479 /* If we are clobbering a MEM, mark any registers inside the address
   as referenced.  */
5481 if (GET_CODE (XEXP (x, 0)) == MEM)
5482 count_reg_references (XEXP (XEXP (x, 0), 0));
5486 /* While we're here, optimize this case. */
5489 /* In case the SUBREG is not of a register, don't optimize */
5490 if (GET_CODE (x) != REG)
5492 count_reg_references (x);
5496 /* ... fall through ... */
/* Only pseudo registers are tracked.  */
5499 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
5500 REG_N_REFS (REGNO (x)) += loop_depth;
5505 register rtx testreg = SET_DEST (x);
5508 /* If storing into MEM, don't show it as being used. But do
5509 show the address as being used. */
5510 if (GET_CODE (testreg) == MEM)
5512 count_reg_references (XEXP (testreg, 0));
5513 count_reg_references (SET_SRC (x));
5517 /* Storing in STRICT_LOW_PART is like storing in a reg
5518 in that this SET might be dead, so ignore it in TESTREG.
5519 but in some other ways it is like using the reg.
5521 Storing in a SUBREG or a bit field is like storing the entire
5522 register in that if the register's value is not used
5523 then this SET is not needed. */
5524 while (GET_CODE (testreg) == STRICT_LOW_PART
5525 || GET_CODE (testreg) == ZERO_EXTRACT
5526 || GET_CODE (testreg) == SIGN_EXTRACT
5527 || GET_CODE (testreg) == SUBREG)
5529 /* Modifying a single register in an alternate mode
5530 does not use any of the old value. But these other
5531 ways of storing in a register do use the old value. */
5532 if (GET_CODE (testreg) == SUBREG
5533 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
/* Strip one wrapper and keep looking at what is underneath.  */
5538 testreg = XEXP (testreg, 0);
5541 /* If this is a store into a register,
5542 recursively scan the value being stored. */
5544 if ((GET_CODE (testreg) == PARALLEL
5545 && GET_MODE (testreg) == BLKmode)
5546 || GET_CODE (testreg) == REG)
5548 count_reg_references (SET_SRC (x));
5550 count_reg_references (SET_DEST (x));
5560 /* Recursively scan the operands of this expression. */
5563 register const char *fmt = GET_RTX_FORMAT (code);
5566 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5570 /* Tail recursive case: save a function call level. */
5576 count_reg_references (XEXP (x, i));
/* 'E' operands are rtx vectors; scan every element.  */
5578 else if (fmt[i] == 'E')
5581 for (j = 0; j < XVECLEN (x, i); j++)
5582 count_reg_references (XVECEXP (x, i, j));
5588 /* Recompute register set/reference counts immediately prior to register
   allocation.
5591 This avoids problems with set/reference counts changing to/from values
5592 which have special meanings to the register allocators.
5594 Additionally, the reference counts are the primary component used by the
5595 register allocators to prioritize pseudos for allocation to hard regs.
5596 More accurate reference counts generally lead to better register allocation.
5598 F is the first insn to be scanned.
5599 LOOP_STEP denotes how much loop_depth should be incremented per
5600 loop nesting level in order to increase the ref count more for references
   in deeper loops.
5603 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
5604 possibly other information which is used by the register allocators. */
5607 recompute_reg_usage (f, loop_step)
5614 /* Clear out the old data. */
5615 max_reg = max_reg_num ();
5616 for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
5622 /* Scan each insn in the chain and count how many times each register is
   set and referenced.  */
5625 for (insn = f; insn; insn = NEXT_INSN (insn))
5627 /* Keep track of loop depth. */
5628 if (GET_CODE (insn) == NOTE)
5630 /* Look for loop boundaries. */
5631 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
5632 loop_depth -= loop_step;
5633 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
5634 loop_depth += loop_step;
5636 /* If we have LOOP_DEPTH == 0, there has been a bookkeeping error.
5637 Abort now rather than setting register status incorrectly. */
5638 if (loop_depth == 0)
/* Real insns ('i' class) are the only things that set or use regs.  */
5641 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5645 /* This call will increment REG_N_SETS for each SET or CLOBBER
5646 of a register in INSN. It will also increment REG_N_REFS
5647 by the loop depth for each set of a register in INSN. */
5648 count_reg_sets (PATTERN (insn));
5650 /* count_reg_sets does not detect autoincrement address modes, so
5651 detect them here by looking at the notes attached to INSN. */
5652 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
5654 if (REG_NOTE_KIND (links) == REG_INC)
5655 /* Count (weighted) references, stores, etc. This counts a
5656 register twice if it is modified, but that is correct. */
5657 REG_N_SETS (REGNO (XEXP (links, 0)))++;
5660 /* This call will increment REG_N_REFS by the current loop depth for
5661 each reference to a register in INSN. */
5662 count_reg_references (PATTERN (insn));
5664 /* count_reg_references will not include counts for arguments to
5665 function calls, so detect them here by examining the
5666 CALL_INSN_FUNCTION_USAGE data. */
5667 if (GET_CODE (insn) == CALL_INSN)
5671 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5673 note = XEXP (note, 1))
5674 if (GET_CODE (XEXP (note, 0)) == USE)
5675 count_reg_references (XEXP (XEXP (note, 0), 0));
5681 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
5682 blocks. Returns a count of the number of registers that died.
   BLOCKS selects which basic blocks to scan; when KILL is nonzero the
   notes are unlinked and freed, otherwise they are only counted.  */
5685 count_or_remove_death_notes (blocks, kill)
/* Walk the selected blocks from last to first.  */
5691 for (i = n_basic_blocks - 1; i >= 0; --i)
5696 if (! TEST_BIT (blocks, i))
5699 bb = BASIC_BLOCK (i);
5701 for (insn = bb->head; ; insn = NEXT_INSN (insn))
5703 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
/* Fixed: "®_NOTES" was mojibake for "&REG_NOTES" ("&reg" had been
   turned into the HTML entity for the registered-trademark sign).
   PPREV tracks the link slot so notes can be unspliced in place.  */
5705 rtx *pprev = &REG_NOTES (insn);
5710 switch (REG_NOTE_KIND (link))
5713 if (GET_CODE (XEXP (link, 0)) == REG)
5715 rtx reg = XEXP (link, 0);
/* A pseudo counts as one death; a hard register counts once per
   hard register it occupies.  */
5718 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
5721 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
/* KILL mode: unsplice the note and free it, keeping PPREV valid.  */
5729 rtx next = XEXP (link, 1);
5730 free_EXPR_LIST_node (link);
5731 *pprev = link = next;
5737 pprev = &XEXP (link, 1);
/* The block's end insn terminates the scan for this block.  */
5744 if (insn == bb->end)
5752 /* Record INSN's block as BB. */
5755 set_block_for_insn (insn, bb)
5759 size_t uid = INSN_UID (insn);
/* Grow the uid->block map on demand when a new insn exceeds it.  */
5760 if (uid >= basic_block_for_insn->num_elements)
5764 /* Add one-eighth the size so we don't keep calling xrealloc. */
5765 new_size = uid + (uid + 7) / 8;
5767 VARRAY_GROW (basic_block_for_insn, new_size);
5769 VARRAY_BB (basic_block_for_insn, uid) = bb;
5772 /* Record INSN's block number as BB. */
5773 /* ??? This has got to go. */
/* Thin compatibility wrapper over set_block_for_insn, taking a block
   index instead of a basic_block pointer.  */
5776 set_block_num (insn, bb)
5780 set_block_for_insn (insn, BASIC_BLOCK (bb));
5783 /* Verify the CFG consistency. This function check some CFG invariants and
5784 aborts when something is wrong. Hope that this function will help to
5785 convert many optimization passes to preserve CFG consistent.
5787 Currently it does following checks:
5789 - test head/end pointers
5790 - overlapping of basic blocks
5791 - edge list correctness
5792 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
5793 - tails of basic blocks (ensure that boundary is necessary)
5794 - scans body of the basic block for JUMP_INSN, CODE_LABEL
5795 and NOTE_INSN_BASIC_BLOCK
5796 - check that all insns are in the basic blocks
5797 (except the switch handling code, barriers and notes)
5799 In future it can be extended check a lot of other stuff as well
5800 (reachability of basic blocks, life information, etc. etc.). */
5805 const int max_uid = get_max_uid ();
5806 const rtx rtx_first = get_insns ();
5807 basic_block *bb_info;
/* Stack-allocated uid->block map, zeroed so "no block" is NULL.  */
5811 bb_info = (basic_block *) alloca (max_uid * sizeof (basic_block));
5812 memset (bb_info, 0, max_uid * sizeof (basic_block));
5814 /* First pass check head/end pointers and set bb_info array used by
   the later passes.  */
5816 for (i = n_basic_blocks - 1; i >= 0; i--)
5818 basic_block bb = BASIC_BLOCK (i);
5820 /* Check the head pointer and make sure that it is pointing into
   the insn stream.  */
5822 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
5827 error ("Head insn %d for block %d not found in the insn stream.",
5828 INSN_UID (bb->head), bb->index);
5832 /* Check the end pointer and make sure that it is pointing into
   the insn stream; also detect insns claimed by two blocks.  */
5834 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5836 if (bb_info[INSN_UID (x)] != NULL)
5838 error ("Insn %d is in multiple basic blocks (%d and %d)",
5839 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
5842 bb_info[INSN_UID (x)] = bb;
5849 error ("End insn %d for block %d not found in the insn stream.",
5850 INSN_UID (bb->end), bb->index);
5855 /* Now check the basic blocks (boundaries etc.) */
5856 for (i = n_basic_blocks - 1; i >= 0; i--)
5858 basic_block bb = BASIC_BLOCK (i);
5859 /* Check correctness of edge lists */
5867 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
5869 fprintf (stderr, "Predecessor: ");
5870 dump_edge_info (stderr, e, 0);
5871 fprintf (stderr, "\nSuccessor: ");
5872 dump_edge_info (stderr, e, 1);
/* Every successor edge must appear in the destination's pred list.  */
5876 if (e->dest != EXIT_BLOCK_PTR)
5878 edge e2 = e->dest->pred;
5879 while (e2 && e2 != e)
5883 error ("Basic block %i edge lists are corrupted", bb->index);
5895 error ("Basic block %d pred edge is corrupted", bb->index);
5896 fputs ("Predecessor: ", stderr);
5897 dump_edge_info (stderr, e, 0);
5898 fputs ("\nSuccessor: ", stderr);
5899 dump_edge_info (stderr, e, 1);
5900 fputc ('\n', stderr);
/* Every predecessor edge must appear in the source's succ list.  */
5903 if (e->src != ENTRY_BLOCK_PTR)
5905 edge e2 = e->src->succ;
5906 while (e2 && e2 != e)
5910 error ("Basic block %i edge lists are corrupted", bb->index);
5917 /* OK pointers are correct. Now check the header of basic
5918 block. It ought to contain optional CODE_LABEL followed
5919 by NOTE_BASIC_BLOCK. */
5921 if (GET_CODE (x) == CODE_LABEL)
5925 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
   bb->index);
5931 if (GET_CODE (x) != NOTE
5932 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
5933 || NOTE_BASIC_BLOCK (x) != bb)
5935 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
   bb->index);
5942 /* Do checks for empty blocks here */
/* The block-begin note must not recur inside the block body.  */
5949 if (GET_CODE (x) == NOTE
5950 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
5952 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
5953 INSN_UID (x), bb->index);
/* Control-flow insns may only appear at a block boundary.  */
5960 if (GET_CODE (x) == JUMP_INSN
5961 || GET_CODE (x) == CODE_LABEL
5962 || GET_CODE (x) == BARRIER)
5964 error ("In basic block %d:", bb->index);
5965 fatal_insn ("Flow control insn inside a basic block", x);
/* Finally, verify that insns outside all blocks are of kinds that are
   allowed to live there.  */
5976 if (!bb_info[INSN_UID (x)])
5978 switch (GET_CODE (x))
5985 /* An addr_vec is placed outside any basic block. */
5987 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
5988 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
5989 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
5994 /* But in any case, non-deletable labels can appear anywhere. */
5998 fatal_insn ("Insn outside basic block", x);
6009 /* Functions to access an edge list with a vector representation.
6010 Enough data is kept such that given an index number, the
6011 pred and succ that edge represents can be determined, or
6012 given a pred and a succ, its index number can be returned.
6013 This allows algorithms which consume a lot of memory to
6014 represent the normally full matrix of edge (pred,succ) with a
6015 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6016 wasted space in the client code due to sparse flow graphs. */
6018 /* This function initializes the edge list. Basically the entire
6019 flowgraph is processed, and all edges are assigned a number,
6020 and the data structure is filled in. */
6024 struct edge_list *elist;
/* +2 accounts for the entry and exit pseudo-blocks.  */
6030 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6034 /* Determine the number of edges in the flow graph by counting successor
6035 edges on each basic block. */
6036 for (x = 0; x < n_basic_blocks; x++)
6038 basic_block bb = BASIC_BLOCK (x);
6040 for (e = bb->succ; e; e = e->succ_next)
6043 /* Don't forget successors of the entry block. */
6044 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6047 elist = xmalloc (sizeof (struct edge_list));
6048 elist->num_blocks = block_count;
6049 elist->num_edges = num_edges;
6050 elist->index_to_edge = xmalloc (sizeof (edge) * num_edges);
6054 /* Follow successors of the entry block, and register these edges. */
6055 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6057 elist->index_to_edge[num_edges] = e;
/* Then number every real block's successor edges in block order.  */
6061 for (x = 0; x < n_basic_blocks; x++)
6063 basic_block bb = BASIC_BLOCK (x);
6065 /* Follow all successors of blocks, and register these edges. */
6066 for (e = bb->succ; e; e = e->succ_next)
6068 elist->index_to_edge[num_edges] = e;
6075 /* This function frees memory associated with an edge list. */
6077 free_edge_list (elist)
6078 struct edge_list *elist;
/* Release the index vector; the list structure itself is freed in a
   line elided from this excerpt.  */
6082 free (elist->index_to_edge);
6087 /* This function provides debug output showing an edge list. */
6089 print_edge_list (f, elist)
6091 struct edge_list *elist;
6094 fprintf(f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6095 elist->num_blocks - 2, elist->num_edges);
/* One line per edge: index, then (pred,succ), with the entry and exit
   pseudo-blocks printed symbolically.  */
6097 for (x = 0; x < elist->num_edges; x++)
6099 fprintf (f, " %-4d - edge(", x);
6100 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6101 fprintf (f,"entry,");
6103 fprintf (f,"%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6105 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6106 fprintf (f,"exit)\n");
6108 fprintf (f,"%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6112 /* This function provides an internal consistency check of an edge list,
6113 verifying that all edges are present, and that there are no
   spurious edges.  */
6116 verify_edge_list (f, elist)
6118 struct edge_list *elist;
6120 int x, pred, succ, index;
/* Forward check: every successor edge in the CFG must have an index,
   and that index must map back to the same pred/succ pair.  */
6123 for (x = 0; x < n_basic_blocks; x++)
6125 basic_block bb = BASIC_BLOCK (x);
6127 for (e = bb->succ; e; e = e->succ_next)
6129 pred = e->src->index;
6130 succ = e->dest->index;
6131 index = EDGE_INDEX (elist, e->src, e->dest);
6132 if (index == EDGE_INDEX_NO_EDGE)
6134 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6137 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6138 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6139 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6140 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6141 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6142 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
/* Same check for the entry block's successor edges.  */
6145 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6147 pred = e->src->index;
6148 succ = e->dest->index;
6149 index = EDGE_INDEX (elist, e->src, e->dest);
6150 if (index == EDGE_INDEX_NO_EDGE)
6152 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6155 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6156 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6157 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6158 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6159 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6160 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6162 /* We've verified that all the edges are in the list, now let's make sure
6163 there are no spurious edges in the list. */
/* Reverse check over every (pred,succ) pair of real blocks: the index
   map must agree with whether an edge actually exists in the CFG.  */
6165 for (pred = 0 ; pred < n_basic_blocks; pred++)
6166 for (succ = 0 ; succ < n_basic_blocks; succ++)
6168 basic_block p = BASIC_BLOCK (pred);
6169 basic_block s = BASIC_BLOCK (succ);
6173 for (e = p->succ; e; e = e->succ_next)
6179 for (e = s->pred; e; e = e->pred_next)
6185 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6186 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6187 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
   pred, succ);
6189 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6190 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6191 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6192 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
   BASIC_BLOCK (succ)));
/* Same reverse check for edges leaving the entry block...  */
6195 for (succ = 0 ; succ < n_basic_blocks; succ++)
6197 basic_block p = ENTRY_BLOCK_PTR;
6198 basic_block s = BASIC_BLOCK (succ);
6202 for (e = p->succ; e; e = e->succ_next)
6208 for (e = s->pred; e; e = e->pred_next)
6214 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6215 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6216 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
   succ);
6218 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6219 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6220 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6221 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6222 BASIC_BLOCK (succ)));
/* ... and for edges reaching the exit block.  */
6224 for (pred = 0 ; pred < n_basic_blocks; pred++)
6226 basic_block p = BASIC_BLOCK (pred);
6227 basic_block s = EXIT_BLOCK_PTR;
6231 for (e = p->succ; e; e = e->succ_next)
6237 for (e = s->pred; e; e = e->pred_next)
6243 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6244 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6245 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
   pred);
6247 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6248 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6249 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6250 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
   EXIT_BLOCK_PTR));
6255 /* This routine will determine what, if any, edge there is between
6256 a specified predecessor and successor.
   Linear scan of the edge list; returns the edge's index, or
   EDGE_INDEX_NO_EDGE when no such edge exists.  */
6259 find_edge_index (edge_list, pred, succ)
6260 struct edge_list *edge_list;
6261 basic_block pred, succ;
6264 for (x = 0; x < NUM_EDGES (edge_list); x++)
6266 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6267 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6270 return (EDGE_INDEX_NO_EDGE);
6273 /* This function will remove an edge from the flow graph.
   The edge is unspliced from both its source's successor list and its
   destination's predecessor list.  */
6278 edge last_pred = NULL;
6279 edge last_succ = NULL;
6281 basic_block src, dest;
/* Find the edge's position in the source's successor list.  */
6284 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6290 last_succ->succ_next = e->succ_next;
6292 src->succ = e->succ_next;
/* Find the edge's position in the destination's predecessor list.  */
6294 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6300 last_pred->pred_next = e->pred_next;
6302 dest->pred = e->pred_next;
6308 /* This routine will remove any fake successor edges for a basic block.
6309 When the edge is removed, it is also removed from whatever predecessor
   list it is in.  */
6312 remove_fake_successors (bb)
/* Advance before testing, since removing the edge invalidates it.  */
6316 for (e = bb->succ; e ; )
6320 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6325 /* This routine will remove all fake edges from the flow graph. If
6326 we remove all fake successors, it will automatically remove all
6327 fake predecessors. */
6329 remove_fake_edges ()
6333 for (x = 0; x < n_basic_blocks; x++)
6334 remove_fake_successors (BASIC_BLOCK (x));
6336 /* We've handled all successors except the entry block's. */
6337 remove_fake_successors (ENTRY_BLOCK_PTR);
6340 /* This function will add a fake edge between any block which has no
6341 successors, and the exit block. Some data flow equations require these
   edges to exist.  */
6344 add_noreturn_fake_exit_edges ()
/* Only blocks with an empty successor list (e.g. ending in a noreturn
   call) get the fake edge.  */
6348 for (x = 0; x < n_basic_blocks; x++)
6349 if (BASIC_BLOCK (x)->succ == NULL)
6350 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);