/* Data flow analysis for GNU compiler.
   Copyright (C) 1987, 88, 92-99, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This file contains the data flow analysis pass of the compiler.  It
   computes data flow information which tells combine_instructions
   which insns to consider combining and controls register allocation.

   Additional data flow information that is too bulky to record is
   generated during the analysis, and is used at that time to create
   autoincrement and autodecrement addressing.

   The first step is dividing the function into basic blocks.
   find_basic_blocks does this.  Then life_analysis determines
   where each register is live and where it is dead.

   ** find_basic_blocks **

   find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.

   find_basic_blocks also finds any unreachable loops and deletes them.

   ** life_analysis **

   life_analysis is called immediately after find_basic_blocks.
   It uses the basic block information to determine where each
   hard or pseudo register is live.

   ** live-register info **

   The information about where each register is live is in two parts:
   the REG_NOTES of insns, and the vector basic_block->global_live_at_start.

   Each basic block's global_live_at_start is a bit-vector with a bit
   for each hard or pseudo register.  The bit is 1 if the register is
   live at the beginning of the basic block.
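
   For instance, given a block BB and a pseudo REG, liveness on entry
   can be tested with the regset accessor from basic-block.h.  This is
   an illustrative sketch only, not code used by this pass:

	if (REGNO_REG_SET_P (bb->global_live_at_start, REGNO (reg)))
	  ; /* REG is live on entry to BB.  */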
   Two types of elements can be added to an insn's REG_NOTES.
   A REG_DEAD note is added to an insn's REG_NOTES for any register
   that meets both of two conditions: the value in the register is not
   needed in subsequent insns, and the insn does not replace the value in
   the register (in the case of multi-word hard registers, the value in
   each register must be replaced by the insn to avoid a REG_DEAD note).

   In the vast majority of cases, an object in a REG_DEAD note will be
   used somewhere in the insn.  The (rare) exception to this is if an
   insn uses a multi-word hard register and only some of the registers are
   needed in subsequent insns.  In that case, REG_DEAD notes will be
   provided for those hard registers that are not subsequently needed.
   Partial REG_DEAD notes of this type do not occur when an insn sets
   only some of the hard registers used in such a multi-word operand;
   omitting REG_DEAD notes for objects stored in an insn is optional and
   the desire to do so does not justify the complexity of the partial
   REG_DEAD notes.
   REG_UNUSED notes are added for each register that is set by the insn
   but is unused subsequently (if every register set by the insn is unused
   and the insn does not reference memory or have some other side-effect,
   the insn is deleted instead).  If only part of a multi-word hard
   register is used in a subsequent insn, REG_UNUSED notes are made for
   the parts that will not be used.
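
   Both kinds of note can be queried with find_reg_note from rtlanal.c.
   A hedged sketch, not code from this file:

	/* Nonzero if REG's old value dies in INSN, or if INSN sets REG
	   and the result is never used afterwards.  */
	if (find_reg_note (insn, REG_DEAD, reg) != NULL_RTX
	    || find_reg_note (insn, REG_UNUSED, reg) != NULL_RTX)
	  ; /* REG need not be preserved after INSN.  */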
   To determine which registers are live after any insn, one can
   start from the beginning of the basic block and scan insns, noting
   which registers are set by each insn and which die there.
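
   In outline, given a block BB, an insn INSN within it, and a scratch
   regset LIVE, such a scan looks like the simplified sketch below.
   This is illustrative only; the real pass does the equivalent with
   mark_set_regs and mark_used_regs, which also handle CLOBBERs,
   PARALLELs, multi-word hard registers and REG_UNUSED notes, none of
   which are handled here:

	COPY_REG_SET (live, bb->global_live_at_start);
	for (x = bb->head; ; x = NEXT_INSN (x))
	  {
	    if (GET_RTX_CLASS (GET_CODE (x)) == 'i')
	      {
		/* Registers named in REG_DEAD notes die in X ...  */
		for (note = REG_NOTES (x); note; note = XEXP (note, 1))
		  if (REG_NOTE_KIND (note) == REG_DEAD)
		    CLEAR_REGNO_REG_SET (live, REGNO (XEXP (note, 0)));
		/* ... and a register stored by a simple SET becomes live.  */
		if (GET_CODE (PATTERN (x)) == SET
		    && GET_CODE (SET_DEST (PATTERN (x))) == REG)
		  SET_REGNO_REG_SET (live, REGNO (SET_DEST (PATTERN (x))));
	      }
	    if (x == insn)
	      break;
	  }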
   ** Other actions of life_analysis **

   life_analysis sets up the LOG_LINKS fields of insns because the
   information needed to do so is readily available.

   life_analysis deletes insns whose only effect is to store a value
   that is never used.

   life_analysis notices cases where a reference to a register as
   a memory address can be combined with a preceding or following
   incrementation or decrementation of the register.  The separate
   instruction to increment or decrement is deleted and the address
   is changed to a POST_INC or similar rtx.

   Each time an incrementing or decrementing address is created,
   a REG_INC element is added to the insn's REG_NOTES list.
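
   The transformation is therefore visible afterwards both in the
   address itself and in the notes; for example (sketch only, not code
   from this file):

	if (find_reg_note (insn, REG_INC, reg) != NULL_RTX)
	  ; /* INSN increments or decrements REG within an address.  */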
   life_analysis fills in certain vectors containing information about
   register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
   REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.

   life_analysis sets current_function_sp_is_unchanging if the function
   doesn't modify the stack pointer.  */
/* TODO:

   Split out from life_analysis:
	- local property discovery (bb->local_live, bb->local_set)
	- global property computation
	- log links creation
	- pre/post modify transformation
*/
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "basic-block.h"
#include "insn-config.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "recog.h"
#include "insn-flags.h"

#include "obstack.h"
#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
/* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
   the stack pointer does not matter.  The value is tested only in
   functions that have frame pointers.
   No definition is equivalent to always zero.  */
#ifndef EXIT_IGNORE_STACK
#define EXIT_IGNORE_STACK 0
#endif

#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif

#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
/* The contents of the current function definition are allocated
   in this obstack, and all are freed at the end of the function.
   For top-level functions, this is temporary_obstack.
   Separate obstacks are made for nested functions.  */

extern struct obstack *function_obstack;
/* Number of basic blocks in the current function.  */

int n_basic_blocks;

/* Number of edges in the current function.  */

int n_edges;

/* The basic block array.  */

varray_type basic_block_info;

/* The special entry and exit blocks.  */
struct basic_block_def entry_exit_blocks[2]
= {{NULL,			/* head */
    NULL,			/* end */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    ENTRY_BLOCK,		/* index */
    0,				/* loop_depth */
    -1, -1			/* eh_beg, eh_end */
   },
   {NULL,			/* head */
    NULL,			/* end */
    NULL,			/* pred */
    NULL,			/* succ */
    NULL,			/* local_set */
    NULL,			/* global_live_at_start */
    NULL,			/* global_live_at_end */
    NULL,			/* aux */
    EXIT_BLOCK,			/* index */
    0,				/* loop_depth */
    -1, -1			/* eh_beg, eh_end */
   }};
/* Nonzero if the second flow pass has completed.  */

int flow2_completed;

/* Maximum register number used in this function, plus one.  */

int max_regno;

/* Indexed by n, giving various register information.  */

varray_type reg_n_info;

/* Size of the reg_n_info table.  */

unsigned int reg_n_max;
/* Element N is the next insn that uses (hard or pseudo) register number N
   within the current basic block; or zero, if there is no such insn.
   This is valid only during the final backward scan in propagate_block.  */

static rtx *reg_next_use;

/* Size of a regset for the current function,
   in (1) bytes and (2) elements.  */

int regset_bytes;
int regset_size;

/* Regset of regs live when calls to `setjmp'-like functions happen.  */
/* ??? Does this exist only for the setjmp-clobbered warning message?  */

regset regs_live_at_setjmp;
/* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
   that have to go in the same hard reg.
   The first two regs in the list are a pair, and the next two
   are another pair, etc.  */

rtx regs_may_share;
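
/* Illustrative sketch only (not used by this pass): the pair layout
   above means consecutive elements are walked two at a time, e.g.

	for (x = regs_may_share; x && XEXP (x, 1); x = XEXP (XEXP (x, 1), 1))
	  {
	    rtx r1 = XEXP (x, 0);
	    rtx r2 = XEXP (XEXP (x, 1), 0);
	    ... R1 and R2 must receive the same hard register ...
	  }
*/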
/* Depth within loops of basic block being scanned for lifetime analysis,
   plus one.  This is the weight attached to references to registers.  */

static int loop_depth;
/* During propagate_block, this is non-zero if the value of CC0 is live.  */

static int cc0_live;

/* During propagate_block, this contains a list of all the MEMs we are
   tracking for dead store elimination.  */

static rtx mem_set_list;

/* Set of registers that may be eliminable.  These are handled specially
   in updating regs_ever_live.  */

static HARD_REG_SET elim_reg_set;

/* The basic block structure for every insn, indexed by uid.  */

varray_type basic_block_for_insn;

/* The labels mentioned in non-jump rtl.  Valid during find_basic_blocks.  */
/* ??? Should probably be using LABEL_NUSES instead.  It would take a
   bit of surgery to be able to use or co-opt the routines in jump.  */

static rtx label_value_list;
/* Forward declarations */
static int count_basic_blocks PARAMS ((rtx));
static rtx find_basic_blocks_1 PARAMS ((rtx));
static void create_basic_block PARAMS ((int, rtx, rtx, rtx));
static void clear_edges PARAMS ((void));
static void make_edges PARAMS ((rtx));
static void make_label_edge PARAMS ((sbitmap *, basic_block,
				     rtx, int));
static void make_eh_edge PARAMS ((sbitmap *, eh_nesting_info *,
				  basic_block, rtx, int));
static void mark_critical_edges PARAMS ((void));
static void move_stray_eh_region_notes PARAMS ((void));
static void record_active_eh_regions PARAMS ((rtx));
static void commit_one_edge_insertion PARAMS ((edge));
static void delete_unreachable_blocks PARAMS ((void));
static void delete_eh_regions PARAMS ((void));
static int can_delete_note_p PARAMS ((rtx));
static int delete_block PARAMS ((basic_block));
static void expunge_block PARAMS ((basic_block));
static int can_delete_label_p PARAMS ((rtx));
static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
							  basic_block));
static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
							basic_block));
static void merge_blocks_nomove PARAMS ((basic_block, basic_block));
static int merge_blocks PARAMS ((edge, basic_block, basic_block));
static void try_merge_blocks PARAMS ((void));
static void tidy_fallthru_edge PARAMS ((edge, basic_block, basic_block));
static void tidy_fallthru_edges PARAMS ((void));
static int verify_wide_reg_1 PARAMS ((rtx *, void *));
static void verify_wide_reg PARAMS ((int, rtx, rtx));
static void verify_local_live_at_start PARAMS ((regset, basic_block));
static int set_noop_p PARAMS ((rtx));
static int noop_move_p PARAMS ((rtx));
static void delete_noop_moves PARAMS ((rtx));
static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
static void notice_stack_pointer_modification PARAMS ((rtx));
static void mark_reg PARAMS ((rtx, void *));
static void mark_regs_live_at_end PARAMS ((regset));
static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
static void propagate_block PARAMS ((basic_block, regset,
				     regset, int));
static int insn_dead_p PARAMS ((rtx, regset, int, rtx));
static int libcall_dead_p PARAMS ((rtx, regset, rtx, rtx));
static void mark_set_regs PARAMS ((regset, regset, rtx,
				   rtx, regset, int));
static void mark_set_1 PARAMS ((regset, regset, rtx,
				rtx, regset, int));
static void find_auto_inc PARAMS ((regset, rtx, rtx));
static int try_pre_increment_1 PARAMS ((rtx));
static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
static void mark_used_regs PARAMS ((regset, regset, rtx, int, rtx));
void dump_flow_info PARAMS ((FILE *));
void debug_flow_info PARAMS ((void));
static void dump_edge_info PARAMS ((FILE *, edge, int));
static void count_reg_sets_1 PARAMS ((rtx));
static void count_reg_sets PARAMS ((rtx));
static void count_reg_references PARAMS ((rtx));
static void invalidate_mems_from_autoinc PARAMS ((rtx));
static void remove_fake_successors PARAMS ((basic_block));
static void flow_nodes_print PARAMS ((const char *, const sbitmap, FILE *));
static void flow_exits_print PARAMS ((const char *, const edge *, int, FILE *));
static void flow_loops_cfg_dump PARAMS ((const struct loops *, FILE *));
static int flow_loop_nested_p PARAMS ((struct loop *, struct loop *));
static int flow_loop_exits_find PARAMS ((const sbitmap, edge **));
static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
static int flow_depth_first_order_compute PARAMS ((int *));
static basic_block flow_loop_pre_header_find PARAMS ((basic_block, const sbitmap *));
static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
static void flow_loops_tree_build PARAMS ((struct loops *));
static int flow_loop_level_compute PARAMS ((struct loop *, int));
static int flow_loops_level_compute PARAMS ((struct loops *));

/* This function is always defined so it can be called from the
   debugger, and it is declared extern so we don't get warnings about
   it being unused.  */
void verify_flow_info PARAMS ((void));
int flow_loop_outside_edge_p PARAMS ((const struct loop *, edge));
/* Find basic blocks of the current function.
   F is the first insn of the function and NREGS the number of register
   numbers in use.  */

void
find_basic_blocks (f, nregs, file)
     rtx f;
     int nregs ATTRIBUTE_UNUSED;
     FILE *file ATTRIBUTE_UNUSED;
{
  int max_uid;

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      int i;

      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
	 tag for reuse during create_basic_block, just in case some pass
	 copies around basic block notes improperly.  */
      for (i = 0; i < n_basic_blocks; ++i)
	BASIC_BLOCK (i)->aux = NULL;

      VARRAY_FREE (basic_block_info);
    }

  n_basic_blocks = count_basic_blocks (f);

  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");

  label_value_list = find_basic_blocks_1 (f);

  /* Record the block to which an insn belongs.  */
  /* ??? This should be done another way, by which (perhaps) a label is
     tagged directly with the basic block that it starts.  It is used for
     more than that currently, but IMO that is the only valid use.  */

  max_uid = get_max_uid ();
#ifdef AUTO_INC_DEC
  /* Leave space for insns life_analysis makes in some cases for auto-inc.
     These cases are rare, so we don't need too much space.  */
  max_uid += max_uid / 10;
#endif

  compute_bb_for_insn (max_uid);

  /* Discover the edges of our cfg.  */
  record_active_eh_regions (f);
  make_edges (label_value_list);

  /* Do very simple cleanup now, for the benefit of code that runs between
     here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
  tidy_fallthru_edges ();

  mark_critical_edges ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
}
/* Count the basic blocks of the function.  */

static int
count_basic_blocks (f)
     rtx f;
{
  register rtx insn;
  register RTX_CODE prev_code;
  register int count = 0;
  int eh_region = 0;
  int call_had_abnormal_edge = 0;
  rtx prev_call = NULL_RTX;

  prev_code = JUMP_INSN;
  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      register RTX_CODE code = GET_CODE (insn);

      if (code == CODE_LABEL
	  || (GET_RTX_CLASS (code) == 'i'
	      && (prev_code == JUMP_INSN
		  || prev_code == BARRIER
		  || (prev_code == CALL_INSN && call_had_abnormal_edge))))
	count++;

      /* Record whether this call created an edge.  */
      if (code == CALL_INSN)
	{
	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	  int region = (note ? XWINT (XEXP (note, 0), 0) : 1);
	  prev_call = insn;
	  call_had_abnormal_edge = 0;

	  /* If there is a specified EH region, we have an edge.  */
	  if (eh_region && region > 0)
	    call_had_abnormal_edge = 1;
	  else
	    {
	      /* If there is a nonlocal goto label and the specified
		 region number isn't -1, we have an edge.  (0 means
		 no throw, but might have a nonlocal goto).  */
	      if (nonlocal_goto_handler_labels && region >= 0)
		call_had_abnormal_edge = 1;
	    }
	}
      else if (code != NOTE)
	prev_call = NULL_RTX;

      if (code != NOTE)
	prev_code = code;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	++eh_region;
      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
	--eh_region;
    }

  /* The rest of the compiler works a bit smoother when we don't have to
     check for the edge case of do-nothing functions with no basic blocks.  */
  if (count == 0)
    {
      emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
      count = 1;
    }

  return count;
}
/* Find all basic blocks of the function whose first insn is F.

   Collect and return a list of labels whose addresses are taken.  This
   will be used in make_edges for use with computed gotos.  */

static rtx
find_basic_blocks_1 (f)
     rtx f;
{
  register rtx insn, next;
  int call_has_abnormal_edge = 0;
  int i = 0;
  rtx bb_note = NULL_RTX;
  rtx eh_list = NULL_RTX;
  rtx label_value_list = NULL_RTX;
  rtx head = NULL_RTX;
  rtx end = NULL_RTX;

  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     count_basic_blocks continues to use the old form as a check.  */

  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      next = NEXT_INSN (insn);

      if (code == CALL_INSN)
	{
	  /* Record whether this call created an edge.  */
	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	  int region = (note ? XWINT (XEXP (note, 0), 0) : 1);
	  call_has_abnormal_edge = 0;

	  /* If there is an EH region, we have an edge.  */
	  if (eh_list && region > 0)
	    call_has_abnormal_edge = 1;
	  else
	    {
	      /* If there is a nonlocal goto label and the specified
		 region number isn't -1, we have an edge.  (0 means
		 no throw, but might have a nonlocal goto).  */
	      if (nonlocal_goto_handler_labels && region >= 0)
		call_has_abnormal_edge = 1;
	    }
	}

      switch (code)
	{
	case NOTE:
	  {
	    int kind = NOTE_LINE_NUMBER (insn);

	    /* Keep a LIFO list of the currently active exception notes.  */
	    if (kind == NOTE_INSN_EH_REGION_BEG)
	      eh_list = alloc_INSN_LIST (insn, eh_list);
	    else if (kind == NOTE_INSN_EH_REGION_END)
	      {
		rtx t = eh_list;
		eh_list = XEXP (eh_list, 1);
		free_INSN_LIST_node (t);
	      }

	    /* Look for basic block notes with which to keep the
	       basic_block_info pointers stable.  Unthread the note now;
	       we'll put it back at the right place in create_basic_block.
	       Or not at all if we've already found a note in this block.  */
	    else if (kind == NOTE_INSN_BASIC_BLOCK)
	      {
		if (bb_note == NULL_RTX)
		  bb_note = insn;
		next = flow_delete_insn (insn);
	      }

	    break;
	  }

	case CODE_LABEL:
	  /* A basic block starts at a label.  If we've closed one off due
	     to a barrier or some such, no need to do it again.  */
	  if (head != NULL_RTX)
	    {
	      /* While we now have edge lists with which other portions of
		 the compiler might determine a call ending a basic block
		 does not imply an abnormal edge, it will be a bit before
		 everything can be updated.  So continue to emit a noop at
		 the end of such a block.  */
	      if (GET_CODE (end) == CALL_INSN)
		{
		  rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
		  end = emit_insn_after (nop, end);
		}

	      create_basic_block (i++, head, end, bb_note);
	      bb_note = NULL_RTX;
	    }
	  head = end = insn;
	  break;

	case JUMP_INSN:
	  /* A basic block ends at a jump.  */
	  if (head == NULL_RTX)
	    head = insn;
	  else
	    {
	      /* ??? Make a special check for table jumps.  The way this
		 happens is truly and amazingly gross.  We are about to
		 create a basic block that contains just a code label and
		 an addr*vec jump insn.  Worse, an addr_diff_vec creates
		 its own natural loop.

		 Prevent this bit of brain damage, pasting things together
		 correctly in make_edges.

		 The correct solution involves emitting the table directly
		 on the tablejump instruction as a note, or JUMP_LABEL.  */

	      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
		  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
		{
		  head = end = NULL;
		  n_basic_blocks--;
		  break;
		}
	    }
	  end = insn;
	  goto new_bb_inclusive;

	case BARRIER:
	  /* A basic block ends at a barrier.  It may be that an unconditional
	     jump already closed the basic block -- no need to do it again.  */
	  if (head == NULL_RTX)
	    break;

	  /* While we now have edge lists with which other portions of the
	     compiler might determine a call ending a basic block does not
	     imply an abnormal edge, it will be a bit before everything can
	     be updated.  So continue to emit a noop at the end of such a
	     block.  */
	  if (GET_CODE (end) == CALL_INSN)
	    {
	      rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
	      end = emit_insn_after (nop, end);
	    }
	  goto new_bb_exclusive;

	case CALL_INSN:
	  /* A basic block ends at a call that can either throw or
	     do a non-local goto.  */
	  if (call_has_abnormal_edge)
	    {
	    new_bb_inclusive:
	      if (head == NULL_RTX)
		head = insn;
	      end = insn;

	    new_bb_exclusive:
	      create_basic_block (i++, head, end, bb_note);
	      head = end = NULL_RTX;
	      bb_note = NULL_RTX;
	      break;
	    }
	  /* FALLTHRU */

	default:
	  if (GET_RTX_CLASS (code) == 'i')
	    {
	      if (head == NULL_RTX)
		head = insn;
	      end = insn;
	    }
	  break;
	}

      if (GET_RTX_CLASS (code) == 'i')
	{
	  rtx note;

	  /* Make a list of all labels referred to other than by jumps
	     (which just don't have the REG_LABEL notes).

	     Make a special exception for labels followed by an ADDR*VEC,
	     as this would be a part of the tablejump setup code.

	     Make a special exception for the eh_return_stub_label, which
	     we know isn't part of any otherwise visible control flow.  */

	  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	    if (REG_NOTE_KIND (note) == REG_LABEL)
	      {
		rtx lab = XEXP (note, 0), next;

		if (lab == eh_return_stub_label)
		  ;
		else if ((next = next_nonnote_insn (lab)) != NULL
			 && GET_CODE (next) == JUMP_INSN
			 && (GET_CODE (PATTERN (next)) == ADDR_VEC
			     || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
		  ;
		else
		  label_value_list
		    = alloc_EXPR_LIST (0, XEXP (note, 0), label_value_list);
	      }
	}
    }

  if (head != NULL_RTX)
    create_basic_block (i++, head, end, bb_note);

  if (i != n_basic_blocks)
    abort ();

  return label_value_list;
}
/* Tidy the CFG by deleting unreachable code and whatnot.  */

void
cleanup_cfg (f)
     rtx f;
{
  delete_unreachable_blocks ();
  move_stray_eh_region_notes ();
  record_active_eh_regions (f);
  try_merge_blocks ();
  mark_critical_edges ();

  /* Kill the data we won't maintain.  */
  label_value_list = NULL_RTX;
}
/* Create a new basic block consisting of the instructions between
   HEAD and END inclusive.  Reuses the note and basic block struct
   in BB_NOTE, if any.  */

static void
create_basic_block (index, head, end, bb_note)
     int index;
     rtx head, end, bb_note;
{
  basic_block bb;

  if (bb_note
      && ! RTX_INTEGRATED_P (bb_note)
      && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
      && bb->aux == NULL)
    {
      /* If we found an existing note, thread it back onto the chain.  */

      if (GET_CODE (head) == CODE_LABEL)
	add_insn_after (bb_note, head);
      else
	{
	  add_insn_before (bb_note, head);
	  head = bb_note;
	}
    }
  else
    {
      /* Otherwise we must create a note and a basic block structure.
	 Since we allow basic block structs in rtl, give the struct
	 the same lifetime by allocating it off the function obstack
	 rather than using malloc.  */

      bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
      memset (bb, 0, sizeof (*bb));

      if (GET_CODE (head) == CODE_LABEL)
	bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
      else
	{
	  bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
	  head = bb_note;
	}
      NOTE_BASIC_BLOCK (bb_note) = bb;
    }

  /* Always include the bb note in the block.  */
  if (NEXT_INSN (end) == bb_note)
    end = bb_note;

  bb->head = head;
  bb->end = end;
  bb->index = index;
  BASIC_BLOCK (index) = bb;

  /* Tag the block so that we know it has been used when considering
     other basic block notes.  */
  bb->aux = bb;
}
797 indexed by INSN_UID. MAX is the size of the array. */
800 compute_bb_for_insn (max)
805 if (basic_block_for_insn)
806 VARRAY_FREE (basic_block_for_insn);
807 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
809 for (i = 0; i < n_basic_blocks; ++i)
811 basic_block bb = BASIC_BLOCK (i);
818 int uid = INSN_UID (insn);
820 VARRAY_BB (basic_block_for_insn, uid) = bb;
823 insn = NEXT_INSN (insn);
/* Free the memory associated with the edge structures.  */

static void
clear_edges ()
{
  int i;
  edge n, e;

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);

      for (e = bb->succ; e ; e = n)
	{
	  n = e->succ_next;
	  free (e);
	}

      bb->succ = 0;
      bb->pred = 0;
    }

  for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
    {
      n = e->succ_next;
      free (e);
    }

  ENTRY_BLOCK_PTR->succ = 0;
  EXIT_BLOCK_PTR->pred = 0;

  n_edges = 0;
}
/* Identify the edges between basic blocks.

   LABEL_VALUE_LIST is a list of labels whose addresses are taken
   outside of jump insns; blocks that are otherwise unreachable may be
   reachable via a computed goto or a non-local goto to one of these
   labels.  Exception region information comes from the eh_beg/eh_end
   fields recorded in each basic block by record_active_eh_regions.  */

static void
make_edges (label_value_list)
     rtx label_value_list;
{
  int i;
  eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
  sbitmap *edge_cache = NULL;

  /* Assume no computed jump; revise as we create edges.  */
  current_function_has_computed_jump = 0;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || label_value_list)
    {
      edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
      sbitmap_vector_zero (edge_cache, n_basic_blocks);
    }

  /* By nature of the way these get numbered, block 0 is always the entry.  */
  make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);

  for (i = 0; i < n_basic_blocks; ++i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, x;
      enum rtx_code code;
      int force_fallthru = 0;

      /* Examine the last instruction of the block, and discover the
	 ways we can leave the block.  */

      insn = bb->end;
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
	{
	  rtx tmp;

	  /* ??? Recognize a tablejump and do the right thing.  */
	  if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	      && GET_CODE (tmp) == JUMP_INSN
	      && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
		  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	    {
	      rtvec vec;
	      int j;

	      if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
		vec = XVEC (PATTERN (tmp), 0);
	      else
		vec = XVEC (PATTERN (tmp), 1);

	      for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
		make_label_edge (edge_cache, bb,
				 XEXP (RTVEC_ELT (vec, j), 0), 0);

	      /* Some targets (eg, ARM) emit a conditional jump that also
		 contains the out-of-range target.  Scan for these and
		 add an edge if necessary.  */
	      if ((tmp = single_set (insn)) != NULL
		  && SET_DEST (tmp) == pc_rtx
		  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
		  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
		make_label_edge (edge_cache, bb,
				 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);

#ifdef CASE_DROPS_THROUGH
	      /* Silly VAXen.  The ADDR_VEC is going to be in the way of
		 us naturally detecting fallthru into the next block.  */
	      force_fallthru = 1;
#endif
	    }

	  /* If this is a computed jump, then mark it as reaching
	     everything on the label_value_list and forced_labels list.  */
	  else if (computed_jump_p (insn))
	    {
	      current_function_has_computed_jump = 1;

	      for (x = label_value_list; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);

	      for (x = forced_labels; x; x = XEXP (x, 1))
		make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
	    }

	  /* Returns create an exit out.  */
	  else if (returnjump_p (insn))
	    make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

	  /* Otherwise, we have a plain conditional or unconditional jump.  */
	  else
	    {
	      if (! JUMP_LABEL (insn))
		abort ();
	      make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
	    }
	}

      /* If this is a CALL_INSN, then mark it as reaching the active EH
	 handler for this CALL_INSN.  If we're handling asynchronous
	 exceptions then any insn can reach any of the active handlers.

	 Also mark the CALL_INSN as reaching any nonlocal goto handler.  */

      if (code == CALL_INSN || asynchronous_exceptions)
	{
	  /* If there's an EH region active at the end of a block,
	     add the appropriate edges.  */
	  if (bb->eh_end >= 0)
	    make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);

	  /* If we have asynchronous exceptions, do the same for *all*
	     exception regions active in the block.  */
	  if (asynchronous_exceptions
	      && bb->eh_beg != bb->eh_end)
	    {
	      if (bb->eh_beg >= 0)
		make_eh_edge (edge_cache, eh_nest_info, bb,
			      NULL_RTX, bb->eh_beg);

	      for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
		if (GET_CODE (x) == NOTE
		    && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
			|| NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
		  {
		    int region = NOTE_EH_HANDLER (x);
		    make_eh_edge (edge_cache, eh_nest_info, bb,
				  NULL_RTX, region);
		  }
	    }

	  if (code == CALL_INSN && nonlocal_goto_handler_labels)
	    {
	      /* ??? This could be made smarter: in some cases it's possible
		 to tell that certain calls will not do a nonlocal goto.

		 For example, if the nested functions that do the nonlocal
		 gotos do not have their addresses taken, then only calls to
		 those functions or to other nested functions that use them
		 could possibly do nonlocal gotos.  */
	      /* We do know that a REG_EH_REGION note with a value less
		 than 0 is guaranteed not to perform a non-local goto.  */
	      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	      if (!note || XINT (XEXP (note, 0), 0) >= 0)
		for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
		  make_label_edge (edge_cache, bb, XEXP (x, 0),
				   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
	    }
	}

      /* We know something about the structure of the function __throw in
	 libgcc2.c.  It is the only function that ever contains eh_stub
	 labels.  It modifies its return address so that the last block
	 returns to one of the eh_stub labels within it.  So we have to
	 make additional edges in the flow graph.  */
      if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
	make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);

      /* Find out if we can drop through to the next block.  */
      insn = next_nonnote_insn (insn);
      if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
	make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (i + 1 < n_basic_blocks)
	{
	  rtx tmp = BLOCK_HEAD (i + 1);
	  if (GET_CODE (tmp) == NOTE)
	    tmp = next_nonnote_insn (tmp);
	  if (force_fallthru || insn == tmp)
	    make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
	}
    }

  free_eh_nesting_info (eh_nest_info);
  if (edge_cache)
    sbitmap_vector_free (edge_cache);
}
/* Create an edge between two basic blocks.  FLAGS are auxiliary information
   about the edge that is accumulated between calls.  */

void
make_edge (edge_cache, src, dst, flags)
     sbitmap *edge_cache;
     basic_block src, dst;
     int flags;
{
  int use_edge_cache;
  edge e;

  /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
     many edges to them, and we didn't allocate memory for it.  */
  use_edge_cache = (edge_cache
		    && src != ENTRY_BLOCK_PTR
		    && dst != EXIT_BLOCK_PTR);

  /* Make sure we don't add duplicate edges.  */
  if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
    for (e = src->succ; e ; e = e->succ_next)
      if (e->dest == dst)
	{
	  e->flags |= flags;
	  return;
	}

  e = (edge) xcalloc (1, sizeof (*e));
  n_edges++;

  e->succ_next = src->succ;
  e->pred_next = dst->pred;
  e->src = src;
  e->dest = dst;
  e->flags = flags;

  src->succ = e;
  dst->pred = e;

  if (use_edge_cache)
    SET_BIT (edge_cache[src->index], dst->index);
}
/* Create an edge from a basic block to a label.  */

static void
make_label_edge (edge_cache, src, label, flags)
     sbitmap *edge_cache;
     basic_block src;
     rtx label;
     int flags;
{
  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}
/* Create the edges generated by INSN in REGION.  */

static void
make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
     sbitmap *edge_cache;
     eh_nesting_info *eh_nest_info;
     basic_block src;
     rtx insn;
     int region;
{
  handler_info **handler_list;
  int num, is_call;

  is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
  num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
  while (--num >= 0)
    make_label_edge (edge_cache, src, handler_list[num]->handler_label,
		     EDGE_ABNORMAL | EDGE_EH | is_call);
}
/* EH_REGION notes appearing between basic blocks are ambiguous, and even
   dangerous if we intend to move basic blocks around.  Move such notes
   into the following block.  */

static void
move_stray_eh_region_notes ()
{
  int i;
  basic_block b1, b2;

  if (n_basic_blocks < 2)
    return;

  b2 = BASIC_BLOCK (n_basic_blocks - 1);
  for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
    {
      rtx insn, next, list = NULL_RTX;

      b1 = BASIC_BLOCK (i);
      for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (GET_CODE (insn) == NOTE
	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	    {
	      /* Unlink from the insn chain.  */
	      NEXT_INSN (PREV_INSN (insn)) = next;
	      PREV_INSN (next) = PREV_INSN (insn);

	      /* Queue it.  */
	      NEXT_INSN (insn) = list;
	      PREV_INSN (insn) = NULL_RTX;
	      list = insn;
	    }
	}

      if (list == NULL_RTX)
	continue;

      /* Find where to insert these things.  */
      insn = b2->head;
      if (GET_CODE (insn) == CODE_LABEL)
	insn = NEXT_INSN (insn);

      while (list)
	{
	  next = NEXT_INSN (list);
	  add_insn_after (list, insn);
	  list = next;
	}
    }
}
/* Recompute eh_beg/eh_end for each basic block.  */

static void
record_active_eh_regions (f)
     rtx f;
{
  rtx insn, eh_list = NULL_RTX;
  int i = 0;
  basic_block bb = BASIC_BLOCK (0);

  for (insn = f; insn ; insn = NEXT_INSN (insn))
    {
      if (bb->head == insn)
	bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);

      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG)
	    eh_list = alloc_INSN_LIST (insn, eh_list);
	  else if (kind == NOTE_INSN_EH_REGION_END)
	    {
	      rtx t = XEXP (eh_list, 1);
	      free_INSN_LIST_node (eh_list);
	      eh_list = t;
	    }
	}

      if (bb->end == insn)
	{
	  bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
	  i += 1;
	  if (i == n_basic_blocks)
	    break;
	  bb = BASIC_BLOCK (i);
	}
    }
}
/* Identify critical edges and set the bits appropriately.  */

static void
mark_critical_edges ()
{
  int i, n = n_basic_blocks;
  basic_block bb;

  /* We begin with the entry block.  This is not terribly important now,
     but could be if a front end (Fortran) implemented alternate entry
     points.  */
  bb = ENTRY_BLOCK_PTR;
  i = -1;

  while (1)
    {
      edge e;

      /* (1) Critical edges must have a source with multiple successors.  */
      if (bb->succ && bb->succ->succ_next)
	{
	  for (e = bb->succ; e ; e = e->succ_next)
	    {
	      /* (2) Critical edges must have a destination with multiple
		 predecessors.  Note that we know there is at least one
		 predecessor -- the edge we followed to get here.  */
	      if (e->dest->pred->pred_next)
		e->flags |= EDGE_CRITICAL;
	      else
		e->flags &= ~EDGE_CRITICAL;
	    }
	}
      else
	{
	  for (e = bb->succ; e ; e = e->succ_next)
	    e->flags &= ~EDGE_CRITICAL;
	}

      if (++i >= n)
	break;
      bb = BASIC_BLOCK (i);
    }
}
/* Split a (typically critical) edge.  Return the new block.
   Abort on abnormal edges.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

basic_block
split_edge (edge_in)
     edge edge_in;
{
  basic_block old_pred, bb, old_succ;
  edge edge_out;
  rtx bb_note;
  int i, j;

  /* Abnormal edges cannot be split.  */
  if ((edge_in->flags & EDGE_ABNORMAL) != 0)
    abort ();

  old_pred = edge_in->src;
  old_succ = edge_in->dest;

  /* Remove the existing edge from the destination's pred list.  */
  {
    edge *pp;
    for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
      continue;
    *pp = edge_in->pred_next;
    edge_in->pred_next = NULL;
  }

  /* Create the new structures.  */
  bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
  edge_out = (edge) xcalloc (1, sizeof (*edge_out));
  n_edges++;

  memset (bb, 0, sizeof (*bb));
  bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
  bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);

  /* ??? This info is likely going to be out of date very soon.  */
  if (old_succ->global_live_at_start)
    {
      COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
      COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
    }
  else
    {
      CLEAR_REG_SET (bb->global_live_at_start);
      CLEAR_REG_SET (bb->global_live_at_end);
    }

  /* Wire them up.  */
  bb->pred = edge_in;
  bb->succ = edge_out;

  edge_in->dest = bb;
  edge_in->flags &= ~EDGE_CRITICAL;

  edge_out->pred_next = old_succ->pred;
  edge_out->succ_next = NULL;
  edge_out->src = bb;
  edge_out->dest = old_succ;
  edge_out->flags = EDGE_FALLTHRU;
  edge_out->probability = REG_BR_PROB_BASE;

  old_succ->pred = edge_out;

  /* Tricky case -- if there existed a fallthru into the successor
     (and we're not it) we must add a new unconditional jump around
     the new block we're actually interested in.

     Further, if that edge is critical, this means a second new basic
     block must be created to hold it.  In order to simplify correct
     insn placement, do this before we touch the existing basic block
     ordering for the block we were really wanting.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e;
      for (e = edge_out->pred_next; e ; e = e->pred_next)
	if (e->flags & EDGE_FALLTHRU)
	  break;

      if (e)
	{
	  basic_block jump_block;
	  rtx pos;

	  if ((e->flags & EDGE_CRITICAL) == 0)
	    {
	      /* Non critical -- we can simply add a jump to the end
		 of the existing predecessor.  */
	      jump_block = e->src;
	    }
	  else
	    {
	      /* We need a new block to hold the jump.  The simplest
	         way to do the bulk of the work here is to recursively
	         call ourselves.  */
	      jump_block = split_edge (e);
	      e = jump_block->succ;
	    }

	  /* Now add the jump insn ...  */
	  pos = emit_jump_insn_after (gen_jump (old_succ->head),
				      jump_block->end);
	  jump_block->end = pos;
	  if (basic_block_for_insn)
	    set_block_for_insn (pos, jump_block);
	  emit_barrier_after (pos);

	  /* ... let jump know that label is in use, ...  */
	  JUMP_LABEL (pos) = old_succ->head;
	  ++LABEL_NUSES (old_succ->head);

	  /* ... and clear fallthru on the outgoing edge.  */
	  e->flags &= ~EDGE_FALLTHRU;

	  /* Continue splitting the interesting edge.  */
	}
    }

  /* Place the new block just in front of the successor.  */
  VARRAY_GROW (basic_block_info, ++n_basic_blocks);
  if (old_succ == EXIT_BLOCK_PTR)
    j = n_basic_blocks - 1;
  else
    j = old_succ->index;
  for (i = n_basic_blocks - 1; i > j; --i)
    {
      basic_block tmp = BASIC_BLOCK (i - 1);
      BASIC_BLOCK (i) = tmp;
      tmp->index = i;
    }
  BASIC_BLOCK (i) = bb;
  bb->index = i;

  /* Create the basic block note.

     Where we place the note can have a noticeable impact on the generated
     code.  Consider a cfg in which block 0 falls through into block 1,
     and block 1 is the head of a loop.  If we need to insert an insn on
     the edge from block 0 to block 1, we want to ensure the instructions
     we insert are outside of any loop notes that physically sit between
     block 0 and block 1.  Otherwise we confuse the loop optimizer into
     thinking the loop is a phony.  */
  if (old_succ != EXIT_BLOCK_PTR
      && PREV_INSN (old_succ->head)
      && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
      && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
				PREV_INSN (old_succ->head));
  else if (old_succ != EXIT_BLOCK_PTR)
    bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
  else
    bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
  NOTE_BASIC_BLOCK (bb_note) = bb;
  bb->head = bb->end = bb_note;

  /* Not quite simple -- for non-fallthru edges, we must adjust the
     predecessor's jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      rtx tmp, insn = old_pred->end;
      rtx old_label = old_succ->head;
      rtx new_label = gen_label_rtx ();

      if (GET_CODE (insn) != JUMP_INSN)
	abort ();

      /* ??? Recognize a tablejump and adjust all matching cases.  */
      if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
	  && (tmp = NEXT_INSN (tmp)) != NULL_RTX
	  && GET_CODE (tmp) == JUMP_INSN
	  && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
	      || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
	{
	  rtvec vec;
	  int j;

	  if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
	    vec = XVEC (PATTERN (tmp), 0);
	  else
	    vec = XVEC (PATTERN (tmp), 1);

	  for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
	    if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
	      {
		RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
		--LABEL_NUSES (old_label);
		++LABEL_NUSES (new_label);
	      }

	  /* Handle casesi dispatch insns.  */
	  if ((tmp = single_set (insn)) != NULL
	      && SET_DEST (tmp) == pc_rtx
	      && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
	      && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
	      && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
	    {
	      XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
							   new_label);
	      --LABEL_NUSES (old_label);
	      ++LABEL_NUSES (new_label);
	    }
	}
      else
	{
	  /* This would have indicated an abnormal edge.  */
	  if (computed_jump_p (insn))
	    abort ();

	  /* A return instruction can't be redirected.  */
	  if (returnjump_p (insn))
	    abort ();

	  /* If the insn doesn't go where we think, we're confused.  */
	  if (JUMP_LABEL (insn) != old_label)
	    abort ();

	  redirect_jump (insn, new_label);
	}

      emit_label_before (new_label, bb_note);
      bb->head = new_label;
    }

  return bb;
}
/* Queue instructions for insertion on an edge between two basic blocks.
   The new instructions and basic blocks (if any) will not appear in the
   CFG until commit_edge_insertions is called.  */

void
insert_insn_on_edge (pattern, e)
     rtx pattern;
     edge e;
{
  /* We cannot insert instructions on an abnormal critical edge.
     It will be easier to find the culprit if we die now.  */
  if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
      == (EDGE_ABNORMAL|EDGE_CRITICAL))
    abort ();

  if (e->insns == NULL_RTX)
    start_sequence ();
  else
    push_to_sequence (e->insns);

  emit_insn (pattern);

  e->insns = get_insns ();
  end_sequence ();
}
/* Update the CFG for the instructions queued on edge E.  */

static void
commit_one_edge_insertion (e)
     edge e;
{
  rtx before = NULL_RTX, after = NULL_RTX, insns, tmp;
  basic_block bb;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns;
  e->insns = NULL_RTX;

  /* Figure out where to put these things.  If the destination has
     one predecessor, insert there.  Except for the exit block.  */
  if (e->dest->pred->pred_next == NULL
      && e->dest != EXIT_BLOCK_PTR)
    {
      bb = e->dest;

      /* Get the location correct wrt a code label, and "nice" wrt
	 a basic block note, and before everything else.  */
      tmp = bb->head;
      if (GET_CODE (tmp) == CODE_LABEL)
	tmp = NEXT_INSN (tmp);
      if (GET_CODE (tmp) == NOTE
	  && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
	tmp = NEXT_INSN (tmp);
      if (tmp == bb->head)
	before = tmp;
      else
	after = PREV_INSN (tmp);
    }

  /* If the source has one successor and the edge is not abnormal,
     insert there.  Except for the entry block.  */
  else if ((e->flags & EDGE_ABNORMAL) == 0
	   && e->src->succ->succ_next == NULL
	   && e->src != ENTRY_BLOCK_PTR)
    {
      bb = e->src;
      /* It is possible to have a non-simple jump here.  Consider a target
	 where some forms of unconditional jumps clobber a register.  This
	 happens on the fr30 for example.

	 We know this block has a single successor, so we can just emit
	 the queued insns before the jump.  */
      if (GET_CODE (bb->end) == JUMP_INSN)
	before = bb->end;
      else
	{
	  /* We'd better be fallthru, or we've lost track of what's what.  */
	  if ((e->flags & EDGE_FALLTHRU) == 0)
	    abort ();

	  after = bb->end;
	}
    }

  /* Otherwise we must split the edge.  */
  else
    {
      bb = split_edge (e);
      after = bb->end;
    }

  /* Now that we've found the spot, do the insertion.  */

  /* Set the new block number for these insns, if structure is allocated.  */
  if (basic_block_for_insn)
    {
      rtx i;
      for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
	set_block_for_insn (i, bb);
    }

  if (before)
    {
      emit_insns_before (insns, before);
      if (before == bb->head)
	bb->head = insns;
    }
  else
    {
      rtx last = emit_insns_after (insns, after);
      if (after == bb->end)
	{
	  bb->end = last;
	  if (GET_CODE (last) == JUMP_INSN)
	    {
	      if (returnjump_p (last))
		{
		  /* ??? Remove all outgoing edges from BB and add one
		     for EXIT.  This is not currently a problem because
		     this only happens for the (single) epilogue, which
		     already has a fallthru edge to EXIT.  */

		  e = bb->succ;
		  if (e->dest != EXIT_BLOCK_PTR
		      || e->succ_next != NULL
		      || (e->flags & EDGE_FALLTHRU) == 0)
		    abort ();
		  e->flags &= ~EDGE_FALLTHRU;

		  emit_barrier_after (last);
		}
	      else
		abort ();
	    }
	}
    }
}
/* Update the CFG for all queued instructions.  */

void
commit_edge_insertions ()
{
  int i;
  basic_block bb;

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  i = -1;
  bb = ENTRY_BLOCK_PTR;
  while (1)
    {
      edge e, next;

      for (e = bb->succ; e ; e = next)
	{
	  next = e->succ_next;
	  if (e->insns)
	    commit_one_edge_insertion (e);
	}

      if (++i >= n_basic_blocks)
	break;
      bb = BASIC_BLOCK (i);
    }
}
/* Delete all unreachable basic blocks.  */

static void
delete_unreachable_blocks ()
{
  basic_block *worklist, *tos;
  int deleted_handler;
  edge e;
  int i, n;

  n = n_basic_blocks;
  tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);

  /* Use basic_block->aux as a marker.  Clear them all.  */
  for (i = 0; i < n; ++i)
    BASIC_BLOCK (i)->aux = NULL;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */
  for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
    {
      *tos++ = e->dest;

      /* Mark the block with a handy non-null value.  */
      e->dest->aux = e;
    }

  /* Iterate: find everything reachable from what we've already seen.  */
  while (tos != worklist)
    {
      basic_block b = *--tos;

      for (e = b->succ; e ; e = e->succ_next)
	if (!e->dest->aux)
	  {
	    *tos++ = e->dest;
	    e->dest->aux = e;
	  }
    }

  /* Delete all unreachable basic blocks.  Count down so that we don't
     interfere with the block renumbering that happens in delete_block.  */
  deleted_handler = 0;

  for (i = n - 1; i >= 0; --i)
    {
      basic_block b = BASIC_BLOCK (i);

      if (b->aux != NULL)
	/* This block was found.  Tidy up the mark.  */
	b->aux = NULL;
      else
	deleted_handler |= delete_block (b);
    }

  tidy_fallthru_edges ();

  /* If we deleted an exception handler, we may have EH region begin/end
     blocks to remove as well.  */
  if (deleted_handler)
    delete_eh_regions ();

  free (worklist);
}
/* Find EH regions for which there is no longer a handler, and delete them.  */

static void
delete_eh_regions ()
{
  rtx insn;

  update_rethrow_references ();

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
	    || (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
	  {
	    int num = NOTE_EH_HANDLER (insn);
	    /* A NULL handler indicates a region is no longer needed,
	       as long as it isn't the target of a rethrow.  */
	    if (get_first_handler (num) == NULL && ! rethrow_used (num))
	      {
		NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		NOTE_SOURCE_FILE (insn) = 0;
	      }
	  }
      }
}
/* Return true if NOTE is not one of the ones that must be kept paired,
   so that we may simply delete them.  */

static int
can_delete_note_p (note)
     rtx note;
{
  return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
	  || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
}
/* Unlink a chain of insns between START and FINISH, leaving notes
   that must be paired.  */

void
flow_delete_insn_chain (start, finish)
     rtx start, finish;
{
  /* Unchain the insns one by one.  It would be quicker to delete all
     of these with a single unchaining, rather than one at a time, but
     we need to keep the NOTE's.  */

  rtx next;

  while (1)
    {
      next = NEXT_INSN (start);
      if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
	;
      else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
	;
      else
	next = flow_delete_insn (start);

      if (start == finish)
	break;
      start = next;
    }
}
/* Delete the insns in a (non-live) block.  We physically delete every
   non-deleted-note insn, and update the flow graph appropriately.

   Return nonzero if we deleted an exception handler.  */

/* ??? Preserving all such notes strikes me as wrong.  It would be nice
   to post-process the stream to remove empty blocks, loops, ranges, etc.  */

static int
delete_block (b)
     basic_block b;
{
  int deleted_handler = 0;
  rtx insn, end;

  /* If the head of this block is a CODE_LABEL, then it might be the
     label for an exception handler which can't be reached.

     We need to remove the label from the exception_handler_label list
     and remove the associated NOTE_INSN_EH_REGION_BEG and
     NOTE_INSN_EH_REGION_END notes.  */

  insn = b->head;

  never_reached_warning (insn);

  if (GET_CODE (insn) == CODE_LABEL)
    {
      rtx x, *prev = &exception_handler_labels;

      for (x = exception_handler_labels; x; x = XEXP (x, 1))
	{
	  if (XEXP (x, 0) == insn)
	    {
	      /* Found a match, splice this label out of the EH label list.  */
	      *prev = XEXP (x, 1);
	      XEXP (x, 1) = NULL_RTX;
	      XEXP (x, 0) = NULL_RTX;

	      /* Remove the handler from all regions.  */
	      remove_handler (insn);
	      deleted_handler = 1;
	      break;
	    }
	  prev = &XEXP (x, 1);
	}

      /* This label may be referenced by code solely for its value, or
	 referenced by static data, or something.  We have determined
	 that it is not reachable, but cannot delete the label itself.
	 Save code space and continue to delete the balance of the block,
	 along with properly updating the cfg.  */
      if (!can_delete_label_p (insn))
	{
	  /* If we've only got one of these, skip the whole deleting
	     insns thing.  */
	  if (insn == b->end)
	    goto no_delete_insns;
	  insn = NEXT_INSN (insn);
	}
    }

  /* Selectively unlink the insn chain.  Include any BARRIER that may
     follow the basic block.  */
  end = next_nonnote_insn (b->end);
  if (!end || GET_CODE (end) != BARRIER)
    end = b->end;
  flow_delete_insn_chain (insn, end);

 no_delete_insns:

  /* Remove the edges into and out of this block.  Note that there may
     indeed be edges in, if we are removing an unreachable loop.  */
  {
    edge e, next, *q;

    for (e = b->pred; e ; e = next)
      {
	for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
	  continue;
	*q = e->succ_next;
	next = e->pred_next;
	n_edges--;
	free (e);
      }
    for (e = b->succ; e ; e = next)
      {
	for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
	  continue;
	*q = e->pred_next;
	next = e->succ_next;
	n_edges--;
	free (e);
      }

    b->pred = NULL;
    b->succ = NULL;
  }

  /* Remove the basic block from the array, and compact behind it.  */
  expunge_block (b);

  return deleted_handler;
}
/* Remove block B from the basic block array and compact behind it.  */

static void
expunge_block (b)
     basic_block b;
{
  int i, n = n_basic_blocks;

  for (i = b->index; i + 1 < n; ++i)
    {
      basic_block x = BASIC_BLOCK (i + 1);
      BASIC_BLOCK (i) = x;
      x->index = i;
    }

  basic_block_info->num_elements--;
  n_basic_blocks--;
}
/* Delete INSN by patching it out.  Return the next insn.  */

rtx
flow_delete_insn (insn)
     rtx insn;
{
  rtx prev = PREV_INSN (insn);
  rtx next = NEXT_INSN (insn);

  PREV_INSN (insn) = NULL_RTX;
  NEXT_INSN (insn) = NULL_RTX;

  if (prev)
    NEXT_INSN (prev) = next;
  if (next)
    PREV_INSN (next) = prev;
  else
    set_last_insn (prev);

  if (GET_CODE (insn) == CODE_LABEL)
    remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
    LABEL_NUSES (JUMP_LABEL (insn))--;

  return next;
}
/* True if a given label can be deleted.  */

static int
can_delete_label_p (label)
     rtx label;
{
  rtx x;

  if (LABEL_PRESERVE_P (label))
    return 0;

  for (x = forced_labels; x ; x = XEXP (x, 1))
    if (label == XEXP (x, 0))
      return 0;
  for (x = label_value_list; x ; x = XEXP (x, 1))
    if (label == XEXP (x, 0))
      return 0;
  for (x = exception_handler_labels; x ; x = XEXP (x, 1))
    if (label == XEXP (x, 0))
      return 0;

  /* User declared labels must be preserved.  */
  if (LABEL_NAME (label) != 0)
    return 0;

  return 1;
}
/* Blocks A and B are to be merged into a single block.  A has no incoming
   fallthru edge, so it can be moved before B without adding or modifying
   any jumps (aside from the jump from A to B).  */

static int
merge_blocks_move_predecessor_nojumps (a, b)
     basic_block a, b;
{
  rtx start, end, barrier;
  int index;

  start = a->head;
  end = a->end;

  /* We want to delete the BARRIER after the end of the insns we are
     going to move.  If we don't find a BARRIER, then do nothing.  This
     can happen in some cases if we have labels we can not delete.

     Similarly, do nothing if we can not delete the label at the start
     of the target block.  */
  barrier = next_nonnote_insn (end);
  if (GET_CODE (barrier) != BARRIER
      || (GET_CODE (b->head) == CODE_LABEL
	  && ! can_delete_label_p (b->head)))
    return 0;

  flow_delete_insn (barrier);

  /* Move block and loop notes out of the chain so that we do not
     disturb their order.

     ??? A better solution would be to squeeze out all the non-nested notes
     and adjust the block trees appropriately.  Even better would be to have
     a tighter connection between block trees and rtl so that this is not
     necessary.  */
  start = squeeze_notes (start, end);

  /* Scramble the insn chain.  */
  if (end != PREV_INSN (b->head))
    reorder_insns (start, end, PREV_INSN (b->head));

  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
	       a->index, b->index);
    }

  /* Swap the records for the two blocks around.  Although we are deleting B,
     A is now where B was and we want to compact the BB array from where
     A used to be.  */
  BASIC_BLOCK (a->index) = b;
  BASIC_BLOCK (b->index) = a;
  index = a->index;
  a->index = b->index;
  b->index = index;

  /* Now blocks A and B are contiguous.  Merge them.  */
  merge_blocks_nomove (a, b);

  return 1;
}
/* Blocks A and B are to be merged into a single block.  B has no outgoing
   fallthru edge, so it can be moved after A without adding or modifying
   any jumps (aside from the jump from A to B).  */

static int
merge_blocks_move_successor_nojumps (a, b)
     basic_block a, b;
{
  rtx start, end, barrier;

  start = b->head;
  end = b->end;

  /* We want to delete the BARRIER after the end of the insns we are
     going to move.  If we don't find a BARRIER, then do nothing.  This
     can happen in some cases if we have labels we can not delete.

     Similarly, do nothing if we can not delete the label at the start
     of the target block.  */
  barrier = next_nonnote_insn (end);
  if (GET_CODE (barrier) != BARRIER
      || (GET_CODE (b->head) == CODE_LABEL
	  && ! can_delete_label_p (b->head)))
    return 0;

  flow_delete_insn (barrier);

  /* Move block and loop notes out of the chain so that we do not
     disturb their order.

     ??? A better solution would be to squeeze out all the non-nested notes
     and adjust the block trees appropriately.  Even better would be to have
     a tighter connection between block trees and rtl so that this is not
     necessary.  */
  start = squeeze_notes (start, end);

  /* Scramble the insn chain.  */
  reorder_insns (start, end, a->end);

  /* Now blocks A and B are contiguous.  Merge them.  */
  merge_blocks_nomove (a, b);

  if (rtl_dump_file)
    {
      fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
	       b->index, a->index);
    }

  return 1;
}
/* Blocks A and B are to be merged into a single block.  The insns
   are already contiguous, hence `nomove'.  */

static void
merge_blocks_nomove (a, b)
     basic_block a, b;
{
  edge e;
  rtx b_head, b_end, a_end;
  int b_empty = 0;

  /* If there was a CODE_LABEL beginning B, delete it.  */
  b_head = b->head;
  b_end = b->end;
  if (GET_CODE (b_head) == CODE_LABEL)
    {
      /* Detect basic blocks with nothing but a label.  This can happen
	 in particular at the end of a function.  */
      if (b_head == b_end)
	b_empty = 1;
      b_head = flow_delete_insn (b_head);
    }

  /* Delete the basic block note.  */
  if (GET_CODE (b_head) == NOTE
      && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
    {
      if (b_head == b_end)
	b_empty = 1;
      b_head = flow_delete_insn (b_head);
    }

  /* If there was a jump out of A, delete it.  */
  a_end = a->end;
  if (GET_CODE (a_end) == JUMP_INSN)
    {
      rtx prev;

      prev = prev_nonnote_insn (a_end);
      if (!prev)
	prev = a->head;

#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
	 the insn that set cc0.  */
      if (prev && sets_cc0_p (prev))
	{
	  rtx tmp = prev;
	  prev = prev_nonnote_insn (prev);
	  if (!prev)
	    prev = a->head;
	  flow_delete_insn (tmp);
	}
#endif

      /* Note that a->head != a->end, since we should have at least a
	 bb note plus the jump, so prev != insn.  */
      flow_delete_insn (a_end);
      a_end = prev;
    }

  /* By definition, there should only be one successor of A, and that is
     B.  Free that edge struct.  */
  n_edges--;
  free (a->succ);

  /* Adjust the edges out of B for the new owner.  */
  for (e = b->succ; e ; e = e->succ_next)
    e->src = a;
  a->succ = b->succ;

  /* Reassociate the insns of B with A.  */
  if (!b_empty)
    {
      BLOCK_FOR_INSN (b_head) = a;
      while (b_head != b_end)
	{
	  b_head = NEXT_INSN (b_head);
	  BLOCK_FOR_INSN (b_head) = a;
	}
    }
  a->end = a_end;

  /* Compact the basic block array.  */
  expunge_block (b);
}
2210 /* Attempt to merge basic blocks that are potentially non-adjacent.
2211 Return true iff the attempt succeeded. */
2214 merge_blocks (e, b, c)
2218 /* If B has a fallthru edge to C, no need to move anything. */
2219 if (e->flags & EDGE_FALLTHRU)
2221 /* If a label still appears somewhere and we cannot delete the label,
2222 then we cannot merge the blocks. The edge was tidied already. */
2224 rtx insn, stop = NEXT_INSN (c->head);
2225 for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
2226 if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
2229 merge_blocks_nomove (b, c);
2233 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2234 b->index, c->index);
2243 int c_has_outgoing_fallthru;
2244 int b_has_incoming_fallthru;
2246 /* We must make sure to not munge nesting of exception regions,
2247 lexical blocks, and loop notes.
2249 The first is taken care of by requiring that the active eh
2250 region at the end of one block always matches the active eh
2251 region at the beginning of the next block.
2253 The later two are taken care of by squeezing out all the notes. */
2255 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2256 executed and we may want to treat blocks which have two out
2257 edges, one normal, one abnormal as only having one edge for
2258 block merging purposes. */
2260 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2261 if (tmp_edge->flags & EDGE_FALLTHRU)
2263 c_has_outgoing_fallthru = (tmp_edge != NULL);
2265 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2266 if (tmp_edge->flags & EDGE_FALLTHRU)
2268 b_has_incoming_fallthru = (tmp_edge != NULL);
2270 /* If B does not have an incoming fallthru, and the exception regions
2271 match, then it can be moved immediately before C without introducing or modifying jumps.
2274 C cannot be the first block, so we do not have to worry about
2275 accessing a non-existent block. */
2276 d = BASIC_BLOCK (c->index - 1);
2277 if (! b_has_incoming_fallthru
2278 && d->eh_end == b->eh_beg
2279 && b->eh_end == c->eh_beg)
2280 return merge_blocks_move_predecessor_nojumps (b, c);
2282 /* Otherwise, we're going to try to move C after B. Make sure the
2283 exception regions match.
2285 If B is the last basic block, then we must not try to access the
2286 block structure for block B + 1. Luckily in that case we do not
2287 need to worry about matching exception regions. */
2288 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2289 if (b->eh_end == c->eh_beg
2290 && (d == NULL || c->eh_end == d->eh_beg))
2292 /* If C does not have an outgoing fallthru, then it can be moved
2293 immediately after B without introducing or modifying jumps. */
2294 if (! c_has_outgoing_fallthru)
2295 return merge_blocks_move_successor_nojumps (b, c);
2297 /* Otherwise, we'll need to insert an extra jump, and possibly
2298 a new block to contain it. */
2299 /* ??? Not implemented yet. */
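/* A sketch of the case analysis above, for reference:

     fallthru edge B->C already?    merge in place, nothing moves
     no fallthru into B?            move B up to just before C
     no fallthru out of C?          move C down to just after B
     otherwise                      a new jump would be needed; punt.  */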
2306 /* Top level driver for merge_blocks. */
2313 /* Attempt to merge blocks as made possible by edge removal. If a block
2314 has only one successor, and the successor has only one predecessor,
2315 they may be combined. */
2317 for (i = 0; i < n_basic_blocks; )
2319 basic_block c, b = BASIC_BLOCK (i);
2322 /* A loop because chains of blocks might be combinable. */
2323 while ((s = b->succ) != NULL
2324 && s->succ_next == NULL
2325 && (s->flags & EDGE_EH) == 0
2326 && (c = s->dest) != EXIT_BLOCK_PTR
2327 && c->pred->pred_next == NULL
2328 /* If the jump insn has side effects, we can't kill the edge. */
2329 && (GET_CODE (b->end) != JUMP_INSN
2330 || onlyjump_p (b->end))
2331 && merge_blocks (s, b, c))
2334 /* Don't get confused by the index shift caused by deleting blocks. */
2339 /* The given edge should potentially be a fallthru edge. If that is in
2340 fact true, delete the jump and barriers that are in the way. */
2343 tidy_fallthru_edge (e, b, c)
2349 /* ??? In a late-running flow pass, other folks may have deleted basic
2350 blocks by nopping out blocks, leaving multiple BARRIERs between here
2351 and the target label. They ought to be chastised and fixed.
2353 We can also wind up with a sequence of undeletable labels between
2354 one block and the next.
2356 So search through a sequence of barriers, labels, and notes for
2357 the head of block C and assert that we really do fall through. */
2359 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2362 /* Remove what will soon cease being the jump insn from the source block.
2363 If block B consisted only of this single jump, turn it into a deleted note. */
2366 if (GET_CODE (q) == JUMP_INSN)
2369 /* If this was a conditional jump, we need to also delete
2370 the insn that set cc0. */
2371 if (! simplejump_p (q) && condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2378 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2379 NOTE_SOURCE_FILE (q) = 0;
2382 b->end = q = PREV_INSN (q);
2385 /* Selectively unlink the sequence. */
2386 if (q != PREV_INSN (c->head))
2387 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2389 e->flags |= EDGE_FALLTHRU;
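/* For example, if B ends in a simple jump

       (jump_insn (set (pc) (label_ref L)))

   and L is the CODE_LABEL at the head of C, which immediately follows B,
   then the jump (and any BARRIER behind it) is deleted above and the
   edge becomes an ordinary fallthru.  */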
2392 /* Fix up edges that now fall through, or rather should now fall through
2393 but previously required a jump around now deleted blocks. Simplify
2394 the search by only examining blocks numerically adjacent, since this
2395 is how find_basic_blocks created them. */
2398 tidy_fallthru_edges ()
2402 for (i = 1; i < n_basic_blocks; ++i)
2404 basic_block b = BASIC_BLOCK (i - 1);
2405 basic_block c = BASIC_BLOCK (i);
2408 /* We care about simple conditional or unconditional jumps with a single successor.
2411 If we had a conditional branch to the next instruction when
2412 find_basic_blocks was called, then there will only be one
2413 out edge for the block which ended with the conditional
2414 branch (since we do not create duplicate edges).
2416 Furthermore, the edge will be marked as a fallthru because we
2417 merge the flags for the duplicate edges. So we do not want to
2418 check that the edge is not a FALLTHRU edge. */
2419 if ((s = b->succ) != NULL
2420 && s->succ_next == NULL
2422 /* If the jump insn has side effects, we can't tidy the edge. */
2423 && (GET_CODE (b->end) != JUMP_INSN
2424 || onlyjump_p (b->end)))
2425 tidy_fallthru_edge (s, b, c);
2429 /* Discover and record the loop depth at the head of each basic block. */
2432 calculate_loop_depth (dump)
2437 /* The loop infrastructure does the real job for us. */
2438 flow_loops_find (&loops);
2441 flow_loops_dump (&loops, dump, 0);
2443 flow_loops_free (&loops);
2446 /* Perform data flow analysis.
2447 F is the first insn of the function and NREGS the number of register numbers in use.
2451 life_analysis (f, nregs, file, remove_dead_code)
2455 int remove_dead_code;
2458 #ifdef ELIMINABLE_REGS
2459 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2464 /* Record which registers will be eliminated. We use this in
2467 CLEAR_HARD_REG_SET (elim_reg_set);
2469 #ifdef ELIMINABLE_REGS
2470 for (i = 0; i < (int) (sizeof eliminables / sizeof eliminables[0]); i++)
2471 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2473 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2476 /* We want alias analysis information for local dead store elimination. */
2477 init_alias_analysis ();
2480 flags = PROP_DEATH_NOTES | PROP_REG_INFO;
2484 if (! remove_dead_code)
2485 flags &= ~(PROP_SCAN_DEAD_CODE | PROP_KILL_DEAD_CODE);
2488 /* The post-reload life analysis must have (on a global basis) the same
2489 registers live as were computed by reload itself; otherwise,
2490 elimination offsets and such may be incorrect.
2492 Reload will mark some registers as live even though they do not
2493 appear in the rtl. */
2494 if (reload_completed)
2495 flags &= ~PROP_REG_INFO;
2499 /* Always remove no-op moves. Do this before other processing so
2500 that we don't have to keep re-scanning them. */
2501 delete_noop_moves (f);
2503 /* Some targets can emit simpler epilogues if they know that sp was
2504 not ever modified during the function. After reload, of course,
2505 we've already emitted the epilogue so there's no sense searching. */
2506 if (! reload_completed)
2507 notice_stack_pointer_modification (f);
2509 /* Allocate and zero out data structures that will record the
2510 data from lifetime analysis. */
2511 allocate_reg_life_data ();
2512 allocate_bb_life_data ();
2513 reg_next_use = (rtx *) xcalloc (nregs, sizeof (rtx));
2514 all_blocks = sbitmap_alloc (n_basic_blocks);
2515 sbitmap_ones (all_blocks);
2517 /* Find the set of registers live on function exit. */
2518 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2520 /* "Update" life info from zero. It'd be nice to begin the
2521 relaxation with just the exit and noreturn blocks, but that set
2522 is not immediately handy. */
2523 update_life_info (all_blocks, UPDATE_LIFE_GLOBAL, flags);
2526 sbitmap_free (all_blocks);
2527 free (reg_next_use);
2528 reg_next_use = NULL;
2529 end_alias_analysis ();
2532 dump_flow_info (file);
2534 free_basic_block_vars (1);
2537 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2538 Search for REGNO. If found, abort if it is not wider than word_mode. */
2541 verify_wide_reg_1 (px, pregno)
2546 int regno = *(int *) pregno;
2548 if (GET_CODE (x) == REG && REGNO (x) == regno)
2550 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
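#if 0
/* Illustrative sketch of the for_each_rtx idiom used above: the callback
   is applied to every subexpression, returns 0 to keep walking, and may
   return nonzero to stop the walk.  COUNT_REGS_1 is a hypothetical
   helper, not part of this file.  */
static int
count_regs_1 (px, data)
     rtx *px;
     void *data;
{
  if (GET_CODE (*px) == REG)
    ++*(int *) data;
  return 0;			/* continue the traversal */
}

/* Usage:  int n = 0;  for_each_rtx (&PATTERN (insn), count_regs_1, &n);  */
#endif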
2557 /* A subroutine of verify_local_live_at_start. Search through insns
2558 between HEAD and END looking for register REGNO. */
2561 verify_wide_reg (regno, head, end)
2567 if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
2568 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
2572 head = NEXT_INSN (head);
2575 /* We didn't find the register at all. Something's way screwy. */
2579 /* A subroutine of update_life_info. Verify that there are no untoward
2580 changes in live_at_start during a local update. */
2583 verify_local_live_at_start (new_live_at_start, bb)
2584 regset new_live_at_start;
2587 if (reload_completed)
2589 /* After reload, there are no pseudos, nor subregs of multi-word
2590 registers. The regsets should exactly match. */
2591 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2598 /* Find the set of changed registers. */
2599 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2601 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2603 /* No registers should die. */
2604 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2606 /* Verify that the now-live register is wider than word_mode. */
2607 verify_wide_reg (i, bb->head, bb->end);
2612 /* Updates life information starting with the basic blocks set in BLOCKS.
2614 If LOCAL_ONLY, such as after splitting or peepholing, we are only
2615 expecting local modifications to basic blocks. If we find extra
2616 registers live at the beginning of a block, then we either killed
2617 useful data, or we have a broken split that wants data not provided.
2618 If we find registers removed from live_at_start, that means we have
2619 a broken peephole that is killing a register it shouldn't.
2621 ??? This is not true in one situation -- when a pre-reload splitter
2622 generates subregs of a multi-word pseudo, current life analysis will
2623 lose the kill. So we _can_ have a pseudo go live. How irritating.
2625 BLOCK_FOR_INSN is assumed to be correct.
2627 PROP_FLAGS should not contain PROP_LOG_LINKS unless the caller sets
2628 up reg_next_use. Including PROP_REG_INFO does not properly refresh
2629 regs_ever_live unless the caller resets it to zero. */
2632 update_life_info (blocks, extent, prop_flags)
2634 enum update_life_extent extent;
2640 tmp = ALLOCA_REG_SET ();
2642 /* For a global update, we go through the relaxation process again. */
2643 if (extent != UPDATE_LIFE_LOCAL)
2645 calculate_global_regs_live (blocks, blocks,
2646 prop_flags & PROP_SCAN_DEAD_CODE);
2648 /* If asked, remove notes from the blocks we'll update. */
2649 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2650 count_or_remove_death_notes (blocks, 1);
2653 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2655 basic_block bb = BASIC_BLOCK (i);
2657 COPY_REG_SET (tmp, bb->global_live_at_end);
2658 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2660 if (extent == UPDATE_LIFE_LOCAL)
2661 verify_local_live_at_start (tmp, bb);
2666 if (prop_flags & PROP_REG_INFO)
2668 /* The only pseudos that are live at the beginning of the function
2669 are those that were not set anywhere in the function. local-alloc
2670 doesn't know how to handle these correctly, so mark them as not
2671 local to any one basic block. */
2672 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2673 FIRST_PSEUDO_REGISTER, i,
2674 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2676 /* We have a problem with any pseudoreg that lives across the setjmp.
2677 ANSI says that if a user variable does not change in value between
2678 the setjmp and the longjmp, then the longjmp preserves it. This
2679 includes longjmp from a place where the pseudo appears dead.
2680 (In principle, the value still exists if it is in scope.)
2681 If the pseudo goes in a hard reg, some other value may occupy
2682 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2683 Conclusion: such a pseudo must not go in a hard reg. */
2684 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2685 FIRST_PSEUDO_REGISTER, i,
2687 if (regno_reg_rtx[i] != 0)
2689 REG_LIVE_LENGTH (i) = -1;
2690 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
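#if 0
/* Illustrative sketch of a typical local update (a hypothetical helper,
   not an existing caller): after transforming insns inside block BB,
   refresh its death notes without redoing the global relaxation.  */
static void
refresh_block_life (bb)
     basic_block bb;
{
  sbitmap blocks = sbitmap_alloc (n_basic_blocks);
  sbitmap_zero (blocks);
  SET_BIT (blocks, bb->index);
  update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
  sbitmap_free (blocks);
}
#endif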
2696 /* Free the variables allocated by find_basic_blocks.
2698 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2701 free_basic_block_vars (keep_head_end_p)
2702 int keep_head_end_p;
2704 if (basic_block_for_insn)
2706 VARRAY_FREE (basic_block_for_insn);
2707 basic_block_for_insn = NULL;
2710 if (! keep_head_end_p)
2713 VARRAY_FREE (basic_block_info);
2716 ENTRY_BLOCK_PTR->aux = NULL;
2717 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2718 EXIT_BLOCK_PTR->aux = NULL;
2719 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2723 /* Return nonzero if the destination of SET equals the source. */
2728 rtx src = SET_SRC (set);
2729 rtx dst = SET_DEST (set);
2731 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
2733 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
2735 src = SUBREG_REG (src);
2736 dst = SUBREG_REG (dst);
2739 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
2740 && REGNO (src) == REGNO (dst));
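/* For example, (set (reg:SI 42) (reg:SI 42)) is a no-op, as is
   (set (subreg:SI (reg:DI 42) 0) (subreg:SI (reg:DI 42) 0));
   but (set (subreg:SI (reg:DI 42) 0) (subreg:SI (reg:DI 42) 1))
   is not, since the two SUBREG_WORDs differ.  */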
2743 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2749 rtx pat = PATTERN (insn);
2751 /* Insns carrying these notes are useful later on. */
2752 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2755 if (GET_CODE (pat) == SET && set_noop_p (pat))
2758 if (GET_CODE (pat) == PARALLEL)
2761 /* If nothing but SETs of registers to themselves,
2762 this insn can also be deleted. */
2763 for (i = 0; i < XVECLEN (pat, 0); i++)
2765 rtx tem = XVECEXP (pat, 0, i);
2767 if (GET_CODE (tem) == USE
2768 || GET_CODE (tem) == CLOBBER)
2771 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2780 /* Delete any insns that copy a register to itself. */
2783 delete_noop_moves (f)
2787 for (insn = f; insn; insn = NEXT_INSN (insn))
2789 if (GET_CODE (insn) == INSN && noop_move_p (insn))
2791 PUT_CODE (insn, NOTE);
2792 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2793 NOTE_SOURCE_FILE (insn) = 0;
2798 /* Determine if the stack pointer is constant over the life of the function.
2799 Only useful before prologues have been emitted. */
2802 notice_stack_pointer_modification_1 (x, pat, data)
2804 rtx pat ATTRIBUTE_UNUSED;
2805 void *data ATTRIBUTE_UNUSED;
2807 if (x == stack_pointer_rtx
2808 /* The stack pointer is only modified indirectly as the result
2809 of a push until later in flow. See the comments in rtl.texi
2810 regarding Embedded Side-Effects on Addresses. */
2811 || (GET_CODE (x) == MEM
2812 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2813 || GET_CODE (XEXP (x, 0)) == PRE_INC
2814 || GET_CODE (XEXP (x, 0)) == POST_DEC
2815 || GET_CODE (XEXP (x, 0)) == POST_INC)
2816 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2817 current_function_sp_is_unchanging = 0;
2821 notice_stack_pointer_modification (f)
2826 /* Assume that the stack pointer is unchanging if alloca hasn't
2828 current_function_sp_is_unchanging = !current_function_calls_alloca;
2829 if (! current_function_sp_is_unchanging)
2832 for (insn = f; insn; insn = NEXT_INSN (insn))
2834 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2836 /* Check if insn modifies the stack pointer. */
2837 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
2839 if (! current_function_sp_is_unchanging)
2845 /* Mark a register in SET. Hard registers in large modes get all
2846 of their component registers set as well. */
2848 mark_reg (reg, xset)
2852 regset set = (regset) xset;
2853 int regno = REGNO (reg);
2855 if (GET_MODE (reg) == BLKmode)
2858 SET_REGNO_REG_SET (set, regno);
2859 if (regno < FIRST_PSEUDO_REGISTER)
2861 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2863 SET_REGNO_REG_SET (set, regno + n);
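/* For example, on a 32-bit target a DImode value in hard reg 10 has
   HARD_REGNO_NREGS == 2, so bits 10 and 11 are both set.  */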
2867 /* Mark those regs which are needed at the end of the function as live
2868 at the end of the last basic block. */
2870 mark_regs_live_at_end (set)
2875 /* If exiting needs the right stack value, consider the stack pointer
2876 live at the end of the function. */
2877 if ((HAVE_epilogue && reload_completed)
2878 || ! EXIT_IGNORE_STACK
2879 || (! FRAME_POINTER_REQUIRED
2880 && ! current_function_calls_alloca
2881 && flag_omit_frame_pointer)
2882 || current_function_sp_is_unchanging)
2884 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2887 /* Mark the frame pointer if needed at the end of the function. If
2888 we end up eliminating it, it will be removed from the live list
2889 of each basic block by reload. */
2891 if (! reload_completed || frame_pointer_needed)
2893 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2894 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2895 /* If they are different, also mark the hard frame pointer as live */
2896 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
2900 #ifdef PIC_OFFSET_TABLE_REGNUM
2901 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
2902 /* Many architectures have a GP register even without flag_pic.
2903 Assume the pic register is not in use, or will be handled by
2904 other means, if it is not fixed. */
2905 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
2906 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
2910 /* Mark all global registers, and all registers used by the epilogue
2911 as being live at the end of the function since they may be
2912 referenced by our caller. */
2913 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2915 #ifdef EPILOGUE_USES
2916 || EPILOGUE_USES (i)
2919 SET_REGNO_REG_SET (set, i);
2921 /* Mark all call-saved registers that we actually used. */
2922 if (HAVE_epilogue && reload_completed)
2924 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2925 if (! call_used_regs[i] && regs_ever_live[i])
2926 SET_REGNO_REG_SET (set, i);
2929 /* Mark function return value. */
2930 diddle_return_value (mark_reg, set);
2933 /* Propagate global life info around the graph of basic blocks. Begin
2934 considering blocks with their corresponding bit set in BLOCKS_IN.
2935 BLOCKS_OUT is set for every block that was changed. */
2938 calculate_global_regs_live (blocks_in, blocks_out, flags)
2939 sbitmap blocks_in, blocks_out;
2942 basic_block *queue, *qhead, *qtail, *qend;
2943 regset tmp, new_live_at_end;
2946 tmp = ALLOCA_REG_SET ();
2947 new_live_at_end = ALLOCA_REG_SET ();
2949 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
2950 because the `head == tail' style test for an empty queue doesn't
2951 work with a full queue. */
2952 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
2954 qhead = qend = queue + n_basic_blocks + 2;
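/* To see why the spare slot is needed: in a ring buffer with exactly N
   usable slots, after N insertions the head would again equal the tail,
   making a full queue indistinguishable from an empty one.  */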
2956 /* Clear out the garbage that might be hanging out in bb->aux. */
2957 for (i = n_basic_blocks - 1; i >= 0; --i)
2958 BASIC_BLOCK (i)->aux = NULL;
2960 /* Queue the blocks set in the initial mask. Do this in reverse block
2961 number order so that the first round is more likely to do useful
2962 work. We use AUX non-null to flag that the block is queued. */
2963 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
2965 basic_block bb = BASIC_BLOCK (i);
2970 sbitmap_zero (blocks_out);
2972 while (qhead != qtail)
2974 int rescan, changed;
2983 /* Begin by propagating live_at_start from the successor blocks. */
2984 CLEAR_REG_SET (new_live_at_end);
2985 for (e = bb->succ; e ; e = e->succ_next)
2987 basic_block sb = e->dest;
2988 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
2991 if (bb == ENTRY_BLOCK_PTR)
2993 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
2997 /* On our first pass through this block, we'll go ahead and continue.
2998 Recognize the first pass by local_set being NULL. On subsequent
2999 passes, we get to skip out early if live_at_end wouldn't have changed. */
3001 if (bb->local_set == NULL)
3003 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3008 /* If any bits were removed from live_at_end, we'll have to
3009 rescan the block. This wouldn't be necessary if we had
3010 precalculated local_live; however, with PROP_SCAN_DEAD_CODE,
3011 local_live really depends on live_at_end. */
3012 CLEAR_REG_SET (tmp);
3013 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3014 new_live_at_end, BITMAP_AND_COMPL);
3018 /* Find the set of changed bits. Take this opportunity
3019 to notice whether this set is empty and exit early if so. */
3020 CLEAR_REG_SET (tmp);
3021 changed = bitmap_operation (tmp, bb->global_live_at_end,
3022 new_live_at_end, BITMAP_XOR);
3026 /* If any of the changed bits overlap with local_set,
3027 we'll have to rescan the block. Detect overlap by
3028 the AND with ~local_set turning off bits. */
3029 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3034 /* Let our caller know that BB changed enough to require its
3035 death notes updated. */
3036 SET_BIT (blocks_out, bb->index);
3040 /* Add to live_at_start the set of all registers in
3041 new_live_at_end that aren't in the old live_at_end. */
3043 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3045 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3047 changed = bitmap_operation (bb->global_live_at_start,
3048 bb->global_live_at_start,
3055 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3057 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3058 into live_at_start. */
3059 propagate_block (bb, new_live_at_end, bb->local_set, flags);
3061 /* If live_at_start didn't change, no need to go farther. */
3062 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3065 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3068 /* Queue all predecessors of BB so that we may re-examine
3069 their live_at_end. */
3070 for (e = bb->pred; e ; e = e->pred_next)
3072 basic_block pb = e->src;
3073 if (pb->aux == NULL)
3084 FREE_REG_SET (new_live_at_end);
3086 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3088 basic_block bb = BASIC_BLOCK (i);
3089 FREE_REG_SET (bb->local_set);
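#if 0
/* Illustrative sketch of the relaxation above on a toy three-block CFG,
   using plain ints as regsets (an assumed, self-contained example; the
   real pass recomputes the transfer function insn by insn via
   propagate_block instead of using precomputed USE/DEF summaries).
   We iterate

       live_out (b) = union over successors s of live_in (s)
       live_in (b)  = use (b) | (live_out (b) & ~def (b))

   until nothing changes.  */

#define TOY_NBLOCKS 3

static void
toy_global_liveness ()
{
  /* Block 0 falls into block 1; block 1 loops to itself or falls
     into block 2; block 2 has no successors.  */
  static const int nsucc[TOY_NBLOCKS] = { 1, 2, 0 };
  static const int succ[TOY_NBLOCKS][2] = { { 1, 0 }, { 1, 2 }, { 0, 0 } };
  static const unsigned use[TOY_NBLOCKS] = { 0x1, 0x6, 0x2 };
  static const unsigned def[TOY_NBLOCKS] = { 0x6, 0x1, 0x0 };
  unsigned live_in[TOY_NBLOCKS] = { 0, 0, 0 };
  unsigned live_out[TOY_NBLOCKS] = { 0, 0, 0 };
  int changed = 1;

  while (changed)
    {
      int b, i;
      changed = 0;
      /* Reverse order, like the reverse-numbered queueing above.  */
      for (b = TOY_NBLOCKS - 1; b >= 0; b--)
	{
	  unsigned out = 0, in;
	  for (i = 0; i < nsucc[b]; i++)
	    out |= live_in[succ[b][i]];
	  in = use[b] | (out & ~def[b]);
	  if (in != live_in[b])
	    changed = 1;
	  live_in[b] = in;
	  live_out[b] = out;
	}
    }
}
#endif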
3095 /* Subroutines of life analysis. */
3097 /* Allocate the permanent data structures that represent the results
3098 of life analysis. Not static since used also for stupid life analysis. */
3101 allocate_bb_life_data ()
3105 for (i = 0; i < n_basic_blocks; i++)
3107 basic_block bb = BASIC_BLOCK (i);
3109 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3110 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3113 ENTRY_BLOCK_PTR->global_live_at_end
3114 = OBSTACK_ALLOC_REG_SET (function_obstack);
3115 EXIT_BLOCK_PTR->global_live_at_start
3116 = OBSTACK_ALLOC_REG_SET (function_obstack);
3118 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
3122 allocate_reg_life_data ()
3126 /* Recalculate the register space, in case it has grown. Old style
3127 vector oriented regsets would set regset_{size,bytes} here also. */
3128 allocate_reg_info (max_regno, FALSE, FALSE);
3130 /* Reset all the data we'll collect in propagate_block and its subroutines. */
3132 for (i = 0; i < max_regno; i++)
3136 REG_N_DEATHS (i) = 0;
3137 REG_N_CALLS_CROSSED (i) = 0;
3138 REG_LIVE_LENGTH (i) = 0;
3139 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3143 /* Compute the registers live at the beginning of a basic block
3144 from those live at the end.
3146 When called, OLD contains those live at the end.
3147 On return, it contains those live at the beginning.
3148 BB is the basic block; its insns are scanned from bb->end back to bb->head.
3150 FLAGS selects the work to do: with PROP_KILL_DEAD_CODE we delete
3151 dead stores; with PROP_LOG_LINKS we set up the logical links of
3152 instructions; with PROP_DEATH_NOTES we record the dead-variables
3153 (REG_DEAD and REG_UNUSED) notes; and with PROP_AUTOINC we merge
3154 instructions for autoincrement and autodecrement addresses.
3156 SIGNIFICANT is nonzero only the first time for each basic block.
3157 If it is nonzero, it points to a regset in which we store
3158 a 1 for each register that is set within the block. */
3163 propagate_block (bb, old, significant, flags)
3174 /* Find the loop depth for this block. Ignore loop level changes in the
3175 middle of the basic block -- for register allocation purposes, the
3176 important uses will be in the blocks wholly contained within the loop,
3177 not in the loop pre-header or post-trailer. */
3178 loop_depth = bb->loop_depth;
3180 dead = ALLOCA_REG_SET ();
3181 live = ALLOCA_REG_SET ();
3185 if (flags & PROP_REG_INFO)
3189 /* Process the regs live at the end of the block.
3190 Mark them as not local to any one basic block. */
3191 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3193 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
3197 /* Scan the block an insn at a time from end to beginning. */
3199 for (insn = bb->end; ; insn = prev)
3201 prev = PREV_INSN (insn);
3203 if (GET_CODE (insn) == NOTE)
3205 /* If this is a call to `setjmp' et al,
3206 warn if any non-volatile datum is live. */
3208 if ((flags & PROP_REG_INFO)
3209 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3210 IOR_REG_SET (regs_live_at_setjmp, old);
3213 /* Update the life-status of regs for this insn.
3214 First DEAD gets which regs are set in this insn,
3215 then LIVE gets which regs are used in this insn.
3216 Then the regs live before the insn
3217 are those live after, with DEAD regs turned off,
3218 and then LIVE regs turned on. */
3220 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3223 rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3224 int insn_is_dead = 0;
3225 int libcall_is_dead = 0;
3227 if (flags & PROP_SCAN_DEAD_CODE)
3229 insn_is_dead = insn_dead_p (PATTERN (insn), old, 0,
3231 libcall_is_dead = (insn_is_dead && note != 0
3232 && libcall_dead_p (PATTERN (insn), old,
3236 /* We almost certainly don't want to delete prologue or epilogue
3237 instructions. Warn about probable compiler lossage. */
3240 && (HAVE_epilogue || HAVE_prologue)
3241 && prologue_epilogue_contains (insn))
3243 if (flags & PROP_KILL_DEAD_CODE)
3245 warning ("ICE: would have deleted prologue/epilogue insn");
3246 if (!inhibit_warnings)
3249 libcall_is_dead = insn_is_dead = 0;
3252 /* If an instruction consists of just dead store(s) on final pass,
3253 "delete" it by turning it into a NOTE of type NOTE_INSN_DELETED.
3254 We could really delete it with delete_insn, but that
3255 can cause trouble for first or last insn in a basic block. */
3256 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3259 /* If the insn referred to a label, note that the label is
3261 for (inote = REG_NOTES (insn); inote; inote = XEXP (inote, 1))
3263 if (REG_NOTE_KIND (inote) == REG_LABEL)
3265 rtx label = XEXP (inote, 0);
3267 LABEL_NUSES (label)--;
3269 /* If this label was attached to an ADDR_VEC, it's
3270 safe to delete the ADDR_VEC. In fact, it's pretty
3271 much mandatory to delete it, because the ADDR_VEC may
3272 be referencing labels that no longer exist. */
3273 if (LABEL_NUSES (label) == 0
3274 && (next = next_nonnote_insn (label)) != NULL
3275 && GET_CODE (next) == JUMP_INSN
3276 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3277 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3279 rtx pat = PATTERN (next);
3280 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3281 int len = XVECLEN (pat, diff_vec_p);
3283 for (i = 0; i < len; i++)
3284 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3285 PUT_CODE (next, NOTE);
3286 NOTE_LINE_NUMBER (next) = NOTE_INSN_DELETED;
3287 NOTE_SOURCE_FILE (next) = 0;
3292 PUT_CODE (insn, NOTE);
3293 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
3294 NOTE_SOURCE_FILE (insn) = 0;
3296 /* CC0 is now known to be dead. Either this insn used it,
3297 in which case it doesn't anymore, or clobbered it,
3298 so the next insn can't use it. */
3301 /* If this insn is copying the return value from a library call,
3302 delete the entire library call. */
3303 if (libcall_is_dead)
3305 rtx first = XEXP (note, 0);
3307 while (INSN_DELETED_P (first))
3308 first = NEXT_INSN (first);
3313 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3314 NOTE_SOURCE_FILE (p) = 0;
3320 CLEAR_REG_SET (dead);
3321 CLEAR_REG_SET (live);
3323 /* See if this is an increment or decrement that can be
3324 merged into a following memory address. */
3327 register rtx x = single_set (insn);
3329 /* Does this instruction increment or decrement a register? */
3330 if (!reload_completed
3331 && (flags & PROP_AUTOINC)
3333 && GET_CODE (SET_DEST (x)) == REG
3334 && (GET_CODE (SET_SRC (x)) == PLUS
3335 || GET_CODE (SET_SRC (x)) == MINUS)
3336 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3337 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3338 /* Ok, look for a following memory ref we can combine with.
3339 If one is found, change the memory ref to a PRE_INC
3340 or PRE_DEC, cancel this insn, and return 1.
3341 Return 0 if nothing has been done. */
3342 && try_pre_increment_1 (insn))
3345 #endif /* AUTO_INC_DEC */
3347 /* If this is not the final pass, and this insn is copying the
3348 value of a library call and it's dead, don't scan the
3349 insns that perform the library call, so that the call's
3350 arguments are not marked live. */
3351 if (libcall_is_dead)
3353 /* Mark the dest reg as `significant'. */
3354 mark_set_regs (old, dead, PATTERN (insn), NULL_RTX,
3355 significant, flags);
3357 insn = XEXP (note, 0);
3358 prev = PREV_INSN (insn);
3360 else if (GET_CODE (PATTERN (insn)) == SET
3361 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3362 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3363 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3364 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3365 /* We have an insn to pop a constant amount off the stack.
3366 (Such insns use PLUS regardless of the direction of the stack,
3367 and any insn to adjust the stack by a constant is always a pop.)
3368 These insns, if not dead stores, have no effect on life. */
3372 /* Any regs live at the time of a call instruction
3373 must not go in a register clobbered by calls.
3374 Find all regs now live and record this for them. */
3376 if (GET_CODE (insn) == CALL_INSN
3377 && (flags & PROP_REG_INFO))
3378 EXECUTE_IF_SET_IN_REG_SET (old, 0, i,
3380 REG_N_CALLS_CROSSED (i)++;
3383 /* LIVE gets the regs used in INSN;
3384 DEAD gets those set by it. Dead insns don't make anything
3387 mark_set_regs (old, dead, PATTERN (insn),
3388 insn, significant, flags);
3390 /* If an insn doesn't use CC0, it becomes dead since we
3391 assume that every insn clobbers it. So show it dead here;
3392 mark_used_regs will set it live if it is referenced. */
3396 mark_used_regs (old, live, PATTERN (insn), flags, insn);
3398 /* Sometimes we may have inserted something before INSN (such as
3399 a move) when we make an auto-inc. So ensure we will scan
3402 prev = PREV_INSN (insn);
3405 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3411 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3413 note = XEXP (note, 1))
3414 if (GET_CODE (XEXP (note, 0)) == USE)
3415 mark_used_regs (old, live, XEXP (XEXP (note, 0), 0),
3418 /* Each call clobbers all call-clobbered regs that are not
3419 global or fixed. Note that the function-value reg is a
3420 call-clobbered reg, and mark_set_regs has already had
3421 a chance to handle it. */
3423 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3424 if (call_used_regs[i] && ! global_regs[i]
3427 SET_REGNO_REG_SET (dead, i);
3429 SET_REGNO_REG_SET (significant, i);
3432 /* The stack ptr is used (honorarily) by a CALL insn. */
3433 SET_REGNO_REG_SET (live, STACK_POINTER_REGNUM);
3435 /* Calls may also reference any of the global registers,
3436 so they are made live. */
3437 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3439 mark_used_regs (old, live,
3440 gen_rtx_REG (reg_raw_mode[i], i),
3443 /* Calls also clobber memory. */
3444 free_EXPR_LIST_list (&mem_set_list);
3447 /* Update OLD for the registers used or set. */
3448 AND_COMPL_REG_SET (old, dead);
3449 IOR_REG_SET (old, live);
3453 /* On final pass, update counts of how many insns in which
3454 each reg is live. */
3455 if (flags & PROP_REG_INFO)
3456 EXECUTE_IF_SET_IN_REG_SET (old, 0, i, { REG_LIVE_LENGTH (i)++; });
3459 if (insn == bb->head)
3463 FREE_REG_SET (dead);
3464 FREE_REG_SET (live);
3465 free_EXPR_LIST_list (&mem_set_list);
3468 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3469 (SET expressions whose destinations are registers dead after the insn).
3470 NEEDED is the regset that says which regs are alive after the insn.
3472 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3474 If X is the entire body of an insn, NOTES contains the reg notes
3475 pertaining to the insn. */
3478 insn_dead_p (x, needed, call_ok, notes)
3482 rtx notes ATTRIBUTE_UNUSED;
3484 enum rtx_code code = GET_CODE (x);
3487 /* If flow is invoked after reload, we must take existing AUTO_INC
3488 expressions into account. */
3489 if (reload_completed)
3491 for ( ; notes; notes = XEXP (notes, 1))
3493 if (REG_NOTE_KIND (notes) == REG_INC)
3495 int regno = REGNO (XEXP (notes, 0));
3497 /* Don't delete insns to set global regs. */
3498 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3499 || REGNO_REG_SET_P (needed, regno))
3506 /* If setting something that's a reg or part of one,
3507 see if that register's altered value will be live. */
3511 rtx r = SET_DEST (x);
3514 if (GET_CODE (r) == CC0)
3518 /* A SET that is a subroutine call cannot be dead. */
3519 if (GET_CODE (SET_SRC (x)) == CALL)
3525 /* Don't eliminate loads from volatile memory or volatile asms. */
3526 else if (volatile_refs_p (SET_SRC (x)))
3529 if (GET_CODE (r) == MEM)
3533 if (MEM_VOLATILE_P (r))
3536 /* Walk the set of memory locations we are currently tracking
3537 and see if one is an identical match to this memory location.
3538 If so, this memory write is dead (remember, we're walking
3539 backwards from the end of the block to the start). */
3540 temp = mem_set_list;
3543 if (rtx_equal_p (XEXP (temp, 0), r))
3545 temp = XEXP (temp, 1);
3550 while (GET_CODE (r) == SUBREG
3551 || GET_CODE (r) == STRICT_LOW_PART
3552 || GET_CODE (r) == ZERO_EXTRACT)
3555 if (GET_CODE (r) == REG)
3557 int regno = REGNO (r);
3560 if (REGNO_REG_SET_P (needed, regno))
3563 /* If this is a hard register, verify that subsequent
3564 words are not needed. */
3565 if (regno < FIRST_PSEUDO_REGISTER)
3567 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3570 if (REGNO_REG_SET_P (needed, regno+n))
3574 /* Don't delete insns to set global regs. */
3575 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3578 /* Make sure insns to set the stack pointer aren't deleted. */
3579 if (regno == STACK_POINTER_REGNUM)
3582 /* Make sure insns to set the frame pointer aren't deleted. */
3583 if (regno == FRAME_POINTER_REGNUM
3584 && (! reload_completed || frame_pointer_needed))
3586 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3587 if (regno == HARD_FRAME_POINTER_REGNUM
3588 && (! reload_completed || frame_pointer_needed))
3592 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3593 /* Make sure insns to set arg pointer are never deleted
3594 (if the arg pointer isn't fixed, there will be a USE
3595 for it, so we can treat it normally). */
3596 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3600 /* Otherwise, the set is dead. */
3606 /* If performing several activities, insn is dead if each activity
3607 is individually dead. Also, CLOBBERs and USEs can be ignored; a
3608 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
3610 else if (code == PARALLEL)
3612 int i = XVECLEN (x, 0);
3614 for (i--; i >= 0; i--)
3615 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3616 && GET_CODE (XVECEXP (x, 0, i)) != USE
3617 && ! insn_dead_p (XVECEXP (x, 0, i), needed, call_ok, NULL_RTX))
3623 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3624 is not necessarily true for hard registers. */
3625 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3626 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3627 && ! REGNO_REG_SET_P (needed, REGNO (XEXP (x, 0))))
3630 /* We do not check other CLOBBER or USE here. An insn consisting of just
3631 a CLOBBER or just a USE should not be deleted. */
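/* For example, (set (reg:SI 117) (plus:SI (reg:SI 118) (const_int 4)))
   is dead when bit 117 is clear in NEEDED; whereas a SET whose source
   is a CALL, a store through a volatile MEM, or a set of the stack
   pointer is never considered dead.  */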
3635 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3636 return 1 if the entire library call is dead.
3637 This is true if X copies a register (hard or pseudo)
3638 and if the hard return reg of the call insn is dead.
3639 (The caller should have tested the destination of X already for death.)
3641 If this insn doesn't just copy a register, then we don't
3642 have an ordinary libcall. In that case, cse could not have
3643 managed to substitute the source for the dest later on,
3644 so we can assume the libcall is dead.
3646 NEEDED is the bit vector of pseudoregs live before this insn.
3647 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
3650 libcall_dead_p (x, needed, note, insn)
3656 register RTX_CODE code = GET_CODE (x);
3660 register rtx r = SET_SRC (x);
3661 if (GET_CODE (r) == REG)
3663 rtx call = XEXP (note, 0);
3667 /* Find the call insn. */
3668 while (call != insn && GET_CODE (call) != CALL_INSN)
3669 call = NEXT_INSN (call);
3671 /* If there is none, do nothing special,
3672 since ordinary death handling can understand these insns. */
3676 /* See if the hard reg holding the value is dead.
3677 If this is a PARALLEL, find the call within it. */
3678 call_pat = PATTERN (call);
3679 if (GET_CODE (call_pat) == PARALLEL)
3681 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3682 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
3683 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
3686 /* This may be a library call that is returning a value
3687 via invisible pointer. Do nothing special, since
3688 ordinary death handling can understand these insns. */
3692 call_pat = XVECEXP (call_pat, 0, i);
3695 return insn_dead_p (call_pat, needed, 1, REG_NOTES (call));
3701 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3702 live at function entry. Don't count global register variables, variables
3703 in registers that can be used for function arg passing, or variables in
3704 fixed hard registers. */
3707 regno_uninitialized (regno)
3710 if (n_basic_blocks == 0
3711 || (regno < FIRST_PSEUDO_REGISTER
3712 && (global_regs[regno]
3713 || fixed_regs[regno]
3714 || FUNCTION_ARG_REGNO_P (regno))))
3717 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3720 /* 1 if register REGNO was alive at a place where `setjmp' was called
3721 and was set more than once or is an argument.
3722 Such regs may be clobbered by `longjmp'. */
3725 regno_clobbered_at_setjmp (regno)
3728 if (n_basic_blocks == 0)
3731 return ((REG_N_SETS (regno) > 1
3732 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3733 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
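#if 0
/* Illustrative user-level example of the hazard this function reports
   (an assumed example, not compiled here).  V is live at the setjmp via
   the second return and is set again before the potential longjmp, so
   regno_clobbered_at_setjmp would flag its register: if V were kept in
   a call-clobbered hard reg, the longjmp could hand back a clobbered
   value.  This is what drives the "might be clobbered by `longjmp'"
   warning.  */
#include <setjmp.h>
static jmp_buf env;
extern void may_longjmp ();

static int
example (n)
     int n;
{
  int v = 1;
  if (setjmp (env) == 0)
    {
      v = n;			/* modified between setjmp and longjmp */
      may_longjmp ();		/* may return through the setjmp above */
    }
  return v;
}
#endif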
3736 /* INSN references memory, possibly using autoincrement addressing modes.
3737 Find any entries on the mem_set_list that need to be invalidated due
3738 to an address change. */
3740 invalidate_mems_from_autoinc (insn)
3743 rtx note;
3744 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3746 if (REG_NOTE_KIND (note) == REG_INC)
3748 rtx temp = mem_set_list;
3749 rtx prev = NULL_RTX;
3754 next = XEXP (temp, 1);
3755 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
3757 /* Splice temp out of list. */
3759 XEXP (prev, 1) = next;
3761 mem_set_list = next;
3762 free_EXPR_LIST_node (temp);
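/* For example, scanning backwards, after

       (set (mem:SI (reg:SI 100)) (reg:SI 2))

   the MEM is remembered on mem_set_list, and an earlier

       (set (mem:SI (reg:SI 100)) (reg:SI 1))

   is recognized as a dead store -- unless a REG_INC note for
   (reg:SI 100) intervenes, in which case the walk above drops the
   entry, since the address no longer refers to the same location.  */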
3772 /* Process the registers that are set within X. Their bits are set to
3773 1 in the regset DEAD, because they are dead prior to this insn.
3775 If INSN is nonzero, it is the insn being processed.
3777 FLAGS is the set of operations to perform. */
3780 mark_set_regs (needed, dead, x, insn, significant, flags)
3788 register RTX_CODE code = GET_CODE (x);
3790 if (code == SET || code == CLOBBER)
3791 mark_set_1 (needed, dead, x, insn, significant, flags);
3792 else if (code == PARALLEL)
3795 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3797 code = GET_CODE (XVECEXP (x, 0, i));
3798 if (code == SET || code == CLOBBER)
3799 mark_set_1 (needed, dead, XVECEXP (x, 0, i), insn,
3800 significant, flags);
3805 /* Process a single SET rtx, X. */
3808 mark_set_1 (needed, dead, x, insn, significant, flags)
3816 register int regno = -1;
3817 register rtx reg = SET_DEST (x);
3819 /* Some targets place small structures in registers for
3820 return values of functions. We have to detect this
3821 case specially here to get correct flow information. */
3822 if (GET_CODE (reg) == PARALLEL
3823 && GET_MODE (reg) == BLKmode)
3827 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
3828 mark_set_1 (needed, dead, XVECEXP (reg, 0, i), insn,
3829 significant, flags);
3833 /* Modifying just one hardware register of a multi-reg value
3834 or just a byte field of a register
3835 does not mean the value from before this insn is now dead.
3836 But it does mean liveness of that register at the end of the block
3839 Within mark_set_1, however, we treat it as if the register is
3840 indeed modified. mark_used_regs will, however, also treat this
3841 register as being used. Thus, we treat these insns as setting a
3842 new value for the register as a function of its old value. This
3843 causes LOG_LINKS to be made appropriately, and this will help combine.
3845 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
3846 || GET_CODE (reg) == SIGN_EXTRACT
3847 || GET_CODE (reg) == STRICT_LOW_PART)
3848 reg = XEXP (reg, 0);
3850 /* If this set is a MEM, then it kills any aliased writes.
3851 If this set is a REG, then it kills any MEMs which use the reg. */
3852 if (flags & PROP_SCAN_DEAD_CODE)
3854 if (GET_CODE (reg) == MEM
3855 || GET_CODE (reg) == REG)
3857 rtx temp = mem_set_list;
3858 rtx prev = NULL_RTX;
3863 next = XEXP (temp, 1);
3864 if ((GET_CODE (reg) == MEM
3865 && output_dependence (XEXP (temp, 0), reg))
3866 || (GET_CODE (reg) == REG
3867 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
3869 /* Splice this entry out of the list. */
3871 XEXP (prev, 1) = next;
3873 mem_set_list = next;
3874 free_EXPR_LIST_node (temp);
3882 /* If the memory reference had embedded side effects (autoincrement
3883 address modes), then we may need to kill some entries on the memory set list. */
3885 if (insn && GET_CODE (reg) == MEM)
3886 invalidate_mems_from_autoinc (insn);
3888 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
3889 /* We do not know the size of a BLKmode store, so we do not track
3890 them for redundant store elimination. */
3891 && GET_MODE (reg) != BLKmode
3892 /* There are no REG_INC notes for SP, so we can't assume we'll see
3893 everything that invalidates it. To be safe, don't eliminate any
3894 stores through SP; none of them should be redundant anyway. */
3895 && ! reg_mentioned_p (stack_pointer_rtx, reg))
3896 mem_set_list = alloc_EXPR_LIST (0, reg, mem_set_list);
3899 if (GET_CODE (reg) == REG
3900 && (regno = REGNO (reg),
3901 ! (regno == FRAME_POINTER_REGNUM
3902 && (! reload_completed || frame_pointer_needed)))
3903 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3904 && ! (regno == HARD_FRAME_POINTER_REGNUM
3905 && (! reload_completed || frame_pointer_needed))
3907 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3908 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3910 && ! (regno < FIRST_PSEUDO_REGISTER && global_regs[regno]))
3911 /* && regno != STACK_POINTER_REGNUM) -- let's try without this. */
3913 int some_needed = REGNO_REG_SET_P (needed, regno);
3914 int some_not_needed = ! some_needed;
3916 /* Mark it as a significant register for this basic block. */
3918 SET_REGNO_REG_SET (significant, regno);
3920 /* Mark it as dead before this insn. */
3921 SET_REGNO_REG_SET (dead, regno);
3923 /* A hard reg in a wide mode may really be multiple registers.
3924 If so, mark all of them just like the first. */
3925 if (regno < FIRST_PSEUDO_REGISTER)
3929 /* Nothing below is needed for the stack pointer; get out ASAP.
3930 E.g., log links aren't needed, since combine won't use them. */
3931 if (regno == STACK_POINTER_REGNUM)
3934 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
3937 int regno_n = regno + n;
3938 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
3940 SET_REGNO_REG_SET (significant, regno_n);
3942 SET_REGNO_REG_SET (dead, regno_n);
3943 some_needed |= needed_regno;
3944 some_not_needed |= ! needed_regno;
3948 /* Additional data to record if this is the final pass. */
3949 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
3950 | PROP_DEATH_NOTES | PROP_AUTOINC))
3953 register int blocknum = BLOCK_NUM (insn);
3956 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3957 y = reg_next_use[regno];
3959 /* If this is a hard reg, record that this function uses the reg. */
3961 if (regno < FIRST_PSEUDO_REGISTER)
3964 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
3966 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3967 for (i = regno; i < endregno; i++)
3969 /* The next use is no longer "next", since a store intervenes. */
3971 reg_next_use[i] = 0;
3974 if (flags & PROP_REG_INFO)
3975 for (i = regno; i < endregno; i++)
3977 regs_ever_live[i] = 1;
3983 /* The next use is no longer "next", since a store intervenes. */
3985 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3986 reg_next_use[regno] = 0;
3988 /* Keep track of which basic blocks each reg appears in. */
3990 if (flags & PROP_REG_INFO)
3992 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
3993 REG_BASIC_BLOCK (regno) = blocknum;
3994 else if (REG_BASIC_BLOCK (regno) != blocknum)
3995 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
3997 /* Count (weighted) references, stores, etc. This counts a
3998 register twice if it is modified, but that is correct. */
3999 REG_N_SETS (regno)++;
4000 REG_N_REFS (regno) += loop_depth + 1;
4002 /* The insns where a reg is live are normally counted
4003 elsewhere, but we want the count to include the insn
4004 where the reg is set, and the normal counting mechanism
4005 would not count it. */
4006 REG_LIVE_LENGTH (regno)++;
4010 if (! some_not_needed)
4012 if (flags & PROP_LOG_LINKS)
4014 /* Make a logical link from the next following insn
4015 that uses this register, back to this insn.
4016 The following insns have already been processed.
4018 We don't build a LOG_LINK for hard registers contained
4019 in ASM_OPERANDs. If these registers get replaced,
4020 we might wind up changing the semantics of the insn,
4021 even if reload can make what appear to be valid
4022 assignments later. */
4023 if (y && (BLOCK_NUM (y) == blocknum)
4024 && (regno >= FIRST_PSEUDO_REGISTER
4025 || asm_noperands (PATTERN (y)) < 0))
4026 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4029 else if (! some_needed)
4031 if (flags & PROP_REG_INFO)
4032 REG_N_DEATHS (REGNO (reg))++;
4034 if (flags & PROP_DEATH_NOTES)
4036 /* Note that dead stores have already been deleted
4037 when possible. If we get here, we have found a
4038 dead store that cannot be eliminated (because the
4039 same insn does something useful). Indicate this
4040 by marking the reg being set as dying here. */
4042 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4047 if (flags & PROP_DEATH_NOTES)
4049 /* This is a case where we have a multi-word hard register
4050 and some, but not all, of the words of the register are
4051 needed in subsequent insns. Write REG_UNUSED notes
4052 for those parts that were not needed. This case should
4057 for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
4059 if (!REGNO_REG_SET_P (needed, regno + i))
4063 gen_rtx_REG (reg_raw_mode[regno + i], regno + i),
4069 else if (GET_CODE (reg) == REG)
4071 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4072 reg_next_use[regno] = 0;
4075 /* If this is the last pass and this is a SCRATCH, show it will be dying
4076 here and count it. */
4077 else if (GET_CODE (reg) == SCRATCH)
4079 if (flags & PROP_DEATH_NOTES)
4081 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4087 /* X is a MEM found in INSN. See if we can convert it into an auto-increment address. */
4091 find_auto_inc (needed, x, insn)
4096 rtx addr = XEXP (x, 0);
4097 HOST_WIDE_INT offset = 0;
4100 /* Here we detect use of an index register which might be good for
4101 postincrement, postdecrement, preincrement, or predecrement. */
4103 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4104 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
4106 if (GET_CODE (addr) == REG)
4109 register int size = GET_MODE_SIZE (GET_MODE (x));
4112 int regno = REGNO (addr);
4114 /* Is the next use an increment that might make auto-increment? */
4115 if ((incr = reg_next_use[regno]) != 0
4116 && (set = single_set (incr)) != 0
4117 && GET_CODE (set) == SET
4118 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
4119 /* Can't add side effects to jumps; if reg is spilled and
4120 reloaded, there's no way to store back the altered value. */
4121 && GET_CODE (insn) != JUMP_INSN
4122 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
4123 && XEXP (y, 0) == addr
4124 && GET_CODE (XEXP (y, 1)) == CONST_INT
4125 && ((HAVE_POST_INCREMENT
4126 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
4127 || (HAVE_POST_DECREMENT
4128 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
4129 || (HAVE_PRE_INCREMENT
4130 && (INTVAL (XEXP (y, 1)) == size && offset == size))
4131 || (HAVE_PRE_DECREMENT
4132 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
4133 /* Make sure this reg appears only once in this insn. */
4134 && (use = find_use_as_address (PATTERN (insn), addr, offset),
4135 use != 0 && use != (rtx) 1))
4137 rtx q = SET_DEST (set);
4138 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
4139 ? (offset ? PRE_INC : POST_INC)
4140 : (offset ? PRE_DEC : POST_DEC));
4142 if (dead_or_set_p (incr, addr))
4144 /* This is the simple case. Try to make the auto-inc. If
4145 we can't, we are done. Otherwise, we will do any
4146 needed updates below. */
4147 if (! validate_change (insn, &XEXP (x, 0),
4148 gen_rtx_fmt_e (inc_code, Pmode, addr),
4152 else if (GET_CODE (q) == REG
4153 /* PREV_INSN used here to check the semi-open interval
4155 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4156 /* We must also check for sets of q as q may be
4157 a call clobbered hard register and there may
4158 be a call between PREV_INSN (insn) and incr. */
4159 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4161 /* We have *p followed sometime later by q = p+size.
4162 Both p and q must be live afterward,
4163 and q is not used between INSN and its assignment.
4164 Change it to q = p, ...*q..., q = q+size.
4165 Then fall into the usual case. */
4170 emit_move_insn (q, addr);
4171 insns = get_insns ();
4174 bb = BLOCK_FOR_INSN (insn);
4175 for (temp = insns; temp; temp = NEXT_INSN (temp))
4176 set_block_for_insn (temp, bb);
4178 /* If we can't make the auto-inc, or can't make the
4179 replacement into Y, exit. There's no point in making
4180 the change below if we can't do the auto-inc and doing
4181 so is not correct in the pre-inc case. */
4183 validate_change (insn, &XEXP (x, 0),
4184 gen_rtx_fmt_e (inc_code, Pmode, q),
4186 validate_change (incr, &XEXP (y, 0), q, 1);
4187 if (! apply_change_group ())
4190 /* We now know we'll be doing this change, so emit the
4191 new insn(s) and do the updates. */
4192 emit_insns_before (insns, insn);
4194 if (BLOCK_FOR_INSN (insn)->head == insn)
4195 BLOCK_FOR_INSN (insn)->head = insns;
4197 /* INCR will become a NOTE and INSN won't contain a
4198 use of ADDR. If a use of ADDR was just placed in
4199 the insn before INSN, make that the next use.
4200 Otherwise, invalidate it. */
4201 if (GET_CODE (PREV_INSN (insn)) == INSN
4202 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4203 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
4204 reg_next_use[regno] = PREV_INSN (insn);
4206 reg_next_use[regno] = 0;
4211 /* REGNO is now used in INCR which is below INSN, but
4212 it previously wasn't live here. If we don't mark
4213 it as needed, we'll put a REG_DEAD note for it
4214 on this insn, which is incorrect. */
4215 SET_REGNO_REG_SET (needed, regno);
4217 /* If there are any calls between INSN and INCR, show
4218 that REGNO now crosses them. */
4219 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4220 if (GET_CODE (temp) == CALL_INSN)
4221 REG_N_CALLS_CROSSED (regno)++;
4226 /* If we haven't returned, it means we were able to make the
4227 auto-inc, so update the status. First, record that this insn
4228 has an implicit side effect. */
4231 = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
4233 /* Modify the old increment-insn to simply copy
4234 the already-incremented value of our register. */
4235 if (! validate_change (incr, &SET_SRC (set), addr, 0))
4238 /* If that makes it a no-op (copying the register into itself), delete
4239 it so it won't appear to be a "use" and a "set" of this register. */
4241 if (SET_DEST (set) == addr)
4243 PUT_CODE (incr, NOTE);
4244 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4245 NOTE_SOURCE_FILE (incr) = 0;
4248 if (regno >= FIRST_PSEUDO_REGISTER)
4250 /* Count an extra reference to the reg. When a reg is
4251 incremented, spilling it is worse, so we want to make
4252 that less likely. */
4253 REG_N_REFS (regno) += loop_depth + 1;
4255 /* Count the increment as a setting of the register,
4256 even though it isn't a SET in rtl. */
4257 REG_N_SETS (regno)++;
4262 #endif /* AUTO_INC_DEC */
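/* A sketch of the simple transformation above (register numbers
   assumed for illustration).  Before:

       (insn (set (reg:SI 3) (mem:SI (reg:SI 100))))
       (insn (set (reg:SI 100) (plus:SI (reg:SI 100) (const_int 4))))

   After, with HAVE_POST_INCREMENT:

       (insn (set (reg:SI 3) (mem:SI (post_inc:SI (reg:SI 100))))
             (expr_list:REG_INC (reg:SI 100) ...))

   and the increment insn, now (set (reg:SI 100) (reg:SI 100)),
   is deleted by turning it into a NOTE.  */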
4264 /* Scan expression X and store a 1-bit in LIVE for each reg it uses.
4265 This is done assuming the registers needed from X
4266 are those that have 1-bits in NEEDED.
4268 FLAGS is the set of enabled operations.
4270 INSN is the containing instruction. If INSN is dead, this function is not called. */
4274 mark_used_regs (needed, live, x, flags, insn)
4281 register RTX_CODE code;
4286 code = GET_CODE (x);
4306 /* If we are clobbering a MEM, mark any registers inside the address as being used. */
4308 if (GET_CODE (XEXP (x, 0)) == MEM)
4309 mark_used_regs (needed, live, XEXP (XEXP (x, 0), 0), flags, insn);
4313 /* Don't bother watching stores to mems if this is not the
4314 final pass. We'll not be deleting dead stores this round. */
4315 if (flags & PROP_SCAN_DEAD_CODE)
4317 /* Invalidate the data for the last MEM stored, but only if MEM is
4318 something that can be stored into. */
4319 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
4320 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
4321 ; /* needn't clear the memory set list */
4324 rtx temp = mem_set_list;
4325 rtx prev = NULL_RTX;
4330 next = XEXP (temp, 1);
4331 if (anti_dependence (XEXP (temp, 0), x))
4333 /* Splice temp out of the list. */
4335 XEXP (prev, 1) = next;
4337 mem_set_list = next;
4338 free_EXPR_LIST_node (temp);
4346 /* If the memory reference had embedded side effects (autoincrement
4347 address modes), then we may need to kill some entries on the memory set list. */
4350 invalidate_mems_from_autoinc (insn);
4354 if (flags & PROP_AUTOINC)
4355 find_auto_inc (needed, x, insn);
4360 if (GET_CODE (SUBREG_REG (x)) == REG
4361 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
4362 && (GET_MODE_SIZE (GET_MODE (x))
4363 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
4364 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
4366 /* While we're here, optimize this case. */
4369 /* In case the SUBREG is not of a register, don't optimize */
4370 if (GET_CODE (x) != REG)
4372 mark_used_regs (needed, live, x, flags, insn);
4376 /* ... fall through ... */
4379 /* We are seeing a register being used (rather than set)
4380 => mark it as needed. */
4384 int some_needed = REGNO_REG_SET_P (needed, regno);
4385 int some_not_needed = ! some_needed;
4387 SET_REGNO_REG_SET (live, regno);
4389 /* A hard reg in a wide mode may really be multiple registers.
4390 If so, mark all of them just like the first. */
4391 if (regno < FIRST_PSEUDO_REGISTER)
4395 /* For stack ptr or fixed arg pointer,
4396 nothing below can be necessary, so waste no more time. */
4397 if (regno == STACK_POINTER_REGNUM
4398 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4399 || (regno == HARD_FRAME_POINTER_REGNUM
4400 && (! reload_completed || frame_pointer_needed))
4402 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4403 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4405 || (regno == FRAME_POINTER_REGNUM
4406 && (! reload_completed || frame_pointer_needed)))
4408 /* If this is a register we are going to try to eliminate,
4409 don't mark it live here. If we are successful in
4410 eliminating it, it need not be live unless it is used for
4411 pseudos, in which case it will have been set live when
4412 it was allocated to the pseudos. If the register will not
4413 be eliminated, reload will set it live at that point. */
4415 if ((flags & PROP_REG_INFO)
4416 && ! TEST_HARD_REG_BIT (elim_reg_set, regno))
4417 regs_ever_live[regno] = 1;
4420 /* No death notes for global register variables;
4421 their values are live after this function exits. */
4422 if (global_regs[regno])
4424 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4425 reg_next_use[regno] = insn;
4429 n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4432 int regno_n = regno + n;
4433 int needed_regno = REGNO_REG_SET_P (needed, regno_n);
4435 SET_REGNO_REG_SET (live, regno_n);
4436 some_needed |= needed_regno;
4437 some_not_needed |= ! needed_regno;
4441 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4443 /* Record where each reg is used, so when the reg
4444 is set we know the next insn that uses it. */
4446 reg_next_use[regno] = insn;
4448 if (flags & PROP_REG_INFO)
4450 if (regno < FIRST_PSEUDO_REGISTER)
4452 /* If a hard reg is being used,
4453 record that this function does use it. */
4455 i = HARD_REGNO_NREGS (regno, GET_MODE (x));
4459 regs_ever_live[regno + --i] = 1;
4464 /* Keep track of which basic block each reg appears in. */
4466 register int blocknum = BLOCK_NUM (insn);
4468 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4469 REG_BASIC_BLOCK (regno) = blocknum;
4470 else if (REG_BASIC_BLOCK (regno) != blocknum)
4471 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4473 /* Count (weighted) number of uses of each reg. */
4475 REG_N_REFS (regno) += loop_depth + 1;
4479 /* Record and count the insns in which a reg dies.
4480 If it is used in this insn and was dead below the insn
4481 then it dies in this insn. If it was set in this insn,
4482 we do not make a REG_DEAD note; likewise if we already
4483 made such a note. */
4485 if (flags & PROP_DEATH_NOTES)
4488 && ! dead_or_set_p (insn, x)
4490 && (regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
4494 /* Check for the case where the register dying partially
4495 overlaps the register set by this insn. */
4496 if (regno < FIRST_PSEUDO_REGISTER
4497 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
4499 int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
4501 some_needed |= dead_or_set_regno_p (insn, regno + n);
4504 /* If none of the words in X is needed, make a REG_DEAD
4505 note. Otherwise, we must make partial REG_DEAD notes. */
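/* Illustrative sketch (not from the original sources; register numbers
   hypothetical): suppose a DImode value occupies hard regs 2 and 3 on
   a 32-bit target and only reg 2 is needed after this insn. Then the
   loop below attaches a REG_DEAD note for reg 3 alone, rather than one
   REG_DEAD note covering the whole multi-word register. */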
4509 = alloc_EXPR_LIST (REG_DEAD, x, REG_NOTES (insn));
4510 REG_N_DEATHS (regno)++;
4516 /* Don't make a REG_DEAD note for a part of a register
4517 that is set in the insn. */
4519 for (i = HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1;
4521 if (!REGNO_REG_SET_P (needed, regno + i)
4522 && ! dead_or_set_regno_p (insn, regno + i))
4525 (REG_DEAD, gen_rtx_REG (reg_raw_mode[regno + i],
4536 register rtx testreg = SET_DEST (x);
4539 /* If storing into MEM, don't show it as being used. But do
4540 show the address as being used. */
4541 if (GET_CODE (testreg) == MEM)
4544 if (flags & PROP_AUTOINC)
4545 find_auto_inc (needed, testreg, insn);
4547 mark_used_regs (needed, live, XEXP (testreg, 0), flags, insn);
4548 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4552 /* Storing in STRICT_LOW_PART is like storing in a reg
4553 in that this SET might be dead, so ignore it in TESTREG,
4554 but in some other ways it is like using the reg.
4556 Storing in a SUBREG or a bit field is like storing the entire
4557 register in that if the register's value is not used
4558 then this SET is not needed. */
4559 while (GET_CODE (testreg) == STRICT_LOW_PART
4560 || GET_CODE (testreg) == ZERO_EXTRACT
4561 || GET_CODE (testreg) == SIGN_EXTRACT
4562 || GET_CODE (testreg) == SUBREG)
4564 if (GET_CODE (testreg) == SUBREG
4565 && GET_CODE (SUBREG_REG (testreg)) == REG
4566 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
4567 && (GET_MODE_SIZE (GET_MODE (testreg))
4568 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
4569 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
4571 /* Modifying a single register in an alternate mode
4572 does not use any of the old value. But these other
4573 ways of storing in a register do use the old value. */
4574 if (GET_CODE (testreg) == SUBREG
4575 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4580 testreg = XEXP (testreg, 0);
4583 /* If this is a store into a register,
4584 recursively scan the value being stored. */
4586 if ((GET_CODE (testreg) == PARALLEL
4587 && GET_MODE (testreg) == BLKmode)
4588 || (GET_CODE (testreg) == REG
4589 && (regno = REGNO (testreg), ! (regno == FRAME_POINTER_REGNUM
4590 && (! reload_completed || frame_pointer_needed)))
4591 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4592 && ! (regno == HARD_FRAME_POINTER_REGNUM
4593 && (! reload_completed || frame_pointer_needed))
4595 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4596 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4599 /* We used to exclude global_regs here, but that seems wrong.
4600 Storing in them is like storing in mem. */
4602 mark_used_regs (needed, live, SET_SRC (x), flags, insn);
4604 mark_used_regs (needed, live, SET_DEST (x), flags, insn);
4611 case UNSPEC_VOLATILE:
4615 /* Traditional and volatile asm instructions must be considered to use
4616 and clobber all hard registers, all pseudo-registers and all of
4617 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
4619 Consider for instance a volatile asm that changes the fpu rounding
4620 mode. An insn should not be moved across this even if it only uses
4621 pseudo-regs because it might give an incorrectly rounded result.
4623 ?!? Unfortunately, marking all hard registers as live causes massive
4624 problems for the register allocator and marking all pseudos as live
4625 creates mountains of uninitialized variable warnings.
4627 So for now, just clear the memory set list and mark any regs
4628 we can find in ASM_OPERANDS as used. */
4629 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
4630 free_EXPR_LIST_list (&mem_set_list);
4632 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
4633 We cannot just fall through here since then we would be confused
4634 by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
4635 a traditional asm as it does in its normal usage. */
4636 if (code == ASM_OPERANDS)
4640 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
4641 mark_used_regs (needed, live, ASM_OPERANDS_INPUT (x, j),
4652 /* Recursively scan the operands of this expression. */
4655 register const char *fmt = GET_RTX_FORMAT (code);
4658 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4662 /* Tail recursive case: save a function call level. */
4668 mark_used_regs (needed, live, XEXP (x, i), flags, insn);
4670 else if (fmt[i] == 'E')
4673 for (j = 0; j < XVECLEN (x, i); j++)
4674 mark_used_regs (needed, live, XVECEXP (x, i, j), flags, insn);
4683 try_pre_increment_1 (insn)
4686 /* Find the next use of this reg. If in same basic block,
4687 make it do pre-increment or pre-decrement if appropriate. */
4688 rtx x = single_set (insn);
4689 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4690 * INTVAL (XEXP (SET_SRC (x), 1)));
4691 int regno = REGNO (SET_DEST (x));
4692 rtx y = reg_next_use[regno];
4694 && BLOCK_NUM (y) == BLOCK_NUM (insn)
4695 /* Don't do this if the reg dies, or gets set in y; a standard addressing
4696 mode would be better. */
4697 && ! dead_or_set_p (y, SET_DEST (x))
4698 && try_pre_increment (y, SET_DEST (x), amount))
4700 /* We have found a suitable auto-increment
4701 and already changed insn Y to do it.
4702 So flush this increment-instruction. */
4703 PUT_CODE (insn, NOTE);
4704 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4705 NOTE_SOURCE_FILE (insn) = 0;
4706 /* Count a reference to this reg for the increment
4707 insn we are deleting. When a reg is incremented,
4708 spilling it is worse, so we want to make that less likely. */
4710 if (regno >= FIRST_PSEUDO_REGISTER)
4712 REG_N_REFS (regno) += loop_depth + 1;
4713 REG_N_SETS (regno)++;
4720 /* Try to change INSN so that it does pre-increment or pre-decrement
4721 addressing on register REG in order to add AMOUNT to REG.
4722 AMOUNT is negative for pre-decrement.
4723 Returns 1 if the change could be made.
4724 This checks the validity of the result of modifying INSN in all respects. */
4727 try_pre_increment (insn, reg, amount)
4729 HOST_WIDE_INT amount;
4733 /* Nonzero if we can try to make a pre-increment or pre-decrement.
4734 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
4736 /* Nonzero if we can try to make a post-increment or post-decrement.
4737 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4738 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4739 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
4742 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
4745 /* From the sign of the increment, see which possibilities are conceivable
4746 on this target machine. */
4747 if (HAVE_PRE_INCREMENT && amount > 0)
4749 if (HAVE_POST_INCREMENT && amount > 0)
4752 if (HAVE_PRE_DECREMENT && amount < 0)
4754 if (HAVE_POST_DECREMENT && amount < 0)
4757 if (! (pre_ok || post_ok))
4760 /* It is not safe to add a side effect to a jump insn
4761 because if the incremented register is spilled and must be reloaded
4762 there would be no way to store the incremented value back in memory. */
4764 if (GET_CODE (insn) == JUMP_INSN)
4769 use = find_use_as_address (PATTERN (insn), reg, 0);
4770 if (post_ok && (use == 0 || use == (rtx) 1))
4772 use = find_use_as_address (PATTERN (insn), reg, -amount);
4776 if (use == 0 || use == (rtx) 1)
4779 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4782 /* See if this combination of instruction and addressing mode exists. */
4783 if (! validate_change (insn, &XEXP (use, 0),
4784 gen_rtx_fmt_e (amount > 0
4785 ? (do_post ? POST_INC : PRE_INC)
4786 : (do_post ? POST_DEC : PRE_DEC),
4790 /* Record that this insn now has an implicit side effect on REG. */
4791 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4795 #endif /* AUTO_INC_DEC */
4797 /* Find the place in the rtx X where REG is used as a memory address.
4798 Return the MEM rtx that so uses it.
4799 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4800 (plus REG (const_int PLUSCONST)).
4802 If such an address does not appear, return 0.
4803 If REG appears more than once, or is used other than in such an address, return (rtx) 1. */
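/* Illustrative example (register numbers hypothetical): given the insn
   pattern (set (reg 3) (mem (plus (reg 2) (const_int 4)))),
   find_use_as_address (pat, <reg 2>, 4) returns the MEM rtx, while
   find_use_as_address (pat, <reg 2>, 0) returns (rtx) 1 because the
   only use of reg 2 is inside a PLUS rather than directly as an
   address. */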
4807 find_use_as_address (x, reg, plusconst)
4810 HOST_WIDE_INT plusconst;
4812 enum rtx_code code = GET_CODE (x);
4813 const char *fmt = GET_RTX_FORMAT (code);
4815 register rtx value = 0;
4818 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4821 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4822 && XEXP (XEXP (x, 0), 0) == reg
4823 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4824 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4827 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4829 /* If REG occurs inside a MEM used in a bit-field reference,
4830 that is unacceptable. */
4831 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4832 return (rtx) (HOST_WIDE_INT) 1;
4836 return (rtx) (HOST_WIDE_INT) 1;
4838 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4842 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
4846 return (rtx) (HOST_WIDE_INT) 1;
4848 else if (fmt[i] == 'E')
4851 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4853 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
4857 return (rtx) (HOST_WIDE_INT) 1;
4865 /* Write information about registers and basic blocks into FILE.
4866 This is part of making a debugging dump. */
4869 dump_regset (r, outf)
4876 fputs (" (nil)", outf);
4880 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
4882 fprintf (outf, " %d", i);
4883 if (i < FIRST_PSEUDO_REGISTER)
4884 fprintf (outf, " [%s]",
4893 dump_regset (r, stderr);
4894 putc ('\n', stderr);
4898 dump_flow_info (file)
4902 static const char * const reg_class_names[] = REG_CLASS_NAMES;
4904 fprintf (file, "%d registers.\n", max_regno);
4905 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
4908 enum reg_class class, altclass;
4909 fprintf (file, "\nRegister %d used %d times across %d insns",
4910 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
4911 if (REG_BASIC_BLOCK (i) >= 0)
4912 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
4914 fprintf (file, "; set %d time%s", REG_N_SETS (i),
4915 (REG_N_SETS (i) == 1) ? "" : "s");
4916 if (REG_USERVAR_P (regno_reg_rtx[i]))
4917 fprintf (file, "; user var");
4918 if (REG_N_DEATHS (i) != 1)
4919 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
4920 if (REG_N_CALLS_CROSSED (i) == 1)
4921 fprintf (file, "; crosses 1 call");
4922 else if (REG_N_CALLS_CROSSED (i))
4923 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
4924 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
4925 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
4926 class = reg_preferred_class (i);
4927 altclass = reg_alternate_class (i);
4928 if (class != GENERAL_REGS || altclass != ALL_REGS)
4930 if (altclass == ALL_REGS || class == ALL_REGS)
4931 fprintf (file, "; pref %s", reg_class_names[(int) class]);
4932 else if (altclass == NO_REGS)
4933 fprintf (file, "; %s or none", reg_class_names[(int) class]);
4935 fprintf (file, "; pref %s, else %s",
4936 reg_class_names[(int) class],
4937 reg_class_names[(int) altclass]);
4939 if (REGNO_POINTER_FLAG (i))
4940 fprintf (file, "; pointer");
4941 fprintf (file, ".\n");
4944 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
4945 for (i = 0; i < n_basic_blocks; i++)
4947 register basic_block bb = BASIC_BLOCK (i);
4950 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d.\n",
4951 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
4953 fprintf (file, "Predecessors: ");
4954 for (e = bb->pred; e ; e = e->pred_next)
4955 dump_edge_info (file, e, 0);
4957 fprintf (file, "\nSuccessors: ");
4958 for (e = bb->succ; e ; e = e->succ_next)
4959 dump_edge_info (file, e, 1);
4961 fprintf (file, "\nRegisters live at start:");
4962 dump_regset (bb->global_live_at_start, file);
4964 fprintf (file, "\nRegisters live at end:");
4965 dump_regset (bb->global_live_at_end, file);
4976 dump_flow_info (stderr);
4980 dump_edge_info (file, e, do_succ)
4985 basic_block side = (do_succ ? e->dest : e->src);
4987 if (side == ENTRY_BLOCK_PTR)
4988 fputs (" ENTRY", file);
4989 else if (side == EXIT_BLOCK_PTR)
4990 fputs (" EXIT", file);
4992 fprintf (file, " %d", side->index);
4996 static const char * const bitnames[] = {
4997 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5000 int i, flags = e->flags;
5004 for (i = 0; flags; i++)
5005 if (flags & (1 << i))
5011 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5012 fputs (bitnames[i], file);
5014 fprintf (file, "%d", i);
5022 /* Print out one basic block with live information at start and end. */
5032 fprintf (outf, ";; Basic block %d, loop depth %d",
5033 bb->index, bb->loop_depth - 1);
5034 if (bb->eh_beg != -1 || bb->eh_end != -1)
5035 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
5038 fputs (";; Predecessors: ", outf);
5039 for (e = bb->pred; e ; e = e->pred_next)
5040 dump_edge_info (outf, e, 0);
5043 fputs (";; Registers live at start:", outf);
5044 dump_regset (bb->global_live_at_start, outf);
5047 for (insn = bb->head, last = NEXT_INSN (bb->end);
5049 insn = NEXT_INSN (insn))
5050 print_rtl_single (outf, insn);
5052 fputs (";; Registers live at end:", outf);
5053 dump_regset (bb->global_live_at_end, outf);
5056 fputs (";; Successors: ", outf);
5057 for (e = bb->succ; e; e = e->succ_next)
5058 dump_edge_info (outf, e, 1);
5066 dump_bb (bb, stderr);
5073 dump_bb (BASIC_BLOCK(n), stderr);
5076 /* Like print_rtl, but also print out live information for the start of each basic block. */
5080 print_rtl_with_bb (outf, rtx_first)
5084 register rtx tmp_rtx;
5087 fprintf (outf, "(nil)\n");
5091 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5092 int max_uid = get_max_uid ();
5093 basic_block *start = (basic_block *)
5094 xcalloc (max_uid, sizeof (basic_block));
5095 basic_block *end = (basic_block *)
5096 xcalloc (max_uid, sizeof (basic_block));
5097 enum bb_state *in_bb_p = (enum bb_state *)
5098 xcalloc (max_uid, sizeof (enum bb_state));
5100 for (i = n_basic_blocks - 1; i >= 0; i--)
5102 basic_block bb = BASIC_BLOCK (i);
5105 start[INSN_UID (bb->head)] = bb;
5106 end[INSN_UID (bb->end)] = bb;
5107 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5109 enum bb_state state = IN_MULTIPLE_BB;
5110 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
5111 state = IN_ONE_BB;
5112 in_bb_p[INSN_UID(x)] = state;
5119 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5124 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5126 fprintf (outf, ";; Start of basic block %d, registers live:",
5128 dump_regset (bb->global_live_at_start, outf);
5132 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
5133 && GET_CODE (tmp_rtx) != NOTE
5134 && GET_CODE (tmp_rtx) != BARRIER)
5135 fprintf (outf, ";; Insn is not within a basic block\n");
5136 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
5137 fprintf (outf, ";; Insn is in multiple basic blocks\n");
5139 did_output = print_rtl_single (outf, tmp_rtx);
5141 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
5142 fprintf (outf, ";; End of basic block %d\n", bb->index);
5153 if (current_function_epilogue_delay_list != 0)
5155 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
5156 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
5157 tmp_rtx = XEXP (tmp_rtx, 1))
5158 print_rtl_single (outf, XEXP (tmp_rtx, 0));
5162 /* Compute dominator relationships using new flow graph structures. */
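/* The dataflow problem solved below can be sketched as (illustrative):

       Dom (b) = { b } union ( intersect over p in pred (b) of Dom (p) )

   starting from the optimistic assumption that every block dominates
   every block, and iterating with a worklist until no Dom set changes.
   Post-dominators are the same equations over the reversed CFG. */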
5164 compute_flow_dominators (dominators, post_dominators)
5165 sbitmap *dominators;
5166 sbitmap *post_dominators;
5169 sbitmap *temp_bitmap;
5171 basic_block *worklist, *tos;
5173 /* Allocate a worklist array/queue. Entries are only added to the
5174 list if they were not already on the list. So the size is
5175 bounded by the number of basic blocks. */
5176 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block)
5179 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5180 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5184 /* The optimistic setting of dominators requires us to put every
5185 block on the work list initially. */
5186 for (bb = 0; bb < n_basic_blocks; bb++)
5188 *tos++ = BASIC_BLOCK (bb);
5189 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5192 /* We want a maximal solution, so initially assume everything dominates everything else. */
5194 sbitmap_vector_ones (dominators, n_basic_blocks);
5196 /* Mark successors of the entry block so we can identify them below. */
5197 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5198 e->dest->aux = ENTRY_BLOCK_PTR;
5200 /* Iterate until the worklist is empty. */
5201 while (tos != worklist)
5203 /* Take the first entry off the worklist. */
5204 basic_block b = *--tos;
5207 /* Compute the intersection of the dominators of all the predecessor blocks.
5210 If one of the predecessor blocks is the ENTRY block, then the
5211 intersection of the dominators of the predecessor blocks is
5212 defined as the null set. We can identify such blocks by the
5213 special value in the AUX field in the block structure. */
5214 if (b->aux == ENTRY_BLOCK_PTR)
5216 /* Do not clear the aux field for blocks which are
5217 successors of the ENTRY block. That way we never
5218 add them to the worklist again.
5220 The intersection of the dominators of the preds of this block is
5221 defined as the null set. */
5222 sbitmap_zero (temp_bitmap[bb]);
5226 /* Clear the aux field of this block so it can be added to
5227 the worklist again if necessary. */
5229 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
5232 /* Make sure each block always dominates itself. */
5233 SET_BIT (temp_bitmap[bb], bb);
5235 /* If the out state of this block changed, then we need to
5236 add the successors of this block to the worklist if they
5237 are not already on the worklist. */
5238 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
5240 for (e = b->succ; e; e = e->succ_next)
5242 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
5252 if (post_dominators)
5254 /* The optimistic setting of dominators requires us to put every
5255 block on the work list initially. */
5256 for (bb = 0; bb < n_basic_blocks; bb++)
5258 *tos++ = BASIC_BLOCK (bb);
5259 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5262 /* We want a maximal solution, so initially assume everything post
5263 dominates everything else. */
5264 sbitmap_vector_ones (post_dominators, n_basic_blocks);
5266 /* Mark predecessors of the exit block so we can identify them below. */
5267 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5268 e->src->aux = EXIT_BLOCK_PTR;
5270 /* Iterate until the worklist is empty. */
5271 while (tos != worklist)
5273 /* Take the first entry off the worklist. */
5274 basic_block b = *--tos;
5277 /* Compute the intersection of the post dominators of all the successor blocks.
5280 If one of the successor blocks is the EXIT block, then the
5281 intersection of the dominators of the successor blocks is
5282 defined as the null set. We can identify such blocks by the
5283 special value in the AUX field in the block structure. */
5284 if (b->aux == EXIT_BLOCK_PTR)
5286 /* Do not clear the aux field for blocks which are
5287 predecessors of the EXIT block. That way we never
5288 add them to the worklist again.
5290 The intersection of the dominators of the succs of this block is
5291 defined as the null set. */
5292 sbitmap_zero (temp_bitmap[bb]);
5296 /* Clear the aux field of this block so it can be added to
5297 the worklist again if necessary. */
5299 sbitmap_intersection_of_succs (temp_bitmap[bb],
5300 post_dominators, bb);
5303 /* Make sure each block always post dominates itself. */
5304 SET_BIT (temp_bitmap[bb], bb);
5306 /* If the out state of this block changed, then we need to
5307 add the predecessors of this block to the worklist if they
5308 are not already on the worklist. */
5309 if (sbitmap_a_and_b (post_dominators[bb],
5310 post_dominators[bb],
5313 for (e = b->pred; e; e = e->pred_next)
5315 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
5327 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
5330 compute_immediate_dominators (idom, dominators)
5332 sbitmap *dominators;
5337 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5339 /* Begin with tmp(n) = dom(n) - { n }. */
5340 for (b = n_basic_blocks; --b >= 0; )
5342 sbitmap_copy (tmp[b], dominators[b]);
5343 RESET_BIT (tmp[b], b);
5346 /* Subtract out all of our dominators' dominators. */
5347 for (b = n_basic_blocks; --b >= 0; )
5349 sbitmap tmp_b = tmp[b];
5352 for (s = n_basic_blocks; --s >= 0; )
5353 if (TEST_BIT (tmp_b, s))
5354 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
5357 /* Find the one bit set in the bitmap and put it in the output array. */
5358 for (b = n_basic_blocks; --b >= 0; )
5361 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
5364 sbitmap_vector_free (tmp);
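/* Illustrative example (block numbers hypothetical): for a diamond CFG
   0->1, 0->2, 1->3, 2->3, block 3 has dominators {0, 3}. Removing the
   block itself leaves tmp[3] = {0}, and block 0 has no dominators of
   its own to subtract, so idom[3] = 0. In general exactly one bit
   survives the subtraction above: the immediate dominator. */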
5367 /* Count for a single SET rtx, X. */
5370 count_reg_sets_1 (x)
5374 register rtx reg = SET_DEST (x);
5376 /* Find the register that's set/clobbered. */
5377 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
5378 || GET_CODE (reg) == SIGN_EXTRACT
5379 || GET_CODE (reg) == STRICT_LOW_PART)
5380 reg = XEXP (reg, 0);
5382 if (GET_CODE (reg) == PARALLEL
5383 && GET_MODE (reg) == BLKmode)
5386 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
5387 count_reg_sets_1 (XVECEXP (reg, 0, i));
5391 if (GET_CODE (reg) == REG)
5393 regno = REGNO (reg);
5394 if (regno >= FIRST_PSEUDO_REGISTER)
5396 /* Count (weighted) references, stores, etc. This counts a
5397 register twice if it is modified, but that is correct. */
5398 REG_N_SETS (regno)++;
5399 REG_N_REFS (regno) += loop_depth + 1;
5404 /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
5405 REG_N_REFS by the current loop depth for each SET or CLOBBER found. */
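/* For example (pseudo register numbers hypothetical), scanning

       (parallel [(set (reg 65) ...) (clobber (reg 66))])

   increments REG_N_SETS once for pseudo 65 and once for pseudo 66, and
   adds loop_depth + 1 to each pseudo's REG_N_REFS. */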
5411 register RTX_CODE code = GET_CODE (x);
5413 if (code == SET || code == CLOBBER)
5414 count_reg_sets_1 (x);
5415 else if (code == PARALLEL)
5418 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5420 code = GET_CODE (XVECEXP (x, 0, i));
5421 if (code == SET || code == CLOBBER)
5422 count_reg_sets_1 (XVECEXP (x, 0, i));
5427 /* Increment REG_N_REFS by the current loop depth for each register reference found in X. */
5431 count_reg_references (x)
5434 register RTX_CODE code;
5437 code = GET_CODE (x);
5457 /* If we are clobbering a MEM, mark any registers inside the address as being used. */
5459 if (GET_CODE (XEXP (x, 0)) == MEM)
5460 count_reg_references (XEXP (XEXP (x, 0), 0));
5464 /* While we're here, optimize this case. */
5467 /* In case the SUBREG is not of a register, don't optimize. */
5468 if (GET_CODE (x) != REG)
5470 count_reg_references (x);
5474 /* ... fall through ... */
5477 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
5478 REG_N_REFS (REGNO (x)) += loop_depth + 1;
5483 register rtx testreg = SET_DEST (x);
5486 /* If storing into MEM, don't show it as being used. But do
5487 show the address as being used. */
5488 if (GET_CODE (testreg) == MEM)
5490 count_reg_references (XEXP (testreg, 0));
5491 count_reg_references (SET_SRC (x));
5495 /* Storing in STRICT_LOW_PART is like storing in a reg
5496 in that this SET might be dead, so ignore it in TESTREG,
5497 but in some other ways it is like using the reg.
5499 Storing in a SUBREG or a bit field is like storing the entire
5500 register in that if the register's value is not used
5501 then this SET is not needed. */
5502 while (GET_CODE (testreg) == STRICT_LOW_PART
5503 || GET_CODE (testreg) == ZERO_EXTRACT
5504 || GET_CODE (testreg) == SIGN_EXTRACT
5505 || GET_CODE (testreg) == SUBREG)
5507 /* Modifying a single register in an alternate mode
5508 does not use any of the old value. But these other
5509 ways of storing in a register do use the old value. */
5510 if (GET_CODE (testreg) == SUBREG
5511 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5516 testreg = XEXP (testreg, 0);
5519 /* If this is a store into a register,
5520 recursively scan the value being stored. */
5522 if ((GET_CODE (testreg) == PARALLEL
5523 && GET_MODE (testreg) == BLKmode)
5524 || GET_CODE (testreg) == REG)
5526 count_reg_references (SET_SRC (x));
5528 count_reg_references (SET_DEST (x));
5538 /* Recursively scan the operands of this expression. */
5541 register const char *fmt = GET_RTX_FORMAT (code);
5544 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5548 /* Tail recursive case: save a function call level. */
5554 count_reg_references (XEXP (x, i));
5556 else if (fmt[i] == 'E')
5559 for (j = 0; j < XVECLEN (x, i); j++)
5560 count_reg_references (XVECEXP (x, i, j));
5566 /* Recompute register set/reference counts immediately prior to register allocation.
5569 This avoids problems with set/reference counts changing to/from values
5570 which have special meanings to the register allocators.
5572 Additionally, the reference counts are the primary component used by the
5573 register allocators to prioritize pseudos for allocation to hard regs.
5574 More accurate reference counts generally lead to better register allocation.
5576 F is the first insn to be scanned.
5578 LOOP_STEP denotes how much loop_depth should be incremented per
5579 loop nesting level in order to increase the ref count more for
5580 references in a loop.
5582 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
5583 possibly other information which is used by the register allocators. */
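/* As an illustrative example of the weighting below: a pseudo
   referenced once outside any loop and once at loop depth 2 ends up
   with REG_N_REFS == (0 + 1) + (2 + 1) == 4, so the allocators will
   favor it over a pseudo with the same number of references that all
   occur outside loops. */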
5586 recompute_reg_usage (f, loop_step)
5587 rtx f ATTRIBUTE_UNUSED;
5588 int loop_step ATTRIBUTE_UNUSED;
5594 /* Clear out the old data. */
5595 max_reg = max_reg_num ();
5596 for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
5602 /* Scan each insn in the chain and count how many times each register is set/used. */
5604 for (index = 0; index < n_basic_blocks; index++)
5606 basic_block bb = BASIC_BLOCK (index);
5607 loop_depth = bb->loop_depth;
5608 for (insn = bb->head; insn; insn = NEXT_INSN (insn))
5610 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5614 /* This call will increment REG_N_SETS for each SET or CLOBBER
5615 of a register in INSN. It will also increment REG_N_REFS
5616 by the loop depth for each set of a register in INSN. */
5617 count_reg_sets (PATTERN (insn));
5619 /* count_reg_sets does not detect autoincrement address modes, so
5620 detect them here by looking at the notes attached to INSN. */
5621 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
5623 if (REG_NOTE_KIND (links) == REG_INC)
5624 /* Count (weighted) references, stores, etc. This counts a
5625 register twice if it is modified, but that is correct. */
5626 REG_N_SETS (REGNO (XEXP (links, 0)))++;
5629 /* This call will increment REG_N_REFS by the current loop depth for
5630 each reference to a register in INSN. */
5631 count_reg_references (PATTERN (insn));
5633 /* count_reg_references will not include counts for arguments to
5634 function calls, so detect them here by examining the
5635 CALL_INSN_FUNCTION_USAGE data. */
5636 if (GET_CODE (insn) == CALL_INSN)
5640 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5642 note = XEXP (note, 1))
5643 if (GET_CODE (XEXP (note, 0)) == USE)
5644 count_reg_references (XEXP (XEXP (note, 0), 0));
5647 if (insn == bb->end)
5653 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
5654 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
5655 of the number of registers that died. */
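/* Illustrative usage: count_or_remove_death_notes (NULL, 1) would
   strip the death notes from every basic block in the function and
   return how many registers (counting each word of a multi-word hard
   register) had REG_DEAD notes. */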
5658 count_or_remove_death_notes (blocks, kill)
5664 for (i = n_basic_blocks - 1; i >= 0; --i)
5669 if (blocks && ! TEST_BIT (blocks, i))
5672 bb = BASIC_BLOCK (i);
5674 for (insn = bb->head; ; insn = NEXT_INSN (insn))
5676 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5678 rtx *pprev = &REG_NOTES (insn);
5683 switch (REG_NOTE_KIND (link))
5686 if (GET_CODE (XEXP (link, 0)) == REG)
5688 rtx reg = XEXP (link, 0);
5691 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
5694 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
5702 rtx next = XEXP (link, 1);
5703 free_EXPR_LIST_node (link);
5704 *pprev = link = next;
5710 pprev = &XEXP (link, 1);
5717 if (insn == bb->end)
5725 /* Record INSN's block as BB. */
5728 set_block_for_insn (insn, bb)
5732 size_t uid = INSN_UID (insn);
5733 if (uid >= basic_block_for_insn->num_elements)
5737 /* Add one-eighth the size so we don't keep calling xrealloc. */
5738 new_size = uid + (uid + 7) / 8;
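/* E.g. (hypothetical figure) a uid of 800 grows the array to
   800 + 807/8 == 900 elements, i.e. roughly 9/8 of the size needed,
   so repeated small growths are amortized. */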
5740 VARRAY_GROW (basic_block_for_insn, new_size);
5742 VARRAY_BB (basic_block_for_insn, uid) = bb;
5745 /* Record INSN's block number as BB. */
5746 /* ??? This has got to go. */
5749 set_block_num (insn, bb)
5753 set_block_for_insn (insn, BASIC_BLOCK (bb));
5756 /* Verify the CFG consistency. This function checks some CFG invariants and
5757 aborts when something is wrong. The hope is that this function will help
5758 to convert many optimization passes so that they preserve a consistent CFG.
5760 Currently it does the following checks:
5762 - test head/end pointers
5763 - overlapping of basic blocks
5764 - edge list correctness
5765 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
5766 - tails of basic blocks (ensure that the boundary is necessary)
5767 - scans body of the basic block for JUMP_INSN, CODE_LABEL
5768 and NOTE_INSN_BASIC_BLOCK
5769 - check that all insns are in the basic blocks
5770 (except the switch handling code, barriers and notes)
5772 In the future it can be extended to check a lot of other stuff as well
5773 (reachability of basic blocks, life information, etc. etc.). */
5778 const int max_uid = get_max_uid ();
5779 const rtx rtx_first = get_insns ();
5780 basic_block *bb_info;
5784 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
5786 /* The first pass checks head/end pointers and sets the bb_info array used by the later checks. */
5788 for (i = n_basic_blocks - 1; i >= 0; i--)
5790 basic_block bb = BASIC_BLOCK (i);
5792 /* Check the head pointer and make sure that it is pointing into the insn list. */
5794 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
5799 error ("Head insn %d for block %d not found in the insn stream.",
5800 INSN_UID (bb->head), bb->index);
5804 /* Check the end pointer and make sure that it is pointing into the insn list. */
5806 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5808 if (bb_info[INSN_UID (x)] != NULL)
5810 error ("Insn %d is in multiple basic blocks (%d and %d)",
5811 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
5814 bb_info[INSN_UID (x)] = bb;
5821 error ("End insn %d for block %d not found in the insn stream.",
5822 INSN_UID (bb->end), bb->index);
5827 /* Now check the basic blocks (boundaries etc.) */
5828 for (i = n_basic_blocks - 1; i >= 0; i--)
5830 basic_block bb = BASIC_BLOCK (i);
5831 /* Check correctness of edge lists. */
5839 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
5841 fprintf (stderr, "Predecessor: ");
5842 dump_edge_info (stderr, e, 0);
5843 fprintf (stderr, "\nSuccessor: ");
5844 dump_edge_info (stderr, e, 1);
5848 if (e->dest != EXIT_BLOCK_PTR)
5850 edge e2 = e->dest->pred;
5851 while (e2 && e2 != e)
5855 error ("Basic block %i edge lists are corrupted", bb->index);
5867 error ("Basic block %d pred edge is corrupted", bb->index);
5868 fputs ("Predecessor: ", stderr);
5869 dump_edge_info (stderr, e, 0);
5870 fputs ("\nSuccessor: ", stderr);
5871 dump_edge_info (stderr, e, 1);
5872 fputc ('\n', stderr);
5875 if (e->src != ENTRY_BLOCK_PTR)
5877 edge e2 = e->src->succ;
5878 while (e2 && e2 != e)
5882 error ("Basic block %i edge lists are corrupted", bb->index);
5889 /* OK, pointers are correct. Now check the header of the basic
5890 block. It ought to contain an optional CODE_LABEL followed
5891 by a NOTE_INSN_BASIC_BLOCK note. */
5893 if (GET_CODE (x) == CODE_LABEL)
5897 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
5903 if (GET_CODE (x) != NOTE
5904 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
5905 || NOTE_BASIC_BLOCK (x) != bb)
5907 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
5914 /* Do checks for empty blocks here. */
5921 if (GET_CODE (x) == NOTE
5922 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
5924 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
5925 INSN_UID (x), bb->index);
5932 if (GET_CODE (x) == JUMP_INSN
5933 || GET_CODE (x) == CODE_LABEL
5934 || GET_CODE (x) == BARRIER)
5936 error ("In basic block %d:", bb->index);
5937 fatal_insn ("Flow control insn inside a basic block", x);
5948 if (!bb_info[INSN_UID (x)])
5950 switch (GET_CODE (x))
5957 /* An addr_vec is placed outside any basic block. */
5959 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
5960 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
5961 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
5966 /* But in any case, non-deletable labels can appear anywhere. */
5970 fatal_insn ("Insn outside basic block", x);
5984 /* Functions to access an edge list with a vector representation.
5985 Enough data is kept such that given an index number, the
5986 pred and succ that the edge represents can be determined, or
5987 given a pred and a succ, its index number can be returned.
5988 This allows algorithms which consume a lot of memory to
5989 represent the normally full matrix of edge (pred,succ) with a
5990 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
5991 wasted space in the client code due to sparse flow graphs. */
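/* A client pass might use the representation like this (an
   illustrative sketch; my_sbitmap is hypothetical):

       struct edge_list *elist = create_edge_list ();
       int i = EDGE_INDEX (elist, bb_from, bb_to);
       if (i != EDGE_INDEX_NO_EDGE)
         SET_BIT (my_sbitmap, i);
       free_edge_list (elist);

   where my_sbitmap needs only NUM_EDGES (elist) bits instead of a
   full block-by-block matrix. */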
5993 /* This function initializes the edge list. Basically the entire
5994 flowgraph is processed, and all edges are assigned a number,
5995 and the data structure is filled in. */
5999 struct edge_list *elist;
6005 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6009 /* Determine the number of edges in the flow graph by counting successor
6010 edges on each basic block. */
6011 for (x = 0; x < n_basic_blocks; x++)
6013 basic_block bb = BASIC_BLOCK (x);
6015 for (e = bb->succ; e; e = e->succ_next)
6018 /* Don't forget successors of the entry block. */
6019 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6022 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6023 elist->num_blocks = block_count;
6024 elist->num_edges = num_edges;
6025 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6029 /* Follow successors of the entry block, and register these edges. */
6030 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6032 elist->index_to_edge[num_edges] = e;
6036 for (x = 0; x < n_basic_blocks; x++)
6038 basic_block bb = BASIC_BLOCK (x);
6040 /* Follow all successors of blocks, and register these edges. */
6041 for (e = bb->succ; e; e = e->succ_next)
6043 elist->index_to_edge[num_edges] = e;
6050 /* This function frees the memory associated with an edge list. */
6052 free_edge_list (elist)
6053 struct edge_list *elist;
6057 free (elist->index_to_edge);
6062 /* This function provides debug output showing an edge list. */
6064 print_edge_list (f, elist)
6066 struct edge_list *elist;
6069 fprintf(f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6070 elist->num_blocks - 2, elist->num_edges);
6072 for (x = 0; x < elist->num_edges; x++)
6074 fprintf (f, " %-4d - edge(", x);
6075 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6076 fprintf (f,"entry,");
6078 fprintf (f,"%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6080 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6081 fprintf (f,"exit)\n");
6083 fprintf (f,"%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6087 /* This function provides an internal consistency check of an edge list,
6088 verifying that all edges are present, and that there are no spurious edges. */
6091 verify_edge_list (f, elist)
6093 struct edge_list *elist;
6095 int x, pred, succ, index;
6098 for (x = 0; x < n_basic_blocks; x++)
6100 basic_block bb = BASIC_BLOCK (x);
6102 for (e = bb->succ; e; e = e->succ_next)
6104 pred = e->src->index;
6105 succ = e->dest->index;
6106 index = EDGE_INDEX (elist, e->src, e->dest);
6107 if (index == EDGE_INDEX_NO_EDGE)
6109 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6112 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6113 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6114 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6115 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6116 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6117 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6120 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6122 pred = e->src->index;
6123 succ = e->dest->index;
6124 index = EDGE_INDEX (elist, e->src, e->dest);
6125 if (index == EDGE_INDEX_NO_EDGE)
6127 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6130 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6131 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6132 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6133 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6134 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6135 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6137 /* We've verified that all the edges are in the list, now let's make sure
6138 there are no spurious edges in the list. */
6140 for (pred = 0 ; pred < n_basic_blocks; pred++)
6141 for (succ = 0 ; succ < n_basic_blocks; succ++)
6143 basic_block p = BASIC_BLOCK (pred);
6144 basic_block s = BASIC_BLOCK (succ);
6148 for (e = p->succ; e; e = e->succ_next)
6154 for (e = s->pred; e; e = e->pred_next)
6160 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6161 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6162 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6164 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6165 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6166 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6167 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6168 BASIC_BLOCK (succ)));
6170 for (succ = 0 ; succ < n_basic_blocks; succ++)
6172 basic_block p = ENTRY_BLOCK_PTR;
6173 basic_block s = BASIC_BLOCK (succ);
6177 for (e = p->succ; e; e = e->succ_next)
6183 for (e = s->pred; e; e = e->pred_next)
6189 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6190 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6191 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6193 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6194 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6195 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6196 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6197 BASIC_BLOCK (succ)));
6199 for (pred = 0 ; pred < n_basic_blocks; pred++)
6201 basic_block p = BASIC_BLOCK (pred);
6202 basic_block s = EXIT_BLOCK_PTR;
6206 for (e = p->succ; e; e = e->succ_next)
6212 for (e = s->pred; e; e = e->pred_next)
6218 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6219 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6220 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6222 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6223 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6224 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6225 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6230 /* This routine will determine what, if any, edge there is between
6231 a specified predecessor and successor. */
6234 find_edge_index (edge_list, pred, succ)
6235 struct edge_list *edge_list;
6236 basic_block pred, succ;
6239 for (x = 0; x < NUM_EDGES (edge_list); x++)
6241 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6242 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6245 return (EDGE_INDEX_NO_EDGE);
6248 /* This function will remove an edge from the flow graph. */
6253 edge last_pred = NULL;
6254 edge last_succ = NULL;
6256 basic_block src, dest;
6259 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6265 last_succ->succ_next = e->succ_next;
6267 src->succ = e->succ_next;
6269 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6275 last_pred->pred_next = e->pred_next;
6277 dest->pred = e->pred_next;
6283 /* This routine will remove any fake successor edges for a basic block.
6284 When the edge is removed, it is also removed from whatever predecessor list it is in. */
6287 remove_fake_successors (bb)
6291 for (e = bb->succ; e ; )
6295 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6300 /* This routine will remove all fake edges from the flow graph. If
6301 we remove all fake successors, it will automatically remove all
6302 fake predecessors. */
6304 remove_fake_edges ()
6308 for (x = 0; x < n_basic_blocks; x++)
6309 remove_fake_successors (BASIC_BLOCK (x));
6311 /* We've handled all successors except the entry block's. */
6312 remove_fake_successors (ENTRY_BLOCK_PTR);
6315 /* This function will add a fake edge between any block which has no
6316 successors, and the exit block. Some data flow equations require these edges to exist. */
6319 add_noreturn_fake_exit_edges ()
6323 for (x = 0; x < n_basic_blocks; x++)
6324 if (BASIC_BLOCK (x)->succ == NULL)
6325 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
6328 /* Dump the list of basic blocks in the bitmap NODES. */
6330 flow_nodes_print (str, nodes, file)
6332 const sbitmap nodes;
6337 fprintf (file, "%s { ", str);
6338 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
6339 fputs ("}\n", file);
6343 /* Dump the list of exiting edges in the array EDGES. */
6345 flow_exits_print (str, edges, num_edges, file)
6353 fprintf (file, "%s { ", str);
6354 for (i = 0; i < num_edges; i++)
6355 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
6356 fputs ("}\n", file);
6360 /* Dump loop related CFG information. */
6362 flow_loops_cfg_dump (loops, file)
6363 const struct loops *loops;
6368 if (! loops->num || ! file || ! loops->cfg.dom)
6371 for (i = 0; i < n_basic_blocks; i++)
6375 fprintf (file, ";; %d succs { ", i);
6376 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
6377 fprintf (file, "%d ", succ->dest->index);
6378 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
6382 /* Dump the DFS node order. */
6383 if (loops->cfg.dfs_order)
6385 fputs (";; DFS order: ", file);
6386 for (i = 0; i < n_basic_blocks; i++)
6387 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
6393 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
6395 flow_loop_nested_p (outer, loop)
6399 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
6403 /* Dump the loop information specified by LOOPS to the stream FILE. */
6405 flow_loops_dump (loops, file, verbose)
6406 const struct loops *loops;
6413 num_loops = loops->num;
6414 if (! num_loops || ! file)
6417 fprintf (file, ";; %d loops found, %d levels\n",
6418 num_loops, loops->levels);
6420 for (i = 0; i < num_loops; i++)
6422 struct loop *loop = &loops->array[i];
6424 fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
6425 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
6426 loop->header->index, loop->latch->index,
6427 loop->pre_header ? loop->pre_header->index : -1,
6428 loop->depth, loop->level,
6429 (long) (loop->outer ? (loop->outer - loops->array) : -1));
6430 fprintf (file, ";; %d", loop->num_nodes);
6431 flow_nodes_print (" nodes", loop->nodes, file);
6432 fprintf (file, ";; %d", loop->num_exits);
6433 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
6439 for (j = 0; j < i; j++)
6441 struct loop *oloop = &loops->array[j];
6443 if (loop->header == oloop->header)
6448 smaller = loop->num_nodes < oloop->num_nodes;
6450 /* If the union of LOOP and OLOOP is different than
6451 the larger of LOOP and OLOOP then LOOP and OLOOP
6452 must be disjoint. */
6453 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
6454 smaller ? oloop : loop);
6455 fprintf (file, ";; loop header %d shared by loops %d, %d %s\n",
6456 loop->header->index, i, j,
6457 disjoint ? "disjoint" : "nested");
6464 /* Print diagnostics to compare our concept of a loop with
6465 what the loop notes say. */
6466 if (GET_CODE (PREV_INSN (loop->first->head)) != NOTE
6467 || NOTE_LINE_NUMBER (PREV_INSN (loop->first->head))
6468 != NOTE_INSN_LOOP_BEG)
6469 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
6470 INSN_UID (PREV_INSN (loop->first->head)));
6471 if (GET_CODE (NEXT_INSN (loop->last->end)) != NOTE
6472 || NOTE_LINE_NUMBER (NEXT_INSN (loop->last->end))
6473 != NOTE_INSN_LOOP_END)
6474 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
6475 INSN_UID (NEXT_INSN (loop->last->end)));
6480 flow_loops_cfg_dump (loops, file);
6484 /* Free all the memory allocated for LOOPS. */
6486 flow_loops_free (loops)
6487 struct loops *loops;
6496 /* Free the loop descriptors. */
6497 for (i = 0; i < loops->num; i++)
6499 struct loop *loop = &loops->array[i];
6502 sbitmap_free (loop->nodes);
6506 free (loops->array);
6507 loops->array = NULL;
6510 sbitmap_vector_free (loops->cfg.dom);
6511 if (loops->cfg.dfs_order)
6512 free (loops->cfg.dfs_order);
6514 sbitmap_free (loops->shared_headers);
6519 /* Find the exits from the loop using the bitmap of loop nodes NODES
6520 and store in EXITS array. Return the number of exits from the loop. */
6523 flow_loop_exits_find (nodes, exits)
6524 const sbitmap nodes;
6533 /* Check all nodes within the loop to see if there are any
6534 successors not in the loop. Note that a node may have multiple exit edges. */
6537 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6538 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6540 basic_block dest = e->dest;
6542 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6550 *exits = (edge *) xmalloc (num_exits * sizeof (edge *));
6552 /* Store all exiting edges into an array. */
6554 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6555 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6557 basic_block dest = e->dest;
6559 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6560 (*exits)[num_exits++] = e;
6568 /* Find the nodes contained within the loop with header HEADER and
6569 latch LATCH and store in NODES. Return the number of nodes within the loop. */
6572 flow_loop_nodes_find (header, latch, nodes)
6581 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
6584 /* Start with only the loop header in the set of loop nodes. */
6585 sbitmap_zero (nodes);
6586 SET_BIT (nodes, header->index);
6588 header->loop_depth++;
6590 /* Push the loop latch on to the stack. */
6591 if (! TEST_BIT (nodes, latch->index))
6593 SET_BIT (nodes, latch->index);
6594 latch->loop_depth++;
6596 stack[sp++] = latch;
6605 for (e = node->pred; e; e = e->pred_next)
6607 basic_block ancestor = e->src;
6609 /* If this ancestor has not already been marked as part of the loop,
6610 add it to the set of loop nodes and push it onto the stack. */
6611 if (ancestor != ENTRY_BLOCK_PTR
6612 && ! TEST_BIT (nodes, ancestor->index))
6614 SET_BIT (nodes, ancestor->index);
6615 ancestor->loop_depth++;
6617 stack[sp++] = ancestor;
6626 /* Compute the depth first search order and store in the array
6627 DFS_ORDER, marking the nodes visited in VISITED. Returns the
6628 number of nodes visited. */
6630 flow_depth_first_order_compute (dfs_order)
6639 /* Allocate stack for back-tracking up CFG. */
6640 stack = (edge *) xmalloc (n_basic_blocks * sizeof (edge));
6643 /* Allocate bitmap to track nodes that have been visited. */
6644 visited = sbitmap_alloc (n_basic_blocks);
6646 /* None of the nodes in the CFG have been visited yet. */
6647 sbitmap_zero (visited);
6649 /* Start with the first successor edge from the entry block. */
6650 e = ENTRY_BLOCK_PTR->succ;
6653 basic_block src = e->src;
6654 basic_block dest = e->dest;
6656 /* Mark that we have visited this node. */
6657 if (src != ENTRY_BLOCK_PTR)
6658 SET_BIT (visited, src->index);
6660 /* If this node has not been visited before, push the current
6661 edge on to the stack and proceed with the first successor
6662 edge of this node. */
6663 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6671 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6674 /* DEST has no successors (for example, a non-returning
6675 function is called) so do not push the current edge
6676 but carry on with its next successor. */
6677 dfs_order[dest->index] = n_basic_blocks - ++dfsnum;
6678 SET_BIT (visited, dest->index);
6681 while (! e->succ_next && src != ENTRY_BLOCK_PTR)
6683 dfs_order[src->index] = n_basic_blocks - ++dfsnum;
6685 /* Pop edge off stack. */
6693 sbitmap_free (visited);
6695 /* The number of nodes visited should not be greater than n_basic_blocks. */
6697 if (dfsnum > n_basic_blocks)
6700 /* There are some nodes left in the CFG that are unreachable. */
6701 if (dfsnum < n_basic_blocks)
6707 /* Return the block for the pre-header of the loop with header
6708 HEADER where DOM specifies the dominator information. Return NULL if
6709 there is no pre-header. */
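/* Illustrative example (block numbers hypothetical): with edges
   0->1, 1->2 and 2->1, the loop {1, 2} is headed by block 1. Block 2
   is dominated by the header, so block 0 is the only predecessor of
   the header that the header does not dominate, and hence block 0 is
   the pre-header. */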
6711 flow_loop_pre_header_find (header, dom)
6715 basic_block pre_header;
6718 /* If block p is a predecessor of the header and is the only block
6719 that the header does not dominate, then it is the pre-header. */
6721 for (e = header->pred; e; e = e->pred_next)
6723 basic_block node = e->src;
6725 if (node != ENTRY_BLOCK_PTR
6726 && ! TEST_BIT (dom[node->index], header->index))
6728 if (pre_header == NULL)
6732 /* There are multiple edges into the header from outside
6733 the loop so there is no pre-header block. */
6743 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
6744 previously added. The insertion algorithm assumes that the loops
6745 are added in the order found by a depth first search of the CFG. */
6747 flow_loop_tree_node_add (prevloop, loop)
6748 struct loop *prevloop;
6752 if (flow_loop_nested_p (prevloop, loop))
6754 prevloop->inner = loop;
6755 loop->outer = prevloop;
6759 while (prevloop->outer)
6761 if (flow_loop_nested_p (prevloop->outer, loop))
6763 prevloop->next = loop;
6764 loop->outer = prevloop->outer;
6767 prevloop = prevloop->outer;
6770 prevloop->next = loop;
6775 /* Build the loop hierarchy tree for LOOPS. */
6777 flow_loops_tree_build (loops)
6778 struct loops *loops;
6783 num_loops = loops->num;
6787 /* Root the loop hierarchy tree with the first loop found.
6788 Since we used a depth first search this should be the outermost loop. */
6790 loops->tree = &loops->array[0];
6791 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
6793 /* Add the remaining loops to the tree. */
6794 for (i = 1; i < num_loops; i++)
6795 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
6799 /* Helper function to compute loop nesting depth and enclosed loop level
6800 for the natural loop specified by LOOP at the loop depth DEPTH.
6801 Returns the loop level. */
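/* For instance, in a doubly nested loop (illustrative), the outer loop
   gets depth 1 and level 2 while the inner loop gets depth 2 and
   level 1: depth counts enclosing loops from the outside in, level is
   the height of the loop in the loop tree. */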
6803 flow_loop_level_compute (loop, depth)
6813 /* Traverse loop tree assigning depth and computing level as the
6814 maximum level of all the inner loops of this loop. The loop
6815 level is equivalent to the height of the loop in the loop tree
6816 and corresponds to the number of enclosed loop levels (including itself). */
6818 for (inner = loop->inner; inner; inner = inner->next)
6822 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
6827 loop->level = level;
6828 loop->depth = depth;
6833 /* Compute the loop nesting depth and enclosed loop level for the loop
6834 hierarchy tree specified by LOOPS. Return the maximum enclosed loop level. */
6838 flow_loops_level_compute (loops)
6839 struct loops *loops;
6845 /* Traverse all the outer level loops. */
6846 for (loop = loops->tree; loop; loop = loop->next)
6848 level = flow_loop_level_compute (loop, 1);
6856 /* Find all the natural loops in the function and save in LOOPS structure
6857 and recalculate loop_depth information in basic block structures.
6858 Return the number of natural loops found. */
6861 flow_loops_find (loops)
6862 struct loops *loops;
6873 loops->array = NULL;
6877 /* Taking care of this degenerate case makes the rest of
6878 this code simpler. */
6879 if (n_basic_blocks == 0)
6882 /* Compute the dominators. */
6883 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
6884 compute_flow_dominators (dom, NULL);
6886 /* Count the number of loop edges (back edges). This should be the
6887 same as the number of natural loops. Also clear each block's loop_depth;
6888 as we work from inner->outer in a loop nest we call
6889 flow_loop_nodes_find, which will increment loop_depth for nodes
6890 within the current loop, which happens to enclose inner loops. */
6893 for (b = 0; b < n_basic_blocks; b++)
6895 BASIC_BLOCK (b)->loop_depth = 0;
6896 for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
6898 basic_block latch = e->src;
6900 /* Look for back edges where a predecessor is dominated
6901 by this block. A natural loop has a single entry
6902 node (header) that dominates all the nodes in the
6903 loop. It also has a single back edge to the header
6904 from a latch node. Note that multiple natural loops
6905 may share the same header. */
6906 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
6913 /* Compute depth first search order of the CFG so that outer
6914 natural loops will be found before inner natural loops. */
6915 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
6916 flow_depth_first_order_compute (dfs_order);
6918 /* Allocate loop structures. */
6920 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
6922 headers = sbitmap_alloc (n_basic_blocks);
6923 sbitmap_zero (headers);
6925 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
6926 sbitmap_zero (loops->shared_headers);
6928 /* Find and record information about all the natural loops in the CFG. */
6931 for (b = 0; b < n_basic_blocks; b++)
6935 /* Search the nodes of the CFG in DFS order so that we can find
6936 outer loops first. */
6937 header = BASIC_BLOCK (dfs_order[b]);
6939 /* Look for all the possible latch blocks for this header. */
6940 for (e = header->pred; e; e = e->pred_next)
6942 basic_block latch = e->src;
6944 /* Look for back edges where a predecessor is dominated
6945 by this block. A natural loop has a single entry
6946 node (header) that dominates all the nodes in the
6947 loop. It also has a single back edge to the header
6948 from a latch node. Note that multiple natural loops
6949 may share the same header. */
6950 if (latch != ENTRY_BLOCK_PTR
6951 && TEST_BIT (dom[latch->index], header->index))
6955 loop = loops->array + num_loops;
6957 loop->header = header;
6958 loop->latch = latch;
6960 /* Keep track of blocks that are loop headers so
6961 that we can tell which loops should be merged. */
6962 if (TEST_BIT (headers, header->index))
6963 SET_BIT (loops->shared_headers, header->index);
6964 SET_BIT (headers, header->index);
6966 /* Find nodes contained within the loop. */
6967 loop->nodes = sbitmap_alloc (n_basic_blocks);
6969 = flow_loop_nodes_find (header, latch, loop->nodes);
6971 /* Compute first and last blocks within the loop.
6972 These are often the same as the loop header and
6973 loop latch respectively, but this is not always the case. */
6976 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
6978 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
6980 /* Find edges which exit the loop. Note that a node
6981 may have several exit edges. */
6983 = flow_loop_exits_find (loop->nodes, &loop->exits);
6985 /* Look to see if the loop has a pre-header node. */
6987 = flow_loop_pre_header_find (header, dom);
6994 /* Natural loops with shared headers may either be disjoint or
6995 nested. Disjoint loops with shared headers cannot be inner
6996 loops and should be merged. For now just mark loops that share headers. */
6998 for (i = 0; i < num_loops; i++)
6999 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
7000 loops->array[i].shared = 1;
7002 sbitmap_free (headers);
7005 loops->num = num_loops;
7007 /* Save CFG derived information to avoid recomputing it. */
7008 loops->cfg.dom = dom;
7009 loops->cfg.dfs_order = dfs_order;
7011 /* Build the loop hierarchy tree. */
7012 flow_loops_tree_build (loops);
7014 /* Assign the loop nesting depth and enclosed loop level for each loop. */
7016 loops->levels = flow_loops_level_compute (loops);
/* Return non-zero if edge E enters header of LOOP from outside of LOOP.  */

int
flow_loop_outside_edge_p (loop, e)
     const struct loop *loop;
     edge e;
{
  if (e->dest != loop->header)
    abort ();
  return (e->src == ENTRY_BLOCK_PTR)
    || ! TEST_BIT (loop->nodes, e->src->index);
}
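
/* A usage sketch (illustrative only; count_loop_entry_edges is not part
   of this file): every predecessor edge of a loop header has the header
   as its destination, so flow_loop_outside_edge_p may be applied to each
   of them to count the edges that enter the loop from outside.  */
#if 0
static int
count_loop_entry_edges (loop)
     const struct loop *loop;
{
  edge e;
  int num_entries = 0;

  for (e = loop->header->pred; e; e = e->pred_next)
    if (flow_loop_outside_edge_p (loop, e))
      num_entries++;
  return num_entries;
}
#endif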
typedef struct reorder_block_def {
  int flags;
  int index;
  basic_block add_jump;
  edge succ;
  rtx end;
  int block_begin;
  int block_end;
} *reorder_block_def;
#define REORDER_BLOCK_HEAD	0x1
#define REORDER_BLOCK_VISITED	0x2
#define REORDER_MOVED_BLOCK_END 0x3

#define REORDER_BLOCK_FLAGS(bb) \
  ((reorder_block_def) (bb)->aux)->flags

#define REORDER_BLOCK_INDEX(bb) \
  ((reorder_block_def) (bb)->aux)->index

#define REORDER_BLOCK_ADD_JUMP(bb) \
  ((reorder_block_def) (bb)->aux)->add_jump

#define REORDER_BLOCK_SUCC(bb) \
  ((reorder_block_def) (bb)->aux)->succ

#define REORDER_BLOCK_OLD_END(bb) \
  ((reorder_block_def) (bb)->aux)->end

#define REORDER_BLOCK_BEGIN(bb) \
  ((reorder_block_def) (bb)->aux)->block_begin

#define REORDER_BLOCK_END(bb) \
  ((reorder_block_def) (bb)->aux)->block_end
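
/* These accessors assume that BB->aux points at a zero-initialized
   struct reorder_block_def.  reorder_basic_blocks below sets this up
   for every block before any of the macros are used:

	bb->aux = xcalloc (1, sizeof (struct reorder_block_def));  */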
static int reorder_index;
static basic_block reorder_last_visited;

#define REORDER_SKIP_BEFORE	0x1
#define REORDER_SKIP_AFTER	0x2
#define REORDER_SKIP_BLOCK_END	0x3
/* Skip over insns BEFORE or AFTER BB which are typically associated with
   basic block BB.  */

static rtx
skip_insns_between_block (bb, skip_type)
     basic_block bb;
     int skip_type;
{
  rtx insn, last_insn;

  if (skip_type == REORDER_SKIP_BEFORE)
    {
      if (bb == ENTRY_BLOCK_PTR)
	return 0;
      last_insn = bb->head;
      for (insn = PREV_INSN (bb->head);
	   insn && insn != BASIC_BLOCK (bb->index - 1)->end;
	   last_insn = insn, insn = PREV_INSN (insn))
	{
	  if (NEXT_INSN (insn) != last_insn)
	    break;
	  if (GET_CODE (insn) == NOTE
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_LOOP_END
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK
	      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END)
	    continue;
	  break;
	}
    }
  else if (skip_type == REORDER_SKIP_AFTER
	   || skip_type == REORDER_SKIP_BLOCK_END)
    {
      last_insn = bb->end;
      if (bb == EXIT_BLOCK_PTR)
	return 0;
      for (insn = NEXT_INSN (bb->end);
	   insn;
	   last_insn = insn, insn = NEXT_INSN (insn))
	{
	  if (bb->index + 1 != n_basic_blocks
	      && insn == BASIC_BLOCK (bb->index + 1)->head)
	    break;
	  if (GET_CODE (insn) == BARRIER
	      || GET_CODE (insn) == JUMP_INSN
	      || (GET_CODE (insn) == NOTE
		  && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
		      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)))
	    continue;
	  if (GET_CODE (insn) == CODE_LABEL
	      && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
	      && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
		  || GET_CODE (PATTERN
			       (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
	    {
	      insn = NEXT_INSN (insn);
	      continue;
	    }
	  break;
	}

      if (skip_type == REORDER_SKIP_BLOCK_END)
	{
	  int found_block_end = 0;

	  for (; insn; last_insn = insn, insn = NEXT_INSN (insn))
	    {
	      if (bb->index + 1 != n_basic_blocks
		  && insn == BASIC_BLOCK (bb->index + 1)->head)
		break;
	      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
		{
		  found_block_end = 1;
		  continue;
		}
	      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
		continue;
	      if (GET_CODE (insn) == NOTE
		  && NOTE_LINE_NUMBER (insn) >= 0
		  && NEXT_INSN (insn)
		  && NOTE_LINE_NUMBER (NEXT_INSN (insn)) == NOTE_INSN_BLOCK_END)
		continue;
	      break;
	    }

	  if (! found_block_end)
	    last_insn = 0;
	}
    }
  else
    last_insn = 0;

  return last_insn;
}
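
/* As an example of what the REORDER_SKIP_AFTER case skips over: after a
   block ending in a jump, the insn stream may continue with a BARRIER,
   block-structure NOTEs such as NOTE_INSN_BLOCK_END or
   NOTE_INSN_LOOP_END, and a CODE_LABEL followed by an ADDR_VEC or
   ADDR_DIFF_VEC jump table, before the head of the next basic block is
   reached.  All of these belong with BB rather than its successor.  */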
/* Return common destination for blocks BB0 and BB1.  */

static basic_block
get_common_dest (bb0, bb1)
     basic_block bb0, bb1;
{
  edge e0, e1;

  for (e0 = bb0->succ; e0; e0 = e0->succ_next)
    for (e1 = bb1->succ; e1; e1 = e1->succ_next)
      if (e0->dest == e1->dest)
	return e0->dest;

  return 0;
}
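
/* For example, in the diamond

	     0
	    / \
	   1   2
	    \ /
	     3

   get_common_dest (BASIC_BLOCK (1), BASIC_BLOCK (2)) returns block 3,
   the join block shared by the then and else arms.  */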
/* Move the destination block for edge E after chain end block CEB.
   Adding jumps and labels is deferred until fixup_reorder_chain.  */

static basic_block
chain_reorder_blocks (e, ceb)
     edge e;
     basic_block ceb;
{
  basic_block sb = e->src;
  basic_block db = e->dest;
  rtx cebe_insn, cebbe_insn, dbh_insn, dbe_insn;
  edge ee, last_edge;

  enum cond_types {NO_COND, PREDICT_THEN_WITH_ELSE, PREDICT_ELSE,
		   PREDICT_THEN_NO_ELSE, PREDICT_NOT_THEN_NO_ELSE};
  enum cond_types cond_type;
  enum cond_block_types {NO_COND_BLOCK, THEN_BLOCK, ELSE_BLOCK,
			 NO_ELSE_BLOCK};
  enum cond_block_types cond_block_type;

  if (rtl_dump_file)
    fprintf (rtl_dump_file,
	     "Edge from basic block %d to basic block %d last visited %d\n",
	     sb->index, db->index, ceb->index);

  dbh_insn = skip_insns_between_block (db, REORDER_SKIP_BEFORE);
  cebe_insn = skip_insns_between_block (ceb, REORDER_SKIP_AFTER);
  cebbe_insn = skip_insns_between_block (ceb, REORDER_SKIP_BLOCK_END);

  {
    int block_begins = 0;
    rtx insn;

    for (insn = dbh_insn; insn && insn != db->end; insn = NEXT_INSN (insn))
      {
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
	  {
	    block_begins += 1;
	    break;
	  }
      }
    REORDER_BLOCK_BEGIN (sb) = block_begins;
  }

  if (cebbe_insn)
    {
      int block_ends = 0;
      rtx insn;

      for (insn = cebe_insn; insn; insn = NEXT_INSN (insn))
	{
	  if (PREV_INSN (insn) == cebbe_insn)
	    break;
	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
	    {
	      block_ends += 1;
	      continue;
	    }
	}
      REORDER_BLOCK_END (ceb) = block_ends;
    }

  /* Blocks are in original order.  */
  if (sb->index == ceb->index
      && ceb->index + 1 == db->index && NEXT_INSN (cebe_insn))
    return db;

  /* Get the type of block and type of condition.  */
  cond_type = NO_COND;
  cond_block_type = NO_COND_BLOCK;
  if (GET_CODE (sb->end) == JUMP_INSN && ! simplejump_p (sb->end)
      && condjump_p (sb->end))
    {
      if (e->flags & EDGE_FALLTHRU)
	cond_block_type = THEN_BLOCK;
      else if (get_common_dest (sb->succ->dest, sb))
	cond_block_type = NO_ELSE_BLOCK;
      else
	cond_block_type = ELSE_BLOCK;

      if (sb->succ->succ_next
	  && get_common_dest (sb->succ->dest, sb))
	{
	  if (cond_block_type == THEN_BLOCK)
	    {
	      if (! (REORDER_BLOCK_FLAGS (sb->succ->succ_next->dest)
		     & REORDER_BLOCK_VISITED))
		cond_type = PREDICT_THEN_NO_ELSE;
	      else
		cond_type = PREDICT_NOT_THEN_NO_ELSE;
	    }
	  else if (cond_block_type == NO_ELSE_BLOCK)
	    {
	      if (! (REORDER_BLOCK_FLAGS (sb->succ->dest)
		     & REORDER_BLOCK_VISITED))
		cond_type = PREDICT_NOT_THEN_NO_ELSE;
	      else
		cond_type = PREDICT_THEN_NO_ELSE;
	    }
	}
      else
	{
	  if (cond_block_type == THEN_BLOCK)
	    {
	      if (! (REORDER_BLOCK_FLAGS (sb->succ->succ_next->dest)
		     & REORDER_BLOCK_VISITED))
		cond_type = PREDICT_THEN_WITH_ELSE;
	      else
		cond_type = PREDICT_ELSE;
	    }
	  else if (cond_block_type == ELSE_BLOCK
		   && sb->succ->dest != EXIT_BLOCK_PTR)
	    {
	      if (! (REORDER_BLOCK_FLAGS (sb->succ->dest)
		     & REORDER_BLOCK_VISITED))
		cond_type = PREDICT_ELSE;
	      else
		cond_type = PREDICT_THEN_WITH_ELSE;
	    }
	}
    }

  if (rtl_dump_file)
    {
      static const char * cond_type_str [] = {"not cond jump", "predict then",
					      "predict else",
					      "predict then w/o else",
					      "predict not then w/o else"};
      static const char * cond_block_type_str [] = {"not then or else block",
						    "then block",
						    "else block",
						    "then w/o else block"};

      fprintf (rtl_dump_file, " %s (looking at %s)\n",
	       cond_type_str[(int)cond_type],
	       cond_block_type_str[(int)cond_block_type]);
    }

  /* Reflect that then block will move and we'll jump to it.  */
  if (cond_block_type != THEN_BLOCK
      && (cond_type == PREDICT_ELSE
	  || cond_type == PREDICT_NOT_THEN_NO_ELSE))
    {
      if (rtl_dump_file)
	fprintf (rtl_dump_file,
		 " then jump from block %d to block %d\n",
		 sb->index, sb->succ->dest->index);

      /* Jump to reordered then block.  */
      REORDER_BLOCK_ADD_JUMP (sb) = sb->succ->dest;
    }

  /* Reflect that then block will jump back when we have no else.  */
  if (cond_block_type != THEN_BLOCK
      && cond_type == PREDICT_NOT_THEN_NO_ELSE)
    {
      for (ee = sb->succ->dest->succ;
	   ee && ! (ee->flags & EDGE_FALLTHRU);
	   ee = ee->succ_next)
	continue;

      if (ee && ! (GET_CODE (sb->succ->dest->end) == JUMP_INSN
		   && ! simplejump_p (sb->succ->dest->end)))
	REORDER_BLOCK_ADD_JUMP (sb->succ->dest) = ee->dest;
    }

  /* Reflect that else block will jump back.  */
  if (cond_block_type == ELSE_BLOCK
      && (cond_type == PREDICT_THEN_WITH_ELSE || cond_type == PREDICT_ELSE))
    {
      last_edge = db->succ;

      if (last_edge
	  && last_edge->dest != EXIT_BLOCK_PTR
	  && GET_CODE (last_edge->dest->head) == CODE_LABEL
	  && ! (GET_CODE (db->end) == JUMP_INSN))
	{
	  if (rtl_dump_file)
	    fprintf (rtl_dump_file,
		     " else jump from block %d to block %d\n",
		     db->index, last_edge->dest->index);

	  REORDER_BLOCK_ADD_JUMP (db) = last_edge->dest;
	}
    }

  /* This block's successor has already been reordered.  This can happen
     when we reorder a chain starting at a then or else.  */
  for (last_edge = db->succ;
       last_edge && ! (last_edge->flags & EDGE_FALLTHRU);
       last_edge = last_edge->succ_next)
    continue;

  if (last_edge
      && last_edge->dest != EXIT_BLOCK_PTR
      && (REORDER_BLOCK_FLAGS (last_edge->dest)
	  & REORDER_BLOCK_VISITED))
    {
      if (rtl_dump_file)
	fprintf (rtl_dump_file,
		 " end of chain jump from block %d to block %d\n",
		 db->index, last_edge->dest->index);

      REORDER_BLOCK_ADD_JUMP (db) = last_edge->dest;
    }

  dbh_insn = skip_insns_between_block (db, REORDER_SKIP_BEFORE);
  cebe_insn = skip_insns_between_block (ceb, REORDER_SKIP_AFTER);
  dbe_insn = skip_insns_between_block (db, REORDER_SKIP_AFTER);

  /* Leave behind any lexical block markers.  */
  if (debug_info_level > DINFO_LEVEL_TERSE
      && ceb->index + 1 < db->index)
    {
      rtx insn, last_insn = get_last_insn ();

      insn = NEXT_INSN (ceb->end);
      if (! insn)
	insn = REORDER_BLOCK_OLD_END (ceb);

      if (NEXT_INSN (cebe_insn) == 0)
	set_last_insn (cebe_insn);
      for (; insn && insn != db->head;
	   insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == NOTE
	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG))
	    {
	      cebe_insn = emit_note_after (NOTE_INSN_BLOCK_BEG, cebe_insn);
	      delete_insn (insn);
	    }
	  if (GET_CODE (insn) == NOTE
	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
	    {
	      cebe_insn = emit_note_after (NOTE_INSN_BLOCK_END, cebe_insn);
	      delete_insn (insn);
	    }
	}
      set_last_insn (last_insn);
    }

  /* Rechain predicted block.  */
  NEXT_INSN (cebe_insn) = dbh_insn;
  PREV_INSN (dbh_insn) = cebe_insn;

  REORDER_BLOCK_OLD_END (db) = NEXT_INSN (dbe_insn);
  if (db->index != n_basic_blocks - 1)
    NEXT_INSN (dbe_insn) = 0;

  return db;
}
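
/* Schematically, the pointer surgery above transforms the insn chain

	... cebe_insn -> old_next ...  ... dbh_insn ... dbe_insn -> old_end ...

   into

	... cebe_insn -> dbh_insn ... dbe_insn

   The insn that used to follow DB's last insn is remembered in
   REORDER_BLOCK_OLD_END (db); see its use earlier in this function for
   resuming a scan when the chain end's successor has already been
   moved away.  */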
/* Reorder blocks starting at block BB.  */

static void
make_reorder_chain (bb)
     basic_block bb;
{
  edge e;
  basic_block visited_edge = NULL;
  rtx block_end;
  int probability;

  if (bb == EXIT_BLOCK_PTR)
    return;

  /* Find the most probable block.  */
  e = bb->succ;
  block_end = bb->end;
  if (GET_CODE (block_end) == JUMP_INSN && condjump_p (block_end))
    {
      rtx note = find_reg_note (block_end, REG_BR_PROB, 0);

      if (note)
	probability = XINT (XEXP (note, 0), 0);
      else
	probability = 0;

      if (probability >= REG_BR_PROB_BASE / 2)
	e = bb->succ->succ_next;
    }

  /* Add chosen successor to chain and recurse on it.  */
  if (e && e->dest != EXIT_BLOCK_PTR
      && e->dest != e->src
      && (! (REORDER_BLOCK_FLAGS (e->dest) & REORDER_BLOCK_VISITED)
	  || (REORDER_BLOCK_FLAGS (e->dest) == REORDER_BLOCK_HEAD)))
    {
      if (! (REORDER_BLOCK_FLAGS (bb) & REORDER_BLOCK_VISITED))
	{
	  REORDER_BLOCK_FLAGS (bb) |= REORDER_BLOCK_HEAD;
	  REORDER_BLOCK_INDEX (bb) = reorder_index++;
	  REORDER_BLOCK_FLAGS (bb) |= REORDER_BLOCK_VISITED;
	}

      if (REORDER_BLOCK_FLAGS (e->dest) & REORDER_BLOCK_VISITED)
	REORDER_BLOCK_FLAGS (e->dest) &= ~REORDER_BLOCK_HEAD;

      REORDER_BLOCK_SUCC (bb) = e;

      visited_edge = e->dest;

      reorder_last_visited = chain_reorder_blocks (e, bb);

      if (e->dest
	  && ! (REORDER_BLOCK_FLAGS (e->dest)
		& REORDER_BLOCK_VISITED))
	make_reorder_chain (e->dest);
    }
  else
    {
      if (! (REORDER_BLOCK_FLAGS (bb) & REORDER_BLOCK_VISITED))
	{
	  REORDER_BLOCK_INDEX (bb) = reorder_index++;
	  REORDER_BLOCK_FLAGS (bb) |= REORDER_BLOCK_VISITED;
	}
    }

  /* Recurse on the successors.  */
  for (e = bb->succ; e; e = e->succ_next)
    {
      if (e->dest && e->dest == EXIT_BLOCK_PTR)
	continue;

      if (e->dest
	  && e->dest != e->src
	  && e->dest != visited_edge
	  && ! (REORDER_BLOCK_FLAGS (e->dest)
		& REORDER_BLOCK_VISITED))
	{
	  reorder_last_visited
	    = chain_reorder_blocks (e, reorder_last_visited);
	  make_reorder_chain (e->dest);
	}
    }
}
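
/* A worked example of the probability test above: a REG_BR_PROB note
   expresses the chance that the branch is taken as a fraction of
   REG_BR_PROB_BASE (10000), so a note value of 7000 means the branch is
   taken 70% of the time.  Since that is at least REG_BR_PROB_BASE / 2,
   the code switches from the first successor edge (assumed here to be
   the fallthrough edge) to bb->succ->succ_next, the branch edge, as the
   most probable successor to chain next.  */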
/* Fixup jumps and labels after reordering basic blocks.  */

static void
fixup_reorder_chain ()
{
  int i, j;
  rtx insn;

  /* Set the new last insn.  */
  for (i = 0;
       i < n_basic_blocks - 1
	 && REORDER_BLOCK_INDEX (BASIC_BLOCK (i)) != n_basic_blocks;
       i++)
    continue;

  for (insn = BASIC_BLOCK (i)->head;
       NEXT_INSN (insn) != 0;
       insn = NEXT_INSN (insn))
    continue;

  set_last_insn (insn);

  /* Add jumps and labels to fixup blocks.  */
  for (i = 0; i < n_basic_blocks - 1; i++)
    {
      if (REORDER_BLOCK_ADD_JUMP (BASIC_BLOCK (i)))
	{
	  rtx new_label = gen_label_rtx ();
	  rtx label_insn, jump_insn, barrier_insn;

	  label_insn = emit_label_before (new_label,
			REORDER_BLOCK_ADD_JUMP (BASIC_BLOCK (i))->head);
	  REORDER_BLOCK_ADD_JUMP (BASIC_BLOCK (i))->head = label_insn;

	  jump_insn = emit_jump_insn_after (gen_jump (label_insn),
					    BASIC_BLOCK (i)->end);
	  JUMP_LABEL (jump_insn) = label_insn;
	  ++LABEL_NUSES (label_insn);
	  barrier_insn = emit_barrier_after (jump_insn);
	  if (GET_CODE (BASIC_BLOCK (i)->end) != JUMP_INSN)
	    BASIC_BLOCK (i)->end = barrier_insn;
	  /* Add block for jump.  Typically this is when a then is not
	     predicted and we are jumping to the moved then block.  */
	  else
	    {
	      basic_block b;

	      b = (basic_block) obstack_alloc (function_obstack, sizeof (*b));
	      VARRAY_GROW (basic_block_info, ++n_basic_blocks);
	      BASIC_BLOCK (n_basic_blocks - 1) = b;
	      b->index = n_basic_blocks - 1;
	      b->head = emit_note_before (NOTE_INSN_BASIC_BLOCK, jump_insn);
	      NOTE_BASIC_BLOCK (b->head) = b;
	      b->end = barrier_insn;

	      {
		basic_block nb = BASIC_BLOCK (n_basic_blocks - 1);

		nb->global_live_at_start
		  = OBSTACK_ALLOC_REG_SET (function_obstack);
		nb->global_live_at_end
		  = OBSTACK_ALLOC_REG_SET (function_obstack);

		COPY_REG_SET (nb->global_live_at_start,
			      BASIC_BLOCK (i)->global_live_at_start);
		COPY_REG_SET (nb->global_live_at_end,
			      BASIC_BLOCK (i)->global_live_at_start);
		if (BASIC_BLOCK (i)->local_set)
		  {
		    nb->local_set
		      = OBSTACK_ALLOC_REG_SET (function_obstack);
		    COPY_REG_SET (nb->local_set, BASIC_BLOCK (i)->local_set);
		  }
		else
		  BASIC_BLOCK (nb->index)->local_set = 0;

		nb->aux = xcalloc (1, sizeof (struct reorder_block_def));
		REORDER_BLOCK_INDEX (BASIC_BLOCK (n_basic_blocks - 1))
		  = REORDER_BLOCK_INDEX (BASIC_BLOCK (i)) + 1;

		/* Relink to new block.  */
		nb->succ = BASIC_BLOCK (i)->succ;
		nb->succ->src = nb;

		make_edge (0, BASIC_BLOCK (i), nb, 0);
		BASIC_BLOCK (i)->succ->succ_next
		  = BASIC_BLOCK (i)->succ->succ_next->succ_next;
		nb->succ->succ_next = 0;

		/* Fix reorder block index to reflect new block.  */
		for (j = 0; j < n_basic_blocks - 1; j++)
		  {
		    basic_block bbj = BASIC_BLOCK (j);
		    basic_block bbi = BASIC_BLOCK (i);

		    if (REORDER_BLOCK_INDEX (bbj)
			>= REORDER_BLOCK_INDEX (bbi) + 1)
		      REORDER_BLOCK_INDEX (bbj)++;
		  }
	      }
	    }
	}
    }
}
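
/* The fixup above follows the usual RTL idiom for adding an explicit
   jump: emit a CODE_LABEL before the target, emit a JUMP_INSN to it,
   record the label in JUMP_LABEL and bump LABEL_NUSES so the label is
   not deleted as unused, and place a BARRIER after the unconditional
   jump to mark the end of the fallthrough path.  */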
/* Reorder basic blocks.  */

void
reorder_basic_blocks ()
{
  int i, j;
  struct loops loops_info;
  int num_loops;
  rtx last_insn;

  if (profile_arc_flag)
    return;
  if (n_basic_blocks <= 1)
    return;

  /* Exception edges are not currently handled.  */
  for (i = 0; i < n_basic_blocks; i++)
    {
      edge e;

      for (e = BASIC_BLOCK (i)->succ; e && ! (e->flags & EDGE_EH);
	   e = e->succ_next)
	continue;
      if (e && (e->flags & EDGE_EH))
	return;
    }

  reorder_index = 0;

  /* Find natural loops using the CFG.  */
  num_loops = flow_loops_find (&loops_info);

  /* Dump loop information.  */
  flow_loops_dump (&loops_info, rtl_dump_file, 0);

  /* Estimate using heuristics if no profiling info is available.  */
  if (! flag_branch_probabilities)
    estimate_probability (&loops_info);

  reorder_last_visited = BASIC_BLOCK (0);

  for (i = 0; i < n_basic_blocks; i++)
    BASIC_BLOCK (i)->aux = xcalloc (1, sizeof (struct reorder_block_def));

  last_insn
    = NEXT_INSN (skip_insns_between_block (BASIC_BLOCK (n_basic_blocks - 1),
					   REORDER_SKIP_AFTER));

  make_reorder_chain (BASIC_BLOCK (0));

  fixup_reorder_chain ();

#ifdef ENABLE_CHECKING
  /* Verify that the insn chain is still consistently doubly linked.  */
  {
    rtx insn, last_insn;

    last_insn = get_insns ();
    for (insn = NEXT_INSN (get_insns ());
	 insn && PREV_INSN (insn) == last_insn
	   && NEXT_INSN (PREV_INSN (insn)) == insn;
	 last_insn = insn,
	   insn = NEXT_INSN (insn))
      continue;

    if (insn)
      {
	if (rtl_dump_file)
	  fprintf (rtl_dump_file, "insn chaining error at %d\n",
		   INSN_UID (last_insn));
	abort ();
      }
  }
#endif

  /* Put basic_block_info in new order.  */
  for (i = 0; i < n_basic_blocks - 1; i++)
    {
      for (j = i; i != REORDER_BLOCK_INDEX (BASIC_BLOCK (j)); j++)
	continue;

      if (REORDER_BLOCK_INDEX (BASIC_BLOCK (j)) == i
	  && i != j)
	{
	  basic_block tempbb;
	  int temprbi;
	  int rbi = REORDER_BLOCK_INDEX (BASIC_BLOCK (j));

	  temprbi = BASIC_BLOCK (rbi)->index;
	  BASIC_BLOCK (rbi)->index = BASIC_BLOCK (j)->index;
	  BASIC_BLOCK (j)->index = temprbi;
	  tempbb = BASIC_BLOCK (rbi);
	  BASIC_BLOCK (rbi) = BASIC_BLOCK (j);
	  BASIC_BLOCK (j) = tempbb;
	}
    }

  NEXT_INSN (BASIC_BLOCK (n_basic_blocks - 1)->end) = last_insn;

  for (i = 0; i < n_basic_blocks - 1; i++)
    free (BASIC_BLOCK (i)->aux);

  /* Free loop information.  */
  flow_loops_free (&loops_info);
}
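
/* A worked example of the reordering loop above: if the chosen chain
   gives REORDER_BLOCK_INDEX values of 0, 2 and 1 for blocks 0, 1 and 2,
   then at i == 1 the inner scan finds that block 2 carries reorder
   index 1, so blocks 1 and 2 are swapped (both the basic_block_info
   entries and their index fields).  Afterwards BASIC_BLOCK (n) is again
   the n-th block in insn-stream order.  */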