1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
23 /* This file contains the data flow analysis pass of the compiler. It
24 computes data flow information which tells combine_instructions
25 which insns to consider combining and controls register allocation.
27 Additional data flow information that is too bulky to record is
28 generated during the analysis, and is used at that time to create
29 autoincrement and autodecrement addressing.
31 The first step is dividing the function into basic blocks.
32 find_basic_blocks does this. Then life_analysis determines
33 where each register is live and where it is dead.
35 ** find_basic_blocks **
37 find_basic_blocks divides the current function's rtl into basic
38 blocks and constructs the CFG. The blocks are recorded in the
39 basic_block_info array; the CFG exists in the edge structures
40 referenced by the blocks.
42 find_basic_blocks also finds any unreachable loops and deletes them.
46 life_analysis is called immediately after find_basic_blocks.
47 It uses the basic block information to determine where each
48 hard or pseudo register is live.
50 ** live-register info **
52 The information about where each register is live is in two parts:
53 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
55 basic_block->global_live_at_start has an element for each basic
56 block, and the element is a bit-vector with a bit for each hard or
57 pseudo register. The bit is 1 if the register is live at the
58 beginning of the basic block.
60 Two types of elements can be added to an insn's REG_NOTES.
61 A REG_DEAD note is added to an insn's REG_NOTES for any register
62 that meets both of two conditions: The value in the register is not
63 needed in subsequent insns and the insn does not replace the value in
64 the register (in the case of multi-word hard registers, the value in
65 each register must be replaced by the insn to avoid a REG_DEAD note).
67 In the vast majority of cases, an object in a REG_DEAD note will be
68 used somewhere in the insn. The (rare) exception to this is if an
69 insn uses a multi-word hard register and only some of the registers are
70 needed in subsequent insns. In that case, REG_DEAD notes will be
71 provided for those hard registers that are not subsequently needed.
72 Partial REG_DEAD notes of this type do not occur when an insn sets
73 only some of the hard registers used in such a multi-word operand;
74 omitting REG_DEAD notes for objects stored in an insn is optional and
75 the desire to do so does not justify the complexity of the partial
78 REG_UNUSED notes are added for each register that is set by the insn
79 but is unused subsequently (if every register set by the insn is unused
80 and the insn does not reference memory or have some other side-effect,
81 the insn is deleted instead). If only part of a multi-word hard
82 register is used in a subsequent insn, REG_UNUSED notes are made for
83 the parts that will not be used.
85 To determine which registers are live after any insn, one can
86 start from the beginning of the basic block and scan insns, noting
87 which registers are set by each insn and which die there.
89 ** Other actions of life_analysis **
91 life_analysis sets up the LOG_LINKS fields of insns because the
92 information needed to do so is readily available.
94 life_analysis deletes insns whose only effect is to store a value
97 life_analysis notices cases where a reference to a register as
98 a memory address can be combined with a preceding or following
99 incrementation or decrementation of the register. The separate
100 instruction to increment or decrement is deleted and the address
101 is changed to a POST_INC or similar rtx.
103 Each time an incrementing or decrementing address is created,
104 a REG_INC element is added to the insn's REG_NOTES list.
106 life_analysis fills in certain vectors containing information about
107 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
108 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
110 life_analysis sets current_function_sp_is_unchanging if the function
111 doesn't modify the stack pointer. */
115 Split out from life_analysis:
116 - local property discovery (bb->local_live, bb->local_set)
117 - global property computation
119 - pre/post modify transformation
127 #include "basic-block.h"
128 #include "insn-config.h"
130 #include "hard-reg-set.h"
133 #include "function.h"
137 #include "insn-flags.h"
142 #define obstack_chunk_alloc xmalloc
143 #define obstack_chunk_free free
146 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
147 the stack pointer does not matter. The value is tested only in
148 functions that have frame pointers.
149 No definition is equivalent to always zero. */
150 #ifndef EXIT_IGNORE_STACK
151 #define EXIT_IGNORE_STACK 0
154 #ifndef HAVE_epilogue
155 #define HAVE_epilogue 0
157 #ifndef HAVE_prologue
158 #define HAVE_prologue 0
160 #ifndef HAVE_sibcall_epilogue
161 #define HAVE_sibcall_epilogue 0
164 /* The contents of the current function definition are allocated
165 in this obstack, and all are freed at the end of the function.
166 For top-level functions, this is temporary_obstack.
167 Separate obstacks are made for nested functions. */
169 extern struct obstack *function_obstack;
171 /* Number of basic blocks in the current function. */
175 /* Number of edges in the current function. */
179 /* The basic block array. */
181 varray_type basic_block_info;
183 /* The special entry and exit blocks. */
185 struct basic_block_def entry_exit_blocks[2]
190 NULL, /* local_set */
191 NULL, /* global_live_at_start */
192 NULL, /* global_live_at_end */
194 ENTRY_BLOCK, /* index */
196 -1, -1 /* eh_beg, eh_end */
203 NULL, /* local_set */
204 NULL, /* global_live_at_start */
205 NULL, /* global_live_at_end */
207 EXIT_BLOCK, /* index */
209 -1, -1 /* eh_beg, eh_end */
213 /* Nonzero if the second flow pass has completed. */
216 /* Maximum register number used in this function, plus one. */
220 /* Indexed by n, giving various register information */
222 varray_type reg_n_info;
224 /* Size of the reg_n_info table. */
226 unsigned int reg_n_max;
228 /* Size of a regset for the current function,
229 in (1) bytes and (2) elements. */
234 /* Regset of regs live when calls to `setjmp'-like functions happen. */
235 /* ??? Does this exist only for the setjmp-clobbered warning message? */
237 regset regs_live_at_setjmp;
239 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
240 that have to go in the same hard reg.
241 The first two regs in the list are a pair, and the next two
242 are another pair, etc. */
245 /* Set of registers that may be eliminable. These are handled specially
246 in updating regs_ever_live. */
248 static HARD_REG_SET elim_reg_set;
250 /* The basic block structure for every insn, indexed by uid. */
252 varray_type basic_block_for_insn;
254 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
255 /* ??? Should probably be using LABEL_NUSES instead. It would take a
256 bit of surgery to be able to use or co-opt the routines in jump. */
258 static rtx label_value_list;
260 /* For use in communicating between propagate_block and its subroutines.
261 Holds all information needed to compute life and def-use information. */
263 struct propagate_block_info
265 /* The basic block we're considering. */
268 /* Bit N is set if register N is conditionally or unconditionally live. */
271 /* Element N is the next insn that uses (hard or pseudo) register N
272 within the current basic block; or zero, if there is no such insn. */
275 /* Contains a list of all the MEMs we are tracking for dead store
279 /* If non-null, record the set of registers set in the basic block. */
282 /* Non-zero if the value of CC0 is live. */
285 /* Flags controling the set of information propagate_block collects. */
289 /* Forward declarations */
290 static int count_basic_blocks PARAMS ((rtx));
291 static rtx find_basic_blocks_1 PARAMS ((rtx));
292 static void clear_edges PARAMS ((void));
293 static void make_edges PARAMS ((rtx));
294 static void make_label_edge PARAMS ((sbitmap *, basic_block,
296 static void make_eh_edge PARAMS ((sbitmap *, eh_nesting_info *,
297 basic_block, rtx, int));
298 static void mark_critical_edges PARAMS ((void));
299 static void move_stray_eh_region_notes PARAMS ((void));
300 static void record_active_eh_regions PARAMS ((rtx));
302 static void commit_one_edge_insertion PARAMS ((edge));
304 static void delete_unreachable_blocks PARAMS ((void));
305 static void delete_eh_regions PARAMS ((void));
306 static int can_delete_note_p PARAMS ((rtx));
307 static int delete_block PARAMS ((basic_block));
308 static void expunge_block PARAMS ((basic_block));
309 static int can_delete_label_p PARAMS ((rtx));
310 static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
312 static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
314 static int merge_blocks PARAMS ((edge,basic_block,basic_block));
315 static void try_merge_blocks PARAMS ((void));
316 static void tidy_fallthru_edges PARAMS ((void));
317 static int verify_wide_reg_1 PARAMS ((rtx *, void *));
318 static void verify_wide_reg PARAMS ((int, rtx, rtx));
319 static void verify_local_live_at_start PARAMS ((regset, basic_block));
320 static int set_noop_p PARAMS ((rtx));
321 static int noop_move_p PARAMS ((rtx));
322 static void delete_noop_moves PARAMS ((rtx));
323 static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
324 static void notice_stack_pointer_modification PARAMS ((rtx));
325 static void mark_reg PARAMS ((rtx, void *));
326 static void mark_regs_live_at_end PARAMS ((regset));
327 static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *));
328 static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
329 static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
330 static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
331 static void propagate_block PARAMS ((basic_block, regset,
333 static int insn_dead_p PARAMS ((struct propagate_block_info *,
335 static int libcall_dead_p PARAMS ((struct propagate_block_info *,
337 static void mark_set_regs PARAMS ((struct propagate_block_info *,
339 static void mark_set_1 PARAMS ((struct propagate_block_info *,
340 regset, rtx, rtx, rtx));
341 static int mark_set_reg PARAMS ((struct propagate_block_info *,
345 static void find_auto_inc PARAMS ((struct propagate_block_info *,
347 static int try_pre_increment_1 PARAMS ((struct propagate_block_info *,
349 static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
351 static void mark_used_reg PARAMS ((struct propagate_block_info *,
352 regset, rtx, rtx, rtx));
353 static void mark_used_regs PARAMS ((struct propagate_block_info *,
354 regset, rtx, rtx, rtx));
355 void dump_flow_info PARAMS ((FILE *));
356 void debug_flow_info PARAMS ((void));
357 static void dump_edge_info PARAMS ((FILE *, edge, int));
359 static void count_reg_sets_1 PARAMS ((rtx, int));
360 static void count_reg_sets PARAMS ((rtx, int));
361 static void count_reg_references PARAMS ((rtx, int));
362 static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
364 static void remove_fake_successors PARAMS ((basic_block));
365 static void flow_nodes_print PARAMS ((const char *, const sbitmap, FILE *));
366 static void flow_exits_print PARAMS ((const char *, const edge *, int, FILE *));
367 static void flow_loops_cfg_dump PARAMS ((const struct loops *, FILE *));
368 static int flow_loop_nested_p PARAMS ((struct loop *, struct loop *));
369 static int flow_loop_exits_find PARAMS ((const sbitmap, edge **));
370 static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
371 static int flow_depth_first_order_compute PARAMS ((int *));
372 static basic_block flow_loop_pre_header_find PARAMS ((basic_block, const sbitmap *));
373 static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
374 static void flow_loops_tree_build PARAMS ((struct loops *));
375 static int flow_loop_level_compute PARAMS ((struct loop *, int));
376 static int flow_loops_level_compute PARAMS ((struct loops *));
378 /* Find basic blocks of the current function.
379 F is the first insn of the function and NREGS the number of register
383 find_basic_blocks (f, nregs, file)
385 int nregs ATTRIBUTE_UNUSED;
386 FILE *file ATTRIBUTE_UNUSED;
390 /* Flush out existing data. */
391 if (basic_block_info != NULL)
397 /* Clear bb->aux on all extant basic blocks. We'll use this as a
398 tag for reuse during create_basic_block, just in case some pass
399 copies around basic block notes improperly. */
400 for (i = 0; i < n_basic_blocks; ++i)
401 BASIC_BLOCK (i)->aux = NULL;
403 VARRAY_FREE (basic_block_info);
406 n_basic_blocks = count_basic_blocks (f);
408 /* Size the basic block table. The actual structures will be allocated
409 by find_basic_blocks_1, since we want to keep the structure pointers
410 stable across calls to find_basic_blocks. */
411 /* ??? This whole issue would be much simpler if we called find_basic_blocks
412 exactly once, and thereafter we don't have a single long chain of
413 instructions at all until close to the end of compilation when we
414 actually lay them out. */
416 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
418 label_value_list = find_basic_blocks_1 (f);
420 /* Record the block to which an insn belongs. */
421 /* ??? This should be done another way, by which (perhaps) a label is
422 tagged directly with the basic block that it starts. It is used for
423 more than that currently, but IMO that is the only valid use. */
425 max_uid = get_max_uid ();
427 /* Leave space for insns life_analysis makes in some cases for auto-inc.
428 These cases are rare, so we don't need too much space. */
429 max_uid += max_uid / 10;
432 compute_bb_for_insn (max_uid);
434 /* Discover the edges of our cfg. */
435 record_active_eh_regions (f);
436 make_edges (label_value_list);
438 /* Do very simple cleanup now, for the benefit of code that runs between
439 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
440 tidy_fallthru_edges ();
442 mark_critical_edges ();
444 #ifdef ENABLE_CHECKING
449 /* Count the basic blocks of the function. */
452 count_basic_blocks (f)
456 register RTX_CODE prev_code;
457 register int count = 0;
459 int call_had_abnormal_edge = 0;
460 rtx prev_call = NULL_RTX;
462 prev_code = JUMP_INSN;
463 for (insn = f; insn; insn = NEXT_INSN (insn))
465 register RTX_CODE code = GET_CODE (insn);
467 if (code == CODE_LABEL
468 || (GET_RTX_CLASS (code) == 'i'
469 && (prev_code == JUMP_INSN
470 || prev_code == BARRIER
471 || (prev_code == CALL_INSN && call_had_abnormal_edge))))
476 /* Record whether this call created an edge. */
477 if (code == CALL_INSN)
479 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
480 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
482 call_had_abnormal_edge = 0;
484 /* If there is an EH region or rethrow, we have an edge. */
485 if ((eh_region && region > 0)
486 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
487 call_had_abnormal_edge = 1;
490 /* If there is a nonlocal goto label and the specified
491 region number isn't -1, we have an edge. (0 means
492 no throw, but might have a nonlocal goto). */
493 if (nonlocal_goto_handler_labels && region >= 0)
494 call_had_abnormal_edge = 1;
497 else if (code != NOTE)
498 prev_call = NULL_RTX;
502 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
504 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
509 /* The rest of the compiler works a bit smoother when we don't have to
510 check for the edge case of do-nothing functions with no basic blocks. */
513 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
520 /* Find all basic blocks of the function whose first insn is F.
522 Collect and return a list of labels whose addresses are taken. This
523 will be used in make_edges for use with computed gotos. */
526 find_basic_blocks_1 (f)
529 register rtx insn, next;
530 int call_has_abnormal_edge = 0;
532 rtx bb_note = NULL_RTX;
533 rtx eh_list = NULL_RTX;
534 rtx label_value_list = NULL_RTX;
538 /* We process the instructions in a slightly different way than we did
539 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
540 closed out the previous block, so that it gets attached at the proper
541 place. Since this form should be equivalent to the previous,
542 count_basic_blocks continues to use the old form as a check. */
544 for (insn = f; insn; insn = next)
546 enum rtx_code code = GET_CODE (insn);
548 next = NEXT_INSN (insn);
550 if (code == CALL_INSN)
552 /* Record whether this call created an edge. */
553 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
554 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
555 call_has_abnormal_edge = 0;
557 /* If there is an EH region or rethrow, we have an edge. */
558 if ((eh_list && region > 0)
559 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
560 call_has_abnormal_edge = 1;
563 /* If there is a nonlocal goto label and the specified
564 region number isn't -1, we have an edge. (0 means
565 no throw, but might have a nonlocal goto). */
566 if (nonlocal_goto_handler_labels && region >= 0)
567 call_has_abnormal_edge = 1;
575 int kind = NOTE_LINE_NUMBER (insn);
577 /* Keep a LIFO list of the currently active exception notes. */
578 if (kind == NOTE_INSN_EH_REGION_BEG)
579 eh_list = alloc_INSN_LIST (insn, eh_list);
580 else if (kind == NOTE_INSN_EH_REGION_END)
583 eh_list = XEXP (eh_list, 1);
584 free_INSN_LIST_node (t);
587 /* Look for basic block notes with which to keep the
588 basic_block_info pointers stable. Unthread the note now;
589 we'll put it back at the right place in create_basic_block.
590 Or not at all if we've already found a note in this block. */
591 else if (kind == NOTE_INSN_BASIC_BLOCK)
593 if (bb_note == NULL_RTX)
595 next = flow_delete_insn (insn);
602 /* A basic block starts at a label. If we've closed one off due
603 to a barrier or some such, no need to do it again. */
604 if (head != NULL_RTX)
606 /* While we now have edge lists with which other portions of
607 the compiler might determine a call ending a basic block
608 does not imply an abnormal edge, it will be a bit before
609 everything can be updated. So continue to emit a noop at
610 the end of such a block. */
611 if (GET_CODE (end) == CALL_INSN
612 && ! SIBLING_CALL_P (end))
614 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
615 end = emit_insn_after (nop, end);
618 create_basic_block (i++, head, end, bb_note);
625 /* A basic block ends at a jump. */
626 if (head == NULL_RTX)
630 /* ??? Make a special check for table jumps. The way this
631 happens is truly and amazingly gross. We are about to
632 create a basic block that contains just a code label and
633 an addr*vec jump insn. Worse, an addr_diff_vec creates
634 its own natural loop.
636 Prevent this bit of brain damage, pasting things together
637 correctly in make_edges.
639 The correct solution involves emitting the table directly
640 on the tablejump instruction as a note, or JUMP_LABEL. */
642 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
643 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
651 goto new_bb_inclusive;
654 /* A basic block ends at a barrier. It may be that an unconditional
655 jump already closed the basic block -- no need to do it again. */
656 if (head == NULL_RTX)
659 /* While we now have edge lists with which other portions of the
660 compiler might determine a call ending a basic block does not
661 imply an abnormal edge, it will be a bit before everything can
662 be updated. So continue to emit a noop at the end of such a
664 if (GET_CODE (end) == CALL_INSN
665 && ! SIBLING_CALL_P (end))
667 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
668 end = emit_insn_after (nop, end);
670 goto new_bb_exclusive;
673 /* A basic block ends at a call that can either throw or
674 do a non-local goto. */
675 if (call_has_abnormal_edge)
678 if (head == NULL_RTX)
683 create_basic_block (i++, head, end, bb_note);
684 head = end = NULL_RTX;
691 if (GET_RTX_CLASS (code) == 'i')
693 if (head == NULL_RTX)
700 if (GET_RTX_CLASS (code) == 'i')
704 /* Make a list of all labels referred to other than by jumps
705 (which just don't have the REG_LABEL notes).
707 Make a special exception for labels followed by an ADDR*VEC,
708 as this would be a part of the tablejump setup code.
710 Make a special exception for the eh_return_stub_label, which
711 we know isn't part of any otherwise visible control flow. */
713 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
714 if (REG_NOTE_KIND (note) == REG_LABEL)
716 rtx lab = XEXP (note, 0), next;
718 if (lab == eh_return_stub_label)
720 else if ((next = next_nonnote_insn (lab)) != NULL
721 && GET_CODE (next) == JUMP_INSN
722 && (GET_CODE (PATTERN (next)) == ADDR_VEC
723 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
727 = alloc_EXPR_LIST (0, XEXP (note, 0), label_value_list);
732 if (head != NULL_RTX)
733 create_basic_block (i++, head, end, bb_note);
735 if (i != n_basic_blocks)
738 return label_value_list;
741 /* Tidy the CFG by deleting unreachable code and whatnot. */
747 delete_unreachable_blocks ();
748 move_stray_eh_region_notes ();
749 record_active_eh_regions (f);
751 mark_critical_edges ();
753 /* Kill the data we won't maintain. */
754 label_value_list = NULL_RTX;
757 /* Create a new basic block consisting of the instructions between
758 HEAD and END inclusive. Reuses the note and basic block struct
759 in BB_NOTE, if any. */
762 create_basic_block (index, head, end, bb_note)
764 rtx head, end, bb_note;
769 && ! RTX_INTEGRATED_P (bb_note)
770 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
773 /* If we found an existing note, thread it back onto the chain. */
775 if (GET_CODE (head) == CODE_LABEL)
776 add_insn_after (bb_note, head);
779 add_insn_before (bb_note, head);
785 /* Otherwise we must create a note and a basic block structure.
786 Since we allow basic block structs in rtl, give the struct
787 the same lifetime by allocating it off the function obstack
788 rather than using malloc. */
790 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
791 memset (bb, 0, sizeof (*bb));
793 if (GET_CODE (head) == CODE_LABEL)
794 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
797 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
800 NOTE_BASIC_BLOCK (bb_note) = bb;
803 /* Always include the bb note in the block. */
804 if (NEXT_INSN (end) == bb_note)
810 BASIC_BLOCK (index) = bb;
812 /* Tag the block so that we know it has been used when considering
813 other basic block notes. */
817 /* Records the basic block struct in BB_FOR_INSN, for every instruction
818 indexed by INSN_UID. MAX is the size of the array. */
821 compute_bb_for_insn (max)
826 if (basic_block_for_insn)
827 VARRAY_FREE (basic_block_for_insn);
828 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
830 for (i = 0; i < n_basic_blocks; ++i)
832 basic_block bb = BASIC_BLOCK (i);
839 int uid = INSN_UID (insn);
841 VARRAY_BB (basic_block_for_insn, uid) = bb;
844 insn = NEXT_INSN (insn);
849 /* Free the memory associated with the edge structures. */
857 for (i = 0; i < n_basic_blocks; ++i)
859 basic_block bb = BASIC_BLOCK (i);
861 for (e = bb->succ; e ; e = n)
871 for (e = ENTRY_BLOCK_PTR->succ; e ; e = n)
877 ENTRY_BLOCK_PTR->succ = 0;
878 EXIT_BLOCK_PTR->pred = 0;
883 /* Identify the edges between basic blocks.
885 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
886 that are otherwise unreachable may be reachable with a non-local goto.
888 BB_EH_END is an array indexed by basic block number in which we record
889 the list of exception regions active at the end of the basic block. */
892 make_edges (label_value_list)
893 rtx label_value_list;
896 eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
897 sbitmap *edge_cache = NULL;
899 /* Assume no computed jump; revise as we create edges. */
900 current_function_has_computed_jump = 0;
902 /* Heavy use of computed goto in machine-generated code can lead to
903 nearly fully-connected CFGs. In that case we spend a significant
904 amount of time searching the edge lists for duplicates. */
905 if (forced_labels || label_value_list)
907 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
908 sbitmap_vector_zero (edge_cache, n_basic_blocks);
911 /* By nature of the way these get numbered, block 0 is always the entry. */
912 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
914 for (i = 0; i < n_basic_blocks; ++i)
916 basic_block bb = BASIC_BLOCK (i);
919 int force_fallthru = 0;
921 /* Examine the last instruction of the block, and discover the
922 ways we can leave the block. */
925 code = GET_CODE (insn);
928 if (code == JUMP_INSN)
932 /* ??? Recognize a tablejump and do the right thing. */
933 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
934 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
935 && GET_CODE (tmp) == JUMP_INSN
936 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
937 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
942 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
943 vec = XVEC (PATTERN (tmp), 0);
945 vec = XVEC (PATTERN (tmp), 1);
947 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
948 make_label_edge (edge_cache, bb,
949 XEXP (RTVEC_ELT (vec, j), 0), 0);
951 /* Some targets (eg, ARM) emit a conditional jump that also
952 contains the out-of-range target. Scan for these and
953 add an edge if necessary. */
954 if ((tmp = single_set (insn)) != NULL
955 && SET_DEST (tmp) == pc_rtx
956 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
957 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
958 make_label_edge (edge_cache, bb,
959 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
961 #ifdef CASE_DROPS_THROUGH
962 /* Silly VAXen. The ADDR_VEC is going to be in the way of
963 us naturally detecting fallthru into the next block. */
968 /* If this is a computed jump, then mark it as reaching
969 everything on the label_value_list and forced_labels list. */
970 else if (computed_jump_p (insn))
972 current_function_has_computed_jump = 1;
974 for (x = label_value_list; x; x = XEXP (x, 1))
975 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
977 for (x = forced_labels; x; x = XEXP (x, 1))
978 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
981 /* Returns create an exit out. */
982 else if (returnjump_p (insn))
983 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
985 /* Otherwise, we have a plain conditional or unconditional jump. */
988 if (! JUMP_LABEL (insn))
990 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
994 /* If this is a sibling call insn, then this is in effect a
995 combined call and return, and so we need an edge to the
996 exit block. No need to worry about EH edges, since we
997 wouldn't have created the sibling call in the first place. */
999 if (code == CALL_INSN && SIBLING_CALL_P (insn))
1000 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
1003 /* If this is a CALL_INSN, then mark it as reaching the active EH
1004 handler for this CALL_INSN. If we're handling asynchronous
1005 exceptions then any insn can reach any of the active handlers.
1007 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
1009 if (code == CALL_INSN || asynchronous_exceptions)
1011 /* Add any appropriate EH edges. We do this unconditionally
1012 since there may be a REG_EH_REGION or REG_EH_RETHROW note
1013 on the call, and this needn't be within an EH region. */
1014 make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);
1016 /* If we have asynchronous exceptions, do the same for *all*
1017 exception regions active in the block. */
1018 if (asynchronous_exceptions
1019 && bb->eh_beg != bb->eh_end)
1021 if (bb->eh_beg >= 0)
1022 make_eh_edge (edge_cache, eh_nest_info, bb,
1023 NULL_RTX, bb->eh_beg);
1025 for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
1026 if (GET_CODE (x) == NOTE
1027 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
1028 || NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
1030 int region = NOTE_EH_HANDLER (x);
1031 make_eh_edge (edge_cache, eh_nest_info, bb,
1036 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1038 /* ??? This could be made smarter: in some cases it's possible
1039 to tell that certain calls will not do a nonlocal goto.
1041 For example, if the nested functions that do the nonlocal
1042 gotos do not have their addresses taken, then only calls to
1043 those functions or to other nested functions that use them
1044 could possibly do nonlocal gotos. */
1045 /* We do know that a REG_EH_REGION note with a value less
1046 than 0 is guaranteed not to perform a non-local goto. */
1047 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1048 if (!note || INTVAL (XEXP (note, 0)) >= 0)
1049 for (x = nonlocal_goto_handler_labels; x ; x = XEXP (x, 1))
1050 make_label_edge (edge_cache, bb, XEXP (x, 0),
1051 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1055 /* We know something about the structure of the function __throw in
1056 libgcc2.c. It is the only function that ever contains eh_stub
1057 labels. It modifies its return address so that the last block
1058 returns to one of the eh_stub labels within it. So we have to
1059 make additional edges in the flow graph. */
1060 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
1061 make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);
1063 /* Find out if we can drop through to the next block. */
1064 insn = next_nonnote_insn (insn);
1065 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1066 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1067 else if (i + 1 < n_basic_blocks)
1069 rtx tmp = BLOCK_HEAD (i + 1);
1070 if (GET_CODE (tmp) == NOTE)
1071 tmp = next_nonnote_insn (tmp);
1072 if (force_fallthru || insn == tmp)
1073 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1077 free_eh_nesting_info (eh_nest_info);
1079 sbitmap_vector_free (edge_cache);
1082 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1083 about the edge that is accumulated between calls. */
1086 make_edge (edge_cache, src, dst, flags)
1087 sbitmap *edge_cache;
1088 basic_block src, dst;
1094 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1095 many edges to them, and we didn't allocate memory for it. */
1096 use_edge_cache = (edge_cache
1097 && src != ENTRY_BLOCK_PTR
1098 && dst != EXIT_BLOCK_PTR);
1100 /* Make sure we don't add duplicate edges. */
1101 if (! use_edge_cache || TEST_BIT (edge_cache[src->index], dst->index))
1102 for (e = src->succ; e ; e = e->succ_next)
1109 e = (edge) xcalloc (1, sizeof (*e));
1112 e->succ_next = src->succ;
1113 e->pred_next = dst->pred;
1122 SET_BIT (edge_cache[src->index], dst->index);
1125 /* Create an edge from a basic block to a label. */
1128 make_label_edge (edge_cache, src, label, flags)
1129 sbitmap *edge_cache;
1134 if (GET_CODE (label) != CODE_LABEL)
1137 /* If the label was never emitted, this insn is junk, but avoid a
1138 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1139 as a result of a syntax error and a diagnostic has already been
1142 if (INSN_UID (label) == 0)
1145 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1148 /* Create the edges generated by INSN in REGION. */
1151 make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
1152 sbitmap *edge_cache;
1153 eh_nesting_info *eh_nest_info;
1158 handler_info **handler_list;
1161 is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1162 num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
1165 make_label_edge (edge_cache, src, handler_list[num]->handler_label,
1166 EDGE_ABNORMAL | EDGE_EH | is_call);
1170 /* EH_REGION notes appearing between basic blocks is ambiguous, and even
1171 dangerous if we intend to move basic blocks around. Move such notes
1172 into the following block. */
1175 move_stray_eh_region_notes ()
1180 if (n_basic_blocks < 2)
1183 b2 = BASIC_BLOCK (n_basic_blocks - 1);
1184 for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
1186 rtx insn, next, list = NULL_RTX;
1188 b1 = BASIC_BLOCK (i);
1189 for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
1191 next = NEXT_INSN (insn);
1192 if (GET_CODE (insn) == NOTE
1193 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1194 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1196 /* Unlink from the insn chain. */
1197 NEXT_INSN (PREV_INSN (insn)) = next;
1198 PREV_INSN (next) = PREV_INSN (insn);
1201 NEXT_INSN (insn) = list;
1206 if (list == NULL_RTX)
1209 /* Find where to insert these things. */
1211 if (GET_CODE (insn) == CODE_LABEL)
1212 insn = NEXT_INSN (insn);
1216 next = NEXT_INSN (list);
1217 add_insn_after (list, insn);
1223 /* Recompute eh_beg/eh_end for each basic block. */
1226 record_active_eh_regions (f)
1229 rtx insn, eh_list = NULL_RTX;
1231 basic_block bb = BASIC_BLOCK (0);
1233 for (insn = f; insn ; insn = NEXT_INSN (insn))
1235 if (bb->head == insn)
1236 bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1238 if (GET_CODE (insn) == NOTE)
1240 int kind = NOTE_LINE_NUMBER (insn);
1241 if (kind == NOTE_INSN_EH_REGION_BEG)
1242 eh_list = alloc_INSN_LIST (insn, eh_list);
1243 else if (kind == NOTE_INSN_EH_REGION_END)
1245 rtx t = XEXP (eh_list, 1);
1246 free_INSN_LIST_node (eh_list);
1251 if (bb->end == insn)
1253 bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1255 if (i == n_basic_blocks)
1257 bb = BASIC_BLOCK (i);
1262 /* Identify critical edges and set the bits appropriately. */
1265 mark_critical_edges ()
1267 int i, n = n_basic_blocks;
1270 /* We begin with the entry block. This is not terribly important now,
1271 but could be if a front end (Fortran) implemented alternate entry
1273 bb = ENTRY_BLOCK_PTR;
1280 /* (1) Critical edges must have a source with multiple successors. */
1281 if (bb->succ && bb->succ->succ_next)
1283 for (e = bb->succ; e ; e = e->succ_next)
1285 /* (2) Critical edges must have a destination with multiple
1286 predecessors. Note that we know there is at least one
1287 predecessor -- the edge we followed to get here. */
1288 if (e->dest->pred->pred_next)
1289 e->flags |= EDGE_CRITICAL;
1291 e->flags &= ~EDGE_CRITICAL;
1296 for (e = bb->succ; e ; e = e->succ_next)
1297 e->flags &= ~EDGE_CRITICAL;
1302 bb = BASIC_BLOCK (i);
1306 /* Split a (typically critical) edge. Return the new block.
1307 Abort on abnormal edges.
1309 ??? The code generally expects to be called on critical edges.
1310 The case of a block ending in an unconditional jump to a
1311 block with multiple predecessors is not handled optimally. */
1314 split_edge (edge_in)
1317 basic_block old_pred, bb, old_succ;
1322 /* Abnormal edges cannot be split. */
1323 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1326 old_pred = edge_in->src;
1327 old_succ = edge_in->dest;
1329 /* Remove the existing edge from the destination's pred list. */
1332 for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
1334 *pp = edge_in->pred_next;
1335 edge_in->pred_next = NULL;
1338 /* Create the new structures. */
1339 bb = (basic_block) obstack_alloc (function_obstack, sizeof (*bb));
1340 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1343 memset (bb, 0, sizeof (*bb));
1344 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
1345 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
1347 /* ??? This info is likely going to be out of date very soon. */
1348 if (old_succ->global_live_at_start)
1350 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1351 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
1355 CLEAR_REG_SET (bb->global_live_at_start);
1356 CLEAR_REG_SET (bb->global_live_at_end);
1361 bb->succ = edge_out;
1364 edge_in->flags &= ~EDGE_CRITICAL;
1366 edge_out->pred_next = old_succ->pred;
1367 edge_out->succ_next = NULL;
1369 edge_out->dest = old_succ;
1370 edge_out->flags = EDGE_FALLTHRU;
1371 edge_out->probability = REG_BR_PROB_BASE;
1373 old_succ->pred = edge_out;
1375 /* Tricky case -- if there existed a fallthru into the successor
1376 (and we're not it) we must add a new unconditional jump around
1377 the new block we're actually interested in.
1379 Further, if that edge is critical, this means a second new basic
1380 block must be created to hold it. In order to simplify correct
1381 insn placement, do this before we touch the existing basic block
1382 ordering for the block we were really wanting. */
1383 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1386 for (e = edge_out->pred_next; e ; e = e->pred_next)
1387 if (e->flags & EDGE_FALLTHRU)
1392 basic_block jump_block;
1395 if ((e->flags & EDGE_CRITICAL) == 0
1396 && e->src != ENTRY_BLOCK_PTR)
1398 /* Non critical -- we can simply add a jump to the end
1399 of the existing predecessor. */
1400 jump_block = e->src;
1404 /* We need a new block to hold the jump. The simplest
1405 way to do the bulk of the work here is to recursively
1407 jump_block = split_edge (e);
1408 e = jump_block->succ;
1411 /* Now add the jump insn ... */
1412 pos = emit_jump_insn_after (gen_jump (old_succ->head),
1414 jump_block->end = pos;
1415 if (basic_block_for_insn)
1416 set_block_for_insn (pos, jump_block);
1417 emit_barrier_after (pos);
1419 /* ... let jump know that label is in use, ... */
1420 JUMP_LABEL (pos) = old_succ->head;
1421 ++LABEL_NUSES (old_succ->head);
1423 /* ... and clear fallthru on the outgoing edge. */
1424 e->flags &= ~EDGE_FALLTHRU;
1426 /* Continue splitting the interesting edge. */
1430 /* Place the new block just in front of the successor. */
1431 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1432 if (old_succ == EXIT_BLOCK_PTR)
1433 j = n_basic_blocks - 1;
1435 j = old_succ->index;
1436 for (i = n_basic_blocks - 1; i > j; --i)
1438 basic_block tmp = BASIC_BLOCK (i - 1);
1439 BASIC_BLOCK (i) = tmp;
1442 BASIC_BLOCK (i) = bb;
1445 /* Create the basic block note.
1447 Where we place the note can have a noticable impact on the generated
1448 code. Consider this cfg:
1459 If we need to insert an insn on the edge from block 0 to block 1,
1460 we want to ensure the instructions we insert are outside of any
1461 loop notes that physically sit between block 0 and block 1. Otherwise
1462 we confuse the loop optimizer into thinking the loop is a phony. */
1463 if (old_succ != EXIT_BLOCK_PTR
1464 && PREV_INSN (old_succ->head)
1465 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
1466 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
1467 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1468 PREV_INSN (old_succ->head));
1469 else if (old_succ != EXIT_BLOCK_PTR)
1470 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1472 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
1473 NOTE_BASIC_BLOCK (bb_note) = bb;
1474 bb->head = bb->end = bb_note;
1476 /* Not quite simple -- for non-fallthru edges, we must adjust the
1477 predecessor's jump instruction to target our new block. */
1478 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1480 rtx tmp, insn = old_pred->end;
1481 rtx old_label = old_succ->head;
1482 rtx new_label = gen_label_rtx ();
1484 if (GET_CODE (insn) != JUMP_INSN)
1487 /* ??? Recognize a tablejump and adjust all matching cases. */
1488 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1489 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1490 && GET_CODE (tmp) == JUMP_INSN
1491 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1492 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1497 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1498 vec = XVEC (PATTERN (tmp), 0);
1500 vec = XVEC (PATTERN (tmp), 1);
1502 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1503 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1505 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1506 --LABEL_NUSES (old_label);
1507 ++LABEL_NUSES (new_label);
1510 /* Handle casesi dispatch insns */
1511 if ((tmp = single_set (insn)) != NULL
1512 && SET_DEST (tmp) == pc_rtx
1513 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1514 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1515 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1517 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1519 --LABEL_NUSES (old_label);
1520 ++LABEL_NUSES (new_label);
1525 /* This would have indicated an abnormal edge. */
1526 if (computed_jump_p (insn))
1529 /* A return instruction can't be redirected. */
1530 if (returnjump_p (insn))
1533 /* If the insn doesn't go where we think, we're confused. */
1534 if (JUMP_LABEL (insn) != old_label)
1537 redirect_jump (insn, new_label);
1540 emit_label_before (new_label, bb_note);
1541 bb->head = new_label;
1547 /* Queue instructions for insertion on an edge between two basic blocks.
1548 The new instructions and basic blocks (if any) will not appear in the
1549 CFG until commit_edge_insertions is called. */
1552 insert_insn_on_edge (pattern, e)
1556 /* We cannot insert instructions on an abnormal critical edge.
1557 It will be easier to find the culprit if we die now. */
1558 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1559 == (EDGE_ABNORMAL|EDGE_CRITICAL))
1562 if (e->insns == NULL_RTX)
1565 push_to_sequence (e->insns);
1567 emit_insn (pattern);
1569 e->insns = get_insns ();
1573 /* Update the CFG for the instructions queued on edge E. */
1576 commit_one_edge_insertion (e)
1579 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp;
1582 /* Pull the insns off the edge now since the edge might go away. */
1584 e->insns = NULL_RTX;
1586 /* Figure out where to put these things. If the destination has
1587 one predecessor, insert there. Except for the exit block. */
1588 if (e->dest->pred->pred_next == NULL
1589 && e->dest != EXIT_BLOCK_PTR)
1593 /* Get the location correct wrt a code label, and "nice" wrt
1594 a basic block note, and before everything else. */
1596 if (GET_CODE (tmp) == CODE_LABEL)
1597 tmp = NEXT_INSN (tmp);
1598 if (GET_CODE (tmp) == NOTE
1599 && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BASIC_BLOCK)
1600 tmp = NEXT_INSN (tmp);
1601 if (tmp == bb->head)
1604 after = PREV_INSN (tmp);
1607 /* If the source has one successor and the edge is not abnormal,
1608 insert there. Except for the entry block. */
1609 else if ((e->flags & EDGE_ABNORMAL) == 0
1610 && e->src->succ->succ_next == NULL
1611 && e->src != ENTRY_BLOCK_PTR)
1614 /* It is possible to have a non-simple jump here. Consider a target
1615 where some forms of unconditional jumps clobber a register. This
1616 happens on the fr30 for example.
1618 We know this block has a single successor, so we can just emit
1619 the queued insns before the jump. */
1620 if (GET_CODE (bb->end) == JUMP_INSN)
1626 /* We'd better be fallthru, or we've lost track of what's what. */
1627 if ((e->flags & EDGE_FALLTHRU) == 0)
1634 /* Otherwise we must split the edge. */
1637 bb = split_edge (e);
1641 /* Now that we've found the spot, do the insertion. */
1643 /* Set the new block number for these insns, if structure is allocated. */
1644 if (basic_block_for_insn)
1647 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
1648 set_block_for_insn (i, bb);
1653 emit_insns_before (insns, before);
1654 if (before == bb->head)
1659 rtx last = emit_insns_after (insns, after);
1660 if (after == bb->end)
1664 if (GET_CODE (last) == JUMP_INSN)
1666 if (returnjump_p (last))
1668 /* ??? Remove all outgoing edges from BB and add one
1669 for EXIT. This is not currently a problem because
1670 this only happens for the (single) epilogue, which
1671 already has a fallthru edge to EXIT. */
1674 if (e->dest != EXIT_BLOCK_PTR
1675 || e->succ_next != NULL
1676 || (e->flags & EDGE_FALLTHRU) == 0)
1678 e->flags &= ~EDGE_FALLTHRU;
1680 emit_barrier_after (last);
1689 /* Update the CFG for all queued instructions. */
1692 commit_edge_insertions ()
1697 #ifdef ENABLE_CHECKING
1698 verify_flow_info ();
1702 bb = ENTRY_BLOCK_PTR;
1707 for (e = bb->succ; e ; e = next)
1709 next = e->succ_next;
1711 commit_one_edge_insertion (e);
1714 if (++i >= n_basic_blocks)
1716 bb = BASIC_BLOCK (i);
1720 /* Delete all unreachable basic blocks. */
1723 delete_unreachable_blocks ()
1725 basic_block *worklist, *tos;
1726 int deleted_handler;
1731 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
1733 /* Use basic_block->aux as a marker. Clear them all. */
1735 for (i = 0; i < n; ++i)
1736 BASIC_BLOCK (i)->aux = NULL;
1738 /* Add our starting points to the worklist. Almost always there will
1739 be only one. It isn't inconcievable that we might one day directly
1740 support Fortran alternate entry points. */
1742 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
1746 /* Mark the block with a handy non-null value. */
1750 /* Iterate: find everything reachable from what we've already seen. */
1752 while (tos != worklist)
1754 basic_block b = *--tos;
1756 for (e = b->succ; e ; e = e->succ_next)
1764 /* Delete all unreachable basic blocks. Count down so that we don't
1765 interfere with the block renumbering that happens in delete_block. */
1767 deleted_handler = 0;
1769 for (i = n - 1; i >= 0; --i)
1771 basic_block b = BASIC_BLOCK (i);
1774 /* This block was found. Tidy up the mark. */
1777 deleted_handler |= delete_block (b);
1780 tidy_fallthru_edges ();
1782 /* If we deleted an exception handler, we may have EH region begin/end
1783 blocks to remove as well. */
1784 if (deleted_handler)
1785 delete_eh_regions ();
1790 /* Find EH regions for which there is no longer a handler, and delete them. */
1793 delete_eh_regions ()
1797 update_rethrow_references ();
1799 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1800 if (GET_CODE (insn) == NOTE)
1802 if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG) ||
1803 (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1805 int num = NOTE_EH_HANDLER (insn);
1806 /* A NULL handler indicates a region is no longer needed,
1807 as long as its rethrow label isn't used. */
1808 if (get_first_handler (num) == NULL && ! rethrow_used (num))
1810 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1811 NOTE_SOURCE_FILE (insn) = 0;
1817 /* Return true if NOTE is not one of the ones that must be kept paired,
1818 so that we may simply delete them. */
1821 can_delete_note_p (note)
1824 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
1825 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
1828 /* Unlink a chain of insns between START and FINISH, leaving notes
1829 that must be paired. */
1832 flow_delete_insn_chain (start, finish)
1835 /* Unchain the insns one by one. It would be quicker to delete all
1836 of these with a single unchaining, rather than one at a time, but
1837 we need to keep the NOTE's. */
1843 next = NEXT_INSN (start);
1844 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
1846 else if (GET_CODE (start) == CODE_LABEL && !can_delete_label_p (start))
1849 next = flow_delete_insn (start);
1851 if (start == finish)
1857 /* Delete the insns in a (non-live) block. We physically delete every
1858 non-deleted-note insn, and update the flow graph appropriately.
1860 Return nonzero if we deleted an exception handler. */
1862 /* ??? Preserving all such notes strikes me as wrong. It would be nice
1863 to post-process the stream to remove empty blocks, loops, ranges, etc. */
1869 int deleted_handler = 0;
1872 /* If the head of this block is a CODE_LABEL, then it might be the
1873 label for an exception handler which can't be reached.
1875 We need to remove the label from the exception_handler_label list
1876 and remove the associated NOTE_INSN_EH_REGION_BEG and
1877 NOTE_INSN_EH_REGION_END notes. */
1881 never_reached_warning (insn);
1883 if (GET_CODE (insn) == CODE_LABEL)
1885 rtx x, *prev = &exception_handler_labels;
1887 for (x = exception_handler_labels; x; x = XEXP (x, 1))
1889 if (XEXP (x, 0) == insn)
1891 /* Found a match, splice this label out of the EH label list. */
1892 *prev = XEXP (x, 1);
1893 XEXP (x, 1) = NULL_RTX;
1894 XEXP (x, 0) = NULL_RTX;
1896 /* Remove the handler from all regions */
1897 remove_handler (insn);
1898 deleted_handler = 1;
1901 prev = &XEXP (x, 1);
1904 /* This label may be referenced by code solely for its value, or
1905 referenced by static data, or something. We have determined
1906 that it is not reachable, but cannot delete the label itself.
1907 Save code space and continue to delete the balance of the block,
1908 along with properly updating the cfg. */
1909 if (!can_delete_label_p (insn))
1911 /* If we've only got one of these, skip the whole deleting
1914 goto no_delete_insns;
1915 insn = NEXT_INSN (insn);
1919 /* Include any jump table following the basic block. */
1921 if (GET_CODE (end) == JUMP_INSN
1922 && (tmp = JUMP_LABEL (end)) != NULL_RTX
1923 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1924 && GET_CODE (tmp) == JUMP_INSN
1925 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1926 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1929 /* Include any barrier that may follow the basic block. */
1930 tmp = next_nonnote_insn (end);
1931 if (tmp && GET_CODE (tmp) == BARRIER)
1934 /* Selectively delete the entire chain. */
1935 flow_delete_insn_chain (insn, end);
1939 /* Remove the edges into and out of this block. Note that there may
1940 indeed be edges in, if we are removing an unreachable loop. */
1944 for (e = b->pred; e ; e = next)
1946 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
1949 next = e->pred_next;
1953 for (e = b->succ; e ; e = next)
1955 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
1958 next = e->succ_next;
1967 /* Remove the basic block from the array, and compact behind it. */
1970 return deleted_handler;
1973 /* Remove block B from the basic block array and compact behind it. */
1979 int i, n = n_basic_blocks;
1981 for (i = b->index; i + 1 < n; ++i)
1983 basic_block x = BASIC_BLOCK (i + 1);
1984 BASIC_BLOCK (i) = x;
1988 basic_block_info->num_elements--;
1992 /* Delete INSN by patching it out. Return the next insn. */
1995 flow_delete_insn (insn)
1998 rtx prev = PREV_INSN (insn);
1999 rtx next = NEXT_INSN (insn);
2002 PREV_INSN (insn) = NULL_RTX;
2003 NEXT_INSN (insn) = NULL_RTX;
2006 NEXT_INSN (prev) = next;
2008 PREV_INSN (next) = prev;
2010 set_last_insn (prev);
2012 if (GET_CODE (insn) == CODE_LABEL)
2013 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2015 /* If deleting a jump, decrement the use count of the label. Deleting
2016 the label itself should happen in the normal course of block merging. */
2017 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
2018 LABEL_NUSES (JUMP_LABEL (insn))--;
2020 /* Also if deleting an insn that references a label. */
2021 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX)
2022 LABEL_NUSES (XEXP (note, 0))--;
2027 /* True if a given label can be deleted. */
2030 can_delete_label_p (label)
2035 if (LABEL_PRESERVE_P (label))
2038 for (x = forced_labels; x ; x = XEXP (x, 1))
2039 if (label == XEXP (x, 0))
2041 for (x = label_value_list; x ; x = XEXP (x, 1))
2042 if (label == XEXP (x, 0))
2044 for (x = exception_handler_labels; x ; x = XEXP (x, 1))
2045 if (label == XEXP (x, 0))
2048 /* User declared labels must be preserved. */
2049 if (LABEL_NAME (label) != 0)
2055 /* Blocks A and B are to be merged into a single block A. The insns
2056 are already contiguous, hence `nomove'. */
2059 merge_blocks_nomove (a, b)
2063 rtx b_head, b_end, a_end;
2064 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2067 /* If there was a CODE_LABEL beginning B, delete it. */
2070 if (GET_CODE (b_head) == CODE_LABEL)
2072 /* Detect basic blocks with nothing but a label. This can happen
2073 in particular at the end of a function. */
2074 if (b_head == b_end)
2076 del_first = del_last = b_head;
2077 b_head = NEXT_INSN (b_head);
2080 /* Delete the basic block note. */
2081 if (GET_CODE (b_head) == NOTE
2082 && NOTE_LINE_NUMBER (b_head) == NOTE_INSN_BASIC_BLOCK)
2084 if (b_head == b_end)
2089 b_head = NEXT_INSN (b_head);
2092 /* If there was a jump out of A, delete it. */
2094 if (GET_CODE (a_end) == JUMP_INSN)
2098 prev = prev_nonnote_insn (a_end);
2105 /* If this was a conditional jump, we need to also delete
2106 the insn that set cc0. */
2107 if (prev && sets_cc0_p (prev))
2110 prev = prev_nonnote_insn (prev);
2120 /* Delete everything marked above as well as crap that might be
2121 hanging out between the two blocks. */
2122 flow_delete_insn_chain (del_first, del_last);
2124 /* Normally there should only be one successor of A and that is B, but
2125 partway though the merge of blocks for conditional_execution we'll
2126 be merging a TEST block with THEN and ELSE successors. Free the
2127 whole lot of them and hope the caller knows what they're doing. */
2129 remove_edge (a->succ);
2131 /* Adjust the edges out of B for the new owner. */
2132 for (e = b->succ; e ; e = e->succ_next)
2136 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
2137 b->pred = b->succ = NULL;
2139 /* Reassociate the insns of B with A. */
2142 if (basic_block_for_insn)
2144 BLOCK_FOR_INSN (b_head) = a;
2145 while (b_head != b_end)
2147 b_head = NEXT_INSN (b_head);
2148 BLOCK_FOR_INSN (b_head) = a;
2158 /* Blocks A and B are to be merged into a single block. A has no incoming
2159 fallthru edge, so it can be moved before B without adding or modifying
2160 any jumps (aside from the jump from A to B). */
2163 merge_blocks_move_predecessor_nojumps (a, b)
2166 rtx start, end, barrier;
2172 /* We want to delete the BARRIER after the end of the insns we are
2173 going to move. If we don't find a BARRIER, then do nothing. This
2174 can happen in some cases if we have labels we can not delete.
2176 Similarly, do nothing if we can not delete the label at the start
2177 of the target block. */
2178 barrier = next_nonnote_insn (end);
2179 if (GET_CODE (barrier) != BARRIER
2180 || (GET_CODE (b->head) == CODE_LABEL
2181 && ! can_delete_label_p (b->head)))
2184 flow_delete_insn (barrier);
2186 /* Move block and loop notes out of the chain so that we do not
2187 disturb their order.
2189 ??? A better solution would be to squeeze out all the non-nested notes
2190 and adjust the block trees appropriately. Even better would be to have
2191 a tighter connection between block trees and rtl so that this is not
2193 start = squeeze_notes (start, end);
2195 /* Scramble the insn chain. */
2196 if (end != PREV_INSN (b->head))
2197 reorder_insns (start, end, PREV_INSN (b->head));
2201 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2202 a->index, b->index);
2205 /* Swap the records for the two blocks around. Although we are deleting B,
2206 A is now where B was and we want to compact the BB array from where
2208 BASIC_BLOCK(a->index) = b;
2209 BASIC_BLOCK(b->index) = a;
2211 a->index = b->index;
2214 /* Now blocks A and B are contiguous. Merge them. */
2215 merge_blocks_nomove (a, b);
2220 /* Blocks A and B are to be merged into a single block. B has no outgoing
2221 fallthru edge, so it can be moved after A without adding or modifying
2222 any jumps (aside from the jump from A to B). */
2225 merge_blocks_move_successor_nojumps (a, b)
2228 rtx start, end, barrier;
2233 /* We want to delete the BARRIER after the end of the insns we are
2234 going to move. If we don't find a BARRIER, then do nothing. This
2235 can happen in some cases if we have labels we can not delete.
2237 Similarly, do nothing if we can not delete the label at the start
2238 of the target block. */
2239 barrier = next_nonnote_insn (end);
2240 if (GET_CODE (barrier) != BARRIER
2241 || (GET_CODE (b->head) == CODE_LABEL
2242 && ! can_delete_label_p (b->head)))
2245 flow_delete_insn (barrier);
2247 /* Move block and loop notes out of the chain so that we do not
2248 disturb their order.
2250 ??? A better solution would be to squeeze out all the non-nested notes
2251 and adjust the block trees appropriately. Even better would be to have
2252 a tighter connection between block trees and rtl so that this is not
2254 start = squeeze_notes (start, end);
2256 /* Scramble the insn chain. */
2257 reorder_insns (start, end, a->end);
2259 /* Now blocks A and B are contiguous. Merge them. */
2260 merge_blocks_nomove (a, b);
2264 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2265 b->index, a->index);
2271 /* Attempt to merge basic blocks that are potentially non-adjacent.
2272 Return true iff the attempt succeeded. */
2275 merge_blocks (e, b, c)
2279 /* If B has a fallthru edge to C, no need to move anything. */
2280 if (e->flags & EDGE_FALLTHRU)
2282 /* If a label still appears somewhere and we cannot delete the label,
2283 then we cannot merge the blocks. The edge was tidied already. */
2285 rtx insn, stop = NEXT_INSN (c->head);
2286 for (insn = NEXT_INSN (b->end); insn != stop; insn = NEXT_INSN (insn))
2287 if (GET_CODE (insn) == CODE_LABEL && !can_delete_label_p (insn))
2290 merge_blocks_nomove (b, c);
2294 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2295 b->index, c->index);
2304 int c_has_outgoing_fallthru;
2305 int b_has_incoming_fallthru;
2307 /* We must make sure to not munge nesting of exception regions,
2308 lexical blocks, and loop notes.
2310 The first is taken care of by requiring that the active eh
2311 region at the end of one block always matches the active eh
2312 region at the beginning of the next block.
2314 The later two are taken care of by squeezing out all the notes. */
2316 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2317 executed and we may want to treat blocks which have two out
2318 edges, one normal, one abnormal as only having one edge for
2319 block merging purposes. */
2321 for (tmp_edge = c->succ; tmp_edge ; tmp_edge = tmp_edge->succ_next)
2322 if (tmp_edge->flags & EDGE_FALLTHRU)
2324 c_has_outgoing_fallthru = (tmp_edge != NULL);
2326 for (tmp_edge = b->pred; tmp_edge ; tmp_edge = tmp_edge->pred_next)
2327 if (tmp_edge->flags & EDGE_FALLTHRU)
2329 b_has_incoming_fallthru = (tmp_edge != NULL);
2331 /* If B does not have an incoming fallthru, and the exception regions
2332 match, then it can be moved immediately before C without introducing
2335 C can not be the first block, so we do not have to worry about
2336 accessing a non-existent block. */
2337 d = BASIC_BLOCK (c->index - 1);
2338 if (! b_has_incoming_fallthru
2339 && d->eh_end == b->eh_beg
2340 && b->eh_end == c->eh_beg)
2341 return merge_blocks_move_predecessor_nojumps (b, c);
2343 /* Otherwise, we're going to try to move C after B. Make sure the
2344 exception regions match.
2346 If B is the last basic block, then we must not try to access the
2347 block structure for block B + 1. Luckily in that case we do not
2348 need to worry about matching exception regions. */
2349 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2350 if (b->eh_end == c->eh_beg
2351 && (d == NULL || c->eh_end == d->eh_beg))
2353 /* If C does not have an outgoing fallthru, then it can be moved
2354 immediately after B without introducing or modifying jumps. */
2355 if (! c_has_outgoing_fallthru)
2356 return merge_blocks_move_successor_nojumps (b, c);
2358 /* Otherwise, we'll need to insert an extra jump, and possibly
2359 a new block to contain it. */
2360 /* ??? Not implemented yet. */
2367 /* Top level driver for merge_blocks. */
2374 /* Attempt to merge blocks as made possible by edge removal. If a block
2375 has only one successor, and the successor has only one predecessor,
2376 they may be combined. */
2378 for (i = 0; i < n_basic_blocks; )
2380 basic_block c, b = BASIC_BLOCK (i);
2383 /* A loop because chains of blocks might be combineable. */
2384 while ((s = b->succ) != NULL
2385 && s->succ_next == NULL
2386 && (s->flags & EDGE_EH) == 0
2387 && (c = s->dest) != EXIT_BLOCK_PTR
2388 && c->pred->pred_next == NULL
2389 /* If the jump insn has side effects, we can't kill the edge. */
2390 && (GET_CODE (b->end) != JUMP_INSN
2391 || onlyjump_p (b->end))
2392 && merge_blocks (s, b, c))
2395 /* Don't get confused by the index shift caused by deleting blocks. */
2400 /* The given edge should potentially be a fallthru edge. If that is in
2401 fact true, delete the jump and barriers that are in the way. */
2404 tidy_fallthru_edge (e, b, c)
2410 /* ??? In a late-running flow pass, other folks may have deleted basic
2411 blocks by nopping out blocks, leaving multiple BARRIERs between here
2412 and the target label. They ought to be chastized and fixed.
2414 We can also wind up with a sequence of undeletable labels between
2415 one block and the next.
2417 So search through a sequence of barriers, labels, and notes for
2418 the head of block C and assert that we really do fall through. */
2420 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2423 /* Remove what will soon cease being the jump insn from the source block.
2424 If block B consisted only of this single jump, turn it into a deleted
2427 if (GET_CODE (q) == JUMP_INSN)
2430 /* If this was a conditional jump, we need to also delete
2431 the insn that set cc0. */
2432 if (! simplejump_p (q) && condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2439 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2440 NOTE_SOURCE_FILE (q) = 0;
2443 b->end = q = PREV_INSN (q);
2446 /* Selectively unlink the sequence. */
2447 if (q != PREV_INSN (c->head))
2448 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2450 e->flags |= EDGE_FALLTHRU;
2453 /* Fix up edges that now fall through, or rather should now fall through
2454 but previously required a jump around now deleted blocks. Simplify
2455 the search by only examining blocks numerically adjacent, since this
2456 is how find_basic_blocks created them. */
2459 tidy_fallthru_edges ()
2463 for (i = 1; i < n_basic_blocks; ++i)
2465 basic_block b = BASIC_BLOCK (i - 1);
2466 basic_block c = BASIC_BLOCK (i);
2469 /* We care about simple conditional or unconditional jumps with
2472 If we had a conditional branch to the next instruction when
2473 find_basic_blocks was called, then there will only be one
2474 out edge for the block which ended with the conditional
2475 branch (since we do not create duplicate edges).
2477 Furthermore, the edge will be marked as a fallthru because we
2478 merge the flags for the duplicate edges. So we do not want to
2479 check that the edge is not a FALLTHRU edge. */
2480 if ((s = b->succ) != NULL
2481 && s->succ_next == NULL
2483 /* If the jump insn has side effects, we can't tidy the edge. */
2484 && (GET_CODE (b->end) != JUMP_INSN
2485 || onlyjump_p (b->end)))
2486 tidy_fallthru_edge (s, b, c);
2490 /* Discover and record the loop depth at the head of each basic block. */
2493 calculate_loop_depth (dump)
2498 /* The loop infrastructure does the real job for us. */
2499 flow_loops_find (&loops);
2502 flow_loops_dump (&loops, dump, 0);
2504 flow_loops_free (&loops);
2507 /* Perform data flow analysis.
2508 F is the first insn of the function and NREGS the number of register numbers
2512 life_analysis (f, nregs, file, remove_dead_code)
2516 int remove_dead_code;
2518 #ifdef ELIMINABLE_REGS
2520 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2525 /* Dead code elimination changes basic block structure and therefore
2526 breaks the SSA phi representation. Particularly, a phi node
2527 can have an alternative value for each incoming block, referenced
2528 by the block number. Removing dead code can bump entire blocks
2529 and therefore cause blocks to be renumbered, invalidating the
2530 numbering of phi alternatives. */
2531 if (remove_dead_code && in_ssa_form)
2534 /* Record which registers will be eliminated. We use this in
2537 CLEAR_HARD_REG_SET (elim_reg_set);
2539 #ifdef ELIMINABLE_REGS
2540 for (i = 0; i < (int) (sizeof eliminables / sizeof eliminables[0]); i++)
2541 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2543 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2546 /* We want alias analysis information for local dead store elimination. */
2547 init_alias_analysis ();
2550 flags = PROP_DEATH_NOTES | PROP_REG_INFO;
2554 if (! remove_dead_code)
2555 flags &= ~(PROP_SCAN_DEAD_CODE | PROP_KILL_DEAD_CODE);
2558 /* The post-reload life analysis have (on a global basis) the same
2559 registers live as was computed by reload itself. elimination
2560 Otherwise offsets and such may be incorrect.
2562 Reload will make some registers as live even though they do not
2563 appear in the rtl. */
2564 if (reload_completed)
2565 flags &= ~PROP_REG_INFO;
2569 /* Always remove no-op moves. Do this before other processing so
2570 that we don't have to keep re-scanning them. */
2571 delete_noop_moves (f);
2573 /* Some targets can emit simpler epilogues if they know that sp was
2574 not ever modified during the function. After reload, of course,
2575 we've already emitted the epilogue so there's no sense searching. */
2576 if (! reload_completed)
2577 notice_stack_pointer_modification (f);
2579 /* Allocate and zero out data structures that will record the
2580 data from lifetime analysis. */
2581 allocate_reg_life_data ();
2582 allocate_bb_life_data ();
2583 all_blocks = sbitmap_alloc (n_basic_blocks);
2584 sbitmap_ones (all_blocks);
2586 /* Find the set of registers live on function exit. */
2587 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2589 /* "Update" life info from zero. It'd be nice to begin the
2590 relaxation with just the exit and noreturn blocks, but that set
2591 is not immediately handy. */
2593 if (flags & PROP_REG_INFO)
2594 memset (regs_ever_live, 0, sizeof(regs_ever_live));
2595 update_life_info (all_blocks, UPDATE_LIFE_GLOBAL, flags);
2598 sbitmap_free (all_blocks);
2599 end_alias_analysis ();
2602 dump_flow_info (file);
2604 free_basic_block_vars (1);
2607 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2608 Search for REGNO. If found, abort if it is not wider than word_mode. */
2611 verify_wide_reg_1 (px, pregno)
2616 unsigned int regno = *(int *) pregno;
2618 if (GET_CODE (x) == REG && REGNO (x) == regno)
2620 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2627 /* A subroutine of verify_local_live_at_start. Search through insns
2628 between HEAD and END looking for register REGNO. */
2631 verify_wide_reg (regno, head, end)
2637 if (GET_RTX_CLASS (GET_CODE (head)) == 'i'
2638 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, ®no))
2642 head = NEXT_INSN (head);
2645 /* We didn't find the register at all. Something's way screwy. */
2649 /* A subroutine of update_life_info. Verify that there are no untoward
2650 changes in live_at_start during a local update. */
2653 verify_local_live_at_start (new_live_at_start, bb)
2654 regset new_live_at_start;
2657 if (reload_completed)
2659 /* After reload, there are no pseudos, nor subregs of multi-word
2660 registers. The regsets should exactly match. */
2661 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
2668 /* Find the set of changed registers. */
2669 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
2671 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
2673 /* No registers should die. */
2674 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
2676 /* Verify that the now-live register is wider than word_mode. */
2677 verify_wide_reg (i, bb->head, bb->end);
2682 /* Updates life information starting with the basic blocks set in BLOCKS.
2684 If LOCAL_ONLY, such as after splitting or peepholeing, we are only
2685 expecting local modifications to basic blocks. If we find extra
2686 registers live at the beginning of a block, then we either killed
2687 useful data, or we have a broken split that wants data not provided.
2688 If we find registers removed from live_at_start, that means we have
2689 a broken peephole that is killing a register it shouldn't.
2691 ??? This is not true in one situation -- when a pre-reload splitter
2692 generates subregs of a multi-word pseudo, current life analysis will
2693 lose the kill. So we _can_ have a pseudo go live. How irritating.
2695 BLOCK_FOR_INSN is assumed to be correct.
2697 Including PROP_REG_INFO does not properly refresh regs_ever_live
2698 unless the caller resets it to zero. */
2701 update_life_info (blocks, extent, prop_flags)
2703 enum update_life_extent extent;
2707 regset_head tmp_head;
2710 tmp = INITIALIZE_REG_SET (tmp_head);
2712 /* For a global update, we go through the relaxation process again. */
2713 if (extent != UPDATE_LIFE_LOCAL)
2715 calculate_global_regs_live (blocks, blocks,
2716 prop_flags & PROP_SCAN_DEAD_CODE);
2718 /* If asked, remove notes from the blocks we'll update. */
2719 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
2720 count_or_remove_death_notes (blocks, 1);
2723 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2725 basic_block bb = BASIC_BLOCK (i);
2727 COPY_REG_SET (tmp, bb->global_live_at_end);
2728 propagate_block (bb, tmp, (regset) NULL, prop_flags);
2730 if (extent == UPDATE_LIFE_LOCAL)
2731 verify_local_live_at_start (tmp, bb);
2736 if (prop_flags & PROP_REG_INFO)
2738 /* The only pseudos that are live at the beginning of the function
2739 are those that were not set anywhere in the function. local-alloc
2740 doesn't know how to handle these correctly, so mark them as not
2741 local to any one basic block. */
2742 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
2743 FIRST_PSEUDO_REGISTER, i,
2744 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
2746 /* We have a problem with any pseudoreg that lives across the setjmp.
2747 ANSI says that if a user variable does not change in value between
2748 the setjmp and the longjmp, then the longjmp preserves it. This
2749 includes longjmp from a place where the pseudo appears dead.
2750 (In principle, the value still exists if it is in scope.)
2751 If the pseudo goes in a hard reg, some other value may occupy
2752 that hard reg where this pseudo is dead, thus clobbering the pseudo.
2753 Conclusion: such a pseudo must not go in a hard reg. */
2754 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
2755 FIRST_PSEUDO_REGISTER, i,
2757 if (regno_reg_rtx[i] != 0)
2759 REG_LIVE_LENGTH (i) = -1;
2760 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
2766 /* Free the variables allocated by find_basic_blocks.
2768 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
2771 free_basic_block_vars (keep_head_end_p)
2772 int keep_head_end_p;
2774 if (basic_block_for_insn)
2776 VARRAY_FREE (basic_block_for_insn);
2777 basic_block_for_insn = NULL;
2780 if (! keep_head_end_p)
2783 VARRAY_FREE (basic_block_info);
2786 ENTRY_BLOCK_PTR->aux = NULL;
2787 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
2788 EXIT_BLOCK_PTR->aux = NULL;
2789 EXIT_BLOCK_PTR->global_live_at_start = NULL;
2793 /* Return nonzero if the destination of SET equals the source. */
2798 rtx src = SET_SRC (set);
2799 rtx dst = SET_DEST (set);
2801 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
2803 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
2805 src = SUBREG_REG (src);
2806 dst = SUBREG_REG (dst);
2809 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
2810 && REGNO (src) == REGNO (dst));
2813 /* Return nonzero if an insn consists only of SETs, each of which only sets a
2819 rtx pat = PATTERN (insn);
2821 /* Insns carrying these notes are useful later on. */
2822 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
2825 if (GET_CODE (pat) == SET && set_noop_p (pat))
2828 if (GET_CODE (pat) == PARALLEL)
2831 /* If nothing but SETs of registers to themselves,
2832 this insn can also be deleted. */
2833 for (i = 0; i < XVECLEN (pat, 0); i++)
2835 rtx tem = XVECEXP (pat, 0, i);
2837 if (GET_CODE (tem) == USE
2838 || GET_CODE (tem) == CLOBBER)
2841 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
2850 /* Delete any insns that copy a register to itself. */
2853 delete_noop_moves (f)
2857 for (insn = f; insn; insn = NEXT_INSN (insn))
2859 if (GET_CODE (insn) == INSN && noop_move_p (insn))
2861 PUT_CODE (insn, NOTE);
2862 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2863 NOTE_SOURCE_FILE (insn) = 0;
2868 /* Determine if the stack pointer is constant over the life of the function.
2869 Only useful before prologues have been emitted. */
2872 notice_stack_pointer_modification_1 (x, pat, data)
2874 rtx pat ATTRIBUTE_UNUSED;
2875 void *data ATTRIBUTE_UNUSED;
2877 if (x == stack_pointer_rtx
2878 /* The stack pointer is only modified indirectly as the result
2879 of a push until later in flow. See the comments in rtl.texi
2880 regarding Embedded Side-Effects on Addresses. */
2881 || (GET_CODE (x) == MEM
2882 && (GET_CODE (XEXP (x, 0)) == PRE_DEC
2883 || GET_CODE (XEXP (x, 0)) == PRE_INC
2884 || GET_CODE (XEXP (x, 0)) == POST_DEC
2885 || GET_CODE (XEXP (x, 0)) == POST_INC)
2886 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
2887 current_function_sp_is_unchanging = 0;
2891 notice_stack_pointer_modification (f)
2896 /* Assume that the stack pointer is unchanging if alloca hasn't
2898 current_function_sp_is_unchanging = !current_function_calls_alloca;
2899 if (! current_function_sp_is_unchanging)
2902 for (insn = f; insn; insn = NEXT_INSN (insn))
2904 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2906 /* Check if insn modifies the stack pointer. */
2907 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
2909 if (! current_function_sp_is_unchanging)
2915 /* Mark a register in SET. Hard registers in large modes get all
2916 of their component registers set as well. */
2918 mark_reg (reg, xset)
2922 regset set = (regset) xset;
2923 int regno = REGNO (reg);
2925 if (GET_MODE (reg) == BLKmode)
2928 SET_REGNO_REG_SET (set, regno);
2929 if (regno < FIRST_PSEUDO_REGISTER)
2931 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2933 SET_REGNO_REG_SET (set, regno + n);
2937 /* Mark those regs which are needed at the end of the function as live
2938 at the end of the last basic block. */
2940 mark_regs_live_at_end (set)
2945 /* If exiting needs the right stack value, consider the stack pointer
2946 live at the end of the function. */
2947 if ((HAVE_epilogue && reload_completed)
2948 || ! EXIT_IGNORE_STACK
2949 || (! FRAME_POINTER_REQUIRED
2950 && ! current_function_calls_alloca
2951 && flag_omit_frame_pointer)
2952 || current_function_sp_is_unchanging)
2954 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
2957 /* Mark the frame pointer if needed at the end of the function. If
2958 we end up eliminating it, it will be removed from the live list
2959 of each basic block by reload. */
2961 if (! reload_completed || frame_pointer_needed)
2963 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
2964 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2965 /* If they are different, also mark the hard frame pointer as live */
2966 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
2970 #ifdef PIC_OFFSET_TABLE_REGNUM
2971 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
2972 /* Many architectures have a GP register even without flag_pic.
2973 Assume the pic register is not in use, or will be handled by
2974 other means, if it is not fixed. */
2975 if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
2976 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
2980 /* Mark all global registers, and all registers used by the epilogue
2981 as being live at the end of the function since they may be
2982 referenced by our caller. */
2983 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2985 #ifdef EPILOGUE_USES
2986 || EPILOGUE_USES (i)
2989 SET_REGNO_REG_SET (set, i);
2991 /* Mark all call-saved registers that we actaully used. */
2992 if (HAVE_epilogue && reload_completed)
2994 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2995 if (! call_used_regs[i] && regs_ever_live[i])
2996 SET_REGNO_REG_SET (set, i);
2999 /* Mark function return value. */
3000 diddle_return_value (mark_reg, set);
3003 /* Callback function for for_each_successor_phi. DATA is a regset.
3004 Sets the SRC_REGNO, the regno of the phi alternative for phi node
3005 INSN, in the regset. */
3008 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
3009 rtx insn ATTRIBUTE_UNUSED;
3010 int dest_regno ATTRIBUTE_UNUSED;
3014 regset live = (regset) data;
3015 SET_REGNO_REG_SET (live, src_regno);
3019 /* Propagate global life info around the graph of basic blocks. Begin
3020 considering blocks with their corresponding bit set in BLOCKS_IN.
3021 BLOCKS_OUT is set for every block that was changed. */
3024 calculate_global_regs_live (blocks_in, blocks_out, flags)
3025 sbitmap blocks_in, blocks_out;
3028 basic_block *queue, *qhead, *qtail, *qend;
3029 regset tmp, new_live_at_end;
3030 regset_head tmp_head;
3031 regset_head new_live_at_end_head;
3034 tmp = INITIALIZE_REG_SET (tmp_head);
3035 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
3037 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
3038 because the `head == tail' style test for an empty queue doesn't
3039 work with a full queue. */
3040 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
3042 qhead = qend = queue + n_basic_blocks + 2;
3044 /* Clear out the garbage that might be hanging out in bb->aux. */
3045 for (i = n_basic_blocks - 1; i >= 0; --i)
3046 BASIC_BLOCK (i)->aux = NULL;
3048 /* Queue the blocks set in the initial mask. Do this in reverse block
3049 number order so that we are more likely for the first round to do
3050 useful work. We use AUX non-null to flag that the block is queued. */
3051 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3053 basic_block bb = BASIC_BLOCK (i);
3058 sbitmap_zero (blocks_out);
3060 while (qhead != qtail)
3062 int rescan, changed;
3071 /* Begin by propogating live_at_start from the successor blocks. */
3072 CLEAR_REG_SET (new_live_at_end);
3073 for (e = bb->succ; e ; e = e->succ_next)
3075 basic_block sb = e->dest;
3076 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3079 /* Regs used in phi nodes are not included in
3080 global_live_at_start, since they are live only along a
3081 particular edge. Set those regs that are live because of a
3082 phi node alternative corresponding to this particular block. */
3083 for_each_successor_phi (bb, &set_phi_alternative_reg,
3086 if (bb == ENTRY_BLOCK_PTR)
3088 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3092 /* On our first pass through this block, we'll go ahead and continue.
3093 Recognize first pass by local_set NULL. On subsequent passes, we
3094 get to skip out early if live_at_end wouldn't have changed. */
3096 if (bb->local_set == NULL)
3098 bb->local_set = OBSTACK_ALLOC_REG_SET (function_obstack);
3103 /* If any bits were removed from live_at_end, we'll have to
3104 rescan the block. This wouldn't be necessary if we had
3105 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3106 local_live is really dependant on live_at_end. */
3107 CLEAR_REG_SET (tmp);
3108 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3109 new_live_at_end, BITMAP_AND_COMPL);
3113 /* Find the set of changed bits. Take this opportunity
3114 to notice that this set is empty and early out. */
3115 CLEAR_REG_SET (tmp);
3116 changed = bitmap_operation (tmp, bb->global_live_at_end,
3117 new_live_at_end, BITMAP_XOR);
3121 /* If any of the changed bits overlap with local_set,
3122 we'll have to rescan the block. Detect overlap by
3123 the AND with ~local_set turning off bits. */
3124 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3129 /* Let our caller know that BB changed enough to require its
3130 death notes updated. */
3131 SET_BIT (blocks_out, bb->index);
3135 /* Add to live_at_start the set of all registers in
3136 new_live_at_end that aren't in the old live_at_end. */
3138 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3140 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3142 changed = bitmap_operation (bb->global_live_at_start,
3143 bb->global_live_at_start,
3150 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3152 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3153 into live_at_start. */
3154 propagate_block (bb, new_live_at_end, bb->local_set, flags);
3156 /* If live_at start didn't change, no need to go farther. */
3157 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3160 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3163 /* Queue all predecessors of BB so that we may re-examine
3164 their live_at_end. */
3165 for (e = bb->pred; e ; e = e->pred_next)
3167 basic_block pb = e->src;
3168 if (pb->aux == NULL)
3179 FREE_REG_SET (new_live_at_end);
3181 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3183 basic_block bb = BASIC_BLOCK (i);
3184 FREE_REG_SET (bb->local_set);
3190 /* Subroutines of life analysis. */
3192 /* Allocate the permanent data structures that represent the results
3193 of life analysis. Not static since used also for stupid life analysis. */
3196 allocate_bb_life_data ()
3200 for (i = 0; i < n_basic_blocks; i++)
3202 basic_block bb = BASIC_BLOCK (i);
3204 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (function_obstack);
3205 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (function_obstack);
3208 ENTRY_BLOCK_PTR->global_live_at_end
3209 = OBSTACK_ALLOC_REG_SET (function_obstack);
3210 EXIT_BLOCK_PTR->global_live_at_start
3211 = OBSTACK_ALLOC_REG_SET (function_obstack);
3213 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (function_obstack);
3217 allocate_reg_life_data ()
3221 /* Recalculate the register space, in case it has grown. Old style
3222 vector oriented regsets would set regset_{size,bytes} here also. */
3223 allocate_reg_info (max_regno, FALSE, FALSE);
3225 /* Reset all the data we'll collect in propagate_block and its
3227 for (i = 0; i < max_regno; i++)
3231 REG_N_DEATHS (i) = 0;
3232 REG_N_CALLS_CROSSED (i) = 0;
3233 REG_LIVE_LENGTH (i) = 0;
3234 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3238 /* Delete dead instructions for propagate_block. */
3241 propagate_block_delete_insn (bb, insn)
3245 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
3247 /* If the insn referred to a label, and that label was attached to
3248 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
3249 pretty much mandatory to delete it, because the ADDR_VEC may be
3250 referencing labels that no longer exist. */
3254 rtx label = XEXP (inote, 0);
3257 if (LABEL_NUSES (label) == 1
3258 && (next = next_nonnote_insn (label)) != NULL
3259 && GET_CODE (next) == JUMP_INSN
3260 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3261 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3263 rtx pat = PATTERN (next);
3264 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3265 int len = XVECLEN (pat, diff_vec_p);
3268 for (i = 0; i < len; i++)
3269 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3271 flow_delete_insn (next);
3275 if (bb->end == insn)
3276 bb->end = PREV_INSN (insn);
3277 flow_delete_insn (insn);
3280 /* Delete dead libcalls for propagate_block. Return the insn
3281 before the libcall. */
3284 propagate_block_delete_libcall (bb, insn, note)
3288 rtx first = XEXP (note, 0);
3289 rtx before = PREV_INSN (first);
3291 if (insn == bb->end)
3294 flow_delete_insn_chain (first, insn);
3298 /* Compute the registers live at the beginning of a basic block BB from
3299 those live at the end.
3301 When called, REG_LIVE contains those live at the end. On return, it
3302 contains those live at the beginning.
3304 LOCAL_SET, if non-null, will be set with all registers killed by
3305 this basic block. */
3308 propagate_block (bb, live, local_set, flags)
3314 struct propagate_block_info pbi;
3316 regset_head tmp_head;
3320 pbi.reg_live = live;
3321 pbi.mem_set_list = NULL_RTX;
3322 pbi.local_set = local_set;
3326 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3327 pbi.reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
3329 pbi.reg_next_use = NULL;
3331 tmp = INITIALIZE_REG_SET (tmp_head);
3333 if (flags & PROP_REG_INFO)
3337 /* Process the regs live at the end of the block.
3338 Mark them as not local to any one basic block. */
3339 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
3341 REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL;
3345 /* Scan the block an insn at a time from end to beginning. */
3347 for (insn = bb->end; ; insn = prev)
3349 prev = PREV_INSN (insn);
3351 if (GET_CODE (insn) == NOTE)
3353 /* If this is a call to `setjmp' et al,
3354 warn if any non-volatile datum is live. */
3356 if ((flags & PROP_REG_INFO)
3357 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
3358 IOR_REG_SET (regs_live_at_setjmp, pbi.reg_live);
3361 /* Update the life-status of regs for this insn.
3362 First DEAD gets which regs are set in this insn
3363 then LIVE gets which regs are used in this insn.
3364 Then the regs live before the insn
3365 are those live after, with DEAD regs turned off,
3366 and then LIVE regs turned on. */
3368 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3371 rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3372 int insn_is_dead = 0;
3373 int libcall_is_dead = 0;
3375 if (flags & PROP_SCAN_DEAD_CODE)
3377 insn_is_dead = insn_dead_p (&pbi, PATTERN (insn), 0,
3379 libcall_is_dead = (insn_is_dead && note != 0
3380 && libcall_dead_p (&pbi, PATTERN (insn),
3384 /* We almost certainly don't want to delete prologue or epilogue
3385 instructions. Warn about probable compiler losage. */
3388 && (((HAVE_epilogue || HAVE_prologue)
3389 && prologue_epilogue_contains (insn))
3390 || (HAVE_sibcall_epilogue
3391 && sibcall_epilogue_contains (insn))))
3393 if (flags & PROP_KILL_DEAD_CODE)
3395 warning ("ICE: would have deleted prologue/epilogue insn");
3396 if (!inhibit_warnings)
3399 libcall_is_dead = insn_is_dead = 0;
3402 /* If an instruction consists of just dead store(s) on final pass,
3404 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3406 if (libcall_is_dead)
3408 prev = propagate_block_delete_libcall (bb, insn, note);
3409 insn = NEXT_INSN (prev);
3412 propagate_block_delete_insn (bb, insn);
3414 /* CC0 is now known to be dead. Either this insn used it,
3415 in which case it doesn't anymore, or clobbered it,
3416 so the next insn can't use it. */
3422 /* See if this is an increment or decrement that can be
3423 merged into a following memory address. */
3426 register rtx x = single_set (insn);
3428 /* Does this instruction increment or decrement a register? */
3429 if (!reload_completed
3430 && (flags & PROP_AUTOINC)
3432 && GET_CODE (SET_DEST (x)) == REG
3433 && (GET_CODE (SET_SRC (x)) == PLUS
3434 || GET_CODE (SET_SRC (x)) == MINUS)
3435 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3436 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3437 /* Ok, look for a following memory ref we can combine with.
3438 If one is found, change the memory ref to a PRE_INC
3439 or PRE_DEC, cancel this insn, and return 1.
3440 Return 0 if nothing has been done. */
3441 && try_pre_increment_1 (&pbi, insn))
3444 #endif /* AUTO_INC_DEC */
3446 CLEAR_REG_SET (tmp);
3448 /* If this is not the final pass, and this insn is copying the
3449 value of a library call and it's dead, don't scan the
3450 insns that perform the library call, so that the call's
3451 arguments are not marked live. */
3452 if (libcall_is_dead)
3454 /* Record the death of the dest reg. */
3455 mark_set_regs (&pbi, tmp, PATTERN (insn), insn);
3456 AND_COMPL_REG_SET (pbi.reg_live, tmp);
3458 insn = XEXP (note, 0);
3459 prev = PREV_INSN (insn);
3461 else if (GET_CODE (PATTERN (insn)) == SET
3462 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3463 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3464 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3465 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3466 /* We have an insn to pop a constant amount off the stack.
3467 (Such insns use PLUS regardless of the direction of the stack,
3468 and any insn to adjust the stack by a constant is always a pop.)
3469 These insns, if not dead stores, have no effect on life. */
3473 /* Any regs live at the time of a call instruction
3474 must not go in a register clobbered by calls.
3475 Find all regs now live and record this for them. */
3477 if (GET_CODE (insn) == CALL_INSN
3478 && (flags & PROP_REG_INFO))
3479 EXECUTE_IF_SET_IN_REG_SET (pbi.reg_live, 0, i,
3480 { REG_N_CALLS_CROSSED (i)++; });
3482 /* Record sets. Do this even for dead instructions,
3483 since they would have killed the values if they hadn't
3485 mark_set_regs (&pbi, tmp, PATTERN (insn), insn);
3487 /* Each call clobbers all call-clobbered regs that are not
3488 global or fixed. Note that the function-value reg is a
3489 call-clobbered reg, and mark_set_regs has already had
3490 a chance to handle it. */
3491 if (GET_CODE (insn) == CALL_INSN)
3497 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3498 cond = COND_EXEC_TEST (PATTERN (insn));
3500 /* Non-constant calls clobber memory. */
3501 if (! CONST_CALL_P (insn))
3502 free_EXPR_LIST_list (&pbi.mem_set_list);
3504 /* There may be extra registers to be clobbered. */
3505 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3507 note = XEXP (note, 1))
3508 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3509 mark_set_1 (&pbi, tmp, XEXP (XEXP (note, 0), 0),
3512 /* Calls change all call-used and global registers. */
3513 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3514 if (call_used_regs[i] && ! global_regs[i]
3518 mark_set_reg (&pbi, tmp,
3519 gen_rtx_REG (reg_raw_mode[i], i),
3520 cond, &dummy, &dummy);
3524 /* Update live for the registers killed. */
3525 AND_COMPL_REG_SET (pbi.reg_live, tmp);
3526 CLEAR_REG_SET (tmp);
3528 /* If an insn doesn't use CC0, it becomes dead since we
3529 assume that every insn clobbers it. So show it dead here;
3530 mark_used_regs will set it live if it is referenced. */
3535 mark_used_regs (&pbi, tmp, PATTERN (insn), NULL_RTX, insn);
3537 /* Sometimes we may have inserted something before INSN
3538 (such as a move) when we make an auto-inc. So ensure
3539 we will scan those insns. */
3541 prev = PREV_INSN (insn);
3544 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3550 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3551 cond = COND_EXEC_TEST (PATTERN (insn));
3553 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3555 note = XEXP (note, 1))
3556 if (GET_CODE (XEXP (note, 0)) == USE)
3557 mark_used_regs (&pbi, tmp, XEXP (XEXP (note, 0), 0),
3560 /* The stack ptr is used (honorarily) by a CALL insn. */
3561 SET_REGNO_REG_SET (tmp, STACK_POINTER_REGNUM);
3563 /* Calls may also reference any of the global registers,
3564 so they are made live. */
3565 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3567 mark_used_reg (&pbi, tmp,
3568 gen_rtx_REG (reg_raw_mode[i], i),
3572 /* Update live for the registers used. */
3573 IOR_REG_SET (pbi.reg_live, tmp);
3576 /* On final pass, update counts of how many insns in which
3577 each reg is live. */
3578 if (flags & PROP_REG_INFO)
3579 EXECUTE_IF_SET_IN_REG_SET (pbi.reg_live, 0, i,
3580 { REG_LIVE_LENGTH (i)++; });
3583 if (insn == bb->head)
3588 free_EXPR_LIST_list (&pbi.mem_set_list);
3590 if (pbi.reg_next_use)
3591 free (pbi.reg_next_use);
3595 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
3596 (SET expressions whose destinations are registers dead after the insn).
3597 NEEDED is the regset that says which regs are alive after the insn.
3599 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
3601 If X is the entire body of an insn, NOTES contains the reg notes
3602 pertaining to the insn. */
3605 insn_dead_p (pbi, x, call_ok, notes)
3606 struct propagate_block_info *pbi;
3609 rtx notes ATTRIBUTE_UNUSED;
3611 enum rtx_code code = GET_CODE (x);
3614 /* If flow is invoked after reload, we must take existing AUTO_INC
3615 expresions into account. */
3616 if (reload_completed)
3618 for ( ; notes; notes = XEXP (notes, 1))
3620 if (REG_NOTE_KIND (notes) == REG_INC)
3622 int regno = REGNO (XEXP (notes, 0));
3624 /* Don't delete insns to set global regs. */
3625 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3626 || REGNO_REG_SET_P (pbi->reg_live, regno))
3633 /* If setting something that's a reg or part of one,
3634 see if that register's altered value will be live. */
3638 rtx r = SET_DEST (x);
3641 if (GET_CODE (r) == CC0)
3642 return ! pbi->cc0_live;
3645 /* A SET that is a subroutine call cannot be dead. */
3646 if (GET_CODE (SET_SRC (x)) == CALL)
3652 /* Don't eliminate loads from volatile memory or volatile asms. */
3653 else if (volatile_refs_p (SET_SRC (x)))
3656 if (GET_CODE (r) == MEM)
3660 if (MEM_VOLATILE_P (r))
3663 /* Walk the set of memory locations we are currently tracking
3664 and see if one is an identical match to this memory location.
3665 If so, this memory write is dead (remember, we're walking
3666 backwards from the end of the block to the start. */
3667 temp = pbi->mem_set_list;
3670 if (rtx_equal_p (XEXP (temp, 0), r))
3672 temp = XEXP (temp, 1);
3677 while (GET_CODE (r) == SUBREG
3678 || GET_CODE (r) == STRICT_LOW_PART
3679 || GET_CODE (r) == ZERO_EXTRACT)
3682 if (GET_CODE (r) == REG)
3684 int regno = REGNO (r);
3687 if (REGNO_REG_SET_P (pbi->reg_live, regno))
3690 /* If this is a hard register, verify that subsequent
3691 words are not needed. */
3692 if (regno < FIRST_PSEUDO_REGISTER)
3694 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
3697 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
3701 /* Don't delete insns to set global regs. */
3702 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
3705 /* Make sure insns to set the stack pointer aren't deleted. */
3706 if (regno == STACK_POINTER_REGNUM)
3709 /* Make sure insns to set the frame pointer aren't deleted. */
3710 if (regno == FRAME_POINTER_REGNUM
3711 && (! reload_completed || frame_pointer_needed))
3713 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3714 if (regno == HARD_FRAME_POINTER_REGNUM
3715 && (! reload_completed || frame_pointer_needed))
3719 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3720 /* Make sure insns to set arg pointer are never deleted
3721 (if the arg pointer isn't fixed, there will be a USE
3722 for it, so we can treat it normally). */
3723 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
3727 /* Otherwise, the set is dead. */
3733 /* If performing several activities, insn is dead if each activity
3734 is individually dead. Also, CLOBBERs and USEs can be ignored; a
3735 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
3737 else if (code == PARALLEL)
3739 int i = XVECLEN (x, 0);
3741 for (i--; i >= 0; i--)
3742 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
3743 && GET_CODE (XVECEXP (x, 0, i)) != USE
3744 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
3750 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
3751 is not necessarily true for hard registers. */
3752 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
3753 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
3754 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
3757 /* We do not check other CLOBBER or USE here. An insn consisting of just
3758 a CLOBBER or just a USE should not be deleted. */
3762 /* If X is the pattern of the last insn in a libcall, and assuming X is dead,
3763 return 1 if the entire library call is dead.
3764 This is true if X copies a register (hard or pseudo)
3765 and if the hard return reg of the call insn is dead.
3766 (The caller should have tested the destination of X already for death.)
3768 If this insn doesn't just copy a register, then we don't
3769 have an ordinary libcall. In that case, cse could not have
3770 managed to substitute the source for the dest later on,
3771 so we can assume the libcall is dead.
3773 NEEDED is the bit vector of pseudoregs live before this insn.
3774 NOTE is the REG_RETVAL note of the insn. INSN is the insn itself. */
3777 libcall_dead_p (pbi, x, note, insn)
3778 struct propagate_block_info *pbi;
3783 register RTX_CODE code = GET_CODE (x);
3787 register rtx r = SET_SRC (x);
3788 if (GET_CODE (r) == REG)
3790 rtx call = XEXP (note, 0);
3794 /* Find the call insn. */
3795 while (call != insn && GET_CODE (call) != CALL_INSN)
3796 call = NEXT_INSN (call);
3798 /* If there is none, do nothing special,
3799 since ordinary death handling can understand these insns. */
3803 /* See if the hard reg holding the value is dead.
3804 If this is a PARALLEL, find the call within it. */
3805 call_pat = PATTERN (call);
3806 if (GET_CODE (call_pat) == PARALLEL)
3808 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
3809 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
3810 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
3813 /* This may be a library call that is returning a value
3814 via invisible pointer. Do nothing special, since
3815 ordinary death handling can understand these insns. */
3819 call_pat = XVECEXP (call_pat, 0, i);
3822 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
3828 /* Return 1 if register REGNO was used before it was set, i.e. if it is
3829 live at function entry. Don't count global register variables, variables
3830 in registers that can be used for function arg passing, or variables in
3831 fixed hard registers. */
3834 regno_uninitialized (regno)
3837 if (n_basic_blocks == 0
3838 || (regno < FIRST_PSEUDO_REGISTER
3839 && (global_regs[regno]
3840 || fixed_regs[regno]
3841 || FUNCTION_ARG_REGNO_P (regno))))
3844 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
3847 /* 1 if register REGNO was alive at a place where `setjmp' was called
3848 and was set more than once or is an argument.
3849 Such regs may be clobbered by `longjmp'. */
3852 regno_clobbered_at_setjmp (regno)
3855 if (n_basic_blocks == 0)
3858 return ((REG_N_SETS (regno) > 1
3859 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
3860 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
3863 /* INSN references memory, possibly using autoincrement addressing modes.
3864 Find any entries on the mem_set_list that need to be invalidated due
3865 to an address change. */
3867 invalidate_mems_from_autoinc (pbi, insn)
3868 struct propagate_block_info *pbi;
3871 rtx note = REG_NOTES (insn);
3872 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3874 if (REG_NOTE_KIND (note) == REG_INC)
3876 rtx temp = pbi->mem_set_list;
3877 rtx prev = NULL_RTX;
3882 next = XEXP (temp, 1);
3883 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
3885 /* Splice temp out of list. */
3887 XEXP (prev, 1) = next;
3889 pbi->mem_set_list = next;
3890 free_EXPR_LIST_node (temp);
3900 /* Process the registers that are set within X. Their bits are set to
3901 1 in the regset DEAD, because they are dead prior to this insn.
3903 If INSN is nonzero, it is the insn being processed.
3905 FLAGS is the set of operations to perform. */
3908 mark_set_regs (pbi, new_dead, x, insn)
3909 struct propagate_block_info *pbi;
3913 rtx cond = NULL_RTX;
3916 switch (GET_CODE (x))
3920 mark_set_1 (pbi, new_dead, SET_DEST (x), cond, insn);
3924 cond = COND_EXEC_TEST (x);
3925 x = COND_EXEC_CODE (x);
3931 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3933 rtx sub = XVECEXP (x, 0, i);
3934 switch (GET_CODE (sub))
3937 if (cond != NULL_RTX)
3940 cond = COND_EXEC_TEST (sub);
3941 sub = COND_EXEC_CODE (sub);
3942 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
3948 mark_set_1 (pbi, new_dead, SET_DEST (sub), cond, insn);
3963 /* Process a single SET rtx, X. */
3966 mark_set_1 (pbi, new_dead, reg, cond, insn)
3967 struct propagate_block_info *pbi;
3969 rtx reg, cond, insn;
3971 register int regno = -1;
3972 int flags = pbi->flags;
3974 /* Some targets place small structures in registers for
3975 return values of functions. We have to detect this
3976 case specially here to get correct flow information. */
3977 if (GET_CODE (reg) == PARALLEL
3978 && GET_MODE (reg) == BLKmode)
3982 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
3983 mark_set_1 (pbi, new_dead, XVECEXP (reg, 0, i), cond, insn);
3987 /* Modifying just one hardware register of a multi-reg value
3988 or just a byte field of a register
3989 does not mean the value from before this insn is now dead.
3990 But it does mean liveness of that register at the end of the block
3993 Within mark_set_1, however, we treat it as if the register is
3994 indeed modified. mark_used_regs will, however, also treat this
3995 register as being used. Thus, we treat these insns as setting a
3996 new value for the register as a function of its old value. This
3997 cases LOG_LINKS to be made appropriately and this will help combine. */
3999 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
4000 || GET_CODE (reg) == SIGN_EXTRACT
4001 || GET_CODE (reg) == STRICT_LOW_PART)
4002 reg = XEXP (reg, 0);
4004 /* If this set is a MEM, then it kills any aliased writes.
4005 If this set is a REG, then it kills any MEMs which use the reg. */
4006 if (flags & PROP_SCAN_DEAD_CODE)
4008 if (GET_CODE (reg) == MEM || GET_CODE (reg) == REG)
4010 rtx temp = pbi->mem_set_list;
4011 rtx prev = NULL_RTX;
4016 next = XEXP (temp, 1);
4017 if ((GET_CODE (reg) == MEM
4018 && output_dependence (XEXP (temp, 0), reg))
4019 || (GET_CODE (reg) == REG
4020 && reg_overlap_mentioned_p (reg, XEXP (temp, 0))))
4022 /* Splice this entry out of the list. */
4024 XEXP (prev, 1) = next;
4026 pbi->mem_set_list = next;
4027 free_EXPR_LIST_node (temp);
4035 /* If the memory reference had embedded side effects (autoincrement
4036 address modes. Then we may need to kill some entries on the
4038 if (insn && GET_CODE (reg) == MEM)
4039 invalidate_mems_from_autoinc (pbi, insn);
4041 if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
4042 /* We do not know the size of a BLKmode store, so we do not track
4043 them for redundant store elimination. */
4044 && GET_MODE (reg) != BLKmode
4045 /* There are no REG_INC notes for SP, so we can't assume we'll see
4046 everything that invalidates it. To be safe, don't eliminate any
4047 stores though SP; none of them should be redundant anyway. */
4048 && ! reg_mentioned_p (stack_pointer_rtx, reg))
4049 pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
4052 if (GET_CODE (reg) == REG
4053 && (regno = REGNO (reg),
4054 ! (regno == FRAME_POINTER_REGNUM
4055 && (! reload_completed || frame_pointer_needed)))
4056 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4057 && ! (regno == HARD_FRAME_POINTER_REGNUM
4058 && (! reload_completed || frame_pointer_needed))
4060 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4061 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4065 int some_was_live, some_was_dead;
4067 /* Perform the pbi datastructure update. */
4068 if (! mark_set_reg (pbi, new_dead, reg, cond,
4069 &some_was_live, &some_was_dead))
4072 /* Additional data to record if this is the final pass. */
4073 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
4074 | PROP_DEATH_NOTES | PROP_AUTOINC))
4077 register int blocknum = pbi->bb->index;
4080 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4081 y = pbi->reg_next_use[regno];
4083 /* If this is a hard reg, record this function uses the reg. */
4085 if (regno < FIRST_PSEUDO_REGISTER)
4088 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
4090 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4091 for (i = regno; i < endregno; i++)
4093 /* The next use is no longer "next", since a store
4095 pbi->reg_next_use[i] = 0;
4098 if (flags & PROP_REG_INFO)
4099 for (i = regno; i < endregno; i++)
4101 regs_ever_live[i] = 1;
4107 /* The next use is no longer "next", since a store
4109 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4110 pbi->reg_next_use[regno] = 0;
4112 /* Keep track of which basic blocks each reg appears in. */
4114 if (flags & PROP_REG_INFO)
4116 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4117 REG_BASIC_BLOCK (regno) = blocknum;
4118 else if (REG_BASIC_BLOCK (regno) != blocknum)
4119 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4121 /* Count (weighted) references, stores, etc. This counts a
4122 register twice if it is modified, but that is correct. */
4123 REG_N_SETS (regno)++;
4124 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4126 /* The insns where a reg is live are normally counted
4127 elsewhere, but we want the count to include the insn
4128 where the reg is set, and the normal counting mechanism
4129 would not count it. */
4130 REG_LIVE_LENGTH (regno)++;
4134 if (! some_was_dead)
4136 if (flags & PROP_LOG_LINKS)
4138 /* Make a logical link from the next following insn
4139 that uses this register, back to this insn.
4140 The following insns have already been processed.
4142 We don't build a LOG_LINK for hard registers containing
4143 in ASM_OPERANDs. If these registers get replaced,
4144 we might wind up changing the semantics of the insn,
4145 even if reload can make what appear to be valid
4146 assignments later. */
4147 if (y && (BLOCK_NUM (y) == blocknum)
4148 && (regno >= FIRST_PSEUDO_REGISTER
4149 || asm_noperands (PATTERN (y)) < 0))
4150 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4153 else if (! some_was_live)
4155 if (flags & PROP_REG_INFO)
4156 REG_N_DEATHS (REGNO (reg))++;
4158 if (flags & PROP_DEATH_NOTES)
4160 /* Note that dead stores have already been deleted
4161 when possible. If we get here, we have found a
4162 dead store that cannot be eliminated (because the
4163 same insn does something useful). Indicate this
4164 by marking the reg being set as dying here. */
4166 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4171 if (flags & PROP_DEATH_NOTES)
4173 /* This is a case where we have a multi-word hard register
4174 and some, but not all, of the words of the register are
4175 needed in subsequent insns. Write REG_UNUSED notes
4176 for those parts that were not needed. This case should
4181 for (i = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
4183 if (! REGNO_REG_SET_P (pbi->reg_live, regno + i))
4187 gen_rtx_REG (reg_raw_mode[regno + i], regno + i),
4193 else if (GET_CODE (reg) == REG)
4195 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4196 pbi->reg_next_use[regno] = 0;
4199 /* If this is the last pass and this is a SCRATCH, show it will be dying
4200 here and count it. */
4201 else if (GET_CODE (reg) == SCRATCH)
4203 if (flags & PROP_DEATH_NOTES)
4205 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4209 /* Update data structures for a (possibly conditional) store into REG.
4210 Return true if REG is now unconditionally dead. */
4213 mark_set_reg (pbi, new_dead, reg, cond, p_some_was_live, p_some_was_dead)
4214 struct propagate_block_info *pbi;
4217 rtx cond ATTRIBUTE_UNUSED;
4218 int *p_some_was_live, *p_some_was_dead;
4220 int regno = REGNO (reg);
4221 int some_was_live = REGNO_REG_SET_P (pbi->reg_live, regno);
4222 int some_was_dead = ! some_was_live;
4224 /* Mark it as a significant register for this basic block. */
4226 SET_REGNO_REG_SET (pbi->local_set, regno);
4228 /* A hard reg in a wide mode may really be multiple registers.
4229 If so, mark all of them just like the first. */
4230 if (regno < FIRST_PSEUDO_REGISTER)
4232 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4235 int regno_n = regno + n;
4236 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, regno_n);
4238 SET_REGNO_REG_SET (pbi->local_set, regno_n);
4240 some_was_live |= needed_regno;
4241 some_was_dead |= ! needed_regno;
4245 *p_some_was_live = some_was_live;
4246 *p_some_was_dead = some_was_dead;
4248 /* The stack pointer is never dead. Well, not strictly true, but it's
4249 very difficult to tell from here. Hopefully combine_stack_adjustments
4250 will fix up the most egregious errors. */
4251 if (regno == STACK_POINTER_REGNUM)
4254 /* Mark it as dead before this insn. */
4255 SET_REGNO_REG_SET (new_dead, regno);
4256 if (regno < FIRST_PSEUDO_REGISTER)
4258 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4260 SET_REGNO_REG_SET (new_dead, regno + n);
4263 /* Unconditionally dead. */
4269 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
4273 find_auto_inc (pbi, x, insn)
4274 struct propagate_block_info *pbi;
4278 rtx addr = XEXP (x, 0);
4279 HOST_WIDE_INT offset = 0;
4282 /* Here we detect use of an index register which might be good for
4283 postincrement, postdecrement, preincrement, or predecrement. */
4285 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
4286 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
4288 if (GET_CODE (addr) == REG)
4291 register int size = GET_MODE_SIZE (GET_MODE (x));
4294 int regno = REGNO (addr);
4296 /* Is the next use an increment that might make auto-increment? */
4297 if ((incr = pbi->reg_next_use[regno]) != 0
4298 && (set = single_set (incr)) != 0
4299 && GET_CODE (set) == SET
4300 && BLOCK_NUM (incr) == BLOCK_NUM (insn)
4301 /* Can't add side effects to jumps; if reg is spilled and
4302 reloaded, there's no way to store back the altered value. */
4303 && GET_CODE (insn) != JUMP_INSN
4304 && (y = SET_SRC (set), GET_CODE (y) == PLUS)
4305 && XEXP (y, 0) == addr
4306 && GET_CODE (XEXP (y, 1)) == CONST_INT
4307 && ((HAVE_POST_INCREMENT
4308 && (INTVAL (XEXP (y, 1)) == size && offset == 0))
4309 || (HAVE_POST_DECREMENT
4310 && (INTVAL (XEXP (y, 1)) == - size && offset == 0))
4311 || (HAVE_PRE_INCREMENT
4312 && (INTVAL (XEXP (y, 1)) == size && offset == size))
4313 || (HAVE_PRE_DECREMENT
4314 && (INTVAL (XEXP (y, 1)) == - size && offset == - size)))
4315 /* Make sure this reg appears only once in this insn. */
4316 && (use = find_use_as_address (PATTERN (insn), addr, offset),
4317 use != 0 && use != (rtx) 1))
4319 rtx q = SET_DEST (set);
4320 enum rtx_code inc_code = (INTVAL (XEXP (y, 1)) == size
4321 ? (offset ? PRE_INC : POST_INC)
4322 : (offset ? PRE_DEC : POST_DEC));
4324 if (dead_or_set_p (incr, addr))
4326 /* This is the simple case. Try to make the auto-inc. If
4327 we can't, we are done. Otherwise, we will do any
4328 needed updates below. */
4329 if (! validate_change (insn, &XEXP (x, 0),
4330 gen_rtx_fmt_e (inc_code, Pmode, addr),
4334 else if (GET_CODE (q) == REG
4335 /* PREV_INSN used here to check the semi-open interval
4337 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
4338 /* We must also check for sets of q as q may be
4339 a call clobbered hard register and there may
4340 be a call between PREV_INSN (insn) and incr. */
4341 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
4343 /* We have *p followed sometime later by q = p+size.
4344 Both p and q must be live afterward,
4345 and q is not used between INSN and its assignment.
4346 Change it to q = p, ...*q..., q = q+size.
4347 Then fall into the usual case. */
4352 emit_move_insn (q, addr);
4353 insns = get_insns ();
4356 bb = BLOCK_FOR_INSN (insn);
4357 for (temp = insns; temp; temp = NEXT_INSN (temp))
4358 set_block_for_insn (temp, bb);
4360 /* If we can't make the auto-inc, or can't make the
4361 replacement into Y, exit. There's no point in making
4362 the change below if we can't do the auto-inc and doing
4363 so is not correct in the pre-inc case. */
4365 validate_change (insn, &XEXP (x, 0),
4366 gen_rtx_fmt_e (inc_code, Pmode, q),
4368 validate_change (incr, &XEXP (y, 0), q, 1);
4369 if (! apply_change_group ())
4372 /* We now know we'll be doing this change, so emit the
4373 new insn(s) and do the updates. */
4374 emit_insns_before (insns, insn);
4376 if (BLOCK_FOR_INSN (insn)->head == insn)
4377 BLOCK_FOR_INSN (insn)->head = insns;
4379 /* INCR will become a NOTE and INSN won't contain a
4380 use of ADDR. If a use of ADDR was just placed in
4381 the insn before INSN, make that the next use.
4382 Otherwise, invalidate it. */
4383 if (GET_CODE (PREV_INSN (insn)) == INSN
4384 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
4385 && SET_SRC (PATTERN (PREV_INSN (insn))) == addr)
4386 pbi->reg_next_use[regno] = PREV_INSN (insn);
4388 pbi->reg_next_use[regno] = 0;
4393 /* REGNO is now used in INCR which is below INSN, but it
4394 previously wasn't live here. If we don't mark it as
4395 live, we'll put a REG_DEAD note for it on this insn,
4396 which is incorrect. */
4397 SET_REGNO_REG_SET (pbi->reg_live, regno);
4399 /* If there are any calls between INSN and INCR, show
4400 that REGNO now crosses them. */
4401 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
4402 if (GET_CODE (temp) == CALL_INSN)
4403 REG_N_CALLS_CROSSED (regno)++;
4408 /* If we haven't returned, it means we were able to make the
4409 auto-inc, so update the status. First, record that this insn
4410 has an implicit side effect. */
4413 = alloc_EXPR_LIST (REG_INC, addr, REG_NOTES (insn));
4415 /* Modify the old increment-insn to simply copy
4416 the already-incremented value of our register. */
4417 if (! validate_change (incr, &SET_SRC (set), addr, 0))
4420 /* If that makes it a no-op (copying the register into itself) delete
4421 it so it won't appear to be a "use" and a "set" of this
4423 if (SET_DEST (set) == addr)
4425 PUT_CODE (incr, NOTE);
4426 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
4427 NOTE_SOURCE_FILE (incr) = 0;
4430 if (regno >= FIRST_PSEUDO_REGISTER)
4432 /* Count an extra reference to the reg. When a reg is
4433 incremented, spilling it is worse, so we want to make
4434 that less likely. */
4435 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4437 /* Count the increment as a setting of the register,
4438 even though it isn't a SET in rtl. */
4439 REG_N_SETS (regno)++;
4444 #endif /* AUTO_INC_DEC */
4447 mark_used_reg (pbi, new_live, reg, cond, insn)
4448 struct propagate_block_info *pbi;
4451 rtx cond ATTRIBUTE_UNUSED;
4454 int regno = REGNO (reg);
4455 int some_was_live = REGNO_REG_SET_P (pbi->reg_live, regno);
4456 int some_was_dead = ! some_was_live;
4458 SET_REGNO_REG_SET (new_live, regno);
4460 /* A hard reg in a wide mode may really be multiple registers.
4461 If so, mark all of them just like the first. */
4462 if (regno < FIRST_PSEUDO_REGISTER)
4464 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4467 int regno_n = regno + n;
4468 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, regno_n);
4470 SET_REGNO_REG_SET (new_live, regno_n);
4471 some_was_live |= needed_regno;
4472 some_was_dead |= ! needed_regno;
4476 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4478 /* Record where each reg is used, so when the reg is set we know
4479 the next insn that uses it. */
4480 pbi->reg_next_use[regno] = insn;
4483 if (pbi->flags & PROP_REG_INFO)
4485 if (regno < FIRST_PSEUDO_REGISTER)
4487 /* If this is a register we are going to try to eliminate,
4488 don't mark it live here. If we are successful in
4489 eliminating it, it need not be live unless it is used for
4490 pseudos, in which case it will have been set live when it
4491 was allocated to the pseudos. If the register will not
4492 be eliminated, reload will set it live at that point.
4494 Otherwise, record that this function uses this register. */
4496 if (! TEST_HARD_REG_BIT (elim_reg_set, regno))
4498 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4502 regs_ever_live[regno + --n] = 1;
4508 /* Keep track of which basic block each reg appears in. */
4510 register int blocknum = pbi->bb->index;
4511 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
4512 REG_BASIC_BLOCK (regno) = blocknum;
4513 else if (REG_BASIC_BLOCK (regno) != blocknum)
4514 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
4516 /* Count (weighted) number of uses of each reg. */
4517 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4521 /* Record and count the insns in which a reg dies. If it is used in
4522 this insn and was dead below the insn then it dies in this insn.
4524 If it was set in this insn, we do not make a REG_DEAD note;
4525 likewise if we already made such a note. Recall that dead_or_set_p
4526 checks for complete overlap, and thus is not suitable for the first
4527 case. But it does handle the existing note case. Also recall that
4528 reg_set_p, when presented with the complete insn, will try to infer
4529 things about a call_insn that we do not wish. */
4531 if ((pbi->flags & PROP_DEATH_NOTES)
4533 && ! reg_set_p (reg, PATTERN (insn))
4534 && ! dead_or_set_p (insn, reg))
4538 /* Check for the case where the register dying partially
4539 overlaps the register set by this insn. */
4540 if (regno < FIRST_PSEUDO_REGISTER
4541 && HARD_REGNO_NREGS (regno, GET_MODE (reg)) > 1)
4543 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4545 some_was_live |= dead_or_set_regno_p (insn, regno + n);
4548 /* If none of the words in X is needed, make a REG_DEAD note.
4549 Otherwise, we must make partial REG_DEAD notes. */
4550 if (! some_was_live)
4553 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
4554 REG_N_DEATHS (regno)++;
4558 /* Don't make a REG_DEAD note for a part of a register
4559 that is set in the insn. */
4561 n = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
4562 for (; n >= regno; n--)
4563 if (!REGNO_REG_SET_P (pbi->reg_live, n)
4564 && ! dead_or_set_regno_p (insn, n))
4566 = alloc_EXPR_LIST (REG_DEAD,
4567 gen_rtx_REG (reg_raw_mode[n], n),
4573 /* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
4574 This is done assuming the registers needed from X are those that
4575 have 1-bits in PBI->REG_LIVE.
4577 INSN is the containing instruction. If INSN is dead, this function
4581 mark_used_regs (pbi, new_live, x, cond, insn)
4582 struct propagate_block_info *pbi;
4586 register RTX_CODE code;
4588 int flags = pbi->flags;
4591 code = GET_CODE (x);
4611 /* If we are clobbering a MEM, mark any registers inside the address
4613 if (GET_CODE (XEXP (x, 0)) == MEM)
4614 mark_used_regs (pbi, new_live, XEXP (XEXP (x, 0), 0), cond, insn);
4618 /* Don't bother watching stores to mems if this is not the
4619 final pass. We'll not be deleting dead stores this round. */
4620 if (flags & PROP_SCAN_DEAD_CODE)
4622 /* Invalidate the data for the last MEM stored, but only if MEM is
4623 something that can be stored into. */
4624 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
4625 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
4626 ; /* needn't clear the memory set list */
4629 rtx temp = pbi->mem_set_list;
4630 rtx prev = NULL_RTX;
4635 next = XEXP (temp, 1);
4636 if (anti_dependence (XEXP (temp, 0), x))
4638 /* Splice temp out of the list. */
4640 XEXP (prev, 1) = next;
4642 pbi->mem_set_list = next;
4643 free_EXPR_LIST_node (temp);
4651 /* If the memory reference had embedded side effects (autoincrement
4652 address modes. Then we may need to kill some entries on the
4655 invalidate_mems_from_autoinc (pbi, insn);
4659 if (flags & PROP_AUTOINC)
4660 find_auto_inc (pbi, x, insn);
4665 if (GET_CODE (SUBREG_REG (x)) == REG
4666 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
4667 && (GET_MODE_SIZE (GET_MODE (x))
4668 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
4669 REG_CHANGES_SIZE (REGNO (SUBREG_REG (x))) = 1;
4671 /* While we're here, optimize this case. */
4673 if (GET_CODE (x) != REG)
4678 /* See a register other than being set => mark it as needed. */
4679 mark_used_reg (pbi, new_live, x, cond, insn);
4684 register rtx testreg = SET_DEST (x);
4687 /* If storing into MEM, don't show it as being used. But do
4688 show the address as being used. */
4689 if (GET_CODE (testreg) == MEM)
4692 if (flags & PROP_AUTOINC)
4693 find_auto_inc (pbi, testreg, insn);
4695 mark_used_regs (pbi, new_live, XEXP (testreg, 0), cond, insn);
4696 mark_used_regs (pbi, new_live, SET_SRC (x), cond, insn);
4700 /* Storing in STRICT_LOW_PART is like storing in a reg
4701 in that this SET might be dead, so ignore it in TESTREG.
4702 but in some other ways it is like using the reg.
4704 Storing in a SUBREG or a bit field is like storing the entire
4705 register in that if the register's value is not used
4706 then this SET is not needed. */
4707 while (GET_CODE (testreg) == STRICT_LOW_PART
4708 || GET_CODE (testreg) == ZERO_EXTRACT
4709 || GET_CODE (testreg) == SIGN_EXTRACT
4710 || GET_CODE (testreg) == SUBREG)
4712 if (GET_CODE (testreg) == SUBREG
4713 && GET_CODE (SUBREG_REG (testreg)) == REG
4714 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
4715 && (GET_MODE_SIZE (GET_MODE (testreg))
4716 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (testreg)))))
4717 REG_CHANGES_SIZE (REGNO (SUBREG_REG (testreg))) = 1;
4719 /* Modifying a single register in an alternate mode
4720 does not use any of the old value. But these other
4721 ways of storing in a register do use the old value. */
4722 if (GET_CODE (testreg) == SUBREG
4723 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
4728 testreg = XEXP (testreg, 0);
4731 /* If this is a store into a register, recursively scan the
4732 value being stored. */
4734 if ((GET_CODE (testreg) == PARALLEL
4735 && GET_MODE (testreg) == BLKmode)
4736 || (GET_CODE (testreg) == REG
4737 && (regno = REGNO (testreg),
4738 ! (regno == FRAME_POINTER_REGNUM
4739 && (! reload_completed || frame_pointer_needed)))
4740 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4741 && ! (regno == HARD_FRAME_POINTER_REGNUM
4742 && (! reload_completed || frame_pointer_needed))
4744 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4745 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4748 /* We used to exclude global_regs here, but that seems wrong.
4749 Storing in them is like storing in mem. */
4751 mark_used_regs (pbi, new_live, SET_SRC (x), cond, insn);
4753 mark_used_regs (pbi, new_live, SET_DEST (x), cond, insn);
4760 case UNSPEC_VOLATILE:
4764 /* Traditional and volatile asm instructions must be considered to use
4765 and clobber all hard registers, all pseudo-registers and all of
4766 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
4768 Consider for instance a volatile asm that changes the fpu rounding
4769 mode. An insn should not be moved across this even if it only uses
4770 pseudo-regs because it might give an incorrectly rounded result.
4772 ?!? Unfortunately, marking all hard registers as live causes massive
4773 problems for the register allocator and marking all pseudos as live
4774 creates mountains of uninitialized variable warnings.
4776 So for now, just clear the memory set list and mark any regs
4777 we can find in ASM_OPERANDS as used. */
4778 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
4779 free_EXPR_LIST_list (&pbi->mem_set_list);
4781 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
4782 We can not just fall through here since then we would be confused
4783 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
4784 traditional asms unlike their normal usage. */
4785 if (code == ASM_OPERANDS)
4789 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
4790 mark_used_regs (pbi, new_live, ASM_OPERANDS_INPUT (x, j),
4797 if (cond != NULL_RTX)
4800 mark_used_regs (pbi, new_live, COND_EXEC_TEST (x), NULL_RTX, insn);
4802 cond = COND_EXEC_TEST (x);
4803 x = COND_EXEC_CODE (x);
4807 /* We _do_not_ want to scan operands of phi nodes. Operands of
4808 a phi function are evaluated only when control reaches this
4809 block along a particular edge. Therefore, regs that appear
4810 as arguments to phi should not be added to the global live at
4818 /* Recursively scan the operands of this expression. */
4821 register const char *fmt = GET_RTX_FORMAT (code);
4824 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4828 /* Tail recursive case: save a function call level. */
4834 mark_used_regs (pbi, new_live, XEXP (x, i), cond, insn);
4836 else if (fmt[i] == 'E')
4839 for (j = 0; j < XVECLEN (x, i); j++)
4840 mark_used_regs (pbi, new_live, XVECEXP (x, i, j), cond, insn);
4849 try_pre_increment_1 (pbi, insn)
4850 struct propagate_block_info *pbi;
4853 /* Find the next use of this reg. If in same basic block,
4854 make it do pre-increment or pre-decrement if appropriate. */
4855 rtx x = single_set (insn);
4856 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
4857 * INTVAL (XEXP (SET_SRC (x), 1)));
4858 int regno = REGNO (SET_DEST (x));
4859 rtx y = pbi->reg_next_use[regno];
4861 && BLOCK_NUM (y) == BLOCK_NUM (insn)
4862 /* Don't do this if the reg dies, or gets set in y; a standard addressing
4863 mode would be better. */
4864 && ! dead_or_set_p (y, SET_DEST (x))
4865 && try_pre_increment (y, SET_DEST (x), amount))
4867 /* We have found a suitable auto-increment
4868 and already changed insn Y to do it.
4869 So flush this increment-instruction. */
4870 PUT_CODE (insn, NOTE);
4871 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4872 NOTE_SOURCE_FILE (insn) = 0;
4873 /* Count a reference to this reg for the increment
4874 insn we are deleting. When a reg is incremented.
4875 spilling it is worse, so we want to make that
4877 if (regno >= FIRST_PSEUDO_REGISTER)
4879 REG_N_REFS (regno) += pbi->bb->loop_depth + 1;
4880 REG_N_SETS (regno)++;
4887 /* Try to change INSN so that it does pre-increment or pre-decrement
4888 addressing on register REG in order to add AMOUNT to REG.
4889 AMOUNT is negative for pre-decrement.
4890 Returns 1 if the change could be made.
4891 This checks all about the validity of the result of modifying INSN. */
4894 try_pre_increment (insn, reg, amount)
4896 HOST_WIDE_INT amount;
4900 /* Nonzero if we can try to make a pre-increment or pre-decrement.
4901 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
4903 /* Nonzero if we can try to make a post-increment or post-decrement.
4904 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
4905 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
4906 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
4909 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
4912 /* From the sign of increment, see which possibilities are conceivable
4913 on this target machine. */
4914 if (HAVE_PRE_INCREMENT && amount > 0)
4916 if (HAVE_POST_INCREMENT && amount > 0)
4919 if (HAVE_PRE_DECREMENT && amount < 0)
4921 if (HAVE_POST_DECREMENT && amount < 0)
4924 if (! (pre_ok || post_ok))
4927 /* It is not safe to add a side effect to a jump insn
4928 because if the incremented register is spilled and must be reloaded
4929 there would be no way to store the incremented value back in memory. */
4931 if (GET_CODE (insn) == JUMP_INSN)
4936 use = find_use_as_address (PATTERN (insn), reg, 0);
4937 if (post_ok && (use == 0 || use == (rtx) 1))
4939 use = find_use_as_address (PATTERN (insn), reg, -amount);
4943 if (use == 0 || use == (rtx) 1)
4946 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
4949 /* See if this combination of instruction and addressing mode exists. */
4950 if (! validate_change (insn, &XEXP (use, 0),
4951 gen_rtx_fmt_e (amount > 0
4952 ? (do_post ? POST_INC : PRE_INC)
4953 : (do_post ? POST_DEC : PRE_DEC),
4957 /* Record that this insn now has an implicit side effect on X. */
4958 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
4962 #endif /* AUTO_INC_DEC */
4964 /* Find the place in the rtx X where REG is used as a memory address.
4965 Return the MEM rtx that so uses it.
4966 If PLUSCONST is nonzero, search instead for a memory address equivalent to
4967 (plus REG (const_int PLUSCONST)).
4969 If such an address does not appear, return 0.
4970 If REG appears more than once, or is used other than in such an address,
4974 find_use_as_address (x, reg, plusconst)
4977 HOST_WIDE_INT plusconst;
4979 enum rtx_code code = GET_CODE (x);
4980 const char *fmt = GET_RTX_FORMAT (code);
4982 register rtx value = 0;
4985 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
4988 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
4989 && XEXP (XEXP (x, 0), 0) == reg
4990 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4991 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
4994 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
4996 /* If REG occurs inside a MEM used in a bit-field reference,
4997 that is unacceptable. */
4998 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
4999 return (rtx) (HOST_WIDE_INT) 1;
5003 return (rtx) (HOST_WIDE_INT) 1;
5005 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5009 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
5013 return (rtx) (HOST_WIDE_INT) 1;
5015 else if (fmt[i] == 'E')
5018 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5020 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
5024 return (rtx) (HOST_WIDE_INT) 1;
5032 /* Write information about registers and basic blocks into FILE.
5033 This is part of making a debugging dump. */
5036 dump_regset (r, outf)
5043 fputs (" (nil)", outf);
5047 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
5049 fprintf (outf, " %d", i);
5050 if (i < FIRST_PSEUDO_REGISTER)
5051 fprintf (outf, " [%s]",
5060 dump_regset (r, stderr);
5061 putc ('\n', stderr);
5065 dump_flow_info (file)
5069 static const char * const reg_class_names[] = REG_CLASS_NAMES;
5071 fprintf (file, "%d registers.\n", max_regno);
5072 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
5075 enum reg_class class, altclass;
5076 fprintf (file, "\nRegister %d used %d times across %d insns",
5077 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
5078 if (REG_BASIC_BLOCK (i) >= 0)
5079 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
5081 fprintf (file, "; set %d time%s", REG_N_SETS (i),
5082 (REG_N_SETS (i) == 1) ? "" : "s");
5083 if (REG_USERVAR_P (regno_reg_rtx[i]))
5084 fprintf (file, "; user var");
5085 if (REG_N_DEATHS (i) != 1)
5086 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
5087 if (REG_N_CALLS_CROSSED (i) == 1)
5088 fprintf (file, "; crosses 1 call");
5089 else if (REG_N_CALLS_CROSSED (i))
5090 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
5091 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
5092 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
5093 class = reg_preferred_class (i);
5094 altclass = reg_alternate_class (i);
5095 if (class != GENERAL_REGS || altclass != ALL_REGS)
5097 if (altclass == ALL_REGS || class == ALL_REGS)
5098 fprintf (file, "; pref %s", reg_class_names[(int) class]);
5099 else if (altclass == NO_REGS)
5100 fprintf (file, "; %s or none", reg_class_names[(int) class]);
5102 fprintf (file, "; pref %s, else %s",
5103 reg_class_names[(int) class],
5104 reg_class_names[(int) altclass]);
5106 if (REGNO_POINTER_FLAG (i))
5107 fprintf (file, "; pointer");
5108 fprintf (file, ".\n");
5111 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
5112 for (i = 0; i < n_basic_blocks; i++)
5114 register basic_block bb = BASIC_BLOCK (i);
5117 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d.\n",
5118 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
5120 fprintf (file, "Predecessors: ");
5121 for (e = bb->pred; e ; e = e->pred_next)
5122 dump_edge_info (file, e, 0);
5124 fprintf (file, "\nSuccessors: ");
5125 for (e = bb->succ; e ; e = e->succ_next)
5126 dump_edge_info (file, e, 1);
5128 fprintf (file, "\nRegisters live at start:");
5129 dump_regset (bb->global_live_at_start, file);
5131 fprintf (file, "\nRegisters live at end:");
5132 dump_regset (bb->global_live_at_end, file);
5143 dump_flow_info (stderr);
5147 dump_edge_info (file, e, do_succ)
5152 basic_block side = (do_succ ? e->dest : e->src);
5154 if (side == ENTRY_BLOCK_PTR)
5155 fputs (" ENTRY", file);
5156 else if (side == EXIT_BLOCK_PTR)
5157 fputs (" EXIT", file);
5159 fprintf (file, " %d", side->index);
5163 static const char * const bitnames[] = {
5164 "fallthru", "crit", "ab", "abcall", "eh", "fake"
5167 int i, flags = e->flags;
5171 for (i = 0; flags; i++)
5172 if (flags & (1 << i))
5178 if (i < (int)(sizeof (bitnames) / sizeof (*bitnames)))
5179 fputs (bitnames[i], file);
5181 fprintf (file, "%d", i);
5189 /* Print out one basic block with live information at start and end. */
5199 fprintf (outf, ";; Basic block %d, loop depth %d",
5200 bb->index, bb->loop_depth);
5201 if (bb->eh_beg != -1 || bb->eh_end != -1)
5202 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
5205 fputs (";; Predecessors: ", outf);
5206 for (e = bb->pred; e ; e = e->pred_next)
5207 dump_edge_info (outf, e, 0);
5210 fputs (";; Registers live at start:", outf);
5211 dump_regset (bb->global_live_at_start, outf);
5214 for (insn = bb->head, last = NEXT_INSN (bb->end);
5216 insn = NEXT_INSN (insn))
5217 print_rtl_single (outf, insn);
5219 fputs (";; Registers live at end:", outf);
5220 dump_regset (bb->global_live_at_end, outf);
5223 fputs (";; Successors: ", outf);
5224 for (e = bb->succ; e; e = e->succ_next)
5225 dump_edge_info (outf, e, 1);
5233 dump_bb (bb, stderr);
5240 dump_bb (BASIC_BLOCK(n), stderr);
5243 /* Like print_rtl, but also print out live information for the start of each
5247 print_rtl_with_bb (outf, rtx_first)
5251 register rtx tmp_rtx;
5254 fprintf (outf, "(nil)\n");
5258 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
5259 int max_uid = get_max_uid ();
5260 basic_block *start = (basic_block *)
5261 xcalloc (max_uid, sizeof (basic_block));
5262 basic_block *end = (basic_block *)
5263 xcalloc (max_uid, sizeof (basic_block));
5264 enum bb_state *in_bb_p = (enum bb_state *)
5265 xcalloc (max_uid, sizeof (enum bb_state));
5267 for (i = n_basic_blocks - 1; i >= 0; i--)
5269 basic_block bb = BASIC_BLOCK (i);
5272 start[INSN_UID (bb->head)] = bb;
5273 end[INSN_UID (bb->end)] = bb;
5274 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
5276 enum bb_state state = IN_MULTIPLE_BB;
5277 if (in_bb_p[INSN_UID(x)] == NOT_IN_BB)
5279 in_bb_p[INSN_UID(x)] = state;
5286 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
5291 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
5293 fprintf (outf, ";; Start of basic block %d, registers live:",
5295 dump_regset (bb->global_live_at_start, outf);
5299 if (in_bb_p[INSN_UID(tmp_rtx)] == NOT_IN_BB
5300 && GET_CODE (tmp_rtx) != NOTE
5301 && GET_CODE (tmp_rtx) != BARRIER)
5302 fprintf (outf, ";; Insn is not within a basic block\n");
5303 else if (in_bb_p[INSN_UID(tmp_rtx)] == IN_MULTIPLE_BB)
5304 fprintf (outf, ";; Insn is in multiple basic blocks\n");
5306 did_output = print_rtl_single (outf, tmp_rtx);
5308 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
5310 fprintf (outf, ";; End of basic block %d, registers live:\n",
5312 dump_regset (bb->global_live_at_end, outf);
5325 if (current_function_epilogue_delay_list != 0)
5327 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
5328 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
5329 tmp_rtx = XEXP (tmp_rtx, 1))
5330 print_rtl_single (outf, XEXP (tmp_rtx, 0));
5334 /* Compute dominator relationships using new flow graph structures. */
5336 compute_flow_dominators (dominators, post_dominators)
5337 sbitmap *dominators;
5338 sbitmap *post_dominators;
5341 sbitmap *temp_bitmap;
5343 basic_block *worklist, *workend, *qin, *qout;
5346 /* Allocate a worklist array/queue. Entries are only added to the
5347 list if they were not already on the list. So the size is
5348 bounded by the number of basic blocks. */
5349 worklist = (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);
5350 workend = &worklist[n_basic_blocks];
5352 temp_bitmap = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5353 sbitmap_vector_zero (temp_bitmap, n_basic_blocks);
5357 /* The optimistic setting of dominators requires us to put every
5358 block on the work list initially. */
5359 qin = qout = worklist;
5360 for (bb = 0; bb < n_basic_blocks; bb++)
5362 *qin++ = BASIC_BLOCK (bb);
5363 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5365 qlen = n_basic_blocks;
5368 /* We want a maximal solution, so initially assume everything dominates
5370 sbitmap_vector_ones (dominators, n_basic_blocks);
5372 /* Mark successors of the entry block so we can identify them below. */
5373 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
5374 e->dest->aux = ENTRY_BLOCK_PTR;
5376 /* Iterate until the worklist is empty. */
5379 /* Take the first entry off the worklist. */
5380 basic_block b = *qout++;
5381 if (qout >= workend)
5387 /* Compute the intersection of the dominators of all the
5390 If one of the predecessor blocks is the ENTRY block, then the
5391 intersection of the dominators of the predecessor blocks is
5392 defined as the null set. We can identify such blocks by the
5393 special value in the AUX field in the block structure. */
5394 if (b->aux == ENTRY_BLOCK_PTR)
5396 /* Do not clear the aux field for blocks which are
5397 successors of the ENTRY block. That way we we never
5398 add them to the worklist again.
5400 The intersect of dominators of the preds of this block is
5401 defined as the null set. */
5402 sbitmap_zero (temp_bitmap[bb]);
5406 /* Clear the aux field of this block so it can be added to
5407 the worklist again if necessary. */
5409 sbitmap_intersection_of_preds (temp_bitmap[bb], dominators, bb);
5412 /* Make sure each block always dominates itself. */
5413 SET_BIT (temp_bitmap[bb], bb);
5415 /* If the out state of this block changed, then we need to
5416 add the successors of this block to the worklist if they
5417 are not already on the worklist. */
5418 if (sbitmap_a_and_b (dominators[bb], dominators[bb], temp_bitmap[bb]))
5420 for (e = b->succ; e; e = e->succ_next)
5422 if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
5436 if (post_dominators)
5438 /* The optimistic setting of dominators requires us to put every
5439 block on the work list initially. */
5440 qin = qout = worklist;
5441 for (bb = 0; bb < n_basic_blocks; bb++)
5443 *qin++ = BASIC_BLOCK (bb);
5444 BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
5446 qlen = n_basic_blocks;
5449 /* We want a maximal solution, so initially assume everything post
5450 dominates everything else. */
5451 sbitmap_vector_ones (post_dominators, n_basic_blocks);
5453 /* Mark predecessors of the exit block so we can identify them below. */
5454 for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
5455 e->src->aux = EXIT_BLOCK_PTR;
5457 /* Iterate until the worklist is empty. */
5460 /* Take the first entry off the worklist. */
5461 basic_block b = *qout++;
5462 if (qout >= workend)
5468 /* Compute the intersection of the post dominators of all the
5471 If one of the successor blocks is the EXIT block, then the
5472 intersection of the dominators of the successor blocks is
5473 defined as the null set. We can identify such blocks by the
5474 special value in the AUX field in the block structure. */
5475 if (b->aux == EXIT_BLOCK_PTR)
5477 /* Do not clear the aux field for blocks which are
5478 predecessors of the EXIT block. That way we we never
5479 add them to the worklist again.
5481 The intersect of dominators of the succs of this block is
5482 defined as the null set. */
5483 sbitmap_zero (temp_bitmap[bb]);
5487 /* Clear the aux field of this block so it can be added to
5488 the worklist again if necessary. */
5490 sbitmap_intersection_of_succs (temp_bitmap[bb],
5491 post_dominators, bb);
5494 /* Make sure each block always post dominates itself. */
5495 SET_BIT (temp_bitmap[bb], bb);
5497 /* If the out state of this block changed, then we need to
5498 add the successors of this block to the worklist if they
5499 are not already on the worklist. */
5500 if (sbitmap_a_and_b (post_dominators[bb],
5501 post_dominators[bb],
5504 for (e = b->pred; e; e = e->pred_next)
5506 if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
5524 /* Given DOMINATORS, compute the immediate dominators into IDOM. */
5527 compute_immediate_dominators (idom, dominators)
5529 sbitmap *dominators;
5534 tmp = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
5536 /* Begin with tmp(n) = dom(n) - { n }. */
5537 for (b = n_basic_blocks; --b >= 0; )
5539 sbitmap_copy (tmp[b], dominators[b]);
5540 RESET_BIT (tmp[b], b);
5543 /* Subtract out all of our dominator's dominators. */
5544 for (b = n_basic_blocks; --b >= 0; )
5546 sbitmap tmp_b = tmp[b];
5549 for (s = n_basic_blocks; --s >= 0; )
5550 if (TEST_BIT (tmp_b, s))
5551 sbitmap_difference (tmp_b, tmp_b, tmp[s]);
5554 /* Find the one bit set in the bitmap and put it in the output array. */
5555 for (b = n_basic_blocks; --b >= 0; )
5558 EXECUTE_IF_SET_IN_SBITMAP (tmp[b], 0, t, { idom[b] = t; });
5561 sbitmap_vector_free (tmp);
5564 /* Count for a single SET rtx, X. */
5567 count_reg_sets_1 (x, loop_depth)
5572 register rtx reg = SET_DEST (x);
5574 /* Find the register that's set/clobbered. */
5575 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
5576 || GET_CODE (reg) == SIGN_EXTRACT
5577 || GET_CODE (reg) == STRICT_LOW_PART)
5578 reg = XEXP (reg, 0);
5580 if (GET_CODE (reg) == PARALLEL
5581 && GET_MODE (reg) == BLKmode)
5584 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
5585 count_reg_sets_1 (XVECEXP (reg, 0, i), loop_depth);
5589 if (GET_CODE (reg) == REG)
5591 regno = REGNO (reg);
5592 if (regno >= FIRST_PSEUDO_REGISTER)
5594 /* Count (weighted) references, stores, etc. This counts a
5595 register twice if it is modified, but that is correct. */
5596 REG_N_SETS (regno)++;
5597 REG_N_REFS (regno) += loop_depth + 1;
5602 /* Increment REG_N_SETS for each SET or CLOBBER found in X; also increment
5603 REG_N_REFS by the current loop depth for each SET or CLOBBER found. */
5606 count_reg_sets (x, loop_depth)
5610 register RTX_CODE code = GET_CODE (x);
5612 if (code == SET || code == CLOBBER)
5613 count_reg_sets_1 (x, loop_depth);
5614 else if (code == PARALLEL)
5617 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5619 code = GET_CODE (XVECEXP (x, 0, i));
5620 if (code == SET || code == CLOBBER)
5621 count_reg_sets_1 (XVECEXP (x, 0, i), loop_depth);
5626 /* Increment REG_N_REFS by the current loop depth each register reference
5630 count_reg_references (x, loop_depth)
5634 register RTX_CODE code;
5637 code = GET_CODE (x);
5657 /* If we are clobbering a MEM, mark any registers inside the address
5659 if (GET_CODE (XEXP (x, 0)) == MEM)
5660 count_reg_references (XEXP (XEXP (x, 0), 0), loop_depth);
5664 /* While we're here, optimize this case. */
5667 /* In case the SUBREG is not of a register, don't optimize */
5668 if (GET_CODE (x) != REG)
5670 count_reg_references (x, loop_depth);
5674 /* ... fall through ... */
5677 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
5678 REG_N_REFS (REGNO (x)) += loop_depth + 1;
5683 register rtx testreg = SET_DEST (x);
5686 /* If storing into MEM, don't show it as being used. But do
5687 show the address as being used. */
5688 if (GET_CODE (testreg) == MEM)
5690 count_reg_references (XEXP (testreg, 0), loop_depth);
5691 count_reg_references (SET_SRC (x), loop_depth);
5695 /* Storing in STRICT_LOW_PART is like storing in a reg
5696 in that this SET might be dead, so ignore it in TESTREG.
5697 but in some other ways it is like using the reg.
5699 Storing in a SUBREG or a bit field is like storing the entire
5700 register in that if the register's value is not used
5701 then this SET is not needed. */
5702 while (GET_CODE (testreg) == STRICT_LOW_PART
5703 || GET_CODE (testreg) == ZERO_EXTRACT
5704 || GET_CODE (testreg) == SIGN_EXTRACT
5705 || GET_CODE (testreg) == SUBREG)
5707 /* Modifying a single register in an alternate mode
5708 does not use any of the old value. But these other
5709 ways of storing in a register do use the old value. */
5710 if (GET_CODE (testreg) == SUBREG
5711 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5716 testreg = XEXP (testreg, 0);
5719 /* If this is a store into a register,
5720 recursively scan the value being stored. */
5722 if ((GET_CODE (testreg) == PARALLEL
5723 && GET_MODE (testreg) == BLKmode)
5724 || GET_CODE (testreg) == REG)
5726 count_reg_references (SET_SRC (x), loop_depth);
5728 count_reg_references (SET_DEST (x), loop_depth);
5738 /* Recursively scan the operands of this expression. */
5741 register const char *fmt = GET_RTX_FORMAT (code);
5744 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5748 /* Tail recursive case: save a function call level. */
5754 count_reg_references (XEXP (x, i), loop_depth);
5756 else if (fmt[i] == 'E')
5759 for (j = 0; j < XVECLEN (x, i); j++)
5760 count_reg_references (XVECEXP (x, i, j), loop_depth);
5766 /* Recompute register set/reference counts immediately prior to register
5769 This avoids problems with set/reference counts changing to/from values
5770 which have special meanings to the register allocators.
5772 Additionally, the reference counts are the primary component used by the
5773 register allocators to prioritize pseudos for allocation to hard regs.
5774 More accurate reference counts generally lead to better register allocation.
5776 F is the first insn to be scanned.
5778 LOOP_STEP denotes how much loop_depth should be incremented per
5779 loop nesting level in order to increase the ref count more for
5780 references in a loop.
5782 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
5783 possibly other information which is used by the register allocators. */
5786 recompute_reg_usage (f, loop_step)
5787 rtx f ATTRIBUTE_UNUSED;
5788 int loop_step ATTRIBUTE_UNUSED;
5795 /* Clear out the old data. */
5796 max_reg = max_reg_num ();
5797 for (i = FIRST_PSEUDO_REGISTER; i < max_reg; i++)
5803 /* Scan each insn in the chain and count how many times each register is
5805 for (index = 0; index < n_basic_blocks; index++)
5807 basic_block bb = BASIC_BLOCK (index);
5808 loop_depth = bb->loop_depth;
5809 for (insn = bb->head; insn; insn = NEXT_INSN (insn))
5811 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5815 /* This call will increment REG_N_SETS for each SET or CLOBBER
5816 of a register in INSN. It will also increment REG_N_REFS
5817 by the loop depth for each set of a register in INSN. */
5818 count_reg_sets (PATTERN (insn), loop_depth);
5820 /* count_reg_sets does not detect autoincrement address modes, so
5821 detect them here by looking at the notes attached to INSN. */
5822 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
5824 if (REG_NOTE_KIND (links) == REG_INC)
5825 /* Count (weighted) references, stores, etc. This
5826 counts a register twice if it is modified, but
5828 REG_N_SETS (REGNO (XEXP (links, 0)))++;
5831 /* This call will increment REG_N_REFS by the current loop depth
5832 for each reference to a register in INSN. */
5833 count_reg_references (PATTERN (insn), loop_depth);
5835 /* count_reg_references will not include counts for arguments to
5836 function calls, so detect them here by examining the
5837 CALL_INSN_FUNCTION_USAGE data. */
5838 if (GET_CODE (insn) == CALL_INSN)
5842 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5844 note = XEXP (note, 1))
5845 if (GET_CODE (XEXP (note, 0)) == USE)
5846 count_reg_references (XEXP (XEXP (note, 0), 0),
5850 if (insn == bb->end)
5856 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
5857 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
5858 of the number of registers that died. */
5861 count_or_remove_death_notes (blocks, kill)
5867 for (i = n_basic_blocks - 1; i >= 0; --i)
5872 if (blocks && ! TEST_BIT (blocks, i))
5875 bb = BASIC_BLOCK (i);
5877 for (insn = bb->head; ; insn = NEXT_INSN (insn))
5879 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
5881 rtx *pprev = ®_NOTES (insn);
5886 switch (REG_NOTE_KIND (link))
5889 if (GET_CODE (XEXP (link, 0)) == REG)
5891 rtx reg = XEXP (link, 0);
5894 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
5897 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
5905 rtx next = XEXP (link, 1);
5906 free_EXPR_LIST_node (link);
5907 *pprev = link = next;
5913 pprev = &XEXP (link, 1);
5920 if (insn == bb->end)
5928 /* Record INSN's block as BB. */
5931 set_block_for_insn (insn, bb)
5935 size_t uid = INSN_UID (insn);
5936 if (uid >= basic_block_for_insn->num_elements)
5940 /* Add one-eighth the size so we don't keep calling xrealloc. */
5941 new_size = uid + (uid + 7) / 8;
5943 VARRAY_GROW (basic_block_for_insn, new_size);
5945 VARRAY_BB (basic_block_for_insn, uid) = bb;
5948 /* Record INSN's block number as BB. */
5949 /* ??? This has got to go. */
5952 set_block_num (insn, bb)
5956 set_block_for_insn (insn, BASIC_BLOCK (bb));
5959 /* Verify the CFG consistency. This function check some CFG invariants and
5960 aborts when something is wrong. Hope that this function will help to
5961 convert many optimization passes to preserve CFG consistent.
5963 Currently it does following checks:
5965 - test head/end pointers
5966 - overlapping of basic blocks
5967 - edge list corectness
5968 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
5969 - tails of basic blocks (ensure that boundary is necesary)
5970 - scans body of the basic block for JUMP_INSN, CODE_LABEL
5971 and NOTE_INSN_BASIC_BLOCK
5972 - check that all insns are in the basic blocks
5973 (except the switch handling code, barriers and notes)
5974 - check that all returns are followed by barriers
5976 In future it can be extended check a lot of other stuff as well
5977 (reachability of basic blocks, life information, etc. etc.). */
5982 const int max_uid = get_max_uid ();
5983 const rtx rtx_first = get_insns ();
5984 basic_block *bb_info;
5988 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
5990 /* First pass check head/end pointers and set bb_info array used by
5992 for (i = n_basic_blocks - 1; i >= 0; i--)
5994 basic_block bb = BASIC_BLOCK (i);
5996 /* Check the head pointer and make sure that it is pointing into
5998 for (x = rtx_first; x != NULL_RTX; x = NEXT_INSN (x))
6003 error ("Head insn %d for block %d not found in the insn stream.",
6004 INSN_UID (bb->head), bb->index);
6008 /* Check the end pointer and make sure that it is pointing into
6010 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
6012 if (bb_info[INSN_UID (x)] != NULL)
6014 error ("Insn %d is in multiple basic blocks (%d and %d)",
6015 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
6018 bb_info[INSN_UID (x)] = bb;
6025 error ("End insn %d for block %d not found in the insn stream.",
6026 INSN_UID (bb->end), bb->index);
6031 /* Now check the basic blocks (boundaries etc.) */
6032 for (i = n_basic_blocks - 1; i >= 0; i--)
6034 basic_block bb = BASIC_BLOCK (i);
6035 /* Check corectness of edge lists */
6043 fprintf (stderr, "verify_flow_info: Basic block %d succ edge is corrupted\n",
6045 fprintf (stderr, "Predecessor: ");
6046 dump_edge_info (stderr, e, 0);
6047 fprintf (stderr, "\nSuccessor: ");
6048 dump_edge_info (stderr, e, 1);
6052 if (e->dest != EXIT_BLOCK_PTR)
6054 edge e2 = e->dest->pred;
6055 while (e2 && e2 != e)
6059 error ("Basic block %i edge lists are corrupted", bb->index);
6071 error ("Basic block %d pred edge is corrupted", bb->index);
6072 fputs ("Predecessor: ", stderr);
6073 dump_edge_info (stderr, e, 0);
6074 fputs ("\nSuccessor: ", stderr);
6075 dump_edge_info (stderr, e, 1);
6076 fputc ('\n', stderr);
6079 if (e->src != ENTRY_BLOCK_PTR)
6081 edge e2 = e->src->succ;
6082 while (e2 && e2 != e)
6086 error ("Basic block %i edge lists are corrupted", bb->index);
6093 /* OK pointers are correct. Now check the header of basic
6094 block. It ought to contain optional CODE_LABEL followed
6095 by NOTE_BASIC_BLOCK. */
6097 if (GET_CODE (x) == CODE_LABEL)
6101 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
6107 if (GET_CODE (x) != NOTE
6108 || NOTE_LINE_NUMBER (x) != NOTE_INSN_BASIC_BLOCK
6109 || NOTE_BASIC_BLOCK (x) != bb)
6111 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
6118 /* Do checks for empty blocks here */
6125 if (GET_CODE (x) == NOTE
6126 && NOTE_LINE_NUMBER (x) == NOTE_INSN_BASIC_BLOCK)
6128 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
6129 INSN_UID (x), bb->index);
6136 if (GET_CODE (x) == JUMP_INSN
6137 || GET_CODE (x) == CODE_LABEL
6138 || GET_CODE (x) == BARRIER)
6140 error ("In basic block %d:", bb->index);
6141 fatal_insn ("Flow control insn inside a basic block", x);
6152 if (!bb_info[INSN_UID (x)])
6154 switch (GET_CODE (x))
6161 /* An addr_vec is placed outside any block block. */
6163 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
6164 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
6165 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
6170 /* But in any case, non-deletable labels can appear anywhere. */
6174 fatal_insn ("Insn outside basic block", x);
6178 if (GET_RTX_CLASS (GET_CODE (x)) == 'i'
6179 && GET_CODE (x) == JUMP_INSN
6180 && returnjump_p (x) && ! condjump_p (x)
6181 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
6182 fatal_insn ("Return not followed by barrier", x);
6194 /* Functions to access an edge list with a vector representation.
6195 Enough data is kept such that given an index number, the
6196 pred and succ that edge reprsents can be determined, or
6197 given a pred and a succ, it's index number can be returned.
6198 This allows algorithms which comsume a lot of memory to
6199 represent the normally full matrix of edge (pred,succ) with a
6200 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
6201 wasted space in the client code due to sparse flow graphs. */
6203 /* This functions initializes the edge list. Basically the entire
6204 flowgraph is processed, and all edges are assigned a number,
6205 and the data structure is filed in. */
6209 struct edge_list *elist;
6215 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
6219 /* Determine the number of edges in the flow graph by counting successor
6220 edges on each basic block. */
6221 for (x = 0; x < n_basic_blocks; x++)
6223 basic_block bb = BASIC_BLOCK (x);
6225 for (e = bb->succ; e; e = e->succ_next)
6228 /* Don't forget successors of the entry block. */
6229 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6232 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
6233 elist->num_blocks = block_count;
6234 elist->num_edges = num_edges;
6235 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
6239 /* Follow successors of the entry block, and register these edges. */
6240 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6242 elist->index_to_edge[num_edges] = e;
6246 for (x = 0; x < n_basic_blocks; x++)
6248 basic_block bb = BASIC_BLOCK (x);
6250 /* Follow all successors of blocks, and register these edges. */
6251 for (e = bb->succ; e; e = e->succ_next)
6253 elist->index_to_edge[num_edges] = e;
6260 /* This function free's memory associated with an edge list. */
6262 free_edge_list (elist)
6263 struct edge_list *elist;
6267 free (elist->index_to_edge);
6272 /* This function provides debug output showing an edge list. */
6274 print_edge_list (f, elist)
6276 struct edge_list *elist;
6279 fprintf(f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
6280 elist->num_blocks - 2, elist->num_edges);
6282 for (x = 0; x < elist->num_edges; x++)
6284 fprintf (f, " %-4d - edge(", x);
6285 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
6286 fprintf (f,"entry,");
6288 fprintf (f,"%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
6290 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
6291 fprintf (f,"exit)\n");
6293 fprintf (f,"%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
6297 /* This function provides an internal consistancy check of an edge list,
6298 verifying that all edges are present, and that there are no
6301 verify_edge_list (f, elist)
6303 struct edge_list *elist;
6305 int x, pred, succ, index;
6308 for (x = 0; x < n_basic_blocks; x++)
6310 basic_block bb = BASIC_BLOCK (x);
6312 for (e = bb->succ; e; e = e->succ_next)
6314 pred = e->src->index;
6315 succ = e->dest->index;
6316 index = EDGE_INDEX (elist, e->src, e->dest);
6317 if (index == EDGE_INDEX_NO_EDGE)
6319 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6322 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6323 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6324 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6325 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6326 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6327 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6330 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
6332 pred = e->src->index;
6333 succ = e->dest->index;
6334 index = EDGE_INDEX (elist, e->src, e->dest);
6335 if (index == EDGE_INDEX_NO_EDGE)
6337 fprintf (f, "*p* No index for edge from %d to %d\n",pred, succ);
6340 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
6341 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
6342 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
6343 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
6344 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
6345 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
6347 /* We've verified that all the edges are in the list, no lets make sure
6348 there are no spurious edges in the list. */
6350 for (pred = 0 ; pred < n_basic_blocks; pred++)
6351 for (succ = 0 ; succ < n_basic_blocks; succ++)
6353 basic_block p = BASIC_BLOCK (pred);
6354 basic_block s = BASIC_BLOCK (succ);
6358 for (e = p->succ; e; e = e->succ_next)
6364 for (e = s->pred; e; e = e->pred_next)
6370 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6371 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6372 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
6374 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
6375 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6376 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
6377 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6378 BASIC_BLOCK (succ)));
6380 for (succ = 0 ; succ < n_basic_blocks; succ++)
6382 basic_block p = ENTRY_BLOCK_PTR;
6383 basic_block s = BASIC_BLOCK (succ);
6387 for (e = p->succ; e; e = e->succ_next)
6393 for (e = s->pred; e; e = e->pred_next)
6399 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6400 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6401 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
6403 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
6404 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6405 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
6406 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
6407 BASIC_BLOCK (succ)));
6409 for (pred = 0 ; pred < n_basic_blocks; pred++)
6411 basic_block p = BASIC_BLOCK (pred);
6412 basic_block s = EXIT_BLOCK_PTR;
6416 for (e = p->succ; e; e = e->succ_next)
6422 for (e = s->pred; e; e = e->pred_next)
6428 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6429 == EDGE_INDEX_NO_EDGE && found_edge != 0)
6430 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
6432 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
6433 != EDGE_INDEX_NO_EDGE && found_edge == 0)
6434 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
6435 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
6440 /* This routine will determine what, if any, edge there is between
6441 a specified predecessor and successor. */
6444 find_edge_index (edge_list, pred, succ)
6445 struct edge_list *edge_list;
6446 basic_block pred, succ;
6449 for (x = 0; x < NUM_EDGES (edge_list); x++)
6451 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
6452 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
6455 return (EDGE_INDEX_NO_EDGE);
6458 /* This function will remove an edge from the flow graph. */
6463 edge last_pred = NULL;
6464 edge last_succ = NULL;
6466 basic_block src, dest;
6469 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
6475 last_succ->succ_next = e->succ_next;
6477 src->succ = e->succ_next;
6479 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
6485 last_pred->pred_next = e->pred_next;
6487 dest->pred = e->pred_next;
6493 /* This routine will remove any fake successor edges for a basic block.
6494 When the edge is removed, it is also removed from whatever predecessor
6497 remove_fake_successors (bb)
6501 for (e = bb->succ; e ; )
6505 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
6510 /* This routine will remove all fake edges from the flow graph. If
6511 we remove all fake successors, it will automatically remove all
6512 fake predecessors. */
6514 remove_fake_edges ()
6518 for (x = 0; x < n_basic_blocks; x++)
6519 remove_fake_successors (BASIC_BLOCK (x));
6521 /* We've handled all successors except the entry block's. */
6522 remove_fake_successors (ENTRY_BLOCK_PTR);
6525 /* This functions will add a fake edge between any block which has no
6526 successors, and the exit block. Some data flow equations require these
6529 add_noreturn_fake_exit_edges ()
6533 for (x = 0; x < n_basic_blocks; x++)
6534 if (BASIC_BLOCK (x)->succ == NULL)
6535 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
6538 /* Dump the list of basic blocks in the bitmap NODES. */
6540 flow_nodes_print (str, nodes, file)
6542 const sbitmap nodes;
6547 fprintf (file, "%s { ", str);
6548 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
6549 fputs ("}\n", file);
6553 /* Dump the list of exiting edges in the array EDGES. */
6555 flow_exits_print (str, edges, num_edges, file)
6563 fprintf (file, "%s { ", str);
6564 for (i = 0; i < num_edges; i++)
6565 fprintf (file, "%d->%d ", edges[i]->src->index, edges[i]->dest->index);
6566 fputs ("}\n", file);
6570 /* Dump loop related CFG information. */
6572 flow_loops_cfg_dump (loops, file)
6573 const struct loops *loops;
6578 if (! loops->num || ! file || ! loops->cfg.dom)
6581 for (i = 0; i < n_basic_blocks; i++)
6585 fprintf (file, ";; %d succs { ", i);
6586 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
6587 fprintf (file, "%d ", succ->dest->index);
6588 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
6592 /* Dump the DFS node order. */
6593 if (loops->cfg.dfs_order)
6595 fputs (";; DFS order: ", file);
6596 for (i = 0; i < n_basic_blocks; i++)
6597 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
6603 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
6605 flow_loop_nested_p (outer, loop)
6609 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
6613 /* Dump the loop information specified by LOOPS to the stream FILE. */
6615 flow_loops_dump (loops, file, verbose)
6616 const struct loops *loops;
6623 num_loops = loops->num;
6624 if (! num_loops || ! file)
6627 fprintf (file, ";; %d loops found, %d levels\n",
6628 num_loops, loops->levels);
6630 for (i = 0; i < num_loops; i++)
6632 struct loop *loop = &loops->array[i];
6634 fprintf (file, ";; loop %d (%d to %d):\n;; header %d, latch %d, pre-header %d, depth %d, level %d, outer %ld\n",
6635 i, INSN_UID (loop->header->head), INSN_UID (loop->latch->end),
6636 loop->header->index, loop->latch->index,
6637 loop->pre_header ? loop->pre_header->index : -1,
6638 loop->depth, loop->level,
6639 (long) (loop->outer ? (loop->outer - loops->array) : -1));
6640 fprintf (file, ";; %d", loop->num_nodes);
6641 flow_nodes_print (" nodes", loop->nodes, file);
6642 fprintf (file, ";; %d", loop->num_exits);
6643 flow_exits_print (" exits", loop->exits, loop->num_exits, file);
6649 for (j = 0; j < i; j++)
6651 struct loop *oloop = &loops->array[j];
6653 if (loop->header == oloop->header)
6658 smaller = loop->num_nodes < oloop->num_nodes;
6660 /* If the union of LOOP and OLOOP is different than
6661 the larger of LOOP and OLOOP then LOOP and OLOOP
6662 must be disjoint. */
6663 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
6664 smaller ? oloop : loop);
6665 fprintf (file, ";; loop header %d shared by loops %d, %d %s\n",
6666 loop->header->index, i, j,
6667 disjoint ? "disjoint" : "nested");
6674 /* Print diagnostics to compare our concept of a loop with
6675 what the loop notes say. */
6676 if (GET_CODE (PREV_INSN (loop->first->head)) != NOTE
6677 || NOTE_LINE_NUMBER (PREV_INSN (loop->first->head))
6678 != NOTE_INSN_LOOP_BEG)
6679 fprintf (file, ";; No NOTE_INSN_LOOP_BEG at %d\n",
6680 INSN_UID (PREV_INSN (loop->first->head)));
6681 if (GET_CODE (NEXT_INSN (loop->last->end)) != NOTE
6682 || NOTE_LINE_NUMBER (NEXT_INSN (loop->last->end))
6683 != NOTE_INSN_LOOP_END)
6684 fprintf (file, ";; No NOTE_INSN_LOOP_END at %d\n",
6685 INSN_UID (NEXT_INSN (loop->last->end)));
6690 flow_loops_cfg_dump (loops, file);
6694 /* Free all the memory allocated for LOOPS. */
6696 flow_loops_free (loops)
6697 struct loops *loops;
6706 /* Free the loop descriptors. */
6707 for (i = 0; i < loops->num; i++)
6709 struct loop *loop = &loops->array[i];
6712 sbitmap_free (loop->nodes);
6716 free (loops->array);
6717 loops->array = NULL;
6720 sbitmap_vector_free (loops->cfg.dom);
6721 if (loops->cfg.dfs_order)
6722 free (loops->cfg.dfs_order);
6724 sbitmap_free (loops->shared_headers);
6729 /* Find the exits from the loop using the bitmap of loop nodes NODES
6730 and store in EXITS array. Return the number of exits from the
6733 flow_loop_exits_find (nodes, exits)
6734 const sbitmap nodes;
6743 /* Check all nodes within the loop to see if there are any
6744 successors not in the loop. Note that a node may have multiple
6747 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6748 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6750 basic_block dest = e->dest;
6752 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6760 *exits = (edge *) xmalloc (num_exits * sizeof (edge *));
6762 /* Store all exiting edges into an array. */
6764 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
6765 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
6767 basic_block dest = e->dest;
6769 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
6770 (*exits)[num_exits++] = e;
6778 /* Find the nodes contained within the loop with header HEADER and
6779 latch LATCH and store in NODES. Return the number of nodes within
6782 flow_loop_nodes_find (header, latch, nodes)
6791 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
6794 /* Start with only the loop header in the set of loop nodes. */
6795 sbitmap_zero (nodes);
6796 SET_BIT (nodes, header->index);
6798 header->loop_depth++;
6800 /* Push the loop latch on to the stack. */
6801 if (! TEST_BIT (nodes, latch->index))
6803 SET_BIT (nodes, latch->index);
6804 latch->loop_depth++;
6806 stack[sp++] = latch;
6815 for (e = node->pred; e; e = e->pred_next)
6817 basic_block ancestor = e->src;
6819 /* If each ancestor not marked as part of loop, add to set of
6820 loop nodes and push on to stack. */
6821 if (ancestor != ENTRY_BLOCK_PTR
6822 && ! TEST_BIT (nodes, ancestor->index))
6824 SET_BIT (nodes, ancestor->index);
6825 ancestor->loop_depth++;
6827 stack[sp++] = ancestor;
6836 /* Compute the depth first search order and store in the array
6837 DFS_ORDER, marking the nodes visited in VISITED. Returns the
6838 number of nodes visited. */
6840 flow_depth_first_order_compute (dfs_order)
6849 /* Allocate stack for back-tracking up CFG. */
6850 stack = (edge *) xmalloc (n_basic_blocks * sizeof (edge));
6853 /* Allocate bitmap to track nodes that have been visited. */
6854 visited = sbitmap_alloc (n_basic_blocks);
6856 /* None of the nodes in the CFG have been visited yet. */
6857 sbitmap_zero (visited);
6859 /* Start with the first successor edge from the entry block. */
6860 e = ENTRY_BLOCK_PTR->succ;
6863 basic_block src = e->src;
6864 basic_block dest = e->dest;
6866 /* Mark that we have visited this node. */
6867 if (src != ENTRY_BLOCK_PTR)
6868 SET_BIT (visited, src->index);
6870 /* If this node has not been visited before, push the current
6871 edge on to the stack and proceed with the first successor
6872 edge of this node. */
6873 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6881 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index)
6884 /* DEST has no successors (for example, a non-returning
6885 function is called) so do not push the current edge
6886 but carry on with its next successor. */
6887 dfs_order[dest->index] = n_basic_blocks - ++dfsnum;
6888 SET_BIT (visited, dest->index);
6891 while (! e->succ_next && src != ENTRY_BLOCK_PTR)
6893 dfs_order[src->index] = n_basic_blocks - ++dfsnum;
6895 /* Pop edge off stack. */
6903 sbitmap_free (visited);
6905 /* The number of nodes visited should not be greater than
6907 if (dfsnum > n_basic_blocks)
6910 /* There are some nodes left in the CFG that are unreachable. */
6911 if (dfsnum < n_basic_blocks)
6917 /* Return the block for the pre-header of the loop with header
6918 HEADER where DOM specifies the dominator information. Return NULL if
6919 there is no pre-header. */
6921 flow_loop_pre_header_find (header, dom)
6925 basic_block pre_header;
6928 /* If block p is a predecessor of the header and is the only block
6929 that the header does not dominate, then it is the pre-header. */
6931 for (e = header->pred; e; e = e->pred_next)
6933 basic_block node = e->src;
6935 if (node != ENTRY_BLOCK_PTR
6936 && ! TEST_BIT (dom[node->index], header->index))
6938 if (pre_header == NULL)
6942 /* There are multiple edges into the header from outside
6943 the loop so there is no pre-header block. */
6953 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
6954 previously added. The insertion algorithm assumes that the loops
6955 are added in the order found by a depth first search of the CFG. */
6957 flow_loop_tree_node_add (prevloop, loop)
6958 struct loop *prevloop;
6962 if (flow_loop_nested_p (prevloop, loop))
6964 prevloop->inner = loop;
6965 loop->outer = prevloop;
6969 while (prevloop->outer)
6971 if (flow_loop_nested_p (prevloop->outer, loop))
6973 prevloop->next = loop;
6974 loop->outer = prevloop->outer;
6977 prevloop = prevloop->outer;
6980 prevloop->next = loop;
6985 /* Build the loop hierarchy tree for LOOPS. */
6987 flow_loops_tree_build (loops)
6988 struct loops *loops;
6993 num_loops = loops->num;
6997 /* Root the loop hierarchy tree with the first loop found.
6998 Since we used a depth first search this should be the
7000 loops->tree = &loops->array[0];
7001 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
7003 /* Add the remaining loops to the tree. */
7004 for (i = 1; i < num_loops; i++)
7005 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
7009 /* Helper function to compute loop nesting depth and enclosed loop level
7010 for the natural loop specified by LOOP at the loop depth DEPTH.
7011 Returns the loop level. */
7013 flow_loop_level_compute (loop, depth)
7023 /* Traverse loop tree assigning depth and computing level as the
7024 maximum level of all the inner loops of this loop. The loop
7025 level is equivalent to the height of the loop in the loop tree
7026 and corresponds to the number of enclosed loop levels (including
7028 for (inner = loop->inner; inner; inner = inner->next)
7032 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
7037 loop->level = level;
7038 loop->depth = depth;
7043 /* Compute the loop nesting depth and enclosed loop level for the loop
7044 hierarchy tree specfied by LOOPS. Return the maximum enclosed loop
7048 flow_loops_level_compute (loops)
7049 struct loops *loops;
7055 /* Traverse all the outer level loops. */
7056 for (loop = loops->tree; loop; loop = loop->next)
7058 level = flow_loop_level_compute (loop, 1);
7066 /* Find all the natural loops in the function and save in LOOPS structure
7067 and recalculate loop_depth information in basic block structures.
7068 Return the number of natural loops found. */
7071 flow_loops_find (loops)
7072 struct loops *loops;
7083 loops->array = NULL;
7087 /* Taking care of this degenerate case makes the rest of
7088 this code simpler. */
7089 if (n_basic_blocks == 0)
7092 /* Compute the dominators. */
7093 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
7094 compute_flow_dominators (dom, NULL);
7096 /* Count the number of loop edges (back edges). This should be the
7097 same as the number of natural loops. Also clear the loop_depth
7098 and as we work from inner->outer in a loop nest we call
7099 find_loop_nodes_find which will increment loop_depth for nodes
7100 within the current loop, which happens to enclose inner loops. */
7103 for (b = 0; b < n_basic_blocks; b++)
7105 BASIC_BLOCK (b)->loop_depth = 0;
7106 for (e = BASIC_BLOCK (b)->pred; e; e = e->pred_next)
7108 basic_block latch = e->src;
7110 /* Look for back edges where a predecessor is dominated
7111 by this block. A natural loop has a single entry
7112 node (header) that dominates all the nodes in the
7113 loop. It also has single back edge to the header
7114 from a latch node. Note that multiple natural loops
7115 may share the same header. */
7116 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
7123 /* Compute depth first search order of the CFG so that outer
7124 natural loops will be found before inner natural loops. */
7125 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
7126 flow_depth_first_order_compute (dfs_order);
7128 /* Allocate loop structures. */
7130 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
7132 headers = sbitmap_alloc (n_basic_blocks);
7133 sbitmap_zero (headers);
7135 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
7136 sbitmap_zero (loops->shared_headers);
7138 /* Find and record information about all the natural loops
7141 for (b = 0; b < n_basic_blocks; b++)
7145 /* Search the nodes of the CFG in DFS order that we can find
7146 outer loops first. */
7147 header = BASIC_BLOCK (dfs_order[b]);
7149 /* Look for all the possible latch blocks for this header. */
7150 for (e = header->pred; e; e = e->pred_next)
7152 basic_block latch = e->src;
7154 /* Look for back edges where a predecessor is dominated
7155 by this block. A natural loop has a single entry
7156 node (header) that dominates all the nodes in the
7157 loop. It also has single back edge to the header
7158 from a latch node. Note that multiple natural loops
7159 may share the same header. */
7160 if (latch != ENTRY_BLOCK_PTR
7161 && TEST_BIT (dom[latch->index], header->index))
7165 loop = loops->array + num_loops;
7167 loop->header = header;
7168 loop->latch = latch;
7170 /* Keep track of blocks that are loop headers so
7171 that we can tell which loops should be merged. */
7172 if (TEST_BIT (headers, header->index))
7173 SET_BIT (loops->shared_headers, header->index);
7174 SET_BIT (headers, header->index);
7176 /* Find nodes contained within the loop. */
7177 loop->nodes = sbitmap_alloc (n_basic_blocks);
7179 = flow_loop_nodes_find (header, latch, loop->nodes);
7181 /* Compute first and last blocks within the loop.
7182 These are often the same as the loop header and
7183 loop latch respectively, but this is not always
7186 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
7188 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
7190 /* Find edges which exit the loop. Note that a node
7191 may have several exit edges. */
7193 = flow_loop_exits_find (loop->nodes, &loop->exits);
7195 /* Look to see if the loop has a pre-header node. */
7197 = flow_loop_pre_header_find (header, dom);
7204 /* Natural loops with shared headers may either be disjoint or
7205 nested. Disjoint loops with shared headers cannot be inner
7206 loops and should be merged. For now just mark loops that share
7208 for (i = 0; i < num_loops; i++)
7209 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
7210 loops->array[i].shared = 1;
7212 sbitmap_free (headers);
7215 loops->num = num_loops;
7217 /* Save CFG derived information to avoid recomputing it. */
7218 loops->cfg.dom = dom;
7219 loops->cfg.dfs_order = dfs_order;
7221 /* Build the loop hierarchy tree. */
7222 flow_loops_tree_build (loops);
7224 /* Assign the loop nesting depth and enclosed loop level for each
7226 loops->levels = flow_loops_level_compute (loops);
7232 /* Return non-zero if edge E enters header of LOOP from outside of LOOP. */
7234 flow_loop_outside_edge_p (loop, e)
7235 const struct loop *loop;
7238 if (e->dest != loop->header)
7240 return (e->src == ENTRY_BLOCK_PTR)
7241 || ! TEST_BIT (loop->nodes, e->src->index);
7245 /* Clear LOG_LINKS fields of insns in a chain. */
7247 clear_log_links (insns)
7251 for (i = insns; i; i = NEXT_INSN (i))
7252 if (GET_RTX_CLASS (GET_CODE (i)) == 'i')