1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 /* This file contains the data flow analysis pass of the compiler. It
23 computes data flow information which tells combine_instructions
24 which insns to consider combining and controls register allocation.
26 Additional data flow information that is too bulky to record is
27 generated during the analysis, and is used at that time to create
28 autoincrement and autodecrement addressing.
30 The first step is dividing the function into basic blocks.
31 find_basic_blocks does this. Then life_analysis determines
32 where each register is live and where it is dead.
34 ** find_basic_blocks **
36 find_basic_blocks divides the current function's rtl into basic
37 blocks and constructs the CFG. The blocks are recorded in the
38 basic_block_info array; the CFG exists in the edge structures
39 referenced by the blocks.
41 find_basic_blocks also finds any unreachable loops and deletes them.
45 life_analysis is called immediately after find_basic_blocks.
46 It uses the basic block information to determine where each
47 hard or pseudo register is live.
49 ** live-register info **
51 The information about where each register is live is in two parts:
52 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
54 basic_block->global_live_at_start has an element for each basic
55 block, and the element is a bit-vector with a bit for each hard or
56 pseudo register. The bit is 1 if the register is live at the
57 beginning of the basic block.
59 Two types of elements can be added to an insn's REG_NOTES.
60 A REG_DEAD note is added to an insn's REG_NOTES for any register
61 that meets both of two conditions: The value in the register is not
62 needed in subsequent insns and the insn does not replace the value in
63 the register (in the case of multi-word hard registers, the value in
64 each register must be replaced by the insn to avoid a REG_DEAD note).
66 In the vast majority of cases, an object in a REG_DEAD note will be
67 used somewhere in the insn. The (rare) exception to this is if an
68 insn uses a multi-word hard register and only some of the registers are
69 needed in subsequent insns. In that case, REG_DEAD notes will be
70 provided for those hard registers that are not subsequently needed.
71 Partial REG_DEAD notes of this type do not occur when an insn sets
72 only some of the hard registers used in such a multi-word operand;
73 omitting REG_DEAD notes for objects stored in an insn is optional and
74 the desire to do so does not justify the complexity of the partial
77 REG_UNUSED notes are added for each register that is set by the insn
78 but is unused subsequently (if every register set by the insn is unused
79 and the insn does not reference memory or have some other side-effect,
80 the insn is deleted instead). If only part of a multi-word hard
81 register is used in a subsequent insn, REG_UNUSED notes are made for
82 the parts that will not be used.
84 To determine which registers are live after any insn, one can
85 start from the beginning of the basic block and scan insns, noting
86 which registers are set by each insn and which die there.
88 ** Other actions of life_analysis **
90 life_analysis sets up the LOG_LINKS fields of insns because the
91 information needed to do so is readily available.
93 life_analysis deletes insns whose only effect is to store a value
96 life_analysis notices cases where a reference to a register as
97 a memory address can be combined with a preceding or following
98 incrementation or decrementation of the register. The separate
99 instruction to increment or decrement is deleted and the address
100 is changed to a POST_INC or similar rtx.
102 Each time an incrementing or decrementing address is created,
103 a REG_INC element is added to the insn's REG_NOTES list.
105 life_analysis fills in certain vectors containing information about
106 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
107 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
109 life_analysis sets current_function_sp_is_unchanging if the function
110 doesn't modify the stack pointer. */
114 Split out from life_analysis:
115 - local property discovery (bb->local_live, bb->local_set)
116 - global property computation
118 - pre/post modify transformation
126 #include "hard-reg-set.h"
127 #include "basic-block.h"
128 #include "insn-config.h"
132 #include "function.h"
136 #include "insn-flags.h"
141 #include "splay-tree.h"
143 #define obstack_chunk_alloc xmalloc
144 #define obstack_chunk_free free
146 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
147 the stack pointer does not matter. The value is tested only in
148 functions that have frame pointers.
149 No definition is equivalent to always zero. */
150 #ifndef EXIT_IGNORE_STACK
151 #define EXIT_IGNORE_STACK 0
154 #ifndef HAVE_epilogue
155 #define HAVE_epilogue 0
157 #ifndef HAVE_prologue
158 #define HAVE_prologue 0
160 #ifndef HAVE_sibcall_epilogue
161 #define HAVE_sibcall_epilogue 0
165 #define LOCAL_REGNO(REGNO) 0
167 #ifndef EPILOGUE_USES
168 #define EPILOGUE_USES(REGNO) 0
171 /* The obstack on which the flow graph components are allocated. */
173 struct obstack flow_obstack;
174 static char *flow_firstobj;
176 /* Number of basic blocks in the current function. */
180 /* Number of edges in the current function. */
184 /* The basic block array. */
186 varray_type basic_block_info;
188 /* The special entry and exit blocks. */
190 struct basic_block_def entry_exit_blocks[2]
195 NULL, /* local_set */
196 NULL, /* cond_local_set */
197 NULL, /* global_live_at_start */
198 NULL, /* global_live_at_end */
200 ENTRY_BLOCK, /* index */
202 -1, -1, /* eh_beg, eh_end */
210 NULL, /* local_set */
211 NULL, /* cond_local_set */
212 NULL, /* global_live_at_start */
213 NULL, /* global_live_at_end */
215 EXIT_BLOCK, /* index */
217 -1, -1, /* eh_beg, eh_end */
222 /* Nonzero if the second flow pass has completed. */
225 /* Maximum register number used in this function, plus one. */
229 /* Indexed by n, giving various register information */
231 varray_type reg_n_info;
233 /* Size of a regset for the current function,
234 in (1) bytes and (2) elements. */
239 /* Regset of regs live when calls to `setjmp'-like functions happen. */
240 /* ??? Does this exist only for the setjmp-clobbered warning message? */
242 regset regs_live_at_setjmp;
244 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
245 that have to go in the same hard reg.
246 The first two regs in the list are a pair, and the next two
247 are another pair, etc. */
250 /* Callback that determines if it's ok for a function to have no
251 noreturn attribute. */
252 int (*lang_missing_noreturn_ok_p) PARAMS ((tree));
254 /* Set of registers that may be eliminable. These are handled specially
255 in updating regs_ever_live. */
257 static HARD_REG_SET elim_reg_set;
259 /* The basic block structure for every insn, indexed by uid. */
261 varray_type basic_block_for_insn;
263 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
264 /* ??? Should probably be using LABEL_NUSES instead. It would take a
265 bit of surgery to be able to use or co-opt the routines in jump. */
267 static rtx label_value_list;
268 static rtx tail_recursion_label_list;
270 /* Holds information for tracking conditional register life information. */
271 struct reg_cond_life_info
273 /* An EXPR_LIST of conditions under which a register is dead. */
276 /* ??? Could store mask of bytes that are dead, so that we could finally
277 track lifetimes of multi-word registers accessed via subregs. */
280 /* For use in communicating between propagate_block and its subroutines.
281 Holds all information needed to compute life and def-use information. */
283 struct propagate_block_info
285 /* The basic block we're considering. */
288 /* Bit N is set if register N is conditionally or unconditionally live. */
291 /* Bit N is set if register N is set this insn. */
294 /* Element N is the next insn that uses (hard or pseudo) register N
295 within the current basic block; or zero, if there is no such insn. */
298 /* Contains a list of all the MEMs we are tracking for dead store
302 /* If non-null, record the set of registers set unconditionally in the
306 /* If non-null, record the set of registers set conditionally in the
308 regset cond_local_set;
310 #ifdef HAVE_conditional_execution
311 /* Indexed by register number, holds a reg_cond_life_info for each
312 register that is not unconditionally live or dead. */
313 splay_tree reg_cond_dead;
315 /* Bit N is set if register N is in an expression in reg_cond_dead. */
319 /* The length of mem_set_list. */
320 int mem_set_list_len;
322 /* Non-zero if the value of CC0 is live. */
325 /* Flags controling the set of information propagate_block collects. */
329 /* Maximum length of pbi->mem_set_list before we start dropping
330 new elements on the floor. */
331 #define MAX_MEM_SET_LIST_LEN 100
333 /* Store the data structures necessary for depth-first search. */
334 struct depth_first_search_dsS {
335 /* stack for backtracking during the algorithm */
338 /* number of edges in the stack. That is, positions 0, ..., sp-1
342 /* record of basic blocks already seen by depth-first search */
343 sbitmap visited_blocks;
345 typedef struct depth_first_search_dsS *depth_first_search_ds;
347 /* Have print_rtl_and_abort give the same information that fancy_abort
349 #define print_rtl_and_abort() \
350 print_rtl_and_abort_fcn (__FILE__, __LINE__, __FUNCTION__)
352 /* Forward declarations */
353 static int count_basic_blocks PARAMS ((rtx));
354 static void find_basic_blocks_1 PARAMS ((rtx));
355 static rtx find_label_refs PARAMS ((rtx, rtx));
356 static void clear_edges PARAMS ((void));
357 static void make_edges PARAMS ((rtx));
358 static void make_label_edge PARAMS ((sbitmap *, basic_block,
360 static void make_eh_edge PARAMS ((sbitmap *, eh_nesting_info *,
361 basic_block, rtx, int));
362 static void mark_critical_edges PARAMS ((void));
363 static void move_stray_eh_region_notes PARAMS ((void));
364 static void record_active_eh_regions PARAMS ((rtx));
366 static void commit_one_edge_insertion PARAMS ((edge));
368 static void delete_unreachable_blocks PARAMS ((void));
369 static void delete_eh_regions PARAMS ((void));
370 static int can_delete_note_p PARAMS ((rtx));
371 static void expunge_block PARAMS ((basic_block));
372 static int can_delete_label_p PARAMS ((rtx));
373 static int tail_recursion_label_p PARAMS ((rtx));
374 static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
376 static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
378 static int merge_blocks PARAMS ((edge,basic_block,basic_block));
379 static void try_merge_blocks PARAMS ((void));
380 static void tidy_fallthru_edges PARAMS ((void));
381 static int verify_wide_reg_1 PARAMS ((rtx *, void *));
382 static void verify_wide_reg PARAMS ((int, rtx, rtx));
383 static void verify_local_live_at_start PARAMS ((regset, basic_block));
384 static int set_noop_p PARAMS ((rtx));
385 static int noop_move_p PARAMS ((rtx));
386 static void delete_noop_moves PARAMS ((rtx));
387 static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
388 static void notice_stack_pointer_modification PARAMS ((rtx));
389 static void mark_reg PARAMS ((rtx, void *));
390 static void mark_regs_live_at_end PARAMS ((regset));
391 static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *));
392 static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
393 static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
394 static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
395 static int insn_dead_p PARAMS ((struct propagate_block_info *,
397 static int libcall_dead_p PARAMS ((struct propagate_block_info *,
399 static void mark_set_regs PARAMS ((struct propagate_block_info *,
401 static void mark_set_1 PARAMS ((struct propagate_block_info *,
402 enum rtx_code, rtx, rtx,
404 #ifdef HAVE_conditional_execution
405 static int mark_regno_cond_dead PARAMS ((struct propagate_block_info *,
407 static void free_reg_cond_life_info PARAMS ((splay_tree_value));
408 static int flush_reg_cond_reg_1 PARAMS ((splay_tree_node, void *));
409 static void flush_reg_cond_reg PARAMS ((struct propagate_block_info *,
411 static rtx elim_reg_cond PARAMS ((rtx, unsigned int));
412 static rtx ior_reg_cond PARAMS ((rtx, rtx, int));
413 static rtx not_reg_cond PARAMS ((rtx));
414 static rtx and_reg_cond PARAMS ((rtx, rtx, int));
417 static void attempt_auto_inc PARAMS ((struct propagate_block_info *,
418 rtx, rtx, rtx, rtx, rtx));
419 static void find_auto_inc PARAMS ((struct propagate_block_info *,
421 static int try_pre_increment_1 PARAMS ((struct propagate_block_info *,
423 static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
425 static void mark_used_reg PARAMS ((struct propagate_block_info *,
427 static void mark_used_regs PARAMS ((struct propagate_block_info *,
429 void dump_flow_info PARAMS ((FILE *));
430 void debug_flow_info PARAMS ((void));
431 static void dump_edge_info PARAMS ((FILE *, edge, int));
432 static void print_rtl_and_abort_fcn PARAMS ((const char *, int,
436 static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
438 static void invalidate_mems_from_set PARAMS ((struct propagate_block_info *,
440 static void remove_fake_successors PARAMS ((basic_block));
441 static void flow_nodes_print PARAMS ((const char *, const sbitmap,
443 static void flow_edge_list_print PARAMS ((const char *, const edge *,
445 static void flow_loops_cfg_dump PARAMS ((const struct loops *,
447 static int flow_loop_nested_p PARAMS ((struct loop *,
449 static int flow_loop_entry_edges_find PARAMS ((basic_block, const sbitmap,
451 static int flow_loop_exit_edges_find PARAMS ((const sbitmap, edge **));
452 static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
453 static int flow_depth_first_order_compute PARAMS ((int *, int *));
454 static void flow_dfs_compute_reverse_init
455 PARAMS ((depth_first_search_ds));
456 static void flow_dfs_compute_reverse_add_bb
457 PARAMS ((depth_first_search_ds, basic_block));
458 static basic_block flow_dfs_compute_reverse_execute
459 PARAMS ((depth_first_search_ds));
460 static void flow_dfs_compute_reverse_finish
461 PARAMS ((depth_first_search_ds));
462 static void flow_loop_pre_header_scan PARAMS ((struct loop *));
463 static basic_block flow_loop_pre_header_find PARAMS ((basic_block,
465 static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
466 static void flow_loops_tree_build PARAMS ((struct loops *));
467 static int flow_loop_level_compute PARAMS ((struct loop *, int));
468 static int flow_loops_level_compute PARAMS ((struct loops *));
469 static void allocate_bb_life_data PARAMS ((void));
471 /* Find basic blocks of the current function.
472 F is the first insn of the function and NREGS the number of register
476 find_basic_blocks (f, nregs, file)
478 int nregs ATTRIBUTE_UNUSED;
479 FILE *file ATTRIBUTE_UNUSED;
483 /* Flush out existing data. */
484 if (basic_block_info != NULL)
490 /* Clear bb->aux on all extant basic blocks. We'll use this as a
491 tag for reuse during create_basic_block, just in case some pass
492 copies around basic block notes improperly. */
493 for (i = 0; i < n_basic_blocks; ++i)
494 BASIC_BLOCK (i)->aux = NULL;
496 VARRAY_FREE (basic_block_info);
499 n_basic_blocks = count_basic_blocks (f);
501 /* Size the basic block table. The actual structures will be allocated
502 by find_basic_blocks_1, since we want to keep the structure pointers
503 stable across calls to find_basic_blocks. */
504 /* ??? This whole issue would be much simpler if we called find_basic_blocks
505 exactly once, and thereafter we don't have a single long chain of
506 instructions at all until close to the end of compilation when we
507 actually lay them out. */
509 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
511 find_basic_blocks_1 (f);
513 /* Record the block to which an insn belongs. */
514 /* ??? This should be done another way, by which (perhaps) a label is
515 tagged directly with the basic block that it starts. It is used for
516 more than that currently, but IMO that is the only valid use. */
518 max_uid = get_max_uid ();
520 /* Leave space for insns life_analysis makes in some cases for auto-inc.
521 These cases are rare, so we don't need too much space. */
522 max_uid += max_uid / 10;
525 compute_bb_for_insn (max_uid);
527 /* Discover the edges of our cfg. */
528 record_active_eh_regions (f);
529 make_edges (label_value_list);
531 /* Do very simple cleanup now, for the benefit of code that runs between
532 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
533 tidy_fallthru_edges ();
535 mark_critical_edges ();
537 #ifdef ENABLE_CHECKING
543 check_function_return_warnings ()
545 if (warn_missing_noreturn
546 && !TREE_THIS_VOLATILE (cfun->decl)
547 && EXIT_BLOCK_PTR->pred == NULL
548 && (lang_missing_noreturn_ok_p
549 && !lang_missing_noreturn_ok_p (cfun->decl)))
550 warning ("function might be possible candidate for attribute `noreturn'");
552 /* If we have a path to EXIT, then we do return. */
553 if (TREE_THIS_VOLATILE (cfun->decl)
554 && EXIT_BLOCK_PTR->pred != NULL)
555 warning ("`noreturn' function does return");
557 /* If the clobber_return_insn appears in some basic block, then we
558 do reach the end without returning a value. */
559 else if (warn_return_type
560 && cfun->x_clobber_return_insn != NULL
561 && EXIT_BLOCK_PTR->pred != NULL)
563 int max_uid = get_max_uid ();
565 /* If clobber_return_insn was excised by jump1, then renumber_insns
566 can make max_uid smaller than the number still recorded in our rtx.
567 That's fine, since this is a quick way of verifying that the insn
568 is no longer in the chain. */
569 if (INSN_UID (cfun->x_clobber_return_insn) < max_uid)
571 /* Recompute insn->block mapping, since the initial mapping is
572 set before we delete unreachable blocks. */
573 compute_bb_for_insn (max_uid);
575 if (BLOCK_FOR_INSN (cfun->x_clobber_return_insn) != NULL)
576 warning ("control reaches end of non-void function");
581 /* Count the basic blocks of the function. */
584 count_basic_blocks (f)
588 register RTX_CODE prev_code;
589 register int count = 0;
591 int call_had_abnormal_edge = 0;
593 prev_code = JUMP_INSN;
594 for (insn = f; insn; insn = NEXT_INSN (insn))
596 register RTX_CODE code = GET_CODE (insn);
598 if (code == CODE_LABEL
599 || (GET_RTX_CLASS (code) == 'i'
600 && (prev_code == JUMP_INSN
601 || prev_code == BARRIER
602 || (prev_code == CALL_INSN && call_had_abnormal_edge))))
605 /* Record whether this call created an edge. */
606 if (code == CALL_INSN)
608 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
609 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
611 call_had_abnormal_edge = 0;
613 /* If there is an EH region or rethrow, we have an edge. */
614 if ((eh_region && region > 0)
615 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
616 call_had_abnormal_edge = 1;
617 else if (nonlocal_goto_handler_labels && region >= 0)
618 /* If there is a nonlocal goto label and the specified
619 region number isn't -1, we have an edge. (0 means
620 no throw, but might have a nonlocal goto). */
621 call_had_abnormal_edge = 1;
626 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
628 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
632 /* The rest of the compiler works a bit smoother when we don't have to
633 check for the edge case of do-nothing functions with no basic blocks. */
636 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
643 /* Scan a list of insns for labels referred to other than by jumps.
644 This is used to scan the alternatives of a call placeholder. */
646 find_label_refs (f, lvl)
652 for (insn = f; insn; insn = NEXT_INSN (insn))
653 if (INSN_P (insn) && GET_CODE (insn) != JUMP_INSN)
657 /* Make a list of all labels referred to other than by jumps
658 (which just don't have the REG_LABEL notes).
660 Make a special exception for labels followed by an ADDR*VEC,
661 as this would be a part of the tablejump setup code.
663 Make a special exception for the eh_return_stub_label, which
664 we know isn't part of any otherwise visible control flow.
666 Make a special exception to registers loaded with label
667 values just before jump insns that use them. */
669 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
670 if (REG_NOTE_KIND (note) == REG_LABEL)
672 rtx lab = XEXP (note, 0), next;
674 if (lab == eh_return_stub_label)
676 else if ((next = next_nonnote_insn (lab)) != NULL
677 && GET_CODE (next) == JUMP_INSN
678 && (GET_CODE (PATTERN (next)) == ADDR_VEC
679 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
681 else if (GET_CODE (lab) == NOTE)
683 else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
684 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
687 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
694 /* Find all basic blocks of the function whose first insn is F.
696 Collect and return a list of labels whose addresses are taken. This
697 will be used in make_edges for use with computed gotos. */
700 find_basic_blocks_1 (f)
703 register rtx insn, next;
705 rtx bb_note = NULL_RTX;
706 rtx eh_list = NULL_RTX;
712 /* We process the instructions in a slightly different way than we did
713 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
714 closed out the previous block, so that it gets attached at the proper
715 place. Since this form should be equivalent to the previous,
716 count_basic_blocks continues to use the old form as a check. */
718 for (insn = f; insn; insn = next)
720 enum rtx_code code = GET_CODE (insn);
722 next = NEXT_INSN (insn);
728 int kind = NOTE_LINE_NUMBER (insn);
730 /* Keep a LIFO list of the currently active exception notes. */
731 if (kind == NOTE_INSN_EH_REGION_BEG)
732 eh_list = alloc_INSN_LIST (insn, eh_list);
733 else if (kind == NOTE_INSN_EH_REGION_END)
737 eh_list = XEXP (eh_list, 1);
738 free_INSN_LIST_node (t);
741 /* Look for basic block notes with which to keep the
742 basic_block_info pointers stable. Unthread the note now;
743 we'll put it back at the right place in create_basic_block.
744 Or not at all if we've already found a note in this block. */
745 else if (kind == NOTE_INSN_BASIC_BLOCK)
747 if (bb_note == NULL_RTX)
750 next = flow_delete_insn (insn);
756 /* A basic block starts at a label. If we've closed one off due
757 to a barrier or some such, no need to do it again. */
758 if (head != NULL_RTX)
760 /* While we now have edge lists with which other portions of
761 the compiler might determine a call ending a basic block
762 does not imply an abnormal edge, it will be a bit before
763 everything can be updated. So continue to emit a noop at
764 the end of such a block. */
765 if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
767 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
768 end = emit_insn_after (nop, end);
771 create_basic_block (i++, head, end, bb_note);
779 /* A basic block ends at a jump. */
780 if (head == NULL_RTX)
784 /* ??? Make a special check for table jumps. The way this
785 happens is truly and amazingly gross. We are about to
786 create a basic block that contains just a code label and
787 an addr*vec jump insn. Worse, an addr_diff_vec creates
788 its own natural loop.
790 Prevent this bit of brain damage, pasting things together
791 correctly in make_edges.
793 The correct solution involves emitting the table directly
794 on the tablejump instruction as a note, or JUMP_LABEL. */
796 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
797 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
805 goto new_bb_inclusive;
808 /* A basic block ends at a barrier. It may be that an unconditional
809 jump already closed the basic block -- no need to do it again. */
810 if (head == NULL_RTX)
813 /* While we now have edge lists with which other portions of the
814 compiler might determine a call ending a basic block does not
815 imply an abnormal edge, it will be a bit before everything can
816 be updated. So continue to emit a noop at the end of such a
818 if (GET_CODE (end) == CALL_INSN && ! SIBLING_CALL_P (end))
820 rtx nop = gen_rtx_USE (VOIDmode, const0_rtx);
821 end = emit_insn_after (nop, end);
823 goto new_bb_exclusive;
827 /* Record whether this call created an edge. */
828 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
829 int region = (note ? INTVAL (XEXP (note, 0)) : 1);
830 int call_has_abnormal_edge = 0;
832 if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
834 /* Scan each of the alternatives for label refs. */
835 lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
836 lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
837 lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
838 /* Record its tail recursion label, if any. */
839 if (XEXP (PATTERN (insn), 3) != NULL_RTX)
840 trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
843 /* If there is an EH region or rethrow, we have an edge. */
844 if ((eh_list && region > 0)
845 || find_reg_note (insn, REG_EH_RETHROW, NULL_RTX))
846 call_has_abnormal_edge = 1;
847 else if (nonlocal_goto_handler_labels && region >= 0)
848 /* If there is a nonlocal goto label and the specified
849 region number isn't -1, we have an edge. (0 means
850 no throw, but might have a nonlocal goto). */
851 call_has_abnormal_edge = 1;
853 /* A basic block ends at a call that can either throw or
854 do a non-local goto. */
855 if (call_has_abnormal_edge)
858 if (head == NULL_RTX)
863 create_basic_block (i++, head, end, bb_note);
864 head = end = NULL_RTX;
872 if (GET_RTX_CLASS (code) == 'i')
874 if (head == NULL_RTX)
881 if (GET_RTX_CLASS (code) == 'i'
882 && GET_CODE (insn) != JUMP_INSN)
886 /* Make a list of all labels referred to other than by jumps.
888 Make a special exception for labels followed by an ADDR*VEC,
889 as this would be a part of the tablejump setup code.
891 Make a special exception for the eh_return_stub_label, which
892 we know isn't part of any otherwise visible control flow.
894 Make a special exception to registers loaded with label
895 values just before jump insns that use them. */
897 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
898 if (REG_NOTE_KIND (note) == REG_LABEL)
900 rtx lab = XEXP (note, 0), next;
902 if (lab == eh_return_stub_label)
904 else if ((next = next_nonnote_insn (lab)) != NULL
905 && GET_CODE (next) == JUMP_INSN
906 && (GET_CODE (PATTERN (next)) == ADDR_VEC
907 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
909 else if (GET_CODE (lab) == NOTE)
911 else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
912 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
915 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
920 if (head != NULL_RTX)
921 create_basic_block (i++, head, end, bb_note);
923 flow_delete_insn (bb_note);
925 if (i != n_basic_blocks)
928 label_value_list = lvl;
929 tail_recursion_label_list = trll;
932 /* Tidy the CFG by deleting unreachable code and whatnot. */
938 delete_unreachable_blocks ();
939 move_stray_eh_region_notes ();
940 record_active_eh_regions (f);
942 mark_critical_edges ();
944 /* Kill the data we won't maintain. */
945 free_EXPR_LIST_list (&label_value_list);
946 free_EXPR_LIST_list (&tail_recursion_label_list);
949 /* Create a new basic block consisting of the instructions between
950 HEAD and END inclusive. Reuses the note and basic block struct
951 in BB_NOTE, if any. */
954 create_basic_block (index, head, end, bb_note)
956 rtx head, end, bb_note;
961 && ! RTX_INTEGRATED_P (bb_note)
962 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
965 /* If we found an existing note, thread it back onto the chain. */
969 if (GET_CODE (head) == CODE_LABEL)
973 after = PREV_INSN (head);
977 if (after != bb_note && NEXT_INSN (after) != bb_note)
978 reorder_insns (bb_note, bb_note, after);
982 /* Otherwise we must create a note and a basic block structure.
983 Since we allow basic block structs in rtl, give the struct
984 the same lifetime by allocating it off the function obstack
985 rather than using malloc. */
987 bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
988 memset (bb, 0, sizeof (*bb));
990 if (GET_CODE (head) == CODE_LABEL)
991 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
994 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
997 NOTE_BASIC_BLOCK (bb_note) = bb;
1000 /* Always include the bb note in the block. */
1001 if (NEXT_INSN (end) == bb_note)
1007 BASIC_BLOCK (index) = bb;
1009 /* Tag the block so that we know it has been used when considering
1010 other basic block notes. */
1014 /* Records the basic block struct in BB_FOR_INSN, for every instruction
1015 indexed by INSN_UID. MAX is the size of the array. */
1018 compute_bb_for_insn (max)
1023 if (basic_block_for_insn)
1024 VARRAY_FREE (basic_block_for_insn);
1025 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
1027 for (i = 0; i < n_basic_blocks; ++i)
1029 basic_block bb = BASIC_BLOCK (i);
1036 int uid = INSN_UID (insn);
1038 VARRAY_BB (basic_block_for_insn, uid) = bb;
1041 insn = NEXT_INSN (insn);
1046 /* Free the memory associated with the edge structures. */
1054 for (i = 0; i < n_basic_blocks; ++i)
1056 basic_block bb = BASIC_BLOCK (i);
1058 for (e = bb->succ; e; e = n)
1068 for (e = ENTRY_BLOCK_PTR->succ; e; e = n)
1074 ENTRY_BLOCK_PTR->succ = 0;
1075 EXIT_BLOCK_PTR->pred = 0;
1080 /* Identify the edges between basic blocks.
1082 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
1083 that are otherwise unreachable may be reachable with a non-local goto.
1085 BB_EH_END is an array indexed by basic block number in which we record
1086 the list of exception regions active at the end of the basic block. */
1089 make_edges (label_value_list)
1090 rtx label_value_list;
1093 eh_nesting_info *eh_nest_info = init_eh_nesting_info ();
1094 sbitmap *edge_cache = NULL;
1096 /* Assume no computed jump; revise as we create edges. */
1097 current_function_has_computed_jump = 0;
1099 /* Heavy use of computed goto in machine-generated code can lead to
1100 nearly fully-connected CFGs. In that case we spend a significant
1101 amount of time searching the edge lists for duplicates. */
1102 if (forced_labels || label_value_list)
1104 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
1105 sbitmap_vector_zero (edge_cache, n_basic_blocks);
1108 /* By nature of the way these get numbered, block 0 is always the entry. */
1109 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
1111 for (i = 0; i < n_basic_blocks; ++i)
1113 basic_block bb = BASIC_BLOCK (i);
1116 int force_fallthru = 0;
1118 /* Examine the last instruction of the block, and discover the
1119 ways we can leave the block. */
1122 code = GET_CODE (insn);
1125 if (code == JUMP_INSN)
1129 /* Recognize a non-local goto as a branch outside the
1130 current function. */
1131 if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
1134 /* ??? Recognize a tablejump and do the right thing. */
1135 else if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1136 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1137 && GET_CODE (tmp) == JUMP_INSN
1138 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1139 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1144 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1145 vec = XVEC (PATTERN (tmp), 0);
1147 vec = XVEC (PATTERN (tmp), 1);
1149 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1150 make_label_edge (edge_cache, bb,
1151 XEXP (RTVEC_ELT (vec, j), 0), 0);
1153 /* Some targets (eg, ARM) emit a conditional jump that also
1154 contains the out-of-range target. Scan for these and
1155 add an edge if necessary. */
1156 if ((tmp = single_set (insn)) != NULL
1157 && SET_DEST (tmp) == pc_rtx
1158 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1159 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
1160 make_label_edge (edge_cache, bb,
1161 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
1163 #ifdef CASE_DROPS_THROUGH
1164 /* Silly VAXen. The ADDR_VEC is going to be in the way of
1165 us naturally detecting fallthru into the next block. */
1170 /* If this is a computed jump, then mark it as reaching
1171 everything on the label_value_list and forced_labels list. */
1172 else if (computed_jump_p (insn))
1174 current_function_has_computed_jump = 1;
1176 for (x = label_value_list; x; x = XEXP (x, 1))
1177 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
1179 for (x = forced_labels; x; x = XEXP (x, 1))
1180 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
1183 /* Returns create an exit out. */
1184 else if (returnjump_p (insn))
1185 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
1187 /* Otherwise, we have a plain conditional or unconditional jump. */
1190 if (! JUMP_LABEL (insn))
1192 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
1196 /* If this is a sibling call insn, then this is in effect a
1197 combined call and return, and so we need an edge to the
1198 exit block. No need to worry about EH edges, since we
1199 wouldn't have created the sibling call in the first place. */
1201 if (code == CALL_INSN && SIBLING_CALL_P (insn))
1202 make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
1203 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1205 /* If this is a CALL_INSN, then mark it as reaching the active EH
1206 handler for this CALL_INSN. If we're handling asynchronous
1207 exceptions then any insn can reach any of the active handlers.
1209 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
1211 else if (code == CALL_INSN || asynchronous_exceptions)
1213 /* Add any appropriate EH edges. We do this unconditionally
1214 since there may be a REG_EH_REGION or REG_EH_RETHROW note
1215 on the call, and this needn't be within an EH region. */
1216 make_eh_edge (edge_cache, eh_nest_info, bb, insn, bb->eh_end);
1218 /* If we have asynchronous exceptions, do the same for *all*
1219 exception regions active in the block. */
1220 if (asynchronous_exceptions
1221 && bb->eh_beg != bb->eh_end)
1223 if (bb->eh_beg >= 0)
1224 make_eh_edge (edge_cache, eh_nest_info, bb,
1225 NULL_RTX, bb->eh_beg);
1227 for (x = bb->head; x != bb->end; x = NEXT_INSN (x))
1228 if (GET_CODE (x) == NOTE
1229 && (NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_BEG
1230 || NOTE_LINE_NUMBER (x) == NOTE_INSN_EH_REGION_END))
1232 int region = NOTE_EH_HANDLER (x);
1233 make_eh_edge (edge_cache, eh_nest_info, bb,
1238 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1240 /* ??? This could be made smarter: in some cases it's possible
1241 to tell that certain calls will not do a nonlocal goto.
1243 For example, if the nested functions that do the nonlocal
1244 gotos do not have their addresses taken, then only calls to
1245 those functions or to other nested functions that use them
1246 could possibly do nonlocal gotos. */
1247 /* We do know that a REG_EH_REGION note with a value less
1248 than 0 is guaranteed not to perform a non-local goto. */
1249 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1250 if (!note || INTVAL (XEXP (note, 0)) >= 0)
1251 for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
1252 make_label_edge (edge_cache, bb, XEXP (x, 0),
1253 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1257 /* We know something about the structure of the function __throw in
1258 libgcc2.c. It is the only function that ever contains eh_stub
1259 labels. It modifies its return address so that the last block
1260 returns to one of the eh_stub labels within it. So we have to
1261 make additional edges in the flow graph. */
1262 if (i + 1 == n_basic_blocks && eh_return_stub_label != 0)
1263 make_label_edge (edge_cache, bb, eh_return_stub_label, EDGE_EH);
1265 /* Find out if we can drop through to the next block. */
1266 insn = next_nonnote_insn (insn);
1267 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1268 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1269 else if (i + 1 < n_basic_blocks)
1271 rtx tmp = BLOCK_HEAD (i + 1);
1272 if (GET_CODE (tmp) == NOTE)
1273 tmp = next_nonnote_insn (tmp);
1274 if (force_fallthru || insn == tmp)
1275 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1279 free_eh_nesting_info (eh_nest_info);
1281 sbitmap_vector_free (edge_cache);
1284 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1285 about the edge that is accumulated between calls. */
1288 make_edge (edge_cache, src, dst, flags)
1289 sbitmap *edge_cache;
1290 basic_block src, dst;
1296 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1297 many edges to them, and we didn't allocate memory for it. */
1298 use_edge_cache = (edge_cache
1299 && src != ENTRY_BLOCK_PTR
1300 && dst != EXIT_BLOCK_PTR);
1302 /* Make sure we don't add duplicate edges. */
1303 switch (use_edge_cache)
1306 /* Quick test for non-existance of the edge. */
1307 if (! TEST_BIT (edge_cache[src->index], dst->index))
1310 /* The edge exists; early exit if no work to do. */
1316 for (e = src->succ; e; e = e->succ_next)
1325 e = (edge) xcalloc (1, sizeof (*e));
1328 e->succ_next = src->succ;
1329 e->pred_next = dst->pred;
1338 SET_BIT (edge_cache[src->index], dst->index);
1341 /* Create an edge from a basic block to a label. */
1344 make_label_edge (edge_cache, src, label, flags)
1345 sbitmap *edge_cache;
1350 if (GET_CODE (label) != CODE_LABEL)
1353 /* If the label was never emitted, this insn is junk, but avoid a
1354 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1355 as a result of a syntax error and a diagnostic has already been
1358 if (INSN_UID (label) == 0)
1361 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1364 /* Create the edges generated by INSN in REGION. */
1367 make_eh_edge (edge_cache, eh_nest_info, src, insn, region)
1368 sbitmap *edge_cache;
1369 eh_nesting_info *eh_nest_info;
1374 handler_info **handler_list;
1377 is_call = (insn && GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
1378 num = reachable_handlers (region, eh_nest_info, insn, &handler_list);
1381 make_label_edge (edge_cache, src, handler_list[num]->handler_label,
1382 EDGE_ABNORMAL | EDGE_EH | is_call);
1386 /* EH_REGION notes appearing between basic blocks is ambiguous, and even
1387 dangerous if we intend to move basic blocks around. Move such notes
1388 into the following block. */
1391 move_stray_eh_region_notes ()
1396 if (n_basic_blocks < 2)
1399 b2 = BASIC_BLOCK (n_basic_blocks - 1);
1400 for (i = n_basic_blocks - 2; i >= 0; --i, b2 = b1)
1402 rtx insn, next, list = NULL_RTX;
1404 b1 = BASIC_BLOCK (i);
1405 for (insn = NEXT_INSN (b1->end); insn != b2->head; insn = next)
1407 next = NEXT_INSN (insn);
1408 if (GET_CODE (insn) == NOTE
1409 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG
1410 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
1412 /* Unlink from the insn chain. */
1413 NEXT_INSN (PREV_INSN (insn)) = next;
1414 PREV_INSN (next) = PREV_INSN (insn);
1417 NEXT_INSN (insn) = list;
1422 if (list == NULL_RTX)
1425 /* Find where to insert these things. */
1427 if (GET_CODE (insn) == CODE_LABEL)
1428 insn = NEXT_INSN (insn);
1432 next = NEXT_INSN (list);
1433 add_insn_after (list, insn);
1439 /* Recompute eh_beg/eh_end for each basic block. */
1442 record_active_eh_regions (f)
1445 rtx insn, eh_list = NULL_RTX;
1447 basic_block bb = BASIC_BLOCK (0);
1449 for (insn = f; insn; insn = NEXT_INSN (insn))
1451 if (bb->head == insn)
1452 bb->eh_beg = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1454 if (GET_CODE (insn) == NOTE)
1456 int kind = NOTE_LINE_NUMBER (insn);
1457 if (kind == NOTE_INSN_EH_REGION_BEG)
1458 eh_list = alloc_INSN_LIST (insn, eh_list);
1459 else if (kind == NOTE_INSN_EH_REGION_END)
1461 rtx t = XEXP (eh_list, 1);
1462 free_INSN_LIST_node (eh_list);
1467 if (bb->end == insn)
1469 bb->eh_end = (eh_list ? NOTE_EH_HANDLER (XEXP (eh_list, 0)) : -1);
1471 if (i == n_basic_blocks)
1473 bb = BASIC_BLOCK (i);
1478 /* Identify critical edges and set the bits appropriately. */
1481 mark_critical_edges ()
1483 int i, n = n_basic_blocks;
1486 /* We begin with the entry block. This is not terribly important now,
1487 but could be if a front end (Fortran) implemented alternate entry
1489 bb = ENTRY_BLOCK_PTR;
1496 /* (1) Critical edges must have a source with multiple successors. */
1497 if (bb->succ && bb->succ->succ_next)
1499 for (e = bb->succ; e; e = e->succ_next)
1501 /* (2) Critical edges must have a destination with multiple
1502 predecessors. Note that we know there is at least one
1503 predecessor -- the edge we followed to get here. */
1504 if (e->dest->pred->pred_next)
1505 e->flags |= EDGE_CRITICAL;
1507 e->flags &= ~EDGE_CRITICAL;
1512 for (e = bb->succ; e; e = e->succ_next)
1513 e->flags &= ~EDGE_CRITICAL;
1518 bb = BASIC_BLOCK (i);
1522 /* Split a block BB after insn INSN creating a new fallthru edge.
1523 Return the new edge. Note that to keep other parts of the compiler happy,
1524 this function renumbers all the basic blocks so that the new
1525 one has a number one greater than the block split. */
1528 split_block (bb, insn)
1538 /* There is no point splitting the block after its end. */
1539 if (bb->end == insn)
1542 /* Create the new structures. */
1543 new_bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*new_bb));
1544 new_edge = (edge) xcalloc (1, sizeof (*new_edge));
1547 memset (new_bb, 0, sizeof (*new_bb));
1549 new_bb->head = NEXT_INSN (insn);
1550 new_bb->end = bb->end;
1553 new_bb->succ = bb->succ;
1554 bb->succ = new_edge;
1555 new_bb->pred = new_edge;
1556 new_bb->count = bb->count;
1557 new_bb->loop_depth = bb->loop_depth;
1560 new_edge->dest = new_bb;
1561 new_edge->flags = EDGE_FALLTHRU;
1562 new_edge->probability = REG_BR_PROB_BASE;
1563 new_edge->count = bb->count;
1565 /* Redirect the src of the successor edges of bb to point to new_bb. */
1566 for (e = new_bb->succ; e; e = e->succ_next)
1569 /* Place the new block just after the block being split. */
1570 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1572 /* Some parts of the compiler expect blocks to be number in
1573 sequential order so insert the new block immediately after the
1574 block being split.. */
1576 for (i = n_basic_blocks - 1; i > j + 1; --i)
1578 basic_block tmp = BASIC_BLOCK (i - 1);
1579 BASIC_BLOCK (i) = tmp;
1583 BASIC_BLOCK (i) = new_bb;
1586 /* Create the basic block note. */
1587 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1589 NOTE_BASIC_BLOCK (bb_note) = new_bb;
1590 new_bb->head = bb_note;
1592 update_bb_for_insn (new_bb);
1594 if (bb->global_live_at_start)
1596 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1597 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1598 COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
1600 /* We now have to calculate which registers are live at the end
1601 of the split basic block and at the start of the new basic
1602 block. Start with those registers that are known to be live
1603 at the end of the original basic block and get
1604 propagate_block to determine which registers are live. */
1605 COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
1606 propagate_block (new_bb, new_bb->global_live_at_start, NULL, NULL, 0);
1607 COPY_REG_SET (bb->global_live_at_end,
1608 new_bb->global_live_at_start);
1615 /* Split a (typically critical) edge. Return the new block.
1616 Abort on abnormal edges.
1618 ??? The code generally expects to be called on critical edges.
1619 The case of a block ending in an unconditional jump to a
1620 block with multiple predecessors is not handled optimally. */
1623 split_edge (edge_in)
1626 basic_block old_pred, bb, old_succ;
1631 /* Abnormal edges cannot be split. */
1632 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1635 old_pred = edge_in->src;
1636 old_succ = edge_in->dest;
1638 /* Remove the existing edge from the destination's pred list. */
1641 for (pp = &old_succ->pred; *pp != edge_in; pp = &(*pp)->pred_next)
1643 *pp = edge_in->pred_next;
1644 edge_in->pred_next = NULL;
1647 /* Create the new structures. */
1648 bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
1649 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
1652 memset (bb, 0, sizeof (*bb));
1654 /* ??? This info is likely going to be out of date very soon. */
1655 if (old_succ->global_live_at_start)
1657 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1658 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1659 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
1660 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
1665 bb->succ = edge_out;
1666 bb->count = edge_in->count;
1669 edge_in->flags &= ~EDGE_CRITICAL;
1671 edge_out->pred_next = old_succ->pred;
1672 edge_out->succ_next = NULL;
1674 edge_out->dest = old_succ;
1675 edge_out->flags = EDGE_FALLTHRU;
1676 edge_out->probability = REG_BR_PROB_BASE;
1677 edge_out->count = edge_in->count;
1679 old_succ->pred = edge_out;
1681 /* Tricky case -- if there existed a fallthru into the successor
1682 (and we're not it) we must add a new unconditional jump around
1683 the new block we're actually interested in.
1685 Further, if that edge is critical, this means a second new basic
1686 block must be created to hold it. In order to simplify correct
1687 insn placement, do this before we touch the existing basic block
1688 ordering for the block we were really wanting. */
1689 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1692 for (e = edge_out->pred_next; e; e = e->pred_next)
1693 if (e->flags & EDGE_FALLTHRU)
1698 basic_block jump_block;
1701 if ((e->flags & EDGE_CRITICAL) == 0
1702 && e->src != ENTRY_BLOCK_PTR)
1704 /* Non critical -- we can simply add a jump to the end
1705 of the existing predecessor. */
1706 jump_block = e->src;
1710 /* We need a new block to hold the jump. The simplest
1711 way to do the bulk of the work here is to recursively
1713 jump_block = split_edge (e);
1714 e = jump_block->succ;
1717 /* Now add the jump insn ... */
1718 pos = emit_jump_insn_after (gen_jump (old_succ->head),
1720 jump_block->end = pos;
1721 if (basic_block_for_insn)
1722 set_block_for_insn (pos, jump_block);
1723 emit_barrier_after (pos);
1725 /* ... let jump know that label is in use, ... */
1726 JUMP_LABEL (pos) = old_succ->head;
1727 ++LABEL_NUSES (old_succ->head);
1729 /* ... and clear fallthru on the outgoing edge. */
1730 e->flags &= ~EDGE_FALLTHRU;
1732 /* Continue splitting the interesting edge. */
1736 /* Place the new block just in front of the successor. */
1737 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1738 if (old_succ == EXIT_BLOCK_PTR)
1739 j = n_basic_blocks - 1;
1741 j = old_succ->index;
1742 for (i = n_basic_blocks - 1; i > j; --i)
1744 basic_block tmp = BASIC_BLOCK (i - 1);
1745 BASIC_BLOCK (i) = tmp;
1748 BASIC_BLOCK (i) = bb;
1751 /* Create the basic block note.
1753 Where we place the note can have a noticable impact on the generated
1754 code. Consider this cfg:
1764 If we need to insert an insn on the edge from block 0 to block 1,
1765 we want to ensure the instructions we insert are outside of any
1766 loop notes that physically sit between block 0 and block 1. Otherwise
1767 we confuse the loop optimizer into thinking the loop is a phony. */
1768 if (old_succ != EXIT_BLOCK_PTR
1769 && PREV_INSN (old_succ->head)
1770 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
1771 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG)
1772 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1773 PREV_INSN (old_succ->head));
1774 else if (old_succ != EXIT_BLOCK_PTR)
1775 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
1777 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
1778 NOTE_BASIC_BLOCK (bb_note) = bb;
1779 bb->head = bb->end = bb_note;
1781 /* Not quite simple -- for non-fallthru edges, we must adjust the
1782 predecessor's jump instruction to target our new block. */
1783 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1785 rtx tmp, insn = old_pred->end;
1786 rtx old_label = old_succ->head;
1787 rtx new_label = gen_label_rtx ();
1789 if (GET_CODE (insn) != JUMP_INSN)
1792 /* ??? Recognize a tablejump and adjust all matching cases. */
1793 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1794 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1795 && GET_CODE (tmp) == JUMP_INSN
1796 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1797 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1802 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1803 vec = XVEC (PATTERN (tmp), 0);
1805 vec = XVEC (PATTERN (tmp), 1);
1807 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1808 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1810 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (VOIDmode, new_label);
1811 --LABEL_NUSES (old_label);
1812 ++LABEL_NUSES (new_label);
1815 /* Handle casesi dispatch insns */
1816 if ((tmp = single_set (insn)) != NULL
1817 && SET_DEST (tmp) == pc_rtx
1818 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1819 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1820 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1822 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1824 --LABEL_NUSES (old_label);
1825 ++LABEL_NUSES (new_label);
1830 /* This would have indicated an abnormal edge. */
1831 if (computed_jump_p (insn))
1834 /* A return instruction can't be redirected. */
1835 if (returnjump_p (insn))
1838 /* If the insn doesn't go where we think, we're confused. */
1839 if (JUMP_LABEL (insn) != old_label)
1842 redirect_jump (insn, new_label, 0);
1845 emit_label_before (new_label, bb_note);
1846 bb->head = new_label;
1852 /* Queue instructions for insertion on an edge between two basic blocks.
1853 The new instructions and basic blocks (if any) will not appear in the
1854 CFG until commit_edge_insertions is called. */
1857 insert_insn_on_edge (pattern, e)
1861 /* We cannot insert instructions on an abnormal critical edge.
1862 It will be easier to find the culprit if we die now. */
1863 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
1864 == (EDGE_ABNORMAL|EDGE_CRITICAL))
1867 if (e->insns == NULL_RTX)
1870 push_to_sequence (e->insns);
1872 emit_insn (pattern);
1874 e->insns = get_insns ();
1878 /* Update the CFG for the instructions queued on edge E. */
1881 commit_one_edge_insertion (e)
1884 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
1887 /* Pull the insns off the edge now since the edge might go away. */
1889 e->insns = NULL_RTX;
1891 /* Figure out where to put these things. If the destination has
1892 one predecessor, insert there. Except for the exit block. */
1893 if (e->dest->pred->pred_next == NULL
1894 && e->dest != EXIT_BLOCK_PTR)
1898 /* Get the location correct wrt a code label, and "nice" wrt
1899 a basic block note, and before everything else. */
1901 if (GET_CODE (tmp) == CODE_LABEL)
1902 tmp = NEXT_INSN (tmp);
1903 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1904 tmp = NEXT_INSN (tmp);
1905 if (tmp == bb->head)
1908 after = PREV_INSN (tmp);
1911 /* If the source has one successor and the edge is not abnormal,
1912 insert there. Except for the entry block. */
1913 else if ((e->flags & EDGE_ABNORMAL) == 0
1914 && e->src->succ->succ_next == NULL
1915 && e->src != ENTRY_BLOCK_PTR)
1918 /* It is possible to have a non-simple jump here. Consider a target
1919 where some forms of unconditional jumps clobber a register. This
1920 happens on the fr30 for example.
1922 We know this block has a single successor, so we can just emit
1923 the queued insns before the jump. */
1924 if (GET_CODE (bb->end) == JUMP_INSN)
1930 /* We'd better be fallthru, or we've lost track of what's what. */
1931 if ((e->flags & EDGE_FALLTHRU) == 0)
1938 /* Otherwise we must split the edge. */
1941 bb = split_edge (e);
1945 /* Now that we've found the spot, do the insertion. */
1947 /* Set the new block number for these insns, if structure is allocated. */
1948 if (basic_block_for_insn)
1951 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
1952 set_block_for_insn (i, bb);
1957 emit_insns_before (insns, before);
1958 if (before == bb->head)
1961 last = prev_nonnote_insn (before);
1965 last = emit_insns_after (insns, after);
1966 if (after == bb->end)
1970 if (returnjump_p (last))
1972 /* ??? Remove all outgoing edges from BB and add one for EXIT.
1973 This is not currently a problem because this only happens
1974 for the (single) epilogue, which already has a fallthru edge
1978 if (e->dest != EXIT_BLOCK_PTR
1979 || e->succ_next != NULL
1980 || (e->flags & EDGE_FALLTHRU) == 0)
1982 e->flags &= ~EDGE_FALLTHRU;
1984 emit_barrier_after (last);
1988 flow_delete_insn (before);
1990 else if (GET_CODE (last) == JUMP_INSN)
1994 /* Update the CFG for all queued instructions. */
1997 commit_edge_insertions ()
2002 #ifdef ENABLE_CHECKING
2003 verify_flow_info ();
2007 bb = ENTRY_BLOCK_PTR;
2012 for (e = bb->succ; e; e = next)
2014 next = e->succ_next;
2016 commit_one_edge_insertion (e);
2019 if (++i >= n_basic_blocks)
2021 bb = BASIC_BLOCK (i);
2025 /* Add fake edges to the function exit for any non constant calls in
2026 the bitmap of blocks specified by BLOCKS or to the whole CFG if
2027 BLOCKS is zero. Return the nuber of blocks that were split. */
2030 flow_call_edges_add (blocks)
2034 int blocks_split = 0;
2038 /* Map bb indicies into basic block pointers since split_block
2039 will renumber the basic blocks. */
2041 bbs = xmalloc (n_basic_blocks * sizeof (*bbs));
2045 for (i = 0; i < n_basic_blocks; i++)
2046 bbs[bb_num++] = BASIC_BLOCK (i);
2050 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2052 bbs[bb_num++] = BASIC_BLOCK (i);
2057 /* Now add fake edges to the function exit for any non constant
2058 calls since there is no way that we can determine if they will
2061 for (i = 0; i < bb_num; i++)
2063 basic_block bb = bbs[i];
2067 for (insn = bb->end; ; insn = prev_insn)
2069 prev_insn = PREV_INSN (insn);
2070 if (GET_CODE (insn) == CALL_INSN && ! CONST_CALL_P (insn))
2074 /* Note that the following may create a new basic block
2075 and renumber the existing basic blocks. */
2076 e = split_block (bb, insn);
2080 make_edge (NULL, bb, EXIT_BLOCK_PTR, EDGE_FAKE);
2082 if (insn == bb->head)
2088 verify_flow_info ();
2091 return blocks_split;
2094 /* Delete all unreachable basic blocks. */
2097 delete_unreachable_blocks ()
2099 basic_block *worklist, *tos;
2100 int deleted_handler;
2105 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
2107 /* Use basic_block->aux as a marker. Clear them all. */
2109 for (i = 0; i < n; ++i)
2110 BASIC_BLOCK (i)->aux = NULL;
2112 /* Add our starting points to the worklist. Almost always there will
2113 be only one. It isn't inconcievable that we might one day directly
2114 support Fortran alternate entry points. */
2116 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
2120 /* Mark the block with a handy non-null value. */
2124 /* Iterate: find everything reachable from what we've already seen. */
2126 while (tos != worklist)
2128 basic_block b = *--tos;
2130 for (e = b->succ; e; e = e->succ_next)
2138 /* Delete all unreachable basic blocks. Count down so that we don't
2139 interfere with the block renumbering that happens in flow_delete_block. */
2141 deleted_handler = 0;
2143 for (i = n - 1; i >= 0; --i)
2145 basic_block b = BASIC_BLOCK (i);
2148 /* This block was found. Tidy up the mark. */
2151 deleted_handler |= flow_delete_block (b);
2154 tidy_fallthru_edges ();
2156 /* If we deleted an exception handler, we may have EH region begin/end
2157 blocks to remove as well. */
2158 if (deleted_handler)
2159 delete_eh_regions ();
2164 /* Find EH regions for which there is no longer a handler, and delete them. */
2167 delete_eh_regions ()
2171 update_rethrow_references ();
2173 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2174 if (GET_CODE (insn) == NOTE)
2176 if ((NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2177 || (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END))
2179 int num = NOTE_EH_HANDLER (insn);
2180 /* A NULL handler indicates a region is no longer needed,
2181 as long as its rethrow label isn't used. */
2182 if (get_first_handler (num) == NULL && ! rethrow_used (num))
2184 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2185 NOTE_SOURCE_FILE (insn) = 0;
2191 /* Return true if NOTE is not one of the ones that must be kept paired,
2192 so that we may simply delete them. */
2195 can_delete_note_p (note)
2198 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
2199 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
2202 /* Unlink a chain of insns between START and FINISH, leaving notes
2203 that must be paired. */
2206 flow_delete_insn_chain (start, finish)
2209 /* Unchain the insns one by one. It would be quicker to delete all
2210 of these with a single unchaining, rather than one at a time, but
2211 we need to keep the NOTE's. */
2217 next = NEXT_INSN (start);
2218 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
2220 else if (GET_CODE (start) == CODE_LABEL
2221 && ! can_delete_label_p (start))
2223 const char *name = LABEL_NAME (start);
2224 PUT_CODE (start, NOTE);
2225 NOTE_LINE_NUMBER (start) = NOTE_INSN_DELETED_LABEL;
2226 NOTE_SOURCE_FILE (start) = name;
2229 next = flow_delete_insn (start);
2231 if (start == finish)
2237 /* Delete the insns in a (non-live) block. We physically delete every
2238 non-deleted-note insn, and update the flow graph appropriately.
2240 Return nonzero if we deleted an exception handler. */
2242 /* ??? Preserving all such notes strikes me as wrong. It would be nice
2243 to post-process the stream to remove empty blocks, loops, ranges, etc. */
2246 flow_delete_block (b)
2249 int deleted_handler = 0;
2252 /* If the head of this block is a CODE_LABEL, then it might be the
2253 label for an exception handler which can't be reached.
2255 We need to remove the label from the exception_handler_label list
2256 and remove the associated NOTE_INSN_EH_REGION_BEG and
2257 NOTE_INSN_EH_REGION_END notes. */
2261 never_reached_warning (insn);
2263 if (GET_CODE (insn) == CODE_LABEL)
2265 rtx x, *prev = &exception_handler_labels;
2267 for (x = exception_handler_labels; x; x = XEXP (x, 1))
2269 if (XEXP (x, 0) == insn)
2271 /* Found a match, splice this label out of the EH label list. */
2272 *prev = XEXP (x, 1);
2273 XEXP (x, 1) = NULL_RTX;
2274 XEXP (x, 0) = NULL_RTX;
2276 /* Remove the handler from all regions */
2277 remove_handler (insn);
2278 deleted_handler = 1;
2281 prev = &XEXP (x, 1);
2285 /* Include any jump table following the basic block. */
2287 if (GET_CODE (end) == JUMP_INSN
2288 && (tmp = JUMP_LABEL (end)) != NULL_RTX
2289 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
2290 && GET_CODE (tmp) == JUMP_INSN
2291 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
2292 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
2295 /* Include any barrier that may follow the basic block. */
2296 tmp = next_nonnote_insn (end);
2297 if (tmp && GET_CODE (tmp) == BARRIER)
2300 /* Selectively delete the entire chain. */
2301 flow_delete_insn_chain (insn, end);
2303 /* Remove the edges into and out of this block. Note that there may
2304 indeed be edges in, if we are removing an unreachable loop. */
2308 for (e = b->pred; e; e = next)
2310 for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
2313 next = e->pred_next;
2317 for (e = b->succ; e; e = next)
2319 for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
2322 next = e->succ_next;
2331 /* Remove the basic block from the array, and compact behind it. */
2334 return deleted_handler;
2337 /* Remove block B from the basic block array and compact behind it. */
2343 int i, n = n_basic_blocks;
2345 for (i = b->index; i + 1 < n; ++i)
2347 basic_block x = BASIC_BLOCK (i + 1);
2348 BASIC_BLOCK (i) = x;
2352 basic_block_info->num_elements--;
2356 /* Delete INSN by patching it out. Return the next insn. */
2359 flow_delete_insn (insn)
2362 rtx prev = PREV_INSN (insn);
2363 rtx next = NEXT_INSN (insn);
2366 PREV_INSN (insn) = NULL_RTX;
2367 NEXT_INSN (insn) = NULL_RTX;
2368 INSN_DELETED_P (insn) = 1;
2371 NEXT_INSN (prev) = next;
2373 PREV_INSN (next) = prev;
2375 set_last_insn (prev);
2377 if (GET_CODE (insn) == CODE_LABEL)
2378 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2380 /* If deleting a jump, decrement the use count of the label. Deleting
2381 the label itself should happen in the normal course of block merging. */
2382 if (GET_CODE (insn) == JUMP_INSN
2383 && JUMP_LABEL (insn)
2384 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
2385 LABEL_NUSES (JUMP_LABEL (insn))--;
2387 /* Also if deleting an insn that references a label. */
2388 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
2389 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2390 LABEL_NUSES (XEXP (note, 0))--;
2395 /* True if a given label can be deleted. */
2398 can_delete_label_p (label)
2403 if (LABEL_PRESERVE_P (label))
2406 for (x = forced_labels; x; x = XEXP (x, 1))
2407 if (label == XEXP (x, 0))
2409 for (x = label_value_list; x; x = XEXP (x, 1))
2410 if (label == XEXP (x, 0))
2412 for (x = exception_handler_labels; x; x = XEXP (x, 1))
2413 if (label == XEXP (x, 0))
2416 /* User declared labels must be preserved. */
2417 if (LABEL_NAME (label) != 0)
2424 tail_recursion_label_p (label)
2429 for (x = tail_recursion_label_list; x; x = XEXP (x, 1))
2430 if (label == XEXP (x, 0))
2436 /* Blocks A and B are to be merged into a single block A. The insns
2437 are already contiguous, hence `nomove'. */
2440 merge_blocks_nomove (a, b)
2444 rtx b_head, b_end, a_end;
2445 rtx del_first = NULL_RTX, del_last = NULL_RTX;
2448 /* If there was a CODE_LABEL beginning B, delete it. */
2451 if (GET_CODE (b_head) == CODE_LABEL)
2453 /* Detect basic blocks with nothing but a label. This can happen
2454 in particular at the end of a function. */
2455 if (b_head == b_end)
2457 del_first = del_last = b_head;
2458 b_head = NEXT_INSN (b_head);
2461 /* Delete the basic block note. */
2462 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
2464 if (b_head == b_end)
2469 b_head = NEXT_INSN (b_head);
2472 /* If there was a jump out of A, delete it. */
2474 if (GET_CODE (a_end) == JUMP_INSN)
2478 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
2479 if (GET_CODE (prev) != NOTE
2480 || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
2487 /* If this was a conditional jump, we need to also delete
2488 the insn that set cc0. */
2489 if (prev && sets_cc0_p (prev))
2492 prev = prev_nonnote_insn (prev);
2501 else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
2502 del_first = NEXT_INSN (a_end);
2504 /* Delete everything marked above as well as crap that might be
2505 hanging out between the two blocks. */
2506 flow_delete_insn_chain (del_first, del_last);
2508 /* Normally there should only be one successor of A and that is B, but
2509 partway though the merge of blocks for conditional_execution we'll
2510 be merging a TEST block with THEN and ELSE successors. Free the
2511 whole lot of them and hope the caller knows what they're doing. */
2513 remove_edge (a->succ);
2515 /* Adjust the edges out of B for the new owner. */
2516 for (e = b->succ; e; e = e->succ_next)
2520 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
2521 b->pred = b->succ = NULL;
2523 /* Reassociate the insns of B with A. */
2526 if (basic_block_for_insn)
2528 BLOCK_FOR_INSN (b_head) = a;
2529 while (b_head != b_end)
2531 b_head = NEXT_INSN (b_head);
2532 BLOCK_FOR_INSN (b_head) = a;
2542 /* Blocks A and B are to be merged into a single block. A has no incoming
2543 fallthru edge, so it can be moved before B without adding or modifying
2544 any jumps (aside from the jump from A to B). */
2547 merge_blocks_move_predecessor_nojumps (a, b)
2550 rtx start, end, barrier;
2556 barrier = next_nonnote_insn (end);
2557 if (GET_CODE (barrier) != BARRIER)
2559 flow_delete_insn (barrier);
2561 /* Move block and loop notes out of the chain so that we do not
2562 disturb their order.
2564 ??? A better solution would be to squeeze out all the non-nested notes
2565 and adjust the block trees appropriately. Even better would be to have
2566 a tighter connection between block trees and rtl so that this is not
2568 start = squeeze_notes (start, end);
2570 /* Scramble the insn chain. */
2571 if (end != PREV_INSN (b->head))
2572 reorder_insns (start, end, PREV_INSN (b->head));
2576 fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2577 a->index, b->index);
2580 /* Swap the records for the two blocks around. Although we are deleting B,
2581 A is now where B was and we want to compact the BB array from where
2583 BASIC_BLOCK (a->index) = b;
2584 BASIC_BLOCK (b->index) = a;
2586 a->index = b->index;
2589 /* Now blocks A and B are contiguous. Merge them. */
2590 merge_blocks_nomove (a, b);
2595 /* Blocks A and B are to be merged into a single block. B has no outgoing
2596 fallthru edge, so it can be moved after A without adding or modifying
2597 any jumps (aside from the jump from A to B). */
2600 merge_blocks_move_successor_nojumps (a, b)
2603 rtx start, end, barrier;
2607 barrier = NEXT_INSN (end);
2609 /* Recognize a jump table following block B. */
2610 if (GET_CODE (barrier) == CODE_LABEL
2611 && NEXT_INSN (barrier)
2612 && GET_CODE (NEXT_INSN (barrier)) == JUMP_INSN
2613 && (GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_VEC
2614 || GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_DIFF_VEC))
2616 end = NEXT_INSN (barrier);
2617 barrier = NEXT_INSN (end);
2620 /* There had better have been a barrier there. Delete it. */
2621 if (GET_CODE (barrier) != BARRIER)
2623 flow_delete_insn (barrier);
2625 /* Move block and loop notes out of the chain so that we do not
2626 disturb their order.
2628 ??? A better solution would be to squeeze out all the non-nested notes
2629 and adjust the block trees appropriately. Even better would be to have
2630 a tighter connection between block trees and rtl so that this is not
2632 start = squeeze_notes (start, end);
2634 /* Scramble the insn chain. */
2635 reorder_insns (start, end, a->end);
2637 /* Now blocks A and B are contiguous. Merge them. */
2638 merge_blocks_nomove (a, b);
2642 fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2643 b->index, a->index);
2649 /* Attempt to merge basic blocks that are potentially non-adjacent.
2650 Return true iff the attempt succeeded. */
2653 merge_blocks (e, b, c)
2657 /* If C has a tail recursion label, do not merge. There is no
2658 edge recorded from the call_placeholder back to this label, as
2659 that would make optimize_sibling_and_tail_recursive_calls more
2660 complex for no gain. */
2661 if (GET_CODE (c->head) == CODE_LABEL
2662 && tail_recursion_label_p (c->head))
2665 /* If B has a fallthru edge to C, no need to move anything. */
2666 if (e->flags & EDGE_FALLTHRU)
2668 merge_blocks_nomove (b, c);
2672 fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2673 b->index, c->index);
2682 int c_has_outgoing_fallthru;
2683 int b_has_incoming_fallthru;
2685 /* We must make sure to not munge nesting of exception regions,
2686 lexical blocks, and loop notes.
2688 The first is taken care of by requiring that the active eh
2689 region at the end of one block always matches the active eh
2690 region at the beginning of the next block.
2692 The later two are taken care of by squeezing out all the notes. */
2694 /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2695 executed and we may want to treat blocks which have two out
2696 edges, one normal, one abnormal as only having one edge for
2697 block merging purposes. */
2699 for (tmp_edge = c->succ; tmp_edge; tmp_edge = tmp_edge->succ_next)
2700 if (tmp_edge->flags & EDGE_FALLTHRU)
2702 c_has_outgoing_fallthru = (tmp_edge != NULL);
2704 for (tmp_edge = b->pred; tmp_edge; tmp_edge = tmp_edge->pred_next)
2705 if (tmp_edge->flags & EDGE_FALLTHRU)
2707 b_has_incoming_fallthru = (tmp_edge != NULL);
2709 /* If B does not have an incoming fallthru, and the exception regions
2710 match, then it can be moved immediately before C without introducing
2713 C can not be the first block, so we do not have to worry about
2714 accessing a non-existent block. */
2715 d = BASIC_BLOCK (c->index - 1);
2716 if (! b_has_incoming_fallthru
2717 && d->eh_end == b->eh_beg
2718 && b->eh_end == c->eh_beg)
2719 return merge_blocks_move_predecessor_nojumps (b, c);
2721 /* Otherwise, we're going to try to move C after B. Make sure the
2722 exception regions match.
2724 If B is the last basic block, then we must not try to access the
2725 block structure for block B + 1. Luckily in that case we do not
2726 need to worry about matching exception regions. */
2727 d = (b->index + 1 < n_basic_blocks ? BASIC_BLOCK (b->index + 1) : NULL);
2728 if (b->eh_end == c->eh_beg
2729 && (d == NULL || c->eh_end == d->eh_beg))
2731 /* If C does not have an outgoing fallthru, then it can be moved
2732 immediately after B without introducing or modifying jumps. */
2733 if (! c_has_outgoing_fallthru)
2734 return merge_blocks_move_successor_nojumps (b, c);
2736 /* Otherwise, we'll need to insert an extra jump, and possibly
2737 a new block to contain it. */
2738 /* ??? Not implemented yet. */
2745 /* Top level driver for merge_blocks. */
2752 /* Attempt to merge blocks as made possible by edge removal. If a block
2753 has only one successor, and the successor has only one predecessor,
2754 they may be combined. */
2756 for (i = 0; i < n_basic_blocks;)
2758 basic_block c, b = BASIC_BLOCK (i);
2761 /* A loop because chains of blocks might be combineable. */
2762 while ((s = b->succ) != NULL
2763 && s->succ_next == NULL
2764 && (s->flags & EDGE_EH) == 0
2765 && (c = s->dest) != EXIT_BLOCK_PTR
2766 && c->pred->pred_next == NULL
2767 /* If the jump insn has side effects, we can't kill the edge. */
2768 && (GET_CODE (b->end) != JUMP_INSN
2769 || onlyjump_p (b->end))
2770 && merge_blocks (s, b, c))
2773 /* Don't get confused by the index shift caused by deleting blocks. */
2778 /* The given edge should potentially be a fallthru edge. If that is in
2779 fact true, delete the jump and barriers that are in the way. */
2782 tidy_fallthru_edge (e, b, c)
2788 /* ??? In a late-running flow pass, other folks may have deleted basic
2789 blocks by nopping out blocks, leaving multiple BARRIERs between here
2790 and the target label. They ought to be chastized and fixed.
2792 We can also wind up with a sequence of undeletable labels between
2793 one block and the next.
2795 So search through a sequence of barriers, labels, and notes for
2796 the head of block C and assert that we really do fall through. */
2798 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
2801 /* Remove what will soon cease being the jump insn from the source block.
2802 If block B consisted only of this single jump, turn it into a deleted
2805 if (GET_CODE (q) == JUMP_INSN
2807 && (any_uncondjump_p (q)
2808 || (b->succ == e && e->succ_next == NULL)))
2811 /* If this was a conditional jump, we need to also delete
2812 the insn that set cc0. */
2813 if (any_condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
2820 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
2821 NOTE_SOURCE_FILE (q) = 0;
2827 /* We don't want a block to end on a line-number note since that has
2828 the potential of changing the code between -g and not -g. */
2829 while (GET_CODE (q) == NOTE && NOTE_LINE_NUMBER (q) >= 0)
2836 /* Selectively unlink the sequence. */
2837 if (q != PREV_INSN (c->head))
2838 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
2840 e->flags |= EDGE_FALLTHRU;
2843 /* Fix up edges that now fall through, or rather should now fall through
2844 but previously required a jump around now deleted blocks. Simplify
2845 the search by only examining blocks numerically adjacent, since this
2846 is how find_basic_blocks created them. */
2849 tidy_fallthru_edges ()
2853 for (i = 1; i < n_basic_blocks; ++i)
2855 basic_block b = BASIC_BLOCK (i - 1);
2856 basic_block c = BASIC_BLOCK (i);
2859 /* We care about simple conditional or unconditional jumps with
2862 If we had a conditional branch to the next instruction when
2863 find_basic_blocks was called, then there will only be one
2864 out edge for the block which ended with the conditional
2865 branch (since we do not create duplicate edges).
2867 Furthermore, the edge will be marked as a fallthru because we
2868 merge the flags for the duplicate edges. So we do not want to
2869 check that the edge is not a FALLTHRU edge. */
2870 if ((s = b->succ) != NULL
2871 && s->succ_next == NULL
2873 /* If the jump insn has side effects, we can't tidy the edge. */
2874 && (GET_CODE (b->end) != JUMP_INSN
2875 || onlyjump_p (b->end)))
2876 tidy_fallthru_edge (s, b, c);
2880 /* Perform data flow analysis.
2881 F is the first insn of the function; FLAGS is a set of PROP_* flags
2882 to be used in accumulating flow info. */
2885 life_analysis (f, file, flags)
2890 #ifdef ELIMINABLE_REGS
2892 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
2895 /* Record which registers will be eliminated. We use this in
2898 CLEAR_HARD_REG_SET (elim_reg_set);
2900 #ifdef ELIMINABLE_REGS
2901 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
2902 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2904 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2908 flags &= ~(PROP_LOG_LINKS | PROP_AUTOINC);
2910 /* The post-reload life analysis have (on a global basis) the same
2911 registers live as was computed by reload itself. elimination
2912 Otherwise offsets and such may be incorrect.
2914 Reload will make some registers as live even though they do not
2917 We don't want to create new auto-incs after reload, since they
2918 are unlikely to be useful and can cause problems with shared
2920 if (reload_completed)
2921 flags &= ~(PROP_REG_INFO | PROP_AUTOINC);
2923 /* We want alias analysis information for local dead store elimination. */
2924 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
2925 init_alias_analysis ();
2927 /* Always remove no-op moves. Do this before other processing so
2928 that we don't have to keep re-scanning them. */
2929 delete_noop_moves (f);
2931 /* Some targets can emit simpler epilogues if they know that sp was
2932 not ever modified during the function. After reload, of course,
2933 we've already emitted the epilogue so there's no sense searching. */
2934 if (! reload_completed)
2935 notice_stack_pointer_modification (f);
2937 /* Allocate and zero out data structures that will record the
2938 data from lifetime analysis. */
2939 allocate_reg_life_data ();
2940 allocate_bb_life_data ();
2942 /* Find the set of registers live on function exit. */
2943 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
2945 /* "Update" life info from zero. It'd be nice to begin the
2946 relaxation with just the exit and noreturn blocks, but that set
2947 is not immediately handy. */
2949 if (flags & PROP_REG_INFO)
2950 memset (regs_ever_live, 0, sizeof (regs_ever_live));
2951 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
2954 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
2955 end_alias_analysis ();
2958 dump_flow_info (file);
2960 free_basic_block_vars (1);
2963 /* A subroutine of verify_wide_reg, called through for_each_rtx.
2964 Search for REGNO. If found, abort if it is not wider than word_mode. */
2967 verify_wide_reg_1 (px, pregno)
2972 unsigned int regno = *(int *) pregno;
2974 if (GET_CODE (x) == REG && REGNO (x) == regno)
2976 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
2983 /* A subroutine of verify_local_live_at_start. Search through insns
2984 between HEAD and END looking for register REGNO. */
2987 verify_wide_reg (regno, head, end)
2994 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, ®no))
2998 head = NEXT_INSN (head);
3001 /* We didn't find the register at all. Something's way screwy. */
3003 fprintf (rtl_dump_file, "Aborting in verify_wide_reg; reg %d\n", regno);
3004 print_rtl_and_abort ();
3007 /* A subroutine of update_life_info. Verify that there are no untoward
3008 changes in live_at_start during a local update. */
3011 verify_local_live_at_start (new_live_at_start, bb)
3012 regset new_live_at_start;
3015 if (reload_completed)
3017 /* After reload, there are no pseudos, nor subregs of multi-word
3018 registers. The regsets should exactly match. */
3019 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
3023 fprintf (rtl_dump_file,
3024 "live_at_start mismatch in bb %d, aborting\n",
3026 debug_bitmap_file (rtl_dump_file, bb->global_live_at_start);
3027 debug_bitmap_file (rtl_dump_file, new_live_at_start);
3029 print_rtl_and_abort ();
3036 /* Find the set of changed registers. */
3037 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
3039 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
3041 /* No registers should die. */
3042 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
3045 fprintf (rtl_dump_file,
3046 "Register %d died unexpectedly in block %d\n", i,
3048 print_rtl_and_abort ();
3051 /* Verify that the now-live register is wider than word_mode. */
3052 verify_wide_reg (i, bb->head, bb->end);
3057 /* Updates life information starting with the basic blocks set in BLOCKS.
3058 If BLOCKS is null, consider it to be the universal set.
3060 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholeing,
3061 we are only expecting local modifications to basic blocks. If we find
3062 extra registers live at the beginning of a block, then we either killed
3063 useful data, or we have a broken split that wants data not provided.
3064 If we find registers removed from live_at_start, that means we have
3065 a broken peephole that is killing a register it shouldn't.
3067 ??? This is not true in one situation -- when a pre-reload splitter
3068 generates subregs of a multi-word pseudo, current life analysis will
3069 lose the kill. So we _can_ have a pseudo go live. How irritating.
3071 Including PROP_REG_INFO does not properly refresh regs_ever_live
3072 unless the caller resets it to zero. */
3075 update_life_info (blocks, extent, prop_flags)
3077 enum update_life_extent extent;
3081 regset_head tmp_head;
3084 tmp = INITIALIZE_REG_SET (tmp_head);
3086 /* For a global update, we go through the relaxation process again. */
3087 if (extent != UPDATE_LIFE_LOCAL)
3089 calculate_global_regs_live (blocks, blocks,
3090 prop_flags & PROP_SCAN_DEAD_CODE);
3092 /* If asked, remove notes from the blocks we'll update. */
3093 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
3094 count_or_remove_death_notes (blocks, 1);
3099 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
3101 basic_block bb = BASIC_BLOCK (i);
3103 COPY_REG_SET (tmp, bb->global_live_at_end);
3104 propagate_block (bb, tmp, NULL, NULL, prop_flags);
3106 if (extent == UPDATE_LIFE_LOCAL)
3107 verify_local_live_at_start (tmp, bb);
3112 for (i = n_basic_blocks - 1; i >= 0; --i)
3114 basic_block bb = BASIC_BLOCK (i);
3116 COPY_REG_SET (tmp, bb->global_live_at_end);
3117 propagate_block (bb, tmp, NULL, NULL, prop_flags);
3119 if (extent == UPDATE_LIFE_LOCAL)
3120 verify_local_live_at_start (tmp, bb);
3126 if (prop_flags & PROP_REG_INFO)
3128 /* The only pseudos that are live at the beginning of the function
3129 are those that were not set anywhere in the function. local-alloc
3130 doesn't know how to handle these correctly, so mark them as not
3131 local to any one basic block. */
3132 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
3133 FIRST_PSEUDO_REGISTER, i,
3134 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
3136 /* We have a problem with any pseudoreg that lives across the setjmp.
3137 ANSI says that if a user variable does not change in value between
3138 the setjmp and the longjmp, then the longjmp preserves it. This
3139 includes longjmp from a place where the pseudo appears dead.
3140 (In principle, the value still exists if it is in scope.)
3141 If the pseudo goes in a hard reg, some other value may occupy
3142 that hard reg where this pseudo is dead, thus clobbering the pseudo.
3143 Conclusion: such a pseudo must not go in a hard reg. */
3144 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
3145 FIRST_PSEUDO_REGISTER, i,
3147 if (regno_reg_rtx[i] != 0)
3149 REG_LIVE_LENGTH (i) = -1;
3150 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3156 /* Free the variables allocated by find_basic_blocks.
3158 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
3161 free_basic_block_vars (keep_head_end_p)
3162 int keep_head_end_p;
3164 if (basic_block_for_insn)
3166 VARRAY_FREE (basic_block_for_insn);
3167 basic_block_for_insn = NULL;
3170 if (! keep_head_end_p)
3173 VARRAY_FREE (basic_block_info);
3176 ENTRY_BLOCK_PTR->aux = NULL;
3177 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
3178 EXIT_BLOCK_PTR->aux = NULL;
3179 EXIT_BLOCK_PTR->global_live_at_start = NULL;
3183 /* Return nonzero if the destination of SET equals the source. */
3189 rtx src = SET_SRC (set);
3190 rtx dst = SET_DEST (set);
3192 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
3194 if (SUBREG_WORD (src) != SUBREG_WORD (dst))
3196 src = SUBREG_REG (src);
3197 dst = SUBREG_REG (dst);
3200 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
3201 && REGNO (src) == REGNO (dst));
3204 /* Return nonzero if an insn consists only of SETs, each of which only sets a
3211 rtx pat = PATTERN (insn);
3213 /* Insns carrying these notes are useful later on. */
3214 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
3217 if (GET_CODE (pat) == SET && set_noop_p (pat))
3220 if (GET_CODE (pat) == PARALLEL)
3223 /* If nothing but SETs of registers to themselves,
3224 this insn can also be deleted. */
3225 for (i = 0; i < XVECLEN (pat, 0); i++)
3227 rtx tem = XVECEXP (pat, 0, i);
3229 if (GET_CODE (tem) == USE
3230 || GET_CODE (tem) == CLOBBER)
3233 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
3242 /* Delete any insns that copy a register to itself. */
3245 delete_noop_moves (f)
3249 for (insn = f; insn; insn = NEXT_INSN (insn))
3251 if (GET_CODE (insn) == INSN && noop_move_p (insn))
3253 PUT_CODE (insn, NOTE);
3254 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
3255 NOTE_SOURCE_FILE (insn) = 0;
3260 /* Determine if the stack pointer is constant over the life of the function.
3261 Only useful before prologues have been emitted. */
3264 notice_stack_pointer_modification_1 (x, pat, data)
3266 rtx pat ATTRIBUTE_UNUSED;
3267 void *data ATTRIBUTE_UNUSED;
3269 if (x == stack_pointer_rtx
3270 /* The stack pointer is only modified indirectly as the result
3271 of a push until later in flow. See the comments in rtl.texi
3272 regarding Embedded Side-Effects on Addresses. */
3273 || (GET_CODE (x) == MEM
3274 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'a'
3275 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
3276 current_function_sp_is_unchanging = 0;
3280 notice_stack_pointer_modification (f)
3285 /* Assume that the stack pointer is unchanging if alloca hasn't
3287 current_function_sp_is_unchanging = !current_function_calls_alloca;
3288 if (! current_function_sp_is_unchanging)
3291 for (insn = f; insn; insn = NEXT_INSN (insn))
3295 /* Check if insn modifies the stack pointer. */
3296 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
3298 if (! current_function_sp_is_unchanging)
3304 /* Mark a register in SET. Hard registers in large modes get all
3305 of their component registers set as well. */
3308 mark_reg (reg, xset)
3312 regset set = (regset) xset;
3313 int regno = REGNO (reg);
3315 if (GET_MODE (reg) == BLKmode)
3318 SET_REGNO_REG_SET (set, regno);
3319 if (regno < FIRST_PSEUDO_REGISTER)
3321 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
3323 SET_REGNO_REG_SET (set, regno + n);
3327 /* Mark those regs which are needed at the end of the function as live
3328 at the end of the last basic block. */
3331 mark_regs_live_at_end (set)
3336 /* If exiting needs the right stack value, consider the stack pointer
3337 live at the end of the function. */
3338 if ((HAVE_epilogue && reload_completed)
3339 || ! EXIT_IGNORE_STACK
3340 || (! FRAME_POINTER_REQUIRED
3341 && ! current_function_calls_alloca
3342 && flag_omit_frame_pointer)
3343 || current_function_sp_is_unchanging)
3345 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
3348 /* Mark the frame pointer if needed at the end of the function. If
3349 we end up eliminating it, it will be removed from the live list
3350 of each basic block by reload. */
3352 if (! reload_completed || frame_pointer_needed)
3354 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
3355 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3356 /* If they are different, also mark the hard frame pointer as live. */
3357 if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3358 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
3362 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
3363 /* Many architectures have a GP register even without flag_pic.
3364 Assume the pic register is not in use, or will be handled by
3365 other means, if it is not fixed. */
3366 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3367 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3368 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
3371 /* Mark all global registers, and all registers used by the epilogue
3372 as being live at the end of the function since they may be
3373 referenced by our caller. */
3374 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3375 if (global_regs[i] || EPILOGUE_USES (i))
3376 SET_REGNO_REG_SET (set, i);
3378 /* Mark all call-saved registers that we actaully used. */
3379 if (HAVE_epilogue && reload_completed)
3381 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3382 if (regs_ever_live[i] && ! call_used_regs[i] && ! LOCAL_REGNO (i))
3383 SET_REGNO_REG_SET (set, i);
3386 /* Mark function return value. */
3387 diddle_return_value (mark_reg, set);
3390 /* Callback function for for_each_successor_phi. DATA is a regset.
3391 Sets the SRC_REGNO, the regno of the phi alternative for phi node
3392 INSN, in the regset. */
3395 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
3396 rtx insn ATTRIBUTE_UNUSED;
3397 int dest_regno ATTRIBUTE_UNUSED;
3401 regset live = (regset) data;
3402 SET_REGNO_REG_SET (live, src_regno);
3406 /* Propagate global life info around the graph of basic blocks. Begin
3407 considering blocks with their corresponding bit set in BLOCKS_IN.
3408 If BLOCKS_IN is null, consider it the universal set.
3410 BLOCKS_OUT is set for every block that was changed. */
3413 calculate_global_regs_live (blocks_in, blocks_out, flags)
3414 sbitmap blocks_in, blocks_out;
3417 basic_block *queue, *qhead, *qtail, *qend;
3418 regset tmp, new_live_at_end;
3419 regset_head tmp_head;
3420 regset_head new_live_at_end_head;
3423 tmp = INITIALIZE_REG_SET (tmp_head);
3424 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
3426 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
3427 because the `head == tail' style test for an empty queue doesn't
3428 work with a full queue. */
3429 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
3431 qhead = qend = queue + n_basic_blocks + 2;
3433 /* Queue the blocks set in the initial mask. Do this in reverse block
3434 number order so that we are more likely for the first round to do
3435 useful work. We use AUX non-null to flag that the block is queued. */
3438 /* Clear out the garbage that might be hanging out in bb->aux. */
3439 for (i = n_basic_blocks - 1; i >= 0; --i)
3440 BASIC_BLOCK (i)->aux = NULL;
3442 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
3444 basic_block bb = BASIC_BLOCK (i);
3451 for (i = 0; i < n_basic_blocks; ++i)
3453 basic_block bb = BASIC_BLOCK (i);
3460 sbitmap_zero (blocks_out);
3462 while (qhead != qtail)
3464 int rescan, changed;
3473 /* Begin by propogating live_at_start from the successor blocks. */
3474 CLEAR_REG_SET (new_live_at_end);
3475 for (e = bb->succ; e; e = e->succ_next)
3477 basic_block sb = e->dest;
3478 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
3481 /* The all-important stack pointer must always be live. */
3482 SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
3484 /* Before reload, there are a few registers that must be forced
3485 live everywhere -- which might not already be the case for
3486 blocks within infinite loops. */
3487 if (! reload_completed)
3489 /* Any reference to any pseudo before reload is a potential
3490 reference of the frame pointer. */
3491 SET_REGNO_REG_SET (new_live_at_end, FRAME_POINTER_REGNUM);
3493 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3494 /* Pseudos with argument area equivalences may require
3495 reloading via the argument pointer. */
3496 if (fixed_regs[ARG_POINTER_REGNUM])
3497 SET_REGNO_REG_SET (new_live_at_end, ARG_POINTER_REGNUM);
3500 /* Any constant, or pseudo with constant equivalences, may
3501 require reloading from memory using the pic register. */
3502 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3503 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3504 SET_REGNO_REG_SET (new_live_at_end, PIC_OFFSET_TABLE_REGNUM);
3507 /* Regs used in phi nodes are not included in
3508 global_live_at_start, since they are live only along a
3509 particular edge. Set those regs that are live because of a
3510 phi node alternative corresponding to this particular block. */
3512 for_each_successor_phi (bb, &set_phi_alternative_reg,
3515 if (bb == ENTRY_BLOCK_PTR)
3517 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3521 /* On our first pass through this block, we'll go ahead and continue.
3522 Recognize first pass by local_set NULL. On subsequent passes, we
3523 get to skip out early if live_at_end wouldn't have changed. */
3525 if (bb->local_set == NULL)
3527 bb->local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3528 bb->cond_local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3533 /* If any bits were removed from live_at_end, we'll have to
3534 rescan the block. This wouldn't be necessary if we had
3535 precalculated local_live, however with PROP_SCAN_DEAD_CODE
3536 local_live is really dependent on live_at_end. */
3537 CLEAR_REG_SET (tmp);
3538 rescan = bitmap_operation (tmp, bb->global_live_at_end,
3539 new_live_at_end, BITMAP_AND_COMPL);
3543 /* If any of the registers in the new live_at_end set are
3544 conditionally set in this basic block, we must rescan.
3545 This is because conditional lifetimes at the end of the
3546 block do not just take the live_at_end set into account,
3547 but also the liveness at the start of each successor
3548 block. We can miss changes in those sets if we only
3549 compare the new live_at_end against the previous one. */
3550 CLEAR_REG_SET (tmp);
3551 rescan = bitmap_operation (tmp, new_live_at_end,
3552 bb->cond_local_set, BITMAP_AND);
3557 /* Find the set of changed bits. Take this opportunity
3558 to notice that this set is empty and early out. */
3559 CLEAR_REG_SET (tmp);
3560 changed = bitmap_operation (tmp, bb->global_live_at_end,
3561 new_live_at_end, BITMAP_XOR);
3565 /* If any of the changed bits overlap with local_set,
3566 we'll have to rescan the block. Detect overlap by
3567 the AND with ~local_set turning off bits. */
3568 rescan = bitmap_operation (tmp, tmp, bb->local_set,
3573 /* Let our caller know that BB changed enough to require its
3574 death notes updated. */
3576 SET_BIT (blocks_out, bb->index);
3580 /* Add to live_at_start the set of all registers in
3581 new_live_at_end that aren't in the old live_at_end. */
3583 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
3585 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3587 changed = bitmap_operation (bb->global_live_at_start,
3588 bb->global_live_at_start,
3595 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
3597 /* Rescan the block insn by insn to turn (a copy of) live_at_end
3598 into live_at_start. */
3599 propagate_block (bb, new_live_at_end, bb->local_set,
3600 bb->cond_local_set, flags);
3602 /* If live_at start didn't change, no need to go farther. */
3603 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
3606 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
3609 /* Queue all predecessors of BB so that we may re-examine
3610 their live_at_end. */
3611 for (e = bb->pred; e; e = e->pred_next)
3613 basic_block pb = e->src;
3614 if (pb->aux == NULL)
3625 FREE_REG_SET (new_live_at_end);
3629 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
3631 basic_block bb = BASIC_BLOCK (i);
3632 FREE_REG_SET (bb->local_set);
3633 FREE_REG_SET (bb->cond_local_set);
3638 for (i = n_basic_blocks - 1; i >= 0; --i)
3640 basic_block bb = BASIC_BLOCK (i);
3641 FREE_REG_SET (bb->local_set);
3642 FREE_REG_SET (bb->cond_local_set);
3649 /* Subroutines of life analysis. */
3651 /* Allocate the permanent data structures that represent the results
3652 of life analysis. Not static since used also for stupid life analysis. */
3655 allocate_bb_life_data ()
3659 for (i = 0; i < n_basic_blocks; i++)
3661 basic_block bb = BASIC_BLOCK (i);
3663 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3664 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3667 ENTRY_BLOCK_PTR->global_live_at_end
3668 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3669 EXIT_BLOCK_PTR->global_live_at_start
3670 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3672 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (&flow_obstack);
3676 allocate_reg_life_data ()
3680 max_regno = max_reg_num ();
3682 /* Recalculate the register space, in case it has grown. Old style
3683 vector oriented regsets would set regset_{size,bytes} here also. */
3684 allocate_reg_info (max_regno, FALSE, FALSE);
3686 /* Reset all the data we'll collect in propagate_block and its
3688 for (i = 0; i < max_regno; i++)
3692 REG_N_DEATHS (i) = 0;
3693 REG_N_CALLS_CROSSED (i) = 0;
3694 REG_LIVE_LENGTH (i) = 0;
3695 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
3699 /* Delete dead instructions for propagate_block. */
3702 propagate_block_delete_insn (bb, insn)
3706 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
3708 /* If the insn referred to a label, and that label was attached to
3709 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
3710 pretty much mandatory to delete it, because the ADDR_VEC may be
3711 referencing labels that no longer exist. */
3715 rtx label = XEXP (inote, 0);
3718 if (LABEL_NUSES (label) == 1
3719 && (next = next_nonnote_insn (label)) != NULL
3720 && GET_CODE (next) == JUMP_INSN
3721 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3722 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
3724 rtx pat = PATTERN (next);
3725 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3726 int len = XVECLEN (pat, diff_vec_p);
3729 for (i = 0; i < len; i++)
3730 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
3732 flow_delete_insn (next);
3736 if (bb->end == insn)
3737 bb->end = PREV_INSN (insn);
3738 flow_delete_insn (insn);
3741 /* Delete dead libcalls for propagate_block. Return the insn
3742 before the libcall. */
3745 propagate_block_delete_libcall (bb, insn, note)
3749 rtx first = XEXP (note, 0);
3750 rtx before = PREV_INSN (first);
3752 if (insn == bb->end)
3755 flow_delete_insn_chain (first, insn);
3759 /* Update the life-status of regs for one insn. Return the previous insn. */
3762 propagate_one_insn (pbi, insn)
3763 struct propagate_block_info *pbi;
3766 rtx prev = PREV_INSN (insn);
3767 int flags = pbi->flags;
3768 int insn_is_dead = 0;
3769 int libcall_is_dead = 0;
3773 if (! INSN_P (insn))
3776 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
3777 if (flags & PROP_SCAN_DEAD_CODE)
3779 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0, REG_NOTES (insn));
3780 libcall_is_dead = (insn_is_dead && note != 0
3781 && libcall_dead_p (pbi, note, insn));
3784 /* If an instruction consists of just dead store(s) on final pass,
3786 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
3788 /* If we're trying to delete a prologue or epilogue instruction
3789 that isn't flagged as possibly being dead, something is wrong.
3790 But if we are keeping the stack pointer depressed, we might well
3791 be deleting insns that are used to compute the amount to update
3792 it by, so they are fine. */
3793 if (reload_completed
3794 && !(TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
3795 && (TYPE_RETURNS_STACK_DEPRESSED
3796 (TREE_TYPE (current_function_decl))))
3797 && (((HAVE_epilogue || HAVE_prologue)
3798 && prologue_epilogue_contains (insn))
3799 || (HAVE_sibcall_epilogue
3800 && sibcall_epilogue_contains (insn)))
3801 && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
3804 /* Record sets. Do this even for dead instructions, since they
3805 would have killed the values if they hadn't been deleted. */
3806 mark_set_regs (pbi, PATTERN (insn), insn);
3808 /* CC0 is now known to be dead. Either this insn used it,
3809 in which case it doesn't anymore, or clobbered it,
3810 so the next insn can't use it. */
3813 if (libcall_is_dead)
3815 prev = propagate_block_delete_libcall (pbi->bb, insn, note);
3816 insn = NEXT_INSN (prev);
3819 propagate_block_delete_insn (pbi->bb, insn);
3824 /* See if this is an increment or decrement that can be merged into
3825 a following memory address. */
3828 register rtx x = single_set (insn);
3830 /* Does this instruction increment or decrement a register? */
3831 if ((flags & PROP_AUTOINC)
3833 && GET_CODE (SET_DEST (x)) == REG
3834 && (GET_CODE (SET_SRC (x)) == PLUS
3835 || GET_CODE (SET_SRC (x)) == MINUS)
3836 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
3837 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
3838 /* Ok, look for a following memory ref we can combine with.
3839 If one is found, change the memory ref to a PRE_INC
3840 or PRE_DEC, cancel this insn, and return 1.
3841 Return 0 if nothing has been done. */
3842 && try_pre_increment_1 (pbi, insn))
3845 #endif /* AUTO_INC_DEC */
3847 CLEAR_REG_SET (pbi->new_set);
3849 /* If this is not the final pass, and this insn is copying the value of
3850 a library call and it's dead, don't scan the insns that perform the
3851 library call, so that the call's arguments are not marked live. */
3852 if (libcall_is_dead)
3854 /* Record the death of the dest reg. */
3855 mark_set_regs (pbi, PATTERN (insn), insn);
3857 insn = XEXP (note, 0);
3858 return PREV_INSN (insn);
3860 else if (GET_CODE (PATTERN (insn)) == SET
3861 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
3862 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
3863 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
3864 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
3865 /* We have an insn to pop a constant amount off the stack.
3866 (Such insns use PLUS regardless of the direction of the stack,
3867 and any insn to adjust the stack by a constant is always a pop.)
3868 These insns, if not dead stores, have no effect on life. */
3872 /* Any regs live at the time of a call instruction must not go
3873 in a register clobbered by calls. Find all regs now live and
3874 record this for them. */
3876 if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
3877 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3878 { REG_N_CALLS_CROSSED (i)++; });
3880 /* Record sets. Do this even for dead instructions, since they
3881 would have killed the values if they hadn't been deleted. */
3882 mark_set_regs (pbi, PATTERN (insn), insn);
3884 if (GET_CODE (insn) == CALL_INSN)
3890 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3891 cond = COND_EXEC_TEST (PATTERN (insn));
3893 /* Non-constant calls clobber memory. */
3894 if (! CONST_CALL_P (insn))
3896 free_EXPR_LIST_list (&pbi->mem_set_list);
3897 pbi->mem_set_list_len = 0;
3900 /* There may be extra registers to be clobbered. */
3901 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3903 note = XEXP (note, 1))
3904 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3905 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
3906 cond, insn, pbi->flags);
3908 /* Calls change all call-used and global registers. */
3909 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3910 if (call_used_regs[i] && ! global_regs[i]
3913 /* We do not want REG_UNUSED notes for these registers. */
3914 mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
3916 pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
3920 /* If an insn doesn't use CC0, it becomes dead since we assume
3921 that every insn clobbers it. So show it dead here;
3922 mark_used_regs will set it live if it is referenced. */
3927 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
3929 /* Sometimes we may have inserted something before INSN (such as a move)
3930 when we make an auto-inc. So ensure we will scan those insns. */
3932 prev = PREV_INSN (insn);
3935 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
3941 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
3942 cond = COND_EXEC_TEST (PATTERN (insn));
3944 /* Calls use their arguments. */
3945 for (note = CALL_INSN_FUNCTION_USAGE (insn);
3947 note = XEXP (note, 1))
3948 if (GET_CODE (XEXP (note, 0)) == USE)
3949 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
3952 /* The stack ptr is used (honorarily) by a CALL insn. */
3953 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
3955 /* Calls may also reference any of the global registers,
3956 so they are made live. */
3957 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3959 mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
3964 /* On final pass, update counts of how many insns in which each reg
3966 if (flags & PROP_REG_INFO)
3967 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
3968 { REG_LIVE_LENGTH (i)++; });
3973 /* Initialize a propagate_block_info struct for public consumption.
3974 Note that the structure itself is opaque to this file, but that
3975 the user can use the regsets provided here. */
3977 struct propagate_block_info *
3978 init_propagate_block_info (bb, live, local_set, cond_local_set, flags)
3980 regset live, local_set, cond_local_set;
3983 struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));
3986 pbi->reg_live = live;
3987 pbi->mem_set_list = NULL_RTX;
3988 pbi->mem_set_list_len = 0;
3989 pbi->local_set = local_set;
3990 pbi->cond_local_set = cond_local_set;
3994 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
3995 pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
3997 pbi->reg_next_use = NULL;
3999 pbi->new_set = BITMAP_XMALLOC ();
4001 #ifdef HAVE_conditional_execution
4002 pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
4003 free_reg_cond_life_info);
4004 pbi->reg_cond_reg = BITMAP_XMALLOC ();
4006 /* If this block ends in a conditional branch, for each register live
4007 from one side of the branch and not the other, record the register
4008 as conditionally dead. */
4009 if (GET_CODE (bb->end) == JUMP_INSN
4010 && any_condjump_p (bb->end))
4012 regset_head diff_head;
4013 regset diff = INITIALIZE_REG_SET (diff_head);
4014 basic_block bb_true, bb_false;
4015 rtx cond_true, cond_false, set_src;
4018 /* Identify the successor blocks. */
4019 bb_true = bb->succ->dest;
4020 if (bb->succ->succ_next != NULL)
4022 bb_false = bb->succ->succ_next->dest;
4024 if (bb->succ->flags & EDGE_FALLTHRU)
4026 basic_block t = bb_false;
4030 else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
4035 /* This can happen with a conditional jump to the next insn. */
4036 if (JUMP_LABEL (bb->end) != bb_true->head)
4039 /* Simplest way to do nothing. */
4043 /* Extract the condition from the branch. */
4044 set_src = SET_SRC (pc_set (bb->end));
4045 cond_true = XEXP (set_src, 0);
4046 cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
4047 GET_MODE (cond_true), XEXP (cond_true, 0),
4048 XEXP (cond_true, 1));
4049 if (GET_CODE (XEXP (set_src, 1)) == PC)
4052 cond_false = cond_true;
4056 /* Compute which register lead different lives in the successors. */
4057 if (bitmap_operation (diff, bb_true->global_live_at_start,
4058 bb_false->global_live_at_start, BITMAP_XOR))
4060 rtx reg = XEXP (cond_true, 0);
4062 if (GET_CODE (reg) == SUBREG)
4063 reg = SUBREG_REG (reg);
4065 if (GET_CODE (reg) != REG)
4068 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (reg));
4070 /* For each such register, mark it conditionally dead. */
4071 EXECUTE_IF_SET_IN_REG_SET
4074 struct reg_cond_life_info *rcli;
4077 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
4079 if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
4083 rcli->condition = cond;
4085 splay_tree_insert (pbi->reg_cond_dead, i,
4086 (splay_tree_value) rcli);
4090 FREE_REG_SET (diff);
4094 /* If this block has no successors, any stores to the frame that aren't
4095 used later in the block are dead. So make a pass over the block
4096 recording any such that are made and show them dead at the end. We do
4097 a very conservative and simple job here. */
4099 && ! (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
4100 && (TYPE_RETURNS_STACK_DEPRESSED
4101 (TREE_TYPE (current_function_decl))))
4102 && (flags & PROP_SCAN_DEAD_CODE)
4103 && (bb->succ == NULL
4104 || (bb->succ->succ_next == NULL
4105 && bb->succ->dest == EXIT_BLOCK_PTR)))
4108 for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
4109 if (GET_CODE (insn) == INSN
4110 && GET_CODE (PATTERN (insn)) == SET
4111 && GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
4113 rtx mem = SET_DEST (PATTERN (insn));
4115 /* This optimization is performed by faking a store to the
4116 memory at the end of the block. This doesn't work for
4117 unchanging memories because multiple stores to unchanging
4118 memory is illegal and alias analysis doesn't consider it. */
4119 if (RTX_UNCHANGING_P (mem))
4122 if (XEXP (mem, 0) == frame_pointer_rtx
4123 || (GET_CODE (XEXP (mem, 0)) == PLUS
4124 && XEXP (XEXP (mem, 0), 0) == frame_pointer_rtx
4125 && GET_CODE (XEXP (XEXP (mem, 0), 1)) == CONST_INT))
4128 /* Store a copy of mem, otherwise the address may be scrogged
4129 by find_auto_inc. This matters because insn_dead_p uses
4130 an rtx_equal_p check to determine if two addresses are
4131 the same. This works before find_auto_inc, but fails
4132 after find_auto_inc, causing discrepencies between the
4133 set of live registers calculated during the
4134 calculate_global_regs_live phase and what actually exists
4135 after flow completes, leading to aborts. */
4136 if (flags & PROP_AUTOINC)
4137 mem = shallow_copy_rtx (mem);
4139 pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
4140 if (++pbi->mem_set_list_len >= MAX_MEM_SET_LIST_LEN)
4149 /* Release a propagate_block_info struct. */
4152 free_propagate_block_info (pbi)
4153 struct propagate_block_info *pbi;
4155 free_EXPR_LIST_list (&pbi->mem_set_list);
4157 BITMAP_XFREE (pbi->new_set);
4159 #ifdef HAVE_conditional_execution
4160 splay_tree_delete (pbi->reg_cond_dead);
4161 BITMAP_XFREE (pbi->reg_cond_reg);
4164 if (pbi->reg_next_use)
4165 free (pbi->reg_next_use);
4170 /* Compute the registers live at the beginning of a basic block BB from
4171 those live at the end.
4173 When called, REG_LIVE contains those live at the end. On return, it
4174 contains those live at the beginning.
4176 LOCAL_SET, if non-null, will be set with all registers killed
4177 unconditionally by this basic block.
4178 Likewise, COND_LOCAL_SET, if non-null, will be set with all registers
4179 killed conditionally by this basic block. If there is any unconditional
4180 set of a register, then the corresponding bit will be set in LOCAL_SET
4181 and cleared in COND_LOCAL_SET.
4182 It is valid for LOCAL_SET and COND_LOCAL_SET to be the same set. In this
4183 case, the resulting set will be equal to the union of the two sets that
4184 would otherwise be computed. */
4187 propagate_block (bb, live, local_set, cond_local_set, flags)
4191 regset cond_local_set;
4194 struct propagate_block_info *pbi;
4197 pbi = init_propagate_block_info (bb, live, local_set, cond_local_set, flags);
4199 if (flags & PROP_REG_INFO)
4203 /* Process the regs live at the end of the block.
4204 Mark them as not local to any one basic block. */
4205 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
4206 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
4209 /* Scan the block an insn at a time from end to beginning. */
4211 for (insn = bb->end;; insn = prev)
4213 /* If this is a call to `setjmp' et al, warn if any
4214 non-volatile datum is live. */
4215 if ((flags & PROP_REG_INFO)
4216 && GET_CODE (insn) == NOTE
4217 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
4218 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
4220 prev = propagate_one_insn (pbi, insn);
4222 if (insn == bb->head)
4226 free_propagate_block_info (pbi);
4229 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
4230 (SET expressions whose destinations are registers dead after the insn).
4231 NEEDED is the regset that says which regs are alive after the insn.
4233 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
4235 If X is the entire body of an insn, NOTES contains the reg notes
4236 pertaining to the insn. */
4239 insn_dead_p (pbi, x, call_ok, notes)
4240 struct propagate_block_info *pbi;
4243 rtx notes ATTRIBUTE_UNUSED;
4245 enum rtx_code code = GET_CODE (x);
4248 /* If flow is invoked after reload, we must take existing AUTO_INC
4249 expresions into account. */
4250 if (reload_completed)
4252 for (; notes; notes = XEXP (notes, 1))
4254 if (REG_NOTE_KIND (notes) == REG_INC)
4256 int regno = REGNO (XEXP (notes, 0));
4258 /* Don't delete insns to set global regs. */
4259 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
4260 || REGNO_REG_SET_P (pbi->reg_live, regno))
4267 /* If setting something that's a reg or part of one,
4268 see if that register's altered value will be live. */
4272 rtx r = SET_DEST (x);
4275 if (GET_CODE (r) == CC0)
4276 return ! pbi->cc0_live;
4279 /* A SET that is a subroutine call cannot be dead. */
4280 if (GET_CODE (SET_SRC (x)) == CALL)
4286 /* Don't eliminate loads from volatile memory or volatile asms. */
4287 else if (volatile_refs_p (SET_SRC (x)))
4290 if (GET_CODE (r) == MEM)
4294 if (MEM_VOLATILE_P (r))
4297 /* Walk the set of memory locations we are currently tracking
4298 and see if one is an identical match to this memory location.
4299 If so, this memory write is dead (remember, we're walking
4300 backwards from the end of the block to the start). */
4301 temp = pbi->mem_set_list;
4304 rtx mem = XEXP (temp, 0);
4306 if (rtx_equal_p (mem, r))
4309 /* Check if memory reference matches an auto increment. Only
4310 post increment/decrement or modify are valid. */
4311 if (GET_MODE (mem) == GET_MODE (r)
4312 && (GET_CODE (XEXP (mem, 0)) == POST_DEC
4313 || GET_CODE (XEXP (mem, 0)) == POST_INC
4314 || GET_CODE (XEXP (mem, 0)) == POST_MODIFY)
4315 && GET_MODE (XEXP (mem, 0)) == GET_MODE (r)
4316 && rtx_equal_p (XEXP (XEXP (mem, 0), 0), XEXP (r, 0)))
4319 temp = XEXP (temp, 1);
4324 while (GET_CODE (r) == SUBREG
4325 || GET_CODE (r) == STRICT_LOW_PART
4326 || GET_CODE (r) == ZERO_EXTRACT)
4329 if (GET_CODE (r) == REG)
4331 int regno = REGNO (r);
4334 if (REGNO_REG_SET_P (pbi->reg_live, regno))
4337 /* If this is a hard register, verify that subsequent
4338 words are not needed. */
4339 if (regno < FIRST_PSEUDO_REGISTER)
4341 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
4344 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
4348 /* Don't delete insns to set global regs. */
4349 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
4352 /* Make sure insns to set the stack pointer aren't deleted. */
4353 if (regno == STACK_POINTER_REGNUM)
4356 /* ??? These bits might be redundant with the force live bits
4357 in calculate_global_regs_live. We would delete from
4358 sequential sets; whether this actually affects real code
4359 for anything but the stack pointer I don't know. */
4360 /* Make sure insns to set the frame pointer aren't deleted. */
4361 if (regno == FRAME_POINTER_REGNUM
4362 && (! reload_completed || frame_pointer_needed))
4364 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4365 if (regno == HARD_FRAME_POINTER_REGNUM
4366 && (! reload_completed || frame_pointer_needed))
4370 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4371 /* Make sure insns to set arg pointer are never deleted
4372 (if the arg pointer isn't fixed, there will be a USE
4373 for it, so we can treat it normally). */
4374 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
4378 /* Otherwise, the set is dead. */
4384 /* If performing several activities, insn is dead if each activity
4385 is individually dead. Also, CLOBBERs and USEs can be ignored; a
4386 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
4388 else if (code == PARALLEL)
4390 int i = XVECLEN (x, 0);
4392 for (i--; i >= 0; i--)
4393 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
4394 && GET_CODE (XVECEXP (x, 0, i)) != USE
4395 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
4401 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
4402 is not necessarily true for hard registers. */
4403 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
4404 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
4405 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
4408 /* We do not check other CLOBBER or USE here. An insn consisting of just
4409 a CLOBBER or just a USE should not be deleted. */
4413 /* If INSN is the last insn in a libcall, and assuming INSN is dead,
4414 return 1 if the entire library call is dead.
4415 This is true if INSN copies a register (hard or pseudo)
4416 and if the hard return reg of the call insn is dead.
4417 (The caller should have tested the destination of the SET inside
4418 INSN already for death.)
4420 If this insn doesn't just copy a register, then we don't
4421 have an ordinary libcall. In that case, cse could not have
4422 managed to substitute the source for the dest later on,
4423 so we can assume the libcall is dead.
4425 PBI is the block info giving pseudoregs live before this insn.
4426 NOTE is the REG_RETVAL note of the insn. */
4429 libcall_dead_p (pbi, note, insn)
4430 struct propagate_block_info *pbi;
4434 rtx x = single_set (insn);
4438 register rtx r = SET_SRC (x);
4439 if (GET_CODE (r) == REG)
4441 rtx call = XEXP (note, 0);
4445 /* Find the call insn. */
4446 while (call != insn && GET_CODE (call) != CALL_INSN)
4447 call = NEXT_INSN (call);
4449 /* If there is none, do nothing special,
4450 since ordinary death handling can understand these insns. */
4454 /* See if the hard reg holding the value is dead.
4455 If this is a PARALLEL, find the call within it. */
4456 call_pat = PATTERN (call);
4457 if (GET_CODE (call_pat) == PARALLEL)
4459 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
4460 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
4461 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
4464 /* This may be a library call that is returning a value
4465 via invisible pointer. Do nothing special, since
4466 ordinary death handling can understand these insns. */
4470 call_pat = XVECEXP (call_pat, 0, i);
4473 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
4479 /* Return 1 if register REGNO was used before it was set, i.e. if it is
4480 live at function entry. Don't count global register variables, variables
4481 in registers that can be used for function arg passing, or variables in
4482 fixed hard registers. */
4485 regno_uninitialized (regno)
4488 if (n_basic_blocks == 0
4489 || (regno < FIRST_PSEUDO_REGISTER
4490 && (global_regs[regno]
4491 || fixed_regs[regno]
4492 || FUNCTION_ARG_REGNO_P (regno))))
4495 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
4498 /* 1 if register REGNO was alive at a place where `setjmp' was called
4499 and was set more than once or is an argument.
4500 Such regs may be clobbered by `longjmp'. */
4503 regno_clobbered_at_setjmp (regno)
4506 if (n_basic_blocks == 0)
4509 return ((REG_N_SETS (regno) > 1
4510 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
4511 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
4514 /* INSN references memory, possibly using autoincrement addressing modes.
4515 Find any entries on the mem_set_list that need to be invalidated due
4516 to an address change. */
4519 invalidate_mems_from_autoinc (pbi, insn)
4520 struct propagate_block_info *pbi;
4523 rtx note = REG_NOTES (insn);
4524 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
4526 if (REG_NOTE_KIND (note) == REG_INC)
4528 rtx temp = pbi->mem_set_list;
4529 rtx prev = NULL_RTX;
4534 next = XEXP (temp, 1);
4535 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
4537 /* Splice temp out of list. */
4539 XEXP (prev, 1) = next;
4541 pbi->mem_set_list = next;
4542 free_EXPR_LIST_node (temp);
4543 pbi->mem_set_list_len--;
4553 /* EXP is either a MEM or a REG. Remove any dependant entries
4554 from pbi->mem_set_list. */
4557 invalidate_mems_from_set (pbi, exp)
4558 struct propagate_block_info *pbi;
4561 rtx temp = pbi->mem_set_list;
4562 rtx prev = NULL_RTX;
4567 next = XEXP (temp, 1);
4568 if ((GET_CODE (exp) == MEM
4569 && output_dependence (XEXP (temp, 0), exp))
4570 || (GET_CODE (exp) == REG
4571 && reg_overlap_mentioned_p (exp, XEXP (temp, 0))))
4573 /* Splice this entry out of the list. */
4575 XEXP (prev, 1) = next;
4577 pbi->mem_set_list = next;
4578 free_EXPR_LIST_node (temp);
4579 pbi->mem_set_list_len--;
4587 /* Process the registers that are set within X. Their bits are set to
4588 1 in the regset DEAD, because they are dead prior to this insn.
4590 If INSN is nonzero, it is the insn being processed.
4592 FLAGS is the set of operations to perform. */
4595 mark_set_regs (pbi, x, insn)
4596 struct propagate_block_info *pbi;
4599 rtx cond = NULL_RTX;
4604 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
4606 if (REG_NOTE_KIND (link) == REG_INC)
4607 mark_set_1 (pbi, SET, XEXP (link, 0),
4608 (GET_CODE (x) == COND_EXEC
4609 ? COND_EXEC_TEST (x) : NULL_RTX),
4613 switch (code = GET_CODE (x))
4617 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
4621 cond = COND_EXEC_TEST (x);
4622 x = COND_EXEC_CODE (x);
4628 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
4630 rtx sub = XVECEXP (x, 0, i);
4631 switch (code = GET_CODE (sub))
4634 if (cond != NULL_RTX)
4637 cond = COND_EXEC_TEST (sub);
4638 sub = COND_EXEC_CODE (sub);
4639 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
4645 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
4660 /* Process a single SET rtx, X. */
4663 mark_set_1 (pbi, code, reg, cond, insn, flags)
4664 struct propagate_block_info *pbi;
4666 rtx reg, cond, insn;
4669 int regno_first = -1, regno_last = -1;
4673 /* Modifying just one hardware register of a multi-reg value or just a
4674 byte field of a register does not mean the value from before this insn
4675 is now dead. Of course, if it was dead after it's unused now. */
4677 switch (GET_CODE (reg))
4680 /* Some targets place small structures in registers for return values of
4681 functions. We have to detect this case specially here to get correct
4682 flow information. */
4683 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
4684 if (XEXP (XVECEXP (reg, 0, i), 0) != 0)
4685 mark_set_1 (pbi, code, XEXP (XVECEXP (reg, 0, i), 0), cond, insn,
4691 case STRICT_LOW_PART:
4692 /* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
4694 reg = XEXP (reg, 0);
4695 while (GET_CODE (reg) == SUBREG
4696 || GET_CODE (reg) == ZERO_EXTRACT
4697 || GET_CODE (reg) == SIGN_EXTRACT
4698 || GET_CODE (reg) == STRICT_LOW_PART);
4699 if (GET_CODE (reg) == MEM)
4701 not_dead = REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
4705 regno_last = regno_first = REGNO (reg);
4706 if (regno_first < FIRST_PSEUDO_REGISTER)
4707 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
4711 if (GET_CODE (SUBREG_REG (reg)) == REG)
4713 enum machine_mode outer_mode = GET_MODE (reg);
4714 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
4716 /* Identify the range of registers affected. This is moderately
4717 tricky for hard registers. See alter_subreg. */
4719 regno_last = regno_first = REGNO (SUBREG_REG (reg));
4720 if (regno_first < FIRST_PSEUDO_REGISTER)
4722 #ifdef ALTER_HARD_SUBREG
4723 regno_first = ALTER_HARD_SUBREG (outer_mode, SUBREG_WORD (reg),
4724 inner_mode, regno_first);
4726 regno_first += SUBREG_WORD (reg);
4728 regno_last = (regno_first
4729 + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
4731 /* Since we've just adjusted the register number ranges, make
4732 sure REG matches. Otherwise some_was_live will be clear
4733 when it shouldn't have been, and we'll create incorrect
4734 REG_UNUSED notes. */
4735 reg = gen_rtx_REG (outer_mode, regno_first);
4739 /* If the number of words in the subreg is less than the number
4740 of words in the full register, we have a well-defined partial
4741 set. Otherwise the high bits are undefined.
4743 This is only really applicable to pseudos, since we just took
4744 care of multi-word hard registers. */
4745 if (((GET_MODE_SIZE (outer_mode)
4746 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
4747 < ((GET_MODE_SIZE (inner_mode)
4748 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
4749 not_dead = REGNO_REG_SET_P (pbi->reg_live, regno_first);
4751 reg = SUBREG_REG (reg);
4755 reg = SUBREG_REG (reg);
4762 /* If this set is a MEM, then it kills any aliased writes.
4763 If this set is a REG, then it kills any MEMs which use the reg. */
4764 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
4766 if (GET_CODE (reg) == MEM || GET_CODE (reg) == REG)
4767 invalidate_mems_from_set (pbi, reg);
4769 /* If the memory reference had embedded side effects (autoincrement
4770 address modes. Then we may need to kill some entries on the
4772 if (insn && GET_CODE (reg) == MEM)
4773 invalidate_mems_from_autoinc (pbi, insn);
4775 if (pbi->mem_set_list_len < MAX_MEM_SET_LIST_LEN
4776 && GET_CODE (reg) == MEM && ! side_effects_p (reg)
4777 /* ??? With more effort we could track conditional memory life. */
4779 /* We do not know the size of a BLKmode store, so we do not track
4780 them for redundant store elimination. */
4781 && GET_MODE (reg) != BLKmode
4782 /* There are no REG_INC notes for SP, so we can't assume we'll see
4783 everything that invalidates it. To be safe, don't eliminate any
4784 stores though SP; none of them should be redundant anyway. */
4785 && ! reg_mentioned_p (stack_pointer_rtx, reg))
4788 /* Store a copy of mem, otherwise the address may be
4789 scrogged by find_auto_inc. */
4790 if (flags & PROP_AUTOINC)
4791 reg = shallow_copy_rtx (reg);
4793 pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
4794 pbi->mem_set_list_len++;
4798 if (GET_CODE (reg) == REG
4799 && ! (regno_first == FRAME_POINTER_REGNUM
4800 && (! reload_completed || frame_pointer_needed))
4801 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4802 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
4803 && (! reload_completed || frame_pointer_needed))
4805 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4806 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
4810 int some_was_live = 0, some_was_dead = 0;
4812 for (i = regno_first; i <= regno_last; ++i)
4814 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
4817 /* Order of the set operation matters here since both
4818 sets may be the same. */
4819 CLEAR_REGNO_REG_SET (pbi->cond_local_set, i);
4820 if (cond != NULL_RTX
4821 && ! REGNO_REG_SET_P (pbi->local_set, i))
4822 SET_REGNO_REG_SET (pbi->cond_local_set, i);
4824 SET_REGNO_REG_SET (pbi->local_set, i);
4826 if (code != CLOBBER)
4827 SET_REGNO_REG_SET (pbi->new_set, i);
4829 some_was_live |= needed_regno;
4830 some_was_dead |= ! needed_regno;
4833 #ifdef HAVE_conditional_execution
4834 /* Consider conditional death in deciding that the register needs
4836 if (some_was_live && ! not_dead
4837 /* The stack pointer is never dead. Well, not strictly true,
4838 but it's very difficult to tell from here. Hopefully
4839 combine_stack_adjustments will fix up the most egregious
4841 && regno_first != STACK_POINTER_REGNUM)
4843 for (i = regno_first; i <= regno_last; ++i)
4844 if (! mark_regno_cond_dead (pbi, i, cond))
4849 /* Additional data to record if this is the final pass. */
4850 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
4851 | PROP_DEATH_NOTES | PROP_AUTOINC))
4854 register int blocknum = pbi->bb->index;
4857 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4859 y = pbi->reg_next_use[regno_first];
4861 /* The next use is no longer next, since a store intervenes. */
4862 for (i = regno_first; i <= regno_last; ++i)
4863 pbi->reg_next_use[i] = 0;
4866 if (flags & PROP_REG_INFO)
4868 for (i = regno_first; i <= regno_last; ++i)
4870 /* Count (weighted) references, stores, etc. This counts a
4871 register twice if it is modified, but that is correct. */
4872 REG_N_SETS (i) += 1;
4873 REG_N_REFS (i) += (optimize_size ? 1
4874 : pbi->bb->loop_depth + 1);
4876 /* The insns where a reg is live are normally counted
4877 elsewhere, but we want the count to include the insn
4878 where the reg is set, and the normal counting mechanism
4879 would not count it. */
4880 REG_LIVE_LENGTH (i) += 1;
4883 /* If this is a hard reg, record this function uses the reg. */
4884 if (regno_first < FIRST_PSEUDO_REGISTER)
4886 for (i = regno_first; i <= regno_last; i++)
4887 regs_ever_live[i] = 1;
4891 /* Keep track of which basic blocks each reg appears in. */
4892 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
4893 REG_BASIC_BLOCK (regno_first) = blocknum;
4894 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
4895 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
4899 if (! some_was_dead)
4901 if (flags & PROP_LOG_LINKS)
4903 /* Make a logical link from the next following insn
4904 that uses this register, back to this insn.
4905 The following insns have already been processed.
4907 We don't build a LOG_LINK for hard registers containing
4908 in ASM_OPERANDs. If these registers get replaced,
4909 we might wind up changing the semantics of the insn,
4910 even if reload can make what appear to be valid
4911 assignments later. */
4912 if (y && (BLOCK_NUM (y) == blocknum)
4913 && (regno_first >= FIRST_PSEUDO_REGISTER
4914 || asm_noperands (PATTERN (y)) < 0))
4915 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
4920 else if (! some_was_live)
4922 if (flags & PROP_REG_INFO)
4923 REG_N_DEATHS (regno_first) += 1;
4925 if (flags & PROP_DEATH_NOTES)
4927 /* Note that dead stores have already been deleted
4928 when possible. If we get here, we have found a
4929 dead store that cannot be eliminated (because the
4930 same insn does something useful). Indicate this
4931 by marking the reg being set as dying here. */
4933 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4938 if (flags & PROP_DEATH_NOTES)
4940 /* This is a case where we have a multi-word hard register
4941 and some, but not all, of the words of the register are
4942 needed in subsequent insns. Write REG_UNUSED notes
4943 for those parts that were not needed. This case should
4946 for (i = regno_first; i <= regno_last; ++i)
4947 if (! REGNO_REG_SET_P (pbi->reg_live, i))
4949 = alloc_EXPR_LIST (REG_UNUSED,
4950 gen_rtx_REG (reg_raw_mode[i], i),
4956 /* Mark the register as being dead. */
4959 /* The stack pointer is never dead. Well, not strictly true,
4960 but it's very difficult to tell from here. Hopefully
4961 combine_stack_adjustments will fix up the most egregious
4963 && regno_first != STACK_POINTER_REGNUM)
4965 for (i = regno_first; i <= regno_last; ++i)
4966 CLEAR_REGNO_REG_SET (pbi->reg_live, i);
4969 else if (GET_CODE (reg) == REG)
4971 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
4972 pbi->reg_next_use[regno_first] = 0;
4975 /* If this is the last pass and this is a SCRATCH, show it will be dying
4976 here and count it. */
4977 else if (GET_CODE (reg) == SCRATCH)
4979 if (flags & PROP_DEATH_NOTES)
4981 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
4985 #ifdef HAVE_conditional_execution
4986 /* Mark REGNO conditionally dead.
4987 Return true if the register is now unconditionally dead. */
4990 mark_regno_cond_dead (pbi, regno, cond)
4991 struct propagate_block_info *pbi;
4995 /* If this is a store to a predicate register, the value of the
4996 predicate is changing, we don't know that the predicate as seen
4997 before is the same as that seen after. Flush all dependent
4998 conditions from reg_cond_dead. This will make all such
4999 conditionally live registers unconditionally live. */
5000 if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
5001 flush_reg_cond_reg (pbi, regno);
5003 /* If this is an unconditional store, remove any conditional
5004 life that may have existed. */
5005 if (cond == NULL_RTX)
5006 splay_tree_remove (pbi->reg_cond_dead, regno);
5009 splay_tree_node node;
5010 struct reg_cond_life_info *rcli;
5013 /* Otherwise this is a conditional set. Record that fact.
5014 It may have been conditionally used, or there may be a
5015 subsequent set with a complimentary condition. */
5017 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
5020 /* The register was unconditionally live previously.
5021 Record the current condition as the condition under
5022 which it is dead. */
5023 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
5024 rcli->condition = cond;
5025 splay_tree_insert (pbi->reg_cond_dead, regno,
5026 (splay_tree_value) rcli);
5028 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
5030 /* Not unconditionaly dead. */
5035 /* The register was conditionally live previously.
5036 Add the new condition to the old. */
5037 rcli = (struct reg_cond_life_info *) node->value;
5038 ncond = rcli->condition;
5039 ncond = ior_reg_cond (ncond, cond, 1);
5041 /* If the register is now unconditionally dead,
5042 remove the entry in the splay_tree. */
5043 if (ncond == const1_rtx)
5044 splay_tree_remove (pbi->reg_cond_dead, regno);
5047 rcli->condition = ncond;
5049 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
5051 /* Not unconditionaly dead. */
5060 /* Called from splay_tree_delete for pbi->reg_cond_life. */
5063 free_reg_cond_life_info (value)
5064 splay_tree_value value;
5066 struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
5070 /* Helper function for flush_reg_cond_reg. */
5073 flush_reg_cond_reg_1 (node, data)
5074 splay_tree_node node;
5077 struct reg_cond_life_info *rcli;
5078 int *xdata = (int *) data;
5079 unsigned int regno = xdata[0];
5081 /* Don't need to search if last flushed value was farther on in
5082 the in-order traversal. */
5083 if (xdata[1] >= (int) node->key)
5086 /* Splice out portions of the expression that refer to regno. */
5087 rcli = (struct reg_cond_life_info *) node->value;
5088 rcli->condition = elim_reg_cond (rcli->condition, regno);
5090 /* If the entire condition is now false, signal the node to be removed. */
5091 if (rcli->condition == const0_rtx)
5093 xdata[1] = node->key;
5096 else if (rcli->condition == const1_rtx)
5102 /* Flush all (sub) expressions referring to REGNO from REG_COND_LIVE. */
5105 flush_reg_cond_reg (pbi, regno)
5106 struct propagate_block_info *pbi;
5113 while (splay_tree_foreach (pbi->reg_cond_dead,
5114 flush_reg_cond_reg_1, pair) == -1)
5115 splay_tree_remove (pbi->reg_cond_dead, pair[1]);
5117 CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
5120 /* Logical arithmetic on predicate conditions. IOR, NOT and AND.
5121 For ior/and, the ADD flag determines whether we want to add the new
5122 condition X to the old one unconditionally. If it is zero, we will
5123 only return a new expression if X allows us to simplify part of
5124 OLD, otherwise we return OLD unchanged to the caller.
5125 If ADD is nonzero, we will return a new condition in all cases. The
5126 toplevel caller of one of these functions should always pass 1 for
5130 ior_reg_cond (old, x, add)
5136 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
5138 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
5139 && GET_CODE (x) == reverse_condition (GET_CODE (old))
5140 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
5142 if (GET_CODE (x) == GET_CODE (old)
5143 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
5147 return gen_rtx_IOR (0, old, x);
5150 switch (GET_CODE (old))
5153 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
5154 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
5155 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
5157 if (op0 == const0_rtx)
5159 if (op1 == const0_rtx)
5161 if (op0 == const1_rtx || op1 == const1_rtx)
5163 if (op0 == XEXP (old, 0))
5164 op0 = gen_rtx_IOR (0, op0, x);
5166 op1 = gen_rtx_IOR (0, op1, x);
5167 return gen_rtx_IOR (0, op0, op1);
5171 return gen_rtx_IOR (0, old, x);
5174 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
5175 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
5176 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
5178 if (op0 == const1_rtx)
5180 if (op1 == const1_rtx)
5182 if (op0 == const0_rtx || op1 == const0_rtx)
5184 if (op0 == XEXP (old, 0))
5185 op0 = gen_rtx_IOR (0, op0, x);
5187 op1 = gen_rtx_IOR (0, op1, x);
5188 return gen_rtx_AND (0, op0, op1);
5192 return gen_rtx_IOR (0, old, x);
5195 op0 = and_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
5196 if (op0 != XEXP (old, 0))
5197 return not_reg_cond (op0);
5200 return gen_rtx_IOR (0, old, x);
5211 enum rtx_code x_code;
5213 if (x == const0_rtx)
5215 else if (x == const1_rtx)
5217 x_code = GET_CODE (x);
5220 if (GET_RTX_CLASS (x_code) == '<'
5221 && GET_CODE (XEXP (x, 0)) == REG)
5223 if (XEXP (x, 1) != const0_rtx)
5226 return gen_rtx_fmt_ee (reverse_condition (x_code),
5227 VOIDmode, XEXP (x, 0), const0_rtx);
5229 return gen_rtx_NOT (0, x);
5233 and_reg_cond (old, x, add)
5239 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
5241 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
5242 && GET_CODE (x) == reverse_condition (GET_CODE (old))
5243 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
5245 if (GET_CODE (x) == GET_CODE (old)
5246 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
5250 return gen_rtx_AND (0, old, x);
5253 switch (GET_CODE (old))
5256 op0 = and_reg_cond (XEXP (old, 0), x, 0);
5257 op1 = and_reg_cond (XEXP (old, 1), x, 0);
5258 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
5260 if (op0 == const0_rtx)
5262 if (op1 == const0_rtx)
5264 if (op0 == const1_rtx || op1 == const1_rtx)
5266 if (op0 == XEXP (old, 0))
5267 op0 = gen_rtx_AND (0, op0, x);
5269 op1 = gen_rtx_AND (0, op1, x);
5270 return gen_rtx_IOR (0, op0, op1);
5274 return gen_rtx_AND (0, old, x);
5277 op0 = and_reg_cond (XEXP (old, 0), x, 0);
5278 op1 = and_reg_cond (XEXP (old, 1), x, 0);
5279 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
5281 if (op0 == const1_rtx)
5283 if (op1 == const1_rtx)
5285 if (op0 == const0_rtx || op1 == const0_rtx)
5287 if (op0 == XEXP (old, 0))
5288 op0 = gen_rtx_AND (0, op0, x);
5290 op1 = gen_rtx_AND (0, op1, x);
5291 return gen_rtx_AND (0, op0, op1);
5295 return gen_rtx_AND (0, old, x);
5298 op0 = ior_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
5299 if (op0 != XEXP (old, 0))
5300 return not_reg_cond (op0);
5303 return gen_rtx_AND (0, old, x);
5310 /* Given a condition X, remove references to reg REGNO and return the
5311 new condition. The removal will be done so that all conditions
5312 involving REGNO are considered to evaluate to false. This function
5313 is used when the value of REGNO changes. */
5316 elim_reg_cond (x, regno)
5322 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
5324 if (REGNO (XEXP (x, 0)) == regno)
5329 switch (GET_CODE (x))
5332 op0 = elim_reg_cond (XEXP (x, 0), regno);
5333 op1 = elim_reg_cond (XEXP (x, 1), regno);
5334 if (op0 == const0_rtx || op1 == const0_rtx)
5336 if (op0 == const1_rtx)
5338 if (op1 == const1_rtx)
5340 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
5342 return gen_rtx_AND (0, op0, op1);
5345 op0 = elim_reg_cond (XEXP (x, 0), regno);
5346 op1 = elim_reg_cond (XEXP (x, 1), regno);
5347 if (op0 == const1_rtx || op1 == const1_rtx)
5349 if (op0 == const0_rtx)
5351 if (op1 == const0_rtx)
5353 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
5355 return gen_rtx_IOR (0, op0, op1);
5358 op0 = elim_reg_cond (XEXP (x, 0), regno);
5359 if (op0 == const0_rtx)
5361 if (op0 == const1_rtx)
5363 if (op0 != XEXP (x, 0))
5364 return not_reg_cond (op0);
5371 #endif /* HAVE_conditional_execution */
5375 /* Try to substitute the auto-inc expression INC as the address inside
5376 MEM which occurs in INSN. Currently, the address of MEM is an expression
5377 involving INCR_REG, and INCR is the next use of INCR_REG; it is an insn
5378 that has a single set whose source is a PLUS of INCR_REG and something
5382 attempt_auto_inc (pbi, inc, insn, mem, incr, incr_reg)
5383 struct propagate_block_info *pbi;
5384 rtx inc, insn, mem, incr, incr_reg;
5386 int regno = REGNO (incr_reg);
5387 rtx set = single_set (incr);
5388 rtx q = SET_DEST (set);
5389 rtx y = SET_SRC (set);
5390 int opnum = XEXP (y, 0) == incr_reg ? 0 : 1;
5392 /* Make sure this reg appears only once in this insn. */
5393 if (count_occurrences (PATTERN (insn), incr_reg, 1) != 1)
5396 if (dead_or_set_p (incr, incr_reg)
5397 /* Mustn't autoinc an eliminable register. */
5398 && (regno >= FIRST_PSEUDO_REGISTER
5399 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
5401 /* This is the simple case. Try to make the auto-inc. If
5402 we can't, we are done. Otherwise, we will do any
5403 needed updates below. */
5404 if (! validate_change (insn, &XEXP (mem, 0), inc, 0))
5407 else if (GET_CODE (q) == REG
5408 /* PREV_INSN used here to check the semi-open interval
5410 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
5411 /* We must also check for sets of q as q may be
5412 a call clobbered hard register and there may
5413 be a call between PREV_INSN (insn) and incr. */
5414 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
5416 /* We have *p followed sometime later by q = p+size.
5417 Both p and q must be live afterward,
5418 and q is not used between INSN and its assignment.
5419 Change it to q = p, ...*q..., q = q+size.
5420 Then fall into the usual case. */
5424 emit_move_insn (q, incr_reg);
5425 insns = get_insns ();
5428 if (basic_block_for_insn)
5429 for (temp = insns; temp; temp = NEXT_INSN (temp))
5430 set_block_for_insn (temp, pbi->bb);
5432 /* If we can't make the auto-inc, or can't make the
5433 replacement into Y, exit. There's no point in making
5434 the change below if we can't do the auto-inc and doing
5435 so is not correct in the pre-inc case. */
5438 validate_change (insn, &XEXP (mem, 0), inc, 1);
5439 validate_change (incr, &XEXP (y, opnum), q, 1);
5440 if (! apply_change_group ())
5443 /* We now know we'll be doing this change, so emit the
5444 new insn(s) and do the updates. */
5445 emit_insns_before (insns, insn);
5447 if (pbi->bb->head == insn)
5448 pbi->bb->head = insns;
5450 /* INCR will become a NOTE and INSN won't contain a
5451 use of INCR_REG. If a use of INCR_REG was just placed in
5452 the insn before INSN, make that the next use.
5453 Otherwise, invalidate it. */
5454 if (GET_CODE (PREV_INSN (insn)) == INSN
5455 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
5456 && SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg)
5457 pbi->reg_next_use[regno] = PREV_INSN (insn);
5459 pbi->reg_next_use[regno] = 0;
5464 /* REGNO is now used in INCR which is below INSN, but
5465 it previously wasn't live here. If we don't mark
5466 it as live, we'll put a REG_DEAD note for it
5467 on this insn, which is incorrect. */
5468 SET_REGNO_REG_SET (pbi->reg_live, regno);
5470 /* If there are any calls between INSN and INCR, show
5471 that REGNO now crosses them. */
5472 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
5473 if (GET_CODE (temp) == CALL_INSN)
5474 REG_N_CALLS_CROSSED (regno)++;
5479 /* If we haven't returned, it means we were able to make the
5480 auto-inc, so update the status. First, record that this insn
5481 has an implicit side effect. */
5483 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, incr_reg, REG_NOTES (insn));
5485 /* Modify the old increment-insn to simply copy
5486 the already-incremented value of our register. */
5487 if (! validate_change (incr, &SET_SRC (set), incr_reg, 0))
5490 /* If that makes it a no-op (copying the register into itself) delete
5491 it so it won't appear to be a "use" and a "set" of this
5493 if (REGNO (SET_DEST (set)) == REGNO (incr_reg))
5495 /* If the original source was dead, it's dead now. */
5498 while ((note = find_reg_note (incr, REG_DEAD, NULL_RTX)) != NULL_RTX)
5500 remove_note (incr, note);
5501 if (XEXP (note, 0) != incr_reg)
5502 CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
5505 PUT_CODE (incr, NOTE);
5506 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
5507 NOTE_SOURCE_FILE (incr) = 0;
5510 if (regno >= FIRST_PSEUDO_REGISTER)
5512 /* Count an extra reference to the reg. When a reg is
5513 incremented, spilling it is worse, so we want to make
5514 that less likely. */
5515 REG_N_REFS (regno) += (optimize_size ? 1 : pbi->bb->loop_depth + 1);
5517 /* Count the increment as a setting of the register,
5518 even though it isn't a SET in rtl. */
5519 REG_N_SETS (regno)++;
5523 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
5527 find_auto_inc (pbi, x, insn)
5528 struct propagate_block_info *pbi;
5532 rtx addr = XEXP (x, 0);
5533 HOST_WIDE_INT offset = 0;
5534 rtx set, y, incr, inc_val;
5536 int size = GET_MODE_SIZE (GET_MODE (x));
5538 if (GET_CODE (insn) == JUMP_INSN)
5541 /* Here we detect use of an index register which might be good for
5542 postincrement, postdecrement, preincrement, or predecrement. */
5544 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
5545 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
5547 if (GET_CODE (addr) != REG)
5550 regno = REGNO (addr);
5552 /* Is the next use an increment that might make auto-increment? */
5553 incr = pbi->reg_next_use[regno];
5554 if (incr == 0 || BLOCK_NUM (incr) != BLOCK_NUM (insn))
5556 set = single_set (incr);
5557 if (set == 0 || GET_CODE (set) != SET)
5561 if (GET_CODE (y) != PLUS)
5564 if (REG_P (XEXP (y, 0)) && REGNO (XEXP (y, 0)) == REGNO (addr))
5565 inc_val = XEXP (y, 1);
5566 else if (REG_P (XEXP (y, 1)) && REGNO (XEXP (y, 1)) == REGNO (addr))
5567 inc_val = XEXP (y, 0);
5571 if (GET_CODE (inc_val) == CONST_INT)
5573 if (HAVE_POST_INCREMENT
5574 && (INTVAL (inc_val) == size && offset == 0))
5575 attempt_auto_inc (pbi, gen_rtx_POST_INC (Pmode, addr), insn, x,
5577 else if (HAVE_POST_DECREMENT
5578 && (INTVAL (inc_val) == -size && offset == 0))
5579 attempt_auto_inc (pbi, gen_rtx_POST_DEC (Pmode, addr), insn, x,
5581 else if (HAVE_PRE_INCREMENT
5582 && (INTVAL (inc_val) == size && offset == size))
5583 attempt_auto_inc (pbi, gen_rtx_PRE_INC (Pmode, addr), insn, x,
5585 else if (HAVE_PRE_DECREMENT
5586 && (INTVAL (inc_val) == -size && offset == -size))
5587 attempt_auto_inc (pbi, gen_rtx_PRE_DEC (Pmode, addr), insn, x,
5589 else if (HAVE_POST_MODIFY_DISP && offset == 0)
5590 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
5591 gen_rtx_PLUS (Pmode,
5594 insn, x, incr, addr);
5596 else if (GET_CODE (inc_val) == REG
5597 && ! reg_set_between_p (inc_val, PREV_INSN (insn),
5601 if (HAVE_POST_MODIFY_REG && offset == 0)
5602 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
5603 gen_rtx_PLUS (Pmode,
5606 insn, x, incr, addr);
5610 #endif /* AUTO_INC_DEC */
5613 mark_used_reg (pbi, reg, cond, insn)
5614 struct propagate_block_info *pbi;
5616 rtx cond ATTRIBUTE_UNUSED;
5619 int regno = REGNO (reg);
5620 int some_was_live = REGNO_REG_SET_P (pbi->reg_live, regno);
5621 int some_was_dead = ! some_was_live;
5625 /* A hard reg in a wide mode may really be multiple registers.
5626 If so, mark all of them just like the first. */
5627 if (regno < FIRST_PSEUDO_REGISTER)
5629 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5632 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, regno + n);
5633 some_was_live |= needed_regno;
5634 some_was_dead |= ! needed_regno;
5638 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
5640 /* Record where each reg is used, so when the reg is set we know
5641 the next insn that uses it. */
5642 pbi->reg_next_use[regno] = insn;
5645 if (pbi->flags & PROP_REG_INFO)
5647 if (regno < FIRST_PSEUDO_REGISTER)
5649 /* If this is a register we are going to try to eliminate,
5650 don't mark it live here. If we are successful in
5651 eliminating it, it need not be live unless it is used for
5652 pseudos, in which case it will have been set live when it
5653 was allocated to the pseudos. If the register will not
5654 be eliminated, reload will set it live at that point.
5656 Otherwise, record that this function uses this register. */
5657 /* ??? The PPC backend tries to "eliminate" on the pic
5658 register to itself. This should be fixed. In the mean
5659 time, hack around it. */
5661 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno)
5662 && (regno == FRAME_POINTER_REGNUM
5663 || regno == ARG_POINTER_REGNUM)))
5665 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5667 regs_ever_live[regno + --n] = 1;
5673 /* Keep track of which basic block each reg appears in. */
5675 register int blocknum = pbi->bb->index;
5676 if (REG_BASIC_BLOCK (regno) == REG_BLOCK_UNKNOWN)
5677 REG_BASIC_BLOCK (regno) = blocknum;
5678 else if (REG_BASIC_BLOCK (regno) != blocknum)
5679 REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
5681 /* Count (weighted) number of uses of each reg. */
5682 REG_N_REFS (regno) += (optimize_size ? 1
5683 : pbi->bb->loop_depth + 1);
5687 /* Find out if any of the register was set this insn. */
5688 some_not_set = ! REGNO_REG_SET_P (pbi->new_set, regno);
5689 if (regno < FIRST_PSEUDO_REGISTER)
5691 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5693 some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, regno + n);
5696 /* Record and count the insns in which a reg dies. If it is used in
5697 this insn and was dead below the insn then it dies in this insn.
5698 If it was set in this insn, we do not make a REG_DEAD note;
5699 likewise if we already made such a note. */
5700 if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
5704 /* Check for the case where the register dying partially
5705 overlaps the register set by this insn. */
5706 if (regno < FIRST_PSEUDO_REGISTER
5707 && HARD_REGNO_NREGS (regno, GET_MODE (reg)) > 1)
5709 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5711 some_was_live |= REGNO_REG_SET_P (pbi->new_set, regno + n);
5714 /* If none of the words in X is needed, make a REG_DEAD note.
5715 Otherwise, we must make partial REG_DEAD notes. */
5716 if (! some_was_live)
5718 if ((pbi->flags & PROP_DEATH_NOTES)
5719 && ! find_regno_note (insn, REG_DEAD, regno))
5721 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
5723 if (pbi->flags & PROP_REG_INFO)
5724 REG_N_DEATHS (regno)++;
5728 /* Don't make a REG_DEAD note for a part of a register
5729 that is set in the insn. */
5731 n = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1;
5732 for (; n >= regno; n--)
5733 if (! REGNO_REG_SET_P (pbi->reg_live, n)
5734 && ! dead_or_set_regno_p (insn, n))
5736 = alloc_EXPR_LIST (REG_DEAD,
5737 gen_rtx_REG (reg_raw_mode[n], n),
5742 SET_REGNO_REG_SET (pbi->reg_live, regno);
5743 if (regno < FIRST_PSEUDO_REGISTER)
5745 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
5747 SET_REGNO_REG_SET (pbi->reg_live, regno + n);
5750 #ifdef HAVE_conditional_execution
5751 /* If this is a conditional use, record that fact. If it is later
5752 conditionally set, we'll know to kill the register. */
5753 if (cond != NULL_RTX)
5755 splay_tree_node node;
5756 struct reg_cond_life_info *rcli;
5761 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
5764 /* The register was unconditionally live previously.
5765 No need to do anything. */
5769 /* The register was conditionally live previously.
5770 Subtract the new life cond from the old death cond. */
5771 rcli = (struct reg_cond_life_info *) node->value;
5772 ncond = rcli->condition;
5773 ncond = and_reg_cond (ncond, not_reg_cond (cond), 1);
5775 /* If the register is now unconditionally live, remove the
5776 entry in the splay_tree. */
5777 if (ncond == const0_rtx)
5779 rcli->condition = NULL_RTX;
5780 splay_tree_remove (pbi->reg_cond_dead, regno);
5784 rcli->condition = ncond;
5785 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
5791 /* The register was not previously live at all. Record
5792 the condition under which it is still dead. */
5793 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
5794 rcli->condition = not_reg_cond (cond);
5795 splay_tree_insert (pbi->reg_cond_dead, regno,
5796 (splay_tree_value) rcli);
5798 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
5801 else if (some_was_live)
5803 splay_tree_node node;
5804 struct reg_cond_life_info *rcli;
5806 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
5809 /* The register was conditionally live previously, but is now
5810 unconditionally so. Remove it from the conditionally dead
5811 list, so that a conditional set won't cause us to think
5813 rcli = (struct reg_cond_life_info *) node->value;
5814 rcli->condition = NULL_RTX;
5815 splay_tree_remove (pbi->reg_cond_dead, regno);
5822 /* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
5823 This is done assuming the registers needed from X are those that
5824 have 1-bits in PBI->REG_LIVE.
5826 INSN is the containing instruction. If INSN is dead, this function
5830 mark_used_regs (pbi, x, cond, insn)
5831 struct propagate_block_info *pbi;
5834 register RTX_CODE code;
5836 int flags = pbi->flags;
5839 code = GET_CODE (x);
5859 /* If we are clobbering a MEM, mark any registers inside the address
5861 if (GET_CODE (XEXP (x, 0)) == MEM)
5862 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
5866 /* Don't bother watching stores to mems if this is not the
5867 final pass. We'll not be deleting dead stores this round. */
5868 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
5870 /* Invalidate the data for the last MEM stored, but only if MEM is
5871 something that can be stored into. */
5872 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
5873 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
5874 /* Needn't clear the memory set list. */
5878 rtx temp = pbi->mem_set_list;
5879 rtx prev = NULL_RTX;
5884 next = XEXP (temp, 1);
5885 if (anti_dependence (XEXP (temp, 0), x))
5887 /* Splice temp out of the list. */
5889 XEXP (prev, 1) = next;
5891 pbi->mem_set_list = next;
5892 free_EXPR_LIST_node (temp);
5893 pbi->mem_set_list_len--;
5901 /* If the memory reference had embedded side effects (autoincrement
5902 address modes. Then we may need to kill some entries on the
5905 invalidate_mems_from_autoinc (pbi, insn);
5909 if (flags & PROP_AUTOINC)
5910 find_auto_inc (pbi, x, insn);
5915 #ifdef CLASS_CANNOT_CHANGE_MODE
5916 if (GET_CODE (SUBREG_REG (x)) == REG
5917 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
5918 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x),
5919 GET_MODE (SUBREG_REG (x))))
5920 REG_CHANGES_MODE (REGNO (SUBREG_REG (x))) = 1;
5923 /* While we're here, optimize this case. */
5925 if (GET_CODE (x) != REG)
5930 /* See a register other than being set => mark it as needed. */
5931 mark_used_reg (pbi, x, cond, insn);
5936 register rtx testreg = SET_DEST (x);
5939 /* If storing into MEM, don't show it as being used. But do
5940 show the address as being used. */
5941 if (GET_CODE (testreg) == MEM)
5944 if (flags & PROP_AUTOINC)
5945 find_auto_inc (pbi, testreg, insn);
5947 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
5948 mark_used_regs (pbi, SET_SRC (x), cond, insn);
5952 /* Storing in STRICT_LOW_PART is like storing in a reg
5953 in that this SET might be dead, so ignore it in TESTREG.
5954 but in some other ways it is like using the reg.
5956 Storing in a SUBREG or a bit field is like storing the entire
5957 register in that if the register's value is not used
5958 then this SET is not needed. */
5959 while (GET_CODE (testreg) == STRICT_LOW_PART
5960 || GET_CODE (testreg) == ZERO_EXTRACT
5961 || GET_CODE (testreg) == SIGN_EXTRACT
5962 || GET_CODE (testreg) == SUBREG)
5964 #ifdef CLASS_CANNOT_CHANGE_MODE
5965 if (GET_CODE (testreg) == SUBREG
5966 && GET_CODE (SUBREG_REG (testreg)) == REG
5967 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
5968 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (SUBREG_REG (testreg)),
5969 GET_MODE (testreg)))
5970 REG_CHANGES_MODE (REGNO (SUBREG_REG (testreg))) = 1;
5973 /* Modifying a single register in an alternate mode
5974 does not use any of the old value. But these other
5975 ways of storing in a register do use the old value. */
5976 if (GET_CODE (testreg) == SUBREG
5977 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
5982 testreg = XEXP (testreg, 0);
5985 /* If this is a store into a register or group of registers,
5986 recursively scan the value being stored. */
5988 if ((GET_CODE (testreg) == PARALLEL
5989 && GET_MODE (testreg) == BLKmode)
5990 || (GET_CODE (testreg) == REG
5991 && (regno = REGNO (testreg),
5992 ! (regno == FRAME_POINTER_REGNUM
5993 && (! reload_completed || frame_pointer_needed)))
5994 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5995 && ! (regno == HARD_FRAME_POINTER_REGNUM
5996 && (! reload_completed || frame_pointer_needed))
5998 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5999 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
6004 mark_used_regs (pbi, SET_DEST (x), cond, insn);
6005 mark_used_regs (pbi, SET_SRC (x), cond, insn);
6012 case UNSPEC_VOLATILE:
6016 /* Traditional and volatile asm instructions must be considered to use
6017 and clobber all hard registers, all pseudo-registers and all of
6018 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
6020 Consider for instance a volatile asm that changes the fpu rounding
6021 mode. An insn should not be moved across this even if it only uses
6022 pseudo-regs because it might give an incorrectly rounded result.
6024 ?!? Unfortunately, marking all hard registers as live causes massive
6025 problems for the register allocator and marking all pseudos as live
6026 creates mountains of uninitialized variable warnings.
6028 So for now, just clear the memory set list and mark any regs
6029 we can find in ASM_OPERANDS as used. */
6030 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
6032 free_EXPR_LIST_list (&pbi->mem_set_list);
6033 pbi->mem_set_list_len = 0;
6036 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
6037 We can not just fall through here since then we would be confused
6038 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
6039 traditional asms unlike their normal usage. */
6040 if (code == ASM_OPERANDS)
6044 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
6045 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
6051 if (cond != NULL_RTX)
6054 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
6056 cond = COND_EXEC_TEST (x);
6057 x = COND_EXEC_CODE (x);
6061 /* We _do_not_ want to scan operands of phi nodes. Operands of
6062 a phi function are evaluated only when control reaches this
6063 block along a particular edge. Therefore, regs that appear
6064 as arguments to phi should not be added to the global live at
6072 /* Recursively scan the operands of this expression. */
6075 register const char *fmt = GET_RTX_FORMAT (code);
6078 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6082 /* Tail recursive case: save a function call level. */
6088 mark_used_regs (pbi, XEXP (x, i), cond, insn);
6090 else if (fmt[i] == 'E')
6093 for (j = 0; j < XVECLEN (x, i); j++)
6094 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
6103 try_pre_increment_1 (pbi, insn)
6104 struct propagate_block_info *pbi;
6107 /* Find the next use of this reg. If in same basic block,
6108 make it do pre-increment or pre-decrement if appropriate. */
6109 rtx x = single_set (insn);
6110 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
6111 * INTVAL (XEXP (SET_SRC (x), 1)));
6112 int regno = REGNO (SET_DEST (x));
6113 rtx y = pbi->reg_next_use[regno];
6115 && SET_DEST (x) != stack_pointer_rtx
6116 && BLOCK_NUM (y) == BLOCK_NUM (insn)
6117 /* Don't do this if the reg dies, or gets set in y; a standard addressing
6118 mode would be better. */
6119 && ! dead_or_set_p (y, SET_DEST (x))
6120 && try_pre_increment (y, SET_DEST (x), amount))
6122 /* We have found a suitable auto-increment and already changed
6123 insn Y to do it. So flush this increment instruction. */
6124 propagate_block_delete_insn (pbi->bb, insn);
6126 /* Count a reference to this reg for the increment insn we are
6127 deleting. When a reg is incremented, spilling it is worse,
6128 so we want to make that less likely. */
6129 if (regno >= FIRST_PSEUDO_REGISTER)
6131 REG_N_REFS (regno) += (optimize_size ? 1
6132 : pbi->bb->loop_depth + 1);
6133 REG_N_SETS (regno)++;
6136 /* Flush any remembered memories depending on the value of
6137 the incremented register. */
6138 invalidate_mems_from_set (pbi, SET_DEST (x));
6145 /* Try to change INSN so that it does pre-increment or pre-decrement
6146 addressing on register REG in order to add AMOUNT to REG.
6147 AMOUNT is negative for pre-decrement.
6148 Returns 1 if the change could be made.
6149 This checks all about the validity of the result of modifying INSN. */
6152 try_pre_increment (insn, reg, amount)
6154 HOST_WIDE_INT amount;
6158 /* Nonzero if we can try to make a pre-increment or pre-decrement.
6159 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
6161 /* Nonzero if we can try to make a post-increment or post-decrement.
6162 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
6163 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
6164 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
6167 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
6170 /* From the sign of increment, see which possibilities are conceivable
6171 on this target machine. */
6172 if (HAVE_PRE_INCREMENT && amount > 0)
6174 if (HAVE_POST_INCREMENT && amount > 0)
6177 if (HAVE_PRE_DECREMENT && amount < 0)
6179 if (HAVE_POST_DECREMENT && amount < 0)
6182 if (! (pre_ok || post_ok))
6185 /* It is not safe to add a side effect to a jump insn
6186 because if the incremented register is spilled and must be reloaded
6187 there would be no way to store the incremented value back in memory. */
6189 if (GET_CODE (insn) == JUMP_INSN)
6194 use = find_use_as_address (PATTERN (insn), reg, 0);
6195 if (post_ok && (use == 0 || use == (rtx) 1))
6197 use = find_use_as_address (PATTERN (insn), reg, -amount);
6201 if (use == 0 || use == (rtx) 1)
6204 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
6207 /* See if this combination of instruction and addressing mode exists. */
6208 if (! validate_change (insn, &XEXP (use, 0),
6209 gen_rtx_fmt_e (amount > 0
6210 ? (do_post ? POST_INC : PRE_INC)
6211 : (do_post ? POST_DEC : PRE_DEC),
6215 /* Record that this insn now has an implicit side effect on X. */
6216 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
6220 #endif /* AUTO_INC_DEC */
6222 /* Find the place in the rtx X where REG is used as a memory address.
6223 Return the MEM rtx that so uses it.
6224 If PLUSCONST is nonzero, search instead for a memory address equivalent to
6225 (plus REG (const_int PLUSCONST)).
6227 If such an address does not appear, return 0.
6228 If REG appears more than once, or is used other than in such an address,
6232 find_use_as_address (x, reg, plusconst)
6235 HOST_WIDE_INT plusconst;
6237 enum rtx_code code = GET_CODE (x);
6238 const char *fmt = GET_RTX_FORMAT (code);
6240 register rtx value = 0;
6243 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
6246 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
6247 && XEXP (XEXP (x, 0), 0) == reg
6248 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6249 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
6252 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
6254 /* If REG occurs inside a MEM used in a bit-field reference,
6255 that is unacceptable. */
6256 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
6257 return (rtx) (HOST_WIDE_INT) 1;
6261 return (rtx) (HOST_WIDE_INT) 1;
6263 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6267 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
6271 return (rtx) (HOST_WIDE_INT) 1;
6273 else if (fmt[i] == 'E')
6276 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6278 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
6282 return (rtx) (HOST_WIDE_INT) 1;
6290 /* Write information about registers and basic blocks into FILE.
6291 This is part of making a debugging dump. */
6294 dump_regset (r, outf)
6301 fputs (" (nil)", outf);
6305 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
6307 fprintf (outf, " %d", i);
6308 if (i < FIRST_PSEUDO_REGISTER)
6309 fprintf (outf, " [%s]",
6318 dump_regset (r, stderr);
6319 putc ('\n', stderr);
6323 dump_flow_info (file)
6327 static const char * const reg_class_names[] = REG_CLASS_NAMES;
6329 fprintf (file, "%d registers.\n", max_regno);
6330 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
6333 enum reg_class class, altclass;
6334 fprintf (file, "\nRegister %d used %d times across %d insns",
6335 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
6336 if (REG_BASIC_BLOCK (i) >= 0)
6337 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
6339 fprintf (file, "; set %d time%s", REG_N_SETS (i),
6340 (REG_N_SETS (i) == 1) ? "" : "s");
6341 if (REG_USERVAR_P (regno_reg_rtx[i]))
6342 fprintf (file, "; user var");
6343 if (REG_N_DEATHS (i) != 1)
6344 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
6345 if (REG_N_CALLS_CROSSED (i) == 1)
6346 fprintf (file, "; crosses 1 call");
6347 else if (REG_N_CALLS_CROSSED (i))
6348 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
6349 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
6350 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
6351 class = reg_preferred_class (i);
6352 altclass = reg_alternate_class (i);
6353 if (class != GENERAL_REGS || altclass != ALL_REGS)
6355 if (altclass == ALL_REGS || class == ALL_REGS)
6356 fprintf (file, "; pref %s", reg_class_names[(int) class]);
6357 else if (altclass == NO_REGS)
6358 fprintf (file, "; %s or none", reg_class_names[(int) class]);
6360 fprintf (file, "; pref %s, else %s",
6361 reg_class_names[(int) class],
6362 reg_class_names[(int) altclass]);
6364 if (REG_POINTER (regno_reg_rtx[i]))
6365 fprintf (file, "; pointer");
6366 fprintf (file, ".\n");
6369 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
6370 for (i = 0; i < n_basic_blocks; i++)
6372 register basic_block bb = BASIC_BLOCK (i);
6375 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d, count %d.\n",
6376 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth, bb->count);
6378 fprintf (file, "Predecessors: ");
6379 for (e = bb->pred; e; e = e->pred_next)
6380 dump_edge_info (file, e, 0);
6382 fprintf (file, "\nSuccessors: ");
6383 for (e = bb->succ; e; e = e->succ_next)
6384 dump_edge_info (file, e, 1);
6386 fprintf (file, "\nRegisters live at start:");
6387 dump_regset (bb->global_live_at_start, file);
6389 fprintf (file, "\nRegisters live at end:");
6390 dump_regset (bb->global_live_at_end, file);
6401 dump_flow_info (stderr);
6405 dump_edge_info (file, e, do_succ)
6410 basic_block side = (do_succ ? e->dest : e->src);
6412 if (side == ENTRY_BLOCK_PTR)
6413 fputs (" ENTRY", file);
6414 else if (side == EXIT_BLOCK_PTR)
6415 fputs (" EXIT", file);
6417 fprintf (file, " %d", side->index);
6420 fprintf (file, " count:%d", e->count);
6424 static const char * const bitnames[] = {
6425 "fallthru", "crit", "ab", "abcall", "eh", "fake"
6428 int i, flags = e->flags;
6432 for (i = 0; flags; i++)
6433 if (flags & (1 << i))
6439 if (i < (int) ARRAY_SIZE (bitnames))
6440 fputs (bitnames[i], file);
6442 fprintf (file, "%d", i);
6449 /* Print out one basic block with live information at start and end. */
6460 fprintf (outf, ";; Basic block %d, loop depth %d, count %d",
6461 bb->index, bb->loop_depth, bb->count);
6462 if (bb->eh_beg != -1 || bb->eh_end != -1)
6463 fprintf (outf, ", eh regions %d/%d", bb->eh_beg, bb->eh_end);
6466 fputs (";; Predecessors: ", outf);
6467 for (e = bb->pred; e; e = e->pred_next)
6468 dump_edge_info (outf, e, 0);
6471 fputs (";; Registers live at start:", outf);
6472 dump_regset (bb->global_live_at_start, outf);
6475 for (insn = bb->head, last = NEXT_INSN (bb->end);
6477 insn = NEXT_INSN (insn))
6478 print_rtl_single (outf, insn);
6480 fputs (";; Registers live at end:", outf);
6481 dump_regset (bb->global_live_at_end, outf);
6484 fputs (";; Successors: ", outf);
6485 for (e = bb->succ; e; e = e->succ_next)
6486 dump_edge_info (outf, e, 1);
6494 dump_bb (bb, stderr);
6501 dump_bb (BASIC_BLOCK (n), stderr);
6504 /* Like print_rtl, but also print out live information for the start of each
6508 print_rtl_with_bb (outf, rtx_first)
6512 register rtx tmp_rtx;
6515 fprintf (outf, "(nil)\n");
6519 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
6520 int max_uid = get_max_uid ();
6521 basic_block *start = (basic_block *)
6522 xcalloc (max_uid, sizeof (basic_block));
6523 basic_block *end = (basic_block *)
6524 xcalloc (max_uid, sizeof (basic_block));
6525 enum bb_state *in_bb_p = (enum bb_state *)
6526 xcalloc (max_uid, sizeof (enum bb_state));
6528 for (i = n_basic_blocks - 1; i >= 0; i--)
6530 basic_block bb = BASIC_BLOCK (i);
6533 start[INSN_UID (bb->head)] = bb;
6534 end[INSN_UID (bb->end)] = bb;
6535 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
6537 enum bb_state state = IN_MULTIPLE_BB;
6538 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
6540 in_bb_p[INSN_UID (x)] = state;
6547 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
6552 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
6554 fprintf (outf, ";; Start of basic block %d, registers live:",
6556 dump_regset (bb->global_live_at_start, outf);
6560 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
6561 && GET_CODE (tmp_rtx) != NOTE
6562 && GET_CODE (tmp_rtx) != BARRIER)
6563 fprintf (outf, ";; Insn is not within a basic block\n");
6564 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
6565 fprintf (outf, ";; Insn is in multiple basic blocks\n");
6567 did_output = print_rtl_single (outf, tmp_rtx);
6569 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
6571 fprintf (outf, ";; End of basic block %d, registers live:\n",
6573 dump_regset (bb->global_live_at_end, outf);
6586 if (current_function_epilogue_delay_list != 0)
6588 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
6589 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
6590 tmp_rtx = XEXP (tmp_rtx, 1))
6591 print_rtl_single (outf, XEXP (tmp_rtx, 0));
6595 /* Dump the rtl into the current debugging dump file, then abort. */
6598 print_rtl_and_abort_fcn (file, line, function)
6601 const char *function;
6605 print_rtl_with_bb (rtl_dump_file, get_insns ());
6606 fclose (rtl_dump_file);
6609 fancy_abort (file, line, function);
6612 /* Recompute register set/reference counts immediately prior to register
6615 This avoids problems with set/reference counts changing to/from values
6616 which have special meanings to the register allocators.
6618 Additionally, the reference counts are the primary component used by the
6619 register allocators to prioritize pseudos for allocation to hard regs.
6620 More accurate reference counts generally lead to better register allocation.
6622 F is the first insn to be scanned.
6624 LOOP_STEP denotes how much loop_depth should be incremented per
6625 loop nesting level in order to increase the ref count more for
6626 references in a loop.
6628 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
6629 possibly other information which is used by the register allocators. */
6632 recompute_reg_usage (f, loop_step)
6633 rtx f ATTRIBUTE_UNUSED;
6634 int loop_step ATTRIBUTE_UNUSED;
6636 allocate_reg_life_data ();
6637 update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
6640 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
6641 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
6642 of the number of registers that died. */
6645 count_or_remove_death_notes (blocks, kill)
6651 for (i = n_basic_blocks - 1; i >= 0; --i)
6656 if (blocks && ! TEST_BIT (blocks, i))
6659 bb = BASIC_BLOCK (i);
6661 for (insn = bb->head;; insn = NEXT_INSN (insn))
6665 rtx *pprev = ®_NOTES (insn);
6670 switch (REG_NOTE_KIND (link))
6673 if (GET_CODE (XEXP (link, 0)) == REG)
6675 rtx reg = XEXP (link, 0);
6678 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
6681 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
6689 rtx next = XEXP (link, 1);
6690 free_EXPR_LIST_node (link);
6691 *pprev = link = next;
6697 pprev = &XEXP (link, 1);
6704 if (insn == bb->end)
6713 /* Update insns block within BB. */
6716 update_bb_for_insn (bb)
6721 if (! basic_block_for_insn)
6724 for (insn = bb->head; ; insn = NEXT_INSN (insn))
6726 set_block_for_insn (insn, bb);
6728 if (insn == bb->end)
6734 /* Record INSN's block as BB. */
6737 set_block_for_insn (insn, bb)
6741 size_t uid = INSN_UID (insn);
6742 if (uid >= basic_block_for_insn->num_elements)
6746 /* Add one-eighth the size so we don't keep calling xrealloc. */
6747 new_size = uid + (uid + 7) / 8;
6749 VARRAY_GROW (basic_block_for_insn, new_size);
6751 VARRAY_BB (basic_block_for_insn, uid) = bb;
6754 /* Record INSN's block number as BB. */
6755 /* ??? This has got to go. */
6758 set_block_num (insn, bb)
6762 set_block_for_insn (insn, BASIC_BLOCK (bb));
6765 /* Verify the CFG consistency. This function check some CFG invariants and
6766 aborts when something is wrong. Hope that this function will help to
6767 convert many optimization passes to preserve CFG consistent.
6769 Currently it does following checks:
6771 - test head/end pointers
6772 - overlapping of basic blocks
6773 - edge list corectness
6774 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
6775 - tails of basic blocks (ensure that boundary is necesary)
6776 - scans body of the basic block for JUMP_INSN, CODE_LABEL
6777 and NOTE_INSN_BASIC_BLOCK
6778 - check that all insns are in the basic blocks
6779 (except the switch handling code, barriers and notes)
6780 - check that all returns are followed by barriers
6782 In future it can be extended check a lot of other stuff as well
6783 (reachability of basic blocks, life information, etc. etc.). */
6788 const int max_uid = get_max_uid ();
6789 const rtx rtx_first = get_insns ();
6790 rtx last_head = get_last_insn ();
6791 basic_block *bb_info;
6793 int i, last_bb_num_seen, num_bb_notes, err = 0;
6795 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
6797 for (i = n_basic_blocks - 1; i >= 0; i--)
6799 basic_block bb = BASIC_BLOCK (i);
6800 rtx head = bb->head;
6803 /* Verify the end of the basic block is in the INSN chain. */
6804 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
6809 error ("End insn %d for block %d not found in the insn stream.",
6810 INSN_UID (end), bb->index);
6814 /* Work backwards from the end to the head of the basic block
6815 to verify the head is in the RTL chain. */
6816 for (; x != NULL_RTX; x = PREV_INSN (x))
6818 /* While walking over the insn chain, verify insns appear
6819 in only one basic block and initialize the BB_INFO array
6820 used by other passes. */
6821 if (bb_info[INSN_UID (x)] != NULL)
6823 error ("Insn %d is in multiple basic blocks (%d and %d)",
6824 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
6827 bb_info[INSN_UID (x)] = bb;
6834 error ("Head insn %d for block %d not found in the insn stream.",
6835 INSN_UID (head), bb->index);
6842 /* Now check the basic blocks (boundaries etc.) */
6843 for (i = n_basic_blocks - 1; i >= 0; i--)
6845 basic_block bb = BASIC_BLOCK (i);
6846 /* Check corectness of edge lists */
6855 "verify_flow_info: Basic block %d succ edge is corrupted\n",
6857 fprintf (stderr, "Predecessor: ");
6858 dump_edge_info (stderr, e, 0);
6859 fprintf (stderr, "\nSuccessor: ");
6860 dump_edge_info (stderr, e, 1);
6864 if (e->dest != EXIT_BLOCK_PTR)
6866 edge e2 = e->dest->pred;
6867 while (e2 && e2 != e)
6871 error ("Basic block %i edge lists are corrupted", bb->index);
6883 error ("Basic block %d pred edge is corrupted", bb->index);
6884 fputs ("Predecessor: ", stderr);
6885 dump_edge_info (stderr, e, 0);
6886 fputs ("\nSuccessor: ", stderr);
6887 dump_edge_info (stderr, e, 1);
6888 fputc ('\n', stderr);
6891 if (e->src != ENTRY_BLOCK_PTR)
6893 edge e2 = e->src->succ;
6894 while (e2 && e2 != e)
6898 error ("Basic block %i edge lists are corrupted", bb->index);
6905 /* OK pointers are correct. Now check the header of basic
6906 block. It ought to contain optional CODE_LABEL followed
6907 by NOTE_BASIC_BLOCK. */
6909 if (GET_CODE (x) == CODE_LABEL)
6913 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
6919 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
6921 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
6928 /* Do checks for empty blocks here */
6935 if (NOTE_INSN_BASIC_BLOCK_P (x))
6937 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
6938 INSN_UID (x), bb->index);
6945 if (GET_CODE (x) == JUMP_INSN
6946 || GET_CODE (x) == CODE_LABEL
6947 || GET_CODE (x) == BARRIER)
6949 error ("In basic block %d:", bb->index);
6950 fatal_insn ("Flow control insn inside a basic block", x);
6958 last_bb_num_seen = -1;
6963 if (NOTE_INSN_BASIC_BLOCK_P (x))
6965 basic_block bb = NOTE_BASIC_BLOCK (x);
6967 if (bb->index != last_bb_num_seen + 1)
6968 /* Basic blocks not numbered consecutively. */
6971 last_bb_num_seen = bb->index;
6974 if (!bb_info[INSN_UID (x)])
6976 switch (GET_CODE (x))
6983 /* An addr_vec is placed outside any block block. */
6985 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
6986 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
6987 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
6992 /* But in any case, non-deletable labels can appear anywhere. */
6996 fatal_insn ("Insn outside basic block", x);
7001 && GET_CODE (x) == JUMP_INSN
7002 && returnjump_p (x) && ! condjump_p (x)
7003 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
7004 fatal_insn ("Return not followed by barrier", x);
7009 if (num_bb_notes != n_basic_blocks)
7011 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
7012 num_bb_notes, n_basic_blocks);
7021 /* Functions to access an edge list with a vector representation.
7022 Enough data is kept such that given an index number, the
7023 pred and succ that edge represents can be determined, or
7024 given a pred and a succ, its index number can be returned.
7025 This allows algorithms which consume a lot of memory to
7026 represent the normally full matrix of edge (pred,succ) with a
7027 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
7028 wasted space in the client code due to sparse flow graphs. */
7030 /* This functions initializes the edge list. Basically the entire
7031 flowgraph is processed, and all edges are assigned a number,
7032 and the data structure is filled in. */
7037 struct edge_list *elist;
7043 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
7047 /* Determine the number of edges in the flow graph by counting successor
7048 edges on each basic block. */
7049 for (x = 0; x < n_basic_blocks; x++)
7051 basic_block bb = BASIC_BLOCK (x);
7053 for (e = bb->succ; e; e = e->succ_next)
7056 /* Don't forget successors of the entry block. */
7057 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
7060 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
7061 elist->num_blocks = block_count;
7062 elist->num_edges = num_edges;
7063 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
7067 /* Follow successors of the entry block, and register these edges. */
7068 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
7070 elist->index_to_edge[num_edges] = e;
7074 for (x = 0; x < n_basic_blocks; x++)
7076 basic_block bb = BASIC_BLOCK (x);
7078 /* Follow all successors of blocks, and register these edges. */
7079 for (e = bb->succ; e; e = e->succ_next)
7081 elist->index_to_edge[num_edges] = e;
7088 /* This function free's memory associated with an edge list. */
7091 free_edge_list (elist)
7092 struct edge_list *elist;
7096 free (elist->index_to_edge);
7101 /* This function provides debug output showing an edge list. */
7104 print_edge_list (f, elist)
7106 struct edge_list *elist;
7109 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
7110 elist->num_blocks - 2, elist->num_edges);
7112 for (x = 0; x < elist->num_edges; x++)
7114 fprintf (f, " %-4d - edge(", x);
7115 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
7116 fprintf (f, "entry,");
7118 fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
7120 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
7121 fprintf (f, "exit)\n");
7123 fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
7127 /* This function provides an internal consistency check of an edge list,
7128 verifying that all edges are present, and that there are no
7132 verify_edge_list (f, elist)
7134 struct edge_list *elist;
7136 int x, pred, succ, index;
7139 for (x = 0; x < n_basic_blocks; x++)
7141 basic_block bb = BASIC_BLOCK (x);
7143 for (e = bb->succ; e; e = e->succ_next)
7145 pred = e->src->index;
7146 succ = e->dest->index;
7147 index = EDGE_INDEX (elist, e->src, e->dest);
7148 if (index == EDGE_INDEX_NO_EDGE)
7150 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
7153 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
7154 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
7155 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
7156 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
7157 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
7158 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
7161 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
7163 pred = e->src->index;
7164 succ = e->dest->index;
7165 index = EDGE_INDEX (elist, e->src, e->dest);
7166 if (index == EDGE_INDEX_NO_EDGE)
7168 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
7171 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
7172 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
7173 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
7174 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
7175 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
7176 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
7178 /* We've verified that all the edges are in the list, no lets make sure
7179 there are no spurious edges in the list. */
7181 for (pred = 0; pred < n_basic_blocks; pred++)
7182 for (succ = 0; succ < n_basic_blocks; succ++)
7184 basic_block p = BASIC_BLOCK (pred);
7185 basic_block s = BASIC_BLOCK (succ);
7189 for (e = p->succ; e; e = e->succ_next)
7195 for (e = s->pred; e; e = e->pred_next)
7201 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
7202 == EDGE_INDEX_NO_EDGE && found_edge != 0)
7203 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
7205 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
7206 != EDGE_INDEX_NO_EDGE && found_edge == 0)
7207 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
7208 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
7209 BASIC_BLOCK (succ)));
7211 for (succ = 0; succ < n_basic_blocks; succ++)
7213 basic_block p = ENTRY_BLOCK_PTR;
7214 basic_block s = BASIC_BLOCK (succ);
7218 for (e = p->succ; e; e = e->succ_next)
7224 for (e = s->pred; e; e = e->pred_next)
7230 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
7231 == EDGE_INDEX_NO_EDGE && found_edge != 0)
7232 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
7234 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
7235 != EDGE_INDEX_NO_EDGE && found_edge == 0)
7236 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
7237 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
7238 BASIC_BLOCK (succ)));
7240 for (pred = 0; pred < n_basic_blocks; pred++)
7242 basic_block p = BASIC_BLOCK (pred);
7243 basic_block s = EXIT_BLOCK_PTR;
7247 for (e = p->succ; e; e = e->succ_next)
7253 for (e = s->pred; e; e = e->pred_next)
7259 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
7260 == EDGE_INDEX_NO_EDGE && found_edge != 0)
7261 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
7263 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
7264 != EDGE_INDEX_NO_EDGE && found_edge == 0)
7265 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
7266 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
7271 /* This routine will determine what, if any, edge there is between
7272 a specified predecessor and successor. */
7275 find_edge_index (edge_list, pred, succ)
7276 struct edge_list *edge_list;
7277 basic_block pred, succ;
7280 for (x = 0; x < NUM_EDGES (edge_list); x++)
7282 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
7283 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
7286 return (EDGE_INDEX_NO_EDGE);
7289 /* This function will remove an edge from the flow graph. */
7295 edge last_pred = NULL;
7296 edge last_succ = NULL;
7298 basic_block src, dest;
7301 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
7307 last_succ->succ_next = e->succ_next;
7309 src->succ = e->succ_next;
7311 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
7317 last_pred->pred_next = e->pred_next;
7319 dest->pred = e->pred_next;
7325 /* This routine will remove any fake successor edges for a basic block.
7326 When the edge is removed, it is also removed from whatever predecessor
7330 remove_fake_successors (bb)
7334 for (e = bb->succ; e;)
7338 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
7343 /* This routine will remove all fake edges from the flow graph. If
7344 we remove all fake successors, it will automatically remove all
7345 fake predecessors. */
7348 remove_fake_edges ()
7352 for (x = 0; x < n_basic_blocks; x++)
7353 remove_fake_successors (BASIC_BLOCK (x));
7355 /* We've handled all successors except the entry block's. */
7356 remove_fake_successors (ENTRY_BLOCK_PTR);
7359 /* This function will add a fake edge between any block which has no
7360 successors, and the exit block. Some data flow equations require these
7364 add_noreturn_fake_exit_edges ()
7368 for (x = 0; x < n_basic_blocks; x++)
7369 if (BASIC_BLOCK (x)->succ == NULL)
7370 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
7373 /* This function adds a fake edge between any infinite loops to the
7374 exit block. Some optimizations require a path from each node to
7377 See also Morgan, Figure 3.10, pp. 82-83.
7379 The current implementation is ugly, not attempting to minimize the
7380 number of inserted fake edges. To reduce the number of fake edges
7381 to insert, add fake edges from _innermost_ loops containing only
7382 nodes not reachable from the exit block. */
7385 connect_infinite_loops_to_exit ()
7387 basic_block unvisited_block;
7389 /* Perform depth-first search in the reverse graph to find nodes
7390 reachable from the exit block. */
7391 struct depth_first_search_dsS dfs_ds;
7393 flow_dfs_compute_reverse_init (&dfs_ds);
7394 flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
7396 /* Repeatedly add fake edges, updating the unreachable nodes. */
7399 unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
7400 if (!unvisited_block)
7402 make_edge (NULL, unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
7403 flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
7406 flow_dfs_compute_reverse_finish (&dfs_ds);
7411 /* Redirect an edge's successor from one block to another. */
7414 redirect_edge_succ (e, new_succ)
7416 basic_block new_succ;
7420 /* Disconnect the edge from the old successor block. */
7421 for (pe = &e->dest->pred; *pe != e; pe = &(*pe)->pred_next)
7423 *pe = (*pe)->pred_next;
7425 /* Reconnect the edge to the new successor block. */
7426 e->pred_next = new_succ->pred;
7431 /* Redirect an edge's predecessor from one block to another. */
7434 redirect_edge_pred (e, new_pred)
7436 basic_block new_pred;
7440 /* Disconnect the edge from the old predecessor block. */
7441 for (pe = &e->src->succ; *pe != e; pe = &(*pe)->succ_next)
7443 *pe = (*pe)->succ_next;
7445 /* Reconnect the edge to the new predecessor block. */
7446 e->succ_next = new_pred->succ;
7451 /* Dump the list of basic blocks in the bitmap NODES. */
7454 flow_nodes_print (str, nodes, file)
7456 const sbitmap nodes;
7464 fprintf (file, "%s { ", str);
7465 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
7466 fputs ("}\n", file);
7470 /* Dump the list of edges in the array EDGE_LIST. */
7473 flow_edge_list_print (str, edge_list, num_edges, file)
7475 const edge *edge_list;
7484 fprintf (file, "%s { ", str);
7485 for (i = 0; i < num_edges; i++)
7486 fprintf (file, "%d->%d ", edge_list[i]->src->index,
7487 edge_list[i]->dest->index);
7488 fputs ("}\n", file);
7492 /* Dump loop related CFG information. */
7495 flow_loops_cfg_dump (loops, file)
7496 const struct loops *loops;
7501 if (! loops->num || ! file || ! loops->cfg.dom)
7504 for (i = 0; i < n_basic_blocks; i++)
7508 fprintf (file, ";; %d succs { ", i);
7509 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
7510 fprintf (file, "%d ", succ->dest->index);
7511 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
7514 /* Dump the DFS node order. */
7515 if (loops->cfg.dfs_order)
7517 fputs (";; DFS order: ", file);
7518 for (i = 0; i < n_basic_blocks; i++)
7519 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
7522 /* Dump the reverse completion node order. */
7523 if (loops->cfg.rc_order)
7525 fputs (";; RC order: ", file);
7526 for (i = 0; i < n_basic_blocks; i++)
7527 fprintf (file, "%d ", loops->cfg.rc_order[i]);
7532 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
7535 flow_loop_nested_p (outer, loop)
7539 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
7543 /* Dump the loop information specified by LOOP to the stream FILE
7544 using auxiliary dump callback function LOOP_DUMP_AUX if non null. */
7546 flow_loop_dump (loop, file, loop_dump_aux, verbose)
7547 const struct loop *loop;
7549 void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
7552 if (! loop || ! loop->header)
7555 fprintf (file, ";;\n;; Loop %d (%d to %d):%s%s\n",
7556 loop->num, INSN_UID (loop->first->head),
7557 INSN_UID (loop->last->end),
7558 loop->shared ? " shared" : "",
7559 loop->invalid ? " invalid" : "");
7560 fprintf (file, ";; header %d, latch %d, pre-header %d, first %d, last %d\n",
7561 loop->header->index, loop->latch->index,
7562 loop->pre_header ? loop->pre_header->index : -1,
7563 loop->first->index, loop->last->index);
7564 fprintf (file, ";; depth %d, level %d, outer %ld\n",
7565 loop->depth, loop->level,
7566 (long) (loop->outer ? loop->outer->num : -1));
7568 if (loop->pre_header_edges)
7569 flow_edge_list_print (";; pre-header edges", loop->pre_header_edges,
7570 loop->num_pre_header_edges, file);
7571 flow_edge_list_print (";; entry edges", loop->entry_edges,
7572 loop->num_entries, file);
7573 fprintf (file, ";; %d", loop->num_nodes);
7574 flow_nodes_print (" nodes", loop->nodes, file);
7575 flow_edge_list_print (";; exit edges", loop->exit_edges,
7576 loop->num_exits, file);
7577 if (loop->exits_doms)
7578 flow_nodes_print (";; exit doms", loop->exits_doms, file);
7580 loop_dump_aux (loop, file, verbose);
7584 /* Dump the loop information specified by LOOPS to the stream FILE,
7585 using auxiliary dump callback function LOOP_DUMP_AUX if non null. */
7587 flow_loops_dump (loops, file, loop_dump_aux, verbose)
7588 const struct loops *loops;
7590 void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
7596 num_loops = loops->num;
7597 if (! num_loops || ! file)
7600 fprintf (file, ";; %d loops found, %d levels\n",
7601 num_loops, loops->levels);
7603 for (i = 0; i < num_loops; i++)
7605 struct loop *loop = &loops->array[i];
7607 flow_loop_dump (loop, file, loop_dump_aux, verbose);
7613 for (j = 0; j < i; j++)
7615 struct loop *oloop = &loops->array[j];
7617 if (loop->header == oloop->header)
7622 smaller = loop->num_nodes < oloop->num_nodes;
7624 /* If the union of LOOP and OLOOP is different than
7625 the larger of LOOP and OLOOP then LOOP and OLOOP
7626 must be disjoint. */
7627 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
7628 smaller ? oloop : loop);
7630 ";; loop header %d shared by loops %d, %d %s\n",
7631 loop->header->index, i, j,
7632 disjoint ? "disjoint" : "nested");
7639 flow_loops_cfg_dump (loops, file);
7643 /* Free all the memory allocated for LOOPS. */
7646 flow_loops_free (loops)
7647 struct loops *loops;
7656 /* Free the loop descriptors. */
7657 for (i = 0; i < loops->num; i++)
7659 struct loop *loop = &loops->array[i];
7661 if (loop->pre_header_edges)
7662 free (loop->pre_header_edges);
7664 sbitmap_free (loop->nodes);
7665 if (loop->entry_edges)
7666 free (loop->entry_edges);
7667 if (loop->exit_edges)
7668 free (loop->exit_edges);
7669 if (loop->exits_doms)
7670 sbitmap_free (loop->exits_doms);
7672 free (loops->array);
7673 loops->array = NULL;
7676 sbitmap_vector_free (loops->cfg.dom);
7677 if (loops->cfg.dfs_order)
7678 free (loops->cfg.dfs_order);
7680 if (loops->shared_headers)
7681 sbitmap_free (loops->shared_headers);
7686 /* Find the entry edges into the loop with header HEADER and nodes
7687 NODES and store in ENTRY_EDGES array. Return the number of entry
7688 edges from the loop. */
7691 flow_loop_entry_edges_find (header, nodes, entry_edges)
7693 const sbitmap nodes;
7699 *entry_edges = NULL;
7702 for (e = header->pred; e; e = e->pred_next)
7704 basic_block src = e->src;
7706 if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
7713 *entry_edges = (edge *) xmalloc (num_entries * sizeof (edge *));
7716 for (e = header->pred; e; e = e->pred_next)
7718 basic_block src = e->src;
7720 if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
7721 (*entry_edges)[num_entries++] = e;
7728 /* Find the exit edges from the loop using the bitmap of loop nodes
7729 NODES and store in EXIT_EDGES array. Return the number of
7730 exit edges from the loop. */
7733 flow_loop_exit_edges_find (nodes, exit_edges)
7734 const sbitmap nodes;
7743 /* Check all nodes within the loop to see if there are any
7744 successors not in the loop. Note that a node may have multiple
7745 exiting edges ????? A node can have one jumping edge and one fallthru
7746 edge so only one of these can exit the loop. */
7748 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
7749 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
7751 basic_block dest = e->dest;
7753 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
7761 *exit_edges = (edge *) xmalloc (num_exits * sizeof (edge *));
7763 /* Store all exiting edges into an array. */
7765 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
7766 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
7768 basic_block dest = e->dest;
7770 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
7771 (*exit_edges)[num_exits++] = e;
7779 /* Find the nodes contained within the loop with header HEADER and
7780 latch LATCH and store in NODES. Return the number of nodes within
7784 flow_loop_nodes_find (header, latch, nodes)
7793 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
7796 /* Start with only the loop header in the set of loop nodes. */
7797 sbitmap_zero (nodes);
7798 SET_BIT (nodes, header->index);
7800 header->loop_depth++;
7802 /* Push the loop latch on to the stack. */
7803 if (! TEST_BIT (nodes, latch->index))
7805 SET_BIT (nodes, latch->index);
7806 latch->loop_depth++;
7808 stack[sp++] = latch;
7817 for (e = node->pred; e; e = e->pred_next)
7819 basic_block ancestor = e->src;
7821 /* If each ancestor not marked as part of loop, add to set of
7822 loop nodes and push on to stack. */
7823 if (ancestor != ENTRY_BLOCK_PTR
7824 && ! TEST_BIT (nodes, ancestor->index))
7826 SET_BIT (nodes, ancestor->index);
7827 ancestor->loop_depth++;
7829 stack[sp++] = ancestor;
7837 /* Compute the depth first search order and store in the array
7838 DFS_ORDER if non-zero, marking the nodes visited in VISITED. If
7839 RC_ORDER is non-zero, return the reverse completion number for each
7840 node. Returns the number of nodes visited. A depth first search
7841 tries to get as far away from the starting point as quickly as
7845 flow_depth_first_order_compute (dfs_order, rc_order)
7852 int rcnum = n_basic_blocks - 1;
7855 /* Allocate stack for back-tracking up CFG. */
7856 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
7859 /* Allocate bitmap to track nodes that have been visited. */
7860 visited = sbitmap_alloc (n_basic_blocks);
7862 /* None of the nodes in the CFG have been visited yet. */
7863 sbitmap_zero (visited);
7865 /* Push the first edge on to the stack. */
7866 stack[sp++] = ENTRY_BLOCK_PTR->succ;
7874 /* Look at the edge on the top of the stack. */
7879 /* Check if the edge destination has been visited yet. */
7880 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
7882 /* Mark that we have visited the destination. */
7883 SET_BIT (visited, dest->index);
7886 dfs_order[dfsnum++] = dest->index;
7890 /* Since the DEST node has been visited for the first
7891 time, check its successors. */
7892 stack[sp++] = dest->succ;
7896 /* There are no successors for the DEST node so assign
7897 its reverse completion number. */
7899 rc_order[rcnum--] = dest->index;
7904 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
7906 /* There are no more successors for the SRC node
7907 so assign its reverse completion number. */
7909 rc_order[rcnum--] = src->index;
7913 stack[sp - 1] = e->succ_next;
7920 sbitmap_free (visited);
7922 /* The number of nodes visited should not be greater than
7924 if (dfsnum > n_basic_blocks)
7927 /* There are some nodes left in the CFG that are unreachable. */
7928 if (dfsnum < n_basic_blocks)
7933 /* Compute the depth first search order on the _reverse_ graph and
7934 store in the array DFS_ORDER, marking the nodes visited in VISITED.
7935 Returns the number of nodes visited.
7937 The computation is split into three pieces:
7939 flow_dfs_compute_reverse_init () creates the necessary data
7942 flow_dfs_compute_reverse_add_bb () adds a basic block to the data
7943 structures. The block will start the search.
7945 flow_dfs_compute_reverse_execute () continues (or starts) the
7946 search using the block on the top of the stack, stopping when the
7949 flow_dfs_compute_reverse_finish () destroys the necessary data
7952 Thus, the user will probably call ..._init(), call ..._add_bb() to
7953 add a beginning basic block to the stack, call ..._execute(),
7954 possibly add another bb to the stack and again call ..._execute(),
7955 ..., and finally call _finish(). */
7957 /* Initialize the data structures used for depth-first search on the
7958 reverse graph. If INITIALIZE_STACK is nonzero, the exit block is
7959 added to the basic block stack. DATA is the current depth-first
7960 search context. If INITIALIZE_STACK is non-zero, there is an
7961 element on the stack. */
7964 flow_dfs_compute_reverse_init (data)
7965 depth_first_search_ds data;
7967 /* Allocate stack for back-tracking up CFG. */
7969 (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
7970 * sizeof (basic_block));
7973 /* Allocate bitmap to track nodes that have been visited. */
7974 data->visited_blocks = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));
7976 /* None of the nodes in the CFG have been visited yet. */
7977 sbitmap_zero (data->visited_blocks);
7982 /* Add the specified basic block to the top of the dfs data
7983 structures. When the search continues, it will start at the
7987 flow_dfs_compute_reverse_add_bb (data, bb)
7988 depth_first_search_ds data;
7991 data->stack[data->sp++] = bb;
7995 /* Continue the depth-first search through the reverse graph starting
7996 with the block at the stack's top and ending when the stack is
7997 empty. Visited nodes are marked. Returns an unvisited basic
7998 block, or NULL if there is none available. */
8001 flow_dfs_compute_reverse_execute (data)
8002 depth_first_search_ds data;
8008 while (data->sp > 0)
8010 bb = data->stack[--data->sp];
8012 /* Mark that we have visited this node. */
8013 if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)))
8015 SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
8017 /* Perform depth-first search on adjacent vertices. */
8018 for (e = bb->pred; e; e = e->pred_next)
8019 flow_dfs_compute_reverse_add_bb (data, e->src);
8023 /* Determine if there are unvisited basic blocks. */
8024 for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0;)
8025 if (!TEST_BIT (data->visited_blocks, i))
8026 return BASIC_BLOCK (i + (INVALID_BLOCK + 1));
8030 /* Destroy the data structures needed for depth-first search on the
8034 flow_dfs_compute_reverse_finish (data)
8035 depth_first_search_ds data;
8038 sbitmap_free (data->visited_blocks);
8043 /* Find the root node of the loop pre-header extended basic block and
8044 the edges along the trace from the root node to the loop header. */
8047 flow_loop_pre_header_scan (loop)
8053 loop->num_pre_header_edges = 0;
8055 if (loop->num_entries != 1)
8058 ebb = loop->entry_edges[0]->src;
8060 if (ebb != ENTRY_BLOCK_PTR)
8064 /* Count number of edges along trace from loop header to
8065 root of pre-header extended basic block. Usually this is
8066 only one or two edges. */
8068 while (ebb->pred->src != ENTRY_BLOCK_PTR && ! ebb->pred->pred_next)
8070 ebb = ebb->pred->src;
8074 loop->pre_header_edges = (edge *) xmalloc (num * sizeof (edge *));
8075 loop->num_pre_header_edges = num;
8077 /* Store edges in order that they are followed. The source
8078 of the first edge is the root node of the pre-header extended
8079 basic block and the destination of the last last edge is
8081 for (e = loop->entry_edges[0]; num; e = e->src->pred)
8083 loop->pre_header_edges[--num] = e;
8089 /* Return the block for the pre-header of the loop with header
8090 HEADER where DOM specifies the dominator information. Return NULL if
8091 there is no pre-header. */
8094 flow_loop_pre_header_find (header, dom)
8098 basic_block pre_header;
8101 /* If block p is a predecessor of the header and is the only block
8102 that the header does not dominate, then it is the pre-header. */
8104 for (e = header->pred; e; e = e->pred_next)
8106 basic_block node = e->src;
8108 if (node != ENTRY_BLOCK_PTR
8109 && ! TEST_BIT (dom[node->index], header->index))
8111 if (pre_header == NULL)
8115 /* There are multiple edges into the header from outside
8116 the loop so there is no pre-header block. */
8125 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
8126 previously added. The insertion algorithm assumes that the loops
8127 are added in the order found by a depth first search of the CFG. */
8130 flow_loop_tree_node_add (prevloop, loop)
8131 struct loop *prevloop;
8135 if (flow_loop_nested_p (prevloop, loop))
8137 prevloop->inner = loop;
8138 loop->outer = prevloop;
8142 while (prevloop->outer)
8144 if (flow_loop_nested_p (prevloop->outer, loop))
8146 prevloop->next = loop;
8147 loop->outer = prevloop->outer;
8150 prevloop = prevloop->outer;
8153 prevloop->next = loop;
8157 /* Build the loop hierarchy tree for LOOPS. */
8160 flow_loops_tree_build (loops)
8161 struct loops *loops;
8166 num_loops = loops->num;
8170 /* Root the loop hierarchy tree with the first loop found.
8171 Since we used a depth first search this should be the
8173 loops->tree = &loops->array[0];
8174 loops->tree->outer = loops->tree->inner = loops->tree->next = NULL;
8176 /* Add the remaining loops to the tree. */
8177 for (i = 1; i < num_loops; i++)
8178 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
8181 /* Helper function to compute loop nesting depth and enclosed loop level
8182 for the natural loop specified by LOOP at the loop depth DEPTH.
8183 Returns the loop level. */
8186 flow_loop_level_compute (loop, depth)
8196 /* Traverse loop tree assigning depth and computing level as the
8197 maximum level of all the inner loops of this loop. The loop
8198 level is equivalent to the height of the loop in the loop tree
8199 and corresponds to the number of enclosed loop levels (including
8201 for (inner = loop->inner; inner; inner = inner->next)
8205 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
8210 loop->level = level;
8211 loop->depth = depth;
8215 /* Compute the loop nesting depth and enclosed loop level for the loop
8216 hierarchy tree specfied by LOOPS. Return the maximum enclosed loop
8220 flow_loops_level_compute (loops)
8221 struct loops *loops;
8227 /* Traverse all the outer level loops. */
8228 for (loop = loops->tree; loop; loop = loop->next)
8230 level = flow_loop_level_compute (loop, 1);
8238 /* Scan a single natural loop specified by LOOP collecting information
8239 about it specified by FLAGS. */
8242 flow_loop_scan (loops, loop, flags)
8243 struct loops *loops;
8247 /* Determine prerequisites. */
8248 if ((flags & LOOP_EXITS_DOMS) && ! loop->exit_edges)
8249 flags |= LOOP_EXIT_EDGES;
8251 if (flags & LOOP_ENTRY_EDGES)
8253 /* Find edges which enter the loop header.
8254 Note that the entry edges should only
8255 enter the header of a natural loop. */
8257 = flow_loop_entry_edges_find (loop->header,
8259 &loop->entry_edges);
8262 if (flags & LOOP_EXIT_EDGES)
8264 /* Find edges which exit the loop. */
8266 = flow_loop_exit_edges_find (loop->nodes,
8270 if (flags & LOOP_EXITS_DOMS)
8274 /* Determine which loop nodes dominate all the exits
8276 loop->exits_doms = sbitmap_alloc (n_basic_blocks);
8277 sbitmap_copy (loop->exits_doms, loop->nodes);
8278 for (j = 0; j < loop->num_exits; j++)
8279 sbitmap_a_and_b (loop->exits_doms, loop->exits_doms,
8280 loops->cfg.dom[loop->exit_edges[j]->src->index]);
8282 /* The header of a natural loop must dominate
8284 if (! TEST_BIT (loop->exits_doms, loop->header->index))
8288 if (flags & LOOP_PRE_HEADER)
8290 /* Look to see if the loop has a pre-header node. */
8292 = flow_loop_pre_header_find (loop->header, loops->cfg.dom);
8294 /* Find the blocks within the extended basic block of
8295 the loop pre-header. */
8296 flow_loop_pre_header_scan (loop);
8302 /* Find all the natural loops in the function and save in LOOPS structure
8303 and recalculate loop_depth information in basic block structures.
8304 FLAGS controls which loop information is collected.
8305 Return the number of natural loops found. */
8308 flow_loops_find (loops, flags)
8309 struct loops *loops;
8321 /* This function cannot be repeatedly called with different
8322 flags to build up the loop information. The loop tree
8323 must always be built if this function is called. */
8324 if (! (flags & LOOP_TREE))
8327 memset (loops, 0, sizeof (*loops));
8329 /* Taking care of this degenerate case makes the rest of
8330 this code simpler. */
8331 if (n_basic_blocks == 0)
8337 /* Compute the dominators. */
8338 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
8339 calculate_dominance_info (NULL, dom, CDI_DOMINATORS);
8341 /* Count the number of loop edges (back edges). This should be the
8342 same as the number of natural loops. */
8345 for (b = 0; b < n_basic_blocks; b++)
8349 header = BASIC_BLOCK (b);
8350 header->loop_depth = 0;
8352 for (e = header->pred; e; e = e->pred_next)
8354 basic_block latch = e->src;
8356 /* Look for back edges where a predecessor is dominated
8357 by this block. A natural loop has a single entry
8358 node (header) that dominates all the nodes in the
8359 loop. It also has single back edge to the header
8360 from a latch node. Note that multiple natural loops
8361 may share the same header. */
8362 if (b != header->index)
8365 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
8372 /* Compute depth first search order of the CFG so that outer
8373 natural loops will be found before inner natural loops. */
8374 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
8375 rc_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
8376 flow_depth_first_order_compute (dfs_order, rc_order);
8378 /* Save CFG derived information to avoid recomputing it. */
8379 loops->cfg.dom = dom;
8380 loops->cfg.dfs_order = dfs_order;
8381 loops->cfg.rc_order = rc_order;
8383 /* Allocate loop structures. */
8385 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
8387 headers = sbitmap_alloc (n_basic_blocks);
8388 sbitmap_zero (headers);
8390 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
8391 sbitmap_zero (loops->shared_headers);
8393 /* Find and record information about all the natural loops
8396 for (b = 0; b < n_basic_blocks; b++)
8400 /* Search the nodes of the CFG in reverse completion order
8401 so that we can find outer loops first. */
8402 header = BASIC_BLOCK (rc_order[b]);
8404 /* Look for all the possible latch blocks for this header. */
8405 for (e = header->pred; e; e = e->pred_next)
8407 basic_block latch = e->src;
8409 /* Look for back edges where a predecessor is dominated
8410 by this block. A natural loop has a single entry
8411 node (header) that dominates all the nodes in the
8412 loop. It also has single back edge to the header
8413 from a latch node. Note that multiple natural loops
8414 may share the same header. */
8415 if (latch != ENTRY_BLOCK_PTR
8416 && TEST_BIT (dom[latch->index], header->index))
8420 loop = loops->array + num_loops;
8422 loop->header = header;
8423 loop->latch = latch;
8424 loop->num = num_loops;
8431 for (i = 0; i < num_loops; i++)
8433 struct loop *loop = &loops->array[i];
8435 /* Keep track of blocks that are loop headers so
8436 that we can tell which loops should be merged. */
8437 if (TEST_BIT (headers, loop->header->index))
8438 SET_BIT (loops->shared_headers, loop->header->index);
8439 SET_BIT (headers, loop->header->index);
8441 /* Find nodes contained within the loop. */
8442 loop->nodes = sbitmap_alloc (n_basic_blocks);
8444 = flow_loop_nodes_find (loop->header, loop->latch, loop->nodes);
8446 /* Compute first and last blocks within the loop.
8447 These are often the same as the loop header and
8448 loop latch respectively, but this is not always
8451 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
8453 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
8455 flow_loop_scan (loops, loop, flags);
8458 /* Natural loops with shared headers may either be disjoint or
8459 nested. Disjoint loops with shared headers cannot be inner
8460 loops and should be merged. For now just mark loops that share
8462 for (i = 0; i < num_loops; i++)
8463 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
8464 loops->array[i].shared = 1;
8466 sbitmap_free (headers);
8469 loops->num = num_loops;
8471 /* Build the loop hierarchy tree. */
8472 flow_loops_tree_build (loops);
8474 /* Assign the loop nesting depth and enclosed loop level for each
8476 loops->levels = flow_loops_level_compute (loops);
8482 /* Update the information regarding the loops in the CFG
8483 specified by LOOPS. */
8485 flow_loops_update (loops, flags)
8486 struct loops *loops;
8489 /* One day we may want to update the current loop data. For now
8490 throw away the old stuff and rebuild what we need. */
8492 flow_loops_free (loops);
8494 return flow_loops_find (loops, flags);
8498 /* Return non-zero if edge E enters header of LOOP from outside of LOOP. */
8501 flow_loop_outside_edge_p (loop, e)
8502 const struct loop *loop;
8505 if (e->dest != loop->header)
8507 return (e->src == ENTRY_BLOCK_PTR)
8508 || ! TEST_BIT (loop->nodes, e->src->index);
8511 /* Clear LOG_LINKS fields of insns in a chain.
8512 Also clear the global_live_at_{start,end} fields of the basic block
8516 clear_log_links (insns)
8522 for (i = insns; i; i = NEXT_INSN (i))
8526 for (b = 0; b < n_basic_blocks; b++)
8528 basic_block bb = BASIC_BLOCK (b);
8530 bb->global_live_at_start = NULL;
8531 bb->global_live_at_end = NULL;
8534 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
8535 EXIT_BLOCK_PTR->global_live_at_start = NULL;
8538 /* Given a register bitmap, turn on the bits in a HARD_REG_SET that
8539 correspond to the hard registers, if any, set in that map. This
8540 could be done far more efficiently by having all sorts of special-cases
8541 with moving single words, but probably isn't worth the trouble. */
8544 reg_set_to_hard_reg_set (to, from)
8550 EXECUTE_IF_SET_IN_BITMAP
8553 if (i >= FIRST_PSEUDO_REGISTER)
8555 SET_HARD_REG_BIT (*to, i);
8559 /* Called once at intialization time. */
8564 static int initialized;
8568 gcc_obstack_init (&flow_obstack);
8569 flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);
8574 obstack_free (&flow_obstack, flow_firstobj);
8575 flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);