1 /* Data flow analysis for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 /* This file contains the data flow analysis pass of the compiler. It
23 computes data flow information which tells combine_instructions
24 which insns to consider combining and controls register allocation.
26 Additional data flow information that is too bulky to record is
27 generated during the analysis, and is used at that time to create
28 autoincrement and autodecrement addressing.
30 The first step is dividing the function into basic blocks.
31 find_basic_blocks does this. Then life_analysis determines
32 where each register is live and where it is dead.
34 ** find_basic_blocks **
36 find_basic_blocks divides the current function's rtl into basic
37 blocks and constructs the CFG. The blocks are recorded in the
38 basic_block_info array; the CFG exists in the edge structures
39 referenced by the blocks.
41 find_basic_blocks also finds any unreachable loops and deletes them.
45 life_analysis is called immediately after find_basic_blocks.
46 It uses the basic block information to determine where each
47 hard or pseudo register is live.
49 ** live-register info **
51 The information about where each register is live is in two parts:
52 the REG_NOTES of insns, and the vector basic_block->global_live_at_start.
54 basic_block->global_live_at_start has an element for each basic
55 block, and the element is a bit-vector with a bit for each hard or
56 pseudo register. The bit is 1 if the register is live at the
57 beginning of the basic block.
59 Two types of elements can be added to an insn's REG_NOTES.
60 A REG_DEAD note is added to an insn's REG_NOTES for any register
61 that meets both of two conditions: The value in the register is not
62 needed in subsequent insns and the insn does not replace the value in
63 the register (in the case of multi-word hard registers, the value in
64 each register must be replaced by the insn to avoid a REG_DEAD note).
66 In the vast majority of cases, an object in a REG_DEAD note will be
67 used somewhere in the insn. The (rare) exception to this is if an
68 insn uses a multi-word hard register and only some of the registers are
69 needed in subsequent insns. In that case, REG_DEAD notes will be
70 provided for those hard registers that are not subsequently needed.
71 Partial REG_DEAD notes of this type do not occur when an insn sets
72 only some of the hard registers used in such a multi-word operand;
73 omitting REG_DEAD notes for objects stored in an insn is optional and
74 the desire to do so does not justify the complexity of the partial REG_DEAD note calculation.
77 REG_UNUSED notes are added for each register that is set by the insn
78 but is unused subsequently (if every register set by the insn is unused
79 and the insn does not reference memory or have some other side-effect,
80 the insn is deleted instead). If only part of a multi-word hard
81 register is used in a subsequent insn, REG_UNUSED notes are made for
82 the parts that will not be used.
84 To determine which registers are live after any insn, one can
85 start from the beginning of the basic block and scan insns, noting
86 which registers are set by each insn and which die there.
88 ** Other actions of life_analysis **
90 life_analysis sets up the LOG_LINKS fields of insns because the
91 information needed to do so is readily available.
93 life_analysis deletes insns whose only effect is to store a value that is never used.
96 life_analysis notices cases where a reference to a register as
97 a memory address can be combined with a preceding or following
98 incrementation or decrementation of the register. The separate
99 instruction to increment or decrement is deleted and the address
100 is changed to a POST_INC or similar rtx.
102 Each time an incrementing or decrementing address is created,
103 a REG_INC element is added to the insn's REG_NOTES list.
105 life_analysis fills in certain vectors containing information about
106 register usage: REG_N_REFS, REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH,
107 REG_N_CALLS_CROSSED and REG_BASIC_BLOCK.
109 life_analysis sets current_function_sp_is_unchanging if the function
110 doesn't modify the stack pointer. */
114 Split out from life_analysis:
115 - local property discovery (bb->local_live, bb->local_set)
116 - global property computation
118 - pre/post modify transformation
126 #include "hard-reg-set.h"
127 #include "basic-block.h"
128 #include "insn-config.h"
132 #include "function.h"
141 #include "splay-tree.h"
143 #define obstack_chunk_alloc xmalloc
144 #define obstack_chunk_free free
146 /* EXIT_IGNORE_STACK should be nonzero if, when returning from a function,
147 the stack pointer does not matter. The value is tested only in
148 functions that have frame pointers.
149 No definition is equivalent to always zero. */
150 #ifndef EXIT_IGNORE_STACK
151 #define EXIT_IGNORE_STACK 0
154 #ifndef HAVE_epilogue
155 #define HAVE_epilogue 0
157 #ifndef HAVE_prologue
158 #define HAVE_prologue 0
160 #ifndef HAVE_sibcall_epilogue
161 #define HAVE_sibcall_epilogue 0
165 #define LOCAL_REGNO(REGNO) 0
167 #ifndef EPILOGUE_USES
168 #define EPILOGUE_USES(REGNO) 0
171 #ifdef HAVE_conditional_execution
172 #ifndef REVERSE_CONDEXEC_PREDICATES_P
173 #define REVERSE_CONDEXEC_PREDICATES_P(x, y) ((x) == reverse_condition (y))
177 /* The obstack on which the flow graph components are allocated. */
179 struct obstack flow_obstack;
180 static char *flow_firstobj;
182 /* Number of basic blocks in the current function. */
186 /* Number of edges in the current function. */
190 /* The basic block array. */
192 varray_type basic_block_info;
194 /* The special entry and exit blocks. */
196 struct basic_block_def entry_exit_blocks[2]
199 NULL, /* head_tree */
203 NULL, /* local_set */
204 NULL, /* cond_local_set */
205 NULL, /* global_live_at_start */
206 NULL, /* global_live_at_end */
208 ENTRY_BLOCK, /* index */
216 NULL, /* head_tree */
220 NULL, /* local_set */
221 NULL, /* cond_local_set */
222 NULL, /* global_live_at_start */
223 NULL, /* global_live_at_end */
225 EXIT_BLOCK, /* index */
232 /* Nonzero if the second flow pass has completed. */
235 /* Maximum register number used in this function, plus one. */
239 /* Indexed by n, giving various register information */
241 varray_type reg_n_info;
243 /* Size of a regset for the current function,
244 in (1) bytes and (2) elements. */
249 /* Regset of regs live when calls to `setjmp'-like functions happen. */
250 /* ??? Does this exist only for the setjmp-clobbered warning message? */
252 regset regs_live_at_setjmp;
254 /* List made of EXPR_LIST rtx's which gives pairs of pseudo registers
255 that have to go in the same hard reg.
256 The first two regs in the list are a pair, and the next two
257 are another pair, etc. */
260 /* Callback that determines if it's ok for a function to have no
261 noreturn attribute. */
262 int (*lang_missing_noreturn_ok_p) PARAMS ((tree));
264 /* Set of registers that may be eliminable. These are handled specially
265 in updating regs_ever_live. */
267 static HARD_REG_SET elim_reg_set;
269 /* The basic block structure for every insn, indexed by uid. */
271 varray_type basic_block_for_insn;
273 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
274 /* ??? Should probably be using LABEL_NUSES instead. It would take a
275 bit of surgery to be able to use or co-opt the routines in jump. */
277 static rtx label_value_list;
278 static rtx tail_recursion_label_list;
280 /* Holds information for tracking conditional register life information. */
281 struct reg_cond_life_info
283 /* A boolean expression of conditions under which a register is dead. */
285 /* Conditions under which a register is dead at the basic block end. */
288 /* A boolean expression of conditions under which a register has been
292 /* ??? Could store mask of bytes that are dead, so that we could finally
293 track lifetimes of multi-word registers accessed via subregs. */
296 /* For use in communicating between propagate_block and its subroutines.
297 Holds all information needed to compute life and def-use information. */
299 struct propagate_block_info
301 /* The basic block we're considering. */
304 /* Bit N is set if register N is conditionally or unconditionally live. */
307 /* Bit N is set if register N is set this insn. */
310 /* Element N is the next insn that uses (hard or pseudo) register N
311 within the current basic block; or zero, if there is no such insn. */
314 /* Contains a list of all the MEMs we are tracking for dead store
318 /* If non-null, record the set of registers set unconditionally in the
322 /* If non-null, record the set of registers set conditionally in the
324 regset cond_local_set;
326 #ifdef HAVE_conditional_execution
327 /* Indexed by register number, holds a reg_cond_life_info for each
328 register that is not unconditionally live or dead. */
329 splay_tree reg_cond_dead;
331 /* Bit N is set if register N is in an expression in reg_cond_dead. */
335 /* The length of mem_set_list. */
336 int mem_set_list_len;
338 /* Non-zero if the value of CC0 is live. */
341 /* Flags controling the set of information propagate_block collects. */
345 /* Maximum length of pbi->mem_set_list before we start dropping
346 new elements on the floor. */
347 #define MAX_MEM_SET_LIST_LEN 100
349 /* Store the data structures necessary for depth-first search. */
350 struct depth_first_search_dsS {
351 /* stack for backtracking during the algorithm */
354 /* number of edges in the stack. That is, positions 0, ..., sp-1
358 /* record of basic blocks already seen by depth-first search */
359 sbitmap visited_blocks;
361 typedef struct depth_first_search_dsS *depth_first_search_ds;
363 /* Have print_rtl_and_abort give the same information that fancy_abort
365 #define print_rtl_and_abort() \
366 print_rtl_and_abort_fcn (__FILE__, __LINE__, __FUNCTION__)
368 /* Forward declarations */
369 static bool try_crossjump_to_edge PARAMS ((int, edge, edge));
370 static bool try_crossjump_bb PARAMS ((int, basic_block));
371 static bool outgoing_edges_match PARAMS ((basic_block, basic_block));
372 static int flow_find_cross_jump PARAMS ((int, basic_block, basic_block,
374 static int count_basic_blocks PARAMS ((rtx));
375 static void find_basic_blocks_1 PARAMS ((rtx));
376 static rtx find_label_refs PARAMS ((rtx, rtx));
377 static void make_edges PARAMS ((rtx));
378 static void make_label_edge PARAMS ((sbitmap *, basic_block,
380 static void make_eh_edge PARAMS ((sbitmap *, basic_block, rtx));
382 static void commit_one_edge_insertion PARAMS ((edge));
384 static void delete_unreachable_blocks PARAMS ((void));
385 static int can_delete_note_p PARAMS ((rtx));
386 static void expunge_block PARAMS ((basic_block));
387 static int can_delete_label_p PARAMS ((rtx));
388 static int tail_recursion_label_p PARAMS ((rtx));
389 static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
391 static int merge_blocks_move_successor_nojumps PARAMS ((basic_block,
393 static int merge_blocks PARAMS ((edge,basic_block,basic_block,
395 static bool try_optimize_cfg PARAMS ((int));
396 static bool forwarder_block_p PARAMS ((basic_block));
397 static bool can_fallthru PARAMS ((basic_block, basic_block));
398 static bool try_redirect_by_replacing_jump PARAMS ((edge, basic_block));
399 static bool try_simplify_condjump PARAMS ((basic_block));
400 static bool try_forward_edges PARAMS ((basic_block));
401 static void tidy_fallthru_edges PARAMS ((void));
402 static int verify_wide_reg_1 PARAMS ((rtx *, void *));
403 static void verify_wide_reg PARAMS ((int, rtx, rtx));
404 static void verify_local_live_at_start PARAMS ((regset, basic_block));
405 static int noop_move_p PARAMS ((rtx));
406 static void delete_noop_moves PARAMS ((rtx));
407 static void notice_stack_pointer_modification_1 PARAMS ((rtx, rtx, void *));
408 static void notice_stack_pointer_modification PARAMS ((rtx));
409 static void mark_reg PARAMS ((rtx, void *));
410 static void mark_regs_live_at_end PARAMS ((regset));
411 static int set_phi_alternative_reg PARAMS ((rtx, int, int, void *));
412 static void calculate_global_regs_live PARAMS ((sbitmap, sbitmap, int));
413 static void propagate_block_delete_insn PARAMS ((basic_block, rtx));
414 static rtx propagate_block_delete_libcall PARAMS ((basic_block, rtx, rtx));
415 static int insn_dead_p PARAMS ((struct propagate_block_info *,
417 static int libcall_dead_p PARAMS ((struct propagate_block_info *,
419 static void mark_set_regs PARAMS ((struct propagate_block_info *,
421 static void mark_set_1 PARAMS ((struct propagate_block_info *,
422 enum rtx_code, rtx, rtx,
424 #ifdef HAVE_conditional_execution
425 static int mark_regno_cond_dead PARAMS ((struct propagate_block_info *,
427 static void free_reg_cond_life_info PARAMS ((splay_tree_value));
428 static int flush_reg_cond_reg_1 PARAMS ((splay_tree_node, void *));
429 static void flush_reg_cond_reg PARAMS ((struct propagate_block_info *,
431 static rtx elim_reg_cond PARAMS ((rtx, unsigned int));
432 static rtx ior_reg_cond PARAMS ((rtx, rtx, int));
433 static rtx not_reg_cond PARAMS ((rtx));
434 static rtx and_reg_cond PARAMS ((rtx, rtx, int));
437 static void attempt_auto_inc PARAMS ((struct propagate_block_info *,
438 rtx, rtx, rtx, rtx, rtx));
439 static void find_auto_inc PARAMS ((struct propagate_block_info *,
441 static int try_pre_increment_1 PARAMS ((struct propagate_block_info *,
443 static int try_pre_increment PARAMS ((rtx, rtx, HOST_WIDE_INT));
445 static void mark_used_reg PARAMS ((struct propagate_block_info *,
447 static void mark_used_regs PARAMS ((struct propagate_block_info *,
449 void dump_flow_info PARAMS ((FILE *));
450 void debug_flow_info PARAMS ((void));
451 static void print_rtl_and_abort_fcn PARAMS ((const char *, int,
455 static void invalidate_mems_from_autoinc PARAMS ((struct propagate_block_info *,
457 static void invalidate_mems_from_set PARAMS ((struct propagate_block_info *,
459 static void remove_fake_successors PARAMS ((basic_block));
460 static void flow_nodes_print PARAMS ((const char *, const sbitmap,
462 static void flow_edge_list_print PARAMS ((const char *, const edge *,
464 static void flow_loops_cfg_dump PARAMS ((const struct loops *,
466 static int flow_loop_nested_p PARAMS ((struct loop *,
468 static int flow_loop_entry_edges_find PARAMS ((basic_block, const sbitmap,
470 static int flow_loop_exit_edges_find PARAMS ((const sbitmap, edge **));
471 static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
472 static void flow_dfs_compute_reverse_init
473 PARAMS ((depth_first_search_ds));
474 static void flow_dfs_compute_reverse_add_bb
475 PARAMS ((depth_first_search_ds, basic_block));
476 static basic_block flow_dfs_compute_reverse_execute
477 PARAMS ((depth_first_search_ds));
478 static void flow_dfs_compute_reverse_finish
479 PARAMS ((depth_first_search_ds));
480 static void flow_loop_pre_header_scan PARAMS ((struct loop *));
481 static basic_block flow_loop_pre_header_find PARAMS ((basic_block,
483 static void flow_loop_tree_node_add PARAMS ((struct loop *, struct loop *));
484 static void flow_loops_tree_build PARAMS ((struct loops *));
485 static int flow_loop_level_compute PARAMS ((struct loop *, int));
486 static int flow_loops_level_compute PARAMS ((struct loops *));
487 static void find_sub_basic_blocks PARAMS ((basic_block));
488 static bool redirect_edge_and_branch PARAMS ((edge, basic_block));
489 static basic_block redirect_edge_and_branch_force PARAMS ((edge, basic_block));
490 static rtx block_label PARAMS ((basic_block));
/* NOTE(review): this listing is an excerpt with intermediate source lines
   missing (the embedded line numbers jump, e.g. 493 -> 497); the body below
   is incomplete and not compilable as shown.  */
492 /* Find basic blocks of the current function.
493 F is the first insn of the function and NREGS the number of register
497 find_basic_blocks (f, nregs, file)
499 int nregs ATTRIBUTE_UNUSED;
500 FILE *file ATTRIBUTE_UNUSED;
503 timevar_push (TV_CFG);
505 /* Flush out existing data. */
506 if (basic_block_info != NULL)
512 /* Clear bb->aux on all extant basic blocks. We'll use this as a
513 tag for reuse during create_basic_block, just in case some pass
514 copies around basic block notes improperly. */
515 for (i = 0; i < n_basic_blocks; ++i)
516 BASIC_BLOCK (i)->aux = NULL;
518 VARRAY_FREE (basic_block_info);
/* Count blocks first so the basic_block_info varray can be sized before
   find_basic_blocks_1 populates it.  */
521 n_basic_blocks = count_basic_blocks (f);
523 /* Size the basic block table. The actual structures will be allocated
524 by find_basic_blocks_1, since we want to keep the structure pointers
525 stable across calls to find_basic_blocks. */
526 /* ??? This whole issue would be much simpler if we called find_basic_blocks
527 exactly once, and thereafter we don't have a single long chain of
528 instructions at all until close to the end of compilation when we
529 actually lay them out. */
531 VARRAY_BB_INIT (basic_block_info, n_basic_blocks, "basic_block_info");
533 find_basic_blocks_1 (f);
535 /* Record the block to which an insn belongs. */
536 /* ??? This should be done another way, by which (perhaps) a label is
537 tagged directly with the basic block that it starts. It is used for
538 more than that currently, but IMO that is the only valid use. */
540 max_uid = get_max_uid ();
542 /* Leave space for insns life_analysis makes in some cases for auto-inc.
543 These cases are rare, so we don't need too much space. */
544 max_uid += max_uid / 10;
547 compute_bb_for_insn (max_uid);
549 /* Discover the edges of our cfg. */
550 make_edges (label_value_list);
552 /* Do very simple cleanup now, for the benefit of code that runs between
553 here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns. */
554 tidy_fallthru_edges ();
556 mark_critical_edges ();
558 #ifdef ENABLE_CHECKING
/* NOTE(review): the closing of the ENABLE_CHECKING section and the function's
   closing brace are among the lines missing from this excerpt.  */
561 timevar_pop (TV_CFG);
/* NOTE(review): incomplete excerpt -- the return type, opening brace, and
   several statements are missing from this listing.  Emits the
   -Wmissing-noreturn / -Wreturn-type style diagnostics based on whether the
   CFG's EXIT block has predecessors.  */
565 check_function_return_warnings ()
/* No path to EXIT and the language hook agrees: suggest `noreturn'.  */
567 if (warn_missing_noreturn
568 && !TREE_THIS_VOLATILE (cfun->decl)
569 && EXIT_BLOCK_PTR->pred == NULL
570 && (lang_missing_noreturn_ok_p
571 && !lang_missing_noreturn_ok_p (cfun->decl)))
572 warning ("function might be possible candidate for attribute `noreturn'");
574 /* If we have a path to EXIT, then we do return. */
575 if (TREE_THIS_VOLATILE (cfun->decl)
576 && EXIT_BLOCK_PTR->pred != NULL)
577 warning ("`noreturn' function does return");
579 /* If the clobber_return_insn appears in some basic block, then we
580 do reach the end without returning a value. */
581 else if (warn_return_type
582 && cfun->x_clobber_return_insn != NULL
583 && EXIT_BLOCK_PTR->pred != NULL)
585 int max_uid = get_max_uid ();
587 /* If clobber_return_insn was excised by jump1, then renumber_insns
588 can make max_uid smaller than the number still recorded in our rtx.
589 That's fine, since this is a quick way of verifying that the insn
590 is no longer in the chain. */
591 if (INSN_UID (cfun->x_clobber_return_insn) < max_uid)
593 /* Recompute insn->block mapping, since the initial mapping is
594 set before we delete unreachable blocks. */
595 compute_bb_for_insn (max_uid);
597 if (BLOCK_FOR_INSN (cfun->x_clobber_return_insn) != NULL)
598 warning ("control reaches end of non-void function");
603 /* Count the basic blocks of the function. */
/* NOTE(review): incomplete excerpt -- return type, braces, the increment of
   `count', and the final return are among the missing lines.  A block begins
   at a CODE_LABEL or at the first real insn after a JUMP_INSN, BARRIER, or
   an insn that created an abnormal edge.  */
606 count_basic_blocks (f)
610 register RTX_CODE prev_code;
611 register int count = 0;
612 int saw_abnormal_edge = 0;
/* Seed with JUMP_INSN so the first real insn always opens a block.  */
614 prev_code = JUMP_INSN;
615 for (insn = f; insn; insn = NEXT_INSN (insn))
617 enum rtx_code code = GET_CODE (insn);
619 if (code == CODE_LABEL
620 || (GET_RTX_CLASS (code) == 'i'
621 && (prev_code == JUMP_INSN
622 || prev_code == BARRIER
623 || saw_abnormal_edge)))
625 saw_abnormal_edge = 0;
629 /* Record whether this insn created an edge. */
630 if (code == CALL_INSN)
634 /* If there is a nonlocal goto label and the specified
635 region number isn't -1, we have an edge. */
636 if (nonlocal_goto_handler_labels
637 && ((note = find_reg_note (insn, REG_EH_REGION, NULL_RTX)) == 0
638 || INTVAL (XEXP (note, 0)) >= 0))
639 saw_abnormal_edge = 1;
641 else if (can_throw_internal (insn))
642 saw_abnormal_edge = 1;
644 else if (flag_non_call_exceptions
646 && can_throw_internal (insn))
647 saw_abnormal_edge = 1;
653 /* The rest of the compiler works a bit smoother when we don't have to
654 check for the edge case of do-nothing functions with no basic blocks. */
/* Empty function: emit a harmless USE so at least one block exists.  */
657 emit_insn (gen_rtx_USE (VOIDmode, const0_rtx));
664 /* Scan a list of insns for labels referred to other than by jumps.
665 This is used to scan the alternatives of a call placeholder. */
/* NOTE(review): incomplete excerpt -- the return type (rtx, per the forward
   declaration earlier in the file), parameter declarations, and the bodies
   of the exception branches are among the missing lines.  Accumulates onto
   LVL and returns the extended EXPR_LIST.  */
667 find_label_refs (f, lvl)
673 for (insn = f; insn; insn = NEXT_INSN (insn))
674 if (INSN_P (insn) && GET_CODE (insn) != JUMP_INSN)
678 /* Make a list of all labels referred to other than by jumps
679 (which just don't have the REG_LABEL notes).
681 Make a special exception for labels followed by an ADDR*VEC,
682 as this would be a part of the tablejump setup code.
684 Make a special exception to registers loaded with label
685 values just before jump insns that use them. */
687 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
688 if (REG_NOTE_KIND (note) == REG_LABEL)
690 rtx lab = XEXP (note, 0), next;
/* Label immediately precedes a dispatch table: tablejump setup, skip.  */
692 if ((next = next_nonnote_insn (lab)) != NULL
693 && GET_CODE (next) == JUMP_INSN
694 && (GET_CODE (PATTERN (next)) == ADDR_VEC
695 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
697 else if (GET_CODE (lab) == NOTE)
/* Label feeds the very next jump insn via REG_LABEL: also skip.  */
699 else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
700 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
703 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
710 /* Assume that someone emitted code with control flow instructions to the
711 basic block. Update the data structure. */
/* NOTE(review): incomplete excerpt -- the loop heads, switch on `code', and
   several statements are among the lines missing from this listing.  Splits
   BB at internal labels/jumps and rewires edges for the new blocks.  */
713 find_sub_basic_blocks (bb)
716 rtx first_insn = bb->head, insn;
718 edge succ_list = bb->succ;
719 rtx jump_insn = NULL_RTX;
723 basic_block first_bb = bb, last_bb;
726 if (GET_CODE (first_insn) == LABEL_REF)
727 first_insn = NEXT_INSN (first_insn);
728 first_insn = NEXT_INSN (first_insn);
732 /* Scan insn chain and try to find new basic block boundaries. */
735 enum rtx_code code = GET_CODE (insn);
739 /* We need some special care for those expressions. */
740 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
741 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
750 /* On code label, split current basic block. */
752 falltru = split_block (bb, PREV_INSN (insn));
757 remove_edge (falltru);
761 if (LABEL_ALTERNATE_NAME (insn))
762 make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
765 /* In case we've previously split insn on the JUMP_INSN, move the
766 block header to proper place. */
769 falltru = split_block (bb, PREV_INSN (insn));
779 insn = NEXT_INSN (insn);
781 /* Last basic block must end in the original BB end. */
785 /* Wire in the original edges for last basic block. */
788 bb->succ = succ_list;
790 succ_list->src = bb, succ_list = succ_list->succ_next;
793 bb->succ = succ_list;
795 /* Now re-scan and wire in all edges. This expects simple (conditional)
796 jumps at the end of each new basic block. */
798 for (i = first_bb->index; i < last_bb->index; i++)
800 bb = BASIC_BLOCK (i);
801 if (GET_CODE (bb->end) == JUMP_INSN)
803 mark_jump_label (PATTERN (bb->end), bb->end, 0);
804 make_label_edge (NULL, bb, JUMP_LABEL (bb->end), 0);
806 insn = NEXT_INSN (insn);
810 /* Find all basic blocks of the function whose first insn is F.
812 Collect and return a list of labels whose addresses are taken. This
813 will be used in make_edges for use with computed gotos. */
/* NOTE(review): incomplete excerpt -- the switch on `code', the
   new_bb_inclusive/new_bb_exclusive labels, and many statements are among
   the lines missing from this listing.  Results are stored into the
   file-scope label_value_list and tail_recursion_label_list at the end.  */
816 find_basic_blocks_1 (f)
819 register rtx insn, next;
821 rtx bb_note = NULL_RTX;
827 /* We process the instructions in a slightly different way than we did
828 previously. This is so that we see a NOTE_BASIC_BLOCK after we have
829 closed out the previous block, so that it gets attached at the proper
830 place. Since this form should be equivalent to the previous,
831 count_basic_blocks continues to use the old form as a check. */
833 for (insn = f; insn; insn = next)
835 enum rtx_code code = GET_CODE (insn);
837 next = NEXT_INSN (insn);
843 int kind = NOTE_LINE_NUMBER (insn);
845 /* Look for basic block notes with which to keep the
846 basic_block_info pointers stable. Unthread the note now;
847 we'll put it back at the right place in create_basic_block.
848 Or not at all if we've already found a note in this block. */
849 if (kind == NOTE_INSN_BASIC_BLOCK)
851 if (bb_note == NULL_RTX)
854 next = flow_delete_insn (insn);
860 /* A basic block starts at a label. If we've closed one off due
861 to a barrier or some such, no need to do it again. */
862 if (head != NULL_RTX)
864 create_basic_block (i++, head, end, bb_note);
872 /* A basic block ends at a jump. */
873 if (head == NULL_RTX)
877 /* ??? Make a special check for table jumps. The way this
878 happens is truly and amazingly gross. We are about to
879 create a basic block that contains just a code label and
880 an addr*vec jump insn. Worse, an addr_diff_vec creates
881 its own natural loop.
883 Prevent this bit of brain damage, pasting things together
884 correctly in make_edges.
886 The correct solution involves emitting the table directly
887 on the tablejump instruction as a note, or JUMP_LABEL. */
889 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
890 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
898 goto new_bb_inclusive;
901 /* A basic block ends at a barrier. It may be that an unconditional
902 jump already closed the basic block -- no need to do it again. */
903 if (head == NULL_RTX)
905 goto new_bb_exclusive;
909 /* Record whether this call created an edge. */
910 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
911 int region = (note ? INTVAL (XEXP (note, 0)) : 0);
/* CALL_PLACEHOLDERs hold alternative expansions; scan each for labels.  */
913 if (GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
915 /* Scan each of the alternatives for label refs. */
916 lvl = find_label_refs (XEXP (PATTERN (insn), 0), lvl);
917 lvl = find_label_refs (XEXP (PATTERN (insn), 1), lvl);
918 lvl = find_label_refs (XEXP (PATTERN (insn), 2), lvl);
919 /* Record its tail recursion label, if any. */
920 if (XEXP (PATTERN (insn), 3) != NULL_RTX)
921 trll = alloc_EXPR_LIST (0, XEXP (PATTERN (insn), 3), trll);
924 /* A basic block ends at a call that can either throw or
925 do a non-local goto. */
926 if ((nonlocal_goto_handler_labels && region >= 0)
927 || can_throw_internal (insn))
930 if (head == NULL_RTX)
935 create_basic_block (i++, head, end, bb_note);
936 head = end = NULL_RTX;
944 /* Non-call exceptions generate new blocks just like calls. */
945 if (flag_non_call_exceptions && can_throw_internal (insn))
946 goto new_bb_inclusive;
948 if (head == NULL_RTX)
957 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
961 /* Make a list of all labels referred to other than by jumps.
963 Make a special exception for labels followed by an ADDR*VEC,
964 as this would be a part of the tablejump setup code.
966 Make a special exception to registers loaded with label
967 values just before jump insns that use them. */
/* NOTE(review): this REG_LABEL scan duplicates find_label_refs above --
   presumably kept inline for the main walk; candidates for sharing.  */
969 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
970 if (REG_NOTE_KIND (note) == REG_LABEL)
972 rtx lab = XEXP (note, 0), next;
974 if ((next = next_nonnote_insn (lab)) != NULL
975 && GET_CODE (next) == JUMP_INSN
976 && (GET_CODE (PATTERN (next)) == ADDR_VEC
977 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
979 else if (GET_CODE (lab) == NOTE)
981 else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
982 && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
985 lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
/* Close out the final block, if any insns remain open.  */
990 if (head != NULL_RTX)
991 create_basic_block (i++, head, end, bb_note);
993 flow_delete_insn (bb_note);
/* Sanity check against count_basic_blocks' independent count.  */
995 if (i != n_basic_blocks)
998 label_value_list = lvl;
999 tail_recursion_label_list = trll;
1002 /* Tidy the CFG by deleting unreachable code and whatnot. */
/* NOTE(review): the function header is among the lines missing from this
   listing (presumably `cleanup_cfg (mode)` -- confirm against the full
   source); only the body fragment below survives.  */
1008 timevar_push (TV_CLEANUP_CFG);
1009 delete_unreachable_blocks ();
/* try_optimize_cfg may expose newly unreachable blocks; clean up again.  */
1010 if (try_optimize_cfg (mode))
1011 delete_unreachable_blocks ();
1012 mark_critical_edges ();
1014 /* Kill the data we won't maintain. */
1015 free_EXPR_LIST_list (&label_value_list);
1016 free_EXPR_LIST_list (&tail_recursion_label_list);
1017 timevar_pop (TV_CLEANUP_CFG);
1020 /* Create a new basic block consisting of the instructions between
1021 HEAD and END inclusive. Reuses the note and basic block struct
1022 in BB_NOTE, if any. */
/* NOTE(review): incomplete excerpt -- parameter declarations, the start of
   the reuse condition, and the bb->head/bb->end assignments are among the
   missing lines.  */
1025 create_basic_block (index, head, end, bb_note)
1027 rtx head, end, bb_note;
1032 && ! RTX_INTEGRATED_P (bb_note)
1033 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
1036 /* If we found an existing note, thread it back onto the chain. */
1040 if (GET_CODE (head) == CODE_LABEL)
1044 after = PREV_INSN (head);
1048 if (after != bb_note && NEXT_INSN (after) != bb_note)
1049 reorder_insns (bb_note, bb_note, after);
1053 /* Otherwise we must create a note and a basic block structure.
1054 Since we allow basic block structs in rtl, give the struct
1055 the same lifetime by allocating it off the function obstack
1056 rather than using malloc. */
1058 bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
1059 memset (bb, 0, sizeof (*bb));
/* The bb note goes after a leading label, otherwise before HEAD.  */
1061 if (GET_CODE (head) == CODE_LABEL)
1062 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
1065 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
1068 NOTE_BASIC_BLOCK (bb_note) = bb;
1071 /* Always include the bb note in the block. */
1072 if (NEXT_INSN (end) == bb_note)
1078 BASIC_BLOCK (index) = bb;
1080 /* Tag the block so that we know it has been used when considering
1081 other basic block notes. */
1085 /* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
1086 note associated with the BLOCK. */
/* NOTE(review): incomplete excerpt -- the return type, the assignment of
   `insn' from the block, and the failure branches are among the missing
   lines.  */
1089 first_insn_after_basic_block_note (block)
1094 /* Get the first instruction in the block. */
1097 if (insn == NULL_RTX)
/* Skip a leading CODE_LABEL; the bb note follows it.  */
1099 if (GET_CODE (insn) == CODE_LABEL)
1100 insn = NEXT_INSN (insn);
1101 if (!NOTE_INSN_BASIC_BLOCK_P (insn))
1104 return NEXT_INSN (insn);
1107 /* Records the basic block struct in BB_FOR_INSN, for every instruction
1108 indexed by INSN_UID. MAX is the size of the array. */
/* NOTE(review): incomplete excerpt -- the per-block insn-walk loop head and
   its termination test are among the missing lines.  Rebuilds the
   file-scope basic_block_for_insn varray from scratch.  */
1111 compute_bb_for_insn (max)
1116 if (basic_block_for_insn)
1117 VARRAY_FREE (basic_block_for_insn);
1118 VARRAY_BB_INIT (basic_block_for_insn, max, "basic_block_for_insn");
1120 for (i = 0; i < n_basic_blocks; ++i)
1122 basic_block bb = BASIC_BLOCK (i);
1129 int uid = INSN_UID (insn);
1131 VARRAY_BB (basic_block_for_insn, uid) = bb;
1134 insn = NEXT_INSN (insn);
1139 /* Free the memory associated with the edge structures. */
/* NOTE(review): incomplete excerpt -- the function header (clear_edges, per
   surrounding context -- confirm against full source), the per-edge free
   calls, and the pred-list clearing are among the missing lines.  */
1147 for (i = 0; i < n_basic_blocks; ++i)
1149 basic_block bb = BASIC_BLOCK (i);
1151 for (e = bb->succ; e; e = n)
1161 for (e = ENTRY_BLOCK_PTR->succ; e; e = n)
1167 ENTRY_BLOCK_PTR->succ = 0;
1168 EXIT_BLOCK_PTR->pred = 0;
1173 /* Identify the edges between basic blocks.
1175 NONLOCAL_LABEL_LIST is a list of non-local labels in the function. Blocks
1176 that are otherwise unreachable may be reachable with a non-local goto.
1178 BB_EH_END is an array indexed by basic block number in which we record
1179 the list of exception regions active at the end of the basic block. */
/* Identify and create the edges between basic blocks, examining the
   last insn of each block to discover how control can leave it.
   LABEL_VALUE_LIST is the list of labels that may be targets of
   computed jumps.  NOTE(review): many lines are elided in this
   excerpt (gaps in the embedded line numbers); documentation covers
   only the visible code.  */
1182 make_edges (label_value_list)
1183 rtx label_value_list;
1186 sbitmap *edge_cache = NULL;
1188 /* Assume no computed jump; revise as we create edges. */
1189 current_function_has_computed_jump = 0;
1191 /* Heavy use of computed goto in machine-generated code can lead to
1192 nearly fully-connected CFGs. In that case we spend a significant
1193 amount of time searching the edge lists for duplicates. */
1194 if (forced_labels || label_value_list)
1196 edge_cache = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
1197 sbitmap_vector_zero (edge_cache, n_basic_blocks);
1200 /* By nature of the way these get numbered, block 0 is always the entry. */
1201 make_edge (edge_cache, ENTRY_BLOCK_PTR, BASIC_BLOCK (0), EDGE_FALLTHRU);
1203 for (i = 0; i < n_basic_blocks; ++i)
1205 basic_block bb = BASIC_BLOCK (i);
1208 int force_fallthru = 0;
/* An alternate-name label marks an alternate entry point, so it gets
   an edge directly from ENTRY.  */
1210 if (GET_CODE (bb->head) == CODE_LABEL
1211 && LABEL_ALTERNATE_NAME (bb->head))
1212 make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);
1214 /* Examine the last instruction of the block, and discover the
1215 ways we can leave the block. */
1218 code = GET_CODE (insn);
1221 if (code == JUMP_INSN)
1225 /* Recognize exception handling placeholders. */
1226 if (GET_CODE (PATTERN (insn)) == RESX)
1227 make_eh_edge (edge_cache, bb, insn);
1229 /* Recognize a non-local goto as a branch outside the
1230 current function. */
1231 else if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
1234 /* ??? Recognize a tablejump and do the right thing. */
1235 else if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1236 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1237 && GET_CODE (tmp) == JUMP_INSN
1238 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1239 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
/* ADDR_VEC stores the label vector in operand 0, ADDR_DIFF_VEC in
   operand 1; add an edge for every case label.  */
1244 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1245 vec = XVEC (PATTERN (tmp), 0);
1247 vec = XVEC (PATTERN (tmp), 1);
1249 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1250 make_label_edge (edge_cache, bb,
1251 XEXP (RTVEC_ELT (vec, j), 0), 0);
1253 /* Some targets (eg, ARM) emit a conditional jump that also
1254 contains the out-of-range target. Scan for these and
1255 add an edge if necessary. */
1256 if ((tmp = single_set (insn)) != NULL
1257 && SET_DEST (tmp) == pc_rtx
1258 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1259 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
1260 make_label_edge (edge_cache, bb,
1261 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
1263 #ifdef CASE_DROPS_THROUGH
1264 /* Silly VAXen. The ADDR_VEC is going to be in the way of
1265 us naturally detecting fallthru into the next block. */
1270 /* If this is a computed jump, then mark it as reaching
1271 everything on the label_value_list and forced_labels list. */
1272 else if (computed_jump_p (insn))
1274 current_function_has_computed_jump = 1;
1276 for (x = label_value_list; x; x = XEXP (x, 1))
1277 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
1279 for (x = forced_labels; x; x = XEXP (x, 1))
1280 make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
1283 /* A return insn creates an edge to the exit block. */
1284 else if (returnjump_p (insn))
1285 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);
1287 /* Otherwise, we have a plain conditional or unconditional jump. */
1290 if (! JUMP_LABEL (insn))
1292 make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
1296 /* If this is a sibling call insn, then this is in effect a
1297 combined call and return, and so we need an edge to the
1298 exit block. No need to worry about EH edges, since we
1299 wouldn't have created the sibling call in the first place. */
1301 if (code == CALL_INSN && SIBLING_CALL_P (insn))
1302 make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
1303 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1305 /* If this is a CALL_INSN, then mark it as reaching the active EH
1306 handler for this CALL_INSN. If we're handling non-call
1307 exceptions then any insn can reach any of the active handlers.
1309 Also mark the CALL_INSN as reaching any nonlocal goto handler. */
1311 else if (code == CALL_INSN || flag_non_call_exceptions)
1313 /* Add any appropriate EH edges. */
1314 make_eh_edge (edge_cache, bb, insn);
1316 if (code == CALL_INSN && nonlocal_goto_handler_labels)
1318 /* ??? This could be made smarter: in some cases it's possible
1319 to tell that certain calls will not do a nonlocal goto.
1321 For example, if the nested functions that do the nonlocal
1322 gotos do not have their addresses taken, then only calls to
1323 those functions or to other nested functions that use them
1324 could possibly do nonlocal gotos. */
1325 /* We do know that a REG_EH_REGION note with a value less
1326 than 0 is guaranteed not to perform a non-local goto. */
1327 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1328 if (!note || INTVAL (XEXP (note, 0)) >= 0)
1329 for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
1330 make_label_edge (edge_cache, bb, XEXP (x, 0),
1331 EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
1335 /* Find out if we can drop through to the next block. */
1336 insn = next_nonnote_insn (insn);
1337 if (!insn || (i + 1 == n_basic_blocks && force_fallthru))
1338 make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1339 else if (i + 1 < n_basic_blocks)
1341 rtx tmp = BLOCK_HEAD (i + 1);
1342 if (GET_CODE (tmp) == NOTE)
1343 tmp = next_nonnote_insn (tmp);
1344 if (force_fallthru || insn == tmp)
1345 make_edge (edge_cache, bb, BASIC_BLOCK (i + 1), EDGE_FALLTHRU);
1350 sbitmap_vector_free (edge_cache);
1353 /* Create an edge between two basic blocks. FLAGS are auxiliary information
1354 about the edge that is accumulated between calls. */
/* Create an edge from SRC to DST, accumulating FLAGS if the edge
   already exists.  EDGE_CACHE, when non-null, is a bitmap matrix used
   to test edge existence in O(1).  NOTE(review): some lines are
   elided in this excerpt (switch cases, edge field assignments).  */
1357 make_edge (edge_cache, src, dst, flags)
1358 sbitmap *edge_cache;
1359 basic_block src, dst;
1365 /* Don't bother with edge cache for ENTRY or EXIT; there aren't that
1366 many edges to them, and we didn't allocate memory for it. */
1367 use_edge_cache = (edge_cache
1368 && src != ENTRY_BLOCK_PTR
1369 && dst != EXIT_BLOCK_PTR);
1371 /* Make sure we don't add duplicate edges. */
1372 switch (use_edge_cache)
1375 /* Quick test for non-existence of the edge. */
1376 if (! TEST_BIT (edge_cache[src->index], dst->index))
1379 /* The edge exists; early exit if no work to do. */
/* Fallback: linear scan of SRC's successor list for a duplicate.  */
1385 for (e = src->succ; e; e = e->succ_next)
/* No duplicate found -- allocate and link a fresh edge at the head of
   both the successor and predecessor lists.  */
1394 e = (edge) xcalloc (1, sizeof (*e));
1397 e->succ_next = src->succ;
1398 e->pred_next = dst->pred;
1407 SET_BIT (edge_cache[src->index], dst->index);
1410 /* Create an edge from a basic block to a label. */
/* Create an edge from SRC to the block containing LABEL, with FLAGS.
   A non-CODE_LABEL target is silently ignored.  */
1413 make_label_edge (edge_cache, src, label, flags)
1414 sbitmap *edge_cache;
1419 if (GET_CODE (label) != CODE_LABEL)
1422 /* If the label was never emitted, this insn is junk, but avoid a
1423 crash trying to refer to BLOCK_FOR_INSN (label). This can happen
1424 as a result of a syntax error and a diagnostic has already been
1427 if (INSN_UID (label) == 0)
1430 make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
1433 /* Create the edges generated by INSN in REGION. */
/* Create abnormal EH edges from SRC to every exception handler
   reachable from INSN.  Call insns additionally get
   EDGE_ABNORMAL_CALL.  */
1436 make_eh_edge (edge_cache, src, insn)
1437 sbitmap *edge_cache;
1441 int is_call = (GET_CODE (insn) == CALL_INSN ? EDGE_ABNORMAL_CALL : 0);
/* reachable_handlers returns an INSN_LIST of handler labels.  */
1444 handlers = reachable_handlers (insn);
1446 for (i = handlers; i; i = XEXP (i, 1))
1447 make_label_edge (edge_cache, src, XEXP (i, 0),
1448 EDGE_ABNORMAL | EDGE_EH | is_call);
1450 free_INSN_LIST_list (&handlers);
1453 /* Identify critical edges and set the bits appropriately. */
/* Walk every block and set or clear EDGE_CRITICAL on each outgoing
   edge.  An edge is critical iff its source has multiple successors
   AND its destination has multiple predecessors.
   NOTE(review): the loop-advance lines are elided in this excerpt.  */
1456 mark_critical_edges ()
1458 int i, n = n_basic_blocks;
1461 /* We begin with the entry block. This is not terribly important now,
1462 but could be if a front end (Fortran) implemented alternate entry
1464 bb = ENTRY_BLOCK_PTR;
1471 /* (1) Critical edges must have a source with multiple successors. */
1472 if (bb->succ && bb->succ->succ_next)
1474 for (e = bb->succ; e; e = e->succ_next)
1476 /* (2) Critical edges must have a destination with multiple
1477 predecessors. Note that we know there is at least one
1478 predecessor -- the edge we followed to get here. */
1479 if (e->dest->pred->pred_next)
1480 e->flags |= EDGE_CRITICAL;
1482 e->flags &= ~EDGE_CRITICAL;
/* Single-successor source: none of its edges can be critical.  */
1487 for (e = bb->succ; e; e = e->succ_next)
1488 e->flags &= ~EDGE_CRITICAL;
1493 bb = BASIC_BLOCK (i);
1497 /* Split a block BB after insn INSN creating a new fallthru edge.
1498 Return the new edge. Note that to keep other parts of the compiler happy,
1499 this function renumbers all the basic blocks so that the new
1500 one has a number one greater than the block split. */
/* Split block BB after insn INSN, creating a new fallthru edge from BB
   to the new block and returning that edge.  All basic blocks are
   renumbered so the new block's number is one greater than BB's.
   NOTE(review): some lines are elided in this excerpt (declarations,
   some field assignments, the else branch around line 1571).  */
1503 split_block (bb, insn)
1513 /* There is no point splitting the block after its end. */
1514 if (bb->end == insn)
1517 /* Create the new structures. */
1518 new_bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*new_bb));
1519 new_edge = (edge) xcalloc (1, sizeof (*new_edge));
1522 memset (new_bb, 0, sizeof (*new_bb));
/* The new block takes everything after INSN up to BB's old end.  */
1524 new_bb->head = NEXT_INSN (insn);
1525 new_bb->end = bb->end;
/* BB's old successors move to the new block; BB's sole successor
   becomes the new fallthru edge.  */
1528 new_bb->succ = bb->succ;
1529 bb->succ = new_edge;
1530 new_bb->pred = new_edge;
1531 new_bb->count = bb->count;
1532 new_bb->frequency = bb->frequency;
1533 new_bb->loop_depth = bb->loop_depth;
1536 new_edge->dest = new_bb;
1537 new_edge->flags = EDGE_FALLTHRU;
1538 new_edge->probability = REG_BR_PROB_BASE;
1539 new_edge->count = bb->count;
1541 /* Redirect the src of the successor edges of bb to point to new_bb. */
1542 for (e = new_bb->succ; e; e = e->succ_next)
1545 /* Place the new block just after the block being split. */
1546 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1548 /* Some parts of the compiler expect blocks to be numbered in
1549 sequential order, so insert the new block immediately after the
1550 block being split. */
1552 for (i = n_basic_blocks - 1; i > j + 1; --i)
1554 basic_block tmp = BASIC_BLOCK (i - 1);
1555 BASIC_BLOCK (i) = tmp;
1559 BASIC_BLOCK (i) = new_bb;
/* Place the NOTE_INSN_BASIC_BLOCK note after a leading label, else
   before the block's first insn.  */
1562 if (GET_CODE (new_bb->head) == CODE_LABEL)
1564 /* Create the basic block note. */
1565 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK,
1567 NOTE_BASIC_BLOCK (bb_note) = new_bb;
1571 /* Create the basic block note. */
1572 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
1574 NOTE_BASIC_BLOCK (bb_note) = new_bb;
1575 new_bb->head = bb_note;
1578 update_bb_for_insn (new_bb);
/* If liveness info exists, recompute it for the two halves.  */
1580 if (bb->global_live_at_start)
1582 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1583 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1584 COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
1586 /* We now have to calculate which registers are live at the end
1587 of the split basic block and at the start of the new basic
1588 block. Start with those registers that are known to be live
1589 at the end of the original basic block and get
1590 propagate_block to determine which registers are live. */
1591 COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
1592 propagate_block (new_bb, new_bb->global_live_at_start, NULL, NULL, 0);
1593 COPY_REG_SET (bb->global_live_at_end,
1594 new_bb->global_live_at_start);
1600 /* Return label in the head of basic block. Create one if it doesn't exist. */
/* Body of block_label: if BLOCK does not already start with a
   CODE_LABEL, emit a fresh label before its head and make that the new
   head.  NOTE(review): the function header and return are elided in
   this excerpt.  */
1605 if (GET_CODE (block->head) != CODE_LABEL)
1606 block->head = emit_label_before (gen_label_rtx (), block->head);
1610 /* Return true if the block has no effect and only forwards control flow to
1611 its single destination. */
/* Return true if BB has no effect and only forwards control flow to
   its single destination: not ENTRY/EXIT, exactly one successor, and
   no active insn other than (at most) a trailing simple jump.  */
1613 forwarder_block_p (bb)
1616 rtx insn = bb->head;
1617 if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
1618 || !bb->succ || bb->succ->succ_next)
/* Any active insn before the block end disqualifies it.  */
1621 while (insn != bb->end)
1623 if (active_insn_p (insn))
1625 insn = NEXT_INSN (insn);
/* The final insn may be inactive, or a jump that does nothing else.  */
1627 return (!active_insn_p (insn)
1628 || (GET_CODE (insn) == JUMP_INSN && onlyjump_p (insn)));
1631 /* Return nonzero if we can reach target from src by falling through.  */
/* Return nonzero if control can reach TARGET from SRC by falling
   through, i.e. the next active insn after SRC's end is TARGET's first
   active insn.  */
1633 can_fallthru (src, target)
1634 basic_block src, target;
1636 rtx insn = src->end;
1637 rtx insn2 = target->head;
/* For the immediately following block, skip inactive insns (labels,
   notes) at its head before comparing.  */
1639 if (src->index + 1 == target->index && !active_insn_p (insn2))
1640 insn2 = next_active_insn (insn2);
1641 /* ??? Later we may add code to move jump tables offline. */
1642 return next_active_insn (insn) == insn2;
1645 /* Attempt to perform edge redirection by replacing possibly complex jump
1646 instruction by unconditional jump or removing jump completely.
1647 This can apply only if all edges now point to the same block.
1649 The parameters and return values are equivalent to redirect_edge_and_branch.
/* Attempt to redirect edge E to TARGET by replacing SRC's (possibly
   complex) jump insn with an unconditional jump, or removing the jump
   entirely when fallthru is possible.  Applies only when all of SRC's
   outgoing edges will point to TARGET.  Parameters/return match
   redirect_edge_and_branch.  NOTE(review): some lines are elided in
   this excerpt (declarations, early returns, cc0 #ifdef guards).  */
1652 try_redirect_by_replacing_jump (e, target)
1656 basic_block src = e->src;
1657 rtx insn = src->end;
1662 /* Verify that all targets will be TARGET. */
1663 for (tmp = src->succ; tmp; tmp = tmp->succ_next)
1664 if (tmp->dest != target && tmp != e)
1666 if (tmp || !onlyjump_p (insn))
1669 /* Avoid removing branch with side effects. */
1670 set = single_set (insn);
1671 if (!set || side_effects_p (set))
1674 /* See if we can create the fallthru edge. */
1675 if (can_fallthru (src, target))
1677 src->end = PREV_INSN (insn);
1679 fprintf (rtl_dump_file, "Removing jump %i.\n", INSN_UID (insn));
1680 flow_delete_insn (insn);
1683 /* Selectively unlink whole insn chain. */
1684 if (src->end != PREV_INSN (target->head))
1685 flow_delete_insn_chain (NEXT_INSN (src->end),
1686 PREV_INSN (target->head));
1688 /* If this already is simplejump, redirect it. */
1689 else if (simplejump_p (insn))
1691 if (e->dest == target)
1694 fprintf (rtl_dump_file, "Redirecting jump %i from %i to %i.\n",
1695 INSN_UID (insn), e->dest->index, target->index);
1696 redirect_jump (insn, block_label (target), 0);
1698 /* Or replace possibly complicated jump insn by simple jump insn. */
1701 rtx target_label = block_label (target);
1704 src->end = PREV_INSN (insn);
1705 src->end = emit_jump_insn_after (gen_jump (target_label), src->end);
1706 JUMP_LABEL (src->end) = target_label;
1707 LABEL_NUSES (target_label)++;
1708 if (basic_block_for_insn)
1709 set_block_for_new_insns (src->end, src);
1711 fprintf (rtl_dump_file, "Replacing insn %i by jump %i\n",
1712 INSN_UID (insn), INSN_UID (src->end));
1713 flow_delete_insn (insn);
/* A jump not followed by a barrier needs one emitted after it.  */
1714 barrier = next_nonnote_insn (src->end);
1715 if (!barrier || GET_CODE (barrier) != BARRIER)
1716 emit_barrier_after (src->end);
1719 /* Keep only one edge out and set proper flags. */
1720 while (src->succ->succ_next)
1721 remove_edge (src->succ);
1724 e->flags = EDGE_FALLTHRU;
1727 e->probability = REG_BR_PROB_BASE;
1728 e->count = src->count;
1730 /* In case we've zapped a conditional jump, we need to kill the cc0
1731 setter too if available. */
1734 if (GET_CODE (insn) == JUMP_INSN)
1735 insn = prev_nonnote_insn (insn);
1736 if (sets_cc0_p (insn))
1738 if (insn == src->end)
1739 src->end = PREV_INSN (insn);
1740 flow_delete_insn (insn);
1744 /* We don't want a block to end on a line-number note since that has
1745 the potential of changing the code between -g and not -g. */
1746 while (GET_CODE (e->src->end) == NOTE
1747 && NOTE_LINE_NUMBER (e->src->end) >= 0)
1749 rtx prev = PREV_INSN (e->src->end);
1750 flow_delete_insn (e->src->end);
1754 if (e->dest != target)
1755 redirect_edge_succ (e, target);
1759 /* Attempt to change code to redirect edge E to TARGET.
1760 Don't do that on expense of adding new instructions or reordering
1763 The function can also be called with edge destination equivalent to the
1764 TARGET. Then it should try the simplifications and do nothing if
1767 Return true if transformation succeeded. We still return false in case
1768 E already pointed to TARGET and we didn't manage to simplify the instruction
/* Attempt to change code so edge E goes to TARGET, without adding
   insns or reordering blocks.  Handles tablejumps, casesi dispatch
   insns, and ordinary jumps; refuses fallthru edges, computed jumps
   and return jumps.  NOTE(review): some lines are elided in this
   excerpt (declarations, early returns).  */
1771 redirect_edge_and_branch (e, target)
1776 rtx old_label = e->dest->head;
1777 basic_block src = e->src;
1778 rtx insn = src->end;
/* First try the cheap transformation of replacing/removing the jump.  */
1780 if (try_redirect_by_replacing_jump (e, target))
1782 /* Do this fast path late, as we want above code to simplify for cases
1783 where called on single edge leaving basic block containing nontrivial
1785 else if (e->dest == target)
1788 /* We can only redirect non-fallthru edges of jump insn. */
1789 if (e->flags & EDGE_FALLTHRU)
1791 if (GET_CODE (insn) != JUMP_INSN)
1794 /* Recognize a tablejump and adjust all matching cases. */
1795 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
1796 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
1797 && GET_CODE (tmp) == JUMP_INSN
1798 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
1799 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
1803 rtx new_label = block_label (target);
1805 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1806 vec = XVEC (PATTERN (tmp), 0);
1808 vec = XVEC (PATTERN (tmp), 1);
/* Replace every case entry that targeted the old label, keeping
   label use counts in sync.  */
1810 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1811 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1813 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
1814 --LABEL_NUSES (old_label);
1815 ++LABEL_NUSES (new_label);
1818 /* Handle casesi dispatch insns */
1819 if ((tmp = single_set (insn)) != NULL
1820 && SET_DEST (tmp) == pc_rtx
1821 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1822 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1823 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1825 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
1827 --LABEL_NUSES (old_label);
1828 ++LABEL_NUSES (new_label);
1833 /* ?? We may play the games with moving the named labels from
1834 one basic block to the other in case only one computed_jump is
1836 if (computed_jump_p (insn))
1839 /* A return instruction can't be redirected. */
1840 if (returnjump_p (insn))
1843 /* If the insn doesn't go where we think, we're confused. */
1844 if (JUMP_LABEL (insn) != old_label)
1846 redirect_jump (insn, block_label (target), 0);
1850 fprintf (rtl_dump_file, "Edge %i->%i redirected to %i\n",
1851 e->src->index, e->dest->index, target->index);
1852 if (e->dest != target)
1855 /* Check whether the edge is already present. */
1856 for (s = src->succ; s; s=s->succ_next)
1857 if (s->dest == target)
/* An edge to TARGET already exists: merge E's flags and counts into
   it rather than creating a duplicate.  */
1861 s->flags |= e->flags;
1862 s->probability += e->probability;
1863 s->count += e->count;
1867 redirect_edge_succ (e, target);
1872 /* Redirect edge even at the expense of creating new jump insn or
1873 basic block. Return new basic block if created, NULL otherwise.
1874 Abort if conversion is impossible. */
/* Redirect edge E to TARGET even at the expense of emitting a new jump
   insn or creating a new basic block.  Return the new basic block if
   one was created, NULL otherwise.  NOTE(review): some lines are
   elided in this excerpt (declarations, early returns, some dump
   arguments).  */
1876 redirect_edge_and_branch_force (e, target)
/* Cheap paths first: in-place redirection, already correct target,
   abnormal edges (cannot be redirected), non-fallthru edges.  */
1886 if (redirect_edge_and_branch (e, target))
1888 if (e->dest == target)
1890 if (e->flags & EDGE_ABNORMAL)
1892 if (!(e->flags & EDGE_FALLTHRU))
1895 e->flags &= ~EDGE_FALLTHRU;
1896 label = block_label (target);
1897 /* Case of the fallthru block. */
1898 if (!e->src->succ->succ_next)
1900 e->src->end = emit_jump_insn_after (gen_jump (label), e->src->end);
1901 JUMP_LABEL (e->src->end) = label;
1902 LABEL_NUSES (label)++;
1903 if (basic_block_for_insn)
1904 set_block_for_new_insns (e->src->end, e->src);
1905 emit_barrier_after (e->src->end);
1907 fprintf (rtl_dump_file,
1908 "Emitting jump insn %i to redirect edge %i->%i to %i\n",
1909 INSN_UID (e->src->end), e->src->index, e->dest->index,
1911 redirect_edge_succ (e, target);
1914 /* Redirecting fallthru edge of the conditional needs extra work. */
1917 fprintf (rtl_dump_file,
1918 "Emitting jump insn %i in new BB to redirect edge %i->%i to %i\n",
1919 INSN_UID (e->src->end), e->src->index, e->dest->index,
1922 /* Create the new structures. */
1923 new_bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*new_bb));
1924 new_edge = (edge) xcalloc (1, sizeof (*new_edge));
1927 memset (new_bb, 0, sizeof (*new_bb));
/* The new block initially holds only the source block's end insn.  */
1929 new_bb->end = new_bb->head = e->src->end;
1930 new_bb->succ = NULL;
1931 new_bb->pred = new_edge;
1932 new_bb->count = e->count;
1933 new_bb->frequency = e->probability * e->src->frequency / REG_BR_PROB_BASE;
1934 new_bb->loop_depth = e->dest->loop_depth;
1936 new_edge->flags = EDGE_FALLTHRU;
1937 new_edge->probability = e->probability;
1938 new_edge->count = e->count;
/* Seed the new block's liveness from the old destination's.  */
1940 if (e->dest->global_live_at_start)
1942 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1943 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1944 COPY_REG_SET (new_bb->global_live_at_start,
1945 e->dest->global_live_at_start);
1946 COPY_REG_SET (new_bb->global_live_at_end, new_bb->global_live_at_start);
/* Wire up the new fallthru edge from E's source into the new block.  */
1950 new_edge->src = e->src;
1951 new_edge->dest = new_bb;
1952 new_edge->succ_next = e->src->succ;
1953 e->src->succ = new_edge;
1954 new_edge->pred_next = NULL;
1956 /* Redirect old edge. */
1957 redirect_edge_succ (e, target);
1958 redirect_edge_pred (e, new_bb);
1959 e->probability = REG_BR_PROB_BASE;
1961 /* Place the new block just after the block being split. */
1962 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
1964 /* Some parts of the compiler expect blocks to be numbered in
1965 sequential order, so insert the new block immediately after the
1966 block being split. */
1967 j = new_edge->src->index;
1968 for (i = n_basic_blocks - 1; i > j + 1; --i)
1970 basic_block tmp = BASIC_BLOCK (i - 1);
1971 BASIC_BLOCK (i) = tmp;
1975 BASIC_BLOCK (i) = new_bb;
1978 /* Create the basic block note. */
1979 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, new_bb->head);
1980 NOTE_BASIC_BLOCK (bb_note) = new_bb;
1981 new_bb->head = bb_note;
/* Finally, the new block ends in an unconditional jump to TARGET.  */
1983 new_bb->end = emit_jump_insn_after (gen_jump (label), new_bb->head);
1984 JUMP_LABEL (new_bb->end) = label;
1985 LABEL_NUSES (label)++;
1986 if (basic_block_for_insn)
1987 set_block_for_new_insns (new_bb->end, new_bb);
1988 emit_barrier_after (new_bb->end);
1992 /* Split a (typically critical) edge. Return the new block.
1993 Abort on abnormal edges.
1995 ??? The code generally expects to be called on critical edges.
1996 The case of a block ending in an unconditional jump to a
1997 block with multiple predecessors is not handled optimally. */
/* Split a (typically critical) edge EDGE_IN by inserting a new basic
   block on it; return the new block.  Aborts on abnormal edges.
   NOTE(review): some lines are elided in this excerpt (declarations,
   some field assignments, block renumbering details).  */
2000 split_edge (edge_in)
2003 basic_block old_pred, bb, old_succ;
2008 /* Abnormal edges cannot be split. */
2009 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
2012 old_pred = edge_in->src;
2013 old_succ = edge_in->dest;
2015 /* Create the new structures. */
2016 bb = (basic_block) obstack_alloc (&flow_obstack, sizeof (*bb));
2017 edge_out = (edge) xcalloc (1, sizeof (*edge_out));
2020 memset (bb, 0, sizeof (*bb));
2022 /* ??? This info is likely going to be out of date very soon. */
2023 if (old_succ->global_live_at_start)
2025 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
2026 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
2027 COPY_REG_SET (bb->global_live_at_start, old_succ->global_live_at_start);
2028 COPY_REG_SET (bb->global_live_at_end, old_succ->global_live_at_start);
2032 bb->succ = edge_out;
2033 bb->count = edge_in->count;
2034 bb->frequency = (edge_in->probability * edge_in->src->frequency
2035 / REG_BR_PROB_BASE);
/* The original edge now targets the new block, so it is no longer
   critical; the outgoing edge is a plain fallthru to OLD_SUCC.  */
2037 edge_in->flags &= ~EDGE_CRITICAL;
2039 edge_out->pred_next = old_succ->pred;
2040 edge_out->succ_next = NULL;
2042 edge_out->dest = old_succ;
2043 edge_out->flags = EDGE_FALLTHRU;
2044 edge_out->probability = REG_BR_PROB_BASE;
2045 edge_out->count = edge_in->count;
2047 old_succ->pred = edge_out;
2049 /* Tricky case -- if there existed a fallthru into the successor
2050 (and we're not it) we must add a new unconditional jump around
2051 the new block we're actually interested in.
2053 Further, if that edge is critical, this means a second new basic
2054 block must be created to hold it. In order to simplify correct
2055 insn placement, do this before we touch the existing basic block
2056 ordering for the block we were really wanting. */
2057 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
2060 for (e = edge_out->pred_next; e; e = e->pred_next)
2061 if (e->flags & EDGE_FALLTHRU)
2066 basic_block jump_block;
2069 if ((e->flags & EDGE_CRITICAL) == 0
2070 && e->src != ENTRY_BLOCK_PTR)
2072 /* Non critical -- we can simply add a jump to the end
2073 of the existing predecessor. */
2074 jump_block = e->src;
2078 /* We need a new block to hold the jump. The simplest
2079 way to do the bulk of the work here is to recursively
2081 jump_block = split_edge (e);
2082 e = jump_block->succ;
2085 /* Now add the jump insn ... */
2086 pos = emit_jump_insn_after (gen_jump (old_succ->head),
2088 jump_block->end = pos;
2089 if (basic_block_for_insn)
2090 set_block_for_new_insns (pos, jump_block);
2091 emit_barrier_after (pos);
2093 /* ... let jump know that label is in use, ... */
2094 JUMP_LABEL (pos) = old_succ->head;
2095 ++LABEL_NUSES (old_succ->head);
2097 /* ... and clear fallthru on the outgoing edge. */
2098 e->flags &= ~EDGE_FALLTHRU;
2100 /* Continue splitting the interesting edge. */
2104 /* Place the new block just in front of the successor. */
2105 VARRAY_GROW (basic_block_info, ++n_basic_blocks);
2106 if (old_succ == EXIT_BLOCK_PTR)
2107 j = n_basic_blocks - 1;
2109 j = old_succ->index;
2110 for (i = n_basic_blocks - 1; i > j; --i)
2112 basic_block tmp = BASIC_BLOCK (i - 1);
2113 BASIC_BLOCK (i) = tmp;
2116 BASIC_BLOCK (i) = bb;
2119 /* Create the basic block note.
2121 Where we place the note can have a noticeable impact on the generated
2122 code. Consider this cfg:
2132 If we need to insert an insn on the edge from block 0 to block 1,
2133 we want to ensure the instructions we insert are outside of any
2134 loop notes that physically sit between block 0 and block 1. Otherwise
2135 we confuse the loop optimizer into thinking the loop is a phony. */
2136 if (old_succ != EXIT_BLOCK_PTR
2137 && PREV_INSN (old_succ->head)
2138 && GET_CODE (PREV_INSN (old_succ->head)) == NOTE
2139 && NOTE_LINE_NUMBER (PREV_INSN (old_succ->head)) == NOTE_INSN_LOOP_BEG
2140 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK,
2141 PREV_INSN (old_succ->head));
2142 else if (old_succ != EXIT_BLOCK_PTR)
2143 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, old_succ->head);
2145 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
2146 NOTE_BASIC_BLOCK (bb_note) = bb;
2147 bb->head = bb->end = bb_note;
2149 /* For non-fallthru edges, we must adjust the predecessor's
2150 jump instruction to target our new block. */
2151 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
2153 if (!redirect_edge_and_branch (edge_in, bb))
2157 redirect_edge_succ (edge_in, bb);
2162 /* Queue instructions for insertion on an edge between two basic blocks.
2163 The new instructions and basic blocks (if any) will not appear in the
2164 CFG until commit_edge_insertions is called. */
/* Queue PATTERN for insertion on edge E.  Queued insns do not appear
   in the CFG until commit_edge_insertions is called.  */
2167 insert_insn_on_edge (pattern, e)
2171 /* We cannot insert instructions on an abnormal critical edge.
2172 It will be easier to find the culprit if we die now. */
2173 if ((e->flags & (EDGE_ABNORMAL|EDGE_CRITICAL))
2174 == (EDGE_ABNORMAL|EDGE_CRITICAL))
/* Append to any insns already queued on this edge.  */
2177 if (e->insns == NULL_RTX)
2180 push_to_sequence (e->insns);
2182 emit_insn (pattern);
2184 e->insns = get_insns ();
2188 /* Update the CFG for the instructions queued on edge E. */
/* Materialize the insns queued on edge E into the insn stream,
   splitting the edge if neither endpoint offers a safe insertion
   point.  NOTE(review): some lines are elided in this excerpt
   (declarations, abort calls, some branches).  */
2191 commit_one_edge_insertion (e)
2194 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
2197 /* Pull the insns off the edge now since the edge might go away. */
2199 e->insns = NULL_RTX;
2201 /* Figure out where to put these things. If the destination has
2202 one predecessor, insert there. Except for the exit block. */
2203 if (e->dest->pred->pred_next == NULL
2204 && e->dest != EXIT_BLOCK_PTR)
2208 /* Get the location correct wrt a code label, and "nice" wrt
2209 a basic block note, and before everything else. */
2211 if (GET_CODE (tmp) == CODE_LABEL)
2212 tmp = NEXT_INSN (tmp)
2213 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
2214 tmp = NEXT_INSN (tmp);
2215 if (tmp == bb->head)
2218 after = PREV_INSN (tmp);
2221 /* If the source has one successor and the edge is not abnormal,
2222 insert there. Except for the entry block. */
2223 else if ((e->flags & EDGE_ABNORMAL) == 0
2224 && e->src->succ->succ_next == NULL
2225 && e->src != ENTRY_BLOCK_PTR)
2228 /* It is possible to have a non-simple jump here. Consider a target
2229 where some forms of unconditional jumps clobber a register. This
2230 happens on the fr30 for example.
2232 We know this block has a single successor, so we can just emit
2233 the queued insns before the jump. */
2234 if (GET_CODE (bb->end) == JUMP_INSN)
2240 /* We'd better be fallthru, or we've lost track of what's what. */
2241 if ((e->flags & EDGE_FALLTHRU) == 0)
2248 /* Otherwise we must split the edge. */
2251 bb = split_edge (e);
2255 /* Now that we've found the spot, do the insertion. */
2257 /* Set the new block number for these insns, if structure is allocated. */
2258 if (basic_block_for_insn)
2261 for (i = insns; i != NULL_RTX; i = NEXT_INSN (i))
2262 set_block_for_insn (i, bb);
/* Emit either before the chosen BEFORE insn or after AFTER.  */
2267 emit_insns_before (insns, before);
2268 if (before == bb->head)
2271 last = prev_nonnote_insn (before);
2275 last = emit_insns_after (insns, after);
2276 if (after == bb->end)
/* A queued return jump replaces the fallthru edge to EXIT.  */
2280 if (returnjump_p (last))
2282 /* ??? Remove all outgoing edges from BB and add one for EXIT.
2283 This is not currently a problem because this only happens
2284 for the (single) epilogue, which already has a fallthru edge
2288 if (e->dest != EXIT_BLOCK_PTR
2289 || e->succ_next != NULL
2290 || (e->flags & EDGE_FALLTHRU) == 0)
2292 e->flags &= ~EDGE_FALLTHRU;
2294 emit_barrier_after (last);
2298 flow_delete_insn (before);
2300 else if (GET_CODE (last) == JUMP_INSN)
/* New insns may have created sub-basic-block structure; rediscover.  */
2302 find_sub_basic_blocks (bb);
2305 /* Update the CFG for all queued instructions. */
/* Commit the queued insns on every edge of every block, starting from
   ENTRY and walking all blocks in index order.  NOTE(review): some
   lines are elided in this excerpt (loop structure, the test for
   queued insns).  */
2308 commit_edge_insertions ()
2313 #ifdef ENABLE_CHECKING
2314 verify_flow_info ();
2318 bb = ENTRY_BLOCK_PTR;
/* Capture succ_next first: committing may rewrite the edge list.  */
2323 for (e = bb->succ; e; e = next)
2325 next = e->succ_next;
2327 commit_one_edge_insertion (e);
2330 if (++i >= n_basic_blocks)
2332 bb = BASIC_BLOCK (i);
2336 /* Add fake edges to the function exit for any non constant calls in
2337 the bitmap of blocks specified by BLOCKS or to the whole CFG if
2338 BLOCKS is zero. Return the number of blocks that were split. */
/* Add fake edges to the function exit for any non-constant calls in
   the blocks given by the BLOCKS sbitmap, or in the whole CFG when
   BLOCKS is zero.  Return the number of blocks that were split.
   NOTE(review): some lines are elided in this excerpt (declarations,
   loop exits).  */
2341 flow_call_edges_add (blocks)
2345 int blocks_split = 0;
2349 /* Map bb indices into basic block pointers since split_block
2350 will renumber the basic blocks. */
2352 bbs = xmalloc (n_basic_blocks * sizeof (*bbs));
/* Either snapshot every block, or just the ones set in BLOCKS.  */
2356 for (i = 0; i < n_basic_blocks; i++)
2357 bbs[bb_num++] = BASIC_BLOCK (i);
2361 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
2363 bbs[bb_num++] = BASIC_BLOCK (i);
2368 /* Now add fake edges to the function exit for any non constant
2369 calls since there is no way that we can determine if they will
2372 for (i = 0; i < bb_num; i++)
2374 basic_block bb = bbs[i];
/* Scan the block backwards so splitting doesn't disturb the walk.  */
2378 for (insn = bb->end; ; insn = prev_insn)
2380 prev_insn = PREV_INSN (insn);
2381 if (GET_CODE (insn) == CALL_INSN && ! CONST_CALL_P (insn))
2385 /* Note that the following may create a new basic block
2386 and renumber the existing basic blocks. */
2387 e = split_block (bb, insn);
2391 make_edge (NULL, bb, EXIT_BLOCK_PTR, EDGE_FAKE);
2393 if (insn == bb->head)
2399 verify_flow_info ();
2402 return blocks_split;
2405 /* Find unreachable blocks. An unreachable block will have NULL in
2406 block->aux, a non-NULL value indicates the block is reachable. */
/* Find unreachable blocks via a worklist walk from ENTRY's successors.
   An unreachable block ends with NULL in block->aux; a non-NULL value
   marks the block reachable.  NOTE(review): worklist push lines are
   elided in this excerpt.  */
2409 find_unreachable_blocks ()
2413 basic_block *tos, *worklist;
2416 tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
2418 /* Use basic_block->aux as a marker. Clear them all. */
2420 for (i = 0; i < n; ++i)
2421 BASIC_BLOCK (i)->aux = NULL;
2423 /* Add our starting points to the worklist. Almost always there will
2424 be only one. It isn't inconceivable that we might one day directly
2425 support Fortran alternate entry points. */
2427 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
2431 /* Mark the block with a handy non-null value. */
2435 /* Iterate: find everything reachable from what we've already seen. */
2437 while (tos != worklist)
2439 basic_block b = *--tos;
2441 for (e = b->succ; e; e = e->succ_next)
2452 /* Delete all unreachable basic blocks. */
/* Delete all basic blocks not marked reachable by
   find_unreachable_blocks, then tidy the resulting fallthru edges.
   NOTE(review): declarations and the reachable-test line are elided
   in this excerpt.  */
2454 delete_unreachable_blocks ()
2458 find_unreachable_blocks ();
2460 /* Delete all unreachable basic blocks. Count down so that we
2461 don't interfere with the block renumbering that happens in
2462 flow_delete_block. */
2464 for (i = n_basic_blocks - 1; i >= 0; --i)
2466 basic_block b = BASIC_BLOCK (i);
2469 /* This block was found. Tidy up the mark. */
2472 flow_delete_block (b);
2475 tidy_fallthru_edges ();
2478 /* Return true if NOTE is not one of the ones that must be kept paired,
2479 so that we may simply delete them. */
/* Return true if NOTE is not one of the kinds that must be kept
   paired, so it may simply be deleted.  */
2482 can_delete_note_p (note)
2485 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
2486 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
2489 /* Unlink a chain of insns between START and FINISH, leaving notes
2490 that must be paired. */
/* Unlink the insns between START and FINISH inclusive, keeping notes
   that must remain paired and turning referenced labels into
   NOTE_INSN_DELETED_LABEL notes.  NOTE(review): some lines are elided
   in this excerpt (parameter declarations, loop structure).  */
2493 flow_delete_insn_chain (start, finish)
2496 /* Unchain the insns one by one. It would be quicker to delete all
2497 of these with a single unchaining, rather than one at a time, but
2498 we need to keep the NOTE's. */
2504 next = NEXT_INSN (start);
2505 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
2507 else if (GET_CODE (start) == CODE_LABEL
2508 && ! can_delete_label_p (start))
/* A label still in use cannot vanish: demote it to a deleted-label
   note, preserving its name for debug output.  */
2510 const char *name = LABEL_NAME (start);
2511 PUT_CODE (start, NOTE);
2512 NOTE_LINE_NUMBER (start) = NOTE_INSN_DELETED_LABEL;
2513 NOTE_SOURCE_FILE (start) = name;
2516 next = flow_delete_insn (start);
2518 if (start == finish)
2524 /* Delete the insns in a (non-live) block.  We physically delete every
2525    non-deleted-note insn, and update the flow graph appropriately.
2527    Return nonzero if we deleted an exception handler.  */
2529 /* ??? Preserving all such notes strikes me as wrong.  It would be nice
2530    to post-process the stream to remove empty blocks, loops, ranges, etc.  */
2533 flow_delete_block (b)
2536   int deleted_handler = 0;
2539   /* If the head of this block is a CODE_LABEL, then it might be the
2540      label for an exception handler which can't be reached.
2542      We need to remove the label from the exception_handler_label list
2543      and remove the associated NOTE_INSN_EH_REGION_BEG and
2544      NOTE_INSN_EH_REGION_END notes.  */
/* Warn about user code that can never execute before we destroy it.  */
2548   never_reached_warning (insn);
2550   if (GET_CODE (insn) == CODE_LABEL)
2551     maybe_remove_eh_handler (insn);
2553   /* Include any jump table following the basic block.  */
/* A tablejump's dispatch table lives just after the label it targets:
   JUMP_LABEL (end) -> CODE_LABEL -> ADDR_VEC/ADDR_DIFF_VEC.  Extend the
   deletion range to cover it.  */
2555   if (GET_CODE (end) == JUMP_INSN
2556       && (tmp = JUMP_LABEL (end)) != NULL_RTX
2557       && (tmp = NEXT_INSN (tmp)) != NULL_RTX
2558       && GET_CODE (tmp) == JUMP_INSN
2559       && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
2560 	  || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
2563   /* Include any barrier that may follow the basic block.  */
2564   tmp = next_nonnote_insn (end);
2565   if (tmp && GET_CODE (tmp) == BARRIER)
2568   /* Selectively delete the entire chain.  */
2569   flow_delete_insn_chain (insn, end);
2571   /* Remove the edges into and out of this block.  Note that there may
2572      indeed be edges in, if we are removing an unreachable loop.  */
/* For each incoming edge, unlink it from its source's successor list
   (the inner loop finds the list slot pointing at E), then free it.  */
2576   for (e = b->pred; e; e = next)
2578       for (q = &e->src->succ; *q != e; q = &(*q)->succ_next)
2581       next = e->pred_next;
/* Symmetrically unlink each outgoing edge from its destination's
   predecessor list.  */
2585   for (e = b->succ; e; e = next)
2587       for (q = &e->dest->pred; *q != e; q = &(*q)->pred_next)
2590       next = e->succ_next;
2599   /* Remove the basic block from the array, and compact behind it.  */
2602   return deleted_handler;
2605 /* Remove block B from the basic block array and compact behind it.  */
/* Shifts every block after B down one slot so the array stays dense,
   then shrinks the element count.  NOTE(review): the function name line
   and the re-assignment of each shifted block's ->index field are elided
   from this view — presumably x->index is updated alongside the move;
   confirm against the full source.  */
2611   int i, n = n_basic_blocks;
2613   for (i = b->index; i + 1 < n; ++i)
2615       basic_block x = BASIC_BLOCK (i + 1);
2616       BASIC_BLOCK (i) = x;
2620   basic_block_info->num_elements--;
2624 /* Delete INSN by patching it out.  Return the next insn.  */
/* Unlinks INSN from the doubly-linked insn chain, marks it deleted, and
   maintains label use counts and the nonlocal-goto handler list so later
   passes see consistent bookkeeping.  */
2627 flow_delete_insn (insn)
2630   rtx prev = PREV_INSN (insn);
2631   rtx next = NEXT_INSN (insn);
/* Detach INSN completely; INSN_DELETED_P keeps other passes from
   resurrecting it.  */
2634   PREV_INSN (insn) = NULL_RTX;
2635   NEXT_INSN (insn) = NULL_RTX;
2636   INSN_DELETED_P (insn) = 1;
2639     NEXT_INSN (prev) = next;
2641     PREV_INSN (next) = prev;
/* If INSN was the last insn in the function, the chain tail moves back
   to PREV.  */
2643     set_last_insn (prev);
2645   if (GET_CODE (insn) == CODE_LABEL)
2646     remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2648   /* If deleting a jump, decrement the use count of the label.  Deleting
2649      the label itself should happen in the normal course of block merging.  */
2650   if (GET_CODE (insn) == JUMP_INSN
2651       && JUMP_LABEL (insn)
2652       && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
2653     LABEL_NUSES (JUMP_LABEL (insn))--;
2655   /* Also if deleting an insn that references a label.  */
2656   else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
2657 	   && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
2658     LABEL_NUSES (XEXP (note, 0))--;
/* A jump table references many labels; drop one use for each entry.  */
2660   if (GET_CODE (insn) == JUMP_INSN
2661       && (GET_CODE (PATTERN (insn)) == ADDR_VEC
2662 	  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
2664       rtx pat = PATTERN (insn);
2665       int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
2666       int len = XVECLEN (pat, diff_vec_p);
2669       for (i = 0; i < len; i++)
2670 	LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
2676 /* True if a given label can be deleted.  */
/* A label must be kept if it is explicitly preserved, is the target of a
   computed goto or nonlocal reference (forced_labels), appears in the
   label-value or exception-handler lists, or was declared by the user.  */
2679 can_delete_label_p (label)
2684   if (LABEL_PRESERVE_P (label))
2687   for (x = forced_labels; x; x = XEXP (x, 1))
2688     if (label == XEXP (x, 0))
2690   for (x = label_value_list; x; x = XEXP (x, 1))
2691     if (label == XEXP (x, 0))
2693   for (x = exception_handler_labels; x; x = XEXP (x, 1))
2694     if (label == XEXP (x, 0))
2697   /* User declared labels must be preserved.  */
2698   if (LABEL_NAME (label) != 0)
/* Return nonzero if LABEL is the target of a tail-recursive call, i.e.
   it appears on tail_recursion_label_list.  Such labels must not be
   optimized away, since the CFG records no edge to them.  */
2705 tail_recursion_label_p (label)
2710   for (x = tail_recursion_label_list; x; x = XEXP (x, 1))
2711     if (label == XEXP (x, 0))
2717 /* Blocks A and B are to be merged into a single block A.  The insns
2718    are already contiguous, hence `nomove'.  */
2721 merge_blocks_nomove (a, b)
2725   rtx b_head, b_end, a_end;
2726   rtx del_first = NULL_RTX, del_last = NULL_RTX;
2729   /* If there was a CODE_LABEL beginning B, delete it.  */
2732   if (GET_CODE (b_head) == CODE_LABEL)
2734       /* Detect basic blocks with nothing but a label.  This can happen
2735 	 in particular at the end of a function.  */
2736       if (b_head == b_end)
2738       del_first = del_last = b_head;
2739       b_head = NEXT_INSN (b_head);
2742   /* Delete the basic block note.  */
2743   if (NOTE_INSN_BASIC_BLOCK_P (b_head))
2745       if (b_head == b_end)
2750       b_head = NEXT_INSN (b_head);
2753   /* If there was a jump out of A, delete it.  */
2755   if (GET_CODE (a_end) == JUMP_INSN)
/* Walk back over notes to find the insn preceding the jump, so the jump
   (and any notes after it) can be queued for deletion.  */
2759       for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
2760 	if (GET_CODE (prev) != NOTE
2761 	    || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
2768       /* If this was a conditional jump, we need to also delete
2769 	 the insn that set cc0.  */
2770       if (prev && sets_cc0_p (prev))
2773 	  prev = prev_nonnote_insn (prev);
/* A ends in a non-jump; any BARRIER after it is now stranded and must
   go too.  */
2782   else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
2783     del_first = NEXT_INSN (a_end);
2785   /* Delete everything marked above as well as crap that might be
2786      hanging out between the two blocks.  */
2787   flow_delete_insn_chain (del_first, del_last);
2789   /* Normally there should only be one successor of A and that is B, but
2790      partway though the merge of blocks for conditional_execution we'll
2791      be merging a TEST block with THEN and ELSE successors.  Free the
2792      whole lot of them and hope the caller knows what they're doing.  */
2794     remove_edge (a->succ);
2796   /* Adjust the edges out of B for the new owner.  */
2797   for (e = b->succ; e; e = e->succ_next)
2801   /* B hasn't quite yet ceased to exist.  Attempt to prevent mishap.  */
2802   b->pred = b->succ = NULL;
2804   /* Reassociate the insns of B with A.  */
2807   if (basic_block_for_insn)
2809       BLOCK_FOR_INSN (b_head) = a;
2810       while (b_head != b_end)
2812 	  b_head = NEXT_INSN (b_head);
2813 	  BLOCK_FOR_INSN (b_head) = a;
2823 /* Blocks A and B are to be merged into a single block.  A has no incoming
2824    fallthru edge, so it can be moved before B without adding or modifying
2825    any jumps (aside from the jump from A to B).  */
2828 merge_blocks_move_predecessor_nojumps (a, b)
2831   rtx start, end, barrier;
/* A's jump to B is followed by a barrier; it becomes redundant once A
   physically precedes B, so remove it.  */
2837   barrier = next_nonnote_insn (end);
2838   if (GET_CODE (barrier) != BARRIER)
2840   flow_delete_insn (barrier);
2842   /* Move block and loop notes out of the chain so that we do not
2843      disturb their order.
2845      ??? A better solution would be to squeeze out all the non-nested notes
2846      and adjust the block trees appropriately.  Even better would be to have
2847      a tighter connection between block trees and rtl so that this is not
2849   start = squeeze_notes (start, end);
2851   /* Scramble the insn chain.  */
2852   if (end != PREV_INSN (b->head))
2853     reorder_insns (start, end, PREV_INSN (b->head));
2857     fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",
2858 	     a->index, b->index);
2861   /* Swap the records for the two blocks around.  Although we are deleting B,
2862      A is now where B was and we want to compact the BB array from where
2864   BASIC_BLOCK (a->index) = b;
2865   BASIC_BLOCK (b->index) = a;
/* Keep block indices consistent with their (swapped) array slots.  */
2867   a->index = b->index;
2870   /* Now blocks A and B are contiguous.  Merge them.  */
2871   merge_blocks_nomove (a, b);
2876 /* Blocks A and B are to be merged into a single block.  B has no outgoing
2877    fallthru edge, so it can be moved after A without adding or modifying
2878    any jumps (aside from the jump from A to B).  */
2881 merge_blocks_move_successor_nojumps (a, b)
2884   rtx start, end, barrier;
2888   barrier = NEXT_INSN (end);
2890   /* Recognize a jump table following block B.  */
/* Extend the region being moved to include the dispatch table
   (CODE_LABEL followed by an ADDR_VEC/ADDR_DIFF_VEC jump insn), so the
   table travels with its jump.  */
2892       && GET_CODE (barrier) == CODE_LABEL
2893       && NEXT_INSN (barrier)
2894       && GET_CODE (NEXT_INSN (barrier)) == JUMP_INSN
2895       && (GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_VEC
2896 	  || GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_DIFF_VEC))
2898       end = NEXT_INSN (barrier);
2899       barrier = NEXT_INSN (end);
2902   /* There had better have been a barrier there.  Delete it.  */
2903   if (barrier && GET_CODE (barrier) == BARRIER)
2904     flow_delete_insn (barrier);
2906   /* Move block and loop notes out of the chain so that we do not
2907      disturb their order.
2909      ??? A better solution would be to squeeze out all the non-nested notes
2910      and adjust the block trees appropriately.  Even better would be to have
2911      a tighter connection between block trees and rtl so that this is not
2913   start = squeeze_notes (start, end);
2915   /* Scramble the insn chain.  */
2916   reorder_insns (start, end, a->end);
2918   /* Now blocks A and B are contiguous.  Merge them.  */
2919   merge_blocks_nomove (a, b);
2923     fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",
2924 	     b->index, a->index);
2930 /* Attempt to merge basic blocks that are potentially non-adjacent.
2931    Return true iff the attempt succeeded.  */
/* E is the edge from B to C; MODE carries CLEANUP_* flags.  Tries, in
   order: merging in place (fallthru), moving B before C, moving C after
   B, and finally moving C after B with a new jump redirected around.  */
2934 merge_blocks (e, b, c, mode)
2939   /* If C has a tail recursion label, do not merge.  There is no
2940      edge recorded from the call_placeholder back to this label, as
2941      that would make optimize_sibling_and_tail_recursive_calls more
2942      complex for no gain.  */
2943   if (GET_CODE (c->head) == CODE_LABEL
2944       && tail_recursion_label_p (c->head))
2947   /* If B has a fallthru edge to C, no need to move anything.  */
2948   if (e->flags & EDGE_FALLTHRU)
2950       merge_blocks_nomove (b, c);
2954 	fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",
2955 		 b->index, c->index);
2960   /* Otherwise we will need to move code around.  Do that only if expensive
2961      transformations are allowed.  */
2962   else if (mode & CLEANUP_EXPENSIVE)
2964       edge tmp_edge, c_fallthru_edge;
2965       int c_has_outgoing_fallthru;
2966       int b_has_incoming_fallthru;
2968       /* Avoid overactive code motion, as the forwarder blocks should be
2969 	 eliminated by the edge redirection instead.  Only exception is the
2970 	 case b is a forwarder block and c has no fallthru edge, but no
2971 	 optimizers should be confused by this extra jump and we are about
2972 	 to kill the jump in bb_reorder pass instead.  */
2973       if (forwarder_block_p (b) || forwarder_block_p (c))
2976       /* We must make sure to not munge nesting of exception regions,
2977 	 lexical blocks, and loop notes.
2979 	 The first is taken care of by requiring that the active eh
2980 	 region at the end of one block always matches the active eh
2981 	 region at the beginning of the next block.
2983 	 The later two are taken care of by squeezing out all the notes.  */
2985       /* ??? A throw/catch edge (or any abnormal edge) should be rarely
2986 	 executed and we may want to treat blocks which have two out
2987 	 edges, one normal, one abnormal as only having one edge for
2988 	 block merging purposes.  */
2990       for (tmp_edge = c->succ; tmp_edge; tmp_edge = tmp_edge->succ_next)
2991 	if (tmp_edge->flags & EDGE_FALLTHRU)
2993       c_has_outgoing_fallthru = (tmp_edge != NULL);
2994       c_fallthru_edge = tmp_edge;
2996       for (tmp_edge = b->pred; tmp_edge; tmp_edge = tmp_edge->pred_next)
2997 	if (tmp_edge->flags & EDGE_FALLTHRU)
2999       b_has_incoming_fallthru = (tmp_edge != NULL);
3001       /* If B does not have an incoming fallthru, then it can be moved
3002 	 immediately before C without introducing or modifying jumps.
3003 	 C cannot be the first block, so we do not have to worry about
3004 	 accessing a non-existent block.  */
3005       if (! b_has_incoming_fallthru)
3006 	return merge_blocks_move_predecessor_nojumps (b, c);
3008       /* Otherwise, we're going to try to move C after B.  If C does
3009 	 not have an outgoing fallthru, then it can be moved
3010 	 immediately after B without introducing or modifying jumps.  */
3011       if (! c_has_outgoing_fallthru)
3012 	return merge_blocks_move_successor_nojumps (b, c);
3014       /* Otherwise, we'll need to insert an extra jump, and possibly
3015 	 a new block to contain it.  We can't redirect to EXIT_BLOCK_PTR,
3016 	 as we don't have explicit return instructions before epilogues
3017 	 are generated, so give up on that case.  */
3019       if (c_fallthru_edge->dest != EXIT_BLOCK_PTR
3020 	  && merge_blocks_move_successor_nojumps (b, c))
3022 	  basic_block target = c_fallthru_edge->dest;
3026 	  /* This is a dirty hack to avoid code duplication.
3028 	     Set edge to point to wrong basic block, so
3029 	     redirect_edge_and_branch_force will do the trick
3030 	     and rewire edge back to the original location.  */
3031 	  redirect_edge_succ (c_fallthru_edge, ENTRY_BLOCK_PTR);
3032 	  new = redirect_edge_and_branch_force (c_fallthru_edge, target);
3034 	  /* We've just created barrier, but other barrier is already present
3035 	     in the stream.  Avoid duplicate.  */
3036 	  barrier = next_nonnote_insn (new ? new->end : b->end);
3037 	  if (GET_CODE (barrier) != BARRIER)
3039 	  flow_delete_insn (barrier);
3047 /* Simplify conditional jump around an jump.
3048    Return nonzero in case optimization matched.  */
/* Pattern: SRC ends in a condjump over a forwarder block that jumps to
   the block just past it.  Invert the condition so the forwarder (and
   its jump) can be deleted.  */
3051 try_simplify_condjump (src)
3054   basic_block final_block, next_block;
3055   rtx insn = src->end;
3056   edge branch, fallthru;
3058   /* Verify that there are exactly two successors.  */
3059   if (!src->succ || !src->succ->succ_next || src->succ->succ_next->succ_next
3060       || !any_condjump_p (insn))
3063   fallthru = FALLTHRU_EDGE (src);
3065   /* Following block must be simple forwarder block with single
3066      entry and must not be last in the stream.  */
3067   next_block = fallthru->dest;
3068   if (!forwarder_block_p (next_block)
3069       || next_block->pred->pred_next
3070       || next_block->index == n_basic_blocks - 1)
3073   /* The branch must target to block afterwards.  */
3074   final_block = BASIC_BLOCK (next_block->index + 1);
3076   branch = BRANCH_EDGE (src);
3078   if (branch->dest != final_block)
3081   /* Keep jump.c from being overactive in removing unreachable insns.  */
/* Temporarily bump the label's use count across invert_jump; restore it
   on failure.  */
3082   LABEL_NUSES (JUMP_LABEL (insn))++;
3083   if (!invert_jump (insn, block_label (next_block->succ->dest), 1))
3085       LABEL_NUSES (JUMP_LABEL (insn))--;
3089     fprintf (rtl_dump_file, "Simplifying condjump %i around jump %i\n",
3090 	     INSN_UID (insn), INSN_UID (next_block->end));
/* Swap the roles of the two edges: the old branch now falls through to
   FINAL_BLOCK and the old fallthru becomes the taken branch.  */
3092   redirect_edge_succ (branch, final_block);
3093   redirect_edge_succ (fallthru, next_block->succ->dest);
3095   branch->flags |= EDGE_FALLTHRU;
3096   fallthru->flags &= ~EDGE_FALLTHRU;
/* The forwarder block is now unreachable; remove it.  */
3098   flow_delete_block (next_block);
3102 /* Attempt to forward edges leaving basic block B.
3103    Return nonzero if successful.  */
/* For each outgoing edge of B, follow chains of forwarder blocks to the
   ultimate destination and retarget the edge directly there, updating
   the bypassed blocks' counts and frequencies.  */
3106 try_forward_edges (b)
3111   for (e = b->succ; e; e = e->succ_next)
3113       basic_block target = e->dest, first = e->dest;
3116       /* Look for the real destination of jump.
3117 	 Avoid infinite loop in the infinite empty loop by counting
3118 	 up to n_basic_blocks.  */
3119       while (forwarder_block_p (target)
3120 	     && target->succ->dest != EXIT_BLOCK_PTR
3121 	     && counter < n_basic_blocks)
3123 	  /* Bypass trivial infinite loops.  */
3124 	  if (target == target->succ->dest)
3125 	    counter = n_basic_blocks;
3126 	  target = target->succ->dest, counter++;
3129       if (target != first && counter < n_basic_blocks
3130 	  && redirect_edge_and_branch (e, target))
/* Deduct E's execution count/frequency from every forwarder block the
   edge no longer passes through.  */
3132 	  while (first != target)
3134 	      first->count -= e->count;
3135 	      first->succ->count -= e->count;
3136 	      first->frequency -= ((e->probability * b->frequency
3137 				    + REG_BR_PROB_BASE / 2)
3138 				   / REG_BR_PROB_BASE);
3139 	      first = first->succ->dest;
3141 	  /* We've possibly removed the edge.  */
3145       else if (rtl_dump_file && counter == n_basic_blocks)
3146 	fprintf (rtl_dump_file, "Infinite loop in BB %i.\n", target->index);
3147       else if (rtl_dump_file && first != target)
3148 	fprintf (rtl_dump_file,
3149 		 "Forwarding edge %i->%i to %i failed.\n", b->index,
3150 		 e->dest->index, target->index);
3155 /* Compare the instructions before end of B1 and B2
3156    to find an opportunity for cross jumping.
3157    (This means detecting identical sequences of insns)
3158    Find the longest possible equivalent sequences
3159    and store the first insns of those sequences into *F1 and *F2
3160    and return length of that sequence.
3162    To simplify callers of this function, in the
3163    all instructions were matched, always store bb->head.  */
3166 flow_find_cross_jump (mode, bb1, bb2, f1, f2)
3168      basic_block bb1, bb2;
/* Start the backward scan just before a bare unconditional jump, since
   such a jump will be removed by block reordering anyway.  */
3171   rtx i1 = onlyjump_p (bb1->end) ? PREV_INSN (bb1->end): bb1->end;
3172   rtx i2 = onlyjump_p (bb2->end) ? PREV_INSN (bb2->end): bb2->end;
3176   rtx last1 = bb1->end, last2 = bb2->end;
3177   rtx afterlast1 = bb1->end, afterlast2 = bb2->end;
3179   /* In case basic block ends by nontrivial jump instruction, count it as
3180      an instruction.  Do not count an unconditional jump, as it will be
3181      removed by basic_block reordering pass in case it is on the common
3183   if (bb1->succ->succ_next && bb1->end != i1)
/* Walk both blocks backwards in lockstep, skipping notes, until the
   insns stop matching or a block head is reached.  */
3186   for (;i1 != bb1->head; i1 = PREV_INSN (i1))
3189       if (GET_CODE (i1) == NOTE)
3191       while ((GET_CODE (i2) == NOTE && i2 != bb2->head))
3192 	i2 = PREV_INSN (i2);
3194       if (GET_CODE (i1) != GET_CODE (i2))
3200       /* If this is a CALL_INSN, compare register usage information.
3201 	 If we don't check this on stack register machines, the two
3202 	 CALL_INSNs might be merged leaving reg-stack.c with mismatching
3203 	 numbers of stack registers in the same basic block.
3204 	 If we don't check this on machines with delay slots, a delay slot may
3205 	 be filled that clobbers a parameter expected by the subroutine.
3207 	 ??? We take the simple route for now and assume that if they're
3208 	 equal, they were constructed identically.  */
3210       if (GET_CODE (i1) == CALL_INSN
3211 	  && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
3212 			    CALL_INSN_FUNCTION_USAGE (i2)))
3216       /* If cross_jump_death_matters is not 0, the insn's mode
3217 	 indicates whether or not the insn contains any stack-like
3220       if (!lose && (mode & CLEANUP_POST_REGSTACK ) && stack_regs_mentioned (i1))
3222 	  /* If register stack conversion has already been done, then
3223 	     death notes must also be compared before it is certain that
3224 	     the two instruction streams match.  */
3227 	  HARD_REG_SET i1_regset, i2_regset;
3229 	  CLEAR_HARD_REG_SET (i1_regset);
3230 	  CLEAR_HARD_REG_SET (i2_regset);
/* Collect the stack registers killed by each insn's REG_DEAD notes and
   require the two sets to be identical.  */
3232 	  for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
3233 	    if (REG_NOTE_KIND (note) == REG_DEAD
3234 		&& STACK_REG_P (XEXP (note, 0)))
3235 	      SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
3237 	  for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
3238 	    if (REG_NOTE_KIND (note) == REG_DEAD
3239 		&& STACK_REG_P (XEXP (note, 0)))
3240 	      SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));
3242 	  GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
3251       if (lose || GET_CODE (p1) != GET_CODE (p2)
3252 	  || ! rtx_renumbered_equal_p (p1, p2))
3254 	  /* The following code helps take care of G++ cleanups.  */
/* If both insns have matching REG_EQUAL/REG_EQUIV notes equating their
   destinations to the same constant, try rewriting both SET_SRCs to
   that constant so the patterns become identical.  */
3258 	  if (!lose && GET_CODE (p1) == GET_CODE (p2)
3259 	      && ((equiv1 = find_reg_note (i1, REG_EQUAL, NULL_RTX)) != 0
3260 		  || (equiv1 = find_reg_note (i1, REG_EQUIV, NULL_RTX)) != 0)
3261 	      && ((equiv2 = find_reg_note (i2, REG_EQUAL, NULL_RTX)) != 0
3262 		  || (equiv2 = find_reg_note (i2, REG_EQUIV, NULL_RTX)) != 0)
3263 	      /* If the equivalences are not to a constant, they may
3264 		 reference pseudos that no longer exist, so we can't
3266 	      && CONSTANT_P (XEXP (equiv1, 0))
3267 	      && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
3269 	      rtx s1 = single_set (i1);
3270 	      rtx s2 = single_set (i2);
3271 	      if (s1 != 0 && s2 != 0
3272 		  && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
3274 		  validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
3275 		  validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
3276 		  if (! rtx_renumbered_equal_p (p1, p2))
3278 		  else if (apply_change_group ())
3283 	  /* Insns fail to match; cross jumping is limited to the following
3287 	  /* Don't allow the insn after a compare to be shared by
3288 	     cross-jumping unless the compare is also shared.
3289 	     Here, if either of these non-matching insns is a compare,
3290 	     exclude the following insn from possible cross-jumping.  */
3291 	  if (sets_cc0_p (p1) || sets_cc0_p (p2))
3292 	    last1 = afterlast1, last2 = afterlast2, ninsns--;
/* USEs and CLOBBERs match trivially and are not counted as real
   insns in the sequence length.  */
3298       if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
3300 	  /* Ok, this insn is potentially includable in a cross-jump here.  */
3301 	  afterlast1 = last1, afterlast2 = last2;
3302 	  last1 = i1, last2 = i2;
3308       i2 = PREV_INSN (i2);
3311   /* Skip the notes to reach potential head of basic block.  */
3312   while (last1 != bb1->head && GET_CODE (PREV_INSN (last1)) == NOTE)
3313     last1 = PREV_INSN (last1);
3314   if (last1 != bb1->head && GET_CODE (PREV_INSN (last1)) == CODE_LABEL)
3315     last1 = PREV_INSN (last1);
3316   while (last2 != bb2->head && GET_CODE (PREV_INSN (last2)) == NOTE)
3317     last2 = PREV_INSN (last2);
3318   if (last2 != bb2->head && GET_CODE (PREV_INSN (last2)) == CODE_LABEL)
3319     last2 = PREV_INSN (last2);
3326 /* Return true iff outgoing edges of BB1 and BB2 match, together with
3327    the branch instruction.  This means that if we commonize the control
3328    flow before end of the basic block, the semantic remains unchanged.
3330    Assume that at least one outgoing edge is forwarded to the same
3333 outgoing_edges_match (bb1, bb2)
3337   /* bb1 has one successor, so we are seeing unconditional jump.  */
3338   if (bb1->succ && !bb1->succ->succ_next)
3339     return (bb2->succ && !bb2->succ->succ_next);
3341   /* Match conditional jumps - this may get tricky when fallthru and branch
3342      edges are crossed.  */
3343   if (bb1->succ && bb1->succ->succ_next && !bb1->succ->succ_next->succ_next
3344       && any_condjump_p (bb1->end))
3346       edge b1, f1, b2, f2;
3347       bool reverse, match;
3348       rtx set1, set2, cond1, cond2;
3349       enum rtx_code code1, code2;
/* BB2 must also end in a condjump with exactly two successors.  */
3351       if (!bb2->succ || !bb2->succ->succ_next
3352 	  || bb1->succ->succ_next->succ_next || !any_condjump_p (bb2->end))
3354       b1 = BRANCH_EDGE (bb1);
3355       b2 = BRANCH_EDGE (bb2);
3356       f1 = FALLTHRU_EDGE (bb1);
3357       f2 = FALLTHRU_EDGE (bb2);
3359       /* Get around possible forwarders on fallthru edges.  Other cases
3360 	 should be optimized out already.  */
3361       if (forwarder_block_p (f1->dest))
3362 	f1 = f1->dest->succ;
3363       if (forwarder_block_p (f2->dest))
3364 	f2 = f2->dest->succ;
3366       /* To simplify use of this function, return false if there are
3367 	 unneeded forwarder blocks.  These will get eliminated later
3368 	 during cleanup_cfg.  */
3369       if (forwarder_block_p (f1->dest)
3370 	  || forwarder_block_p (f2->dest)
3371 	  || forwarder_block_p (b1->dest)
3372 	  || forwarder_block_p (b2->dest))
/* Destinations may agree directly, or crosswise (branch of one matches
   fallthru of the other) — in the latter case the condition of one jump
   must be the reverse of the other's.  */
3375       if (f1->dest == f2->dest && b1->dest == b2->dest)
3377       else if (f1->dest == b2->dest && b1->dest == f2->dest)
3382       set1 = pc_set (bb1->end);
3383       set2 = pc_set (bb2->end);
/* Both jumps must branch the same way round (condition guarding the
   label vs. guarding the fallthru arm of the IF_THEN_ELSE).  */
3384       if ((XEXP (SET_SRC (set1), 1) == pc_rtx)
3385 	  != (XEXP (SET_SRC (set2), 1) == pc_rtx))
3388       cond1 = XEXP (SET_SRC (set1), 0);
3389       cond2 = XEXP (SET_SRC (set2), 0);
3390       code1 = GET_CODE (cond1);
3392 	code2 = reversed_comparison_code (cond2, bb2->end);
3394 	code2 = GET_CODE (cond2);
/* UNKNOWN means the comparison cannot be reversed (e.g. floating
   point); give up.  */
3396       if (code2 == UNKNOWN)
3399       /* See if we have a (cross) match in the codes and operands.  */
3400       match = ((code1 == code2
3401 		&& rtx_renumbered_equal_p (XEXP (cond1, 0), XEXP (cond2, 0))
3402 		&& rtx_renumbered_equal_p (XEXP (cond1, 1), XEXP (cond2, 1)))
3403 	       || (code1 == swap_condition (code2)
3404 		   && rtx_renumbered_equal_p (XEXP (cond1, 1),
3406 		   && rtx_renumbered_equal_p (XEXP (cond1, 0),
3408       /* In case of returning true, we will commonize the flow.
3409 	 This also means, that both branches will contain only single
3410 	 branch prediction algorithm.  To match require resulting branch
3411 	 to be still well predictable.  */
3412       if (match && !optimize_size)
3416 	  note1 = find_reg_note (bb1->end, REG_BR_PROB, 0);
3417 	  note2 = find_reg_note (bb2->end, REG_BR_PROB, 0);
3418 	  if (!note1 || !note2)
3420 	  prob1 = INTVAL (XEXP (note1, 0));
3421 	  prob2 = INTVAL (XEXP (note2, 0));
/* For a crosswise match the probabilities compare against each other
   reversed.  */
3423 	    prob2 = REG_BR_PROB_BASE - prob2;
3425 	  /* ??? Later we should use basic block frequency to allow merging
3426 	     in the infrequent blocks, but at the moment it is not
3427 	     available when cleanup_cfg is run.  */
3428 	  if (abs (prob1 - prob2) > REG_BR_PROB_BASE / 90)
3431       if (rtl_dump_file && match)
3432 	fprintf (rtl_dump_file, "Conditionals in bb %i and %i match.\n",
3433 		 bb1->index, bb2->index);
3436   /* ??? We can handle computed jumps too.  This may be important for
3437      inlined functions containing switch statements.  Also jumps w/o
3438      fallthru edges can be handled by simply matching whole insn.  */
3442 /* Assume that e1 and e2 are the edges from the same basic block.
3443    Attempt to find common code on both paths and forward control flow
3444    from the first path to second if such exist.  */
/* On success, the tail of E1's source block is deleted and replaced by a
   jump into (a possibly split-off tail of) E2's source block.  Counts,
   frequencies and edge probabilities are redistributed accordingly.  */
3446 try_crossjump_to_edge (mode, e1, e2)
3451   basic_block redirect_to;
3452   rtx newpos1, newpos2;
3459   /* Skip forwarder blocks.  This is needed to avoid forced forwarders
3460      after conditional jumps from making us miss the optimization.
3462      We don't need to worry about multiple entry or chained forwarders, as they
3463      will be optimized out.  */
3464   if (e1->src->pred && !e1->src->pred->pred_next
3465       && forwarder_block_p (e1->src))
3467   if (e2->src->pred && !e2->src->pred->pred_next
3468       && forwarder_block_p (e2->src))
3471   if (e1->src == ENTRY_BLOCK_PTR || e2->src == ENTRY_BLOCK_PTR)
3473   if (e1->src == e2->src)
3476   /* Seeing more than 1 forwarder blocks would confuse us later...  */
3477   if (forwarder_block_p (e1->dest)
3478       && forwarder_block_p (e1->dest->succ->dest))
3480   if (forwarder_block_p (e2->dest)
3481       && forwarder_block_p (e2->dest->succ->dest))
3483   /* ... similarly as seeing dead code...  */
3484   if (!e1->src->pred || !e2->src->pred)
3486   /* ...similarly non-jump edges.  */
3487   if (e1->flags & EDGE_COMPLEX)
/* The outgoing control flow of both source blocks must be equivalent
   before any code can be shared.  */
3490   if (!outgoing_edges_match (e1->src, e2->src))
3492   nmatch = flow_find_cross_jump (mode, e1->src, e2->src, &newpos1, &newpos2);
3496   /* Avoid splitting if possible.  We must always redirect into e2->src's
3497      matched tail; only split when the match does not cover the whole block.  */
3497   if (newpos2 == e2->src->head)
3498     redirect_to = e2->src;
3502 	fprintf (rtl_dump_file, "Splitting bb %i before %i insns\n",
3503 		 e2->src->index, nmatch);
3504       redirect_to = split_block (e2->src, PREV_INSN (newpos2))->dest;
3508     fprintf (rtl_dump_file,
3509 	     "Cross jumping from bb %i to bb %i. %i insn commoized\n",
3510 	     e1->src->index, e2->src->index, nmatch);
/* The shared tail now executes on behalf of both predecessors.  */
3512   redirect_to->count += e1->src->count;
3513   redirect_to->frequency += e1->src->frequency;
3515   /* Recompute the frequencies and counts of outgoing edges.  */
3516   for (s = redirect_to->succ; s; s = s->succ_next)
/* Pair up each outgoing edge of the shared tail with the corresponding
   edge out of E1's source, looking through single forwarder blocks on
   either side.  */
3519       basic_block d = (forwarder_block_p (s->dest) ? s->dest->succ->dest
3521       for (s2 = e1->src->succ;; s2 = s2->succ_next)
3524 	    (forwarder_block_p (s2->dest) ? s2->dest->succ->dest : s2->dest);
3528       s->count += s2->count;
3530       /* Take care to update possible forwarder blocks.  We took care
3531 	 that there is no more than one in chain, so we can't run
3532 	 into infinite loop.  */
3533       if (forwarder_block_p (s->dest))
3535 	  s->dest->succ->count += s2->count;
3536 	  s->dest->count += s2->count;
3537 	  s->dest->frequency += ((s->probability * s->src->frequency)
3538 				 / REG_BR_PROB_BASE);
3540       if (forwarder_block_p (s2->dest))
3542 	  s2->dest->succ->count -= s2->count;
3543 	  s2->dest->count -= s2->count;
3544 	  s2->dest->frequency -= ((s->probability * s->src->frequency)
3545 				  / REG_BR_PROB_BASE);
/* Combine the two edges' probabilities, weighted by block frequency
   (plain average if both frequencies are zero, to avoid dividing by 0).  */
3547       if (!redirect_to->frequency && !e1->src->frequency)
3548 	s->probability = (s->probability + s2->probability) / 2;
3551 	  ((s->probability * redirect_to->frequency +
3552 	    s2->probability * e1->src->frequency)
3553 	   / (redirect_to->frequency + e1->src->frequency));
3556   /* FIXME: enable once probabilities are fetched properly at
3559   note = find_reg_note (redirect_to->end, REG_BR_PROB, 0);
3561     XEXP (note, 0) = GEN_INT (BRANCH_EDGE (redirect_to)->probability);
3564   /* Skip possible basic block header.  */
3566   if (GET_CODE (first) == CODE_LABEL)
3567     first = NEXT_INSN (first);
3568   if (GET_CODE (first) == NOTE)
3569     first = NEXT_INSN (first);
3571   last = e1->src->end;
3573   /* Now emit the jump insn.  */
3574   label = block_label (redirect_to);
3575   e1->src->end = emit_jump_insn_after (gen_jump (label), e1->src->end);
3576   JUMP_LABEL (e1->src->end) = label;
3577   LABEL_NUSES (label)++;
3578   if (basic_block_for_insn)
3579     set_block_for_new_insns (e1->src->end, e1->src);
/* Delete the duplicated tail that the new jump replaces.  */
3581   flow_delete_insn_chain (first, last);
3583   barrier = next_nonnote_insn (e1->src->end);
3584   if (!barrier || GET_CODE (barrier) != BARRIER)
3585     emit_barrier_after (e1->src->end);
/* E1's source now has exactly one successor: the shared tail.  */
3588   while (e1->src->succ->succ_next)
3589     remove_edge (e1->src->succ);
3590   e1->src->succ->flags = 0;
3591   redirect_edge_succ (e1->src->succ, redirect_to);
3595 /* Attempt to implement cross jumping.  This means moving one or more branches
3596    to BB earlier to BB predecessors commonizing some code.  */
3598 try_crossjump_bb (mode, bb)
3602   edge e, e2, nexte2, nexte, fallthru;
3603   bool changed = false;
3605   /* In case basic block has single predecessor, do nothing.  */
3606   if (!bb->pred || !bb->pred->pred_next)
3609   /* It is always cheapest to jump into fallthru edge.  */
3610   for (fallthru = bb->pred; fallthru; fallthru = fallthru->pred_next)
3611     if (fallthru->flags & EDGE_FALLTHRU)
/* Try every ordered pair of incoming edges; successor pointers are
   refetched (nexte/nexte2) because a successful crossjump can delete
   edges from under us.  */
3614   for (e = bb->pred; e; e = nexte)
3616       nexte = e->pred_next;
3617       /* First of all prioritize the fallthru edge, as the cheapest.  */
3618       if (e != fallthru && fallthru
3619 	  && try_crossjump_to_edge (mode, e, fallthru))
3620 	changed = true, nexte = bb->pred;
3622       /* Try match in other incoming edges.
3624 	 Loop only over the earlier edges, as the later
3625 	 will be examined in the opposite direction.  */
3626       for (e2 = bb->pred; e2 != e; e2 = nexte2)
3628 	  nexte2 = e2->pred_next;
3629 	  if (e2 != fallthru && try_crossjump_to_edge (mode, e, e2))
3634 	      /* We may've removed the fallthru edge.  */
3635 	      for (fallthru = bb->pred; fallthru;
3636 		   fallthru = fallthru->pred_next)
3637 		if (fallthru->flags & EDGE_FALLTHRU)
3646 /* Do simple CFG optimizations - basic block merging, simplifying of jump
3649    Return nonzero in case some optimizations matched.  */
/* MODE is a bitmask of CLEANUP_* flags selecting which transformations
   are allowed.  Iterates the whole set of transformations to a fixed
   point (the outer loop surrounding the block scan is partly elided in
   this view).  */
3652 try_optimize_cfg (mode)
3656   bool changed_overall = 0;
3660   /* Attempt to merge blocks as made possible by edge removal.  If a block
3661      has only one successor, and the successor has only one predecessor,
3662      they may be combined.  */
3669 	fprintf (rtl_dump_file, "\n\ntry_optimize_cfg iteration %i\n\n",
/* No increment here: I is advanced at the bottom of the body only when
   the current block survives, since deletions shift later blocks down.  */
3671       for (i = 0; i < n_basic_blocks;)
3673 	  basic_block c, b = BASIC_BLOCK (i);
3675 	  int changed_here = 0;
3677 	  /* Delete trivially dead basic blocks.  */
3678 	  while (b->pred == NULL)
3680 	      c = BASIC_BLOCK (b->index - 1);
3682 		fprintf (rtl_dump_file, "Deleting block %i.\n", b->index);
3683 	      flow_delete_block (b);
3687 	  /* The fallthru forwarder block can be deleted.  */
3688 	  if (b->pred->pred_next == NULL
3689 	      && forwarder_block_p (b)
3690 	      && n_basic_blocks > 1
3691 	      && (b->pred->flags & EDGE_FALLTHRU)
3692 	      && (b->succ->flags & EDGE_FALLTHRU))
3695 		fprintf (rtl_dump_file, "Deleting fallthru block %i.\n",
3697 	      c = BASIC_BLOCK (i ? i - 1 : i + 1);
3698 	      redirect_edge_succ (b->pred, b->succ->dest);
3699 	      flow_delete_block (b);
3704 	  /* Remove code labels no longer used.
3705 	     Don't do the optimization before sibling calls are discovered,
3706 	     as some branches may be hidden inside CALL_PLACEHOLDERs.  */
3707 	  if (!(mode & CLEANUP_PRE_SIBCALL)
3708 	      && b->pred->pred_next == NULL
3709 	      && (b->pred->flags & EDGE_FALLTHRU)
3710 	      && GET_CODE (b->head) == CODE_LABEL
3711 	      /* If previous block does end with condjump jumping to next BB,
3712 		 we can't delete the label.  */
3713 	      && (b->pred->src == ENTRY_BLOCK_PTR
3714 		  || !reg_mentioned_p (b->head, b->pred->src->end)))
3716 	      rtx label = b->head;
3717 	      b->head = NEXT_INSN (b->head);
3718 	      flow_delete_insn_chain (label, label);
3720 		fprintf (rtl_dump_file, "Deleted label in block %i.\n",
3724 	  /* A loop because chains of blocks might be combinable.  */
3725 	  while ((s = b->succ) != NULL
3726 		 && s->succ_next == NULL
3727 		 && (s->flags & EDGE_EH) == 0
3728 		 && (c = s->dest) != EXIT_BLOCK_PTR
3729 		 && c->pred->pred_next == NULL
3730 		 /* If the jump insn has side effects,
3731 		    we can't kill the edge.  */
3732 		 && (GET_CODE (b->end) != JUMP_INSN
3733 		     || onlyjump_p (b->end)) && merge_blocks (s, b, c, mode))
3736 	  if ((mode & CLEANUP_EXPENSIVE) && try_simplify_condjump (b))
3739 	  /* In the case basic blocks has single outgoing edge, but over by the
3740 	     non-trivial jump instruction, we can replace it by unconditional
3741 	     jump, or delete the jump completely.  Use logic of
3742 	     redirect_edge_and_branch to do the dirty job for us.
3744 	     We match cases as conditional jumps jumping to the next block or
3748 	      && b->succ->succ_next == NULL
3749 	      && GET_CODE (b->end) == JUMP_INSN
3750 	      && b->succ->dest != EXIT_BLOCK_PTR
3751 	      && redirect_edge_and_branch (b->succ, b->succ->dest))
3754 	  if (try_forward_edges (b))
3757 	  if ((mode & CLEANUP_CROSSJUMP) && try_crossjump_bb (mode, b))
3760 	  /* Don't get confused by the index shift caused by deleting
/* The exit block's predecessors are crossjump candidates too (common
   return sequences).  */
3767   if ((mode & CLEANUP_CROSSJUMP) && try_crossjump_bb (mode, EXIT_BLOCK_PTR))
3769 #ifdef ENABLE_CHECKING
3771     verify_flow_info ();
3773       changed_overall |= changed;
3776   return changed_overall;
3779 /* The given edge should potentially be a fallthru edge. If that is in
3780 fact true, delete the jump and barriers that are in the way. */
3783 tidy_fallthru_edge (e, b, c)
/* NOTE(review): intermediate source lines are elided in this excerpt;
   the parameter declarations and the declaration/initialization of Q
   (evidently B's final jump insn) are not visible here.  */
3789 /* ??? In a late-running flow pass, other folks may have deleted basic
3790 blocks by nopping out blocks, leaving multiple BARRIERs between here
3791 and the target label. They ought to be chastized and fixed.
3793 We can also wind up with a sequence of undeletable labels between
3794 one block and the next.
3796 So search through a sequence of barriers, labels, and notes for
3797 the head of block C and assert that we really do fall through. */
3799 if (next_real_insn (b->end) != next_real_insn (PREV_INSN (c->head)))
3802 /* Remove what will soon cease being the jump insn from the source block.
3803 If block B consisted only of this single jump, turn it into a deleted
3806 if (GET_CODE (q) == JUMP_INSN
3808 && (any_uncondjump_p (q)
3809 || (b->succ == e && e->succ_next == NULL)))
3812 /* If this was a conditional jump, we need to also delete
3813 the insn that set cc0. */
3814 if (any_condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
/* Turn Q into a deleted note instead of unlinking it, so that
   surrounding insn-chain pointers remain valid.  */
3821 NOTE_LINE_NUMBER (q) = NOTE_INSN_DELETED;
3822 NOTE_SOURCE_FILE (q) = 0;
3828 /* We don't want a block to end on a line-number note since that has
3829 the potential of changing the code between -g and not -g. */
3830 while (GET_CODE (q) == NOTE && NOTE_LINE_NUMBER (q) >= 0)
3837 /* Selectively unlink the sequence. */
3838 if (q != PREV_INSN (c->head))
3839 flow_delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
/* Record that E is now a genuine fallthru edge.  */
3841 e->flags |= EDGE_FALLTHRU;
3844 /* Fix up edges that now fall through, or rather should now fall through
3845 but previously required a jump around now deleted blocks. Simplify
3846 the search by only examining blocks numerically adjacent, since this
3847 is how find_basic_blocks created them. */
3850 tidy_fallthru_edges ()
/* Walk each numerically adjacent pair (B = block i-1, C = block i).  */
3854 for (i = 1; i < n_basic_blocks; ++i)
3856 basic_block b = BASIC_BLOCK (i - 1);
3857 basic_block c = BASIC_BLOCK (i);
3860 /* We care about simple conditional or unconditional jumps with
3863 If we had a conditional branch to the next instruction when
3864 find_basic_blocks was called, then there will only be one
3865 out edge for the block which ended with the conditional
3866 branch (since we do not create duplicate edges).
3868 Furthermore, the edge will be marked as a fallthru because we
3869 merge the flags for the duplicate edges. So we do not want to
3870 check that the edge is not a FALLTHRU edge. */
/* B must have exactly one non-complex successor edge S, and B's
   terminating jump (if any) must have no side effects.  */
3871 if ((s = b->succ) != NULL
3872 && ! (s->flags & EDGE_COMPLEX)
3873 && s->succ_next == NULL
3875 /* If the jump insn has side effects, we can't tidy the edge. */
3876 && (GET_CODE (b->end) != JUMP_INSN
3877 || onlyjump_p (b->end)))
3878 tidy_fallthru_edge (s, b, c);
3882 /* Perform data flow analysis.
3883 F is the first insn of the function; FLAGS is a set of PROP_* flags
3884 to be used in accumulating flow info. */
3887 life_analysis (f, file, flags)
3892 #ifdef ELIMINABLE_REGS
3894 static struct {int from, to; } eliminables[] = ELIMINABLE_REGS;
3897 /* Record which registers will be eliminated. We use this in
3900 CLEAR_HARD_REG_SET (elim_reg_set);
3902 #ifdef ELIMINABLE_REGS
3903 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
3904 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
/* Without ELIMINABLE_REGS, only the frame pointer can be eliminated.  */
3906 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
3910 flags &= ~(PROP_LOG_LINKS | PROP_AUTOINC);
3912 /* The post-reload life analysis have (on a global basis) the same
3913 registers live as was computed by reload itself. elimination
3914 Otherwise offsets and such may be incorrect.
3916 Reload will make some registers as live even though they do not
3919 We don't want to create new auto-incs after reload, since they
3920 are unlikely to be useful and can cause problems with shared
3922 if (reload_completed)
3923 flags &= ~(PROP_REG_INFO | PROP_AUTOINC);
3925 /* We want alias analysis information for local dead store elimination. */
3926 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
3927 init_alias_analysis ();
3929 /* Always remove no-op moves. Do this before other processing so
3930 that we don't have to keep re-scanning them. */
3931 delete_noop_moves (f);
3933 /* Some targets can emit simpler epilogues if they know that sp was
3934 not ever modified during the function. After reload, of course,
3935 we've already emitted the epilogue so there's no sense searching. */
3936 if (! reload_completed)
3937 notice_stack_pointer_modification (f);
3939 /* Allocate and zero out data structures that will record the
3940 data from lifetime analysis. */
3941 allocate_reg_life_data ();
3942 allocate_bb_life_data ();
3944 /* Find the set of registers live on function exit. */
3945 mark_regs_live_at_end (EXIT_BLOCK_PTR->global_live_at_start);
3947 /* "Update" life info from zero. It'd be nice to begin the
3948 relaxation with just the exit and noreturn blocks, but that set
3949 is not immediately handy. */
3951 if (flags & PROP_REG_INFO)
3952 memset (regs_ever_live, 0, sizeof (regs_ever_live));
3953 update_life_info (NULL, UPDATE_LIFE_GLOBAL, flags);
/* Tear down alias info now that dead-store scanning is done.  */
3956 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
3957 end_alias_analysis ();
3960 dump_flow_info (file);
3962 free_basic_block_vars (1);
3964 #ifdef ENABLE_CHECKING
3968 /* Search for any REG_LABEL notes which reference deleted labels. */
3969 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3971 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
/* NOTE(review): comparing a note's GET_CODE against
   NOTE_INSN_DELETED_LABEL looks suspicious (a code vs. a note line
   number); elided lines prevent confirming intent here.  */
3973 if (inote && GET_CODE (inote) == NOTE_INSN_DELETED_LABEL)
3980 /* A subroutine of verify_wide_reg, called through for_each_rtx.
3981 Search for REGNO. If found, abort if it is not wider than word_mode. */
3984 verify_wide_reg_1 (px, pregno)
/* NOTE(review): the declaration of X (presumably *PX) is elided.  */
3989 unsigned int regno = *(int *) pregno;
3991 if (GET_CODE (x) == REG && REGNO (x) == regno)
3993 if (GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD)
4000 /* A subroutine of verify_local_live_at_start. Search through insns
4001 between HEAD and END looking for register REGNO. */
4004 verify_wide_reg (regno, head, end)
/* Scan forward from HEAD; for_each_rtx aborts (via verify_wide_reg_1)
   if REGNO is found at word width or narrower.  */
4011 && for_each_rtx (&PATTERN (head), verify_wide_reg_1, &regno))
4015 head = NEXT_INSN (head);
4018 /* We didn't find the register at all. Something's way screwy. */
4020 fprintf (rtl_dump_file, "Aborting in verify_wide_reg; reg %d\n", regno);
4021 print_rtl_and_abort ();
4024 /* A subroutine of update_life_info. Verify that there are no untoward
4025 changes in live_at_start during a local update. */
4028 verify_local_live_at_start (new_live_at_start, bb)
4029 regset new_live_at_start;
4032 if (reload_completed)
4034 /* After reload, there are no pseudos, nor subregs of multi-word
4035 registers. The regsets should exactly match. */
4036 if (! REG_SET_EQUAL_P (new_live_at_start, bb->global_live_at_start))
4040 fprintf (rtl_dump_file,
4041 "live_at_start mismatch in bb %d, aborting\n",
4043 debug_bitmap_file (rtl_dump_file, bb->global_live_at_start);
4044 debug_bitmap_file (rtl_dump_file, new_live_at_start);
4046 print_rtl_and_abort ();
/* Before reload: registers may legitimately go live (see the ???
   note above update_life_info), but none may die.  */
4053 /* Find the set of changed registers. */
4054 XOR_REG_SET (new_live_at_start, bb->global_live_at_start);
4056 EXECUTE_IF_SET_IN_REG_SET (new_live_at_start, 0, i,
4058 /* No registers should die. */
4059 if (REGNO_REG_SET_P (bb->global_live_at_start, i))
4062 fprintf (rtl_dump_file,
4063 "Register %d died unexpectedly in block %d\n", i,
4065 print_rtl_and_abort ();
4068 /* Verify that the now-live register is wider than word_mode. */
4069 verify_wide_reg (i, bb->head, bb->end);
4074 /* Updates life information starting with the basic blocks set in BLOCKS.
4075 If BLOCKS is null, consider it to be the universal set.
4077 If EXTENT is UPDATE_LIFE_LOCAL, such as after splitting or peepholeing,
4078 we are only expecting local modifications to basic blocks. If we find
4079 extra registers live at the beginning of a block, then we either killed
4080 useful data, or we have a broken split that wants data not provided.
4081 If we find registers removed from live_at_start, that means we have
4082 a broken peephole that is killing a register it shouldn't.
4084 ??? This is not true in one situation -- when a pre-reload splitter
4085 generates subregs of a multi-word pseudo, current life analysis will
4086 lose the kill. So we _can_ have a pseudo go live. How irritating.
4088 Including PROP_REG_INFO does not properly refresh regs_ever_live
4089 unless the caller resets it to zero. */
4092 update_life_info (blocks, extent, prop_flags)
4094 enum update_life_extent extent;
4098 regset_head tmp_head;
4101 tmp = INITIALIZE_REG_SET (tmp_head);
4103 /* For a global update, we go through the relaxation process again. */
4104 if (extent != UPDATE_LIFE_LOCAL)
4106 calculate_global_regs_live (blocks, blocks,
4107 prop_flags & PROP_SCAN_DEAD_CODE);
4109 /* If asked, remove notes from the blocks we'll update. */
4110 if (extent == UPDATE_LIFE_GLOBAL_RM_NOTES)
4111 count_or_remove_death_notes (blocks, 1);
/* Re-propagate each requested block insn by insn; when BLOCKS is
   null the else-branch below walks every block instead.  */
4116 EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
4118 basic_block bb = BASIC_BLOCK (i);
4120 COPY_REG_SET (tmp, bb->global_live_at_end);
4121 propagate_block (bb, tmp, NULL, NULL, prop_flags);
4123 if (extent == UPDATE_LIFE_LOCAL)
4124 verify_local_live_at_start (tmp, bb);
4129 for (i = n_basic_blocks - 1; i >= 0; --i)
4131 basic_block bb = BASIC_BLOCK (i);
4133 COPY_REG_SET (tmp, bb->global_live_at_end);
4134 propagate_block (bb, tmp, NULL, NULL, prop_flags);
4136 if (extent == UPDATE_LIFE_LOCAL)
4137 verify_local_live_at_start (tmp, bb);
4143 if (prop_flags & PROP_REG_INFO)
4145 /* The only pseudos that are live at the beginning of the function
4146 are those that were not set anywhere in the function. local-alloc
4147 doesn't know how to handle these correctly, so mark them as not
4148 local to any one basic block. */
4149 EXECUTE_IF_SET_IN_REG_SET (ENTRY_BLOCK_PTR->global_live_at_end,
4150 FIRST_PSEUDO_REGISTER, i,
4151 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
4153 /* We have a problem with any pseudoreg that lives across the setjmp.
4154 ANSI says that if a user variable does not change in value between
4155 the setjmp and the longjmp, then the longjmp preserves it. This
4156 includes longjmp from a place where the pseudo appears dead.
4157 (In principle, the value still exists if it is in scope.)
4158 If the pseudo goes in a hard reg, some other value may occupy
4159 that hard reg where this pseudo is dead, thus clobbering the pseudo.
4160 Conclusion: such a pseudo must not go in a hard reg. */
4161 EXECUTE_IF_SET_IN_REG_SET (regs_live_at_setjmp,
4162 FIRST_PSEUDO_REGISTER, i,
4164 if (regno_reg_rtx[i] != 0)
/* LIVE_LENGTH -1 and BLOCK_UNKNOWN force allocation to memory.  */
4166 REG_LIVE_LENGTH (i) = -1;
4167 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
4173 /* Free the variables allocated by find_basic_blocks.
4175 KEEP_HEAD_END_P is non-zero if basic_block_info is not to be freed. */
4178 free_basic_block_vars (keep_head_end_p)
4179 int keep_head_end_p;
/* The insn->block map is always discarded.  */
4181 if (basic_block_for_insn)
4183 VARRAY_FREE (basic_block_for_insn);
4184 basic_block_for_insn = NULL;
4187 if (! keep_head_end_p)
4189 if (basic_block_info)
4192 VARRAY_FREE (basic_block_info);
/* Clear dangling pointers in the fake entry/exit blocks.  */
4196 ENTRY_BLOCK_PTR->aux = NULL;
4197 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
4198 EXIT_BLOCK_PTR->aux = NULL;
4199 EXIT_BLOCK_PTR->global_live_at_start = NULL;
4203 /* Return nonzero if an insn consists only of SETs, each of which only sets a
/* NOTE(review): the function header (presumably noop_move_p (insn)) is
   elided in this excerpt.  */
4210 rtx pat = PATTERN (insn);
4212 /* Insns carrying these notes are useful later on. */
4213 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
4216 if (GET_CODE (pat) == SET && set_noop_p (pat))
4219 if (GET_CODE (pat) == PARALLEL)
4222 /* If nothing but SETs of registers to themselves,
4223 this insn can also be deleted. */
4224 for (i = 0; i < XVECLEN (pat, 0); i++)
4226 rtx tem = XVECEXP (pat, 0, i);
/* USE and CLOBBER elements do not disqualify the insn.  */
4228 if (GET_CODE (tem) == USE
4229 || GET_CODE (tem) == CLOBBER)
4232 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
4241 /* Delete any insns that copy a register to itself. */
4244 delete_noop_moves (f)
4248 for (insn = f; insn; insn = NEXT_INSN (insn))
4250 if (GET_CODE (insn) == INSN && noop_move_p (insn))
/* Convert the insn to a deleted note in place rather than
   unlinking it from the chain.  */
4252 PUT_CODE (insn, NOTE);
4253 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
4254 NOTE_SOURCE_FILE (insn) = 0;
4259 /* Determine if the stack pointer is constant over the life of the function.
4260 Only useful before prologues have been emitted. */
4263 notice_stack_pointer_modification_1 (x, pat, data)
4265 rtx pat ATTRIBUTE_UNUSED;
4266 void *data ATTRIBUTE_UNUSED;
/* note_stores callback: X is a destination being stored to.  A direct
   store to sp, or an autoinc/autodec address using sp, counts as a
   modification.  */
4268 if (x == stack_pointer_rtx
4269 /* The stack pointer is only modified indirectly as the result
4270 of a push until later in flow. See the comments in rtl.texi
4271 regarding Embedded Side-Effects on Addresses. */
4272 || (GET_CODE (x) == MEM
4273 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'a'
4274 && XEXP (XEXP (x, 0), 0) == stack_pointer_rtx))
4275 current_function_sp_is_unchanging = 0;
4279 notice_stack_pointer_modification (f)
4284 /* Assume that the stack pointer is unchanging if alloca hasn't
4286 current_function_sp_is_unchanging = !current_function_calls_alloca;
/* If alloca was called there is nothing to prove; bail out early.  */
4287 if (! current_function_sp_is_unchanging)
4290 for (insn = f; insn; insn = NEXT_INSN (insn))
4294 /* Check if insn modifies the stack pointer. */
4295 note_stores (PATTERN (insn), notice_stack_pointer_modification_1,
/* Stop scanning as soon as one modification is found.  */
4297 if (! current_function_sp_is_unchanging)
4303 /* Mark a register in SET. Hard registers in large modes get all
4304 of their component registers set as well. */
4307 mark_reg (reg, xset)
4311 regset set = (regset) xset;
4312 int regno = REGNO (reg);
/* BLKmode registers have no well-defined width here.  */
4314 if (GET_MODE (reg) == BLKmode)
4317 SET_REGNO_REG_SET (set, regno);
4318 if (regno < FIRST_PSEUDO_REGISTER)
/* A multi-word hard register occupies N consecutive regnos.  */
4320 int n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
4322 SET_REGNO_REG_SET (set, regno + n);
4326 /* Mark those regs which are needed at the end of the function as live
4327 at the end of the last basic block. */
4330 mark_regs_live_at_end (set)
4335 /* If exiting needs the right stack value, consider the stack pointer
4336 live at the end of the function. */
4337 if ((HAVE_epilogue && reload_completed)
4338 || ! EXIT_IGNORE_STACK
4339 || (! FRAME_POINTER_REQUIRED
4340 && ! current_function_calls_alloca
4341 && flag_omit_frame_pointer)
4342 || current_function_sp_is_unchanging)
4344 SET_REGNO_REG_SET (set, STACK_POINTER_REGNUM);
4347 /* Mark the frame pointer if needed at the end of the function. If
4348 we end up eliminating it, it will be removed from the live list
4349 of each basic block by reload. */
4351 if (! reload_completed || frame_pointer_needed)
4353 SET_REGNO_REG_SET (set, FRAME_POINTER_REGNUM);
4354 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
4355 /* If they are different, also mark the hard frame pointer as live. */
4356 if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
4357 SET_REGNO_REG_SET (set, HARD_FRAME_POINTER_REGNUM);
4361 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
4362 /* Many architectures have a GP register even without flag_pic.
4363 Assume the pic register is not in use, or will be handled by
4364 other means, if it is not fixed. */
4365 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4366 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
4367 SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
4370 /* Mark all global registers, and all registers used by the epilogue
4371 as being live at the end of the function since they may be
4372 referenced by our caller. */
4373 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4374 if (global_regs[i] || EPILOGUE_USES (i))
4375 SET_REGNO_REG_SET (set, i);
4377 if (HAVE_epilogue && reload_completed)
4379 /* Mark all call-saved registers that we actually used. */
4380 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4381 if (regs_ever_live[i] && ! call_used_regs[i] && ! LOCAL_REGNO (i))
4382 SET_REGNO_REG_SET (set, i);
4385 #ifdef EH_RETURN_DATA_REGNO
4386 /* Mark the registers that will contain data for the handler. */
4387 if (reload_completed && current_function_calls_eh_return)
/* Walk the EH data registers until the target reports no more.  */
4390 unsigned regno = EH_RETURN_DATA_REGNO(i);
4391 if (regno == INVALID_REGNUM)
4393 SET_REGNO_REG_SET (set, regno);
4396 #ifdef EH_RETURN_STACKADJ_RTX
4397 if ((! HAVE_epilogue || ! reload_completed)
4398 && current_function_calls_eh_return)
4400 rtx tmp = EH_RETURN_STACKADJ_RTX;
4401 if (tmp && REG_P (tmp))
4402 mark_reg (tmp, set);
4405 #ifdef EH_RETURN_HANDLER_RTX
4406 if ((! HAVE_epilogue || ! reload_completed)
4407 && current_function_calls_eh_return)
4409 rtx tmp = EH_RETURN_HANDLER_RTX;
4410 if (tmp && REG_P (tmp))
4411 mark_reg (tmp, set);
4415 /* Mark function return value. */
4416 diddle_return_value (mark_reg, set);
4419 /* Callback function for for_each_successor_phi. DATA is a regset.
4420 Sets the SRC_REGNO, the regno of the phi alternative for phi node
4421 INSN, in the regset. */
4424 set_phi_alternative_reg (insn, dest_regno, src_regno, data)
4425 rtx insn ATTRIBUTE_UNUSED;
4426 int dest_regno ATTRIBUTE_UNUSED;
4430 regset live = (regset) data;
4431 SET_REGNO_REG_SET (live, src_regno);
4435 /* Propagate global life info around the graph of basic blocks. Begin
4436 considering blocks with their corresponding bit set in BLOCKS_IN.
4437 If BLOCKS_IN is null, consider it the universal set.
4439 BLOCKS_OUT is set for every block that was changed. */
4442 calculate_global_regs_live (blocks_in, blocks_out, flags)
4443 sbitmap blocks_in, blocks_out;
4446 basic_block *queue, *qhead, *qtail, *qend;
4447 regset tmp, new_live_at_end, call_used;
4448 regset_head tmp_head, call_used_head;
4449 regset_head new_live_at_end_head;
4452 tmp = INITIALIZE_REG_SET (tmp_head);
4453 new_live_at_end = INITIALIZE_REG_SET (new_live_at_end_head);
4454 call_used = INITIALIZE_REG_SET (call_used_head);
4456 /* Inconveniently, this is only redily available in hard reg set form. */
4457 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
4458 if (call_used_regs[i])
4459 SET_REGNO_REG_SET (call_used, i);
4461 /* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
4462 because the `head == tail' style test for an empty queue doesn't
4463 work with a full queue. */
4464 queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
4466 qhead = qend = queue + n_basic_blocks + 2;
4468 /* Queue the blocks set in the initial mask. Do this in reverse block
4469 number order so that we are more likely for the first round to do
4470 useful work. We use AUX non-null to flag that the block is queued. */
4473 /* Clear out the garbage that might be hanging out in bb->aux. */
4474 for (i = n_basic_blocks - 1; i >= 0; --i)
4475 BASIC_BLOCK (i)->aux = NULL;
4477 EXECUTE_IF_SET_IN_SBITMAP (blocks_in, 0, i,
4479 basic_block bb = BASIC_BLOCK (i);
/* BLOCKS_IN null: queue every block.  */
4486 for (i = 0; i < n_basic_blocks; ++i)
4488 basic_block bb = BASIC_BLOCK (i);
4495 sbitmap_zero (blocks_out);
4497 /* We work through the queue until there are no more blocks. What
4498 is live at the end of this block is precisely the union of what
4499 is live at the beginning of all its successors. So, we set its
4500 GLOBAL_LIVE_AT_END field based on the GLOBAL_LIVE_AT_START field
4501 for its successors. Then, we compute GLOBAL_LIVE_AT_START for
4502 this block by walking through the instructions in this block in
4503 reverse order and updating as we go. If that changed
4504 GLOBAL_LIVE_AT_START, we add the predecessors of the block to the
4505 queue; they will now need to recalculate GLOBAL_LIVE_AT_END.
4507 We are guaranteed to terminate, because GLOBAL_LIVE_AT_START
4508 never shrinks. If a register appears in GLOBAL_LIVE_AT_START, it
4509 must either be live at the end of the block, or used within the
4510 block. In the latter case, it will certainly never disappear
4511 from GLOBAL_LIVE_AT_START. In the former case, the register
4512 could go away only if it disappeared from GLOBAL_LIVE_AT_START
4513 for one of the successor blocks. By induction, that cannot
4515 while (qhead != qtail)
4517 int rescan, changed;
4526 /* Begin by propagating live_at_start from the successor blocks. */
4527 CLEAR_REG_SET (new_live_at_end);
4528 for (e = bb->succ; e; e = e->succ_next)
4530 basic_block sb = e->dest;
4532 /* Call-clobbered registers die across exception and call edges. */
4533 /* ??? Abnormal call edges ignored for the moment, as this gets
4534 confused by sibling call edges, which crashes reg-stack. */
4535 if (e->flags & EDGE_EH)
4537 bitmap_operation (tmp, sb->global_live_at_start,
4538 call_used, BITMAP_AND_COMPL);
4539 IOR_REG_SET (new_live_at_end, tmp);
4542 IOR_REG_SET (new_live_at_end, sb->global_live_at_start);
4545 /* The all-important stack pointer must always be live. */
4546 SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
4548 /* Before reload, there are a few registers that must be forced
4549 live everywhere -- which might not already be the case for
4550 blocks within infinite loops. */
4551 if (! reload_completed)
4553 /* Any reference to any pseudo before reload is a potential
4554 reference of the frame pointer. */
4555 SET_REGNO_REG_SET (new_live_at_end, FRAME_POINTER_REGNUM);
4557 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4558 /* Pseudos with argument area equivalences may require
4559 reloading via the argument pointer. */
4560 if (fixed_regs[ARG_POINTER_REGNUM])
4561 SET_REGNO_REG_SET (new_live_at_end, ARG_POINTER_REGNUM);
4564 /* Any constant, or pseudo with constant equivalences, may
4565 require reloading from memory using the pic register. */
4566 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
4567 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
4568 SET_REGNO_REG_SET (new_live_at_end, PIC_OFFSET_TABLE_REGNUM);
4571 /* Regs used in phi nodes are not included in
4572 global_live_at_start, since they are live only along a
4573 particular edge. Set those regs that are live because of a
4574 phi node alternative corresponding to this particular block. */
4576 for_each_successor_phi (bb, &set_phi_alternative_reg,
/* The entry block has no insns; just record live-at-end and loop.  */
4579 if (bb == ENTRY_BLOCK_PTR)
4581 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
4585 /* On our first pass through this block, we'll go ahead and continue.
4586 Recognize first pass by local_set NULL. On subsequent passes, we
4587 get to skip out early if live_at_end wouldn't have changed. */
4589 if (bb->local_set == NULL)
4591 bb->local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4592 bb->cond_local_set = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4597 /* If any bits were removed from live_at_end, we'll have to
4598 rescan the block. This wouldn't be necessary if we had
4599 precalculated local_live, however with PROP_SCAN_DEAD_CODE
4600 local_live is really dependent on live_at_end. */
4601 CLEAR_REG_SET (tmp);
4602 rescan = bitmap_operation (tmp, bb->global_live_at_end,
4603 new_live_at_end, BITMAP_AND_COMPL);
4607 /* If any of the registers in the new live_at_end set are
4608 conditionally set in this basic block, we must rescan.
4609 This is because conditional lifetimes at the end of the
4610 block do not just take the live_at_end set into account,
4611 but also the liveness at the start of each successor
4612 block. We can miss changes in those sets if we only
4613 compare the new live_at_end against the previous one. */
4614 CLEAR_REG_SET (tmp);
4615 rescan = bitmap_operation (tmp, new_live_at_end,
4616 bb->cond_local_set, BITMAP_AND);
4621 /* Find the set of changed bits. Take this opportunity
4622 to notice that this set is empty and early out. */
4623 CLEAR_REG_SET (tmp);
4624 changed = bitmap_operation (tmp, bb->global_live_at_end,
4625 new_live_at_end, BITMAP_XOR);
4629 /* If any of the changed bits overlap with local_set,
4630 we'll have to rescan the block. Detect overlap by
4631 the AND with ~local_set turning off bits. */
4632 rescan = bitmap_operation (tmp, tmp, bb->local_set,
4637 /* Let our caller know that BB changed enough to require its
4638 death notes updated. */
4640 SET_BIT (blocks_out, bb->index);
4644 /* Add to live_at_start the set of all registers in
4645 new_live_at_end that aren't in the old live_at_end. */
4647 bitmap_operation (tmp, new_live_at_end, bb->global_live_at_end,
4649 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
4651 changed = bitmap_operation (bb->global_live_at_start,
4652 bb->global_live_at_start,
4659 COPY_REG_SET (bb->global_live_at_end, new_live_at_end);
4661 /* Rescan the block insn by insn to turn (a copy of) live_at_end
4662 into live_at_start. */
4663 propagate_block (bb, new_live_at_end, bb->local_set,
4664 bb->cond_local_set, flags);
4666 /* If live_at start didn't change, no need to go farther. */
4667 if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end))
4670 COPY_REG_SET (bb->global_live_at_start, new_live_at_end);
4673 /* Queue all predecessors of BB so that we may re-examine
4674 their live_at_end. */
4675 for (e = bb->pred; e; e = e->pred_next)
4677 basic_block pb = e->src;
4678 if (pb->aux == NULL)
/* Release the temporary regsets and queue storage.  */
4689 FREE_REG_SET (new_live_at_end);
4690 FREE_REG_SET (call_used);
/* Free the per-block scratch sets for the blocks we touched (or for
   all blocks when BLOCKS_OUT is null, in the second loop below).  */
4694 EXECUTE_IF_SET_IN_SBITMAP (blocks_out, 0, i,
4696 basic_block bb = BASIC_BLOCK (i);
4697 FREE_REG_SET (bb->local_set);
4698 FREE_REG_SET (bb->cond_local_set);
4703 for (i = n_basic_blocks - 1; i >= 0; --i)
4705 basic_block bb = BASIC_BLOCK (i);
4706 FREE_REG_SET (bb->local_set);
4707 FREE_REG_SET (bb->cond_local_set);
4714 /* Subroutines of life analysis. */
4716 /* Allocate the permanent data structures that represent the results
4717 of life analysis. Not static since used also for stupid life analysis. */
4720 allocate_bb_life_data ()
4724 for (i = 0; i < n_basic_blocks; i++)
4726 basic_block bb = BASIC_BLOCK (i);
4728 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4729 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
/* The fake entry/exit blocks each get only the one regset that is
   meaningful for them.  */
4732 ENTRY_BLOCK_PTR->global_live_at_end
4733 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4734 EXIT_BLOCK_PTR->global_live_at_start
4735 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4737 regs_live_at_setjmp = OBSTACK_ALLOC_REG_SET (&flow_obstack);
4741 allocate_reg_life_data ()
/* Allocate/reset the per-register info tables used by propagate_block.  */
4745 max_regno = max_reg_num ();
4747 /* Recalculate the register space, in case it has grown. Old style
4748 vector oriented regsets would set regset_{size,bytes} here also. */
4749 allocate_reg_info (max_regno, FALSE, FALSE);
4751 /* Reset all the data we'll collect in propagate_block and its
4753 for (i = 0; i < max_regno; i++)
4757 REG_N_DEATHS (i) = 0;
4758 REG_N_CALLS_CROSSED (i) = 0;
4759 REG_LIVE_LENGTH (i) = 0;
4760 REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
4764 /* Delete dead instructions for propagate_block. */
4767 propagate_block_delete_insn (bb, insn)
4771 rtx inote = find_reg_note (insn, REG_LABEL, NULL_RTX);
4773 /* If the insn referred to a label, and that label was attached to
4774 an ADDR_VEC, it's safe to delete the ADDR_VEC. In fact, it's
4775 pretty much mandatory to delete it, because the ADDR_VEC may be
4776 referencing labels that no longer exist.
4778 INSN may reference a deleted label, particularly when a jump
4779 table has been optimized into a direct jump. There's no
4780 real good way to fix up the reference to the deleted label
4781 when the label is deleted, so we just allow it here.
4783 After dead code elimination is complete, we do search for
4784 any REG_LABEL notes which reference deleted labels as a
/* NOTE(review): comparing the note's GET_CODE against CODE_LABEL
   looks like it tests the note rtx itself rather than XEXP (inote, 0);
   elided lines prevent confirming intent here.  */
4787 if (inote && GET_CODE (inote) == CODE_LABEL)
4789 rtx label = XEXP (inote, 0);
4792 /* The label may be forced if it has been put in the constant
4793 pool. If that is the only use we must discard the table
4794 jump following it, but not the label itself. */
4795 if (LABEL_NUSES (label) == 1 + LABEL_PRESERVE_P (label)
4796 && (next = next_nonnote_insn (label)) != NULL
4797 && GET_CODE (next) == JUMP_INSN
4798 && (GET_CODE (PATTERN (next)) == ADDR_VEC
4799 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC))
4801 rtx pat = PATTERN (next);
4802 int diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
4803 int len = XVECLEN (pat, diff_vec_p);
/* Drop the use count of every label referenced by the table.  */
4806 for (i = 0; i < len; i++)
4807 LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0))--;
4809 flow_delete_insn (next);
/* Keep BB's end pointer valid if we're deleting the last insn.  */
4813 if (bb->end == insn)
4814 bb->end = PREV_INSN (insn);
4815 flow_delete_insn (insn);
4818 /* Delete dead libcalls for propagate_block. Return the insn
4819 before the libcall. */
4822 propagate_block_delete_libcall (bb, insn, note)
/* NOTE is the REG_RETVAL note; XEXP (note, 0) is the first insn of
   the libcall sequence.  */
4826 rtx first = XEXP (note, 0);
4827 rtx before = PREV_INSN (first);
4829 if (insn == bb->end)
/* Delete the whole libcall sequence in one pass.  */
4832 flow_delete_insn_chain (first, insn);
4836 /* Update the life-status of regs for one insn. Return the previous insn. */
4839 propagate_one_insn (pbi, insn)
4840 struct propagate_block_info *pbi;
4843 rtx prev = PREV_INSN (insn);
4844 int flags = pbi->flags;
4845 int insn_is_dead = 0;
4846 int libcall_is_dead = 0;
4850 if (! INSN_P (insn))
4853 note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
4854 if (flags & PROP_SCAN_DEAD_CODE)
4856 insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0, REG_NOTES (insn));
4857 libcall_is_dead = (insn_is_dead && note != 0
4858 && libcall_dead_p (pbi, note, insn));
4861 /* If an instruction consists of just dead store(s) on final pass,
4863 if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
4865 /* If we're trying to delete a prologue or epilogue instruction
4866 that isn't flagged as possibly being dead, something is wrong.
4867 But if we are keeping the stack pointer depressed, we might well
4868 be deleting insns that are used to compute the amount to update
4869 it by, so they are fine. */
4870 if (reload_completed
4871 && !(TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
4872 && (TYPE_RETURNS_STACK_DEPRESSED
4873 (TREE_TYPE (current_function_decl))))
4874 && (((HAVE_epilogue || HAVE_prologue)
4875 && prologue_epilogue_contains (insn))
4876 || (HAVE_sibcall_epilogue
4877 && sibcall_epilogue_contains (insn)))
4878 && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
4881 /* Record sets. Do this even for dead instructions, since they
4882 would have killed the values if they hadn't been deleted. */
4883 mark_set_regs (pbi, PATTERN (insn), insn);
4885 /* CC0 is now known to be dead. Either this insn used it,
4886 in which case it doesn't anymore, or clobbered it,
4887 so the next insn can't use it. */
4890 if (libcall_is_dead)
4891 prev = propagate_block_delete_libcall (pbi->bb, insn, note);
4893 propagate_block_delete_insn (pbi->bb, insn);
4898 /* See if this is an increment or decrement that can be merged into
4899 a following memory address. */
4902 register rtx x = single_set (insn);
4904 /* Does this instruction increment or decrement a register? */
4905 if ((flags & PROP_AUTOINC)
4907 && GET_CODE (SET_DEST (x)) == REG
4908 && (GET_CODE (SET_SRC (x)) == PLUS
4909 || GET_CODE (SET_SRC (x)) == MINUS)
4910 && XEXP (SET_SRC (x), 0) == SET_DEST (x)
4911 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
4912 /* Ok, look for a following memory ref we can combine with.
4913 If one is found, change the memory ref to a PRE_INC
4914 or PRE_DEC, cancel this insn, and return 1.
4915 Return 0 if nothing has been done. */
4916 && try_pre_increment_1 (pbi, insn))
4919 #endif /* AUTO_INC_DEC */
4921 CLEAR_REG_SET (pbi->new_set);
4923 /* If this is not the final pass, and this insn is copying the value of
4924 a library call and it's dead, don't scan the insns that perform the
4925 library call, so that the call's arguments are not marked live. */
4926 if (libcall_is_dead)
4928 /* Record the death of the dest reg. */
4929 mark_set_regs (pbi, PATTERN (insn), insn);
4931 insn = XEXP (note, 0);
4932 return PREV_INSN (insn);
4934 else if (GET_CODE (PATTERN (insn)) == SET
4935 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx
4936 && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
4937 && XEXP (SET_SRC (PATTERN (insn)), 0) == stack_pointer_rtx
4938 && GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == CONST_INT)
4939 /* We have an insn to pop a constant amount off the stack.
4940 (Such insns use PLUS regardless of the direction of the stack,
4941 and any insn to adjust the stack by a constant is always a pop.)
4942 These insns, if not dead stores, have no effect on life. */
4946 /* Any regs live at the time of a call instruction must not go
4947 in a register clobbered by calls. Find all regs now live and
4948 record this for them. */
4950 if (GET_CODE (insn) == CALL_INSN && (flags & PROP_REG_INFO))
4951 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
4952 { REG_N_CALLS_CROSSED (i)++; });
4954 /* Record sets. Do this even for dead instructions, since they
4955 would have killed the values if they hadn't been deleted. */
4956 mark_set_regs (pbi, PATTERN (insn), insn);
4958 if (GET_CODE (insn) == CALL_INSN)
4964 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
4965 cond = COND_EXEC_TEST (PATTERN (insn));
4967 /* Non-constant calls clobber memory. */
4968 if (! CONST_CALL_P (insn))
4970 free_EXPR_LIST_list (&pbi->mem_set_list);
4971 pbi->mem_set_list_len = 0;
4974 /* There may be extra registers to be clobbered. */
4975 for (note = CALL_INSN_FUNCTION_USAGE (insn);
4977 note = XEXP (note, 1))
4978 if (GET_CODE (XEXP (note, 0)) == CLOBBER)
4979 mark_set_1 (pbi, CLOBBER, XEXP (XEXP (note, 0), 0),
4980 cond, insn, pbi->flags);
4982 /* Calls change all call-used and global registers. */
4983 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4984 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
4986 /* We do not want REG_UNUSED notes for these registers. */
4987 mark_set_1 (pbi, CLOBBER, gen_rtx_REG (reg_raw_mode[i], i),
4989 pbi->flags & ~(PROP_DEATH_NOTES | PROP_REG_INFO));
4993 /* If an insn doesn't use CC0, it becomes dead since we assume
4994 that every insn clobbers it. So show it dead here;
4995 mark_used_regs will set it live if it is referenced. */
5000 mark_used_regs (pbi, PATTERN (insn), NULL_RTX, insn);
5002 /* Sometimes we may have inserted something before INSN (such as a move)
5003 when we make an auto-inc. So ensure we will scan those insns. */
5005 prev = PREV_INSN (insn);
5008 if (! insn_is_dead && GET_CODE (insn) == CALL_INSN)
5014 if (GET_CODE (PATTERN (insn)) == COND_EXEC)
5015 cond = COND_EXEC_TEST (PATTERN (insn));
5017 /* Calls use their arguments. */
5018 for (note = CALL_INSN_FUNCTION_USAGE (insn);
5020 note = XEXP (note, 1))
5021 if (GET_CODE (XEXP (note, 0)) == USE)
5022 mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
5025 /* The stack ptr is used (honorarily) by a CALL insn. */
5026 SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
5028 /* Calls may also reference any of the global registers,
5029 so they are made live. */
5030 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5032 mark_used_reg (pbi, gen_rtx_REG (reg_raw_mode[i], i),
5037 /* On final pass, update counts of how many insns in which each reg
5039 if (flags & PROP_REG_INFO)
5040 EXECUTE_IF_SET_IN_REG_SET (pbi->reg_live, 0, i,
5041 { REG_LIVE_LENGTH (i)++; });
5046 /* Initialize a propagate_block_info struct for public consumption.
5047 Note that the structure itself is opaque to this file, but that
5048 the user can use the regsets provided here. */
5050 struct propagate_block_info *
5051 init_propagate_block_info (bb, live, local_set, cond_local_set, flags)
5053 regset live, local_set, cond_local_set;
/* Allocate the pbi and record the caller-owned regsets; LIVE is
   updated in place as insns are scanned backwards through BB. */
5056 struct propagate_block_info *pbi = xmalloc (sizeof (*pbi));
5059 pbi->reg_live = live;
5060 pbi->mem_set_list = NULL_RTX;
5061 pbi->mem_set_list_len = 0;
5062 pbi->local_set = local_set;
5063 pbi->cond_local_set = cond_local_set;
/* reg_next_use maps regno -> next following use of that reg.  It is
   only consulted when building LOG_LINKS or discovering auto-inc
   opportunities, so only allocate it then. */
5067 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
5068 pbi->reg_next_use = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
5070 pbi->reg_next_use = NULL;
5072 pbi->new_set = BITMAP_XMALLOC ();
/* Conditional-execution targets also track, per register, the
   condition under which the register is dead (reg_cond_dead) and
   the set of registers used in those conditions (reg_cond_reg). */
5074 #ifdef HAVE_conditional_execution
5075 pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL,
5076 free_reg_cond_life_info);
5077 pbi->reg_cond_reg = BITMAP_XMALLOC ();
5079 /* If this block ends in a conditional branch, for each register live
5080 from one side of the branch and not the other, record the register
5081 as conditionally dead. */
5082 if (GET_CODE (bb->end) == JUMP_INSN
5083 && any_condjump_p (bb->end))
5085 regset_head diff_head;
5086 regset diff = INITIALIZE_REG_SET (diff_head);
5087 basic_block bb_true, bb_false;
5088 rtx cond_true, cond_false, set_src;
5091 /* Identify the successor blocks. */
5092 bb_true = bb->succ->dest;
5093 if (bb->succ->succ_next != NULL)
5095 bb_false = bb->succ->succ_next->dest;
/* Normalize so that bb_true is the branch-taken target and
   bb_false is the fall-through target. */
5097 if (bb->succ->flags & EDGE_FALLTHRU)
5099 basic_block t = bb_false;
5103 else if (! (bb->succ->succ_next->flags & EDGE_FALLTHRU))
5108 /* This can happen with a conditional jump to the next insn. */
5109 if (JUMP_LABEL (bb->end) != bb_true->head)
5112 /* Simplest way to do nothing. */
5116 /* Extract the condition from the branch. */
5117 set_src = SET_SRC (pc_set (bb->end));
5118 cond_true = XEXP (set_src, 0);
5119 cond_false = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond_true)),
5120 GET_MODE (cond_true), XEXP (cond_true, 0),
5121 XEXP (cond_true, 1));
/* If the branch falls through on "true", the roles of the two
   conditions are swapped. */
5122 if (GET_CODE (XEXP (set_src, 1)) == PC)
5125 cond_false = cond_true;
5129 /* Compute which register lead different lives in the successors. */
5130 if (bitmap_operation (diff, bb_true->global_live_at_start,
5131 bb_false->global_live_at_start, BITMAP_XOR))
5133 rtx reg = XEXP (cond_true, 0);
5135 if (GET_CODE (reg) == SUBREG)
5136 reg = SUBREG_REG (reg);
5138 if (GET_CODE (reg) != REG)
5141 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (reg));
5143 /* For each such register, mark it conditionally dead. */
5144 EXECUTE_IF_SET_IN_REG_SET
5147 struct reg_cond_life_info *rcli;
5150 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
/* The register is dead under the condition of the arm on which
   it is NOT live; record that condition.  */
5152 if (REGNO_REG_SET_P (bb_true->global_live_at_start, i))
5156 rcli->condition = cond;
5157 rcli->stores = const0_rtx;
5158 rcli->orig_condition = cond;
5160 splay_tree_insert (pbi->reg_cond_dead, i,
5161 (splay_tree_value) rcli);
5165 FREE_REG_SET (diff);
5169 /* If this block has no successors, any stores to the frame that aren't
5170 used later in the block are dead. So make a pass over the block
5171 recording any such that are made and show them dead at the end. We do
5172 a very conservative and simple job here. */
5174 && ! (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5175 && (TYPE_RETURNS_STACK_DEPRESSED
5176 (TREE_TYPE (current_function_decl))))
5177 && (flags & PROP_SCAN_DEAD_CODE)
5178 && (bb->succ == NULL
5179 || (bb->succ->succ_next == NULL
5180 && bb->succ->dest == EXIT_BLOCK_PTR
5181 && ! current_function_calls_eh_return)))
5184 for (insn = bb->end; insn != bb->head; insn = PREV_INSN (insn))
5185 if (GET_CODE (insn) == INSN
5186 && (set = single_set (insn))
5187 && GET_CODE (SET_DEST (set)) == MEM)
5189 rtx mem = SET_DEST (set);
5190 rtx canon_mem = canon_rtx (mem);
5192 /* This optimization is performed by faking a store to the
5193 memory at the end of the block. This doesn't work for
5194 unchanging memories because multiple stores to unchanging
5195 memory is illegal and alias analysis doesn't consider it. */
5196 if (RTX_UNCHANGING_P (canon_mem))
/* Only frame-pointer-relative addresses are considered; anything
   else might be visible after the function returns. */
5199 if (XEXP (canon_mem, 0) == frame_pointer_rtx
5200 || (GET_CODE (XEXP (canon_mem, 0)) == PLUS
5201 && XEXP (XEXP (canon_mem, 0), 0) == frame_pointer_rtx
5202 && GET_CODE (XEXP (XEXP (canon_mem, 0), 1)) == CONST_INT))
5205 /* Store a copy of mem, otherwise the address may be scrogged
5206 by find_auto_inc. This matters because insn_dead_p uses
5207 an rtx_equal_p check to determine if two addresses are
5208 the same. This works before find_auto_inc, but fails
5209 after find_auto_inc, causing discrepencies between the
5210 set of live registers calculated during the
5211 calculate_global_regs_live phase and what actually exists
5212 after flow completes, leading to aborts. */
5213 if (flags & PROP_AUTOINC)
5214 mem = shallow_copy_rtx (mem);
5216 pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
5217 if (++pbi->mem_set_list_len >= MAX_MEM_SET_LIST_LEN)
5226 /* Release a propagate_block_info struct. */
/* Frees everything init_propagate_block_info allocated.  The live,
   local_set and cond_local_set regsets belong to the caller and are
   intentionally not freed here. */
5229 free_propagate_block_info (pbi)
5230 struct propagate_block_info *pbi;
5232 free_EXPR_LIST_list (&pbi->mem_set_list);
5234 BITMAP_XFREE (pbi->new_set);
5236 #ifdef HAVE_conditional_execution
/* splay_tree_delete invokes free_reg_cond_life_info on each node. */
5237 splay_tree_delete (pbi->reg_cond_dead);
5238 BITMAP_XFREE (pbi->reg_cond_reg);
5241 if (pbi->reg_next_use)
5242 free (pbi->reg_next_use);
5247 /* Compute the registers live at the beginning of a basic block BB from
5248 those live at the end.
5250 When called, REG_LIVE contains those live at the end. On return, it
5251 contains those live at the beginning.
5253 LOCAL_SET, if non-null, will be set with all registers killed
5254 unconditionally by this basic block.
5255 Likewise, COND_LOCAL_SET, if non-null, will be set with all registers
5256 killed conditionally by this basic block. If there is any unconditional
5257 set of a register, then the corresponding bit will be set in LOCAL_SET
5258 and cleared in COND_LOCAL_SET.
5259 It is valid for LOCAL_SET and COND_LOCAL_SET to be the same set. In this
5260 case, the resulting set will be equal to the union of the two sets that
5261 would otherwise be computed. */
5264 propagate_block (bb, live, local_set, cond_local_set, flags)
5268 regset cond_local_set;
5271 struct propagate_block_info *pbi;
5274 pbi = init_propagate_block_info (bb, live, local_set, cond_local_set, flags);
5276 if (flags & PROP_REG_INFO)
5280 /* Process the regs live at the end of the block.
5281 Mark them as not local to any one basic block. */
5282 EXECUTE_IF_SET_IN_REG_SET (live, 0, i,
5283 { REG_BASIC_BLOCK (i) = REG_BLOCK_GLOBAL; });
5286 /* Scan the block an insn at a time from end to beginning. */
5288 for (insn = bb->end;; insn = prev)
5290 /* If this is a call to `setjmp' et al, warn if any
5291 non-volatile datum is live. */
5292 if ((flags & PROP_REG_INFO)
5293 && GET_CODE (insn) == NOTE
5294 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
5295 IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
/* propagate_one_insn returns the next (earlier) insn to process,
   which may be before PREV_INSN (insn) if insns were deleted. */
5297 prev = propagate_one_insn (pbi, insn);
/* The loop terminates once the block head has been processed. */
5299 if (insn == bb->head)
5303 free_propagate_block_info (pbi);
5306 /* Return 1 if X (the body of an insn, or part of it) is just dead stores
5307 (SET expressions whose destinations are registers dead after the insn).
5308 NEEDED is the regset that says which regs are alive after the insn.
5310 Unless CALL_OK is non-zero, an insn is needed if it contains a CALL.
5312 If X is the entire body of an insn, NOTES contains the reg notes
5313 pertaining to the insn. */
5316 insn_dead_p (pbi, x, call_ok, notes)
5317 struct propagate_block_info *pbi;
5320 rtx notes ATTRIBUTE_UNUSED;
5322 enum rtx_code code = GET_CODE (x);
5325 /* If flow is invoked after reload, we must take existing AUTO_INC
5326 expresions into account. */
5327 if (reload_completed)
5329 for (; notes; notes = XEXP (notes, 1))
5331 if (REG_NOTE_KIND (notes) == REG_INC)
5333 int regno = REGNO (XEXP (notes, 0));
5335 /* Don't delete insns to set global regs. */
/* An auto-inc of a live or global register keeps the insn alive. */
5336 if ((regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
5337 || REGNO_REG_SET_P (pbi->reg_live, regno))
5344 /* If setting something that's a reg or part of one,
5345 see if that register's altered value will be live. */
5349 rtx r = SET_DEST (x);
5352 if (GET_CODE (r) == CC0)
5353 return ! pbi->cc0_live;
5356 /* A SET that is a subroutine call cannot be dead. */
5357 if (GET_CODE (SET_SRC (x)) == CALL)
5363 /* Don't eliminate loads from volatile memory or volatile asms. */
5364 else if (volatile_refs_p (SET_SRC (x)))
5367 if (GET_CODE (r) == MEM)
5371 if (MEM_VOLATILE_P (r))
5374 /* Walk the set of memory locations we are currently tracking
5375 and see if one is an identical match to this memory location.
5376 If so, this memory write is dead (remember, we're walking
5377 backwards from the end of the block to the start). Since
5378 rtx_equal_p does not check the alias set or flags, we also
5379 must have the potential for them to conflict (anti_dependence). */
5380 for (temp = pbi->mem_set_list; temp != 0; temp = XEXP (temp, 1))
5381 if (anti_dependence (r, XEXP (temp, 0)))
5383 rtx mem = XEXP (temp, 0);
5385 if (rtx_equal_p (mem, r))
5388 /* Check if memory reference matches an auto increment. Only
5389 post increment/decrement or modify are valid. */
5390 if (GET_MODE (mem) == GET_MODE (r)
5391 && (GET_CODE (XEXP (mem, 0)) == POST_DEC
5392 || GET_CODE (XEXP (mem, 0)) == POST_INC
5393 || GET_CODE (XEXP (mem, 0)) == POST_MODIFY)
5394 && GET_MODE (XEXP (mem, 0)) == GET_MODE (r)
5395 && rtx_equal_p (XEXP (XEXP (mem, 0), 0), XEXP (r, 0)))
/* Strip subregs, STRICT_LOW_PART and ZERO_EXTRACT wrappers to
   reach the underlying register being set. */
5402 while (GET_CODE (r) == SUBREG
5403 || GET_CODE (r) == STRICT_LOW_PART
5404 || GET_CODE (r) == ZERO_EXTRACT)
5407 if (GET_CODE (r) == REG)
5409 int regno = REGNO (r);
5412 if (REGNO_REG_SET_P (pbi->reg_live, regno))
5415 /* If this is a hard register, verify that subsequent
5416 words are not needed. */
5417 if (regno < FIRST_PSEUDO_REGISTER)
5419 int n = HARD_REGNO_NREGS (regno, GET_MODE (r));
5422 if (REGNO_REG_SET_P (pbi->reg_live, regno+n))
5426 /* Don't delete insns to set global regs. */
5427 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
5430 /* Make sure insns to set the stack pointer aren't deleted. */
5431 if (regno == STACK_POINTER_REGNUM)
5434 /* ??? These bits might be redundant with the force live bits
5435 in calculate_global_regs_live. We would delete from
5436 sequential sets; whether this actually affects real code
5437 for anything but the stack pointer I don't know. */
5438 /* Make sure insns to set the frame pointer aren't deleted. */
5439 if (regno == FRAME_POINTER_REGNUM
5440 && (! reload_completed || frame_pointer_needed))
5442 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5443 if (regno == HARD_FRAME_POINTER_REGNUM
5444 && (! reload_completed || frame_pointer_needed))
5448 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5449 /* Make sure insns to set arg pointer are never deleted
5450 (if the arg pointer isn't fixed, there will be a USE
5451 for it, so we can treat it normally). */
5452 if (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
5456 /* Otherwise, the set is dead. */
5462 /* If performing several activities, insn is dead if each activity
5463 is individually dead. Also, CLOBBERs and USEs can be ignored; a
5464 CLOBBER or USE that's inside a PARALLEL doesn't make the insn
5466 else if (code == PARALLEL)
5468 int i = XVECLEN (x, 0);
5470 for (i--; i >= 0; i--)
5471 if (GET_CODE (XVECEXP (x, 0, i)) != CLOBBER
5472 && GET_CODE (XVECEXP (x, 0, i)) != USE
5473 && ! insn_dead_p (pbi, XVECEXP (x, 0, i), call_ok, NULL_RTX))
5479 /* A CLOBBER of a pseudo-register that is dead serves no purpose. That
5480 is not necessarily true for hard registers. */
5481 else if (code == CLOBBER && GET_CODE (XEXP (x, 0)) == REG
5482 && REGNO (XEXP (x, 0)) >= FIRST_PSEUDO_REGISTER
5483 && ! REGNO_REG_SET_P (pbi->reg_live, REGNO (XEXP (x, 0))))
5486 /* We do not check other CLOBBER or USE here. An insn consisting of just
5487 a CLOBBER or just a USE should not be deleted. */
5491 /* If INSN is the last insn in a libcall, and assuming INSN is dead,
5492 return 1 if the entire library call is dead.
5493 This is true if INSN copies a register (hard or pseudo)
5494 and if the hard return reg of the call insn is dead.
5495 (The caller should have tested the destination of the SET inside
5496 INSN already for death.)
5498 If this insn doesn't just copy a register, then we don't
5499 have an ordinary libcall. In that case, cse could not have
5500 managed to substitute the source for the dest later on,
5501 so we can assume the libcall is dead.
5503 PBI is the block info giving pseudoregs live before this insn.
5504 NOTE is the REG_RETVAL note of the insn. */
5507 libcall_dead_p (pbi, note, insn)
5508 struct propagate_block_info *pbi;
5512 rtx x = single_set (insn);
5516 register rtx r = SET_SRC (x);
5517 if (GET_CODE (r) == REG)
/* XEXP (note, 0) points at the first insn of the libcall sequence
   (the insn carrying the matching REG_LIBCALL note). */
5519 rtx call = XEXP (note, 0);
5523 /* Find the call insn. */
5524 while (call != insn && GET_CODE (call) != CALL_INSN)
5525 call = NEXT_INSN (call);
5527 /* If there is none, do nothing special,
5528 since ordinary death handling can understand these insns. */
5532 /* See if the hard reg holding the value is dead.
5533 If this is a PARALLEL, find the call within it. */
5534 call_pat = PATTERN (call);
5535 if (GET_CODE (call_pat) == PARALLEL)
5537 for (i = XVECLEN (call_pat, 0) - 1; i >= 0; i--)
5538 if (GET_CODE (XVECEXP (call_pat, 0, i)) == SET
5539 && GET_CODE (SET_SRC (XVECEXP (call_pat, 0, i))) == CALL)
5542 /* This may be a library call that is returning a value
5543 via invisible pointer. Do nothing special, since
5544 ordinary death handling can understand these insns. */
5548 call_pat = XVECEXP (call_pat, 0, i);
/* Delegate to insn_dead_p with call_ok nonzero, since we already
   know this SET contains (or is) the call itself. */
5551 return insn_dead_p (pbi, call_pat, 1, REG_NOTES (call));
5557 /* Return 1 if register REGNO was used before it was set, i.e. if it is
5558 live at function entry. Don't count global register variables, variables
5559 in registers that can be used for function arg passing, or variables in
5560 fixed hard registers. */
5563 regno_uninitialized (regno)
/* With no CFG (n_basic_blocks == 0) or for excluded hard registers,
   report the register as not uninitialized. */
5566 if (n_basic_blocks == 0
5567 || (regno < FIRST_PSEUDO_REGISTER
5568 && (global_regs[regno]
5569 || fixed_regs[regno]
5570 || FUNCTION_ARG_REGNO_P (regno))))
/* Liveness at the start of block 0 (function entry) is the proxy
   for "used before set". */
5573 return REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno);
5576 /* 1 if register REGNO was alive at a place where `setjmp' was called
5577 and was set more than once or is an argument.
5578 Such regs may be clobbered by `longjmp'. */
5581 regno_clobbered_at_setjmp (regno)
5584 if (n_basic_blocks == 0)
/* Clobbered if (set more than once, or live at function entry)
   and live across some setjmp call site. */
5587 return ((REG_N_SETS (regno) > 1
5588 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
5589 && REGNO_REG_SET_P (regs_live_at_setjmp, regno));
5592 /* INSN references memory, possibly using autoincrement addressing modes.
5593 Find any entries on the mem_set_list that need to be invalidated due
5594 to an address change. */
5597 invalidate_mems_from_autoinc (pbi, insn)
5598 struct propagate_block_info *pbi;
5601 rtx note = REG_NOTES (insn);
/* Each REG_INC note names a register modified by an auto-inc/dec
   address in INSN; any tracked mem mentioning that register is
   no longer a valid "known store" and must be dropped. */
5602 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
5604 if (REG_NOTE_KIND (note) == REG_INC)
5606 rtx temp = pbi->mem_set_list;
5607 rtx prev = NULL_RTX;
5612 next = XEXP (temp, 1);
5613 if (reg_overlap_mentioned_p (XEXP (note, 0), XEXP (temp, 0)))
5615 /* Splice temp out of list. */
5617 XEXP (prev, 1) = next;
5619 pbi->mem_set_list = next;
5620 free_EXPR_LIST_node (temp);
5621 pbi->mem_set_list_len--;
5631 /* EXP is either a MEM or a REG. Remove any dependant entries
5632 from pbi->mem_set_list. */
5635 invalidate_mems_from_set (pbi, exp)
5636 struct propagate_block_info *pbi;
5639 rtx temp = pbi->mem_set_list;
5640 rtx prev = NULL_RTX;
5645 next = XEXP (temp, 1);
/* A store to EXP kills a tracked mem if the two mems may overlap
   (output_dependence), or if EXP is a register appearing in the
   tracked mem's address. */
5646 if ((GET_CODE (exp) == MEM
5647 && output_dependence (XEXP (temp, 0), exp))
5648 || (GET_CODE (exp) == REG
5649 && reg_overlap_mentioned_p (exp, XEXP (temp, 0))))
5651 /* Splice this entry out of the list. */
5653 XEXP (prev, 1) = next;
5655 pbi->mem_set_list = next;
5656 free_EXPR_LIST_node (temp);
5657 pbi->mem_set_list_len--;
5665 /* Process the registers that are set within X. Their bits are set to
5666 1 in the regset DEAD, because they are dead prior to this insn.
5668 If INSN is nonzero, it is the insn being processed.
5670 FLAGS is the set of operations to perform. */
5673 mark_set_regs (pbi, x, insn)
5674 struct propagate_block_info *pbi;
5677 rtx cond = NULL_RTX;
/* Registers modified by auto-inc/dec addresses count as sets too;
   they are recorded in REG_INC notes rather than in the pattern. */
5682 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5684 if (REG_NOTE_KIND (link) == REG_INC)
5685 mark_set_1 (pbi, SET, XEXP (link, 0),
5686 (GET_CODE (x) == COND_EXEC
5687 ? COND_EXEC_TEST (x) : NULL_RTX),
/* Dispatch on the top-level pattern code; COND_EXEC is unwrapped
   and its test remembered so the inner set is marked conditional. */
5691 switch (code = GET_CODE (x))
5695 mark_set_1 (pbi, code, SET_DEST (x), cond, insn, pbi->flags);
5699 cond = COND_EXEC_TEST (x);
5700 x = COND_EXEC_CODE (x);
5706 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
5708 rtx sub = XVECEXP (x, 0, i);
5709 switch (code = GET_CODE (sub))
5712 if (cond != NULL_RTX)
5715 cond = COND_EXEC_TEST (sub);
5716 sub = COND_EXEC_CODE (sub);
5717 if (GET_CODE (sub) != SET && GET_CODE (sub) != CLOBBER)
5723 mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, pbi->flags);
5738 /* Process a single set, which appears in INSN. REG (which may not
5739 actually be a REG, it may also be a SUBREG, PARALLEL, etc.) is
5740 being set using the CODE (which may be SET, CLOBBER, or COND_EXEC).
5741 If the set is conditional (because it appear in a COND_EXEC), COND
5742 will be the condition. */
5745 mark_set_1 (pbi, code, reg, cond, insn, flags)
5746 struct propagate_block_info *pbi;
5748 rtx reg, cond, insn;
5751 int regno_first = -1, regno_last = -1;
5752 unsigned long not_dead = 0;
5755 /* Modifying just one hardware register of a multi-reg value or just a
5756 byte field of a register does not mean the value from before this insn
5757 is now dead. Of course, if it was dead after it's unused now. */
/* First, normalize REG: unwrap PARALLEL return values, partial-set
   wrappers and SUBREGs, computing the hard-register range
   [regno_first, regno_last] affected by the set. */
5759 switch (GET_CODE (reg))
5762 /* Some targets place small structures in registers for return values of
5763 functions. We have to detect this case specially here to get correct
5764 flow information. */
5765 for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
5766 if (XEXP (XVECEXP (reg, 0, i), 0) != 0)
5767 mark_set_1 (pbi, code, XEXP (XVECEXP (reg, 0, i), 0), cond, insn,
5773 case STRICT_LOW_PART:
5774 /* ??? Assumes STRICT_LOW_PART not used on multi-word registers. */
5776 reg = XEXP (reg, 0);
5777 while (GET_CODE (reg) == SUBREG
5778 || GET_CODE (reg) == ZERO_EXTRACT
5779 || GET_CODE (reg) == SIGN_EXTRACT
5780 || GET_CODE (reg) == STRICT_LOW_PART);
5781 if (GET_CODE (reg) == MEM
/* A partial set keeps the old value partially live: remember
   whether the reg was live so we do not mark it fully dead. */
5783 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live, REGNO (reg));
5787 regno_last = regno_first = REGNO (reg);
5788 if (regno_first < FIRST_PSEUDO_REGISTER)
5789 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
5793 if (GET_CODE (SUBREG_REG (reg)) == REG)
5795 enum machine_mode outer_mode = GET_MODE (reg);
5796 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (reg));
5798 /* Identify the range of registers affected. This is moderately
5799 tricky for hard registers. See alter_subreg. */
5801 regno_last = regno_first = REGNO (SUBREG_REG (reg));
5802 if (regno_first < FIRST_PSEUDO_REGISTER)
5804 regno_first += subreg_regno_offset (regno_first, inner_mode,
5807 regno_last = (regno_first
5808 + HARD_REGNO_NREGS (regno_first, outer_mode) - 1);
5810 /* Since we've just adjusted the register number ranges, make
5811 sure REG matches. Otherwise some_was_live will be clear
5812 when it shouldn't have been, and we'll create incorrect
5813 REG_UNUSED notes. */
5814 reg = gen_rtx_REG (outer_mode, regno_first);
5818 /* If the number of words in the subreg is less than the number
5819 of words in the full register, we have a well-defined partial
5820 set. Otherwise the high bits are undefined.
5822 This is only really applicable to pseudos, since we just took
5823 care of multi-word hard registers. */
5824 if (((GET_MODE_SIZE (outer_mode)
5825 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
5826 < ((GET_MODE_SIZE (inner_mode)
5827 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))
5828 not_dead = (unsigned long) REGNO_REG_SET_P (pbi->reg_live,
5831 reg = SUBREG_REG (reg);
5835 reg = SUBREG_REG (reg);
5842 /* If this set is a MEM, then it kills any aliased writes.
5843 If this set is a REG, then it kills any MEMs which use the reg. */
5844 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
5846 if (GET_CODE (reg) == MEM || GET_CODE (reg) == REG)
5847 invalidate_mems_from_set (pbi, reg);
5849 /* If the memory reference had embedded side effects (autoincrement
5850 address modes. Then we may need to kill some entries on the
5852 if (insn && GET_CODE (reg) == MEM)
5853 invalidate_mems_from_autoinc (pbi, insn);
5855 if (pbi->mem_set_list_len < MAX_MEM_SET_LIST_LEN
5856 && GET_CODE (reg) == MEM && ! side_effects_p (reg)
5857 /* ??? With more effort we could track conditional memory life. */
5859 /* We do not know the size of a BLKmode store, so we do not track
5860 them for redundant store elimination. */
5861 && GET_MODE (reg) != BLKmode
5862 /* There are no REG_INC notes for SP, so we can't assume we'll see
5863 everything that invalidates it. To be safe, don't eliminate any
5864 stores though SP; none of them should be redundant anyway. */
5865 && ! reg_mentioned_p (stack_pointer_rtx, reg))
5868 /* Store a copy of mem, otherwise the address may be
5869 scrogged by find_auto_inc. */
5870 if (flags & PROP_AUTOINC)
5871 reg = shallow_copy_rtx (reg);
5873 pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
5874 pbi->mem_set_list_len++;
/* Main case: the destination is a REG that is not one of the
   always-live frame/arg pointers which must never be killed. */
5878 if (GET_CODE (reg) == REG
5879 && ! (regno_first == FRAME_POINTER_REGNUM
5880 && (! reload_completed || frame_pointer_needed))
5881 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
5882 && ! (regno_first == HARD_FRAME_POINTER_REGNUM
5883 && (! reload_completed || frame_pointer_needed))
5885 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5886 && ! (regno_first == ARG_POINTER_REGNUM && fixed_regs[regno_first])
5890 int some_was_live = 0, some_was_dead = 0;
5892 for (i = regno_first; i <= regno_last; ++i)
5894 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
5897 /* Order of the set operation matters here since both
5898 sets may be the same. */
5899 CLEAR_REGNO_REG_SET (pbi->cond_local_set, i);
5900 if (cond != NULL_RTX
5901 && ! REGNO_REG_SET_P (pbi->local_set, i))
5902 SET_REGNO_REG_SET (pbi->cond_local_set, i);
5904 SET_REGNO_REG_SET (pbi->local_set, i);
5906 if (code != CLOBBER)
5907 SET_REGNO_REG_SET (pbi->new_set, i);
5909 some_was_live |= needed_regno;
5910 some_was_dead |= ! needed_regno;
5913 #ifdef HAVE_conditional_execution
5914 /* Consider conditional death in deciding that the register needs
5916 if (some_was_live && ! not_dead
5917 /* The stack pointer is never dead. Well, not strictly true,
5918 but it's very difficult to tell from here. Hopefully
5919 combine_stack_adjustments will fix up the most egregious
5921 && regno_first != STACK_POINTER_REGNUM)
5923 for (i = regno_first; i <= regno_last; ++i)
5924 if (! mark_regno_cond_dead (pbi, i, cond))
5925 not_dead |= ((unsigned long) 1) << (i - regno_first);
5929 /* Additional data to record if this is the final pass. */
5930 if (flags & (PROP_LOG_LINKS | PROP_REG_INFO
5931 | PROP_DEATH_NOTES | PROP_AUTOINC))
5934 register int blocknum = pbi->bb->index;
5937 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
5939 y = pbi->reg_next_use[regno_first];
5941 /* The next use is no longer next, since a store intervenes. */
5942 for (i = regno_first; i <= regno_last; ++i)
5943 pbi->reg_next_use[i] = 0;
5946 if (flags & PROP_REG_INFO)
5948 for (i = regno_first; i <= regno_last; ++i)
5950 /* Count (weighted) references, stores, etc. This counts a
5951 register twice if it is modified, but that is correct. */
5952 REG_N_SETS (i) += 1;
5953 REG_N_REFS (i) += 1;
5954 REG_FREQ (i) += (optimize_size || !pbi->bb->frequency
5955 ? 1 : pbi->bb->frequency);
5957 /* The insns where a reg is live are normally counted
5958 elsewhere, but we want the count to include the insn
5959 where the reg is set, and the normal counting mechanism
5960 would not count it. */
5961 REG_LIVE_LENGTH (i) += 1;
5964 /* If this is a hard reg, record this function uses the reg. */
5965 if (regno_first < FIRST_PSEUDO_REGISTER)
5967 for (i = regno_first; i <= regno_last; i++)
5968 regs_ever_live[i] = 1;
5972 /* Keep track of which basic blocks each reg appears in. */
5973 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
5974 REG_BASIC_BLOCK (regno_first) = blocknum;
5975 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
5976 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
/* All of the set register was live before: no death notes here,
   but possibly a LOG_LINK from the next use back to this insn. */
5980 if (! some_was_dead)
5982 if (flags & PROP_LOG_LINKS)
5984 /* Make a logical link from the next following insn
5985 that uses this register, back to this insn.
5986 The following insns have already been processed.
5988 We don't build a LOG_LINK for hard registers containing
5989 in ASM_OPERANDs. If these registers get replaced,
5990 we might wind up changing the semantics of the insn,
5991 even if reload can make what appear to be valid
5992 assignments later. */
5993 if (y && (BLOCK_NUM (y) == blocknum)
5994 && (regno_first >= FIRST_PSEUDO_REGISTER
5995 || asm_noperands (PATTERN (y)) < 0))
5996 LOG_LINKS (y) = alloc_INSN_LIST (insn, LOG_LINKS (y));
/* All of the set register was dead before: its new value is
   entirely unused after this insn. */
6001 else if (! some_was_live)
6003 if (flags & PROP_REG_INFO)
6004 REG_N_DEATHS (regno_first) += 1;
6006 if (flags & PROP_DEATH_NOTES)
6008 /* Note that dead stores have already been deleted
6009 when possible. If we get here, we have found a
6010 dead store that cannot be eliminated (because the
6011 same insn does something useful). Indicate this
6012 by marking the reg being set as dying here. */
6014 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
6019 if (flags & PROP_DEATH_NOTES)
6021 /* This is a case where we have a multi-word hard register
6022 and some, but not all, of the words of the register are
6023 needed in subsequent insns. Write REG_UNUSED notes
6024 for those parts that were not needed. This case should
6027 for (i = regno_first; i <= regno_last; ++i)
6028 if (! REGNO_REG_SET_P (pbi->reg_live, i))
6030 = alloc_EXPR_LIST (REG_UNUSED,
6031 gen_rtx_REG (reg_raw_mode[i], i),
6037 /* Mark the register as being dead. */
6039 /* The stack pointer is never dead. Well, not strictly true,
6040 but it's very difficult to tell from here. Hopefully
6041 combine_stack_adjustments will fix up the most egregious
6043 && regno_first != STACK_POINTER_REGNUM)
6045 for (i = regno_first; i <= regno_last; ++i)
6046 if (!(not_dead & (((unsigned long) 1) << (i - regno_first))))
6047 CLEAR_REGNO_REG_SET (pbi->reg_live, i);
/* Destination is a REG but one of the always-live pointers: keep
   it live, just invalidate any recorded next use. */
6050 else if (GET_CODE (reg) == REG)
6052 if (flags & (PROP_LOG_LINKS | PROP_AUTOINC))
6053 pbi->reg_next_use[regno_first] = 0;
6056 /* If this is the last pass and this is a SCRATCH, show it will be dying
6057 here and count it. */
6058 else if (GET_CODE (reg) == SCRATCH)
6060 if (flags & PROP_DEATH_NOTES)
6062 = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
6066 #ifdef HAVE_conditional_execution
6067 /* Mark REGNO conditionally dead.
6068 Return true if the register is now unconditionally dead. */
6071 mark_regno_cond_dead (pbi, regno, cond)
6072 struct propagate_block_info *pbi;
6076 /* If this is a store to a predicate register, the value of the
6077 predicate is changing, we don't know that the predicate as seen
6078 before is the same as that seen after. Flush all dependent
6079 conditions from reg_cond_dead. This will make all such
6080 conditionally live registers unconditionally live. */
6081 if (REGNO_REG_SET_P (pbi->reg_cond_reg, regno))
6082 flush_reg_cond_reg (pbi, regno);
6084 /* If this is an unconditional store, remove any conditional
6085 life that may have existed. */
6086 if (cond == NULL_RTX)
6087 splay_tree_remove (pbi->reg_cond_dead, regno);
6090 splay_tree_node node;
6091 struct reg_cond_life_info *rcli;
6094 /* Otherwise this is a conditional set. Record that fact.
6095 It may have been conditionally used, or there may be a
6096 subsequent set with a complimentary condition. */
6098 node = splay_tree_lookup (pbi->reg_cond_dead, regno);
6101 /* The register was unconditionally live previously.
6102 Record the current condition as the condition under
6103 which it is dead. */
6104 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
6105 rcli->condition = cond;
6106 rcli->stores = cond;
6107 rcli->orig_condition = const0_rtx;
6108 splay_tree_insert (pbi->reg_cond_dead, regno,
6109 (splay_tree_value) rcli);
/* Remember that the condition's register participates in some
   recorded death condition, for flush_reg_cond_reg.  */
6111 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
6113 /* Not unconditionaly dead. */
6118 /* The register was conditionally live previously.
6119 Add the new condition to the old. */
6120 rcli = (struct reg_cond_life_info *) node->value;
6121 ncond = rcli->condition;
6122 ncond = ior_reg_cond (ncond, cond, 1);
6123 if (rcli->stores == const0_rtx)
6124 rcli->stores = cond;
6125 else if (rcli->stores != const1_rtx)
6126 rcli->stores = ior_reg_cond (rcli->stores, cond, 1);
6128 /* If the register is now unconditionally dead, remove the entry
6129 in the splay_tree. A register is unconditionally dead if the
6130 dead condition ncond is true. A register is also unconditionally
6131 dead if the sum of all conditional stores is an unconditional
6132 store (stores is true), and the dead condition is identically the
6133 same as the original dead condition initialized at the end of
6134 the block. This is a pointer compare, not an rtx_equal_p
6136 if (ncond == const1_rtx
6137 || (ncond == rcli->orig_condition && rcli->stores == const1_rtx))
6138 splay_tree_remove (pbi->reg_cond_dead, regno);
6141 rcli->condition = ncond;
6143 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
6145 /* Not unconditionaly dead. */
6154 /* Called from splay_tree_delete for pbi->reg_cond_life.
   Frees the reg_cond_life_info stored as a splay-tree VALUE.
   NOTE(review): the free call itself is on a line omitted from this
   extract — presumably free (rcli); confirm against full source.  */
6157 free_reg_cond_life_info (value)
6158 splay_tree_value value;
6160 struct reg_cond_life_info *rcli = (struct reg_cond_life_info *) value;
6164 /* Helper function for flush_reg_cond_reg.
   Splay-tree callback: DATA is an int pair { regno, last-flushed key }.
   Eliminates references to regno from one node's conditions.  */
6167 flush_reg_cond_reg_1 (node, data)
6168 splay_tree_node node;
6171 struct reg_cond_life_info *rcli;
6172 int *xdata = (int *) data;
6173 unsigned int regno = xdata[0];
6175 /* Don't need to search if last flushed value was farther on in
6176 the in-order traversal. */
6177 if (xdata[1] >= (int) node->key)
6180 /* Splice out portions of the expression that refer to regno. */
6181 rcli = (struct reg_cond_life_info *) node->value;
6182 rcli->condition = elim_reg_cond (rcli->condition, regno);
6183 if (rcli->stores != const0_rtx && rcli->stores != const1_rtx)
6184 rcli->stores = elim_reg_cond (rcli->stores, regno);
6186 /* If the entire condition is now false, signal the node to be removed. */
6187 if (rcli->condition == const0_rtx)
6189 xdata[1] = node->key;
6192 else if (rcli->condition == const1_rtx)
6198 /* Flush all (sub) expressions referring to REGNO from REG_COND_LIVE.
   Repeatedly walks reg_cond_dead with flush_reg_cond_reg_1 (which returns
   -1 to request removal of the node recorded in pair[1]) until no more
   removals are requested, then clears REGNO from reg_cond_reg.  */
6201 flush_reg_cond_reg (pbi, regno)
6202 struct propagate_block_info *pbi;
6209 while (splay_tree_foreach (pbi->reg_cond_dead,
6210 flush_reg_cond_reg_1, pair) == -1)
6211 splay_tree_remove (pbi->reg_cond_dead, pair[1]);
6213 CLEAR_REGNO_REG_SET (pbi->reg_cond_reg, regno);
6216 /* Logical arithmetic on predicate conditions. IOR, NOT and AND.
6217 For ior/and, the ADD flag determines whether we want to add the new
6218 condition X to the old one unconditionally. If it is zero, we will
6219 only return a new expression if X allows us to simplify part of
6220 OLD, otherwise we return OLD unchanged to the caller.
6221 If ADD is nonzero, we will return a new condition in all cases. The
6222 toplevel caller of one of these functions should always pass 1 for
   ADD.  */
6226 ior_reg_cond (old, x, add)
6232 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
6234 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
6235 && REVERSE_CONDEXEC_PREDICATES_P (GET_CODE (x), GET_CODE (old))
6236 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6238 if (GET_CODE (x) == GET_CODE (old)
6239 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6241 /* Neither special case matched: build the union explicitly.  */
6243 return gen_rtx_IOR (0, old, x);
6246 switch (GET_CODE (old))
6248 /* Presumably the IOR case: distribute X into each operand.  */
6249 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
6250 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
6251 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6253 if (op0 == const0_rtx)
6255 if (op1 == const0_rtx)
6257 if (op0 == const1_rtx || op1 == const1_rtx)
6259 if (op0 == XEXP (old, 0))
6260 op0 = gen_rtx_IOR (0, op0, x);
6262 op1 = gen_rtx_IOR (0, op1, x);
6263 return gen_rtx_IOR (0, op0, op1);
6267 return gen_rtx_IOR (0, old, x);
6269 /* Presumably the AND case: IOR X into each conjunct.  */
6270 op0 = ior_reg_cond (XEXP (old, 0), x, 0);
6271 op1 = ior_reg_cond (XEXP (old, 1), x, 0);
6272 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6274 if (op0 == const1_rtx)
6276 if (op1 == const1_rtx)
6278 if (op0 == const0_rtx || op1 == const0_rtx)
6280 if (op0 == XEXP (old, 0))
6281 op0 = gen_rtx_IOR (0, op0, x);
6283 op1 = gen_rtx_IOR (0, op1, x);
6284 return gen_rtx_AND (0, op0, op1);
6288 return gen_rtx_IOR (0, old, x);
6290 /* Presumably the NOT case: old | x == !(!old & !x).  */
6291 op0 = and_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
6292 if (op0 != XEXP (old, 0))
6293 return not_reg_cond (op0);
6296 return gen_rtx_IOR (0, old, x);
   /* not_reg_cond (X): return the logical negation of predicate
   condition X.  NOTE(review): the function header line is missing from
   this extract; identification is by the preceding ior_reg_cond comment
   ("IOR, NOT and AND") and the body below — confirm against full
   source.  */
6307 enum rtx_code x_code;
6309 if (x == const0_rtx)
6311 else if (x == const1_rtx)
6313 x_code = GET_CODE (x);
6315 /* For a comparison against a register, negate by reversing the
   comparison code rather than wrapping in NOT.  */
6316 if (GET_RTX_CLASS (x_code) == '<'
6317 && GET_CODE (XEXP (x, 0)) == REG)
6319 if (XEXP (x, 1) != const0_rtx)
6322 return gen_rtx_fmt_ee (reverse_condition (x_code),
6323 VOIDmode, XEXP (x, 0), const0_rtx);
6325 return gen_rtx_NOT (0, x);
6328 /* Conjoin predicate conditions OLD and X; see the ADD-flag comment
   above ior_reg_cond for the meaning of ADD.  */
6329 and_reg_cond (old, x, add)
6335 if (GET_RTX_CLASS (GET_CODE (old)) == '<')
6337 if (GET_RTX_CLASS (GET_CODE (x)) == '<'
6338 && GET_CODE (x) == reverse_condition (GET_CODE (old))
6339 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6341 if (GET_CODE (x) == GET_CODE (old)
6342 && REGNO (XEXP (x, 0)) == REGNO (XEXP (old, 0)))
6346 return gen_rtx_AND (0, old, x);
6349 switch (GET_CODE (old))
6351 /* Presumably the IOR case: AND X into each disjunct.  */
6352 op0 = and_reg_cond (XEXP (old, 0), x, 0);
6353 op1 = and_reg_cond (XEXP (old, 1), x, 0);
6354 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6356 if (op0 == const0_rtx)
6358 if (op1 == const0_rtx)
6360 if (op0 == const1_rtx || op1 == const1_rtx)
6362 if (op0 == XEXP (old, 0))
6363 op0 = gen_rtx_AND (0, op0, x)
6365 op1 = gen_rtx_AND (0, op1, x);
6366 return gen_rtx_IOR (0, op0, op1);
6370 return gen_rtx_AND (0, old, x);
6372 /* Presumably the AND case: distribute X into each conjunct.  */
6373 op0 = and_reg_cond (XEXP (old, 0), x, 0);
6374 op1 = and_reg_cond (XEXP (old, 1), x, 0);
6375 if (op0 != XEXP (old, 0) || op1 != XEXP (old, 1))
6377 if (op0 == const1_rtx)
6379 if (op1 == const1_rtx)
6381 if (op0 == const0_rtx || op1 == const0_rtx)
6383 if (op0 == XEXP (old, 0))
6384 op0 = gen_rtx_AND (0, op0, x);
6386 op1 = gen_rtx_AND (0, op1, x);
6387 return gen_rtx_AND (0, op0, op1);
6392 /* If X is identical to one of the existing terms of the AND,
6393 then just return what we already have. */
6394 /* ??? There really should be some sort of recursive check here in
6395 case there are nested ANDs. */
6396 if ((GET_CODE (XEXP (old, 0)) == GET_CODE (x)
6397 && REGNO (XEXP (XEXP (old, 0), 0)) == REGNO (XEXP (x, 0)))
6398 || (GET_CODE (XEXP (old, 1)) == GET_CODE (x)
6399 && REGNO (XEXP (XEXP (old, 1), 0)) == REGNO (XEXP (x, 0))))
6402 return gen_rtx_AND (0, old, x);
6404 /* Presumably the NOT case: old & x == !(!old | !x).  */
6405 op0 = ior_reg_cond (XEXP (old, 0), not_reg_cond (x), 0);
6406 if (op0 != XEXP (old, 0))
6407 return not_reg_cond (op0);
6410 return gen_rtx_AND (0, old, x);
6417 /* Given a condition X, remove references to reg REGNO and return the
6418 new condition. The removal will be done so that all conditions
6419 involving REGNO are considered to evaluate to false. This function
6420 is used when the value of REGNO changes. */
6423 elim_reg_cond (x, regno)
6429 if (GET_RTX_CLASS (GET_CODE (x)) == '<')
6431 if (REGNO (XEXP (x, 0)) == regno)
6436 switch (GET_CODE (x))
6438 /* Presumably the AND case.  */
6439 op0 = elim_reg_cond (XEXP (x, 0), regno);
6440 op1 = elim_reg_cond (XEXP (x, 1), regno);
6441 if (op0 == const0_rtx || op1 == const0_rtx)
6443 if (op0 == const1_rtx)
6445 if (op1 == const1_rtx)
6447 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
6449 return gen_rtx_AND (0, op0, op1);
6451 /* Presumably the IOR case.  */
6452 op0 = elim_reg_cond (XEXP (x, 0), regno);
6453 op1 = elim_reg_cond (XEXP (x, 1), regno);
6454 if (op0 == const1_rtx || op1 == const1_rtx)
6456 if (op0 == const0_rtx)
6458 if (op1 == const0_rtx)
6460 if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
6462 return gen_rtx_IOR (0, op0, op1);
6464 /* Presumably the NOT case.  */
6465 op0 = elim_reg_cond (XEXP (x, 0), regno);
6466 if (op0 == const0_rtx)
6468 if (op0 == const1_rtx)
6470 if (op0 != XEXP (x, 0))
6471 return not_reg_cond (op0);
6478 #endif /* HAVE_conditional_execution */
6482 /* Try to substitute the auto-inc expression INC as the address inside
6483 MEM which occurs in INSN. Currently, the address of MEM is an expression
6484 involving INCR_REG, and INCR is the next use of INCR_REG; it is an insn
6485 that has a single set whose source is a PLUS of INCR_REG and something
   else.  */
6489 attempt_auto_inc (pbi, inc, insn, mem, incr, incr_reg)
6490 struct propagate_block_info *pbi;
6491 rtx inc, insn, mem, incr, incr_reg;
6493 int regno = REGNO (incr_reg);
6494 rtx set = single_set (incr);
6495 rtx q = SET_DEST (set);
6496 rtx y = SET_SRC (set);
6497 int opnum = XEXP (y, 0) == incr_reg ? 0 : 1;
6499 /* Make sure this reg appears only once in this insn. */
6500 if (count_occurrences (PATTERN (insn), incr_reg, 1) != 1)
6503 if (dead_or_set_p (incr, incr_reg)
6504 /* Mustn't autoinc an eliminable register. */
6505 && (regno >= FIRST_PSEUDO_REGISTER
6506 || ! TEST_HARD_REG_BIT (elim_reg_set, regno)))
6508 /* This is the simple case. Try to make the auto-inc. If
6509 we can't, we are done. Otherwise, we will do any
6510 needed updates below. */
6511 if (! validate_change (insn, &XEXP (mem, 0), inc, 0))
6514 else if (GET_CODE (q) == REG
6515 /* PREV_INSN used here to check the semi-open interval
   [INSN, INCR).  */
6517 && ! reg_used_between_p (q, PREV_INSN (insn), incr)
6518 /* We must also check for sets of q as q may be
6519 a call clobbered hard register and there may
6520 be a call between PREV_INSN (insn) and incr. */
6521 && ! reg_set_between_p (q, PREV_INSN (insn), incr))
6523 /* We have *p followed sometime later by q = p+size.
6524 Both p and q must be live afterward,
6525 and q is not used between INSN and its assignment.
6526 Change it to q = p, ...*q..., q = q+size.
6527 Then fall into the usual case. */
6531 emit_move_insn (q, incr_reg);
6532 insns = get_insns ();
6535 if (basic_block_for_insn)
6536 for (temp = insns; temp; temp = NEXT_INSN (temp))
6537 set_block_for_insn (temp, pbi->bb);
6539 /* If we can't make the auto-inc, or can't make the
6540 replacement into Y, exit. There's no point in making
6541 the change below if we can't do the auto-inc and doing
6542 so is not correct in the pre-inc case. */
6545 validate_change (insn, &XEXP (mem, 0), inc, 1);
6546 validate_change (incr, &XEXP (y, opnum), q, 1);
6547 if (! apply_change_group ())
6550 /* We now know we'll be doing this change, so emit the
6551 new insn(s) and do the updates. */
6552 emit_insns_before (insns, insn);
6554 if (pbi->bb->head == insn)
6555 pbi->bb->head = insns;
6557 /* INCR will become a NOTE and INSN won't contain a
6558 use of INCR_REG. If a use of INCR_REG was just placed in
6559 the insn before INSN, make that the next use.
6560 Otherwise, invalidate it. */
6561 if (GET_CODE (PREV_INSN (insn)) == INSN
6562 && GET_CODE (PATTERN (PREV_INSN (insn))) == SET
6563 && SET_SRC (PATTERN (PREV_INSN (insn))) == incr_reg)
6564 pbi->reg_next_use[regno] = PREV_INSN (insn);
6566 pbi->reg_next_use[regno] = 0;
6571 /* REGNO is now used in INCR which is below INSN, but
6572 it previously wasn't live here. If we don't mark
6573 it as live, we'll put a REG_DEAD note for it
6574 on this insn, which is incorrect. */
6575 SET_REGNO_REG_SET (pbi->reg_live, regno);
6577 /* If there are any calls between INSN and INCR, show
6578 that REGNO now crosses them. */
6579 for (temp = insn; temp != incr; temp = NEXT_INSN (temp))
6580 if (GET_CODE (temp) == CALL_INSN)
6581 REG_N_CALLS_CROSSED (regno)++;
6586 /* If we haven't returned, it means we were able to make the
6587 auto-inc, so update the status. First, record that this insn
6588 has an implicit side effect. */
6590 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, incr_reg, REG_NOTES (insn));
6592 /* Modify the old increment-insn to simply copy
6593 the already-incremented value of our register. */
6594 if (! validate_change (incr, &SET_SRC (set), incr_reg, 0))
6597 /* If that makes it a no-op (copying the register into itself) delete
6598 it so it won't appear to be a "use" and a "set" of this
   register.  */
6600 if (REGNO (SET_DEST (set)) == REGNO (incr_reg))
6602 /* If the original source was dead, it's dead now. */
6605 while ((note = find_reg_note (incr, REG_DEAD, NULL_RTX)) != NULL_RTX)
6607 remove_note (incr, note);
6608 if (XEXP (note, 0) != incr_reg)
6609 CLEAR_REGNO_REG_SET (pbi->reg_live, REGNO (XEXP (note, 0)));
6611 /* Turn the now-redundant increment insn into a deleted note.  */
6612 PUT_CODE (incr, NOTE);
6613 NOTE_LINE_NUMBER (incr) = NOTE_INSN_DELETED;
6614 NOTE_SOURCE_FILE (incr) = 0;
6617 if (regno >= FIRST_PSEUDO_REGISTER)
6619 /* Count an extra reference to the reg. When a reg is
6620 incremented, spilling it is worse, so we want to make
6621 that less likely. */
6622 REG_FREQ (regno) += (optimize_size || !pbi->bb->frequency
6623 ? 1 : pbi->bb->frequency);
6625 /* Count the increment as a setting of the register,
6626 even though it isn't a SET in rtl. */
6627 REG_N_SETS (regno)++;
6631 /* X is a MEM found in INSN. See if we can convert it into an auto-increment
   reference by combining it with a later increment of its address
   register, via attempt_auto_inc.  */
6635 find_auto_inc (pbi, x, insn)
6636 struct propagate_block_info *pbi;
6640 rtx addr = XEXP (x, 0);
6641 HOST_WIDE_INT offset = 0;
6642 rtx set, y, incr, inc_val;
6644 int size = GET_MODE_SIZE (GET_MODE (x));
6646 if (GET_CODE (insn) == JUMP_INSN)
6649 /* Here we detect use of an index register which might be good for
6650 postincrement, postdecrement, preincrement, or predecrement. */
6652 if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
6653 offset = INTVAL (XEXP (addr, 1)), addr = XEXP (addr, 0);
6655 if (GET_CODE (addr) != REG)
6658 regno = REGNO (addr);
6660 /* Is the next use an increment that might make auto-increment? */
6661 incr = pbi->reg_next_use[regno];
6662 if (incr == 0 || BLOCK_NUM (incr) != BLOCK_NUM (insn))
6664 set = single_set (incr);
6665 if (set == 0 || GET_CODE (set) != SET)
6669 if (GET_CODE (y) != PLUS)
6671 /* Pick out the increment amount; ADDR may be either operand.  */
6672 if (REG_P (XEXP (y, 0)) && REGNO (XEXP (y, 0)) == REGNO (addr))
6673 inc_val = XEXP (y, 1);
6674 else if (REG_P (XEXP (y, 1)) && REGNO (XEXP (y, 1)) == REGNO (addr))
6675 inc_val = XEXP (y, 0);
6679 if (GET_CODE (inc_val) == CONST_INT)
6681 if (HAVE_POST_INCREMENT
6682 && (INTVAL (inc_val) == size && offset == 0))
6683 attempt_auto_inc (pbi, gen_rtx_POST_INC (Pmode, addr), insn, x,
6685 else if (HAVE_POST_DECREMENT
6686 && (INTVAL (inc_val) == -size && offset == 0))
6687 attempt_auto_inc (pbi, gen_rtx_POST_DEC (Pmode, addr), insn, x,
6689 else if (HAVE_PRE_INCREMENT
6690 && (INTVAL (inc_val) == size && offset == size))
6691 attempt_auto_inc (pbi, gen_rtx_PRE_INC (Pmode, addr), insn, x,
6693 else if (HAVE_PRE_DECREMENT
6694 && (INTVAL (inc_val) == -size && offset == -size))
6695 attempt_auto_inc (pbi, gen_rtx_PRE_DEC (Pmode, addr), insn, x,
6697 else if (HAVE_POST_MODIFY_DISP && offset == 0)
6698 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
6699 gen_rtx_PLUS (Pmode,
6702 insn, x, incr, addr);
6704 else if (GET_CODE (inc_val) == REG
6705 && ! reg_set_between_p (inc_val, PREV_INSN (insn),
6709 if (HAVE_POST_MODIFY_REG && offset == 0)
6710 attempt_auto_inc (pbi, gen_rtx_POST_MODIFY (Pmode, addr,
6711 gen_rtx_PLUS (Pmode,
6714 insn, x, incr, addr);
6718 #endif /* AUTO_INC_DEC */
6720 /* Mark the register REG (used in INSN, possibly under condition COND)
   as live, update use counts and REG_DEAD notes, and maintain the
   conditional-life splay tree.  NOTE(review): the original header
   comment line is not visible in this extract.  */
6721 mark_used_reg (pbi, reg, cond, insn)
6722 struct propagate_block_info *pbi;
6724 rtx cond ATTRIBUTE_UNUSED;
6727 unsigned int regno_first, regno_last, i;
6728 int some_was_live, some_was_dead, some_not_set;
6730 regno_last = regno_first = REGNO (reg);
6731 if (regno_first < FIRST_PSEUDO_REGISTER)
6732 regno_last += HARD_REGNO_NREGS (regno_first, GET_MODE (reg)) - 1;
6734 /* Find out if any of this register is live after this instruction. */
6735 some_was_live = some_was_dead = 0;
6736 for (i = regno_first; i <= regno_last; ++i)
6738 int needed_regno = REGNO_REG_SET_P (pbi->reg_live, i);
6739 some_was_live |= needed_regno;
6740 some_was_dead |= ! needed_regno;
6743 /* Find out if any of the register was set this insn. */
6745 for (i = regno_first; i <= regno_last; ++i)
6746 some_not_set |= ! REGNO_REG_SET_P (pbi->new_set, i);
6748 if (pbi->flags & (PROP_LOG_LINKS | PROP_AUTOINC))
6750 /* Record where each reg is used, so when the reg is set we know
6751 the next insn that uses it. */
6752 pbi->reg_next_use[regno_first] = insn;
6755 if (pbi->flags & PROP_REG_INFO)
6757 if (regno_first < FIRST_PSEUDO_REGISTER)
6759 /* If this is a register we are going to try to eliminate,
6760 don't mark it live here. If we are successful in
6761 eliminating it, it need not be live unless it is used for
6762 pseudos, in which case it will have been set live when it
6763 was allocated to the pseudos. If the register will not
6764 be eliminated, reload will set it live at that point.
6766 Otherwise, record that this function uses this register. */
6767 /* ??? The PPC backend tries to "eliminate" on the pic
6768 register to itself. This should be fixed. In the mean
6769 time, hack around it. */
6771 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno_first)
6772 && (regno_first == FRAME_POINTER_REGNUM
6773 || regno_first == ARG_POINTER_REGNUM)))
6774 for (i = regno_first; i <= regno_last; ++i)
6775 regs_ever_live[i] = 1;
6779 /* Keep track of which basic block each reg appears in. */
6781 register int blocknum = pbi->bb->index;
6782 if (REG_BASIC_BLOCK (regno_first) == REG_BLOCK_UNKNOWN)
6783 REG_BASIC_BLOCK (regno_first) = blocknum;
6784 else if (REG_BASIC_BLOCK (regno_first) != blocknum)
6785 REG_BASIC_BLOCK (regno_first) = REG_BLOCK_GLOBAL;
6787 /* Count (weighted) number of uses of each reg. */
6788 REG_FREQ (regno_first)
6789 += (optimize_size || !pbi->bb->frequency ? 1 : pbi->bb->frequency);
6790 REG_N_REFS (regno_first)++;
6794 /* Record and count the insns in which a reg dies. If it is used in
6795 this insn and was dead below the insn then it dies in this insn.
6796 If it was set in this insn, we do not make a REG_DEAD note;
6797 likewise if we already made such a note. */
6798 if ((pbi->flags & (PROP_DEATH_NOTES | PROP_REG_INFO))
6802 /* Check for the case where the register dying partially
6803 overlaps the register set by this insn. */
6804 if (regno_first != regno_last)
6805 for (i = regno_first; i <= regno_last; ++i)
6806 some_was_live |= REGNO_REG_SET_P (pbi->new_set, i);
6808 /* If none of the words in X is needed, make a REG_DEAD note.
6809 Otherwise, we must make partial REG_DEAD notes. */
6810 if (! some_was_live)
6812 if ((pbi->flags & PROP_DEATH_NOTES)
6813 && ! find_regno_note (insn, REG_DEAD, regno_first))
6815 = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
6817 if (pbi->flags & PROP_REG_INFO)
6818 REG_N_DEATHS (regno_first)++;
6822 /* Don't make a REG_DEAD note for a part of a register
6823 that is set in the insn. */
6824 for (i = regno_first; i <= regno_last; ++i)
6825 if (! REGNO_REG_SET_P (pbi->reg_live, i)
6826 && ! dead_or_set_regno_p (insn, i))
6828 = alloc_EXPR_LIST (REG_DEAD,
6829 gen_rtx_REG (reg_raw_mode[i], i),
6834 /* Mark the register as being live. */
6835 for (i = regno_first; i <= regno_last; ++i)
6837 SET_REGNO_REG_SET (pbi->reg_live, i);
6839 #ifdef HAVE_conditional_execution
6840 /* If this is a conditional use, record that fact. If it is later
6841 conditionally set, we'll know to kill the register. */
6842 if (cond != NULL_RTX)
6844 splay_tree_node node;
6845 struct reg_cond_life_info *rcli;
6850 node = splay_tree_lookup (pbi->reg_cond_dead, i);
6853 /* The register was unconditionally live previously.
6854 No need to do anything. */
6858 /* The register was conditionally live previously.
6859 Subtract the new life cond from the old death cond. */
6860 rcli = (struct reg_cond_life_info *) node->value;
6861 ncond = rcli->condition;
6862 ncond = and_reg_cond (ncond, not_reg_cond (cond), 1);
6864 /* If the register is now unconditionally live,
6865 remove the entry in the splay_tree. */
6866 if (ncond == const0_rtx)
6867 splay_tree_remove (pbi->reg_cond_dead, i);
6870 rcli->condition = ncond;
6871 SET_REGNO_REG_SET (pbi->reg_cond_reg,
6872 REGNO (XEXP (cond, 0)));
6878 /* The register was not previously live at all. Record
6879 the condition under which it is still dead. */
6880 rcli = (struct reg_cond_life_info *) xmalloc (sizeof (*rcli));
6881 rcli->condition = not_reg_cond (cond);
6882 rcli->stores = const0_rtx;
6883 rcli->orig_condition = const0_rtx;
6884 splay_tree_insert (pbi->reg_cond_dead, i,
6885 (splay_tree_value) rcli);
6887 SET_REGNO_REG_SET (pbi->reg_cond_reg, REGNO (XEXP (cond, 0)));
6890 else if (some_was_live)
6892 /* The register may have been conditionally live previously, but
6893 is now unconditionally live. Remove it from the conditionally
6894 dead list, so that a conditional set won't cause us to think
   it dead.  */
6896 splay_tree_remove (pbi->reg_cond_dead, i);
6902 /* Scan expression X and store a 1-bit in NEW_LIVE for each reg it uses.
6903 This is done assuming the registers needed from X are those that
6904 have 1-bits in PBI->REG_LIVE.
6906 INSN is the containing instruction. If INSN is dead, this function
   is not called.  */
6910 mark_used_regs (pbi, x, cond, insn)
6911 struct propagate_block_info *pbi;
6914 register RTX_CODE code;
6916 int flags = pbi->flags;
6919 code = GET_CODE (x);
6939 /* If we are clobbering a MEM, mark any registers inside the address
   as being used.  */
6941 if (GET_CODE (XEXP (x, 0)) == MEM)
6942 mark_used_regs (pbi, XEXP (XEXP (x, 0), 0), cond, insn);
6946 /* Don't bother watching stores to mems if this is not the
6947 final pass. We'll not be deleting dead stores this round. */
6948 if (optimize && (flags & PROP_SCAN_DEAD_CODE))
6950 /* Invalidate the data for the last MEM stored, but only if MEM is
6951 something that can be stored into. */
6952 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
6953 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
6954 /* Needn't clear the memory set list. */
6958 rtx temp = pbi->mem_set_list;
6959 rtx prev = NULL_RTX;
6964 next = XEXP (temp, 1);
6965 if (anti_dependence (XEXP (temp, 0), x))
6967 /* Splice temp out of the list. */
6969 XEXP (prev, 1) = next;
6971 pbi->mem_set_list = next;
6972 free_EXPR_LIST_node (temp);
6973 pbi->mem_set_list_len--;
6981 /* If the memory reference had embedded side effects (autoincrement
6982 address modes. Then we may need to kill some entries on the
   memory set list.  */
6985 invalidate_mems_from_autoinc (pbi, insn);
6989 if (flags & PROP_AUTOINC)
6990 find_auto_inc (pbi, x, insn);
6995 #ifdef CLASS_CANNOT_CHANGE_MODE
6996 if (GET_CODE (SUBREG_REG (x)) == REG
6997 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER
6998 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x),
6999 GET_MODE (SUBREG_REG (x))))
7000 REG_CHANGES_MODE (REGNO (SUBREG_REG (x))) = 1;
7003 /* While we're here, optimize this case. */
7005 if (GET_CODE (x) != REG)
7010 /* See a register other than being set => mark it as needed. */
7011 mark_used_reg (pbi, x, cond, insn);
7016 register rtx testreg = SET_DEST (x);
7019 /* If storing into MEM, don't show it as being used. But do
7020 show the address as being used. */
7021 if (GET_CODE (testreg) == MEM)
7024 if (flags & PROP_AUTOINC)
7025 find_auto_inc (pbi, testreg, insn);
7027 mark_used_regs (pbi, XEXP (testreg, 0), cond, insn);
7028 mark_used_regs (pbi, SET_SRC (x), cond, insn);
7032 /* Storing in STRICT_LOW_PART is like storing in a reg
7033 in that this SET might be dead, so ignore it in TESTREG.
7034 but in some other ways it is like using the reg.
7036 Storing in a SUBREG or a bit field is like storing the entire
7037 register in that if the register's value is not used
7038 then this SET is not needed. */
7039 while (GET_CODE (testreg) == STRICT_LOW_PART
7040 || GET_CODE (testreg) == ZERO_EXTRACT
7041 || GET_CODE (testreg) == SIGN_EXTRACT
7042 || GET_CODE (testreg) == SUBREG)
7044 #ifdef CLASS_CANNOT_CHANGE_MODE
7045 if (GET_CODE (testreg) == SUBREG
7046 && GET_CODE (SUBREG_REG (testreg)) == REG
7047 && REGNO (SUBREG_REG (testreg)) >= FIRST_PSEUDO_REGISTER
7048 && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (SUBREG_REG (testreg)),
7049 GET_MODE (testreg)))
7050 REG_CHANGES_MODE (REGNO (SUBREG_REG (testreg))) = 1;
7053 /* Modifying a single register in an alternate mode
7054 does not use any of the old value. But these other
7055 ways of storing in a register do use the old value. */
7056 if (GET_CODE (testreg) == SUBREG
7057 && !(REG_SIZE (SUBREG_REG (testreg)) > REG_SIZE (testreg)))
7062 testreg = XEXP (testreg, 0);
7065 /* If this is a store into a register or group of registers,
7066 recursively scan the value being stored. */
7068 if ((GET_CODE (testreg) == PARALLEL
7069 && GET_MODE (testreg) == BLKmode)
7070 || (GET_CODE (testreg) == REG
7071 && (regno = REGNO (testreg),
7072 ! (regno == FRAME_POINTER_REGNUM
7073 && (! reload_completed || frame_pointer_needed)))
7074 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
7075 && ! (regno == HARD_FRAME_POINTER_REGNUM
7076 && (! reload_completed || frame_pointer_needed))
7078 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
7079 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
7084 mark_used_regs (pbi, SET_DEST (x), cond, insn);
7085 mark_used_regs (pbi, SET_SRC (x), cond, insn);
7092 case UNSPEC_VOLATILE:
7096 /* Traditional and volatile asm instructions must be considered to use
7097 and clobber all hard registers, all pseudo-registers and all of
7098 memory. So must TRAP_IF and UNSPEC_VOLATILE operations.
7100 Consider for instance a volatile asm that changes the fpu rounding
7101 mode. An insn should not be moved across this even if it only uses
7102 pseudo-regs because it might give an incorrectly rounded result.
7104 ?!? Unfortunately, marking all hard registers as live causes massive
7105 problems for the register allocator and marking all pseudos as live
7106 creates mountains of uninitialized variable warnings.
7108 So for now, just clear the memory set list and mark any regs
7109 we can find in ASM_OPERANDS as used. */
7110 if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
7112 free_EXPR_LIST_list (&pbi->mem_set_list);
7113 pbi->mem_set_list_len = 0;
7116 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
7117 We can not just fall through here since then we would be confused
7118 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
7119 traditional asms unlike their normal usage. */
7120 if (code == ASM_OPERANDS)
7124 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
7125 mark_used_regs (pbi, ASM_OPERANDS_INPUT (x, j), cond, insn);
7131 if (cond != NULL_RTX)
7134 mark_used_regs (pbi, COND_EXEC_TEST (x), NULL_RTX, insn);
7136 cond = COND_EXEC_TEST (x);
7137 x = COND_EXEC_CODE (x);
7141 /* We _do_not_ want to scan operands of phi nodes. Operands of
7142 a phi function are evaluated only when control reaches this
7143 block along a particular edge. Therefore, regs that appear
7144 as arguments to phi should not be added to the global live at
   start.  */
7152 /* Recursively scan the operands of this expression. */
7155 register const char *fmt = GET_RTX_FORMAT (code);
7158 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7162 /* Tail recursive case: save a function call level. */
7168 mark_used_regs (pbi, XEXP (x, i), cond, insn);
7170 else if (fmt[i] == 'E')
7173 for (j = 0; j < XVECLEN (x, i); j++)
7174 mark_used_regs (pbi, XVECEXP (x, i, j), cond, insn);
7182 /* INSN is an add/sub of a register by a constant.  See whether the
   next use of that register can absorb the increment as a pre-inc /
   pre-dec address mode; if so, delete INSN.  NOTE(review): the original
   header comment line is not visible in this extract.  */
7183 try_pre_increment_1 (pbi, insn)
7184 struct propagate_block_info *pbi;
7187 /* Find the next use of this reg. If in same basic block,
7188 make it do pre-increment or pre-decrement if appropriate. */
7189 rtx x = single_set (insn);
7190 HOST_WIDE_INT amount = ((GET_CODE (SET_SRC (x)) == PLUS ? 1 : -1)
7191 * INTVAL (XEXP (SET_SRC (x), 1)));
7192 int regno = REGNO (SET_DEST (x));
7193 rtx y = pbi->reg_next_use[regno];
7195 && SET_DEST (x) != stack_pointer_rtx
7196 && BLOCK_NUM (y) == BLOCK_NUM (insn)
7197 /* Don't do this if the reg dies, or gets set in y; a standard addressing
7198 mode would be better. */
7199 && ! dead_or_set_p (y, SET_DEST (x))
7200 && try_pre_increment (y, SET_DEST (x), amount))
7202 /* We have found a suitable auto-increment and already changed
7203 insn Y to do it. So flush this increment instruction. */
7204 propagate_block_delete_insn (pbi->bb, insn);
7206 /* Count a reference to this reg for the increment insn we are
7207 deleting. When a reg is incremented, spilling it is worse,
7208 so we want to make that less likely. */
7209 if (regno >= FIRST_PSEUDO_REGISTER)
7211 REG_FREQ (regno) += (optimize_size || !pbi->bb->frequency
7212 ? 1 : pbi->bb->frequency);
7213 REG_N_SETS (regno)++;
7216 /* Flush any remembered memories depending on the value of
7217 the incremented register. */
7218 invalidate_mems_from_set (pbi, SET_DEST (x));
7225 /* Try to change INSN so that it does pre-increment or pre-decrement
7226 addressing on register REG in order to add AMOUNT to REG.
7227 AMOUNT is negative for pre-decrement.
7228 Returns 1 if the change could be made.
7229 This checks all about the validity of the result of modifying INSN. */
7232 try_pre_increment (insn, reg, amount)
7234 HOST_WIDE_INT amount;
7238 /* Nonzero if we can try to make a pre-increment or pre-decrement.
7239 For example, addl $4,r1; movl (r1),... can become movl +(r1),... */
7241 /* Nonzero if we can try to make a post-increment or post-decrement.
7242 For example, addl $4,r1; movl -4(r1),... can become movl (r1)+,...
7243 It is possible for both PRE_OK and POST_OK to be nonzero if the machine
7244 supports both pre-inc and post-inc, or both pre-dec and post-dec. */
7247 /* Nonzero if the opportunity actually requires post-inc or post-dec. */
7250 /* From the sign of increment, see which possibilities are conceivable
7251 on this target machine. */
7252 if (HAVE_PRE_INCREMENT && amount > 0)
7254 if (HAVE_POST_INCREMENT && amount > 0)
7257 if (HAVE_PRE_DECREMENT && amount < 0)
7259 if (HAVE_POST_DECREMENT && amount < 0)
7262 if (! (pre_ok || post_ok))
7265 /* It is not safe to add a side effect to a jump insn
7266 because if the incremented register is spilled and must be reloaded
7267 there would be no way to store the incremented value back in memory. */
7269 if (GET_CODE (insn) == JUMP_INSN)
7273 /* Look for REG used as a plain or offset address inside INSN.  */
7274 use = find_use_as_address (PATTERN (insn), reg, 0);
7275 if (post_ok && (use == 0 || use == (rtx) 1))
7277 use = find_use_as_address (PATTERN (insn), reg, -amount);
7281 if (use == 0 || use == (rtx) 1)
7284 if (GET_MODE_SIZE (GET_MODE (use)) != (amount > 0 ? amount : - amount))
7287 /* See if this combination of instruction and addressing mode exists. */
7288 if (! validate_change (insn, &XEXP (use, 0),
7289 gen_rtx_fmt_e (amount > 0
7290 ? (do_post ? POST_INC : PRE_INC)
7291 : (do_post ? POST_DEC : PRE_DEC),
7295 /* Record that this insn now has an implicit side effect on X. */
7296 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC, reg, REG_NOTES (insn));
7300 #endif /* AUTO_INC_DEC */
7302 /* Find the place in the rtx X where REG is used as a memory address.
7303 Return the MEM rtx that so uses it.
7304 If PLUSCONST is nonzero, search instead for a memory address equivalent to
7305 (plus REG (const_int PLUSCONST)).
7307 If such an address does not appear, return 0.
7308 If REG appears more than once, or is used other than in such an address,
   return (rtx) 1 as a "disqualified" marker.  */
7312 find_use_as_address (x, reg, plusconst)
7315 HOST_WIDE_INT plusconst;
7317 enum rtx_code code = GET_CODE (x);
7318 const char *fmt = GET_RTX_FORMAT (code);
7320 register rtx value = 0;
7323 if (code == MEM && XEXP (x, 0) == reg && plusconst == 0)
7326 if (code == MEM && GET_CODE (XEXP (x, 0)) == PLUS
7327 && XEXP (XEXP (x, 0), 0) == reg
7328 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
7329 && INTVAL (XEXP (XEXP (x, 0), 1)) == plusconst)
7332 if (code == SIGN_EXTRACT || code == ZERO_EXTRACT)
7334 /* If REG occurs inside a MEM used in a bit-field reference,
7335 that is unacceptable. */
7336 if (find_use_as_address (XEXP (x, 0), reg, 0) != 0)
7337 return (rtx) (HOST_WIDE_INT) 1;
7340 /* A direct occurrence of REG outside an address disqualifies it.  */
7341 return (rtx) (HOST_WIDE_INT) 1;
7343 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7347 tem = find_use_as_address (XEXP (x, i), reg, plusconst);
7350 /* More than one qualifying use: disqualify.  */
7351 return (rtx) (HOST_WIDE_INT) 1;
7353 else if (fmt[i] == 'E')
7356 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7358 tem = find_use_as_address (XVECEXP (x, i, j), reg, plusconst);
7362 return (rtx) (HOST_WIDE_INT) 1;
7370 /* Write information about registers and basic blocks into FILE.
7371 This is part of making a debugging dump.
   Prints each regno set in R, with the hard-register name in brackets
   for hard regs; "(nil)" if R is null.  */
7374 dump_regset (r, outf)
7381 fputs (" (nil)", outf);
7385 EXECUTE_IF_SET_IN_REG_SET (r, 0, i,
7387 fprintf (outf, " %d", i);
7388 if (i < FIRST_PSEUDO_REGISTER)
7389 fprintf (outf, " [%s]",
7394 /* Print a human-reaable representation of R on the standard error
7395 stream. This function is designed to be used from within the
7402 dump_regset (r, stderr);
7403 putc ('\n', stderr);
/* NOTE(review): lossy listing; the function prologue and many lines of
   this dump routine are absent.  Code below is kept verbatim.  */
7407 dump_flow_info (file)
7411 static const char * const reg_class_names[] = REG_CLASS_NAMES;
/* Part 1: per-pseudo-register statistics gathered by life analysis.  */
7413 fprintf (file, "%d registers.\n", max_regno);
7414 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
7417 enum reg_class class, altclass;
7418 fprintf (file, "\nRegister %d used %d times across %d insns",
7419 i, REG_N_REFS (i), REG_LIVE_LENGTH (i));
7420 if (REG_BASIC_BLOCK (i) >= 0)
7421 fprintf (file, " in block %d", REG_BASIC_BLOCK (i));
7423 fprintf (file, "; set %d time%s", REG_N_SETS (i),
7424 (REG_N_SETS (i) == 1) ? "" : "s");
7425 if (REG_USERVAR_P (regno_reg_rtx[i]))
7426 fprintf (file, "; user var");
7427 if (REG_N_DEATHS (i) != 1)
7428 fprintf (file, "; dies in %d places", REG_N_DEATHS (i));
7429 if (REG_N_CALLS_CROSSED (i) == 1)
7430 fprintf (file, "; crosses 1 call");
7431 else if (REG_N_CALLS_CROSSED (i))
7432 fprintf (file, "; crosses %d calls", REG_N_CALLS_CROSSED (i));
7433 if (PSEUDO_REGNO_BYTES (i) != UNITS_PER_WORD)
7434 fprintf (file, "; %d bytes", PSEUDO_REGNO_BYTES (i));
/* Only print register-class preferences when they differ from the
   defaults (GENERAL_REGS preferred, ALL_REGS alternate).  */
7435 class = reg_preferred_class (i);
7436 altclass = reg_alternate_class (i);
7437 if (class != GENERAL_REGS || altclass != ALL_REGS)
7439 if (altclass == ALL_REGS || class == ALL_REGS)
7440 fprintf (file, "; pref %s", reg_class_names[(int) class]);
7441 else if (altclass == NO_REGS)
7442 fprintf (file, "; %s or none", reg_class_names[(int) class]);
7444 fprintf (file, "; pref %s, else %s",
7445 reg_class_names[(int) class],
7446 reg_class_names[(int) altclass]);
7448 if (REG_POINTER (regno_reg_rtx[i]))
7449 fprintf (file, "; pointer");
7450 fprintf (file, ".\n");
/* Part 2: per-basic-block summary -- insn span, count/frequency,
   edges, and live-register sets at the block boundaries.  */
7453 fprintf (file, "\n%d basic blocks, %d edges.\n", n_basic_blocks, n_edges);
7454 for (i = 0; i < n_basic_blocks; i++)
7456 register basic_block bb = BASIC_BLOCK (i);
7459 fprintf (file, "\nBasic block %d: first insn %d, last %d, loop_depth %d, count ",
7460 i, INSN_UID (bb->head), INSN_UID (bb->end), bb->loop_depth);
7461 fprintf (file, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) bb->count);
7462 fprintf (file, ", freq %i.\n", bb->frequency);
7464 fprintf (file, "Predecessors: ");
7465 for (e = bb->pred; e; e = e->pred_next)
7466 dump_edge_info (file, e, 0);
7468 fprintf (file, "\nSuccessors: ");
7469 for (e = bb->succ; e; e = e->succ_next)
7470 dump_edge_info (file, e, 1);
7472 fprintf (file, "\nRegisters live at start:");
7473 dump_regset (bb->global_live_at_start, file);
7475 fprintf (file, "\nRegisters live at end:");
7476 dump_regset (bb->global_live_at_end, file);
/* debug_flow_info: stderr convenience wrapper (body elided here).  */
7487 dump_flow_info (stderr);
/* NOTE(review): lossy listing; conditionals guarding some prints are
   missing between the embedded line numbers.  */
7491 dump_edge_info (file, e, do_succ)
/* DO_SUCC selects which endpoint of the edge to print: the dest when
   dumping successor lists, the src when dumping predecessor lists.  */
7496 basic_block side = (do_succ ? e->dest : e->src);
7498 if (side == ENTRY_BLOCK_PTR)
7499 fputs (" ENTRY", file);
7500 else if (side == EXIT_BLOCK_PTR)
7501 fputs (" EXIT", file);
7503 fprintf (file, " %d", side->index);
/* Probability is stored scaled by REG_BR_PROB_BASE; print as percent.  */
7506 fprintf (file, " [%.1f%%] ", e->probability * 100.0 / REG_BR_PROB_BASE);
7510 fprintf (file, " count:");
7511 fprintf (file, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) e->count);
/* Decode e->flags bit by bit into symbolic names; unknown bits print
   as their bit index.  */
7516 static const char * const bitnames[] = {
7517 "fallthru", "crit", "ab", "abcall", "eh", "fake"
7520 int i, flags = e->flags;
7524 for (i = 0; flags; i++)
7525 if (flags & (1 << i))
7531 if (i < (int) ARRAY_SIZE (bitnames))
7532 fputs (bitnames[i], file);
7534 fprintf (file, "%d", i);
/* NOTE(review): lossy listing; the dump_bb prologue and parts of the
   debug wrappers are missing.  */
7541 /* Print out one basic block with live information at start and end. */
7552 fprintf (outf, ";; Basic block %d, loop depth %d, count ",
7553 bb->index, bb->loop_depth);
7554 fprintf (outf, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) bb->count);
7557 fputs (";; Predecessors: ", outf);
7558 for (e = bb->pred; e; e = e->pred_next)
7559 dump_edge_info (outf, e, 0);
7562 fputs (";; Registers live at start:", outf);
7563 dump_regset (bb->global_live_at_start, outf);
/* Print every insn in the block; LAST is one past bb->end.  */
7566 for (insn = bb->head, last = NEXT_INSN (bb->end);
7568 insn = NEXT_INSN (insn))
7569 print_rtl_single (outf, insn);
7571 fputs (";; Registers live at end:", outf);
7572 dump_regset (bb->global_live_at_end, outf);
7575 fputs (";; Successors: ", outf);
7576 for (e = bb->succ; e; e = e->succ_next)
7577 dump_edge_info (outf, e, 1);
/* debug_bb / debug_bb_n: stderr wrappers (bodies elided here).  */
7585 dump_bb (bb, stderr);
7592 dump_bb (BASIC_BLOCK (n), stderr);
/* NOTE(review): lossy listing; braces and several statements of this
   function are missing between the embedded line numbers.  */
7595 /* Like print_rtl, but also print out live information for the start of each
7599 print_rtl_with_bb (outf, rtx_first)
7603 register rtx tmp_rtx;
7606 fprintf (outf, "(nil)\n");
7610 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
/* Three uid-indexed maps: which block starts/ends at each insn, and
   whether each insn belongs to zero, one, or multiple blocks.  */
7611 int max_uid = get_max_uid ();
7612 basic_block *start = (basic_block *)
7613 xcalloc (max_uid, sizeof (basic_block));
7614 basic_block *end = (basic_block *)
7615 xcalloc (max_uid, sizeof (basic_block));
7616 enum bb_state *in_bb_p = (enum bb_state *)
7617 xcalloc (max_uid, sizeof (enum bb_state));
7619 for (i = n_basic_blocks - 1; i >= 0; i--)
7621 basic_block bb = BASIC_BLOCK (i);
7624 start[INSN_UID (bb->head)] = bb;
7625 end[INSN_UID (bb->end)] = bb;
7626 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
7628 enum bb_state state = IN_MULTIPLE_BB;
7629 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
7631 in_bb_p[INSN_UID (x)] = state;
/* Walk the full insn chain, annotating block starts/ends and insns
   that are outside any block or claimed by several blocks.  */
7638 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
7643 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
7645 fprintf (outf, ";; Start of basic block %d, registers live:",
7647 dump_regset (bb->global_live_at_start, outf);
7651 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
7652 && GET_CODE (tmp_rtx) != NOTE
7653 && GET_CODE (tmp_rtx) != BARRIER)
7654 fprintf (outf, ";; Insn is not within a basic block\n");
7655 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
7656 fprintf (outf, ";; Insn is in multiple basic blocks\n");
7658 did_output = print_rtl_single (outf, tmp_rtx);
7660 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
7662 fprintf (outf, ";; End of basic block %d, registers live:\n",
7664 dump_regset (bb->global_live_at_end, outf);
/* Epilogue delay-slot insns live outside the normal chain; dump them
   separately.  */
7677 if (current_function_epilogue_delay_list != 0)
7679 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
7680 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
7681 tmp_rtx = XEXP (tmp_rtx, 1))
7682 print_rtl_single (outf, XEXP (tmp_rtx, 0));
/* NOTE(review): lossy listing; guards and declarations are missing.  */
7686 /* Dump the rtl into the current debugging dump file, then abort. */
7689 print_rtl_and_abort_fcn (file, line, function)
7692 const char *function;
7696 print_rtl_with_bb (rtl_dump_file, get_insns ());
7697 fclose (rtl_dump_file);
/* fancy_abort reports FILE/LINE/FUNCTION before aborting.  */
7700 fancy_abort (file, line, function);
7703 /* Recompute register set/reference counts immediately prior to register
7706 This avoids problems with set/reference counts changing to/from values
7707 which have special meanings to the register allocators.
7709 Additionally, the reference counts are the primary component used by the
7710 register allocators to prioritize pseudos for allocation to hard regs.
7711 More accurate reference counts generally lead to better register allocation.
7713 F is the first insn to be scanned.
7715 LOOP_STEP denotes how much loop_depth should be incremented per
7716 loop nesting level in order to increase the ref count more for
7717 references in a loop.
7719 It might be worthwhile to update REG_LIVE_LENGTH, REG_BASIC_BLOCK and
7720 possibly other information which is used by the register allocators. */
7723 recompute_reg_usage (f, loop_step)
/* Both parameters are unused here; the work is delegated to the
   life-update machinery below.  */
7724 rtx f ATTRIBUTE_UNUSED;
7725 int loop_step ATTRIBUTE_UNUSED;
7727 allocate_reg_life_data ();
7728 update_life_info (NULL, UPDATE_LIFE_LOCAL, PROP_REG_INFO);
/* NOTE(review): lossy listing; the counting branch and several control
   lines of this function are missing between the embedded numbers.  */
7731 /* Optionally removes all the REG_DEAD and REG_UNUSED notes from a set of
7732 blocks. If BLOCKS is NULL, assume the universal set. Returns a count
7733 of the number of registers that died. */
7736 count_or_remove_death_notes (blocks, kill)
7742 for (i = n_basic_blocks - 1; i >= 0; --i)
7747 if (blocks && ! TEST_BIT (blocks, i))
7750 bb = BASIC_BLOCK (i);
7752 for (insn = bb->head;; insn = NEXT_INSN (insn))
/* PPREV threads through the note list so a note can be unlinked
   in place when KILL is set.  */
7756 rtx *pprev = &REG_NOTES (insn);
7761 switch (REG_NOTE_KIND (link))
7764 if (GET_CODE (XEXP (link, 0)) == REG)
7766 rtx reg = XEXP (link, 0);
/* Hard registers may span several machine registers; count each.  */
7769 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
7772 n = HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg));
7780 rtx next = XEXP (link, 1);
7781 free_EXPR_LIST_node (link);
7782 *pprev = link = next;
7788 pprev = &XEXP (link, 1);
/* The loop over insns terminates at the block's last insn.  */
7795 if (insn == bb->end)
/* NOTE(review): lossy listing; function prologues and some guards are
   missing between the embedded line numbers.  */
7804 /* Update insns block within BB. */
7807 update_bb_for_insn (bb)
/* Nothing to do when the insn->block map has not been built yet.  */
7812 if (! basic_block_for_insn)
7815 for (insn = bb->head; ; insn = NEXT_INSN (insn))
7817 set_block_for_insn (insn, bb);
7819 if (insn == bb->end)
7825 /* Record INSN's block as BB. */
7828 set_block_for_insn (insn, bb)
7832 size_t uid = INSN_UID (insn);
/* Grow the uid-indexed varray on demand when a new insn's uid falls
   past the current capacity.  */
7833 if (uid >= basic_block_for_insn->num_elements)
7837 /* Add one-eighth the size so we don't keep calling xrealloc. */
7838 new_size = uid + (uid + 7) / 8;
7840 VARRAY_GROW (basic_block_for_insn, new_size);
7842 VARRAY_BB (basic_block_for_insn, uid) = bb;
7845 /* When a new insn has been inserted into an existing block, it will
7846 sometimes emit more than a single insn. This routine will set the
7847 block number for the specified insn, and look backwards in the insn
7848 chain to see if there are any other uninitialized insns immediately
7849 previous to this one, and set the block number for them too. */
7852 set_block_for_new_insns (insn, bb)
7856 set_block_for_insn (insn, bb);
7858 /* Scan the previous instructions setting the block number until we find
7859 an instruction that has the block number set, or we find a note
7861 for (insn = PREV_INSN (insn); insn != NULL_RTX; insn = PREV_INSN (insn))
7863 if (GET_CODE (insn) == NOTE)
7865 if (INSN_UID (insn) >= basic_block_for_insn->num_elements
7866 || BLOCK_FOR_INSN (insn) == 0)
7867 set_block_for_insn (insn, bb);
/* NOTE(review): lossy listing; error-handling branches, loop headers and
   braces are missing throughout this checker.  Code kept verbatim.  */
7873 /* Verify the CFG consistency. This function check some CFG invariants and
7874 aborts when something is wrong. Hope that this function will help to
7875 convert many optimization passes to preserve CFG consistent.
7877 Currently it does following checks:
7879 - test head/end pointers
7880 - overlapping of basic blocks
7881 - edge list correctness
7882 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
7883 - tails of basic blocks (ensure that boundary is necessary)
7884 - scans body of the basic block for JUMP_INSN, CODE_LABEL
7885 and NOTE_INSN_BASIC_BLOCK
7886 - check that all insns are in the basic blocks
7887 (except the switch handling code, barriers and notes)
7888 - check that all returns are followed by barriers
7890 In future it can be extended check a lot of other stuff as well
7891 (reachability of basic blocks, life information, etc. etc.). */
7896 const int max_uid = get_max_uid ();
7897 const rtx rtx_first = get_insns ();
7898 rtx last_head = get_last_insn ();
7899 basic_block *bb_info;
7901 int i, last_bb_num_seen, num_bb_notes, err = 0;
7903 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
/* Pass 1: verify each block's head/end are in the insn chain and
   build the uid -> block map, detecting overlapping blocks.  */
7905 for (i = n_basic_blocks - 1; i >= 0; i--)
7907 basic_block bb = BASIC_BLOCK (i);
7908 rtx head = bb->head;
7911 /* Verify the end of the basic block is in the INSN chain. */
7912 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
7917 error ("End insn %d for block %d not found in the insn stream.",
7918 INSN_UID (end), bb->index);
7922 /* Work backwards from the end to the head of the basic block
7923 to verify the head is in the RTL chain. */
7924 for (; x != NULL_RTX; x = PREV_INSN (x))
7926 /* While walking over the insn chain, verify insns appear
7927 in only one basic block and initialize the BB_INFO array
7928 used by other passes. */
7929 if (bb_info[INSN_UID (x)] != NULL)
7931 error ("Insn %d is in multiple basic blocks (%d and %d)",
7932 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
7935 bb_info[INSN_UID (x)] = bb;
7942 error ("Head insn %d for block %d not found in the insn stream.",
7943 INSN_UID (head), bb->index);
7950 /* Now check the basic blocks (boundaries etc.) */
/* Pass 2: per-block checks -- edge list symmetry, fallthru sanity,
   block header/footer notes, stray flow-control insns.  */
7951 for (i = n_basic_blocks - 1; i >= 0; i--)
7953 basic_block bb = BASIC_BLOCK (i);
7954 /* Check correctness of edge lists */
/* A fallthru edge must connect consecutive blocks and be physically
   possible (can_fallthru).  */
7960 if ((e->flags & EDGE_FALLTHRU)
7961 && e->src != ENTRY_BLOCK_PTR
7962 && e->dest != EXIT_BLOCK_PTR
7963 && (e->src->index + 1 != e->dest->index
7964 || !can_fallthru (e->src, e->dest)))
7966 error ("verify_flow_info: Incorrect fallthru edge %i->%i",
7967 e->src->index, e->dest->index);
7973 error ("verify_flow_info: Basic block %d succ edge is corrupted",
7975 fprintf (stderr, "Predecessor: ");
7976 dump_edge_info (stderr, e, 0);
7977 fprintf (stderr, "\nSuccessor: ");
7978 dump_edge_info (stderr, e, 1);
7979 fprintf (stderr, "\n");
/* Every successor edge must also appear on its dest's pred list.  */
7982 if (e->dest != EXIT_BLOCK_PTR)
7984 edge e2 = e->dest->pred;
7985 while (e2 && e2 != e)
7989 error ("Basic block %i edge lists are corrupted", bb->index);
8001 error ("Basic block %d pred edge is corrupted", bb->index);
8002 fputs ("Predecessor: ", stderr);
8003 dump_edge_info (stderr, e, 0);
8004 fputs ("\nSuccessor: ", stderr);
8005 dump_edge_info (stderr, e, 1);
8006 fputc ('\n', stderr);
/* Symmetrically, every pred edge must be on its src's succ list.  */
8009 if (e->src != ENTRY_BLOCK_PTR)
8011 edge e2 = e->src->succ;
8012 while (e2 && e2 != e)
8016 error ("Basic block %i edge lists are corrupted", bb->index);
8023 /* OK pointers are correct. Now check the header of basic
8024 block. It ought to contain optional CODE_LABEL followed
8025 by NOTE_BASIC_BLOCK. */
8027 if (GET_CODE (x) == CODE_LABEL)
8031 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
8037 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
8039 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
8046 /* Do checks for empty blocks here */
8053 if (NOTE_INSN_BASIC_BLOCK_P (x))
8055 error ("NOTE_INSN_BASIC_BLOCK %d in the middle of basic block %d",
8056 INSN_UID (x), bb->index);
/* Jumps, labels and barriers may only terminate a block, never
   appear in its interior.  */
8063 if (GET_CODE (x) == JUMP_INSN
8064 || GET_CODE (x) == CODE_LABEL
8065 || GET_CODE (x) == BARRIER)
8067 error ("In basic block %d:", bb->index);
8068 fatal_insn ("Flow control insn inside a basic block", x);
/* Pass 3: walk the whole insn stream -- block notes must be numbered
   consecutively, and loose insns outside blocks are only tolerated
   for jump-table scaffolding, notes and barriers.  */
8076 last_bb_num_seen = -1;
8081 if (NOTE_INSN_BASIC_BLOCK_P (x))
8083 basic_block bb = NOTE_BASIC_BLOCK (x);
8085 if (bb->index != last_bb_num_seen + 1)
8086 /* Basic blocks not numbered consecutively. */
8089 last_bb_num_seen = bb->index;
8092 if (!bb_info[INSN_UID (x)])
8094 switch (GET_CODE (x))
8101 /* An addr_vec is placed outside any basic block. */
8103 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
8104 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
8105 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC)
8110 /* But in any case, non-deletable labels can appear anywhere. */
8114 fatal_insn ("Insn outside basic block", x);
8119 && GET_CODE (x) == JUMP_INSN
8120 && returnjump_p (x) && ! condjump_p (x)
8121 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER)
8122 fatal_insn ("Return not followed by barrier", x);
8127 if (num_bb_notes != n_basic_blocks)
8129 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
8130 num_bb_notes, n_basic_blocks);
/* NOTE(review): lossy listing; counting statements and braces are
   missing between the embedded line numbers.  */
8139 /* Functions to access an edge list with a vector representation.
8140 Enough data is kept such that given an index number, the
8141 pred and succ that edge represents can be determined, or
8142 given a pred and a succ, its index number can be returned.
8143 This allows algorithms which consume a lot of memory to
8144 represent the normally full matrix of edge (pred,succ) with a
8145 single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
8146 wasted space in the client code due to sparse flow graphs. */
8148 /* This functions initializes the edge list. Basically the entire
8149 flowgraph is processed, and all edges are assigned a number,
8150 and the data structure is filled in. */
8155 struct edge_list *elist;
8161 block_count = n_basic_blocks + 2; /* Include the entry and exit blocks. */
8165 /* Determine the number of edges in the flow graph by counting successor
8166 edges on each basic block. */
8167 for (x = 0; x < n_basic_blocks; x++)
8169 basic_block bb = BASIC_BLOCK (x);
8171 for (e = bb->succ; e; e = e->succ_next)
8174 /* Don't forget successors of the entry block. */
8175 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
8178 elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
8179 elist->num_blocks = block_count;
8180 elist->num_edges = num_edges;
8181 elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
/* Second pass: record edges in index order, entry block first.  */
8185 /* Follow successors of the entry block, and register these edges. */
8186 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
8188 elist->index_to_edge[num_edges] = e;
8192 for (x = 0; x < n_basic_blocks; x++)
8194 basic_block bb = BASIC_BLOCK (x);
8196 /* Follow all successors of blocks, and register these edges. */
8197 for (e = bb->succ; e; e = e->succ_next)
8199 elist->index_to_edge[num_edges] = e;
8206 /* This function free's memory associated with an edge list. */
8209 free_edge_list (elist)
8210 struct edge_list *elist;
8214 free (elist->index_to_edge);
8219 /* This function provides debug output showing an edge list. */
8222 print_edge_list (f, elist)
8224 struct edge_list *elist;
8227 fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
8228 elist->num_blocks - 2, elist->num_edges);
8230 for (x = 0; x < elist->num_edges; x++)
8232 fprintf (f, " %-4d - edge(", x);
/* ENTRY and EXIT have no index; print symbolic names for them.  */
8233 if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
8234 fprintf (f, "entry,");
8236 fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
8238 if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
8239 fprintf (f, "exit)\n");
8241 fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
/* NOTE(review): lossy listing; found_edge bookkeeping and some loop
   bodies are missing between the embedded line numbers.  */
8245 /* This function provides an internal consistency check of an edge list,
8246 verifying that all edges are present, and that there are no
8250 verify_edge_list (f, elist)
8252 struct edge_list *elist;
8254 int x, pred, succ, index;
/* Part 1: every real CFG edge must have an index, and the index must
   map back to the same (pred, succ) pair.  */
8257 for (x = 0; x < n_basic_blocks; x++)
8259 basic_block bb = BASIC_BLOCK (x);
8261 for (e = bb->succ; e; e = e->succ_next)
8263 pred = e->src->index;
8264 succ = e->dest->index;
8265 index = EDGE_INDEX (elist, e->src, e->dest);
8266 if (index == EDGE_INDEX_NO_EDGE)
8268 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
8271 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
8272 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
8273 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
8274 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
8275 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
8276 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
/* Same check for the entry block's successor edges.  */
8279 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
8281 pred = e->src->index;
8282 succ = e->dest->index;
8283 index = EDGE_INDEX (elist, e->src, e->dest);
8284 if (index == EDGE_INDEX_NO_EDGE)
8286 fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
8289 if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
8290 fprintf (f, "*p* Pred for index %d should be %d not %d\n",
8291 index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
8292 if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
8293 fprintf (f, "*p* Succ for index %d should be %d not %d\n",
8294 index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
8296 /* We've verified that all the edges are in the list, now let's make sure
8297 there are no spurious edges in the list. */
/* Part 2: quadratic sweep over all (pred, succ) pairs, cross-checking
   the index map against the actual succ/pred chains.  */
8299 for (pred = 0; pred < n_basic_blocks; pred++)
8300 for (succ = 0; succ < n_basic_blocks; succ++)
8302 basic_block p = BASIC_BLOCK (pred);
8303 basic_block s = BASIC_BLOCK (succ);
8307 for (e = p->succ; e; e = e->succ_next)
8313 for (e = s->pred; e; e = e->pred_next)
8319 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
8320 == EDGE_INDEX_NO_EDGE && found_edge != 0)
8321 fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
8323 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
8324 != EDGE_INDEX_NO_EDGE && found_edge == 0)
8325 fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
8326 pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
8327 BASIC_BLOCK (succ)));
/* Repeat the sweep for (entry, succ) pairs...  */
8329 for (succ = 0; succ < n_basic_blocks; succ++)
8331 basic_block p = ENTRY_BLOCK_PTR;
8332 basic_block s = BASIC_BLOCK (succ);
8336 for (e = p->succ; e; e = e->succ_next)
8342 for (e = s->pred; e; e = e->pred_next)
8348 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
8349 == EDGE_INDEX_NO_EDGE && found_edge != 0)
8350 fprintf (f, "*** Edge (entry, %d) appears to not have an index\n",
8352 if (EDGE_INDEX (elist, ENTRY_BLOCK_PTR, BASIC_BLOCK (succ))
8353 != EDGE_INDEX_NO_EDGE && found_edge == 0)
8354 fprintf (f, "*** Edge (entry, %d) has index %d, but no edge exists\n",
8355 succ, EDGE_INDEX (elist, ENTRY_BLOCK_PTR,
8356 BASIC_BLOCK (succ)));
/* ...and for (pred, exit) pairs.  */
8358 for (pred = 0; pred < n_basic_blocks; pred++)
8360 basic_block p = BASIC_BLOCK (pred);
8361 basic_block s = EXIT_BLOCK_PTR;
8365 for (e = p->succ; e; e = e->succ_next)
8371 for (e = s->pred; e; e = e->pred_next)
8377 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
8378 == EDGE_INDEX_NO_EDGE && found_edge != 0)
8379 fprintf (f, "*** Edge (%d, exit) appears to not have an index\n",
8381 if (EDGE_INDEX (elist, BASIC_BLOCK (pred), EXIT_BLOCK_PTR)
8382 != EDGE_INDEX_NO_EDGE && found_edge == 0)
8383 fprintf (f, "*** Edge (%d, exit) has index %d, but no edge exists\n",
8384 pred, EDGE_INDEX (elist, BASIC_BLOCK (pred),
8389 /* This routine will determine what, if any, edge there is between
8390 a specified predecessor and successor. */
8393 find_edge_index (edge_list, pred, succ)
8394 struct edge_list *edge_list;
8395 basic_block pred, succ;
/* Linear scan of the edge vector; returns EDGE_INDEX_NO_EDGE when no
   (pred, succ) edge exists.  */
8398 for (x = 0; x < NUM_EDGES (edge_list); x++)
8400 if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
8401 && INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
8404 return (EDGE_INDEX_NO_EDGE);
/* NOTE(review): lossy listing; loop bodies that advance last_pred /
   last_succ and the free of E are missing between embedded numbers.  */
8407 /* This function will remove an edge from the flow graph. */
8413 edge last_pred = NULL;
8414 edge last_succ = NULL;
8416 basic_block src, dest;
/* Unlink E from its source's successor list...  */
8419 for (tmp = src->succ; tmp && tmp != e; tmp = tmp->succ_next)
8425 last_succ->succ_next = e->succ_next;
8427 src->succ = e->succ_next;
/* ...and from its destination's predecessor list.  */
8429 for (tmp = dest->pred; tmp && tmp != e; tmp = tmp->pred_next)
8435 last_pred->pred_next = e->pred_next;
8437 dest->pred = e->pred_next;
8443 /* This routine will remove any fake successor edges for a basic block.
8444 When the edge is removed, it is also removed from whatever predecessor
8448 remove_fake_successors (bb)
8452 for (e = bb->succ; e;)
8456 if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
8461 /* This routine will remove all fake edges from the flow graph. If
8462 we remove all fake successors, it will automatically remove all
8463 fake predecessors. */
8466 remove_fake_edges ()
8470 for (x = 0; x < n_basic_blocks; x++)
8471 remove_fake_successors (BASIC_BLOCK (x));
8473 /* We've handled all successors except the entry block's. */
8474 remove_fake_successors (ENTRY_BLOCK_PTR);
8477 /* This function will add a fake edge between any block which has no
8478 successors, and the exit block. Some data flow equations require these
8482 add_noreturn_fake_exit_edges ()
8486 for (x = 0; x < n_basic_blocks; x++)
8487 if (BASIC_BLOCK (x)->succ == NULL)
8488 make_edge (NULL, BASIC_BLOCK (x), EXIT_BLOCK_PTR, EDGE_FAKE);
/* NOTE(review): lossy listing; the loop construct around the repeated
   reverse-DFS and some braces are missing.  */
8491 /* This function adds a fake edge between any infinite loops to the
8492 exit block. Some optimizations require a path from each node to
8495 See also Morgan, Figure 3.10, pp. 82-83.
8497 The current implementation is ugly, not attempting to minimize the
8498 number of inserted fake edges. To reduce the number of fake edges
8499 to insert, add fake edges from _innermost_ loops containing only
8500 nodes not reachable from the exit block. */
8503 connect_infinite_loops_to_exit ()
8505 basic_block unvisited_block;
8507 /* Perform depth-first search in the reverse graph to find nodes
8508 reachable from the exit block. */
8509 struct depth_first_search_dsS dfs_ds;
8511 flow_dfs_compute_reverse_init (&dfs_ds);
8512 flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
8514 /* Repeatedly add fake edges, updating the unreachable nodes. */
8517 unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
8518 if (!unvisited_block)
8520 make_edge (NULL, unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
8521 flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
8524 flow_dfs_compute_reverse_finish (&dfs_ds);
8529 /* Redirect an edge's successor from one block to another. */
8532 redirect_edge_succ (e, new_succ)
8534 basic_block new_succ;
8538 /* Disconnect the edge from the old successor block. */
/* PE walks the pred list by address so E can be unlinked in place.  */
8539 for (pe = &e->dest->pred; *pe != e; pe = &(*pe)->pred_next)
8541 *pe = (*pe)->pred_next;
8543 /* Reconnect the edge to the new successor block. */
8544 e->pred_next = new_succ->pred;
8549 /* Redirect an edge's predecessor from one block to another. */
8552 redirect_edge_pred (e, new_pred)
8554 basic_block new_pred;
8558 /* Disconnect the edge from the old predecessor block. */
8559 for (pe = &e->src->succ; *pe != e; pe = &(*pe)->succ_next)
8561 *pe = (*pe)->succ_next;
8563 /* Reconnect the edge to the new predecessor block. */
8564 e->succ_next = new_pred->succ;
/* NOTE(review): lossy listing; NULL-file guards and some declarations
   are missing between the embedded line numbers.  */
8569 /* Dump the list of basic blocks in the bitmap NODES. */
8572 flow_nodes_print (str, nodes, file)
8574 const sbitmap nodes;
8582 fprintf (file, "%s { ", str);
8583 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
8584 fputs ("}\n", file);
8588 /* Dump the list of edges in the array EDGE_LIST. */
8591 flow_edge_list_print (str, edge_list, num_edges, file)
8593 const edge *edge_list;
8602 fprintf (file, "%s { ", str);
8603 for (i = 0; i < num_edges; i++)
8604 fprintf (file, "%d->%d ", edge_list[i]->src->index,
8605 edge_list[i]->dest->index);
8606 fputs ("}\n", file);
8610 /* Dump loop related CFG information. */
8613 flow_loops_cfg_dump (loops, file)
8614 const struct loops *loops;
/* Nothing useful to dump without loops, a file, or dominator info.  */
8619 if (! loops->num || ! file || ! loops->cfg.dom)
8622 for (i = 0; i < n_basic_blocks; i++)
8626 fprintf (file, ";; %d succs { ", i);
8627 for (succ = BASIC_BLOCK (i)->succ; succ; succ = succ->succ_next)
8628 fprintf (file, "%d ", succ->dest->index);
8629 flow_nodes_print ("} dom", loops->cfg.dom[i], file);
8632 /* Dump the DFS node order. */
8633 if (loops->cfg.dfs_order)
8635 fputs (";; DFS order: ", file);
8636 for (i = 0; i < n_basic_blocks; i++)
8637 fprintf (file, "%d ", loops->cfg.dfs_order[i]);
8640 /* Dump the reverse completion node order. */
8641 if (loops->cfg.rc_order)
8643 fputs (";; RC order: ", file);
8644 for (i = 0; i < n_basic_blocks; i++)
8645 fprintf (file, "%d ", loops->cfg.rc_order[i]);
/* NOTE(review): lossy listing; guards around verbose output and the
   loop_dump_aux call are missing between the embedded numbers.  */
8650 /* Return non-zero if the nodes of LOOP are a subset of OUTER. */
8653 flow_loop_nested_p (outer, loop)
/* Loop containment is node-set containment on the loop body bitmaps.  */
8657 return sbitmap_a_subset_b_p (loop->nodes, outer->nodes);
8661 /* Dump the loop information specified by LOOP to the stream FILE
8662 using auxiliary dump callback function LOOP_DUMP_AUX if non null. */
8664 flow_loop_dump (loop, file, loop_dump_aux, verbose)
8665 const struct loop *loop;
8667 void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
8670 if (! loop || ! loop->header)
8673 fprintf (file, ";;\n;; Loop %d (%d to %d):%s%s\n",
8674 loop->num, INSN_UID (loop->first->head),
8675 INSN_UID (loop->last->end),
8676 loop->shared ? " shared" : "",
8677 loop->invalid ? " invalid" : "");
8678 fprintf (file, ";; header %d, latch %d, pre-header %d, first %d, last %d\n",
8679 loop->header->index, loop->latch->index,
8680 loop->pre_header ? loop->pre_header->index : -1,
8681 loop->first->index, loop->last->index);
8682 fprintf (file, ";; depth %d, level %d, outer %ld\n",
8683 loop->depth, loop->level,
8684 (long) (loop->outer ? loop->outer->num : -1));
8686 if (loop->pre_header_edges)
8687 flow_edge_list_print (";; pre-header edges", loop->pre_header_edges,
8688 loop->num_pre_header_edges, file);
8689 flow_edge_list_print (";; entry edges", loop->entry_edges,
8690 loop->num_entries, file);
8691 fprintf (file, ";; %d", loop->num_nodes);
8692 flow_nodes_print (" nodes", loop->nodes, file);
8693 flow_edge_list_print (";; exit edges", loop->exit_edges,
8694 loop->num_exits, file);
8695 if (loop->exits_doms)
8696 flow_nodes_print (";; exit doms", loop->exits_doms, file);
/* Hook for pass-specific extra output, when provided.  */
8698 loop_dump_aux (loop, file, verbose);
/* NOTE(review): lossy listing; verbose guards, fprintf prologues and
   some free/NULL-reset pairs are missing between the embedded numbers.  */
8702 /* Dump the loop information specified by LOOPS to the stream FILE,
8703 using auxiliary dump callback function LOOP_DUMP_AUX if non null. */
8705 flow_loops_dump (loops, file, loop_dump_aux, verbose)
8706 const struct loops *loops;
8708 void (*loop_dump_aux) PARAMS((const struct loop *, FILE *, int));
8714 num_loops = loops->num;
8715 if (! num_loops || ! file)
8718 fprintf (file, ";; %d loops found, %d levels\n",
8719 num_loops, loops->levels);
8721 for (i = 0; i < num_loops; i++)
8723 struct loop *loop = &loops->array[i];
8725 flow_loop_dump (loop, file, loop_dump_aux, verbose);
/* Report pairs of loops that share a header and classify them as
   nested or disjoint using the node-subset test.  */
8731 for (j = 0; j < i; j++)
8733 struct loop *oloop = &loops->array[j];
8735 if (loop->header == oloop->header)
8740 smaller = loop->num_nodes < oloop->num_nodes;
8742 /* If the union of LOOP and OLOOP is different than
8743 the larger of LOOP and OLOOP then LOOP and OLOOP
8744 must be disjoint. */
8745 disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
8746 smaller ? oloop : loop);
8748 ";; loop header %d shared by loops %d, %d %s\n",
8749 loop->header->index, i, j,
8750 disjoint ? "disjoint" : "nested");
8757 flow_loops_cfg_dump (loops, file);
8761 /* Free all the memory allocated for LOOPS. */
8764 flow_loops_free (loops)
8765 struct loops *loops;
8774 /* Free the loop descriptors. */
8775 for (i = 0; i < loops->num; i++)
8777 struct loop *loop = &loops->array[i];
8779 if (loop->pre_header_edges)
8780 free (loop->pre_header_edges);
8782 sbitmap_free (loop->nodes);
8783 if (loop->entry_edges)
8784 free (loop->entry_edges);
8785 if (loop->exit_edges)
8786 free (loop->exit_edges);
8787 if (loop->exits_doms)
8788 sbitmap_free (loop->exits_doms);
8790 free (loops->array);
8791 loops->array = NULL;
/* CFG-wide auxiliary structures (dominators, orderings, headers).  */
8794 sbitmap_vector_free (loops->cfg.dom);
8795 if (loops->cfg.dfs_order)
8796 free (loops->cfg.dfs_order);
8798 if (loops->shared_headers)
8799 sbitmap_free (loops->shared_headers);
/* NOTE(review): lossy listing; counting increments, early returns and
   the worklist pop are missing between the embedded line numbers.  */
8804 /* Find the entry edges into the loop with header HEADER and nodes
8805 NODES and store in ENTRY_EDGES array. Return the number of entry
8806 edges from the loop. */
8809 flow_loop_entry_edges_find (header, nodes, entry_edges)
8811 const sbitmap nodes;
8817 *entry_edges = NULL;
/* First pass: count predecessors of the header that come from
   outside the loop body.  */
8820 for (e = header->pred; e; e = e->pred_next)
8822 basic_block src = e->src;
8824 if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
8831 *entry_edges = (edge *) xmalloc (num_entries * sizeof (edge *));
/* Second pass: store those same edges into the freshly allocated
   array.  */
8834 for (e = header->pred; e; e = e->pred_next)
8836 basic_block src = e->src;
8838 if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
8839 (*entry_edges)[num_entries++] = e;
8846 /* Find the exit edges from the loop using the bitmap of loop nodes
8847 NODES and store in EXIT_EDGES array. Return the number of
8848 exit edges from the loop. */
8851 flow_loop_exit_edges_find (nodes, exit_edges)
8852 const sbitmap nodes;
8861 /* Check all nodes within the loop to see if there are any
8862 successors not in the loop. Note that a node may have multiple
8863 exiting edges ????? A node can have one jumping edge and one fallthru
8864 edge so only one of these can exit the loop. */
8866 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
8867 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
8869 basic_block dest = e->dest;
8871 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
8879 *exit_edges = (edge *) xmalloc (num_exits * sizeof (edge *));
8881 /* Store all exiting edges into an array. */
8883 EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
8884 for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
8886 basic_block dest = e->dest;
8888 if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
8889 (*exit_edges)[num_exits++] = e;
8897 /* Find the nodes contained within the loop with header HEADER and
8898 latch LATCH and store in NODES. Return the number of nodes within
/* Classic natural-loop body computation: start from the latch and
   walk predecessors backwards until the header is reached, using an
   explicit worklist stack.  */
8902 flow_loop_nodes_find (header, latch, nodes)
8911 stack = (basic_block *) xmalloc (n_basic_blocks * sizeof (basic_block));
8914 /* Start with only the loop header in the set of loop nodes. */
8915 sbitmap_zero (nodes);
8916 SET_BIT (nodes, header->index);
8918 header->loop_depth++;
8920 /* Push the loop latch on to the stack. */
8921 if (! TEST_BIT (nodes, latch->index))
8923 SET_BIT (nodes, latch->index);
8924 latch->loop_depth++;
8926 stack[sp++] = latch;
8935 for (e = node->pred; e; e = e->pred_next)
8937 basic_block ancestor = e->src;
8939 /* If each ancestor not marked as part of loop, add to set of
8940 loop nodes and push on to stack. */
8941 if (ancestor != ENTRY_BLOCK_PTR
8942 && ! TEST_BIT (nodes, ancestor->index))
8944 SET_BIT (nodes, ancestor->index);
8945 ancestor->loop_depth++;
8947 stack[sp++] = ancestor;
8955 /* Compute the depth first search order and store in the array
8956 DFS_ORDER if non-zero, marking the nodes visited in VISITED. If
8957 RC_ORDER is non-zero, return the reverse completion number for each
8958 node. Returns the number of nodes visited. A depth first search
8959 tries to get as far away from the starting point as quickly as
8963 flow_depth_first_order_compute (dfs_order, rc_order)
/* Reverse-completion numbers are handed out from the high end down;
   reading RC_ORDER forward therefore gives a reverse postorder.  */
8970 int rcnum = n_basic_blocks - 1;
8973 /* Allocate stack for back-tracking up CFG. */
/* The stack holds edges, not blocks: each entry is the next
   unexplored successor edge of a block on the current DFS path.  */
8974 stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
8977 /* Allocate bitmap to track nodes that have been visited. */
8978 visited = sbitmap_alloc (n_basic_blocks);
8980 /* None of the nodes in the CFG have been visited yet. */
8981 sbitmap_zero (visited);
8983 /* Push the first edge on to the stack. */
8984 stack[sp++] = ENTRY_BLOCK_PTR->succ;
8992 /* Look at the edge on the top of the stack. */
8997 /* Check if the edge destination has been visited yet. */
8998 if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
9000 /* Mark that we have visited the destination. */
9001 SET_BIT (visited, dest->index);
9004 dfs_order[dfsnum++] = dest->index;
9008 /* Since the DEST node has been visited for the first
9009 time, check its successors. */
9010 stack[sp++] = dest->succ;
9014 /* There are no successors for the DEST node so assign
9015 its reverse completion number. */
9017 rc_order[rcnum--] = dest->index;
/* Destination already visited (or it is EXIT): advance past this
   edge, completing SRC when its successor list is exhausted.  */
9022 if (! e->succ_next && src != ENTRY_BLOCK_PTR)
9024 /* There are no more successors for the SRC node
9025 so assign its reverse completion number. */
9027 rc_order[rcnum--] = src->index;
9031 stack[sp - 1] = e->succ_next;
9038 sbitmap_free (visited);
9040 /* The number of nodes visited should not be greater than
9042 if (dfsnum > n_basic_blocks)
9045 /* There are some nodes left in the CFG that are unreachable. */
9046 if (dfsnum < n_basic_blocks)
9051 /* Compute the depth first search order on the _reverse_ graph and
9052 store in the array DFS_ORDER, marking the nodes visited in VISITED.
9053 Returns the number of nodes visited.
9055 The computation is split into three pieces:
9057 flow_dfs_compute_reverse_init () creates the necessary data
9060 flow_dfs_compute_reverse_add_bb () adds a basic block to the data
9061 structures. The block will start the search.
9063 flow_dfs_compute_reverse_execute () continues (or starts) the
9064 search using the block on the top of the stack, stopping when the
9067 flow_dfs_compute_reverse_finish () destroys the necessary data
9070 Thus, the user will probably call ..._init(), call ..._add_bb() to
9071 add a beginning basic block to the stack, call ..._execute(),
9072 possibly add another bb to the stack and again call ..._execute(),
9073 ..., and finally call _finish(). */
9075 /* Initialize the data structures used for depth-first search on the
9076 reverse graph: allocate the back-tracking stack and the visited
9077 bitmap in DATA, the current depth-first search context.  No block
9078 is pushed here; callers seed the search with
9079 flow_dfs_compute_reverse_add_bb.  */
9082 flow_dfs_compute_reverse_init (data)
9083 depth_first_search_ds data;
9085 /* Allocate stack for back-tracking up CFG. */
/* Sizes include the pseudo blocks: indices are offset by
   -(INVALID_BLOCK + 1) elsewhere so that the negative indices of the
   ENTRY/EXIT pseudo blocks map to non-negative bitmap positions.  */
9087 (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
9088 * sizeof (basic_block));
9091 /* Allocate bitmap to track nodes that have been visited. */
9092 data->visited_blocks = sbitmap_alloc (n_basic_blocks - (INVALID_BLOCK + 1));
9094 /* None of the nodes in the CFG have been visited yet. */
9095 sbitmap_zero (data->visited_blocks);
9100 /* Add the specified basic block to the top of the dfs data
9101 structures. When the search continues, it will start at the
9105 flow_dfs_compute_reverse_add_bb (data, bb)
9106 depth_first_search_ds data;
/* Simple push; flow_dfs_compute_reverse_execute pops from here.  */
9109 data->stack[data->sp++] = bb;
9113 /* Continue the depth-first search through the reverse graph starting
9114 with the block at the stack's top and ending when the stack is
9115 empty. Visited nodes are marked. Returns an unvisited basic
9116 block, or NULL if there is none available. */
9119 flow_dfs_compute_reverse_execute (data)
9120 depth_first_search_ds data;
9126 while (data->sp > 0)
9128 bb = data->stack[--data->sp];
9130 /* Mark that we have visited this node. */
/* Bitmap positions are offset by -(INVALID_BLOCK + 1) so the
   negative indices of pseudo blocks fit -- see the _init routine.  */
9131 if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)))
9133 SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
9135 /* Perform depth-first search on adjacent vertices. */
/* Reverse graph: follow predecessor edges instead of successors.  */
9136 for (e = bb->pred; e; e = e->pred_next)
9137 flow_dfs_compute_reverse_add_bb (data, e->src);
9141 /* Determine if there are unvisited basic blocks. */
9142 for (i = n_basic_blocks - (INVALID_BLOCK + 1); --i >= 0;)
9143 if (!TEST_BIT (data->visited_blocks, i))
9144 return BASIC_BLOCK (i + (INVALID_BLOCK + 1));
9148 /* Destroy the data structures needed for depth-first search on the
/* Frees the visited bitmap; the stack is freed on an elided line of
   this listing.  */
9152 flow_dfs_compute_reverse_finish (data)
9153 depth_first_search_ds data;
9156 sbitmap_free (data->visited_blocks);
9161 /* Find the root node of the loop pre-header extended basic block and
9162 the edges along the trace from the root node to the loop header. */
9165 flow_loop_pre_header_scan (loop)
9171 loop->num_pre_header_edges = 0;
/* Only loops entered through a single edge have a pre-header trace.  */
9173 if (loop->num_entries != 1)
9176 ebb = loop->entry_edges[0]->src;
9178 if (ebb != ENTRY_BLOCK_PTR)
9182 /* Count number of edges along trace from loop header to
9183 root of pre-header extended basic block. Usually this is
9184 only one or two edges. */
/* Walk upward while each block has exactly one predecessor (an
   extended basic block) that is not the ENTRY pseudo block.  */
9186 while (ebb->pred->src != ENTRY_BLOCK_PTR && ! ebb->pred->pred_next)
9188 ebb = ebb->pred->src;
9192 loop->pre_header_edges = (edge *) xmalloc (num * sizeof (edge *));
9193 loop->num_pre_header_edges = num;
9195 /* Store edges in order that they are followed. The source
9196 of the first edge is the root node of the pre-header extended
9197 basic block and the destination of the last edge is
/* Fill the array backward while re-walking the single-predecessor
   chain from the loop entry edge up to the root.  */
9199 for (e = loop->entry_edges[0]; num; e = e->src->pred)
9201 loop->pre_header_edges[--num] = e;
9207 /* Return the block for the pre-header of the loop with header
9208 HEADER where DOM specifies the dominator information. Return NULL if
9209 there is no pre-header. */
9212 flow_loop_pre_header_find (header, dom)
9216 basic_block pre_header;
9219 /* If block p is a predecessor of the header and is the only block
9220 that the header does not dominate, then it is the pre-header. */
9222 for (e = header->pred; e; e = e->pred_next)
9224 basic_block node = e->src;
/* A predecessor not dominated by HEADER enters the loop from
   outside; a unique such block is the pre-header candidate.  */
9226 if (node != ENTRY_BLOCK_PTR
9227 && ! TEST_BIT (dom[node->index], header->index))
9229 if (pre_header == NULL)
9233 /* There are multiple edges into the header from outside
9234 the loop so there is no pre-header block. */
9243 /* Add LOOP to the loop hierarchy tree where PREVLOOP was the loop
9244 previously added. The insertion algorithm assumes that the loops
9245 are added in the order found by a depth first search of the CFG. */
9248 flow_loop_tree_node_add (prevloop, loop)
9249 struct loop *prevloop;
/* Case 1: LOOP is nested inside the previously added loop; it
   becomes PREVLOOP's (first) inner child.  */
9253 if (flow_loop_nested_p (prevloop, loop))
9255 prevloop->inner = loop;
9256 loop->outer = prevloop;
/* Case 2: climb the outer chain looking for the innermost enclosing
   loop; LOOP becomes a sibling at that level.  */
9260 while (prevloop->outer)
9262 if (flow_loop_nested_p (prevloop->outer, loop))
9264 prevloop->next = loop;
9265 loop->outer = prevloop->outer;
9268 prevloop = prevloop->outer;
/* Case 3: no enclosing loop found; LOOP is a new top-level sibling.  */
9271 prevloop->next = loop;
9275 /* Build the loop hierarchy tree for LOOPS. */
9278 flow_loops_tree_build (loops)
9279 struct loops *loops;
9284 num_loops = loops->num;
9288 /* Root the loop hierarchy tree with the first loop found.
9289 Since we used a depth first search this should be the
9291 loops->tree_root = &loops->array[0];
9292 loops->tree_root->outer = loops->tree_root->inner = loops->tree_root->next = NULL;
9294 /* Add the remaining loops to the tree. */
/* Insertion relies on DFS discovery order -- see
   flow_loop_tree_node_add.  */
9295 for (i = 1; i < num_loops; i++)
9296 flow_loop_tree_node_add (&loops->array[i - 1], &loops->array[i]);
9299 /* Helper function to compute loop nesting depth and enclosed loop level
9300 for the natural loop specified by LOOP at the loop depth DEPTH.
9301 Returns the loop level. */
9304 flow_loop_level_compute (loop, depth)
9314 /* Traverse loop tree assigning depth and computing level as the
9315 maximum level of all the inner loops of this loop. The loop
9316 level is equivalent to the height of the loop in the loop tree
9317 and corresponds to the number of enclosed loop levels (including
/* Recurse over the children, each one level deeper than LOOP.  */
9319 for (inner = loop->inner; inner; inner = inner->next)
9323 ilevel = flow_loop_level_compute (inner, depth + 1) + 1;
9328 loop->level = level;
9329 loop->depth = depth;
9333 /* Compute the loop nesting depth and enclosed loop level for the loop
9334 hierarchy tree specified by LOOPS. Return the maximum enclosed loop
9338 flow_loops_level_compute (loops)
9339 struct loops *loops;
9345 /* Traverse all the outer level loops. */
/* Top-level loops start at depth 1; the maximum returned level is
   accumulated on an elided line of this listing.  */
9346 for (loop = loops->tree_root; loop; loop = loop->next)
9348 level = flow_loop_level_compute (loop, 1);
9356 /* Scan a single natural loop specified by LOOP collecting information
9357 about it specified by FLAGS. */
9360 flow_loop_scan (loops, loop, flags)
9361 struct loops *loops;
9365 /* Determine prerequisites. */
/* Computing exit dominators requires the exit edges first.  */
9366 if ((flags & LOOP_EXITS_DOMS) && ! loop->exit_edges)
9367 flags |= LOOP_EXIT_EDGES;
9369 if (flags & LOOP_ENTRY_EDGES)
9371 /* Find edges which enter the loop header.
9372 Note that the entry edges should only
9373 enter the header of a natural loop. */
9375 = flow_loop_entry_edges_find (loop->header,
9377 &loop->entry_edges);
9380 if (flags & LOOP_EXIT_EDGES)
9382 /* Find edges which exit the loop. */
9384 = flow_loop_exit_edges_find (loop->nodes,
9388 if (flags & LOOP_EXITS_DOMS)
9392 /* Determine which loop nodes dominate all the exits
/* Start from all loop nodes and intersect with the dominators of
   each exit edge's source block.  */
9394 loop->exits_doms = sbitmap_alloc (n_basic_blocks);
9395 sbitmap_copy (loop->exits_doms, loop->nodes);
9396 for (j = 0; j < loop->num_exits; j++)
9397 sbitmap_a_and_b (loop->exits_doms, loop->exits_doms,
9398 loops->cfg.dom[loop->exit_edges[j]->src->index]);
9400 /* The header of a natural loop must dominate
9402 if (! TEST_BIT (loop->exits_doms, loop->header->index))
9406 if (flags & LOOP_PRE_HEADER)
9408 /* Look to see if the loop has a pre-header node. */
9410 = flow_loop_pre_header_find (loop->header, loops->cfg.dom);
9412 /* Find the blocks within the extended basic block of
9413 the loop pre-header. */
9414 flow_loop_pre_header_scan (loop);
9420 /* Find all the natural loops in the function and save in LOOPS structure
9421 and recalculate loop_depth information in basic block structures.
9422 FLAGS controls which loop information is collected.
9423 Return the number of natural loops found. */
9426 flow_loops_find (loops, flags)
9427 struct loops *loops;
9439 /* This function cannot be repeatedly called with different
9440 flags to build up the loop information. The loop tree
9441 must always be built if this function is called. */
9442 if (! (flags & LOOP_TREE))
9445 memset (loops, 0, sizeof (*loops));
9447 /* Taking care of this degenerate case makes the rest of
9448 this code simpler. */
9449 if (n_basic_blocks == 0)
9455 /* Compute the dominators. */
9456 dom = sbitmap_vector_alloc (n_basic_blocks, n_basic_blocks);
9457 calculate_dominance_info (NULL, dom, CDI_DOMINATORS);
9459 /* Count the number of loop edges (back edges). This should be the
9460 same as the number of natural loops. */
/* Pass 1: size the loop array by counting back edges; also reset
   every block's loop_depth, which flow_loop_nodes_find will rebuild
   incrementally.  */
9463 for (b = 0; b < n_basic_blocks; b++)
9467 header = BASIC_BLOCK (b);
9468 header->loop_depth = 0;
9470 for (e = header->pred; e; e = e->pred_next)
9472 basic_block latch = e->src;
9474 /* Look for back edges where a predecessor is dominated
9475 by this block. A natural loop has a single entry
9476 node (header) that dominates all the nodes in the
9477 loop. It also has single back edge to the header
9478 from a latch node. Note that multiple natural loops
9479 may share the same header. */
/* NOTE(review): with HEADER taken from BASIC_BLOCK (b) this test
   looks always-false; lines are elided from this listing here, so
   verify the surrounding context against the full source.  */
9480 if (b != header->index)
9483 if (latch != ENTRY_BLOCK_PTR && TEST_BIT (dom[latch->index], b))
9490 /* Compute depth first search order of the CFG so that outer
9491 natural loops will be found before inner natural loops. */
9492 dfs_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
9493 rc_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
9494 flow_depth_first_order_compute (dfs_order, rc_order);
9496 /* Save CFG derived information to avoid recomputing it. */
9497 loops->cfg.dom = dom;
9498 loops->cfg.dfs_order = dfs_order;
9499 loops->cfg.rc_order = rc_order;
9501 /* Allocate loop structures. */
9503 = (struct loop *) xcalloc (num_loops, sizeof (struct loop));
/* HEADERS tracks every block seen as a loop header; a second hit on
   the same block records it in shared_headers.  */
9505 headers = sbitmap_alloc (n_basic_blocks);
9506 sbitmap_zero (headers);
9508 loops->shared_headers = sbitmap_alloc (n_basic_blocks);
9509 sbitmap_zero (loops->shared_headers);
9511 /* Find and record information about all the natural loops
/* Pass 2: re-discover the back edges, this time in reverse
   completion order so outer loops get lower loop numbers.  */
9514 for (b = 0; b < n_basic_blocks; b++)
9518 /* Search the nodes of the CFG in reverse completion order
9519 so that we can find outer loops first. */
9520 header = BASIC_BLOCK (rc_order[b]);
9522 /* Look for all the possible latch blocks for this header. */
9523 for (e = header->pred; e; e = e->pred_next)
9525 basic_block latch = e->src;
9527 /* Look for back edges where a predecessor is dominated
9528 by this block. A natural loop has a single entry
9529 node (header) that dominates all the nodes in the
9530 loop. It also has single back edge to the header
9531 from a latch node. Note that multiple natural loops
9532 may share the same header. */
9533 if (latch != ENTRY_BLOCK_PTR
9534 && TEST_BIT (dom[latch->index], header->index))
9538 loop = loops->array + num_loops;
9540 loop->header = header;
9541 loop->latch = latch;
9542 loop->num = num_loops;
/* Pass 3: for each recorded loop compute its node set, boundary
   blocks, and whatever extra information FLAGS requests.  */
9549 for (i = 0; i < num_loops; i++)
9551 struct loop *loop = &loops->array[i];
9553 /* Keep track of blocks that are loop headers so
9554 that we can tell which loops should be merged. */
9555 if (TEST_BIT (headers, loop->header->index))
9556 SET_BIT (loops->shared_headers, loop->header->index);
9557 SET_BIT (headers, loop->header->index);
9559 /* Find nodes contained within the loop. */
9560 loop->nodes = sbitmap_alloc (n_basic_blocks);
9562 = flow_loop_nodes_find (loop->header, loop->latch, loop->nodes);
9564 /* Compute first and last blocks within the loop.
9565 These are often the same as the loop header and
9566 loop latch respectively, but this is not always
9569 = BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
9571 = BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
9573 flow_loop_scan (loops, loop, flags);
9576 /* Natural loops with shared headers may either be disjoint or
9577 nested. Disjoint loops with shared headers cannot be inner
9578 loops and should be merged. For now just mark loops that share
9580 for (i = 0; i < num_loops; i++)
9581 if (TEST_BIT (loops->shared_headers, loops->array[i].header->index))
9582 loops->array[i].shared = 1;
9584 sbitmap_free (headers);
9588 sbitmap_vector_free (dom);
9591 loops->num = num_loops;
9593 /* Build the loop hierarchy tree. */
9594 flow_loops_tree_build (loops);
9596 /* Assign the loop nesting depth and enclosed loop level for each
9598 loops->levels = flow_loops_level_compute (loops);
9604 /* Update the information regarding the loops in the CFG
9605 specified by LOOPS. */
9607 flow_loops_update (loops, flags)
9608 struct loops *loops;
9611 /* One day we may want to update the current loop data. For now
9612 throw away the old stuff and rebuild what we need. */
9614 flow_loops_free (loops);
9616 return flow_loops_find (loops, flags);
9620 /* Return non-zero if edge E enters header of LOOP from outside of LOOP. */
9623 flow_loop_outside_edge_p (loop, e)
9624 const struct loop *loop;
/* NOTE(review): lines are elided here; the full source presumably
   rejects edges whose destination is not the header before reaching
   the return -- verify.  */
9627 if (e->dest != loop->header)
/* Outside means: source is the ENTRY pseudo block, or a block not in
   the loop's node set.  */
9629 return (e->src == ENTRY_BLOCK_PTR)
9630 || ! TEST_BIT (loop->nodes, e->src->index);
9633 /* Clear LOG_LINKS fields of insns in a chain.
9634 Also clear the global_live_at_{start,end} fields of the basic block
9638 clear_log_links (insns)
9644 for (i = insns; i; i = NEXT_INSN (i))
/* Drop per-block liveness info for every real basic block...  */
9648 for (b = 0; b < n_basic_blocks; b++)
9650 basic_block bb = BASIC_BLOCK (b);
9652 bb->global_live_at_start = NULL;
9653 bb->global_live_at_end = NULL;
/* ...and for the ENTRY/EXIT pseudo blocks as well.  */
9656 ENTRY_BLOCK_PTR->global_live_at_end = NULL;
9657 EXIT_BLOCK_PTR->global_live_at_start = NULL;
9660 /* Given a register bitmap, turn on the bits in a HARD_REG_SET that
9661 correspond to the hard registers, if any, set in that map. This
9662 could be done far more efficiently by having all sorts of special-cases
9663 with moving single words, but probably isn't worth the trouble. */
9666 reg_set_to_hard_reg_set (to, from)
9672 EXECUTE_IF_SET_IN_BITMAP
/* NOTE(review): a line is elided between these two statements; hard
   registers occupy the low bitmap indices, so reaching
   FIRST_PSEUDO_REGISTER most likely ends the scan rather than
   guarding the SET_HARD_REG_BIT below -- verify against the full
   source.  */
9675 if (i >= FIRST_PSEUDO_REGISTER)
9677 SET_HARD_REG_BIT (*to, i);
9681 /* Called once at initialization time. */
9686 static int initialized;
9690 gcc_obstack_init (&flow_obstack);
9691 flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);
9696 obstack_free (&flow_obstack, flow_firstobj);
9697 flow_firstobj = (char *) obstack_alloc (&flow_obstack, 0);