/* Data flow analysis for GNU compiler.
Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
- 1999, 2000 Free Software Foundation, Inc.
+ 1999, 2000, 2001 Free Software Foundation, Inc.
This file is part of GNU CC.
are another pair, etc. */
rtx regs_may_share;
+/* Callback that determines if it's ok for a function to have no
+ noreturn attribute. */
+int (*lang_missing_noreturn_ok_p) PARAMS ((tree));
+
/* Set of registers that may be eliminable. These are handled specially
in updating regs_ever_live. */
regset reg_cond_reg;
#endif
+ /* The length of mem_set_list. */
+ int mem_set_list_len;
+
/* Non-zero if the value of CC0 is live. */
int cc0_live;
int flags;
};
+/* Maximum length of pbi->mem_set_list before we start dropping
+ new elements on the floor. */
+#define MAX_MEM_SET_LIST_LEN 100
+
/* Store the data structures necessary for depth-first search. */
struct depth_first_search_dsS {
/* stack for backtracking during the algorithm */
static void invalidate_mems_from_set PARAMS ((struct propagate_block_info *,
rtx));
static void remove_fake_successors PARAMS ((basic_block));
-static void flow_nodes_print PARAMS ((const char *, const sbitmap,
+static void flow_nodes_print PARAMS ((const char *, const sbitmap,
FILE *));
static void flow_edge_list_print PARAMS ((const char *, const edge *,
int, FILE *));
static void flow_loops_cfg_dump PARAMS ((const struct loops *,
FILE *));
-static int flow_loop_nested_p PARAMS ((struct loop *,
+static int flow_loop_nested_p PARAMS ((struct loop *,
struct loop *));
-static int flow_loop_entry_edges_find PARAMS ((basic_block, const sbitmap,
+static int flow_loop_entry_edges_find PARAMS ((basic_block, const sbitmap,
edge **));
static int flow_loop_exit_edges_find PARAMS ((const sbitmap, edge **));
static int flow_loop_nodes_find PARAMS ((basic_block, basic_block, sbitmap));
{
if (warn_missing_noreturn
&& !TREE_THIS_VOLATILE (cfun->decl)
- && EXIT_BLOCK_PTR->pred == NULL)
+ && EXIT_BLOCK_PTR->pred == NULL
+ && (lang_missing_noreturn_ok_p
+ && !lang_missing_noreturn_ok_p (cfun->decl)))
warning ("function might be possible candidate for attribute `noreturn'");
/* If we have a path to EXIT, then we do return. */
rtx insn;
for (insn = f; insn; insn = NEXT_INSN (insn))
- if (INSN_P (insn))
+ if (INSN_P (insn) && GET_CODE (insn) != JUMP_INSN)
{
rtx note;
as this would be a part of the tablejump setup code.
Make a special exception for the eh_return_stub_label, which
- we know isn't part of any otherwise visible control flow. */
+ we know isn't part of any otherwise visible control flow.
+
+ Make a special exception for registers loaded with label
+ values just before jump insns that use them. */
for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
if (REG_NOTE_KIND (note) == REG_LABEL)
;
else if (GET_CODE (lab) == NOTE)
;
+ else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
+ && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
+ ;
else
lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
}
break;
}
- if (GET_RTX_CLASS (code) == 'i')
+ if (GET_RTX_CLASS (code) == 'i'
+ && GET_CODE (insn) != JUMP_INSN)
{
rtx note;
- /* Make a list of all labels referred to other than by jumps
- (which just don't have the REG_LABEL notes).
+ /* Make a list of all labels referred to other than by jumps.
Make a special exception for labels followed by an ADDR*VEC,
as this would be a part of the tablejump setup code.
Make a special exception for the eh_return_stub_label, which
- we know isn't part of any otherwise visible control flow. */
+ we know isn't part of any otherwise visible control flow.
+
+ Make a special exception for registers loaded with label
+ values just before jump insns that use them. */
for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
if (REG_NOTE_KIND (note) == REG_LABEL)
;
else if (GET_CODE (lab) == NOTE)
;
+ else if (GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
+ && find_reg_note (NEXT_INSN (insn), REG_LABEL, lab))
+ ;
else
lvl = alloc_EXPR_LIST (0, XEXP (note, 0), lvl);
}
{
rtx tmp;
+ /* Recognize a non-local goto as a branch outside the
+ current function. */
+ if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
+ ;
+
/* ??? Recognize a tablejump and do the right thing. */
- if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
- && (tmp = NEXT_INSN (tmp)) != NULL_RTX
- && GET_CODE (tmp) == JUMP_INSN
- && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
- || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
+ else if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
+ && (tmp = NEXT_INSN (tmp)) != NULL_RTX
+ && GET_CODE (tmp) == JUMP_INSN
+ && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
+ || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
{
rtvec vec;
int j;
/* Redirect the src of the successor edges of bb to point to new_bb. */
for (e = new_bb->succ; e; e = e->succ_next)
e->src = new_bb;
-
+
/* Place the new block just after the block being split. */
VARRAY_GROW (basic_block_info, ++n_basic_blocks);
propagate_block to determine which registers are live. */
COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
propagate_block (new_bb, new_bb->global_live_at_start, NULL, NULL, 0);
- COPY_REG_SET (bb->global_live_at_end,
+ COPY_REG_SET (bb->global_live_at_end,
new_bb->global_live_at_start);
}
bb = BASIC_BLOCK (i);
}
}
+
+/* Add fake edges to the function exit for any non-constant calls in
+ the bitmap of blocks specified by BLOCKS, or to the whole CFG if
+ BLOCKS is zero.  Return the number of blocks that were split.
+
+ A non-constant call may not return (e.g. it may call exit or
+ longjmp), so a fake EXIT edge keeps liveness and reachability
+ analyses conservative across such calls. */
+
+int
+flow_call_edges_add (blocks)
+ sbitmap blocks;
+{
+ int i;
+ int blocks_split = 0;
+ int bb_num = 0;
+ basic_block *bbs;
+
+ /* Map bb indices into basic block pointers since split_block
+ will renumber the basic blocks. */
+
+ bbs = xmalloc (n_basic_blocks * sizeof (*bbs));
+
+ if (! blocks)
+ {
+ /* No bitmap supplied: process every basic block. */
+ for (i = 0; i < n_basic_blocks; i++)
+ bbs[bb_num++] = BASIC_BLOCK (i);
+ }
+ else
+ {
+ EXECUTE_IF_SET_IN_SBITMAP (blocks, 0, i,
+ {
+ bbs[bb_num++] = BASIC_BLOCK (i);
+ });
+ }
+
+
+ /* Now add fake edges to the function exit for any non constant
+ calls since there is no way that we can determine if they will
+ return or not... */
+
+ for (i = 0; i < bb_num; i++)
+ {
+ basic_block bb = bbs[i];
+ rtx insn;
+ rtx prev_insn;
+
+ /* Walk the insns of BB backward; PREV_INSN is captured before
+ any split so the iteration is unaffected by renumbering. */
+ for (insn = bb->end; ; insn = prev_insn)
+ {
+ prev_insn = PREV_INSN (insn);
+ if (GET_CODE (insn) == CALL_INSN && ! CONST_CALL_P (insn))
+ {
+ edge e;
+
+ /* Note that the following may create a new basic block
+ and renumber the existing basic blocks. */
+ e = split_block (bb, insn);
+ if (e)
+ blocks_split++;
+
+ make_edge (NULL, bb, EXIT_BLOCK_PTR, EDGE_FAKE);
+ }
+ /* Stop once the head of the (possibly shrunken) block has
+ been processed. */
+ if (insn == bb->head)
+ break;
+ }
+ }
+
+ /* Splitting blocks reshapes the CFG; sanity-check it if anything
+ changed. */
+ if (blocks_split)
+ verify_flow_info ();
+
+ free (bbs);
+ return blocks_split;
+}
\f
/* Delete all unreachable basic blocks. */
NOTE_SOURCE_FILE (q) = 0;
}
else
- q = PREV_INSN (q);
+ {
+ q = PREV_INSN (q);
+
+ /* We don't want a block to end on a line-number note since that has
+ the potential of changing the code between -g and not -g. */
+ while (GET_CODE (q) == NOTE && NOTE_LINE_NUMBER (q) >= 0)
+ q = PREV_INSN (q);
+ }
b->end = q;
}
#endif
}
-#ifdef PIC_OFFSET_TABLE_REGNUM
#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
/* Many architectures have a GP register even without flag_pic.
Assume the pic register is not in use, or will be handled by
other means, if it is not fixed. */
- if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
+ if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
+ && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
SET_REGNO_REG_SET (set, PIC_OFFSET_TABLE_REGNUM);
#endif
-#endif
/* Mark all global registers, and all registers used by the epilogue
as being live at the end of the function since they may be
SET_REGNO_REG_SET (new_live_at_end, STACK_POINTER_REGNUM);
/* Before reload, there are a few registers that must be forced
- live everywhere -- which might not already be the case for
+ live everywhere -- which might not already be the case for
blocks within infinite loops. */
if (! reload_completed)
{
SET_REGNO_REG_SET (new_live_at_end, ARG_POINTER_REGNUM);
#endif
-#ifdef PIC_OFFSET_TABLE_REGNUM
/* Any constant, or pseudo with constant equivalences, may
require reloading from memory using the pic register. */
- if (fixed_regs[PIC_OFFSET_TABLE_REGNUM])
+ if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
+ && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
SET_REGNO_REG_SET (new_live_at_end, PIC_OFFSET_TABLE_REGNUM);
-#endif
}
/* Regs used in phi nodes are not included in
note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
if (flags & PROP_SCAN_DEAD_CODE)
{
- insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0,
- REG_NOTES (insn));
+ insn_is_dead = insn_dead_p (pbi, PATTERN (insn), 0, REG_NOTES (insn));
libcall_is_dead = (insn_is_dead && note != 0
&& libcall_dead_p (pbi, note, insn));
}
- /* We almost certainly don't want to delete prologue or epilogue
- instructions. Warn about probable compiler losage. */
- if (insn_is_dead
- && reload_completed
- && (((HAVE_epilogue || HAVE_prologue)
- && prologue_epilogue_contains (insn))
- || (HAVE_sibcall_epilogue
- && sibcall_epilogue_contains (insn)))
- && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
- {
- if (flags & PROP_KILL_DEAD_CODE)
- {
- warning ("ICE: would have deleted prologue/epilogue insn");
- if (!inhibit_warnings)
- debug_rtx (insn);
- }
- libcall_is_dead = insn_is_dead = 0;
- }
-
/* If an instruction consists of just dead store(s) on final pass,
delete it. */
if ((flags & PROP_KILL_DEAD_CODE) && insn_is_dead)
{
+ /* If we're trying to delete a prologue or epilogue instruction
+ that isn't flagged as possibly being dead, something is wrong.
+ But if we are keeping the stack pointer depressed, we might well
+ be deleting insns that are used to compute the amount to update
+ it by, so they are fine. */
+ if (reload_completed
+ && !(TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
+ && (TYPE_RETURNS_STACK_DEPRESSED
+ (TREE_TYPE (current_function_decl))))
+ && (((HAVE_epilogue || HAVE_prologue)
+ && prologue_epilogue_contains (insn))
+ || (HAVE_sibcall_epilogue
+ && sibcall_epilogue_contains (insn)))
+ && find_reg_note (insn, REG_MAYBE_DEAD, NULL_RTX) == 0)
+ abort ();
+
/* Record sets. Do this even for dead instructions, since they
would have killed the values if they hadn't been deleted. */
mark_set_regs (pbi, PATTERN (insn), insn);
/* Non-constant calls clobber memory. */
if (! CONST_CALL_P (insn))
- free_EXPR_LIST_list (&pbi->mem_set_list);
+ {
+ free_EXPR_LIST_list (&pbi->mem_set_list);
+ pbi->mem_set_list_len = 0;
+ }
/* There may be extra registers to be clobbered. */
for (note = CALL_INSN_FUNCTION_USAGE (insn);
pbi->bb = bb;
pbi->reg_live = live;
pbi->mem_set_list = NULL_RTX;
+ pbi->mem_set_list_len = 0;
pbi->local_set = local_set;
pbi->cond_local_set = cond_local_set;
pbi->cc0_live = 0;
{
rtx mem = SET_DEST (PATTERN (insn));
+ /* This optimization is performed by faking a store to the
+ memory at the end of the block. This doesn't work for
+ unchanging memories because multiple stores to unchanging
+ memory are illegal and alias analysis doesn't consider it. */
+ if (RTX_UNCHANGING_P (mem))
+ continue;
+
if (XEXP (mem, 0) == frame_pointer_rtx
|| (GET_CODE (XEXP (mem, 0)) == PLUS
&& XEXP (XEXP (mem, 0), 0) == frame_pointer_rtx
mem = shallow_copy_rtx (mem);
#endif
pbi->mem_set_list = alloc_EXPR_LIST (0, mem, pbi->mem_set_list);
+ if (++pbi->mem_set_list_len >= MAX_MEM_SET_LIST_LEN)
+ break;
}
}
}
#ifdef AUTO_INC_DEC
/* Check if memory reference matches an auto increment. Only
post increment/decrement or modify are valid. */
- if (GET_MODE (mem) == GET_MODE (r)
+ if (GET_MODE (mem) == GET_MODE (r)
&& (GET_CODE (XEXP (mem, 0)) == POST_DEC
|| GET_CODE (XEXP (mem, 0)) == POST_INC
|| GET_CODE (XEXP (mem, 0)) == POST_MODIFY)
else
pbi->mem_set_list = next;
free_EXPR_LIST_node (temp);
+ pbi->mem_set_list_len--;
}
else
prev = temp;
else
pbi->mem_set_list = next;
free_EXPR_LIST_node (temp);
+ pbi->mem_set_list_len--;
}
else
prev = temp;
int not_dead = 0;
int i;
- /* Some targets place small structures in registers for
- return values of functions. We have to detect this
- case specially here to get correct flow information. */
- if (GET_CODE (reg) == PARALLEL
- && GET_MODE (reg) == BLKmode)
- {
- for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
- mark_set_1 (pbi, code, XVECEXP (reg, 0, i), cond, insn, flags);
- return;
- }
-
/* Modifying just one hardware register of a multi-reg value or just a
byte field of a register does not mean the value from before this insn
is now dead. Of course, if it was dead after it's unused now. */
switch (GET_CODE (reg))
{
+ case PARALLEL:
+ /* Some targets place small structures in registers for return values of
+ functions. We have to detect this case specially here to get correct
+ flow information. */
+ for (i = XVECLEN (reg, 0) - 1; i >= 0; i--)
+ if (XEXP (XVECEXP (reg, 0, i), 0) != 0)
+ mark_set_1 (pbi, code, XEXP (XVECEXP (reg, 0, i), 0), cond, insn,
+ flags);
+ return;
+
case ZERO_EXTRACT:
case SIGN_EXTRACT:
case STRICT_LOW_PART:
if (insn && GET_CODE (reg) == MEM)
invalidate_mems_from_autoinc (pbi, insn);
- if (GET_CODE (reg) == MEM && ! side_effects_p (reg)
+ if (pbi->mem_set_list_len < MAX_MEM_SET_LIST_LEN
+ && GET_CODE (reg) == MEM && ! side_effects_p (reg)
/* ??? With more effort we could track conditional memory life. */
&& ! cond
/* We do not know the size of a BLKmode store, so we do not track
reg = shallow_copy_rtx (reg);
#endif
pbi->mem_set_list = alloc_EXPR_LIST (0, reg, pbi->mem_set_list);
+ pbi->mem_set_list_len++;
}
}
the in-order traversal. */
if (xdata[1] >= (int) node->key)
return 0;
-
+
/* Splice out portions of the expression that refer to regno. */
rcli = (struct reg_cond_life_info *) node->value;
rcli->condition = elim_reg_cond (rcli->condition, regno);
else
pbi->mem_set_list = next;
free_EXPR_LIST_node (temp);
+ pbi->mem_set_list_len--;
}
else
prev = temp;
testreg = XEXP (testreg, 0);
}
- /* If this is a store into a register, recursively scan the
- value being stored. */
+ /* If this is a store into a register or group of registers,
+ recursively scan the value being stored. */
if ((GET_CODE (testreg) == PARALLEL
&& GET_MODE (testreg) == BLKmode)
So for now, just clear the memory set list and mark any regs
we can find in ASM_OPERANDS as used. */
if (code != ASM_OPERANDS || MEM_VOLATILE_P (x))
- free_EXPR_LIST_list (&pbi->mem_set_list);
+ {
+ free_EXPR_LIST_list (&pbi->mem_set_list);
+ pbi->mem_set_list_len = 0;
+ }
/* For all ASM_OPERANDS, we must traverse the vector of input operands.
We can not just fall through here since then we would be confused
/* Update insns block within BB. */
-void
+void
update_bb_for_insn (bb)
basic_block bb;
{
basic_block bb = NOTE_BASIC_BLOCK (x);
num_bb_notes++;
if (bb->index != last_bb_num_seen + 1)
- fatal ("Basic blocks not numbered consecutively");
+ /* Basic blocks not numbered consecutively. */
+ abort ();
+
last_bb_num_seen = bb->index;
}
}
if (num_bb_notes != n_basic_blocks)
- fatal ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
- num_bb_notes, n_basic_blocks);
+ internal_error
+ ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
+ num_bb_notes, n_basic_blocks);
if (err)
abort ();
if (EDGE_INDEX (elist, BASIC_BLOCK (pred), BASIC_BLOCK (succ))
!= EDGE_INDEX_NO_EDGE && found_edge == 0)
fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
- pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
+ pred, succ, EDGE_INDEX (elist, BASIC_BLOCK (pred),
BASIC_BLOCK (succ)));
}
for (succ = 0; succ < n_basic_blocks; succ++)
\f
/* Dump the list of basic blocks in the bitmap NODES. */
-static void
+static void
flow_nodes_print (str, nodes, file)
const char *str;
const sbitmap nodes;
/* Dump the list of edges in the array EDGE_LIST. */
-static void
+static void
flow_edge_list_print (str, edge_list, num_edges, file)
const char *str;
const edge *edge_list;
/* Dump the loop information specified by LOOPS to the stream FILE,
using auxiliary dump callback function LOOP_DUMP_AUX if non null. */
-void
+void
flow_loops_dump (loops, file, loop_dump_aux, verbose)
const struct loops *loops;
FILE *file;
if (! num_loops || ! file)
return;
- fprintf (file, ";; %d loops found, %d levels\n",
+ fprintf (file, ";; %d loops found, %d levels\n",
num_loops, loops->levels);
for (i = 0; i < num_loops; i++)
must be disjoint. */
disjoint = ! flow_loop_nested_p (smaller ? loop : oloop,
smaller ? oloop : loop);
- fprintf (file,
+ fprintf (file,
";; loop header %d shared by loops %d, %d %s\n",
loop->header->index, i, j,
disjoint ? "disjoint" : "nested");
for (e = header->pred; e; e = e->pred_next)
{
basic_block src = e->src;
-
+
if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
num_entries++;
}
for (e = header->pred; e; e = e->pred_next)
{
basic_block src = e->src;
-
+
if (src == ENTRY_BLOCK_PTR || ! TEST_BIT (nodes, src->index))
(*entry_edges)[num_entries++] = e;
}
EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
{
- basic_block dest = e->dest;
+ basic_block dest = e->dest;
if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
num_exits++;
EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {
for (e = BASIC_BLOCK (node)->succ; e; e = e->succ_next)
{
- basic_block dest = e->dest;
+ basic_block dest = e->dest;
if (dest == EXIT_BLOCK_PTR || ! TEST_BIT (nodes, dest->index))
(*exit_edges)[num_exits++] = e;
}
+/* Scan a single natural loop specified by LOOP collecting information
+ about it specified by FLAGS (LOOP_ENTRY_EDGES, LOOP_EXIT_EDGES,
+ LOOP_EXITS_DOMS, LOOP_PRE_HEADER), storing the results in LOOP's
+ fields.  LOOPS supplies the dominator information needed for
+ LOOP_EXITS_DOMS.  Always returns 1. */
+
+int
+flow_loop_scan (loops, loop, flags)
+ struct loops *loops;
+ struct loop *loop;
+ int flags;
+{
+ /* Determine prerequisites: computing which nodes dominate the
+ exits requires the exit edges themselves. */
+ if ((flags & LOOP_EXITS_DOMS) && ! loop->exit_edges)
+ flags |= LOOP_EXIT_EDGES;
+
+ if (flags & LOOP_ENTRY_EDGES)
+ {
+ /* Find edges which enter the loop header.
+ Note that the entry edges should only
+ enter the header of a natural loop. */
+ loop->num_entries
+ = flow_loop_entry_edges_find (loop->header,
+ loop->nodes,
+ &loop->entry_edges);
+ }
+
+ if (flags & LOOP_EXIT_EDGES)
+ {
+ /* Find edges which exit the loop. */
+ loop->num_exits
+ = flow_loop_exit_edges_find (loop->nodes,
+ &loop->exit_edges);
+ }
+
+ if (flags & LOOP_EXITS_DOMS)
+ {
+ int j;
+
+ /* Determine which loop nodes dominate all the exits
+ of the loop: start from all loop nodes and intersect
+ with the dominators of each exit edge's source. */
+ loop->exits_doms = sbitmap_alloc (n_basic_blocks);
+ sbitmap_copy (loop->exits_doms, loop->nodes);
+ for (j = 0; j < loop->num_exits; j++)
+ sbitmap_a_and_b (loop->exits_doms, loop->exits_doms,
+ loops->cfg.dom[loop->exit_edges[j]->src->index]);
+
+ /* The header of a natural loop must dominate
+ all exits. */
+ if (! TEST_BIT (loop->exits_doms, loop->header->index))
+ abort ();
+ }
+
+ if (flags & LOOP_PRE_HEADER)
+ {
+ /* Look to see if the loop has a pre-header node. */
+ loop->pre_header
+ = flow_loop_pre_header_find (loop->header, loops->cfg.dom);
+
+ /* Find the blocks within the extended basic block of
+ the loop pre-header. */
+ flow_loop_pre_header_scan (loop);
+ }
+ return 1;
+}
+
+
/* Find all the natural loops in the function and save in LOOPS structure
and recalculate loop_depth information in basic block structures.
FLAGS controls which loop information is collected.
must always be built if this function is called. */
if (! (flags & LOOP_TREE))
abort ();
-
+
memset (loops, 0, sizeof (*loops));
/* Taking care of this degenerate case makes the rest of
rc_order = (int *) xmalloc (n_basic_blocks * sizeof (int));
flow_depth_first_order_compute (dfs_order, rc_order);
+ /* Save CFG derived information to avoid recomputing it. */
+ loops->cfg.dom = dom;
+ loops->cfg.dfs_order = dfs_order;
+ loops->cfg.rc_order = rc_order;
+
/* Allocate loop structures. */
loops->array
= (struct loop *) xcalloc (num_loops, sizeof (struct loop));
for (i = 0; i < num_loops; i++)
{
struct loop *loop = &loops->array[i];
- int j;
/* Keep track of blocks that are loop headers so
that we can tell which loops should be merged. */
loop->nodes = sbitmap_alloc (n_basic_blocks);
loop->num_nodes
= flow_loop_nodes_find (loop->header, loop->latch, loop->nodes);
-
+
/* Compute first and last blocks within the loop.
These are often the same as the loop header and
loop latch respectively, but this is not always
= BASIC_BLOCK (sbitmap_first_set_bit (loop->nodes));
loop->last
= BASIC_BLOCK (sbitmap_last_set_bit (loop->nodes));
-
- if (flags & LOOP_EDGES)
- {
- /* Find edges which enter the loop header.
- Note that the entry edges should only
- enter the header of a natural loop. */
- loop->num_entries
- = flow_loop_entry_edges_find (loop->header,
- loop->nodes,
- &loop->entry_edges);
-
- /* Find edges which exit the loop. */
- loop->num_exits
- = flow_loop_exit_edges_find (loop->nodes,
- &loop->exit_edges);
-
- /* Determine which loop nodes dominate all the exits
- of the loop. */
- loop->exits_doms = sbitmap_alloc (n_basic_blocks);
- sbitmap_copy (loop->exits_doms, loop->nodes);
- for (j = 0; j < loop->num_exits; j++)
- sbitmap_a_and_b (loop->exits_doms, loop->exits_doms,
- dom[loop->exit_edges[j]->src->index]);
-
- /* The header of a natural loop must dominate
- all exits. */
- if (! TEST_BIT (loop->exits_doms, loop->header->index))
- abort ();
- }
-
- if (flags & LOOP_PRE_HEADER)
- {
- /* Look to see if the loop has a pre-header node. */
- loop->pre_header
- = flow_loop_pre_header_find (loop->header, dom);
- flow_loop_pre_header_scan (loop);
- }
+ flow_loop_scan (loops, loop, flags);
}
/* Natural loops with shared headers may either be disjoint or
loops->num = num_loops;
- /* Save CFG derived information to avoid recomputing it. */
- loops->cfg.dom = dom;
- loops->cfg.dfs_order = dfs_order;
- loops->cfg.rc_order = rc_order;
-
/* Build the loop hierarchy tree. */
flow_loops_tree_build (loops);
throw away the old stuff and rebuild what we need. */
if (loops->array)
flow_loops_free (loops);
-
+
return flow_loops_find (loops, flags);
}