Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
1999, 2000, 2001 Free Software Foundation, Inc.
-This file is part of GNU CC.
+This file is part of GCC.
-GNU CC is free software; you can redistribute it and/or modify
-it under the terms of the GNU General Public License as published by
-the Free Software Foundation; either version 2, or (at your option)
-any later version.
+GCC is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation; either version 2, or (at your option) any later
+version.
-GNU CC is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-GNU General Public License for more details.
+GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or
+FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+for more details.
You should have received a copy of the GNU General Public License
-along with GNU CC; see the file COPYING. If not, write to
-the Free Software Foundation, 59 Temple Place - Suite 330,
-Boston, MA 02111-1307, USA. */
+along with GCC; see the file COPYING. If not, write to the Free
+Software Foundation, 59 Temple Place - Suite 330, Boston, MA
+02111-1307, USA. */
/* This file contains the data flow analysis pass of the compiler. It
computes data flow information which tells combine_instructions
ENTRY_BLOCK, /* index */
0, /* loop_depth */
0, /* count */
- 0 /* frequency */
+ 0, /* frequency */
+ 0 /* flags */
},
{
NULL, /* head */
EXIT_BLOCK, /* index */
0, /* loop_depth */
0, /* count */
- 0 /* frequency */
+ 0, /* frequency */
+ 0 /* flags */
}
};
static void delete_unreachable_blocks PARAMS ((void));
static int can_delete_note_p PARAMS ((rtx));
-static void expunge_block PARAMS ((basic_block));
static int can_delete_label_p PARAMS ((rtx));
static int tail_recursion_label_p PARAMS ((rtx));
static int merge_blocks_move_predecessor_nojumps PARAMS ((basic_block,
cleanup_cfg (mode)
int mode;
{
+ int i;
+
timevar_push (TV_CLEANUP_CFG);
delete_unreachable_blocks ();
if (try_optimize_cfg (mode))
free_EXPR_LIST_list (&label_value_list);
free_EXPR_LIST_list (&tail_recursion_label_list);
timevar_pop (TV_CLEANUP_CFG);
+
+ /* Clear bb->aux on all basic blocks. */
+ for (i = 0; i < n_basic_blocks; ++i)
+ BASIC_BLOCK (i)->aux = NULL;
}
/* Create a new basic block consisting of the instructions between
if (GET_CODE (bb->end) == JUMP_INSN)
{
before = bb->end;
+ while (GET_CODE (PREV_INSN (before)) == NOTE
+ && NOTE_LINE_NUMBER (PREV_INSN (before)) == NOTE_INSN_LOOP_BEG)
+ before = PREV_INSN (before);
}
else
{
return blocks_split;
}
\f
-/* Find unreachable blocks. An unreachable block will have NULL in
- block->aux, a non-NULL value indicates the block is reachable. */
+/* Find unreachable blocks. An unreachable block will have 0 in
+ the reachable bit in block->flags. A non-zero value indicates the
+ block is reachable. */
void
find_unreachable_blocks ()
n = n_basic_blocks;
tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
- /* Use basic_block->aux as a marker. Clear them all. */
+ /* Clear all the reachability flags. */
for (i = 0; i < n; ++i)
- BASIC_BLOCK (i)->aux = NULL;
+ BASIC_BLOCK (i)->flags &= ~BB_REACHABLE;
/* Add our starting points to the worklist. Almost always there will
be only one. It isn't inconcievable that we might one day directly
{
*tos++ = e->dest;
- /* Mark the block with a handy non-null value. */
- e->dest->aux = e;
+ /* Mark the block reachable. */
+ e->dest->flags |= BB_REACHABLE;
}
/* Iterate: find everything reachable from what we've already seen. */
basic_block b = *--tos;
for (e = b->succ; e; e = e->succ_next)
- if (!e->dest->aux)
+ if (!(e->dest->flags & BB_REACHABLE))
{
*tos++ = e->dest;
- e->dest->aux = e;
+ e->dest->flags |= BB_REACHABLE;
}
}
{
basic_block b = BASIC_BLOCK (i);
- if (b->aux != NULL)
- /* This block was found. Tidy up the mark. */
- b->aux = NULL;
- else
+ if (!(b->flags & BB_REACHABLE))
flow_delete_block (b);
}
/* Remove block B from the basic block array and compact behind it. */
-static void
+void
expunge_block (b)
basic_block b;
{
#ifdef HAVE_cc0
/* If this was a conditional jump, we need to also delete
the insn that set cc0. */
- if (prev && sets_cc0_p (prev))
+ if (only_sets_cc0_p (prev))
{
rtx tmp = prev;
prev = prev_nonnote_insn (prev);
merge_blocks_move_predecessor_nojumps (a, b)
basic_block a, b;
{
- rtx start, end, barrier;
+ rtx barrier;
int index;
- start = a->head;
- end = a->end;
-
- barrier = next_nonnote_insn (end);
+ barrier = next_nonnote_insn (a->end);
if (GET_CODE (barrier) != BARRIER)
abort ();
flow_delete_insn (barrier);
and adjust the block trees appropriately. Even better would be to have
a tighter connection between block trees and rtl so that this is not
necessary. */
- start = squeeze_notes (start, end);
+ squeeze_notes (&a->head, &a->end);
/* Scramble the insn chain. */
- if (end != PREV_INSN (b->head))
- reorder_insns (start, end, PREV_INSN (b->head));
+ if (a->end != PREV_INSN (b->head))
+ reorder_insns (a->head, a->end, PREV_INSN (b->head));
if (rtl_dump_file)
{
merge_blocks_move_successor_nojumps (a, b)
basic_block a, b;
{
- rtx start, end, barrier;
+ rtx barrier;
- start = b->head;
- end = b->end;
- barrier = NEXT_INSN (end);
+ barrier = NEXT_INSN (b->end);
/* Recognize a jump table following block B. */
if (barrier
&& (GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_VEC
|| GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_DIFF_VEC))
{
- end = NEXT_INSN (barrier);
- barrier = NEXT_INSN (end);
+ b->end = NEXT_INSN (barrier);
+ barrier = NEXT_INSN (b->end);
}
/* There had better have been a barrier there. Delete it. */
and adjust the block trees appropriately. Even better would be to have
a tighter connection between block trees and rtl so that this is not
necessary. */
- start = squeeze_notes (start, end);
+ squeeze_notes (&b->head, &b->end);
/* Scramble the insn chain. */
- reorder_insns (start, end, a->end);
+ reorder_insns (b->head, b->end, a->end);
/* Now blocks A and B are contiguous. Merge them. */
merge_blocks_nomove (a, b);
/* Success. Update the CFG to match. Note that after this point
the edge variable names appear backwards; the redirection is done
this way to preserve edge profile data. */
- redirect_edge_succ_nodup (cbranch_jump_edge, cbranch_dest_block);
- redirect_edge_succ_nodup (cbranch_fallthru_edge, jump_dest_block);
+ cbranch_jump_edge = redirect_edge_succ_nodup (cbranch_jump_edge,
+ cbranch_dest_block);
+ cbranch_fallthru_edge = redirect_edge_succ_nodup (cbranch_fallthru_edge,
+ jump_dest_block);
cbranch_jump_edge->flags |= EDGE_FALLTHRU;
cbranch_fallthru_edge->flags &= ~EDGE_FALLTHRU;
need to be compared for equivalence, which we'll do below. */
i1 = bb1->end;
- if (onlyjump_p (i1))
+ if (onlyjump_p (i1)
+ || (returnjump_p (i1) && !side_effects_p (PATTERN (i1))))
i1 = PREV_INSN (i1);
i2 = bb2->end;
- if (onlyjump_p (i2))
+ if (onlyjump_p (i2)
+ || (returnjump_p (i2) && !side_effects_p (PATTERN (i2))))
i2 = PREV_INSN (i2);
last1 = afterlast1 = last2 = afterlast2 = NULL_RTX;
#ifdef HAVE_cc0
/* If this was a conditional jump, we need to also delete
the insn that set cc0. */
- if (any_condjump_p (q) && sets_cc0_p (PREV_INSN (q)))
+ if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
q = PREV_INSN (q);
#endif
PUT_CODE (insn, NOTE);
NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
NOTE_SOURCE_FILE (insn) = 0;
+ if (insn == bb->end)
+ purge_dead_edges (bb);
}
}
}
{
/* Mark all call-saved registers that we actually used. */
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
- if (regs_ever_live[i] && ! call_used_regs[i] && ! LOCAL_REGNO (i))
+ if (regs_ever_live[i] && ! LOCAL_REGNO (i)
+ && ! TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
SET_REGNO_REG_SET (set, i);
}
}
if (bb->end == insn)
- bb->end = PREV_INSN (insn);
+ {
+ bb->end = PREV_INSN (insn);
+ purge_dead_edges (bb);
+ }
flow_delete_insn (insn);
}
/* If this is a call to `setjmp' et al, warn if any
non-volatile datum is live. */
if ((flags & PROP_REG_INFO)
- && GET_CODE (insn) == CALL
+ && GET_CODE (insn) == CALL_INSN
&& find_reg_note (insn, REG_SETJMP, NULL))
IOR_REG_SET (regs_live_at_setjmp, pbi->reg_live);
const rtx rtx_first = get_insns ();
rtx last_head = get_last_insn ();
basic_block *bb_info, *last_visited;
+ size_t *edge_checksum;
rtx x;
int i, last_bb_num_seen, num_bb_notes, err = 0;
bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
last_visited = (basic_block *) xcalloc (n_basic_blocks + 2,
sizeof (basic_block));
+ edge_checksum = (size_t *) xcalloc (n_basic_blocks + 2, sizeof (size_t));
for (i = n_basic_blocks - 1; i >= 0; i--)
{
for (i = n_basic_blocks - 1; i >= 0; i--)
{
basic_block bb = BASIC_BLOCK (i);
- /* Check correctness of edge lists. */
- edge e;
int has_fallthru = 0;
+ edge e;
e = bb->succ;
while (e)
fprintf (stderr, "\n");
err = 1;
}
- if (e->dest != EXIT_BLOCK_PTR)
- {
- edge e2 = e->dest->pred;
- while (e2 && e2 != e)
- e2 = e2->pred_next;
- if (!e2)
- {
- error ("Basic block %i edge lists are corrupted", bb->index);
- err = 1;
- }
- }
+ edge_checksum[e->dest->index + 2] += (size_t) e;
e = e->succ_next;
}
if (!has_fallthru)
fputc ('\n', stderr);
err = 1;
}
- if (e->src != ENTRY_BLOCK_PTR)
- {
- edge e2 = e->src->succ;
- while (e2 && e2 != e)
- e2 = e2->succ_next;
- if (!e2)
- {
- error ("Basic block %i edge lists are corrupted", bb->index);
- err = 1;
- }
- }
+ edge_checksum[e->dest->index + 2] -= (size_t) e;
e = e->pred_next;
}
}
if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
{
- error ("NOTE_INSN_BASIC_BLOCK is missing for block %d\n",
+ error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
bb->index);
err = 1;
}
}
}
+ /* Complete edge checksumming for ENTRY and EXIT. */
+ {
+ edge e;
+ for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
+ edge_checksum[e->dest->index + 2] += (size_t) e;
+ for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
+ edge_checksum[e->dest->index + 2] -= (size_t) e;
+ }
+
+ for (i = -2; i < n_basic_blocks; ++i)
+ if (edge_checksum[i + 2])
+ {
+ error ("Basic block %i edge lists are corrupted", i);
+ err = 1;
+ }
+
last_bb_num_seen = -1;
num_bb_notes = 0;
x = rtx_first;
/* Clean up. */
free (bb_info);
free (last_visited);
+ free (edge_checksum);
}
\f
/* Functions to access an edge list with a vector representation.
/* Like previous but avoid possible dupplicate edge. */
-void
+edge
redirect_edge_succ_nodup (e, new_succ)
edge e;
basic_block new_succ;
s->probability += e->probability;
s->count += e->count;
remove_edge (e);
+ e = s;
}
else
redirect_edge_succ (e, new_succ);
+ return e;
}
/* Redirect an edge's predecessor from one block to another. */
if (! loop || ! loop->header)
return;
- fprintf (file, ";;\n;; Loop %d (%d to %d):%s%s\n",
- loop->num, INSN_UID (loop->first->head),
- INSN_UID (loop->last->end),
- loop->shared ? " shared" : "",
- loop->invalid ? " invalid" : "");
+ if (loop->first->head && loop->last->end)
+ fprintf (file, ";;\n;; Loop %d (%d to %d):%s%s\n",
+ loop->num, INSN_UID (loop->first->head),
+ INSN_UID (loop->last->end),
+ loop->shared ? " shared" : "",
+ loop->invalid ? " invalid" : "");
+ else
+ fprintf (file, ";;\n;; Loop %d:%s%s\n", loop->num,
+ loop->shared ? " shared" : "",
+ loop->invalid ? " invalid" : "");
+
fprintf (file, ";; header %d, latch %d, pre-header %d, first %d, last %d\n",
loop->header->index, loop->latch->index,
loop->pre_header ? loop->pre_header->index : -1,
return num_nodes;
}
+/* Compute reverse top sort order */
+void
+flow_reverse_top_sort_order_compute (rts_order)
+ int *rts_order;
+{
+ edge *stack;
+ int sp;
+ int postnum = 0;
+ sbitmap visited;
+
+ /* Allocate stack for back-tracking up CFG. */
+ stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
+ sp = 0;
+
+ /* Allocate bitmap to track nodes that have been visited. */
+ visited = sbitmap_alloc (n_basic_blocks);
+
+ /* None of the nodes in the CFG have been visited yet. */
+ sbitmap_zero (visited);
+
+ /* Push the first edge on to the stack. */
+ stack[sp++] = ENTRY_BLOCK_PTR->succ;
+
+ while (sp)
+ {
+ edge e;
+ basic_block src;
+ basic_block dest;
+
+ /* Look at the edge on the top of the stack. */
+ e = stack[sp - 1];
+ src = e->src;
+ dest = e->dest;
+
+ /* Check if the edge destination has been visited yet. */
+ if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
+ {
+ /* Mark that we have visited the destination. */
+ SET_BIT (visited, dest->index);
+
+ if (dest->succ)
+ {
+ /* Since the DEST node has been visited for the first
+ time, check its successors. */
+ stack[sp++] = dest->succ;
+ }
+ else
+ rts_order[postnum++] = dest->index;
+ }
+ else
+ {
+ if (! e->succ_next && src != ENTRY_BLOCK_PTR)
+ rts_order[postnum++] = src->index;
+
+ if (e->succ_next)
+ stack[sp - 1] = e->succ_next;
+ else
+ sp--;
+ }
+ }
+
+ free (stack);
+ sbitmap_free (visited);
+}
+
/* Compute the depth first search order and store in the array
DFS_ORDER if non-zero, marking the nodes visited in VISITED. If
RC_ORDER is non-zero, return the reverse completion number for each