/* Generic partial redundancy elimination with lazy code motion support.
- Copyright (C) 1998, 1999, 2000 Free Software Foundation, Inc.
+ Copyright (C) 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
This file is part of GNU CC.
{
int bb;
edge e;
- basic_block *worklist, *tos;
+ basic_block *worklist, *qin, *qout, *qend;
+ unsigned int qlen;
/* Allocate a worklist array/queue. Entries are only added to the
list if they were not already on the list. So the size is
bounded by the number of basic blocks. */
- tos = worklist
+ qin = qout = worklist
= (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);
/* We want a maximal solution, so make an optimistic initialization of
/* Put every block on the worklist; this is necessary because of the
optimistic initialization of ANTIN above. */
- for (bb = 0; bb < n_basic_blocks; bb++)
+ for (bb = n_basic_blocks - 1; bb >= 0; bb--)
{
- *tos++ = BASIC_BLOCK (bb);
+ *qin++ = BASIC_BLOCK (bb);
BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
}
+
+ qin = worklist;
+ qend = &worklist[n_basic_blocks];
+ qlen = n_basic_blocks;
/* Mark blocks which are predecessors of the exit block so that we
can easily identify them below. */
e->src->aux = EXIT_BLOCK_PTR;
/* Iterate until the worklist is empty. */
- while (tos != worklist)
+ while (qlen)
{
/* Take the first entry off the worklist. */
- basic_block b = *--tos;
+ basic_block b = *qout++;
bb = b->index;
+ qlen--;
+
+ if (qout >= qend)
+ qout = worklist;
if (b->aux == EXIT_BLOCK_PTR)
/* Do not clear the aux field for blocks which are predecessors of
for (e = b->pred; e; e = e->pred_next)
if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
{
- *tos++ = e->src;
+ *qin++ = e->src;
e->src->aux = e;
+ qlen++;
+ if (qin >= qend)
+ qin = worklist;
}
}
- free (tos);
+ free (worklist);
}
/* Compute the earliest vector for edge based lcm. */
sbitmap_copy (earliest[x], antin[succ->index]);
else
{
- if (succ == EXIT_BLOCK_PTR)
+ /* We refer to the EXIT_BLOCK index, instead of testing for
+ EXIT_BLOCK_PTR, so that EXIT_BLOCK_PTR's index can be
+ changed so as to pretend it's a regular block, so that
+ its antin can be taken into account. */
+ if (succ->index == EXIT_BLOCK)
sbitmap_zero (earliest[x]);
else
{
{
int bb, num_edges, i;
edge e;
- basic_block *worklist, *tos;
+ basic_block *worklist, *qin, *qout, *qend;
+ unsigned int qlen;
num_edges = NUM_EDGES (edge_list);
/* Allocate a worklist array/queue. Entries are only added to the
list if they were not already on the list. So the size is
bounded by the number of basic blocks. */
- tos = worklist
+ qin = qout = worklist
= (basic_block *) xmalloc (sizeof (basic_block) * (n_basic_blocks + 1));
/* Initialize a mapping from each edge to its index. */
/* Add all the blocks to the worklist. This prevents an early exit from
the loop given our optimistic initialization of LATER above. */
- for (bb = n_basic_blocks - 1; bb >= 0; bb--)
+ for (bb = 0; bb < n_basic_blocks; bb++)
{
basic_block b = BASIC_BLOCK (bb);
- *tos++ = b;
+ *qin++ = b;
b->aux = b;
}
+ qin = worklist;
+ /* Note that we do not use the last allocated element for our queue,
+ as EXIT_BLOCK is never inserted into it. In fact the above allocation
+ of n_basic_blocks + 1 elements is not necessary. */
+ qend = &worklist[n_basic_blocks];
+ qlen = n_basic_blocks;
/* Iterate until the worklist is empty. */
- while (tos != worklist)
+ while (qlen)
{
/* Take the first entry off the worklist. */
- basic_block b = *--tos;
+ basic_block b = *qout++;
b->aux = NULL;
+ qlen--;
+ if (qout >= qend)
+ qout = worklist;
/* Compute the intersection of LATERIN for each incoming edge to B. */
bb = b->index;
to add the target of the outgoing edge to the worklist. */
&& e->dest != EXIT_BLOCK_PTR && e->dest->aux == 0)
{
- *tos++ = e->dest;
+ *qin++ = e->dest;
e->dest->aux = e;
+ qlen++;
+ if (qin >= qend)
+ qin = worklist;
}
}
laterin[n_basic_blocks],
later[(size_t) e->aux]);
- free (tos);
+ free (worklist);
}
/* Compute the insertion and deletion points for edge based LCM. */
{
int bb;
edge e;
- basic_block *worklist, *tos;
+ basic_block *worklist, *qin, *qout, *qend;
+ unsigned int qlen;
/* Allocate a worklist array/queue. Entries are only added to the
list if they were not already on the list. So the size is
bounded by the number of basic blocks. */
- tos = worklist
+ qin = qout = worklist
= (basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);
/* We want a maximal solution. */
/* Put every block on the worklist; this is necessary because of the
optimistic initialization of AVOUT above. */
- for (bb = n_basic_blocks - 1; bb >= 0; bb--)
+ for (bb = 0; bb < n_basic_blocks; bb++)
{
- *tos++ = BASIC_BLOCK (bb);
+ *qin++ = BASIC_BLOCK (bb);
BASIC_BLOCK (bb)->aux = BASIC_BLOCK (bb);
}
+
+ qin = worklist;
+ qend = &worklist[n_basic_blocks];
+ qlen = n_basic_blocks;
/* Mark blocks which are successors of the entry block so that we
can easily identify them below. */
e->dest->aux = ENTRY_BLOCK_PTR;
/* Iterate until the worklist is empty. */
- while (tos != worklist)
+ while (qlen)
{
/* Take the first entry off the worklist. */
- basic_block b = *--tos;
+ basic_block b = *qout++;
bb = b->index;
+ qlen--;
+
+ if (qout >= qend)
+ qout = worklist;
/* If one of the predecessor blocks is the ENTRY block, then the
intersection of avouts is the null set. We can identify such blocks
for (e = b->succ; e; e = e->succ_next)
if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
{
- *tos++ = e->dest;
+ *qin++ = e->dest;
e->dest->aux = e;
+ qlen++;
+
+ if (qin >= qend)
+ qin = worklist;
}
}
- free (tos);
+ free (worklist);
}
/* Compute the farthest vector for edge based lcm. */
/* This structure contains the information for each insn which requires
either single or double mode to be set.
MODE is the mode this insn must be executed in.
- INSN_PTR is the insn to be executed.
+ INSN_PTR is the insn to be executed (may be the note that marks the
+ beginning of a basic block).
BBNUM is the flow graph basic block this insn occurs in.
NEXT is the next insn in the same basic block. */
struct seginfo
int n_entities;
int max_num_modes = 0;
+#ifdef NORMAL_MODE
+ /* Increment n_basic_blocks before allocating bb_info. */
+ n_basic_blocks++;
+#endif
+
for (e = N_ENTITIES - 1, n_entities = 0; e >= 0; e--)
if (OPTIMIZE_MODE_SWITCHING (e))
{
max_num_modes = num_modes[e];
}
+#ifdef NORMAL_MODE
+ /* Decrement it back in case we return below. */
+ n_basic_blocks--;
+#endif
+
if (! n_entities)
return 0;
+#ifdef NORMAL_MODE
+ /* We're going to pretend the EXIT_BLOCK is a regular basic block,
+ so that switching back to normal mode when entering the
+ EXIT_BLOCK isn't optimized away. We do this by incrementing the
+ basic block count, growing the VARRAY of basic_block_info and
+ appending the EXIT_BLOCK_PTR to it. */
+ n_basic_blocks++;
+ if (VARRAY_SIZE (basic_block_info) < n_basic_blocks)
+ VARRAY_GROW (basic_block_info, n_basic_blocks);
+ BASIC_BLOCK (n_basic_blocks - 1) = EXIT_BLOCK_PTR;
+ EXIT_BLOCK_PTR->index = n_basic_blocks - 1;
+#endif
+
/* Create the bitmap vectors. */
antic = sbitmap_vector_alloc (n_basic_blocks, n_entities);
insn != NULL && insn != NEXT_INSN (BLOCK_END (bb));
insn = NEXT_INSN (insn))
{
- if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
+ if (INSN_P (insn))
{
int mode = MODE_NEEDED (e, insn);
rtx link;
}
}
- /* If this is a predecessor of the exit block, and we must
- force a mode on exit, make note of that. */
-#ifdef NORMAL_MODE
- if (NORMAL_MODE (e) != no_mode && last_mode != NORMAL_MODE (e))
- for (eg = BASIC_BLOCK (bb)->succ; eg; eg = eg->succ_next)
- if (eg->dest == EXIT_BLOCK_PTR)
- {
- rtx insn = BLOCK_END (bb);
-
- /* Find the last insn before a USE and/or JUMP. */
- while ((GET_CODE (insn) == INSN
- && GET_CODE (PATTERN (insn)) == USE)
- || GET_CODE (insn) == JUMP_INSN)
- insn = PREV_INSN (insn);
- if (insn != BLOCK_END (bb) && NEXT_INSN (insn))
- insn = NEXT_INSN (insn);
- last_mode = NORMAL_MODE (e);
- add_seginfo (info + bb,
- new_seginfo (last_mode, insn, bb, live_now));
- RESET_BIT (transp[bb], j);
- }
-#endif
-
info[bb].computing = last_mode;
/* Check for blocks without ANY mode requirements. */
if (last_mode == no_mode)
info[bb].seginfo->mode = no_mode;
}
}
+
+ bb = n_basic_blocks - 1;
+ info[bb].seginfo->mode = mode;
}
}
#endif /* NORMAL_MODE */
mode_set = gen_sequence ();
end_sequence ();
- /* If this is an abnormal edge, we'll insert at the end of the
- previous block. */
+ /* If this is an abnormal edge, we'll insert at the end
+ of the previous block. */
if (eg->flags & EDGE_ABNORMAL)
{
- src_bb->end = emit_insn_after (mode_set, src_bb->end);
+ if (GET_CODE (src_bb->end) == JUMP_INSN)
+ emit_insn_before (mode_set, src_bb->end);
+ /* It doesn't make sense to switch to normal mode
+ after a CALL_INSN, so we're going to abort if we
+ find one. The cases in which a CALL_INSN may
+ have an abnormal edge are sibcalls and EH edges.
+ In the case of sibcalls, the dest basic-block is
+ the EXIT_BLOCK, that runs in normal mode; it is
+ assumed that a sibcall insn requires normal mode
+ itself, so no mode switch would be required after
+ the call (it wouldn't make sense, anyway). In
+ the case of EH edges, EH entry points also start
+ in normal mode, so a similar reasoning applies. */
+ else if (GET_CODE (src_bb->end) == INSN)
+ src_bb->end = emit_insn_after (mode_set, src_bb->end);
+ else
+ abort ();
bb_info[j][src_bb->index].computing = mode;
RESET_BIT (transp[src_bb->index], j);
}
free_edge_list (edge_list);
}
+#ifdef NORMAL_MODE
+ /* Restore the special status of EXIT_BLOCK. */
+ n_basic_blocks--;
+ VARRAY_POP (basic_block_info);
+ EXIT_BLOCK_PTR->index = EXIT_BLOCK;
+#endif
+
/* Now output the remaining mode sets in all the segments. */
for (j = n_entities - 1; j >= 0; j--)
{
int no_mode = num_modes[entity_map[j]];
+#ifdef NORMAL_MODE
+ if (bb_info[j][n_basic_blocks].seginfo->mode != no_mode)
+ {
+ edge eg;
+ struct seginfo *ptr = bb_info[j][n_basic_blocks].seginfo;
+
+ for (eg = EXIT_BLOCK_PTR->pred; eg; eg = eg->pred_next)
+ {
+ rtx mode_set;
+
+ if (bb_info[j][eg->src->index].computing == ptr->mode)
+ continue;
+
+ start_sequence ();
+ EMIT_MODE_SET (entity_map[j], ptr->mode, ptr->regs_live);
+ mode_set = gen_sequence ();
+ end_sequence ();
+
+ /* If this is an abnormal edge, we'll insert at the end of the
+ previous block. */
+ if (eg->flags & EDGE_ABNORMAL)
+ {
+ if (GET_CODE (eg->src->end) == JUMP_INSN)
+ emit_insn_before (mode_set, eg->src->end);
+ else if (GET_CODE (eg->src->end) == INSN)
+ eg->src->end = emit_insn_after (mode_set, eg->src->end);
+ else
+ abort ();
+ }
+ else
+ {
+ need_commit = 1;
+ insert_insn_on_edge (mode_set, eg);
+ }
+ }
+
+ }
+#endif
+
for (bb = n_basic_blocks - 1; bb >= 0; bb--)
{
struct seginfo *ptr, *next;
mode_set = gen_sequence ();
end_sequence ();
- if (NOTE_LINE_NUMBER (ptr->insn_ptr) == NOTE_INSN_BASIC_BLOCK)
+ if (GET_CODE (ptr->insn_ptr) == NOTE
+ && (NOTE_LINE_NUMBER (ptr->insn_ptr)
+ == NOTE_INSN_BASIC_BLOCK))
emit_block_insn_after (mode_set, ptr->insn_ptr,
BASIC_BLOCK (ptr->bbnum));
else