the result of the expression is copied to a new register, and the redundant
expression is deleted by replacing it with this new register. Classic GCSE
doesn't have this problem as much as it computes the reaching defs of
- each register in each block and thus can try to use an existing register.
-
- **********************
-
- A fair bit of simplicity is created by creating small functions for simple
- tasks, even when the function is only called in one place. This may
- measurably slow things down [or may not] by creating more function call
- overhead than is necessary. The source is laid out so that it's trivial
- to make the affected functions inline so that one can measure what speed
- up, if any, can be achieved, and maybe later when things settle things can
- be rearranged.
-
- Help stamp out big monolithic functions! */
+ each register in each block and thus can try to use an existing
+ register. */
\f
/* GCSE global vars. */
{
/* The next setting of this register. */
struct reg_set *next;
- /* The insn where it was set. */
- rtx insn;
+ /* The index of the block where it was set. */
+ int bb_index;
} reg_set;
static reg_set **reg_set_table;
/* This array parallels modify_mem_list, but is kept canonicalized. */
static rtx * canon_modify_mem_list;
-static bitmap canon_modify_mem_list_set;
+
+/* Bitmap indexed by block numbers to record which blocks contain
+ function calls. */
+static bitmap blocks_with_calls;
/* Various variables for statistics gathering. */
\f
/* For available exprs */
static sbitmap *ae_kill, *ae_gen;
-
-/* Objects of this type are passed around by the null-pointer check
- removal routines. */
-struct null_pointer_info
-{
- /* The basic block being processed. */
- basic_block current_block;
- /* The first register to be handled in this pass. */
- unsigned int min_reg;
- /* One greater than the last register to be handled in this pass. */
- unsigned int max_reg;
- sbitmap *nonnull_local;
- sbitmap *nonnull_killed;
-};
\f
static void compute_can_copy (void);
static void *gmalloc (size_t) ATTRIBUTE_MALLOC;
static void alloc_reg_set_mem (int);
static void free_reg_set_mem (void);
static void record_one_set (int, rtx);
-static void replace_one_set (int, rtx, rtx);
static void record_set_info (rtx, rtx, void *);
static void compute_sets (rtx);
static void hash_scan_insn (rtx, struct hash_table *, int);
CUID_INSN (i++) = insn;
/* Allocate vars to track sets of regs. */
- reg_set_bitmap = BITMAP_XMALLOC ();
+ reg_set_bitmap = BITMAP_ALLOC (NULL);
/* Allocate vars to track sets of regs, memory per block. */
reg_set_in_block = sbitmap_vector_alloc (last_basic_block, max_gcse_regno);
basic block. */
modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
- modify_mem_list_set = BITMAP_XMALLOC ();
- canon_modify_mem_list_set = BITMAP_XMALLOC ();
+ modify_mem_list_set = BITMAP_ALLOC (NULL);
+ blocks_with_calls = BITMAP_ALLOC (NULL);
}
/* Free memory allocated by alloc_gcse_mem. */
free (uid_cuid);
free (cuid_insn);
- BITMAP_XFREE (reg_set_bitmap);
+ BITMAP_FREE (reg_set_bitmap);
sbitmap_vector_free (reg_set_in_block);
free_modify_mem_tables ();
- BITMAP_XFREE (modify_mem_list_set);
- BITMAP_XFREE (canon_modify_mem_list_set);
+ BITMAP_FREE (modify_mem_list_set);
+ BITMAP_FREE (blocks_with_calls);
}
\f
/* Compute the local properties of each recorded expression.
  obstack_free (&reg_set_obstack, NULL);
}
-/* An OLD_INSN that used to set REGNO was replaced by NEW_INSN.
- Update the corresponding `reg_set_table' entry accordingly.
- We assume that NEW_INSN is not already recorded in reg_set_table[regno]. */
-
-static void
-replace_one_set (int regno, rtx old_insn, rtx new_insn)
-{
- struct reg_set *reg_info;
- if (regno >= reg_set_table_size)
- return;
- for (reg_info = reg_set_table[regno]; reg_info; reg_info = reg_info->next)
- if (reg_info->insn == old_insn)
- {
- reg_info->insn = new_insn;
- break;
- }
-}
-
/* Record REGNO in the reg_set table. */
static void
  new_reg_info = obstack_alloc (&reg_set_obstack, sizeof (struct reg_set));
bytes_used += sizeof (struct reg_set);
- new_reg_info->insn = insn;
+ new_reg_info->bb_index = BLOCK_NUM (insn);
new_reg_info->next = reg_set_table[regno];
reg_set_table[regno] = new_reg_info;
}
unsigned int hash;
struct expr *cur_expr, *last_expr = NULL;
struct occr *antic_occr, *avail_occr;
- struct occr *last_occr = NULL;
hash = hash_expr (x, mode, &do_not_record_p, table->size);
{
antic_occr = cur_expr->antic_occr;
- /* Search for another occurrence in the same basic block. */
- while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
- {
- /* If an occurrence isn't found, save a pointer to the end of
- the list. */
- last_occr = antic_occr;
- antic_occr = antic_occr->next;
- }
+ if (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
+ antic_occr = NULL;
if (antic_occr)
/* Found another instance of the expression in the same basic block.
/* First occurrence of this expression in this basic block. */
antic_occr = gcse_alloc (sizeof (struct occr));
bytes_used += sizeof (struct occr);
- /* First occurrence of this expression in any block? */
- if (cur_expr->antic_occr == NULL)
- cur_expr->antic_occr = antic_occr;
- else
- last_occr->next = antic_occr;
-
antic_occr->insn = insn;
- antic_occr->next = NULL;
+ antic_occr->next = cur_expr->antic_occr;
antic_occr->deleted_p = 0;
+ cur_expr->antic_occr = antic_occr;
}
}
{
avail_occr = cur_expr->avail_occr;
- /* Search for another occurrence in the same basic block. */
- while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
+ if (avail_occr && BLOCK_NUM (avail_occr->insn) == BLOCK_NUM (insn))
{
- /* If an occurrence isn't found, save a pointer to the end of
- the list. */
- last_occr = avail_occr;
- avail_occr = avail_occr->next;
+ /* Found another instance of the expression in the same basic block.
+ Prefer this occurrence to the currently recorded one. We want
+ the last one in the block and the block is scanned from start
+ to end. */
+ avail_occr->insn = insn;
}
-
- if (avail_occr)
- /* Found another instance of the expression in the same basic block.
- Prefer this occurrence to the currently recorded one. We want
- the last one in the block and the block is scanned from start
- to end. */
- avail_occr->insn = insn;
else
{
/* First occurrence of this expression in this basic block. */
avail_occr = gcse_alloc (sizeof (struct occr));
bytes_used += sizeof (struct occr);
-
- /* First occurrence of this expression in any block? */
- if (cur_expr->avail_occr == NULL)
- cur_expr->avail_occr = avail_occr;
- else
- last_occr->next = avail_occr;
-
avail_occr->insn = insn;
- avail_occr->next = NULL;
+ avail_occr->next = cur_expr->avail_occr;
avail_occr->deleted_p = 0;
+ cur_expr->avail_occr = avail_occr;
}
}
}
int found;
unsigned int hash;
struct expr *cur_expr, *last_expr = NULL;
- struct occr *cur_occr, *last_occr = NULL;
+ struct occr *cur_occr;
gcc_assert (GET_CODE (x) == SET && REG_P (SET_DEST (x)));
/* Now record the occurrence. */
cur_occr = cur_expr->avail_occr;
- /* Search for another occurrence in the same basic block. */
- while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
+ if (cur_occr && BLOCK_NUM (cur_occr->insn) == BLOCK_NUM (insn))
{
- /* If an occurrence isn't found, save a pointer to the end of
- the list. */
- last_occr = cur_occr;
- cur_occr = cur_occr->next;
+ /* Found another instance of the expression in the same basic block.
+ Prefer this occurrence to the currently recorded one. We want
+ the last one in the block and the block is scanned from start
+ to end. */
+ cur_occr->insn = insn;
}
-
- if (cur_occr)
- /* Found another instance of the expression in the same basic block.
- Prefer this occurrence to the currently recorded one. We want the
- last one in the block and the block is scanned from start to end. */
- cur_occr->insn = insn;
else
{
/* First occurrence of this expression in this basic block. */
cur_occr = gcse_alloc (sizeof (struct occr));
bytes_used += sizeof (struct occr);
- /* First occurrence of this expression in any block? */
- if (cur_expr->avail_occr == NULL)
- cur_expr->avail_occr = cur_occr;
- else
- last_occr->next = cur_occr;
-
- cur_occr->insn = insn;
- cur_occr->next = NULL;
- cur_occr->deleted_p = 0;
+ cur_occr->insn = insn;
+ cur_occr->next = cur_expr->avail_occr;
+ cur_occr->deleted_p = 0;
+ cur_expr->avail_occr = cur_occr;
}
}
alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
canon_modify_mem_list[bb] =
alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
- bitmap_set_bit (canon_modify_mem_list_set, bb);
}
/* Record memory modification information for INSN. We do not actually care
need to insert a pair of items, as canon_list_insert does. */
canon_modify_mem_list[bb] =
alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
- bitmap_set_bit (canon_modify_mem_list_set, bb);
+ bitmap_set_bit (blocks_with_calls, bb);
}
else
note_stores (PATTERN (insn), canon_list_insert, (void*) insn);
EXECUTE_IF_SET_IN_BITMAP (modify_mem_list_set, 0, i, bi)
{
free_INSN_LIST_list (modify_mem_list + i);
- }
- bitmap_clear (modify_mem_list_set);
-
- EXECUTE_IF_SET_IN_BITMAP (canon_modify_mem_list_set, 0, i, bi)
- {
free_insn_expr_list_list (canon_modify_mem_list + i);
}
- bitmap_clear (canon_modify_mem_list_set);
+ bitmap_clear (modify_mem_list_set);
+ bitmap_clear (blocks_with_calls);
}
-/* Release memory used by modify_mem_list_set and canon_modify_mem_list_set. */
+/* Release memory used by modify_mem_list_set. */
static void
free_modify_mem_tables (void)
else
{
for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
- SET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
+ SET_BIT (bmap[r->bb_index], indx);
}
}
else
else
{
for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
- RESET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
+ RESET_BIT (bmap[r->bb_index], indx);
}
}
return;
case MEM:
- FOR_EACH_BB (bb)
- {
- rtx list_entry = canon_modify_mem_list[bb->index];
+ {
+ bitmap_iterator bi;
+ unsigned bb_index;
- while (list_entry)
- {
- rtx dest, dest_addr;
+ /* First handle all the blocks with calls. We don't need to
+ do any list walking for them. */
+ EXECUTE_IF_SET_IN_BITMAP (blocks_with_calls, 0, bb_index, bi)
+ {
+ if (set_p)
+ SET_BIT (bmap[bb_index], indx);
+ else
+ RESET_BIT (bmap[bb_index], indx);
+ }
- if (CALL_P (XEXP (list_entry, 0)))
- {
- if (set_p)
- SET_BIT (bmap[bb->index], indx);
- else
- RESET_BIT (bmap[bb->index], indx);
- break;
- }
- /* LIST_ENTRY must be an INSN of some kind that sets memory.
- Examine each hunk of memory that is modified. */
+ /* Now iterate over the blocks which have memory modifications
+ but which do not have any calls. */
+ EXECUTE_IF_AND_COMPL_IN_BITMAP (modify_mem_list_set, blocks_with_calls,
+ 0, bb_index, bi)
+ {
+ rtx list_entry = canon_modify_mem_list[bb_index];
- dest = XEXP (list_entry, 0);
- list_entry = XEXP (list_entry, 1);
- dest_addr = XEXP (list_entry, 0);
+ while (list_entry)
+ {
+ rtx dest, dest_addr;
- if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
- x, rtx_addr_varies_p))
- {
- if (set_p)
- SET_BIT (bmap[bb->index], indx);
- else
- RESET_BIT (bmap[bb->index], indx);
- break;
- }
- list_entry = XEXP (list_entry, 1);
- }
- }
+ /* LIST_ENTRY must be an INSN of some kind that sets memory.
+ Examine each hunk of memory that is modified. */
+
+ dest = XEXP (list_entry, 0);
+ list_entry = XEXP (list_entry, 1);
+ dest_addr = XEXP (list_entry, 0);
+
+ if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
+ x, rtx_addr_varies_p))
+ {
+ if (set_p)
+ SET_BIT (bmap[bb_index], indx);
+ else
+ RESET_BIT (bmap[bb_index], indx);
+ break;
+ }
+ list_entry = XEXP (list_entry, 1);
+ }
+ }
+ }
x = XEXP (x, 0);
goto repeat;
dest = GET_CODE (cond) == EQ ? BRANCH_EDGE (bb)->dest
: FALLTHRU_EDGE (bb)->dest;
- if (dest && EDGE_COUNT (dest->preds) == 1
+ if (dest && single_pred_p (dest)
&& dest != EXIT_BLOCK_PTR)
{
new = gen_rtx_SET (VOIDmode, XEXP (cond, 0),
}
else if (GET_CODE (new) == LABEL_REF)
{
- edge_iterator ei2;
-
dest = BLOCK_FOR_INSN (XEXP (new, 0));
/* Don't bypass edges containing instructions. */
- FOR_EACH_EDGE (edest, ei2, bb->succs)
- if (edest->dest == dest && edest->insns.r)
- {
- dest = NULL;
- break;
- }
+ edest = find_edge (bb, dest);
+ if (edest && edest->insns.r)
+ dest = NULL;
}
else
dest = NULL;
branch. We would end up emitting the instruction on "both"
edges. */
- if (dest && setcc && !CC0_P (SET_DEST (PATTERN (setcc))))
- {
- edge e2;
- edge_iterator ei2;
-
- FOR_EACH_EDGE (e2, ei2, e->src->succs)
- if (e2->dest == dest)
- {
- dest = NULL;
- break;
- }
- }
+ if (dest && setcc && !CC0_P (SET_DEST (PATTERN (setcc)))
+ && find_edge (e->src, dest))
+ dest = NULL;
old_dest = e->dest;
if (dest != NULL
EXIT_BLOCK_PTR, next_bb)
{
/* Check for more than one predecessor. */
- if (EDGE_COUNT (bb->preds) > 1)
+ if (!single_pred_p (bb))
{
setcc = NULL_RTX;
for (insn = BB_HEAD (bb);
if (JUMP_P (insn)
|| (NONJUMP_INSN_P (insn)
- && (EDGE_COUNT (bb->succs) > 1
- || EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL)))
+ && (!single_succ_p (bb)
+ || single_succ_edge (bb)->flags & EDGE_ABNORMAL)))
{
#ifdef HAVE_cc0
rtx note;
/* Likewise if the last insn is a call, as will happen in the presence
of exception handling. */
else if (CALL_P (insn)
- && (EDGE_COUNT (bb->succs) > 1 || EDGE_SUCC (bb, 0)->flags & EDGE_ABNORMAL))
+ && (!single_succ_p (bb)
+ || single_succ_edge (bb)->flags & EDGE_ABNORMAL))
{
/* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
we search backward and place the instructions before the first
if (! occr->deleted_p)
continue;
- /* Insert this expression on this edge if if it would
+ /* Insert this expression on this edge if it would
reach the deleted occurrence in BB. */
if (!TEST_BIT (inserted[e], j))
{
new_insn = emit_insn_after (new_insn, insn);
/* Keep register set table up to date. */
- replace_one_set (REGNO (old_reg), insn, new_insn);
record_one_set (regno, insn);
}
else
/* Check if INSN kills the store pattern X (is aliased with it).
AFTER is true if we are checking the case when store X occurs
- after the insn. Return true if it it does. */
+ after the insn. Return true if it does. */
static bool
store_killed_in_insn (rtx x, rtx x_regs, rtx insn, int after)