Perhaps there should be a bitmap argument to df_analyse to specify
which registers should be analysed? */
-#define HANDLE_SUBREG
-
#include "config.h"
#include "system.h"
#include "rtl.h"
EXECUTE_IF_SET_IN_BITMAP (BITMAP, MIN, node_, \
{(BB) = BASIC_BLOCK (node_); CODE;});} while (0)
-#define FOR_EACH_BB_IN_BITMAP_REV(BITMAP, MIN, BB, CODE) \
-do { \
- unsigned int node_; \
- EXECUTE_IF_SET_IN_BITMAP_REV (BITMAP, node_, \
- {(BB) = BASIC_BLOCK (node_); CODE;});} while (0)
-
-#define FOR_EACH_BB_IN_SBITMAP(BITMAP, MIN, BB, CODE) \
-do { \
- unsigned int node_; \
- EXECUTE_IF_SET_IN_SBITMAP (BITMAP, MIN, node_, \
- {(BB) = BASIC_BLOCK (node_); CODE;});} while (0)
-
-#define obstack_chunk_alloc xmalloc
-#define obstack_chunk_free free
-
static struct obstack df_ref_obstack;
static struct df *ddf;
#if 0
static void df_def_table_realloc PARAMS((struct df *, int));
#endif
-static void df_insn_table_realloc PARAMS((struct df *, int));
+static void df_insn_table_realloc PARAMS((struct df *, unsigned int));
static void df_bitmaps_alloc PARAMS((struct df *, int));
static void df_bitmaps_free PARAMS((struct df *, int));
static void df_free PARAMS((struct df *));
/* Local memory allocation/deallocation routines. */
-/* Increase the insn info table by SIZE more elements. */
+/* Increase the insn info table to have space for at least SIZE + 1
+ elements. */
static void
df_insn_table_realloc (df, size)
struct df *df;
- int size;
+ unsigned int size;
{
- /* Make table 25 percent larger by default. */
- if (! size)
- size = df->insn_size / 4;
+ size++;
+ if (size <= df->insn_size)
+ return;
- size += df->insn_size;
+ /* Make the table a little larger than requested, so we don't need
+ to enlarge it so often. */
+ size += df->insn_size / 4;
df->insns = (struct insn_info *)
xrealloc (df->insns, size * sizeof (struct insn_info));
size = df->reg_size / 4;
size += df->reg_size;
+ if (size < max_reg_num ())
+ size = max_reg_num ();
df->regs = (struct reg_info *)
xrealloc (df->regs, size * sizeof (struct reg_info));
df->n_defs = df->def_id;
df->n_uses = df->use_id;
- FOR_ALL_BB (bb)
+ FOR_EACH_BB (bb)
{
struct bb_info *bb_info = DF_BB_INFO (df, bb);
{
basic_block bb;
- FOR_ALL_BB (bb)
+ FOR_EACH_BB (bb)
{
struct bb_info *bb_info = DF_BB_INFO (df, bb);
}
-/* Allocate and initialise dataflow memory. */
+/* Allocate and initialize dataflow memory. */
static void
df_alloc (df, n_regs)
struct df *df;
df->bbs = xcalloc (last_basic_block, sizeof (struct bb_info));
df->all_blocks = BITMAP_XMALLOC ();
- FOR_ALL_BB (bb)
- bitmap_set_bit (df->all_blocks, bb->sindex);
+ FOR_EACH_BB (bb)
+ bitmap_set_bit (df->all_blocks, bb->index);
}
rtx reg;
rtx use;
- reg = regno >= FIRST_PSEUDO_REGISTER
- ? regno_reg_rtx[regno] : gen_rtx_REG (reg_raw_mode[regno], regno);
+ reg = regno_reg_rtx[regno];
use = gen_rtx_USE (GET_MODE (reg), reg);
return use;
rtx reg;
rtx use;
- reg = regno >= FIRST_PSEUDO_REGISTER
- ? regno_reg_rtx[regno] : gen_rtx_REG (reg_raw_mode[regno], regno);
+ reg = regno_reg_rtx[regno];
use = gen_rtx_CLOBBER (GET_MODE (reg), reg);
return use;
are really referenced. E.g. a (subreg:SI (reg:DI 0) 0) does _not_
reference the whole reg 0 in DI mode (which would also include
reg 1, at least, if 0 and 1 are SImode registers). */
- endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
+ endregno = HARD_REGNO_NREGS (regno, GET_MODE (reg));
+ if (GET_CODE (reg) == SUBREG)
+ regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
+ SUBREG_BYTE (reg), GET_MODE (reg));
+ endregno += regno;
for (i = regno; i < endregno; i++)
- df_ref_record_1 (df, gen_rtx_REG (reg_raw_mode[i], i),
+ df_ref_record_1 (df, regno_reg_rtx[i],
loc, insn, ref_type, ref_flags);
}
else
}
}
-/* Writes to SUBREG of inndermode wider than word and outermode shorter than
- word are read-modify-write. */
+/* Writes to paradoxical subregs, or subregs which are too narrow,
+ are read-modify-write.  */
static inline bool
read_modify_subreg_p (x)
rtx x;
{
+ unsigned int isize, osize;
if (GET_CODE (x) != SUBREG)
return false;
- if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) <= UNITS_PER_WORD)
+ isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
+ osize = GET_MODE_SIZE (GET_MODE (x));
+ if (isize <= osize)
+ return true;
+ if (isize <= UNITS_PER_WORD)
return false;
- if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
+ if (osize >= UNITS_PER_WORD)
return false;
return true;
}
return;
}
+#ifdef CLASS_CANNOT_CHANGE_MODE
+ if (GET_CODE (dst) == SUBREG
+ && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (dst),
+ GET_MODE (SUBREG_REG (dst))))
+ flags |= DF_REF_MODE_CHANGE;
+#endif
+
/* May be, we should flag the use of strict_low_part somehow. Might be
handy for the reg allocator. */
while (GET_CODE (dst) == STRICT_LOW_PART
loc = &XEXP (dst, 0);
dst = *loc;
}
+#ifdef CLASS_CANNOT_CHANGE_MODE
+ if (GET_CODE (dst) == SUBREG
+ && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (dst),
+ GET_MODE (SUBREG_REG (dst))))
+ flags |= DF_REF_MODE_CHANGE;
+#endif
loc = &XEXP (dst, 0);
dst = *loc;
flags |= DF_REF_READ_WRITE;
df_uses_record (df, loc, ref_type, bb, insn, flags);
return;
}
+#ifdef CLASS_CANNOT_CHANGE_MODE
+ if (CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x),
+ GET_MODE (SUBREG_REG (x))))
+ flags |= DF_REF_MODE_CHANGE;
+#endif
/* ... Fall through ... */
switch (GET_CODE (dst))
{
+ enum df_ref_flags use_flags;
case SUBREG:
if (read_modify_subreg_p (dst))
{
+ use_flags = DF_REF_READ_WRITE;
+#ifdef CLASS_CANNOT_CHANGE_MODE
+ if (CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (dst),
+ GET_MODE (SUBREG_REG (dst))))
+ use_flags |= DF_REF_MODE_CHANGE;
+#endif
df_uses_record (df, &SUBREG_REG (dst), DF_REF_REG_USE, bb,
- insn, DF_REF_READ_WRITE);
+ insn, use_flags);
break;
}
/* ... FALLTHRU ... */
case REG:
case PC:
+ case PARALLEL:
break;
case MEM:
df_uses_record (df, &XEXP (dst, 0),
dst = XEXP (dst, 0);
if (GET_CODE (dst) != SUBREG)
abort ();
+ use_flags = DF_REF_READ_WRITE;
+#ifdef CLASS_CANNOT_CHANGE_MODE
+ if (CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (dst),
+ GET_MODE (SUBREG_REG (dst))))
+ use_flags |= DF_REF_MODE_CHANGE;
+#endif
df_uses_record (df, &SUBREG_REG (dst), DF_REF_REG_USE, bb,
- insn, DF_REF_READ_WRITE);
+ insn, use_flags);
break;
case ZERO_EXTRACT:
case SIGN_EXTRACT:
{
struct ref *def = link->ref;
unsigned int dregno = DF_REF_REGNO (def);
+ /* Don't add refs to the chain twice, i.e. only add new refs.
+ XXX the same could be done by testing whether the current insn
+ is a modified (or a new) one; that would be faster.  */
+ if (DF_REF_ID (def) < df->def_id_save)
+ continue;
df->regs[dregno].defs
= df_link_create (def, df->regs[dregno].defs);
{
struct ref *use = link->ref;
unsigned int uregno = DF_REF_REGNO (use);
+ /* Don't add refs to the chain twice, i.e. only add new refs.
+ XXX the same could be done by testing whether the current insn
+ is a modified (or a new) one; that would be faster.  */
+ if (DF_REF_ID (use) < df->use_id_save)
+ continue;
df->regs[uregno].uses
= df_link_create (use, df->regs[uregno].uses);
basic_block bb;
dflags = 0;
- aflags = flags;
+ aflags = flags;
if (flags & DF_UD_CHAIN)
aflags |= DF_RD | DF_RD_CHAIN;
df_reg_use_chain_create (df, blocks);
}
- df->dfs_order = xmalloc (sizeof(int) * num_basic_blocks);
- df->rc_order = xmalloc (sizeof(int) * num_basic_blocks);
- df->rts_order = xmalloc (sizeof(int) * num_basic_blocks);
+ df->dfs_order = xmalloc (sizeof(int) * n_basic_blocks);
+ df->rc_order = xmalloc (sizeof(int) * n_basic_blocks);
+ df->rts_order = xmalloc (sizeof(int) * n_basic_blocks);
df->inverse_dfs_map = xmalloc (sizeof(int) * last_basic_block);
df->inverse_rc_map = xmalloc (sizeof(int) * last_basic_block);
df->inverse_rts_map = xmalloc (sizeof(int) * last_basic_block);
-
+
flow_depth_first_order_compute (df->dfs_order, df->rc_order);
flow_reverse_top_sort_order_compute (df->rts_order);
- for (i = 0; i < num_basic_blocks; i ++)
+ for (i = 0; i < n_basic_blocks; i ++)
{
df->inverse_dfs_map[df->dfs_order[i]] = i;
df->inverse_rc_map[df->rc_order[i]] = i;
bitmap *out = xmalloc (sizeof (bitmap) * last_basic_block);
bitmap *gen = xmalloc (sizeof (bitmap) * last_basic_block);
bitmap *kill = xmalloc (sizeof (bitmap) * last_basic_block);
- FOR_ALL_BB (bb)
+ FOR_EACH_BB (bb)
{
- in[bb->sindex] = DF_BB_INFO (df, bb)->rd_in;
- out[bb->sindex] = DF_BB_INFO (df, bb)->rd_out;
- gen[bb->sindex] = DF_BB_INFO (df, bb)->rd_gen;
- kill[bb->sindex] = DF_BB_INFO (df, bb)->rd_kill;
+ in[bb->index] = DF_BB_INFO (df, bb)->rd_in;
+ out[bb->index] = DF_BB_INFO (df, bb)->rd_out;
+ gen[bb->index] = DF_BB_INFO (df, bb)->rd_gen;
+ kill[bb->index] = DF_BB_INFO (df, bb)->rd_kill;
}
iterative_dataflow_bitmap (in, out, gen, kill, df->all_blocks,
FORWARD, UNION, df_rd_transfer_function,
bitmap *out = xmalloc (sizeof (bitmap) * last_basic_block);
bitmap *gen = xmalloc (sizeof (bitmap) * last_basic_block);
bitmap *kill = xmalloc (sizeof (bitmap) * last_basic_block);
- FOR_ALL_BB (bb)
+ FOR_EACH_BB (bb)
{
- in[bb->sindex] = DF_BB_INFO (df, bb)->ru_in;
- out[bb->sindex] = DF_BB_INFO (df, bb)->ru_out;
- gen[bb->sindex] = DF_BB_INFO (df, bb)->ru_gen;
- kill[bb->sindex] = DF_BB_INFO (df, bb)->ru_kill;
+ in[bb->index] = DF_BB_INFO (df, bb)->ru_in;
+ out[bb->index] = DF_BB_INFO (df, bb)->ru_out;
+ gen[bb->index] = DF_BB_INFO (df, bb)->ru_gen;
+ kill[bb->index] = DF_BB_INFO (df, bb)->ru_kill;
}
iterative_dataflow_bitmap (in, out, gen, kill, df->all_blocks,
BACKWARD, UNION, df_ru_transfer_function,
bitmap *out = xmalloc (sizeof (bitmap) * last_basic_block);
bitmap *use = xmalloc (sizeof (bitmap) * last_basic_block);
bitmap *def = xmalloc (sizeof (bitmap) * last_basic_block);
- FOR_ALL_BB (bb)
+ FOR_EACH_BB (bb)
{
- in[bb->sindex] = DF_BB_INFO (df, bb)->lr_in;
- out[bb->sindex] = DF_BB_INFO (df, bb)->lr_out;
- use[bb->sindex] = DF_BB_INFO (df, bb)->lr_use;
- def[bb->sindex] = DF_BB_INFO (df, bb)->lr_def;
+ in[bb->index] = DF_BB_INFO (df, bb)->lr_in;
+ out[bb->index] = DF_BB_INFO (df, bb)->lr_out;
+ use[bb->index] = DF_BB_INFO (df, bb)->lr_use;
+ def[bb->index] = DF_BB_INFO (df, bb)->lr_def;
}
iterative_dataflow_bitmap (in, out, use, def, df->all_blocks,
BACKWARD, UNION, df_lr_transfer_function,
}
-/* Initialise dataflow analysis. */
+/* Initialize dataflow analysis. */
struct df *
df_init ()
{
/* Scan the insn for refs. */
df_insn_refs_record (df, bb, insn);
-
- bitmap_clear_bit (df->insns_modified, uid);
count++;
}
if (insn == bb->end)
if (!df->n_bbs)
return 0;
- FOR_ALL_BB (bb)
- if (bitmap_bit_p (df->bbs_modified, bb->sindex)
- && (! blocks || (blocks == (bitmap) -1) || bitmap_bit_p (blocks, bb->sindex)))
+ FOR_EACH_BB (bb)
+ if (bitmap_bit_p (df->bbs_modified, bb->index)
+ && (! blocks || (blocks == (bitmap) -1) || bitmap_bit_p (blocks, bb->index)))
{
update = 1;
break;
/* Recompute everything from scratch. */
df_free (df);
}
- /* Allocate and initialise data structures. */
+ /* Allocate and initialize data structures. */
df_alloc (df, max_reg_num ());
df_analyse_1 (df, 0, flags, 0);
update = 1;
df_analyse_1 (df, blocks, flags, 1);
bitmap_zero (df->bbs_modified);
+ bitmap_zero (df->insns_modified);
}
}
return update;
}
else
{
- FOR_ALL_BB (bb)
+ FOR_EACH_BB (bb)
df_bb_refs_unlink (df, bb);
}
}
unsigned int uid;
uid = INSN_UID (insn);
-
if (uid >= df->insn_size)
- df_insn_table_realloc (df, 0);
+ df_insn_table_realloc (df, uid);
- bitmap_set_bit (df->bbs_modified, bb->sindex);
+ bitmap_set_bit (df->bbs_modified, bb->index);
bitmap_set_bit (df->insns_modified, uid);
/* For incremental updating on the fly, perhaps we could make a copy
uid = INSN_UID (insn);
if (uid >= df->insn_size)
- df_insn_table_realloc (df, 0);
+ df_insn_table_realloc (df, uid);
df_insn_modify (df, bb, insn);
FILE *file;
{
unsigned int j;
+ basic_block bb;
if (! df || ! file)
return;
basic_block bb;
fprintf (file, "Reaching defs:\n");
- FOR_ALL_BB (bb)
+ FOR_EACH_BB (bb)
{
struct bb_info *bb_info = DF_BB_INFO (df, bb);
if (! bb_info->rd_in)
continue;
- fprintf (file, "bb %d in \t", bb->sindex);
+ fprintf (file, "bb %d in \t", bb->index);
dump_bitmap (file, bb_info->rd_in);
- fprintf (file, "bb %d gen \t", bb->sindex);
+ fprintf (file, "bb %d gen \t", bb->index);
dump_bitmap (file, bb_info->rd_gen);
- fprintf (file, "bb %d kill\t", bb->sindex);
+ fprintf (file, "bb %d kill\t", bb->index);
dump_bitmap (file, bb_info->rd_kill);
- fprintf (file, "bb %d out \t", bb->sindex);
+ fprintf (file, "bb %d out \t", bb->index);
dump_bitmap (file, bb_info->rd_out);
}
}
if (flags & DF_RU)
{
- basic_block bb;
-
fprintf (file, "Reaching uses:\n");
- FOR_ALL_BB (bb)
+ FOR_EACH_BB (bb)
{
struct bb_info *bb_info = DF_BB_INFO (df, bb);
if (! bb_info->ru_in)
continue;
- fprintf (file, "bb %d in \t", bb->sindex);
+ fprintf (file, "bb %d in \t", bb->index);
dump_bitmap (file, bb_info->ru_in);
- fprintf (file, "bb %d gen \t", bb->sindex);
+ fprintf (file, "bb %d gen \t", bb->index);
dump_bitmap (file, bb_info->ru_gen);
- fprintf (file, "bb %d kill\t", bb->sindex);
+ fprintf (file, "bb %d kill\t", bb->index);
dump_bitmap (file, bb_info->ru_kill);
- fprintf (file, "bb %d out \t", bb->sindex);
+ fprintf (file, "bb %d out \t", bb->index);
dump_bitmap (file, bb_info->ru_out);
}
}
if (flags & DF_LR)
{
- basic_block bb;
-
fprintf (file, "Live regs:\n");
- FOR_ALL_BB (bb)
+ FOR_EACH_BB (bb)
{
struct bb_info *bb_info = DF_BB_INFO (df, bb);
if (! bb_info->lr_in)
continue;
- fprintf (file, "bb %d in \t", bb->sindex);
+ fprintf (file, "bb %d in \t", bb->index);
dump_bitmap (file, bb_info->lr_in);
- fprintf (file, "bb %d use \t", bb->sindex);
+ fprintf (file, "bb %d use \t", bb->index);
dump_bitmap (file, bb_info->lr_use);
- fprintf (file, "bb %d def \t", bb->sindex);
+ fprintf (file, "bb %d def \t", bb->index);
dump_bitmap (file, bb_info->lr_def);
- fprintf (file, "bb %d out \t", bb->sindex);
+ fprintf (file, "bb %d out \t", bb->index);
dump_bitmap (file, bb_info->lr_out);
}
}
basic_block bb = df_regno_bb (df, j);
if (bb)
- fprintf (file, " bb %d", bb->sindex);
+ fprintf (file, " bb %d", bb->index);
else
fprintf (file, " bb ?");
}
void *data;
{
int changed;
- int i = block->sindex;
+ int i = block->index;
edge e;
- basic_block bb = block;
- SET_BIT (visited, block->sindex);
- if (TEST_BIT (pending, block->sindex))
+ basic_block bb= block;
+ SET_BIT (visited, block->index);
+ if (TEST_BIT (pending, block->index))
{
if (dir == FORWARD)
{
switch (conf_op)
{
case UNION:
- bitmap_a_or_b (in[i], in[i], out[e->src->sindex]);
+ bitmap_a_or_b (in[i], in[i], out[e->src->index]);
break;
case INTERSECTION:
- bitmap_a_and_b (in[i], in[i], out[e->src->sindex]);
+ bitmap_a_and_b (in[i], in[i], out[e->src->index]);
break;
}
}
switch (conf_op)
{
case UNION:
- bitmap_a_or_b (out[i], out[i], in[e->dest->sindex]);
+ bitmap_a_or_b (out[i], out[i], in[e->dest->index]);
break;
case INTERSECTION:
- bitmap_a_and_b (out[i], out[i], in[e->dest->sindex]);
+ bitmap_a_and_b (out[i], out[i], in[e->dest->index]);
break;
}
}
{
for (e = bb->succ; e != 0; e = e->succ_next)
{
- if (e->dest == EXIT_BLOCK_PTR || e->dest == block)
+ if (e->dest == EXIT_BLOCK_PTR || e->dest->index == i)
continue;
- SET_BIT (pending, e->dest->sindex);
+ SET_BIT (pending, e->dest->index);
}
}
else
{
for (e = bb->pred; e != 0; e = e->pred_next)
{
- if (e->src == ENTRY_BLOCK_PTR || e->dest == block)
+ if (e->src == ENTRY_BLOCK_PTR || e->dest->index == i)
continue;
- SET_BIT (pending, e->src->sindex);
+ SET_BIT (pending, e->src->index);
}
}
}
{
for (e = bb->succ; e != 0; e = e->succ_next)
{
- if (e->dest == EXIT_BLOCK_PTR || e->dest == block)
+ if (e->dest == EXIT_BLOCK_PTR || e->dest->index == i)
continue;
- if (!TEST_BIT (visited, e->dest->sindex))
- hybrid_search_bitmap (e->dest, in, out, gen, kill, dir,
- conf_op, transfun, visited, pending,
+ if (!TEST_BIT (visited, e->dest->index))
+ hybrid_search_bitmap (e->dest, in, out, gen, kill, dir,
+ conf_op, transfun, visited, pending,
data);
}
}
{
for (e = bb->pred; e != 0; e = e->pred_next)
{
- if (e->src == ENTRY_BLOCK_PTR || e->src == block)
+ if (e->src == ENTRY_BLOCK_PTR || e->src->index == i)
continue;
- if (!TEST_BIT (visited, e->src->sindex))
+ if (!TEST_BIT (visited, e->src->index))
hybrid_search_bitmap (e->src, in, out, gen, kill, dir,
conf_op, transfun, visited, pending,
data);
void *data;
{
int changed;
- int i = block->sindex;
+ int i = block->index;
edge e;
- basic_block bb = block;
- SET_BIT (visited, block->sindex);
- if (TEST_BIT (pending, block->sindex))
+ basic_block bb= block;
+ SET_BIT (visited, block->index);
+ if (TEST_BIT (pending, block->index))
{
if (dir == FORWARD)
{
switch (conf_op)
{
case UNION:
- sbitmap_a_or_b (in[i], in[i], out[e->src->sindex]);
+ sbitmap_a_or_b (in[i], in[i], out[e->src->index]);
break;
case INTERSECTION:
- sbitmap_a_and_b (in[i], in[i], out[e->src->sindex]);
+ sbitmap_a_and_b (in[i], in[i], out[e->src->index]);
break;
}
}
switch (conf_op)
{
case UNION:
- sbitmap_a_or_b (out[i], out[i], in[e->dest->sindex]);
+ sbitmap_a_or_b (out[i], out[i], in[e->dest->index]);
break;
case INTERSECTION:
- sbitmap_a_and_b (out[i], out[i], in[e->dest->sindex]);
+ sbitmap_a_and_b (out[i], out[i], in[e->dest->index]);
break;
}
}
{
for (e = bb->succ; e != 0; e = e->succ_next)
{
- if (e->dest == EXIT_BLOCK_PTR || e->dest == block)
+ if (e->dest == EXIT_BLOCK_PTR || e->dest->index == i)
continue;
- SET_BIT (pending, e->dest->sindex);
+ SET_BIT (pending, e->dest->index);
}
}
else
{
for (e = bb->pred; e != 0; e = e->pred_next)
{
- if (e->src == ENTRY_BLOCK_PTR || e->dest == block)
+ if (e->src == ENTRY_BLOCK_PTR || e->dest->index == i)
continue;
- SET_BIT (pending, e->src->sindex);
+ SET_BIT (pending, e->src->index);
}
}
}
{
for (e = bb->succ; e != 0; e = e->succ_next)
{
- if (e->dest == EXIT_BLOCK_PTR || e->dest == block)
+ if (e->dest == EXIT_BLOCK_PTR || e->dest->index == i)
continue;
- if (!TEST_BIT (visited, e->dest->sindex))
+ if (!TEST_BIT (visited, e->dest->index))
hybrid_search_sbitmap (e->dest, in, out, gen, kill, dir,
conf_op, transfun, visited, pending,
data);
{
for (e = bb->pred; e != 0; e = e->pred_next)
{
- if (e->src == ENTRY_BLOCK_PTR || e->src == block)
+ if (e->src == ENTRY_BLOCK_PTR || e->src->index == i)
continue;
- if (!TEST_BIT (visited, e->src->sindex))
+ if (!TEST_BIT (visited, e->src->index))
hybrid_search_sbitmap (e->src, in, out, gen, kill, dir,
conf_op, transfun, visited, pending,
data);
{
i = (size_t) fibheap_extract_min (worklist);
bb = BASIC_BLOCK (i);
- if (!TEST_BIT (visited, bb->sindex))
+ if (!TEST_BIT (visited, bb->index))
hybrid_search_sbitmap (bb, in, out, gen, kill, dir,
conf_op, transfun, visited, pending, data);
}
{
i = (size_t) fibheap_extract_min (worklist);
bb = BASIC_BLOCK (i);
- if (!TEST_BIT (visited, bb->sindex))
+ if (!TEST_BIT (visited, bb->index))
hybrid_search_bitmap (bb, in, out, gen, kill, dir,
conf_op, transfun, visited, pending, data);
}