* cfgloop.c (get_loop_body_in_bfs_order): Avoid redundant call to
bitmap_bit_p.
* config/bfin/bfin.c (bfin_discover_loop): Likewise.
* dominance.c (iterate_fix_dominators): Likewise.
* dse.c (set_usage_bits): Likewise.
(set_position_unneeded, record_store): Likewise.
* gimple-fold.c (get_maxval_strlen): Likewise.
* haifa-sched.c (fix_inter_tick, fix_recovery_deps): Likewise.
* ipa-inline.c (update_caller_keys): Likewise.
* ipa-split.c (verify_non_ssa_vars): Likewise.
* ipa-type-escape.c (mark_type, close_type_seen): Likewise.
(close_type_exposed_parameter, close_type_full_escape): Likewise.
(close_addressof_down): Likewise.
* ira-color.c (assign_hard_reg, push_allocno_to_stack): Likewise.
(setup_allocno_left_conflicts_size): Likewise.
(ira_reassign_conflict_allocnos): Likewise.
(ira_reassign_pseudos): Likewise.
* ira-emit.c (change_loop): Likewise.
* loop-invariant.c (mark_regno_live, mark_regno_death): Likewise.
* lto-streamer-out.c (write_symbol): Likewise.
* predict.c (expr_expected_value_1): Likewise.
* regstat.c (regstat_bb_compute_ri): Likewise.
* sel-sched.c (create_block_for_bookkeeping): Likewise.
(track_scheduled_insns_and_blocks, sel_sched_region_1): Likewise.
* stmt.c (expand_case): Likewise.
* tree-eh.c (note_eh_region_may_contain_throw): Likewise.
* tree-into-ssa.c (prune_unused_phi_nodes): Likewise.
* tree-loop-distribution.c (make_nodes_having_upstream_mem_writes):
Likewise.
(rdg_flag_vertex, rdg_flag_loop_exits): Likewise.
(rdg_build_components): Likewise.
* tree-object-size.c (collect_object_sizes_for): Likewise.
* tree-sra.c (convert_callers): Likewise.
* tree-ssa-coalesce.c (live_track_add_partition): Likewise.
* tree-ssa-live.c (mark_all_vars_used_1): Likewise.
* tree-ssa-pre.c (bitmap_set_replace_value): Likewise.
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@163378 138bc75d-0d04-0410-961f-82ee72b054a4
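
The idiom applied throughout this patch: GCC's bitmap_set_bit and
bitmap_clear_bit return true exactly when they change the bit, so the
bitmap_bit_p query that used to guard them walks the bitmap a second
time for no benefit.  Below is a minimal, self-contained sketch of that
contract; the set_bit/clear_bit helpers and the single-word bit set are
hypothetical stand-ins for GCC's sparse bitmap type, not its real
implementation:

#include <stdbool.h>
#include <stdio.h>

/* Stand-in for GCC's bitmap: one 64-bit word.  Like bitmap_set_bit,
   set_bit returns true iff the bit was previously clear, i.e. the
   call actually changed the set.  */
static bool
set_bit (unsigned long long *set, unsigned bit)
{
  unsigned long long mask = 1ULL << bit;
  bool changed = (*set & mask) == 0;
  *set |= mask;
  return changed;
}

/* Counterpart of bitmap_clear_bit: true iff the bit was set.  */
static bool
clear_bit (unsigned long long *set, unsigned bit)
{
  unsigned long long mask = 1ULL << bit;
  bool changed = (*set & mask) != 0;
  *set &= ~mask;
  return changed;
}

int
main (void)
{
  unsigned long long visited = 0;

  /* Old shape: if (!bitmap_bit_p (v, i)) { bitmap_set_bit (v, i); ... }
     New shape: one call, and the return value says whether I is new.  */
  for (unsigned i = 0; i < 4; i++)
    if (set_bit (&visited, i % 2))
      printf ("first visit to %u\n", i % 2);

  if (!clear_bit (&visited, 3))
    printf ("bit 3 was never live\n");
  return 0;
}

With that contract, every "test then set" (or "test then clear") pair
in the hunks below collapses into a single call whose return value
drives the old conditional.
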
+2010-08-19  Nathan Froyd  <froydnj@codesourcery.com>
+
+ * cfgloop.c (get_loop_body_in_bfs_order): Avoid redundant call to
+ bitmap_bit_p.
+ * config/bfin/bfin.c (bfin_discover_loop): Likewise.
+ * dominance.c (iterate_fix_dominators): Likewise.
+ * dse.c (set_usage_bits): Likewise.
+ (set_position_unneeded, record_store): Likewise.
+ * gimple-fold.c (get_maxval_strlen): Likewise.
+ * haifa-sched.c (fix_inter_tick, fix_recovery_deps): Likewise.
+ * ipa-inline.c (update_caller_keys): Likewise.
+ * ipa-split.c (verify_non_ssa_vars): Likewise.
+ * ipa-type-escape.c (mark_type, close_type_seen): Likewise.
+ (close_type_exposed_parameter, close_type_full_escape): Likewise.
+ (close_addressof_down): Likewise.
+ * ira-color.c (assign_hard_reg, push_allocno_to_stack): Likewise.
+ (setup_allocno_left_conflicts_size): Likewise.
+ (ira_reassign_conflict_allocnos): Likewise.
+ (ira_reassign_pseudos): Likewise.
+ * ira-emit.c (change_loop): Likewise.
+ * loop-invariant.c (mark_regno_live, mark_regno_death): Likewise.
+ * lto-streamer-out.c (write_symbol): Likewise.
+ * predict.c (expr_expected_value_1): Likewise.
+ * regstat.c (regstat_bb_compute_ri): Likewise.
+ * sel-sched.c (create_block_for_bookkeeping): Likewise.
+ (track_scheduled_insns_and_blocks, sel_sched_region_1): Likewise.
+ * stmt.c (expand_case): Likewise.
+ * tree-eh.c (note_eh_region_may_contain_throw): Likewise.
+ * tree-into-ssa.c (prune_unused_phi_nodes): Likewise.
+ * tree-loop-distribution.c (make_nodes_having_upstream_mem_writes):
+ Likewise.
+ (rdg_flag_vertex, rdg_flag_loop_exits): Likewise.
+ (rdg_build_components): Likewise.
+ * tree-object-size.c (collect_object_sizes_for): Likewise.
+ * tree-sra.c (convert_callers): Likewise.
+ * tree-ssa-coalesce.c (live_track_add_partition): Likewise.
+ * tree-ssa-live.c (mark_all_vars_used_1): Likewise.
+ * tree-ssa-pre.c (bitmap_set_replace_value): Likewise.
+
2010-08-19  Uros Bizjak  <ubizjak@gmail.com>

* config/i386/i386.md (*lea_1): Use P mode iterator.
edge e;
edge_iterator ei;
- if (!bitmap_bit_p (visited, bb->index))
- {
- /* This basic block is now visited */
- bitmap_set_bit (visited, bb->index);
- blocks[i++] = bb;
- }
+ if (bitmap_set_bit (visited, bb->index))
+ /* This basic block is now visited */
+ blocks[i++] = bb;
FOR_EACH_EDGE (e, ei, bb->succs)
{
if (flow_bb_inside_loop_p (loop, e->dest))
{
- if (!bitmap_bit_p (visited, e->dest->index))
- {
- bitmap_set_bit (visited, e->dest->index);
- blocks[i++] = e->dest;
- }
+ if (bitmap_set_bit (visited, e->dest->index))
+ blocks[i++] = e->dest;
}
}
break;
}
- if (bitmap_bit_p (loop->block_bitmap, bb->index))
+ if (!bitmap_set_bit (loop->block_bitmap, bb->index))
continue;
/* We've not seen this block before. Add it to the loop's
list and then add each successor to the work list. */
VEC_safe_push (basic_block, heap, loop->blocks, bb);
- bitmap_set_bit (loop->block_bitmap, bb->index);
if (bb != tail_bb)
{
dom_i = (size_t) *pointer_map_contains (map, dom);
/* Do not include parallel edges to G. */
- if (bitmap_bit_p ((bitmap) g->vertices[dom_i].data, i))
+ if (!bitmap_set_bit ((bitmap) g->vertices[dom_i].data, i))
continue;
- bitmap_set_bit ((bitmap) g->vertices[dom_i].data, i);
add_edge (g, dom_i, i);
}
}
ai = i;
}
- if (bitmap_bit_p (store1, ai))
+ if (!bitmap_set_bit (store1, ai))
bitmap_set_bit (store2, ai);
else
{
- bitmap_set_bit (store1, ai);
if (i < 0)
{
if (group->offset_map_size_n < ai)
{
if (__builtin_expect (s_info->is_large, false))
{
- if (!bitmap_bit_p (s_info->positions_needed.large.bmap, pos))
- {
- s_info->positions_needed.large.count++;
- bitmap_set_bit (s_info->positions_needed.large.bmap, pos);
- }
+ if (bitmap_set_bit (s_info->positions_needed.large.bmap, pos))
+ s_info->positions_needed.large.count++;
}
else
s_info->positions_needed.small_bitmask
gcc_assert (GET_MODE (mem) != BLKmode);
- if (bitmap_bit_p (store1, spill_alias_set))
+ if (!bitmap_set_bit (store1, spill_alias_set))
bitmap_set_bit (store2, spill_alias_set);
- else
- bitmap_set_bit (store1, spill_alias_set);
if (clear_alias_group->offset_map_size_p < spill_alias_set)
clear_alias_group->offset_map_size_p = spill_alias_set;
}
/* If we were already here, break the infinite cycle. */
- if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
+ if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
return true;
- bitmap_set_bit (visited, SSA_NAME_VERSION (arg));
var = arg;
def_stmt = SSA_NAME_DEF_STMT (var);
gcc_assert (tick >= MIN_TICK);
/* Fix INSN_TICK of instruction from just scheduled block. */
- if (!bitmap_bit_p (&processed, INSN_LUID (head)))
+ if (bitmap_set_bit (&processed, INSN_LUID (head)))
{
- bitmap_set_bit (&processed, INSN_LUID (head));
tick -= next_clock;
if (tick < MIN_TICK)
/* If NEXT has its INSN_TICK calculated, fix it.
If not - it will be properly calculated from
scratch later in fix_tick_ready. */
- && !bitmap_bit_p (&processed, INSN_LUID (next)))
+ && bitmap_set_bit (&processed, INSN_LUID (next)))
{
- bitmap_set_bit (&processed, INSN_LUID (next));
tick -= next_clock;
if (tick < MIN_TICK)
{
sd_delete_dep (sd_it);
- if (!bitmap_bit_p (&in_ready, INSN_LUID (consumer)))
- {
- ready_list = alloc_INSN_LIST (consumer, ready_list);
- bitmap_set_bit (&in_ready, INSN_LUID (consumer));
- }
+ if (bitmap_set_bit (&in_ready, INSN_LUID (consumer)))
+ ready_list = alloc_INSN_LIST (consumer, ready_list);
}
else
{
if (!node->local.inlinable
|| node->global.inlined_to)
return;
- if (bitmap_bit_p (updated_nodes, node->uid))
+ if (!bitmap_set_bit (updated_nodes, node->uid))
return;
- bitmap_set_bit (updated_nodes, node->uid);
node->global.estimated_growth = INT_MIN;
/* See if there is something to do. */
FOR_EACH_EDGE (e, ei, bb->preds)
if (e->src != ENTRY_BLOCK_PTR
- && !bitmap_bit_p (seen, e->src->index))
+ && bitmap_set_bit (seen, e->src->index))
{
gcc_checking_assert (!bitmap_bit_p (current->split_bbs,
e->src->index));
VEC_safe_push (basic_block, heap, worklist, e->src);
- bitmap_set_bit (seen, e->src->index);
}
for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
{
}
uid = TYPE_UID (type);
- if (bitmap_bit_p (map, uid))
+ if (!bitmap_set_bit (map, uid))
return type;
- else
- {
- bitmap_set_bit (map, uid);
- if (escape_status == FULL_ESCAPE)
- {
- /* Efficiency hack. When things are bad, do not mess around
- with this type anymore. */
- bitmap_set_bit (global_types_exposed_parameter, uid);
- }
- }
+ else if (escape_status == FULL_ESCAPE)
+ /* Efficiency hack. When things are bad, do not mess around
+ with this type anymore. */
+ bitmap_set_bit (global_types_exposed_parameter, uid);
+
return type;
}
uid = TYPE_UID (type);
- if (bitmap_bit_p (been_there_done_that, uid))
+ if (!bitmap_set_bit (been_there_done_that, uid))
return;
- bitmap_set_bit (been_there_done_that, uid);
/* If we are doing a language with a type hierarchy, mark all of
the superclasses. */
uid = TYPE_UID (type);
gcc_assert (!POINTER_TYPE_P (type));
- if (bitmap_bit_p (been_there_done_that, uid))
+ if (!bitmap_set_bit (been_there_done_that, uid))
return;
- bitmap_set_bit (been_there_done_that, uid);
/* If the field is a struct or union type, mark all of the
subfields. */
return;
uid = TYPE_UID (type);
- if (bitmap_bit_p (been_there_done_that, uid))
+ if (!bitmap_set_bit (been_there_done_that, uid))
return;
- bitmap_set_bit (been_there_done_that, uid);
subtype_map = subtype_map_for_uid (uid, false);
else
return NULL;
- if (bitmap_bit_p (been_there_done_that, uid))
+ if (!bitmap_set_bit (been_there_done_that, uid))
return map;
- bitmap_set_bit (been_there_done_that, uid);
/* If the type escapes, get rid of the addressof map, it will not be
needed. */
if (allocno_coalesced_p)
{
- if (bitmap_bit_p (processed_coalesced_allocno_bitmap,
+ if (!bitmap_set_bit (processed_coalesced_allocno_bitmap,
ALLOCNO_NUM (conflict_allocno)))
continue;
- bitmap_set_bit (processed_coalesced_allocno_bitmap,
- ALLOCNO_NUM (conflict_allocno));
}
ira_allocate_and_copy_costs
{
conflict_obj = ALLOCNO_OBJECT (conflict_allocno,
OBJECT_SUBWORD (conflict_obj));
- if (bitmap_bit_p (processed_coalesced_allocno_bitmap,
+ if (!bitmap_set_bit (processed_coalesced_allocno_bitmap,
OBJECT_CONFLICT_ID (conflict_obj)))
continue;
- bitmap_set_bit (processed_coalesced_allocno_bitmap,
- OBJECT_CONFLICT_ID (conflict_obj));
}
if (!ALLOCNO_IN_GRAPH_P (conflict_allocno)
== ALLOCNO_COVER_CLASS (conflict_allocno));
if (allocno_coalesced_p)
{
- if (bitmap_bit_p (processed_coalesced_allocno_bitmap,
- ALLOCNO_NUM (conflict_allocno)))
+ if (!bitmap_set_bit (processed_coalesced_allocno_bitmap,
+ ALLOCNO_NUM (conflict_allocno)))
continue;
- bitmap_set_bit (processed_coalesced_allocno_bitmap,
- ALLOCNO_NUM (conflict_allocno));
}
if (! ALLOCNO_ASSIGNED_P (conflict_allocno))
ira_allocno_t conflict_a = OBJECT_ALLOCNO (conflict_obj);
ira_assert (ira_reg_classes_intersect_p
[cover_class][ALLOCNO_COVER_CLASS (conflict_a)]);
- if (bitmap_bit_p (allocnos_to_color, ALLOCNO_NUM (conflict_a)))
+ if (!bitmap_set_bit (allocnos_to_color, ALLOCNO_NUM (conflict_a)))
continue;
- bitmap_set_bit (allocnos_to_color, ALLOCNO_NUM (conflict_a));
sorted_allocnos[allocnos_to_color_num++] = conflict_a;
}
}
ira_allocno_t conflict_a = OBJECT_ALLOCNO (conflict_obj);
if (ALLOCNO_HARD_REGNO (conflict_a) < 0
&& ! ALLOCNO_DONT_REASSIGN_P (conflict_a)
- && ! bitmap_bit_p (temp, ALLOCNO_REGNO (conflict_a)))
+ && bitmap_set_bit (temp, ALLOCNO_REGNO (conflict_a)))
{
spilled_pseudo_regs[num++] = ALLOCNO_REGNO (conflict_a);
- bitmap_set_bit (temp, ALLOCNO_REGNO (conflict_a));
/* ?!? This seems wrong. */
bitmap_set_bit (consideration_allocno_bitmap,
ALLOCNO_NUM (conflict_a));
regno = ALLOCNO_REGNO (allocno);
if (ALLOCNO_CAP_MEMBER (allocno) != NULL)
continue;
- used_p = bitmap_bit_p (used_regno_bitmap, regno);
- bitmap_set_bit (used_regno_bitmap, regno);
+ used_p = !bitmap_set_bit (used_regno_bitmap, regno);
ALLOCNO_SOMEWHERE_RENAMED_P (allocno) = true;
if (! used_p)
continue;
loop != current_loops->tree_root;
loop = loop_outer (loop))
bitmap_set_bit (&LOOP_DATA (loop)->regs_live, regno);
- if (bitmap_bit_p (&curr_regs_live, regno))
+ if (!bitmap_set_bit (&curr_regs_live, regno))
return;
- bitmap_set_bit (&curr_regs_live, regno);
change_pressure (regno, true);
}
static void
mark_regno_death (int regno)
{
- if (! bitmap_bit_p (&curr_regs_live, regno))
+ if (! bitmap_clear_bit (&curr_regs_live, regno))
return;
- bitmap_clear_bit (&curr_regs_live, regno);
change_pressure (regno, false);
}
gcc_assert (slot_num >= 0);
/* Avoid duplicate symbols. */
- if (bitmap_bit_p (seen, slot_num))
+ if (!bitmap_set_bit (seen, slot_num))
return;
- else
- bitmap_set_bit (seen, slot_num);
if (DECL_EXTERNAL (t))
{
def = SSA_NAME_DEF_STMT (op0);
/* If we were already here, break the infinite cycle. */
- if (bitmap_bit_p (visited, SSA_NAME_VERSION (op0)))
+ if (!bitmap_set_bit (visited, SSA_NAME_VERSION (op0)))
return NULL;
- bitmap_set_bit (visited, SSA_NAME_VERSION (op0));
if (gimple_code (def) == GIMPLE_PHI)
{
REG_BASIC_BLOCK (uregno) = REG_BLOCK_GLOBAL;
}
- if (!bitmap_bit_p (live, uregno))
+ if (bitmap_set_bit (live, uregno))
{
/* This register is now live. */
- bitmap_set_bit (live, uregno);
/* If we have seen this regno, then it has already been
processed correctly with the per insn increment. If
if (INSN_P (insn))
EXPR_ORIG_BB_INDEX (INSN_EXPR (insn)) = succ->index;
- if (bitmap_bit_p (code_motion_visited_blocks, new_bb->index))
- {
- bitmap_set_bit (code_motion_visited_blocks, succ->index);
- bitmap_clear_bit (code_motion_visited_blocks, new_bb->index);
- }
+ if (bitmap_clear_bit (code_motion_visited_blocks, new_bb->index))
+ bitmap_set_bit (code_motion_visited_blocks, succ->index);
gcc_assert (LABEL_P (BB_HEAD (new_bb))
&& LABEL_P (BB_HEAD (succ)));
we still need to count it as an originator. */
bitmap_set_bit (current_originators, INSN_UID (insn));
- if (!bitmap_bit_p (current_copies, INSN_UID (insn)))
+ if (!bitmap_clear_bit (current_copies, INSN_UID (insn)))
{
/* Note that original block needs to be rescheduled, as we pulled an
instruction out of it. */
else if (INSN_UID (insn) < first_emitted_uid && !DEBUG_INSN_P (insn))
num_insns_scheduled++;
}
- else
- bitmap_clear_bit (current_copies, INSN_UID (insn));
/* For instructions we must immediately remove insn from the
stream, so subsequent update_data_sets () won't include this
continue;
}
- if (bitmap_bit_p (blocks_to_reschedule, bb->index))
+ if (bitmap_clear_bit (blocks_to_reschedule, bb->index))
{
flist_tail_init (new_fences);
/* Mark BB as head of the new ebb. */
bitmap_set_bit (forced_ebb_heads, bb->index);
- bitmap_clear_bit (blocks_to_reschedule, bb->index);
-
gcc_assert (fences == NULL);
init_fences (bb_note (bb));
/* If we have not seen this label yet, then increase the
number of unique case node targets seen. */
lab = label_rtx (n->code_label);
- if (!bitmap_bit_p (label_bitmap, CODE_LABEL_NUMBER (lab)))
- {
- bitmap_set_bit (label_bitmap, CODE_LABEL_NUMBER (lab));
- uniq++;
- }
+ if (bitmap_set_bit (label_bitmap, CODE_LABEL_NUMBER (lab)))
+ uniq++;
}
BITMAP_FREE (label_bitmap);
static void
note_eh_region_may_contain_throw (eh_region region)
{
- while (!bitmap_bit_p (eh_region_may_contain_throw_map, region->index))
+ while (bitmap_set_bit (eh_region_may_contain_throw_map, region->index))
{
- bitmap_set_bit (eh_region_may_contain_throw_map, region->index);
region = region->outer;
if (region == NULL)
break;
}
/* If the phi node is already live, there is nothing to do. */
- if (bitmap_bit_p (live_phis, p))
+ if (!bitmap_set_bit (live_phis, p))
continue;
- /* Mark the phi as live, and add the new uses to the worklist. */
- bitmap_set_bit (live_phis, p);
+ /* Add the new uses to the worklist. */
def_bb = BASIC_BLOCK (p);
FOR_EACH_EDGE (e, ei, def_bb->preds)
{
for (i = 0; VEC_iterate (int, nodes, i, x); i++)
{
- if (bitmap_bit_p (seen, x))
+ if (!bitmap_set_bit (seen, x))
continue;
- bitmap_set_bit (seen, x);
-
if (RDG_MEM_WRITE_STMT (rdg, x)
|| predecessor_has_mem_write (rdg, &(rdg->vertices[x]))
/* In anti dependences the read should occur before
{
struct loop *loop;
- if (bitmap_bit_p (partition, v))
+ if (!bitmap_set_bit (partition, v))
return;
loop = loop_containing_stmt (RDG_STMT (rdg, v));
bitmap_set_bit (loops, loop->num);
- bitmap_set_bit (partition, v);
if (rdg_cannot_recompute_vertex_p (rdg, v))
{
part_has_writes);
EXECUTE_IF_SET_IN_BITMAP (new_loops, 0, i, bi)
- if (!bitmap_bit_p (loops, i))
- {
- bitmap_set_bit (loops, i);
- collect_condition_stmts (get_loop (i), &conds);
- }
+ if (bitmap_set_bit (loops, i))
+ collect_condition_stmts (get_loop (i), &conds);
BITMAP_FREE (new_loops);
}
{
int c = rdg->vertices[v].component;
- if (!bitmap_bit_p (saved_components, c))
+ if (bitmap_set_bit (saved_components, c))
{
rdgc x = XCNEW (struct rdg_component);
x->num = c;
x->vertices = all_components[c];
VEC_safe_push (rdgc, heap, *components, x);
- bitmap_set_bit (saved_components, c);
}
}
if (osi->pass == 0)
{
- if (! bitmap_bit_p (osi->visited, varno))
+ if (bitmap_set_bit (osi->visited, varno))
{
- bitmap_set_bit (osi->visited, varno);
object_sizes[object_size_type][varno]
= (object_size_type & 2) ? -1 : 0;
}
}
for (cs = node->callers; cs; cs = cs->next_caller)
- if (!bitmap_bit_p (recomputed_callers, cs->caller->uid))
- {
- compute_inline_parameters (cs->caller);
- bitmap_set_bit (recomputed_callers, cs->caller->uid);
- }
+ if (bitmap_set_bit (recomputed_callers, cs->caller->uid))
+ compute_inline_parameters (cs->caller);
BITMAP_FREE (recomputed_callers);
current_function_decl = old_cur_fndecl;
root = basevar_index (ptr->map, partition);
/* If this base var wasn't live before, it is now. Clear the element list
since it was delayed until needed. */
- if (!bitmap_bit_p (ptr->live_base_var, root))
- {
- bitmap_set_bit (ptr->live_base_var, root);
- bitmap_clear (ptr->live_base_partitions[root]);
- }
+ if (bitmap_set_bit (ptr->live_base_var, root))
+ bitmap_clear (ptr->live_base_partitions[root]);
bitmap_set_bit (ptr->live_base_partitions[root], partition);
}
eliminated as unused. */
if (TREE_CODE (t) == VAR_DECL)
{
- if (data != NULL && bitmap_bit_p ((bitmap) data, DECL_UID (t)))
- {
- bitmap_clear_bit ((bitmap) data, DECL_UID (t));
- mark_all_vars_used (&DECL_INITIAL (t), data);
- }
+ if (data != NULL && bitmap_clear_bit ((bitmap) data, DECL_UID (t)))
+ mark_all_vars_used (&DECL_INITIAL (t), data);
set_is_used (t);
}
/* remove_unused_scope_block_p requires information about labels
exprset = VEC_index (bitmap_set_t, value_expressions, lookfor);
FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
{
- if (bitmap_bit_p (&set->expressions, i))
+ if (bitmap_clear_bit (&set->expressions, i))
{
- bitmap_clear_bit (&set->expressions, i);
bitmap_set_bit (&set->expressions, get_expression_id (expr));
return;
}