bitmap irr_blocks;
};
+typedef struct tm_region *tm_region_p;
+DEF_VEC_P (tm_region_p);
+DEF_VEC_ALLOC_P (tm_region_p, heap);
+
/* True if there are pending edge statements to be committed for the
current function being scanned in the tmmark pass. */
bool pending_edge_inserts_p;
VEC(basic_block, heap) *queue = NULL;
bitmap visited_blocks = BITMAP_ALLOC (NULL);
struct tm_region *old_region;
+ VEC(tm_region_p, heap) *bb_regions = NULL;
all_tm_regions = region;
bb = single_succ (ENTRY_BLOCK_PTR);
+ /* We could store this information in bb->aux, but we may get called
+ through get_all_tm_blocks() from another pass that may be already
+ using bb->aux. */
+ VEC_safe_grow_cleared (tm_region_p, heap, bb_regions, last_basic_block);
+
VEC_safe_push (basic_block, heap, queue, bb);
- gcc_assert (!bb->aux); /* FIXME: Remove me. */
- bb->aux = region;
+ VEC_replace (tm_region_p, bb_regions, bb->index, region);
do
{
bb = VEC_pop (basic_block, queue);
- region = (struct tm_region *)bb->aux;
- bb->aux = NULL;
+ region = VEC_index (tm_region_p, bb_regions, bb->index);
+ VEC_replace (tm_region_p, bb_regions, bb->index, NULL);
/* Record exit and irrevocable blocks. */
region = tm_region_init_1 (region, bb);
{
bitmap_set_bit (visited_blocks, e->dest->index);
VEC_safe_push (basic_block, heap, queue, e->dest);
- gcc_assert (!e->dest->aux); /* FIXME: Remove me. */
/* If the current block started a new region, make sure that only
the entry block of the new region is associated with this region.
Other successors are still part of the old region. */
if (old_region != region && e->dest != region->entry_block)
- e->dest->aux = old_region;
+ VEC_replace (tm_region_p, bb_regions, e->dest->index, old_region);
else
- e->dest->aux = region;
+ VEC_replace (tm_region_p, bb_regions, e->dest->index, region);
}
}
while (!VEC_empty (basic_block, queue));
VEC_free (basic_block, heap, queue);
BITMAP_FREE (visited_blocks);
+ VEC_free (tm_region_p, heap, bb_regions);
}
/* The "gate" function for all transactional memory expansion and optimization
return bbs;
}
+/* Set the BB_IN_TRANSACTION flag on every basic block that lies
+   within a transaction region of the current function, so later
+   passes can test membership without recomputing the regions.  */
+
+void
+compute_transaction_bits (void)
+{
+  struct tm_region *region;
+  VEC (basic_block, heap) *queue;
+  unsigned int i;
+  basic_block bb;
+
+  /* ?? Perhaps we need to abstract gate_tm_init further, because we
+     certainly don't need it to calculate CDI_DOMINATOR info.  */
+  gate_tm_init ();
+
+  /* Start from a clean slate: clear the flag on all blocks, then
+     re-set it below only on blocks found inside a region.  */
+  FOR_EACH_BB (bb)
+    bb->flags &= ~BB_IN_TRANSACTION;
+
+  for (region = all_tm_regions; region; region = region->next)
+    {
+      /* Collect this region's blocks, not descending past
+	 irrevocable blocks (stop_at_irr_p).  */
+      queue = get_tm_region_blocks (region->entry_block,
+				    region->exit_blocks,
+				    region->irr_blocks,
+				    NULL,
+				    /*stop_at_irr_p=*/true);
+      for (i = 0; VEC_iterate (basic_block, queue, i, bb); ++i)
+	bb->flags |= BB_IN_TRANSACTION;
+      VEC_free (basic_block, heap, queue);
+    }
+
+  /* NOTE(review): presumably gate_tm_init initialized tm_obstack
+     only when it found regions — confirm before relying on this.  */
+  if (all_tm_regions)
+    bitmap_obstack_release (&tm_obstack);
+}
+
+
/* Entry point to the MARK phase of TM expansion. Here we replace
transactional memory statements with calls to builtins, and function
calls with their transactional clones (if available). But we don't
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
{
+ bool do_next = true;
gimple stmt = gsi_stmt (gsi);
/* ??? TM_COMMIT (and any other tm builtin function) in a nested
make_tm_edge (stmt, bb, region);
bb = e->dest;
gsi = gsi_start_bb (bb);
+ do_next = false;
}
/* Delete any tail-call annotation that may have been added.
gimple_call_set_tail (stmt, false);
}
- gsi_next (&gsi);
+ if (do_next)
+ gsi_next (&gsi);
}
}
assembly statement is not relevant to the transaction
is to wrap it in a __tm_waiver block. This is not
yet implemented, so we can't check for it. */
+ if (is_tm_safe (current_function_decl))
+ {
+ tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
+ SET_EXPR_LOCATION (t, gimple_location (stmt));
+ TREE_BLOCK (t) = gimple_block (stmt);
+ error ("%Kasm not allowed in %<transaction_safe%> function", t);
+ }
return true;
default:
record_tm_clone_pair (old_decl, new_decl);
- if (info->old_node->needed)
+ if (info->old_node->needed
+ || ipa_ref_list_first_refering (&info->old_node->ref_list))
ipa_tm_mark_needed_node (new_node);
return false;
}
record_tm_clone_pair (old_decl, new_decl);
cgraph_call_function_insertion_hooks (new_node);
- if (old_node->needed)
+ if (old_node->needed
+ || ipa_ref_list_first_refering (&old_node->ref_list))
ipa_tm_mark_needed_node (new_node);
/* Do the same thing, but for any aliases of the original node. */
/* If this function makes no calls and has no irrevocable blocks,
then there's nothing to do. */
/* ??? Remove non-aborting top-level transactions. */
- if (!node->callees && !d->irrevocable_blocks_clone)
+ if (!node->callees && !node->indirect_calls && !d->irrevocable_blocks_clone)
return;
current_function_decl = d->clone->decl;