 static fibheapkey_t bb_to_key (basic_block);
 static bool better_edge_p (const_basic_block, const_edge, int, int, int, int, const_edge);
 static void connect_traces (int, struct trace *);
-static bool copy_bb_p (basic_block, int);
+static bool copy_bb_p (const_basic_block, int);
 static int get_uncond_jump_length (void);
 static bool push_to_next_round_p (const_basic_block, int, int, int, gcov_type);
 static void find_rarely_executed_basic_blocks_and_crossing_edges (edge **,
    when code size is allowed to grow by duplication. */
 static bool
-copy_bb_p (basic_block bb, int code_may_grow)
+copy_bb_p (const_basic_block bb, int code_may_grow)
 {
   int size = 0;
   int max_size = uncond_jump_length;
   0,                                    /* properties_provided */
   0,                                    /* properties_destroyed */
   0,                                    /* todo_flags_start */
-  TODO_dump_func,                       /* todo_flags_finish */
+  TODO_dump_func | TODO_verify_rtl_sharing,/* todo_flags_finish */
   0                                     /* letter */
 };
      splitting possibly introduced more crossjumping opportunities. */
   cfg_layout_initialize (CLEANUP_EXPENSIVE);
 
-  if (flag_sched2_use_traces && flag_schedule_insns_after_reload)
+  if (flag_reorder_blocks || flag_reorder_blocks_and_partition)
     {
-      timevar_push (TV_TRACER);
-      tracer ();
-      timevar_pop (TV_TRACER);
+      reorder_basic_blocks ();
+      cleanup_cfg (CLEANUP_EXPENSIVE);
     }
 
-  if (flag_reorder_blocks || flag_reorder_blocks_and_partition)
-    reorder_basic_blocks ();
-  if (flag_reorder_blocks || flag_reorder_blocks_and_partition
-      || (flag_sched2_use_traces && flag_schedule_insns_after_reload))
-    cleanup_cfg (CLEANUP_EXPENSIVE);
-
   FOR_EACH_BB (bb)
     if (bb->next_bb != EXIT_BLOCK_PTR)
       bb->aux = bb->next_bb;
   0,                                    /* properties_provided */
   0,                                    /* properties_destroyed */
   0,                                    /* todo_flags_start */
-  TODO_dump_func,                       /* todo_flags_finish */
+  TODO_dump_func | TODO_verify_rtl_sharing,/* todo_flags_finish */
   'B'                                   /* letter */
 };
   0,                                    /* properties_provided */
   0,                                    /* properties_destroyed */
   0,                                    /* todo_flags_start */
-  TODO_dump_func,                       /* todo_flags_finish */
+  TODO_dump_func | TODO_verify_rtl_sharing,/* todo_flags_finish */
   0                                     /* letter */
 };