/* The location of the finally is either the last stmt in the finally
block or the location of the TRY_FINALLY itself. */
- finally_loc = gimple_seq_last_stmt (tf->top_p_seq) != NULL ?
- gimple_location (gimple_seq_last_stmt (tf->top_p_seq))
- : tf_loc;
+ x = gimple_seq_last_stmt (finally);
+ finally_loc = x ? gimple_location (x) : tf_loc;
/* Lower the finally block itself. */
lower_eh_constructs_1 (state, finally);
}
if (!optimize)
- return ndests == 1;
+ {
+ gimple_stmt_iterator gsi;
+
+ if (ndests == 1)
+ return true;
+
+ for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
+ {
+ gimple stmt = gsi_stmt (gsi);
+ if (!is_gimple_debug (stmt) && !gimple_clobber_p (stmt))
+ return false;
+ }
+ return true;
+ }
/* Finally estimate N times, plus N gotos. */
f_estimate = count_insns_seq (finally, &eni_size_weights);
}
}
+/* Try to sink var = {v} {CLOBBER} stmts followed just by
+   internal throw to successor BB.  Callers invoke this on a BB
+   whose last stmt is a RESX that does not throw externally; the
+   clobbers preceding that RESX are moved to the head of the single
+   EH successor block.  Returns TODO_update_ssa_only_virtuals when
+   any clobbers were sunk, 0 otherwise.  */
+
+static int
+sink_clobbers (basic_block bb)
+{
+  edge e;
+  edge_iterator ei;
+  gimple_stmt_iterator gsi, dgsi;
+  basic_block succbb;
+  bool any_clobbers = false;
+
+  /* Only optimize if BB has a single EH successor and
+     all predecessor edges are EH too.  */
+  if (!single_succ_p (bb)
+      || (single_succ_edge (bb)->flags & EDGE_EH) == 0)
+    return 0;
+
+  FOR_EACH_EDGE (e, ei, bb->preds)
+    {
+      if ((e->flags & EDGE_EH) == 0)
+	return 0;
+    }
+
+  /* And BB contains only CLOBBER stmts before the final
+     RESX.  The initial gsi_prev skips the trailing RESX itself;
+     the backward walk stops at the block's leading labels.  */
+  gsi = gsi_last_bb (bb);
+  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
+    {
+      gimple stmt = gsi_stmt (gsi);
+      if (is_gimple_debug (stmt))
+	continue;
+      if (gimple_code (stmt) == GIMPLE_LABEL)
+	break;
+      /* Only clobbers of a bare decl lhs are handled; give up on
+	 anything else, including clobbers of an SSA_NAME lhs.  */
+      if (!gimple_clobber_p (stmt)
+	  || TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME)
+	return 0;
+      any_clobbers = true;
+    }
+  if (!any_clobbers)
+    return 0;
+
+  /* Second pass: detach each clobber from BB and re-insert it at
+     the start of the EH successor, just after its labels.  */
+  succbb = single_succ (bb);
+  dgsi = gsi_after_labels (succbb);
+  gsi = gsi_last_bb (bb);
+  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
+    {
+      gimple stmt = gsi_stmt (gsi);
+      tree vdef;
+      if (is_gimple_debug (stmt))
+	continue;
+      if (gimple_code (stmt) == GIMPLE_LABEL)
+	break;
+      /* NOTE(review): unlink_stmt_vdef redirects uses of this
+	 stmt's vdef to its vuse but seems to leave the stmt's own
+	 vdef operand in place, which is read just below -- confirm
+	 against the SSA operand API.  */
+      unlink_stmt_vdef (stmt);
+      gsi_remove (&gsi, false);
+      vdef = gimple_vdef (stmt);
+      if (vdef && TREE_CODE (vdef) == SSA_NAME)
+	{
+	  /* Replace the SSA_NAME virtual operands with the bare
+	     symbol and mark it for renaming; the returned
+	     TODO_update_ssa_only_virtuals makes the pass manager
+	     rebuild the virtual SSA web at the new location.  */
+	  vdef = SSA_NAME_VAR (vdef);
+	  mark_sym_for_renaming (vdef);
+	  gimple_set_vdef (stmt, vdef);
+	  gimple_set_vuse (stmt, vdef);
+	}
+      release_defs (stmt);
+      gsi_insert_before (&dgsi, stmt, GSI_SAME_STMT);
+    }
+
+  return TODO_update_ssa_only_virtuals;
+}
+
/* At the end of inlining, we can lower EH_DISPATCH. Return true when
we have found some duplicate labels and removed some edges. */
execute_lower_eh_dispatch (void)
{
basic_block bb;
- bool any_rewritten = false;
+ int flags = 0;
bool redirected = false;
assign_filter_values ();
if (gimple_code (last) == GIMPLE_EH_DISPATCH)
{
redirected |= lower_eh_dispatch (bb, last);
- any_rewritten = true;
+ flags |= TODO_update_ssa_only_virtuals;
+ }
+ else if (gimple_code (last) == GIMPLE_RESX)
+ {
+ if (stmt_can_throw_external (last))
+ optimize_clobbers (bb);
+ else
+ flags |= sink_clobbers (bb);
}
- else if (gimple_code (last) == GIMPLE_RESX
- && stmt_can_throw_external (last))
- optimize_clobbers (bb);
}
if (redirected)
delete_unreachable_blocks ();
- return any_rewritten ? TODO_update_ssa_only_virtuals : 0;
+ return flags;
}
static bool
{
eh_region r;
int i;
+ sbitmap r_reachable;
+ basic_block bb;
+
+ r_reachable = sbitmap_alloc (VEC_length (eh_region, cfun->eh->region_array));
+ sbitmap_zero (r_reachable);
+
+ FOR_EACH_BB (bb)
+ {
+ gimple stmt = last_stmt (bb);
+ if (stmt)
+ /* Avoid removing regions referenced from RESX/EH_DISPATCH. */
+ switch (gimple_code (stmt))
+ {
+ case GIMPLE_RESX:
+ SET_BIT (r_reachable, gimple_resx_region (stmt));
+ break;
+ case GIMPLE_EH_DISPATCH:
+ SET_BIT (r_reachable, gimple_eh_dispatch_region (stmt));
+ break;
+ default:
+ break;
+ }
+ }
for (i = 1; VEC_iterate (eh_region, cfun->eh->region_array, i, r); ++i)
- if (r && r->landing_pads == NULL && r->type != ERT_MUST_NOT_THROW)
+ if (r && r->landing_pads == NULL && r->type != ERT_MUST_NOT_THROW
+ && !TEST_BIT (r_reachable, i))
{
if (dump_file)
fprintf (dump_file, "Removing unreachable region %d\n", i);
remove_eh_handler (r);
}
+
+ sbitmap_free (r_reachable);
}
/* Undo critical edge splitting on an EH landing pad. Earlier, we
edge_iterator ei;
edge e, e_out;
bool has_non_eh_pred;
+ bool ret = false;
int new_lp_nr;
/* There can be zero or one edges out of BB. This is the quickest test. */
default:
return false;
}
+
+ resx = last_stmt (bb);
+ if (resx && is_gimple_resx (resx))
+ {
+ if (stmt_can_throw_external (resx))
+ optimize_clobbers (bb);
+ else if (sink_clobbers (bb))
+ ret = true;
+ }
+
gsi = gsi_after_labels (bb);
/* Make sure to skip debug statements. */
{
/* For the degenerate case of an infinite loop bail out. */
if (infinite_empty_loop_p (e_out))
- return false;
+ return ret;
- return cleanup_empty_eh_unsplit (bb, e_out, lp);
+ return ret | cleanup_empty_eh_unsplit (bb, e_out, lp);
}
/* The block should consist only of a single RESX statement, modulo a
resx = gsi_stmt (gsi);
}
if (!is_gimple_resx (resx))
- return false;
+ return ret;
gcc_assert (gsi_one_before_end_p (gsi));
/* Determine if there are non-EH edges, or resx edges into the handler. */
return true;
}
- return false;
+ return ret;
succeed:
if (dump_file && (dump_flags & TDF_DETAILS))