/* Exception handling semantics and decomposition for trees.
- Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
+ Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
Free Software Foundation, Inc.
This file is part of GCC.
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
-#include "rtl.h"
-#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "except.h"
+#include "pointer-set.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-inline.h"
return lookup_stmt_eh_lp_fn (cfun, t);
}
-/* Likewise, but reference a tree expression instead. */
-
-int
-lookup_expr_eh_lp (tree t)
-{
- if (cfun && cfun->eh->throw_stmt_table && t && EXPR_P (t))
- {
- tree_ann_common_t ann = tree_common_ann (t);
- if (ann)
- return ann->lp_nr;
- }
- return 0;
-}
-
-
/* First pass of EH node decomposition. Build up a tree of GIMPLE_TRY_FINALLY
nodes and LABEL_DECL nodes. We will use this during the second phase to
determine if a goto leaves the body of a TRY_FINALLY_EXPR node. */
The eh region creation is straight-forward, but frobbing all the gotos
and such into shape isn't. */
-/* The sequence into which we record all EH stuff. This will be
+/* The sequence into which we record all EH stuff. This will be
placed at the end of the function when we're all done. */
static gimple_seq eh_seq;
/* Record whether an EH region contains something that can throw,
indexed by EH region number. */
-static bitmap eh_region_may_contain_throw;
+static bitmap eh_region_may_contain_throw_map;
/* The GOTO_QUEUE is an array of GIMPLE_GOTO and GIMPLE_RETURN
statements that are seen to escape this GIMPLE_TRY_FINALLY node.
if (tf->goto_queue_active == 0)
return;
replace_goto_queue_stmt_list (tf->top_p_seq, tf);
+ replace_goto_queue_stmt_list (eh_seq, tf);
}
/* Add a new record to the goto queue contained in TF. NEW_STMT is the
labels. */
new_stmt = stmt;
record_in_goto_queue (tf, new_stmt, index, true);
-
}
/* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally
static void
note_eh_region_may_contain_throw (eh_region region)
{
- while (!bitmap_bit_p (eh_region_may_contain_throw, region->index))
+ while (!bitmap_bit_p (eh_region_may_contain_throw_map, region->index))
{
- bitmap_set_bit (eh_region_may_contain_throw, region->index);
+ bitmap_set_bit (eh_region_may_contain_throw_map, region->index);
region = region->outer;
if (region == NULL)
break;
}
}
+/* Check if REGION has been marked as containing a throw. If REGION is
+ NULL, this predicate is false. */
+
+static inline bool
+eh_region_may_contain_throw (eh_region r)
+{
+ return r && bitmap_bit_p (eh_region_may_contain_throw_map, r->index);
+}
+
/* We want to transform
try { body; } catch { stuff; }
to
if (tf->may_throw)
{
- emit_post_landing_pad (&eh_seq, tf->region);
-
seq = lower_try_finally_dup_block (finally, state);
lower_eh_constructs_1 (state, seq);
- gimple_seq_add_seq (&eh_seq, seq);
+ emit_post_landing_pad (&eh_seq, tf->region);
+ gimple_seq_add_seq (&eh_seq, seq);
emit_resx (&eh_seq, tf->region);
}
return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
}
+/* REG is the enclosing region for a possible cleanup region, or the region
+ itself. Returns TRUE if such a region would be unreachable.
+
+ Cleanup regions within a must-not-throw region aren't actually reachable
+ even if there are throwing stmts within them, because the personality
+ routine will call terminate before unwinding. */
+
+static bool
+cleanup_is_dead_in (eh_region reg)
+{
+ while (reg && reg->type == ERT_CLEANUP)
+ reg = reg->outer;
+ return (reg && reg->type == ERT_MUST_NOT_THROW);
+}
/* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY_FINALLY nodes
to a sequence of labels and blocks, plus the exception region trees
struct leh_tf_state this_tf;
struct leh_state this_state;
int ndests;
+ gimple_seq old_eh_seq;
/* Process the try block. */
this_tf.try_finally_expr = tp;
this_tf.top_p = tp;
this_tf.outer = state;
- if (using_eh_for_cleanups_p)
- this_tf.region = gen_eh_region_cleanup (state->cur_region);
+ if (using_eh_for_cleanups_p && !cleanup_is_dead_in (state->cur_region))
+ {
+ this_tf.region = gen_eh_region_cleanup (state->cur_region);
+ this_state.cur_region = this_tf.region;
+ }
else
- this_tf.region = NULL;
+ {
+ this_tf.region = NULL;
+ this_state.cur_region = state->cur_region;
+ }
- this_state.cur_region = this_tf.region;
this_state.ehp_region = state->ehp_region;
this_state.tf = &this_tf;
+ old_eh_seq = eh_seq;
+ eh_seq = NULL;
+
lower_eh_constructs_1 (&this_state, gimple_try_eval(tp));
/* Determine if the try block is escaped through the bottom. */
this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
/* Determine if any exceptions are possible within the try block. */
- if (using_eh_for_cleanups_p)
- this_tf.may_throw = bitmap_bit_p (eh_region_may_contain_throw,
- this_tf.region->index);
+ if (this_tf.region)
+ this_tf.may_throw = eh_region_may_contain_throw (this_tf.region);
if (this_tf.may_throw)
honor_protect_cleanup_actions (state, &this_state, &this_tf);
if (this_tf.goto_queue_map)
pointer_map_destroy (this_tf.goto_queue_map);
+ /* If there was an old (aka outer) eh_seq, append the current eh_seq.
+ If there was no old eh_seq, then the append is trivially already done. */
+ if (old_eh_seq)
+ {
+ if (eh_seq == NULL)
+ eh_seq = old_eh_seq;
+ else
+ {
+ gimple_seq new_eh_seq = eh_seq;
+ eh_seq = old_eh_seq;
+ gimple_seq_add_seq(&eh_seq, new_eh_seq);
+ }
+ }
+
return this_tf.top_p_seq;
}
static gimple_seq
lower_catch (struct leh_state *state, gimple tp)
{
- eh_region try_region;
- struct leh_state this_state;
+ eh_region try_region = NULL;
+ struct leh_state this_state = *state;
gimple_stmt_iterator gsi;
tree out_label;
gimple_seq new_seq;
gimple x;
location_t try_catch_loc = gimple_location (tp);
- try_region = gen_eh_region_try (state->cur_region);
-
- this_state = *state;
- this_state.cur_region = try_region;
+ if (flag_exceptions)
+ {
+ try_region = gen_eh_region_try (state->cur_region);
+ this_state.cur_region = try_region;
+ }
lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));
- if (!bitmap_bit_p (eh_region_may_contain_throw, try_region->index))
+ if (!eh_region_may_contain_throw (try_region))
return gimple_try_eval (tp);
new_seq = NULL;
x = gimple_build_goto (out_label);
gimple_seq_add_stmt (&new_seq, x);
}
+ if (!c->type_list)
+ break;
}
gimple_try_set_cleanup (tp, new_seq);
static gimple_seq
lower_eh_filter (struct leh_state *state, gimple tp)
{
- struct leh_state this_state;
- eh_region this_region;
+ struct leh_state this_state = *state;
+ eh_region this_region = NULL;
gimple inner, x;
gimple_seq new_seq;
inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
- this_region = gen_eh_region_allowed (state->cur_region,
- gimple_eh_filter_types (inner));
- this_state = *state;
- this_state.cur_region = this_region;
+ if (flag_exceptions)
+ {
+ this_region = gen_eh_region_allowed (state->cur_region,
+ gimple_eh_filter_types (inner));
+ this_state.cur_region = this_region;
+ }
lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));
- if (!bitmap_bit_p (eh_region_may_contain_throw, this_region->index))
+ if (!eh_region_may_contain_throw (this_region))
return gimple_try_eval (tp);
new_seq = NULL;
static gimple_seq
lower_eh_must_not_throw (struct leh_state *state, gimple tp)
{
- struct leh_state this_state;
- eh_region this_region;
- gimple inner;
+ struct leh_state this_state = *state;
- inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
+ if (flag_exceptions)
+ {
+ gimple inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
+ eh_region this_region;
- this_region = gen_eh_region_must_not_throw (state->cur_region);
- this_region->u.must_not_throw.failure_decl
- = gimple_eh_must_not_throw_fndecl (inner);
- this_region->u.must_not_throw.failure_loc = gimple_location (tp);
+ this_region = gen_eh_region_must_not_throw (state->cur_region);
+ this_region->u.must_not_throw.failure_decl
+ = gimple_eh_must_not_throw_fndecl (inner);
+ this_region->u.must_not_throw.failure_loc = gimple_location (tp);
- /* In order to get mangling applied to this decl, we must mark it
- used now. Otherwise, pass_ipa_free_lang_data won't think it
- needs to happen. */
- TREE_USED (this_region->u.must_not_throw.failure_decl) = 1;
+ /* In order to get mangling applied to this decl, we must mark it
+ used now. Otherwise, pass_ipa_free_lang_data won't think it
+ needs to happen. */
+ TREE_USED (this_region->u.must_not_throw.failure_decl) = 1;
- this_state = *state;
- this_state.cur_region = this_region;
+ this_state.cur_region = this_region;
+ }
lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));
static gimple_seq
lower_cleanup (struct leh_state *state, gimple tp)
{
- struct leh_state this_state;
- eh_region this_region;
+ struct leh_state this_state = *state;
+ eh_region this_region = NULL;
struct leh_tf_state fake_tf;
gimple_seq result;
+ bool cleanup_dead = cleanup_is_dead_in (state->cur_region);
- /* If not using eh, then exception-only cleanups are no-ops. */
- if (!flag_exceptions)
+ if (flag_exceptions && !cleanup_dead)
{
- result = gimple_try_eval (tp);
- lower_eh_constructs_1 (state, result);
- return result;
+ this_region = gen_eh_region_cleanup (state->cur_region);
+ this_state.cur_region = this_region;
}
- this_region = gen_eh_region_cleanup (state->cur_region);
- this_state = *state;
- this_state.cur_region = this_region;
-
lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));
- if (!bitmap_bit_p (eh_region_may_contain_throw, this_region->index))
+ if (cleanup_dead || !eh_region_may_contain_throw (this_region))
return gimple_try_eval (tp);
/* Build enough of a try-finally state so that we can reuse
case GIMPLE_ASSIGN:
/* If the stmt can throw use a new temporary for the assignment
to a LHS. This makes sure the old value of the LHS is
- available on the EH edge. */
+ available on the EH edge. Only do so for statements that
+ potentially fall thru (no noreturn calls e.g.), otherwise
+ this new assignment might create fake fallthru regions. */
if (stmt_could_throw_p (stmt)
&& gimple_has_lhs (stmt)
+ && gimple_stmt_may_fallthru (stmt)
&& !tree_could_throw_p (gimple_get_lhs (stmt))
&& is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
{
else
{
x = gimple_seq_first_stmt (gimple_try_cleanup (stmt));
- switch (gimple_code (x))
+ if (!x)
{
- case GIMPLE_CATCH:
- replace = lower_catch (state, stmt);
- break;
- case GIMPLE_EH_FILTER:
- replace = lower_eh_filter (state, stmt);
- break;
- case GIMPLE_EH_MUST_NOT_THROW:
- replace = lower_eh_must_not_throw (state, stmt);
- break;
- default:
- replace = lower_cleanup (state, stmt);
- break;
+ replace = gimple_try_eval (stmt);
+ lower_eh_constructs_1 (state, replace);
}
+ else
+ switch (gimple_code (x))
+ {
+ case GIMPLE_CATCH:
+ replace = lower_catch (state, stmt);
+ break;
+ case GIMPLE_EH_FILTER:
+ replace = lower_eh_filter (state, stmt);
+ break;
+ case GIMPLE_EH_MUST_NOT_THROW:
+ replace = lower_eh_must_not_throw (state, stmt);
+ break;
+ default:
+ replace = lower_cleanup (state, stmt);
+ break;
+ }
}
/* Remove the old stmt and insert the transformed sequence
return 0;
finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
- eh_region_may_contain_throw = BITMAP_ALLOC (NULL);
+ eh_region_may_contain_throw_map = BITMAP_ALLOC (NULL);
memset (&null_state, 0, sizeof (null_state));
collect_finally_tree_1 (bodyp, NULL);
gcc_assert (bodyp == gimple_body (current_function_decl));
htab_delete (finally_tree);
- BITMAP_FREE (eh_region_may_contain_throw);
+ BITMAP_FREE (eh_region_may_contain_throw_map);
eh_seq = NULL;
/* If this function needs a language specific EH personality routine
{
new_lp = get_eh_landing_pad_from_number (new_lp_nr);
gcc_assert (new_lp);
-
+
/* Unless CHANGE_REGION is true, the new and old landing pad
had better be associated with the same EH region. */
gcc_assert (change_region || new_lp->region == old_lp->region);
};
-/* At the end of inlining, we can lower EH_DISPATCH. */
+/* At the end of inlining, we can lower EH_DISPATCH. Return true when
+ we have found some duplicate labels and removed some edges. */
-static void
+static bool
lower_eh_dispatch (basic_block src, gimple stmt)
{
gimple_stmt_iterator gsi;
eh_region r;
tree filter, fn;
gimple x;
+ bool redirected = false;
region_nr = gimple_eh_dispatch_region (stmt);
r = get_eh_region_from_number (region_nr);
eh_catch c;
edge_iterator ei;
edge e;
+ struct pointer_set_t *seen_values = pointer_set_create ();
/* Collect the labels for a switch. Zero the post_landing_pad
field because we'll no longer have anything keeping these labels
for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
{
tree tp_node, flt_node, lab = c->label;
+ bool have_label = false;
c->label = NULL;
tp_node = c->type_list;
}
do
{
- tree t = build3 (CASE_LABEL_EXPR, void_type_node,
- TREE_VALUE (flt_node), NULL, lab);
- VEC_safe_push (tree, heap, labels, t);
+ /* Filter out duplicate labels that arise when this handler
+ is shadowed by an earlier one. When no labels are
+ attached to the handler anymore, we remove
+ the corresponding edge and then we delete unreachable
+ blocks at the end of this pass. */
+ if (! pointer_set_contains (seen_values, TREE_VALUE (flt_node)))
+ {
+ tree t = build3 (CASE_LABEL_EXPR, void_type_node,
+ TREE_VALUE (flt_node), NULL, lab);
+ VEC_safe_push (tree, heap, labels, t);
+ pointer_set_insert (seen_values, TREE_VALUE (flt_node));
+ have_label = true;
+ }
tp_node = TREE_CHAIN (tp_node);
flt_node = TREE_CHAIN (flt_node);
}
while (tp_node);
+ if (! have_label)
+ {
+ remove_edge (find_edge (src, label_to_block (lab)));
+ redirected = true;
+ }
}
/* Clean up the edge flags. */
VEC_free (tree, heap, labels);
}
+ pointer_set_destroy (seen_values);
}
break;
/* Replace the EH_DISPATCH with the SWITCH or COND generated above. */
gsi_remove (&gsi, true);
+ return redirected;
}
static unsigned
{
basic_block bb;
bool any_rewritten = false;
+ bool redirected = false;
assign_filter_values ();
gimple last = last_stmt (bb);
if (last && gimple_code (last) == GIMPLE_EH_DISPATCH)
{
- lower_eh_dispatch (bb, last);
+ redirected |= lower_eh_dispatch (bb, last);
any_rewritten = true;
}
}
+ if (redirected)
+ delete_unreachable_blocks ();
return any_rewritten ? TODO_update_ssa_only_virtuals : 0;
}
fprintf (dump_file, "Removing unreachable landing pad %d\n", lp_nr);
remove_eh_landing_pad (lp);
}
-
+
if (dump_file)
{
fprintf (dump_file, "\n\nAfter removal of unreachable regions:\n");
if ((e_in->flags & EDGE_EH) == 0 || (e_out->flags & EDGE_EH) != 0)
return false;
- /* The block must be empty except for the labels. */
- if (!gsi_end_p (gsi_after_labels (bb)))
+ /* The block must be empty except for the labels and debug insns. */
+ gsi = gsi_after_labels (bb);
+ if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
+ gsi_next_nondebug (&gsi);
+ if (!gsi_end_p (gsi))
return false;
/* The destination block must not already have a landing pad
if (find_edge (e_in->src, e_out->dest))
return false;
- /* ??? I can't imagine there would be PHI nodes, since by nature
- of critical edge splitting this block should never have been
- a dominance frontier. If cfg cleanups somehow confuse this,
- due to single edges in and out we ought to have degenerate PHIs
- and can easily propagate the PHI arguments. */
- gcc_assert (gimple_seq_empty_p (phi_nodes (bb)));
+ /* ??? We can get degenerate phis due to cfg cleanups. I would have
+ thought this should have been cleaned up by a phicprop pass, but
+ that doesn't appear to handle virtuals. Propagate by hand. */
+ if (!gimple_seq_empty_p (phi_nodes (bb)))
+ {
+ for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); )
+ {
+ gimple use_stmt, phi = gsi_stmt (gsi);
+ tree lhs = gimple_phi_result (phi);
+ tree rhs = gimple_phi_arg_def (phi, 0);
+ use_operand_p use_p;
+ imm_use_iterator iter;
+
+ FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
+ {
+ FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
+ SET_USE (use_p, rhs);
+ }
+
+ if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
+ SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1;
+
+ remove_phi_node (&gsi, true);
+ }
+ }
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Unsplit EH landing pad %d to block %i.\n",
static bool
cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb,
- edge old_bb_out)
+ edge old_bb_out, bool change_region)
{
gimple_stmt_iterator ngsi, ogsi;
edge_iterator ei;
for (ei = ei_start (old_bb->preds); (e = ei_safe_edge (ei)); )
if (e->flags & EDGE_EH)
{
- redirect_eh_edge_1 (e, new_bb, true);
+ redirect_eh_edge_1 (e, new_bb, change_region);
redirect_edge_succ (e, new_bb);
flush_pending_stmts (e);
}
}
/* A subroutine of cleanup_empty_eh. Handle more complex cases of
- unsplitting than unsplit_eh was prepared to handle, e.g. when
+ unsplitting than unsplit_eh was prepared to handle, e.g. when
multiple incoming edges and phis are involved. */
static bool
-cleanup_empty_eh_unsplit (basic_block bb, edge e_out, eh_landing_pad olp)
+cleanup_empty_eh_unsplit (basic_block bb, edge e_out, eh_landing_pad lp)
{
gimple_stmt_iterator gsi;
- eh_landing_pad nlp;
tree lab;
/* We really ought not have totally lost everything following
be a successor. */
gcc_assert (e_out != NULL);
- /* Look for an EH label in the successor block. */
+ /* The destination block must not already have a landing pad
+ for a different region. */
lab = NULL;
for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
+ int lp_nr;
+
if (gimple_code (stmt) != GIMPLE_LABEL)
break;
lab = gimple_label_label (stmt);
- if (EH_LANDING_PAD_NR (lab))
- goto found;
+ lp_nr = EH_LANDING_PAD_NR (lab);
+ if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
+ return false;
}
- return false;
- found:
-
- /* The other label had better be part of the same EH region. Given that
- we've not lowered RESX, there should be no way to have a totally empty
- landing pad that crosses to another EH region. */
- nlp = get_eh_landing_pad_from_number (EH_LANDING_PAD_NR (lab));
- gcc_assert (nlp->region == olp->region);
/* Attempt to move the PHIs into the successor block. */
- if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out))
+ if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, false))
{
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file,
- "Unsplit EH landing pad %d to block %d via lp %d.\n",
- olp->index, e_out->dest->index, nlp->index);
-
- remove_eh_landing_pad (olp);
+ "Unsplit EH landing pad %d to block %i "
+ "(via cleanup_empty_eh).\n",
+ lp->index, e_out->dest->index);
return true;
}
landing pad block. If the merge succeeds, we'll already have redirected
all the EH edges. The handler itself will go unreachable if there were
no normal edges. */
- if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out))
+ if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, true))
goto succeed;
/* Finally, if all input edges are EH edges, then we can (potentially)