/* Implements exception handling.
Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
- 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
+ 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
Contributed by Mike Stump <mrs@cygnus.com>.
This file is part of GCC.
/* Each region does exactly one thing. */
enum eh_region_type
- {
+ {
ERT_UNKNOWN = 0,
ERT_CLEANUP,
ERT_TRY,
struct eh_region_u_fixup {
tree cleanup_exp;
struct eh_region *real_region;
+ bool resolved;
} GTY ((tag ("ERT_FIXUP"))) fixup;
} GTY ((desc ("%0.type"))) u;
/* Entry point for this region's handler before landing pads are built. */
rtx label;
+ tree tree_label;
/* Entry point for this region's handler from the runtime eh library. */
rtx landing_pad;
static struct eh_region *expand_eh_region_end (void);
-static rtx get_exception_filter (struct function *);
-
-static void collect_eh_region_array (void);
static void resolve_fixup_regions (void);
static void remove_fixup_regions (void);
static void remove_unreachable_regions (rtx);
static void remove_eh_handler (struct eh_region *);
static int for_each_eh_label_1 (void **, void *);
-struct reachable_info;
-
/* The return value of reachable_next_level. */
enum reachable_code
{
RNL_BLOCKED
};
-static int check_handled (tree, tree);
-static void add_reachable_handler (struct reachable_info *,
- struct eh_region *, struct eh_region *);
+struct reachable_info;
static enum reachable_code reachable_next_level (struct eh_region *, tree,
struct reachable_info *);
{
tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
- sjlj_fc_type_node = (*lang_hooks.types.make_type) (RECORD_TYPE);
+ sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);
f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
build_pointer_type (sjlj_fc_type_node));
DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
tmp = build_index_type (build_int_2 (4 - 1, 0));
- tmp = build_array_type ((*lang_hooks.types.type_for_mode) (word_mode, 1),
+ tmp = build_array_type (lang_hooks.types.type_for_mode (word_mode, 1),
tmp);
f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
#endif
#else
- /* This is 2 for builtin_setjmp, plus whatever the target requires
- via STACK_SAVEAREA_MODE (SAVE_NONLOCAL). */
- tmp = build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
- / GET_MODE_SIZE (Pmode)) + 2 - 1, 0);
+ /* builtin_setjmp takes a pointer to 5 words. */
+ tmp = build_int_2 (5 * BITS_PER_WORD / POINTER_SIZE - 1, 0);
#endif
tmp = build_index_type (tmp);
tmp = build_array_type (ptr_type_node, tmp);
cfun->eh = ggc_alloc_cleared (sizeof (struct eh_status));
}
\f
+/* Routines to generate the exception tree somewhat directly.
+ These are used from tree-eh.c when processing exception related
+ nodes during tree optimization. */
+
+/* Allocate a new EH region of TYPE, link it into the region tree as the
+   innermost child of OUTER (or as a new root of cfun's region tree when
+   OUTER is null), and assign it the next region number.  */
+
+static struct eh_region *
+gen_eh_region (enum eh_region_type type, struct eh_region *outer)
+{
+  struct eh_region *new;
+
+#ifdef ENABLE_CHECKING
+  if (! doing_eh (0))
+    abort ();
+#endif
+
+  /* Insert a new blank region as a leaf in the tree. */
+  new = ggc_alloc_cleared (sizeof (*new));
+  new->type = type;
+  new->outer = outer;
+  if (outer)
+    {
+      new->next_peer = outer->inner;
+      outer->inner = new;
+    }
+  else
+    {
+      new->next_peer = cfun->eh->region_tree;
+      cfun->eh->region_tree = new;
+    }
+
+  new->region_number = ++cfun->eh->last_region_number;
+
+  return new;
+}
+
+/* Create an ERT_CLEANUP region inside OUTER.  PREV_TRY is recorded in
+   the new region; presumably the innermost enclosing try region --
+   confirm against callers.  */
+
+struct eh_region *
+gen_eh_region_cleanup (struct eh_region *outer, struct eh_region *prev_try)
+{
+  struct eh_region *cleanup = gen_eh_region (ERT_CLEANUP, outer);
+  cleanup->u.cleanup.prev_try = prev_try;
+  return cleanup;
+}
+
+/* Create an ERT_TRY region inside OUTER.  Catch handlers are attached
+   afterward via gen_eh_region_catch.  */
+
+struct eh_region *
+gen_eh_region_try (struct eh_region *outer)
+{
+  return gen_eh_region (ERT_TRY, outer);
+}
+
+/* Create an ERT_CATCH region catching TYPE_OR_LIST (a single type, a
+   TREE_LIST of types, or null for a catch-all) and append it to the
+   catch list of try region T.  The new region is created under T's
+   outer region, i.e. as a sibling of T.  */
+
+struct eh_region *
+gen_eh_region_catch (struct eh_region *t, tree type_or_list)
+{
+  struct eh_region *c, *l;
+  tree type_list, type_node;
+
+  /* Ensure to always end up with a type list to normalize further
+     processing, then register each type against the runtime types map. */
+  type_list = type_or_list;
+  if (type_or_list)
+    {
+      if (TREE_CODE (type_or_list) != TREE_LIST)
+	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
+
+      type_node = type_list;
+      for (; type_node; type_node = TREE_CHAIN (type_node))
+	add_type_for_runtime (TREE_VALUE (type_node));
+    }
+
+  /* Link the new catch onto the tail of T's doubly linked catch list.  */
+  c = gen_eh_region (ERT_CATCH, t->outer);
+  c->u.catch.type_list = type_list;
+  l = t->u.try.last_catch;
+  c->u.catch.prev_catch = l;
+  if (l)
+    l->u.catch.next_catch = c;
+  else
+    t->u.try.catch = c;
+  t->u.try.last_catch = c;
+
+  return c;
+}
+
+/* Create an ERT_ALLOWED_EXCEPTIONS region inside OUTER, restricting
+   propagation to the types in ALLOWED (a TREE_LIST); each listed type
+   is registered with the runtime types map.  */
+
+struct eh_region *
+gen_eh_region_allowed (struct eh_region *outer, tree allowed)
+{
+  struct eh_region *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
+  region->u.allowed.type_list = allowed;
+
+  for (; allowed ; allowed = TREE_CHAIN (allowed))
+    add_type_for_runtime (TREE_VALUE (allowed));
+
+  return region;
+}
+
+/* Create an ERT_MUST_NOT_THROW region inside OUTER.  */
+
+struct eh_region *
+gen_eh_region_must_not_throw (struct eh_region *outer)
+{
+  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
+}
+
+/* Return REGION's index in cfun's EH region array.  */
+
+int
+get_eh_region_number (struct eh_region *region)
+{
+  return region->region_number;
+}
+
+/* Return true if REGION has been flagged as possibly containing a throw.  */
+
+bool
+get_eh_region_may_contain_throw (struct eh_region *region)
+{
+  return region->may_contain_throw;
+}
+
+/* Return the tree-level handler label of REGION.  */
+
+tree
+get_eh_region_tree_label (struct eh_region *region)
+{
+  return region->tree_label;
+}
+
+/* Set the tree-level handler label of REGION to LAB.  */
+
+void
+set_eh_region_tree_label (struct eh_region *region, tree lab)
+{
+  region->tree_label = lab;
+}
+\f
/* Start an exception handling region. All instructions emitted
after this point are considered to be part of the region until
expand_eh_region_end is invoked. */
void
expand_eh_region_start (void)
{
-  struct eh_region *new_region;
-  struct eh_region *cur_region;
+  struct eh_region *new;
rtx note;
if (! doing_eh (0))
return;
-  /* Insert a new blank region as a leaf in the tree. */
-  new_region = ggc_alloc_cleared (sizeof (*new_region));
-  cur_region = cfun->eh->cur_region;
-  new_region->outer = cur_region;
-  if (cur_region)
-    {
-      new_region->next_peer = cur_region->inner;
-      cur_region->inner = new_region;
-    }
-  else
-    {
-      new_region->next_peer = cfun->eh->region_tree;
-      cfun->eh->region_tree = new_region;
-    }
-  cfun->eh->cur_region = new_region;
+  /* ERT_UNKNOWN: the region's kind is determined later, when the
+     region is ended.  */
+  new = gen_eh_region (ERT_UNKNOWN, cfun->eh->cur_region);
+  cfun->eh->cur_region = new;
/* Create a note marking the start of this region. */
-  new_region->region_number = ++cfun->eh->last_region_number;
note = emit_note (NOTE_INSN_EH_REGION_BEG);
-  NOTE_EH_HANDLER (note) = new_region->region_number;
+  NOTE_EH_HANDLER (note) = new->region_number;
}
/* Common code to end a region. Returns the region just ended. */
return cur_region;
}
+/* Expand HANDLER, which is the operand 1 of a TRY_CATCH_EXPR. Catch
+   blocks and C++ exception-specifications are handled specially. */
+
+void
+expand_eh_handler (tree handler)
+{
+  tree inner = expr_first (handler);
+
+  switch (TREE_CODE (inner))
+    {
+    case CATCH_EXPR:
+      expand_start_all_catch ();
+      expand_expr (handler, const0_rtx, VOIDmode, 0);
+      expand_end_all_catch ();
+      break;
+
+    case EH_FILTER_EXPR:
+      /* NOTE(review): the EH_FILTER_* accessors are applied to HANDLER
+	 rather than INNER; presumably expr_first (handler) == handler
+	 here -- confirm.  */
+      if (EH_FILTER_MUST_NOT_THROW (handler))
+	expand_eh_region_end_must_not_throw (EH_FILTER_FAILURE (handler));
+      else
+	expand_eh_region_end_allowed (EH_FILTER_TYPES (handler),
+				      EH_FILTER_FAILURE (handler));
+      break;
+
+    default:
+      /* Anything else ends the region as a plain cleanup.  */
+      expand_eh_region_end_cleanup (handler);
+      break;
+    }
+}
+
/* End an exception handling region for a cleanup. HANDLER is an
expression to expand for the cleanup. */
emit_label (around_label);
}
+/* Expand a RESX_EXPR: emit a RESX jump insn for the region named by
+   operand 0 and record it as that region's resume point.  A barrier
+   follows, since control does not fall through a resume.  */
+
+void
+expand_resx_expr (tree exp)
+{
+  int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
+  struct eh_region *reg = cfun->eh->region_array[region_nr];
+
+  reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
+  emit_barrier ();
+}
+
/* End an exception handling region for a try block, and prepares
for subsequent calls to expand_start_catch. */
emit_jump (region->u.try.continue_label);
}
-/* Begin a catch clause. TYPE is the type caught, a list of such types, or
- null if this is a catch-all clause. Providing a type list enables to
- associate the catch region with potentially several exception types, which
- is useful e.g. for Ada. */
+/* Begin a catch clause. TYPE is the type caught, a list of such
+   types, (in the case of Java) an ADDR_EXPR which points to the
+   runtime type to match, or null if this is a catch-all
+   clause. Providing a type list enables to associate the catch region
+   with potentially several exception types, which is useful e.g. for
+   Ada. */
void
expand_start_catch (tree type_or_list)
{
-  struct eh_region *t, *c, *l;
-  tree type_list;
+  struct eh_region *c;
+  rtx note;
if (! doing_eh (0))
return;
-  type_list = type_or_list;
-
-  if (type_or_list)
-    {
-      /* Ensure to always end up with a type list to normalize further
-	 processing, then register each type against the runtime types
-	 map. */
-      tree type_node;
-
-      if (TREE_CODE (type_or_list) != TREE_LIST)
-	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
-
-      type_node = type_list;
-      for (; type_node; type_node = TREE_CHAIN (type_node))
-	add_type_for_runtime (TREE_VALUE (type_node));
-    }
-
-  expand_eh_region_start ();
+  /* Type normalization and region linkage now live in
+     gen_eh_region_catch.  */
+  c = gen_eh_region_catch (cfun->eh->try_region, type_or_list);
+  cfun->eh->cur_region = c;
-  t = cfun->eh->try_region;
-  c = cfun->eh->cur_region;
-  c->type = ERT_CATCH;
-  c->u.catch.type_list = type_list;
c->label = gen_label_rtx ();
-
-  l = t->u.try.last_catch;
-  c->u.catch.prev_catch = l;
-  if (l)
-    l->u.catch.next_catch = c;
-  else
-    t->u.try.catch = c;
-  t->u.try.last_catch = c;
-
emit_label (c->label);
+
+  /* Emit a region-begin note carrying the catch region's number.  */
+  note = emit_note (NOTE_INSN_EH_REGION_BEG);
+  NOTE_EH_HANDLER (note) = c->region_number;
}
/* End a catch clause. Control will resume after the try/catch block. */
void
expand_end_catch (void)
{
-  struct eh_region *try_region;
-
if (! doing_eh (0))
return;
expand_eh_region_end ();
-  try_region = cfun->eh->try_region;
-
-  emit_jump (try_region->u.try.continue_label);
+  /* Jump to the continuation point of the enclosing try.  */
+  emit_jump (cfun->eh->try_region->u.try.continue_label);
}
/* End a sequence of catch handlers for a try block. */
call to a function which itself may contain a throw. */
void
-note_eh_region_may_contain_throw (void)
+note_eh_region_may_contain_throw (struct eh_region *region)
{
- struct eh_region *region;
-
- region = cfun->eh->cur_region;
while (region && !region->may_contain_throw)
{
region->may_contain_throw = 1;
}
}
+/* Convenience wrapper: flag the current EH region as possibly
+   containing a throw (see note_eh_region_may_contain_throw).  */
+
+void
+note_current_region_may_contain_throw (void)
+{
+  note_eh_region_may_contain_throw (cfun->eh->cur_region);
+}
+
+
/* Return an rtl expression for a pointer to the exception object
within a handler. */
/* Return an rtl expression for the exception dispatch filter
within a handler. */
-static rtx
+rtx
get_exception_filter (struct function *fun)
{
rtx filter = fun->eh->filter;
collect the regions this way as in expand_eh_region_start, but
without having to realloc memory. */
-static void
+void
collect_eh_region_array (void)
{
struct eh_region **array, *i;
}
+/* Resolve the real_region of FIXUP: locate the ERT_CLEANUP region whose
+   cleanup expression matches FIXUP's, then skip over any enclosing
+   fixup (resolving it recursively first if needed) so real_region never
+   points at another ERT_FIXUP.  */
+
static void
+resolve_one_fixup_region (struct eh_region *fixup)
+{
+  struct eh_region *cleanup, *real;
+  int j, n;
+
+  n = cfun->eh->last_region_number;
+  cleanup = 0;
+
+  for (j = 1; j <= n; ++j)
+    {
+      cleanup = cfun->eh->region_array[j];
+      if (cleanup && cleanup->type == ERT_CLEANUP
+	  && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
+	break;
+    }
+  /* A matching cleanup must exist.  */
+  if (j > n)
+    abort ();
+
+  real = cleanup->outer;
+  if (real && real->type == ERT_FIXUP)
+    {
+      if (!real->u.fixup.resolved)
+	resolve_one_fixup_region (real);
+      real = real->u.fixup.real_region;
+    }
+
+  fixup->u.fixup.real_region = real;
+  fixup->u.fixup.resolved = true;
+}
+
+/* Fill in the real_region field of every not-yet-resolved ERT_FIXUP
+   region in the region array.  */
+
+static void
resolve_fixup_regions (void)
{
-  int i, j, n = cfun->eh->last_region_number;
+  int i, n = cfun->eh->last_region_number;
for (i = 1; i <= n; ++i)
{
struct eh_region *fixup = cfun->eh->region_array[i];
-      struct eh_region *cleanup = 0;
-      if (! fixup || fixup->type != ERT_FIXUP)
+      if (!fixup || fixup->type != ERT_FIXUP || fixup->u.fixup.resolved)
continue;
-      for (j = 1; j <= n; ++j)
-	{
-	  cleanup = cfun->eh->region_array[j];
-	  if (cleanup && cleanup->type == ERT_CLEANUP
-	      && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
-	    break;
-	}
-      if (j > n)
-	abort ();
-
-      fixup->u.fixup.real_region = cleanup->outer;
+      resolve_one_fixup_region (fixup);
}
}
abort ();
uid_region_num[INSN_UID (r->label)] = i;
}
- if (r->type == ERT_TRY && r->u.try.continue_label)
- {
- if (uid_region_num[INSN_UID (r->u.try.continue_label)])
- abort ();
- uid_region_num[INSN_UID (r->u.try.continue_label)] = i;
- }
}
for (insn = insns; insn; insn = NEXT_INSN (insn))
- reachable[uid_region_num[INSN_UID (insn)]] = true;
+ {
+ reachable[uid_region_num[INSN_UID (insn)]] = true;
+
+ if (GET_CODE (insn) == CALL_INSN
+ && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
+ for (i = 0; i < 3; i++)
+ {
+ rtx sub = XEXP (PATTERN (insn), i);
+ for (; sub ; sub = NEXT_INSN (sub))
+ reachable[uid_region_num[INSN_UID (sub)]] = true;
+ }
+ }
for (i = cfun->eh->last_region_number; i > 0; --i)
{
r = cfun->eh->region_array[i];
if (r && r->region_number == i && !reachable[i])
{
- /* Don't remove ERT_THROW regions if their outer region
- is reachable. */
- if (r->type == ERT_THROW
- && r->outer
- && reachable[r->outer->region_number])
- continue;
+ bool kill_it = true;
+ switch (r->type)
+ {
+ case ERT_THROW:
+ /* Don't remove ERT_THROW regions if their outer region
+ is reachable. */
+ if (r->outer && reachable[r->outer->region_number])
+ kill_it = false;
+ break;
+
+ case ERT_MUST_NOT_THROW:
+ /* MUST_NOT_THROW regions are implementable solely in the
+	     runtime, but their existence continues to affect calls
+ within that region. Never delete them here. */
+ kill_it = false;
+ break;
+
+ case ERT_TRY:
+ {
+ /* TRY regions are reachable if any of its CATCH regions
+ are reachable. */
+ struct eh_region *c;
+ for (c = r->u.try.catch; c ; c = c->u.catch.next_catch)
+ if (reachable[c->region_number])
+ {
+ kill_it = false;
+ break;
+ }
+ break;
+ }
- remove_eh_handler (r);
+ default:
+ break;
+ }
+
+ if (kill_it)
+ remove_eh_handler (r);
}
}
abort ();
}
+/* For each region with a tree-level handler label whose rtl has been
+   set, copy that rtl into the region's rtl label field.  */
+
+static void
+collect_rtl_labels_from_trees (void)
+{
+  int i, n = cfun->eh->last_region_number;
+  for (i = 1; i <= n; ++i)
+    {
+      struct eh_region *reg = cfun->eh->region_array[i];
+      if (reg && reg->tree_label)
+	reg->label = DECL_RTL_IF_SET (reg->tree_label);
+    }
+}
+
+/* Convert EH region notes into region-array form.  When the region
+   array already exists (we came from the tree optimizers) only the rtl
+   labels need collecting; otherwise build the array from the region
+   notes and resolve/remove fixups.  Finally drop regions unreachable
+   from the insn stream.  */
+
void
convert_from_eh_region_ranges (void)
{
-  int *stack;
-  rtx insns;
+  rtx insns = get_insns ();
+
+  if (cfun->eh->region_array)
+    {
+      /* If the region array already exists, assume we're coming from
+	 optimize_function_tree.  In this case all we need to do is
+	 collect the rtl labels that correspond to the tree labels
+	 that we allocated earlier.  */
+      collect_rtl_labels_from_trees ();
+    }
+  else
+    {
+      int *stack;
+
+      collect_eh_region_array ();
+      resolve_fixup_regions ();
-  collect_eh_region_array ();
-  resolve_fixup_regions ();
+      stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
+      convert_from_eh_region_ranges_1 (&insns, stack, 0);
+      free (stack);
-  stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
-  insns = get_insns ();
-  convert_from_eh_region_ranges_1 (&insns, stack, 0);
-  free (stack);
+      remove_fixup_regions ();
+    }
-  remove_fixup_regions ();
remove_unreachable_regions (insns);
}
t2r_hash (const void *pentry)
{
tree entry = (tree) pentry;
- return TYPE_HASH (TREE_PURPOSE (entry));
+ return TREE_HASH (TREE_PURPOSE (entry));
}
static void
tree *slot;
slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
- TYPE_HASH (type), INSERT);
+ TREE_HASH (type), INSERT);
if (*slot == NULL)
{
tree runtime = (*lang_eh_runtime_type) (type);
tree *slot;
slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
- TYPE_HASH (type), NO_INSERT);
+ TREE_HASH (type), NO_INSERT);
/* We should have always inserted the data earlier. */
return TREE_VALUE (*slot);
ttypes_filter_hash (const void *pentry)
{
const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
- return TYPE_HASH (entry->t);
+ return TREE_HASH (entry->t);
}
/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
tree list;
for (list = entry->t; list ; list = TREE_CHAIN (list))
- h = (h << 5) + (h >> 27) + TYPE_HASH (TREE_VALUE (list));
+ h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
return h;
}
-/* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
- up the search. Return the filter value to be used. */
+/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
+ to speed up the search. Return the filter value to be used. */
static int
add_ttypes_entry (htab_t ttypes_hash, tree type)
struct ttypes_filter **slot, *n;
slot = (struct ttypes_filter **)
- htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);
+ htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);
if ((n = *slot) == NULL)
{
htab_delete (ehspec);
}
+/* Emit SEQ into a new basic block just before INSN (which is assumed to
+   be the first instruction of some existing BB) and return the newly
+   produced block.  */
+static basic_block
+emit_to_new_bb_before (rtx seq, rtx insn)
+{
+  rtx last;
+  basic_block bb;
+  edge e;
+
+  /* If there happens to be a fallthru edge (possibly created by a
+     cleanup_cfg call), we don't want it to go into the newly created
+     landing pad or other EH construct.  */
+  for (e = BLOCK_FOR_INSN (insn)->pred; e; e = e->pred_next)
+    if (e->flags & EDGE_FALLTHRU)
+      force_nonfallthru (e);
+  last = emit_insn_before (seq, insn);
+  /* A barrier cannot end a basic block; back up to the insn before it.  */
+  if (GET_CODE (last) == BARRIER)
+    last = PREV_INSN (last);
+  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
+  update_bb_for_insn (bb);
+  bb->flags |= BB_SUPERBLOCK;
+  return bb;
+}
+
/* Generate the code to actually handle exceptions, which will follow the
landing pads. */
seq = get_insns ();
end_sequence ();
- emit_insn_before (seq, region->u.try.catch->label);
+ emit_to_new_bb_before (seq, region->u.try.catch->label);
+
break;
case ERT_ALLOWED_EXCEPTIONS:
seq = get_insns ();
end_sequence ();
- emit_insn_before (seq, region->label);
+ emit_to_new_bb_before (seq, region->label);
break;
case ERT_CLEANUP:
struct eh_region *region = cfun->eh->region_array[i];
struct eh_region *outer;
rtx seq;
+ rtx barrier;
/* Mind we don't process a region more than once. */
if (!region || region->region_number != i)
start_sequence ();
if (outer)
- emit_jump (outer->post_landing_pad);
+ {
+ edge e;
+ basic_block src, dest;
+
+ emit_jump (outer->post_landing_pad);
+ src = BLOCK_FOR_INSN (region->resume);
+ dest = BLOCK_FOR_INSN (outer->post_landing_pad);
+ while (src->succ)
+ remove_edge (src->succ);
+ e = make_edge (src, dest, 0);
+ e->probability = REG_BR_PROB_BASE;
+ e->count = src->count;
+ }
else
- emit_library_call (unwind_resume_libfunc, LCT_THROW,
- VOIDmode, 1, cfun->eh->exc_ptr, ptr_mode);
+ {
+ emit_library_call (unwind_resume_libfunc, LCT_THROW,
+ VOIDmode, 1, cfun->eh->exc_ptr, ptr_mode);
+
+ /* What we just emitted was a throwing libcall, so it got a
+ barrier automatically added after it. If the last insn in
+ the libcall sequence isn't the barrier, it's because the
+ target emits multiple insns for a call, and there are insns
+ after the actual call insn (which are redundant and would be
+ optimized away). The barrier is inserted exactly after the
+ call insn, so let's go get that and delete the insns after
+ it, because below we need the barrier to be the last insn in
+ the sequence. */
+ delete_insns_since (NEXT_INSN (last_call_insn ()));
+ }
seq = get_insns ();
end_sequence ();
- emit_insn_before (seq, region->resume);
+ barrier = emit_insn_before (seq, region->resume);
+ /* Avoid duplicate barrier. */
+ if (GET_CODE (barrier) != BARRIER)
+ abort ();
+ delete_insn (barrier);
delete_insn (region->resume);
+
+ /* ??? From tree-ssa we can wind up with catch regions whose
+ label is not instantiated, but whose resx is present. Now
+ that we've dealt with the resx, kill the region. */
+ if (region->label == NULL && region->type == ERT_CLEANUP)
+ remove_eh_handler (region);
}
}
{
struct eh_region *region = cfun->eh->region_array[i];
rtx seq;
+ basic_block bb;
bool clobbers_hard_regs = false;
+ edge e;
/* Mind we don't process a region more than once. */
if (!region || region->region_number != i)
seq = get_insns ();
end_sequence ();
- emit_insn_before (seq, region->post_landing_pad);
+ bb = emit_to_new_bb_before (seq, region->post_landing_pad);
+ e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
+ e->count = bb->count;
+ e->probability = REG_BR_PROB_BASE;
}
}
rc = RNL_NOT_CAUGHT;
for (; region; region = region->outer)
{
- rc = reachable_next_level (region, type_thrown, 0);
+ rc = reachable_next_level (region, type_thrown, NULL);
if (rc != RNL_NOT_CAUGHT)
break;
}
if (cfun->uses_eh_lsda)
{
char buf[20];
+ rtx sym;
+
ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
- emit_move_insn (mem, gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)));
+ sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
+ SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
+ emit_move_insn (mem, sym);
}
else
emit_move_insn (mem, const0_rtx);
for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
if (GET_CODE (fn_begin) == NOTE
- && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
+ && (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG
+ || NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_BASIC_BLOCK))
break;
- emit_insn_after (seq, fn_begin);
+ if (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
+ insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
+ else
+ {
+ rtx last = BB_END (ENTRY_BLOCK_PTR->succ->dest);
+ for (; ; fn_begin = NEXT_INSN (fn_begin))
+ if ((GET_CODE (fn_begin) == NOTE
+ && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
+ || fn_begin == last)
+ break;
+ emit_insn_after (seq, fn_begin);
+ }
}
/* Call back from expand_function_end to know where we should put
sjlj_emit_function_exit (void)
{
rtx seq;
+ edge e;
start_sequence ();
post-dominates all can_throw_internal instructions. This is
the last possible moment. */
- emit_insn_after (seq, cfun->eh->sjlj_exit_after);
+ for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
+ if (e->flags & EDGE_FALLTHRU)
+ break;
+ if (e)
+ {
+ rtx insn;
+
+ /* Figure out whether the place we are supposed to insert libcall
+ is inside the last basic block or after it. In the other case
+ we need to emit to edge. */
+ if (e->src->next_bb != EXIT_BLOCK_PTR)
+ abort ();
+ for (insn = NEXT_INSN (BB_END (e->src)); insn; insn = NEXT_INSN (insn))
+ if (insn == cfun->eh->sjlj_exit_after)
+ break;
+ if (insn)
+ insert_insn_on_edge (seq, e);
+ else
+ {
+ insn = cfun->eh->sjlj_exit_after;
+ if (GET_CODE (insn) == CODE_LABEL)
+ insn = NEXT_INSN (insn);
+ emit_insn_after (seq, insn);
+ }
+ }
}
static void
{
int i, first_reachable;
rtx mem, dispatch, seq, fc;
+ rtx before;
+ basic_block bb;
+ edge e;
fc = cfun->eh->sjlj_fc;
seq = get_insns ();
end_sequence ();
- emit_insn_before (seq, (cfun->eh->region_array[first_reachable]
- ->post_landing_pad));
+ before = cfun->eh->region_array[first_reachable]->post_landing_pad;
+
+ bb = emit_to_new_bb_before (seq, before);
+ e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
+ e->count = bb->count;
+ e->probability = REG_BR_PROB_BASE;
}
static void
void
finish_eh_generation (void)
{
+ basic_block bb;
+
/* Nothing to do if no regions created. */
if (cfun->eh->region_tree == NULL)
return;
/* We've totally changed the CFG. Start over. */
find_exception_handler_labels ();
- rebuild_jump_labels (get_insns ());
- find_basic_blocks (get_insns (), max_reg_num (), 0);
+ break_superblocks ();
+ if (USING_SJLJ_EXCEPTIONS)
+ commit_edge_insertions ();
+ FOR_EACH_BB (bb)
+ {
+ edge e, next;
+ bool eh = false;
+ for (e = bb->succ; e; e = next)
+ {
+ next = e->succ_next;
+ if (e->flags & EDGE_EH)
+ {
+ remove_edge (e);
+ eh = true;
+ }
+ }
+ if (eh)
+ rtl_make_eh_edge (NULL, bb, BB_END (bb));
+ }
cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
}
\f
for_each_eh_label (void (*callback) (rtx))
{
htab_traverse (cfun->eh->exception_handler_label_map, for_each_eh_label_1,
- (void *)callback);
+ (void *) &callback);
}
static int
for_each_eh_label_1 (void **pentry, void *data)
{
struct ehl_map_entry *entry = *(struct ehl_map_entry **)pentry;
- void (*callback) (rtx) = (void (*) (rtx)) data;
+ void (*callback) (rtx) = *(void (**) (rtx)) data;
(*callback) (entry->label);
return 1;
/* This section describes CFG exception edges for flow. */
/* For communicating between calls to reachable_next_level. */
-struct reachable_info GTY(())
+struct reachable_info
{
tree types_caught;
tree types_allowed;
-  rtx handlers;
+  /* Invoked for each reachable handler region found.  */
+  void (*callback) (struct eh_region *, void *);
+  /* Opaque data passed through to CALLBACK.  */
+  void *callback_data;
+  /* Whether any handler has been reported so far.  */
+  bool saw_any_handlers;
};
/* A subroutine of reachable_next_level. Return true if TYPE, or a
base class of TYPE, is in HANDLED. */
-static int
+int
check_handled (tree handled, tree type)
{
tree t;
LP_REGION contains the landing pad; REGION is the handler. */
static void
-add_reachable_handler (struct reachable_info *info, struct eh_region *lp_region, struct eh_region *region)
+add_reachable_handler (struct reachable_info *info,
+		       struct eh_region *lp_region, struct eh_region *region)
{
if (! info)
return;
+  info->saw_any_handlers = true;
+
+  /* Once landing pads exist, report the landing-pad region; before
+     that, report the handler region itself.  */
if (cfun->eh->built_landing_pads)
-    {
-      if (! info->handlers)
-	info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
-    }
+    info->callback (lp_region, info->callback_data);
else
-    info->handlers = alloc_INSN_LIST (region->label, info->handlers);
+    info->callback (region, info->callback_data);
}
/* Process one level of exception regions for reachability.
If we've touched down at some landing pad previous, then the
explicit function call we generated may be used. Otherwise
the call is made by the runtime. */
- if (info && info->handlers)
+ if (info && info->saw_any_handlers)
{
add_reachable_handler (info, region, region);
return RNL_CAUGHT;
abort ();
}
-/* Retrieve a list of labels of exception handlers which can be
- reached by a given insn. */
+/* Invoke CALLBACK on each region reachable from REGION_NUMBER. */
-rtx
-reachable_handlers (rtx insn)
+void
+foreach_reachable_handler (int region_number, bool is_resx,
+ void (*callback) (struct eh_region *, void *),
+ void *callback_data)
{
struct reachable_info info;
struct eh_region *region;
tree type_thrown;
- int region_number;
-
- if (GET_CODE (insn) == JUMP_INSN
- && GET_CODE (PATTERN (insn)) == RESX)
- region_number = XINT (PATTERN (insn), 0);
- else
- {
- rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
- if (!note || INTVAL (XEXP (note, 0)) <= 0)
- return NULL;
- region_number = INTVAL (XEXP (note, 0));
- }
memset (&info, 0, sizeof (info));
+ info.callback = callback;
+ info.callback_data = callback_data;
region = cfun->eh->region_array[region_number];
type_thrown = NULL_TREE;
- if (GET_CODE (insn) == JUMP_INSN
- && GET_CODE (PATTERN (insn)) == RESX)
+ if (is_resx)
{
/* A RESX leaves a region instead of entering it. Thus the
region itself may have been deleted out from under us. */
if (region == NULL)
- return NULL;
+ return;
region = region->outer;
}
else if (region->type == ERT_THROW)
else
region = region->outer;
}
+}
+
+/* Retrieve a list of labels of exception handlers which can be
+ reached by a given insn. */
- return info.handlers;
+/* Callback for foreach_reachable_handler: store REGION's landing pad in
+   the INSN_LIST at DATA, keeping only the first one encountered.  */
+
+static void
+arh_to_landing_pad (struct eh_region *region, void *data)
+{
+  rtx *p_handlers = data;
+  if (! *p_handlers)
+    *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
+}
+
+/* Callback for foreach_reachable_handler: prepend REGION's handler
+   label to the INSN_LIST at DATA.  */
+
+static void
+arh_to_label (struct eh_region *region, void *data)
+{
+  rtx *p_handlers = data;
+  *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
+}
+
+rtx
+reachable_handlers (rtx insn)
+{
+  bool is_resx = false;
+  rtx handlers = NULL;
+  int region_number;
+
+  /* A RESX insn names its region directly in the pattern; otherwise
+     read the region from the REG_EH_REGION note.  */
+  if (GET_CODE (insn) == JUMP_INSN
+      && GET_CODE (PATTERN (insn)) == RESX)
+    {
+      region_number = XINT (PATTERN (insn), 0);
+      is_resx = true;
+    }
+  else
+    {
+      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
+      if (!note || INTVAL (XEXP (note, 0)) <= 0)
+	return NULL;
+      region_number = INTVAL (XEXP (note, 0));
+    }
+
+  /* Collect landing pads once they have been built, handler labels
+     before that.  */
+  foreach_reachable_handler (region_number, is_resx,
+			     (cfun->eh->built_landing_pads
+			      ? arh_to_landing_pad
+			      : arh_to_label),
+			     &handlers);
+
+  return handlers;
+}
/* Determine if the given INSN can throw an exception that is caught
within the function. */
bool
-can_throw_internal (rtx insn)
+can_throw_internal_1 (int region_number)
{
struct eh_region *region;
tree type_thrown;
+
+  region = cfun->eh->region_array[region_number];
+
+  /* A THROW region records the type thrown; handlers are sought in its
+     enclosing regions.  */
+  type_thrown = NULL_TREE;
+  if (region->type == ERT_THROW)
+    {
+      type_thrown = region->u.throw.type;
+      region = region->outer;
+    }
+
+  /* If this exception is ignored by each and every containing region,
+     then control passes straight out.  The runtime may handle some
+     regions, which also do not require processing internally.  */
+  for (; region; region = region->outer)
+    {
+      enum reachable_code how = reachable_next_level (region, type_thrown, 0);
+      if (how == RNL_BLOCKED)
+	return false;
+      if (how != RNL_NOT_CAUGHT)
+	return true;
+    }
+
+  return false;
+}
+
+bool
+can_throw_internal (rtx insn)
+{
rtx note;
if (! INSN_P (insn))
if (!note || INTVAL (XEXP (note, 0)) <= 0)
return false;
- region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
+ return can_throw_internal_1 (INTVAL (XEXP (note, 0)));
+}
+
+/* Determine if the given INSN can throw an exception that is
+ visible outside the function. */
+
+bool
+can_throw_external_1 (int region_number)
+{
+ struct eh_region *region;
+ tree type_thrown;
+
+ region = cfun->eh->region_array[region_number];
type_thrown = NULL_TREE;
if (region->type == ERT_THROW)
region = region->outer;
}
- /* If this exception is ignored by each and every containing region,
- then control passes straight out. The runtime may handle some
- regions, which also do not require processing internally. */
- for (; region; region = region->outer)
- {
- enum reachable_code how = reachable_next_level (region, type_thrown, 0);
- if (how == RNL_BLOCKED)
- return false;
- if (how != RNL_NOT_CAUGHT)
- return true;
- }
+ /* If the exception is caught or blocked by any containing region,
+ then it is not seen by any calling function. */
+ for (; region ; region = region->outer)
+ if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
+ return false;
- return false;
+ return true;
}
-/* Determine if the given INSN can throw an exception that is
- visible outside the function. */
-
bool
can_throw_external (rtx insn)
{
- struct eh_region *region;
- tree type_thrown;
rtx note;
if (! INSN_P (insn))
if (INTVAL (XEXP (note, 0)) <= 0)
return false;
- region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
-
- type_thrown = NULL_TREE;
- if (region->type == ERT_THROW)
- {
- type_thrown = region->u.throw.type;
- region = region->outer;
- }
-
- /* If the exception is caught or blocked by any containing region,
- then it is not seen by any calling function. */
- for (; region ; region = region->outer)
- if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
- return false;
-
- return true;
+ return can_throw_external_1 (INTVAL (XEXP (note, 0)));
}
/* Set current_function_nothrow and cfun->all_throwers_are_sibcalls. */
emit_label (around_label);
}
+
+/* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
+   POINTERS_EXTEND_UNSIGNED and return it. */
+
+rtx
+expand_builtin_extend_pointer (tree addr_tree)
+{
+  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
+  int extend;
+
+#ifdef POINTERS_EXTEND_UNSIGNED
+  extend = POINTERS_EXTEND_UNSIGNED;
+#else
+  /* The previous EH code did an unsigned extend by default, so we do this also
+     for consistency. */
+  extend = 1;
+#endif
+
+  /* NOTE(review): the header comment says Pmode, but the conversion
+     target here is word_mode -- confirm which is intended.  */
+  return convert_modes (word_mode, ptr_mode, addr, extend);
+}
\f
/* In the following functions, we represent entries in the action table
as 1-based indices. Special cases are:
/* An exception specification adds its filter to the
beginning of the chain. */
next = collect_one_action_chain (ar_hash, region->outer);
- return add_action_record (ar_hash, region->u.allowed.filter,
- next < 0 ? 0 : next);
+
+ /* If there is no next action, terminate the chain. */
+ if (next == -1)
+ next = 0;
+ /* If all outer actions are cleanups or must_not_throw,
+ we'll have no action record for it, since we had wanted
+ to encode these states in the call-site record directly.
+ Add a cleanup action to the chain to catch these. */
+ else if (next <= 0)
+ next = add_action_record (ar_hash, 0, 0);
+
+ return add_action_record (ar_hash, region->u.allowed.filter, next);
case ERT_MUST_NOT_THROW:
/* A must-not-throw region with no inner handlers or cleanups
/* Note that varasm still thinks we're in the function's code section.
The ".endp" directive that will immediately follow will take us back. */
#else
- (*targetm.asm_out.exception_section) ();
+ targetm.asm_out.exception_section ();
#endif
have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
assemble_align (tt_format_size * BITS_PER_UNIT);
}
- (*targetm.asm_out.internal_label) (asm_out_file, "LLSDA",
+ targetm.asm_out.internal_label (asm_out_file, "LLSDA",
current_function_funcdef_no);
/* The LSDA header. */
/* Let cgraph know that the rtti decl is used. Not all of the
paths below go through assemble_integer, which would take
care of this for us. */
- if (TREE_CODE (type) != ADDR_EXPR)
+ STRIP_NOPS (type);
+ if (TREE_CODE (type) == ADDR_EXPR)
+ {
+ type = TREE_OPERAND (type, 0);
+ node = cgraph_varpool_node (type);
+ if (node)
+ cgraph_varpool_mark_needed_node (node);
+ }
+ else if (TREE_CODE (type) != INTEGER_CST)
abort ();
- type = TREE_OPERAND (type, 0);
- node = cgraph_varpool_node (type);
- if (node)
- cgraph_varpool_mark_needed_node (node);
}
if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)