/* Implements exception handling.
Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
- 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
+ 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
Contributed by Mike Stump <mrs@cygnus.com>.
This file is part of GCC.
#endif
-/* Nonzero means enable synchronous exceptions for non-call instructions. */
-int flag_non_call_exceptions;
-
/* Protect cleanup actions with must-not-throw regions, with a call
to the given failure handler. */
tree (*lang_protect_cleanup_actions) (void);
/* Each region does exactly one thing. */
enum eh_region_type
- {
+ {
ERT_UNKNOWN = 0,
ERT_CLEANUP,
ERT_TRY,
/* Entry point for this region's handler before landing pads are built. */
rtx label;
+ tree tree_label;
/* Entry point for this region's handler from the runtime eh library. */
rtx landing_pad;
static void add_type_for_runtime (tree);
static tree lookup_type_for_runtime (tree);
-static struct eh_region *expand_eh_region_end (void);
-
-static rtx get_exception_filter (struct function *);
-
-static void collect_eh_region_array (void);
-static void resolve_fixup_regions (void);
-static void remove_fixup_regions (void);
static void remove_unreachable_regions (rtx);
-static void convert_from_eh_region_ranges_1 (rtx *, int *, int);
-static struct eh_region *duplicate_eh_region_1 (struct eh_region *,
- struct inline_remap *);
-static void duplicate_eh_region_2 (struct eh_region *, struct eh_region **);
static int ttypes_filter_eq (const void *, const void *);
static hashval_t ttypes_filter_hash (const void *);
static int ehspec_filter_eq (const void *, const void *);
static void remove_eh_handler (struct eh_region *);
static int for_each_eh_label_1 (void **, void *);
-struct reachable_info;
-
/* The return value of reachable_next_level. */
enum reachable_code
{
RNL_BLOCKED
};
-static int check_handled (tree, tree);
-static void add_reachable_handler (struct reachable_info *,
- struct eh_region *, struct eh_region *);
+struct reachable_info;
static enum reachable_code reachable_next_level (struct eh_region *, tree,
struct reachable_info *);
integer_type_node);
DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
- tmp = build_index_type (build_int_2 (4 - 1, 0));
+ tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
tmp = build_array_type (lang_hooks.types.type_for_mode (word_mode, 1),
tmp);
f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
- tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
+ tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
#else
/* Should be large enough for most systems, if it is not,
JMP_BUF_SIZE should be defined with the proper value. It will
also tend to be larger than necessary for most systems, a more
optimal port will define JMP_BUF_SIZE. */
- tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
+ tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
/* builtin_setjmp takes a pointer to 5 words. */
- tmp = build_int_2 (5 * BITS_PER_WORD / POINTER_SIZE - 1, 0);
+ tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
#endif
tmp = build_index_type (tmp);
tmp = build_array_type (ptr_type_node, tmp);
cfun->eh = ggc_alloc_cleared (sizeof (struct eh_status));
}
\f
-/* Start an exception handling region. All instructions emitted
- after this point are considered to be part of the region until
- expand_eh_region_end is invoked. */
+/* Routines to generate the exception tree somewhat directly.
+ These are used from tree-eh.c when processing exception related
+ nodes during tree optimization. */
-void
-expand_eh_region_start (void)
+static struct eh_region *
+gen_eh_region (enum eh_region_type type, struct eh_region *outer)
{
- struct eh_region *new_region;
- struct eh_region *cur_region;
- rtx note;
+ struct eh_region *new;
- if (! doing_eh (0))
- return;
+#ifdef ENABLE_CHECKING
+ gcc_assert (doing_eh (0));
+#endif
/* Insert a new blank region as a leaf in the tree. */
- new_region = ggc_alloc_cleared (sizeof (*new_region));
- cur_region = cfun->eh->cur_region;
- new_region->outer = cur_region;
- if (cur_region)
+ new = ggc_alloc_cleared (sizeof (*new));
+ new->type = type;
+ new->outer = outer;
+ if (outer)
{
- new_region->next_peer = cur_region->inner;
- cur_region->inner = new_region;
+ new->next_peer = outer->inner;
+ outer->inner = new;
}
else
{
- new_region->next_peer = cfun->eh->region_tree;
- cfun->eh->region_tree = new_region;
+ new->next_peer = cfun->eh->region_tree;
+ cfun->eh->region_tree = new;
}
- cfun->eh->cur_region = new_region;
-
- /* Create a note marking the start of this region. */
- new_region->region_number = ++cfun->eh->last_region_number;
- note = emit_note (NOTE_INSN_EH_REGION_BEG);
- NOTE_EH_HANDLER (note) = new_region->region_number;
-}
-
-/* Common code to end a region. Returns the region just ended. */
-
-static struct eh_region *
-expand_eh_region_end (void)
-{
- struct eh_region *cur_region = cfun->eh->cur_region;
- rtx note;
- /* Create a note marking the end of this region. */
- note = emit_note (NOTE_INSN_EH_REGION_END);
- NOTE_EH_HANDLER (note) = cur_region->region_number;
+ new->region_number = ++cfun->eh->last_region_number;
- /* Pop. */
- cfun->eh->cur_region = cur_region->outer;
-
- return cur_region;
+ return new;
}
-/* End an exception handling region for a cleanup. HANDLER is an
- expression to expand for the cleanup. */
-
-void
-expand_eh_region_end_cleanup (tree handler)
+struct eh_region *
+gen_eh_region_cleanup (struct eh_region *outer, struct eh_region *prev_try)
{
- struct eh_region *region;
- tree protect_cleanup_actions;
- rtx around_label;
- rtx data_save[2];
-
- if (! doing_eh (0))
- return;
-
- region = expand_eh_region_end ();
- region->type = ERT_CLEANUP;
- region->label = gen_label_rtx ();
- region->u.cleanup.exp = handler;
- region->u.cleanup.prev_try = cfun->eh->try_region;
-
- around_label = gen_label_rtx ();
- emit_jump (around_label);
-
- emit_label (region->label);
-
- if (flag_non_call_exceptions || region->may_contain_throw)
- {
- /* Give the language a chance to specify an action to be taken if an
- exception is thrown that would propagate out of the HANDLER. */
- protect_cleanup_actions
- = (lang_protect_cleanup_actions
- ? (*lang_protect_cleanup_actions) ()
- : NULL_TREE);
-
- if (protect_cleanup_actions)
- expand_eh_region_start ();
-
- /* In case this cleanup involves an inline destructor with a try block in
- it, we need to save the EH return data registers around it. */
- data_save[0] = gen_reg_rtx (ptr_mode);
- emit_move_insn (data_save[0], get_exception_pointer (cfun));
- data_save[1] = gen_reg_rtx (word_mode);
- emit_move_insn (data_save[1], get_exception_filter (cfun));
-
- expand_expr (handler, const0_rtx, VOIDmode, 0);
-
- emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
- emit_move_insn (cfun->eh->filter, data_save[1]);
-
- if (protect_cleanup_actions)
- expand_eh_region_end_must_not_throw (protect_cleanup_actions);
-
- /* We need any stack adjustment complete before the around_label. */
- do_pending_stack_adjust ();
- }
-
- /* We delay the generation of the _Unwind_Resume until we generate
- landing pads. We emit a marker here so as to get good control
- flow data in the meantime. */
- region->resume
- = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
- emit_barrier ();
-
- emit_label (around_label);
+ struct eh_region *cleanup = gen_eh_region (ERT_CLEANUP, outer);
+ cleanup->u.cleanup.prev_try = prev_try;
+ return cleanup;
}
-/* End an exception handling region for a try block, and prepares
- for subsequent calls to expand_start_catch. */
-
-void
-expand_start_all_catch (void)
+struct eh_region *
+gen_eh_region_try (struct eh_region *outer)
{
- struct eh_region *region;
-
- if (! doing_eh (1))
- return;
-
- region = expand_eh_region_end ();
- region->type = ERT_TRY;
- region->u.try.prev_try = cfun->eh->try_region;
- region->u.try.continue_label = gen_label_rtx ();
-
- cfun->eh->try_region = region;
-
- emit_jump (region->u.try.continue_label);
+ return gen_eh_region (ERT_TRY, outer);
}
-/* Begin a catch clause. TYPE is the type caught, a list of such
- types, (in the case of Java) an ADDR_EXPR which points to the
- runtime type to match, or null if this is a catch-all
- clause. Providing a type list enables to associate the catch region
- with potentially several exception types, which is useful e.g. for
- Ada. */
-
-void
-expand_start_catch (tree type_or_list)
+struct eh_region *
+gen_eh_region_catch (struct eh_region *t, tree type_or_list)
{
- struct eh_region *t, *c, *l;
- tree type_list;
-
- if (! doing_eh (0))
- return;
+ struct eh_region *c, *l;
+ tree type_list, type_node;
+ /* Ensure to always end up with a type list to normalize further
+ processing, then register each type against the runtime types map. */
type_list = type_or_list;
-
if (type_or_list)
{
- /* Ensure to always end up with a type list to normalize further
- processing, then register each type against the runtime types
- map. */
- tree type_node;
-
if (TREE_CODE (type_or_list) != TREE_LIST)
type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
add_type_for_runtime (TREE_VALUE (type_node));
}
- expand_eh_region_start ();
-
- t = cfun->eh->try_region;
- c = cfun->eh->cur_region;
- c->type = ERT_CATCH;
+ c = gen_eh_region (ERT_CATCH, t->outer);
c->u.catch.type_list = type_list;
- c->label = gen_label_rtx ();
-
l = t->u.try.last_catch;
c->u.catch.prev_catch = l;
if (l)
t->u.try.catch = c;
t->u.try.last_catch = c;
- emit_label (c->label);
+ return c;
}
-/* End a catch clause. Control will resume after the try/catch block. */
-
-void
-expand_end_catch (void)
+struct eh_region *
+gen_eh_region_allowed (struct eh_region *outer, tree allowed)
{
- struct eh_region *try_region;
-
- if (! doing_eh (0))
- return;
+ struct eh_region *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
+ region->u.allowed.type_list = allowed;
- expand_eh_region_end ();
- try_region = cfun->eh->try_region;
+ for (; allowed ; allowed = TREE_CHAIN (allowed))
+ add_type_for_runtime (TREE_VALUE (allowed));
- emit_jump (try_region->u.try.continue_label);
+ return region;
}
-/* End a sequence of catch handlers for a try block. */
-
-void
-expand_end_all_catch (void)
+struct eh_region *
+gen_eh_region_must_not_throw (struct eh_region *outer)
{
- struct eh_region *try_region;
-
- if (! doing_eh (0))
- return;
-
- try_region = cfun->eh->try_region;
- cfun->eh->try_region = try_region->u.try.prev_try;
-
- emit_label (try_region->u.try.continue_label);
+ return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}
-/* End an exception region for an exception type filter. ALLOWED is a
- TREE_LIST of types to be matched by the runtime. FAILURE is an
- expression to invoke if a mismatch occurs.
-
- ??? We could use these semantics for calls to rethrow, too; if we can
- see the surrounding catch clause, we know that the exception we're
- rethrowing satisfies the "filter" of the catch type. */
-
-void
-expand_eh_region_end_allowed (tree allowed, tree failure)
+int
+get_eh_region_number (struct eh_region *region)
{
- struct eh_region *region;
- rtx around_label;
-
- if (! doing_eh (0))
- return;
-
- region = expand_eh_region_end ();
- region->type = ERT_ALLOWED_EXCEPTIONS;
- region->u.allowed.type_list = allowed;
- region->label = gen_label_rtx ();
-
- for (; allowed ; allowed = TREE_CHAIN (allowed))
- add_type_for_runtime (TREE_VALUE (allowed));
-
- /* We must emit the call to FAILURE here, so that if this function
- throws a different exception, that it will be processed by the
- correct region. */
-
- around_label = gen_label_rtx ();
- emit_jump (around_label);
-
- emit_label (region->label);
- expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
- /* We must adjust the stack before we reach the AROUND_LABEL because
- the call to FAILURE does not occur on all paths to the
- AROUND_LABEL. */
- do_pending_stack_adjust ();
-
- emit_label (around_label);
+ return region->region_number;
}
-/* End an exception region for a must-not-throw filter. FAILURE is an
- expression invoke if an uncaught exception propagates this far.
-
- This is conceptually identical to expand_eh_region_end_allowed with
- an empty allowed list (if you passed "std::terminate" instead of
- "__cxa_call_unexpected"), but they are represented differently in
- the C++ LSDA. */
-
-void
-expand_eh_region_end_must_not_throw (tree failure)
+bool
+get_eh_region_may_contain_throw (struct eh_region *region)
{
- struct eh_region *region;
- rtx around_label;
-
- if (! doing_eh (0))
- return;
-
- region = expand_eh_region_end ();
- region->type = ERT_MUST_NOT_THROW;
- region->label = gen_label_rtx ();
-
- /* We must emit the call to FAILURE here, so that if this function
- throws a different exception, that it will be processed by the
- correct region. */
-
- around_label = gen_label_rtx ();
- emit_jump (around_label);
-
- emit_label (region->label);
- expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
-
- emit_label (around_label);
+ return region->may_contain_throw;
}
-/* End an exception region for a throw. No handling goes on here,
- but it's the easiest way for the front-end to indicate what type
- is being thrown. */
+tree
+get_eh_region_tree_label (struct eh_region *region)
+{
+ return region->tree_label;
+}
void
-expand_eh_region_end_throw (tree type)
+set_eh_region_tree_label (struct eh_region *region, tree lab)
{
- struct eh_region *region;
-
- if (! doing_eh (0))
- return;
-
- region = expand_eh_region_end ();
- region->type = ERT_THROW;
- region->u.throw.type = type;
+ region->tree_label = lab;
}
-
-/* End a fixup region. Within this region the cleanups for the immediately
- enclosing region are _not_ run. This is used for goto cleanup to avoid
- destroying an object twice.
-
- This would be an extraordinarily simple prospect, were it not for the
- fact that we don't actually know what the immediately enclosing region
- is. This surprising fact is because expand_cleanups is currently
- generating a sequence that it will insert somewhere else. We collect
- the proper notion of "enclosing" in convert_from_eh_region_ranges. */
-
+\f
void
-expand_eh_region_end_fixup (tree handler)
+expand_resx_expr (tree exp)
{
- struct eh_region *fixup;
-
- if (! doing_eh (0))
- return;
+ int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
+ struct eh_region *reg = cfun->eh->region_array[region_nr];
- fixup = expand_eh_region_end ();
- fixup->type = ERT_FIXUP;
- fixup->u.fixup.cleanup_exp = handler;
+ reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
+ emit_barrier ();
}
/* Note that the current EH region (if any) may contain a throw, or a
call to a function which itself may contain a throw. */
void
-note_eh_region_may_contain_throw (void)
+note_eh_region_may_contain_throw (struct eh_region *region)
{
- struct eh_region *region;
-
- region = cfun->eh->cur_region;
while (region && !region->may_contain_throw)
{
region->may_contain_throw = 1;
}
}
+void
+note_current_region_may_contain_throw (void)
+{
+ note_eh_region_may_contain_throw (cfun->eh->cur_region);
+}
+
+
/* Return an rtl expression for a pointer to the exception object
within a handler. */
/* Return an rtl expression for the exception dispatch filter
within a handler. */
-static rtx
+rtx
get_exception_filter (struct function *fun)
{
rtx filter = fun->eh->filter;
if (fun == cfun && ! filter)
{
- filter = gen_reg_rtx (word_mode);
+ filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
fun->eh->filter = filter;
}
return filter;
\f
/* This section is for the exception handling specific optimization pass. */
-/* Random access the exception region tree. It's just as simple to
- collect the regions this way as in expand_eh_region_start, but
- without having to realloc memory. */
+/* Random access the exception region tree. */
-static void
+void
collect_eh_region_array (void)
{
struct eh_region **array, *i;
}
}
-static void
-resolve_one_fixup_region (struct eh_region *fixup)
-{
- struct eh_region *cleanup, *real;
- int j, n;
-
- n = cfun->eh->last_region_number;
- cleanup = 0;
-
- for (j = 1; j <= n; ++j)
- {
- cleanup = cfun->eh->region_array[j];
- if (cleanup && cleanup->type == ERT_CLEANUP
- && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
- break;
- }
- if (j > n)
- abort ();
-
- real = cleanup->outer;
- if (real && real->type == ERT_FIXUP)
- {
- if (!real->u.fixup.resolved)
- resolve_one_fixup_region (real);
- real = real->u.fixup.real_region;
- }
-
- fixup->u.fixup.real_region = real;
- fixup->u.fixup.resolved = true;
-}
-
-static void
-resolve_fixup_regions (void)
-{
- int i, n = cfun->eh->last_region_number;
-
- for (i = 1; i <= n; ++i)
- {
- struct eh_region *fixup = cfun->eh->region_array[i];
-
- if (!fixup || fixup->type != ERT_FIXUP || fixup->u.fixup.resolved)
- continue;
-
- resolve_one_fixup_region (fixup);
- }
-}
-
-/* Now that we've discovered what region actually encloses a fixup,
- we can shuffle pointers and remove them from the tree. */
-
-static void
-remove_fixup_regions (void)
-{
- int i;
- rtx insn, note;
- struct eh_region *fixup;
-
- /* Walk the insn chain and adjust the REG_EH_REGION numbers
- for instructions referencing fixup regions. This is only
- strictly necessary for fixup regions with no parent, but
- doesn't hurt to do it for all regions. */
- for (insn = get_insns(); insn ; insn = NEXT_INSN (insn))
- if (INSN_P (insn)
- && (note = find_reg_note (insn, REG_EH_REGION, NULL))
- && INTVAL (XEXP (note, 0)) > 0
- && (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
- && fixup->type == ERT_FIXUP)
- {
- if (fixup->u.fixup.real_region)
- XEXP (note, 0) = GEN_INT (fixup->u.fixup.real_region->region_number);
- else
- remove_note (insn, note);
- }
-
- /* Remove the fixup regions from the tree. */
- for (i = cfun->eh->last_region_number; i > 0; --i)
- {
- fixup = cfun->eh->region_array[i];
- if (! fixup)
- continue;
-
- /* Allow GC to maybe free some memory. */
- if (fixup->type == ERT_CLEANUP)
- fixup->u.cleanup.exp = NULL_TREE;
-
- if (fixup->type != ERT_FIXUP)
- continue;
-
- if (fixup->inner)
- {
- struct eh_region *parent, *p, **pp;
-
- parent = fixup->u.fixup.real_region;
-
- /* Fix up the children's parent pointers; find the end of
- the list. */
- for (p = fixup->inner; ; p = p->next_peer)
- {
- p->outer = parent;
- if (! p->next_peer)
- break;
- }
-
- /* In the tree of cleanups, only outer-inner ordering matters.
- So link the children back in anywhere at the correct level. */
- if (parent)
- pp = &parent->inner;
- else
- pp = &cfun->eh->region_tree;
- p->next_peer = *pp;
- *pp = fixup->inner;
- fixup->inner = NULL;
- }
-
- remove_eh_handler (fixup);
- }
-}
-
/* Remove all regions whose labels are not reachable from insns. */
static void
if (r->resume)
{
- if (uid_region_num[INSN_UID (r->resume)])
- abort ();
+ gcc_assert (!uid_region_num[INSN_UID (r->resume)]);
uid_region_num[INSN_UID (r->resume)] = i;
}
if (r->label)
{
- if (uid_region_num[INSN_UID (r->label)])
- abort ();
+ gcc_assert (!uid_region_num[INSN_UID (r->label)]);
uid_region_num[INSN_UID (r->label)] = i;
}
- if (r->type == ERT_TRY && r->u.try.continue_label)
- {
- if (uid_region_num[INSN_UID (r->u.try.continue_label)])
- abort ();
- uid_region_num[INSN_UID (r->u.try.continue_label)] = i;
- }
}
for (insn = insns; insn; insn = NEXT_INSN (insn))
- {
- reachable[uid_region_num[INSN_UID (insn)]] = true;
-
- if (GET_CODE (insn) == CALL_INSN
- && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
- for (i = 0; i < 3; i++)
- {
- rtx sub = XEXP (PATTERN (insn), i);
- for (; sub ; sub = NEXT_INSN (sub))
- reachable[uid_region_num[INSN_UID (sub)]] = true;
- }
- }
+ reachable[uid_region_num[INSN_UID (insn)]] = true;
for (i = cfun->eh->last_region_number; i > 0; --i)
{
r = cfun->eh->region_array[i];
if (r && r->region_number == i && !reachable[i])
{
- /* Don't remove ERT_THROW regions if their outer region
- is reachable. */
- if (r->type == ERT_THROW
- && r->outer
- && reachable[r->outer->region_number])
- continue;
-
- remove_eh_handler (r);
- }
- }
-
- free (reachable);
- free (uid_region_num);
-}
-
-/* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
- can_throw instruction in the region. */
-
-static void
-convert_from_eh_region_ranges_1 (rtx *pinsns, int *orig_sp, int cur)
-{
- int *sp = orig_sp;
- rtx insn, next;
-
- for (insn = *pinsns; insn ; insn = next)
- {
- next = NEXT_INSN (insn);
- if (GET_CODE (insn) == NOTE)
- {
- int kind = NOTE_LINE_NUMBER (insn);
- if (kind == NOTE_INSN_EH_REGION_BEG
- || kind == NOTE_INSN_EH_REGION_END)
+ bool kill_it = true;
+ switch (r->type)
{
- if (kind == NOTE_INSN_EH_REGION_BEG)
- {
- struct eh_region *r;
-
- *sp++ = cur;
- cur = NOTE_EH_HANDLER (insn);
-
- r = cfun->eh->region_array[cur];
- if (r->type == ERT_FIXUP)
- {
- r = r->u.fixup.real_region;
- cur = r ? r->region_number : 0;
- }
- else if (r->type == ERT_CATCH)
+ case ERT_THROW:
+ /* Don't remove ERT_THROW regions if their outer region
+ is reachable. */
+ if (r->outer && reachable[r->outer->region_number])
+ kill_it = false;
+ break;
+
+ case ERT_MUST_NOT_THROW:
+ /* MUST_NOT_THROW regions are implementable solely in the
+ runtime, but their existence continues to affect calls
+ within that region. Never delete them here. */
+ kill_it = false;
+ break;
+
+ case ERT_TRY:
+ {
+ /* TRY regions are reachable if any of its CATCH regions
+ are reachable. */
+ struct eh_region *c;
+ for (c = r->u.try.catch; c ; c = c->u.catch.next_catch)
+ if (reachable[c->region_number])
{
- r = r->outer;
- cur = r ? r->region_number : 0;
+ kill_it = false;
+ break;
}
- }
- else
- cur = *--sp;
-
- /* Removing the first insn of a CALL_PLACEHOLDER sequence
- requires extra care to adjust sequence start. */
- if (insn == *pinsns)
- *pinsns = next;
- remove_insn (insn);
- continue;
- }
- }
- else if (INSN_P (insn))
- {
- if (cur > 0
- && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
- /* Calls can always potentially throw exceptions, unless
- they have a REG_EH_REGION note with a value of 0 or less.
- Which should be the only possible kind so far. */
- && (GET_CODE (insn) == CALL_INSN
- /* If we wanted exceptions for non-call insns, then
- any may_trap_p instruction could throw. */
- || (flag_non_call_exceptions
- && GET_CODE (PATTERN (insn)) != CLOBBER
- && GET_CODE (PATTERN (insn)) != USE
- && may_trap_p (PATTERN (insn)))))
- {
- REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
- REG_NOTES (insn));
- }
+ break;
+ }
- if (GET_CODE (insn) == CALL_INSN
- && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
- {
- convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
- sp, cur);
- convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
- sp, cur);
- convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
- sp, cur);
+ default:
+ break;
}
+
+ if (kill_it)
+ remove_eh_handler (r);
}
}
- if (sp != orig_sp)
- abort ();
+ free (reachable);
+ free (uid_region_num);
}
+/* Set up EH labels for RTL. */
+
void
convert_from_eh_region_ranges (void)
{
- int *stack;
- rtx insns;
-
- collect_eh_region_array ();
- resolve_fixup_regions ();
+ rtx insns = get_insns ();
+ int i, n = cfun->eh->last_region_number;
- stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
- insns = get_insns ();
- convert_from_eh_region_ranges_1 (&insns, stack, 0);
- free (stack);
+  /* Most of the work is already done at the tree level.  All we need to
+     do is collect the rtl labels that correspond to the tree labels
+     we allocated earlier.  */
+ for (i = 1; i <= n; ++i)
+ {
+ struct eh_region *region = cfun->eh->region_array[i];
+ if (region && region->tree_label)
+ region->label = DECL_RTL_IF_SET (region->tree_label);
+ }
- remove_fixup_regions ();
remove_unreachable_regions (insns);
}
label. After landing pad creation, the exception handlers may
share landing pads. This is ok, since maybe_remove_eh_handler
only requires the 1-1 mapping before landing pad creation. */
- if (*slot && !cfun->eh->built_landing_pads)
- abort ();
+ gcc_assert (!*slot || cfun->eh->built_landing_pads);
*slot = entry;
}
return false;
}
\f
-static struct eh_region *
-duplicate_eh_region_1 (struct eh_region *o, struct inline_remap *map)
-{
- struct eh_region *n = ggc_alloc_cleared (sizeof (struct eh_region));
-
- n->region_number = o->region_number + cfun->eh->last_region_number;
- n->type = o->type;
-
- switch (n->type)
- {
- case ERT_CLEANUP:
- case ERT_MUST_NOT_THROW:
- break;
-
- case ERT_TRY:
- if (o->u.try.continue_label)
- n->u.try.continue_label
- = get_label_from_map (map,
- CODE_LABEL_NUMBER (o->u.try.continue_label));
- break;
-
- case ERT_CATCH:
- n->u.catch.type_list = o->u.catch.type_list;
- break;
-
- case ERT_ALLOWED_EXCEPTIONS:
- n->u.allowed.type_list = o->u.allowed.type_list;
- break;
-
- case ERT_THROW:
- n->u.throw.type = o->u.throw.type;
-
- default:
- abort ();
- }
-
- if (o->label)
- n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
- if (o->resume)
- {
- n->resume = map->insn_map[INSN_UID (o->resume)];
- if (n->resume == NULL)
- abort ();
- }
-
- return n;
-}
-
-static void
-duplicate_eh_region_2 (struct eh_region *o, struct eh_region **n_array)
-{
- struct eh_region *n = n_array[o->region_number];
-
- switch (n->type)
- {
- case ERT_TRY:
- n->u.try.catch = n_array[o->u.try.catch->region_number];
- n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
- break;
-
- case ERT_CATCH:
- if (o->u.catch.next_catch)
- n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
- if (o->u.catch.prev_catch)
- n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
- break;
-
- default:
- break;
- }
-
- if (o->outer)
- n->outer = n_array[o->outer->region_number];
- if (o->inner)
- n->inner = n_array[o->inner->region_number];
- if (o->next_peer)
- n->next_peer = n_array[o->next_peer->region_number];
-}
-
-int
-duplicate_eh_regions (struct function *ifun, struct inline_remap *map)
-{
- int ifun_last_region_number = ifun->eh->last_region_number;
- struct eh_region **n_array, *root, *cur;
- int i;
-
- if (ifun_last_region_number == 0)
- return 0;
-
- n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));
-
- for (i = 1; i <= ifun_last_region_number; ++i)
- {
- cur = ifun->eh->region_array[i];
- if (!cur || cur->region_number != i)
- continue;
- n_array[i] = duplicate_eh_region_1 (cur, map);
- }
- for (i = 1; i <= ifun_last_region_number; ++i)
- {
- cur = ifun->eh->region_array[i];
- if (!cur || cur->region_number != i)
- continue;
- duplicate_eh_region_2 (cur, n_array);
- }
-
- root = n_array[ifun->eh->region_tree->region_number];
- cur = cfun->eh->cur_region;
- if (cur)
- {
- struct eh_region *p = cur->inner;
- if (p)
- {
- while (p->next_peer)
- p = p->next_peer;
- p->next_peer = root;
- }
- else
- cur->inner = root;
-
- for (i = 1; i <= ifun_last_region_number; ++i)
- if (n_array[i] && n_array[i]->outer == NULL)
- n_array[i]->outer = cur;
- }
- else
- {
- struct eh_region *p = cfun->eh->region_tree;
- if (p)
- {
- while (p->next_peer)
- p = p->next_peer;
- p->next_peer = root;
- }
- else
- cfun->eh->region_tree = root;
- }
-
- free (n_array);
-
- i = cfun->eh->last_region_number;
- cfun->eh->last_region_number = i + ifun_last_region_number;
- return i;
-}
-
-\f
static int
t2r_eq (const void *pentry, const void *pdata)
{
for (;tp_node; tp_node = TREE_CHAIN (tp_node))
{
int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
- tree flt_node = build_int_2 (flt, 0);
+ tree flt_node = build_int_cst (NULL_TREE, flt);
r->u.catch.filter_list
= tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
/* Get a filter value for the NULL list also since it will need
an action record anyway. */
int flt = add_ttypes_entry (ttypes, NULL);
- tree flt_node = build_int_2 (flt, 0);
+ tree flt_node = build_int_cst (NULL_TREE, flt);
r->u.catch.filter_list
= tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
rtx last;
basic_block bb;
edge e;
+ edge_iterator ei;
- /* If there happens to be an fallthru edge (possibly created by cleanup_cfg
+ /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
call), we don't want it to go into newly created landing pad or other EH
construct. */
- for (e = BLOCK_FOR_INSN (insn)->pred; e; e = e->pred_next)
+ for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
if (e->flags & EDGE_FALLTHRU)
force_nonfallthru (e);
+ else
+ ei_next (&ei);
last = emit_insn_before (seq, insn);
- if (GET_CODE (last) == BARRIER)
+ if (BARRIER_P (last))
last = PREV_INSN (last);
bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
update_bb_for_insn (bb);
emit_cmp_and_jump_insns
(cfun->eh->filter,
GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
- EQ, NULL_RTX, word_mode, 0, c->label);
+ EQ, NULL_RTX,
+ targetm.eh_return_filter_mode (), 0, c->label);
tp_node = TREE_CHAIN (tp_node);
flt_node = TREE_CHAIN (flt_node);
emit_cmp_and_jump_insns (cfun->eh->filter,
GEN_INT (region->u.allowed.filter),
- EQ, NULL_RTX, word_mode, 0, region->label);
+ EQ, NULL_RTX,
+ targetm.eh_return_filter_mode (), 0, region->label);
/* We delay the generation of the _Unwind_Resume until we generate
landing pads. We emit a marker here so as to get good control
break;
default:
- abort ();
+ gcc_unreachable ();
}
}
}
emit_jump (outer->post_landing_pad);
src = BLOCK_FOR_INSN (region->resume);
dest = BLOCK_FOR_INSN (outer->post_landing_pad);
- while (src->succ)
- remove_edge (src->succ);
+ while (EDGE_COUNT (src->succs) > 0)
+ remove_edge (EDGE_SUCC (src, 0));
e = make_edge (src, dest, 0);
e->probability = REG_BR_PROB_BASE;
e->count = src->count;
}
else
- emit_library_call (unwind_resume_libfunc, LCT_THROW,
- VOIDmode, 1, cfun->eh->exc_ptr, ptr_mode);
+ {
+ emit_library_call (unwind_resume_libfunc, LCT_THROW,
+ VOIDmode, 1, cfun->eh->exc_ptr, ptr_mode);
+
+ /* What we just emitted was a throwing libcall, so it got a
+ barrier automatically added after it. If the last insn in
+ the libcall sequence isn't the barrier, it's because the
+ target emits multiple insns for a call, and there are insns
+ after the actual call insn (which are redundant and would be
+ optimized away). The barrier is inserted exactly after the
+ call insn, so let's go get that and delete the insns after
+ it, because below we need the barrier to be the last insn in
+ the sequence. */
+ delete_insns_since (NEXT_INSN (last_call_insn ()));
+ }
seq = get_insns ();
end_sequence ();
barrier = emit_insn_before (seq, region->resume);
/* Avoid duplicate barrier. */
- if (GET_CODE (barrier) != BARRIER)
- abort ();
+ gcc_assert (BARRIER_P (barrier));
delete_insn (barrier);
delete_insn (region->resume);
+
+ /* ??? From tree-ssa we can wind up with catch regions whose
+ label is not instantiated, but whose resx is present. Now
+ that we've dealt with the resx, kill the region. */
+ if (region->label == NULL && region->type == ERT_CLEANUP)
+ remove_eh_handler (region);
}
}
emit_move_insn (cfun->eh->exc_ptr,
gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
emit_move_insn (cfun->eh->filter,
- gen_rtx_REG (word_mode, EH_RETURN_DATA_REGNO (1)));
+ gen_rtx_REG (targetm.eh_return_filter_mode (),
+ EH_RETURN_DATA_REGNO (1)));
seq = get_insns ();
end_sequence ();
rc = RNL_NOT_CAUGHT;
for (; region; region = region->outer)
{
- rc = reachable_next_level (region, type_thrown, 0);
+ rc = reachable_next_level (region, type_thrown, NULL);
if (rc != RNL_NOT_CAUGHT)
break;
}
rtx note, before, p;
/* Reset value tracking at extended basic block boundaries. */
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
last_call_site = -2;
if (! INSN_P (insn))
/* Calls (and trapping insns) without notes are outside any
exception handling region in this function. Mark them as
no action. */
- if (GET_CODE (insn) == CALL_INSN
+ if (CALL_P (insn)
|| (flag_non_call_exceptions
&& may_trap_p (PATTERN (insn))))
this_call_site = -1;
/* Don't separate a call from it's argument loads. */
before = insn;
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
before = find_first_parameter_load (insn, NULL_RTX);
start_sequence ();
can_throw_internal instructions. */
for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
- if (GET_CODE (fn_begin) == NOTE
+ if (NOTE_P (fn_begin)
&& (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG
|| NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_BASIC_BLOCK))
break;
if (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
- insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
+ insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
else
{
- rtx last = BB_END (ENTRY_BLOCK_PTR->succ->dest);
+ rtx last = BB_END (single_succ (ENTRY_BLOCK_PTR));
for (; ; fn_begin = NEXT_INSN (fn_begin))
- if ((GET_CODE (fn_begin) == NOTE
+ if ((NOTE_P (fn_begin)
&& NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
|| fn_begin == last)
break;
{
rtx seq;
edge e;
+ edge_iterator ei;
start_sequence ();
post-dominates all can_throw_internal instructions. This is
the last possible moment. */
- for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
+ FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
if (e->flags & EDGE_FALLTHRU)
break;
if (e)
/* Figure out whether the place we are supposed to insert libcall
is inside the last basic block or after it. In the other case
we need to emit to edge. */
- if (e->src->next_bb != EXIT_BLOCK_PTR)
- abort ();
- for (insn = NEXT_INSN (BB_END (e->src)); insn; insn = NEXT_INSN (insn))
- if (insn == cfun->eh->sjlj_exit_after)
- break;
- if (insn)
- insert_insn_on_edge (seq, e);
- else
+ gcc_assert (e->src->next_bb == EXIT_BLOCK_PTR);
+ for (insn = BB_HEAD (e->src); ; insn = NEXT_INSN (insn))
{
- insn = cfun->eh->sjlj_exit_after;
- if (GET_CODE (insn) == CODE_LABEL)
- insn = NEXT_INSN (insn);
- emit_insn_after (seq, insn);
+ if (insn == cfun->eh->sjlj_exit_after)
+ {
+ if (LABEL_P (insn))
+ insn = NEXT_INSN (insn);
+ emit_insn_after (seq, insn);
+ return;
+ }
+ if (insn == BB_END (e->src))
+ break;
}
+ insert_insn_on_edge (seq, e);
}
}
connect many of the handlers, and then type information will not
be effective. Still, this is a win over previous implementations. */
- cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
-
/* These registers are used by the landing pads. Make sure they
have been generated. */
get_exception_pointer (cfun);
commit_edge_insertions ();
FOR_EACH_BB (bb)
{
- edge e, next;
+ edge e;
+ edge_iterator ei;
bool eh = false;
- for (e = bb->succ; e; e = next)
+ for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
{
- next = e->succ_next;
if (e->flags & EDGE_EH)
{
remove_edge (e);
eh = true;
}
+ else
+ ei_next (&ei);
}
if (eh)
- make_eh_edge (NULL, bb, BB_END (bb));
+ rtl_make_eh_edge (NULL, bb, BB_END (bb));
}
- cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
}
\f
static hashval_t
tmp.label = label;
slot = (struct ehl_map_entry **)
htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
- if (! slot)
- abort ();
+ gcc_assert (slot);
htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
}
cfun->eh->region_array[region->region_number] = outer;
if (region->aka)
{
- int i;
- EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i,
- { cfun->eh->region_array[i] = outer; });
+ unsigned i;
+ bitmap_iterator bi;
+
+ EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
+ {
+ cfun->eh->region_array[i] = outer;
+ }
}
if (outer)
if (!outer->aka)
outer->aka = BITMAP_GGC_ALLOC ();
if (region->aka)
- bitmap_a_or_b (outer->aka, outer->aka, region->aka);
+ bitmap_ior_into (outer->aka, region->aka);
bitmap_set_bit (outer->aka, region->region_number);
}
try->type == ERT_CATCH;
try = try->next_peer)
continue;
- if (try->type != ERT_TRY)
- abort ();
+ gcc_assert (try->type == ERT_TRY);
next = region->u.catch.next_catch;
prev = region->u.catch.prev_catch;
(*callback) (entry->label);
return 1;
}
+
+/* Invoke CALLBACK once for every exception region in the current
+   function.  Region numbers run from 1 to last_region_number; NULL
+   entries in the region array are skipped.  */
+
+void
+for_each_eh_region (void (*callback) (struct eh_region *))
+{
+  int i, n = cfun->eh->last_region_number;
+  for (i = 1; i <= n; ++i)
+    {
+      struct eh_region *region = cfun->eh->region_array[i];
+      if (region)
+	(*callback) (region);
+    }
+}
\f
/* This section describes CFG exception edges for flow. */
/* For communicating between calls to reachable_next_level. */
-struct reachable_info GTY(())
+struct reachable_info
 {
+  /* Types already handled/permitted while walking outward through the
+     enclosing regions; presumably maintained by reachable_next_level
+     -- confirm there, its body is not visible here.  */
   tree types_caught;
   tree types_allowed;
+  /* Invoked once per reachable handler region, with CALLBACK_DATA as
+     its second argument; see add_reachable_handler.  */
+  void (*callback) (struct eh_region *, void *);
+  void *callback_data;
+  /* Set as soon as any handler has been reported through
+     add_reachable_handler.  */
+  bool saw_any_handlers;
 };
/* A subroutine of reachable_next_level. Return true if TYPE, or a
LP_REGION contains the landing pad; REGION is the handler. */
 static void
-add_reachable_handler (struct reachable_info *info, struct eh_region *lp_region, struct eh_region *region)
+add_reachable_handler (struct reachable_info *info,
+		       struct eh_region *lp_region, struct eh_region *region)
 {
   if (! info)
     return;
+  /* Let the caller's search know that at least one handler was
+     reachable.  */
+  info->saw_any_handlers = true;
+
   if (cfun->eh->built_landing_pads)
-    {
-      if (! info->handlers)
-	info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
-    }
+    /* Once landing pads have been built, the pad region is the real
+       entry point.  */
+    info->callback (lp_region, info->callback_data);
   else
-    info->handlers = alloc_INSN_LIST (region->label, info->handlers);
+    info->callback (region, info->callback_data);
 }
/* Process one level of exception regions for reachability.
If we've touched down at some landing pad previous, then the
explicit function call we generated may be used. Otherwise
the call is made by the runtime. */
- if (info && info->handlers)
+ if (info && info->saw_any_handlers)
{
add_reachable_handler (info, region, region);
return RNL_CAUGHT;
case ERT_FIXUP:
case ERT_UNKNOWN:
/* Shouldn't see these here. */
+ gcc_unreachable ();
break;
+ default:
+ gcc_unreachable ();
}
-
- abort ();
}
-/* Retrieve a list of labels of exception handlers which can be
- reached by a given insn. */
+/* Invoke CALLBACK on each region reachable from REGION_NUMBER. */
-rtx
-reachable_handlers (rtx insn)
+void
+foreach_reachable_handler (int region_number, bool is_resx,
+ void (*callback) (struct eh_region *, void *),
+ void *callback_data)
{
struct reachable_info info;
struct eh_region *region;
tree type_thrown;
- int region_number;
-
- if (GET_CODE (insn) == JUMP_INSN
- && GET_CODE (PATTERN (insn)) == RESX)
- region_number = XINT (PATTERN (insn), 0);
- else
- {
- rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
- if (!note || INTVAL (XEXP (note, 0)) <= 0)
- return NULL;
- region_number = INTVAL (XEXP (note, 0));
- }
memset (&info, 0, sizeof (info));
+ info.callback = callback;
+ info.callback_data = callback_data;
region = cfun->eh->region_array[region_number];
type_thrown = NULL_TREE;
- if (GET_CODE (insn) == JUMP_INSN
- && GET_CODE (PATTERN (insn)) == RESX)
+ if (is_resx)
{
/* A RESX leaves a region instead of entering it. Thus the
region itself may have been deleted out from under us. */
if (region == NULL)
- return NULL;
+ return;
region = region->outer;
}
else if (region->type == ERT_THROW)
else
region = region->outer;
}
-
- return info.handlers;
}
-/* Determine if the given INSN can throw an exception that is caught
-   within the function.  */
+/* Retrieve a list of labels of exception handlers which can be
+   reached by a given insn.  */
-bool
-can_throw_internal (rtx insn)
+/* Callback for foreach_reachable_handler, used by reachable_handlers
+   below: record REGION's landing pad in *(rtx *) DATA as a
+   single-entry INSN_LIST.  Only the first landing pad found is
+   kept.  */
+
+static void
+arh_to_landing_pad (struct eh_region *region, void *data)
 {
-  struct eh_region *region;
-  tree type_thrown;
-  rtx note;
+  rtx *p_handlers = data;
+  if (! *p_handlers)
+    *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
+}
- if (! INSN_P (insn))
- return false;
+/* Callback for foreach_reachable_handler: cons REGION's handler
+   entry label onto the INSN_LIST hanging off *(rtx *) DATA.  */
+
+static void
+arh_to_label (struct eh_region *region, void *data)
+{
+  rtx *p_handlers = data;
+  *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
+}
- if (GET_CODE (insn) == INSN
- && GET_CODE (PATTERN (insn)) == SEQUENCE)
- insn = XVECEXP (PATTERN (insn), 0, 0);
+rtx
+reachable_handlers (rtx insn)
+{
+  bool is_resx = false;
+  rtx handlers = NULL;
+  int region_number;
-  if (GET_CODE (insn) == CALL_INSN
-      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
+  if (JUMP_P (insn)
+      && GET_CODE (PATTERN (insn)) == RESX)
     {
-      int i;
-      for (i = 0; i < 3; ++i)
-	{
-	  rtx sub = XEXP (PATTERN (insn), i);
-	  for (; sub ; sub = NEXT_INSN (sub))
-	    if (can_throw_internal (sub))
-	      return true;
-	}
-      return false;
+      /* A RESX names the region it leaves directly in its pattern.  */
+      region_number = XINT (PATTERN (insn), 0);
+      is_resx = true;
+    }
+  else
+    {
+      /* Otherwise the region comes from INSN's REG_EH_REGION note;
+	 no note, or a non-positive region number, means no handler is
+	 reachable.  */
+      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
+      if (!note || INTVAL (XEXP (note, 0)) <= 0)
+	return NULL;
+      region_number = INTVAL (XEXP (note, 0));
     }
-  /* Every insn that might throw has an EH_REGION note.  */
-  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
-  if (!note || INTVAL (XEXP (note, 0)) <= 0)
-    return false;
+  /* Collect landing-pad labels once the pads have been built,
+     handler entry labels before then.  */
+  foreach_reachable_handler (region_number, is_resx,
+			     (cfun->eh->built_landing_pads
+			      ? arh_to_landing_pad
+			      : arh_to_label),
+			     &handlers);
+
+  return handlers;
+}
+
+/* Determine if the given INSN can throw an exception that is caught
+ within the function. */
+
+bool
+can_throw_internal_1 (int region_number)
+{
+ struct eh_region *region;
+ tree type_thrown;
- region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
+ region = cfun->eh->region_array[region_number];
type_thrown = NULL_TREE;
if (region->type == ERT_THROW)
return false;
}
+/* Return true if INSN can throw an exception that is caught within
+   this function.  Extracts the EH region number from INSN and defers
+   to can_throw_internal_1.  */
+
+bool
+can_throw_internal (rtx insn)
+{
+  rtx note;
+
+  if (! INSN_P (insn))
+    return false;
+
+  /* A RESX carries its region number directly in its pattern.  */
+  if (JUMP_P (insn)
+      && GET_CODE (PATTERN (insn)) == RESX
+      && XINT (PATTERN (insn), 0) > 0)
+    return can_throw_internal_1 (XINT (PATTERN (insn), 0));
+
+  /* For a SEQUENCE, look for the note on its first insn.  */
+  if (NONJUMP_INSN_P (insn)
+      && GET_CODE (PATTERN (insn)) == SEQUENCE)
+    insn = XVECEXP (PATTERN (insn), 0, 0);
+
+  /* Every insn that might throw has an EH_REGION note.  */
+  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
+  if (!note || INTVAL (XEXP (note, 0)) <= 0)
+    return false;
+
+  return can_throw_internal_1 (INTVAL (XEXP (note, 0)));
+}
+
 /* Determine if the given INSN can throw an exception that is
    visible outside the function.  */
 bool
-can_throw_external (rtx insn)
+can_throw_external_1 (int region_number)
 {
   struct eh_region *region;
   tree type_thrown;
+
+  region = cfun->eh->region_array[region_number];
+
+  /* An ERT_THROW region records the thrown type; the regions that
+     might catch it start at its enclosing region.  */
+  type_thrown = NULL_TREE;
+  if (region->type == ERT_THROW)
+    {
+      type_thrown = region->u.throw.type;
+      region = region->outer;
+    }
+
+  /* If the exception is caught or blocked by any containing region,
+     then it is not seen by any calling function.  */
+  for (; region ; region = region->outer)
+    if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
+      return false;
+
+  return true;
+}
+
+/* Return true if INSN can throw an exception visible outside this
+   function.  Wrapper that digs the EH region number out of INSN's
+   REG_EH_REGION note and defers to can_throw_external_1.  */
+
+bool
+can_throw_external (rtx insn)
+{
   rtx note;
   if (! INSN_P (insn))
     return false;
-  if (GET_CODE (insn) == INSN
+  if (NONJUMP_INSN_P (insn)
       && GET_CODE (PATTERN (insn)) == SEQUENCE)
     insn = XVECEXP (PATTERN (insn), 0, 0);
-  if (GET_CODE (insn) == CALL_INSN
-      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
-    {
-      int i;
-      for (i = 0; i < 3; ++i)
-	{
-	  rtx sub = XEXP (PATTERN (insn), i);
-	  for (; sub ; sub = NEXT_INSN (sub))
-	    if (can_throw_external (sub))
-	      return true;
-	}
-      return false;
-    }
-
   note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
   if (!note)
     {
	 assume it might throw.  Given that the front end and middle
	 ends mark known NOTHROW functions, this isn't so wildly
	 inaccurate.  */
-      return (GET_CODE (insn) == CALL_INSN
+      return (CALL_P (insn)
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))));
     }
   if (INTVAL (XEXP (note, 0)) <= 0)
     return false;
-  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
-
-  type_thrown = NULL_TREE;
-  if (region->type == ERT_THROW)
-    {
-      type_thrown = region->u.throw.type;
-      region = region->outer;
-    }
-
-  /* If the exception is caught or blocked by any containing region,
-     then it is not seen by any calling function.  */
-  for (; region ; region = region->outer)
-    if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
-      return false;
-
-  return true;
+  return can_throw_external_1 (INTVAL (XEXP (note, 0)));
 }
-/* Set current_function_nothrow and cfun->all_throwers_are_sibcalls. */
+/* Set TREE_NOTHROW and cfun->all_throwers_are_sibcalls. */
void
set_nothrow_function_flags (void)
{
rtx insn;
- current_function_nothrow = 1;
+ TREE_NOTHROW (current_function_decl) = 1;
/* Assume cfun->all_throwers_are_sibcalls until we encounter
something that can throw an exception. We specifically exempt
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
if (can_throw_external (insn))
{
- current_function_nothrow = 0;
+ TREE_NOTHROW (current_function_decl) = 0;
- if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
+ if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
{
cfun->all_throwers_are_sibcalls = 0;
return;
insn = XEXP (insn, 1))
if (can_throw_external (insn))
{
- current_function_nothrow = 0;
+ TREE_NOTHROW (current_function_decl) = 0;
- if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
+ if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
{
cfun->all_throwers_are_sibcalls = 0;
return;
if (TREE_CODE (which) != INTEGER_CST)
{
- error ("argument of `__builtin_eh_return_regno' must be constant");
+ error ("argument of %<__builtin_eh_return_regno%> must be constant");
return constm1_rtx;
}
/* An exception specification adds its filter to the
beginning of the chain. */
next = collect_one_action_chain (ar_hash, region->outer);
- return add_action_record (ar_hash, region->u.allowed.filter,
- next < 0 ? 0 : next);
+
+ /* If there is no next action, terminate the chain. */
+ if (next == -1)
+ next = 0;
+ /* If all outer actions are cleanups or must_not_throw,
+ we'll have no action record for it, since we had wanted
+ to encode these states in the call-site record directly.
+ Add a cleanup action to the chain to catch these. */
+ else if (next <= 0)
+ next = add_action_record (ar_hash, 0, 0);
+
+ return add_action_record (ar_hash, region->u.allowed.filter, next);
case ERT_MUST_NOT_THROW:
/* A must-not-throw region with no inner handlers or cleanups
return collect_one_action_chain (ar_hash, region->outer);
default:
- abort ();
+ gcc_unreachable ();
}
}
rtx this_landing_pad;
insn = iter;
- if (GET_CODE (insn) == INSN
+ if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
insn = XVECEXP (PATTERN (insn), 0, 0);
note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
if (!note)
{
- if (! (GET_CODE (insn) == CALL_INSN
+ if (! (CALL_P (insn)
|| (flag_non_call_exceptions
&& may_trap_p (PATTERN (insn)))))
continue;
static void
dw2_output_call_site_table (void)
{
- const char *const function_start_lab
- = IDENTIFIER_POINTER (current_function_func_begin_label);
int n = cfun->eh->call_site_data_used;
int i;
/* ??? Perhaps use attr_length to choose data1 or data2 instead of
data4 if the function is small enough. */
#ifdef HAVE_AS_LEB128
- dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
+ dw2_asm_output_delta_uleb128 (reg_start_lab,
+ current_function_func_begin_label,
"region %d start", i);
dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
"length");
if (cs->landing_pad)
- dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
+ dw2_asm_output_delta_uleb128 (landing_pad_lab,
+ current_function_func_begin_label,
"landing pad");
else
dw2_asm_output_data_uleb128 (0, "landing pad");
#else
- dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
+ dw2_asm_output_delta (4, reg_start_lab,
+ current_function_func_begin_label,
"region %d start", i);
dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
if (cs->landing_pad)
- dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
+ dw2_asm_output_delta (4, landing_pad_lab,
+ current_function_func_begin_label,
"landing pad");
else
dw2_asm_output_data (4, 0, "landing pad");
if (targetm.have_named_sections)
{
int flags;
-#ifdef HAVE_LD_RO_RW_SECTION_MIXING
- int tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
- flags = (! flag_pic
- || ((tt_format & 0x70) != DW_EH_PE_absptr
- && (tt_format & 0x70) != DW_EH_PE_aligned))
- ? 0 : SECTION_WRITE;
-#else
- flags = SECTION_WRITE;
-#endif
+ if (EH_TABLES_CAN_BE_READ_ONLY)
+ {
+ int tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
+
+ flags = (! flag_pic
+ || ((tt_format & 0x70) != DW_EH_PE_absptr
+ && (tt_format & 0x70) != DW_EH_PE_aligned))
+ ? 0 : SECTION_WRITE;
+ }
+ else
+ flags = SECTION_WRITE;
named_section_flags (".gcc_except_table", flags);
}
else if (flag_pic)
if (! cfun->uses_eh_lsda)
return;
-#ifdef IA64_UNWIND_INFO
+#ifdef TARGET_UNWIND_INFO
+ /* TODO: Move this into target file. */
+ assemble_external_libcall (eh_personality_libfunc);
fputs ("\t.personality\t", asm_out_file);
output_addr_const (asm_out_file, eh_personality_libfunc);
fputs ("\n\t.handlerdata\n", asm_out_file);
/* Note that varasm still thinks we're in the function's code section.
The ".endp" directive that will immediately follow will take us back. */
#else
- (*targetm.asm_out.exception_section) ();
+ targetm.asm_out.exception_section ();
#endif
have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
assemble_align (tt_format_size * BITS_PER_UNIT);
}
- (*targetm.asm_out.internal_label) (asm_out_file, "LLSDA",
+ targetm.asm_out.internal_label (asm_out_file, "LLSDA",
current_function_funcdef_no);
/* The LSDA header. */
/* Let cgraph know that the rtti decl is used. Not all of the
paths below go through assemble_integer, which would take
care of this for us. */
+ STRIP_NOPS (type);
if (TREE_CODE (type) == ADDR_EXPR)
{
type = TREE_OPERAND (type, 0);
- node = cgraph_varpool_node (type);
- if (node)
- cgraph_varpool_mark_needed_node (node);
+ if (TREE_CODE (type) == VAR_DECL)
+ {
+ node = cgraph_varpool_node (type);
+ if (node)
+ cgraph_varpool_mark_needed_node (node);
+ }
}
- else if (TREE_CODE (type) != INTEGER_CST)
- abort ();
+ else
+ gcc_assert (TREE_CODE (type) == INTEGER_CST);
}
if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)