/* Implements exception handling.
- Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
- 1999, 2000, 2001 Free Software Foundation, Inc.
+ Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
+ 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
Contributed by Mike Stump <mrs@cygnus.com>.
-This file is part of GNU CC.
+This file is part of GCC.
-GNU CC is free software; you can redistribute it and/or modify
-it under the terms of the GNU General Public License as published by
-the Free Software Foundation; either version 2, or (at your option)
-any later version.
+GCC is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation; either version 2, or (at your option) any later
+version.
-GNU CC is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-GNU General Public License for more details.
+GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or
+FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+for more details.
You should have received a copy of the GNU General Public License
-along with GNU CC; see the file COPYING. If not, write to
-the Free Software Foundation, 59 Temple Place - Suite 330,
-Boston, MA 02111-1307, USA. */
+along with GCC; see the file COPYING. If not, write to the Free
+Software Foundation, 59 Temple Place - Suite 330, Boston, MA
+02111-1307, USA. */
/* An exception is an event that can be signaled from within a
#include "config.h"
#include "system.h"
+#include "coretypes.h"
+#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
+#include "libfuncs.h"
#include "insn-config.h"
#include "except.h"
#include "integrate.h"
#include "intl.h"
#include "ggc.h"
#include "tm_p.h"
-
+#include "target.h"
+#include "langhooks.h"
/* Provide defaults for stuff that may not be defined when using
sjlj exceptions. */
-#ifndef EH_RETURN_STACKADJ_RTX
-#define EH_RETURN_STACKADJ_RTX 0
-#endif
-#ifndef EH_RETURN_HANDLER_RTX
-#define EH_RETURN_HANDLER_RTX 0
-#endif
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif
/* Map a type to a runtime object to match type. */
tree (*lang_eh_runtime_type) PARAMS ((tree));
-/* A list of labels used for exception handlers. */
-rtx exception_handler_labels;
+/* A hash table of label to region number. */
+
+struct ehl_map_entry GTY(())
+{
+ rtx label;
+ struct eh_region *region;
+};
-static int call_site_base;
-static int sjlj_funcdef_number;
-static htab_t type_to_runtime_map;
+static GTY(()) int call_site_base;
+static GTY ((param_is (union tree_node)))
+ htab_t type_to_runtime_map;
/* Describe the SjLj_Function_Context structure. */
-static tree sjlj_fc_type_node;
+static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_jbuf_ofs;
\f
/* Describes one exception region. */
-struct eh_region
+struct eh_region GTY(())
{
/* The immediately surrounding region. */
struct eh_region *outer;
/* An identifier for this region. */
int region_number;
+ /* When a region is deleted, its parents inherit the REG_EH_REGION
+ numbers already assigned. */
+ bitmap aka;
+
/* Each region does exactly one thing. */
enum eh_region_type
{
- ERT_CLEANUP = 1,
+ ERT_UNKNOWN = 0,
+ ERT_CLEANUP,
ERT_TRY,
ERT_CATCH,
ERT_ALLOWED_EXCEPTIONS,
ERT_FIXUP
} type;
- /* Holds the action to perform based on the preceeding type. */
- union {
+ /* Holds the action to perform based on the preceding type. */
+ union eh_region_u {
/* A list of catch blocks, a surrounding try block,
and the label for continuing after a catch. */
- struct {
+ struct eh_region_u_try {
struct eh_region *catch;
struct eh_region *last_catch;
struct eh_region *prev_try;
rtx continue_label;
- } try;
+ } GTY ((tag ("ERT_TRY"))) try;
- /* The list through the catch handlers, the type object
- matched, and a pointer to the generated code. */
- struct {
+ /* The list through the catch handlers, the list of type objects
+ matched, and the list of associated filters. */
+ struct eh_region_u_catch {
struct eh_region *next_catch;
struct eh_region *prev_catch;
- tree type;
- int filter;
- } catch;
+ tree type_list;
+ tree filter_list;
+ } GTY ((tag ("ERT_CATCH"))) catch;
/* A tree_list of allowed types. */
- struct {
+ struct eh_region_u_allowed {
tree type_list;
int filter;
- } allowed;
+ } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;
- /* The type given by a call to "throw foo();", or discovered
+ /* The type given by a call to "throw foo();", or discovered
for a throw. */
- struct {
+ struct eh_region_u_throw {
tree type;
- } throw;
+ } GTY ((tag ("ERT_THROW"))) throw;
/* Retain the cleanup expression even after expansion so that
we can match up fixup regions. */
- struct {
+ struct eh_region_u_cleanup {
tree exp;
- } cleanup;
+ struct eh_region *prev_try;
+ } GTY ((tag ("ERT_CLEANUP"))) cleanup;
/* The real region (by expression and by pointer) that fixup code
should live in. */
- struct {
+ struct eh_region_u_fixup {
tree cleanup_exp;
struct eh_region *real_region;
- } fixup;
- } u;
+ } GTY ((tag ("ERT_FIXUP"))) fixup;
+ } GTY ((desc ("%0.type"))) u;
/* Entry point for this region's handler before landing pads are built. */
rtx label;
/* The RESX insn for handing off control to the next outermost handler,
if appropriate. */
rtx resume;
+
+ /* True if something in this region may throw. */
+ unsigned may_contain_throw : 1;
+};
+
+struct call_site_record GTY(())
+{
+ rtx landing_pad;
+ int action;
};
/* Used to save exception status for each function. */
-struct eh_status
+struct eh_status GTY(())
{
/* The tree of all regions for this function. */
struct eh_region *region_tree;
/* The same information as an indexable array. */
- struct eh_region **region_array;
+ struct eh_region ** GTY ((length ("%h.last_region_number"))) region_array;
/* The most recently open region. */
struct eh_region *cur_region;
/* This is the region for which we are processing catch blocks. */
struct eh_region *try_region;
- /* A stack (TREE_LIST) of lists of handlers. The TREE_VALUE of each
- node is itself a TREE_CHAINed list of handlers for regions that
- are not yet closed. The TREE_VALUE of each entry contains the
- handler for the corresponding entry on the ehstack. */
- tree protect_list;
-
rtx filter;
rtx exc_ptr;
varray_type ehspec_data;
varray_type action_record_data;
- struct call_site_record
- {
- rtx landing_pad;
- int action;
- } *call_site_data;
+ htab_t GTY ((param_is (struct ehl_map_entry))) exception_handler_label_map;
+
+ struct call_site_record * GTY ((length ("%h.call_site_data_used")))
+ call_site_data;
int call_site_data_used;
int call_site_data_size;
};
\f
-static void mark_eh_region PARAMS ((struct eh_region *));
-
-static int t2r_eq PARAMS ((const PTR,
- const PTR));
-static hashval_t t2r_hash PARAMS ((const PTR));
-static int t2r_mark_1 PARAMS ((PTR *, PTR));
-static void t2r_mark PARAMS ((PTR));
+static int t2r_eq PARAMS ((const void *,
+ const void *));
+static hashval_t t2r_hash PARAMS ((const void *));
static void add_type_for_runtime PARAMS ((tree));
static tree lookup_type_for_runtime PARAMS ((tree));
static void collect_eh_region_array PARAMS ((void));
static void resolve_fixup_regions PARAMS ((void));
static void remove_fixup_regions PARAMS ((void));
+static void remove_unreachable_regions PARAMS ((rtx));
static void convert_from_eh_region_ranges_1 PARAMS ((rtx *, int *, int));
static struct eh_region *duplicate_eh_region_1 PARAMS ((struct eh_region *,
struct inline_remap *));
static void duplicate_eh_region_2 PARAMS ((struct eh_region *,
struct eh_region **));
-static int ttypes_filter_eq PARAMS ((const PTR,
- const PTR));
-static hashval_t ttypes_filter_hash PARAMS ((const PTR));
-static int ehspec_filter_eq PARAMS ((const PTR,
- const PTR));
-static hashval_t ehspec_filter_hash PARAMS ((const PTR));
+static int ttypes_filter_eq PARAMS ((const void *,
+ const void *));
+static hashval_t ttypes_filter_hash PARAMS ((const void *));
+static int ehspec_filter_eq PARAMS ((const void *,
+ const void *));
+static hashval_t ehspec_filter_hash PARAMS ((const void *));
static int add_ttypes_entry PARAMS ((htab_t, tree));
static int add_ehspec_entry PARAMS ((htab_t, htab_t,
tree));
PARAMS ((rtx, struct sjlj_lp_info *));
static void sjlj_build_landing_pads PARAMS ((void));
+static hashval_t ehl_hash PARAMS ((const void *));
+static int ehl_eq PARAMS ((const void *,
+ const void *));
+static void add_ehl_entry PARAMS ((rtx,
+ struct eh_region *));
static void remove_exception_handler_label PARAMS ((rtx));
static void remove_eh_handler PARAMS ((struct eh_region *));
+static int for_each_eh_label_1 PARAMS ((void **, void *));
struct reachable_info;
static enum reachable_code reachable_next_level
PARAMS ((struct eh_region *, tree, struct reachable_info *));
-static int action_record_eq PARAMS ((const PTR,
- const PTR));
-static hashval_t action_record_hash PARAMS ((const PTR));
+static int action_record_eq PARAMS ((const void *,
+ const void *));
+static hashval_t action_record_hash PARAMS ((const void *));
static int add_action_record PARAMS ((htab_t, int, int));
static int collect_one_action_chain PARAMS ((htab_t,
struct eh_region *));
\f
/* Routine to see if exception handling is turned on.
- DO_WARN is non-zero if we want to inform the user that exception
- handling is turned off.
+ DO_WARN is nonzero if we want to inform the user that exception
+ handling is turned off.
This is used to ensure that -fexceptions has been specified if the
compiler tries to use any exception-specific functions. */
void
init_eh ()
{
- ggc_add_rtx_root (&exception_handler_labels, 1);
-
if (! flag_exceptions)
return;
- type_to_runtime_map = htab_create (31, t2r_hash, t2r_eq, NULL);
- ggc_add_root (&type_to_runtime_map, 1, sizeof (htab_t), t2r_mark);
+ type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);
/* Create the SjLj_Function_Context structure. This should match
the definition in unwind-sjlj.c. */
{
tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
- sjlj_fc_type_node = make_lang_type (RECORD_TYPE);
- ggc_add_tree_root (&sjlj_fc_type_node, 1);
+ sjlj_fc_type_node = (*lang_hooks.types.make_type) (RECORD_TYPE);
f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
build_pointer_type (sjlj_fc_type_node));
DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
tmp = build_index_type (build_int_2 (4 - 1, 0));
- tmp = build_array_type (type_for_mode (word_mode, 1), tmp);
+ tmp = build_array_type ((*lang_hooks.types.type_for_mode) (word_mode, 1),
+ tmp);
f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
void
init_eh_for_function ()
{
- cfun->eh = (struct eh_status *) xcalloc (1, sizeof (struct eh_status));
-}
-
-/* Mark EH for GC. */
-
-static void
-mark_eh_region (region)
- struct eh_region *region;
-{
- if (! region)
- return;
-
- switch (region->type)
- {
- case ERT_CLEANUP:
- ggc_mark_tree (region->u.cleanup.exp);
- break;
- case ERT_TRY:
- ggc_mark_rtx (region->u.try.continue_label);
- break;
- case ERT_CATCH:
- ggc_mark_tree (region->u.catch.type);
- break;
- case ERT_ALLOWED_EXCEPTIONS:
- ggc_mark_tree (region->u.allowed.type_list);
- break;
- case ERT_MUST_NOT_THROW:
- break;
- case ERT_THROW:
- ggc_mark_tree (region->u.throw.type);
- break;
- case ERT_FIXUP:
- ggc_mark_tree (region->u.fixup.cleanup_exp);
- break;
- default:
- abort ();
- }
-
- ggc_mark_rtx (region->label);
- ggc_mark_rtx (region->resume);
- ggc_mark_rtx (region->landing_pad);
- ggc_mark_rtx (region->post_landing_pad);
-}
-
-void
-mark_eh_status (eh)
- struct eh_status *eh;
-{
- int i;
-
- if (eh == 0)
- return;
-
- /* If we've called collect_eh_region_array, use it. Otherwise walk
- the tree non-recursively. */
- if (eh->region_array)
- {
- for (i = eh->last_region_number; i > 0; --i)
- {
- struct eh_region *r = eh->region_array[i];
- if (r && r->region_number == i)
- mark_eh_region (r);
- }
- }
- else if (eh->region_tree)
- {
- struct eh_region *r = eh->region_tree;
- while (1)
- {
- mark_eh_region (r);
- if (r->inner)
- r = r->inner;
- else if (r->next_peer)
- r = r->next_peer;
- else
- {
- do {
- r = r->outer;
- if (r == NULL)
- goto tree_done;
- } while (r->next_peer == NULL);
- r = r->next_peer;
- }
- }
- tree_done:;
- }
-
- ggc_mark_tree (eh->protect_list);
- ggc_mark_rtx (eh->filter);
- ggc_mark_rtx (eh->exc_ptr);
- ggc_mark_tree_varray (eh->ttype_data);
-
- if (eh->call_site_data)
- {
- for (i = eh->call_site_data_used - 1; i >= 0; --i)
- ggc_mark_rtx (eh->call_site_data[i].landing_pad);
- }
-
- ggc_mark_rtx (eh->ehr_stackadj);
- ggc_mark_rtx (eh->ehr_handler);
- ggc_mark_rtx (eh->ehr_label);
-
- ggc_mark_rtx (eh->sjlj_fc);
- ggc_mark_rtx (eh->sjlj_exit_after);
-}
-
-void
-free_eh_status (f)
- struct function *f;
-{
- struct eh_status *eh = f->eh;
-
- if (eh->region_array)
- {
- int i;
- for (i = eh->last_region_number; i > 0; --i)
- {
- struct eh_region *r = eh->region_array[i];
- /* Mind we don't free a region struct more than once. */
- if (r && r->region_number == i)
- free (r);
- }
- free (eh->region_array);
- }
- else if (eh->region_tree)
- {
- struct eh_region *next, *r = eh->region_tree;
- while (1)
- {
- if (r->inner)
- r = r->inner;
- else if (r->next_peer)
- {
- next = r->next_peer;
- free (r);
- r = next;
- }
- else
- {
- do {
- next = r->outer;
- free (r);
- r = next;
- if (r == NULL)
- goto tree_done;
- } while (r->next_peer == NULL);
- next = r->next_peer;
- free (r);
- r = next;
- }
- }
- tree_done:;
- }
-
- VARRAY_FREE (eh->ttype_data);
- VARRAY_FREE (eh->ehspec_data);
- VARRAY_FREE (eh->action_record_data);
- if (eh->call_site_data)
- free (eh->call_site_data);
-
- free (eh);
- f->eh = NULL;
+ cfun->eh = (struct eh_status *)
+ ggc_alloc_cleared (sizeof (struct eh_status));
}
-
\f
/* Start an exception handling region. All instructions emitted
after this point are considered to be part of the region until
return;
/* Insert a new blank region as a leaf in the tree. */
- new_region = (struct eh_region *) xcalloc (1, sizeof (*new_region));
+ new_region = (struct eh_region *) ggc_alloc_cleared (sizeof (*new_region));
cur_region = cfun->eh->cur_region;
new_region->outer = cur_region;
if (cur_region)
struct eh_region *cur_region = cfun->eh->cur_region;
rtx note;
- /* Create a nute marking the end of this region. */
+ /* Create a note marking the end of this region. */
note = emit_note (NULL, NOTE_INSN_EH_REGION_END);
NOTE_EH_HANDLER (note) = cur_region->region_number;
region->type = ERT_CLEANUP;
region->label = gen_label_rtx ();
region->u.cleanup.exp = handler;
+ region->u.cleanup.prev_try = cfun->eh->try_region;
around_label = gen_label_rtx ();
emit_jump (around_label);
emit_label (region->label);
- /* Give the language a chance to specify an action to be taken if an
- exception is thrown that would propogate out of the HANDLER. */
- protect_cleanup_actions
- = (lang_protect_cleanup_actions
- ? (*lang_protect_cleanup_actions) ()
- : NULL_TREE);
+ if (flag_non_call_exceptions || region->may_contain_throw)
+ {
+ /* Give the language a chance to specify an action to be taken if an
+ exception is thrown that would propagate out of the HANDLER. */
+ protect_cleanup_actions
+ = (lang_protect_cleanup_actions
+ ? (*lang_protect_cleanup_actions) ()
+ : NULL_TREE);
- if (protect_cleanup_actions)
- expand_eh_region_start ();
+ if (protect_cleanup_actions)
+ expand_eh_region_start ();
- /* In case this cleanup involves an inline destructor with a try block in
- it, we need to save the EH return data registers around it. */
- data_save[0] = gen_reg_rtx (Pmode);
- emit_move_insn (data_save[0], get_exception_pointer (cfun));
- data_save[1] = gen_reg_rtx (word_mode);
- emit_move_insn (data_save[1], get_exception_filter (cfun));
+ /* In case this cleanup involves an inline destructor with a try block in
+ it, we need to save the EH return data registers around it. */
+ data_save[0] = gen_reg_rtx (ptr_mode);
+ emit_move_insn (data_save[0], get_exception_pointer (cfun));
+ data_save[1] = gen_reg_rtx (word_mode);
+ emit_move_insn (data_save[1], get_exception_filter (cfun));
- expand_expr (handler, const0_rtx, VOIDmode, 0);
+ expand_expr (handler, const0_rtx, VOIDmode, 0);
- emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
- emit_move_insn (cfun->eh->filter, data_save[1]);
+ emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
+ emit_move_insn (cfun->eh->filter, data_save[1]);
- if (protect_cleanup_actions)
- expand_eh_region_end_must_not_throw (protect_cleanup_actions);
+ if (protect_cleanup_actions)
+ expand_eh_region_end_must_not_throw (protect_cleanup_actions);
- /* We need any stack adjustment complete before the around_label. */
- do_pending_stack_adjust ();
+ /* We need any stack adjustment complete before the around_label. */
+ do_pending_stack_adjust ();
+ }
/* We delay the generation of the _Unwind_Resume until we generate
landing pads. We emit a marker here so as to get good control
emit_jump (region->u.try.continue_label);
}
-/* Begin a catch clause. TYPE is the type caught, or null if this is
- a catch-all clause. */
+/* Begin a catch clause. TYPE is the type caught, a list of such types, or
+ null if this is a catch-all clause. Providing a type list makes it
+ possible to associate the catch region with potentially several exception
+ types, which is useful e.g. for Ada. */
void
-expand_start_catch (type)
- tree type;
+expand_start_catch (type_or_list)
+ tree type_or_list;
{
struct eh_region *t, *c, *l;
+ tree type_list;
if (! doing_eh (0))
return;
- if (type)
- add_type_for_runtime (type);
+ type_list = type_or_list;
+
+ if (type_or_list)
+ {
+ /* Make sure we always end up with a type list, so that further
+ processing is uniform; then register each type with the runtime
+ types map. */
+ tree type_node;
+
+ if (TREE_CODE (type_or_list) != TREE_LIST)
+ type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
+
+ type_node = type_list;
+ for (; type_node; type_node = TREE_CHAIN (type_node))
+ add_type_for_runtime (TREE_VALUE (type_node));
+ }
+
expand_eh_region_start ();
t = cfun->eh->try_region;
c = cfun->eh->cur_region;
c->type = ERT_CATCH;
- c->u.catch.type = type;
+ c->u.catch.type_list = type_list;
c->label = gen_label_rtx ();
l = t->u.try.last_catch;
void
expand_end_catch ()
{
- struct eh_region *try_region, *catch_region;
+ struct eh_region *try_region;
if (! doing_eh (0))
return;
- catch_region = expand_eh_region_end ();
+ expand_eh_region_end ();
try_region = cfun->eh->try_region;
emit_jump (try_region->u.try.continue_label);
/* End an exception region for an exception type filter. ALLOWED is a
TREE_LIST of types to be matched by the runtime. FAILURE is an
- expression to invoke if a mismatch ocurrs. */
+ expression to invoke if a mismatch occurs.
+
+ ??? We could use these semantics for calls to rethrow, too; if we can
+ see the surrounding catch clause, we know that the exception we're
+ rethrowing satisfies the "filter" of the catch type. */
void
expand_eh_region_end_allowed (allowed, failure)
fixup->u.fixup.cleanup_exp = handler;
}
+/* Note that the current EH region (if any) may contain a throw, or a
+ call to a function which itself may contain a throw. */
+
+void
+note_eh_region_may_contain_throw ()
+{
+ struct eh_region *region;
+
+ region = cfun->eh->cur_region;
+ while (region && !region->may_contain_throw)
+ {
+ region->may_contain_throw = 1;
+ region = region->outer;
+ }
+}
+
/* Return an rtl expression for a pointer to the exception object
within a handler. */
rtx exc_ptr = fun->eh->exc_ptr;
if (fun == cfun && ! exc_ptr)
{
- exc_ptr = gen_reg_rtx (Pmode);
+ exc_ptr = gen_reg_rtx (ptr_mode);
fun->eh->exc_ptr = exc_ptr;
}
return exc_ptr;
return filter;
}
\f
-/* Begin a region that will contain entries created with
- add_partial_entry. */
-
-void
-begin_protect_partials ()
-{
- /* Push room for a new list. */
- cfun->eh->protect_list
- = tree_cons (NULL_TREE, NULL_TREE, cfun->eh->protect_list);
-}
-
-/* Start a new exception region for a region of code that has a
- cleanup action and push the HANDLER for the region onto
- protect_list. All of the regions created with add_partial_entry
- will be ended when end_protect_partials is invoked. */
-
-void
-add_partial_entry (handler)
- tree handler;
-{
- expand_eh_region_start ();
-
- /* ??? This comment was old before the most recent rewrite. We
- really ought to fix the callers at some point. */
- /* For backwards compatibility, we allow callers to omit calls to
- begin_protect_partials for the outermost region. So, we must
- explicitly do so here. */
- if (!cfun->eh->protect_list)
- begin_protect_partials ();
-
- /* Add this entry to the front of the list. */
- TREE_VALUE (cfun->eh->protect_list)
- = tree_cons (NULL_TREE, handler, TREE_VALUE (cfun->eh->protect_list));
-}
-
-/* End all the pending exception regions on protect_list. */
-
-void
-end_protect_partials ()
-{
- tree t;
-
- /* ??? This comment was old before the most recent rewrite. We
- really ought to fix the callers at some point. */
- /* For backwards compatibility, we allow callers to omit the call to
- begin_protect_partials for the outermost region. So,
- PROTECT_LIST may be NULL. */
- if (!cfun->eh->protect_list)
- return;
-
- /* Pop the topmost entry. */
- t = TREE_VALUE (cfun->eh->protect_list);
- cfun->eh->protect_list = TREE_CHAIN (cfun->eh->protect_list);
-
- /* End all the exception regions. */
- for (; t; t = TREE_CHAIN (t))
- expand_eh_region_end_cleanup (TREE_VALUE (t));
-}
-
-\f
/* This section is for the exception handling specific optimization pass. */
/* Random access the exception region tree. It's just as simple to
if (! i)
return;
- array = xcalloc (cfun->eh->last_region_number + 1, sizeof (*array));
+ array = ggc_alloc_cleared ((cfun->eh->last_region_number + 1)
+ * sizeof (*array));
cfun->eh->region_array = array;
while (1)
for (j = 1; j <= n; ++j)
{
cleanup = cfun->eh->region_array[j];
- if (cleanup->type == ERT_CLEANUP
+ if (cleanup && cleanup->type == ERT_CLEANUP
&& cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
break;
}
&& fixup->type == ERT_FIXUP)
{
if (fixup->u.fixup.real_region)
- XEXP (note, 1) = GEN_INT (fixup->u.fixup.real_region->region_number);
+ XEXP (note, 0) = GEN_INT (fixup->u.fixup.real_region->region_number);
else
remove_note (insn, note);
}
/* Allow GC to maybe free some memory. */
if (fixup->type == ERT_CLEANUP)
- fixup->u.cleanup.exp = NULL_TREE;
+ fixup->u.cleanup.exp = NULL_TREE;
if (fixup->type != ERT_FIXUP)
continue;
}
}
+/* Remove all regions whose labels are not reachable from insns. */
+
+static void
+remove_unreachable_regions (insns)
+ rtx insns;
+{
+ int i, *uid_region_num;
+ bool *reachable;
+ struct eh_region *r;
+ rtx insn;
+
+ uid_region_num = xcalloc (get_max_uid (), sizeof(int));
+ reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof(bool));
+
+ for (i = cfun->eh->last_region_number; i > 0; --i)
+ {
+ r = cfun->eh->region_array[i];
+ if (!r || r->region_number != i)
+ continue;
+
+ if (r->resume)
+ {
+ if (uid_region_num[INSN_UID (r->resume)])
+ abort ();
+ uid_region_num[INSN_UID (r->resume)] = i;
+ }
+ if (r->label)
+ {
+ if (uid_region_num[INSN_UID (r->label)])
+ abort ();
+ uid_region_num[INSN_UID (r->label)] = i;
+ }
+ if (r->type == ERT_TRY && r->u.try.continue_label)
+ {
+ if (uid_region_num[INSN_UID (r->u.try.continue_label)])
+ abort ();
+ uid_region_num[INSN_UID (r->u.try.continue_label)] = i;
+ }
+ }
+
+ for (insn = insns; insn; insn = NEXT_INSN (insn))
+ reachable[uid_region_num[INSN_UID (insn)]] = true;
+
+ for (i = cfun->eh->last_region_number; i > 0; --i)
+ {
+ r = cfun->eh->region_array[i];
+ if (r && r->region_number == i && !reachable[i])
+ {
+ /* Don't remove ERT_THROW regions if their outer region
+ is reachable. */
+ if (r->type == ERT_THROW
+ && r->outer
+ && reachable[r->outer->region_number])
+ continue;
+
+ remove_eh_handler (r);
+ }
+ }
+
+ free (reachable);
+ free (uid_region_num);
+}
+
/* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
can_throw instruction in the region. */
/* If we wanted exceptions for non-call insns, then
any may_trap_p instruction could throw. */
|| (flag_non_call_exceptions
+ && GET_CODE (PATTERN (insn)) != CLOBBER
+ && GET_CODE (PATTERN (insn)) != USE
&& may_trap_p (PATTERN (insn)))))
{
REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
free (stack);
remove_fixup_regions ();
+ remove_unreachable_regions (insns);
+}
+
+static void
+add_ehl_entry (label, region)
+ rtx label;
+ struct eh_region *region;
+{
+ struct ehl_map_entry **slot, *entry;
+
+ LABEL_PRESERVE_P (label) = 1;
+
+ entry = (struct ehl_map_entry *) ggc_alloc (sizeof (*entry));
+ entry->label = label;
+ entry->region = region;
+
+ slot = (struct ehl_map_entry **)
+ htab_find_slot (cfun->eh->exception_handler_label_map, entry, INSERT);
+
+ /* Before landing pad creation, each exception handler has its own
+ label. After landing pad creation, the exception handlers may
+ share landing pads. This is ok, since maybe_remove_eh_handler
+ only requires the 1-1 mapping before landing pad creation. */
+ if (*slot && !cfun->eh->built_landing_pads)
+ abort ();
+
+ *slot = entry;
}
void
find_exception_handler_labels ()
{
- rtx list = NULL_RTX;
int i;
- free_EXPR_LIST_list (&exception_handler_labels);
+ if (cfun->eh->exception_handler_label_map)
+ htab_empty (cfun->eh->exception_handler_label_map);
+ else
+ {
+ /* ??? The expansion factor here (3/2) must be greater than the htab
+ occupancy factor (4/3) to avoid unnecessary resizing. */
+ cfun->eh->exception_handler_label_map
+ = htab_create_ggc (cfun->eh->last_region_number * 3 / 2,
+ ehl_hash, ehl_eq, NULL);
+ }
if (cfun->eh->region_tree == NULL)
return;
struct eh_region *region = cfun->eh->region_array[i];
rtx lab;
- if (! region)
+ if (! region || region->region_number != i)
continue;
if (cfun->eh->built_landing_pads)
lab = region->landing_pad;
lab = region->label;
if (lab)
- list = alloc_EXPR_LIST (0, lab, list);
+ add_ehl_entry (lab, region);
}
/* For sjlj exceptions, need the return label to remain live until
after landing pad generation. */
if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
- list = alloc_EXPR_LIST (0, return_label, list);
-
- exception_handler_labels = list;
+ add_ehl_entry (return_label, NULL);
}
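A quick check of the "???" sizing remark in find_exception_handler_labels above, using the comment's own numbers: the map is created with 3N/2 slots for N regions, and a table with occupancy threshold 4/3 grows only once its element count times 4/3 reaches the slot count; since N * 4/3 is less than N * 3/2, inserting at most N handler labels never forces a resize.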
+bool
+current_function_has_exception_handlers ()
+{
+ int i;
+
+ for (i = cfun->eh->last_region_number; i > 0; --i)
+ {
+ struct eh_region *region = cfun->eh->region_array[i];
+
+ if (! region || region->region_number != i)
+ continue;
+ if (region->type != ERT_THROW)
+ return true;
+ }
+
+ return false;
+}
\f
static struct eh_region *
duplicate_eh_region_1 (o, map)
struct inline_remap *map;
{
struct eh_region *n
- = (struct eh_region *) xcalloc (1, sizeof (struct eh_region));
+ = (struct eh_region *) ggc_alloc_cleared (sizeof (struct eh_region));
n->region_number = o->region_number + cfun->eh->last_region_number;
n->type = o->type;
break;
case ERT_CATCH:
- n->u.catch.type = o->u.catch.type;
+ n->u.catch.type_list = o->u.catch.type_list;
break;
case ERT_ALLOWED_EXCEPTIONS:
case ERT_THROW:
n->u.throw.type = o->u.throw.type;
-
+
default:
abort ();
}
case ERT_CATCH:
if (o->u.catch.next_catch)
- n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
+ n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
if (o->u.catch.prev_catch)
- n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
+ n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
break;
default:
n->inner = n_array[o->inner->region_number];
if (o->next_peer)
n->next_peer = n_array[o->next_peer->region_number];
-}
+}
int
duplicate_eh_regions (ifun, map)
cur->inner = root;
for (i = 1; i <= ifun_last_region_number; ++i)
- if (n_array[i]->outer == NULL)
+ if (n_array[i] && n_array[i]->outer == NULL)
n_array[i]->outer = cur;
}
else
}
\f
-/* ??? Move from tree.c to tree.h. */
-#define TYPE_HASH(TYPE) ((HOST_WIDE_INT) (TYPE) & 0777777)
-
static int
t2r_eq (pentry, pdata)
- const PTR pentry;
- const PTR pdata;
+ const void *pentry;
+ const void *pdata;
{
tree entry = (tree) pentry;
tree data = (tree) pdata;
static hashval_t
t2r_hash (pentry)
- const PTR pentry;
+ const void *pentry;
{
tree entry = (tree) pentry;
return TYPE_HASH (TREE_PURPOSE (entry));
}
-static int
-t2r_mark_1 (slot, data)
- PTR *slot;
- PTR data ATTRIBUTE_UNUSED;
-{
- tree contents = (tree) *slot;
- ggc_mark_tree (contents);
- return 1;
-}
-
-static void
-t2r_mark (addr)
- PTR addr;
-{
- htab_traverse (*(htab_t *)addr, t2r_mark_1, NULL);
-}
-
static void
add_type_for_runtime (type)
tree type;
*slot = tree_cons (type, runtime, NULL_TREE);
}
}
-
+
static tree
lookup_type_for_runtime (type)
tree type;
slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
TYPE_HASH (type), NO_INSERT);
- /* We should have always inserrted the data earlier. */
+ /* We should have always inserted the data earlier. */
return TREE_VALUE (*slot);
}
\f
/* Represent an entry in @TTypes for either catch actions
or exception filter actions. */
-struct ttypes_filter
+struct ttypes_filter GTY(())
{
tree t;
int filter;
static int
ttypes_filter_eq (pentry, pdata)
- const PTR pentry;
- const PTR pdata;
+ const void *pentry;
+ const void *pdata;
{
const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
tree data = (tree) pdata;
static hashval_t
ttypes_filter_hash (pentry)
- const PTR pentry;
+ const void *pentry;
{
const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
return TYPE_HASH (entry->t);
static int
ehspec_filter_eq (pentry, pdata)
- const PTR pentry;
- const PTR pdata;
+ const void *pentry;
+ const void *pdata;
{
const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
static hashval_t
ehspec_filter_hash (pentry)
- const PTR pentry;
+ const void *pentry;
{
const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
hashval_t h = 0;
/* Look up each type in the list and encode its filter
value as a uleb128. Terminate the list with 0. */
for (; list ; list = TREE_CHAIN (list))
- push_uleb128 (&cfun->eh->ehspec_data,
+ push_uleb128 (&cfun->eh->ehspec_data,
add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
}
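A side note on the uleb128 encoding used by push_uleb128 above: each value is stored 7 bits per byte, low-order bits first, with the high bit set on every byte except the last. A rough standalone sketch of that encoding (independent of GCC's varray-based push_uleb128, whose internals are not shown here):

#include <stdio.h>

/* Encode VALUE as unsigned LEB128 into BUF; return the number of bytes
   written.  */
static int
encode_uleb128 (unsigned int value, unsigned char *buf)
{
  int n = 0;
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value != 0)
        byte |= 0x80;           /* More bytes follow.  */
      buf[n++] = byte;
    }
  while (value != 0);
  return n;
}

int
main (void)
{
  unsigned char buf[5];
  int i, n = encode_uleb128 (624485, buf);

  for (i = 0; i < n; ++i)
    printf ("%02x ", buf[i]);   /* Prints "e5 8e 26 ".  */
  printf ("\n");
  return 0;
}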
switch (r->type)
{
case ERT_CATCH:
- r->u.catch.filter = add_ttypes_entry (ttypes, r->u.catch.type);
+ /* Whatever type_list is (NULL or true list), we build a list
+ of filters for the region. */
+ r->u.catch.filter_list = NULL_TREE;
+
+ if (r->u.catch.type_list != NULL)
+ {
+ /* Get a filter value for each of the types caught and store
+ them in the region's dedicated list. */
+ tree tp_node = r->u.catch.type_list;
+
+ for (;tp_node; tp_node = TREE_CHAIN (tp_node))
+ {
+ int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
+ tree flt_node = build_int_2 (flt, 0);
+
+ r->u.catch.filter_list
+ = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
+ }
+ }
+ else
+ {
+ /* Get a filter value for the NULL list also since it will need
+ an action record anyway. */
+ int flt = add_ttypes_entry (ttypes, NULL);
+ tree flt_node = build_int_2 (flt, 0);
+
+ r->u.catch.filter_list
+ = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
+ }
+
break;
case ERT_ALLOWED_EXCEPTIONS:
htab_delete (ehspec);
}
+/* Generate the code to actually handle exceptions, which will follow the
+ landing pads. */
+
static void
build_post_landing_pads ()
{
all the way up the chain until blocked by a cleanup. */
/* ??? Outer try regions can share landing pads with inner
try regions if the types are completely non-overlapping,
- and there are no interveaning cleanups. */
+ and there are no intervening cleanups. */
region->post_landing_pad = gen_label_rtx ();
struct eh_region *c;
for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
{
- /* ??? _Unwind_ForcedUnwind wants no match here. */
- if (c->u.catch.type == NULL)
+ if (c->u.catch.type_list == NULL)
emit_jump (c->label);
else
- emit_cmp_and_jump_insns (cfun->eh->filter,
- GEN_INT (c->u.catch.filter),
- EQ, NULL_RTX, word_mode,
- 0, 0, c->label);
+ {
+ /* We need one cmp/jump per type caught. Each type
+ list entry has a matching entry in the filter list
+ (see assign_filter_values). */
+ tree tp_node = c->u.catch.type_list;
+ tree flt_node = c->u.catch.filter_list;
+
+ for (; tp_node; )
+ {
+ emit_cmp_and_jump_insns
+ (cfun->eh->filter,
+ GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
+ EQ, NULL_RTX, word_mode, 0, c->label);
+
+ tp_node = TREE_CHAIN (tp_node);
+ flt_node = TREE_CHAIN (flt_node);
+ }
+ }
}
}
seq = get_insns ();
end_sequence ();
- emit_insns_before (seq, region->u.try.catch->label);
+ emit_insn_before (seq, region->u.try.catch->label);
break;
case ERT_ALLOWED_EXCEPTIONS:
emit_cmp_and_jump_insns (cfun->eh->filter,
GEN_INT (region->u.allowed.filter),
- EQ, NULL_RTX, word_mode, 0, 0,
- region->label);
+ EQ, NULL_RTX, word_mode, 0, region->label);
/* We delay the generation of the _Unwind_Resume until we generate
landing pads. We emit a marker here so as to get good control
seq = get_insns ();
end_sequence ();
- emit_insns_before (seq, region->label);
+ emit_insn_before (seq, region->label);
break;
case ERT_CLEANUP:
emit_jump (outer->post_landing_pad);
else
emit_library_call (unwind_resume_libfunc, LCT_THROW,
- VOIDmode, 1, cfun->eh->exc_ptr, Pmode);
+ VOIDmode, 1, cfun->eh->exc_ptr, ptr_mode);
seq = get_insns ();
end_sequence ();
- emit_insns_before (seq, region->resume);
-
- /* Leave the RESX to be deleted by flow. */
+ emit_insn_before (seq, region->resume);
+ delete_insn (region->resume);
}
}
static void
dw2_build_landing_pads ()
{
- int i, j;
+ int i;
+ unsigned int j;
for (i = cfun->eh->last_region_number; i > 0; --i)
{
struct eh_region *region = cfun->eh->region_array[i];
rtx seq;
+ bool clobbers_hard_regs = false;
/* Mind we don't process a region more than once. */
if (!region || region->region_number != i)
if (r == INVALID_REGNUM)
break;
if (! call_used_regs[r])
- emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
+ {
+ emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
+ clobbers_hard_regs = true;
+ }
+ }
+
+ if (clobbers_hard_regs)
+ {
+ /* @@@ This is a kludge. Not all machine descriptions define a
+ blockage insn, but we must not allow the code we just generated
+ to be reordered by scheduling. So emit an ASM_INPUT to act as a
+ blockage insn. */
+ emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
}
emit_move_insn (cfun->eh->exc_ptr,
- gen_rtx_REG (Pmode, EH_RETURN_DATA_REGNO (0)));
+ gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
emit_move_insn (cfun->eh->filter,
gen_rtx_REG (word_mode, EH_RETURN_DATA_REGNO (1)));
seq = get_insns ();
end_sequence ();
- emit_insns_before (seq, region->post_landing_pad);
+ emit_insn_before (seq, region->post_landing_pad);
}
}
for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
{
struct eh_region *region;
+ enum reachable_code rc;
tree type_thrown;
rtx note;
/* Find the first containing region that might handle the exception.
That's the landing pad to which we will transfer control. */
+ rc = RNL_NOT_CAUGHT;
for (; region; region = region->outer)
- if (reachable_next_level (region, type_thrown, 0) != RNL_NOT_CAUGHT)
- break;
-
- if (region)
+ {
+ rc = reachable_next_level (region, type_thrown, 0);
+ if (rc != RNL_NOT_CAUGHT)
+ break;
+ }
+ if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
{
lp_info[region->region_number].directly_reachable = 1;
found_one = true;
A region receives a dispatch index if it is directly reachable
and requires in-function processing. Regions that share post-landing
- pads may share dispatch indicies. */
+ pads may share dispatch indices. */
/* ??? Post-landing pad sharing doesn't actually happen at the moment
(see build_post_landing_pads) so we don't bother checking for it. */
index = 0;
for (i = cfun->eh->last_region_number; i > 0; --i)
- if (lp_info[i].directly_reachable
- && lp_info[i].action_index >= 0)
+ if (lp_info[i].directly_reachable)
lp_info[i].dispatch_index = index++;
/* Finally: assign call-site values. If dwarf2 terms, this would be
int last_call_site = -2;
rtx insn, mem;
- mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
- sjlj_fc_call_site_ofs);
-
for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
{
struct eh_region *region;
/* Don't separate a call from its argument loads. */
before = insn;
if (GET_CODE (insn) == CALL_INSN)
- {
- HARD_REG_SET parm_regs;
- int nparm_regs;
-
- /* Since different machines initialize their parameter registers
- in different orders, assume nothing. Collect the set of all
- parameter registers. */
- CLEAR_HARD_REG_SET (parm_regs);
- nparm_regs = 0;
- for (p = CALL_INSN_FUNCTION_USAGE (insn); p ; p = XEXP (p, 1))
- if (GET_CODE (XEXP (p, 0)) == USE
- && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
- {
- if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
- abort ();
-
- /* We only care about registers which can hold function
- arguments. */
- if (! FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
- continue;
-
- SET_HARD_REG_BIT (parm_regs, REGNO (XEXP (XEXP (p, 0), 0)));
- nparm_regs++;
- }
-
- /* Search backward for the first set of a register in this set. */
- while (nparm_regs)
- {
- before = PREV_INSN (before);
-
- /* Given that we've done no other optimizations yet,
- the arguments should be immediately available. */
- if (GET_CODE (before) == CODE_LABEL)
- abort ();
-
- p = single_set (before);
- if (p && GET_CODE (SET_DEST (p)) == REG
- && REGNO (SET_DEST (p)) < FIRST_PSEUDO_REGISTER
- && TEST_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p))))
- {
- CLEAR_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p)));
- nparm_regs--;
- }
- }
- }
+ before = find_first_parameter_load (insn, NULL_RTX);
start_sequence ();
+ mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
+ sjlj_fc_call_site_ofs);
emit_move_insn (mem, GEN_INT (this_call_site));
p = get_insns ();
end_sequence ();
- emit_insns_before (p, before);
+ emit_insn_before (p, before);
last_call_site = this_call_site;
}
}
if (cfun->uses_eh_lsda)
{
char buf[20];
- ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", sjlj_funcdef_number);
+ ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
emit_move_insn (mem, gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)));
}
else
emit_move_insn (mem, const0_rtx);
-
+
#ifdef DONT_USE_BUILTIN_SETJMP
{
rtx x, note;
- x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_NORMAL,
+ x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
TYPE_MODE (integer_type_node), 1,
plus_constant (XEXP (fc, 0),
sjlj_fc_jbuf_ofs), Pmode);
NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
- TYPE_MODE (integer_type_node), 0, 0,
- dispatch_label);
+ TYPE_MODE (integer_type_node), 0, dispatch_label);
}
#else
expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
if (GET_CODE (fn_begin) == NOTE
&& NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
break;
- emit_insns_after (seq, fn_begin);
+ emit_insn_after (seq, fn_begin);
}
/* Call back from expand_function_end to know where we should put
post-dominates all can_throw_internal instructions. This is
the last possible moment. */
- emit_insns_after (seq, cfun->eh->sjlj_exit_after);
+ emit_insn_after (seq, cfun->eh->sjlj_exit_after);
}
static void
start_sequence ();
emit_label (dispatch_label);
-
+
#ifndef DONT_USE_BUILTIN_SETJMP
expand_builtin_setjmp_receiver (dispatch_label);
#endif
first_reachable = 0;
for (i = cfun->eh->last_region_number; i > 0; --i)
{
- if (! lp_info[i].directly_reachable
- || lp_info[i].action_index < 0)
+ if (! lp_info[i].directly_reachable)
continue;
if (! first_reachable)
continue;
}
- emit_cmp_and_jump_insns (dispatch,
- GEN_INT (lp_info[i].dispatch_index), EQ,
- NULL_RTX, TYPE_MODE (integer_type_node), 0, 0,
+ emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
+ EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
cfun->eh->region_array[i]->post_landing_pad);
}
seq = get_insns ();
end_sequence ();
- emit_insns_before (seq, (cfun->eh->region_array[first_reachable]
- ->post_landing_pad));
+ emit_insn_before (seq, (cfun->eh->region_array[first_reachable]
+ ->post_landing_pad));
}
static void
connect many of the handlers, and then type information will not
be effective. Still, this is a win over previous implementations. */
- jump_optimize_minimal (get_insns ());
- find_basic_blocks (get_insns (), max_reg_num (), 0);
- cleanup_cfg ();
+ cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
/* These registers are used by the landing pads. Make sure they
have been generated. */
/* We've totally changed the CFG. Start over. */
find_exception_handler_labels ();
- jump_optimize_minimal (get_insns ());
+ rebuild_jump_labels (get_insns ());
find_basic_blocks (get_insns (), max_reg_num (), 0);
- cleanup_cfg ();
+ cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
}
\f
+static hashval_t
+ehl_hash (pentry)
+ const void *pentry;
+{
+ struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
+
+ /* 2^32 * ((sqrt(5) - 1) / 2) */
+ const hashval_t scaled_golden_ratio = 0x9e3779b9;
+ return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
+}
+
+static int
+ehl_eq (pentry, pdata)
+ const void *pentry;
+ const void *pdata;
+{
+ struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
+ struct ehl_map_entry *data = (struct ehl_map_entry *) pdata;
+
+ return entry->label == data->label;
+}
+
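Side note: the scaled_golden_ratio constant in ehl_hash above is simply 2^32 * (sqrt(5) - 1) / 2, the usual multiplicative-hashing constant. A tiny standalone check of the hex value (not part of the patch itself):

#include <math.h>
#include <stdio.h>

int
main (void)
{
  /* floor (2^32 * (sqrt (5) - 1) / 2) == 2654435769 == 0x9e3779b9.  */
  double frac = (sqrt (5.0) - 1.0) / 2.0;
  unsigned long k = (unsigned long) (4294967296.0 * frac);

  printf ("0x%lx\n", k);        /* Prints "0x9e3779b9".  */
  return 0;
}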
/* This section handles removing dead code for flow. */
-/* Remove LABEL from the exception_handler_labels list. */
+/* Remove LABEL from exception_handler_label_map. */
static void
remove_exception_handler_label (label)
rtx label;
{
- rtx *pl, l;
+ struct ehl_map_entry **slot, tmp;
- for (pl = &exception_handler_labels, l = *pl;
- XEXP (l, 0) != label;
- pl = &XEXP (l, 1), l = *pl)
- continue;
+ /* If exception_handler_label_map was not built yet,
+ there is nothing to do. */
+ if (cfun->eh->exception_handler_label_map == NULL)
+ return;
- *pl = XEXP (l, 1);
- free_EXPR_LIST_node (l);
+ tmp.label = label;
+ slot = (struct ehl_map_entry **)
+ htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
+ if (! slot)
+ abort ();
+
+ htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
}
/* Splice REGION from the region tree etc. */
remove_eh_handler (region)
struct eh_region *region;
{
- struct eh_region **pp, *p;
+ struct eh_region **pp, **pp_start, *p, *outer, *inner;
rtx lab;
- int i;
/* For the benefit of efficiently handling REG_EH_REGION notes,
replace this region in the region array with its containing
region. Note that previous region deletions may result in
- multiple copies of this region in the array, so we have to
- search the whole thing. */
- for (i = cfun->eh->last_region_number; i > 0; --i)
- if (cfun->eh->region_array[i] == region)
- cfun->eh->region_array[i] = region->outer;
+ multiple copies of this region in the array, so we have a
+ list of alternate numbers by which we are known. */
+
+ outer = region->outer;
+ cfun->eh->region_array[region->region_number] = outer;
+ if (region->aka)
+ {
+ int i;
+ EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i,
+ { cfun->eh->region_array[i] = outer; });
+ }
+
+ if (outer)
+ {
+ if (!outer->aka)
+ outer->aka = BITMAP_GGC_ALLOC ();
+ if (region->aka)
+ bitmap_a_or_b (outer->aka, outer->aka, region->aka);
+ bitmap_set_bit (outer->aka, region->region_number);
+ }
if (cfun->eh->built_landing_pads)
lab = region->landing_pad;
if (lab)
remove_exception_handler_label (lab);
- if (region->outer)
- pp = &region->outer->inner;
+ if (outer)
+ pp_start = &outer->inner;
else
- pp = &cfun->eh->region_tree;
- for (p = *pp; p != region; pp = &p->next_peer, p = *pp)
+ pp_start = &cfun->eh->region_tree;
+ for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
continue;
+ *pp = region->next_peer;
- if (region->inner)
+ inner = region->inner;
+ if (inner)
{
- for (p = region->inner; p->next_peer ; p = p->next_peer)
- p->outer = region->outer;
- p->next_peer = region->next_peer;
- p->outer = region->outer;
- *pp = region->inner;
+ for (p = inner; p->next_peer ; p = p->next_peer)
+ p->outer = outer;
+ p->outer = outer;
+
+ p->next_peer = *pp_start;
+ *pp_start = inner;
}
- else
- *pp = region->next_peer;
if (region->type == ERT_CATCH)
{
remove_eh_handler (try);
}
}
-
- free (region);
}
/* LABEL heads a basic block that is about to be deleted. If this
maybe_remove_eh_handler (label)
rtx label;
{
- int i;
+ struct ehl_map_entry **slot, tmp;
+ struct eh_region *region;
/* ??? After generating landing pads, it's not so simple to determine
if the region data is completely unused. One must examine the
if (cfun->eh->built_landing_pads)
return;
- for (i = cfun->eh->last_region_number; i > 0; --i)
+ tmp.label = label;
+ slot = (struct ehl_map_entry **)
+ htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
+ if (! slot)
+ return;
+ region = (*slot)->region;
+ if (! region)
+ return;
+
+ /* Flow will want to remove MUST_NOT_THROW regions as unreachable
+ because there is no path to the fallback call to terminate.
+ But the region continues to affect call-site data until there
+ are no more contained calls, which we don't see here. */
+ if (region->type == ERT_MUST_NOT_THROW)
{
- struct eh_region *region = cfun->eh->region_array[i];
- if (region && region->label == label)
- {
- /* Flow will want to remove MUST_NOT_THROW regions as unreachable
- because there is no path to the fallback call to terminate.
- But the region continues to affect call-site data until there
- are no more contained calls, which we don't see here. */
- if (region->type == ERT_MUST_NOT_THROW)
- {
- remove_exception_handler_label (region->label);
- region->label = NULL_RTX;
- }
- else
- remove_eh_handler (region);
- break;
- }
+ htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
+ region->label = NULL_RTX;
}
+ else
+ remove_eh_handler (region);
}
+/* Invokes CALLBACK for every exception handler label. Only used by old
+ loop hackery; should not be used by new code. */
+
+void
+for_each_eh_label (callback)
+ void (*callback) PARAMS ((rtx));
+{
+ htab_traverse (cfun->eh->exception_handler_label_map, for_each_eh_label_1,
+ (void *)callback);
+}
+
+static int
+for_each_eh_label_1 (pentry, data)
+ void **pentry;
+ void *data;
+{
+ struct ehl_map_entry *entry = *(struct ehl_map_entry **)pentry;
+ void (*callback) PARAMS ((rtx)) = (void (*) PARAMS ((rtx))) data;
+
+ (*callback) (entry->label);
+ return 1;
+}
\f
/* This section describes CFG exception edges for flow. */
/* For communicating between calls to reachable_next_level. */
-struct reachable_info
+struct reachable_info GTY(())
{
tree types_caught;
tree types_allowed;
/* A subroutine of reachable_next_level. If we are collecting a list
of handlers, add one. After landing pad generation, reference
it instead of the handlers themselves. Further, the handlers are
- all wired together, so by referencing one, we've got them all.
+ all wired together, so by referencing one, we've got them all.
Before landing pad generation we reference each handler individually.
LP_REGION contains the landing pad; REGION is the handler. */
info->handlers = alloc_INSN_LIST (region->label, info->handlers);
}
-/* Process one level of exception regions for reachability.
+/* Process one level of exception regions for reachability.
If TYPE_THROWN is non-null, then it is the *exact* type being
propagated. If INFO is non-null, then collect handler labels
and caught/allowed type information between invocations. */
for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
{
/* A catch-all handler ends the search. */
- /* ??? _Unwind_ForcedUnwind will want outer cleanups
- to be run as well. */
- if (c->u.catch.type == NULL)
+ if (c->u.catch.type_list == NULL)
{
add_reachable_handler (info, region, c);
return RNL_CAUGHT;
if (type_thrown)
{
- /* If we have a type match, end the search. */
- if (c->u.catch.type == type_thrown
- || (lang_eh_type_covers
- && (*lang_eh_type_covers) (c->u.catch.type,
- type_thrown)))
+ /* If we have at least one type match, end the search. */
+ tree tp_node = c->u.catch.type_list;
+
+ for (; tp_node; tp_node = TREE_CHAIN (tp_node))
{
- add_reachable_handler (info, region, c);
- return RNL_CAUGHT;
+ tree type = TREE_VALUE (tp_node);
+
+ if (type == type_thrown
+ || (lang_eh_type_covers
+ && (*lang_eh_type_covers) (type, type_thrown)))
+ {
+ add_reachable_handler (info, region, c);
+ return RNL_CAUGHT;
+ }
}
/* If we have definitive information of a match failure,
return RNL_NOT_CAUGHT;
}
+ /* At this point, we either don't know what type is thrown or
+ don't have front-end assistance to help decide whether it is
+ covered by one of the types in the list for this region.
+
+ We'd then like to add this region to the list of reachable
+ handlers since it is indeed potentially reachable based on the
+ information we have.
+
+ Actually, this handler is for sure not reachable if all the
+ types it matches have already been caught. That is, it is only
+ potentially reachable if at least one of the types it catches
+ has not been previously caught. */
+
if (! info)
ret = RNL_MAYBE_CAUGHT;
-
- /* A type must not have been previously caught. */
- else if (! check_handled (info->types_caught, c->u.catch.type))
+ else
{
- add_reachable_handler (info, region, c);
- info->types_caught = tree_cons (NULL, c->u.catch.type,
- info->types_caught);
+ tree tp_node = c->u.catch.type_list;
+ bool maybe_reachable = false;
+
+ /* Compute the potential reachability of this handler and
+ update the list of types caught at the same time. */
+ for (; tp_node; tp_node = TREE_CHAIN (tp_node))
+ {
+ tree type = TREE_VALUE (tp_node);
+
+ if (! check_handled (info->types_caught, type))
+ {
+ info->types_caught
+ = tree_cons (NULL, type, info->types_caught);
+
+ maybe_reachable = true;
+ }
+ }
+
+ if (maybe_reachable)
+ {
+ add_reachable_handler (info, region, c);
- /* ??? If the catch type is a base class of every allowed
- type, then we know we can stop the search. */
- ret = RNL_MAYBE_CAUGHT;
+ /* ??? If the catch type is a base class of every allowed
+ type, then we know we can stop the search. */
+ ret = RNL_MAYBE_CAUGHT;
+ }
}
}
info->types_allowed = tree_cons (NULL_TREE,
region->u.allowed.type_list,
info->types_allowed);
-
- /* If we have definitive information about the type heirarchy,
+
+ /* If we have definitive information about the type hierarchy,
then we can tell if the thrown type will pass through the
filter. */
if (type_thrown && lang_eh_type_covers)
return RNL_MAYBE_CAUGHT;
case ERT_CATCH:
- /* Catch regions are handled by their controling try region. */
+ /* Catch regions are handled by their controlling try region. */
return RNL_NOT_CAUGHT;
case ERT_MUST_NOT_THROW:
if (info && info->handlers)
{
add_reachable_handler (info, region, region);
- return RNL_CAUGHT;
+ return RNL_CAUGHT;
}
else
return RNL_BLOCKED;
case ERT_THROW:
case ERT_FIXUP:
+ case ERT_UNKNOWN:
/* Shouldn't see these here. */
break;
}
region = cfun->eh->region_array[region_number];
type_thrown = NULL_TREE;
- if (region->type == ERT_THROW)
+ if (GET_CODE (insn) == JUMP_INSN
+ && GET_CODE (PATTERN (insn)) == RESX)
+ {
+ /* A RESX leaves a region instead of entering it. Thus the
+ region itself may have been deleted out from under us. */
+ if (region == NULL)
+ return NULL;
+ region = region->outer;
+ }
+ else if (region->type == ERT_THROW)
{
type_thrown = region->u.throw.type;
region = region->outer;
}
- else if (GET_CODE (insn) == JUMP_INSN
- && GET_CODE (PATTERN (insn)) == RESX)
- region = region->outer;
-
- for (; region; region = region->outer)
- if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
- break;
+ while (region)
+ {
+ if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
+ break;
+ /* If we have processed one cleanup, there is no point in
+ processing any more of them. Each cleanup will have an edge
+ to the next outer cleanup region, so the flow graph will be
+ accurate. */
+ if (region->type == ERT_CLEANUP)
+ region = region->u.cleanup.prev_try;
+ else
+ region = region->outer;
+ }
+
return info.handlers;
}
if (how == RNL_BLOCKED)
return false;
if (how != RNL_NOT_CAUGHT)
- return true;
+ return true;
}
return false;
return true;
}
-/* True if nothing in this function can throw outside this function. */
+/* Set current_function_nothrow and cfun->all_throwers_are_sibcalls. */
-bool
-nothrow_function_p ()
+void
+set_nothrow_function_flags ()
{
rtx insn;
+
+ current_function_nothrow = 1;
- if (! flag_exceptions)
- return true;
+ /* Assume cfun->all_throwers_are_sibcalls until we encounter
+ something that can throw an exception. We specifically exempt
+ CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
+ and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
+ is optimistic. */
+ cfun->all_throwers_are_sibcalls = 1;
+
+ if (! flag_exceptions)
+ return;
+
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
if (can_throw_external (insn))
- return false;
+ {
+ current_function_nothrow = 0;
+
+ if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
+ {
+ cfun->all_throwers_are_sibcalls = 0;
+ return;
+ }
+ }
+
for (insn = current_function_epilogue_delay_list; insn;
insn = XEXP (insn, 1))
if (can_throw_external (insn))
- return false;
+ {
+ current_function_nothrow = 0;
- return true;
+ if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
+ {
+ cfun->all_throwers_are_sibcalls = 0;
+ return;
+ }
+ }
}
\f
expand_builtin_unwind_init ()
{
/* Set this so all the registers get saved in our frame; we need to be
- able to copy the saved values for any registers from frames we unwind. */
+ able to copy the saved values for any registers from frames we unwind. */
current_function_has_nonlocal_label = 1;
#ifdef SETUP_FRAME_ADDRESSES
iwhich = DBX_REGISTER_NUMBER (iwhich);
#endif
- return GEN_INT (iwhich);
+ return GEN_INT (iwhich);
}
/* Given a value extracted from the return address register or stack slot,
{
rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
+ if (GET_MODE (addr) != Pmode
+ && GET_MODE (addr) != VOIDmode)
+ {
+#ifdef POINTERS_EXTEND_UNSIGNED
+ addr = convert_memory_address (Pmode, addr);
+#else
+ addr = convert_to_mode (Pmode, addr, 0);
+#endif
+ }
+
/* First mask out any unwanted bits. */
#ifdef MASK_RETURN_ADDR
- expand_and (addr, MASK_RETURN_ADDR, addr);
+ expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
#endif
/* Then adjust to find the real return address. */
expand_builtin_frob_return_addr (addr_tree)
tree addr_tree;
{
- rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
+ rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
#ifdef POINTERS_EXTEND_UNSIGNED
- addr = convert_memory_address (Pmode, addr);
+ if (GET_MODE (addr) != Pmode)
+ addr = convert_memory_address (Pmode, addr);
#endif
#ifdef RETURN_ADDR_OFFSET
void
expand_builtin_eh_return (stackadj_tree, handler_tree)
- tree stackadj_tree, handler_tree;
+ tree stackadj_tree ATTRIBUTE_UNUSED;
+ tree handler_tree;
{
- rtx stackadj, handler;
-
- stackadj = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
- handler = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
+ rtx tmp;
+#ifdef EH_RETURN_STACKADJ_RTX
+ tmp = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
#ifdef POINTERS_EXTEND_UNSIGNED
- stackadj = convert_memory_address (Pmode, stackadj);
- handler = convert_memory_address (Pmode, handler);
+ if (GET_MODE (tmp) != Pmode)
+ tmp = convert_memory_address (Pmode, tmp);
+#endif
+ if (!cfun->eh->ehr_stackadj)
+ cfun->eh->ehr_stackadj = copy_to_reg (tmp);
+ else if (tmp != cfun->eh->ehr_stackadj)
+ emit_move_insn (cfun->eh->ehr_stackadj, tmp);
#endif
- if (! cfun->eh->ehr_label)
- {
- cfun->eh->ehr_stackadj = copy_to_reg (stackadj);
- cfun->eh->ehr_handler = copy_to_reg (handler);
- cfun->eh->ehr_label = gen_label_rtx ();
- }
- else
- {
- if (stackadj != cfun->eh->ehr_stackadj)
- emit_move_insn (cfun->eh->ehr_stackadj, stackadj);
- if (handler != cfun->eh->ehr_handler)
- emit_move_insn (cfun->eh->ehr_handler, handler);
- }
+ tmp = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
+#ifdef POINTERS_EXTEND_UNSIGNED
+ if (GET_MODE (tmp) != Pmode)
+ tmp = convert_memory_address (Pmode, tmp);
+#endif
+ if (!cfun->eh->ehr_handler)
+ cfun->eh->ehr_handler = copy_to_reg (tmp);
+ else if (tmp != cfun->eh->ehr_handler)
+ emit_move_insn (cfun->eh->ehr_handler, tmp);
+ if (!cfun->eh->ehr_label)
+ cfun->eh->ehr_label = gen_label_rtx ();
emit_jump (cfun->eh->ehr_label);
}
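/* Sketch of how the builtin expanded above is used, loosely modeled on
   an unwinder's "install context" step (the function name is
   illustrative, not actual libgcc code): the first operand is the
   stack adjustment, the second the handler address, and control never
   returns here.  */

   static void
   install_context (long stack_adjust, void *handler)
   {
     __builtin_eh_return (stack_adjust, handler);  /* does not return */
   }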
void
expand_eh_return ()
{
- rtx sa, ra, around_label;
+ rtx around_label;
if (! cfun->eh->ehr_label)
return;
- sa = EH_RETURN_STACKADJ_RTX;
- if (! sa)
- {
- error ("__builtin_eh_return not supported on this target");
- return;
- }
-
current_function_calls_eh_return = 1;
+#ifdef EH_RETURN_STACKADJ_RTX
+ emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
+#endif
+
around_label = gen_label_rtx ();
- emit_move_insn (sa, const0_rtx);
emit_jump (around_label);
emit_label (cfun->eh->ehr_label);
clobber_return_register ();
+#ifdef EH_RETURN_STACKADJ_RTX
+ emit_move_insn (EH_RETURN_STACKADJ_RTX, cfun->eh->ehr_stackadj);
+#endif
+
#ifdef HAVE_eh_return
if (HAVE_eh_return)
- emit_insn (gen_eh_return (cfun->eh->ehr_stackadj, cfun->eh->ehr_handler));
+ emit_insn (gen_eh_return (cfun->eh->ehr_handler));
else
#endif
{
- ra = EH_RETURN_HANDLER_RTX;
- if (! ra)
- {
- error ("__builtin_eh_return not supported on this target");
- ra = gen_reg_rtx (Pmode);
- }
-
- emit_move_insn (sa, cfun->eh->ehr_stackadj);
- emit_move_insn (ra, cfun->eh->ehr_handler);
+#ifdef EH_RETURN_HANDLER_RTX
+ emit_move_insn (EH_RETURN_HANDLER_RTX, cfun->eh->ehr_handler);
+#else
+ error ("__builtin_eh_return not supported on this target");
+#endif
}
emit_label (around_label);
}
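/* Roughly, the insn sequence emitted by expand_eh_return looks like
   this (sketch; the exact insns are target dependent):

        EH_RETURN_STACKADJ_RTX <- 0                ;; normal return path
        goto around
     ehr_label:
        clobber the return value register
        EH_RETURN_STACKADJ_RTX <- ehr_stackadj     ;; if the target defines it
        eh_return (ehr_handler)                    ;; or EH_RETURN_HANDLER_RTX <- ehr_handler
     around:
   */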
\f
+/* In the following functions, we represent entries in the action table
+ as 1-based indices. Special cases are:
+
+ 0: null action record, non-null landing pad; implies cleanups
+ -1: null action record, null landing pad; implies no action
+ -2: no call-site entry; implies must_not_throw
+ -3: we have yet to process outer regions
+
+ Further, no special cases apply to the "next" field of the record.
+ For next, 0 means end of list. */
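/* Sketch of how the call-site conversion loop later in this file reacts
   to these values (condensed from the code around add_call_site; -3
   only marks an in-progress outer search and never reaches it):  */

   if (this_action >= -1)
     /* -1 gets a null landing pad and action 0, 0 means cleanups only,
        and positive values are 1-based action table indices.  */
     call_site = add_call_site (this_landing_pad,
                                this_action < 0 ? 0 : this_action);
   /* else this_action == -2: must_not_throw, no call-site entry.  */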
+
struct action_record
{
int offset;
static int
action_record_eq (pentry, pdata)
- const PTR pentry;
- const PTR pdata;
+ const void *pentry;
+ const void *pdata;
{
const struct action_record *entry = (const struct action_record *) pentry;
const struct action_record *data = (const struct action_record *) pdata;
static hashval_t
action_record_hash (pentry)
- const PTR pentry;
+ const void *pentry;
{
const struct action_record *entry = (const struct action_record *) pentry;
return entry->next * 1009 + entry->filter;
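/* Sketch of how these two callbacks are typically wired up with the
   libiberty hash table, along the lines of add_action_record (filter
   and next stand for its parameters; the offset bookkeeping done by
   the real function is omitted here):  */

   htab_t ar_hash = htab_create (31, action_record_hash,
                                 action_record_eq, free);
   struct action_record tmp, *entry;
   void **slot;

   tmp.filter = filter;
   tmp.next = next;
   slot = htab_find_slot (ar_hash, &tmp, INSERT);
   if (*slot == NULL)
     {
       /* Not seen before: record a fresh entry keyed by (filter, next).  */
       entry = (struct action_record *) xmalloc (sizeof (*entry));
       entry->filter = filter;
       entry->next = next;
       *slot = entry;
     }
   entry = (struct action_record *) *slot;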
/* The filter value goes in untouched. The link to the next
record is a "self-relative" byte offset, or zero to indicate
that there is no next record. So convert the absolute 1 based
- indicies we've been carrying around into a displacement. */
+ indices we've been carrying around into a displacement. */
push_sleb128 (&cfun->eh->action_record_data, filter);
if (next)
/* Process the associated catch regions in reverse order.
If there's a catch-all handler, then we don't need to
search outer regions. Use a magic -3 value to record
- that we havn't done the outer search. */
+ that we haven't done the outer search. */
next = -3;
for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
{
- if (c->u.catch.type == NULL)
- next = add_action_record (ar_hash, c->u.catch.filter, 0);
+ if (c->u.catch.type_list == NULL)
+ {
+ /* Retrieve the filter from the head of the filter list
+ where we have stored it (see assign_filter_values). */
+ int filter
+ = TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));
+
+ next = add_action_record (ar_hash, filter, 0);
+ }
else
{
+ /* Once the outer search is done, trigger an action record for
+ each filter we have. */
+ tree flt_node;
+
if (next == -3)
{
next = collect_one_action_chain (ar_hash, region->outer);
- if (next < 0)
+
+ /* If there is no next action, terminate the chain. */
+ if (next == -1)
next = 0;
+ /* If all outer actions are cleanups or must_not_throw,
+ we'll have no action record for it, since we had wanted
+ to encode these states in the call-site record directly.
+ Add a cleanup action to the chain to catch these. */
+ else if (next <= 0)
+ next = add_action_record (ar_hash, 0, 0);
+ }
+
+ flt_node = c->u.catch.filter_list;
+ for (; flt_node; flt_node = TREE_CHAIN (flt_node))
+ {
+ int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
+ next = add_action_record (ar_hash, filter, next);
}
- next = add_action_record (ar_hash, c->u.catch.filter, next);
}
}
return next;
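/* Worked example (illustrative): for "try { ... } catch (A) catch (B)",
   with assign_filter_values having given A filter 1 and B filter 2, the
   loops above add one action record per filter, each with its "next"
   field pointing at the chain built so far.  The record added last is
   therefore the first one the runtime consults when matching the thrown
   type, and a chain whose "next" reaches 0 ends the search.  */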
{
size = (size ? size * 2 : 64);
data = (struct call_site_record *)
- xrealloc (data, sizeof (*data) * size);
+ ggc_realloc (data, sizeof (*data) * size);
cfun->eh->call_site_data = data;
cfun->eh->call_site_data_size = size;
}
rtx last_action_insn = NULL_RTX;
rtx last_landing_pad = NULL_RTX;
rtx first_no_action_insn = NULL_RTX;
- int call_site;
+ int call_site = 0;
if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
return;
are created. */
if (this_action >= -1)
{
- call_site = add_call_site (this_landing_pad,
+ call_site = add_call_site (this_landing_pad,
this_action < 0 ? 0 : this_action);
note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
NOTE_EH_HANDLER (note) = call_site;
static void
dw2_output_call_site_table ()
{
- const char *function_start_lab
+ const char *const function_start_lab
= IDENTIFIER_POINTER (current_function_func_begin_label);
int n = cfun->eh->call_site_data_used;
int i;
call_site_base += n;
}
+/* Tell assembler to switch to the section for the exception handling
+ table. */
+
+void
+default_exception_section ()
+{
+ if (targetm.have_named_sections)
+ {
+ int flags;
+#ifdef HAVE_LD_RO_RW_SECTION_MIXING
+ int tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
+
+ flags = (! flag_pic
+ || ((tt_format & 0x70) != DW_EH_PE_absptr
+ && (tt_format & 0x70) != DW_EH_PE_aligned))
+ ? 0 : SECTION_WRITE;
+#else
+ flags = SECTION_WRITE;
+#endif
+ named_section_flags (".gcc_except_table", flags);
+ }
+ else if (flag_pic)
+ data_section ();
+ else
+ readonly_data_section ();
+}
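/* For reference, the bits tested above come from the DWARF
   pointer-encoding space used throughout the EH code; the relevant
   values (defined elsewhere) are:

        DW_EH_PE_absptr   0x00    absolute pointer
        DW_EH_PE_pcrel    0x10    PC-relative
        DW_EH_PE_datarel  0x30    data-section-relative
        DW_EH_PE_aligned  0x50    aligned absolute pointer

   (tt_format & 0x70) isolates the "relative to what" part, so absolute
   or aligned type-table pointers under -fpic need runtime relocations
   and hence a writable section.  */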
+
void
output_function_exception_table ()
{
int call_site_len;
#endif
int have_tt_data;
- int funcdef_number;
- int tt_format_size;
+ int tt_format_size = 0;
/* Not all functions need anything. */
if (! cfun->uses_eh_lsda)
return;
- funcdef_number = (USING_SJLJ_EXCEPTIONS
- ? sjlj_funcdef_number
- : current_funcdef_number);
-
#ifdef IA64_UNWIND_INFO
fputs ("\t.personality\t", asm_out_file);
output_addr_const (asm_out_file, eh_personality_libfunc);
/* Note that varasm still thinks we're in the function's code section.
The ".endp" directive that will immediately follow will take us back. */
#else
- exception_section ();
+ (*targetm.asm_out.exception_section) ();
#endif
have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
{
tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
#ifdef HAVE_AS_LEB128
- ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT", funcdef_number);
+ ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
+ current_function_funcdef_no);
#endif
tt_format_size = size_of_encoded_value (tt_format);
- assemble_eh_align (tt_format_size * BITS_PER_UNIT);
+ assemble_align (tt_format_size * BITS_PER_UNIT);
}
- ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "LLSDA", funcdef_number);
+ (*targetm.asm_out.internal_label) (asm_out_file, "LLSDA",
+ current_function_funcdef_no);
/* The LSDA header. */
{
#ifdef HAVE_AS_LEB128
char ttype_after_disp_label[32];
- ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
- funcdef_number);
+ ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
+ current_function_funcdef_no);
dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
"@TType base offset");
ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
#ifdef HAVE_AS_LEB128
ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
- funcdef_number);
+ current_function_funcdef_no);
ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
- funcdef_number);
+ current_function_funcdef_no);
dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
"Call-site table length");
ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
(i ? NULL : "Action record table"));
if (have_tt_data)
- assemble_eh_align (tt_format_size * BITS_PER_UNIT);
+ assemble_align (tt_format_size * BITS_PER_UNIT);
i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
while (i-- > 0)
{
tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
+ rtx value;
if (type == NULL_TREE)
type = integer_zero_node;
else
type = lookup_type_for_runtime (type);
- dw2_asm_output_encoded_addr_rtx (tt_format,
- expand_expr (type, NULL_RTX, VOIDmode,
- EXPAND_INITIALIZER),
- NULL);
+ value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
+ if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
+ assemble_integer (value, tt_format_size,
+ tt_format_size * BITS_PER_UNIT, 1);
+ else
+ dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
}
#ifdef HAVE_AS_LEB128
(i ? NULL : "Exception specification table"));
function_section (current_function_decl);
-
- if (USING_SJLJ_EXCEPTIONS)
- sjlj_funcdef_number += 1;
}
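/* For orientation, the LSDA emitted by output_function_exception_table
   has roughly this shape (sketch; several pieces are optional depending
   on the target and on what the function needs):

        LLSDA<n>:
          landing-pad base encoding (and base, when not the default)
          @TType encoding and offset to the type table (when present)
          call-site table encoding and length
          call-site table      -- region start, length, landing pad, action
          action record table  -- (filter, next) pairs, [s]leb128 encoded
          type table           -- runtime type references, in tt_format
          exception spec table -- uleb128 filter lists  */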
+
+#include "gt-except.h"