/* Implements exception handling.
- Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
- 1999, 2000, 2001 Free Software Foundation, Inc.
+ Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
+ 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
+ Free Software Foundation, Inc.
Contributed by Mike Stump <mrs@cygnus.com>.
-This file is part of GNU CC.
+This file is part of GCC.
-GNU CC is free software; you can redistribute it and/or modify
-it under the terms of the GNU General Public License as published by
-the Free Software Foundation; either version 2, or (at your option)
-any later version.
+GCC is free software; you can redistribute it and/or modify it under
+the terms of the GNU General Public License as published by the Free
+Software Foundation; either version 3, or (at your option) any later
+version.
-GNU CC is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-GNU General Public License for more details.
+GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or
+FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
+for more details.
You should have received a copy of the GNU General Public License
-along with GNU CC; see the file COPYING. If not, write to
-the Free Software Foundation, 59 Temple Place - Suite 330,
-Boston, MA 02111-1307, USA. */
+along with GCC; see the file COPYING3. If not see
+<http://www.gnu.org/licenses/>. */
/* An exception is an event that can be signaled from within a
#include "config.h"
#include "system.h"
+#include "coretypes.h"
+#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
+#include "libfuncs.h"
#include "insn-config.h"
#include "except.h"
#include "integrate.h"
#include "intl.h"
#include "ggc.h"
#include "tm_p.h"
-
+#include "target.h"
+#include "langhooks.h"
+#include "cgraph.h"
+#include "diagnostic.h"
+#include "tree-pass.h"
+#include "timevar.h"
/* Provide defaults for stuff that may not be defined when using
sjlj exceptions. */
-#ifndef EH_RETURN_STACKADJ_RTX
-#define EH_RETURN_STACKADJ_RTX 0
-#endif
-#ifndef EH_RETURN_HANDLER_RTX
-#define EH_RETURN_HANDLER_RTX 0
-#endif
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif
-/* Nonzero means enable synchronous exceptions for non-call instructions. */
-int flag_non_call_exceptions;
-
/* Protect cleanup actions with must-not-throw regions, with a call
to the given failure handler. */
-tree (*lang_protect_cleanup_actions) PARAMS ((void));
+tree (*lang_protect_cleanup_actions) (void);
/* Return true if type A catches type B. */
-int (*lang_eh_type_covers) PARAMS ((tree a, tree b));
+int (*lang_eh_type_covers) (tree a, tree b);
/* Map a type to a runtime object to match type. */
-tree (*lang_eh_runtime_type) PARAMS ((tree));
+tree (*lang_eh_runtime_type) (tree);
-/* A list of labels used for exception handlers. */
-rtx exception_handler_labels;
+/* A hash table of label to region number. */
-static int call_site_base;
-static unsigned int sjlj_funcdef_number;
-static htab_t type_to_runtime_map;
+struct ehl_map_entry GTY(())
+{
+ rtx label;
+ struct eh_region *region;
+};
+
+static GTY(()) int call_site_base;
+static GTY ((param_is (union tree_node)))
+ htab_t type_to_runtime_map;
/* Describe the SjLj_Function_Context structure. */
-static tree sjlj_fc_type_node;
+static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_jbuf_ofs;
\f
/* Describes one exception region. */
-struct eh_region
+struct eh_region GTY(())
{
/* The immediately surrounding region. */
struct eh_region *outer;
/* An identifier for this region. */
int region_number;
+ /* When a region is deleted, its parents inherit the REG_EH_REGION
+ numbers already assigned. */
+ bitmap aka;
+
/* Each region does exactly one thing. */
enum eh_region_type
{
- ERT_CLEANUP = 1,
+ ERT_UNKNOWN = 0,
+ ERT_CLEANUP,
ERT_TRY,
ERT_CATCH,
ERT_ALLOWED_EXCEPTIONS,
ERT_MUST_NOT_THROW,
- ERT_THROW,
- ERT_FIXUP
+ ERT_THROW
} type;
- /* Holds the action to perform based on the preceeding type. */
- union {
+ /* Holds the action to perform based on the preceding type. */
+ union eh_region_u {
/* A list of catch blocks, a surrounding try block,
and the label for continuing after a catch. */
- struct {
+ struct eh_region_u_try {
struct eh_region *catch;
struct eh_region *last_catch;
- struct eh_region *prev_try;
- rtx continue_label;
- } try;
+ } GTY ((tag ("ERT_TRY"))) try;
- /* The list through the catch handlers, the type object
- matched, and a pointer to the generated code. */
- struct {
+ /* The list through the catch handlers, the list of type objects
+ matched, and the list of associated filters. */
+ struct eh_region_u_catch {
struct eh_region *next_catch;
struct eh_region *prev_catch;
- tree type;
- int filter;
- } catch;
+ tree type_list;
+ tree filter_list;
+ } GTY ((tag ("ERT_CATCH"))) catch;
/* A tree_list of allowed types. */
- struct {
+ struct eh_region_u_allowed {
tree type_list;
int filter;
- } allowed;
+ } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;
- /* The type given by a call to "throw foo();", or discovered
+ /* The type given by a call to "throw foo();", or discovered
for a throw. */
- struct {
+ struct eh_region_u_throw {
tree type;
- } throw;
+ } GTY ((tag ("ERT_THROW"))) throw;
/* Retain the cleanup expression even after expansion so that
we can match up fixup regions. */
- struct {
- tree exp;
- } cleanup;
-
- /* The real region (by expression and by pointer) that fixup code
- should live in. */
- struct {
- tree cleanup_exp;
- struct eh_region *real_region;
- } fixup;
- } u;
+ struct eh_region_u_cleanup {
+ struct eh_region *prev_try;
+ } GTY ((tag ("ERT_CLEANUP"))) cleanup;
+ } GTY ((desc ("%0.type"))) u;
/* Entry point for this region's handler before landing pads are built. */
rtx label;
+ tree tree_label;
/* Entry point for this region's handler from the runtime eh library. */
rtx landing_pad;
/* The RESX insn for handing off control to the next outermost handler,
if appropriate. */
rtx resume;
+
+ /* True if something in this region may throw. */
+ unsigned may_contain_throw : 1;
};
+typedef struct eh_region *eh_region;
+
+struct call_site_record GTY(())
+{
+ rtx landing_pad;
+ int action;
+};
+
+DEF_VEC_P(eh_region);
+DEF_VEC_ALLOC_P(eh_region, gc);
+
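A minimal sketch (annotation, not patch content) of how the discriminated union above is meant to be read; the helper name dump_region_kind is hypothetical, and only the field accesses mirror the structure as defined:

    static const char *
    dump_region_kind (struct eh_region *r)
    {
      switch (r->type)
        {
        case ERT_TRY:
          /* Handlers hang off u.try.catch through u.try.last_catch.  */
          return r->u.try.catch ? "try (with handlers)" : "try";
        case ERT_CATCH:
          /* A NULL type_list denotes a catch-all clause.  */
          return r->u.catch.type_list ? "catch" : "catch-all";
        case ERT_ALLOWED_EXCEPTIONS:
          return "exception specification";
        case ERT_MUST_NOT_THROW:
          return "must-not-throw";
        case ERT_THROW:
          return "throw";
        case ERT_CLEANUP:
          return "cleanup";
        default:
          return "unknown";
        }
    }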
/* Used to save exception status for each function. */
-struct eh_status
+struct eh_status GTY(())
{
/* The tree of all regions for this function. */
struct eh_region *region_tree;
/* The same information as an indexable array. */
- struct eh_region **region_array;
+ VEC(eh_region,gc) *region_array;
/* The most recently open region. */
struct eh_region *cur_region;
/* This is the region for which we are processing catch blocks. */
struct eh_region *try_region;
- /* A stack (TREE_LIST) of lists of handlers. The TREE_VALUE of each
- node is itself a TREE_CHAINed list of handlers for regions that
- are not yet closed. The TREE_VALUE of each entry contains the
- handler for the corresponding entry on the ehstack. */
- tree protect_list;
-
rtx filter;
rtx exc_ptr;
int built_landing_pads;
int last_region_number;
- varray_type ttype_data;
+ VEC(tree,gc) *ttype_data;
varray_type ehspec_data;
varray_type action_record_data;
- struct call_site_record
- {
- rtx landing_pad;
- int action;
- } *call_site_data;
+ htab_t GTY ((param_is (struct ehl_map_entry))) exception_handler_label_map;
+
+ struct call_site_record * GTY ((length ("%h.call_site_data_used")))
+ call_site_data;
int call_site_data_used;
int call_site_data_size;
rtx sjlj_fc;
rtx sjlj_exit_after;
-};
+ htab_t GTY((param_is (struct throw_stmt_node))) throw_stmt_table;
+};
\f
-static void mark_eh_region PARAMS ((struct eh_region *));
-
-static int t2r_eq PARAMS ((const PTR,
- const PTR));
-static hashval_t t2r_hash PARAMS ((const PTR));
-static int t2r_mark_1 PARAMS ((PTR *, PTR));
-static void t2r_mark PARAMS ((PTR));
-static void add_type_for_runtime PARAMS ((tree));
-static tree lookup_type_for_runtime PARAMS ((tree));
-
-static struct eh_region *expand_eh_region_end PARAMS ((void));
-
-static rtx get_exception_filter PARAMS ((struct function *));
-
-static void collect_eh_region_array PARAMS ((void));
-static void resolve_fixup_regions PARAMS ((void));
-static void remove_fixup_regions PARAMS ((void));
-static void convert_from_eh_region_ranges_1 PARAMS ((rtx *, int *, int));
-
-static struct eh_region *duplicate_eh_region_1 PARAMS ((struct eh_region *,
- struct inline_remap *));
-static void duplicate_eh_region_2 PARAMS ((struct eh_region *,
- struct eh_region **));
-static int ttypes_filter_eq PARAMS ((const PTR,
- const PTR));
-static hashval_t ttypes_filter_hash PARAMS ((const PTR));
-static int ehspec_filter_eq PARAMS ((const PTR,
- const PTR));
-static hashval_t ehspec_filter_hash PARAMS ((const PTR));
-static int add_ttypes_entry PARAMS ((htab_t, tree));
-static int add_ehspec_entry PARAMS ((htab_t, htab_t,
- tree));
-static void assign_filter_values PARAMS ((void));
-static void build_post_landing_pads PARAMS ((void));
-static void connect_post_landing_pads PARAMS ((void));
-static void dw2_build_landing_pads PARAMS ((void));
+static int t2r_eq (const void *, const void *);
+static hashval_t t2r_hash (const void *);
+static void add_type_for_runtime (tree);
+static tree lookup_type_for_runtime (tree);
+
+static void remove_unreachable_regions (rtx);
+
+static int ttypes_filter_eq (const void *, const void *);
+static hashval_t ttypes_filter_hash (const void *);
+static int ehspec_filter_eq (const void *, const void *);
+static hashval_t ehspec_filter_hash (const void *);
+static int add_ttypes_entry (htab_t, tree);
+static int add_ehspec_entry (htab_t, htab_t, tree);
+static void assign_filter_values (void);
+static void build_post_landing_pads (void);
+static void connect_post_landing_pads (void);
+static void dw2_build_landing_pads (void);
struct sjlj_lp_info;
-static bool sjlj_find_directly_reachable_regions
- PARAMS ((struct sjlj_lp_info *));
-static void sjlj_assign_call_site_values
- PARAMS ((rtx, struct sjlj_lp_info *));
-static void sjlj_mark_call_sites
- PARAMS ((struct sjlj_lp_info *));
-static void sjlj_emit_function_enter PARAMS ((rtx));
-static void sjlj_emit_function_exit PARAMS ((void));
-static void sjlj_emit_dispatch_table
- PARAMS ((rtx, struct sjlj_lp_info *));
-static void sjlj_build_landing_pads PARAMS ((void));
-
-static void remove_exception_handler_label PARAMS ((rtx));
-static void remove_eh_handler PARAMS ((struct eh_region *));
-
-struct reachable_info;
+static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
+static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
+static void sjlj_mark_call_sites (struct sjlj_lp_info *);
+static void sjlj_emit_function_enter (rtx);
+static void sjlj_emit_function_exit (void);
+static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
+static void sjlj_build_landing_pads (void);
+
+static hashval_t ehl_hash (const void *);
+static int ehl_eq (const void *, const void *);
+static void add_ehl_entry (rtx, struct eh_region *);
+static void remove_exception_handler_label (rtx);
+static void remove_eh_handler (struct eh_region *);
+static int for_each_eh_label_1 (void **, void *);
/* The return value of reachable_next_level. */
enum reachable_code
RNL_BLOCKED
};
-static int check_handled PARAMS ((tree, tree));
-static void add_reachable_handler
- PARAMS ((struct reachable_info *, struct eh_region *,
- struct eh_region *));
-static enum reachable_code reachable_next_level
- PARAMS ((struct eh_region *, tree, struct reachable_info *));
-
-static int action_record_eq PARAMS ((const PTR,
- const PTR));
-static hashval_t action_record_hash PARAMS ((const PTR));
-static int add_action_record PARAMS ((htab_t, int, int));
-static int collect_one_action_chain PARAMS ((htab_t,
- struct eh_region *));
-static int add_call_site PARAMS ((rtx, int));
-
-static void push_uleb128 PARAMS ((varray_type *,
- unsigned int));
-static void push_sleb128 PARAMS ((varray_type *, int));
+struct reachable_info;
+static enum reachable_code reachable_next_level (struct eh_region *, tree,
+ struct reachable_info *);
+
+static int action_record_eq (const void *, const void *);
+static hashval_t action_record_hash (const void *);
+static int add_action_record (htab_t, int, int);
+static int collect_one_action_chain (htab_t, struct eh_region *);
+static int add_call_site (rtx, int);
+
+static void push_uleb128 (varray_type *, unsigned int);
+static void push_sleb128 (varray_type *, int);
#ifndef HAVE_AS_LEB128
-static int dw2_size_of_call_site_table PARAMS ((void));
-static int sjlj_size_of_call_site_table PARAMS ((void));
+static int dw2_size_of_call_site_table (void);
+static int sjlj_size_of_call_site_table (void);
#endif
-static void dw2_output_call_site_table PARAMS ((void));
-static void sjlj_output_call_site_table PARAMS ((void));
+static void dw2_output_call_site_table (void);
+static void sjlj_output_call_site_table (void);
\f
/* Routine to see if exception handling is turned on.
- DO_WARN is non-zero if we want to inform the user that exception
- handling is turned off.
+ DO_WARN is nonzero if we want to inform the user that exception
+ handling is turned off.
This is used to ensure that -fexceptions has been specified if the
compiler tries to use any exception-specific functions. */
int
-doing_eh (do_warn)
- int do_warn;
+doing_eh (int do_warn)
{
if (! flag_exceptions)
{
\f
void
-init_eh ()
+init_eh (void)
{
- ggc_add_rtx_root (&exception_handler_labels, 1);
-
if (! flag_exceptions)
return;
- type_to_runtime_map = htab_create (31, t2r_hash, t2r_eq, NULL);
- ggc_add_root (&type_to_runtime_map, 1, sizeof (htab_t), t2r_mark);
+ type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);
/* Create the SjLj_Function_Context structure. This should match
the definition in unwind-sjlj.c. */
{
tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
- sjlj_fc_type_node = make_lang_type (RECORD_TYPE);
- ggc_add_tree_root (&sjlj_fc_type_node, 1);
+ sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);
f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
build_pointer_type (sjlj_fc_type_node));
integer_type_node);
DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
- tmp = build_index_type (build_int_2 (4 - 1, 0));
- tmp = build_array_type (type_for_mode (word_mode, 1), tmp);
+ tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
+ tmp = build_array_type (lang_hooks.types.type_for_mode (word_mode, 1),
+ tmp);
f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
- tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
+ tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
#else
/* Should be large enough for most systems, if it is not,
JMP_BUF_SIZE should be defined with the proper value. It will
also tend to be larger than necessary for most systems, a more
optimal port will define JMP_BUF_SIZE. */
- tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
+ tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
- /* This is 2 for builtin_setjmp, plus whatever the target requires
- via STACK_SAVEAREA_MODE (SAVE_NONLOCAL). */
- tmp = build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
- / GET_MODE_SIZE (Pmode)) + 2 - 1, 0);
+ /* builtin_setjmp takes a pointer to 5 words. */
+ tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
#endif
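Illustrative arithmetic (annotation, not patch content): with DONT_USE_BUILTIN_SETJMP left undefined on a target where POINTER_SIZE equals BITS_PER_WORD, the expression evaluates to 5 * 1 - 1 = 4, so the index type built below produces a five-element array of pointers, i.e. the five-word buffer __builtin_setjmp expects, replacing the old derivation from STACK_SAVEAREA_MODE.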
tmp = build_index_type (tmp);
tmp = build_array_type (ptr_type_node, tmp);
}
void
-init_eh_for_function ()
-{
- cfun->eh = (struct eh_status *) xcalloc (1, sizeof (struct eh_status));
-}
-
-/* Mark EH for GC. */
-
-static void
-mark_eh_region (region)
- struct eh_region *region;
-{
- if (! region)
- return;
-
- switch (region->type)
- {
- case ERT_CLEANUP:
- ggc_mark_tree (region->u.cleanup.exp);
- break;
- case ERT_TRY:
- ggc_mark_rtx (region->u.try.continue_label);
- break;
- case ERT_CATCH:
- ggc_mark_tree (region->u.catch.type);
- break;
- case ERT_ALLOWED_EXCEPTIONS:
- ggc_mark_tree (region->u.allowed.type_list);
- break;
- case ERT_MUST_NOT_THROW:
- break;
- case ERT_THROW:
- ggc_mark_tree (region->u.throw.type);
- break;
- case ERT_FIXUP:
- ggc_mark_tree (region->u.fixup.cleanup_exp);
- break;
- default:
- abort ();
- }
-
- ggc_mark_rtx (region->label);
- ggc_mark_rtx (region->resume);
- ggc_mark_rtx (region->landing_pad);
- ggc_mark_rtx (region->post_landing_pad);
-}
-
-void
-mark_eh_status (eh)
- struct eh_status *eh;
-{
- int i;
-
- if (eh == 0)
- return;
-
- /* If we've called collect_eh_region_array, use it. Otherwise walk
- the tree non-recursively. */
- if (eh->region_array)
- {
- for (i = eh->last_region_number; i > 0; --i)
- {
- struct eh_region *r = eh->region_array[i];
- if (r && r->region_number == i)
- mark_eh_region (r);
- }
- }
- else if (eh->region_tree)
- {
- struct eh_region *r = eh->region_tree;
- while (1)
- {
- mark_eh_region (r);
- if (r->inner)
- r = r->inner;
- else if (r->next_peer)
- r = r->next_peer;
- else
- {
- do {
- r = r->outer;
- if (r == NULL)
- goto tree_done;
- } while (r->next_peer == NULL);
- r = r->next_peer;
- }
- }
- tree_done:;
- }
-
- ggc_mark_tree (eh->protect_list);
- ggc_mark_rtx (eh->filter);
- ggc_mark_rtx (eh->exc_ptr);
- ggc_mark_tree_varray (eh->ttype_data);
-
- if (eh->call_site_data)
- {
- for (i = eh->call_site_data_used - 1; i >= 0; --i)
- ggc_mark_rtx (eh->call_site_data[i].landing_pad);
- }
-
- ggc_mark_rtx (eh->ehr_stackadj);
- ggc_mark_rtx (eh->ehr_handler);
- ggc_mark_rtx (eh->ehr_label);
-
- ggc_mark_rtx (eh->sjlj_fc);
- ggc_mark_rtx (eh->sjlj_exit_after);
-}
-
-void
-free_eh_status (f)
- struct function *f;
+init_eh_for_function (void)
{
- struct eh_status *eh = f->eh;
-
- if (eh->region_array)
- {
- int i;
- for (i = eh->last_region_number; i > 0; --i)
- {
- struct eh_region *r = eh->region_array[i];
- /* Mind we don't free a region struct more than once. */
- if (r && r->region_number == i)
- free (r);
- }
- free (eh->region_array);
- }
- else if (eh->region_tree)
- {
- struct eh_region *next, *r = eh->region_tree;
- while (1)
- {
- if (r->inner)
- r = r->inner;
- else if (r->next_peer)
- {
- next = r->next_peer;
- free (r);
- r = next;
- }
- else
- {
- do {
- next = r->outer;
- free (r);
- r = next;
- if (r == NULL)
- goto tree_done;
- } while (r->next_peer == NULL);
- next = r->next_peer;
- free (r);
- r = next;
- }
- }
- tree_done:;
- }
-
- VARRAY_FREE (eh->ttype_data);
- VARRAY_FREE (eh->ehspec_data);
- VARRAY_FREE (eh->action_record_data);
- if (eh->call_site_data)
- free (eh->call_site_data);
-
- free (eh);
- f->eh = NULL;
+ cfun->eh = ggc_alloc_cleared (sizeof (struct eh_status));
}
-
\f
-/* Start an exception handling region. All instructions emitted
- after this point are considered to be part of the region until
- expand_eh_region_end is invoked. */
+/* Routines to generate the exception tree somewhat directly.
+ These are used from tree-eh.c when processing exception related
+ nodes during tree optimization. */
-void
-expand_eh_region_start ()
+static struct eh_region *
+gen_eh_region (enum eh_region_type type, struct eh_region *outer)
{
- struct eh_region *new_region;
- struct eh_region *cur_region;
- rtx note;
+ struct eh_region *new;
- if (! doing_eh (0))
- return;
+#ifdef ENABLE_CHECKING
+ gcc_assert (doing_eh (0));
+#endif
/* Insert a new blank region as a leaf in the tree. */
- new_region = (struct eh_region *) xcalloc (1, sizeof (*new_region));
- cur_region = cfun->eh->cur_region;
- new_region->outer = cur_region;
- if (cur_region)
+ new = ggc_alloc_cleared (sizeof (*new));
+ new->type = type;
+ new->outer = outer;
+ if (outer)
{
- new_region->next_peer = cur_region->inner;
- cur_region->inner = new_region;
+ new->next_peer = outer->inner;
+ outer->inner = new;
}
else
{
- new_region->next_peer = cfun->eh->region_tree;
- cfun->eh->region_tree = new_region;
+ new->next_peer = cfun->eh->region_tree;
+ cfun->eh->region_tree = new;
}
- cfun->eh->cur_region = new_region;
-
- /* Create a note marking the start of this region. */
- new_region->region_number = ++cfun->eh->last_region_number;
- note = emit_note (NULL, NOTE_INSN_EH_REGION_BEG);
- NOTE_EH_HANDLER (note) = new_region->region_number;
-}
-/* Common code to end a region. Returns the region just ended. */
+ new->region_number = ++cfun->eh->last_region_number;
-static struct eh_region *
-expand_eh_region_end ()
-{
- struct eh_region *cur_region = cfun->eh->cur_region;
- rtx note;
-
- /* Create a nute marking the end of this region. */
- note = emit_note (NULL, NOTE_INSN_EH_REGION_END);
- NOTE_EH_HANDLER (note) = cur_region->region_number;
-
- /* Pop. */
- cfun->eh->cur_region = cur_region->outer;
-
- return cur_region;
+ return new;
}
-/* End an exception handling region for a cleanup. HANDLER is an
- expression to expand for the cleanup. */
-
-void
-expand_eh_region_end_cleanup (handler)
- tree handler;
+struct eh_region *
+gen_eh_region_cleanup (struct eh_region *outer, struct eh_region *prev_try)
{
- struct eh_region *region;
- tree protect_cleanup_actions;
- rtx around_label;
- rtx data_save[2];
-
- if (! doing_eh (0))
- return;
-
- region = expand_eh_region_end ();
- region->type = ERT_CLEANUP;
- region->label = gen_label_rtx ();
- region->u.cleanup.exp = handler;
-
- around_label = gen_label_rtx ();
- emit_jump (around_label);
-
- emit_label (region->label);
-
- /* Give the language a chance to specify an action to be taken if an
- exception is thrown that would propogate out of the HANDLER. */
- protect_cleanup_actions
- = (lang_protect_cleanup_actions
- ? (*lang_protect_cleanup_actions) ()
- : NULL_TREE);
-
- if (protect_cleanup_actions)
- expand_eh_region_start ();
-
- /* In case this cleanup involves an inline destructor with a try block in
- it, we need to save the EH return data registers around it. */
- data_save[0] = gen_reg_rtx (Pmode);
- emit_move_insn (data_save[0], get_exception_pointer (cfun));
- data_save[1] = gen_reg_rtx (word_mode);
- emit_move_insn (data_save[1], get_exception_filter (cfun));
-
- expand_expr (handler, const0_rtx, VOIDmode, 0);
-
- emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
- emit_move_insn (cfun->eh->filter, data_save[1]);
-
- if (protect_cleanup_actions)
- expand_eh_region_end_must_not_throw (protect_cleanup_actions);
-
- /* We need any stack adjustment complete before the around_label. */
- do_pending_stack_adjust ();
-
- /* We delay the generation of the _Unwind_Resume until we generate
- landing pads. We emit a marker here so as to get good control
- flow data in the meantime. */
- region->resume
- = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
- emit_barrier ();
-
- emit_label (around_label);
+ struct eh_region *cleanup = gen_eh_region (ERT_CLEANUP, outer);
+ cleanup->u.cleanup.prev_try = prev_try;
+ return cleanup;
}
-/* End an exception handling region for a try block, and prepares
- for subsequent calls to expand_start_catch. */
-
-void
-expand_start_all_catch ()
+struct eh_region *
+gen_eh_region_try (struct eh_region *outer)
{
- struct eh_region *region;
-
- if (! doing_eh (1))
- return;
-
- region = expand_eh_region_end ();
- region->type = ERT_TRY;
- region->u.try.prev_try = cfun->eh->try_region;
- region->u.try.continue_label = gen_label_rtx ();
-
- cfun->eh->try_region = region;
-
- emit_jump (region->u.try.continue_label);
+ return gen_eh_region (ERT_TRY, outer);
}
-/* Begin a catch clause. TYPE is the type caught, or null if this is
- a catch-all clause. */
-
-void
-expand_start_catch (type)
- tree type;
+struct eh_region *
+gen_eh_region_catch (struct eh_region *t, tree type_or_list)
{
- struct eh_region *t, *c, *l;
-
- if (! doing_eh (0))
- return;
+ struct eh_region *c, *l;
+ tree type_list, type_node;
- if (type)
- add_type_for_runtime (type);
- expand_eh_region_start ();
+ /* Ensure to always end up with a type list to normalize further
+ processing, then register each type against the runtime types map. */
+ type_list = type_or_list;
+ if (type_or_list)
+ {
+ if (TREE_CODE (type_or_list) != TREE_LIST)
+ type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
- t = cfun->eh->try_region;
- c = cfun->eh->cur_region;
- c->type = ERT_CATCH;
- c->u.catch.type = type;
- c->label = gen_label_rtx ();
+ type_node = type_list;
+ for (; type_node; type_node = TREE_CHAIN (type_node))
+ add_type_for_runtime (TREE_VALUE (type_node));
+ }
+ c = gen_eh_region (ERT_CATCH, t->outer);
+ c->u.catch.type_list = type_list;
l = t->u.try.last_catch;
c->u.catch.prev_catch = l;
if (l)
t->u.try.catch = c;
t->u.try.last_catch = c;
- emit_label (c->label);
+ return c;
}
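A hedged usage sketch (annotation, not patch content; outer, type_A and the variable names are placeholders) showing how the constructors above fit together, e.g. from tree-level lowering code:

    struct eh_region *try_r = gen_eh_region_try (outer);
    struct eh_region *c_a = gen_eh_region_catch (try_r, type_A);    /* catch (A)   */
    struct eh_region *c_b = gen_eh_region_catch (try_r, NULL_TREE); /* catch (...) */

    /* Both catch regions are created as siblings of try_r (rooted at
       try_r->outer); try_r->u.try.catch now points to c_a,
       try_r->u.try.last_catch to c_b, and type_A has been registered
       via add_type_for_runtime.  */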
-/* End a catch clause. Control will resume after the try/catch block. */
-
-void
-expand_end_catch ()
+struct eh_region *
+gen_eh_region_allowed (struct eh_region *outer, tree allowed)
{
- struct eh_region *try_region, *catch_region;
-
- if (! doing_eh (0))
- return;
+ struct eh_region *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
+ region->u.allowed.type_list = allowed;
- catch_region = expand_eh_region_end ();
- try_region = cfun->eh->try_region;
+ for (; allowed ; allowed = TREE_CHAIN (allowed))
+ add_type_for_runtime (TREE_VALUE (allowed));
- emit_jump (try_region->u.try.continue_label);
+ return region;
}
-/* End a sequence of catch handlers for a try block. */
-
-void
-expand_end_all_catch ()
+struct eh_region *
+gen_eh_region_must_not_throw (struct eh_region *outer)
{
- struct eh_region *try_region;
-
- if (! doing_eh (0))
- return;
+ return gen_eh_region (ERT_MUST_NOT_THROW, outer);
+}
- try_region = cfun->eh->try_region;
- cfun->eh->try_region = try_region->u.try.prev_try;
+int
+get_eh_region_number (struct eh_region *region)
+{
+ return region->region_number;
+}
- emit_label (try_region->u.try.continue_label);
+bool
+get_eh_region_may_contain_throw (struct eh_region *region)
+{
+ return region->may_contain_throw;
}
-/* End an exception region for an exception type filter. ALLOWED is a
- TREE_LIST of types to be matched by the runtime. FAILURE is an
- expression to invoke if a mismatch ocurrs. */
+tree
+get_eh_region_tree_label (struct eh_region *region)
+{
+ return region->tree_label;
+}
void
-expand_eh_region_end_allowed (allowed, failure)
- tree allowed, failure;
+set_eh_region_tree_label (struct eh_region *region, tree lab)
{
- struct eh_region *region;
- rtx around_label;
-
- if (! doing_eh (0))
- return;
-
- region = expand_eh_region_end ();
- region->type = ERT_ALLOWED_EXCEPTIONS;
- region->u.allowed.type_list = allowed;
- region->label = gen_label_rtx ();
-
- for (; allowed ; allowed = TREE_CHAIN (allowed))
- add_type_for_runtime (TREE_VALUE (allowed));
-
- /* We must emit the call to FAILURE here, so that if this function
- throws a different exception, that it will be processed by the
- correct region. */
-
- around_label = gen_label_rtx ();
- emit_jump (around_label);
-
- emit_label (region->label);
- expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
- /* We must adjust the stack before we reach the AROUND_LABEL because
- the call to FAILURE does not occur on all paths to the
- AROUND_LABEL. */
- do_pending_stack_adjust ();
-
- emit_label (around_label);
+ region->tree_label = lab;
}
-
-/* End an exception region for a must-not-throw filter. FAILURE is an
- expression invoke if an uncaught exception propagates this far.
-
- This is conceptually identical to expand_eh_region_end_allowed with
- an empty allowed list (if you passed "std::terminate" instead of
- "__cxa_call_unexpected"), but they are represented differently in
- the C++ LSDA. */
-
+\f
void
-expand_eh_region_end_must_not_throw (failure)
- tree failure;
+expand_resx_expr (tree exp)
{
- struct eh_region *region;
- rtx around_label;
-
- if (! doing_eh (0))
- return;
-
- region = expand_eh_region_end ();
- region->type = ERT_MUST_NOT_THROW;
- region->label = gen_label_rtx ();
+ int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
+ struct eh_region *reg = VEC_index (eh_region,
+ cfun->eh->region_array, region_nr);
- /* We must emit the call to FAILURE here, so that if this function
- throws a different exception, that it will be processed by the
- correct region. */
-
- around_label = gen_label_rtx ();
- emit_jump (around_label);
-
- emit_label (region->label);
- expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
-
- emit_label (around_label);
+ gcc_assert (!reg->resume);
+ reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
+ emit_barrier ();
}
-/* End an exception region for a throw. No handling goes on here,
- but it's the easiest way for the front-end to indicate what type
- is being thrown. */
+/* Note that the current EH region (if any) may contain a throw, or a
+ call to a function which itself may contain a throw. */
void
-expand_eh_region_end_throw (type)
- tree type;
+note_eh_region_may_contain_throw (struct eh_region *region)
{
- struct eh_region *region;
-
- if (! doing_eh (0))
- return;
-
- region = expand_eh_region_end ();
- region->type = ERT_THROW;
- region->u.throw.type = type;
+ while (region && !region->may_contain_throw)
+ {
+ region->may_contain_throw = 1;
+ region = region->outer;
+ }
}
-/* End a fixup region. Within this region the cleanups for the immediately
- enclosing region are _not_ run. This is used for goto cleanup to avoid
- destroying an object twice.
-
- This would be an extraordinarily simple prospect, were it not for the
- fact that we don't actually know what the immediately enclosing region
- is. This surprising fact is because expand_cleanups is currently
- generating a sequence that it will insert somewhere else. We collect
- the proper notion of "enclosing" in convert_from_eh_region_ranges. */
-
void
-expand_eh_region_end_fixup (handler)
- tree handler;
+note_current_region_may_contain_throw (void)
{
- struct eh_region *fixup;
-
- if (! doing_eh (0))
- return;
-
- fixup = expand_eh_region_end ();
- fixup->type = ERT_FIXUP;
- fixup->u.fixup.cleanup_exp = handler;
+ note_eh_region_may_contain_throw (cfun->eh->cur_region);
}
+
/* Return an rtl expression for a pointer to the exception object
within a handler. */
rtx
-get_exception_pointer (fun)
- struct function *fun;
+get_exception_pointer (struct function *fun)
{
rtx exc_ptr = fun->eh->exc_ptr;
if (fun == cfun && ! exc_ptr)
{
- exc_ptr = gen_reg_rtx (Pmode);
+ exc_ptr = gen_reg_rtx (ptr_mode);
fun->eh->exc_ptr = exc_ptr;
}
return exc_ptr;
/* Return an rtl expression for the exception dispatch filter
within a handler. */
-static rtx
-get_exception_filter (fun)
- struct function *fun;
+rtx
+get_exception_filter (struct function *fun)
{
rtx filter = fun->eh->filter;
if (fun == cfun && ! filter)
{
- filter = gen_reg_rtx (word_mode);
+ filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
fun->eh->filter = filter;
}
return filter;
}
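A rough sketch (annotation, not patch content) of how these pseudos are consumed once landing pads are built: the unwinder delivers the exception pointer and filter value in the target's EH data registers, and the landing-pad code copies them into the pseudos created above, roughly:

    emit_move_insn (get_exception_pointer (cfun),
                    gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
    emit_move_insn (get_exception_filter (cfun),
                    gen_rtx_REG (targetm.eh_return_filter_mode (),
                                 EH_RETURN_DATA_REGNO (1)));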
\f
-/* Begin a region that will contain entries created with
- add_partial_entry. */
-
-void
-begin_protect_partials ()
-{
- /* Push room for a new list. */
- cfun->eh->protect_list
- = tree_cons (NULL_TREE, NULL_TREE, cfun->eh->protect_list);
-}
-
-/* Start a new exception region for a region of code that has a
- cleanup action and push the HANDLER for the region onto
- protect_list. All of the regions created with add_partial_entry
- will be ended when end_protect_partials is invoked. */
-
-void
-add_partial_entry (handler)
- tree handler;
-{
- expand_eh_region_start ();
-
- /* ??? This comment was old before the most recent rewrite. We
- really ought to fix the callers at some point. */
- /* For backwards compatibility, we allow callers to omit calls to
- begin_protect_partials for the outermost region. So, we must
- explicitly do so here. */
- if (!cfun->eh->protect_list)
- begin_protect_partials ();
-
- /* Add this entry to the front of the list. */
- TREE_VALUE (cfun->eh->protect_list)
- = tree_cons (NULL_TREE, handler, TREE_VALUE (cfun->eh->protect_list));
-}
-
-/* End all the pending exception regions on protect_list. */
-
-void
-end_protect_partials ()
-{
- tree t;
-
- /* ??? This comment was old before the most recent rewrite. We
- really ought to fix the callers at some point. */
- /* For backwards compatibility, we allow callers to omit the call to
- begin_protect_partials for the outermost region. So,
- PROTECT_LIST may be NULL. */
- if (!cfun->eh->protect_list)
- return;
-
- /* Pop the topmost entry. */
- t = TREE_VALUE (cfun->eh->protect_list);
- cfun->eh->protect_list = TREE_CHAIN (cfun->eh->protect_list);
-
- /* End all the exception regions. */
- for (; t; t = TREE_CHAIN (t))
- expand_eh_region_end_cleanup (TREE_VALUE (t));
-}
-
-\f
/* This section is for the exception handling specific optimization pass. */
-/* Random access the exception region tree. It's just as simple to
- collect the regions this way as in expand_eh_region_start, but
- without having to realloc memory. */
+/* Random access the exception region tree. */
-static void
-collect_eh_region_array ()
+void
+collect_eh_region_array (void)
{
- struct eh_region **array, *i;
+ struct eh_region *i;
i = cfun->eh->region_tree;
if (! i)
return;
- array = xcalloc (cfun->eh->last_region_number + 1, sizeof (*array));
- cfun->eh->region_array = array;
+ VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
+ cfun->eh->last_region_number + 1);
+ VEC_replace (eh_region, cfun->eh->region_array, 0, 0);
while (1)
{
- array[i->region_number] = i;
+ VEC_replace (eh_region, cfun->eh->region_array, i->region_number, i);
/* If there are sub-regions, process them. */
if (i->inner)
}
}
+/* Remove all regions whose labels are not reachable from insns. */
+
static void
-resolve_fixup_regions ()
+remove_unreachable_regions (rtx insns)
{
- int i, j, n = cfun->eh->last_region_number;
+ int i, *uid_region_num;
+ bool *reachable;
+ struct eh_region *r;
+ rtx insn;
- for (i = 1; i <= n; ++i)
- {
- struct eh_region *fixup = cfun->eh->region_array[i];
- struct eh_region *cleanup = 0;
+ uid_region_num = xcalloc (get_max_uid (), sizeof(int));
+ reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof(bool));
- if (! fixup || fixup->type != ERT_FIXUP)
+ for (i = cfun->eh->last_region_number; i > 0; --i)
+ {
+ r = VEC_index (eh_region, cfun->eh->region_array, i);
+ if (!r || r->region_number != i)
continue;
- for (j = 1; j <= n; ++j)
+ if (r->resume)
{
- cleanup = cfun->eh->region_array[j];
- if (cleanup->type == ERT_CLEANUP
- && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
- break;
+ gcc_assert (!uid_region_num[INSN_UID (r->resume)]);
+ uid_region_num[INSN_UID (r->resume)] = i;
+ }
+ if (r->label)
+ {
+ gcc_assert (!uid_region_num[INSN_UID (r->label)]);
+ uid_region_num[INSN_UID (r->label)] = i;
}
- if (j > n)
- abort ();
-
- fixup->u.fixup.real_region = cleanup->outer;
}
-}
-
-/* Now that we've discovered what region actually encloses a fixup,
- we can shuffle pointers and remove them from the tree. */
-static void
-remove_fixup_regions ()
-{
- int i;
- rtx insn, note;
- struct eh_region *fixup;
-
- /* Walk the insn chain and adjust the REG_EH_REGION numbers
- for instructions referencing fixup regions. This is only
- strictly necessary for fixup regions with no parent, but
- doesn't hurt to do it for all regions. */
- for (insn = get_insns(); insn ; insn = NEXT_INSN (insn))
- if (INSN_P (insn)
- && (note = find_reg_note (insn, REG_EH_REGION, NULL))
- && INTVAL (XEXP (note, 0)) > 0
- && (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
- && fixup->type == ERT_FIXUP)
- {
- if (fixup->u.fixup.real_region)
- XEXP (note, 1) = GEN_INT (fixup->u.fixup.real_region->region_number);
- else
- remove_note (insn, note);
- }
+ for (insn = insns; insn; insn = NEXT_INSN (insn))
+ reachable[uid_region_num[INSN_UID (insn)]] = true;
- /* Remove the fixup regions from the tree. */
for (i = cfun->eh->last_region_number; i > 0; --i)
{
- fixup = cfun->eh->region_array[i];
- if (! fixup)
- continue;
-
- /* Allow GC to maybe free some memory. */
- if (fixup->type == ERT_CLEANUP)
- fixup->u.cleanup.exp = NULL_TREE;
-
- if (fixup->type != ERT_FIXUP)
- continue;
-
- if (fixup->inner)
+ r = VEC_index (eh_region, cfun->eh->region_array, i);
+ if (r && r->region_number == i && !reachable[i])
{
- struct eh_region *parent, *p, **pp;
-
- parent = fixup->u.fixup.real_region;
-
- /* Fix up the children's parent pointers; find the end of
- the list. */
- for (p = fixup->inner; ; p = p->next_peer)
+ bool kill_it = true;
+ switch (r->type)
{
- p->outer = parent;
- if (! p->next_peer)
+ case ERT_THROW:
+ /* Don't remove ERT_THROW regions if their outer region
+ is reachable. */
+ if (r->outer && reachable[r->outer->region_number])
+ kill_it = false;
+ break;
+
+ case ERT_MUST_NOT_THROW:
+ /* MUST_NOT_THROW regions are implementable solely in the
+ runtime, but their existence continues to affect calls
+ within that region. Never delete them here. */
+ kill_it = false;
+ break;
+
+ case ERT_TRY:
+ {
+ /* TRY regions are reachable if any of its CATCH regions
+ are reachable. */
+ struct eh_region *c;
+ for (c = r->u.try.catch; c ; c = c->u.catch.next_catch)
+ if (reachable[c->region_number])
+ {
+ kill_it = false;
+ break;
+ }
break;
+ }
+
+ default:
+ break;
}
- /* In the tree of cleanups, only outer-inner ordering matters.
- So link the children back in anywhere at the correct level. */
- if (parent)
- pp = &parent->inner;
- else
- pp = &cfun->eh->region_tree;
- p->next_peer = *pp;
- *pp = fixup->inner;
- fixup->inner = NULL;
+ if (kill_it)
+ remove_eh_handler (r);
}
-
- remove_eh_handler (fixup);
}
+
+ free (reachable);
+ free (uid_region_num);
}
-/* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
- can_throw instruction in the region. */
+/* Set up EH labels for RTL. */
-static void
-convert_from_eh_region_ranges_1 (pinsns, orig_sp, cur)
- rtx *pinsns;
- int *orig_sp;
- int cur;
+void
+convert_from_eh_region_ranges (void)
{
- int *sp = orig_sp;
- rtx insn, next;
+ rtx insns = get_insns ();
+ int i, n = cfun->eh->last_region_number;
- for (insn = *pinsns; insn ; insn = next)
+ /* Most of the work is already done at the tree level. All we need to
+ do is collect the rtl labels that correspond to the tree labels we
+ allocated earlier. */
+ for (i = 1; i <= n; ++i)
{
- next = NEXT_INSN (insn);
- if (GET_CODE (insn) == NOTE)
- {
- int kind = NOTE_LINE_NUMBER (insn);
- if (kind == NOTE_INSN_EH_REGION_BEG
- || kind == NOTE_INSN_EH_REGION_END)
- {
- if (kind == NOTE_INSN_EH_REGION_BEG)
- {
- struct eh_region *r;
-
- *sp++ = cur;
- cur = NOTE_EH_HANDLER (insn);
-
- r = cfun->eh->region_array[cur];
- if (r->type == ERT_FIXUP)
- {
- r = r->u.fixup.real_region;
- cur = r ? r->region_number : 0;
- }
- else if (r->type == ERT_CATCH)
- {
- r = r->outer;
- cur = r ? r->region_number : 0;
- }
- }
- else
- cur = *--sp;
-
- /* Removing the first insn of a CALL_PLACEHOLDER sequence
- requires extra care to adjust sequence start. */
- if (insn == *pinsns)
- *pinsns = next;
- remove_insn (insn);
- continue;
- }
- }
- else if (INSN_P (insn))
- {
- if (cur > 0
- && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
- /* Calls can always potentially throw exceptions, unless
- they have a REG_EH_REGION note with a value of 0 or less.
- Which should be the only possible kind so far. */
- && (GET_CODE (insn) == CALL_INSN
- /* If we wanted exceptions for non-call insns, then
- any may_trap_p instruction could throw. */
- || (flag_non_call_exceptions
- && may_trap_p (PATTERN (insn)))))
- {
- REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
- REG_NOTES (insn));
- }
+ struct eh_region *region;
- if (GET_CODE (insn) == CALL_INSN
- && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
- {
- convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
- sp, cur);
- convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
- sp, cur);
- convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
- sp, cur);
- }
- }
+ region = VEC_index (eh_region, cfun->eh->region_array, i);
+ if (region && region->tree_label)
+ region->label = DECL_RTL_IF_SET (region->tree_label);
}
- if (sp != orig_sp)
- abort ();
+ remove_unreachable_regions (insns);
}
-void
-convert_from_eh_region_ranges ()
+static void
+add_ehl_entry (rtx label, struct eh_region *region)
{
- int *stack;
- rtx insns;
+ struct ehl_map_entry **slot, *entry;
+
+ LABEL_PRESERVE_P (label) = 1;
- collect_eh_region_array ();
- resolve_fixup_regions ();
+ entry = ggc_alloc (sizeof (*entry));
+ entry->label = label;
+ entry->region = region;
- stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
- insns = get_insns ();
- convert_from_eh_region_ranges_1 (&insns, stack, 0);
- free (stack);
+ slot = (struct ehl_map_entry **)
+ htab_find_slot (cfun->eh->exception_handler_label_map, entry, INSERT);
- remove_fixup_regions ();
+ /* Before landing pad creation, each exception handler has its own
+ label. After landing pad creation, the exception handlers may
+ share landing pads. This is ok, since maybe_remove_eh_handler
+ only requires the 1-1 mapping before landing pad creation. */
+ gcc_assert (!*slot || cfun->eh->built_landing_pads);
+
+ *slot = entry;
}
void
-find_exception_handler_labels ()
+find_exception_handler_labels (void)
{
- rtx list = NULL_RTX;
int i;
- free_EXPR_LIST_list (&exception_handler_labels);
+ if (cfun->eh->exception_handler_label_map)
+ htab_empty (cfun->eh->exception_handler_label_map);
+ else
+ {
+ /* ??? The expansion factor here (3/2) must be greater than the htab
+ occupancy factor (4/3) to avoid unnecessary resizing. */
+ cfun->eh->exception_handler_label_map
+ = htab_create_ggc (cfun->eh->last_region_number * 3 / 2,
+ ehl_hash, ehl_eq, NULL);
+ }
if (cfun->eh->region_tree == NULL)
return;
for (i = cfun->eh->last_region_number; i > 0; --i)
{
- struct eh_region *region = cfun->eh->region_array[i];
+ struct eh_region *region;
rtx lab;
- if (! region)
+ region = VEC_index (eh_region, cfun->eh->region_array, i);
+ if (! region || region->region_number != i)
continue;
if (cfun->eh->built_landing_pads)
lab = region->landing_pad;
lab = region->label;
if (lab)
- list = alloc_EXPR_LIST (0, lab, list);
+ add_ehl_entry (lab, region);
}
/* For sjlj exceptions, need the return label to remain live until
after landing pad generation. */
if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
- list = alloc_EXPR_LIST (0, return_label, list);
-
- exception_handler_labels = list;
+ add_ehl_entry (return_label, NULL);
}
-\f
-static struct eh_region *
-duplicate_eh_region_1 (o, map)
- struct eh_region *o;
- struct inline_remap *map;
-{
- struct eh_region *n
- = (struct eh_region *) xcalloc (1, sizeof (struct eh_region));
+/* Returns true if the current function has exception handling regions. */
- n->region_number = o->region_number + cfun->eh->last_region_number;
- n->type = o->type;
+bool
+current_function_has_exception_handlers (void)
+{
+ int i;
- switch (n->type)
+ for (i = cfun->eh->last_region_number; i > 0; --i)
{
- case ERT_CLEANUP:
- case ERT_MUST_NOT_THROW:
- break;
-
- case ERT_TRY:
- if (o->u.try.continue_label)
- n->u.try.continue_label
- = get_label_from_map (map,
- CODE_LABEL_NUMBER (o->u.try.continue_label));
- break;
+ struct eh_region *region;
- case ERT_CATCH:
- n->u.catch.type = o->u.catch.type;
- break;
+ region = VEC_index (eh_region, cfun->eh->region_array, i);
+ if (region
+ && region->region_number == i
+ && region->type != ERT_THROW)
+ return true;
+ }
- case ERT_ALLOWED_EXCEPTIONS:
- n->u.allowed.type_list = o->u.allowed.type_list;
- break;
+ return false;
+}
+\f
+/* A subroutine of duplicate_eh_regions. Search the region tree under O
+ for the minimum and maximum region numbers. Update *MIN and *MAX. */
- case ERT_THROW:
- n->u.throw.type = o->u.throw.type;
-
- default:
- abort ();
- }
+static void
+duplicate_eh_regions_0 (eh_region o, int *min, int *max)
+{
+ if (o->region_number < *min)
+ *min = o->region_number;
+ if (o->region_number > *max)
+ *max = o->region_number;
- if (o->label)
- n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
- if (o->resume)
+ if (o->inner)
{
- n->resume = map->insn_map[INSN_UID (o->resume)];
- if (n->resume == NULL)
- abort ();
+ o = o->inner;
+ duplicate_eh_regions_0 (o, min, max);
+ while (o->next_peer)
+ {
+ o = o->next_peer;
+ duplicate_eh_regions_0 (o, min, max);
+ }
}
-
- return n;
}
-static void
-duplicate_eh_region_2 (o, n_array)
- struct eh_region *o;
- struct eh_region **n_array;
+/* A subroutine of duplicate_eh_regions. Copy the region tree under OLD.
+ Root it at OUTER, and apply EH_OFFSET to the region number. Don't worry
+ about the other internal pointers just yet, just the tree-like pointers. */
+
+static eh_region
+duplicate_eh_regions_1 (eh_region old, eh_region outer, int eh_offset)
{
- struct eh_region *n = n_array[o->region_number];
+ eh_region ret, n;
- switch (n->type)
- {
- case ERT_TRY:
- n->u.try.catch = n_array[o->u.try.catch->region_number];
- n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
- break;
+ ret = n = ggc_alloc (sizeof (struct eh_region));
- case ERT_CATCH:
- if (o->u.catch.next_catch)
- n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
- if (o->u.catch.prev_catch)
- n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
- break;
+ *n = *old;
+ n->outer = outer;
+ n->next_peer = NULL;
+ gcc_assert (!old->aka);
- default:
- break;
+ n->region_number += eh_offset;
+ VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);
+
+ if (old->inner)
+ {
+ old = old->inner;
+ n = n->inner = duplicate_eh_regions_1 (old, ret, eh_offset);
+ while (old->next_peer)
+ {
+ old = old->next_peer;
+ n = n->next_peer = duplicate_eh_regions_1 (old, ret, eh_offset);
+ }
}
- if (o->outer)
- n->outer = n_array[o->outer->region_number];
- if (o->inner)
- n->inner = n_array[o->inner->region_number];
- if (o->next_peer)
- n->next_peer = n_array[o->next_peer->region_number];
-}
+ return ret;
+}
+
+/* Duplicate the EH regions of IFUN, rooted at COPY_REGION, into current
+ function and root the tree below OUTER_REGION. Remap labels using MAP
+ callback. The special case of COPY_REGION of 0 means all regions. */
int
-duplicate_eh_regions (ifun, map)
- struct function *ifun;
- struct inline_remap *map;
+duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map,
+ void *data, int copy_region, int outer_region)
{
- int ifun_last_region_number = ifun->eh->last_region_number;
- struct eh_region **n_array, *root, *cur;
- int i;
+ eh_region cur, prev_try, outer, *splice;
+ int i, min_region, max_region, eh_offset, cfun_last_region_number;
+ int num_regions;
- if (ifun_last_region_number == 0)
+ if (!ifun->eh->region_tree)
return 0;
- n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));
+ /* Find the range of region numbers to be copied. The interface we
+ provide here mandates a single offset to find new number from old,
+ which means we must look at the numbers present, instead of the
+ count or something else. */
+ if (copy_region > 0)
+ {
+ min_region = INT_MAX;
+ max_region = 0;
- for (i = 1; i <= ifun_last_region_number; ++i)
+ cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
+ duplicate_eh_regions_0 (cur, &min_region, &max_region);
+ }
+ else
+ min_region = 1, max_region = ifun->eh->last_region_number;
+ num_regions = max_region - min_region + 1;
+ cfun_last_region_number = cfun->eh->last_region_number;
+ eh_offset = cfun_last_region_number + 1 - min_region;
+
+ /* If we've not yet created a region array, do so now. */
+ VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
+ cfun_last_region_number + 1 + num_regions);
+ cfun->eh->last_region_number = max_region + eh_offset;
+
+ /* We may have just allocated the array for the first time.
+ Make sure that element zero is null. */
+ VEC_replace (eh_region, cfun->eh->region_array, 0, 0);
+
+ /* Zero all entries in the range allocated. */
+ memset (VEC_address (eh_region, cfun->eh->region_array)
+ + cfun_last_region_number + 1, 0, num_regions * sizeof (eh_region));
+
+ /* Locate the spot at which to insert the new tree. */
+ if (outer_region > 0)
{
- cur = ifun->eh->region_array[i];
- if (!cur || cur->region_number != i)
- continue;
- n_array[i] = duplicate_eh_region_1 (cur, map);
+ outer = VEC_index (eh_region, cfun->eh->region_array, outer_region);
+ splice = &outer->inner;
}
- for (i = 1; i <= ifun_last_region_number; ++i)
+ else
{
- cur = ifun->eh->region_array[i];
- if (!cur || cur->region_number != i)
- continue;
- duplicate_eh_region_2 (cur, n_array);
+ outer = NULL;
+ splice = &cfun->eh->region_tree;
}
+ while (*splice)
+ splice = &(*splice)->next_peer;
- root = n_array[ifun->eh->region_tree->region_number];
- cur = cfun->eh->cur_region;
- if (cur)
+ /* Copy all the regions in the subtree. */
+ if (copy_region > 0)
{
- struct eh_region *p = cur->inner;
- if (p)
- {
- while (p->next_peer)
- p = p->next_peer;
- p->next_peer = root;
- }
- else
- cur->inner = root;
-
- for (i = 1; i <= ifun_last_region_number; ++i)
- if (n_array[i] && n_array[i]->outer == NULL)
- n_array[i]->outer = cur;
+ cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
+ *splice = duplicate_eh_regions_1 (cur, outer, eh_offset);
}
else
{
- struct eh_region *p = cfun->eh->region_tree;
- if (p)
+ eh_region n;
+
+ cur = ifun->eh->region_tree;
+ *splice = n = duplicate_eh_regions_1 (cur, outer, eh_offset);
+ while (cur->next_peer)
{
- while (p->next_peer)
- p = p->next_peer;
- p->next_peer = root;
+ cur = cur->next_peer;
+ n = n->next_peer = duplicate_eh_regions_1 (cur, outer, eh_offset);
}
- else
- cfun->eh->region_tree = root;
}
- free (n_array);
+ /* Remap all the labels in the new regions. */
+ for (i = cfun_last_region_number + 1;
+ VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
+ if (cur && cur->tree_label)
+ cur->tree_label = map (cur->tree_label, data);
+
+ /* Search for the containing ERT_TRY region to fix up
+ the prev_try short-cuts for ERT_CLEANUP regions. */
+ prev_try = NULL;
+ if (outer_region > 0)
+ for (prev_try = VEC_index (eh_region, cfun->eh->region_array, outer_region);
+ prev_try && prev_try->type != ERT_TRY;
+ prev_try = prev_try->outer)
+ if (prev_try->type == ERT_MUST_NOT_THROW
+ || (prev_try->type == ERT_ALLOWED_EXCEPTIONS
+ && !prev_try->u.allowed.type_list))
+ {
+ prev_try = NULL;
+ break;
+ }
+
+ /* Remap all of the internal catch and cleanup linkages. Since we
+ duplicate entire subtrees, all of the referenced regions will have
+ been copied too. And since we renumbered them as a block, a simple
+ bit of arithmetic finds us the index for the replacement region. */
+ for (i = cfun_last_region_number + 1;
+ VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
+ {
+ if (cur == NULL)
+ continue;
- i = cfun->eh->last_region_number;
- cfun->eh->last_region_number = i + ifun_last_region_number;
- return i;
+#define REMAP(REG) \
+ (REG) = VEC_index (eh_region, cfun->eh->region_array, \
+ (REG)->region_number + eh_offset)
+
+ switch (cur->type)
+ {
+ case ERT_TRY:
+ if (cur->u.try.catch)
+ REMAP (cur->u.try.catch);
+ if (cur->u.try.last_catch)
+ REMAP (cur->u.try.last_catch);
+ break;
+
+ case ERT_CATCH:
+ if (cur->u.catch.next_catch)
+ REMAP (cur->u.catch.next_catch);
+ if (cur->u.catch.prev_catch)
+ REMAP (cur->u.catch.prev_catch);
+ break;
+
+ case ERT_CLEANUP:
+ if (cur->u.cleanup.prev_try)
+ REMAP (cur->u.cleanup.prev_try);
+ else
+ cur->u.cleanup.prev_try = prev_try;
+ break;
+
+ default:
+ break;
+ }
+
+#undef REMAP
+ }
+
+ return eh_offset;
}
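A worked example of the renumbering above, with made-up numbers: if the copied subtree of IFUN spans region numbers 4 through 7, then min_region = 4 and max_region = 7; with the current function's last_region_number at 10, eh_offset = 10 + 1 - 4 = 7, the copies land in slots 11 through 14, last_region_number becomes 7 + 7 = 14, and the returned offset of 7 is what the REMAP arithmetic adds to the old region numbers (and what callers can add when remapping REG_EH_REGION values).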
-\f
-/* ??? Move from tree.c to tree.h. */
-#define TYPE_HASH(TYPE) ((HOST_WIDE_INT) (TYPE) & 0777777)
+/* Return true if REGION_A is outer to REGION_B in IFUN. */
-static int
-t2r_eq (pentry, pdata)
- const PTR pentry;
- const PTR pdata;
+bool
+eh_region_outer_p (struct function *ifun, int region_a, int region_b)
{
- tree entry = (tree) pentry;
- tree data = (tree) pdata;
+ struct eh_region *rp_a, *rp_b;
- return TREE_PURPOSE (entry) == data;
+ gcc_assert (ifun->eh->last_region_number > 0);
+ gcc_assert (ifun->eh->region_tree);
+
+ rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
+ rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
+ gcc_assert (rp_a != NULL);
+ gcc_assert (rp_b != NULL);
+
+ do
+ {
+ if (rp_a == rp_b)
+ return true;
+ rp_b = rp_b->outer;
+ }
+ while (rp_b);
+
+ return false;
}
-static hashval_t
-t2r_hash (pentry)
- const PTR pentry;
+/* Return region number of region that is outer to both if REGION_A and
+ REGION_B in IFUN. */
+
+int
+eh_region_outermost (struct function *ifun, int region_a, int region_b)
{
- tree entry = (tree) pentry;
- return TYPE_HASH (TREE_PURPOSE (entry));
-}
+ struct eh_region *rp_a, *rp_b;
+ sbitmap b_outer;
+
+ gcc_assert (ifun->eh->last_region_number > 0);
+ gcc_assert (ifun->eh->region_tree);
+
+ rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
+ rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
+ gcc_assert (rp_a != NULL);
+ gcc_assert (rp_b != NULL);
+ b_outer = sbitmap_alloc (ifun->eh->last_region_number + 1);
+ sbitmap_zero (b_outer);
+
+ do
+ {
+ SET_BIT (b_outer, rp_b->region_number);
+ rp_b = rp_b->outer;
+ }
+ while (rp_b);
+
+ do
+ {
+ if (TEST_BIT (b_outer, rp_a->region_number))
+ {
+ sbitmap_free (b_outer);
+ return rp_a->region_number;
+ }
+ rp_a = rp_a->outer;
+ }
+ while (rp_a);
+
+ sbitmap_free (b_outer);
+ return -1;
+}
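For illustration: with region 3 nested inside region 2, which is nested inside region 1, eh_region_outer_p (f, 1, 3) returns true, and eh_region_outermost (f, 3, 2) first marks region_b and its ancestors {2, 1} in the bitmap, then walks region_a outward (3, then 2) and returns 2, the innermost region enclosing both; two regions with no common ancestor yield -1.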
+\f
static int
-t2r_mark_1 (slot, data)
- PTR *slot;
- PTR data ATTRIBUTE_UNUSED;
+t2r_eq (const void *pentry, const void *pdata)
{
- tree contents = (tree) *slot;
- ggc_mark_tree (contents);
- return 1;
+ const_tree const entry = (const_tree) pentry;
+ const_tree const data = (const_tree) pdata;
+
+ return TREE_PURPOSE (entry) == data;
}
-static void
-t2r_mark (addr)
- PTR addr;
+static hashval_t
+t2r_hash (const void *pentry)
{
- htab_traverse (*(htab_t *)addr, t2r_mark_1, NULL);
+ const_tree const entry = (const_tree) pentry;
+ return TREE_HASH (TREE_PURPOSE (entry));
}
static void
-add_type_for_runtime (type)
- tree type;
+add_type_for_runtime (tree type)
{
tree *slot;
slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
- TYPE_HASH (type), INSERT);
+ TREE_HASH (type), INSERT);
if (*slot == NULL)
{
tree runtime = (*lang_eh_runtime_type) (type);
*slot = tree_cons (type, runtime, NULL_TREE);
}
}
-
+
static tree
-lookup_type_for_runtime (type)
- tree type;
+lookup_type_for_runtime (tree type)
{
tree *slot;
slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
- TYPE_HASH (type), NO_INSERT);
+ TREE_HASH (type), NO_INSERT);
- /* We should have always inserrted the data earlier. */
+ /* We should have always inserted the data earlier. */
return TREE_VALUE (*slot);
}
\f
/* Represent an entry in @TTypes for either catch actions
or exception filter actions. */
-struct ttypes_filter
+struct ttypes_filter GTY(())
{
tree t;
int filter;
(a tree) for a @TTypes type node we are thinking about adding. */
static int
-ttypes_filter_eq (pentry, pdata)
- const PTR pentry;
- const PTR pdata;
+ttypes_filter_eq (const void *pentry, const void *pdata)
{
- const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
- tree data = (tree) pdata;
+ const struct ttypes_filter *const entry
+ = (const struct ttypes_filter *) pentry;
+ const_tree const data = (const_tree) pdata;
return entry->t == data;
}
static hashval_t
-ttypes_filter_hash (pentry)
- const PTR pentry;
+ttypes_filter_hash (const void *pentry)
{
const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
- return TYPE_HASH (entry->t);
+ return TREE_HASH (entry->t);
}
/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
should put these in some canonical order. */
static int
-ehspec_filter_eq (pentry, pdata)
- const PTR pentry;
- const PTR pdata;
+ehspec_filter_eq (const void *pentry, const void *pdata)
{
const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
/* Hash function for exception specification lists. */
static hashval_t
-ehspec_filter_hash (pentry)
- const PTR pentry;
+ehspec_filter_hash (const void *pentry)
{
const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
hashval_t h = 0;
tree list;
for (list = entry->t; list ; list = TREE_CHAIN (list))
- h = (h << 5) + (h >> 27) + TYPE_HASH (TREE_VALUE (list));
+ h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
return h;
}
-/* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
- up the search. Return the filter value to be used. */
+/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
+ to speed up the search. Return the filter value to be used. */
static int
-add_ttypes_entry (ttypes_hash, type)
- htab_t ttypes_hash;
- tree type;
+add_ttypes_entry (htab_t ttypes_hash, tree type)
{
struct ttypes_filter **slot, *n;
slot = (struct ttypes_filter **)
- htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);
+ htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);
if ((n = *slot) == NULL)
{
/* Filter value is a 1 based table index. */
- n = (struct ttypes_filter *) xmalloc (sizeof (*n));
+ n = XNEW (struct ttypes_filter);
n->t = type;
- n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
+ n->filter = VEC_length (tree, cfun->eh->ttype_data) + 1;
*slot = n;
- VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
+ VEC_safe_push (tree, gc, cfun->eh->ttype_data, type);
}
return n->filter;
to speed up the search. Return the filter value to be used. */
static int
-add_ehspec_entry (ehspec_hash, ttypes_hash, list)
- htab_t ehspec_hash;
- htab_t ttypes_hash;
- tree list;
+add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
{
struct ttypes_filter **slot, *n;
struct ttypes_filter dummy;
{
/* Filter value is a -1 based byte index into a uleb128 buffer. */
- n = (struct ttypes_filter *) xmalloc (sizeof (*n));
+ n = XNEW (struct ttypes_filter);
n->t = list;
n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
*slot = n;
- /* Look up each type in the list and encode its filter
- value as a uleb128. Terminate the list with 0. */
+ /* Generate a 0 terminated list of filter values. */
for (; list ; list = TREE_CHAIN (list))
- push_uleb128 (&cfun->eh->ehspec_data,
- add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
- VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
+ {
+ if (targetm.arm_eabi_unwinder)
+ VARRAY_PUSH_TREE (cfun->eh->ehspec_data, TREE_VALUE (list));
+ else
+ {
+ /* Look up each type in the list and encode its filter
+ value as a uleb128. */
+ push_uleb128 (&cfun->eh->ehspec_data,
+ add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
+ }
+ }
+ if (targetm.arm_eabi_unwinder)
+ VARRAY_PUSH_TREE (cfun->eh->ehspec_data, NULL_TREE);
+ else
+ VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
}
return n->filter;
the same filter value, which saves table space. */
static void
-assign_filter_values ()
+assign_filter_values (void)
{
int i;
htab_t ttypes, ehspec;
- VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
- VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");
+ cfun->eh->ttype_data = VEC_alloc (tree, gc, 16);
+ if (targetm.arm_eabi_unwinder)
+ VARRAY_TREE_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");
+ else
+ VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");
ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
for (i = cfun->eh->last_region_number; i > 0; --i)
{
- struct eh_region *r = cfun->eh->region_array[i];
+ struct eh_region *r;
+
+ r = VEC_index (eh_region, cfun->eh->region_array, i);
+
+ /* Mind we don't process a region more than once. */
+ if (!r || r->region_number != i)
+ continue;
+
+ switch (r->type)
+ {
+ case ERT_CATCH:
+ /* Whatever type_list is (NULL or true list), we build a list
+ of filters for the region. */
+ r->u.catch.filter_list = NULL_TREE;
+
+ if (r->u.catch.type_list != NULL)
+ {
+ /* Get a filter value for each of the types caught and store
+ them in the region's dedicated list. */
+ tree tp_node = r->u.catch.type_list;
+
+ for (;tp_node; tp_node = TREE_CHAIN (tp_node))
+ {
+ int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
+ tree flt_node = build_int_cst (NULL_TREE, flt);
+
+ r->u.catch.filter_list
+ = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
+ }
+ }
+ else
+ {
+ /* Get a filter value for the NULL list also since it will need
+ an action record anyway. */
+ int flt = add_ttypes_entry (ttypes, NULL);
+ tree flt_node = build_int_cst (NULL_TREE, flt);
- /* Mind we don't process a region more than once. */
- if (!r || r->region_number != i)
- continue;
+ r->u.catch.filter_list
+ = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
+ }
- switch (r->type)
- {
- case ERT_CATCH:
- r->u.catch.filter = add_ttypes_entry (ttypes, r->u.catch.type);
break;
case ERT_ALLOWED_EXCEPTIONS:
htab_delete (ehspec);
}
+/* Emit SEQ into a basic block just before INSN (which is assumed to be the
+   first instruction of some existing BB) and return the newly produced
+   block.  */
+static basic_block
+emit_to_new_bb_before (rtx seq, rtx insn)
+{
+ rtx last;
+ basic_block bb;
+ edge e;
+ edge_iterator ei;
+
+  /* If there happens to be a fallthru edge (possibly created by a
+     cleanup_cfg call), we don't want it to go into the newly created
+     landing pad or other EH construct.  */
+ for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
+ if (e->flags & EDGE_FALLTHRU)
+ force_nonfallthru (e);
+ else
+ ei_next (&ei);
+ last = emit_insn_before (seq, insn);
+ if (BARRIER_P (last))
+ last = PREV_INSN (last);
+ bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
+ update_bb_for_insn (bb);
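+  /* The emitted sequence may itself contain control flow, so the block we
+     create here is really a superblock; finish_eh_generation will call
+     break_superblocks to split it into proper basic blocks afterward.  */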
+ bb->flags |= BB_SUPERBLOCK;
+ return bb;
+}
+
+/* Generate the code to actually handle exceptions, which will follow the
+ landing pads. */
+
static void
-build_post_landing_pads ()
+build_post_landing_pads (void)
{
int i;
for (i = cfun->eh->last_region_number; i > 0; --i)
{
- struct eh_region *region = cfun->eh->region_array[i];
+ struct eh_region *region;
rtx seq;
+ region = VEC_index (eh_region, cfun->eh->region_array, i);
/* Mind we don't process a region more than once. */
if (!region || region->region_number != i)
continue;
all the way up the chain until blocked by a cleanup. */
/* ??? Outer try regions can share landing pads with inner
try regions if the types are completely non-overlapping,
- and there are no interveaning cleanups. */
+ and there are no intervening cleanups. */
region->post_landing_pad = gen_label_rtx ();
struct eh_region *c;
for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
{
- /* ??? _Unwind_ForcedUnwind wants no match here. */
- if (c->u.catch.type == NULL)
+ if (c->u.catch.type_list == NULL)
emit_jump (c->label);
else
- emit_cmp_and_jump_insns (cfun->eh->filter,
- GEN_INT (c->u.catch.filter),
- EQ, NULL_RTX, word_mode,
- 0, 0, c->label);
+ {
+	      /* We need one cmp/jump per type caught.  Each type list
+		 entry has a matching entry in the filter list (see
+		 assign_filter_values).  */
+ tree tp_node = c->u.catch.type_list;
+ tree flt_node = c->u.catch.filter_list;
+
+ for (; tp_node; )
+ {
+ emit_cmp_and_jump_insns
+ (cfun->eh->filter,
+ GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
+ EQ, NULL_RTX,
+ targetm.eh_return_filter_mode (), 0, c->label);
+
+ tp_node = TREE_CHAIN (tp_node);
+ flt_node = TREE_CHAIN (flt_node);
+ }
+ }
}
}
seq = get_insns ();
end_sequence ();
- emit_insns_before (seq, region->u.try.catch->label);
+ emit_to_new_bb_before (seq, region->u.try.catch->label);
+
break;
case ERT_ALLOWED_EXCEPTIONS:
emit_cmp_and_jump_insns (cfun->eh->filter,
GEN_INT (region->u.allowed.filter),
- EQ, NULL_RTX, word_mode, 0, 0,
- region->label);
+ EQ, NULL_RTX,
+ targetm.eh_return_filter_mode (), 0, region->label);
/* We delay the generation of the _Unwind_Resume until we generate
landing pads. We emit a marker here so as to get good control
seq = get_insns ();
end_sequence ();
- emit_insns_before (seq, region->label);
+ emit_to_new_bb_before (seq, region->label);
break;
case ERT_CLEANUP:
break;
default:
- abort ();
+ gcc_unreachable ();
}
}
}
_Unwind_Resume otherwise. */
static void
-connect_post_landing_pads ()
+connect_post_landing_pads (void)
{
int i;
for (i = cfun->eh->last_region_number; i > 0; --i)
{
- struct eh_region *region = cfun->eh->region_array[i];
+ struct eh_region *region;
struct eh_region *outer;
rtx seq;
+ rtx barrier;
+ region = VEC_index (eh_region, cfun->eh->region_array, i);
/* Mind we don't process a region more than once. */
if (!region || region->region_number != i)
continue;
start_sequence ();
if (outer)
- emit_jump (outer->post_landing_pad);
+ {
+ edge e;
+ basic_block src, dest;
+
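+	  /* The block holding the resume instruction must now transfer
+	     control only to the outer post-landing pad, so replace its old
+	     successor edges with a single edge to that block.  */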
+ emit_jump (outer->post_landing_pad);
+ src = BLOCK_FOR_INSN (region->resume);
+ dest = BLOCK_FOR_INSN (outer->post_landing_pad);
+ while (EDGE_COUNT (src->succs) > 0)
+ remove_edge (EDGE_SUCC (src, 0));
+ e = make_edge (src, dest, 0);
+ e->probability = REG_BR_PROB_BASE;
+ e->count = src->count;
+ }
else
- emit_library_call (unwind_resume_libfunc, LCT_THROW,
- VOIDmode, 1, cfun->eh->exc_ptr, Pmode);
+ {
+ emit_library_call (unwind_resume_libfunc, LCT_THROW,
+ VOIDmode, 1, cfun->eh->exc_ptr, ptr_mode);
+
+ /* What we just emitted was a throwing libcall, so it got a
+ barrier automatically added after it. If the last insn in
+ the libcall sequence isn't the barrier, it's because the
+ target emits multiple insns for a call, and there are insns
+ after the actual call insn (which are redundant and would be
+ optimized away). The barrier is inserted exactly after the
+ call insn, so let's go get that and delete the insns after
+ it, because below we need the barrier to be the last insn in
+ the sequence. */
+ delete_insns_since (NEXT_INSN (last_call_insn ()));
+ }
seq = get_insns ();
end_sequence ();
- emit_insns_before (seq, region->resume);
- flow_delete_insn (region->resume);
+ barrier = emit_insn_before (seq, region->resume);
+ /* Avoid duplicate barrier. */
+ gcc_assert (BARRIER_P (barrier));
+ delete_insn (barrier);
+ delete_insn (region->resume);
+
+ /* ??? From tree-ssa we can wind up with catch regions whose
+ label is not instantiated, but whose resx is present. Now
+ that we've dealt with the resx, kill the region. */
+ if (region->label == NULL && region->type == ERT_CLEANUP)
+ remove_eh_handler (region);
}
}
\f
static void
-dw2_build_landing_pads ()
+dw2_build_landing_pads (void)
{
int i;
- unsigned int j;
for (i = cfun->eh->last_region_number; i > 0; --i)
{
- struct eh_region *region = cfun->eh->region_array[i];
+ struct eh_region *region;
rtx seq;
+ basic_block bb;
+ edge e;
+ region = VEC_index (eh_region, cfun->eh->region_array, i);
/* Mind we don't process a region more than once. */
if (!region || region->region_number != i)
continue;
#endif
{ /* Nothing */ }
- /* If the eh_return data registers are call-saved, then we
- won't have considered them clobbered from the call that
- threw. Kill them now. */
- for (j = 0; ; ++j)
- {
- unsigned r = EH_RETURN_DATA_REGNO (j);
- if (r == INVALID_REGNUM)
- break;
- if (! call_used_regs[r])
- emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
- }
-
emit_move_insn (cfun->eh->exc_ptr,
- gen_rtx_REG (Pmode, EH_RETURN_DATA_REGNO (0)));
+ gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
emit_move_insn (cfun->eh->filter,
- gen_rtx_REG (word_mode, EH_RETURN_DATA_REGNO (1)));
+ gen_rtx_REG (targetm.eh_return_filter_mode (),
+ EH_RETURN_DATA_REGNO (1)));
seq = get_insns ();
end_sequence ();
- emit_insns_before (seq, region->post_landing_pad);
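+      /* The landing pad gets a fresh basic block of its own, which then
+	 falls through into the post-landing-pad code that follows it.  */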
+ bb = emit_to_new_bb_before (seq, region->post_landing_pad);
+ e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
+ e->count = bb->count;
+ e->probability = REG_BR_PROB_BASE;
}
}
};
static bool
-sjlj_find_directly_reachable_regions (lp_info)
- struct sjlj_lp_info *lp_info;
+sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
{
rtx insn;
bool found_one = false;
for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
{
struct eh_region *region;
+ enum reachable_code rc;
tree type_thrown;
rtx note;
if (!note || INTVAL (XEXP (note, 0)) <= 0)
continue;
- region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
+ region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
type_thrown = NULL_TREE;
if (region->type == ERT_THROW)
/* Find the first containing region that might handle the exception.
That's the landing pad to which we will transfer control. */
+ rc = RNL_NOT_CAUGHT;
for (; region; region = region->outer)
- if (reachable_next_level (region, type_thrown, 0) != RNL_NOT_CAUGHT)
- break;
-
- if (region)
+ {
+ rc = reachable_next_level (region, type_thrown, NULL);
+ if (rc != RNL_NOT_CAUGHT)
+ break;
+ }
+ if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
{
lp_info[region->region_number].directly_reachable = 1;
found_one = true;
}
static void
-sjlj_assign_call_site_values (dispatch_label, lp_info)
- rtx dispatch_label;
- struct sjlj_lp_info *lp_info;
+sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
htab_t ar_hash;
int i, index;
for (i = cfun->eh->last_region_number; i > 0; --i)
if (lp_info[i].directly_reachable)
{
- struct eh_region *r = cfun->eh->region_array[i];
+ struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
+
r->landing_pad = dispatch_label;
lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
if (lp_info[i].action_index != -1)
A region receives a dispatch index if it is directly reachable
and requires in-function processing. Regions that share post-landing
- pads may share dispatch indicies. */
+ pads may share dispatch indices. */
/* ??? Post-landing pad sharing doesn't actually happen at the moment
(see build_post_landing_pads) so we don't bother checking for it. */
index = 0;
for (i = cfun->eh->last_region_number; i > 0; --i)
- if (lp_info[i].directly_reachable
- && lp_info[i].action_index >= 0)
+ if (lp_info[i].directly_reachable)
lp_info[i].dispatch_index = index++;
/* Finally: assign call-site values. If dwarf2 terms, this would be
}
static void
-sjlj_mark_call_sites (lp_info)
- struct sjlj_lp_info *lp_info;
+sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
{
int last_call_site = -2;
rtx insn, mem;
- mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
- sjlj_fc_call_site_ofs);
-
for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
{
struct eh_region *region;
rtx note, before, p;
/* Reset value tracking at extended basic block boundaries. */
- if (GET_CODE (insn) == CODE_LABEL)
+ if (LABEL_P (insn))
last_call_site = -2;
if (! INSN_P (insn))
/* Calls (and trapping insns) without notes are outside any
exception handling region in this function. Mark them as
no action. */
- if (GET_CODE (insn) == CALL_INSN
+ if (CALL_P (insn)
|| (flag_non_call_exceptions
&& may_trap_p (PATTERN (insn))))
this_call_site = -1;
if (INTVAL (XEXP (note, 0)) <= 0)
continue;
- region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
+ region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
this_call_site = lp_info[region->region_number].call_site_index;
}
       /* Don't separate a call from its argument loads.  */
before = insn;
-      if (GET_CODE (insn) == CALL_INSN)
- before = find_first_parameter_load (insn, NULL_RTX);
+ if (CALL_P (insn))
+ before = find_first_parameter_load (insn, NULL_RTX);
start_sequence ();
+ mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
+ sjlj_fc_call_site_ofs);
emit_move_insn (mem, GEN_INT (this_call_site));
p = get_insns ();
end_sequence ();
- emit_insns_before (p, before);
+ emit_insn_before (p, before);
last_call_site = this_call_site;
}
}
/* Construct the SjLj_Function_Context. */
static void
-sjlj_emit_function_enter (dispatch_label)
- rtx dispatch_label;
+sjlj_emit_function_enter (rtx dispatch_label)
{
rtx fn_begin, fc, mem, seq;
+ bool fn_begin_outside_block;
fc = cfun->eh->sjlj_fc;
if (cfun->uses_eh_lsda)
{
char buf[20];
- ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", sjlj_funcdef_number);
- emit_move_insn (mem, gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)));
+ rtx sym;
+
+ ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
+ sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
+ SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
+ emit_move_insn (mem, sym);
}
else
emit_move_insn (mem, const0_rtx);
-
+
#ifdef DONT_USE_BUILTIN_SETJMP
{
- rtx x, note;
- x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_NORMAL,
+ rtx x;
+ x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
TYPE_MODE (integer_type_node), 1,
plus_constant (XEXP (fc, 0),
sjlj_fc_jbuf_ofs), Pmode);
- note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
- NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
-
emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
- TYPE_MODE (integer_type_node), 0, 0,
- dispatch_label);
+ TYPE_MODE (integer_type_node), 0, dispatch_label);
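+    /* The branch to DISPATCH_LABEL is the "returned twice" path out of
+       setjmp; predict it as taken only about 1% of the time.  */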
+ add_reg_br_prob_note (get_insns (), REG_BR_PROB_BASE/100);
}
#else
expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
do this in a block that is at loop level 0 and dominates all
can_throw_internal instructions. */
+ fn_begin_outside_block = true;
for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
- if (GET_CODE (fn_begin) == NOTE
- && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
- break;
- emit_insns_after (seq, fn_begin);
+ if (NOTE_P (fn_begin))
+ {
+ if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
+ break;
+ else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
+ fn_begin_outside_block = false;
+ }
+
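+  /* If NOTE_INSN_FUNCTION_BEG was seen before any basic block note, the
+     setup code can be placed on the edge leaving the entry block; otherwise
+     emit it directly after the note, inside its block.  */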
+ if (fn_begin_outside_block)
+ insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
+ else
+ emit_insn_after (seq, fn_begin);
}
/* Call back from expand_function_end to know where we should put
the call to unwind_sjlj_unregister_libfunc if needed. */
void
-sjlj_emit_function_exit_after (after)
- rtx after;
+sjlj_emit_function_exit_after (rtx after)
{
cfun->eh->sjlj_exit_after = after;
}
static void
-sjlj_emit_function_exit ()
+sjlj_emit_function_exit (void)
{
rtx seq;
+ edge e;
+ edge_iterator ei;
start_sequence ();
post-dominates all can_throw_internal instructions. This is
the last possible moment. */
- emit_insns_after (seq, cfun->eh->sjlj_exit_after);
+ FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
+ if (e->flags & EDGE_FALLTHRU)
+ break;
+ if (e)
+ {
+ rtx insn;
+
+	  /* Figure out whether the place we are supposed to insert the
+	     libcall is inside the last basic block or after it.  In the
+	     latter case we need to emit the insns on the edge.  */
+ gcc_assert (e->src->next_bb == EXIT_BLOCK_PTR);
+ for (insn = BB_HEAD (e->src); ; insn = NEXT_INSN (insn))
+ {
+ if (insn == cfun->eh->sjlj_exit_after)
+ {
+ if (LABEL_P (insn))
+ insn = NEXT_INSN (insn);
+ emit_insn_after (seq, insn);
+ return;
+ }
+ if (insn == BB_END (e->src))
+ break;
+ }
+ insert_insn_on_edge (seq, e);
+ }
}
static void
-sjlj_emit_dispatch_table (dispatch_label, lp_info)
- rtx dispatch_label;
- struct sjlj_lp_info *lp_info;
+sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
int i, first_reachable;
rtx mem, dispatch, seq, fc;
+ rtx before;
+ basic_block bb;
+ edge e;
fc = cfun->eh->sjlj_fc;
start_sequence ();
emit_label (dispatch_label);
-
+
#ifndef DONT_USE_BUILTIN_SETJMP
expand_builtin_setjmp_receiver (dispatch_label);
#endif
dispatch = copy_to_reg (mem);
mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
- if (word_mode != Pmode)
+ if (word_mode != ptr_mode)
{
#ifdef POINTERS_EXTEND_UNSIGNED
- mem = convert_memory_address (Pmode, mem);
+ mem = convert_memory_address (ptr_mode, mem);
#else
- mem = convert_to_mode (Pmode, mem, 0);
+ mem = convert_to_mode (ptr_mode, mem, 0);
#endif
}
emit_move_insn (cfun->eh->exc_ptr, mem);
first_reachable = 0;
for (i = cfun->eh->last_region_number; i > 0; --i)
{
- if (! lp_info[i].directly_reachable
- || lp_info[i].action_index < 0)
+ if (! lp_info[i].directly_reachable)
continue;
if (! first_reachable)
continue;
}
- emit_cmp_and_jump_insns (dispatch,
- GEN_INT (lp_info[i].dispatch_index), EQ,
- NULL_RTX, TYPE_MODE (integer_type_node), 0, 0,
- cfun->eh->region_array[i]->post_landing_pad);
+ emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
+ EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
+ ((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, i))
+ ->post_landing_pad);
}
seq = get_insns ();
end_sequence ();
- emit_insns_before (seq, (cfun->eh->region_array[first_reachable]
- ->post_landing_pad));
+ before = (((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, first_reachable))
+ ->post_landing_pad);
+
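+  /* Emit the whole dispatch sequence as a block of its own just before the
+     first reachable post-landing pad, falling through into that code.  */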
+ bb = emit_to_new_bb_before (seq, before);
+ e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
+ e->count = bb->count;
+ e->probability = REG_BR_PROB_BASE;
}
static void
-sjlj_build_landing_pads ()
+sjlj_build_landing_pads (void)
{
struct sjlj_lp_info *lp_info;
- lp_info = (struct sjlj_lp_info *) xcalloc (cfun->eh->last_region_number + 1,
- sizeof (struct sjlj_lp_info));
+ lp_info = XCNEWVEC (struct sjlj_lp_info, cfun->eh->last_region_number + 1);
if (sjlj_find_directly_reachable_regions (lp_info))
{
}
void
-finish_eh_generation ()
+finish_eh_generation (void)
{
+ basic_block bb;
+
/* Nothing to do if no regions created. */
if (cfun->eh->region_tree == NULL)
return;
connect many of the handlers, and then type information will not
be effective. Still, this is a win over previous implementations. */
- rebuild_jump_labels (get_insns ());
- find_basic_blocks (get_insns (), max_reg_num (), 0);
- cleanup_cfg (CLEANUP_PRE_LOOP);
-
/* These registers are used by the landing pads. Make sure they
have been generated. */
get_exception_pointer (cfun);
/* We've totally changed the CFG. Start over. */
find_exception_handler_labels ();
- rebuild_jump_labels (get_insns ());
- find_basic_blocks (get_insns (), max_reg_num (), 0);
- cleanup_cfg (CLEANUP_PRE_LOOP);
+ break_superblocks ();
+ if (USING_SJLJ_EXCEPTIONS
+ /* Kludge for Alpha/Tru64 (see alpha_gp_save_rtx). */
+ || single_succ_edge (ENTRY_BLOCK_PTR)->insns.r)
+ commit_edge_insertions ();
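+
+  /* The new landing pads changed which handlers each block can reach, so
+     drop the old EH edges and let rtl_make_eh_edge recompute them for every
+     block that had any.  */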
+ FOR_EACH_BB (bb)
+ {
+ edge e;
+ edge_iterator ei;
+ bool eh = false;
+ for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
+ {
+ if (e->flags & EDGE_EH)
+ {
+ remove_edge (e);
+ eh = true;
+ }
+ else
+ ei_next (&ei);
+ }
+ if (eh)
+ rtl_make_eh_edge (NULL, bb, BB_END (bb));
+ }
}
\f
+static hashval_t
+ehl_hash (const void *pentry)
+{
+ const struct ehl_map_entry *const entry
+ = (const struct ehl_map_entry *) pentry;
+
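+  /* Knuth-style multiplicative hashing: scaling the label number by the
+     golden ratio spreads consecutive label numbers over the hash space.  */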
+ /* 2^32 * ((sqrt(5) - 1) / 2) */
+ const hashval_t scaled_golden_ratio = 0x9e3779b9;
+ return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
+}
+
+static int
+ehl_eq (const void *pentry, const void *pdata)
+{
+ const struct ehl_map_entry *const entry
+ = (const struct ehl_map_entry *) pentry;
+ const struct ehl_map_entry *const data
+ = (const struct ehl_map_entry *) pdata;
+
+ return entry->label == data->label;
+}
+
/* This section handles removing dead code for flow. */
-/* Remove LABEL from the exception_handler_labels list. */
+/* Remove LABEL from exception_handler_label_map. */
static void
-remove_exception_handler_label (label)
- rtx label;
+remove_exception_handler_label (rtx label)
{
- rtx *pl, l;
+ struct ehl_map_entry **slot, tmp;
- for (pl = &exception_handler_labels, l = *pl;
- XEXP (l, 0) != label;
- pl = &XEXP (l, 1), l = *pl)
- continue;
+ /* If exception_handler_label_map was not built yet,
+ there is nothing to do. */
+ if (cfun->eh->exception_handler_label_map == NULL)
+ return;
+
+ tmp.label = label;
+ slot = (struct ehl_map_entry **)
+ htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
+ gcc_assert (slot);
- *pl = XEXP (l, 1);
- free_EXPR_LIST_node (l);
+ htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
}
/* Splice REGION from the region tree etc. */
static void
-remove_eh_handler (region)
- struct eh_region *region;
+remove_eh_handler (struct eh_region *region)
{
- struct eh_region **pp, *p;
+ struct eh_region **pp, **pp_start, *p, *outer, *inner;
rtx lab;
- int i;
/* For the benefit of efficiently handling REG_EH_REGION notes,
replace this region in the region array with its containing
region. Note that previous region deletions may result in
- multiple copies of this region in the array, so we have to
- search the whole thing. */
- for (i = cfun->eh->last_region_number; i > 0; --i)
- if (cfun->eh->region_array[i] == region)
- cfun->eh->region_array[i] = region->outer;
+ multiple copies of this region in the array, so we have a
+ list of alternate numbers by which we are known. */
+
+ outer = region->outer;
+ VEC_replace (eh_region, cfun->eh->region_array, region->region_number, outer);
+ if (region->aka)
+ {
+ unsigned i;
+ bitmap_iterator bi;
+
+ EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
+ {
+ VEC_replace (eh_region, cfun->eh->region_array, i, outer);
+ }
+ }
+
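+  /* The containing region inherits this region's number (and any aliases it
+     had already accumulated), so REG_EH_REGION notes that still carry the
+     old numbers keep resolving to a live region.  */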
+ if (outer)
+ {
+ if (!outer->aka)
+ outer->aka = BITMAP_GGC_ALLOC ();
+ if (region->aka)
+ bitmap_ior_into (outer->aka, region->aka);
+ bitmap_set_bit (outer->aka, region->region_number);
+ }
if (cfun->eh->built_landing_pads)
lab = region->landing_pad;
if (lab)
remove_exception_handler_label (lab);
- if (region->outer)
-    pp = &region->outer->inner;
+ if (outer)
+ pp_start = &outer->inner;
else
- pp = &cfun->eh->region_tree;
- for (p = *pp; p != region; pp = &p->next_peer, p = *pp)
+ pp_start = &cfun->eh->region_tree;
+ for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
continue;
+ *pp = region->next_peer;
- if (region->inner)
+ inner = region->inner;
+ if (inner)
{
- for (p = region->inner; p->next_peer ; p = p->next_peer)
- p->outer = region->outer;
- p->next_peer = region->next_peer;
- p->outer = region->outer;
- *pp = region->inner;
+ for (p = inner; p->next_peer ; p = p->next_peer)
+ p->outer = outer;
+ p->outer = outer;
+
+ p->next_peer = *pp_start;
+ *pp_start = inner;
}
- else
- *pp = region->next_peer;
if (region->type == ERT_CATCH)
{
try->type == ERT_CATCH;
try = try->next_peer)
continue;
- if (try->type != ERT_TRY)
- abort ();
+ gcc_assert (try->type == ERT_TRY);
next = region->u.catch.next_catch;
prev = region->u.catch.prev_catch;
remove_eh_handler (try);
}
}
-
- free (region);
}
/* LABEL heads a basic block that is about to be deleted. If this
delete the region. */
void
-maybe_remove_eh_handler (label)
- rtx label;
+maybe_remove_eh_handler (rtx label)
{
- int i;
+ struct ehl_map_entry **slot, tmp;
+ struct eh_region *region;
/* ??? After generating landing pads, it's not so simple to determine
if the region data is completely unused. One must examine the
if (cfun->eh->built_landing_pads)
return;
- for (i = cfun->eh->last_region_number; i > 0; --i)
+ tmp.label = label;
+ slot = (struct ehl_map_entry **)
+ htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
+ if (! slot)
+ return;
+ region = (*slot)->region;
+ if (! region)
+ return;
+
+ /* Flow will want to remove MUST_NOT_THROW regions as unreachable
+ because there is no path to the fallback call to terminate.
+ But the region continues to affect call-site data until there
+ are no more contained calls, which we don't see here. */
+ if (region->type == ERT_MUST_NOT_THROW)
{
- struct eh_region *region = cfun->eh->region_array[i];
- if (region && region->label == label)
- {
- /* Flow will want to remove MUST_NOT_THROW regions as unreachable
- because there is no path to the fallback call to terminate.
- But the region continues to affect call-site data until there
- are no more contained calls, which we don't see here. */
- if (region->type == ERT_MUST_NOT_THROW)
- {
- remove_exception_handler_label (region->label);
- region->label = NULL_RTX;
- }
- else
- remove_eh_handler (region);
- break;
- }
+ htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
+ region->label = NULL_RTX;
}
+ else
+ remove_eh_handler (region);
+}
+
+/* Invokes CALLBACK for every exception handler label. Only used by old
+ loop hackery; should not be used by new code. */
+
+void
+for_each_eh_label (void (*callback) (rtx))
+{
+ htab_traverse (cfun->eh->exception_handler_label_map, for_each_eh_label_1,
+ (void *) &callback);
+}
+
+static int
+for_each_eh_label_1 (void **pentry, void *data)
+{
+ struct ehl_map_entry *entry = *(struct ehl_map_entry **)pentry;
+ void (*callback) (rtx) = *(void (**) (rtx)) data;
+
+ (*callback) (entry->label);
+ return 1;
}
+/* Invoke CALLBACK for every exception region in the current function. */
+
+void
+for_each_eh_region (void (*callback) (struct eh_region *))
+{
+ int i, n = cfun->eh->last_region_number;
+ for (i = 1; i <= n; ++i)
+ {
+ struct eh_region *region;
+
+ region = VEC_index (eh_region, cfun->eh->region_array, i);
+ if (region)
+ (*callback) (region);
+ }
+}
\f
/* This section describes CFG exception edges for flow. */
{
tree types_caught;
tree types_allowed;
- rtx handlers;
+ void (*callback) (struct eh_region *, void *);
+ void *callback_data;
+ bool saw_any_handlers;
};
/* A subroutine of reachable_next_level. Return true if TYPE, or a
base class of TYPE, is in HANDLED. */
static int
-check_handled (handled, type)
- tree handled, type;
+check_handled (tree handled, tree type)
{
tree t;
/* A subroutine of reachable_next_level. If we are collecting a list
of handlers, add one. After landing pad generation, reference
it instead of the handlers themselves. Further, the handlers are
- all wired together, so by referencing one, we've got them all.
+ all wired together, so by referencing one, we've got them all.
Before landing pad generation we reference each handler individually.
LP_REGION contains the landing pad; REGION is the handler. */
static void
-add_reachable_handler (info, lp_region, region)
- struct reachable_info *info;
- struct eh_region *lp_region;
- struct eh_region *region;
+add_reachable_handler (struct reachable_info *info,
+ struct eh_region *lp_region, struct eh_region *region)
{
if (! info)
return;
+ info->saw_any_handlers = true;
+
if (cfun->eh->built_landing_pads)
- {
- if (! info->handlers)
- info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
- }
+ info->callback (lp_region, info->callback_data);
else
- info->handlers = alloc_INSN_LIST (region->label, info->handlers);
+ info->callback (region, info->callback_data);
}
-/* Process one level of exception regions for reachability.
+/* Process one level of exception regions for reachability.
If TYPE_THROWN is non-null, then it is the *exact* type being
propagated. If INFO is non-null, then collect handler labels
and caught/allowed type information between invocations. */
static enum reachable_code
-reachable_next_level (region, type_thrown, info)
- struct eh_region *region;
- tree type_thrown;
- struct reachable_info *info;
+reachable_next_level (struct eh_region *region, tree type_thrown,
+ struct reachable_info *info)
{
switch (region->type)
{
for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
{
/* A catch-all handler ends the search. */
- /* ??? _Unwind_ForcedUnwind will want outer cleanups
- to be run as well. */
- if (c->u.catch.type == NULL)
+ if (c->u.catch.type_list == NULL)
{
add_reachable_handler (info, region, c);
return RNL_CAUGHT;
if (type_thrown)
{
- /* If we have a type match, end the search. */
- if (c->u.catch.type == type_thrown
- || (lang_eh_type_covers
- && (*lang_eh_type_covers) (c->u.catch.type,
- type_thrown)))
+ /* If we have at least one type match, end the search. */
+ tree tp_node = c->u.catch.type_list;
+
+ for (; tp_node; tp_node = TREE_CHAIN (tp_node))
{
- add_reachable_handler (info, region, c);
- return RNL_CAUGHT;
+ tree type = TREE_VALUE (tp_node);
+
+ if (type == type_thrown
+ || (lang_eh_type_covers
+ && (*lang_eh_type_covers) (type, type_thrown)))
+ {
+ add_reachable_handler (info, region, c);
+ return RNL_CAUGHT;
+ }
}
/* If we have definitive information of a match failure,
return RNL_NOT_CAUGHT;
}
+ /* At this point, we either don't know what type is thrown or
+ don't have front-end assistance to help deciding if it is
+ covered by one of the types in the list for this region.
+
+ We'd then like to add this region to the list of reachable
+ handlers since it is indeed potentially reachable based on the
+ information we have.
+
+ Actually, this handler is for sure not reachable if all the
+ types it matches have already been caught. That is, it is only
+ potentially reachable if at least one of the types it catches
+ has not been previously caught. */
+
if (! info)
ret = RNL_MAYBE_CAUGHT;
-
- /* A type must not have been previously caught. */
- else if (! check_handled (info->types_caught, c->u.catch.type))
+ else
{
- add_reachable_handler (info, region, c);
- info->types_caught = tree_cons (NULL, c->u.catch.type,
- info->types_caught);
+ tree tp_node = c->u.catch.type_list;
+ bool maybe_reachable = false;
+
+ /* Compute the potential reachability of this handler and
+ update the list of types caught at the same time. */
+ for (; tp_node; tp_node = TREE_CHAIN (tp_node))
+ {
+ tree type = TREE_VALUE (tp_node);
+
+ if (! check_handled (info->types_caught, type))
+ {
+ info->types_caught
+ = tree_cons (NULL, type, info->types_caught);
+
+ maybe_reachable = true;
+ }
+ }
- /* ??? If the catch type is a base class of every allowed
- type, then we know we can stop the search. */
- ret = RNL_MAYBE_CAUGHT;
+ if (maybe_reachable)
+ {
+ add_reachable_handler (info, region, c);
+
+ /* ??? If the catch type is a base class of every allowed
+ type, then we know we can stop the search. */
+ ret = RNL_MAYBE_CAUGHT;
+ }
}
}
info->types_allowed = tree_cons (NULL_TREE,
region->u.allowed.type_list,
info->types_allowed);
-
- /* If we have definitive information about the type heirarchy,
+
+ /* If we have definitive information about the type hierarchy,
then we can tell if the thrown type will pass through the
filter. */
if (type_thrown && lang_eh_type_covers)
return RNL_MAYBE_CAUGHT;
case ERT_CATCH:
- /* Catch regions are handled by their controling try region. */
+ /* Catch regions are handled by their controlling try region. */
return RNL_NOT_CAUGHT;
case ERT_MUST_NOT_THROW:
/* Here we end our search, since no exceptions may propagate.
If we've touched down at some landing pad previous, then the
explicit function call we generated may be used. Otherwise
- the call is made by the runtime. */
- if (info && info->handlers)
+ the call is made by the runtime.
+
+ Before inlining, do not perform this optimization. We may
+ inline a subroutine that contains handlers, and that will
+ change the value of saw_any_handlers. */
+
+ if ((info && info->saw_any_handlers) || !cfun->after_inlining)
{
add_reachable_handler (info, region, region);
- return RNL_CAUGHT;
+ return RNL_CAUGHT;
}
else
return RNL_BLOCKED;
case ERT_THROW:
- case ERT_FIXUP:
+ case ERT_UNKNOWN:
/* Shouldn't see these here. */
+ gcc_unreachable ();
break;
+ default:
+ gcc_unreachable ();
}
-
- abort ();
}
-/* Retrieve a list of labels of exception handlers which can be
- reached by a given insn. */
+/* Invoke CALLBACK on each region reachable from REGION_NUMBER. */
-rtx
-reachable_handlers (insn)
- rtx insn;
+void
+foreach_reachable_handler (int region_number, bool is_resx,
+ void (*callback) (struct eh_region *, void *),
+ void *callback_data)
{
struct reachable_info info;
struct eh_region *region;
tree type_thrown;
- int region_number;
-
- if (GET_CODE (insn) == JUMP_INSN
- && GET_CODE (PATTERN (insn)) == RESX)
- region_number = XINT (PATTERN (insn), 0);
- else
- {
- rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
- if (!note || INTVAL (XEXP (note, 0)) <= 0)
- return NULL;
- region_number = INTVAL (XEXP (note, 0));
- }
memset (&info, 0, sizeof (info));
+ info.callback = callback;
+ info.callback_data = callback_data;
- region = cfun->eh->region_array[region_number];
+ region = VEC_index (eh_region, cfun->eh->region_array, region_number);
type_thrown = NULL_TREE;
- if (GET_CODE (insn) == JUMP_INSN
- && GET_CODE (PATTERN (insn)) == RESX)
+ if (is_resx)
{
/* A RESX leaves a region instead of entering it. Thus the
region itself may have been deleted out from under us. */
if (region == NULL)
- return NULL;
+ return;
region = region->outer;
}
else if (region->type == ERT_THROW)
region = region->outer;
}
- for (; region; region = region->outer)
- if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
- break;
-
- return info.handlers;
+ while (region)
+ {
+ if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
+ break;
+ /* If we have processed one cleanup, there is no point in
+ processing any more of them. Each cleanup will have an edge
+ to the next outer cleanup region, so the flow graph will be
+ accurate. */
+ if (region->type == ERT_CLEANUP)
+ region = region->u.cleanup.prev_try;
+ else
+ region = region->outer;
+ }
}
-/* Determine if the given INSN can throw an exception that is caught
- within the function. */
+/* Retrieve a list of labels of exception handlers which can be
+ reached by a given insn. */
-bool
-can_throw_internal (insn)
- rtx insn;
+static void
+arh_to_landing_pad (struct eh_region *region, void *data)
{
- struct eh_region *region;
- tree type_thrown;
- rtx note;
+ rtx *p_handlers = data;
+ if (! *p_handlers)
+ *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
+}
- if (! INSN_P (insn))
- return false;
+static void
+arh_to_label (struct eh_region *region, void *data)
+{
+ rtx *p_handlers = data;
+ *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
+}
- if (GET_CODE (insn) == INSN
- && GET_CODE (PATTERN (insn)) == SEQUENCE)
- insn = XVECEXP (PATTERN (insn), 0, 0);
+rtx
+reachable_handlers (rtx insn)
+{
+ bool is_resx = false;
+ rtx handlers = NULL;
+ int region_number;
- if (GET_CODE (insn) == CALL_INSN
- && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
+ if (JUMP_P (insn)
+ && GET_CODE (PATTERN (insn)) == RESX)
{
- int i;
- for (i = 0; i < 3; ++i)
- {
- rtx sub = XEXP (PATTERN (insn), i);
- for (; sub ; sub = NEXT_INSN (sub))
- if (can_throw_internal (sub))
- return true;
- }
- return false;
+ region_number = XINT (PATTERN (insn), 0);
+ is_resx = true;
+ }
+ else
+ {
+ rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
+ if (!note || INTVAL (XEXP (note, 0)) <= 0)
+ return NULL;
+ region_number = INTVAL (XEXP (note, 0));
}
- /* Every insn that might throw has an EH_REGION note. */
- note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
- if (!note || INTVAL (XEXP (note, 0)) <= 0)
- return false;
+ foreach_reachable_handler (region_number, is_resx,
+ (cfun->eh->built_landing_pads
+ ? arh_to_landing_pad
+ : arh_to_label),
+ &handlers);
+
+ return handlers;
+}
+
+/* Determine if the given INSN can throw an exception that is caught
+ within the function. */
+
+bool
+can_throw_internal_1 (int region_number, bool is_resx)
+{
+ struct eh_region *region;
+ tree type_thrown;
- region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
+ region = VEC_index (eh_region, cfun->eh->region_array, region_number);
type_thrown = NULL_TREE;
- if (region->type == ERT_THROW)
+ if (is_resx)
+ region = region->outer;
+ else if (region->type == ERT_THROW)
{
type_thrown = region->u.throw.type;
region = region->outer;
if (how == RNL_BLOCKED)
return false;
if (how != RNL_NOT_CAUGHT)
- return true;
+ return true;
}
return false;
}
+bool
+can_throw_internal (const_rtx insn)
+{
+ rtx note;
+
+ if (! INSN_P (insn))
+ return false;
+
+ if (JUMP_P (insn)
+ && GET_CODE (PATTERN (insn)) == RESX
+ && XINT (PATTERN (insn), 0) > 0)
+ return can_throw_internal_1 (XINT (PATTERN (insn), 0), true);
+
+ if (NONJUMP_INSN_P (insn)
+ && GET_CODE (PATTERN (insn)) == SEQUENCE)
+ insn = XVECEXP (PATTERN (insn), 0, 0);
+
+ /* Every insn that might throw has an EH_REGION note. */
+ note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
+ if (!note || INTVAL (XEXP (note, 0)) <= 0)
+ return false;
+
+ return can_throw_internal_1 (INTVAL (XEXP (note, 0)), false);
+}
+
/* Determine if the given INSN can throw an exception that is
visible outside the function. */
bool
-can_throw_external (insn)
- rtx insn;
+can_throw_external_1 (int region_number, bool is_resx)
{
struct eh_region *region;
tree type_thrown;
+
+ region = VEC_index (eh_region, cfun->eh->region_array, region_number);
+
+ type_thrown = NULL_TREE;
+ if (is_resx)
+ region = region->outer;
+ else if (region->type == ERT_THROW)
+ {
+ type_thrown = region->u.throw.type;
+ region = region->outer;
+ }
+
+ /* If the exception is caught or blocked by any containing region,
+ then it is not seen by any calling function. */
+ for (; region ; region = region->outer)
+ if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
+ return false;
+
+ return true;
+}
+
+bool
+can_throw_external (const_rtx insn)
+{
rtx note;
if (! INSN_P (insn))
return false;
- if (GET_CODE (insn) == INSN
+ if (JUMP_P (insn)
+ && GET_CODE (PATTERN (insn)) == RESX
+ && XINT (PATTERN (insn), 0) > 0)
+ return can_throw_external_1 (XINT (PATTERN (insn), 0), true);
+
+ if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
insn = XVECEXP (PATTERN (insn), 0, 0);
- if (GET_CODE (insn) == CALL_INSN
- && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
- {
- int i;
- for (i = 0; i < 3; ++i)
- {
- rtx sub = XEXP (PATTERN (insn), i);
- for (; sub ; sub = NEXT_INSN (sub))
- if (can_throw_external (sub))
- return true;
- }
- return false;
- }
-
note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
if (!note)
{
assume it might throw. Given that the front end and middle
ends mark known NOTHROW functions, this isn't so wildly
inaccurate. */
- return (GET_CODE (insn) == CALL_INSN
+ return (CALL_P (insn)
|| (flag_non_call_exceptions
&& may_trap_p (PATTERN (insn))));
}
if (INTVAL (XEXP (note, 0)) <= 0)
return false;
- region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
-
- type_thrown = NULL_TREE;
- if (region->type == ERT_THROW)
- {
- type_thrown = region->u.throw.type;
- region = region->outer;
- }
-
- /* If the exception is caught or blocked by any containing region,
- then it is not seen by any calling function. */
- for (; region ; region = region->outer)
- if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
- return false;
-
- return true;
+ return can_throw_external_1 (INTVAL (XEXP (note, 0)), false);
}
-/* True if nothing in this function can throw outside this function. */
+/* Set TREE_NOTHROW and cfun->all_throwers_are_sibcalls. */
-bool
-nothrow_function_p ()
+unsigned int
+set_nothrow_function_flags (void)
{
rtx insn;
+ /* If we don't know that this implementation of the function will
+ actually be used, then we must not set TREE_NOTHROW, since
+ callers must not assume that this function does not throw. */
+ if (DECL_REPLACEABLE_P (current_function_decl))
+ return 0;
+
+ TREE_NOTHROW (current_function_decl) = 1;
+
+ /* Assume cfun->all_throwers_are_sibcalls until we encounter
+ something that can throw an exception. We specifically exempt
+ CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
+ and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
+ is optimistic. */
+
+ cfun->all_throwers_are_sibcalls = 1;
+
if (! flag_exceptions)
- return true;
+ return 0;
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
if (can_throw_external (insn))
- return false;
+ {
+ TREE_NOTHROW (current_function_decl) = 0;
+
+ if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
+ {
+ cfun->all_throwers_are_sibcalls = 0;
+ return 0;
+ }
+ }
+
for (insn = current_function_epilogue_delay_list; insn;
insn = XEXP (insn, 1))
if (can_throw_external (insn))
- return false;
+ {
+ TREE_NOTHROW (current_function_decl) = 0;
- return true;
+ if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
+ {
+ cfun->all_throwers_are_sibcalls = 0;
+ return 0;
+ }
+ }
+ return 0;
}
+struct tree_opt_pass pass_set_nothrow_function_flags =
+{
+ NULL, /* name */
+ NULL, /* gate */
+ set_nothrow_function_flags, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ 0, /* tv_id */
+ 0, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ 0, /* todo_flags_finish */
+ 0 /* letter */
+};
+
\f
/* Various hooks for unwind library. */
On the SPARC, this means flushing the register windows. */
void
-expand_builtin_unwind_init ()
+expand_builtin_unwind_init (void)
{
/* Set this so all the registers get saved in our frame; we need to be
- able to copy the saved values for any registers from frames we unwind. */
- current_function_has_nonlocal_label = 1;
+ able to copy the saved values for any registers from frames we unwind. */
+ current_function_saves_all_registers = 1;
#ifdef SETUP_FRAME_ADDRESSES
SETUP_FRAME_ADDRESSES ();
}
rtx
-expand_builtin_eh_return_data_regno (arglist)
- tree arglist;
+expand_builtin_eh_return_data_regno (tree exp)
{
- tree which = TREE_VALUE (arglist);
+ tree which = CALL_EXPR_ARG (exp, 0);
unsigned HOST_WIDE_INT iwhich;
if (TREE_CODE (which) != INTEGER_CST)
{
- error ("argument of `__builtin_eh_return_regno' must be constant");
+ error ("argument of %<__builtin_eh_return_regno%> must be constant");
return constm1_rtx;
}
iwhich = DBX_REGISTER_NUMBER (iwhich);
#endif
- return GEN_INT (iwhich);
+ return GEN_INT (iwhich);
}
/* Given a value extracted from the return address register or stack slot,
return the actual address encoded in that value. */
rtx
-expand_builtin_extract_return_addr (addr_tree)
- tree addr_tree;
+expand_builtin_extract_return_addr (tree addr_tree)
{
- rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
+ rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
+
+ if (GET_MODE (addr) != Pmode
+ && GET_MODE (addr) != VOIDmode)
+ {
+#ifdef POINTERS_EXTEND_UNSIGNED
+ addr = convert_memory_address (Pmode, addr);
+#else
+ addr = convert_to_mode (Pmode, addr, 0);
+#endif
+ }
/* First mask out any unwanted bits. */
#ifdef MASK_RETURN_ADDR
- expand_and (addr, MASK_RETURN_ADDR, addr);
+ expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
#endif
/* Then adjust to find the real return address. */
stack slot so the epilogue will return to that address. */
rtx
-expand_builtin_frob_return_addr (addr_tree)
- tree addr_tree;
+expand_builtin_frob_return_addr (tree addr_tree)
{
- rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
+ rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
-#ifdef POINTERS_EXTEND_UNSIGNED
addr = convert_memory_address (Pmode, addr);
-#endif
#ifdef RETURN_ADDR_OFFSET
addr = force_reg (Pmode, addr);
exception handler. */
void
-expand_builtin_eh_return (stackadj_tree, handler_tree)
- tree stackadj_tree, handler_tree;
-{
- rtx stackadj, handler;
-
- stackadj = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
- handler = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
-
-#ifdef POINTERS_EXTEND_UNSIGNED
- stackadj = convert_memory_address (Pmode, stackadj);
- handler = convert_memory_address (Pmode, handler);
+expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
+ tree handler_tree)
+{
+ rtx tmp;
+
+#ifdef EH_RETURN_STACKADJ_RTX
+ tmp = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj,
+ VOIDmode, EXPAND_NORMAL);
+ tmp = convert_memory_address (Pmode, tmp);
+ if (!cfun->eh->ehr_stackadj)
+ cfun->eh->ehr_stackadj = copy_to_reg (tmp);
+ else if (tmp != cfun->eh->ehr_stackadj)
+ emit_move_insn (cfun->eh->ehr_stackadj, tmp);
#endif
- if (! cfun->eh->ehr_label)
- {
- cfun->eh->ehr_stackadj = copy_to_reg (stackadj);
- cfun->eh->ehr_handler = copy_to_reg (handler);
- cfun->eh->ehr_label = gen_label_rtx ();
- }
- else
- {
- if (stackadj != cfun->eh->ehr_stackadj)
- emit_move_insn (cfun->eh->ehr_stackadj, stackadj);
- if (handler != cfun->eh->ehr_handler)
- emit_move_insn (cfun->eh->ehr_handler, handler);
- }
+ tmp = expand_expr (handler_tree, cfun->eh->ehr_handler,
+ VOIDmode, EXPAND_NORMAL);
+ tmp = convert_memory_address (Pmode, tmp);
+ if (!cfun->eh->ehr_handler)
+ cfun->eh->ehr_handler = copy_to_reg (tmp);
+ else if (tmp != cfun->eh->ehr_handler)
+ emit_move_insn (cfun->eh->ehr_handler, tmp);
+ if (!cfun->eh->ehr_label)
+ cfun->eh->ehr_label = gen_label_rtx ();
emit_jump (cfun->eh->ehr_label);
}
void
-expand_eh_return ()
+expand_eh_return (void)
{
- rtx sa, ra, around_label;
+ rtx around_label;
if (! cfun->eh->ehr_label)
return;
- sa = EH_RETURN_STACKADJ_RTX;
- if (! sa)
- {
- error ("__builtin_eh_return not supported on this target");
- return;
- }
-
current_function_calls_eh_return = 1;
+#ifdef EH_RETURN_STACKADJ_RTX
+ emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
+#endif
+
around_label = gen_label_rtx ();
- emit_move_insn (sa, const0_rtx);
emit_jump (around_label);
emit_label (cfun->eh->ehr_label);
clobber_return_register ();
+#ifdef EH_RETURN_STACKADJ_RTX
+ emit_move_insn (EH_RETURN_STACKADJ_RTX, cfun->eh->ehr_stackadj);
+#endif
+
#ifdef HAVE_eh_return
if (HAVE_eh_return)
- emit_insn (gen_eh_return (cfun->eh->ehr_stackadj, cfun->eh->ehr_handler));
+ emit_insn (gen_eh_return (cfun->eh->ehr_handler));
else
#endif
{
- ra = EH_RETURN_HANDLER_RTX;
- if (! ra)
- {
- error ("__builtin_eh_return not supported on this target");
- ra = gen_reg_rtx (Pmode);
- }
-
- emit_move_insn (sa, cfun->eh->ehr_stackadj);
- emit_move_insn (ra, cfun->eh->ehr_handler);
+#ifdef EH_RETURN_HANDLER_RTX
+ emit_move_insn (EH_RETURN_HANDLER_RTX, cfun->eh->ehr_handler);
+#else
+ error ("__builtin_eh_return not supported on this target");
+#endif
}
emit_label (around_label);
}
+
+/* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
+ POINTERS_EXTEND_UNSIGNED and return it. */
+
+rtx
+expand_builtin_extend_pointer (tree addr_tree)
+{
+ rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
+ int extend;
+
+#ifdef POINTERS_EXTEND_UNSIGNED
+ extend = POINTERS_EXTEND_UNSIGNED;
+#else
+  /* The previous EH code did an unsigned extend by default, so we also do
+     an unsigned extend here for consistency.  */
+ extend = 1;
+#endif
+
+ return convert_modes (word_mode, ptr_mode, addr, extend);
+}
\f
+/* In the following functions, we represent entries in the action table
+ as 1-based indices. Special cases are:
+
+ 0: null action record, non-null landing pad; implies cleanups
+ -1: null action record, null landing pad; implies no action
+ -2: no call-site entry; implies must_not_throw
+ -3: we have yet to process outer regions
+
+ Further, no special cases apply to the "next" field of the record.
+ For next, 0 means end of list. */
+
struct action_record
{
int offset;
};
static int
-action_record_eq (pentry, pdata)
- const PTR pentry;
- const PTR pdata;
+action_record_eq (const void *pentry, const void *pdata)
{
const struct action_record *entry = (const struct action_record *) pentry;
const struct action_record *data = (const struct action_record *) pdata;
}
static hashval_t
-action_record_hash (pentry)
- const PTR pentry;
+action_record_hash (const void *pentry)
{
const struct action_record *entry = (const struct action_record *) pentry;
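+  /* Mix the two fields into a single hash value; 1009 is just a small
+     prime used to keep distinct (next, filter) pairs apart.  */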
return entry->next * 1009 + entry->filter;
}
static int
-add_action_record (ar_hash, filter, next)
- htab_t ar_hash;
- int filter, next;
+add_action_record (htab_t ar_hash, int filter, int next)
{
struct action_record **slot, *new, tmp;
if ((new = *slot) == NULL)
{
- new = (struct action_record *) xmalloc (sizeof (*new));
+ new = xmalloc (sizeof (*new));
new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
new->filter = filter;
new->next = next;
/* The filter value goes in untouched. The link to the next
record is a "self-relative" byte offset, or zero to indicate
that there is no next record. So convert the absolute 1 based
- indicies we've been carrying around into a displacement. */
+ indices we've been carrying around into a displacement. */
push_sleb128 (&cfun->eh->action_record_data, filter);
if (next)
}
static int
-collect_one_action_chain (ar_hash, region)
- htab_t ar_hash;
- struct eh_region *region;
+collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
{
struct eh_region *c;
int next;
/* Process the associated catch regions in reverse order.
If there's a catch-all handler, then we don't need to
search outer regions. Use a magic -3 value to record
- that we havn't done the outer search. */
+ that we haven't done the outer search. */
next = -3;
for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
{
- if (c->u.catch.type == NULL)
- next = add_action_record (ar_hash, c->u.catch.filter, 0);
+ if (c->u.catch.type_list == NULL)
+ {
+ /* Retrieve the filter from the head of the filter list
+ where we have stored it (see assign_filter_values). */
+ int filter
+ = TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));
+
+ next = add_action_record (ar_hash, filter, 0);
+ }
else
{
+ /* Once the outer search is done, trigger an action record for
+ each filter we have. */
+ tree flt_node;
+
if (next == -3)
{
next = collect_one_action_chain (ar_hash, region->outer);
- if (next < 0)
+
+ /* If there is no next action, terminate the chain. */
+ if (next == -1)
next = 0;
+ /* If all outer actions are cleanups or must_not_throw,
+ we'll have no action record for it, since we had wanted
+ to encode these states in the call-site record directly.
+ Add a cleanup action to the chain to catch these. */
+ else if (next <= 0)
+ next = add_action_record (ar_hash, 0, 0);
+ }
+
+ flt_node = c->u.catch.filter_list;
+ for (; flt_node; flt_node = TREE_CHAIN (flt_node))
+ {
+ int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
+ next = add_action_record (ar_hash, filter, next);
}
- next = add_action_record (ar_hash, c->u.catch.filter, next);
}
}
return next;
/* An exception specification adds its filter to the
beginning of the chain. */
next = collect_one_action_chain (ar_hash, region->outer);
- return add_action_record (ar_hash, region->u.allowed.filter,
- next < 0 ? 0 : next);
+
+ /* If there is no next action, terminate the chain. */
+ if (next == -1)
+ next = 0;
+ /* If all outer actions are cleanups or must_not_throw,
+ we'll have no action record for it, since we had wanted
+ to encode these states in the call-site record directly.
+ Add a cleanup action to the chain to catch these. */
+ else if (next <= 0)
+ next = add_action_record (ar_hash, 0, 0);
+
+ return add_action_record (ar_hash, region->u.allowed.filter, next);
case ERT_MUST_NOT_THROW:
/* A must-not-throw region with no inner handlers or cleanups
return collect_one_action_chain (ar_hash, region->outer);
default:
- abort ();
+ gcc_unreachable ();
}
}
static int
-add_call_site (landing_pad, action)
- rtx landing_pad;
- int action;
+add_call_site (rtx landing_pad, int action)
{
struct call_site_record *data = cfun->eh->call_site_data;
int used = cfun->eh->call_site_data_used;
if (used >= size)
{
size = (size ? size * 2 : 64);
- data = (struct call_site_record *)
- xrealloc (data, sizeof (*data) * size);
+ data = ggc_realloc (data, sizeof (*data) * size);
cfun->eh->call_site_data = data;
cfun->eh->call_site_data_size = size;
}
The new note numbers will not refer to region numbers, but
instead to call site entries. */
-void
-convert_to_eh_region_ranges ()
+unsigned int
+convert_to_eh_region_ranges (void)
{
rtx insn, iter, note;
htab_t ar_hash;
int call_site = 0;
if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
- return;
+ return 0;
VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
rtx this_landing_pad;
insn = iter;
- if (GET_CODE (insn) == INSN
+ if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
insn = XVECEXP (PATTERN (insn), 0, 0);
note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
if (!note)
{
- if (! (GET_CODE (insn) == CALL_INSN
+ if (! (CALL_P (insn)
|| (flag_non_call_exceptions
&& may_trap_p (PATTERN (insn)))))
continue;
{
if (INTVAL (XEXP (note, 0)) <= 0)
continue;
- region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
+ region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
this_action = collect_one_action_chain (ar_hash, region);
}
are created. */
if (this_action >= -1)
{
- call_site = add_call_site (this_landing_pad,
+ call_site = add_call_site (this_landing_pad,
this_action < 0 ? 0 : this_action);
note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
NOTE_EH_HANDLER (note) = call_site;
}
htab_delete (ar_hash);
+ return 0;
}
+struct tree_opt_pass pass_convert_to_eh_region_ranges =
+{
+ "eh-ranges", /* name */
+ NULL, /* gate */
+ convert_to_eh_region_ranges, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ 0, /* tv_id */
+ 0, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ TODO_dump_func, /* todo_flags_finish */
+ 0 /* letter */
+};
+
\f
static void
-push_uleb128 (data_area, value)
- varray_type *data_area;
- unsigned int value;
+push_uleb128 (varray_type *data_area, unsigned int value)
{
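+  /* Unsigned LEB128: emit VALUE seven bits at a time, least significant
+     group first, with the high bit set on every byte except the last.
+     For example, 624485 is pushed as the bytes 0xe5 0x8e 0x26.  */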
do
{
}
static void
-push_sleb128 (data_area, value)
- varray_type *data_area;
- int value;
+push_sleb128 (varray_type *data_area, int value)
{
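+  /* Signed LEB128: like the unsigned form, except that emission stops once
+     the remaining bits are all copies of the sign bit.  */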
unsigned char byte;
int more;
\f
#ifndef HAVE_AS_LEB128
static int
-dw2_size_of_call_site_table ()
+dw2_size_of_call_site_table (void)
{
int n = cfun->eh->call_site_data_used;
int size = n * (4 + 4 + 4);
}
static int
-sjlj_size_of_call_site_table ()
+sjlj_size_of_call_site_table (void)
{
int n = cfun->eh->call_site_data_used;
int size = 0;
#endif
static void
-dw2_output_call_site_table ()
+dw2_output_call_site_table (void)
{
- const char *function_start_lab
- = IDENTIFIER_POINTER (current_function_func_begin_label);
int n = cfun->eh->call_site_data_used;
int i;
/* ??? Perhaps use attr_length to choose data1 or data2 instead of
data4 if the function is small enough. */
#ifdef HAVE_AS_LEB128
- dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
+ dw2_asm_output_delta_uleb128 (reg_start_lab,
+ current_function_func_begin_label,
"region %d start", i);
dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
"length");
if (cs->landing_pad)
- dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
+ dw2_asm_output_delta_uleb128 (landing_pad_lab,
+ current_function_func_begin_label,
"landing pad");
else
dw2_asm_output_data_uleb128 (0, "landing pad");
#else
- dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
+ dw2_asm_output_delta (4, reg_start_lab,
+ current_function_func_begin_label,
"region %d start", i);
dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
if (cs->landing_pad)
- dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
+ dw2_asm_output_delta (4, landing_pad_lab,
+ current_function_func_begin_label,
"landing pad");
else
dw2_asm_output_data (4, 0, "landing pad");
}
static void
-sjlj_output_call_site_table ()
+sjlj_output_call_site_table (void)
{
int n = cfun->eh->call_site_data_used;
int i;
call_site_base += n;
}
+#ifndef TARGET_UNWIND_INFO
+/* Switch to the section that should be used for exception tables. */
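+/* When named sections are available the table goes into
+ .gcc_except_table (read-only when EH_TABLES_CAN_BE_READ_ONLY and the
+ type-table encoding allow it, writable otherwise), or into a
+ per-function .gcc_except_table.FNNAME section when -ffunction-sections
+ is in use and the linker supports garbage collection of EH sections;
+ without named sections it goes into the data section when generating
+ PIC and the read-only data section otherwise. */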
+
+static void
+switch_to_exception_section (const char * ARG_UNUSED (fnname))
+{
+ section *s;
+
+ if (exception_section)
+ s = exception_section;
+ else
+ {
+ /* Compute the section and cache it into exception_section,
+ unless it depends on the function name. */
+ if (targetm.have_named_sections)
+ {
+ int flags;
+
+ if (EH_TABLES_CAN_BE_READ_ONLY)
+ {
+ int tt_format =
+ ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
+ flags = ((! flag_pic
+ || ((tt_format & 0x70) != DW_EH_PE_absptr
+ && (tt_format & 0x70) != DW_EH_PE_aligned))
+ ? 0 : SECTION_WRITE);
+ }
+ else
+ flags = SECTION_WRITE;
+
+#ifdef HAVE_LD_EH_GC_SECTIONS
+ if (flag_function_sections)
+ {
+ char *section_name = xmalloc (strlen (fnname) + 32);
+ sprintf (section_name, ".gcc_except_table.%s", fnname);
+ s = get_section (section_name, flags, NULL);
+ free (section_name);
+ }
+ else
+#endif
+ exception_section
+ = s = get_section (".gcc_except_table", flags, NULL);
+ }
+ else
+ exception_section
+ = s = flag_pic ? data_section : readonly_data_section;
+ }
+
+ switch_to_section (s);
+}
+#endif
+
+
+/* Output a reference from an exception table to the type_info object TYPE.
+ TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
+ the value. */
+
+static void
+output_ttype (tree type, int tt_format, int tt_format_size)
+{
+ rtx value;
+ bool public = true;
+
+ if (type == NULL_TREE)
+ value = const0_rtx;
+ else
+ {
+ struct varpool_node *node;
+
+ type = lookup_type_for_runtime (type);
+ value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
+
+ /* Let cgraph know that the rtti decl is used. Not all of the
+ paths below go through assemble_integer, which would take
+ care of this for us. */
+ STRIP_NOPS (type);
+ if (TREE_CODE (type) == ADDR_EXPR)
+ {
+ type = TREE_OPERAND (type, 0);
+ if (TREE_CODE (type) == VAR_DECL)
+ {
+ node = varpool_node (type);
+ if (node)
+ varpool_mark_needed_node (node);
+ public = TREE_PUBLIC (type);
+ }
+ }
+ else
+ gcc_assert (TREE_CODE (type) == INTEGER_CST);
+ }
+
+ /* Allow the target to override the type table entry format. */
+ if (targetm.asm_out.ttype (value))
+ return;
+
+ if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
+ assemble_integer (value, tt_format_size,
+ tt_format_size * BITS_PER_UNIT, 1);
+ else
+ dw2_asm_output_encoded_addr_rtx (tt_format, value, public, NULL);
+}
+
void
-output_function_exception_table ()
+output_function_exception_table (const char * ARG_UNUSED (fnname))
{
int tt_format, cs_format, lp_format, i, n;
#ifdef HAVE_AS_LEB128
int call_site_len;
#endif
int have_tt_data;
- int funcdef_number;
int tt_format_size = 0;
/* Not all functions need anything. */
if (! cfun->uses_eh_lsda)
return;
- funcdef_number = (USING_SJLJ_EXCEPTIONS
- ? sjlj_funcdef_number
- : current_funcdef_number);
+ if (eh_personality_libfunc)
+ assemble_external_libcall (eh_personality_libfunc);
-#ifdef IA64_UNWIND_INFO
+#ifdef TARGET_UNWIND_INFO
+ /* TODO: Move this into target file. */
fputs ("\t.personality\t", asm_out_file);
output_addr_const (asm_out_file, eh_personality_libfunc);
fputs ("\n\t.handlerdata\n", asm_out_file);
/* Note that varasm still thinks we're in the function's code section.
The ".endp" directive that will immediately follow will take us back. */
#else
- exception_section ();
+ switch_to_exception_section (fnname);
#endif
- have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
+ /* If the target wants a label to begin the table, emit it here. */
+ targetm.asm_out.except_table_label (asm_out_file);
+
+ have_tt_data = (VEC_length (tree, cfun->eh->ttype_data) > 0
|| VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);
/* Indicate the format of the @TType entries. */
{
tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
#ifdef HAVE_AS_LEB128
- ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT", funcdef_number);
+ ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
+ current_function_funcdef_no);
#endif
tt_format_size = size_of_encoded_value (tt_format);
- assemble_eh_align (tt_format_size * BITS_PER_UNIT);
+ assemble_align (tt_format_size * BITS_PER_UNIT);
}
- ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "LLSDA", funcdef_number);
+ targetm.asm_out.internal_label (asm_out_file, "LLSDA",
+ current_function_funcdef_no);
/* The LSDA header. */
{
#ifdef HAVE_AS_LEB128
char ttype_after_disp_label[32];
- ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
- funcdef_number);
+ ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
+ current_function_funcdef_no);
dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
"@TType base offset");
ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
after_disp = (1 + size_of_uleb128 (call_site_len)
+ call_site_len
+ VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
- + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
+ + (VEC_length (tree, cfun->eh->ttype_data)
* tt_format_size));
disp = after_disp;
#ifdef HAVE_AS_LEB128
ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
- funcdef_number);
+ current_function_funcdef_no);
ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
- funcdef_number);
+ current_function_funcdef_no);
dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
"Call-site table length");
ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
(i ? NULL : "Action record table"));
if (have_tt_data)
- assemble_eh_align (tt_format_size * BITS_PER_UNIT);
+ assemble_align (tt_format_size * BITS_PER_UNIT);
- i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
+ i = VEC_length (tree, cfun->eh->ttype_data);
while (i-- > 0)
{
- tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
-
- if (type == NULL_TREE)
- type = integer_zero_node;
- else
- type = lookup_type_for_runtime (type);
-
- dw2_asm_output_encoded_addr_rtx (tt_format,
- expand_expr (type, NULL_RTX, VOIDmode,
- EXPAND_INITIALIZER),
- NULL);
+ tree type = VEC_index (tree, cfun->eh->ttype_data, i);
+ output_ttype (type, tt_format, tt_format_size);
}
#ifdef HAVE_AS_LEB128
/* ??? Decode and interpret the data for flag_debug_asm. */
n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
for (i = 0; i < n; ++i)
- dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
- (i ? NULL : "Exception specification table"));
+ {
+ if (targetm.arm_eabi_unwinder)
+ {
+ tree type = VARRAY_TREE (cfun->eh->ehspec_data, i);
+ output_ttype (type, tt_format, tt_format_size);
+ }
+ else
+ dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
+ (i ? NULL : "Exception specification table"));
+ }
+
+ switch_to_section (current_function_section ());
+}
+
+void
+set_eh_throw_stmt_table (struct function *fun, struct htab *table)
+{
+ fun->eh->throw_stmt_table = table;
+}
- function_section (current_function_decl);
+htab_t
+get_eh_throw_stmt_table (struct function *fun)
+{
+ return fun->eh->throw_stmt_table;
+}
- if (USING_SJLJ_EXCEPTIONS)
- sjlj_funcdef_number += 1;
+/* Dump EH information to OUT. */
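+/* Each region is printed on its own line, indented two columns per
+ nesting level, as "<region number> <region type>", followed by the
+ region's tree_label when one is set. */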
+void
+dump_eh_tree (FILE *out, struct function *fun)
+{
+ struct eh_region *i;
+ int depth = 0;
+ static const char * const type_name[] = {"unknown", "cleanup", "try", "catch",
+ "allowed_exceptions", "must_not_throw",
+ "throw"};
+
+ i = fun->eh->region_tree;
+ if (! i)
+ return;
+
+ fprintf (out, "Eh tree:\n");
+ while (1)
+ {
+ fprintf (out, " %*s %i %s", depth * 2, "",
+ i->region_number, type_name [(int)i->type]);
+ if (i->tree_label)
+ {
+ fprintf (out, " tree_label:");
+ print_generic_expr (out, i->tree_label, 0);
+ }
+ fprintf (out, "\n");
+ /* If there are sub-regions, process them. */
+ if (i->inner)
+ i = i->inner, depth++;
+ /* If there are peers, process them. */
+ else if (i->next_peer)
+ i = i->next_peer;
+ /* Otherwise, step back up the tree to the next peer. */
+ else
+ {
+ do {
+ i = i->outer;
+ depth--;
+ if (i == NULL)
+ return;
+ } while (i->next_peer == NULL);
+ i = i->next_peer;
+ }
+ }
+}
+
+/* Verify some basic invariants on EH data structures. Could be extended to
+ catch more. */
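+/* The checks below cover: each region_array slot holds the region whose
+ region_number equals its index; each region's outer pointer matches its
+ position in the region tree; a region that may contain a throw is never
+ nested inside one that may not; the nesting depth never goes negative;
+ and the number of regions in the array equals the number of tree nodes
+ visited. */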
+void
+verify_eh_tree (struct function *fun)
+{
+ struct eh_region *i, *outer = NULL;
+ bool err = false;
+ int nvisited = 0;
+ int count = 0;
+ int j;
+ int depth = 0;
+
+ i = fun->eh->region_tree;
+ if (! i)
+ return;
+ for (j = fun->eh->last_region_number; j > 0; --j)
+ if ((i = VEC_index (eh_region, cfun->eh->region_array, j)))
+ {
+ count++;
+ if (i->region_number != j)
+ {
+ error ("region_array is corrupted for region %i", i->region_number);
+ err = true;
+ }
+ }
+
+ while (1)
+ {
+ if (VEC_index (eh_region, cfun->eh->region_array, i->region_number) != i)
+ {
+ error ("region_array is corrupted for region %i", i->region_number);
+ err = true;
+ }
+ if (i->outer != outer)
+ {
+ error ("outer block of region %i is wrong", i->region_number);
+ err = true;
+ }
+ if (i->may_contain_throw && outer && !outer->may_contain_throw)
+ {
+ error ("region %i may contain throw and is contained in region that may not",
+ i->region_number);
+ err = true;
+ }
+ if (depth < 0)
+ {
+ error ("negative nesting depth of region %i", i->region_number);
+ err = true;
+ }
+ nvisited ++;
+ /* If there are sub-regions, process them. */
+ if (i->inner)
+ outer = i, i = i->inner, depth++;
+ /* If there are peers, process them. */
+ else if (i->next_peer)
+ i = i->next_peer;
+ /* Otherwise, step back up the tree to the next peer. */
+ else
+ {
+ do {
+ i = i->outer;
+ depth--;
+ if (i == NULL)
+ {
+ if (depth != -1)
+ {
+ error ("tree list ends on depth %i", depth + 1);
+ err = true;
+ }
+ if (count != nvisited)
+ {
+ error ("array does not match the region tree");
+ err = true;
+ }
+ if (err)
+ {
+ dump_eh_tree (stderr, fun);
+ internal_error ("verify_eh_tree failed");
+ }
+ return;
+ }
+ outer = i->outer;
+ } while (i->next_peer == NULL);
+ i = i->next_peer;
+ }
+ }
+}
+
+/* Initialize unwind_resume_libfunc. */
+
+void
+default_init_unwind_resume_libfunc (void)
+{
+ /* The default C++ routines aren't actually C++-specific, so use those. */
+ unwind_resume_libfunc =
+ init_one_libfunc (USING_SJLJ_EXCEPTIONS ? "_Unwind_SjLj_Resume"
+ : "_Unwind_Resume");
+}
+
+\f
+static bool
+gate_handle_eh (void)
+{
+ return doing_eh (0);
+}
+
+/* Complete generation of exception handling code. */
+static unsigned int
+rest_of_handle_eh (void)
+{
+ cleanup_cfg (CLEANUP_NO_INSN_DEL);
+ finish_eh_generation ();
+ cleanup_cfg (CLEANUP_NO_INSN_DEL);
+ return 0;
}
+
+struct tree_opt_pass pass_rtl_eh =
+{
+ "eh", /* name */
+ gate_handle_eh, /* gate */
+ rest_of_handle_eh, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ TV_JUMP, /* tv_id */
+ 0, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ TODO_dump_func, /* todo_flags_finish */
+ 'h' /* letter */
+};
+
+#include "gt-except.h"