/* Implements exception handling.
Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
- 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
+ 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
Contributed by Mike Stump <mrs@cygnus.com>.
This file is part of GCC.
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
+#include "cgraph.h"
/* Provide defaults for stuff that may not be defined when using
sjlj exceptions. */
-#ifndef EH_RETURN_STACKADJ_RTX
-#define EH_RETURN_STACKADJ_RTX 0
-#endif
-#ifndef EH_RETURN_HANDLER_RTX
-#define EH_RETURN_HANDLER_RTX 0
-#endif
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif
/* Protect cleanup actions with must-not-throw regions, with a call
to the given failure handler. */
-tree (*lang_protect_cleanup_actions) PARAMS ((void));
+tree (*lang_protect_cleanup_actions) (void);
/* Return true if type A catches type B. */
-int (*lang_eh_type_covers) PARAMS ((tree a, tree b));
+int (*lang_eh_type_covers) (tree a, tree b);
/* Map a type to a runtime object to match type. */
-tree (*lang_eh_runtime_type) PARAMS ((tree));
+tree (*lang_eh_runtime_type) (tree);
/* A hash table of label to region number. */
struct eh_region *region;
};
-static int call_site_base;
+static GTY(()) int call_site_base;
static GTY ((param_is (union tree_node)))
htab_t type_to_runtime_map;
/* Each region does exactly one thing. */
enum eh_region_type
- {
+ {
ERT_UNKNOWN = 0,
ERT_CLEANUP,
ERT_TRY,
struct eh_region_u_fixup {
tree cleanup_exp;
struct eh_region *real_region;
+ bool resolved;
} GTY ((tag ("ERT_FIXUP"))) fixup;
} GTY ((desc ("%0.type"))) u;
/* Entry point for this region's handler before landing pads are built. */
rtx label;
+ tree tree_label;
/* Entry point for this region's handler from the runtime eh library. */
rtx landing_pad;
htab_t GTY ((param_is (struct ehl_map_entry))) exception_handler_label_map;
- struct call_site_record * GTY ((length ("%h.call_site_data_used")))
+ struct call_site_record * GTY ((length ("%h.call_site_data_used")))
call_site_data;
int call_site_data_used;
int call_site_data_size;
};
\f
-static int t2r_eq PARAMS ((const PTR,
- const PTR));
-static hashval_t t2r_hash PARAMS ((const PTR));
-static void add_type_for_runtime PARAMS ((tree));
-static tree lookup_type_for_runtime PARAMS ((tree));
-
-static struct eh_region *expand_eh_region_end PARAMS ((void));
-
-static rtx get_exception_filter PARAMS ((struct function *));
-
-static void collect_eh_region_array PARAMS ((void));
-static void resolve_fixup_regions PARAMS ((void));
-static void remove_fixup_regions PARAMS ((void));
-static void remove_unreachable_regions PARAMS ((rtx));
-static void convert_from_eh_region_ranges_1 PARAMS ((rtx *, int *, int));
-
-static struct eh_region *duplicate_eh_region_1 PARAMS ((struct eh_region *,
- struct inline_remap *));
-static void duplicate_eh_region_2 PARAMS ((struct eh_region *,
- struct eh_region **));
-static int ttypes_filter_eq PARAMS ((const PTR,
- const PTR));
-static hashval_t ttypes_filter_hash PARAMS ((const PTR));
-static int ehspec_filter_eq PARAMS ((const PTR,
- const PTR));
-static hashval_t ehspec_filter_hash PARAMS ((const PTR));
-static int add_ttypes_entry PARAMS ((htab_t, tree));
-static int add_ehspec_entry PARAMS ((htab_t, htab_t,
- tree));
-static void assign_filter_values PARAMS ((void));
-static void build_post_landing_pads PARAMS ((void));
-static void connect_post_landing_pads PARAMS ((void));
-static void dw2_build_landing_pads PARAMS ((void));
+static int t2r_eq (const void *, const void *);
+static hashval_t t2r_hash (const void *);
+static void add_type_for_runtime (tree);
+static tree lookup_type_for_runtime (tree);
+
+static struct eh_region *expand_eh_region_end (void);
+
+static void resolve_fixup_regions (void);
+static void remove_fixup_regions (void);
+static void remove_unreachable_regions (rtx);
+static void convert_from_eh_region_ranges_1 (rtx *, int *, int);
+
+static struct eh_region *duplicate_eh_region_1 (struct eh_region *,
+ struct inline_remap *);
+static void duplicate_eh_region_2 (struct eh_region *, struct eh_region **);
+static int ttypes_filter_eq (const void *, const void *);
+static hashval_t ttypes_filter_hash (const void *);
+static int ehspec_filter_eq (const void *, const void *);
+static hashval_t ehspec_filter_hash (const void *);
+static int add_ttypes_entry (htab_t, tree);
+static int add_ehspec_entry (htab_t, htab_t, tree);
+static void assign_filter_values (void);
+static void build_post_landing_pads (void);
+static void connect_post_landing_pads (void);
+static void dw2_build_landing_pads (void);
struct sjlj_lp_info;
-static bool sjlj_find_directly_reachable_regions
- PARAMS ((struct sjlj_lp_info *));
-static void sjlj_assign_call_site_values
- PARAMS ((rtx, struct sjlj_lp_info *));
-static void sjlj_mark_call_sites
- PARAMS ((struct sjlj_lp_info *));
-static void sjlj_emit_function_enter PARAMS ((rtx));
-static void sjlj_emit_function_exit PARAMS ((void));
-static void sjlj_emit_dispatch_table
- PARAMS ((rtx, struct sjlj_lp_info *));
-static void sjlj_build_landing_pads PARAMS ((void));
-
-static hashval_t ehl_hash PARAMS ((const PTR));
-static int ehl_eq PARAMS ((const PTR,
- const PTR));
-static void add_ehl_entry PARAMS ((rtx,
- struct eh_region *));
-static void remove_exception_handler_label PARAMS ((rtx));
-static void remove_eh_handler PARAMS ((struct eh_region *));
-static int for_each_eh_label_1 PARAMS ((PTR *, PTR));
-
-struct reachable_info;
+static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
+static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
+static void sjlj_mark_call_sites (struct sjlj_lp_info *);
+static void sjlj_emit_function_enter (rtx);
+static void sjlj_emit_function_exit (void);
+static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
+static void sjlj_build_landing_pads (void);
+
+static hashval_t ehl_hash (const void *);
+static int ehl_eq (const void *, const void *);
+static void add_ehl_entry (rtx, struct eh_region *);
+static void remove_exception_handler_label (rtx);
+static void remove_eh_handler (struct eh_region *);
+static int for_each_eh_label_1 (void **, void *);
/* The return value of reachable_next_level. */
enum reachable_code
RNL_BLOCKED
};
-static int check_handled PARAMS ((tree, tree));
-static void add_reachable_handler
- PARAMS ((struct reachable_info *, struct eh_region *,
- struct eh_region *));
-static enum reachable_code reachable_next_level
- PARAMS ((struct eh_region *, tree, struct reachable_info *));
-
-static int action_record_eq PARAMS ((const PTR,
- const PTR));
-static hashval_t action_record_hash PARAMS ((const PTR));
-static int add_action_record PARAMS ((htab_t, int, int));
-static int collect_one_action_chain PARAMS ((htab_t,
- struct eh_region *));
-static int add_call_site PARAMS ((rtx, int));
-
-static void push_uleb128 PARAMS ((varray_type *,
- unsigned int));
-static void push_sleb128 PARAMS ((varray_type *, int));
+struct reachable_info;
+static enum reachable_code reachable_next_level (struct eh_region *, tree,
+ struct reachable_info *);
+
+static int action_record_eq (const void *, const void *);
+static hashval_t action_record_hash (const void *);
+static int add_action_record (htab_t, int, int);
+static int collect_one_action_chain (htab_t, struct eh_region *);
+static int add_call_site (rtx, int);
+
+static void push_uleb128 (varray_type *, unsigned int);
+static void push_sleb128 (varray_type *, int);
#ifndef HAVE_AS_LEB128
-static int dw2_size_of_call_site_table PARAMS ((void));
-static int sjlj_size_of_call_site_table PARAMS ((void));
+static int dw2_size_of_call_site_table (void);
+static int sjlj_size_of_call_site_table (void);
#endif
-static void dw2_output_call_site_table PARAMS ((void));
-static void sjlj_output_call_site_table PARAMS ((void));
+static void dw2_output_call_site_table (void);
+static void sjlj_output_call_site_table (void);
\f
/* Routine to see if exception handling is turned on.
compiler tries to use any exception-specific functions. */
int
-doing_eh (do_warn)
- int do_warn;
+doing_eh (int do_warn)
{
if (! flag_exceptions)
{
\f
void
-init_eh ()
+init_eh (void)
{
if (! flag_exceptions)
return;
{
tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
- sjlj_fc_type_node = (*lang_hooks.types.make_type) (RECORD_TYPE);
+ sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);
f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
build_pointer_type (sjlj_fc_type_node));
DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
tmp = build_index_type (build_int_2 (4 - 1, 0));
- tmp = build_array_type ((*lang_hooks.types.type_for_mode) (word_mode, 1),
+ tmp = build_array_type (lang_hooks.types.type_for_mode (word_mode, 1),
tmp);
f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
#endif
#else
- /* This is 2 for builtin_setjmp, plus whatever the target requires
- via STACK_SAVEAREA_MODE (SAVE_NONLOCAL). */
- tmp = build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
- / GET_MODE_SIZE (Pmode)) + 2 - 1, 0);
+ /* builtin_setjmp takes a pointer to 5 words. */
+ tmp = build_int_2 (5 * BITS_PER_WORD / POINTER_SIZE - 1, 0);
#endif
tmp = build_index_type (tmp);
tmp = build_array_type (ptr_type_node, tmp);
}
void
-init_eh_for_function ()
+init_eh_for_function (void)
{
- cfun->eh = (struct eh_status *)
- ggc_alloc_cleared (sizeof (struct eh_status));
+ cfun->eh = ggc_alloc_cleared (sizeof (struct eh_status));
}
\f
-/* Start an exception handling region. All instructions emitted
- after this point are considered to be part of the region until
- expand_eh_region_end is invoked. */
+/* Routines to generate the exception tree somewhat directly.
+ These are used from tree-eh.c when processing exception related
+ nodes during tree optimization. */
-void
-expand_eh_region_start ()
+static struct eh_region *
+gen_eh_region (enum eh_region_type type, struct eh_region *outer)
{
- struct eh_region *new_region;
- struct eh_region *cur_region;
- rtx note;
+ struct eh_region *new;
+#ifdef ENABLE_CHECKING
if (! doing_eh (0))
- return;
+ abort ();
+#endif
/* Insert a new blank region as a leaf in the tree. */
- new_region = (struct eh_region *) ggc_alloc_cleared (sizeof (*new_region));
- cur_region = cfun->eh->cur_region;
- new_region->outer = cur_region;
- if (cur_region)
+ new = ggc_alloc_cleared (sizeof (*new));
+ new->type = type;
+ new->outer = outer;
+ if (outer)
{
- new_region->next_peer = cur_region->inner;
- cur_region->inner = new_region;
+ new->next_peer = outer->inner;
+ outer->inner = new;
}
else
{
- new_region->next_peer = cfun->eh->region_tree;
- cfun->eh->region_tree = new_region;
+ new->next_peer = cfun->eh->region_tree;
+ cfun->eh->region_tree = new;
+ }
+
+ new->region_number = ++cfun->eh->last_region_number;
+
+ return new;
+}
+
+/* Create a new ERT_CLEANUP region as an inner region of OUTER.
+   PREV_TRY records the innermost enclosing try region, if any,
+   in the cleanup's union data.  */
+struct eh_region *
+gen_eh_region_cleanup (struct eh_region *outer, struct eh_region *prev_try)
+{
+ struct eh_region *cleanup = gen_eh_region (ERT_CLEANUP, outer);
+ cleanup->u.cleanup.prev_try = prev_try;
+ return cleanup;
+}
+
+/* Create a new ERT_TRY region as an inner region of OUTER.  */
+struct eh_region *
+gen_eh_region_try (struct eh_region *outer)
+{
+ return gen_eh_region (ERT_TRY, outer);
+}
+
+/* Create a new ERT_CATCH region attached to try region T.
+   TYPE_OR_LIST is the caught type, a TREE_LIST of such types, or
+   NULL for a catch-all clause.  The catch is created as a sibling
+   of T (sharing T's outer region) and appended to the end of T's
+   chain of catch handlers.  Returns the new region.  */
+struct eh_region *
+gen_eh_region_catch (struct eh_region *t, tree type_or_list)
+{
+ struct eh_region *c, *l;
+ tree type_list, type_node;
+
+ /* Ensure to always end up with a type list to normalize further
+ processing, then register each type against the runtime types map. */
+ type_list = type_or_list;
+ if (type_or_list)
+ {
+ if (TREE_CODE (type_or_list) != TREE_LIST)
+ type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
+
+ type_node = type_list;
+ for (; type_node; type_node = TREE_CHAIN (type_node))
+ add_type_for_runtime (TREE_VALUE (type_node));
}
-  cfun->eh->cur_region = new_region;
+
+ /* Link the new catch at the tail of T's doubly-linked handler list.  */
+ c = gen_eh_region (ERT_CATCH, t->outer);
+ c->u.catch.type_list = type_list;
+ l = t->u.try.last_catch;
+ c->u.catch.prev_catch = l;
+ if (l)
+ l->u.catch.next_catch = c;
+ else
+ t->u.try.catch = c;
+ t->u.try.last_catch = c;
+
+ return c;
+}
+
+/* Create a new ERT_ALLOWED_EXCEPTIONS region (e.g. for an exception
+   specification) as an inner region of OUTER.  ALLOWED is the
+   TREE_LIST of permitted exception types; each is registered with
+   the runtime type map.  */
+struct eh_region *
+gen_eh_region_allowed (struct eh_region *outer, tree allowed)
+{
+ struct eh_region *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
+ region->u.allowed.type_list = allowed;
+
+ for (; allowed ; allowed = TREE_CHAIN (allowed))
+ add_type_for_runtime (TREE_VALUE (allowed));
+
+ return region;
+}
+
+/* Create a new ERT_MUST_NOT_THROW region as an inner region of OUTER.  */
+struct eh_region *
+gen_eh_region_must_not_throw (struct eh_region *outer)
+{
+ return gen_eh_region (ERT_MUST_NOT_THROW, outer);
+}
+
+/* Accessor: return REGION's region number.  */
+int
+get_eh_region_number (struct eh_region *region)
+{
+ return region->region_number;
+}
+
+/* Accessor: return whether REGION has been marked as possibly
+   containing a throw.  */
+bool
+get_eh_region_may_contain_throw (struct eh_region *region)
+{
+ return region->may_contain_throw;
+}
+
+/* Accessor: return REGION's tree-level handler label.  */
+tree
+get_eh_region_tree_label (struct eh_region *region)
+{
+ return region->tree_label;
+}
+
+/* Accessor: set REGION's tree-level handler label to LAB.  */
+void
+set_eh_region_tree_label (struct eh_region *region, tree lab)
+{
+ region->tree_label = lab;
+}
+\f
+/* Start an exception handling region. All instructions emitted
+ after this point are considered to be part of the region until
+ expand_eh_region_end is invoked. */
+
+void
+expand_eh_region_start (void)
+{
+ struct eh_region *new;
+ rtx note;
+
+ if (! doing_eh (0))
+ return;
+
+ new = gen_eh_region (ERT_UNKNOWN, cfun->eh->cur_region);
+ cfun->eh->cur_region = new;
/* Create a note marking the start of this region. */
- new_region->region_number = ++cfun->eh->last_region_number;
- note = emit_note (NULL, NOTE_INSN_EH_REGION_BEG);
- NOTE_EH_HANDLER (note) = new_region->region_number;
+ note = emit_note (NOTE_INSN_EH_REGION_BEG);
+ NOTE_EH_HANDLER (note) = new->region_number;
}
/* Common code to end a region. Returns the region just ended. */
static struct eh_region *
-expand_eh_region_end ()
+expand_eh_region_end (void)
{
struct eh_region *cur_region = cfun->eh->cur_region;
rtx note;
/* Create a note marking the end of this region. */
- note = emit_note (NULL, NOTE_INSN_EH_REGION_END);
+ note = emit_note (NOTE_INSN_EH_REGION_END);
NOTE_EH_HANDLER (note) = cur_region->region_number;
/* Pop. */
return cur_region;
}
+/* Expand HANDLER, which is the operand 1 of a TRY_CATCH_EXPR. Catch
+ blocks and C++ exception-specifications are handled specially. */
+
+void
+expand_eh_handler (tree handler)
+{
+ tree inner = expr_first (handler);
+
+ switch (TREE_CODE (inner))
+ {
+ /* A list of catch clauses: expand each inside the all-catch frame.  */
+ case CATCH_EXPR:
+ expand_start_all_catch ();
+ expand_expr (handler, const0_rtx, VOIDmode, 0);
+ expand_end_all_catch ();
+ break;
+
+ /* An exception filter: either a must-not-throw region or an
+ allowed-exceptions (specification) region, with FAILURE expanded
+ when the filter does not match.  */
+ case EH_FILTER_EXPR:
+ if (EH_FILTER_MUST_NOT_THROW (handler))
+ expand_eh_region_end_must_not_throw (EH_FILTER_FAILURE (handler));
+ else
+ expand_eh_region_end_allowed (EH_FILTER_TYPES (handler),
+ EH_FILTER_FAILURE (handler));
+ break;
+
+ /* Anything else is treated as a cleanup expression.  */
+ default:
+ expand_eh_region_end_cleanup (handler);
+ break;
+ }
+}
+
/* End an exception handling region for a cleanup. HANDLER is an
expression to expand for the cleanup. */
void
-expand_eh_region_end_cleanup (handler)
- tree handler;
+expand_eh_region_end_cleanup (tree handler)
{
struct eh_region *region;
tree protect_cleanup_actions;
emit_label (around_label);
}
+/* Expand a RESX_EXPR: emit a RESX jump insn that resumes exception
+   propagation for the region numbered by EXP's operand 0, recording
+   the insn as that region's resume point.  A barrier follows since
+   control does not fall through.  */
+void
+expand_resx_expr (tree exp)
+{
+ int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
+ struct eh_region *reg = cfun->eh->region_array[region_nr];
+
+ reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
+ emit_barrier ();
+}
+
/* End an exception handling region for a try block, and prepares
for subsequent calls to expand_start_catch. */
void
-expand_start_all_catch ()
+expand_start_all_catch (void)
{
struct eh_region *region;
emit_jump (region->u.try.continue_label);
}
-/* Begin a catch clause. TYPE is the type caught, a list of such types, or
- null if this is a catch-all clause. Providing a type list enables to
- associate the catch region with potentially several exception types, which
- is useful e.g. for Ada. */
+/* Begin a catch clause. TYPE is the type caught, a list of such
+ types, (in the case of Java) an ADDR_EXPR which points to the
+ runtime type to match, or null if this is a catch-all
+ clause. Providing a type list enables to associate the catch region
+ with potentially several exception types, which is useful e.g. for
+ Ada. */
void
-expand_start_catch (type_or_list)
- tree type_or_list;
+expand_start_catch (tree type_or_list)
{
- struct eh_region *t, *c, *l;
- tree type_list;
+ struct eh_region *c;
+ rtx note;
if (! doing_eh (0))
return;
- type_list = type_or_list;
-
- if (type_or_list)
- {
- /* Ensure to always end up with a type list to normalize further
- processing, then register each type against the runtime types
- map. */
- tree type_node;
-
- if (TREE_CODE (type_or_list) != TREE_LIST)
- type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
+ c = gen_eh_region_catch (cfun->eh->try_region, type_or_list);
+ cfun->eh->cur_region = c;
- type_node = type_list;
- for (; type_node; type_node = TREE_CHAIN (type_node))
- add_type_for_runtime (TREE_VALUE (type_node));
- }
-
- expand_eh_region_start ();
-
- t = cfun->eh->try_region;
- c = cfun->eh->cur_region;
- c->type = ERT_CATCH;
- c->u.catch.type_list = type_list;
c->label = gen_label_rtx ();
-
- l = t->u.try.last_catch;
- c->u.catch.prev_catch = l;
- if (l)
- l->u.catch.next_catch = c;
- else
- t->u.try.catch = c;
- t->u.try.last_catch = c;
-
emit_label (c->label);
+
+ note = emit_note (NOTE_INSN_EH_REGION_BEG);
+ NOTE_EH_HANDLER (note) = c->region_number;
}
/* End a catch clause. Control will resume after the try/catch block. */
void
-expand_end_catch ()
+expand_end_catch (void)
{
- struct eh_region *try_region;
-
if (! doing_eh (0))
return;
expand_eh_region_end ();
- try_region = cfun->eh->try_region;
-
- emit_jump (try_region->u.try.continue_label);
+ emit_jump (cfun->eh->try_region->u.try.continue_label);
}
/* End a sequence of catch handlers for a try block. */
void
-expand_end_all_catch ()
+expand_end_all_catch (void)
{
struct eh_region *try_region;
rethrowing satisfies the "filter" of the catch type. */
void
-expand_eh_region_end_allowed (allowed, failure)
- tree allowed, failure;
+expand_eh_region_end_allowed (tree allowed, tree failure)
{
struct eh_region *region;
rtx around_label;
the C++ LSDA. */
void
-expand_eh_region_end_must_not_throw (failure)
- tree failure;
+expand_eh_region_end_must_not_throw (tree failure)
{
struct eh_region *region;
rtx around_label;
is being thrown. */
void
-expand_eh_region_end_throw (type)
- tree type;
+expand_eh_region_end_throw (tree type)
{
struct eh_region *region;
the proper notion of "enclosing" in convert_from_eh_region_ranges. */
void
-expand_eh_region_end_fixup (handler)
- tree handler;
+expand_eh_region_end_fixup (tree handler)
{
struct eh_region *fixup;
call to a function which itself may contain a throw. */
void
-note_eh_region_may_contain_throw ()
+note_eh_region_may_contain_throw (struct eh_region *region)
{
- struct eh_region *region;
-
- region = cfun->eh->cur_region;
while (region && !region->may_contain_throw)
{
region->may_contain_throw = 1;
}
}
+/* Convenience wrapper: mark the current (innermost) EH region of the
+   current function as possibly containing a throw.  */
+void
+note_current_region_may_contain_throw (void)
+{
+ note_eh_region_may_contain_throw (cfun->eh->cur_region);
+}
+
+
/* Return an rtl expression for a pointer to the exception object
within a handler. */
rtx
-get_exception_pointer (fun)
- struct function *fun;
+get_exception_pointer (struct function *fun)
{
rtx exc_ptr = fun->eh->exc_ptr;
if (fun == cfun && ! exc_ptr)
/* Return an rtl expression for the exception dispatch filter
within a handler. */
-static rtx
-get_exception_filter (fun)
- struct function *fun;
+rtx
+get_exception_filter (struct function *fun)
{
rtx filter = fun->eh->filter;
if (fun == cfun && ! filter)
collect the regions this way as in expand_eh_region_start, but
without having to realloc memory. */
-static void
-collect_eh_region_array ()
+void
+collect_eh_region_array (void)
{
struct eh_region **array, *i;
}
static void
-resolve_fixup_regions ()
+resolve_one_fixup_region (struct eh_region *fixup)
{
- int i, j, n = cfun->eh->last_region_number;
+ struct eh_region *cleanup, *real;
+ int j, n;
+
+ n = cfun->eh->last_region_number;
+ cleanup = 0;
+
+ for (j = 1; j <= n; ++j)
+ {
+ cleanup = cfun->eh->region_array[j];
+ if (cleanup && cleanup->type == ERT_CLEANUP
+ && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
+ break;
+ }
+ if (j > n)
+ abort ();
+
+ real = cleanup->outer;
+ if (real && real->type == ERT_FIXUP)
+ {
+ if (!real->u.fixup.resolved)
+ resolve_one_fixup_region (real);
+ real = real->u.fixup.real_region;
+ }
+
+ fixup->u.fixup.real_region = real;
+ fixup->u.fixup.resolved = true;
+}
+
+static void
+resolve_fixup_regions (void)
+{
+ int i, n = cfun->eh->last_region_number;
for (i = 1; i <= n; ++i)
{
struct eh_region *fixup = cfun->eh->region_array[i];
- struct eh_region *cleanup = 0;
- if (! fixup || fixup->type != ERT_FIXUP)
+ if (!fixup || fixup->type != ERT_FIXUP || fixup->u.fixup.resolved)
continue;
- for (j = 1; j <= n; ++j)
- {
- cleanup = cfun->eh->region_array[j];
- if (cleanup && cleanup->type == ERT_CLEANUP
- && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
- break;
- }
- if (j > n)
- abort ();
-
- fixup->u.fixup.real_region = cleanup->outer;
+ resolve_one_fixup_region (fixup);
}
}
we can shuffle pointers and remove them from the tree. */
static void
-remove_fixup_regions ()
+remove_fixup_regions (void)
{
int i;
rtx insn, note;
/* Remove all regions whose labels are not reachable from insns. */
static void
-remove_unreachable_regions (insns)
- rtx insns;
+remove_unreachable_regions (rtx insns)
{
int i, *uid_region_num;
bool *reachable;
abort ();
uid_region_num[INSN_UID (r->label)] = i;
}
- if (r->type == ERT_TRY && r->u.try.continue_label)
- {
- if (uid_region_num[INSN_UID (r->u.try.continue_label)])
- abort ();
- uid_region_num[INSN_UID (r->u.try.continue_label)] = i;
- }
}
for (insn = insns; insn; insn = NEXT_INSN (insn))
- reachable[uid_region_num[INSN_UID (insn)]] = true;
+ {
+ reachable[uid_region_num[INSN_UID (insn)]] = true;
+
+ if (GET_CODE (insn) == CALL_INSN
+ && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
+ for (i = 0; i < 3; i++)
+ {
+ rtx sub = XEXP (PATTERN (insn), i);
+ for (; sub ; sub = NEXT_INSN (sub))
+ reachable[uid_region_num[INSN_UID (sub)]] = true;
+ }
+ }
for (i = cfun->eh->last_region_number; i > 0; --i)
{
r = cfun->eh->region_array[i];
if (r && r->region_number == i && !reachable[i])
{
- /* Don't remove ERT_THROW regions if their outer region
- is reachable. */
- if (r->type == ERT_THROW
- && r->outer
- && reachable[r->outer->region_number])
- continue;
+ bool kill_it = true;
+ switch (r->type)
+ {
+ case ERT_THROW:
+ /* Don't remove ERT_THROW regions if their outer region
+ is reachable. */
+ if (r->outer && reachable[r->outer->region_number])
+ kill_it = false;
+ break;
+
+ case ERT_MUST_NOT_THROW:
+ /* MUST_NOT_THROW regions are implementable solely in the
+ runtime, but their existence continues to affect calls
+ within that region. Never delete them here. */
+ kill_it = false;
+ break;
+
+ case ERT_TRY:
+ {
+ /* TRY regions are reachable if any of its CATCH regions
+ are reachable. */
+ struct eh_region *c;
+ for (c = r->u.try.catch; c ; c = c->u.catch.next_catch)
+ if (reachable[c->region_number])
+ {
+ kill_it = false;
+ break;
+ }
+ break;
+ }
- remove_eh_handler (r);
+ default:
+ break;
+ }
+
+ if (kill_it)
+ remove_eh_handler (r);
}
}
can_throw instruction in the region. */
static void
-convert_from_eh_region_ranges_1 (pinsns, orig_sp, cur)
- rtx *pinsns;
- int *orig_sp;
- int cur;
+convert_from_eh_region_ranges_1 (rtx *pinsns, int *orig_sp, int cur)
{
int *sp = orig_sp;
rtx insn, next;
abort ();
}
+/* For every EH region that was given a tree-level label, copy that
+   label's RTL (if it has been instantiated) into the region's rtx
+   label field.  Used when regions were built during tree
+   optimization rather than from region-range notes.  */
+static void
+collect_rtl_labels_from_trees (void)
+{
+ int i, n = cfun->eh->last_region_number;
+ for (i = 1; i <= n; ++i)
+ {
+ struct eh_region *reg = cfun->eh->region_array[i];
+ if (reg && reg->tree_label)
+ reg->label = DECL_RTL_IF_SET (reg->tree_label);
+ }
+}
+
void
-convert_from_eh_region_ranges ()
+convert_from_eh_region_ranges (void)
{
- int *stack;
- rtx insns;
+ rtx insns = get_insns ();
- collect_eh_region_array ();
- resolve_fixup_regions ();
+ if (cfun->eh->region_array)
+ {
+ /* If the region array already exists, assume we're coming from
+ optimize_function_tree. In this case all we need to do is
+ collect the rtl labels that correspond to the tree labels
+ that we allocated earlier. */
+ collect_rtl_labels_from_trees ();
+ }
+ else
+ {
+ int *stack;
- stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
- insns = get_insns ();
- convert_from_eh_region_ranges_1 (&insns, stack, 0);
- free (stack);
+ collect_eh_region_array ();
+ resolve_fixup_regions ();
+
+ stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
+ convert_from_eh_region_ranges_1 (&insns, stack, 0);
+ free (stack);
+
+ remove_fixup_regions ();
+ }
- remove_fixup_regions ();
remove_unreachable_regions (insns);
}
static void
-add_ehl_entry (label, region)
- rtx label;
- struct eh_region *region;
+add_ehl_entry (rtx label, struct eh_region *region)
{
struct ehl_map_entry **slot, *entry;
LABEL_PRESERVE_P (label) = 1;
- entry = (struct ehl_map_entry *) ggc_alloc (sizeof (*entry));
+ entry = ggc_alloc (sizeof (*entry));
entry->label = label;
entry->region = region;
}
void
-find_exception_handler_labels ()
+find_exception_handler_labels (void)
{
int i;
}
bool
-current_function_has_exception_handlers ()
+current_function_has_exception_handlers (void)
{
int i;
}
\f
static struct eh_region *
-duplicate_eh_region_1 (o, map)
- struct eh_region *o;
- struct inline_remap *map;
+duplicate_eh_region_1 (struct eh_region *o, struct inline_remap *map)
{
- struct eh_region *n
- = (struct eh_region *) ggc_alloc_cleared (sizeof (struct eh_region));
+ struct eh_region *n = ggc_alloc_cleared (sizeof (struct eh_region));
n->region_number = o->region_number + cfun->eh->last_region_number;
n->type = o->type;
}
static void
-duplicate_eh_region_2 (o, n_array)
- struct eh_region *o;
- struct eh_region **n_array;
+duplicate_eh_region_2 (struct eh_region *o, struct eh_region **n_array)
{
struct eh_region *n = n_array[o->region_number];
}
int
-duplicate_eh_regions (ifun, map)
- struct function *ifun;
- struct inline_remap *map;
+duplicate_eh_regions (struct function *ifun, struct inline_remap *map)
{
int ifun_last_region_number = ifun->eh->last_region_number;
struct eh_region **n_array, *root, *cur;
\f
static int
-t2r_eq (pentry, pdata)
- const PTR pentry;
- const PTR pdata;
+t2r_eq (const void *pentry, const void *pdata)
{
tree entry = (tree) pentry;
tree data = (tree) pdata;
}
static hashval_t
-t2r_hash (pentry)
- const PTR pentry;
+t2r_hash (const void *pentry)
{
tree entry = (tree) pentry;
- return TYPE_HASH (TREE_PURPOSE (entry));
+ return TREE_HASH (TREE_PURPOSE (entry));
}
static void
-add_type_for_runtime (type)
- tree type;
+add_type_for_runtime (tree type)
{
tree *slot;
slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
- TYPE_HASH (type), INSERT);
+ TREE_HASH (type), INSERT);
if (*slot == NULL)
{
tree runtime = (*lang_eh_runtime_type) (type);
}
static tree
-lookup_type_for_runtime (type)
- tree type;
+lookup_type_for_runtime (tree type)
{
tree *slot;
slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
- TYPE_HASH (type), NO_INSERT);
+ TREE_HASH (type), NO_INSERT);
/* We should have always inserted the data earlier. */
return TREE_VALUE (*slot);
(a tree) for a @TTypes type node we are thinking about adding. */
static int
-ttypes_filter_eq (pentry, pdata)
- const PTR pentry;
- const PTR pdata;
+ttypes_filter_eq (const void *pentry, const void *pdata)
{
const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
tree data = (tree) pdata;
}
static hashval_t
-ttypes_filter_hash (pentry)
- const PTR pentry;
+ttypes_filter_hash (const void *pentry)
{
const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
- return TYPE_HASH (entry->t);
+ return TREE_HASH (entry->t);
}
/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
should put these in some canonical order. */
static int
-ehspec_filter_eq (pentry, pdata)
- const PTR pentry;
- const PTR pdata;
+ehspec_filter_eq (const void *pentry, const void *pdata)
{
const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
/* Hash function for exception specification lists. */
static hashval_t
-ehspec_filter_hash (pentry)
- const PTR pentry;
+ehspec_filter_hash (const void *pentry)
{
const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
hashval_t h = 0;
tree list;
for (list = entry->t; list ; list = TREE_CHAIN (list))
- h = (h << 5) + (h >> 27) + TYPE_HASH (TREE_VALUE (list));
+ h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
return h;
}
-/* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
- up the search. Return the filter value to be used. */
+/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
+ to speed up the search. Return the filter value to be used. */
static int
-add_ttypes_entry (ttypes_hash, type)
- htab_t ttypes_hash;
- tree type;
+add_ttypes_entry (htab_t ttypes_hash, tree type)
{
struct ttypes_filter **slot, *n;
slot = (struct ttypes_filter **)
- htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);
+ htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);
if ((n = *slot) == NULL)
{
/* Filter value is a 1 based table index. */
- n = (struct ttypes_filter *) xmalloc (sizeof (*n));
+ n = xmalloc (sizeof (*n));
n->t = type;
n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
*slot = n;
to speed up the search. Return the filter value to be used. */
static int
-add_ehspec_entry (ehspec_hash, ttypes_hash, list)
- htab_t ehspec_hash;
- htab_t ttypes_hash;
- tree list;
+add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
{
struct ttypes_filter **slot, *n;
struct ttypes_filter dummy;
{
/* Filter value is a -1 based byte index into a uleb128 buffer. */
- n = (struct ttypes_filter *) xmalloc (sizeof (*n));
+ n = xmalloc (sizeof (*n));
n->t = list;
n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
*slot = n;
the same filter value, which saves table space. */
static void
-assign_filter_values ()
+assign_filter_values (void)
{
int i;
htab_t ttypes, ehspec;
htab_delete (ehspec);
}
+/* Emit SEQ into basic block just before INSN (that is assumed to be
+ first instruction of some existing BB and return the newly
+ produced block. */
+static basic_block
+emit_to_new_bb_before (rtx seq, rtx insn)
+{
+ rtx last;
+ basic_block bb;
+ edge e;
+
+ /* If there happens to be an fallthru edge (possibly created by cleanup_cfg
+ call), we don't want it to go into newly created landing pad or other EH
+ construct. */
+ for (e = BLOCK_FOR_INSN (insn)->pred; e; e = e->pred_next)
+ if (e->flags & EDGE_FALLTHRU)
+ force_nonfallthru (e);
+ last = emit_insn_before (seq, insn);
+ /* A trailing barrier cannot be part of the new block; end the block
+ at the insn just before it.  */
+ if (GET_CODE (last) == BARRIER)
+ last = PREV_INSN (last);
+ bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
+ update_bb_for_insn (bb);
+ bb->flags |= BB_SUPERBLOCK;
+ return bb;
+}
+
+/* Generate the code to actually handle exceptions, which will follow the
+ landing pads. */
+
static void
-build_post_landing_pads ()
+build_post_landing_pads (void)
{
int i;
seq = get_insns ();
end_sequence ();
- emit_insn_before (seq, region->u.try.catch->label);
+ emit_to_new_bb_before (seq, region->u.try.catch->label);
+
break;
case ERT_ALLOWED_EXCEPTIONS:
seq = get_insns ();
end_sequence ();
- emit_insn_before (seq, region->label);
+ emit_to_new_bb_before (seq, region->label);
break;
case ERT_CLEANUP:
_Unwind_Resume otherwise. */
static void
-connect_post_landing_pads ()
+connect_post_landing_pads (void)
{
int i;
struct eh_region *region = cfun->eh->region_array[i];
struct eh_region *outer;
rtx seq;
+ rtx barrier;
/* Mind we don't process a region more than once. */
if (!region || region->region_number != i)
start_sequence ();
if (outer)
- emit_jump (outer->post_landing_pad);
+ {
+ edge e;
+ basic_block src, dest;
+
+ emit_jump (outer->post_landing_pad);
+ src = BLOCK_FOR_INSN (region->resume);
+ dest = BLOCK_FOR_INSN (outer->post_landing_pad);
+ while (src->succ)
+ remove_edge (src->succ);
+ e = make_edge (src, dest, 0);
+ e->probability = REG_BR_PROB_BASE;
+ e->count = src->count;
+ }
else
- emit_library_call (unwind_resume_libfunc, LCT_THROW,
- VOIDmode, 1, cfun->eh->exc_ptr, ptr_mode);
+ {
+ emit_library_call (unwind_resume_libfunc, LCT_THROW,
+ VOIDmode, 1, cfun->eh->exc_ptr, ptr_mode);
+
+ /* What we just emitted was a throwing libcall, so it got a
+ barrier automatically added after it. If the last insn in
+ the libcall sequence isn't the barrier, it's because the
+ target emits multiple insns for a call, and there are insns
+ after the actual call insn (which are redundant and would be
+ optimized away). The barrier is inserted exactly after the
+ call insn, so let's go get that and delete the insns after
+ it, because below we need the barrier to be the last insn in
+ the sequence. */
+ delete_insns_since (NEXT_INSN (last_call_insn ()));
+ }
seq = get_insns ();
end_sequence ();
- emit_insn_before (seq, region->resume);
+ barrier = emit_insn_before (seq, region->resume);
+ /* Avoid duplicate barrier. */
+ if (GET_CODE (barrier) != BARRIER)
+ abort ();
+ delete_insn (barrier);
delete_insn (region->resume);
+
+ /* ??? From tree-ssa we can wind up with catch regions whose
+ label is not instantiated, but whose resx is present. Now
+ that we've dealt with the resx, kill the region. */
+ if (region->label == NULL && region->type == ERT_CLEANUP)
+ remove_eh_handler (region);
}
}
\f
static void
-dw2_build_landing_pads ()
+dw2_build_landing_pads (void)
{
int i;
unsigned int j;
{
struct eh_region *region = cfun->eh->region_array[i];
rtx seq;
+ basic_block bb;
bool clobbers_hard_regs = false;
+ edge e;
/* Mind we don't process a region more than once. */
if (!region || region->region_number != i)
seq = get_insns ();
end_sequence ();
- emit_insn_before (seq, region->post_landing_pad);
+ bb = emit_to_new_bb_before (seq, region->post_landing_pad);
+ e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
+ e->count = bb->count;
+ e->probability = REG_BR_PROB_BASE;
}
}
};
static bool
-sjlj_find_directly_reachable_regions (lp_info)
- struct sjlj_lp_info *lp_info;
+sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
{
rtx insn;
bool found_one = false;
rc = RNL_NOT_CAUGHT;
for (; region; region = region->outer)
{
- rc = reachable_next_level (region, type_thrown, 0);
+ rc = reachable_next_level (region, type_thrown, NULL);
if (rc != RNL_NOT_CAUGHT)
break;
}
}
static void
-sjlj_assign_call_site_values (dispatch_label, lp_info)
- rtx dispatch_label;
- struct sjlj_lp_info *lp_info;
+sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
htab_t ar_hash;
int i, index;
}
static void
-sjlj_mark_call_sites (lp_info)
- struct sjlj_lp_info *lp_info;
+sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
{
int last_call_site = -2;
rtx insn, mem;
/* Construct the SjLj_Function_Context. */
static void
-sjlj_emit_function_enter (dispatch_label)
- rtx dispatch_label;
+sjlj_emit_function_enter (rtx dispatch_label)
{
rtx fn_begin, fc, mem, seq;
if (cfun->uses_eh_lsda)
{
char buf[20];
+ rtx sym;
+
ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
- emit_move_insn (mem, gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)));
+ sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
+ SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
+ emit_move_insn (mem, sym);
}
else
emit_move_insn (mem, const0_rtx);
plus_constant (XEXP (fc, 0),
sjlj_fc_jbuf_ofs), Pmode);
- note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
+ note = emit_note (NOTE_INSN_EXPECTED_VALUE);
NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
if (GET_CODE (fn_begin) == NOTE
- && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
+ && (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG
+ || NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_BASIC_BLOCK))
break;
- emit_insn_after (seq, fn_begin);
+ if (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
+ insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
+ else
+ {
+ rtx last = BB_END (ENTRY_BLOCK_PTR->succ->dest);
+ for (; ; fn_begin = NEXT_INSN (fn_begin))
+ if ((GET_CODE (fn_begin) == NOTE
+ && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
+ || fn_begin == last)
+ break;
+ emit_insn_after (seq, fn_begin);
+ }
}
/* Call back from expand_function_end to know where we should put
the call to unwind_sjlj_unregister_libfunc if needed. */
void
-sjlj_emit_function_exit_after (after)
- rtx after;
+sjlj_emit_function_exit_after (rtx after)
{
/* Remember AFTER; sjlj_emit_function_exit later emits the unregister
   sequence at (or on the exit edge after) this insn.  */
cfun->eh->sjlj_exit_after = after;
}
static void
-sjlj_emit_function_exit ()
+sjlj_emit_function_exit (void)
{
rtx seq;
+ edge e;
start_sequence ();
post-dominates all can_throw_internal instructions. This is
the last possible moment. */
- emit_insn_after (seq, cfun->eh->sjlj_exit_after);
+ for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
+ if (e->flags & EDGE_FALLTHRU)
+ break;
+ if (e)
+ {
+ rtx insn;
+
+ /* Figure out whether the place we are supposed to insert the libcall
+ is inside the last basic block or after it. In the latter case
+ we need to emit the insns on the exit edge instead. */
+ if (e->src->next_bb != EXIT_BLOCK_PTR)
+ abort ();
+ for (insn = NEXT_INSN (BB_END (e->src)); insn; insn = NEXT_INSN (insn))
+ if (insn == cfun->eh->sjlj_exit_after)
+ break;
+ if (insn)
+ insert_insn_on_edge (seq, e);
+ else
+ {
+ insn = cfun->eh->sjlj_exit_after;
+ if (GET_CODE (insn) == CODE_LABEL)
+ insn = NEXT_INSN (insn);
+ emit_insn_after (seq, insn);
+ }
+ }
}
static void
-sjlj_emit_dispatch_table (dispatch_label, lp_info)
- rtx dispatch_label;
- struct sjlj_lp_info *lp_info;
+sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
int i, first_reachable;
rtx mem, dispatch, seq, fc;
+ rtx before;
+ basic_block bb;
+ edge e;
fc = cfun->eh->sjlj_fc;
dispatch = copy_to_reg (mem);
mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
- if (word_mode != Pmode)
+ if (word_mode != ptr_mode)
{
#ifdef POINTERS_EXTEND_UNSIGNED
- mem = convert_memory_address (Pmode, mem);
+ mem = convert_memory_address (ptr_mode, mem);
#else
- mem = convert_to_mode (Pmode, mem, 0);
+ mem = convert_to_mode (ptr_mode, mem, 0);
#endif
}
emit_move_insn (cfun->eh->exc_ptr, mem);
seq = get_insns ();
end_sequence ();
- emit_insn_before (seq, (cfun->eh->region_array[first_reachable]
- ->post_landing_pad));
+ before = cfun->eh->region_array[first_reachable]->post_landing_pad;
+
+ bb = emit_to_new_bb_before (seq, before);
+ e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
+ e->count = bb->count;
+ e->probability = REG_BR_PROB_BASE;
}
static void
-sjlj_build_landing_pads ()
+sjlj_build_landing_pads (void)
{
struct sjlj_lp_info *lp_info;
- lp_info = (struct sjlj_lp_info *) xcalloc (cfun->eh->last_region_number + 1,
- sizeof (struct sjlj_lp_info));
+ lp_info = xcalloc (cfun->eh->last_region_number + 1,
+ sizeof (struct sjlj_lp_info));
if (sjlj_find_directly_reachable_regions (lp_info))
{
}
void
-finish_eh_generation ()
+finish_eh_generation (void)
{
+ basic_block bb;
+
/* Nothing to do if no regions created. */
if (cfun->eh->region_tree == NULL)
return;
/* We've totally changed the CFG. Start over. */
find_exception_handler_labels ();
- rebuild_jump_labels (get_insns ());
- find_basic_blocks (get_insns (), max_reg_num (), 0);
+ break_superblocks ();
+ if (USING_SJLJ_EXCEPTIONS)
+ commit_edge_insertions ();
+ FOR_EACH_BB (bb)
+ {
+ edge e, next;
+ bool eh = false;
+ for (e = bb->succ; e; e = next)
+ {
+ next = e->succ_next;
+ if (e->flags & EDGE_EH)
+ {
+ remove_edge (e);
+ eh = true;
+ }
+ }
+ if (eh)
+ rtl_make_eh_edge (NULL, bb, BB_END (bb));
+ }
cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
}
\f
static hashval_t
-ehl_hash (pentry)
- const PTR pentry;
+ehl_hash (const void *pentry)
{
struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
}
static int
-ehl_eq (pentry, pdata)
- const PTR pentry;
- const PTR pdata;
+ehl_eq (const void *pentry, const void *pdata)
{
struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
struct ehl_map_entry *data = (struct ehl_map_entry *) pdata;
/* Remove LABEL from exception_handler_label_map. */
static void
-remove_exception_handler_label (label)
- rtx label;
+remove_exception_handler_label (rtx label)
{
struct ehl_map_entry **slot, tmp;
/* Splice REGION from the region tree etc. */
static void
-remove_eh_handler (region)
- struct eh_region *region;
+remove_eh_handler (struct eh_region *region)
{
struct eh_region **pp, **pp_start, *p, *outer, *inner;
rtx lab;
delete the region. */
void
-maybe_remove_eh_handler (label)
- rtx label;
+maybe_remove_eh_handler (rtx label)
{
struct ehl_map_entry **slot, tmp;
struct eh_region *region;
loop hackery; should not be used by new code. */
void
-for_each_eh_label (callback)
- void (*callback) PARAMS ((rtx));
+for_each_eh_label (void (*callback) (rtx))
{
htab_traverse (cfun->eh->exception_handler_label_map, for_each_eh_label_1,
- (void *)callback);
+ (void *) &callback);
}
static int
-for_each_eh_label_1 (pentry, data)
- PTR *pentry;
- PTR data;
+for_each_eh_label_1 (void **pentry, void *data)
{
struct ehl_map_entry *entry = *(struct ehl_map_entry **)pentry;
- void (*callback) PARAMS ((rtx)) = (void (*) PARAMS ((rtx))) data;
+ void (*callback) (rtx) = *(void (**) (rtx)) data;
(*callback) (entry->label);
return 1;
/* This section describes CFG exception edges for flow. */
/* For communicating between calls to reachable_next_level. */
-struct reachable_info GTY(())
+struct reachable_info
{
tree types_caught;
tree types_allowed;
- rtx handlers;
+ void (*callback) (struct eh_region *, void *);
+ void *callback_data;
+ bool saw_any_handlers;
};
/* A subroutine of reachable_next_level. Return true if TYPE, or a
base class of TYPE, is in HANDLED. */
-static int
-check_handled (handled, type)
- tree handled, type;
+int
+check_handled (tree handled, tree type)
{
tree t;
LP_REGION contains the landing pad; REGION is the handler. */
static void
-add_reachable_handler (info, lp_region, region)
- struct reachable_info *info;
- struct eh_region *lp_region;
- struct eh_region *region;
+add_reachable_handler (struct reachable_info *info,
+ struct eh_region *lp_region, struct eh_region *region)
{
if (! info)
return;
+ info->saw_any_handlers = true;
+
if (cfun->eh->built_landing_pads)
- {
- if (! info->handlers)
- info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
- }
+ info->callback (lp_region, info->callback_data);
else
- info->handlers = alloc_INSN_LIST (region->label, info->handlers);
+ info->callback (region, info->callback_data);
}
/* Process one level of exception regions for reachability.
and caught/allowed type information between invocations. */
static enum reachable_code
-reachable_next_level (region, type_thrown, info)
- struct eh_region *region;
- tree type_thrown;
- struct reachable_info *info;
+reachable_next_level (struct eh_region *region, tree type_thrown,
+ struct reachable_info *info)
{
switch (region->type)
{
If we've touched down at some landing pad previous, then the
explicit function call we generated may be used. Otherwise
the call is made by the runtime. */
- if (info && info->handlers)
+ if (info && info->saw_any_handlers)
{
add_reachable_handler (info, region, region);
return RNL_CAUGHT;
abort ();
}
-/* Retrieve a list of labels of exception handlers which can be
- reached by a given insn. */
+/* Invoke CALLBACK on each region reachable from REGION_NUMBER. */
-rtx
-reachable_handlers (insn)
- rtx insn;
+void
+foreach_reachable_handler (int region_number, bool is_resx,
+ void (*callback) (struct eh_region *, void *),
+ void *callback_data)
{
struct reachable_info info;
struct eh_region *region;
tree type_thrown;
- int region_number;
-
- if (GET_CODE (insn) == JUMP_INSN
- && GET_CODE (PATTERN (insn)) == RESX)
- region_number = XINT (PATTERN (insn), 0);
- else
- {
- rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
- if (!note || INTVAL (XEXP (note, 0)) <= 0)
- return NULL;
- region_number = INTVAL (XEXP (note, 0));
- }
memset (&info, 0, sizeof (info));
+ info.callback = callback;
+ info.callback_data = callback_data;
region = cfun->eh->region_array[region_number];
type_thrown = NULL_TREE;
- if (GET_CODE (insn) == JUMP_INSN
- && GET_CODE (PATTERN (insn)) == RESX)
+ if (is_resx)
{
/* A RESX leaves a region instead of entering it. Thus the
region itself may have been deleted out from under us. */
if (region == NULL)
- return NULL;
+ return;
region = region->outer;
}
else if (region->type == ERT_THROW)
else
region = region->outer;
}
-
- return info.handlers;
+}
+
+/* Retrieve a list of labels of exception handlers which can be
+ reached by a given insn. */
+
+/* Callback for foreach_reachable_handler: record at most one entry,
+   an INSN_LIST holding the landing pad of the first region seen.  */
+static void
+arh_to_landing_pad (struct eh_region *region, void *data)
+{
+ rtx *p_handlers = data;
+ if (! *p_handlers)
+ *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
+}
+
+/* Callback for foreach_reachable_handler: cons each reachable region's
+   handler label onto the INSN_LIST pointed to by DATA.  */
+static void
+arh_to_label (struct eh_region *region, void *data)
+{
+ rtx *p_handlers = data;
+ *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
+}
+
+/* Return an INSN_LIST of the exception handlers reachable from INSN:
+   landing pads once they have been built, handler labels before that.
+   Returns NULL if INSN carries no EH region (or a non-throwing one).  */
+rtx
+reachable_handlers (rtx insn)
+{
+ bool is_resx = false;
+ rtx handlers = NULL;
+ int region_number;
+
+ /* A RESX pattern encodes its region number directly; other insns
+    carry it in a REG_EH_REGION note.  */
+ if (GET_CODE (insn) == JUMP_INSN
+ && GET_CODE (PATTERN (insn)) == RESX)
+ {
+ region_number = XINT (PATTERN (insn), 0);
+ is_resx = true;
+ }
+ else
+ {
+ rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
+ if (!note || INTVAL (XEXP (note, 0)) <= 0)
+ return NULL;
+ region_number = INTVAL (XEXP (note, 0));
+ }
+
+ foreach_reachable_handler (region_number, is_resx,
+ (cfun->eh->built_landing_pads
+ ? arh_to_landing_pad
+ : arh_to_label),
+ &handlers);
+
+ return handlers;
+}
/* Determine if the given INSN can throw an exception that is caught
within the function. */
+/* Worker for can_throw_internal: return true if an exception raised in
+   EH region REGION_NUMBER can be caught within this function.  */
bool
-can_throw_internal (insn)
- rtx insn;
+can_throw_internal_1 (int region_number)
{
struct eh_region *region;
tree type_thrown;
+
+ region = cfun->eh->region_array[region_number];
+
+ /* An ERT_THROW region records the thrown type; what might catch it is
+    the enclosing region, so peel the throw region off first.  */
+ type_thrown = NULL_TREE;
+ if (region->type == ERT_THROW)
+ {
+ type_thrown = region->u.throw.type;
+ region = region->outer;
+ }
+
+ /* If this exception is ignored by each and every containing region,
+ then control passes straight out. The runtime may handle some
+ regions, which also do not require processing internally. */
+ for (; region; region = region->outer)
+ {
+ enum reachable_code how = reachable_next_level (region, type_thrown, 0);
+ if (how == RNL_BLOCKED)
+ return false;
+ if (how != RNL_NOT_CAUGHT)
+ return true;
+ }
+
+ return false;
+}
+
+bool
+can_throw_internal (rtx insn)
+{
rtx note;
if (! INSN_P (insn))
if (!note || INTVAL (XEXP (note, 0)) <= 0)
return false;
- region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
+ return can_throw_internal_1 (INTVAL (XEXP (note, 0)));
+}
+
+/* Determine if the given INSN can throw an exception that is
+ visible outside the function. */
+
+bool
+can_throw_external_1 (int region_number)
+{
+ struct eh_region *region;
+ tree type_thrown;
+
+ region = cfun->eh->region_array[region_number];
type_thrown = NULL_TREE;
if (region->type == ERT_THROW)
region = region->outer;
}
- /* If this exception is ignored by each and every containing region,
- then control passes straight out. The runtime may handle some
- regions, which also do not require processing internally. */
- for (; region; region = region->outer)
- {
- enum reachable_code how = reachable_next_level (region, type_thrown, 0);
- if (how == RNL_BLOCKED)
- return false;
- if (how != RNL_NOT_CAUGHT)
- return true;
- }
+ /* If the exception is caught or blocked by any containing region,
+ then it is not seen by any calling function. */
+ for (; region ; region = region->outer)
+ if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
+ return false;
- return false;
+ return true;
}
-/* Determine if the given INSN can throw an exception that is
- visible outside the function. */
-
bool
-can_throw_external (insn)
- rtx insn;
+can_throw_external (rtx insn)
{
- struct eh_region *region;
- tree type_thrown;
rtx note;
if (! INSN_P (insn))
if (INTVAL (XEXP (note, 0)) <= 0)
return false;
- region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
-
- type_thrown = NULL_TREE;
- if (region->type == ERT_THROW)
- {
- type_thrown = region->u.throw.type;
- region = region->outer;
- }
-
- /* If the exception is caught or blocked by any containing region,
- then it is not seen by any calling function. */
- for (; region ; region = region->outer)
- if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
- return false;
-
- return true;
+ return can_throw_external_1 (INTVAL (XEXP (note, 0)));
}
/* Set current_function_nothrow and cfun->all_throwers_are_sibcalls. */
void
-set_nothrow_function_flags ()
+set_nothrow_function_flags (void)
{
rtx insn;
-
+
current_function_nothrow = 1;
/* Assume cfun->all_throwers_are_sibcalls until we encounter
if (! flag_exceptions)
return;
-
+
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
if (can_throw_external (insn))
{
On the SPARC, this means flushing the register windows. */
void
-expand_builtin_unwind_init ()
+expand_builtin_unwind_init (void)
{
/* Set this so all the registers get saved in our frame; we need to be
able to copy the saved values for any registers from frames we unwind. */
}
rtx
-expand_builtin_eh_return_data_regno (arglist)
- tree arglist;
+expand_builtin_eh_return_data_regno (tree arglist)
{
tree which = TREE_VALUE (arglist);
unsigned HOST_WIDE_INT iwhich;
return the actual address encoded in that value. */
rtx
-expand_builtin_extract_return_addr (addr_tree)
- tree addr_tree;
+expand_builtin_extract_return_addr (tree addr_tree)
{
rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
stack slot so the epilogue will return to that address. */
rtx
-expand_builtin_frob_return_addr (addr_tree)
- tree addr_tree;
+expand_builtin_frob_return_addr (tree addr_tree)
{
rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
-#ifdef POINTERS_EXTEND_UNSIGNED
- if (GET_MODE (addr) != Pmode)
- addr = convert_memory_address (Pmode, addr);
-#endif
+ addr = convert_memory_address (Pmode, addr);
#ifdef RETURN_ADDR_OFFSET
addr = force_reg (Pmode, addr);
exception handler. */
void
-expand_builtin_eh_return (stackadj_tree, handler_tree)
- tree stackadj_tree, handler_tree;
+expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
+ tree handler_tree)
{
- rtx stackadj, handler;
-
- stackadj = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
- handler = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
-
-#ifdef POINTERS_EXTEND_UNSIGNED
- if (GET_MODE (stackadj) != Pmode)
- stackadj = convert_memory_address (Pmode, stackadj);
-
- if (GET_MODE (handler) != Pmode)
- handler = convert_memory_address (Pmode, handler);
+ rtx tmp;
+
+#ifdef EH_RETURN_STACKADJ_RTX
+ tmp = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
+ tmp = convert_memory_address (Pmode, tmp);
+ if (!cfun->eh->ehr_stackadj)
+ cfun->eh->ehr_stackadj = copy_to_reg (tmp);
+ else if (tmp != cfun->eh->ehr_stackadj)
+ emit_move_insn (cfun->eh->ehr_stackadj, tmp);
#endif
- if (! cfun->eh->ehr_label)
- {
- cfun->eh->ehr_stackadj = copy_to_reg (stackadj);
- cfun->eh->ehr_handler = copy_to_reg (handler);
- cfun->eh->ehr_label = gen_label_rtx ();
- }
- else
- {
- if (stackadj != cfun->eh->ehr_stackadj)
- emit_move_insn (cfun->eh->ehr_stackadj, stackadj);
- if (handler != cfun->eh->ehr_handler)
- emit_move_insn (cfun->eh->ehr_handler, handler);
- }
+ tmp = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
+ tmp = convert_memory_address (Pmode, tmp);
+ if (!cfun->eh->ehr_handler)
+ cfun->eh->ehr_handler = copy_to_reg (tmp);
+ else if (tmp != cfun->eh->ehr_handler)
+ emit_move_insn (cfun->eh->ehr_handler, tmp);
+ if (!cfun->eh->ehr_label)
+ cfun->eh->ehr_label = gen_label_rtx ();
emit_jump (cfun->eh->ehr_label);
}
void
-expand_eh_return ()
+expand_eh_return (void)
{
- rtx sa, ra, around_label;
+ rtx around_label;
if (! cfun->eh->ehr_label)
return;
- sa = EH_RETURN_STACKADJ_RTX;
- if (! sa)
- {
- error ("__builtin_eh_return not supported on this target");
- return;
- }
-
current_function_calls_eh_return = 1;
+#ifdef EH_RETURN_STACKADJ_RTX
+ emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
+#endif
+
around_label = gen_label_rtx ();
- emit_move_insn (sa, const0_rtx);
emit_jump (around_label);
emit_label (cfun->eh->ehr_label);
clobber_return_register ();
+#ifdef EH_RETURN_STACKADJ_RTX
+ emit_move_insn (EH_RETURN_STACKADJ_RTX, cfun->eh->ehr_stackadj);
+#endif
+
#ifdef HAVE_eh_return
if (HAVE_eh_return)
- emit_insn (gen_eh_return (cfun->eh->ehr_stackadj, cfun->eh->ehr_handler));
+ emit_insn (gen_eh_return (cfun->eh->ehr_handler));
else
#endif
{
- ra = EH_RETURN_HANDLER_RTX;
- if (! ra)
- {
- error ("__builtin_eh_return not supported on this target");
- ra = gen_reg_rtx (Pmode);
- }
-
- emit_move_insn (sa, cfun->eh->ehr_stackadj);
- emit_move_insn (ra, cfun->eh->ehr_handler);
+#ifdef EH_RETURN_HANDLER_RTX
+ emit_move_insn (EH_RETURN_HANDLER_RTX, cfun->eh->ehr_handler);
+#else
+ error ("__builtin_eh_return not supported on this target");
+#endif
}
emit_label (around_label);
}
+
+/* Convert a ptr_mode address ADDR_TREE to a word_mode value, sign- or
+   zero-extending according to POINTERS_EXTEND_UNSIGNED (zero-extending
+   when that macro is not defined), and return it.  */
+
+rtx
+expand_builtin_extend_pointer (tree addr_tree)
+{
+ rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
+ int extend;
+
+#ifdef POINTERS_EXTEND_UNSIGNED
+ extend = POINTERS_EXTEND_UNSIGNED;
+#else
+ /* The previous EH code did an unsigned extend by default, so we do this also
+ for consistency. */
+ extend = 1;
+#endif
+
+ return convert_modes (word_mode, ptr_mode, addr, extend);
+}
\f
/* In the following functions, we represent entries in the action table
as 1-based indices. Special cases are:
};
static int
-action_record_eq (pentry, pdata)
- const PTR pentry;
- const PTR pdata;
+action_record_eq (const void *pentry, const void *pdata)
{
const struct action_record *entry = (const struct action_record *) pentry;
const struct action_record *data = (const struct action_record *) pdata;
}
static hashval_t
-action_record_hash (pentry)
- const PTR pentry;
+action_record_hash (const void *pentry)
{
const struct action_record *entry = (const struct action_record *) pentry;
return entry->next * 1009 + entry->filter;
}
static int
-add_action_record (ar_hash, filter, next)
- htab_t ar_hash;
- int filter, next;
+add_action_record (htab_t ar_hash, int filter, int next)
{
struct action_record **slot, *new, tmp;
if ((new = *slot) == NULL)
{
- new = (struct action_record *) xmalloc (sizeof (*new));
+ new = xmalloc (sizeof (*new));
new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
new->filter = filter;
new->next = next;
}
static int
-collect_one_action_chain (ar_hash, region)
- htab_t ar_hash;
- struct eh_region *region;
+collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
{
struct eh_region *c;
int next;
/* An exception specification adds its filter to the
beginning of the chain. */
next = collect_one_action_chain (ar_hash, region->outer);
- return add_action_record (ar_hash, region->u.allowed.filter,
- next < 0 ? 0 : next);
+
+ /* If there is no next action, terminate the chain. */
+ if (next == -1)
+ next = 0;
+ /* If all outer actions are cleanups or must_not_throw,
+ we'll have no action record for it, since we had wanted
+ to encode these states in the call-site record directly.
+ Add a cleanup action to the chain to catch these. */
+ else if (next <= 0)
+ next = add_action_record (ar_hash, 0, 0);
+
+ return add_action_record (ar_hash, region->u.allowed.filter, next);
case ERT_MUST_NOT_THROW:
/* A must-not-throw region with no inner handlers or cleanups
}
static int
-add_call_site (landing_pad, action)
- rtx landing_pad;
- int action;
+add_call_site (rtx landing_pad, int action)
{
struct call_site_record *data = cfun->eh->call_site_data;
int used = cfun->eh->call_site_data_used;
if (used >= size)
{
size = (size ? size * 2 : 64);
- data = (struct call_site_record *)
- ggc_realloc (data, sizeof (*data) * size);
+ data = ggc_realloc (data, sizeof (*data) * size);
cfun->eh->call_site_data = data;
cfun->eh->call_site_data_size = size;
}
instead to call site entries. */
void
-convert_to_eh_region_ranges ()
+convert_to_eh_region_ranges (void)
{
rtx insn, iter, note;
htab_t ar_hash;
\f
static void
-push_uleb128 (data_area, value)
- varray_type *data_area;
- unsigned int value;
+push_uleb128 (varray_type *data_area, unsigned int value)
{
do
{
}
static void
-push_sleb128 (data_area, value)
- varray_type *data_area;
- int value;
+push_sleb128 (varray_type *data_area, int value)
{
unsigned char byte;
int more;
\f
#ifndef HAVE_AS_LEB128
static int
-dw2_size_of_call_site_table ()
+dw2_size_of_call_site_table (void)
{
int n = cfun->eh->call_site_data_used;
int size = n * (4 + 4 + 4);
}
static int
-sjlj_size_of_call_site_table ()
+sjlj_size_of_call_site_table (void)
{
int n = cfun->eh->call_site_data_used;
int size = 0;
#endif
static void
-dw2_output_call_site_table ()
+dw2_output_call_site_table (void)
{
const char *const function_start_lab
= IDENTIFIER_POINTER (current_function_func_begin_label);
}
static void
-sjlj_output_call_site_table ()
+sjlj_output_call_site_table (void)
{
int n = cfun->eh->call_site_data_used;
int i;
table. */
void
-default_exception_section ()
+default_exception_section (void)
{
if (targetm.have_named_sections)
{
}
void
-output_function_exception_table ()
+output_function_exception_table (void)
{
int tt_format, cs_format, lp_format, i, n;
#ifdef HAVE_AS_LEB128
/* Note that varasm still thinks we're in the function's code section.
The ".endp" directive that will immediately follow will take us back. */
#else
- (*targetm.asm_out.exception_section) ();
+ targetm.asm_out.exception_section ();
#endif
have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
assemble_align (tt_format_size * BITS_PER_UNIT);
}
- (*targetm.asm_out.internal_label) (asm_out_file, "LLSDA",
+ targetm.asm_out.internal_label (asm_out_file, "LLSDA",
current_function_funcdef_no);
/* The LSDA header. */
rtx value;
if (type == NULL_TREE)
- type = integer_zero_node;
+ value = const0_rtx;
else
- type = lookup_type_for_runtime (type);
+ {
+ struct cgraph_varpool_node *node;
+
+ type = lookup_type_for_runtime (type);
+ value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
+
+ /* Let cgraph know that the rtti decl is used. Not all of the
+ paths below go through assemble_integer, which would take
+ care of this for us. */
+ STRIP_NOPS (type);
+ if (TREE_CODE (type) == ADDR_EXPR)
+ {
+ type = TREE_OPERAND (type, 0);
+ node = cgraph_varpool_node (type);
+ if (node)
+ cgraph_varpool_mark_needed_node (node);
+ }
+ else if (TREE_CODE (type) != INTEGER_CST)
+ abort ();
+ }
- value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
assemble_integer (value, tt_format_size,
tt_format_size * BITS_PER_UNIT, 1);