/* Implements exception handling.
Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
- 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
+ 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
Contributed by Mike Stump <mrs@cygnus.com>.
This file is part of GCC.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 59 Temple Place - Suite 330, Boston, MA
-02111-1307, USA. */
+Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
+02110-1301, USA. */
/* An exception is an event that can be signaled from within a
#include "target.h"
#include "langhooks.h"
#include "cgraph.h"
+#include "diagnostic.h"
+#include "tree-pass.h"
+#include "timevar.h"
/* Provide defaults for stuff that may not be defined when using
sjlj exceptions. */
/* Each region does exactly one thing. */
enum eh_region_type
- {
+ {
ERT_UNKNOWN = 0,
ERT_CLEANUP,
ERT_TRY,
ERT_CATCH,
ERT_ALLOWED_EXCEPTIONS,
ERT_MUST_NOT_THROW,
- ERT_THROW,
- ERT_FIXUP
+ ERT_THROW
} type;
/* Holds the action to perform based on the preceding type. */
struct eh_region_u_try {
struct eh_region *catch;
struct eh_region *last_catch;
- struct eh_region *prev_try;
- rtx continue_label;
} GTY ((tag ("ERT_TRY"))) try;
/* The list through the catch handlers, the list of type objects
/* Retain the cleanup expression even after expansion so that
we can match up fixup regions. */
struct eh_region_u_cleanup {
- tree exp;
struct eh_region *prev_try;
} GTY ((tag ("ERT_CLEANUP"))) cleanup;
-
- /* The real region (by expression and by pointer) that fixup code
- should live in. */
- struct eh_region_u_fixup {
- tree cleanup_exp;
- struct eh_region *real_region;
- bool resolved;
- } GTY ((tag ("ERT_FIXUP"))) fixup;
} GTY ((desc ("%0.type"))) u;
/* Entry point for this region's handler before landing pads are built. */
unsigned may_contain_throw : 1;
};
+typedef struct eh_region *eh_region;
+
struct call_site_record GTY(())
{
rtx landing_pad;
int action;
};
+DEF_VEC_P(eh_region);
+DEF_VEC_ALLOC_P(eh_region, gc);
+
/* Used to save exception status for each function. */
struct eh_status GTY(())
{
struct eh_region *region_tree;
/* The same information as an indexable array. */
- struct eh_region ** GTY ((length ("%h.last_region_number"))) region_array;
+ VEC(eh_region,gc) *region_array;
/* The most recently open region. */
struct eh_region *cur_region;
int built_landing_pads;
int last_region_number;
- varray_type ttype_data;
+ VEC(tree,gc) *ttype_data;
varray_type ehspec_data;
varray_type action_record_data;
rtx sjlj_fc;
rtx sjlj_exit_after;
-};
+ htab_t GTY((param_is (struct throw_stmt_node))) throw_stmt_table;
+};
\f
static int t2r_eq (const void *, const void *);
static hashval_t t2r_hash (const void *);
static void add_type_for_runtime (tree);
static tree lookup_type_for_runtime (tree);
-static void resolve_fixup_regions (void);
-static void remove_fixup_regions (void);
static void remove_unreachable_regions (rtx);
-static void convert_from_eh_region_ranges_1 (rtx *, int *, int);
-static struct eh_region *duplicate_eh_region_1 (struct eh_region *,
- struct inline_remap *);
-static void duplicate_eh_region_2 (struct eh_region *, struct eh_region **);
static int ttypes_filter_eq (const void *, const void *);
static hashval_t ttypes_filter_hash (const void *);
static int ehspec_filter_eq (const void *, const void *);
cfun->eh = ggc_alloc_cleared (sizeof (struct eh_status));
}
\f
-/* Routines to generate the exception tree somewhat directly.
+/* Routines to generate the exception tree somewhat directly.
These are used from tree-eh.c when processing exception related
nodes during tree optimization. */
struct eh_region *new;
#ifdef ENABLE_CHECKING
- if (! doing_eh (0))
- abort ();
+ gcc_assert (doing_eh (0));
#endif
/* Insert a new blank region as a leaf in the tree. */
expand_resx_expr (tree exp)
{
int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
- struct eh_region *reg = cfun->eh->region_array[region_nr];
+ struct eh_region *reg = VEC_index (eh_region,
+ cfun->eh->region_array, region_nr);
+ gcc_assert (!reg->resume);
reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
emit_barrier ();
}
void
collect_eh_region_array (void)
{
- struct eh_region **array, *i;
+ struct eh_region *i;
i = cfun->eh->region_tree;
if (! i)
return;
- array = ggc_alloc_cleared ((cfun->eh->last_region_number + 1)
- * sizeof (*array));
- cfun->eh->region_array = array;
+ VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
+ cfun->eh->last_region_number + 1);
+ VEC_replace (eh_region, cfun->eh->region_array, 0, 0);
while (1)
{
- array[i->region_number] = i;
+ VEC_replace (eh_region, cfun->eh->region_array, i->region_number, i);
/* If there are sub-regions, process them. */
if (i->inner)
}
}
-static void
-resolve_one_fixup_region (struct eh_region *fixup)
-{
- struct eh_region *cleanup, *real;
- int j, n;
-
- n = cfun->eh->last_region_number;
- cleanup = 0;
-
- for (j = 1; j <= n; ++j)
- {
- cleanup = cfun->eh->region_array[j];
- if (cleanup && cleanup->type == ERT_CLEANUP
- && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
- break;
- }
- if (j > n)
- abort ();
-
- real = cleanup->outer;
- if (real && real->type == ERT_FIXUP)
- {
- if (!real->u.fixup.resolved)
- resolve_one_fixup_region (real);
- real = real->u.fixup.real_region;
- }
-
- fixup->u.fixup.real_region = real;
- fixup->u.fixup.resolved = true;
-}
-
-static void
-resolve_fixup_regions (void)
-{
- int i, n = cfun->eh->last_region_number;
-
- for (i = 1; i <= n; ++i)
- {
- struct eh_region *fixup = cfun->eh->region_array[i];
-
- if (!fixup || fixup->type != ERT_FIXUP || fixup->u.fixup.resolved)
- continue;
-
- resolve_one_fixup_region (fixup);
- }
-}
-
-/* Now that we've discovered what region actually encloses a fixup,
- we can shuffle pointers and remove them from the tree. */
-
-static void
-remove_fixup_regions (void)
-{
- int i;
- rtx insn, note;
- struct eh_region *fixup;
-
- /* Walk the insn chain and adjust the REG_EH_REGION numbers
- for instructions referencing fixup regions. This is only
- strictly necessary for fixup regions with no parent, but
- doesn't hurt to do it for all regions. */
- for (insn = get_insns(); insn ; insn = NEXT_INSN (insn))
- if (INSN_P (insn)
- && (note = find_reg_note (insn, REG_EH_REGION, NULL))
- && INTVAL (XEXP (note, 0)) > 0
- && (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
- && fixup->type == ERT_FIXUP)
- {
- if (fixup->u.fixup.real_region)
- XEXP (note, 0) = GEN_INT (fixup->u.fixup.real_region->region_number);
- else
- remove_note (insn, note);
- }
-
- /* Remove the fixup regions from the tree. */
- for (i = cfun->eh->last_region_number; i > 0; --i)
- {
- fixup = cfun->eh->region_array[i];
- if (! fixup)
- continue;
-
- /* Allow GC to maybe free some memory. */
- if (fixup->type == ERT_CLEANUP)
- fixup->u.cleanup.exp = NULL_TREE;
-
- if (fixup->type != ERT_FIXUP)
- continue;
-
- if (fixup->inner)
- {
- struct eh_region *parent, *p, **pp;
-
- parent = fixup->u.fixup.real_region;
-
- /* Fix up the children's parent pointers; find the end of
- the list. */
- for (p = fixup->inner; ; p = p->next_peer)
- {
- p->outer = parent;
- if (! p->next_peer)
- break;
- }
-
- /* In the tree of cleanups, only outer-inner ordering matters.
- So link the children back in anywhere at the correct level. */
- if (parent)
- pp = &parent->inner;
- else
- pp = &cfun->eh->region_tree;
- p->next_peer = *pp;
- *pp = fixup->inner;
- fixup->inner = NULL;
- }
-
- remove_eh_handler (fixup);
- }
-}
-
/* Remove all regions whose labels are not reachable from insns. */
static void
for (i = cfun->eh->last_region_number; i > 0; --i)
{
- r = cfun->eh->region_array[i];
+ r = VEC_index (eh_region, cfun->eh->region_array, i);
if (!r || r->region_number != i)
continue;
if (r->resume)
{
- if (uid_region_num[INSN_UID (r->resume)])
- abort ();
+ gcc_assert (!uid_region_num[INSN_UID (r->resume)]);
uid_region_num[INSN_UID (r->resume)] = i;
}
if (r->label)
{
- if (uid_region_num[INSN_UID (r->label)])
- abort ();
+ gcc_assert (!uid_region_num[INSN_UID (r->label)]);
uid_region_num[INSN_UID (r->label)] = i;
}
}
for (i = cfun->eh->last_region_number; i > 0; --i)
{
- r = cfun->eh->region_array[i];
+ r = VEC_index (eh_region, cfun->eh->region_array, i);
if (r && r->region_number == i && !reachable[i])
{
bool kill_it = true;
default:
break;
}
-
+
if (kill_it)
remove_eh_handler (r);
}
free (uid_region_num);
}
-/* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
- can_throw instruction in the region. */
-
-static void
-convert_from_eh_region_ranges_1 (rtx *pinsns, int *orig_sp, int cur)
-{
- int *sp = orig_sp;
- rtx insn, next;
-
- for (insn = *pinsns; insn ; insn = next)
- {
- next = NEXT_INSN (insn);
- if (NOTE_P (insn))
- {
- int kind = NOTE_LINE_NUMBER (insn);
- if (kind == NOTE_INSN_EH_REGION_BEG
- || kind == NOTE_INSN_EH_REGION_END)
- {
- if (kind == NOTE_INSN_EH_REGION_BEG)
- {
- struct eh_region *r;
-
- *sp++ = cur;
- cur = NOTE_EH_HANDLER (insn);
-
- r = cfun->eh->region_array[cur];
- if (r->type == ERT_FIXUP)
- {
- r = r->u.fixup.real_region;
- cur = r ? r->region_number : 0;
- }
- else if (r->type == ERT_CATCH)
- {
- r = r->outer;
- cur = r ? r->region_number : 0;
- }
- }
- else
- cur = *--sp;
-
- if (insn == *pinsns)
- *pinsns = next;
- remove_insn (insn);
- continue;
- }
- }
- else if (INSN_P (insn))
- {
- if (cur > 0
- && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
- /* Calls can always potentially throw exceptions, unless
- they have a REG_EH_REGION note with a value of 0 or less.
- Which should be the only possible kind so far. */
- && (CALL_P (insn)
- /* If we wanted exceptions for non-call insns, then
- any may_trap_p instruction could throw. */
- || (flag_non_call_exceptions
- && GET_CODE (PATTERN (insn)) != CLOBBER
- && GET_CODE (PATTERN (insn)) != USE
- && may_trap_p (PATTERN (insn)))))
- {
- REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
- REG_NOTES (insn));
- }
- }
- }
-
- if (sp != orig_sp)
- abort ();
-}
-
-static void
-collect_rtl_labels_from_trees (void)
-{
- int i, n = cfun->eh->last_region_number;
- for (i = 1; i <= n; ++i)
- {
- struct eh_region *reg = cfun->eh->region_array[i];
- if (reg && reg->tree_label)
- reg->label = DECL_RTL_IF_SET (reg->tree_label);
- }
-}
+/* Set up EH labels for RTL. */
void
convert_from_eh_region_ranges (void)
{
rtx insns = get_insns ();
+ int i, n = cfun->eh->last_region_number;
- if (cfun->eh->region_array)
- {
- /* If the region array already exists, assume we're coming from
- optimize_function_tree. In this case all we need to do is
- collect the rtl labels that correspond to the tree labels
- that we allocated earlier. */
- collect_rtl_labels_from_trees ();
- }
- else
+ /* Most of the work is already done at the tree level. All we need to
+ do is collect the rtl labels that correspond to the tree labels
+ we allocated earlier. */
+ for (i = 1; i <= n; ++i)
{
- int *stack;
-
- collect_eh_region_array ();
- resolve_fixup_regions ();
-
- stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
- convert_from_eh_region_ranges_1 (&insns, stack, 0);
- free (stack);
+ struct eh_region *region;
- remove_fixup_regions ();
+ region = VEC_index (eh_region, cfun->eh->region_array, i);
+ if (region && region->tree_label)
+ region->label = DECL_RTL_IF_SET (region->tree_label);
}
remove_unreachable_regions (insns);
label. After landing pad creation, the exception handlers may
share landing pads. This is ok, since maybe_remove_eh_handler
only requires the 1-1 mapping before landing pad creation. */
- if (*slot && !cfun->eh->built_landing_pads)
- abort ();
+ gcc_assert (!*slot || cfun->eh->built_landing_pads);
*slot = entry;
}
for (i = cfun->eh->last_region_number; i > 0; --i)
{
- struct eh_region *region = cfun->eh->region_array[i];
+ struct eh_region *region;
rtx lab;
+ region = VEC_index (eh_region, cfun->eh->region_array, i);
if (! region || region->region_number != i)
continue;
if (cfun->eh->built_landing_pads)
add_ehl_entry (return_label, NULL);
}
+/* Returns true if the current function has exception handling regions. */
+
bool
current_function_has_exception_handlers (void)
{
for (i = cfun->eh->last_region_number; i > 0; --i)
{
- struct eh_region *region = cfun->eh->region_array[i];
+ struct eh_region *region;
- if (! region || region->region_number != i)
- continue;
- if (region->type != ERT_THROW)
+ region = VEC_index (eh_region, cfun->eh->region_array, i);
+ if (region
+ && region->region_number == i
+ && region->type != ERT_THROW)
return true;
}
return false;
}
\f
-static struct eh_region *
-duplicate_eh_region_1 (struct eh_region *o, struct inline_remap *map)
-{
- struct eh_region *n = ggc_alloc_cleared (sizeof (struct eh_region));
+/* A subroutine of duplicate_eh_regions. Search the region tree under O
+ for the minimum and maximum region numbers. Update *MIN and *MAX. */
- n->region_number = o->region_number + cfun->eh->last_region_number;
- n->type = o->type;
+static void
+duplicate_eh_regions_0 (eh_region o, int *min, int *max)
+{
+  /* Widen the running [*MIN, *MAX] interval to cover O's region number.  */
+ if (o->region_number < *min)
+ *min = o->region_number;
+ if (o->region_number > *max)
+ *max = o->region_number;
- switch (n->type)
+ if (o->inner)
{
- case ERT_CLEANUP:
- case ERT_MUST_NOT_THROW:
- break;
+      /* Recurse into the first child, then walk its peer list, so every
+         region in the subtree rooted at O is visited.  */
+ o = o->inner;
+ duplicate_eh_regions_0 (o, min, max);
+ while (o->next_peer)
+ {
+ o = o->next_peer;
+ duplicate_eh_regions_0 (o, min, max);
+ }
+ }
+}
- case ERT_TRY:
- if (o->u.try.continue_label)
- n->u.try.continue_label
- = get_label_from_map (map,
- CODE_LABEL_NUMBER (o->u.try.continue_label));
- break;
+/* A subroutine of duplicate_eh_regions. Copy the region tree under OLD.
+ Root it at OUTER, and apply EH_OFFSET to the region number. Don't worry
+ about the other internal pointers just yet, just the tree-like pointers. */
- case ERT_CATCH:
- n->u.catch.type_list = o->u.catch.type_list;
- break;
+static eh_region
+duplicate_eh_regions_1 (eh_region old, eh_region outer, int eh_offset)
+{
+ eh_region ret, n;
- case ERT_ALLOWED_EXCEPTIONS:
- n->u.allowed.type_list = o->u.allowed.type_list;
- break;
+  /* Shallow-copy OLD; internal region pointers (catch/cleanup links) are
+     fixed up later by the caller, duplicate_eh_regions.  */
+ ret = n = ggc_alloc (sizeof (struct eh_region));
- case ERT_THROW:
- n->u.throw.type = o->u.throw.type;
+ *n = *old;
+ n->outer = outer;
+ n->next_peer = NULL;
+  /* Source regions are expected not to have alternate-number (aka) sets;
+     those only appear after regions are removed.  */
+ gcc_assert (!old->aka);
- default:
- abort ();
- }
+  /* Renumber by the single block offset and register the copy in the
+     current function's region array.  */
+ n->region_number += eh_offset;
+ VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);
- if (o->label)
- n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
- if (o->resume)
+ if (old->inner)
{
- n->resume = map->insn_map[INSN_UID (o->resume)];
- if (n->resume == NULL)
- abort ();
+      /* Copy the first child, then each of its peers, mirroring the
+         tree-like inner/next_peer structure of the original.  */
+ old = old->inner;
+ n = n->inner = duplicate_eh_regions_1 (old, ret, eh_offset);
+ while (old->next_peer)
+ {
+ old = old->next_peer;
+ n = n->next_peer = duplicate_eh_regions_1 (old, ret, eh_offset);
+ }
}
- return n;
+ return ret;
}
-static void
-duplicate_eh_region_2 (struct eh_region *o, struct eh_region **n_array)
+/* Duplicate the EH regions of IFUN, rooted at COPY_REGION, into current
+ function and root the tree below OUTER_REGION. Remap labels using MAP
+ callback. The special case of COPY_REGION of 0 means all regions. */
+
+int
+duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map,
+ void *data, int copy_region, int outer_region)
{
- struct eh_region *n = n_array[o->region_number];
+ eh_region cur, prev_try, outer, *splice;
+ int i, min_region, max_region, eh_offset, cfun_last_region_number;
+ int num_regions;
- switch (n->type)
- {
- case ERT_TRY:
- n->u.try.catch = n_array[o->u.try.catch->region_number];
- n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
- break;
+  /* Nothing to copy if the source function has no EH regions.  */
+ if (!ifun->eh->region_tree)
+ return 0;
- case ERT_CATCH:
- if (o->u.catch.next_catch)
- n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
- if (o->u.catch.prev_catch)
- n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
- break;
+ /* Find the range of region numbers to be copied. The interface we
+ provide here mandates a single offset to find new number from old,
+ which means we must look at the numbers present, instead of the
+ count or something else. */
+ if (copy_region > 0)
+ {
+ min_region = INT_MAX;
+ max_region = 0;
- default:
- break;
+ cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
+ duplicate_eh_regions_0 (cur, &min_region, &max_region);
}
+ else
+ min_region = 1, max_region = ifun->eh->last_region_number;
+  /* EH_OFFSET maps an IFUN region number to its copy's number here.  */
+ num_regions = max_region - min_region + 1;
+ cfun_last_region_number = cfun->eh->last_region_number;
+ eh_offset = cfun_last_region_number + 1 - min_region;
- if (o->outer)
- n->outer = n_array[o->outer->region_number];
- if (o->inner)
- n->inner = n_array[o->inner->region_number];
- if (o->next_peer)
- n->next_peer = n_array[o->next_peer->region_number];
-}
-
-int
-duplicate_eh_regions (struct function *ifun, struct inline_remap *map)
-{
- int ifun_last_region_number = ifun->eh->last_region_number;
- struct eh_region **n_array, *root, *cur;
- int i;
+ /* If we've not yet created a region array, do so now. */
+ VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
+ cfun_last_region_number + 1 + num_regions);
+ cfun->eh->last_region_number = max_region + eh_offset;
- if (ifun_last_region_number == 0)
- return 0;
+ /* We may have just allocated the array for the first time.
+ Make sure that element zero is null. */
+ VEC_replace (eh_region, cfun->eh->region_array, 0, 0);
- n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));
+ /* Zero all entries in the range allocated. */
+ memset (VEC_address (eh_region, cfun->eh->region_array)
+ + cfun_last_region_number + 1, 0, num_regions * sizeof (eh_region));
- for (i = 1; i <= ifun_last_region_number; ++i)
+ /* Locate the spot at which to insert the new tree. */
+ if (outer_region > 0)
{
- cur = ifun->eh->region_array[i];
- if (!cur || cur->region_number != i)
- continue;
- n_array[i] = duplicate_eh_region_1 (cur, map);
+ outer = VEC_index (eh_region, cfun->eh->region_array, outer_region);
+ splice = &outer->inner;
}
- for (i = 1; i <= ifun_last_region_number; ++i)
+ else
{
- cur = ifun->eh->region_array[i];
- if (!cur || cur->region_number != i)
- continue;
- duplicate_eh_region_2 (cur, n_array);
+ outer = NULL;
+ splice = &cfun->eh->region_tree;
}
+  /* Append after any existing children/peers at the insertion point.  */
+ while (*splice)
+ splice = &(*splice)->next_peer;
- root = n_array[ifun->eh->region_tree->region_number];
- cur = cfun->eh->cur_region;
- if (cur)
+ /* Copy all the regions in the subtree. */
+ if (copy_region > 0)
{
- struct eh_region *p = cur->inner;
- if (p)
- {
- while (p->next_peer)
- p = p->next_peer;
- p->next_peer = root;
- }
- else
- cur->inner = root;
-
- for (i = 1; i <= ifun_last_region_number; ++i)
- if (n_array[i] && n_array[i]->outer == NULL)
- n_array[i]->outer = cur;
+ cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
+ *splice = duplicate_eh_regions_1 (cur, outer, eh_offset);
}
else
{
- struct eh_region *p = cfun->eh->region_tree;
- if (p)
+ eh_region n;
+
+      /* COPY_REGION == 0: copy every top-level region of IFUN.  */
+ cur = ifun->eh->region_tree;
+ *splice = n = duplicate_eh_regions_1 (cur, outer, eh_offset);
+ while (cur->next_peer)
{
- while (p->next_peer)
- p = p->next_peer;
- p->next_peer = root;
+ cur = cur->next_peer;
+ n = n->next_peer = duplicate_eh_regions_1 (cur, outer, eh_offset);
}
- else
- cfun->eh->region_tree = root;
}
- free (n_array);
+ /* Remap all the labels in the new regions. */
+ for (i = cfun_last_region_number + 1;
+ VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
+ if (cur && cur->tree_label)
+ cur->tree_label = map (cur->tree_label, data);
+
+ /* Search for the containing ERT_TRY region to fix up
+ the prev_try short-cuts for ERT_CLEANUP regions. */
+ prev_try = NULL;
+ if (outer_region > 0)
+ for (prev_try = VEC_index (eh_region, cfun->eh->region_array, outer_region);
+ prev_try && prev_try->type != ERT_TRY;
+ prev_try = prev_try->outer)
+ ;
+
+ /* Remap all of the internal catch and cleanup linkages. Since we
+ duplicate entire subtrees, all of the referenced regions will have
+ been copied too. And since we renumbered them as a block, a simple
+ bit of arithmetic finds us the index for the replacement region. */
+ for (i = cfun_last_region_number + 1;
+ VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
+ {
+ if (cur == NULL)
+ continue;
+
+#define REMAP(REG) \
+ (REG) = VEC_index (eh_region, cfun->eh->region_array, \
+ (REG)->region_number + eh_offset)
- i = cfun->eh->last_region_number;
- cfun->eh->last_region_number = i + ifun_last_region_number;
- return i;
+ switch (cur->type)
+ {
+ case ERT_TRY:
+ if (cur->u.try.catch)
+ REMAP (cur->u.try.catch);
+ if (cur->u.try.last_catch)
+ REMAP (cur->u.try.last_catch);
+ break;
+
+ case ERT_CATCH:
+ if (cur->u.catch.next_catch)
+ REMAP (cur->u.catch.next_catch);
+ if (cur->u.catch.prev_catch)
+ REMAP (cur->u.catch.prev_catch);
+ break;
+
+ case ERT_CLEANUP:
+ if (cur->u.cleanup.prev_try)
+ REMAP (cur->u.cleanup.prev_try);
+ else
+ cur->u.cleanup.prev_try = prev_try;
+ break;
+
+ default:
+ break;
+ }
+
+#undef REMAP
+ }
+
+  /* Callers add EH_OFFSET to an IFUN region number to find the number
+     of the corresponding copy in the current function.  */
+ return eh_offset;
+}
+
+/* Return true if REGION_A is outer to REGION_B in IFUN. */
+
+bool
+eh_region_outer_p (struct function *ifun, int region_a, int region_b)
+{
+ struct eh_region *rp_a, *rp_b;
+
+  /* Both region numbers must name live regions in IFUN.  */
+ gcc_assert (ifun->eh->last_region_number > 0);
+ gcc_assert (ifun->eh->region_tree);
+
+ rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
+ rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
+ gcc_assert (rp_a != NULL);
+ gcc_assert (rp_b != NULL);
+
+  /* Walk outward from REGION_B; REGION_A is outer to REGION_B iff it
+     appears on that chain.  Note this also yields true when
+     REGION_A == REGION_B.  */
+ do
+ {
+ if (rp_a == rp_b)
+ return true;
+ rp_b = rp_b->outer;
+ }
+ while (rp_b);
+
+ return false;
+}
+/* Return region number of region that is outer to both if REGION_A and
+ REGION_B in IFUN. */
+
+int
+eh_region_outermost (struct function *ifun, int region_a, int region_b)
+{
+ struct eh_region *rp_a, *rp_b;
+ sbitmap b_outer;
+
+  /* Both region numbers must name live regions in IFUN.  */
+ gcc_assert (ifun->eh->last_region_number > 0);
+ gcc_assert (ifun->eh->region_tree);
+
+ rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
+ rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
+ gcc_assert (rp_a != NULL);
+ gcc_assert (rp_b != NULL);
+
+ b_outer = sbitmap_alloc (ifun->eh->last_region_number + 1);
+ sbitmap_zero (b_outer);
+
+  /* Mark every region on B's outer chain, B itself included.  */
+ do
+ {
+ SET_BIT (b_outer, rp_b->region_number);
+ rp_b = rp_b->outer;
+ }
+ while (rp_b);
+
+  /* The first region on A's outer chain that is also on B's chain is
+     the innermost region enclosing both.  */
+ do
+ {
+ if (TEST_BIT (b_outer, rp_a->region_number))
+ {
+ sbitmap_free (b_outer);
+ return rp_a->region_number;
+ }
+ rp_a = rp_a->outer;
+ }
+ while (rp_a);
+
+  /* No common enclosing region.  */
+ sbitmap_free (b_outer);
+ return -1;
+}
\f
static int
t2r_eq (const void *pentry, const void *pdata)
{
/* Filter value is a 1 based table index. */
- n = xmalloc (sizeof (*n));
+ n = XNEW (struct ttypes_filter);
n->t = type;
- n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
+ n->filter = VEC_length (tree, cfun->eh->ttype_data) + 1;
*slot = n;
- VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
+ VEC_safe_push (tree, gc, cfun->eh->ttype_data, type);
}
return n->filter;
{
/* Filter value is a -1 based byte index into a uleb128 buffer. */
- n = xmalloc (sizeof (*n));
+ n = XNEW (struct ttypes_filter);
n->t = list;
n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
*slot = n;
- /* Look up each type in the list and encode its filter
- value as a uleb128. Terminate the list with 0. */
+ /* Generate a 0 terminated list of filter values. */
for (; list ; list = TREE_CHAIN (list))
- push_uleb128 (&cfun->eh->ehspec_data,
- add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
- VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
+ {
+ if (targetm.arm_eabi_unwinder)
+ VARRAY_PUSH_TREE (cfun->eh->ehspec_data, TREE_VALUE (list));
+ else
+ {
+ /* Look up each type in the list and encode its filter
+ value as a uleb128. */
+ push_uleb128 (&cfun->eh->ehspec_data,
+ add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
+ }
+ }
+ if (targetm.arm_eabi_unwinder)
+ VARRAY_PUSH_TREE (cfun->eh->ehspec_data, NULL_TREE);
+ else
+ VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
}
return n->filter;
int i;
htab_t ttypes, ehspec;
- VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
- VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");
+ cfun->eh->ttype_data = VEC_alloc (tree, gc, 16);
+ if (targetm.arm_eabi_unwinder)
+ VARRAY_TREE_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");
+ else
+ VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");
ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
for (i = cfun->eh->last_region_number; i > 0; --i)
{
- struct eh_region *r = cfun->eh->region_array[i];
+ struct eh_region *r;
+
+ r = VEC_index (eh_region, cfun->eh->region_array, i);
/* Mind we don't process a region more than once. */
if (!r || r->region_number != i)
rtx last;
basic_block bb;
edge e;
+ edge_iterator ei;
- /* If there happens to be an fallthru edge (possibly created by cleanup_cfg
- call), we don't want it to go into newly created landing pad or other EH
+ /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
+ call), we don't want it to go into newly created landing pad or other EH
construct. */
- for (e = BLOCK_FOR_INSN (insn)->pred; e; e = e->pred_next)
+ for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
if (e->flags & EDGE_FALLTHRU)
force_nonfallthru (e);
+ else
+ ei_next (&ei);
last = emit_insn_before (seq, insn);
if (BARRIER_P (last))
last = PREV_INSN (last);
for (i = cfun->eh->last_region_number; i > 0; --i)
{
- struct eh_region *region = cfun->eh->region_array[i];
+ struct eh_region *region;
rtx seq;
+ region = VEC_index (eh_region, cfun->eh->region_array, i);
/* Mind we don't process a region more than once. */
if (!region || region->region_number != i)
continue;
emit_cmp_and_jump_insns
(cfun->eh->filter,
GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
- EQ, NULL_RTX,
+ EQ, NULL_RTX,
targetm.eh_return_filter_mode (), 0, c->label);
tp_node = TREE_CHAIN (tp_node);
emit_cmp_and_jump_insns (cfun->eh->filter,
GEN_INT (region->u.allowed.filter),
- EQ, NULL_RTX,
+ EQ, NULL_RTX,
targetm.eh_return_filter_mode (), 0, region->label);
/* We delay the generation of the _Unwind_Resume until we generate
break;
default:
- abort ();
+ gcc_unreachable ();
}
}
}
for (i = cfun->eh->last_region_number; i > 0; --i)
{
- struct eh_region *region = cfun->eh->region_array[i];
+ struct eh_region *region;
struct eh_region *outer;
rtx seq;
rtx barrier;
+ region = VEC_index (eh_region, cfun->eh->region_array, i);
/* Mind we don't process a region more than once. */
if (!region || region->region_number != i)
continue;
emit_jump (outer->post_landing_pad);
src = BLOCK_FOR_INSN (region->resume);
dest = BLOCK_FOR_INSN (outer->post_landing_pad);
- while (src->succ)
- remove_edge (src->succ);
+ while (EDGE_COUNT (src->succs) > 0)
+ remove_edge (EDGE_SUCC (src, 0));
e = make_edge (src, dest, 0);
e->probability = REG_BR_PROB_BASE;
e->count = src->count;
end_sequence ();
barrier = emit_insn_before (seq, region->resume);
/* Avoid duplicate barrier. */
- if (!BARRIER_P (barrier))
- abort ();
+ gcc_assert (BARRIER_P (barrier));
delete_insn (barrier);
delete_insn (region->resume);
dw2_build_landing_pads (void)
{
int i;
- unsigned int j;
for (i = cfun->eh->last_region_number; i > 0; --i)
{
- struct eh_region *region = cfun->eh->region_array[i];
+ struct eh_region *region;
rtx seq;
basic_block bb;
- bool clobbers_hard_regs = false;
edge e;
+ region = VEC_index (eh_region, cfun->eh->region_array, i);
/* Mind we don't process a region more than once. */
if (!region || region->region_number != i)
continue;
#endif
{ /* Nothing */ }
- /* If the eh_return data registers are call-saved, then we
- won't have considered them clobbered from the call that
- threw. Kill them now. */
- for (j = 0; ; ++j)
- {
- unsigned r = EH_RETURN_DATA_REGNO (j);
- if (r == INVALID_REGNUM)
- break;
- if (! call_used_regs[r])
- {
- emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
- clobbers_hard_regs = true;
- }
- }
-
- if (clobbers_hard_regs)
- {
- /* @@@ This is a kludge. Not all machine descriptions define a
- blockage insn, but we must not allow the code we just generated
- to be reordered by scheduling. So emit an ASM_INPUT to act as
- blockage insn. */
- emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
- }
-
emit_move_insn (cfun->eh->exc_ptr,
gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
emit_move_insn (cfun->eh->filter,
- gen_rtx_REG (targetm.eh_return_filter_mode (),
+ gen_rtx_REG (targetm.eh_return_filter_mode (),
EH_RETURN_DATA_REGNO (1)));
seq = get_insns ();
if (!note || INTVAL (XEXP (note, 0)) <= 0)
continue;
- region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
+ region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
type_thrown = NULL_TREE;
if (region->type == ERT_THROW)
for (i = cfun->eh->last_region_number; i > 0; --i)
if (lp_info[i].directly_reachable)
{
- struct eh_region *r = cfun->eh->region_array[i];
+ struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
+
r->landing_pad = dispatch_label;
lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
if (lp_info[i].action_index != -1)
if (INTVAL (XEXP (note, 0)) <= 0)
continue;
- region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
+ region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
this_call_site = lp_info[region->region_number].call_site_index;
}
sjlj_emit_function_enter (rtx dispatch_label)
{
rtx fn_begin, fc, mem, seq;
+ bool fn_begin_outside_block;
fc = cfun->eh->sjlj_fc;
#ifdef DONT_USE_BUILTIN_SETJMP
{
- rtx x, note;
+ rtx x;
x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
TYPE_MODE (integer_type_node), 1,
plus_constant (XEXP (fc, 0),
sjlj_fc_jbuf_ofs), Pmode);
- note = emit_note (NOTE_INSN_EXPECTED_VALUE);
- NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
-
emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
TYPE_MODE (integer_type_node), 0, dispatch_label);
+ add_reg_br_prob_note (get_insns (), REG_BR_PROB_BASE/100);
}
#else
expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
do this in a block that is at loop level 0 and dominates all
can_throw_internal instructions. */
+ fn_begin_outside_block = true;
for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
- if (NOTE_P (fn_begin)
- && (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG
- || NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_BASIC_BLOCK))
- break;
- if (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
- insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
- else
- {
- rtx last = BB_END (ENTRY_BLOCK_PTR->succ->dest);
- for (; ; fn_begin = NEXT_INSN (fn_begin))
- if ((NOTE_P (fn_begin)
- && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
- || fn_begin == last)
+ if (NOTE_P (fn_begin))
+ {
+ if (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
break;
- emit_insn_after (seq, fn_begin);
- }
+ else if (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_BASIC_BLOCK)
+ fn_begin_outside_block = false;
+ }
+
+ if (fn_begin_outside_block)
+ insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
+ else
+ emit_insn_after (seq, fn_begin);
}
/* Call back from expand_function_end to know where we should put
{
rtx seq;
edge e;
+ edge_iterator ei;
start_sequence ();
post-dominates all can_throw_internal instructions. This is
the last possible moment. */
- for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
+ FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
if (e->flags & EDGE_FALLTHRU)
break;
if (e)
/* Figure out whether the place we are supposed to insert libcall
is inside the last basic block or after it. In the other case
we need to emit to edge. */
- if (e->src->next_bb != EXIT_BLOCK_PTR)
- abort ();
- for (insn = NEXT_INSN (BB_END (e->src)); insn; insn = NEXT_INSN (insn))
- if (insn == cfun->eh->sjlj_exit_after)
- break;
- if (insn)
- insert_insn_on_edge (seq, e);
- else
+ gcc_assert (e->src->next_bb == EXIT_BLOCK_PTR);
+ for (insn = BB_HEAD (e->src); ; insn = NEXT_INSN (insn))
{
- insn = cfun->eh->sjlj_exit_after;
- if (LABEL_P (insn))
- insn = NEXT_INSN (insn);
- emit_insn_after (seq, insn);
+ if (insn == cfun->eh->sjlj_exit_after)
+ {
+ if (LABEL_P (insn))
+ insn = NEXT_INSN (insn);
+ emit_insn_after (seq, insn);
+ return;
+ }
+ if (insn == BB_END (e->src))
+ break;
}
+ insert_insn_on_edge (seq, e);
}
}
emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
- cfun->eh->region_array[i]->post_landing_pad);
+ ((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, i))
+ ->post_landing_pad);
}
seq = get_insns ();
end_sequence ();
- before = cfun->eh->region_array[first_reachable]->post_landing_pad;
+ before = (((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, first_reachable))
+ ->post_landing_pad);
bb = emit_to_new_bb_before (seq, before);
e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
{
struct sjlj_lp_info *lp_info;
- lp_info = xcalloc (cfun->eh->last_region_number + 1,
- sizeof (struct sjlj_lp_info));
+ lp_info = XCNEWVEC (struct sjlj_lp_info, cfun->eh->last_region_number + 1);
if (sjlj_find_directly_reachable_regions (lp_info))
{
commit_edge_insertions ();
FOR_EACH_BB (bb)
{
- edge e, next;
+ edge e;
+ edge_iterator ei;
bool eh = false;
- for (e = bb->succ; e; e = next)
+ for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
{
- next = e->succ_next;
if (e->flags & EDGE_EH)
{
remove_edge (e);
eh = true;
}
+ else
+ ei_next (&ei);
}
if (eh)
rtl_make_eh_edge (NULL, bb, BB_END (bb));
tmp.label = label;
slot = (struct ehl_map_entry **)
htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
- if (! slot)
- abort ();
+ gcc_assert (slot);
htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
}
list of alternate numbers by which we are known. */
outer = region->outer;
- cfun->eh->region_array[region->region_number] = outer;
+ VEC_replace (eh_region, cfun->eh->region_array, region->region_number, outer);
if (region->aka)
{
- int i;
- EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i,
- { cfun->eh->region_array[i] = outer; });
+ unsigned i;
+ bitmap_iterator bi;
+
+ EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
+ {
+ VEC_replace (eh_region, cfun->eh->region_array, i, outer);
+ }
}
if (outer)
if (!outer->aka)
outer->aka = BITMAP_GGC_ALLOC ();
if (region->aka)
- bitmap_a_or_b (outer->aka, outer->aka, region->aka);
+ bitmap_ior_into (outer->aka, region->aka);
bitmap_set_bit (outer->aka, region->region_number);
}
try->type == ERT_CATCH;
try = try->next_peer)
continue;
- if (try->type != ERT_TRY)
- abort ();
+ gcc_assert (try->type == ERT_TRY);
next = region->u.catch.next_catch;
prev = region->u.catch.prev_catch;
int i, n = cfun->eh->last_region_number;
for (i = 1; i <= n; ++i)
{
- struct eh_region *region = cfun->eh->region_array[i];
+ struct eh_region *region;
+
+ region = VEC_index (eh_region, cfun->eh->region_array, i);
if (region)
(*callback) (region);
}
/* A subroutine of reachable_next_level. Return true if TYPE, or a
base class of TYPE, is in HANDLED. */
-int
+static int
check_handled (tree handled, tree type)
{
tree t;
/* Here we end our search, since no exceptions may propagate.
If we've touched down at some landing pad previous, then the
explicit function call we generated may be used. Otherwise
- the call is made by the runtime. */
- if (info && info->saw_any_handlers)
+ the call is made by the runtime.
+
+ Before inlining, do not perform this optimization. We may
+ inline a subroutine that contains handlers, and that will
+ change the value of saw_any_handlers. */
+
+ if ((info && info->saw_any_handlers) || !cfun->after_inlining)
{
add_reachable_handler (info, region, region);
return RNL_CAUGHT;
return RNL_BLOCKED;
case ERT_THROW:
- case ERT_FIXUP:
case ERT_UNKNOWN:
/* Shouldn't see these here. */
+ gcc_unreachable ();
break;
+ default:
+ gcc_unreachable ();
}
-
- abort ();
}
/* Invoke CALLBACK on each region reachable from REGION_NUMBER. */
info.callback = callback;
info.callback_data = callback_data;
- region = cfun->eh->region_array[region_number];
+ region = VEC_index (eh_region, cfun->eh->region_array, region_number);
type_thrown = NULL_TREE;
if (is_resx)
within the function. */
bool
-can_throw_internal_1 (int region_number)
+can_throw_internal_1 (int region_number, bool is_resx)
{
struct eh_region *region;
tree type_thrown;
- region = cfun->eh->region_array[region_number];
+ region = VEC_index (eh_region, cfun->eh->region_array, region_number);
type_thrown = NULL_TREE;
- if (region->type == ERT_THROW)
+ if (is_resx)
+ region = region->outer;
+ else if (region->type == ERT_THROW)
{
type_thrown = region->u.throw.type;
region = region->outer;
if (JUMP_P (insn)
&& GET_CODE (PATTERN (insn)) == RESX
&& XINT (PATTERN (insn), 0) > 0)
- return can_throw_internal_1 (XINT (PATTERN (insn), 0));
+ return can_throw_internal_1 (XINT (PATTERN (insn), 0), true);
if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
if (!note || INTVAL (XEXP (note, 0)) <= 0)
return false;
- return can_throw_internal_1 (INTVAL (XEXP (note, 0)));
+ return can_throw_internal_1 (INTVAL (XEXP (note, 0)), false);
}
/* Determine if the given INSN can throw an exception that is
visible outside the function. */
bool
-can_throw_external_1 (int region_number)
+can_throw_external_1 (int region_number, bool is_resx)
{
struct eh_region *region;
tree type_thrown;
- region = cfun->eh->region_array[region_number];
+ region = VEC_index (eh_region, cfun->eh->region_array, region_number);
type_thrown = NULL_TREE;
- if (region->type == ERT_THROW)
+ if (is_resx)
+ region = region->outer;
+ else if (region->type == ERT_THROW)
{
type_thrown = region->u.throw.type;
region = region->outer;
if (! INSN_P (insn))
return false;
+ if (JUMP_P (insn)
+ && GET_CODE (PATTERN (insn)) == RESX
+ && XINT (PATTERN (insn), 0) > 0)
+ return can_throw_external_1 (XINT (PATTERN (insn), 0), true);
+
if (NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
insn = XVECEXP (PATTERN (insn), 0, 0);
if (INTVAL (XEXP (note, 0)) <= 0)
return false;
- return can_throw_external_1 (INTVAL (XEXP (note, 0)));
+ return can_throw_external_1 (INTVAL (XEXP (note, 0)), false);
}
/* Set TREE_NOTHROW and cfun->all_throwers_are_sibcalls. */
-void
+unsigned int
set_nothrow_function_flags (void)
{
rtx insn;
+ if (!targetm.binds_local_p (current_function_decl))
+ return 0;
+
TREE_NOTHROW (current_function_decl) = 1;
/* Assume cfun->all_throwers_are_sibcalls until we encounter
cfun->all_throwers_are_sibcalls = 1;
if (! flag_exceptions)
- return;
+ return 0;
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
if (can_throw_external (insn))
if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
{
cfun->all_throwers_are_sibcalls = 0;
- return;
+ return 0;
}
}
if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
{
cfun->all_throwers_are_sibcalls = 0;
- return;
+ return 0;
}
}
+ return 0;
}
+/* Pass descriptor for set_nothrow_function_flags.  The pass is
+   anonymous (NULL name) and carries no gate or TODO flags.  */
+struct tree_opt_pass pass_set_nothrow_function_flags =
+{
+  NULL,                                 /* name */
+  NULL,                                 /* gate */
+  set_nothrow_function_flags,           /* execute */
+  NULL,                                 /* sub */
+  NULL,                                 /* next */
+  0,                                    /* static_pass_number */
+  0,                                    /* tv_id */
+  0,                                    /* properties_required */
+  0,                                    /* properties_provided */
+  0,                                    /* properties_destroyed */
+  0,                                    /* todo_flags_start */
+  0,                                    /* todo_flags_finish */
+  0                                     /* letter */
+};
+
\f
/* Various hooks for unwind library. */
if (TREE_CODE (which) != INTEGER_CST)
{
- error ("argument of `__builtin_eh_return_regno' must be constant");
+ error ("argument of %<__builtin_eh_return_regno%> must be constant");
return constm1_rtx;
}
Add a cleanup action to the chain to catch these. */
else if (next <= 0)
next = add_action_record (ar_hash, 0, 0);
-
+
return add_action_record (ar_hash, region->u.allowed.filter, next);
case ERT_MUST_NOT_THROW:
return collect_one_action_chain (ar_hash, region->outer);
default:
- abort ();
+ gcc_unreachable ();
}
}
The new note numbers will not refer to region numbers, but
instead to call site entries. */
-void
+unsigned int
convert_to_eh_region_ranges (void)
{
rtx insn, iter, note;
int call_site = 0;
if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
- return;
+ return 0;
VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
{
if (INTVAL (XEXP (note, 0)) <= 0)
continue;
- region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
+ region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
this_action = collect_one_action_chain (ar_hash, region);
}
}
htab_delete (ar_hash);
+ return 0;
}
+/* Pass descriptor for convert_to_eh_region_ranges, dumped under the
+   name "eh-ranges".  */
+struct tree_opt_pass pass_convert_to_eh_region_ranges =
+{
+  "eh-ranges",                          /* name */
+  NULL,                                 /* gate */
+  convert_to_eh_region_ranges,          /* execute */
+  NULL,                                 /* sub */
+  NULL,                                 /* next */
+  0,                                    /* static_pass_number */
+  0,                                    /* tv_id */
+  0,                                    /* properties_required */
+  0,                                    /* properties_provided */
+  0,                                    /* properties_destroyed */
+  0,                                    /* todo_flags_start */
+  TODO_dump_func,                       /* todo_flags_finish */
+  0                                     /* letter */
+};
+
\f
static void
push_uleb128 (varray_type *data_area, unsigned int value)
static void
dw2_output_call_site_table (void)
{
- const char *const function_start_lab
- = IDENTIFIER_POINTER (current_function_func_begin_label);
int n = cfun->eh->call_site_data_used;
int i;
/* ??? Perhaps use attr_length to choose data1 or data2 instead of
data4 if the function is small enough. */
#ifdef HAVE_AS_LEB128
- dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
+ dw2_asm_output_delta_uleb128 (reg_start_lab,
+ current_function_func_begin_label,
"region %d start", i);
dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
"length");
if (cs->landing_pad)
- dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
+ dw2_asm_output_delta_uleb128 (landing_pad_lab,
+ current_function_func_begin_label,
"landing pad");
else
dw2_asm_output_data_uleb128 (0, "landing pad");
#else
- dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
+ dw2_asm_output_delta (4, reg_start_lab,
+ current_function_func_begin_label,
"region %d start", i);
dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
if (cs->landing_pad)
- dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
+ dw2_asm_output_delta (4, landing_pad_lab,
+ current_function_func_begin_label,
"landing pad");
else
dw2_asm_output_data (4, 0, "landing pad");
call_site_base += n;
}
-/* Tell assembler to switch to the section for the exception handling
- table. */
+#ifndef TARGET_UNWIND_INFO
+/* Switch to the section that should be used for exception tables. */
-void
-default_exception_section (void)
+static void
+switch_to_exception_section (const char * ARG_UNUSED (fnname))
{
- if (targetm.have_named_sections)
+ section *s;
+
+ if (exception_section)
+ s = exception_section;
+ else
{
- int flags;
-#ifdef HAVE_LD_RO_RW_SECTION_MIXING
- int tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
+ /* Compute the section and cache it into exception_section,
+ unless it depends on the function name. */
+ if (targetm.have_named_sections)
+ {
+ int flags;
- flags = (! flag_pic
- || ((tt_format & 0x70) != DW_EH_PE_absptr
- && (tt_format & 0x70) != DW_EH_PE_aligned))
- ? 0 : SECTION_WRITE;
-#else
- flags = SECTION_WRITE;
+ if (EH_TABLES_CAN_BE_READ_ONLY)
+ {
+ int tt_format =
+ ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
+ flags = ((! flag_pic
+ || ((tt_format & 0x70) != DW_EH_PE_absptr
+ && (tt_format & 0x70) != DW_EH_PE_aligned))
+ ? 0 : SECTION_WRITE);
+ }
+ else
+ flags = SECTION_WRITE;
+
+#ifdef HAVE_LD_EH_GC_SECTIONS
+ if (flag_function_sections)
+ {
+ char *section_name = xmalloc (strlen (fnname) + 32);
+ sprintf (section_name, ".gcc_except_table.%s", fnname);
+ s = get_section (section_name, flags, NULL);
+ free (section_name);
+ }
+ else
+#endif
+ exception_section
+ = s = get_section (".gcc_except_table", flags, NULL);
+ }
+ else
+ exception_section
+ = s = flag_pic ? data_section : readonly_data_section;
+ }
+
+ switch_to_section (s);
+}
#endif
- named_section_flags (".gcc_except_table", flags);
+
+
+/* Output a reference from an exception table to the type_info object TYPE.
+ TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
+ the value. */
+
+static void
+output_ttype (tree type, int tt_format, int tt_format_size)
+{
+ rtx value;
+ bool public = true;
+
+ if (type == NULL_TREE)
+ value = const0_rtx;
+ else
+ {
+ struct varpool_node *node;
+
+ type = lookup_type_for_runtime (type);
+ value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
+
+ /* Let cgraph know that the rtti decl is used. Not all of the
+ paths below go through assemble_integer, which would take
+ care of this for us. */
+ STRIP_NOPS (type);
+ if (TREE_CODE (type) == ADDR_EXPR)
+ {
+ type = TREE_OPERAND (type, 0);
+ if (TREE_CODE (type) == VAR_DECL)
+ {
+ node = varpool_node (type);
+ if (node)
+ varpool_mark_needed_node (node);
+ public = TREE_PUBLIC (type);
+ }
+ }
+ else
+ gcc_assert (TREE_CODE (type) == INTEGER_CST);
}
- else if (flag_pic)
- data_section ();
+
+ /* Allow the target to override the type table entry format. */
+ if (targetm.asm_out.ttype (value))
+ return;
+
+ if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
+ assemble_integer (value, tt_format_size,
+ tt_format_size * BITS_PER_UNIT, 1);
else
- readonly_data_section ();
+ dw2_asm_output_encoded_addr_rtx (tt_format, value, public, NULL);
}
void
-output_function_exception_table (void)
+output_function_exception_table (const char * ARG_UNUSED (fnname))
{
int tt_format, cs_format, lp_format, i, n;
#ifdef HAVE_AS_LEB128
int have_tt_data;
int tt_format_size = 0;
+ if (eh_personality_libfunc)
+ assemble_external_libcall (eh_personality_libfunc);
+
/* Not all functions need anything. */
if (! cfun->uses_eh_lsda)
return;
/* Note that varasm still thinks we're in the function's code section.
The ".endp" directive that will immediately follow will take us back. */
#else
- targetm.asm_out.exception_section ();
+ switch_to_exception_section (fnname);
#endif
- have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
+ /* If the target wants a label to begin the table, emit it here. */
+ targetm.asm_out.except_table_label (asm_out_file);
+
+ have_tt_data = (VEC_length (tree, cfun->eh->ttype_data) > 0
|| VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);
/* Indicate the format of the @TType entries. */
after_disp = (1 + size_of_uleb128 (call_site_len)
+ call_site_len
+ VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
- + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
+ + (VEC_length (tree, cfun->eh->ttype_data)
* tt_format_size));
disp = after_disp;
if (have_tt_data)
assemble_align (tt_format_size * BITS_PER_UNIT);
- i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
+ i = VEC_length (tree, cfun->eh->ttype_data);
while (i-- > 0)
{
- tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
- rtx value;
-
- if (type == NULL_TREE)
- value = const0_rtx;
- else
- {
- struct cgraph_varpool_node *node;
-
- type = lookup_type_for_runtime (type);
- value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
-
- /* Let cgraph know that the rtti decl is used. Not all of the
- paths below go through assemble_integer, which would take
- care of this for us. */
- STRIP_NOPS (type);
- if (TREE_CODE (type) == ADDR_EXPR)
- {
- type = TREE_OPERAND (type, 0);
- if (TREE_CODE (type) == VAR_DECL)
- {
- node = cgraph_varpool_node (type);
- if (node)
- cgraph_varpool_mark_needed_node (node);
- }
- }
- else if (TREE_CODE (type) != INTEGER_CST)
- abort ();
- }
-
- if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
- assemble_integer (value, tt_format_size,
- tt_format_size * BITS_PER_UNIT, 1);
- else
- dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
+ tree type = VEC_index (tree, cfun->eh->ttype_data, i);
+ output_ttype (type, tt_format, tt_format_size);
}
#ifdef HAVE_AS_LEB128
/* ??? Decode and interpret the data for flag_debug_asm. */
n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
for (i = 0; i < n; ++i)
- dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
- (i ? NULL : "Exception specification table"));
+ {
+ if (targetm.arm_eabi_unwinder)
+ {
+ tree type = VARRAY_TREE (cfun->eh->ehspec_data, i);
+ output_ttype (type, tt_format, tt_format_size);
+ }
+ else
+ dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
+ (i ? NULL : "Exception specification table"));
+ }
+
+ switch_to_section (current_function_section ());
+}
- function_section (current_function_decl);
+/* Record TABLE as FUN's hash table mapping throwing statements to EH
+   region information.  */
+void
+set_eh_throw_stmt_table (struct function *fun, struct htab *table)
+{
+  fun->eh->throw_stmt_table = table;
}
+/* Return FUN's throw-statement hash table, as last recorded by
+   set_eh_throw_stmt_table (NULL if none has been set).  */
+htab_t
+get_eh_throw_stmt_table (struct function *fun)
+{
+  return fun->eh->throw_stmt_table;
+}
+
+/* Dump EH information to OUT: print FUN's EH region tree depth-first,
+   one region per line, indented two columns per nesting level.  */
+void
+dump_eh_tree (FILE *out, struct function *fun)
+{
+  struct eh_region *i;
+  int depth = 0;
+  /* Printable names indexed by enum eh_region_type; keep in sync with
+     that enum's ordering.  */
+  static const char * const type_name[] = {"unknown", "cleanup", "try", "catch",
+					   "allowed_exceptions", "must_not_throw",
+					   "throw"};
+
+  i = fun->eh->region_tree;
+  if (! i)
+    return;
+
+  fprintf (out, "Eh tree:\n");
+  while (1)
+    {
+      fprintf (out, "  %*s %i %s", depth * 2, "",
+	       i->region_number, type_name [(int)i->type]);
+      if (i->tree_label)
+	{
+          fprintf (out, " tree_label:");
+	  print_generic_expr (out, i->tree_label, 0);
+	}
+      fprintf (out, "\n");
+      /* If there are sub-regions, process them.  */
+      if (i->inner)
+	i = i->inner, depth++;
+      /* If there are peers, process them.  */
+      else if (i->next_peer)
+	i = i->next_peer;
+      /* Otherwise, step back up the tree to the next peer.  */
+      else
+	{
+	  do {
+	    i = i->outer;
+	    depth--;
+	    if (i == NULL)
+	      return;
+	  } while (i->next_peer == NULL);
+	  i = i->next_peer;
+	}
+    }
+}
+
+/* Verify some basic invariants on EH datastructures.  Could be extended to
+   catch more.  On any failure, dump the tree and raise an internal
+   error.  Checks FUN's region array against its region tree.  */
+void
+verify_eh_tree (struct function *fun)
+{
+  struct eh_region *i, *outer = NULL;
+  bool err = false;
+  int nvisited = 0;
+  int count = 0;
+  int j;
+  int depth = 0;
+
+  i = fun->eh->region_tree;
+  if (! i)
+    return;
+
+  /* First pass: every region recorded in the region array must know its
+     own index.  Count the live entries so the tree walk below can check
+     that the array and the tree describe the same set of regions.
+     Note: consult FUN's array, not cfun's — this function is given an
+     explicit FUN parameter and must honor it.  */
+  for (j = fun->eh->last_region_number; j > 0; --j)
+    if ((i = VEC_index (eh_region, fun->eh->region_array, j)))
+      {
+	count++;
+	if (i->region_number != j)
+	  {
+	    error ("region_array is corrupted for region %i", i->region_number);
+	    err = true;
+	  }
+      }
+
+  /* The scan above reused I as a scratch variable; restart the walk at
+     the root of the region tree rather than at whatever entry the scan
+     ended on (which could even be NULL).  */
+  i = fun->eh->region_tree;
+
+  while (1)
+    {
+      /* The array entry for this region number must point back here.  */
+      if (VEC_index (eh_region, fun->eh->region_array, i->region_number) != i)
+	{
+	  error ("region_array is corrupted for region %i", i->region_number);
+	  err = true;
+	}
+      /* The walk tracks the expected parent in OUTER.  */
+      if (i->outer != outer)
+	{
+	  error ("outer block of region %i is wrong", i->region_number);
+	  err = true;
+	}
+      /* may_contain_throw must be monotone along the nesting chain.  */
+      if (i->may_contain_throw && outer && !outer->may_contain_throw)
+	{
+	  error ("region %i may contain throw and is contained in region that may not",
+		 i->region_number);
+	  err = true;
+	}
+      if (depth < 0)
+	{
+	  error ("negative nesting depth of region %i", i->region_number);
+	  err = true;
+	}
+      nvisited ++;
+      /* If there are sub-regions, process them.  */
+      if (i->inner)
+	outer = i, i = i->inner, depth++;
+      /* If there are peers, process them.  */
+      else if (i->next_peer)
+	i = i->next_peer;
+      /* Otherwise, step back up the tree to the next peer.  */
+      else
+	{
+	  do {
+	    i = i->outer;
+	    depth--;
+	    if (i == NULL)
+	      {
+		/* Walk complete: verify final depth and that every array
+		   entry was visited, then report any accumulated errors.  */
+		if (depth != -1)
+		  {
+		    error ("tree list ends on depth %i", depth + 1);
+		    err = true;
+		  }
+		if (count != nvisited)
+		  {
+		    error ("array does not match the region tree");
+		    err = true;
+		  }
+		if (err)
+		  {
+		    dump_eh_tree (stderr, fun);
+		    internal_error ("verify_eh_tree failed");
+		  }
+		return;
+	      }
+	    outer = i->outer;
+	  } while (i->next_peer == NULL);
+	  i = i->next_peer;
+	}
+    }
+}
+
+/* Initialize unwind_resume_libfunc to the unwinder entry point used to
+   resume propagation of an exception, picking the SJLJ variant when
+   setjmp/longjmp exceptions are in use.  */
+
+void
+default_init_unwind_resume_libfunc (void)
+{
+  /* The default C++ routines aren't actually C++ specific, so use those.  */
+  unwind_resume_libfunc =
+    init_one_libfunc ( USING_SJLJ_EXCEPTIONS ? "_Unwind_SjLj_Resume"
+		       : "_Unwind_Resume");
+}
+
+\f
+/* Gate for pass_rtl_eh: defer to doing_eh (0), defined earlier in this
+   file, to decide whether EH code generation is needed at all.  */
+static bool
+gate_handle_eh (void)
+{
+  return doing_eh (0);
+}
+
+/* Complete generation of exception handling code.  */
+static unsigned int
+rest_of_handle_eh (void)
+{
+  /* Tidy the CFG both before and after finish_eh_generation.
+     CLEANUP_NO_INSN_DEL requests cleanup without insn removal —
+     NOTE(review): presumably to keep EH-relevant insns and notes
+     intact; confirm against cfgcleanup.  */
+  cleanup_cfg (CLEANUP_NO_INSN_DEL);
+  finish_eh_generation ();
+  cleanup_cfg (CLEANUP_NO_INSN_DEL);
+  return 0;
+}
+
+/* Pass descriptor wiring gate_handle_eh / rest_of_handle_eh into the
+   pass manager as the RTL "eh" pass, timed under TV_JUMP.  */
+struct tree_opt_pass pass_rtl_eh =
+{
+  "eh",                                 /* name */
+  gate_handle_eh,                       /* gate */
+  rest_of_handle_eh,                    /* execute */
+  NULL,                                 /* sub */
+  NULL,                                 /* next */
+  0,                                    /* static_pass_number */
+  TV_JUMP,                              /* tv_id */
+  0,                                    /* properties_required */
+  0,                                    /* properties_provided */
+  0,                                    /* properties_destroyed */
+  0,                                    /* todo_flags_start */
+  TODO_dump_func,                       /* todo_flags_finish */
+  'h'                                   /* letter */
+};
+
#include "gt-except.h"