/* Implements exception handling.
Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
- 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
+ 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
Contributed by Mike Stump <mrs@cygnus.com>.
This file is part of GCC.
return false;
}
\f
-static struct eh_region *
-duplicate_eh_region_1 (struct eh_region *o)
-{
- struct eh_region *n = ggc_alloc_cleared (sizeof (struct eh_region));
-
- *n = *o;
+/* A subroutine of duplicate_eh_regions. Search the region tree under O
+ for the minimum and maximum region numbers. Update *MIN and *MAX. */
- n->region_number = o->region_number + cfun->eh->last_region_number;
- VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);
- gcc_assert (!o->aka);
+static void
+duplicate_eh_regions_0 (eh_region o, int *min, int *max)
+{
+ if (o->region_number < *min)
+ *min = o->region_number;
+ if (o->region_number > *max)
+ *max = o->region_number;
- return n;
+ if (o->inner)
+ {
+ o = o->inner;
+ duplicate_eh_regions_0 (o, min, max);
+ while (o->next_peer)
+ {
+ o = o->next_peer;
+ duplicate_eh_regions_0 (o, min, max);
+ }
+ }
}
-static void
-duplicate_eh_region_2 (struct eh_region *o, struct eh_region **n_array,
- struct eh_region *prev_try)
+/* A subroutine of duplicate_eh_regions. Copy the region tree under OLD.
+ Root it at OUTER, and apply EH_OFFSET to the region number. Don't worry
+ about the other internal pointers just yet, just the tree-like pointers. */
+
+static eh_region
+duplicate_eh_regions_1 (eh_region old, eh_region outer, int eh_offset)
{
- struct eh_region *n = n_array[o->region_number];
+ eh_region ret, n;
- switch (n->type)
- {
- case ERT_TRY:
- if (o->u.try.catch)
- n->u.try.catch = n_array[o->u.try.catch->region_number];
- if (o->u.try.last_catch)
- n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
- break;
+ ret = n = ggc_alloc (sizeof (struct eh_region));
- case ERT_CATCH:
- if (o->u.catch.next_catch)
- n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
- if (o->u.catch.prev_catch)
- n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
- break;
+ *n = *old;
+ n->outer = outer;
+ n->next_peer = NULL;
+ gcc_assert (!old->aka);
- case ERT_CLEANUP:
- if (o->u.cleanup.prev_try)
- n->u.cleanup.prev_try = n_array[o->u.cleanup.prev_try->region_number];
- else
- n->u.cleanup.prev_try = prev_try;
- break;
+ n->region_number += eh_offset;
+ VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);
- default:
- break;
+ if (old->inner)
+ {
+ old = old->inner;
+ n = n->inner = duplicate_eh_regions_1 (old, ret, eh_offset);
+ while (old->next_peer)
+ {
+ old = old->next_peer;
+ n = n->next_peer = duplicate_eh_regions_1 (old, ret, eh_offset);
+ }
}
- if (o->outer)
- n->outer = n_array[o->outer->region_number];
- if (o->inner)
- n->inner = n_array[o->inner->region_number];
- if (o->next_peer)
- n->next_peer = n_array[o->next_peer->region_number];
+ return ret;
}
-/* Duplicate the EH regions of IFUN into current function, root the tree in
- OUTER_REGION and remap labels using MAP callback. */
+/* Duplicate the EH regions of IFUN, rooted at COPY_REGION, into current
+ function and root the tree below OUTER_REGION. Remap labels using MAP
+ callback. The special case of COPY_REGION of 0 means all regions. */
+
int
duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map,
- void *data, int outer_region)
+ void *data, int copy_region, int outer_region)
{
- int ifun_last_region_number = ifun->eh->last_region_number;
- struct eh_region **n_array, *root, *cur, *prev_try;
- int i;
+ eh_region cur, prev_try, outer, *splice;
+ int i, min_region, max_region, eh_offset, cfun_last_region_number;
+ int num_regions;
- if (ifun_last_region_number == 0 || !ifun->eh->region_tree)
+ if (!ifun->eh->region_tree)
return 0;
- n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));
+ /* Find the range of region numbers to be copied. The interface we
+ provide here mandates a single offset to find new number from old,
+ which means we must look at the numbers present, instead of the
+ count or something else. */
+ if (copy_region > 0)
+ {
+ min_region = INT_MAX;
+ max_region = 0;
+
+ cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
+ duplicate_eh_regions_0 (cur, &min_region, &max_region);
+ }
+ else
+ min_region = 1, max_region = ifun->eh->last_region_number;
+ num_regions = max_region - min_region + 1;
+ cfun_last_region_number = cfun->eh->last_region_number;
+ eh_offset = cfun_last_region_number + 1 - min_region;
+
+ /* If we've not yet created a region array, do so now. */
VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
- cfun->eh->last_region_number + 1 + ifun_last_region_number);
+ cfun_last_region_number + 1 + num_regions);
+ cfun->eh->last_region_number = max_region + eh_offset;
- /* We might've created new cfun->eh->region_array so zero out nonexisting region 0. */
+ /* We may have just allocated the array for the first time.
+ Make sure that element zero is null. */
VEC_replace (eh_region, cfun->eh->region_array, 0, 0);
- for (i = cfun->eh->last_region_number + 1;
- i < cfun->eh->last_region_number + 1 + ifun_last_region_number; i++)
- VEC_replace (eh_region, cfun->eh->region_array, i, 0);
+ /* Zero all entries in the range allocated. */
+ memset (VEC_address (eh_region, cfun->eh->region_array)
+ + cfun_last_region_number + 1, 0, num_regions * sizeof (eh_region));
+
+ /* Locate the spot at which to insert the new tree. */
+ if (outer_region > 0)
+ {
+ outer = VEC_index (eh_region, cfun->eh->region_array, outer_region);
+ splice = &outer->inner;
+ }
+ else
+ {
+ outer = NULL;
+ splice = &cfun->eh->region_tree;
+ }
+ while (*splice)
+ splice = &(*splice)->next_peer;
+
+ /* Copy all the regions in the subtree. */
+ if (copy_region > 0)
+ {
+ cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
+ *splice = duplicate_eh_regions_1 (cur, outer, eh_offset);
+ }
+ else
+ {
+ eh_region n;
+
+ cur = ifun->eh->region_tree;
+ *splice = n = duplicate_eh_regions_1 (cur, outer, eh_offset);
+ while (cur->next_peer)
+ {
+ cur = cur->next_peer;
+ n = n->next_peer = duplicate_eh_regions_1 (cur, outer, eh_offset);
+ }
+ }
+
+ /* Remap all the labels in the new regions. */
+ for (i = cfun_last_region_number + 1;
+ VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
+ if (cur && cur->tree_label)
+ cur->tree_label = map (cur->tree_label, data);
/* Search for the containing ERT_TRY region to fix up
the prev_try short-cuts for ERT_CLEANUP regions. */
prev_try = prev_try->outer)
;
- for (i = 1; i <= ifun_last_region_number; ++i)
+ /* Remap all of the internal catch and cleanup linkages. Since we
+ duplicate entire subtrees, all of the referenced regions will have
+ been copied too. And since we renumbered them as a block, a simple
+ bit of arithmetic finds us the index for the replacement region. */
+ for (i = cfun_last_region_number + 1;
+ VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
{
- cur = VEC_index (eh_region, ifun->eh->region_array, i);
- if (!cur || cur->region_number != i)
+ if (cur == NULL)
continue;
- n_array[i] = duplicate_eh_region_1 (cur);
- if (cur->tree_label)
+
+#define REMAP(REG) \
+ (REG) = VEC_index (eh_region, cfun->eh->region_array, \
+ (REG)->region_number + eh_offset)
+
+ switch (cur->type)
{
- tree newlabel = map (cur->tree_label, data);
- n_array[i]->tree_label = newlabel;
+ case ERT_TRY:
+ if (cur->u.try.catch)
+ REMAP (cur->u.try.catch);
+ if (cur->u.try.last_catch)
+ REMAP (cur->u.try.last_catch);
+ break;
+
+ case ERT_CATCH:
+ if (cur->u.catch.next_catch)
+ REMAP (cur->u.catch.next_catch);
+ if (cur->u.catch.prev_catch)
+ REMAP (cur->u.catch.prev_catch);
+ break;
+
+ case ERT_CLEANUP:
+ if (cur->u.cleanup.prev_try)
+ REMAP (cur->u.cleanup.prev_try);
+ else
+ cur->u.cleanup.prev_try = prev_try;
+ break;
+
+ default:
+ break;
}
- else
- n_array[i]->tree_label = NULL;
+
+#undef REMAP
}
- for (i = 1; i <= ifun_last_region_number; ++i)
+
+ return eh_offset;
+}
+
+/* Return true if REGION_A is outer to REGION_B in IFUN. */
+
+bool
+eh_region_outer_p (struct function *ifun, int region_a, int region_b)
+{
+ struct eh_region *rp_a, *rp_b;
+
+ gcc_assert (ifun->eh->last_region_number > 0);
+ gcc_assert (ifun->eh->region_tree);
+
+ rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
+ rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
+ gcc_assert (rp_a != NULL);
+ gcc_assert (rp_b != NULL);
+
+ do
{
- cur = VEC_index (eh_region, ifun->eh->region_array, i);
- if (!cur || cur->region_number != i)
- continue;
- duplicate_eh_region_2 (cur, n_array, prev_try);
+ if (rp_a == rp_b)
+ return true;
+ rp_b = rp_b->outer;
}
+ while (rp_b);
- root = n_array[ifun->eh->region_tree->region_number];
- gcc_assert (root->outer == NULL);
- if (outer_region > 0)
- {
- struct eh_region *cur
- = VEC_index (eh_region, cfun->eh->region_array, outer_region);
- struct eh_region *p = cur->inner;
+ return false;
+}
- if (p)
- {
- while (p->next_peer)
- p = p->next_peer;
- p->next_peer = root;
- }
- else
- cur->inner = root;
- for (i = 1; i <= ifun_last_region_number; ++i)
- if (n_array[i] && n_array[i]->outer == NULL)
- n_array[i]->outer = cur;
+/* Return the region number of the region that is outer to both REGION_A
+   and REGION_B in IFUN. */
+
+int
+eh_region_outermost (struct function *ifun, int region_a, int region_b)
+{
+ struct eh_region *rp_a, *rp_b;
+ sbitmap b_outer;
+
+ gcc_assert (ifun->eh->last_region_number > 0);
+ gcc_assert (ifun->eh->region_tree);
+
+ rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
+ rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
+ gcc_assert (rp_a != NULL);
+ gcc_assert (rp_b != NULL);
+
+ b_outer = sbitmap_alloc (ifun->eh->last_region_number + 1);
+ sbitmap_zero (b_outer);
+
+ do
+ {
+ SET_BIT (b_outer, rp_b->region_number);
+ rp_b = rp_b->outer;
}
- else
+ while (rp_b);
+
+ do
{
- struct eh_region *p = cfun->eh->region_tree;
- if (p)
+ if (TEST_BIT (b_outer, rp_a->region_number))
{
- while (p->next_peer)
- p = p->next_peer;
- p->next_peer = root;
+ sbitmap_free (b_outer);
+ return rp_a->region_number;
}
- else
- cfun->eh->region_tree = root;
+ rp_a = rp_a->outer;
}
+ while (rp_a);
- free (n_array);
-
- i = cfun->eh->last_region_number;
- cfun->eh->last_region_number = i + ifun_last_region_number;
-
- return i;
+ sbitmap_free (b_outer);
+ return -1;
}
\f
static int
dw2_build_landing_pads (void)
{
int i;
- unsigned int j;
for (i = cfun->eh->last_region_number; i > 0; --i)
{
struct eh_region *region;
rtx seq;
basic_block bb;
- bool clobbers_hard_regs = false;
edge e;
region = VEC_index (eh_region, cfun->eh->region_array, i);
#endif
{ /* Nothing */ }
- /* If the eh_return data registers are call-saved, then we
- won't have considered them clobbered from the call that
- threw. Kill them now. */
- for (j = 0; ; ++j)
- {
- unsigned r = EH_RETURN_DATA_REGNO (j);
- if (r == INVALID_REGNUM)
- break;
- if (! call_used_regs[r])
- {
- emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
- clobbers_hard_regs = true;
- }
- }
-
- if (clobbers_hard_regs)
- {
- /* @@@ This is a kludge. Not all machine descriptions define a
- blockage insn, but we must not allow the code we just generated
- to be reordered by scheduling. So emit an ASM_INPUT to act as
- blockage insn. */
- emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
- }
-
emit_move_insn (cfun->eh->exc_ptr,
gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
emit_move_insn (cfun->eh->filter,
#ifdef DONT_USE_BUILTIN_SETJMP
{
- rtx x, note;
+ rtx x;
x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
TYPE_MODE (integer_type_node), 1,
plus_constant (XEXP (fc, 0),
sjlj_fc_jbuf_ofs), Pmode);
- note = emit_note (NOTE_INSN_EXPECTED_VALUE);
- NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
-
emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
TYPE_MODE (integer_type_node), 0, dispatch_label);
+ add_reg_br_prob_note (get_insns (), REG_BR_PROB_BASE/100);
}
#else
expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
/* Set TREE_NOTHROW and cfun->all_throwers_are_sibcalls. */
-void
+unsigned int
set_nothrow_function_flags (void)
{
rtx insn;
+ /* If we don't know that this implementation of the function will
+ actually be used, then we must not set TREE_NOTHROW, since
+ callers must not assume that this function does not throw. */
+ if (DECL_REPLACEABLE_P (current_function_decl))
+ return 0;
+
TREE_NOTHROW (current_function_decl) = 1;
/* Assume cfun->all_throwers_are_sibcalls until we encounter
cfun->all_throwers_are_sibcalls = 1;
if (! flag_exceptions)
- return;
+ return 0;
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
if (can_throw_external (insn))
if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
{
cfun->all_throwers_are_sibcalls = 0;
- return;
+ return 0;
}
}
if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
{
cfun->all_throwers_are_sibcalls = 0;
- return;
+ return 0;
}
}
+ return 0;
}
struct tree_opt_pass pass_set_nothrow_function_flags =
}
rtx
-expand_builtin_eh_return_data_regno (tree arglist)
+expand_builtin_eh_return_data_regno (tree exp)
{
- tree which = TREE_VALUE (arglist);
+ tree which = CALL_EXPR_ARG (exp, 0);
unsigned HOST_WIDE_INT iwhich;
if (TREE_CODE (which) != INTEGER_CST)
The new note numbers will not refer to region numbers, but
instead to call site entries. */
-void
+unsigned int
convert_to_eh_region_ranges (void)
{
rtx insn, iter, note;
int call_site = 0;
if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
- return;
+ return 0;
VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
}
htab_delete (ar_hash);
+ return 0;
}
struct tree_opt_pass pass_convert_to_eh_region_ranges =
/* Switch to the section that should be used for exception tables. */
static void
-switch_to_exception_section (void)
+switch_to_exception_section (const char * ARG_UNUSED (fnname))
{
- if (exception_section == 0)
+ section *s;
+
+ if (exception_section)
+ s = exception_section;
+ else
{
+ /* Compute the section and cache it into exception_section,
+ unless it depends on the function name. */
if (targetm.have_named_sections)
{
int flags;
}
else
flags = SECTION_WRITE;
- exception_section = get_section (".gcc_except_table", flags, NULL);
+
+#ifdef HAVE_LD_EH_GC_SECTIONS
+ if (flag_function_sections)
+ {
+ char *section_name = xmalloc (strlen (fnname) + 32);
+ sprintf (section_name, ".gcc_except_table.%s", fnname);
+ s = get_section (section_name, flags, NULL);
+ free (section_name);
+ }
+ else
+#endif
+ exception_section
+ = s = get_section (".gcc_except_table", flags, NULL);
}
else
- exception_section = flag_pic ? data_section : readonly_data_section;
+ exception_section
+ = s = flag_pic ? data_section : readonly_data_section;
}
- switch_to_section (exception_section);
+
+ switch_to_section (s);
}
#endif
/* Output a reference from an exception table to the type_info object TYPE.
- TT_FORMAT and TT_FORMAT_SIZE descibe the DWARF encoding method used for
+ TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
the value. */
static void
value = const0_rtx;
else
{
- struct cgraph_varpool_node *node;
+ struct varpool_node *node;
type = lookup_type_for_runtime (type);
- value = expand_normal (type);
+ value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
/* Let cgraph know that the rtti decl is used. Not all of the
paths below go through assemble_integer, which would take
type = TREE_OPERAND (type, 0);
if (TREE_CODE (type) == VAR_DECL)
{
- node = cgraph_varpool_node (type);
+ node = varpool_node (type);
if (node)
- cgraph_varpool_mark_needed_node (node);
+ varpool_mark_needed_node (node);
public = TREE_PUBLIC (type);
}
}
}
void
-output_function_exception_table (void)
+output_function_exception_table (const char * ARG_UNUSED (fnname))
{
int tt_format, cs_format, lp_format, i, n;
#ifdef HAVE_AS_LEB128
/* Note that varasm still thinks we're in the function's code section.
The ".endp" directive that will immediately follow will take us back. */
#else
- switch_to_exception_section ();
+ switch_to_exception_section (fnname);
#endif
/* If the target wants a label to begin the table, emit it here. */
}
/* Complete generation of exception handling code. */
-static void
+static unsigned int
rest_of_handle_eh (void)
{
- cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
+ cleanup_cfg (CLEANUP_NO_INSN_DEL);
finish_eh_generation ();
- cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
+ cleanup_cfg (CLEANUP_NO_INSN_DEL);
+ return 0;
}
struct tree_opt_pass pass_rtl_eh =