X-Git-Url: http://git.sourceforge.jp/view?p=pf3gnuchains%2Fgcc-fork.git;a=blobdiff_plain;f=gcc%2Fexcept.c;h=b360ae4ad1da7f63ed76ac99b5a76a3efe1dc6fd;hp=4578ae903d8fffaa28e08755302eb567354f16fd;hb=51a8d95d91d96c6fc2e5432272f5772110b4ba0a;hpb=77fce4cd57cbc9db7cdbc15bba96e178dbd0f879 diff --git a/gcc/except.c b/gcc/except.c index 4578ae903d8..b360ae4ad1d 100644 --- a/gcc/except.c +++ b/gcc/except.c @@ -1,6 +1,6 @@ /* Implements exception handling. Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998, - 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc. + 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc. Contributed by Mike Stump . This file is part of GCC. @@ -134,7 +134,7 @@ struct eh_region GTY(()) /* Each region does exactly one thing. */ enum eh_region_type - { + { ERT_UNKNOWN = 0, ERT_CLEANUP, ERT_TRY, @@ -199,12 +199,17 @@ struct eh_region GTY(()) unsigned may_contain_throw : 1; }; +typedef struct eh_region *eh_region; + struct call_site_record GTY(()) { rtx landing_pad; int action; }; +DEF_VEC_P(eh_region); +DEF_VEC_ALLOC_P(eh_region, gc); + /* Used to save exception status for each function. */ struct eh_status GTY(()) { @@ -212,7 +217,7 @@ struct eh_status GTY(()) struct eh_region *region_tree; /* The same information as an indexable array. */ - struct eh_region ** GTY ((length ("%h.last_region_number"))) region_array; + VEC(eh_region,gc) *region_array; /* The most recently open region. */ struct eh_region *cur_region; @@ -246,7 +251,6 @@ struct eh_status GTY(()) htab_t GTY((param_is (struct throw_stmt_node))) throw_stmt_table; }; - static int t2r_eq (const void *, const void *); static hashval_t t2r_hash (const void *); @@ -437,7 +441,7 @@ init_eh_for_function (void) cfun->eh = ggc_alloc_cleared (sizeof (struct eh_status)); } -/* Routines to generate the exception tree somewhat directly. +/* Routines to generate the exception tree somewhat directly. These are used from tree-eh.c when processing exception related nodes during tree optimization. */ @@ -562,7 +566,8 @@ void expand_resx_expr (tree exp) { int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)); - struct eh_region *reg = cfun->eh->region_array[region_nr]; + struct eh_region *reg = VEC_index (eh_region, + cfun->eh->region_array, region_nr); gcc_assert (!reg->resume); reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr)); @@ -626,19 +631,19 @@ get_exception_filter (struct function *fun) void collect_eh_region_array (void) { - struct eh_region **array, *i; + struct eh_region *i; i = cfun->eh->region_tree; if (! i) return; - array = ggc_alloc_cleared ((cfun->eh->last_region_number + 1) - * sizeof (*array)); - cfun->eh->region_array = array; + VEC_safe_grow (eh_region, gc, cfun->eh->region_array, + cfun->eh->last_region_number + 1); + VEC_replace (eh_region, cfun->eh->region_array, 0, 0); while (1) { - array[i->region_number] = i; + VEC_replace (eh_region, cfun->eh->region_array, i->region_number, i); /* If there are sub-regions, process them. 
*/ if (i->inner) @@ -674,7 +679,7 @@ remove_unreachable_regions (rtx insns) for (i = cfun->eh->last_region_number; i > 0; --i) { - r = cfun->eh->region_array[i]; + r = VEC_index (eh_region, cfun->eh->region_array, i); if (!r || r->region_number != i) continue; @@ -695,7 +700,7 @@ remove_unreachable_regions (rtx insns) for (i = cfun->eh->last_region_number; i > 0; --i) { - r = cfun->eh->region_array[i]; + r = VEC_index (eh_region, cfun->eh->region_array, i); if (r && r->region_number == i && !reachable[i]) { bool kill_it = true; @@ -732,7 +737,7 @@ remove_unreachable_regions (rtx insns) default: break; } - + if (kill_it) remove_eh_handler (r); } @@ -756,7 +761,9 @@ convert_from_eh_region_ranges (void) we allocated earlier. */ for (i = 1; i <= n; ++i) { - struct eh_region *region = cfun->eh->region_array[i]; + struct eh_region *region; + + region = VEC_index (eh_region, cfun->eh->region_array, i); if (region && region->tree_label) region->label = DECL_RTL_IF_SET (region->tree_label); } @@ -808,9 +815,10 @@ find_exception_handler_labels (void) for (i = cfun->eh->last_region_number; i > 0; --i) { - struct eh_region *region = cfun->eh->region_array[i]; + struct eh_region *region; rtx lab; + region = VEC_index (eh_region, cfun->eh->region_array, i); if (! region || region->region_number != i) continue; if (cfun->eh->built_landing_pads) @@ -837,8 +845,9 @@ current_function_has_exception_handlers (void) for (i = cfun->eh->last_region_number; i > 0; --i) { - struct eh_region *region = cfun->eh->region_array[i]; + struct eh_region *region; + region = VEC_index (eh_region, cfun->eh->region_array, i); if (region && region->region_number == i && region->type != ERT_THROW) @@ -848,146 +857,273 @@ current_function_has_exception_handlers (void) return false; } -static struct eh_region * -duplicate_eh_region_1 (struct eh_region *o) +/* A subroutine of duplicate_eh_regions. Search the region tree under O + for the minimum and maximum region numbers. Update *MIN and *MAX. */ + +static void +duplicate_eh_regions_0 (eh_region o, int *min, int *max) { - struct eh_region *n = ggc_alloc_cleared (sizeof (struct eh_region)); + if (o->region_number < *min) + *min = o->region_number; + if (o->region_number > *max) + *max = o->region_number; - *n = *o; - - n->region_number = o->region_number + cfun->eh->last_region_number; - gcc_assert (!o->aka); - - return n; + if (o->inner) + { + o = o->inner; + duplicate_eh_regions_0 (o, min, max); + while (o->next_peer) + { + o = o->next_peer; + duplicate_eh_regions_0 (o, min, max); + } + } } -static void -duplicate_eh_region_2 (struct eh_region *o, struct eh_region **n_array, - struct eh_region *prev_try) +/* A subroutine of duplicate_eh_regions. Copy the region tree under OLD. + Root it at OUTER, and apply EH_OFFSET to the region number. Don't worry + about the other internal pointers just yet, just the tree-like pointers. 
*/ + +static eh_region +duplicate_eh_regions_1 (eh_region old, eh_region outer, int eh_offset) { - struct eh_region *n = n_array[o->region_number]; - - switch (n->type) - { - case ERT_TRY: - if (o->u.try.catch) - n->u.try.catch = n_array[o->u.try.catch->region_number]; - if (o->u.try.last_catch) - n->u.try.last_catch = n_array[o->u.try.last_catch->region_number]; - break; - - case ERT_CATCH: - if (o->u.catch.next_catch) - n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number]; - if (o->u.catch.prev_catch) - n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number]; - break; + eh_region ret, n; - case ERT_CLEANUP: - if (o->u.cleanup.prev_try) - n->u.cleanup.prev_try = n_array[o->u.cleanup.prev_try->region_number]; - else - n->u.cleanup.prev_try = prev_try; - break; - - default: - break; + ret = n = ggc_alloc (sizeof (struct eh_region)); + + *n = *old; + n->outer = outer; + n->next_peer = NULL; + gcc_assert (!old->aka); + + n->region_number += eh_offset; + VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n); + + if (old->inner) + { + old = old->inner; + n = n->inner = duplicate_eh_regions_1 (old, ret, eh_offset); + while (old->next_peer) + { + old = old->next_peer; + n = n->next_peer = duplicate_eh_regions_1 (old, ret, eh_offset); + } } - - if (o->outer) - n->outer = n_array[o->outer->region_number]; - if (o->inner) - n->inner = n_array[o->inner->region_number]; - if (o->next_peer) - n->next_peer = n_array[o->next_peer->region_number]; + + return ret; } -/* Duplicate the EH regions of IFUN into current function, root the tree in - OUTER_REGION and remap labels using MAP callback. */ +/* Duplicate the EH regions of IFUN, rooted at COPY_REGION, into current + function and root the tree below OUTER_REGION. Remap labels using MAP + callback. The special case of COPY_REGION of 0 means all regions. */ + int duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map, - void *data, int outer_region) + void *data, int copy_region, int outer_region) { - int ifun_last_region_number = ifun->eh->last_region_number; - struct eh_region **n_array, *root, *cur, *prev_try; - int i; - - if (ifun_last_region_number == 0 || !ifun->eh->region_tree) + eh_region cur, prev_try, outer, *splice; + int i, min_region, max_region, eh_offset, cfun_last_region_number; + int num_regions; + + if (!ifun->eh->region_tree) return 0; - - n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array)); - + + /* Find the range of region numbers to be copied. The interface we + provide here mandates a single offset to find new number from old, + which means we must look at the numbers present, instead of the + count or something else. */ + if (copy_region > 0) + { + min_region = INT_MAX; + max_region = 0; + + cur = VEC_index (eh_region, ifun->eh->region_array, copy_region); + duplicate_eh_regions_0 (cur, &min_region, &max_region); + } + else + min_region = 1, max_region = ifun->eh->last_region_number; + num_regions = max_region - min_region + 1; + cfun_last_region_number = cfun->eh->last_region_number; + eh_offset = cfun_last_region_number + 1 - min_region; + + /* If we've not yet created a region array, do so now. */ + VEC_safe_grow (eh_region, gc, cfun->eh->region_array, + cfun_last_region_number + 1 + num_regions); + cfun->eh->last_region_number = max_region + eh_offset; + + /* We may have just allocated the array for the first time. + Make sure that element zero is null. 
*/ + VEC_replace (eh_region, cfun->eh->region_array, 0, 0); + + /* Zero all entries in the range allocated. */ + memset (VEC_address (eh_region, cfun->eh->region_array) + + cfun_last_region_number + 1, 0, num_regions * sizeof (eh_region)); + + /* Locate the spot at which to insert the new tree. */ + if (outer_region > 0) + { + outer = VEC_index (eh_region, cfun->eh->region_array, outer_region); + splice = &outer->inner; + } + else + { + outer = NULL; + splice = &cfun->eh->region_tree; + } + while (*splice) + splice = &(*splice)->next_peer; + + /* Copy all the regions in the subtree. */ + if (copy_region > 0) + { + cur = VEC_index (eh_region, ifun->eh->region_array, copy_region); + *splice = duplicate_eh_regions_1 (cur, outer, eh_offset); + } + else + { + eh_region n; + + cur = ifun->eh->region_tree; + *splice = n = duplicate_eh_regions_1 (cur, outer, eh_offset); + while (cur->next_peer) + { + cur = cur->next_peer; + n = n->next_peer = duplicate_eh_regions_1 (cur, outer, eh_offset); + } + } + + /* Remap all the labels in the new regions. */ + for (i = cfun_last_region_number + 1; + VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i) + if (cur && cur->tree_label) + cur->tree_label = map (cur->tree_label, data); + /* Search for the containing ERT_TRY region to fix up the prev_try short-cuts for ERT_CLEANUP regions. */ prev_try = NULL; if (outer_region > 0) - for (prev_try = cfun->eh->region_array[outer_region]; + for (prev_try = VEC_index (eh_region, cfun->eh->region_array, outer_region); prev_try && prev_try->type != ERT_TRY; prev_try = prev_try->outer) - ; + if (prev_try->type == ERT_MUST_NOT_THROW) + { + prev_try = NULL; + break; + } - for (i = 1; i <= ifun_last_region_number; ++i) + /* Remap all of the internal catch and cleanup linkages. Since we + duplicate entire subtrees, all of the referenced regions will have + been copied too. And since we renumbered them as a block, a simple + bit of arithmetic finds us the index for the replacement region. */ + for (i = cfun_last_region_number + 1; + VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i) { - cur = ifun->eh->region_array[i]; - if (!cur || cur->region_number != i) + if (cur == NULL) continue; - n_array[i] = duplicate_eh_region_1 (cur); - if (cur->tree_label) + +#define REMAP(REG) \ + (REG) = VEC_index (eh_region, cfun->eh->region_array, \ + (REG)->region_number + eh_offset) + + switch (cur->type) { - tree newlabel = map (cur->tree_label, data); - n_array[i]->tree_label = newlabel; + case ERT_TRY: + if (cur->u.try.catch) + REMAP (cur->u.try.catch); + if (cur->u.try.last_catch) + REMAP (cur->u.try.last_catch); + break; + + case ERT_CATCH: + if (cur->u.catch.next_catch) + REMAP (cur->u.catch.next_catch); + if (cur->u.catch.prev_catch) + REMAP (cur->u.catch.prev_catch); + break; + + case ERT_CLEANUP: + if (cur->u.cleanup.prev_try) + REMAP (cur->u.cleanup.prev_try); + else + cur->u.cleanup.prev_try = prev_try; + break; + + default: + break; } - else - n_array[i]->tree_label = NULL; + +#undef REMAP } - for (i = 1; i <= ifun_last_region_number; ++i) + + return eh_offset; +} + +/* Return true if REGION_A is outer to REGION_B in IFUN. 
*/ + +bool +eh_region_outer_p (struct function *ifun, int region_a, int region_b) +{ + struct eh_region *rp_a, *rp_b; + + gcc_assert (ifun->eh->last_region_number > 0); + gcc_assert (ifun->eh->region_tree); + + rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a); + rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b); + gcc_assert (rp_a != NULL); + gcc_assert (rp_b != NULL); + + do { - cur = ifun->eh->region_array[i]; - if (!cur || cur->region_number != i) - continue; - duplicate_eh_region_2 (cur, n_array, prev_try); + if (rp_a == rp_b) + return true; + rp_b = rp_b->outer; } - - root = n_array[ifun->eh->region_tree->region_number]; - gcc_assert (root->outer == NULL); - if (outer_region > 0) - { - struct eh_region *cur = cfun->eh->region_array[outer_region]; - struct eh_region *p = cur->inner; + while (rp_b); - if (p) - { - while (p->next_peer) - p = p->next_peer; - p->next_peer = root; - } - else - cur->inner = root; - for (i = 1; i <= ifun_last_region_number; ++i) - if (n_array[i] && n_array[i]->outer == NULL) - n_array[i]->outer = cur; + return false; +} + +/* Return region number of region that is outer to both if REGION_A and + REGION_B in IFUN. */ + +int +eh_region_outermost (struct function *ifun, int region_a, int region_b) +{ + struct eh_region *rp_a, *rp_b; + sbitmap b_outer; + + gcc_assert (ifun->eh->last_region_number > 0); + gcc_assert (ifun->eh->region_tree); + + rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a); + rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b); + gcc_assert (rp_a != NULL); + gcc_assert (rp_b != NULL); + + b_outer = sbitmap_alloc (ifun->eh->last_region_number + 1); + sbitmap_zero (b_outer); + + do + { + SET_BIT (b_outer, rp_b->region_number); + rp_b = rp_b->outer; } - else + while (rp_b); + + do { - struct eh_region *p = cfun->eh->region_tree; - if (p) + if (TEST_BIT (b_outer, rp_a->region_number)) { - while (p->next_peer) - p = p->next_peer; - p->next_peer = root; + sbitmap_free (b_outer); + return rp_a->region_number; } - else - cfun->eh->region_tree = root; - } - - free (n_array); - - i = cfun->eh->last_region_number; - cfun->eh->last_region_number = i + ifun_last_region_number; - - collect_eh_region_array (); - - return i; + rp_a = rp_a->outer; + } + while (rp_a); + + sbitmap_free (b_outer); + return -1; } static int @@ -1103,7 +1239,7 @@ add_ttypes_entry (htab_t ttypes_hash, tree type) { /* Filter value is a 1 based table index. */ - n = xmalloc (sizeof (*n)); + n = XNEW (struct ttypes_filter); n->t = type; n->filter = VEC_length (tree, cfun->eh->ttype_data) + 1; *slot = n; @@ -1131,7 +1267,7 @@ add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list) { /* Filter value is a -1 based byte index into a uleb128 buffer. */ - n = xmalloc (sizeof (*n)); + n = XNEW (struct ttypes_filter); n->t = list; n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1); *slot = n; @@ -1180,7 +1316,9 @@ assign_filter_values (void) for (i = cfun->eh->last_region_number; i > 0; --i) { - struct eh_region *r = cfun->eh->region_array[i]; + struct eh_region *r; + + r = VEC_index (eh_region, cfun->eh->region_array, i); /* Mind we don't process a region more than once. */ if (!r || r->region_number != i) @@ -1247,7 +1385,7 @@ emit_to_new_bb_before (rtx seq, rtx insn) edge_iterator ei; /* If there happens to be a fallthru edge (possibly created by cleanup_cfg - call), we don't want it to go into newly created landing pad or other EH + call), we don't want it to go into newly created landing pad or other EH construct. 
*/ for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); ) if (e->flags & EDGE_FALLTHRU) @@ -1273,9 +1411,10 @@ build_post_landing_pads (void) for (i = cfun->eh->last_region_number; i > 0; --i) { - struct eh_region *region = cfun->eh->region_array[i]; + struct eh_region *region; rtx seq; + region = VEC_index (eh_region, cfun->eh->region_array, i); /* Mind we don't process a region more than once. */ if (!region || region->region_number != i) continue; @@ -1317,7 +1456,7 @@ build_post_landing_pads (void) emit_cmp_and_jump_insns (cfun->eh->filter, GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)), - EQ, NULL_RTX, + EQ, NULL_RTX, targetm.eh_return_filter_mode (), 0, c->label); tp_node = TREE_CHAIN (tp_node); @@ -1350,7 +1489,7 @@ build_post_landing_pads (void) emit_cmp_and_jump_insns (cfun->eh->filter, GEN_INT (region->u.allowed.filter), - EQ, NULL_RTX, + EQ, NULL_RTX, targetm.eh_return_filter_mode (), 0, region->label); /* We delay the generation of the _Unwind_Resume until we generate @@ -1392,11 +1531,12 @@ connect_post_landing_pads (void) for (i = cfun->eh->last_region_number; i > 0; --i) { - struct eh_region *region = cfun->eh->region_array[i]; + struct eh_region *region; struct eh_region *outer; rtx seq; rtx barrier; + region = VEC_index (eh_region, cfun->eh->region_array, i); /* Mind we don't process a region more than once. */ if (!region || region->region_number != i) continue; @@ -1465,16 +1605,15 @@ static void dw2_build_landing_pads (void) { int i; - unsigned int j; for (i = cfun->eh->last_region_number; i > 0; --i) { - struct eh_region *region = cfun->eh->region_array[i]; + struct eh_region *region; rtx seq; basic_block bb; - bool clobbers_hard_regs = false; edge e; + region = VEC_index (eh_region, cfun->eh->region_array, i); /* Mind we don't process a region more than once. */ if (!region || region->region_number != i) continue; @@ -1501,34 +1640,10 @@ dw2_build_landing_pads (void) #endif { /* Nothing */ } - /* If the eh_return data registers are call-saved, then we - won't have considered them clobbered from the call that - threw. Kill them now. */ - for (j = 0; ; ++j) - { - unsigned r = EH_RETURN_DATA_REGNO (j); - if (r == INVALID_REGNUM) - break; - if (! call_used_regs[r]) - { - emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r))); - clobbers_hard_regs = true; - } - } - - if (clobbers_hard_regs) - { - /* @@@ This is a kludge. Not all machine descriptions define a - blockage insn, but we must not allow the code we just generated - to be reordered by scheduling. So emit an ASM_INPUT to act as - blockage insn. 
*/ - emit_insn (gen_rtx_ASM_INPUT (VOIDmode, "")); - } - emit_move_insn (cfun->eh->exc_ptr, gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0))); emit_move_insn (cfun->eh->filter, - gen_rtx_REG (targetm.eh_return_filter_mode (), + gen_rtx_REG (targetm.eh_return_filter_mode (), EH_RETURN_DATA_REGNO (1))); seq = get_insns (); @@ -1570,7 +1685,7 @@ sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info) if (!note || INTVAL (XEXP (note, 0)) <= 0) continue; - region = cfun->eh->region_array[INTVAL (XEXP (note, 0))]; + region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0))); type_thrown = NULL_TREE; if (region->type == ERT_THROW) @@ -1612,7 +1727,8 @@ sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info) for (i = cfun->eh->last_region_number; i > 0; --i) if (lp_info[i].directly_reachable) { - struct eh_region *r = cfun->eh->region_array[i]; + struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i); + r->landing_pad = dispatch_label; lp_info[i].action_index = collect_one_action_chain (ar_hash, r); if (lp_info[i].action_index != -1) @@ -1698,7 +1814,7 @@ sjlj_mark_call_sites (struct sjlj_lp_info *lp_info) if (INTVAL (XEXP (note, 0)) <= 0) continue; - region = cfun->eh->region_array[INTVAL (XEXP (note, 0))]; + region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0))); this_call_site = lp_info[region->region_number].call_site_index; } @@ -1728,6 +1844,7 @@ static void sjlj_emit_function_enter (rtx dispatch_label) { rtx fn_begin, fc, mem, seq; + bool fn_begin_outside_block; fc = cfun->eh->sjlj_fc; @@ -1756,17 +1873,15 @@ sjlj_emit_function_enter (rtx dispatch_label) #ifdef DONT_USE_BUILTIN_SETJMP { - rtx x, note; + rtx x; x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE, TYPE_MODE (integer_type_node), 1, plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs), Pmode); - note = emit_note (NOTE_INSN_EXPECTED_VALUE); - NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx); - emit_cmp_and_jump_insns (x, const0_rtx, NE, 0, TYPE_MODE (integer_type_node), 0, dispatch_label); + add_reg_br_prob_note (get_insns (), REG_BR_PROB_BASE/100); } #else expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs), @@ -1783,23 +1898,20 @@ sjlj_emit_function_enter (rtx dispatch_label) do this in a block that is at loop level 0 and dominates all can_throw_internal instructions. 
*/ + fn_begin_outside_block = true; for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin)) - if (NOTE_P (fn_begin) - && (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG - || NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_BASIC_BLOCK)) - break; - if (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG) + if (NOTE_P (fn_begin)) + { + if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG) + break; + else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin)) + fn_begin_outside_block = false; + } + + if (fn_begin_outside_block) insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR)); else - { - rtx last = BB_END (single_succ (ENTRY_BLOCK_PTR)); - for (; ; fn_begin = NEXT_INSN (fn_begin)) - if ((NOTE_P (fn_begin) - && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG) - || fn_begin == last) - break; - emit_insn_after (seq, fn_begin); - } + emit_insn_after (seq, fn_begin); } /* Call back from expand_function_end to know where we should put @@ -1913,13 +2025,15 @@ sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info) emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index), EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0, - cfun->eh->region_array[i]->post_landing_pad); + ((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, i)) + ->post_landing_pad); } seq = get_insns (); end_sequence (); - before = cfun->eh->region_array[first_reachable]->post_landing_pad; + before = (((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, first_reachable)) + ->post_landing_pad); bb = emit_to_new_bb_before (seq, before); e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU); @@ -1932,8 +2046,7 @@ sjlj_build_landing_pads (void) { struct sjlj_lp_info *lp_info; - lp_info = xcalloc (cfun->eh->last_region_number + 1, - sizeof (struct sjlj_lp_info)); + lp_info = XCNEWVEC (struct sjlj_lp_info, cfun->eh->last_region_number + 1); if (sjlj_find_directly_reachable_regions (lp_info)) { @@ -2071,7 +2184,7 @@ remove_eh_handler (struct eh_region *region) list of alternate numbers by which we are known. */ outer = region->outer; - cfun->eh->region_array[region->region_number] = outer; + VEC_replace (eh_region, cfun->eh->region_array, region->region_number, outer); if (region->aka) { unsigned i; @@ -2079,7 +2192,7 @@ remove_eh_handler (struct eh_region *region) EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi) { - cfun->eh->region_array[i] = outer; + VEC_replace (eh_region, cfun->eh->region_array, i, outer); } } @@ -2213,7 +2326,9 @@ for_each_eh_region (void (*callback) (struct eh_region *)) int i, n = cfun->eh->last_region_number; for (i = 1; i <= n; ++i) { - struct eh_region *region = cfun->eh->region_array[i]; + struct eh_region *region; + + region = VEC_index (eh_region, cfun->eh->region_array, i); if (region) (*callback) (region); } @@ -2424,7 +2539,7 @@ reachable_next_level (struct eh_region *region, tree type_thrown, /* Here we end our search, since no exceptions may propagate. If we've touched down at some landing pad previous, then the explicit function call we generated may be used. Otherwise - the call is made by the runtime. + the call is made by the runtime. Before inlining, do not perform this optimization. 
We may inline a subroutine that contains handlers, and that will @@ -2463,7 +2578,7 @@ foreach_reachable_handler (int region_number, bool is_resx, info.callback = callback; info.callback_data = callback_data; - region = cfun->eh->region_array[region_number]; + region = VEC_index (eh_region, cfun->eh->region_array, region_number); type_thrown = NULL_TREE; if (is_resx) @@ -2552,7 +2667,7 @@ can_throw_internal_1 (int region_number, bool is_resx) struct eh_region *region; tree type_thrown; - region = cfun->eh->region_array[region_number]; + region = VEC_index (eh_region, cfun->eh->region_array, region_number); type_thrown = NULL_TREE; if (is_resx) @@ -2612,7 +2727,7 @@ can_throw_external_1 (int region_number, bool is_resx) struct eh_region *region; tree type_thrown; - region = cfun->eh->region_array[region_number]; + region = VEC_index (eh_region, cfun->eh->region_array, region_number); type_thrown = NULL_TREE; if (is_resx) @@ -2669,11 +2784,17 @@ can_throw_external (rtx insn) /* Set TREE_NOTHROW and cfun->all_throwers_are_sibcalls. */ -void +unsigned int set_nothrow_function_flags (void) { rtx insn; + /* If we don't know that this implementation of the function will + actually be used, then we must not set TREE_NOTHROW, since + callers must not assume that this function does not throw. */ + if (DECL_REPLACEABLE_P (current_function_decl)) + return 0; + TREE_NOTHROW (current_function_decl) = 1; /* Assume cfun->all_throwers_are_sibcalls until we encounter @@ -2685,7 +2806,7 @@ set_nothrow_function_flags (void) cfun->all_throwers_are_sibcalls = 1; if (! flag_exceptions) - return; + return 0; for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) if (can_throw_external (insn)) @@ -2695,7 +2816,7 @@ set_nothrow_function_flags (void) if (!CALL_P (insn) || !SIBLING_CALL_P (insn)) { cfun->all_throwers_are_sibcalls = 0; - return; + return 0; } } @@ -2708,9 +2829,10 @@ set_nothrow_function_flags (void) if (!CALL_P (insn) || !SIBLING_CALL_P (insn)) { cfun->all_throwers_are_sibcalls = 0; - return; + return 0; } } + return 0; } struct tree_opt_pass pass_set_nothrow_function_flags = @@ -2741,7 +2863,7 @@ expand_builtin_unwind_init (void) { /* Set this so all the registers get saved in our frame; we need to be able to copy the saved values for any registers from frames we unwind. 
*/ - current_function_has_nonlocal_label = 1; + current_function_calls_unwind_init = 1; #ifdef SETUP_FRAME_ADDRESSES SETUP_FRAME_ADDRESSES (); @@ -2749,9 +2871,9 @@ expand_builtin_unwind_init (void) } rtx -expand_builtin_eh_return_data_regno (tree arglist) +expand_builtin_eh_return_data_regno (tree exp) { - tree which = TREE_VALUE (arglist); + tree which = CALL_EXPR_ARG (exp, 0); unsigned HOST_WIDE_INT iwhich; if (TREE_CODE (which) != INTEGER_CST) @@ -2780,7 +2902,7 @@ expand_builtin_eh_return_data_regno (tree arglist) rtx expand_builtin_extract_return_addr (tree addr_tree) { - rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0); + rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL); if (GET_MODE (addr) != Pmode && GET_MODE (addr) != VOIDmode) @@ -2812,7 +2934,7 @@ expand_builtin_extract_return_addr (tree addr_tree) rtx expand_builtin_frob_return_addr (tree addr_tree) { - rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0); + rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL); addr = convert_memory_address (Pmode, addr); @@ -2834,7 +2956,8 @@ expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED, rtx tmp; #ifdef EH_RETURN_STACKADJ_RTX - tmp = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0); + tmp = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, + VOIDmode, EXPAND_NORMAL); tmp = convert_memory_address (Pmode, tmp); if (!cfun->eh->ehr_stackadj) cfun->eh->ehr_stackadj = copy_to_reg (tmp); @@ -2842,7 +2965,8 @@ expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED, emit_move_insn (cfun->eh->ehr_stackadj, tmp); #endif - tmp = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0); + tmp = expand_expr (handler_tree, cfun->eh->ehr_handler, + VOIDmode, EXPAND_NORMAL); tmp = convert_memory_address (Pmode, tmp); if (!cfun->eh->ehr_handler) cfun->eh->ehr_handler = copy_to_reg (tmp); @@ -2900,7 +3024,7 @@ expand_eh_return (void) rtx expand_builtin_extend_pointer (tree addr_tree) { - rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0); + rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL); int extend; #ifdef POINTERS_EXTEND_UNSIGNED @@ -3068,7 +3192,7 @@ collect_one_action_chain (htab_t ar_hash, struct eh_region *region) Add a cleanup action to the chain to catch these. */ else if (next <= 0) next = add_action_record (ar_hash, 0, 0); - + return add_action_record (ar_hash, region->u.allowed.filter, next); case ERT_MUST_NOT_THROW: @@ -3116,7 +3240,7 @@ add_call_site (rtx landing_pad, int action) The new note numbers will not refer to region numbers, but instead to call site entries. 
*/ -void +unsigned int convert_to_eh_region_ranges (void) { rtx insn, iter, note; @@ -3128,7 +3252,7 @@ convert_to_eh_region_ranges (void) int call_site = 0; if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL) - return; + return 0; VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data"); @@ -3160,7 +3284,7 @@ convert_to_eh_region_ranges (void) { if (INTVAL (XEXP (note, 0)) <= 0) continue; - region = cfun->eh->region_array[INTVAL (XEXP (note, 0))]; + region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0))); this_action = collect_one_action_chain (ar_hash, region); } @@ -3237,11 +3361,12 @@ convert_to_eh_region_ranges (void) } htab_delete (ar_hash); + return 0; } struct tree_opt_pass pass_convert_to_eh_region_ranges = { - NULL, /* name */ + "eh-ranges", /* name */ NULL, /* gate */ convert_to_eh_region_ranges, /* execute */ NULL, /* sub */ @@ -3252,7 +3377,7 @@ struct tree_opt_pass pass_convert_to_eh_region_ranges = 0, /* properties_provided */ 0, /* properties_destroyed */ 0, /* todo_flags_start */ - 0, /* todo_flags_finish */ + TODO_dump_func, /* todo_flags_finish */ 0 /* letter */ }; @@ -3398,50 +3523,74 @@ sjlj_output_call_site_table (void) call_site_base += n; } -/* Tell assembler to switch to the section for the exception handling - table. */ +#ifndef TARGET_UNWIND_INFO +/* Switch to the section that should be used for exception tables. */ -void -default_exception_section (void) +static void +switch_to_exception_section (const char * ARG_UNUSED (fnname)) { - if (targetm.have_named_sections) - { - int flags; + section *s; - if (EH_TABLES_CAN_BE_READ_ONLY) + if (exception_section) + s = exception_section; + else + { + /* Compute the section and cache it into exception_section, + unless it depends on the function name. */ + if (targetm.have_named_sections) { - int tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1); - - flags = (! flag_pic - || ((tt_format & 0x70) != DW_EH_PE_absptr - && (tt_format & 0x70) != DW_EH_PE_aligned)) - ? 0 : SECTION_WRITE; + int flags; + + if (EH_TABLES_CAN_BE_READ_ONLY) + { + int tt_format = + ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1); + flags = ((! flag_pic + || ((tt_format & 0x70) != DW_EH_PE_absptr + && (tt_format & 0x70) != DW_EH_PE_aligned)) + ? 0 : SECTION_WRITE); + } + else + flags = SECTION_WRITE; + +#ifdef HAVE_LD_EH_GC_SECTIONS + if (flag_function_sections) + { + char *section_name = xmalloc (strlen (fnname) + 32); + sprintf (section_name, ".gcc_except_table.%s", fnname); + s = get_section (section_name, flags, NULL); + free (section_name); + } + else +#endif + exception_section + = s = get_section (".gcc_except_table", flags, NULL); } else - flags = SECTION_WRITE; - named_section_flags (".gcc_except_table", flags); + exception_section + = s = flag_pic ? data_section : readonly_data_section; } - else if (flag_pic) - data_section (); - else - readonly_data_section (); + + switch_to_section (s); } +#endif /* Output a reference from an exception table to the type_info object TYPE. - TT_FORMAT and TT_FORMAT_SIZE descibe the DWARF encoding method used for + TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for the value. 
*/ static void output_ttype (tree type, int tt_format, int tt_format_size) { rtx value; + bool public = true; if (type == NULL_TREE) value = const0_rtx; else { - struct cgraph_varpool_node *node; + struct varpool_node *node; type = lookup_type_for_runtime (type); value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER); @@ -3455,13 +3604,14 @@ output_ttype (tree type, int tt_format, int tt_format_size) type = TREE_OPERAND (type, 0); if (TREE_CODE (type) == VAR_DECL) { - node = cgraph_varpool_node (type); + node = varpool_node (type); if (node) - cgraph_varpool_mark_needed_node (node); + varpool_mark_needed_node (node); + public = TREE_PUBLIC (type); } } - else if (TREE_CODE (type) != INTEGER_CST) - abort (); + else + gcc_assert (TREE_CODE (type) == INTEGER_CST); } /* Allow the target to override the type table entry format. */ @@ -3472,11 +3622,11 @@ output_ttype (tree type, int tt_format, int tt_format_size) assemble_integer (value, tt_format_size, tt_format_size * BITS_PER_UNIT, 1); else - dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL); + dw2_asm_output_encoded_addr_rtx (tt_format, value, public, NULL); } void -output_function_exception_table (void) +output_function_exception_table (const char * ARG_UNUSED (fnname)) { int tt_format, cs_format, lp_format, i, n; #ifdef HAVE_AS_LEB128 @@ -3493,18 +3643,23 @@ output_function_exception_table (void) if (! cfun->uses_eh_lsda) return; + if (eh_personality_libfunc) + assemble_external_libcall (eh_personality_libfunc); + #ifdef TARGET_UNWIND_INFO /* TODO: Move this into target file. */ - assemble_external_libcall (eh_personality_libfunc); fputs ("\t.personality\t", asm_out_file); output_addr_const (asm_out_file, eh_personality_libfunc); fputs ("\n\t.handlerdata\n", asm_out_file); /* Note that varasm still thinks we're in the function's code section. The ".endp" directive that will immediately follow will take us back. */ #else - targetm.asm_out.exception_section (); + switch_to_exception_section (fnname); #endif + /* If the target wants a label to begin the table, emit it here. */ + targetm.asm_out.except_table_label (asm_out_file); + have_tt_data = (VEC_length (tree, cfun->eh->ttype_data) > 0 || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0); @@ -3656,7 +3811,7 @@ output_function_exception_table (void) (i ? NULL : "Exception specification table")); } - current_function_section (current_function_decl); + switch_to_section (current_function_section ()); } void @@ -3672,7 +3827,7 @@ get_eh_throw_stmt_table (struct function *fun) } /* Dump EH information to OUT. */ -void +void dump_eh_tree (FILE *out, struct function *fun) { struct eh_region *i; @@ -3718,7 +3873,7 @@ dump_eh_tree (FILE *out, struct function *fun) /* Verify some basic invariants on EH datastructures. Could be extended to catch more. */ -void +void verify_eh_tree (struct function *fun) { struct eh_region *i, *outer = NULL; @@ -3732,10 +3887,10 @@ verify_eh_tree (struct function *fun) if (! 
i) return; for (j = fun->eh->last_region_number; j > 0; --j) - if (fun->eh->region_array[j]) + if ((i = VEC_index (eh_region, cfun->eh->region_array, j))) { count++; - if (fun->eh->region_array[j]->region_number != j) + if (i->region_number != j) { error ("region_array is corrupted for region %i", i->region_number); err = true; @@ -3744,7 +3899,7 @@ verify_eh_tree (struct function *fun) while (1) { - if (fun->eh->region_array[i->region_number] != i) + if (VEC_index (eh_region, cfun->eh->region_array, i->region_number) != i) { error ("region_array is corrupted for region %i", i->region_number); err = true; @@ -3823,19 +3978,20 @@ gate_handle_eh (void) } /* Complete generation of exception handling code. */ -static void +static unsigned int rest_of_handle_eh (void) { - cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL); + cleanup_cfg (CLEANUP_NO_INSN_DEL); finish_eh_generation (); - cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL); + cleanup_cfg (CLEANUP_NO_INSN_DEL); + return 0; } struct tree_opt_pass pass_rtl_eh = { "eh", /* name */ - gate_handle_eh, /* gate */ - rest_of_handle_eh, /* execute */ + gate_handle_eh, /* gate */ + rest_of_handle_eh, /* execute */ NULL, /* sub */ NULL, /* next */ 0, /* static_pass_number */
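
The heart of the duplicate_eh_regions rewrite above is a single renumbering offset: the copied subtree's region numbers span [min_region, max_region] (found by duplicate_eh_regions_0), the copies are appended after the destination's last_region_number, and eh_offset = cfun_last_region_number + 1 - min_region then maps every old number, and every cross-link handled by the REMAP macro (u.try.catch, u.catch.next_catch, u.cleanup.prev_try), to its new slot. The following is a minimal standalone sketch of that arithmetic, not part of the patch; toy_region, toy_min_max and the literal numbers are invented purely for illustration.

/* Standalone sketch (not part of the patch): the renumbering scheme that
   duplicate_eh_regions relies on, with invented toy names.  */

#include <assert.h>
#include <limits.h>

struct toy_region
{
  int region_number;
  struct toy_region *inner;      /* first contained region */
  struct toy_region *next_peer;  /* next region at the same nesting level */
};

/* Record the smallest and largest region numbers in the subtree rooted at R,
   the same walk duplicate_eh_regions_0 performs.  */
static void
toy_min_max (struct toy_region *r, int *min, int *max)
{
  if (r->region_number < *min)
    *min = r->region_number;
  if (r->region_number > *max)
    *max = r->region_number;
  for (r = r->inner; r; r = r->next_peer)
    toy_min_max (r, min, max);
}

int
main (void)
{
  /* A subtree numbered 4..7: region 4 contains 5 and 7, region 5 contains 6.  */
  struct toy_region r6 = { 6, 0, 0 };
  struct toy_region r7 = { 7, 0, 0 };
  struct toy_region r5 = { 5, &r6, &r7 };
  struct toy_region r4 = { 4, &r5, 0 };

  int min_region = INT_MAX, max_region = 0;
  toy_min_max (&r4, &min_region, &max_region);
  assert (min_region == 4 && max_region == 7);

  /* The destination function already owns regions 1..10, so the copies are
     appended after region 10 and one offset maps every old number to new.  */
  int cfun_last_region_number = 10;
  int eh_offset = cfun_last_region_number + 1 - min_region;   /* 7 */

  assert (4 + eh_offset == 11);            /* root copy's new number */
  assert (7 + eh_offset == 14);            /* deepest copy's new number */
  assert (max_region + eh_offset == 14);   /* new last_region_number */
  return 0;
}

The same offset is what duplicate_eh_regions returns, so a caller can translate any region number it recorded before the copy with plain addition.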
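
Likewise, the new eh_region_outermost answers "which region encloses both A and B?" by setting a bit for every region on B's outer chain (including B itself) and then walking A's outer chain until it reaches a marked region number, returning -1 if the chains never meet. Below is a simplified standalone model of that walk using a plain flag array in place of GCC's sbitmap; nest_node and toy_outermost are invented names, not GCC interfaces.

/* Standalone sketch (not part of the patch): the common-ancestor walk behind
   the new eh_region_outermost, using a flag array instead of sbitmap.  */

#include <stdio.h>
#include <stdlib.h>

struct nest_node
{
  int region_number;
  struct nest_node *outer;   /* enclosing region, NULL at the outermost level */
};

/* Return the number of the innermost region enclosing both A and B (a region
   counts as enclosing itself), or -1 if they share no enclosing region.  */
static int
toy_outermost (struct nest_node *a, struct nest_node *b, int last_region_number)
{
  char *on_b_chain = calloc (last_region_number + 1, 1);
  struct nest_node *r;
  int result = -1;

  if (!on_b_chain)
    return -1;

  /* Mark every region on B's outer chain...  */
  for (r = b; r; r = r->outer)
    on_b_chain[r->region_number] = 1;

  /* ...then walk A's outer chain until it hits a marked region.  */
  for (r = a; r; r = r->outer)
    if (on_b_chain[r->region_number])
      {
        result = r->region_number;
        break;
      }

  free (on_b_chain);
  return result;
}

int
main (void)
{
  /* Region 1 contains regions 2 and 4; region 2 contains region 3.  */
  struct nest_node r1 = { 1, NULL };
  struct nest_node r2 = { 2, &r1 };
  struct nest_node r3 = { 3, &r2 };
  struct nest_node r4 = { 4, &r1 };

  printf ("outermost (3, 4) = %d\n", toy_outermost (&r3, &r4, 4));  /* 1 */
  printf ("outermost (3, 2) = %d\n", toy_outermost (&r3, &r2, 4));  /* 2 */
  return 0;
}

Marking one chain and scanning the other keeps the cost proportional to the nesting depth, which is also the behaviour of the sbitmap-based version added in the patch.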