X-Git-Url: http://git.sourceforge.jp/view?a=blobdiff_plain;f=gcc%2Fexcept.c;h=6fb454e8543ad2ee917b44e2c51d844b911d92af;hb=8aea3a7eefc659ca3ff65f3819f083cba38ac1ed;hp=782a516abfb64ce4a24a82da136407c60bf5d98f;hpb=6276113b222aa3a5633d3aff11d9c301f2e55505;p=pf3gnuchains%2Fgcc-fork.git diff --git a/gcc/except.c b/gcc/except.c index 782a516abfb..6fb454e8543 100644 --- a/gcc/except.c +++ b/gcc/except.c @@ -134,7 +134,7 @@ struct eh_region GTY(()) /* Each region does exactly one thing. */ enum eh_region_type - { + { ERT_UNKNOWN = 0, ERT_CLEANUP, ERT_TRY, @@ -251,7 +251,6 @@ struct eh_status GTY(()) htab_t GTY((param_is (struct throw_stmt_node))) throw_stmt_table; }; - static int t2r_eq (const void *, const void *); static hashval_t t2r_hash (const void *); @@ -442,7 +441,7 @@ init_eh_for_function (void) cfun->eh = ggc_alloc_cleared (sizeof (struct eh_status)); } -/* Routines to generate the exception tree somewhat directly. +/* Routines to generate the exception tree somewhat directly. These are used from tree-eh.c when processing exception related nodes during tree optimization. */ @@ -738,7 +737,7 @@ remove_unreachable_regions (rtx insns) default: break; } - + if (kill_it) remove_eh_handler (r); } @@ -858,85 +857,146 @@ current_function_has_exception_handlers (void) return false; } -static struct eh_region * -duplicate_eh_region_1 (struct eh_region *o) +/* A subroutine of duplicate_eh_regions. Search the region tree under O + for the minimum and maximum region numbers. Update *MIN and *MAX. */ + +static void +duplicate_eh_regions_0 (eh_region o, int *min, int *max) { - struct eh_region *n = ggc_alloc_cleared (sizeof (struct eh_region)); + if (o->region_number < *min) + *min = o->region_number; + if (o->region_number > *max) + *max = o->region_number; - *n = *o; - - n->region_number = o->region_number + cfun->eh->last_region_number; - VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n); - gcc_assert (!o->aka); - - return n; + if (o->inner) + { + o = o->inner; + duplicate_eh_regions_0 (o, min, max); + while (o->next_peer) + { + o = o->next_peer; + duplicate_eh_regions_0 (o, min, max); + } + } } -static void -duplicate_eh_region_2 (struct eh_region *o, struct eh_region **n_array, - struct eh_region *prev_try) +/* A subroutine of duplicate_eh_regions. Copy the region tree under OLD. + Root it at OUTER, and apply EH_OFFSET to the region number. Don't worry + about the other internal pointers just yet, just the tree-like pointers. 
*/ + +static eh_region +duplicate_eh_regions_1 (eh_region old, eh_region outer, int eh_offset) { - struct eh_region *n = n_array[o->region_number]; - - switch (n->type) - { - case ERT_TRY: - if (o->u.try.catch) - n->u.try.catch = n_array[o->u.try.catch->region_number]; - if (o->u.try.last_catch) - n->u.try.last_catch = n_array[o->u.try.last_catch->region_number]; - break; - - case ERT_CATCH: - if (o->u.catch.next_catch) - n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number]; - if (o->u.catch.prev_catch) - n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number]; - break; + eh_region ret, n; - case ERT_CLEANUP: - if (o->u.cleanup.prev_try) - n->u.cleanup.prev_try = n_array[o->u.cleanup.prev_try->region_number]; - else - n->u.cleanup.prev_try = prev_try; - break; - - default: - break; + ret = n = ggc_alloc (sizeof (struct eh_region)); + + *n = *old; + n->outer = outer; + gcc_assert (!old->aka); + + n->region_number += eh_offset; + VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n); + + if (old->inner) + { + old = old->inner; + n = n->inner = duplicate_eh_regions_1 (old, ret, eh_offset); + while (old->next_peer) + { + old = old->next_peer; + n = n->next_peer = duplicate_eh_regions_1 (old, ret, eh_offset); + } } - - if (o->outer) - n->outer = n_array[o->outer->region_number]; - if (o->inner) - n->inner = n_array[o->inner->region_number]; - if (o->next_peer) - n->next_peer = n_array[o->next_peer->region_number]; + + return ret; } -/* Duplicate the EH regions of IFUN into current function, root the tree in - OUTER_REGION and remap labels using MAP callback. */ +/* Duplicate the EH regions of IFUN, rooted at COPY_REGION, into current + function and root the tree below OUTER_REGION. Remap labels using MAP + callback. The special case of COPY_REGION of 0 means all regions. */ + int duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map, - void *data, int outer_region) + void *data, int copy_region, int outer_region) { - int ifun_last_region_number = ifun->eh->last_region_number; - struct eh_region **n_array, *root, *cur, *prev_try; - int i; - - if (ifun_last_region_number == 0 || !ifun->eh->region_tree) + eh_region cur, prev_try, outer, *splice; + int i, min_region, max_region, eh_offset, cfun_last_region_number; + int num_regions; + + if (!ifun->eh->region_tree) return 0; - - n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array)); + + /* Find the range of region numbers to be copied. The interface we + provide here mandates a single offset to find new number from old, + which means we must look at the numbers present, instead of the + count or something else. */ + if (copy_region > 0) + { + min_region = INT_MAX; + max_region = 0; + + cur = VEC_index (eh_region, ifun->eh->region_array, copy_region); + duplicate_eh_regions_0 (cur, &min_region, &max_region); + } + else + min_region = 1, max_region = ifun->eh->last_region_number; + num_regions = max_region - min_region + 1; + cfun_last_region_number = cfun->eh->last_region_number; + eh_offset = cfun_last_region_number + 1 - min_region; + + /* If we've not yet created a region array, do so now. */ VEC_safe_grow (eh_region, gc, cfun->eh->region_array, - cfun->eh->last_region_number + 1 + ifun_last_region_number); + cfun_last_region_number + 1 + num_regions); + cfun->eh->last_region_number = max_region + eh_offset; - /* We might've created new cfun->eh->region_array so zero out nonexisting region 0. */ + /* We may have just allocated the array for the first time. 
+ Make sure that element zero is null. */ VEC_replace (eh_region, cfun->eh->region_array, 0, 0); - for (i = cfun->eh->last_region_number + 1; - i < cfun->eh->last_region_number + 1 + ifun_last_region_number; i++) - VEC_replace (eh_region, cfun->eh->region_array, i, 0); - + /* Zero all entries in the range allocated. */ + memset (VEC_address (eh_region, cfun->eh->region_array) + + cfun_last_region_number + 1, 0, num_regions * sizeof (eh_region)); + + /* Locate the spot at which to insert the new tree. */ + if (outer_region > 0) + { + outer = VEC_index (eh_region, cfun->eh->region_array, outer_region); + splice = &outer->inner; + } + else + { + outer = NULL; + splice = &cfun->eh->region_tree; + } + while (*splice) + splice = &(*splice)->next_peer; + + /* Copy all the regions in the subtree. */ + if (copy_region > 0) + { + cur = VEC_index (eh_region, ifun->eh->region_array, copy_region); + *splice = duplicate_eh_regions_1 (cur, outer, eh_offset); + } + else + { + eh_region n; + + cur = ifun->eh->region_tree; + *splice = n = duplicate_eh_regions_1 (cur, outer, eh_offset); + while (cur->next_peer) + { + cur = cur->next_peer; + n = n->next_peer = duplicate_eh_regions_1 (cur, outer, eh_offset); + } + } + + /* Remap all the labels in the new regions. */ + for (i = cfun_last_region_number + 1; + VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i) + if (cur && cur->tree_label) + cur->tree_label = map (cur->tree_label, data); + /* Search for the containing ERT_TRY region to fix up the prev_try short-cuts for ERT_CLEANUP regions. */ prev_try = NULL; @@ -946,67 +1006,119 @@ duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map, prev_try = prev_try->outer) ; - for (i = 1; i <= ifun_last_region_number; ++i) + /* Remap all of the internal catch and cleanup linkages. Since we + duplicate entire subtrees, all of the referenced regions will have + been copied too. And since we renumbered them as a block, a simple + bit of arithmetic finds us the index for the replacement region. */ + for (i = cfun_last_region_number + 1; + VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i) { - cur = VEC_index (eh_region, ifun->eh->region_array, i); - if (!cur || cur->region_number != i) + if (cur == NULL) continue; - n_array[i] = duplicate_eh_region_1 (cur); - if (cur->tree_label) + +#define REMAP(REG) \ + (REG) = VEC_index (eh_region, cfun->eh->region_array, \ + (REG)->region_number + eh_offset) + + switch (cur->type) { - tree newlabel = map (cur->tree_label, data); - n_array[i]->tree_label = newlabel; + case ERT_TRY: + if (cur->u.try.catch) + REMAP (cur->u.try.catch); + if (cur->u.try.last_catch) + REMAP (cur->u.try.last_catch); + break; + + case ERT_CATCH: + if (cur->u.catch.next_catch) + REMAP (cur->u.catch.next_catch); + if (cur->u.catch.prev_catch) + REMAP (cur->u.catch.prev_catch); + break; + + case ERT_CLEANUP: + if (cur->u.cleanup.prev_try) + REMAP (cur->u.cleanup.prev_try); + else + cur->u.cleanup.prev_try = prev_try; + break; + + default: + break; } - else - n_array[i]->tree_label = NULL; + +#undef REMAP } - for (i = 1; i <= ifun_last_region_number; ++i) + + return eh_offset; +} + +/* Return true if REGION_A is outer to REGION_B in IFUN. 
*/ + +bool +eh_region_outer_p (struct function *ifun, int region_a, int region_b) +{ + struct eh_region *rp_a, *rp_b; + + gcc_assert (ifun->eh->last_region_number > 0); + gcc_assert (ifun->eh->region_tree); + + rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a); + rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b); + gcc_assert (rp_a != NULL); + gcc_assert (rp_b != NULL); + + do { - cur = VEC_index (eh_region, ifun->eh->region_array, i); - if (!cur || cur->region_number != i) - continue; - duplicate_eh_region_2 (cur, n_array, prev_try); + if (rp_a == rp_b) + return true; + rp_b = rp_b->outer; } - - root = n_array[ifun->eh->region_tree->region_number]; - gcc_assert (root->outer == NULL); - if (outer_region > 0) - { - struct eh_region *cur - = VEC_index (eh_region, cfun->eh->region_array, outer_region); - struct eh_region *p = cur->inner; + while (rp_b); - if (p) - { - while (p->next_peer) - p = p->next_peer; - p->next_peer = root; - } - else - cur->inner = root; - for (i = 1; i <= ifun_last_region_number; ++i) - if (n_array[i] && n_array[i]->outer == NULL) - n_array[i]->outer = cur; + return false; +} + +/* Return region number of region that is outer to both if REGION_A and + REGION_B in IFUN. */ + +int +eh_region_outermost (struct function *ifun, int region_a, int region_b) +{ + struct eh_region *rp_a, *rp_b; + sbitmap b_outer; + + gcc_assert (ifun->eh->last_region_number > 0); + gcc_assert (ifun->eh->region_tree); + + rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a); + rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b); + gcc_assert (rp_a != NULL); + gcc_assert (rp_b != NULL); + + b_outer = sbitmap_alloc (ifun->eh->last_region_number + 1); + sbitmap_zero (b_outer); + + do + { + SET_BIT (b_outer, rp_b->region_number); + rp_b = rp_b->outer; } - else + while (rp_b); + + do { - struct eh_region *p = cfun->eh->region_tree; - if (p) + if (TEST_BIT (b_outer, rp_a->region_number)) { - while (p->next_peer) - p = p->next_peer; - p->next_peer = root; + sbitmap_free (b_outer); + return rp_a->region_number; } - else - cfun->eh->region_tree = root; + rp_a = rp_a->outer; } - - free (n_array); - - i = cfun->eh->last_region_number; - cfun->eh->last_region_number = i + ifun_last_region_number; - - return i; + while (rp_a); + + sbitmap_free (b_outer); + return -1; } static int @@ -1122,7 +1234,7 @@ add_ttypes_entry (htab_t ttypes_hash, tree type) { /* Filter value is a 1 based table index. */ - n = xmalloc (sizeof (*n)); + n = XNEW (struct ttypes_filter); n->t = type; n->filter = VEC_length (tree, cfun->eh->ttype_data) + 1; *slot = n; @@ -1150,7 +1262,7 @@ add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list) { /* Filter value is a -1 based byte index into a uleb128 buffer. */ - n = xmalloc (sizeof (*n)); + n = XNEW (struct ttypes_filter); n->t = list; n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1); *slot = n; @@ -1268,7 +1380,7 @@ emit_to_new_bb_before (rtx seq, rtx insn) edge_iterator ei; /* If there happens to be a fallthru edge (possibly created by cleanup_cfg - call), we don't want it to go into newly created landing pad or other EH + call), we don't want it to go into newly created landing pad or other EH construct. 
*/ for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); ) if (e->flags & EDGE_FALLTHRU) @@ -1339,7 +1451,7 @@ build_post_landing_pads (void) emit_cmp_and_jump_insns (cfun->eh->filter, GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)), - EQ, NULL_RTX, + EQ, NULL_RTX, targetm.eh_return_filter_mode (), 0, c->label); tp_node = TREE_CHAIN (tp_node); @@ -1372,7 +1484,7 @@ build_post_landing_pads (void) emit_cmp_and_jump_insns (cfun->eh->filter, GEN_INT (region->u.allowed.filter), - EQ, NULL_RTX, + EQ, NULL_RTX, targetm.eh_return_filter_mode (), 0, region->label); /* We delay the generation of the _Unwind_Resume until we generate @@ -1552,7 +1664,7 @@ dw2_build_landing_pads (void) emit_move_insn (cfun->eh->exc_ptr, gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0))); emit_move_insn (cfun->eh->filter, - gen_rtx_REG (targetm.eh_return_filter_mode (), + gen_rtx_REG (targetm.eh_return_filter_mode (), EH_RETURN_DATA_REGNO (1))); seq = get_insns (); @@ -1957,8 +2069,7 @@ sjlj_build_landing_pads (void) { struct sjlj_lp_info *lp_info; - lp_info = xcalloc (cfun->eh->last_region_number + 1, - sizeof (struct sjlj_lp_info)); + lp_info = XCNEWVEC (struct sjlj_lp_info, cfun->eh->last_region_number + 1); if (sjlj_find_directly_reachable_regions (lp_info)) { @@ -2451,7 +2562,7 @@ reachable_next_level (struct eh_region *region, tree type_thrown, /* Here we end our search, since no exceptions may propagate. If we've touched down at some landing pad previous, then the explicit function call we generated may be used. Otherwise - the call is made by the runtime. + the call is made by the runtime. Before inlining, do not perform this optimization. We may inline a subroutine that contains handlers, and that will @@ -2696,7 +2807,7 @@ can_throw_external (rtx insn) /* Set TREE_NOTHROW and cfun->all_throwers_are_sibcalls. */ -void +unsigned int set_nothrow_function_flags (void) { rtx insn; @@ -2712,7 +2823,7 @@ set_nothrow_function_flags (void) cfun->all_throwers_are_sibcalls = 1; if (! flag_exceptions) - return; + return 0; for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) if (can_throw_external (insn)) @@ -2722,7 +2833,7 @@ set_nothrow_function_flags (void) if (!CALL_P (insn) || !SIBLING_CALL_P (insn)) { cfun->all_throwers_are_sibcalls = 0; - return; + return 0; } } @@ -2735,9 +2846,10 @@ set_nothrow_function_flags (void) if (!CALL_P (insn) || !SIBLING_CALL_P (insn)) { cfun->all_throwers_are_sibcalls = 0; - return; + return 0; } } + return 0; } struct tree_opt_pass pass_set_nothrow_function_flags = @@ -3095,7 +3207,7 @@ collect_one_action_chain (htab_t ar_hash, struct eh_region *region) Add a cleanup action to the chain to catch these. */ else if (next <= 0) next = add_action_record (ar_hash, 0, 0); - + return add_action_record (ar_hash, region->u.allowed.filter, next); case ERT_MUST_NOT_THROW: @@ -3143,7 +3255,7 @@ add_call_site (rtx landing_pad, int action) The new note numbers will not refer to region numbers, but instead to call site entries. 
*/ -void +unsigned int convert_to_eh_region_ranges (void) { rtx insn, iter, note; @@ -3155,7 +3267,7 @@ convert_to_eh_region_ranges (void) int call_site = 0; if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL) - return; + return 0; VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data"); @@ -3264,6 +3376,7 @@ convert_to_eh_region_ranges (void) } htab_delete (ar_hash); + return 0; } struct tree_opt_pass pass_convert_to_eh_region_ranges = @@ -3425,35 +3538,41 @@ sjlj_output_call_site_table (void) call_site_base += n; } -/* Return the default value of exception_section. */ +#ifndef TARGET_UNWIND_INFO +/* Switch to the section that should be used for exception tables. */ -section * -default_exception_section (void) +static void +switch_to_exception_section (void) { - if (targetm.have_named_sections) + if (exception_section == 0) { - int flags; - - if (EH_TABLES_CAN_BE_READ_ONLY) + if (targetm.have_named_sections) { - int tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1); - - flags = (! flag_pic - || ((tt_format & 0x70) != DW_EH_PE_absptr - && (tt_format & 0x70) != DW_EH_PE_aligned)) - ? 0 : SECTION_WRITE; + int flags; + + if (EH_TABLES_CAN_BE_READ_ONLY) + { + int tt_format = + ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1); + flags = ((! flag_pic + || ((tt_format & 0x70) != DW_EH_PE_absptr + && (tt_format & 0x70) != DW_EH_PE_aligned)) + ? 0 : SECTION_WRITE); + } + else + flags = SECTION_WRITE; + exception_section = get_section (".gcc_except_table", flags, NULL); } else - flags = SECTION_WRITE; - return get_section (".gcc_except_table", flags, NULL); + exception_section = flag_pic ? data_section : readonly_data_section; } - else - return flag_pic ? data_section : readonly_data_section; + switch_to_section (exception_section); } +#endif /* Output a reference from an exception table to the type_info object TYPE. - TT_FORMAT and TT_FORMAT_SIZE descibe the DWARF encoding method used for + TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for the value. */ static void @@ -3530,9 +3649,12 @@ output_function_exception_table (void) /* Note that varasm still thinks we're in the function's code section. The ".endp" directive that will immediately follow will take us back. */ #else - switch_to_section (exception_section); + switch_to_exception_section (); #endif + /* If the target wants a label to begin the table, emit it here. */ + targetm.asm_out.except_table_label (asm_out_file); + have_tt_data = (VEC_length (tree, cfun->eh->ttype_data) > 0 || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0); @@ -3700,7 +3822,7 @@ get_eh_throw_stmt_table (struct function *fun) } /* Dump EH information to OUT. */ -void +void dump_eh_tree (FILE *out, struct function *fun) { struct eh_region *i; @@ -3746,7 +3868,7 @@ dump_eh_tree (FILE *out, struct function *fun) /* Verify some basic invariants on EH datastructures. Could be extended to catch more. */ -void +void verify_eh_tree (struct function *fun) { struct eh_region *i, *outer = NULL; @@ -3851,19 +3973,20 @@ gate_handle_eh (void) } /* Complete generation of exception handling code. 
*/ -static void +static unsigned int rest_of_handle_eh (void) { - cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL); + cleanup_cfg (CLEANUP_NO_INSN_DEL); finish_eh_generation (); - cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL); + cleanup_cfg (CLEANUP_NO_INSN_DEL); + return 0; } struct tree_opt_pass pass_rtl_eh = { "eh", /* name */ - gate_handle_eh, /* gate */ - rest_of_handle_eh, /* execute */ + gate_handle_eh, /* gate */ + rest_of_handle_eh, /* execute */ NULL, /* sub */ NULL, /* next */ 0, /* static_pass_number */
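
Annotation (not part of the patch): the new eh_region_outermost function introduced above finds the innermost region that encloses both of its arguments by marking every region on the outer chain of one argument in a bitmap and then walking the outer chain of the other argument until it reaches a marked region. The standalone sketch below reproduces that walk with a plain bool array instead of GCC's sbitmap and region_array; the demo_region type, the demo_outermost name, and the sample tree in main are illustrative assumptions and do not appear in the patch.

/* Minimal sketch of the ancestor-bitmap walk used by eh_region_outermost.
   Types and names here are hypothetical, not GCC internals.  */
#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>

struct demo_region
{
  int region_number;            /* 1-based, as region numbers are in cfun->eh.  */
  struct demo_region *outer;    /* Enclosing region; NULL at the root.  */
};

/* Return the number of the innermost region enclosing both A and B,
   or -1 if they share no ancestor.  LAST is the largest region number.  */
static int
demo_outermost (struct demo_region *a, struct demo_region *b, int last)
{
  bool *b_outer = calloc (last + 1, sizeof (bool));
  int result = -1;

  /* Mark B and every region that encloses it.  */
  for (; b; b = b->outer)
    b_outer[b->region_number] = true;

  /* Walk outward from A; the first marked region encloses both.  */
  for (; a; a = a->outer)
    if (b_outer[a->region_number])
      {
        result = a->region_number;
        break;
      }

  free (b_outer);
  return result;
}

int
main (void)
{
  /* Region tree: 1 encloses 2 and 3; 3 encloses 4.  */
  struct demo_region r1 = { 1, NULL };
  struct demo_region r2 = { 2, &r1 };
  struct demo_region r3 = { 3, &r1 };
  struct demo_region r4 = { 4, &r3 };

  printf ("%d\n", demo_outermost (&r2, &r4, 4));  /* Prints 1.  */
  printf ("%d\n", demo_outermost (&r4, &r3, 4));  /* Prints 3.  */
  return 0;
}

The function in the patch sizes its sbitmap by ifun->eh->last_region_number, looks regions up through VEC_index on the region_array, and returns -1 when the walk reaches the root without a hit, which is the behavior modeled here.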