1 /* Implements exception handling.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
5 Contributed by Mike Stump <mrs@cygnus.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
24 /* An exception is an event that can be signaled from within a
25 function. This event can then be "caught" or "trapped" by the
26 callers of this function. This potentially allows program flow to
27 be transferred to any arbitrary code associated with a function call
28 several levels up the stack.
30 The intended use for this mechanism is for signaling "exceptional
31 events" in an out-of-band fashion, hence its name. The C++ language
32 (and many other OO-styled or functional languages) practically
33 requires such a mechanism, as otherwise it becomes very difficult
34 or even impossible to signal failure conditions in complex
35 situations. The traditional C++ example is when an error occurs in
36 the process of constructing an object; without such a mechanism, it
37 is impossible to signal that the error occurs without adding global
38 state variables and error checks around every object construction.
40 The act of causing this event to occur is referred to as "throwing
41 an exception". (Alternate terms include "raising an exception" or
42 "signaling an exception".) The term "throw" is used because control
43 is returned to the callers of the function that is signaling the
44 exception, and thus there is the concept of "throwing" the
45 exception up the call stack.
47 [ Add updated documentation on how to use this. ] */
52 #include "coretypes.h"
60 #include "insn-config.h"
62 #include "integrate.h"
63 #include "hard-reg-set.h"
64 #include "basic-block.h"
66 #include "dwarf2asm.h"
67 #include "dwarf2out.h"
75 #include "langhooks.h"
77 #include "diagnostic.h"
78 #include "tree-pass.h"
81 /* Provide defaults for stuff that may not be defined when using
   sjlj exceptions. */
83 #ifndef EH_RETURN_DATA_REGNO
84 #define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
/* NOTE(review): the matching #endif is elided from this listing
   (numbering jumps 84 -> 87 below).  */
/* Language-specific hook pointers; a front end installs these to
   customize EH lowering for its semantics.  */
87 /* Protect cleanup actions with must-not-throw regions, with a call
88 to the given failure handler. */
89 gimple (*lang_protect_cleanup_actions) (void);
91 /* Return true if type A catches type B. */
92 int (*lang_eh_type_covers) (tree a, tree b);
94 /* Map a type to a runtime object to match type. */
95 tree (*lang_eh_runtime_type) (tree);
97 /* A hash table of label to region number. */
/* NOTE(review): the struct's opening brace and its label member are
   elided from this listing (numbering jumps 99 -> 102).  */
99 struct ehl_map_entry GTY(())
102 struct eh_region *region;
/* Base offset added to call-site indices when emitting tables.  */
105 static GTY(()) int call_site_base;
106 static GTY ((param_is (union tree_node)))
107 htab_t type_to_runtime_map;
109 /* Describe the SjLj_Function_Context structure. */
/* Cached RECORD_TYPE and byte offsets of its fields, filled in by
   the SjLj setup code; see unwind-sjlj.c for the runtime layout.  */
110 static GTY(()) tree sjlj_fc_type_node;
111 static int sjlj_fc_call_site_ofs;
112 static int sjlj_fc_data_ofs;
113 static int sjlj_fc_personality_ofs;
114 static int sjlj_fc_lsda_ofs;
115 static int sjlj_fc_jbuf_ofs;
117 /* Describes one exception region. */
/* NOTE(review): numbering gaps show that several members are elided
   from this listing -- among them region_number, the aka bitmap, the
   type enum's full value list, tree_label, label, landing_pad and
   resume, all of which later code in this file references.  */
118 struct eh_region GTY(())
120 /* The immediately surrounding region. */
121 struct eh_region *outer;
123 /* The list of immediately contained regions. */
124 struct eh_region *inner;
125 struct eh_region *next_peer;
127 /* An identifier for this region. */
130 /* When a region is deleted, its parents inherit the REG_EH_REGION
131 numbers already assigned. */
134 /* Each region does exactly one thing. */
141 ERT_ALLOWED_EXCEPTIONS,
146 /* Holds the action to perform based on the preceding type. */
148 /* A list of catch blocks, a surrounding try block,
149 and the label for continuing after a catch. */
150 struct eh_region_u_try {
151 struct eh_region *eh_catch;
152 struct eh_region *last_catch;
153 } GTY ((tag ("ERT_TRY"))) eh_try;
155 /* The list through the catch handlers, the list of type objects
156 matched, and the list of associated filters. */
157 struct eh_region_u_catch {
158 struct eh_region *next_catch;
159 struct eh_region *prev_catch;
162 } GTY ((tag ("ERT_CATCH"))) eh_catch;
164 /* A tree_list of allowed types. */
165 struct eh_region_u_allowed {
168 } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;
170 /* The type given by a call to "throw foo();", or discovered
   otherwise. */
172 struct eh_region_u_throw {
174 } GTY ((tag ("ERT_THROW"))) eh_throw;
176 /* Retain the cleanup expression even after expansion so that
177 we can match up fixup regions. */
178 struct eh_region_u_cleanup {
179 struct eh_region *prev_try;
180 } GTY ((tag ("ERT_CLEANUP"))) cleanup;
181 } GTY ((desc ("%0.type"))) u;
183 /* Entry point for this region's handler before landing pads are built. */
187 /* Entry point for this region's handler from the runtime eh library. */
190 /* Entry point for this region's handler from an inner region. */
191 rtx post_landing_pad;
193 /* The RESX insn for handing off control to the next outermost handler,
   if appropriate. */
197 /* True if something in this region may throw. */
198 unsigned may_contain_throw : 1;
201 typedef struct eh_region *eh_region;
/* One entry of the LSDA call-site table; members elided here.  */
203 struct call_site_record GTY(())
209 DEF_VEC_P(eh_region);
210 DEF_VEC_ALLOC_P(eh_region, gc);
211 DEF_VEC_ALLOC_P(eh_region, heap);
213 /* Used to save exception status for each function. */
214 struct eh_status GTY(())
216 /* The tree of all regions for this function. */
217 struct eh_region *region_tree;
219 /* The same information as an indexable array. */
220 VEC(eh_region,gc) *region_array;
/* Highest region number handed out so far; region_array is sized
   last_region_number + 1 (slot 0 unused).  */
221 int last_region_number;
/* Map from throwing statement to its EH region number.  */
223 htab_t GTY((param_is (struct throw_stmt_node))) throw_stmt_table;
226 static int t2r_eq (const void *, const void *);
227 static hashval_t t2r_hash (const void *);
228 static void add_type_for_runtime (tree);
229 static tree lookup_type_for_runtime (tree);
231 static int ttypes_filter_eq (const void *, const void *);
232 static hashval_t ttypes_filter_hash (const void *);
233 static int ehspec_filter_eq (const void *, const void *);
234 static hashval_t ehspec_filter_hash (const void *);
235 static int add_ttypes_entry (htab_t, tree);
236 static int add_ehspec_entry (htab_t, htab_t, tree);
237 static void assign_filter_values (void);
238 static void build_post_landing_pads (void);
239 static void connect_post_landing_pads (void);
240 static void dw2_build_landing_pads (void);
243 static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
244 static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
245 static void sjlj_mark_call_sites (struct sjlj_lp_info *);
246 static void sjlj_emit_function_enter (rtx);
247 static void sjlj_emit_function_exit (void);
248 static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
249 static void sjlj_build_landing_pads (void);
251 static hashval_t ehl_hash (const void *);
252 static int ehl_eq (const void *, const void *);
253 static void add_ehl_entry (rtx, struct eh_region *);
254 static void remove_exception_handler_label (rtx);
255 static void remove_eh_handler (struct eh_region *);
256 static void remove_eh_handler_and_replace (struct eh_region *,
258 static int for_each_eh_label_1 (void **, void *);
260 /* The return value of reachable_next_level. */
263 /* The given exception is not processed by the given region. */
265 /* The given exception may need processing by the given region. */
267 /* The given exception is completely processed by the given region. */
269 /* The given exception is completely processed by the runtime. */
273 struct reachable_info;
274 static enum reachable_code reachable_next_level (struct eh_region *, tree,
275 struct reachable_info *, bool);
277 static int action_record_eq (const void *, const void *);
278 static hashval_t action_record_hash (const void *);
279 static int add_action_record (htab_t, int, int);
280 static int collect_one_action_chain (htab_t, struct eh_region *);
281 static int add_call_site (rtx, int);
283 static void push_uleb128 (varray_type *, unsigned int);
284 static void push_sleb128 (varray_type *, int);
285 #ifndef HAVE_AS_LEB128
286 static int dw2_size_of_call_site_table (void);
287 static int sjlj_size_of_call_site_table (void);
289 static void dw2_output_call_site_table (void);
290 static void sjlj_output_call_site_table (void);
293 /* Routine to see if exception handling is turned on.
294 DO_WARN is nonzero if we want to inform the user that exception
295 handling is turned off.
297 This is used to ensure that -fexceptions has been specified if the
298 compiler tries to use any exception-specific functions. */
/* NOTE(review): the return type, braces and the final return of this
   function are elided from this listing (numbering gaps).  */
301 doing_eh (int do_warn)
303 if (! flag_exceptions)
/* Warn at most once per compilation, and only when asked to.  */
305 static int warned = 0;
306 if (! warned && do_warn)
308 error ("exception handling disabled, use -fexceptions to enable")
320 if (! flag_exceptions)
323 type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);
325 /* Create the SjLj_Function_Context structure. This should match
326 the definition in unwind-sjlj.c. */
327 if (USING_SJLJ_EXCEPTIONS)
329 tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
331 sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);
333 f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
334 build_pointer_type (sjlj_fc_type_node));
335 DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;
337 f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
339 DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
341 tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
342 tmp = build_array_type (lang_hooks.types.type_for_mode
343 (targetm.unwind_word_mode (), 1),
345 f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
346 DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
348 f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
350 DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;
352 f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
354 DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
356 #ifdef DONT_USE_BUILTIN_SETJMP
358 tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
360 /* Should be large enough for most systems, if it is not,
361 JMP_BUF_SIZE should be defined with the proper value. It will
362 also tend to be larger than necessary for most systems, a more
363 optimal port will define JMP_BUF_SIZE. */
364 tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
367 /* builtin_setjmp takes a pointer to 5 words. */
368 tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
370 tmp = build_index_type (tmp);
371 tmp = build_array_type (ptr_type_node, tmp);
372 f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
373 #ifdef DONT_USE_BUILTIN_SETJMP
374 /* We don't know what the alignment requirements of the
375 runtime's jmp_buf has. Overestimate. */
376 DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
377 DECL_USER_ALIGN (f_jbuf) = 1;
379 DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
381 TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
382 TREE_CHAIN (f_prev) = f_cs;
383 TREE_CHAIN (f_cs) = f_data;
384 TREE_CHAIN (f_data) = f_per;
385 TREE_CHAIN (f_per) = f_lsda;
386 TREE_CHAIN (f_lsda) = f_jbuf;
388 layout_type (sjlj_fc_type_node);
390 /* Cache the interesting field offsets so that we have
391 easy access from rtl. */
392 sjlj_fc_call_site_ofs
393 = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
394 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
396 = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
397 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
398 sjlj_fc_personality_ofs
399 = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
400 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
402 = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
403 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
405 = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
406 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
/* Allocate the zero-initialized per-function EH state (cfun->eh).  */
411 init_eh_for_function (void)
413 cfun->eh = GGC_CNEW (struct eh_status);
416 /* Routines to generate the exception tree somewhat directly.
417 These are used from tree-eh.c when processing exception related
418 nodes during tree optimization. */
/* Allocate a new blank region of TYPE as a leaf under OUTER, or at
   the root of cfun's region tree when OUTER is NULL, and assign it
   the next region number.  NOTE(review): elided lines here likely
   held the #endif and the if/else braces -- confirm against the
   full source.  */
420 static struct eh_region *
421 gen_eh_region (enum eh_region_type type, struct eh_region *outer)
423 struct eh_region *new_eh;
425 #ifdef ENABLE_CHECKING
426 gcc_assert (doing_eh (0));
429 /* Insert a new blank region as a leaf in the tree. */
430 new_eh = GGC_CNEW (struct eh_region);
432 new_eh->outer = outer;
435 new_eh->next_peer = outer->inner;
436 outer->inner = new_eh;
440 new_eh->next_peer = cfun->eh->region_tree;
441 cfun->eh->region_tree = new_eh;
444 new_eh->region_number = ++cfun->eh->last_region_number;
/* Create an ERT_CLEANUP region inside OUTER, recording PREV_TRY, the
   innermost enclosing try region at creation time.  NOTE(review):
   the return statement is elided from this listing.  */
450 gen_eh_region_cleanup (struct eh_region *outer, struct eh_region *prev_try)
452 struct eh_region *cleanup = gen_eh_region (ERT_CLEANUP, outer);
453 cleanup->u.cleanup.prev_try = prev_try;
/* Create a new ERT_TRY region inside OUTER.  */
458 gen_eh_region_try (struct eh_region *outer)
460 return gen_eh_region (ERT_TRY, outer);
/* Create an ERT_CATCH region handling TYPE_OR_LIST -- a single type
   or a TREE_LIST of types -- and append it to the catch chain of try
   region T.  Each caught type is registered with the runtime-type
   map.  */
464 gen_eh_region_catch (struct eh_region *t, tree type_or_list)
466 struct eh_region *c, *l;
467 tree type_list, type_node;
469 /* Ensure to always end up with a type list to normalize further
470 processing, then register each type against the runtime types map. */
471 type_list = type_or_list;
474 if (TREE_CODE (type_or_list) != TREE_LIST)
475 type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
477 type_node = type_list;
478 for (; type_node; type_node = TREE_CHAIN (type_node))
479 add_type_for_runtime (TREE_VALUE (type_node));
/* Link the new catch as a peer of T and as the tail of T's chain.
   NOTE(review): the branch taken when the chain is empty appears
   partially elided here.  */
482 c = gen_eh_region (ERT_CATCH, t->outer);
483 c->u.eh_catch.type_list = type_list;
484 l = t->u.eh_try.last_catch;
485 c->u.eh_catch.prev_catch = l;
487 l->u.eh_catch.next_catch = c;
489 t->u.eh_try.eh_catch = c;
490 t->u.eh_try.last_catch = c;
/* Create an ERT_ALLOWED_EXCEPTIONS region inside OUTER for the
   TREE_LIST of ALLOWED types, registering each type with the
   runtime-type map.  NOTE(review): the return statement is elided
   from this listing.  */
496 gen_eh_region_allowed (struct eh_region *outer, tree allowed)
498 struct eh_region *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
499 region->u.allowed.type_list = allowed;
501 for (; allowed ; allowed = TREE_CHAIN (allowed))
502 add_type_for_runtime (TREE_VALUE (allowed));
/* Create a new ERT_MUST_NOT_THROW region inside OUTER.  */
508 gen_eh_region_must_not_throw (struct eh_region *outer)
510 return gen_eh_region (ERT_MUST_NOT_THROW, outer);
/* Return REGION's identifying number.  */
514 get_eh_region_number (struct eh_region *region)
516 return region->region_number;
/* Return nonzero if REGION may contain a throw.  */
520 get_eh_region_may_contain_throw (struct eh_region *region)
522 return region->may_contain_throw;
/* Return the tree-level handler label of REGION.  */
526 get_eh_region_tree_label (struct eh_region *region)
528 return region->tree_label;
/* As above, but look the region up by number in cfun's array.  */
532 get_eh_region_no_tree_label (int region)
534 return VEC_index (eh_region, cfun->eh->region_array, region)->tree_label;
/* Set the tree-level handler label of REGION to LAB.  */
538 set_eh_region_tree_label (struct eh_region *region, tree lab)
540 region->tree_label = lab;
/* Expand a RESX_EXPR: emit the RESX jump insn for the region named
   by EXP's integer operand and record the insn in that region.  */
544 expand_resx_expr (tree exp)
546 int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
547 struct eh_region *reg = VEC_index (eh_region,
548 cfun->eh->region_array, region_nr);
/* A region may own at most one RESX insn.  */
550 gcc_assert (!reg->resume);
551 do_pending_stack_adjust ();
552 reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
556 /* Note that the current EH region (if any) may contain a throw, or a
557 call to a function which itself may contain a throw. */
560 note_eh_region_may_contain_throw (struct eh_region *region)
/* Propagate the flag outward, stopping at the first region already
   marked -- its ancestors must already be marked too.  */
562 while (region && !region->may_contain_throw)
564 region->may_contain_throw = 1;
565 region = region->outer;
570 /* Return an rtl expression for a pointer to the exception object
   within a handler. */
/* Lazily create and cache the pseudo that holds it.  */
574 get_exception_pointer (void)
576 if (! crtl->eh.exc_ptr)
577 crtl->eh.exc_ptr = gen_reg_rtx (ptr_mode);
578 return crtl->eh.exc_ptr;
581 /* Return an rtl expression for the exception dispatch filter
   within a handler. */
/* Lazily create and cache the pseudo that holds it.  */
585 get_exception_filter (void)
587 if (! crtl->eh.filter)
588 crtl->eh.filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
589 return crtl->eh.filter;
592 /* This section is for the exception handling specific optimization pass. */
594 /* Random access the exception region tree. */
/* Rebuild cfun->eh->region_array as a flat, region-number-indexed
   copy of the region tree, via a non-recursive tree walk.
   NOTE(review): the walk's stepping statements (descending to inner,
   advancing to next_peer, climbing to outer) are largely elided from
   this listing.  */
597 collect_eh_region_array (void)
601 i = cfun->eh->region_tree;
605 VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
606 cfun->eh->last_region_number + 1);
/* Slot 0 is unused; region numbers start at 1.  */
607 VEC_replace (eh_region, cfun->eh->region_array, 0, 0);
611 VEC_replace (eh_region, cfun->eh->region_array, i->region_number, i);
613 /* If there are sub-regions, process them. */
616 /* If there are peers, process them. */
617 else if (i->next_peer)
619 /* Otherwise, step back up the tree to the next peer. */
626 } while (i->next_peer == NULL);
632 /* R is MUST_NOT_THROW region that is not reachable via local
633 RESX instructions. It still must be kept in the tree in case runtime
634 can unwind through it, or we will eliminate the terminate call the
635 runtime would do otherwise. Return TRUE if R contains throwing statements
636 or some of the exceptions in inner regions can be unwound up to R.
638 CONTAINS_STMT is bitmap of all regions that contains some throwing
   statement.
641 Function looks O(n^3) at first sight. In fact the function is called at most
642 once for every MUST_NOT_THROW in EH tree from remove_unreachable_regions
643 Because the outer loop walking subregions does not dive in MUST_NOT_THROW,
644 the outer loop examines every region at most once. The inner loop
645 is doing unwinding from the throwing statement same way as we do during
646 CFG construction, so it is O(n^2) in size of EH tree, but O(n) in size
647 of CFG. In practice Eh trees are wide, not deep, so this is not
   a problem. */
/* NOTE(review): braces, return statements and loop-stepping code are
   elided throughout this body (numbering gaps).  */
651 can_be_reached_by_runtime (sbitmap contains_stmt, struct eh_region *r)
653 struct eh_region *i = r->inner;
/* R itself (or one of its aka numbers) contains a throwing stmt.  */
657 if (TEST_BIT (contains_stmt, r->region_number))
660 EXECUTE_IF_SET_IN_BITMAP (r->aka, 0, n, bi)
661 if (TEST_BIT (contains_stmt, n))
667 /* It is pointless to look into MUST_NOT_THROW
668 or dive into subregions. They never unwind up. */
669 if (i->type != ERT_MUST_NOT_THROW)
671 bool found = TEST_BIT (contains_stmt, i->region_number);
673 EXECUTE_IF_SET_IN_BITMAP (i->aka, 0, n, bi)
674 if (TEST_BIT (contains_stmt, n))
679 /* We have nested region that contains throwing statement.
680 See if resuming might lead up to the resx or we get locally
681 caught sooner. If we get locally caught sooner, we either
682 know region R is not reachable or it would have direct edge
683 from the EH resx and thus consider region reachable at
   that point. */
687 struct eh_region *i1 = i;
688 tree type_thrown = NULL_TREE;
690 if (i1->type == ERT_THROW)
692 type_thrown = i1->u.eh_throw.type;
/* Unwind outward from I toward R, as CFG construction would.  */
695 for (; i1 != r; i1 = i1->outer)
696 if (reachable_next_level (i1, type_thrown, NULL,
697 false) >= RNL_CAUGHT)
703 /* If there are sub-regions, process them. */
704 if (i->type != ERT_MUST_NOT_THROW && i->inner)
706 /* If there are peers, process them. */
707 else if (i->next_peer)
709 /* Otherwise, step back up the tree to the next peer. */
718 while (i->next_peer == NULL);
724 /* Bring region R to the root of tree. */
727 bring_to_root (struct eh_region *r)
729 struct eh_region **pp;
730 struct eh_region *outer = r->outer;
/* Unlink R from its parent's child list, then push it onto the
   top-level peer list.  NOTE(review): the early return for an
   already-rooted R and the unlink assignment (*pp = r->next_peer)
   are elided from this listing.  */
733 for (pp = &outer->inner; *pp != r; pp = &(*pp)->next_peer)
737 r->next_peer = cfun->eh->region_tree;
738 cfun->eh->region_tree = r;
741 /* Remove all regions whose labels are not reachable.
742 REACHABLE is bitmap of all regions that are used by the function
743 CONTAINS_STMT is bitmap of all regions that contains stmt (or NULL). */
/* NOTE(review): braces, switch scaffolding and several statements are
   elided from this body (numbering gaps).  */
746 remove_unreachable_regions (sbitmap reachable, sbitmap contains_stmt)
750 VEC(eh_region,heap) *must_not_throws = VEC_alloc (eh_region, heap, 16);
751 struct eh_region *local_must_not_throw = NULL;
752 struct eh_region *first_must_not_throw = NULL;
754 for (i = cfun->eh->last_region_number; i > 0; --i)
756 r = VEC_index (eh_region, cfun->eh->region_array, i);
/* Skip array slots that alias another region's number.  */
757 if (!r || r->region_number != i)
759 if (!TEST_BIT (reachable, i) && !r->resume)
763 r->tree_label = NULL;
767 /* Don't remove ERT_THROW regions if their outer region
   is reachable. */
769 if (r->outer && TEST_BIT (reachable, r->outer->region_number))
772 case ERT_MUST_NOT_THROW:
773 /* MUST_NOT_THROW regions are implementable solely in the
774 runtime, but we need them when inlining function.
776 Keep them if outer region is not MUST_NOT_THROW as well
777 and if they contain some statement that might unwind through
   them. */
779 if ((!r->outer || r->outer->type != ERT_MUST_NOT_THROW)
781 || can_be_reached_by_runtime (contains_stmt, r)))
786 /* TRY regions are reachable if any of its CATCH regions
   are reachable. */
789 for (c = r->u.eh_try.eh_catch; c;
790 c = c->u.eh_catch.next_catch)
791 if (TEST_BIT (reachable, c->region_number))
806 fprintf (dump_file, "Removing unreachable eh region %i\n",
808 remove_eh_handler (r);
810 else if (r->type == ERT_MUST_NOT_THROW)
812 if (!first_must_not_throw)
813 first_must_not_throw = r;
814 VEC_safe_push (eh_region, heap, must_not_throws, r);
818 if (r->type == ERT_MUST_NOT_THROW)
820 if (!local_must_not_throw)
821 local_must_not_throw = r;
823 VEC_safe_push (eh_region, heap, must_not_throws, r);
827 /* MUST_NOT_THROW regions without local handler are all the same; they
828 trigger terminate call in runtime.
829 MUST_NOT_THROW handled locally can differ in debug info associated
830 to std::terminate () call or if one is coming from Java and other
831 from C++ whether they call terminate or abort.
833 We merge all MUST_NOT_THROW regions handled by the run-time into one.
834 We also bring all local MUST_NOT_THROW regions to the roots of EH tree
835 (since unwinding never continues to the outer region anyway).
836 If MUST_NOT_THROW with local handler is present in the tree, we use
837 that region to merge into, since it will remain in tree anyway;
838 otherwise we use first MUST_NOT_THROW.
840 Merging of locally handled regions needs changes to the CFG. Crossjumping
841 should take care of this, by looking at the actual code and
842 ensuring that the cleanup actions are really the same. */
844 if (local_must_not_throw)
845 first_must_not_throw = local_must_not_throw;
847 for (i = 0; VEC_iterate (eh_region, must_not_throws, i, r); i++)
849 if (!r->label && !r->tree_label && r != first_must_not_throw)
852 fprintf (dump_file, "Replacing MUST_NOT_THROW region %i by %i\n",
854 first_must_not_throw->region_number);
855 remove_eh_handler_and_replace (r, first_must_not_throw);
856 first_must_not_throw->may_contain_throw |= r->may_contain_throw;
861 #ifdef ENABLE_CHECKING
862 verify_eh_tree (cfun);
864 VEC_free (eh_region, heap, must_not_throws);
867 /* Return array mapping LABEL_DECL_UID to region such that region's tree_label
868 is identical to label. */
/* NOTE(review): iterating from the highest region number downward
   means the lowest-numbered (outermost) matching region wins; the
   value stored per label appears elided in this listing.  */
871 label_to_region_map (void)
873 VEC(int,heap) * label_to_region = NULL;
876 VEC_safe_grow_cleared (int, heap, label_to_region,
877 cfun->cfg->last_label_uid + 1);
878 for (i = cfun->eh->last_region_number; i > 0; --i)
880 struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
881 if (r && r->region_number == i
882 && r->tree_label && LABEL_DECL_UID (r->tree_label) >= 0)
884 VEC_replace (int, label_to_region, LABEL_DECL_UID (r->tree_label),
888 return label_to_region;
891 /* Return number of EH regions. */
893 num_eh_regions (void)
/* +1 accounts for the unused slot 0 of the region array.  */
895 return cfun->eh->last_region_number + 1;
898 /* Remove all regions whose labels are not reachable from insns. */
/* Build a map from insn UID to region number via each region's
   resume/label insns, mark regions referenced by any insn in INSNS,
   then delegate to remove_unreachable_regions.  NOTE(review): braces
   and some conditionals are elided from this listing.  */
901 rtl_remove_unreachable_regions (rtx insns)
903 int i, *uid_region_num;
908 uid_region_num = XCNEWVEC (int, get_max_uid ());
909 reachable = sbitmap_alloc (cfun->eh->last_region_number + 1);
910 sbitmap_zero (reachable);
912 for (i = cfun->eh->last_region_number; i > 0; --i)
914 r = VEC_index (eh_region, cfun->eh->region_array, i);
915 if (!r || r->region_number != i)
/* Each resume/label insn may belong to only one region.  */
920 gcc_assert (!uid_region_num[INSN_UID (r->resume)]);
921 uid_region_num[INSN_UID (r->resume)] = i;
925 gcc_assert (!uid_region_num[INSN_UID (r->label)]);
926 uid_region_num[INSN_UID (r->label)] = i;
930 for (insn = insns; insn; insn = NEXT_INSN (insn))
931 SET_BIT (reachable, uid_region_num[INSN_UID (insn)]);
933 remove_unreachable_regions (reachable, NULL);
935 sbitmap_free (reachable);
936 free (uid_region_num);
939 /* Set up EH labels for RTL. */
942 convert_from_eh_region_ranges (void)
944 rtx insns = get_insns ();
945 int i, n = cfun->eh->last_region_number;
947 /* Most of the work is already done at the tree level. All we need to
948 do is collect the rtl labels that correspond to the tree labels
950 we allocated earlier. */
951 for (i = 1; i <= n; ++i)
953 struct eh_region *region;
955 region = VEC_index (eh_region, cfun->eh->region_array, i);
956 if (region && region->tree_label)
957 region->label = DECL_RTL_IF_SET (region->tree_label);
960 rtl_remove_unreachable_regions (insns);
/* Record LABEL -> REGION in crtl's exception-handler-label map and
   mark the label so later passes cannot delete it.  */
964 add_ehl_entry (rtx label, struct eh_region *region)
966 struct ehl_map_entry **slot, *entry;
968 LABEL_PRESERVE_P (label) = 1;
970 entry = GGC_NEW (struct ehl_map_entry);
971 entry->label = label;
972 entry->region = region;
974 slot = (struct ehl_map_entry **)
975 htab_find_slot (crtl->eh.exception_handler_label_map, entry, INSERT);
977 /* Before landing pad creation, each exception handler has its own
978 label. After landing pad creation, the exception handlers may
979 share landing pads. This is ok, since maybe_remove_eh_handler
980 only requires the 1-1 mapping before landing pad creation. */
981 gcc_assert (!*slot || crtl->eh.built_landing_pads);
/* (Re)build crtl->eh.exception_handler_label_map from the region
   array, using landing-pad labels once pads have been built and
   plain handler labels before that.  NOTE(review): braces and some
   early-out code are elided from this listing.  */
987 find_exception_handler_labels (void)
991 if (crtl->eh.exception_handler_label_map)
992 htab_empty (crtl->eh.exception_handler_label_map);
995 /* ??? The expansion factor here (3/2) must be greater than the htab
996 occupancy factor (4/3) to avoid unnecessary resizing. */
997 crtl->eh.exception_handler_label_map
998 = htab_create_ggc (cfun->eh->last_region_number * 3 / 2,
999 ehl_hash, ehl_eq, NULL);
1002 if (cfun->eh->region_tree == NULL)
1005 for (i = cfun->eh->last_region_number; i > 0; --i)
1007 struct eh_region *region;
1010 region = VEC_index (eh_region, cfun->eh->region_array, i);
1011 if (! region || region->region_number != i)
1013 if (crtl->eh.built_landing_pads)
1014 lab = region->landing_pad;
1016 lab = region->label;
1019 add_ehl_entry (lab, region);
1022 /* For sjlj exceptions, need the return label to remain live until
1023 after landing pad generation. */
1024 if (USING_SJLJ_EXCEPTIONS && ! crtl->eh.built_landing_pads)
1025 add_ehl_entry (return_label, NULL);
1028 /* Returns true if the current function has exception handling regions. */
/* ERT_THROW regions don't count -- they carry type information, not
   handlers.  NOTE(review): the returns and part of the condition are
   elided from this listing.  */
1031 current_function_has_exception_handlers (void)
1035 for (i = cfun->eh->last_region_number; i > 0; --i)
1037 struct eh_region *region;
1039 region = VEC_index (eh_region, cfun->eh->region_array, i);
1041 && region->region_number == i
1042 && region->type != ERT_THROW)
1049 /* A subroutine of duplicate_eh_regions. Search the region tree under O
1050 for the minimum and maximum region numbers. Update *MIN and *MAX. */
/* Both a region's own number and every number in its aka bitmap
   contribute to the range.  NOTE(review): braces and the recursion
   guards are elided from this listing.  */
1053 duplicate_eh_regions_0 (eh_region o, int *min, int *max)
1059 i = bitmap_first_set_bit (o->aka);
1062 i = bitmap_last_set_bit (o->aka);
1066 if (o->region_number < *min)
1067 *min = o->region_number;
1068 if (o->region_number > *max)
1069 *max = o->region_number;
/* Recurse into children, then across the peer list.  */
1074 duplicate_eh_regions_0 (o, min, max);
1075 while (o->next_peer)
1078 duplicate_eh_regions_0 (o, min, max);
1083 /* A subroutine of duplicate_eh_regions. Copy the region tree under OLD.
1084 Root it at OUTER, and apply EH_OFFSET to the region number. Don't worry
1085 about the other internal pointers just yet, just the tree-like pointers. */
/* NOTE(review): the struct copy of *OLD into *N and several braces
   are elided from this listing.  */
1088 duplicate_eh_regions_1 (eh_region old, eh_region outer, int eh_offset)
1092 ret = n = GGC_NEW (struct eh_region);
1096 n->next_peer = NULL;
/* Shift each aka number by EH_OFFSET and register the copy in the
   region array under the shifted numbers.  */
1101 n->aka = BITMAP_GGC_ALLOC ();
1103 EXECUTE_IF_SET_IN_BITMAP (old->aka, 0, i, bi)
1105 bitmap_set_bit (n->aka, i + eh_offset);
1106 VEC_replace (eh_region, cfun->eh->region_array, i + eh_offset, n);
1110 n->region_number += eh_offset;
1111 VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);
/* Recursively copy children, then the peer chain.  */
1116 n = n->inner = duplicate_eh_regions_1 (old, ret, eh_offset);
1117 while (old->next_peer)
1119 old = old->next_peer;
1120 n = n->next_peer = duplicate_eh_regions_1 (old, ret, eh_offset);
1127 /* Duplicate the EH regions of IFUN, rooted at COPY_REGION, into current
1128 function and root the tree below OUTER_REGION. Remap labels using MAP
1129 callback. The special case of COPY_REGION of 0 means all regions. */
/* NOTE(review): braces, several assignments (e.g. the prev_try loop
   initializer near 1228-1229) and the return value are elided from
   this listing.  */
1132 duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map,
1133 void *data, int copy_region, int outer_region)
1135 eh_region cur, prev_try, outer, *splice;
1136 int i, min_region, max_region, eh_offset, cfun_last_region_number;
1141 #ifdef ENABLE_CHECKING
1142 verify_eh_tree (ifun);
1145 /* Find the range of region numbers to be copied. The interface we
1146 provide here mandates a single offset to find new number from old,
1147 which means we must look at the numbers present, instead of the
1148 count or something else. */
1149 if (copy_region > 0)
1151 min_region = INT_MAX;
1154 cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
1155 duplicate_eh_regions_0 (cur, &min_region, &max_region);
1158 min_region = 1, max_region = ifun->eh->last_region_number;
1159 num_regions = max_region - min_region + 1;
1160 cfun_last_region_number = cfun->eh->last_region_number;
1161 eh_offset = cfun_last_region_number + 1 - min_region;
1163 /* If we've not yet created a region array, do so now. */
1164 cfun->eh->last_region_number = cfun_last_region_number + num_regions;
1165 VEC_safe_grow_cleared (eh_region, gc, cfun->eh->region_array,
1166 cfun->eh->last_region_number + 1);
1168 /* Locate the spot at which to insert the new tree. */
1169 if (outer_region > 0)
1171 outer = VEC_index (eh_region, cfun->eh->region_array, outer_region);
1173 splice = &outer->inner;
1175 splice = &cfun->eh->region_tree;
1180 splice = &cfun->eh->region_tree;
1183 splice = &(*splice)->next_peer;
/* Nothing to copy: just account the reserved numbers to OUTER.  */
1185 if (!ifun->eh->region_tree)
1188 for (i = cfun_last_region_number + 1;
1189 i <= cfun->eh->last_region_number; i++)
1191 VEC_replace (eh_region, cfun->eh->region_array, i, outer);
1192 if (outer->aka == NULL)
1193 outer->aka = BITMAP_GGC_ALLOC ();
1194 bitmap_set_bit (outer->aka, i);
1199 /* Copy all the regions in the subtree. */
1200 if (copy_region > 0)
1202 cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
1203 *splice = duplicate_eh_regions_1 (cur, outer, eh_offset);
1209 cur = ifun->eh->region_tree;
1210 *splice = n = duplicate_eh_regions_1 (cur, outer, eh_offset);
1211 while (cur->next_peer)
1213 cur = cur->next_peer;
1214 n = n->next_peer = duplicate_eh_regions_1 (cur, outer, eh_offset);
1218 /* Remap all the labels in the new regions. */
1219 for (i = cfun_last_region_number + 1;
1220 VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
1221 if (cur && cur->tree_label)
1222 cur->tree_label = map (cur->tree_label, data);
1224 /* Search for the containing ERT_TRY region to fix up
1225 the prev_try short-cuts for ERT_CLEANUP regions. */
1227 if (outer_region > 0)
1229 VEC_index (eh_region, cfun->eh->region_array, outer_region);
1230 prev_try && prev_try->type != ERT_TRY; prev_try = prev_try->outer)
1231 if (prev_try->type == ERT_MUST_NOT_THROW
1232 || (prev_try->type == ERT_ALLOWED_EXCEPTIONS
1233 && !prev_try->u.allowed.type_list))
1239 /* Remap all of the internal catch and cleanup linkages. Since we
1240 duplicate entire subtrees, all of the referenced regions will have
1241 been copied too. And since we renumbered them as a block, a simple
1242 bit of arithmetic finds us the index for the replacement region. */
1243 for (i = cfun_last_region_number + 1;
1244 VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
1246 /* All removed EH that is toplevel in input function is now
1247 in outer EH of output function. */
1250 gcc_assert (VEC_index
1251 (eh_region, ifun->eh->region_array,
1252 i - eh_offset) == NULL);
1255 VEC_replace (eh_region, cfun->eh->region_array, i, outer);
1256 if (outer->aka == NULL)
1257 outer->aka = BITMAP_GGC_ALLOC ();
1258 bitmap_set_bit (outer->aka, i);
/* Skip aka slots; only remap each region once.  */
1262 if (i != cur->region_number)
1265 #define REMAP(REG) \
1266 (REG) = VEC_index (eh_region, cfun->eh->region_array, \
1267 (REG)->region_number + eh_offset)
1272 if (cur->u.eh_try.eh_catch)
1273 REMAP (cur->u.eh_try.eh_catch);
1274 if (cur->u.eh_try.last_catch)
1275 REMAP (cur->u.eh_try.last_catch);
1279 if (cur->u.eh_catch.next_catch)
1280 REMAP (cur->u.eh_catch.next_catch);
1281 if (cur->u.eh_catch.prev_catch)
1282 REMAP (cur->u.eh_catch.prev_catch);
1286 if (cur->u.cleanup.prev_try)
1287 REMAP (cur->u.cleanup.prev_try);
1289 cur->u.cleanup.prev_try = prev_try;
1298 #ifdef ENABLE_CHECKING
1299 verify_eh_tree (cfun);
1305 /* Return true if REGION_A is outer to REGION_B in IFUN. */
/* NOTE(review): sampled listing -- the declarator line, braces, and the
   containment walk / return statement are missing from this fragment.
   Only the precondition asserts and array lookups are visible.  */
1308 eh_region_outer_p (struct function *ifun, int region_a, int region_b)
1310 struct eh_region *rp_a, *rp_b;
/* Both region numbers must name live regions of IFUN.  */
1312 gcc_assert (ifun->eh->last_region_number > 0);
1313 gcc_assert (ifun->eh->region_tree);
1315 rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
1316 rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
1317 gcc_assert (rp_a != NULL);
1318 gcc_assert (rp_b != NULL);
1331 /* Return region number of region that is outer to both if REGION_A and
1332 REGION_B in IFUN. */
/* NOTE(review): fragment.  Visible strategy: mark every ancestor of RP_B in
   the bitmap B_OUTER, then walk outward from RP_A until a marked region is
   found; the loops that do the marking/walking are partly missing.  */
1335 eh_region_outermost (struct function *ifun, int region_a, int region_b)
1337 struct eh_region *rp_a, *rp_b;
1340 gcc_assert (ifun->eh->last_region_number > 0);
1341 gcc_assert (ifun->eh->region_tree);
1343 rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
1344 rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
1345 gcc_assert (rp_a != NULL);
1346 gcc_assert (rp_b != NULL);
/* One bit per region number; set for RP_B and (presumably) its ancestors.  */
1348 b_outer = sbitmap_alloc (ifun->eh->last_region_number + 1);
1349 sbitmap_zero (b_outer);
1353 SET_BIT (b_outer, rp_b->region_number);
/* First common ancestor found while walking out from RP_A wins.  */
1360 if (TEST_BIT (b_outer, rp_a->region_number))
1362 sbitmap_free (b_outer);
1363 return rp_a->region_number;
1369 sbitmap_free (b_outer);
/* Hash-table equality callback for the type-to-runtime map: an entry is a
   TREE_LIST whose PURPOSE is the source type being looked up.  */
1374 t2r_eq (const void *pentry, const void *pdata)
1376 const_tree const entry = (const_tree) pentry;
1377 const_tree const data = (const_tree) pdata;
1379 return TREE_PURPOSE (entry) == data;
/* Hash callback paired with t2r_eq: hash on the entry's PURPOSE (the type).  */
1383 t2r_hash (const void *pentry)
1385 const_tree const entry = (const_tree) pentry;
1386 return TREE_HASH (TREE_PURPOSE (entry));
/* Record the runtime (language-specific) representation for TYPE in
   type_to_runtime_map, creating it via the lang_eh_runtime_type hook on
   first insertion.  Fragment: the empty-slot test around the tree_cons is
   not visible here.  */
1390 add_type_for_runtime (tree type)
1394 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1395 TREE_HASH (type), INSERT);
1398 tree runtime = (*lang_eh_runtime_type) (type);
1399 *slot = tree_cons (type, runtime, NULL_TREE);
/* Return the runtime representation previously registered for TYPE by
   add_type_for_runtime.  NO_INSERT: the entry is required to exist.  */
1404 lookup_type_for_runtime (tree type)
1408 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1409 TREE_HASH (type), NO_INSERT);
1411 /* We should have always inserted the data earlier. */
1412 return TREE_VALUE (*slot);
1416 /* Represent an entry in @TTypes for either catch actions
1417 or exception filter actions. */
/* NOTE(review): the struct's fields are missing from this sampled listing;
   from the uses below it carries a type/list `t` and an integer `filter`.  */
1418 struct ttypes_filter GTY(())
1424 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
1425 (a tree) for a @TTypes type node we are thinking about adding. */
1428 ttypes_filter_eq (const void *pentry, const void *pdata)
1430 const struct ttypes_filter *const entry
1431 = (const struct ttypes_filter *) pentry;
1432 const_tree const data = (const_tree) pdata;
/* Pointer identity of the type tree is the equality criterion.  */
1434 return entry->t == data;
/* Hash callback paired with ttypes_filter_eq: hash the stored type tree.  */
1438 ttypes_filter_hash (const void *pentry)
1440 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1441 return TREE_HASH (entry->t);
1444 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
1445 exception specification list we are thinking about adding. */
1446 /* ??? Currently we use the type lists in the order given. Someone
1447 should put these in some canonical order. */
1450 ehspec_filter_eq (const void *pentry, const void *pdata)
1452 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1453 const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
/* Element-wise list comparison; order-sensitive (see ??? above).  */
1455 return type_list_equal (entry->t, data->t);
1458 /* Hash function for exception specification lists. */
1461 ehspec_filter_hash (const void *pentry)
1463 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
/* Rotate-and-add combine of the hashes of each type in the spec list.  */
1467 for (list = entry->t; list ; list = TREE_CHAIN (list))
1468 h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
1472 /* Add TYPE (which may be NULL) to crtl->eh.ttype_data, using TYPES_HASH
1473 to speed up the search. Return the filter value to be used. */
1476 add_ttypes_entry (htab_t ttypes_hash, tree type)
1478 struct ttypes_filter **slot, *n;
1480 slot = (struct ttypes_filter **)
1481 htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);
/* First time we see TYPE: allocate a new filter entry and append the type
   to the ttype table.  (The return of n->filter is outside this fragment.)  */
1483 if ((n = *slot) == NULL)
1485 /* Filter value is a 1 based table index. */
1487 n = XNEW (struct ttypes_filter);
1489 n->filter = VEC_length (tree, crtl->eh.ttype_data) + 1;
1492 VEC_safe_push (tree, gc, crtl->eh.ttype_data, type);
1498 /* Add LIST to crtl->eh.ehspec_data, using EHSPEC_HASH and TYPES_HASH
1499 to speed up the search. Return the filter value to be used. */
1502 add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
1504 struct ttypes_filter **slot, *n;
1505 struct ttypes_filter dummy;
1508 slot = (struct ttypes_filter **)
1509 htab_find_slot (ehspec_hash, &dummy, INSERT);
1511 if ((n = *slot) == NULL)
1513 /* Filter value is a -1 based byte index into a uleb128 buffer. */
1515 n = XNEW (struct ttypes_filter);
1517 n->filter = -(VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) + 1);
/* Emit the spec's filter values into ehspec_data; the ARM EABI unwinder
   stores raw type trees instead of uleb128-encoded ttype indices.  */
1520 /* Generate a 0 terminated list of filter values. */
1521 for (; list ; list = TREE_CHAIN (list))
1523 if (targetm.arm_eabi_unwinder)
1524 VARRAY_PUSH_TREE (crtl->eh.ehspec_data, TREE_VALUE (list));
1527 /* Look up each type in the list and encode its filter
1528 value as a uleb128. */
1529 push_uleb128 (&crtl->eh.ehspec_data,
1530 add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
/* Terminator: NULL_TREE for EABI, a zero byte otherwise.  */
1533 if (targetm.arm_eabi_unwinder)
1534 VARRAY_PUSH_TREE (crtl->eh.ehspec_data, NULL_TREE);
1536 VARRAY_PUSH_UCHAR (crtl->eh.ehspec_data, 0);
1542 /* Generate the action filter values to be used for CATCH and
1543 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
1544 we use lots of landing pads, and so every type or list can share
1545 the same filter value, which saves table space. */
/* NOTE(review): fragmentary; the switch head over r->type and several
   closing braces are missing from the listing.  */
1548 assign_filter_values (void)
1551 htab_t ttypes, ehspec;
1553 crtl->eh.ttype_data = VEC_alloc (tree, gc, 16);
1554 if (targetm.arm_eabi_unwinder)
1555 VARRAY_TREE_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");
1557 VARRAY_UCHAR_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");
/* Scratch hash tables dedupe types and spec lists across regions.  */
1559 ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
1560 ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
1562 for (i = cfun->eh->last_region_number; i > 0; --i)
1564 struct eh_region *r;
1566 r = VEC_index (eh_region, cfun->eh->region_array, i);
1568 /* Mind we don't process a region more than once. */
1569 if (!r || r->region_number != i)
1575 /* Whatever type_list is (NULL or true list), we build a list
1576 of filters for the region. */
1577 r->u.eh_catch.filter_list = NULL_TREE;
1579 if (r->u.eh_catch.type_list != NULL)
1581 /* Get a filter value for each of the types caught and store
1582 them in the region's dedicated list. */
1583 tree tp_node = r->u.eh_catch.type_list;
1585 for (;tp_node; tp_node = TREE_CHAIN (tp_node))
1587 int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
1588 tree flt_node = build_int_cst (NULL_TREE, flt);
1590 r->u.eh_catch.filter_list
1591 = tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
1596 /* Get a filter value for the NULL list also since it will need
1597 an action record anyway. */
1598 int flt = add_ttypes_entry (ttypes, NULL);
1599 tree flt_node = build_int_cst (NULL_TREE, flt);
1601 r->u.eh_catch.filter_list
1602 = tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
1607 case ERT_ALLOWED_EXCEPTIONS:
1609 = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
1617 htab_delete (ttypes);
1618 htab_delete (ehspec);
1621 /* Emit SEQ into basic block just before INSN (that is assumed to be
1622 first instruction of some existing BB and return the newly
1625 emit_to_new_bb_before (rtx seq, rtx insn)
1632 /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
1633 call), we don't want it to go into newly created landing pad or other EH
/* Redirect any fallthru predecessors around the new block first.  */
1635 for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
1636 if (e->flags & EDGE_FALLTHRU)
1637 force_nonfallthru (e);
1640 last = emit_insn_before (seq, insn);
/* Don't let a trailing barrier become part of the new block.  */
1641 if (BARRIER_P (last))
1642 last = PREV_INSN (last);
1643 bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
1644 update_bb_for_insn (bb);
1645 bb->flags |= BB_SUPERBLOCK;
1649 /* Generate the code to actually handle exceptions, which will follow the
/* NOTE(review): fragment -- switch head, sequence start/end bookkeeping and
   several braces are missing.  For each TRY / ALLOWED_EXCEPTIONS region this
   emits a post-landing-pad label that dispatches on the filter value, ending
   in a RESX marker that stands in for _Unwind_Resume until landing pads are
   generated.  */
1653 build_post_landing_pads (void)
1657 for (i = cfun->eh->last_region_number; i > 0; --i)
1659 struct eh_region *region;
1662 region = VEC_index (eh_region, cfun->eh->region_array, i);
1663 /* Mind we don't process a region more than once. */
1664 if (!region || region->region_number != i)
1667 switch (region->type)
1670 /* ??? Collect the set of all non-overlapping catch handlers
1671 all the way up the chain until blocked by a cleanup. */
1672 /* ??? Outer try regions can share landing pads with inner
1673 try regions if the types are completely non-overlapping,
1674 and there are no intervening cleanups. */
1676 region->post_landing_pad = gen_label_rtx ();
1680 emit_label (region->post_landing_pad);
1682 /* ??? It is mighty inconvenient to call back into the
1683 switch statement generation code in expand_end_case.
1684 Rapid prototyping sez a sequence of ifs. */
1686 struct eh_region *c;
1687 for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
/* A catch-all (NULL type list) needs no filter comparison.  */
1689 if (c->u.eh_catch.type_list == NULL)
1690 emit_jump (c->label);
1693 /* Need for one cmp/jump per type caught. Each type
1694 list entry has a matching entry in the filter list
1695 (see assign_filter_values). */
1696 tree tp_node = c->u.eh_catch.type_list;
1697 tree flt_node = c->u.eh_catch.filter_list;
1701 emit_cmp_and_jump_insns
1703 GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
1705 targetm.eh_return_filter_mode (), 0, c->label);
1707 tp_node = TREE_CHAIN (tp_node);
1708 flt_node = TREE_CHAIN (flt_node);
1714 /* We delay the generation of the _Unwind_Resume until we generate
1715 landing pads. We emit a marker here so as to get good control
1716 flow data in the meantime. */
1718 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1724 emit_to_new_bb_before (seq, region->u.eh_try.eh_catch->label);
1728 case ERT_ALLOWED_EXCEPTIONS:
1729 region->post_landing_pad = gen_label_rtx ();
1733 emit_label (region->post_landing_pad);
/* Match against the single allowed-exceptions filter value.  */
1735 emit_cmp_and_jump_insns (crtl->eh.filter,
1736 GEN_INT (region->u.allowed.filter),
1738 targetm.eh_return_filter_mode (), 0, region->label);
1740 /* We delay the generation of the _Unwind_Resume until we generate
1741 landing pads. We emit a marker here so as to get good control
1742 flow data in the meantime. */
1744 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1750 emit_to_new_bb_before (seq, region->label);
1754 case ERT_MUST_NOT_THROW:
/* MUST_NOT_THROW pads are the handler label itself; no dispatch code.  */
1755 region->post_landing_pad = region->label;
1760 /* Nothing to do. */
1769 /* Replace RESX patterns with jumps to the next handler if any, or calls to
1770 _Unwind_Resume otherwise. */
/* NOTE(review): fragment -- sequence start/end calls and some braces are
   missing from the listing.  */
1773 connect_post_landing_pads (void)
1777 for (i = cfun->eh->last_region_number; i > 0; --i)
1779 struct eh_region *region;
1780 struct eh_region *outer;
1784 region = VEC_index (eh_region, cfun->eh->region_array, i);
1785 /* Mind we don't process a region more than once. */
1786 if (!region || region->region_number != i)
1789 /* If there is no RESX, or it has been deleted by flow, there's
1790 nothing to fix up. */
1791 if (! region->resume || INSN_DELETED_P (region->resume))
1794 /* Search for another landing pad in this function. */
1795 for (outer = region->outer; outer ; outer = outer->outer)
1796 if (outer->post_landing_pad)
/* Found an enclosing pad: turn the RESX into a plain jump and rewire the
   CFG edge to carry the whole block's count/probability.  */
1804 basic_block src, dest;
1806 emit_jump (outer->post_landing_pad);
1807 src = BLOCK_FOR_INSN (region->resume);
1808 dest = BLOCK_FOR_INSN (outer->post_landing_pad);
1809 while (EDGE_COUNT (src->succs) > 0)
1810 remove_edge (EDGE_SUCC (src, 0));
1811 e = make_edge (src, dest, 0);
1812 e->probability = REG_BR_PROB_BASE;
1813 e->count = src->count;
/* No enclosing pad: the exception leaves this function via the runtime.  */
1817 emit_library_call (unwind_resume_libfunc, LCT_THROW,
1818 VOIDmode, 1, crtl->eh.exc_ptr, ptr_mode);
1820 /* What we just emitted was a throwing libcall, so it got a
1821 barrier automatically added after it. If the last insn in
1822 the libcall sequence isn't the barrier, it's because the
1823 target emits multiple insns for a call, and there are insns
1824 after the actual call insn (which are redundant and would be
1825 optimized away). The barrier is inserted exactly after the
1826 call insn, so let's go get that and delete the insns after
1827 it, because below we need the barrier to be the last insn in
1829 delete_insns_since (NEXT_INSN (last_call_insn ()));
1834 barrier = emit_insn_before (seq, region->resume);
1835 /* Avoid duplicate barrier. */
1836 gcc_assert (BARRIER_P (barrier));
1837 delete_insn (barrier);
1838 delete_insn (region->resume);
1840 /* ??? From tree-ssa we can wind up with catch regions whose
1841 label is not instantiated, but whose resx is present. Now
1842 that we've dealt with the resx, kill the region. */
1843 if (region->label == NULL && region->type == ERT_CLEANUP)
1844 remove_eh_handler (region);
/* Build the dwarf2-style landing pads: one per TRY / ALLOWED_EXCEPTIONS /
   CLEANUP region, loading exc_ptr and filter from the EH return data
   registers and falling through to the region's post-landing pad.
   NOTE(review): sequence start/end and some braces missing from listing.  */
1850 dw2_build_landing_pads (void)
1854 for (i = cfun->eh->last_region_number; i > 0; --i)
1856 struct eh_region *region;
1861 region = VEC_index (eh_region, cfun->eh->region_array, i);
1862 /* Mind we don't process a region more than once. */
1863 if (!region || region->region_number != i)
1866 if (region->type != ERT_CLEANUP
1867 && region->type != ERT_TRY
1868 && region->type != ERT_ALLOWED_EXCEPTIONS)
1873 region->landing_pad = gen_label_rtx ();
1874 emit_label (region->landing_pad);
/* Let the target emit any receiver boilerplate it requires.  */
1876 #ifdef HAVE_exception_receiver
1877 if (HAVE_exception_receiver)
1878 emit_insn (gen_exception_receiver ());
1881 #ifdef HAVE_nonlocal_goto_receiver
1882 if (HAVE_nonlocal_goto_receiver)
1883 emit_insn (gen_nonlocal_goto_receiver ());
/* EH return data regs 0/1 carry the exception pointer and filter.  */
1888 emit_move_insn (crtl->eh.exc_ptr,
1889 gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
1890 emit_move_insn (crtl->eh.filter,
1891 gen_rtx_REG (targetm.eh_return_filter_mode (),
1892 EH_RETURN_DATA_REGNO (1)));
1897 bb = emit_to_new_bb_before (seq, region->post_landing_pad);
1898 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
1899 e->count = bb->count;
1900 e->probability = REG_BR_PROB_BASE;
/* NOTE(review): fields of struct sjlj_lp_info (struct head and the
   dispatch_index/action_index members are missing from this listing).  */
1907 int directly_reachable;
1910 int call_site_index;
/* Mark, in LP_INFO, every region whose landing pad is directly reachable
   from some throwing insn in this function; return true if any was found.
   NOTE(review): fragment -- return statement and some braces missing.  */
1914 sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
1917 bool found_one = false;
1919 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1921 struct eh_region *region;
1922 enum reachable_code rc;
1926 if (! INSN_P (insn))
/* Only insns carrying a positive REG_EH_REGION note can throw here.  */
1929 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1930 if (!note || INTVAL (XEXP (note, 0)) <= 0)
1933 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
/* A THROW region tells us the exact type; start the search one level out.  */
1937 type_thrown = NULL_TREE;
1938 if (region->type == ERT_THROW)
1940 type_thrown = region->u.eh_throw.type;
1941 region = region->outer;
1944 /* Find the first containing region that might handle the exception.
1945 That's the landing pad to which we will transfer control. */
1946 rc = RNL_NOT_CAUGHT;
1947 for (; region; region = region->outer)
1949 rc = reachable_next_level (region, type_thrown, NULL, false);
1950 if (rc != RNL_NOT_CAUGHT)
1953 if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
1955 lp_info[region->region_number].directly_reachable = 1;
/* For sjlj exceptions: build the action table, then assign a dispatch index
   and a call-site index to every directly reachable region in LP_INFO.
   NOTE(review): fragment -- declarations of i/index/ar_hash and some braces
   are missing from the listing.  */
1964 sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
1969 /* First task: build the action table. */
1971 VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
1972 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
1974 for (i = cfun->eh->last_region_number; i > 0; --i)
1975 if (lp_info[i].directly_reachable)
1977 struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
/* All sjlj regions share the one dispatch label as their landing pad.  */
1979 r->landing_pad = dispatch_label;
1980 lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
1981 if (lp_info[i].action_index != -1)
1982 crtl->uses_eh_lsda = 1;
1985 htab_delete (ar_hash);
1987 /* Next: assign dispatch values. In dwarf2 terms, this would be the
1988 landing pad label for the region. For sjlj though, there is one
1989 common landing pad from which we dispatch to the post-landing pads.
1991 A region receives a dispatch index if it is directly reachable
1992 and requires in-function processing. Regions that share post-landing
1993 pads may share dispatch indices. */
1994 /* ??? Post-landing pad sharing doesn't actually happen at the moment
1995 (see build_post_landing_pads) so we don't bother checking for it. */
1998 for (i = cfun->eh->last_region_number; i > 0; --i)
1999 if (lp_info[i].directly_reachable)
2000 lp_info[i].dispatch_index = index++;
2002 /* Finally: assign call-site values. If dwarf2 terms, this would be
2003 the region number assigned by convert_to_eh_region_ranges, but
2004 handles no-action and must-not-throw differently. */
2007 for (i = cfun->eh->last_region_number; i > 0; --i)
2008 if (lp_info[i].directly_reachable)
2010 int action = lp_info[i].action_index;
2012 /* Map must-not-throw to otherwise unused call-site index 0. */
2015 /* Map no-action to otherwise unused call-site index -1. */
2016 else if (action == -1)
2018 /* Otherwise, look it up in the table. */
2020 index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
2022 lp_info[i].call_site_index = index;
/* Before each throwing insn, store its call-site index into the
   SjLj_Function_Context so the dispatcher knows where to resume.
   Skips redundant stores when the value is unchanged since the last one.
   NOTE(review): fragment -- sequence start/end and several braces missing.  */
2027 sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
2029 int last_call_site = -2;
2032 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2034 struct eh_region *region;
2036 rtx note, before, p;
2038 /* Reset value tracking at extended basic block boundaries. */
2040 last_call_site = -2;
2042 if (! INSN_P (insn))
2045 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2047 /* Calls that are known to not throw need not be marked. */
2048 if (note && INTVAL (XEXP (note, 0)) <= 0)
2052 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
2058 /* Calls (and trapping insns) without notes are outside any
2059 exception handling region in this function. Mark them as
2062 || (flag_non_call_exceptions
2063 && may_trap_p (PATTERN (insn))))
2064 this_call_site = -1;
2069 this_call_site = lp_info[region->region_number].call_site_index;
/* Elide the store when the previous one in this EBB had the same value.  */
2071 if (this_call_site == last_call_site)
2074 /* Don't separate a call from it's argument loads. */
2077 before = find_first_parameter_load (insn, NULL_RTX);
2080 mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
2081 sjlj_fc_call_site_ofs);
2082 emit_move_insn (mem, GEN_INT (this_call_site));
2086 emit_insn_before (p, before);
2087 last_call_site = this_call_site;
2091 /* Construct the SjLj_Function_Context. */
/* Emit the function prologue code that fills in the sjlj function context
   (personality, LSDA, jbuf) and registers it with the unwinder, branching
   to DISPATCH_LABEL when the context setjmp returns non-zero.
   NOTE(review): fragment -- sequence start/end and #else/#endif lines are
   missing from the listing.  */
2094 sjlj_emit_function_enter (rtx dispatch_label)
2096 rtx fn_begin, fc, mem, seq;
2097 bool fn_begin_outside_block;
2099 fc = crtl->eh.sjlj_fc;
2103 /* We're storing this libcall's address into memory instead of
2104 calling it directly. Thus, we must call assemble_external_libcall
2105 here, as we can not depend on emit_library_call to do it for us. */
2106 assemble_external_libcall (eh_personality_libfunc);
2107 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
2108 emit_move_insn (mem, eh_personality_libfunc);
/* LSDA slot: the per-function language-specific data area, or 0.  */
2110 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
2111 if (crtl->uses_eh_lsda)
2116 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
2117 sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
2118 SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
2119 emit_move_insn (mem, sym);
2122 emit_move_insn (mem, const0_rtx);
2124 #ifdef DONT_USE_BUILTIN_SETJMP
/* Library setjmp path: non-zero return goes to the dispatcher.  */
2127 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
2128 TYPE_MODE (integer_type_node), 1,
2129 plus_constant (XEXP (fc, 0),
2130 sjlj_fc_jbuf_ofs), Pmode);
2132 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2133 TYPE_MODE (integer_type_node), 0, dispatch_label);
2134 add_reg_br_prob_note (get_insns (), REG_BR_PROB_BASE/100);
2137 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2141 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2142 1, XEXP (fc, 0), Pmode);
2147 /* ??? Instead of doing this at the beginning of the function,
2148 do this in a block that is at loop level 0 and dominates all
2149 can_throw_internal instructions. */
/* Place the setup sequence on the entry edge unless real code already
   started before NOTE_INSN_FUNCTION_BEG.  */
2151 fn_begin_outside_block = true;
2152 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2153 if (NOTE_P (fn_begin))
2155 if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2157 else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
2158 fn_begin_outside_block = false;
2161 if (fn_begin_outside_block)
2162 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
2164 emit_insn_after (seq, fn_begin);
2167 /* Call back from expand_function_end to know where we should put
2168 the call to unwind_sjlj_unregister_libfunc if needed. */
2171 sjlj_emit_function_exit_after (rtx after)
/* Just record the insertion point; sjlj_emit_function_exit uses it.  */
2173 crtl->eh.sjlj_exit_after = after;
/* Emit the call that unregisters the SjLj function context on function
   exit, placing it inside or after the last block as appropriate.
   NOTE(review): fragment -- sequence start/end, edge-count assertion and
   some braces are missing from the listing.  */
2177 sjlj_emit_function_exit (void)
2185 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2186 1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);
2191 /* ??? Really this can be done in any block at loop level 0 that
2192 post-dominates all can_throw_internal instructions. This is
2193 the last possible moment. */
2195 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
2196 if (e->flags & EDGE_FALLTHRU)
2202 /* Figure out whether the place we are supposed to insert libcall
2203 is inside the last basic block or after it. In the other case
2204 we need to emit to edge. */
2205 gcc_assert (e->src->next_bb == EXIT_BLOCK_PTR);
2206 for (insn = BB_HEAD (e->src); ; insn = NEXT_INSN (insn))
2208 if (insn == crtl->eh.sjlj_exit_after)
2211 insn = NEXT_INSN (insn);
2212 emit_insn_after (seq, insn);
2215 if (insn == BB_END (e->src))
2218 insert_insn_on_edge (seq, e);
/* Emit the common sjlj dispatcher: receive control at DISPATCH_LABEL, load
   the dispatch index / exc_ptr / filter from the function context, then
   compare-and-jump to each directly reachable region's post-landing pad.
   NOTE(review): fragment -- sequence start/end, #else/#endif lines and
   some braces are missing from the listing.  */
2223 sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
2225 enum machine_mode unwind_word_mode = targetm.unwind_word_mode ();
2226 enum machine_mode filter_mode = targetm.eh_return_filter_mode ();
2227 int i, first_reachable;
2228 rtx mem, dispatch, seq, fc;
2233 fc = crtl->eh.sjlj_fc;
2237 emit_label (dispatch_label);
2239 #ifndef DONT_USE_BUILTIN_SETJMP
2240 expand_builtin_setjmp_receiver (dispatch_label);
2243 /* Load up dispatch index, exc_ptr and filter values from the
2244 function context. */
2245 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2246 sjlj_fc_call_site_ofs);
2247 dispatch = copy_to_reg (mem);
2249 mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
/* Narrow/convert the unwinder's word to pointer mode if they differ.  */
2250 if (unwind_word_mode != ptr_mode)
2252 #ifdef POINTERS_EXTEND_UNSIGNED
2253 mem = convert_memory_address (ptr_mode, mem);
2255 mem = convert_to_mode (ptr_mode, mem, 0);
2258 emit_move_insn (crtl->eh.exc_ptr, mem);
2260 mem = adjust_address (fc, unwind_word_mode,
2261 sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
2262 if (unwind_word_mode != filter_mode)
2263 mem = convert_to_mode (filter_mode, mem, 0);
2264 emit_move_insn (crtl->eh.filter, mem);
2266 /* Jump to one of the directly reachable regions. */
2267 /* ??? This really ought to be using a switch statement. */
2269 first_reachable = 0;
2270 for (i = cfun->eh->last_region_number; i > 0; --i)
2272 if (! lp_info[i].directly_reachable)
/* The first reachable region is the fall-through target; no compare.  */
2275 if (! first_reachable)
2277 first_reachable = i;
2281 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2282 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
2283 ((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, i))
2284 ->post_landing_pad);
2290 before = (((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, first_reachable))
2291 ->post_landing_pad);
2293 bb = emit_to_new_bb_before (seq, before);
2294 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
2295 e->count = bb->count;
2296 e->probability = REG_BR_PROB_BASE;
/* Top-level driver for sjlj landing-pad generation: if any region is
   directly reachable, allocate the function-context stack slot and emit
   call-site marking, function enter/exit code and the dispatch table.
   NOTE(review): fragment -- the free of lp_info and closing braces are
   missing from the listing.  */
2300 sjlj_build_landing_pads (void)
2302 struct sjlj_lp_info *lp_info;
2304 lp_info = XCNEWVEC (struct sjlj_lp_info, cfun->eh->last_region_number + 1);
2306 if (sjlj_find_directly_reachable_regions (lp_info))
2308 rtx dispatch_label = gen_label_rtx ();
2309 int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
2310 TYPE_MODE (sjlj_fc_type_node),
2311 TYPE_ALIGN (sjlj_fc_type_node));
2313 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2314 int_size_in_bytes (sjlj_fc_type_node),
2317 sjlj_assign_call_site_values (dispatch_label, lp_info);
2318 sjlj_mark_call_sites (lp_info);
2320 sjlj_emit_function_enter (dispatch_label);
2321 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2322 sjlj_emit_function_exit ();
/* Entry point run after initial RTL generation: assign filter values,
   build post-landing pads, then build either sjlj or dwarf2 landing pads,
   and rebuild the CFG bits that this invalidates.
   NOTE(review): fragment -- the trailing FOR_EACH_BB loop head and several
   braces are missing from the listing.  */
2329 finish_eh_generation (void)
2333 /* Nothing to do if no regions created. */
2334 if (cfun->eh->region_tree == NULL)
2337 /* The object here is to provide find_basic_blocks with detailed
2338 information (via reachable_handlers) on how exception control
2339 flows within the function. In this first pass, we can include
2340 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2341 regions, and hope that it will be useful in deleting unreachable
2342 handlers. Subsequently, we will generate landing pads which will
2343 connect many of the handlers, and then type information will not
2344 be effective. Still, this is a win over previous implementations. */
2346 /* These registers are used by the landing pads. Make sure they
2347 have been generated. */
2348 get_exception_pointer ();
2349 get_exception_filter ();
2351 /* Construct the landing pads. */
2353 assign_filter_values ();
2354 build_post_landing_pads ();
2355 connect_post_landing_pads ();
2356 if (USING_SJLJ_EXCEPTIONS)
2357 sjlj_build_landing_pads ();
2359 dw2_build_landing_pads ();
2361 crtl->eh.built_landing_pads = 1;
2363 /* We've totally changed the CFG. Start over. */
2364 find_exception_handler_labels ();
2365 break_superblocks ();
2366 if (USING_SJLJ_EXCEPTIONS
2367 /* Kludge for Alpha/Tru64 (see alpha_gp_save_rtx). */
2368 || single_succ_edge (ENTRY_BLOCK_PTR)->insns.r)
2369 commit_edge_insertions ();
/* Drop stale EH edges and recreate them from the new landing pads.  */
2375 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2377 if (e->flags & EDGE_EH)
2386 rtl_make_eh_edge (NULL, bb, BB_END (bb));
/* Hash callback for the exception-handler-label map: multiplicative hash
   of the label's CODE_LABEL_NUMBER.  */
2391 ehl_hash (const void *pentry)
2393 const struct ehl_map_entry *const entry
2394 = (const struct ehl_map_entry *) pentry;
2396 /* 2^32 * ((sqrt(5) - 1) / 2) */
2397 const hashval_t scaled_golden_ratio = 0x9e3779b9;
2398 return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
/* Equality callback paired with ehl_hash: entries match on the label rtx.  */
2402 ehl_eq (const void *pentry, const void *pdata)
2404 const struct ehl_map_entry *const entry
2405 = (const struct ehl_map_entry *) pentry;
2406 const struct ehl_map_entry *const data
2407 = (const struct ehl_map_entry *) pdata;
2409 return entry->label == data->label;
2412 /* This section handles removing dead code for flow. */
2414 /* Remove LABEL from exception_handler_label_map. */
/* NOTE(review): fragment -- the tmp.label assignment and the slot!=NULL
   assertion appear to be among the missing lines.  */
2417 remove_exception_handler_label (rtx label)
2419 struct ehl_map_entry **slot, tmp;
2421 /* If exception_handler_label_map was not built yet,
2422 there is nothing to do. */
2423 if (crtl->eh.exception_handler_label_map == NULL)
2427 slot = (struct ehl_map_entry **)
2428 htab_find_slot (crtl->eh.exception_handler_label_map, &tmp, NO_INSERT);
2431 htab_clear_slot (crtl->eh.exception_handler_label_map, (void **) slot);
2434 /* Splice REGION from the region tree and replace it by REPLACE etc. */
/* NOTE(review): fragment -- several conditionals/braces (aka handling,
   label choice, inner splicing) are only partially visible.  */
2437 remove_eh_handler_and_replace (struct eh_region *region,
2438 struct eh_region *replace)
2440 struct eh_region **pp, **pp_start, *p, *outer, *inner;
2443 outer = region->outer;
2444 /* For the benefit of efficiently handling REG_EH_REGION notes,
2445 replace this region in the region array with its containing
2446 region. Note that previous region deletions may result in
2447 multiple copies of this region in the array, so we have a
2448 list of alternate numbers by which we are known. */
2450 VEC_replace (eh_region, cfun->eh->region_array, region->region_number,
/* Redirect every alias number of REGION to REPLACE as well.  */
2457 EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
2459 VEC_replace (eh_region, cfun->eh->region_array, i, replace);
2466 replace->aka = BITMAP_GGC_ALLOC ();
2468 bitmap_ior_into (replace->aka, region->aka);
2469 bitmap_set_bit (replace->aka, region->region_number);
/* Retire whichever label currently identifies the handler.  */
2472 if (crtl->eh.built_landing_pads)
2473 lab = region->landing_pad;
2475 lab = region->label;
2477 remove_exception_handler_label (lab);
/* Unlink REGION from its peer list...  */
2480 pp_start = &outer->inner;
2482 pp_start = &cfun->eh->region_tree;
2483 for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
2485 *pp = region->next_peer;
/* ...and graft REGION's children onto REPLACE (or the tree root).  */
2488 pp_start = &replace->inner;
2490 pp_start = &cfun->eh->region_tree;
2491 inner = region->inner;
2494 for (p = inner; p->next_peer ; p = p->next_peer)
2498 p->next_peer = *pp_start;
/* Removing a CATCH must repair the try's catch chain, and kill the try
   entirely when this was its last catch.  */
2502 if (region->type == ERT_CATCH)
2504 struct eh_region *eh_try, *next, *prev;
2506 for (eh_try = region->next_peer;
2507 eh_try->type == ERT_CATCH;
2508 eh_try = eh_try->next_peer)
2510 gcc_assert (eh_try->type == ERT_TRY);
2512 next = region->u.eh_catch.next_catch;
2513 prev = region->u.eh_catch.prev_catch;
2516 next->u.eh_catch.prev_catch = prev;
2518 eh_try->u.eh_try.last_catch = prev;
2520 prev->u.eh_catch.next_catch = next;
2523 eh_try->u.eh_try.eh_catch = next;
2525 remove_eh_handler (eh_try);
2530 /* Splice REGION from the region tree and replace it by the outer region
/* Convenience wrapper: replace REGION by its immediate enclosing region.  */
2534 remove_eh_handler (struct eh_region *region)
2536 remove_eh_handler_and_replace (region, region->outer);
2539 /* LABEL heads a basic block that is about to be deleted. If this
2540 label corresponds to an exception region, we may be able to
2541 delete the region. */
/* NOTE(review): fragment -- tmp.label setup, the NULL-slot early return and
   closing braces are among the missing lines.  */
2544 maybe_remove_eh_handler (rtx label)
2546 struct ehl_map_entry **slot, tmp;
2547 struct eh_region *region;
2549 /* ??? After generating landing pads, it's not so simple to determine
2550 if the region data is completely unused. One must examine the
2551 landing pad and the post landing pad, and whether an inner try block
2552 is referencing the catch handlers directly. */
2553 if (crtl->eh.built_landing_pads)
2557 slot = (struct ehl_map_entry **)
2558 htab_find_slot (crtl->eh.exception_handler_label_map, &tmp, NO_INSERT);
2561 region = (*slot)->region;
2565 /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2566 because there is no path to the fallback call to terminate.
2567 But the region continues to affect call-site data until there
2568 are no more contained calls, which we don't see here. */
2569 if (region->type == ERT_MUST_NOT_THROW)
2571 htab_clear_slot (crtl->eh.exception_handler_label_map, (void **) slot);
2572 region->label = NULL_RTX;
2575 remove_eh_handler (region);
2578 /* Remove Eh region R that has turned out to have no code in its handler. */
2581 remove_eh_region (int r)
2583 struct eh_region *region;
/* R is a region number; look it up and splice it out of the tree.  */
2585 region = VEC_index (eh_region, cfun->eh->region_array, r)
2586 remove_eh_handler (region);
2589 /* Invokes CALLBACK for every exception handler label. Only used by old
2590 loop hackery; should not be used by new code. */
2593 for_each_eh_label (void (*callback) (rtx))
/* Traversal payload is the callback pointer; see for_each_eh_label_1.  */
2595 htab_traverse (crtl->eh.exception_handler_label_map, for_each_eh_label_1,
2596 (void *) &callback);
/* htab_traverse worker for for_each_eh_label: unpack the callback from
   DATA and apply it to the entry's label.  */
2600 for_each_eh_label_1 (void **pentry, void *data)
2602 struct ehl_map_entry *entry = *(struct ehl_map_entry **)pentry;
2603 void (*callback) (rtx) = *(void (**) (rtx)) data;
2605 (*callback) (entry->label);
2609 /* Invoke CALLBACK for every exception region in the current function. */
/* NOTE(review): fragment -- the non-NULL region guard that presumably
   precedes the callback invocation is not visible here.  */
2612 for_each_eh_region (void (*callback) (struct eh_region *))
2614 int i, n = cfun->eh->last_region_number;
2615 for (i = 1; i <= n; ++i)
2617 struct eh_region *region;
2619 region = VEC_index (eh_region, cfun->eh->region_array, i);
2621 (*callback) (region);
2625 /* This section describes CFG exception edges for flow. */
2627 /* For communicating between calls to reachable_next_level. */
2628 struct reachable_info
/* Visible fields: the per-region callback and its closure argument.
   NOTE(review): reachable_next_level also reads/writes tree-list members
   types_caught and types_allowed of this struct — presumably declared in
   lines elided here; confirm against the full source.  */
2632 void (*callback) (struct eh_region *, void *);
2633 void *callback_data;
2636 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2637 base class of TYPE, is in HANDLED. */
2640 check_handled (tree handled, tree type)
2644 /* We can check for exact matches without front-end help. */
2645 if (! lang_eh_type_covers)
2647 for (t = handled; t ; t = TREE_CHAIN (t))
2648 if (TREE_VALUE (t) == type)
/* With front-end assistance, a list entry also matches when it covers
   (is a base class of) TYPE.  */
2653 for (t = handled; t ; t = TREE_CHAIN (t))
2654 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2661 /* A subroutine of reachable_next_level. If we are collecting a list
2662 of handlers, add one. After landing pad generation, reference
2663 it instead of the handlers themselves. Further, the handlers are
2664 all wired together, so by referencing one, we've got them all.
2665 Before landing pad generation we reference each handler individually.
2667 LP_REGION contains the landing pad; REGION is the handler. */
2670 add_reachable_handler (struct reachable_info *info,
2671 struct eh_region *lp_region, struct eh_region *region)
/* Report the landing-pad region once pads exist; otherwise report the
   handler region itself.  */
2676 if (crtl->eh.built_landing_pads)
2677 info->callback (lp_region, info->callback_data);
2679 info->callback (region, info->callback_data);
2682 /* Process one level of exception regions for reachability.
2683 If TYPE_THROWN is non-null, then it is the *exact* type being
2684 propagated. If INFO is non-null, then collect handler labels
2685 and caught/allowed type information between invocations. */
2687 static enum reachable_code
2688 reachable_next_level (struct eh_region *region, tree type_thrown,
2689 struct reachable_info *info,
2692 switch (region->type)
2695 /* Before landing-pad generation, we model control flow
2696 directly to the individual handlers. In this way we can
2697 see that catch handler types may shadow one another. */
2698 add_reachable_handler (info, region, region);
2699 return RNL_MAYBE_CAUGHT;
/* ERT_TRY: walk the catch handlers attached to this try region.  */
2703 struct eh_region *c;
2704 enum reachable_code ret = RNL_NOT_CAUGHT;
2706 for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
2708 /* A catch-all handler ends the search. */
2709 if (c->u.eh_catch.type_list == NULL)
2711 add_reachable_handler (info, region, c);
2717 /* If we have at least one type match, end the search. */
2718 tree tp_node = c->u.eh_catch.type_list;
2720 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2722 tree type = TREE_VALUE (tp_node);
2724 if (type == type_thrown
2725 || (lang_eh_type_covers
2726 && (*lang_eh_type_covers) (type, type_thrown)))
2728 add_reachable_handler (info, region, c);
2733 /* If we have definitive information of a match failure,
2734 the catch won't trigger. */
2735 if (lang_eh_type_covers)
2736 return RNL_NOT_CAUGHT;
2739 /* At this point, we either don't know what type is thrown or
2740 don't have front-end assistance to help deciding if it is
2741 covered by one of the types in the list for this region.
2743 We'd then like to add this region to the list of reachable
2744 handlers since it is indeed potentially reachable based on the
2745 information we have.
2747 Actually, this handler is for sure not reachable if all the
2748 types it matches have already been caught. That is, it is only
2749 potentially reachable if at least one of the types it catches
2750 has not been previously caught. */
2753 ret = RNL_MAYBE_CAUGHT;
2756 tree tp_node = c->u.eh_catch.type_list;
2757 bool maybe_reachable = false;
2759 /* Compute the potential reachability of this handler and
2760 update the list of types caught at the same time. */
2761 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2763 tree type = TREE_VALUE (tp_node);
2765 if (! check_handled (info->types_caught, type))
2768 = tree_cons (NULL, type, info->types_caught)
2770 maybe_reachable = true;
2774 if (maybe_reachable)
2776 add_reachable_handler (info, region, c);
2778 /* ??? If the catch type is a base class of every allowed
2779 type, then we know we can stop the search. */
2780 ret = RNL_MAYBE_CAUGHT;
2788 case ERT_ALLOWED_EXCEPTIONS:
2789 /* An empty list of types definitely ends the search. */
2790 if (region->u.allowed.type_list == NULL_TREE)
2792 add_reachable_handler (info, region, region);
2796 /* Collect a list of lists of allowed types for use in detecting
2797 when a catch may be transformed into a catch-all. */
2799 info->types_allowed = tree_cons (NULL_TREE,
2800 region->u.allowed.type_list,
2801 info->types_allowed);
2803 /* If we have definitive information about the type hierarchy,
2804 then we can tell if the thrown type will pass through the
2806 if (type_thrown && lang_eh_type_covers)
2808 if (check_handled (region->u.allowed.type_list, type_thrown))
2809 return RNL_NOT_CAUGHT;
2812 add_reachable_handler (info, region, region);
/* Without definitive type information, conservatively treat the
   exception-specification filter as possibly reachable.  */
2817 add_reachable_handler (info, region, region);
2818 return RNL_MAYBE_CAUGHT;
2821 /* Catch regions are handled by their controlling try region. */
2822 return RNL_NOT_CAUGHT;
2824 case ERT_MUST_NOT_THROW:
2825 /* Here we end our search, since no exceptions may propagate.
2827 Local landing pads of ERT_MUST_NOT_THROW instructions are reachable
2828 only via locally handled RESX instructions.
2830 When we inline a function call, we can bring in new handlers. In order
2831 to avoid ERT_MUST_NOT_THROW landing pads being deleted as unreachable,
2832 assume that such handlers exist for any inlinable call until
2833 inlining decisions are fixed. */
2837 add_reachable_handler (info, region, region);
2845 /* Shouldn't see these here. */
2853 /* Invoke CALLBACK on each region reachable from REGION_NUMBER. */
/* IS_RESX: the query is for a RESX insn (leaving, not entering, the region).
   INLINABLE_CALL: be conservative for calls that may gain handlers by
   inlining (see reachable_next_level's ERT_MUST_NOT_THROW handling).  */
2856 foreach_reachable_handler (int region_number, bool is_resx, bool inlinable_call,
2857 void (*callback) (struct eh_region *, void *),
2858 void *callback_data)
2860 struct reachable_info info;
2861 struct eh_region *region;
2864 memset (&info, 0, sizeof (info));
2865 info.callback = callback;
2866 info.callback_data = callback_data;
2868 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
2872 type_thrown = NULL_TREE;
2875 /* A RESX leaves a region instead of entering it. Thus the
2876 region itself may have been deleted out from under us. */
2879 region = region->outer;
2881 else if (region->type == ERT_THROW)
/* A THROW region records the exact type being propagated.  */
2883 type_thrown = region->u.eh_throw.type;
2884 region = region->outer;
/* Walk outward through the region tree until something catches.  */
2889 if (reachable_next_level (region, type_thrown, &info,
2890 inlinable_call || is_resx) >= RNL_CAUGHT)
2892 /* If we have processed one cleanup, there is no point in
2893 processing any more of them. Each cleanup will have an edge
2894 to the next outer cleanup region, so the flow graph will be
2896 if (region->type == ERT_CLEANUP)
2897 region = region->u.cleanup.prev_try;
2899 region = region->outer;
2903 /* Retrieve a list of labels of exception handlers which can be
2904 reached by a given insn. */
/* Callback for foreach_reachable_handler when landing pads exist: DATA is
   an rtx* list head; record the region's landing pad.  Note the comment
   above describes reachable_handlers below, for which this is a helper.  */
2907 arh_to_landing_pad (struct eh_region *region, void *data)
2909 rtx *p_handlers = (rtx *) data;
2911 *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
/* Callback for foreach_reachable_handler before landing-pad generation:
   prepend the region's handler label to the list at DATA.  */
2915 arh_to_label (struct eh_region *region, void *data)
2917 rtx *p_handlers = (rtx *) data;
2918 *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
/* Return the INSN_LIST of handler labels (or landing pads, once built)
   reachable from INSN.  The region number comes from a RESX pattern or,
   failing that, from the insn's REG_EH_REGION note.  */
2922 reachable_handlers (rtx insn)
2924 bool is_resx = false;
2925 rtx handlers = NULL;
2929 && GET_CODE (PATTERN (insn)) == RESX)
2931 region_number = XINT (PATTERN (insn), 0);
2936 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
/* Notes with non-positive values mark insns that cannot throw locally.  */
2937 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2939 region_number = INTVAL (XEXP (note, 0));
2942 foreach_reachable_handler (region_number, is_resx, false,
2943 (crtl->eh.built_landing_pads
2944 ? arh_to_landing_pad
2951 /* Determine if the given INSN can throw an exception that is caught
2952 within the function. */
2955 can_throw_internal_1 (int region_number, bool is_resx, bool inlinable_call)
2957 struct eh_region *region;
2960 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
2964 type_thrown = NULL_TREE;
/* For RESX, start the search at the enclosing region; for a THROW
   region, note the exact thrown type and likewise move outward.  */
2966 region = region->outer;
2967 else if (region->type == ERT_THROW)
2969 type_thrown = region->u.eh_throw.type;
2970 region = region->outer;
2973 /* If this exception is ignored by each and every containing region,
2974 then control passes straight out. The runtime may handle some
2975 regions, which also do not require processing internally. */
2976 for (; region; region = region->outer)
2978 enum reachable_code how = reachable_next_level (region, type_thrown, 0,
2979 inlinable_call || is_resx);
2980 if (how == RNL_BLOCKED)
2982 if (how != RNL_NOT_CAUGHT)
/* Return true if INSN can throw an exception caught within this function.
   Non-insns never throw; a RESX with a positive region resumes internally;
   otherwise consult the insn's REG_EH_REGION note.  */
2990 can_throw_internal (const_rtx insn)
2994 if (! INSN_P (insn))
2998 && GET_CODE (PATTERN (insn)) == RESX
2999 && XINT (PATTERN (insn), 0) > 0)
3000 return can_throw_internal_1 (XINT (PATTERN (insn), 0), true, false);
/* For a SEQUENCE (delay slots), the note lives on the first insn.  */
3002 if (NONJUMP_INSN_P (insn)
3003 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3004 insn = XVECEXP (PATTERN (insn), 0, 0);
3006 /* Every insn that might throw has an EH_REGION note. */
3007 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3008 if (!note || INTVAL (XEXP (note, 0)) <= 0)
3011 return can_throw_internal_1 (INTVAL (XEXP (note, 0)), false, false);
3014 /* Determine if the given INSN can throw an exception that is
3015 visible outside the function. */
3018 can_throw_external_1 (int region_number, bool is_resx, bool inlinable_call)
3020 struct eh_region *region;
3023 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
3027 type_thrown = NULL_TREE;
/* Mirror can_throw_internal_1: skip out of the starting region for
   RESX, and capture the exact type for a THROW region.  */
3029 region = region->outer;
3030 else if (region->type == ERT_THROW)
3032 type_thrown = region->u.eh_throw.type;
3033 region = region->outer;
3036 /* If the exception is caught or blocked by any containing region,
3037 then it is not seen by any calling function. */
3038 for (; region ; region = region->outer)
3039 if (reachable_next_level (region, type_thrown, NULL,
3040 inlinable_call || is_resx) >= RNL_CAUGHT)
/* Return true if INSN can throw an exception visible outside this
   function.  Structure parallels can_throw_internal above.  */
3047 can_throw_external (const_rtx insn)
3051 if (! INSN_P (insn))
3055 && GET_CODE (PATTERN (insn)) == RESX
3056 && XINT (PATTERN (insn), 0) > 0)
3057 return can_throw_external_1 (XINT (PATTERN (insn), 0), true, false);
/* For a SEQUENCE (delay slots), the note lives on the first insn.  */
3059 if (NONJUMP_INSN_P (insn)
3060 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3061 insn = XVECEXP (PATTERN (insn), 0, 0);
3063 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3066 /* Calls (and trapping insns) without notes are outside any
3067 exception handling region in this function. We have to
3068 assume it might throw. Given that the front end and middle
3069 ends mark known NOTHROW functions, this isn't so wildly
3071 return (CALL_P (insn)
3072 || (flag_non_call_exceptions
3073 && may_trap_p (PATTERN (insn))));
/* Non-positive region numbers mark insns that cannot throw.  */
3075 if (INTVAL (XEXP (note, 0)) <= 0)
3078 return can_throw_external_1 (INTVAL (XEXP (note, 0)), false, false);
3081 /* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls. */
/* Pass execute function: scan every insn (and the epilogue delay list);
   if nothing can throw externally, the function may be marked nothrow
   when its body is known to be the one callers will use.  */
3084 set_nothrow_function_flags (void)
3090 /* Assume crtl->all_throwers_are_sibcalls until we encounter
3091 something that can throw an exception. We specifically exempt
3092 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
3093 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
3096 crtl->all_throwers_are_sibcalls = 1;
3098 /* If we don't know that this implementation of the function will
3099 actually be used, then we must not set TREE_NOTHROW, since
3100 callers must not assume that this function does not throw. */
3101 if (TREE_NOTHROW (current_function_decl))
3104 if (! flag_exceptions)
3107 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3108 if (can_throw_external (insn))
/* A throwing non-sibcall insn defeats all_throwers_are_sibcalls.  */
3112 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
3114 crtl->all_throwers_are_sibcalls = 0;
/* Repeat the scan for insns queued in epilogue delay slots.  */
3119 for (insn = crtl->epilogue_delay_list; insn;
3120 insn = XEXP (insn, 1))
3121 if (can_throw_external (insn))
3125 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
3127 crtl->all_throwers_are_sibcalls = 0;
3132 && (cgraph_function_body_availability (cgraph_node (current_function_decl))
3133 >= AVAIL_AVAILABLE))
3134 TREE_NOTHROW (current_function_decl) = 1;
/* Pass-manager descriptor for the set_nothrow_function_flags pass.  */
3138 struct rtl_opt_pass pass_set_nothrow_function_flags =
3144 set_nothrow_function_flags, /* execute */
3147 0, /* static_pass_number */
3149 0, /* properties_required */
3150 0, /* properties_provided */
3151 0, /* properties_destroyed */
3152 0, /* todo_flags_start */
3153 0, /* todo_flags_finish */
3158 /* Various hooks for unwind library. */
3160 /* Do any necessary initialization to access arbitrary stack frames.
3161 On the SPARC, this means flushing the register windows. */
3164 expand_builtin_unwind_init (void)
3166 /* Set this so all the registers get saved in our frame; we need to be
3167 able to copy the saved values for any registers from frames we unwind. */
3168 crtl->saves_all_registers = 1;
/* Give the target a chance to emit frame-setup code (e.g. flush
   register windows) when it defines this hook.  */
3170 #ifdef SETUP_FRAME_ADDRESSES
3171 SETUP_FRAME_ADDRESSES ();
/* Expand __builtin_eh_return_data_regno (WHICH): map the constant index
   WHICH to the target's EH return data register, translated to the debug
   register numbering, and return it as a constant rtx.  */
3176 expand_builtin_eh_return_data_regno (tree exp)
3178 tree which = CALL_EXPR_ARG (exp, 0);
3179 unsigned HOST_WIDE_INT iwhich;
3181 if (TREE_CODE (which) != INTEGER_CST)
3183 error ("argument of %<__builtin_eh_return_regno%> must be constant");
3187 iwhich = tree_low_cst (which, 1);
3188 iwhich = EH_RETURN_DATA_REGNO (iwhich);
3189 if (iwhich == INVALID_REGNUM)
/* Translate the hard register number for the unwinder's consumption:
   DWARF numbering when available, otherwise DBX numbering.  */
3192 #ifdef DWARF_FRAME_REGNUM
3193 iwhich = DWARF_FRAME_REGNUM (iwhich);
3195 iwhich = DBX_REGISTER_NUMBER (iwhich);
3198 return GEN_INT (iwhich);
3201 /* Given a value extracted from the return address register or stack slot,
3202 return the actual address encoded in that value. */
3205 expand_builtin_extract_return_addr (tree addr_tree)
3207 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
/* Normalize the value to Pmode if it was expanded in a narrower or
   unspecified mode.  */
3209 if (GET_MODE (addr) != Pmode
3210 && GET_MODE (addr) != VOIDmode)
3212 #ifdef POINTERS_EXTEND_UNSIGNED
3213 addr = convert_memory_address (Pmode, addr);
3215 addr = convert_to_mode (Pmode, addr, 0);
3219 /* First mask out any unwanted bits. */
3220 #ifdef MASK_RETURN_ADDR
3221 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
3224 /* Then adjust to find the real return address. */
3225 #if defined (RETURN_ADDR_OFFSET)
3226 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
3232 /* Given an actual address in addr_tree, do any necessary encoding
3233 and return the value to be stored in the return address register or
3234 stack slot so the epilogue will return to that address. */
3237 expand_builtin_frob_return_addr (tree addr_tree)
3239 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
3241 addr = convert_memory_address (Pmode, addr);
/* Undo the adjustment applied by expand_builtin_extract_return_addr.  */
3243 #ifdef RETURN_ADDR_OFFSET
3244 addr = force_reg (Pmode, addr);
3245 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
3251 /* Set up the epilogue with the magic bits we'll need to return to the
3252 exception handler. */
/* Stashes the stack adjustment and handler address into pseudo registers
   (crtl->eh.ehr_stackadj / ehr_handler) and jumps to the shared ehr_label,
   where expand_eh_return emits the actual return sequence.  */
3255 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
3260 #ifdef EH_RETURN_STACKADJ_RTX
3261 tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
3262 VOIDmode, EXPAND_NORMAL);
3263 tmp = convert_memory_address (Pmode, tmp);
/* First use allocates the pseudo; later uses copy into it.  */
3264 if (!crtl->eh.ehr_stackadj)
3265 crtl->eh.ehr_stackadj = copy_to_reg (tmp);
3266 else if (tmp != crtl->eh.ehr_stackadj)
3267 emit_move_insn (crtl->eh.ehr_stackadj, tmp);
3270 tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
3271 VOIDmode, EXPAND_NORMAL);
3272 tmp = convert_memory_address (Pmode, tmp);
3273 if (!crtl->eh.ehr_handler)
3274 crtl->eh.ehr_handler = copy_to_reg (tmp);
3275 else if (tmp != crtl->eh.ehr_handler)
3276 emit_move_insn (crtl->eh.ehr_handler, tmp);
/* All __builtin_eh_return expansions funnel into one label.  */
3278 if (!crtl->eh.ehr_label)
3279 crtl->eh.ehr_label = gen_label_rtx ();
3280 emit_jump (crtl->eh.ehr_label);
/* Emit the epilogue-time EH-return sequence at crtl->eh.ehr_label, if any
   __builtin_eh_return was expanded in this function.  Normal control flow
   jumps around it.  */
3284 expand_eh_return (void)
3288 if (! crtl->eh.ehr_label)
3291 crtl->calls_eh_return = 1;
/* The normal path uses a zero stack adjustment.  */
3293 #ifdef EH_RETURN_STACKADJ_RTX
3294 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
3297 around_label = gen_label_rtx ();
3298 emit_jump (around_label);
3300 emit_label (crtl->eh.ehr_label);
3301 clobber_return_register ();
3303 #ifdef EH_RETURN_STACKADJ_RTX
3304 emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
/* Prefer a target eh_return insn; otherwise fall back to a handler
   register/slot, or error out if the target supports neither.  */
3307 #ifdef HAVE_eh_return
3309 emit_insn (gen_eh_return (crtl->eh.ehr_handler));
3313 #ifdef EH_RETURN_HANDLER_RTX
3314 emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler);
3316 error ("__builtin_eh_return not supported on this target");
3320 emit_label (around_label);
3323 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
3324 POINTERS_EXTEND_UNSIGNED and return it. */
3327 expand_builtin_extend_pointer (tree addr_tree)
3329 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
3332 #ifdef POINTERS_EXTEND_UNSIGNED
3333 extend = POINTERS_EXTEND_UNSIGNED;
3335 /* The previous EH code did an unsigned extend by default, so we do this also
/* Result is widened to the unwinder's word mode, not Pmode.  */
3340 return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
3343 /* In the following functions, we represent entries in the action table
3344 as 1-based indices. Special cases are:
3346 0: null action record, non-null landing pad; implies cleanups
3347 -1: null action record, null landing pad; implies no action
3348 -2: no call-site entry; implies must_not_throw
3349 -3: we have yet to process outer regions
3351 Further, no special cases apply to the "next" field of the record.
3352 For next, 0 means end of list. */
/* One entry in the hashed action table: keyed by the (filter, next) pair
   (see action_record_eq/action_record_hash) and carrying its 1-based
   offset into crtl->eh.action_record_data (see add_action_record).  */
3354 struct action_record
/* Hash-table equality for action records: two records match when both
   their filter values and their next-record links are equal.  */
3362 action_record_eq (const void *pentry, const void *pdata)
3364 const struct action_record *entry = (const struct action_record *) pentry;
3365 const struct action_record *data = (const struct action_record *) pdata;
3366 return entry->filter == data->filter && entry->next == data->next;
/* Hash an action record by combining its next link and filter value.  */
3370 action_record_hash (const void *pentry)
3372 const struct action_record *entry = (const struct action_record *) pentry;
3373 return entry->next * 1009 + entry->filter;
/* Add (or find) an action record with the given FILTER and NEXT link in
   AR_HASH, appending its encoded form to crtl->eh.action_record_data on
   first creation.  Returns the record's 1-based offset.  */
3377 add_action_record (htab_t ar_hash, int filter, int next)
3379 struct action_record **slot, *new_ar, tmp;
3381 tmp.filter = filter;
3383 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
/* Only materialize and emit the record if it isn't already present.  */
3385 if ((new_ar = *slot) == NULL)
3387 new_ar = XNEW (struct action_record);
3388 new_ar->offset = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3389 new_ar->filter = filter;
3390 new_ar->next = next;
3393 /* The filter value goes in untouched. The link to the next
3394 record is a "self-relative" byte offset, or zero to indicate
3395 that there is no next record. So convert the absolute 1 based
3396 indices we've been carrying around into a displacement. */
3398 push_sleb128 (&crtl->eh.action_record_data, filter);
3400 next -= VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3401 push_sleb128 (&crtl->eh.action_record_data, next);
3404 return new_ar->offset;
/* Build the chain of action records for REGION and all its outer regions,
   interning records in AR_HASH.  Returns a 1-based record index, or one of
   the special values described above (0 cleanup, -1 no action, -2
   must_not_throw, -3 outer regions not yet processed).  */
3408 collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
3410 struct eh_region *c;
3413 /* If we've reached the top of the region chain, then we have
3414 no actions, and require no landing pad. */
3418 switch (region->type)
3421 /* A cleanup adds a zero filter to the beginning of the chain, but
3422 there are special cases to look out for. If there are *only*
3423 cleanups along a path, then it compresses to a zero action.
3424 Further, if there are multiple cleanups along a path, we only
3425 need to represent one of them, as that is enough to trigger
3426 entry to the landing pad at runtime. */
3427 next = collect_one_action_chain (ar_hash, region->outer);
/* If any outer region is also a cleanup, let it supply the zero
   filter; otherwise add one here.  */
3430 for (c = region->outer; c ; c = c->outer)
3431 if (c->type == ERT_CLEANUP)
3433 return add_action_record (ar_hash, 0, next);
3436 /* Process the associated catch regions in reverse order.
3437 If there's a catch-all handler, then we don't need to
3438 search outer regions. Use a magic -3 value to record
3439 that we haven't done the outer search. */
3441 for (c = region->u.eh_try.last_catch; c ; c = c->u.eh_catch.prev_catch)
3443 if (c->u.eh_catch.type_list == NULL)
3445 /* Retrieve the filter from the head of the filter list
3446 where we have stored it (see assign_filter_values). */
3448 = TREE_INT_CST_LOW (TREE_VALUE (c->u.eh_catch.filter_list));
3450 next = add_action_record (ar_hash, filter, 0);
3454 /* Once the outer search is done, trigger an action record for
3455 each filter we have. */
/* Lazily perform the outer search the first time a typed catch
   is seen (next still == -3).  */
3460 next = collect_one_action_chain (ar_hash, region->outer);
3462 /* If there is no next action, terminate the chain. */
3465 /* If all outer actions are cleanups or must_not_throw,
3466 we'll have no action record for it, since we had wanted
3467 to encode these states in the call-site record directly.
3468 Add a cleanup action to the chain to catch these. */
3470 next = add_action_record (ar_hash, 0, 0);
3473 flt_node = c->u.eh_catch.filter_list;
3474 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3476 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3477 next = add_action_record (ar_hash, filter, next);
3483 case ERT_ALLOWED_EXCEPTIONS:
3484 /* An exception specification adds its filter to the
3485 beginning of the chain. */
3486 next = collect_one_action_chain (ar_hash, region->outer);
3488 /* If there is no next action, terminate the chain. */
3491 /* If all outer actions are cleanups or must_not_throw,
3492 we'll have no action record for it, since we had wanted
3493 to encode these states in the call-site record directly.
3494 Add a cleanup action to the chain to catch these. */
3496 next = add_action_record (ar_hash, 0, 0);
3498 return add_action_record (ar_hash, region->u.allowed.filter, next);
3500 case ERT_MUST_NOT_THROW:
3501 /* A must-not-throw region with no inner handlers or cleanups
3502 requires no call-site entry. Note that this differs from
3503 the no handler or cleanup case in that we do require an lsda
3504 to be generated. Return a magic -2 value to record this. */
3509 /* CATCH regions are handled in TRY above. THROW regions are
3510 for optimization information only and produce no output. */
3511 return collect_one_action_chain (ar_hash, region->outer);
/* Append a call-site record with the given LANDING_PAD and ACTION index
   to crtl->eh.call_site_record; return its index biased by
   call_site_base.  */
3519 add_call_site (rtx landing_pad, int action)
3521 call_site_record record;
3523 record = GGC_NEW (struct call_site_record);
3524 record->landing_pad = landing_pad;
3525 record->action = action;
3527 VEC_safe_push (call_site_record, gc, crtl->eh.call_site_record, record);
3529 return call_site_base + VEC_length (call_site_record, crtl->eh.call_site_record) - 1;
3532 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3533 The new note numbers will not refer to region numbers, but
3534 instead to call site entries. */
3537 convert_to_eh_region_ranges (void)
3539 rtx insn, iter, note;
/* last_action starts at the magic -3 "nothing seen yet" value; see the
   action-table encoding comment above.  */
3541 int last_action = -3;
3542 rtx last_action_insn = NULL_RTX;
3543 rtx last_landing_pad = NULL_RTX;
3544 rtx first_no_action_insn = NULL_RTX;
/* Nothing to do for SJLJ exceptions or functions with no EH regions.  */
3547 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3550 VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
3552 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3554 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3557 struct eh_region *region;
3559 rtx this_landing_pad;
/* For a SEQUENCE (delay slots), the note lives on the first insn.  */
3562 if (NONJUMP_INSN_P (insn)
3563 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3564 insn = XVECEXP (PATTERN (insn), 0, 0);
3566 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3569 if (! (CALL_P (insn)
3570 || (flag_non_call_exceptions
3571 && may_trap_p (PATTERN (insn)))))
3578 if (INTVAL (XEXP (note, 0)) <= 0)
3580 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
3581 this_action = collect_one_action_chain (ar_hash, region);
3584 /* Existence of catch handlers, or must-not-throw regions
3585 implies that an lsda is needed (even if empty). */
3586 if (this_action != -1)
3587 crtl->uses_eh_lsda = 1;
3589 /* Delay creation of region notes for no-action regions
3590 until we're sure that an lsda will be required. */
3591 else if (last_action == -3)
3593 first_no_action_insn = iter;
3597 /* Cleanups and handlers may share action chains but not
3598 landing pads. Collect the landing pad for this region. */
3599 if (this_action >= 0)
3601 struct eh_region *o;
/* Walk outward to the innermost region that has a landing pad.  */
3602 for (o = region; ! o->landing_pad ; o = o->outer)
3604 this_landing_pad = o->landing_pad;
3607 this_landing_pad = NULL_RTX;
3609 /* Differing actions or landing pads implies a change in call-site
3610 info, which implies some EH_REGION note should be emitted. */
3611 if (last_action != this_action
3612 || last_landing_pad != this_landing_pad)
3614 /* If we'd not seen a previous action (-3) or the previous
3615 action was must-not-throw (-2), then we do not need an
3617 if (last_action >= -1)
3619 /* If we delayed the creation of the begin, do it now. */
3620 if (first_no_action_insn)
3622 call_site = add_call_site (NULL_RTX, 0);
3623 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3624 first_no_action_insn);
3625 NOTE_EH_HANDLER (note) = call_site;
3626 first_no_action_insn = NULL_RTX;
3629 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3631 NOTE_EH_HANDLER (note) = call_site;
3634 /* If the new action is must-not-throw, then no region notes
3636 if (this_action >= -1)
3638 call_site = add_call_site (this_landing_pad,
3639 this_action < 0 ? 0 : this_action);
3640 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3641 NOTE_EH_HANDLER (note) = call_site;
3644 last_action = this_action;
3645 last_landing_pad = this_landing_pad;
3647 last_action_insn = iter;
/* Close out any region left open at the end of the insn stream.  */
3650 if (last_action >= -1 && ! first_no_action_insn)
3652 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3653 NOTE_EH_HANDLER (note) = call_site;
3656 htab_delete (ar_hash);
/* Pass-manager descriptor for the convert_to_eh_region_ranges pass.  */
3660 struct rtl_opt_pass pass_convert_to_eh_region_ranges =
3664 "eh_ranges", /* name */
3666 convert_to_eh_region_ranges, /* execute */
3669 0, /* static_pass_number */
3671 0, /* properties_required */
3672 0, /* properties_provided */
3673 0, /* properties_destroyed */
3674 0, /* todo_flags_start */
3675 TODO_dump_func, /* todo_flags_finish */
/* Append VALUE to *DATA_AREA encoded as an unsigned LEB128: 7 data bits
   per byte, high bit set on all but the final byte.  */
3681 push_uleb128 (varray_type *data_area, unsigned int value)
3685 unsigned char byte = value & 0x7f;
3689 VARRAY_PUSH_UCHAR (*data_area, byte);
/* Append VALUE to *DATA_AREA encoded as a signed LEB128.  Emission stops
   once the remaining value is pure sign extension (0 with clear sign bit,
   or -1 with set sign bit in the last byte).  */
3695 push_sleb128 (varray_type *data_area, int value)
3702 byte = value & 0x7f;
3704 more = ! ((value == 0 && (byte & 0x40) == 0)
3705 || (value == -1 && (byte & 0x40) != 0));
3708 VARRAY_PUSH_UCHAR (*data_area, byte);
3714 #ifndef HAVE_AS_LEB128
/* Compute the byte size of the DWARF2 call-site table: three 4-byte
   fields (start, length, landing pad) plus a uleb128 action per record.
   Only needed when the assembler cannot emit .uleb128 itself.  */
3716 dw2_size_of_call_site_table (void)
3718 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3719 int size = n * (4 + 4 + 4);
3722 for (i = 0; i < n; ++i)
3724 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3725 size += size_of_uleb128 (cs->action);
/* Compute the byte size of the SJLJ call-site table: one uleb128 landing
   pad value plus one uleb128 action per record.  */
3732 sjlj_size_of_call_site_table (void)
3734 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3738 for (i = 0; i < n; ++i)
3740 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3741 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3742 size += size_of_uleb128 (cs->action);
/* Emit the DWARF2 call-site table: for each record, the region start
   offset, region length, landing-pad offset (all relative to the function
   begin label) and the uleb128 action index.  Advances call_site_base.  */
3750 dw2_output_call_site_table (void)
3752 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3755 for (i = 0; i < n; ++i)
3757 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3758 char reg_start_lab[32];
3759 char reg_end_lab[32];
3760 char landing_pad_lab[32];
3762 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3763 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3765 if (cs->landing_pad)
3766 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3767 CODE_LABEL_NUMBER (cs->landing_pad));
3769 /* ??? Perhaps use insn length scaling if the assembler supports
3770 generic arithmetic. */
3771 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3772 data4 if the function is small enough. */
3773 #ifdef HAVE_AS_LEB128
3774 dw2_asm_output_delta_uleb128 (reg_start_lab,
3775 current_function_func_begin_label,
3776 "region %d start", i);
3777 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3779 if (cs->landing_pad)
3780 dw2_asm_output_delta_uleb128 (landing_pad_lab,
3781 current_function_func_begin_label,
/* A zero landing-pad field means "no landing pad" (cleanup-free).  */
3784 dw2_asm_output_data_uleb128 (0, "landing pad");
/* Fallback for assemblers without .uleb128: fixed 4-byte fields.  */
3786 dw2_asm_output_delta (4, reg_start_lab,
3787 current_function_func_begin_label,
3788 "region %d start", i);
3789 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3790 if (cs->landing_pad)
3791 dw2_asm_output_delta (4, landing_pad_lab,
3792 current_function_func_begin_label,
3795 dw2_asm_output_data (4, 0, "landing pad");
3797 dw2_asm_output_data_uleb128 (cs->action, "action");
3800 call_site_base += n;
/* Emit the SJLJ call-site table: a uleb128 landing-pad value and uleb128
   action per record.  Advances call_site_base.  */
3804 sjlj_output_call_site_table (void)
3806 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3809 for (i = 0; i < n; ++i)
3811 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3813 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3814 "region %d landing pad", i);
3815 dw2_asm_output_data_uleb128 (cs->action, "action");
3818 call_site_base += n;
3821 #ifndef TARGET_UNWIND_INFO
3822 /* Switch to the section that should be used for exception tables. */
3825 switch_to_exception_section (const char * ARG_UNUSED (fnname))
/* Use the cached section if one was already computed.  */
3829 if (exception_section)
3830 s = exception_section;
3833 /* Compute the section and cache it into exception_section,
3834 unless it depends on the function name. */
3835 if (targetm.have_named_sections)
/* Decide writability: PIC with absolute/aligned pointer encodings
   forces a writable section so the dynamic linker can relocate.  */
3839 if (EH_TABLES_CAN_BE_READ_ONLY)
3842 ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3843 flags = ((! flag_pic
3844 || ((tt_format & 0x70) != DW_EH_PE_absptr
3845 && (tt_format & 0x70) != DW_EH_PE_aligned))
3846 ? 0 : SECTION_WRITE);
3849 flags = SECTION_WRITE;
3851 #ifdef HAVE_LD_EH_GC_SECTIONS
/* Per-function sections allow the linker to GC unused tables; these
   are name-dependent and therefore not cached.  */
3852 if (flag_function_sections)
3854 char *section_name = XNEWVEC (char, strlen (fnname) + 32);
3855 sprintf (section_name, ".gcc_except_table.%s", fnname);
3856 s = get_section (section_name, flags, NULL);
3857 free (section_name);
3862 = s = get_section (".gcc_except_table", flags, NULL);
/* Without named-section support, fall back to (read-only) data.  */
3866 = s = flag_pic ? data_section : readonly_data_section;
3869 switch_to_section (s);
3874 /* Output a reference from an exception table to the type_info object TYPE.
3875 TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
3879 output_ttype (tree type, int tt_format, int tt_format_size)
3882 bool is_public = true;
/* A null TYPE denotes the catch-all entry.  */
3884 if (type == NULL_TREE)
3888 struct varpool_node *node;
3890 type = lookup_type_for_runtime (type);
3891 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
3893 /* Let cgraph know that the rtti decl is used. Not all of the
3894 paths below go through assemble_integer, which would take
3895 care of this for us. */
3897 if (TREE_CODE (type) == ADDR_EXPR)
3899 type = TREE_OPERAND (type, 0);
3900 if (TREE_CODE (type) == VAR_DECL)
3902 node = varpool_node (type);
3904 varpool_mark_needed_node (node);
3905 is_public = TREE_PUBLIC (type);
/* Otherwise only an integer constant (a runtime filter id) is valid.  */
3909 gcc_assert (TREE_CODE (type) == INTEGER_CST);
3912 /* Allow the target to override the type table entry format. */
3913 if (targetm.asm_out.ttype (value))
/* Absolute/aligned encodings emit a raw integer; other encodings go
   through the DWARF2 encoded-address machinery.  */
3916 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
3917 assemble_integer (value, tt_format_size,
3918 tt_format_size * BITS_PER_UNIT, 1);
3920 dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
/* Emit the current function's Language Specific Data Area (LSDA):
   personality reference, LSDA header, call-site table, action record
   table, @TType table and exception-specification table.
   NOTE(review): decimated listing -- the leading numbers are original
   line numbers; braces/else-arms between them are missing here.  */
3924 output_function_exception_table (const char * ARG_UNUSED (fnname))
3926 int tt_format, cs_format, lp_format, i, n;
3927 #ifdef HAVE_AS_LEB128
3928 char ttype_label[32];
3929 char cs_after_size_label[32];
3930 char cs_end_label[32];
3935 int tt_format_size = 0;
3937 /* Not all functions need anything. */
3938 if (! crtl->uses_eh_lsda)
/* Reference the personality routine so the linker pulls it in.  */
3941 if (eh_personality_libfunc)
3942 assemble_external_libcall (eh_personality_libfunc);
3944 #ifdef TARGET_UNWIND_INFO
3945 /* TODO: Move this into target file. */
3946 fputs ("\t.personality\t", asm_out_file);
3947 output_addr_const (asm_out_file, eh_personality_libfunc);
3948 fputs ("\n\t.handlerdata\n", asm_out_file);
3949 /* Note that varasm still thinks we're in the function's code section.
3950 The ".endp" directive that will immediately follow will take us back. */
/* Non-TARGET_UNWIND_INFO path: switch to the exception section.  */
3952 switch_to_exception_section (fnname);
3955 /* If the target wants a label to begin the table, emit it here. */
3956 targetm.asm_out.except_table_label (asm_out_file);
/* Is there any @TType data at all (catch types or exception specs)?  */
3958 have_tt_data = (VEC_length (tree, crtl->eh.ttype_data) > 0
3959 || VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) > 0);
3961 /* Indicate the format of the @TType entries. */
3963 tt_format = DW_EH_PE_omit;
/* When tt data is present: use the target's preferred encoding.  */
3966 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3967 #ifdef HAVE_AS_LEB128
3968 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
3969 current_function_funcdef_no);
3971 tt_format_size = size_of_encoded_value (tt_format);
3973 assemble_align (tt_format_size * BITS_PER_UNIT);
3976 targetm.asm_out.internal_label (asm_out_file, "LLSDA",
3977 current_function_funcdef_no);
3979 /* The LSDA header. */
3981 /* Indicate the format of the landing pad start pointer. An omitted
3982 field implies @LPStart == @Start. */
3983 /* Currently we always put @LPStart == @Start. This field would
3984 be most useful in moving the landing pads completely out of
3985 line to another section, but it could also be used to minimize
3986 the size of uleb128 landing pad offsets. */
3987 lp_format = DW_EH_PE_omit;
3988 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3989 eh_data_format_name (lp_format));
3991 /* @LPStart pointer would go here. */
3993 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3994 eh_data_format_name (tt_format));
/* Without assembler .uleb128 support, the call-site table size must
   be known up front to compute the @TType base offset by hand.  */
3996 #ifndef HAVE_AS_LEB128
3997 if (USING_SJLJ_EXCEPTIONS)
3998 call_site_len = sjlj_size_of_call_site_table ();
4000 call_site_len = dw2_size_of_call_site_table ();
4003 /* A pc-relative 4-byte displacement to the @TType data. */
4006 #ifdef HAVE_AS_LEB128
/* The assembler computes the delta for us via label subtraction.  */
4007 char ttype_after_disp_label[32];
4008 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
4009 current_function_funcdef_no);
4010 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
4011 "@TType base offset");
4012 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
4014 /* Ug. Alignment queers things. */
/* Fixed-point iteration: the uleb128 size of DISP changes the padding
   before the aligned @TType data, which in turn changes DISP.  Loop
   until the value stabilizes.  */
4015 unsigned int before_disp, after_disp, last_disp, disp;
4017 before_disp = 1 + 1;
4018 after_disp = (1 + size_of_uleb128 (call_site_len)
4020 + VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data)
4021 + (VEC_length (tree, crtl->eh.ttype_data)
4027 unsigned int disp_size, pad;
4030 disp_size = size_of_uleb128 (disp);
4031 pad = before_disp + disp_size + after_disp;
4032 if (pad % tt_format_size)
4033 pad = tt_format_size - (pad % tt_format_size);
4036 disp = after_disp + pad;
4038 while (disp != last_disp);
4040 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
4044 /* Indicate the format of the call-site offsets. */
4045 #ifdef HAVE_AS_LEB128
4046 cs_format = DW_EH_PE_uleb128;
4048 cs_format = DW_EH_PE_udata4;
4050 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
4051 eh_data_format_name (cs_format));
/* Call-site table, bracketed by its length: label-delta form when the
   assembler supports .uleb128, precomputed length otherwise.  */
4053 #ifdef HAVE_AS_LEB128
4054 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
4055 current_function_funcdef_no);
4056 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
4057 current_function_funcdef_no);
4058 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
4059 "Call-site table length");
4060 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
4061 if (USING_SJLJ_EXCEPTIONS)
4062 sjlj_output_call_site_table ();
4064 dw2_output_call_site_table ();
4065 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
4067 dw2_asm_output_data_uleb128 (call_site_len,"Call-site table length");
4068 if (USING_SJLJ_EXCEPTIONS)
4069 sjlj_output_call_site_table ();
4071 dw2_output_call_site_table ();
4074 /* ??? Decode and interpret the data for flag_debug_asm. */
/* Action record table: raw bytes, one per entry.  */
4075 n = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data);
4076 for (i = 0; i < n; ++i)
4077 dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.action_record_data, i),
4078 (i ? NULL : "Action record table"));
/* @TType table proper; i starts at the vector length, so entries are
   presumably emitted in reverse index order -- the loop header is not
   visible in this extract, TODO confirm.  */
4081 assemble_align (tt_format_size * BITS_PER_UNIT);
4083 i = VEC_length (tree, crtl->eh.ttype_data);
4086 tree type = VEC_index (tree, crtl->eh.ttype_data, i);
4087 output_ttype (type, tt_format, tt_format_size);
4090 #ifdef HAVE_AS_LEB128
4092 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
4095 /* ??? Decode and interpret the data for flag_debug_asm. */
/* Exception-spec table: trees on the ARM EABI unwinder, raw bytes
   elsewhere.  */
4096 n = VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data);
4097 for (i = 0; i < n; ++i)
4099 if (targetm.arm_eabi_unwinder)
4101 tree type = VARRAY_TREE (crtl->eh.ehspec_data, i);
4102 output_ttype (type, tt_format, tt_format_size);
4105 dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.ehspec_data, i),
4106 (i ? NULL : "Exception specification table"));
/* Done; return to the function's code section.  */
4109 switch_to_section (current_function_section ());
/* Setter: record TABLE as FUN's throw-statement hash table.  */
4113 set_eh_throw_stmt_table (struct function *fun, struct htab *table)
4115 fun->eh->throw_stmt_table = table;
/* Getter: return FUN's throw-statement hash table.  */
4119 get_eh_throw_stmt_table (struct function *fun)
4121 return fun->eh->throw_stmt_table;
4124 /* Dump EH information to OUT. */
/* Walks FUN's EH region tree depth-first, printing one line per
   region: region number, kind name, tree label, and kind-specific
   links (enclosing try, catch chain, filters, aliases).
   NOTE(review): decimated listing -- the switch statement and several
   case labels/braces are missing from this view.  */
4127 dump_eh_tree (FILE * out, struct function *fun)
4129 struct eh_region *i;
/* Indexed by the region-type enum; order must match that enum.  */
4131 static const char *const type_name[] = { "unknown", "cleanup", "try", "catch",
4132 "allowed_exceptions", "must_not_throw",
4136 i = fun->eh->region_tree;
4140 fprintf (out, "Eh tree:\n");
/* Two-space indent per nesting depth.  */
4143 fprintf (out, " %*s %i %s", depth * 2, "",
4144 i->region_number, type_name[(int) i->type]);
4147 fprintf (out, " tree_label:");
4148 print_generic_expr (out, i->tree_label, 0);
/* Cleanup region: show its enclosing try, if any.  */
4153 if (i->u.cleanup.prev_try)
4154 fprintf (out, " prev try:%i",
4155 i->u.cleanup.prev_try->region_number);
/* Try region: list the catch regions chained off it.  */
4160 struct eh_region *c;
4161 fprintf (out, " catch regions:");
4162 for (c = i->u.eh_try.eh_catch; c; c = c->u.eh_catch.next_catch)
4163 fprintf (out, " %i", c->region_number);
/* Catch region: show its neighbours in the catch chain.  */
4168 if (i->u.eh_catch.prev_catch)
4169 fprintf (out, " prev: %i",
4170 i->u.eh_catch.prev_catch->region_number);
4171 if (i->u.eh_catch.next_catch)
4172 fprintf (out, " next %i",
4173 i->u.eh_catch.next_catch->region_number);
4176 case ERT_ALLOWED_EXCEPTIONS:
4177 fprintf (out, "filter :%i types:", i->u.allowed.filter);
4178 print_generic_expr (out, i->u.allowed.type_list, 0);
/* Presumably the ERT_THROW case -- its label is not visible here.  */
4182 fprintf (out, "type:");
4183 print_generic_expr (out, i->u.eh_throw.type, 0);
4186 case ERT_MUST_NOT_THROW:
/* Alias region numbers folded into this region.  */
4194 fprintf (out, " also known as:");
4195 dump_bitmap (out, i->aka);
4198 fprintf (out, "\n");
4199 /* If there are sub-regions, process them. */
4201 i = i->inner, depth++;
4202 /* If there are peers, process them. */
4203 else if (i->next_peer)
4205 /* Otherwise, step back up the tree to the next peer. */
4215 while (i->next_peer == NULL);
4221 /* Verify some basic invariants on EH datastructures. Could be extended to
/* Checks that region_array agrees with the region tree, that outer
   links and nesting depths are consistent, and that a region marked
   may_contain_throw is never nested in one that may not.  On any
   failure, dumps the tree to stderr and aborts via internal_error.
   NOTE(review): decimated listing -- braces/else-arms missing.  */
4224 verify_eh_tree (struct function *fun)
4226 struct eh_region *i, *outer = NULL;
4233 if (!fun->eh->region_tree)
/* Pass 1: each populated region_array slot must hold a region whose
   number matches the index, or that lists the index as an alias.  */
4235 for (j = fun->eh->last_region_number; j > 0; --j)
4236 if ((i = VEC_index (eh_region, fun->eh->region_array, j)))
4238 if (i->region_number == j)
4240 if (i->region_number != j && (!i->aka || !bitmap_bit_p (i->aka, j)))
4242 error ("region_array is corrupted for region %i",
/* Pass 2: walk the tree and cross-check each node.  */
4247 i = fun->eh->region_tree;
4251 if (VEC_index (eh_region, fun->eh->region_array, i->region_number) != i)
4253 error ("region_array is corrupted for region %i", i->region_number);
4256 if (i->outer != outer)
4258 error ("outer block of region %i is wrong", i->region_number);
4261 if (i->may_contain_throw && outer && !outer->may_contain_throw)
4264 ("region %i may contain throw and is contained in region that may not",
4270 error ("negative nesting depth of region %i", i->region_number);
4274 /* If there are sub-regions, process them. */
4276 outer = i, i = i->inner, depth++;
4277 /* If there are peers, process them. */
4278 else if (i->next_peer)
4280 /* Otherwise, step back up the tree to the next peer. */
4291 error ("tree list ends on depth %i", depth + 1);
/* The visit count from the walk must equal the array population.  */
4294 if (count != nvisited)
4296 error ("array does not match the region tree");
/* Presumably guarded by an error flag set above -- the condition line
   is not visible in this extract.  */
4301 dump_eh_tree (stderr, fun);
4302 internal_error ("verify_eh_tree failed");
4308 while (i->next_peer == NULL);
4314 /* Initialize unwind_resume_libfunc. */
4317 default_init_unwind_resume_libfunc (void)
4319 /* The default c++ routines aren't actually c++ specific, so use those. */
/* SJLJ configurations resume unwinding via the SjLj entry point;
   table-driven configurations use _Unwind_Resume.  */
4320 unwind_resume_libfunc =
4321 init_one_libfunc ( USING_SJLJ_EXCEPTIONS ? "_Unwind_SjLj_Resume"
4322 : "_Unwind_Resume");
/* Pass gate: run the RTL EH pass only when exception handling is
   enabled for this compilation (doing_eh with no error reporting).  */
4327 gate_handle_eh (void)
4329 return doing_eh (0);
4332 /* Complete generation of exception handling code. */
/* Clean the CFG, generate the EH landing pads/edges, then clean the
   CFG again -- both times without deleting insns.  */
4334 rest_of_handle_eh (void)
4336 cleanup_cfg (CLEANUP_NO_INSN_DEL);
4337 finish_eh_generation ();
4338 cleanup_cfg (CLEANUP_NO_INSN_DEL);
/* Pass descriptor wiring gate_handle_eh/rest_of_handle_eh into the
   pass manager.  NOTE(review): some initializer fields (pass name,
   sub/next links) are missing from this decimated extract.  */
4342 struct rtl_opt_pass pass_rtl_eh =
4347 gate_handle_eh, /* gate */
4348 rest_of_handle_eh, /* execute */
4351 0, /* static_pass_number */
4352 TV_JUMP, /* tv_id */
4353 0, /* properties_required */
4354 0, /* properties_provided */
4355 0, /* properties_destroyed */
4356 0, /* todo_flags_start */
4357 TODO_dump_func /* todo_flags_finish */
4361 #include "gt-except.h"