1 /* Implements exception handling.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
5 Contributed by Mike Stump <mrs@cygnus.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
24 /* An exception is an event that can be signaled from within a
25 function. This event can then be "caught" or "trapped" by the
26 callers of this function. This potentially allows program flow to
27 be transferred to any arbitrary code associated with a function call
28 several levels up the stack.
30 The intended use for this mechanism is for signaling "exceptional
31 events" in an out-of-band fashion, hence its name. The C++ language
32 (and many other OO-styled or functional languages) practically
33 requires such a mechanism, as otherwise it becomes very difficult
34 or even impossible to signal failure conditions in complex
35 situations. The traditional C++ example is when an error occurs in
36 the process of constructing an object; without such a mechanism, it
37 is impossible to signal that the error occurs without adding global
38 state variables and error checks around every object construction.
40 The act of causing this event to occur is referred to as "throwing
41 an exception". (Alternate terms include "raising an exception" or
42 "signaling an exception".) The term "throw" is used because control
43 is returned to the callers of the function that is signaling the
44 exception, and thus there is the concept of "throwing" the
45 exception up the call stack.
47 [ Add updated documentation on how to use this. ] */
52 #include "coretypes.h"
60 #include "insn-config.h"
62 #include "integrate.h"
63 #include "hard-reg-set.h"
64 #include "basic-block.h"
66 #include "dwarf2asm.h"
67 #include "dwarf2out.h"
75 #include "langhooks.h"
77 #include "diagnostic.h"
78 #include "tree-pass.h"
81 /* Provide defaults for stuff that may not be defined when using
83 #ifndef EH_RETURN_DATA_REGNO
84 #define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
87 /* Protect cleanup actions with must-not-throw regions, with a call
88 to the given failure handler. */
89 gimple (*lang_protect_cleanup_actions) (void);
91 /* Return true if type A catches type B. */
92 int (*lang_eh_type_covers) (tree a, tree b);
94 /* Map a type to a runtime object to match type. */
95 tree (*lang_eh_runtime_type) (tree);
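
/* Front ends are expected to install these hooks during their own
   initialization.  A minimal sketch (the function names here are purely
   illustrative, not the actual hooks of any front end):

       lang_eh_runtime_type = frontend_build_runtime_type;
       lang_eh_type_covers = frontend_type_covers;
       lang_protect_cleanup_actions = frontend_protect_cleanups;

   so that the machinery below can map source-level types to the objects
   the unwinder matches against at run time.  */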
97 /* A hash table of label to region number. */
99 struct ehl_map_entry GTY(())
102 struct eh_region *region;
105 static GTY(()) int call_site_base;
106 static GTY ((param_is (union tree_node)))
107 htab_t type_to_runtime_map;
109 /* Describe the SjLj_Function_Context structure. */
110 static GTY(()) tree sjlj_fc_type_node;
111 static int sjlj_fc_call_site_ofs;
112 static int sjlj_fc_data_ofs;
113 static int sjlj_fc_personality_ofs;
114 static int sjlj_fc_lsda_ofs;
115 static int sjlj_fc_jbuf_ofs;
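
/* For orientation, the runtime structure these offsets index into looks
   roughly like the sketch below; unwind-sjlj.c has the authoritative
   definition, and the exact jbuf type depends on DONT_USE_BUILTIN_SETJMP:

       struct SjLj_Function_Context
       {
         struct SjLj_Function_Context *prev;   // chain of registered frames
         int call_site;                        // index stored before each call
         _Unwind_Word data[4];                 // exception pointer, filter, ...
         void *personality;                    // address of personality routine
         void *lsda;
         void *jbuf[];                         // setjmp/builtin-setjmp buffer
       };

   The sjlj_fc_*_ofs values above are the byte offsets of the corresponding
   fields, computed in init_eh from the layout of sjlj_fc_type_node.  */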
117 /* Describes one exception region. */
118 struct eh_region GTY(())
120 /* The immediately surrounding region. */
121 struct eh_region *outer;
123 /* The list of immediately contained regions. */
124 struct eh_region *inner;
125 struct eh_region *next_peer;
127 /* An identifier for this region. */
130 /* When a region is deleted, its parents inherit the REG_EH_REGION
131 numbers already assigned. */
134 /* Each region does exactly one thing. */
141 ERT_ALLOWED_EXCEPTIONS,
146 /* Holds the action to perform based on the preceding type. */
148 /* A list of catch blocks, a surrounding try block,
149 and the label for continuing after a catch. */
150 struct eh_region_u_try {
151 struct eh_region *eh_catch;
152 struct eh_region *last_catch;
153 } GTY ((tag ("ERT_TRY"))) eh_try;
155 /* The list through the catch handlers, the list of type objects
156 matched, and the list of associated filters. */
157 struct eh_region_u_catch {
158 struct eh_region *next_catch;
159 struct eh_region *prev_catch;
162 } GTY ((tag ("ERT_CATCH"))) eh_catch;
164 /* A tree_list of allowed types. */
165 struct eh_region_u_allowed {
168 } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;
170 /* The type given by a call to "throw foo();", or discovered
172 struct eh_region_u_throw {
174 } GTY ((tag ("ERT_THROW"))) eh_throw;
176 /* Retain the cleanup expression even after expansion so that
177 we can match up fixup regions. */
178 struct eh_region_u_cleanup {
179 struct eh_region *prev_try;
180 } GTY ((tag ("ERT_CLEANUP"))) cleanup;
181 } GTY ((desc ("%0.type"))) u;
183 /* Entry point for this region's handler before landing pads are built. */
187 /* Entry point for this region's handler from the runtime eh library. */
190 /* Entry point for this region's handler from an inner region. */
191 rtx post_landing_pad;
193 /* The RESX insn for handing off control to the next outermost handler,
197 /* True if something in this region may throw. */
198 unsigned may_contain_throw : 1;
201 typedef struct eh_region *eh_region;
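
/* Illustration of how the tree links above encode nesting (a sketch, not
   dumper output).  For a source fragment like

       try {            // region 1, ERT_TRY
         may_throw ();
       }
       catch (A) { }    // region 2, ERT_CATCH
       catch (B) { }    // region 3, ERT_CATCH

   regions 1, 2 and 3 all share the same OUTER region and are chained
   through NEXT_PEER; the TRY points at its first and last handler through
   u.eh_try.eh_catch and u.eh_try.last_catch, while the handlers link to
   each other through u.eh_catch.next_catch and prev_catch (this is the
   shape gen_eh_region_catch below builds).  */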
203 struct call_site_record GTY(())
209 DEF_VEC_P(eh_region);
210 DEF_VEC_ALLOC_P(eh_region, gc);
211 DEF_VEC_ALLOC_P(eh_region, heap);
213 /* Used to save exception status for each function. */
214 struct eh_status GTY(())
216 /* The tree of all regions for this function. */
217 struct eh_region *region_tree;
219 /* The same information as an indexable array. */
220 VEC(eh_region,gc) *region_array;
221 int last_region_number;
223 htab_t GTY((param_is (struct throw_stmt_node))) throw_stmt_table;
226 static int t2r_eq (const void *, const void *);
227 static hashval_t t2r_hash (const void *);
228 static void add_type_for_runtime (tree);
229 static tree lookup_type_for_runtime (tree);
231 static int ttypes_filter_eq (const void *, const void *);
232 static hashval_t ttypes_filter_hash (const void *);
233 static int ehspec_filter_eq (const void *, const void *);
234 static hashval_t ehspec_filter_hash (const void *);
235 static int add_ttypes_entry (htab_t, tree);
236 static int add_ehspec_entry (htab_t, htab_t, tree);
237 static void assign_filter_values (void);
238 static void build_post_landing_pads (void);
239 static void connect_post_landing_pads (void);
240 static void dw2_build_landing_pads (void);
243 static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
244 static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
245 static void sjlj_mark_call_sites (struct sjlj_lp_info *);
246 static void sjlj_emit_function_enter (rtx);
247 static void sjlj_emit_function_exit (void);
248 static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
249 static void sjlj_build_landing_pads (void);
251 static void remove_eh_handler (struct eh_region *);
252 static void remove_eh_handler_and_replace (struct eh_region *,
255 /* The return value of reachable_next_level. */
258 /* The given exception is not processed by the given region. */
260 /* The given exception may need processing by the given region. */
262 /* The given exception is completely processed by the given region. */
264 /* The given exception is completely processed by the runtime. */
268 struct reachable_info;
269 static enum reachable_code reachable_next_level (struct eh_region *, tree,
270 struct reachable_info *, bool);
272 static int action_record_eq (const void *, const void *);
273 static hashval_t action_record_hash (const void *);
274 static int add_action_record (htab_t, int, int);
275 static int collect_one_action_chain (htab_t, struct eh_region *);
276 static int add_call_site (rtx, int);
278 static void push_uleb128 (varray_type *, unsigned int);
279 static void push_sleb128 (varray_type *, int);
280 #ifndef HAVE_AS_LEB128
281 static int dw2_size_of_call_site_table (void);
282 static int sjlj_size_of_call_site_table (void);
284 static void dw2_output_call_site_table (void);
285 static void sjlj_output_call_site_table (void);
288 /* Routine to see if exception handling is turned on.
289 DO_WARN is nonzero if we want to inform the user that exception
290 handling is turned off.
292 This is used to ensure that -fexceptions has been specified if the
293 compiler tries to use any exception-specific functions. */
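
/* Typical usage: callers that must already have EH enabled assert it with
   gcc_assert (doing_eh (0)), while a caller that wants the user-facing
   diagnostic would guard itself with something like

       if (! doing_eh (1))
         return;

   (the latter is only a sketch of the calling convention, not a quote of
   any particular caller).  */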
296 doing_eh (int do_warn)
298 if (! flag_exceptions)
300 static int warned = 0;
301 if (! warned && do_warn)
303 error ("exception handling disabled, use -fexceptions to enable");
315 if (! flag_exceptions)
318 type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);
320 /* Create the SjLj_Function_Context structure. This should match
321 the definition in unwind-sjlj.c. */
322 if (USING_SJLJ_EXCEPTIONS)
324 tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
326 sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);
328 f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
329 build_pointer_type (sjlj_fc_type_node));
330 DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;
332 f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
334 DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
336 tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
337 tmp = build_array_type (lang_hooks.types.type_for_mode
338 (targetm.unwind_word_mode (), 1),
340 f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
341 DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
343 f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
345 DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;
347 f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
349 DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
351 #ifdef DONT_USE_BUILTIN_SETJMP
353 tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
/* This should be large enough for most systems; if it is not,
   JMP_BUF_SIZE should be defined with the proper value.  It will
   also tend to be larger than necessary for most systems; a more
   optimal port will define JMP_BUF_SIZE.  */
359 tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
362 /* builtin_setjmp takes a pointer to 5 words. */
363 tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
365 tmp = build_index_type (tmp);
366 tmp = build_array_type (ptr_type_node, tmp);
367 f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
368 #ifdef DONT_USE_BUILTIN_SETJMP
/* We don't know what alignment requirements the
   runtime's jmp_buf has.  Overestimate.  */
371 DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
372 DECL_USER_ALIGN (f_jbuf) = 1;
374 DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
376 TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
377 TREE_CHAIN (f_prev) = f_cs;
378 TREE_CHAIN (f_cs) = f_data;
379 TREE_CHAIN (f_data) = f_per;
380 TREE_CHAIN (f_per) = f_lsda;
381 TREE_CHAIN (f_lsda) = f_jbuf;
383 layout_type (sjlj_fc_type_node);
385 /* Cache the interesting field offsets so that we have
386 easy access from rtl. */
387 sjlj_fc_call_site_ofs
388 = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
389 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
391 = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
392 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
393 sjlj_fc_personality_ofs
394 = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
395 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
397 = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
398 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
400 = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
401 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
406 init_eh_for_function (void)
408 cfun->eh = GGC_CNEW (struct eh_status);
411 /* Routines to generate the exception tree somewhat directly.
412 These are used from tree-eh.c when processing exception related
413 nodes during tree optimization. */
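
/* A hedged sketch of how such a caller strings these together for a
   try/catch construct (the variable names are illustrative only, not
   lifted from tree-eh.c):

       struct eh_region *tr = gen_eh_region_try (outer);
       struct eh_region *c = gen_eh_region_catch (tr, caught_type);

   followed by set_eh_region_tree_label to record the label at which each
   handler's code starts.  gen_eh_region_cleanup builds cleanup regions in
   the same way, and note_eh_region_may_contain_throw is called for regions
   that end up containing statements that may throw.  */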
415 static struct eh_region *
416 gen_eh_region (enum eh_region_type type, struct eh_region *outer)
418 struct eh_region *new_eh;
420 #ifdef ENABLE_CHECKING
421 gcc_assert (doing_eh (0));
424 /* Insert a new blank region as a leaf in the tree. */
425 new_eh = GGC_CNEW (struct eh_region);
427 new_eh->outer = outer;
430 new_eh->next_peer = outer->inner;
431 outer->inner = new_eh;
435 new_eh->next_peer = cfun->eh->region_tree;
436 cfun->eh->region_tree = new_eh;
439 new_eh->region_number = ++cfun->eh->last_region_number;
445 gen_eh_region_cleanup (struct eh_region *outer, struct eh_region *prev_try)
447 struct eh_region *cleanup = gen_eh_region (ERT_CLEANUP, outer);
448 cleanup->u.cleanup.prev_try = prev_try;
453 gen_eh_region_try (struct eh_region *outer)
455 return gen_eh_region (ERT_TRY, outer);
459 gen_eh_region_catch (struct eh_region *t, tree type_or_list)
461 struct eh_region *c, *l;
462 tree type_list, type_node;
/* Make sure we always end up with a type list, to normalize further
   processing; then register each type against the runtime types map.  */
466 type_list = type_or_list;
469 if (TREE_CODE (type_or_list) != TREE_LIST)
470 type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
472 type_node = type_list;
473 for (; type_node; type_node = TREE_CHAIN (type_node))
474 add_type_for_runtime (TREE_VALUE (type_node));
477 c = gen_eh_region (ERT_CATCH, t->outer);
478 c->u.eh_catch.type_list = type_list;
479 l = t->u.eh_try.last_catch;
480 c->u.eh_catch.prev_catch = l;
482 l->u.eh_catch.next_catch = c;
484 t->u.eh_try.eh_catch = c;
485 t->u.eh_try.last_catch = c;
491 gen_eh_region_allowed (struct eh_region *outer, tree allowed)
493 struct eh_region *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
494 region->u.allowed.type_list = allowed;
496 for (; allowed ; allowed = TREE_CHAIN (allowed))
497 add_type_for_runtime (TREE_VALUE (allowed));
503 gen_eh_region_must_not_throw (struct eh_region *outer)
505 return gen_eh_region (ERT_MUST_NOT_THROW, outer);
509 get_eh_region_number (struct eh_region *region)
511 return region->region_number;
515 get_eh_region_may_contain_throw (struct eh_region *region)
517 return region->may_contain_throw;
521 get_eh_region_tree_label (struct eh_region *region)
523 return region->tree_label;
527 get_eh_region_no_tree_label (int region)
529 return VEC_index (eh_region, cfun->eh->region_array, region)->tree_label;
533 set_eh_region_tree_label (struct eh_region *region, tree lab)
535 region->tree_label = lab;
539 expand_resx_expr (tree exp)
541 int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
542 struct eh_region *reg = VEC_index (eh_region,
543 cfun->eh->region_array, region_nr);
545 gcc_assert (!reg->resume);
546 do_pending_stack_adjust ();
547 reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
551 /* Note that the current EH region (if any) may contain a throw, or a
552 call to a function which itself may contain a throw. */
555 note_eh_region_may_contain_throw (struct eh_region *region)
557 while (region && !region->may_contain_throw)
559 region->may_contain_throw = 1;
560 region = region->outer;
565 /* Return an rtl expression for a pointer to the exception object
569 get_exception_pointer (void)
571 if (! crtl->eh.exc_ptr)
572 crtl->eh.exc_ptr = gen_reg_rtx (ptr_mode);
573 return crtl->eh.exc_ptr;
576 /* Return an rtl expression for the exception dispatch filter
580 get_exception_filter (void)
582 if (! crtl->eh.filter)
583 crtl->eh.filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
584 return crtl->eh.filter;
587 /* This section is for the exception handling specific optimization pass. */
589 /* Random access the exception region tree. */
592 collect_eh_region_array (void)
596 i = cfun->eh->region_tree;
600 VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
601 cfun->eh->last_region_number + 1);
602 VEC_replace (eh_region, cfun->eh->region_array, 0, 0);
606 VEC_replace (eh_region, cfun->eh->region_array, i->region_number, i);
608 /* If there are sub-regions, process them. */
611 /* If there are peers, process them. */
612 else if (i->next_peer)
614 /* Otherwise, step back up the tree to the next peer. */
621 } while (i->next_peer == NULL);
/* R is a MUST_NOT_THROW region that is not reachable via local
   RESX instructions.  It still must be kept in the tree in case the runtime
   can unwind through it, or we will eliminate the terminate call the runtime
   would otherwise make.  Return TRUE if R contains throwing statements
   or some of the exceptions in inner regions can be unwound up to R.

   CONTAINS_STMT is a bitmap of all regions that contain throwing
   statements.

   The function looks O(n^3) at first sight.  In fact it is called at most
   once for every MUST_NOT_THROW in the EH tree from remove_unreachable_regions.
   Because the outer loop walking subregions does not dive into MUST_NOT_THROW,
   it examines every region at most once.  The inner loop
   does the unwinding from the throwing statement in the same way as we do
   during CFG construction, so it is O(n^2) in the size of the EH tree, but
   O(n) in the size of the CFG.  In practice EH trees are wide, not deep, so
   this is not a problem.  */
646 can_be_reached_by_runtime (sbitmap contains_stmt, struct eh_region *r)
648 struct eh_region *i = r->inner;
652 if (TEST_BIT (contains_stmt, r->region_number))
655 EXECUTE_IF_SET_IN_BITMAP (r->aka, 0, n, bi)
656 if (TEST_BIT (contains_stmt, n))
662 /* It is pointless to look into MUST_NOT_THROW
663 or dive into subregions. They never unwind up. */
664 if (i->type != ERT_MUST_NOT_THROW)
666 bool found = TEST_BIT (contains_stmt, i->region_number);
668 EXECUTE_IF_SET_IN_BITMAP (i->aka, 0, n, bi)
669 if (TEST_BIT (contains_stmt, n))
/* We have a nested region that contains a throwing statement.
   See if resuming might lead up to the resx, or whether we get locally
   caught sooner.  If we get locally caught sooner, we either
   know region R is not reachable, or it would have a direct edge
   from the EH resx and thus we consider the region reachable at
682 struct eh_region *i1 = i;
683 tree type_thrown = NULL_TREE;
685 if (i1->type == ERT_THROW)
687 type_thrown = i1->u.eh_throw.type;
690 for (; i1 != r; i1 = i1->outer)
691 if (reachable_next_level (i1, type_thrown, NULL,
692 false) >= RNL_CAUGHT)
698 /* If there are sub-regions, process them. */
699 if (i->type != ERT_MUST_NOT_THROW && i->inner)
701 /* If there are peers, process them. */
702 else if (i->next_peer)
704 /* Otherwise, step back up the tree to the next peer. */
713 while (i->next_peer == NULL);
/* Bring region R to the root of the tree.  */
722 bring_to_root (struct eh_region *r)
724 struct eh_region **pp;
725 struct eh_region *outer = r->outer;
728 for (pp = &outer->inner; *pp != r; pp = &(*pp)->next_peer)
732 r->next_peer = cfun->eh->region_tree;
733 cfun->eh->region_tree = r;
/* Remove all regions whose labels are not reachable.
   REACHABLE is a bitmap of all regions that are used by the function.
   CONTAINS_STMT is a bitmap of all regions that contain a statement (or NULL).  */
741 remove_unreachable_regions (sbitmap reachable, sbitmap contains_stmt)
745 VEC(eh_region,heap) *must_not_throws = VEC_alloc (eh_region, heap, 16);
746 struct eh_region *local_must_not_throw = NULL;
747 struct eh_region *first_must_not_throw = NULL;
749 for (i = cfun->eh->last_region_number; i > 0; --i)
751 r = VEC_index (eh_region, cfun->eh->region_array, i);
752 if (!r || r->region_number != i)
754 if (!TEST_BIT (reachable, i) && !r->resume)
758 r->tree_label = NULL;
762 /* Don't remove ERT_THROW regions if their outer region
764 if (r->outer && TEST_BIT (reachable, r->outer->region_number))
767 case ERT_MUST_NOT_THROW:
/* MUST_NOT_THROW regions are implementable solely in the
   runtime, but we need them when inlining functions.

   Keep them if the outer region is not MUST_NOT_THROW as well
   and if they contain some statement that might unwind through them.  */
774 if ((!r->outer || r->outer->type != ERT_MUST_NOT_THROW)
776 || can_be_reached_by_runtime (contains_stmt, r)))
781 /* TRY regions are reachable if any of its CATCH regions
784 for (c = r->u.eh_try.eh_catch; c;
785 c = c->u.eh_catch.next_catch)
786 if (TEST_BIT (reachable, c->region_number))
801 fprintf (dump_file, "Removing unreachable eh region %i\n",
803 remove_eh_handler (r);
805 else if (r->type == ERT_MUST_NOT_THROW)
807 if (!first_must_not_throw)
808 first_must_not_throw = r;
809 VEC_safe_push (eh_region, heap, must_not_throws, r);
813 if (r->type == ERT_MUST_NOT_THROW)
815 if (!local_must_not_throw)
816 local_must_not_throw = r;
818 VEC_safe_push (eh_region, heap, must_not_throws, r);
/* MUST_NOT_THROW regions without a local handler are all the same; they
   trigger a terminate call in the runtime.
   MUST_NOT_THROW regions handled locally can differ in the debug info
   associated with the std::terminate () call, or in whether they call
   terminate or abort when one comes from Java and the other from C++.

   We merge all MUST_NOT_THROW regions handled by the runtime into one.
   We also bring all local MUST_NOT_THROW regions to the roots of the EH tree
   (since unwinding never continues to the outer region anyway).
   If a MUST_NOT_THROW with a local handler is present in the tree, we use
   that region to merge into, since it will remain in the tree anyway;
   otherwise we use the first MUST_NOT_THROW.

   Merging of locally handled regions needs changes to the CFG.  Crossjumping
   should take care of this, by looking at the actual code and
   ensuring that the cleanup actions are really the same.  */
839 if (local_must_not_throw)
840 first_must_not_throw = local_must_not_throw;
842 for (i = 0; VEC_iterate (eh_region, must_not_throws, i, r); i++)
844 if (!r->label && !r->tree_label && r != first_must_not_throw)
847 fprintf (dump_file, "Replacing MUST_NOT_THROW region %i by %i\n",
849 first_must_not_throw->region_number);
850 remove_eh_handler_and_replace (r, first_must_not_throw);
851 first_must_not_throw->may_contain_throw |= r->may_contain_throw;
856 #ifdef ENABLE_CHECKING
857 verify_eh_tree (cfun);
859 VEC_free (eh_region, heap, must_not_throws);
/* Return an array mapping LABEL_DECL_UID to the region whose tree_label
   is identical to the label.  */
866 label_to_region_map (void)
868 VEC(int,heap) * label_to_region = NULL;
871 VEC_safe_grow_cleared (int, heap, label_to_region,
872 cfun->cfg->last_label_uid + 1);
873 for (i = cfun->eh->last_region_number; i > 0; --i)
875 struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
876 if (r && r->region_number == i
877 && r->tree_label && LABEL_DECL_UID (r->tree_label) >= 0)
879 VEC_replace (int, label_to_region, LABEL_DECL_UID (r->tree_label),
883 return label_to_region;
886 /* Return number of EH regions. */
888 num_eh_regions (void)
890 return cfun->eh->last_region_number + 1;
893 /* Set up EH labels for RTL. */
896 convert_from_eh_region_ranges (void)
898 int i, n = cfun->eh->last_region_number;
/* Most of the work is already done at the tree level.  All we need to
   do is collect the rtl labels that correspond to the tree labels
   we allocated earlier.  */
904 for (i = 1; i <= n; ++i)
906 struct eh_region *region;
908 region = VEC_index (eh_region, cfun->eh->region_array, i);
909 if (region && region->tree_label)
910 region->label = DECL_RTL_IF_SET (region->tree_label);
915 find_exception_handler_labels (void)
919 if (cfun->eh->region_tree == NULL)
922 for (i = cfun->eh->last_region_number; i > 0; --i)
924 struct eh_region *region;
927 region = VEC_index (eh_region, cfun->eh->region_array, i);
928 if (! region || region->region_number != i)
930 if (crtl->eh.built_landing_pads)
931 lab = region->landing_pad;
937 /* Returns true if the current function has exception handling regions. */
940 current_function_has_exception_handlers (void)
944 for (i = cfun->eh->last_region_number; i > 0; --i)
946 struct eh_region *region;
948 region = VEC_index (eh_region, cfun->eh->region_array, i);
950 && region->region_number == i
951 && region->type != ERT_THROW)
958 /* A subroutine of duplicate_eh_regions. Search the region tree under O
959 for the minimum and maximum region numbers. Update *MIN and *MAX. */
962 duplicate_eh_regions_0 (eh_region o, int *min, int *max)
968 i = bitmap_first_set_bit (o->aka);
971 i = bitmap_last_set_bit (o->aka);
975 if (o->region_number < *min)
976 *min = o->region_number;
977 if (o->region_number > *max)
978 *max = o->region_number;
983 duplicate_eh_regions_0 (o, min, max);
987 duplicate_eh_regions_0 (o, min, max);
992 /* A subroutine of duplicate_eh_regions. Copy the region tree under OLD.
993 Root it at OUTER, and apply EH_OFFSET to the region number. Don't worry
994 about the other internal pointers just yet, just the tree-like pointers. */
997 duplicate_eh_regions_1 (eh_region old, eh_region outer, int eh_offset)
1001 ret = n = GGC_NEW (struct eh_region);
1005 n->next_peer = NULL;
1010 n->aka = BITMAP_GGC_ALLOC ();
1012 EXECUTE_IF_SET_IN_BITMAP (old->aka, 0, i, bi)
1014 bitmap_set_bit (n->aka, i + eh_offset);
1015 VEC_replace (eh_region, cfun->eh->region_array, i + eh_offset, n);
1019 n->region_number += eh_offset;
1020 VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);
1025 n = n->inner = duplicate_eh_regions_1 (old, ret, eh_offset);
1026 while (old->next_peer)
1028 old = old->next_peer;
1029 n = n->next_peer = duplicate_eh_regions_1 (old, ret, eh_offset);
/* Duplicate the EH regions of IFUN, rooted at COPY_REGION, into the current
   function and root the tree below OUTER_REGION.  Remap labels using the MAP
   callback.  The special case of COPY_REGION of 0 means all regions.  */
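
/* A hedged usage sketch (identifiers are illustrative, not the inliner's
   actual code): to copy all of a callee's regions underneath the caller's
   region that contains the call site, remapping labels through a callback,
   one would write roughly

       duplicate_eh_regions (callee_fun, remap_label_cb, remap_data,
                             0, call_site_region_nr);

   where remap_label_cb has the duplicate_eh_regions_map signature.  */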
1041 duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map,
1042 void *data, int copy_region, int outer_region)
1044 eh_region cur, prev_try, outer, *splice;
1045 int i, min_region, max_region, eh_offset, cfun_last_region_number;
1050 #ifdef ENABLE_CHECKING
1051 verify_eh_tree (ifun);
1054 /* Find the range of region numbers to be copied. The interface we
1055 provide here mandates a single offset to find new number from old,
1056 which means we must look at the numbers present, instead of the
1057 count or something else. */
1058 if (copy_region > 0)
1060 min_region = INT_MAX;
1063 cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
1064 duplicate_eh_regions_0 (cur, &min_region, &max_region);
1067 min_region = 1, max_region = ifun->eh->last_region_number;
1068 num_regions = max_region - min_region + 1;
1069 cfun_last_region_number = cfun->eh->last_region_number;
1070 eh_offset = cfun_last_region_number + 1 - min_region;
1072 /* If we've not yet created a region array, do so now. */
1073 cfun->eh->last_region_number = cfun_last_region_number + num_regions;
1074 VEC_safe_grow_cleared (eh_region, gc, cfun->eh->region_array,
1075 cfun->eh->last_region_number + 1);
1077 /* Locate the spot at which to insert the new tree. */
1078 if (outer_region > 0)
1080 outer = VEC_index (eh_region, cfun->eh->region_array, outer_region);
1082 splice = &outer->inner;
1084 splice = &cfun->eh->region_tree;
1089 splice = &cfun->eh->region_tree;
1092 splice = &(*splice)->next_peer;
1094 if (!ifun->eh->region_tree)
1097 for (i = cfun_last_region_number + 1;
1098 i <= cfun->eh->last_region_number; i++)
1100 VEC_replace (eh_region, cfun->eh->region_array, i, outer);
1101 if (outer->aka == NULL)
1102 outer->aka = BITMAP_GGC_ALLOC ();
1103 bitmap_set_bit (outer->aka, i);
1108 /* Copy all the regions in the subtree. */
1109 if (copy_region > 0)
1111 cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
1112 *splice = duplicate_eh_regions_1 (cur, outer, eh_offset);
1118 cur = ifun->eh->region_tree;
1119 *splice = n = duplicate_eh_regions_1 (cur, outer, eh_offset);
1120 while (cur->next_peer)
1122 cur = cur->next_peer;
1123 n = n->next_peer = duplicate_eh_regions_1 (cur, outer, eh_offset);
1127 /* Remap all the labels in the new regions. */
1128 for (i = cfun_last_region_number + 1;
1129 VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
1130 if (cur && cur->tree_label)
1131 cur->tree_label = map (cur->tree_label, data);
1133 /* Search for the containing ERT_TRY region to fix up
1134 the prev_try short-cuts for ERT_CLEANUP regions. */
1136 if (outer_region > 0)
1138 VEC_index (eh_region, cfun->eh->region_array, outer_region);
1139 prev_try && prev_try->type != ERT_TRY; prev_try = prev_try->outer)
1140 if (prev_try->type == ERT_MUST_NOT_THROW
1141 || (prev_try->type == ERT_ALLOWED_EXCEPTIONS
1142 && !prev_try->u.allowed.type_list))
1148 /* Remap all of the internal catch and cleanup linkages. Since we
1149 duplicate entire subtrees, all of the referenced regions will have
1150 been copied too. And since we renumbered them as a block, a simple
1151 bit of arithmetic finds us the index for the replacement region. */
1152 for (i = cfun_last_region_number + 1;
1153 VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
/* All removed EH regions that were toplevel in the input function are now
   in the outer EH region of the output function.  */
1159 gcc_assert (VEC_index
1160 (eh_region, ifun->eh->region_array,
1161 i - eh_offset) == NULL);
1164 VEC_replace (eh_region, cfun->eh->region_array, i, outer);
1165 if (outer->aka == NULL)
1166 outer->aka = BITMAP_GGC_ALLOC ();
1167 bitmap_set_bit (outer->aka, i);
1171 if (i != cur->region_number)
1174 #define REMAP(REG) \
1175 (REG) = VEC_index (eh_region, cfun->eh->region_array, \
1176 (REG)->region_number + eh_offset)
1181 if (cur->u.eh_try.eh_catch)
1182 REMAP (cur->u.eh_try.eh_catch);
1183 if (cur->u.eh_try.last_catch)
1184 REMAP (cur->u.eh_try.last_catch);
1188 if (cur->u.eh_catch.next_catch)
1189 REMAP (cur->u.eh_catch.next_catch);
1190 if (cur->u.eh_catch.prev_catch)
1191 REMAP (cur->u.eh_catch.prev_catch);
1195 if (cur->u.cleanup.prev_try)
1196 REMAP (cur->u.cleanup.prev_try);
1198 cur->u.cleanup.prev_try = prev_try;
1207 #ifdef ENABLE_CHECKING
1208 verify_eh_tree (cfun);
1214 /* Return true if REGION_A is outer to REGION_B in IFUN. */
1217 eh_region_outer_p (struct function *ifun, int region_a, int region_b)
1219 struct eh_region *rp_a, *rp_b;
1221 gcc_assert (ifun->eh->last_region_number > 0);
1222 gcc_assert (ifun->eh->region_tree);
1224 rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
1225 rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
1226 gcc_assert (rp_a != NULL);
1227 gcc_assert (rp_b != NULL);
/* Return the region number of a region that is outer to both REGION_A and
   REGION_B in IFUN.  */
1244 eh_region_outermost (struct function *ifun, int region_a, int region_b)
1246 struct eh_region *rp_a, *rp_b;
1249 gcc_assert (ifun->eh->last_region_number > 0);
1250 gcc_assert (ifun->eh->region_tree);
1252 rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
1253 rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
1254 gcc_assert (rp_a != NULL);
1255 gcc_assert (rp_b != NULL);
1257 b_outer = sbitmap_alloc (ifun->eh->last_region_number + 1);
1258 sbitmap_zero (b_outer);
1262 SET_BIT (b_outer, rp_b->region_number);
1269 if (TEST_BIT (b_outer, rp_a->region_number))
1271 sbitmap_free (b_outer);
1272 return rp_a->region_number;
1278 sbitmap_free (b_outer);
1283 t2r_eq (const void *pentry, const void *pdata)
1285 const_tree const entry = (const_tree) pentry;
1286 const_tree const data = (const_tree) pdata;
1288 return TREE_PURPOSE (entry) == data;
1292 t2r_hash (const void *pentry)
1294 const_tree const entry = (const_tree) pentry;
1295 return TREE_HASH (TREE_PURPOSE (entry));
1299 add_type_for_runtime (tree type)
1303 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1304 TREE_HASH (type), INSERT);
1307 tree runtime = (*lang_eh_runtime_type) (type);
1308 *slot = tree_cons (type, runtime, NULL_TREE);
1313 lookup_type_for_runtime (tree type)
1317 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1318 TREE_HASH (type), NO_INSERT);
1320 /* We should have always inserted the data earlier. */
1321 return TREE_VALUE (*slot);
1325 /* Represent an entry in @TTypes for either catch actions
1326 or exception filter actions. */
1327 struct ttypes_filter GTY(())
1333 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
1334 (a tree) for a @TTypes type node we are thinking about adding. */
1337 ttypes_filter_eq (const void *pentry, const void *pdata)
1339 const struct ttypes_filter *const entry
1340 = (const struct ttypes_filter *) pentry;
1341 const_tree const data = (const_tree) pdata;
1343 return entry->t == data;
1347 ttypes_filter_hash (const void *pentry)
1349 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1350 return TREE_HASH (entry->t);
1353 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
1354 exception specification list we are thinking about adding. */
1355 /* ??? Currently we use the type lists in the order given. Someone
1356 should put these in some canonical order. */
1359 ehspec_filter_eq (const void *pentry, const void *pdata)
1361 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1362 const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
1364 return type_list_equal (entry->t, data->t);
1367 /* Hash function for exception specification lists. */
1370 ehspec_filter_hash (const void *pentry)
1372 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1376 for (list = entry->t; list ; list = TREE_CHAIN (list))
1377 h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
1381 /* Add TYPE (which may be NULL) to crtl->eh.ttype_data, using TYPES_HASH
1382 to speed up the search. Return the filter value to be used. */
1385 add_ttypes_entry (htab_t ttypes_hash, tree type)
1387 struct ttypes_filter **slot, *n;
1389 slot = (struct ttypes_filter **)
1390 htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);
1392 if ((n = *slot) == NULL)
1394 /* Filter value is a 1 based table index. */
1396 n = XNEW (struct ttypes_filter);
1398 n->filter = VEC_length (tree, crtl->eh.ttype_data) + 1;
1401 VEC_safe_push (tree, gc, crtl->eh.ttype_data, type);
1407 /* Add LIST to crtl->eh.ehspec_data, using EHSPEC_HASH and TYPES_HASH
1408 to speed up the search. Return the filter value to be used. */
1411 add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
1413 struct ttypes_filter **slot, *n;
1414 struct ttypes_filter dummy;
1417 slot = (struct ttypes_filter **)
1418 htab_find_slot (ehspec_hash, &dummy, INSERT);
1420 if ((n = *slot) == NULL)
1422 /* Filter value is a -1 based byte index into a uleb128 buffer. */
1424 n = XNEW (struct ttypes_filter);
1426 n->filter = -(VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) + 1);
1429 /* Generate a 0 terminated list of filter values. */
1430 for (; list ; list = TREE_CHAIN (list))
1432 if (targetm.arm_eabi_unwinder)
1433 VARRAY_PUSH_TREE (crtl->eh.ehspec_data, TREE_VALUE (list));
1436 /* Look up each type in the list and encode its filter
1437 value as a uleb128. */
1438 push_uleb128 (&crtl->eh.ehspec_data,
1439 add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
1442 if (targetm.arm_eabi_unwinder)
1443 VARRAY_PUSH_TREE (crtl->eh.ehspec_data, NULL_TREE);
1445 VARRAY_PUSH_UCHAR (crtl->eh.ehspec_data, 0);
1451 /* Generate the action filter values to be used for CATCH and
1452 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
1453 we use lots of landing pads, and so every type or list can share
1454 the same filter value, which saves table space. */
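
/* Illustration of the numbering scheme (a sketch, not generated data):
   if a function catches type A and then type B, add_ttypes_entry hands out
   the 1-based @TTypes indices 1 and 2, and those become the regions'
   filter values.  An exception specification list instead gets a negative
   filter from add_ehspec_entry, e.g. -1, which is a byte offset into the
   0-terminated, uleb128-encoded ehspec table.  */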
1457 assign_filter_values (void)
1460 htab_t ttypes, ehspec;
1462 crtl->eh.ttype_data = VEC_alloc (tree, gc, 16);
1463 if (targetm.arm_eabi_unwinder)
1464 VARRAY_TREE_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");
1466 VARRAY_UCHAR_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");
1468 ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
1469 ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
1471 for (i = cfun->eh->last_region_number; i > 0; --i)
1473 struct eh_region *r;
1475 r = VEC_index (eh_region, cfun->eh->region_array, i);
1477 /* Mind we don't process a region more than once. */
1478 if (!r || r->region_number != i)
1484 /* Whatever type_list is (NULL or true list), we build a list
1485 of filters for the region. */
1486 r->u.eh_catch.filter_list = NULL_TREE;
1488 if (r->u.eh_catch.type_list != NULL)
1490 /* Get a filter value for each of the types caught and store
1491 them in the region's dedicated list. */
1492 tree tp_node = r->u.eh_catch.type_list;
1494 for (;tp_node; tp_node = TREE_CHAIN (tp_node))
1496 int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
1497 tree flt_node = build_int_cst (NULL_TREE, flt);
1499 r->u.eh_catch.filter_list
1500 = tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
1505 /* Get a filter value for the NULL list also since it will need
1506 an action record anyway. */
1507 int flt = add_ttypes_entry (ttypes, NULL);
1508 tree flt_node = build_int_cst (NULL_TREE, flt);
1510 r->u.eh_catch.filter_list
1511 = tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
1516 case ERT_ALLOWED_EXCEPTIONS:
1518 = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
1526 htab_delete (ttypes);
1527 htab_delete (ehspec);
/* Emit SEQ into a basic block just before INSN (which is assumed to be the
   first instruction of some existing BB) and return the newly created BB.  */
1534 emit_to_new_bb_before (rtx seq, rtx insn)
1541 /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
1542 call), we don't want it to go into newly created landing pad or other EH
1544 for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
1545 if (e->flags & EDGE_FALLTHRU)
1546 force_nonfallthru (e);
1549 last = emit_insn_before (seq, insn);
1550 if (BARRIER_P (last))
1551 last = PREV_INSN (last);
1552 bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
1553 update_bb_for_insn (bb);
1554 bb->flags |= BB_SUPERBLOCK;
1558 /* Generate the code to actually handle exceptions, which will follow the
1562 build_post_landing_pads (void)
1566 for (i = cfun->eh->last_region_number; i > 0; --i)
1568 struct eh_region *region;
1571 region = VEC_index (eh_region, cfun->eh->region_array, i);
1572 /* Mind we don't process a region more than once. */
1573 if (!region || region->region_number != i)
1576 switch (region->type)
1579 /* ??? Collect the set of all non-overlapping catch handlers
1580 all the way up the chain until blocked by a cleanup. */
1581 /* ??? Outer try regions can share landing pads with inner
1582 try regions if the types are completely non-overlapping,
1583 and there are no intervening cleanups. */
1585 region->post_landing_pad = gen_label_rtx ();
1589 emit_label (region->post_landing_pad);
1591 /* ??? It is mighty inconvenient to call back into the
1592 switch statement generation code in expand_end_case.
1593 Rapid prototyping sez a sequence of ifs. */
1595 struct eh_region *c;
1596 for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
1598 if (c->u.eh_catch.type_list == NULL)
1599 emit_jump (c->label);
1602 /* Need for one cmp/jump per type caught. Each type
1603 list entry has a matching entry in the filter list
1604 (see assign_filter_values). */
1605 tree tp_node = c->u.eh_catch.type_list;
1606 tree flt_node = c->u.eh_catch.filter_list;
1610 emit_cmp_and_jump_insns
1612 GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
1614 targetm.eh_return_filter_mode (), 0, c->label);
1616 tp_node = TREE_CHAIN (tp_node);
1617 flt_node = TREE_CHAIN (flt_node);
1623 /* We delay the generation of the _Unwind_Resume until we generate
1624 landing pads. We emit a marker here so as to get good control
1625 flow data in the meantime. */
1627 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1633 emit_to_new_bb_before (seq, region->u.eh_try.eh_catch->label);
1637 case ERT_ALLOWED_EXCEPTIONS:
1638 region->post_landing_pad = gen_label_rtx ();
1642 emit_label (region->post_landing_pad);
1644 emit_cmp_and_jump_insns (crtl->eh.filter,
1645 GEN_INT (region->u.allowed.filter),
1647 targetm.eh_return_filter_mode (), 0, region->label);
1649 /* We delay the generation of the _Unwind_Resume until we generate
1650 landing pads. We emit a marker here so as to get good control
1651 flow data in the meantime. */
1653 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1659 emit_to_new_bb_before (seq, region->label);
1663 case ERT_MUST_NOT_THROW:
1664 region->post_landing_pad = region->label;
1669 /* Nothing to do. */
1678 /* Replace RESX patterns with jumps to the next handler if any, or calls to
1679 _Unwind_Resume otherwise. */
1682 connect_post_landing_pads (void)
1686 for (i = cfun->eh->last_region_number; i > 0; --i)
1688 struct eh_region *region;
1689 struct eh_region *outer;
1693 region = VEC_index (eh_region, cfun->eh->region_array, i);
1694 /* Mind we don't process a region more than once. */
1695 if (!region || region->region_number != i)
1698 /* If there is no RESX, or it has been deleted by flow, there's
1699 nothing to fix up. */
1700 if (! region->resume || INSN_DELETED_P (region->resume))
1703 /* Search for another landing pad in this function. */
1704 for (outer = region->outer; outer ; outer = outer->outer)
1705 if (outer->post_landing_pad)
1713 basic_block src, dest;
1715 emit_jump (outer->post_landing_pad);
1716 src = BLOCK_FOR_INSN (region->resume);
1717 dest = BLOCK_FOR_INSN (outer->post_landing_pad);
1718 while (EDGE_COUNT (src->succs) > 0)
1719 remove_edge (EDGE_SUCC (src, 0));
1720 e = make_edge (src, dest, 0);
1721 e->probability = REG_BR_PROB_BASE;
1722 e->count = src->count;
1726 emit_library_call (unwind_resume_libfunc, LCT_THROW,
1727 VOIDmode, 1, crtl->eh.exc_ptr, ptr_mode);
1729 /* What we just emitted was a throwing libcall, so it got a
1730 barrier automatically added after it. If the last insn in
1731 the libcall sequence isn't the barrier, it's because the
1732 target emits multiple insns for a call, and there are insns
1733 after the actual call insn (which are redundant and would be
1734 optimized away). The barrier is inserted exactly after the
1735 call insn, so let's go get that and delete the insns after
1736 it, because below we need the barrier to be the last insn in
1738 delete_insns_since (NEXT_INSN (last_call_insn ()));
1743 barrier = emit_insn_before (seq, region->resume);
1744 /* Avoid duplicate barrier. */
1745 gcc_assert (BARRIER_P (barrier));
1746 delete_insn (barrier);
1747 delete_insn (region->resume);
1749 /* ??? From tree-ssa we can wind up with catch regions whose
1750 label is not instantiated, but whose resx is present. Now
1751 that we've dealt with the resx, kill the region. */
1752 if (region->label == NULL && region->type == ERT_CLEANUP)
1753 remove_eh_handler (region);
1759 dw2_build_landing_pads (void)
1763 for (i = cfun->eh->last_region_number; i > 0; --i)
1765 struct eh_region *region;
1770 region = VEC_index (eh_region, cfun->eh->region_array, i);
1771 /* Mind we don't process a region more than once. */
1772 if (!region || region->region_number != i)
1775 if (region->type != ERT_CLEANUP
1776 && region->type != ERT_TRY
1777 && region->type != ERT_ALLOWED_EXCEPTIONS)
1782 region->landing_pad = gen_label_rtx ();
1783 emit_label (region->landing_pad);
1785 #ifdef HAVE_exception_receiver
1786 if (HAVE_exception_receiver)
1787 emit_insn (gen_exception_receiver ());
1790 #ifdef HAVE_nonlocal_goto_receiver
1791 if (HAVE_nonlocal_goto_receiver)
1792 emit_insn (gen_nonlocal_goto_receiver ());
1797 emit_move_insn (crtl->eh.exc_ptr,
1798 gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
1799 emit_move_insn (crtl->eh.filter,
1800 gen_rtx_REG (targetm.eh_return_filter_mode (),
1801 EH_RETURN_DATA_REGNO (1)));
1806 bb = emit_to_new_bb_before (seq, region->post_landing_pad);
1807 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
1808 e->count = bb->count;
1809 e->probability = REG_BR_PROB_BASE;
1816 int directly_reachable;
1819 int call_site_index;
1823 sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
1826 bool found_one = false;
1828 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1830 struct eh_region *region;
1831 enum reachable_code rc;
1835 if (! INSN_P (insn))
1838 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1839 if (!note || INTVAL (XEXP (note, 0)) <= 0)
1842 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
1846 type_thrown = NULL_TREE;
1847 if (region->type == ERT_THROW)
1849 type_thrown = region->u.eh_throw.type;
1850 region = region->outer;
1853 /* Find the first containing region that might handle the exception.
1854 That's the landing pad to which we will transfer control. */
1855 rc = RNL_NOT_CAUGHT;
1856 for (; region; region = region->outer)
1858 rc = reachable_next_level (region, type_thrown, NULL, false);
1859 if (rc != RNL_NOT_CAUGHT)
1862 if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
1864 lp_info[region->region_number].directly_reachable = 1;
1873 sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
1878 /* First task: build the action table. */
1880 VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
1881 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
1883 for (i = cfun->eh->last_region_number; i > 0; --i)
1884 if (lp_info[i].directly_reachable)
1886 struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
1888 r->landing_pad = dispatch_label;
1889 lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
1890 if (lp_info[i].action_index != -1)
1891 crtl->uses_eh_lsda = 1;
1894 htab_delete (ar_hash);
1896 /* Next: assign dispatch values. In dwarf2 terms, this would be the
1897 landing pad label for the region. For sjlj though, there is one
1898 common landing pad from which we dispatch to the post-landing pads.
1900 A region receives a dispatch index if it is directly reachable
1901 and requires in-function processing. Regions that share post-landing
1902 pads may share dispatch indices. */
1903 /* ??? Post-landing pad sharing doesn't actually happen at the moment
1904 (see build_post_landing_pads) so we don't bother checking for it. */
1907 for (i = cfun->eh->last_region_number; i > 0; --i)
1908 if (lp_info[i].directly_reachable)
1909 lp_info[i].dispatch_index = index++;
/* Finally: assign call-site values.  In dwarf2 terms, this would be
   the region number assigned by convert_to_eh_region_ranges, but it
   handles no-action and must-not-throw differently.  */
1916 for (i = cfun->eh->last_region_number; i > 0; --i)
1917 if (lp_info[i].directly_reachable)
1919 int action = lp_info[i].action_index;
1921 /* Map must-not-throw to otherwise unused call-site index 0. */
1924 /* Map no-action to otherwise unused call-site index -1. */
1925 else if (action == -1)
1927 /* Otherwise, look it up in the table. */
1929 index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
1931 lp_info[i].call_site_index = index;
1936 sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
1938 int last_call_site = -2;
1941 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1943 struct eh_region *region;
1945 rtx note, before, p;
1947 /* Reset value tracking at extended basic block boundaries. */
1949 last_call_site = -2;
1951 if (! INSN_P (insn))
1954 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1956 /* Calls that are known to not throw need not be marked. */
1957 if (note && INTVAL (XEXP (note, 0)) <= 0)
1961 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
1967 /* Calls (and trapping insns) without notes are outside any
1968 exception handling region in this function. Mark them as
1971 || (flag_non_call_exceptions
1972 && may_trap_p (PATTERN (insn))))
1973 this_call_site = -1;
1978 this_call_site = lp_info[region->region_number].call_site_index;
1980 if (this_call_site == last_call_site)
/* Don't separate a call from its argument loads.  */
1986 before = find_first_parameter_load (insn, NULL_RTX);
1989 mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
1990 sjlj_fc_call_site_ofs);
1991 emit_move_insn (mem, GEN_INT (this_call_site));
1995 emit_insn_before (p, before);
1996 last_call_site = this_call_site;
2000 /* Construct the SjLj_Function_Context. */
2003 sjlj_emit_function_enter (rtx dispatch_label)
2005 rtx fn_begin, fc, mem, seq;
2006 bool fn_begin_outside_block;
2008 fc = crtl->eh.sjlj_fc;
2012 /* We're storing this libcall's address into memory instead of
2013 calling it directly. Thus, we must call assemble_external_libcall
2014 here, as we can not depend on emit_library_call to do it for us. */
2015 assemble_external_libcall (eh_personality_libfunc);
2016 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
2017 emit_move_insn (mem, eh_personality_libfunc);
2019 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
2020 if (crtl->uses_eh_lsda)
2025 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
2026 sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
2027 SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
2028 emit_move_insn (mem, sym);
2031 emit_move_insn (mem, const0_rtx);
2033 #ifdef DONT_USE_BUILTIN_SETJMP
2036 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
2037 TYPE_MODE (integer_type_node), 1,
2038 plus_constant (XEXP (fc, 0),
2039 sjlj_fc_jbuf_ofs), Pmode);
2041 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2042 TYPE_MODE (integer_type_node), 0, dispatch_label);
2043 add_reg_br_prob_note (get_insns (), REG_BR_PROB_BASE/100);
2046 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2050 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2051 1, XEXP (fc, 0), Pmode);
2056 /* ??? Instead of doing this at the beginning of the function,
2057 do this in a block that is at loop level 0 and dominates all
2058 can_throw_internal instructions. */
2060 fn_begin_outside_block = true;
2061 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2062 if (NOTE_P (fn_begin))
2064 if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2066 else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
2067 fn_begin_outside_block = false;
2070 if (fn_begin_outside_block)
2071 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
2073 emit_insn_after (seq, fn_begin);
2076 /* Call back from expand_function_end to know where we should put
2077 the call to unwind_sjlj_unregister_libfunc if needed. */
2080 sjlj_emit_function_exit_after (rtx after)
2082 crtl->eh.sjlj_exit_after = after;
2086 sjlj_emit_function_exit (void)
2094 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2095 1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);
2100 /* ??? Really this can be done in any block at loop level 0 that
2101 post-dominates all can_throw_internal instructions. This is
2102 the last possible moment. */
2104 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
2105 if (e->flags & EDGE_FALLTHRU)
/* Figure out whether the place where we are supposed to insert the libcall
   is inside the last basic block or after it.  In the latter case we need
   to emit it on the edge.  */
2114 gcc_assert (e->src->next_bb == EXIT_BLOCK_PTR);
2115 for (insn = BB_HEAD (e->src); ; insn = NEXT_INSN (insn))
2117 if (insn == crtl->eh.sjlj_exit_after)
2120 insn = NEXT_INSN (insn);
2121 emit_insn_after (seq, insn);
2124 if (insn == BB_END (e->src))
2127 insert_insn_on_edge (seq, e);
2132 sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
2134 enum machine_mode unwind_word_mode = targetm.unwind_word_mode ();
2135 enum machine_mode filter_mode = targetm.eh_return_filter_mode ();
2136 int i, first_reachable;
2137 rtx mem, dispatch, seq, fc;
2142 fc = crtl->eh.sjlj_fc;
2146 emit_label (dispatch_label);
2148 #ifndef DONT_USE_BUILTIN_SETJMP
2149 expand_builtin_setjmp_receiver (dispatch_label);
2152 /* Load up dispatch index, exc_ptr and filter values from the
2153 function context. */
2154 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2155 sjlj_fc_call_site_ofs);
2156 dispatch = copy_to_reg (mem);
2158 mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
2159 if (unwind_word_mode != ptr_mode)
2161 #ifdef POINTERS_EXTEND_UNSIGNED
2162 mem = convert_memory_address (ptr_mode, mem);
2164 mem = convert_to_mode (ptr_mode, mem, 0);
2167 emit_move_insn (crtl->eh.exc_ptr, mem);
2169 mem = adjust_address (fc, unwind_word_mode,
2170 sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
2171 if (unwind_word_mode != filter_mode)
2172 mem = convert_to_mode (filter_mode, mem, 0);
2173 emit_move_insn (crtl->eh.filter, mem);
2175 /* Jump to one of the directly reachable regions. */
2176 /* ??? This really ought to be using a switch statement. */
2178 first_reachable = 0;
2179 for (i = cfun->eh->last_region_number; i > 0; --i)
2181 if (! lp_info[i].directly_reachable)
2184 if (! first_reachable)
2186 first_reachable = i;
2190 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2191 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
2192 ((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, i))
2193 ->post_landing_pad);
2199 before = (((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, first_reachable))
2200 ->post_landing_pad);
2202 bb = emit_to_new_bb_before (seq, before);
2203 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
2204 e->count = bb->count;
2205 e->probability = REG_BR_PROB_BASE;
2209 sjlj_build_landing_pads (void)
2211 struct sjlj_lp_info *lp_info;
2213 lp_info = XCNEWVEC (struct sjlj_lp_info, cfun->eh->last_region_number + 1);
2215 if (sjlj_find_directly_reachable_regions (lp_info))
2217 rtx dispatch_label = gen_label_rtx ();
2218 int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
2219 TYPE_MODE (sjlj_fc_type_node),
2220 TYPE_ALIGN (sjlj_fc_type_node));
2222 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2223 int_size_in_bytes (sjlj_fc_type_node),
2226 sjlj_assign_call_site_values (dispatch_label, lp_info);
2227 sjlj_mark_call_sites (lp_info);
2229 sjlj_emit_function_enter (dispatch_label);
2230 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2231 sjlj_emit_function_exit ();
2238 finish_eh_generation (void)
2242 /* Nothing to do if no regions created. */
2243 if (cfun->eh->region_tree == NULL)
2246 /* The object here is to provide find_basic_blocks with detailed
2247 information (via reachable_handlers) on how exception control
2248 flows within the function. In this first pass, we can include
2249 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2250 regions, and hope that it will be useful in deleting unreachable
2251 handlers. Subsequently, we will generate landing pads which will
2252 connect many of the handlers, and then type information will not
2253 be effective. Still, this is a win over previous implementations. */
2255 /* These registers are used by the landing pads. Make sure they
2256 have been generated. */
2257 get_exception_pointer ();
2258 get_exception_filter ();
2260 /* Construct the landing pads. */
2262 assign_filter_values ();
2263 build_post_landing_pads ();
2264 connect_post_landing_pads ();
2265 if (USING_SJLJ_EXCEPTIONS)
2266 sjlj_build_landing_pads ();
2268 dw2_build_landing_pads ();
2270 crtl->eh.built_landing_pads = 1;
2272 /* We've totally changed the CFG. Start over. */
2273 find_exception_handler_labels ();
2274 break_superblocks ();
2275 if (USING_SJLJ_EXCEPTIONS
2276 /* Kludge for Alpha/Tru64 (see alpha_gp_save_rtx). */
2277 || single_succ_edge (ENTRY_BLOCK_PTR)->insns.r)
2278 commit_edge_insertions ();
2284 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2286 if (e->flags & EDGE_EH)
2295 rtl_make_eh_edge (NULL, bb, BB_END (bb));
2299 /* This section handles removing dead code for flow. */
2301 /* Splice REGION from the region tree and replace it by REPLACE etc. */
2304 remove_eh_handler_and_replace (struct eh_region *region,
2305 struct eh_region *replace)
2307 struct eh_region **pp, **pp_start, *p, *outer, *inner;
2310 outer = region->outer;
2311 /* For the benefit of efficiently handling REG_EH_REGION notes,
2312 replace this region in the region array with its containing
2313 region. Note that previous region deletions may result in
2314 multiple copies of this region in the array, so we have a
2315 list of alternate numbers by which we are known. */
2317 VEC_replace (eh_region, cfun->eh->region_array, region->region_number,
2324 EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
2326 VEC_replace (eh_region, cfun->eh->region_array, i, replace);
2333 replace->aka = BITMAP_GGC_ALLOC ();
2335 bitmap_ior_into (replace->aka, region->aka);
2336 bitmap_set_bit (replace->aka, region->region_number);
2339 if (crtl->eh.built_landing_pads)
2340 lab = region->landing_pad;
2342 lab = region->label;
2344 pp_start = &outer->inner;
2346 pp_start = &cfun->eh->region_tree;
2347 for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
2349 *pp = region->next_peer;
2352 pp_start = &replace->inner;
2354 pp_start = &cfun->eh->region_tree;
2355 inner = region->inner;
2358 for (p = inner; p->next_peer ; p = p->next_peer)
2362 p->next_peer = *pp_start;
2366 if (region->type == ERT_CATCH)
2368 struct eh_region *eh_try, *next, *prev;
2370 for (eh_try = region->next_peer;
2371 eh_try->type == ERT_CATCH;
2372 eh_try = eh_try->next_peer)
2374 gcc_assert (eh_try->type == ERT_TRY);
2376 next = region->u.eh_catch.next_catch;
2377 prev = region->u.eh_catch.prev_catch;
2380 next->u.eh_catch.prev_catch = prev;
2382 eh_try->u.eh_try.last_catch = prev;
2384 prev->u.eh_catch.next_catch = next;
2387 eh_try->u.eh_try.eh_catch = next;
2389 remove_eh_handler (eh_try);
2394 /* Splice REGION from the region tree and replace it by the outer region.  */
2398 remove_eh_handler (struct eh_region *region)
2400 remove_eh_handler_and_replace (region, region->outer);
2403 /* Remove EH region R that has turned out to have no code in its handler.  */
2406 remove_eh_region (int r)
2408 struct eh_region *region;
2410 region = VEC_index (eh_region, cfun->eh->region_array, r);
2411 remove_eh_handler (region);
2414 /* Invokes CALLBACK for every exception handler label. Only used by old
2415 loop hackery; should not be used by new code. */
2418 for_each_eh_label (void (*callback) (rtx))
2421 for (i = 0; i < cfun->eh->last_region_number; i++)
2423 struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
2424 if (r && r->region_number == i && r->label
2425 && GET_CODE (r->label) == CODE_LABEL)
2426 (*callback) (r->label);
2430 /* Invoke CALLBACK for every exception region in the current function. */
2433 for_each_eh_region (void (*callback) (struct eh_region *))
2435 int i, n = cfun->eh->last_region_number;
2436 for (i = 1; i <= n; ++i)
2438 struct eh_region *region;
2440 region = VEC_index (eh_region, cfun->eh->region_array, i);
2442 (*callback) (region);
2446 /* This section describes CFG exception edges for flow. */
2448 /* For communicating between calls to reachable_next_level. */
2449 struct reachable_info
2453 void (*callback) (struct eh_region *, void *);
2454 void *callback_data;
2457 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2458 base class of TYPE, is in HANDLED. */
2461 check_handled (tree handled, tree type)
2465 /* We can check for exact matches without front-end help. */
2466 if (! lang_eh_type_covers)
2468 for (t = handled; t ; t = TREE_CHAIN (t))
2469 if (TREE_VALUE (t) == type)
2474 for (t = handled; t ; t = TREE_CHAIN (t))
2475 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2482 /* A subroutine of reachable_next_level. If we are collecting a list
2483 of handlers, add one. After landing pad generation, reference
2484 it instead of the handlers themselves. Further, the handlers are
2485 all wired together, so by referencing one, we've got them all.
2486 Before landing pad generation we reference each handler individually.
2488 LP_REGION contains the landing pad; REGION is the handler. */
2491 add_reachable_handler (struct reachable_info *info,
2492 struct eh_region *lp_region, struct eh_region *region)
2497 if (crtl->eh.built_landing_pads)
2498 info->callback (lp_region, info->callback_data);
2500 info->callback (region, info->callback_data);
2503 /* Process one level of exception regions for reachability.
2504 If TYPE_THROWN is non-null, then it is the *exact* type being
2505 propagated. If INFO is non-null, then collect handler labels
2506 and caught/allowed type information between invocations. */
2508 static enum reachable_code
2509 reachable_next_level (struct eh_region *region, tree type_thrown,
2510 struct reachable_info *info,
2513 switch (region->type)
2516 /* Before landing-pad generation, we model control flow
2517 directly to the individual handlers. In this way we can
2518 see that catch handler types may shadow one another. */
2519 add_reachable_handler (info, region, region);
2520 return RNL_MAYBE_CAUGHT;
2524 struct eh_region *c;
2525 enum reachable_code ret = RNL_NOT_CAUGHT;
2527 for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
2529 /* A catch-all handler ends the search. */
2530 if (c->u.eh_catch.type_list == NULL)
2532 add_reachable_handler (info, region, c);
2538 /* If we have at least one type match, end the search. */
2539 tree tp_node = c->u.eh_catch.type_list;
2541 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2543 tree type = TREE_VALUE (tp_node);
2545 if (type == type_thrown
2546 || (lang_eh_type_covers
2547 && (*lang_eh_type_covers) (type, type_thrown)))
2549 add_reachable_handler (info, region, c);
2554 /* If we have definitive information of a match failure,
2555 the catch won't trigger. */
2556 if (lang_eh_type_covers)
2557 return RNL_NOT_CAUGHT;
2560 /* At this point, we either don't know what type is thrown or
2561 don't have front-end assistance to help deciding if it is
2562 covered by one of the types in the list for this region.
2564 We'd then like to add this region to the list of reachable
2565 handlers since it is indeed potentially reachable based on the
2566 information we have.
2568 Actually, this handler is for sure not reachable if all the
2569 types it matches have already been caught. That is, it is only
2570 potentially reachable if at least one of the types it catches
2571 has not been previously caught. */
2574 ret = RNL_MAYBE_CAUGHT;
2577 tree tp_node = c->u.eh_catch.type_list;
2578 bool maybe_reachable = false;
2580 /* Compute the potential reachability of this handler and
2581 update the list of types caught at the same time. */
2582 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2584 tree type = TREE_VALUE (tp_node);
2586 if (! check_handled (info->types_caught, type))
2589 = tree_cons (NULL, type, info->types_caught);
2591 maybe_reachable = true;
2595 if (maybe_reachable)
2597 add_reachable_handler (info, region, c);
2599 /* ??? If the catch type is a base class of every allowed
2600 type, then we know we can stop the search. */
2601 ret = RNL_MAYBE_CAUGHT;
2609 case ERT_ALLOWED_EXCEPTIONS:
2610 /* An empty list of types definitely ends the search. */
2611 if (region->u.allowed.type_list == NULL_TREE)
2613 add_reachable_handler (info, region, region);
2617 /* Collect a list of lists of allowed types for use in detecting
2618 when a catch may be transformed into a catch-all. */
2620 info->types_allowed = tree_cons (NULL_TREE,
2621 region->u.allowed.type_list,
2622 info->types_allowed);
2624 /* If we have definitive information about the type hierarchy,
2625 then we can tell if the thrown type will pass through the filter.  */
2627 if (type_thrown && lang_eh_type_covers)
2629 if (check_handled (region->u.allowed.type_list, type_thrown))
2630 return RNL_NOT_CAUGHT;
2633 add_reachable_handler (info, region, region);
2638 add_reachable_handler (info, region, region);
2639 return RNL_MAYBE_CAUGHT;
2642 /* Catch regions are handled by their controlling try region. */
2643 return RNL_NOT_CAUGHT;
2645 case ERT_MUST_NOT_THROW:
2646 /* Here we end our search, since no exceptions may propagate.
2648 Local landing pads of ERT_MUST_NOT_THROW regions are reachable
2649 only via locally handled RESX instructions.
2651 When we inline a function call, we can bring in new handlers.  To avoid
2652 ERT_MUST_NOT_THROW landing pads being deleted as unreachable, assume
2653 that such handlers exist for any inlinable call until the inlining
2654 decisions are fixed.  */
2658 add_reachable_handler (info, region, region);
2666 /* Shouldn't see these here. */
2674 /* Invoke CALLBACK on each region reachable from REGION_NUMBER. */
2677 foreach_reachable_handler (int region_number, bool is_resx, bool inlinable_call,
2678 void (*callback) (struct eh_region *, void *),
2679 void *callback_data)
2681 struct reachable_info info;
2682 struct eh_region *region;
2685 memset (&info, 0, sizeof (info));
2686 info.callback = callback;
2687 info.callback_data = callback_data;
2689 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
2693 type_thrown = NULL_TREE;
2696 /* A RESX leaves a region instead of entering it. Thus the
2697 region itself may have been deleted out from under us. */
2700 region = region->outer;
2702 else if (region->type == ERT_THROW)
2704 type_thrown = region->u.eh_throw.type;
2705 region = region->outer;
2710 if (reachable_next_level (region, type_thrown, &info,
2711 inlinable_call || is_resx) >= RNL_CAUGHT)
2713 /* If we have processed one cleanup, there is no point in
2714 processing any more of them. Each cleanup will have an edge
2715 to the next outer cleanup region, so the flow graph will be accurate.  */
2717 if (region->type == ERT_CLEANUP)
2718 region = region->u.cleanup.prev_try;
2720 region = region->outer;
2724 /* Retrieve a list of labels of exception handlers which can be
2725 reached by a given insn. */
2728 arh_to_landing_pad (struct eh_region *region, void *data)
2730 rtx *p_handlers = (rtx *) data;
2732 *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
2736 arh_to_label (struct eh_region *region, void *data)
2738 rtx *p_handlers = (rtx *) data;
2739 *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
2743 reachable_handlers (rtx insn)
2745 bool is_resx = false;
2746 rtx handlers = NULL;
2750 && GET_CODE (PATTERN (insn)) == RESX)
2752 region_number = XINT (PATTERN (insn), 0);
2757 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2758 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2760 region_number = INTVAL (XEXP (note, 0));
2763 foreach_reachable_handler (region_number, is_resx, false,
2764 (crtl->eh.built_landing_pads
2765 ? arh_to_landing_pad
2772 /* Determine if the given INSN can throw an exception that is caught
2773 within the function. */
2776 can_throw_internal_1 (int region_number, bool is_resx, bool inlinable_call)
2778 struct eh_region *region;
2781 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
2785 type_thrown = NULL_TREE;
2787 region = region->outer;
2788 else if (region->type == ERT_THROW)
2790 type_thrown = region->u.eh_throw.type;
2791 region = region->outer;
2794 /* If this exception is ignored by each and every containing region,
2795 then control passes straight out. The runtime may handle some
2796 regions, which also do not require processing internally. */
2797 for (; region; region = region->outer)
2799 enum reachable_code how = reachable_next_level (region, type_thrown, 0,
2800 inlinable_call || is_resx);
2801 if (how == RNL_BLOCKED)
2803 if (how != RNL_NOT_CAUGHT)
2811 can_throw_internal (const_rtx insn)
2815 if (! INSN_P (insn))
2819 && GET_CODE (PATTERN (insn)) == RESX
2820 && XINT (PATTERN (insn), 0) > 0)
2821 return can_throw_internal_1 (XINT (PATTERN (insn), 0), true, false);
2823 if (NONJUMP_INSN_P (insn)
2824 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2825 insn = XVECEXP (PATTERN (insn), 0, 0);
2827 /* Every insn that might throw has an EH_REGION note. */
2828 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2829 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2832 return can_throw_internal_1 (INTVAL (XEXP (note, 0)), false, false);
2835 /* Determine if the given INSN can throw an exception that is
2836 visible outside the function. */
2839 can_throw_external_1 (int region_number, bool is_resx, bool inlinable_call)
2841 struct eh_region *region;
2844 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
2848 type_thrown = NULL_TREE;
2850 region = region->outer;
2851 else if (region->type == ERT_THROW)
2853 type_thrown = region->u.eh_throw.type;
2854 region = region->outer;
2857 /* If the exception is caught or blocked by any containing region,
2858 then it is not seen by any calling function. */
2859 for (; region ; region = region->outer)
2860 if (reachable_next_level (region, type_thrown, NULL,
2861 inlinable_call || is_resx) >= RNL_CAUGHT)
2868 can_throw_external (const_rtx insn)
2872 if (! INSN_P (insn))
2876 && GET_CODE (PATTERN (insn)) == RESX
2877 && XINT (PATTERN (insn), 0) > 0)
2878 return can_throw_external_1 (XINT (PATTERN (insn), 0), true, false);
2880 if (NONJUMP_INSN_P (insn)
2881 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2882 insn = XVECEXP (PATTERN (insn), 0, 0);
2884 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2887 /* Calls (and trapping insns) without notes are outside any
2888 exception handling region in this function. We have to
2889 assume it might throw. Given that the front end and middle
2890 ends mark known NOTHROW functions, this isn't so wildly inaccurate.  */
2892 return (CALL_P (insn)
2893 || (flag_non_call_exceptions
2894 && may_trap_p (PATTERN (insn))));
2896 if (INTVAL (XEXP (note, 0)) <= 0)
2899 return can_throw_external_1 (INTVAL (XEXP (note, 0)), false, false);
2902 /* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls. */
2905 set_nothrow_function_flags (void)
2911 /* Assume crtl->all_throwers_are_sibcalls until we encounter
2912 something that can throw an exception. We specifically exempt
2913 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
2914 and can't throw.  Most CALL_INSNs are not SIBLING_CALL_P, so this is optimistic.  */
2917 crtl->all_throwers_are_sibcalls = 1;
2919 /* If we don't know that this implementation of the function will
2920 actually be used, then we must not set TREE_NOTHROW, since
2921 callers must not assume that this function does not throw. */
2922 if (TREE_NOTHROW (current_function_decl))
2925 if (! flag_exceptions)
2928 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2929 if (can_throw_external (insn))
2933 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
2935 crtl->all_throwers_are_sibcalls = 0;
2940 for (insn = crtl->epilogue_delay_list; insn;
2941 insn = XEXP (insn, 1))
2942 if (can_throw_external (insn))
2946 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
2948 crtl->all_throwers_are_sibcalls = 0;
2953 && (cgraph_function_body_availability (cgraph_node (current_function_decl))
2954 >= AVAIL_AVAILABLE))
2955 TREE_NOTHROW (current_function_decl) = 1;
2959 struct rtl_opt_pass pass_set_nothrow_function_flags =
2965 set_nothrow_function_flags, /* execute */
2968 0, /* static_pass_number */
2970 0, /* properties_required */
2971 0, /* properties_provided */
2972 0, /* properties_destroyed */
2973 0, /* todo_flags_start */
2974 0, /* todo_flags_finish */
2979 /* Various hooks for unwind library. */
2981 /* Do any necessary initialization to access arbitrary stack frames.
2982 On the SPARC, this means flushing the register windows. */
2985 expand_builtin_unwind_init (void)
2987 /* Set this so all the registers get saved in our frame; we need to be
2988 able to copy the saved values for any registers from frames we unwind. */
2989 crtl->saves_all_registers = 1;
2991 #ifdef SETUP_FRAME_ADDRESSES
2992 SETUP_FRAME_ADDRESSES ();
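/* Expand __builtin_eh_return_data_regno: map the constant argument through
   EH_RETURN_DATA_REGNO and the debug register numbering, and return the
   result as a CONST_INT.  */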
2997 expand_builtin_eh_return_data_regno (tree exp)
2999 tree which = CALL_EXPR_ARG (exp, 0);
3000 unsigned HOST_WIDE_INT iwhich;
3002 if (TREE_CODE (which) != INTEGER_CST)
3004 error ("argument of %<__builtin_eh_return_regno%> must be constant");
3008 iwhich = tree_low_cst (which, 1);
3009 iwhich = EH_RETURN_DATA_REGNO (iwhich);
3010 if (iwhich == INVALID_REGNUM)
3013 #ifdef DWARF_FRAME_REGNUM
3014 iwhich = DWARF_FRAME_REGNUM (iwhich);
3016 iwhich = DBX_REGISTER_NUMBER (iwhich);
3019 return GEN_INT (iwhich);
3022 /* Given a value extracted from the return address register or stack slot,
3023 return the actual address encoded in that value. */
3026 expand_builtin_extract_return_addr (tree addr_tree)
3028 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
3030 if (GET_MODE (addr) != Pmode
3031 && GET_MODE (addr) != VOIDmode)
3033 #ifdef POINTERS_EXTEND_UNSIGNED
3034 addr = convert_memory_address (Pmode, addr);
3036 addr = convert_to_mode (Pmode, addr, 0);
3040 /* First mask out any unwanted bits. */
3041 #ifdef MASK_RETURN_ADDR
3042 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
3045 /* Then adjust to find the real return address. */
3046 #if defined (RETURN_ADDR_OFFSET)
3047 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
3053 /* Given an actual address in addr_tree, do any necessary encoding
3054 and return the value to be stored in the return address register or
3055 stack slot so the epilogue will return to that address. */
3058 expand_builtin_frob_return_addr (tree addr_tree)
3060 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
3062 addr = convert_memory_address (Pmode, addr);
3064 #ifdef RETURN_ADDR_OFFSET
3065 addr = force_reg (Pmode, addr);
3066 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
3072 /* Set up the epilogue with the magic bits we'll need to return to the
3073 exception handler. */
3076 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
3081 #ifdef EH_RETURN_STACKADJ_RTX
3082 tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
3083 VOIDmode, EXPAND_NORMAL);
3084 tmp = convert_memory_address (Pmode, tmp);
3085 if (!crtl->eh.ehr_stackadj)
3086 crtl->eh.ehr_stackadj = copy_to_reg (tmp);
3087 else if (tmp != crtl->eh.ehr_stackadj)
3088 emit_move_insn (crtl->eh.ehr_stackadj, tmp);
3091 tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
3092 VOIDmode, EXPAND_NORMAL);
3093 tmp = convert_memory_address (Pmode, tmp);
3094 if (!crtl->eh.ehr_handler)
3095 crtl->eh.ehr_handler = copy_to_reg (tmp);
3096 else if (tmp != crtl->eh.ehr_handler)
3097 emit_move_insn (crtl->eh.ehr_handler, tmp);
3099 if (!crtl->eh.ehr_label)
3100 crtl->eh.ehr_label = gen_label_rtx ();
3101 emit_jump (crtl->eh.ehr_label);
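/* Expand the EH return path at the end of the function: the normal exit
   jumps around it, while the path reached from __builtin_eh_return restores
   the saved stack adjustment and handler address and transfers control via
   the eh_return pattern or EH_RETURN_HANDLER_RTX.  */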
3105 expand_eh_return (void)
3109 if (! crtl->eh.ehr_label)
3112 crtl->calls_eh_return = 1;
3114 #ifdef EH_RETURN_STACKADJ_RTX
3115 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
3118 around_label = gen_label_rtx ();
3119 emit_jump (around_label);
3121 emit_label (crtl->eh.ehr_label);
3122 clobber_return_register ();
3124 #ifdef EH_RETURN_STACKADJ_RTX
3125 emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
3128 #ifdef HAVE_eh_return
3130 emit_insn (gen_eh_return (crtl->eh.ehr_handler));
3134 #ifdef EH_RETURN_HANDLER_RTX
3135 emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler);
3137 error ("__builtin_eh_return not supported on this target");
3141 emit_label (around_label);
3144 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
3145 POINTERS_EXTEND_UNSIGNED and return it. */
3148 expand_builtin_extend_pointer (tree addr_tree)
3150 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
3153 #ifdef POINTERS_EXTEND_UNSIGNED
3154 extend = POINTERS_EXTEND_UNSIGNED;
3156 /* The previous EH code did an unsigned extend by default, so we do this also.  */
3161 return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
3164 /* In the following functions, we represent entries in the action table
3165 as 1-based indices. Special cases are:
3167 0: null action record, non-null landing pad; implies cleanups
3168 -1: null action record, null landing pad; implies no action
3169 -2: no call-site entry; implies must_not_throw
3170 -3: we have yet to process outer regions
3172 Further, no special cases apply to the "next" field of the record.
3173 For next, 0 means end of list. */
3175 struct action_record
3183 action_record_eq (const void *pentry, const void *pdata)
3185 const struct action_record *entry = (const struct action_record *) pentry;
3186 const struct action_record *data = (const struct action_record *) pdata;
3187 return entry->filter == data->filter && entry->next == data->next;
3191 action_record_hash (const void *pentry)
3193 const struct action_record *entry = (const struct action_record *) pentry;
3194 return entry->next * 1009 + entry->filter;
3198 add_action_record (htab_t ar_hash, int filter, int next)
3200 struct action_record **slot, *new_ar, tmp;
3202 tmp.filter = filter;
3204 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3206 if ((new_ar = *slot) == NULL)
3208 new_ar = XNEW (struct action_record);
3209 new_ar->offset = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3210 new_ar->filter = filter;
3211 new_ar->next = next;
3214 /* The filter value goes in untouched. The link to the next
3215 record is a "self-relative" byte offset, or zero to indicate
3216 that there is no next record. So convert the absolute 1 based
3217 indices we've been carrying around into a displacement. */
3219 push_sleb128 (&crtl->eh.action_record_data, filter);
3221 next -= VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3222 push_sleb128 (&crtl->eh.action_record_data, next);
3225 return new_ar->offset;
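/* For illustration only (not part of the original sources): starting from an
   empty table, add_action_record (ar_hash, 3, 0) returns offset 1 and emits
   the sleb128 pair {0x03, 0x00}; a subsequent add_action_record (ar_hash, 1, 1)
   returns offset 3 and emits {0x01, 0x7d}, where 0x7d is sleb128(-3), the
   self-relative displacement from the second record's "next" byte back to
   the start of the first record.  */

/* Walk outward from REGION collecting the chain of action records that
   applies to it, and return the 1-based offset of the head of that chain,
   or one of the negative special values described above.  */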
3229 collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
3231 struct eh_region *c;
3234 /* If we've reached the top of the region chain, then we have
3235 no actions, and require no landing pad. */
3239 switch (region->type)
3242 /* A cleanup adds a zero filter to the beginning of the chain, but
3243 there are special cases to look out for. If there are *only*
3244 cleanups along a path, then it compresses to a zero action.
3245 Further, if there are multiple cleanups along a path, we only
3246 need to represent one of them, as that is enough to trigger
3247 entry to the landing pad at runtime. */
3248 next = collect_one_action_chain (ar_hash, region->outer);
3251 for (c = region->outer; c ; c = c->outer)
3252 if (c->type == ERT_CLEANUP)
3254 return add_action_record (ar_hash, 0, next);
3257 /* Process the associated catch regions in reverse order.
3258 If there's a catch-all handler, then we don't need to
3259 search outer regions. Use a magic -3 value to record
3260 that we haven't done the outer search. */
3262 for (c = region->u.eh_try.last_catch; c ; c = c->u.eh_catch.prev_catch)
3264 if (c->u.eh_catch.type_list == NULL)
3266 /* Retrieve the filter from the head of the filter list
3267 where we have stored it (see assign_filter_values). */
3269 = TREE_INT_CST_LOW (TREE_VALUE (c->u.eh_catch.filter_list));
3271 next = add_action_record (ar_hash, filter, 0);
3275 /* Once the outer search is done, trigger an action record for
3276 each filter we have. */
3281 next = collect_one_action_chain (ar_hash, region->outer);
3283 /* If there is no next action, terminate the chain. */
3286 /* If all outer actions are cleanups or must_not_throw,
3287 we'll have no action record for it, since we had wanted
3288 to encode these states in the call-site record directly.
3289 Add a cleanup action to the chain to catch these. */
3291 next = add_action_record (ar_hash, 0, 0);
3294 flt_node = c->u.eh_catch.filter_list;
3295 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3297 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3298 next = add_action_record (ar_hash, filter, next);
3304 case ERT_ALLOWED_EXCEPTIONS:
3305 /* An exception specification adds its filter to the
3306 beginning of the chain. */
3307 next = collect_one_action_chain (ar_hash, region->outer);
3309 /* If there is no next action, terminate the chain. */
3312 /* If all outer actions are cleanups or must_not_throw,
3313 we'll have no action record for it, since we had wanted
3314 to encode these states in the call-site record directly.
3315 Add a cleanup action to the chain to catch these. */
3317 next = add_action_record (ar_hash, 0, 0);
3319 return add_action_record (ar_hash, region->u.allowed.filter, next);
3321 case ERT_MUST_NOT_THROW:
3322 /* A must-not-throw region with no inner handlers or cleanups
3323 requires no call-site entry. Note that this differs from
3324 the no handler or cleanup case in that we do require an lsda
3325 to be generated. Return a magic -2 value to record this. */
3330 /* CATCH regions are handled in TRY above. THROW regions are
3331 for optimization information only and produce no output. */
3332 return collect_one_action_chain (ar_hash, region->outer);
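/* Append a call-site record with the given LANDING_PAD and ACTION to the
   current function's table, and return its index biased by call_site_base.  */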
3340 add_call_site (rtx landing_pad, int action)
3342 call_site_record record;
3344 record = GGC_NEW (struct call_site_record);
3345 record->landing_pad = landing_pad;
3346 record->action = action;
3348 VEC_safe_push (call_site_record, gc, crtl->eh.call_site_record, record);
3350 return call_site_base + VEC_length (call_site_record, crtl->eh.call_site_record) - 1;
3353 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3354 The new note numbers will not refer to region numbers, but
3355 instead to call site entries. */
3358 convert_to_eh_region_ranges (void)
3360 rtx insn, iter, note;
3362 int last_action = -3;
3363 rtx last_action_insn = NULL_RTX;
3364 rtx last_landing_pad = NULL_RTX;
3365 rtx first_no_action_insn = NULL_RTX;
3368 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3371 VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
3373 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3375 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3378 struct eh_region *region;
3380 rtx this_landing_pad;
3383 if (NONJUMP_INSN_P (insn)
3384 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3385 insn = XVECEXP (PATTERN (insn), 0, 0);
3387 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3390 if (! (CALL_P (insn)
3391 || (flag_non_call_exceptions
3392 && may_trap_p (PATTERN (insn)))))
3399 if (INTVAL (XEXP (note, 0)) <= 0)
3401 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
3402 this_action = collect_one_action_chain (ar_hash, region);
3405 /* Existence of catch handlers, or must-not-throw regions
3406 implies that an lsda is needed (even if empty). */
3407 if (this_action != -1)
3408 crtl->uses_eh_lsda = 1;
3410 /* Delay creation of region notes for no-action regions
3411 until we're sure that an lsda will be required. */
3412 else if (last_action == -3)
3414 first_no_action_insn = iter;
3418 /* Cleanups and handlers may share action chains but not
3419 landing pads. Collect the landing pad for this region. */
3420 if (this_action >= 0)
3422 struct eh_region *o;
3423 for (o = region; ! o->landing_pad ; o = o->outer)
3425 this_landing_pad = o->landing_pad;
3428 this_landing_pad = NULL_RTX;
3430 /* Differing actions or landing pads implies a change in call-site
3431 info, which implies some EH_REGION note should be emitted. */
3432 if (last_action != this_action
3433 || last_landing_pad != this_landing_pad)
3435 /* If we'd not seen a previous action (-3) or the previous
3436 action was must-not-throw (-2), then we do not need an end note.  */
3438 if (last_action >= -1)
3440 /* If we delayed the creation of the begin, do it now. */
3441 if (first_no_action_insn)
3443 call_site = add_call_site (NULL_RTX, 0);
3444 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3445 first_no_action_insn);
3446 NOTE_EH_HANDLER (note) = call_site;
3447 first_no_action_insn = NULL_RTX;
3450 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3452 NOTE_EH_HANDLER (note) = call_site;
3455 /* If the new action is must-not-throw, then no region notes are created.  */
3457 if (this_action >= -1)
3459 call_site = add_call_site (this_landing_pad,
3460 this_action < 0 ? 0 : this_action);
3461 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3462 NOTE_EH_HANDLER (note) = call_site;
3465 last_action = this_action;
3466 last_landing_pad = this_landing_pad;
3468 last_action_insn = iter;
3471 if (last_action >= -1 && ! first_no_action_insn)
3473 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3474 NOTE_EH_HANDLER (note) = call_site;
3477 htab_delete (ar_hash);
3481 struct rtl_opt_pass pass_convert_to_eh_region_ranges =
3485 "eh_ranges", /* name */
3487 convert_to_eh_region_ranges, /* execute */
3490 0, /* static_pass_number */
3492 0, /* properties_required */
3493 0, /* properties_provided */
3494 0, /* properties_destroyed */
3495 0, /* todo_flags_start */
3496 TODO_dump_func, /* todo_flags_finish */
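/* Append VALUE to *DATA_AREA as an unsigned LEB128 quantity: seven data bits
   per byte, with the high bit set on every byte except the last.  For
   example, 624485 encodes as the three bytes 0xe5 0x8e 0x26.  */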
3502 push_uleb128 (varray_type *data_area, unsigned int value)
3506 unsigned char byte = value & 0x7f;
3510 VARRAY_PUSH_UCHAR (*data_area, byte);
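/* Append VALUE to *DATA_AREA as a signed LEB128 quantity.  For example,
   -2 encodes as the single byte 0x7e.  */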
3516 push_sleb128 (varray_type *data_area, int value)
3523 byte = value & 0x7f;
3525 more = ! ((value == 0 && (byte & 0x40) == 0)
3526 || (value == -1 && (byte & 0x40) != 0));
3529 VARRAY_PUSH_UCHAR (*data_area, byte);
3535 #ifndef HAVE_AS_LEB128
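/* Return the size in bytes of the DWARF-2 call-site table for the current
   function: three 4-byte fields per entry plus a uleb128 action index.  */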
3537 dw2_size_of_call_site_table (void)
3539 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3540 int size = n * (4 + 4 + 4);
3543 for (i = 0; i < n; ++i)
3545 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3546 size += size_of_uleb128 (cs->action);
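/* Return the size in bytes of the setjmp/longjmp call-site table: a uleb128
   call-site value and a uleb128 action index per entry.  */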
3553 sjlj_size_of_call_site_table (void)
3555 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3559 for (i = 0; i < n; ++i)
3561 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3562 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3563 size += size_of_uleb128 (cs->action);
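/* Output the DWARF-2 call-site table: for each record, the region start and
   landing pad as displacements from the start of the function, the region
   length, and the uleb128 action index.  */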
3571 dw2_output_call_site_table (void)
3573 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3576 for (i = 0; i < n; ++i)
3578 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3579 char reg_start_lab[32];
3580 char reg_end_lab[32];
3581 char landing_pad_lab[32];
3583 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3584 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3586 if (cs->landing_pad)
3587 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3588 CODE_LABEL_NUMBER (cs->landing_pad));
3590 /* ??? Perhaps use insn length scaling if the assembler supports
3591 generic arithmetic. */
3592 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3593 data4 if the function is small enough. */
3594 #ifdef HAVE_AS_LEB128
3595 dw2_asm_output_delta_uleb128 (reg_start_lab,
3596 current_function_func_begin_label,
3597 "region %d start", i);
3598 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3600 if (cs->landing_pad)
3601 dw2_asm_output_delta_uleb128 (landing_pad_lab,
3602 current_function_func_begin_label,
3605 dw2_asm_output_data_uleb128 (0, "landing pad");
3607 dw2_asm_output_delta (4, reg_start_lab,
3608 current_function_func_begin_label,
3609 "region %d start", i);
3610 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3611 if (cs->landing_pad)
3612 dw2_asm_output_delta (4, landing_pad_lab,
3613 current_function_func_begin_label,
3616 dw2_asm_output_data (4, 0, "landing pad");
3618 dw2_asm_output_data_uleb128 (cs->action, "action");
3621 call_site_base += n;
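/* Output the setjmp/longjmp call-site table: for each record, the uleb128
   call-site value held in the landing_pad field and the uleb128 action
   index.  */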
3625 sjlj_output_call_site_table (void)
3627 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3630 for (i = 0; i < n; ++i)
3632 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3634 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3635 "region %d landing pad", i);
3636 dw2_asm_output_data_uleb128 (cs->action, "action");
3639 call_site_base += n;
3642 #ifndef TARGET_UNWIND_INFO
3643 /* Switch to the section that should be used for exception tables. */
3646 switch_to_exception_section (const char * ARG_UNUSED (fnname))
3650 if (exception_section)
3651 s = exception_section;
3654 /* Compute the section and cache it into exception_section,
3655 unless it depends on the function name. */
3656 if (targetm.have_named_sections)
3660 if (EH_TABLES_CAN_BE_READ_ONLY)
3663 ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3664 flags = ((! flag_pic
3665 || ((tt_format & 0x70) != DW_EH_PE_absptr
3666 && (tt_format & 0x70) != DW_EH_PE_aligned))
3667 ? 0 : SECTION_WRITE);
3670 flags = SECTION_WRITE;
3672 #ifdef HAVE_LD_EH_GC_SECTIONS
3673 if (flag_function_sections)
3675 char *section_name = XNEWVEC (char, strlen (fnname) + 32);
3676 sprintf (section_name, ".gcc_except_table.%s", fnname);
3677 s = get_section (section_name, flags, NULL);
3678 free (section_name);
3683 = s = get_section (".gcc_except_table", flags, NULL);
3687 = s = flag_pic ? data_section : readonly_data_section;
3690 switch_to_section (s);
3695 /* Output a reference from an exception table to the type_info object TYPE.
3696 TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for the reference.  */
3700 output_ttype (tree type, int tt_format, int tt_format_size)
3703 bool is_public = true;
3705 if (type == NULL_TREE)
3709 struct varpool_node *node;
3711 type = lookup_type_for_runtime (type);
3712 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
3714 /* Let cgraph know that the rtti decl is used. Not all of the
3715 paths below go through assemble_integer, which would take
3716 care of this for us. */
3718 if (TREE_CODE (type) == ADDR_EXPR)
3720 type = TREE_OPERAND (type, 0);
3721 if (TREE_CODE (type) == VAR_DECL)
3723 node = varpool_node (type);
3725 varpool_mark_needed_node (node);
3726 is_public = TREE_PUBLIC (type);
3730 gcc_assert (TREE_CODE (type) == INTEGER_CST);
3733 /* Allow the target to override the type table entry format. */
3734 if (targetm.asm_out.ttype (value))
3737 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
3738 assemble_integer (value, tt_format_size,
3739 tt_format_size * BITS_PER_UNIT, 1);
3741 dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
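/* Emit the language-specific data area (LSDA) for the current function:
   the personality reference where required, the LSDA header, the call-site
   table, the action record table, and the @TType and exception-specification
   data.  */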
3745 output_function_exception_table (const char * ARG_UNUSED (fnname))
3747 int tt_format, cs_format, lp_format, i, n;
3748 #ifdef HAVE_AS_LEB128
3749 char ttype_label[32];
3750 char cs_after_size_label[32];
3751 char cs_end_label[32];
3756 int tt_format_size = 0;
3758 /* Not all functions need anything. */
3759 if (! crtl->uses_eh_lsda)
3762 if (eh_personality_libfunc)
3763 assemble_external_libcall (eh_personality_libfunc);
3765 #ifdef TARGET_UNWIND_INFO
3766 /* TODO: Move this into target file. */
3767 fputs ("\t.personality\t", asm_out_file);
3768 output_addr_const (asm_out_file, eh_personality_libfunc);
3769 fputs ("\n\t.handlerdata\n", asm_out_file);
3770 /* Note that varasm still thinks we're in the function's code section.
3771 The ".endp" directive that will immediately follow will take us back. */
3773 switch_to_exception_section (fnname);
3776 /* If the target wants a label to begin the table, emit it here. */
3777 targetm.asm_out.except_table_label (asm_out_file);
3779 have_tt_data = (VEC_length (tree, crtl->eh.ttype_data) > 0
3780 || VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) > 0);
3782 /* Indicate the format of the @TType entries. */
3784 tt_format = DW_EH_PE_omit;
3787 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3788 #ifdef HAVE_AS_LEB128
3789 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
3790 current_function_funcdef_no);
3792 tt_format_size = size_of_encoded_value (tt_format);
3794 assemble_align (tt_format_size * BITS_PER_UNIT);
3797 targetm.asm_out.internal_label (asm_out_file, "LLSDA",
3798 current_function_funcdef_no);
3800 /* The LSDA header. */
3802 /* Indicate the format of the landing pad start pointer. An omitted
3803 field implies @LPStart == @Start. */
3804 /* Currently we always put @LPStart == @Start. This field would
3805 be most useful in moving the landing pads completely out of
3806 line to another section, but it could also be used to minimize
3807 the size of uleb128 landing pad offsets. */
3808 lp_format = DW_EH_PE_omit;
3809 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3810 eh_data_format_name (lp_format));
3812 /* @LPStart pointer would go here. */
3814 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3815 eh_data_format_name (tt_format));
3817 #ifndef HAVE_AS_LEB128
3818 if (USING_SJLJ_EXCEPTIONS)
3819 call_site_len = sjlj_size_of_call_site_table ();
3821 call_site_len = dw2_size_of_call_site_table ();
3824 /* A pc-relative 4-byte displacement to the @TType data. */
3827 #ifdef HAVE_AS_LEB128
3828 char ttype_after_disp_label[32];
3829 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
3830 current_function_funcdef_no);
3831 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3832 "@TType base offset");
3833 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3835 /* Ug. Alignment queers things. */
3836 unsigned int before_disp, after_disp, last_disp, disp;
3838 before_disp = 1 + 1;
3839 after_disp = (1 + size_of_uleb128 (call_site_len)
3841 + VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data)
3842 + (VEC_length (tree, crtl->eh.ttype_data)
3848 unsigned int disp_size, pad;
3851 disp_size = size_of_uleb128 (disp);
3852 pad = before_disp + disp_size + after_disp;
3853 if (pad % tt_format_size)
3854 pad = tt_format_size - (pad % tt_format_size);
3857 disp = after_disp + pad;
3859 while (disp != last_disp);
3861 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3865 /* Indicate the format of the call-site offsets. */
3866 #ifdef HAVE_AS_LEB128
3867 cs_format = DW_EH_PE_uleb128;
3869 cs_format = DW_EH_PE_udata4;
3871 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3872 eh_data_format_name (cs_format));
3874 #ifdef HAVE_AS_LEB128
3875 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
3876 current_function_funcdef_no);
3877 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
3878 current_function_funcdef_no);
3879 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3880 "Call-site table length");
3881 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3882 if (USING_SJLJ_EXCEPTIONS)
3883 sjlj_output_call_site_table ();
3885 dw2_output_call_site_table ();
3886 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3888 dw2_asm_output_data_uleb128 (call_site_len,"Call-site table length");
3889 if (USING_SJLJ_EXCEPTIONS)
3890 sjlj_output_call_site_table ();
3892 dw2_output_call_site_table ();
3895 /* ??? Decode and interpret the data for flag_debug_asm. */
3896 n = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data);
3897 for (i = 0; i < n; ++i)
3898 dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.action_record_data, i),
3899 (i ? NULL : "Action record table"));
3902 assemble_align (tt_format_size * BITS_PER_UNIT);
3904 i = VEC_length (tree, crtl->eh.ttype_data);
3907 tree type = VEC_index (tree, crtl->eh.ttype_data, i);
3908 output_ttype (type, tt_format, tt_format_size);
3911 #ifdef HAVE_AS_LEB128
3913 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3916 /* ??? Decode and interpret the data for flag_debug_asm. */
3917 n = VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data);
3918 for (i = 0; i < n; ++i)
3920 if (targetm.arm_eabi_unwinder)
3922 tree type = VARRAY_TREE (crtl->eh.ehspec_data, i);
3923 output_ttype (type, tt_format, tt_format_size);
3926 dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.ehspec_data, i),
3927 (i ? NULL : "Exception specification table"));
3930 switch_to_section (current_function_section ());
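/* Set and retrieve the per-function throw_stmt_table hash table.  */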
3934 set_eh_throw_stmt_table (struct function *fun, struct htab *table)
3936 fun->eh->throw_stmt_table = table;
3940 get_eh_throw_stmt_table (struct function *fun)
3942 return fun->eh->throw_stmt_table;
3945 /* Dump EH information to OUT. */
3948 dump_eh_tree (FILE * out, struct function *fun)
3950 struct eh_region *i;
3952 static const char *const type_name[] = { "unknown", "cleanup", "try", "catch",
3953 "allowed_exceptions", "must_not_throw",
3957 i = fun->eh->region_tree;
3961 fprintf (out, "Eh tree:\n");
3964 fprintf (out, " %*s %i %s", depth * 2, "",
3965 i->region_number, type_name[(int) i->type]);
3968 fprintf (out, " tree_label:");
3969 print_generic_expr (out, i->tree_label, 0);
3974 if (i->u.cleanup.prev_try)
3975 fprintf (out, " prev try:%i",
3976 i->u.cleanup.prev_try->region_number);
3981 struct eh_region *c;
3982 fprintf (out, " catch regions:");
3983 for (c = i->u.eh_try.eh_catch; c; c = c->u.eh_catch.next_catch)
3984 fprintf (out, " %i", c->region_number);
3989 if (i->u.eh_catch.prev_catch)
3990 fprintf (out, " prev: %i",
3991 i->u.eh_catch.prev_catch->region_number);
3992 if (i->u.eh_catch.next_catch)
3993 fprintf (out, " next %i",
3994 i->u.eh_catch.next_catch->region_number);
3997 case ERT_ALLOWED_EXCEPTIONS:
3998 fprintf (out, "filter :%i types:", i->u.allowed.filter);
3999 print_generic_expr (out, i->u.allowed.type_list, 0);
4003 fprintf (out, "type:");
4004 print_generic_expr (out, i->u.eh_throw.type, 0);
4007 case ERT_MUST_NOT_THROW:
4015 fprintf (out, " also known as:");
4016 dump_bitmap (out, i->aka);
4019 fprintf (out, "\n");
4020 /* If there are sub-regions, process them. */
4022 i = i->inner, depth++;
4023 /* If there are peers, process them. */
4024 else if (i->next_peer)
4026 /* Otherwise, step back up the tree to the next peer. */
4036 while (i->next_peer == NULL);
4042 /* Verify some basic invariants on EH data structures.  Could be extended to catch more.  */
4045 verify_eh_tree (struct function *fun)
4047 struct eh_region *i, *outer = NULL;
4054 if (!fun->eh->region_tree)
4056 for (j = fun->eh->last_region_number; j > 0; --j)
4057 if ((i = VEC_index (eh_region, fun->eh->region_array, j)))
4059 if (i->region_number == j)
4061 if (i->region_number != j && (!i->aka || !bitmap_bit_p (i->aka, j)))
4063 error ("region_array is corrupted for region %i",
4068 i = fun->eh->region_tree;
4072 if (VEC_index (eh_region, fun->eh->region_array, i->region_number) != i)
4074 error ("region_array is corrupted for region %i", i->region_number);
4077 if (i->outer != outer)
4079 error ("outer block of region %i is wrong", i->region_number);
4082 if (i->may_contain_throw && outer && !outer->may_contain_throw)
4085 ("region %i may contain throw and is contained in region that may not",
4091 error ("negative nesting depth of region %i", i->region_number);
4095 /* If there are sub-regions, process them. */
4097 outer = i, i = i->inner, depth++;
4098 /* If there are peers, process them. */
4099 else if (i->next_peer)
4101 /* Otherwise, step back up the tree to the next peer. */
4112 error ("tree list ends on depth %i", depth + 1);
4115 if (count != nvisited)
4117 error ("array does not match the region tree");
4122 dump_eh_tree (stderr, fun);
4123 internal_error ("verify_eh_tree failed");
4129 while (i->next_peer == NULL);
4135 /* Initialize unwind_resume_libfunc. */
4138 default_init_unwind_resume_libfunc (void)
4140 /* The default C++ routines aren't actually C++ specific, so use those.  */
4141 unwind_resume_libfunc =
4142 init_one_libfunc ( USING_SJLJ_EXCEPTIONS ? "_Unwind_SjLj_Resume"
4143 : "_Unwind_Resume");
4148 gate_handle_eh (void)
4150 return doing_eh (0);
4153 /* Complete generation of exception handling code. */
4155 rest_of_handle_eh (void)
4157 finish_eh_generation ();
4158 cleanup_cfg (CLEANUP_NO_INSN_DEL);
4162 struct rtl_opt_pass pass_rtl_eh =
4167 gate_handle_eh, /* gate */
4168 rest_of_handle_eh, /* execute */
4171 0, /* static_pass_number */
4172 TV_JUMP, /* tv_id */
4173 0, /* properties_required */
4174 0, /* properties_provided */
4175 0, /* properties_destroyed */
4176 0, /* todo_flags_start */
4177 TODO_dump_func /* todo_flags_finish */
4181 #include "gt-except.h"