1 /* Implements exception handling.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
5 Contributed by Mike Stump <mrs@cygnus.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
24 /* An exception is an event that can be signaled from within a
25 function. This event can then be "caught" or "trapped" by the
26 callers of this function. This potentially allows program flow to
27 be transferred to any arbitrary code associated with a function call
28 several levels up the stack.
30 The intended use for this mechanism is for signaling "exceptional
31 events" in an out-of-band fashion, hence its name. The C++ language
32 (and many other OO-styled or functional languages) practically
33 requires such a mechanism, as otherwise it becomes very difficult
34 or even impossible to signal failure conditions in complex
35 situations. The traditional C++ example is when an error occurs in
36 the process of constructing an object; without such a mechanism, it
37 is impossible to signal that the error occurs without adding global
38 state variables and error checks around every object construction.
40 The act of causing this event to occur is referred to as "throwing
41 an exception". (Alternate terms include "raising an exception" or
42 "signaling an exception".) The term "throw" is used because control
43 is returned to the callers of the function that is signaling the
44 exception, and thus there is the concept of "throwing" the
45 exception up the call stack.
47 [ Add updated documentation on how to use this. ] */
52 #include "coretypes.h"
60 #include "insn-config.h"
62 #include "integrate.h"
63 #include "hard-reg-set.h"
64 #include "basic-block.h"
66 #include "dwarf2asm.h"
67 #include "dwarf2out.h"
75 #include "langhooks.h"
77 #include "diagnostic.h"
78 #include "tree-pass.h"
81 /* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
/* NOTE(review): the original line numbering embedded in this listing has
   gaps, so braces, #endif directives, and whole lines from the original
   file are missing throughout.  Code tokens below are kept as-is.  */
83 #ifndef EH_RETURN_DATA_REGNO
84 #define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
/* Language-specific hooks, installed by each front end.  */
87 /* Protect cleanup actions with must-not-throw regions, with a call
88 to the given failure handler. */
89 gimple (*lang_protect_cleanup_actions) (void);
91 /* Return true if type A catches type B. */
92 int (*lang_eh_type_covers) (tree a, tree b);
94 /* Map a type to a runtime object to match type. */
95 tree (*lang_eh_runtime_type) (tree);
97 /* A hash table of label to region number. */
99 struct GTY(()) ehl_map_entry {
101 struct eh_region *region;
/* Base call-site index for the current function's call-site table.  */
104 static GTY(()) int call_site_base;
/* Cache mapping front-end types to runtime type objects; entries are
   TREE_LIST nodes built by add_type_for_runtime below.  */
105 static GTY ((param_is (union tree_node)))
106 htab_t type_to_runtime_map;
108 /* Describe the SjLj_Function_Context structure. */
/* Lazily-built RECORD_TYPE mirroring unwind-sjlj.c, plus cached byte
   offsets of its interesting fields (filled in by init_eh).  */
109 static GTY(()) tree sjlj_fc_type_node;
110 static int sjlj_fc_call_site_ofs;
111 static int sjlj_fc_data_ofs;
112 static int sjlj_fc_personality_ofs;
113 static int sjlj_fc_lsda_ofs;
114 static int sjlj_fc_jbuf_ofs;
/* One entry in the LSDA call-site table (body elided in this listing).  */
117 struct GTY(()) call_site_record
/* Forward declarations for the static helpers defined later in the file.  */
123 static int t2r_eq (const void *, const void *);
124 static hashval_t t2r_hash (const void *);
126 static int ttypes_filter_eq (const void *, const void *);
127 static hashval_t ttypes_filter_hash (const void *);
128 static int ehspec_filter_eq (const void *, const void *);
129 static hashval_t ehspec_filter_hash (const void *);
130 static int add_ttypes_entry (htab_t, tree);
131 static int add_ehspec_entry (htab_t, htab_t, tree);
132 static void assign_filter_values (void);
133 static void build_post_landing_pads (void);
134 static void connect_post_landing_pads (void);
135 static void dw2_build_landing_pads (void);
138 static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
139 static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
140 static void sjlj_mark_call_sites (struct sjlj_lp_info *);
141 static void sjlj_emit_function_enter (rtx);
142 static void sjlj_emit_function_exit (void);
143 static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
144 static void sjlj_build_landing_pads (void);
146 static void remove_eh_handler (struct eh_region *);
147 static void remove_eh_handler_and_replace (struct eh_region *,
148 struct eh_region *, bool);
150 /* The return value of reachable_next_level. */
/* NOTE(review): the enum tag and enumerator names themselves were elided;
   only the per-enumerator comments survive in this listing.  */
153 /* The given exception is not processed by the given region. */
155 /* The given exception may need processing by the given region. */
157 /* The given exception is completely processed by the given region. */
159 /* The given exception is completely processed by the runtime. */
163 struct reachable_info;
164 static enum reachable_code reachable_next_level (struct eh_region *, tree,
165 struct reachable_info *, bool);
167 static int action_record_eq (const void *, const void *);
168 static hashval_t action_record_hash (const void *);
169 static int add_action_record (htab_t, int, int);
170 static int collect_one_action_chain (htab_t, struct eh_region *);
171 static int add_call_site (rtx, int);
173 static void push_uleb128 (varray_type *, unsigned int);
174 static void push_sleb128 (varray_type *, int);
175 #ifndef HAVE_AS_LEB128
176 static int dw2_size_of_call_site_table (void);
177 static int sjlj_size_of_call_site_table (void);
179 static void dw2_output_call_site_table (void);
180 static void sjlj_output_call_site_table (void);
183 /* Routine to see if exception handling is turned on.
184 DO_WARN is nonzero if we want to inform the user that exception
185 handling is turned off.
187 This is used to ensure that -fexceptions has been specified if the
188 compiler tries to use any exception-specific functions. */
/* Returns the value of flag_exceptions; emits the error at most once
   per compilation (guarded by the local static WARNED).
   NOTE(review): the return type line and braces are elided in this
   listing.  */
191 doing_eh (int do_warn)
193 if (! flag_exceptions)
195 static int warned = 0;
196 if (! warned && do_warn)
198 error ("exception handling disabled, use -fexceptions to enable");
/* NOTE(review): this appears to be the body of init_eh — the function
   header line is elided in this listing.  It creates the global
   type_to_runtime_map and, for sjlj exceptions, lays out the
   SjLj_Function_Context record and caches its field offsets.  */
210 if (! flag_exceptions)
213 type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);
215 /* Create the SjLj_Function_Context structure. This should match
216 the definition in unwind-sjlj.c. */
217 if (USING_SJLJ_EXCEPTIONS)
219 tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
221 sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);
/* __prev: chain to the previous context on the handler stack.  */
223 f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
224 build_pointer_type (sjlj_fc_type_node));
225 DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;
/* __call_site: index of the active call site (type line elided).  */
227 f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
229 DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
/* __data: four unwind words of scratch data for the runtime.  */
231 tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
232 tmp = build_array_type (lang_hooks.types.type_for_mode
233 (targetm.unwind_word_mode (), 1),
235 f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
236 DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
/* __personality: pointer to the personality routine (type elided).  */
238 f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
240 DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;
/* __lsda: pointer to the language-specific data area (type elided).  */
242 f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
244 DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
/* Pick the jump-buffer size: target-defined, register-count heuristic,
   or the 5-pointer buffer that __builtin_setjmp requires.  */
246 #ifdef DONT_USE_BUILTIN_SETJMP
248 tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
250 /* Should be large enough for most systems, if it is not,
251 JMP_BUF_SIZE should be defined with the proper value. It will
252 also tend to be larger than necessary for most systems, a more
253 optimal port will define JMP_BUF_SIZE. */
254 tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
257 /* builtin_setjmp takes a pointer to 5 words. */
258 tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
260 tmp = build_index_type (tmp);
261 tmp = build_array_type (ptr_type_node, tmp);
262 f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
263 #ifdef DONT_USE_BUILTIN_SETJMP
264 /* We don't know what the alignment requirements of the
265 runtime's jmp_buf has. Overestimate. */
266 DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
267 DECL_USER_ALIGN (f_jbuf) = 1;
269 DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
/* Chain the fields onto the record in declaration order and lay it out.  */
271 TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
272 TREE_CHAIN (f_prev) = f_cs;
273 TREE_CHAIN (f_cs) = f_data;
274 TREE_CHAIN (f_data) = f_per;
275 TREE_CHAIN (f_per) = f_lsda;
276 TREE_CHAIN (f_lsda) = f_jbuf;
278 layout_type (sjlj_fc_type_node);
280 /* Cache the interesting field offsets so that we have
281 easy access from rtl. */
/* Each offset = byte offset of the field within the record.
   NOTE(review): some left-hand-side lines (e.g. sjlj_fc_data_ofs,
   sjlj_fc_lsda_ofs, sjlj_fc_jbuf_ofs) are elided in this listing.  */
282 sjlj_fc_call_site_ofs
283 = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
284 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
286 = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
287 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
288 sjlj_fc_personality_ofs
289 = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
290 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
292 = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
293 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
295 = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
296 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
/* Allocate and zero the per-function EH state (cfun->eh).  */
301 init_eh_for_function (void)
303 cfun->eh = GGC_CNEW (struct eh_status);
306 /* Routines to generate the exception tree somewhat directly.
307 These are used from tree-eh.c when processing exception related
308 nodes during tree optimization. */
/* Allocate a new, blank EH region of the given TYPE and link it into the
   region tree as the newest child of OUTER (or at the tree root when
   OUTER is NULL), assigning it the next region number.  */
310 static struct eh_region *
311 gen_eh_region (enum eh_region_type type, struct eh_region *outer)
313 struct eh_region *new_eh;
315 #ifdef ENABLE_CHECKING
316 gcc_assert (doing_eh (0));
319 /* Insert a new blank region as a leaf in the tree. */
320 new_eh = GGC_CNEW (struct eh_region);
322 new_eh->outer = outer;
/* Push onto OUTER's child list when nested ...  */
325 new_eh->next_peer = outer->inner;
326 outer->inner = new_eh;
/* ... otherwise onto the top-level peer list.  */
330 new_eh->next_peer = cfun->eh->region_tree;
331 cfun->eh->region_tree = new_eh;
334 new_eh->region_number = ++cfun->eh->last_region_number;
/* Create an ERT_CLEANUP region inside OUTER, remembering the enclosing
   try region in PREV_TRY (used later for prev_try short-cuts).  */
340 gen_eh_region_cleanup (struct eh_region *outer, struct eh_region *prev_try)
342 struct eh_region *cleanup = gen_eh_region (ERT_CLEANUP, outer);
343 cleanup->u.cleanup.prev_try = prev_try;
/* Create an ERT_TRY region inside OUTER.  */
348 gen_eh_region_try (struct eh_region *outer)
350 return gen_eh_region (ERT_TRY, outer);
/* Create an ERT_CATCH region as a peer of try region T, catching
   TYPE_OR_LIST (a single type or a TREE_LIST of types).  The new catch
   is appended to T's catch chain.  */
354 gen_eh_region_catch (struct eh_region *t, tree type_or_list)
356 struct eh_region *c, *l;
357 tree type_list, type_node;
359 /* Ensure to always end up with a type list to normalize further
360 processing, then register each type against the runtime types map. */
361 type_list = type_or_list;
364 if (TREE_CODE (type_or_list) != TREE_LIST)
365 type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
367 type_node = type_list;
368 for (; type_node; type_node = TREE_CHAIN (type_node))
369 add_type_for_runtime (TREE_VALUE (type_node));
/* Link the new catch at the tail of T's doubly-linked catch list.  */
372 c = gen_eh_region (ERT_CATCH, t->outer);
373 c->u.eh_catch.type_list = type_list;
374 l = t->u.eh_try.last_catch;
375 c->u.eh_catch.prev_catch = l;
377 l->u.eh_catch.next_catch = c;
379 t->u.eh_try.eh_catch = c;
380 t->u.eh_try.last_catch = c;
/* Create an ERT_ALLOWED_EXCEPTIONS region inside OUTER, permitting only
   the types in ALLOWED; each type is registered with the runtime map.  */
386 gen_eh_region_allowed (struct eh_region *outer, tree allowed)
388 struct eh_region *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
389 region->u.allowed.type_list = allowed;
391 for (; allowed ; allowed = TREE_CHAIN (allowed))
392 add_type_for_runtime (TREE_VALUE (allowed));
/* Create an ERT_MUST_NOT_THROW region inside OUTER.  */
398 gen_eh_region_must_not_throw (struct eh_region *outer)
400 return gen_eh_region (ERT_MUST_NOT_THROW, outer);
/* Simple accessors over struct eh_region.  */
404 get_eh_region_number (struct eh_region *region)
406 return region->region_number;
410 get_eh_region_may_contain_throw (struct eh_region *region)
412 return region->may_contain_throw;
416 get_eh_region_tree_label (struct eh_region *region)
418 return region->tree_label;
/* Same as above, but looked up by region number rather than pointer.  */
422 get_eh_region_no_tree_label (int region)
424 return VEC_index (eh_region, cfun->eh->region_array, region)->tree_label;
428 set_eh_region_tree_label (struct eh_region *region, tree lab)
430 region->tree_label = lab;
/* Expand a RESX_EXPR: emit a RESX jump insn for the region named by
   EXP's operand and record it as that region's resume point.  Asserts
   that a resume insn has not already been emitted for the region.  */
434 expand_resx_expr (tree exp)
436 int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
437 struct eh_region *reg = VEC_index (eh_region,
438 cfun->eh->region_array, region_nr);
440 gcc_assert (!reg->resume);
441 do_pending_stack_adjust ();
442 reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
446 /* Note that the current EH region (if any) may contain a throw, or a
447 call to a function which itself may contain a throw. */
/* Propagate the may_contain_throw flag from REGION outward, stopping at
   the first ancestor already marked (its ancestors must be marked too).  */
450 note_eh_region_may_contain_throw (struct eh_region *region)
452 while (region && !region->may_contain_throw)
454 region->may_contain_throw = 1;
455 region = region->outer;
460 /* Return an rtl expression for a pointer to the exception object
   within a handler; the pseudo is created lazily and cached in crtl.  */
464 get_exception_pointer (void)
466 if (! crtl->eh.exc_ptr)
467 crtl->eh.exc_ptr = gen_reg_rtx (ptr_mode);
468 return crtl->eh.exc_ptr;
471 /* Return an rtl expression for the exception dispatch filter
   within a handler; likewise created lazily and cached.  */
475 get_exception_filter (void)
477 if (! crtl->eh.filter)
478 crtl->eh.filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
479 return crtl->eh.filter;
482 /* This section is for the exception handling specific optimization pass. */
484 /* Random access the exception region tree. */
/* Walk the region tree depth-first and populate
   cfun->eh->region_array so regions can be indexed by region number.
   NOTE(review): the traversal's descend/ascend statements are partly
   elided in this listing; only the structure comments remain.  */
487 collect_eh_region_array (void)
491 i = cfun->eh->region_tree;
495 VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
496 cfun->eh->last_region_number + 1);
497 VEC_replace (eh_region, cfun->eh->region_array, 0, 0);
501 VEC_replace (eh_region, cfun->eh->region_array, i->region_number, i);
503 /* If there are sub-regions, process them. */
506 /* If there are peers, process them. */
507 else if (i->next_peer)
509 /* Otherwise, step back up the tree to the next peer. */
516 } while (i->next_peer == NULL);
522 /* R is MUST_NOT_THROW region that is not reachable via local
523 RESX instructions. It still must be kept in the tree in case runtime
524 can unwind through it, or we will eliminate our terminate call the
525 runtime would do otherwise. Return TRUE if R contains throwing statements
526 or some of the exceptions in inner regions can be unwound up to R.
528 CONTAINS_STMT is bitmap of all regions that contains some throwing
   statements.
531 Function looks O(^3) at first sight. In fact the function is called at most
532 once for every MUST_NOT_THROW in EH tree from remove_unreachable_regions
533 Because the outer loop walking subregions does not dive in MUST_NOT_THROW,
534 the outer loop examines every region at most once. The inner loop
535 is doing unwinding from the throwing statement same way as we do during
536 CFG construction, so it is O(^2) in size of EH tree, but O(n) in size
537 of CFG. In practice Eh trees are wide, not deep, so this is not
   a problem.  */
541 can_be_reached_by_runtime (sbitmap contains_stmt, struct eh_region *r)
543 struct eh_region *i = r->inner;
/* R itself (or one of its aliases) contains a throwing statement.  */
547 if (TEST_BIT (contains_stmt, r->region_number))
550 EXECUTE_IF_SET_IN_BITMAP (r->aka, 0, n, bi)
551 if (TEST_BIT (contains_stmt, n))
/* Walk all subregions looking for a throw that unwinds up to R.  */
557 /* It is pointless to look into MUST_NOT_THROW
558 or dive into subregions. They never unwind up. */
559 if (i->type != ERT_MUST_NOT_THROW)
561 bool found = TEST_BIT (contains_stmt, i->region_number);
563 EXECUTE_IF_SET_IN_BITMAP (i->aka, 0, n, bi)
564 if (TEST_BIT (contains_stmt, n))
569 /* We have nested region that contains throwing statement.
570 See if resuming might lead up to the resx or we get locally
571 caught sooner. If we get locally caught sooner, we either
572 know region R is not reachable or it would have direct edge
573 from the EH resx and thus consider region reachable at
   that point.  */
577 struct eh_region *i1 = i;
578 tree type_thrown = NULL_TREE;
580 if (i1->type == ERT_THROW)
582 type_thrown = i1->u.eh_throw.type;
/* Simulate unwinding from I1 outward; stop if it is caught before
   reaching R (NOTE(review): the "return true" on reaching R without
   being caught is elided in this listing).  */
585 for (; i1 != r; i1 = i1->outer)
586 if (reachable_next_level (i1, type_thrown, NULL,
587 false) >= RNL_CAUGHT)
593 /* If there are sub-regions, process them. */
594 if (i->type != ERT_MUST_NOT_THROW && i->inner)
596 /* If there are peers, process them. */
597 else if (i->next_peer)
599 /* Otherwise, step back up the tree to the next peer. */
608 while (i->next_peer == NULL);
614 /* Bring region R to the root of tree. */
/* Unlinks R from its parent's child list and re-links it as a
   top-level peer of cfun->eh->region_tree.  */
617 bring_to_root (struct eh_region *r)
619 struct eh_region **pp;
620 struct eh_region *outer = r->outer;
/* Find and remove R from OUTER's child list (unlink line elided).  */
623 for (pp = &outer->inner; *pp != r; pp = &(*pp)->next_peer)
627 r->next_peer = cfun->eh->region_tree;
628 cfun->eh->region_tree = r;
631 /* Remove all regions whose labels are not reachable.
632 REACHABLE is bitmap of all regions that are used by the function
633 CONTAINS_STMT is bitmap of all regions that contains stmt (or NULL). */
636 remove_unreachable_regions (sbitmap reachable, sbitmap contains_stmt)
640 VEC(eh_region,heap) *must_not_throws = VEC_alloc (eh_region, heap, 16);
641 struct eh_region *local_must_not_throw = NULL;
642 struct eh_region *first_must_not_throw = NULL;
/* Pass 1: walk regions from highest number down, dropping regions that
   are unreachable and collecting MUST_NOT_THROW regions for merging.  */
644 for (i = cfun->eh->last_region_number; i > 0; --i)
646 r = VEC_index (eh_region, cfun->eh->region_array, i);
647 if (!r || r->region_number != i)
649 if (!TEST_BIT (reachable, i) && !r->resume)
653 r->tree_label = NULL;
/* Some unreachable regions must still be preserved; dispatch on type.  */
657 /* Don't remove ERT_THROW regions if their outer region
   is reachable.  */
659 if (r->outer && TEST_BIT (reachable, r->outer->region_number))
662 case ERT_MUST_NOT_THROW:
663 /* MUST_NOT_THROW regions are implementable solely in the
664 runtime, but we need them when inlining function.
666 Keep them if outer region is not MUST_NOT_THROW as well
667 and if they contain some statement that might unwind through
   them.  */
669 if ((!r->outer || r->outer->type != ERT_MUST_NOT_THROW)
671 || can_be_reached_by_runtime (contains_stmt, r)))
676 /* TRY regions are reachable if any of its CATCH regions
   are reachable.  */
679 for (c = r->u.eh_try.eh_catch; c;
680 c = c->u.eh_catch.next_catch)
681 if (TEST_BIT (reachable, c->region_number))
696 fprintf (dump_file, "Removing unreachable eh region %i\n",
698 remove_eh_handler (r);
/* Reachable (or kept) MUST_NOT_THROW regions: remember the first one
   without a local handler, and any with one, for the merge pass.  */
700 else if (r->type == ERT_MUST_NOT_THROW)
702 if (!first_must_not_throw)
703 first_must_not_throw = r;
704 VEC_safe_push (eh_region, heap, must_not_throws, r);
708 if (r->type == ERT_MUST_NOT_THROW)
710 if (!local_must_not_throw)
711 local_must_not_throw = r;
713 VEC_safe_push (eh_region, heap, must_not_throws, r);
717 /* MUST_NOT_THROW regions without local handler are all the same; they
718 trigger terminate call in runtime.
719 MUST_NOT_THROW handled locally can differ in debug info associated
720 to std::terminate () call or if one is coming from Java and other
721 from C++ whether they call terminate or abort.
723 We merge all MUST_NOT_THROW regions handled by the run-time into one.
724 We also bring all local MUST_NOT_THROW regions to the roots of EH tree
725 (since unwinding never continues to the outer region anyway).
726 If MUST_NOT_THROW with local handler is present in the tree, we use
727 that region to merge into, since it will remain in tree anyway;
728 otherwise we use first MUST_NOT_THROW.
730 Merging of locally handled regions needs changes to the CFG. Crossjumping
731 should take care of this, by looking at the actual code and
732 ensuring that the cleanup actions are really the same. */
734 if (local_must_not_throw)
735 first_must_not_throw = local_must_not_throw;
/* Pass 2: replace each label-less MUST_NOT_THROW by the chosen
   representative region.  */
737 for (i = 0; VEC_iterate (eh_region, must_not_throws, i, r); i++)
739 if (!r->label && !r->tree_label && r != first_must_not_throw)
742 fprintf (dump_file, "Replacing MUST_NOT_THROW region %i by %i\n",
744 first_must_not_throw->region_number);
745 remove_eh_handler_and_replace (r, first_must_not_throw, false);
746 first_must_not_throw->may_contain_throw |= r->may_contain_throw;
751 #ifdef ENABLE_CHECKING
752 verify_eh_tree (cfun);
754 VEC_free (eh_region, heap, must_not_throws);
757 /* Return array mapping LABEL_DECL_UID to region such that region's tree_label
758 is identical to label. */
/* Regions sharing the same label are chained through
   next_region_sharing_label; the array holds the last-seen region number
   for each label UID (0 means none).  Caller owns the returned vector.  */
761 label_to_region_map (void)
763 VEC (int, heap) * label_to_region = NULL;
767 VEC_safe_grow_cleared (int, heap, label_to_region,
768 cfun->cfg->last_label_uid + 1);
769 for (i = cfun->eh->last_region_number; i > 0; --i)
771 struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
772 if (r && r->region_number == i
773 && r->tree_label && LABEL_DECL_UID (r->tree_label) >= 0)
/* Chain onto any region previously recorded for this label.  */
775 if ((idx = VEC_index (int, label_to_region,
776 LABEL_DECL_UID (r->tree_label))) != 0)
777 r->next_region_sharing_label =
778 VEC_index (eh_region, cfun->eh->region_array, idx);
780 r->next_region_sharing_label = NULL;
781 VEC_replace (int, label_to_region, LABEL_DECL_UID (r->tree_label),
785 return label_to_region;
788 /* Return number of EH regions. */
790 num_eh_regions (void)
792 return cfun->eh->last_region_number + 1;
795 /* Return next region sharing same label as REGION. */
/* Returns the next region's number, or (per the elided branch) a
   sentinel when REGION has no successor on its label chain.  */
798 get_next_region_sharing_label (int region)
803 r = VEC_index (eh_region, cfun->eh->region_array, region);
804 if (!r || !r->next_region_sharing_label)
806 return r->next_region_sharing_label->region_number;
809 /* Set up EH labels for RTL. */
812 convert_from_eh_region_ranges (void)
814 int i, n = cfun->eh->last_region_number;
816 /* Most of the work is already done at the tree level. All we need to
817 do is collect the rtl labels that correspond to the tree labels
819 we allocated earlier. */
820 for (i = 1; i <= n; ++i)
822 struct eh_region *region;
824 region = VEC_index (eh_region, cfun->eh->region_array, i);
825 if (region && region->tree_label)
826 region->label = DECL_RTL_IF_SET (region->tree_label);
/* Collect the labels of all active exception handlers.
   NOTE(review): what is done with LAB after the landing-pad/label choice
   is elided in this listing.  */
831 find_exception_handler_labels (void)
835 if (cfun->eh->region_tree == NULL)
838 for (i = cfun->eh->last_region_number; i > 0; --i)
840 struct eh_region *region;
843 region = VEC_index (eh_region, cfun->eh->region_array, i);
844 if (! region || region->region_number != i)
/* Once landing pads exist, they are the real handler entry points.  */
846 if (crtl->eh.built_landing_pads)
847 lab = region->landing_pad;
853 /* Returns true if the current function has exception handling regions. */
/* ERT_THROW regions do not count as handlers; any other live region
   does.  (The return statements are elided in this listing.)  */
856 current_function_has_exception_handlers (void)
860 for (i = cfun->eh->last_region_number; i > 0; --i)
862 struct eh_region *region;
864 region = VEC_index (eh_region, cfun->eh->region_array, i);
866 && region->region_number == i
867 && region->type != ERT_THROW)
874 /* A subroutine of duplicate_eh_regions. Search the region tree under O
875 for the minimum and maximum region numbers. Update *MIN and *MAX. */
/* Alias numbers in O->aka participate in the min/max too; recursion
   covers children and peers (the conditional guards are elided in this
   listing).  */
878 duplicate_eh_regions_0 (eh_region o, int *min, int *max)
884 i = bitmap_first_set_bit (o->aka);
887 i = bitmap_last_set_bit (o->aka);
891 if (o->region_number < *min)
892 *min = o->region_number;
893 if (o->region_number > *max)
894 *max = o->region_number;
899 duplicate_eh_regions_0 (o, min, max);
903 duplicate_eh_regions_0 (o, min, max);
908 /* A subroutine of duplicate_eh_regions. Copy the region tree under OLD.
909 Root it at OUTER, and apply EH_OFFSET to the region number. Don't worry
910 about the other internal pointers just yet, just the tree-like pointers. */
913 duplicate_eh_regions_1 (eh_region old, eh_region outer, int eh_offset)
917 ret = n = GGC_NEW (struct eh_region);
/* Copy the alias bitmap, shifting every alias by EH_OFFSET and
   registering each aliased number in the region array.  */
926 n->aka = BITMAP_GGC_ALLOC ();
928 EXECUTE_IF_SET_IN_BITMAP (old->aka, 0, i, bi)
930 bitmap_set_bit (n->aka, i + eh_offset);
931 VEC_replace (eh_region, cfun->eh->region_array, i + eh_offset, n);
935 n->region_number += eh_offset;
936 VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);
/* Recursively copy the first child, then walk and copy OLD's peers.  */
941 n = n->inner = duplicate_eh_regions_1 (old, ret, eh_offset);
942 while (old->next_peer)
944 old = old->next_peer;
945 n = n->next_peer = duplicate_eh_regions_1 (old, ret, eh_offset);
952 /* Return prev_try pointers catch subregions of R should
   inherit.  */
/* Walks outward from R to the nearest enclosing ERT_TRY; gives up
   (elided return of NULL) when a MUST_NOT_THROW or an empty
   ALLOWED_EXCEPTIONS region is crossed first, since unwinding stops
   there.  */
955 static struct eh_region *
956 find_prev_try (struct eh_region * r)
958 for (; r && r->type != ERT_TRY; r = r->outer)
959 if (r->type == ERT_MUST_NOT_THROW
960 || (r->type == ERT_ALLOWED_EXCEPTIONS
961 && !r->u.allowed.type_list))
969 /* Duplicate the EH regions of IFUN, rooted at COPY_REGION, into current
970 function and root the tree below OUTER_REGION. Remap labels using MAP
971 callback. The special case of COPY_REGION of 0 means all regions. */
974 duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map,
975 void *data, int copy_region, int outer_region)
977 eh_region cur, prev_try, old_prev_try, outer, *splice;
978 int i, min_region, max_region, eh_offset, cfun_last_region_number;
983 #ifdef ENABLE_CHECKING
984 verify_eh_tree (ifun);
987 /* Find the range of region numbers to be copied. The interface we
988 provide here mandates a single offset to find new number from old,
989 which means we must look at the numbers present, instead of the
990 count or something else. */
993 min_region = INT_MAX;
996 cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
997 old_prev_try = find_prev_try (cur);
998 duplicate_eh_regions_0 (cur, &min_region, &max_region);
/* COPY_REGION == 0: copy everything (the min_region assignment for
   this arm is elided in this listing).  */
1003 max_region = ifun->eh->last_region_number;
1004 old_prev_try = NULL;
1006 num_regions = max_region - min_region + 1;
1007 cfun_last_region_number = cfun->eh->last_region_number;
1008 eh_offset = cfun_last_region_number + 1 - min_region;
1010 /* If we've not yet created a region array, do so now. */
1011 cfun->eh->last_region_number = cfun_last_region_number + num_regions;
1012 VEC_safe_grow_cleared (eh_region, gc, cfun->eh->region_array,
1013 cfun->eh->last_region_number + 1);
1015 /* Locate the spot at which to insert the new tree. */
1016 if (outer_region > 0)
1018 outer = VEC_index (eh_region, cfun->eh->region_array, outer_region);
1020 splice = &outer->inner;
1022 splice = &cfun->eh->region_tree;
1027 splice = &cfun->eh->region_tree;
/* Advance to the end of the peer list at the insertion point.  */
1030 splice = &(*splice)->next_peer;
/* Nothing to copy: record the new numbers as aliases of OUTER.  */
1032 if (!ifun->eh->region_tree)
1035 for (i = cfun_last_region_number + 1;
1036 i <= cfun->eh->last_region_number; i++)
1038 VEC_replace (eh_region, cfun->eh->region_array, i, outer);
1039 if (outer->aka == NULL)
1040 outer->aka = BITMAP_GGC_ALLOC ();
1041 bitmap_set_bit (outer->aka, i);
1046 /* Copy all the regions in the subtree. */
1047 if (copy_region > 0)
1049 cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
1050 *splice = duplicate_eh_regions_1 (cur, outer, eh_offset);
/* COPY_REGION == 0: copy the whole top-level peer list.  */
1056 cur = ifun->eh->region_tree;
1057 *splice = n = duplicate_eh_regions_1 (cur, outer, eh_offset);
1058 while (cur->next_peer)
1060 cur = cur->next_peer;
1061 n = n->next_peer = duplicate_eh_regions_1 (cur, outer, eh_offset);
1065 /* Remap all the labels in the new regions. */
1066 for (i = cfun_last_region_number + 1;
1067 VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
1068 if (cur && cur->tree_label)
1069 cur->tree_label = map (cur->tree_label, data);
1071 /* Search for the containing ERT_TRY region to fix up
1072 the prev_try short-cuts for ERT_CLEANUP regions. */
1074 if (outer_region > 0)
1075 prev_try = find_prev_try (VEC_index (eh_region, cfun->eh->region_array, outer_region));
1077 /* Remap all of the internal catch and cleanup linkages. Since we
1078 duplicate entire subtrees, all of the referenced regions will have
1079 been copied too. And since we renumbered them as a block, a simple
1080 bit of arithmetic finds us the index for the replacement region. */
1081 for (i = cfun_last_region_number + 1;
1082 VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
1084 /* All removed EH that is toplevel in input function is now
1085 in outer EH of output function. */
1088 gcc_assert (VEC_index
1089 (eh_region, ifun->eh->region_array,
1090 i - eh_offset) == NULL);
1093 VEC_replace (eh_region, cfun->eh->region_array, i, outer);
1094 if (outer->aka == NULL)
1095 outer->aka = BITMAP_GGC_ALLOC ();
1096 bitmap_set_bit (outer->aka, i);
/* Skip alias entries; only process each region once.  */
1100 if (i != cur->region_number)
1103 #define REMAP(REG) \
1104 (REG) = VEC_index (eh_region, cfun->eh->region_array, \
1105 (REG)->region_number + eh_offset)
/* Switch on region type (switch statement elided in this listing).  */
1110 if (cur->u.eh_try.eh_catch)
1111 REMAP (cur->u.eh_try.eh_catch);
1112 if (cur->u.eh_try.last_catch)
1113 REMAP (cur->u.eh_try.last_catch);
1117 if (cur->u.eh_catch.next_catch)
1118 REMAP (cur->u.eh_catch.next_catch);
1119 if (cur->u.eh_catch.prev_catch)
1120 REMAP (cur->u.eh_catch.prev_catch);
/* Cleanups pointing inside the copied subtree are remapped; those
   pointing at the old outer try get the freshly computed PREV_TRY.  */
1124 if (cur->u.cleanup.prev_try != old_prev_try)
1125 REMAP (cur->u.cleanup.prev_try);
1127 cur->u.cleanup.prev_try = prev_try;
1136 #ifdef ENABLE_CHECKING
1137 verify_eh_tree (cfun);
1143 /* Return region number of region that is outer to both REGION_A and
1144 REGION_B in IFUN. */
/* Marks every ancestor of REGION_B in a bitmap, then walks outward from
   REGION_A until a marked region is hit — that is the common ancestor.
   (The two walk loops' headers are elided in this listing.)  */
1147 eh_region_outermost (struct function *ifun, int region_a, int region_b)
1149 struct eh_region *rp_a, *rp_b;
1152 gcc_assert (ifun->eh->last_region_number > 0);
1153 gcc_assert (ifun->eh->region_tree);
1155 rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
1156 rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
1157 gcc_assert (rp_a != NULL);
1158 gcc_assert (rp_b != NULL);
1160 b_outer = sbitmap_alloc (ifun->eh->last_region_number + 1);
1161 sbitmap_zero (b_outer);
1165 SET_BIT (b_outer, rp_b->region_number);
1172 if (TEST_BIT (b_outer, rp_a->region_number))
1174 sbitmap_free (b_outer);
1175 return rp_a->region_number;
1181 sbitmap_free (b_outer);
/* htab callbacks for type_to_runtime_map: entries are TREE_LIST nodes
   keyed by TREE_PURPOSE (the front-end type).  */
1186 t2r_eq (const void *pentry, const void *pdata)
1188 const_tree const entry = (const_tree) pentry;
1189 const_tree const data = (const_tree) pdata;
1191 return TREE_PURPOSE (entry) == data;
1195 t2r_hash (const void *pentry)
1197 const_tree const entry = (const_tree) pentry;
1198 return TREE_HASH (TREE_PURPOSE (entry));
/* Register TYPE in type_to_runtime_map, creating the runtime type
   object via the language hook on first insertion.  */
1202 add_type_for_runtime (tree type)
1206 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1207 TREE_HASH (type), INSERT);
1210 tree runtime = (*lang_eh_runtime_type) (type);
1211 *slot = tree_cons (type, runtime, NULL_TREE);
/* Look up the runtime object previously registered for TYPE.  */
1216 lookup_type_for_runtime (tree type)
1220 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1221 TREE_HASH (type), NO_INSERT);
1223 /* We should have always inserted the data earlier. */
1224 return TREE_VALUE (*slot);
1228 /* Represent an entry in @TTypes for either catch actions
1229 or exception filter actions. */
1230 struct GTY(()) ttypes_filter {
1235 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
1236 (a tree) for a @TTypes type node we are thinking about adding. */
1239 ttypes_filter_eq (const void *pentry, const void *pdata)
1241 const struct ttypes_filter *const entry
1242 = (const struct ttypes_filter *) pentry;
1243 const_tree const data = (const_tree) pdata;
1245 return entry->t == data;
1249 ttypes_filter_hash (const void *pentry)
1251 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1252 return TREE_HASH (entry->t);
1255 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
1256 exception specification list we are thinking about adding. */
1257 /* ??? Currently we use the type lists in the order given. Someone
1258 should put these in some canonical order. */
1261 ehspec_filter_eq (const void *pentry, const void *pdata)
1263 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1264 const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
1266 return type_list_equal (entry->t, data->t);
1269 /* Hash function for exception specification lists. */
1272 ehspec_filter_hash (const void *pentry)
1274 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
/* Rotate-and-add over each type in the list (h's declaration and the
   return are elided in this listing).  */
1278 for (list = entry->t; list ; list = TREE_CHAIN (list))
1279 h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
1283 /* Add TYPE (which may be NULL) to crtl->eh.ttype_data, using TYPES_HASH
1284 to speed up the search. Return the filter value to be used. */
1287 add_ttypes_entry (htab_t ttypes_hash, tree type)
1289 struct ttypes_filter **slot, *n;
1291 slot = (struct ttypes_filter **)
1292 htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);
1294 if ((n = *slot) == NULL)
1296 /* Filter value is a 1 based table index. */
1298 n = XNEW (struct ttypes_filter);
1300 n->filter = VEC_length (tree, crtl->eh.ttype_data) + 1;
1303 VEC_safe_push (tree, gc, crtl->eh.ttype_data, type);
1309 /* Add LIST to crtl->eh.ehspec_data, using EHSPEC_HASH and TYPES_HASH
1310 to speed up the search. Return the filter value to be used. */
1313 add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
1315 struct ttypes_filter **slot, *n;
1316 struct ttypes_filter dummy;
/* Probe with a stack dummy keyed by LIST (dummy.t assignment elided).  */
1319 slot = (struct ttypes_filter **)
1320 htab_find_slot (ehspec_hash, &dummy, INSERT);
1322 if ((n = *slot) == NULL)
1324 /* Filter value is a -1 based byte index into a uleb128 buffer. */
1326 n = XNEW (struct ttypes_filter);
1328 n->filter = -(VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) + 1);
1331 /* Generate a 0 terminated list of filter values. */
1332 for (; list ; list = TREE_CHAIN (list))
/* ARM EABI unwinder stores raw type trees; others store uleb128-encoded
   ttypes filter indices.  */
1334 if (targetm.arm_eabi_unwinder)
1335 VARRAY_PUSH_TREE (crtl->eh.ehspec_data, TREE_VALUE (list));
1338 /* Look up each type in the list and encode its filter
1339 value as a uleb128. */
1340 push_uleb128 (&crtl->eh.ehspec_data,
1341 add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
1344 if (targetm.arm_eabi_unwinder)
1345 VARRAY_PUSH_TREE (crtl->eh.ehspec_data, NULL_TREE);
1347 VARRAY_PUSH_UCHAR (crtl->eh.ehspec_data, 0);
1353 /* Generate the action filter values to be used for CATCH and
1354 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
1355 we use lots of landing pads, and so every type or list can share
1356 the same filter value, which saves table space. */
/* Walks every region in region_array once (duplicates are skipped via
   the region_number check) and assigns filter values through two
   temporary hash tables that are freed before returning.  */
1359 assign_filter_values (void)
1362 htab_t ttypes, ehspec;
1364 crtl->eh.ttype_data = VEC_alloc (tree, gc, 16);
/* ehspec_data element type depends on the unwinder: trees for ARM
   EABI, uleb128 bytes otherwise (see add_ehspec_entry).  */
1365 if (targetm.arm_eabi_unwinder)
1366 VARRAY_TREE_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");
1368 VARRAY_UCHAR_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");
1370 ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
1371 ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
1373 for (i = cfun->eh->last_region_number; i > 0; --i)
1375 struct eh_region *r;
1377 r = VEC_index (eh_region, cfun->eh->region_array, i);
1379 /* Mind we don't process a region more than once. */
1380 if (!r || r->region_number != i)
1386 /* Whatever type_list is (NULL or true list), we build a list
1387 of filters for the region. */
1388 r->u.eh_catch.filter_list = NULL_TREE;
1390 if (r->u.eh_catch.type_list != NULL)
1392 /* Get a filter value for each of the types caught and store
1393 them in the region's dedicated list. */
1394 tree tp_node = r->u.eh_catch.type_list;
1396 for (;tp_node; tp_node = TREE_CHAIN (tp_node))
1398 int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
1399 tree flt_node = build_int_cst (NULL_TREE, flt);
1401 r->u.eh_catch.filter_list
1402 = tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
1407 /* Get a filter value for the NULL list also since it will need
1408 an action record anyway. */
1409 int flt = add_ttypes_entry (ttypes, NULL);
1410 tree flt_node = build_int_cst (NULL_TREE, flt);
1412 r->u.eh_catch.filter_list
1413 = tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
1418 case ERT_ALLOWED_EXCEPTIONS:
1420 = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
1428 htab_delete (ttypes);
1429 htab_delete (ehspec);
1432 /* Emit SEQ into basic block just before INSN (that is assumed to be
1433 first instruction of some existing BB and return the newly
/* Returns the newly created basic block; callers rely on bb->next_bb
   being INSN's block.  The block is marked BB_SUPERBLOCK so that
   break_superblocks can fix it up later.  */
1436 emit_to_new_bb_before (rtx seq, rtx insn)
1443 /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
1444 call), we don't want it to go into newly created landing pad or other EH
/* Redirect any fallthru predecessor edge with an explicit jump so it
   cannot fall into the block we are about to insert.  */
1446 for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
1447 if (e->flags & EDGE_FALLTHRU)
1448 force_nonfallthru (e);
1451 last = emit_insn_before (seq, insn);
/* Don't let a trailing barrier end up inside the new block.  */
1452 if (BARRIER_P (last))
1453 last = PREV_INSN (last);
1454 bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
1455 update_bb_for_insn (bb);
1456 bb->flags |= BB_SUPERBLOCK;
1460 /* Generate the code to actually handle exceptions, which will follow the
/* For each top-level region occurrence, emits the post-landing-pad
   code: a dispatch-on-filter sequence for ERT_TRY, a single compare
   for ERT_ALLOWED_EXCEPTIONS, and nothing for the remaining kinds.
   Each emitted sequence ends in a RESX marker that
   connect_post_landing_pads resolves later.  */
1464 build_post_landing_pads (void)
1468 for (i = cfun->eh->last_region_number; i > 0; --i)
1470 struct eh_region *region;
1473 region = VEC_index (eh_region, cfun->eh->region_array, i);
1474 /* Mind we don't process a region more than once. */
1475 if (!region || region->region_number != i)
1478 switch (region->type)
1481 /* ??? Collect the set of all non-overlapping catch handlers
1482 all the way up the chain until blocked by a cleanup. */
1483 /* ??? Outer try regions can share landing pads with inner
1484 try regions if the types are completely non-overlapping,
1485 and there are no intervening cleanups. */
1487 region->post_landing_pad = gen_label_rtx ();
1491 emit_label (region->post_landing_pad);
1493 /* ??? It is mighty inconvenient to call back into the
1494 switch statement generation code in expand_end_case.
1495 Rapid prototyping sez a sequence of ifs. */
1497 struct eh_region *c;
1498 for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
/* A catch-all (NULL type list) matches unconditionally.  */
1500 if (c->u.eh_catch.type_list == NULL)
1501 emit_jump (c->label);
1504 /* Need for one cmp/jump per type caught. Each type
1505 list entry has a matching entry in the filter list
1506 (see assign_filter_values). */
1507 tree tp_node = c->u.eh_catch.type_list;
1508 tree flt_node = c->u.eh_catch.filter_list;
1512 emit_cmp_and_jump_insns
1514 GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
1516 targetm.eh_return_filter_mode (), 0, c->label);
1518 tp_node = TREE_CHAIN (tp_node);
1519 flt_node = TREE_CHAIN (flt_node);
1525 /* We delay the generation of the _Unwind_Resume until we generate
1526 landing pads. We emit a marker here so as to get good control
1527 flow data in the meantime. */
1529 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1535 emit_to_new_bb_before (seq, region->u.eh_try.eh_catch->label);
1539 case ERT_ALLOWED_EXCEPTIONS:
1540 region->post_landing_pad = gen_label_rtx ();
1544 emit_label (region->post_landing_pad);
/* If the filter matches the allowed-exceptions value, control goes
   to the region's own label; otherwise fall through to the RESX.  */
1546 emit_cmp_and_jump_insns (crtl->eh.filter,
1547 GEN_INT (region->u.allowed.filter),
1549 targetm.eh_return_filter_mode (), 0, region->label);
1551 /* We delay the generation of the _Unwind_Resume until we generate
1552 landing pads. We emit a marker here so as to get good control
1553 flow data in the meantime. */
1555 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1561 emit_to_new_bb_before (seq, region->label);
/* MUST_NOT_THROW needs no dispatch code: its label doubles as the
   post-landing pad.  */
1565 case ERT_MUST_NOT_THROW:
1566 region->post_landing_pad = region->label;
1571 /* Nothing to do. */
1580 /* Replace RESX patterns with jumps to the next handler if any, or calls to
1581 _Unwind_Resume otherwise. */
1584 connect_post_landing_pads (void)
1588 for (i = cfun->eh->last_region_number; i > 0; --i)
1590 struct eh_region *region;
1591 struct eh_region *outer;
1595 region = VEC_index (eh_region, cfun->eh->region_array, i);
1596 /* Mind we don't process a region more than once. */
1597 if (!region || region->region_number != i)
1600 /* If there is no RESX, or it has been deleted by flow, there's
1601 nothing to fix up. */
1602 if (! region->resume || INSN_DELETED_P (region->resume))
1605 /* Search for another landing pad in this function. */
1606 for (outer = region->outer; outer ; outer = outer->outer)
1607 if (outer->post_landing_pad)
/* Found an enclosing landing pad: replace the RESX with a direct
   jump and rewire the CFG edge by hand.  */
1615 basic_block src, dest;
1617 emit_jump (outer->post_landing_pad);
1618 src = BLOCK_FOR_INSN (region->resume);
1619 dest = BLOCK_FOR_INSN (outer->post_landing_pad);
1620 while (EDGE_COUNT (src->succs) > 0)
1621 remove_edge (EDGE_SUCC (src, 0));
1622 e = make_edge (src, dest, 0);
1623 e->probability = REG_BR_PROB_BASE;
1624 e->count = src->count;
/* No enclosing pad: the exception leaves the function, so call
   _Unwind_Resume.  */
1628 emit_library_call (unwind_resume_libfunc, LCT_THROW,
1629 VOIDmode, 1, crtl->eh.exc_ptr, ptr_mode);
1631 /* What we just emitted was a throwing libcall, so it got a
1632 barrier automatically added after it. If the last insn in
1633 the libcall sequence isn't the barrier, it's because the
1634 target emits multiple insns for a call, and there are insns
1635 after the actual call insn (which are redundant and would be
1636 optimized away). The barrier is inserted exactly after the
1637 call insn, so let's go get that and delete the insns after
1638 it, because below we need the barrier to be the last insn in
1640 delete_insns_since (NEXT_INSN (last_call_insn ()));
1645 barrier = emit_insn_before (seq, region->resume);
1646 /* Avoid duplicate barrier. */
1647 gcc_assert (BARRIER_P (barrier));
1648 delete_insn (barrier);
1649 delete_insn (region->resume);
1651 /* ??? From tree-ssa we can wind up with catch regions whose
1652 label is not instantiated, but whose resx is present. Now
1653 that we've dealt with the resx, kill the region. */
1654 if (region->label == NULL && region->type == ERT_CLEANUP)
1655 remove_eh_handler (region);
/* Dwarf2 path: emit one landing pad per CLEANUP/TRY/ALLOWED region.
   The pad receives control from the unwinder, copies the exception
   pointer and filter out of the EH return data registers, then falls
   through into the region's post-landing pad.  */
1661 dw2_build_landing_pads (void)
1665 for (i = cfun->eh->last_region_number; i > 0; --i)
1667 struct eh_region *region;
1672 region = VEC_index (eh_region, cfun->eh->region_array, i);
1673 /* Mind we don't process a region more than once. */
1674 if (!region || region->region_number != i)
/* Only these region kinds get a landing pad.  */
1677 if (region->type != ERT_CLEANUP
1678 && region->type != ERT_TRY
1679 && region->type != ERT_ALLOWED_EXCEPTIONS)
1684 region->landing_pad = gen_label_rtx ();
1685 emit_label (region->landing_pad);
/* Target hooks for pads that are entered abnormally.  */
1687 #ifdef HAVE_exception_receiver
1688 if (HAVE_exception_receiver)
1689 emit_insn (gen_exception_receiver ());
1692 #ifdef HAVE_nonlocal_goto_receiver
1693 if (HAVE_nonlocal_goto_receiver)
1694 emit_insn (gen_nonlocal_goto_receiver ());
/* EH return data regs 0/1 hold the exception pointer and filter.  */
1699 emit_move_insn (crtl->eh.exc_ptr,
1700 gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
1701 emit_move_insn (crtl->eh.filter,
1702 gen_rtx_REG (targetm.eh_return_filter_mode (),
1703 EH_RETURN_DATA_REGNO (1)));
1708 bb = emit_to_new_bb_before (seq, region->post_landing_pad);
1709 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
1710 e->count = bb->count;
1711 e->probability = REG_BR_PROB_BASE;
1718 int directly_reachable;
1721 int call_site_index;
/* Scans every insn with a positive REG_EH_REGION note and marks, in
   LP_INFO, each region that is directly reachable from a throwing
   insn.  Returns true iff at least one such region was found (the
   bool found_one below; the return statement is outside this view).  */
1725 sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
1728 bool found_one = false;
1730 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1732 struct eh_region *region;
1733 enum reachable_code rc;
1737 if (! INSN_P (insn))
1740 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
/* Notes <= 0 mean the insn cannot throw into this function.  */
1741 if (!note || INTVAL (XEXP (note, 0)) <= 0)
1744 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
/* An ERT_THROW region supplies the exact thrown type; dispatch
   happens in its enclosing region.  */
1748 type_thrown = NULL_TREE;
1749 if (region->type == ERT_THROW)
1751 type_thrown = region->u.eh_throw.type;
1752 region = region->outer;
1755 /* Find the first containing region that might handle the exception.
1756 That's the landing pad to which we will transfer control. */
1757 rc = RNL_NOT_CAUGHT;
1758 for (; region; region = region->outer)
1760 rc = reachable_next_level (region, type_thrown, NULL, false);
1761 if (rc != RNL_NOT_CAUGHT)
1764 if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
1766 lp_info[region->region_number].directly_reachable = 1;
/* For each directly reachable region: build its action chain, then
   assign it a dispatch index and a call-site value.  DISPATCH_LABEL is
   the single shared sjlj landing pad all such regions route through.  */
1775 sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
1780 /* First task: build the action table. */
1782 VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
1783 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
1785 for (i = cfun->eh->last_region_number; i > 0; --i)
1786 if (lp_info[i].directly_reachable)
1788 struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
1790 r->landing_pad = dispatch_label;
1791 lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
/* Any region with a real action chain forces LSDA emission.  */
1792 if (lp_info[i].action_index != -1)
1793 crtl->uses_eh_lsda = 1;
1796 htab_delete (ar_hash);
1798 /* Next: assign dispatch values. In dwarf2 terms, this would be the
1799 landing pad label for the region. For sjlj though, there is one
1800 common landing pad from which we dispatch to the post-landing pads.
1802 A region receives a dispatch index if it is directly reachable
1803 and requires in-function processing. Regions that share post-landing
1804 pads may share dispatch indices. */
1805 /* ??? Post-landing pad sharing doesn't actually happen at the moment
1806 (see build_post_landing_pads) so we don't bother checking for it. */
1809 for (i = cfun->eh->last_region_number; i > 0; --i)
1810 if (lp_info[i].directly_reachable)
1811 lp_info[i].dispatch_index = index++;
1813 /* Finally: assign call-site values. If dwarf2 terms, this would be
1814 the region number assigned by convert_to_eh_region_ranges, but
1815 handles no-action and must-not-throw differently. */
1818 for (i = cfun->eh->last_region_number; i > 0; --i)
1819 if (lp_info[i].directly_reachable)
1821 int action = lp_info[i].action_index;
1823 /* Map must-not-throw to otherwise unused call-site index 0. */
1826 /* Map no-action to otherwise unused call-site index -1. */
1827 else if (action == -1)
1829 /* Otherwise, look it up in the table. */
1831 index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
1833 lp_info[i].call_site_index = index;
/* Before each potentially-throwing insn, store that insn's call-site
   index into the sjlj function context, so the dispatcher knows where
   the exception originated.  Redundant stores are suppressed by
   tracking the last value written within an extended basic block.  */
1838 sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
1840 int last_call_site = -2;
1843 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1845 struct eh_region *region;
1847 rtx note, before, p;
1849 /* Reset value tracking at extended basic block boundaries. */
1851 last_call_site = -2;
1853 if (! INSN_P (insn))
1856 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1858 /* Calls that are known to not throw need not be marked. */
1859 if (note && INTVAL (XEXP (note, 0)) <= 0)
1863 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
1869 /* Calls (and trapping insns) without notes are outside any
1870 exception handling region in this function. Mark them as
1873 || (flag_non_call_exceptions
1874 && may_trap_p (PATTERN (insn))))
1875 this_call_site = -1;
1880 this_call_site = lp_info[region->region_number].call_site_index;
/* Skip the store if the slot already holds this value.  */
1882 if (this_call_site == last_call_site)
1885 /* Don't separate a call from it's argument loads. */
1888 before = find_first_parameter_load (insn, NULL_RTX);
1891 mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
1892 sjlj_fc_call_site_ofs);
1893 emit_move_insn (mem, GEN_INT (this_call_site));
1897 emit_insn_before (p, before);
1898 last_call_site = this_call_site;
1902 /* Construct the SjLj_Function_Context. */
/* Emits the function-entry setup: fill in the context's personality
   and LSDA slots, perform the setjmp (builtin or library, depending on
   DONT_USE_BUILTIN_SETJMP), register the context with the runtime, and
   place the whole sequence on the entry edge or after FUNCTION_BEG.  */
1905 sjlj_emit_function_enter (rtx dispatch_label)
1907 rtx fn_begin, fc, mem, seq;
1908 bool fn_begin_outside_block;
1910 fc = crtl->eh.sjlj_fc;
1914 /* We're storing this libcall's address into memory instead of
1915 calling it directly. Thus, we must call assemble_external_libcall
1916 here, as we can not depend on emit_library_call to do it for us. */
1917 assemble_external_libcall (eh_personality_libfunc);
1918 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
1919 emit_move_insn (mem, eh_personality_libfunc);
/* LSDA slot: a symbol ref to this function's .LLSDA label if the
   function needs an LSDA, zero otherwise.  */
1921 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
1922 if (crtl->uses_eh_lsda)
1927 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
1928 sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
1929 SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
1930 emit_move_insn (mem, sym);
1933 emit_move_insn (mem, const0_rtx);
1935 #ifdef DONT_USE_BUILTIN_SETJMP
/* Library setjmp: nonzero return means we arrived via a throw, so
   branch to the dispatcher (given a 1% branch probability note).  */
1938 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
1939 TYPE_MODE (integer_type_node), 1,
1940 plus_constant (XEXP (fc, 0),
1941 sjlj_fc_jbuf_ofs), Pmode);
1943 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
1944 TYPE_MODE (integer_type_node), 0, dispatch_label);
1945 add_reg_br_prob_note (get_insns (), REG_BR_PROB_BASE/100);
1948 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
1952 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
1953 1, XEXP (fc, 0), Pmode);
1958 /* ??? Instead of doing this at the beginning of the function,
1959 do this in a block that is at loop level 0 and dominates all
1960 can_throw_internal instructions. */
/* Place the sequence on the entry edge if FUNCTION_BEG precedes the
   first real block; otherwise just emit it after that note.  */
1962 fn_begin_outside_block = true;
1963 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
1964 if (NOTE_P (fn_begin))
1966 if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
1968 else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
1969 fn_begin_outside_block = false;
1972 if (fn_begin_outside_block)
1973 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR))
1975 emit_insn_after (seq, fn_begin);
1978 /* Call back from expand_function_end to know where we should put
1979 the call to unwind_sjlj_unregister_libfunc if needed. */
1982 sjlj_emit_function_exit_after (rtx after)
1984 crtl->eh.sjlj_exit_after = after;
/* Emits the unregister call for the sjlj function context at the spot
   recorded by sjlj_emit_function_exit_after.  */
1988 sjlj_emit_function_exit (void)
1994 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
1995 1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);
2000 /* ??? Really this can be done in any block at loop level 0 that
2001 post-dominates all can_throw_internal instructions. This is
2002 the last possible moment. */
2004 insn = crtl->eh.sjlj_exit_after;
2006 insn = NEXT_INSN (insn);
2008 emit_insn_after (seq, insn);
/* Emits the shared sjlj dispatcher at DISPATCH_LABEL: reload the
   dispatch index, exception pointer and filter from the function
   context, then compare-and-jump to each directly reachable region's
   post-landing pad (the first reachable one is the fallthrough).  */
2012 sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
2014 enum machine_mode unwind_word_mode = targetm.unwind_word_mode ();
2015 enum machine_mode filter_mode = targetm.eh_return_filter_mode ();
2016 int i, first_reachable;
2017 rtx mem, dispatch, seq, fc;
2022 fc = crtl->eh.sjlj_fc;
2026 emit_label (dispatch_label);
2028 #ifndef DONT_USE_BUILTIN_SETJMP
2029 expand_builtin_setjmp_receiver (dispatch_label);
2032 /* Load up dispatch index, exc_ptr and filter values from the
2033 function context. */
2034 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2035 sjlj_fc_call_site_ofs);
2036 dispatch = copy_to_reg (mem);
2038 mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
/* The context stores unwind words; narrow/convert to ptr_mode for
   the exception pointer if the modes differ.  */
2039 if (unwind_word_mode != ptr_mode)
2041 #ifdef POINTERS_EXTEND_UNSIGNED
2042 mem = convert_memory_address (ptr_mode, mem);
2044 mem = convert_to_mode (ptr_mode, mem, 0);
2047 emit_move_insn (crtl->eh.exc_ptr, mem);
2049 mem = adjust_address (fc, unwind_word_mode,
2050 sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
2051 if (unwind_word_mode != filter_mode)
2052 mem = convert_to_mode (filter_mode, mem, 0);
2053 emit_move_insn (crtl->eh.filter, mem);
2055 /* Jump to one of the directly reachable regions. */
2056 /* ??? This really ought to be using a switch statement. */
2058 first_reachable = 0;
2059 for (i = cfun->eh->last_region_number; i > 0; --i)
2061 if (! lp_info[i].directly_reachable)
/* The first reachable region needs no compare — it's where the
   dispatcher falls through to.  */
2064 if (! first_reachable)
2066 first_reachable = i;
2070 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2071 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
2072 ((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, i))
2073 ->post_landing_pad);
2079 before = (((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, first_reachable))
2080 ->post_landing_pad);
2082 bb = emit_to_new_bb_before (seq, before);
2083 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
2084 e->count = bb->count;
2085 e->probability = REG_BR_PROB_BASE;
/* Top-level driver for the sjlj landing-pad scheme: find reachable
   regions, and only if any exist, allocate the stack-local function
   context and emit the enter/dispatch/exit machinery.  */
2089 sjlj_build_landing_pads (void)
2091 struct sjlj_lp_info *lp_info;
/* One zeroed slot per region number (region numbers are 1-based).  */
2093 lp_info = XCNEWVEC (struct sjlj_lp_info, cfun->eh->last_region_number + 1);
2095 if (sjlj_find_directly_reachable_regions (lp_info))
2097 rtx dispatch_label = gen_label_rtx ();
2098 int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
2099 TYPE_MODE (sjlj_fc_type_node),
2100 TYPE_ALIGN (sjlj_fc_type_node));
2102 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2103 int_size_in_bytes (sjlj_fc_type_node),
2106 sjlj_assign_call_site_values (dispatch_label, lp_info);
2107 sjlj_mark_call_sites (lp_info);
2109 sjlj_emit_function_enter (dispatch_label);
2110 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2111 sjlj_emit_function_exit ();
2117 /* After initial rtl generation, call back to finish generating
2118 exception support code. */
2121 finish_eh_generation (void)
2125 /* Nothing to do if no regions created. */
2126 if (cfun->eh->region_tree == NULL)
2129 /* The object here is to provide detailed information (via
2130 reachable_handlers) on how exception control flows within the
2131 function for the CFG construction. In this first pass, we can
2132 include type information garnered from ERT_THROW and
2133 ERT_ALLOWED_EXCEPTIONS regions, and hope that it will be useful
2134 in deleting unreachable handlers. Subsequently, we will generate
2135 landing pads which will connect many of the handlers, and then
2136 type information will not be effective. Still, this is a win
2137 over previous implementations. */
2139 /* These registers are used by the landing pads. Make sure they
2140 have been generated. */
2141 get_exception_pointer ();
2142 get_exception_filter ();
2144 /* Construct the landing pads. */
2146 assign_filter_values ();
2147 build_post_landing_pads ();
2148 connect_post_landing_pads ();
/* Target-dependent final step: shared sjlj dispatcher or one dwarf2
   pad per region.  */
2149 if (USING_SJLJ_EXCEPTIONS)
2150 sjlj_build_landing_pads ();
2152 dw2_build_landing_pads ();
2154 crtl->eh.built_landing_pads = 1;
2156 /* We've totally changed the CFG. Start over. */
2157 find_exception_handler_labels ();
2158 break_superblocks ();
2159 if (USING_SJLJ_EXCEPTIONS
2160 /* Kludge for Alpha/Tru64 (see alpha_gp_save_rtx). */
2161 || single_succ_edge (ENTRY_BLOCK_PTR)->insns.r)
2162 commit_edge_insertions ();
/* Rebuild EH edges: drop the stale ones, then let rtl_make_eh_edge
   recreate them from the block's last insn.  */
2168 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2170 if (e->flags & EDGE_EH)
2179 rtl_make_eh_edge (NULL, bb, BB_END (bb));
2183 /* This section handles removing dead code for flow. */
2185 /* Splice REGION from the region tree and replace it by REPLACE etc.
2186 When UPDATE_CATCH_TRY is true mind updating links from catch to try
2190 remove_eh_handler_and_replace (struct eh_region *region,
2191 struct eh_region *replace,
2192 bool update_catch_try)
2194 struct eh_region **pp, **pp_start, *p, *outer, *inner;
2197 outer = region->outer;
2199 /* When we are moving the region in EH tree, update prev_try pointers. */
2200 if (outer != replace && region->inner)
2202 struct eh_region *prev_try = find_prev_try (replace);
/* Tree walk over REGION's subtree, updating cleanup prev_try links;
   the descent stops at region kinds that shield inner cleanups.  */
2206 if (p->type == ERT_CLEANUP)
2207 p->u.cleanup.prev_try = prev_try;
2208 if (p->type != ERT_TRY
2209 && p->type != ERT_MUST_NOT_THROW
2210 && (p->type != ERT_ALLOWED_EXCEPTIONS
2211 || p->u.allowed.type_list)
2214 else if (p->next_peer)
2218 while (p != region && !p->next_peer)
2225 /* For the benefit of efficiently handling REG_EH_REGION notes,
2226 replace this region in the region array with its containing
2227 region. Note that previous region deletions may result in
2228 multiple copies of this region in the array, so we have a
2229 list of alternate numbers by which we are known. */
2231 VEC_replace (eh_region, cfun->eh->region_array, region->region_number,
2238 EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
2240 VEC_replace (eh_region, cfun->eh->region_array, i, replace)
/* Fold REGION's alias numbers (and its own number) into REPLACE's
   aka bitmap so future lookups resolve correctly.  */
2247 replace->aka = BITMAP_GGC_ALLOC ();
2249 bitmap_ior_into (replace->aka, region->aka);
2250 bitmap_set_bit (replace->aka, region->region_number);
2253 if (crtl->eh.built_landing_pads)
2254 lab = region->landing_pad;
2256 lab = region->label;
/* Unlink REGION from its peer list...  */
2258 pp_start = &outer->inner;
2260 pp_start = &cfun->eh->region_tree;
2261 for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
2263 *pp = region->next_peer;
/* ...and splice its children into REPLACE's child list (or the tree
   root if there is no REPLACE).  */
2266 pp_start = &replace->inner;
2268 pp_start = &cfun->eh->region_tree;
2269 inner = region->inner;
2272 for (p = inner; p->next_peer ; p = p->next_peer)
2276 p->next_peer = *pp_start;
/* Removing a catch requires fixing the doubly linked catch chain and
   possibly deleting the now-empty try region.  */
2280 if (region->type == ERT_CATCH
2281 && update_catch_try)
2283 struct eh_region *eh_try, *next, *prev;
2285 for (eh_try = region->next_peer;
2286 eh_try->type == ERT_CATCH;
2287 eh_try = eh_try->next_peer)
2289 gcc_assert (eh_try->type == ERT_TRY);
2291 next = region->u.eh_catch.next_catch;
2292 prev = region->u.eh_catch.prev_catch;
2295 next->u.eh_catch.prev_catch = prev;
2297 eh_try->u.eh_try.last_catch = prev;
2299 prev->u.eh_catch.next_catch = next;
2302 eh_try->u.eh_try.eh_catch = next;
/* Last catch removed: the try region itself is dead.  */
2304 remove_eh_handler (eh_try);
2309 /* Splice REGION from the region tree and replace it by the outer region
2313 remove_eh_handler (struct eh_region *region)
2315 remove_eh_handler_and_replace (region, region->outer, true);
2318 /* Remove Eh region R that has turned out to have no code in its handler. */
/* Convenience wrapper: look region R up by number and remove it,
   replacing it with its outer region.  */
2321 remove_eh_region (int r)
2323 struct eh_region *region;
2325 region = VEC_index (eh_region, cfun->eh->region_array, r);
2326 remove_eh_handler (region);
2329 /* Remove Eh region R that has turned out to have no code in its handler
2330 and replace in by R2. */
/* Note: despite the comment, the replacement used is R2's OUTER
   region, per the function name and the call below.  */
2333 remove_eh_region_and_replace_by_outer_of (int r, int r2)
2335 struct eh_region *region, *region2;
2337 region = VEC_index (eh_region, cfun->eh->region_array, r);
2338 region2 = VEC_index (eh_region, cfun->eh->region_array, r2);
2339 remove_eh_handler_and_replace (region, region2->outer, true);
2342 /* Invokes CALLBACK for every exception handler label. Only used by old
2343 loop hackery; should not be used by new code. */
/* NOTE(review): the loop below runs i from 0 to last_region_number-1
   (exclusive), unlike the 1..n inclusive loops elsewhere in this file
   — presumably intentional for this legacy helper, but worth
   confirming against history.  */
2346 for_each_eh_label (void (*callback) (rtx))
2349 for (i = 0; i < cfun->eh->last_region_number; i++)
2351 struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
2352 if (r && r->region_number == i && r->label
2353 && GET_CODE (r->label) == CODE_LABEL)
2354 (*callback) (r->label);
2358 /* Invoke CALLBACK for every exception region in the current function. */
2361 for_each_eh_region (void (*callback) (struct eh_region *))
2363 int i, n = cfun->eh->last_region_number;
2364 for (i = 1; i <= n; ++i)
2366 struct eh_region *region;
2368 region = VEC_index (eh_region, cfun->eh->region_array, i);
2370 (*callback) (region);
2374 /* This section describes CFG exception edges for flow. */
2376 /* For communicating between calls to reachable_next_level. */
2377 struct reachable_info
/* Invoked for each reachable handler region; callback_data is passed
   through unchanged.  */
2381 void (*callback) (struct eh_region *, void *);
2382 void *callback_data;
2385 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2386 base class of TYPE, is in HANDLED. */
2389 check_handled (tree handled, tree type)
2393 /* We can check for exact matches without front-end help. */
2394 if (! lang_eh_type_covers)
2396 for (t = handled; t ; t = TREE_CHAIN (t))
2397 if (TREE_VALUE (t) == type)
/* With front-end assistance we can also match base classes.  */
2402 for (t = handled; t ; t = TREE_CHAIN (t))
2403 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2410 /* A subroutine of reachable_next_level. If we are collecting a list
2411 of handlers, add one. After landing pad generation, reference
2412 it instead of the handlers themselves. Further, the handlers are
2413 all wired together, so by referencing one, we've got them all.
2414 Before landing pad generation we reference each handler individually.
2416 LP_REGION contains the landing pad; REGION is the handler. */
2419 add_reachable_handler (struct reachable_info *info,
2420 struct eh_region *lp_region, struct eh_region *region)
/* After landing pads exist, report the pad's region; before that,
   report the handler region itself.  */
2425 if (crtl->eh.built_landing_pads)
2426 info->callback (lp_region, info->callback_data);
2428 info->callback (region, info->callback_data);
2431 /* Process one level of exception regions for reachability.
2432 If TYPE_THROWN is non-null, then it is the *exact* type being
2433 propagated. If INFO is non-null, then collect handler labels
2434 and caught/allowed type information between invocations. */
2436 static enum reachable_code
2437 reachable_next_level (struct eh_region *region, tree type_thrown,
2438 struct reachable_info *info,
2441 switch (region->type)
2444 /* Before landing-pad generation, we model control flow
2445 directly to the individual handlers. In this way we can
2446 see that catch handler types may shadow one another. */
2447 add_reachable_handler (info, region, region);
2448 return RNL_MAYBE_CAUGHT;
2452 struct eh_region *c;
2453 enum reachable_code ret = RNL_NOT_CAUGHT;
/* ERT_TRY: examine each catch in order; catches earlier in the
   chain can shadow later ones.  */
2455 for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
2457 /* A catch-all handler ends the search. */
2458 if (c->u.eh_catch.type_list == NULL)
2460 add_reachable_handler (info, region, c);
2466 /* If we have at least one type match, end the search. */
2467 tree tp_node = c->u.eh_catch.type_list;
2469 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2471 tree type = TREE_VALUE (tp_node);
2473 if (type == type_thrown
2474 || (lang_eh_type_covers
2475 && (*lang_eh_type_covers) (type, type_thrown)))
2477 add_reachable_handler (info, region, c);
2482 /* If we have definitive information of a match failure,
2483 the catch won't trigger. */
2484 if (lang_eh_type_covers)
2485 return RNL_NOT_CAUGHT;
2488 /* At this point, we either don't know what type is thrown or
2489 don't have front-end assistance to help deciding if it is
2490 covered by one of the types in the list for this region.
2492 We'd then like to add this region to the list of reachable
2493 handlers since it is indeed potentially reachable based on the
2494 information we have.
2496 Actually, this handler is for sure not reachable if all the
2497 types it matches have already been caught. That is, it is only
2498 potentially reachable if at least one of the types it catches
2499 has not been previously caught. */
2502 ret = RNL_MAYBE_CAUGHT;
2505 tree tp_node = c->u.eh_catch.type_list;
2506 bool maybe_reachable = false;
2508 /* Compute the potential reachability of this handler and
2509 update the list of types caught at the same time. */
2510 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2512 tree type = TREE_VALUE (tp_node);
2514 if (! check_handled (info->types_caught, type))
2517 = tree_cons (NULL, type, info->types_caught);
2519 maybe_reachable = true;
2523 if (maybe_reachable)
2525 add_reachable_handler (info, region, c);
2527 /* ??? If the catch type is a base class of every allowed
2528 type, then we know we can stop the search. */
2529 ret = RNL_MAYBE_CAUGHT;
2537 case ERT_ALLOWED_EXCEPTIONS:
2538 /* An empty list of types definitely ends the search. */
2539 if (region->u.allowed.type_list == NULL_TREE)
2541 add_reachable_handler (info, region, region);
2545 /* Collect a list of lists of allowed types for use in detecting
2546 when a catch may be transformed into a catch-all. */
2548 info->types_allowed = tree_cons (NULL_TREE,
2549 region->u.allowed.type_list,
2550 info->types_allowed);
2552 /* If we have definitive information about the type hierarchy,
2553 then we can tell if the thrown type will pass through the
2555 if (type_thrown && lang_eh_type_covers)
2557 if (check_handled (region->u.allowed.type_list, type_thrown))
2558 return RNL_NOT_CAUGHT;
2561 add_reachable_handler (info, region, region);
2566 add_reachable_handler (info, region, region);
2567 return RNL_MAYBE_CAUGHT;
2570 /* Catch regions are handled by their controlling try region. */
2571 return RNL_NOT_CAUGHT;
2573 case ERT_MUST_NOT_THROW:
2574 /* Here we end our search, since no exceptions may propagate.
2576 Local landing pads of ERT_MUST_NOT_THROW instructions are reachable
2577 only via locally handled RESX instructions.
2579 When we inline a function call, we can bring in new handlers. In order
2580 to avoid ERT_MUST_NOT_THROW landing pads from being deleted as unreachable
2581 assume that such handlers exists prior for any inlinable call prior
2582 inlining decisions are fixed. */
2586 add_reachable_handler (info, region, region);
2594 /* Shouldn't see these here. */
2602 /* Invoke CALLBACK on each region reachable from REGION_NUMBER. */
2605 foreach_reachable_handler (int region_number, bool is_resx, bool inlinable_call,
2606 void (*callback) (struct eh_region *, void *),
2607 void *callback_data)
2609 struct reachable_info info;
2610 struct eh_region *region;
2613 memset (&info, 0, sizeof (info));
2614 info.callback = callback;
2615 info.callback_data = callback_data;
2617 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
2621 type_thrown = NULL_TREE;
2624 /* A RESX leaves a region instead of entering it. Thus the
2625 region itself may have been deleted out from under us. */
2628 region = region->outer;
2630 else if (region->type == ERT_THROW)
2632 type_thrown = region->u.eh_throw.type;
2633 region = region->outer;
/* Walk outward until some level reports the exception as caught.  */
2638 if (reachable_next_level (region, type_thrown, &info,
2639 inlinable_call || is_resx) >= RNL_CAUGHT)
2641 /* If we have processed one cleanup, there is no point in
2642 processing any more of them. Each cleanup will have an edge
2643 to the next outer cleanup region, so the flow graph will be
2645 if (region->type == ERT_CLEANUP)
2646 region = region->u.cleanup.prev_try;
2648 region = region->outer;
2652 /* Retrieve a list of labels of exception handlers which can be
2653 reached by a given insn. */
/* NOTE(review): helper callback for foreach_reachable_handler; stores a fresh
   one-element INSN_LIST holding the region's landing pad into *DATA.  The
   return type / braces are elided in this excerpt.  */
2656 arh_to_landing_pad (struct eh_region *region, void *data)
2658 rtx *p_handlers = (rtx *) data;
2660 *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
/* NOTE(review): companion callback to arh_to_landing_pad — prepends the
   region's handler label onto the list accumulated in *DATA.  */
2664 arh_to_label (struct eh_region *region, void *data)
2666 rtx *p_handlers = (rtx *) data;
2667 *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
/* NOTE(review): builds the handler list for INSN.  A RESX pattern supplies
   the region number directly; otherwise it comes from the REG_EH_REGION note
   (non-positive note values appear to mean "no region" — the elided lines
   presumably return NULL there; confirm against the full source).  The
   callback chosen depends on whether landing pads have been built yet.  */
2671 reachable_handlers (rtx insn)
2673 bool is_resx = false;
2674 rtx handlers = NULL;
2678 && GET_CODE (PATTERN (insn)) == RESX)
2680 region_number = XINT (PATTERN (insn), 0);
2685 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2686 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2688 region_number = INTVAL (XEXP (note, 0));
2691 foreach_reachable_handler (region_number, is_resx, false,
2692 (crtl->eh.built_landing_pads
2693 ? arh_to_landing_pad
2700 /* Determine if the given INSN can throw an exception that is caught
2701 within the function. */
/* NOTE(review): same unwind-past-ERT_THROW preamble as can_throw_external_1,
   then walks outward; RNL_BLOCKED and RNL_NOT_CAUGHT are distinguished — the
   elided lines presumably return false/true respectively.  Lines are sampled;
   code kept byte-identical.  */
2704 can_throw_internal_1 (int region_number, bool is_resx, bool inlinable_call)
2706 struct eh_region *region;
2709 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
2713 type_thrown = NULL_TREE;
2715 region = region->outer;
2716 else if (region->type == ERT_THROW)
2718 type_thrown = region->u.eh_throw.type;
2719 region = region->outer;
2722 /* If this exception is ignored by each and every containing region,
2723 then control passes straight out. The runtime may handle some
2724 regions, which also do not require processing internally. */
2725 for (; region; region = region->outer)
2727 enum reachable_code how = reachable_next_level (region, type_thrown, 0,
2728 inlinable_call || is_resx);
2729 if (how == RNL_BLOCKED)
2731 if (how != RNL_NOT_CAUGHT)
/* NOTE(review): public wrapper over can_throw_internal_1.  Non-insns bail
   out early; a positive-numbered RESX is treated as a resume (is_resx=true);
   a delay-slot SEQUENCE is unwrapped to its first insn; everything else uses
   the REG_EH_REGION note, with non-positive values short-circuiting (elided
   return).  */
2739 can_throw_internal (const_rtx insn)
2743 if (! INSN_P (insn))
2747 && GET_CODE (PATTERN (insn)) == RESX
2748 && XINT (PATTERN (insn), 0) > 0)
2749 return can_throw_internal_1 (XINT (PATTERN (insn), 0), true, false);
2751 if (NONJUMP_INSN_P (insn)
2752 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2753 insn = XVECEXP (PATTERN (insn), 0, 0);
2755 /* Every insn that might throw has an EH_REGION note. */
2756 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2757 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2760 return can_throw_internal_1 (INTVAL (XEXP (note, 0)), false, false);
2763 /* Determine if the given INSN can throw an exception that is
2764 visible outside the function. */
/* NOTE(review): mirror of can_throw_internal_1 but with the opposite sense:
   if any containing region catches or blocks (>= RNL_CAUGHT), the exception
   is not visible to callers (elided lines presumably return false, and true
   after the loop).  */
2767 can_throw_external_1 (int region_number, bool is_resx, bool inlinable_call)
2769 struct eh_region *region;
2772 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
2776 type_thrown = NULL_TREE;
2778 region = region->outer;
2779 else if (region->type == ERT_THROW)
2781 type_thrown = region->u.eh_throw.type;
2782 region = region->outer;
2785 /* If the exception is caught or blocked by any containing region,
2786 then it is not seen by any calling function. */
2787 for (; region ; region = region->outer)
2788 if (reachable_next_level (region, type_thrown, NULL,
2789 inlinable_call || is_resx) >= RNL_CAUGHT)
/* NOTE(review): public wrapper over can_throw_external_1.  A delay-slot
   SEQUENCE is handled by recursing over each member insn (any member that can
   throw externally makes the whole sequence throw).  Insns with no
   REG_EH_REGION note fall back to a conservative guess: calls, or trapping
   insns under -fnon-call-exceptions, may throw.  */
2796 can_throw_external (const_rtx insn)
2800 if (! INSN_P (insn))
2804 && GET_CODE (PATTERN (insn)) == RESX
2805 && XINT (PATTERN (insn), 0) > 0)
2806 return can_throw_external_1 (XINT (PATTERN (insn), 0), true, false);
2808 if (NONJUMP_INSN_P (insn)
2809 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2811 rtx seq = PATTERN (insn);
2812 int i, n = XVECLEN (seq, 0);
2814 for (i = 0; i < n; i++)
2815 if (can_throw_external (XVECEXP (seq, 0, i)))
2821 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2824 /* Calls (and trapping insns) without notes are outside any
2825 exception handling region in this function. We have to
2826 assume it might throw. Given that the front end and middle
2827 ends mark known NOTHROW functions, this isn't so wildly
2829 return (CALL_P (insn)
2830 || (flag_non_call_exceptions
2831 && may_trap_p (PATTERN (insn))));
2833 if (INTVAL (XEXP (note, 0)) <= 0)
2836 return can_throw_external_1 (INTVAL (XEXP (note, 0)), false, false);
2839 /* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls. */
/* NOTE(review): pass body.  Scans every insn (and the epilogue delay list)
   with can_throw_external; any externally-throwing non-sibcall clears
   all_throwers_are_sibcalls.  If nothing throws and the function body is
   AVAIL_AVAILABLE in the cgraph, TREE_NOTHROW is set and callers' edges are
   marked non-throwing.  Sampling elided the TREE_NOTHROW accumulator variable
   and several early returns.  */
2842 set_nothrow_function_flags (void)
2848 /* Assume crtl->all_throwers_are_sibcalls until we encounter
2849 something that can throw an exception. We specifically exempt
2850 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
2851 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
2854 crtl->all_throwers_are_sibcalls = 1;
2856 /* If we don't know that this implementation of the function will
2857 actually be used, then we must not set TREE_NOTHROW, since
2858 callers must not assume that this function does not throw. */
2859 if (TREE_NOTHROW (current_function_decl))
2862 if (! flag_exceptions)
2865 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2866 if (can_throw_external (insn))
2870 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
2872 crtl->all_throwers_are_sibcalls = 0;
2877 for (insn = crtl->epilogue_delay_list; insn;
2878 insn = XEXP (insn, 1))
2879 if (can_throw_external (insn))
2883 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
2885 crtl->all_throwers_are_sibcalls = 0;
2890 && (cgraph_function_body_availability (cgraph_node
2891 (current_function_decl))
2892 >= AVAIL_AVAILABLE))
2894 struct cgraph_node *node = cgraph_node (current_function_decl);
2895 struct cgraph_edge *e;
2896 for (e = node->callers; e; e = e->next_caller)
2897 e->can_throw_external = false;
2898 TREE_NOTHROW (current_function_decl) = 1;
2901 fprintf (dump_file, "Marking function nothrow: %s\n\n",
2902 current_function_name ());
/* RTL pass descriptor registering set_nothrow_function_flags under the name
   "nothrow".  Several initializer fields (gate, sub/next, opt-in flags) are
   elided by the line sampling.  */
2907 struct rtl_opt_pass pass_set_nothrow_function_flags =
2911 "nothrow", /* name */
2913 set_nothrow_function_flags, /* execute */
2916 0, /* static_pass_number */
2917 TV_NONE, /* tv_id */
2918 0, /* properties_required */
2919 0, /* properties_provided */
2920 0, /* properties_destroyed */
2921 0, /* todo_flags_start */
2922 TODO_dump_func, /* todo_flags_finish */
2927 /* Various hooks for unwind library. */
2929 /* Do any necessary initialization to access arbitrary stack frames.
2930 On the SPARC, this means flushing the register windows. */
/* Expander for __builtin_unwind_init: forces all registers to be saved in
   this frame and runs the target's SETUP_FRAME_ADDRESSES hook if defined.  */
2933 expand_builtin_unwind_init (void)
2935 /* Set this so all the registers get saved in our frame; we need to be
2936 able to copy the saved values for any registers from frames we unwind. */
2937 crtl->saves_all_registers = 1;
2939 #ifdef SETUP_FRAME_ADDRESSES
2940 SETUP_FRAME_ADDRESSES ();
/* NOTE(review): expander for __builtin_eh_return_data_regno.  The argument
   must fold to an INTEGER_CST; it is mapped through EH_RETURN_DATA_REGNO and
   then to the debug register numbering (DWARF_FRAME_REGNUM when available,
   else DBX_REGISTER_NUMBER — the #else is elided by sampling).  Error and
   INVALID_REGNUM paths presumably return constm1_rtx; confirm in full
   source.  */
2945 expand_builtin_eh_return_data_regno (tree exp)
2947 tree which = CALL_EXPR_ARG (exp, 0);
2948 unsigned HOST_WIDE_INT iwhich;
2950 if (TREE_CODE (which) != INTEGER_CST)
2952 error ("argument of %<__builtin_eh_return_regno%> must be constant");
2956 iwhich = tree_low_cst (which, 1);
2957 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2958 if (iwhich == INVALID_REGNUM)
2961 #ifdef DWARF_FRAME_REGNUM
2962 iwhich = DWARF_FRAME_REGNUM (iwhich);
2964 iwhich = DBX_REGISTER_NUMBER (iwhich);
2967 return GEN_INT (iwhich);
2970 /* Given a value extracted from the return address register or stack slot,
2971 return the actual address encoded in that value. */
/* NOTE(review): normalizes ADDR to Pmode, masks target-specific bits
   (MASK_RETURN_ADDR) and applies RETURN_ADDR_OFFSET.  The final return and a
   force_operand wrapper appear to be elided by sampling.  */
2974 expand_builtin_extract_return_addr (tree addr_tree)
2976 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2978 if (GET_MODE (addr) != Pmode
2979 && GET_MODE (addr) != VOIDmode)
2981 #ifdef POINTERS_EXTEND_UNSIGNED
2982 addr = convert_memory_address (Pmode, addr);
2984 addr = convert_to_mode (Pmode, addr, 0);
2988 /* First mask out any unwanted bits. */
2989 #ifdef MASK_RETURN_ADDR
2990 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
2993 /* Then adjust to find the real return address. */
2994 #if defined (RETURN_ADDR_OFFSET)
2995 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
3001 /* Given an actual address in addr_tree, do any necessary encoding
3002 and return the value to be stored in the return address register or
3003 stack slot so the epilogue will return to that address. */
/* NOTE(review): inverse of expand_builtin_extract_return_addr — converts to
   Pmode and subtracts RETURN_ADDR_OFFSET.  The trailing return is elided.  */
3006 expand_builtin_frob_return_addr (tree addr_tree)
3008 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
3010 addr = convert_memory_address (Pmode, addr);
3012 #ifdef RETURN_ADDR_OFFSET
3013 addr = force_reg (Pmode, addr);
3014 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
3020 /* Set up the epilogue with the magic bits we'll need to return to the
3021 exception handler. */
/* NOTE(review): expander for __builtin_eh_return.  Latches the stack
   adjustment (when EH_RETURN_STACKADJ_RTX exists) and handler address into
   pseudo registers cached in crtl->eh, creating them on first use, then jumps
   to the shared ehr_label that expand_eh_return later materializes.  */
3024 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
3029 #ifdef EH_RETURN_STACKADJ_RTX
3030 tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
3031 VOIDmode, EXPAND_NORMAL);
3032 tmp = convert_memory_address (Pmode, tmp);
3033 if (!crtl->eh.ehr_stackadj)
3034 crtl->eh.ehr_stackadj = copy_to_reg (tmp);
3035 else if (tmp != crtl->eh.ehr_stackadj)
3036 emit_move_insn (crtl->eh.ehr_stackadj, tmp);
3039 tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
3040 VOIDmode, EXPAND_NORMAL);
3041 tmp = convert_memory_address (Pmode, tmp);
3042 if (!crtl->eh.ehr_handler)
3043 crtl->eh.ehr_handler = copy_to_reg (tmp);
3044 else if (tmp != crtl->eh.ehr_handler)
3045 emit_move_insn (crtl->eh.ehr_handler, tmp);
3047 if (!crtl->eh.ehr_label)
3048 crtl->eh.ehr_label = gen_label_rtx ();
3049 emit_jump (crtl->eh.ehr_label);
/* NOTE(review): emitted during epilogue expansion.  If __builtin_eh_return
   was used (ehr_label set), emits the eh-return path: normal flow jumps
   around it; the path itself clobbers the return register, restores the
   stack adjustment, and hands the handler address to the target via the
   eh_return insn or EH_RETURN_HANDLER_RTX, else reports the builtin as
   unsupported.  Several #else/#endif lines are elided by sampling.  */
3053 expand_eh_return (void)
3057 if (! crtl->eh.ehr_label)
3060 crtl->calls_eh_return = 1;
3062 #ifdef EH_RETURN_STACKADJ_RTX
3063 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
3066 around_label = gen_label_rtx ();
3067 emit_jump (around_label);
3069 emit_label (crtl->eh.ehr_label);
3070 clobber_return_register ();
3072 #ifdef EH_RETURN_STACKADJ_RTX
3073 emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
3076 #ifdef HAVE_eh_return
3078 emit_insn (gen_eh_return (crtl->eh.ehr_handler));
3082 #ifdef EH_RETURN_HANDLER_RTX
3083 emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler);
3085 error ("__builtin_eh_return not supported on this target");
3089 emit_label (around_label);
3092 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
3093 POINTERS_EXTEND_UNSIGNED and return it. */
/* The default extension (when POINTERS_EXTEND_UNSIGNED is undefined — #else
   branch elided) is unsigned, matching historical EH behavior; result is in
   the target's unwind word mode.  */
3096 expand_builtin_extend_pointer (tree addr_tree)
3098 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
3101 #ifdef POINTERS_EXTEND_UNSIGNED
3102 extend = POINTERS_EXTEND_UNSIGNED;
3104 /* The previous EH code did an unsigned extend by default, so we do this also
3109 return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
3112 /* In the following functions, we represent entries in the action table
3113 as 1-based indices. Special cases are:
3115 0: null action record, non-null landing pad; implies cleanups
3116 -1: null action record, null landing pad; implies no action
3117 -2: no call-site entry; implies must_not_throw
3118 -3: we have yet to process outer regions
3120 Further, no special cases apply to the "next" field of the record.
3121 For next, 0 means end of list. */
/* NOTE(review): the struct's field list (offset, filter, next — as used by
   action_record_eq/hash/add_action_record below) is elided by sampling.  */
3123 struct action_record
/* Hash-table equality callback: two action records match when both their
   filter and next fields are equal (offset is derived, not identity).  */
3131 action_record_eq (const void *pentry, const void *pdata)
3133 const struct action_record *entry = (const struct action_record *) pentry;
3134 const struct action_record *data = (const struct action_record *) pdata;
3135 return entry->filter == data->filter && entry->next == data->next;
/* Hash-table hash callback: combines next and filter (1009 is just an odd
   multiplier for mixing; no cryptographic intent).  */
3139 action_record_hash (const void *pentry)
3141 const struct action_record *entry = (const struct action_record *) pentry;
3142 return entry->next * 1009 + entry->filter;
/* NOTE(review): interns a (filter, next) action pair.  On a hash miss a new
   record is allocated, its 1-based offset recorded, and filter plus a
   self-relative sleb128 link appended to crtl->eh.action_record_data.
   Returns the record's 1-based offset so chains can be shared.  Sampling
   elided tmp.next assignment, the *slot store, and closing braces.  */
3146 add_action_record (htab_t ar_hash, int filter, int next)
3148 struct action_record **slot, *new_ar, tmp;
3150 tmp.filter = filter;
3152 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3154 if ((new_ar = *slot) == NULL)
3156 new_ar = XNEW (struct action_record);
3157 new_ar->offset = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3158 new_ar->filter = filter;
3159 new_ar->next = next;
3162 /* The filter value goes in untouched. The link to the next
3163 record is a "self-relative" byte offset, or zero to indicate
3164 that there is no next record. So convert the absolute 1 based
3165 indices we've been carrying around into a displacement. */
3167 push_sleb128 (&crtl->eh.action_record_data, filter);
3169 next -= VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3170 push_sleb128 (&crtl->eh.action_record_data, next);
3173 return new_ar->offset;
/* NOTE(review): recursively builds the LSDA action chain for REGION, one
   case per region type (switch arms' case labels for ERT_CLEANUP/ERT_TRY are
   partially elided by sampling).  Returns a 1-based action-record index or
   one of the magic values documented above (-1/-2/-3).  Code byte-identical
   to the sampled original.  */
3177 collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
3179 struct eh_region *c;
3182 /* If we've reached the top of the region chain, then we have
3183 no actions, and require no landing pad. */
3187 switch (region->type)
3190 /* A cleanup adds a zero filter to the beginning of the chain, but
3191 there are special cases to look out for. If there are *only*
3192 cleanups along a path, then it compresses to a zero action.
3193 Further, if there are multiple cleanups along a path, we only
3194 need to represent one of them, as that is enough to trigger
3195 entry to the landing pad at runtime. */
3196 next = collect_one_action_chain (ar_hash, region->outer);
3199 for (c = region->outer; c ; c = c->outer)
3200 if (c->type == ERT_CLEANUP)
3202 return add_action_record (ar_hash, 0, next);
3205 /* Process the associated catch regions in reverse order.
3206 If there's a catch-all handler, then we don't need to
3207 search outer regions. Use a magic -3 value to record
3208 that we haven't done the outer search. */
3210 for (c = region->u.eh_try.last_catch; c ; c = c->u.eh_catch.prev_catch)
3212 if (c->u.eh_catch.type_list == NULL)
3214 /* Retrieve the filter from the head of the filter list
3215 where we have stored it (see assign_filter_values). */
3217 = TREE_INT_CST_LOW (TREE_VALUE (c->u.eh_catch.filter_list));
3219 next = add_action_record (ar_hash, filter, 0);
3223 /* Once the outer search is done, trigger an action record for
3224 each filter we have. */
3229 next = collect_one_action_chain (ar_hash, region->outer);
3231 /* If there is no next action, terminate the chain. */
3234 /* If all outer actions are cleanups or must_not_throw,
3235 we'll have no action record for it, since we had wanted
3236 to encode these states in the call-site record directly.
3237 Add a cleanup action to the chain to catch these. */
3239 next = add_action_record (ar_hash, 0, 0);
3242 flt_node = c->u.eh_catch.filter_list;
3243 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3245 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3246 next = add_action_record (ar_hash, filter, next);
3252 case ERT_ALLOWED_EXCEPTIONS:
3253 /* An exception specification adds its filter to the
3254 beginning of the chain. */
3255 next = collect_one_action_chain (ar_hash, region->outer);
3257 /* If there is no next action, terminate the chain. */
3260 /* If all outer actions are cleanups or must_not_throw,
3261 we'll have no action record for it, since we had wanted
3262 to encode these states in the call-site record directly.
3263 Add a cleanup action to the chain to catch these. */
3265 next = add_action_record (ar_hash, 0, 0);
3267 return add_action_record (ar_hash, region->u.allowed.filter, next);
3269 case ERT_MUST_NOT_THROW:
3270 /* A must-not-throw region with no inner handlers or cleanups
3271 requires no call-site entry. Note that this differs from
3272 the no handler or cleanup case in that we do require an lsda
3273 to be generated. Return a magic -2 value to record this. */
3278 /* CATCH regions are handled in TRY above. THROW regions are
3279 for optimization information only and produce no output. */
3280 return collect_one_action_chain (ar_hash, region->outer);
/* Append a (landing_pad, action) entry to crtl->eh.call_site_record and
   return its global index (call_site_base-relative).  GC-allocated since the
   vector is gc-managed.  */
3288 add_call_site (rtx landing_pad, int action)
3290 call_site_record record;
3292 record = GGC_NEW (struct call_site_record);
3293 record->landing_pad = landing_pad;
3294 record->action = action;
3296 VEC_safe_push (call_site_record, gc, crtl->eh.call_site_record, record);
3298 return call_site_base + VEC_length (call_site_record, crtl->eh.call_site_record) - 1;
3301 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3302 The new note numbers will not refer to region numbers, but
3303 instead to call site entries. */
/* NOTE(review): pass body; skipped entirely for SJLJ or when there is no
   region tree.  Walks all insns tracking (action, landing pad) runs; when the
   pair changes it closes the previous EH region note and opens a new one
   keyed by call-site index.  Creation of notes for no-action runs is delayed
   until an LSDA is known to be needed.  Many structural lines are elided by
   sampling; code kept byte-identical.  */
3306 convert_to_eh_region_ranges (void)
3308 rtx insn, iter, note;
3310 int last_action = -3;
3311 rtx last_action_insn = NULL_RTX;
3312 rtx last_landing_pad = NULL_RTX;
3313 rtx first_no_action_insn = NULL_RTX;
3316 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3319 VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
3321 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3323 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3326 struct eh_region *region;
3328 rtx this_landing_pad;
3331 if (NONJUMP_INSN_P (insn)
3332 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3333 insn = XVECEXP (PATTERN (insn), 0, 0);
3335 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3338 if (! (CALL_P (insn)
3339 || (flag_non_call_exceptions
3340 && may_trap_p (PATTERN (insn)))))
3347 if (INTVAL (XEXP (note, 0)) <= 0)
3349 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
3350 this_action = collect_one_action_chain (ar_hash, region);
3353 /* Existence of catch handlers, or must-not-throw regions
3354 implies that an lsda is needed (even if empty). */
3355 if (this_action != -1)
3356 crtl->uses_eh_lsda = 1;
3358 /* Delay creation of region notes for no-action regions
3359 until we're sure that an lsda will be required. */
3360 else if (last_action == -3)
3362 first_no_action_insn = iter;
3366 /* Cleanups and handlers may share action chains but not
3367 landing pads. Collect the landing pad for this region. */
3368 if (this_action >= 0)
3370 struct eh_region *o;
3371 for (o = region; ! o->landing_pad ; o = o->outer)
3373 this_landing_pad = o->landing_pad;
3376 this_landing_pad = NULL_RTX;
3378 /* Differing actions or landing pads implies a change in call-site
3379 info, which implies some EH_REGION note should be emitted. */
3380 if (last_action != this_action
3381 || last_landing_pad != this_landing_pad)
3383 /* If we'd not seen a previous action (-3) or the previous
3384 action was must-not-throw (-2), then we do not need an
3386 if (last_action >= -1)
3388 /* If we delayed the creation of the begin, do it now. */
3389 if (first_no_action_insn)
3391 call_site = add_call_site (NULL_RTX, 0);
3392 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3393 first_no_action_insn);
3394 NOTE_EH_HANDLER (note) = call_site;
3395 first_no_action_insn = NULL_RTX;
3398 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3400 NOTE_EH_HANDLER (note) = call_site;
3403 /* If the new action is must-not-throw, then no region notes
3405 if (this_action >= -1)
3407 call_site = add_call_site (this_landing_pad,
3408 this_action < 0 ? 0 : this_action);
3409 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3410 NOTE_EH_HANDLER (note) = call_site;
3413 last_action = this_action;
3414 last_landing_pad = this_landing_pad;
3416 last_action_insn = iter;
3419 if (last_action >= -1 && ! first_no_action_insn)
3421 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3422 NOTE_EH_HANDLER (note) = call_site;
3425 htab_delete (ar_hash);
/* RTL pass descriptor registering convert_to_eh_region_ranges under the name
   "eh_ranges".  Gate and other fields elided by sampling.  */
3429 struct rtl_opt_pass pass_convert_to_eh_region_ranges =
3433 "eh_ranges", /* name */
3435 convert_to_eh_region_ranges, /* execute */
3438 0, /* static_pass_number */
3439 TV_NONE, /* tv_id */
3440 0, /* properties_required */
3441 0, /* properties_provided */
3442 0, /* properties_destroyed */
3443 0, /* todo_flags_start */
3444 TODO_dump_func, /* todo_flags_finish */
/* NOTE(review): append VALUE to *DATA_AREA in unsigned LEB128 encoding
   (7 bits per byte; loop/continuation-bit lines elided by sampling).  */
3450 push_uleb128 (varray_type *data_area, unsigned int value)
3454 unsigned char byte = value & 0x7f;
3458 VARRAY_PUSH_UCHAR (*data_area, byte);
/* NOTE(review): append VALUE to *DATA_AREA in signed LEB128 encoding; the
   `more` test is the standard sleb128 termination condition (stop once the
   remaining value is pure sign extension and bit 6 agrees with the sign).
   Loop structure and shift lines elided by sampling.  */
3464 push_sleb128 (varray_type *data_area, int value)
3471 byte = value & 0x7f;
3473 more = ! ((value == 0 && (byte & 0x40) == 0)
3474 || (value == -1 && (byte & 0x40) != 0));
3477 VARRAY_PUSH_UCHAR (*data_area, byte);
3483 #ifndef HAVE_AS_LEB128
/* NOTE(review): compiled only when the assembler lacks .uleb128 support —
   the table size must then be computed by hand: three 4-byte fields per
   entry plus the uleb128-encoded action.  */
3485 dw2_size_of_call_site_table (void)
3487 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3488 int size = n * (4 + 4 + 4);
3491 for (i = 0; i < n; ++i)
3493 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3494 size += size_of_uleb128 (cs->action);
/* NOTE(review): SJLJ variant of the size computation — each entry is two
   uleb128s (landing-pad dispatch value and action); landing_pad is a
   CONST_INT here, hence INTVAL.  */
3501 sjlj_size_of_call_site_table (void)
3503 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3507 for (i = 0; i < n; ++i)
3509 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3510 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3511 size += size_of_uleb128 (cs->action);
/* NOTE(review): emits the DWARF-2 call-site table: per entry, region start
   offset, region length, landing-pad offset (0 if none) and action — as
   uleb128 deltas when HAVE_AS_LEB128, else fixed 4-byte deltas.  All offsets
   are relative to current_function_func_begin_label.  Bumps call_site_base so
   labels stay unique across functions.  #else lines elided by sampling.  */
3519 dw2_output_call_site_table (void)
3521 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3524 for (i = 0; i < n; ++i)
3526 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3527 char reg_start_lab[32];
3528 char reg_end_lab[32];
3529 char landing_pad_lab[32];
3531 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3532 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3534 if (cs->landing_pad)
3535 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3536 CODE_LABEL_NUMBER (cs->landing_pad));
3538 /* ??? Perhaps use insn length scaling if the assembler supports
3539 generic arithmetic. */
3540 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3541 data4 if the function is small enough. */
3542 #ifdef HAVE_AS_LEB128
3543 dw2_asm_output_delta_uleb128 (reg_start_lab,
3544 current_function_func_begin_label,
3545 "region %d start", i);
3546 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3548 if (cs->landing_pad)
3549 dw2_asm_output_delta_uleb128 (landing_pad_lab,
3550 current_function_func_begin_label,
3553 dw2_asm_output_data_uleb128 (0, "landing pad");
3555 dw2_asm_output_delta (4, reg_start_lab,
3556 current_function_func_begin_label,
3557 "region %d start", i);
3558 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3559 if (cs->landing_pad)
3560 dw2_asm_output_delta (4, landing_pad_lab,
3561 current_function_func_begin_label,
3564 dw2_asm_output_data (4, 0, "landing pad");
3566 dw2_asm_output_data_uleb128 (cs->action, "action");
3569 call_site_base += n;
/* SJLJ variant of the call-site table writer: one uleb128 dispatch value
   plus one uleb128 action per entry; also advances call_site_base.  */
3573 sjlj_output_call_site_table (void)
3575 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3578 for (i = 0; i < n; ++i)
3580 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3582 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3583 "region %d landing pad", i);
3584 dw2_asm_output_data_uleb128 (cs->action, "action");
3587 call_site_base += n;
3590 #ifndef TARGET_UNWIND_INFO
3591 /* Switch to the section that should be used for exception tables. */
/* NOTE(review): picks .gcc_except_table (read-only where the tt encoding
   permits, else writable), a per-function GC-able variant under
   -ffunction-sections when the linker supports eh gc-sections, or falls back
   to data/rodata without named sections.  The cached `exception_section` is
   reused except for the per-function-name case.  Several brace/else lines are
   elided by sampling.  */
3594 switch_to_exception_section (const char * ARG_UNUSED (fnname))
3598 if (exception_section)
3599 s = exception_section;
3602 /* Compute the section and cache it into exception_section,
3603 unless it depends on the function name. */
3604 if (targetm.have_named_sections)
3608 if (EH_TABLES_CAN_BE_READ_ONLY)
3611 ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3612 flags = ((! flag_pic
3613 || ((tt_format & 0x70) != DW_EH_PE_absptr
3614 && (tt_format & 0x70) != DW_EH_PE_aligned))
3615 ? 0 : SECTION_WRITE);
3618 flags = SECTION_WRITE;
3620 #ifdef HAVE_LD_EH_GC_SECTIONS
3621 if (flag_function_sections)
3623 char *section_name = XNEWVEC (char, strlen (fnname) + 32);
3624 sprintf (section_name, ".gcc_except_table.%s", fnname);
3625 s = get_section (section_name, flags, NULL);
3626 free (section_name);
3631 = s = get_section (".gcc_except_table", flags, NULL);
3635 = s = flag_pic ? data_section : readonly_data_section;
3638 switch_to_section (s);
3643 /* Output a reference from an exception table to the type_info object TYPE.
3644 TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
/* NOTE(review): NULL type emits a null entry (elided branch); otherwise the
   runtime type object is expanded, its varpool node marked needed so cgraph
   keeps the RTTI decl, and the value emitted either directly
   (absptr/aligned) or via an encoded address, unless the target's
   asm_out.ttype hook already handled it.  */
3648 output_ttype (tree type, int tt_format, int tt_format_size)
3651 bool is_public = true;
3653 if (type == NULL_TREE)
3657 struct varpool_node *node;
3659 type = lookup_type_for_runtime (type);
3660 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
3662 /* Let cgraph know that the rtti decl is used. Not all of the
3663 paths below go through assemble_integer, which would take
3664 care of this for us. */
3666 if (TREE_CODE (type) == ADDR_EXPR)
3668 type = TREE_OPERAND (type, 0);
3669 if (TREE_CODE (type) == VAR_DECL)
3671 node = varpool_node (type);
3673 varpool_mark_needed_node (node);
3674 is_public = TREE_PUBLIC (type);
3678 gcc_assert (TREE_CODE (type) == INTEGER_CST);
3681 /* Allow the target to override the type table entry format. */
3682 if (targetm.asm_out.ttype (value))
3685 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
3686 assemble_integer (value, tt_format_size,
3687 tt_format_size * BITS_PER_UNIT, 1);
3689 dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
/* NOTE(review): emits the per-function LSDA: personality reference, section
   switch, LSDA header (@LPStart omitted, @TType format and base offset,
   call-site format and length), the call-site table (SJLJ or DWARF-2
   flavor), the action-record bytes, the aligned @TType entries (emitted in
   reverse index order), and the exception-specification table.  Without
   .uleb128 assembler support the @TType base offset requires the fixed-point
   alignment iteration below.  Sampled excerpt — many braces/#else/#endif
   lines elided; code kept byte-identical.  */
3693 output_function_exception_table (const char * ARG_UNUSED (fnname))
3695 int tt_format, cs_format, lp_format, i, n;
3696 #ifdef HAVE_AS_LEB128
3697 char ttype_label[32];
3698 char cs_after_size_label[32];
3699 char cs_end_label[32];
3704 int tt_format_size = 0;
3706 /* Not all functions need anything. */
3707 if (! crtl->uses_eh_lsda)
3710 if (eh_personality_libfunc)
3711 assemble_external_libcall (eh_personality_libfunc);
3713 #ifdef TARGET_UNWIND_INFO
3714 /* TODO: Move this into target file. */
3715 fputs ("\t.personality\t", asm_out_file);
3716 output_addr_const (asm_out_file, eh_personality_libfunc);
3717 fputs ("\n\t.handlerdata\n", asm_out_file);
3718 /* Note that varasm still thinks we're in the function's code section.
3719 The ".endp" directive that will immediately follow will take us back. */
3721 switch_to_exception_section (fnname);
3724 /* If the target wants a label to begin the table, emit it here. */
3725 targetm.asm_out.except_table_label (asm_out_file);
3727 have_tt_data = (VEC_length (tree, crtl->eh.ttype_data) > 0
3728 || VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) > 0);
3730 /* Indicate the format of the @TType entries. */
3732 tt_format = DW_EH_PE_omit;
3735 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3736 #ifdef HAVE_AS_LEB128
3737 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
3738 current_function_funcdef_no);
3740 tt_format_size = size_of_encoded_value (tt_format);
3742 assemble_align (tt_format_size * BITS_PER_UNIT);
3745 targetm.asm_out.internal_label (asm_out_file, "LLSDA",
3746 current_function_funcdef_no);
3748 /* The LSDA header. */
3750 /* Indicate the format of the landing pad start pointer. An omitted
3751 field implies @LPStart == @Start. */
3752 /* Currently we always put @LPStart == @Start. This field would
3753 be most useful in moving the landing pads completely out of
3754 line to another section, but it could also be used to minimize
3755 the size of uleb128 landing pad offsets. */
3756 lp_format = DW_EH_PE_omit;
3757 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3758 eh_data_format_name (lp_format));
3760 /* @LPStart pointer would go here. */
3762 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3763 eh_data_format_name (tt_format));
3765 #ifndef HAVE_AS_LEB128
3766 if (USING_SJLJ_EXCEPTIONS)
3767 call_site_len = sjlj_size_of_call_site_table ();
3769 call_site_len = dw2_size_of_call_site_table ();
3772 /* A pc-relative 4-byte displacement to the @TType data. */
3775 #ifdef HAVE_AS_LEB128
3776 char ttype_after_disp_label[32];
3777 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
3778 current_function_funcdef_no);
3779 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3780 "@TType base offset");
3781 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3783 /* Ug. Alignment queers things. */
3784 unsigned int before_disp, after_disp, last_disp, disp;
3786 before_disp = 1 + 1;
3787 after_disp = (1 + size_of_uleb128 (call_site_len)
3789 + VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data)
3790 + (VEC_length (tree, crtl->eh.ttype_data)
3796 unsigned int disp_size, pad;
3799 disp_size = size_of_uleb128 (disp);
3800 pad = before_disp + disp_size + after_disp;
3801 if (pad % tt_format_size)
3802 pad = tt_format_size - (pad % tt_format_size);
3805 disp = after_disp + pad;
3807 while (disp != last_disp);
3809 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3813 /* Indicate the format of the call-site offsets. */
3814 #ifdef HAVE_AS_LEB128
3815 cs_format = DW_EH_PE_uleb128;
3817 cs_format = DW_EH_PE_udata4;
3819 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3820 eh_data_format_name (cs_format));
3822 #ifdef HAVE_AS_LEB128
3823 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
3824 current_function_funcdef_no);
3825 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
3826 current_function_funcdef_no);
3827 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3828 "Call-site table length");
3829 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3830 if (USING_SJLJ_EXCEPTIONS)
3831 sjlj_output_call_site_table ();
3833 dw2_output_call_site_table ();
3834 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3836 dw2_asm_output_data_uleb128 (call_site_len,"Call-site table length");
3837 if (USING_SJLJ_EXCEPTIONS)
3838 sjlj_output_call_site_table ();
3840 dw2_output_call_site_table ();
3843 /* ??? Decode and interpret the data for flag_debug_asm. */
3844 n = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data);
3845 for (i = 0; i < n; ++i)
3846 dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.action_record_data, i),
3847 (i ? NULL : "Action record table"));
3850 assemble_align (tt_format_size * BITS_PER_UNIT);
3852 i = VEC_length (tree, crtl->eh.ttype_data);
3855 tree type = VEC_index (tree, crtl->eh.ttype_data, i);
3856 output_ttype (type, tt_format, tt_format_size);
3859 #ifdef HAVE_AS_LEB128
3861 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3864 /* ??? Decode and interpret the data for flag_debug_asm. */
3865 n = VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data);
3866 for (i = 0; i < n; ++i)
3868 if (targetm.arm_eabi_unwinder)
3870 tree type = VARRAY_TREE (crtl->eh.ehspec_data, i);
3871 output_ttype (type, tt_format, tt_format_size);
3874 dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.ehspec_data, i),
3875 (i ? NULL : "Exception specification table"));
3878 switch_to_section (current_function_section ());
/* Accessor: install TABLE as FUN's statement -> EH-region hash table.  */
3882 set_eh_throw_stmt_table (struct function *fun, struct htab *table)
3884 fun->eh->throw_stmt_table = table;
/* Accessor: retrieve FUN's statement -> EH-region hash table.  */
3888 get_eh_throw_stmt_table (struct function *fun)
3890 return fun->eh->throw_stmt_table;
3893 /* Dump EH information to OUT. */
/* NOTE(review): iterative pre-order walk of fun's EH region tree, printing
   one line per region (number, type, labels with "(deleted)" markers for
   NOTEs, and per-type extras), descending into inner regions and stepping
   across/upward via next_peer.  Sampled excerpt — switch/loop structure
   partially elided; code kept byte-identical.  */
3896 dump_eh_tree (FILE * out, struct function *fun)
3898 struct eh_region *i;
3900 static const char *const type_name[] = { "unknown", "cleanup", "try", "catch",
3901 "allowed_exceptions", "must_not_throw",
3905 i = fun->eh->region_tree;
3909 fprintf (out, "Eh tree:\n");
3912 fprintf (out, " %*s %i %s", depth * 2, "",
3913 i->region_number, type_name[(int) i->type]);
3916 fprintf (out, " tree_label:");
3917 print_generic_expr (out, i->tree_label, 0);
3920 fprintf (out, " label:%i", INSN_UID (i->label));
3923 fprintf (out, " landing_pad:%i", INSN_UID (i->landing_pad));
3924 if (GET_CODE (i->landing_pad) == NOTE)
3925 fprintf (out, " (deleted)");
3927 if (i->post_landing_pad)
3929 fprintf (out, " post_landing_pad:%i", INSN_UID (i->post_landing_pad));
3930 if (GET_CODE (i->post_landing_pad) == NOTE)
3931 fprintf (out, " (deleted)");
3935 fprintf (out, " resume:%i", INSN_UID (i->resume));
3936 if (GET_CODE (i->resume) == NOTE)
3937 fprintf (out, " (deleted)");
3939 if (i->may_contain_throw)
3940 fprintf (out, " may_contain_throw");
3944 if (i->u.cleanup.prev_try)
3945 fprintf (out, " prev try:%i",
3946 i->u.cleanup.prev_try->region_number);
3951 struct eh_region *c;
3952 fprintf (out, " catch regions:");
3953 for (c = i->u.eh_try.eh_catch; c; c = c->u.eh_catch.next_catch)
3954 fprintf (out, " %i", c->region_number);
3959 if (i->u.eh_catch.prev_catch)
3960 fprintf (out, " prev: %i",
3961 i->u.eh_catch.prev_catch->region_number);
3962 if (i->u.eh_catch.next_catch)
3963 fprintf (out, " next %i",
3964 i->u.eh_catch.next_catch->region_number);
3965 fprintf (out, " type:");
3966 print_generic_expr (out, i->u.eh_catch.type_list, 0);
3969 case ERT_ALLOWED_EXCEPTIONS:
3970 fprintf (out, " filter :%i types:", i->u.allowed.filter);
3971 print_generic_expr (out, i->u.allowed.type_list, 0);
3975 fprintf (out, " type:");
3976 print_generic_expr (out, i->u.eh_throw.type, 0);
3979 case ERT_MUST_NOT_THROW:
3987 fprintf (out, " also known as:");
3988 dump_bitmap (out, i->aka);
3991 fprintf (out, "\n");
3992 /* If there are sub-regions, process them. */
3994 i = i->inner, depth++;
3995 /* If there are peers, process them. */
3996 else if (i->next_peer)
3998 /* Otherwise, step back up the tree to the next peer. */
4008 while (i->next_peer == NULL);
/* Dump the EH tree for FN on stderr.  Convenience wrapper around
   dump_eh_tree, intended for interactive use from the debugger.
   NOTE(review): return-type line and braces were missing from the
   degraded listing; restored as `void'.  */

void
debug_eh_tree (struct function *fn)
{
  dump_eh_tree (stderr, fn);
}
4023 /* Verify EH region invariants. */
/* NOTE(review): elided listing -- the function's return-type line,
   braces, several case labels, and the declaration/return plumbing of
   the accumulator (`found') are missing from view.  From the recursion
   at the bottom it appears to return nonzero when a violation was
   reported -- TODO confirm against the full source.  PREV_TRY is the
   innermost enclosing try region handed down the recursion.  */
4026 verify_eh_region (struct eh_region *region, struct eh_region *prev_try)
/* Type-specific link checks, dispatched on the region type.  */
4031 switch (region->type)
/* Cleanup regions must record exactly the enclosing try passed in.  */
4034 if (region->u.cleanup.prev_try != prev_try)
4036 error ("Wrong prev_try pointer in EH region %i",
4037 region->region_number)
/* Try regions: the catch chain must start with a NULL prev_catch,
   every catch must share the try's outer region, prev links must
   mirror next links, and last_catch must be the chain's final node.  */
4043 struct eh_region *c, *prev = NULL;
4044 if (region->u.eh_try.eh_catch->u.eh_catch.prev_catch)
/* NOTE(review): "rh_catch" in the message below looks like a typo for
   "eh_catch"; preserved here because diagnostics text is runtime
   behavior and out of scope for a comment-only pass.  */
4046 error ("Try region %i has wrong rh_catch pointer to %i",
4047 region->region_number,
4048 region->u.eh_try.eh_catch->region_number);
4051 for (c = region->u.eh_try.eh_catch; c; c = c->u.eh_catch.next_catch)
4053 if (c->outer != region->outer)
4056 ("Catch region %i has different outer region than try region %i",
4057 c->region_number, region->region_number);
4060 if (c->u.eh_catch.prev_catch != prev)
4062 error ("Catch region %i has corrupted catchlist",
4068 if (prev != region->u.eh_try.last_catch)
4071 ("Try region %i has wrong last_catch pointer to %i instead of %i",
4072 region->region_number,
4073 region->u.eh_try.last_catch->region_number,
4074 prev->region_number);
/* The head of a catch chain must be immediately followed (as next
   peer) by its try region.  */
4080 if (!region->u.eh_catch.prev_catch
4081 && (!region->next_peer || region->next_peer->type != ERT_TRY))
4083 error ("Catch region %i should be followed by try", region->region_number);
4087 case ERT_ALLOWED_EXCEPTIONS:
4088 case ERT_MUST_NOT_THROW:
/* Recompute the prev_try to hand to the children.  The assignments
   are elided from this listing -- presumably a try region becomes the
   new prev_try and must-not-throw / empty allowed-exceptions regions
   clear it; confirm against the full source.  */
4094 if (region->type == ERT_TRY)
4096 else if (region->type == ERT_MUST_NOT_THROW
4097 || (region->type == ERT_ALLOWED_EXCEPTIONS
4098 && !region->u.allowed.type_list))
/* Recurse over all children, accumulating any failures.  */
4100 for (region = region->inner; region; region = region->next_peer)
4101 found |= verify_eh_region (region, prev_try);
4105 /* Verify invariants on EH datastructures. */
/* NOTE(review): elided listing -- the return-type line, braces, and the
   declarations of `j', `depth', `count', `nvisited' and `err' are
   missing from view (the fused left-column line numbers jump).  On any
   failure the function dumps the tree and calls internal_error, so it
   does not return after a violation.  */
4108 verify_eh_tree (struct function *fun)
4110 struct eh_region *i, *outer = NULL;
/* Nothing to verify when the function has no EH region tree.  */
4117 if (!fun->eh->region_tree)
/* Pass 1: each populated region_array slot must refer to a region that
   either owns that region number or lists it in its aka bitmap.  */
4119 for (j = fun->eh->last_region_number; j > 0; --j)
4120 if ((i = VEC_index (eh_region, fun->eh->region_array, j)))
4122 if (i->region_number == j)
4124 if (i->region_number != j && (!i->aka || !bitmap_bit_p (i->aka, j)))
4126 error ("region_array is corrupted for region %i",
/* Pass 2: walk the tree checking array back-pointers, outer links and
   the may_contain_throw nesting invariant.  */
4131 i = fun->eh->region_tree;
4135 if (VEC_index (eh_region, fun->eh->region_array, i->region_number) != i)
4137 error ("region_array is corrupted for region %i", i->region_number);
4140 if (i->outer != outer)
4142 error ("outer block of region %i is wrong", i->region_number);
/* A region that may contain a throw cannot be nested inside one that
   is not marked may_contain_throw.  */
4145 if (i->may_contain_throw && outer && !outer->may_contain_throw)
4148 ("region %i may contain throw and is contained in region that may not",
4154 error ("negative nesting depth of region %i", i->region_number);
4158 /* If there are sub-regions, process them. */
4160 outer = i, i = i->inner, depth++;
4161 /* If there are peers, process them. */
4162 else if (i->next_peer)
4164 /* Otherwise, step back up the tree to the next peer. */
4175 error ("tree list ends on depth %i", depth + 1);
/* The array's population count must equal the number of tree nodes
   actually visited.  */
4178 if (count != nvisited)
4180 error ("array does not match the region tree");
/* Pass 3: per-region structural checks (catch chains, prev_try).  */
4184 for (i = fun->eh->region_tree; i; i = i->next_peer)
4185 err |= verify_eh_region (i, NULL);
/* Any accumulated failure aborts the compiler after dumping the tree.  */
4189 dump_eh_tree (stderr, fun);
4190 internal_error ("verify_eh_tree failed");
4196 while (i->next_peer == NULL);
4202 /* Initialize unwind_resume_libfunc. */
4205 default_init_unwind_resume_libfunc (void)
4207 /* The default c++ routines aren't actually c++ specific, so use those. */
4208 unwind_resume_libfunc =
4209 init_one_libfunc ( USING_SJLJ_EXCEPTIONS ? "_Unwind_SjLj_Resume"
4210 : "_Unwind_Resume");
4215 gate_handle_eh (void)
4217 return doing_eh (0);
4220 /* Complete generation of exception handling code. */
4222 rest_of_handle_eh (void)
4224 finish_eh_generation ();
4225 cleanup_cfg (CLEANUP_NO_INSN_DEL);
4229 struct rtl_opt_pass pass_rtl_eh =
4234 gate_handle_eh, /* gate */
4235 rest_of_handle_eh, /* execute */
4238 0, /* static_pass_number */
4239 TV_JUMP, /* tv_id */
4240 0, /* properties_required */
4241 0, /* properties_provided */
4242 0, /* properties_destroyed */
4243 0, /* todo_flags_start */
4244 TODO_dump_func /* todo_flags_finish */
4248 #include "gt-except.h"