/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurred without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ] */
#include "coretypes.h"
#include "insn-config.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "langhooks.h"
#include "diagnostic.h"
#include "tree-pass.h"
#include "tree-flow.h"
/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif
/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
gimple (*lang_protect_cleanup_actions) (void);

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) (tree a, tree b);

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) (tree);
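
/* As an illustrative sketch (the helper names here are hypothetical,
   not part of GCC), a front end installs these hooks during its EH
   initialization roughly like so:

     static tree
     example_eh_runtime_type (tree type)
     {
       // Build whatever runtime descriptor the unwinder matches on,
       // e.g. a typeinfo object for C++.
       return example_build_type_descriptor (type);
     }

     void
     example_lang_init_eh (void)
     {
       lang_eh_runtime_type = example_eh_runtime_type;
     }  */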
/* A hash table of label to region number.  */

struct GTY(()) ehl_map_entry {
  rtx label;
  struct eh_region *region;
};

static GTY(()) int call_site_base;
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;
/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
struct GTY(()) call_site_record
{
  rtx landing_pad;
  int action;
};
static int t2r_eq (const void *, const void *);
static hashval_t t2r_hash (const void *);
static int ttypes_filter_eq (const void *, const void *);
static hashval_t ttypes_filter_hash (const void *);
static int ehspec_filter_eq (const void *, const void *);
static hashval_t ehspec_filter_hash (const void *);
static int add_ttypes_entry (htab_t, tree);
static int add_ehspec_entry (htab_t, htab_t, tree);
static void assign_filter_values (void);
static void build_post_landing_pads (void);
static void connect_post_landing_pads (void);
static void dw2_build_landing_pads (void);

static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
static void sjlj_mark_call_sites (struct sjlj_lp_info *);
static void sjlj_emit_function_enter (rtx);
static void sjlj_emit_function_exit (void);
static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
static void sjlj_build_landing_pads (void);

static void remove_eh_handler (struct eh_region *);
static void remove_eh_handler_and_replace (struct eh_region *,
                                           struct eh_region *, bool);
/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};
struct reachable_info;
static enum reachable_code reachable_next_level (struct eh_region *, tree,
                                                 struct reachable_info *, bool);

static int action_record_eq (const void *, const void *);
static hashval_t action_record_hash (const void *);
static int add_action_record (htab_t, int, int);
static int collect_one_action_chain (htab_t, struct eh_region *);
static int add_call_site (rtx, int);

static void push_uleb128 (varray_type *, unsigned int);
static void push_sleb128 (varray_type *, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (void);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (void);
static void sjlj_output_call_site_table (void);
/* Routine to see if exception handling is turned on.
   DO_WARN is nonzero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (int do_warn)
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
        {
          error ("exception handling disabled, use -fexceptions to enable");
          warned = 1;
        }
      return 0;
    }
  return 1;
}
void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);
  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
                           build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
                         integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode
                              (targetm.unwind_word_mode (), 1),
                              tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
                          ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
                           ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
#else
      /* Should be large enough for most systems, if it is not,
         JMP_BUF_SIZE should be defined with the proper value.  It will
         also tend to be larger than necessary for most systems, a more
         optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* builtin_setjmp takes a pointer to 5 words.  */
      tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
         runtime's jmp_buf are.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
         easy access from rtl.  */
      sjlj_fc_call_site_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
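
/* For reference, the record laid out above is intended to mirror the
   context structure used by the runtime; a sketch of the shape (see
   unwind-sjlj.c for the authoritative definition) is:

     struct SjLj_Function_Context
     {
       struct SjLj_Function_Context *prev;   // __prev: chain of contexts
       int call_site;                        // __call_site: active call site
       _Unwind_Word data[4];                 // __data: exception data
       _Unwind_Personality_Fn personality;   // __personality
       void *lsda;                           // __lsda: language-specific data
       void *jbuf[];                         // __jbuf: setjmp/longjmp buffer
     };

   The offsets cached above let the RTL expanders below store into these
   fields without re-deriving the layout.  */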
void
init_eh_for_function (void)
{
  cfun->eh = GGC_CNEW (struct eh_status);
}
/* Routines to generate the exception tree somewhat directly.
   These are used from tree-eh.c when processing exception related
   nodes during tree optimization.  */

static struct eh_region *
gen_eh_region (enum eh_region_type type, struct eh_region *outer)
{
  struct eh_region *new_eh;

#ifdef ENABLE_CHECKING
  gcc_assert (doing_eh (0));
#endif

  /* Insert a new blank region as a leaf in the tree.  */
  new_eh = GGC_CNEW (struct eh_region);
  new_eh->type = type;
  new_eh->outer = outer;
  if (outer)
    {
      new_eh->next_peer = outer->inner;
      outer->inner = new_eh;
    }
  else
    {
      new_eh->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_eh;
    }

  new_eh->region_number = ++cfun->eh->last_region_number;

  return new_eh;
}
struct eh_region *
gen_eh_region_cleanup (struct eh_region *outer)
{
  struct eh_region *cleanup = gen_eh_region (ERT_CLEANUP, outer);
  return cleanup;
}

struct eh_region *
gen_eh_region_try (struct eh_region *outer)
{
  return gen_eh_region (ERT_TRY, outer);
}
struct eh_region *
gen_eh_region_catch (struct eh_region *t, tree type_or_list)
{
  struct eh_region *c, *l;
  tree type_list, type_node;

  /* Ensure to always end up with a type list to normalize further
     processing, then register each type against the runtime types map.  */
  type_list = type_or_list;
  if (type_or_list)
    {
      if (TREE_CODE (type_or_list) != TREE_LIST)
        type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
        add_type_for_runtime (TREE_VALUE (type_node));
    }

  c = gen_eh_region (ERT_CATCH, t->outer);
  c->u.eh_catch.type_list = type_list;
  l = t->u.eh_try.last_catch;
  c->u.eh_catch.prev_catch = l;
  if (l)
    l->u.eh_catch.next_catch = c;
  else
    t->u.eh_try.eh_catch = c;
  t->u.eh_try.last_catch = c;

  return c;
}
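
/* As an illustrative sketch: for a C++ fragment

     try { ... } catch (A &) { ... } catch (B &) { ... }

   the front end builds one ERT_TRY region plus one ERT_CATCH region per
   handler via gen_eh_region_catch above.  Because each CATCH is created
   with the TRY's outer region as its outer, the CATCH regions end up as
   peers of the TRY, linked through eh_catch/last_catch and the
   next_catch/prev_catch pointers.  */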
struct eh_region *
gen_eh_region_allowed (struct eh_region *outer, tree allowed)
{
  struct eh_region *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
  region->u.allowed.type_list = allowed;

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  return region;
}

struct eh_region *
gen_eh_region_must_not_throw (struct eh_region *outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}
int
get_eh_region_number (struct eh_region *region)
{
  return region->region_number;
}

bool
get_eh_region_may_contain_throw (struct eh_region *region)
{
  return region->may_contain_throw;
}

tree
get_eh_region_tree_label (struct eh_region *region)
{
  return region->tree_label;
}

tree
get_eh_region_no_tree_label (int region)
{
  return VEC_index (eh_region, cfun->eh->region_array, region)->tree_label;
}

void
set_eh_region_tree_label (struct eh_region *region, tree lab)
{
  region->tree_label = lab;
}
void
expand_resx_expr (tree exp)
{
  int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
  struct eh_region *reg = VEC_index (eh_region,
                                     cfun->eh->region_array, region_nr);

  gcc_assert (!reg->resume);
  do_pending_stack_adjust ();
  reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
  emit_barrier ();
}
/* Note that the current EH region (if any) may contain a throw, or a
   call to a function which itself may contain a throw.  */

void
note_eh_region_may_contain_throw (struct eh_region *region)
{
  while (region && !region->may_contain_throw)
    {
      region->may_contain_throw = 1;
      region = region->outer;
    }
}
/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (void)
{
  if (! crtl->eh.exc_ptr)
    crtl->eh.exc_ptr = gen_reg_rtx (ptr_mode);
  return crtl->eh.exc_ptr;
}

/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

rtx
get_exception_filter (void)
{
  if (! crtl->eh.filter)
    crtl->eh.filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
  return crtl->eh.filter;
}
/* This section is for the exception handling specific optimization pass.  */

/* Random access the exception region tree.  */

void
collect_eh_region_array (void)
{
  struct eh_region *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
                 cfun->eh->last_region_number + 1);
  VEC_replace (eh_region, cfun->eh->region_array, 0, 0);

  while (1)
    {
      VEC_replace (eh_region, cfun->eh->region_array, i->region_number, i);

      /* If there are sub-regions, process them.  */
      if (i->inner)
        i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do
            {
              i = i->outer;
              if (i == NULL)
                return;
            }
          while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}
/* R is a MUST_NOT_THROW region that is not reachable via local
   RESX instructions.  It still must be kept in the tree in case the runtime
   can unwind through it, or we would eliminate the terminate call the
   runtime would otherwise do.  Return TRUE if R contains throwing statements
   or some of the exceptions in inner regions can be unwound up to R.

   CONTAINS_STMT is a bitmap of all regions that contain some throwing
   statements.

   This function looks O(n^3) at first sight.  In fact it is called at most
   once for every MUST_NOT_THROW in the EH tree from remove_unreachable_regions.
   Because the outer loop walking subregions does not dive into MUST_NOT_THROW,
   the outer loop examines every region at most once.  The inner loop
   does unwinding from the throwing statement the same way as we do during
   CFG construction, so it is O(n^2) in the size of the EH tree, but O(n) in
   the size of the CFG.  In practice EH trees are wide, not deep, so this is
   not a problem.  */
static bool
can_be_reached_by_runtime (sbitmap contains_stmt, struct eh_region *r)
{
  struct eh_region *i = r->inner;
  unsigned n;
  bitmap_iterator bi;

  if (TEST_BIT (contains_stmt, r->region_number))
    return true;
  if (r->aka)
    EXECUTE_IF_SET_IN_BITMAP (r->aka, 0, n, bi)
      if (TEST_BIT (contains_stmt, n))
        return true;
  while (i)
    {
      /* It is pointless to look into MUST_NOT_THROW
         or dive into subregions.  They never unwind up.  */
      if (i->type != ERT_MUST_NOT_THROW)
        {
          bool found = TEST_BIT (contains_stmt, i->region_number);
          if (!found && i->aka)
            EXECUTE_IF_SET_IN_BITMAP (i->aka, 0, n, bi)
              if (TEST_BIT (contains_stmt, n))
                {
                  found = true;
                  break;
                }
          /* We have a nested region that contains a throwing statement.
             See if resuming might lead up to the resx or we get locally
             caught sooner.  If we get locally caught sooner, we either
             know region R is not reachable or it would have a direct edge
             from the EH resx and thus consider region reachable at a
             higher level.  */
          if (found)
            {
              struct eh_region *i1 = i;
              tree type_thrown = NULL_TREE;

              if (i1->type == ERT_THROW)
                {
                  type_thrown = i1->u.eh_throw.type;
                  i1 = i1->outer;
                }
              for (; i1 != r; i1 = i1->outer)
                if (reachable_next_level (i1, type_thrown, NULL,
                                          false) >= RNL_CAUGHT)
                  return true;
            }
        }
      /* If there are sub-regions, process them.  */
      if (i->type != ERT_MUST_NOT_THROW && i->inner)
        i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do
            {
              i = i->outer;
              if (i == r)
                return false;
            }
          while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
  return false;
}
/* Bring region R to the root of the tree.  */

static void
bring_to_root (struct eh_region *r)
{
  struct eh_region **pp;
  struct eh_region *outer = r->outer;
  if (!outer)
    return;
  for (pp = &outer->inner; *pp != r; pp = &(*pp)->next_peer)
    continue;
  *pp = r->next_peer;
  r->outer = NULL;
  r->next_peer = cfun->eh->region_tree;
  cfun->eh->region_tree = r;
}
/* Return true if region R2 can be replaced by R1.  */

static bool
eh_region_replaceable_by_p (const struct eh_region *r1,
                            const struct eh_region *r2)
{
  /* Regions are semantically the same if they are of the same type
     and have the same label.  */
  if (r1->type != r2->type)
    return false;
  if (r1->tree_label != r2->tree_label)
    return false;

  /* Verify that the region-type-dependent data is the same as well.  */
  switch (r1->type)
    {
    case ERT_MUST_NOT_THROW:
    case ERT_CLEANUP:
      break;
    case ERT_TRY:
      {
        struct eh_region *c1, *c2;
        for (c1 = r1->u.eh_try.eh_catch,
             c2 = r2->u.eh_try.eh_catch;
             c1 && c2;
             c1 = c1->u.eh_catch.next_catch,
             c2 = c2->u.eh_catch.next_catch)
          if (!eh_region_replaceable_by_p (c1, c2))
            return false;
        if (c1 || c2)
          return false;
      }
      break;
    case ERT_CATCH:
      if (!list_equal_p (r1->u.eh_catch.type_list, r2->u.eh_catch.type_list))
        return false;
      if (!list_equal_p (r1->u.eh_catch.filter_list,
                         r2->u.eh_catch.filter_list))
        return false;
      break;
    case ERT_ALLOWED_EXCEPTIONS:
      if (!list_equal_p (r1->u.allowed.type_list, r2->u.allowed.type_list))
        return false;
      if (r1->u.allowed.filter != r2->u.allowed.filter)
        return false;
      break;
    case ERT_THROW:
      if (r1->u.eh_throw.type != r2->u.eh_throw.type)
        return false;
      break;
    default:
      gcc_unreachable ();
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Regions %i and %i match\n", r1->region_number,
             r2->region_number);
  return true;
}
/* Replace region R2 by R1.  */

static void
replace_region (struct eh_region *r1, struct eh_region *r2)
{
  struct eh_region *next1 = r1->u.eh_try.eh_catch;
  struct eh_region *next2 = r2->u.eh_try.eh_catch;
  bool is_try = r1->type == ERT_TRY;

  gcc_assert (r1->type != ERT_CATCH);
  remove_eh_handler_and_replace (r2, r1, false);

  /* Also merge the chains of catch handlers, pairwise.  */
  if (is_try)
    while (next1)
      {
        struct eh_region *c1 = next1, *c2 = next2;

        gcc_assert (c1->type == ERT_CATCH);
        gcc_assert (c2->type == ERT_CATCH);
        next1 = c1->u.eh_catch.next_catch;
        next2 = c2->u.eh_catch.next_catch;
        remove_eh_handler_and_replace (c2, c1, false);
      }
}
/* Return hash value of type list T.  */

static hashval_t
hash_type_list (tree t)
{
  hashval_t val = 0;
  for (; t; t = TREE_CHAIN (t))
    val = iterative_hash_hashval_t (TREE_HASH (TREE_VALUE (t)), val);
  return val;
}
/* Hash EH regions so semantically same regions get same hash value.  */

static hashval_t
hash_eh_region (const void *r)
{
  const struct eh_region *region = (const struct eh_region *) r;
  hashval_t val = region->type;

  if (region->tree_label)
    val = iterative_hash_hashval_t (LABEL_DECL_UID (region->tree_label), val);
  switch (region->type)
    {
    case ERT_MUST_NOT_THROW:
    case ERT_CLEANUP:
      break;
    case ERT_TRY:
      {
        struct eh_region *c;
        for (c = region->u.eh_try.eh_catch;
             c; c = c->u.eh_catch.next_catch)
          val = iterative_hash_hashval_t (hash_eh_region (c), val);
      }
      break;
    case ERT_CATCH:
      val = iterative_hash_hashval_t (hash_type_list
                                      (region->u.eh_catch.type_list), val);
      break;
    case ERT_ALLOWED_EXCEPTIONS:
      val = iterative_hash_hashval_t
              (hash_type_list (region->u.allowed.type_list), val);
      val = iterative_hash_hashval_t (region->u.allowed.filter, val);
      break;
    case ERT_THROW:
      val |= iterative_hash_hashval_t (TYPE_UID (region->u.eh_throw.type), val);
      break;
    default:
      gcc_unreachable ();
    }
  return val;
}
/* Return true if regions R1 and R2 are equal.  */

static int
eh_regions_equal_p (const void *r1, const void *r2)
{
  return eh_region_replaceable_by_p ((const struct eh_region *) r1,
                                     (const struct eh_region *) r2);
}
/* Walk all peers of REGION and try to merge those regions
   that are semantically equivalent.  Look into subregions
   recursively as well.  */

static bool
merge_peers (struct eh_region *region)
{
  struct eh_region *r1, *r2, *outer = NULL, *next;
  bool merged = false;
  int num_regions = 0;

  if (region)
    outer = region->outer;
  else
    return false;

  /* First see if there is an inner region equivalent to the region
     in question.  EH control flow is acyclic, so we know we do not
     loop here.  */
  for (r1 = region; r1; r1 = next)
    {
      next = r1->next_peer;
      if (r1->type == ERT_CATCH)
        continue;
      if (eh_region_replaceable_by_p (r1->outer, r1))
        {
          replace_region (r1->outer, r1);
          merged = true;
        }
      else
        num_regions++;
    }

  /* Get new first region and try to match the peers
     for equivalence.  */
  if (outer)
    region = outer->inner;
  else
    region = cfun->eh->region_tree;

  /* There are few regions to inspect:
     N^2 loop matching each region with each region
     will do the job well.  */
  if (num_regions < 10)
    {
      for (r1 = region; r1; r1 = r1->next_peer)
        {
          if (r1->type == ERT_CATCH)
            continue;
          for (r2 = r1->next_peer; r2; r2 = next)
            {
              next = r2->next_peer;
              if (eh_region_replaceable_by_p (r1, r2))
                {
                  replace_region (r1, r2);
                  merged = true;
                }
            }
        }
    }
  /* Or use hashtable to avoid N^2 behaviour.  */
  else
    {
      htab_t hash;
      hash = htab_create (num_regions, hash_eh_region,
                          eh_regions_equal_p, NULL);
      for (r1 = region; r1; r1 = next)
        {
          void **slot;

          next = r1->next_peer;
          if (r1->type == ERT_CATCH)
            continue;
          slot = htab_find_slot (hash, r1, INSERT);
          if (!*slot)
            *slot = r1;
          else
            replace_region ((struct eh_region *) *slot, r1);
        }
      htab_delete (hash);
    }
  for (r1 = region; r1; r1 = r1->next_peer)
    merged |= merge_peers (r1->inner);
  return merged;
}
/* Remove all regions whose labels are not reachable.
   REACHABLE is a bitmap of all regions that are used by the function.
   CONTAINS_STMT is a bitmap of all regions that contain stmts (or NULL).  */

void
remove_unreachable_regions (sbitmap reachable, sbitmap contains_stmt)
{
  int i;
  struct eh_region *r;
  VEC(eh_region,heap) *must_not_throws = VEC_alloc (eh_region, heap, 16);
  struct eh_region *local_must_not_throw = NULL;
  struct eh_region *first_must_not_throw = NULL;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = VEC_index (eh_region, cfun->eh->region_array, i);
      if (!r || r->region_number != i)
        continue;
      if (!TEST_BIT (reachable, i) && !r->resume)
        {
          bool kill_it = true;

          r->tree_label = NULL;
          switch (r->type)
            {
            case ERT_THROW:
              /* Don't remove ERT_THROW regions if their outer region
                 is reachable.  */
              if (r->outer && TEST_BIT (reachable, r->outer->region_number))
                kill_it = false;
              break;
            case ERT_MUST_NOT_THROW:
              /* MUST_NOT_THROW regions are implementable solely in the
                 runtime, but we need them when inlining a function.

                 Keep them if the outer region is not MUST_NOT_THROW as well
                 and if they contain some statement that might unwind through
                 them.  */
              if ((!r->outer || r->outer->type != ERT_MUST_NOT_THROW)
                  && (!contains_stmt
                      || can_be_reached_by_runtime (contains_stmt, r)))
                kill_it = false;
              break;
            case ERT_TRY:
              {
                /* TRY regions are reachable if any of its CATCH regions
                   are reachable.  */
                struct eh_region *c;
                for (c = r->u.eh_try.eh_catch; c;
                     c = c->u.eh_catch.next_catch)
                  if (TEST_BIT (reachable, c->region_number))
                    {
                      kill_it = false;
                      break;
                    }
                break;
              }

            default:
              break;
            }

          if (kill_it)
            {
              if (dump_file)
                fprintf (dump_file, "Removing unreachable eh region %i\n",
                         r->region_number);
              remove_eh_handler (r);
            }
          else if (r->type == ERT_MUST_NOT_THROW)
            {
              if (!first_must_not_throw)
                first_must_not_throw = r;
              VEC_safe_push (eh_region, heap, must_not_throws, r);
            }
        }
      else
        if (r->type == ERT_MUST_NOT_THROW)
          {
            if (!local_must_not_throw)
              local_must_not_throw = r;
            if (r->outer)
              VEC_safe_push (eh_region, heap, must_not_throws, r);
          }
    }

  /* MUST_NOT_THROW regions without a local handler are all the same; they
     trigger a terminate call in the runtime.
     MUST_NOT_THROW regions handled locally can differ in the debug info
     associated with the std::terminate () call, or in whether terminate or
     abort is called when one region comes from Java and the other from C++.

     We merge all MUST_NOT_THROW regions handled by the runtime into one.
     We also bring all local MUST_NOT_THROW regions to the roots of the EH
     tree (since unwinding never continues to the outer region anyway).
     If a MUST_NOT_THROW with a local handler is present in the tree, we use
     that region to merge into, since it will remain in the tree anyway;
     otherwise we use the first MUST_NOT_THROW.

     Merging of locally handled regions needs changes to the CFG.  Crossjumping
     should take care of this, by looking at the actual code and
     ensuring that the cleanup actions are really the same.  */

  if (local_must_not_throw)
    first_must_not_throw = local_must_not_throw;

  for (i = 0; VEC_iterate (eh_region, must_not_throws, i, r); i++)
    {
      if (!r->label && !r->tree_label && r != first_must_not_throw)
        {
          if (dump_file)
            fprintf (dump_file, "Replacing MUST_NOT_THROW region %i by %i\n",
                     r->region_number,
                     first_must_not_throw->region_number);
          remove_eh_handler_and_replace (r, first_must_not_throw, false);
          first_must_not_throw->may_contain_throw |= r->may_contain_throw;
        }
      else
        bring_to_root (r);
    }
  merge_peers (cfun->eh->region_tree);
#ifdef ENABLE_CHECKING
  verify_eh_tree (cfun);
#endif
  VEC_free (eh_region, heap, must_not_throws);
}
/* Return array mapping LABEL_DECL_UID to region such that region's tree_label
   is identical to label.  */

VEC (int, heap) *
label_to_region_map (void)
{
  VEC (int, heap) * label_to_region = NULL;
  int i;
  int idx;

  VEC_safe_grow_cleared (int, heap, label_to_region,
                         cfun->cfg->last_label_uid + 1);
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
      if (r && r->region_number == i
          && r->tree_label && LABEL_DECL_UID (r->tree_label) >= 0)
        {
          if ((idx = VEC_index (int, label_to_region,
                                LABEL_DECL_UID (r->tree_label))) != 0)
            r->next_region_sharing_label =
              VEC_index (eh_region, cfun->eh->region_array, idx);
          else
            r->next_region_sharing_label = NULL;
          VEC_replace (int, label_to_region, LABEL_DECL_UID (r->tree_label),
                       i);
        }
    }
  return label_to_region;
}
/* Return number of EH regions.  */
int
num_eh_regions (void)
{
  return cfun->eh->last_region_number + 1;
}

/* Return next region sharing same label as REGION.  */

int
get_next_region_sharing_label (int region)
{
  struct eh_region *r;
  if (!region)
    return 0;
  r = VEC_index (eh_region, cfun->eh->region_array, region);
  if (!r || !r->next_region_sharing_label)
    return 0;
  return r->next_region_sharing_label->region_number;
}
/* Set up EH labels for RTL.  */

void
convert_from_eh_region_ranges (void)
{
  int i, n = cfun->eh->last_region_number;

  /* Most of the work is already done at the tree level.  All we need to
     do is collect the rtl labels that correspond to the tree labels
     we allocated earlier.  */
  for (i = 1; i <= n; ++i)
    {
      struct eh_region *region;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (region && region->tree_label)
        region->label = DECL_RTL_IF_SET (region->tree_label);
    }
}
void
find_exception_handler_labels (void)
{
  int i;

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      rtx lab;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (! region || region->region_number != i)
        continue;
      if (crtl->eh.built_landing_pads)
        lab = region->landing_pad;
      else
        lab = region->label;
    }
}
/* Returns true if the current function has exception handling regions.  */

bool
current_function_has_exception_handlers (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (region
          && region->region_number == i
          && region->type != ERT_THROW)
        return true;
    }
  return false;
}
/* A subroutine of duplicate_eh_regions.  Search the region tree under O
   for the minimum and maximum region numbers.  Update *MIN and *MAX.  */

static void
duplicate_eh_regions_0 (eh_region o, int *min, int *max)
{
  int i;

  if (o->aka)
    {
      i = bitmap_first_set_bit (o->aka);
      if (i < *min)
        *min = i;
      i = bitmap_last_set_bit (o->aka);
      if (i > *max)
        *max = i;
    }
  if (o->region_number < *min)
    *min = o->region_number;
  if (o->region_number > *max)
    *max = o->region_number;

  if (o->inner)
    {
      o = o->inner;
      duplicate_eh_regions_0 (o, min, max);
      while (o->next_peer)
        {
          o = o->next_peer;
          duplicate_eh_regions_0 (o, min, max);
        }
    }
}
/* A subroutine of duplicate_eh_regions.  Copy the region tree under OLD.
   Root it at OUTER, and apply EH_OFFSET to the region number.  Don't worry
   about the other internal pointers just yet, just the tree-like pointers.  */

static eh_region
duplicate_eh_regions_1 (eh_region old, eh_region outer, int eh_offset)
{
  eh_region ret, n;

  ret = n = GGC_NEW (struct eh_region);

  *n = *old;
  n->outer = outer;
  n->next_peer = NULL;
  if (old->aka)
    {
      unsigned i;
      bitmap_iterator bi;
      n->aka = BITMAP_GGC_ALLOC ();

      EXECUTE_IF_SET_IN_BITMAP (old->aka, 0, i, bi)
        {
          bitmap_set_bit (n->aka, i + eh_offset);
          VEC_replace (eh_region, cfun->eh->region_array, i + eh_offset, n);
        }
    }

  n->region_number += eh_offset;
  VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);

  if (old->inner)
    {
      old = old->inner;
      n = n->inner = duplicate_eh_regions_1 (old, ret, eh_offset);
      while (old->next_peer)
        {
          old = old->next_peer;
          n = n->next_peer = duplicate_eh_regions_1 (old, ret, eh_offset);
        }
    }

  return ret;
}
/* Look for the first outer region of R (or R itself) that is a
   TRY region.  Return NULL if none.  */

static struct eh_region *
find_prev_try (struct eh_region * r)
{
  for (; r && r->type != ERT_TRY; r = r->outer)
    if (r->type == ERT_MUST_NOT_THROW
        || (r->type == ERT_ALLOWED_EXCEPTIONS
            && !r->u.allowed.type_list))
      {
        r = NULL;
        break;
      }
  return r;
}
/* Duplicate the EH regions of IFUN, rooted at COPY_REGION, into the current
   function and root the tree below OUTER_REGION.  Remap labels using the MAP
   callback.  The special case of COPY_REGION of 0 means all regions.  */

int
duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map,
                      void *data, int copy_region, int outer_region)
{
  eh_region cur, outer, *splice;
  int i, min_region, max_region, eh_offset, cfun_last_region_number;
  int num_regions;

#ifdef ENABLE_CHECKING
  verify_eh_tree (ifun);
#endif

  /* Find the range of region numbers to be copied.  The interface we
     provide here mandates a single offset to find new number from old,
     which means we must look at the numbers present, instead of the
     count or something else.  */
  if (copy_region > 0)
    {
      min_region = INT_MAX;
      max_region = 0;

      cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
      duplicate_eh_regions_0 (cur, &min_region, &max_region);
    }
  else
    {
      min_region = 1;
      max_region = ifun->eh->last_region_number;
    }
  num_regions = max_region - min_region + 1;
  cfun_last_region_number = cfun->eh->last_region_number;
  eh_offset = cfun_last_region_number + 1 - min_region;

  /* If we've not yet created a region array, do so now.  */
  cfun->eh->last_region_number = cfun_last_region_number + num_regions;
  VEC_safe_grow_cleared (eh_region, gc, cfun->eh->region_array,
                         cfun->eh->last_region_number + 1);

  /* Locate the spot at which to insert the new tree.  */
  if (outer_region > 0)
    {
      outer = VEC_index (eh_region, cfun->eh->region_array, outer_region);
      if (outer)
        splice = &outer->inner;
      else
        splice = &cfun->eh->region_tree;
    }
  else
    {
      outer = NULL;
      splice = &cfun->eh->region_tree;
    }
  while (*splice)
    splice = &(*splice)->next_peer;

  if (!ifun->eh->region_tree)
    {
      if (outer)
        for (i = cfun_last_region_number + 1;
             i <= cfun->eh->last_region_number; i++)
          {
            VEC_replace (eh_region, cfun->eh->region_array, i, outer);
            if (outer->aka == NULL)
              outer->aka = BITMAP_GGC_ALLOC ();
            bitmap_set_bit (outer->aka, i);
          }
      return eh_offset;
    }

  /* Copy all the regions in the subtree.  */
  if (copy_region > 0)
    {
      cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
      *splice = duplicate_eh_regions_1 (cur, outer, eh_offset);
    }
  else
    {
      eh_region n;

      cur = ifun->eh->region_tree;
      *splice = n = duplicate_eh_regions_1 (cur, outer, eh_offset);
      while (cur->next_peer)
        {
          cur = cur->next_peer;
          n = n->next_peer = duplicate_eh_regions_1 (cur, outer, eh_offset);
        }
    }

  /* Remap all the labels in the new regions.  */
  for (i = cfun_last_region_number + 1;
       VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
    if (cur && cur->tree_label)
      cur->tree_label = map (cur->tree_label, data);

  /* Remap all of the internal catch and cleanup linkages.  Since we
     duplicate entire subtrees, all of the referenced regions will have
     been copied too.  And since we renumbered them as a block, a simple
     bit of arithmetic finds us the index for the replacement region.
     (For example, with EH_OFFSET 10, a pointer to region 3 is remapped
     to point to region 13.)  */
  for (i = cfun_last_region_number + 1;
       VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
    {
      /* All removed EH that is toplevel in the input function is now
         in the outer EH of the output function.  */
      if (cur == NULL)
        {
          gcc_assert (VEC_index
                      (eh_region, ifun->eh->region_array,
                       i - eh_offset) == NULL);
          if (outer)
            {
              VEC_replace (eh_region, cfun->eh->region_array, i, outer);
              if (outer->aka == NULL)
                outer->aka = BITMAP_GGC_ALLOC ();
              bitmap_set_bit (outer->aka, i);
            }
          continue;
        }
      if (i != cur->region_number)
        continue;

#define REMAP(REG) \
        (REG) = VEC_index (eh_region, cfun->eh->region_array, \
                           (REG)->region_number + eh_offset)

      switch (cur->type)
        {
        case ERT_TRY:
          if (cur->u.eh_try.eh_catch)
            REMAP (cur->u.eh_try.eh_catch);
          if (cur->u.eh_try.last_catch)
            REMAP (cur->u.eh_try.last_catch);
          break;

        case ERT_CATCH:
          if (cur->u.eh_catch.next_catch)
            REMAP (cur->u.eh_catch.next_catch);
          if (cur->u.eh_catch.prev_catch)
            REMAP (cur->u.eh_catch.prev_catch);
          break;

        default:
          break;
        }

#undef REMAP
    }
#ifdef ENABLE_CHECKING
  verify_eh_tree (cfun);
#endif

  return eh_offset;
}
/* Return new copy of eh region OLD inside region NEW_OUTER.
   Do not care about updating the tree otherwise.  */

static struct eh_region *
copy_eh_region_1 (struct eh_region *old, struct eh_region *new_outer)
{
  struct eh_region *new_eh = gen_eh_region (old->type, new_outer);

  new_eh->tree_label = old->tree_label;
  new_eh->may_contain_throw = old->may_contain_throw;
  VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
                 cfun->eh->last_region_number + 1);
  VEC_replace (eh_region, cfun->eh->region_array, new_eh->region_number, new_eh);
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Copying region %i to %i\n", old->region_number,
             new_eh->region_number);
  return new_eh;
}
/* Return new copy of eh region OLD inside region NEW_OUTER.

   Copy the whole catch-try chain if necessary.  */

static struct eh_region *
copy_eh_region (struct eh_region *old, struct eh_region *new_outer)
{
  struct eh_region *r, *n, *old_try, *new_try, *ret = NULL;
  VEC(eh_region,heap) *catch_list = NULL;

  if (old->type != ERT_CATCH)
    {
      gcc_assert (old->type != ERT_TRY);
      r = copy_eh_region_1 (old, new_outer);
      return r;
    }

  /* Locate and copy corresponding TRY.  */
  for (old_try = old->next_peer; old_try->type == ERT_CATCH; old_try = old_try->next_peer)
    continue;
  gcc_assert (old_try->type == ERT_TRY);
  new_try = gen_eh_region_try (new_outer);
  new_try->tree_label = old_try->tree_label;
  new_try->may_contain_throw = old_try->may_contain_throw;
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Copying try-catch regions.  Try: %i to %i\n",
             old_try->region_number, new_try->region_number);
  VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
                 cfun->eh->last_region_number + 1);
  VEC_replace (eh_region, cfun->eh->region_array, new_try->region_number, new_try);

  /* In order to keep CATCH list in order, we need to copy in reverse order.  */
  for (r = old_try->u.eh_try.last_catch; r->type == ERT_CATCH; r = r->next_peer)
    VEC_safe_push (eh_region, heap, catch_list, r);

  while (VEC_length (eh_region, catch_list))
    {
      r = VEC_pop (eh_region, catch_list);

      /* Duplicate CATCH.  */
      n = gen_eh_region_catch (new_try, r->u.eh_catch.type_list);
      n->tree_label = r->tree_label;
      n->may_contain_throw = r->may_contain_throw;
      VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
                     cfun->eh->last_region_number + 1);
      VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);
      n->tree_label = r->tree_label;
      if (r == old)
        ret = n;

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Copying try-catch regions.  Catch: %i to %i\n",
                 r->region_number, n->region_number);
    }
  VEC_free (eh_region, heap, catch_list);
  return ret;
}
/* Callback for foreach_reachable_handler that pushes REGION into the single
   VEC DATA.  */

static void
push_reachable_handler (struct eh_region *region, void *data)
{
  VEC(eh_region,heap) **trace = (VEC(eh_region,heap) **) data;
  VEC_safe_push (eh_region, heap, *trace, region);
}

/* Redirect EH edge E to NEW_DEST_LABEL.
   IS_RESX, INLINABLE_CALL and REGION_NUMBER match the parameters of
   foreach_reachable_handler.  */
void
redirect_eh_edge_to_label (edge e, tree new_dest_label, bool is_resx,
                           bool inlinable_call, int region_number)
{
  struct eh_region *outer;
  struct eh_region *region;
  VEC (eh_region, heap) * trace = NULL;
  int i;
  int start_here = -1;
  basic_block old_bb = e->dest;
  struct eh_region *old, *r = NULL;
  bool update_inplace = true;
  edge_iterator ei;
  edge e2;

  /* If there is only one EH edge, we don't need to duplicate;
     just update labels in the tree.  */
  FOR_EACH_EDGE (e2, ei, old_bb->preds)
    if ((e2->flags & EDGE_EH) && e2 != e)
      {
        update_inplace = false;
        break;
      }

  region = VEC_index (eh_region, cfun->eh->region_array, region_number);
  gcc_assert (region);

  foreach_reachable_handler (region_number, is_resx, inlinable_call,
                             push_reachable_handler, &trace);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_eh_tree (dump_file, cfun);
      fprintf (dump_file, "Trace: ");
      for (i = 0; i < (int) VEC_length (eh_region, trace); i++)
        fprintf (dump_file, " %i", VEC_index (eh_region, trace, i)->region_number);
      fprintf (dump_file, " inplace: %i\n", update_inplace);
    }

  if (update_inplace)
    {
      /* In the easy case, just walk the trace and update all occurrences
         of the label.  */
      for (i = 0; i < (int) VEC_length (eh_region, trace); i++)
        {
          r = VEC_index (eh_region, trace, i);
          if (r->tree_label && label_to_block (r->tree_label) == old_bb)
            {
              r->tree_label = new_dest_label;
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "Updating label for region %i\n",
                         r->region_number);
            }
        }
    }
  else
    {
      /* Now look for the outermost handler that refers to the basic block
         in question.  We start our duplication there.  */
      for (i = 0; i < (int) VEC_length (eh_region, trace); i++)
        {
          r = VEC_index (eh_region, trace, i);
          if (r->tree_label && label_to_block (r->tree_label) == old_bb)
            start_here = i;
        }
      outer = VEC_index (eh_region, trace, start_here)->outer;
      gcc_assert (start_here >= 0);

      /* And now do the dirty job!  */
      for (i = start_here; i >= 0; i--)
        {
          old = VEC_index (eh_region, trace, i);
          gcc_assert (!outer || old->outer != outer->outer);

          /* Copy region and update label.  */
          r = copy_eh_region (old, outer);
          VEC_replace (eh_region, trace, i, r);
          if (r->tree_label && label_to_block (r->tree_label) == old_bb)
            {
              r->tree_label = new_dest_label;
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "Updating label for region %i\n",
                         r->region_number);
            }

          /* We got into copying a CATCH.  copy_eh_region already did the job
             of copying all catch blocks corresponding to the try.  Now
             we need to update labels in all of them and continue the trace.

             We continue nesting into the TRY region corresponding to CATCH:
             when duplicating an EH tree containing subregions of CATCH,
             the CATCH region itself is never inserted into the trace, so we
             never get here anyway.  */
          if (r->type == ERT_CATCH)
            {
              /* Walk other catch regions we copied and update labels as needed.  */
              for (r = r->next_peer; r->type == ERT_CATCH; r = r->next_peer)
                if (r->tree_label && label_to_block (r->tree_label) == old_bb)
                  {
                    r->tree_label = new_dest_label;
                    if (dump_file && (dump_flags & TDF_DETAILS))
                      fprintf (dump_file, "Updating label for region %i\n",
                               r->region_number);
                  }
              gcc_assert (r->type == ERT_TRY);

              /* Skip sibling catch regions from the trace.
                 They are already updated.  */
              while (i > 0 && VEC_index (eh_region, trace, i - 1)->outer == old->outer)
                {
                  gcc_assert (VEC_index (eh_region, trace, i - 1)->type == ERT_CATCH);
                  i--;
                }
            }

          outer = r;
        }

      if (is_resx || region->type == ERT_THROW)
        r = copy_eh_region (region, outer);
    }

  VEC_free (eh_region, heap, trace);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_eh_tree (dump_file, cfun);
      fprintf (dump_file, "New region: %i\n", r->region_number);
    }
}
/* Return region number of the region that is outer to both REGION_A and
   REGION_B in IFUN.  */

int
eh_region_outermost (struct function *ifun, int region_a, int region_b)
{
  struct eh_region *rp_a, *rp_b;
  sbitmap b_outer;

  gcc_assert (ifun->eh->last_region_number > 0);
  gcc_assert (ifun->eh->region_tree);

  rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
  rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
  gcc_assert (rp_a != NULL);
  gcc_assert (rp_b != NULL);

  b_outer = sbitmap_alloc (ifun->eh->last_region_number + 1);
  sbitmap_zero (b_outer);

  do
    {
      SET_BIT (b_outer, rp_b->region_number);
      rp_b = rp_b->outer;
    }
  while (rp_b);

  do
    {
      if (TEST_BIT (b_outer, rp_a->region_number))
        {
          sbitmap_free (b_outer);
          return rp_a->region_number;
        }
      rp_a = rp_a->outer;
    }
  while (rp_a);

  sbitmap_free (b_outer);
  return -1;
}
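
/* A worked example (illustrative numbering): if region 5 is nested in
   region 3, which is nested in region 1, and region 4 is also nested in
   region 3, then eh_region_outermost (ifun, 5, 4) first marks {4, 3, 1}
   while walking up from region 4; the walk up from region 5 then hits
   region 3 first, so 3 is returned.  */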
static int
t2r_eq (const void *pentry, const void *pdata)
{
  const_tree const entry = (const_tree) pentry;
  const_tree const data = (const_tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (const void *pentry)
{
  const_tree const entry = (const_tree) pentry;
  return TREE_HASH (TREE_PURPOSE (entry));
}

void
add_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
                                            TREE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}
tree
lookup_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
                                            TREE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}
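
/* Illustrative pairing (sketch):

     add_type_for_runtime (type);
     ...
     tree rt = lookup_type_for_runtime (type);

   The table entry is a TREE_LIST with TREE_PURPOSE the front-end type
   (the hash key compared by t2r_eq above) and TREE_VALUE the runtime
   object built by lang_eh_runtime_type, e.g. a typeinfo node for C++.  */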
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct GTY(()) ttypes_filter {
  tree t;
  int filter;
};
/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *const entry
    = (const struct ttypes_filter *) pentry;
  const_tree const data = (const_tree) pdata;

  return entry->t == data;
}

static hashval_t
ttypes_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TREE_HASH (entry->t);
}

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}
/* Add TYPE (which may be NULL) to crtl->eh.ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (htab_t ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = XNEW (struct ttypes_filter);
      n->t = type;
      n->filter = VEC_length (tree, crtl->eh.ttype_data) + 1;
      *slot = n;

      VEC_safe_push (tree, gc, crtl->eh.ttype_data, type);
    }

  return n->filter;
}
/* Add LIST to crtl->eh.ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = XNEW (struct ttypes_filter);
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) + 1);
      *slot = n;

      /* Generate a 0 terminated list of filter values.  */
      for (; list ; list = TREE_CHAIN (list))
        {
          if (targetm.arm_eabi_unwinder)
            VARRAY_PUSH_TREE (crtl->eh.ehspec_data, TREE_VALUE (list));
          else
            {
              /* Look up each type in the list and encode its filter
                 value as a uleb128.  */
              push_uleb128 (&crtl->eh.ehspec_data,
                            add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
            }
        }
      if (targetm.arm_eabi_unwinder)
        VARRAY_PUSH_TREE (crtl->eh.ehspec_data, NULL_TREE);
      else
        VARRAY_PUSH_UCHAR (crtl->eh.ehspec_data, 0);
    }

  return n->filter;
}
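
/* A worked example of the encoding (illustrative filter numbers): for a
   C++ exception specification `throw (A, B)', assume add_ttypes_entry
   assigned A filter 1 and B filter 2.  On a non-ARM-EABI target the
   bytes { 1, 2, 0 } are then appended to ehspec_data as uleb128 values,
   and the entry's own filter is the negative, -1 based byte index of
   the start of that run.  */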
/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values (void)
{
  int i;
  htab_t ttypes, ehspec;

  crtl->eh.ttype_data = VEC_alloc (tree, gc, 16);
  if (targetm.arm_eabi_unwinder)
    VARRAY_TREE_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");
  else
    VARRAY_UCHAR_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r;

      r = VEC_index (eh_region, cfun->eh->region_array, i);

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
        continue;

      switch (r->type)
        {
        case ERT_CATCH:
          /* Whatever type_list is (NULL or true list), we build a list
             of filters for the region.  */
          r->u.eh_catch.filter_list = NULL_TREE;

          if (r->u.eh_catch.type_list != NULL)
            {
              /* Get a filter value for each of the types caught and store
                 them in the region's dedicated list.  */
              tree tp_node = r->u.eh_catch.type_list;

              for (;tp_node; tp_node = TREE_CHAIN (tp_node))
                {
                  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
                  tree flt_node = build_int_cst (NULL_TREE, flt);

                  r->u.eh_catch.filter_list
                    = tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
                }
            }
          else
            {
              /* Get a filter value for the NULL list also since it will need
                 an action record anyway.  */
              int flt = add_ttypes_entry (ttypes, NULL);
              tree flt_node = build_int_cst (NULL_TREE, flt);

              r->u.eh_catch.filter_list
                = tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
            }
          break;

        case ERT_ALLOWED_EXCEPTIONS:
          r->u.allowed.filter
            = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
          break;

        default:
          break;
        }
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}
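
/* As an illustrative sketch: for `try { ... } catch (A) { ... }
   catch (B) { ... }', the two CATCH regions might receive filter lists
   (1) and (2) from add_ttypes_entry, while an ERT_ALLOWED_EXCEPTIONS
   region for `throw (A, B)' would receive a single negative filter from
   add_ehspec_entry.  The concrete numbers depend on insertion order.  */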
/* Emit SEQ into the basic block just before INSN (that is assumed to be
   the first instruction of some existing BB) and return the newly
   produced block.  */

static basic_block
emit_to_new_bb_before (rtx seq, rtx insn)
{
  rtx last;
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
     call), we don't want it to go into newly created landing pad or other EH
     construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);
  last = emit_insn_before (seq, insn);
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}
/* Generate the code to actually handle exceptions, which will follow the
   landing pads.  */

static void
build_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      rtx seq;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
        continue;

      switch (region->type)
        {
        case ERT_TRY:
          /* It is possible that TRY region is kept alive only because some of
             contained catch region still have RESX instruction but they are
             reached via their copies.  In this case we need to do nothing.  */
          if (!region->u.eh_try.eh_catch->label)
            break;

          /* ??? Collect the set of all non-overlapping catch handlers
               all the way up the chain until blocked by a cleanup.  */
          /* ??? Outer try regions can share landing pads with inner
             try regions if the types are completely non-overlapping,
             and there are no intervening cleanups.  */

          region->post_landing_pad = gen_label_rtx ();

          start_sequence ();

          emit_label (region->post_landing_pad);

          /* ??? It is mighty inconvenient to call back into the
             switch statement generation code in expand_end_case.
             Rapid prototyping sez a sequence of ifs.  */
          {
            struct eh_region *c;
            for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
              {
                if (c->u.eh_catch.type_list == NULL)
                  emit_jump (c->label);
                else
                  {
                    /* Need for one cmp/jump per type caught.  Each type
                       list entry has a matching entry in the filter list
                       (see assign_filter_values).  */
                    tree tp_node = c->u.eh_catch.type_list;
                    tree flt_node = c->u.eh_catch.filter_list;

                    for (; tp_node; )
                      {
                        emit_cmp_and_jump_insns
                          (crtl->eh.filter,
                           GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
                           EQ, NULL_RTX,
                           targetm.eh_return_filter_mode (), 0, c->label);

                        tp_node = TREE_CHAIN (tp_node);
                        flt_node = TREE_CHAIN (flt_node);
                      }
                  }
              }
          }

          /* We delay the generation of the _Unwind_Resume until we generate
             landing pads.  We emit a marker here so as to get good control
             flow data in the meantime.  */
          region->resume
            = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
          emit_barrier ();

          seq = get_insns ();
          end_sequence ();

          emit_to_new_bb_before (seq, region->u.eh_try.eh_catch->label);

          break;

        case ERT_ALLOWED_EXCEPTIONS:
          region->post_landing_pad = gen_label_rtx ();

          start_sequence ();

          emit_label (region->post_landing_pad);

          emit_cmp_and_jump_insns (crtl->eh.filter,
                                   GEN_INT (region->u.allowed.filter),
                                   EQ, NULL_RTX,
                                   targetm.eh_return_filter_mode (), 0, region->label);

          /* We delay the generation of the _Unwind_Resume until we generate
             landing pads.  We emit a marker here so as to get good control
             flow data in the meantime.  */
          region->resume
            = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
          emit_barrier ();

          seq = get_insns ();
          end_sequence ();

          emit_to_new_bb_before (seq, region->label);
          break;

        case ERT_CLEANUP:
        case ERT_MUST_NOT_THROW:
          region->post_landing_pad = region->label;
          break;

        case ERT_CATCH:
        case ERT_THROW:
          /* Nothing to do.  */
          break;

        default:
          gcc_unreachable ();
        }
    }
}
/* Replace RESX patterns with jumps to the next handler if any, or calls to
   _Unwind_Resume otherwise.  */

static void
connect_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      struct eh_region *outer;
      rtx seq;
      rtx barrier;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
        continue;

      /* If there is no RESX, or it has been deleted by flow, there's
         nothing to fix up.  */
      if (! region->resume || INSN_DELETED_P (region->resume))
        continue;

      /* Search for another landing pad in this function.  */
      for (outer = region->outer; outer ; outer = outer->outer)
        if (outer->post_landing_pad)
          break;

      start_sequence ();

      if (outer)
        {
          edge e;
          basic_block src, dest;

          emit_jump (outer->post_landing_pad);
          src = BLOCK_FOR_INSN (region->resume);
          dest = BLOCK_FOR_INSN (outer->post_landing_pad);
          while (EDGE_COUNT (src->succs) > 0)
            remove_edge (EDGE_SUCC (src, 0));
          e = make_edge (src, dest, 0);
          e->probability = REG_BR_PROB_BASE;
          e->count = src->count;
        }
      else
        {
          emit_library_call (unwind_resume_libfunc, LCT_THROW,
                             VOIDmode, 1, crtl->eh.exc_ptr, ptr_mode);

          /* What we just emitted was a throwing libcall, so it got a
             barrier automatically added after it.  If the last insn in
             the libcall sequence isn't the barrier, it's because the
             target emits multiple insns for a call, and there are insns
             after the actual call insn (which are redundant and would be
             optimized away).  The barrier is inserted exactly after the
             call insn, so let's go get that and delete the insns after
             it, because below we need the barrier to be the last insn in
             the sequence.  */
          delete_insns_since (NEXT_INSN (last_call_insn ()));
        }

      seq = get_insns ();
      end_sequence ();
      barrier = emit_insn_before (seq, region->resume);
      /* Avoid duplicate barrier.  */
      gcc_assert (BARRIER_P (barrier));
      delete_insn (barrier);
      delete_insn (region->resume);

      /* ??? From tree-ssa we can wind up with catch regions whose
         label is not instantiated, but whose resx is present.  Now
         that we've dealt with the resx, kill the region.  */
      if (region->label == NULL && region->type == ERT_CLEANUP)
        remove_eh_handler (region);
    }
}
static void
dw2_build_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      rtx seq;
      basic_block bb;
      edge e;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
        continue;

      if (region->type != ERT_CLEANUP
          && region->type != ERT_TRY
          && region->type != ERT_ALLOWED_EXCEPTIONS)
        continue;

      if (!region->post_landing_pad)
        continue;

      start_sequence ();

      region->landing_pad = gen_label_rtx ();
      emit_label (region->landing_pad);

#ifdef HAVE_exception_receiver
      if (HAVE_exception_receiver)
        emit_insn (gen_exception_receiver ());
      else
#endif
#ifdef HAVE_nonlocal_goto_receiver
        if (HAVE_nonlocal_goto_receiver)
          emit_insn (gen_nonlocal_goto_receiver ());
        else
#endif
          { /* Nothing */ }

      emit_move_insn (crtl->eh.exc_ptr,
                      gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
      emit_move_insn (crtl->eh.filter,
                      gen_rtx_REG (targetm.eh_return_filter_mode (),
                                   EH_RETURN_DATA_REGNO (1)));

      seq = get_insns ();
      end_sequence ();

      bb = emit_to_new_bb_before (seq, region->post_landing_pad);
      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
    }
}
struct sjlj_lp_info
{
  int directly_reachable;
  int action_index;
  int dispatch_index;
  int call_site_index;
};
static bool
sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
{
  rtx insn;
  bool found_one = false;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      enum reachable_code rc;
      tree type_thrown;
      rtx note;

      if (! INSN_P (insn))
        continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
        continue;

      region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
      if (!region)
        continue;

      type_thrown = NULL_TREE;
      if (region->type == ERT_THROW)
        {
          type_thrown = region->u.eh_throw.type;
          region = region->outer;
        }

      /* Find the first containing region that might handle the exception.
         That's the landing pad to which we will transfer control.  */
      rc = RNL_NOT_CAUGHT;
      for (; region; region = region->outer)
        {
          rc = reachable_next_level (region, type_thrown, NULL, false);
          if (rc != RNL_NOT_CAUGHT)
            break;
        }
      if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
        {
          lp_info[region->region_number].directly_reachable = 1;
          found_one = true;
        }
    }

  return found_one;
}
static void
sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  htab_t ar_hash;
  int i, index;

  /* First task: build the action table.  */

  VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
        struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);

        r->landing_pad = dispatch_label;
        lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
        if (lp_info[i].action_index != -1)
          crtl->uses_eh_lsda = 1;
      }

  htab_delete (ar_hash);

  /* Next: assign dispatch values.  In dwarf2 terms, this would be the
     landing pad label for the region.  For sjlj though, there is one
     common landing pad from which we dispatch to the post-landing pads.

     A region receives a dispatch index if it is directly reachable
     and requires in-function processing.  Regions that share post-landing
     pads may share dispatch indices.  */
  /* ??? Post-landing pad sharing doesn't actually happen at the moment
     (see build_post_landing_pads) so we don't bother checking for it.  */

  index = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      lp_info[i].dispatch_index = index++;

  /* Finally: assign call-site values.  In dwarf2 terms, this would be
     the region number assigned by convert_to_eh_region_ranges, but
     handles no-action and must-not-throw differently.  */

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
        int action = lp_info[i].action_index;

        /* Map must-not-throw to otherwise unused call-site index 0.  */
        if (action == -2)
          index = 0;
        /* Map no-action to otherwise unused call-site index -1.  */
        else if (action == -1)
          index = -1;
        /* Otherwise, look it up in the table.  */
        else
          index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);

        lp_info[i].call_site_index = index;
      }
}
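
/* A small worked example (indices are illustrative): with three directly
   reachable regions whose action chains are must-not-throw (-2),
   no-action (-1) and a real chain, the resulting call_site_index values
   would be 0, -1 and the first free positive index handed out by
   add_call_site, respectively.  */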
2287 sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
2289 int last_call_site = -2;
2292 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2294 struct eh_region *region;
2296 rtx note, before, p;
2298 /* Reset value tracking at extended basic block boundaries. */
2300 last_call_site = -2;
2302 if (! INSN_P (insn))
2305 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2307 /* Calls that are known to not throw need not be marked. */
2308 if (note && INTVAL (XEXP (note, 0)) <= 0)
2312 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
2318 /* Calls (and trapping insns) without notes are outside any
2319 	 exception handling region in this function.  Mark them as
2320 	 no action.  */
2321       if (! region
2322 	  || (flag_non_call_exceptions
2323 	      && may_trap_p (PATTERN (insn))))
2324 	this_call_site = -1;
2328       else
2329 	this_call_site = lp_info[region->region_number].call_site_index;
2331       if (this_call_site == last_call_site)
2332 	continue;
2334       /* Don't separate a call from its argument loads.  */
2335       before = insn;
2336       if (CALL_P (insn))
2337 	before = find_first_parameter_load (insn, NULL_RTX);
2339       start_sequence ();
2340       mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
2341 			    sjlj_fc_call_site_ofs);
2342       emit_move_insn (mem, GEN_INT (this_call_site));
2343       p = get_insns ();
2344       end_sequence ();
2346       emit_insn_before (p, before);
2347 last_call_site = this_call_site;
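/* For orientation: the sjlj_fc_*_ofs offsets used above and below index
   into the function-context record that libgcc's unwind-sjlj.c walks at
   runtime.  A minimal illustrative sketch of that layout follows; the
   field names and the jbuf representation are approximations, not the
   authoritative libgcc definition.  */
#if 0
struct SjLj_Function_Context
{
  struct SjLj_Function_Context *prev;	/* chain of registered contexts */
  int call_site;			/* sjlj_fc_call_site_ofs */
  _Unwind_Word data[4];			/* sjlj_fc_data_ofs */
  _Unwind_Personality_Fn personality;	/* sjlj_fc_personality_ofs */
  void *lsda;				/* sjlj_fc_lsda_ofs */
  void *jbuf[];				/* sjlj_fc_jbuf_ofs */
};
#endif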
2351 /* Construct the SjLj_Function_Context. */
2353 static void
2354 sjlj_emit_function_enter (rtx dispatch_label)
2355 {
2356   rtx fn_begin, fc, mem, seq;
2357   bool fn_begin_outside_block;
2359   fc = crtl->eh.sjlj_fc;
2361   start_sequence ();
2363 /* We're storing this libcall's address into memory instead of
2364 calling it directly. Thus, we must call assemble_external_libcall
2365      here, as we cannot depend on emit_library_call to do it for us.  */
2366 assemble_external_libcall (eh_personality_libfunc);
2367 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
2368 emit_move_insn (mem, eh_personality_libfunc);
2370 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
2371   if (crtl->uses_eh_lsda)
2372     {
2373       char buf[20];
2374       rtx sym;
2376       ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
2377 sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
2378 SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
2379       emit_move_insn (mem, sym);
2380     }
2381   else
2382     emit_move_insn (mem, const0_rtx);
2384 #ifdef DONT_USE_BUILTIN_SETJMP
2385   {
2386     rtx x;
2387     x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
2388 TYPE_MODE (integer_type_node), 1,
2389 plus_constant (XEXP (fc, 0),
2390 sjlj_fc_jbuf_ofs), Pmode);
2392 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2393 TYPE_MODE (integer_type_node), 0, dispatch_label);
2394     add_reg_br_prob_note (get_insns (), REG_BR_PROB_BASE/100);
2395   }
2396 #else
2397   expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2398 			       dispatch_label);
2399 #endif
2401 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2402 		     1, XEXP (fc, 0), Pmode);
2404   seq = get_insns ();
2405   end_sequence ();
2407 /* ??? Instead of doing this at the beginning of the function,
2408 do this in a block that is at loop level 0 and dominates all
2409 can_throw_internal instructions. */
2411 fn_begin_outside_block = true;
2412 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2413     if (NOTE_P (fn_begin))
2414       {
2415 	if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2416 	  break;
2417 	else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
2418 	  fn_begin_outside_block = false;
2419       }
2421   if (fn_begin_outside_block)
2422     insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
2423   else
2424     emit_insn_after (seq, fn_begin);
2425 }
2427 /* Call back from expand_function_end to know where we should put
2428 the call to unwind_sjlj_unregister_libfunc if needed. */
2431 sjlj_emit_function_exit_after (rtx after)
2433 crtl->eh.sjlj_exit_after = after;
2436 static void
2437 sjlj_emit_function_exit (void)
2438 {
2439   rtx seq, insn;
2441   start_sequence ();
2443   emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2444 		     1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);
2446   seq = get_insns ();
2447   end_sequence ();
2449 /* ??? Really this can be done in any block at loop level 0 that
2450 post-dominates all can_throw_internal instructions. This is
2451 the last possible moment. */
2453   insn = crtl->eh.sjlj_exit_after;
2454   if (LABEL_P (insn))
2455     insn = NEXT_INSN (insn);
2457 emit_insn_after (seq, insn);
2461 sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
2463 enum machine_mode unwind_word_mode = targetm.unwind_word_mode ();
2464 enum machine_mode filter_mode = targetm.eh_return_filter_mode ();
2465 int i, first_reachable;
2466   rtx mem, dispatch, seq, fc;
2467   rtx before;
2468   basic_block bb;
2469   edge e;
2471   fc = crtl->eh.sjlj_fc;
2473   start_sequence ();
2475   emit_label (dispatch_label);
2477 #ifndef DONT_USE_BUILTIN_SETJMP
2478   expand_builtin_setjmp_receiver (dispatch_label);
2479 #endif
2481 /* Load up dispatch index, exc_ptr and filter values from the
2482 function context. */
2483 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2484 sjlj_fc_call_site_ofs);
2485 dispatch = copy_to_reg (mem);
2487 mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
2488   if (unwind_word_mode != ptr_mode)
2489     {
2490 #ifdef POINTERS_EXTEND_UNSIGNED
2491       mem = convert_memory_address (ptr_mode, mem);
2492 #else
2493       mem = convert_to_mode (ptr_mode, mem, 0);
2494 #endif
2495     }
2496   emit_move_insn (crtl->eh.exc_ptr, mem);
2498 mem = adjust_address (fc, unwind_word_mode,
2499 sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
2500 if (unwind_word_mode != filter_mode)
2501 mem = convert_to_mode (filter_mode, mem, 0);
2502 emit_move_insn (crtl->eh.filter, mem);
2504 /* Jump to one of the directly reachable regions. */
2505 /* ??? This really ought to be using a switch statement. */
2507 first_reachable = 0;
2508 for (i = cfun->eh->last_region_number; i > 0; --i)
2510       if (! lp_info[i].directly_reachable)
2511 	continue;
2513       if (! first_reachable)
2514 	{
2515 	  first_reachable = i;
2516 	  continue;
2517 	}
2519 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2520 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
2521 ((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, i))
2522 ->post_landing_pad);
2524   seq = get_insns ();
2525   end_sequence ();
2528   before = (((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, first_reachable))
2529 	    ->post_landing_pad);
2531 bb = emit_to_new_bb_before (seq, before);
2532 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
2533 e->count = bb->count;
2534 e->probability = REG_BR_PROB_BASE;
2537 static void
2538 sjlj_build_landing_pads (void)
2539 {
2540   struct sjlj_lp_info *lp_info;
2542 lp_info = XCNEWVEC (struct sjlj_lp_info, cfun->eh->last_region_number + 1);
2544   if (sjlj_find_directly_reachable_regions (lp_info))
2545     {
2546       rtx dispatch_label = gen_label_rtx ();
2547 int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
2548 TYPE_MODE (sjlj_fc_type_node),
2549 TYPE_ALIGN (sjlj_fc_type_node));
2550       crtl->eh.sjlj_fc
2551 	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2552 			      int_size_in_bytes (sjlj_fc_type_node),
2553 			      align);
2555 sjlj_assign_call_site_values (dispatch_label, lp_info);
2556 sjlj_mark_call_sites (lp_info);
2558 sjlj_emit_function_enter (dispatch_label);
2559 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2560       sjlj_emit_function_exit ();
2561     }
2563   free (lp_info);
2564 }
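/* Taken together, the routines above arrange for roughly the following
   shape of generated code (an illustrative C sketch, not the literal
   RTL; the personality routine and label names are just examples):  */
#if 0
void
example (void)
{
  struct SjLj_Function_Context fc;
  fc.personality = __gxx_personality_sj0;
  fc.lsda = &LLSDA_for_example;
  _Unwind_SjLj_Register (&fc);
  if (setjmp (fc.jbuf))
    goto dispatch;			/* the unwinder lands here */
  fc.call_site = 1;			/* sjlj_mark_call_sites */
  may_throw ();
  _Unwind_SjLj_Unregister (&fc);
  return;
 dispatch:
  /* sjlj_emit_dispatch_table: compare fc.call_site against each
     directly reachable region's dispatch index.  */
  if (fc.call_site == 1)
    goto post_landing_pad_1;
}
#endif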
2566 /* After initial rtl generation, call back to finish generating
2567 exception support code. */
2570 finish_eh_generation (void)
2574 /* Nothing to do if no regions created. */
2575   if (cfun->eh->region_tree == NULL)
2576     return;
2578 /* The object here is to provide detailed information (via
2579 reachable_handlers) on how exception control flows within the
2580 function for the CFG construction. In this first pass, we can
2581 include type information garnered from ERT_THROW and
2582 ERT_ALLOWED_EXCEPTIONS regions, and hope that it will be useful
2583 in deleting unreachable handlers. Subsequently, we will generate
2584 landing pads which will connect many of the handlers, and then
2585 type information will not be effective. Still, this is a win
2586 over previous implementations. */
2588 /* These registers are used by the landing pads. Make sure they
2589 have been generated. */
2590 get_exception_pointer ();
2591 get_exception_filter ();
2593 /* Construct the landing pads. */
2595 assign_filter_values ();
2596 build_post_landing_pads ();
2597 connect_post_landing_pads ();
2598 if (USING_SJLJ_EXCEPTIONS)
2599     sjlj_build_landing_pads ();
2600   else
2601     dw2_build_landing_pads ();
2603 crtl->eh.built_landing_pads = 1;
2605 /* We've totally changed the CFG. Start over. */
2606 find_exception_handler_labels ();
2607 break_superblocks ();
2608 if (USING_SJLJ_EXCEPTIONS
2609 /* Kludge for Alpha/Tru64 (see alpha_gp_save_rtx). */
2610 || single_succ_edge (ENTRY_BLOCK_PTR)->insns.r)
2611 commit_edge_insertions ();
2617 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2619 if (e->flags & EDGE_EH)
2628 rtl_make_eh_edge (NULL, bb, BB_END (bb));
2632 /* This section handles removing dead code for flow. */
2634 /* Splice REGION from the region tree and replace it by REPLACE etc.
2635    When UPDATE_CATCH_TRY is true, also update the links from catch
2636    regions to their try region.  */
2639 remove_eh_handler_and_replace (struct eh_region *region,
2640 struct eh_region *replace,
2641 bool update_catch_try)
2643   struct eh_region **pp, **pp_start, *p, *outer, *inner;
2644   rtx lab;
2646   outer = region->outer;
2648 /* For the benefit of efficiently handling REG_EH_REGION notes,
2649 replace this region in the region array with its containing
2650 region. Note that previous region deletions may result in
2651 multiple copies of this region in the array, so we have a
2652 list of alternate numbers by which we are known. */
2654   VEC_replace (eh_region, cfun->eh->region_array, region->region_number,
2655 	       replace);
2657   if (region->aka)
2658     {
2659       unsigned i;
2660       bitmap_iterator bi;
2661       EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
2662 	{
2663 	  VEC_replace (eh_region, cfun->eh->region_array, i, replace);
2664 	}
2665     }
2667   if (replace)
2668     {
2669       if (!replace->aka)
2670 	replace->aka = BITMAP_GGC_ALLOC ();
2671       if (region->aka)
2672 	bitmap_ior_into (replace->aka, region->aka);
2673       bitmap_set_bit (replace->aka, region->region_number);
2674     }
2676 if (crtl->eh.built_landing_pads)
2677     lab = region->landing_pad;
2678   else
2679     lab = region->label;
2680   if (outer)
2681     pp_start = &outer->inner;
2682   else
2683     pp_start = &cfun->eh->region_tree;
2684   for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
2685     continue;
2686   *pp = region->next_peer;
2688   if (replace)
2689     pp_start = &replace->inner;
2690   else
2691     pp_start = &cfun->eh->region_tree;
2692   inner = region->inner;
2693   if (inner)
2694     {
2695       for (p = inner; p->next_peer ; p = p->next_peer)
2696 	p->outer = replace;
2697       p->outer = replace;
2699       p->next_peer = *pp_start;
2700       *pp_start = inner;
2701     }
2703 if (region->type == ERT_CATCH
2704 && update_catch_try)
2706 struct eh_region *eh_try, *next, *prev;
2708 for (eh_try = region->next_peer;
2709 eh_try->type == ERT_CATCH;
2710 	   eh_try = eh_try->next_peer)
2711 	continue;
2712       gcc_assert (eh_try->type == ERT_TRY);
2714       next = region->u.eh_catch.next_catch;
2715       prev = region->u.eh_catch.prev_catch;
2717       if (next)
2718 	next->u.eh_catch.prev_catch = prev;
2719       else
2720 	eh_try->u.eh_try.last_catch = prev;
2721       if (prev)
2722 	prev->u.eh_catch.next_catch = next;
2723       else
2724 	{
2725 	  eh_try->u.eh_try.eh_catch = next;
2726 	  if (! next)
2727 	    remove_eh_handler (eh_try);
2728 	}
2732 /* Splice REGION from the region tree and replace it by the outer region
2733    of REGION.  */
2735 static void
2736 remove_eh_handler (struct eh_region *region)
2737 {
2738   remove_eh_handler_and_replace (region, region->outer, true);
2739 }
2741 /* Remove EH region R that has turned out to have no code in its handler.  */
2743 void
2744 remove_eh_region (int r)
2745 {
2746 struct eh_region *region;
2748 region = VEC_index (eh_region, cfun->eh->region_array, r);
2749 remove_eh_handler (region);
2752 /* Remove EH region R that has turned out to have no code in its handler,
2753    and replace it by the outer region of R2.  */
2755 void
2756 remove_eh_region_and_replace_by_outer_of (int r, int r2)
2757 {
2758 struct eh_region *region, *region2;
2760 region = VEC_index (eh_region, cfun->eh->region_array, r);
2761 region2 = VEC_index (eh_region, cfun->eh->region_array, r2);
2762 remove_eh_handler_and_replace (region, region2->outer, true);
2765 /* Invokes CALLBACK for every exception handler label. Only used by old
2766 loop hackery; should not be used by new code. */
2768 void
2769 for_each_eh_label (void (*callback) (rtx))
2770 {
2771   int i;
2772   for (i = 0; i < cfun->eh->last_region_number; i++)
2773     {
2774 struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
2775 if (r && r->region_number == i && r->label
2776 && GET_CODE (r->label) == CODE_LABEL)
2777 	(*callback) (r->label);
2778     }
2779 }
2781 /* Invoke CALLBACK for every exception region in the current function. */
2783 void
2784 for_each_eh_region (void (*callback) (struct eh_region *))
2785 {
2786 int i, n = cfun->eh->last_region_number;
2787   for (i = 1; i <= n; ++i)
2788     {
2789 struct eh_region *region;
2791 region = VEC_index (eh_region, cfun->eh->region_array, i);
2792       if (region)
2793 	(*callback) (region);
2794     }
2795 }
2797 /* This section describes CFG exception edges for flow. */
2799 /* For communicating between calls to reachable_next_level. */
2800 struct reachable_info
2801 {
2802   tree types_caught;
2803   tree types_allowed;
2804   void (*callback) (struct eh_region *, void *);
2805   void *callback_data;
2806 };
2808 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2809 base class of TYPE, is in HANDLED. */
2812 check_handled (tree handled, tree type)
2816 /* We can check for exact matches without front-end help. */
2817   if (! lang_eh_type_covers)
2818     {
2819       for (t = handled; t ; t = TREE_CHAIN (t))
2820 	if (TREE_VALUE (t) == type)
2821 	  return 1;
2822     }
2823   else
2824     {
2825       for (t = handled; t ; t = TREE_CHAIN (t))
2826 	if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2827 	  return 1;
2828     }
2830   return 0;
2833 /* A subroutine of reachable_next_level. If we are collecting a list
2834 of handlers, add one. After landing pad generation, reference
2835 it instead of the handlers themselves. Further, the handlers are
2836 all wired together, so by referencing one, we've got them all.
2837 Before landing pad generation we reference each handler individually.
2839 LP_REGION contains the landing pad; REGION is the handler. */
2842 add_reachable_handler (struct reachable_info *info,
2843 		       struct eh_region *lp_region, struct eh_region *region)
2844 {
2845   if (! info)
2846     return;
2848   if (crtl->eh.built_landing_pads)
2849     info->callback (lp_region, info->callback_data);
2850   else
2851     info->callback (region, info->callback_data);
2854 /* Process one level of exception regions for reachability.
2855 If TYPE_THROWN is non-null, then it is the *exact* type being
2856 propagated. If INFO is non-null, then collect handler labels
2857 and caught/allowed type information between invocations. */
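/* The possible return values, from weakest to strongest:
   RNL_NOT_CAUGHT (keep looking outward), RNL_MAYBE_CAUGHT (this handler
   is a candidate, keep looking), RNL_CAUGHT (definitely handled, stop),
   and RNL_BLOCKED (nothing can propagate past here, stop).  */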
2859 static enum reachable_code
2860 reachable_next_level (struct eh_region *region, tree type_thrown,
2861 		      struct reachable_info *info,
2862 		      bool maybe_resx)
2863 {
2864   switch (region->type)
2865     {
2866     case ERT_CLEANUP:
2867       /* Before landing-pad generation, we model control flow
2868 directly to the individual handlers. In this way we can
2869 see that catch handler types may shadow one another. */
2870 add_reachable_handler (info, region, region);
2871       return RNL_MAYBE_CAUGHT;
2873     case ERT_TRY:
2874       {
2875 	struct eh_region *c;
2876 enum reachable_code ret = RNL_NOT_CAUGHT;
2878 	for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
2879 	  {
2880 	    /* A catch-all handler ends the search.  */
2881 	    if (c->u.eh_catch.type_list == NULL)
2882 	      {
2883 		add_reachable_handler (info, region, c);
2884 		return RNL_CAUGHT;
2885 	      }
2887 	    if (type_thrown)
2888 	      {
2889 		/* If we have at least one type match, end the search.  */
2890 		tree tp_node = c->u.eh_catch.type_list;
2892 		for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2893 		  {
2894 		    tree type = TREE_VALUE (tp_node);
2896 if (type == type_thrown
2897 || (lang_eh_type_covers
2898 			    && (*lang_eh_type_covers) (type, type_thrown)))
2899 		      {
2900 			add_reachable_handler (info, region, c);
2901 			return RNL_CAUGHT;
2902 		      }
2903 		  }
2905 		/* If we have definitive information of a match failure,
2906 		   the catch won't trigger.  */
2907 		if (lang_eh_type_covers)
2908 		  return RNL_NOT_CAUGHT;
2909 	      }
2911 /* At this point, we either don't know what type is thrown or
2912 don't have front-end assistance to help deciding if it is
2913 covered by one of the types in the list for this region.
2915 We'd then like to add this region to the list of reachable
2916 handlers since it is indeed potentially reachable based on the
2917 information we have.
2919 Actually, this handler is for sure not reachable if all the
2920 types it matches have already been caught. That is, it is only
2921 potentially reachable if at least one of the types it catches
2922 has not been previously caught. */
2924 	    if (! info)
2925 	      ret = RNL_MAYBE_CAUGHT;
2926 	    else
2927 	      {
2928 		tree tp_node = c->u.eh_catch.type_list;
2929 		bool maybe_reachable = false;
2931 /* Compute the potential reachability of this handler and
2932 update the list of types caught at the same time. */
2933 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2935 tree type = TREE_VALUE (tp_node);
2937 		    if (! check_handled (info->types_caught, type))
2938 		      {
2939 			info->types_caught
2940 			  = tree_cons (NULL, type, info->types_caught);
2942 			maybe_reachable = true;
2943 		      }
2944 		  }
2946 		if (maybe_reachable)
2947 		  {
2948 		    add_reachable_handler (info, region, c);
2950 /* ??? If the catch type is a base class of every allowed
2951 type, then we know we can stop the search. */
2952 		    ret = RNL_MAYBE_CAUGHT;
2953 		  }
2954 	      }
2955 	  }
2957 	return ret;
2958       }
2960 case ERT_ALLOWED_EXCEPTIONS:
2961 /* An empty list of types definitely ends the search. */
2962       if (region->u.allowed.type_list == NULL_TREE)
2963 	{
2964 	  add_reachable_handler (info, region, region);
2965 	  return RNL_CAUGHT;
2966 	}
2968 /* Collect a list of lists of allowed types for use in detecting
2969 when a catch may be transformed into a catch-all. */
2970       if (info)
2971 	info->types_allowed = tree_cons (NULL_TREE,
2972 region->u.allowed.type_list,
2973 info->types_allowed);
2975 /* If we have definitive information about the type hierarchy,
2976 	 then we can tell if the thrown type will pass through the
2977 	 filter.  */
2978       if (type_thrown && lang_eh_type_covers)
2979 	{
2980 	  if (check_handled (region->u.allowed.type_list, type_thrown))
2981 	    return RNL_NOT_CAUGHT;
2982 	  else
2983 	    {
2984 	      add_reachable_handler (info, region, region);
2985 	      return RNL_CAUGHT;
2986 	    }
2987 	}
2989 add_reachable_handler (info, region, region);
2990 return RNL_MAYBE_CAUGHT;
2992     case ERT_CATCH:
2993       /* Catch regions are handled by their controlling try region.  */
2994 return RNL_NOT_CAUGHT;
2996 case ERT_MUST_NOT_THROW:
2997 /* Here we end our search, since no exceptions may propagate.
2999 Local landing pads of ERT_MUST_NOT_THROW instructions are reachable
3000 only via locally handled RESX instructions.
3002 	 When we inline a function call, we can bring in new handlers.  To
3003 	 avoid ERT_MUST_NOT_THROW landing pads being deleted as unreachable,
3004 	 assume that such handlers exist for any inlinable call until
3005 	 inlining decisions are fixed.  */
3007       if (maybe_resx)
3008 	{
3009 	  add_reachable_handler (info, region, region);
3010 	  return RNL_CAUGHT;
3011 	}
3012       else
3013 	return RNL_BLOCKED;
3017 /* Shouldn't see these here. */
3025 /* Invoke CALLBACK on each region reachable from REGION_NUMBER. */
3028 foreach_reachable_handler (int region_number, bool is_resx, bool inlinable_call,
3029 void (*callback) (struct eh_region *, void *),
3030 void *callback_data)
3032 struct reachable_info info;
3033 struct eh_region *region;
3036 memset (&info, 0, sizeof (info));
3037 info.callback = callback;
3038 info.callback_data = callback_data;
3040 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
3041   if (!region)
3042     return;
3044   type_thrown = NULL_TREE;
3045   if (is_resx)
3046     {
3047       /* A RESX leaves a region instead of entering it.  Thus the
3048 	 region itself may have been deleted out from under us.  */
3049       if (region == NULL)
3050 	return;
3051       region = region->outer;
3052     }
3053   else if (region->type == ERT_THROW)
3054     {
3055       type_thrown = region->u.eh_throw.type;
3056 region = region->outer;
3057     }
3058   while (1)
3059     {
3061       if (reachable_next_level (region, type_thrown, &info,
3062 				inlinable_call || is_resx) >= RNL_CAUGHT)
3063 	break;
3064       /* If we have processed one cleanup, there is no point in
3065 	 processing any more of them.  Each cleanup will have an edge
3066 	 to the next outer cleanup region, so the flow graph will be
3067 	 accurate.  */
3068       if (region->type == ERT_CLEANUP)
3069 	{
3070 	  enum reachable_code code = RNL_NOT_CAUGHT;
3071 	  region = find_prev_try (region->outer);
3072 	  /* Continue looking for an outer TRY region until we find one
3073 	     that might catch something.  */
3074 	  while (region
3075 		 && (code = reachable_next_level (region, type_thrown, &info,
3076 						  inlinable_call || is_resx))
3077 		    == RNL_NOT_CAUGHT)
3078 	    region = find_prev_try (region->outer);
3079 	  if (code >= RNL_CAUGHT)
3080 	    break;
3081 	}
3082       if (region)
3083 	region = region->outer;
3084     }
3085 }
3087 /* Retrieve a list of labels of exception handlers which can be
3088 reached by a given insn. */
3091 arh_to_landing_pad (struct eh_region *region, void *data)
3093 rtx *p_handlers = (rtx *) data;
3094   if (! *p_handlers)
3095     *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
3099 arh_to_label (struct eh_region *region, void *data)
3101 rtx *p_handlers = (rtx *) data;
3102 *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
3106 reachable_handlers (rtx insn)
3108 bool is_resx = false;
3109   rtx handlers = NULL;
3110   int region_number;
3112   if (JUMP_P (insn)
3113       && GET_CODE (PATTERN (insn)) == RESX)
3114     {
3115       region_number = XINT (PATTERN (insn), 0);
3116       is_resx = true;
3117     }
3118   else
3119     {
3120       rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3121       if (!note || INTVAL (XEXP (note, 0)) <= 0)
3122 	return NULL;
3123       region_number = INTVAL (XEXP (note, 0));
3124     }
3126   foreach_reachable_handler (region_number, is_resx, false,
3127 			     (crtl->eh.built_landing_pads
3128 			      ? arh_to_landing_pad
3129 			      : arh_to_label),
3130 			     &handlers);
3132   return handlers;
3133 }
3135 /* Determine if the given INSN can throw an exception that is caught
3136 within the function. */
3139 can_throw_internal_1 (int region_number, bool is_resx, bool inlinable_call)
3141 struct eh_region *region;
3144 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
3145   if (!region)
3146     return false;
3148   type_thrown = NULL_TREE;
3149   if (is_resx)
3150     region = region->outer;
3151   else if (region->type == ERT_THROW)
3152     {
3153       type_thrown = region->u.eh_throw.type;
3154       region = region->outer;
3155     }
3157 /* If this exception is ignored by each and every containing region,
3158 then control passes straight out. The runtime may handle some
3159 regions, which also do not require processing internally. */
3160   for (; region; region = region->outer)
3161     {
3162       enum reachable_code how = reachable_next_level (region, type_thrown, 0,
3163 						      inlinable_call || is_resx);
3164       if (how == RNL_BLOCKED)
3165 	return false;
3166       if (how != RNL_NOT_CAUGHT)
3167 	return true;
3168     }
3170   return false;
3171 }
3174 can_throw_internal (const_rtx insn)
3178   if (! INSN_P (insn))
3179     return false;
3181   if (JUMP_P (insn)
3182       && GET_CODE (PATTERN (insn)) == RESX
3183       && XINT (PATTERN (insn), 0) > 0)
3184 return can_throw_internal_1 (XINT (PATTERN (insn), 0), true, false);
3186 if (NONJUMP_INSN_P (insn)
3187 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3188 insn = XVECEXP (PATTERN (insn), 0, 0);
3190 /* Every insn that might throw has an EH_REGION note. */
3191 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3192   if (!note || INTVAL (XEXP (note, 0)) <= 0)
3193     return false;
3195 return can_throw_internal_1 (INTVAL (XEXP (note, 0)), false, false);
3198 /* Determine if the given INSN can throw an exception that is
3199 visible outside the function. */
3202 can_throw_external_1 (int region_number, bool is_resx, bool inlinable_call)
3204 struct eh_region *region;
3207 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
3208   if (!region)
3209     return false;
3211   type_thrown = NULL_TREE;
3212   if (is_resx)
3213     region = region->outer;
3214   else if (region->type == ERT_THROW)
3215     {
3216       type_thrown = region->u.eh_throw.type;
3217       region = region->outer;
3218     }
3220 /* If the exception is caught or blocked by any containing region,
3221 then it is not seen by any calling function. */
3222 for (; region ; region = region->outer)
3223     if (reachable_next_level (region, type_thrown, NULL,
3224 			      inlinable_call || is_resx) >= RNL_CAUGHT)
3225       return true;
3227   return false;
3228 }
3231 can_throw_external (const_rtx insn)
3235   if (! INSN_P (insn))
3236     return false;
3238   if (JUMP_P (insn)
3239       && GET_CODE (PATTERN (insn)) == RESX
3240       && XINT (PATTERN (insn), 0) > 0)
3241 return can_throw_external_1 (XINT (PATTERN (insn), 0), true, false);
3243   if (NONJUMP_INSN_P (insn)
3244       && GET_CODE (PATTERN (insn)) == SEQUENCE)
3245     {
3246       rtx seq = PATTERN (insn);
3247       int i, n = XVECLEN (seq, 0);
3249       for (i = 0; i < n; i++)
3250 	if (can_throw_external (XVECEXP (seq, 0, i)))
3251 	  return true;
3253       return false;
3254     }
3256   note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3257   if (!note)
3258     {
3259       /* Calls (and trapping insns) without notes are outside any
3260 	 exception handling region in this function.  We have to
3261 	 assume it might throw.  Given that the front end and middle
3262 	 ends mark known NOTHROW functions, this isn't so wildly
3263 	 pessimistic.  */
3264       return (CALL_P (insn)
3265 	      || (flag_non_call_exceptions
3266 		  && may_trap_p (PATTERN (insn))));
3267     }
3268   if (INTVAL (XEXP (note, 0)) <= 0)
3269     return false;
3271   return can_throw_external_1 (INTVAL (XEXP (note, 0)), false, false);
3272 }
3274 /* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls. */
3277 set_nothrow_function_flags (void)
3283 /* Assume crtl->all_throwers_are_sibcalls until we encounter
3284 something that can throw an exception. We specifically exempt
3285 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
3286      and can't throw.  Most CALL_INSNs are not SIBLING_CALL_P, so this
3287      is optimistic.  */
3289 crtl->all_throwers_are_sibcalls = 1;
3291 /* If we don't know that this implementation of the function will
3292 actually be used, then we must not set TREE_NOTHROW, since
3293 callers must not assume that this function does not throw. */
3294   if (TREE_NOTHROW (current_function_decl))
3295     return 0;
3297   if (! flag_exceptions)
3298     return 0;
3300 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3301     if (can_throw_external (insn))
3302       {
3303 	crtl->nothrow = 0;
3305 	if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
3306 	  {
3307 	    crtl->all_throwers_are_sibcalls = 0;
3308 	    return 0;
3309 	  }
3310       }
3312 for (insn = crtl->epilogue_delay_list; insn;
3313 insn = XEXP (insn, 1))
3314     if (can_throw_external (insn))
3315       {
3316 	crtl->nothrow = 0;
3318 	if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
3319 	  {
3320 	    crtl->all_throwers_are_sibcalls = 0;
3321 	    return 0;
3322 	  }
3323       }
3324   if (crtl->nothrow
3325       && (cgraph_function_body_availability (cgraph_node
3326 (current_function_decl))
3327 >= AVAIL_AVAILABLE))
3328     {
3329       struct cgraph_node *node = cgraph_node (current_function_decl);
3330 struct cgraph_edge *e;
3331 for (e = node->callers; e; e = e->next_caller)
3332 e->can_throw_external = false;
3333 TREE_NOTHROW (current_function_decl) = 1;
3335       if (dump_file)
3336 	fprintf (dump_file, "Marking function nothrow: %s\n\n",
3337 		 current_function_name ());
3338     }
3339   return 0;
3340 }
3342 struct rtl_opt_pass pass_set_nothrow_function_flags =
3343 {
3344  {
3345   RTL_PASS,
3346   "nothrow",                            /* name */
3347   NULL,                                 /* gate */
3348   set_nothrow_function_flags,           /* execute */
3349   NULL,                                 /* sub */
3350   NULL,                                 /* next */
3351   0,                                    /* static_pass_number */
3352   TV_NONE,                              /* tv_id */
3353   0,                                    /* properties_required */
3354   0,                                    /* properties_provided */
3355   0,                                    /* properties_destroyed */
3356   0,                                    /* todo_flags_start */
3357   TODO_dump_func,                       /* todo_flags_finish */
3358  }
3359 };
3362 /* Various hooks for unwind library. */
3364 /* Do any necessary initialization to access arbitrary stack frames.
3365 On the SPARC, this means flushing the register windows. */
3368 expand_builtin_unwind_init (void)
3370 /* Set this so all the registers get saved in our frame; we need to be
3371 able to copy the saved values for any registers from frames we unwind. */
3372 crtl->saves_all_registers = 1;
3374 #ifdef SETUP_FRAME_ADDRESSES
3375   SETUP_FRAME_ADDRESSES ();
3376 #endif
3377 }
3380 expand_builtin_eh_return_data_regno (tree exp)
3382 tree which = CALL_EXPR_ARG (exp, 0);
3383 unsigned HOST_WIDE_INT iwhich;
3385   if (TREE_CODE (which) != INTEGER_CST)
3386     {
3387       error ("argument of %<__builtin_eh_return_regno%> must be constant");
3388       return constm1_rtx;
3389     }
3391   iwhich = tree_low_cst (which, 1);
3392 iwhich = EH_RETURN_DATA_REGNO (iwhich);
3393   if (iwhich == INVALID_REGNUM)
3394     return constm1_rtx;
3396 #ifdef DWARF_FRAME_REGNUM
3397   iwhich = DWARF_FRAME_REGNUM (iwhich);
3398 #else
3399   iwhich = DBX_REGISTER_NUMBER (iwhich);
3400 #endif
3402 return GEN_INT (iwhich);
3405 /* Given a value extracted from the return address register or stack slot,
3406 return the actual address encoded in that value. */
3409 expand_builtin_extract_return_addr (tree addr_tree)
3411 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
3413 if (GET_MODE (addr) != Pmode
3414       && GET_MODE (addr) != VOIDmode)
3415     {
3416 #ifdef POINTERS_EXTEND_UNSIGNED
3417       addr = convert_memory_address (Pmode, addr);
3418 #else
3419       addr = convert_to_mode (Pmode, addr, 0);
3420 #endif
3421     }
3423 /* First mask out any unwanted bits. */
3424 #ifdef MASK_RETURN_ADDR
3425   expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
3426 #endif
3428 /* Then adjust to find the real return address. */
3429 #if defined (RETURN_ADDR_OFFSET)
3430   addr = plus_constant (addr, RETURN_ADDR_OFFSET);
3431 #endif
3433   return addr;
3434 }
3436 /* Given an actual address in addr_tree, do any necessary encoding
3437 and return the value to be stored in the return address register or
3438 stack slot so the epilogue will return to that address. */
3441 expand_builtin_frob_return_addr (tree addr_tree)
3443 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
3445 addr = convert_memory_address (Pmode, addr);
3447 #ifdef RETURN_ADDR_OFFSET
3448 addr = force_reg (Pmode, addr);
3449   addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
3450 #endif
3452   return addr;
3453 }
3455 /* Set up the epilogue with the magic bits we'll need to return to the
3456 exception handler. */
3459 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
3460 			  tree handler_tree)
3461 {
3462   rtx tmp;
3464 #ifdef EH_RETURN_STACKADJ_RTX
3465 tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
3466 VOIDmode, EXPAND_NORMAL);
3467 tmp = convert_memory_address (Pmode, tmp);
3468 if (!crtl->eh.ehr_stackadj)
3469 crtl->eh.ehr_stackadj = copy_to_reg (tmp);
3470 else if (tmp != crtl->eh.ehr_stackadj)
3471     emit_move_insn (crtl->eh.ehr_stackadj, tmp);
3472 #endif
3474 tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
3475 VOIDmode, EXPAND_NORMAL);
3476 tmp = convert_memory_address (Pmode, tmp);
3477 if (!crtl->eh.ehr_handler)
3478 crtl->eh.ehr_handler = copy_to_reg (tmp);
3479 else if (tmp != crtl->eh.ehr_handler)
3480 emit_move_insn (crtl->eh.ehr_handler, tmp);
3482 if (!crtl->eh.ehr_label)
3483 crtl->eh.ehr_label = gen_label_rtx ();
3484 emit_jump (crtl->eh.ehr_label);
3488 expand_eh_return (void)
3492   if (! crtl->eh.ehr_label)
3493     return;
3495 crtl->calls_eh_return = 1;
3497 #ifdef EH_RETURN_STACKADJ_RTX
3498   emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
3499 #endif
3501 around_label = gen_label_rtx ();
3502 emit_jump (around_label);
3504 emit_label (crtl->eh.ehr_label);
3505 clobber_return_register ();
3507 #ifdef EH_RETURN_STACKADJ_RTX
3508   emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
3509 #endif
3511 #ifdef HAVE_eh_return
3512   if (HAVE_eh_return)
3513     emit_insn (gen_eh_return (crtl->eh.ehr_handler));
3514   else
3515 #endif
3516     {
3517 #ifdef EH_RETURN_HANDLER_RTX
3518       emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler);
3519 #else
3520       error ("__builtin_eh_return not supported on this target");
3521 #endif
3522     }
3524   emit_label (around_label);
3527 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
3528 POINTERS_EXTEND_UNSIGNED and return it. */
3531 expand_builtin_extend_pointer (tree addr_tree)
3533 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
3536 #ifdef POINTERS_EXTEND_UNSIGNED
3537   extend = POINTERS_EXTEND_UNSIGNED;
3538 #else
3539   /* The previous EH code did an unsigned extend by default, so we do this
3540      also for uniformity.  */
3541   extend = 1;
3542 #endif
3544 return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
3547 /* In the following functions, we represent entries in the action table
3548 as 1-based indices. Special cases are:
3550 0: null action record, non-null landing pad; implies cleanups
3551 -1: null action record, null landing pad; implies no action
3552 -2: no call-site entry; implies must_not_throw
3553 -3: we have yet to process outer regions
3555 Further, no special cases apply to the "next" field of the record.
3556 For next, 0 means end of list. */
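/* A sketch, for illustration only, of how a consumer would classify
   these indices (the real consumers are the add_call_site callers in
   this file and the runtime unwinder):  */
#if 0
static const char *
classify_action_index (int ix)
{
  switch (ix)
    {
    case 0:  return "cleanups only: call-site entry, null action record";
    case -1: return "no action: null record, null landing pad";
    case -2: return "must_not_throw: no call-site entry at all";
    case -3: return "sentinel: outer regions not yet processed";
    default: return "1-based offset into crtl->eh.action_record_data";
    }
}
#endif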
3558 struct action_record
3559 {
3560   int offset;
3561   int filter;
3562   int next;
3563 };
3566 action_record_eq (const void *pentry, const void *pdata)
3568 const struct action_record *entry = (const struct action_record *) pentry;
3569 const struct action_record *data = (const struct action_record *) pdata;
3570 return entry->filter == data->filter && entry->next == data->next;
3574 action_record_hash (const void *pentry)
3576 const struct action_record *entry = (const struct action_record *) pentry;
3577 return entry->next * 1009 + entry->filter;
3581 add_action_record (htab_t ar_hash, int filter, int next)
3583 struct action_record **slot, *new_ar, tmp;
3585   tmp.filter = filter;
3586   tmp.next = next;
3587 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3589 if ((new_ar = *slot) == NULL)
3591 new_ar = XNEW (struct action_record);
3592 new_ar->offset = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3593 new_ar->filter = filter;
3594       new_ar->next = next;
3595       *slot = new_ar;
3597 /* The filter value goes in untouched. The link to the next
3598 record is a "self-relative" byte offset, or zero to indicate
3599 that there is no next record. So convert the absolute 1 based
3600 indices we've been carrying around into a displacement. */
3602 push_sleb128 (&crtl->eh.action_record_data, filter);
3603       if (next)
3604 	next -= VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3605 push_sleb128 (&crtl->eh.action_record_data, next);
3608 return new_ar->offset;
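/* A worked example of the displacement conversion above: suppose the
   action table already holds two bytes, so this record is assigned the
   1-based offset 3.  After its filter is pushed (one byte, say), the
   next field itself sits at 1-based offset 4.  If the chain continues
   at the record whose offset is 1, next becomes 1 - (3 + 1) = -3,
   i.e. three bytes back from the next field's own position; a stored 0
   still means end of chain.  */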
3612 collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
3614   struct eh_region *c;
3615   int next;
3617   /* If we've reached the top of the region chain, then we have
3618      no actions, and require no landing pad.  */
3619   if (region == NULL)
3620     return -1;
3622   switch (region->type)
3623     {
3624     case ERT_CLEANUP:
3625 /* A cleanup adds a zero filter to the beginning of the chain, but
3626 there are special cases to look out for. If there are *only*
3627 cleanups along a path, then it compresses to a zero action.
3628 Further, if there are multiple cleanups along a path, we only
3629 need to represent one of them, as that is enough to trigger
3630 entry to the landing pad at runtime. */
3631       next = collect_one_action_chain (ar_hash, region->outer);
3632       if (next <= 0)
3633 	return 0;
3634       for (c = region->outer; c ; c = c->outer)
3635 	if (c->type == ERT_CLEANUP)
3636 	  return next;
3637       return add_action_record (ar_hash, 0, next);
3640 /* Process the associated catch regions in reverse order.
3641 If there's a catch-all handler, then we don't need to
3642 search outer regions. Use a magic -3 value to record
3643 that we haven't done the outer search. */
3644       next = -3;
3645       for (c = region->u.eh_try.last_catch; c ; c = c->u.eh_catch.prev_catch)
3646 	{
3647 	  if (c->u.eh_catch.type_list == NULL)
3648 	    {
3649 	      /* Retrieve the filter from the head of the filter list
3650 		 where we have stored it (see assign_filter_values).  */
3651 	      int filter
3652 		= TREE_INT_CST_LOW (TREE_VALUE (c->u.eh_catch.filter_list));
3654 	      next = add_action_record (ar_hash, filter, 0);
3655 	    }
3656 	  else
3657 	    {
3658 	      /* Once the outer search is done, trigger an action record for
3659 		 each filter we have.  */
3660 	      tree flt_node;
3662 	      if (next == -3)
3663 		{
3664 		  next = collect_one_action_chain (ar_hash, region->outer);
3666 		  /* If there is no next action, terminate the chain.  */
3667 		  if (next == -1)
3668 		    next = 0;
3669 		  /* If all outer actions are cleanups or must_not_throw,
3670 		     we'll have no action record for it, since we had wanted
3671 		     to encode these states in the call-site record directly.
3672 		     Add a cleanup action to the chain to catch these.  */
3673 		  else if (next <= 0)
3674 		    next = add_action_record (ar_hash, 0, 0);
3675 		}
3677 	      flt_node = c->u.eh_catch.filter_list;
3678 	      for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3679 		{
3680 		  int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3681 		  next = add_action_record (ar_hash, filter, next);
3682 		}
3683 	    }
3684 	}
3685       return next;
3687 case ERT_ALLOWED_EXCEPTIONS:
3688 /* An exception specification adds its filter to the
3689 beginning of the chain. */
3690 next = collect_one_action_chain (ar_hash, region->outer);
3692       /* If there is no next action, terminate the chain.  */
3693       if (next == -1)
3694 	next = 0;
3695       /* If all outer actions are cleanups or must_not_throw,
3696 	 we'll have no action record for it, since we had wanted
3697 	 to encode these states in the call-site record directly.
3698 	 Add a cleanup action to the chain to catch these.  */
3699       else if (next <= 0)
3700 	next = add_action_record (ar_hash, 0, 0);
3702 return add_action_record (ar_hash, region->u.allowed.filter, next);
3704 case ERT_MUST_NOT_THROW:
3705 /* A must-not-throw region with no inner handlers or cleanups
3706 requires no call-site entry. Note that this differs from
3707 the no handler or cleanup case in that we do require an lsda
3708 	 to be generated.  Return a magic -2 value to record this.  */
3709       return -2;
3711     case ERT_CATCH:
3712     case ERT_THROW:
3713       /* CATCH regions are handled in TRY above.  THROW regions are
3714 for optimization information only and produce no output. */
3715 return collect_one_action_chain (ar_hash, region->outer);
3723 add_call_site (rtx landing_pad, int action)
3725 call_site_record record;
3727 record = GGC_NEW (struct call_site_record);
3728 record->landing_pad = landing_pad;
3729 record->action = action;
3731 VEC_safe_push (call_site_record, gc, crtl->eh.call_site_record, record);
3733 return call_site_base + VEC_length (call_site_record, crtl->eh.call_site_record) - 1;
3736 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3737 The new note numbers will not refer to region numbers, but
3738 instead to call site entries. */
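/* For example (illustrative, not a literal RTL dump): a call insn
   carrying (expr_list:REG_EH_REGION (const_int 3)) might end up
   bracketed as

	NOTE_INSN_EH_REGION_BEG 7
	(call_insn ...)
	NOTE_INSN_EH_REGION_END 7

   where 7 is the call-site index returned by add_call_site for the
   action chain of region 3, not the region number itself.  */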
3741 convert_to_eh_region_ranges (void)
3743   rtx insn, iter, note;
3744   htab_t ar_hash;
3745 int last_action = -3;
3746 rtx last_action_insn = NULL_RTX;
3747 rtx last_landing_pad = NULL_RTX;
3748   rtx first_no_action_insn = NULL_RTX;
3749   int call_site = 0;
3751   if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3752     return 0;
3754 VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
3756 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3758   for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3759     if (INSN_P (iter))
3760       {
3761 	struct eh_region *region;
3762 	int this_action;
3763 	rtx this_landing_pad;
3765 	insn = iter;
3766 	if (NONJUMP_INSN_P (insn)
3767 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3768 insn = XVECEXP (PATTERN (insn), 0, 0);
3770 	note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3771 	if (!note)
3772 	  {
3773 	    if (! (CALL_P (insn)
3774 		   || (flag_non_call_exceptions
3775 		       && may_trap_p (PATTERN (insn)))))
3776 	      continue;
3777 	    this_action = -1;
3778 	    region = NULL;
3779 	  }
3780 	else
3781 	  {
3782 	    if (INTVAL (XEXP (note, 0)) <= 0)
3783 	      this_action = -1;
3784 	    else
3785 	      {
3786 		region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
3787 		this_action = collect_one_action_chain (ar_hash, region);
3788 	      }
3789 	  }
3788 /* Existence of catch handlers, or must-not-throw regions
3789 implies that an lsda is needed (even if empty). */
3790 if (this_action != -1)
3791 crtl->uses_eh_lsda = 1;
3793 /* Delay creation of region notes for no-action regions
3794 until we're sure that an lsda will be required. */
3795 	else if (last_action == -3)
3796 	  {
3797 	    first_no_action_insn = iter;
3798 	    last_action = -1;
3799 	  }
3801 /* Cleanups and handlers may share action chains but not
3802 landing pads. Collect the landing pad for this region. */
3803 	if (this_action >= 0)
3804 	  {
3805 	    struct eh_region *o;
3806 	    for (o = region; ! o->landing_pad ; o = o->outer)
3807 	      continue;
3808 	    this_landing_pad = o->landing_pad;
3809 	  }
3810 	else
3811 	  this_landing_pad = NULL_RTX;
3813 /* Differing actions or landing pads implies a change in call-site
3814 info, which implies some EH_REGION note should be emitted. */
3815 if (last_action != this_action
3816 	    || last_landing_pad != this_landing_pad)
3817 	  {
3818 	    /* If we'd not seen a previous action (-3) or the previous
3819 	       action was must-not-throw (-2), then we do not need an
3820 	       end note.  */
3821 	    if (last_action >= -1)
3822 	      {
3823 /* If we delayed the creation of the begin, do it now. */
3824 if (first_no_action_insn)
3826 call_site = add_call_site (NULL_RTX, 0);
3827 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3828 first_no_action_insn);
3829 NOTE_EH_HANDLER (note) = call_site;
3830 		    first_no_action_insn = NULL_RTX;
3831 		  }
3833 		note = emit_note_after (NOTE_INSN_EH_REGION_END,
3834 					last_action_insn);
3835 		NOTE_EH_HANDLER (note) = call_site;
3836 	      }
3838 	    /* If the new action is must-not-throw, then no region notes
3839 	       are created.  */
3840 	    if (this_action >= -1)
3841 	      {
3842 call_site = add_call_site (this_landing_pad,
3843 this_action < 0 ? 0 : this_action);
3844 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3845 		NOTE_EH_HANDLER (note) = call_site;
3846 	      }
3847 	  }
3848 last_action = this_action;
3849 last_landing_pad = this_landing_pad;
3851 	last_action_insn = iter;
3852       }
3854   if (last_action >= -1 && ! first_no_action_insn)
3855     {
3856 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3857 NOTE_EH_HANDLER (note) = call_site;
3860 htab_delete (ar_hash);
3864 struct rtl_opt_pass pass_convert_to_eh_region_ranges =
3865 {
3866  {
3867   RTL_PASS,
3868   "eh_ranges",                          /* name */
3869   NULL,                                 /* gate */
3870   convert_to_eh_region_ranges,          /* execute */
3871   NULL,                                 /* sub */
3872   NULL,                                 /* next */
3873   0,                                    /* static_pass_number */
3874   TV_NONE,                              /* tv_id */
3875   0,                                    /* properties_required */
3876   0,                                    /* properties_provided */
3877   0,                                    /* properties_destroyed */
3878   0,                                    /* todo_flags_start */
3879   TODO_dump_func,                       /* todo_flags_finish */
3880  }
3881 };
3884 static void
3885 push_uleb128 (varray_type *data_area, unsigned int value)
3886 {
3887   do
3888     {
3889       unsigned char byte = value & 0x7f;
3890       value >>= 7;
3891       if (value)
3892 	byte |= 0x80;
3893       VARRAY_PUSH_UCHAR (*data_area, byte);
3894     }
3895   while (value);
3896 }
3898 static void
3899 push_sleb128 (varray_type *data_area, int value)
3900 {
3901   unsigned char byte;
3902   int more;
3904   do
3905     {
3906       byte = value & 0x7f;
3907       value >>= 7;
3908       more = ! ((value == 0 && (byte & 0x40) == 0)
3909 		|| (value == -1 && (byte & 0x40) != 0));
3910       if (more)
3911 	byte |= 0x80;
3912       VARRAY_PUSH_UCHAR (*data_area, byte);
3913     }
3914   while (more);
3915 }
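/* A worked uleb128 example: 624485 == 0b100110_0001110_1100101 is
   pushed low group first as 0xE5 0x8E 0x26, each byte but the last
   carrying the 0x80 continuation bit.  For sleb128 the loop stops once
   the remaining value is pure sign bits and bit 0x40 of the last byte
   already matches that sign.  */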
3918 #ifndef HAVE_AS_LEB128
3920 dw2_size_of_call_site_table (void)
3922 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3923 int size = n * (4 + 4 + 4);
3924   int i;
3926   for (i = 0; i < n; ++i)
3927     {
3928 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3929       size += size_of_uleb128 (cs->action);
3930     }
3932   return size;
3933 }
3936 sjlj_size_of_call_site_table (void)
3938   int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3939   int size = 0;
3940   int i;
3942   for (i = 0; i < n; ++i)
3943     {
3944 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3945 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3946       size += size_of_uleb128 (cs->action);
3947     }
3949   return size;
3950 }
3954 dw2_output_call_site_table (void)
3956 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3959 for (i = 0; i < n; ++i)
3961 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3962 char reg_start_lab[32];
3963 char reg_end_lab[32];
3964 char landing_pad_lab[32];
3966 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3967 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3969 if (cs->landing_pad)
3970 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3971 CODE_LABEL_NUMBER (cs->landing_pad));
3973 /* ??? Perhaps use insn length scaling if the assembler supports
3974 generic arithmetic. */
3975 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3976 data4 if the function is small enough. */
3977 #ifdef HAVE_AS_LEB128
3978 dw2_asm_output_delta_uleb128 (reg_start_lab,
3979 current_function_func_begin_label,
3980 "region %d start", i);
3981     dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3982 				  "length");
3983     if (cs->landing_pad)
3984       dw2_asm_output_delta_uleb128 (landing_pad_lab,
3985 				    current_function_func_begin_label,
3986 				    "landing pad");
3987     else
3988       dw2_asm_output_data_uleb128 (0, "landing pad");
3989 #else
3990     dw2_asm_output_delta (4, reg_start_lab,
3991 			  current_function_func_begin_label,
3991 current_function_func_begin_label,
3992 "region %d start", i);
3993 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3994 if (cs->landing_pad)
3995 dw2_asm_output_delta (4, landing_pad_lab,
3996 			    current_function_func_begin_label,
3997 			    "landing pad");
3998     else
3999       dw2_asm_output_data (4, 0, "landing pad");
4000 #endif
4001     dw2_asm_output_data_uleb128 (cs->action, "action");
4002     }
4004   call_site_base += n;
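/* Each entry emitted above thus describes one call-site range:

	region start	(4-byte or uleb128 offset from function begin)
	region length
	landing pad	(offset from function begin, or 0 for none)
	action		(uleb128: 0 for cleanups only, otherwise a
			 1-based offset into the action record table)  */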
4008 sjlj_output_call_site_table (void)
4010 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
4011   int i;
4013   for (i = 0; i < n; ++i)
4014     {
4015 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
4017 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
4018 "region %d landing pad", i);
4019       dw2_asm_output_data_uleb128 (cs->action, "action");
4020     }
4022   call_site_base += n;
4023 }
4025 #ifndef TARGET_UNWIND_INFO
4026 /* Switch to the section that should be used for exception tables. */
4028 static void
4029 switch_to_exception_section (const char * ARG_UNUSED (fnname))
4030 {
4031   section *s;
4033   if (exception_section)
4034     s = exception_section;
4035   else
4036     {
4037       /* Compute the section and cache it into exception_section,
4038 	 unless it depends on the function name.  */
4039       if (targetm.have_named_sections)
4040 	{
4041 	  int flags;
4043 	  if (EH_TABLES_CAN_BE_READ_ONLY)
4044 	    {
4045 	      int tt_format =
4046 		ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
4047 	      flags = ((! flag_pic
4048 			|| ((tt_format & 0x70) != DW_EH_PE_absptr
4049 			    && (tt_format & 0x70) != DW_EH_PE_aligned))
4050 		       ? 0 : SECTION_WRITE);
4051 	    }
4052 	  else
4053 	    flags = SECTION_WRITE;
4055 #ifdef HAVE_LD_EH_GC_SECTIONS
4056 	  if (flag_function_sections)
4057 	    {
4058 	      char *section_name = XNEWVEC (char, strlen (fnname) + 32);
4059 	      sprintf (section_name, ".gcc_except_table.%s", fnname);
4060 	      s = get_section (section_name, flags, NULL);
4061 	      free (section_name);
4062 	    }
4063 	  else
4064 #endif
4065 	    exception_section
4066 	      = s = get_section (".gcc_except_table", flags, NULL);
4067 	}
4068       else
4069 	exception_section
4070 	  = s = flag_pic ? data_section : readonly_data_section;
4071     }
4073   switch_to_section (s);
4074 }
4078 /* Output a reference from an exception table to the type_info object TYPE.
4079    TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
4080    the type.  */
4082 static void
4083 output_ttype (tree type, int tt_format, int tt_format_size)
4084 {
4085   rtx value;
4086   bool is_public = true;
4088   if (type == NULL_TREE)
4089     value = const0_rtx;
4090   else
4091     {
4092       struct varpool_node *node;
4094       type = lookup_type_for_runtime (type);
4095       value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
4097       /* Let cgraph know that the rtti decl is used.  Not all of the
4098 	 paths below go through assemble_integer, which would take
4099 	 care of this for us.  */
4100       STRIP_NOPS (type);
4101       if (TREE_CODE (type) == ADDR_EXPR)
4102 	{
4103 	  type = TREE_OPERAND (type, 0);
4104 	  if (TREE_CODE (type) == VAR_DECL)
4105 	    {
4106 	      node = varpool_node (type);
4107 	      if (node)
4108 		varpool_mark_needed_node (node);
4109 	      is_public = TREE_PUBLIC (type);
4110 	    }
4111 	}
4112       else
4113 	gcc_assert (TREE_CODE (type) == INTEGER_CST);
4114     }
4116   /* Allow the target to override the type table entry format.  */
4117   if (targetm.asm_out.ttype (value))
4118     return;
4120   if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
4121     assemble_integer (value, tt_format_size,
4122 		      tt_format_size * BITS_PER_UNIT, 1);
4123   else
4124     dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
4125 }
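/* The function below emits the standard LSDA shape, roughly:

	@LPStart format byte	(DW_EH_PE_omit here, so @LPStart == @Start)
	@TType format byte
	uleb128 @TType base offset	(only if @TType data is present)
	call-site format byte
	uleb128 call-site table length
	call-site table		(dw2 or sjlj flavor)
	action record table	(sleb128 pairs: filter, next)
	padding to tt_format_size alignment
	@TType table		(type_info references, indexed backward)
	exception-spec lists	(uleb128 filters, each list 0-terminated)  */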
4128 output_function_exception_table (const char * ARG_UNUSED (fnname))
4130 int tt_format, cs_format, lp_format, i, n;
4131 #ifdef HAVE_AS_LEB128
4132 char ttype_label[32];
4133 char cs_after_size_label[32];
4134   char cs_end_label[32];
4135 #else
4136   int call_site_len;
4137 #endif
4138   int have_tt_data;
4139   int tt_format_size = 0;
4141 /* Not all functions need anything. */
4142   if (! crtl->uses_eh_lsda)
4143     return;
4145 if (eh_personality_libfunc)
4146 assemble_external_libcall (eh_personality_libfunc);
4148 #ifdef TARGET_UNWIND_INFO
4149 /* TODO: Move this into target file. */
4150 fputs ("\t.personality\t", asm_out_file);
4151 output_addr_const (asm_out_file, eh_personality_libfunc);
4152 fputs ("\n\t.handlerdata\n", asm_out_file);
4153 /* Note that varasm still thinks we're in the function's code section.
4154 The ".endp" directive that will immediately follow will take us back. */
4155 #else
4156   switch_to_exception_section (fnname);
4157 #endif
4159 /* If the target wants a label to begin the table, emit it here. */
4160 targetm.asm_out.except_table_label (asm_out_file);
4162 have_tt_data = (VEC_length (tree, crtl->eh.ttype_data) > 0
4163 || VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) > 0);
4165   /* Indicate the format of the @TType entries.  */
4166   if (! have_tt_data)
4167     tt_format = DW_EH_PE_omit;
4168   else
4169     {
4170       tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
4171 #ifdef HAVE_AS_LEB128
4172 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
4173 				   current_function_funcdef_no);
4174 #endif
4175       tt_format_size = size_of_encoded_value (tt_format);
4177       assemble_align (tt_format_size * BITS_PER_UNIT);
4178     }
4180 targetm.asm_out.internal_label (asm_out_file, "LLSDA",
4181 current_function_funcdef_no);
4183 /* The LSDA header. */
4185 /* Indicate the format of the landing pad start pointer. An omitted
4186 field implies @LPStart == @Start. */
4187 /* Currently we always put @LPStart == @Start. This field would
4188 be most useful in moving the landing pads completely out of
4189 line to another section, but it could also be used to minimize
4190 the size of uleb128 landing pad offsets. */
4191 lp_format = DW_EH_PE_omit;
4192 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
4193 eh_data_format_name (lp_format));
4195 /* @LPStart pointer would go here. */
4197 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
4198 eh_data_format_name (tt_format));
4200 #ifndef HAVE_AS_LEB128
4201 if (USING_SJLJ_EXCEPTIONS)
4202 call_site_len = sjlj_size_of_call_site_table ();
4204     call_site_len = dw2_size_of_call_site_table ();
4205 #endif
4207   /* A pc-relative 4-byte displacement to the @TType data.  */
4208   if (have_tt_data)
4209     {
4210 #ifdef HAVE_AS_LEB128
4211 char ttype_after_disp_label[32];
4212 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
4213 current_function_funcdef_no);
4214 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
4215 "@TType base offset");
4216       ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
4217 #else
4218 /* Ug. Alignment queers things. */
4219 unsigned int before_disp, after_disp, last_disp, disp;
4221 before_disp = 1 + 1;
4222       after_disp = (1 + size_of_uleb128 (call_site_len)
4223 		    + call_site_len
4224 		    + VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data)
4225 		    + (VEC_length (tree, crtl->eh.ttype_data)
4226 		       * tt_format_size));
4228       disp = after_disp;
4229       do
4230 	{
4231 	  unsigned int disp_size, pad;
4233 	  last_disp = disp;
4234 	  disp_size = size_of_uleb128 (disp);
4235 	  pad = before_disp + disp_size + after_disp;
4236 	  if (pad % tt_format_size)
4237 	    pad = tt_format_size - (pad % tt_format_size);
4238 	  else
4239 	    pad = 0;
4240 	  disp = after_disp + pad;
4241 	}
4242       while (disp != last_disp);
4244       dw2_asm_output_data_uleb128 (disp, "@TType base offset");
4245 #endif
4246     }
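/* The do/while above (in the !HAVE_AS_LEB128 path) iterates because the
   uleb128 encoding of DISP can change size as DISP grows, which changes
   the padding needed to align the @TType data, which in turn changes
   DISP; it converges in a few rounds.  E.g. with before_disp = 2,
   after_disp = 126 and 4-byte entries: disp = 126 needs 1 byte, total
   129, pad 3, disp = 129; 129 needs 2 bytes, total 130, pad 2,
   disp = 128; 128 still needs 2 bytes, so disp settles at 128.  */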
4248 /* Indicate the format of the call-site offsets. */
4249 #ifdef HAVE_AS_LEB128
4250   cs_format = DW_EH_PE_uleb128;
4251 #else
4252   cs_format = DW_EH_PE_udata4;
4253 #endif
4254 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
4255 eh_data_format_name (cs_format));
4257 #ifdef HAVE_AS_LEB128
4258 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
4259 current_function_funcdef_no);
4260 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
4261 current_function_funcdef_no);
4262 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
4263 "Call-site table length");
4264 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
4265 if (USING_SJLJ_EXCEPTIONS)
4266     sjlj_output_call_site_table ();
4267   else
4268     dw2_output_call_site_table ();
4269   ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
4270 #else
4271 dw2_asm_output_data_uleb128 (call_site_len,"Call-site table length");
4272 if (USING_SJLJ_EXCEPTIONS)
4273     sjlj_output_call_site_table ();
4274   else
4275     dw2_output_call_site_table ();
4276 #endif
4278 /* ??? Decode and interpret the data for flag_debug_asm. */
4279 n = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data);
4280 for (i = 0; i < n; ++i)
4281 dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.action_record_data, i),
4282 (i ? NULL : "Action record table"));
4284   if (have_tt_data)
4285     assemble_align (tt_format_size * BITS_PER_UNIT);
4287   i = VEC_length (tree, crtl->eh.ttype_data);
4288   while (i-- > 0)
4289     {
4290 tree type = VEC_index (tree, crtl->eh.ttype_data, i);
4291 output_ttype (type, tt_format, tt_format_size);