1 /* Implements exception handling.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
5 Contributed by Mike Stump <mrs@cygnus.com>.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
24 /* An exception is an event that can be signaled from within a
25 function. This event can then be "caught" or "trapped" by the
26 callers of this function. This potentially allows program flow to
27 be transferred to any arbitrary code associated with a function call
28 several levels up the stack.
30 The intended use for this mechanism is for signaling "exceptional
31 events" in an out-of-band fashion, hence its name. The C++ language
32 (and many other OO-styled or functional languages) practically
33 requires such a mechanism, as otherwise it becomes very difficult
34 or even impossible to signal failure conditions in complex
35 situations. The traditional C++ example is when an error occurs in
36 the process of constructing an object; without such a mechanism, it
37 is impossible to signal that the error occurred without adding global
38 state variables and error checks around every object construction.
40 The act of causing this event to occur is referred to as "throwing
41 an exception". (Alternate terms include "raising an exception" or
42 "signaling an exception".) The term "throw" is used because control
43 is returned to the callers of the function that is signaling the
44 exception, and thus there is the concept of "throwing" the
45 exception up the call stack.
47 [ Add updated documentation on how to use this. ] */
52 #include "coretypes.h"
60 #include "insn-config.h"
62 #include "integrate.h"
63 #include "hard-reg-set.h"
64 #include "basic-block.h"
66 #include "dwarf2asm.h"
67 #include "dwarf2out.h"
75 #include "langhooks.h"
77 #include "diagnostic.h"
78 #include "tree-pass.h"
81 /* Provide defaults for stuff that may not be defined when using sjlj exceptions.  */
83 #ifndef EH_RETURN_DATA_REGNO
84 #define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
87 /* Protect cleanup actions with must-not-throw regions, with a call
88 to the given failure handler. */
89 gimple (*lang_protect_cleanup_actions) (void);
91 /* Return true if type A catches type B. */
92 int (*lang_eh_type_covers) (tree a, tree b);
94 /* Map a type to a runtime object to match type. */
95 tree (*lang_eh_runtime_type) (tree);
97 /* A hash table of label to region number. */
99 struct ehl_map_entry GTY(())
102 struct eh_region *region;
105 static GTY(()) int call_site_base;
106 static GTY ((param_is (union tree_node)))
107 htab_t type_to_runtime_map;
109 /* Describe the SjLj_Function_Context structure. */
110 static GTY(()) tree sjlj_fc_type_node;
111 static int sjlj_fc_call_site_ofs;
112 static int sjlj_fc_data_ofs;
113 static int sjlj_fc_personality_ofs;
114 static int sjlj_fc_lsda_ofs;
115 static int sjlj_fc_jbuf_ofs;
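/* For reference, the structure these offsets describe looks roughly like
   the sketch below (the authoritative definition lives in unwind-sjlj.c;
   exact field types and the jump-buffer size are target dependent):

       struct SjLj_Function_Context
       {
         struct SjLj_Function_Context *__prev;
         int __call_site;
         _Unwind_Word __data[4];
         _Unwind_Personality_Fn __personality;
         void *__lsda;
         void *__jbuf[];   (sized per DONT_USE_BUILTIN_SETJMP below)
       };  */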
117 /* Describes one exception region. */
118 struct eh_region GTY(())
120 /* The immediately surrounding region. */
121 struct eh_region *outer;
123 /* The list of immediately contained regions. */
124 struct eh_region *inner;
125 struct eh_region *next_peer;
127 /* An identifier for this region. */
130 /* When a region is deleted, its parents inherit the REG_EH_REGION
131 numbers already assigned. */
134 /* Each region does exactly one thing. */
141 ERT_ALLOWED_EXCEPTIONS,
146 /* Holds the action to perform based on the preceding type. */
148 /* A list of catch blocks, a surrounding try block,
149 and the label for continuing after a catch. */
150 struct eh_region_u_try {
151 struct eh_region *eh_catch;
152 struct eh_region *last_catch;
153 } GTY ((tag ("ERT_TRY"))) eh_try;
155 /* The list through the catch handlers, the list of type objects
156 matched, and the list of associated filters. */
157 struct eh_region_u_catch {
158 struct eh_region *next_catch;
159 struct eh_region *prev_catch;
162 } GTY ((tag ("ERT_CATCH"))) eh_catch;
164 /* A tree_list of allowed types. */
165 struct eh_region_u_allowed {
168 } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;
170 /* The type given by a call to "throw foo();", or discovered
172 struct eh_region_u_throw {
174 } GTY ((tag ("ERT_THROW"))) eh_throw;
176 /* Retain the cleanup expression even after expansion so that
177 we can match up fixup regions. */
178 struct eh_region_u_cleanup {
179 struct eh_region *prev_try;
180 } GTY ((tag ("ERT_CLEANUP"))) cleanup;
181 } GTY ((desc ("%0.type"))) u;
183 /* Entry point for this region's handler before landing pads are built. */
187 /* Entry point for this region's handler from the runtime eh library. */
190 /* Entry point for this region's handler from an inner region. */
191 rtx post_landing_pad;
193 /* The RESX insn for handing off control to the next outermost handler,
197 /* True if something in this region may throw. */
198 unsigned may_contain_throw : 1;
201 typedef struct eh_region *eh_region;
203 struct call_site_record GTY(())
209 DEF_VEC_P(eh_region);
210 DEF_VEC_ALLOC_P(eh_region, gc);
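/* To illustrate the pointers above: for a function with an outer cleanup
   wrapping a try that has a single catch handler, the tree might look
   roughly like this (the exact shape depends on the front end and on the
   gen_eh_region_* constructors below):

       ERT_CLEANUP                         <- cfun->eh->region_tree
           | inner
           v
       ERT_CATCH --next_peer--> ERT_TRY
                                   (whose u.eh_try.eh_catch points back
                                    at the ERT_CATCH region)

   OUTER always leads back to the parent region, INNER points at one
   child, and NEXT_PEER chains that child's siblings.  */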
212 /* Used to save exception status for each function. */
213 struct eh_status GTY(())
215 /* The tree of all regions for this function. */
216 struct eh_region *region_tree;
218 /* The same information as an indexable array. */
219 VEC(eh_region,gc) *region_array;
220 int last_region_number;
222 htab_t GTY((param_is (struct throw_stmt_node))) throw_stmt_table;
225 static int t2r_eq (const void *, const void *);
226 static hashval_t t2r_hash (const void *);
227 static void add_type_for_runtime (tree);
228 static tree lookup_type_for_runtime (tree);
230 static int ttypes_filter_eq (const void *, const void *);
231 static hashval_t ttypes_filter_hash (const void *);
232 static int ehspec_filter_eq (const void *, const void *);
233 static hashval_t ehspec_filter_hash (const void *);
234 static int add_ttypes_entry (htab_t, tree);
235 static int add_ehspec_entry (htab_t, htab_t, tree);
236 static void assign_filter_values (void);
237 static void build_post_landing_pads (void);
238 static void connect_post_landing_pads (void);
239 static void dw2_build_landing_pads (void);
242 static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
243 static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
244 static void sjlj_mark_call_sites (struct sjlj_lp_info *);
245 static void sjlj_emit_function_enter (rtx);
246 static void sjlj_emit_function_exit (void);
247 static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
248 static void sjlj_build_landing_pads (void);
250 static hashval_t ehl_hash (const void *);
251 static int ehl_eq (const void *, const void *);
252 static void add_ehl_entry (rtx, struct eh_region *);
253 static void remove_exception_handler_label (rtx);
254 static void remove_eh_handler (struct eh_region *);
255 static int for_each_eh_label_1 (void **, void *);
257 /* The return value of reachable_next_level. */
260 /* The given exception is not processed by the given region. */
262 /* The given exception may need processing by the given region. */
264 /* The given exception is completely processed by the given region. */
266 /* The given exception is completely processed by the runtime. */
270 struct reachable_info;
271 static enum reachable_code reachable_next_level (struct eh_region *, tree,
272 struct reachable_info *, bool);
274 static int action_record_eq (const void *, const void *);
275 static hashval_t action_record_hash (const void *);
276 static int add_action_record (htab_t, int, int);
277 static int collect_one_action_chain (htab_t, struct eh_region *);
278 static int add_call_site (rtx, int);
280 static void push_uleb128 (varray_type *, unsigned int);
281 static void push_sleb128 (varray_type *, int);
282 #ifndef HAVE_AS_LEB128
283 static int dw2_size_of_call_site_table (void);
284 static int sjlj_size_of_call_site_table (void);
286 static void dw2_output_call_site_table (void);
287 static void sjlj_output_call_site_table (void);
290 /* Routine to see if exception handling is turned on.
291 DO_WARN is nonzero if we want to inform the user that exception
292 handling is turned off.
294 This is used to ensure that -fexceptions has been specified if the
295 compiler tries to use any exception-specific functions. */
298 doing_eh (int do_warn)
300 if (! flag_exceptions)
302 static int warned = 0;
303 if (! warned && do_warn)
305 error ("exception handling disabled, use -fexceptions to enable");
317 if (! flag_exceptions)
320 type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);
322 /* Create the SjLj_Function_Context structure. This should match
323 the definition in unwind-sjlj.c. */
324 if (USING_SJLJ_EXCEPTIONS)
326 tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
328 sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);
330 f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
331 build_pointer_type (sjlj_fc_type_node));
332 DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;
334 f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
336 DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
338 tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
339 tmp = build_array_type (lang_hooks.types.type_for_mode
340 (targetm.unwind_word_mode (), 1),
342 f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
343 DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
345 f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
347 DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;
349 f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
351 DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
353 #ifdef DONT_USE_BUILTIN_SETJMP
355 tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
357 /* This should be large enough for most systems; if it is not,
358 JMP_BUF_SIZE should be defined with the proper value. It will
359 also tend to be larger than necessary for most systems; a more
360 optimal port will define JMP_BUF_SIZE. */
361 tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
364 /* builtin_setjmp takes a pointer to 5 words. */
365 tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
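/* For example, when BITS_PER_WORD == POINTER_SIZE this evaluates to
   5 - 1 = 4, so the index type built below runs 0..4 and __jbuf holds
   the five pointer-sized words that builtin_setjmp expects.  */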
367 tmp = build_index_type (tmp);
368 tmp = build_array_type (ptr_type_node, tmp);
369 f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
370 #ifdef DONT_USE_BUILTIN_SETJMP
371 /* We don't know what alignment requirements the runtime's
372 jmp_buf has.  Overestimate.  */
373 DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
374 DECL_USER_ALIGN (f_jbuf) = 1;
376 DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
378 TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
379 TREE_CHAIN (f_prev) = f_cs;
380 TREE_CHAIN (f_cs) = f_data;
381 TREE_CHAIN (f_data) = f_per;
382 TREE_CHAIN (f_per) = f_lsda;
383 TREE_CHAIN (f_lsda) = f_jbuf;
385 layout_type (sjlj_fc_type_node);
387 /* Cache the interesting field offsets so that we have
388 easy access from rtl. */
389 sjlj_fc_call_site_ofs
390 = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
391 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
393 = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
394 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
395 sjlj_fc_personality_ofs
396 = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
397 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
399 = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
400 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
402 = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
403 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
408 init_eh_for_function (void)
410 cfun->eh = GGC_CNEW (struct eh_status);
413 /* Routines to generate the exception tree somewhat directly.
414 These are used from tree-eh.c when processing exception related
415 nodes during tree optimization. */
417 static struct eh_region *
418 gen_eh_region (enum eh_region_type type, struct eh_region *outer)
420 struct eh_region *new_eh;
422 #ifdef ENABLE_CHECKING
423 gcc_assert (doing_eh (0));
426 /* Insert a new blank region as a leaf in the tree. */
427 new_eh = GGC_CNEW (struct eh_region);
429 new_eh->outer = outer;
432 new_eh->next_peer = outer->inner;
433 outer->inner = new_eh;
437 new_eh->next_peer = cfun->eh->region_tree;
438 cfun->eh->region_tree = new_eh;
441 new_eh->region_number = ++cfun->eh->last_region_number;
447 gen_eh_region_cleanup (struct eh_region *outer, struct eh_region *prev_try)
449 struct eh_region *cleanup = gen_eh_region (ERT_CLEANUP, outer);
450 cleanup->u.cleanup.prev_try = prev_try;
455 gen_eh_region_try (struct eh_region *outer)
457 return gen_eh_region (ERT_TRY, outer);
461 gen_eh_region_catch (struct eh_region *t, tree type_or_list)
463 struct eh_region *c, *l;
464 tree type_list, type_node;
466 /* Make sure we always end up with a type list to normalize further
467 processing; then register each type against the runtime types map. */
468 type_list = type_or_list;
471 if (TREE_CODE (type_or_list) != TREE_LIST)
472 type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
474 type_node = type_list;
475 for (; type_node; type_node = TREE_CHAIN (type_node))
476 add_type_for_runtime (TREE_VALUE (type_node));
479 c = gen_eh_region (ERT_CATCH, t->outer);
480 c->u.eh_catch.type_list = type_list;
481 l = t->u.eh_try.last_catch;
482 c->u.eh_catch.prev_catch = l;
484 l->u.eh_catch.next_catch = c;
486 t->u.eh_try.eh_catch = c;
487 t->u.eh_try.last_catch = c;
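/* As an illustration of the links built above: for a try block with two
   handlers, say "try { ... } catch (A) { ... } catch (B) { ... }", two
   successive calls to this function would roughly leave

       t->u.eh_try.eh_catch            -> region for catch (A)
       t->u.eh_try.last_catch          -> region for catch (B)
       catch(A)->u.eh_catch.next_catch -> region for catch (B)
       catch(B)->u.eh_catch.prev_catch -> region for catch (A)

   with both catch regions created as peers of T in the region tree,
   since they are generated with T's outer region as their parent.  */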
493 gen_eh_region_allowed (struct eh_region *outer, tree allowed)
495 struct eh_region *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
496 region->u.allowed.type_list = allowed;
498 for (; allowed ; allowed = TREE_CHAIN (allowed))
499 add_type_for_runtime (TREE_VALUE (allowed));
505 gen_eh_region_must_not_throw (struct eh_region *outer)
507 return gen_eh_region (ERT_MUST_NOT_THROW, outer);
511 get_eh_region_number (struct eh_region *region)
513 return region->region_number;
517 get_eh_region_may_contain_throw (struct eh_region *region)
519 return region->may_contain_throw;
523 get_eh_region_tree_label (struct eh_region *region)
525 return region->tree_label;
529 set_eh_region_tree_label (struct eh_region *region, tree lab)
531 region->tree_label = lab;
535 expand_resx_expr (tree exp)
537 int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
538 struct eh_region *reg = VEC_index (eh_region,
539 cfun->eh->region_array, region_nr);
541 gcc_assert (!reg->resume);
542 do_pending_stack_adjust ();
543 reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
547 /* Note that the current EH region (if any) may contain a throw, or a
548 call to a function which itself may contain a throw. */
551 note_eh_region_may_contain_throw (struct eh_region *region)
553 while (region && !region->may_contain_throw)
555 region->may_contain_throw = 1;
556 region = region->outer;
561 /* Return an rtl expression for a pointer to the exception object
565 get_exception_pointer (void)
567 if (! crtl->eh.exc_ptr)
568 crtl->eh.exc_ptr = gen_reg_rtx (ptr_mode);
569 return crtl->eh.exc_ptr;
572 /* Return an rtl expression for the exception dispatch filter
576 get_exception_filter (void)
578 if (! crtl->eh.filter)
579 crtl->eh.filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
580 return crtl->eh.filter;
583 /* This section is for the exception handling specific optimization pass. */
585 /* Random access the exception region tree. */
588 collect_eh_region_array (void)
592 i = cfun->eh->region_tree;
596 VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
597 cfun->eh->last_region_number + 1);
598 VEC_replace (eh_region, cfun->eh->region_array, 0, 0);
602 VEC_replace (eh_region, cfun->eh->region_array, i->region_number, i);
604 /* If there are sub-regions, process them. */
607 /* If there are peers, process them. */
608 else if (i->next_peer)
610 /* Otherwise, step back up the tree to the next peer. */
617 } while (i->next_peer == NULL);
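/* To make the walk above concrete: for a region tree where A's INNER is
   B, B's INNER is C, and B's NEXT_PEER is D, the loop records A, B, C,
   and then steps back up to record D.  The same descend / next-peer /
   climb idiom reappears below in can_be_reached_by_runtime.  */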
623 /* R is a MUST_NOT_THROW region that is not reachable via local
624 RESX instructions.  It must still be kept in the tree in case the runtime
625 can unwind through it, or else we would eliminate the terminate call the
626 runtime would otherwise make.  Return TRUE if R contains throwing statements
627 or some of the exceptions in inner regions can be unwound up to R.
629 CONTAINS_STMT is a bitmap of all regions that contain some throwing
632 The function looks O(n^3) at first sight.  In fact it is called at most
633 once for every MUST_NOT_THROW region in the EH tree from remove_unreachable_regions.
634 Because the outer loop walking subregions does not dive into MUST_NOT_THROW regions,
635 the outer loop examines every region at most once.  The inner loop
636 does the unwinding from the throwing statement the same way as we do during
637 CFG construction, so it is O(n^2) in the size of the EH tree, but O(n) in the size
638 of the CFG.  In practice EH trees are wide, not deep, so this is not
642 can_be_reached_by_runtime (sbitmap contains_stmt, struct eh_region *r)
644 struct eh_region *i = r->inner;
648 if (TEST_BIT (contains_stmt, r->region_number))
651 EXECUTE_IF_SET_IN_BITMAP (r->aka, 0, n, bi)
652 if (TEST_BIT (contains_stmt, n))
658 /* It is pointless to look into MUST_NOT_THROW
659 or dive into subregions. They never unwind up. */
660 if (i->type != ERT_MUST_NOT_THROW)
662 bool found = TEST_BIT (contains_stmt, i->region_number);
664 EXECUTE_IF_SET_IN_BITMAP (i->aka, 0, n, bi)
665 if (TEST_BIT (contains_stmt, n))
670 /* We have a nested region that contains a throwing statement.
671 See if resuming might lead up to the resx, or whether we get
672 caught locally sooner.  If we get caught locally sooner, we either
673 know region R is not reachable, or it would have a direct edge
674 from the EH resx and thus we consider the region reachable at
678 struct eh_region *i1 = i;
679 tree type_thrown = NULL_TREE;
681 if (i1->type == ERT_THROW)
683 type_thrown = i1->u.eh_throw.type;
686 for (; i1 != r; i1 = i1->outer)
687 if (reachable_next_level (i1, type_thrown, NULL,
688 false) >= RNL_CAUGHT)
694 /* If there are sub-regions, process them. */
695 if (i->type != ERT_MUST_NOT_THROW && i->inner)
697 /* If there are peers, process them. */
698 else if (i->next_peer)
700 /* Otherwise, step back up the tree to the next peer. */
709 while (i->next_peer == NULL);
715 /* Remove all regions whose labels are not reachable.
716 REACHABLE is bitmap of all regions that are used by the function
717 CONTAINS_STMT is bitmap of all regions that contains stmt (or NULL). */
719 remove_unreachable_regions (sbitmap reachable, sbitmap contains_stmt)
724 for (i = cfun->eh->last_region_number; i > 0; --i)
726 r = VEC_index (eh_region, cfun->eh->region_array, i);
729 if (r->region_number == i && !TEST_BIT (reachable, i) && !r->resume)
733 r->tree_label = NULL;
737 /* Don't remove ERT_THROW regions if their outer region
739 if (r->outer && TEST_BIT (reachable, r->outer->region_number))
742 case ERT_MUST_NOT_THROW:
743 /* MUST_NOT_THROW regions are implementable solely in the
744 runtime, but we need them when inlining functions.
746 Keep them if the outer region is not MUST_NOT_THROW as well
747 and if they contain some statement that might unwind through
749 if ((!r->outer || r->outer->type != ERT_MUST_NOT_THROW)
751 || can_be_reached_by_runtime (contains_stmt, r)))
756 /* TRY regions are reachable if any of their CATCH regions
759 for (c = r->u.eh_try.eh_catch; c;
760 c = c->u.eh_catch.next_catch)
761 if (TEST_BIT (reachable, c->region_number))
776 fprintf (dump_file, "Removing unreachable eh region %i\n",
778 remove_eh_handler (r);
782 #ifdef ENABLE_CHECKING
783 verify_eh_tree (cfun);
787 /* Return an array mapping LABEL_DECL_UID to the region whose tree_label
788 is identical to the label.  */
791 label_to_region_map (void)
793 VEC(int,heap) * label_to_region = NULL;
796 VEC_safe_grow_cleared (int, heap, label_to_region,
797 cfun->cfg->last_label_uid + 1);
798 for (i = cfun->eh->last_region_number; i > 0; --i)
800 struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
801 if (r && r->tree_label && LABEL_DECL_UID (r->tree_label) >= 0)
803 if ((unsigned) LABEL_DECL_UID (r->tree_label) >
804 VEC_length (int, label_to_region))
805 VEC_safe_grow_cleared (int, heap, label_to_region,
806 LABEL_DECL_UID (r->tree_label));
807 VEC_replace (int, label_to_region, LABEL_DECL_UID (r->tree_label),
811 return label_to_region;
814 /* Return number of EH regions. */
816 num_eh_regions (void)
818 return cfun->eh->last_region_number + 1;
821 /* Remove all regions whose labels are not reachable from insns. */
824 rtl_remove_unreachable_regions (rtx insns)
826 int i, *uid_region_num;
831 uid_region_num = XCNEWVEC (int, get_max_uid ());
832 reachable = sbitmap_alloc (cfun->eh->last_region_number + 1);
833 sbitmap_zero (reachable);
835 for (i = cfun->eh->last_region_number; i > 0; --i)
837 r = VEC_index (eh_region, cfun->eh->region_array, i);
838 if (!r || r->region_number != i)
843 gcc_assert (!uid_region_num[INSN_UID (r->resume)]);
844 uid_region_num[INSN_UID (r->resume)] = i;
848 gcc_assert (!uid_region_num[INSN_UID (r->label)]);
849 uid_region_num[INSN_UID (r->label)] = i;
853 for (insn = insns; insn; insn = NEXT_INSN (insn))
854 SET_BIT (reachable, uid_region_num[INSN_UID (insn)]);
856 remove_unreachable_regions (reachable, NULL);
858 sbitmap_free (reachable);
859 free (uid_region_num);
862 /* Set up EH labels for RTL. */
865 convert_from_eh_region_ranges (void)
867 rtx insns = get_insns ();
868 int i, n = cfun->eh->last_region_number;
870 /* Most of the work is already done at the tree level.  All we need to
871 do is collect the rtl labels that correspond to the tree labels
873 we allocated earlier.  */
874 for (i = 1; i <= n; ++i)
876 struct eh_region *region;
878 region = VEC_index (eh_region, cfun->eh->region_array, i);
879 if (region && region->tree_label)
880 region->label = DECL_RTL_IF_SET (region->tree_label);
883 rtl_remove_unreachable_regions (insns);
887 add_ehl_entry (rtx label, struct eh_region *region)
889 struct ehl_map_entry **slot, *entry;
891 LABEL_PRESERVE_P (label) = 1;
893 entry = GGC_NEW (struct ehl_map_entry);
894 entry->label = label;
895 entry->region = region;
897 slot = (struct ehl_map_entry **)
898 htab_find_slot (crtl->eh.exception_handler_label_map, entry, INSERT);
900 /* Before landing pad creation, each exception handler has its own
901 label. After landing pad creation, the exception handlers may
902 share landing pads. This is ok, since maybe_remove_eh_handler
903 only requires the 1-1 mapping before landing pad creation. */
904 gcc_assert (!*slot || crtl->eh.built_landing_pads);
910 find_exception_handler_labels (void)
914 if (crtl->eh.exception_handler_label_map)
915 htab_empty (crtl->eh.exception_handler_label_map);
918 /* ??? The expansion factor here (3/2) must be greater than the htab
919 occupancy factor (4/3) to avoid unnecessary resizing. */
920 crtl->eh.exception_handler_label_map
921 = htab_create_ggc (cfun->eh->last_region_number * 3 / 2,
922 ehl_hash, ehl_eq, NULL);
925 if (cfun->eh->region_tree == NULL)
928 for (i = cfun->eh->last_region_number; i > 0; --i)
930 struct eh_region *region;
933 region = VEC_index (eh_region, cfun->eh->region_array, i);
934 if (! region || region->region_number != i)
936 if (crtl->eh.built_landing_pads)
937 lab = region->landing_pad;
942 add_ehl_entry (lab, region);
945 /* For sjlj exceptions, need the return label to remain live until
946 after landing pad generation. */
947 if (USING_SJLJ_EXCEPTIONS && ! crtl->eh.built_landing_pads)
948 add_ehl_entry (return_label, NULL);
951 /* Returns true if the current function has exception handling regions. */
954 current_function_has_exception_handlers (void)
958 for (i = cfun->eh->last_region_number; i > 0; --i)
960 struct eh_region *region;
962 region = VEC_index (eh_region, cfun->eh->region_array, i);
964 && region->region_number == i
965 && region->type != ERT_THROW)
972 /* A subroutine of duplicate_eh_regions. Search the region tree under O
973 for the minimum and maximum region numbers. Update *MIN and *MAX. */
976 duplicate_eh_regions_0 (eh_region o, int *min, int *max)
982 i = bitmap_first_set_bit (o->aka);
985 i = bitmap_last_set_bit (o->aka);
989 if (o->region_number < *min)
990 *min = o->region_number;
991 if (o->region_number > *max)
992 *max = o->region_number;
997 duplicate_eh_regions_0 (o, min, max);
1001 duplicate_eh_regions_0 (o, min, max);
1006 /* A subroutine of duplicate_eh_regions. Copy the region tree under OLD.
1007 Root it at OUTER, and apply EH_OFFSET to the region number. Don't worry
1008 about the other internal pointers just yet, just the tree-like pointers. */
1011 duplicate_eh_regions_1 (eh_region old, eh_region outer, int eh_offset)
1015 ret = n = GGC_NEW (struct eh_region);
1019 n->next_peer = NULL;
1024 n->aka = BITMAP_GGC_ALLOC ();
1026 EXECUTE_IF_SET_IN_BITMAP (old->aka, 0, i, bi)
1028 bitmap_set_bit (n->aka, i + eh_offset);
1029 VEC_replace (eh_region, cfun->eh->region_array, i + eh_offset, n);
1033 n->region_number += eh_offset;
1034 VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);
1039 n = n->inner = duplicate_eh_regions_1 (old, ret, eh_offset);
1040 while (old->next_peer)
1042 old = old->next_peer;
1043 n = n->next_peer = duplicate_eh_regions_1 (old, ret, eh_offset);
1050 /* Duplicate the EH regions of IFUN, rooted at COPY_REGION, into the current
1051 function and root the tree below OUTER_REGION.  Remap labels using the MAP
1052 callback.  The special case of COPY_REGION of 0 means all regions.  */
1055 duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map,
1056 void *data, int copy_region, int outer_region)
1058 eh_region cur, prev_try, outer, *splice;
1059 int i, min_region, max_region, eh_offset, cfun_last_region_number;
1064 #ifdef ENABLE_CHECKING
1065 verify_eh_tree (ifun);
1068 /* Find the range of region numbers to be copied. The interface we
1069 provide here mandates a single offset to find new number from old,
1070 which means we must look at the numbers present, instead of the
1071 count or something else. */
1072 if (copy_region > 0)
1074 min_region = INT_MAX;
1077 cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
1078 duplicate_eh_regions_0 (cur, &min_region, &max_region);
1081 min_region = 1, max_region = ifun->eh->last_region_number;
1082 num_regions = max_region - min_region + 1;
1083 cfun_last_region_number = cfun->eh->last_region_number;
1084 eh_offset = cfun_last_region_number + 1 - min_region;
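/* A small worked example of the renumbering: if the copied regions of
   IFUN are numbered 3..7 (so MIN_REGION is 3) and this function's
   LAST_REGION_NUMBER is currently 10, then EH_OFFSET is 10 + 1 - 3 = 8
   and the copies will occupy region numbers 11..15 here.  */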
1086 /* If we've not yet created a region array, do so now. */
1087 cfun->eh->last_region_number = cfun_last_region_number + num_regions;
1088 VEC_safe_grow_cleared (eh_region, gc, cfun->eh->region_array,
1089 cfun->eh->last_region_number + 1);
1091 /* Locate the spot at which to insert the new tree. */
1092 if (outer_region > 0)
1094 outer = VEC_index (eh_region, cfun->eh->region_array, outer_region);
1096 splice = &outer->inner;
1098 splice = &cfun->eh->region_tree;
1103 splice = &cfun->eh->region_tree;
1106 splice = &(*splice)->next_peer;
1108 if (!ifun->eh->region_tree)
1111 for (i = cfun_last_region_number + 1;
1112 i <= cfun->eh->last_region_number; i++)
1114 VEC_replace (eh_region, cfun->eh->region_array, i, outer);
1115 if (outer->aka == NULL)
1116 outer->aka = BITMAP_GGC_ALLOC ();
1117 bitmap_set_bit (outer->aka, i);
1122 /* Copy all the regions in the subtree. */
1123 if (copy_region > 0)
1125 cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
1126 *splice = duplicate_eh_regions_1 (cur, outer, eh_offset);
1132 cur = ifun->eh->region_tree;
1133 *splice = n = duplicate_eh_regions_1 (cur, outer, eh_offset);
1134 while (cur->next_peer)
1136 cur = cur->next_peer;
1137 n = n->next_peer = duplicate_eh_regions_1 (cur, outer, eh_offset);
1141 /* Remap all the labels in the new regions. */
1142 for (i = cfun_last_region_number + 1;
1143 VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
1144 if (cur && cur->tree_label)
1145 cur->tree_label = map (cur->tree_label, data);
1147 /* Search for the containing ERT_TRY region to fix up
1148 the prev_try short-cuts for ERT_CLEANUP regions. */
1150 if (outer_region > 0)
1152 VEC_index (eh_region, cfun->eh->region_array, outer_region);
1153 prev_try && prev_try->type != ERT_TRY; prev_try = prev_try->outer)
1154 if (prev_try->type == ERT_MUST_NOT_THROW
1155 || (prev_try->type == ERT_ALLOWED_EXCEPTIONS
1156 && !prev_try->u.allowed.type_list))
1162 /* Remap all of the internal catch and cleanup linkages. Since we
1163 duplicate entire subtrees, all of the referenced regions will have
1164 been copied too. And since we renumbered them as a block, a simple
1165 bit of arithmetic finds us the index for the replacement region. */
1166 for (i = cfun_last_region_number + 1;
1167 VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
1169 /* All removed EH regions that were toplevel in the input function are now
1170 part of the outer EH region of the output function.  */
1173 gcc_assert (VEC_index
1174 (eh_region, ifun->eh->region_array,
1175 i - eh_offset) == NULL);
1178 VEC_replace (eh_region, cfun->eh->region_array, i, outer);
1179 if (outer->aka == NULL)
1180 outer->aka = BITMAP_GGC_ALLOC ();
1181 bitmap_set_bit (outer->aka, i);
1185 if (i != cur->region_number)
1188 #define REMAP(REG) \
1189 (REG) = VEC_index (eh_region, cfun->eh->region_array, \
1190 (REG)->region_number + eh_offset)
1195 if (cur->u.eh_try.eh_catch)
1196 REMAP (cur->u.eh_try.eh_catch);
1197 if (cur->u.eh_try.last_catch)
1198 REMAP (cur->u.eh_try.last_catch);
1202 if (cur->u.eh_catch.next_catch)
1203 REMAP (cur->u.eh_catch.next_catch);
1204 if (cur->u.eh_catch.prev_catch)
1205 REMAP (cur->u.eh_catch.prev_catch);
1209 if (cur->u.cleanup.prev_try)
1210 REMAP (cur->u.cleanup.prev_try);
1212 cur->u.cleanup.prev_try = prev_try;
1221 #ifdef ENABLE_CHECKING
1222 verify_eh_tree (cfun);
1228 /* Return true if REGION_A is outer to REGION_B in IFUN. */
1231 eh_region_outer_p (struct function *ifun, int region_a, int region_b)
1233 struct eh_region *rp_a, *rp_b;
1235 gcc_assert (ifun->eh->last_region_number > 0);
1236 gcc_assert (ifun->eh->region_tree);
1238 rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
1239 rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
1240 gcc_assert (rp_a != NULL);
1241 gcc_assert (rp_b != NULL);
1254 /* Return the region number of the region that is outer to both REGION_A and
1255 REGION_B in IFUN.  */
1258 eh_region_outermost (struct function *ifun, int region_a, int region_b)
1260 struct eh_region *rp_a, *rp_b;
1263 gcc_assert (ifun->eh->last_region_number > 0);
1264 gcc_assert (ifun->eh->region_tree);
1266 rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
1267 rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
1268 gcc_assert (rp_a != NULL);
1269 gcc_assert (rp_b != NULL);
1271 b_outer = sbitmap_alloc (ifun->eh->last_region_number + 1);
1272 sbitmap_zero (b_outer);
1276 SET_BIT (b_outer, rp_b->region_number);
1283 if (TEST_BIT (b_outer, rp_a->region_number))
1285 sbitmap_free (b_outer);
1286 return rp_a->region_number;
1292 sbitmap_free (b_outer);
1297 t2r_eq (const void *pentry, const void *pdata)
1299 const_tree const entry = (const_tree) pentry;
1300 const_tree const data = (const_tree) pdata;
1302 return TREE_PURPOSE (entry) == data;
1306 t2r_hash (const void *pentry)
1308 const_tree const entry = (const_tree) pentry;
1309 return TREE_HASH (TREE_PURPOSE (entry));
1313 add_type_for_runtime (tree type)
1317 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1318 TREE_HASH (type), INSERT);
1321 tree runtime = (*lang_eh_runtime_type) (type);
1322 *slot = tree_cons (type, runtime, NULL_TREE);
1327 lookup_type_for_runtime (tree type)
1331 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1332 TREE_HASH (type), NO_INSERT);
1334 /* We should have always inserted the data earlier. */
1335 return TREE_VALUE (*slot);
1339 /* Represent an entry in @TTypes for either catch actions
1340 or exception filter actions. */
1341 struct ttypes_filter GTY(())
1347 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
1348 (a tree) for a @TTypes type node we are thinking about adding. */
1351 ttypes_filter_eq (const void *pentry, const void *pdata)
1353 const struct ttypes_filter *const entry
1354 = (const struct ttypes_filter *) pentry;
1355 const_tree const data = (const_tree) pdata;
1357 return entry->t == data;
1361 ttypes_filter_hash (const void *pentry)
1363 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1364 return TREE_HASH (entry->t);
1367 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
1368 exception specification list we are thinking about adding. */
1369 /* ??? Currently we use the type lists in the order given. Someone
1370 should put these in some canonical order. */
1373 ehspec_filter_eq (const void *pentry, const void *pdata)
1375 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1376 const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
1378 return type_list_equal (entry->t, data->t);
1381 /* Hash function for exception specification lists. */
1384 ehspec_filter_hash (const void *pentry)
1386 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1390 for (list = entry->t; list ; list = TREE_CHAIN (list))
1391 h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
1395 /* Add TYPE (which may be NULL) to crtl->eh.ttype_data, using TYPES_HASH
1396 to speed up the search. Return the filter value to be used. */
1399 add_ttypes_entry (htab_t ttypes_hash, tree type)
1401 struct ttypes_filter **slot, *n;
1403 slot = (struct ttypes_filter **)
1404 htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);
1406 if ((n = *slot) == NULL)
1408 /* Filter value is a 1 based table index. */
1410 n = XNEW (struct ttypes_filter);
1412 n->filter = VEC_length (tree, crtl->eh.ttype_data) + 1;
1415 VEC_safe_push (tree, gc, crtl->eh.ttype_data, type);
1421 /* Add LIST to crtl->eh.ehspec_data, using EHSPEC_HASH and TYPES_HASH
1422 to speed up the search. Return the filter value to be used. */
1425 add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
1427 struct ttypes_filter **slot, *n;
1428 struct ttypes_filter dummy;
1431 slot = (struct ttypes_filter **)
1432 htab_find_slot (ehspec_hash, &dummy, INSERT);
1434 if ((n = *slot) == NULL)
1436 /* Filter value is a -1 based byte index into a uleb128 buffer. */
1438 n = XNEW (struct ttypes_filter);
1440 n->filter = -(VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) + 1);
1443 /* Generate a 0 terminated list of filter values. */
1444 for (; list ; list = TREE_CHAIN (list))
1446 if (targetm.arm_eabi_unwinder)
1447 VARRAY_PUSH_TREE (crtl->eh.ehspec_data, TREE_VALUE (list));
1450 /* Look up each type in the list and encode its filter
1451 value as a uleb128. */
1452 push_uleb128 (&crtl->eh.ehspec_data,
1453 add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
1456 if (targetm.arm_eabi_unwinder)
1457 VARRAY_PUSH_TREE (crtl->eh.ehspec_data, NULL_TREE);
1459 VARRAY_PUSH_UCHAR (crtl->eh.ehspec_data, 0);
1465 /* Generate the action filter values to be used for CATCH and
1466 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
1467 we use lots of landing pads, and so every type or list can share
1468 the same filter value, which saves table space. */
1471 assign_filter_values (void)
1474 htab_t ttypes, ehspec;
1476 crtl->eh.ttype_data = VEC_alloc (tree, gc, 16);
1477 if (targetm.arm_eabi_unwinder)
1478 VARRAY_TREE_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");
1480 VARRAY_UCHAR_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");
1482 ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
1483 ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
1485 for (i = cfun->eh->last_region_number; i > 0; --i)
1487 struct eh_region *r;
1489 r = VEC_index (eh_region, cfun->eh->region_array, i);
1491 /* Mind we don't process a region more than once. */
1492 if (!r || r->region_number != i)
1498 /* Whatever type_list is (NULL or true list), we build a list
1499 of filters for the region. */
1500 r->u.eh_catch.filter_list = NULL_TREE;
1502 if (r->u.eh_catch.type_list != NULL)
1504 /* Get a filter value for each of the types caught and store
1505 them in the region's dedicated list. */
1506 tree tp_node = r->u.eh_catch.type_list;
1508 for (;tp_node; tp_node = TREE_CHAIN (tp_node))
1510 int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
1511 tree flt_node = build_int_cst (NULL_TREE, flt);
1513 r->u.eh_catch.filter_list
1514 = tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
1519 /* Get a filter value for the NULL list also since it will need
1520 an action record anyway. */
1521 int flt = add_ttypes_entry (ttypes, NULL);
1522 tree flt_node = build_int_cst (NULL_TREE, flt);
1524 r->u.eh_catch.filter_list
1525 = tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
1530 case ERT_ALLOWED_EXCEPTIONS:
1532 = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
1540 htab_delete (ttypes);
1541 htab_delete (ehspec);
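/* To make the two flavors of filter value concrete: types added through
   add_ttypes_entry receive positive, 1-based indices into
   crtl->eh.ttype_data (the first distinct type gets 1, the next 2, and
   so on), while exception-specification lists added through
   add_ehspec_entry receive negative values, -1-based byte offsets into
   crtl->eh.ehspec_data.  So a "catch (A)" handler might end up with
   filter 1 and a "throw (A, B)" specification with filter -1; the exact
   numbers depend on the order in which regions are visited above.  */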
1544 /* Emit SEQ into the basic block just before INSN (which is assumed to be
1545 the first instruction of some existing BB) and return the newly
1548 emit_to_new_bb_before (rtx seq, rtx insn)
1555 /* If there happens to be a fallthru edge (possibly created by a cleanup_cfg
1556 call), we don't want it to go into the newly created landing pad or other EH
1558 for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
1559 if (e->flags & EDGE_FALLTHRU)
1560 force_nonfallthru (e);
1563 last = emit_insn_before (seq, insn);
1564 if (BARRIER_P (last))
1565 last = PREV_INSN (last);
1566 bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
1567 update_bb_for_insn (bb);
1568 bb->flags |= BB_SUPERBLOCK;
1572 /* Generate the code to actually handle exceptions, which will follow the
1576 build_post_landing_pads (void)
1580 for (i = cfun->eh->last_region_number; i > 0; --i)
1582 struct eh_region *region;
1585 region = VEC_index (eh_region, cfun->eh->region_array, i);
1586 /* Mind we don't process a region more than once. */
1587 if (!region || region->region_number != i)
1590 switch (region->type)
1593 /* ??? Collect the set of all non-overlapping catch handlers
1594 all the way up the chain until blocked by a cleanup. */
1595 /* ??? Outer try regions can share landing pads with inner
1596 try regions if the types are completely non-overlapping,
1597 and there are no intervening cleanups. */
1599 region->post_landing_pad = gen_label_rtx ();
1603 emit_label (region->post_landing_pad);
1605 /* ??? It is mighty inconvenient to call back into the
1606 switch statement generation code in expand_end_case.
1607 Rapid prototyping sez a sequence of ifs. */
1609 struct eh_region *c;
1610 for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
1612 if (c->u.eh_catch.type_list == NULL)
1613 emit_jump (c->label);
1616 /* We need one cmp/jump per type caught. Each type
1617 list entry has a matching entry in the filter list
1618 (see assign_filter_values). */
1619 tree tp_node = c->u.eh_catch.type_list;
1620 tree flt_node = c->u.eh_catch.filter_list;
1624 emit_cmp_and_jump_insns
1626 GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
1628 targetm.eh_return_filter_mode (), 0, c->label);
1630 tp_node = TREE_CHAIN (tp_node);
1631 flt_node = TREE_CHAIN (flt_node);
1637 /* We delay the generation of the _Unwind_Resume until we generate
1638 landing pads. We emit a marker here so as to get good control
1639 flow data in the meantime. */
1641 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1647 emit_to_new_bb_before (seq, region->u.eh_try.eh_catch->label);
1651 case ERT_ALLOWED_EXCEPTIONS:
1652 region->post_landing_pad = gen_label_rtx ();
1656 emit_label (region->post_landing_pad);
1658 emit_cmp_and_jump_insns (crtl->eh.filter,
1659 GEN_INT (region->u.allowed.filter),
1661 targetm.eh_return_filter_mode (), 0, region->label);
1663 /* We delay the generation of the _Unwind_Resume until we generate
1664 landing pads. We emit a marker here so as to get good control
1665 flow data in the meantime. */
1667 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1673 emit_to_new_bb_before (seq, region->label);
1677 case ERT_MUST_NOT_THROW:
1678 region->post_landing_pad = region->label;
1683 /* Nothing to do. */
1692 /* Replace RESX patterns with jumps to the next handler if any, or calls to
1693 _Unwind_Resume otherwise. */
1696 connect_post_landing_pads (void)
1700 for (i = cfun->eh->last_region_number; i > 0; --i)
1702 struct eh_region *region;
1703 struct eh_region *outer;
1707 region = VEC_index (eh_region, cfun->eh->region_array, i);
1708 /* Mind we don't process a region more than once. */
1709 if (!region || region->region_number != i)
1712 /* If there is no RESX, or it has been deleted by flow, there's
1713 nothing to fix up. */
1714 if (! region->resume || INSN_DELETED_P (region->resume))
1717 /* Search for another landing pad in this function. */
1718 for (outer = region->outer; outer ; outer = outer->outer)
1719 if (outer->post_landing_pad)
1727 basic_block src, dest;
1729 emit_jump (outer->post_landing_pad);
1730 src = BLOCK_FOR_INSN (region->resume);
1731 dest = BLOCK_FOR_INSN (outer->post_landing_pad);
1732 while (EDGE_COUNT (src->succs) > 0)
1733 remove_edge (EDGE_SUCC (src, 0));
1734 e = make_edge (src, dest, 0);
1735 e->probability = REG_BR_PROB_BASE;
1736 e->count = src->count;
1740 emit_library_call (unwind_resume_libfunc, LCT_THROW,
1741 VOIDmode, 1, crtl->eh.exc_ptr, ptr_mode);
1743 /* What we just emitted was a throwing libcall, so it got a
1744 barrier automatically added after it. If the last insn in
1745 the libcall sequence isn't the barrier, it's because the
1746 target emits multiple insns for a call, and there are insns
1747 after the actual call insn (which are redundant and would be
1748 optimized away). The barrier is inserted exactly after the
1749 call insn, so let's go get that and delete the insns after
1750 it, because below we need the barrier to be the last insn in
1752 delete_insns_since (NEXT_INSN (last_call_insn ()));
1757 barrier = emit_insn_before (seq, region->resume);
1758 /* Avoid duplicate barrier. */
1759 gcc_assert (BARRIER_P (barrier));
1760 delete_insn (barrier);
1761 delete_insn (region->resume);
1763 /* ??? From tree-ssa we can wind up with catch regions whose
1764 label is not instantiated, but whose resx is present. Now
1765 that we've dealt with the resx, kill the region. */
1766 if (region->label == NULL && region->type == ERT_CLEANUP)
1767 remove_eh_handler (region);
1773 dw2_build_landing_pads (void)
1777 for (i = cfun->eh->last_region_number; i > 0; --i)
1779 struct eh_region *region;
1784 region = VEC_index (eh_region, cfun->eh->region_array, i);
1785 /* Mind we don't process a region more than once. */
1786 if (!region || region->region_number != i)
1789 if (region->type != ERT_CLEANUP
1790 && region->type != ERT_TRY
1791 && region->type != ERT_ALLOWED_EXCEPTIONS)
1796 region->landing_pad = gen_label_rtx ();
1797 emit_label (region->landing_pad);
1799 #ifdef HAVE_exception_receiver
1800 if (HAVE_exception_receiver)
1801 emit_insn (gen_exception_receiver ());
1804 #ifdef HAVE_nonlocal_goto_receiver
1805 if (HAVE_nonlocal_goto_receiver)
1806 emit_insn (gen_nonlocal_goto_receiver ());
1811 emit_move_insn (crtl->eh.exc_ptr,
1812 gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
1813 emit_move_insn (crtl->eh.filter,
1814 gen_rtx_REG (targetm.eh_return_filter_mode (),
1815 EH_RETURN_DATA_REGNO (1)));
1820 bb = emit_to_new_bb_before (seq, region->post_landing_pad);
1821 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
1822 e->count = bb->count;
1823 e->probability = REG_BR_PROB_BASE;
1830 int directly_reachable;
1833 int call_site_index;
1837 sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
1840 bool found_one = false;
1842 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1844 struct eh_region *region;
1845 enum reachable_code rc;
1849 if (! INSN_P (insn))
1852 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1853 if (!note || INTVAL (XEXP (note, 0)) <= 0)
1856 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
1858 type_thrown = NULL_TREE;
1859 if (region->type == ERT_THROW)
1861 type_thrown = region->u.eh_throw.type;
1862 region = region->outer;
1865 /* Find the first containing region that might handle the exception.
1866 That's the landing pad to which we will transfer control. */
1867 rc = RNL_NOT_CAUGHT;
1868 for (; region; region = region->outer)
1870 rc = reachable_next_level (region, type_thrown, NULL, false);
1871 if (rc != RNL_NOT_CAUGHT)
1874 if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
1876 lp_info[region->region_number].directly_reachable = 1;
1885 sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
1890 /* First task: build the action table. */
1892 VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
1893 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
1895 for (i = cfun->eh->last_region_number; i > 0; --i)
1896 if (lp_info[i].directly_reachable)
1898 struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
1900 r->landing_pad = dispatch_label;
1901 lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
1902 if (lp_info[i].action_index != -1)
1903 crtl->uses_eh_lsda = 1;
1906 htab_delete (ar_hash);
1908 /* Next: assign dispatch values. In dwarf2 terms, this would be the
1909 landing pad label for the region. For sjlj though, there is one
1910 common landing pad from which we dispatch to the post-landing pads.
1912 A region receives a dispatch index if it is directly reachable
1913 and requires in-function processing. Regions that share post-landing
1914 pads may share dispatch indices. */
1915 /* ??? Post-landing pad sharing doesn't actually happen at the moment
1916 (see build_post_landing_pads) so we don't bother checking for it. */
1919 for (i = cfun->eh->last_region_number; i > 0; --i)
1920 if (lp_info[i].directly_reachable)
1921 lp_info[i].dispatch_index = index++;
1923 /* Finally: assign call-site values.  In dwarf2 terms, this would be
1924 the region number assigned by convert_to_eh_region_ranges, but
1925 here we handle no-action and must-not-throw differently.  */
1928 for (i = cfun->eh->last_region_number; i > 0; --i)
1929 if (lp_info[i].directly_reachable)
1931 int action = lp_info[i].action_index;
1933 /* Map must-not-throw to otherwise unused call-site index 0. */
1936 /* Map no-action to otherwise unused call-site index -1. */
1937 else if (action == -1)
1939 /* Otherwise, look it up in the table. */
1941 index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
1943 lp_info[i].call_site_index = index;
1948 sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
1950 int last_call_site = -2;
1953 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1955 struct eh_region *region;
1957 rtx note, before, p;
1959 /* Reset value tracking at extended basic block boundaries. */
1961 last_call_site = -2;
1963 if (! INSN_P (insn))
1966 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1969 /* Calls (and trapping insns) without notes are outside any
1970 exception handling region in this function. Mark them as
1973 || (flag_non_call_exceptions
1974 && may_trap_p (PATTERN (insn))))
1975 this_call_site = -1;
1981 /* Calls that are known to not throw need not be marked. */
1982 if (INTVAL (XEXP (note, 0)) <= 0)
1985 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
1986 this_call_site = lp_info[region->region_number].call_site_index;
1989 if (this_call_site == last_call_site)
1992 /* Don't separate a call from its argument loads.  */
1995 before = find_first_parameter_load (insn, NULL_RTX);
1998 mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
1999 sjlj_fc_call_site_ofs);
2000 emit_move_insn (mem, GEN_INT (this_call_site));
2004 emit_insn_before (p, before);
2005 last_call_site = this_call_site;
2009 /* Construct the SjLj_Function_Context. */
2012 sjlj_emit_function_enter (rtx dispatch_label)
2014 rtx fn_begin, fc, mem, seq;
2015 bool fn_begin_outside_block;
2017 fc = crtl->eh.sjlj_fc;
2021 /* We're storing this libcall's address into memory instead of
2022 calling it directly.  Thus, we must call assemble_external_libcall
2023 here, as we cannot depend on emit_library_call to do it for us.  */
2024 assemble_external_libcall (eh_personality_libfunc);
2025 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
2026 emit_move_insn (mem, eh_personality_libfunc);
2028 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
2029 if (crtl->uses_eh_lsda)
2034 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
2035 sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
2036 SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
2037 emit_move_insn (mem, sym);
2040 emit_move_insn (mem, const0_rtx);
2042 #ifdef DONT_USE_BUILTIN_SETJMP
2045 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
2046 TYPE_MODE (integer_type_node), 1,
2047 plus_constant (XEXP (fc, 0),
2048 sjlj_fc_jbuf_ofs), Pmode);
2050 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2051 TYPE_MODE (integer_type_node), 0, dispatch_label);
2052 add_reg_br_prob_note (get_insns (), REG_BR_PROB_BASE/100);
2055 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2059 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2060 1, XEXP (fc, 0), Pmode);
2065 /* ??? Instead of doing this at the beginning of the function,
2066 do this in a block that is at loop level 0 and dominates all
2067 can_throw_internal instructions. */
2069 fn_begin_outside_block = true;
2070 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2071 if (NOTE_P (fn_begin))
2073 if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2075 else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
2076 fn_begin_outside_block = false;
2079 if (fn_begin_outside_block)
2080 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
2082 emit_insn_after (seq, fn_begin);
2085 /* Call back from expand_function_end to know where we should put
2086 the call to unwind_sjlj_unregister_libfunc if needed. */
2089 sjlj_emit_function_exit_after (rtx after)
2091 crtl->eh.sjlj_exit_after = after;
2095 sjlj_emit_function_exit (void)
2103 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2104 1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);
2109 /* ??? Really this can be done in any block at loop level 0 that
2110 post-dominates all can_throw_internal instructions. This is
2111 the last possible moment. */
2113 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
2114 if (e->flags & EDGE_FALLTHRU)
2120 /* Figure out whether the place we are supposed to insert the libcall
2121 is inside the last basic block or after it.  In the latter case
2122 we need to emit it on the edge.  */
2123 gcc_assert (e->src->next_bb == EXIT_BLOCK_PTR);
2124 for (insn = BB_HEAD (e->src); ; insn = NEXT_INSN (insn))
2126 if (insn == crtl->eh.sjlj_exit_after)
2129 insn = NEXT_INSN (insn);
2130 emit_insn_after (seq, insn);
2133 if (insn == BB_END (e->src))
2136 insert_insn_on_edge (seq, e);
2141 sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
2143 enum machine_mode unwind_word_mode = targetm.unwind_word_mode ();
2144 enum machine_mode filter_mode = targetm.eh_return_filter_mode ();
2145 int i, first_reachable;
2146 rtx mem, dispatch, seq, fc;
2151 fc = crtl->eh.sjlj_fc;
2155 emit_label (dispatch_label);
2157 #ifndef DONT_USE_BUILTIN_SETJMP
2158 expand_builtin_setjmp_receiver (dispatch_label);
2161 /* Load up dispatch index, exc_ptr and filter values from the
2162 function context. */
2163 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2164 sjlj_fc_call_site_ofs);
2165 dispatch = copy_to_reg (mem);
2167 mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
2168 if (unwind_word_mode != ptr_mode)
2170 #ifdef POINTERS_EXTEND_UNSIGNED
2171 mem = convert_memory_address (ptr_mode, mem);
2173 mem = convert_to_mode (ptr_mode, mem, 0);
2176 emit_move_insn (crtl->eh.exc_ptr, mem);
2178 mem = adjust_address (fc, unwind_word_mode,
2179 sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
2180 if (unwind_word_mode != filter_mode)
2181 mem = convert_to_mode (filter_mode, mem, 0);
2182 emit_move_insn (crtl->eh.filter, mem);
2184 /* Jump to one of the directly reachable regions. */
2185 /* ??? This really ought to be using a switch statement. */
2187 first_reachable = 0;
2188 for (i = cfun->eh->last_region_number; i > 0; --i)
2190 if (! lp_info[i].directly_reachable)
2193 if (! first_reachable)
2195 first_reachable = i;
2199 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2200 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
2201 ((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, i))
2202 ->post_landing_pad);
2208 before = (((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, first_reachable))
2209 ->post_landing_pad);
2211 bb = emit_to_new_bb_before (seq, before);
2212 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
2213 e->count = bb->count;
2214 e->probability = REG_BR_PROB_BASE;
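/* Conceptually, the dispatch code emitted above behaves like the switch
   sketched below, though as the ??? comment notes it is really a chain
   of compare-and-branch insns:

       switch (fc.__call_site)
         {
         case D1: goto post_landing_pad_of_region_1;
         case D2: goto post_landing_pad_of_region_2;
         ...
         default: goto post_landing_pad_of_first_reachable_region;
         }

   where the Dn are the dispatch_index values assigned by
   sjlj_assign_call_site_values.  */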
2218 sjlj_build_landing_pads (void)
2220 struct sjlj_lp_info *lp_info;
2222 lp_info = XCNEWVEC (struct sjlj_lp_info, cfun->eh->last_region_number + 1);
2224 if (sjlj_find_directly_reachable_regions (lp_info))
2226 rtx dispatch_label = gen_label_rtx ();
2227 int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
2228 TYPE_MODE (sjlj_fc_type_node),
2229 TYPE_ALIGN (sjlj_fc_type_node));
2231 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2232 int_size_in_bytes (sjlj_fc_type_node),
2235 sjlj_assign_call_site_values (dispatch_label, lp_info);
2236 sjlj_mark_call_sites (lp_info);
2238 sjlj_emit_function_enter (dispatch_label);
2239 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2240 sjlj_emit_function_exit ();
2247 finish_eh_generation (void)
2251 /* Nothing to do if no regions created. */
2252 if (cfun->eh->region_tree == NULL)
2255 /* The object here is to provide find_basic_blocks with detailed
2256 information (via reachable_handlers) on how exception control
2257 flows within the function. In this first pass, we can include
2258 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2259 regions, and hope that it will be useful in deleting unreachable
2260 handlers. Subsequently, we will generate landing pads which will
2261 connect many of the handlers, and then type information will not
2262 be effective. Still, this is a win over previous implementations. */
2264 /* These registers are used by the landing pads. Make sure they
2265 have been generated. */
2266 get_exception_pointer ();
2267 get_exception_filter ();
2269 /* Construct the landing pads. */
2271 assign_filter_values ();
2272 build_post_landing_pads ();
2273 connect_post_landing_pads ();
2274 if (USING_SJLJ_EXCEPTIONS)
2275 sjlj_build_landing_pads ();
2277 dw2_build_landing_pads ();
2279 crtl->eh.built_landing_pads = 1;
2281 /* We've totally changed the CFG. Start over. */
2282 find_exception_handler_labels ();
2283 break_superblocks ();
2284 if (USING_SJLJ_EXCEPTIONS
2285 /* Kludge for Alpha/Tru64 (see alpha_gp_save_rtx). */
2286 || single_succ_edge (ENTRY_BLOCK_PTR)->insns.r)
2287 commit_edge_insertions ();
2293 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2295 if (e->flags & EDGE_EH)
2304 rtl_make_eh_edge (NULL, bb, BB_END (bb));
2309 ehl_hash (const void *pentry)
2311 const struct ehl_map_entry *const entry
2312 = (const struct ehl_map_entry *) pentry;
2314 /* 2^32 * ((sqrt(5) - 1) / 2) */
2315 const hashval_t scaled_golden_ratio = 0x9e3779b9;
2316 return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
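/* This is the classic multiplicative (Fibonacci) hashing scheme:
   multiplying by 2^32 times the golden-ratio fraction spreads
   consecutive label numbers fairly evenly over the 32-bit hash space.  */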
2320 ehl_eq (const void *pentry, const void *pdata)
2322 const struct ehl_map_entry *const entry
2323 = (const struct ehl_map_entry *) pentry;
2324 const struct ehl_map_entry *const data
2325 = (const struct ehl_map_entry *) pdata;
2327 return entry->label == data->label;
2330 /* This section handles removing dead code for flow. */
2332 /* Remove LABEL from exception_handler_label_map. */
2335 remove_exception_handler_label (rtx label)
2337 struct ehl_map_entry **slot, tmp;
2339 /* If exception_handler_label_map was not built yet,
2340 there is nothing to do. */
2341 if (crtl->eh.exception_handler_label_map == NULL)
2345 slot = (struct ehl_map_entry **)
2346 htab_find_slot (crtl->eh.exception_handler_label_map, &tmp, NO_INSERT);
2349 htab_clear_slot (crtl->eh.exception_handler_label_map, (void **) slot);
2352 /* Splice REGION from the region tree etc. */
2355 remove_eh_handler (struct eh_region *region)
2357 struct eh_region **pp, **pp_start, *p, *outer, *inner;
2360 /* For the benefit of efficiently handling REG_EH_REGION notes,
2361 replace this region in the region array with its containing
2362 region. Note that previous region deletions may result in
2363 multiple copies of this region in the array, so we have a
2364 list of alternate numbers by which we are known. */
2366 outer = region->outer;
2367 VEC_replace (eh_region, cfun->eh->region_array, region->region_number, outer);
2373 EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
2375 VEC_replace (eh_region, cfun->eh->region_array, i, outer);
2382 outer->aka = BITMAP_GGC_ALLOC ();
2384 bitmap_ior_into (outer->aka, region->aka);
2385 bitmap_set_bit (outer->aka, region->region_number);
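/* For illustration: if region 5 is removed and its immediate outer
   region is 3, then region_array slot 5 now points at region 3 and
   bit 5 is recorded in region 3's "aka" bitmap.  A REG_EH_REGION note
   still carrying the number 5 therefore resolves to region 3, and if
   region 3 is later removed, its replacement inherits the whole
   bitmap through the bitmap_ior_into above.  */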
2388 if (crtl->eh.built_landing_pads)
2389 lab = region->landing_pad;
2391 lab = region->label;
2393 remove_exception_handler_label (lab);
2396 pp_start = &outer->inner;
2398 pp_start = &cfun->eh->region_tree;
2399 for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
2401 *pp = region->next_peer;
2403 inner = region->inner;
2406 for (p = inner; p->next_peer ; p = p->next_peer)
2410 p->next_peer = *pp_start;
2414 if (region->type == ERT_CATCH)
2416 struct eh_region *eh_try, *next, *prev;
2418 for (eh_try = region->next_peer;
2419 eh_try->type == ERT_CATCH;
2420 eh_try = eh_try->next_peer)
2422 gcc_assert (eh_try->type == ERT_TRY);
2424 next = region->u.eh_catch.next_catch;
2425 prev = region->u.eh_catch.prev_catch;
2428 next->u.eh_catch.prev_catch = prev;
2430 eh_try->u.eh_try.last_catch = prev;
2432 prev->u.eh_catch.next_catch = next;
2435 eh_try->u.eh_try.eh_catch = next;
2437 remove_eh_handler (eh_try);
2442 /* LABEL heads a basic block that is about to be deleted. If this
2443 label corresponds to an exception region, we may be able to
2444 delete the region. */
2447 maybe_remove_eh_handler (rtx label)
2449 struct ehl_map_entry **slot, tmp;
2450 struct eh_region *region;
2452 /* ??? After generating landing pads, it's not so simple to determine
2453 if the region data is completely unused. One must examine the
2454 landing pad and the post landing pad, and whether an inner try block
2455 is referencing the catch handlers directly. */
2456 if (crtl->eh.built_landing_pads)
2460 slot = (struct ehl_map_entry **)
2461 htab_find_slot (crtl->eh.exception_handler_label_map, &tmp, NO_INSERT);
2464 region = (*slot)->region;
2468 /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2469 because there is no path to the fallback call to terminate.
2470 But the region continues to affect call-site data until there
2471 are no more contained calls, which we don't see here. */
2472 if (region->type == ERT_MUST_NOT_THROW)
2474 htab_clear_slot (crtl->eh.exception_handler_label_map, (void **) slot);
2475 region->label = NULL_RTX;
2478 remove_eh_handler (region);
2481 /* Remove EH region R that has turned out to have no code in its handler. */
2484 remove_eh_region (int r)
2486 struct eh_region *region;
2488 region = VEC_index (eh_region, cfun->eh->region_array, r);
2489 remove_eh_handler (region);
2492 /* Invokes CALLBACK for every exception handler label. Only used by old
2493 loop hackery; should not be used by new code. */
2496 for_each_eh_label (void (*callback) (rtx))
2498 htab_traverse (crtl->eh.exception_handler_label_map, for_each_eh_label_1,
2499 (void *) &callback);
2503 for_each_eh_label_1 (void **pentry, void *data)
2505 struct ehl_map_entry *entry = *(struct ehl_map_entry **)pentry;
2506 void (*callback) (rtx) = *(void (**) (rtx)) data;
2508 (*callback) (entry->label);
2512 /* Invoke CALLBACK for every exception region in the current function. */
2515 for_each_eh_region (void (*callback) (struct eh_region *))
2517 int i, n = cfun->eh->last_region_number;
2518 for (i = 1; i <= n; ++i)
2520 struct eh_region *region;
2522 region = VEC_index (eh_region, cfun->eh->region_array, i);
2524 (*callback) (region);
2528 /* This section describes CFG exception edges for flow. */
2530 /* For communicating between calls to reachable_next_level. */
2531 struct reachable_info
2535 void (*callback) (struct eh_region *, void *);
2536 void *callback_data;
2539 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2540 base class of TYPE, is in HANDLED. */
2543 check_handled (tree handled, tree type)
2547 /* We can check for exact matches without front-end help. */
2548 if (! lang_eh_type_covers)
2550 for (t = handled; t ; t = TREE_CHAIN (t))
2551 if (TREE_VALUE (t) == type)
2556 for (t = handled; t ; t = TREE_CHAIN (t))
2557 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2564 /* A subroutine of reachable_next_level. If we are collecting a list
2565 of handlers, add one. After landing pad generation, reference
2566 it instead of the handlers themselves. Further, the handlers are
2567 all wired together, so by referencing one, we've got them all.
2568 Before landing pad generation we reference each handler individually.
2570 LP_REGION contains the landing pad; REGION is the handler. */
2573 add_reachable_handler (struct reachable_info *info,
2574 struct eh_region *lp_region, struct eh_region *region)
2579 if (crtl->eh.built_landing_pads)
2580 info->callback (lp_region, info->callback_data);
2582 info->callback (region, info->callback_data);
2585 /* Process one level of exception regions for reachability.
2586 If TYPE_THROWN is non-null, then it is the *exact* type being
2587 propagated. If INFO is non-null, then collect handler labels
2588 and caught/allowed type information between invocations. */
2590 static enum reachable_code
2591 reachable_next_level (struct eh_region *region, tree type_thrown,
2592 struct reachable_info *info,
2595 switch (region->type)
2598 /* Before landing-pad generation, we model control flow
2599 directly to the individual handlers. In this way we can
2600 see that catch handler types may shadow one another. */
2601 add_reachable_handler (info, region, region);
2602 return RNL_MAYBE_CAUGHT;
2606 struct eh_region *c;
2607 enum reachable_code ret = RNL_NOT_CAUGHT;
2609 for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
2611 /* A catch-all handler ends the search. */
2612 if (c->u.eh_catch.type_list == NULL)
2614 add_reachable_handler (info, region, c);
2620 /* If we have at least one type match, end the search. */
2621 tree tp_node = c->u.eh_catch.type_list;
2623 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2625 tree type = TREE_VALUE (tp_node);
2627 if (type == type_thrown
2628 || (lang_eh_type_covers
2629 && (*lang_eh_type_covers) (type, type_thrown)))
2631 add_reachable_handler (info, region, c);
2636 /* If the front end gave us definitive type information and none of
2637 the types matched, the catch won't trigger. */
2638 if (lang_eh_type_covers)
2639 return RNL_NOT_CAUGHT;
2642 /* At this point, we either don't know what type is thrown or
2643 don't have front-end assistance to help deciding if it is
2644 covered by one of the types in the list for this region.
2646 We'd then like to add this region to the list of reachable
2647 handlers since it is indeed potentially reachable based on the
2648 information we have.
2650 Actually, this handler is for sure not reachable if all the
2651 types it matches have already been caught. That is, it is only
2652 potentially reachable if at least one of the types it catches
2653 has not been previously caught. */
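/* For illustration: if two catch handlers in the same try list the
   same type T, the first one enters T into types_caught and remains
   potentially reachable, while the second finds every type it lists
   already in types_caught, so maybe_reachable stays false below and
   no edge is added for it.  */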
2656 ret = RNL_MAYBE_CAUGHT;
2659 tree tp_node = c->u.eh_catch.type_list;
2660 bool maybe_reachable = false;
2662 /* Compute the potential reachability of this handler and
2663 update the list of types caught at the same time. */
2664 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2666 tree type = TREE_VALUE (tp_node);
2668 if (! check_handled (info->types_caught, type))
2671 = tree_cons (NULL, type, info->types_caught);
2673 maybe_reachable = true;
2677 if (maybe_reachable)
2679 add_reachable_handler (info, region, c);
2681 /* ??? If the catch type is a base class of every allowed
2682 type, then we know we can stop the search. */
2683 ret = RNL_MAYBE_CAUGHT;
2691 case ERT_ALLOWED_EXCEPTIONS:
2692 /* An empty list of types definitely ends the search. */
2693 if (region->u.allowed.type_list == NULL_TREE)
2695 add_reachable_handler (info, region, region);
2699 /* Collect a list of lists of allowed types for use in detecting
2700 when a catch may be transformed into a catch-all. */
2702 info->types_allowed = tree_cons (NULL_TREE,
2703 region->u.allowed.type_list,
2704 info->types_allowed);
2706 /* If we have definitive information about the type hierarchy,
2707 then we can tell if the thrown type will pass through the
2709 if (type_thrown && lang_eh_type_covers)
2711 if (check_handled (region->u.allowed.type_list, type_thrown))
2712 return RNL_NOT_CAUGHT;
2715 add_reachable_handler (info, region, region);
2720 add_reachable_handler (info, region, region);
2721 return RNL_MAYBE_CAUGHT;
2724 /* Catch regions are handled by their controlling try region. */
2725 return RNL_NOT_CAUGHT;
2727 case ERT_MUST_NOT_THROW:
2728 /* Here we end our search, since no exceptions may propagate.
2730 Local landing pads of ERT_MUST_NOT_THROW instructions are reachable
2731 only via locally handled RESX instructions.
2733 When we inline a function call, we can bring in new handlers.  In order
2734 to avoid ERT_MUST_NOT_THROW landing pads being deleted as unreachable,
2735 assume that such handlers exist for any inlinable call until the
2736 inlining decisions are fixed. */
2740 add_reachable_handler (info, region, region);
2748 /* Shouldn't see these here. */
2756 /* Invoke CALLBACK on each region reachable from REGION_NUMBER. */
2759 foreach_reachable_handler (int region_number, bool is_resx, bool inlinable_call,
2760 void (*callback) (struct eh_region *, void *),
2761 void *callback_data)
2763 struct reachable_info info;
2764 struct eh_region *region;
2767 memset (&info, 0, sizeof (info));
2768 info.callback = callback;
2769 info.callback_data = callback_data;
2771 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
2775 type_thrown = NULL_TREE;
2778 /* A RESX leaves a region instead of entering it. Thus the
2779 region itself may have been deleted out from under us. */
2782 region = region->outer;
2784 else if (region->type == ERT_THROW)
2786 type_thrown = region->u.eh_throw.type;
2787 region = region->outer;
2792 if (reachable_next_level (region, type_thrown, &info,
2793 inlinable_call || is_resx) >= RNL_CAUGHT)
2795 /* If we have processed one cleanup, there is no point in
2796 processing any more of them. Each cleanup will have an edge
2797 to the next outer cleanup region, so the flow graph will be accurate. */
2799 if (region->type == ERT_CLEANUP)
2800 region = region->u.cleanup.prev_try;
2802 region = region->outer;
2806 /* Retrieve a list of labels of exception handlers which can be
2807 reached by a given insn. */
2810 arh_to_landing_pad (struct eh_region *region, void *data)
2812 rtx *p_handlers = (rtx *) data;
2814 *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
2818 arh_to_label (struct eh_region *region, void *data)
2820 rtx *p_handlers = (rtx *) data;
2821 *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
2825 reachable_handlers (rtx insn)
2827 bool is_resx = false;
2828 rtx handlers = NULL;
2832 && GET_CODE (PATTERN (insn)) == RESX)
2834 region_number = XINT (PATTERN (insn), 0);
2839 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2840 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2842 region_number = INTVAL (XEXP (note, 0));
2845 foreach_reachable_handler (region_number, is_resx, false,
2846 (crtl->eh.built_landing_pads
2847 ? arh_to_landing_pad
2854 /* Determine if the given INSN can throw an exception that is caught
2855 within the function. */
2858 can_throw_internal_1 (int region_number, bool is_resx, bool inlinable_call)
2860 struct eh_region *region;
2863 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
2867 type_thrown = NULL_TREE;
2869 region = region->outer;
2870 else if (region->type == ERT_THROW)
2872 type_thrown = region->u.eh_throw.type;
2873 region = region->outer;
2876 /* If this exception is ignored by each and every containing region,
2877 then control passes straight out. The runtime may handle some
2878 regions, which also do not require processing internally. */
2879 for (; region; region = region->outer)
2881 enum reachable_code how = reachable_next_level (region, type_thrown, 0,
2882 inlinable_call || is_resx);
2883 if (how == RNL_BLOCKED)
2885 if (how != RNL_NOT_CAUGHT)
2893 can_throw_internal (const_rtx insn)
2897 if (! INSN_P (insn))
2901 && GET_CODE (PATTERN (insn)) == RESX
2902 && XINT (PATTERN (insn), 0) > 0)
2903 return can_throw_internal_1 (XINT (PATTERN (insn), 0), true, false);
2905 if (NONJUMP_INSN_P (insn)
2906 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2907 insn = XVECEXP (PATTERN (insn), 0, 0);
2909 /* Every insn that might throw has an EH_REGION note. */
2910 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2911 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2914 return can_throw_internal_1 (INTVAL (XEXP (note, 0)), false, false);
2917 /* Determine if the given INSN can throw an exception that is
2918 visible outside the function. */
2921 can_throw_external_1 (int region_number, bool is_resx, bool inlinable_call)
2923 struct eh_region *region;
2926 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
2930 type_thrown = NULL_TREE;
2932 region = region->outer;
2933 else if (region->type == ERT_THROW)
2935 type_thrown = region->u.eh_throw.type;
2936 region = region->outer;
2939 /* If the exception is caught or blocked by any containing region,
2940 then it is not seen by any calling function. */
2941 for (; region ; region = region->outer)
2942 if (reachable_next_level (region, type_thrown, NULL,
2943 inlinable_call || is_resx) >= RNL_CAUGHT)
2950 can_throw_external (const_rtx insn)
2954 if (! INSN_P (insn))
2958 && GET_CODE (PATTERN (insn)) == RESX
2959 && XINT (PATTERN (insn), 0) > 0)
2960 return can_throw_external_1 (XINT (PATTERN (insn), 0), true, false);
2962 if (NONJUMP_INSN_P (insn)
2963 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2964 insn = XVECEXP (PATTERN (insn), 0, 0);
2966 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2969 /* Calls (and trapping insns) without notes are outside any
2970 exception handling region in this function. We have to
2971 assume it might throw. Given that the front end and middle
2972 ends mark known NOTHROW functions, this isn't so wildly inaccurate. */
2974 return (CALL_P (insn)
2975 || (flag_non_call_exceptions
2976 && may_trap_p (PATTERN (insn))));
2978 if (INTVAL (XEXP (note, 0)) <= 0)
2981 return can_throw_external_1 (INTVAL (XEXP (note, 0)), false, false);
2984 /* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls. */
2987 set_nothrow_function_flags (void)
2993 /* Assume crtl->all_throwers_are_sibcalls until we encounter
2994 something that can throw an exception. We specifically exempt
2995 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
2996 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
2999 crtl->all_throwers_are_sibcalls = 1;
3001 /* If we don't know that this implementation of the function will
3002 actually be used, then we must not set TREE_NOTHROW, since
3003 callers must not assume that this function does not throw. */
3004 if (TREE_NOTHROW (current_function_decl))
3007 if (! flag_exceptions)
3010 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3011 if (can_throw_external (insn))
3015 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
3017 crtl->all_throwers_are_sibcalls = 0;
3022 for (insn = crtl->epilogue_delay_list; insn;
3023 insn = XEXP (insn, 1))
3024 if (can_throw_external (insn))
3028 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
3030 crtl->all_throwers_are_sibcalls = 0;
3035 && (cgraph_function_body_availability (cgraph_node (current_function_decl))
3036 >= AVAIL_AVAILABLE))
3037 TREE_NOTHROW (current_function_decl) = 1;
3041 struct rtl_opt_pass pass_set_nothrow_function_flags =
3047 set_nothrow_function_flags, /* execute */
3050 0, /* static_pass_number */
3052 0, /* properties_required */
3053 0, /* properties_provided */
3054 0, /* properties_destroyed */
3055 0, /* todo_flags_start */
3056 0, /* todo_flags_finish */
3061 /* Various hooks for unwind library. */
3063 /* Do any necessary initialization to access arbitrary stack frames.
3064 On the SPARC, this means flushing the register windows. */
3067 expand_builtin_unwind_init (void)
3069 /* Set this so all the registers get saved in our frame; we need to be
3070 able to copy the saved values for any registers from frames we unwind. */
3071 crtl->saves_all_registers = 1;
3073 #ifdef SETUP_FRAME_ADDRESSES
3074 SETUP_FRAME_ADDRESSES ();
3079 expand_builtin_eh_return_data_regno (tree exp)
3081 tree which = CALL_EXPR_ARG (exp, 0);
3082 unsigned HOST_WIDE_INT iwhich;
3084 if (TREE_CODE (which) != INTEGER_CST)
3086 error ("argument of %<__builtin_eh_return_regno%> must be constant");
3090 iwhich = tree_low_cst (which, 1);
3091 iwhich = EH_RETURN_DATA_REGNO (iwhich);
3092 if (iwhich == INVALID_REGNUM)
3095 #ifdef DWARF_FRAME_REGNUM
3096 iwhich = DWARF_FRAME_REGNUM (iwhich);
3098 iwhich = DBX_REGISTER_NUMBER (iwhich);
3101 return GEN_INT (iwhich);
3104 /* Given a value extracted from the return address register or stack slot,
3105 return the actual address encoded in that value. */
3108 expand_builtin_extract_return_addr (tree addr_tree)
3110 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
3112 if (GET_MODE (addr) != Pmode
3113 && GET_MODE (addr) != VOIDmode)
3115 #ifdef POINTERS_EXTEND_UNSIGNED
3116 addr = convert_memory_address (Pmode, addr);
3118 addr = convert_to_mode (Pmode, addr, 0);
3122 /* First mask out any unwanted bits. */
3123 #ifdef MASK_RETURN_ADDR
3124 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
3127 /* Then adjust to find the real return address. */
3128 #if defined (RETURN_ADDR_OFFSET)
3129 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
3135 /* Given an actual address in addr_tree, do any necessary encoding
3136 and return the value to be stored in the return address register or
3137 stack slot so the epilogue will return to that address. */
3140 expand_builtin_frob_return_addr (tree addr_tree)
3142 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
3144 addr = convert_memory_address (Pmode, addr);
3146 #ifdef RETURN_ADDR_OFFSET
3147 addr = force_reg (Pmode, addr);
3148 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
3154 /* Set up the epilogue with the magic bits we'll need to return to the
3155 exception handler. */
3158 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
3163 #ifdef EH_RETURN_STACKADJ_RTX
3164 tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
3165 VOIDmode, EXPAND_NORMAL);
3166 tmp = convert_memory_address (Pmode, tmp);
3167 if (!crtl->eh.ehr_stackadj)
3168 crtl->eh.ehr_stackadj = copy_to_reg (tmp);
3169 else if (tmp != crtl->eh.ehr_stackadj)
3170 emit_move_insn (crtl->eh.ehr_stackadj, tmp);
3173 tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
3174 VOIDmode, EXPAND_NORMAL);
3175 tmp = convert_memory_address (Pmode, tmp);
3176 if (!crtl->eh.ehr_handler)
3177 crtl->eh.ehr_handler = copy_to_reg (tmp);
3178 else if (tmp != crtl->eh.ehr_handler)
3179 emit_move_insn (crtl->eh.ehr_handler, tmp);
3181 if (!crtl->eh.ehr_label)
3182 crtl->eh.ehr_label = gen_label_rtx ();
3183 emit_jump (crtl->eh.ehr_label);
3187 expand_eh_return (void)
3191 if (! crtl->eh.ehr_label)
3194 crtl->calls_eh_return = 1;
3196 #ifdef EH_RETURN_STACKADJ_RTX
3197 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
3200 around_label = gen_label_rtx ();
3201 emit_jump (around_label);
3203 emit_label (crtl->eh.ehr_label);
3204 clobber_return_register ();
3206 #ifdef EH_RETURN_STACKADJ_RTX
3207 emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
3210 #ifdef HAVE_eh_return
3212 emit_insn (gen_eh_return (crtl->eh.ehr_handler));
3216 #ifdef EH_RETURN_HANDLER_RTX
3217 emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler);
3219 error ("__builtin_eh_return not supported on this target");
3223 emit_label (around_label);
3226 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
3227 POINTERS_EXTEND_UNSIGNED and return it. */
3230 expand_builtin_extend_pointer (tree addr_tree)
3232 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
3235 #ifdef POINTERS_EXTEND_UNSIGNED
3236 extend = POINTERS_EXTEND_UNSIGNED;
3238 /* The previous EH code did an unsigned extend by default, so we do this also
3243 return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
3246 /* In the following functions, we represent entries in the action table
3247 as 1-based indices. Special cases are:
3249 0: null action record, non-null landing pad; implies cleanups
3250 -1: null action record, null landing pad; implies no action
3251 -2: no call-site entry; implies must_not_throw
3252 -3: we have yet to process outer regions
3254 Further, no special cases apply to the "next" field of the record.
3255 For next, 0 means end of list. */
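/* For illustration, roughly how these cases arise below: a call
   covered only by cleanup regions collapses to index 0 (a landing pad
   exists, but no explicit action record is needed); a call with no
   action at all gets -1, and its region notes are delayed until we
   know an LSDA is required anyway; a call inside a must-not-throw
   region gets -2 and is left out of the call-site table entirely.
   See collect_one_action_chain and convert_to_eh_region_ranges.  */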
3257 struct action_record
3265 action_record_eq (const void *pentry, const void *pdata)
3267 const struct action_record *entry = (const struct action_record *) pentry;
3268 const struct action_record *data = (const struct action_record *) pdata;
3269 return entry->filter == data->filter && entry->next == data->next;
3273 action_record_hash (const void *pentry)
3275 const struct action_record *entry = (const struct action_record *) pentry;
3276 return entry->next * 1009 + entry->filter;
3280 add_action_record (htab_t ar_hash, int filter, int next)
3282 struct action_record **slot, *new_ar, tmp;
3284 tmp.filter = filter;
3286 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3288 if ((new_ar = *slot) == NULL)
3290 new_ar = XNEW (struct action_record);
3291 new_ar->offset = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3292 new_ar->filter = filter;
3293 new_ar->next = next;
3296 /* The filter value goes in untouched. The link to the next
3297 record is a "self-relative" byte offset, or zero to indicate
3298 that there is no next record. So convert the absolute 1 based
3299 indices we've been carrying around into a displacement. */
3301 push_sleb128 (&crtl->eh.action_record_data, filter);
3303 next -= VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3304 push_sleb128 (&crtl->eh.action_record_data, next);
3307 return new_ar->offset;
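/* For illustration, starting from an empty action_record_data array:
   add_action_record (h, 3, 0) stores the filter byte 3 and the
   end-of-list marker 0 at 1-based indices 1 and 2 and returns 1;
   a following add_action_record (h, 7, 1) stores 7 at index 3, then
   rewrites the absolute next index 1 as the self-relative value
   1 - (3 + 1) = -3 and stores sleb128 (-3) = 0x7d at index 4,
   returning 3.  Repeating either call with the same (filter, next)
   pair finds the existing record in AR_HASH and just returns its
   offset again.  */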
3311 collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
3313 struct eh_region *c;
3316 /* If we've reached the top of the region chain, then we have
3317 no actions, and require no landing pad. */
3321 switch (region->type)
3324 /* A cleanup adds a zero filter to the beginning of the chain, but
3325 there are special cases to look out for. If there are *only*
3326 cleanups along a path, then it compresses to a zero action.
3327 Further, if there are multiple cleanups along a path, we only
3328 need to represent one of them, as that is enough to trigger
3329 entry to the landing pad at runtime. */
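/* For illustration: a path containing nothing but cleanup regions
   compresses to the zero action described above; a cleanup sitting
   inside a try/catch chain prepends a single zero-filter record; and
   when cleanups nest, only one of them contributes a record, while
   the others return the existing chain unchanged, as the loop over
   outer regions below shows.  */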
3330 next = collect_one_action_chain (ar_hash, region->outer);
3333 for (c = region->outer; c ; c = c->outer)
3334 if (c->type == ERT_CLEANUP)
3336 return add_action_record (ar_hash, 0, next);
3339 /* Process the associated catch regions in reverse order.
3340 If there's a catch-all handler, then we don't need to
3341 search outer regions. Use a magic -3 value to record
3342 that we haven't done the outer search. */
3344 for (c = region->u.eh_try.last_catch; c ; c = c->u.eh_catch.prev_catch)
3346 if (c->u.eh_catch.type_list == NULL)
3348 /* Retrieve the filter from the head of the filter list
3349 where we have stored it (see assign_filter_values). */
3351 = TREE_INT_CST_LOW (TREE_VALUE (c->u.eh_catch.filter_list));
3353 next = add_action_record (ar_hash, filter, 0);
3357 /* Once the outer search is done, trigger an action record for
3358 each filter we have. */
3363 next = collect_one_action_chain (ar_hash, region->outer);
3365 /* If there is no next action, terminate the chain. */
3368 /* If all outer actions are cleanups or must_not_throw,
3369 we'll have no action record for it, since we had wanted
3370 to encode these states in the call-site record directly.
3371 Add a cleanup action to the chain to catch these. */
3373 next = add_action_record (ar_hash, 0, 0);
3376 flt_node = c->u.eh_catch.filter_list;
3377 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3379 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3380 next = add_action_record (ar_hash, filter, next);
3386 case ERT_ALLOWED_EXCEPTIONS:
3387 /* An exception specification adds its filter to the
3388 beginning of the chain. */
3389 next = collect_one_action_chain (ar_hash, region->outer);
3391 /* If there is no next action, terminate the chain. */
3394 /* If all outer actions are cleanups or must_not_throw,
3395 we'll have no action record for it, since we had wanted
3396 to encode these states in the call-site record directly.
3397 Add a cleanup action to the chain to catch these. */
3399 next = add_action_record (ar_hash, 0, 0);
3401 return add_action_record (ar_hash, region->u.allowed.filter, next);
3403 case ERT_MUST_NOT_THROW:
3404 /* A must-not-throw region with no inner handlers or cleanups
3405 requires no call-site entry. Note that this differs from
3406 the no handler or cleanup case in that we do require an lsda
3407 to be generated. Return a magic -2 value to record this. */
3412 /* CATCH regions are handled in TRY above. THROW regions are
3413 for optimization information only and produce no output. */
3414 return collect_one_action_chain (ar_hash, region->outer);
3422 add_call_site (rtx landing_pad, int action)
3424 call_site_record record;
3426 record = GGC_NEW (struct call_site_record);
3427 record->landing_pad = landing_pad;
3428 record->action = action;
3430 VEC_safe_push (call_site_record, gc, crtl->eh.call_site_record, record);
3432 return call_site_base + VEC_length (call_site_record, crtl->eh.call_site_record) - 1;
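/* For illustration: the index returned above (biased by
   call_site_base) is what convert_to_eh_region_ranges stores into
   NOTE_EH_HANDLER on the NOTE_INSN_EH_REGION_BEG/END notes, and what
   the dw2/sjlj call-site table emitters below turn into LSDA
   entries.  */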
3435 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3436 The new note numbers will not refer to region numbers, but
3437 instead to call site entries. */
3440 convert_to_eh_region_ranges (void)
3442 rtx insn, iter, note;
3444 int last_action = -3;
3445 rtx last_action_insn = NULL_RTX;
3446 rtx last_landing_pad = NULL_RTX;
3447 rtx first_no_action_insn = NULL_RTX;
3450 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3453 VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
3455 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3457 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3460 struct eh_region *region;
3462 rtx this_landing_pad;
3465 if (NONJUMP_INSN_P (insn)
3466 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3467 insn = XVECEXP (PATTERN (insn), 0, 0);
3469 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3472 if (! (CALL_P (insn)
3473 || (flag_non_call_exceptions
3474 && may_trap_p (PATTERN (insn)))))
3481 if (INTVAL (XEXP (note, 0)) <= 0)
3483 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
3484 this_action = collect_one_action_chain (ar_hash, region);
3487 /* Existence of catch handlers, or must-not-throw regions
3488 implies that an lsda is needed (even if empty). */
3489 if (this_action != -1)
3490 crtl->uses_eh_lsda = 1;
3492 /* Delay creation of region notes for no-action regions
3493 until we're sure that an lsda will be required. */
3494 else if (last_action == -3)
3496 first_no_action_insn = iter;
3500 /* Cleanups and handlers may share action chains but not
3501 landing pads. Collect the landing pad for this region. */
3502 if (this_action >= 0)
3504 struct eh_region *o;
3505 for (o = region; ! o->landing_pad ; o = o->outer)
3507 this_landing_pad = o->landing_pad;
3510 this_landing_pad = NULL_RTX;
3512 /* Differing actions or landing pads implies a change in call-site
3513 info, which implies some EH_REGION note should be emitted. */
3514 if (last_action != this_action
3515 || last_landing_pad != this_landing_pad)
3517 /* If we'd not seen a previous action (-3) or the previous
3518 action was must-not-throw (-2), then we do not need an end note. */
3520 if (last_action >= -1)
3522 /* If we delayed the creation of the begin, do it now. */
3523 if (first_no_action_insn)
3525 call_site = add_call_site (NULL_RTX, 0);
3526 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3527 first_no_action_insn);
3528 NOTE_EH_HANDLER (note) = call_site;
3529 first_no_action_insn = NULL_RTX;
3532 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3534 NOTE_EH_HANDLER (note) = call_site;
3537 /* If the new action is must-not-throw, then no region notes are created. */
3539 if (this_action >= -1)
3541 call_site = add_call_site (this_landing_pad,
3542 this_action < 0 ? 0 : this_action);
3543 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3544 NOTE_EH_HANDLER (note) = call_site;
3547 last_action = this_action;
3548 last_landing_pad = this_landing_pad;
3550 last_action_insn = iter;
3553 if (last_action >= -1 && ! first_no_action_insn)
3555 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3556 NOTE_EH_HANDLER (note) = call_site;
3559 htab_delete (ar_hash);
3563 struct rtl_opt_pass pass_convert_to_eh_region_ranges =
3567 "eh_ranges", /* name */
3569 convert_to_eh_region_ranges, /* execute */
3572 0, /* static_pass_number */
3574 0, /* properties_required */
3575 0, /* properties_provided */
3576 0, /* properties_destroyed */
3577 0, /* todo_flags_start */
3578 TODO_dump_func, /* todo_flags_finish */
3584 push_uleb128 (varray_type *data_area, unsigned int value)
3588 unsigned char byte = value & 0x7f;
3592 VARRAY_PUSH_UCHAR (*data_area, byte);
3598 push_sleb128 (varray_type *data_area, int value)
3605 byte = value & 0x7f;
3607 more = ! ((value == 0 && (byte & 0x40) == 0)
3608 || (value == -1 && (byte & 0x40) != 0));
3611 VARRAY_PUSH_UCHAR (*data_area, byte);
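/* For illustration, the variable-length encodings produced above:
     uleb128 (300)    = 0xac 0x02      (low 7 bits first; the high bit
                                        of each byte marks "more follows")
     uleb128 (624485) = 0xe5 0x8e 0x26
     sleb128 (-3)     = 0x7d           (single byte; bit 6 carries the
                                        sign, so no continuation is needed)
   Small non-negative values therefore cost a single byte, which is why
   the action table and call-site table lean on these encodings.  */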
3617 #ifndef HAVE_AS_LEB128
3619 dw2_size_of_call_site_table (void)
3621 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3622 int size = n * (4 + 4 + 4);
3625 for (i = 0; i < n; ++i)
3627 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3628 size += size_of_uleb128 (cs->action);
3635 sjlj_size_of_call_site_table (void)
3637 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3641 for (i = 0; i < n; ++i)
3643 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3644 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3645 size += size_of_uleb128 (cs->action);
3653 dw2_output_call_site_table (void)
3655 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3658 for (i = 0; i < n; ++i)
3660 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3661 char reg_start_lab[32];
3662 char reg_end_lab[32];
3663 char landing_pad_lab[32];
3665 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3666 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3668 if (cs->landing_pad)
3669 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3670 CODE_LABEL_NUMBER (cs->landing_pad));
3672 /* ??? Perhaps use insn length scaling if the assembler supports
3673 generic arithmetic. */
3674 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3675 data4 if the function is small enough. */
3676 #ifdef HAVE_AS_LEB128
3677 dw2_asm_output_delta_uleb128 (reg_start_lab,
3678 current_function_func_begin_label,
3679 "region %d start", i);
3680 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3682 if (cs->landing_pad)
3683 dw2_asm_output_delta_uleb128 (landing_pad_lab,
3684 current_function_func_begin_label,
3687 dw2_asm_output_data_uleb128 (0, "landing pad");
3689 dw2_asm_output_delta (4, reg_start_lab,
3690 current_function_func_begin_label,
3691 "region %d start", i);
3692 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3693 if (cs->landing_pad)
3694 dw2_asm_output_delta (4, landing_pad_lab,
3695 current_function_func_begin_label,
3698 dw2_asm_output_data (4, 0, "landing pad");
3700 dw2_asm_output_data_uleb128 (cs->action, "action");
3703 call_site_base += n;
3707 sjlj_output_call_site_table (void)
3709 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3712 for (i = 0; i < n; ++i)
3714 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3716 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3717 "region %d landing pad", i);
3718 dw2_asm_output_data_uleb128 (cs->action, "action");
3721 call_site_base += n;
3724 #ifndef TARGET_UNWIND_INFO
3725 /* Switch to the section that should be used for exception tables. */
3728 switch_to_exception_section (const char * ARG_UNUSED (fnname))
3732 if (exception_section)
3733 s = exception_section;
3736 /* Compute the section and cache it into exception_section,
3737 unless it depends on the function name. */
3738 if (targetm.have_named_sections)
3742 if (EH_TABLES_CAN_BE_READ_ONLY)
3745 ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3746 flags = ((! flag_pic
3747 || ((tt_format & 0x70) != DW_EH_PE_absptr
3748 && (tt_format & 0x70) != DW_EH_PE_aligned))
3749 ? 0 : SECTION_WRITE);
3752 flags = SECTION_WRITE;
3754 #ifdef HAVE_LD_EH_GC_SECTIONS
3755 if (flag_function_sections)
3757 char *section_name = XNEWVEC (char, strlen (fnname) + 32);
3758 sprintf (section_name, ".gcc_except_table.%s", fnname);
3759 s = get_section (section_name, flags, NULL);
3760 free (section_name);
3765 = s = get_section (".gcc_except_table", flags, NULL);
3769 = s = flag_pic ? data_section : readonly_data_section;
3772 switch_to_section (s);
3777 /* Output a reference from an exception table to the type_info object TYPE.
3778 TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for the reference. */
3782 output_ttype (tree type, int tt_format, int tt_format_size)
3785 bool is_public = true;
3787 if (type == NULL_TREE)
3791 struct varpool_node *node;
3793 type = lookup_type_for_runtime (type);
3794 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
3796 /* Let cgraph know that the rtti decl is used. Not all of the
3797 paths below go through assemble_integer, which would take
3798 care of this for us. */
3800 if (TREE_CODE (type) == ADDR_EXPR)
3802 type = TREE_OPERAND (type, 0);
3803 if (TREE_CODE (type) == VAR_DECL)
3805 node = varpool_node (type);
3807 varpool_mark_needed_node (node);
3808 is_public = TREE_PUBLIC (type);
3812 gcc_assert (TREE_CODE (type) == INTEGER_CST);
3815 /* Allow the target to override the type table entry format. */
3816 if (targetm.asm_out.ttype (value))
3819 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
3820 assemble_integer (value, tt_format_size,
3821 tt_format_size * BITS_PER_UNIT, 1);
3823 dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
3827 output_function_exception_table (const char * ARG_UNUSED (fnname))
3829 int tt_format, cs_format, lp_format, i, n;
3830 #ifdef HAVE_AS_LEB128
3831 char ttype_label[32];
3832 char cs_after_size_label[32];
3833 char cs_end_label[32];
3838 int tt_format_size = 0;
3840 /* Not all functions need anything. */
3841 if (! crtl->uses_eh_lsda)
3844 if (eh_personality_libfunc)
3845 assemble_external_libcall (eh_personality_libfunc);
3847 #ifdef TARGET_UNWIND_INFO
3848 /* TODO: Move this into target file. */
3849 fputs ("\t.personality\t", asm_out_file);
3850 output_addr_const (asm_out_file, eh_personality_libfunc);
3851 fputs ("\n\t.handlerdata\n", asm_out_file);
3852 /* Note that varasm still thinks we're in the function's code section.
3853 The ".endp" directive that will immediately follow will take us back. */
3855 switch_to_exception_section (fnname);
3858 /* If the target wants a label to begin the table, emit it here. */
3859 targetm.asm_out.except_table_label (asm_out_file);
3861 have_tt_data = (VEC_length (tree, crtl->eh.ttype_data) > 0
3862 || VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) > 0);
3864 /* Indicate the format of the @TType entries. */
3866 tt_format = DW_EH_PE_omit;
3869 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3870 #ifdef HAVE_AS_LEB128
3871 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
3872 current_function_funcdef_no);
3874 tt_format_size = size_of_encoded_value (tt_format);
3876 assemble_align (tt_format_size * BITS_PER_UNIT);
3879 targetm.asm_out.internal_label (asm_out_file, "LLSDA",
3880 current_function_funcdef_no);
3882 /* The LSDA header. */
3884 /* Indicate the format of the landing pad start pointer. An omitted
3885 field implies @LPStart == @Start. */
3886 /* Currently we always put @LPStart == @Start. This field would
3887 be most useful in moving the landing pads completely out of
3888 line to another section, but it could also be used to minimize
3889 the size of uleb128 landing pad offsets. */
3890 lp_format = DW_EH_PE_omit;
3891 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3892 eh_data_format_name (lp_format));
3894 /* @LPStart pointer would go here. */
3896 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3897 eh_data_format_name (tt_format));
3899 #ifndef HAVE_AS_LEB128
3900 if (USING_SJLJ_EXCEPTIONS)
3901 call_site_len = sjlj_size_of_call_site_table ();
3903 call_site_len = dw2_size_of_call_site_table ();
3906 /* A pc-relative 4-byte displacement to the @TType data. */
3909 #ifdef HAVE_AS_LEB128
3910 char ttype_after_disp_label[32];
3911 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
3912 current_function_funcdef_no);
3913 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3914 "@TType base offset");
3915 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3917 /* Ugh.  Alignment complicates things. */
3918 unsigned int before_disp, after_disp, last_disp, disp;
3920 before_disp = 1 + 1;
3921 after_disp = (1 + size_of_uleb128 (call_site_len)
3923 + VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data)
3924 + (VEC_length (tree, crtl->eh.ttype_data)
3930 unsigned int disp_size, pad;
3933 disp_size = size_of_uleb128 (disp);
3934 pad = before_disp + disp_size + after_disp;
3935 if (pad % tt_format_size)
3936 pad = tt_format_size - (pad % tt_format_size);
3939 disp = after_disp + pad;
3941 while (disp != last_disp);
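/* For illustration: this is a fixed-point iteration because the
   uleb128 size of DISP feeds the alignment padding, which in turn
   changes DISP.  E.g. with tt_format_size == 4 and after_disp == 130:
   disp starts at 130 and size_of_uleb128 (130) is 2, so the unpadded
   total 2 + 2 + 130 = 134 is 2 bytes short of a multiple of 4; the 2
   bytes of padding make disp 132, whose uleb128 size is still 2, so
   the next pass leaves disp unchanged and the loop exits.  */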
3943 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3947 /* Indicate the format of the call-site offsets. */
3948 #ifdef HAVE_AS_LEB128
3949 cs_format = DW_EH_PE_uleb128;
3951 cs_format = DW_EH_PE_udata4;
3953 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3954 eh_data_format_name (cs_format));
3956 #ifdef HAVE_AS_LEB128
3957 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
3958 current_function_funcdef_no);
3959 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
3960 current_function_funcdef_no);
3961 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3962 "Call-site table length");
3963 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3964 if (USING_SJLJ_EXCEPTIONS)
3965 sjlj_output_call_site_table ();
3967 dw2_output_call_site_table ();
3968 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3970 dw2_asm_output_data_uleb128 (call_site_len,"Call-site table length");
3971 if (USING_SJLJ_EXCEPTIONS)
3972 sjlj_output_call_site_table ();
3974 dw2_output_call_site_table ();
3977 /* ??? Decode and interpret the data for flag_debug_asm. */
3978 n = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data);
3979 for (i = 0; i < n; ++i)
3980 dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.action_record_data, i),
3981 (i ? NULL : "Action record table"));
3984 assemble_align (tt_format_size * BITS_PER_UNIT);
3986 i = VEC_length (tree, crtl->eh.ttype_data);
3989 tree type = VEC_index (tree, crtl->eh.ttype_data, i);
3990 output_ttype (type, tt_format, tt_format_size);
3993 #ifdef HAVE_AS_LEB128
3995 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3998 /* ??? Decode and interpret the data for flag_debug_asm. */
3999 n = VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data);
4000 for (i = 0; i < n; ++i)
4002 if (targetm.arm_eabi_unwinder)
4004 tree type = VARRAY_TREE (crtl->eh.ehspec_data, i);
4005 output_ttype (type, tt_format, tt_format_size);
4008 dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.ehspec_data, i),
4009 (i ? NULL : "Exception specification table"));
4012 switch_to_section (current_function_section ());
4016 set_eh_throw_stmt_table (struct function *fun, struct htab *table)
4018 fun->eh->throw_stmt_table = table;
4022 get_eh_throw_stmt_table (struct function *fun)
4024 return fun->eh->throw_stmt_table;
4027 /* Dump EH information to OUT. */
4030 dump_eh_tree (FILE * out, struct function *fun)
4032 struct eh_region *i;
4034 static const char *const type_name[] = { "unknown", "cleanup", "try", "catch",
4035 "allowed_exceptions", "must_not_throw",
4039 i = fun->eh->region_tree;
4043 fprintf (out, "Eh tree:\n");
4046 fprintf (out, " %*s %i %s", depth * 2, "",
4047 i->region_number, type_name[(int) i->type]);
4050 fprintf (out, " tree_label:");
4051 print_generic_expr (out, i->tree_label, 0);
4056 if (i->u.cleanup.prev_try)
4057 fprintf (out, " prev try:%i",
4058 i->u.cleanup.prev_try->region_number);
4063 struct eh_region *c;
4064 fprintf (out, " catch regions:");
4065 for (c = i->u.eh_try.eh_catch; c; c = c->u.eh_catch.next_catch)
4066 fprintf (out, " %i", c->region_number);
4071 if (i->u.eh_catch.prev_catch)
4072 fprintf (out, " prev: %i",
4073 i->u.eh_catch.prev_catch->region_number);
4074 if (i->u.eh_catch.next_catch)
4075 fprintf (out, " next %i",
4076 i->u.eh_catch.next_catch->region_number);
4079 case ERT_ALLOWED_EXCEPTIONS:
4080 fprintf (out, "filter :%i types:", i->u.allowed.filter);
4081 print_generic_expr (out, i->u.allowed.type_list, 0);
4085 fprintf (out, "type:");
4086 print_generic_expr (out, i->u.eh_throw.type, 0);
4089 case ERT_MUST_NOT_THROW:
4097 fprintf (out, " also known as:");
4098 dump_bitmap (out, i->aka);
4101 fprintf (out, "\n");
4102 /* If there are sub-regions, process them. */
4104 i = i->inner, depth++;
4105 /* If there are peers, process them. */
4106 else if (i->next_peer)
4108 /* Otherwise, step back up the tree to the next peer. */
4118 while (i->next_peer == NULL);
4124 /* Verify some basic invariants on EH data structures.  Could be extended to catch more. */
4127 verify_eh_tree (struct function *fun)
4129 struct eh_region *i, *outer = NULL;
4136 if (!fun->eh->region_tree)
4138 for (j = fun->eh->last_region_number; j > 0; --j)
4139 if ((i = VEC_index (eh_region, fun->eh->region_array, j)))
4141 if (i->region_number == j)
4143 if (i->region_number != j && (!i->aka || !bitmap_bit_p (i->aka, j)))
4145 error ("region_array is corrupted for region %i",
4150 i = fun->eh->region_tree;
4154 if (VEC_index (eh_region, fun->eh->region_array, i->region_number) != i)
4156 error ("region_array is corrupted for region %i", i->region_number);
4159 if (i->outer != outer)
4161 error ("outer block of region %i is wrong", i->region_number);
4164 if (i->may_contain_throw && outer && !outer->may_contain_throw)
4167 ("region %i may contain throw and is contained in region that may not",
4173 error ("negative nesting depth of region %i", i->region_number);
4177 /* If there are sub-regions, process them. */
4179 outer = i, i = i->inner, depth++;
4180 /* If there are peers, process them. */
4181 else if (i->next_peer)
4183 /* Otherwise, step back up the tree to the next peer. */
4194 error ("tree list ends on depth %i", depth + 1);
4197 if (count != nvisited)
4199 error ("array does not match the region tree");
4204 dump_eh_tree (stderr, fun);
4205 internal_error ("verify_eh_tree failed");
4211 while (i->next_peer == NULL);
4217 /* Initialize unwind_resume_libfunc. */
4220 default_init_unwind_resume_libfunc (void)
4222 /* The default C++ routines aren't actually C++ specific, so use those. */
4223 unwind_resume_libfunc =
4224 init_one_libfunc ( USING_SJLJ_EXCEPTIONS ? "_Unwind_SjLj_Resume"
4225 : "_Unwind_Resume");
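/* For illustration: these are the libgcc entry points a landing pad
   calls to continue unwinding once its cleanups have run:
   _Unwind_Resume for the DWARF-2 unwinder and _Unwind_SjLj_Resume for
   the setjmp/longjmp scheme selected by USING_SJLJ_EXCEPTIONS.  */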
4230 gate_handle_eh (void)
4232 return doing_eh (0);
4235 /* Complete generation of exception handling code. */
4237 rest_of_handle_eh (void)
4239 cleanup_cfg (CLEANUP_NO_INSN_DEL);
4240 finish_eh_generation ();
4241 cleanup_cfg (CLEANUP_NO_INSN_DEL);
4245 struct rtl_opt_pass pass_rtl_eh =
4250 gate_handle_eh, /* gate */
4251 rest_of_handle_eh, /* execute */
4254 0, /* static_pass_number */
4255 TV_JUMP, /* tv_id */
4256 0, /* properties_required */
4257 0, /* properties_provided */
4258 0, /* properties_destroyed */
4259 0, /* todo_flags_start */
4260 TODO_dump_func /* todo_flags_finish */
4264 #include "gt-except.h"