/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurs without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ]  */
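/* Purely for illustration (user-level C++, not part of this file): the
   mechanism described above corresponds to source like the following,
   where control leaves risky () at the throw and resumes at the
   matching handler in a caller, possibly several frames up.

     struct construction_error {};

     void risky ()
     {
       throw construction_error ();     // "throwing" the exception
     }

     void caller ()
     {
       try
         {
           risky ();
         }
       catch (construction_error &)
         {
           // control is transferred here, out of band
         }
     }
*/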
#include "coretypes.h"
#include "insn-config.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "langhooks.h"
#include "diagnostic.h"
#include "tree-pass.h"
/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif
/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
gimple (*lang_protect_cleanup_actions) (void);

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) (tree a, tree b);

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) (tree);

/* A hash table of label to region number.  */

struct ehl_map_entry GTY(())
{
  rtx label;
  struct eh_region *region;
};

static GTY(()) int call_site_base;
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
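/* For orientation only: the fields built in init_eh below correspond
   roughly to the following C structure.  The exact field types are
   target-dependent, so treat this sketch as an assumption rather than
   a definition (the authoritative layout lives in unwind-sjlj.c):

     struct SjLj_Function_Context
     {
       struct SjLj_Function_Context *__prev;
       int __call_site;
       _Unwind_Word __data[4];
       void *__personality;
       void *__lsda;
       void *__jbuf[];   -- size depends on the setjmp flavor in use
     };
*/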
struct call_site_record GTY(())
{
  rtx landing_pad;
  int action;
};

static int t2r_eq (const void *, const void *);
static hashval_t t2r_hash (const void *);
static int ttypes_filter_eq (const void *, const void *);
static hashval_t ttypes_filter_hash (const void *);
static int ehspec_filter_eq (const void *, const void *);
static hashval_t ehspec_filter_hash (const void *);
static int add_ttypes_entry (htab_t, tree);
static int add_ehspec_entry (htab_t, htab_t, tree);
static void assign_filter_values (void);
static void build_post_landing_pads (void);
static void connect_post_landing_pads (void);
static void dw2_build_landing_pads (void);

static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
static void sjlj_mark_call_sites (struct sjlj_lp_info *);
static void sjlj_emit_function_enter (rtx);
static void sjlj_emit_function_exit (void);
static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
static void sjlj_build_landing_pads (void);

static void remove_eh_handler (struct eh_region *);
static void remove_eh_handler_and_replace (struct eh_region *,
                                           struct eh_region *);
/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};
struct reachable_info;
static enum reachable_code reachable_next_level (struct eh_region *, tree,
                                                 struct reachable_info *, bool);

static int action_record_eq (const void *, const void *);
static hashval_t action_record_hash (const void *);
static int add_action_record (htab_t, int, int);
static int collect_one_action_chain (htab_t, struct eh_region *);
static int add_call_site (rtx, int);

static void push_uleb128 (varray_type *, unsigned int);
static void push_sleb128 (varray_type *, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (void);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (void);
static void sjlj_output_call_site_table (void);
/* Routine to see if exception handling is turned on.
   DO_WARN is nonzero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

doing_eh (int do_warn)
  if (! flag_exceptions)
      static int warned = 0;
      if (! warned && do_warn)
          error ("exception handling disabled, use -fexceptions to enable");
  if (! flag_exceptions)

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
                           build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode
                              (targetm.unwind_word_mode (), 1),
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
#else
      /* This should be large enough for most systems; if it is not,
         JMP_BUF_SIZE should be defined with the proper value.  It will
         also tend to be larger than necessary for most systems; a more
         optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* builtin_setjmp takes a pointer to 5 words.  */
      tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
         runtime's jmp_buf are.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
         easy access from rtl.  */
      sjlj_fc_call_site_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
init_eh_for_function (void)
  cfun->eh = GGC_CNEW (struct eh_status);

/* Routines to generate the exception tree somewhat directly.
   These are used from tree-eh.c when processing exception related
   nodes during tree optimization.  */
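/* For illustration (an assumed shape, not generated output): a body of
   the form

     try { f (); } catch (T1 &) { ... } catch (T2 &) { ... }

   produces an ERT_TRY region for the guarded statements plus one
   ERT_CATCH region per handler; gen_eh_region_catch below chains the
   catches through u.eh_catch.prev_catch/next_catch and hangs them off
   the try via u.eh_try.eh_catch and u.eh_try.last_catch.  */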
static struct eh_region *
gen_eh_region (enum eh_region_type type, struct eh_region *outer)
  struct eh_region *new_eh;

#ifdef ENABLE_CHECKING
  gcc_assert (doing_eh (0));
#endif

  /* Insert a new blank region as a leaf in the tree.  */
  new_eh = GGC_CNEW (struct eh_region);
  new_eh->outer = outer;
      new_eh->next_peer = outer->inner;
      outer->inner = new_eh;
      new_eh->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_eh;
  new_eh->region_number = ++cfun->eh->last_region_number;
gen_eh_region_cleanup (struct eh_region *outer, struct eh_region *prev_try)
  struct eh_region *cleanup = gen_eh_region (ERT_CLEANUP, outer);
  cleanup->u.cleanup.prev_try = prev_try;

gen_eh_region_try (struct eh_region *outer)
  return gen_eh_region (ERT_TRY, outer);

gen_eh_region_catch (struct eh_region *t, tree type_or_list)
  struct eh_region *c, *l;
  tree type_list, type_node;
  /* Ensure we always end up with a type list to normalize further
     processing, then register each type against the runtime types map.  */
  type_list = type_or_list;
      if (TREE_CODE (type_or_list) != TREE_LIST)
        type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
        add_type_for_runtime (TREE_VALUE (type_node));

  c = gen_eh_region (ERT_CATCH, t->outer);
  c->u.eh_catch.type_list = type_list;
  l = t->u.eh_try.last_catch;
  c->u.eh_catch.prev_catch = l;
    l->u.eh_catch.next_catch = c;
    t->u.eh_try.eh_catch = c;
  t->u.eh_try.last_catch = c;

gen_eh_region_allowed (struct eh_region *outer, tree allowed)
  struct eh_region *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
  region->u.allowed.type_list = allowed;
  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

gen_eh_region_must_not_throw (struct eh_region *outer)
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);

get_eh_region_number (struct eh_region *region)
  return region->region_number;

get_eh_region_may_contain_throw (struct eh_region *region)
  return region->may_contain_throw;

get_eh_region_tree_label (struct eh_region *region)
  return region->tree_label;

get_eh_region_no_tree_label (int region)
  return VEC_index (eh_region, cfun->eh->region_array, region)->tree_label;

set_eh_region_tree_label (struct eh_region *region, tree lab)
  region->tree_label = lab;
expand_resx_expr (tree exp)
  int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
  struct eh_region *reg = VEC_index (eh_region,
                                     cfun->eh->region_array, region_nr);

  gcc_assert (!reg->resume);
  do_pending_stack_adjust ();
  reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));

/* Note that the current EH region (if any) may contain a throw, or a
   call to a function which itself may contain a throw.  */

note_eh_region_may_contain_throw (struct eh_region *region)
  while (region && !region->may_contain_throw)
      region->may_contain_throw = 1;
      region = region->outer;
/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

get_exception_pointer (void)
  if (! crtl->eh.exc_ptr)
    crtl->eh.exc_ptr = gen_reg_rtx (ptr_mode);
  return crtl->eh.exc_ptr;

/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

get_exception_filter (void)
  if (! crtl->eh.filter)
    crtl->eh.filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
  return crtl->eh.filter;
/* This section is for the exception handling specific optimization pass.  */

/* Random access the exception region tree.  */

collect_eh_region_array (void)
  i = cfun->eh->region_tree;

  VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
                 cfun->eh->last_region_number + 1);
  VEC_replace (eh_region, cfun->eh->region_array, 0, 0);

      VEC_replace (eh_region, cfun->eh->region_array, i->region_number, i);

      /* If there are sub-regions, process them.  */
      /* If there are peers, process them.  */
      else if (i->next_peer)
      /* Otherwise, step back up the tree to the next peer.  */
        } while (i->next_peer == NULL);
/* R is a MUST_NOT_THROW region that is not reachable via local
   RESX instructions.  It still must be kept in the tree in case the
   runtime can unwind through it, or else we would eliminate the
   terminate call the runtime would otherwise perform.  Return TRUE if
   R contains throwing statements or some of the exceptions in inner
   regions can be unwound up to R.

   CONTAINS_STMT is a bitmap of all regions that contain some throwing
   statements.

   The function looks O(n^3) at first sight.  In fact it is called at most
   once for every MUST_NOT_THROW in the EH tree from
   remove_unreachable_regions.  Because the outer loop walking subregions
   does not dive into MUST_NOT_THROW, it examines every region at most
   once.  The inner loop does the unwinding from the throwing statement
   the same way as we do during CFG construction, so it is O(n^2) in the
   size of the EH tree, but O(n) in the size of the CFG.  In practice EH
   trees are wide, not deep, so this is not a problem.  */
can_be_reached_by_runtime (sbitmap contains_stmt, struct eh_region *r)
  struct eh_region *i = r->inner;

  if (TEST_BIT (contains_stmt, r->region_number))
    EXECUTE_IF_SET_IN_BITMAP (r->aka, 0, n, bi)
      if (TEST_BIT (contains_stmt, n))

      /* It is pointless to look into MUST_NOT_THROW
         or dive into subregions.  They never unwind up.  */
      if (i->type != ERT_MUST_NOT_THROW)
          bool found = TEST_BIT (contains_stmt, i->region_number);
          EXECUTE_IF_SET_IN_BITMAP (i->aka, 0, n, bi)
            if (TEST_BIT (contains_stmt, n))
          /* We have a nested region that contains a throwing statement.
             See if resuming might lead up to the resx, or whether we get
             locally caught sooner.  If we get locally caught sooner, we
             either know region R is not reachable, or it would have a
             direct edge from the EH resx and thus we consider the region
             reachable.  */
              struct eh_region *i1 = i;
              tree type_thrown = NULL_TREE;

              if (i1->type == ERT_THROW)
                  type_thrown = i1->u.eh_throw.type;

              for (; i1 != r; i1 = i1->outer)
                if (reachable_next_level (i1, type_thrown, NULL,
                                          false) >= RNL_CAUGHT)

      /* If there are sub-regions, process them.  */
      if (i->type != ERT_MUST_NOT_THROW && i->inner)
      /* If there are peers, process them.  */
      else if (i->next_peer)
      /* Otherwise, step back up the tree to the next peer.  */
      while (i->next_peer == NULL);
/* Bring region R to the root of the tree.  */

bring_to_root (struct eh_region *r)
  struct eh_region **pp;
  struct eh_region *outer = r->outer;

  for (pp = &outer->inner; *pp != r; pp = &(*pp)->next_peer)

  r->next_peer = cfun->eh->region_tree;
  cfun->eh->region_tree = r;
/* Remove all regions whose labels are not reachable.
   REACHABLE is a bitmap of all regions that are used by the function.
   CONTAINS_STMT is a bitmap of all regions that contain a statement
   (or NULL).  */
remove_unreachable_regions (sbitmap reachable, sbitmap contains_stmt)
  VEC(eh_region,heap) *must_not_throws = VEC_alloc (eh_region, heap, 16);
  struct eh_region *local_must_not_throw = NULL;
  struct eh_region *first_must_not_throw = NULL;

  for (i = cfun->eh->last_region_number; i > 0; --i)
      r = VEC_index (eh_region, cfun->eh->region_array, i);
      if (!r || r->region_number != i)
      if (!TEST_BIT (reachable, i) && !r->resume)
          r->tree_label = NULL;
              /* Don't remove ERT_THROW regions if their outer region
                 is reachable.  */
              if (r->outer && TEST_BIT (reachable, r->outer->region_number))

            case ERT_MUST_NOT_THROW:
              /* MUST_NOT_THROW regions are implementable solely in the
                 runtime, but we need them when inlining functions.

                 Keep them if the outer region is not MUST_NOT_THROW as
                 well and if they contain some statement that might
                 unwind through them.  */
              if ((!r->outer || r->outer->type != ERT_MUST_NOT_THROW)
                      || can_be_reached_by_runtime (contains_stmt, r)))
              /* TRY regions are reachable if any of their CATCH regions
                 are reachable.  */
              for (c = r->u.eh_try.eh_catch; c;
                   c = c->u.eh_catch.next_catch)
                if (TEST_BIT (reachable, c->region_number))
697 fprintf (dump_file, "Removing unreachable eh region %i\n",
699 remove_eh_handler (r);
701 else if (r->type == ERT_MUST_NOT_THROW)
703 if (!first_must_not_throw)
704 first_must_not_throw = r;
705 VEC_safe_push (eh_region, heap, must_not_throws, r);
709 if (r->type == ERT_MUST_NOT_THROW)
711 if (!local_must_not_throw)
712 local_must_not_throw = r;
714 VEC_safe_push (eh_region, heap, must_not_throws, r);
  /* MUST_NOT_THROW regions without a local handler are all the same; they
     trigger a terminate call in the runtime.
     MUST_NOT_THROW regions handled locally can differ in the debug info
     associated with the std::terminate () call, or in whether they call
     terminate or abort when one is coming from Java and the other from C++.

     We merge all MUST_NOT_THROW regions handled by the run-time into one.
     We also bring all local MUST_NOT_THROW regions to the roots of the EH
     tree (since unwinding never continues to the outer region anyway).
     If a MUST_NOT_THROW with a local handler is present in the tree, we use
     that region to merge into, since it will remain in the tree anyway;
     otherwise we use the first MUST_NOT_THROW.

     Merging of locally handled regions needs changes to the CFG.
     Crossjumping should take care of this, by looking at the actual code
     and ensuring that the cleanup actions are really the same.  */
  if (local_must_not_throw)
    first_must_not_throw = local_must_not_throw;

  for (i = 0; VEC_iterate (eh_region, must_not_throws, i, r); i++)
      if (!r->label && !r->tree_label && r != first_must_not_throw)
            fprintf (dump_file, "Replacing MUST_NOT_THROW region %i by %i\n",
                     first_must_not_throw->region_number);
          remove_eh_handler_and_replace (r, first_must_not_throw);
          first_must_not_throw->may_contain_throw |= r->may_contain_throw;

#ifdef ENABLE_CHECKING
  verify_eh_tree (cfun);
#endif
  VEC_free (eh_region, heap, must_not_throws);
/* Return an array mapping LABEL_DECL_UID to the region whose tree_label
   is identical to the label.  */
label_to_region_map (void)
  VEC(int,heap) * label_to_region = NULL;

  VEC_safe_grow_cleared (int, heap, label_to_region,
                         cfun->cfg->last_label_uid + 1);
  for (i = cfun->eh->last_region_number; i > 0; --i)
      struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
      if (r && r->region_number == i
          && r->tree_label && LABEL_DECL_UID (r->tree_label) >= 0)
          VEC_replace (int, label_to_region, LABEL_DECL_UID (r->tree_label),
  return label_to_region;

/* Return number of EH regions.  */
num_eh_regions (void)
  return cfun->eh->last_region_number + 1;

/* Set up EH labels for RTL.  */

convert_from_eh_region_ranges (void)
  int i, n = cfun->eh->last_region_number;
  /* Most of the work is already done at the tree level.  All we need to
     do is collect the rtl labels that correspond to the tree labels
     we allocated earlier.  */
800 for (i = 1; i <= n; ++i)
802 struct eh_region *region;
804 region = VEC_index (eh_region, cfun->eh->region_array, i);
805 if (region && region->tree_label)
806 region->label = DECL_RTL_IF_SET (region->tree_label);
811 find_exception_handler_labels (void)
815 if (cfun->eh->region_tree == NULL)
818 for (i = cfun->eh->last_region_number; i > 0; --i)
820 struct eh_region *region;
823 region = VEC_index (eh_region, cfun->eh->region_array, i);
824 if (! region || region->region_number != i)
826 if (crtl->eh.built_landing_pads)
827 lab = region->landing_pad;
833 /* Returns true if the current function has exception handling regions. */
836 current_function_has_exception_handlers (void)
840 for (i = cfun->eh->last_region_number; i > 0; --i)
842 struct eh_region *region;
844 region = VEC_index (eh_region, cfun->eh->region_array, i);
846 && region->region_number == i
847 && region->type != ERT_THROW)
854 /* A subroutine of duplicate_eh_regions. Search the region tree under O
855 for the minimum and maximum region numbers. Update *MIN and *MAX. */
858 duplicate_eh_regions_0 (eh_region o, int *min, int *max)
864 i = bitmap_first_set_bit (o->aka);
867 i = bitmap_last_set_bit (o->aka);
871 if (o->region_number < *min)
872 *min = o->region_number;
873 if (o->region_number > *max)
874 *max = o->region_number;
879 duplicate_eh_regions_0 (o, min, max);
883 duplicate_eh_regions_0 (o, min, max);
888 /* A subroutine of duplicate_eh_regions. Copy the region tree under OLD.
889 Root it at OUTER, and apply EH_OFFSET to the region number. Don't worry
890 about the other internal pointers just yet, just the tree-like pointers. */
893 duplicate_eh_regions_1 (eh_region old, eh_region outer, int eh_offset)
897 ret = n = GGC_NEW (struct eh_region);
906 n->aka = BITMAP_GGC_ALLOC ();
908 EXECUTE_IF_SET_IN_BITMAP (old->aka, 0, i, bi)
910 bitmap_set_bit (n->aka, i + eh_offset);
911 VEC_replace (eh_region, cfun->eh->region_array, i + eh_offset, n);
915 n->region_number += eh_offset;
916 VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);
921 n = n->inner = duplicate_eh_regions_1 (old, ret, eh_offset);
922 while (old->next_peer)
924 old = old->next_peer;
925 n = n->next_peer = duplicate_eh_regions_1 (old, ret, eh_offset);
/* Return the prev_try pointer that catch subregions of R should
   inherit.  */

static struct eh_region *
find_prev_try (struct eh_region * r)
  for (; r && r->type != ERT_TRY; r = r->outer)
    if (r->type == ERT_MUST_NOT_THROW
        || (r->type == ERT_ALLOWED_EXCEPTIONS
            && !r->u.allowed.type_list))
/* Duplicate the EH regions of IFUN, rooted at COPY_REGION, into the current
   function and root the tree below OUTER_REGION.  Remap labels using the
   MAP callback.  The special case of COPY_REGION of 0 means all regions.  */
duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map,
                      void *data, int copy_region, int outer_region)
  eh_region cur, prev_try, old_prev_try, outer, *splice;
  int i, min_region, max_region, eh_offset, cfun_last_region_number;

#ifdef ENABLE_CHECKING
  verify_eh_tree (ifun);
#endif
  /* Find the range of region numbers to be copied.  The interface we
     provide here mandates a single offset to find new number from old,
     which means we must look at the numbers present, instead of the
     count or something else.  */
  min_region = INT_MAX;
      cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
      old_prev_try = find_prev_try (cur);
      duplicate_eh_regions_0 (cur, &min_region, &max_region);
      max_region = ifun->eh->last_region_number;
  num_regions = max_region - min_region + 1;
  cfun_last_region_number = cfun->eh->last_region_number;
  eh_offset = cfun_last_region_number + 1 - min_region;

  /* If we've not yet created a region array, do so now.  */
  cfun->eh->last_region_number = cfun_last_region_number + num_regions;
  VEC_safe_grow_cleared (eh_region, gc, cfun->eh->region_array,
                         cfun->eh->last_region_number + 1);

  /* Locate the spot at which to insert the new tree.  */
  if (outer_region > 0)
      outer = VEC_index (eh_region, cfun->eh->region_array, outer_region);
        splice = &outer->inner;
        splice = &cfun->eh->region_tree;
      splice = &cfun->eh->region_tree;
    splice = &(*splice)->next_peer;

  if (!ifun->eh->region_tree)
      for (i = cfun_last_region_number + 1;
           i <= cfun->eh->last_region_number; i++)
          VEC_replace (eh_region, cfun->eh->region_array, i, outer);
          if (outer->aka == NULL)
            outer->aka = BITMAP_GGC_ALLOC ();
          bitmap_set_bit (outer->aka, i);
1027 if (copy_region > 0)
1029 cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
1030 *splice = duplicate_eh_regions_1 (cur, outer, eh_offset);
1036 cur = ifun->eh->region_tree;
1037 *splice = n = duplicate_eh_regions_1 (cur, outer, eh_offset);
1038 while (cur->next_peer)
1040 cur = cur->next_peer;
1041 n = n->next_peer = duplicate_eh_regions_1 (cur, outer, eh_offset);
1045 /* Remap all the labels in the new regions. */
1046 for (i = cfun_last_region_number + 1;
1047 VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
1048 if (cur && cur->tree_label)
1049 cur->tree_label = map (cur->tree_label, data);
1051 /* Search for the containing ERT_TRY region to fix up
1052 the prev_try short-cuts for ERT_CLEANUP regions. */
1054 if (outer_region > 0)
1055 prev_try = find_prev_try (VEC_index (eh_region, cfun->eh->region_array, outer_region));
1057 /* Remap all of the internal catch and cleanup linkages. Since we
1058 duplicate entire subtrees, all of the referenced regions will have
1059 been copied too. And since we renumbered them as a block, a simple
1060 bit of arithmetic finds us the index for the replacement region. */
1061 for (i = cfun_last_region_number + 1;
1062 VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
      /* All removed EH regions that were toplevel in the input function
         are now in the outer EH region of the output function.  */
          gcc_assert (VEC_index
                      (eh_region, ifun->eh->region_array,
                       i - eh_offset) == NULL);
          VEC_replace (eh_region, cfun->eh->region_array, i, outer);
          if (outer->aka == NULL)
            outer->aka = BITMAP_GGC_ALLOC ();
          bitmap_set_bit (outer->aka, i);
1080 if (i != cur->region_number)
1083 #define REMAP(REG) \
1084 (REG) = VEC_index (eh_region, cfun->eh->region_array, \
1085 (REG)->region_number + eh_offset)
1090 if (cur->u.eh_try.eh_catch)
1091 REMAP (cur->u.eh_try.eh_catch);
1092 if (cur->u.eh_try.last_catch)
1093 REMAP (cur->u.eh_try.last_catch);
1097 if (cur->u.eh_catch.next_catch)
1098 REMAP (cur->u.eh_catch.next_catch);
1099 if (cur->u.eh_catch.prev_catch)
1100 REMAP (cur->u.eh_catch.prev_catch);
1104 if (cur->u.cleanup.prev_try != old_prev_try)
1105 REMAP (cur->u.cleanup.prev_try);
1107 cur->u.cleanup.prev_try = prev_try;
#ifdef ENABLE_CHECKING
  verify_eh_tree (cfun);
#endif
/* Return the region number of the region that is outer to both REGION_A
   and REGION_B in IFUN.  */
eh_region_outermost (struct function *ifun, int region_a, int region_b)
  struct eh_region *rp_a, *rp_b;

  gcc_assert (ifun->eh->last_region_number > 0);
  gcc_assert (ifun->eh->region_tree);

  rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
  rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
  gcc_assert (rp_a != NULL);
  gcc_assert (rp_b != NULL);

  b_outer = sbitmap_alloc (ifun->eh->last_region_number + 1);
  sbitmap_zero (b_outer);

      SET_BIT (b_outer, rp_b->region_number);

      if (TEST_BIT (b_outer, rp_a->region_number))
          sbitmap_free (b_outer);
          return rp_a->region_number;

  sbitmap_free (b_outer);
t2r_eq (const void *pentry, const void *pdata)
  const_tree const entry = (const_tree) pentry;
  const_tree const data = (const_tree) pdata;

  return TREE_PURPOSE (entry) == data;

t2r_hash (const void *pentry)
  const_tree const entry = (const_tree) pentry;
  return TREE_HASH (TREE_PURPOSE (entry));

add_type_for_runtime (tree type)
  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
                                            TREE_HASH (type), INSERT);
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);

lookup_type_for_runtime (tree type)
  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
                                            TREE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter GTY(())
{
  tree t;
  int filter;
};
/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

ttypes_filter_eq (const void *pentry, const void *pdata)
  const struct ttypes_filter *const entry
    = (const struct ttypes_filter *) pentry;
  const_tree const data = (const_tree) pdata;

  return entry->t == data;

ttypes_filter_hash (const void *pentry)
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TREE_HASH (entry->t);

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */
ehspec_filter_eq (const void *pentry, const void *pdata)
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);

/* Hash function for exception specification lists.  */

ehspec_filter_hash (const void *pentry)
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));

/* Add TYPE (which may be NULL) to crtl->eh.ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

add_ttypes_entry (htab_t ttypes_hash, tree type)
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
      /* Filter value is a 1 based table index.  */
      n = XNEW (struct ttypes_filter);
      n->filter = VEC_length (tree, crtl->eh.ttype_data) + 1;
      VEC_safe_push (tree, gc, crtl->eh.ttype_data, type);

/* Add LIST to crtl->eh.ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */
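/* An illustration with assumed filter values (not actual output): for a
   specification throw (A, B) where A and B already have ttypes filters
   1 and 2, the non-ARM-EABI path below appends the uleb128 encodings of
   1 and 2 followed by a terminating 0 byte, and the entry's own filter
   becomes the negated 1-based byte offset of that run within
   crtl->eh.ehspec_data.  */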
add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */
      n = XNEW (struct ttypes_filter);
      n->filter = -(VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) + 1);

      /* Generate a 0 terminated list of filter values.  */
      for (; list ; list = TREE_CHAIN (list))
          if (targetm.arm_eabi_unwinder)
            VARRAY_PUSH_TREE (crtl->eh.ehspec_data, TREE_VALUE (list));
              /* Look up each type in the list and encode its filter
                 value as a uleb128.  */
              push_uleb128 (&crtl->eh.ehspec_data,
                            add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
      if (targetm.arm_eabi_unwinder)
        VARRAY_PUSH_TREE (crtl->eh.ehspec_data, NULL_TREE);
        VARRAY_PUSH_UCHAR (crtl->eh.ehspec_data, 0);
/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */
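/* A hypothetical example (values are assumptions, not computed here):
   for catch (A) followed by catch (B), add_ttypes_entry might hand out
   filter 1 for A and filter 2 for B, so the two catch regions end up
   with filter_list values (1) and (2) indexing crtl->eh.ttype_data,
   while an ERT_ALLOWED_EXCEPTIONS region gets a negative filter that
   indexes crtl->eh.ehspec_data instead.  */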
assign_filter_values (void)
  htab_t ttypes, ehspec;

  crtl->eh.ttype_data = VEC_alloc (tree, gc, 16);
  if (targetm.arm_eabi_unwinder)
    VARRAY_TREE_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");
    VARRAY_UCHAR_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
      struct eh_region *r;

      r = VEC_index (eh_region, cfun->eh->region_array, i);

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)

          /* Whatever type_list is (NULL or true list), we build a list
             of filters for the region.  */
          r->u.eh_catch.filter_list = NULL_TREE;

          if (r->u.eh_catch.type_list != NULL)
              /* Get a filter value for each of the types caught and store
                 them in the region's dedicated list.  */
              tree tp_node = r->u.eh_catch.type_list;

              for (;tp_node; tp_node = TREE_CHAIN (tp_node))
                  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
                  tree flt_node = build_int_cst (NULL_TREE, flt);

                  r->u.eh_catch.filter_list
                    = tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);

              /* Get a filter value for the NULL list also since it will need
                 an action record anyway.  */
              int flt = add_ttypes_entry (ttypes, NULL);
              tree flt_node = build_int_cst (NULL_TREE, flt);

              r->u.eh_catch.filter_list
                = tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);

        case ERT_ALLOWED_EXCEPTIONS:
            = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);

  htab_delete (ttypes);
  htab_delete (ehspec);
/* Emit SEQ into basic block just before INSN (which is assumed to be
   the first instruction of some existing BB) and return the newly
   created BB.  */

emit_to_new_bb_before (rtx seq, rtx insn)
  /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
     call), we don't want it to go into newly created landing pad or other EH
     construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
  last = emit_insn_before (seq, insn);
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
/* Generate the code to actually handle exceptions, which will follow the
   landing pads.  */

build_post_landing_pads (void)
1449 for (i = cfun->eh->last_region_number; i > 0; --i)
1451 struct eh_region *region;
1454 region = VEC_index (eh_region, cfun->eh->region_array, i);
1455 /* Mind we don't process a region more than once. */
1456 if (!region || region->region_number != i)
1459 switch (region->type)
1462 /* ??? Collect the set of all non-overlapping catch handlers
1463 all the way up the chain until blocked by a cleanup. */
1464 /* ??? Outer try regions can share landing pads with inner
1465 try regions if the types are completely non-overlapping,
1466 and there are no intervening cleanups. */
1468 region->post_landing_pad = gen_label_rtx ();
1472 emit_label (region->post_landing_pad);
1474 /* ??? It is mighty inconvenient to call back into the
1475 switch statement generation code in expand_end_case.
1476 Rapid prototyping sez a sequence of ifs. */
1478 struct eh_region *c;
1479 for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
1481 if (c->u.eh_catch.type_list == NULL)
1482 emit_jump (c->label);
                    /* We need one cmp/jump per type caught.  Each type
                       list entry has a matching entry in the filter list
                       (see assign_filter_values).  */
                    tree tp_node = c->u.eh_catch.type_list;
                    tree flt_node = c->u.eh_catch.filter_list;
1488 tree tp_node = c->u.eh_catch.type_list;
1489 tree flt_node = c->u.eh_catch.filter_list;
1493 emit_cmp_and_jump_insns
1495 GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
1497 targetm.eh_return_filter_mode (), 0, c->label);
1499 tp_node = TREE_CHAIN (tp_node);
1500 flt_node = TREE_CHAIN (flt_node);
1506 /* We delay the generation of the _Unwind_Resume until we generate
1507 landing pads. We emit a marker here so as to get good control
1508 flow data in the meantime. */
1510 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1516 emit_to_new_bb_before (seq, region->u.eh_try.eh_catch->label);
1520 case ERT_ALLOWED_EXCEPTIONS:
1521 region->post_landing_pad = gen_label_rtx ();
1525 emit_label (region->post_landing_pad);
1527 emit_cmp_and_jump_insns (crtl->eh.filter,
1528 GEN_INT (region->u.allowed.filter),
1530 targetm.eh_return_filter_mode (), 0, region->label);
1532 /* We delay the generation of the _Unwind_Resume until we generate
1533 landing pads. We emit a marker here so as to get good control
1534 flow data in the meantime. */
1536 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1542 emit_to_new_bb_before (seq, region->label);
1546 case ERT_MUST_NOT_THROW:
1547 region->post_landing_pad = region->label;
1552 /* Nothing to do. */
1561 /* Replace RESX patterns with jumps to the next handler if any, or calls to
1562 _Unwind_Resume otherwise. */
1565 connect_post_landing_pads (void)
1569 for (i = cfun->eh->last_region_number; i > 0; --i)
1571 struct eh_region *region;
1572 struct eh_region *outer;
1576 region = VEC_index (eh_region, cfun->eh->region_array, i);
1577 /* Mind we don't process a region more than once. */
1578 if (!region || region->region_number != i)
1581 /* If there is no RESX, or it has been deleted by flow, there's
1582 nothing to fix up. */
1583 if (! region->resume || INSN_DELETED_P (region->resume))
1586 /* Search for another landing pad in this function. */
1587 for (outer = region->outer; outer ; outer = outer->outer)
1588 if (outer->post_landing_pad)
1596 basic_block src, dest;
1598 emit_jump (outer->post_landing_pad);
1599 src = BLOCK_FOR_INSN (region->resume);
1600 dest = BLOCK_FOR_INSN (outer->post_landing_pad);
1601 while (EDGE_COUNT (src->succs) > 0)
1602 remove_edge (EDGE_SUCC (src, 0));
1603 e = make_edge (src, dest, 0);
1604 e->probability = REG_BR_PROB_BASE;
1605 e->count = src->count;
1609 emit_library_call (unwind_resume_libfunc, LCT_THROW,
1610 VOIDmode, 1, crtl->eh.exc_ptr, ptr_mode);
          /* What we just emitted was a throwing libcall, so it got a
             barrier automatically added after it.  If the last insn in
             the libcall sequence isn't the barrier, it's because the
             target emits multiple insns for a call, and there are insns
             after the actual call insn (which are redundant and would be
             optimized away).  The barrier is inserted exactly after the
             call insn, so let's go get that and delete the insns after
             it, because below we need the barrier to be the last insn in
             the sequence.  */
          delete_insns_since (NEXT_INSN (last_call_insn ()));
      barrier = emit_insn_before (seq, region->resume);
      /* Avoid duplicate barrier.  */
      gcc_assert (BARRIER_P (barrier));
      delete_insn (barrier);
      delete_insn (region->resume);

      /* ??? From tree-ssa we can wind up with catch regions whose
         label is not instantiated, but whose resx is present.  Now
         that we've dealt with the resx, kill the region.  */
      if (region->label == NULL && region->type == ERT_CLEANUP)
        remove_eh_handler (region);

dw2_build_landing_pads (void)
  for (i = cfun->eh->last_region_number; i > 0; --i)
      struct eh_region *region;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)

      if (region->type != ERT_CLEANUP
          && region->type != ERT_TRY
          && region->type != ERT_ALLOWED_EXCEPTIONS)

      region->landing_pad = gen_label_rtx ();
      emit_label (region->landing_pad);
#ifdef HAVE_exception_receiver
      if (HAVE_exception_receiver)
        emit_insn (gen_exception_receiver ());
      else
#endif
#ifdef HAVE_nonlocal_goto_receiver
        if (HAVE_nonlocal_goto_receiver)
          emit_insn (gen_nonlocal_goto_receiver ());
        else
#endif
          { /* Nothing */ }
1680 emit_move_insn (crtl->eh.exc_ptr,
1681 gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
1682 emit_move_insn (crtl->eh.filter,
1683 gen_rtx_REG (targetm.eh_return_filter_mode (),
1684 EH_RETURN_DATA_REGNO (1)));
1689 bb = emit_to_new_bb_before (seq, region->post_landing_pad);
1690 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
1691 e->count = bb->count;
1692 e->probability = REG_BR_PROB_BASE;
  int directly_reachable;
  int call_site_index;

sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
  bool found_one = false;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
      struct eh_region *region;
      enum reachable_code rc;

      if (! INSN_P (insn))

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)

      region = VEC_index (eh_region, cfun->eh->region_array,
                          INTVAL (XEXP (note, 0)));

      type_thrown = NULL_TREE;
      if (region->type == ERT_THROW)
          type_thrown = region->u.eh_throw.type;
          region = region->outer;

      /* Find the first containing region that might handle the exception.
         That's the landing pad to which we will transfer control.  */
      rc = RNL_NOT_CAUGHT;
      for (; region; region = region->outer)
          rc = reachable_next_level (region, type_thrown, NULL, false);
          if (rc != RNL_NOT_CAUGHT)

      if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
          lp_info[region->region_number].directly_reachable = 1;
sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
  /* First task: build the action table.  */

  VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
        struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);

        r->landing_pad = dispatch_label;
        lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
        if (lp_info[i].action_index != -1)
          crtl->uses_eh_lsda = 1;

  htab_delete (ar_hash);

  /* Next: assign dispatch values.  In dwarf2 terms, this would be the
     landing pad label for the region.  For sjlj though, there is one
     common landing pad from which we dispatch to the post-landing pads.

     A region receives a dispatch index if it is directly reachable
     and requires in-function processing.  Regions that share post-landing
     pads may share dispatch indices.  */
  /* ??? Post-landing pad sharing doesn't actually happen at the moment
     (see build_post_landing_pads) so we don't bother checking for it.  */

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      lp_info[i].dispatch_index = index++;
  /* Finally: assign call-site values.  In dwarf2 terms, this would be
     the region number assigned by convert_to_eh_region_ranges, but
     we handle no-action and must-not-throw differently.  */
1799 for (i = cfun->eh->last_region_number; i > 0; --i)
1800 if (lp_info[i].directly_reachable)
1802 int action = lp_info[i].action_index;
1804 /* Map must-not-throw to otherwise unused call-site index 0. */
1807 /* Map no-action to otherwise unused call-site index -1. */
1808 else if (action == -1)
1810 /* Otherwise, look it up in the table. */
1812 index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
1814 lp_info[i].call_site_index = index;
1819 sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
1821 int last_call_site = -2;
1824 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1826 struct eh_region *region;
1828 rtx note, before, p;
1830 /* Reset value tracking at extended basic block boundaries. */
1832 last_call_site = -2;
1834 if (! INSN_P (insn))
1837 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1839 /* Calls that are known to not throw need not be marked. */
1840 if (note && INTVAL (XEXP (note, 0)) <= 0)
1844 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
      /* Calls (and trapping insns) without notes are outside any
         exception handling region in this function.  Mark them as
         no action.  */
          || (flag_non_call_exceptions
              && may_trap_p (PATTERN (insn))))
        this_call_site = -1;
        this_call_site = lp_info[region->region_number].call_site_index;

      if (this_call_site == last_call_site)
      /* Don't separate a call from its argument loads.  */
      before = find_first_parameter_load (insn, NULL_RTX);

      mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
                            sjlj_fc_call_site_ofs);
      emit_move_insn (mem, GEN_INT (this_call_site));

      emit_insn_before (p, before);
      last_call_site = this_call_site;
/* Construct the SjLj_Function_Context.  */

sjlj_emit_function_enter (rtx dispatch_label)
  rtx fn_begin, fc, mem, seq;
  bool fn_begin_outside_block;

  fc = crtl->eh.sjlj_fc;
  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we cannot depend on emit_library_call to do it for us.  */
  assemble_external_libcall (eh_personality_libfunc);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, eh_personality_libfunc);

  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (crtl->uses_eh_lsda)
      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    emit_move_insn (mem, const0_rtx);
#ifdef DONT_USE_BUILTIN_SETJMP
  x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
                               TYPE_MODE (integer_type_node), 1,
                               plus_constant (XEXP (fc, 0),
                                              sjlj_fc_jbuf_ofs), Pmode);

  emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
                           TYPE_MODE (integer_type_node), 0, dispatch_label);
  add_reg_br_prob_note (get_insns (), REG_BR_PROB_BASE/100);
#else
  expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
                               dispatch_label);
#endif

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
                     1, XEXP (fc, 0), Pmode);
  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  fn_begin_outside_block = true;
  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (NOTE_P (fn_begin))
        if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
        else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
          fn_begin_outside_block = false;

  if (fn_begin_outside_block)
    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
    emit_insn_after (seq, fn_begin);

/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  */

sjlj_emit_function_exit_after (rtx after)
  crtl->eh.sjlj_exit_after = after;

sjlj_emit_function_exit (void)
  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
                     1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      /* Figure out whether the place we are supposed to insert the
         libcall is inside the last basic block or after it.  In the
         latter case we need to emit the insns on the edge.  */
1997 gcc_assert (e->src->next_bb == EXIT_BLOCK_PTR);
1998 for (insn = BB_HEAD (e->src); ; insn = NEXT_INSN (insn))
2000 if (insn == crtl->eh.sjlj_exit_after)
2003 insn = NEXT_INSN (insn);
2004 emit_insn_after (seq, insn);
2007 if (insn == BB_END (e->src))
2010 insert_insn_on_edge (seq, e);
2015 sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
2017 enum machine_mode unwind_word_mode = targetm.unwind_word_mode ();
2018 enum machine_mode filter_mode = targetm.eh_return_filter_mode ();
2019 int i, first_reachable;
2020 rtx mem, dispatch, seq, fc;
2025 fc = crtl->eh.sjlj_fc;
2029 emit_label (dispatch_label);
#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);
#endif
  /* Load up dispatch index, exc_ptr and filter values from the
     function context.  */
  mem = adjust_address (fc, TYPE_MODE (integer_type_node),
                        sjlj_fc_call_site_ofs);
  dispatch = copy_to_reg (mem);

  mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
  if (unwind_word_mode != ptr_mode)
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (ptr_mode, mem);
#else
      mem = convert_to_mode (ptr_mode, mem, 0);
#endif
2050 emit_move_insn (crtl->eh.exc_ptr, mem);
2052 mem = adjust_address (fc, unwind_word_mode,
2053 sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
2054 if (unwind_word_mode != filter_mode)
2055 mem = convert_to_mode (filter_mode, mem, 0);
2056 emit_move_insn (crtl->eh.filter, mem);
2058 /* Jump to one of the directly reachable regions. */
2059 /* ??? This really ought to be using a switch statement. */
2061 first_reachable = 0;
2062 for (i = cfun->eh->last_region_number; i > 0; --i)
2064 if (! lp_info[i].directly_reachable)
2067 if (! first_reachable)
2069 first_reachable = i;
2073 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2074 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
2075 ((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, i))
2076 ->post_landing_pad);
2082 before = (((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, first_reachable))
2083 ->post_landing_pad);
2085 bb = emit_to_new_bb_before (seq, before);
2086 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
2087 e->count = bb->count;
2088 e->probability = REG_BR_PROB_BASE;
sjlj_build_landing_pads (void)
  struct sjlj_lp_info *lp_info;

  lp_info = XCNEWVEC (struct sjlj_lp_info, cfun->eh->last_region_number + 1);

  if (sjlj_find_directly_reachable_regions (lp_info))
      rtx dispatch_label = gen_label_rtx ();
      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
                                        TYPE_MODE (sjlj_fc_type_node),
                                        TYPE_ALIGN (sjlj_fc_type_node));
        = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
                              int_size_in_bytes (sjlj_fc_type_node),

      sjlj_assign_call_site_values (dispatch_label, lp_info);
      sjlj_mark_call_sites (lp_info);

      sjlj_emit_function_enter (dispatch_label);
      sjlj_emit_dispatch_table (dispatch_label, lp_info);
      sjlj_emit_function_exit ();

finish_eh_generation (void)
  /* Nothing to do if no regions created.  */
  if (cfun->eh->region_tree == NULL)

  /* The object here is to provide find_basic_blocks with detailed
     information (via reachable_handlers) on how exception control
     flows within the function.  In this first pass, we can include
     type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
     regions, and hope that it will be useful in deleting unreachable
     handlers.  Subsequently, we will generate landing pads which will
     connect many of the handlers, and then type information will not
     be effective.  Still, this is a win over previous implementations.  */

  /* These registers are used by the landing pads.  Make sure they
     have been generated.  */
  get_exception_pointer ();
  get_exception_filter ();

  /* Construct the landing pads.  */
  assign_filter_values ();
  build_post_landing_pads ();
  connect_post_landing_pads ();
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_build_landing_pads ();
    dw2_build_landing_pads ();
  crtl->eh.built_landing_pads = 1;

  /* We've totally changed the CFG.  Start over.  */
  find_exception_handler_labels ();
  break_superblocks ();
  if (USING_SJLJ_EXCEPTIONS
      /* Kludge for Alpha/Tru64 (see alpha_gp_save_rtx).  */
      || single_succ_edge (ENTRY_BLOCK_PTR)->insns.r)
    commit_edge_insertions ();

      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
          if (e->flags & EDGE_EH)
      rtl_make_eh_edge (NULL, bb, BB_END (bb));
/* This section handles removing dead code for flow.  */

/* Splice REGION from the region tree and replace it by REPLACE etc.  */

remove_eh_handler_and_replace (struct eh_region *region,
                               struct eh_region *replace)
  struct eh_region **pp, **pp_start, *p, *outer, *inner;

  outer = region->outer;
  /* For the benefit of efficiently handling REG_EH_REGION notes,
     replace this region in the region array with its containing
     region.  Note that previous region deletions may result in
     multiple copies of this region in the array, so we have a
     list of alternate numbers by which we are known.  */

  VEC_replace (eh_region, cfun->eh->region_array, region->region_number,
      EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
          VEC_replace (eh_region, cfun->eh->region_array, i, replace);

        replace->aka = BITMAP_GGC_ALLOC ();
        bitmap_ior_into (replace->aka, region->aka);
      bitmap_set_bit (replace->aka, region->region_number);

  if (crtl->eh.built_landing_pads)
    lab = region->landing_pad;
    lab = region->label;

    pp_start = &outer->inner;
    pp_start = &cfun->eh->region_tree;
  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
  *pp = region->next_peer;

    pp_start = &replace->inner;
    pp_start = &cfun->eh->region_tree;
  inner = region->inner;
      for (p = inner; p->next_peer ; p = p->next_peer)
      p->next_peer = *pp_start;

  if (region->type == ERT_CATCH)
      struct eh_region *eh_try, *next, *prev;

      for (eh_try = region->next_peer;
           eh_try->type == ERT_CATCH;
           eh_try = eh_try->next_peer)
      gcc_assert (eh_try->type == ERT_TRY);

      next = region->u.eh_catch.next_catch;
      prev = region->u.eh_catch.prev_catch;

        next->u.eh_catch.prev_catch = prev;
        eh_try->u.eh_try.last_catch = prev;
        prev->u.eh_catch.next_catch = next;
          eh_try->u.eh_try.eh_catch = next;
          remove_eh_handler (eh_try);
/* Splice REGION from the region tree and replace it by the outer
   region.  */

remove_eh_handler (struct eh_region *region)
  remove_eh_handler_and_replace (region, region->outer);
/* Remove EH region R that has turned out to have no code in its handler.  */

remove_eh_region (int r)
  struct eh_region *region;

  region = VEC_index (eh_region, cfun->eh->region_array, r);
  remove_eh_handler (region);
/* Invokes CALLBACK for every exception handler label.  Only used by old
   loop hackery; should not be used by new code.  */

for_each_eh_label (void (*callback) (rtx))
  for (i = 0; i < cfun->eh->last_region_number; i++)
      struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
      if (r && r->region_number == i && r->label
          && GET_CODE (r->label) == CODE_LABEL)
        (*callback) (r->label);

/* Invoke CALLBACK for every exception region in the current function.  */

for_each_eh_region (void (*callback) (struct eh_region *))
  int i, n = cfun->eh->last_region_number;
  for (i = 1; i <= n; ++i)
      struct eh_region *region;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
        (*callback) (region);

/* This section describes CFG exception edges for flow.  */
/* For communicating between calls to reachable_next_level.  */
struct reachable_info
{
  tree types_caught;
  tree types_allowed;
  void (*callback) (struct eh_region *, void *);
  void *callback_data;
};
/* A subroutine of reachable_next_level.  Return true if TYPE, or a
   base class of TYPE, is in HANDLED.  */

check_handled (tree handled, tree type)
  /* We can check for exact matches without front-end help.  */
  if (! lang_eh_type_covers)
      for (t = handled; t ; t = TREE_CHAIN (t))
        if (TREE_VALUE (t) == type)
      for (t = handled; t ; t = TREE_CHAIN (t))
        if ((*lang_eh_type_covers) (TREE_VALUE (t), type))

/* A subroutine of reachable_next_level.  If we are collecting a list
   of handlers, add one.  After landing pad generation, reference
   it instead of the handlers themselves.  Further, the handlers are
   all wired together, so by referencing one, we've got them all.
   Before landing pad generation we reference each handler individually.

   LP_REGION contains the landing pad; REGION is the handler.  */

add_reachable_handler (struct reachable_info *info,
                       struct eh_region *lp_region, struct eh_region *region)
  if (crtl->eh.built_landing_pads)
    info->callback (lp_region, info->callback_data);
    info->callback (region, info->callback_data);

/* Process one level of exception regions for reachability.
   If TYPE_THROWN is non-null, then it is the *exact* type being
   propagated.  If INFO is non-null, then collect handler labels
   and caught/allowed type information between invocations.  */
2391 static enum reachable_code
2392 reachable_next_level (struct eh_region *region, tree type_thrown,
2393 struct reachable_info *info,
2396 switch (region->type)
2399 /* Before landing-pad generation, we model control flow
2400 directly to the individual handlers. In this way we can
2401 see that catch handler types may shadow one another. */
2402 add_reachable_handler (info, region, region);
2403 return RNL_MAYBE_CAUGHT;
2407 struct eh_region *c;
2408 enum reachable_code ret = RNL_NOT_CAUGHT;
2410 for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
2412 /* A catch-all handler ends the search. */
2413 if (c->u.eh_catch.type_list == NULL)
2415 add_reachable_handler (info, region, c);
2421 /* If we have at least one type match, end the search. */
2422 tree tp_node = c->u.eh_catch.type_list;
2424 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2426 tree type = TREE_VALUE (tp_node);
2428 if (type == type_thrown
2429 || (lang_eh_type_covers
2430 && (*lang_eh_type_covers) (type, type_thrown)))
2432 add_reachable_handler (info, region, c);
2437 /* If we have definitive information of a match failure,
2438 the catch won't trigger. */
2439 if (lang_eh_type_covers)
2440 return RNL_NOT_CAUGHT;
2443 /* At this point, we either don't know what type is thrown or
2444 don't have front-end assistance to help deciding if it is
2445 covered by one of the types in the list for this region.
2447 We'd then like to add this region to the list of reachable
2448 handlers since it is indeed potentially reachable based on the
2449 information we have.
2451 Actually, this handler is for sure not reachable if all the
2452 types it matches have already been caught. That is, it is only
2453 potentially reachable if at least one of the types it catches
2454 has not been previously caught. */
2457 ret = RNL_MAYBE_CAUGHT;
2460 tree tp_node = c->u.eh_catch.type_list;
2461 bool maybe_reachable = false;
2463 /* Compute the potential reachability of this handler and
2464 update the list of types caught at the same time. */
2465 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2467 tree type = TREE_VALUE (tp_node);
2469 if (! check_handled (info->types_caught, type))
2472 = tree_cons (NULL, type, info->types_caught);
2474 maybe_reachable = true;
2478 if (maybe_reachable)
2480 add_reachable_handler (info, region, c);
2482 /* ??? If the catch type is a base class of every allowed
2483 type, then we know we can stop the search. */
2484 ret = RNL_MAYBE_CAUGHT;
2492 case ERT_ALLOWED_EXCEPTIONS:
2493 /* An empty list of types definitely ends the search. */
2494 if (region->u.allowed.type_list == NULL_TREE)
2496 add_reachable_handler (info, region, region);
2500 /* Collect a list of lists of allowed types for use in detecting
2501 when a catch may be transformed into a catch-all. */
2503 info->types_allowed = tree_cons (NULL_TREE,
2504 region->u.allowed.type_list,
2505 info->types_allowed);
2507 /* If we have definitive information about the type hierarchy,
2508 then we can tell if the thrown type will pass through the
2509 filter. */
2510 if (type_thrown && lang_eh_type_covers)
2512 if (check_handled (region->u.allowed.type_list, type_thrown))
2513 return RNL_NOT_CAUGHT;
2516 add_reachable_handler (info, region, region);
2521 add_reachable_handler (info, region, region);
2522 return RNL_MAYBE_CAUGHT;
2525 /* Catch regions are handled by their controlling try region. */
2526 return RNL_NOT_CAUGHT;
2528 case ERT_MUST_NOT_THROW:
2529 /* Here we end our search, since no exceptions may propagate.
2531 Local landing pads of ERT_MUST_NOT_THROW regions are reachable
2532 only via locally handled RESX instructions.
2534 When we inline a function call, we can bring in new handlers. To
2535 keep ERT_MUST_NOT_THROW landing pads from being deleted as unreachable,
2536 assume that such handlers exist for any inlinable call until the
2537 inlining decisions are fixed. */
2541 add_reachable_handler (info, region, region);
2549 /* Shouldn't see these here. */
2557 /* Invoke CALLBACK on each region reachable from REGION_NUMBER. */
2560 foreach_reachable_handler (int region_number, bool is_resx, bool inlinable_call,
2561 void (*callback) (struct eh_region *, void *),
2562 void *callback_data)
2564 struct reachable_info info;
2565 struct eh_region *region;
2568 memset (&info, 0, sizeof (info));
2569 info.callback = callback;
2570 info.callback_data = callback_data;
2572 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
2576 type_thrown = NULL_TREE;
2579 /* A RESX leaves a region instead of entering it. Thus the
2580 region itself may have been deleted out from under us. */
2583 region = region->outer;
2585 else if (region->type == ERT_THROW)
2587 type_thrown = region->u.eh_throw.type;
2588 region = region->outer;
2593 if (reachable_next_level (region, type_thrown, &info,
2594 inlinable_call || is_resx) >= RNL_CAUGHT)
2596 /* If we have processed one cleanup, there is no point in
2597 processing any more of them. Each cleanup will have an edge
2598 to the next outer cleanup region, so the flow graph will be
2599 accurate. */
2600 if (region->type == ERT_CLEANUP)
2601 region = region->u.cleanup.prev_try;
2603 region = region->outer;
2607 /* Retrieve a list of labels of exception handlers which can be
2608 reached by a given insn. */
2611 arh_to_landing_pad (struct eh_region *region, void *data)
2613 rtx *p_handlers = (rtx *) data;
2615 *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
2619 arh_to_label (struct eh_region *region, void *data)
2621 rtx *p_handlers = (rtx *) data;
2622 *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
2626 reachable_handlers (rtx insn)
2628 bool is_resx = false;
2629 rtx handlers = NULL;
2633 && GET_CODE (PATTERN (insn)) == RESX)
2635 region_number = XINT (PATTERN (insn), 0);
2640 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2641 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2643 region_number = INTVAL (XEXP (note, 0));
2646 foreach_reachable_handler (region_number, is_resx, false,
2647 (crtl->eh.built_landing_pads
2648 ? arh_to_landing_pad
2655 /* Determine if the given INSN can throw an exception that is caught
2656 within the function. */
2659 can_throw_internal_1 (int region_number, bool is_resx, bool inlinable_call)
2661 struct eh_region *region;
2664 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
2668 type_thrown = NULL_TREE;
2670 region = region->outer;
2671 else if (region->type == ERT_THROW)
2673 type_thrown = region->u.eh_throw.type;
2674 region = region->outer;
2677 /* If this exception is ignored by each and every containing region,
2678 then control passes straight out. The runtime may handle some
2679 regions, which also do not require processing internally. */
2680 for (; region; region = region->outer)
2682 enum reachable_code how = reachable_next_level (region, type_thrown, 0,
2683 inlinable_call || is_resx);
2684 if (how == RNL_BLOCKED)
2686 if (how != RNL_NOT_CAUGHT)
2694 can_throw_internal (const_rtx insn)
2698 if (! INSN_P (insn))
2702 && GET_CODE (PATTERN (insn)) == RESX
2703 && XINT (PATTERN (insn), 0) > 0)
2704 return can_throw_internal_1 (XINT (PATTERN (insn), 0), true, false);
2706 if (NONJUMP_INSN_P (insn)
2707 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2708 insn = XVECEXP (PATTERN (insn), 0, 0);
2710 /* Every insn that might throw has an EH_REGION note. */
2711 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2712 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2715 return can_throw_internal_1 (INTVAL (XEXP (note, 0)), false, false);
2718 /* Determine if the given INSN can throw an exception that is
2719 visible outside the function. */
2722 can_throw_external_1 (int region_number, bool is_resx, bool inlinable_call)
2724 struct eh_region *region;
2727 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
2731 type_thrown = NULL_TREE;
2733 region = region->outer;
2734 else if (region->type == ERT_THROW)
2736 type_thrown = region->u.eh_throw.type;
2737 region = region->outer;
2740 /* If the exception is caught or blocked by any containing region,
2741 then it is not seen by any calling function. */
2742 for (; region ; region = region->outer)
2743 if (reachable_next_level (region, type_thrown, NULL,
2744 inlinable_call || is_resx) >= RNL_CAUGHT)
2751 can_throw_external (const_rtx insn)
2755 if (! INSN_P (insn))
2759 && GET_CODE (PATTERN (insn)) == RESX
2760 && XINT (PATTERN (insn), 0) > 0)
2761 return can_throw_external_1 (XINT (PATTERN (insn), 0), true, false);
2763 if (NONJUMP_INSN_P (insn)
2764 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2766 rtx seq = PATTERN (insn);
2767 int i, n = XVECLEN (seq, 0);
2769 for (i = 0; i < n; i++)
2770 if (can_throw_external (XVECEXP (seq, 0, i)))
2776 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2779 /* Calls (and trapping insns) without notes are outside any
2780 exception handling region in this function. We have to
2781 assume it might throw. Given that the front end and middle
2782 ends mark known NOTHROW functions, this isn't so wildly
2783 inaccurate. */
2784 return (CALL_P (insn)
2785 || (flag_non_call_exceptions
2786 && may_trap_p (PATTERN (insn))));
2788 if (INTVAL (XEXP (note, 0)) <= 0)
2791 return can_throw_external_1 (INTVAL (XEXP (note, 0)), false, false);
2794 /* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls. */
2797 set_nothrow_function_flags (void)
2803 /* Assume crtl->all_throwers_are_sibcalls until we encounter
2804 something that can throw an exception. We specifically exempt
2805 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
2806 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
2807 is optimistic. */
2809 crtl->all_throwers_are_sibcalls = 1;
2811 /* If we don't know that this implementation of the function will
2812 actually be used, then we must not set TREE_NOTHROW, since
2813 callers must not assume that this function does not throw. */
2814 if (TREE_NOTHROW (current_function_decl))
2817 if (! flag_exceptions)
2820 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2821 if (can_throw_external (insn))
2825 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
2827 crtl->all_throwers_are_sibcalls = 0;
2832 for (insn = crtl->epilogue_delay_list; insn;
2833 insn = XEXP (insn, 1))
2834 if (can_throw_external (insn))
2838 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
2840 crtl->all_throwers_are_sibcalls = 0;
2845 && (cgraph_function_body_availability (cgraph_node
2846 (current_function_decl))
2847 >= AVAIL_AVAILABLE))
2849 TREE_NOTHROW (current_function_decl) = 1;
2852 fprintf (dump_file, "Marking function nothrow: %s\n\n",
2853 current_function_name ());
2858 struct rtl_opt_pass pass_set_nothrow_function_flags =
2862 "nothrow", /* name */
2864 set_nothrow_function_flags, /* execute */
2867 0, /* static_pass_number */
2868 TV_NONE, /* tv_id */
2869 0, /* properties_required */
2870 0, /* properties_provided */
2871 0, /* properties_destroyed */
2872 0, /* todo_flags_start */
2873 TODO_dump_func, /* todo_flags_finish */
2878 /* Various hooks for unwind library. */
2880 /* Do any necessary initialization to access arbitrary stack frames.
2881 On the SPARC, this means flushing the register windows. */
2884 expand_builtin_unwind_init (void)
2886 /* Set this so all the registers get saved in our frame; we need to be
2887 able to copy the saved values for any registers from frames we unwind. */
2888 crtl->saves_all_registers = 1;
2890 #ifdef SETUP_FRAME_ADDRESSES
2891 SETUP_FRAME_ADDRESSES ();
2896 expand_builtin_eh_return_data_regno (tree exp)
2898 tree which = CALL_EXPR_ARG (exp, 0);
2899 unsigned HOST_WIDE_INT iwhich;
2901 if (TREE_CODE (which) != INTEGER_CST)
2903 error ("argument of %<__builtin_eh_return_regno%> must be constant");
2907 iwhich = tree_low_cst (which, 1);
2908 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2909 if (iwhich == INVALID_REGNUM)
2912 #ifdef DWARF_FRAME_REGNUM
2913 iwhich = DWARF_FRAME_REGNUM (iwhich);
2915 iwhich = DBX_REGISTER_NUMBER (iwhich);
2918 return GEN_INT (iwhich);
2921 /* Given a value extracted from the return address register or stack slot,
2922 return the actual address encoded in that value. */
2925 expand_builtin_extract_return_addr (tree addr_tree)
2927 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2929 if (GET_MODE (addr) != Pmode
2930 && GET_MODE (addr) != VOIDmode)
2932 #ifdef POINTERS_EXTEND_UNSIGNED
2933 addr = convert_memory_address (Pmode, addr);
2935 addr = convert_to_mode (Pmode, addr, 0);
2939 /* First mask out any unwanted bits. */
2940 #ifdef MASK_RETURN_ADDR
2941 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
2944 /* Then adjust to find the real return address. */
2945 #if defined (RETURN_ADDR_OFFSET)
2946 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
2952 /* Given an actual address in addr_tree, do any necessary encoding
2953 and return the value to be stored in the return address register or
2954 stack slot so the epilogue will return to that address. */
2957 expand_builtin_frob_return_addr (tree addr_tree)
2959 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
2961 addr = convert_memory_address (Pmode, addr);
2963 #ifdef RETURN_ADDR_OFFSET
2964 addr = force_reg (Pmode, addr);
2965 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
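/* Editorial sketch, not part of the original source: the extract/frob
   pair above as plain integer arithmetic.  The mask and offset values
   here are hypothetical; real targets define MASK_RETURN_ADDR and
   RETURN_ADDR_OFFSET themselves.  */
#if 0 /* illustration only */
#define TOY_MASK_RETURN_ADDR 0xfffffffeUL  /* e.g. clear a mode bit */
#define TOY_RETURN_ADDR_OFFSET 2           /* e.g. step over a slot */

static unsigned long
toy_extract_return_addr (unsigned long raw)
{
  raw &= TOY_MASK_RETURN_ADDR;		/* first mask out unwanted bits */
  return raw + TOY_RETURN_ADDR_OFFSET;	/* then find the real address */
}

static unsigned long
toy_frob_return_addr (unsigned long addr)
{
  /* Inverse adjustment, so the epilogue returns to ADDR.  */
  return addr - TOY_RETURN_ADDR_OFFSET;
}
#endif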
2971 /* Set up the epilogue with the magic bits we'll need to return to the
2972 exception handler. */
2975 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
2980 #ifdef EH_RETURN_STACKADJ_RTX
2981 tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
2982 VOIDmode, EXPAND_NORMAL);
2983 tmp = convert_memory_address (Pmode, tmp);
2984 if (!crtl->eh.ehr_stackadj)
2985 crtl->eh.ehr_stackadj = copy_to_reg (tmp);
2986 else if (tmp != crtl->eh.ehr_stackadj)
2987 emit_move_insn (crtl->eh.ehr_stackadj, tmp);
2990 tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
2991 VOIDmode, EXPAND_NORMAL);
2992 tmp = convert_memory_address (Pmode, tmp);
2993 if (!crtl->eh.ehr_handler)
2994 crtl->eh.ehr_handler = copy_to_reg (tmp);
2995 else if (tmp != crtl->eh.ehr_handler)
2996 emit_move_insn (crtl->eh.ehr_handler, tmp);
2998 if (!crtl->eh.ehr_label)
2999 crtl->eh.ehr_label = gen_label_rtx ();
3000 emit_jump (crtl->eh.ehr_label);
3004 expand_eh_return (void)
3008 if (! crtl->eh.ehr_label)
3011 crtl->calls_eh_return = 1;
3013 #ifdef EH_RETURN_STACKADJ_RTX
3014 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
3017 around_label = gen_label_rtx ();
3018 emit_jump (around_label);
3020 emit_label (crtl->eh.ehr_label);
3021 clobber_return_register ();
3023 #ifdef EH_RETURN_STACKADJ_RTX
3024 emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
3027 #ifdef HAVE_eh_return
3029 emit_insn (gen_eh_return (crtl->eh.ehr_handler));
3033 #ifdef EH_RETURN_HANDLER_RTX
3034 emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler);
3036 error ("__builtin_eh_return not supported on this target");
3040 emit_label (around_label);
3043 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
3044 POINTERS_EXTEND_UNSIGNED and return it. */
3047 expand_builtin_extend_pointer (tree addr_tree)
3049 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
3052 #ifdef POINTERS_EXTEND_UNSIGNED
3053 extend = POINTERS_EXTEND_UNSIGNED;
3055 /* The previous EH code did an unsigned extend by default, so we do this
3056 also for code compatibility. */
3060 return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
3063 /* In the following functions, we represent entries in the action table
3064 as 1-based indices. Special cases are:
3066 0: null action record, non-null landing pad; implies cleanups
3067 -1: null action record, null landing pad; implies no action
3068 -2: no call-site entry; implies must_not_throw
3069 -3: we have yet to process outer regions
3071 Further, no special cases apply to the "next" field of the record.
3072 For next, 0 means end of list. */
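/* Editorial sketch, not part of the original source: the special
   action-table indices described above as named constants.  The enum
   and its identifiers are hypothetical.  */
#if 0 /* illustration only */
enum toy_action_index
{
  TOY_ACTION_UNPROCESSED = -3,     /* outer regions not yet processed */
  TOY_ACTION_MUST_NOT_THROW = -2,  /* no call-site entry at all */
  TOY_ACTION_NO_ACTION = -1,       /* null record, null landing pad */
  TOY_ACTION_CLEANUP = 0           /* null record, non-null landing pad */
  /* Values >= 1 are 1-based indices into the action record table;
     in a record's "next" field, 0 instead means end of list.  */
};
#endif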
3074 struct action_record
3082 action_record_eq (const void *pentry, const void *pdata)
3084 const struct action_record *entry = (const struct action_record *) pentry;
3085 const struct action_record *data = (const struct action_record *) pdata;
3086 return entry->filter == data->filter && entry->next == data->next;
3090 action_record_hash (const void *pentry)
3092 const struct action_record *entry = (const struct action_record *) pentry;
3093 return entry->next * 1009 + entry->filter;
3097 add_action_record (htab_t ar_hash, int filter, int next)
3099 struct action_record **slot, *new_ar, tmp;
3101 tmp.filter = filter;
3103 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3105 if ((new_ar = *slot) == NULL)
3107 new_ar = XNEW (struct action_record);
3108 new_ar->offset = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3109 new_ar->filter = filter;
3110 new_ar->next = next;
3113 /* The filter value goes in untouched. The link to the next
3114 record is a "self-relative" byte offset, or zero to indicate
3115 that there is no next record. So convert the absolute 1-based
3116 indices we've been carrying around into a displacement. */
3118 push_sleb128 (&crtl->eh.action_record_data, filter);
3120 next -= VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3121 push_sleb128 (&crtl->eh.action_record_data, next);
3124 return new_ar->offset;
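/* Editorial worked example, not part of the original source: computing
   the self-relative link exactly as add_action_record does.  Suppose the
   action-record data already holds 4 bytes, so the new record's 1-based
   offset is 5.  After a one-byte filter is pushed the size is 5, and a
   chain to an earlier record at offset 3 is stored as 3 - (5 + 1) = -3:
   from the position where the link itself is written, step back 3 bytes
   to reach the target record.  The "toy_" name is hypothetical.  */
#if 0 /* illustration only */
static int
toy_self_relative_next (int next_abs_offset, int size_after_filter)
{
  return next_abs_offset - (size_after_filter + 1);
}
/* toy_self_relative_next (3, 5) == -3, as in the example above.  */
#endif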
3128 collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
3130 struct eh_region *c;
3133 /* If we've reached the top of the region chain, then we have
3134 no actions, and require no landing pad. */
3138 switch (region->type)
3141 /* A cleanup adds a zero filter to the beginning of the chain, but
3142 there are special cases to look out for. If there are *only*
3143 cleanups along a path, then it compresses to a zero action.
3144 Further, if there are multiple cleanups along a path, we only
3145 need to represent one of them, as that is enough to trigger
3146 entry to the landing pad at runtime. */
3147 next = collect_one_action_chain (ar_hash, region->outer);
3150 for (c = region->outer; c ; c = c->outer)
3151 if (c->type == ERT_CLEANUP)
3153 return add_action_record (ar_hash, 0, next);
3156 /* Process the associated catch regions in reverse order.
3157 If there's a catch-all handler, then we don't need to
3158 search outer regions. Use a magic -3 value to record
3159 that we haven't done the outer search. */
3161 for (c = region->u.eh_try.last_catch; c ; c = c->u.eh_catch.prev_catch)
3163 if (c->u.eh_catch.type_list == NULL)
3165 /* Retrieve the filter from the head of the filter list
3166 where we have stored it (see assign_filter_values). */
3168 = TREE_INT_CST_LOW (TREE_VALUE (c->u.eh_catch.filter_list));
3170 next = add_action_record (ar_hash, filter, 0);
3174 /* Once the outer search is done, trigger an action record for
3175 each filter we have. */
3180 next = collect_one_action_chain (ar_hash, region->outer);
3182 /* If there is no next action, terminate the chain. */
3185 /* If all outer actions are cleanups or must_not_throw,
3186 we'll have no action record for it, since we had wanted
3187 to encode these states in the call-site record directly.
3188 Add a cleanup action to the chain to catch these. */
3190 next = add_action_record (ar_hash, 0, 0);
3193 flt_node = c->u.eh_catch.filter_list;
3194 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3196 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3197 next = add_action_record (ar_hash, filter, next);
3203 case ERT_ALLOWED_EXCEPTIONS:
3204 /* An exception specification adds its filter to the
3205 beginning of the chain. */
3206 next = collect_one_action_chain (ar_hash, region->outer);
3208 /* If there is no next action, terminate the chain. */
3211 /* If all outer actions are cleanups or must_not_throw,
3212 we'll have no action record for it, since we had wanted
3213 to encode these states in the call-site record directly.
3214 Add a cleanup action to the chain to catch these. */
3216 next = add_action_record (ar_hash, 0, 0);
3218 return add_action_record (ar_hash, region->u.allowed.filter, next);
3220 case ERT_MUST_NOT_THROW:
3221 /* A must-not-throw region with no inner handlers or cleanups
3222 requires no call-site entry. Note that this differs from
3223 the no handler or cleanup case in that we do require an lsda
3224 to be generated. Return a magic -2 value to record this. */
3229 /* CATCH regions are handled in TRY above. THROW regions are
3230 for optimization information only and produce no output. */
3231 return collect_one_action_chain (ar_hash, region->outer);
3239 add_call_site (rtx landing_pad, int action)
3241 call_site_record record;
3243 record = GGC_NEW (struct call_site_record);
3244 record->landing_pad = landing_pad;
3245 record->action = action;
3247 VEC_safe_push (call_site_record, gc, crtl->eh.call_site_record, record);
3249 return call_site_base + VEC_length (call_site_record, crtl->eh.call_site_record) - 1;
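/* Editorial worked example, not part of the original source: indices
   returned by add_call_site are cumulative across functions via
   call_site_base.  With call_site_base == 10 and two records already
   pushed in this function, the next call pushes a third record and
   returns 10 + 3 - 1 == 12, so this function's records are numbered
   10, 11 and 12.  The numbers are hypothetical.  */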
3252 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3253 The new note numbers will not refer to region numbers, but
3254 instead to call site entries. */
3257 convert_to_eh_region_ranges (void)
3259 rtx insn, iter, note;
3261 int last_action = -3;
3262 rtx last_action_insn = NULL_RTX;
3263 rtx last_landing_pad = NULL_RTX;
3264 rtx first_no_action_insn = NULL_RTX;
3267 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3270 VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
3272 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3274 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3277 struct eh_region *region;
3279 rtx this_landing_pad;
3282 if (NONJUMP_INSN_P (insn)
3283 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3284 insn = XVECEXP (PATTERN (insn), 0, 0);
3286 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3289 if (! (CALL_P (insn)
3290 || (flag_non_call_exceptions
3291 && may_trap_p (PATTERN (insn)))))
3298 if (INTVAL (XEXP (note, 0)) <= 0)
3300 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
3301 this_action = collect_one_action_chain (ar_hash, region);
3304 /* Existence of catch handlers or must-not-throw regions
3305 implies that an lsda is needed (even if empty). */
3306 if (this_action != -1)
3307 crtl->uses_eh_lsda = 1;
3309 /* Delay creation of region notes for no-action regions
3310 until we're sure that an lsda will be required. */
3311 else if (last_action == -3)
3313 first_no_action_insn = iter;
3317 /* Cleanups and handlers may share action chains but not
3318 landing pads. Collect the landing pad for this region. */
3319 if (this_action >= 0)
3321 struct eh_region *o;
3322 for (o = region; ! o->landing_pad ; o = o->outer)
3324 this_landing_pad = o->landing_pad;
3327 this_landing_pad = NULL_RTX;
3329 /* Differing actions or landing pads implies a change in call-site
3330 info, which implies some EH_REGION note should be emitted. */
3331 if (last_action != this_action
3332 || last_landing_pad != this_landing_pad)
3334 /* If we'd not seen a previous action (-3) or the previous
3335 action was must-not-throw (-2), then we do not need an
3336 end note. */
3337 if (last_action >= -1)
3339 /* If we delayed the creation of the begin, do it now. */
3340 if (first_no_action_insn)
3342 call_site = add_call_site (NULL_RTX, 0);
3343 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3344 first_no_action_insn);
3345 NOTE_EH_HANDLER (note) = call_site;
3346 first_no_action_insn = NULL_RTX;
3349 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3351 NOTE_EH_HANDLER (note) = call_site;
3354 /* If the new action is must-not-throw, then no region notes
3355 are created. */
3356 if (this_action >= -1)
3358 call_site = add_call_site (this_landing_pad,
3359 this_action < 0 ? 0 : this_action);
3360 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3361 NOTE_EH_HANDLER (note) = call_site;
3364 last_action = this_action;
3365 last_landing_pad = this_landing_pad;
3367 last_action_insn = iter;
3370 if (last_action >= -1 && ! first_no_action_insn)
3372 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3373 NOTE_EH_HANDLER (note) = call_site;
3376 htab_delete (ar_hash);
3380 struct rtl_opt_pass pass_convert_to_eh_region_ranges =
3384 "eh_ranges", /* name */
3386 convert_to_eh_region_ranges, /* execute */
3389 0, /* static_pass_number */
3390 TV_NONE, /* tv_id */
3391 0, /* properties_required */
3392 0, /* properties_provided */
3393 0, /* properties_destroyed */
3394 0, /* todo_flags_start */
3395 TODO_dump_func, /* todo_flags_finish */
3401 push_uleb128 (varray_type *data_area, unsigned int value)
3405 unsigned char byte = value & 0x7f;
3409 VARRAY_PUSH_UCHAR (*data_area, byte);
3415 push_sleb128 (varray_type *data_area, int value)
3422 byte = value & 0x7f;
3424 more = ! ((value == 0 && (byte & 0x40) == 0)
3425 || (value == -1 && (byte & 0x40) != 0));
3428 VARRAY_PUSH_UCHAR (*data_area, byte);
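/* Editorial sketch, not part of the original source: the two LEB128
   encoders above in self-contained form, writing to a caller-supplied
   buffer instead of a varray.  The termination tests match the code
   above: unsigned stops once no value bits remain; signed stops once
   the remaining value is pure sign-extension of the last byte's bit 6.
   The "toy_" names are hypothetical.  */
#if 0 /* illustration only */
#include <stddef.h>

static size_t
toy_uleb128 (unsigned char *buf, unsigned int value)
{
  size_t n = 0;
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value)
	byte |= 0x80;		/* more bytes follow */
      buf[n++] = byte;
    }
  while (value);
  return n;
}

static size_t
toy_sleb128 (unsigned char *buf, int value)
{
  size_t n = 0;
  int more;
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;		/* assumes arithmetic right shift */
      more = ! ((value == 0 && (byte & 0x40) == 0)
		|| (value == -1 && (byte & 0x40) != 0));
      if (more)
	byte |= 0x80;
      buf[n++] = byte;
    }
  while (more);
  return n;
}
/* toy_uleb128 encodes 300 as 0xac 0x02; toy_sleb128 encodes -3 as 0x7d.  */
#endif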
3434 #ifndef HAVE_AS_LEB128
3436 dw2_size_of_call_site_table (void)
3438 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3439 int size = n * (4 + 4 + 4);
3442 for (i = 0; i < n; ++i)
3444 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3445 size += size_of_uleb128 (cs->action);
3452 sjlj_size_of_call_site_table (void)
3454 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3458 for (i = 0; i < n; ++i)
3460 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3461 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3462 size += size_of_uleb128 (cs->action);
3470 dw2_output_call_site_table (void)
3472 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3475 for (i = 0; i < n; ++i)
3477 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3478 char reg_start_lab[32];
3479 char reg_end_lab[32];
3480 char landing_pad_lab[32];
3482 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3483 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3485 if (cs->landing_pad)
3486 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3487 CODE_LABEL_NUMBER (cs->landing_pad));
3489 /* ??? Perhaps use insn length scaling if the assembler supports
3490 generic arithmetic. */
3491 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3492 data4 if the function is small enough. */
3493 #ifdef HAVE_AS_LEB128
3494 dw2_asm_output_delta_uleb128 (reg_start_lab,
3495 current_function_func_begin_label,
3496 "region %d start", i);
3497 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3499 if (cs->landing_pad)
3500 dw2_asm_output_delta_uleb128 (landing_pad_lab,
3501 current_function_func_begin_label,
3504 dw2_asm_output_data_uleb128 (0, "landing pad");
3506 dw2_asm_output_delta (4, reg_start_lab,
3507 current_function_func_begin_label,
3508 "region %d start", i);
3509 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3510 if (cs->landing_pad)
3511 dw2_asm_output_delta (4, landing_pad_lab,
3512 current_function_func_begin_label,
3515 dw2_asm_output_data (4, 0, "landing pad");
3517 dw2_asm_output_data_uleb128 (cs->action, "action");
3520 call_site_base += n;
3524 sjlj_output_call_site_table (void)
3526 int n = VEC_length (call_site_record, crtl->eh.call_site_record);
3529 for (i = 0; i < n; ++i)
3531 struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
3533 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3534 "region %d landing pad", i);
3535 dw2_asm_output_data_uleb128 (cs->action, "action");
3538 call_site_base += n;
3541 #ifndef TARGET_UNWIND_INFO
3542 /* Switch to the section that should be used for exception tables. */
3545 switch_to_exception_section (const char * ARG_UNUSED (fnname))
3549 if (exception_section)
3550 s = exception_section;
3553 /* Compute the section and cache it into exception_section,
3554 unless it depends on the function name. */
3555 if (targetm.have_named_sections)
3559 if (EH_TABLES_CAN_BE_READ_ONLY)
3562 ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3563 flags = ((! flag_pic
3564 || ((tt_format & 0x70) != DW_EH_PE_absptr
3565 && (tt_format & 0x70) != DW_EH_PE_aligned))
3566 ? 0 : SECTION_WRITE);
3569 flags = SECTION_WRITE;
3571 #ifdef HAVE_LD_EH_GC_SECTIONS
3572 if (flag_function_sections)
3574 char *section_name = XNEWVEC (char, strlen (fnname) + 32);
3575 sprintf (section_name, ".gcc_except_table.%s", fnname);
3576 s = get_section (section_name, flags, NULL);
3577 free (section_name);
3582 = s = get_section (".gcc_except_table", flags, NULL);
3586 = s = flag_pic ? data_section : readonly_data_section;
3589 switch_to_section (s);
3594 /* Output a reference from an exception table to the type_info object TYPE.
3595 TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
3596 the value. */
3599 output_ttype (tree type, int tt_format, int tt_format_size)
3602 bool is_public = true;
3604 if (type == NULL_TREE)
3608 struct varpool_node *node;
3610 type = lookup_type_for_runtime (type);
3611 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
3613 /* Let cgraph know that the rtti decl is used. Not all of the
3614 paths below go through assemble_integer, which would take
3615 care of this for us. */
3617 if (TREE_CODE (type) == ADDR_EXPR)
3619 type = TREE_OPERAND (type, 0);
3620 if (TREE_CODE (type) == VAR_DECL)
3622 node = varpool_node (type);
3624 varpool_mark_needed_node (node);
3625 is_public = TREE_PUBLIC (type);
3629 gcc_assert (TREE_CODE (type) == INTEGER_CST);
3632 /* Allow the target to override the type table entry format. */
3633 if (targetm.asm_out.ttype (value))
3636 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
3637 assemble_integer (value, tt_format_size,
3638 tt_format_size * BITS_PER_UNIT, 1);
3640 dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
3644 output_function_exception_table (const char * ARG_UNUSED (fnname))
3646 int tt_format, cs_format, lp_format, i, n;
3647 #ifdef HAVE_AS_LEB128