/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurred without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ]  */
#include "coretypes.h"
#include "insn-config.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "langhooks.h"
#include "diagnostic.h"
#include "tree-pass.h"
#include "tree-flow.h"
/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif

/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
gimple (*lang_protect_cleanup_actions) (void);

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) (tree a, tree b);

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) (tree);

/* A hash table of label to region number.  */

struct GTY(()) ehl_map_entry {
  rtx label;
  struct eh_region_d *region;
};

static GTY(()) int call_site_base;
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;

struct GTY(()) call_site_record_d
{
  rtx landing_pad;
  int action;
};
static int t2r_eq (const void *, const void *);
static hashval_t t2r_hash (const void *);

static int ttypes_filter_eq (const void *, const void *);
static hashval_t ttypes_filter_hash (const void *);
static int ehspec_filter_eq (const void *, const void *);
static hashval_t ehspec_filter_hash (const void *);
static int add_ttypes_entry (htab_t, tree);
static int add_ehspec_entry (htab_t, htab_t, tree);
static void assign_filter_values (void);
static void build_post_landing_pads (void);
static void connect_post_landing_pads (void);
static void dw2_build_landing_pads (void);

static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
static void sjlj_mark_call_sites (struct sjlj_lp_info *);
static void sjlj_emit_function_enter (rtx);
static void sjlj_emit_function_exit (void);
static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
static void sjlj_build_landing_pads (void);

static void remove_eh_handler (struct eh_region_d *);
static void remove_eh_handler_and_replace (struct eh_region_d *,
                                           struct eh_region_d *, bool);

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

struct reachable_info;
static enum reachable_code reachable_next_level (struct eh_region_d *, tree,
                                                 struct reachable_info *, bool);

static int action_record_eq (const void *, const void *);
static hashval_t action_record_hash (const void *);
static int add_action_record (htab_t, int, int);
static int collect_one_action_chain (htab_t, struct eh_region_d *);
static int add_call_site (rtx, int);

static void push_uleb128 (varray_type *, unsigned int);
static void push_sleb128 (varray_type *, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (void);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (void);
static void sjlj_output_call_site_table (void);
/* Routine to see if exception handling is turned on.
   DO_WARN is nonzero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (int do_warn)
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
        {
          error ("exception handling disabled, use -fexceptions to enable");
          warned = 1;
        }
      return 0;
    }
  return 1;
}
void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
                           build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
                         integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode
                              (targetm.unwind_word_mode (), 1),
                              tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
                          ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
                           ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
#else
      /* Should be large enough for most systems, if it is not,
         JMP_BUF_SIZE should be defined with the proper value.  It will
         also tend to be larger than necessary for most systems, a more
         optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* builtin_setjmp takes a pointer to 5 words.  */
      tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
#endif
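      /* For concreteness: on a typical 64-bit target with
         BITS_PER_WORD == 64 and POINTER_SIZE == 64, the expression
         above yields 5 * 64 / 64 - 1 == 4, i.e. an index type for a
         5-element array of pointers, matching the 5-word buffer that
         __builtin_setjmp expects.  (Illustrative values only; the
         actual numbers are target-defined.)  */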
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what alignment requirements the runtime's
         jmp_buf has.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
         easy access from rtl.  */
      sjlj_fc_call_site_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
        = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
           + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
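/* For reference, the record type built above corresponds roughly to the
   following C layout.  This is only an illustrative sketch; the
   authoritative definition lives in unwind-sjlj.c, and the jbuf field
   in particular is target-dependent:

     struct SjLj_Function_Context
     {
       struct SjLj_Function_Context *prev;   -- __prev
       int call_site;                        -- __call_site
       _Unwind_Word data[4];                 -- __data
       void *personality;                    -- __personality
       void *lsda;                           -- __lsda
       void *jbuf[];                         -- __jbuf; jmp_buf or
                                                builtin setjmp buffer
     };  */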
void
init_eh_for_function (void)
{
  cfun->eh = GGC_CNEW (struct eh_status);
}

/* Routines to generate the exception tree somewhat directly.
   These are used from tree-eh.c when processing exception related
   nodes during tree optimization.  */

static struct eh_region_d *
gen_eh_region (enum eh_region_type type, struct eh_region_d *outer)
{
  struct eh_region_d *new_eh;

#ifdef ENABLE_CHECKING
  gcc_assert (doing_eh (0));
#endif

  /* Insert a new blank region as a leaf in the tree.  */
  new_eh = GGC_CNEW (struct eh_region_d);
  new_eh->type = type;
  new_eh->outer = outer;
  if (outer)
    {
      new_eh->next_peer = outer->inner;
      outer->inner = new_eh;
    }
  else
    {
      new_eh->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_eh;
    }

  new_eh->region_number = ++cfun->eh->last_region_number;

  return new_eh;
}
struct eh_region_d *
gen_eh_region_cleanup (struct eh_region_d *outer)
{
  struct eh_region_d *cleanup = gen_eh_region (ERT_CLEANUP, outer);
  return cleanup;
}

struct eh_region_d *
gen_eh_region_try (struct eh_region_d *outer)
{
  return gen_eh_region (ERT_TRY, outer);
}

struct eh_region_d *
gen_eh_region_catch (struct eh_region_d *t, tree type_or_list)
{
  struct eh_region_d *c, *l;
  tree type_list, type_node;

  /* Ensure we always end up with a type list to normalize further
     processing, then register each type against the runtime types map.  */
  type_list = type_or_list;
  if (type_or_list)
    {
      if (TREE_CODE (type_or_list) != TREE_LIST)
        type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
        add_type_for_runtime (TREE_VALUE (type_node));
    }

  c = gen_eh_region (ERT_CATCH, t->outer);
  c->u.eh_catch.type_list = type_list;
  l = t->u.eh_try.last_catch;
  c->u.eh_catch.prev_catch = l;
  if (l)
    l->u.eh_catch.next_catch = c;
  else
    t->u.eh_try.eh_catch = c;
  t->u.eh_try.last_catch = c;

  return c;
}
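/* To illustrate the linkage built above: for a source construct such as

     try { ... }
     catch (A) { ... }
     catch (B) { ... }

   gen_eh_region_try creates one ERT_TRY region T, and each call to
   gen_eh_region_catch adds an ERT_CATCH region as a peer of T, with
   T->u.eh_try.eh_catch pointing at the first catch, ->last_catch at
   the last, and the catches chained through next_catch/prev_catch in
   declaration order.  (Sketch only; the tree-eh.c callers drive this.)  */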
struct eh_region_d *
gen_eh_region_allowed (struct eh_region_d *outer, tree allowed)
{
  struct eh_region_d *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
  region->u.allowed.type_list = allowed;

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  return region;
}

struct eh_region_d *
gen_eh_region_must_not_throw (struct eh_region_d *outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}

int
get_eh_region_number (struct eh_region_d *region)
{
  return region->region_number;
}

bool
get_eh_region_may_contain_throw (struct eh_region_d *region)
{
  return region->may_contain_throw;
}

tree
get_eh_region_tree_label (struct eh_region_d *region)
{
  return region->tree_label;
}

tree
get_eh_region_no_tree_label (int region)
{
  return VEC_index (eh_region, cfun->eh->region_array, region)->tree_label;
}

void
set_eh_region_tree_label (struct eh_region_d *region, tree lab)
{
  region->tree_label = lab;
}

void
expand_resx_expr (tree exp)
{
  int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
  struct eh_region_d *reg = VEC_index (eh_region,
                                       cfun->eh->region_array, region_nr);

  gcc_assert (!reg->resume);
  do_pending_stack_adjust ();
  reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
  emit_barrier ();
}

/* Note that the current EH region (if any) may contain a throw, or a
   call to a function which itself may contain a throw.  */

void
note_eh_region_may_contain_throw (struct eh_region_d *region)
{
  while (region && !region->may_contain_throw)
    {
      region->may_contain_throw = 1;
      region = region->outer;
    }
}

/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (void)
{
  if (! crtl->eh.exc_ptr)
    crtl->eh.exc_ptr = gen_reg_rtx (ptr_mode);
  return crtl->eh.exc_ptr;
}

/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

rtx
get_exception_filter (void)
{
  if (! crtl->eh.filter)
    crtl->eh.filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
  return crtl->eh.filter;
}

/* This section is for the exception handling specific optimization pass.  */

/* Random access the exception region tree.  */

void
collect_eh_region_array (void)
{
  struct eh_region_d *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
                 cfun->eh->last_region_number + 1);
  VEC_replace (eh_region, cfun->eh->region_array, 0, 0);

  while (1)
    {
      VEC_replace (eh_region, cfun->eh->region_array, i->region_number, i);

      /* If there are sub-regions, process them.  */
      if (i->inner)
        i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do
            {
              i = i->outer;
              if (i == NULL)
                return;
            }
          while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}
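/* The walk above visits the region tree in pre-order: children first
   via ->inner, then siblings via ->next_peer, climbing back out through
   ->outer when a subtree is exhausted.  For example (an illustrative
   tree, not from any particular function):

       1
      / \
     2   4
     |
     3

   is visited in the order 1, 2, 3, 4.  */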
/* R is a MUST_NOT_THROW region that is not reachable via local
   RESX instructions.  It still must be kept in the tree in case the runtime
   can unwind through it, or we will eliminate the terminate call the
   runtime would do otherwise.  Return TRUE if R contains throwing statements
   or some of the exceptions in inner regions can be unwound up to R.

   CONTAINS_STMT is a bitmap of all regions that contain some throwing
   statement.

   The function looks O(n^3) at first sight.  In fact it is called at most
   once for every MUST_NOT_THROW in the EH tree from remove_unreachable_regions.
   Because the outer loop walking subregions does not dive into MUST_NOT_THROW,
   the outer loop examines every region at most once.  The inner loop
   is doing unwinding from the throwing statement the same way as we do during
   CFG construction, so it is O(n^2) in the size of the EH tree, but O(n) in the
   size of the CFG.  In practice EH trees are wide, not deep, so this is not
   a problem.  */

static bool
can_be_reached_by_runtime (sbitmap contains_stmt, struct eh_region_d *r)
{
  struct eh_region_d *i = r->inner;
  unsigned n;
  bitmap_iterator bi;

  if (TEST_BIT (contains_stmt, r->region_number))
    return true;
  if (r->aka)
    EXECUTE_IF_SET_IN_BITMAP (r->aka, 0, n, bi)
      if (TEST_BIT (contains_stmt, n))
        return true;
  if (!i)
    return false;
  while (1)
    {
      /* It is pointless to look into MUST_NOT_THROW
         or dive into subregions.  They never unwind up.  */
      if (i->type != ERT_MUST_NOT_THROW)
        {
          bool found = TEST_BIT (contains_stmt, i->region_number);
          if (!found && i->aka)
            EXECUTE_IF_SET_IN_BITMAP (i->aka, 0, n, bi)
              if (TEST_BIT (contains_stmt, n))
                {
                  found = true;
                  break;
                }
          /* We have a nested region that contains a throwing statement.
             See if resuming might lead up to the resx or we get locally
             caught sooner.  If we get locally caught sooner, we either
             know region R is not reachable or it would have a direct edge
             from the EH resx and thus consider the region reachable in
             the first place.  */
          if (found)
            {
              struct eh_region_d *i1 = i;
              tree type_thrown = NULL_TREE;

              if (i1->type == ERT_THROW)
                {
                  type_thrown = i1->u.eh_throw.type;
                  i1 = i1->outer;
                }
              for (; i1 != r; i1 = i1->outer)
                if (reachable_next_level (i1, type_thrown, NULL,
                                          false) >= RNL_CAUGHT)
                  break;
              if (i1 == r)
                return true;
            }
        }
      /* If there are sub-regions, process them.  */
      if (i->type != ERT_MUST_NOT_THROW && i->inner)
        i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
        i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
        {
          do
            {
              i = i->outer;
              if (i == r)
                return false;
            }
          while (i->next_peer == NULL);
          i = i->next_peer;
        }
    }
}
/* Bring region R to the root of the tree.  */

static void
bring_to_root (struct eh_region_d *r)
{
  struct eh_region_d **pp;
  struct eh_region_d *outer = r->outer;

  if (!r->outer)
    return;
  for (pp = &outer->inner; *pp != r; pp = &(*pp)->next_peer)
    continue;
  *pp = r->next_peer;
  r->outer = NULL;
  r->next_peer = cfun->eh->region_tree;
  cfun->eh->region_tree = r;
}

/* Return true if region R2 can be replaced by R1.  */

static bool
eh_region_replaceable_by_p (const struct eh_region_d *r1,
                            const struct eh_region_d *r2)
{
  /* Regions are semantically the same if they have the same type,
     the same label, and the same type-dependent data.  */
  if (r1->type != r2->type)
    return false;
  if (r1->tree_label != r2->tree_label)
    return false;

  /* Verify that also region type dependent data are the same.  */
  switch (r1->type)
    {
    case ERT_MUST_NOT_THROW:
    case ERT_CLEANUP:
      break;
    case ERT_TRY:
      {
        struct eh_region_d *c1, *c2;
        for (c1 = r1->u.eh_try.eh_catch,
             c2 = r2->u.eh_try.eh_catch;
             c1 && c2;
             c1 = c1->u.eh_catch.next_catch,
             c2 = c2->u.eh_catch.next_catch)
          if (!eh_region_replaceable_by_p (c1, c2))
            return false;
        if (c1 || c2)
          return false;
      }
      break;
    case ERT_CATCH:
      if (!list_equal_p (r1->u.eh_catch.type_list, r2->u.eh_catch.type_list))
        return false;
      if (!list_equal_p (r1->u.eh_catch.filter_list,
                         r2->u.eh_catch.filter_list))
        return false;
      break;
    case ERT_ALLOWED_EXCEPTIONS:
      if (!list_equal_p (r1->u.allowed.type_list, r2->u.allowed.type_list))
        return false;
      if (r1->u.allowed.filter != r2->u.allowed.filter)
        return false;
      break;
    case ERT_THROW:
      if (r1->u.eh_throw.type != r2->u.eh_throw.type)
        return false;
      break;
    default:
      gcc_unreachable ();
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Regions %i and %i match\n", r1->region_number,
             r2->region_number);
  return true;
}

/* Replace region R2 by R1.  */

static void
replace_region (struct eh_region_d *r1, struct eh_region_d *r2)
{
  struct eh_region_d *next1 = r1->u.eh_try.eh_catch;
  struct eh_region_d *next2 = r2->u.eh_try.eh_catch;
  bool is_try = r1->type == ERT_TRY;

  gcc_assert (r1->type != ERT_CATCH);
  remove_eh_handler_and_replace (r2, r1, false);

  /* Also replace the corresponding CATCH regions of a TRY.  */
  while (is_try && next1)
    {
      gcc_assert (next1->type == ERT_CATCH);
      gcc_assert (next2->type == ERT_CATCH);
      next1 = next1->u.eh_catch.next_catch;
      next2 = next2->u.eh_catch.next_catch;
      remove_eh_handler_and_replace (r2, r1, false);
    }
}

/* Return hash value of type list T.  */

static hashval_t
hash_type_list (tree t)
{
  hashval_t val = 0;

  for (; t; t = TREE_CHAIN (t))
    val = iterative_hash_hashval_t (TREE_HASH (TREE_VALUE (t)), val);
  return val;
}

/* Hash EH regions so semantically same regions get same hash value.  */

static hashval_t
hash_eh_region (const void *r)
{
  const struct eh_region_d *region = (const struct eh_region_d *) r;
  hashval_t val = region->type;

  if (region->tree_label)
    val = iterative_hash_hashval_t (LABEL_DECL_UID (region->tree_label), val);
  switch (region->type)
    {
    case ERT_MUST_NOT_THROW:
    case ERT_CLEANUP:
      break;
    case ERT_TRY:
      {
        struct eh_region_d *c;
        for (c = region->u.eh_try.eh_catch;
             c; c = c->u.eh_catch.next_catch)
          val = iterative_hash_hashval_t (hash_eh_region (c), val);
      }
      break;
    case ERT_CATCH:
      val = iterative_hash_hashval_t (hash_type_list
                                      (region->u.eh_catch.type_list), val);
      break;
    case ERT_ALLOWED_EXCEPTIONS:
      val = iterative_hash_hashval_t
        (hash_type_list (region->u.allowed.type_list), val);
      val = iterative_hash_hashval_t (region->u.allowed.filter, val);
      break;
    case ERT_THROW:
      val |= iterative_hash_hashval_t (TYPE_UID (region->u.eh_throw.type), val);
      break;
    default:
      gcc_unreachable ();
    }
  return val;
}

/* Return true if regions R1 and R2 are equal.  */

static int
eh_regions_equal_p (const void *r1, const void *r2)
{
  return eh_region_replaceable_by_p ((const struct eh_region_d *) r1,
                                     (const struct eh_region_d *) r2);
}

/* Walk all peers of REGION and try to merge those regions
   that are semantically equivalent.  Look into subregions
   recursively.  */

static bool
merge_peers (struct eh_region_d *region)
{
  struct eh_region_d *r1, *r2, *outer = NULL, *next;
  bool merged = false;
  int num_regions = 0;

  if (region)
    outer = region->outer;
  else
    return false;

  /* First see if there is an inner region equivalent to the region
     in question.  EH control flow is acyclic so we know we
     won't loop here.  */
  for (r1 = region; r1; r1 = next)
    {
      next = r1->next_peer;
      if (r1->type == ERT_CATCH)
        continue;
      if (eh_region_replaceable_by_p (r1->outer, r1))
        {
          replace_region (r1->outer, r1);
          merged = true;
        }
      else
        num_regions++;
    }

  /* Get new first region and try to match the peers
     for equivalence.  */
  if (outer)
    region = outer->inner;
  else
    region = cfun->eh->region_tree;

  /* There are few regions to inspect:
     N^2 loop matching each region with each region
     will do the job well.  */
  if (num_regions < 10)
    {
      for (r1 = region; r1; r1 = r1->next_peer)
        {
          if (r1->type == ERT_CATCH)
            continue;
          for (r2 = r1->next_peer; r2; r2 = next)
            {
              next = r2->next_peer;
              if (eh_region_replaceable_by_p (r1, r2))
                {
                  replace_region (r1, r2);
                  merged = true;
                }
            }
        }
    }
  /* Or use hashtable to avoid N^2 behaviour.  */
  else
    {
      htab_t hash;
      hash = htab_create (num_regions, hash_eh_region,
                          eh_regions_equal_p, NULL);
      for (r1 = region; r1; r1 = next)
        {
          void **slot;

          next = r1->next_peer;
          if (r1->type == ERT_CATCH)
            continue;
          slot = htab_find_slot (hash, r1, INSERT);
          if (!*slot)
            *slot = r1;
          else
            replace_region ((struct eh_region_d *) *slot, r1);
        }
      htab_delete (hash);
    }
  for (r1 = region; r1; r1 = r1->next_peer)
    merged |= merge_peers (r1->inner);
  return merged;
}
/* Remove all regions whose labels are not reachable.
   REACHABLE is a bitmap of all regions that are used by the function.
   CONTAINS_STMT is a bitmap of all regions that contain a stmt (or NULL).  */

void
remove_unreachable_regions (sbitmap reachable, sbitmap contains_stmt)
{
  int i;
  struct eh_region_d *r;
  VEC(eh_region,heap) *must_not_throws = VEC_alloc (eh_region, heap, 16);
  struct eh_region_d *local_must_not_throw = NULL;
  struct eh_region_d *first_must_not_throw = NULL;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = VEC_index (eh_region, cfun->eh->region_array, i);
      if (!r || r->region_number != i)
        continue;
      if (!TEST_BIT (reachable, i) && !r->resume)
        {
          bool kill_it = true;

          r->tree_label = NULL;
          switch (r->type)
            {
            case ERT_THROW:
              /* Don't remove ERT_THROW regions if their outer region
                 is reachable.  */
              if (r->outer && TEST_BIT (reachable, r->outer->region_number))
                kill_it = false;
              break;
            case ERT_MUST_NOT_THROW:
              /* MUST_NOT_THROW regions are implementable solely in the
                 runtime, but we need them when inlining functions.

                 Keep them if the outer region is not MUST_NOT_THROW as well
                 and if they contain some statement that might unwind through
                 them.  */
              if ((!r->outer || r->outer->type != ERT_MUST_NOT_THROW)
                  && (!contains_stmt
                      || can_be_reached_by_runtime (contains_stmt, r)))
                kill_it = false;
              break;
            case ERT_TRY:
              {
                /* TRY regions are reachable if any of its CATCH regions
                   are reachable.  */
                struct eh_region_d *c;
                for (c = r->u.eh_try.eh_catch; c;
                     c = c->u.eh_catch.next_catch)
                  if (TEST_BIT (reachable, c->region_number))
                    {
                      kill_it = false;
                      break;
                    }
                break;
              }

            default:
              break;
            }

          if (kill_it)
            {
              if (dump_file)
                fprintf (dump_file, "Removing unreachable eh region %i\n",
                         r->region_number);
              remove_eh_handler (r);
            }
          else if (r->type == ERT_MUST_NOT_THROW)
            {
              if (!first_must_not_throw)
                first_must_not_throw = r;
              VEC_safe_push (eh_region, heap, must_not_throws, r);
            }
        }
      else
        if (r->type == ERT_MUST_NOT_THROW)
          {
            if (!local_must_not_throw)
              local_must_not_throw = r;
            VEC_safe_push (eh_region, heap, must_not_throws, r);
          }
    }

  /* MUST_NOT_THROW regions without a local handler are all the same; they
     trigger the terminate call in the runtime.
     MUST_NOT_THROW regions handled locally can differ in debug info associated
     with the std::terminate () call, or in whether they call terminate or
     abort when one is coming from Java and the other from C++.

     We merge all MUST_NOT_THROW regions handled by the runtime into one.
     We also bring all local MUST_NOT_THROW regions to the roots of the EH tree
     (since unwinding never continues to the outer region anyway).
     If a MUST_NOT_THROW with a local handler is present in the tree, we use
     that region to merge into, since it will remain in the tree anyway;
     otherwise we use the first MUST_NOT_THROW.

     Merging of locally handled regions needs changes to the CFG.  Crossjumping
     should take care of this, by looking at the actual code and
     ensuring that the cleanup actions are really the same.  */

  if (local_must_not_throw)
    first_must_not_throw = local_must_not_throw;

  for (i = 0; VEC_iterate (eh_region, must_not_throws, i, r); i++)
    {
      if (!r->label && !r->tree_label && r != first_must_not_throw)
        {
          if (dump_file)
            fprintf (dump_file, "Replacing MUST_NOT_THROW region %i by %i\n",
                     r->region_number,
                     first_must_not_throw->region_number);
          remove_eh_handler_and_replace (r, first_must_not_throw, false);
          first_must_not_throw->may_contain_throw |= r->may_contain_throw;
        }
      else
        bring_to_root (r);
    }
  merge_peers (cfun->eh->region_tree);
#ifdef ENABLE_CHECKING
  verify_eh_tree (cfun);
#endif
  VEC_free (eh_region, heap, must_not_throws);
}
/* Return array mapping LABEL_DECL_UID to region such that the region's
   tree_label is identical to the label.  */

VEC (int, heap) *
label_to_region_map (void)
{
  VEC (int, heap) * label_to_region = NULL;
  int i;
  int idx;

  VEC_safe_grow_cleared (int, heap, label_to_region,
                         cfun->cfg->last_label_uid + 1);
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region_d *r = VEC_index (eh_region, cfun->eh->region_array, i);
      if (r && r->region_number == i
          && r->tree_label && LABEL_DECL_UID (r->tree_label) >= 0)
        {
          if ((idx = VEC_index (int, label_to_region,
                                LABEL_DECL_UID (r->tree_label))) != 0)
            r->next_region_sharing_label =
              VEC_index (eh_region, cfun->eh->region_array, idx);
          else
            r->next_region_sharing_label = NULL;
          VEC_replace (int, label_to_region, LABEL_DECL_UID (r->tree_label),
                       i);
        }
    }
  return label_to_region;
}

/* Return number of EH regions.  */
int
num_eh_regions (void)
{
  return cfun->eh->last_region_number + 1;
}

/* Return next region sharing same label as REGION.  */

int
get_next_region_sharing_label (int region)
{
  struct eh_region_d *r;

  r = VEC_index (eh_region, cfun->eh->region_array, region);
  if (!r || !r->next_region_sharing_label)
    return 0;
  return r->next_region_sharing_label->region_number;
}

/* Set up EH labels for RTL.  */

void
convert_from_eh_region_ranges (void)
{
  int i, n = cfun->eh->last_region_number;

  /* Most of the work is already done at the tree level.  All we need to
     do is collect the rtl labels that correspond to the tree labels
     we allocated earlier.  */
  for (i = 1; i <= n; ++i)
    {
      struct eh_region_d *region;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (region && region->tree_label)
        region->label = DECL_RTL_IF_SET (region->tree_label);
    }
}

void
find_exception_handler_labels (void)
{
  int i;

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region_d *region;
      rtx lab;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (! region || region->region_number != i)
        continue;
      if (crtl->eh.built_landing_pads)
        lab = region->landing_pad;
      else
        lab = region->label;
    }
}

/* Returns true if the current function has exception handling regions.  */

bool
current_function_has_exception_handlers (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region_d *region;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (region
          && region->region_number == i
          && region->type != ERT_THROW)
        return true;
    }

  return false;
}
/* A subroutine of duplicate_eh_regions.  Search the region tree under O
   for the minimum and maximum region numbers.  Update *MIN and *MAX.  */

static void
duplicate_eh_regions_0 (eh_region o, int *min, int *max)
{
  int i;

  if (o->aka)
    {
      i = bitmap_first_set_bit (o->aka);
      if (i < *min)
        *min = i;
      i = bitmap_last_set_bit (o->aka);
      if (i > *max)
        *max = i;
    }
  if (o->region_number < *min)
    *min = o->region_number;
  if (o->region_number > *max)
    *max = o->region_number;

  if (o->inner)
    {
      o = o->inner;
      duplicate_eh_regions_0 (o, min, max);
      while (o->next_peer)
        {
          o = o->next_peer;
          duplicate_eh_regions_0 (o, min, max);
        }
    }
}

/* A subroutine of duplicate_eh_regions.  Copy the region tree under OLD.
   Root it at OUTER, and apply EH_OFFSET to the region number.  Don't worry
   about the other internal pointers just yet, just the tree-like pointers.  */

static eh_region
duplicate_eh_regions_1 (eh_region old, eh_region outer, int eh_offset)
{
  eh_region ret, n;

  ret = n = GGC_NEW (struct eh_region_d);

  *n = *old;
  n->outer = outer;
  n->next_peer = NULL;
  if (old->aka)
    {
      unsigned i;
      bitmap_iterator bi;
      n->aka = BITMAP_GGC_ALLOC ();

      EXECUTE_IF_SET_IN_BITMAP (old->aka, 0, i, bi)
        {
          bitmap_set_bit (n->aka, i + eh_offset);
          VEC_replace (eh_region, cfun->eh->region_array, i + eh_offset, n);
        }
    }

  n->region_number += eh_offset;
  VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);

  if (old->inner)
    {
      old = old->inner;
      n = n->inner = duplicate_eh_regions_1 (old, ret, eh_offset);
      while (old->next_peer)
        {
          old = old->next_peer;
          n = n->next_peer = duplicate_eh_regions_1 (old, ret, eh_offset);
        }
    }

  return ret;
}

/* Look for the first outer region of R (or R itself) that is a
   TRY region.  Return NULL if none.  */

static struct eh_region_d *
find_prev_try (struct eh_region_d * r)
{
  for (; r && r->type != ERT_TRY; r = r->outer)
    if (r->type == ERT_MUST_NOT_THROW
        || (r->type == ERT_ALLOWED_EXCEPTIONS
            && !r->u.allowed.type_list))
      {
        r = NULL;
        break;
      }
  return r;
}

/* Duplicate the EH regions of IFUN, rooted at COPY_REGION, into the current
   function and root the tree below OUTER_REGION.  Remap labels using the MAP
   callback.  The special case of COPY_REGION of 0 means all regions.  */

int
duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map,
                      void *data, int copy_region, int outer_region)
{
  eh_region cur, outer, *splice;
  int i, min_region, max_region, eh_offset, cfun_last_region_number;
  int num_regions;

  if (!ifun->eh->region_tree)
    return 0;

#ifdef ENABLE_CHECKING
  verify_eh_tree (ifun);
#endif

  /* Find the range of region numbers to be copied.  The interface we
     provide here mandates a single offset to find new number from old,
     which means we must look at the numbers present, instead of the
     count or something else.  */
  if (copy_region > 0)
    {
      min_region = INT_MAX;
      max_region = 0;

      cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
      duplicate_eh_regions_0 (cur, &min_region, &max_region);
    }
  else
    {
      min_region = 1;
      max_region = ifun->eh->last_region_number;
    }
  num_regions = max_region - min_region + 1;
  cfun_last_region_number = cfun->eh->last_region_number;
  eh_offset = cfun_last_region_number + 1 - min_region;
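  /* For example (illustrative numbers only): if IFUN's copied subtree
     spans region numbers 3..7 (min_region == 3) and CFUN currently has
     last_region_number == 10, then eh_offset == 10 + 1 - 3 == 8, so old
     region 3 becomes 11, old region 7 becomes 15, and num_regions == 5
     fresh slots are grown below.  */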
  /* If we've not yet created a region array, do so now.  */
  cfun->eh->last_region_number = cfun_last_region_number + num_regions;
  VEC_safe_grow_cleared (eh_region, gc, cfun->eh->region_array,
                         cfun->eh->last_region_number + 1);

  /* Locate the spot at which to insert the new tree.  */
  if (outer_region > 0)
    {
      outer = VEC_index (eh_region, cfun->eh->region_array, outer_region);
      if (outer)
        splice = &outer->inner;
      else
        splice = &cfun->eh->region_tree;
    }
  else
    {
      outer = NULL;
      splice = &cfun->eh->region_tree;
    }
  while (*splice)
    splice = &(*splice)->next_peer;

  if (!ifun->eh->region_tree)
    {
      if (outer)
        for (i = cfun_last_region_number + 1;
             i <= cfun->eh->last_region_number; i++)
          {
            VEC_replace (eh_region, cfun->eh->region_array, i, outer);
            if (outer->aka == NULL)
              outer->aka = BITMAP_GGC_ALLOC ();
            bitmap_set_bit (outer->aka, i);
          }
      return eh_offset;
    }

  /* Copy all the regions in the subtree.  */
  if (copy_region > 0)
    {
      cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
      *splice = duplicate_eh_regions_1 (cur, outer, eh_offset);
    }
  else
    {
      eh_region n;

      cur = ifun->eh->region_tree;
      *splice = n = duplicate_eh_regions_1 (cur, outer, eh_offset);
      while (cur->next_peer)
        {
          cur = cur->next_peer;
          n = n->next_peer = duplicate_eh_regions_1 (cur, outer, eh_offset);
        }
    }

  /* Remap all the labels in the new regions.  */
  for (i = cfun_last_region_number + 1;
       VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
    if (cur && cur->tree_label)
      cur->tree_label = map (cur->tree_label, data);

  /* Remap all of the internal catch and cleanup linkages.  Since we
     duplicate entire subtrees, all of the referenced regions will have
     been copied too.  And since we renumbered them as a block, a simple
     bit of arithmetic finds us the index for the replacement region.  */
  for (i = cfun_last_region_number + 1;
       VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
    {
      /* All removed EH that is toplevel in the input function is now
         in the outer EH of the output function.  */
      if (cur == NULL)
        {
          gcc_assert (VEC_index
                      (eh_region, ifun->eh->region_array,
                       i - eh_offset) == NULL);
          if (outer)
            {
              VEC_replace (eh_region, cfun->eh->region_array, i, outer);
              if (outer->aka == NULL)
                outer->aka = BITMAP_GGC_ALLOC ();
              bitmap_set_bit (outer->aka, i);
            }
          continue;
        }
      if (i != cur->region_number)
        continue;

#define REMAP(REG) \
        (REG) = VEC_index (eh_region, cfun->eh->region_array, \
                           (REG)->region_number + eh_offset)

      switch (cur->type)
        {
        case ERT_TRY:
          if (cur->u.eh_try.eh_catch)
            REMAP (cur->u.eh_try.eh_catch);
          if (cur->u.eh_try.last_catch)
            REMAP (cur->u.eh_try.last_catch);
          break;

        case ERT_CATCH:
          if (cur->u.eh_catch.next_catch)
            REMAP (cur->u.eh_catch.next_catch);
          if (cur->u.eh_catch.prev_catch)
            REMAP (cur->u.eh_catch.prev_catch);
          break;

        default:
          break;
        }

#undef REMAP
    }
#ifdef ENABLE_CHECKING
  verify_eh_tree (cfun);
#endif

  return eh_offset;
}
/* Return new copy of eh region OLD inside region NEW_OUTER.
   Do not care about updating the tree otherwise.  */

static struct eh_region_d *
copy_eh_region_1 (struct eh_region_d *old, struct eh_region_d *new_outer)
{
  struct eh_region_d *new_eh = gen_eh_region (old->type, new_outer);

  new_eh->tree_label = old->tree_label;
  new_eh->may_contain_throw = old->may_contain_throw;
  VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
                 cfun->eh->last_region_number + 1);
  VEC_replace (eh_region, cfun->eh->region_array, new_eh->region_number, new_eh);
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Copying region %i to %i\n", old->region_number,
             new_eh->region_number);
  return new_eh;
}

/* Return new copy of eh region OLD inside region NEW_OUTER.

   Copy the whole catch-try chain if necessary.  */

static struct eh_region_d *
copy_eh_region (struct eh_region_d *old, struct eh_region_d *new_outer)
{
  struct eh_region_d *r, *n, *old_try, *new_try, *ret = NULL;
  VEC(eh_region,heap) *catch_list = NULL;

  if (old->type != ERT_CATCH)
    {
      gcc_assert (old->type != ERT_TRY);
      r = copy_eh_region_1 (old, new_outer);
      return r;
    }

  /* Locate and copy corresponding TRY.  */
  for (old_try = old->next_peer; old_try->type == ERT_CATCH;
       old_try = old_try->next_peer)
    continue;
  gcc_assert (old_try->type == ERT_TRY);
  new_try = gen_eh_region_try (new_outer);
  new_try->tree_label = old_try->tree_label;
  new_try->may_contain_throw = old_try->may_contain_throw;
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Copying try-catch regions.  Try: %i to %i\n",
             old_try->region_number, new_try->region_number);
  VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
                 cfun->eh->last_region_number + 1);
  VEC_replace (eh_region, cfun->eh->region_array, new_try->region_number,
               new_try);

  /* In order to keep the CATCH list in order, we need to copy in reverse
     order.  */
  for (r = old_try->u.eh_try.last_catch; r->type == ERT_CATCH; r = r->next_peer)
    VEC_safe_push (eh_region, heap, catch_list, r);

  while (VEC_length (eh_region, catch_list))
    {
      r = VEC_pop (eh_region, catch_list);

      /* Duplicate CATCH.  */
      n = gen_eh_region_catch (new_try, r->u.eh_catch.type_list);
      n->tree_label = r->tree_label;
      n->may_contain_throw = r->may_contain_throw;
      VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
                     cfun->eh->last_region_number + 1);
      VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);
      n->tree_label = r->tree_label;

      if (r == old)
        ret = n;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Copying try-catch regions.  Catch: %i to %i\n",
                 r->region_number, n->region_number);
    }
  VEC_free (eh_region, heap, catch_list);
  return ret;
}

/* Callback for foreach_reachable_handler that pushes REGION into a single
   VECtor DATA.  */

static void
push_reachable_handler (struct eh_region_d *region, void *data)
{
  VEC(eh_region,heap) **trace = (VEC(eh_region,heap) **) data;
  VEC_safe_push (eh_region, heap, *trace, region);
}
/* Redirect EH edge E to NEW_DEST_LABEL.
   IS_RESX, INLINABLE_CALL and REGION_NUMBER match the parameters of
   foreach_reachable_handler.  */

struct eh_region_d *
redirect_eh_edge_to_label (edge e, tree new_dest_label, bool is_resx,
                           bool inlinable_call, int region_number)
{
  struct eh_region_d *outer;
  struct eh_region_d *region;
  VEC (eh_region, heap) * trace = NULL;
  int i;
  int start_here = -1;
  basic_block old_bb = e->dest;
  struct eh_region_d *old, *r = NULL;
  bool update_inplace = true;
  edge_iterator ei;
  edge e2;

  /* If there is only one EH edge, we don't need to duplicate;
     just update labels in the tree.  */
  FOR_EACH_EDGE (e2, ei, old_bb->preds)
    if ((e2->flags & EDGE_EH) && e2 != e)
      {
        update_inplace = false;
        break;
      }

  region = VEC_index (eh_region, cfun->eh->region_array, region_number);
  gcc_assert (region);

  foreach_reachable_handler (region_number, is_resx, inlinable_call,
                             push_reachable_handler, &trace);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_eh_tree (dump_file, cfun);
      fprintf (dump_file, "Trace: ");
      for (i = 0; i < (int) VEC_length (eh_region, trace); i++)
        fprintf (dump_file, " %i", VEC_index (eh_region, trace, i)->region_number);
      fprintf (dump_file, " inplace: %i\n", update_inplace);
    }

  if (update_inplace)
    {
      /* In the easy route, just walk the trace and update all occurrences
         of the label.  */
      for (i = 0; i < (int) VEC_length (eh_region, trace); i++)
        {
          r = VEC_index (eh_region, trace, i);
          if (r->tree_label && label_to_block (r->tree_label) == old_bb)
            {
              r->tree_label = new_dest_label;
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "Updating label for region %i\n",
                         r->region_number);
            }
        }
      r = region;
    }
  else
    {
      /* Now look for the outermost handler that refers to the basic block
         in question.  We start our duplication there.  */
      for (i = 0; i < (int) VEC_length (eh_region, trace); i++)
        {
          r = VEC_index (eh_region, trace, i);
          if (r->tree_label && label_to_block (r->tree_label) == old_bb)
            start_here = i;
        }
      outer = VEC_index (eh_region, trace, start_here)->outer;
      gcc_assert (start_here >= 0);

      /* And now do the dirty job!  */
      for (i = start_here; i >= 0; i--)
        {
          old = VEC_index (eh_region, trace, i);
          gcc_assert (!outer || old->outer != outer->outer);

          /* Copy region and update label.  */
          r = copy_eh_region (old, outer);
          VEC_replace (eh_region, trace, i, r);
          if (r->tree_label && label_to_block (r->tree_label) == old_bb)
            {
              r->tree_label = new_dest_label;
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "Updating label for region %i\n",
                         r->region_number);
            }

          /* We got into copying CATCH.  copy_eh_region already did the job
             of copying all catch blocks corresponding to the try.  Now
             we need to update labels in all of them and see the trace.

             We continue nesting into the TRY region corresponding to CATCH:
             when duplicating an EH tree containing subregions of CATCH,
             the CATCH region itself is never inserted into the trace, so we
             never get here anyway.  */
          if (r->type == ERT_CATCH)
            {
              /* Walk other catch regions we copied and update labels as
                 needed.  */
              for (r = r->next_peer; r->type == ERT_CATCH; r = r->next_peer)
                if (r->tree_label && label_to_block (r->tree_label) == old_bb)
                  {
                    r->tree_label = new_dest_label;
                    if (dump_file && (dump_flags & TDF_DETAILS))
                      fprintf (dump_file, "Updating label for region %i\n",
                               r->region_number);
                  }
              gcc_assert (r->type == ERT_TRY);

              /* Skip sibling catch regions from the trace.
                 They are already updated.  */
              while (i > 0
                     && VEC_index (eh_region, trace, i - 1)->outer == old->outer)
                {
                  gcc_assert (VEC_index (eh_region, trace, i - 1)->type
                              == ERT_CATCH);
                  i--;
                }
            }

          outer = r;
        }

      if (is_resx || region->type == ERT_THROW)
        r = copy_eh_region (region, outer);
    }

  VEC_free (eh_region, heap, trace);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_eh_tree (dump_file, cfun);
      fprintf (dump_file, "New region: %i\n", r->region_number);
    }
  return r;
}
/* Return the region number of the region that is outer to both REGION_A
   and REGION_B in IFUN.  */

int
eh_region_outermost (struct function *ifun, int region_a, int region_b)
{
  struct eh_region_d *rp_a, *rp_b;
  sbitmap b_outer;

  gcc_assert (ifun->eh->last_region_number > 0);
  gcc_assert (ifun->eh->region_tree);

  rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
  rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
  gcc_assert (rp_a != NULL);
  gcc_assert (rp_b != NULL);

  b_outer = sbitmap_alloc (ifun->eh->last_region_number + 1);
  sbitmap_zero (b_outer);

  do
    {
      SET_BIT (b_outer, rp_b->region_number);
      rp_b = rp_b->outer;
    }
  while (rp_b);

  do
    {
      if (TEST_BIT (b_outer, rp_a->region_number))
        {
          sbitmap_free (b_outer);
          return rp_a->region_number;
        }
      rp_a = rp_a->outer;
    }
  while (rp_a);

  sbitmap_free (b_outer);
  return -1;
}

static int
t2r_eq (const void *pentry, const void *pdata)
{
  const_tree const entry = (const_tree) pentry;
  const_tree const data = (const_tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (const void *pentry)
{
  const_tree const entry = (const_tree) pentry;
  return TREE_HASH (TREE_PURPOSE (entry));
}

void
add_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
                                            TREE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}

tree
lookup_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
                                            TREE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct GTY(()) ttypes_filter {
  tree t;
  int filter;
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *const entry
    = (const struct ttypes_filter *) pentry;
  const_tree const data = (const_tree) pdata;

  return entry->t == data;
}

static hashval_t
ttypes_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TREE_HASH (entry->t);
}

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}

/* Add TYPE (which may be NULL) to crtl->eh.ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (htab_t ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = XNEW (struct ttypes_filter);
      n->t = type;
      n->filter = VEC_length (tree, crtl->eh.ttype_data) + 1;
      *slot = n;

      VEC_safe_push (tree, gc, crtl->eh.ttype_data, type);
    }

  return n->filter;
}
/* Add LIST to crtl->eh.ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = XNEW (struct ttypes_filter);
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) + 1);
      *slot = n;

      /* Generate a 0 terminated list of filter values.  */
      for (; list ; list = TREE_CHAIN (list))
        {
          if (targetm.arm_eabi_unwinder)
            VARRAY_PUSH_TREE (crtl->eh.ehspec_data, TREE_VALUE (list));
          else
            {
              /* Look up each type in the list and encode its filter
                 value as a uleb128.  */
              push_uleb128 (&crtl->eh.ehspec_data,
                            add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
            }
        }
      if (targetm.arm_eabi_unwinder)
        VARRAY_PUSH_TREE (crtl->eh.ehspec_data, NULL_TREE);
      else
        VARRAY_PUSH_UCHAR (crtl->eh.ehspec_data, 0);
    }

  return n->filter;
}
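/* For reference, unsigned LEB128 stores 7 bits per byte, least
   significant group first, with the high bit set on every byte except
   the last.  A minimal standalone sketch of the encoder (the real
   emitter is push_uleb128; "buf" here is a hypothetical output buffer):

     static void
     uleb128_sketch (unsigned char *buf, unsigned int value)
     {
       do
         {
           unsigned char byte = value & 0x7f;
           value >>= 7;
           if (value)
             byte |= 0x80;
           *buf++ = byte;
         }
       while (value);
     }

   E.g. the value 624485 encodes as the bytes 0xe5 0x8e 0x26.  */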
/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values (void)
{
  int i;
  htab_t ttypes, ehspec;

  crtl->eh.ttype_data = VEC_alloc (tree, gc, 16);
  if (targetm.arm_eabi_unwinder)
    VARRAY_TREE_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");
  else
    VARRAY_UCHAR_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region_d *r;

      r = VEC_index (eh_region, cfun->eh->region_array, i);

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
        continue;

      switch (r->type)
        {
        case ERT_CATCH:
          /* Whatever type_list is (NULL or true list), we build a list
             of filters for the region.  */
          r->u.eh_catch.filter_list = NULL_TREE;

          if (r->u.eh_catch.type_list != NULL)
            {
              /* Get a filter value for each of the types caught and store
                 them in the region's dedicated list.  */
              tree tp_node = r->u.eh_catch.type_list;

              for (;tp_node; tp_node = TREE_CHAIN (tp_node))
                {
                  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
                  tree flt_node = build_int_cst (NULL_TREE, flt);

                  r->u.eh_catch.filter_list
                    = tree_cons (NULL_TREE, flt_node,
                                 r->u.eh_catch.filter_list);
                }
            }
          else
            {
              /* Get a filter value for the NULL list also since it will need
                 an action record anyway.  */
              int flt = add_ttypes_entry (ttypes, NULL);
              tree flt_node = build_int_cst (NULL_TREE, flt);

              r->u.eh_catch.filter_list
                = tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
            }
          break;

        case ERT_ALLOWED_EXCEPTIONS:
          r->u.allowed.filter
            = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
          break;

        default:
          break;
        }
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}
/* Emit SEQ into the basic block just before INSN (that is assumed to be
   the first instruction of some existing BB) and return the newly
   produced block.  */

static basic_block
emit_to_new_bb_before (rtx seq, rtx insn)
{
  rtx last;
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
     call), we don't want it to go into newly created landing pad or other EH
     construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);
  last = emit_insn_before (seq, insn);
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}
/* Generate the code to actually handle exceptions, which will follow the
   landing pads.  */

static void
build_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region_d *region;
      rtx seq;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
        continue;

      switch (region->type)
        {
        case ERT_TRY:
          /* It is possible that the TRY region is kept alive only because
             some of the contained catch regions still have a RESX
             instruction, but they are reached via their copies.  In this
             case we need to do nothing.  */
          if (!region->u.eh_try.eh_catch->label)
            break;

          /* ??? Collect the set of all non-overlapping catch handlers
               all the way up the chain until blocked by a cleanup.  */
          /* ??? Outer try regions can share landing pads with inner
             try regions if the types are completely non-overlapping,
             and there are no intervening cleanups.  */

          region->post_landing_pad = gen_label_rtx ();

          start_sequence ();

          emit_label (region->post_landing_pad);

          /* ??? It is mighty inconvenient to call back into the
             switch statement generation code in expand_end_case.
             Rapid prototyping sez a sequence of ifs.  */
          {
            struct eh_region_d *c;
            for (c = region->u.eh_try.eh_catch; c ;
                 c = c->u.eh_catch.next_catch)
              {
                if (c->u.eh_catch.type_list == NULL)
                  emit_jump (c->label);
                else
                  {
                    /* Need for one cmp/jump per type caught.  Each type
                       list entry has a matching entry in the filter list
                       (see assign_filter_values).  */
                    tree tp_node = c->u.eh_catch.type_list;
                    tree flt_node = c->u.eh_catch.filter_list;

                    for (; tp_node; )
                      {
                        emit_cmp_and_jump_insns
                          (crtl->eh.filter,
                           GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
                           EQ, NULL_RTX,
                           targetm.eh_return_filter_mode (), 0, c->label);

                        tp_node = TREE_CHAIN (tp_node);
                        flt_node = TREE_CHAIN (flt_node);
                      }
                  }
              }
          }

          /* We delay the generation of the _Unwind_Resume until we generate
             landing pads.  We emit a marker here so as to get good control
             flow data in the meantime.  */
          region->resume
            = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
          emit_barrier ();

          seq = get_insns ();
          end_sequence ();

          emit_to_new_bb_before (seq, region->u.eh_try.eh_catch->label);

          break;
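          /* The sequence built above amounts to the following sketch
             (pseudo-code, with A and B standing for the caught types
             and L1/L2 for the corresponding handler labels):

               post_landing_pad:
                 if (eh_filter == filter-of-A) goto L1;
                 if (eh_filter == filter-of-B) goto L2;
                 RESX;   -- replaced later by a jump to an outer pad
                            or a call to _Unwind_Resume

             A catch-all (NULL type_list) instead emits an unconditional
             jump.  */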
        case ERT_ALLOWED_EXCEPTIONS:
          region->post_landing_pad = gen_label_rtx ();

          start_sequence ();

          emit_label (region->post_landing_pad);

          emit_cmp_and_jump_insns (crtl->eh.filter,
                                   GEN_INT (region->u.allowed.filter),
                                   EQ, NULL_RTX,
                                   targetm.eh_return_filter_mode (), 0,
                                   region->label);

          /* We delay the generation of the _Unwind_Resume until we generate
             landing pads.  We emit a marker here so as to get good control
             flow data in the meantime.  */
          region->resume
            = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
          emit_barrier ();

          seq = get_insns ();
          end_sequence ();

          emit_to_new_bb_before (seq, region->label);
          break;

        case ERT_CLEANUP:
        case ERT_MUST_NOT_THROW:
          region->post_landing_pad = region->label;
          break;

        case ERT_CATCH:
        case ERT_THROW:
          /* Nothing to do.  */
          break;

        default:
          gcc_unreachable ();
        }
    }
}
/* Replace RESX patterns with jumps to the next handler if any, or calls to
   _Unwind_Resume otherwise.  */

static void
connect_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region_d *region;
      struct eh_region_d *outer;
      rtx seq;
      rtx barrier;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
        continue;

      /* If there is no RESX, or it has been deleted by flow, there's
         nothing to fix up.  */
      if (! region->resume || INSN_DELETED_P (region->resume))
        continue;

      /* Search for another landing pad in this function.  */
      for (outer = region->outer; outer ; outer = outer->outer)
        if (outer->post_landing_pad)
          break;

      start_sequence ();

      if (outer)
        {
          edge e;
          basic_block src, dest;

          emit_jump (outer->post_landing_pad);
          src = BLOCK_FOR_INSN (region->resume);
          dest = BLOCK_FOR_INSN (outer->post_landing_pad);
          while (EDGE_COUNT (src->succs) > 0)
            remove_edge (EDGE_SUCC (src, 0));
          e = make_edge (src, dest, 0);
          e->probability = REG_BR_PROB_BASE;
          e->count = src->count;
        }
      else
        {
          emit_library_call (unwind_resume_libfunc, LCT_THROW,
                             VOIDmode, 1, crtl->eh.exc_ptr, ptr_mode);

          /* What we just emitted was a throwing libcall, so it got a
             barrier automatically added after it.  If the last insn in
             the libcall sequence isn't the barrier, it's because the
             target emits multiple insns for a call, and there are insns
             after the actual call insn (which are redundant and would be
             optimized away).  The barrier is inserted exactly after the
             call insn, so let's go get that and delete the insns after
             it, because below we need the barrier to be the last insn in
             the sequence.  */
          delete_insns_since (NEXT_INSN (last_call_insn ()));
        }

      seq = get_insns ();
      end_sequence ();
      barrier = emit_insn_before (seq, region->resume);
      /* Avoid duplicate barrier.  */
      gcc_assert (BARRIER_P (barrier));
      delete_insn (barrier);
      delete_insn (region->resume);

      /* ??? From tree-ssa we can wind up with catch regions whose
         label is not instantiated, but whose resx is present.  Now
         that we've dealt with the resx, kill the region.  */
      if (region->label == NULL && region->type == ERT_CLEANUP)
        remove_eh_handler (region);
    }
}
static void
dw2_build_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region_d *region;
      rtx seq;
      basic_block bb;
      edge e;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
        continue;

      if (region->type != ERT_CLEANUP
          && region->type != ERT_TRY
          && region->type != ERT_ALLOWED_EXCEPTIONS)
        continue;

      if (!region->post_landing_pad)
        continue;

      start_sequence ();

      region->landing_pad = gen_label_rtx ();
      emit_label (region->landing_pad);

#ifdef HAVE_exception_receiver
      if (HAVE_exception_receiver)
        emit_insn (gen_exception_receiver ());
      else
#endif
#ifdef HAVE_nonlocal_goto_receiver
        if (HAVE_nonlocal_goto_receiver)
          emit_insn (gen_nonlocal_goto_receiver ());
        else
#endif
          { /* Nothing */ }

      emit_move_insn (crtl->eh.exc_ptr,
                      gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
      emit_move_insn (crtl->eh.filter,
                      gen_rtx_REG (targetm.eh_return_filter_mode (),
                                   EH_RETURN_DATA_REGNO (1)));

      seq = get_insns ();
      end_sequence ();

      bb = emit_to_new_bb_before (seq, region->post_landing_pad);
      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
    }
}

struct sjlj_lp_info
{
  int directly_reachable;
  int action_index;
  int dispatch_index;
  int call_site_index;
};
static bool
sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
{
  rtx insn;
  bool found_one = false;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region_d *region;
      enum reachable_code rc;
      tree type_thrown;
      rtx note;

      if (! INSN_P (insn))
        continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
        continue;

      region = VEC_index (eh_region, cfun->eh->region_array,
                          INTVAL (XEXP (note, 0)));

      type_thrown = NULL_TREE;
      if (region->type == ERT_THROW)
        {
          type_thrown = region->u.eh_throw.type;
          region = region->outer;
        }

      /* Find the first containing region that might handle the exception.
         That's the landing pad to which we will transfer control.  */
      rc = RNL_NOT_CAUGHT;
      for (; region; region = region->outer)
        {
          rc = reachable_next_level (region, type_thrown, NULL, false);
          if (rc != RNL_NOT_CAUGHT)
            break;
        }
      if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
        {
          lp_info[region->region_number].directly_reachable = 1;
          found_one = true;
        }
    }

  return found_one;
}
static void
sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  htab_t ar_hash;
  int i, index;

  /* First task: build the action table.  */

  VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
        struct eh_region_d *r =
          VEC_index (eh_region, cfun->eh->region_array, i);

        r->landing_pad = dispatch_label;
        lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
        if (lp_info[i].action_index != -1)
          crtl->uses_eh_lsda = 1;
      }

  htab_delete (ar_hash);

  /* Next: assign dispatch values.  In dwarf2 terms, this would be the
     landing pad label for the region.  For sjlj though, there is one
     common landing pad from which we dispatch to the post-landing pads.

     A region receives a dispatch index if it is directly reachable
     and requires in-function processing.  Regions that share post-landing
     pads may share dispatch indices.  */
  /* ??? Post-landing pad sharing doesn't actually happen at the moment
     (see build_post_landing_pads) so we don't bother checking for it.  */

  index = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      lp_info[i].dispatch_index = index++;

  /* Finally: assign call-site values.  In dwarf2 terms, this would be
     the region number assigned by convert_to_eh_region_ranges, but
     handles no-action and must-not-throw differently.  */

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
        int action = lp_info[i].action_index;

        /* Map must-not-throw to otherwise unused call-site index 0.  */
        if (action == -2)
          index = 0;
        /* Map no-action to otherwise unused call-site index -1.  */
        else if (action == -1)
          index = -1;
        /* Otherwise, look it up in the table.  */
        else
          index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);

        lp_info[i].call_site_index = index;
      }
}
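/* Illustratively: a reachable region whose action chain came back as -2
   (must-not-throw) is given call-site index 0, one with -1 (no action
   needed) gets -1, and ordinary regions get a positive index from
   add_call_site keyed by their dispatch value.  */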
2288 sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
2290 int last_call_site = -2;
2293 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2295 struct eh_region_d *region;
2297 rtx note, before, p;
2299 /* Reset value tracking at extended basic block boundaries.  */
2300 if (LABEL_P (insn))
2301 last_call_site = -2;
2303 if (! INSN_P (insn))
2304 continue;
2306 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2308 /* Calls that are known to not throw need not be marked. */
2309 if (note && INTVAL (XEXP (note, 0)) <= 0)
2310 continue;
2313 region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
2319 /* Calls (and trapping insns) without notes are outside any
2320 exception handling region in this function.  Mark them as
2321 no action.  */
2322 if (CALL_P (insn)
2323 || (flag_non_call_exceptions
2324 && may_trap_p (PATTERN (insn))))
2325 this_call_site = -1;
2330 this_call_site = lp_info[region->region_number].call_site_index;
2332 if (this_call_site == last_call_site)
2333 continue;
2335 /* Don't separate a call from its argument loads.  */
2336 before = insn;
2337 if (CALL_P (insn))
2338 before = find_first_parameter_load (insn, NULL_RTX);
2340 start_sequence ();
2341 mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
2342 sjlj_fc_call_site_ofs);
2343 emit_move_insn (mem, GEN_INT (this_call_site));
2344 p = get_insns ();
2345 end_sequence ();
2347 emit_insn_before (p, before);
2348 last_call_site = this_call_site;
2352 /* Construct the SjLj_Function_Context. */
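/* For orientation, a hedged sketch of the runtime structure whose
   offsets the sjlj_fc_*_ofs variables describe; the authoritative
   layout lives in libgcc's unwind-sjlj.c, and the field spellings
   here are illustrative:

     struct SjLj_Function_Context
     {
       struct SjLj_Function_Context *prev;  // chain kept by the runtime
       int call_site;                       // sjlj_fc_call_site_ofs
       _Unwind_Word data[4];                // sjlj_fc_data_ofs
       _Unwind_Personality_Fn personality;  // sjlj_fc_personality_ofs
       void *lsda;                          // sjlj_fc_lsda_ofs
       void *jbuf[];                        // sjlj_fc_jbuf_ofs
     };  */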
2354 static void
2355 sjlj_emit_function_enter (rtx dispatch_label)
2356 {
2357 rtx fn_begin, fc, mem, seq;
2358 bool fn_begin_outside_block;
2360 fc = crtl->eh.sjlj_fc;
2362 start_sequence ();
2364 /* We're storing this libcall's address into memory instead of
2365 calling it directly.  Thus, we must call assemble_external_libcall
2366 here, as we cannot depend on emit_library_call to do it for us.  */
2367 assemble_external_libcall (eh_personality_libfunc);
2368 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
2369 emit_move_insn (mem, eh_personality_libfunc);
2371 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
2372 if (crtl->uses_eh_lsda)
2373 {
2374 char buf[20];
2375 rtx sym;
2377 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
2378 sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
2379 SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
2380 emit_move_insn (mem, sym);
2381 }
2382 else
2383 emit_move_insn (mem, const0_rtx);
2385 #ifdef DONT_USE_BUILTIN_SETJMP
2386 {
2387 rtx x;
2388 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
2389 TYPE_MODE (integer_type_node), 1,
2390 plus_constant (XEXP (fc, 0),
2391 sjlj_fc_jbuf_ofs), Pmode);
2393 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2394 TYPE_MODE (integer_type_node), 0, dispatch_label);
2395 add_reg_br_prob_note (get_insns (), REG_BR_PROB_BASE/100);
2396 }
2397 #else
2398 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2399 dispatch_label);
2400 #endif
2402 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2403 1, XEXP (fc, 0), Pmode);
2405 seq = get_insns ();
2406 end_sequence ();
2408 /* ??? Instead of doing this at the beginning of the function,
2409 do this in a block that is at loop level 0 and dominates all
2410 can_throw_internal instructions. */
2412 fn_begin_outside_block = true;
2413 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2414 if (NOTE_P (fn_begin))
2415 {
2416 if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2417 break;
2418 else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
2419 fn_begin_outside_block = false;
2420 }
2422 if (fn_begin_outside_block)
2423 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
2424 else
2425 emit_insn_after (seq, fn_begin);
2426 }
2428 /* Call back from expand_function_end to know where we should put
2429 the call to unwind_sjlj_unregister_libfunc if needed. */
2431 void
2432 sjlj_emit_function_exit_after (rtx after)
2433 {
2434 crtl->eh.sjlj_exit_after = after;
2435 }
2437 static void
2438 sjlj_emit_function_exit (void)
2439 {
2440 rtx seq, insn;
2442 start_sequence ();
2444 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2445 1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);
2447 seq = get_insns ();
2448 end_sequence ();
2450 /* ??? Really this can be done in any block at loop level 0 that
2451 post-dominates all can_throw_internal instructions. This is
2452 the last possible moment. */
2454 insn = crtl->eh.sjlj_exit_after;
2455 if (LABEL_P (insn))
2456 insn = NEXT_INSN (insn);
2458 emit_insn_after (seq, insn);
2459 }
2461 static void
2462 sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
2463 {
2464 enum machine_mode unwind_word_mode = targetm.unwind_word_mode ();
2465 enum machine_mode filter_mode = targetm.eh_return_filter_mode ();
2466 int i, first_reachable;
2467 rtx mem, dispatch, seq, fc;
2468 rtx before;
2469 basic_block bb;
2470 edge e;
2472 fc = crtl->eh.sjlj_fc;
2474 start_sequence ();
2476 emit_label (dispatch_label);
2478 #ifndef DONT_USE_BUILTIN_SETJMP
2479 expand_builtin_setjmp_receiver (dispatch_label);
2480 #endif
2482 /* Load up dispatch index, exc_ptr and filter values from the
2483 function context. */
2484 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2485 sjlj_fc_call_site_ofs);
2486 dispatch = copy_to_reg (mem);
2488 mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
2489 if (unwind_word_mode != ptr_mode)
2491 #ifdef POINTERS_EXTEND_UNSIGNED
2492 mem = convert_memory_address (ptr_mode, mem);
2493 #else
2494 mem = convert_to_mode (ptr_mode, mem, 0);
2495 #endif
2497 emit_move_insn (crtl->eh.exc_ptr, mem);
2499 mem = adjust_address (fc, unwind_word_mode,
2500 sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
2501 if (unwind_word_mode != filter_mode)
2502 mem = convert_to_mode (filter_mode, mem, 0);
2503 emit_move_insn (crtl->eh.filter, mem);
2505 /* Jump to one of the directly reachable regions. */
2506 /* ??? This really ought to be using a switch statement. */
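/* The compare/branch chain emitted below behaves like this C sketch
   (an illustration with descriptive names, not real symbols):

     if (dispatch == dispatch_index_of_region_k)
       goto post_landing_pad_of_region_k;        // one test per region,
     ...                                         // except the first found
     goto post_landing_pad_of_first_reachable;   // fallthrough default
*/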
2508 first_reachable = 0;
2509 for (i = cfun->eh->last_region_number; i > 0; --i)
2511 if (! lp_info[i].directly_reachable)
2512 continue;
2514 if (! first_reachable)
2515 {
2516 first_reachable = i;
2517 continue;
2518 }
2520 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2521 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
2522 (((struct eh_region_d *)
2523 VEC_index (eh_region,
2524 cfun->eh->region_array, i))
2525 ->post_landing_pad));
2526 }
2528 seq = get_insns ();
2529 end_sequence ();
2531 before = (((struct eh_region_d *)
2532 VEC_index (eh_region, cfun->eh->region_array, first_reachable))
2533 ->post_landing_pad);
2535 bb = emit_to_new_bb_before (seq, before);
2536 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
2537 e->count = bb->count;
2538 e->probability = REG_BR_PROB_BASE;
2539 }
2541 static void
2542 sjlj_build_landing_pads (void)
2543 {
2544 struct sjlj_lp_info *lp_info;
2546 lp_info = XCNEWVEC (struct sjlj_lp_info, cfun->eh->last_region_number + 1);
2548 if (sjlj_find_directly_reachable_regions (lp_info))
2550 rtx dispatch_label = gen_label_rtx ();
2551 int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
2552 TYPE_MODE (sjlj_fc_type_node),
2553 TYPE_ALIGN (sjlj_fc_type_node));
2554 crtl->eh.sjlj_fc
2555 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2556 int_size_in_bytes (sjlj_fc_type_node),
2557 align);
2559 sjlj_assign_call_site_values (dispatch_label, lp_info);
2560 sjlj_mark_call_sites (lp_info);
2562 sjlj_emit_function_enter (dispatch_label);
2563 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2564 sjlj_emit_function_exit ();
2565 }
2567 free (lp_info);
2568 }
2570 /* After initial rtl generation, call back to finish generating
2571 exception support code. */
2573 void
2574 finish_eh_generation (void)
2575 {
2576 basic_block bb;
2578 /* Nothing to do if no regions created. */
2579 if (cfun->eh->region_tree == NULL)
2580 return;
2582 /* The object here is to provide detailed information (via
2583 reachable_handlers) on how exception control flows within the
2584 function for the CFG construction. In this first pass, we can
2585 include type information garnered from ERT_THROW and
2586 ERT_ALLOWED_EXCEPTIONS regions, and hope that it will be useful
2587 in deleting unreachable handlers. Subsequently, we will generate
2588 landing pads which will connect many of the handlers, and then
2589 type information will not be effective. Still, this is a win
2590 over previous implementations. */
2592 /* These registers are used by the landing pads. Make sure they
2593 have been generated. */
2594 get_exception_pointer ();
2595 get_exception_filter ();
2597 /* Construct the landing pads. */
2599 assign_filter_values ();
2600 build_post_landing_pads ();
2601 connect_post_landing_pads ();
2602 if (USING_SJLJ_EXCEPTIONS)
2603 sjlj_build_landing_pads ();
2605 dw2_build_landing_pads ();
2607 crtl->eh.built_landing_pads = 1;
2609 /* We've totally changed the CFG. Start over. */
2610 find_exception_handler_labels ();
2611 break_superblocks ();
2612 if (USING_SJLJ_EXCEPTIONS
2613 /* Kludge for Alpha/Tru64 (see alpha_gp_save_rtx). */
2614 || single_succ_edge (ENTRY_BLOCK_PTR)->insns.r)
2615 commit_edge_insertions ();
2617 FOR_EACH_BB (bb)
2618 {
2619 edge e;
2620 edge_iterator ei;
2621 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2623 if (e->flags & EDGE_EH)
2632 rtl_make_eh_edge (NULL, bb, BB_END (bb));
2636 /* This section handles removing dead code for flow. */
2638 /* Splice REGION from the region tree and replace it by REPLACE etc.
2639 When UPDATE_CATCH_TRY is true, also update the links from catch
2640 regions to their controlling try region.  */
2642 static void
2643 remove_eh_handler_and_replace (struct eh_region_d *region,
2644 struct eh_region_d *replace,
2645 bool update_catch_try)
2646 {
2647 struct eh_region_d **pp, **pp_start, *p, *outer, *inner;
2648 rtx lab;
2650 outer = region->outer;
2652 /* For the benefit of efficiently handling REG_EH_REGION notes,
2653 replace this region in the region array with its containing
2654 region. Note that previous region deletions may result in
2655 multiple copies of this region in the array, so we have a
2656 list of alternate numbers by which we are known. */
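/* Illustration (hypothetical region numbers): if region 7 carries
   aka = {3, 5} from earlier deletions and is now spliced out in favor
   of REPLACE, slots 3, 5 and 7 of region_array are all redirected to
   REPLACE, and REPLACE's aka becomes {3, 5, 7}.  */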
2658 VEC_replace (eh_region, cfun->eh->region_array, region->region_number,
2659 replace);
2661 if (region->aka)
2662 {
2663 unsigned i;
2664 bitmap_iterator bi;
2665 EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
2666 {
2667 VEC_replace (eh_region, cfun->eh->region_array, i, replace);
2668 }
2669 }
2671 if (replace)
2672 {
2673 if (!replace->aka)
2674 replace->aka = BITMAP_GGC_ALLOC ();
2675 if (region->aka)
2676 bitmap_ior_into (replace->aka, region->aka);
2677 bitmap_set_bit (replace->aka, region->region_number);
2678 }
2680 if (crtl->eh.built_landing_pads)
2681 lab = region->landing_pad;
2682 else
2683 lab = region->label;
2684 if (outer)
2685 pp_start = &outer->inner;
2686 else
2687 pp_start = &cfun->eh->region_tree;
2688 for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
2689 continue;
2690 *pp = region->next_peer;
2692 if (replace)
2693 pp_start = &replace->inner;
2694 else
2695 pp_start = &cfun->eh->region_tree;
2696 inner = region->inner;
2697 if (inner)
2698 {
2699 for (p = inner; p->next_peer ; p = p->next_peer)
2700 p->outer = replace;
2701 p->outer = replace;
2703 p->next_peer = *pp_start;
2704 *pp_start = inner;
2705 }
2707 if (region->type == ERT_CATCH
2708 && update_catch_try)
2710 struct eh_region_d *eh_try, *next, *prev;
2712 for (eh_try = region->next_peer;
2713 eh_try->type == ERT_CATCH;
2714 eh_try = eh_try->next_peer)
2715 continue;
2716 gcc_assert (eh_try->type == ERT_TRY);
2718 next = region->u.eh_catch.next_catch;
2719 prev = region->u.eh_catch.prev_catch;
2721 if (next)
2722 next->u.eh_catch.prev_catch = prev;
2723 else
2724 eh_try->u.eh_try.last_catch = prev;
2725 if (prev)
2726 prev->u.eh_catch.next_catch = next;
2727 else
2728 {
2729 eh_try->u.eh_try.eh_catch = next;
2730 if (! next)
2731 remove_eh_handler (eh_try);
2732 }
2736 /* Splice REGION from the region tree and replace it by the outer region
2737 of REGION.  */
2739 static void
2740 remove_eh_handler (struct eh_region_d *region)
2741 {
2742 remove_eh_handler_and_replace (region, region->outer, true);
2743 }
2745 /* Remove EH region R that has turned out to have no code in its handler.  */
2747 void
2748 remove_eh_region (int r)
2749 {
2750 struct eh_region_d *region;
2752 region = VEC_index (eh_region, cfun->eh->region_array, r);
2753 remove_eh_handler (region);
2754 }
2756 /* Remove EH region R that has turned out to have no code in its handler,
2757 and replace it by the outer region of R2.  */
2759 void
2760 remove_eh_region_and_replace_by_outer_of (int r, int r2)
2761 {
2762 struct eh_region_d *region, *region2;
2764 region = VEC_index (eh_region, cfun->eh->region_array, r);
2765 region2 = VEC_index (eh_region, cfun->eh->region_array, r2);
2766 remove_eh_handler_and_replace (region, region2->outer, true);
2767 }
2769 /* Invokes CALLBACK for every exception handler label. Only used by old
2770 loop hackery; should not be used by new code. */
2772 void
2773 for_each_eh_label (void (*callback) (rtx))
2774 {
2775 int i;
2776 for (i = 0; i < cfun->eh->last_region_number; i++)
2778 struct eh_region_d *r = VEC_index (eh_region, cfun->eh->region_array, i);
2779 if (r && r->region_number == i && r->label
2780 && GET_CODE (r->label) == CODE_LABEL)
2781 (*callback) (r->label);
2782 }
2783 }
2785 /* Invoke CALLBACK for every exception region in the current function. */
2787 void
2788 for_each_eh_region (void (*callback) (struct eh_region_d *))
2789 {
2790 int i, n = cfun->eh->last_region_number;
2791 for (i = 1; i <= n; ++i)
2793 struct eh_region_d *region;
2795 region = VEC_index (eh_region, cfun->eh->region_array, i);
2796 if (region)
2797 (*callback) (region);
2798 }
2799 }
2801 /* This section describes CFG exception edges for flow. */
2803 /* For communicating between calls to reachable_next_level. */
2804 struct reachable_info
2805 {
2806 tree types_caught;
2807 tree types_allowed;
2808 void (*callback) (struct eh_region_d *, void *);
2809 void *callback_data;
2810 };
2812 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2813 base class of TYPE, is in HANDLED. */
2816 check_handled (tree handled, tree type)
2817 {
2818 tree t;
2820 /* We can check for exact matches without front-end help.  */
2821 if (! lang_eh_type_covers)
2822 {
2823 for (t = handled; t ; t = TREE_CHAIN (t))
2824 if (TREE_VALUE (t) == type)
2825 return 1;
2826 }
2827 else
2828 {
2829 for (t = handled; t ; t = TREE_CHAIN (t))
2830 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2831 return 1;
2832 }
2834 return 0;
2835 }
2837 /* A subroutine of reachable_next_level. If we are collecting a list
2838 of handlers, add one. After landing pad generation, reference
2839 it instead of the handlers themselves. Further, the handlers are
2840 all wired together, so by referencing one, we've got them all.
2841 Before landing pad generation we reference each handler individually.
2843 LP_REGION contains the landing pad; REGION is the handler. */
2846 add_reachable_handler (struct reachable_info *info,
2847 struct eh_region_d *lp_region,
2848 struct eh_region_d *region)
2849 {
2850 if (! info)
2851 return;
2853 if (crtl->eh.built_landing_pads)
2854 info->callback (lp_region, info->callback_data);
2855 else
2856 info->callback (region, info->callback_data);
2857 }
2859 /* Process one level of exception regions for reachability.
2860 If TYPE_THROWN is non-null, then it is the *exact* type being
2861 propagated. If INFO is non-null, then collect handler labels
2862 and caught/allowed type information between invocations. */
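/* (Informal summary, inferred from the uses below: RNL_NOT_CAUGHT
   means keep searching outer regions; RNL_MAYBE_CAUGHT records a
   handler but continues the search; RNL_CAUGHT means the exception is
   definitely handled here; RNL_BLOCKED means nothing propagates and
   no in-function handler runs.)  */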
2864 static enum reachable_code
2865 reachable_next_level (struct eh_region_d *region, tree type_thrown,
2866 struct reachable_info *info,
2867 bool maybe_resx)
2868 {
2869 switch (region->type)
2870 {
2871 case ERT_CLEANUP:
2872 /* Before landing-pad generation, we model control flow
2873 directly to the individual handlers. In this way we can
2874 see that catch handler types may shadow one another. */
2875 add_reachable_handler (info, region, region);
2876 return RNL_MAYBE_CAUGHT;
2878 case ERT_TRY:
2879 {
2880 struct eh_region_d *c;
2881 enum reachable_code ret = RNL_NOT_CAUGHT;
2883 for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
2885 /* A catch-all handler ends the search. */
2886 if (c->u.eh_catch.type_list == NULL)
2888 add_reachable_handler (info, region, c);
2889 return RNL_CAUGHT;
2890 }
2892 if (type_thrown)
2893 {
2894 /* If we have at least one type match, end the search.  */
2895 tree tp_node = c->u.eh_catch.type_list;
2897 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2899 tree type = TREE_VALUE (tp_node);
2901 if (type == type_thrown
2902 || (lang_eh_type_covers
2903 && (*lang_eh_type_covers) (type, type_thrown)))
2904 {
2905 add_reachable_handler (info, region, c);
2906 return RNL_CAUGHT;
2907 }
2908 }
2910 /* If we have definitive information of a match failure,
2911 the catch won't trigger. */
2912 if (lang_eh_type_covers)
2913 return RNL_NOT_CAUGHT;
2916 /* At this point, we either don't know what type is thrown or
2917 don't have front-end assistance to help deciding if it is
2918 covered by one of the types in the list for this region.
2920 We'd then like to add this region to the list of reachable
2921 handlers since it is indeed potentially reachable based on the
2922 information we have.
2924 Actually, this handler is for sure not reachable if all the
2925 types it matches have already been caught. That is, it is only
2926 potentially reachable if at least one of the types it catches
2927 has not been previously caught. */
2929 if (! info)
2930 ret = RNL_MAYBE_CAUGHT;
2931 else
2932 {
2933 tree tp_node = c->u.eh_catch.type_list;
2934 bool maybe_reachable = false;
2936 /* Compute the potential reachability of this handler and
2937 update the list of types caught at the same time. */
2938 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2940 tree type = TREE_VALUE (tp_node);
2942 if (! check_handled (info->types_caught, type))
2943 {
2944 info->types_caught
2945 = tree_cons (NULL, type, info->types_caught);
2947 maybe_reachable = true;
2948 }
2949 }
2951 if (maybe_reachable)
2953 add_reachable_handler (info, region, c);
2955 /* ??? If the catch type is a base class of every allowed
2956 type, then we know we can stop the search. */
2957 ret = RNL_MAYBE_CAUGHT;
2958 }
2959 }
2960 }
2962 return ret;
2963 }
2965 case ERT_ALLOWED_EXCEPTIONS:
2966 /* An empty list of types definitely ends the search. */
2967 if (region->u.allowed.type_list == NULL_TREE)
2968 {
2969 add_reachable_handler (info, region, region);
2970 return RNL_CAUGHT;
2971 }
2973 /* Collect a list of lists of allowed types for use in detecting
2974 when a catch may be transformed into a catch-all. */
2975 if (info)
2976 info->types_allowed = tree_cons (NULL_TREE,
2977 region->u.allowed.type_list,
2978 info->types_allowed);
2980 /* If we have definitive information about the type hierarchy,
2981 then we can tell if the thrown type will pass through the
2982 filter.  */
2983 if (type_thrown && lang_eh_type_covers)
2984 {
2985 if (check_handled (region->u.allowed.type_list, type_thrown))
2986 return RNL_NOT_CAUGHT;
2987 else
2988 {
2989 add_reachable_handler (info, region, region);
2990 return RNL_CAUGHT;
2991 }
2992 }
2994 add_reachable_handler (info, region, region);
2995 return RNL_MAYBE_CAUGHT;
2997 case ERT_CATCH:
2998 /* Catch regions are handled by their controlling try region.  */
2999 return RNL_NOT_CAUGHT;
3001 case ERT_MUST_NOT_THROW:
3002 /* Here we end our search, since no exceptions may propagate.
3004 Local landing pads of ERT_MUST_NOT_THROW regions are reachable
3005 only via locally handled RESX instructions.
3007 When we inline a function call, we can bring in new handlers.  To
3008 avoid ERT_MUST_NOT_THROW landing pads being deleted as unreachable,
3009 assume that such handlers exist for any inlinable call until
3010 inlining decisions are fixed.  */
3014 add_reachable_handler (info, region, region);
3022 /* Shouldn't see these here. */
3030 /* Invoke CALLBACK on each region reachable from REGION_NUMBER. */
3032 void
3033 foreach_reachable_handler (int region_number, bool is_resx, bool inlinable_call,
3034 void (*callback) (struct eh_region_d *, void *),
3035 void *callback_data)
3036 {
3037 struct reachable_info info;
3038 struct eh_region_d *region;
3039 tree type_thrown;
3041 memset (&info, 0, sizeof (info));
3042 info.callback = callback;
3043 info.callback_data = callback_data;
3045 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
3049 type_thrown = NULL_TREE;
3050 if (is_resx)
3051 {
3052 /* A RESX leaves a region instead of entering it.  Thus the
3053 region itself may have been deleted out from under us.  */
3054 if (region == NULL)
3055 return;
3056 region = region->outer;
3057 }
3058 else if (region->type == ERT_THROW)
3059 {
3060 type_thrown = region->u.eh_throw.type;
3061 region = region->outer;
3062 }
3064 while (region)
3065 {
3066 if (reachable_next_level (region, type_thrown, &info,
3067 inlinable_call || is_resx) >= RNL_CAUGHT)
3068 break;
3069 /* If we have processed one cleanup, there is no point in
3070 processing any more of them.  Each cleanup will have an edge
3071 to the next outer cleanup region, so the flow graph will be
3072 accurate.  */
3073 if (region->type == ERT_CLEANUP)
3074 {
3075 enum reachable_code code = RNL_NOT_CAUGHT;
3076 region = find_prev_try (region->outer);
3077 /* Continue looking for an outer TRY region until we find one
3078 that might catch something.  */
3079 while (region
3080 && (code = reachable_next_level (region, type_thrown, &info,
3081 inlinable_call || is_resx))
3082 == RNL_NOT_CAUGHT)
3083 region = find_prev_try (region->outer);
3084 if (code >= RNL_CAUGHT)
3085 break;
3086 }
3087 if (region)
3088 region = region->outer;
3089 }
3090 }
3092 /* Retrieve a list of labels of exception handlers which can be
3093 reached by a given insn. */
3096 arh_to_landing_pad (struct eh_region_d *region, void *data)
3098 rtx *p_handlers = (rtx *) data;
3099 if (! *p_handlers)
3100 *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
3101 }
3103 static void
3104 arh_to_label (struct eh_region_d *region, void *data)
3105 {
3106 rtx *p_handlers = (rtx *) data;
3107 *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
3108 }
3110 rtx
3111 reachable_handlers (rtx insn)
3112 {
3113 bool is_resx = false;
3114 rtx handlers = NULL;
3115 int region_number;
3117 if (JUMP_P (insn)
3118 && GET_CODE (PATTERN (insn)) == RESX)
3119 {
3120 region_number = XINT (PATTERN (insn), 0);
3121 is_resx = true;
3122 }
3123 else
3124 {
3125 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3126 if (!note || INTVAL (XEXP (note, 0)) <= 0)
3127 return NULL;
3128 region_number = INTVAL (XEXP (note, 0));
3129 }
3131 foreach_reachable_handler (region_number, is_resx, false,
3132 (crtl->eh.built_landing_pads
3133 ? arh_to_landing_pad
3134 : arh_to_label),
3135 &handlers);
3137 return handlers;
3138 }
3140 /* Determine if the given INSN can throw an exception that is caught
3141 within the function. */
3143 bool
3144 can_throw_internal_1 (int region_number, bool is_resx, bool inlinable_call)
3145 {
3146 struct eh_region_d *region;
3147 tree type_thrown;
3149 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
3150 if (!region)
3151 return false;
3153 type_thrown = NULL_TREE;
3154 if (is_resx)
3155 region = region->outer;
3156 else if (region->type == ERT_THROW)
3157 {
3158 type_thrown = region->u.eh_throw.type;
3159 region = region->outer;
3160 }
3162 /* If this exception is ignored by each and every containing region,
3163 then control passes straight out. The runtime may handle some
3164 regions, which also do not require processing internally. */
3165 for (; region; region = region->outer)
3166 {
3167 enum reachable_code how = reachable_next_level (region, type_thrown, 0,
3168 inlinable_call || is_resx);
3169 if (how == RNL_BLOCKED)
3170 return false;
3171 if (how != RNL_NOT_CAUGHT)
3172 return true;
3173 }
3175 return false;
3176 }
3178 bool
3179 can_throw_internal (const_rtx insn)
3180 {
3181 rtx note;
3183 if (! INSN_P (insn))
3184 return false;
3186 if (JUMP_P (insn)
3187 && GET_CODE (PATTERN (insn)) == RESX
3188 && XINT (PATTERN (insn), 0) > 0)
3189 return can_throw_internal_1 (XINT (PATTERN (insn), 0), true, false);
3191 if (NONJUMP_INSN_P (insn)
3192 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3193 insn = XVECEXP (PATTERN (insn), 0, 0);
3195 /* Every insn that might throw has an EH_REGION note. */
3196 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3197 if (!note || INTVAL (XEXP (note, 0)) <= 0)
3198 return false;
3200 return can_throw_internal_1 (INTVAL (XEXP (note, 0)), false, false);
3201 }
3203 /* Determine if the given INSN can throw an exception that is
3204 visible outside the function. */
3206 bool
3207 can_throw_external_1 (int region_number, bool is_resx, bool inlinable_call)
3208 {
3209 struct eh_region_d *region;
3210 tree type_thrown;
3212 region = VEC_index (eh_region, cfun->eh->region_array, region_number);
3213 if (!region)
3214 return true;
3216 type_thrown = NULL_TREE;
3217 if (is_resx)
3218 region = region->outer;
3219 else if (region->type == ERT_THROW)
3220 {
3221 type_thrown = region->u.eh_throw.type;
3222 region = region->outer;
3223 }
3225 /* If the exception is caught or blocked by any containing region,
3226 then it is not seen by any calling function. */
3227 for (; region ; region = region->outer)
3228 if (reachable_next_level (region, type_thrown, NULL,
3229 inlinable_call || is_resx) >= RNL_CAUGHT)
3230 return false;
3232 return true;
3233 }
3235 bool
3236 can_throw_external (const_rtx insn)
3237 {
3238 rtx note;
3240 if (! INSN_P (insn))
3241 return false;
3243 if (JUMP_P (insn)
3244 && GET_CODE (PATTERN (insn)) == RESX
3245 && XINT (PATTERN (insn), 0) > 0)
3246 return can_throw_external_1 (XINT (PATTERN (insn), 0), true, false);
3248 if (NONJUMP_INSN_P (insn)
3249 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3251 rtx seq = PATTERN (insn);
3252 int i, n = XVECLEN (seq, 0);
3254 for (i = 0; i < n; i++)
3255 if (can_throw_external (XVECEXP (seq, 0, i)))
3256 return true;
3258 return false;
3259 }
3261 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3262 if (!note)
3263 {
3264 /* Calls (and trapping insns) without notes are outside any
3265 exception handling region in this function. We have to
3266 assume it might throw. Given that the front end and middle
3267 ends mark known NOTHROW functions, this isn't so wildly
3268 inaccurate.  */
3269 return (CALL_P (insn)
3270 || (flag_non_call_exceptions
3271 && may_trap_p (PATTERN (insn))));
3272 }
3273 if (INTVAL (XEXP (note, 0)) <= 0)
3274 return false;
3276 return can_throw_external_1 (INTVAL (XEXP (note, 0)), false, false);
3279 /* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls. */
3281 static unsigned int
3282 set_nothrow_function_flags (void)
3283 {
3284 rtx insn;
3286 crtl->nothrow = 1;
3288 /* Assume crtl->all_throwers_are_sibcalls until we encounter
3289 something that can throw an exception. We specifically exempt
3290 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
3291 and can't throw.  Most CALL_INSNs are not SIBLING_CALL_P, so this
3292 is optimistic.  */
3294 crtl->all_throwers_are_sibcalls = 1;
3296 /* If we don't know that this implementation of the function will
3297 actually be used, then we must not set TREE_NOTHROW, since
3298 callers must not assume that this function does not throw. */
3299 if (TREE_NOTHROW (current_function_decl))
3300 return 0;
3302 if (! flag_exceptions)
3303 return 0;
3305 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3306 if (can_throw_external (insn))
3307 {
3308 crtl->nothrow = 0;
3310 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
3311 {
3312 crtl->all_throwers_are_sibcalls = 0;
3313 return 0;
3314 }
3315 }
3317 for (insn = crtl->epilogue_delay_list; insn;
3318 insn = XEXP (insn, 1))
3319 if (can_throw_external (insn))
3320 {
3321 crtl->nothrow = 0;
3323 if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
3324 {
3325 crtl->all_throwers_are_sibcalls = 0;
3326 return 0;
3327 }
3328 }
3329 if (crtl->nothrow
3330 && (cgraph_function_body_availability (cgraph_node
3331 (current_function_decl))
3332 >= AVAIL_AVAILABLE))
3334 struct cgraph_node *node = cgraph_node (current_function_decl);
3335 struct cgraph_edge *e;
3336 for (e = node->callers; e; e = e->next_caller)
3337 e->can_throw_external = false;
3338 TREE_NOTHROW (current_function_decl) = 1;
3340 if (dump_file)
3341 fprintf (dump_file, "Marking function nothrow: %s\n\n",
3342 current_function_name ());
3343 }
3344 return 0;
3345 }
3347 struct rtl_opt_pass pass_set_nothrow_function_flags =
3348 {
3349 {
3350 RTL_PASS,
3351 "nothrow", /* name */
3352 NULL, /* gate */
3353 set_nothrow_function_flags, /* execute */
3354 NULL, /* sub */
3355 NULL, /* next */
3356 0, /* static_pass_number */
3357 TV_NONE, /* tv_id */
3358 0, /* properties_required */
3359 0, /* properties_provided */
3360 0, /* properties_destroyed */
3361 0, /* todo_flags_start */
3362 TODO_dump_func, /* todo_flags_finish */
3363 }
3364 };
3367 /* Various hooks for unwind library. */
3369 /* Do any necessary initialization to access arbitrary stack frames.
3370 On the SPARC, this means flushing the register windows. */
3372 void
3373 expand_builtin_unwind_init (void)
3374 {
3375 /* Set this so all the registers get saved in our frame; we need to be
3376 able to copy the saved values for any registers from frames we unwind. */
3377 crtl->saves_all_registers = 1;
3379 #ifdef SETUP_FRAME_ADDRESSES
3380 SETUP_FRAME_ADDRESSES ();
3381 #endif
3382 }
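/* Usage sketch (hedged; the caller is hypothetical): code that walks
   its own stack forces the call-saved registers into the frame first:

     void walk_own_stack (void)
     {
       __builtin_unwind_init ();
       // ... inspect frames, e.g. via __builtin_frame_address ...
     }
*/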
3384 rtx
3385 expand_builtin_eh_return_data_regno (tree exp)
3386 {
3387 tree which = CALL_EXPR_ARG (exp, 0);
3388 unsigned HOST_WIDE_INT iwhich;
3390 if (TREE_CODE (which) != INTEGER_CST)
3391 {
3392 error ("argument of %<__builtin_eh_return_regno%> must be constant");
3393 return constm1_rtx;
3394 }
3396 iwhich = tree_low_cst (which, 1);
3397 iwhich = EH_RETURN_DATA_REGNO (iwhich);
3398 if (iwhich == INVALID_REGNUM)
3399 return constm1_rtx;
3401 #ifdef DWARF_FRAME_REGNUM
3402 iwhich = DWARF_FRAME_REGNUM (iwhich);
3403 #else
3404 iwhich = DBX_REGISTER_NUMBER (iwhich);
3405 #endif
3407 return GEN_INT (iwhich);
3408 }
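/* Usage sketch (hedged): the builtin expanded above requires a literal
   constant argument, as the error path enforces; libgcc-style unwinder
   code uses it to name the EH data registers, e.g.

     unsigned reg0 = __builtin_eh_return_data_regno (0);
*/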
3410 /* Given a value extracted from the return address register or stack slot,
3411 return the actual address encoded in that value. */
3413 rtx
3414 expand_builtin_extract_return_addr (tree addr_tree)
3415 {
3416 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
3418 if (GET_MODE (addr) != Pmode
3419 && GET_MODE (addr) != VOIDmode)
3421 #ifdef POINTERS_EXTEND_UNSIGNED
3422 addr = convert_memory_address (Pmode, addr);
3423 #else
3424 addr = convert_to_mode (Pmode, addr, 0);
3425 #endif
3428 /* First mask out any unwanted bits. */
3429 #ifdef MASK_RETURN_ADDR
3430 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
3431 #endif
3433 /* Then adjust to find the real return address. */
3434 #if defined (RETURN_ADDR_OFFSET)
3435 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
3436 #endif
3438 return addr;
3439 }
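/* Usage sketch (hedged, GNU C):

     void *ra = __builtin_extract_return_addr (__builtin_return_address (0));

   On many targets this is the identity; targets that mask or offset
   return addresses recover the true address here.  */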
3441 /* Given an actual address in addr_tree, do any necessary encoding
3442 and return the value to be stored in the return address register or
3443 stack slot so the epilogue will return to that address. */
3445 rtx
3446 expand_builtin_frob_return_addr (tree addr_tree)
3447 {
3448 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
3450 addr = convert_memory_address (Pmode, addr);
3452 #ifdef RETURN_ADDR_OFFSET
3453 addr = force_reg (Pmode, addr);
3454 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
3455 #endif
3457 return addr;
3458 }
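/* Usage sketch (hedged): the inverse of the extraction above, applied
   before a return address is stored back; "slot" is illustrative:

     slot = __builtin_frob_return_addr (new_return_address);
*/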
3460 /* Set up the epilogue with the magic bits we'll need to return to the
3461 exception handler. */
3463 void
3464 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
3465 tree handler_tree)
3466 {
3467 rtx tmp;
3469 #ifdef EH_RETURN_STACKADJ_RTX
3470 tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
3471 VOIDmode, EXPAND_NORMAL);
3472 tmp = convert_memory_address (Pmode, tmp);
3473 if (!crtl->eh.ehr_stackadj)
3474 crtl->eh.ehr_stackadj = copy_to_reg (tmp);
3475 else if (tmp != crtl->eh.ehr_stackadj)
3476 emit_move_insn (crtl->eh.ehr_stackadj, tmp);
3477 #endif
3479 tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
3480 VOIDmode, EXPAND_NORMAL);
3481 tmp = convert_memory_address (Pmode, tmp);
3482 if (!crtl->eh.ehr_handler)
3483 crtl->eh.ehr_handler = copy_to_reg (tmp);
3484 else if (tmp != crtl->eh.ehr_handler)
3485 emit_move_insn (crtl->eh.ehr_handler, tmp);
3487 if (!crtl->eh.ehr_label)
3488 crtl->eh.ehr_label = gen_label_rtx ();
3489 emit_jump (crtl->eh.ehr_label);
3490 }
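/* Usage sketch (hedged): this expands the user-level builtin that an
   unwinder invokes as its final act, e.g.

     __builtin_eh_return (stack_adjustment, handler_address);

   Control then flows to the ehr_label material that expand_eh_return
   places in the epilogue instead of returning normally.  */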
3492 void
3493 expand_eh_return (void)
3494 {
3495 rtx around_label;
3497 if (! crtl->eh.ehr_label)
3498 return;
3500 crtl->calls_eh_return = 1;
3502 #ifdef EH_RETURN_STACKADJ_RTX
3503 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
3504 #endif
3506 around_label = gen_label_rtx ();
3507 emit_jump (around_label);
3509 emit_label (crtl->eh.ehr_label);
3510 clobber_return_register ();
3512 #ifdef EH_RETURN_STACKADJ_RTX
3513 emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
3514 #endif
3516 #ifdef HAVE_eh_return
3517 if (HAVE_eh_return)
3518 emit_insn (gen_eh_return (crtl->eh.ehr_handler));
3519 else
3520 #endif
3521 {
3522 #ifdef EH_RETURN_HANDLER_RTX
3523 emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler);
3524 #else
3525 error ("__builtin_eh_return not supported on this target");
3526 #endif
3527 }
3529 emit_label (around_label);
3530 }
3532 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
3533 POINTERS_EXTEND_UNSIGNED and return it. */
3535 rtx
3536 expand_builtin_extend_pointer (tree addr_tree)
3537 {
3538 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
3539 int extend;
3541 #ifdef POINTERS_EXTEND_UNSIGNED
3542 extend = POINTERS_EXTEND_UNSIGNED;
3544 /* The previous EH code did an unsigned extend by default, so we do this also
3545 for code compatibility.  */
3546 extend = 1;
3547 #endif
3549 return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
3550 }
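/* Usage sketch (hedged; the call site is illustrative): the
   corresponding builtin widens a pointer to the unwind word, e.g.

     _Unwind_Word w = (_Unwind_Word) __builtin_extend_pointer (ptr);
*/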
3552 /* In the following functions, we represent entries in the action table
3553 as 1-based indices. Special cases are:
3555 0: null action record, non-null landing pad; implies cleanups
3556 -1: null action record, null landing pad; implies no action
3557 -2: no call-site entry; implies must_not_throw
3558 -3: we have yet to process outer regions
3560 Further, no special cases apply to the "next" field of the record.
3561 For next, 0 means end of list. */
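/* Worked example of the encoding below (illustrative sizes, assuming
   the filter fits in one sleb128 byte): if action_record_data already
   holds 4 bytes and we add a record whose chain continues at absolute
   index 3, the new record gets offset 5 (active size + 1); after the
   filter byte is pushed the active size is 5, so the stored link is
   3 - (5 + 1) = -3, a self-relative displacement.  A stored 0 still
   means end of list.  */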
3563 struct action_record
3564 {
3565 int offset;
3566 int filter;
3567 int next;
3568 };
3570 static int
3571 action_record_eq (const void *pentry, const void *pdata)
3573 const struct action_record *entry = (const struct action_record *) pentry;
3574 const struct action_record *data = (const struct action_record *) pdata;
3575 return entry->filter == data->filter && entry->next == data->next;
3576 }
3578 static hashval_t
3579 action_record_hash (const void *pentry)
3580 {
3581 const struct action_record *entry = (const struct action_record *) pentry;
3582 return entry->next * 1009 + entry->filter;
3583 }
3585 static int
3586 add_action_record (htab_t ar_hash, int filter, int next)
3587 {
3588 struct action_record **slot, *new_ar, tmp;
3590 tmp.filter = filter;
3591 tmp.next = next;
3592 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3594 if ((new_ar = *slot) == NULL)
3596 new_ar = XNEW (struct action_record);
3597 new_ar->offset = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3598 new_ar->filter = filter;
3599 new_ar->next = next;
3600 *slot = new_ar;
3602 /* The filter value goes in untouched. The link to the next
3603 record is a "self-relative" byte offset, or zero to indicate
3604 that there is no next record. So convert the absolute 1 based
3605 indices we've been carrying around into a displacement. */
3607 push_sleb128 (&crtl->eh.action_record_data, filter);
3608 if (next)
3609 next -= VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
3610 push_sleb128 (&crtl->eh.action_record_data, next);
3611 }
3613 return new_ar->offset;
3614 }
3616 static int
3617 collect_one_action_chain (htab_t ar_hash, struct eh_region_d *region)
3618 {
3619 struct eh_region_d *c;
3620 int next;
3622 /* If we've reached the top of the region chain, then we have
3623 no actions, and require no landing pad.  */
3624 if (region == NULL)
3625 return -1;
3627 switch (region->type)
3628 {
3629 case ERT_CLEANUP:
3630 /* A cleanup adds a zero filter to the beginning of the chain, but
3631 there are special cases to look out for. If there are *only*
3632 cleanups along a path, then it compresses to a zero action.
3633 Further, if there are multiple cleanups along a path, we only
3634 need to represent one of them, as that is enough to trigger
3635 entry to the landing pad at runtime. */
3636 next = collect_one_action_chain (ar_hash, region->outer);
3637 if (next < 0)
3638 return next;
3639 for (c = region->outer; c ; c = c->outer)
3640 if (c->type == ERT_CLEANUP)
3641 return next;
3642 return add_action_record (ar_hash, 0, next);
3644 case ERT_TRY:
3645 /* Process the associated catch regions in reverse order.
3646 If there's a catch-all handler, then we don't need to
3647 search outer regions. Use a magic -3 value to record
3648 that we haven't done the outer search. */
3649 next = -3;
3650 for (c = region->u.eh_try.last_catch; c ; c = c->u.eh_catch.prev_catch)