1 /* Implements exception handling.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
4 Contributed by Mike Stump <mrs@cygnus.com>.
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
24 /* An exception is an event that can be signaled from within a
25 function. This event can then be "caught" or "trapped" by the
26 callers of this function. This potentially allows program flow to
27 be transferred to any arbitrary code associated with a function call
28 several levels up the stack.
30 The intended use for this mechanism is for signaling "exceptional
31 events" in an out-of-band fashion, hence its name. The C++ language
32 (and many other OO-styled or functional languages) practically
33 requires such a mechanism, as otherwise it becomes very difficult
34 or even impossible to signal failure conditions in complex
35 situations. The traditional C++ example is when an error occurs in
36 the process of constructing an object; without such a mechanism, it
37 is impossible to signal that the error occurred without adding global
38 state variables and error checks around every object construction.
40 The act of causing this event to occur is referred to as "throwing
41 an exception". (Alternate terms include "raising an exception" or
42 "signaling an exception".) The term "throw" is used because control
43 is returned to the callers of the function that is signaling the
44 exception, and thus there is the concept of "throwing" the
45 exception up the call stack.
47 [ Add updated documentation on how to use this. ] */
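/* A rough sketch, for orientation only, of how a front end is expected
   to drive the region expansion entry points defined below; the exact
   sequence is the front end's responsibility:

       expand_eh_region_start ();
          ... expand the TRY body ...
       expand_start_all_catch ();         <- ends the try region
       expand_start_catch (type);         <- once per catch clause
          ... expand the HANDLER ...
       expand_end_catch ();
       expand_end_all_catch ();           <- control resumes after the block

   A cleanup-only region instead pairs expand_eh_region_start with
   expand_eh_region_end_cleanup (CLEANUP_EXPR).  */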
57 #include "insn-config.h"
59 #include "integrate.h"
60 #include "hard-reg-set.h"
61 #include "basic-block.h"
63 #include "dwarf2asm.h"
64 #include "dwarf2out.h"
73 /* Provide defaults for stuff that may not be defined when using sjlj exceptions. */
75 #ifndef EH_RETURN_STACKADJ_RTX
76 #define EH_RETURN_STACKADJ_RTX 0
78 #ifndef EH_RETURN_HANDLER_RTX
79 #define EH_RETURN_HANDLER_RTX 0
81 #ifndef EH_RETURN_DATA_REGNO
82 #define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
86 /* Nonzero means enable synchronous exceptions for non-call instructions. */
87 int flag_non_call_exceptions;
89 /* Protect cleanup actions with must-not-throw regions, with a call
90 to the given failure handler. */
91 tree (*lang_protect_cleanup_actions) PARAMS ((void));
93 /* Return true if type A catches type B. */
94 int (*lang_eh_type_covers) PARAMS ((tree a, tree b));
96 /* Map a type to a runtime object to match type. */
97 tree (*lang_eh_runtime_type) PARAMS ((tree));
99 /* A list of labels used for exception handlers. */
100 rtx exception_handler_labels;
102 static int call_site_base;
103 static int sjlj_funcdef_number;
104 static htab_t type_to_runtime_map;
106 /* Describe the SjLj_Function_Context structure. */
107 static tree sjlj_fc_type_node;
108 static int sjlj_fc_call_site_ofs;
109 static int sjlj_fc_data_ofs;
110 static int sjlj_fc_personality_ofs;
111 static int sjlj_fc_lsda_ofs;
112 static int sjlj_fc_jbuf_ofs;
114 /* Describes one exception region. */
117 /* The immediately surrounding region. */
118 struct eh_region *outer;
120 /* The list of immediately contained regions. */
121 struct eh_region *inner;
122 struct eh_region *next_peer;
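/* Illustratively, for "try { try { A } catch (x) { B } } catch (y) { C }"
   the inner try region's OUTER points at the outer try region, the outer
   region's INNER points at its first contained region, and regions that
   share a parent are chained together through NEXT_PEER.  */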
124 /* An identifier for this region. */
127 /* Each region does exactly one thing. */
133 ERT_ALLOWED_EXCEPTIONS,
139 /* Holds the action to perform based on the preceding type. */
141 /* A list of catch blocks, a surrounding try block,
142 and the label for continuing after a catch. */
144 struct eh_region *catch;
145 struct eh_region *last_catch;
146 struct eh_region *prev_try;
150 /* The list through the catch handlers, the type object
151 matched, and a pointer to the generated code. */
153 struct eh_region *next_catch;
154 struct eh_region *prev_catch;
159 /* A tree_list of allowed types. */
165 /* The type given by a call to "throw foo();", or discovered
171 /* Retain the cleanup expression even after expansion so that
172 we can match up fixup regions. */
177 /* The real region (by expression and by pointer) that fixup code should live in. */
181 struct eh_region *real_region;
185 /* Entry point for this region's handler before landing pads are built. */
188 /* Entry point for this region's handler from the runtime eh library. */
191 /* Entry point for this region's handler from an inner region. */
192 rtx post_landing_pad;
194 /* The RESX insn for handing off control to the next outermost handler, if appropriate. */
199 /* Used to save exception status for each function. */
202 /* The tree of all regions for this function. */
203 struct eh_region *region_tree;
205 /* The same information as an indexable array. */
206 struct eh_region **region_array;
208 /* The most recently open region. */
209 struct eh_region *cur_region;
211 /* This is the region for which we are processing catch blocks. */
212 struct eh_region *try_region;
214 /* A stack (TREE_LIST) of lists of handlers. The TREE_VALUE of each
215 node is itself a TREE_CHAINed list of handlers for regions that
216 are not yet closed. The TREE_VALUE of each entry contains the
217 handler for the corresponding entry on the ehstack. */
223 int built_landing_pads;
224 int last_region_number;
226 varray_type ttype_data;
227 varray_type ehspec_data;
228 varray_type action_record_data;
230 struct call_site_record
235 int call_site_data_used;
236 int call_site_data_size;
247 static void mark_eh_region PARAMS ((struct eh_region *));
249 static int t2r_eq PARAMS ((const PTR,
251 static hashval_t t2r_hash PARAMS ((const PTR));
252 static int t2r_mark_1 PARAMS ((PTR *, PTR));
253 static void t2r_mark PARAMS ((PTR));
254 static void add_type_for_runtime PARAMS ((tree));
255 static tree lookup_type_for_runtime PARAMS ((tree));
257 static struct eh_region *expand_eh_region_end PARAMS ((void));
259 static rtx get_exception_filter PARAMS ((void));
261 static void collect_eh_region_array PARAMS ((void));
262 static void resolve_fixup_regions PARAMS ((void));
263 static void remove_fixup_regions PARAMS ((void));
264 static void convert_from_eh_region_ranges_1 PARAMS ((rtx *, int *, int));
266 static struct eh_region *duplicate_eh_region_1 PARAMS ((struct eh_region *,
267 struct inline_remap *));
268 static void duplicate_eh_region_2 PARAMS ((struct eh_region *,
269 struct eh_region **));
270 static int ttypes_filter_eq PARAMS ((const PTR,
272 static hashval_t ttypes_filter_hash PARAMS ((const PTR));
273 static int ehspec_filter_eq PARAMS ((const PTR,
275 static hashval_t ehspec_filter_hash PARAMS ((const PTR));
276 static int add_ttypes_entry PARAMS ((htab_t, tree));
277 static int add_ehspec_entry PARAMS ((htab_t, htab_t,
279 static void assign_filter_values PARAMS ((void));
280 static void build_post_landing_pads PARAMS ((void));
281 static void connect_post_landing_pads PARAMS ((void));
282 static void dw2_build_landing_pads PARAMS ((void));
285 static bool sjlj_find_directly_reachable_regions
286 PARAMS ((struct sjlj_lp_info *));
287 static void sjlj_assign_call_site_values
288 PARAMS ((rtx, struct sjlj_lp_info *));
289 static void sjlj_mark_call_sites
290 PARAMS ((struct sjlj_lp_info *));
291 static void sjlj_emit_function_enter PARAMS ((rtx));
292 static void sjlj_emit_function_exit PARAMS ((void));
293 static void sjlj_emit_dispatch_table
294 PARAMS ((rtx, struct sjlj_lp_info *));
295 static void sjlj_build_landing_pads PARAMS ((void));
297 static void remove_exception_handler_label PARAMS ((rtx));
298 static void remove_eh_handler PARAMS ((struct eh_region *));
300 struct reachable_info;
302 /* The return value of reachable_next_level. */
305 /* The given exception is not processed by the given region. */
307 /* The given exception may need processing by the given region. */
309 /* The given exception is completely processed by the given region. */
311 /* The given exception is completely processed by the runtime. */
315 static int check_handled PARAMS ((tree, tree));
316 static void add_reachable_handler
317 PARAMS ((struct reachable_info *, struct eh_region *,
318 struct eh_region *));
319 static enum reachable_code reachable_next_level
320 PARAMS ((struct eh_region *, tree, struct reachable_info *));
322 static int action_record_eq PARAMS ((const PTR,
324 static hashval_t action_record_hash PARAMS ((const PTR));
325 static int add_action_record PARAMS ((htab_t, int, int));
326 static int collect_one_action_chain PARAMS ((htab_t,
327 struct eh_region *));
328 static int add_call_site PARAMS ((rtx, int));
330 static void push_uleb128 PARAMS ((varray_type *,
332 static void push_sleb128 PARAMS ((varray_type *, int));
333 #ifndef HAVE_AS_LEB128
334 static int dw2_size_of_call_site_table PARAMS ((void));
335 static int sjlj_size_of_call_site_table PARAMS ((void));
337 static void dw2_output_call_site_table PARAMS ((void));
338 static void sjlj_output_call_site_table PARAMS ((void));
341 /* Routine to see if exception handling is turned on.
342 DO_WARN is non-zero if we want to inform the user that exception
343 handling is turned off.
345 This is used to ensure that -fexceptions has been specified if the
346 compiler tries to use any exception-specific functions. */
352 if (! flag_exceptions)
354 static int warned = 0;
355 if (! warned && do_warn)
357 error ("exception handling disabled, use -fexceptions to enable");
369 ggc_add_rtx_root (&exception_handler_labels, 1);
371 if (! flag_exceptions)
374 type_to_runtime_map = htab_create (31, t2r_hash, t2r_eq, NULL);
375 ggc_add_root (&type_to_runtime_map, 1, sizeof (htab_t), t2r_mark);
377 /* Create the SjLj_Function_Context structure. This should match
378 the definition in unwind-sjlj.c. */
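/* For reference only: the record built below mirrors, approximately,

       struct SjLj_Function_Context {
         struct SjLj_Function_Context *prev;
         int call_site;
         word data[4];
         personality_routine personality;
         void *lsda;
         void *jbuf[];              (size computed below)
       };

   Field types here are sketched from the code that follows; the
   authoritative definition lives in unwind-sjlj.c.  */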
379 if (USING_SJLJ_EXCEPTIONS)
381 tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
383 sjlj_fc_type_node = make_lang_type (RECORD_TYPE);
384 ggc_add_tree_root (&sjlj_fc_type_node, 1);
386 f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
387 build_pointer_type (sjlj_fc_type_node));
388 DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;
390 f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
392 DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
394 tmp = build_index_type (build_int_2 (4 - 1, 0));
395 tmp = build_array_type (type_for_mode (word_mode, 1), tmp);
396 f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
397 DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
399 f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
401 DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;
403 f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
405 DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
407 #ifdef DONT_USE_BUILTIN_SETJMP
409 tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
411 /* This should be large enough for most systems; if it is not,
412 JMP_BUF_SIZE should be defined with the proper value. It will
413 also tend to be larger than necessary for most systems; a more
414 optimal port will define JMP_BUF_SIZE. */
415 tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
418 /* This is 2 for builtin_setjmp, plus whatever the target requires
419 via STACK_SAVEAREA_MODE (SAVE_NONLOCAL). */
420 tmp = build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
421 / GET_MODE_SIZE (Pmode)) + 2 - 1, 0);
423 tmp = build_index_type (tmp);
424 tmp = build_array_type (ptr_type_node, tmp);
425 f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
426 #ifdef DONT_USE_BUILTIN_SETJMP
427 /* We don't know what alignment requirements the runtime's
428 jmp_buf has. Overestimate. */
429 DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
430 DECL_USER_ALIGN (f_jbuf) = 1;
432 DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
434 TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
435 TREE_CHAIN (f_prev) = f_cs;
436 TREE_CHAIN (f_cs) = f_data;
437 TREE_CHAIN (f_data) = f_per;
438 TREE_CHAIN (f_per) = f_lsda;
439 TREE_CHAIN (f_lsda) = f_jbuf;
441 layout_type (sjlj_fc_type_node);
443 /* Cache the interesting field offsets so that we have
444 easy access from rtl. */
445 sjlj_fc_call_site_ofs
446 = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
447 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
449 = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
450 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
451 sjlj_fc_personality_ofs
452 = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
453 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
455 = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
456 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
458 = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
459 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
464 init_eh_for_function ()
466 cfun->eh = (struct eh_status *) xcalloc (1, sizeof (struct eh_status));
469 /* Mark EH for GC. */
472 mark_eh_region (region)
473 struct eh_region *region;
478 switch (region->type)
481 ggc_mark_tree (region->u.cleanup.exp);
484 ggc_mark_rtx (region->u.try.continue_label);
487 ggc_mark_tree (region->u.catch.type);
489 case ERT_ALLOWED_EXCEPTIONS:
490 ggc_mark_tree (region->u.allowed.type_list);
492 case ERT_MUST_NOT_THROW:
495 ggc_mark_tree (region->u.throw.type);
498 ggc_mark_tree (region->u.fixup.cleanup_exp);
504 ggc_mark_rtx (region->label);
505 ggc_mark_rtx (region->resume);
506 ggc_mark_rtx (region->landing_pad);
507 ggc_mark_rtx (region->post_landing_pad);
512 struct eh_status *eh;
519 /* If we've called collect_eh_region_array, use it. Otherwise walk
520 the tree non-recursively. */
521 if (eh->region_array)
523 for (i = eh->last_region_number; i > 0; --i)
525 struct eh_region *r = eh->region_array[i];
526 if (r && r->region_number == i)
530 else if (eh->region_tree)
532 struct eh_region *r = eh->region_tree;
538 else if (r->next_peer)
546 } while (r->next_peer == NULL);
553 ggc_mark_tree (eh->protect_list);
554 ggc_mark_rtx (eh->filter);
555 ggc_mark_rtx (eh->exc_ptr);
556 ggc_mark_tree_varray (eh->ttype_data);
558 if (eh->call_site_data)
560 for (i = eh->call_site_data_used - 1; i >= 0; --i)
561 ggc_mark_rtx (eh->call_site_data[i].landing_pad);
564 ggc_mark_rtx (eh->ehr_stackadj);
565 ggc_mark_rtx (eh->ehr_handler);
566 ggc_mark_rtx (eh->ehr_label);
568 ggc_mark_rtx (eh->sjlj_fc);
569 ggc_mark_rtx (eh->sjlj_exit_after);
576 struct eh_status *eh = f->eh;
578 if (eh->region_array)
581 for (i = eh->last_region_number; i > 0; --i)
583 struct eh_region *r = eh->region_array[i];
584 /* Mind we don't free a region struct more than once. */
585 if (r && r->region_number == i)
588 free (eh->region_array);
590 else if (eh->region_tree)
592 struct eh_region *next, *r = eh->region_tree;
597 else if (r->next_peer)
611 } while (r->next_peer == NULL);
620 VARRAY_FREE (eh->ttype_data);
621 VARRAY_FREE (eh->ehspec_data);
622 VARRAY_FREE (eh->action_record_data);
623 if (eh->call_site_data)
624 free (eh->call_site_data);
631 /* Start an exception handling region. All instructions emitted
632 after this point are considered to be part of the region until
633 expand_eh_region_end is invoked. */
636 expand_eh_region_start ()
638 struct eh_region *new_region;
639 struct eh_region *cur_region;
645 /* Insert a new blank region as a leaf in the tree. */
646 new_region = (struct eh_region *) xcalloc (1, sizeof (*new_region));
647 cur_region = cfun->eh->cur_region;
648 new_region->outer = cur_region;
651 new_region->next_peer = cur_region->inner;
652 cur_region->inner = new_region;
656 new_region->next_peer = cfun->eh->region_tree;
657 cfun->eh->region_tree = new_region;
659 cfun->eh->cur_region = new_region;
661 /* Create a note marking the start of this region. */
662 new_region->region_number = ++cfun->eh->last_region_number;
663 note = emit_note (NULL, NOTE_INSN_EH_REGION_BEG);
664 NOTE_EH_HANDLER (note) = new_region->region_number;
667 /* Common code to end a region. Returns the region just ended. */
669 static struct eh_region *
670 expand_eh_region_end ()
672 struct eh_region *cur_region = cfun->eh->cur_region;
675 /* Create a note marking the end of this region. */
676 note = emit_note (NULL, NOTE_INSN_EH_REGION_END);
677 NOTE_EH_HANDLER (note) = cur_region->region_number;
680 cfun->eh->cur_region = cur_region->outer;
685 /* End an exception handling region for a cleanup. HANDLER is an
686 expression to expand for the cleanup. */
689 expand_eh_region_end_cleanup (handler)
692 struct eh_region *region;
693 tree protect_cleanup_actions;
700 region = expand_eh_region_end ();
701 region->type = ERT_CLEANUP;
702 region->label = gen_label_rtx ();
703 region->u.cleanup.exp = handler;
705 around_label = gen_label_rtx ();
706 emit_jump (around_label);
708 emit_label (region->label);
710 /* Give the language a chance to specify an action to be taken if an
711 exception is thrown that would propagate out of the HANDLER. */
712 protect_cleanup_actions
713 = (lang_protect_cleanup_actions
714 ? (*lang_protect_cleanup_actions) ()
717 if (protect_cleanup_actions)
718 expand_eh_region_start ();
720 /* In case this cleanup involves an inline destructor with a try block in
721 it, we need to save the EH return data registers around it. */
722 data_save[0] = gen_reg_rtx (Pmode);
723 emit_move_insn (data_save[0], get_exception_pointer ());
724 data_save[1] = gen_reg_rtx (word_mode);
725 emit_move_insn (data_save[1], get_exception_filter ());
727 expand_expr (handler, const0_rtx, VOIDmode, 0);
729 emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
730 emit_move_insn (cfun->eh->filter, data_save[1]);
732 if (protect_cleanup_actions)
733 expand_eh_region_end_must_not_throw (protect_cleanup_actions);
735 /* We need any stack adjustment complete before the around_label. */
736 do_pending_stack_adjust ();
738 /* We delay the generation of the _Unwind_Resume until we generate
739 landing pads. We emit a marker here so as to get good control
740 flow data in the meantime. */
742 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
745 emit_label (around_label);
748 /* End an exception handling region for a try block, and prepare
749 for subsequent calls to expand_start_catch. */
752 expand_start_all_catch ()
754 struct eh_region *region;
759 region = expand_eh_region_end ();
760 region->type = ERT_TRY;
761 region->u.try.prev_try = cfun->eh->try_region;
762 region->u.try.continue_label = gen_label_rtx ();
764 cfun->eh->try_region = region;
766 emit_jump (region->u.try.continue_label);
769 /* Begin a catch clause. TYPE is the type caught, or null if this is
770 a catch-all clause. */
773 expand_start_catch (type)
776 struct eh_region *t, *c, *l;
782 add_type_for_runtime (type);
783 expand_eh_region_start ();
785 t = cfun->eh->try_region;
786 c = cfun->eh->cur_region;
788 c->u.catch.type = type;
789 c->label = gen_label_rtx ();
791 l = t->u.try.last_catch;
792 c->u.catch.prev_catch = l;
794 l->u.catch.next_catch = c;
797 t->u.try.last_catch = c;
799 emit_label (c->label);
802 /* End a catch clause. Control will resume after the try/catch block. */
807 struct eh_region *try_region, *catch_region;
812 catch_region = expand_eh_region_end ();
813 try_region = cfun->eh->try_region;
815 emit_jump (try_region->u.try.continue_label);
818 /* End a sequence of catch handlers for a try block. */
821 expand_end_all_catch ()
823 struct eh_region *try_region;
828 try_region = cfun->eh->try_region;
829 cfun->eh->try_region = try_region->u.try.prev_try;
831 emit_label (try_region->u.try.continue_label);
834 /* End an exception region for an exception type filter. ALLOWED is a
835 TREE_LIST of types to be matched by the runtime. FAILURE is an
836 expression to invoke if a mismatch occurs. */
839 expand_eh_region_end_allowed (allowed, failure)
840 tree allowed, failure;
842 struct eh_region *region;
848 region = expand_eh_region_end ();
849 region->type = ERT_ALLOWED_EXCEPTIONS;
850 region->u.allowed.type_list = allowed;
851 region->label = gen_label_rtx ();
853 for (; allowed ; allowed = TREE_CHAIN (allowed))
854 add_type_for_runtime (TREE_VALUE (allowed));
856 /* We must emit the call to FAILURE here, so that if this function
857 throws a different exception, it will be processed by the correct region. */
860 around_label = gen_label_rtx ();
861 emit_jump (around_label);
863 emit_label (region->label);
864 expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
866 emit_label (around_label);
869 /* End an exception region for a must-not-throw filter. FAILURE is an
870 expression to invoke if an uncaught exception propagates this far.
872 This is conceptually identical to expand_eh_region_end_allowed with
873 an empty allowed list (if you passed "std::terminate" instead of
874 "__cxa_call_unexpected"), but they are represented differently in the C++ LSDA. */
878 expand_eh_region_end_must_not_throw (failure)
881 struct eh_region *region;
887 region = expand_eh_region_end ();
888 region->type = ERT_MUST_NOT_THROW;
889 region->label = gen_label_rtx ();
891 /* We must emit the call to FAILURE here, so that if this function
892 throws a different exception, it will be processed by the correct region. */
895 around_label = gen_label_rtx ();
896 emit_jump (around_label);
898 emit_label (region->label);
899 expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
901 emit_label (around_label);
904 /* End an exception region for a throw. No handling goes on here,
905 but it's the easiest way for the front-end to indicate what type is being thrown. */
909 expand_eh_region_end_throw (type)
912 struct eh_region *region;
917 region = expand_eh_region_end ();
918 region->type = ERT_THROW;
919 region->u.throw.type = type;
922 /* End a fixup region. Within this region the cleanups for the immediately
923 enclosing region are _not_ run. This is used for goto cleanup to avoid
924 destroying an object twice.
926 This would be an extraordinarily simple prospect, were it not for the
927 fact that we don't actually know what the immediately enclosing region
928 is. This surprising fact is because expand_cleanups is currently
929 generating a sequence that it will insert somewhere else. We collect
930 the proper notion of "enclosing" in convert_from_eh_region_ranges. */
933 expand_eh_region_end_fixup (handler)
936 struct eh_region *fixup;
941 fixup = expand_eh_region_end ();
942 fixup->type = ERT_FIXUP;
943 fixup->u.fixup.cleanup_exp = handler;
946 /* Return an rtl expression for a pointer to the exception object within a handler. */
950 get_exception_pointer ()
952 rtx exc_ptr = cfun->eh->exc_ptr;
955 exc_ptr = gen_reg_rtx (Pmode);
956 cfun->eh->exc_ptr = exc_ptr;
961 /* Return an rtl expression for the exception dispatch filter within a handler. */
965 get_exception_filter ()
967 rtx filter = cfun->eh->filter;
970 filter = gen_reg_rtx (word_mode);
971 cfun->eh->filter = filter;
976 /* Begin a region that will contain entries created with
977 add_partial_entry. */
980 begin_protect_partials ()
982 /* Push room for a new list. */
983 cfun->eh->protect_list
984 = tree_cons (NULL_TREE, NULL_TREE, cfun->eh->protect_list);
987 /* Start a new exception region for a region of code that has a
988 cleanup action and push the HANDLER for the region onto
989 protect_list. All of the regions created with add_partial_entry
990 will be ended when end_protect_partials is invoked. */
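/* Illustrative only: the expected calling protocol is roughly

       begin_protect_partials ();
       add_partial_entry (handler_1);    <- opens a region, pushes handler_1
       add_partial_entry (handler_2);
       ...
       end_protect_partials ();          <- closes the regions, innermost first

   with each pushed handler eventually expanded via
   expand_eh_region_end_cleanup.  */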
993 add_partial_entry (handler)
996 expand_eh_region_start ();
998 /* ??? This comment was old before the most recent rewrite. We
999 really ought to fix the callers at some point. */
1000 /* For backwards compatibility, we allow callers to omit calls to
1001 begin_protect_partials for the outermost region. So, we must
1002 explicitly do so here. */
1003 if (!cfun->eh->protect_list)
1004 begin_protect_partials ();
1006 /* Add this entry to the front of the list. */
1007 TREE_VALUE (cfun->eh->protect_list)
1008 = tree_cons (NULL_TREE, handler, TREE_VALUE (cfun->eh->protect_list));
1011 /* End all the pending exception regions on protect_list. */
1014 end_protect_partials ()
1018 /* ??? This comment was old before the most recent rewrite. We
1019 really ought to fix the callers at some point. */
1020 /* For backwards compatibility, we allow callers to omit the call to
1021 begin_protect_partials for the outermost region. So,
1022 PROTECT_LIST may be NULL. */
1023 if (!cfun->eh->protect_list)
1026 /* Pop the topmost entry. */
1027 t = TREE_VALUE (cfun->eh->protect_list);
1028 cfun->eh->protect_list = TREE_CHAIN (cfun->eh->protect_list);
1030 /* End all the exception regions. */
1031 for (; t; t = TREE_CHAIN (t))
1032 expand_eh_region_end_cleanup (TREE_VALUE (t));
1036 /* This section is for the exception handling specific optimization pass. */
1038 /* Random access the exception region tree. It's just as simple to
1039 collect the regions this way as in expand_eh_region_start, but
1040 without having to realloc memory. */
1043 collect_eh_region_array ()
1045 struct eh_region **array, *i;
1047 i = cfun->eh->region_tree;
1051 array = xcalloc (cfun->eh->last_region_number + 1, sizeof (*array));
1052 cfun->eh->region_array = array;
1056 array[i->region_number] = i;
1058 /* If there are sub-regions, process them. */
1061 /* If there are peers, process them. */
1062 else if (i->next_peer)
1064 /* Otherwise, step back up the tree to the next peer. */
1071 } while (i->next_peer == NULL);
1078 resolve_fixup_regions ()
1080 int i, j, n = cfun->eh->last_region_number;
1082 for (i = 1; i <= n; ++i)
1084 struct eh_region *fixup = cfun->eh->region_array[i];
1085 struct eh_region *cleanup;
1087 if (! fixup || fixup->type != ERT_FIXUP)
1090 for (j = 1; j <= n; ++j)
1092 cleanup = cfun->eh->region_array[j];
1093 if (cleanup->type == ERT_CLEANUP
1094 && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
1100 fixup->u.fixup.real_region = cleanup->outer;
1104 /* Now that we've discovered what region actually encloses a fixup,
1105 we can shuffle pointers and remove them from the tree. */
1108 remove_fixup_regions ()
1112 struct eh_region *fixup;
1114 /* Walk the insn chain and adjust the REG_EH_REGION numbers
1115 for instructions referencing fixup regions. This is only
1116 strictly necessary for fixup regions with no parent, but
1117 doesn't hurt to do it for all regions. */
1118 for (insn = get_insns(); insn ; insn = NEXT_INSN (insn))
1120 && (note = find_reg_note (insn, REG_EH_REGION, NULL))
1121 && INTVAL (XEXP (note, 0)) > 0
1122 && (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
1123 && fixup->type == ERT_FIXUP)
1125 if (fixup->u.fixup.real_region)
1126 XEXP (note, 1) = GEN_INT (fixup->u.fixup.real_region->region_number);
1128 remove_note (insn, note);
1131 /* Remove the fixup regions from the tree. */
1132 for (i = cfun->eh->last_region_number; i > 0; --i)
1134 fixup = cfun->eh->region_array[i];
1138 /* Allow GC to maybe free some memory. */
1139 if (fixup->type == ERT_CLEANUP)
1140 fixup->u.cleanup.exp = NULL_TREE;
1142 if (fixup->type != ERT_FIXUP)
1147 struct eh_region *parent, *p, **pp;
1149 parent = fixup->u.fixup.real_region;
1151 /* Fix up the children's parent pointers; find the end of the list. */
1153 for (p = fixup->inner; ; p = p->next_peer)
1160 /* In the tree of cleanups, only outer-inner ordering matters.
1161 So link the children back in anywhere at the correct level. */
1163 pp = &parent->inner;
1165 pp = &cfun->eh->region_tree;
1168 fixup->inner = NULL;
1171 remove_eh_handler (fixup);
1175 /* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
1176 can_throw instruction in the region. */
1179 convert_from_eh_region_ranges_1 (pinsns, orig_sp, cur)
1187 for (insn = *pinsns; insn ; insn = next)
1189 next = NEXT_INSN (insn);
1190 if (GET_CODE (insn) == NOTE)
1192 int kind = NOTE_LINE_NUMBER (insn);
1193 if (kind == NOTE_INSN_EH_REGION_BEG
1194 || kind == NOTE_INSN_EH_REGION_END)
1196 if (kind == NOTE_INSN_EH_REGION_BEG)
1198 struct eh_region *r;
1201 cur = NOTE_EH_HANDLER (insn);
1203 r = cfun->eh->region_array[cur];
1204 if (r->type == ERT_FIXUP)
1206 r = r->u.fixup.real_region;
1207 cur = r ? r->region_number : 0;
1209 else if (r->type == ERT_CATCH)
1212 cur = r ? r->region_number : 0;
1218 /* Removing the first insn of a CALL_PLACEHOLDER sequence
1219 requires extra care to adjust sequence start. */
1220 if (insn == *pinsns)
1226 else if (INSN_P (insn))
1229 && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1230 /* Calls can always potentially throw exceptions, unless
1231 they have a REG_EH_REGION note with a value of 0 or less.
1232 Which should be the only possible kind so far. */
1233 && (GET_CODE (insn) == CALL_INSN
1234 /* If we wanted exceptions for non-call insns, then
1235 any may_trap_p instruction could throw. */
1236 || (flag_non_call_exceptions
1237 && may_trap_p (PATTERN (insn)))))
1239 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
1243 if (GET_CODE (insn) == CALL_INSN
1244 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1246 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
1248 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
1250 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
1261 convert_from_eh_region_ranges ()
1266 collect_eh_region_array ();
1267 resolve_fixup_regions ();
1269 stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
1270 insns = get_insns ();
1271 convert_from_eh_region_ranges_1 (&insns, stack, 0);
1274 remove_fixup_regions ();
1278 find_exception_handler_labels ()
1280 rtx list = NULL_RTX;
1283 free_EXPR_LIST_list (&exception_handler_labels);
1285 if (cfun->eh->region_tree == NULL)
1288 for (i = cfun->eh->last_region_number; i > 0; --i)
1290 struct eh_region *region = cfun->eh->region_array[i];
1295 if (cfun->eh->built_landing_pads)
1296 lab = region->landing_pad;
1298 lab = region->label;
1301 list = alloc_EXPR_LIST (0, lab, list);
1304 /* For sjlj exceptions, need the return label to remain live until
1305 after landing pad generation. */
1306 if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
1307 list = alloc_EXPR_LIST (0, return_label, list);
1309 exception_handler_labels = list;
1313 static struct eh_region *
1314 duplicate_eh_region_1 (o, map)
1315 struct eh_region *o;
1316 struct inline_remap *map;
1319 = (struct eh_region *) xcalloc (1, sizeof (struct eh_region));
1321 n->region_number = o->region_number + cfun->eh->last_region_number;
1327 case ERT_MUST_NOT_THROW:
1331 if (o->u.try.continue_label)
1332 n->u.try.continue_label
1333 = get_label_from_map (map,
1334 CODE_LABEL_NUMBER (o->u.try.continue_label));
1338 n->u.catch.type = o->u.catch.type;
1341 case ERT_ALLOWED_EXCEPTIONS:
1342 n->u.allowed.type_list = o->u.allowed.type_list;
1346 n->u.throw.type = o->u.throw.type;
1353 n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
1356 n->resume = map->insn_map[INSN_UID (o->resume)];
1357 if (n->resume == NULL)
1365 duplicate_eh_region_2 (o, n_array)
1366 struct eh_region *o;
1367 struct eh_region **n_array;
1369 struct eh_region *n = n_array[o->region_number];
1374 n->u.try.catch = n_array[o->u.try.catch->region_number];
1375 n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
1379 if (o->u.catch.next_catch)
1380 n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
1381 if (o->u.catch.prev_catch)
1382 n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
1390 n->outer = n_array[o->outer->region_number];
1392 n->inner = n_array[o->inner->region_number];
1394 n->next_peer = n_array[o->next_peer->region_number];
1398 duplicate_eh_regions (ifun, map)
1399 struct function *ifun;
1400 struct inline_remap *map;
1402 int ifun_last_region_number = ifun->eh->last_region_number;
1403 struct eh_region **n_array, *root, *cur;
1406 if (ifun_last_region_number == 0)
1409 n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));
1411 for (i = 1; i <= ifun_last_region_number; ++i)
1413 cur = ifun->eh->region_array[i];
1414 if (!cur || cur->region_number != i)
1416 n_array[i] = duplicate_eh_region_1 (cur, map);
1418 for (i = 1; i <= ifun_last_region_number; ++i)
1420 cur = ifun->eh->region_array[i];
1421 if (!cur || cur->region_number != i)
1423 duplicate_eh_region_2 (cur, n_array);
1426 root = n_array[ifun->eh->region_tree->region_number];
1427 cur = cfun->eh->cur_region;
1430 struct eh_region *p = cur->inner;
1433 while (p->next_peer)
1435 p->next_peer = root;
1440 for (i = 1; i <= ifun_last_region_number; ++i)
1441 if (n_array[i]->outer == NULL)
1442 n_array[i]->outer = cur;
1446 struct eh_region *p = cfun->eh->region_tree;
1449 while (p->next_peer)
1451 p->next_peer = root;
1454 cfun->eh->region_tree = root;
1459 i = cfun->eh->last_region_number;
1460 cfun->eh->last_region_number = i + ifun_last_region_number;
1465 /* ??? Move from tree.c to tree.h. */
1466 #define TYPE_HASH(TYPE) ((HOST_WIDE_INT) (TYPE) & 0777777)
1469 t2r_eq (pentry, pdata)
1473 tree entry = (tree) pentry;
1474 tree data = (tree) pdata;
1476 return TREE_PURPOSE (entry) == data;
1483 tree entry = (tree) pentry;
1484 return TYPE_HASH (TREE_PURPOSE (entry));
1488 t2r_mark_1 (slot, data)
1490 PTR data ATTRIBUTE_UNUSED;
1492 tree contents = (tree) *slot;
1493 ggc_mark_tree (contents);
1501 htab_traverse (*(htab_t *)addr, t2r_mark_1, NULL);
1505 add_type_for_runtime (type)
1510 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1511 TYPE_HASH (type), INSERT);
1514 tree runtime = (*lang_eh_runtime_type) (type);
1515 *slot = tree_cons (type, runtime, NULL_TREE);
1520 lookup_type_for_runtime (type)
1525 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1526 TYPE_HASH (type), NO_INSERT);
1528 /* We should always have inserted the data earlier. */
1529 return TREE_VALUE (*slot);
1533 /* Represent an entry in @TTypes for either catch actions
1534 or exception filter actions. */
1535 struct ttypes_filter
1541 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
1542 (a tree) for a @TTypes type node we are thinking about adding. */
1545 ttypes_filter_eq (pentry, pdata)
1549 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1550 tree data = (tree) pdata;
1552 return entry->t == data;
1556 ttypes_filter_hash (pentry)
1559 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1560 return TYPE_HASH (entry->t);
1563 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
1564 exception specification list we are thinking about adding. */
1565 /* ??? Currently we use the type lists in the order given. Someone
1566 should put these in some canonical order. */
1569 ehspec_filter_eq (pentry, pdata)
1573 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1574 const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
1576 return type_list_equal (entry->t, data->t);
1579 /* Hash function for exception specification lists. */
1582 ehspec_filter_hash (pentry)
1585 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1589 for (list = entry->t; list ; list = TREE_CHAIN (list))
1590 h = (h << 5) + (h >> 27) + TYPE_HASH (TREE_VALUE (list));
1594 /* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
1595 up the search. Return the filter value to be used. */
1598 add_ttypes_entry (ttypes_hash, type)
1602 struct ttypes_filter **slot, *n;
1604 slot = (struct ttypes_filter **)
1605 htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);
1607 if ((n = *slot) == NULL)
1609 /* Filter value is a 1 based table index. */
1611 n = (struct ttypes_filter *) xmalloc (sizeof (*n));
1613 n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
1616 VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
1622 /* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
1623 to speed up the search. Return the filter value to be used. */
1626 add_ehspec_entry (ehspec_hash, ttypes_hash, list)
1631 struct ttypes_filter **slot, *n;
1632 struct ttypes_filter dummy;
1635 slot = (struct ttypes_filter **)
1636 htab_find_slot (ehspec_hash, &dummy, INSERT);
1638 if ((n = *slot) == NULL)
1640 /* Filter value is a -1 based byte index into a uleb128 buffer. */
1642 n = (struct ttypes_filter *) xmalloc (sizeof (*n));
1644 n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
1647 /* Look up each type in the list and encode its filter
1648 value as a uleb128. Terminate the list with 0. */
1649 for (; list ; list = TREE_CHAIN (list))
1650 push_uleb128 (&cfun->eh->ehspec_data,
1651 add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
1652 VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
1658 /* Generate the action filter values to be used for CATCH and
1659 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
1660 we use lots of landing pads, and so every type or list can share
1661 the same filter value, which saves table space. */
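/* For instance (illustrative numbers only): the first distinct catch type
   seen receives filter value 1, the second 2, and so on, indexing
   ttype_data; an exception specification receives a negative filter whose
   magnitude is a 1-based byte offset into ehspec_data, where the
   specification is stored as a 0-terminated sequence of uleb128-encoded
   ttype filter values.  */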
1664 assign_filter_values ()
1667 htab_t ttypes, ehspec;
1669 VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
1670 VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");
1672 ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
1673 ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
1675 for (i = cfun->eh->last_region_number; i > 0; --i)
1677 struct eh_region *r = cfun->eh->region_array[i];
1679 /* Mind we don't process a region more than once. */
1680 if (!r || r->region_number != i)
1686 r->u.catch.filter = add_ttypes_entry (ttypes, r->u.catch.type);
1689 case ERT_ALLOWED_EXCEPTIONS:
1691 = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
1699 htab_delete (ttypes);
1700 htab_delete (ehspec);
1704 build_post_landing_pads ()
1708 for (i = cfun->eh->last_region_number; i > 0; --i)
1710 struct eh_region *region = cfun->eh->region_array[i];
1713 /* Mind we don't process a region more than once. */
1714 if (!region || region->region_number != i)
1717 switch (region->type)
1720 /* ??? Collect the set of all non-overlapping catch handlers
1721 all the way up the chain until blocked by a cleanup. */
1722 /* ??? Outer try regions can share landing pads with inner
1723 try regions if the types are completely non-overlapping,
1724 and there are no intervening cleanups. */
1726 region->post_landing_pad = gen_label_rtx ();
1730 emit_label (region->post_landing_pad);
1732 /* ??? It is mighty inconvenient to call back into the
1733 switch statement generation code in expand_end_case.
1734 Rapid prototyping sez a sequence of ifs. */
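/* Illustratively, for a try block with catches of types T1 and T2 the
   sequence emitted here amounts to

       post_landing_pad:
         if (filter == filter value for T1) goto catch_1;
         if (filter == filter value for T2) goto catch_2;
         RESX;       <- resume unwinding to the enclosing region

   with a catch-all clause compiled as an unconditional jump.  */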
1736 struct eh_region *c;
1737 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
1739 /* ??? _Unwind_ForcedUnwind wants no match here. */
1740 if (c->u.catch.type == NULL)
1741 emit_jump (c->label);
1743 emit_cmp_and_jump_insns (cfun->eh->filter,
1744 GEN_INT (c->u.catch.filter),
1745 EQ, NULL_RTX, word_mode,
1750 /* We delay the generation of the _Unwind_Resume until we generate
1751 landing pads. We emit a marker here so as to get good control
1752 flow data in the meantime. */
1754 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1760 emit_insns_before (seq, region->u.try.catch->label);
1763 case ERT_ALLOWED_EXCEPTIONS:
1764 region->post_landing_pad = gen_label_rtx ();
1768 emit_label (region->post_landing_pad);
1770 emit_cmp_and_jump_insns (cfun->eh->filter,
1771 GEN_INT (region->u.allowed.filter),
1772 EQ, NULL_RTX, word_mode, 0, 0,
1775 /* We delay the generation of the _Unwind_Resume until we generate
1776 landing pads. We emit a marker here so as to get good control
1777 flow data in the meantime. */
1779 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1785 emit_insns_before (seq, region->label);
1789 case ERT_MUST_NOT_THROW:
1790 region->post_landing_pad = region->label;
1795 /* Nothing to do. */
1804 /* Replace RESX patterns with jumps to the next handler if any, or calls to
1805 _Unwind_Resume otherwise. */
1808 connect_post_landing_pads ()
1812 for (i = cfun->eh->last_region_number; i > 0; --i)
1814 struct eh_region *region = cfun->eh->region_array[i];
1815 struct eh_region *outer;
1818 /* Mind we don't process a region more than once. */
1819 if (!region || region->region_number != i)
1822 /* If there is no RESX, or it has been deleted by flow, there's
1823 nothing to fix up. */
1824 if (! region->resume || INSN_DELETED_P (region->resume))
1827 /* Search for another landing pad in this function. */
1828 for (outer = region->outer; outer ; outer = outer->outer)
1829 if (outer->post_landing_pad)
1835 emit_jump (outer->post_landing_pad);
1837 emit_library_call (unwind_resume_libfunc, LCT_THROW,
1838 VOIDmode, 1, cfun->eh->exc_ptr, Pmode);
1842 emit_insns_before (seq, region->resume);
1844 /* Leave the RESX to be deleted by flow. */
1850 dw2_build_landing_pads ()
1854 for (i = cfun->eh->last_region_number; i > 0; --i)
1856 struct eh_region *region = cfun->eh->region_array[i];
1859 /* Mind we don't process a region more than once. */
1860 if (!region || region->region_number != i)
1863 if (region->type != ERT_CLEANUP
1864 && region->type != ERT_TRY
1865 && region->type != ERT_ALLOWED_EXCEPTIONS)
1870 region->landing_pad = gen_label_rtx ();
1871 emit_label (region->landing_pad);
1873 #ifdef HAVE_exception_receiver
1874 if (HAVE_exception_receiver)
1875 emit_insn (gen_exception_receiver ());
1878 #ifdef HAVE_nonlocal_goto_receiver
1879 if (HAVE_nonlocal_goto_receiver)
1880 emit_insn (gen_nonlocal_goto_receiver ());
1885 /* If the eh_return data registers are call-saved, then we
1886 won't have considered them clobbered from the call that
1887 threw. Kill them now. */
1890 unsigned r = EH_RETURN_DATA_REGNO (j);
1891 if (r == INVALID_REGNUM)
1893 if (! call_used_regs[r])
1894 emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
1897 emit_move_insn (cfun->eh->exc_ptr,
1898 gen_rtx_REG (Pmode, EH_RETURN_DATA_REGNO (0)));
1899 emit_move_insn (cfun->eh->filter,
1900 gen_rtx_REG (Pmode, EH_RETURN_DATA_REGNO (1)));
1905 emit_insns_before (seq, region->post_landing_pad);
1912 int directly_reachable;
1915 int call_site_index;
1919 sjlj_find_directly_reachable_regions (lp_info)
1920 struct sjlj_lp_info *lp_info;
1923 bool found_one = false;
1925 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1927 struct eh_region *region;
1931 if (! INSN_P (insn))
1934 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1935 if (!note || INTVAL (XEXP (note, 0)) <= 0)
1938 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
1940 type_thrown = NULL_TREE;
1941 if (region->type == ERT_THROW)
1943 type_thrown = region->u.throw.type;
1944 region = region->outer;
1947 /* Find the first containing region that might handle the exception.
1948 That's the landing pad to which we will transfer control. */
1949 for (; region; region = region->outer)
1950 if (reachable_next_level (region, type_thrown, 0) != RNL_NOT_CAUGHT)
1955 lp_info[region->region_number].directly_reachable = 1;
1964 sjlj_assign_call_site_values (dispatch_label, lp_info)
1966 struct sjlj_lp_info *lp_info;
1971 /* First task: build the action table. */
1973 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
1974 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
1976 for (i = cfun->eh->last_region_number; i > 0; --i)
1977 if (lp_info[i].directly_reachable)
1979 struct eh_region *r = cfun->eh->region_array[i];
1980 r->landing_pad = dispatch_label;
1981 lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
1982 if (lp_info[i].action_index != -1)
1983 cfun->uses_eh_lsda = 1;
1986 htab_delete (ar_hash);
1988 /* Next: assign dispatch values. In dwarf2 terms, this would be the
1989 landing pad label for the region. For sjlj though, there is one
1990 common landing pad from which we dispatch to the post-landing pads.
1992 A region receives a dispatch index if it is directly reachable
1993 and requires in-function processing. Regions that share post-landing
1994 pads may share dispatch indices. */
1995 /* ??? Post-landing pad sharing doesn't actually happen at the moment
1996 (see build_post_landing_pads) so we don't bother checking for it. */
1999 for (i = cfun->eh->last_region_number; i > 0; --i)
2000 if (lp_info[i].directly_reachable
2001 && lp_info[i].action_index >= 0)
2002 lp_info[i].dispatch_index = index++;
2004 /* Finally: assign call-site values. In dwarf2 terms, this would be
2005 the region number assigned by convert_to_eh_region_ranges, but
2006 handles no-action and must-not-throw differently. */
2009 for (i = cfun->eh->last_region_number; i > 0; --i)
2010 if (lp_info[i].directly_reachable)
2012 int action = lp_info[i].action_index;
2014 /* Map must-not-throw to otherwise unused call-site index 0. */
2017 /* Map no-action to otherwise unused call-site index -1. */
2018 else if (action == -1)
2020 /* Otherwise, look it up in the table. */
2022 index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
2024 lp_info[i].call_site_index = index;
2029 sjlj_mark_call_sites (lp_info)
2030 struct sjlj_lp_info *lp_info;
2032 int last_call_site = -2;
2035 mem = change_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
2036 plus_constant (XEXP (cfun->eh->sjlj_fc, 0),
2037 sjlj_fc_call_site_ofs));
2039 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2041 struct eh_region *region;
2043 rtx note, before, p;
2045 /* Reset value tracking at extended basic block boundaries. */
2046 if (GET_CODE (insn) == CODE_LABEL)
2047 last_call_site = -2;
2049 if (! INSN_P (insn))
2052 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2055 /* Calls (and trapping insns) without notes are outside any
2056 exception handling region in this function. Mark them as no action. */
2058 if (GET_CODE (insn) == CALL_INSN
2059 || (flag_non_call_exceptions
2060 && may_trap_p (PATTERN (insn))))
2061 this_call_site = -1;
2067 /* Calls that are known to not throw need not be marked. */
2068 if (INTVAL (XEXP (note, 0)) <= 0)
2071 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2072 this_call_site = lp_info[region->region_number].call_site_index;
2075 if (this_call_site == last_call_site)
2078 /* Don't separate a call from its argument loads. */
2080 if (GET_CODE (insn) == CALL_INSN)
2082 HARD_REG_SET parm_regs;
2085 /* Since different machines initialize their parameter registers
2086 in different orders, assume nothing. Collect the set of all
2087 parameter registers. */
2088 CLEAR_HARD_REG_SET (parm_regs);
2090 for (p = CALL_INSN_FUNCTION_USAGE (insn); p ; p = XEXP (p, 1))
2091 if (GET_CODE (XEXP (p, 0)) == USE
2092 && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
2094 if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
2097 /* We only care about registers which can hold function arguments. */
2099 if (! FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
2102 SET_HARD_REG_BIT (parm_regs, REGNO (XEXP (XEXP (p, 0), 0)));
2106 /* Search backward for the first set of a register in this set. */
2109 before = PREV_INSN (before);
2111 /* Given that we've done no other optimizations yet,
2112 the arguments should be immediately available. */
2113 if (GET_CODE (before) == CODE_LABEL)
2116 p = single_set (before);
2117 if (p && GET_CODE (SET_DEST (p)) == REG
2118 && REGNO (SET_DEST (p)) < FIRST_PSEUDO_REGISTER
2119 && TEST_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p))))
2121 CLEAR_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p)));
2128 emit_move_insn (mem, GEN_INT (this_call_site));
2132 emit_insns_before (p, before);
2133 last_call_site = this_call_site;
2137 /* Construct the SjLj_Function_Context. */
2140 sjlj_emit_function_enter (dispatch_label)
2143 rtx fn_begin, fc, mem, seq;
2145 fc = cfun->eh->sjlj_fc;
2149 /* We're storing this libcall's address into memory instead of
2150 calling it directly. Thus, we must call assemble_external_libcall
2151 here, as we cannot depend on emit_library_call to do it for us. */
2152 assemble_external_libcall (eh_personality_libfunc);
2153 mem = change_address (fc, Pmode,
2154 plus_constant (XEXP (fc, 0), sjlj_fc_personality_ofs));
2155 emit_move_insn (mem, eh_personality_libfunc);
2157 mem = change_address (fc, Pmode,
2158 plus_constant (XEXP (fc, 0), sjlj_fc_lsda_ofs));
2159 if (cfun->uses_eh_lsda)
2162 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", sjlj_funcdef_number);
2163 emit_move_insn (mem, gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)));
2166 emit_move_insn (mem, const0_rtx);
2168 #ifdef DONT_USE_BUILTIN_SETJMP
2171 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_NORMAL,
2172 TYPE_MODE (integer_type_node), 1,
2173 plus_constant (XEXP (fc, 0),
2174 sjlj_fc_jbuf_ofs), Pmode);
2176 note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
2177 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
2179 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2180 TYPE_MODE (integer_type_node), 0, 0,
2184 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2188 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2189 1, XEXP (fc, 0), Pmode);
2194 /* ??? Instead of doing this at the beginning of the function,
2195 do this in a block that is at loop level 0 and dominates all
2196 can_throw_internal instructions. */
2198 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2199 if (GET_CODE (fn_begin) == NOTE
2200 && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2202 emit_insns_after (seq, fn_begin);
2205 /* Call back from expand_function_end to know where we should put
2206 the call to unwind_sjlj_unregister_libfunc if needed. */
2209 sjlj_emit_function_exit_after (after)
2212 cfun->eh->sjlj_exit_after = after;
2216 sjlj_emit_function_exit ()
2222 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2223 1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);
2228 /* ??? Really this can be done in any block at loop level 0 that
2229 post-dominates all can_throw_internal instructions. This is
2230 the last possible moment. */
2232 emit_insns_after (seq, cfun->eh->sjlj_exit_after);
2236 sjlj_emit_dispatch_table (dispatch_label, lp_info)
2238 struct sjlj_lp_info *lp_info;
2240 int i, first_reachable;
2241 rtx mem, dispatch, seq, fc;
2243 fc = cfun->eh->sjlj_fc;
2247 emit_label (dispatch_label);
2249 #ifndef DONT_USE_BUILTIN_SETJMP
2250 expand_builtin_setjmp_receiver (dispatch_label);
2253 /* Load up dispatch index, exc_ptr and filter values from the
2254 function context. */
2255 mem = change_address (fc, TYPE_MODE (integer_type_node),
2256 plus_constant (XEXP (fc, 0), sjlj_fc_call_site_ofs));
2257 dispatch = copy_to_reg (mem);
2259 mem = change_address (fc, word_mode,
2260 plus_constant (XEXP (fc, 0), sjlj_fc_data_ofs));
2261 if (word_mode != Pmode)
2263 #ifdef POINTERS_EXTEND_UNSIGNED
2264 mem = convert_memory_address (Pmode, mem);
2266 mem = convert_to_mode (Pmode, mem, 0);
2269 emit_move_insn (cfun->eh->exc_ptr, mem);
2271 mem = change_address (fc, word_mode,
2272 plus_constant (XEXP (fc, 0),
2273 sjlj_fc_data_ofs + UNITS_PER_WORD));
2274 emit_move_insn (cfun->eh->filter, mem);
2276 /* Jump to one of the directly reachable regions. */
2277 /* ??? This really ought to be using a switch statement. */
2279 first_reachable = 0;
2280 for (i = cfun->eh->last_region_number; i > 0; --i)
2282 if (! lp_info[i].directly_reachable
2283 || lp_info[i].action_index < 0)
2286 if (! first_reachable)
2288 first_reachable = i;
2292 emit_cmp_and_jump_insns (dispatch,
2293 GEN_INT (lp_info[i].dispatch_index), EQ,
2294 NULL_RTX, TYPE_MODE (integer_type_node), 0, 0,
2295 cfun->eh->region_array[i]->post_landing_pad);
2301 emit_insns_before (seq, (cfun->eh->region_array[first_reachable]
2302 ->post_landing_pad));
2306 sjlj_build_landing_pads ()
2308 struct sjlj_lp_info *lp_info;
2310 lp_info = (struct sjlj_lp_info *) xcalloc (cfun->eh->last_region_number + 1,
2311 sizeof (struct sjlj_lp_info));
2313 if (sjlj_find_directly_reachable_regions (lp_info))
2315 rtx dispatch_label = gen_label_rtx ();
2318 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2319 int_size_in_bytes (sjlj_fc_type_node),
2320 TYPE_ALIGN (sjlj_fc_type_node));
2322 sjlj_assign_call_site_values (dispatch_label, lp_info);
2323 sjlj_mark_call_sites (lp_info);
2325 sjlj_emit_function_enter (dispatch_label);
2326 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2327 sjlj_emit_function_exit ();
2334 finish_eh_generation ()
2336 /* Nothing to do if no regions created. */
2337 if (cfun->eh->region_tree == NULL)
2340 /* The object here is to provide find_basic_blocks with detailed
2341 information (via reachable_handlers) on how exception control
2342 flows within the function. In this first pass, we can include
2343 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2344 regions, and hope that it will be useful in deleting unreachable
2345 handlers. Subsequently, we will generate landing pads which will
2346 connect many of the handlers, and then type information will not
2347 be effective. Still, this is a win over previous implementations. */
2349 jump_optimize_minimal (get_insns ());
2350 find_basic_blocks (get_insns (), max_reg_num (), 0);
2353 /* These registers are used by the landing pads. Make sure they
2354 have been generated. */
2355 get_exception_pointer ();
2356 get_exception_filter ();
2358 /* Construct the landing pads. */
2360 assign_filter_values ();
2361 build_post_landing_pads ();
2362 connect_post_landing_pads ();
2363 if (USING_SJLJ_EXCEPTIONS)
2364 sjlj_build_landing_pads ();
2366 dw2_build_landing_pads ();
2368 cfun->eh->built_landing_pads = 1;
2370 /* We've totally changed the CFG. Start over. */
2371 find_exception_handler_labels ();
2372 jump_optimize_minimal (get_insns ());
2373 find_basic_blocks (get_insns (), max_reg_num (), 0);
2377 /* This section handles removing dead code for flow. */
2379 /* Remove LABEL from the exception_handler_labels list. */
2382 remove_exception_handler_label (label)
2387 for (pl = &exception_handler_labels, l = *pl;
2388 XEXP (l, 0) != label;
2389 pl = &XEXP (l, 1), l = *pl)
2393 free_EXPR_LIST_node (l);
2396 /* Splice REGION from the region tree etc. */
2399 remove_eh_handler (region)
2400 struct eh_region *region;
2402 struct eh_region **pp, *p;
2406 /* For the benefit of efficiently handling REG_EH_REGION notes,
2407 replace this region in the region array with its containing
2408 region. Note that previous region deletions may result in
2409 multiple copies of this region in the array, so we have to
2410 search the whole thing. */
2411 for (i = cfun->eh->last_region_number; i > 0; --i)
2412 if (cfun->eh->region_array[i] == region)
2413 cfun->eh->region_array[i] = region->outer;
2415 if (cfun->eh->built_landing_pads)
2416 lab = region->landing_pad;
2418 lab = region->label;
2420 remove_exception_handler_label (lab);
2423 pp = &region->outer->inner;
2425 pp = &cfun->eh->region_tree;
2426 for (p = *pp; p != region; pp = &p->next_peer, p = *pp)
2431 for (p = region->inner; p->next_peer ; p = p->next_peer)
2432 p->outer = region->outer;
2433 p->next_peer = region->next_peer;
2434 p->outer = region->outer;
2435 *pp = region->inner;
2438 *pp = region->next_peer;
2440 if (region->type == ERT_CATCH)
2442 struct eh_region *try, *next, *prev;
2444 for (try = region->next_peer;
2445 try->type == ERT_CATCH;
2446 try = try->next_peer)
2448 if (try->type != ERT_TRY)
2451 next = region->u.catch.next_catch;
2452 prev = region->u.catch.prev_catch;
2455 next->u.catch.prev_catch = prev;
2457 try->u.try.last_catch = prev;
2459 prev->u.catch.next_catch = next;
2462 try->u.try.catch = next;
2464 remove_eh_handler (try);
2471 /* LABEL heads a basic block that is about to be deleted. If this
2472 label corresponds to an exception region, we may be able to
2473 delete the region. */
2476 maybe_remove_eh_handler (label)
2481 /* ??? After generating landing pads, it's not so simple to determine
2482 if the region data is completely unused. One must examine the
2483 landing pad and the post landing pad, and whether an inner try block
2484 is referencing the catch handlers directly. */
2485 if (cfun->eh->built_landing_pads)
2488 for (i = cfun->eh->last_region_number; i > 0; --i)
2490 struct eh_region *region = cfun->eh->region_array[i];
2491 if (region && region->label == label)
2493 /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2494 because there is no path to the fallback call to terminate.
2495 But the region continues to affect call-site data until there
2496 are no more contained calls, which we don't see here. */
2497 if (region->type == ERT_MUST_NOT_THROW)
2499 remove_exception_handler_label (region->label);
2500 region->label = NULL_RTX;
2503 remove_eh_handler (region);
2510 /* This section describes CFG exception edges for flow. */
2512 /* For communicating between calls to reachable_next_level. */
2513 struct reachable_info
2520 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2521 base class of TYPE, is in HANDLED. */
2524 check_handled (handled, type)
2529 /* We can check for exact matches without front-end help. */
2530 if (! lang_eh_type_covers)
2532 for (t = handled; t ; t = TREE_CHAIN (t))
2533 if (TREE_VALUE (t) == type)
2538 for (t = handled; t ; t = TREE_CHAIN (t))
2539 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2546 /* A subroutine of reachable_next_level. If we are collecting a list
2547 of handlers, add one. After landing pad generation, reference
2548 the landing pad instead of the handlers themselves. Further, the handlers are
2549 all wired together, so by referencing one, we've got them all.
2550 Before landing pad generation we reference each handler individually.
2552 LP_REGION contains the landing pad; REGION is the handler. */
2555 add_reachable_handler (info, lp_region, region)
2556 struct reachable_info *info;
2557 struct eh_region *lp_region;
2558 struct eh_region *region;
2563 if (cfun->eh->built_landing_pads)
2565 if (! info->handlers)
2566 info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
2569 info->handlers = alloc_INSN_LIST (region->label, info->handlers);
2572 /* Process one level of exception regions for reachability.
2573 If TYPE_THROWN is non-null, then it is the *exact* type being
2574 propagated. If INFO is non-null, then collect handler labels
2575 and caught/allowed type information between invocations. */
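/* For orientation, the RNL_* codes behave as follows for the callers
   below: RNL_NOT_CAUGHT means keep walking to the outer region;
   RNL_MAYBE_CAUGHT means a handler here might catch the exception, so
   record it and continue searching; RNL_CAUGHT means the search
   definitely ends at a handler within this function; RNL_BLOCKED means
   propagation definitely stops here without reaching a handler we emit
   (the must-not-throw case in which the runtime itself makes the
   failure call).  This is a reading aid; the enum itself is defined
   earlier in this file.  */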
2577 static enum reachable_code
2578 reachable_next_level (region, type_thrown, info)
2579 struct eh_region *region;
2581 struct reachable_info *info;
2583 switch (region->type)
2586 /* Before landing-pad generation, we model control flow
2587 directly to the individual handlers. In this way we can
2588 see that catch handler types may shadow one another. */
2589 add_reachable_handler (info, region, region);
2590 return RNL_MAYBE_CAUGHT;
2594 struct eh_region *c;
2595 enum reachable_code ret = RNL_NOT_CAUGHT;
2597 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
2599 /* A catch-all handler ends the search. */
2600 /* ??? _Unwind_ForcedUnwind will want outer cleanups
2601 to be run as well. */
2602 if (c->u.catch.type == NULL)
2604 add_reachable_handler (info, region, c);
2610 /* If we have a type match, end the search. */
2611 if (c->u.catch.type == type_thrown
2612 || (lang_eh_type_covers
2613 && (*lang_eh_type_covers) (c->u.catch.type,
2616 add_reachable_handler (info, region, c);
2620 /* If we have definitive information of a match failure,
2621 the catch won't trigger. */
2622 if (lang_eh_type_covers)
2623 return RNL_NOT_CAUGHT;
2627 ret = RNL_MAYBE_CAUGHT;
2629 /* A type must not have been previously caught. */
2630 else if (! check_handled (info->types_caught, c->u.catch.type))
2632 add_reachable_handler (info, region, c);
2633 info->types_caught = tree_cons (NULL, c->u.catch.type,
2634 info->types_caught);
2636 /* ??? If the catch type is a base class of every allowed
2637 type, then we know we can stop the search. */
2638 ret = RNL_MAYBE_CAUGHT;
2645 case ERT_ALLOWED_EXCEPTIONS:
2646 /* An empty list of types definitely ends the search. */
2647 if (region->u.allowed.type_list == NULL_TREE)
2649 add_reachable_handler (info, region, region);
2653 /* Collect a list of lists of allowed types for use in detecting
2654 when a catch may be transformed into a catch-all. */
2656 info->types_allowed = tree_cons (NULL_TREE,
2657 region->u.allowed.type_list,
2658 info->types_allowed);
2660 /* If we have definitive information about the type hierarchy,
2661 then we can tell if the thrown type will pass through the
2663 if (type_thrown && lang_eh_type_covers)
2665 if (check_handled (region->u.allowed.type_list, type_thrown))
2666 return RNL_NOT_CAUGHT;
2669 add_reachable_handler (info, region, region);
2674 add_reachable_handler (info, region, region);
2675 return RNL_MAYBE_CAUGHT;
2678 /* Catch regions are handled by their controlling try region. */
2679 return RNL_NOT_CAUGHT;
2681 case ERT_MUST_NOT_THROW:
2682 /* Here we end our search, since no exceptions may propagate.
2683 If we've touched down at some landing pad previously, then the
2684 explicit function call we generated may be used. Otherwise
2685 the call is made by the runtime. */
2686 if (info && info->handlers)
2688 add_reachable_handler (info, region, region);
2696 /* Shouldn't see these here. */
2703 /* Retrieve a list of labels of exception handlers which can be
2704 reached by a given insn. */
2707 reachable_handlers (insn)
2710 struct reachable_info info;
2711 struct eh_region *region;
2715 if (GET_CODE (insn) == JUMP_INSN
2716 && GET_CODE (PATTERN (insn)) == RESX)
2717 region_number = XINT (PATTERN (insn), 0);
2720 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2721 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2723 region_number = INTVAL (XEXP (note, 0));
2726 memset (&info, 0, sizeof (info));
2728 region = cfun->eh->region_array[region_number];
2730 type_thrown = NULL_TREE;
2731 if (region->type == ERT_THROW)
2733 type_thrown = region->u.throw.type;
2734 region = region->outer;
2736 else if (GET_CODE (insn) == JUMP_INSN
2737 && GET_CODE (PATTERN (insn)) == RESX)
2738 region = region->outer;
2740 for (; region; region = region->outer)
2741 if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
2744 return info.handlers;
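/* A sketch of the intended consumer, for orientation only; the real
   caller is the CFG construction code in flow, which may differ in
   detail.  Each element's XEXP (elt, 0) is a label that should get an
   EH edge from INSN's block:

	rtx elt, handlers = reachable_handlers (insn);
	for (elt = handlers; elt; elt = XEXP (elt, 1))
	  add_one_eh_edge (insn, XEXP (elt, 0));
	free_INSN_LIST_list (&handlers);

   where add_one_eh_edge stands in for whatever edge-creation primitive
   the caller actually uses.  */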
2747 /* Determine if the given INSN can throw an exception that is caught
2748 within the function. */
2751 can_throw_internal (insn)
2754 struct eh_region *region;
2758 if (! INSN_P (insn))
2761 if (GET_CODE (insn) == INSN
2762 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2763 insn = XVECEXP (PATTERN (insn), 0, 0);
2765 if (GET_CODE (insn) == CALL_INSN
2766 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2769 for (i = 0; i < 3; ++i)
2771 rtx sub = XEXP (PATTERN (insn), i);
2772 for (; sub ; sub = NEXT_INSN (sub))
2773 if (can_throw_internal (sub))
2779 /* Every insn that might throw has an EH_REGION note. */
2780 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2781 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2784 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2786 type_thrown = NULL_TREE;
2787 if (region->type == ERT_THROW)
2789 type_thrown = region->u.throw.type;
2790 region = region->outer;
2793 /* If this exception is ignored by each and every containing region,
2794 then control passes straight out. The runtime may handle some
2795 regions, which also do not require processing internally. */
2796 for (; region; region = region->outer)
2798 enum reachable_code how = reachable_next_level (region, type_thrown, 0);
2799 if (how == RNL_BLOCKED)
2801 if (how != RNL_NOT_CAUGHT)
2808 /* Determine if the given INSN can throw an exception that is
2809 visible outside the function. */
2812 can_throw_external (insn)
2815 struct eh_region *region;
2819 if (! INSN_P (insn))
2822 if (GET_CODE (insn) == INSN
2823 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2824 insn = XVECEXP (PATTERN (insn), 0, 0);
2826 if (GET_CODE (insn) == CALL_INSN
2827 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2830 for (i = 0; i < 3; ++i)
2832 rtx sub = XEXP (PATTERN (insn), i);
2833 for (; sub ; sub = NEXT_INSN (sub))
2834 if (can_throw_external (sub))
2840 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2843 /* Calls (and trapping insns) without notes are outside any
2844 exception handling region in this function. We have to
2845 assume they might throw. Given that the front end and middle
2846 ends mark known NOTHROW functions, this isn't so wildly inaccurate. */
2848 return (GET_CODE (insn) == CALL_INSN
2849 || (flag_non_call_exceptions
2850 && may_trap_p (PATTERN (insn))));
2852 if (INTVAL (XEXP (note, 0)) <= 0)
2855 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2857 type_thrown = NULL_TREE;
2858 if (region->type == ERT_THROW)
2860 type_thrown = region->u.throw.type;
2861 region = region->outer;
2864 /* If the exception is caught or blocked by any containing region,
2865 then it is not seen by any calling function. */
2866 for (; region ; region = region->outer)
2867 if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
2873 /* True if nothing in this function can throw outside this function. */
2876 nothrow_function_p ()
2880 if (! flag_exceptions)
2883 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2884 if (can_throw_external (insn))
2886 for (insn = current_function_epilogue_delay_list; insn;
2887 insn = XEXP (insn, 1))
2888 if (can_throw_external (insn))
2895 /* Various hooks for unwind library. */
2897 /* Do any necessary initialization to access arbitrary stack frames.
2898 On the SPARC, this means flushing the register windows. */
2901 expand_builtin_unwind_init ()
2903 /* Set this so all the registers get saved in our frame; we need to be
2904 able to copy the saved values for any registers from frames we unwind. */
2905 current_function_has_nonlocal_label = 1;
2907 #ifdef SETUP_FRAME_ADDRESSES
2908 SETUP_FRAME_ADDRESSES ();
2913 expand_builtin_eh_return_data_regno (arglist)
2916 tree which = TREE_VALUE (arglist);
2917 unsigned HOST_WIDE_INT iwhich;
2919 if (TREE_CODE (which) != INTEGER_CST)
2921 error ("argument of `__builtin_eh_return_regno' must be constant");
2925 iwhich = tree_low_cst (which, 1);
2926 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2927 if (iwhich == INVALID_REGNUM)
2930 #ifdef DWARF_FRAME_REGNUM
2931 iwhich = DWARF_FRAME_REGNUM (iwhich);
2933 iwhich = DBX_REGISTER_NUMBER (iwhich);
2936 return GEN_INT (iwhich);
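/* The usual consumer of this builtin is a personality routine, which
   hands the exception object and the handler switch value to the
   landing pad with something along these lines (illustrative; see the
   unwind library sources for the real code):

	_Unwind_SetGR (context, __builtin_eh_return_data_regno (0),
		       (_Unwind_Ptr) exception_object);
	_Unwind_SetGR (context, __builtin_eh_return_data_regno (1),
		       handler_switch_value);
*/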
2939 /* Given a value extracted from the return address register or stack slot,
2940 return the actual address encoded in that value. */
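/* For instance, a target whose return-address slot also carries a mode
   or privilege bit in its low bits would define MASK_RETURN_ADDR to
   strip those bits, and a target whose saved value does not point at
   the actual return point would define RETURN_ADDR_OFFSET to correct
   for the difference.  Both macros are optional; this is only an
   illustration of their intent.  */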
2943 expand_builtin_extract_return_addr (addr_tree)
2946 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2948 /* First mask out any unwanted bits. */
2949 #ifdef MASK_RETURN_ADDR
2950 expand_and (addr, MASK_RETURN_ADDR, addr);
2953 /* Then adjust to find the real return address. */
2954 #if defined (RETURN_ADDR_OFFSET)
2955 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
2961 /* Given an actual address in addr_tree, do any necessary encoding
2962 and return the value to be stored in the return address register or
2963 stack slot so the epilogue will return to that address. */
2966 expand_builtin_frob_return_addr (addr_tree)
2969 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2971 #ifdef RETURN_ADDR_OFFSET
2972 addr = force_reg (Pmode, addr);
2973 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
2979 /* Set up the epilogue with the magic bits we'll need to return to the
2980 exception handler. */
2983 expand_builtin_eh_return (stackadj_tree, handler_tree)
2984 tree stackadj_tree, handler_tree;
2986 rtx stackadj, handler;
2988 stackadj = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
2989 handler = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
2991 if (! cfun->eh->ehr_label)
2993 cfun->eh->ehr_stackadj = copy_to_reg (stackadj);
2994 cfun->eh->ehr_handler = copy_to_reg (handler);
2995 cfun->eh->ehr_label = gen_label_rtx ();
2999 if (stackadj != cfun->eh->ehr_stackadj)
3000 emit_move_insn (cfun->eh->ehr_stackadj, stackadj);
3001 if (handler != cfun->eh->ehr_handler)
3002 emit_move_insn (cfun->eh->ehr_handler, handler);
3005 emit_jump (cfun->eh->ehr_label);
3011 rtx sa, ra, around_label;
3013 if (! cfun->eh->ehr_label)
3016 sa = EH_RETURN_STACKADJ_RTX;
3019 error ("__builtin_eh_return not supported on this target");
3023 current_function_calls_eh_return = 1;
3025 around_label = gen_label_rtx ();
3026 emit_move_insn (sa, const0_rtx);
3027 emit_jump (around_label);
3029 emit_label (cfun->eh->ehr_label);
3030 clobber_return_register ();
3032 #ifdef HAVE_eh_return
3034 emit_insn (gen_eh_return (cfun->eh->ehr_stackadj, cfun->eh->ehr_handler));
3038 ra = EH_RETURN_HANDLER_RTX;
3041 error ("__builtin_eh_return not supported on this target");
3042 ra = gen_reg_rtx (Pmode);
3045 emit_move_insn (sa, cfun->eh->ehr_stackadj);
3046 emit_move_insn (ra, cfun->eh->ehr_handler);
3049 emit_label (around_label);
3052 struct action_record
3060 action_record_eq (pentry, pdata)
3064 const struct action_record *entry = (const struct action_record *) pentry;
3065 const struct action_record *data = (const struct action_record *) pdata;
3066 return entry->filter == data->filter && entry->next == data->next;
3070 action_record_hash (pentry)
3073 const struct action_record *entry = (const struct action_record *) pentry;
3074 return entry->next * 1009 + entry->filter;
3078 add_action_record (ar_hash, filter, next)
3082 struct action_record **slot, *new, tmp;
3084 tmp.filter = filter;
3086 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3088 if ((new = *slot) == NULL)
3090 new = (struct action_record *) xmalloc (sizeof (*new));
3091 new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3092 new->filter = filter;
3096 /* The filter value goes in untouched. The link to the next
3097 record is a "self-relative" byte offset, or zero to indicate
3098 that there is no next record. So convert the absolute 1-based
3099 indices we've been carrying around into a displacement. */
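/* Worked example: if the table already holds one two-byte record, the
   new record's 1-based offset is 3.  After the filter byte is pushed
   the table size is 3, so a NEXT of 1 (pointing at that first record)
   becomes 1 - (3 + 1) = -3, emitted as the single sleb128 byte 0x7d.
   Read back at 1-based position 4, the displacement -3 lands on
   position 1, the start of the record it links to.  Illustration
   only.  */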
3101 push_sleb128 (&cfun->eh->action_record_data, filter);
3103 next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3104 push_sleb128 (&cfun->eh->action_record_data, next);
3111 collect_one_action_chain (ar_hash, region)
3113 struct eh_region *region;
3115 struct eh_region *c;
3118 /* If we've reached the top of the region chain, then we have
3119 no actions, and require no landing pad. */
3123 switch (region->type)
3126 /* A cleanup adds a zero filter to the beginning of the chain, but
3127 there are special cases to look out for. If there are *only*
3128 cleanups along a path, then it compresses to a zero action.
3129 Further, if there are multiple cleanups along a path, we only
3130 need to represent one of them, as that is enough to trigger
3131 entry to the landing pad at runtime. */
3132 next = collect_one_action_chain (ar_hash, region->outer);
3135 for (c = region->outer; c ; c = c->outer)
3136 if (c->type == ERT_CLEANUP)
3138 return add_action_record (ar_hash, 0, next);
3141 /* Process the associated catch regions in reverse order.
3142 If there's a catch-all handler, then we don't need to
3143 search outer regions. Use a magic -3 value to record
3144 that we haven't done the outer search. */
3146 for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
3148 if (c->u.catch.type == NULL)
3149 next = add_action_record (ar_hash, c->u.catch.filter, 0);
3154 next = collect_one_action_chain (ar_hash, region->outer);
3158 next = add_action_record (ar_hash, c->u.catch.filter, next);
3163 case ERT_ALLOWED_EXCEPTIONS:
3164 /* An exception specification adds its filter to the
3165 beginning of the chain. */
3166 next = collect_one_action_chain (ar_hash, region->outer);
3167 return add_action_record (ar_hash, region->u.allowed.filter,
3168 next < 0 ? 0 : next);
3170 case ERT_MUST_NOT_THROW:
3171 /* A must-not-throw region with no inner handlers or cleanups
3172 requires no call-site entry. Note that this differs from
3173 the no handler or cleanup case in that we do require an lsda
3174 to be generated. Return a magic -2 value to record this. */
3179 /* CATCH regions are handled in TRY above. THROW regions are
3180 for optimization information only and produce no output. */
3181 return collect_one_action_chain (ar_hash, region->outer);
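/* A reading aid for the values collect_one_action_chain hands back
   (the comments above are authoritative): a value >= 0 is the 1-based
   offset of an action record in action_record_data; -1 means no
   landing pad and no action record are required; -2 marks a
   must-not-throw region that needs an lsda but no call-site entry.
   The magic -3 appears only as the internal "outer search not done
   yet" marker in the ERT_TRY case and as the initial last_action in
   convert_to_eh_region_ranges below.  */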
3189 add_call_site (landing_pad, action)
3193 struct call_site_record *data = cfun->eh->call_site_data;
3194 int used = cfun->eh->call_site_data_used;
3195 int size = cfun->eh->call_site_data_size;
3199 size = (size ? size * 2 : 64);
3200 data = (struct call_site_record *)
3201 xrealloc (data, sizeof (*data) * size);
3202 cfun->eh->call_site_data = data;
3203 cfun->eh->call_site_data_size = size;
3206 data[used].landing_pad = landing_pad;
3207 data[used].action = action;
3209 cfun->eh->call_site_data_used = used + 1;
3211 return used + call_site_base;
3214 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3215 The new note numbers will not refer to region numbers, but
3216 instead to call site entries. */
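/* For example, two adjacent calls covered by the same region (and thus
   sharing an action chain and a landing pad) end up bracketed by a
   single NOTE_INSN_EH_REGION_BEG/END pair and share one call-site
   entry; a later call with a different action or landing pad starts a
   new pair.  Ordinary non-call insns in between are skipped and do not
   break up a run.  Illustrative summary of the loop below.  */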
3219 convert_to_eh_region_ranges ()
3221 rtx insn, iter, note;
3223 int last_action = -3;
3224 rtx last_action_insn = NULL_RTX;
3225 rtx last_landing_pad = NULL_RTX;
3226 rtx first_no_action_insn = NULL_RTX;
3229 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3232 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
3234 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3236 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3239 struct eh_region *region;
3241 rtx this_landing_pad;
3244 if (GET_CODE (insn) == INSN
3245 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3246 insn = XVECEXP (PATTERN (insn), 0, 0);
3248 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3251 if (! (GET_CODE (insn) == CALL_INSN
3252 || (flag_non_call_exceptions
3253 && may_trap_p (PATTERN (insn)))))
3260 if (INTVAL (XEXP (note, 0)) <= 0)
3262 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
3263 this_action = collect_one_action_chain (ar_hash, region);
3266 /* The existence of catch handlers or must-not-throw regions
3267 implies that an lsda is needed (even if empty). */
3268 if (this_action != -1)
3269 cfun->uses_eh_lsda = 1;
3271 /* Delay creation of region notes for no-action regions
3272 until we're sure that an lsda will be required. */
3273 else if (last_action == -3)
3275 first_no_action_insn = iter;
3279 /* Cleanups and handlers may share action chains but not
3280 landing pads. Collect the landing pad for this region. */
3281 if (this_action >= 0)
3283 struct eh_region *o;
3284 for (o = region; ! o->landing_pad ; o = o->outer)
3286 this_landing_pad = o->landing_pad;
3289 this_landing_pad = NULL_RTX;
3291 /* Differing actions or landing pads implies a change in call-site
3292 info, which implies some EH_REGION note should be emitted. */
3293 if (last_action != this_action
3294 || last_landing_pad != this_landing_pad)
3296 /* If we'd not seen a previous action (-3) or the previous
3297 action was must-not-throw (-2), then we do not need an end note. */
3299 if (last_action >= -1)
3301 /* If we delayed the creation of the begin, do it now. */
3302 if (first_no_action_insn)
3304 call_site = add_call_site (NULL_RTX, 0);
3305 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3306 first_no_action_insn);
3307 NOTE_EH_HANDLER (note) = call_site;
3308 first_no_action_insn = NULL_RTX;
3311 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3313 NOTE_EH_HANDLER (note) = call_site;
3316 /* If the new action is must-not-throw, then no region notes are created. */
3318 if (this_action >= -1)
3320 call_site = add_call_site (this_landing_pad,
3321 this_action < 0 ? 0 : this_action);
3322 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3323 NOTE_EH_HANDLER (note) = call_site;
3326 last_action = this_action;
3327 last_landing_pad = this_landing_pad;
3329 last_action_insn = iter;
3332 if (last_action >= -1 && ! first_no_action_insn)
3334 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3335 NOTE_EH_HANDLER (note) = call_site;
3338 htab_delete (ar_hash);
3343 push_uleb128 (data_area, value)
3344 varray_type *data_area;
3349 unsigned char byte = value & 0x7f;
3353 VARRAY_PUSH_UCHAR (*data_area, byte);
3359 push_sleb128 (data_area, value)
3360 varray_type *data_area;
3368 byte = value & 0x7f;
3370 more = ! ((value == 0 && (byte & 0x40) == 0)
3371 || (value == -1 && (byte & 0x40) != 0));
3374 VARRAY_PUSH_UCHAR (*data_area, byte);
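/* Worked encodings, for reference; these match the examples in the
   DWARF specification.  uleb128 of 624485 is the byte sequence
   0xe5 0x8e 0x26, and sleb128 of -624485 is 0x9b 0xf1 0x59.  Small
   non-negative values, such as the filter numbers pushed above, fit
   in a single byte.  */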
3380 #ifndef HAVE_AS_LEB128
3382 dw2_size_of_call_site_table ()
3384 int n = cfun->eh->call_site_data_used;
3385 int size = n * (4 + 4 + 4);
3388 for (i = 0; i < n; ++i)
3390 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3391 size += size_of_uleb128 (cs->action);
3398 sjlj_size_of_call_site_table ()
3400 int n = cfun->eh->call_site_data_used;
3404 for (i = 0; i < n; ++i)
3406 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3407 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3408 size += size_of_uleb128 (cs->action);
3416 dw2_output_call_site_table ()
3418 const char *function_start_lab
3419 = IDENTIFIER_POINTER (current_function_func_begin_label);
3420 int n = cfun->eh->call_site_data_used;
3423 for (i = 0; i < n; ++i)
3425 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3426 char reg_start_lab[32];
3427 char reg_end_lab[32];
3428 char landing_pad_lab[32];
3430 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3431 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3433 if (cs->landing_pad)
3434 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3435 CODE_LABEL_NUMBER (cs->landing_pad));
3437 /* ??? Perhaps use insn length scaling if the assembler supports
3438 generic arithmetic. */
3439 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3440 data4 if the function is small enough. */
3441 #ifdef HAVE_AS_LEB128
3442 dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
3443 "region %d start", i);
3444 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3446 if (cs->landing_pad)
3447 dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
3450 dw2_asm_output_data_uleb128 (0, "landing pad");
3452 dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
3453 "region %d start", i);
3454 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3455 if (cs->landing_pad)
3456 dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
3459 dw2_asm_output_data (4, 0, "landing pad");
3461 dw2_asm_output_data_uleb128 (cs->action, "action");
3464 call_site_base += n;
3468 sjlj_output_call_site_table ()
3470 int n = cfun->eh->call_site_data_used;
3473 for (i = 0; i < n; ++i)
3475 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3477 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3478 "region %d landing pad", i);
3479 dw2_asm_output_data_uleb128 (cs->action, "action");
3482 call_site_base += n;
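/* What output_function_exception_table emits, roughly; this is a
   reading aid, not a format specification:

	LLSDA<n>:
	  1 byte    @LPStart format (DW_EH_PE_omit, so @LPStart == @Start)
	  1 byte    @TType format
	  uleb128   @TType base offset (only when there is @TType data)
	  1 byte    call-site format (uleb128 or udata4)
	  uleb128   call-site table length
	  ...       call-site table (dwarf2 or sjlj flavour)
	  ...       action record table
	  ...       @TType table (aligned; filters index it from its end)
	  ...       exception specification table
*/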
3486 output_function_exception_table ()
3488 int tt_format, cs_format, lp_format, i, n;
3489 #ifdef HAVE_AS_LEB128
3490 char ttype_label[32];
3491 char cs_after_size_label[32];
3492 char cs_end_label[32];
3500 /* Not all functions need anything. */
3501 if (! cfun->uses_eh_lsda)
3504 funcdef_number = (USING_SJLJ_EXCEPTIONS
3505 ? sjlj_funcdef_number
3506 : current_funcdef_number);
3508 #ifdef IA64_UNWIND_INFO
3509 fputs ("\t.personality\t", asm_out_file);
3510 output_addr_const (asm_out_file, eh_personality_libfunc);
3511 fputs ("\n\t.handlerdata\n", asm_out_file);
3512 /* Note that varasm still thinks we're in the function's code section.
3513 The ".endp" directive that will immediately follow will take us back. */
3515 exception_section ();
3518 have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
3519 || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);
3521 /* Indicate the format of the @TType entries. */
3523 tt_format = DW_EH_PE_omit;
3526 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3527 #ifdef HAVE_AS_LEB128
3528 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT", funcdef_number);
3530 tt_format_size = size_of_encoded_value (tt_format);
3532 assemble_eh_align (tt_format_size * BITS_PER_UNIT);
3535 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "LLSDA", funcdef_number);
3537 /* The LSDA header. */
3539 /* Indicate the format of the landing pad start pointer. An omitted
3540 field implies @LPStart == @Start. */
3541 /* Currently we always put @LPStart == @Start. This field would
3542 be most useful in moving the landing pads completely out of
3543 line to another section, but it could also be used to minimize
3544 the size of uleb128 landing pad offsets. */
3545 lp_format = DW_EH_PE_omit;
3546 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3547 eh_data_format_name (lp_format));
3549 /* @LPStart pointer would go here. */
3551 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3552 eh_data_format_name (tt_format));
3554 #ifndef HAVE_AS_LEB128
3555 if (USING_SJLJ_EXCEPTIONS)
3556 call_site_len = sjlj_size_of_call_site_table ();
3558 call_site_len = dw2_size_of_call_site_table ();
3561 /* A pc-relative 4-byte displacement to the @TType data. */
3564 #ifdef HAVE_AS_LEB128
3565 char ttype_after_disp_label[32];
3566 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
3568 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3569 "@TType base offset");
3570 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3572 /* Ug. Alignment queers things. */
3573 unsigned int before_disp, after_disp, last_disp, disp;
3575 before_disp = 1 + 1;
3576 after_disp = (1 + size_of_uleb128 (call_site_len)
3578 + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
3579 + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
3585 unsigned int disp_size, pad;
3588 disp_size = size_of_uleb128 (disp);
3589 pad = before_disp + disp_size + after_disp;
3590 if (pad % tt_format_size)
3591 pad = tt_format_size - (pad % tt_format_size);
3594 disp = after_disp + pad;
3596 while (disp != last_disp);
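/* Worked example: with tt_format_size == 4 and before_disp == 2,
   suppose after_disp is 126.  First pass: 126 needs one uleb128 byte,
   2 + 1 + 126 = 129 leaves 3 bytes of padding, so disp becomes 129.
   Second pass: 129 needs two bytes, 2 + 2 + 126 = 130 leaves 2 bytes
   of padding, so disp becomes 128.  Third pass: 128 still needs two
   bytes and still yields 128, so the loop stops.  The iteration exists
   because the uleb128 size of disp can itself change as padding is
   added.  */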
3598 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3602 /* Indicate the format of the call-site offsets. */
3603 #ifdef HAVE_AS_LEB128
3604 cs_format = DW_EH_PE_uleb128;
3606 cs_format = DW_EH_PE_udata4;
3608 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3609 eh_data_format_name (cs_format));
3611 #ifdef HAVE_AS_LEB128
3612 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
3614 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
3616 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3617 "Call-site table length");
3618 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3619 if (USING_SJLJ_EXCEPTIONS)
3620 sjlj_output_call_site_table ();
3622 dw2_output_call_site_table ();
3623 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3625 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3626 if (USING_SJLJ_EXCEPTIONS)
3627 sjlj_output_call_site_table ();
3629 dw2_output_call_site_table ();
3632 /* ??? Decode and interpret the data for flag_debug_asm. */
3633 n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
3634 for (i = 0; i < n; ++i)
3635 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
3636 (i ? NULL : "Action record table"));
3639 assemble_eh_align (tt_format_size * BITS_PER_UNIT);
3641 i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
3644 tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
3646 if (type == NULL_TREE)
3647 type = integer_zero_node;
3649 type = lookup_type_for_runtime (type);
3651 dw2_asm_output_encoded_addr_rtx (tt_format,
3652 expand_expr (type, NULL_RTX, VOIDmode,
3653 EXPAND_INITIALIZER),
3657 #ifdef HAVE_AS_LEB128
3659 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3662 /* ??? Decode and interpret the data for flag_debug_asm. */
3663 n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
3664 for (i = 0; i < n; ++i)
3665 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
3666 (i ? NULL : "Exception specification table"));
3668 function_section (current_function_decl);
3670 if (USING_SJLJ_EXCEPTIONS)
3671 sjlj_funcdef_number += 1;