1 /* Implements exception handling.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
4 Contributed by Mike Stump <mrs@cygnus.com>.
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
24 /* An exception is an event that can be signaled from within a
25 function. This event can then be "caught" or "trapped" by the
26 callers of this function. This potentially allows program flow to
27 be transferred to any arbitrary code associated with a function call
28 several levels up the stack.
30 The intended use for this mechanism is for signaling "exceptional
31 events" in an out-of-band fashion, hence its name. The C++ language
32 (and many other OO-styled or functional languages) practically
33 requires such a mechanism, as otherwise it becomes very difficult
34 or even impossible to signal failure conditions in complex
35 situations. The traditional C++ example is when an error occurs in
36 the process of constructing an object; without such a mechanism, it
37 is impossible to signal that the error occurs without adding global
38 state variables and error checks around every object construction.
40 The act of causing this event to occur is referred to as "throwing
41 an exception". (Alternate terms include "raising an exception" or
42 "signaling an exception".) The term "throw" is used because control
43 is returned to the callers of the function that is signaling the
44 exception, and thus there is the concept of "throwing" the
45 exception up the call stack.
47 [ Add updated documentation on how to use this. ] */
57 #include "insn-config.h"
59 #include "integrate.h"
60 #include "hard-reg-set.h"
61 #include "basic-block.h"
63 #include "dwarf2asm.h"
64 #include "dwarf2out.h"
72 /* Provide defaults for stuff that may not be defined when using
74 #ifndef EH_RETURN_STACKADJ_RTX
75 #define EH_RETURN_STACKADJ_RTX 0
77 #ifndef EH_RETURN_HANDLER_RTX
78 #define EH_RETURN_HANDLER_RTX 0
80 #ifndef EH_RETURN_DATA_REGNO
81 #define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
85 /* Nonzero means enable synchronous exceptions for non-call instructions. */
86 int flag_non_call_exceptions;
88 /* Protect cleanup actions with must-not-throw regions, with a call
89 to the given failure handler. */
90 tree protect_cleanup_actions;
92 /* Return true if type A catches type B. */
93 int (*lang_eh_type_covers) PARAMS ((tree a, tree b));
95 /* Map a type to a runtime object to match type. */
96 tree (*lang_eh_runtime_type) PARAMS ((tree));
98 /* A list of labels used for exception handlers. */
99 rtx exception_handler_labels;
101 static int call_site_base;
102 static int sjlj_funcdef_number;
103 static htab_t type_to_runtime_map;
105 /* Describe the SjLj_Function_Context structure. */
106 static tree sjlj_fc_type_node;
107 static int sjlj_fc_call_site_ofs;
108 static int sjlj_fc_data_ofs;
109 static int sjlj_fc_personality_ofs;
110 static int sjlj_fc_lsda_ofs;
111 static int sjlj_fc_jbuf_ofs;
113 /* Describes one exception region. */
116 /* The immediately surrounding region. */
117 struct eh_region *outer;
119 /* The list of immediately contained regions. */
120 struct eh_region *inner;
121 struct eh_region *next_peer;
123 /* An identifier for this region. */
126 /* Each region does exactly one thing. */
132 ERT_ALLOWED_EXCEPTIONS,
138 /* Holds the action to perform based on the preceeding type. */
140 /* A list of catch blocks, a surrounding try block,
141 and the label for continuing after a catch. */
143 struct eh_region *catch;
144 struct eh_region *last_catch;
145 struct eh_region *prev_try;
149 /* The list through the catch handlers, the type object
150 matched, and a pointer to the generated code. */
152 struct eh_region *next_catch;
153 struct eh_region *prev_catch;
158 /* A tree_list of allowed types. */
164 /* The type given by a call to "throw foo();", or discovered
170 /* Retain the cleanup expression even after expansion so that
171 we can match up fixup regions. */
176 /* The real region (by expression and by pointer) that fixup code
180 struct eh_region *real_region;
184 /* Entry point for this region's handler before landing pads are built. */
187 /* Entry point for this region's handler from the runtime eh library. */
190 /* Entry point for this region's handler from an inner region. */
191 rtx post_landing_pad;
193 /* The RESX insn for handing off control to the next outermost handler,
198 /* Used to save exception status for each function. */
201 /* The tree of all regions for this function. */
202 struct eh_region *region_tree;
204 /* The same information as an indexable array. */
205 struct eh_region **region_array;
207 /* The most recently open region. */
208 struct eh_region *cur_region;
210 /* This is the region for which we are processing catch blocks. */
211 struct eh_region *try_region;
213 /* A stack (TREE_LIST) of lists of handlers. The TREE_VALUE of each
214 node is itself a TREE_CHAINed list of handlers for regions that
215 are not yet closed. The TREE_VALUE of each entry contains the
216 handler for the corresponding entry on the ehstack. */
222 int built_landing_pads;
223 int last_region_number;
225 varray_type ttype_data;
226 varray_type ehspec_data;
227 varray_type action_record_data;
229 struct call_site_record
234 int call_site_data_used;
235 int call_site_data_size;
246 static void mark_eh_region PARAMS ((struct eh_region *));
248 static int t2r_eq PARAMS ((const PTR,
250 static hashval_t t2r_hash PARAMS ((const PTR));
251 static int t2r_mark_1 PARAMS ((PTR *, PTR));
252 static void t2r_mark PARAMS ((PTR));
253 static void add_type_for_runtime PARAMS ((tree));
254 static tree lookup_type_for_runtime PARAMS ((tree));
256 static struct eh_region *expand_eh_region_end PARAMS ((void));
258 static rtx get_exception_filter PARAMS ((void));
260 static void collect_eh_region_array PARAMS ((void));
261 static void resolve_fixup_regions PARAMS ((void));
262 static void remove_fixup_regions PARAMS ((void));
263 static void convert_from_eh_region_ranges_1 PARAMS ((rtx *, int *, int));
265 static struct eh_region *duplicate_eh_region_1 PARAMS ((struct eh_region *,
266 struct inline_remap *));
267 static void duplicate_eh_region_2 PARAMS ((struct eh_region *,
268 struct eh_region **));
269 static int ttypes_filter_eq PARAMS ((const PTR,
271 static hashval_t ttypes_filter_hash PARAMS ((const PTR));
272 static int ehspec_filter_eq PARAMS ((const PTR,
274 static hashval_t ehspec_filter_hash PARAMS ((const PTR));
275 static int add_ttypes_entry PARAMS ((htab_t, tree));
276 static int add_ehspec_entry PARAMS ((htab_t, htab_t,
278 static void assign_filter_values PARAMS ((void));
279 static void build_post_landing_pads PARAMS ((void));
280 static void connect_post_landing_pads PARAMS ((void));
281 static void dw2_build_landing_pads PARAMS ((void));
284 static bool sjlj_find_directly_reachable_regions
285 PARAMS ((struct sjlj_lp_info *));
286 static void sjlj_assign_call_site_values
287 PARAMS ((rtx, struct sjlj_lp_info *));
288 static void sjlj_mark_call_sites
289 PARAMS ((struct sjlj_lp_info *));
290 static void sjlj_emit_function_enter PARAMS ((rtx));
291 static void sjlj_emit_function_exit PARAMS ((void));
292 static void sjlj_emit_dispatch_table
293 PARAMS ((rtx, struct sjlj_lp_info *));
294 static void sjlj_build_landing_pads PARAMS ((void));
296 static void remove_exception_handler_label PARAMS ((rtx));
297 static void remove_eh_handler PARAMS ((struct eh_region *));
299 struct reachable_info;
301 /* The return value of reachable_next_level. */
304 /* The given exception is not processed by the given region. */
306 /* The given exception may need processing by the given region. */
308 /* The given exception is completely processed by the given region. */
310 /* The given exception is completely processed by the runtime. */
314 static int check_handled PARAMS ((tree, tree));
315 static void add_reachable_handler
316 PARAMS ((struct reachable_info *, struct eh_region *,
317 struct eh_region *));
318 static enum reachable_code reachable_next_level
319 PARAMS ((struct eh_region *, tree, struct reachable_info *));
321 static int action_record_eq PARAMS ((const PTR,
323 static hashval_t action_record_hash PARAMS ((const PTR));
324 static int add_action_record PARAMS ((htab_t, int, int));
325 static int collect_one_action_chain PARAMS ((htab_t,
326 struct eh_region *));
327 static int add_call_site PARAMS ((rtx, int));
329 static void push_uleb128 PARAMS ((varray_type *,
331 static void push_sleb128 PARAMS ((varray_type *, int));
332 static const char *eh_data_format_name PARAMS ((int));
333 #ifndef HAVE_AS_LEB128
334 static int dw2_size_of_call_site_table PARAMS ((void));
335 static int sjlj_size_of_call_site_table PARAMS ((void));
337 static void dw2_output_call_site_table PARAMS ((void));
338 static void sjlj_output_call_site_table PARAMS ((void));
341 /* Routine to see if exception handling is turned on.
342 DO_WARN is non-zero if we want to inform the user that exception
343 handling is turned off.
345 This is used to ensure that -fexceptions has been specified if the
346 compiler tries to use any exception-specific functions. */
352 if (! flag_exceptions)
354 static int warned = 0;
355 if (! warned && do_warn)
357 error ("exception handling disabled, use -fexceptions to enable");
369 ggc_add_rtx_root (&exception_handler_labels, 1);
370 ggc_add_tree_root (&protect_cleanup_actions, 1);
372 if (! flag_exceptions)
375 type_to_runtime_map = htab_create (31, t2r_hash, t2r_eq, NULL);
376 ggc_add_root (&type_to_runtime_map, 1, sizeof (htab_t), t2r_mark);
378 /* Create the SjLj_Function_Context structure. This should match
379 the definition in unwind-sjlj.c. */
380 if (USING_SJLJ_EXCEPTIONS)
382 tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
384 sjlj_fc_type_node = make_lang_type (RECORD_TYPE);
385 ggc_add_tree_root (&sjlj_fc_type_node, 1);
387 f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
388 build_pointer_type (sjlj_fc_type_node));
389 DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;
391 f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
393 DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
395 tmp = build_index_type (build_int_2 (4 - 1, 0));
396 tmp = build_array_type (type_for_mode (word_mode, 1), tmp);
397 f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
398 DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
400 f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
402 DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;
404 f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
406 DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
408 #ifdef DONT_USE_BUILTIN_SETJMP
410 tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
412 /* Should be large enough for most systems, if it is not,
413 JMP_BUF_SIZE should be defined with the proper value. It will
414 also tend to be larger than necessary for most systems, a more
415 optimal port will define JMP_BUF_SIZE. */
416 tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
419 /* This is 2 for builtin_setjmp, plus whatever the target requires
420 via STACK_SAVEAREA_MODE (SAVE_NONLOCAL). */
421 tmp = build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
422 / GET_MODE_SIZE (Pmode)) + 2 - 1, 0);
424 tmp = build_index_type (tmp);
425 tmp = build_array_type (ptr_type_node, tmp);
426 f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
427 #ifdef DONT_USE_BUILTIN_SETJMP
428 /* We don't know what the alignment requirements of the
429 runtime's jmp_buf has. Overestimate. */
430 DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
431 DECL_USER_ALIGN (f_jbuf) = 1;
433 DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
435 TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
436 TREE_CHAIN (f_prev) = f_cs;
437 TREE_CHAIN (f_cs) = f_data;
438 TREE_CHAIN (f_data) = f_per;
439 TREE_CHAIN (f_per) = f_lsda;
440 TREE_CHAIN (f_lsda) = f_jbuf;
442 layout_type (sjlj_fc_type_node);
444 /* Cache the interesting field offsets so that we have
445 easy access from rtl. */
446 sjlj_fc_call_site_ofs
447 = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
448 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
450 = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
451 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
452 sjlj_fc_personality_ofs
453 = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
454 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
456 = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
457 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
459 = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
460 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
465 init_eh_for_function ()
467 cfun->eh = (struct eh_status *) xcalloc (1, sizeof (struct eh_status));
470 /* Mark EH for GC. */
473 mark_eh_region (region)
474 struct eh_region *region;
479 switch (region->type)
482 ggc_mark_tree (region->u.cleanup.exp);
485 ggc_mark_rtx (region->u.try.continue_label);
488 ggc_mark_tree (region->u.catch.type);
490 case ERT_ALLOWED_EXCEPTIONS:
491 ggc_mark_tree (region->u.allowed.type_list);
493 case ERT_MUST_NOT_THROW:
496 ggc_mark_tree (region->u.throw.type);
499 ggc_mark_tree (region->u.fixup.cleanup_exp);
505 ggc_mark_rtx (region->label);
506 ggc_mark_rtx (region->resume);
507 ggc_mark_rtx (region->landing_pad);
508 ggc_mark_rtx (region->post_landing_pad);
513 struct eh_status *eh;
520 /* If we've called collect_eh_region_array, use it. Otherwise walk
521 the tree non-recursively. */
522 if (eh->region_array)
524 for (i = eh->last_region_number; i > 0; --i)
526 struct eh_region *r = eh->region_array[i];
527 if (r && r->region_number == i)
531 else if (eh->region_tree)
533 struct eh_region *r = eh->region_tree;
539 else if (r->next_peer)
547 } while (r->next_peer == NULL);
554 ggc_mark_tree (eh->protect_list);
555 ggc_mark_rtx (eh->filter);
556 ggc_mark_rtx (eh->exc_ptr);
557 ggc_mark_tree_varray (eh->ttype_data);
559 if (eh->call_site_data)
561 for (i = eh->call_site_data_used - 1; i >= 0; --i)
562 ggc_mark_rtx (eh->call_site_data[i].landing_pad);
565 ggc_mark_rtx (eh->ehr_stackadj);
566 ggc_mark_rtx (eh->ehr_handler);
567 ggc_mark_rtx (eh->ehr_label);
569 ggc_mark_rtx (eh->sjlj_fc);
570 ggc_mark_rtx (eh->sjlj_exit_after);
577 struct eh_status *eh = f->eh;
579 if (eh->region_array)
582 for (i = eh->last_region_number; i > 0; --i)
584 struct eh_region *r = eh->region_array[i];
585 /* Mind we don't free a region struct more than once. */
586 if (r && r->region_number == i)
589 free (eh->region_array);
591 else if (eh->region_tree)
593 struct eh_region *next, *r = eh->region_tree;
598 else if (r->next_peer)
612 } while (r->next_peer == NULL);
621 VARRAY_FREE (eh->ttype_data);
622 VARRAY_FREE (eh->ehspec_data);
623 VARRAY_FREE (eh->action_record_data);
624 if (eh->call_site_data)
625 free (eh->call_site_data);
632 /* Start an exception handling region. All instructions emitted
633 after this point are considered to be part of the region until
634 expand_eh_region_end is invoked. */
637 expand_eh_region_start ()
639 struct eh_region *new_region;
640 struct eh_region *cur_region;
646 /* Insert a new blank region as a leaf in the tree. */
647 new_region = (struct eh_region *) xcalloc (1, sizeof (*new_region));
648 cur_region = cfun->eh->cur_region;
649 new_region->outer = cur_region;
652 new_region->next_peer = cur_region->inner;
653 cur_region->inner = new_region;
657 new_region->next_peer = cfun->eh->region_tree;
658 cfun->eh->region_tree = new_region;
660 cfun->eh->cur_region = new_region;
662 /* Create a note marking the start of this region. */
663 new_region->region_number = ++cfun->eh->last_region_number;
664 note = emit_note (NULL, NOTE_INSN_EH_REGION_BEG);
665 NOTE_EH_HANDLER (note) = new_region->region_number;
668 /* Common code to end a region. Returns the region just ended. */
670 static struct eh_region *
671 expand_eh_region_end ()
673 struct eh_region *cur_region = cfun->eh->cur_region;
676 /* Create a nute marking the end of this region. */
677 note = emit_note (NULL, NOTE_INSN_EH_REGION_END);
678 NOTE_EH_HANDLER (note) = cur_region->region_number;
681 cfun->eh->cur_region = cur_region->outer;
686 /* End an exception handling region for a cleanup. HANDLER is an
687 expression to expand for the cleanup. */
690 expand_eh_region_end_cleanup (handler)
693 struct eh_region *region;
700 region = expand_eh_region_end ();
701 region->type = ERT_CLEANUP;
702 region->label = gen_label_rtx ();
703 region->u.cleanup.exp = handler;
705 around_label = gen_label_rtx ();
706 emit_jump (around_label);
708 emit_label (region->label);
710 if (protect_cleanup_actions)
711 expand_eh_region_start ();
713 /* In case this cleanup involves an inline destructor with a try block in
714 it, we need to save the EH return data registers around it. */
715 data_save[0] = gen_reg_rtx (Pmode);
716 emit_move_insn (data_save[0], get_exception_pointer ());
717 data_save[1] = gen_reg_rtx (word_mode);
718 emit_move_insn (data_save[1], get_exception_filter ());
720 expand_expr (handler, const0_rtx, VOIDmode, 0);
722 emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
723 emit_move_insn (cfun->eh->filter, data_save[1]);
725 if (protect_cleanup_actions)
726 expand_eh_region_end_must_not_throw (protect_cleanup_actions);
728 /* We need any stack adjustment complete before the around_label. */
729 do_pending_stack_adjust ();
731 /* We delay the generation of the _Unwind_Resume until we generate
732 landing pads. We emit a marker here so as to get good control
733 flow data in the meantime. */
735 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
738 emit_label (around_label);
741 /* End an exception handling region for a try block, and prepares
742 for subsequent calls to expand_start_catch. */
745 expand_start_all_catch ()
747 struct eh_region *region;
752 region = expand_eh_region_end ();
753 region->type = ERT_TRY;
754 region->u.try.prev_try = cfun->eh->try_region;
755 region->u.try.continue_label = gen_label_rtx ();
757 cfun->eh->try_region = region;
759 emit_jump (region->u.try.continue_label);
762 /* Begin a catch clause. TYPE is the type caught, or null if this is
763 a catch-all clause. */
766 expand_start_catch (type)
769 struct eh_region *t, *c, *l;
775 add_type_for_runtime (type);
776 expand_eh_region_start ();
778 t = cfun->eh->try_region;
779 c = cfun->eh->cur_region;
781 c->u.catch.type = type;
782 c->label = gen_label_rtx ();
784 l = t->u.try.last_catch;
785 c->u.catch.prev_catch = l;
787 l->u.catch.next_catch = c;
790 t->u.try.last_catch = c;
792 emit_label (c->label);
795 /* End a catch clause. Control will resume after the try/catch block. */
800 struct eh_region *try_region, *catch_region;
805 catch_region = expand_eh_region_end ();
806 try_region = cfun->eh->try_region;
808 emit_jump (try_region->u.try.continue_label);
811 /* End a sequence of catch handlers for a try block. */
814 expand_end_all_catch ()
816 struct eh_region *try_region;
821 try_region = cfun->eh->try_region;
822 cfun->eh->try_region = try_region->u.try.prev_try;
824 emit_label (try_region->u.try.continue_label);
827 /* End an exception region for an exception type filter. ALLOWED is a
828 TREE_LIST of types to be matched by the runtime. FAILURE is an
829 expression to invoke if a mismatch ocurrs. */
832 expand_eh_region_end_allowed (allowed, failure)
833 tree allowed, failure;
835 struct eh_region *region;
841 region = expand_eh_region_end ();
842 region->type = ERT_ALLOWED_EXCEPTIONS;
843 region->u.allowed.type_list = allowed;
844 region->label = gen_label_rtx ();
846 for (; allowed ; allowed = TREE_CHAIN (allowed))
847 add_type_for_runtime (TREE_VALUE (allowed));
849 /* We must emit the call to FAILURE here, so that if this function
850 throws a different exception, that it will be processed by the
853 around_label = gen_label_rtx ();
854 emit_jump (around_label);
856 emit_label (region->label);
857 expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
859 emit_label (around_label);
862 /* End an exception region for a must-not-throw filter. FAILURE is an
863 expression invoke if an uncaught exception propagates this far.
865 This is conceptually identical to expand_eh_region_end_allowed with
866 an empty allowed list (if you passed "std::terminate" instead of
867 "__cxa_call_unexpected"), but they are represented differently in
871 expand_eh_region_end_must_not_throw (failure)
874 struct eh_region *region;
880 region = expand_eh_region_end ();
881 region->type = ERT_MUST_NOT_THROW;
882 region->label = gen_label_rtx ();
884 /* We must emit the call to FAILURE here, so that if this function
885 throws a different exception, that it will be processed by the
888 around_label = gen_label_rtx ();
889 emit_jump (around_label);
891 emit_label (region->label);
892 expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
894 emit_label (around_label);
897 /* End an exception region for a throw. No handling goes on here,
898 but it's the easiest way for the front-end to indicate what type
902 expand_eh_region_end_throw (type)
905 struct eh_region *region;
910 region = expand_eh_region_end ();
911 region->type = ERT_THROW;
912 region->u.throw.type = type;
915 /* End a fixup region. Within this region the cleanups for the immediately
916 enclosing region are _not_ run. This is used for goto cleanup to avoid
917 destroying an object twice.
919 This would be an extraordinarily simple prospect, were it not for the
920 fact that we don't actually know what the immediately enclosing region
921 is. This surprising fact is because expand_cleanups is currently
922 generating a sequence that it will insert somewhere else. We collect
923 the proper notion of "enclosing" in convert_from_eh_region_ranges. */
926 expand_eh_region_end_fixup (handler)
929 struct eh_region *fixup;
934 fixup = expand_eh_region_end ();
935 fixup->type = ERT_FIXUP;
936 fixup->u.fixup.cleanup_exp = handler;
939 /* Return an rtl expression for a pointer to the exception object
943 get_exception_pointer ()
945 rtx exc_ptr = cfun->eh->exc_ptr;
948 exc_ptr = gen_reg_rtx (Pmode);
949 cfun->eh->exc_ptr = exc_ptr;
954 /* Return an rtl expression for the exception dispatch filter
958 get_exception_filter ()
960 rtx filter = cfun->eh->filter;
963 filter = gen_reg_rtx (word_mode);
964 cfun->eh->filter = filter;
969 /* Begin a region that will contain entries created with
970 add_partial_entry. */
973 begin_protect_partials ()
975 /* Push room for a new list. */
976 cfun->eh->protect_list
977 = tree_cons (NULL_TREE, NULL_TREE, cfun->eh->protect_list);
980 /* Start a new exception region for a region of code that has a
981 cleanup action and push the HANDLER for the region onto
982 protect_list. All of the regions created with add_partial_entry
983 will be ended when end_protect_partials is invoked. */
986 add_partial_entry (handler)
989 expand_eh_region_start ();
991 /* ??? This comment was old before the most recent rewrite. We
992 really ought to fix the callers at some point. */
993 /* For backwards compatibility, we allow callers to omit calls to
994 begin_protect_partials for the outermost region. So, we must
995 explicitly do so here. */
996 if (!cfun->eh->protect_list)
997 begin_protect_partials ();
999 /* Add this entry to the front of the list. */
1000 TREE_VALUE (cfun->eh->protect_list)
1001 = tree_cons (NULL_TREE, handler, TREE_VALUE (cfun->eh->protect_list));
1004 /* End all the pending exception regions on protect_list. */
1007 end_protect_partials ()
1011 /* ??? This comment was old before the most recent rewrite. We
1012 really ought to fix the callers at some point. */
1013 /* For backwards compatibility, we allow callers to omit the call to
1014 begin_protect_partials for the outermost region. So,
1015 PROTECT_LIST may be NULL. */
1016 if (!cfun->eh->protect_list)
1019 /* Pop the topmost entry. */
1020 t = TREE_VALUE (cfun->eh->protect_list);
1021 cfun->eh->protect_list = TREE_CHAIN (cfun->eh->protect_list);
1023 /* End all the exception regions. */
1024 for (; t; t = TREE_CHAIN (t))
1025 expand_eh_region_end_cleanup (TREE_VALUE (t));
1029 /* This section is for the exception handling specific optimization pass. */
1031 /* Random access the exception region tree. It's just as simple to
1032 collect the regions this way as in expand_eh_region_start, but
1033 without having to realloc memory. */
1036 collect_eh_region_array ()
1038 struct eh_region **array, *i;
1040 i = cfun->eh->region_tree;
1044 array = xcalloc (cfun->eh->last_region_number + 1, sizeof (*array));
1045 cfun->eh->region_array = array;
1049 array[i->region_number] = i;
1051 /* If there are sub-regions, process them. */
1054 /* If there are peers, process them. */
1055 else if (i->next_peer)
1057 /* Otherwise, step back up the tree to the next peer. */
1064 } while (i->next_peer == NULL);
1071 resolve_fixup_regions ()
1073 int i, j, n = cfun->eh->last_region_number;
1075 for (i = 1; i <= n; ++i)
1077 struct eh_region *fixup = cfun->eh->region_array[i];
1078 struct eh_region *cleanup;
1080 if (! fixup || fixup->type != ERT_FIXUP)
1083 for (j = 1; j <= n; ++j)
1085 cleanup = cfun->eh->region_array[j];
1086 if (cleanup->type == ERT_CLEANUP
1087 && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
1093 fixup->u.fixup.real_region = cleanup->outer;
1097 /* Now that we've discovered what region actually encloses a fixup,
1098 we can shuffle pointers and remove them from the tree. */
1101 remove_fixup_regions ()
1105 for (i = cfun->eh->last_region_number; i > 0; --i)
1107 struct eh_region *fixup = cfun->eh->region_array[i];
1112 /* Allow GC to maybe free some memory. */
1113 if (fixup->type == ERT_CLEANUP)
1114 fixup->u.cleanup.exp = NULL_TREE;
1116 if (fixup->type != ERT_FIXUP)
1121 struct eh_region *parent, *p, **pp;
1123 parent = fixup->u.fixup.real_region;
1125 /* Fix up the children's parent pointers; find the end of
1127 for (p = fixup->inner; ; p = p->next_peer)
1134 /* In the tree of cleanups, only outer-inner ordering matters.
1135 So link the children back in anywhere at the correct level. */
1137 pp = &parent->inner;
1139 pp = &cfun->eh->region_tree;
1142 fixup->inner = NULL;
1145 remove_eh_handler (fixup);
1149 /* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
1150 can_throw instruction in the region. */
1153 convert_from_eh_region_ranges_1 (pinsns, orig_sp, cur)
1161 for (insn = *pinsns; insn ; insn = next)
1163 next = NEXT_INSN (insn);
1164 if (GET_CODE (insn) == NOTE)
1166 int kind = NOTE_LINE_NUMBER (insn);
1167 if (kind == NOTE_INSN_EH_REGION_BEG
1168 || kind == NOTE_INSN_EH_REGION_END)
1170 if (kind == NOTE_INSN_EH_REGION_BEG)
1172 struct eh_region *r;
1175 cur = NOTE_EH_HANDLER (insn);
1177 r = cfun->eh->region_array[cur];
1178 if (r->type == ERT_FIXUP)
1180 r = r->u.fixup.real_region;
1181 cur = r ? r->region_number : 0;
1183 else if (r->type == ERT_CATCH)
1186 cur = r ? r->region_number : 0;
1192 /* Removing the first insn of a CALL_PLACEHOLDER sequence
1193 requires extra care to adjust sequence start. */
1194 if (insn == *pinsns)
1200 else if (INSN_P (insn))
1203 && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1204 /* Calls can always potentially throw exceptions, unless
1205 they have a REG_EH_REGION note with a value of 0 or less.
1206 Which should be the only possible kind so far. */
1207 && (GET_CODE (insn) == CALL_INSN
1208 /* If we wanted exceptions for non-call insns, then
1209 any may_trap_p instruction could throw. */
1210 || (flag_non_call_exceptions
1211 && may_trap_p (PATTERN (insn)))))
1213 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
1217 if (GET_CODE (insn) == CALL_INSN
1218 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1220 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
1222 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
1224 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
1235 convert_from_eh_region_ranges ()
1240 collect_eh_region_array ();
1241 resolve_fixup_regions ();
1243 stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
1244 insns = get_insns ();
1245 convert_from_eh_region_ranges_1 (&insns, stack, 0);
1248 remove_fixup_regions ();
1252 find_exception_handler_labels ()
1254 rtx list = NULL_RTX;
1257 free_EXPR_LIST_list (&exception_handler_labels);
1259 if (cfun->eh->region_tree == NULL)
1262 for (i = cfun->eh->last_region_number; i > 0; --i)
1264 struct eh_region *region = cfun->eh->region_array[i];
1269 if (cfun->eh->built_landing_pads)
1270 lab = region->landing_pad;
1272 lab = region->label;
1275 list = alloc_EXPR_LIST (0, lab, list);
1278 /* For sjlj exceptions, need the return label to remain live until
1279 after landing pad generation. */
1280 if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
1281 list = alloc_EXPR_LIST (0, return_label, list);
1283 exception_handler_labels = list;
1287 static struct eh_region *
1288 duplicate_eh_region_1 (o, map)
1289 struct eh_region *o;
1290 struct inline_remap *map;
1293 = (struct eh_region *) xcalloc (1, sizeof (struct eh_region));
1295 n->region_number = o->region_number + cfun->eh->last_region_number;
1301 case ERT_MUST_NOT_THROW:
1305 if (o->u.try.continue_label)
1306 n->u.try.continue_label
1307 = get_label_from_map (map,
1308 CODE_LABEL_NUMBER (o->u.try.continue_label));
1312 n->u.catch.type = o->u.catch.type;
1315 case ERT_ALLOWED_EXCEPTIONS:
1316 n->u.allowed.type_list = o->u.allowed.type_list;
1320 n->u.throw.type = o->u.throw.type;
1327 n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
1330 n->resume = map->insn_map[INSN_UID (o->resume)];
1331 if (n->resume == NULL)
1339 duplicate_eh_region_2 (o, n_array)
1340 struct eh_region *o;
1341 struct eh_region **n_array;
1343 struct eh_region *n = n_array[o->region_number];
1348 n->u.try.catch = n_array[o->u.try.catch->region_number];
1349 n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
1353 if (o->u.catch.next_catch)
1354 n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
1355 if (o->u.catch.prev_catch)
1356 n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
1364 n->outer = n_array[o->outer->region_number];
1366 n->inner = n_array[o->inner->region_number];
1368 n->next_peer = n_array[o->next_peer->region_number];
1372 duplicate_eh_regions (ifun, map)
1373 struct function *ifun;
1374 struct inline_remap *map;
1376 int ifun_last_region_number = ifun->eh->last_region_number;
1377 struct eh_region **n_array, *root, *cur;
1380 if (ifun_last_region_number == 0)
1383 n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));
1385 for (i = 1; i <= ifun_last_region_number; ++i)
1387 cur = ifun->eh->region_array[i];
1388 if (!cur || cur->region_number != i)
1390 n_array[i] = duplicate_eh_region_1 (cur, map);
1392 for (i = 1; i <= ifun_last_region_number; ++i)
1394 cur = ifun->eh->region_array[i];
1395 if (!cur || cur->region_number != i)
1397 duplicate_eh_region_2 (cur, n_array);
1400 root = n_array[ifun->eh->region_tree->region_number];
1401 cur = cfun->eh->cur_region;
1404 struct eh_region *p = cur->inner;
1407 while (p->next_peer)
1409 p->next_peer = root;
1414 for (i = 1; i <= ifun_last_region_number; ++i)
1415 if (n_array[i]->outer == NULL)
1416 n_array[i]->outer = cur;
1420 struct eh_region *p = cfun->eh->region_tree;
1423 while (p->next_peer)
1425 p->next_peer = root;
1428 cfun->eh->region_tree = root;
1433 i = cfun->eh->last_region_number;
1434 cfun->eh->last_region_number = i + ifun_last_region_number;
1439 /* ??? Move from tree.c to tree.h. */
1440 #define TYPE_HASH(TYPE) ((HOST_WIDE_INT) (TYPE) & 0777777)
1443 t2r_eq (pentry, pdata)
1447 tree entry = (tree) pentry;
1448 tree data = (tree) pdata;
1450 return TREE_PURPOSE (entry) == data;
1457 tree entry = (tree) pentry;
1458 return TYPE_HASH (TREE_PURPOSE (entry));
1462 t2r_mark_1 (slot, data)
1464 PTR data ATTRIBUTE_UNUSED;
1466 tree contents = (tree) *slot;
1467 ggc_mark_tree (contents);
1475 htab_traverse (*(htab_t *)addr, t2r_mark_1, NULL);
1479 add_type_for_runtime (type)
1484 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1485 TYPE_HASH (type), INSERT);
1488 tree runtime = (*lang_eh_runtime_type) (type);
1489 *slot = tree_cons (type, runtime, NULL_TREE);
1494 lookup_type_for_runtime (type)
1499 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1500 TYPE_HASH (type), NO_INSERT);
1502 /* We should have always inserrted the data earlier. */
1503 return TREE_VALUE (*slot);
1507 /* Represent an entry in @TTypes for either catch actions
1508 or exception filter actions. */
1509 struct ttypes_filter
1515 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
1516 (a tree) for a @TTypes type node we are thinking about adding. */
1519 ttypes_filter_eq (pentry, pdata)
1523 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1524 tree data = (tree) pdata;
1526 return entry->t == data;
1530 ttypes_filter_hash (pentry)
1533 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1534 return TYPE_HASH (entry->t);
1537 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
1538 exception specification list we are thinking about adding. */
1539 /* ??? Currently we use the type lists in the order given. Someone
1540 should put these in some canonical order. */
1543 ehspec_filter_eq (pentry, pdata)
1547 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1548 const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
1550 return type_list_equal (entry->t, data->t);
1553 /* Hash function for exception specification lists. */
1556 ehspec_filter_hash (pentry)
1559 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1563 for (list = entry->t; list ; list = TREE_CHAIN (list))
1564 h = (h << 5) + (h >> 27) + TYPE_HASH (TREE_VALUE (list));
1568 /* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
1569 up the search. Return the filter value to be used. */
1572 add_ttypes_entry (ttypes_hash, type)
1576 struct ttypes_filter **slot, *n;
1578 slot = (struct ttypes_filter **)
1579 htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);
1581 if ((n = *slot) == NULL)
1583 /* Filter value is a 1 based table index. */
1585 n = (struct ttypes_filter *) xmalloc (sizeof (*n));
1587 n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
1590 VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
1596 /* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
1597 to speed up the search. Return the filter value to be used. */
1600 add_ehspec_entry (ehspec_hash, ttypes_hash, list)
1605 struct ttypes_filter **slot, *n;
1606 struct ttypes_filter dummy;
1609 slot = (struct ttypes_filter **)
1610 htab_find_slot (ehspec_hash, &dummy, INSERT);
1612 if ((n = *slot) == NULL)
1614 /* Filter value is a -1 based byte index into a uleb128 buffer. */
1616 n = (struct ttypes_filter *) xmalloc (sizeof (*n));
1618 n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
1621 /* Look up each type in the list and encode its filter
1622 value as a uleb128. Terminate the list with 0. */
1623 for (; list ; list = TREE_CHAIN (list))
1624 push_uleb128 (&cfun->eh->ehspec_data,
1625 add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
1626 VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
1632 /* Generate the action filter values to be used for CATCH and
1633 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
1634 we use lots of landing pads, and so every type or list can share
1635 the same filter value, which saves table space. */
1638 assign_filter_values ()
1641 htab_t ttypes, ehspec;
1643 VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
1644 VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");
1646 ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
1647 ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
1649 for (i = cfun->eh->last_region_number; i > 0; --i)
1651 struct eh_region *r = cfun->eh->region_array[i];
1653 /* Mind we don't process a region more than once. */
1654 if (!r || r->region_number != i)
1660 r->u.catch.filter = add_ttypes_entry (ttypes, r->u.catch.type);
1663 case ERT_ALLOWED_EXCEPTIONS:
1665 = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
1673 htab_delete (ttypes);
1674 htab_delete (ehspec);
1678 build_post_landing_pads ()
1682 for (i = cfun->eh->last_region_number; i > 0; --i)
1684 struct eh_region *region = cfun->eh->region_array[i];
1687 /* Mind we don't process a region more than once. */
1688 if (!region || region->region_number != i)
1691 switch (region->type)
1694 /* ??? Collect the set of all non-overlapping catch handlers
1695 all the way up the chain until blocked by a cleanup. */
1696 /* ??? Outer try regions can share landing pads with inner
1697 try regions if the types are completely non-overlapping,
1698 and there are no interveaning cleanups. */
1700 region->post_landing_pad = gen_label_rtx ();
1704 emit_label (region->post_landing_pad);
1706 /* ??? It is mighty inconvenient to call back into the
1707 switch statement generation code in expand_end_case.
1708 Rapid prototyping sez a sequence of ifs. */
1710 struct eh_region *c;
1711 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
1713 /* ??? _Unwind_ForcedUnwind wants no match here. */
1714 if (c->u.catch.type == NULL)
1715 emit_jump (c->label);
1717 emit_cmp_and_jump_insns (cfun->eh->filter,
1718 GEN_INT (c->u.catch.filter),
1719 EQ, NULL_RTX, word_mode,
1724 /* We delay the generation of the _Unwind_Resume until we generate
1725 landing pads. We emit a marker here so as to get good control
1726 flow data in the meantime. */
1728 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1734 emit_insns_before (seq, region->u.try.catch->label);
1737 case ERT_ALLOWED_EXCEPTIONS:
1738 region->post_landing_pad = gen_label_rtx ();
1742 emit_label (region->post_landing_pad);
1744 emit_cmp_and_jump_insns (cfun->eh->filter,
1745 GEN_INT (region->u.allowed.filter),
1746 EQ, NULL_RTX, word_mode, 0, 0,
1749 /* We delay the generation of the _Unwind_Resume until we generate
1750 landing pads. We emit a marker here so as to get good control
1751 flow data in the meantime. */
1753 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1759 emit_insns_before (seq, region->label);
1763 case ERT_MUST_NOT_THROW:
1764 region->post_landing_pad = region->label;
1769 /* Nothing to do. */
1778 /* Replace RESX patterns with jumps to the next handler if any, or calls to
1779 _Unwind_Resume otherwise. */
1782 connect_post_landing_pads ()
1786 for (i = cfun->eh->last_region_number; i > 0; --i)
1788 struct eh_region *region = cfun->eh->region_array[i];
1789 struct eh_region *outer;
1792 /* Mind we don't process a region more than once. */
1793 if (!region || region->region_number != i)
1796 /* If there is no RESX, or it has been deleted by flow, there's
1797 nothing to fix up. */
1798 if (! region->resume || INSN_DELETED_P (region->resume))
1801 /* Search for another landing pad in this function. */
1802 for (outer = region->outer; outer ; outer = outer->outer)
1803 if (outer->post_landing_pad)
1809 emit_jump (outer->post_landing_pad);
1811 emit_library_call (unwind_resume_libfunc, LCT_THROW,
1812 VOIDmode, 1, cfun->eh->exc_ptr, Pmode);
1816 emit_insns_before (seq, region->resume);
1818 /* Leave the RESX to be deleted by flow. */
1824 dw2_build_landing_pads ()
1828 for (i = cfun->eh->last_region_number; i > 0; --i)
1830 struct eh_region *region = cfun->eh->region_array[i];
1833 /* Mind we don't process a region more than once. */
1834 if (!region || region->region_number != i)
1837 if (region->type != ERT_CLEANUP
1838 && region->type != ERT_TRY
1839 && region->type != ERT_ALLOWED_EXCEPTIONS)
1844 region->landing_pad = gen_label_rtx ();
1845 emit_label (region->landing_pad);
1847 #ifdef HAVE_exception_receiver
1848 if (HAVE_exception_receiver)
1849 emit_insn (gen_exception_receiver ());
1852 #ifdef HAVE_nonlocal_goto_receiver
1853 if (HAVE_nonlocal_goto_receiver)
1854 emit_insn (gen_nonlocal_goto_receiver ());
1859 /* If the eh_return data registers are call-saved, then we
1860 won't have considered them clobbered from the call that
1861 threw. Kill them now. */
1864 unsigned r = EH_RETURN_DATA_REGNO (j);
1865 if (r == INVALID_REGNUM)
1867 if (! call_used_regs[r])
1868 emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
1871 emit_move_insn (cfun->eh->exc_ptr,
1872 gen_rtx_REG (Pmode, EH_RETURN_DATA_REGNO (0)));
1873 emit_move_insn (cfun->eh->filter,
1874 gen_rtx_REG (Pmode, EH_RETURN_DATA_REGNO (1)));
1879 emit_insns_before (seq, region->post_landing_pad);
1886 int directly_reachable;
1889 int call_site_index;
1893 sjlj_find_directly_reachable_regions (lp_info)
1894 struct sjlj_lp_info *lp_info;
1897 bool found_one = false;
1899 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1901 struct eh_region *region;
1905 if (! INSN_P (insn))
1908 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1909 if (!note || INTVAL (XEXP (note, 0)) <= 0)
1912 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
1914 type_thrown = NULL_TREE;
1915 if (region->type == ERT_THROW)
1917 type_thrown = region->u.throw.type;
1918 region = region->outer;
1921 /* Find the first containing region that might handle the exception.
1922 That's the landing pad to which we will transfer control. */
1923 for (; region; region = region->outer)
1924 if (reachable_next_level (region, type_thrown, 0) != RNL_NOT_CAUGHT)
1929 lp_info[region->region_number].directly_reachable = 1;
1938 sjlj_assign_call_site_values (dispatch_label, lp_info)
1940 struct sjlj_lp_info *lp_info;
1945 /* First task: build the action table. */
1947 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
1948 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
1950 for (i = cfun->eh->last_region_number; i > 0; --i)
1951 if (lp_info[i].directly_reachable)
1953 struct eh_region *r = cfun->eh->region_array[i];
1954 r->landing_pad = dispatch_label;
1955 lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
1956 if (lp_info[i].action_index != -1)
1957 cfun->uses_eh_lsda = 1;
1960 htab_delete (ar_hash);
1962 /* Next: assign dispatch values. In dwarf2 terms, this would be the
1963 landing pad label for the region. For sjlj though, there is one
1964 common landing pad from which we dispatch to the post-landing pads.
1966 A region receives a dispatch index if it is directly reachable
1967 and requires in-function processing. Regions that share post-landing
1968 pads may share dispatch indicies. */
1969 /* ??? Post-landing pad sharing doesn't actually happen at the moment
1970 (see build_post_landing_pads) so we don't bother checking for it. */
1973 for (i = cfun->eh->last_region_number; i > 0; --i)
1974 if (lp_info[i].directly_reachable
1975 && lp_info[i].action_index >= 0)
1976 lp_info[i].dispatch_index = index++;
1978 /* Finally: assign call-site values. If dwarf2 terms, this would be
1979 the region number assigned by convert_to_eh_region_ranges, but
1980 handles no-action and must-not-throw differently. */
1983 for (i = cfun->eh->last_region_number; i > 0; --i)
1984 if (lp_info[i].directly_reachable)
1986 int action = lp_info[i].action_index;
1988 /* Map must-not-throw to otherwise unused call-site index 0. */
1991 /* Map no-action to otherwise unused call-site index -1. */
1992 else if (action == -1)
1994 /* Otherwise, look it up in the table. */
1996 index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
1998 lp_info[i].call_site_index = index;
2003 sjlj_mark_call_sites (lp_info)
2004 struct sjlj_lp_info *lp_info;
2006 int last_call_site = -2;
2009 mem = change_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
2010 plus_constant (XEXP (cfun->eh->sjlj_fc, 0),
2011 sjlj_fc_call_site_ofs));
2013 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2015 struct eh_region *region;
2017 rtx note, before, p;
2019 /* Reset value tracking at extended basic block boundaries. */
2020 if (GET_CODE (insn) == CODE_LABEL)
2021 last_call_site = -2;
2023 if (! INSN_P (insn))
2026 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2029 /* Calls (and trapping insns) without notes are outside any
2030 exception handling region in this function. Mark them as
2032 if (GET_CODE (insn) == CALL_INSN
2033 || (flag_non_call_exceptions
2034 && may_trap_p (PATTERN (insn))))
2035 this_call_site = -1;
2041 /* Calls that are known to not throw need not be marked. */
2042 if (INTVAL (XEXP (note, 0)) <= 0)
2045 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2046 this_call_site = lp_info[region->region_number].call_site_index;
2049 if (this_call_site == last_call_site)
2052 /* Don't separate a call from it's argument loads. */
2054 if (GET_CODE (insn) == CALL_INSN)
2056 HARD_REG_SET parm_regs;
2059 /* Since different machines initialize their parameter registers
2060 in different orders, assume nothing. Collect the set of all
2061 parameter registers. */
2062 CLEAR_HARD_REG_SET (parm_regs);
2064 for (p = CALL_INSN_FUNCTION_USAGE (insn); p ; p = XEXP (p, 1))
2065 if (GET_CODE (XEXP (p, 0)) == USE
2066 && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
2068 if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
2071 SET_HARD_REG_BIT (parm_regs, REGNO (XEXP (XEXP (p, 0), 0)));
2075 /* Search backward for the first set of a register in this set. */
2078 before = PREV_INSN (before);
2080 /* Given that we've done no other optimizations yet,
2081 the arguments should be immediately available. */
2082 if (GET_CODE (before) == CODE_LABEL)
2085 p = single_set (before);
2086 if (p && GET_CODE (SET_DEST (p)) == REG
2087 && REGNO (SET_DEST (p)) < FIRST_PSEUDO_REGISTER
2088 && TEST_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p))))
2090 CLEAR_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p)));
2097 emit_move_insn (mem, GEN_INT (this_call_site));
2101 emit_insns_before (p, before);
2102 last_call_site = this_call_site;
2106 /* Construct the SjLj_Function_Context. */
2109 sjlj_emit_function_enter (dispatch_label)
2112 rtx fn_begin, fc, mem, seq;
2114 fc = cfun->eh->sjlj_fc;
2118 mem = change_address (fc, Pmode,
2119 plus_constant (XEXP (fc, 0), sjlj_fc_personality_ofs));
2120 emit_move_insn (mem, eh_personality_libfunc);
2122 mem = change_address (fc, Pmode,
2123 plus_constant (XEXP (fc, 0), sjlj_fc_lsda_ofs));
2124 if (cfun->uses_eh_lsda)
2127 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", sjlj_funcdef_number);
2128 emit_move_insn (mem, gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)));
2131 emit_move_insn (mem, const0_rtx);
2133 #ifdef DONT_USE_BUILTIN_SETJMP
2136 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_NORMAL,
2137 TYPE_MODE (integer_type_node), 1,
2138 plus_constant (XEXP (fc, 0),
2139 sjlj_fc_jbuf_ofs), Pmode);
2141 note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
2142 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
2144 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2145 TYPE_MODE (integer_type_node), 0, 0,
2149 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2153 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2154 1, XEXP (fc, 0), Pmode);
2159 /* ??? Instead of doing this at the beginning of the function,
2160 do this in a block that is at loop level 0 and dominates all
2161 can_throw_internal instructions. */
2163 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2164 if (GET_CODE (fn_begin) == NOTE
2165 && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2167 emit_insns_after (seq, fn_begin);
2170 /* Call back from expand_function_end to know where we should put
2171 the call to unwind_sjlj_unregister_libfunc if needed. */
2174 sjlj_emit_function_exit_after (after)
2177 cfun->eh->sjlj_exit_after = after;
2181 sjlj_emit_function_exit ()
2187 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2188 1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);
2193 /* ??? Really this can be done in any block at loop level 0 that
2194 post-dominates all can_throw_internal instructions. This is
2195 the last possible moment. */
2197 emit_insns_after (seq, cfun->eh->sjlj_exit_after);
2201 sjlj_emit_dispatch_table (dispatch_label, lp_info)
2203 struct sjlj_lp_info *lp_info;
2205 int i, first_reachable;
2206 rtx mem, dispatch, seq, fc;
2208 fc = cfun->eh->sjlj_fc;
2212 emit_label (dispatch_label);
2214 #ifndef DONT_USE_BUILTIN_SETJMP
2215 expand_builtin_setjmp_receiver (dispatch_label);
2218 /* Load up dispatch index, exc_ptr and filter values from the
2219 function context. */
2220 mem = change_address (fc, TYPE_MODE (integer_type_node),
2221 plus_constant (XEXP (fc, 0), sjlj_fc_call_site_ofs));
2222 dispatch = copy_to_reg (mem);
2224 mem = change_address (fc, word_mode,
2225 plus_constant (XEXP (fc, 0), sjlj_fc_data_ofs));
2226 if (word_mode != Pmode)
2228 #ifdef POINTERS_EXTEND_UNSIGNED
2229 mem = convert_memory_address (Pmode, mem);
2231 mem = convert_to_mode (Pmode, mem, 0);
2234 emit_move_insn (cfun->eh->exc_ptr, mem);
2236 mem = change_address (fc, word_mode,
2237 plus_constant (XEXP (fc, 0),
2238 sjlj_fc_data_ofs + UNITS_PER_WORD));
2239 emit_move_insn (cfun->eh->filter, mem);
2241 /* Jump to one of the directly reachable regions. */
2242 /* ??? This really ought to be using a switch statement. */
2244 first_reachable = 0;
2245 for (i = cfun->eh->last_region_number; i > 0; --i)
2247 if (! lp_info[i].directly_reachable
2248 || lp_info[i].action_index < 0)
2251 if (! first_reachable)
2253 first_reachable = i;
2257 emit_cmp_and_jump_insns (dispatch,
2258 GEN_INT (lp_info[i].dispatch_index), EQ,
2259 NULL_RTX, TYPE_MODE (integer_type_node), 0, 0,
2260 cfun->eh->region_array[i]->post_landing_pad);
2266 emit_insns_before (seq, (cfun->eh->region_array[first_reachable]
2267 ->post_landing_pad));
2271 sjlj_build_landing_pads ()
2273 struct sjlj_lp_info *lp_info;
2275 lp_info = (struct sjlj_lp_info *) xcalloc (cfun->eh->last_region_number + 1,
2276 sizeof (struct sjlj_lp_info));
2278 if (sjlj_find_directly_reachable_regions (lp_info))
2280 rtx dispatch_label = gen_label_rtx ();
2283 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2284 int_size_in_bytes (sjlj_fc_type_node),
2285 TYPE_ALIGN (sjlj_fc_type_node));
2287 sjlj_assign_call_site_values (dispatch_label, lp_info);
2288 sjlj_mark_call_sites (lp_info);
2290 sjlj_emit_function_enter (dispatch_label);
2291 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2292 sjlj_emit_function_exit ();
2299 finish_eh_generation ()
2301 /* Nothing to do if no regions created. */
2302 if (cfun->eh->region_tree == NULL)
2305 /* The object here is to provide find_basic_blocks with detailed
2306 information (via reachable_handlers) on how exception control
2307 flows within the function. In this first pass, we can include
2308 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2309 regions, and hope that it will be useful in deleting unreachable
2310 handlers. Subsequently, we will generate landing pads which will
2311 connect many of the handlers, and then type information will not
2312 be effective. Still, this is a win over previous implementations. */
2314 jump_optimize_minimal (get_insns ());
2315 find_basic_blocks (get_insns (), max_reg_num (), 0);
2318 /* These registers are used by the landing pads. Make sure they
2319 have been generated. */
2320 get_exception_pointer ();
2321 get_exception_filter ();
2323 /* Construct the landing pads. */
2325 assign_filter_values ();
2326 build_post_landing_pads ();
2327 connect_post_landing_pads ();
2328 if (USING_SJLJ_EXCEPTIONS)
2329 sjlj_build_landing_pads ();
2331 dw2_build_landing_pads ();
2333 cfun->eh->built_landing_pads = 1;
2335 /* We've totally changed the CFG. Start over. */
2336 find_exception_handler_labels ();
2337 jump_optimize_minimal (get_insns ());
2338 find_basic_blocks (get_insns (), max_reg_num (), 0);
2342 /* This section handles removing dead code for flow. */
2344 /* Remove LABEL from the exception_handler_labels list. */
2347 remove_exception_handler_label (label)
2352 for (pl = &exception_handler_labels, l = *pl;
2353 XEXP (l, 0) != label;
2354 pl = &XEXP (l, 1), l = *pl)
2358 free_EXPR_LIST_node (l);
2361 /* Splice REGION from the region tree etc. */
2364 remove_eh_handler (region)
2365 struct eh_region *region;
2367 struct eh_region **pp, *p;
2371 /* For the benefit of efficiently handling REG_EH_REGION notes,
2372 replace this region in the region array with its containing
2373 region. Note that previous region deletions may result in
2374 multiple copies of this region in the array, so we have to
2375 search the whole thing. */
2376 for (i = cfun->eh->last_region_number; i > 0; --i)
2377 if (cfun->eh->region_array[i] == region)
2378 cfun->eh->region_array[i] = region->outer;
2380 if (cfun->eh->built_landing_pads)
2381 lab = region->landing_pad;
2383 lab = region->label;
2385 remove_exception_handler_label (lab);
2388 pp = ®ion->outer->inner;
2390 pp = &cfun->eh->region_tree;
2391 for (p = *pp; p != region; pp = &p->next_peer, p = *pp)
2396 for (p = region->inner; p->next_peer ; p = p->next_peer)
2397 p->outer = region->outer;
2398 p->next_peer = region->next_peer;
2399 p->outer = region->outer;
2400 *pp = region->inner;
2403 *pp = region->next_peer;
2405 if (region->type == ERT_CATCH)
2407 struct eh_region *try, *next, *prev;
2409 for (try = region->next_peer;
2410 try->type == ERT_CATCH;
2411 try = try->next_peer)
2413 if (try->type != ERT_TRY)
2416 next = region->u.catch.next_catch;
2417 prev = region->u.catch.prev_catch;
2420 next->u.catch.prev_catch = prev;
2422 try->u.try.last_catch = prev;
2424 prev->u.catch.next_catch = next;
2427 try->u.try.catch = next;
2429 remove_eh_handler (try);
2436 /* LABEL heads a basic block that is about to be deleted. If this
2437 label corresponds to an exception region, we may be able to
2438 delete the region. */
2441 maybe_remove_eh_handler (label)
2446 /* ??? After generating landing pads, it's not so simple to determine
2447 if the region data is completely unused. One must examine the
2448 landing pad and the post landing pad, and whether an inner try block
2449 is referencing the catch handlers directly. */
2450 if (cfun->eh->built_landing_pads)
2453 for (i = cfun->eh->last_region_number; i > 0; --i)
2455 struct eh_region *region = cfun->eh->region_array[i];
2456 if (region && region->label == label)
2458 /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2459 because there is no path to the fallback call to terminate.
2460 But the region continues to affect call-site data until there
2461 are no more contained calls, which we don't see here. */
2462 if (region->type == ERT_MUST_NOT_THROW)
2464 remove_exception_handler_label (region->label);
2465 region->label = NULL_RTX;
2468 remove_eh_handler (region);
2475 /* This section describes CFG exception edges for flow. */
2477 /* For communicating between calls to reachable_next_level. */
2478 struct reachable_info
2485 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2486 base class of TYPE, is in HANDLED. */
2489 check_handled (handled, type)
2494 /* We can check for exact matches without front-end help. */
2495 if (! lang_eh_type_covers)
2497 for (t = handled; t ; t = TREE_CHAIN (t))
2498 if (TREE_VALUE (t) == type)
2503 for (t = handled; t ; t = TREE_CHAIN (t))
2504 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2511 /* A subroutine of reachable_next_level. If we are collecting a list
2512 of handlers, add one. After landing pad generation, reference
2513 it instead of the handlers themselves. Further, the handlers are
2514 all wired together, so by referencing one, we've got them all.
2515 Before landing pad generation we reference each handler individually.
2517 LP_REGION contains the landing pad; REGION is the handler. */
2520 add_reachable_handler (info, lp_region, region)
2521 struct reachable_info *info;
2522 struct eh_region *lp_region;
2523 struct eh_region *region;
2528 if (cfun->eh->built_landing_pads)
2530 if (! info->handlers)
2531 info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
2534 info->handlers = alloc_INSN_LIST (region->label, info->handlers);
2537 /* Process one level of exception regions for reachability.
2538 If TYPE_THROWN is non-null, then it is the *exact* type being
2539 propagated. If INFO is non-null, then collect handler labels
2540 and caught/allowed type information between invocations. */
2542 static enum reachable_code
2543 reachable_next_level (region, type_thrown, info)
2544 struct eh_region *region;
2546 struct reachable_info *info;
2548 switch (region->type)
2551 /* Before landing-pad generation, we model control flow
2552 directly to the individual handlers. In this way we can
2553 see that catch handler types may shadow one another. */
2554 add_reachable_handler (info, region, region);
2555 return RNL_MAYBE_CAUGHT;
2559 struct eh_region *c;
2560 enum reachable_code ret = RNL_NOT_CAUGHT;
2562 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
2564 /* A catch-all handler ends the search. */
2565 /* ??? _Unwind_ForcedUnwind will want outer cleanups
2566 to be run as well. */
2567 if (c->u.catch.type == NULL)
2569 add_reachable_handler (info, region, c);
2575 /* If we have a type match, end the search. */
2576 if (c->u.catch.type == type_thrown
2577 || (lang_eh_type_covers
2578 && (*lang_eh_type_covers) (c->u.catch.type,
2581 add_reachable_handler (info, region, c);
2585 /* If we have definitive information of a match failure,
2586 the catch won't trigger. */
2587 if (lang_eh_type_covers)
2588 return RNL_NOT_CAUGHT;
2592 ret = RNL_MAYBE_CAUGHT;
2594 /* A type must not have been previously caught. */
2595 else if (! check_handled (info->types_caught, c->u.catch.type))
2597 add_reachable_handler (info, region, c);
2598 info->types_caught = tree_cons (NULL, c->u.catch.type,
2599 info->types_caught);
2601 /* ??? If the catch type is a base class of every allowed
2602 type, then we know we can stop the search. */
2603 ret = RNL_MAYBE_CAUGHT;
2610 case ERT_ALLOWED_EXCEPTIONS:
2611 /* An empty list of types definitely ends the search. */
2612 if (region->u.allowed.type_list == NULL_TREE)
2614 add_reachable_handler (info, region, region);
2618 /* Collect a list of lists of allowed types for use in detecting
2619 when a catch may be transformed into a catch-all. */
2621 info->types_allowed = tree_cons (NULL_TREE,
2622 region->u.allowed.type_list,
2623 info->types_allowed);
2625 /* If we have definitive information about the type heirarchy,
2626 then we can tell if the thrown type will pass through the
2628 if (type_thrown && lang_eh_type_covers)
2630 if (check_handled (region->u.allowed.type_list, type_thrown))
2631 return RNL_NOT_CAUGHT;
2634 add_reachable_handler (info, region, region);
2639 add_reachable_handler (info, region, region);
2640 return RNL_MAYBE_CAUGHT;
2643 /* Catch regions are handled by their controling try region. */
2644 return RNL_NOT_CAUGHT;
2646 case ERT_MUST_NOT_THROW:
2647 /* Here we end our search, since no exceptions may propagate.
2648 If we've touched down at some landing pad previous, then the
2649 explicit function call we generated may be used. Otherwise
2650 the call is made by the runtime. */
2651 if (info && info->handlers)
2653 add_reachable_handler (info, region, region);
2661 /* Shouldn't see these here. */
2668 /* Retrieve a list of labels of exception handlers which can be
2669 reached by a given insn. */
2672 reachable_handlers (insn)
2675 struct reachable_info info;
2676 struct eh_region *region;
2680 if (GET_CODE (insn) == JUMP_INSN
2681 && GET_CODE (PATTERN (insn)) == RESX)
2682 region_number = XINT (PATTERN (insn), 0);
2685 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2686 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2688 region_number = INTVAL (XEXP (note, 0));
2691 memset (&info, 0, sizeof (info));
2693 region = cfun->eh->region_array[region_number];
2695 type_thrown = NULL_TREE;
2696 if (region->type == ERT_THROW)
2698 type_thrown = region->u.throw.type;
2699 region = region->outer;
2701 else if (GET_CODE (insn) == JUMP_INSN
2702 && GET_CODE (PATTERN (insn)) == RESX)
2703 region = region->outer;
2705 for (; region; region = region->outer)
2706 if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
2709 return info.handlers;
2712 /* Determine if the given INSN can throw an exception that is caught
2713 within the function. */
2716 can_throw_internal (insn)
2719 struct eh_region *region;
2723 if (! INSN_P (insn))
2726 if (GET_CODE (insn) == INSN
2727 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2728 insn = XVECEXP (PATTERN (insn), 0, 0);
2730 if (GET_CODE (insn) == CALL_INSN
2731 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2734 for (i = 0; i < 3; ++i)
2736 rtx sub = XEXP (PATTERN (insn), i);
2737 for (; sub ; sub = NEXT_INSN (sub))
2738 if (can_throw_internal (sub))
2744 /* Every insn that might throw has an EH_REGION note. */
2745 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2746 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2749 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2751 type_thrown = NULL_TREE;
2752 if (region->type == ERT_THROW)
2754 type_thrown = region->u.throw.type;
2755 region = region->outer;
2758 /* If this exception is ignored by each and every containing region,
2759 then control passes straight out. The runtime may handle some
2760 regions, which also do not require processing internally. */
2761 for (; region; region = region->outer)
2763 enum reachable_code how = reachable_next_level (region, type_thrown, 0);
2764 if (how == RNL_BLOCKED)
2766 if (how != RNL_NOT_CAUGHT)
2773 /* Determine if the given INSN can throw an exception that is
2774 visible outside the function. */
2777 can_throw_external (insn)
2780 struct eh_region *region;
2784 if (! INSN_P (insn))
2787 if (GET_CODE (insn) == INSN
2788 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2789 insn = XVECEXP (PATTERN (insn), 0, 0);
2791 if (GET_CODE (insn) == CALL_INSN
2792 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2795 for (i = 0; i < 3; ++i)
2797 rtx sub = XEXP (PATTERN (insn), i);
2798 for (; sub ; sub = NEXT_INSN (sub))
2799 if (can_throw_external (sub))
2805 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2808 /* Calls (and trapping insns) without notes are outside any
2809 exception handling region in this function. We have to
2810 assume it might throw. Given that the front end and middle
2811 ends mark known NOTHROW functions, this isn't so wildly
2813 return (GET_CODE (insn) == CALL_INSN
2814 || (flag_non_call_exceptions
2815 && may_trap_p (PATTERN (insn))));
2817 if (INTVAL (XEXP (note, 0)) <= 0)
2820 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2822 type_thrown = NULL_TREE;
2823 if (region->type == ERT_THROW)
2825 type_thrown = region->u.throw.type;
2826 region = region->outer;
2829 /* If the exception is caught or blocked by any containing region,
2830 then it is not seen by any calling function. */
2831 for (; region ; region = region->outer)
2832 if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
2838 /* True if nothing in this function can throw outside this function. */
2841 nothrow_function_p ()
2845 if (! flag_exceptions)
2848 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2849 if (can_throw_external (insn))
2851 for (insn = current_function_epilogue_delay_list; insn;
2852 insn = XEXP (insn, 1))
2853 if (can_throw_external (insn))
2860 /* Various hooks for unwind library. */
2862 /* Do any necessary initialization to access arbitrary stack frames.
2863 On the SPARC, this means flushing the register windows. */
2866 expand_builtin_unwind_init ()
2868 /* Set this so all the registers get saved in our frame; we need to be
2869 able to copy the saved values for any registers from frames we unwind. */
2870 current_function_has_nonlocal_label = 1;
2872 #ifdef SETUP_FRAME_ADDRESSES
2873 SETUP_FRAME_ADDRESSES ();
2878 expand_builtin_eh_return_data_regno (arglist)
2881 tree which = TREE_VALUE (arglist);
2882 unsigned HOST_WIDE_INT iwhich;
2884 if (TREE_CODE (which) != INTEGER_CST)
2886 error ("argument of `__builtin_eh_return_regno' must be constant");
2890 iwhich = tree_low_cst (which, 1);
2891 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2892 if (iwhich == INVALID_REGNUM)
2895 #ifdef DWARF_FRAME_REGNUM
2896 iwhich = DWARF_FRAME_REGNUM (iwhich);
2898 iwhich = DBX_REGISTER_NUMBER (iwhich);
2901 return GEN_INT (iwhich);
2904 /* Given a value extracted from the return address register or stack slot,
2905 return the actual address encoded in that value. */
2908 expand_builtin_extract_return_addr (addr_tree)
2911 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2913 /* First mask out any unwanted bits. */
2914 #ifdef MASK_RETURN_ADDR
2915 expand_and (addr, MASK_RETURN_ADDR, addr);
2918 /* Then adjust to find the real return address. */
2919 #if defined (RETURN_ADDR_OFFSET)
2920 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
2926 /* Given an actual address in addr_tree, do any necessary encoding
2927 and return the value to be stored in the return address register or
2928 stack slot so the epilogue will return to that address. */
2931 expand_builtin_frob_return_addr (addr_tree)
2934 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2936 #ifdef RETURN_ADDR_OFFSET
2937 addr = force_reg (Pmode, addr);
2938 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
2944 /* Set up the epilogue with the magic bits we'll need to return to the
2945 exception handler. */
2948 expand_builtin_eh_return (stackadj_tree, handler_tree)
2949 tree stackadj_tree, handler_tree;
2951 rtx stackadj, handler;
2953 stackadj = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
2954 handler = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
2956 if (! cfun->eh->ehr_label)
2958 cfun->eh->ehr_stackadj = copy_to_reg (stackadj);
2959 cfun->eh->ehr_handler = copy_to_reg (handler);
2960 cfun->eh->ehr_label = gen_label_rtx ();
2964 if (stackadj != cfun->eh->ehr_stackadj)
2965 emit_move_insn (cfun->eh->ehr_stackadj, stackadj);
2966 if (handler != cfun->eh->ehr_handler)
2967 emit_move_insn (cfun->eh->ehr_handler, handler);
2970 emit_jump (cfun->eh->ehr_label);
2976 rtx sa, ra, around_label;
2978 if (! cfun->eh->ehr_label)
2981 sa = EH_RETURN_STACKADJ_RTX;
2984 error ("__builtin_eh_return not supported on this target");
2988 current_function_calls_eh_return = 1;
2990 around_label = gen_label_rtx ();
2991 emit_move_insn (sa, const0_rtx);
2992 emit_jump (around_label);
2994 emit_label (cfun->eh->ehr_label);
2995 clobber_return_register ();
2997 #ifdef HAVE_eh_return
2999 emit_insn (gen_eh_return (cfun->eh->ehr_stackadj, cfun->eh->ehr_handler));
3003 ra = EH_RETURN_HANDLER_RTX;
3006 error ("__builtin_eh_return not supported on this target");
3007 ra = gen_reg_rtx (Pmode);
3010 emit_move_insn (sa, cfun->eh->ehr_stackadj);
3011 emit_move_insn (ra, cfun->eh->ehr_handler);
3014 emit_label (around_label);
3017 struct action_record
3025 action_record_eq (pentry, pdata)
3029 const struct action_record *entry = (const struct action_record *) pentry;
3030 const struct action_record *data = (const struct action_record *) pdata;
3031 return entry->filter == data->filter && entry->next == data->next;
3035 action_record_hash (pentry)
3038 const struct action_record *entry = (const struct action_record *) pentry;
3039 return entry->next * 1009 + entry->filter;
3043 add_action_record (ar_hash, filter, next)
3047 struct action_record **slot, *new, tmp;
3049 tmp.filter = filter;
3051 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3053 if ((new = *slot) == NULL)
3055 new = (struct action_record *) xmalloc (sizeof (*new));
3056 new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3057 new->filter = filter;
3061 /* The filter value goes in untouched. The link to the next
3062 record is a "self-relative" byte offset, or zero to indicate
3063 that there is no next record. So convert the absolute 1 based
3064 indicies we've been carrying around into a displacement. */
3066 push_sleb128 (&cfun->eh->action_record_data, filter);
3068 next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3069 push_sleb128 (&cfun->eh->action_record_data, next);
3076 collect_one_action_chain (ar_hash, region)
3078 struct eh_region *region;
3080 struct eh_region *c;
3083 /* If we've reached the top of the region chain, then we have
3084 no actions, and require no landing pad. */
3088 switch (region->type)
3091 /* A cleanup adds a zero filter to the beginning of the chain, but
3092 there are special cases to look out for. If there are *only*
3093 cleanups along a path, then it compresses to a zero action.
3094 Further, if there are multiple cleanups along a path, we only
3095 need to represent one of them, as that is enough to trigger
3096 entry to the landing pad at runtime. */
3097 next = collect_one_action_chain (ar_hash, region->outer);
3100 for (c = region->outer; c ; c = c->outer)
3101 if (c->type == ERT_CLEANUP)
3103 return add_action_record (ar_hash, 0, next);
3106 /* Process the associated catch regions in reverse order.
3107 If there's a catch-all handler, then we don't need to
3108 search outer regions. Use a magic -3 value to record
3109 that we havn't done the outer search. */
3111 for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
3113 if (c->u.catch.type == NULL)
3114 next = add_action_record (ar_hash, c->u.catch.filter, 0);
3119 next = collect_one_action_chain (ar_hash, region->outer);
3123 next = add_action_record (ar_hash, c->u.catch.filter, next);
3128 case ERT_ALLOWED_EXCEPTIONS:
3129 /* An exception specification adds its filter to the
3130 beginning of the chain. */
3131 next = collect_one_action_chain (ar_hash, region->outer);
3132 return add_action_record (ar_hash, region->u.allowed.filter,
3133 next < 0 ? 0 : next);
3135 case ERT_MUST_NOT_THROW:
3136 /* A must-not-throw region with no inner handlers or cleanups
3137 requires no call-site entry. Note that this differs from
3138 the no handler or cleanup case in that we do require an lsda
3139 to be generated. Return a magic -2 value to record this. */
3144 /* CATCH regions are handled in TRY above. THROW regions are
3145 for optimization information only and produce no output. */
3146 return collect_one_action_chain (ar_hash, region->outer);
3154 add_call_site (landing_pad, action)
3158 struct call_site_record *data = cfun->eh->call_site_data;
3159 int used = cfun->eh->call_site_data_used;
3160 int size = cfun->eh->call_site_data_size;
3164 size = (size ? size * 2 : 64);
3165 data = (struct call_site_record *)
3166 xrealloc (data, sizeof (*data) * size);
3167 cfun->eh->call_site_data = data;
3168 cfun->eh->call_site_data_size = size;
3171 data[used].landing_pad = landing_pad;
3172 data[used].action = action;
3174 cfun->eh->call_site_data_used = used + 1;
3176 return used + call_site_base;
3179 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3180 The new note numbers will not refer to region numbers, but
3181 instead to call site entries. */
3184 convert_to_eh_region_ranges ()
3186 rtx insn, iter, note;
3188 int last_action = -3;
3189 rtx last_action_insn = NULL_RTX;
3190 rtx last_landing_pad = NULL_RTX;
3191 rtx first_no_action_insn = NULL_RTX;
3194 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3197 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
3199 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3201 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3204 struct eh_region *region;
3206 rtx this_landing_pad;
3209 if (GET_CODE (insn) == INSN
3210 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3211 insn = XVECEXP (PATTERN (insn), 0, 0);
3213 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3216 if (! (GET_CODE (insn) == CALL_INSN
3217 || (flag_non_call_exceptions
3218 && may_trap_p (PATTERN (insn)))))
3225 if (INTVAL (XEXP (note, 0)) <= 0)
3227 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
3228 this_action = collect_one_action_chain (ar_hash, region);
3231 /* Existence of catch handlers, or must-not-throw regions
3232 implies that an lsda is needed (even if empty). */
3233 if (this_action != -1)
3234 cfun->uses_eh_lsda = 1;
3236 /* Delay creation of region notes for no-action regions
3237 until we're sure that an lsda will be required. */
3238 else if (last_action == -3)
3240 first_no_action_insn = iter;
3244 /* Cleanups and handlers may share action chains but not
3245 landing pads. Collect the landing pad for this region. */
3246 if (this_action >= 0)
3248 struct eh_region *o;
3249 for (o = region; ! o->landing_pad ; o = o->outer)
3251 this_landing_pad = o->landing_pad;
3254 this_landing_pad = NULL_RTX;
3256 /* Differing actions or landing pads implies a change in call-site
3257 info, which implies some EH_REGION note should be emitted. */
3258 if (last_action != this_action
3259 || last_landing_pad != this_landing_pad)
3261 /* If we'd not seen a previous action (-3) or the previous
3262 action was must-not-throw (-2), then we do not need an
3264 if (last_action >= -1)
3266 /* If we delayed the creation of the begin, do it now. */
3267 if (first_no_action_insn)
3269 call_site = add_call_site (NULL_RTX, 0);
3270 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3271 first_no_action_insn);
3272 NOTE_EH_HANDLER (note) = call_site;
3273 first_no_action_insn = NULL_RTX;
3276 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3278 NOTE_EH_HANDLER (note) = call_site;
3281 /* If the new action is must-not-throw, then no region notes
3283 if (this_action >= -1)
3285 call_site = add_call_site (this_landing_pad,
3286 this_action < 0 ? 0 : this_action);
3287 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3288 NOTE_EH_HANDLER (note) = call_site;
3291 last_action = this_action;
3292 last_landing_pad = this_landing_pad;
3294 last_action_insn = iter;
3297 if (last_action >= -1 && ! first_no_action_insn)
3299 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3300 NOTE_EH_HANDLER (note) = call_site;
3303 htab_delete (ar_hash);
3308 push_uleb128 (data_area, value)
3309 varray_type *data_area;
3314 unsigned char byte = value & 0x7f;
3318 VARRAY_PUSH_UCHAR (*data_area, byte);
3324 push_sleb128 (data_area, value)
3325 varray_type *data_area;
3333 byte = value & 0x7f;
3335 more = ! ((value == 0 && (byte & 0x40) == 0)
3336 || (value == -1 && (byte & 0x40) != 0));
3339 VARRAY_PUSH_UCHAR (*data_area, byte);
3345 #define DW_EH_PE_absptr 0x00
3346 #define DW_EH_PE_omit 0xff
3348 #define DW_EH_PE_uleb128 0x01
3349 #define DW_EH_PE_udata2 0x02
3350 #define DW_EH_PE_udata4 0x03
3351 #define DW_EH_PE_udata8 0x04
3352 #define DW_EH_PE_sleb128 0x09
3353 #define DW_EH_PE_sdata2 0x0A
3354 #define DW_EH_PE_sdata4 0x0B
3355 #define DW_EH_PE_sdata8 0x0C
3356 #define DW_EH_PE_signed 0x08
3358 #define DW_EH_PE_pcrel 0x10
3359 #define DW_EH_PE_textrel 0x20
3360 #define DW_EH_PE_datarel 0x30
3361 #define DW_EH_PE_funcrel 0x40
3364 eh_data_format_name (format)
3369 case DW_EH_PE_absptr: return "absolute";
3370 case DW_EH_PE_omit: return "omit";
3372 case DW_EH_PE_uleb128: return "uleb128";
3373 case DW_EH_PE_udata2: return "udata2";
3374 case DW_EH_PE_udata4: return "udata4";
3375 case DW_EH_PE_udata8: return "udata8";
3376 case DW_EH_PE_sleb128: return "sleb128";
3377 case DW_EH_PE_sdata2: return "sdata2";
3378 case DW_EH_PE_sdata4: return "sdata4";
3379 case DW_EH_PE_sdata8: return "sdata8";
3381 case DW_EH_PE_uleb128 | DW_EH_PE_pcrel: return "pcrel uleb128";
3382 case DW_EH_PE_udata2 | DW_EH_PE_pcrel: return "pcrel udata2";
3383 case DW_EH_PE_udata4 | DW_EH_PE_pcrel: return "pcrel udata4";
3384 case DW_EH_PE_udata8 | DW_EH_PE_pcrel: return "pcrel udata8";
3385 case DW_EH_PE_sleb128 | DW_EH_PE_pcrel: return "pcrel sleb128";
3386 case DW_EH_PE_sdata2 | DW_EH_PE_pcrel: return "pcrel sdata2";
3387 case DW_EH_PE_sdata4 | DW_EH_PE_pcrel: return "pcrel sdata4";
3388 case DW_EH_PE_sdata8 | DW_EH_PE_pcrel: return "pcrel sdata8";
3390 case DW_EH_PE_uleb128 | DW_EH_PE_textrel: return "textrel uleb128";
3391 case DW_EH_PE_udata2 | DW_EH_PE_textrel: return "textrel udata2";
3392 case DW_EH_PE_udata4 | DW_EH_PE_textrel: return "textrel udata4";
3393 case DW_EH_PE_udata8 | DW_EH_PE_textrel: return "textrel udata8";
3394 case DW_EH_PE_sleb128 | DW_EH_PE_textrel: return "textrel sleb128";
3395 case DW_EH_PE_sdata2 | DW_EH_PE_textrel: return "textrel sdata2";
3396 case DW_EH_PE_sdata4 | DW_EH_PE_textrel: return "textrel sdata4";
3397 case DW_EH_PE_sdata8 | DW_EH_PE_textrel: return "textrel sdata8";
3399 case DW_EH_PE_uleb128 | DW_EH_PE_datarel: return "datarel uleb128";
3400 case DW_EH_PE_udata2 | DW_EH_PE_datarel: return "datarel udata2";
3401 case DW_EH_PE_udata4 | DW_EH_PE_datarel: return "datarel udata4";
3402 case DW_EH_PE_udata8 | DW_EH_PE_datarel: return "datarel udata8";
3403 case DW_EH_PE_sleb128 | DW_EH_PE_datarel: return "datarel sleb128";
3404 case DW_EH_PE_sdata2 | DW_EH_PE_datarel: return "datarel sdata2";
3405 case DW_EH_PE_sdata4 | DW_EH_PE_datarel: return "datarel sdata4";
3406 case DW_EH_PE_sdata8 | DW_EH_PE_datarel: return "datarel sdata8";
3408 case DW_EH_PE_uleb128 | DW_EH_PE_funcrel: return "funcrel uleb128";
3409 case DW_EH_PE_udata2 | DW_EH_PE_funcrel: return "funcrel udata2";
3410 case DW_EH_PE_udata4 | DW_EH_PE_funcrel: return "funcrel udata4";
3411 case DW_EH_PE_udata8 | DW_EH_PE_funcrel: return "funcrel udata8";
3412 case DW_EH_PE_sleb128 | DW_EH_PE_funcrel: return "funcrel sleb128";
3413 case DW_EH_PE_sdata2 | DW_EH_PE_funcrel: return "funcrel sdata2";
3414 case DW_EH_PE_sdata4 | DW_EH_PE_funcrel: return "funcrel sdata4";
3415 case DW_EH_PE_sdata8 | DW_EH_PE_funcrel: return "funcrel sdata8";
3422 #ifndef HAVE_AS_LEB128
3424 dw2_size_of_call_site_table ()
3426 int n = cfun->eh->call_site_data_used;
3427 int size = n * (4 + 4 + 4);
3430 for (i = 0; i < n; ++i)
3432 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3433 size += size_of_uleb128 (cs->action);
3440 sjlj_size_of_call_site_table ()
3442 int n = cfun->eh->call_site_data_used;
3446 for (i = 0; i < n; ++i)
3448 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3449 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3450 size += size_of_uleb128 (cs->action);
3458 dw2_output_call_site_table ()
3460 const char *function_start_lab
3461 = IDENTIFIER_POINTER (current_function_func_begin_label);
3462 int n = cfun->eh->call_site_data_used;
3465 for (i = 0; i < n; ++i)
3467 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3468 char reg_start_lab[32];
3469 char reg_end_lab[32];
3470 char landing_pad_lab[32];
3472 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3473 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3475 if (cs->landing_pad)
3476 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3477 CODE_LABEL_NUMBER (cs->landing_pad));
3479 /* ??? Perhaps use insn length scaling if the assembler supports
3480 generic arithmetic. */
3481 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3482 data4 if the function is small enough. */
3483 #ifdef HAVE_AS_LEB128
3484 dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
3485 "region %d start", i);
3486 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3488 if (cs->landing_pad)
3489 dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
3492 dw2_asm_output_data_uleb128 (0, "landing pad");
3494 dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
3495 "region %d start", i);
3496 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3497 if (cs->landing_pad)
3498 dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
3501 dw2_asm_output_data (4, 0, "landing pad");
3503 dw2_asm_output_data_uleb128 (cs->action, "action");
3506 call_site_base += n;
3510 sjlj_output_call_site_table ()
3512 int n = cfun->eh->call_site_data_used;
3515 for (i = 0; i < n; ++i)
3517 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3519 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3520 "region %d landing pad", i);
3521 dw2_asm_output_data_uleb128 (cs->action, "action");
3524 call_site_base += n;
3528 output_function_exception_table ()
3531 #ifdef HAVE_AS_LEB128
3532 char ttype_label[32];
3533 char cs_after_size_label[32];
3534 char cs_end_label[32];
3541 /* Not all functions need anything. */
3542 if (! cfun->uses_eh_lsda)
3545 funcdef_number = (USING_SJLJ_EXCEPTIONS
3546 ? sjlj_funcdef_number
3547 : current_funcdef_number);
3549 exception_section ();
3551 have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
3552 || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);
3555 assemble_eh_align (GET_MODE_ALIGNMENT (ptr_mode));
3557 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "LLSDA", funcdef_number);
3559 /* The LSDA header. */
3561 /* Indicate the format of the landing pad start pointer. An omitted
3562 field implies @LPStart == @Start. */
3563 /* Currently we always put @LPStart == @Start. This field would
3564 be most useful in moving the landing pads completely out of
3565 line to another section, but it could also be used to minimize
3566 the size of uleb128 landing pad offsets. */
3567 format = DW_EH_PE_omit;
3568 dw2_asm_output_data (1, format, "@LPStart format (%s)",
3569 eh_data_format_name (format));
3571 /* @LPStart pointer would go here. */
3573 /* Indicate the format of the @TType entries. */
3575 format = DW_EH_PE_omit;
3578 /* ??? Define a ASM_PREFERRED_DATA_FORMAT to say what
3579 sort of dynamic-relocation-free reference to emit. */
3581 #ifdef HAVE_AS_LEB128
3582 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT", funcdef_number);
3585 dw2_asm_output_data (1, format, "@TType format (%s)",
3586 eh_data_format_name (format));
3588 #ifndef HAVE_AS_LEB128
3589 if (USING_SJLJ_EXCEPTIONS)
3590 call_site_len = sjlj_size_of_call_site_table ();
3592 call_site_len = dw2_size_of_call_site_table ();
3595 /* A pc-relative 4-byte displacement to the @TType data. */
3598 #ifdef HAVE_AS_LEB128
3599 char ttype_after_disp_label[32];
3600 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
3602 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3603 "@TType base offset");
3604 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3606 /* Ug. Alignment queers things. */
3607 unsigned int before_disp, after_disp, last_disp, disp, align;
3609 align = POINTER_SIZE / BITS_PER_UNIT;
3610 before_disp = 1 + 1;
3611 after_disp = (1 + size_of_uleb128 (call_site_len)
3613 + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
3614 + VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) * align);
3619 unsigned int disp_size, pad;
3622 disp_size = size_of_uleb128 (disp);
3623 pad = before_disp + disp_size + after_disp;
3625 pad = align - (pad % align);
3628 disp = after_disp + pad;
3630 while (disp != last_disp);
3632 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3636 /* Indicate the format of the call-site offsets. */
3637 #ifdef HAVE_AS_LEB128
3638 format = DW_EH_PE_uleb128;
3640 format = DW_EH_PE_udata4;
3642 dw2_asm_output_data (1, format, "call-site format (%s)",
3643 eh_data_format_name (format));
3645 #ifdef HAVE_AS_LEB128
3646 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
3648 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
3650 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3651 "Call-site table length");
3652 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3653 if (USING_SJLJ_EXCEPTIONS)
3654 sjlj_output_call_site_table ();
3656 dw2_output_call_site_table ();
3657 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3659 dw2_asm_output_data_uleb128 (call_site_len,"Call-site table length");
3660 if (USING_SJLJ_EXCEPTIONS)
3661 sjlj_output_call_site_table ();
3663 dw2_output_call_site_table ();
3666 /* ??? Decode and interpret the data for flag_debug_asm. */
3667 n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
3668 for (i = 0; i < n; ++i)
3669 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
3670 (i ? NULL : "Action record table"));
3673 assemble_eh_align (GET_MODE_ALIGNMENT (ptr_mode));
3675 i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
3678 tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
3680 if (type == NULL_TREE)
3681 type = integer_zero_node;
3683 type = lookup_type_for_runtime (type);
3685 /* ??? Handle ASM_PREFERRED_DATA_FORMAT. */
3686 output_constant (type, GET_MODE_SIZE (ptr_mode));
3689 #ifdef HAVE_AS_LEB128
3691 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3694 /* ??? Decode and interpret the data for flag_debug_asm. */
3695 n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
3696 for (i = 0; i < n; ++i)
3697 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
3698 (i ? NULL : "Exception specification table"));
3700 function_section (current_function_decl);
3702 if (USING_SJLJ_EXCEPTIONS)
3703 sjlj_funcdef_number += 1;