1 /* Implements exception handling.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
4 Contributed by Mike Stump <mrs@cygnus.com>.
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
24 /* An exception is an event that can be signaled from within a
25 function. This event can then be "caught" or "trapped" by the
26 callers of this function. This potentially allows program flow to
27 be transferred to any arbitrary code associated with a function call
28 several levels up the stack.
30 The intended use for this mechanism is for signaling "exceptional
31 events" in an out-of-band fashion, hence its name. The C++ language
32 (and many other OO-styled or functional languages) practically
33 requires such a mechanism, as otherwise it becomes very difficult
34 or even impossible to signal failure conditions in complex
35 situations. The traditional C++ example is when an error occurs in
36 the process of constructing an object; without such a mechanism, it
37 is impossible to signal that the error occurs without adding global
38 state variables and error checks around every object construction.
40 The act of causing this event to occur is referred to as "throwing
41 an exception". (Alternate terms include "raising an exception" or
42 "signaling an exception".) The term "throw" is used because control
43 is returned to the callers of the function that is signaling the
44 exception, and thus there is the concept of "throwing" the
45 exception up the call stack.
47 [ Add updated documentation on how to use this. ] */
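/* As a rough usage sketch (an illustration, not normative documentation):
   a front end expanding

       try { BODY } catch (T) { HANDLER }

   would drive the entry points in this file roughly as follows, where
   BODY_EXPR, HANDLER_EXPR and T_TYPE are hypothetical trees supplied by
   the front end:

       expand_eh_region_start ();
       expand_expr_stmt (BODY_EXPR);
       expand_start_all_catch ();
       expand_start_catch (T_TYPE);
       expand_expr_stmt (HANDLER_EXPR);
       expand_end_catch ();
       expand_end_all_catch ();

   Cleanup-only regions instead pair expand_eh_region_start with
   expand_eh_region_end_cleanup; see the individual functions below.  */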
57 #include "insn-config.h"
59 #include "integrate.h"
60 #include "hard-reg-set.h"
61 #include "basic-block.h"
63 #include "dwarf2asm.h"
64 #include "dwarf2out.h"
72 /* Provide defaults for stuff that may not be defined when using
74 #ifndef EH_RETURN_STACKADJ_RTX
75 #define EH_RETURN_STACKADJ_RTX 0
77 #ifndef EH_RETURN_HANDLER_RTX
78 #define EH_RETURN_HANDLER_RTX 0
80 #ifndef EH_RETURN_DATA_REGNO
81 #define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
85 /* Nonzero means enable synchronous exceptions for non-call instructions. */
86 int flag_non_call_exceptions;
88 /* Protect cleanup actions with must-not-throw regions, with a call
89 to the given failure handler. */
90 tree protect_cleanup_actions;
92 /* Return true if type A catches type B. */
93 int (*lang_eh_type_covers) PARAMS ((tree a, tree b));
95 /* Map a type to a runtime object to match type. */
96 tree (*lang_eh_runtime_type) PARAMS ((tree));
98 /* A list of labels used for exception handlers. */
99 rtx exception_handler_labels;
101 static int call_site_base;
102 static int sjlj_funcdef_number;
103 static htab_t type_to_runtime_map;
105 /* Describe the SjLj_Function_Context structure. */
106 static tree sjlj_fc_type_node;
107 static int sjlj_fc_call_site_ofs;
108 static int sjlj_fc_data_ofs;
109 static int sjlj_fc_personality_ofs;
110 static int sjlj_fc_lsda_ofs;
111 static int sjlj_fc_jbuf_ofs;
113 /* Describes one exception region. */
116 /* The immediately surrounding region. */
117 struct eh_region *outer;
119 /* The list of immediately contained regions. */
120 struct eh_region *inner;
121 struct eh_region *next_peer;
123 /* An identifier for this region. */
126 /* Each region does exactly one thing. */
132 ERT_ALLOWED_EXCEPTIONS,
138 /* Holds the action to perform based on the preceding type. */
140 /* A list of catch blocks, a surrounding try block,
141 and the label for continuing after a catch. */
143 struct eh_region *catch;
144 struct eh_region *last_catch;
145 struct eh_region *prev_try;
149 /* The list through the catch handlers, the type object
150 matched, and a pointer to the generated code. */
152 struct eh_region *next_catch;
153 struct eh_region *prev_catch;
158 /* A tree_list of allowed types. */
164 /* The type given by a call to "throw foo();", or discovered
170 /* Retain the cleanup expression even after expansion so that
171 we can match up fixup regions. */
176 /* The real region (by expression and by pointer) that fixup code
180 struct eh_region *real_region;
184 /* Entry point for this region's handler before landing pads are built. */
187 /* Entry point for this region's handler from the runtime eh library. */
190 /* Entry point for this region's handler from an inner region. */
191 rtx post_landing_pad;
193 /* The RESX insn for handing off control to the next outermost handler,
198 /* Used to save exception status for each function. */
201 /* The tree of all regions for this function. */
202 struct eh_region *region_tree;
204 /* The same information as an indexable array. */
205 struct eh_region **region_array;
207 /* The most recently open region. */
208 struct eh_region *cur_region;
210 /* This is the region for which we are processing catch blocks. */
211 struct eh_region *try_region;
213 /* A stack (TREE_LIST) of lists of handlers. The TREE_VALUE of each
214 node is itself a TREE_CHAINed list of handlers for regions that
215 are not yet closed. The TREE_VALUE of each entry contains the
216 handler for the corresponding entry on the ehstack. */
222 int built_landing_pads;
223 int last_region_number;
225 varray_type ttype_data;
226 varray_type ehspec_data;
227 varray_type action_record_data;
229 struct call_site_record
234 int call_site_data_used;
235 int call_site_data_size;
246 static void mark_eh_region PARAMS ((struct eh_region *));
248 static int t2r_eq PARAMS ((const PTR,
250 static hashval_t t2r_hash PARAMS ((const PTR));
251 static int t2r_mark_1 PARAMS ((PTR *, PTR));
252 static void t2r_mark PARAMS ((PTR));
253 static void add_type_for_runtime PARAMS ((tree));
254 static tree lookup_type_for_runtime PARAMS ((tree));
256 static struct eh_region *expand_eh_region_end PARAMS ((void));
258 static rtx get_exception_filter PARAMS ((void));
260 static void collect_eh_region_array PARAMS ((void));
261 static void resolve_fixup_regions PARAMS ((void));
262 static void remove_fixup_regions PARAMS ((void));
263 static void convert_from_eh_region_ranges_1 PARAMS ((rtx *, int *, int));
265 static struct eh_region *duplicate_eh_region_1 PARAMS ((struct eh_region *,
266 struct inline_remap *));
267 static void duplicate_eh_region_2 PARAMS ((struct eh_region *,
268 struct eh_region **));
269 static int ttypes_filter_eq PARAMS ((const PTR,
271 static hashval_t ttypes_filter_hash PARAMS ((const PTR));
272 static int ehspec_filter_eq PARAMS ((const PTR,
274 static hashval_t ehspec_filter_hash PARAMS ((const PTR));
275 static int add_ttypes_entry PARAMS ((htab_t, tree));
276 static int add_ehspec_entry PARAMS ((htab_t, htab_t,
278 static void assign_filter_values PARAMS ((void));
279 static void build_post_landing_pads PARAMS ((void));
280 static void connect_post_landing_pads PARAMS ((void));
281 static void dw2_build_landing_pads PARAMS ((void));
284 static bool sjlj_find_directly_reachable_regions
285 PARAMS ((struct sjlj_lp_info *));
286 static void sjlj_assign_call_site_values
287 PARAMS ((rtx, struct sjlj_lp_info *));
288 static void sjlj_mark_call_sites
289 PARAMS ((struct sjlj_lp_info *));
290 static void sjlj_emit_function_enter PARAMS ((rtx));
291 static void sjlj_emit_function_exit PARAMS ((void));
292 static void sjlj_emit_dispatch_table
293 PARAMS ((rtx, struct sjlj_lp_info *));
294 static void sjlj_build_landing_pads PARAMS ((void));
296 static void remove_exception_handler_label PARAMS ((rtx));
297 static void remove_eh_handler PARAMS ((struct eh_region *));
299 struct reachable_info;
301 /* The return value of reachable_next_level. */
304 /* The given exception is not processed by the given region. */
306 /* The given exception may need processing by the given region. */
308 /* The given exception is completely processed by the given region. */
310 /* The given exception is completely processed by the runtime. */
314 static int check_handled PARAMS ((tree, tree));
315 static void add_reachable_handler
316 PARAMS ((struct reachable_info *, struct eh_region *,
317 struct eh_region *));
318 static enum reachable_code reachable_next_level
319 PARAMS ((struct eh_region *, tree, struct reachable_info *));
321 static int action_record_eq PARAMS ((const PTR,
323 static hashval_t action_record_hash PARAMS ((const PTR));
324 static int add_action_record PARAMS ((htab_t, int, int));
325 static int collect_one_action_chain PARAMS ((htab_t,
326 struct eh_region *));
327 static int add_call_site PARAMS ((rtx, int));
329 static void push_uleb128 PARAMS ((varray_type *,
331 static void push_sleb128 PARAMS ((varray_type *, int));
332 static const char *eh_data_format_name PARAMS ((int));
333 #ifndef HAVE_AS_LEB128
334 static int dw2_size_of_call_site_table PARAMS ((void));
335 static int sjlj_size_of_call_site_table PARAMS ((void));
337 static void dw2_output_call_site_table PARAMS ((void));
338 static void sjlj_output_call_site_table PARAMS ((void));
341 /* Routine to see if exception handling is turned on.
342 DO_WARN is non-zero if we want to inform the user that exception
343 handling is turned off.
345 This is used to ensure that -fexceptions has been specified if the
346 compiler tries to use any exception-specific functions. */
352 if (! flag_exceptions)
354 static int warned = 0;
355 if (! warned && do_warn)
357 error ("exception handling disabled, use -fexceptions to enable");
369 ggc_add_rtx_root (&exception_handler_labels, 1);
370 ggc_add_tree_root (&protect_cleanup_actions, 1);
372 if (! flag_exceptions)
375 type_to_runtime_map = htab_create (31, t2r_hash, t2r_eq, NULL);
376 ggc_add_root (&type_to_runtime_map, 1, sizeof (htab_t), t2r_mark);
378 /* Create the SjLj_Function_Context structure. This should match
379 the definition in unwind-sjlj.c. */
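/* For reference, a sketch of the layout being mirrored (an assumption
   about unwind-sjlj.c, not a verbatim copy; the exact jbuf size and
   alignment are target-dependent, and the data member is word_mode):

       struct SjLj_Function_Context
       {
         struct SjLj_Function_Context *prev;
         int call_site;
         unsigned long data[4];
         void *personality;
         void *lsda;
         void *jbuf[JBUF_LEN];
       };

   The sjlj_fc_*_ofs variables cached below record the byte offsets of
   these fields for use from rtl.  */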
380 if (USING_SJLJ_EXCEPTIONS)
382 tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
384 sjlj_fc_type_node = make_lang_type (RECORD_TYPE);
385 ggc_add_tree_root (&sjlj_fc_type_node, 1);
387 f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
388 build_pointer_type (sjlj_fc_type_node));
389 DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;
391 f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
393 DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
395 tmp = build_index_type (build_int_2 (4 - 1, 0));
396 tmp = build_array_type (type_for_mode (word_mode, 1), tmp);
397 f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
398 DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
400 f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
402 DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;
404 f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
406 DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
408 #ifdef DONT_USE_BUILTIN_SETJMP
410 tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
412 /* Should be large enough for most systems; if it is not,
413 JMP_BUF_SIZE should be defined with the proper value. It will
414 also tend to be larger than necessary for most systems; a more
415 optimal port will define JMP_BUF_SIZE. */
416 tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
419 /* This is 2 for builtin_setjmp, plus whatever the target requires
420 via STACK_SAVEAREA_MODE (SAVE_NONLOCAL). */
421 tmp = build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
422 / GET_MODE_SIZE (Pmode)) + 2 - 1, 0);
424 tmp = build_index_type (tmp);
425 tmp = build_array_type (ptr_type_node, tmp);
426 f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
427 #ifdef DONT_USE_BUILTIN_SETJMP
428 /* We don't know what the alignment requirements of the
429 runtime's jmp_buf are. Overestimate. */
430 DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
431 DECL_USER_ALIGN (f_jbuf) = 1;
433 DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
435 TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
436 TREE_CHAIN (f_prev) = f_cs;
437 TREE_CHAIN (f_cs) = f_data;
438 TREE_CHAIN (f_data) = f_per;
439 TREE_CHAIN (f_per) = f_lsda;
440 TREE_CHAIN (f_lsda) = f_jbuf;
442 layout_type (sjlj_fc_type_node);
444 /* Cache the interesting field offsets so that we have
445 easy access from rtl. */
446 sjlj_fc_call_site_ofs
447 = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
448 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
450 = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
451 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
452 sjlj_fc_personality_ofs
453 = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
454 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
456 = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
457 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
459 = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
460 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
465 init_eh_for_function ()
467 cfun->eh = (struct eh_status *) xcalloc (1, sizeof (struct eh_status));
470 /* Mark EH for GC. */
473 mark_eh_region (region)
474 struct eh_region *region;
479 switch (region->type)
482 ggc_mark_tree (region->u.cleanup.exp);
485 ggc_mark_rtx (region->u.try.continue_label);
488 ggc_mark_tree (region->u.catch.type);
490 case ERT_ALLOWED_EXCEPTIONS:
491 ggc_mark_tree (region->u.allowed.type_list);
493 case ERT_MUST_NOT_THROW:
496 ggc_mark_tree (region->u.throw.type);
499 ggc_mark_tree (region->u.fixup.cleanup_exp);
505 ggc_mark_rtx (region->label);
506 ggc_mark_rtx (region->resume);
507 ggc_mark_rtx (region->landing_pad);
508 ggc_mark_rtx (region->post_landing_pad);
513 struct eh_status *eh;
520 /* If we've called collect_eh_region_array, use it. Otherwise walk
521 the tree non-recursively. */
522 if (eh->region_array)
524 for (i = eh->last_region_number; i > 0; --i)
526 struct eh_region *r = eh->region_array[i];
527 if (r && r->region_number == i)
531 else if (eh->region_tree)
533 struct eh_region *r = eh->region_tree;
539 else if (r->next_peer)
547 } while (r->next_peer == NULL);
554 ggc_mark_tree (eh->protect_list);
555 ggc_mark_rtx (eh->filter);
556 ggc_mark_rtx (eh->exc_ptr);
557 ggc_mark_tree_varray (eh->ttype_data);
559 if (eh->call_site_data)
561 for (i = eh->call_site_data_used - 1; i >= 0; --i)
562 ggc_mark_rtx (eh->call_site_data[i].landing_pad);
565 ggc_mark_rtx (eh->ehr_stackadj);
566 ggc_mark_rtx (eh->ehr_handler);
567 ggc_mark_rtx (eh->ehr_label);
569 ggc_mark_rtx (eh->sjlj_fc);
570 ggc_mark_rtx (eh->sjlj_exit_after);
577 struct eh_status *eh = f->eh;
579 if (eh->region_array)
582 for (i = eh->last_region_number; i > 0; --i)
584 struct eh_region *r = eh->region_array[i];
585 /* Mind we don't free a region struct more than once. */
586 if (r && r->region_number == i)
589 free (eh->region_array);
591 else if (eh->region_tree)
593 struct eh_region *next, *r = eh->region_tree;
598 else if (r->next_peer)
612 } while (r->next_peer == NULL);
621 VARRAY_FREE (eh->ttype_data);
622 VARRAY_FREE (eh->ehspec_data);
623 VARRAY_FREE (eh->action_record_data);
624 if (eh->call_site_data)
625 free (eh->call_site_data);
632 /* Start an exception handling region. All instructions emitted
633 after this point are considered to be part of the region until
634 expand_eh_region_end is invoked. */
637 expand_eh_region_start ()
639 struct eh_region *new_region;
640 struct eh_region *cur_region;
646 /* Insert a new blank region as a leaf in the tree. */
647 new_region = (struct eh_region *) xcalloc (1, sizeof (*new_region));
648 cur_region = cfun->eh->cur_region;
649 new_region->outer = cur_region;
652 new_region->next_peer = cur_region->inner;
653 cur_region->inner = new_region;
657 new_region->next_peer = cfun->eh->region_tree;
658 cfun->eh->region_tree = new_region;
660 cfun->eh->cur_region = new_region;
662 /* Create a note marking the start of this region. */
663 new_region->region_number = ++cfun->eh->last_region_number;
664 note = emit_note (NULL, NOTE_INSN_EH_REGION_BEG);
665 NOTE_EH_HANDLER (note) = new_region->region_number;
668 /* Common code to end a region. Returns the region just ended. */
670 static struct eh_region *
671 expand_eh_region_end ()
673 struct eh_region *cur_region = cfun->eh->cur_region;
676 /* Create a note marking the end of this region. */
677 note = emit_note (NULL, NOTE_INSN_EH_REGION_END);
678 NOTE_EH_HANDLER (note) = cur_region->region_number;
681 cfun->eh->cur_region = cur_region->outer;
686 /* End an exception handling region for a cleanup. HANDLER is an
687 expression to expand for the cleanup. */
690 expand_eh_region_end_cleanup (handler)
693 struct eh_region *region;
700 region = expand_eh_region_end ();
701 region->type = ERT_CLEANUP;
702 region->label = gen_label_rtx ();
703 region->u.cleanup.exp = handler;
705 around_label = gen_label_rtx ();
706 emit_jump (around_label);
708 emit_label (region->label);
710 if (protect_cleanup_actions)
711 expand_eh_region_start ();
713 /* In case this cleanup involves an inline destructor with a try block in
714 it, we need to save the EH return data registers around it. */
715 data_save[0] = gen_reg_rtx (Pmode);
716 emit_move_insn (data_save[0], get_exception_pointer ());
717 data_save[1] = gen_reg_rtx (word_mode);
718 emit_move_insn (data_save[1], get_exception_filter ());
720 expand_expr (handler, const0_rtx, VOIDmode, 0);
722 emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
723 emit_move_insn (cfun->eh->filter, data_save[1]);
725 if (protect_cleanup_actions)
726 expand_eh_region_end_must_not_throw (protect_cleanup_actions);
728 /* We need any stack adjustment complete before the around_label. */
729 do_pending_stack_adjust ();
731 /* We delay the generation of the _Unwind_Resume until we generate
732 landing pads. We emit a marker here so as to get good control
733 flow data in the meantime. */
735 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
738 emit_label (around_label);
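/* Illustrative use (a sketch; DTOR_CALL is a hypothetical tree built by
   the front end): protecting a region of code with a destructor-like
   cleanup amounts to

       expand_eh_region_start ();
       ... expand the protected code ...
       expand_eh_region_end_cleanup (DTOR_CALL);

   If the language has set protect_cleanup_actions, the cleanup body is
   additionally wrapped in a must-not-throw region, as done above.  */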
741 /* End an exception handling region for a try block, and prepare
742 for subsequent calls to expand_start_catch. */
745 expand_start_all_catch ()
747 struct eh_region *region;
752 region = expand_eh_region_end ();
753 region->type = ERT_TRY;
754 region->u.try.prev_try = cfun->eh->try_region;
755 region->u.try.continue_label = gen_label_rtx ();
757 cfun->eh->try_region = region;
759 emit_jump (region->u.try.continue_label);
762 /* Begin a catch clause. TYPE is the type caught, or null if this is
763 a catch-all clause. */
766 expand_start_catch (type)
769 struct eh_region *t, *c, *l;
775 add_type_for_runtime (type);
776 expand_eh_region_start ();
778 t = cfun->eh->try_region;
779 c = cfun->eh->cur_region;
781 c->u.catch.type = type;
782 c->label = gen_label_rtx ();
784 l = t->u.try.last_catch;
785 c->u.catch.prev_catch = l;
787 l->u.catch.next_catch = c;
790 t->u.try.last_catch = c;
792 emit_label (c->label);
795 /* End a catch clause. Control will resume after the try/catch block. */
800 struct eh_region *try_region, *catch_region;
805 catch_region = expand_eh_region_end ();
806 try_region = cfun->eh->try_region;
808 emit_jump (try_region->u.try.continue_label);
811 /* End a sequence of catch handlers for a try block. */
814 expand_end_all_catch ()
816 struct eh_region *try_region;
821 try_region = cfun->eh->try_region;
822 cfun->eh->try_region = try_region->u.try.prev_try;
824 emit_label (try_region->u.try.continue_label);
827 /* End an exception region for an exception type filter. ALLOWED is a
828 TREE_LIST of types to be matched by the runtime. FAILURE is an
829 expression to invoke if a mismatch occurs. */
832 expand_eh_region_end_allowed (allowed, failure)
833 tree allowed, failure;
835 struct eh_region *region;
841 region = expand_eh_region_end ();
842 region->type = ERT_ALLOWED_EXCEPTIONS;
843 region->u.allowed.type_list = allowed;
844 region->label = gen_label_rtx ();
846 for (; allowed ; allowed = TREE_CHAIN (allowed))
847 add_type_for_runtime (TREE_VALUE (allowed));
849 /* We must emit the call to FAILURE here, so that if this function
850 throws a different exception, it will be processed by the
853 around_label = gen_label_rtx ();
854 emit_jump (around_label);
856 emit_label (region->label);
857 expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
859 emit_label (around_label);
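/* Illustrative use (a sketch; A_TYPE, B_TYPE and UNEXPECTED_CALL are
   hypothetical trees): for a C++ function declared
   `void f () throw (A, B)', a front end could emit

       expand_eh_region_start ();
       ... expand the function body ...
       expand_eh_region_end_allowed (tree_cons (NULL_TREE, A_TYPE,
                                                tree_cons (NULL_TREE, B_TYPE,
                                                           NULL_TREE)),
                                     UNEXPECTED_CALL);

   where UNEXPECTED_CALL expands to something like __cxa_call_unexpected.  */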
862 /* End an exception region for a must-not-throw filter. FAILURE is an
863 expression to invoke if an uncaught exception propagates this far.
865 This is conceptually identical to expand_eh_region_end_allowed with
866 an empty allowed list (if you passed "std::terminate" instead of
867 "__cxa_call_unexpected"), but they are represented differently in
871 expand_eh_region_end_must_not_throw (failure)
874 struct eh_region *region;
880 region = expand_eh_region_end ();
881 region->type = ERT_MUST_NOT_THROW;
882 region->label = gen_label_rtx ();
884 /* We must emit the call to FAILURE here, so that if this function
885 throws a different exception, it will be processed by the
888 around_label = gen_label_rtx ();
889 emit_jump (around_label);
891 emit_label (region->label);
892 expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
894 emit_label (around_label);
897 /* End an exception region for a throw. No handling goes on here,
898 but it's the easiest way for the front-end to indicate what type
902 expand_eh_region_end_throw (type)
905 struct eh_region *region;
910 region = expand_eh_region_end ();
911 region->type = ERT_THROW;
912 region->u.throw.type = type;
915 /* End a fixup region. Within this region the cleanups for the immediately
916 enclosing region are _not_ run. This is used for goto cleanup to avoid
917 destroying an object twice.
919 This would be an extraordinarily simple prospect, were it not for the
920 fact that we don't actually know what the immediately enclosing region
921 is. This is because expand_cleanups is currently
922 generating a sequence that it will insert somewhere else. We collect
923 the proper notion of "enclosing" in convert_from_eh_region_ranges. */
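/* An illustrative example of the situation (a sketch, not taken from any
   particular front end):

       {
         T obj;                    cleanup region runs obj's destructor
         if (cond)
           goto out;               leaving the scope also destroys obj
       }
     out:;

   The destructor call that expand_cleanups emits for the goto must not
   itself appear to be protected by obj's cleanup region, or obj could be
   destroyed twice; the fixup region marks such code so that
   convert_from_eh_region_ranges can attach it to the proper enclosing
   region.  */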
926 expand_eh_region_end_fixup (handler)
929 struct eh_region *fixup;
934 fixup = expand_eh_region_end ();
935 fixup->type = ERT_FIXUP;
936 fixup->u.fixup.cleanup_exp = handler;
939 /* Return an rtl expression for a pointer to the exception object
943 get_exception_pointer ()
945 rtx exc_ptr = cfun->eh->exc_ptr;
948 exc_ptr = gen_reg_rtx (Pmode);
949 cfun->eh->exc_ptr = exc_ptr;
954 /* Return an rtl expression for the exception dispatch filter
958 get_exception_filter ()
960 rtx filter = cfun->eh->filter;
963 filter = gen_reg_rtx (word_mode);
964 cfun->eh->filter = filter;
969 /* Begin a region that will contain entries created with
970 add_partial_entry. */
973 begin_protect_partials ()
975 /* Push room for a new list. */
976 cfun->eh->protect_list
977 = tree_cons (NULL_TREE, NULL_TREE, cfun->eh->protect_list);
980 /* Start a new exception region for a region of code that has a
981 cleanup action and push the HANDLER for the region onto
982 protect_list. All of the regions created with add_partial_entry
983 will be ended when end_protect_partials is invoked. */
986 add_partial_entry (handler)
989 expand_eh_region_start ();
991 /* ??? This comment was old before the most recent rewrite. We
992 really ought to fix the callers at some point. */
993 /* For backwards compatibility, we allow callers to omit calls to
994 begin_protect_partials for the outermost region. So, we must
995 explicitly do so here. */
996 if (!cfun->eh->protect_list)
997 begin_protect_partials ();
999 /* Add this entry to the front of the list. */
1000 TREE_VALUE (cfun->eh->protect_list)
1001 = tree_cons (NULL_TREE, handler, TREE_VALUE (cfun->eh->protect_list));
1004 /* End all the pending exception regions on protect_list. */
1007 end_protect_partials ()
1011 /* ??? This comment was old before the most recent rewrite. We
1012 really ought to fix the callers at some point. */
1013 /* For backwards compatibility, we allow callers to omit the call to
1014 begin_protect_partials for the outermost region. So,
1015 PROTECT_LIST may be NULL. */
1016 if (!cfun->eh->protect_list)
1019 /* Pop the topmost entry. */
1020 t = TREE_VALUE (cfun->eh->protect_list);
1021 cfun->eh->protect_list = TREE_CHAIN (cfun->eh->protect_list);
1023 /* End all the exception regions. */
1024 for (; t; t = TREE_CHAIN (t))
1025 expand_eh_region_end_cleanup (TREE_VALUE (t));
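/* Illustrative pairing (a sketch; the CLEANUP_n are hypothetical trees):
   for a block that constructs several objects needing cleanup, a front
   end would emit

       begin_protect_partials ();
       ... expand first construction ...
       add_partial_entry (CLEANUP_1);
       ... expand second construction ...
       add_partial_entry (CLEANUP_2);
       ...
       end_protect_partials ();

   Each add_partial_entry opens a region; end_protect_partials closes
   them all by expanding a cleanup region for every pushed handler.  */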
1029 /* This section is for the exception handling specific optimization pass. */
1031 /* Provide random access into the exception region tree. It's just as simple to
1032 collect the regions this way as in expand_eh_region_start, but
1033 without having to realloc memory. */
1036 collect_eh_region_array ()
1038 struct eh_region **array, *i;
1040 i = cfun->eh->region_tree;
1044 array = xcalloc (cfun->eh->last_region_number + 1, sizeof (*array));
1045 cfun->eh->region_array = array;
1049 array[i->region_number] = i;
1051 /* If there are sub-regions, process them. */
1054 /* If there are peers, process them. */
1055 else if (i->next_peer)
1057 /* Otherwise, step back up the tree to the next peer. */
1064 } while (i->next_peer == NULL);
1071 resolve_fixup_regions ()
1073 int i, j, n = cfun->eh->last_region_number;
1075 for (i = 1; i <= n; ++i)
1077 struct eh_region *fixup = cfun->eh->region_array[i];
1078 struct eh_region *cleanup;
1080 if (! fixup || fixup->type != ERT_FIXUP)
1083 for (j = 1; j <= n; ++j)
1085 cleanup = cfun->eh->region_array[j];
1086 if (cleanup->type == ERT_CLEANUP
1087 && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
1093 fixup->u.fixup.real_region = cleanup->outer;
1097 /* Now that we've discovered what region actually encloses a fixup,
1098 we can shuffle pointers and remove them from the tree. */
1101 remove_fixup_regions ()
1105 struct eh_region *fixup;
1107 /* Walk the insn chain and adjust the REG_EH_REGION numbers
1108 for instructions referencing fixup regions. This is only
1109 strictly necessary for fixup regions with no parent, but
1110 doesn't hurt to do it for all regions. */
1111 for (insn = get_insns(); insn ; insn = NEXT_INSN (insn))
1113 && (note = find_reg_note (insn, REG_EH_REGION, NULL))
1114 && INTVAL (XEXP (note, 0)) > 0
1115 && (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
1116 && fixup->type == ERT_FIXUP)
1118 if (fixup->u.fixup.real_region)
1119 XEXP (note, 1) = GEN_INT (fixup->u.fixup.real_region->region_number);
1121 remove_note (insn, note);
1124 /* Remove the fixup regions from the tree. */
1125 for (i = cfun->eh->last_region_number; i > 0; --i)
1127 fixup = cfun->eh->region_array[i];
1131 /* Allow GC to maybe free some memory. */
1132 if (fixup->type == ERT_CLEANUP)
1133 fixup->u.cleanup.exp = NULL_TREE;
1135 if (fixup->type != ERT_FIXUP)
1140 struct eh_region *parent, *p, **pp;
1142 parent = fixup->u.fixup.real_region;
1144 /* Fix up the children's parent pointers; find the end of
1146 for (p = fixup->inner; ; p = p->next_peer)
1153 /* In the tree of cleanups, only outer-inner ordering matters.
1154 So link the children back in anywhere at the correct level. */
1156 pp = &parent->inner;
1158 pp = &cfun->eh->region_tree;
1161 fixup->inner = NULL;
1164 remove_eh_handler (fixup);
1168 /* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
1169 can_throw instruction in the region. */
1172 convert_from_eh_region_ranges_1 (pinsns, orig_sp, cur)
1180 for (insn = *pinsns; insn ; insn = next)
1182 next = NEXT_INSN (insn);
1183 if (GET_CODE (insn) == NOTE)
1185 int kind = NOTE_LINE_NUMBER (insn);
1186 if (kind == NOTE_INSN_EH_REGION_BEG
1187 || kind == NOTE_INSN_EH_REGION_END)
1189 if (kind == NOTE_INSN_EH_REGION_BEG)
1191 struct eh_region *r;
1194 cur = NOTE_EH_HANDLER (insn);
1196 r = cfun->eh->region_array[cur];
1197 if (r->type == ERT_FIXUP)
1199 r = r->u.fixup.real_region;
1200 cur = r ? r->region_number : 0;
1202 else if (r->type == ERT_CATCH)
1205 cur = r ? r->region_number : 0;
1211 /* Removing the first insn of a CALL_PLACEHOLDER sequence
1212 requires extra care to adjust sequence start. */
1213 if (insn == *pinsns)
1219 else if (INSN_P (insn))
1222 && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1223 /* Calls can always potentially throw exceptions, unless
1224 they have a REG_EH_REGION note with a value of 0 or less,
1225 which should be the only possible kind so far. */
1226 && (GET_CODE (insn) == CALL_INSN
1227 /* If we wanted exceptions for non-call insns, then
1228 any may_trap_p instruction could throw. */
1229 || (flag_non_call_exceptions
1230 && may_trap_p (PATTERN (insn)))))
1232 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
1236 if (GET_CODE (insn) == CALL_INSN
1237 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1239 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
1241 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
1243 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
1254 convert_from_eh_region_ranges ()
1259 collect_eh_region_array ();
1260 resolve_fixup_regions ();
1262 stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
1263 insns = get_insns ();
1264 convert_from_eh_region_ranges_1 (&insns, stack, 0);
1267 remove_fixup_regions ();
1271 find_exception_handler_labels ()
1273 rtx list = NULL_RTX;
1276 free_EXPR_LIST_list (&exception_handler_labels);
1278 if (cfun->eh->region_tree == NULL)
1281 for (i = cfun->eh->last_region_number; i > 0; --i)
1283 struct eh_region *region = cfun->eh->region_array[i];
1288 if (cfun->eh->built_landing_pads)
1289 lab = region->landing_pad;
1291 lab = region->label;
1294 list = alloc_EXPR_LIST (0, lab, list);
1297 /* For sjlj exceptions, need the return label to remain live until
1298 after landing pad generation. */
1299 if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
1300 list = alloc_EXPR_LIST (0, return_label, list);
1302 exception_handler_labels = list;
1306 static struct eh_region *
1307 duplicate_eh_region_1 (o, map)
1308 struct eh_region *o;
1309 struct inline_remap *map;
1312 = (struct eh_region *) xcalloc (1, sizeof (struct eh_region));
1314 n->region_number = o->region_number + cfun->eh->last_region_number;
1320 case ERT_MUST_NOT_THROW:
1324 if (o->u.try.continue_label)
1325 n->u.try.continue_label
1326 = get_label_from_map (map,
1327 CODE_LABEL_NUMBER (o->u.try.continue_label));
1331 n->u.catch.type = o->u.catch.type;
1334 case ERT_ALLOWED_EXCEPTIONS:
1335 n->u.allowed.type_list = o->u.allowed.type_list;
1339 n->u.throw.type = o->u.throw.type;
1346 n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
1349 n->resume = map->insn_map[INSN_UID (o->resume)];
1350 if (n->resume == NULL)
1358 duplicate_eh_region_2 (o, n_array)
1359 struct eh_region *o;
1360 struct eh_region **n_array;
1362 struct eh_region *n = n_array[o->region_number];
1367 n->u.try.catch = n_array[o->u.try.catch->region_number];
1368 n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
1372 if (o->u.catch.next_catch)
1373 n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
1374 if (o->u.catch.prev_catch)
1375 n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
1383 n->outer = n_array[o->outer->region_number];
1385 n->inner = n_array[o->inner->region_number];
1387 n->next_peer = n_array[o->next_peer->region_number];
1391 duplicate_eh_regions (ifun, map)
1392 struct function *ifun;
1393 struct inline_remap *map;
1395 int ifun_last_region_number = ifun->eh->last_region_number;
1396 struct eh_region **n_array, *root, *cur;
1399 if (ifun_last_region_number == 0)
1402 n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));
1404 for (i = 1; i <= ifun_last_region_number; ++i)
1406 cur = ifun->eh->region_array[i];
1407 if (!cur || cur->region_number != i)
1409 n_array[i] = duplicate_eh_region_1 (cur, map);
1411 for (i = 1; i <= ifun_last_region_number; ++i)
1413 cur = ifun->eh->region_array[i];
1414 if (!cur || cur->region_number != i)
1416 duplicate_eh_region_2 (cur, n_array);
1419 root = n_array[ifun->eh->region_tree->region_number];
1420 cur = cfun->eh->cur_region;
1423 struct eh_region *p = cur->inner;
1426 while (p->next_peer)
1428 p->next_peer = root;
1433 for (i = 1; i <= ifun_last_region_number; ++i)
1434 if (n_array[i]->outer == NULL)
1435 n_array[i]->outer = cur;
1439 struct eh_region *p = cfun->eh->region_tree;
1442 while (p->next_peer)
1444 p->next_peer = root;
1447 cfun->eh->region_tree = root;
1452 i = cfun->eh->last_region_number;
1453 cfun->eh->last_region_number = i + ifun_last_region_number;
1458 /* ??? Move from tree.c to tree.h. */
1459 #define TYPE_HASH(TYPE) ((HOST_WIDE_INT) (TYPE) & 0777777)
1462 t2r_eq (pentry, pdata)
1466 tree entry = (tree) pentry;
1467 tree data = (tree) pdata;
1469 return TREE_PURPOSE (entry) == data;
1476 tree entry = (tree) pentry;
1477 return TYPE_HASH (TREE_PURPOSE (entry));
1481 t2r_mark_1 (slot, data)
1483 PTR data ATTRIBUTE_UNUSED;
1485 tree contents = (tree) *slot;
1486 ggc_mark_tree (contents);
1494 htab_traverse (*(htab_t *)addr, t2r_mark_1, NULL);
1498 add_type_for_runtime (type)
1503 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1504 TYPE_HASH (type), INSERT);
1507 tree runtime = (*lang_eh_runtime_type) (type);
1508 *slot = tree_cons (type, runtime, NULL_TREE);
1513 lookup_type_for_runtime (type)
1518 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1519 TYPE_HASH (type), NO_INSERT);
1521 /* We should have always inserted the data earlier. */
1522 return TREE_VALUE (*slot);
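/* Usage sketch (illustrative; my_lang_build_rtti is a hypothetical
   front-end hook):

       lang_eh_runtime_type = my_lang_build_rtti;
       ...
       add_type_for_runtime (type);
       runtime_obj = lookup_type_for_runtime (type);

   The first call records the object returned by the hook for TYPE; the
   second returns that same object for every later mention of TYPE.  */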
1526 /* Represent an entry in @TTypes for either catch actions
1527 or exception filter actions. */
1528 struct ttypes_filter
1534 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
1535 (a tree) for a @TTypes type node we are thinking about adding. */
1538 ttypes_filter_eq (pentry, pdata)
1542 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1543 tree data = (tree) pdata;
1545 return entry->t == data;
1549 ttypes_filter_hash (pentry)
1552 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1553 return TYPE_HASH (entry->t);
1556 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
1557 exception specification list we are thinking about adding. */
1558 /* ??? Currently we use the type lists in the order given. Someone
1559 should put these in some canonical order. */
1562 ehspec_filter_eq (pentry, pdata)
1566 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1567 const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
1569 return type_list_equal (entry->t, data->t);
1572 /* Hash function for exception specification lists. */
1575 ehspec_filter_hash (pentry)
1578 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1582 for (list = entry->t; list ; list = TREE_CHAIN (list))
1583 h = (h << 5) + (h >> 27) + TYPE_HASH (TREE_VALUE (list));
1587 /* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
1588 up the search. Return the filter value to be used. */
1591 add_ttypes_entry (ttypes_hash, type)
1595 struct ttypes_filter **slot, *n;
1597 slot = (struct ttypes_filter **)
1598 htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);
1600 if ((n = *slot) == NULL)
1602 /* Filter value is a 1 based table index. */
1604 n = (struct ttypes_filter *) xmalloc (sizeof (*n));
1606 n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
1609 VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
1615 /* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
1616 to speed up the search. Return the filter value to be used. */
1619 add_ehspec_entry (ehspec_hash, ttypes_hash, list)
1624 struct ttypes_filter **slot, *n;
1625 struct ttypes_filter dummy;
1628 slot = (struct ttypes_filter **)
1629 htab_find_slot (ehspec_hash, &dummy, INSERT);
1631 if ((n = *slot) == NULL)
1633 /* Filter value is a -1 based byte index into a uleb128 buffer. */
1635 n = (struct ttypes_filter *) xmalloc (sizeof (*n));
1637 n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
1640 /* Look up each type in the list and encode its filter
1641 value as a uleb128. Terminate the list with 0. */
1642 for (; list ; list = TREE_CHAIN (list))
1643 push_uleb128 (&cfun->eh->ehspec_data,
1644 add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
1645 VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
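/* For reference, unsigned LEB128 encoding of the sort push_uleb128 is
   assumed to perform (a sketch, not the actual helper):

       do
         {
           unsigned char byte = value & 0x7f;
           value >>= 7;
           if (value)
             byte |= 0x80;
           VARRAY_PUSH_UCHAR (*data_area, byte);
         }
       while (value);
 */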
1651 /* Generate the action filter values to be used for CATCH and
1652 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
1653 we use lots of landing pads, and so every type or list can share
1654 the same filter value, which saves table space. */
1657 assign_filter_values ()
1660 htab_t ttypes, ehspec;
1662 VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
1663 VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");
1665 ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
1666 ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
1668 for (i = cfun->eh->last_region_number; i > 0; --i)
1670 struct eh_region *r = cfun->eh->region_array[i];
1672 /* Mind we don't process a region more than once. */
1673 if (!r || r->region_number != i)
1679 r->u.catch.filter = add_ttypes_entry (ttypes, r->u.catch.type);
1682 case ERT_ALLOWED_EXCEPTIONS:
1684 = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
1692 htab_delete (ttypes);
1693 htab_delete (ehspec);
1697 build_post_landing_pads ()
1701 for (i = cfun->eh->last_region_number; i > 0; --i)
1703 struct eh_region *region = cfun->eh->region_array[i];
1706 /* Mind we don't process a region more than once. */
1707 if (!region || region->region_number != i)
1710 switch (region->type)
1713 /* ??? Collect the set of all non-overlapping catch handlers
1714 all the way up the chain until blocked by a cleanup. */
1715 /* ??? Outer try regions can share landing pads with inner
1716 try regions if the types are completely non-overlapping,
1717 and there are no intervening cleanups. */
1719 region->post_landing_pad = gen_label_rtx ();
1723 emit_label (region->post_landing_pad);
1725 /* ??? It is mighty inconvenient to call back into the
1726 switch statement generation code in expand_end_case.
1727 Rapid prototyping sez a sequence of ifs. */
1729 struct eh_region *c;
1730 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
1732 /* ??? _Unwind_ForcedUnwind wants no match here. */
1733 if (c->u.catch.type == NULL)
1734 emit_jump (c->label);
1736 emit_cmp_and_jump_insns (cfun->eh->filter,
1737 GEN_INT (c->u.catch.filter),
1738 EQ, NULL_RTX, word_mode,
1743 /* We delay the generation of the _Unwind_Resume until we generate
1744 landing pads. We emit a marker here so as to get good control
1745 flow data in the meantime. */
1747 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1753 emit_insns_before (seq, region->u.try.catch->label);
1756 case ERT_ALLOWED_EXCEPTIONS:
1757 region->post_landing_pad = gen_label_rtx ();
1761 emit_label (region->post_landing_pad);
1763 emit_cmp_and_jump_insns (cfun->eh->filter,
1764 GEN_INT (region->u.allowed.filter),
1765 EQ, NULL_RTX, word_mode, 0, 0,
1768 /* We delay the generation of the _Unwind_Resume until we generate
1769 landing pads. We emit a marker here so as to get good control
1770 flow data in the meantime. */
1772 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1778 emit_insns_before (seq, region->label);
1782 case ERT_MUST_NOT_THROW:
1783 region->post_landing_pad = region->label;
1788 /* Nothing to do. */
1797 /* Replace RESX patterns with jumps to the next handler if any, or calls to
1798 _Unwind_Resume otherwise. */
1801 connect_post_landing_pads ()
1805 for (i = cfun->eh->last_region_number; i > 0; --i)
1807 struct eh_region *region = cfun->eh->region_array[i];
1808 struct eh_region *outer;
1811 /* Mind we don't process a region more than once. */
1812 if (!region || region->region_number != i)
1815 /* If there is no RESX, or it has been deleted by flow, there's
1816 nothing to fix up. */
1817 if (! region->resume || INSN_DELETED_P (region->resume))
1820 /* Search for another landing pad in this function. */
1821 for (outer = region->outer; outer ; outer = outer->outer)
1822 if (outer->post_landing_pad)
1828 emit_jump (outer->post_landing_pad);
1830 emit_library_call (unwind_resume_libfunc, LCT_THROW,
1831 VOIDmode, 1, cfun->eh->exc_ptr, Pmode);
1835 emit_insns_before (seq, region->resume);
1837 /* Leave the RESX to be deleted by flow. */
1843 dw2_build_landing_pads ()
1847 for (i = cfun->eh->last_region_number; i > 0; --i)
1849 struct eh_region *region = cfun->eh->region_array[i];
1852 /* Mind we don't process a region more than once. */
1853 if (!region || region->region_number != i)
1856 if (region->type != ERT_CLEANUP
1857 && region->type != ERT_TRY
1858 && region->type != ERT_ALLOWED_EXCEPTIONS)
1863 region->landing_pad = gen_label_rtx ();
1864 emit_label (region->landing_pad);
1866 #ifdef HAVE_exception_receiver
1867 if (HAVE_exception_receiver)
1868 emit_insn (gen_exception_receiver ());
1871 #ifdef HAVE_nonlocal_goto_receiver
1872 if (HAVE_nonlocal_goto_receiver)
1873 emit_insn (gen_nonlocal_goto_receiver ());
1878 /* If the eh_return data registers are call-saved, then we
1879 won't have considered them clobbered from the call that
1880 threw. Kill them now. */
1883 unsigned r = EH_RETURN_DATA_REGNO (j);
1884 if (r == INVALID_REGNUM)
1886 if (! call_used_regs[r])
1887 emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
1890 emit_move_insn (cfun->eh->exc_ptr,
1891 gen_rtx_REG (Pmode, EH_RETURN_DATA_REGNO (0)));
1892 emit_move_insn (cfun->eh->filter,
1893 gen_rtx_REG (Pmode, EH_RETURN_DATA_REGNO (1)));
1898 emit_insns_before (seq, region->post_landing_pad);
1905 int directly_reachable;
1908 int call_site_index;
1912 sjlj_find_directly_reachable_regions (lp_info)
1913 struct sjlj_lp_info *lp_info;
1916 bool found_one = false;
1918 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1920 struct eh_region *region;
1924 if (! INSN_P (insn))
1927 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1928 if (!note || INTVAL (XEXP (note, 0)) <= 0)
1931 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
1933 type_thrown = NULL_TREE;
1934 if (region->type == ERT_THROW)
1936 type_thrown = region->u.throw.type;
1937 region = region->outer;
1940 /* Find the first containing region that might handle the exception.
1941 That's the landing pad to which we will transfer control. */
1942 for (; region; region = region->outer)
1943 if (reachable_next_level (region, type_thrown, 0) != RNL_NOT_CAUGHT)
1948 lp_info[region->region_number].directly_reachable = 1;
1957 sjlj_assign_call_site_values (dispatch_label, lp_info)
1959 struct sjlj_lp_info *lp_info;
1964 /* First task: build the action table. */
1966 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
1967 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
1969 for (i = cfun->eh->last_region_number; i > 0; --i)
1970 if (lp_info[i].directly_reachable)
1972 struct eh_region *r = cfun->eh->region_array[i];
1973 r->landing_pad = dispatch_label;
1974 lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
1975 if (lp_info[i].action_index != -1)
1976 cfun->uses_eh_lsda = 1;
1979 htab_delete (ar_hash);
1981 /* Next: assign dispatch values. In dwarf2 terms, this would be the
1982 landing pad label for the region. For sjlj though, there is one
1983 common landing pad from which we dispatch to the post-landing pads.
1985 A region receives a dispatch index if it is directly reachable
1986 and requires in-function processing. Regions that share post-landing
1987 pads may share dispatch indices. */
1988 /* ??? Post-landing pad sharing doesn't actually happen at the moment
1989 (see build_post_landing_pads) so we don't bother checking for it. */
1992 for (i = cfun->eh->last_region_number; i > 0; --i)
1993 if (lp_info[i].directly_reachable
1994 && lp_info[i].action_index >= 0)
1995 lp_info[i].dispatch_index = index++;
1997 /* Finally: assign call-site values. In dwarf2 terms, this would be
1998 the region number assigned by convert_to_eh_region_ranges, but
1999 handles no-action and must-not-throw differently. */
2002 for (i = cfun->eh->last_region_number; i > 0; --i)
2003 if (lp_info[i].directly_reachable)
2005 int action = lp_info[i].action_index;
2007 /* Map must-not-throw to otherwise unused call-site index 0. */
2010 /* Map no-action to otherwise unused call-site index -1. */
2011 else if (action == -1)
2013 /* Otherwise, look it up in the table. */
2015 index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
2017 lp_info[i].call_site_index = index;
2022 sjlj_mark_call_sites (lp_info)
2023 struct sjlj_lp_info *lp_info;
2025 int last_call_site = -2;
2028 mem = change_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
2029 plus_constant (XEXP (cfun->eh->sjlj_fc, 0),
2030 sjlj_fc_call_site_ofs));
2032 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2034 struct eh_region *region;
2036 rtx note, before, p;
2038 /* Reset value tracking at extended basic block boundaries. */
2039 if (GET_CODE (insn) == CODE_LABEL)
2040 last_call_site = -2;
2042 if (! INSN_P (insn))
2045 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2048 /* Calls (and trapping insns) without notes are outside any
2049 exception handling region in this function. Mark them as
2051 if (GET_CODE (insn) == CALL_INSN
2052 || (flag_non_call_exceptions
2053 && may_trap_p (PATTERN (insn))))
2054 this_call_site = -1;
2060 /* Calls that are known to not throw need not be marked. */
2061 if (INTVAL (XEXP (note, 0)) <= 0)
2064 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2065 this_call_site = lp_info[region->region_number].call_site_index;
2068 if (this_call_site == last_call_site)
2071 /* Don't separate a call from its argument loads. */
2073 if (GET_CODE (insn) == CALL_INSN)
2075 HARD_REG_SET parm_regs;
2078 /* Since different machines initialize their parameter registers
2079 in different orders, assume nothing. Collect the set of all
2080 parameter registers. */
2081 CLEAR_HARD_REG_SET (parm_regs);
2083 for (p = CALL_INSN_FUNCTION_USAGE (insn); p ; p = XEXP (p, 1))
2084 if (GET_CODE (XEXP (p, 0)) == USE
2085 && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
2087 if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
2090 /* We only care about registers which can hold function
2092 if (! FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
2095 SET_HARD_REG_BIT (parm_regs, REGNO (XEXP (XEXP (p, 0), 0)));
2099 /* Search backward for the first set of a register in this set. */
2102 before = PREV_INSN (before);
2104 /* Given that we've done no other optimizations yet,
2105 the arguments should be immediately available. */
2106 if (GET_CODE (before) == CODE_LABEL)
2109 p = single_set (before);
2110 if (p && GET_CODE (SET_DEST (p)) == REG
2111 && REGNO (SET_DEST (p)) < FIRST_PSEUDO_REGISTER
2112 && TEST_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p))))
2114 CLEAR_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p)));
2121 emit_move_insn (mem, GEN_INT (this_call_site));
2125 emit_insns_before (p, before);
2126 last_call_site = this_call_site;
2130 /* Construct the SjLj_Function_Context. */
2133 sjlj_emit_function_enter (dispatch_label)
2136 rtx fn_begin, fc, mem, seq;
2138 fc = cfun->eh->sjlj_fc;
2142 mem = change_address (fc, Pmode,
2143 plus_constant (XEXP (fc, 0), sjlj_fc_personality_ofs));
2144 emit_move_insn (mem, eh_personality_libfunc);
2146 mem = change_address (fc, Pmode,
2147 plus_constant (XEXP (fc, 0), sjlj_fc_lsda_ofs));
2148 if (cfun->uses_eh_lsda)
2151 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", sjlj_funcdef_number);
2152 emit_move_insn (mem, gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)));
2155 emit_move_insn (mem, const0_rtx);
2157 #ifdef DONT_USE_BUILTIN_SETJMP
2160 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_NORMAL,
2161 TYPE_MODE (integer_type_node), 1,
2162 plus_constant (XEXP (fc, 0),
2163 sjlj_fc_jbuf_ofs), Pmode);
2165 note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
2166 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
2168 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2169 TYPE_MODE (integer_type_node), 0, 0,
2173 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2177 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2178 1, XEXP (fc, 0), Pmode);
2183 /* ??? Instead of doing this at the beginning of the function,
2184 do this in a block that is at loop level 0 and dominates all
2185 can_throw_internal instructions. */
2187 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2188 if (GET_CODE (fn_begin) == NOTE
2189 && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2191 emit_insns_after (seq, fn_begin);
2194 /* Call back from expand_function_end to know where we should put
2195 the call to unwind_sjlj_unregister_libfunc if needed. */
2198 sjlj_emit_function_exit_after (after)
2201 cfun->eh->sjlj_exit_after = after;
2205 sjlj_emit_function_exit ()
2211 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2212 1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);
2217 /* ??? Really this can be done in any block at loop level 0 that
2218 post-dominates all can_throw_internal instructions. This is
2219 the last possible moment. */
2221 emit_insns_after (seq, cfun->eh->sjlj_exit_after);
2225 sjlj_emit_dispatch_table (dispatch_label, lp_info)
2227 struct sjlj_lp_info *lp_info;
2229 int i, first_reachable;
2230 rtx mem, dispatch, seq, fc;
2232 fc = cfun->eh->sjlj_fc;
2236 emit_label (dispatch_label);
2238 #ifndef DONT_USE_BUILTIN_SETJMP
2239 expand_builtin_setjmp_receiver (dispatch_label);
2242 /* Load up dispatch index, exc_ptr and filter values from the
2243 function context. */
2244 mem = change_address (fc, TYPE_MODE (integer_type_node),
2245 plus_constant (XEXP (fc, 0), sjlj_fc_call_site_ofs));
2246 dispatch = copy_to_reg (mem);
2248 mem = change_address (fc, word_mode,
2249 plus_constant (XEXP (fc, 0), sjlj_fc_data_ofs));
2250 if (word_mode != Pmode)
2252 #ifdef POINTERS_EXTEND_UNSIGNED
2253 mem = convert_memory_address (Pmode, mem);
2255 mem = convert_to_mode (Pmode, mem, 0);
2258 emit_move_insn (cfun->eh->exc_ptr, mem);
2260 mem = change_address (fc, word_mode,
2261 plus_constant (XEXP (fc, 0),
2262 sjlj_fc_data_ofs + UNITS_PER_WORD));
2263 emit_move_insn (cfun->eh->filter, mem);
2265 /* Jump to one of the directly reachable regions. */
2266 /* ??? This really ought to be using a switch statement. */
2268 first_reachable = 0;
2269 for (i = cfun->eh->last_region_number; i > 0; --i)
2271 if (! lp_info[i].directly_reachable
2272 || lp_info[i].action_index < 0)
2275 if (! first_reachable)
2277 first_reachable = i;
2281 emit_cmp_and_jump_insns (dispatch,
2282 GEN_INT (lp_info[i].dispatch_index), EQ,
2283 NULL_RTX, TYPE_MODE (integer_type_node), 0, 0,
2284 cfun->eh->region_array[i]->post_landing_pad);
2290 emit_insns_before (seq, (cfun->eh->region_array[first_reachable]
2291 ->post_landing_pad));
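/* The dispatch code built above amounts to (an illustrative sketch):

       receiver:
         dispatch = fc.call_site;
         exc_ptr  = fc.data[0];
         filter   = fc.data[1];
         if (dispatch == D_1) goto post_landing_pad_1;
         if (dispatch == D_2) goto post_landing_pad_2;
         ...
         goto post_landing_pad_of_the_first_reachable_region;

   where the D_i are the dispatch_index values assigned by
   sjlj_assign_call_site_values, and the fall-through case is the first
   directly reachable region, which needs no explicit comparison.  */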
2295 sjlj_build_landing_pads ()
2297 struct sjlj_lp_info *lp_info;
2299 lp_info = (struct sjlj_lp_info *) xcalloc (cfun->eh->last_region_number + 1,
2300 sizeof (struct sjlj_lp_info));
2302 if (sjlj_find_directly_reachable_regions (lp_info))
2304 rtx dispatch_label = gen_label_rtx ();
2307 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2308 int_size_in_bytes (sjlj_fc_type_node),
2309 TYPE_ALIGN (sjlj_fc_type_node));
2311 sjlj_assign_call_site_values (dispatch_label, lp_info);
2312 sjlj_mark_call_sites (lp_info);
2314 sjlj_emit_function_enter (dispatch_label);
2315 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2316 sjlj_emit_function_exit ();
2323 finish_eh_generation ()
2325 /* Nothing to do if no regions created. */
2326 if (cfun->eh->region_tree == NULL)
2329 /* The object here is to provide find_basic_blocks with detailed
2330 information (via reachable_handlers) on how exception control
2331 flows within the function. In this first pass, we can include
2332 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2333 regions, and hope that it will be useful in deleting unreachable
2334 handlers. Subsequently, we will generate landing pads which will
2335 connect many of the handlers, and then type information will not
2336 be effective. Still, this is a win over previous implementations. */
2338 jump_optimize_minimal (get_insns ());
2339 find_basic_blocks (get_insns (), max_reg_num (), 0);
2342 /* These registers are used by the landing pads. Make sure they
2343 have been generated. */
2344 get_exception_pointer ();
2345 get_exception_filter ();
2347 /* Construct the landing pads. */
2349 assign_filter_values ();
2350 build_post_landing_pads ();
2351 connect_post_landing_pads ();
2352 if (USING_SJLJ_EXCEPTIONS)
2353 sjlj_build_landing_pads ();
2355 dw2_build_landing_pads ();
2357 cfun->eh->built_landing_pads = 1;
2359 /* We've totally changed the CFG. Start over. */
2360 find_exception_handler_labels ();
2361 jump_optimize_minimal (get_insns ());
2362 find_basic_blocks (get_insns (), max_reg_num (), 0);
2366 /* This section handles removing dead code for flow. */
2368 /* Remove LABEL from the exception_handler_labels list. */
2371 remove_exception_handler_label (label)
2376 for (pl = &exception_handler_labels, l = *pl;
2377 XEXP (l, 0) != label;
2378 pl = &XEXP (l, 1), l = *pl)
2382 free_EXPR_LIST_node (l);
2385 /* Splice REGION from the region tree etc. */
2388 remove_eh_handler (region)
2389 struct eh_region *region;
2391 struct eh_region **pp, *p;
2395 /* For the benefit of efficiently handling REG_EH_REGION notes,
2396 replace this region in the region array with its containing
2397 region. Note that previous region deletions may result in
2398 multiple copies of this region in the array, so we have to
2399 search the whole thing. */
2400 for (i = cfun->eh->last_region_number; i > 0; --i)
2401 if (cfun->eh->region_array[i] == region)
2402 cfun->eh->region_array[i] = region->outer;
2404 if (cfun->eh->built_landing_pads)
2405 lab = region->landing_pad;
2407 lab = region->label;
2409 remove_exception_handler_label (lab);
2412 pp = &region->outer->inner;
2414 pp = &cfun->eh->region_tree;
2415 for (p = *pp; p != region; pp = &p->next_peer, p = *pp)
2420 for (p = region->inner; p->next_peer ; p = p->next_peer)
2421 p->outer = region->outer;
2422 p->next_peer = region->next_peer;
2423 p->outer = region->outer;
2424 *pp = region->inner;
2427 *pp = region->next_peer;
2429 if (region->type == ERT_CATCH)
2431 struct eh_region *try, *next, *prev;
2433 for (try = region->next_peer;
2434 try->type == ERT_CATCH;
2435 try = try->next_peer)
2437 if (try->type != ERT_TRY)
2440 next = region->u.catch.next_catch;
2441 prev = region->u.catch.prev_catch;
2444 next->u.catch.prev_catch = prev;
2446 try->u.try.last_catch = prev;
2448 prev->u.catch.next_catch = next;
2451 try->u.try.catch = next;
2453 remove_eh_handler (try);
2460 /* LABEL heads a basic block that is about to be deleted. If this
2461 label corresponds to an exception region, we may be able to
2462 delete the region. */
2465 maybe_remove_eh_handler (label)
2470 /* ??? After generating landing pads, it's not so simple to determine
2471 if the region data is completely unused. One must examine the
2472 landing pad and the post landing pad, and whether an inner try block
2473 is referencing the catch handlers directly. */
2474 if (cfun->eh->built_landing_pads)
2477 for (i = cfun->eh->last_region_number; i > 0; --i)
2479 struct eh_region *region = cfun->eh->region_array[i];
2480 if (region && region->label == label)
2482 /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2483 because there is no path to the fallback call to terminate.
2484 But the region continues to affect call-site data until there
2485 are no more contained calls, which we don't see here. */
2486 if (region->type == ERT_MUST_NOT_THROW)
2488 remove_exception_handler_label (region->label);
2489 region->label = NULL_RTX;
2492 remove_eh_handler (region);
2499 /* This section describes CFG exception edges for flow. */
2501 /* For communicating between calls to reachable_next_level. */
2502 struct reachable_info
2509 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2510 base class of TYPE, is in HANDLED. */
2513 check_handled (handled, type)
2518 /* We can check for exact matches without front-end help. */
2519 if (! lang_eh_type_covers)
2521 for (t = handled; t ; t = TREE_CHAIN (t))
2522 if (TREE_VALUE (t) == type)
2527 for (t = handled; t ; t = TREE_CHAIN (t))
2528 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2535 /* A subroutine of reachable_next_level. If we are collecting a list
2536 of handlers, add one. After landing pad generation, reference
2537 it instead of the handlers themselves. Further, the handlers are
2538 all wired together, so by referencing one, we've got them all.
2539 Before landing pad generation we reference each handler individually.
2541 LP_REGION contains the landing pad; REGION is the handler. */
2544 add_reachable_handler (info, lp_region, region)
2545 struct reachable_info *info;
2546 struct eh_region *lp_region;
2547 struct eh_region *region;
2552 if (cfun->eh->built_landing_pads)
2554 if (! info->handlers)
2555 info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
2558 info->handlers = alloc_INSN_LIST (region->label, info->handlers);
2561 /* Process one level of exception regions for reachability.
2562 If TYPE_THROWN is non-null, then it is the *exact* type being
2563 propagated. If INFO is non-null, then collect handler labels
2564 and caught/allowed type information between invocations. */
2566 static enum reachable_code
2567 reachable_next_level (region, type_thrown, info)
2568 struct eh_region *region;
2570 struct reachable_info *info;
2572 switch (region->type)
2575 /* Before landing-pad generation, we model control flow
2576 directly to the individual handlers. In this way we can
2577 see that catch handler types may shadow one another. */
2578 add_reachable_handler (info, region, region);
2579 return RNL_MAYBE_CAUGHT;
2583 struct eh_region *c;
2584 enum reachable_code ret = RNL_NOT_CAUGHT;
2586 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
2588 /* A catch-all handler ends the search. */
2589 /* ??? _Unwind_ForcedUnwind will want outer cleanups
2590 to be run as well. */
2591 if (c->u.catch.type == NULL)
2593 add_reachable_handler (info, region, c);
2599 /* If we have a type match, end the search. */
2600 if (c->u.catch.type == type_thrown
2601 || (lang_eh_type_covers
2602 && (*lang_eh_type_covers) (c->u.catch.type,
2605 add_reachable_handler (info, region, c);
2609 /* If we have definitive information of a match failure,
2610 the catch won't trigger. */
2611 if (lang_eh_type_covers)
2612 return RNL_NOT_CAUGHT;
2616 ret = RNL_MAYBE_CAUGHT;
2618 /* A type must not have been previously caught. */
2619 else if (! check_handled (info->types_caught, c->u.catch.type))
2621 add_reachable_handler (info, region, c);
2622 info->types_caught = tree_cons (NULL, c->u.catch.type,
2623 info->types_caught);
2625 /* ??? If the catch type is a base class of every allowed
2626 type, then we know we can stop the search. */
2627 ret = RNL_MAYBE_CAUGHT;
2634 case ERT_ALLOWED_EXCEPTIONS:
2635 /* An empty list of types definitely ends the search. */
2636 if (region->u.allowed.type_list == NULL_TREE)
2638 add_reachable_handler (info, region, region);
2642 /* Collect a list of lists of allowed types for use in detecting
2643 when a catch may be transformed into a catch-all. */
2645 info->types_allowed = tree_cons (NULL_TREE,
2646 region->u.allowed.type_list,
2647 info->types_allowed);
2649 /* If we have definitive information about the type hierarchy,
2650 then we can tell if the thrown type will pass through the
2652 if (type_thrown && lang_eh_type_covers)
2654 if (check_handled (region->u.allowed.type_list, type_thrown))
2655 return RNL_NOT_CAUGHT;
2658 add_reachable_handler (info, region, region);
2663 add_reachable_handler (info, region, region);
2664 return RNL_MAYBE_CAUGHT;
2667 /* Catch regions are handled by their controlling try region. */
2668 return RNL_NOT_CAUGHT;
2670 case ERT_MUST_NOT_THROW:
2671 /* Here we end our search, since no exceptions may propagate.
2672 If we've touched down at some landing pad previously, then the
2673 explicit function call we generated may be used. Otherwise
2674 the call is made by the runtime. */
2675 if (info && info->handlers)
2677 add_reachable_handler (info, region, region);
2685 /* Shouldn't see these here. */
2692 /* Retrieve a list of labels of exception handlers which can be
2693 reached by a given insn. */
2696 reachable_handlers (insn)
2699 struct reachable_info info;
2700 struct eh_region *region;
2704 if (GET_CODE (insn) == JUMP_INSN
2705 && GET_CODE (PATTERN (insn)) == RESX)
2706 region_number = XINT (PATTERN (insn), 0);
2709 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2710 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2712 region_number = INTVAL (XEXP (note, 0));
2715 memset (&info, 0, sizeof (info));
2717 region = cfun->eh->region_array[region_number];
2719 type_thrown = NULL_TREE;
2720 if (region->type == ERT_THROW)
2722 type_thrown = region->u.throw.type;
2723 region = region->outer;
2725 else if (GET_CODE (insn) == JUMP_INSN
2726 && GET_CODE (PATTERN (insn)) == RESX)
2727 region = region->outer;
2729 for (; region; region = region->outer)
2730 if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
2733 return info.handlers;
2736 /* Determine if the given INSN can throw an exception that is caught
2737 within the function. */
2740 can_throw_internal (insn)
2743 struct eh_region *region;
2747 if (! INSN_P (insn))
2750 if (GET_CODE (insn) == INSN
2751 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2752 insn = XVECEXP (PATTERN (insn), 0, 0);
2754 if (GET_CODE (insn) == CALL_INSN
2755 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2758 for (i = 0; i < 3; ++i)
2760 rtx sub = XEXP (PATTERN (insn), i);
2761 for (; sub ; sub = NEXT_INSN (sub))
2762 if (can_throw_internal (sub))
2768 /* Every insn that might throw has an EH_REGION note. */
2769 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2770 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2773 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2775 type_thrown = NULL_TREE;
2776 if (region->type == ERT_THROW)
2778 type_thrown = region->u.throw.type;
2779 region = region->outer;
2782 /* If this exception is ignored by each and every containing region,
2783 then control passes straight out. The runtime may handle some
2784 regions, which also do not require processing internally. */
2785 for (; region; region = region->outer)
2787 enum reachable_code how = reachable_next_level (region, type_thrown, 0);
2788 if (how == RNL_BLOCKED)
2790 if (how != RNL_NOT_CAUGHT)
2797 /* Determine if the given INSN can throw an exception that is
2798 visible outside the function. */
2801 can_throw_external (insn)
2804 struct eh_region *region;
2808 if (! INSN_P (insn))
2811 if (GET_CODE (insn) == INSN
2812 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2813 insn = XVECEXP (PATTERN (insn), 0, 0);
2815 if (GET_CODE (insn) == CALL_INSN
2816 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2819 for (i = 0; i < 3; ++i)
2821 rtx sub = XEXP (PATTERN (insn), i);
2822 for (; sub ; sub = NEXT_INSN (sub))
2823 if (can_throw_external (sub))
2829 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2832 /* Calls (and trapping insns) without notes are outside any
2833 exception handling region in this function. We have to
2834 assume it might throw. Given that the front end and middle
2835 ends mark known NOTHROW functions, this isn't so wildly inaccurate. */
2837 return (GET_CODE (insn) == CALL_INSN
2838 || (flag_non_call_exceptions
2839 && may_trap_p (PATTERN (insn))));
2841 if (INTVAL (XEXP (note, 0)) <= 0)
2844 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2846 type_thrown = NULL_TREE;
2847 if (region->type == ERT_THROW)
2849 type_thrown = region->u.throw.type;
2850 region = region->outer;
2853 /* If the exception is caught or blocked by any containing region,
2854 then it is not seen by any calling function. */
2855 for (; region ; region = region->outer)
2856 if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
2862 /* True if nothing in this function can throw outside this function. */
2865 nothrow_function_p ()
2869 if (! flag_exceptions)
2872 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2873 if (can_throw_external (insn))
2875 for (insn = current_function_epilogue_delay_list; insn;
2876 insn = XEXP (insn, 1))
2877 if (can_throw_external (insn))
2884 /* Various hooks for unwind library. */
2886 /* Do any necessary initialization to access arbitrary stack frames.
2887 On the SPARC, this means flushing the register windows. */
2890 expand_builtin_unwind_init ()
2892 /* Set this so all the registers get saved in our frame; we need to be
2893 able to copy the saved values for any registers from frames we unwind. */
2894 current_function_has_nonlocal_label = 1;
2896 #ifdef SETUP_FRAME_ADDRESSES
2897 SETUP_FRAME_ADDRESSES ();
2902 expand_builtin_eh_return_data_regno (arglist)
2905 tree which = TREE_VALUE (arglist);
2906 unsigned HOST_WIDE_INT iwhich;
2908 if (TREE_CODE (which) != INTEGER_CST)
2910 error ("argument of `__builtin_eh_return_data_regno' must be constant");
2914 iwhich = tree_low_cst (which, 1);
2915 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2916 if (iwhich == INVALID_REGNUM)
2919 #ifdef DWARF_FRAME_REGNUM
2920 iwhich = DWARF_FRAME_REGNUM (iwhich);
2922 iwhich = DBX_REGISTER_NUMBER (iwhich);
2925 return GEN_INT (iwhich);
2928 /* Given a value extracted from the return address register or stack slot,
2929 return the actual address encoded in that value. */
2932 expand_builtin_extract_return_addr (addr_tree)
2935 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2937 /* First mask out any unwanted bits. */
2938 #ifdef MASK_RETURN_ADDR
2939 expand_and (addr, MASK_RETURN_ADDR, addr);
2942 /* Then adjust to find the real return address. */
2943 #if defined (RETURN_ADDR_OFFSET)
2944 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
2950 /* Given an actual address in addr_tree, do any necessary encoding
2951 and return the value to be stored in the return address register or
2952 stack slot so the epilogue will return to that address. */
2955 expand_builtin_frob_return_addr (addr_tree)
2958 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2960 #ifdef RETURN_ADDR_OFFSET
2961 addr = force_reg (Pmode, addr);
2962 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
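/* For illustration only: on a hypothetical target whose saved return
   address carries two flag bits in its low bits and points 8 bytes past
   the address we actually want, the two expanders above reduce to the
   following plain C.  The mask and offset are invented values, not those
   of any real port.  */
#if 0
#include <stdint.h>

#define TOY_MASK_RETURN_ADDR	(~(uintptr_t) 3)	/* hypothetical */
#define TOY_RETURN_ADDR_OFFSET	(-8)			/* hypothetical */

/* What expand_builtin_extract_return_addr computes.  */
static uintptr_t
toy_extract_return_addr (uintptr_t saved)
{
  saved &= TOY_MASK_RETURN_ADDR;		/* mask out unwanted bits */
  return saved + TOY_RETURN_ADDR_OFFSET;	/* adjust to the real address */
}

/* What expand_builtin_frob_return_addr computes: the inverse adjustment,
   so that the epilogue returns where we intend.  */
static uintptr_t
toy_frob_return_addr (uintptr_t real)
{
  return real - TOY_RETURN_ADDR_OFFSET;
}
#endif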
2968 /* Set up the epilogue with the magic bits we'll need to return to the
2969 exception handler. */
2972 expand_builtin_eh_return (stackadj_tree, handler_tree)
2973 tree stackadj_tree, handler_tree;
2975 rtx stackadj, handler;
2977 stackadj = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
2978 handler = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
2980 if (! cfun->eh->ehr_label)
2982 cfun->eh->ehr_stackadj = copy_to_reg (stackadj);
2983 cfun->eh->ehr_handler = copy_to_reg (handler);
2984 cfun->eh->ehr_label = gen_label_rtx ();
2988 if (stackadj != cfun->eh->ehr_stackadj)
2989 emit_move_insn (cfun->eh->ehr_stackadj, stackadj);
2990 if (handler != cfun->eh->ehr_handler)
2991 emit_move_insn (cfun->eh->ehr_handler, handler);
2994 emit_jump (cfun->eh->ehr_label);
3000 rtx sa, ra, around_label;
3002 if (! cfun->eh->ehr_label)
3005 sa = EH_RETURN_STACKADJ_RTX;
3008 error ("__builtin_eh_return not supported on this target");
3012 current_function_calls_eh_return = 1;
3014 around_label = gen_label_rtx ();
3015 emit_move_insn (sa, const0_rtx);
3016 emit_jump (around_label);
3018 emit_label (cfun->eh->ehr_label);
3019 clobber_return_register ();
3021 #ifdef HAVE_eh_return
3023 emit_insn (gen_eh_return (cfun->eh->ehr_stackadj, cfun->eh->ehr_handler));
3027 ra = EH_RETURN_HANDLER_RTX;
3030 error ("__builtin_eh_return not supported on this target");
3031 ra = gen_reg_rtx (Pmode);
3034 emit_move_insn (sa, cfun->eh->ehr_stackadj);
3035 emit_move_insn (ra, cfun->eh->ehr_handler);
3038 emit_label (around_label);
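/* The consumer of this machinery is the unwind runtime: after it has
   located a landing pad, it transfers control by returning through a
   frame whose body ends in __builtin_eh_return.  The sketch below is a
   schematic of that use, not the actual libgcc code; both helper
   functions are hypothetical.  */
#if 0
extern void *toy_find_landing_pad (void);	/* hypothetical */
extern long toy_stack_adjustment (void);	/* hypothetical */

void
toy_install_context (void)
{
  void *handler = toy_find_landing_pad ();
  long adj = toy_stack_adjustment ();

  /* Ask the epilogue of this function to bump the stack by ADJ and
     "return" to HANDLER instead of to the normal return address.  */
  __builtin_eh_return (adj, handler);
}
#endif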
3041 struct action_record
3049 action_record_eq (pentry, pdata)
3053 const struct action_record *entry = (const struct action_record *) pentry;
3054 const struct action_record *data = (const struct action_record *) pdata;
3055 return entry->filter == data->filter && entry->next == data->next;
3059 action_record_hash (pentry)
3062 const struct action_record *entry = (const struct action_record *) pentry;
3063 return entry->next * 1009 + entry->filter;
3067 add_action_record (ar_hash, filter, next)
3071 struct action_record **slot, *new, tmp;
3073 tmp.filter = filter;
3075 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3077 if ((new = *slot) == NULL)
3079 new = (struct action_record *) xmalloc (sizeof (*new));
3080 new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3081 new->filter = filter;
3085 /* The filter value goes in untouched. The link to the next
3086 record is a "self-relative" byte offset, or zero to indicate
3087 that there is no next record. So convert the absolute 1-based
3088 indices we've been carrying around into a displacement. */
3090 push_sleb128 (&cfun->eh->action_record_data, filter);
3092 next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3093 push_sleb128 (&cfun->eh->action_record_data, next);
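/* A hand-worked example of the displacement above: starting from an empty
   table, the first record is given offset 1 and occupies bytes 1 and 2
   (its filter, then a zero "no next" byte).  A second record chained to it
   is created while the table holds those 2 bytes, so its offset is 3; its
   filter lands at byte 3, and NEXT (the absolute offset 1) is rewritten as
   1 - (3 + 1) = -3, a self-relative hop back over three bytes, before
   being pushed as an sleb128.  */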
3100 collect_one_action_chain (ar_hash, region)
3102 struct eh_region *region;
3104 struct eh_region *c;
3107 /* If we've reached the top of the region chain, then we have
3108 no actions, and require no landing pad. */
3112 switch (region->type)
3115 /* A cleanup adds a zero filter to the beginning of the chain, but
3116 there are special cases to look out for. If there are *only*
3117 cleanups along a path, then it compresses to a zero action.
3118 Further, if there are multiple cleanups along a path, we only
3119 need to represent one of them, as that is enough to trigger
3120 entry to the landing pad at runtime. */
3121 next = collect_one_action_chain (ar_hash, region->outer);
3124 for (c = region->outer; c ; c = c->outer)
3125 if (c->type == ERT_CLEANUP)
3127 return add_action_record (ar_hash, 0, next);
3130 /* Process the associated catch regions in reverse order.
3131 If there's a catch-all handler, then we don't need to
3132 search outer regions. Use a magic -3 value to record
3133 that we haven't done the outer search. */
3135 for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
3137 if (c->u.catch.type == NULL)
3138 next = add_action_record (ar_hash, c->u.catch.filter, 0);
3143 next = collect_one_action_chain (ar_hash, region->outer);
3147 next = add_action_record (ar_hash, c->u.catch.filter, next);
3152 case ERT_ALLOWED_EXCEPTIONS:
3153 /* An exception specification adds its filter to the
3154 beginning of the chain. */
3155 next = collect_one_action_chain (ar_hash, region->outer);
3156 return add_action_record (ar_hash, region->u.allowed.filter,
3157 next < 0 ? 0 : next);
3159 case ERT_MUST_NOT_THROW:
3160 /* A must-not-throw region with no inner handlers or cleanups
3161 requires no call-site entry. Note that this differs from
3162 the no handler or cleanup case in that we do require an lsda
3163 to be generated. Return a magic -2 value to record this. */
3168 /* CATCH regions are handled in TRY above. THROW regions are
3169 for optimization information only and produce no output. */
3170 return collect_one_action_chain (ar_hash, region->outer);
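/* A concrete, hand-worked illustration of the chains built above: for a
   call inside

	try { ... } catch (A) { ... } catch (...) { ... }

   the ERT_TRY case walks the catches in reverse, so the catch-all first
   contributes a record with its filter and no next record, and the
   handler for A is then chained in front of it; because a catch-all was
   seen, the outer regions are never consulted.  A call protected only by
   cleanups compresses to the zero action (the call-site entry still names
   a landing pad but carries action 0), and a call outside every region
   yields -1, meaning no landing pad is needed at all.  */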
3178 add_call_site (landing_pad, action)
3182 struct call_site_record *data = cfun->eh->call_site_data;
3183 int used = cfun->eh->call_site_data_used;
3184 int size = cfun->eh->call_site_data_size;
3188 size = (size ? size * 2 : 64);
3189 data = (struct call_site_record *)
3190 xrealloc (data, sizeof (*data) * size);
3191 cfun->eh->call_site_data = data;
3192 cfun->eh->call_site_data_size = size;
3195 data[used].landing_pad = landing_pad;
3196 data[used].action = action;
3198 cfun->eh->call_site_data_used = used + 1;
3200 return used + call_site_base;
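/* The index returned here, which convert_to_eh_region_ranges records in
   NOTE_EH_HANDLER, is biased by call_site_base; the output routines below
   generate the per-call-site LEHB/LEHE labels from call_site_base + i and
   advance the base after each function, so the note numbering and the
   label numbering stay in step across all functions in the unit.  */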
3203 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3204 The new note numbers will not refer to region numbers, but
3205 instead to call site entries. */
3208 convert_to_eh_region_ranges ()
3210 rtx insn, iter, note;
3212 int last_action = -3;
3213 rtx last_action_insn = NULL_RTX;
3214 rtx last_landing_pad = NULL_RTX;
3215 rtx first_no_action_insn = NULL_RTX;
3218 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3221 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
3223 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3225 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3228 struct eh_region *region;
3230 rtx this_landing_pad;
3233 if (GET_CODE (insn) == INSN
3234 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3235 insn = XVECEXP (PATTERN (insn), 0, 0);
3237 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3240 if (! (GET_CODE (insn) == CALL_INSN
3241 || (flag_non_call_exceptions
3242 && may_trap_p (PATTERN (insn)))))
3249 if (INTVAL (XEXP (note, 0)) <= 0)
3251 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
3252 this_action = collect_one_action_chain (ar_hash, region);
3255 /* Existence of catch handlers or must-not-throw regions
3256 implies that an lsda is needed (even if empty). */
3257 if (this_action != -1)
3258 cfun->uses_eh_lsda = 1;
3260 /* Delay creation of region notes for no-action regions
3261 until we're sure that an lsda will be required. */
3262 else if (last_action == -3)
3264 first_no_action_insn = iter;
3268 /* Cleanups and handlers may share action chains but not
3269 landing pads. Collect the landing pad for this region. */
3270 if (this_action >= 0)
3272 struct eh_region *o;
3273 for (o = region; ! o->landing_pad ; o = o->outer)
3275 this_landing_pad = o->landing_pad;
3278 this_landing_pad = NULL_RTX;
3280 /* Differing actions or landing pads implies a change in call-site
3281 info, which implies some EH_REGION note should be emitted. */
3282 if (last_action != this_action
3283 || last_landing_pad != this_landing_pad)
3285 /* If we'd not seen a previous action (-3) or the previous
3286 action was must-not-throw (-2), then we do not need an end note. */
3288 if (last_action >= -1)
3290 /* If we delayed the creation of the begin, do it now. */
3291 if (first_no_action_insn)
3293 call_site = add_call_site (NULL_RTX, 0);
3294 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3295 first_no_action_insn);
3296 NOTE_EH_HANDLER (note) = call_site;
3297 first_no_action_insn = NULL_RTX;
3300 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3302 NOTE_EH_HANDLER (note) = call_site;
3305 /* If the new action is must-not-throw, then no region notes are created. */
3307 if (this_action >= -1)
3309 call_site = add_call_site (this_landing_pad,
3310 this_action < 0 ? 0 : this_action);
3311 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3312 NOTE_EH_HANDLER (note) = call_site;
3315 last_action = this_action;
3316 last_landing_pad = this_landing_pad;
3318 last_action_insn = iter;
3321 if (last_action >= -1 && ! first_no_action_insn)
3323 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3324 NOTE_EH_HANDLER (note) = call_site;
3327 htab_delete (ar_hash);
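/* Schematically (ignoring the no-note and no-action cases handled above),
   the loop just finished turns a sequence such as

	call_1	REG_EH_REGION 2		(action chain X)
	call_2	REG_EH_REGION 2		(action chain X)
	call_3	REG_EH_REGION 5		(action chain Y)

   into two NOTE_INSN_EH_REGION_BEG/END pairs, one spanning call_1 and
   call_2 and one around call_3, each tagged with the index of a freshly
   added call-site record rather than with the original region number.
   The insn names and region numbers here are of course made up.  */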
3332 push_uleb128 (data_area, value)
3333 varray_type *data_area;
3338 unsigned char byte = value & 0x7f;
3342 VARRAY_PUSH_UCHAR (*data_area, byte);
3348 push_sleb128 (data_area, value)
3349 varray_type *data_area;
3357 byte = value & 0x7f;
3359 more = ! ((value == 0 && (byte & 0x40) == 0)
3360 || (value == -1 && (byte & 0x40) != 0));
3363 VARRAY_PUSH_UCHAR (*data_area, byte);
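/* A self-contained reference encoder for the two LEB128 forms produced
   above, with the classic DWARF test vectors.  It is independent of the
   varray machinery and can be compiled and run on its own; like the real
   push_sleb128 it assumes arithmetic right shift of negative values.  */
#if 0
#include <stdio.h>

static int
toy_uleb128 (unsigned long value, unsigned char *out)
{
  int n = 0;
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value)
	byte |= 0x80;		/* more bytes follow */
      out[n++] = byte;
    }
  while (value);
  return n;
}

static int
toy_sleb128 (long value, unsigned char *out)
{
  int n = 0, more = 1;
  while (more)
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;		/* arithmetic shift assumed */
      more = ! ((value == 0 && (byte & 0x40) == 0)
		|| (value == -1 && (byte & 0x40) != 0));
      if (more)
	byte |= 0x80;
      out[n++] = byte;
    }
  return n;
}

int
main (void)
{
  unsigned char buf[8];
  int i, n;

  n = toy_uleb128 (624485, buf);	/* expect e5 8e 26 */
  for (i = 0; i < n; ++i)
    printf ("%02x ", buf[i]);
  printf ("\n");

  n = toy_sleb128 (-123456, buf);	/* expect c0 bb 78 */
  for (i = 0; i < n; ++i)
    printf ("%02x ", buf[i]);
  printf ("\n");

  return 0;
}
#endif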
3369 #define DW_EH_PE_absptr 0x00
3370 #define DW_EH_PE_omit 0xff
3372 #define DW_EH_PE_uleb128 0x01
3373 #define DW_EH_PE_udata2 0x02
3374 #define DW_EH_PE_udata4 0x03
3375 #define DW_EH_PE_udata8 0x04
3376 #define DW_EH_PE_sleb128 0x09
3377 #define DW_EH_PE_sdata2 0x0A
3378 #define DW_EH_PE_sdata4 0x0B
3379 #define DW_EH_PE_sdata8 0x0C
3380 #define DW_EH_PE_signed 0x08
3382 #define DW_EH_PE_pcrel 0x10
3383 #define DW_EH_PE_textrel 0x20
3384 #define DW_EH_PE_datarel 0x30
3385 #define DW_EH_PE_funcrel 0x40
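/* The low nibble of these codes selects the data format and the high
   nibble how the value is applied, so the combinations decoded by
   eh_data_format_name below are plain bitwise ORs; for example
   DW_EH_PE_pcrel | DW_EH_PE_sdata4 is 0x1B, printed as "pcrel sdata4".  */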
3388 eh_data_format_name (format)
3393 case DW_EH_PE_absptr: return "absolute";
3394 case DW_EH_PE_omit: return "omit";
3396 case DW_EH_PE_uleb128: return "uleb128";
3397 case DW_EH_PE_udata2: return "udata2";
3398 case DW_EH_PE_udata4: return "udata4";
3399 case DW_EH_PE_udata8: return "udata8";
3400 case DW_EH_PE_sleb128: return "sleb128";
3401 case DW_EH_PE_sdata2: return "sdata2";
3402 case DW_EH_PE_sdata4: return "sdata4";
3403 case DW_EH_PE_sdata8: return "sdata8";
3405 case DW_EH_PE_uleb128 | DW_EH_PE_pcrel: return "pcrel uleb128";
3406 case DW_EH_PE_udata2 | DW_EH_PE_pcrel: return "pcrel udata2";
3407 case DW_EH_PE_udata4 | DW_EH_PE_pcrel: return "pcrel udata4";
3408 case DW_EH_PE_udata8 | DW_EH_PE_pcrel: return "pcrel udata8";
3409 case DW_EH_PE_sleb128 | DW_EH_PE_pcrel: return "pcrel sleb128";
3410 case DW_EH_PE_sdata2 | DW_EH_PE_pcrel: return "pcrel sdata2";
3411 case DW_EH_PE_sdata4 | DW_EH_PE_pcrel: return "pcrel sdata4";
3412 case DW_EH_PE_sdata8 | DW_EH_PE_pcrel: return "pcrel sdata8";
3414 case DW_EH_PE_uleb128 | DW_EH_PE_textrel: return "textrel uleb128";
3415 case DW_EH_PE_udata2 | DW_EH_PE_textrel: return "textrel udata2";
3416 case DW_EH_PE_udata4 | DW_EH_PE_textrel: return "textrel udata4";
3417 case DW_EH_PE_udata8 | DW_EH_PE_textrel: return "textrel udata8";
3418 case DW_EH_PE_sleb128 | DW_EH_PE_textrel: return "textrel sleb128";
3419 case DW_EH_PE_sdata2 | DW_EH_PE_textrel: return "textrel sdata2";
3420 case DW_EH_PE_sdata4 | DW_EH_PE_textrel: return "textrel sdata4";
3421 case DW_EH_PE_sdata8 | DW_EH_PE_textrel: return "textrel sdata8";
3423 case DW_EH_PE_uleb128 | DW_EH_PE_datarel: return "datarel uleb128";
3424 case DW_EH_PE_udata2 | DW_EH_PE_datarel: return "datarel udata2";
3425 case DW_EH_PE_udata4 | DW_EH_PE_datarel: return "datarel udata4";
3426 case DW_EH_PE_udata8 | DW_EH_PE_datarel: return "datarel udata8";
3427 case DW_EH_PE_sleb128 | DW_EH_PE_datarel: return "datarel sleb128";
3428 case DW_EH_PE_sdata2 | DW_EH_PE_datarel: return "datarel sdata2";
3429 case DW_EH_PE_sdata4 | DW_EH_PE_datarel: return "datarel sdata4";
3430 case DW_EH_PE_sdata8 | DW_EH_PE_datarel: return "datarel sdata8";
3432 case DW_EH_PE_uleb128 | DW_EH_PE_funcrel: return "funcrel uleb128";
3433 case DW_EH_PE_udata2 | DW_EH_PE_funcrel: return "funcrel udata2";
3434 case DW_EH_PE_udata4 | DW_EH_PE_funcrel: return "funcrel udata4";
3435 case DW_EH_PE_udata8 | DW_EH_PE_funcrel: return "funcrel udata8";
3436 case DW_EH_PE_sleb128 | DW_EH_PE_funcrel: return "funcrel sleb128";
3437 case DW_EH_PE_sdata2 | DW_EH_PE_funcrel: return "funcrel sdata2";
3438 case DW_EH_PE_sdata4 | DW_EH_PE_funcrel: return "funcrel sdata4";
3439 case DW_EH_PE_sdata8 | DW_EH_PE_funcrel: return "funcrel sdata8";
3446 #ifndef HAVE_AS_LEB128
3448 dw2_size_of_call_site_table ()
3450 int n = cfun->eh->call_site_data_used;
3451 int size = n * (4 + 4 + 4);
3454 for (i = 0; i < n; ++i)
3456 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3457 size += size_of_uleb128 (cs->action);
3464 sjlj_size_of_call_site_table ()
3466 int n = cfun->eh->call_site_data_used;
3470 for (i = 0; i < n; ++i)
3472 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3473 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3474 size += size_of_uleb128 (cs->action);
3482 dw2_output_call_site_table ()
3484 const char *function_start_lab
3485 = IDENTIFIER_POINTER (current_function_func_begin_label);
3486 int n = cfun->eh->call_site_data_used;
3489 for (i = 0; i < n; ++i)
3491 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3492 char reg_start_lab[32];
3493 char reg_end_lab[32];
3494 char landing_pad_lab[32];
3496 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3497 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3499 if (cs->landing_pad)
3500 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3501 CODE_LABEL_NUMBER (cs->landing_pad));
3503 /* ??? Perhaps use insn length scaling if the assembler supports
3504 generic arithmetic. */
3505 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3506 data4 if the function is small enough. */
3507 #ifdef HAVE_AS_LEB128
3508 dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
3509 "region %d start", i);
3510 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3512 if (cs->landing_pad)
3513 dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
3516 dw2_asm_output_data_uleb128 (0, "landing pad");
3518 dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
3519 "region %d start", i);
3520 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3521 if (cs->landing_pad)
3522 dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
3525 dw2_asm_output_data (4, 0, "landing pad");
3527 dw2_asm_output_data_uleb128 (cs->action, "action");
3530 call_site_base += n;
3534 sjlj_output_call_site_table ()
3536 int n = cfun->eh->call_site_data_used;
3539 for (i = 0; i < n; ++i)
3541 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3543 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3544 "region %d landing pad", i);
3545 dw2_asm_output_data_uleb128 (cs->action, "action");
3548 call_site_base += n;
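/* For reference, each Dwarf2 call-site entry emitted above consists, in
   order, of the region start (as an offset from the function begin
   label), the region length, the landing pad (an offset from the same
   label, or 0 if there is none), and an action value that is either 0
   (no action records) or a 1-based offset into the action record table
   that follows.  The sjlj variant needs only a dispatch value and the
   action, since its "landing pad" is recorded as a small integer rather
   than a code address.  */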
3552 output_function_exception_table ()
3555 #ifdef HAVE_AS_LEB128
3556 char ttype_label[32];
3557 char cs_after_size_label[32];
3558 char cs_end_label[32];
3565 /* Not all functions need anything. */
3566 if (! cfun->uses_eh_lsda)
3569 funcdef_number = (USING_SJLJ_EXCEPTIONS
3570 ? sjlj_funcdef_number
3571 : current_funcdef_number);
3573 exception_section ();
3575 have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
3576 || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);
3579 assemble_eh_align (GET_MODE_ALIGNMENT (ptr_mode));
3581 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "LLSDA", funcdef_number);
3583 /* The LSDA header. */
3585 /* Indicate the format of the landing pad start pointer. An omitted
3586 field implies @LPStart == @Start. */
3587 /* Currently we always put @LPStart == @Start. This field would
3588 be most useful in moving the landing pads completely out of
3589 line to another section, but it could also be used to minimize
3590 the size of uleb128 landing pad offsets. */
3591 format = DW_EH_PE_omit;
3592 dw2_asm_output_data (1, format, "@LPStart format (%s)",
3593 eh_data_format_name (format));
3595 /* @LPStart pointer would go here. */
3597 /* Indicate the format of the @TType entries. */
3599 format = DW_EH_PE_omit;
3602 /* ??? Define an ASM_PREFERRED_DATA_FORMAT to say what
3603 sort of dynamic-relocation-free reference to emit. */
3605 #ifdef HAVE_AS_LEB128
3606 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT", funcdef_number);
3609 dw2_asm_output_data (1, format, "@TType format (%s)",
3610 eh_data_format_name (format));
3612 #ifndef HAVE_AS_LEB128
3613 if (USING_SJLJ_EXCEPTIONS)
3614 call_site_len = sjlj_size_of_call_site_table ();
3616 call_site_len = dw2_size_of_call_site_table ();
3619 /* A pc-relative 4-byte displacement to the @TType data. */
3622 #ifdef HAVE_AS_LEB128
3623 char ttype_after_disp_label[32];
3624 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
3626 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3627 "@TType base offset");
3628 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3630 /* Ugh. Alignment complicates things. */
3631 unsigned int before_disp, after_disp, last_disp, disp, align;
3633 align = POINTER_SIZE / BITS_PER_UNIT;
3634 before_disp = 1 + 1;
3635 after_disp = (1 + size_of_uleb128 (call_site_len)
3637 + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
3638 + VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) * align);
3643 unsigned int disp_size, pad;
3646 disp_size = size_of_uleb128 (disp);
3647 pad = before_disp + disp_size + after_disp;
3649 pad = align - (pad % align);
3652 disp = after_disp + pad;
3654 while (disp != last_disp);
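      /* The iteration above is needed because the padding inserted before
	 the type table depends on where that table starts, which in turn
	 depends on how many bytes the uleb128 encoding of DISP itself
	 occupies; recomputing until DISP stops changing settles this, in
	 practice after one or two passes.  */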
3656 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3660 /* Indicate the format of the call-site offsets. */
3661 #ifdef HAVE_AS_LEB128
3662 format = DW_EH_PE_uleb128;
3664 format = DW_EH_PE_udata4;
3666 dw2_asm_output_data (1, format, "call-site format (%s)",
3667 eh_data_format_name (format));
3669 #ifdef HAVE_AS_LEB128
3670 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
3672 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
3674 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3675 "Call-site table length");
3676 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3677 if (USING_SJLJ_EXCEPTIONS)
3678 sjlj_output_call_site_table ();
3680 dw2_output_call_site_table ();
3681 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3683 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3684 if (USING_SJLJ_EXCEPTIONS)
3685 sjlj_output_call_site_table ();
3687 dw2_output_call_site_table ();
3690 /* ??? Decode and interpret the data for flag_debug_asm. */
3691 n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
3692 for (i = 0; i < n; ++i)
3693 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
3694 (i ? NULL : "Action record table"));
3697 assemble_eh_align (GET_MODE_ALIGNMENT (ptr_mode));
3699 i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
3702 tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
3704 if (type == NULL_TREE)
3705 type = integer_zero_node;
3707 type = lookup_type_for_runtime (type);
3709 /* ??? Handle ASM_PREFERRED_DATA_FORMAT. */
3710 output_constant (type, GET_MODE_SIZE (ptr_mode));
3713 #ifdef HAVE_AS_LEB128
3715 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3718 /* ??? Decode and interpret the data for flag_debug_asm. */
3719 n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
3720 for (i = 0; i < n; ++i)
3721 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
3722 (i ? NULL : "Exception specification table"));
3724 function_section (current_function_decl);
3726 if (USING_SJLJ_EXCEPTIONS)
3727 sjlj_funcdef_number += 1;