/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurs without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ] */
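/* For concreteness, the traditional example mentioned above looks
   like this (an illustration of ours, in C++; it is not part of the
   original sources):

	struct Resource {
	  Resource ()  { if (acquire () != 0) throw AcquireError (); }
	  ~Resource () { release (); }
	};

	void caller ()
	{
	  try {
	    Resource r;        -- may throw from the constructor
	    use (r);
	  } catch (AcquireError &) {
	    -- control transfers here, however deep the throw was
	  }
	}

   The throw in the constructor transfers control to the catch clause
   with no error-code plumbing through the intervening frames; this
   file implements the compiler side of that transfer.  */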
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "insn-config.h"
#include "except.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "toplev.h"
#include "hashtab.h"
#include "ggc.h"
#include "langhooks.h"
/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif
/* Nonzero means enable synchronous exceptions for non-call instructions.  */
int flag_non_call_exceptions;

/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
tree (*lang_protect_cleanup_actions) (void);

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) (tree a, tree b);

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) (tree);
/* A hash table of label to region number.  */

struct ehl_map_entry GTY(())
{
  rtx label;
  struct eh_region *region;
};
static GTY(()) int call_site_base;
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
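/* For orientation, the record these offsets index into looks roughly
   like the following (a sketch of ours of the tree type built by
   init_eh below; the authoritative runtime definition is struct
   SjLj_Function_Context in unwind-sjlj.c):

	struct SjLj_Function_Context
	{
	  struct SjLj_Function_Context *__prev;
	  int __call_site;
	  word __data[4];
	  void *__personality;
	  void *__lsda;
	  void *__jbuf[];        -- target-dependent size
	};  */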
/* Describes one exception region.  */
struct eh_region GTY(())
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* When a region is deleted, its parents inherit the REG_EH_REGION
     numbers already assigned.  */
  bitmap aka;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW,
    ERT_FIXUP
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union eh_region_u {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct eh_region_u_try {
      struct eh_region *catch;
      struct eh_region *last_catch;
      struct eh_region *prev_try;
      rtx continue_label;
    } GTY ((tag ("ERT_TRY"))) try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct eh_region_u_catch {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } GTY ((tag ("ERT_CATCH"))) catch;

    /* A tree_list of allowed types.  */
    struct eh_region_u_allowed {
      tree type_list;
      int filter;
    } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct eh_region_u_throw {
      tree type;
    } GTY ((tag ("ERT_THROW"))) throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct eh_region_u_cleanup {
      tree exp;
      struct eh_region *prev_try;
    } GTY ((tag ("ERT_CLEANUP"))) cleanup;

    /* The real region (by expression and by pointer) that fixup code
       should live in.  */
    struct eh_region_u_fixup {
      tree cleanup_exp;
      struct eh_region *real_region;
      bool resolved;
    } GTY ((tag ("ERT_FIXUP"))) fixup;
  } GTY ((desc ("%0.type"))) u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;
  tree tree_label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;

  /* True if something in this region may throw.  */
  unsigned may_contain_throw : 1;
};
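/* As an illustration (ours), a statement like

	try { A; }
	catch (T1) { B; }
	catch (T2) { C; }

   produces an ERT_TRY region for the try body whose two ERT_CATCH
   regions are its next_peer siblings, threaded from u.try.catch to
   u.try.last_catch via u.catch.next_catch; all three share the same
   OUTER region.  A cleanup wrapped around the whole statement would
   appear as an ERT_CLEANUP region further out along the outer
   chain.  */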
struct call_site_record GTY(())
{
  rtx landing_pad;
  int action;
};
/* Used to save exception status for each function.  */
struct eh_status GTY(())
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  struct eh_region ** GTY ((length ("%h.last_region_number"))) region_array;

  /* The most recently open region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  rtx filter;
  rtx exc_ptr;

  int built_landing_pads;
  int last_region_number;

  varray_type ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  htab_t GTY ((param_is (struct ehl_map_entry))) exception_handler_label_map;

  struct call_site_record * GTY ((length ("%h.call_site_data_used")))
    call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  rtx ttype_base;

  rtx sjlj_fc;
  rtx sjlj_exit_after;
};
static int t2r_eq (const void *, const void *);
static hashval_t t2r_hash (const void *);
static void add_type_for_runtime (tree);
static tree lookup_type_for_runtime (tree);

static struct eh_region *expand_eh_region_end (void);

static void resolve_fixup_regions (void);
static void remove_fixup_regions (void);
static void remove_unreachable_regions (rtx);
static void convert_from_eh_region_ranges_1 (rtx *, int *, int);

static struct eh_region *duplicate_eh_region_1 (struct eh_region *,
						struct inline_remap *);
static void duplicate_eh_region_2 (struct eh_region *, struct eh_region **);
static int ttypes_filter_eq (const void *, const void *);
static hashval_t ttypes_filter_hash (const void *);
static int ehspec_filter_eq (const void *, const void *);
static hashval_t ehspec_filter_hash (const void *);
static int add_ttypes_entry (htab_t, tree);
static int add_ehspec_entry (htab_t, htab_t, tree);
static void assign_filter_values (void);
static void build_post_landing_pads (void);
static void connect_post_landing_pads (void);
static void dw2_build_landing_pads (void);

struct sjlj_lp_info;
static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
static void sjlj_mark_call_sites (struct sjlj_lp_info *);
static void sjlj_emit_function_enter (rtx);
static void sjlj_emit_function_exit (void);
static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
static void sjlj_build_landing_pads (void);

static hashval_t ehl_hash (const void *);
static int ehl_eq (const void *, const void *);
static void add_ehl_entry (rtx, struct eh_region *);
static void remove_exception_handler_label (rtx);
static void remove_eh_handler (struct eh_region *);
static int for_each_eh_label_1 (void **, void *);

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

struct reachable_info;
static enum reachable_code reachable_next_level (struct eh_region *, tree,
						 struct reachable_info *);

static int action_record_eq (const void *, const void *);
static hashval_t action_record_hash (const void *);
static int add_action_record (htab_t, int, int);
static int collect_one_action_chain (htab_t, struct eh_region *);
static int add_call_site (rtx, int);

static void push_uleb128 (varray_type *, unsigned int);
static void push_sleb128 (varray_type *, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (void);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (void);
static void sjlj_output_call_site_table (void);
/* Routine to see if exception handling is turned on.
   DO_WARN is nonzero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (int do_warn)
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
	{
	  error ("exception handling disabled, use -fexceptions to enable");
	  warned = 1;
	}
      return 0;
    }
  return 1;
}
void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_2 (4 - 1, 0));
      tmp = build_array_type (lang_hooks.types.type_for_mode (word_mode, 1),
			      tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
#else
      /* Should be large enough for most systems, if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems, a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
#endif
#else
      /* builtin_setjmp takes a pointer to 5 words.  */
      tmp = build_int_2 (5 * BITS_PER_WORD / POINTER_SIZE - 1, 0);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
	 runtime's jmp_buf has.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
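/* As a worked example (ours, for a hypothetical ILP32 target where
   word_mode and pointers are both 4 bytes): __prev occupies bytes 0-3,
   so the offsets computed above come out as sjlj_fc_call_site_ofs == 4,
   sjlj_fc_data_ofs == 8 (four words, bytes 8-23),
   sjlj_fc_personality_ofs == 24, sjlj_fc_lsda_ofs == 28 and
   sjlj_fc_jbuf_ofs == 32, modulo any extra alignment the jbuf field
   demands on a given target.  */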
void
init_eh_for_function (void)
{
  cfun->eh = ggc_alloc_cleared (sizeof (struct eh_status));
}
/* Routines to generate the exception tree somewhat directly.
   These are used from tree-eh.c when processing exception related
   nodes during tree optimization.  */

static struct eh_region *
gen_eh_region (enum eh_region_type type, struct eh_region *outer)
{
  struct eh_region *new;

#ifdef ENABLE_CHECKING
  if (! doing_eh (0))
    abort ();
#endif

  /* Insert a new blank region as a leaf in the tree.  */
  new = ggc_alloc_cleared (sizeof (*new));
  new->type = type;
  new->outer = outer;
  if (outer)
    {
      new->next_peer = outer->inner;
      outer->inner = new;
    }
  else
    {
      new->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new;
    }

  new->region_number = ++cfun->eh->last_region_number;

  return new;
}

struct eh_region *
gen_eh_region_cleanup (struct eh_region *outer, struct eh_region *prev_try)
{
  struct eh_region *cleanup = gen_eh_region (ERT_CLEANUP, outer);
  cleanup->u.cleanup.prev_try = prev_try;
  return cleanup;
}

struct eh_region *
gen_eh_region_try (struct eh_region *outer)
{
  return gen_eh_region (ERT_TRY, outer);
}

struct eh_region *
gen_eh_region_catch (struct eh_region *t, tree type_or_list)
{
  struct eh_region *c, *l;
  tree type_list, type_node;

  /* Ensure to always end up with a type list to normalize further
     processing, then register each type against the runtime types map.  */
  type_list = type_or_list;
  if (type_or_list)
    {
      if (TREE_CODE (type_or_list) != TREE_LIST)
	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
	add_type_for_runtime (TREE_VALUE (type_node));
    }

  c = gen_eh_region (ERT_CATCH, t->outer);
  c->u.catch.type_list = type_list;
  l = t->u.try.last_catch;
  c->u.catch.prev_catch = l;
  if (l)
    l->u.catch.next_catch = c;
  else
    t->u.try.catch = c;
  t->u.try.last_catch = c;

  return c;
}

struct eh_region *
gen_eh_region_allowed (struct eh_region *outer, tree allowed)
{
  struct eh_region *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
  region->u.allowed.type_list = allowed;

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  return region;
}

struct eh_region *
gen_eh_region_must_not_throw (struct eh_region *outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}

int
get_eh_region_number (struct eh_region *region)
{
  return region->region_number;
}

bool
get_eh_region_may_contain_throw (struct eh_region *region)
{
  return region->may_contain_throw;
}

tree
get_eh_region_tree_label (struct eh_region *region)
{
  return region->tree_label;
}

void
set_eh_region_tree_label (struct eh_region *region, tree lab)
{
  region->tree_label = lab;
}
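/* A sketch (ours, not a verbatim caller) of how a tree-level lowering
   pass might compose the routines above for a two-handler try
   statement, assuming TYPE1, LABEL1 and LABEL2 are trees supplied by
   the front end:

	struct eh_region *t = gen_eh_region_try (cfun->eh->cur_region);
	struct eh_region *c1 = gen_eh_region_catch (t, type1);
	struct eh_region *c2 = gen_eh_region_catch (t, NULL_TREE);
	set_eh_region_tree_label (c1, label1);
	set_eh_region_tree_label (c2, label2);

   Each gen_eh_region_catch call links the new ERT_CATCH region onto
   the try's u.try.catch/u.try.last_catch chain and registers the
   caught types with the runtime type map; a NULL_TREE type list makes
   a catch-all handler.  */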
/* Start an exception handling region.  All instructions emitted
   after this point are considered to be part of the region until
   expand_eh_region_end is invoked.  */

void
expand_eh_region_start (void)
{
  struct eh_region *new;
  rtx note;

  if (! doing_eh (0))
    return;

  new = gen_eh_region (ERT_UNKNOWN, cfun->eh->cur_region);
  cfun->eh->cur_region = new;

  /* Create a note marking the start of this region.  */
  note = emit_note (NOTE_INSN_EH_REGION_BEG);
  NOTE_EH_HANDLER (note) = new->region_number;
}

/* Common code to end a region.  Returns the region just ended.  */

static struct eh_region *
expand_eh_region_end (void)
{
  struct eh_region *cur_region = cfun->eh->cur_region;
  rtx note;

  /* Create a note marking the end of this region.  */
  note = emit_note (NOTE_INSN_EH_REGION_END);
  NOTE_EH_HANDLER (note) = cur_region->region_number;

  cfun->eh->cur_region = cur_region->outer;

  return cur_region;
}

/* Expand HANDLER, which is the operand 1 of a TRY_CATCH_EXPR.  Catch
   blocks and C++ exception-specifications are handled specially.  */

void
expand_eh_handler (tree handler)
{
  tree inner = expr_first (handler);

  switch (TREE_CODE (inner))
    {
    case CATCH_EXPR:
      expand_start_all_catch ();
      expand_expr (handler, const0_rtx, VOIDmode, 0);
      expand_end_all_catch ();
      break;

    case EH_FILTER_EXPR:
      if (EH_FILTER_MUST_NOT_THROW (handler))
	expand_eh_region_end_must_not_throw (EH_FILTER_FAILURE (handler));
      else
	expand_eh_region_end_allowed (EH_FILTER_TYPES (handler),
				      EH_FILTER_FAILURE (handler));
      break;

    default:
      expand_eh_region_end_cleanup (handler);
      break;
    }
}
/* End an exception handling region for a cleanup.  HANDLER is an
   expression to expand for the cleanup.  */

void
expand_eh_region_end_cleanup (tree handler)
{
  struct eh_region *region;
  tree protect_cleanup_actions;
  rtx around_label;
  rtx data_save[2];

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_CLEANUP;
  region->label = gen_label_rtx ();
  region->u.cleanup.exp = handler;
  region->u.cleanup.prev_try = cfun->eh->try_region;

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);

  if (flag_non_call_exceptions || region->may_contain_throw)
    {
      /* Give the language a chance to specify an action to be taken if an
	 exception is thrown that would propagate out of the HANDLER.  */
      protect_cleanup_actions
	= (lang_protect_cleanup_actions
	   ? (*lang_protect_cleanup_actions) ()
	   : NULL_TREE);

      if (protect_cleanup_actions)
	expand_eh_region_start ();

      /* In case this cleanup involves an inline destructor with a try block
	 in it, we need to save the EH return data registers around it.  */
      data_save[0] = gen_reg_rtx (ptr_mode);
      emit_move_insn (data_save[0], get_exception_pointer (cfun));
      data_save[1] = gen_reg_rtx (word_mode);
      emit_move_insn (data_save[1], get_exception_filter (cfun));

      expand_expr (handler, const0_rtx, VOIDmode, 0);

      emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
      emit_move_insn (cfun->eh->filter, data_save[1]);

      if (protect_cleanup_actions)
	expand_eh_region_end_must_not_throw (protect_cleanup_actions);

      /* We need any stack adjustment complete before the around_label.  */
      do_pending_stack_adjust ();
    }

  /* We delay the generation of the _Unwind_Resume until we generate
     landing pads.  We emit a marker here so as to get good control
     flow data in the meantime.  */
  region->resume
    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
  emit_barrier ();

  emit_label (around_label);
}

void
expand_resx_expr (tree exp)
{
  int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
  struct eh_region *reg = cfun->eh->region_array[region_nr];

  reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
  emit_barrier ();
}
/* End an exception handling region for a try block, and prepare
   for subsequent calls to expand_start_catch.  */

void
expand_start_all_catch (void)
{
  struct eh_region *region;

  if (! doing_eh (1))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_TRY;
  region->u.try.prev_try = cfun->eh->try_region;
  region->u.try.continue_label = gen_label_rtx ();

  cfun->eh->try_region = region;

  emit_jump (region->u.try.continue_label);
}

/* Begin a catch clause.  TYPE is the type caught, a list of such
   types, (in the case of Java) an ADDR_EXPR which points to the
   runtime type to match, or null if this is a catch-all
   clause.  Providing a type list makes it possible to associate the
   catch region with potentially several exception types, which is
   useful e.g. for Ada.  */

void
expand_start_catch (tree type_or_list)
{
  struct eh_region *c;
  rtx note;

  if (! doing_eh (0))
    return;

  c = gen_eh_region_catch (cfun->eh->try_region, type_or_list);
  cfun->eh->cur_region = c;

  c->label = gen_label_rtx ();
  emit_label (c->label);

  note = emit_note (NOTE_INSN_EH_REGION_BEG);
  NOTE_EH_HANDLER (note) = c->region_number;
}

/* End a catch clause.  Control will resume after the try/catch block.  */

void
expand_end_catch (void)
{
  if (! doing_eh (0))
    return;

  expand_eh_region_end ();
  emit_jump (cfun->eh->try_region->u.try.continue_label);
}

/* End a sequence of catch handlers for a try block.  */

void
expand_end_all_catch (void)
{
  struct eh_region *try_region;

  if (! doing_eh (0))
    return;

  try_region = cfun->eh->try_region;
  cfun->eh->try_region = try_region->u.try.prev_try;

  emit_label (try_region->u.try.continue_label);
}
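/* Taken together, the expanders above bracket the try body and each
   handler with region notes.  Schematically (our illustration), the
   rtl stream for "try { body } catch (T) { handler }" looks like:

	NOTE_INSN_EH_REGION_BEG   (try region N)
	  ... body ...
	NOTE_INSN_EH_REGION_END   (try region N)
	jump continue_label
	catch_label:
	NOTE_INSN_EH_REGION_BEG   (catch region)
	  ... handler ...
	NOTE_INSN_EH_REGION_END   (catch region)
	jump continue_label
	continue_label:

   convert_from_eh_region_ranges later turns these bracketing notes
   into per-insn REG_EH_REGION notes and deletes them.  */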
/* End an exception region for an exception type filter.  ALLOWED is a
   TREE_LIST of types to be matched by the runtime.  FAILURE is an
   expression to invoke if a mismatch occurs.

   ??? We could use these semantics for calls to rethrow, too; if we can
   see the surrounding catch clause, we know that the exception we're
   rethrowing satisfies the "filter" of the catch type.  */

void
expand_eh_region_end_allowed (tree allowed, tree failure)
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_ALLOWED_EXCEPTIONS;
  region->u.allowed.type_list = allowed;
  region->label = gen_label_rtx ();

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the correct
     region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
  /* We must adjust the stack before we reach the AROUND_LABEL because
     the call to FAILURE does not occur on all paths to the
     AROUND_LABEL.  */
  do_pending_stack_adjust ();

  emit_label (around_label);
}

/* End an exception region for a must-not-throw filter.  FAILURE is an
   expression to invoke if an uncaught exception propagates this far.

   This is conceptually identical to expand_eh_region_end_allowed with
   an empty allowed list (if you passed "std::terminate" instead of
   "__cxa_call_unexpected"), but they are represented differently in
   the C++ LSDA.  */

void
expand_eh_region_end_must_not_throw (tree failure)
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_MUST_NOT_THROW;
  region->label = gen_label_rtx ();

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the correct
     region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (around_label);
}

/* End an exception region for a throw.  No handling goes on here,
   but it's the easiest way for the front-end to indicate what type
   is being thrown.  */

void
expand_eh_region_end_throw (tree type)
{
  struct eh_region *region;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_THROW;
  region->u.throw.type = type;
}
/* End a fixup region.  Within this region the cleanups for the immediately
   enclosing region are _not_ run.  This is used for goto cleanup to avoid
   destroying an object twice.

   This would be an extraordinarily simple prospect, were it not for the
   fact that we don't actually know what the immediately enclosing region
   is.  This surprising fact is because expand_cleanups is currently
   generating a sequence that it will insert somewhere else.  We collect
   the proper notion of "enclosing" in convert_from_eh_region_ranges.  */

void
expand_eh_region_end_fixup (tree handler)
{
  struct eh_region *fixup;

  if (! doing_eh (0))
    return;

  fixup = expand_eh_region_end ();
  fixup->type = ERT_FIXUP;
  fixup->u.fixup.cleanup_exp = handler;
}

/* Note that the current EH region (if any) may contain a throw, or a
   call to a function which itself may contain a throw.  */

void
note_eh_region_may_contain_throw (struct eh_region *region)
{
  while (region && !region->may_contain_throw)
    {
      region->may_contain_throw = 1;
      region = region->outer;
    }
}

void
note_current_region_may_contain_throw (void)
{
  note_eh_region_may_contain_throw (cfun->eh->cur_region);
}

/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (struct function *fun)
{
  rtx exc_ptr = fun->eh->exc_ptr;
  if (fun == cfun && ! exc_ptr)
    {
      exc_ptr = gen_reg_rtx (ptr_mode);
      fun->eh->exc_ptr = exc_ptr;
    }
  return exc_ptr;
}

/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

rtx
get_exception_filter (struct function *fun)
{
  rtx filter = fun->eh->filter;
  if (fun == cfun && ! filter)
    {
      filter = gen_reg_rtx (word_mode);
      fun->eh->filter = filter;
    }
  return filter;
}
/* This section is for the exception handling specific optimization pass.  */

/* Random access the exception region tree.  It's just as simple to
   collect the regions this way as in expand_eh_region_start, but
   without having to realloc memory.  */

static void
collect_eh_region_array (void)
{
  struct eh_region **array, *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  array = ggc_alloc_cleared ((cfun->eh->last_region_number + 1)
			     * sizeof (*array));
  cfun->eh->region_array = array;

  while (1)
    {
      array[i->region_number] = i;

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do {
	    i = i->outer;
	    if (i == NULL)
	      return;
	  } while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}

/* Resolve the real region that FIXUP's code should live in.  */

static void
resolve_one_fixup_region (struct eh_region *fixup)
{
  struct eh_region *cleanup, *real;
  int j, n;

  n = cfun->eh->last_region_number;
  cleanup = 0;

  for (j = 1; j <= n; ++j)
    {
      cleanup = cfun->eh->region_array[j];
      if (cleanup && cleanup->type == ERT_CLEANUP
	  && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
	break;
    }
  if (j > n)
    abort ();

  real = cleanup->outer;
  if (real && real->type == ERT_FIXUP)
    {
      if (!real->u.fixup.resolved)
	resolve_one_fixup_region (real);

      real = real->u.fixup.real_region;
    }

  fixup->u.fixup.real_region = real;
  fixup->u.fixup.resolved = true;
}

static void
resolve_fixup_regions (void)
{
  int i, n = cfun->eh->last_region_number;

  for (i = 1; i <= n; ++i)
    {
      struct eh_region *fixup = cfun->eh->region_array[i];

      if (!fixup || fixup->type != ERT_FIXUP || fixup->u.fixup.resolved)
	continue;

      resolve_one_fixup_region (fixup);
    }
}
/* Now that we've discovered what region actually encloses a fixup,
   we can shuffle pointers and remove them from the tree.  */

static void
remove_fixup_regions (void)
{
  int i;
  rtx insn, note;
  struct eh_region *fixup;

  /* Walk the insn chain and adjust the REG_EH_REGION numbers
     for instructions referencing fixup regions.  This is only
     strictly necessary for fixup regions with no parent, but
     doesn't hurt to do it for all regions.  */
  for (insn = get_insns(); insn ; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (note = find_reg_note (insn, REG_EH_REGION, NULL))
	&& INTVAL (XEXP (note, 0)) > 0
	&& (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
	&& fixup->type == ERT_FIXUP)
      {
	if (fixup->u.fixup.real_region)
	  XEXP (note, 0) = GEN_INT (fixup->u.fixup.real_region->region_number);
	else
	  remove_note (insn, note);
      }

  /* Remove the fixup regions from the tree.  */
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      fixup = cfun->eh->region_array[i];
      if (! fixup)
	continue;

      /* Allow GC to maybe free some memory.  */
      if (fixup->type == ERT_CLEANUP)
	fixup->u.cleanup.exp = NULL_TREE;

      if (fixup->type != ERT_FIXUP)
	continue;

      if (fixup->inner)
	{
	  struct eh_region *parent, *p, **pp;

	  parent = fixup->u.fixup.real_region;

	  /* Fix up the children's parent pointers; find the end of
	     the list.  */
	  for (p = fixup->inner; ; p = p->next_peer)
	    {
	      p->outer = parent;
	      if (! p->next_peer)
		break;
	    }

	  /* In the tree of cleanups, only outer-inner ordering matters.
	     So link the children back in anywhere at the correct level.  */
	  if (parent)
	    pp = &parent->inner;
	  else
	    pp = &cfun->eh->region_tree;
	  p->next_peer = *pp;
	  *pp = fixup->inner;
	  fixup->inner = NULL;
	}

      remove_eh_handler (fixup);
    }
}
/* Remove all regions whose labels are not reachable from insns.  */

static void
remove_unreachable_regions (rtx insns)
{
  int i, *uid_region_num;
  bool *reachable;
  struct eh_region *r;
  rtx insn;

  uid_region_num = xcalloc (get_max_uid (), sizeof(int));
  reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof(bool));

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (!r || r->region_number != i)
	continue;

      if (r->resume)
	{
	  if (uid_region_num[INSN_UID (r->resume)])
	    abort ();
	  uid_region_num[INSN_UID (r->resume)] = i;
	}
      if (r->label)
	{
	  if (uid_region_num[INSN_UID (r->label)])
	    abort ();
	  uid_region_num[INSN_UID (r->label)] = i;
	}
    }

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    reachable[uid_region_num[INSN_UID (insn)]] = true;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (r && r->region_number == i && !reachable[i])
	{
	  bool kill_it = true;
	  switch (r->type)
	    {
	    case ERT_THROW:
	      /* Don't remove ERT_THROW regions if their outer region
		 is reachable.  */
	      if (r->outer && reachable[r->outer->region_number])
		kill_it = false;
	      break;

	    case ERT_MUST_NOT_THROW:
	      /* MUST_NOT_THROW regions are implementable solely in the
		 runtime, but their existence continues to affect calls
		 within that region.  Never delete them here.  */
	      kill_it = false;
	      break;

	    case ERT_TRY:
	      {
		/* A TRY region is reachable if any of its CATCH regions
		   are reachable.  */
		struct eh_region *c;
		for (c = r->u.try.catch; c ; c = c->u.catch.next_catch)
		  if (reachable[c->region_number])
		    {
		      kill_it = false;
		      break;
		    }
		break;
	      }

	    default:
	      break;
	    }

	  if (kill_it)
	    remove_eh_handler (r);
	}
    }

  free (uid_region_num);
  free (reachable);
}
/* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
   can_throw instruction in the region.  */

static void
convert_from_eh_region_ranges_1 (rtx *pinsns, int *orig_sp, int cur)
{
  int *sp = orig_sp;
  rtx insn, next;

  for (insn = *pinsns; insn ; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG
	      || kind == NOTE_INSN_EH_REGION_END)
	    {
	      if (kind == NOTE_INSN_EH_REGION_BEG)
		{
		  struct eh_region *r;

		  *sp++ = cur;
		  cur = NOTE_EH_HANDLER (insn);

		  r = cfun->eh->region_array[cur];
		  if (r->type == ERT_FIXUP)
		    {
		      r = r->u.fixup.real_region;
		      cur = r ? r->region_number : 0;
		    }
		  else if (r->type == ERT_CATCH)
		    {
		      r = r->outer;
		      cur = r ? r->region_number : 0;
		    }
		}
	      else
		cur = *--sp;

	      if (insn == *pinsns)
		*pinsns = next;
	      remove_insn (insn);
	    }
	}
      else if (INSN_P (insn))
	{
	  if (cur > 0
	      && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* Calls can always potentially throw exceptions, unless
		 they have a REG_EH_REGION note with a value of 0 or less.
		 Which should be the only possible kind so far.  */
	      && (GET_CODE (insn) == CALL_INSN
		  /* If we wanted exceptions for non-call insns, then
		     any may_trap_p instruction could throw.  */
		  || (flag_non_call_exceptions
		      && GET_CODE (PATTERN (insn)) != CLOBBER
		      && GET_CODE (PATTERN (insn)) != USE
		      && may_trap_p (PATTERN (insn)))))
	    REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
						REG_NOTES (insn));
	}
    }

  if (sp != orig_sp)
    abort ();
}
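/* After this pass the bracketing notes are gone and each
   potentially-throwing insn carries its region directly.  For
   instance (our illustration), a call inside region 3 ends up with

	(expr_list:REG_EH_REGION (const_int 3) ...)

   in its REG_NOTES, while a REG_EH_REGION value of 0 or less marks an
   insn whose exception cannot be caught within this function.  */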
/* Collect the rtl labels that correspond to the tree labels allocated
   during tree expansion.  */

static void
collect_rtl_labels_from_trees (void)
{
  int i, n = cfun->eh->last_region_number;
  for (i = 1; i <= n; ++i)
    {
      struct eh_region *reg = cfun->eh->region_array[i];
      if (reg && reg->tree_label)
	reg->label = DECL_RTL_IF_SET (reg->tree_label);
    }
}

void
convert_from_eh_region_ranges (void)
{
  rtx insns = get_insns ();

  if (cfun->eh->region_array)
    {
      /* If the region array already exists, assume we're coming from
	 optimize_function_tree.  In this case all we need to do is
	 collect the rtl labels that correspond to the tree labels
	 that we allocated earlier.  */
      collect_rtl_labels_from_trees ();
    }
  else
    {
      int *stack;

      collect_eh_region_array ();
      resolve_fixup_regions ();

      stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
      convert_from_eh_region_ranges_1 (&insns, stack, 0);
      free (stack);

      remove_fixup_regions ();
    }

  remove_unreachable_regions (insns);
}
static void
add_ehl_entry (rtx label, struct eh_region *region)
{
  struct ehl_map_entry **slot, *entry;

  LABEL_PRESERVE_P (label) = 1;

  entry = ggc_alloc (sizeof (*entry));
  entry->label = label;
  entry->region = region;

  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, entry, INSERT);

  /* Before landing pad creation, each exception handler has its own
     label.  After landing pad creation, the exception handlers may
     share landing pads.  This is ok, since maybe_remove_eh_handler
     only requires the 1-1 mapping before landing pad creation.  */
  if (*slot && !cfun->eh->built_landing_pads)
    abort ();

  *slot = entry;
}

void
find_exception_handler_labels (void)
{
  int i;

  if (cfun->eh->exception_handler_label_map)
    htab_empty (cfun->eh->exception_handler_label_map);
  else
    {
      /* ??? The expansion factor here (3/2) must be greater than the htab
	 occupancy factor (4/3) to avoid unnecessary resizing.  */
      cfun->eh->exception_handler_label_map
	= htab_create_ggc (cfun->eh->last_region_number * 3 / 2,
			   ehl_hash, ehl_eq, NULL);
    }

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx lab;

      if (! region || region->region_number != i)
	continue;
      if (cfun->eh->built_landing_pads)
	lab = region->landing_pad;
      else
	lab = region->label;

      if (lab)
	add_ehl_entry (lab, region);
    }

  /* For sjlj exceptions, need the return label to remain live until
     after landing pad generation.  */
  if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
    add_ehl_entry (return_label, NULL);
}

bool
current_function_has_exception_handlers (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];

      if (! region || region->region_number != i)
	continue;
      if (region->type != ERT_THROW)
	return true;
    }

  return false;
}
static struct eh_region *
duplicate_eh_region_1 (struct eh_region *o, struct inline_remap *map)
{
  struct eh_region *n = ggc_alloc_cleared (sizeof (struct eh_region));

  n->region_number = o->region_number + cfun->eh->last_region_number;
  n->type = o->type;

  switch (n->type)
    {
    case ERT_CLEANUP:
    case ERT_MUST_NOT_THROW:
      break;

    case ERT_TRY:
      if (o->u.try.continue_label)
	n->u.try.continue_label
	  = get_label_from_map (map,
				CODE_LABEL_NUMBER (o->u.try.continue_label));
      break;

    case ERT_CATCH:
      n->u.catch.type_list = o->u.catch.type_list;
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      n->u.allowed.type_list = o->u.allowed.type_list;
      break;

    case ERT_THROW:
      n->u.throw.type = o->u.throw.type;
      break;

    default:
      abort ();
    }

  if (o->label)
    n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
  if (o->resume)
    {
      n->resume = map->insn_map[INSN_UID (o->resume)];
      if (n->resume == NULL)
	abort ();
    }

  return n;
}

static void
duplicate_eh_region_2 (struct eh_region *o, struct eh_region **n_array)
{
  struct eh_region *n = n_array[o->region_number];

  switch (n->type)
    {
    case ERT_TRY:
      n->u.try.catch = n_array[o->u.try.catch->region_number];
      n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
      break;

    case ERT_CATCH:
      if (o->u.catch.next_catch)
	n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
      if (o->u.catch.prev_catch)
	n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
      break;

    default:
      break;
    }

  if (o->outer)
    n->outer = n_array[o->outer->region_number];
  if (o->inner)
    n->inner = n_array[o->inner->region_number];
  if (o->next_peer)
    n->next_peer = n_array[o->next_peer->region_number];
}

int
duplicate_eh_regions (struct function *ifun, struct inline_remap *map)
{
  int ifun_last_region_number = ifun->eh->last_region_number;
  struct eh_region **n_array, *root, *cur;
  int i;

  if (ifun_last_region_number == 0)
    return 0;

  n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));

  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      n_array[i] = duplicate_eh_region_1 (cur, map);
    }
  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      duplicate_eh_region_2 (cur, n_array);
    }

  root = n_array[ifun->eh->region_tree->region_number];
  cur = cfun->eh->cur_region;
  if (cur)
    {
      struct eh_region *p = cur->inner;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cur->inner = root;

      for (i = 1; i <= ifun_last_region_number; ++i)
	if (n_array[i] && n_array[i]->outer == NULL)
	  n_array[i]->outer = cur;
    }
  else
    {
      struct eh_region *p = cfun->eh->region_tree;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cfun->eh->region_tree = root;
    }

  free (n_array);

  i = cfun->eh->last_region_number;
  cfun->eh->last_region_number = i + ifun_last_region_number;
  return i;
}
static int
t2r_eq (const void *pentry, const void *pdata)
{
  tree entry = (tree) pentry;
  tree data = (tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (const void *pentry)
{
  tree entry = (tree) pentry;
  return TREE_HASH (TREE_PURPOSE (entry));
}

static void
add_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TREE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}

static tree
lookup_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TREE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}

/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter GTY(())
{
  tree t;
  int filter;
};
/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  tree data = (tree) pdata;

  return entry->t == data;
}

static hashval_t
ttypes_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TREE_HASH (entry->t);
}

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}
/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (htab_t ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = xmalloc (sizeof (*n));
      n->t = type;
      n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
      *slot = n;

      VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
    }

  return n->filter;
}
/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = xmalloc (sizeof (*n));
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
      *slot = n;

      /* Look up each type in the list and encode its filter
	 value as a uleb128.  Terminate the list with 0.  */
      for (; list ; list = TREE_CHAIN (list))
	push_uleb128 (&cfun->eh->ehspec_data,
		      add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
      VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
    }

  return n->filter;
}
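/* For reference, uleb128 emits seven bits per byte, least significant
   group first, setting the high bit of every byte but the last.  A
   minimal sketch of ours of what push_uleb128 does (the definition
   appears later in this file, alongside push_sleb128):

	do
	  {
	    unsigned char byte = value & 0x7f;
	    value >>= 7;
	    if (value)
	      byte |= 0x80;
	    VARRAY_PUSH_UCHAR (*data_area, byte);
	  }
	while (value);

   So filter 1 encodes as the single byte 0x01, and 624485 encodes as
   0xE5 0x8E 0x26.  */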
/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values (void)
{
  int i;
  htab_t ttypes, ehspec;

  VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
  VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r = cfun->eh->region_array[i];

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
	continue;

      switch (r->type)
	{
	case ERT_CATCH:
	  /* Whatever type_list is (NULL or true list), we build a list
	     of filters for the region.  */
	  r->u.catch.filter_list = NULL_TREE;

	  if (r->u.catch.type_list != NULL)
	    {
	      /* Get a filter value for each of the types caught and store
		 them in the region's dedicated list.  */
	      tree tp_node = r->u.catch.type_list;

	      for (;tp_node; tp_node = TREE_CHAIN (tp_node))
		{
		  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
		  tree flt_node = build_int_2 (flt, 0);

		  r->u.catch.filter_list
		    = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
		}
	    }
	  else
	    {
	      /* Get a filter value for the NULL list also since it will need
		 an action record anyway.  */
	      int flt = add_ttypes_entry (ttypes, NULL);
	      tree flt_node = build_int_2 (flt, 0);

	      r->u.catch.filter_list
		= tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
	    }
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}
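/* A worked example (ours): for "catch (A) ... catch (B)" plus an
   exception specification throw(A), the loop above would typically
   assign A filter 1 and B filter 2, leaving

	ttype_data  = { A, B }
	ehspec_data = { 0x01, 0x00 }	-- uleb128 "A", then terminator

   so the catch regions get filter_list (1) and (2) respectively, and
   the ERT_ALLOWED_EXCEPTIONS region gets filter -1, the negated
   1-based byte offset of its entry in ehspec_data.  The exact filter
   numbers depend on the order regions are visited.  */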
/* Emit SEQ into the basic block just before INSN (which is assumed to
   be the first instruction of some existing BB), and return the newly
   produced block.  */

static basic_block
emit_to_new_bb_before (rtx seq, rtx insn)
{
  rtx last;
  basic_block bb;
  edge e;

  /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
     call), we don't want it to go into newly created landing pad or other EH
     construct.  */
  for (e = BLOCK_FOR_INSN (insn)->pred; e; e = e->pred_next)
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
  last = emit_insn_before (seq, insn);
  if (GET_CODE (last) == BARRIER)
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}
/* Generate the code to actually handle exceptions, which will follow the
   landing pads.  */

static void
build_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      switch (region->type)
	{
	case ERT_TRY:
	  /* ??? Collect the set of all non-overlapping catch handlers
	       all the way up the chain until blocked by a cleanup.  */
	  /* ??? Outer try regions can share landing pads with inner
	     try regions if the types are completely non-overlapping,
	     and there are no intervening cleanups.  */

	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  /* ??? It is mighty inconvenient to call back into the
	     switch statement generation code in expand_end_case.
	     Rapid prototyping sez a sequence of ifs.  */
	  {
	    struct eh_region *c;
	    for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
	      {
		if (c->u.catch.type_list == NULL)
		  emit_jump (c->label);
		else
		  {
		    /* We need one cmp/jump per type caught.  Each type
		       list entry has a matching entry in the filter list
		       (see assign_filter_values).  */
		    tree tp_node = c->u.catch.type_list;
		    tree flt_node = c->u.catch.filter_list;

		    for (; tp_node; )
		      {
			emit_cmp_and_jump_insns
			  (cfun->eh->filter,
			   GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
			   EQ, NULL_RTX, word_mode, 0, c->label);

			tp_node = TREE_CHAIN (tp_node);
			flt_node = TREE_CHAIN (flt_node);
		      }
		  }
	      }
	  }

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_to_new_bb_before (seq, region->u.try.catch->label);

	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  emit_cmp_and_jump_insns (cfun->eh->filter,
				   GEN_INT (region->u.allowed.filter),
				   EQ, NULL_RTX, word_mode, 0, region->label);

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_to_new_bb_before (seq, region->label);
	  break;

	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  region->post_landing_pad = region->label;
	  break;

	case ERT_CATCH:
	case ERT_THROW:
	  /* Nothing to do.  */
	  break;

	default:
	  abort ();
	}
    }
}
/* Replace RESX patterns with jumps to the next handler if any, or calls to
   _Unwind_Resume otherwise.  */

static void
connect_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      struct eh_region *outer;
      rtx seq;
      rtx barrier;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      /* If there is no RESX, or it has been deleted by flow, there's
	 nothing to fix up.  */
      if (! region->resume || INSN_DELETED_P (region->resume))
	continue;

      /* Search for another landing pad in this function.  */
      for (outer = region->outer; outer ; outer = outer->outer)
	if (outer->post_landing_pad)
	  break;

      start_sequence ();

      if (outer)
	{
	  edge e;
	  basic_block src, dest;

	  emit_jump (outer->post_landing_pad);
	  src = BLOCK_FOR_INSN (region->resume);
	  dest = BLOCK_FOR_INSN (outer->post_landing_pad);
	  while (src->succ)
	    remove_edge (src->succ);
	  e = make_edge (src, dest, 0);
	  e->probability = REG_BR_PROB_BASE;
	  e->count = src->count;
	}
      else
	{
	  emit_library_call (unwind_resume_libfunc, LCT_THROW,
			     VOIDmode, 1, cfun->eh->exc_ptr, ptr_mode);

	  /* What we just emitted was a throwing libcall, so it got a
	     barrier automatically added after it.  If the last insn in
	     the libcall sequence isn't the barrier, it's because the
	     target emits multiple insns for a call, and there are insns
	     after the actual call insn (which are redundant and would be
	     optimized away).  The barrier is inserted exactly after the
	     call insn, so let's go get that and delete the insns after
	     it, because below we need the barrier to be the last insn in
	     the sequence.  */
	  delete_insns_since (NEXT_INSN (last_call_insn ()));
	}

      seq = get_insns ();
      end_sequence ();
      barrier = emit_insn_before (seq, region->resume);
      /* Avoid duplicate barrier.  */
      if (GET_CODE (barrier) != BARRIER)
	abort ();
      delete_insn (barrier);
      delete_insn (region->resume);

      /* ??? From tree-ssa we can wind up with catch regions whose
	 label is not instantiated, but whose resx is present.  Now
	 that we've dealt with the resx, kill the region.  */
      if (region->label == NULL && region->type == ERT_CLEANUP)
	remove_eh_handler (region);
    }
}
static void
dw2_build_landing_pads (void)
{
  int i;
  basic_block bb;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;
      edge e;
      bool clobbers_hard_regs = false;
      unsigned int j;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      if (region->type != ERT_CLEANUP
	  && region->type != ERT_TRY
	  && region->type != ERT_ALLOWED_EXCEPTIONS)
	continue;

      start_sequence ();

      region->landing_pad = gen_label_rtx ();
      emit_label (region->landing_pad);

#ifdef HAVE_exception_receiver
      if (HAVE_exception_receiver)
	emit_insn (gen_exception_receiver ());
      else
#endif
#ifdef HAVE_nonlocal_goto_receiver
	if (HAVE_nonlocal_goto_receiver)
	  emit_insn (gen_nonlocal_goto_receiver ());
	else
#endif
	  { /* Nothing */ }

      /* If the eh_return data registers are call-saved, then we
	 won't have considered them clobbered from the call that
	 threw.  Kill them now.  */
      for (j = 0; ; ++j)
	{
	  unsigned r = EH_RETURN_DATA_REGNO (j);
	  if (r == INVALID_REGNUM)
	    break;
	  if (! call_used_regs[r])
	    {
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
	      clobbers_hard_regs = true;
	    }
	}

      if (clobbers_hard_regs)
	{
	  /* @@@ This is a kludge.  Not all machine descriptions define a
	     blockage insn, but we must not allow the code we just generated
	     to be reordered by scheduling.  So emit an ASM_INPUT to act as
	     blockage insn.  */
	  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
	}

      emit_move_insn (cfun->eh->exc_ptr,
		      gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
      emit_move_insn (cfun->eh->filter,
		      gen_rtx_REG (word_mode, EH_RETURN_DATA_REGNO (1)));

      seq = get_insns ();
      end_sequence ();

      bb = emit_to_new_bb_before (seq, region->post_landing_pad);
      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
    }
}
struct sjlj_lp_info
{
  int directly_reachable;
  int action_index;
  int dispatch_index;
  int call_site_index;
};

static bool
sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
{
  rtx insn;
  bool found_one = false;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      enum reachable_code rc;
      tree type_thrown;
      rtx note;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
	continue;

      region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

      type_thrown = NULL_TREE;
      if (region->type == ERT_THROW)
	{
	  type_thrown = region->u.throw.type;
	  region = region->outer;
	}

      /* Find the first containing region that might handle the exception.
	 That's the landing pad to which we will transfer control.  */
      rc = RNL_NOT_CAUGHT;
      for (; region; region = region->outer)
	{
	  rc = reachable_next_level (region, type_thrown, NULL);
	  if (rc != RNL_NOT_CAUGHT)
	    break;
	}
      if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
	{
	  lp_info[region->region_number].directly_reachable = 1;
	  found_one = true;
	}
    }

  return found_one;
}
static void
sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  htab_t ar_hash;
  int i, index;

  /* First task: build the action table.  */

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	struct eh_region *r = cfun->eh->region_array[i];
	r->landing_pad = dispatch_label;
	lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
	if (lp_info[i].action_index != -1)
	  cfun->uses_eh_lsda = 1;
      }

  htab_delete (ar_hash);

  /* Next: assign dispatch values.  In dwarf2 terms, this would be the
     landing pad label for the region.  For sjlj though, there is one
     common landing pad from which we dispatch to the post-landing pads.

     A region receives a dispatch index if it is directly reachable
     and requires in-function processing.  Regions that share post-landing
     pads may share dispatch indices.  */
  /* ??? Post-landing pad sharing doesn't actually happen at the moment
     (see build_post_landing_pads) so we don't bother checking for it.  */

  index = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      lp_info[i].dispatch_index = index++;

  /* Finally: assign call-site values.  In dwarf2 terms, this would be
     the region number assigned by convert_to_eh_region_ranges, but
     handles no-action and must-not-throw differently.  */

  call_site_base = 1;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	int action = lp_info[i].action_index;

	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  index = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  index = -1;
	/* Otherwise, look it up in the table.  */
	else
	  index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);

	lp_info[i].call_site_index = index;
      }
}
static void
sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
{
  int last_call_site = -2;
  rtx insn, mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      int this_call_site;
      rtx note, before, p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (GET_CODE (insn) == CODE_LABEL)
	last_call_site = -2;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note)
	{
	  /* Calls (and trapping insns) without notes are outside any
	     exception handling region in this function.  Mark them as
	     no action.  */
	  if (GET_CODE (insn) == CALL_INSN
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))))
	    this_call_site = -1;
	  else
	    continue;
	}
      else
	{
	  /* Calls that are known to not throw need not be marked.  */
	  if (INTVAL (XEXP (note, 0)) <= 0)
	    continue;

	  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
	  this_call_site = lp_info[region->region_number].call_site_index;
	}

      if (this_call_site == last_call_site)
	continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (GET_CODE (insn) == CALL_INSN)
	before = find_first_parameter_load (insn, NULL_RTX);

      start_sequence ();
      mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
			    sjlj_fc_call_site_ofs);
      emit_move_insn (mem, GEN_INT (this_call_site));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}
/* Construct the SjLj_Function_Context.  */

static void
sjlj_emit_function_enter (rtx dispatch_label)
{
  rtx fn_begin, fc, mem, seq;

  fc = cfun->eh->sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we can not depend on emit_library_call to do it for us.  */
  assemble_external_libcall (eh_personality_libfunc);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, eh_personality_libfunc);

  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (cfun->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

#ifdef DONT_USE_BUILTIN_SETJMP
  {
    rtx x, note;
    x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
				 TYPE_MODE (integer_type_node), 1,
				 plus_constant (XEXP (fc, 0),
						sjlj_fc_jbuf_ofs), Pmode);

    note = emit_note (NOTE_INSN_EXPECTED_VALUE);
    NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);

    emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
			     TYPE_MODE (integer_type_node), 0, dispatch_label);
  }
#else
  expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
			       dispatch_label);
#endif

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (GET_CODE (fn_begin) == NOTE
	&& (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG
	    || NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_BASIC_BLOCK))
      break;
  if (NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
    insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
  else
    {
      rtx last = BB_END (ENTRY_BLOCK_PTR->succ->dest);
      for (; ; fn_begin = NEXT_INSN (fn_begin))
	if ((GET_CODE (fn_begin) == NOTE
	     && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
	    || fn_begin == last)
	  break;
      emit_insn_after (seq, fn_begin);
    }
}
/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  */

void
sjlj_emit_function_exit_after (rtx after)
{
  cfun->eh->sjlj_exit_after = after;
}

static void
sjlj_emit_function_exit (void)
{
  rtx seq;
  edge e;

  start_sequence ();

  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
    if (e->flags & EDGE_FALLTHRU)
      break;
  if (e)
    {
      rtx insn;

      /* Figure out whether the place we are supposed to insert the
	 libcall is inside the last basic block or after it.  In the
	 other case we need to emit it to the edge.  */
      if (e->src->next_bb != EXIT_BLOCK_PTR)
	abort ();
      for (insn = NEXT_INSN (BB_END (e->src)); insn; insn = NEXT_INSN (insn))
	if (insn == cfun->eh->sjlj_exit_after)
	  break;
      if (insn)
	insert_insn_on_edge (seq, e);
      else
	{
	  insn = cfun->eh->sjlj_exit_after;
	  if (GET_CODE (insn) == CODE_LABEL)
	    insn = NEXT_INSN (insn);
	  emit_insn_after (seq, insn);
	}
    }
}
2400 sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
2402 int i, first_reachable;
2403 rtx mem, dispatch, seq, fc;
2408 fc = cfun->eh->sjlj_fc;
2412 emit_label (dispatch_label);
2414 #ifndef DONT_USE_BUILTIN_SETJMP
2415 expand_builtin_setjmp_receiver (dispatch_label);
2418 /* Load up dispatch index, exc_ptr and filter values from the
2419 function context. */
2420 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2421 sjlj_fc_call_site_ofs);
2422 dispatch = copy_to_reg (mem);
2424 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
2425 if (word_mode != ptr_mode)
2427 #ifdef POINTERS_EXTEND_UNSIGNED
2428 mem = convert_memory_address (ptr_mode, mem);
2430 mem = convert_to_mode (ptr_mode, mem, 0);
2433 emit_move_insn (cfun->eh->exc_ptr, mem);
2435 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
2436 emit_move_insn (cfun->eh->filter, mem);
2438 /* Jump to one of the directly reachable regions. */
2439 /* ??? This really ought to be using a switch statement. */
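/* The loop below emits, in effect (a sketch of the generated code,
   not literal source):

     if (dispatch == dispatch_index[i]) goto post_landing_pad[i];
     ... one comparison per directly reachable region ...

   with the comparison for FIRST_REACHABLE omitted, since the whole
   sequence is placed immediately before that region's post landing
   pad and control simply falls through to it.  */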
2441 first_reachable = 0;
2442 for (i = cfun->eh->last_region_number; i > 0; --i)
2444 if (! lp_info[i].directly_reachable)
2447 if (! first_reachable)
2449 first_reachable = i;
2453 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2454 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
2455 cfun->eh->region_array[i]->post_landing_pad);
2461 before = cfun->eh->region_array[first_reachable]->post_landing_pad;
2463 bb = emit_to_new_bb_before (seq, before);
2464 e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
2465 e->count = bb->count;
2466 e->probability = REG_BR_PROB_BASE;
2470 sjlj_build_landing_pads (void)
2472 struct sjlj_lp_info *lp_info;
2474 lp_info = xcalloc (cfun->eh->last_region_number + 1,
2475 sizeof (struct sjlj_lp_info));
2477 if (sjlj_find_directly_reachable_regions (lp_info))
2479 rtx dispatch_label = gen_label_rtx ();
2482 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2483 int_size_in_bytes (sjlj_fc_type_node),
2484 TYPE_ALIGN (sjlj_fc_type_node));
2486 sjlj_assign_call_site_values (dispatch_label, lp_info);
2487 sjlj_mark_call_sites (lp_info);
2489 sjlj_emit_function_enter (dispatch_label);
2490 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2491 sjlj_emit_function_exit ();
2498 finish_eh_generation (void)
2502 /* Nothing to do if no regions created. */
2503 if (cfun->eh->region_tree == NULL)
2506 /* The object here is to provide find_basic_blocks with detailed
2507 information (via reachable_handlers) on how exception control
2508 flows within the function. In this first pass, we can include
2509 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2510 regions, and hope that it will be useful in deleting unreachable
2511 handlers. Subsequently, we will generate landing pads which will
2512 connect many of the handlers, and then type information will not
2513 be effective. Still, this is a win over previous implementations. */
2515 cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
2517 /* These registers are used by the landing pads. Make sure they
2518 have been generated. */
2519 get_exception_pointer (cfun);
2520 get_exception_filter (cfun);
2522 /* Construct the landing pads. */
2524 assign_filter_values ();
2525 build_post_landing_pads ();
2526 connect_post_landing_pads ();
2527 if (USING_SJLJ_EXCEPTIONS)
2528 sjlj_build_landing_pads ();
2530 dw2_build_landing_pads ();
2532 cfun->eh->built_landing_pads = 1;
2534 /* We've totally changed the CFG. Start over. */
2535 find_exception_handler_labels ();
2536 break_superblocks ();
2537 if (USING_SJLJ_EXCEPTIONS)
2538 commit_edge_insertions ();
2543 for (e = bb->succ; e; e = next)
2545 next = e->succ_next;
2546 if (e->flags & EDGE_EH)
2553 rtl_make_eh_edge (NULL, bb, BB_END (bb));
2555 cleanup_cfg (CLEANUP_PRE_LOOP | CLEANUP_NO_INSN_DEL);
2559 ehl_hash (const void *pentry)
2561 struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
2563 /* 2^32 * ((sqrt(5) - 1) / 2) */
2564 const hashval_t scaled_golden_ratio = 0x9e3779b9;
2565 return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
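/* This is multiplicative (Fibonacci) hashing: multiplying by a
   constant derived from the golden ratio scatters consecutive label
   numbers across the hash space.  A minimal standalone sketch, with
   hash_label_number a hypothetical name:

     static hashval_t
     hash_label_number (int n)
     {
       return (hashval_t) n * 0x9e3779b9;
     }

   Consecutive inputs 1, 2, 3 hash to 0x9e3779b9, 0x3c6ef372 and
   0xdaa66d2b, which are already well spread.  */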
2569 ehl_eq (const void *pentry, const void *pdata)
2571 struct ehl_map_entry *entry = (struct ehl_map_entry *) pentry;
2572 struct ehl_map_entry *data = (struct ehl_map_entry *) pdata;
2574 return entry->label == data->label;
2577 /* This section handles removing dead code for flow. */
2579 /* Remove LABEL from exception_handler_label_map. */
2582 remove_exception_handler_label (rtx label)
2584 struct ehl_map_entry **slot, tmp;
2586 /* If exception_handler_label_map was not built yet,
2587 there is nothing to do. */
2588 if (cfun->eh->exception_handler_label_map == NULL)
2592 slot = (struct ehl_map_entry **)
2593 htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
2597 htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
2600 /* Splice REGION out of the region tree and update the region array
and exception handler label map accordingly. */
2603 remove_eh_handler (struct eh_region *region)
2605 struct eh_region **pp, **pp_start, *p, *outer, *inner;
2608 /* For the benefit of efficiently handling REG_EH_REGION notes,
2609 replace this region in the region array with its containing
2610 region. Note that previous region deletions may result in
2611 multiple copies of this region in the array, so we have a
2612 list of alternate numbers by which we are known. */
2614 outer = region->outer;
2615 cfun->eh->region_array[region->region_number] = outer;
2619 EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i,
2620 { cfun->eh->region_array[i] = outer; });
2626 outer->aka = BITMAP_GGC_ALLOC ();
2628 bitmap_a_or_b (outer->aka, outer->aka, region->aka);
2629 bitmap_set_bit (outer->aka, region->region_number);
2632 if (cfun->eh->built_landing_pads)
2633 lab = region->landing_pad;
2635 lab = region->label;
2637 remove_exception_handler_label (lab);
2640 pp_start = &outer->inner;
2642 pp_start = &cfun->eh->region_tree;
2643 for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
2645 *pp = region->next_peer;
2647 inner = region->inner;
2650 for (p = inner; p->next_peer ; p = p->next_peer)
2654 p->next_peer = *pp_start;
2658 if (region->type == ERT_CATCH)
2660 struct eh_region *try, *next, *prev;
2662 for (try = region->next_peer;
2663 try->type == ERT_CATCH;
2664 try = try->next_peer)
2666 if (try->type != ERT_TRY)
2669 next = region->u.catch.next_catch;
2670 prev = region->u.catch.prev_catch;
2673 next->u.catch.prev_catch = prev;
2675 try->u.try.last_catch = prev;
2677 prev->u.catch.next_catch = next;
2680 try->u.try.catch = next;
2682 remove_eh_handler (try);
2687 /* LABEL heads a basic block that is about to be deleted. If this
2688 label corresponds to an exception region, we may be able to
2689 delete the region. */
2692 maybe_remove_eh_handler (rtx label)
2694 struct ehl_map_entry **slot, tmp;
2695 struct eh_region *region;
2697 /* ??? After generating landing pads, it's not so simple to determine
2698 if the region data is completely unused. One must examine the
2699 landing pad and the post landing pad, and whether an inner try block
2700 is referencing the catch handlers directly. */
2701 if (cfun->eh->built_landing_pads)
2705 slot = (struct ehl_map_entry **)
2706 htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
2709 region = (*slot)->region;
2713 /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2714 because there is no path to the fallback call to terminate.
2715 But the region continues to affect call-site data until there
2716 are no more contained calls, which we don't see here. */
2717 if (region->type == ERT_MUST_NOT_THROW)
2719 htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
2720 region->label = NULL_RTX;
2723 remove_eh_handler (region);
2726 /* Invokes CALLBACK for every exception handler label. Only used by old
2727 loop hackery; should not be used by new code. */
2730 for_each_eh_label (void (*callback) (rtx))
2732 htab_traverse (cfun->eh->exception_handler_label_map, for_each_eh_label_1,
2733 (void *) &callback);
2737 for_each_eh_label_1 (void **pentry, void *data)
2739 struct ehl_map_entry *entry = *(struct ehl_map_entry **)pentry;
2740 void (*callback) (rtx) = *(void (**) (rtx)) data;
2742 (*callback) (entry->label);
2746 /* This section describes CFG exception edges for flow. */
2748 /* For communicating between calls to reachable_next_level. */
2749 struct reachable_info
2753 void (*callback) (struct eh_region *, void *);
2754 void *callback_data;
2755 bool saw_any_handlers;
2758 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2759 base class of TYPE, is in HANDLED. */
2762 check_handled (tree handled, tree type)
2766 /* We can check for exact matches without front-end help. */
2767 if (! lang_eh_type_covers)
2769 for (t = handled; t ; t = TREE_CHAIN (t))
2770 if (TREE_VALUE (t) == type)
2775 for (t = handled; t ; t = TREE_CHAIN (t))
2776 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2783 /* A subroutine of reachable_next_level. If we are collecting a list
2784 of handlers, add one. After landing pad generation, reference
2785 it instead of the handlers themselves. Further, the handlers are
2786 all wired together, so by referencing one, we've got them all.
2787 Before landing pad generation we reference each handler individually.
2789 LP_REGION contains the landing pad; REGION is the handler. */
2792 add_reachable_handler (struct reachable_info *info,
2793 struct eh_region *lp_region, struct eh_region *region)
2798 info->saw_any_handlers = true;
2800 if (cfun->eh->built_landing_pads)
2801 info->callback (lp_region, info->callback_data);
2803 info->callback (region, info->callback_data);
2806 /* Process one level of exception regions for reachability.
2807 If TYPE_THROWN is non-null, then it is the *exact* type being
2808 propagated. If INFO is non-null, then collect handler labels
2809 and caught/allowed type information between invocations. */
2811 static enum reachable_code
2812 reachable_next_level (struct eh_region *region, tree type_thrown,
2813 struct reachable_info *info)
2815 switch (region->type)
2818 /* Before landing-pad generation, we model control flow
2819 directly to the individual handlers. In this way we can
2820 see that catch handler types may shadow one another. */
2821 add_reachable_handler (info, region, region);
2822 return RNL_MAYBE_CAUGHT;
2826 struct eh_region *c;
2827 enum reachable_code ret = RNL_NOT_CAUGHT;
2829 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
2831 /* A catch-all handler ends the search. */
2832 if (c->u.catch.type_list == NULL)
2834 add_reachable_handler (info, region, c);
2840 /* If we have at least one type match, end the search. */
2841 tree tp_node = c->u.catch.type_list;
2843 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2845 tree type = TREE_VALUE (tp_node);
2847 if (type == type_thrown
2848 || (lang_eh_type_covers
2849 && (*lang_eh_type_covers) (type, type_thrown)))
2851 add_reachable_handler (info, region, c);
2856 /* If we have definitive information that the match fails,
2857 the catch won't trigger. */
2858 if (lang_eh_type_covers)
2859 return RNL_NOT_CAUGHT;
2862 /* At this point, we either don't know what type is thrown or
2863 don't have front-end assistance to help deciding if it is
2864 covered by one of the types in the list for this region.
2866 We'd then like to add this region to the list of reachable
2867 handlers since it is indeed potentially reachable based on the
2868 information we have.
2870 Actually, this handler is for sure not reachable if all the
2871 types it matches have already been caught. That is, it is only
2872 potentially reachable if at least one of the types it catches
2873 has not been previously caught. */
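/* As a concrete C++ illustration (not compiler source): given

     try { ... }
     catch (Derived &) { ... }
     catch (Base &) { ... }

   once Derived has been added to types_caught, a hypothetical later
   handler matching only Derived would be unreachable, while the Base
   handler remains potentially reachable, since it may still catch
   something Derived did not cover.  */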
2876 ret = RNL_MAYBE_CAUGHT;
2879 tree tp_node = c->u.catch.type_list;
2880 bool maybe_reachable = false;
2882 /* Compute the potential reachability of this handler and
2883 update the list of types caught at the same time. */
2884 for (; tp_node; tp_node = TREE_CHAIN (tp_node))
2886 tree type = TREE_VALUE (tp_node);
2888 if (! check_handled (info->types_caught, type))
2891 = tree_cons (NULL, type, info->types_caught);
2893 maybe_reachable = true;
2897 if (maybe_reachable)
2899 add_reachable_handler (info, region, c);
2901 /* ??? If the catch type is a base class of every allowed
2902 type, then we know we can stop the search. */
2903 ret = RNL_MAYBE_CAUGHT;
2911 case ERT_ALLOWED_EXCEPTIONS:
2912 /* An empty list of types definitely ends the search. */
2913 if (region->u.allowed.type_list == NULL_TREE)
2915 add_reachable_handler (info, region, region);
2919 /* Collect a list of lists of allowed types for use in detecting
2920 when a catch may be transformed into a catch-all. */
2922 info->types_allowed = tree_cons (NULL_TREE,
2923 region->u.allowed.type_list,
2924 info->types_allowed);
2926 /* If we have definitive information about the type hierarchy,
2927 then we can tell if the thrown type will pass through the
2928 filter. */
2929 if (type_thrown && lang_eh_type_covers)
2931 if (check_handled (region->u.allowed.type_list, type_thrown))
2932 return RNL_NOT_CAUGHT;
2935 add_reachable_handler (info, region, region);
2940 add_reachable_handler (info, region, region);
2941 return RNL_MAYBE_CAUGHT;
2944 /* Catch regions are handled by their controlling try region. */
2945 return RNL_NOT_CAUGHT;
2947 case ERT_MUST_NOT_THROW:
2948 /* Here we end our search, since no exceptions may propagate.
2949 If we've touched down at some landing pad previously, then the
2950 explicit function call we generated may be used. Otherwise
2951 the call is made by the runtime. */
2952 if (info && info->saw_any_handlers)
2954 add_reachable_handler (info, region, region);
2963 /* Shouldn't see these here. */
2970 /* Invoke CALLBACK on each region reachable from REGION_NUMBER. */
2973 foreach_reachable_handler (int region_number, bool is_resx,
2974 void (*callback) (struct eh_region *, void *),
2975 void *callback_data)
2977 struct reachable_info info;
2978 struct eh_region *region;
2981 memset (&info, 0, sizeof (info));
2982 info.callback = callback;
2983 info.callback_data = callback_data;
2985 region = cfun->eh->region_array[region_number];
2987 type_thrown = NULL_TREE;
2990 /* A RESX leaves a region instead of entering it. Thus the
2991 region itself may have been deleted out from under us. */
2994 region = region->outer;
2996 else if (region->type == ERT_THROW)
2998 type_thrown = region->u.throw.type;
2999 region = region->outer;
3004 if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
3006 /* If we have processed one cleanup, there is no point in
3007 processing any more of them. Each cleanup will have an edge
3008 to the next outer cleanup region, so the flow graph will be
3009 accurate. */
3010 if (region->type == ERT_CLEANUP)
3011 region = region->u.cleanup.prev_try;
3013 region = region->outer;
3017 /* Retrieve a list of labels of exception handlers which can be
3018 reached by a given insn. */
3021 arh_to_landing_pad (struct eh_region *region, void *data)
3023 rtx *p_handlers = data;
3025 *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
3029 arh_to_label (struct eh_region *region, void *data)
3031 rtx *p_handlers = data;
3032 *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
3036 reachable_handlers (rtx insn)
3038 bool is_resx = false;
3039 rtx handlers = NULL;
3042 if (GET_CODE (insn) == JUMP_INSN
3043 && GET_CODE (PATTERN (insn)) == RESX)
3045 region_number = XINT (PATTERN (insn), 0);
3050 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3051 if (!note || INTVAL (XEXP (note, 0)) <= 0)
3053 region_number = INTVAL (XEXP (note, 0));
3056 foreach_reachable_handler (region_number, is_resx,
3057 (cfun->eh->built_landing_pads
3058 ? arh_to_landing_pad
3065 /* Determine if the given INSN can throw an exception that is caught
3066 within the function. */
3069 can_throw_internal_1 (int region_number)
3071 struct eh_region *region;
3074 region = cfun->eh->region_array[region_number];
3076 type_thrown = NULL_TREE;
3077 if (region->type == ERT_THROW)
3079 type_thrown = region->u.throw.type;
3080 region = region->outer;
3083 /* If this exception is ignored by each and every containing region,
3084 then control passes straight out. The runtime may handle some
3085 regions, which also do not require processing internally. */
3086 for (; region; region = region->outer)
3088 enum reachable_code how = reachable_next_level (region, type_thrown, 0);
3089 if (how == RNL_BLOCKED)
3091 if (how != RNL_NOT_CAUGHT)
3099 can_throw_internal (rtx insn)
3103 if (! INSN_P (insn))
3106 if (GET_CODE (insn) == INSN
3107 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3108 insn = XVECEXP (PATTERN (insn), 0, 0);
3110 /* Every insn that might throw has an EH_REGION note. */
3111 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3112 if (!note || INTVAL (XEXP (note, 0)) <= 0)
3115 return can_throw_internal_1 (INTVAL (XEXP (note, 0)));
3118 /* Determine if the given INSN can throw an exception that is
3119 visible outside the function. */
3122 can_throw_external_1 (int region_number)
3124 struct eh_region *region;
3127 region = cfun->eh->region_array[region_number];
3129 type_thrown = NULL_TREE;
3130 if (region->type == ERT_THROW)
3132 type_thrown = region->u.throw.type;
3133 region = region->outer;
3136 /* If the exception is caught or blocked by any containing region,
3137 then it is not seen by any calling function. */
3138 for (; region ; region = region->outer)
3139 if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
3146 can_throw_external (rtx insn)
3150 if (! INSN_P (insn))
3153 if (GET_CODE (insn) == INSN
3154 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3155 insn = XVECEXP (PATTERN (insn), 0, 0);
3157 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3160 /* Calls (and trapping insns) without notes are outside any
3161 exception handling region in this function. We have to
3162 assume it might throw. Given that the front end and middle
3163 ends mark known NOTHROW functions, this isn't so wildly
3164 inaccurate. */
3165 return (GET_CODE (insn) == CALL_INSN
3166 || (flag_non_call_exceptions
3167 && may_trap_p (PATTERN (insn))));
3169 if (INTVAL (XEXP (note, 0)) <= 0)
3172 return can_throw_external_1 (INTVAL (XEXP (note, 0)));
3175 /* Set current_function_nothrow and cfun->all_throwers_are_sibcalls. */
3178 set_nothrow_function_flags (void)
3182 current_function_nothrow = 1;
3184 /* Assume cfun->all_throwers_are_sibcalls until we encounter
3185 something that can throw an exception. We specifically exempt
3186 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
3187 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
3188 is optimistic. */
3190 cfun->all_throwers_are_sibcalls = 1;
3192 if (! flag_exceptions)
3195 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3196 if (can_throw_external (insn))
3198 current_function_nothrow = 0;
3200 if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
3202 cfun->all_throwers_are_sibcalls = 0;
3207 for (insn = current_function_epilogue_delay_list; insn;
3208 insn = XEXP (insn, 1))
3209 if (can_throw_external (insn))
3211 current_function_nothrow = 0;
3213 if (GET_CODE (insn) != CALL_INSN || !SIBLING_CALL_P (insn))
3215 cfun->all_throwers_are_sibcalls = 0;
3222 /* Various hooks for the unwind library. */
3224 /* Do any necessary initialization to access arbitrary stack frames.
3225 On the SPARC, this means flushing the register windows. */
3228 expand_builtin_unwind_init (void)
3230 /* Set this so all the registers get saved in our frame; we need to be
3231 able to copy the saved values for any registers from frames we unwind. */
3232 current_function_has_nonlocal_label = 1;
3234 #ifdef SETUP_FRAME_ADDRESSES
3235 SETUP_FRAME_ADDRESSES ();
3240 expand_builtin_eh_return_data_regno (tree arglist)
3242 tree which = TREE_VALUE (arglist);
3243 unsigned HOST_WIDE_INT iwhich;
3245 if (TREE_CODE (which) != INTEGER_CST)
3247 error ("argument of `__builtin_eh_return_regno' must be constant");
3251 iwhich = tree_low_cst (which, 1);
3252 iwhich = EH_RETURN_DATA_REGNO (iwhich);
3253 if (iwhich == INVALID_REGNUM)
3256 #ifdef DWARF_FRAME_REGNUM
3257 iwhich = DWARF_FRAME_REGNUM (iwhich);
3259 iwhich = DBX_REGISTER_NUMBER (iwhich);
3262 return GEN_INT (iwhich);
3265 /* Given a value extracted from the return address register or stack slot,
3266 return the actual address encoded in that value. */
3269 expand_builtin_extract_return_addr (tree addr_tree)
3271 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
3273 if (GET_MODE (addr) != Pmode
3274 && GET_MODE (addr) != VOIDmode)
3276 #ifdef POINTERS_EXTEND_UNSIGNED
3277 addr = convert_memory_address (Pmode, addr);
3279 addr = convert_to_mode (Pmode, addr, 0);
3283 /* First mask out any unwanted bits. */
3284 #ifdef MASK_RETURN_ADDR
3285 expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
3288 /* Then adjust to find the real return address. */
3289 #if defined (RETURN_ADDR_OFFSET)
3290 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
3296 /* Given an actual address in addr_tree, do any necessary encoding
3297 and return the value to be stored in the return address register or
3298 stack slot so the epilogue will return to that address. */
3301 expand_builtin_frob_return_addr (tree addr_tree)
3303 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
3305 addr = convert_memory_address (Pmode, addr);
3307 #ifdef RETURN_ADDR_OFFSET
3308 addr = force_reg (Pmode, addr);
3309 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
3315 /* Set up the epilogue with the magic bits we'll need to return to the
3316 exception handler. */
3319 expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
3324 #ifdef EH_RETURN_STACKADJ_RTX
3325 tmp = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
3326 tmp = convert_memory_address (Pmode, tmp);
3327 if (!cfun->eh->ehr_stackadj)
3328 cfun->eh->ehr_stackadj = copy_to_reg (tmp);
3329 else if (tmp != cfun->eh->ehr_stackadj)
3330 emit_move_insn (cfun->eh->ehr_stackadj, tmp);
3333 tmp = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
3334 tmp = convert_memory_address (Pmode, tmp);
3335 if (!cfun->eh->ehr_handler)
3336 cfun->eh->ehr_handler = copy_to_reg (tmp);
3337 else if (tmp != cfun->eh->ehr_handler)
3338 emit_move_insn (cfun->eh->ehr_handler, tmp);
3340 if (!cfun->eh->ehr_label)
3341 cfun->eh->ehr_label = gen_label_rtx ();
3342 emit_jump (cfun->eh->ehr_label);
3346 expand_eh_return (void)
3350 if (! cfun->eh->ehr_label)
3353 current_function_calls_eh_return = 1;
3355 #ifdef EH_RETURN_STACKADJ_RTX
3356 emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
3359 around_label = gen_label_rtx ();
3360 emit_jump (around_label);
3362 emit_label (cfun->eh->ehr_label);
3363 clobber_return_register ();
3365 #ifdef EH_RETURN_STACKADJ_RTX
3366 emit_move_insn (EH_RETURN_STACKADJ_RTX, cfun->eh->ehr_stackadj);
3369 #ifdef HAVE_eh_return
3371 emit_insn (gen_eh_return (cfun->eh->ehr_handler));
3375 #ifdef EH_RETURN_HANDLER_RTX
3376 emit_move_insn (EH_RETURN_HANDLER_RTX, cfun->eh->ehr_handler);
3378 error ("__builtin_eh_return not supported on this target");
3382 emit_label (around_label);
3385 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
3386 POINTERS_EXTEND_UNSIGNED and return it. */
3389 expand_builtin_extend_pointer (tree addr_tree)
3391 rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);
3394 #ifdef POINTERS_EXTEND_UNSIGNED
3395 extend = POINTERS_EXTEND_UNSIGNED;
3397 /* The previous EH code did an unsigned extend by default, so we do this also
3398 for consistency. */
3402 return convert_modes (word_mode, ptr_mode, addr, extend);
3405 /* In the following functions, we represent entries in the action table
3406 as 1-based indices. Special cases are:
3408 0: null action record, non-null landing pad; implies cleanups
3409 -1: null action record, null landing pad; implies no action
3410 -2: no call-site entry; implies must_not_throw
3411 -3: we have yet to process outer regions
3413 Further, no special cases apply to the "next" field of the record.
3414 For next, 0 means end of list. */
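/* A worked example (hypothetical indices): a call site inside a
   catch of some type T, itself nested in an outer cleanup, might
   carry the chain

     record at index 1:  filter = T's filter, next = 2
     record at index 2:  filter = 0 (cleanup), next = 0 (end of list)

   whereas a call site protected only by cleanups gets index 0, and
   one that can throw but needs no local action gets -1.  */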
3416 struct action_record
3417 {
3418 int offset;
3419 int filter;
3420 int next;
3421 };
3424 action_record_eq (const void *pentry, const void *pdata)
3426 const struct action_record *entry = (const struct action_record *) pentry;
3427 const struct action_record *data = (const struct action_record *) pdata;
3428 return entry->filter == data->filter && entry->next == data->next;
3432 action_record_hash (const void *pentry)
3434 const struct action_record *entry = (const struct action_record *) pentry;
3435 return entry->next * 1009 + entry->filter;
3439 add_action_record (htab_t ar_hash, int filter, int next)
3441 struct action_record **slot, *new, tmp;
3443 tmp.filter = filter;
3445 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3447 if ((new = *slot) == NULL)
3449 new = xmalloc (sizeof (*new));
3450 new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3451 new->filter = filter;
3455 /* The filter value goes in untouched. The link to the next
3456 record is a "self-relative" byte offset, or zero to indicate
3457 that there is no next record. So convert the absolute 1-based
3458 indices we've been carrying around into a displacement. */
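/* Worked example: if the record being referenced starts at 1-based
   index 3 and the "next" field below is about to be written at
   1-based index 8, the value pushed is 3 - 8 = -5, i.e. the offset
   is relative to the field itself.  */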
3460 push_sleb128 (&cfun->eh->action_record_data, filter);
3462 next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3463 push_sleb128 (&cfun->eh->action_record_data, next);
3470 collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
3472 struct eh_region *c;
3475 /* If we've reached the top of the region chain, then we have
3476 no actions, and require no landing pad. */
3480 switch (region->type)
3483 /* A cleanup adds a zero filter to the beginning of the chain, but
3484 there are special cases to look out for. If there are *only*
3485 cleanups along a path, then it compresses to a zero action.
3486 Further, if there are multiple cleanups along a path, we only
3487 need to represent one of them, as that is enough to trigger
3488 entry to the landing pad at runtime. */
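/* E.g. (a sketch): on a region path cleanup -> cleanup -> try, only
   one zero-filter record is added in front of the try's chain; the
   second cleanup is dropped.  A path consisting solely of cleanups
   compresses to the zero action.  */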
3489 next = collect_one_action_chain (ar_hash, region->outer);
3492 for (c = region->outer; c ; c = c->outer)
3493 if (c->type == ERT_CLEANUP)
3495 return add_action_record (ar_hash, 0, next);
3498 /* Process the associated catch regions in reverse order.
3499 If there's a catch-all handler, then we don't need to
3500 search outer regions. Use a magic -3 value to record
3501 that we haven't done the outer search. */
3503 for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
3505 if (c->u.catch.type_list == NULL)
3507 /* Retrieve the filter from the head of the filter list
3508 where we have stored it (see assign_filter_values). */
3510 = TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));
3512 next = add_action_record (ar_hash, filter, 0);
3516 /* Once the outer search is done, trigger an action record for
3517 each filter we have. */
3522 next = collect_one_action_chain (ar_hash, region->outer);
3524 /* If there is no next action, terminate the chain. */
3527 /* If all outer actions are cleanups or must_not_throw,
3528 we'll have no action record for it, since we had wanted
3529 to encode these states in the call-site record directly.
3530 Add a cleanup action to the chain to catch these. */
3532 next = add_action_record (ar_hash, 0, 0);
3535 flt_node = c->u.catch.filter_list;
3536 for (; flt_node; flt_node = TREE_CHAIN (flt_node))
3538 int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
3539 next = add_action_record (ar_hash, filter, next);
3545 case ERT_ALLOWED_EXCEPTIONS:
3546 /* An exception specification adds its filter to the
3547 beginning of the chain. */
3548 next = collect_one_action_chain (ar_hash, region->outer);
3550 /* If there is no next action, terminate the chain. */
3553 /* If all outer actions are cleanups or must_not_throw,
3554 we'll have no action record for it, since we had wanted
3555 to encode these states in the call-site record directly.
3556 Add a cleanup action to the chain to catch these. */
3558 next = add_action_record (ar_hash, 0, 0);
3560 return add_action_record (ar_hash, region->u.allowed.filter, next);
3562 case ERT_MUST_NOT_THROW:
3563 /* A must-not-throw region with no inner handlers or cleanups
3564 requires no call-site entry. Note that this differs from
3565 the no handler or cleanup case in that we do require an lsda
3566 to be generated. Return a magic -2 value to record this. */
3571 /* CATCH regions are handled in TRY above. THROW regions are
3572 for optimization information only and produce no output. */
3573 return collect_one_action_chain (ar_hash, region->outer);
3581 add_call_site (rtx landing_pad, int action)
3583 struct call_site_record *data = cfun->eh->call_site_data;
3584 int used = cfun->eh->call_site_data_used;
3585 int size = cfun->eh->call_site_data_size;
3589 size = (size ? size * 2 : 64);
3590 data = ggc_realloc (data, sizeof (*data) * size);
3591 cfun->eh->call_site_data = data;
3592 cfun->eh->call_site_data_size = size;
3595 data[used].landing_pad = landing_pad;
3596 data[used].action = action;
3598 cfun->eh->call_site_data_used = used + 1;
3600 return used + call_site_base;
3603 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3604 The new note numbers will not refer to region numbers, but
3605 instead to call site entries. */
3608 convert_to_eh_region_ranges (void)
3610 rtx insn, iter, note;
3612 int last_action = -3;
3613 rtx last_action_insn = NULL_RTX;
3614 rtx last_landing_pad = NULL_RTX;
3615 rtx first_no_action_insn = NULL_RTX;
3618 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3621 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
3623 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3625 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3628 struct eh_region *region;
3630 rtx this_landing_pad;
3633 if (GET_CODE (insn) == INSN
3634 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3635 insn = XVECEXP (PATTERN (insn), 0, 0);
3637 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3640 if (! (GET_CODE (insn) == CALL_INSN
3641 || (flag_non_call_exceptions
3642 && may_trap_p (PATTERN (insn)))))
3649 if (INTVAL (XEXP (note, 0)) <= 0)
3651 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
3652 this_action = collect_one_action_chain (ar_hash, region);
3655 /* The existence of catch handlers or must-not-throw regions
3656 implies that an lsda is needed (even if empty). */
3657 if (this_action != -1)
3658 cfun->uses_eh_lsda = 1;
3660 /* Delay creation of region notes for no-action regions
3661 until we're sure that an lsda will be required. */
3662 else if (last_action == -3)
3664 first_no_action_insn = iter;
3668 /* Cleanups and handlers may share action chains but not
3669 landing pads. Collect the landing pad for this region. */
3670 if (this_action >= 0)
3672 struct eh_region *o;
3673 for (o = region; ! o->landing_pad ; o = o->outer)
3675 this_landing_pad = o->landing_pad;
3678 this_landing_pad = NULL_RTX;
3680 /* Differing actions or landing pads implies a change in call-site
3681 info, which implies some EH_REGION note should be emitted. */
3682 if (last_action != this_action
3683 || last_landing_pad != this_landing_pad)
3685 /* If we'd not seen a previous action (-3) or the previous
3686 action was must-not-throw (-2), then we do not need an
3687 end note. */
3688 if (last_action >= -1)
3690 /* If we delayed the creation of the begin, do it now. */
3691 if (first_no_action_insn)
3693 call_site = add_call_site (NULL_RTX, 0);
3694 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3695 first_no_action_insn);
3696 NOTE_EH_HANDLER (note) = call_site;
3697 first_no_action_insn = NULL_RTX;
3700 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3702 NOTE_EH_HANDLER (note) = call_site;
3705 /* If the new action is must-not-throw, then no region notes
3706 are created. */
3707 if (this_action >= -1)
3709 call_site = add_call_site (this_landing_pad,
3710 this_action < 0 ? 0 : this_action);
3711 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3712 NOTE_EH_HANDLER (note) = call_site;
3715 last_action = this_action;
3716 last_landing_pad = this_landing_pad;
3718 last_action_insn = iter;
3721 if (last_action >= -1 && ! first_no_action_insn)
3723 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3724 NOTE_EH_HANDLER (note) = call_site;
3727 htab_delete (ar_hash);
3732 push_uleb128 (varray_type *data_area, unsigned int value)
3736 unsigned char byte = value & 0x7f;
3740 VARRAY_PUSH_UCHAR (*data_area, byte);
3746 push_sleb128 (varray_type *data_area, int value)
3753 byte = value & 0x7f;
3755 more = ! ((value == 0 && (byte & 0x40) == 0)
3756 || (value == -1 && (byte & 0x40) != 0));
3759 VARRAY_PUSH_UCHAR (*data_area, byte);
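/* A standalone sketch of the unsigned encoding, mirroring
   push_uleb128 above without the varray plumbing (illustrative
   helper, not compiler source):

     static size_t
     encode_uleb128 (unsigned char *p, unsigned int value)
     {
       unsigned char *q = p;
       do
         {
           unsigned char byte = value & 0x7f;
           value >>= 7;
           if (value)
             byte |= 0x80;
           *q++ = byte;
         }
       while (value);
       return q - p;
     }

   For example, 624485 encodes as the bytes 0xe5 0x8e 0x26.  */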
3765 #ifndef HAVE_AS_LEB128
3767 dw2_size_of_call_site_table (void)
3769 int n = cfun->eh->call_site_data_used;
3770 int size = n * (4 + 4 + 4);
3773 for (i = 0; i < n; ++i)
3775 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3776 size += size_of_uleb128 (cs->action);
3783 sjlj_size_of_call_site_table (void)
3785 int n = cfun->eh->call_site_data_used;
3789 for (i = 0; i < n; ++i)
3791 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3792 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3793 size += size_of_uleb128 (cs->action);
3801 dw2_output_call_site_table (void)
3803 const char *const function_start_lab
3804 = IDENTIFIER_POINTER (current_function_func_begin_label);
3805 int n = cfun->eh->call_site_data_used;
3808 for (i = 0; i < n; ++i)
3810 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3811 char reg_start_lab[32];
3812 char reg_end_lab[32];
3813 char landing_pad_lab[32];
3815 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3816 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3818 if (cs->landing_pad)
3819 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3820 CODE_LABEL_NUMBER (cs->landing_pad));
3822 /* ??? Perhaps use insn length scaling if the assembler supports
3823 generic arithmetic. */
3824 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3825 data4 if the function is small enough. */
3826 #ifdef HAVE_AS_LEB128
3827 dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
3828 "region %d start", i);
3829 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3831 if (cs->landing_pad)
3832 dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
3835 dw2_asm_output_data_uleb128 (0, "landing pad");
3837 dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
3838 "region %d start", i);
3839 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3840 if (cs->landing_pad)
3841 dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
3844 dw2_asm_output_data (4, 0, "landing pad");
3846 dw2_asm_output_data_uleb128 (cs->action, "action");
3849 call_site_base += n;
3853 sjlj_output_call_site_table (void)
3855 int n = cfun->eh->call_site_data_used;
3858 for (i = 0; i < n; ++i)
3860 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3862 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3863 "region %d landing pad", i);
3864 dw2_asm_output_data_uleb128 (cs->action, "action");
3867 call_site_base += n;
3870 /* Tell assembler to switch to the section for the exception handling
3871 table. */
3874 default_exception_section (void)
3876 if (targetm.have_named_sections)
3879 #ifdef HAVE_LD_RO_RW_SECTION_MIXING
3880 int tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3883 || ((tt_format & 0x70) != DW_EH_PE_absptr
3884 && (tt_format & 0x70) != DW_EH_PE_aligned))
3885 ? 0 : SECTION_WRITE;
3887 flags = SECTION_WRITE;
3889 named_section_flags (".gcc_except_table", flags);
3894 readonly_data_section ();
3898 output_function_exception_table (void)
3900 int tt_format, cs_format, lp_format, i, n;
3901 #ifdef HAVE_AS_LEB128
3902 char ttype_label[32];
3903 char cs_after_size_label[32];
3904 char cs_end_label[32];
3909 int tt_format_size = 0;
3911 /* Not all functions need anything. */
3912 if (! cfun->uses_eh_lsda)
3915 #ifdef IA64_UNWIND_INFO
3916 fputs ("\t.personality\t", asm_out_file);
3917 output_addr_const (asm_out_file, eh_personality_libfunc);
3918 fputs ("\n\t.handlerdata\n", asm_out_file);
3919 /* Note that varasm still thinks we're in the function's code section.
3920 The ".endp" directive that will immediately follow will take us back. */
3922 targetm.asm_out.exception_section ();
3925 have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
3926 || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);
3928 /* Indicate the format of the @TType entries. */
3930 tt_format = DW_EH_PE_omit;
3933 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3934 #ifdef HAVE_AS_LEB128
3935 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
3936 current_function_funcdef_no);
3938 tt_format_size = size_of_encoded_value (tt_format);
3940 assemble_align (tt_format_size * BITS_PER_UNIT);
3943 targetm.asm_out.internal_label (asm_out_file, "LLSDA",
3944 current_function_funcdef_no);
3946 /* The LSDA header. */
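/* The header emitted below has this overall shape (a sketch; the
   @TType base offset and @TType data are present only when there is
   type or exception specification data):

     1 byte    @LPStart format    (DW_EH_PE_omit here)
     1 byte    @TType format
     uleb128   @TType base offset (if present)
     1 byte    call-site format
     uleb128   call-site table length
     ...       call-site table, action record table, @TType data  */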
3948 /* Indicate the format of the landing pad start pointer. An omitted
3949 field implies @LPStart == @Start. */
3950 /* Currently we always put @LPStart == @Start. This field would
3951 be most useful in moving the landing pads completely out of
3952 line to another section, but it could also be used to minimize
3953 the size of uleb128 landing pad offsets. */
3954 lp_format = DW_EH_PE_omit;
3955 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3956 eh_data_format_name (lp_format));
3958 /* @LPStart pointer would go here. */
3960 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3961 eh_data_format_name (tt_format));
3963 #ifndef HAVE_AS_LEB128
3964 if (USING_SJLJ_EXCEPTIONS)
3965 call_site_len = sjlj_size_of_call_site_table ();
3967 call_site_len = dw2_size_of_call_site_table ();
3970 /* A pc-relative 4-byte displacement to the @TType data. */
3973 #ifdef HAVE_AS_LEB128
3974 char ttype_after_disp_label[32];
3975 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
3976 current_function_funcdef_no);
3977 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3978 "@TType base offset");
3979 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3981 /* Ug. Alignment complicates things. */
3982 unsigned int before_disp, after_disp, last_disp, disp;
3984 before_disp = 1 + 1;
3985 after_disp = (1 + size_of_uleb128 (call_site_len)
3987 + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
3988 + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
3994 unsigned int disp_size, pad;
3997 disp_size = size_of_uleb128 (disp);
3998 pad = before_disp + disp_size + after_disp;
3999 if (pad % tt_format_size)
4000 pad = tt_format_size - (pad % tt_format_size);
4003 disp = after_disp + pad;
4005 while (disp != last_disp);
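/* Worked example of the loop above (hypothetical sizes): with
   before_disp = 2, after_disp = 130 and tt_format_size = 4, the
   first guess disp = 130 has a 2-byte uleb128, so 2 + 2 + 130 = 134
   bytes would precede the @TType data; pad = 2 rounds that up to a
   multiple of 4, giving disp = 132.  Re-checking with disp = 132
   produces the same pad, so the iteration terminates.  */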
4007 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
4011 /* Indicate the format of the call-site offsets. */
4012 #ifdef HAVE_AS_LEB128
4013 cs_format = DW_EH_PE_uleb128;
4015 cs_format = DW_EH_PE_udata4;
4017 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
4018 eh_data_format_name (cs_format));
4020 #ifdef HAVE_AS_LEB128
4021 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
4022 current_function_funcdef_no);
4023 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
4024 current_function_funcdef_no);
4025 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
4026 "Call-site table length");
4027 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
4028 if (USING_SJLJ_EXCEPTIONS)
4029 sjlj_output_call_site_table ();
4031 dw2_output_call_site_table ();
4032 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
4034 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
4035 if (USING_SJLJ_EXCEPTIONS)
4036 sjlj_output_call_site_table ();
4038 dw2_output_call_site_table ();
4041 /* ??? Decode and interpret the data for flag_debug_asm. */
4042 n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
4043 for (i = 0; i < n; ++i)
4044 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
4045 (i ? NULL : "Action record table"));
4048 assemble_align (tt_format_size * BITS_PER_UNIT);
4050 i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
4053 tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
4056 if (type == NULL_TREE)
4060 struct cgraph_varpool_node *node;
4062 type = lookup_type_for_runtime (type);
4063 value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
4065 /* Let cgraph know that the rtti decl is used. Not all of the
4066 paths below go through assemble_integer, which would take
4067 care of this for us. */
4069 if (TREE_CODE (type) == ADDR_EXPR)
4071 type = TREE_OPERAND (type, 0);
4072 node = cgraph_varpool_node (type);
4074 cgraph_varpool_mark_needed_node (node);
4076 else if (TREE_CODE (type) != INTEGER_CST)
4080 if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
4081 assemble_integer (value, tt_format_size,
4082 tt_format_size * BITS_PER_UNIT, 1);
4084 dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
4087 #ifdef HAVE_AS_LEB128
4089 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
4092 /* ??? Decode and interpret the data for flag_debug_asm. */
4093 n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
4094 for (i = 0; i < n; ++i)
4095 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
4096 (i ? NULL : "Exception specification table"));
4098 function_section (current_function_decl);
4101 #include "gt-except.h"