/* Exception handling semantics and decomposition for trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"

#include "pointer-set.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "tree-pass.h"

#include "langhooks.h"

#include "diagnostic-core.h"
/* In some instances a tree and a gimple need to be stored in the same
   table, i.e. in hash tables.  This is a structure to do this.  */
typedef union {tree *tp; tree t; gimple g;} treemple;
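/* An illustrative sketch (not from the original sources) of how a
   treemple is used: collect_finally_tree below keys the finally_tree
   hash table either by a gimple or by a tree, depending on what is
   being recorded:

	treemple temp;
	temp.g = try_stmt;			    (key by a GIMPLE_TRY)
	temp.t = gimple_label_label (label_stmt);  (or by a LABEL_DECL)

   The union merely lets both pointer kinds share one slot type; no tag
   is stored, so each lookup site must know which member it wrote.  */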
/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}

/* Misc functions used in this file.  */
/* Remember and lookup EH landing pad data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
   we get to rtl.  Once we're done with lowering here, if we lose
   the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
   compared to those that can.  We should be saving some amount
   of space by only allocating memory for those that can throw.  */

/* Add statement T in function IFUN to landing pad NUM.  */

add_stmt_to_eh_lp_fn (struct function *ifun, gimple t, int num)

  struct throw_stmt_node *n;

  gcc_assert (num != 0);

  n = ggc_alloc_throw_stmt_node ();

  if (!get_eh_throw_stmt_table (ifun))
    set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,

  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);

/* Add statement T in the current function (cfun) to EH landing pad NUM.  */

add_stmt_to_eh_lp (gimple t, int num)

  add_stmt_to_eh_lp_fn (cfun, t, num);

/* Add statement T to the single EH landing pad in REGION.  */

record_stmt_eh_region (eh_region region, gimple t)

  if (region->type == ERT_MUST_NOT_THROW)
    add_stmt_to_eh_lp_fn (cfun, t, -region->index);

      eh_landing_pad lp = region->landing_pads;

	lp = gen_eh_landing_pad (region);

	gcc_assert (lp->next_lp == NULL);
      add_stmt_to_eh_lp_fn (cfun, t, lp->index);

/* Remove statement T in function IFUN from its EH landing pad.  */

remove_stmt_from_eh_lp_fn (struct function *ifun, gimple t)

  struct throw_stmt_node dummy;

  if (!get_eh_throw_stmt_table (ifun))

  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,

      htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
/* Remove statement T in the current function (cfun) from its
   EH landing pad.  */

remove_stmt_from_eh_lp (gimple t)

  return remove_stmt_from_eh_lp_fn (cfun, t);
/* Determine if statement T is inside an EH region in function IFUN.
   Positive numbers indicate a landing pad index; negative numbers
   indicate a MUST_NOT_THROW region index; zero indicates that the
   statement is not recorded in the region table.  */

lookup_stmt_eh_lp_fn (struct function *ifun, gimple t)

  struct throw_stmt_node *p, n;

  if (ifun->eh->throw_stmt_table == NULL)

  p = (struct throw_stmt_node *) htab_find (ifun->eh->throw_stmt_table, &n);
  return p ? p->lp_nr : 0;
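/* A sketch of typical use of the value returned above (illustrative
   only; the sign convention is documented before the function):

	int lp_nr = lookup_stmt_eh_lp_fn (cfun, stmt);
	if (lp_nr > 0)
	  ;  (STMT may throw to landing pad number LP_NR)
	else if (lp_nr < 0)
	  ;  (STMT sits in MUST_NOT_THROW region number -LP_NR)
	else
	  ;  (STMT is not in the region table at all)  */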
/* Likewise, but always use the current function.  */

lookup_stmt_eh_lp (gimple t)

  /* We can get called from initialized data when -fnon-call-exceptions
     is on; prevent crash.  */

  return lookup_stmt_eh_lp_fn (cfun, t);

/* First pass of EH node decomposition.  Build up a tree of GIMPLE_TRY_FINALLY
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */
struct finally_tree_node
{
  /* When storing a GIMPLE_TRY, we have to record a gimple.  However
     when deciding whether a GOTO to a certain LABEL_DECL (which is a
     tree) leaves the TRY block, it's necessary to record a tree in
     this field.  Thus a treemple is used.  */

/* Note that this table is *not* marked GTY.  It is short-lived.  */
static htab_t finally_tree;
record_in_finally_tree (treemple child, gimple parent)

  struct finally_tree_node *n;

  n = XNEW (struct finally_tree_node);

  slot = htab_find_slot (finally_tree, n, INSERT);

collect_finally_tree (gimple stmt, gimple region);

/* Go through the gimple sequence.  Works with collect_finally_tree to
   record all GIMPLE_LABEL and GIMPLE_TRY statements.  */

collect_finally_tree_1 (gimple_seq seq, gimple region)

  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    collect_finally_tree (gsi_stmt (gsi), region);

collect_finally_tree (gimple stmt, gimple region)

  switch (gimple_code (stmt))

      temp.t = gimple_label_label (stmt);
      record_in_finally_tree (temp, region);

      if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)

          record_in_finally_tree (temp, region);
          collect_finally_tree_1 (gimple_try_eval (stmt), stmt);
          collect_finally_tree_1 (gimple_try_cleanup (stmt), region);

      else if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)

          collect_finally_tree_1 (gimple_try_eval (stmt), region);
          collect_finally_tree_1 (gimple_try_cleanup (stmt), region);

      collect_finally_tree_1 (gimple_catch_handler (stmt), region);

    case GIMPLE_EH_FILTER:
      collect_finally_tree_1 (gimple_eh_filter_failure (stmt), region);

      collect_finally_tree_1 (gimple_eh_else_n_body (stmt), region);
      collect_finally_tree_1 (gimple_eh_else_e_body (stmt), region);

      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */

/* Use the finally tree to determine if a jump from START to TARGET
   would leave the try_finally node that START lives in.  */

outside_finally_tree (treemple start, gimple target)

  struct finally_tree_node n, *p;

      p = (struct finally_tree_node *) htab_find (finally_tree, &n);

  while (start.g != target);
/* Second pass of EH node decomposition.  Actually transform the GIMPLE_TRY
   nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straightforward, but frobbing all the gotos
   and such into shape isn't.  */
/* The sequence into which we record all EH stuff.  This will be
   placed at the end of the function when we're all done.  */
static gimple_seq eh_seq;

/* Record whether an EH region contains something that can throw,
   indexed by EH region number.  */
static bitmap eh_region_may_contain_throw_map;
/* The GOTO_QUEUE is an array of GIMPLE_GOTO and GIMPLE_RETURN
   statements that are seen to escape this GIMPLE_TRY_FINALLY node.
   The idea is to record a gimple statement for everything except for
   the conditionals, which get their labels recorded.  Since labels are
   of type 'tree', we need this node to store both gimple and tree
   objects.  REPL_STMT is the sequence used to replace the goto/return
   statement.  CONT_STMT is used to store the statement that allows
   the return/goto to jump to the original destination.  */
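/* For instance (an illustrative sketch, not literal GIMPLE), given

	try { ... goto done; ... } finally { F }

   the "goto done" escapes the try block and so gets a goto_queue_node.
   REPL_STMT will hold the sequence that replaces the goto in place
   (typically a branch to a per-destination copy of F, or an assignment
   to the switch temporary followed by a branch to the shared F), and
   CONT_STMT holds the goto that finally continues on to the original
   destination.  */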
struct goto_queue_node
{

  gimple_seq repl_stmt;

  /* This is used when index >= 0 to indicate that stmt is a label (as
     opposed to a goto stmt).  */

/* State of the world while lowering.  */

  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  eh_region cur_region;

  /* What's "current" for the purposes of __builtin_eh_pointer.  For
     a CATCH, this is the associated TRY.  For an EH_FILTER, this is
     the associated ALLOWED_EXCEPTIONS, etc.  */
  eh_region ehp_region;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;

  /* Pointer to the GIMPLE_TRY_FINALLY node under discussion.  The
     try_finally_expr is the original GIMPLE_TRY_FINALLY.  We need to retain
     this so that outside_finally_tree can reliably reference the tree used
     in the collect_finally_tree data structures.  */
  gimple try_finally_expr;

  /* While lowering a top_p, it is usually expanded into multiple statements,
     thus we need the following field to store them.  */
  gimple_seq top_p_seq;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */

  /* The goto queue.  */
  struct goto_queue_node *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* Pointer map to help in searching goto_queue when it is large.  */
  struct pointer_map_t *goto_queue_map;

  /* The set of unique labels seen as entries in the goto queue.  */
  VEC(tree,heap) *dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */

  /* True if any entry in goto_queue is a GIMPLE_RETURN.  */

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */

static gimple_seq lower_eh_must_not_throw (struct leh_state *, gimple);

/* Search for STMT in the goto queue.  Return the replacement,
   or null if the statement isn't in the queue.  */
#define LARGE_GOTO_QUEUE 20
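/* Below this threshold a linear scan of the queue is assumed to be
   cheaper than building and probing the pointer map; the cutoff of 20
   entries is a heuristic.  */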
static void lower_eh_constructs_1 (struct leh_state *state, gimple_seq seq);

find_goto_replacement (struct leh_tf_state *tf, treemple stmt)

  if (tf->goto_queue_active < LARGE_GOTO_QUEUE)

      for (i = 0; i < tf->goto_queue_active; i++)
        if (tf->goto_queue[i].stmt.g == stmt.g)
          return tf->goto_queue[i].repl_stmt;

  /* If we have a large number of entries in the goto_queue, create a
     pointer map and use that for searching.  */

  if (!tf->goto_queue_map)

      tf->goto_queue_map = pointer_map_create ();
      for (i = 0; i < tf->goto_queue_active; i++)

          slot = pointer_map_insert (tf->goto_queue_map,
                                     tf->goto_queue[i].stmt.g);
          gcc_assert (*slot == NULL);
          *slot = &tf->goto_queue[i];

  slot = pointer_map_contains (tf->goto_queue_map, stmt.g);

    return (((struct goto_queue_node *) *slot)->repl_stmt);

/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered GIMPLE_COND.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the GIMPLE_COND and redirect.  */

replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
                                gimple_stmt_iterator *gsi)

  location_t loc = gimple_location (gsi_stmt (*gsi));

  new_seq = find_goto_replacement (tf, temp);

  if (gimple_seq_singleton_p (new_seq)
      && gimple_code (gimple_seq_first_stmt (new_seq)) == GIMPLE_GOTO)

      *tp = gimple_goto_dest (gimple_seq_first_stmt (new_seq));

  label = create_artificial_label (loc);
  /* Set the new label for the GIMPLE_COND.  */

  gsi_insert_after (gsi, gimple_build_label (label), GSI_CONTINUE_LINKING);
  gsi_insert_seq_after (gsi, gimple_seq_copy (new_seq), GSI_CONTINUE_LINKING);

/* The real work of replace_goto_queue.  Returns with TSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (gimple_seq, struct leh_tf_state *);

replace_goto_queue_1 (gimple stmt, struct leh_tf_state *tf,
                      gimple_stmt_iterator *gsi)

  switch (gimple_code (stmt))

      seq = find_goto_replacement (tf, temp);

          gsi_insert_seq_before (gsi, gimple_seq_copy (seq), GSI_SAME_STMT);
          gsi_remove (gsi, false);

      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 2), tf, gsi);
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 3), tf, gsi);

      replace_goto_queue_stmt_list (gimple_try_eval (stmt), tf);
      replace_goto_queue_stmt_list (gimple_try_cleanup (stmt), tf);

      replace_goto_queue_stmt_list (gimple_catch_handler (stmt), tf);

    case GIMPLE_EH_FILTER:
      replace_goto_queue_stmt_list (gimple_eh_filter_failure (stmt), tf);

      replace_goto_queue_stmt_list (gimple_eh_else_n_body (stmt), tf);
      replace_goto_queue_stmt_list (gimple_eh_else_e_body (stmt), tf);

      /* These won't have gotos in them.  */

/* A subroutine of replace_goto_queue.  Handles GIMPLE_SEQ.  */

replace_goto_queue_stmt_list (gimple_seq seq, struct leh_tf_state *tf)

  gimple_stmt_iterator gsi = gsi_start (seq);

  while (!gsi_end_p (gsi))
    replace_goto_queue_1 (gsi_stmt (gsi), tf, &gsi);

/* Replace all goto queue members.  */

replace_goto_queue (struct leh_tf_state *tf)

  if (tf->goto_queue_active == 0)

  replace_goto_queue_stmt_list (tf->top_p_seq, tf);
  replace_goto_queue_stmt_list (eh_seq, tf);
/* Add a new record to the goto queue contained in TF.  NEW_STMT is the
   data to be added, IS_LABEL indicates whether NEW_STMT is a label or
   a gimple statement.  */

record_in_goto_queue (struct leh_tf_state *tf,

  struct goto_queue_node *q;

  gcc_assert (!tf->goto_queue_map);

  active = tf->goto_queue_active;
  size = tf->goto_queue_size;

      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;

        = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));

  q->is_label = is_label;
/* Record the LABEL label in the goto queue contained in TF.
   TF is not null.  */

record_in_goto_queue_label (struct leh_tf_state *tf, treemple stmt, tree label)

  treemple temp, new_stmt;

  /* Computed and non-local gotos do not get processed.  Given
     their nature we can neither tell whether we've escaped the
     finally block nor redirect them if we knew.  */
  if (TREE_CODE (label) != LABEL_DECL)

  /* No need to record gotos that don't leave the try block.  */

  if (!outside_finally_tree (temp, tf->try_finally_expr))

  if (! tf->dest_array)

      tf->dest_array = VEC_alloc (tree, heap, 10);
      VEC_quick_push (tree, tf->dest_array, label);

      int n = VEC_length (tree, tf->dest_array);
      for (index = 0; index < n; ++index)
        if (VEC_index (tree, tf->dest_array, index) == label)

        VEC_safe_push (tree, heap, tf->dest_array, label);

  /* In the case of a GOTO we want to record the destination label,
     since with a GIMPLE_COND we have an easy access to the then/else
     labels.  */

  record_in_goto_queue (tf, new_stmt, index, true);

/* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */

maybe_record_in_goto_queue (struct leh_state *state, gimple stmt)

  struct leh_tf_state *tf = state->tf;

  switch (gimple_code (stmt))

      new_stmt.tp = gimple_op_ptr (stmt, 2);
      record_in_goto_queue_label (tf, new_stmt, gimple_cond_true_label (stmt));
      new_stmt.tp = gimple_op_ptr (stmt, 3);
      record_in_goto_queue_label (tf, new_stmt, gimple_cond_false_label (stmt));

      record_in_goto_queue_label (tf, new_stmt, gimple_goto_dest (stmt));

      tf->may_return = true;

      record_in_goto_queue (tf, new_stmt, -1, false);
#ifdef ENABLE_CHECKING
/* We do not process GIMPLE_SWITCHes for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer GIMPLE_TRY_FINALLY nodes.  Verify this.  */

verify_norecord_switch_expr (struct leh_state *state, gimple switch_expr)

  struct leh_tf_state *tf = state->tf;

  n = gimple_switch_num_labels (switch_expr);

  for (i = 0; i < n; ++i)

      tree lab = CASE_LABEL (gimple_switch_label (switch_expr, i));

      gcc_assert (!outside_finally_tree (temp, tf->try_finally_expr));

#define verify_norecord_switch_expr(state, switch_expr)
/* Redirect a RETURN_EXPR pointed to by Q to FINLAB.  If MOD is
   non-null, insert it before the new branch.  */

do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod)

  /* In the case of a return, the queue node must be a gimple statement.  */
  gcc_assert (!q->is_label);

  /* Note that the return value may have already been computed, e.g.,

	int x;
	int foo (void)
	{
	  x = 0;
	  try {
	    return x;
	  } finally {
	    x++;
	  }
	}

     should return 0, not 1.  We don't have to do anything to make
     this happen because the return value has been placed in the
     RESULT_DECL already.  */

  q->cont_stmt = q->stmt.g;

  q->repl_stmt = gimple_seq_alloc ();

    gimple_seq_add_seq (&q->repl_stmt, mod);

  x = gimple_build_goto (finlab);
  gimple_seq_add_stmt (&q->repl_stmt, x);
/* Similar, but easier, for GIMPLE_GOTO.  */

do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
                     struct leh_tf_state *tf)

  gcc_assert (q->is_label);

  q->repl_stmt = gimple_seq_alloc ();

  q->cont_stmt = gimple_build_goto (VEC_index (tree, tf->dest_array, q->index));

    gimple_seq_add_seq (&q->repl_stmt, mod);

  x = gimple_build_goto (finlab);
  gimple_seq_add_stmt (&q->repl_stmt, x);

/* Emit a standard landing pad sequence into SEQ for REGION.  */

emit_post_landing_pad (gimple_seq *seq, eh_region region)

  eh_landing_pad lp = region->landing_pads;

    lp = gen_eh_landing_pad (region);

  lp->post_landing_pad = create_artificial_label (UNKNOWN_LOCATION);
  EH_LANDING_PAD_NR (lp->post_landing_pad) = lp->index;

  x = gimple_build_label (lp->post_landing_pad);
  gimple_seq_add_stmt (seq, x);

/* Emit a RESX statement into SEQ for REGION.  */

emit_resx (gimple_seq *seq, eh_region region)

  gimple x = gimple_build_resx (region->index);
  gimple_seq_add_stmt (seq, x);

    record_stmt_eh_region (region->outer, x);

/* Emit an EH_DISPATCH statement into SEQ for REGION.  */

emit_eh_dispatch (gimple_seq *seq, eh_region region)

  gimple x = gimple_build_eh_dispatch (region->index);
  gimple_seq_add_stmt (seq, x);

/* Note that the current EH region may contain a throw, or a
   call to a function which itself may contain a throw.  */

note_eh_region_may_contain_throw (eh_region region)

  while (bitmap_set_bit (eh_region_may_contain_throw_map, region->index))

      if (region->type == ERT_MUST_NOT_THROW)

      region = region->outer;

/* Check if REGION has been marked as containing a throw.  If REGION is
   NULL, this predicate is false.  */

eh_region_may_contain_throw (eh_region r)

  return r && bitmap_bit_p (eh_region_may_contain_throw_map, r->index);
/* We want to transform
	try { body; } catch { stuff; }
   to
	normal_sequence:
	  body;
	  over:
	eh_sequence:
	  landing_pad:
	  stuff;
	  goto over;

   TP is a GIMPLE_TRY node.  REGION is the region whose post_landing_pad
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */
frob_into_branch_around (gimple tp, eh_region region, tree over)

  gimple_seq cleanup, result;
  location_t loc = gimple_location (tp);

  cleanup = gimple_try_cleanup (tp);
  result = gimple_try_eval (tp);

    emit_post_landing_pad (&eh_seq, region);

  if (gimple_seq_may_fallthru (cleanup))

        over = create_artificial_label (loc);
      x = gimple_build_goto (over);
      gimple_seq_add_stmt (&cleanup, x);

  gimple_seq_add_seq (&eh_seq, cleanup);

      x = gimple_build_label (over);
      gimple_seq_add_stmt (&result, x);

/* A subroutine of lower_try_finally.  Duplicate the tree rooted at T.
   Make sure to record all new labels found.  */

lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state)

  gimple region = NULL;

  new_seq = copy_gimple_seq_and_replace_locals (seq);

    region = outer_state->tf->try_finally_expr;
  collect_finally_tree_1 (new_seq, region);

/* A subroutine of lower_try_finally.  Create a fallthru label for
   the given try_finally state.  The only tricky bit here is that
   we have to make sure to record the label in our outer context.  */

lower_try_finally_fallthru_label (struct leh_tf_state *tf)

  tree label = tf->fallthru_label;

      label = create_artificial_label (gimple_location (tf->try_finally_expr));
      tf->fallthru_label = label;

        record_in_finally_tree (temp, tf->outer->tf->try_finally_expr);
/* A subroutine of lower_try_finally.  If FINALLY consists of a
   GIMPLE_EH_ELSE node, return it.  */

get_eh_else (gimple_seq finally)

  gimple x = gimple_seq_first_stmt (finally);
  if (gimple_code (x) == GIMPLE_EH_ELSE)

      gcc_assert (gimple_seq_singleton_p (finally));
/* A subroutine of lower_try_finally.  If the eh_protect_cleanup_actions
   langhook returns non-null, then the language requires that the exception
   path out of a try_finally be treated specially.  To wit: the code within
   the finally block may not itself throw an exception.  We have two choices
   here.  First we can duplicate the finally block and wrap it in a
   must_not_throw region.  Second, we can generate code like

	try {
	  finally_block;
	} catch {
	  if (fintmp == eh_edge)
	    protect_cleanup_actions;
	}

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */
honor_protect_cleanup_actions (struct leh_state *outer_state,
                               struct leh_state *this_state,
                               struct leh_tf_state *tf)

  tree protect_cleanup_actions;
  gimple_stmt_iterator gsi;
  bool finally_may_fallthru;

  /* First check for nothing to do.  */
  if (lang_hooks.eh_protect_cleanup_actions == NULL)

  protect_cleanup_actions = lang_hooks.eh_protect_cleanup_actions ();
  if (protect_cleanup_actions == NULL)

  finally = gimple_try_cleanup (tf->top_p);
  eh_else = get_eh_else (finally);

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  If we've got an EH_ELSE, extract it now.  */

      finally = gimple_eh_else_e_body (eh_else);
      gimple_try_set_cleanup (tf->top_p, gimple_eh_else_n_body (eh_else));

      finally = lower_try_finally_dup_block (finally, outer_state);
  finally_may_fallthru = gimple_seq_may_fallthru (finally);

  /* If this cleanup consists of a TRY_CATCH_EXPR with TRY_CATCH_IS_CLEANUP
     set, the handler of the TRY_CATCH_EXPR is another cleanup which ought
     to be in an enclosing scope, but needs to be implemented at this level
     to avoid a nesting violation (see wrap_temporary_cleanups in
     cp/decl.c).  Since it's logically at an outer level, we should call
     terminate before we get to it, so strip it away before adding the
     MUST_NOT_THROW filter.  */
  gsi = gsi_start (finally);

  if (gimple_code (x) == GIMPLE_TRY
      && gimple_try_kind (x) == GIMPLE_TRY_CATCH
      && gimple_try_catch_is_cleanup (x))

      gsi_insert_seq_before (&gsi, gimple_try_eval (x), GSI_SAME_STMT);
      gsi_remove (&gsi, false);

  /* Wrap the block with protect_cleanup_actions as the action.  */
  x = gimple_build_eh_must_not_throw (protect_cleanup_actions);
  x = gimple_build_try (finally, gimple_seq_alloc_with_stmt (x),

  finally = lower_eh_must_not_throw (outer_state, x);

  /* Drop all of this into the exception sequence.  */
  emit_post_landing_pad (&eh_seq, tf->region);
  gimple_seq_add_seq (&eh_seq, finally);
  if (finally_may_fallthru)
    emit_resx (&eh_seq, tf->region);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;

/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */

lower_try_finally_nofallthru (struct leh_state *state,
                              struct leh_tf_state *tf)

  struct goto_queue_node *q, *qe;

  lab = create_artificial_label (gimple_location (tf->try_finally_expr));

  /* We expect that tf->top_p is a GIMPLE_TRY.  */
  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  x = gimple_build_label (lab);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  qe = q + tf->goto_queue_active;

      do_return_redirection (q, lab, NULL);

      do_goto_redirection (q, lab, NULL, tf);

  replace_goto_queue (tf);

  /* Emit the finally block into the stream.  Lower EH_ELSE at this time.  */
  eh_else = get_eh_else (finally);

      finally = gimple_eh_else_n_body (eh_else);
      lower_eh_constructs_1 (state, finally);
      gimple_seq_add_seq (&tf->top_p_seq, finally);

          finally = gimple_eh_else_e_body (eh_else);
          lower_eh_constructs_1 (state, finally);

          emit_post_landing_pad (&eh_seq, tf->region);
          gimple_seq_add_seq (&eh_seq, finally);

      lower_eh_constructs_1 (state, finally);
      gimple_seq_add_seq (&tf->top_p_seq, finally);

          emit_post_landing_pad (&eh_seq, tf->region);

          x = gimple_build_goto (lab);
          gimple_seq_add_stmt (&eh_seq, x);

/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */

lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)

  struct goto_queue_node *q, *qe;

  location_t loc = gimple_location (tf->try_finally_expr);

  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  /* Since there's only one destination, and the destination edge can only
     either be EH or non-EH, that implies that all of our incoming edges
     are of the same type.  Therefore we can lower EH_ELSE immediately.  */
  x = get_eh_else (finally);

        finally = gimple_eh_else_e_body (x);

        finally = gimple_eh_else_n_body (x);

  lower_eh_constructs_1 (state, finally);

      /* Only reachable via the exception edge.  Add the given label to
         the head of the FINALLY block.  Append a RESX at the end.  */
      emit_post_landing_pad (&eh_seq, tf->region);
      gimple_seq_add_seq (&eh_seq, finally);
      emit_resx (&eh_seq, tf->region);

  if (tf->may_fallthru)

      /* Only reachable via the fallthru edge.  Do nothing but let
         the two blocks run together; we'll fall out the bottom.  */
      gimple_seq_add_seq (&tf->top_p_seq, finally);

  finally_label = create_artificial_label (loc);
  x = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  gimple_seq_add_seq (&tf->top_p_seq, finally);

  qe = q + tf->goto_queue_active;

      /* Reachable by return expressions only.  Redirect them.  */

        do_return_redirection (q, finally_label, NULL);
      replace_goto_queue (tf);

      /* Reachable by goto expressions only.  Redirect them.  */

        do_goto_redirection (q, finally_label, NULL, tf);
      replace_goto_queue (tf);

      if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label)

          /* Reachable by goto to fallthru label only.  Redirect it
             to the new label (already created, sadly), and do not
             emit the final branch out, or the fallthru label.  */
          tf->fallthru_label = NULL;

  /* Place the original return/goto to the original destination
     immediately after the finally block.  */
  x = tf->goto_queue[0].cont_stmt;
  gimple_seq_add_stmt (&tf->top_p_seq, x);
  maybe_record_in_goto_queue (state, x);

/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */
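/* An illustrative sketch of the duplication (not literal GIMPLE):

	try { ... goto A; ... goto B; ... } finally { F }

   becomes roughly

	... goto LA; ... goto LB; ...
	LA: F; goto A;
	LB: F; goto B;

   with one more copy of F on the fallthru path, and, unless an EH_ELSE
   supplies its own exception body, one further copy on the exception
   path followed by a RESX.  */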
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)

  gimple_seq new_stmt;

  location_t tf_loc = gimple_location (tf->try_finally_expr);

  finally = gimple_try_cleanup (tf->top_p);

  /* Notice EH_ELSE, and simplify some of the remaining code
     by considering FINALLY to be the normal return path only.  */
  eh_else = get_eh_else (finally);

    finally = gimple_eh_else_n_body (eh_else);

  tf->top_p_seq = gimple_try_eval (tf->top_p);

  if (tf->may_fallthru)

      seq = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, seq);
      gimple_seq_add_seq (&new_stmt, seq);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_seq_add_stmt (&new_stmt, x);

      /* We don't need to copy the EH path of EH_ELSE,
         since it is only emitted once.  */

        seq = gimple_eh_else_e_body (eh_else);

        seq = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, seq);

      emit_post_landing_pad (&eh_seq, tf->region);
      gimple_seq_add_seq (&eh_seq, seq);
      emit_resx (&eh_seq, tf->region);

      struct goto_queue_node *q, *qe;
      int return_index, index;

        struct goto_queue_node *q;

      return_index = VEC_length (tree, tf->dest_array);
      labels = XCNEWVEC (struct labels_s, return_index + 1);

      qe = q + tf->goto_queue_active;

          index = q->index < 0 ? return_index : q->index;

          if (!labels[index].q)
            labels[index].q = q;

      for (index = 0; index < return_index + 1; index++)

          q = labels[index].q;

          lab = labels[index].label
            = create_artificial_label (tf_loc);

          if (index == return_index)
            do_return_redirection (q, lab, NULL);

            do_goto_redirection (q, lab, NULL, tf);

          x = gimple_build_label (lab);
          gimple_seq_add_stmt (&new_stmt, x);

          seq = lower_try_finally_dup_block (finally, state);
          lower_eh_constructs_1 (state, seq);
          gimple_seq_add_seq (&new_stmt, seq);

          gimple_seq_add_stmt (&new_stmt, q->cont_stmt);
          maybe_record_in_goto_queue (state, q->cont_stmt);

      for (q = tf->goto_queue; q < qe; q++)

          index = q->index < 0 ? return_index : q->index;

          if (labels[index].q == q)

          lab = labels[index].label;

          if (index == return_index)
            do_return_redirection (q, lab, NULL);

            do_goto_redirection (q, lab, NULL, tf);

      replace_goto_queue (tf);

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_seq (&tf->top_p_seq, new_stmt);

/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by instrumenting
   each incoming edge and creating a switch statement at the end of the
   finally block that branches to the appropriate destination.  */
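/* An illustrative sketch (not literal GIMPLE):

	try { ... goto A; ... goto B; ... } finally { F }

   becomes roughly

	... finally_tmp = 0; goto finally_label; ...
	... finally_tmp = 1; goto finally_label; ...
	finally_label:
	  F;
	  switch (finally_tmp) { case 0: goto A; case 1: goto B; ... }

   so that F is emitted exactly once no matter how many edges enter and
   leave it.  */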
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)

  struct goto_queue_node *q, *qe;
  tree finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;

  VEC (tree,heap) *case_label_vec;
  gimple_seq switch_body;

  struct pointer_map_t *cont_map = NULL;
  /* The location of the TRY_FINALLY stmt.  */
  location_t tf_loc = gimple_location (tf->try_finally_expr);
  /* The location of the finally block.  */
  location_t finally_loc;

  switch_body = gimple_seq_alloc ();
  finally = gimple_try_cleanup (tf->top_p);
  eh_else = get_eh_else (finally);

  /* Mash the TRY block to the head of the chain.  */
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  /* The location of the finally is either the last stmt in the finally
     block or the location of the TRY_FINALLY itself.  */
  x = gimple_seq_last_stmt (finally);
  finally_loc = x ? gimple_location (x) : tf_loc;

  /* Lower the finally block itself.  */
  lower_eh_constructs_1 (state, finally);

  /* Prepare for switch statement generation.  */
  nlabels = VEC_length (tree, tf->dest_array);
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + (tf->may_throw && !eh_else);
  ndests = fallthru_index + tf->may_fallthru;
  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label (finally_loc);

  /* We use VEC_quick_push on case_label_vec throughout this function,
     since we know the size in advance and allocate precisely as much
     space as needed.  */
  case_label_vec = VEC_alloc (tree, heap, ndests);
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence the code is
     laid out.  */

  if (tf->may_fallthru)

      x = gimple_build_assign (finally_tmp,
                               build_int_cst (integer_type_node,

      gimple_seq_add_stmt (&tf->top_p_seq, x);

      tmp = build_int_cst (integer_type_node, fallthru_index);
      last_case = build_case_label (tmp, NULL,
                                    create_artificial_label (tf_loc));
      VEC_quick_push (tree, case_label_vec, last_case);

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&switch_body, x);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_seq_add_stmt (&switch_body, x);

  /* For EH_ELSE, emit the exception path (plus resx) now, then
     subsequently we only need consider the normal path.  */

          finally = gimple_eh_else_e_body (eh_else);
          lower_eh_constructs_1 (state, finally);

          emit_post_landing_pad (&eh_seq, tf->region);
          gimple_seq_add_seq (&eh_seq, finally);
          emit_resx (&eh_seq, tf->region);

      finally = gimple_eh_else_n_body (eh_else);

  else if (tf->may_throw)

      emit_post_landing_pad (&eh_seq, tf->region);

      x = gimple_build_assign (finally_tmp,
                               build_int_cst (integer_type_node, eh_index));
      gimple_seq_add_stmt (&eh_seq, x);

      x = gimple_build_goto (finally_label);
      gimple_seq_add_stmt (&eh_seq, x);

      tmp = build_int_cst (integer_type_node, eh_index);
      last_case = build_case_label (tmp, NULL,
                                    create_artificial_label (tf_loc));
      VEC_quick_push (tree, case_label_vec, last_case);

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&eh_seq, x);
      emit_resx (&eh_seq, tf->region);

  x = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  gimple_seq_add_seq (&tf->top_p_seq, finally);

  /* Redirect each incoming goto edge.  */

  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  /* Prepare the assignments to finally_tmp that are executed upon the
     entrance through a particular edge.  */

      unsigned int case_index;

      mod = gimple_seq_alloc ();

          x = gimple_build_assign (finally_tmp,
                                   build_int_cst (integer_type_node,

          gimple_seq_add_stmt (&mod, x);
          do_return_redirection (q, finally_label, mod);
          switch_id = return_index;

          x = gimple_build_assign (finally_tmp,
                                   build_int_cst (integer_type_node, q->index));
          gimple_seq_add_stmt (&mod, x);
          do_goto_redirection (q, finally_label, mod, tf);
          switch_id = q->index;

      case_index = j + q->index;
      if (VEC_length (tree, case_label_vec) <= case_index
          || !VEC_index (tree, case_label_vec, case_index))

          tmp = build_int_cst (integer_type_node, switch_id);
          case_lab = build_case_label (tmp, NULL,
                                       create_artificial_label (tf_loc));
          /* We store the cont_stmt in the pointer map, so that we can recover
             it in the loop below.  */

            cont_map = pointer_map_create ();
          slot = pointer_map_insert (cont_map, case_lab);
          *slot = q->cont_stmt;
          VEC_quick_push (tree, case_label_vec, case_lab);

  for (j = last_case_index; j < last_case_index + nlabels; j++)

      last_case = VEC_index (tree, case_label_vec, j);

      gcc_assert (last_case);
      gcc_assert (cont_map);

      slot = pointer_map_contains (cont_map, last_case);

      cont_stmt = *(gimple *) slot;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&switch_body, x);
      gimple_seq_add_stmt (&switch_body, cont_stmt);
      maybe_record_in_goto_queue (state, cont_stmt);

    pointer_map_destroy (cont_map);

  replace_goto_queue (tf);

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  sort_case_labels (case_label_vec);

  /* Build the switch statement, setting last_case to be the default
     label.  */
  switch_stmt = gimple_build_switch_vec (finally_tmp, last_case,

  gimple_set_location (switch_stmt, finally_loc);

  /* Need to link SWITCH_STMT after running replace_goto_queue
     due to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_stmt (&tf->top_p_seq, switch_stmt);
  gimple_seq_add_seq (&tf->top_p_seq, switch_body);

/* Decide whether or not we are going to duplicate the finally block.
   There are several considerations.

   First, if this is Java, then the finally block contains code
   written by the user.  It has line numbers associated with it,
   so duplicating the block means it's difficult to set a breakpoint.
   Since controlling code generation via -g is verboten, we simply
   never duplicate code without optimization.

   Second, we'd like to prevent egregious code growth.  One way to
   do this is to estimate the size of the finally block, multiply
   that by the number of copies we'd need to make, and compare against
   the estimate of the size of the switch machinery we'd have to add.  */
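/* A worked example of the (admittedly rough) estimates used below:
   a finally block counted at 8 insns with 3 destinations gives
   f_estimate = (8 + 1) * 3 = 27 versus sw_estimate = 10 + 2 * 3 = 16,
   so when optimizing for size the switch form would be chosen.  */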
decide_copy_try_finally (int ndests, bool may_throw, gimple_seq finally)

  int f_estimate, sw_estimate;

  /* If there's an EH_ELSE involved, the exception path is separate
     and really doesn't come into play for this computation.  */
  eh_else = get_eh_else (finally);

      ndests -= may_throw;
      finally = gimple_eh_else_n_body (eh_else);

      gimple_stmt_iterator gsi;

      for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))

          gimple stmt = gsi_stmt (gsi);
          if (!is_gimple_debug (stmt) && !gimple_clobber_p (stmt))

  /* Finally estimate N times, plus N gotos.  */
  f_estimate = count_insns_seq (finally, &eni_size_weights);
  f_estimate = (f_estimate + 1) * ndests;

  /* Switch statement (cost 10), N variable assignments, N gotos.  */
  sw_estimate = 10 + 2 * ndests;

  /* Optimize for size clearly wants our best guess.  */
  if (optimize_function_for_size_p (cfun))
    return f_estimate < sw_estimate;

  /* ??? These numbers are completely made up so far.  */

    return f_estimate < 100 || f_estimate < sw_estimate * 2;

    return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;

/* REG is the enclosing region for a possible cleanup region, or the region
   itself.  Returns TRUE if such a region would be unreachable.

   Cleanup regions within a must-not-throw region aren't actually reachable
   even if there are throwing stmts within them, because the personality
   routine will call terminate before unwinding.  */
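/* For example, with a region nesting of

	MUST_NOT_THROW > CLEANUP > CLEANUP

   both cleanup regions are dead: anything thrown inside them reaches
   the must-not-throw personality check (and thus terminate) before any
   cleanup could run.  */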
cleanup_is_dead_in (eh_region reg)

  while (reg && reg->type == ERT_CLEANUP)

  return (reg && reg->type == ERT_MUST_NOT_THROW);
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_FINALLY node
   to a sequence of labels and blocks, plus the exception region trees
   that record all the magic.  This is complicated by the need to
   arrange for the FINALLY block to be executed on all exits.  */
lower_try_finally (struct leh_state *state, gimple tp)

  struct leh_tf_state this_tf;
  struct leh_state this_state;

  gimple_seq old_eh_seq;

  /* Process the try block.  */

  memset (&this_tf, 0, sizeof (this_tf));
  this_tf.try_finally_expr = tp;

  this_tf.outer = state;
  if (using_eh_for_cleanups_p && !cleanup_is_dead_in (state->cur_region))

      this_tf.region = gen_eh_region_cleanup (state->cur_region);
      this_state.cur_region = this_tf.region;

      this_tf.region = NULL;
      this_state.cur_region = state->cur_region;

  this_state.ehp_region = state->ehp_region;
  this_state.tf = &this_tf;

  old_eh_seq = eh_seq;

  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));

  /* Determine if the try block is escaped through the bottom.  */
  this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));

  /* Determine if any exceptions are possible within the try block.  */

    this_tf.may_throw = eh_region_may_contain_throw (this_tf.region);
  if (this_tf.may_throw)
    honor_protect_cleanup_actions (state, &this_state, &this_tf);

  /* Determine how many edges (still) reach the finally block.  Or rather,
     how many destinations are reached by the finally block.  Use this to
     determine how we process the finally block itself.  */

  ndests = VEC_length (tree, this_tf.dest_array);
  ndests += this_tf.may_fallthru;
  ndests += this_tf.may_return;
  ndests += this_tf.may_throw;

  /* If the FINALLY block is not reachable, dike it out.  */

      gimple_seq_add_seq (&this_tf.top_p_seq, gimple_try_eval (tp));
      gimple_try_set_cleanup (tp, NULL);

  /* If the finally block doesn't fall through, then any destination
     we might try to impose there isn't reached either.  There may be
     some minor amount of cleanup and redirection still needed.  */
  else if (!gimple_seq_may_fallthru (gimple_try_cleanup (tp)))
    lower_try_finally_nofallthru (state, &this_tf);

  /* We can easily special-case redirection to a single destination.  */
  else if (ndests == 1)
    lower_try_finally_onedest (state, &this_tf);
  else if (decide_copy_try_finally (ndests, this_tf.may_throw,
                                    gimple_try_cleanup (tp)))
    lower_try_finally_copy (state, &this_tf);

    lower_try_finally_switch (state, &this_tf);

  /* If someone requested we add a label at the end of the transformed

  if (this_tf.fallthru_label)

      /* This must be reached only if ndests == 0.  */
      gimple x = gimple_build_label (this_tf.fallthru_label);
      gimple_seq_add_stmt (&this_tf.top_p_seq, x);

  VEC_free (tree, heap, this_tf.dest_array);
  free (this_tf.goto_queue);
  if (this_tf.goto_queue_map)
    pointer_map_destroy (this_tf.goto_queue_map);

  /* If there was an old (aka outer) eh_seq, append the current eh_seq.
     If there was no old eh_seq, then the append is trivially already done.  */

        eh_seq = old_eh_seq;

          gimple_seq new_eh_seq = eh_seq;
          eh_seq = old_eh_seq;
          gimple_seq_add_seq (&eh_seq, new_eh_seq);

  return this_tf.top_p_seq;
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_CATCH with a
   list of GIMPLE_CATCH to a sequence of labels and blocks, plus the
   exception region trees that record all the magic.  */
lower_catch (struct leh_state *state, gimple tp)

  eh_region try_region = NULL;
  struct leh_state this_state = *state;
  gimple_stmt_iterator gsi;

  location_t try_catch_loc = gimple_location (tp);

  if (flag_exceptions)

      try_region = gen_eh_region_try (state->cur_region);
      this_state.cur_region = try_region;

  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));

  if (!eh_region_may_contain_throw (try_region))
    return gimple_try_eval (tp);

  emit_eh_dispatch (&new_seq, try_region);
  emit_resx (&new_seq, try_region);

  this_state.cur_region = state->cur_region;
  this_state.ehp_region = try_region;

  for (gsi = gsi_start (gimple_try_cleanup (tp));

      gcatch = gsi_stmt (gsi);
      c = gen_eh_region_catch (try_region, gimple_catch_types (gcatch));

      handler = gimple_catch_handler (gcatch);
      lower_eh_constructs_1 (&this_state, handler);

      c->label = create_artificial_label (UNKNOWN_LOCATION);
      x = gimple_build_label (c->label);
      gimple_seq_add_stmt (&new_seq, x);

      gimple_seq_add_seq (&new_seq, handler);

      if (gimple_seq_may_fallthru (new_seq))

            out_label = create_artificial_label (try_catch_loc);

          x = gimple_build_goto (out_label);
          gimple_seq_add_stmt (&new_seq, x);

  gimple_try_set_cleanup (tp, new_seq);

  return frob_into_branch_around (tp, try_region, out_label);
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY with a
   GIMPLE_EH_FILTER to a sequence of labels and blocks, plus the exception
   region trees that record all the magic.  */

lower_eh_filter (struct leh_state *state, gimple tp)

  struct leh_state this_state = *state;
  eh_region this_region = NULL;

  inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));

  if (flag_exceptions)

      this_region = gen_eh_region_allowed (state->cur_region,
                                           gimple_eh_filter_types (inner));
      this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));

  if (!eh_region_may_contain_throw (this_region))
    return gimple_try_eval (tp);

  this_state.cur_region = state->cur_region;
  this_state.ehp_region = this_region;

  emit_eh_dispatch (&new_seq, this_region);
  emit_resx (&new_seq, this_region);

  this_region->u.allowed.label = create_artificial_label (UNKNOWN_LOCATION);
  x = gimple_build_label (this_region->u.allowed.label);
  gimple_seq_add_stmt (&new_seq, x);

  lower_eh_constructs_1 (&this_state, gimple_eh_filter_failure (inner));
  gimple_seq_add_seq (&new_seq, gimple_eh_filter_failure (inner));

  gimple_try_set_cleanup (tp, new_seq);

  return frob_into_branch_around (tp, this_region, NULL);
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY with
   a GIMPLE_EH_MUST_NOT_THROW to a sequence of labels and blocks,
   plus the exception region trees that record all the magic.  */

lower_eh_must_not_throw (struct leh_state *state, gimple tp)

  struct leh_state this_state = *state;

  if (flag_exceptions)

      gimple inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
      eh_region this_region;

      this_region = gen_eh_region_must_not_throw (state->cur_region);
      this_region->u.must_not_throw.failure_decl
        = gimple_eh_must_not_throw_fndecl (inner);
      this_region->u.must_not_throw.failure_loc = gimple_location (tp);

      /* In order to get mangling applied to this decl, we must mark it
         used now.  Otherwise, pass_ipa_free_lang_data won't think it
         really needed.  */
      TREE_USED (this_region->u.must_not_throw.failure_decl) = 1;

      this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));

  return gimple_try_eval (tp);
/* Implement a cleanup expression.  This is similar to try-finally,
   except that we only execute the cleanup block for exception edges.  */

lower_cleanup (struct leh_state *state, gimple tp)

  struct leh_state this_state = *state;
  eh_region this_region = NULL;
  struct leh_tf_state fake_tf;

  bool cleanup_dead = cleanup_is_dead_in (state->cur_region);

  if (flag_exceptions && !cleanup_dead)

      this_region = gen_eh_region_cleanup (state->cur_region);
      this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));

  if (cleanup_dead || !eh_region_may_contain_throw (this_region))
    return gimple_try_eval (tp);

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.top_p = fake_tf.try_finally_expr = tp;
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
  fake_tf.may_throw = true;

  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  if (fake_tf.may_throw)

      /* In this case honor_protect_cleanup_actions had nothing to do,
         and we should process this normally.  */
      lower_eh_constructs_1 (state, gimple_try_cleanup (tp));
      result = frob_into_branch_around (tp, this_region,
                                        fake_tf.fallthru_label);

      /* In this case honor_protect_cleanup_actions did nearly all of
         the work.  All we have left is to append the fallthru_label.  */

      result = gimple_try_eval (tp);
      if (fake_tf.fallthru_label)

          gimple x = gimple_build_label (fake_tf.fallthru_label);
          gimple_seq_add_stmt (&result, x);
/* Main loop for lowering eh constructs.  Also moves gsi to the next
   statement.  */

lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi)

  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))

        tree fndecl = gimple_call_fndecl (stmt);

        if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
          switch (DECL_FUNCTION_CODE (fndecl))

            case BUILT_IN_EH_POINTER:
              /* The front end may have generated a call to
                 __builtin_eh_pointer (0) within a catch region.  Replace
                 this zero argument with the current catch region number.  */
              if (state->ehp_region)

                  tree nr = build_int_cst (integer_type_node,
                                           state->ehp_region->index);
                  gimple_call_set_arg (stmt, 0, nr);

                  /* The user has done something silly.  Remove it.  */
                  rhs = null_pointer_node;

            case BUILT_IN_EH_FILTER:
              /* ??? This should never appear, but since it's a builtin it
                 is accessible to abuse by users.  Just remove it and
                 replace the use with the arbitrary value zero.  */
              rhs = build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);

              lhs = gimple_call_lhs (stmt);
              x = gimple_build_assign (lhs, rhs);
              gsi_insert_before (gsi, x, GSI_SAME_STMT);

            case BUILT_IN_EH_COPY_VALUES:
              /* Likewise this should not appear.  Remove it.  */
              gsi_remove (gsi, true);
      /* If the stmt can throw use a new temporary for the assignment
         to a LHS.  This makes sure the old value of the LHS is
         available on the EH edge.  Only do so for statements that
         potentially fall thru (no noreturn calls e.g.), otherwise
         this new assignment might create fake fallthru regions.  */
      if (stmt_could_throw_p (stmt)
          && gimple_has_lhs (stmt)
          && gimple_stmt_may_fallthru (stmt)
          && !tree_could_throw_p (gimple_get_lhs (stmt))
          && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))

          tree lhs = gimple_get_lhs (stmt);
          tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
          gimple s = gimple_build_assign (lhs, tmp);
          gimple_set_location (s, gimple_location (stmt));
          gimple_set_block (s, gimple_block (stmt));
          gimple_set_lhs (stmt, tmp);
          if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
              || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
            DECL_GIMPLE_REG_P (tmp) = 1;
          gsi_insert_after (gsi, s, GSI_SAME_STMT);

      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && stmt_could_throw_p (stmt))

          record_stmt_eh_region (state->cur_region, stmt);
          note_eh_region_may_contain_throw (state->cur_region);

      maybe_record_in_goto_queue (state, stmt);

      verify_norecord_switch_expr (state, stmt);

      if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
        replace = lower_try_finally (state, stmt);

          x = gimple_seq_first_stmt (gimple_try_cleanup (stmt));

              replace = gimple_try_eval (stmt);
              lower_eh_constructs_1 (state, replace);

            switch (gimple_code (x))

                replace = lower_catch (state, stmt);

              case GIMPLE_EH_FILTER:
                replace = lower_eh_filter (state, stmt);

              case GIMPLE_EH_MUST_NOT_THROW:
                replace = lower_eh_must_not_throw (state, stmt);

              case GIMPLE_EH_ELSE:
                /* This code is only valid with GIMPLE_TRY_FINALLY.  */

                replace = lower_cleanup (state, stmt);

      /* Remove the old stmt and insert the transformed sequence
         instead.  */
      gsi_insert_seq_before (gsi, replace, GSI_SAME_STMT);
      gsi_remove (gsi, true);

      /* Return since we don't want gsi_next ().  */

    case GIMPLE_EH_ELSE:
      /* We should be eliminating this in lower_try_finally et al.  */

      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */

/* A helper to unwrap a gimple_seq and feed stmts to lower_eh_constructs_2.  */

lower_eh_constructs_1 (struct leh_state *state, gimple_seq seq)

  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (seq); !gsi_end_p (gsi);)
    lower_eh_constructs_2 (state, &gsi);
lower_eh_constructs (void)

  struct leh_state null_state;

  bodyp = gimple_body (current_function_decl);

  finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
  eh_region_may_contain_throw_map = BITMAP_ALLOC (NULL);
  memset (&null_state, 0, sizeof (null_state));

  collect_finally_tree_1 (bodyp, NULL);
  lower_eh_constructs_1 (&null_state, bodyp);

  /* We assume there's a return statement, or something, at the end of
     the function, and thus plopping the EH sequence afterward won't
     change anything.  */
  gcc_assert (!gimple_seq_may_fallthru (bodyp));
  gimple_seq_add_seq (&bodyp, eh_seq);

  /* We assume that since BODYP already existed, adding EH_SEQ to it
     didn't change its value, and we don't have to re-set the function.  */
  gcc_assert (bodyp == gimple_body (current_function_decl));

  htab_delete (finally_tree);
  BITMAP_FREE (eh_region_may_contain_throw_map);

  /* If this function needs a language specific EH personality routine
     and the frontend didn't already set one do so now.  */
  if (function_needs_eh_personality (cfun) == eh_personality_lang
      && !DECL_FUNCTION_PERSONALITY (current_function_decl))
    DECL_FUNCTION_PERSONALITY (current_function_decl)
      = lang_hooks.eh_personality ();
struct gimple_opt_pass pass_lower_eh =
{

  lower_eh_constructs,                  /* execute */

  0,                                    /* static_pass_number */
  TV_TREE_EH,                           /* tv_id */
  PROP_gimple_lcf,                      /* properties_required */
  PROP_gimple_leh,                      /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */

/* Create the multiple edges from an EH_DISPATCH statement to all of
   the possible handlers for its EH region.  Return true if there's
   no fallthru edge; false if there is.  */

make_eh_dispatch_edges (gimple stmt)

  basic_block src, dst;

  r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
  src = gimple_bb (stmt);

      for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)

          dst = label_to_block (c->label);
          make_edge (src, dst, 0);

          /* A catch-all handler doesn't have a fallthru.  */
          if (c->type_list == NULL)

    case ERT_ALLOWED_EXCEPTIONS:
      dst = label_to_block (r->u.allowed.label);
      make_edge (src, dst, 0);

/* Create the single EH edge from STMT to its nearest landing pad,
   if there is such a landing pad within the current function.  */

make_eh_edges (gimple stmt)

  basic_block src, dst;

  lp_nr = lookup_stmt_eh_lp (stmt);

  lp = get_eh_landing_pad_from_number (lp_nr);
  gcc_assert (lp != NULL);

  src = gimple_bb (stmt);
  dst = label_to_block (lp->post_landing_pad);
  make_edge (src, dst, EDGE_EH);

/* Do the work in redirecting EDGE_IN to NEW_BB within the EH region tree;
   do not actually perform the final edge redirection.

   CHANGE_REGION is true when we're being called from cleanup_empty_eh and
   we intend to change the destination EH region as well; this means
   EH_LANDING_PAD_NR must already be set on the destination block label.
   If false, we're being called from generic cfg manipulation code and we
   should preserve our place within the region tree.  */

redirect_eh_edge_1 (edge edge_in, basic_block new_bb, bool change_region)

  eh_landing_pad old_lp, new_lp;

  int old_lp_nr, new_lp_nr;
  tree old_label, new_label;

  old_bb = edge_in->dest;
  old_label = gimple_block_label (old_bb);
  old_lp_nr = EH_LANDING_PAD_NR (old_label);
  gcc_assert (old_lp_nr > 0);
  old_lp = get_eh_landing_pad_from_number (old_lp_nr);

  throw_stmt = last_stmt (edge_in->src);
  gcc_assert (lookup_stmt_eh_lp (throw_stmt) == old_lp_nr);

  new_label = gimple_block_label (new_bb);

  /* Look for an existing region that might be using NEW_BB already.  */
  new_lp_nr = EH_LANDING_PAD_NR (new_label);

      new_lp = get_eh_landing_pad_from_number (new_lp_nr);
      gcc_assert (new_lp);

      /* Unless CHANGE_REGION is true, the new and old landing pad
         had better be associated with the same EH region.  */
      gcc_assert (change_region || new_lp->region == old_lp->region);

      gcc_assert (!change_region);

  /* Notice when we redirect the last EH edge away from OLD_BB.  */
  FOR_EACH_EDGE (e, ei, old_bb->preds)
    if (e != edge_in && (e->flags & EDGE_EH))
2240 if (new_lp)
2242 /* NEW_LP already exists. If there are still edges into OLD_LP,
2243 there's nothing to do with the EH tree. If there are no more
2244 edges into OLD_LP, then we want to remove OLD_LP as it is unused.
2245 If CHANGE_REGION is true, then our caller is expecting to remove
2246 the landing pad. */
2247 if (e == NULL && !change_region)
2248 remove_eh_landing_pad (old_lp);
2252 /* No correct landing pad exists. If there are no more edges
2253 into OLD_LP, then we can simply re-use the existing landing pad.
2254 Otherwise, we have to create a new landing pad. */
2256 if (e == NULL)
2257 EH_LANDING_PAD_NR (old_lp->post_landing_pad) = 0;
2258 new_lp = old_lp;
2260 else
2261 new_lp = gen_eh_landing_pad (old_lp->region);
2262 new_lp->post_landing_pad = new_label;
2263 EH_LANDING_PAD_NR (new_label) = new_lp->index;
2266 /* Maybe move the throwing statement to the new region. */
2267 if (old_lp != new_lp)
2269 remove_stmt_from_eh_lp (throw_stmt);
2270 add_stmt_to_eh_lp (throw_stmt, new_lp->index);
2274 /* Redirect EH edge E to NEW_BB. */
2277 redirect_eh_edge (edge edge_in, basic_block new_bb)
2279 redirect_eh_edge_1 (edge_in, new_bb, false);
2280 return ssa_redirect_edge (edge_in, new_bb);
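/* A hedged usage sketch, not part of the original source: a caller in
   generic CFG code redirects the EH edge and then commits the PHI
   argument updates queued by ssa_redirect_edge.  The helper name is
   hypothetical.  */

static void
example_redirect_eh_and_flush (edge e, basic_block new_bb)
{
  e = redirect_eh_edge (e, new_bb);
  flush_pending_stmts (e);
}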
2283 /* This is a subroutine of gimple_redirect_edge_and_branch. Update the
2284 labels for redirecting a non-fallthru EH_DISPATCH edge E to NEW_BB.
2285 The actual edge update will happen in the caller. */
2288 redirect_eh_dispatch_edge (gimple stmt, edge e, basic_block new_bb)
2290 tree new_lab = gimple_block_label (new_bb);
2291 bool any_changed = false;
2296 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
2297 switch (r->type)
2299 case ERT_TRY:
2300 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
2302 old_bb = label_to_block (c->label);
2303 if (old_bb == e->dest)
2305 c->label = new_lab;
2306 any_changed = true;
2309 break;
2311 case ERT_ALLOWED_EXCEPTIONS:
2312 old_bb = label_to_block (r->u.allowed.label);
2313 gcc_assert (old_bb == e->dest);
2314 r->u.allowed.label = new_lab;
2315 any_changed = true;
2316 break;
2318 default:
2319 gcc_unreachable ();
2322 gcc_assert (any_changed);
2325 /* Helper function for operation_could_trap_p and stmt_could_throw_p. */
2328 operation_could_trap_helper_p (enum tree_code op,
2329 bool fp_operation,
2330 bool honor_trapv,
2331 bool honor_nans,
2332 bool honor_snans,
2333 tree divisor,
2334 bool *handled)
2336 *handled = true;
2337 switch (op)
2339 case TRUNC_DIV_EXPR:
2340 case CEIL_DIV_EXPR:
2341 case FLOOR_DIV_EXPR:
2342 case ROUND_DIV_EXPR:
2343 case EXACT_DIV_EXPR:
2344 case CEIL_MOD_EXPR:
2345 case FLOOR_MOD_EXPR:
2346 case ROUND_MOD_EXPR:
2347 case TRUNC_MOD_EXPR:
2348 case RDIV_EXPR:
2349 if (honor_snans || honor_trapv)
2350 return true;
2351 if (fp_operation)
2352 return flag_trapping_math;
2353 if (!TREE_CONSTANT (divisor) || integer_zerop (divisor))
2354 return true;
2355 return false;
2357 case LT_EXPR:
2358 case LE_EXPR:
2359 case GT_EXPR:
2360 case GE_EXPR:
2361 case LTGT_EXPR:
2362 /* Some floating point comparisons may trap. */
2363 return honor_nans;
2365 case EQ_EXPR:
2366 case NE_EXPR:
2367 case UNORDERED_EXPR:
2368 case ORDERED_EXPR:
2369 case UNLT_EXPR:
2370 case UNLE_EXPR:
2371 case UNGT_EXPR:
2372 case UNGE_EXPR:
2373 case UNEQ_EXPR:
2374 return honor_snans;
2376 case CONVERT_EXPR:
2377 case FIX_TRUNC_EXPR:
2378 /* Conversion of floating point might trap. */
2379 return honor_nans;
2381 case NEGATE_EXPR:
2382 case ABS_EXPR:
2383 case CONJ_EXPR:
2384 /* These operations don't trap with floating point. */
2385 if (honor_trapv)
2386 return true;
2387 return false;
2389 case PLUS_EXPR:
2390 case MINUS_EXPR:
2391 case MULT_EXPR:
2392 /* Any floating arithmetic may trap. */
2393 if (fp_operation && flag_trapping_math)
2394 return true;
2395 if (honor_trapv)
2396 return true;
2397 return false;
2399 case COMPLEX_EXPR:
2400 case CONSTRUCTOR:
2401 /* Constructing an object cannot trap. */
2402 return false;
2404 default:
2405 /* Any floating arithmetic may trap. */
2406 if (fp_operation && flag_trapping_math)
2407 return true;
2409 *handled = false;
2410 return false;
2414 /* Return true if operation OP may trap. FP_OPERATION is true if OP is applied
2415 to floating-point values. HONOR_TRAPV is true if OP is applied to integer
2416 type operands that may trap. If OP is a division operator, DIVISOR contains
2417 the value of the divisor. */
2419 bool
2420 operation_could_trap_p (enum tree_code op, bool fp_operation, bool honor_trapv,
2421 tree divisor)
2423 bool honor_nans = (fp_operation && flag_trapping_math
2424 && !flag_finite_math_only);
2425 bool honor_snans = fp_operation && flag_signaling_nans != 0;
2426 bool handled;
2428 if (TREE_CODE_CLASS (op) != tcc_comparison
2429 && TREE_CODE_CLASS (op) != tcc_unary
2430 && TREE_CODE_CLASS (op) != tcc_binary)
2431 return false;
2433 return operation_could_trap_helper_p (op, fp_operation, honor_trapv,
2434 honor_nans, honor_snans, divisor,
2435 &handled);
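/* Illustrative sketch, not part of the original source: querying whether
   an integer division could trap.  A non-constant or zero divisor makes
   TRUNC_DIV_EXPR trappable; the fp_operation and honor_trapv arguments
   are switched off in this hypothetical query.  */

static bool
example_int_div_could_trap_p (tree divisor)
{
  return operation_could_trap_p (TRUNC_DIV_EXPR, /*fp_operation=*/false,
				 /*honor_trapv=*/false, divisor);
}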
2438 /* Return true if EXPR can trap, as in dereferencing an invalid pointer
2439 location or floating point arithmetic. C.f. the rtl version, may_trap_p.
2440 This routine expects only GIMPLE lhs or rhs input. */
2443 tree_could_trap_p (tree expr)
2445 enum tree_code code;
2446 bool fp_operation = false;
2447 bool honor_trapv = false;
2448 tree t, base, div = NULL_TREE;
2450 if (!expr)
2451 return false;
2453 code = TREE_CODE (expr);
2454 t = TREE_TYPE (expr);
2456 if (t)
2458 if (COMPARISON_CLASS_P (expr))
2459 fp_operation = FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)));
2460 else
2461 fp_operation = FLOAT_TYPE_P (t);
2462 honor_trapv = INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t);
2465 if (TREE_CODE_CLASS (code) == tcc_binary)
2466 div = TREE_OPERAND (expr, 1);
2467 if (operation_could_trap_p (code, fp_operation, honor_trapv, div))
2468 return true;
2470 restart:
2471 switch (code)
2473 case TARGET_MEM_REF:
2474 if (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR
2475 && !TMR_INDEX (expr) && !TMR_INDEX2 (expr))
2476 return false;
2477 return !TREE_THIS_NOTRAP (expr);
2479 case COMPONENT_REF:
2480 case REALPART_EXPR:
2481 case IMAGPART_EXPR:
2482 case BIT_FIELD_REF:
2483 case VIEW_CONVERT_EXPR:
2484 case WITH_SIZE_EXPR:
2485 expr = TREE_OPERAND (expr, 0);
2486 code = TREE_CODE (expr);
2487 goto restart;
2489 case ARRAY_RANGE_REF:
2490 base = TREE_OPERAND (expr, 0);
2491 if (tree_could_trap_p (base))
2492 return true;
2493 if (TREE_THIS_NOTRAP (expr))
2494 return false;
2495 return !range_in_array_bounds_p (expr);
2497 case ARRAY_REF:
2498 base = TREE_OPERAND (expr, 0);
2499 if (tree_could_trap_p (base))
2500 return true;
2501 if (TREE_THIS_NOTRAP (expr))
2502 return false;
2503 return !in_array_bounds_p (expr);
2505 case MEM_REF:
2506 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
2507 return false;
2508 /* Fallthru. */
2509 case INDIRECT_REF:
2510 return !TREE_THIS_NOTRAP (expr);
2512 case ASM_EXPR:
2513 return TREE_THIS_VOLATILE (expr);
2515 case CALL_EXPR:
2516 t = get_callee_fndecl (expr);
2517 /* Assume that calls to weak functions may trap. */
2518 if (!t || !DECL_P (t))
2519 return true;
2520 if (DECL_WEAK (t))
2521 return tree_could_trap_p (t);
2522 return false;
2524 case FUNCTION_DECL:
2525 /* Assume that accesses to weak functions may trap, unless we know
2526 they are certainly defined in current TU or in some other
2527 LTO partition. */
2528 if (DECL_WEAK (expr))
2530 struct cgraph_node *node;
2531 if (!DECL_EXTERNAL (expr))
2532 return false;
2533 node = cgraph_function_node (cgraph_get_node (expr), NULL);
2534 if (node && node->in_other_partition)
2535 return false;
2536 return true;
2538 return false;
2540 case VAR_DECL:
2541 /* Assume that accesses to weak vars may trap, unless we know
2542 they are certainly defined in current TU or in some other
2543 LTO partition. */
2544 if (DECL_WEAK (expr))
2546 struct varpool_node *node;
2547 if (!DECL_EXTERNAL (expr))
2548 return false;
2549 node = varpool_variable_node (varpool_get_node (expr), NULL);
2550 if (node && node->in_other_partition)
2551 return false;
2552 return true;
2554 return false;
2556 default:
2557 return false;
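/* Illustrative sketch, not part of the original source: a single-rhs
   assignment may trap if either the load it performs or the store to its
   lhs can trap; both operands are GIMPLE lvalues, which is the input
   tree_could_trap_p expects.  The helper name is hypothetical.  */

static bool
example_assign_could_trap_p (gimple stmt)
{
  return gimple_assign_single_p (stmt)
	 && (tree_could_trap_p (gimple_assign_lhs (stmt))
	     || tree_could_trap_p (gimple_assign_rhs1 (stmt)));
}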
2562 /* Helper for stmt_could_throw_p. Return true if STMT (assumed to be
2563 an assignment or a conditional) may throw. */
2566 stmt_could_throw_1_p (gimple stmt)
2568 enum tree_code code = gimple_expr_code (stmt);
2569 bool honor_nans = false;
2570 bool honor_snans = false;
2571 bool fp_operation = false;
2572 bool honor_trapv = false;
2573 tree t;
2574 size_t i;
2575 bool handled, ret;
2577 if (TREE_CODE_CLASS (code) == tcc_comparison
2578 || TREE_CODE_CLASS (code) == tcc_unary
2579 || TREE_CODE_CLASS (code) == tcc_binary)
2581 if (is_gimple_assign (stmt)
2582 && TREE_CODE_CLASS (code) == tcc_comparison)
2583 t = TREE_TYPE (gimple_assign_rhs1 (stmt));
2584 else if (gimple_code (stmt) == GIMPLE_COND)
2585 t = TREE_TYPE (gimple_cond_lhs (stmt));
2586 else
2587 t = gimple_expr_type (stmt);
2588 fp_operation = FLOAT_TYPE_P (t);
2589 if (fp_operation)
2591 honor_nans = flag_trapping_math && !flag_finite_math_only;
2592 honor_snans = flag_signaling_nans != 0;
2594 else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
2595 honor_trapv = true;
2598 /* Check if the main expression may trap. */
2599 t = is_gimple_assign (stmt) ? gimple_assign_rhs2 (stmt) : NULL;
2600 ret = operation_could_trap_helper_p (code, fp_operation, honor_trapv,
2601 honor_nans, honor_snans, t,
2602 &handled);
2603 if (handled)
2604 return ret;
2606 /* If the expression does not trap, see if any of the individual operands may
2607 trap. */
2608 for (i = 0; i < gimple_num_ops (stmt); i++)
2609 if (tree_could_trap_p (gimple_op (stmt, i)))
2610 return true;
2612 return false;
2616 /* Return true if statement STMT could throw an exception. */
2619 stmt_could_throw_p (gimple stmt)
2621 if (!flag_exceptions)
2622 return false;
2624 /* The only statements that can throw an exception are assignments,
2625 conditionals, calls, resx, and asms. */
2626 switch (gimple_code (stmt))
2628 case GIMPLE_RESX:
2629 return true;
2631 case GIMPLE_CALL:
2632 return !gimple_call_nothrow_p (stmt);
2634 case GIMPLE_ASSIGN:
2635 case GIMPLE_COND:
2636 if (!cfun->can_throw_non_call_exceptions)
2637 return false;
2638 return stmt_could_throw_1_p (stmt);
2640 case GIMPLE_ASM:
2641 if (!cfun->can_throw_non_call_exceptions)
2642 return false;
2643 return gimple_asm_volatile_p (stmt);
2645 default:
2646 return false;
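/* Illustrative sketch, not part of the original source: the usual guard
   before associating a statement with an EH region; statements that
   cannot throw should never be entered into the throw-stmt table.  */

static void
example_maybe_record_in_region (eh_region region, gimple stmt)
{
  if (stmt_could_throw_p (stmt))
    record_stmt_eh_region (region, stmt);
}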
2651 /* Return true if expression T could throw an exception. */
2654 tree_could_throw_p (tree t)
2656 if (!flag_exceptions)
2657 return false;
2658 if (TREE_CODE (t) == MODIFY_EXPR)
2660 if (cfun->can_throw_non_call_exceptions
2661 && tree_could_trap_p (TREE_OPERAND (t, 0)))
2662 return true;
2663 t = TREE_OPERAND (t, 1);
2666 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2667 t = TREE_OPERAND (t, 0);
2668 if (TREE_CODE (t) == CALL_EXPR)
2669 return (call_expr_flags (t) & ECF_NOTHROW) == 0;
2670 if (cfun->can_throw_non_call_exceptions)
2671 return tree_could_trap_p (t);
2672 return false;
2675 /* Return true if STMT can throw an exception that is not caught within
2676 the current function (CFUN). */
2679 stmt_can_throw_external (gimple stmt)
2683 if (!stmt_could_throw_p (stmt))
2684 return false;
2686 lp_nr = lookup_stmt_eh_lp (stmt);
2687 return lp_nr <= 0;
2690 /* Return true if STMT can throw an exception that is caught within
2691 the current function (CFUN). */
2694 stmt_can_throw_internal (gimple stmt)
2698 if (!stmt_could_throw_p (stmt))
2699 return false;
2701 lp_nr = lookup_stmt_eh_lp (stmt);
2702 return lp_nr > 0;
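/* Illustrative sketch, not part of the original source: the two
   predicates above partition throwing statements by landing-pad number,
   so "handled locally" is equivalent to not throwing externally.  */

static bool
example_throw_handled_locally_p (gimple stmt)
{
  return stmt_could_throw_p (stmt) && !stmt_can_throw_external (stmt);
}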
2705 /* Given a statement STMT in IFUN, if STMT can no longer throw, then
2706 remove any entry it might have from the EH table. Return true if
2707 any change was made. */
2710 maybe_clean_eh_stmt_fn (struct function *ifun, gimple stmt)
2712 if (stmt_could_throw_p (stmt))
2713 return false;
2714 return remove_stmt_from_eh_lp_fn (ifun, stmt);
2717 /* Likewise, but always use the current function. */
2720 maybe_clean_eh_stmt (gimple stmt)
2722 return maybe_clean_eh_stmt_fn (cfun, stmt);
2725 /* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
2726 OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
2727 in the table if it should be in there. Return TRUE if a replacement was
2728 done that may require an EH edge purge. */
2731 maybe_clean_or_replace_eh_stmt (gimple old_stmt, gimple new_stmt)
2733 int lp_nr = lookup_stmt_eh_lp (old_stmt);
2737 bool new_stmt_could_throw = stmt_could_throw_p (new_stmt);
2739 if (new_stmt == old_stmt && new_stmt_could_throw)
2740 return false;
2742 remove_stmt_from_eh_lp (old_stmt);
2743 if (new_stmt_could_throw)
2745 add_stmt_to_eh_lp (new_stmt, lp_nr);
2746 return false;
2748 else
2749 return true;
2752 return false;
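/* Illustrative sketch, not part of the original source: the caller
   pattern used after folding a statement in place.  When the replacement
   can no longer throw, the now-dead EH edges out of BB must be purged.  */

static bool
example_fold_and_cleanup_eh (basic_block bb, gimple old_stmt, gimple new_stmt)
{
  if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
    return gimple_purge_dead_eh_edges (bb);
  return false;
}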
2755 /* Given a statement OLD_STMT in OLD_FUN and a duplicate statement NEW_STMT
2756 in NEW_FUN, copy the EH table data from OLD_STMT to NEW_STMT. The MAP
2757 operand is the return value of duplicate_eh_regions. */
2760 maybe_duplicate_eh_stmt_fn (struct function *new_fun, gimple new_stmt,
2761 struct function *old_fun, gimple old_stmt,
2762 struct pointer_map_t *map, int default_lp_nr)
2764 int old_lp_nr, new_lp_nr;
2767 if (!stmt_could_throw_p (new_stmt))
2768 return false;
2770 old_lp_nr = lookup_stmt_eh_lp_fn (old_fun, old_stmt);
2771 if (old_lp_nr == 0)
2773 if (default_lp_nr == 0)
2774 return false;
2775 new_lp_nr = default_lp_nr;
2777 else if (old_lp_nr > 0)
2779 eh_landing_pad old_lp, new_lp;
2781 old_lp = VEC_index (eh_landing_pad, old_fun->eh->lp_array, old_lp_nr);
2782 slot = pointer_map_contains (map, old_lp);
2783 new_lp = (eh_landing_pad) *slot;
2784 new_lp_nr = new_lp->index;
2788 eh_region old_r, new_r;
2790 old_r = VEC_index (eh_region, old_fun->eh->region_array, -old_lp_nr);
2791 slot = pointer_map_contains (map, old_r);
2792 new_r = (eh_region) *slot;
2793 new_lp_nr = -new_r->index;
2796 add_stmt_to_eh_lp_fn (new_fun, new_stmt, new_lp_nr);
2797 return true;
2800 /* Similar, but both OLD_STMT and NEW_STMT are within the current function,
2801 and thus no remapping is required. */
2804 maybe_duplicate_eh_stmt (gimple new_stmt, gimple old_stmt)
2808 if (!stmt_could_throw_p (new_stmt))
2809 return false;
2811 lp_nr = lookup_stmt_eh_lp (old_stmt);
2812 if (lp_nr == 0)
2813 return false;
2815 add_stmt_to_eh_lp (new_stmt, lp_nr);
2816 return true;
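/* Illustrative sketch, not part of the original source: when duplicating
   a statement within the current function, the copy must be entered into
   the same EH landing pad as the original or the EH table goes stale.  */

static gimple
example_duplicate_before (gimple_stmt_iterator *gsi, gimple stmt)
{
  gimple copy = gimple_copy (stmt);
  gsi_insert_before (gsi, copy, GSI_SAME_STMT);
  maybe_duplicate_eh_stmt (copy, stmt);
  return copy;
}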
2819 /* Returns TRUE if oneh and twoh are exception handlers (gimple_try_cleanup of
2820 GIMPLE_TRY) that are similar enough to be considered the same. Currently
2821 this only handles handlers consisting of a single call, as that's the
2822 important case for C++: a destructor call for a particular object showing
2823 up in multiple handlers. */
2826 same_handler_p (gimple_seq oneh, gimple_seq twoh)
2828 gimple_stmt_iterator gsi;
2832 gsi = gsi_start (oneh);
2833 if (!gsi_one_before_end_p (gsi))
2834 return false;
2835 ones = gsi_stmt (gsi);
2837 gsi = gsi_start (twoh);
2838 if (!gsi_one_before_end_p (gsi))
2839 return false;
2840 twos = gsi_stmt (gsi);
2842 if (!is_gimple_call (ones)
2843 || !is_gimple_call (twos)
2844 || gimple_call_lhs (ones)
2845 || gimple_call_lhs (twos)
2846 || gimple_call_chain (ones)
2847 || gimple_call_chain (twos)
2848 || !gimple_call_same_target_p (ones, twos)
2849 || gimple_call_num_args (ones) != gimple_call_num_args (twos))
2850 return false;
2852 for (ai = 0; ai < gimple_call_num_args (ones); ++ai)
2853 if (!operand_equal_p (gimple_call_arg (ones, ai),
2854 gimple_call_arg (twos, ai), 0))
2855 return false;
2857 return true;
2860 /* Optimize
2861 try { A() } finally { try { ~B() } catch { ~A() } }
2862 try { ... } finally { ~A() }
2863 into
2864 try { A() } catch { ~B() }
2865 try { ~B() ... } finally { ~A() }
2867 This occurs frequently in C++, where A is a local variable and B is a
2868 temporary used in the initializer for A. */
2871 optimize_double_finally (gimple one, gimple two)
2874 gimple_stmt_iterator gsi;
2876 gsi = gsi_start (gimple_try_cleanup (one));
2877 if (!gsi_one_before_end_p (gsi))
2878 return;
2880 oneh = gsi_stmt (gsi);
2881 if (gimple_code (oneh) != GIMPLE_TRY
2882 || gimple_try_kind (oneh) != GIMPLE_TRY_CATCH)
2883 return;
2885 if (same_handler_p (gimple_try_cleanup (oneh), gimple_try_cleanup (two)))
2887 gimple_seq seq = gimple_try_eval (oneh);
2889 gimple_try_set_cleanup (one, seq);
2890 gimple_try_set_kind (one, GIMPLE_TRY_CATCH);
2891 seq = copy_gimple_seq_and_replace_locals (seq);
2892 gimple_seq_add_seq (&seq, gimple_try_eval (two));
2893 gimple_try_set_eval (two, seq);
2897 /* Perform EH refactoring optimizations that are simpler to do when code
2898 flow has been lowered but EH structures haven't. */
2901 refactor_eh_r (gimple_seq seq)
2903 gimple_stmt_iterator gsi;
2908 gsi = gsi_start (seq);
2911 one = two;
2912 if (gsi_end_p (gsi))
2913 two = NULL;
2914 else
2915 two = gsi_stmt (gsi);
2916 if (one
2917 && two
2918 && gimple_code (one) == GIMPLE_TRY
2919 && gimple_code (two) == GIMPLE_TRY
2920 && gimple_try_kind (one) == GIMPLE_TRY_FINALLY
2921 && gimple_try_kind (two) == GIMPLE_TRY_FINALLY)
2922 optimize_double_finally (one, two);
2924 switch (gimple_code (one))
2926 case GIMPLE_TRY:
2927 refactor_eh_r (gimple_try_eval (one));
2928 refactor_eh_r (gimple_try_cleanup (one));
2929 break;
2930 case GIMPLE_CATCH:
2931 refactor_eh_r (gimple_catch_handler (one));
2932 break;
2933 case GIMPLE_EH_FILTER:
2934 refactor_eh_r (gimple_eh_filter_failure (one));
2935 break;
2936 case GIMPLE_EH_ELSE:
2937 refactor_eh_r (gimple_eh_else_n_body (one));
2938 refactor_eh_r (gimple_eh_else_e_body (one));
2939 break;
2940 default:
2941 break;
2943 if (two)
2944 gsi_next (&gsi);
2945 else
2946 break;
2953 refactor_eh_r (gimple_body (current_function_decl));
2954 return 0;
2958 gate_refactor_eh (void)
2960 return flag_exceptions != 0;
2963 struct gimple_opt_pass pass_refactor_eh =
2966 GIMPLE_PASS,
2967 "ehopt", /* name */
2968 gate_refactor_eh, /* gate */
2969 refactor_eh, /* execute */
2970 NULL, /* sub */
2971 NULL, /* next */
2972 0, /* static_pass_number */
2973 TV_TREE_EH, /* tv_id */
2974 PROP_gimple_lcf, /* properties_required */
2975 0, /* properties_provided */
2976 0, /* properties_destroyed */
2977 0, /* todo_flags_start */
2978 0 /* todo_flags_finish */
2982 /* At the end of gimple optimization, we can lower RESX. */
2985 lower_resx (basic_block bb, gimple stmt, struct pointer_map_t *mnt_map)
2988 eh_region src_r, dst_r;
2989 gimple_stmt_iterator gsi;
2994 lp_nr = lookup_stmt_eh_lp (stmt);
2995 if (lp_nr != 0)
2996 dst_r = get_eh_region_from_lp_number (lp_nr);
2997 else
2998 dst_r = NULL;
3000 src_r = get_eh_region_from_number (gimple_resx_region (stmt));
3001 gsi = gsi_last_bb (bb);
3005 /* We can wind up with no source region when pass_cleanup_eh shows
3006 that there are no entries into an eh region and deletes it, but
3007 then the block that contains the resx isn't removed. This can
3008 happen without optimization when the switch statement created by
3009 lower_try_finally_switch isn't simplified to remove the eh case.
3011 Resolve this by expanding the resx node to an abort. */
3013 fn = builtin_decl_implicit (BUILT_IN_TRAP);
3014 x = gimple_build_call (fn, 0);
3015 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3017 while (EDGE_COUNT (bb->succs) > 0)
3018 remove_edge (EDGE_SUCC (bb, 0));
3020 else if (dst_r)
3022 /* When we have a destination region, we resolve this by copying
3023 the exception pointer and filter values into place, and changing the edge
3024 to immediately after the landing pad. */
3033 /* We are resuming into a MUST_NOT_THROW region. Expand a call to
3034 the failure decl into a new block, if needed. */
3035 gcc_assert (dst_r->type == ERT_MUST_NOT_THROW);
3037 slot = pointer_map_contains (mnt_map, dst_r);
3038 if (slot == NULL)
3040 gimple_stmt_iterator gsi2;
3042 new_bb = create_empty_bb (bb);
3043 lab = gimple_block_label (new_bb);
3044 gsi2 = gsi_start_bb (new_bb);
3046 fn = dst_r->u.must_not_throw.failure_decl;
3047 x = gimple_build_call (fn, 0);
3048 gimple_set_location (x, dst_r->u.must_not_throw.failure_loc);
3049 gsi_insert_after (&gsi2, x, GSI_CONTINUE_LINKING);
3051 slot = pointer_map_insert (mnt_map, dst_r);
3052 *slot = lab;
3054 else
3056 lab = (tree) *slot;
3057 new_bb = label_to_block (lab);
3060 gcc_assert (EDGE_COUNT (bb->succs) == 0);
3061 e = make_edge (bb, new_bb, EDGE_FALLTHRU);
3062 e->count = bb->count;
3063 e->probability = REG_BR_PROB_BASE;
3065 else
3067 edge_iterator ei;
3068 tree dst_nr = build_int_cst (integer_type_node, dst_r->index);
3070 fn = builtin_decl_implicit (BUILT_IN_EH_COPY_VALUES);
3071 src_nr = build_int_cst (integer_type_node, src_r->index);
3072 x = gimple_build_call (fn, 2, dst_nr, src_nr);
3073 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3075 /* Update the flags for the outgoing edge. */
3076 e = single_succ_edge (bb);
3077 gcc_assert (e->flags & EDGE_EH);
3078 e->flags = (e->flags & ~EDGE_EH) | EDGE_FALLTHRU;
3080 /* If there are no more EH users of the landing pad, delete it. */
3081 FOR_EACH_EDGE (e, ei, e->dest->preds)
3082 if (e->flags & EDGE_EH)
3083 break;
3084 if (e == NULL)
3086 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
3087 remove_eh_landing_pad (lp);
3097 /* When we don't have a destination region, this exception escapes
3098 up the call chain. We resolve this by generating a call to the
3099 _Unwind_Resume library function. */
3101 /* The ARM EABI redefines _Unwind_Resume as __cxa_end_cleanup
3102 with no arguments for C++ and Java. Check for that. */
3103 if (src_r->use_cxa_end_cleanup)
3105 fn = builtin_decl_implicit (BUILT_IN_CXA_END_CLEANUP);
3106 x = gimple_build_call (fn, 0);
3107 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3109 else
3111 fn = builtin_decl_implicit (BUILT_IN_EH_POINTER);
3112 src_nr = build_int_cst (integer_type_node, src_r->index);
3113 x = gimple_build_call (fn, 1, src_nr);
3114 var = create_tmp_var (ptr_type_node, NULL);
3115 var = make_ssa_name (var, x);
3116 gimple_call_set_lhs (x, var);
3117 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3119 fn = builtin_decl_implicit (BUILT_IN_UNWIND_RESUME);
3120 x = gimple_build_call (fn, 1, var);
3121 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3124 gcc_assert (EDGE_COUNT (bb->succs) == 0);
3127 gsi_remove (&gsi, true);
3129 return ret;
3133 execute_lower_resx (void)
3136 struct pointer_map_t *mnt_map;
3137 bool dominance_invalidated = false;
3138 bool any_rewritten = false;
3140 mnt_map = pointer_map_create ();
3142 FOR_EACH_BB (bb)
3144 gimple last = last_stmt (bb);
3145 if (last && is_gimple_resx (last))
3147 dominance_invalidated |= lower_resx (bb, last, mnt_map);
3148 any_rewritten = true;
3152 pointer_map_destroy (mnt_map);
3154 if (dominance_invalidated)
3156 free_dominance_info (CDI_DOMINATORS);
3157 free_dominance_info (CDI_POST_DOMINATORS);
3160 return any_rewritten ? TODO_update_ssa_only_virtuals : 0;
3164 gate_lower_resx (void)
3166 return flag_exceptions != 0;
3169 struct gimple_opt_pass pass_lower_resx =
3172 GIMPLE_PASS,
3173 "resx", /* name */
3174 gate_lower_resx, /* gate */
3175 execute_lower_resx, /* execute */
3176 NULL, /* sub */
3177 NULL, /* next */
3178 0, /* static_pass_number */
3179 TV_TREE_EH, /* tv_id */
3180 PROP_gimple_lcf, /* properties_required */
3181 0, /* properties_provided */
3182 0, /* properties_destroyed */
3183 0, /* todo_flags_start */
3184 TODO_verify_flow /* todo_flags_finish */
3188 /* Try to optimize var = {v} {CLOBBER} stmts followed just by
3189 external throw. */
3192 optimize_clobbers (basic_block bb)
3194 gimple_stmt_iterator gsi = gsi_last_bb (bb);
3195 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3197 gimple stmt = gsi_stmt (gsi);
3198 if (is_gimple_debug (stmt))
3199 continue;
3200 if (!gimple_clobber_p (stmt)
3201 || TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME)
3203 unlink_stmt_vdef (stmt);
3204 gsi_remove (&gsi, true);
3205 release_defs (stmt);
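/* Illustrative sketch, not part of the original source: the statements
   removed above have the shape "var = {v} {CLOBBER}", i.e. an assignment
   from an empty CONSTRUCTOR with TREE_CLOBBER_P set, which is what
   gimple_clobber_p tests.  */

static bool
example_is_clobber_p (gimple stmt)
{
  return gimple_assign_single_p (stmt)
	 && TREE_CODE (gimple_assign_rhs1 (stmt)) == CONSTRUCTOR
	 && TREE_CLOBBER_P (gimple_assign_rhs1 (stmt));
}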
3209 /* Try to sink var = {v} {CLOBBER} stmts followed just by
3210 internal throw to successor BB. */
3213 sink_clobbers (basic_block bb)
3217 gimple_stmt_iterator gsi, dgsi;
3219 bool any_clobbers = false;
3221 /* Only optimize if BB has a single EH successor and
3222 all predecessor edges are EH too. */
3223 if (!single_succ_p (bb)
3224 || (single_succ_edge (bb)->flags & EDGE_EH) == 0)
3225 return 0;
3227 FOR_EACH_EDGE (e, ei, bb->preds)
3229 if ((e->flags & EDGE_EH) == 0)
3230 return 0;
3233 /* And BB contains only CLOBBER stmts before the final
3234 RESX. */
3235 gsi = gsi_last_bb (bb);
3236 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3238 gimple stmt = gsi_stmt (gsi);
3239 if (is_gimple_debug (stmt))
3240 continue;
3241 if (gimple_code (stmt) == GIMPLE_LABEL)
3242 break;
3243 if (!gimple_clobber_p (stmt)
3244 || TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME)
3245 return 0;
3246 any_clobbers = true;
3249 if (!any_clobbers)
3250 return 0;
3251 succbb = single_succ (bb);
3252 dgsi = gsi_after_labels (succbb);
3253 gsi = gsi_last_bb (bb);
3254 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3256 gimple stmt = gsi_stmt (gsi);
3258 if (is_gimple_debug (stmt))
3259 continue;
3260 if (gimple_code (stmt) == GIMPLE_LABEL)
3261 break;
3262 unlink_stmt_vdef (stmt);
3263 gsi_remove (&gsi, false);
3264 vdef = gimple_vdef (stmt);
3265 if (vdef && TREE_CODE (vdef) == SSA_NAME)
3267 vdef = SSA_NAME_VAR (vdef);
3268 mark_sym_for_renaming (vdef);
3269 gimple_set_vdef (stmt, vdef);
3270 gimple_set_vuse (stmt, vdef);
3272 release_defs (stmt);
3273 gsi_insert_before (&dgsi, stmt, GSI_SAME_STMT);
3276 return TODO_update_ssa_only_virtuals;
3279 /* At the end of inlining, we can lower EH_DISPATCH. Return true when
3280 we have found some duplicate labels and removed some edges. */
3283 lower_eh_dispatch (basic_block src, gimple stmt)
3285 gimple_stmt_iterator gsi;
3290 bool redirected = false;
3292 region_nr = gimple_eh_dispatch_region (stmt);
3293 r = get_eh_region_from_number (region_nr);
3295 gsi = gsi_last_bb (src);
3297 switch (r->type)
3299 case ERT_TRY:
3301 VEC (tree, heap) *labels = NULL;
3302 tree default_label = NULL;
3306 struct pointer_set_t *seen_values = pointer_set_create ();
3308 /* Collect the labels for a switch. Zero the post_landing_pad
3309 field because we'll no longer have anything keeping these labels
3310 in existence and the optimizer will be free to merge these
3311 blocks at will. */
3312 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
3314 tree tp_node, flt_node, lab = c->label;
3315 bool have_label = false;
3318 tp_node = c->type_list;
3319 flt_node = c->filter_list;
3321 if (tp_node == NULL)
3323 default_label = lab;
3324 break;
3326 do
3328 /* Filter out duplicate labels that arise when this handler
3329 is shadowed by an earlier one. When no labels are
3330 attached to the handler anymore, we remove
3331 the corresponding edge and then we delete unreachable
3332 blocks at the end of this pass. */
3333 if (! pointer_set_contains (seen_values, TREE_VALUE (flt_node)))
3335 tree t = build_case_label (TREE_VALUE (flt_node),
3336 NULL, lab);
3337 VEC_safe_push (tree, heap, labels, t);
3338 pointer_set_insert (seen_values, TREE_VALUE (flt_node));
3339 have_label = true;
3342 tp_node = TREE_CHAIN (tp_node);
3343 flt_node = TREE_CHAIN (flt_node);
3345 while (tp_node);
3346 if (! have_label)
3348 remove_edge (find_edge (src, label_to_block (lab)));
3349 redirected = true;
3353 /* Clean up the edge flags. */
3354 FOR_EACH_EDGE (e, ei, src->succs)
3356 if (e->flags & EDGE_FALLTHRU)
3358 /* If there was no catch-all, use the fallthru edge. */
3359 if (default_label == NULL)
3360 default_label = gimple_block_label (e->dest);
3361 e->flags &= ~EDGE_FALLTHRU;
3364 gcc_assert (default_label != NULL);
3366 /* Don't generate a switch if there's only a default case.
3367 This is common in the form of try { A; } catch (...) { B; }. */
3368 if (labels == NULL)
3370 e = single_succ_edge (src);
3371 e->flags |= EDGE_FALLTHRU;
3373 else
3375 fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
3376 x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
3377 region_nr));
3378 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)), NULL);
3379 filter = make_ssa_name (filter, x);
3380 gimple_call_set_lhs (x, filter);
3381 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3383 /* Turn the default label into a default case. */
3384 default_label = build_case_label (NULL, NULL, default_label);
3385 sort_case_labels (labels);
3387 x = gimple_build_switch_vec (filter, default_label, labels);
3388 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3390 VEC_free (tree, heap, labels);
3392 pointer_set_destroy (seen_values);
3394 break;
3396 case ERT_ALLOWED_EXCEPTIONS:
3398 edge b_e = BRANCH_EDGE (src);
3399 edge f_e = FALLTHRU_EDGE (src);
3401 fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
3402 x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
3403 region_nr));
3404 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)), NULL);
3405 filter = make_ssa_name (filter, x);
3406 gimple_call_set_lhs (x, filter);
3407 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3409 r->u.allowed.label = NULL;
3410 x = gimple_build_cond (EQ_EXPR, filter,
3411 build_int_cst (TREE_TYPE (filter),
3412 r->u.allowed.filter),
3413 NULL_TREE, NULL_TREE);
3414 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3416 b_e->flags = b_e->flags | EDGE_TRUE_VALUE;
3417 f_e->flags = (f_e->flags & ~EDGE_FALLTHRU) | EDGE_FALSE_VALUE;
3419 break;
3421 default:
3422 gcc_unreachable ();
3425 /* Replace the EH_DISPATCH with the SWITCH or COND generated above. */
3426 gsi_remove (&gsi, true);
3427 return redirected;
3431 execute_lower_eh_dispatch (void)
3435 bool redirected = false;
3437 assign_filter_values ();
3439 FOR_EACH_BB (bb)
3441 gimple last = last_stmt (bb);
3442 if (last == NULL)
3443 continue;
3444 if (gimple_code (last) == GIMPLE_EH_DISPATCH)
3446 redirected |= lower_eh_dispatch (bb, last);
3447 flags |= TODO_update_ssa_only_virtuals;
3449 else if (gimple_code (last) == GIMPLE_RESX)
3451 if (stmt_can_throw_external (last))
3452 optimize_clobbers (bb);
3453 else
3454 flags |= sink_clobbers (bb);
3458 if (redirected)
3459 delete_unreachable_blocks ();
3460 return flags;
3464 gate_lower_eh_dispatch (void)
3466 return cfun->eh->region_tree != NULL;
3469 struct gimple_opt_pass pass_lower_eh_dispatch =
3472 GIMPLE_PASS,
3473 "ehdisp", /* name */
3474 gate_lower_eh_dispatch, /* gate */
3475 execute_lower_eh_dispatch, /* execute */
3476 NULL, /* sub */
3477 NULL, /* next */
3478 0, /* static_pass_number */
3479 TV_TREE_EH, /* tv_id */
3480 PROP_gimple_lcf, /* properties_required */
3481 0, /* properties_provided */
3482 0, /* properties_destroyed */
3483 0, /* todo_flags_start */
3484 TODO_verify_flow /* todo_flags_finish */
3488 /* Walk statements, see what regions are really referenced and remove
3489 those that are unused. */
3492 remove_unreachable_handlers (void)
3494 sbitmap r_reachable, lp_reachable;
3500 r_reachable = sbitmap_alloc (VEC_length (eh_region, cfun->eh->region_array));
3501 lp_reachable
3502 = sbitmap_alloc (VEC_length (eh_landing_pad, cfun->eh->lp_array));
3503 sbitmap_zero (r_reachable);
3504 sbitmap_zero (lp_reachable);
3506 FOR_EACH_BB (bb)
3508 gimple_stmt_iterator gsi;
3510 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3512 gimple stmt = gsi_stmt (gsi);
3513 lp_nr = lookup_stmt_eh_lp (stmt);
3515 /* Negative LP numbers are MUST_NOT_THROW regions which
3516 are not considered BB enders. */
3517 if (lp_nr < 0)
3518 SET_BIT (r_reachable, -lp_nr);
3520 /* Positive LP numbers are real landing pads, and are BB enders. */
3521 else if (lp_nr > 0)
3523 gcc_assert (gsi_one_before_end_p (gsi));
3524 region = get_eh_region_from_lp_number (lp_nr);
3525 SET_BIT (r_reachable, region->index);
3526 SET_BIT (lp_reachable, lp_nr);
3529 /* Avoid removing regions referenced from RESX/EH_DISPATCH. */
3530 switch (gimple_code (stmt))
3532 case GIMPLE_RESX:
3533 SET_BIT (r_reachable, gimple_resx_region (stmt));
3534 break;
3535 case GIMPLE_EH_DISPATCH:
3536 SET_BIT (r_reachable, gimple_eh_dispatch_region (stmt));
3537 break;
3538 default:
3539 break;
3544 if (dump_file)
3546 fprintf (dump_file, "Before removal of unreachable regions:\n");
3547 dump_eh_tree (dump_file, cfun);
3548 fprintf (dump_file, "Reachable regions: ");
3549 dump_sbitmap_file (dump_file, r_reachable);
3550 fprintf (dump_file, "Reachable landing pads: ");
3551 dump_sbitmap_file (dump_file, lp_reachable);
3554 for (r_nr = 1;
3555 VEC_iterate (eh_region, cfun->eh->region_array, r_nr, region); ++r_nr)
3556 if (region && !TEST_BIT (r_reachable, r_nr))
3558 if (dump_file)
3559 fprintf (dump_file, "Removing unreachable region %d\n", r_nr);
3560 remove_eh_handler (region);
3563 for (lp_nr = 1;
3564 VEC_iterate (eh_landing_pad, cfun->eh->lp_array, lp_nr, lp); ++lp_nr)
3565 if (lp && !TEST_BIT (lp_reachable, lp_nr))
3567 if (dump_file)
3568 fprintf (dump_file, "Removing unreachable landing pad %d\n", lp_nr);
3569 remove_eh_landing_pad (lp);
3572 if (dump_file)
3574 fprintf (dump_file, "\n\nAfter removal of unreachable regions:\n");
3575 dump_eh_tree (dump_file, cfun);
3576 fprintf (dump_file, "\n\n");
3579 sbitmap_free (r_reachable);
3580 sbitmap_free (lp_reachable);
3582 #ifdef ENABLE_CHECKING
3583 verify_eh_tree (cfun);
3584 #endif
3587 /* Remove unreachable handlers if any landing pads have been removed after
3588 last ehcleanup pass (due to gimple_purge_dead_eh_edges). */
3591 maybe_remove_unreachable_handlers (void)
3596 if (cfun->eh == NULL)
3599 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
3600 if (lp && lp->post_landing_pad)
3602 if (label_to_block (lp->post_landing_pad) == NULL)
3604 remove_unreachable_handlers ();
3605 return;
3610 /* Remove regions that do not have landing pads. This assumes
3611 that remove_unreachable_handlers has already been run, and
3612 that we've just manipulated the landing pads since then. */
3615 remove_unreachable_handlers_no_lp (void)
3619 sbitmap r_reachable;
3622 r_reachable = sbitmap_alloc (VEC_length (eh_region, cfun->eh->region_array));
3623 sbitmap_zero (r_reachable);
3625 FOR_EACH_BB (bb)
3627 gimple stmt = last_stmt (bb);
3628 if (stmt)
3629 /* Avoid removing regions referenced from RESX/EH_DISPATCH. */
3630 switch (gimple_code (stmt))
3632 case GIMPLE_RESX:
3633 SET_BIT (r_reachable, gimple_resx_region (stmt));
3634 break;
3635 case GIMPLE_EH_DISPATCH:
3636 SET_BIT (r_reachable, gimple_eh_dispatch_region (stmt));
3637 break;
3638 default:
3639 break;
3643 for (i = 1; VEC_iterate (eh_region, cfun->eh->region_array, i, r); ++i)
3644 if (r && r->landing_pads == NULL && r->type != ERT_MUST_NOT_THROW
3645 && !TEST_BIT (r_reachable, i))
3647 if (dump_file)
3648 fprintf (dump_file, "Removing unreachable region %d\n", i);
3649 remove_eh_handler (r);
3652 sbitmap_free (r_reachable);
3655 /* Undo critical edge splitting on an EH landing pad. Earlier, we
3656 optimistically split all sorts of edges, including EH edges. The
3657 optimization passes in between may not have needed them; if not,
3658 we should undo the split.
3660 Recognize this case by having one EH edge incoming to the BB and
3661 one normal edge outgoing; BB should be empty apart from the
3662 post_landing_pad label.
3664 Note that this is slightly different from the empty handler case
3665 handled by cleanup_empty_eh, in that the actual handler may yet
3666 have actual code but the landing pad has been separated from the
3667 handler. As such, cleanup_empty_eh relies on this transformation
3668 having been done first. */
3671 unsplit_eh (eh_landing_pad lp)
3673 basic_block bb = label_to_block (lp->post_landing_pad);
3674 gimple_stmt_iterator gsi;
3677 /* Quickly check the edge counts on BB for singularity. */
3678 if (EDGE_COUNT (bb->preds) != 1 || EDGE_COUNT (bb->succs) != 1)
3679 return false;
3680 e_in = EDGE_PRED (bb, 0);
3681 e_out = EDGE_SUCC (bb, 0);
3683 /* Input edge must be EH and output edge must be normal. */
3684 if ((e_in->flags & EDGE_EH) == 0 || (e_out->flags & EDGE_EH) != 0)
3685 return false;
3687 /* The block must be empty except for the labels and debug insns. */
3688 gsi = gsi_after_labels (bb);
3689 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
3690 gsi_next_nondebug (&gsi);
3691 if (!gsi_end_p (gsi))
3692 return false;
3694 /* The destination block must not already have a landing pad
3695 for a different region. */
3696 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
3698 gimple stmt = gsi_stmt (gsi);
3702 if (gimple_code (stmt) != GIMPLE_LABEL)
3703 break;
3704 lab = gimple_label_label (stmt);
3705 lp_nr = EH_LANDING_PAD_NR (lab);
3706 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
3707 return false;
3710 /* The new destination block must not already be a destination of
3711 the source block, lest we merge fallthru and EH edges and get
3712 all sorts of confusion. */
3713 if (find_edge (e_in->src, e_out->dest))
3714 return false;
3716 /* ??? We can get degenerate phis due to cfg cleanups. I would have
3717 thought this should have been cleaned up by a phicprop pass, but
3718 that doesn't appear to handle virtuals. Propagate by hand. */
3719 if (!gimple_seq_empty_p (phi_nodes (bb)))
3721 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); )
3723 gimple use_stmt, phi = gsi_stmt (gsi);
3724 tree lhs = gimple_phi_result (phi);
3725 tree rhs = gimple_phi_arg_def (phi, 0);
3726 use_operand_p use_p;
3727 imm_use_iterator iter;
3729 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
3731 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
3732 SET_USE (use_p, rhs);
3735 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3736 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1;
3738 remove_phi_node (&gsi, true);
3742 if (dump_file && (dump_flags & TDF_DETAILS))
3743 fprintf (dump_file, "Unsplit EH landing pad %d to block %i.\n",
3744 lp->index, e_out->dest->index);
3746 /* Redirect the edge. Since redirect_eh_edge_1 expects to be moving
3747 a successor edge, humor it. But do the real CFG change with the
3748 predecessor of E_OUT in order to preserve the ordering of arguments
3749 to the PHI nodes in E_OUT->DEST. */
3750 redirect_eh_edge_1 (e_in, e_out->dest, false);
3751 redirect_edge_pred (e_out, e_in->src);
3752 e_out->flags = e_in->flags;
3753 e_out->probability = e_in->probability;
3754 e_out->count = e_in->count;
3755 remove_edge (e_in);
3757 return true;
3760 /* Examine each landing pad block and see if it matches unsplit_eh. */
3763 unsplit_all_eh (void)
3765 bool changed = false;
3769 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
3770 if (lp)
3771 changed |= unsplit_eh (lp);
3773 return changed;
3776 /* A subroutine of cleanup_empty_eh. Redirect all EH edges incoming
3777 to OLD_BB to NEW_BB; return true on success, false on failure.
3779 OLD_BB_OUT is the edge into NEW_BB from OLD_BB, so if we miss any
3780 PHI variables from OLD_BB we can pick them up from OLD_BB_OUT.
3781 Virtual PHIs may be deleted and marked for renaming. */
3784 cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb,
3785 edge old_bb_out, bool change_region)
3787 gimple_stmt_iterator ngsi, ogsi;
3790 bitmap rename_virts;
3791 bitmap ophi_handled;
3793 /* The destination block must not be a regular successor for any
3794 of the preds of the landing pad. Thus, avoid turning
3804 which CFG verification would choke on. See PR45172 and PR51089. */
3805 FOR_EACH_EDGE (e, ei, old_bb->preds)
3806 if (find_edge (e->src, new_bb))
3807 return false;
3809 FOR_EACH_EDGE (e, ei, old_bb->preds)
3810 redirect_edge_var_map_clear (e);
3812 ophi_handled = BITMAP_ALLOC (NULL);
3813 rename_virts = BITMAP_ALLOC (NULL);
3815 /* First, iterate through the PHIs on NEW_BB and set up the edge_var_map
3816 for the edges we're going to move. */
3817 for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); gsi_next (&ngsi))
3819 gimple ophi, nphi = gsi_stmt (ngsi);
3822 nresult = gimple_phi_result (nphi);
3823 nop = gimple_phi_arg_def (nphi, old_bb_out->dest_idx);
3825 /* Find the corresponding PHI in OLD_BB so we can forward-propagate
3826 the source ssa_name. */
3827 ophi = NULL;
3828 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
3830 ophi = gsi_stmt (ogsi);
3831 if (gimple_phi_result (ophi) == nop)
3832 break;
3833 ophi = NULL;
3836 /* If we did find the corresponding PHI, copy those inputs. */
3837 if (ophi)
3839 /* If NOP is used somewhere else beyond phis in new_bb, give up. */
3840 if (!has_single_use (nop))
3842 imm_use_iterator imm_iter;
3843 use_operand_p use_p;
3845 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, nop)
3847 if (!gimple_debug_bind_p (USE_STMT (use_p))
3848 && (gimple_code (USE_STMT (use_p)) != GIMPLE_PHI
3849 || gimple_bb (USE_STMT (use_p)) != new_bb))
3850 goto fail;
3853 bitmap_set_bit (ophi_handled, SSA_NAME_VERSION (nop));
3854 FOR_EACH_EDGE (e, ei, old_bb->preds)
3859 if ((e->flags & EDGE_EH) == 0)
3860 continue;
3861 oop = gimple_phi_arg_def (ophi, e->dest_idx);
3862 oloc = gimple_phi_arg_location (ophi, e->dest_idx);
3863 redirect_edge_var_map_add (e, nresult, oop, oloc);
3866 /* If we didn't find the PHI, but it's a VOP, remember to rename
3867 it later, assuming all other tests succeed. */
3868 else if (!is_gimple_reg (nresult))
3869 bitmap_set_bit (rename_virts, SSA_NAME_VERSION (nresult));
3870 /* If we didn't find the PHI, and it's a real variable, we know
3871 from the fact that OLD_BB is tree_empty_eh_handler_p that the
3872 variable is unchanged from input to the block and we can simply
3873 re-use the input to NEW_BB from the OLD_BB_OUT edge. */
3877 = gimple_phi_arg_location (nphi, old_bb_out->dest_idx);
3878 FOR_EACH_EDGE (e, ei, old_bb->preds)
3879 redirect_edge_var_map_add (e, nresult, nop, nloc);
3883 /* Second, verify that all PHIs from OLD_BB have been handled. If not,
3884 we don't know what values from the other edges into NEW_BB to use. */
3885 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
3887 gimple ophi = gsi_stmt (ogsi);
3888 tree oresult = gimple_phi_result (ophi);
3889 if (!bitmap_bit_p (ophi_handled, SSA_NAME_VERSION (oresult)))
3890 goto fail;
3893 /* At this point we know that the merge will succeed. Remove the PHI
3894 nodes for the virtuals that we want to rename. */
3895 if (!bitmap_empty_p (rename_virts))
3897 for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); )
3899 gimple nphi = gsi_stmt (ngsi);
3900 tree nresult = gimple_phi_result (nphi);
3901 if (bitmap_bit_p (rename_virts, SSA_NAME_VERSION (nresult)))
3903 mark_virtual_phi_result_for_renaming (nphi);
3904 remove_phi_node (&ngsi, true);
3906 else
3907 gsi_next (&ngsi);
3911 /* Finally, move the edges and update the PHIs. */
3912 for (ei = ei_start (old_bb->preds); (e = ei_safe_edge (ei)); )
3913 if (e->flags & EDGE_EH)
3915 redirect_eh_edge_1 (e, new_bb, change_region);
3916 redirect_edge_succ (e, new_bb);
3917 flush_pending_stmts (e);
3919 else
3920 ei_next (&ei);
3922 BITMAP_FREE (ophi_handled);
3923 BITMAP_FREE (rename_virts);
3924 return true;
3926 fail:
3927 FOR_EACH_EDGE (e, ei, old_bb->preds)
3928 redirect_edge_var_map_clear (e);
3929 BITMAP_FREE (ophi_handled);
3930 BITMAP_FREE (rename_virts);
3931 return false;
3934 /* A subroutine of cleanup_empty_eh. Move a landing pad LP from its
3935 old region to NEW_REGION at BB. */
3938 cleanup_empty_eh_move_lp (basic_block bb, edge e_out,
3939 eh_landing_pad lp, eh_region new_region)
3941 gimple_stmt_iterator gsi;
3944 for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
3945 continue;
3946 *pp = lp->next_lp;
3948 lp->region = new_region;
3949 lp->next_lp = new_region->landing_pads;
3950 new_region->landing_pads = lp;
3952 /* Delete the RESX that was matched within the empty handler block. */
3953 gsi = gsi_last_bb (bb);
3954 mark_virtual_ops_for_renaming (gsi_stmt (gsi));
3955 gsi_remove (&gsi, true);
3957 /* Clean up E_OUT for the fallthru. */
3958 e_out->flags = (e_out->flags & ~EDGE_EH) | EDGE_FALLTHRU;
3959 e_out->probability = REG_BR_PROB_BASE;
3962 /* A subroutine of cleanup_empty_eh. Handle more complex cases of
3963 unsplitting than unsplit_eh was prepared to handle, e.g. when
3964 multiple incoming edges and phis are involved. */
3967 cleanup_empty_eh_unsplit (basic_block bb, edge e_out, eh_landing_pad lp)
3969 gimple_stmt_iterator gsi;
3972 /* We really ought not have totally lost everything following
3973 a landing pad label. Given that BB is empty, there had better
3975 gcc_assert (e_out != NULL);
3977 /* The destination block must not already have a landing pad
3978 for a different region. */
3980 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
3982 gimple stmt = gsi_stmt (gsi);
3985 if (gimple_code (stmt) != GIMPLE_LABEL)
3986 break;
3987 lab = gimple_label_label (stmt);
3988 lp_nr = EH_LANDING_PAD_NR (lab);
3989 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
3990 return false;
3993 /* Attempt to move the PHIs into the successor block. */
3994 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, false))
3996 if (dump_file && (dump_flags & TDF_DETAILS))
3997 fprintf (dump_file,
3998 "Unsplit EH landing pad %d to block %i "
3999 "(via cleanup_empty_eh).\n",
4000 lp->index, e_out->dest->index);
4001 return true;
4004 return false;
4007 /* Return true if edge E_FIRST is part of an empty infinite loop
4008 or leads to such a loop through a series of single successor
4012 infinite_empty_loop_p (edge e_first)
4014 bool inf_loop = false;
4017 if (e_first->dest == e_first->src)
4018 return true;
4020 e_first->src->aux = (void *) 1;
4021 for (e = e_first; single_succ_p (e->dest); e = single_succ_edge (e->dest))
4023 gimple_stmt_iterator gsi;
4024 if (e->dest->aux)
4026 inf_loop = true;
4027 break;
4029 e->dest->aux = (void *) 1;
4030 gsi = gsi_after_labels (e->dest);
4031 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4032 gsi_next_nondebug (&gsi);
4033 if (!gsi_end_p (gsi))
4034 break;
4036 e_first->src->aux = NULL;
4037 for (e = e_first; e->dest->aux; e = single_succ_edge (e->dest))
4038 e->dest->aux = NULL;
4040 return inf_loop;
4043 /* Examine the block associated with LP to determine if it's an empty
4044 handler for its EH region. If so, attempt to redirect EH edges to
4045 an outer region. Return true if the CFG was updated in any way. This
4046 is similar to jump forwarding, just across EH edges. */
4049 cleanup_empty_eh (eh_landing_pad lp)
4051 basic_block bb = label_to_block (lp->post_landing_pad);
4052 gimple_stmt_iterator gsi;
4054 eh_region new_region;
4057 bool has_non_eh_pred;
4061 /* There can be zero or one edges out of BB. This is the quickest test. */
4062 switch (EDGE_COUNT (bb->succs))
4064 case 0:
4065 e_out = NULL;
4066 break;
4067 case 1:
4068 e_out = EDGE_SUCC (bb, 0);
4069 break;
4070 default:
4071 return false;
4074 resx = last_stmt (bb);
4075 if (resx && is_gimple_resx (resx))
4077 if (stmt_can_throw_external (resx))
4078 optimize_clobbers (bb);
4079 else if (sink_clobbers (bb))
4080 ret = true;
4083 gsi = gsi_after_labels (bb);
4085 /* Make sure to skip debug statements. */
4086 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4087 gsi_next_nondebug (&gsi);
4089 /* If the block is totally empty, look for more unsplitting cases. */
4090 if (gsi_end_p (gsi))
4092 /* For the degenerate case of an infinite loop bail out. */
4093 if (infinite_empty_loop_p (e_out))
4094 return ret;
4096 return ret | cleanup_empty_eh_unsplit (bb, e_out, lp);
4099 /* The block should consist only of a single RESX statement, modulo a
4100 preceding call to __builtin_stack_restore if there is no outgoing
4101 edge, since the call can be eliminated in this case. */
4102 resx = gsi_stmt (gsi);
4103 if (!e_out && gimple_call_builtin_p (resx, BUILT_IN_STACK_RESTORE))
4105 gsi_next (&gsi);
4106 resx = gsi_stmt (gsi);
4108 if (!is_gimple_resx (resx))
4109 return ret;
4110 gcc_assert (gsi_one_before_end_p (gsi));
4112 /* Determine if there are non-EH edges, or resx edges into the handler. */
4113 has_non_eh_pred = false;
4114 FOR_EACH_EDGE (e, ei, bb->preds)
4115 if (!(e->flags & EDGE_EH))
4116 has_non_eh_pred = true;
4118 /* Find the handler that's outer of the empty handler by looking at
4119 where the RESX instruction was vectored. */
4120 new_lp_nr = lookup_stmt_eh_lp (resx);
4121 new_region = get_eh_region_from_lp_number (new_lp_nr);
4123 /* If there's no destination region within the current function,
4124 redirection is trivial via removing the throwing statements from
4125 the EH region, removing the EH edges, and allowing the block
4126 to go unreachable. */
4127 if (new_region == NULL)
4129 gcc_assert (e_out == NULL);
4130 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
4131 if (e->flags & EDGE_EH)
4133 gimple stmt = last_stmt (e->src);
4134 remove_stmt_from_eh_lp (stmt);
4135 remove_edge (e);
4137 else
4138 ei_next (&ei);
4139 goto succeed;
4142 /* If the destination region is a MUST_NOT_THROW, allow the runtime
4143 to handle the abort and allow the blocks to go unreachable. */
4144 if (new_region->type == ERT_MUST_NOT_THROW)
4146 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
4147 if (e->flags & EDGE_EH)
4149 gimple stmt = last_stmt (e->src);
4150 remove_stmt_from_eh_lp (stmt);
4151 add_stmt_to_eh_lp (stmt, new_lp_nr);
4152 remove_edge (e);
4154 else
4155 ei_next (&ei);
4156 goto succeed;
4159 /* Try to redirect the EH edges and merge the PHIs into the destination
4160 landing pad block. If the merge succeeds, we'll already have redirected
4161 all the EH edges. The handler itself will go unreachable if there were
4163 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, true))
4164 goto succeed;
4166 /* Finally, if all input edges are EH edges, then we can (potentially)
4167 reduce the number of transfers from the runtime by moving the landing
4168 pad from the original region to the new region. This is a win when
4169 we remove the last CLEANUP region along a particular exception
4170 propagation path. Since nothing changes except for the region with
4171 which the landing pad is associated, the PHI nodes do not need to be
4173 if (!has_non_eh_pred)
4175 cleanup_empty_eh_move_lp (bb, e_out, lp, new_region);
4176 if (dump_file && (dump_flags & TDF_DETAILS))
4177 fprintf (dump_file, "Empty EH handler %i moved to EH region %i.\n",
4178 lp->index, new_region->index);
4180 /* ??? The CFG didn't change, but we may have rendered the
4181 old EH region unreachable. Trigger a cleanup there. */
4182 return true;
4185 return ret;
4187 succeed:
4188 if (dump_file && (dump_flags & TDF_DETAILS))
4189 fprintf (dump_file, "Empty EH handler %i removed.\n", lp->index);
4190 remove_eh_landing_pad (lp);
4191 return true;
4194 /* Do a post-order traversal of the EH region tree. Examine each
4195 post_landing_pad block and see if we can eliminate it as empty. */
4198 cleanup_all_empty_eh (void)
4200 bool changed = false;
4204 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
4205 if (lp)
4206 changed |= cleanup_empty_eh (lp);
4208 return changed;
4211 /* Perform cleanups and lowering of exception handling:
4212 1) cleanup regions with handlers doing nothing are optimized out
4213 2) MUST_NOT_THROW regions that became dead because of 1) are optimized out
4214 3) info about regions containing instructions, and regions
4215 reachable via local EH edges, is collected
4216 4) the EH tree is pruned for regions no longer necessary.
4218 TODO: Push MUST_NOT_THROW regions to the root of the EH tree.
4219 Unify those that have the same failure decl and locus.
4223 execute_cleanup_eh_1 (void)
4225 /* Do this first: unsplit_all_eh and cleanup_all_empty_eh can die
4226 looking up unreachable landing pads. */
4227 remove_unreachable_handlers ();
4229 /* Watch out for the region tree vanishing because all regions became unreachable. */
4230 if (cfun->eh->region_tree && optimize)
4232 bool changed = false;
4234 changed |= unsplit_all_eh ();
4235 changed |= cleanup_all_empty_eh ();
4237 if (changed)
4239 free_dominance_info (CDI_DOMINATORS);
4240 free_dominance_info (CDI_POST_DOMINATORS);
4242 /* We delayed all basic block deletion, as we may have performed
4243 cleanups on EH edges while non-EH edges were still present. */
4244 delete_unreachable_blocks ();
4246 /* We manipulated the landing pads. Remove any region that no
4247 longer has a landing pad. */
4248 remove_unreachable_handlers_no_lp ();
4250 return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
4254 return 0;
4258 execute_cleanup_eh (void)
4260 int ret = execute_cleanup_eh_1 ();
4262 /* If the function no longer needs an EH personality routine
4263 clear it. This exposes cross-language inlining opportunities
4264 and avoids references to a never defined personality routine. */
4265 if (DECL_FUNCTION_PERSONALITY (current_function_decl)
4266 && function_needs_eh_personality (cfun) != eh_personality_lang)
4267 DECL_FUNCTION_PERSONALITY (current_function_decl) = NULL_TREE;
4269 return ret;
4273 gate_cleanup_eh (void)
4275 return cfun->eh != NULL && cfun->eh->region_tree != NULL;
4278 struct gimple_opt_pass pass_cleanup_eh = {
4280 GIMPLE_PASS,
4281 "ehcleanup", /* name */
4282 gate_cleanup_eh, /* gate */
4283 execute_cleanup_eh, /* execute */
4284 NULL, /* sub */
4285 NULL, /* next */
4286 0, /* static_pass_number */
4287 TV_TREE_EH, /* tv_id */
4288 PROP_gimple_lcf, /* properties_required */
4289 0, /* properties_provided */
4290 0, /* properties_destroyed */
4291 0, /* todo_flags_start */
4292 0 /* todo_flags_finish */
4296 /* Verify that the BB containing STMT as its last statement has precisely
4297 the edge that make_eh_edges would create. */
4300 verify_eh_edges (gimple stmt)
4302 basic_block bb = gimple_bb (stmt);
4303 eh_landing_pad lp = NULL;
4308 lp_nr = lookup_stmt_eh_lp (stmt);
4309 if (lp_nr > 0)
4310 lp = get_eh_landing_pad_from_number (lp_nr);
4313 FOR_EACH_EDGE (e, ei, bb->succs)
4315 if (e->flags & EDGE_EH)
4317 if (eh_edge)
4319 error ("BB %i has multiple EH edges", bb->index);
4320 return true;
4322 else
4323 eh_edge = e;
4331 error ("BB %i can not throw but has an EH edge", bb->index);
4337 if (!stmt_could_throw_p (stmt))
4339 error ("BB %i last statement has incorrectly set lp", bb->index);
4343 if (eh_edge == NULL)
4345 error ("BB %i is missing an EH edge", bb->index);
4349 if (eh_edge->dest != label_to_block (lp->post_landing_pad))
4351 error ("Incorrect EH edge %i->%i", bb->index, eh_edge->dest->index);
4358 /* Similarly, but handle GIMPLE_EH_DISPATCH specifically. */
4361 verify_eh_dispatch_edge (gimple stmt)
4365 basic_block src, dst;
4366 bool want_fallthru = true;
4370 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
4371 src = gimple_bb (stmt);
4373 FOR_EACH_EDGE (e, ei, src->succs)
4374 gcc_assert (e->aux == NULL);
4379 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
4381 dst = label_to_block (c->label);
4382 e = find_edge (src, dst);
4383 if (e == NULL)
4385 error ("BB %i is missing an edge", src->index);
4386 return true;
4388 e->aux = (void *)e;
4390 /* A catch-all handler doesn't have a fallthru. */
4391 if (c->type_list == NULL)
4393 want_fallthru = false;
4394 break;
4399 case ERT_ALLOWED_EXCEPTIONS:
4400 dst = label_to_block (r->u.allowed.label);
4401 e = find_edge (src, dst);
4402 if (e == NULL)
4404 error ("BB %i is missing an edge", src->index);
4405 return true;
4407 e->aux = (void *)e;
4415 FOR_EACH_EDGE (e, ei, src->succs)
4417 if (e->flags & EDGE_FALLTHRU)
4419 if (fall_edge != NULL)
4421 error ("BB %i too many fallthru edges", src->index);
4430 error ("BB %i has incorrect edge", src->index);
4434 if ((fall_edge != NULL) ^ want_fallthru)
4436 error ("BB %i has incorrect fallthru edge", src->index);