/* Exception handling semantics and decomposition for trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "pointer-set.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "diagnostic-core.h"
/* In some instances a tree and a gimple statement need to be stored in
   the same table, e.g. in hash tables.  This is a structure to do this.  */

typedef union {tree *tp; tree t; gimple g;} treemple;
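/* Illustrative use (a sketch): the finally_tree nodes below key on a
   treemple that holds either the LABEL_DECL of a GIMPLE_LABEL (via the
   tree member) or a nested GIMPLE_TRY statement (via the gimple
   member), so a single hash table can serve both kinds of key.  */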
/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

using_eh_for_cleanups (void)
  using_eh_for_cleanups_p = 1;
/* Misc functions used in this file.  */

/* Remember and lookup EH landing pad data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
       we get to rtl.  Once we're done with lowering here, if we lose
       the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
       compared to those that can.  We should be saving some amount
       of space by only allocating memory for those that can throw.  */

/* Add statement T in function IFUN to landing pad NUM.  */

add_stmt_to_eh_lp_fn (struct function *ifun, gimple t, int num)
  struct throw_stmt_node *n;
  gcc_assert (num != 0);
  n = ggc_alloc_throw_stmt_node ();
  if (!get_eh_throw_stmt_table (ifun))
    set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,
  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
/* Add statement T in the current function (cfun) to EH landing pad NUM.  */

add_stmt_to_eh_lp (gimple t, int num)
  add_stmt_to_eh_lp_fn (cfun, t, num);

/* Add statement T to the single EH landing pad in REGION.  */

record_stmt_eh_region (eh_region region, gimple t)
  if (region->type == ERT_MUST_NOT_THROW)
    add_stmt_to_eh_lp_fn (cfun, t, -region->index);
  eh_landing_pad lp = region->landing_pads;
  lp = gen_eh_landing_pad (region);
  gcc_assert (lp->next_lp == NULL);
  add_stmt_to_eh_lp_fn (cfun, t, lp->index);
/* Remove statement T in function IFUN from its EH landing pad.  */

remove_stmt_from_eh_lp_fn (struct function *ifun, gimple t)
  struct throw_stmt_node dummy;
  if (!get_eh_throw_stmt_table (ifun))
  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
  htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);

/* Remove statement T in the current function (cfun) from its
   EH landing pad.  */

remove_stmt_from_eh_lp (gimple t)
  return remove_stmt_from_eh_lp_fn (cfun, t);
/* Determine if statement T is inside an EH region in function IFUN.
   Positive numbers indicate a landing pad index; negative numbers
   indicate a MUST_NOT_THROW region index; zero indicates that the
   statement is not recorded in the region table.  */
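/* For example: a result of 3 means T can throw and unwinds to landing
   pad 3; -2 means T lies within MUST_NOT_THROW region 2; 0 means T is
   not recorded at all.  */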
lookup_stmt_eh_lp_fn (struct function *ifun, gimple t)
  struct throw_stmt_node *p, n;
  if (ifun->eh->throw_stmt_table == NULL)
  p = (struct throw_stmt_node *) htab_find (ifun->eh->throw_stmt_table, &n);
  return p ? p->lp_nr : 0;

/* Likewise, but always use the current function.  */

lookup_stmt_eh_lp (gimple t)
  /* We can get called from initialized data when -fnon-call-exceptions
     is on; prevent crash.  */
  return lookup_stmt_eh_lp_fn (cfun, t);
/* First pass of EH node decomposition.  Build up a tree of GIMPLE_TRY_FINALLY
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */

struct finally_tree_node
  /* When storing a GIMPLE_TRY, we have to record a gimple.  However
     when deciding whether a GOTO to a certain LABEL_DECL (which is a
     tree) leaves the TRY block, it's necessary to record a tree in
     this field.  Thus a treemple is used.  */

/* Note that this table is *not* marked GTY.  It is short-lived.  */
static htab_t finally_tree;
record_in_finally_tree (treemple child, gimple parent)
  struct finally_tree_node *n;
  n = XNEW (struct finally_tree_node);
  slot = htab_find_slot (finally_tree, n, INSERT);

collect_finally_tree (gimple stmt, gimple region);

/* Go through the gimple sequence.  Works with collect_finally_tree to
   record all GIMPLE_LABEL and GIMPLE_TRY statements.  */
collect_finally_tree_1 (gimple_seq seq, gimple region)
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    collect_finally_tree (gsi_stmt (gsi), region);

collect_finally_tree (gimple stmt, gimple region)
  switch (gimple_code (stmt))
      temp.t = gimple_label_label (stmt);
      record_in_finally_tree (temp, region);

      if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
          record_in_finally_tree (temp, region);
          collect_finally_tree_1 (gimple_try_eval (stmt), stmt);
          collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
      else if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
          collect_finally_tree_1 (gimple_try_eval (stmt), region);
          collect_finally_tree_1 (gimple_try_cleanup (stmt), region);

      collect_finally_tree_1 (gimple_catch_handler (stmt), region);

    case GIMPLE_EH_FILTER:
      collect_finally_tree_1 (gimple_eh_filter_failure (stmt), region);

      collect_finally_tree_1 (gimple_eh_else_n_body (stmt), region);
      collect_finally_tree_1 (gimple_eh_else_e_body (stmt), region);

    /* A type, a decl, or some kind of statement that we're not
       interested in.  Don't walk them.  */
/* Use the finally tree to determine if a jump from START to TARGET
   would leave the try_finally node that START lives in.  */

outside_finally_tree (treemple start, gimple target)
  struct finally_tree_node n, *p;
  p = (struct finally_tree_node *) htab_find (finally_tree, &n);
  while (start.g != target);
/* Second pass of EH node decomposition.  Actually transform the GIMPLE_TRY
   nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straightforward, but frobbing all the gotos
   and such into shape isn't.  */

/* The sequence into which we record all EH stuff.  This will be
   placed at the end of the function when we're all done.  */
static gimple_seq eh_seq;

/* Record whether an EH region contains something that can throw,
   indexed by EH region number.  */
static bitmap eh_region_may_contain_throw_map;
/* The GOTO_QUEUE is an array of GIMPLE_GOTO and GIMPLE_RETURN
   statements that are seen to escape this GIMPLE_TRY_FINALLY node.
   The idea is to record a gimple statement for everything except for
   the conditionals, which get their labels recorded.  Since labels are
   of type 'tree', we need this node to store both gimple and tree
   objects.  REPL_STMT is the sequence used to replace the goto/return
   statement.  CONT_STMT is used to store the statement that allows
   the return/goto to jump to the original destination.  */
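/* Illustrative sketch (not literal GIMPLE): given

       try { ... goto out; ... } finally { cleanup (); }
     out:;

   the goto escapes the try, so it gets a goto_queue entry whose
   REPL_STMT branches to the lowered finally block and whose CONT_STMT
   is the original "goto out;", emitted after the finally code has
   run.  */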
struct goto_queue_node
  gimple_seq repl_stmt;
  /* This is used when index >= 0 to indicate that stmt is a label (as
     opposed to a goto stmt).  */
/* State of the world while lowering.  */

  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  eh_region cur_region;

  /* What's "current" for the purposes of __builtin_eh_pointer.  For
     a CATCH, this is the associated TRY.  For an EH_FILTER, this is
     the associated ALLOWED_EXCEPTIONS, etc.  */
  eh_region ehp_region;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
  /* Pointer to the GIMPLE_TRY_FINALLY node under discussion.  The
     try_finally_expr is the original GIMPLE_TRY_FINALLY.  We need to retain
     this so that outside_finally_tree can reliably reference the tree used
     in the collect_finally_tree data structures.  */
  gimple try_finally_expr;

  /* While lowering, top_p is usually expanded into multiple statements,
     so we need the following field to store them.  */
  gimple_seq top_p_seq;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */

  /* The goto queue.  */
  struct goto_queue_node *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* Pointer map to help in searching goto_queue when it is large.  */
  struct pointer_map_t *goto_queue_map;

  /* The set of unique labels seen as entries in the goto queue.  */
  VEC(tree,heap) *dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */

  /* True if any entry in goto_queue is a GIMPLE_RETURN.  */

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
static gimple_seq lower_eh_must_not_throw (struct leh_state *, gimple);

/* Search for STMT in the goto queue.  Return the replacement,
   or null if the statement isn't in the queue.  */

#define LARGE_GOTO_QUEUE 20

static void lower_eh_constructs_1 (struct leh_state *state, gimple_seq seq);
find_goto_replacement (struct leh_tf_state *tf, treemple stmt)
  if (tf->goto_queue_active < LARGE_GOTO_QUEUE)
      for (i = 0; i < tf->goto_queue_active; i++)
        if (tf->goto_queue[i].stmt.g == stmt.g)
          return tf->goto_queue[i].repl_stmt;

  /* If we have a large number of entries in the goto_queue, create a
     pointer map and use that for searching.  */

  if (!tf->goto_queue_map)
      tf->goto_queue_map = pointer_map_create ();
      for (i = 0; i < tf->goto_queue_active; i++)
          slot = pointer_map_insert (tf->goto_queue_map,
                                     tf->goto_queue[i].stmt.g);
          gcc_assert (*slot == NULL);
          *slot = &tf->goto_queue[i];

  slot = pointer_map_contains (tf->goto_queue_map, stmt.g);
  return (((struct goto_queue_node *) *slot)->repl_stmt);
/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered GIMPLE_COND.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the GIMPLE_COND and redirect.  */

replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
                                gimple_stmt_iterator *gsi)
  location_t loc = gimple_location (gsi_stmt (*gsi));
  new_seq = find_goto_replacement (tf, temp);
  if (gimple_seq_singleton_p (new_seq)
      && gimple_code (gimple_seq_first_stmt (new_seq)) == GIMPLE_GOTO)
      *tp = gimple_goto_dest (gimple_seq_first_stmt (new_seq));
  label = create_artificial_label (loc);
  /* Set the new label for the GIMPLE_COND.  */
  gsi_insert_after (gsi, gimple_build_label (label), GSI_CONTINUE_LINKING);
  gsi_insert_seq_after (gsi, gimple_seq_copy (new_seq), GSI_CONTINUE_LINKING);
/* The real work of replace_goto_queue.  Returns with TSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (gimple_seq, struct leh_tf_state *);

replace_goto_queue_1 (gimple stmt, struct leh_tf_state *tf,
                      gimple_stmt_iterator *gsi)
  switch (gimple_code (stmt))
      seq = find_goto_replacement (tf, temp);
      gsi_insert_seq_before (gsi, gimple_seq_copy (seq), GSI_SAME_STMT);
      gsi_remove (gsi, false);

      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 2), tf, gsi);
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 3), tf, gsi);

      replace_goto_queue_stmt_list (gimple_try_eval (stmt), tf);
      replace_goto_queue_stmt_list (gimple_try_cleanup (stmt), tf);

      replace_goto_queue_stmt_list (gimple_catch_handler (stmt), tf);

    case GIMPLE_EH_FILTER:
      replace_goto_queue_stmt_list (gimple_eh_filter_failure (stmt), tf);

      replace_goto_queue_stmt_list (gimple_eh_else_n_body (stmt), tf);
      replace_goto_queue_stmt_list (gimple_eh_else_e_body (stmt), tf);

    /* These won't have gotos in them.  */
/* A subroutine of replace_goto_queue.  Handles GIMPLE_SEQ.  */

replace_goto_queue_stmt_list (gimple_seq seq, struct leh_tf_state *tf)
  gimple_stmt_iterator gsi = gsi_start (seq);
  while (!gsi_end_p (gsi))
    replace_goto_queue_1 (gsi_stmt (gsi), tf, &gsi);

/* Replace all goto queue members.  */

replace_goto_queue (struct leh_tf_state *tf)
  if (tf->goto_queue_active == 0)
  replace_goto_queue_stmt_list (tf->top_p_seq, tf);
  replace_goto_queue_stmt_list (eh_seq, tf);
/* Add a new record to the goto queue contained in TF.  NEW_STMT is the
   data to be added, IS_LABEL indicates whether NEW_STMT is a label or
   a gimple.  */

record_in_goto_queue (struct leh_tf_state *tf,
  struct goto_queue_node *q;
  gcc_assert (!tf->goto_queue_map);
  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
        = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;
  memset (q, 0, sizeof (*q));
  q->is_label = is_label;
/* Record the LABEL label in the goto queue contained in TF.  */

record_in_goto_queue_label (struct leh_tf_state *tf, treemple stmt, tree label)
  treemple temp, new_stmt;

  /* Computed and non-local gotos do not get processed.  Given
     their nature we can neither tell whether we've escaped the
     finally block nor redirect them if we knew.  */
  if (TREE_CODE (label) != LABEL_DECL)

  /* No need to record gotos that don't leave the try block.  */
  if (!outside_finally_tree (temp, tf->try_finally_expr))

  if (! tf->dest_array)
      tf->dest_array = VEC_alloc (tree, heap, 10);
      VEC_quick_push (tree, tf->dest_array, label);
      int n = VEC_length (tree, tf->dest_array);
      for (index = 0; index < n; ++index)
        if (VEC_index (tree, tf->dest_array, index) == label)
      VEC_safe_push (tree, heap, tf->dest_array, label);

  /* In the case of a GOTO we want to record the destination label,
     since with a GIMPLE_COND we have easy access to the then/else
     labels.  */
  record_in_goto_queue (tf, new_stmt, index, true);
/* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */

maybe_record_in_goto_queue (struct leh_state *state, gimple stmt)
  struct leh_tf_state *tf = state->tf;
  switch (gimple_code (stmt))
      new_stmt.tp = gimple_op_ptr (stmt, 2);
      record_in_goto_queue_label (tf, new_stmt, gimple_cond_true_label (stmt));
      new_stmt.tp = gimple_op_ptr (stmt, 3);
      record_in_goto_queue_label (tf, new_stmt, gimple_cond_false_label (stmt));

      record_in_goto_queue_label (tf, new_stmt, gimple_goto_dest (stmt));

      tf->may_return = true;
      record_in_goto_queue (tf, new_stmt, -1, false);
#ifdef ENABLE_CHECKING
/* We do not process GIMPLE_SWITCHes for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer GIMPLE_TRY_FINALLY nodes.  Verify this.  */

verify_norecord_switch_expr (struct leh_state *state, gimple switch_expr)
  struct leh_tf_state *tf = state->tf;
  n = gimple_switch_num_labels (switch_expr);
  for (i = 0; i < n; ++i)
      tree lab = CASE_LABEL (gimple_switch_label (switch_expr, i));
      gcc_assert (!outside_finally_tree (temp, tf->try_finally_expr));

#define verify_norecord_switch_expr(state, switch_expr)
/* Redirect a RETURN_EXPR pointed to by Q to FINLAB.  If MOD is
   non-null, insert it before the new branch.  */

do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod)
  /* In the case of a return, the queue node must be a gimple statement.  */
  gcc_assert (!q->is_label);

  /* Note that the return value may have already been computed, e.g.,

        int x;
        int foo (void)
        {
          x = 0;
          try { return x; } finally { x++; }
        }

     should return 0, not 1.  We don't have to do anything to make
     this happen because the return value has been placed in the
     RESULT_DECL already.  */

  q->cont_stmt = q->stmt.g;

  q->repl_stmt = gimple_seq_alloc ();

  gimple_seq_add_seq (&q->repl_stmt, mod);

  x = gimple_build_goto (finlab);
  gimple_seq_add_stmt (&q->repl_stmt, x);
/* Similar, but easier, for GIMPLE_GOTO.  */

do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
                     struct leh_tf_state *tf)
  gcc_assert (q->is_label);
  q->repl_stmt = gimple_seq_alloc ();
  q->cont_stmt = gimple_build_goto (VEC_index (tree, tf->dest_array, q->index));
  gimple_seq_add_seq (&q->repl_stmt, mod);
  x = gimple_build_goto (finlab);
  gimple_seq_add_stmt (&q->repl_stmt, x);
/* Emit a standard landing pad sequence into SEQ for REGION.  */

emit_post_landing_pad (gimple_seq *seq, eh_region region)
  eh_landing_pad lp = region->landing_pads;
  lp = gen_eh_landing_pad (region);
  lp->post_landing_pad = create_artificial_label (UNKNOWN_LOCATION);
  EH_LANDING_PAD_NR (lp->post_landing_pad) = lp->index;
  x = gimple_build_label (lp->post_landing_pad);
  gimple_seq_add_stmt (seq, x);
/* Emit a RESX statement into SEQ for REGION.  */

emit_resx (gimple_seq *seq, eh_region region)
  gimple x = gimple_build_resx (region->index);
  gimple_seq_add_stmt (seq, x);
  record_stmt_eh_region (region->outer, x);

/* Emit an EH_DISPATCH statement into SEQ for REGION.  */

emit_eh_dispatch (gimple_seq *seq, eh_region region)
  gimple x = gimple_build_eh_dispatch (region->index);
  gimple_seq_add_stmt (seq, x);
/* Note that the current EH region may contain a throw, or a
   call to a function which itself may contain a throw.  */

note_eh_region_may_contain_throw (eh_region region)
  while (bitmap_set_bit (eh_region_may_contain_throw_map, region->index))
      if (region->type == ERT_MUST_NOT_THROW)
      region = region->outer;

/* Check if REGION has been marked as containing a throw.  If REGION is
   NULL, this predicate is false.  */

eh_region_may_contain_throw (eh_region r)
  return r && bitmap_bit_p (eh_region_may_contain_throw_map, r->index);
/* We want to transform
        try { body; } catch { stuff; }
   into
        normal_seq: body; over:
        eh_seq:     landing_pad; stuff; goto over;

   TP is a GIMPLE_TRY node.  REGION is the region whose post_landing_pad
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */
frob_into_branch_around (gimple tp, eh_region region, tree over)
  gimple_seq cleanup, result;
  location_t loc = gimple_location (tp);

  cleanup = gimple_try_cleanup (tp);
  result = gimple_try_eval (tp);

  emit_post_landing_pad (&eh_seq, region);

  if (gimple_seq_may_fallthru (cleanup))
      over = create_artificial_label (loc);
      x = gimple_build_goto (over);
      gimple_seq_add_stmt (&cleanup, x);
  gimple_seq_add_seq (&eh_seq, cleanup);

  x = gimple_build_label (over);
  gimple_seq_add_stmt (&result, x);
/* A subroutine of lower_try_finally.  Duplicate the tree rooted at T.
   Make sure to record all new labels found.  */

lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state)
  gimple region = NULL;
  new_seq = copy_gimple_seq_and_replace_locals (seq);
  region = outer_state->tf->try_finally_expr;
  collect_finally_tree_1 (new_seq, region);
/* A subroutine of lower_try_finally.  Create a fallthru label for
   the given try_finally state.  The only tricky bit here is that
   we have to make sure to record the label in our outer context.  */

lower_try_finally_fallthru_label (struct leh_tf_state *tf)
  tree label = tf->fallthru_label;
      label = create_artificial_label (gimple_location (tf->try_finally_expr));
      tf->fallthru_label = label;
        record_in_finally_tree (temp, tf->outer->tf->try_finally_expr);
/* A subroutine of lower_try_finally.  If FINALLY consists of a
   GIMPLE_EH_ELSE node, return it.  */

get_eh_else (gimple_seq finally)
  gimple x = gimple_seq_first_stmt (finally);
  if (gimple_code (x) == GIMPLE_EH_ELSE)
      gcc_assert (gimple_seq_singleton_p (finally));
/* A subroutine of lower_try_finally.  If the eh_protect_cleanup_actions
   langhook returns non-null, then the language requires that the exception
   path out of a try_finally be treated specially.  To wit: the code within
   the finally block may not itself throw an exception.  We have two choices
   here.  First we can duplicate the finally block and wrap it in a
   must_not_throw region.  Second, we can generate code like

        try {
          finally_block;
        } catch {
          if (fintmp == eh_edge)
            protect_cleanup_actions;
        }

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */
honor_protect_cleanup_actions (struct leh_state *outer_state,
                               struct leh_state *this_state,
                               struct leh_tf_state *tf)
  tree protect_cleanup_actions;
  gimple_stmt_iterator gsi;
  bool finally_may_fallthru;

  /* First check for nothing to do.  */
  if (lang_hooks.eh_protect_cleanup_actions == NULL)
  protect_cleanup_actions = lang_hooks.eh_protect_cleanup_actions ();
  if (protect_cleanup_actions == NULL)

  finally = gimple_try_cleanup (tf->top_p);
  eh_else = get_eh_else (finally);

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  If we've got an EH_ELSE, extract it now.  */
      finally = gimple_eh_else_e_body (eh_else);
      gimple_try_set_cleanup (tf->top_p, gimple_eh_else_n_body (eh_else));
      finally = lower_try_finally_dup_block (finally, outer_state);
  finally_may_fallthru = gimple_seq_may_fallthru (finally);

  /* If this cleanup consists of a TRY_CATCH_EXPR with TRY_CATCH_IS_CLEANUP
     set, the handler of the TRY_CATCH_EXPR is another cleanup which ought
     to be in an enclosing scope, but needs to be implemented at this level
     to avoid a nesting violation (see wrap_temporary_cleanups in
     cp/decl.c).  Since it's logically at an outer level, we should call
     terminate before we get to it, so strip it away before adding the
     MUST_NOT_THROW filter.  */
  gsi = gsi_start (finally);
  if (gimple_code (x) == GIMPLE_TRY
      && gimple_try_kind (x) == GIMPLE_TRY_CATCH
      && gimple_try_catch_is_cleanup (x))
      gsi_insert_seq_before (&gsi, gimple_try_eval (x), GSI_SAME_STMT);
      gsi_remove (&gsi, false);

  /* Wrap the block with protect_cleanup_actions as the action.  */
  x = gimple_build_eh_must_not_throw (protect_cleanup_actions);
  x = gimple_build_try (finally, gimple_seq_alloc_with_stmt (x),
  finally = lower_eh_must_not_throw (outer_state, x);

  /* Drop all of this into the exception sequence.  */
  emit_post_landing_pad (&eh_seq, tf->region);
  gimple_seq_add_seq (&eh_seq, finally);
  if (finally_may_fallthru)
    emit_resx (&eh_seq, tf->region);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */

lower_try_finally_nofallthru (struct leh_state *state,
                              struct leh_tf_state *tf)
  struct goto_queue_node *q, *qe;

  lab = create_artificial_label (gimple_location (tf->try_finally_expr));

  /* We expect that tf->top_p is a GIMPLE_TRY.  */
  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  x = gimple_build_label (lab);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  qe = q + tf->goto_queue_active;
      do_return_redirection (q, lab, NULL);
      do_goto_redirection (q, lab, NULL, tf);

  replace_goto_queue (tf);

  /* Emit the finally block into the stream.  Lower EH_ELSE at this time.  */
  eh_else = get_eh_else (finally);
      finally = gimple_eh_else_n_body (eh_else);
      lower_eh_constructs_1 (state, finally);
      gimple_seq_add_seq (&tf->top_p_seq, finally);

          finally = gimple_eh_else_e_body (eh_else);
          lower_eh_constructs_1 (state, finally);

          emit_post_landing_pad (&eh_seq, tf->region);
          gimple_seq_add_seq (&eh_seq, finally);

      lower_eh_constructs_1 (state, finally);
      gimple_seq_add_seq (&tf->top_p_seq, finally);

      emit_post_landing_pad (&eh_seq, tf->region);

      x = gimple_build_goto (lab);
      gimple_seq_add_stmt (&eh_seq, x);
/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */

lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
  struct goto_queue_node *q, *qe;
  location_t loc = gimple_location (tf->try_finally_expr);

  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  /* Since there's only one destination, and the destination edge can only
     either be EH or non-EH, that implies that all of our incoming edges
     are of the same type.  Therefore we can lower EH_ELSE immediately.  */
  x = get_eh_else (finally);
      finally = gimple_eh_else_e_body (x);
      finally = gimple_eh_else_n_body (x);

  lower_eh_constructs_1 (state, finally);

      /* Only reachable via the exception edge.  Add the given label to
         the head of the FINALLY block.  Append a RESX at the end.  */
      emit_post_landing_pad (&eh_seq, tf->region);
      gimple_seq_add_seq (&eh_seq, finally);
      emit_resx (&eh_seq, tf->region);

  if (tf->may_fallthru)
      /* Only reachable via the fallthru edge.  Do nothing but let
         the two blocks run together; we'll fall out the bottom.  */
      gimple_seq_add_seq (&tf->top_p_seq, finally);

  finally_label = create_artificial_label (loc);
  x = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  gimple_seq_add_seq (&tf->top_p_seq, finally);

  qe = q + tf->goto_queue_active;

      /* Reachable by return expressions only.  Redirect them.  */
      do_return_redirection (q, finally_label, NULL);
      replace_goto_queue (tf);

      /* Reachable by goto expressions only.  Redirect them.  */
      do_goto_redirection (q, finally_label, NULL, tf);
      replace_goto_queue (tf);

      if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label)
          /* Reachable by goto to fallthru label only.  Redirect it
             to the new label (already created, sadly), and do not
             emit the final branch out, or the fallthru label.  */
          tf->fallthru_label = NULL;

  /* Place the original return/goto to the original destination
     immediately after the finally block.  */
  x = tf->goto_queue[0].cont_stmt;
  gimple_seq_add_stmt (&tf->top_p_seq, x);
  maybe_record_in_goto_queue (state, x);
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */

lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
  gimple_seq new_stmt;
  location_t tf_loc = gimple_location (tf->try_finally_expr);

  finally = gimple_try_cleanup (tf->top_p);

  /* Notice EH_ELSE, and simplify some of the remaining code
     by considering FINALLY to be the normal return path only.  */
  eh_else = get_eh_else (finally);
    finally = gimple_eh_else_n_body (eh_else);

  tf->top_p_seq = gimple_try_eval (tf->top_p);

  if (tf->may_fallthru)
      seq = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, seq);
      gimple_seq_add_seq (&new_stmt, seq);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_seq_add_stmt (&new_stmt, x);

      /* We don't need to copy the EH path of EH_ELSE,
         since it is only emitted once.  */
        seq = gimple_eh_else_e_body (eh_else);
          seq = lower_try_finally_dup_block (finally, state);
          lower_eh_constructs_1 (state, seq);

      emit_post_landing_pad (&eh_seq, tf->region);
      gimple_seq_add_seq (&eh_seq, seq);
      emit_resx (&eh_seq, tf->region);

      struct goto_queue_node *q, *qe;
      int return_index, index;
          struct goto_queue_node *q;

      return_index = VEC_length (tree, tf->dest_array);
      labels = XCNEWVEC (struct labels_s, return_index + 1);

      qe = q + tf->goto_queue_active;
          index = q->index < 0 ? return_index : q->index;
          if (!labels[index].q)
            labels[index].q = q;

      for (index = 0; index < return_index + 1; index++)
          q = labels[index].q;

          lab = labels[index].label
            = create_artificial_label (tf_loc);

          if (index == return_index)
            do_return_redirection (q, lab, NULL);
            do_goto_redirection (q, lab, NULL, tf);

          x = gimple_build_label (lab);
          gimple_seq_add_stmt (&new_stmt, x);

          seq = lower_try_finally_dup_block (finally, state);
          lower_eh_constructs_1 (state, seq);
          gimple_seq_add_seq (&new_stmt, seq);

          gimple_seq_add_stmt (&new_stmt, q->cont_stmt);
          maybe_record_in_goto_queue (state, q->cont_stmt);

      for (q = tf->goto_queue; q < qe; q++)
          index = q->index < 0 ? return_index : q->index;

          if (labels[index].q == q)

          lab = labels[index].label;

          if (index == return_index)
            do_return_redirection (q, lab, NULL);
            do_goto_redirection (q, lab, NULL, tf);

      replace_goto_queue (tf);

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_seq (&tf->top_p_seq, new_stmt);
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by instrumenting
   each incoming edge and creating a switch statement at the end of the
   finally block that branches to the appropriate destination.  */
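/* Illustrative shape of the output (a sketch; the indices are assigned
   by the code below):

         body;
         finally_tmp = <fallthru_index>; goto finally_label;
       finally_label:
         cleanup;
         switch (finally_tmp)
           case <fallthru_index>: goto fallthru_lab;
           case <goto_index>: goto original_destination;

   The exception path enters via the landing pad, stores <eh_index>
   into finally_tmp, and its switch case ends in a RESX.  */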
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
  struct goto_queue_node *q, *qe;
  tree finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  VEC (tree,heap) *case_label_vec;
  gimple_seq switch_body;
  struct pointer_map_t *cont_map = NULL;
  /* The location of the TRY_FINALLY stmt.  */
  location_t tf_loc = gimple_location (tf->try_finally_expr);
  /* The location of the finally block.  */
  location_t finally_loc;

  switch_body = gimple_seq_alloc ();
  finally = gimple_try_cleanup (tf->top_p);
  eh_else = get_eh_else (finally);

  /* Mash the TRY block to the head of the chain.  */
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  /* The location of the finally is either the last stmt in the finally
     block or the location of the TRY_FINALLY itself.  */
  x = gimple_seq_last_stmt (finally);
  finally_loc = x ? gimple_location (x) : tf_loc;

  /* Prepare for switch statement generation.  */
  nlabels = VEC_length (tree, tf->dest_array);
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + (tf->may_throw && !eh_else);
  ndests = fallthru_index + tf->may_fallthru;

  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label (finally_loc);

  /* We use VEC_quick_push on case_label_vec throughout this function,
     since we know the size in advance and allocate precisely as much
     memory as needed.  */
  case_label_vec = VEC_alloc (tree, heap, ndests);
  last_case_index = 0;
  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence the code is
     laid out.  */

  if (tf->may_fallthru)
      x = gimple_build_assign (finally_tmp,
                               build_int_cst (integer_type_node,
      gimple_seq_add_stmt (&tf->top_p_seq, x);

      tmp = build_int_cst (integer_type_node, fallthru_index);
      last_case = build_case_label (tmp, NULL,
                                    create_artificial_label (tf_loc));
      VEC_quick_push (tree, case_label_vec, last_case);

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&switch_body, x);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_seq_add_stmt (&switch_body, x);

  /* For EH_ELSE, emit the exception path (plus resx) now, then
     subsequently we only need consider the normal path.  */
      finally = gimple_eh_else_e_body (eh_else);
      lower_eh_constructs_1 (state, finally);

      emit_post_landing_pad (&eh_seq, tf->region);
      gimple_seq_add_seq (&eh_seq, finally);
      emit_resx (&eh_seq, tf->region);

      finally = gimple_eh_else_n_body (eh_else);
  else if (tf->may_throw)
      emit_post_landing_pad (&eh_seq, tf->region);

      x = gimple_build_assign (finally_tmp,
                               build_int_cst (integer_type_node, eh_index));
      gimple_seq_add_stmt (&eh_seq, x);

      x = gimple_build_goto (finally_label);
      gimple_seq_add_stmt (&eh_seq, x);

      tmp = build_int_cst (integer_type_node, eh_index);
      last_case = build_case_label (tmp, NULL,
                                    create_artificial_label (tf_loc));
      VEC_quick_push (tree, case_label_vec, last_case);

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&eh_seq, x);
      emit_resx (&eh_seq, tf->region);

  x = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  lower_eh_constructs_1 (state, finally);
  gimple_seq_add_seq (&tf->top_p_seq, finally);
  /* Redirect each incoming goto edge.  */
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  /* Prepare the assignments to finally_tmp that are executed upon the
     entrance through a particular edge.  */
      unsigned int case_index;

      mod = gimple_seq_alloc ();
          x = gimple_build_assign (finally_tmp,
                                   build_int_cst (integer_type_node,
          gimple_seq_add_stmt (&mod, x);
          do_return_redirection (q, finally_label, mod);
          switch_id = return_index;

          x = gimple_build_assign (finally_tmp,
                                   build_int_cst (integer_type_node, q->index));
          gimple_seq_add_stmt (&mod, x);
          do_goto_redirection (q, finally_label, mod, tf);
          switch_id = q->index;

      case_index = j + q->index;
      if (VEC_length (tree, case_label_vec) <= case_index
          || !VEC_index (tree, case_label_vec, case_index))
          tmp = build_int_cst (integer_type_node, switch_id);
          case_lab = build_case_label (tmp, NULL,
                                       create_artificial_label (tf_loc));
          /* We store the cont_stmt in the pointer map, so that we can recover
             it in the loop below.  */
            cont_map = pointer_map_create ();
          slot = pointer_map_insert (cont_map, case_lab);
          *slot = q->cont_stmt;
          VEC_quick_push (tree, case_label_vec, case_lab);

  for (j = last_case_index; j < last_case_index + nlabels; j++)
      last_case = VEC_index (tree, case_label_vec, j);

      gcc_assert (last_case);
      gcc_assert (cont_map);

      slot = pointer_map_contains (cont_map, last_case);
      cont_stmt = *(gimple *) slot;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&switch_body, x);
      gimple_seq_add_stmt (&switch_body, cont_stmt);
      maybe_record_in_goto_queue (state, cont_stmt);

  pointer_map_destroy (cont_map);
  replace_goto_queue (tf);

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  sort_case_labels (case_label_vec);

  /* Build the switch statement, setting last_case to be the default
     label.  */
  switch_stmt = gimple_build_switch_vec (finally_tmp, last_case,
  gimple_set_location (switch_stmt, finally_loc);

  /* Need to link SWITCH_STMT after running replace_goto_queue
     due to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_stmt (&tf->top_p_seq, switch_stmt);
  gimple_seq_add_seq (&tf->top_p_seq, switch_body);
/* Decide whether or not we are going to duplicate the finally block.
   There are several considerations.

   First, if this is Java, then the finally block contains code
   written by the user.  It has line numbers associated with it,
   so duplicating the block means it's difficult to set a breakpoint.
   Since controlling code generation via -g is verboten, we simply
   never duplicate code without optimization.

   Second, we'd like to prevent egregious code growth.  One way to
   do this is to estimate the size of the finally block, multiply
   that by the number of copies we'd need to make, and compare against
   the estimate of the size of the switch machinery we'd have to add.  */
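/* A worked example of the heuristic below, with illustrative numbers:
   for ndests == 3 and a finally block weighing 10 size units,
   f_estimate = (10 + 1) * 3 = 33 and sw_estimate = 10 + 2 * 3 = 16.
   When optimizing for size, 33 < 16 fails and we build the switch;
   at higher optimization levels 33 < 100 holds and we duplicate the
   finally block instead.  */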
decide_copy_try_finally (int ndests, bool may_throw, gimple_seq finally)
  int f_estimate, sw_estimate;

  /* If there's an EH_ELSE involved, the exception path is separate
     and really doesn't come into play for this computation.  */
  eh_else = get_eh_else (finally);
      ndests -= may_throw;
      finally = gimple_eh_else_n_body (eh_else);

      gimple_stmt_iterator gsi;

      for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
          gimple stmt = gsi_stmt (gsi);
          if (!is_gimple_debug (stmt) && !gimple_clobber_p (stmt))

  /* Finally estimate N times, plus N gotos.  */
  f_estimate = count_insns_seq (finally, &eni_size_weights);
  f_estimate = (f_estimate + 1) * ndests;

  /* Switch statement (cost 10), N variable assignments, N gotos.  */
  sw_estimate = 10 + 2 * ndests;

  /* Optimize for size clearly wants our best guess.  */
  if (optimize_function_for_size_p (cfun))
    return f_estimate < sw_estimate;

  /* ??? These numbers are completely made up so far.  */
    return f_estimate < 100 || f_estimate < sw_estimate * 2;
  return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
/* REG is the enclosing region for a possible cleanup region, or the region
   itself.  Returns TRUE if such a region would be unreachable.

   Cleanup regions within a must-not-throw region aren't actually reachable
   even if there are throwing stmts within them, because the personality
   routine will call terminate before unwinding.  */

cleanup_is_dead_in (eh_region reg)
  while (reg && reg->type == ERT_CLEANUP)
  return (reg && reg->type == ERT_MUST_NOT_THROW);
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_FINALLY node
   to a sequence of labels and blocks, plus the exception region trees
   that record all the magic.  This is complicated by the need to
   arrange for the FINALLY block to be executed on all exits.  */

lower_try_finally (struct leh_state *state, gimple tp)
  struct leh_tf_state this_tf;
  struct leh_state this_state;
  gimple_seq old_eh_seq;

  /* Process the try block.  */

  memset (&this_tf, 0, sizeof (this_tf));
  this_tf.try_finally_expr = tp;
  this_tf.outer = state;
  if (using_eh_for_cleanups_p && !cleanup_is_dead_in (state->cur_region))
      this_tf.region = gen_eh_region_cleanup (state->cur_region);
      this_state.cur_region = this_tf.region;
      this_tf.region = NULL;
      this_state.cur_region = state->cur_region;

  this_state.ehp_region = state->ehp_region;
  this_state.tf = &this_tf;

  old_eh_seq = eh_seq;

  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));

  /* Determine if the try block is escaped through the bottom.  */
  this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));

  /* Determine if any exceptions are possible within the try block.  */
  this_tf.may_throw = eh_region_may_contain_throw (this_tf.region);
  if (this_tf.may_throw)
    honor_protect_cleanup_actions (state, &this_state, &this_tf);

  /* Determine how many edges (still) reach the finally block.  Or rather,
     how many destinations are reached by the finally block.  Use this to
     determine how we process the finally block itself.  */

  ndests = VEC_length (tree, this_tf.dest_array);
  ndests += this_tf.may_fallthru;
  ndests += this_tf.may_return;
  ndests += this_tf.may_throw;

  /* If the FINALLY block is not reachable, dike it out.  */
      gimple_seq_add_seq (&this_tf.top_p_seq, gimple_try_eval (tp));
      gimple_try_set_cleanup (tp, NULL);
  /* If the finally block doesn't fall through, then any destination
     we might try to impose there isn't reached either.  There may be
     some minor amount of cleanup and redirection still needed.  */
  else if (!gimple_seq_may_fallthru (gimple_try_cleanup (tp)))
    lower_try_finally_nofallthru (state, &this_tf);

  /* We can easily special-case redirection to a single destination.  */
  else if (ndests == 1)
    lower_try_finally_onedest (state, &this_tf);
  else if (decide_copy_try_finally (ndests, this_tf.may_throw,
                                    gimple_try_cleanup (tp)))
    lower_try_finally_copy (state, &this_tf);
    lower_try_finally_switch (state, &this_tf);

  /* If someone requested we add a label at the end of the transformed
     sequence, do so.  */
  if (this_tf.fallthru_label)
      /* This must be reached only if ndests == 0.  */
      gimple x = gimple_build_label (this_tf.fallthru_label);
      gimple_seq_add_stmt (&this_tf.top_p_seq, x);

  VEC_free (tree, heap, this_tf.dest_array);
  free (this_tf.goto_queue);
  if (this_tf.goto_queue_map)
    pointer_map_destroy (this_tf.goto_queue_map);

  /* If there was an old (aka outer) eh_seq, append the current eh_seq.
     If there was no old eh_seq, then the append is trivially already done.  */
      eh_seq = old_eh_seq;
      gimple_seq new_eh_seq = eh_seq;
      eh_seq = old_eh_seq;
      gimple_seq_add_seq (&eh_seq, new_eh_seq);

  return this_tf.top_p_seq;
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_CATCH with a
   list of GIMPLE_CATCH to a sequence of labels and blocks, plus the
   exception region trees that record all the magic.  */

lower_catch (struct leh_state *state, gimple tp)
  eh_region try_region = NULL;
  struct leh_state this_state = *state;
  gimple_stmt_iterator gsi;
  location_t try_catch_loc = gimple_location (tp);

  if (flag_exceptions)
      try_region = gen_eh_region_try (state->cur_region);
      this_state.cur_region = try_region;

  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));

  if (!eh_region_may_contain_throw (try_region))
    return gimple_try_eval (tp);

  emit_eh_dispatch (&new_seq, try_region);
  emit_resx (&new_seq, try_region);

  this_state.cur_region = state->cur_region;
  this_state.ehp_region = try_region;

  for (gsi = gsi_start (gimple_try_cleanup (tp));
      gcatch = gsi_stmt (gsi);
      c = gen_eh_region_catch (try_region, gimple_catch_types (gcatch));

      handler = gimple_catch_handler (gcatch);
      lower_eh_constructs_1 (&this_state, handler);

      c->label = create_artificial_label (UNKNOWN_LOCATION);
      x = gimple_build_label (c->label);
      gimple_seq_add_stmt (&new_seq, x);

      gimple_seq_add_seq (&new_seq, handler);

      if (gimple_seq_may_fallthru (new_seq))
            out_label = create_artificial_label (try_catch_loc);

          x = gimple_build_goto (out_label);
          gimple_seq_add_stmt (&new_seq, x);

  gimple_try_set_cleanup (tp, new_seq);

  return frob_into_branch_around (tp, try_region, out_label);
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY with a
   GIMPLE_EH_FILTER to a sequence of labels and blocks, plus the exception
   region trees that record all the magic.  */

lower_eh_filter (struct leh_state *state, gimple tp)
  struct leh_state this_state = *state;
  eh_region this_region = NULL;

  inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));

  if (flag_exceptions)
      this_region = gen_eh_region_allowed (state->cur_region,
                                           gimple_eh_filter_types (inner));
      this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));

  if (!eh_region_may_contain_throw (this_region))
    return gimple_try_eval (tp);

  this_state.cur_region = state->cur_region;
  this_state.ehp_region = this_region;

  emit_eh_dispatch (&new_seq, this_region);
  emit_resx (&new_seq, this_region);

  this_region->u.allowed.label = create_artificial_label (UNKNOWN_LOCATION);
  x = gimple_build_label (this_region->u.allowed.label);
  gimple_seq_add_stmt (&new_seq, x);

  lower_eh_constructs_1 (&this_state, gimple_eh_filter_failure (inner));
  gimple_seq_add_seq (&new_seq, gimple_eh_filter_failure (inner));

  gimple_try_set_cleanup (tp, new_seq);

  return frob_into_branch_around (tp, this_region, NULL);
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY with
   a GIMPLE_EH_MUST_NOT_THROW to a sequence of labels and blocks,
   plus the exception region trees that record all the magic.  */

lower_eh_must_not_throw (struct leh_state *state, gimple tp)
  struct leh_state this_state = *state;

  if (flag_exceptions)
      gimple inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
      eh_region this_region;

      this_region = gen_eh_region_must_not_throw (state->cur_region);
      this_region->u.must_not_throw.failure_decl
        = gimple_eh_must_not_throw_fndecl (inner);
      this_region->u.must_not_throw.failure_loc = gimple_location (tp);

      /* In order to get mangling applied to this decl, we must mark it
         used now.  Otherwise, pass_ipa_free_lang_data won't think it
         needs to happen.  */
      TREE_USED (this_region->u.must_not_throw.failure_decl) = 1;

      this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));

  return gimple_try_eval (tp);
/* Implement a cleanup expression.  This is similar to try-finally,
   except that we only execute the cleanup block for exception edges.  */
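/* Illustrative shape (a sketch): for

       try { body } cleanup { stuff; }

   the normal path falls out of body untouched, while the exception
   path gets a landing pad that runs stuff and then rethrows to the
   outer region with a RESX.  */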
lower_cleanup (struct leh_state *state, gimple tp)
  struct leh_state this_state = *state;
  eh_region this_region = NULL;
  struct leh_tf_state fake_tf;
  bool cleanup_dead = cleanup_is_dead_in (state->cur_region);

  if (flag_exceptions && !cleanup_dead)
      this_region = gen_eh_region_cleanup (state->cur_region);
      this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));

  if (cleanup_dead || !eh_region_may_contain_throw (this_region))
    return gimple_try_eval (tp);

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.top_p = fake_tf.try_finally_expr = tp;
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
  fake_tf.may_throw = true;

  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  if (fake_tf.may_throw)
      /* In this case honor_protect_cleanup_actions had nothing to do,
         and we should process this normally.  */
      lower_eh_constructs_1 (state, gimple_try_cleanup (tp));
      result = frob_into_branch_around (tp, this_region,
                                        fake_tf.fallthru_label);
      /* In this case honor_protect_cleanup_actions did nearly all of
         the work.  All we have left is to append the fallthru_label.  */
      result = gimple_try_eval (tp);
      if (fake_tf.fallthru_label)
          gimple x = gimple_build_label (fake_tf.fallthru_label);
          gimple_seq_add_stmt (&result, x);
/* Main loop for lowering eh constructs.  Also moves gsi to the next
   statement.  */

lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi)
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
        tree fndecl = gimple_call_fndecl (stmt);

        if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
          switch (DECL_FUNCTION_CODE (fndecl))
            case BUILT_IN_EH_POINTER:
              /* The front end may have generated a call to
                 __builtin_eh_pointer (0) within a catch region.  Replace
                 this zero argument with the current catch region number.  */
              if (state->ehp_region)
                  tree nr = build_int_cst (integer_type_node,
                                           state->ehp_region->index);
                  gimple_call_set_arg (stmt, 0, nr);
                  /* The user has done something silly.  Remove it.  */
                  rhs = null_pointer_node;

            case BUILT_IN_EH_FILTER:
              /* ??? This should never appear, but since it's a builtin it
                 is accessible to abuse by users.  Just remove it and
                 replace the use with the arbitrary value zero.  */
              rhs = build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
              lhs = gimple_call_lhs (stmt);
              x = gimple_build_assign (lhs, rhs);
              gsi_insert_before (gsi, x, GSI_SAME_STMT);

            case BUILT_IN_EH_COPY_VALUES:
              /* Likewise this should not appear.  Remove it.  */
              gsi_remove (gsi, true);
      /* If the stmt can throw use a new temporary for the assignment
         to a LHS.  This makes sure the old value of the LHS is
         available on the EH edge.  Only do so for statements that
         potentially fall thru (no noreturn calls e.g.), otherwise
         this new assignment might create fake fallthru regions.  */
      if (stmt_could_throw_p (stmt)
          && gimple_has_lhs (stmt)
          && gimple_stmt_may_fallthru (stmt)
          && !tree_could_throw_p (gimple_get_lhs (stmt))
          && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
          tree lhs = gimple_get_lhs (stmt);
          tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
          gimple s = gimple_build_assign (lhs, tmp);
          gimple_set_location (s, gimple_location (stmt));
          gimple_set_block (s, gimple_block (stmt));
          gimple_set_lhs (stmt, tmp);
          if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
              || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
            DECL_GIMPLE_REG_P (tmp) = 1;
          gsi_insert_after (gsi, s, GSI_SAME_STMT);

      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && stmt_could_throw_p (stmt))
          record_stmt_eh_region (state->cur_region, stmt);
          note_eh_region_may_contain_throw (state->cur_region);

      maybe_record_in_goto_queue (state, stmt);

      verify_norecord_switch_expr (state, stmt);

      if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
        replace = lower_try_finally (state, stmt);

          x = gimple_seq_first_stmt (gimple_try_cleanup (stmt));
              replace = gimple_try_eval (stmt);
              lower_eh_constructs_1 (state, replace);

          switch (gimple_code (x))
              replace = lower_catch (state, stmt);
            case GIMPLE_EH_FILTER:
              replace = lower_eh_filter (state, stmt);
            case GIMPLE_EH_MUST_NOT_THROW:
              replace = lower_eh_must_not_throw (state, stmt);
            case GIMPLE_EH_ELSE:
              /* This code is only valid with GIMPLE_TRY_FINALLY.  */
              replace = lower_cleanup (state, stmt);

      /* Remove the old stmt and insert the transformed sequence
         instead.  */
      gsi_insert_seq_before (gsi, replace, GSI_SAME_STMT);
      gsi_remove (gsi, true);

      /* Return since we don't want gsi_next ().  */

    case GIMPLE_EH_ELSE:
      /* We should be eliminating this in lower_try_finally et al.  */

    /* A type, a decl, or some kind of statement that we're not
       interested in.  Don't walk them.  */
/* A helper to unwrap a gimple_seq and feed stmts to lower_eh_constructs_2.  */

lower_eh_constructs_1 (struct leh_state *state, gimple_seq seq)
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (seq); !gsi_end_p (gsi);)
    lower_eh_constructs_2 (state, &gsi);
lower_eh_constructs (void)
  struct leh_state null_state;

  bodyp = gimple_body (current_function_decl);

  finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
  eh_region_may_contain_throw_map = BITMAP_ALLOC (NULL);
  memset (&null_state, 0, sizeof (null_state));

  collect_finally_tree_1 (bodyp, NULL);
  lower_eh_constructs_1 (&null_state, bodyp);

  /* We assume there's a return statement, or something, at the end of
     the function, and thus plopping the EH sequence afterward won't
     change anything.  */
  gcc_assert (!gimple_seq_may_fallthru (bodyp));
  gimple_seq_add_seq (&bodyp, eh_seq);

  /* We assume that since BODYP already existed, adding EH_SEQ to it
     didn't change its value, and we don't have to re-set the function.  */
  gcc_assert (bodyp == gimple_body (current_function_decl));

  htab_delete (finally_tree);
  BITMAP_FREE (eh_region_may_contain_throw_map);

  /* If this function needs a language specific EH personality routine
     and the frontend didn't already set one do so now.  */
  if (function_needs_eh_personality (cfun) == eh_personality_lang
      && !DECL_FUNCTION_PERSONALITY (current_function_decl))
    DECL_FUNCTION_PERSONALITY (current_function_decl)
      = lang_hooks.eh_personality ();
struct gimple_opt_pass pass_lower_eh =
  lower_eh_constructs,                  /* execute */
  0,                                    /* static_pass_number */
  TV_TREE_EH,                           /* tv_id */
  PROP_gimple_lcf,                      /* properties_required */
  PROP_gimple_leh,                      /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
/* Create the multiple edges from an EH_DISPATCH statement to all of
   the possible handlers for its EH region.  Return true if there's
   no fallthru edge; false if there is.  */

make_eh_dispatch_edges (gimple stmt)
  basic_block src, dst;

  r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
  src = gimple_bb (stmt);

      for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
          dst = label_to_block (c->label);
          make_edge (src, dst, 0);

          /* A catch-all handler doesn't have a fallthru.  */
          if (c->type_list == NULL)

    case ERT_ALLOWED_EXCEPTIONS:
      dst = label_to_block (r->u.allowed.label);
      make_edge (src, dst, 0);
/* Create the single EH edge from STMT to its nearest landing pad,
   if there is such a landing pad within the current function.  */

make_eh_edges (gimple stmt)
  basic_block src, dst;

  lp_nr = lookup_stmt_eh_lp (stmt);

  lp = get_eh_landing_pad_from_number (lp_nr);
  gcc_assert (lp != NULL);

  src = gimple_bb (stmt);
  dst = label_to_block (lp->post_landing_pad);
  make_edge (src, dst, EDGE_EH);
2185 /* Do the work in redirecting EDGE_IN to NEW_BB within the EH region tree;
2186 do not actually perform the final edge redirection.
2188 CHANGE_REGION is true when we're being called from cleanup_empty_eh and
2189 we intend to change the destination EH region as well; this means
2190 EH_LANDING_PAD_NR must already be set on the destination block label.
2191 If false, we're being called from generic cfg manipulation code and we
2192 should preserve our place within the region tree. */
2195 redirect_eh_edge_1 (edge edge_in, basic_block new_bb, bool change_region)
2197 eh_landing_pad old_lp, new_lp;
2200 int old_lp_nr, new_lp_nr;
2201 tree old_label, new_label;
2205 old_bb = edge_in->dest;
2206 old_label = gimple_block_label (old_bb);
2207 old_lp_nr = EH_LANDING_PAD_NR (old_label);
2208 gcc_assert (old_lp_nr > 0);
2209 old_lp = get_eh_landing_pad_from_number (old_lp_nr);
2211 throw_stmt = last_stmt (edge_in->src);
2212 gcc_assert (lookup_stmt_eh_lp (throw_stmt) == old_lp_nr);
2214 new_label = gimple_block_label (new_bb);
2216 /* Look for an existing region that might be using NEW_BB already. */
2217 new_lp_nr = EH_LANDING_PAD_NR (new_label);
2220 new_lp = get_eh_landing_pad_from_number (new_lp_nr);
2221 gcc_assert (new_lp);
2223 /* Unless CHANGE_REGION is true, the new and old landing pad
2224 had better be associated with the same EH region. */
2225 gcc_assert (change_region || new_lp->region == old_lp->region);
2230 gcc_assert (!change_region);
2233 /* Notice when we redirect the last EH edge away from OLD_BB. */
2234 FOR_EACH_EDGE (e, ei, old_bb->preds)
2235 if (e != edge_in && (e->flags & EDGE_EH))
2240 /* NEW_LP already exists. If there are still edges into OLD_LP,
2241 there's nothing to do with the EH tree. If there are no more
2242 edges into OLD_LP, then we want to remove OLD_LP as it is unused.
2243 If CHANGE_REGION is true, then our caller is expecting to remove the landing pad. */
2245 if (e == NULL && !change_region)
2246 remove_eh_landing_pad (old_lp);
2250 /* No correct landing pad exists. If there are no more edges
2251 into OLD_LP, then we can simply re-use the existing landing pad.
2252 Otherwise, we have to create a new landing pad. */
2255 EH_LANDING_PAD_NR (old_lp->post_landing_pad) = 0;
2259 new_lp = gen_eh_landing_pad (old_lp->region);
2260 new_lp->post_landing_pad = new_label;
2261 EH_LANDING_PAD_NR (new_label) = new_lp->index;
2264 /* Maybe move the throwing statement to the new region. */
2265 if (old_lp != new_lp)
2267 remove_stmt_from_eh_lp (throw_stmt);
2268 add_stmt_to_eh_lp (throw_stmt, new_lp->index);
2272 /* Redirect EH edge E to NEW_BB. */
2275 redirect_eh_edge (edge edge_in, basic_block new_bb)
2277 redirect_eh_edge_1 (edge_in, new_bb, false);
2278 return ssa_redirect_edge (edge_in, new_bb);
2281 /* This is a subroutine of gimple_redirect_edge_and_branch. Update the
2282 labels for redirecting a non-fallthru EH_DISPATCH edge E to NEW_BB.
2283 The actual edge update will happen in the caller. */
2286 redirect_eh_dispatch_edge (gimple stmt, edge e, basic_block new_bb)
2288 tree new_lab = gimple_block_label (new_bb);
2289 bool any_changed = false;
2294 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
2298 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
2300 old_bb = label_to_block (c->label);
2301 if (old_bb == e->dest)
2309 case ERT_ALLOWED_EXCEPTIONS:
2310 old_bb = label_to_block (r->u.allowed.label);
2311 gcc_assert (old_bb == e->dest);
2312 r->u.allowed.label = new_lab;
2320 gcc_assert (any_changed);
2323 /* Helper function for operation_could_trap_p and stmt_could_throw_p. */
2326 operation_could_trap_helper_p (enum tree_code op,
2337 case TRUNC_DIV_EXPR:
2339 case FLOOR_DIV_EXPR:
2340 case ROUND_DIV_EXPR:
2341 case EXACT_DIV_EXPR:
2343 case FLOOR_MOD_EXPR:
2344 case ROUND_MOD_EXPR:
2345 case TRUNC_MOD_EXPR:
2347 if (honor_snans || honor_trapv)
2350 return flag_trapping_math;
2351 if (!TREE_CONSTANT (divisor) || integer_zerop (divisor))
2360 /* Some floating point comparisons may trap. */
2365 case UNORDERED_EXPR:
2375 case FIX_TRUNC_EXPR:
2376 /* Conversion of floating point might trap. */
2382 /* These operations don't trap with floating point. */
2390 /* Any floating arithmetic may trap. */
2391 if (fp_operation && flag_trapping_math)
2399 /* Constructing an object cannot trap. */
2403 /* Any floating arithmetic may trap. */
2404 if (fp_operation && flag_trapping_math)
2412 /* Return true if operation OP may trap. FP_OPERATION is true if OP is applied
2413 to floating-point values. HONOR_TRAPV is true if OP is applied to integer
2414 type operands that may trap. If OP is a division operator, DIVISOR contains
2415 the value of the divisor. */
2418 operation_could_trap_p (enum tree_code op, bool fp_operation, bool honor_trapv,
2421 bool honor_nans = (fp_operation && flag_trapping_math
2422 && !flag_finite_math_only);
2423 bool honor_snans = fp_operation && flag_signaling_nans != 0;
2426 if (TREE_CODE_CLASS (op) != tcc_comparison
2427 && TREE_CODE_CLASS (op) != tcc_unary
2428 && TREE_CODE_CLASS (op) != tcc_binary)
2431 return operation_could_trap_helper_p (op, fp_operation, honor_trapv,
2432 honor_nans, honor_snans, divisor,
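/* Usage sketch (illustrative): integer division by zero or by a
   non-constant divisor is reported as potentially trapping, e.g.

     operation_could_trap_p (TRUNC_DIV_EXPR, false, false,
			     integer_zero_node)

   returns true, while the same query with a nonzero constant divisor
   and HONOR_TRAPV clear returns false. */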
2436 /* Return true if EXPR can trap, as in dereferencing an invalid pointer
2437 location or floating-point arithmetic. Cf. the rtl version, may_trap_p.
2438 This routine expects only GIMPLE lhs or rhs input. */
2441 tree_could_trap_p (tree expr)
2443 enum tree_code code;
2444 bool fp_operation = false;
2445 bool honor_trapv = false;
2446 tree t, base, div = NULL_TREE;
2451 code = TREE_CODE (expr);
2452 t = TREE_TYPE (expr);
2456 if (COMPARISON_CLASS_P (expr))
2457 fp_operation = FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)));
2459 fp_operation = FLOAT_TYPE_P (t);
2460 honor_trapv = INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t);
2463 if (TREE_CODE_CLASS (code) == tcc_binary)
2464 div = TREE_OPERAND (expr, 1);
2465 if (operation_could_trap_p (code, fp_operation, honor_trapv, div))
2471 case TARGET_MEM_REF:
2472 if (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR
2473 && !TMR_INDEX (expr) && !TMR_INDEX2 (expr))
2475 return !TREE_THIS_NOTRAP (expr);
2481 case VIEW_CONVERT_EXPR:
2482 case WITH_SIZE_EXPR:
2483 expr = TREE_OPERAND (expr, 0);
2484 code = TREE_CODE (expr);
2487 case ARRAY_RANGE_REF:
2488 base = TREE_OPERAND (expr, 0);
2489 if (tree_could_trap_p (base))
2491 if (TREE_THIS_NOTRAP (expr))
2493 return !range_in_array_bounds_p (expr);
2496 base = TREE_OPERAND (expr, 0);
2497 if (tree_could_trap_p (base))
2499 if (TREE_THIS_NOTRAP (expr))
2501 return !in_array_bounds_p (expr);
2504 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
2508 return !TREE_THIS_NOTRAP (expr);
2511 return TREE_THIS_VOLATILE (expr);
2514 t = get_callee_fndecl (expr);
2515 /* Assume that calls to weak functions may trap. */
2516 if (!t || !DECL_P (t))
2519 return tree_could_trap_p (t);
2523 /* Assume that accesses to weak functions may trap, unless we know
2524 they are certainly defined in the current TU or in some other LTO partition. */
2526 if (DECL_WEAK (expr))
2528 struct cgraph_node *node;
2529 if (!DECL_EXTERNAL (expr))
2531 node = cgraph_function_node (cgraph_get_node (expr), NULL);
2532 if (node && node->in_other_partition)
2539 /* Assume that accesses to weak vars may trap, unless we know
2540 they are certainly defined in the current TU or in some other LTO partition. */
2542 if (DECL_WEAK (expr))
2544 struct varpool_node *node;
2545 if (!DECL_EXTERNAL (expr))
2547 node = varpool_variable_node (varpool_get_node (expr), NULL);
2548 if (node && node->in_other_partition)
2556 /* Assume that accesses to weak vars or functions may trap. */
2557 if (DECL_WEAK (expr))
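/* Illustrative examples: tree_could_trap_p returns true for *p with p an
   arbitrary pointer, for an array reference that cannot be proven to be
   in bounds, and for a reference to a weak symbol that may remain
   undefined at link time; it returns false when the front end has
   marked the reference TREE_THIS_NOTRAP. */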
2567 /* Helper for stmt_could_throw_p. Return true if STMT (assumed to be
2568 an assignment or a conditional) may throw. */
2571 stmt_could_throw_1_p (gimple stmt)
2573 enum tree_code code = gimple_expr_code (stmt);
2574 bool honor_nans = false;
2575 bool honor_snans = false;
2576 bool fp_operation = false;
2577 bool honor_trapv = false;
2582 if (TREE_CODE_CLASS (code) == tcc_comparison
2583 || TREE_CODE_CLASS (code) == tcc_unary
2584 || TREE_CODE_CLASS (code) == tcc_binary)
2586 if (is_gimple_assign (stmt)
2587 && TREE_CODE_CLASS (code) == tcc_comparison)
2588 t = TREE_TYPE (gimple_assign_rhs1 (stmt));
2589 else if (gimple_code (stmt) == GIMPLE_COND)
2590 t = TREE_TYPE (gimple_cond_lhs (stmt));
2592 t = gimple_expr_type (stmt);
2593 fp_operation = FLOAT_TYPE_P (t);
2596 honor_nans = flag_trapping_math && !flag_finite_math_only;
2597 honor_snans = flag_signaling_nans != 0;
2599 else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
2603 /* Check if the main expression may trap. */
2604 t = is_gimple_assign (stmt) ? gimple_assign_rhs2 (stmt) : NULL;
2605 ret = operation_could_trap_helper_p (code, fp_operation, honor_trapv,
2606 honor_nans, honor_snans, t,
2611 /* If the expression does not trap, see if any of the individual operands may trap. */
2613 for (i = 0; i < gimple_num_ops (stmt); i++)
2614 if (tree_could_trap_p (gimple_op (stmt, i)))
2621 /* Return true if statement STMT could throw an exception. */
2624 stmt_could_throw_p (gimple stmt)
2626 if (!flag_exceptions)
2629 /* The only statements that can throw an exception are assignments,
2630 conditionals, calls, resx, and asms. */
2631 switch (gimple_code (stmt))
2637 return !gimple_call_nothrow_p (stmt);
2641 if (!cfun->can_throw_non_call_exceptions)
2643 return stmt_could_throw_1_p (stmt);
2646 if (!cfun->can_throw_non_call_exceptions)
2648 return gimple_asm_volatile_p (stmt);
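/* Illustrative note: with -fnon-call-exceptions (used, e.g., by the Ada
   and Java front ends) a trapping division or memory access can throw,
   which is why the assignment, conditional and asm cases above are
   guarded by can_throw_non_call_exceptions; under the C/C++ defaults
   only calls and resx statements can throw. */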
2656 /* Return true if expression T could throw an exception. */
2659 tree_could_throw_p (tree t)
2661 if (!flag_exceptions)
2663 if (TREE_CODE (t) == MODIFY_EXPR)
2665 if (cfun->can_throw_non_call_exceptions
2666 && tree_could_trap_p (TREE_OPERAND (t, 0)))
2668 t = TREE_OPERAND (t, 1);
2671 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2672 t = TREE_OPERAND (t, 0);
2673 if (TREE_CODE (t) == CALL_EXPR)
2674 return (call_expr_flags (t) & ECF_NOTHROW) == 0;
2675 if (cfun->can_throw_non_call_exceptions)
2676 return tree_could_trap_p (t);
2680 /* Return true if STMT can throw an exception that is not caught within
2681 the current function (CFUN). */
2684 stmt_can_throw_external (gimple stmt)
2688 if (!stmt_could_throw_p (stmt))
2691 lp_nr = lookup_stmt_eh_lp (stmt);
2695 /* Return true if STMT can throw an exception that is caught within
2696 the current function (CFUN). */
2699 stmt_can_throw_internal (gimple stmt)
2703 if (!stmt_could_throw_p (stmt))
2706 lp_nr = lookup_stmt_eh_lp (stmt);
2710 /* Given a statement STMT in IFUN, if STMT can no longer throw, then
2711 remove any entry it might have from the EH table. Return true if
2712 any change was made. */
2715 maybe_clean_eh_stmt_fn (struct function *ifun, gimple stmt)
2717 if (stmt_could_throw_p (stmt))
2719 return remove_stmt_from_eh_lp_fn (ifun, stmt);
2722 /* Likewise, but always use the current function. */
2725 maybe_clean_eh_stmt (gimple stmt)
2727 return maybe_clean_eh_stmt_fn (cfun, stmt);
2730 /* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
2731 OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
2732 in the table if it should be in there. Return TRUE if a replacement was
2733 done that may require an EH edge purge. */
2736 maybe_clean_or_replace_eh_stmt (gimple old_stmt, gimple new_stmt)
2738 int lp_nr = lookup_stmt_eh_lp (old_stmt);
2742 bool new_stmt_could_throw = stmt_could_throw_p (new_stmt);
2744 if (new_stmt == old_stmt && new_stmt_could_throw)
2747 remove_stmt_from_eh_lp (old_stmt);
2748 if (new_stmt_could_throw)
2750 add_stmt_to_eh_lp (new_stmt, lp_nr);
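/* Typical use (an illustrative sketch, not quoted from a caller):

     if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
       gimple_purge_dead_eh_edges (gimple_bb (new_stmt));

   so that once the replacement can no longer throw, the now-dead EH
   edges out of its block are removed as well. */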
2760 /* Given a statement OLD_STMT in OLD_FUN and a duplicate statement NEW_STMT
2761 in NEW_FUN, copy the EH table data from OLD_STMT to NEW_STMT. The MAP
2762 operand is the return value of duplicate_eh_regions. */
2765 maybe_duplicate_eh_stmt_fn (struct function *new_fun, gimple new_stmt,
2766 struct function *old_fun, gimple old_stmt,
2767 struct pointer_map_t *map, int default_lp_nr)
2769 int old_lp_nr, new_lp_nr;
2772 if (!stmt_could_throw_p (new_stmt))
2775 old_lp_nr = lookup_stmt_eh_lp_fn (old_fun, old_stmt);
2778 if (default_lp_nr == 0)
2780 new_lp_nr = default_lp_nr;
2782 else if (old_lp_nr > 0)
2784 eh_landing_pad old_lp, new_lp;
2786 old_lp = VEC_index (eh_landing_pad, old_fun->eh->lp_array, old_lp_nr);
2787 slot = pointer_map_contains (map, old_lp);
2788 new_lp = (eh_landing_pad) *slot;
2789 new_lp_nr = new_lp->index;
2793 eh_region old_r, new_r;
2795 old_r = VEC_index (eh_region, old_fun->eh->region_array, -old_lp_nr);
2796 slot = pointer_map_contains (map, old_r);
2797 new_r = (eh_region) *slot;
2798 new_lp_nr = -new_r->index;
2801 add_stmt_to_eh_lp_fn (new_fun, new_stmt, new_lp_nr);
2805 /* Similar, but both OLD_STMT and NEW_STMT are within the current function,
2806 and thus no remapping is required. */
2809 maybe_duplicate_eh_stmt (gimple new_stmt, gimple old_stmt)
2813 if (!stmt_could_throw_p (new_stmt))
2816 lp_nr = lookup_stmt_eh_lp (old_stmt);
2820 add_stmt_to_eh_lp (new_stmt, lp_nr);
2824 /* Returns TRUE if oneh and twoh are exception handlers (gimple_try_cleanup of
2825 GIMPLE_TRY) that are similar enough to be considered the same. Currently
2826 this only handles handlers consisting of a single call, as that's the
2827 important case for C++: a destructor call for a particular object showing
2828 up in multiple handlers. */
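/* Illustrative example: two cleanup sequences that each consist of the
   single call ~A (&a), with identical argument lists and no LHS or
   static chain, compare equal here; optimize_double_finally below
   relies on this to merge nested try-finally constructs. */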
2831 same_handler_p (gimple_seq oneh, gimple_seq twoh)
2833 gimple_stmt_iterator gsi;
2837 gsi = gsi_start (oneh);
2838 if (!gsi_one_before_end_p (gsi))
2840 ones = gsi_stmt (gsi);
2842 gsi = gsi_start (twoh);
2843 if (!gsi_one_before_end_p (gsi))
2845 twos = gsi_stmt (gsi);
2847 if (!is_gimple_call (ones)
2848 || !is_gimple_call (twos)
2849 || gimple_call_lhs (ones)
2850 || gimple_call_lhs (twos)
2851 || gimple_call_chain (ones)
2852 || gimple_call_chain (twos)
2853 || !gimple_call_same_target_p (ones, twos)
2854 || gimple_call_num_args (ones) != gimple_call_num_args (twos))
2857 for (ai = 0; ai < gimple_call_num_args (ones); ++ai)
2858 if (!operand_equal_p (gimple_call_arg (ones, ai),
2859 gimple_call_arg (twos, ai), 0))
2865 /* Optimize
2866 try { A() } finally { try { ~B() } catch { ~A() } }
2867 try { ... } finally { ~A() }
2868 into
2869 try { A() } catch { ~B() }
2870 try { ~B() ... } finally { ~A() }
2872 This occurs frequently in C++, where A is a local variable and B is a
2873 temporary used in the initializer for A. */
2876 optimize_double_finally (gimple one, gimple two)
2879 gimple_stmt_iterator gsi;
2881 gsi = gsi_start (gimple_try_cleanup (one));
2882 if (!gsi_one_before_end_p (gsi))
2885 oneh = gsi_stmt (gsi);
2886 if (gimple_code (oneh) != GIMPLE_TRY
2887 || gimple_try_kind (oneh) != GIMPLE_TRY_CATCH)
2890 if (same_handler_p (gimple_try_cleanup (oneh), gimple_try_cleanup (two)))
2892 gimple_seq seq = gimple_try_eval (oneh);
2894 gimple_try_set_cleanup (one, seq);
2895 gimple_try_set_kind (one, GIMPLE_TRY_CATCH);
2896 seq = copy_gimple_seq_and_replace_locals (seq);
2897 gimple_seq_add_seq (&seq, gimple_try_eval (two));
2898 gimple_try_set_eval (two, seq);
2902 /* Perform EH refactoring optimizations that are simpler to do when code
2903 flow has been lowered but EH structures haven't. */
2906 refactor_eh_r (gimple_seq seq)
2908 gimple_stmt_iterator gsi;
2913 gsi = gsi_start (seq);
2917 if (gsi_end_p (gsi))
2920 two = gsi_stmt (gsi);
2923 && gimple_code (one) == GIMPLE_TRY
2924 && gimple_code (two) == GIMPLE_TRY
2925 && gimple_try_kind (one) == GIMPLE_TRY_FINALLY
2926 && gimple_try_kind (two) == GIMPLE_TRY_FINALLY)
2927 optimize_double_finally (one, two);
2929 switch (gimple_code (one))
2932 refactor_eh_r (gimple_try_eval (one));
2933 refactor_eh_r (gimple_try_cleanup (one));
2936 refactor_eh_r (gimple_catch_handler (one));
2938 case GIMPLE_EH_FILTER:
2939 refactor_eh_r (gimple_eh_filter_failure (one));
2941 case GIMPLE_EH_ELSE:
2942 refactor_eh_r (gimple_eh_else_n_body (one));
2943 refactor_eh_r (gimple_eh_else_e_body (one));
2958 refactor_eh_r (gimple_body (current_function_decl));
2963 gate_refactor_eh (void)
2965 return flag_exceptions != 0;
2968 struct gimple_opt_pass pass_refactor_eh =
2973 gate_refactor_eh, /* gate */
2974 refactor_eh, /* execute */
2977 0, /* static_pass_number */
2978 TV_TREE_EH, /* tv_id */
2979 PROP_gimple_lcf, /* properties_required */
2980 0, /* properties_provided */
2981 0, /* properties_destroyed */
2982 0, /* todo_flags_start */
2983 0 /* todo_flags_finish */
2987 /* At the end of gimple optimization, we can lower RESX. */
2990 lower_resx (basic_block bb, gimple stmt, struct pointer_map_t *mnt_map)
2993 eh_region src_r, dst_r;
2994 gimple_stmt_iterator gsi;
2999 lp_nr = lookup_stmt_eh_lp (stmt);
3001 dst_r = get_eh_region_from_lp_number (lp_nr);
3005 src_r = get_eh_region_from_number (gimple_resx_region (stmt));
3006 gsi = gsi_last_bb (bb);
3010 /* We can wind up with no source region when pass_cleanup_eh shows
3011 that there are no entries into an eh region and deletes it, but
3012 then the block that contains the resx isn't removed. This can
3013 happen without optimization when the switch statement created by
3014 lower_try_finally_switch isn't simplified to remove the eh case.
3016 Resolve this by expanding the resx node to an abort. */
3018 fn = builtin_decl_implicit (BUILT_IN_TRAP);
3019 x = gimple_build_call (fn, 0);
3020 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3022 while (EDGE_COUNT (bb->succs) > 0)
3023 remove_edge (EDGE_SUCC (bb, 0));
3027 /* When we have a destination region, we resolve this by copying
3028 the excptr and filter values into place, and changing the edge
3029 to immediately after the landing pad. */
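/* Schematic sketch of that rewrite (illustrative, not verbatim GIMPLE
   dump syntax), with N the source region and M the destination region:

     resx N;
   =>
     __builtin_eh_copy_values (M, N);
     <fallthru to the post-landing-pad block of region M>
*/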
3038 /* We are resuming into a MUST_NOT_THROW region. Expand a call to
3039 the failure decl into a new block, if needed. */
3040 gcc_assert (dst_r->type == ERT_MUST_NOT_THROW);
3042 slot = pointer_map_contains (mnt_map, dst_r);
3045 gimple_stmt_iterator gsi2;
3047 new_bb = create_empty_bb (bb);
3048 lab = gimple_block_label (new_bb);
3049 gsi2 = gsi_start_bb (new_bb);
3051 fn = dst_r->u.must_not_throw.failure_decl;
3052 x = gimple_build_call (fn, 0);
3053 gimple_set_location (x, dst_r->u.must_not_throw.failure_loc);
3054 gsi_insert_after (&gsi2, x, GSI_CONTINUE_LINKING);
3056 slot = pointer_map_insert (mnt_map, dst_r);
3062 new_bb = label_to_block (lab);
3065 gcc_assert (EDGE_COUNT (bb->succs) == 0);
3066 e = make_edge (bb, new_bb, EDGE_FALLTHRU);
3067 e->count = bb->count;
3068 e->probability = REG_BR_PROB_BASE;
3073 tree dst_nr = build_int_cst (integer_type_node, dst_r->index);
3075 fn = builtin_decl_implicit (BUILT_IN_EH_COPY_VALUES);
3076 src_nr = build_int_cst (integer_type_node, src_r->index);
3077 x = gimple_build_call (fn, 2, dst_nr, src_nr);
3078 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3080 /* Update the flags for the outgoing edge. */
3081 e = single_succ_edge (bb);
3082 gcc_assert (e->flags & EDGE_EH);
3083 e->flags = (e->flags & ~EDGE_EH) | EDGE_FALLTHRU;
3085 /* If there are no more EH users of the landing pad, delete it. */
3086 FOR_EACH_EDGE (e, ei, e->dest->preds)
3087 if (e->flags & EDGE_EH)
3091 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
3092 remove_eh_landing_pad (lp);
3102 /* When we don't have a destination region, this exception escapes
3103 up the call chain. We resolve this by generating a call to the
3104 _Unwind_Resume library function. */
3106 /* The ARM EABI redefines _Unwind_Resume as __cxa_end_cleanup
3107 with no arguments for C++ and Java. Check for that. */
3108 if (src_r->use_cxa_end_cleanup)
3110 fn = builtin_decl_implicit (BUILT_IN_CXA_END_CLEANUP);
3111 x = gimple_build_call (fn, 0);
3112 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3116 fn = builtin_decl_implicit (BUILT_IN_EH_POINTER);
3117 src_nr = build_int_cst (integer_type_node, src_r->index);
3118 x = gimple_build_call (fn, 1, src_nr);
3119 var = create_tmp_var (ptr_type_node, NULL);
3120 var = make_ssa_name (var, x);
3121 gimple_call_set_lhs (x, var);
3122 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3124 fn = builtin_decl_implicit (BUILT_IN_UNWIND_RESUME);
3125 x = gimple_build_call (fn, 1, var);
3126 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
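/* Schematically (illustrative), the escaping case just built is

     tmp_ptr = __builtin_eh_pointer (N);
     _Unwind_Resume (tmp_ptr);

   or a bare __cxa_end_cleanup () on ARM EABI targets, where N is the
   source region's index and tmp_ptr a fresh pointer-typed SSA name. */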
3129 gcc_assert (EDGE_COUNT (bb->succs) == 0);
3132 gsi_remove (&gsi, true);
3138 execute_lower_resx (void)
3141 struct pointer_map_t *mnt_map;
3142 bool dominance_invalidated = false;
3143 bool any_rewritten = false;
3145 mnt_map = pointer_map_create ();
3149 gimple last = last_stmt (bb);
3150 if (last && is_gimple_resx (last))
3152 dominance_invalidated |= lower_resx (bb, last, mnt_map);
3153 any_rewritten = true;
3157 pointer_map_destroy (mnt_map);
3159 if (dominance_invalidated)
3161 free_dominance_info (CDI_DOMINATORS);
3162 free_dominance_info (CDI_POST_DOMINATORS);
3165 return any_rewritten ? TODO_update_ssa_only_virtuals : 0;
3169 gate_lower_resx (void)
3171 return flag_exceptions != 0;
3174 struct gimple_opt_pass pass_lower_resx =
3179 gate_lower_resx, /* gate */
3180 execute_lower_resx, /* execute */
3183 0, /* static_pass_number */
3184 TV_TREE_EH, /* tv_id */
3185 PROP_gimple_lcf, /* properties_required */
3186 0, /* properties_provided */
3187 0, /* properties_destroyed */
3188 0, /* todo_flags_start */
3189 TODO_verify_flow /* todo_flags_finish */
3193 /* Try to optimize var = {v} {CLOBBER} stmts followed just by an external throw. */
3197 optimize_clobbers (basic_block bb)
3199 gimple_stmt_iterator gsi = gsi_last_bb (bb);
3200 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3202 gimple stmt = gsi_stmt (gsi);
3203 if (is_gimple_debug (stmt))
3205 if (!gimple_clobber_p (stmt)
3206 || TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME)
3208 unlink_stmt_vdef (stmt);
3209 gsi_remove (&gsi, true);
3210 release_defs (stmt);
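/* Illustrative example: in

     a ={v} {CLOBBER};
     b ={v} {CLOBBER};
     resx N;   (throws out of the function)

   the clobber stores are of no use to the unwinder, so the loop above
   deletes them. */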
3214 /* Try to sink var = {v} {CLOBBER} stmts followed just by
3215 an internal throw to the successor BB. */
3218 sink_clobbers (basic_block bb)
3222 gimple_stmt_iterator gsi, dgsi;
3224 bool any_clobbers = false;
3226 /* Only optimize if BB has a single EH successor and
3227 all predecessor edges are EH too. */
3228 if (!single_succ_p (bb)
3229 || (single_succ_edge (bb)->flags & EDGE_EH) == 0)
3232 FOR_EACH_EDGE (e, ei, bb->preds)
3234 if ((e->flags & EDGE_EH) == 0)
3238 /* And BB contains only CLOBBER stmts before the final RESX. */
3240 gsi = gsi_last_bb (bb);
3241 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3243 gimple stmt = gsi_stmt (gsi);
3244 if (is_gimple_debug (stmt))
3246 if (gimple_code (stmt) == GIMPLE_LABEL)
3248 if (!gimple_clobber_p (stmt)
3249 || TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME)
3251 any_clobbers = true;
3256 succbb = single_succ (bb);
3257 dgsi = gsi_after_labels (succbb);
3258 gsi = gsi_last_bb (bb);
3259 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3261 gimple stmt = gsi_stmt (gsi);
3263 if (is_gimple_debug (stmt))
3265 if (gimple_code (stmt) == GIMPLE_LABEL)
3267 unlink_stmt_vdef (stmt);
3268 gsi_remove (&gsi, false);
3269 vdef = gimple_vdef (stmt);
3270 if (vdef && TREE_CODE (vdef) == SSA_NAME)
3272 vdef = SSA_NAME_VAR (vdef);
3273 mark_sym_for_renaming (vdef);
3274 gimple_set_vdef (stmt, vdef);
3275 gimple_set_vuse (stmt, vdef);
3277 release_defs (stmt);
3278 gsi_insert_before (&dgsi, stmt, GSI_SAME_STMT);
3281 return TODO_update_ssa_only_virtuals;
3284 /* At the end of inlining, we can lower EH_DISPATCH. Return true when
3285 we have found some duplicate labels and removed some edges. */
3288 lower_eh_dispatch (basic_block src, gimple stmt)
3290 gimple_stmt_iterator gsi;
3295 bool redirected = false;
3297 region_nr = gimple_eh_dispatch_region (stmt);
3298 r = get_eh_region_from_number (region_nr);
3300 gsi = gsi_last_bb (src);
3306 VEC (tree, heap) *labels = NULL;
3307 tree default_label = NULL;
3311 struct pointer_set_t *seen_values = pointer_set_create ();
3313 /* Collect the labels for a switch. Zero the post_landing_pad
3314 field because we'll no longer have anything keeping these labels
3315 in existence and the optimizer will be free to merge these blocks at will. */
3317 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
3319 tree tp_node, flt_node, lab = c->label;
3320 bool have_label = false;
3323 tp_node = c->type_list;
3324 flt_node = c->filter_list;
3326 if (tp_node == NULL)
3328 default_label = lab;
3333 /* Filter out duplicate labels that arise when this handler
3334 is shadowed by an earlier one. When no labels are
3335 attached to the handler anymore, we remove
3336 the corresponding edge and then we delete unreachable
3337 blocks at the end of this pass. */
3338 if (! pointer_set_contains (seen_values, TREE_VALUE (flt_node)))
3340 tree t = build_case_label (TREE_VALUE (flt_node),
3342 VEC_safe_push (tree, heap, labels, t);
3343 pointer_set_insert (seen_values, TREE_VALUE (flt_node));
3347 tp_node = TREE_CHAIN (tp_node);
3348 flt_node = TREE_CHAIN (flt_node);
3353 remove_edge (find_edge (src, label_to_block (lab)));
3358 /* Clean up the edge flags. */
3359 FOR_EACH_EDGE (e, ei, src->succs)
3361 if (e->flags & EDGE_FALLTHRU)
3363 /* If there was no catch-all, use the fallthru edge. */
3364 if (default_label == NULL)
3365 default_label = gimple_block_label (e->dest);
3366 e->flags &= ~EDGE_FALLTHRU;
3369 gcc_assert (default_label != NULL);
3371 /* Don't generate a switch if there's only a default case.
3372 This is common in the form of try { A; } catch (...) { B; }. */
3375 e = single_succ_edge (src);
3376 e->flags |= EDGE_FALLTHRU;
3380 fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
3381 x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
3383 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)), NULL);
3384 filter = make_ssa_name (filter, x);
3385 gimple_call_set_lhs (x, filter);
3386 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3388 /* Turn the default label into a default case. */
3389 default_label = build_case_label (NULL, NULL, default_label);
3390 sort_case_labels (labels);
3392 x = gimple_build_switch_vec (filter, default_label, labels);
3393 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3395 VEC_free (tree, heap, labels);
3397 pointer_set_destroy (seen_values);
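/* Schematic result for a try with two catches (illustrative, not
   verbatim dump syntax):

     filter_tmp = __builtin_eh_filter (region_nr);
     switch (filter_tmp)
       {
	 case <filter value of catch 1>: goto <handler label 1>;
	 case <filter value of catch 2>: goto <handler label 2>;
	 default: goto <fallthru or catch-all label>;
       }
*/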
3401 case ERT_ALLOWED_EXCEPTIONS:
3403 edge b_e = BRANCH_EDGE (src);
3404 edge f_e = FALLTHRU_EDGE (src);
3406 fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
3407 x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
3409 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)), NULL);
3410 filter = make_ssa_name (filter, x);
3411 gimple_call_set_lhs (x, filter);
3412 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3414 r->u.allowed.label = NULL;
3415 x = gimple_build_cond (EQ_EXPR, filter,
3416 build_int_cst (TREE_TYPE (filter),
3417 r->u.allowed.filter),
3418 NULL_TREE, NULL_TREE);
3419 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3421 b_e->flags = b_e->flags | EDGE_TRUE_VALUE;
3422 f_e->flags = (f_e->flags & ~EDGE_FALLTHRU) | EDGE_FALSE_VALUE;
3430 /* Replace the EH_DISPATCH with the SWITCH or COND generated above. */
3431 gsi_remove (&gsi, true);
3436 execute_lower_eh_dispatch (void)
3440 bool redirected = false;
3442 assign_filter_values ();
3446 gimple last = last_stmt (bb);
3449 if (gimple_code (last) == GIMPLE_EH_DISPATCH)
3451 redirected |= lower_eh_dispatch (bb, last);
3452 flags |= TODO_update_ssa_only_virtuals;
3454 else if (gimple_code (last) == GIMPLE_RESX)
3456 if (stmt_can_throw_external (last))
3457 optimize_clobbers (bb);
3459 flags |= sink_clobbers (bb);
3464 delete_unreachable_blocks ();
3469 gate_lower_eh_dispatch (void)
3471 return cfun->eh->region_tree != NULL;
3474 struct gimple_opt_pass pass_lower_eh_dispatch =
3478 "ehdisp", /* name */
3479 gate_lower_eh_dispatch, /* gate */
3480 execute_lower_eh_dispatch, /* execute */
3483 0, /* static_pass_number */
3484 TV_TREE_EH, /* tv_id */
3485 PROP_gimple_lcf, /* properties_required */
3486 0, /* properties_provided */
3487 0, /* properties_destroyed */
3488 0, /* todo_flags_start */
3489 TODO_verify_flow /* todo_flags_finish */
3493 /* Walk statements, see what regions are really referenced and remove
3494 those that are unused. */
3497 remove_unreachable_handlers (void)
3499 sbitmap r_reachable, lp_reachable;
3505 r_reachable = sbitmap_alloc (VEC_length (eh_region, cfun->eh->region_array));
3507 = sbitmap_alloc (VEC_length (eh_landing_pad, cfun->eh->lp_array));
3508 sbitmap_zero (r_reachable);
3509 sbitmap_zero (lp_reachable);
3513 gimple_stmt_iterator gsi;
3515 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3517 gimple stmt = gsi_stmt (gsi);
3518 lp_nr = lookup_stmt_eh_lp (stmt);
3520 /* Negative LP numbers are MUST_NOT_THROW regions which
3521 are not considered BB enders. */
3523 SET_BIT (r_reachable, -lp_nr);
3525 /* Positive LP numbers are real landing pads, and are BB enders. */
3528 gcc_assert (gsi_one_before_end_p (gsi));
3529 region = get_eh_region_from_lp_number (lp_nr);
3530 SET_BIT (r_reachable, region->index);
3531 SET_BIT (lp_reachable, lp_nr);
3534 /* Avoid removing regions referenced from RESX/EH_DISPATCH. */
3535 switch (gimple_code (stmt))
3538 SET_BIT (r_reachable, gimple_resx_region (stmt));
3540 case GIMPLE_EH_DISPATCH:
3541 SET_BIT (r_reachable, gimple_eh_dispatch_region (stmt));
3551 fprintf (dump_file, "Before removal of unreachable regions:\n");
3552 dump_eh_tree (dump_file, cfun);
3553 fprintf (dump_file, "Reachable regions: ");
3554 dump_sbitmap_file (dump_file, r_reachable);
3555 fprintf (dump_file, "Reachable landing pads: ");
3556 dump_sbitmap_file (dump_file, lp_reachable);
3560 VEC_iterate (eh_region, cfun->eh->region_array, r_nr, region); ++r_nr)
3561 if (region && !TEST_BIT (r_reachable, r_nr))
3564 fprintf (dump_file, "Removing unreachable region %d\n", r_nr);
3565 remove_eh_handler (region);
3569 VEC_iterate (eh_landing_pad, cfun->eh->lp_array, lp_nr, lp); ++lp_nr)
3570 if (lp && !TEST_BIT (lp_reachable, lp_nr))
3573 fprintf (dump_file, "Removing unreachable landing pad %d\n", lp_nr);
3574 remove_eh_landing_pad (lp);
3579 fprintf (dump_file, "\n\nAfter removal of unreachable regions:\n");
3580 dump_eh_tree (dump_file, cfun);
3581 fprintf (dump_file, "\n\n");
3584 sbitmap_free (r_reachable);
3585 sbitmap_free (lp_reachable);
3587 #ifdef ENABLE_CHECKING
3588 verify_eh_tree (cfun);
3592 /* Remove unreachable handlers if any landing pads have been removed after
3593 the last ehcleanup pass (due to gimple_purge_dead_eh_edges). */
3596 maybe_remove_unreachable_handlers (void)
3601 if (cfun->eh == NULL)
3604 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
3605 if (lp && lp->post_landing_pad)
3607 if (label_to_block (lp->post_landing_pad) == NULL)
3609 remove_unreachable_handlers ();
3615 /* Remove regions that do not have landing pads. This assumes
3616 that remove_unreachable_handlers has already been run, and
3617 that we've just manipulated the landing pads since then. */
3620 remove_unreachable_handlers_no_lp (void)
3624 sbitmap r_reachable;
3627 r_reachable = sbitmap_alloc (VEC_length (eh_region, cfun->eh->region_array));
3628 sbitmap_zero (r_reachable);
3632 gimple stmt = last_stmt (bb);
3634 /* Avoid removing regions referenced from RESX/EH_DISPATCH. */
3635 switch (gimple_code (stmt))
3638 SET_BIT (r_reachable, gimple_resx_region (stmt));
3640 case GIMPLE_EH_DISPATCH:
3641 SET_BIT (r_reachable, gimple_eh_dispatch_region (stmt));
3648 for (i = 1; VEC_iterate (eh_region, cfun->eh->region_array, i, r); ++i)
3649 if (r && r->landing_pads == NULL && r->type != ERT_MUST_NOT_THROW
3650 && !TEST_BIT (r_reachable, i))
3653 fprintf (dump_file, "Removing unreachable region %d\n", i);
3654 remove_eh_handler (r);
3657 sbitmap_free (r_reachable);
3660 /* Undo critical edge splitting on an EH landing pad. Earlier, we
3661 optimistically split all sorts of edges, including EH edges. The
3662 optimization passes in between may not have needed them; if not,
3663 we should undo the split.
3665 Recognize this case by having one EH edge incoming to the BB and
3666 one normal edge outgoing; BB should be empty apart from the
3667 post_landing_pad label.
3669 Note that this is slightly different from the empty handler case
3670 handled by cleanup_empty_eh, in that the actual handler may yet
3671 have actual code but the landing pad has been separated from the
3672 handler. As such, cleanup_empty_eh relies on this transformation
3673 having been done first. */
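/* Sketch of the transformation (illustrative):

     throw-bb  --EH-->  [post_landing_pad label only]  --fallthru-->  handler-bb

   becomes

     throw-bb  --EH-->  handler-bb

   so the empty split block can be removed by a later CFG cleanup. */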
3676 unsplit_eh (eh_landing_pad lp)
3678 basic_block bb = label_to_block (lp->post_landing_pad);
3679 gimple_stmt_iterator gsi;
3682 /* Quickly check the edge counts on BB for singularity. */
3683 if (EDGE_COUNT (bb->preds) != 1 || EDGE_COUNT (bb->succs) != 1)
3685 e_in = EDGE_PRED (bb, 0);
3686 e_out = EDGE_SUCC (bb, 0);
3688 /* Input edge must be EH and output edge must be normal. */
3689 if ((e_in->flags & EDGE_EH) == 0 || (e_out->flags & EDGE_EH) != 0)
3692 /* The block must be empty except for the labels and debug insns. */
3693 gsi = gsi_after_labels (bb);
3694 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
3695 gsi_next_nondebug (&gsi);
3696 if (!gsi_end_p (gsi))
3699 /* The destination block must not already have a landing pad
3700 for a different region. */
3701 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
3703 gimple stmt = gsi_stmt (gsi);
3707 if (gimple_code (stmt) != GIMPLE_LABEL)
3709 lab = gimple_label_label (stmt);
3710 lp_nr = EH_LANDING_PAD_NR (lab);
3711 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
3715 /* The new destination block must not already be a destination of
3716 the source block, lest we merge fallthru and EH edges and get
3717 all sorts of confusion. */
3718 if (find_edge (e_in->src, e_out->dest))
3721 /* ??? We can get degenerate phis due to cfg cleanups. I would have
3722 thought this should have been cleaned up by a phicprop pass, but
3723 that doesn't appear to handle virtuals. Propagate by hand. */
3724 if (!gimple_seq_empty_p (phi_nodes (bb)))
3726 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); )
3728 gimple use_stmt, phi = gsi_stmt (gsi);
3729 tree lhs = gimple_phi_result (phi);
3730 tree rhs = gimple_phi_arg_def (phi, 0);
3731 use_operand_p use_p;
3732 imm_use_iterator iter;
3734 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
3736 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
3737 SET_USE (use_p, rhs);
3740 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3741 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1;
3743 remove_phi_node (&gsi, true);
3747 if (dump_file && (dump_flags & TDF_DETAILS))
3748 fprintf (dump_file, "Unsplit EH landing pad %d to block %i.\n",
3749 lp->index, e_out->dest->index);
3751 /* Redirect the edge. Since redirect_eh_edge_1 expects to be moving
3752 a successor edge, humor it. But do the real CFG change with the
3753 predecessor of E_OUT in order to preserve the ordering of arguments
3754 to the PHI nodes in E_OUT->DEST. */
3755 redirect_eh_edge_1 (e_in, e_out->dest, false);
3756 redirect_edge_pred (e_out, e_in->src);
3757 e_out->flags = e_in->flags;
3758 e_out->probability = e_in->probability;
3759 e_out->count = e_in->count;
3765 /* Examine each landing pad block and see if it matches unsplit_eh. */
3768 unsplit_all_eh (void)
3770 bool changed = false;
3774 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
3776 changed |= unsplit_eh (lp);
3781 /* A subroutine of cleanup_empty_eh. Redirect all EH edges incoming
3782 to OLD_BB to NEW_BB; return true on success, false on failure.
3784 OLD_BB_OUT is the edge into NEW_BB from OLD_BB, so if we miss any
3785 PHI variables from OLD_BB we can pick them up from OLD_BB_OUT.
3786 Virtual PHIs may be deleted and marked for renaming. */
3789 cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb,
3790 edge old_bb_out, bool change_region)
3792 gimple_stmt_iterator ngsi, ogsi;
3795 bitmap rename_virts;
3796 bitmap ophi_handled;
3798 /* The destination block must not be a regular successor for any
3799 of the preds of the landing pad. Thus, avoid turning
3809 the fallthru edge and the EH edge into parallel edges between the same pair of blocks, which CFG verification would choke on. See PR45172 and PR51089. */
3810 FOR_EACH_EDGE (e, ei, old_bb->preds)
3811 if (find_edge (e->src, new_bb))
3814 FOR_EACH_EDGE (e, ei, old_bb->preds)
3815 redirect_edge_var_map_clear (e);
3817 ophi_handled = BITMAP_ALLOC (NULL);
3818 rename_virts = BITMAP_ALLOC (NULL);
3820 /* First, iterate through the PHIs on NEW_BB and set up the edge_var_map
3821 for the edges we're going to move. */
3822 for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); gsi_next (&ngsi))
3824 gimple ophi, nphi = gsi_stmt (ngsi);
3827 nresult = gimple_phi_result (nphi);
3828 nop = gimple_phi_arg_def (nphi, old_bb_out->dest_idx);
3830 /* Find the corresponding PHI in OLD_BB so we can forward-propagate
3831 the source ssa_name. */
3833 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
3835 ophi = gsi_stmt (ogsi);
3836 if (gimple_phi_result (ophi) == nop)
3841 /* If we did find the corresponding PHI, copy those inputs. */
3844 /* If NOP is used somewhere else beyond phis in new_bb, give up. */
3845 if (!has_single_use (nop))
3847 imm_use_iterator imm_iter;
3848 use_operand_p use_p;
3850 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, nop)
3852 if (!gimple_debug_bind_p (USE_STMT (use_p))
3853 && (gimple_code (USE_STMT (use_p)) != GIMPLE_PHI
3854 || gimple_bb (USE_STMT (use_p)) != new_bb))
3858 bitmap_set_bit (ophi_handled, SSA_NAME_VERSION (nop));
3859 FOR_EACH_EDGE (e, ei, old_bb->preds)
3864 if ((e->flags & EDGE_EH) == 0)
3866 oop = gimple_phi_arg_def (ophi, e->dest_idx);
3867 oloc = gimple_phi_arg_location (ophi, e->dest_idx);
3868 redirect_edge_var_map_add (e, nresult, oop, oloc);
3871 /* If we didn't find the PHI, but it's a VOP, remember to rename
3872 it later, assuming all other tests succeed. */
3873 else if (!is_gimple_reg (nresult))
3874 bitmap_set_bit (rename_virts, SSA_NAME_VERSION (nresult));
3875 /* If we didn't find the PHI, and it's a real variable, we know
3876 from the fact that OLD_BB is tree_empty_eh_handler_p that the
3877 variable is unchanged from input to the block and we can simply
3878 re-use the input to NEW_BB from the OLD_BB_OUT edge. */
3882 = gimple_phi_arg_location (nphi, old_bb_out->dest_idx);
3883 FOR_EACH_EDGE (e, ei, old_bb->preds)
3884 redirect_edge_var_map_add (e, nresult, nop, nloc);
3888 /* Second, verify that all PHIs from OLD_BB have been handled. If not,
3889 we don't know what values from the other edges into NEW_BB to use. */
3890 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
3892 gimple ophi = gsi_stmt (ogsi);
3893 tree oresult = gimple_phi_result (ophi);
3894 if (!bitmap_bit_p (ophi_handled, SSA_NAME_VERSION (oresult)))
3898 /* At this point we know that the merge will succeed. Remove the PHI
3899 nodes for the virtuals that we want to rename. */
3900 if (!bitmap_empty_p (rename_virts))
3902 for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); )
3904 gimple nphi = gsi_stmt (ngsi);
3905 tree nresult = gimple_phi_result (nphi);
3906 if (bitmap_bit_p (rename_virts, SSA_NAME_VERSION (nresult)))
3908 mark_virtual_phi_result_for_renaming (nphi);
3909 remove_phi_node (&ngsi, true);
3916 /* Finally, move the edges and update the PHIs. */
3917 for (ei = ei_start (old_bb->preds); (e = ei_safe_edge (ei)); )
3918 if (e->flags & EDGE_EH)
3920 redirect_eh_edge_1 (e, new_bb, change_region);
3921 redirect_edge_succ (e, new_bb);
3922 flush_pending_stmts (e);
3927 BITMAP_FREE (ophi_handled);
3928 BITMAP_FREE (rename_virts);
3932 FOR_EACH_EDGE (e, ei, old_bb->preds)
3933 redirect_edge_var_map_clear (e);
3934 BITMAP_FREE (ophi_handled);
3935 BITMAP_FREE (rename_virts);
3939 /* A subroutine of cleanup_empty_eh. Move a landing pad LP from its
3940 old region to NEW_REGION at BB. */
3943 cleanup_empty_eh_move_lp (basic_block bb, edge e_out,
3944 eh_landing_pad lp, eh_region new_region)
3946 gimple_stmt_iterator gsi;
3949 for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
3953 lp->region = new_region;
3954 lp->next_lp = new_region->landing_pads;
3955 new_region->landing_pads = lp;
3957 /* Delete the RESX that was matched within the empty handler block. */
3958 gsi = gsi_last_bb (bb);
3959 mark_virtual_ops_for_renaming (gsi_stmt (gsi));
3960 gsi_remove (&gsi, true);
3962 /* Clean up E_OUT for the fallthru. */
3963 e_out->flags = (e_out->flags & ~EDGE_EH) | EDGE_FALLTHRU;
3964 e_out->probability = REG_BR_PROB_BASE;
3967 /* A subroutine of cleanup_empty_eh. Handle more complex cases of
3968 unsplitting than unsplit_eh was prepared to handle, e.g. when
3969 multiple incoming edges and phis are involved. */
3972 cleanup_empty_eh_unsplit (basic_block bb, edge e_out, eh_landing_pad lp)
3974 gimple_stmt_iterator gsi;
3977 /* We really ought not have totally lost everything following
3978 a landing pad label. Given that BB is empty, there had better be a successor. */
3980 gcc_assert (e_out != NULL);
3982 /* The destination block must not already have a landing pad
3983 for a different region. */
3985 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
3987 gimple stmt = gsi_stmt (gsi);
3990 if (gimple_code (stmt) != GIMPLE_LABEL)
3992 lab = gimple_label_label (stmt);
3993 lp_nr = EH_LANDING_PAD_NR (lab);
3994 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
3998 /* Attempt to move the PHIs into the successor block. */
3999 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, false))
4001 if (dump_file && (dump_flags & TDF_DETAILS))
4003 "Unsplit EH landing pad %d to block %i "
4004 "(via cleanup_empty_eh).\n",
4005 lp->index, e_out->dest->index);
4012 /* Return true if edge E_FIRST is part of an empty infinite loop
4013 or leads to such a loop through a series of single successor blocks. */
4017 infinite_empty_loop_p (edge e_first)
4019 bool inf_loop = false;
4022 if (e_first->dest == e_first->src)
4025 e_first->src->aux = (void *) 1;
4026 for (e = e_first; single_succ_p (e->dest); e = single_succ_edge (e->dest))
4028 gimple_stmt_iterator gsi;
4034 e->dest->aux = (void *) 1;
4035 gsi = gsi_after_labels (e->dest);
4036 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4037 gsi_next_nondebug (&gsi);
4038 if (!gsi_end_p (gsi))
4041 e_first->src->aux = NULL;
4042 for (e = e_first; e->dest->aux; e = single_succ_edge (e->dest))
4043 e->dest->aux = NULL;
4048 /* Examine the block associated with LP to determine if it's an empty
4049 handler for its EH region. If so, attempt to redirect EH edges to
4050 an outer region. Return true if the CFG was updated in any way. This
4051 is similar to jump forwarding, just across EH edges. */
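/* Illustrative example: a handler block consisting only of

     <post_landing_pad label>:
       resx N;

   where the resx is vectored to an outer region M, forwards all of its
   incoming EH edges toward M's landing pad (or lets the blocks go
   unreachable when M has no landing pad in this function), much as jump
   threading forwards an empty goto block. */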
4054 cleanup_empty_eh (eh_landing_pad lp)
4056 basic_block bb = label_to_block (lp->post_landing_pad);
4057 gimple_stmt_iterator gsi;
4059 eh_region new_region;
4062 bool has_non_eh_pred;
4066 /* There can be zero or one edges out of BB. This is the quickest test. */
4067 switch (EDGE_COUNT (bb->succs))
4073 e_out = EDGE_SUCC (bb, 0);
4079 resx = last_stmt (bb);
4080 if (resx && is_gimple_resx (resx))
4082 if (stmt_can_throw_external (resx))
4083 optimize_clobbers (bb);
4084 else if (sink_clobbers (bb))
4088 gsi = gsi_after_labels (bb);
4090 /* Make sure to skip debug statements. */
4091 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4092 gsi_next_nondebug (&gsi);
4094 /* If the block is totally empty, look for more unsplitting cases. */
4095 if (gsi_end_p (gsi))
4097 /* For the degenerate case of an infinite loop bail out. */
4098 if (infinite_empty_loop_p (e_out))
4101 return ret | cleanup_empty_eh_unsplit (bb, e_out, lp);
4104 /* The block should consist only of a single RESX statement, modulo a
4105 preceding call to __builtin_stack_restore if there is no outgoing
4106 edge, since the call can be eliminated in this case. */
4107 resx = gsi_stmt (gsi);
4108 if (!e_out && gimple_call_builtin_p (resx, BUILT_IN_STACK_RESTORE))
4111 resx = gsi_stmt (gsi);
4113 if (!is_gimple_resx (resx))
4115 gcc_assert (gsi_one_before_end_p (gsi));
4117 /* Determine if there are non-EH edges, or resx edges into the handler. */
4118 has_non_eh_pred = false;
4119 FOR_EACH_EDGE (e, ei, bb->preds)
4120 if (!(e->flags & EDGE_EH))
4121 has_non_eh_pred = true;
4123 /* Find the handler enclosing the empty handler by looking at
4124 where the RESX instruction was vectored. */
4125 new_lp_nr = lookup_stmt_eh_lp (resx);
4126 new_region = get_eh_region_from_lp_number (new_lp_nr);
4128 /* If there's no destination region within the current function,
4129 redirection is trivial via removing the throwing statements from
4130 the EH region, removing the EH edges, and allowing the block
4131 to go unreachable. */
4132 if (new_region == NULL)
4134 gcc_assert (e_out == NULL);
4135 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
4136 if (e->flags & EDGE_EH)
4138 gimple stmt = last_stmt (e->src);
4139 remove_stmt_from_eh_lp (stmt);
4147 /* If the destination region is a MUST_NOT_THROW, allow the runtime
4148 to handle the abort and allow the blocks to go unreachable. */
4149 if (new_region->type == ERT_MUST_NOT_THROW)
4151 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
4152 if (e->flags & EDGE_EH)
4154 gimple stmt = last_stmt (e->src);
4155 remove_stmt_from_eh_lp (stmt);
4156 add_stmt_to_eh_lp (stmt, new_lp_nr);
4164 /* Try to redirect the EH edges and merge the PHIs into the destination
4165 landing pad block. If the merge succeeds, we'll already have redirected
4166 all the EH edges. The handler itself will go unreachable if there were no normal edges. */
4168 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, true))
4171 /* Finally, if all input edges are EH edges, then we can (potentially)
4172 reduce the number of transfers from the runtime by moving the landing
4173 pad from the original region to the new region. This is a win when
4174 we remove the last CLEANUP region along a particular exception
4175 propagation path. Since nothing changes except for the region with
4176 which the landing pad is associated, the PHI nodes do not need to be updated. */
4178 if (!has_non_eh_pred)
4180 cleanup_empty_eh_move_lp (bb, e_out, lp, new_region);
4181 if (dump_file && (dump_flags & TDF_DETAILS))
4182 fprintf (dump_file, "Empty EH handler %i moved to EH region %i.\n",
4183 lp->index, new_region->index);
4185 /* ??? The CFG didn't change, but we may have rendered the
4186 old EH region unreachable. Trigger a cleanup there. */
4193 if (dump_file && (dump_flags & TDF_DETAILS))
4194 fprintf (dump_file, "Empty EH handler %i removed.\n", lp->index);
4195 remove_eh_landing_pad (lp);
4199 /* Do a post-order traversal of the EH region tree. Examine each
4200 post_landing_pad block and see if we can eliminate it as empty. */
4203 cleanup_all_empty_eh (void)
4205 bool changed = false;
4209 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
4211 changed |= cleanup_empty_eh (lp);
4216 /* Perform cleanups and lowering of exception handling:
4217 1) cleanup regions whose handlers do nothing are optimized out
4218 2) MUST_NOT_THROW regions that became dead because of 1) are optimized out
4219 3) info about regions that contain instructions, and regions
4220 reachable via local EH edges, is collected
4221 4) the EH tree is pruned of regions that are no longer necessary.
4223 TODO: Push MUST_NOT_THROW regions to the root of the EH tree.
4224 Unify those that have the same failure decl and locus.
4228 execute_cleanup_eh_1 (void)
4230 /* Do this first: unsplit_all_eh and cleanup_all_empty_eh can die
4231 looking up unreachable landing pads. */
4232 remove_unreachable_handlers ();
4234 /* Watch out for the region tree vanishing because everything in it was unreachable. */
4235 if (cfun->eh->region_tree && optimize)
4237 bool changed = false;
4239 changed |= unsplit_all_eh ();
4240 changed |= cleanup_all_empty_eh ();
4244 free_dominance_info (CDI_DOMINATORS);
4245 free_dominance_info (CDI_POST_DOMINATORS);
4247 /* We delayed all basic block deletion, as we may have performed
4248 cleanups on EH edges while non-EH edges were still present. */
4249 delete_unreachable_blocks ();
4251 /* We manipulated the landing pads. Remove any region that no
4252 longer has a landing pad. */
4253 remove_unreachable_handlers_no_lp ();
4255 return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
4263 execute_cleanup_eh (void)
4265 int ret = execute_cleanup_eh_1 ();
4267 /* If the function no longer needs an EH personality routine,
4268 clear it. This exposes cross-language inlining opportunities
4269 and avoids references to a never defined personality routine. */
4270 if (DECL_FUNCTION_PERSONALITY (current_function_decl)
4271 && function_needs_eh_personality (cfun) != eh_personality_lang)
4272 DECL_FUNCTION_PERSONALITY (current_function_decl) = NULL_TREE;
4278 gate_cleanup_eh (void)
4280 return cfun->eh != NULL && cfun->eh->region_tree != NULL;
4283 struct gimple_opt_pass pass_cleanup_eh = {
4286 "ehcleanup", /* name */
4287 gate_cleanup_eh, /* gate */
4288 execute_cleanup_eh, /* execute */
4291 0, /* static_pass_number */
4292 TV_TREE_EH, /* tv_id */
4293 PROP_gimple_lcf, /* properties_required */
4294 0, /* properties_provided */
4295 0, /* properties_destroyed */
4296 0, /* todo_flags_start */
4297 0 /* todo_flags_finish */
4301 /* Verify that the BB containing STMT as its last statement has precisely
4302 the edge that make_eh_edges would create. */
4305 verify_eh_edges (gimple stmt)
4307 basic_block bb = gimple_bb (stmt);
4308 eh_landing_pad lp = NULL;
4313 lp_nr = lookup_stmt_eh_lp (stmt);
4315 lp = get_eh_landing_pad_from_number (lp_nr);
4318 FOR_EACH_EDGE (e, ei, bb->succs)
4320 if (e->flags & EDGE_EH)
4324 error ("BB %i has multiple EH edges", bb->index);
4336 error ("BB %i can not throw but has an EH edge", bb->index);
4342 if (!stmt_could_throw_p (stmt))
4344 error ("BB %i last statement has incorrectly set lp", bb->index);
4348 if (eh_edge == NULL)
4350 error ("BB %i is missing an EH edge", bb->index);
4354 if (eh_edge->dest != label_to_block (lp->post_landing_pad))
4356 error ("Incorrect EH edge %i->%i", bb->index, eh_edge->dest->index);
4363 /* Similarly, but handle GIMPLE_EH_DISPATCH specifically. */
4366 verify_eh_dispatch_edge (gimple stmt)
4370 basic_block src, dst;
4371 bool want_fallthru = true;
4375 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
4376 src = gimple_bb (stmt);
4378 FOR_EACH_EDGE (e, ei, src->succs)
4379 gcc_assert (e->aux == NULL);
4384 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
4386 dst = label_to_block (c->label);
4387 e = find_edge (src, dst);
4390 error ("BB %i is missing an edge", src->index);
4395 /* A catch-all handler doesn't have a fallthru. */
4396 if (c->type_list == NULL)
4398 want_fallthru = false;
4404 case ERT_ALLOWED_EXCEPTIONS:
4405 dst = label_to_block (r->u.allowed.label);
4406 e = find_edge (src, dst);
4409 error ("BB %i is missing an edge", src->index);
4420 FOR_EACH_EDGE (e, ei, src->succs)
4422 if (e->flags & EDGE_FALLTHRU)
4424 if (fall_edge != NULL)
4426 error ("BB %i too many fallthru edges", src->index);
4435 error ("BB %i has incorrect edge", src->index);
4439 if ((fall_edge != NULL) ^ want_fallthru)
4441 error ("BB %i has incorrect fallthru edge", src->index);