1 /* Exception handling semantics and decomposition for trees.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
31 #include "tree-flow.h"
32 #include "tree-dump.h"
33 #include "tree-inline.h"
34 #include "tree-iterator.h"
35 #include "tree-pass.h"
37 #include "langhooks.h"
43 /* In some instances a tree and a gimple need to be stored in a same table,
44 i.e. in hash tables. This is a structure to do this. */
45 typedef union {tree *tp; tree t; gimple g;} treemple;
/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

/* Called by front ends that want cleanup (destructor) code to be run
   via the exception-handling machinery.  Simply latches the flag.  */

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}
/* Misc functions used in this file.  */

/* Compare and hash for any structure which begins with a canonical
   pointer.  Assumes all pointers are interchangeable, which is sort
   of already assumed by gcc elsewhere IIRC.  */

/* Return nonzero if the leading pointers of *A and *B are equal.  */

int
struct_ptr_eq (const void *a, const void *b)
{
  const void * const * x = (const void * const *) a;
  const void * const * y = (const void * const *) b;
  return *x == *y;
}
71 struct_ptr_hash (const void *a)
73 const void * const * x = (const void * const *) a;
74 return (size_t)*x >> 4;
78 /* Remember and lookup EH landing pad data for arbitrary statements.
79 Really this means any statement that could_throw_p. We could
80 stuff this information into the stmt_ann data structure, but:
82 (1) We absolutely rely on this information being kept until
83 we get to rtl. Once we're done with lowering here, if we lose
84 the information there's no way to recover it!
86 (2) There are many more statements that *cannot* throw as
87 compared to those that can. We should be saving some amount
88 of space by only allocating memory for those that can throw. */
90 /* Add statement T in function IFUN to landing pad NUM. */
93 add_stmt_to_eh_lp_fn (struct function *ifun, gimple t, int num)
95 struct throw_stmt_node *n;
98 gcc_assert (num != 0);
100 n = GGC_NEW (struct throw_stmt_node);
104 if (!get_eh_throw_stmt_table (ifun))
105 set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,
109 slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
114 /* Add statement T in the current function (cfun) to EH landing pad NUM. */
117 add_stmt_to_eh_lp (gimple t, int num)
119 add_stmt_to_eh_lp_fn (cfun, t, num);
122 /* Add statement T to the single EH landing pad in REGION. */
125 record_stmt_eh_region (eh_region region, gimple t)
129 if (region->type == ERT_MUST_NOT_THROW)
130 add_stmt_to_eh_lp_fn (cfun, t, -region->index);
133 eh_landing_pad lp = region->landing_pads;
135 lp = gen_eh_landing_pad (region);
137 gcc_assert (lp->next_lp == NULL);
138 add_stmt_to_eh_lp_fn (cfun, t, lp->index);
143 /* Remove statement T in function IFUN from its EH landing pad. */
146 remove_stmt_from_eh_lp_fn (struct function *ifun, gimple t)
148 struct throw_stmt_node dummy;
151 if (!get_eh_throw_stmt_table (ifun))
155 slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
159 htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
167 /* Remove statement T in the current function (cfun) from its
171 remove_stmt_from_eh_lp (gimple t)
173 return remove_stmt_from_eh_lp_fn (cfun, t);
176 /* Determine if statement T is inside an EH region in function IFUN.
177 Positive numbers indicate a landing pad index; negative numbers
178 indicate a MUST_NOT_THROW region index; zero indicates that the
179 statement is not recorded in the region table. */
182 lookup_stmt_eh_lp_fn (struct function *ifun, gimple t)
184 struct throw_stmt_node *p, n;
186 if (ifun->eh->throw_stmt_table == NULL)
190 p = (struct throw_stmt_node *) htab_find (ifun->eh->throw_stmt_table, &n);
191 return p ? p->lp_nr : 0;
194 /* Likewise, but always use the current function. */
197 lookup_stmt_eh_lp (gimple t)
199 /* We can get called from initialized data when -fnon-call-exceptions
200 is on; prevent crash. */
203 return lookup_stmt_eh_lp_fn (cfun, t);
206 /* Likewise, but reference a tree expression instead. */
209 lookup_expr_eh_lp (tree t)
211 if (cfun && cfun->eh->throw_stmt_table && t && EXPR_P (t))
213 tree_ann_common_t ann = tree_common_ann (t);
221 /* First pass of EH node decomposition. Build up a tree of GIMPLE_TRY_FINALLY
222 nodes and LABEL_DECL nodes. We will use this during the second phase to
223 determine if a goto leaves the body of a TRY_FINALLY_EXPR node. */
225 struct finally_tree_node
227 /* When storing a GIMPLE_TRY, we have to record a gimple. However
228 when deciding whether a GOTO to a certain LABEL_DECL (which is a
229 tree) leaves the TRY block, its necessary to record a tree in
230 this field. Thus a treemple is used. */
235 /* Note that this table is *not* marked GTY. It is short-lived. */
236 static htab_t finally_tree;
239 record_in_finally_tree (treemple child, gimple parent)
241 struct finally_tree_node *n;
244 n = XNEW (struct finally_tree_node);
248 slot = htab_find_slot (finally_tree, n, INSERT);
254 collect_finally_tree (gimple stmt, gimple region);
256 /* Go through the gimple sequence. Works with collect_finally_tree to
257 record all GIMPLE_LABEL and GIMPLE_TRY statements. */
260 collect_finally_tree_1 (gimple_seq seq, gimple region)
262 gimple_stmt_iterator gsi;
264 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
265 collect_finally_tree (gsi_stmt (gsi), region);
269 collect_finally_tree (gimple stmt, gimple region)
273 switch (gimple_code (stmt))
276 temp.t = gimple_label_label (stmt);
277 record_in_finally_tree (temp, region);
281 if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
284 record_in_finally_tree (temp, region);
285 collect_finally_tree_1 (gimple_try_eval (stmt), stmt);
286 collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
288 else if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
290 collect_finally_tree_1 (gimple_try_eval (stmt), region);
291 collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
296 collect_finally_tree_1 (gimple_catch_handler (stmt), region);
299 case GIMPLE_EH_FILTER:
300 collect_finally_tree_1 (gimple_eh_filter_failure (stmt), region);
304 /* A type, a decl, or some kind of statement that we're not
305 interested in. Don't walk them. */
311 /* Use the finally tree to determine if a jump from START to TARGET
312 would leave the try_finally node that START lives in. */
315 outside_finally_tree (treemple start, gimple target)
317 struct finally_tree_node n, *p;
322 p = (struct finally_tree_node *) htab_find (finally_tree, &n);
327 while (start.g != target);
332 /* Second pass of EH node decomposition. Actually transform the GIMPLE_TRY
333 nodes into a set of gotos, magic labels, and eh regions.
334 The eh region creation is straight-forward, but frobbing all the gotos
335 and such into shape isn't. */
337 /* The sequence into which we record all EH stuff. This will be
338 placed at the end of the function when we're all done. */
339 static gimple_seq eh_seq;
341 /* Record whether an EH region contains something that can throw,
342 indexed by EH region number. */
343 static bitmap eh_region_may_contain_throw;
345 /* The GOTO_QUEUE is is an array of GIMPLE_GOTO and GIMPLE_RETURN
346 statements that are seen to escape this GIMPLE_TRY_FINALLY node.
347 The idea is to record a gimple statement for everything except for
348 the conditionals, which get their labels recorded. Since labels are
349 of type 'tree', we need this node to store both gimple and tree
350 objects. REPL_STMT is the sequence used to replace the goto/return
351 statement. CONT_STMT is used to store the statement that allows
352 the return/goto to jump to the original destination. */
354 struct goto_queue_node
357 gimple_seq repl_stmt;
360 /* This is used when index >= 0 to indicate that stmt is a label (as
361 opposed to a goto stmt). */
365 /* State of the world while lowering. */
369 /* What's "current" while constructing the eh region tree. These
370 correspond to variables of the same name in cfun->eh, which we
371 don't have easy access to. */
372 eh_region cur_region;
374 /* What's "current" for the purposes of __builtin_eh_pointer. For
375 a CATCH, this is the associated TRY. For an EH_FILTER, this is
376 the associated ALLOWED_EXCEPTIONS, etc. */
377 eh_region ehp_region;
379 /* Processing of TRY_FINALLY requires a bit more state. This is
380 split out into a separate structure so that we don't have to
381 copy so much when processing other nodes. */
382 struct leh_tf_state *tf;
387 /* Pointer to the GIMPLE_TRY_FINALLY node under discussion. The
388 try_finally_expr is the original GIMPLE_TRY_FINALLY. We need to retain
389 this so that outside_finally_tree can reliably reference the tree used
390 in the collect_finally_tree data structures. */
391 gimple try_finally_expr;
394 /* While lowering a top_p usually it is expanded into multiple statements,
395 thus we need the following field to store them. */
396 gimple_seq top_p_seq;
398 /* The state outside this try_finally node. */
399 struct leh_state *outer;
401 /* The exception region created for it. */
404 /* The goto queue. */
405 struct goto_queue_node *goto_queue;
406 size_t goto_queue_size;
407 size_t goto_queue_active;
409 /* Pointer map to help in searching goto_queue when it is large. */
410 struct pointer_map_t *goto_queue_map;
412 /* The set of unique labels seen as entries in the goto queue. */
413 VEC(tree,heap) *dest_array;
415 /* A label to be added at the end of the completed transformed
416 sequence. It will be set if may_fallthru was true *at one time*,
417 though subsequent transformations may have cleared that flag. */
420 /* True if it is possible to fall out the bottom of the try block.
421 Cleared if the fallthru is converted to a goto. */
424 /* True if any entry in goto_queue is a GIMPLE_RETURN. */
427 /* True if the finally block can receive an exception edge.
428 Cleared if the exception case is handled by code duplication. */
432 static gimple_seq lower_eh_must_not_throw (struct leh_state *, gimple);
434 /* Search for STMT in the goto queue. Return the replacement,
435 or null if the statement isn't in the queue. */
437 #define LARGE_GOTO_QUEUE 20
439 static void lower_eh_constructs_1 (struct leh_state *state, gimple_seq seq);
442 find_goto_replacement (struct leh_tf_state *tf, treemple stmt)
447 if (tf->goto_queue_active < LARGE_GOTO_QUEUE)
449 for (i = 0; i < tf->goto_queue_active; i++)
450 if ( tf->goto_queue[i].stmt.g == stmt.g)
451 return tf->goto_queue[i].repl_stmt;
455 /* If we have a large number of entries in the goto_queue, create a
456 pointer map and use that for searching. */
458 if (!tf->goto_queue_map)
460 tf->goto_queue_map = pointer_map_create ();
461 for (i = 0; i < tf->goto_queue_active; i++)
463 slot = pointer_map_insert (tf->goto_queue_map,
464 tf->goto_queue[i].stmt.g);
465 gcc_assert (*slot == NULL);
466 *slot = &tf->goto_queue[i];
470 slot = pointer_map_contains (tf->goto_queue_map, stmt.g);
472 return (((struct goto_queue_node *) *slot)->repl_stmt);
477 /* A subroutine of replace_goto_queue_1. Handles the sub-clauses of a
478 lowered GIMPLE_COND. If, by chance, the replacement is a simple goto,
479 then we can just splat it in, otherwise we add the new stmts immediately
480 after the GIMPLE_COND and redirect. */
483 replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
484 gimple_stmt_iterator *gsi)
489 location_t loc = gimple_location (gsi_stmt (*gsi));
492 new_seq = find_goto_replacement (tf, temp);
496 if (gimple_seq_singleton_p (new_seq)
497 && gimple_code (gimple_seq_first_stmt (new_seq)) == GIMPLE_GOTO)
499 *tp = gimple_goto_dest (gimple_seq_first_stmt (new_seq));
503 label = create_artificial_label (loc);
504 /* Set the new label for the GIMPLE_COND */
507 gsi_insert_after (gsi, gimple_build_label (label), GSI_CONTINUE_LINKING);
508 gsi_insert_seq_after (gsi, gimple_seq_copy (new_seq), GSI_CONTINUE_LINKING);
511 /* The real work of replace_goto_queue. Returns with TSI updated to
512 point to the next statement. */
514 static void replace_goto_queue_stmt_list (gimple_seq, struct leh_tf_state *);
517 replace_goto_queue_1 (gimple stmt, struct leh_tf_state *tf,
518 gimple_stmt_iterator *gsi)
524 switch (gimple_code (stmt))
529 seq = find_goto_replacement (tf, temp);
532 gsi_insert_seq_before (gsi, gimple_seq_copy (seq), GSI_SAME_STMT);
533 gsi_remove (gsi, false);
539 replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 2), tf, gsi);
540 replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 3), tf, gsi);
544 replace_goto_queue_stmt_list (gimple_try_eval (stmt), tf);
545 replace_goto_queue_stmt_list (gimple_try_cleanup (stmt), tf);
548 replace_goto_queue_stmt_list (gimple_catch_handler (stmt), tf);
550 case GIMPLE_EH_FILTER:
551 replace_goto_queue_stmt_list (gimple_eh_filter_failure (stmt), tf);
555 /* These won't have gotos in them. */
562 /* A subroutine of replace_goto_queue. Handles GIMPLE_SEQ. */
565 replace_goto_queue_stmt_list (gimple_seq seq, struct leh_tf_state *tf)
567 gimple_stmt_iterator gsi = gsi_start (seq);
569 while (!gsi_end_p (gsi))
570 replace_goto_queue_1 (gsi_stmt (gsi), tf, &gsi);
573 /* Replace all goto queue members. */
576 replace_goto_queue (struct leh_tf_state *tf)
578 if (tf->goto_queue_active == 0)
580 replace_goto_queue_stmt_list (tf->top_p_seq, tf);
583 /* Add a new record to the goto queue contained in TF. NEW_STMT is the
584 data to be added, IS_LABEL indicates whether NEW_STMT is a label or
588 record_in_goto_queue (struct leh_tf_state *tf,
594 struct goto_queue_node *q;
596 gcc_assert (!tf->goto_queue_map);
598 active = tf->goto_queue_active;
599 size = tf->goto_queue_size;
602 size = (size ? size * 2 : 32);
603 tf->goto_queue_size = size;
605 = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
608 q = &tf->goto_queue[active];
609 tf->goto_queue_active = active + 1;
611 memset (q, 0, sizeof (*q));
614 q->is_label = is_label;
617 /* Record the LABEL label in the goto queue contained in TF.
621 record_in_goto_queue_label (struct leh_tf_state *tf, treemple stmt, tree label)
624 treemple temp, new_stmt;
629 /* Computed and non-local gotos do not get processed. Given
630 their nature we can neither tell whether we've escaped the
631 finally block nor redirect them if we knew. */
632 if (TREE_CODE (label) != LABEL_DECL)
635 /* No need to record gotos that don't leave the try block. */
637 if (!outside_finally_tree (temp, tf->try_finally_expr))
640 if (! tf->dest_array)
642 tf->dest_array = VEC_alloc (tree, heap, 10);
643 VEC_quick_push (tree, tf->dest_array, label);
648 int n = VEC_length (tree, tf->dest_array);
649 for (index = 0; index < n; ++index)
650 if (VEC_index (tree, tf->dest_array, index) == label)
653 VEC_safe_push (tree, heap, tf->dest_array, label);
656 /* In the case of a GOTO we want to record the destination label,
657 since with a GIMPLE_COND we have an easy access to the then/else
660 record_in_goto_queue (tf, new_stmt, index, true);
664 /* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally
665 node, and if so record that fact in the goto queue associated with that
669 maybe_record_in_goto_queue (struct leh_state *state, gimple stmt)
671 struct leh_tf_state *tf = state->tf;
677 switch (gimple_code (stmt))
680 new_stmt.tp = gimple_op_ptr (stmt, 2);
681 record_in_goto_queue_label (tf, new_stmt, gimple_cond_true_label (stmt));
682 new_stmt.tp = gimple_op_ptr (stmt, 3);
683 record_in_goto_queue_label (tf, new_stmt, gimple_cond_false_label (stmt));
687 record_in_goto_queue_label (tf, new_stmt, gimple_goto_dest (stmt));
691 tf->may_return = true;
693 record_in_goto_queue (tf, new_stmt, -1, false);
#ifdef ENABLE_CHECKING
/* We do not process GIMPLE_SWITCHes for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer GIMPLE_TRY_FINALLY nodes.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state, gimple switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;

  if (!tf)
    return;

  n = gimple_switch_num_labels (switch_expr);

  for (i = 0; i < n; ++i)
    {
      treemple temp;
      tree lab = CASE_LABEL (gimple_switch_label (switch_expr, i));
      temp.t = lab;
      gcc_assert (!outside_finally_tree (temp, tf->try_finally_expr));
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif
730 /* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB. Place in CONT_P
731 whatever is needed to finish the return. If MOD is non-null, insert it
732 before the new branch. RETURN_VALUE_P is a cache containing a temporary
733 variable to be used in manipulating the value returned from the function. */
736 do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
737 tree *return_value_p)
742 /* In the case of a return, the queue node must be a gimple statement. */
743 gcc_assert (!q->is_label);
745 ret_expr = gimple_return_retval (q->stmt.g);
749 if (!*return_value_p)
750 *return_value_p = ret_expr;
752 gcc_assert (*return_value_p == ret_expr);
753 q->cont_stmt = q->stmt.g;
754 /* The nasty part about redirecting the return value is that the
755 return value itself is to be computed before the FINALLY block
769 should return 0, not 1. Arrange for this to happen by copying
770 computed the return value into a local temporary. This also
771 allows us to redirect multiple return statements through the
772 same destination block; whether this is a net win or not really
773 depends, I guess, but it does make generation of the switch in
774 lower_try_finally_switch easier. */
776 if (TREE_CODE (ret_expr) == RESULT_DECL)
778 if (!*return_value_p)
779 *return_value_p = ret_expr;
781 gcc_assert (*return_value_p == ret_expr);
782 q->cont_stmt = q->stmt.g;
788 /* If we don't return a value, all return statements are the same. */
789 q->cont_stmt = q->stmt.g;
792 q->repl_stmt = gimple_seq_alloc ();
795 gimple_seq_add_seq (&q->repl_stmt, mod);
797 x = gimple_build_goto (finlab);
798 gimple_seq_add_stmt (&q->repl_stmt, x);
801 /* Similar, but easier, for GIMPLE_GOTO. */
804 do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
805 struct leh_tf_state *tf)
809 gcc_assert (q->is_label);
811 q->repl_stmt = gimple_seq_alloc ();
813 q->cont_stmt = gimple_build_goto (VEC_index (tree, tf->dest_array, q->index));
816 gimple_seq_add_seq (&q->repl_stmt, mod);
818 x = gimple_build_goto (finlab);
819 gimple_seq_add_stmt (&q->repl_stmt, x);
822 /* Emit a standard landing pad sequence into SEQ for REGION. */
825 emit_post_landing_pad (gimple_seq *seq, eh_region region)
827 eh_landing_pad lp = region->landing_pads;
831 lp = gen_eh_landing_pad (region);
833 lp->post_landing_pad = create_artificial_label (UNKNOWN_LOCATION);
834 EH_LANDING_PAD_NR (lp->post_landing_pad) = lp->index;
836 x = gimple_build_label (lp->post_landing_pad);
837 gimple_seq_add_stmt (seq, x);
840 /* Emit a RESX statement into SEQ for REGION. */
843 emit_resx (gimple_seq *seq, eh_region region)
845 gimple x = gimple_build_resx (region->index);
846 gimple_seq_add_stmt (seq, x);
848 record_stmt_eh_region (region->outer, x);
851 /* Emit an EH_DISPATCH statement into SEQ for REGION. */
854 emit_eh_dispatch (gimple_seq *seq, eh_region region)
856 gimple x = gimple_build_eh_dispatch (region->index);
857 gimple_seq_add_stmt (seq, x);
860 /* Note that the current EH region may contain a throw, or a
861 call to a function which itself may contain a throw. */
864 note_eh_region_may_contain_throw (eh_region region)
866 while (!bitmap_bit_p (eh_region_may_contain_throw, region->index))
868 bitmap_set_bit (eh_region_may_contain_throw, region->index);
869 region = region->outer;
875 /* We want to transform
876 try { body; } catch { stuff; }
886 TP is a GIMPLE_TRY node. REGION is the region whose post_landing_pad
887 should be placed before the second operand, or NULL. OVER is
888 an existing label that should be put at the exit, or NULL. */
891 frob_into_branch_around (gimple tp, eh_region region, tree over)
894 gimple_seq cleanup, result;
895 location_t loc = gimple_location (tp);
897 cleanup = gimple_try_cleanup (tp);
898 result = gimple_try_eval (tp);
901 emit_post_landing_pad (&eh_seq, region);
903 if (gimple_seq_may_fallthru (cleanup))
906 over = create_artificial_label (loc);
907 x = gimple_build_goto (over);
908 gimple_seq_add_stmt (&cleanup, x);
910 gimple_seq_add_seq (&eh_seq, cleanup);
914 x = gimple_build_label (over);
915 gimple_seq_add_stmt (&result, x);
920 /* A subroutine of lower_try_finally. Duplicate the tree rooted at T.
921 Make sure to record all new labels found. */
924 lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state)
926 gimple region = NULL;
929 new_seq = copy_gimple_seq_and_replace_locals (seq);
932 region = outer_state->tf->try_finally_expr;
933 collect_finally_tree_1 (new_seq, region);
938 /* A subroutine of lower_try_finally. Create a fallthru label for
939 the given try_finally state. The only tricky bit here is that
940 we have to make sure to record the label in our outer context. */
943 lower_try_finally_fallthru_label (struct leh_tf_state *tf)
945 tree label = tf->fallthru_label;
950 label = create_artificial_label (gimple_location (tf->try_finally_expr));
951 tf->fallthru_label = label;
955 record_in_finally_tree (temp, tf->outer->tf->try_finally_expr);
961 /* A subroutine of lower_try_finally. If lang_protect_cleanup_actions
962 returns non-null, then the language requires that the exception path out
963 of a try_finally be treated specially. To wit: the code within the
964 finally block may not itself throw an exception. We have two choices here.
965 First we can duplicate the finally block and wrap it in a must_not_throw
966 region. Second, we can generate code like
971 if (fintmp == eh_edge)
972 protect_cleanup_actions;
975 where "fintmp" is the temporary used in the switch statement generation
976 alternative considered below. For the nonce, we always choose the first
979 THIS_STATE may be null if this is a try-cleanup, not a try-finally. */
982 honor_protect_cleanup_actions (struct leh_state *outer_state,
983 struct leh_state *this_state,
984 struct leh_tf_state *tf)
986 tree protect_cleanup_actions;
987 gimple_stmt_iterator gsi;
988 bool finally_may_fallthru;
992 /* First check for nothing to do. */
993 if (lang_protect_cleanup_actions == NULL)
995 protect_cleanup_actions = lang_protect_cleanup_actions ();
996 if (protect_cleanup_actions == NULL)
999 finally = gimple_try_cleanup (tf->top_p);
1000 finally_may_fallthru = gimple_seq_may_fallthru (finally);
1002 /* Duplicate the FINALLY block. Only need to do this for try-finally,
1003 and not for cleanups. */
1005 finally = lower_try_finally_dup_block (finally, outer_state);
1007 /* If this cleanup consists of a TRY_CATCH_EXPR with TRY_CATCH_IS_CLEANUP
1008 set, the handler of the TRY_CATCH_EXPR is another cleanup which ought
1009 to be in an enclosing scope, but needs to be implemented at this level
1010 to avoid a nesting violation (see wrap_temporary_cleanups in
1011 cp/decl.c). Since it's logically at an outer level, we should call
1012 terminate before we get to it, so strip it away before adding the
1013 MUST_NOT_THROW filter. */
1014 gsi = gsi_start (finally);
1016 if (gimple_code (x) == GIMPLE_TRY
1017 && gimple_try_kind (x) == GIMPLE_TRY_CATCH
1018 && gimple_try_catch_is_cleanup (x))
1020 gsi_insert_seq_before (&gsi, gimple_try_eval (x), GSI_SAME_STMT);
1021 gsi_remove (&gsi, false);
1024 /* Wrap the block with protect_cleanup_actions as the action. */
1025 x = gimple_build_eh_must_not_throw (protect_cleanup_actions);
1026 x = gimple_build_try (finally, gimple_seq_alloc_with_stmt (x),
1028 finally = lower_eh_must_not_throw (outer_state, x);
1030 /* Drop all of this into the exception sequence. */
1031 emit_post_landing_pad (&eh_seq, tf->region);
1032 gimple_seq_add_seq (&eh_seq, finally);
1033 if (finally_may_fallthru)
1034 emit_resx (&eh_seq, tf->region);
1036 /* Having now been handled, EH isn't to be considered with
1037 the rest of the outgoing edges. */
1038 tf->may_throw = false;
1041 /* A subroutine of lower_try_finally. We have determined that there is
1042 no fallthru edge out of the finally block. This means that there is
1043 no outgoing edge corresponding to any incoming edge. Restructure the
1044 try_finally node for this special case. */
1047 lower_try_finally_nofallthru (struct leh_state *state,
1048 struct leh_tf_state *tf)
1050 tree lab, return_val;
1053 struct goto_queue_node *q, *qe;
1055 lab = create_artificial_label (gimple_location (tf->try_finally_expr));
1057 /* We expect that tf->top_p is a GIMPLE_TRY. */
1058 finally = gimple_try_cleanup (tf->top_p);
1059 tf->top_p_seq = gimple_try_eval (tf->top_p);
1061 x = gimple_build_label (lab);
1062 gimple_seq_add_stmt (&tf->top_p_seq, x);
1066 qe = q + tf->goto_queue_active;
1069 do_return_redirection (q, lab, NULL, &return_val);
1071 do_goto_redirection (q, lab, NULL, tf);
1073 replace_goto_queue (tf);
1075 lower_eh_constructs_1 (state, finally);
1076 gimple_seq_add_seq (&tf->top_p_seq, finally);
1080 emit_post_landing_pad (&eh_seq, tf->region);
1082 x = gimple_build_goto (lab);
1083 gimple_seq_add_stmt (&eh_seq, x);
1087 /* A subroutine of lower_try_finally. We have determined that there is
1088 exactly one destination of the finally block. Restructure the
1089 try_finally node for this special case. */
1092 lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
1094 struct goto_queue_node *q, *qe;
1098 location_t loc = gimple_location (tf->try_finally_expr);
1100 finally = gimple_try_cleanup (tf->top_p);
1101 tf->top_p_seq = gimple_try_eval (tf->top_p);
1103 lower_eh_constructs_1 (state, finally);
1107 /* Only reachable via the exception edge. Add the given label to
1108 the head of the FINALLY block. Append a RESX at the end. */
1109 emit_post_landing_pad (&eh_seq, tf->region);
1110 gimple_seq_add_seq (&eh_seq, finally);
1111 emit_resx (&eh_seq, tf->region);
1115 if (tf->may_fallthru)
1117 /* Only reachable via the fallthru edge. Do nothing but let
1118 the two blocks run together; we'll fall out the bottom. */
1119 gimple_seq_add_seq (&tf->top_p_seq, finally);
1123 finally_label = create_artificial_label (loc);
1124 x = gimple_build_label (finally_label);
1125 gimple_seq_add_stmt (&tf->top_p_seq, x);
1127 gimple_seq_add_seq (&tf->top_p_seq, finally);
1130 qe = q + tf->goto_queue_active;
1134 /* Reachable by return expressions only. Redirect them. */
1135 tree return_val = NULL;
1137 do_return_redirection (q, finally_label, NULL, &return_val);
1138 replace_goto_queue (tf);
1142 /* Reachable by goto expressions only. Redirect them. */
1144 do_goto_redirection (q, finally_label, NULL, tf);
1145 replace_goto_queue (tf);
1147 if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label)
1149 /* Reachable by goto to fallthru label only. Redirect it
1150 to the new label (already created, sadly), and do not
1151 emit the final branch out, or the fallthru label. */
1152 tf->fallthru_label = NULL;
1157 /* Place the original return/goto to the original destination
1158 immediately after the finally block. */
1159 x = tf->goto_queue[0].cont_stmt;
1160 gimple_seq_add_stmt (&tf->top_p_seq, x);
1161 maybe_record_in_goto_queue (state, x);
1164 /* A subroutine of lower_try_finally. There are multiple edges incoming
1165 and outgoing from the finally block. Implement this by duplicating the
1166 finally block for every destination. */
1169 lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
1172 gimple_seq new_stmt;
1176 location_t tf_loc = gimple_location (tf->try_finally_expr);
1178 finally = gimple_try_cleanup (tf->top_p);
1179 tf->top_p_seq = gimple_try_eval (tf->top_p);
1182 if (tf->may_fallthru)
1184 seq = lower_try_finally_dup_block (finally, state);
1185 lower_eh_constructs_1 (state, seq);
1186 gimple_seq_add_seq (&new_stmt, seq);
1188 tmp = lower_try_finally_fallthru_label (tf);
1189 x = gimple_build_goto (tmp);
1190 gimple_seq_add_stmt (&new_stmt, x);
1195 emit_post_landing_pad (&eh_seq, tf->region);
1197 seq = lower_try_finally_dup_block (finally, state);
1198 lower_eh_constructs_1 (state, seq);
1199 gimple_seq_add_seq (&eh_seq, seq);
1201 emit_resx (&eh_seq, tf->region);
1206 struct goto_queue_node *q, *qe;
1207 tree return_val = NULL;
1208 int return_index, index;
1211 struct goto_queue_node *q;
1215 return_index = VEC_length (tree, tf->dest_array);
1216 labels = XCNEWVEC (struct labels_s, return_index + 1);
1219 qe = q + tf->goto_queue_active;
1222 index = q->index < 0 ? return_index : q->index;
1224 if (!labels[index].q)
1225 labels[index].q = q;
1228 for (index = 0; index < return_index + 1; index++)
1232 q = labels[index].q;
1236 lab = labels[index].label
1237 = create_artificial_label (tf_loc);
1239 if (index == return_index)
1240 do_return_redirection (q, lab, NULL, &return_val);
1242 do_goto_redirection (q, lab, NULL, tf);
1244 x = gimple_build_label (lab);
1245 gimple_seq_add_stmt (&new_stmt, x);
1247 seq = lower_try_finally_dup_block (finally, state);
1248 lower_eh_constructs_1 (state, seq);
1249 gimple_seq_add_seq (&new_stmt, seq);
1251 gimple_seq_add_stmt (&new_stmt, q->cont_stmt);
1252 maybe_record_in_goto_queue (state, q->cont_stmt);
1255 for (q = tf->goto_queue; q < qe; q++)
1259 index = q->index < 0 ? return_index : q->index;
1261 if (labels[index].q == q)
1264 lab = labels[index].label;
1266 if (index == return_index)
1267 do_return_redirection (q, lab, NULL, &return_val);
1269 do_goto_redirection (q, lab, NULL, tf);
1272 replace_goto_queue (tf);
1276 /* Need to link new stmts after running replace_goto_queue due
1277 to not wanting to process the same goto stmts twice. */
1278 gimple_seq_add_seq (&tf->top_p_seq, new_stmt);
1281 /* A subroutine of lower_try_finally.  There are multiple edges incoming
1282    and outgoing from the finally block.  Implement this by instrumenting
1283    each incoming edge and creating a switch statement at the end of the
1284    finally block that branches to the appropriate destination.  */
/* NOTE(review): this listing is elided -- the embedded original line numbers
   jump, so intermediate statements (braces, else-arms, loop headers) are
   missing between consecutive lines below.  Do not assume adjacency.  */
1287 lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
1289 struct goto_queue_node *q, *qe;
1290 tree return_val = NULL;
1291 tree finally_tmp, finally_label;
1292 int return_index, eh_index, fallthru_index;
1293 int nlabels, ndests, j, last_case_index;
1295 VEC (tree,heap) *case_label_vec;
1296 gimple_seq switch_body;
1301 struct pointer_map_t *cont_map = NULL;
1302 /* The location of the TRY_FINALLY stmt.  */
1303 location_t tf_loc = gimple_location (tf->try_finally_expr);
1304 /* The location of the finally block.  */
1305 location_t finally_loc;
/* Fresh sequence that will accumulate the body of the generated switch.  */
1307 switch_body = gimple_seq_alloc ();
1309 /* Mash the TRY block to the head of the chain.  */
1310 finally = gimple_try_cleanup (tf->top_p);
1311 tf->top_p_seq = gimple_try_eval (tf->top_p);
1313 /* The location of the finally is either the last stmt in the finally
1314    block or the location of the TRY_FINALLY itself.  */
1315 finally_loc = gimple_seq_last_stmt (tf->top_p_seq) != NULL ?
1316 gimple_location (gimple_seq_last_stmt (tf->top_p_seq))
1319 /* Lower the finally block itself.  */
1320 lower_eh_constructs_1 (state, finally);
1322 /* Prepare for switch statement generation.  */
/* Destinations are numbered: the goto-queue labels first, then the
   (optional) return, EH, and fallthru pseudo-destinations.  */
1323 nlabels = VEC_length (tree, tf->dest_array);
1324 return_index = nlabels;
1325 eh_index = return_index + tf->may_return;
1326 fallthru_index = eh_index + tf->may_throw;
1327 ndests = fallthru_index + tf->may_fallthru;
1329 finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
1330 finally_label = create_artificial_label (finally_loc);
1332 /* We use VEC_quick_push on case_label_vec throughout this function,
1333    since we know the size in advance and allocate precisely as much
1335 case_label_vec = VEC_alloc (tree, heap, ndests);
1337 last_case_index = 0;
1339 /* Begin inserting code for getting to the finally block.  Things
1340    are done in this order to correspond to the sequence the code is
/* Fallthru path: set finally_tmp to the fallthru index before entering
   the finally block, and emit the matching case in the switch body.  */
1343 if (tf->may_fallthru)
1345 x = gimple_build_assign (finally_tmp,
1346 build_int_cst (NULL, fallthru_index));
1347 gimple_seq_add_stmt (&tf->top_p_seq, x);
1349 last_case = build3 (CASE_LABEL_EXPR, void_type_node,
1350 build_int_cst (NULL, fallthru_index),
1351 NULL, create_artificial_label (tf_loc));
1352 VEC_quick_push (tree, case_label_vec, last_case);
1355 x = gimple_build_label (CASE_LABEL (last_case));
1356 gimple_seq_add_stmt (&switch_body, x);
1358 tmp = lower_try_finally_fallthru_label (tf);
1359 x = gimple_build_goto (tmp);
1360 gimple_seq_add_stmt (&switch_body, x);
/* EH path (guard elided in this listing): route exceptions through the
   post-landing-pad, set finally_tmp to the EH index, and resume.  */
1365 emit_post_landing_pad (&eh_seq, tf->region);
1367 x = gimple_build_assign (finally_tmp,
1368 build_int_cst (NULL, eh_index));
1369 gimple_seq_add_stmt (&eh_seq, x);
1371 x = gimple_build_goto (finally_label);
1372 gimple_seq_add_stmt (&eh_seq, x);
1374 last_case = build3 (CASE_LABEL_EXPR, void_type_node,
1375 build_int_cst (NULL, eh_index),
1376 NULL, create_artificial_label (tf_loc));
1377 VEC_quick_push (tree, case_label_vec, last_case);
1380 x = gimple_build_label (CASE_LABEL (last_case));
1381 gimple_seq_add_stmt (&eh_seq, x);
1382 emit_resx (&eh_seq, tf->region);
/* The finally block proper follows the instrumented entry paths.  */
1385 x = gimple_build_label (finally_label);
1386 gimple_seq_add_stmt (&tf->top_p_seq, x);
1388 gimple_seq_add_seq (&tf->top_p_seq, finally);
1390 /* Redirect each incoming goto edge.  */
1392 qe = q + tf->goto_queue_active;
1393 j = last_case_index + tf->may_return;
1394 /* Prepare the assignments to finally_tmp that are executed upon the
1395    entrance through a particular edge.  */
1400 unsigned int case_index;
1402 mod = gimple_seq_alloc ();
/* Return edge (guard elided): record the return index in finally_tmp
   and redirect the return through the finally label.  */
1406 x = gimple_build_assign (finally_tmp,
1407 build_int_cst (NULL, return_index));
1408 gimple_seq_add_stmt (&mod, x);
1409 do_return_redirection (q, finally_label, mod, &return_val);
1410 switch_id = return_index;
/* Plain goto edge: record the queue entry's destination index.  */
1414 x = gimple_build_assign (finally_tmp,
1415 build_int_cst (NULL, q->index));
1416 gimple_seq_add_stmt (&mod, x);
1417 do_goto_redirection (q, finally_label, mod, tf);
1418 switch_id = q->index;
1421 case_index = j + q->index;
/* Only build a case label the first time this destination is seen.  */
1422 if (VEC_length (tree, case_label_vec) <= case_index
1423 || !VEC_index (tree, case_label_vec, case_index))
1427 case_lab = build3 (CASE_LABEL_EXPR, void_type_node,
1428 build_int_cst (NULL, switch_id),
1430 /* We store the cont_stmt in the pointer map, so that we can recover
1431    it in the loop below.  We don't create the new label while
1432    walking the goto_queue because pointers don't offer a stable
1435 cont_map = pointer_map_create ();
1436 slot = pointer_map_insert (cont_map, case_lab);
1437 *slot = q->cont_stmt;
1438 VEC_quick_push (tree, case_label_vec, case_lab);
/* Second pass: materialize a real label for each case and append the
   continuation statement recorded in CONT_MAP.  */
1441 for (j = last_case_index; j < last_case_index + nlabels; j++)
1447 last_case = VEC_index (tree, case_label_vec, j);
1449 gcc_assert (last_case);
1450 gcc_assert (cont_map);
1452 slot = pointer_map_contains (cont_map, last_case);
1453 /* As the comment above suggests, CASE_LABEL (last_case) was just a
1454    placeholder, it does not store an actual label, yet.  */
1456 cont_stmt = *(gimple *) slot;
1458 label = create_artificial_label (tf_loc);
1459 CASE_LABEL (last_case) = label;
1461 x = gimple_build_label (label);
1462 gimple_seq_add_stmt (&switch_body, x);
1463 gimple_seq_add_stmt (&switch_body, cont_stmt);
1464 maybe_record_in_goto_queue (state, cont_stmt);
1467 pointer_map_destroy (cont_map);
1469 replace_goto_queue (tf);
1471 /* Make sure that the last case is the default label, as one is required.
1472    Then sort the labels, which is also required in GIMPLE.  */
1473 CASE_LOW (last_case) = NULL;
1474 sort_case_labels (case_label_vec);
1476 /* Build the switch statement, setting last_case to be the default
1478 switch_stmt = gimple_build_switch_vec (finally_tmp, last_case,
1480 gimple_set_location (switch_stmt, finally_loc);
1482 /* Need to link SWITCH_STMT after running replace_goto_queue
1483    due to not wanting to process the same goto stmts twice.  */
1484 gimple_seq_add_stmt (&tf->top_p_seq, switch_stmt);
1485 gimple_seq_add_seq (&tf->top_p_seq, switch_body);
1488 /* Decide whether or not we are going to duplicate the finally block.
1489    There are several considerations.
1491    First, if this is Java, then the finally block contains code
1492    written by the user.  It has line numbers associated with it,
1493    so duplicating the block means it's difficult to set a breakpoint.
1494    Since controlling code generation via -g is verboten, we simply
1495    never duplicate code without optimization.
1497    Second, we'd like to prevent egregious code growth.  One way to
1498    do this is to estimate the size of the finally block, multiply
1499    that by the number of copies we'd need to make, and compare against
1500    the estimate of the size of the switch machinery we'd have to add.  */
/* Returns nonzero when copying (lower_try_finally_copy) is preferred over
   the switch strategy (lower_try_finally_switch).  NOTE(review): the guards
   selecting between the two final return statements are elided from this
   listing -- presumably an optimize/no-optimize split; confirm upstream.  */
1503 decide_copy_try_finally (int ndests, gimple_seq finally)
1505 int f_estimate, sw_estimate;
1510 /* Finally estimate N times, plus N gotos.  */
1511 f_estimate = count_insns_seq (finally, &eni_size_weights);
1512 f_estimate = (f_estimate + 1) * ndests;
1514 /* Switch statement (cost 10), N variable assignments, N gotos.  */
1515 sw_estimate = 10 + 2 * ndests;
1517 /* Optimize for size clearly wants our best guess.  */
1518 if (optimize_function_for_size_p (cfun))
1519 return f_estimate < sw_estimate;
1521 /* ??? These numbers are completely made up so far.  */
1523 return f_estimate < 100 || f_estimate < sw_estimate * 2;
1525 return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
1529 /* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_FINALLY nodes
1530    to a sequence of labels and blocks, plus the exception region trees
1531    that record all the magic.  This is complicated by the need to
1532    arrange for the FINALLY block to be executed on all exits.  */
/* Returns the lowered replacement sequence (this_tf.top_p_seq).
   NOTE(review): interior lines are elided in this listing.  */
1535 lower_try_finally (struct leh_state *state, gimple tp)
1537 struct leh_tf_state this_tf;
1538 struct leh_state this_state;
1541 /* Process the try block.  */
1543 memset (&this_tf, 0, sizeof (this_tf));
1544 this_tf.try_finally_expr = tp;
1546 this_tf.outer = state;
/* Only create a cleanup EH region when EH is used for cleanups.  */
1547 if (using_eh_for_cleanups_p)
1548 this_tf.region = gen_eh_region_cleanup (state->cur_region);
1550 this_tf.region = NULL;
1552 this_state.cur_region = this_tf.region;
1553 this_state.ehp_region = state->ehp_region;
1554 this_state.tf = &this_tf;
1556 lower_eh_constructs_1 (&this_state, gimple_try_eval(tp));
1558 /* Determine if the try block is escaped through the bottom.  */
1559 this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
1561 /* Determine if any exceptions are possible within the try block.  */
1562 if (using_eh_for_cleanups_p)
1563 this_tf.may_throw = bitmap_bit_p (eh_region_may_contain_throw,
1564 this_tf.region->index);
1565 if (this_tf.may_throw)
1566 honor_protect_cleanup_actions (state, &this_state, &this_tf);
1568 /* Determine how many edges (still) reach the finally block.  Or rather,
1569    how many destinations are reached by the finally block.  Use this to
1570    determine how we process the finally block itself.  */
1572 ndests = VEC_length (tree, this_tf.dest_array);
1573 ndests += this_tf.may_fallthru;
1574 ndests += this_tf.may_return;
1575 ndests += this_tf.may_throw;
1577 /* If the FINALLY block is not reachable, dike it out.  */
1580 gimple_seq_add_seq (&this_tf.top_p_seq, gimple_try_eval (tp));
1581 gimple_try_set_cleanup (tp, NULL);
1583 /* If the finally block doesn't fall through, then any destination
1584    we might try to impose there isn't reached either.  There may be
1585    some minor amount of cleanup and redirection still needed.  */
1586 else if (!gimple_seq_may_fallthru (gimple_try_cleanup (tp)))
1587 lower_try_finally_nofallthru (state, &this_tf);
1589 /* We can easily special-case redirection to a single destination.  */
1590 else if (ndests == 1)
1591 lower_try_finally_onedest (state, &this_tf);
/* Otherwise choose between copying the finally block per destination
   and dispatching through a generated switch, based on size estimates.  */
1592 else if (decide_copy_try_finally (ndests, gimple_try_cleanup (tp)))
1593 lower_try_finally_copy (state, &this_tf);
1595 lower_try_finally_switch (state, &this_tf);
1597 /* If someone requested we add a label at the end of the transformed
1599 if (this_tf.fallthru_label)
1601 /* This must be reached only if ndests == 0.  */
1602 gimple x = gimple_build_label (this_tf.fallthru_label);
1603 gimple_seq_add_stmt (&this_tf.top_p_seq, x);
/* Release the per-try-finally bookkeeping.  */
1606 VEC_free (tree, heap, this_tf.dest_array);
1607 if (this_tf.goto_queue)
1608 free (this_tf.goto_queue);
1609 if (this_tf.goto_queue_map)
1610 pointer_map_destroy (this_tf.goto_queue_map);
1612 return this_tf.top_p_seq;
1615 /* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_CATCH with a
1616    list of GIMPLE_CATCH to a sequence of labels and blocks, plus the
1617    exception region trees that records all the magic.  */
/* NOTE(review): interior lines are elided in this listing (loop bodies,
   braces, some declarations).  */
1620 lower_catch (struct leh_state *state, gimple tp)
1622 eh_region try_region;
1623 struct leh_state this_state;
1624 gimple_stmt_iterator gsi;
1628 location_t try_catch_loc = gimple_location (tp);
1630 try_region = gen_eh_region_try (state->cur_region);
1632 this_state = *state;
1633 this_state.cur_region = try_region;
1635 lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));
/* If nothing inside the try can throw, the catch machinery is dead;
   just return the lowered try body.  */
1637 if (!bitmap_bit_p (eh_region_may_contain_throw, try_region->index))
1638 return gimple_try_eval (tp);
1641 emit_eh_dispatch (&new_seq, try_region);
1642 emit_resx (&new_seq, try_region);
1644 this_state.cur_region = state->cur_region;
1645 this_state.ehp_region = try_region;
/* Walk each GIMPLE_CATCH handler, lower it, give it a label, and
   append it (plus a goto past the handlers when it can fall through).  */
1648 for (gsi = gsi_start (gimple_try_cleanup (tp));
1656 gcatch = gsi_stmt (gsi);
1657 c = gen_eh_region_catch (try_region, gimple_catch_types (gcatch));
1659 handler = gimple_catch_handler (gcatch);
1660 lower_eh_constructs_1 (&this_state, handler);
1662 c->label = create_artificial_label (UNKNOWN_LOCATION);
1663 x = gimple_build_label (c->label);
1664 gimple_seq_add_stmt (&new_seq, x);
1666 gimple_seq_add_seq (&new_seq, handler);
1668 if (gimple_seq_may_fallthru (new_seq))
1671 out_label = create_artificial_label (try_catch_loc);
1673 x = gimple_build_goto (out_label);
1674 gimple_seq_add_stmt (&new_seq, x);
1678 gimple_try_set_cleanup (tp, new_seq);
1680 return frob_into_branch_around (tp, try_region, out_label);
1683 /* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY with a
1684    GIMPLE_EH_FILTER to a sequence of labels and blocks, plus the exception
1685    region trees that record all the magic.  */
/* NOTE(review): interior lines are elided in this listing.  */
1688 lower_eh_filter (struct leh_state *state, gimple tp)
1690 struct leh_state this_state;
1691 eh_region this_region;
1695 inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
1697 this_region = gen_eh_region_allowed (state->cur_region,
1698 gimple_eh_filter_types (inner));
1699 this_state = *state;
1700 this_state.cur_region = this_region;
1702 lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));
/* If the protected body cannot throw, drop the filter entirely.  */
1704 if (!bitmap_bit_p (eh_region_may_contain_throw, this_region->index))
1705 return gimple_try_eval (tp);
1708 this_state.cur_region = state->cur_region;
1709 this_state.ehp_region = this_region;
1711 emit_eh_dispatch (&new_seq, this_region);
1712 emit_resx (&new_seq, this_region);
1714 this_region->u.allowed.label = create_artificial_label (UNKNOWN_LOCATION);
1715 x = gimple_build_label (this_region->u.allowed.label);
1716 gimple_seq_add_stmt (&new_seq, x);
/* Lower and append the filter-failure actions after the allowed label.  */
1718 lower_eh_constructs_1 (&this_state, gimple_eh_filter_failure (inner));
1719 gimple_seq_add_seq (&new_seq, gimple_eh_filter_failure (inner));
1721 gimple_try_set_cleanup (tp, new_seq);
1723 return frob_into_branch_around (tp, this_region, NULL);
1726 /* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY with
1727    an GIMPLE_EH_MUST_NOT_THROW to a sequence of labels and blocks,
1728    plus the exception region trees that record all the magic.  */
/* NOTE(review): interior lines are elided in this listing.  */
1731 lower_eh_must_not_throw (struct leh_state *state, gimple tp)
1733 struct leh_state this_state;
1734 eh_region this_region;
1737 inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
1739 this_region = gen_eh_region_must_not_throw (state->cur_region);
1740 this_region->u.must_not_throw.failure_decl
1741 = gimple_eh_must_not_throw_fndecl (inner);
1742 this_region->u.must_not_throw.failure_loc = gimple_location (tp);
1744 /* In order to get mangling applied to this decl, we must mark it
1745    used now.  Otherwise, pass_ipa_free_lang_data won't think it
1747 TREE_USED (this_region->u.must_not_throw.failure_decl) = 1;
1749 this_state = *state;
1750 this_state.cur_region = this_region;
1752 lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));
1754 return gimple_try_eval (tp);
1757 /* Implement a cleanup expression.  This is similar to try-finally,
1758    except that we only execute the cleanup block for exception edges.  */
/* NOTE(review): interior lines are elided in this listing.  */
1761 lower_cleanup (struct leh_state *state, gimple tp)
1763 struct leh_state this_state;
1764 eh_region this_region;
1765 struct leh_tf_state fake_tf;
1768 /* If not using eh, then exception-only cleanups are no-ops.  */
1769 if (!flag_exceptions)
1771 result = gimple_try_eval (tp);
1772 lower_eh_constructs_1 (state, result);
1776 this_region = gen_eh_region_cleanup (state->cur_region);
1777 this_state = *state;
1778 this_state.cur_region = this_region;
1780 lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));
/* If the body cannot throw, the exception-only cleanup is dead.  */
1782 if (!bitmap_bit_p (eh_region_may_contain_throw, this_region->index))
1783 return gimple_try_eval (tp);
1785 /* Build enough of a try-finally state so that we can reuse
1786    honor_protect_cleanup_actions.  */
1787 memset (&fake_tf, 0, sizeof (fake_tf));
1788 fake_tf.top_p = fake_tf.try_finally_expr = tp;
1789 fake_tf.outer = state;
1790 fake_tf.region = this_region;
1791 fake_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
1792 fake_tf.may_throw = true;
1794 honor_protect_cleanup_actions (state, NULL, &fake_tf);
1796 if (fake_tf.may_throw)
1798 /* In this case honor_protect_cleanup_actions had nothing to do,
1799    and we should process this normally.  */
1800 lower_eh_constructs_1 (state, gimple_try_cleanup (tp));
1801 result = frob_into_branch_around (tp, this_region,
1802 fake_tf.fallthru_label)
1806 /* In this case honor_protect_cleanup_actions did nearly all of
1807    the work.  All we have left is to append the fallthru_label.  */
1809 result = gimple_try_eval (tp);
1810 if (fake_tf.fallthru_label)
1812 gimple x = gimple_build_label (fake_tf.fallthru_label);
1813 gimple_seq_add_stmt (&result, x);
1819 /* Main loop for lowering eh constructs.  Also moves gsi to the next
/* Dispatches on the gimple code of the statement at *GSI and rewrites
   EH-related constructs in place.  NOTE(review): the case labels of the
   outer switch and several braces are elided in this listing.  */
1823 lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi)
1827 gimple stmt = gsi_stmt (*gsi);
1829 switch (gimple_code (stmt))
/* GIMPLE_CALL case (label elided): special-case EH builtins.  */
1833 tree fndecl = gimple_call_fndecl (stmt);
1836 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1837 switch (DECL_FUNCTION_CODE (fndecl))
1839 case BUILT_IN_EH_POINTER:
1840 /* The front end may have generated a call to
1841    __builtin_eh_pointer (0) within a catch region.  Replace
1842    this zero argument with the current catch region number.  */
1843 if (state->ehp_region)
1845 tree nr = build_int_cst (NULL, state->ehp_region->index);
1846 gimple_call_set_arg (stmt, 0, nr);
1850 /* The user has done something silly.  Remove it.  */
1851 rhs = build_int_cst (ptr_type_node, 0);
1856 case BUILT_IN_EH_FILTER:
1857 /* ??? This should never appear, but since it's a builtin it
1858    is accessible to abuse by users.  Just remove it and
1859    replace the use with the arbitrary value zero.  */
1860 rhs = build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
1862 lhs = gimple_call_lhs (stmt);
1863 x = gimple_build_assign (lhs, rhs);
1864 gsi_insert_before (gsi, x, GSI_SAME_STMT);
1867 case BUILT_IN_EH_COPY_VALUES:
1868 /* Likewise this should not appear.  Remove it.  */
1869 gsi_remove (gsi, true);
1879 /* If the stmt can throw use a new temporary for the assignment
1880    to a LHS.  This makes sure the old value of the LHS is
1881    available on the EH edge.  */
1882 if (stmt_could_throw_p (stmt)
1883 && gimple_has_lhs (stmt)
1884 && !tree_could_throw_p (gimple_get_lhs (stmt))
1885 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
1887 tree lhs = gimple_get_lhs (stmt);
1888 tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
1889 gimple s = gimple_build_assign (lhs, tmp);
1890 gimple_set_location (s, gimple_location (stmt));
1891 gimple_set_block (s, gimple_block (stmt));
1892 gimple_set_lhs (stmt, tmp);
1893 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
1894 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
1895 DECL_GIMPLE_REG_P (tmp) = 1;
1896 gsi_insert_after (gsi, s, GSI_SAME_STMT);
1898 /* Look for things that can throw exceptions, and record them.  */
1899 if (state->cur_region && stmt_could_throw_p (stmt))
1901 record_stmt_eh_region (state->cur_region, stmt);
1902 note_eh_region_may_contain_throw (state->cur_region);
/* GIMPLE_GOTO / GIMPLE_RETURN case (labels elided).  */
1909 maybe_record_in_goto_queue (state, stmt);
/* GIMPLE_SWITCH case (label elided).  */
1913 verify_norecord_switch_expr (state, stmt);
/* GIMPLE_TRY case (label elided): lower per cleanup kind.  */
1917 if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
1918 replace = lower_try_finally (state, stmt);
1921 x = gimple_seq_first_stmt (gimple_try_cleanup (stmt));
1922 switch (gimple_code (x))
1925 replace = lower_catch (state, stmt);
1927 case GIMPLE_EH_FILTER:
1928 replace = lower_eh_filter (state, stmt);
1930 case GIMPLE_EH_MUST_NOT_THROW:
1931 replace = lower_eh_must_not_throw (state, stmt);
1934 replace = lower_cleanup (state, stmt);
1939 /* Remove the old stmt and insert the transformed sequence
1941 gsi_insert_seq_before (gsi, replace, GSI_SAME_STMT);
1942 gsi_remove (gsi, true);
1944 /* Return since we don't want gsi_next () */
1948 /* A type, a decl, or some kind of statement that we're not
1949    interested in.  Don't walk them.  */
1956 /* A helper to unwrap a gimple_seq and feed stmts to lower_eh_constructs_2.  */
/* The iterator is advanced inside lower_eh_constructs_2, hence the
   empty increment expression in the for loop.  */
1959 lower_eh_constructs_1 (struct leh_state *state, gimple_seq seq)
1961 gimple_stmt_iterator gsi;
1962 for (gsi = gsi_start (seq); !gsi_end_p (gsi);)
1963 lower_eh_constructs_2 (state, &gsi);
/* Pass entry point: lower all EH constructs in the current function's
   body.  NOTE(review): interior lines are elided in this listing.  */
1967 lower_eh_constructs (void)
1969 struct leh_state null_state;
1972 bodyp = gimple_body (current_function_decl);
/* Set up the per-function tables consumed by the lowering walk.  */
1976 finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
1977 eh_region_may_contain_throw = BITMAP_ALLOC (NULL);
1978 memset (&null_state, 0, sizeof (null_state));
1980 collect_finally_tree_1 (bodyp, NULL);
1981 lower_eh_constructs_1 (&null_state, bodyp);
1983 /* We assume there's a return statement, or something, at the end of
1984    the function, and thus ploping the EH sequence afterward won't
1986 gcc_assert (!gimple_seq_may_fallthru (bodyp));
1987 gimple_seq_add_seq (&bodyp, eh_seq);
1989 /* We assume that since BODYP already existed, adding EH_SEQ to it
1990    didn't change its value, and we don't have to re-set the function.  */
1991 gcc_assert (bodyp == gimple_body (current_function_decl));
1993 htab_delete (finally_tree);
1994 BITMAP_FREE (eh_region_may_contain_throw);
1997 /* If this function needs a language specific EH personality routine
1998    and the frontend didn't already set one do so now.  */
1999 if (function_needs_eh_personality (cfun) == eh_personality_lang
2000 && !DECL_FUNCTION_PERSONALITY (current_function_decl))
2001 DECL_FUNCTION_PERSONALITY (current_function_decl)
2002 = lang_hooks.eh_personality ();
/* Pass descriptor registering lower_eh_constructs with the pass manager;
   requires lowered control flow (lcf) and provides lowered EH (leh).  */
2007 struct gimple_opt_pass pass_lower_eh =
2013 lower_eh_constructs, /* execute */
2016 0, /* static_pass_number */
2017 TV_TREE_EH, /* tv_id */
2018 PROP_gimple_lcf, /* properties_required */
2019 PROP_gimple_leh, /* properties_provided */
2020 0, /* properties_destroyed */
2021 0, /* todo_flags_start */
2022 TODO_dump_func /* todo_flags_finish */
2026 /* Create the multiple edges from an EH_DISPATCH statement to all of
2027    the possible handlers for its EH region.  Return true if there's
2028    no fallthru edge; false if there is.  */
/* NOTE(review): the switch over the region type and several braces are
   elided in this listing.  */
2031 make_eh_dispatch_edges (gimple stmt)
2035 basic_block src, dst;
2037 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
2038 src = gimple_bb (stmt);
/* ERT_TRY case (elided): one edge per catch handler.  */
2043 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
2045 dst = label_to_block (c->label);
2046 make_edge (src, dst, 0);
2048 /* A catch-all handler doesn't have a fallthru.  */
2049 if (c->type_list == NULL)
2054 case ERT_ALLOWED_EXCEPTIONS:
2055 dst = label_to_block (r->u.allowed.label);
2056 make_edge (src, dst, 0);
2066 /* Create the single EH edge from STMT to its nearest landing pad,
2067    if there is such a landing pad within the current function.  */
2070 make_eh_edges (gimple stmt)
2072 basic_block src, dst;
/* A positive landing-pad number identifies an in-function landing pad;
   the early-return for other values is elided in this listing.  */
2076 lp_nr = lookup_stmt_eh_lp (stmt);
2080 lp = get_eh_landing_pad_from_number (lp_nr);
2081 gcc_assert (lp != NULL);
2083 src = gimple_bb (stmt);
2084 dst = label_to_block (lp->post_landing_pad);
2085 make_edge (src, dst, EDGE_EH);
2088 /* Do the work in redirecting EDGE_IN to NEW_BB within the EH region tree;
2089    do not actually perform the final edge redirection.
2091    CHANGE_REGION is true when we're being called from cleanup_empty_eh and
2092    we intend to change the destination EH region as well; this means
2093    EH_LANDING_PAD_NR must already be set on the destination block label.
2094    If false, we're being called from generic cfg manipulation code and we
2095    should preserve our place within the region tree.  */
/* NOTE(review): interior lines are elided in this listing.  */
2098 redirect_eh_edge_1 (edge edge_in, basic_block new_bb, bool change_region)
2100 eh_landing_pad old_lp, new_lp;
2103 int old_lp_nr, new_lp_nr;
2104 tree old_label, new_label;
2108 old_bb = edge_in->dest;
2109 old_label = gimple_block_label (old_bb);
2110 old_lp_nr = EH_LANDING_PAD_NR (old_label);
2111 gcc_assert (old_lp_nr > 0);
2112 old_lp = get_eh_landing_pad_from_number (old_lp_nr);
2114 throw_stmt = last_stmt (edge_in->src);
2115 gcc_assert (lookup_stmt_eh_lp (throw_stmt) == old_lp_nr);
2117 new_label = gimple_block_label (new_bb);
2119 /* Look for an existing region that might be using NEW_BB already.  */
2120 new_lp_nr = EH_LANDING_PAD_NR (new_label);
2123 new_lp = get_eh_landing_pad_from_number (new_lp_nr);
2124 gcc_assert (new_lp);
2126 /* Unless CHANGE_REGION is true, the new and old landing pad
2127    had better be associated with the same EH region.  */
2128 gcc_assert (change_region || new_lp->region == old_lp->region);
2133 gcc_assert (!change_region);
2136 /* Notice when we redirect the last EH edge away from OLD_BB.  */
2137 FOR_EACH_EDGE (e, ei, old_bb->preds)
2138 if (e != edge_in && (e->flags & EDGE_EH))
2143 /* NEW_LP already exists.  If there are still edges into OLD_LP,
2144    there's nothing to do with the EH tree.  If there are no more
2145    edges into OLD_LP, then we want to remove OLD_LP as it is unused.
2146    If CHANGE_REGION is true, then our caller is expecting to remove
2148 if (e == NULL && !change_region)
2149 remove_eh_landing_pad (old_lp);
2153 /* No correct landing pad exists.  If there are no more edges
2154    into OLD_LP, then we can simply re-use the existing landing pad.
2155    Otherwise, we have to create a new landing pad.  */
2158 EH_LANDING_PAD_NR (old_lp->post_landing_pad) = 0;
2162 new_lp = gen_eh_landing_pad (old_lp->region);
2163 new_lp->post_landing_pad = new_label;
2164 EH_LANDING_PAD_NR (new_label) = new_lp->index;
2167 /* Maybe move the throwing statement to the new region.  */
2168 if (old_lp != new_lp)
2170 remove_stmt_from_eh_lp (throw_stmt);
2171 add_stmt_to_eh_lp (throw_stmt, new_lp->index);
2175 /* Redirect EH edge E to NEW_BB.  */
/* Thin wrapper: update the EH region tree first, then let the SSA
   machinery perform the actual CFG edge redirection.  */
2178 redirect_eh_edge (edge edge_in, basic_block new_bb)
2180 redirect_eh_edge_1 (edge_in, new_bb, false);
2181 return ssa_redirect_edge (edge_in, new_bb);
2184 /* This is a subroutine of gimple_redirect_edge_and_branch.  Update the
2185    labels for redirecting a non-fallthru EH_DISPATCH edge E to NEW_BB.
2186    The actual edge update will happen in the caller.  */
/* NOTE(review): the switch over the region type is elided in this
   listing.  */
2189 redirect_eh_dispatch_edge (gimple stmt, edge e, basic_block new_bb)
2191 tree new_lab = gimple_block_label (new_bb);
2192 bool any_changed = false;
2197 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
/* ERT_TRY case (elided): retarget every catch label pointing at E's
   destination.  */
2201 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
2203 old_bb = label_to_block (c->label);
2204 if (old_bb == e->dest)
2212 case ERT_ALLOWED_EXCEPTIONS:
2213 old_bb = label_to_block (r->u.allowed.label);
2214 gcc_assert (old_bb == e->dest);
2215 r->u.allowed.label = new_lab;
2223 gcc_assert (any_changed);
2226 /* Helper function for operation_could_trap_p and stmt_could_throw_p.  */
/* Classifies tree codes by trappiness.  NOTE(review): several case labels
   and returns are elided in this listing.  */
2229 operation_could_trap_helper_p (enum tree_code op,
/* Division and modulus may trap on zero divisors or with -ftrapv /
   signaling NaNs.  */
2240 case TRUNC_DIV_EXPR:
2242 case FLOOR_DIV_EXPR:
2243 case ROUND_DIV_EXPR:
2244 case EXACT_DIV_EXPR:
2246 case FLOOR_MOD_EXPR:
2247 case ROUND_MOD_EXPR:
2248 case TRUNC_MOD_EXPR:
2250 if (honor_snans || honor_trapv)
2253 return flag_trapping_math;
2254 if (!TREE_CONSTANT (divisor) || integer_zerop (divisor))
2263 /* Some floating point comparisons may trap.  */
2268 case UNORDERED_EXPR:
2278 case FIX_TRUNC_EXPR:
2279 /* Conversion of floating point might trap.  */
2285 /* These operations don't trap with floating point.  */
2293 /* Any floating arithmetic may trap.  */
2294 if (fp_operation && flag_trapping_math)
2301 /* Any floating arithmetic may trap.  */
2302 if (fp_operation && flag_trapping_math)
2310 /* Return true if operation OP may trap.  FP_OPERATION is true if OP is applied
2311    on floating-point values.  HONOR_TRAPV is true if OP is applied on integer
2312    type operands that may trap.  If OP is a division operator, DIVISOR contains
2313    the value of the divisor.  */
2316 operation_could_trap_p (enum tree_code op, bool fp_operation, bool honor_trapv,
/* Derive the NaN-honoring flags from the FP flags in effect, then defer
   to the per-opcode helper; non-arithmetic classes never trap.  */
2319 bool honor_nans = (fp_operation && flag_trapping_math
2320 && !flag_finite_math_only);
2321 bool honor_snans = fp_operation && flag_signaling_nans != 0;
2324 if (TREE_CODE_CLASS (op) != tcc_comparison
2325 && TREE_CODE_CLASS (op) != tcc_unary
2326 && TREE_CODE_CLASS (op) != tcc_binary)
2329 return operation_could_trap_helper_p (op, fp_operation, honor_trapv,
2330 honor_nans, honor_snans, divisor,
2334 /* Return true if EXPR can trap, as in dereferencing an invalid pointer
2335    location or floating point arithmetic.  C.f. the rtl version, may_trap_p.
2336    This routine expects only GIMPLE lhs or rhs input.  */
/* NOTE(review): case labels and several returns are elided in this
   listing.  */
2339 tree_could_trap_p (tree expr)
2341 enum tree_code code;
2342 bool fp_operation = false;
2343 bool honor_trapv = false;
2344 tree t, base, div = NULL_TREE;
2349 code = TREE_CODE (expr);
2350 t = TREE_TYPE (expr);
/* For comparisons the FP-ness comes from the operand type, not the
   (boolean) result type.  */
2354 if (COMPARISON_CLASS_P (expr))
2355 fp_operation = FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)));
2357 fp_operation = FLOAT_TYPE_P (t);
2358 honor_trapv = INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t);
2361 if (TREE_CODE_CLASS (code) == tcc_binary)
2362 div = TREE_OPERAND (expr, 1);
2363 if (operation_could_trap_p (code, fp_operation, honor_trapv, div))
2369 case TARGET_MEM_REF:
2370 /* For TARGET_MEM_REFs use the information based on the original
2372 expr = TMR_ORIGINAL (expr);
2373 code = TREE_CODE (expr);
2380 case VIEW_CONVERT_EXPR:
2381 case WITH_SIZE_EXPR:
/* Wrappers: trappiness is that of the wrapped operand.  */
2382 expr = TREE_OPERAND (expr, 0);
2383 code = TREE_CODE (expr);
2386 case ARRAY_RANGE_REF:
2387 base = TREE_OPERAND (expr, 0);
2388 if (tree_could_trap_p (base))
2390 if (TREE_THIS_NOTRAP (expr))
2392 return !range_in_array_bounds_p (expr);
/* ARRAY_REF case (label elided): same structure as above.  */
2395 base = TREE_OPERAND (expr, 0);
2396 if (tree_could_trap_p (base))
2398 if (TREE_THIS_NOTRAP (expr))
2400 return !in_array_bounds_p (expr);
2403 case ALIGN_INDIRECT_REF:
2404 case MISALIGNED_INDIRECT_REF:
2405 return !TREE_THIS_NOTRAP (expr);
2408 return TREE_THIS_VOLATILE (expr);
/* CALL_EXPR case (label elided).  */
2411 t = get_callee_fndecl (expr);
2412 /* Assume that calls to weak functions may trap.  */
2413 if (!t || !DECL_P (t) || DECL_WEAK (t))
2423 /* Helper for stmt_could_throw_p.  Return true if STMT (assumed to be a
2424    an assignment or a conditional) may throw.  */
/* NOTE(review): some lines (braces, early return) are elided in this
   listing.  */
2427 stmt_could_throw_1_p (gimple stmt)
2429 enum tree_code code = gimple_expr_code (stmt);
2430 bool honor_nans = false;
2431 bool honor_snans = false;
2432 bool fp_operation = false;
2433 bool honor_trapv = false;
2438 if (TREE_CODE_CLASS (code) == tcc_comparison
2439 || TREE_CODE_CLASS (code) == tcc_unary
2440 || TREE_CODE_CLASS (code) == tcc_binary)
2442 t = gimple_expr_type (stmt);
2443 fp_operation = FLOAT_TYPE_P (t);
2446 honor_nans = flag_trapping_math && !flag_finite_math_only;
2447 honor_snans = flag_signaling_nans != 0;
2449 else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
2453 /* Check if the main expression may trap.  */
/* rhs2 is the divisor position for binary assigns; NULL otherwise.  */
2454 t = is_gimple_assign (stmt) ? gimple_assign_rhs2 (stmt) : NULL;
2455 ret = operation_could_trap_helper_p (code, fp_operation, honor_trapv,
2456 honor_nans, honor_snans, t,
2461 /* If the expression does not trap, see if any of the individual operands may
2463 for (i = 0; i < gimple_num_ops (stmt); i++)
2464 if (tree_could_trap_p (gimple_op (stmt, i)))
2471 /* Return true if statement STMT could throw an exception.  */
/* NOTE(review): the case labels of the switch are elided in this
   listing.  */
2474 stmt_could_throw_p (gimple stmt)
2476 if (!flag_exceptions)
2479 /* The only statements that can throw an exception are assignments,
2480    conditionals, calls, resx, and asms.  */
2481 switch (gimple_code (stmt))
2487 return !gimple_call_nothrow_p (stmt);
/* Assign/cond case: only with -fnon-call-exceptions.  */
2491 if (!flag_non_call_exceptions)
2493 return stmt_could_throw_1_p (stmt);
/* Asm case: only volatile asms under -fnon-call-exceptions.  */
2496 if (!flag_non_call_exceptions)
2498 return gimple_asm_volatile_p (stmt);
2506 /* Return true if expression T could throw an exception.  */
2509 tree_could_throw_p (tree t)
2511 if (!flag_exceptions)
/* For a MODIFY_EXPR, a trapping store target throws under
   -fnon-call-exceptions; otherwise inspect the RHS.  */
2513 if (TREE_CODE (t) == MODIFY_EXPR)
2515 if (flag_non_call_exceptions
2516 && tree_could_trap_p (TREE_OPERAND (t, 0)))
2518 t = TREE_OPERAND (t, 1);
2521 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2522 t = TREE_OPERAND (t, 0);
2523 if (TREE_CODE (t) == CALL_EXPR)
2524 return (call_expr_flags (t) & ECF_NOTHROW) == 0;
2525 if (flag_non_call_exceptions)
2526 return tree_could_trap_p (t);
2530 /* Return true if STMT can throw an exception that is not caught within
2531    the current function (CFUN).  */
/* NOTE(review): the final return comparing LP_NR is elided in this
   listing.  */
2534 stmt_can_throw_external (gimple stmt)
2538 if (!stmt_could_throw_p (stmt))
2541 lp_nr = lookup_stmt_eh_lp (stmt);
2545 /* Return true if STMT can throw an exception that is caught within
2546    the current function (CFUN).  */
/* NOTE(review): the final return comparing LP_NR is elided in this
   listing.  */
2549 stmt_can_throw_internal (gimple stmt)
2553 if (!stmt_could_throw_p (stmt))
2556 lp_nr = lookup_stmt_eh_lp (stmt);
2560 /* Given a statement STMT in IFUN, if STMT can no longer throw, then
2561    remove any entry it might have from the EH table.  Return true if
2562    any change was made.  */
2565 maybe_clean_eh_stmt_fn (struct function *ifun, gimple stmt)
/* Still-throwing statements keep their entry (return-false path elided).  */
2567 if (stmt_could_throw_p (stmt))
2569 return remove_stmt_from_eh_lp_fn (ifun, stmt);
2572 /* Likewise, but always use the current function.  */
2575 maybe_clean_eh_stmt (gimple stmt)
2577 return maybe_clean_eh_stmt_fn (cfun, stmt);
2580 /* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
2581    OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
2582    in the table if it should be in there.  Return TRUE if a replacement was
2583    done that my require an EH edge purge.  */
/* NOTE(review): braces and some returns are elided in this listing.  */
2586 maybe_clean_or_replace_eh_stmt (gimple old_stmt, gimple new_stmt)
2588 int lp_nr = lookup_stmt_eh_lp (old_stmt);
2592 bool new_stmt_could_throw = stmt_could_throw_p (new_stmt);
/* In-place replacement that still throws: the table entry stays valid.  */
2594 if (new_stmt == old_stmt && new_stmt_could_throw)
2597 remove_stmt_from_eh_lp (old_stmt);
2598 if (new_stmt_could_throw)
2600 add_stmt_to_eh_lp (new_stmt, lp_nr);
2610 /* Given a statement OLD_STMT in OLD_FUN and a duplicate statment NEW_STMT
2611    in NEW_FUN, copy the EH table data from OLD_STMT to NEW_STMT.  The MAP
2612    operand is the return value of duplicate_eh_regions.  */
/* NOTE(review): braces and early returns are elided in this listing.
   Positive LP numbers name landing pads, negative ones name MUST_NOT_THROW
   regions -- hence the sign-based remapping below.  */
2615 maybe_duplicate_eh_stmt_fn (struct function *new_fun, gimple new_stmt,
2616 struct function *old_fun, gimple old_stmt,
2617 struct pointer_map_t *map, int default_lp_nr)
2619 int old_lp_nr, new_lp_nr;
2622 if (!stmt_could_throw_p (new_stmt))
2625 old_lp_nr = lookup_stmt_eh_lp_fn (old_fun, old_stmt);
2628 if (default_lp_nr == 0)
2630 new_lp_nr = default_lp_nr;
2632 else if (old_lp_nr > 0)
2634 eh_landing_pad old_lp, new_lp;
2636 old_lp = VEC_index (eh_landing_pad, old_fun->eh->lp_array, old_lp_nr);
2637 slot = pointer_map_contains (map, old_lp);
2638 new_lp = (eh_landing_pad) *slot;
2639 new_lp_nr = new_lp->index;
2643 eh_region old_r, new_r;
2645 old_r = VEC_index (eh_region, old_fun->eh->region_array, -old_lp_nr);
2646 slot = pointer_map_contains (map, old_r);
2647 new_r = (eh_region) *slot;
2648 new_lp_nr = -new_r->index;
2651 add_stmt_to_eh_lp_fn (new_fun, new_stmt, new_lp_nr);
2655 /* Similar, but both OLD_STMT and NEW_STMT are within the current function,
2656 and thus no remapping is required. */
2659 maybe_duplicate_eh_stmt (gimple new_stmt, gimple old_stmt)
/* Only throwing statements belong in the EH table.  */
2663 if (!stmt_could_throw_p (new_stmt))
/* Same function, so the old landing-pad number can be reused as-is.  */
2666 lp_nr = lookup_stmt_eh_lp (old_stmt);
2670 add_stmt_to_eh_lp (new_stmt, lp_nr);
2674 /* Returns TRUE if oneh and twoh are exception handlers (gimple_try_cleanup of
2675 GIMPLE_TRY) that are similar enough to be considered the same. Currently
2676 this only handles handlers consisting of a single call, as that's the
2677 important case for C++: a destructor call for a particular object showing
2678 up in multiple handlers. */
2681 same_handler_p (gimple_seq oneh, gimple_seq twoh)
2683 gimple_stmt_iterator gsi;
/* Each sequence must contain exactly one statement; otherwise the
   handlers are not comparable under this heuristic.  */
2687 gsi = gsi_start (oneh);
2688 if (!gsi_one_before_end_p (gsi))
2690 ones = gsi_stmt (gsi);
2692 gsi = gsi_start (twoh);
2693 if (!gsi_one_before_end_p (gsi))
2695 twos = gsi_stmt (gsi);
/* Both statements must be plain calls -- no LHS, no static chain --
   to the same callee with the same number of arguments.  */
2697 if (!is_gimple_call (ones)
2698 || !is_gimple_call (twos)
2699 || gimple_call_lhs (ones)
2700 || gimple_call_lhs (twos)
2701 || gimple_call_chain (ones)
2702 || gimple_call_chain (twos)
2703 || !operand_equal_p (gimple_call_fn (ones), gimple_call_fn (twos), 0)
2704 || gimple_call_num_args (ones) != gimple_call_num_args (twos))
/* Finally, every argument must compare equal position-by-position.  */
2707 for (ai = 0; ai < gimple_call_num_args (ones); ++ai)
2708 if (!operand_equal_p (gimple_call_arg (ones, ai),
2709 gimple_call_arg (twos, ai), 0))
2716 try { A() } finally { try { ~B() } catch { ~A() } }
2717 try { ... } finally { ~A() }
2719 try { A() } catch { ~B() }
2720 try { ~B() ... } finally { ~A() }
2722 This occurs frequently in C++, where A is a local variable and B is a
2723 temporary used in the initializer for A. */
2726 optimize_double_finally (gimple one, gimple two)
2729 gimple_stmt_iterator gsi;
/* ONE's cleanup must be a single statement, and that statement must
   itself be a GIMPLE_TRY/CATCH -- the inner pattern shown above.  */
2731 gsi = gsi_start (gimple_try_cleanup (one));
2732 if (!gsi_one_before_end_p (gsi))
2735 oneh = gsi_stmt (gsi);
2736 if (gimple_code (oneh) != GIMPLE_TRY
2737 || gimple_try_kind (oneh) != GIMPLE_TRY_CATCH)
/* If the inner catch of ONE matches TWO's cleanup (same destructor
   call), rewrite per the pattern in the header comment: ONE becomes a
   TRY_CATCH around the inner body, and a copy of the inner body is
   prepended to TWO's protected sequence.  */
2740 if (same_handler_p (gimple_try_cleanup (oneh), gimple_try_cleanup (two)))
2742 gimple_seq seq = gimple_try_eval (oneh);
2744 gimple_try_set_cleanup (one, seq);
2745 gimple_try_set_kind (one, GIMPLE_TRY_CATCH);
/* Copy with fresh locals so the two uses of the sequence do not
   share SSA/decl state.  */
2746 seq = copy_gimple_seq_and_replace_locals (seq);
2747 gimple_seq_add_seq (&seq, gimple_try_eval (two));
2748 gimple_try_set_eval (two, seq);
2752 /* Perform EH refactoring optimizations that are simpler to do when code
2753 flow has been lowered but EH structures haven't. */
2756 refactor_eh_r (gimple_seq seq)
2758 gimple_stmt_iterator gsi;
/* Walk SEQ pairwise: for each statement ONE and its successor TWO,
   try the double-finally optimization, then recurse into ONE's
   nested sequences.  */
2763 gsi = gsi_start (seq);
2767 if (gsi_end_p (gsi))
2770 two = gsi_stmt (gsi);
/* Adjacent TRY_FINALLYs are the candidate pattern for
   optimize_double_finally.  */
2773 && gimple_code (one) == GIMPLE_TRY
2774 && gimple_code (two) == GIMPLE_TRY
2775 && gimple_try_kind (one) == GIMPLE_TRY_FINALLY
2776 && gimple_try_kind (two) == GIMPLE_TRY_FINALLY
2777 optimize_double_finally (one, two);
/* Recurse into every nested statement sequence ONE may carry.  */
2779 switch (gimple_code (one))
2782 refactor_eh_r (gimple_try_eval (one));
2783 refactor_eh_r (gimple_try_cleanup (one));
2786 refactor_eh_r (gimple_catch_handler (one));
2788 case GIMPLE_EH_FILTER:
2789 refactor_eh_r (gimple_eh_filter_failure (one));
/* Pass entry point: run the recursive refactoring over the entire
   body of the current function.  */
2804 refactor_eh_r (gimple_body (current_function_decl));
/* Gate for pass_refactor_eh: only run when exceptions are enabled.  */
2809 gate_refactor_eh (void)
2811 return flag_exceptions != 0;
/* Pass descriptor for the EH refactoring pass; wires the gate and
   execute hooks above into the pass manager.  */
2814 struct gimple_opt_pass pass_refactor_eh =
2819 gate_refactor_eh, /* gate */
2820 refactor_eh, /* execute */
2823 0, /* static_pass_number */
2824 TV_TREE_EH, /* tv_id */
2825 PROP_gimple_lcf, /* properties_required */
2826 0, /* properties_provided */
2827 0, /* properties_destroyed */
2828 0, /* todo_flags_start */
2829 TODO_dump_func /* todo_flags_finish */
2833 /* At the end of gimple optimization, we can lower RESX. */
2836 lower_resx (basic_block bb, gimple stmt, struct pointer_map_t *mnt_map)
2839 eh_region src_r, dst_r;
2840 gimple_stmt_iterator gsi;
/* DST_R is the region the resumed exception lands in (NULL if it
   escapes the function); SRC_R is the region being resumed from.  */
2845 lp_nr = lookup_stmt_eh_lp (stmt);
2847 dst_r = get_eh_region_from_lp_number (lp_nr);
2851 src_r = get_eh_region_from_number (gimple_resx_region (stmt));
2852 gsi = gsi_last_bb (bb);
2856 /* We can wind up with no source region when pass_cleanup_eh shows
2857 that there are no entries into an eh region and deletes it, but
2858 then the block that contains the resx isn't removed. This can
2859 happen without optimization when the switch statement created by
2860 lower_try_finally_switch isn't simplified to remove the eh case.
2862 Resolve this by expanding the resx node to an abort. */
2864 fn = implicit_built_in_decls[BUILT_IN_TRAP];
2865 x = gimple_build_call (fn, 0);
2866 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
/* A trap never returns, so the block keeps no successors.  */
2868 while (EDGE_COUNT (bb->succs) > 0)
2869 remove_edge (EDGE_SUCC (bb, 0));
2873 /* When we have a destination region, we resolve this by copying
2874 the exception pointer (excptr) and filter values into place, and
2875 changing the edge to immediately after the landing pad. */
2884 /* We are resuming into a MUST_NOT_THROW region. Expand a call to
2885 the failure decl into a new block, if needed. */
2886 gcc_assert (dst_r->type == ERT_MUST_NOT_THROW);
/* MNT_MAP memoizes one failure block per region so multiple resx
   statements targeting the same region share it.  */
2888 slot = pointer_map_contains (mnt_map, dst_r);
2891 gimple_stmt_iterator gsi2;
2893 new_bb = create_empty_bb (bb);
2894 lab = gimple_block_label (new_bb);
2895 gsi2 = gsi_start_bb (new_bb);
2897 fn = dst_r->u.must_not_throw.failure_decl;
2898 x = gimple_build_call (fn, 0);
2899 gimple_set_location (x, dst_r->u.must_not_throw.failure_loc);
2900 gsi_insert_after (&gsi2, x, GSI_CONTINUE_LINKING);
2902 slot = pointer_map_insert (mnt_map, dst_r);
/* Cached case: recover the block from the memoized label.  */
2908 new_bb = label_to_block (lab);
2911 gcc_assert (EDGE_COUNT (bb->succs) == 0);
2912 e = make_edge (bb, new_bb, EDGE_FALLTHRU);
2913 e->count = bb->count;
2914 e->probability = REG_BR_PROB_BASE;
/* Non-MUST_NOT_THROW destination: emit __builtin_eh_copy_values to
   move the exception data from SRC_R to DST_R.  */
2919 tree dst_nr = build_int_cst (NULL, dst_r->index);
2921 fn = implicit_built_in_decls[BUILT_IN_EH_COPY_VALUES];
2922 src_nr = build_int_cst (NULL, src_r->index);
2923 x = gimple_build_call (fn, 2, dst_nr, src_nr);
2924 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
2926 /* Update the flags for the outgoing edge. */
2927 e = single_succ_edge (bb);
2928 gcc_assert (e->flags & EDGE_EH);
2929 e->flags = (e->flags & ~EDGE_EH) | EDGE_FALLTHRU;
2931 /* If there are no more EH users of the landing pad, delete it. */
2932 FOR_EACH_EDGE (e, ei, e->dest->preds)
2933 if (e->flags & EDGE_EH)
2937 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2938 remove_eh_landing_pad (lp);
2948 /* When we don't have a destination region, this exception escapes
2949 up the call chain. We resolve this by generating a call to the
2950 _Unwind_Resume library function. */
2952 /* The ARM EABI redefines _Unwind_Resume as __cxa_end_cleanup
2953 with no arguments for C++ and Java. Check for that. */
2954 if (src_r->use_cxa_end_cleanup)
2956 fn = implicit_built_in_decls[BUILT_IN_CXA_END_CLEANUP];
2957 x = gimple_build_call (fn, 0);
2958 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
/* Standard path: fetch the exception pointer for SRC_R into an SSA
   temporary and pass it to _Unwind_Resume.  */
2962 fn = implicit_built_in_decls[BUILT_IN_EH_POINTER];
2963 src_nr = build_int_cst (NULL, src_r->index);
2964 x = gimple_build_call (fn, 1, src_nr);
2965 var = create_tmp_var (ptr_type_node, NULL);
2966 var = make_ssa_name (var, x);
2967 gimple_call_set_lhs (x, var);
2968 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
2970 fn = implicit_built_in_decls[BUILT_IN_UNWIND_RESUME];
2971 x = gimple_build_call (fn, 1, var);
2972 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
2975 gcc_assert (EDGE_COUNT (bb->succs) == 0);
/* Finally remove the original RESX statement itself.  */
2978 gsi_remove (&gsi, true);
/* Pass driver: lower every RESX statement in the function.  Returns a
   TODO flag set requesting virtual-SSA update when anything changed.  */
2984 execute_lower_resx (void)
2987 struct pointer_map_t *mnt_map;
2988 bool dominance_invalidated = false;
2989 bool any_rewritten = false;
/* MNT_MAP is shared across all lowered resx statements so that
   MUST_NOT_THROW failure blocks are created at most once per region.  */
2991 mnt_map = pointer_map_create ();
2995 gimple last = last_stmt (bb);
2996 if (last && is_gimple_resx (last))
2998 dominance_invalidated |= lower_resx (bb, last, mnt_map);
2999 any_rewritten = true;
3003 pointer_map_destroy (mnt_map);
/* lower_resx may have redirected edges; drop stale dominator info.  */
3005 if (dominance_invalidated)
3007 free_dominance_info (CDI_DOMINATORS);
3008 free_dominance_info (CDI_POST_DOMINATORS);
3011 return any_rewritten ? TODO_update_ssa_only_virtuals : 0;
/* Gate for pass_lower_resx: only run when exceptions are enabled.  */
3015 gate_lower_resx (void)
3017 return flag_exceptions != 0;
/* Pass descriptor for RESX lowering; verify_flow is requested since
   the pass rewrites control-flow edges.  */
3020 struct gimple_opt_pass pass_lower_resx =
3025 gate_lower_resx, /* gate */
3026 execute_lower_resx, /* execute */
3029 0, /* static_pass_number */
3030 TV_TREE_EH, /* tv_id */
3031 PROP_gimple_lcf, /* properties_required */
3032 0, /* properties_provided */
3033 0, /* properties_destroyed */
3034 0, /* todo_flags_start */
3035 TODO_dump_func | TODO_verify_flow /* todo_flags_finish */
3040 /* At the end of inlining, we can lower EH_DISPATCH. */
3043 lower_eh_dispatch (basic_block src, gimple stmt)
3045 gimple_stmt_iterator gsi;
/* Locate the EH region this dispatch selects handlers for.  */
3051 region_nr = gimple_eh_dispatch_region (stmt);
3052 r = get_eh_region_from_number (region_nr);
3054 gsi = gsi_last_bb (src);
/* ERT_TRY case: build a switch over the exception filter value.  */
3060 VEC (tree, heap) *labels = NULL;
3061 tree default_label = NULL;
3066 /* Collect the labels for a switch. Zero the post_landing_pad
3067 field because we'll no longer have anything keeping these labels
3068 in existence and the optimizer will be free to merge these
3070 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
3072 tree tp_node, flt_node, lab = c->label;
3075 tp_node = c->type_list;
3076 flt_node = c->filter_list;
/* A catch with no type list is a catch-all; it becomes the switch
   default and terminates the search.  */
3078 if (tp_node == NULL)
3080 default_label = lab;
/* One CASE_LABEL_EXPR per caught type's filter value.  */
3085 tree t = build3 (CASE_LABEL_EXPR, void_type_node,
3086 TREE_VALUE (flt_node), NULL, lab);
3087 VEC_safe_push (tree, heap, labels, t);
3089 tp_node = TREE_CHAIN (tp_node);
3090 flt_node = TREE_CHAIN (flt_node);
3095 /* Clean up the edge flags. */
3096 FOR_EACH_EDGE (e, ei, src->succs)
3098 if (e->flags & EDGE_FALLTHRU)
3100 /* If there was no catch-all, use the fallthru edge. */
3101 if (default_label == NULL)
3102 default_label = gimple_block_label (e->dest);
3103 e->flags &= ~EDGE_FALLTHRU;
3106 gcc_assert (default_label != NULL);
3108 /* Don't generate a switch if there's only a default case.
3109 This is common in the form of try { A; } catch (...) { B; }. */
3112 e = single_succ_edge (src);
3113 e->flags |= EDGE_FALLTHRU;
/* Emit __builtin_eh_filter into an SSA temporary and switch on it.  */
3117 fn = implicit_built_in_decls[BUILT_IN_EH_FILTER];
3118 x = gimple_build_call (fn, 1, build_int_cst (NULL, region_nr));
3119 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)), NULL);
3120 filter = make_ssa_name (filter, x);
3121 gimple_call_set_lhs (x, filter);
3122 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3124 /* Turn the default label into a default case. */
3125 default_label = build3 (CASE_LABEL_EXPR, void_type_node,
3126 NULL, NULL, default_label);
3127 sort_case_labels (labels);
3129 x = gimple_build_switch_vec (filter, default_label, labels);
3130 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3132 VEC_free (tree, heap, labels);
/* ERT_ALLOWED_EXCEPTIONS case: a two-way branch comparing the filter
   value against the single allowed filter.  */
3137 case ERT_ALLOWED_EXCEPTIONS:
3139 edge b_e = BRANCH_EDGE (src);
3140 edge f_e = FALLTHRU_EDGE (src);
3142 fn = implicit_built_in_decls[BUILT_IN_EH_FILTER];
3143 x = gimple_build_call (fn, 1, build_int_cst (NULL, region_nr));
3144 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)), NULL);
3145 filter = make_ssa_name (filter, x);
3146 gimple_call_set_lhs (x, filter);
3147 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3149 r->u.allowed.label = NULL;
3150 x = gimple_build_cond (EQ_EXPR, filter,
3151 build_int_cst (TREE_TYPE (filter),
3152 r->u.allowed.filter),
3153 NULL_TREE, NULL_TREE);
3154 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
/* Retag the outgoing edges as the true/false arms of the new cond.  */
3156 b_e->flags = b_e->flags | EDGE_TRUE_VALUE;
3157 f_e->flags = (f_e->flags & ~EDGE_FALLTHRU) | EDGE_FALSE_VALUE;
3165 /* Replace the EH_DISPATCH with the SWITCH or COND generated above. */
3166 gsi_remove (&gsi, true);
/* Pass driver: assign filter values, then lower every EH_DISPATCH
   statement found at the end of a basic block.  */
3170 execute_lower_eh_dispatch (void)
3173 bool any_rewritten = false;
3175 assign_filter_values ();
3179 gimple last = last_stmt (bb);
3180 if (last && gimple_code (last) == GIMPLE_EH_DISPATCH)
3182 lower_eh_dispatch (bb, last);
3183 any_rewritten = true;
3187 return any_rewritten ? TODO_update_ssa_only_virtuals : 0;
/* Gate: only meaningful if the function actually has EH regions.  */
3191 gate_lower_eh_dispatch (void)
3193 return cfun->eh->region_tree != NULL;
/* Pass descriptor for EH_DISPATCH lowering.  */
3196 struct gimple_opt_pass pass_lower_eh_dispatch =
3200 "ehdisp", /* name */
3201 gate_lower_eh_dispatch, /* gate */
3202 execute_lower_eh_dispatch, /* execute */
3205 0, /* static_pass_number */
3206 TV_TREE_EH, /* tv_id */
3207 PROP_gimple_lcf, /* properties_required */
3208 0, /* properties_provided */
3209 0, /* properties_destroyed */
3210 0, /* todo_flags_start */
3211 TODO_dump_func | TODO_verify_flow /* todo_flags_finish */
3215 /* Walk statements, see what regions are really referenced and remove
3216 those that are unused. */
3219 remove_unreachable_handlers (void)
3221 sbitmap r_reachable, lp_reachable;
/* One reachability bit per region and per landing pad.  */
3227 r_reachable = sbitmap_alloc (VEC_length (eh_region, cfun->eh->region_array));
3229 = sbitmap_alloc (VEC_length (eh_landing_pad, cfun->eh->lp_array));
3230 sbitmap_zero (r_reachable);
3231 sbitmap_zero (lp_reachable);
/* Phase 1: scan every statement and mark the regions/landing pads it
   references via the EH table.  */
3235 gimple_stmt_iterator gsi = gsi_start_bb (bb);
3237 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3239 gimple stmt = gsi_stmt (gsi);
3240 lp_nr = lookup_stmt_eh_lp (stmt);
3242 /* Negative LP numbers are MUST_NOT_THROW regions which
3243 are not considered BB enders. */
3245 SET_BIT (r_reachable, -lp_nr);
3247 /* Positive LP numbers are real landing pads, and are BB enders. */
3250 gcc_assert (gsi_one_before_end_p (gsi));
3251 region = get_eh_region_from_lp_number (lp_nr);
3252 SET_BIT (r_reachable, region->index);
3253 SET_BIT (lp_reachable, lp_nr);
3260 fprintf (dump_file, "Before removal of unreachable regions:\n");
3261 dump_eh_tree (dump_file, cfun);
3262 fprintf (dump_file, "Reachable regions: ");
3263 dump_sbitmap_file (dump_file, r_reachable);
3264 fprintf (dump_file, "Reachable landing pads: ");
3265 dump_sbitmap_file (dump_file, lp_reachable);
/* Phase 2: delete every region and landing pad never marked above.  */
3269 VEC_iterate (eh_region, cfun->eh->region_array, r_nr, region); ++r_nr)
3270 if (region && !TEST_BIT (r_reachable, r_nr))
3273 fprintf (dump_file, "Removing unreachable region %d\n", r_nr);
3274 remove_eh_handler (region);
3278 VEC_iterate (eh_landing_pad, cfun->eh->lp_array, lp_nr, lp); ++lp_nr)
3279 if (lp && !TEST_BIT (lp_reachable, lp_nr))
3282 fprintf (dump_file, "Removing unreachable landing pad %d\n", lp_nr);
3283 remove_eh_landing_pad (lp);
3288 fprintf (dump_file, "\n\nAfter removal of unreachable regions:\n");
3289 dump_eh_tree (dump_file, cfun);
3290 fprintf (dump_file, "\n\n");
3293 sbitmap_free (r_reachable);
3294 sbitmap_free (lp_reachable);
3296 #ifdef ENABLE_CHECKING
3297 verify_eh_tree (cfun);
3301 /* Remove regions that do not have landing pads. This assumes
3302 that remove_unreachable_handlers has already been run, and
3303 that we've just manipulated the landing pads since then. */
3306 remove_unreachable_handlers_no_lp (void)
/* MUST_NOT_THROW regions legitimately have no landing pads, so they
   are excluded from removal.  */
3311 for (i = 1; VEC_iterate (eh_region, cfun->eh->region_array, i, r); ++i)
3312 if (r && r->landing_pads == NULL && r->type != ERT_MUST_NOT_THROW)
3315 fprintf (dump_file, "Removing unreachable region %d\n", i);
3316 remove_eh_handler (r);
3320 /* Undo critical edge splitting on an EH landing pad. Earlier, we
3321 optimistically split all sorts of edges, including EH edges. The
3322 optimization passes in between may not have needed them; if not,
3323 we should undo the split.
3325 Recognize this case by having one EH edge incoming to the BB and
3326 one normal edge outgoing; BB should be empty apart from the
3327 post_landing_pad label.
3329 Note that this is slightly different from the empty handler case
3330 handled by cleanup_empty_eh, in that the actual handler may yet
3331 have actual code but the landing pad has been separated from the
3332 handler. As such, cleanup_empty_eh relies on this transformation
3333 having been done first. */
3336 unsplit_eh (eh_landing_pad lp)
3338 basic_block bb = label_to_block (lp->post_landing_pad);
3339 gimple_stmt_iterator gsi;
3342 /* Quickly check the edge counts on BB for singularity. */
3343 if (EDGE_COUNT (bb->preds) != 1 || EDGE_COUNT (bb->succs) != 1)
3345 e_in = EDGE_PRED (bb, 0);
3346 e_out = EDGE_SUCC (bb, 0);
3348 /* Input edge must be EH and output edge must be normal. */
3349 if ((e_in->flags & EDGE_EH) == 0 || (e_out->flags & EDGE_EH) != 0)
3352 /* The block must be empty except for the labels. */
3353 if (!gsi_end_p (gsi_after_labels (bb)))
3356 /* The destination block must not already have a landing pad
3357 for a different region. */
3358 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
3360 gimple stmt = gsi_stmt (gsi);
/* Labels appear only at the head of a block; stop at the first
   non-label statement.  */
3364 if (gimple_code (stmt) != GIMPLE_LABEL)
3366 lab = gimple_label_label (stmt);
3367 lp_nr = EH_LANDING_PAD_NR (lab);
3368 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
3372 /* The new destination block must not already be a destination of
3373 the source block, lest we merge fallthru and eh edges and get
3374 all sorts of confused. */
3375 if (find_edge (e_in->src, e_out->dest))
3378 /* ??? I can't imagine there would be PHI nodes, since by nature
3379 of critical edge splitting this block should never have been
3380 a dominance frontier. If cfg cleanups somehow confuse this,
3381 due to single edges in and out we ought to have degenerate PHIs
3382 and can easily propagate the PHI arguments. */
3383 gcc_assert (gimple_seq_empty_p (phi_nodes (bb)));
3385 if (dump_file && (dump_flags & TDF_DETAILS))
3386 fprintf (dump_file, "Unsplit EH landing pad %d to block %i.\n",
3387 lp->index, e_out->dest->index);
3389 /* Redirect the edge. Since redirect_eh_edge_1 expects to be moving
3390 a successor edge, humor it. But do the real CFG change with the
3391 predecessor of E_OUT in order to preserve the ordering of arguments
3392 to the PHI nodes in E_OUT->DEST. */
3393 redirect_eh_edge_1 (e_in, e_out->dest, false);
3394 redirect_edge_pred (e_out, e_in->src);
3395 e_out->flags = e_in->flags;
3396 e_out->probability = e_in->probability;
3397 e_out->count = e_in->count;
3403 /* Examine each landing pad block and see if it matches unsplit_eh. */
3406 unsplit_all_eh (void)
3408 bool changed = false;
/* Landing pad 0 is unused; iterate from index 1.  */
3412 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
3414 changed |= unsplit_eh (lp);
3419 /* A subroutine of cleanup_empty_eh. Redirect all EH edges incoming
3420 to OLD_BB to NEW_BB; return true on success, false on failure.
3422 OLD_BB_OUT is the edge into NEW_BB from OLD_BB, so if we miss any
3423 PHI variables from OLD_BB we can pick them up from OLD_BB_OUT.
3424 Virtual PHIs may be deleted and marked for renaming. */
3427 cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb,
3430 gimple_stmt_iterator ngsi, ogsi;
3433 bitmap rename_virts;
3434 bitmap ophi_handled;
/* Clear any stale pending PHI-arg maps on the edges we will move.  */
3436 FOR_EACH_EDGE (e, ei, old_bb->preds)
3437 redirect_edge_var_map_clear (e);
3439 ophi_handled = BITMAP_ALLOC (NULL);
3440 rename_virts = BITMAP_ALLOC (NULL);
3442 /* First, iterate through the PHIs on NEW_BB and set up the edge_var_map
3443 for the edges we're going to move. */
3444 for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); gsi_next (&ngsi))
3446 gimple ophi, nphi = gsi_stmt (ngsi);
3449 nresult = gimple_phi_result (nphi);
3450 nop = gimple_phi_arg_def (nphi, old_bb_out->dest_idx);
3452 /* Find the corresponding PHI in OLD_BB so we can forward-propagate
3453 the source ssa_name. */
3455 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
3457 ophi = gsi_stmt (ogsi);
3458 if (gimple_phi_result (ophi) == nop)
3463 /* If we did find the corresponding PHI, copy those inputs. */
3466 bitmap_set_bit (ophi_handled, SSA_NAME_VERSION (nop));
3467 FOR_EACH_EDGE (e, ei, old_bb->preds)
/* Only EH predecessor edges are being moved; skip the rest.  */
3472 if ((e->flags & EDGE_EH) == 0)
3474 oop = gimple_phi_arg_def (ophi, e->dest_idx);
3475 oloc = gimple_phi_arg_location (ophi, e->dest_idx);
3476 redirect_edge_var_map_add (e, nresult, oop, oloc);
3479 /* If we didn't find the PHI, but it's a VOP, remember to rename
3480 it later, assuming all other tests succeed. */
3481 else if (!is_gimple_reg (nresult))
3482 bitmap_set_bit (rename_virts, SSA_NAME_VERSION (nresult));
3483 /* If we didn't find the PHI, and it's a real variable, we know
3484 from the fact that OLD_BB is tree_empty_eh_handler_p that the
3485 variable is unchanged from input to the block and we can simply
3486 re-use the input to NEW_BB from the OLD_BB_OUT edge. */
3490 = gimple_phi_arg_location (nphi, old_bb_out->dest_idx);
3491 FOR_EACH_EDGE (e, ei, old_bb->preds)
3492 redirect_edge_var_map_add (e, nresult, nop, nloc);
3496 /* Second, verify that all PHIs from OLD_BB have been handled. If not,
3497 we don't know what values from the other edges into NEW_BB to use. */
3498 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
3500 gimple ophi = gsi_stmt (ogsi);
3501 tree oresult = gimple_phi_result (ophi);
/* An unhandled PHI aborts the merge (elided code presumably jumps
   to the failure cleanup below).  */
3502 if (!bitmap_bit_p (ophi_handled, SSA_NAME_VERSION (oresult)))
3506 /* At this point we know that the merge will succeed. Remove the PHI
3507 nodes for the virtuals that we want to rename. */
3508 if (!bitmap_empty_p (rename_virts))
3510 for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); )
3512 gimple nphi = gsi_stmt (ngsi);
3513 tree nresult = gimple_phi_result (nphi);
3514 if (bitmap_bit_p (rename_virts, SSA_NAME_VERSION (nresult)))
3516 mark_virtual_phi_result_for_renaming (nphi);
3517 remove_phi_node (&ngsi, true);
3524 /* Finally, move the edges and update the PHIs. */
3525 for (ei = ei_start (old_bb->preds); (e = ei_safe_edge (ei)); )
3526 if (e->flags & EDGE_EH)
3528 redirect_eh_edge_1 (e, new_bb, true);
3529 redirect_edge_succ (e, new_bb);
3530 flush_pending_stmts (e);
/* Success path: release the scratch bitmaps.  */
3535 BITMAP_FREE (ophi_handled);
3536 BITMAP_FREE (rename_virts);
/* Failure path: undo the pending edge maps and release bitmaps.  */
3540 FOR_EACH_EDGE (e, ei, old_bb->preds)
3541 redirect_edge_var_map_clear (e);
3542 BITMAP_FREE (ophi_handled);
3543 BITMAP_FREE (rename_virts);
3547 /* A subroutine of cleanup_empty_eh. Move a landing pad LP from its
3548 old region to NEW_REGION at BB. */
3551 cleanup_empty_eh_move_lp (basic_block bb, edge e_out,
3552 eh_landing_pad lp, eh_region new_region)
3554 gimple_stmt_iterator gsi;
/* Unlink LP from its old region's landing-pad list...  */
3557 for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
/* ...and push it onto NEW_REGION's list.  */
3561 lp->region = new_region;
3562 lp->next_lp = new_region->landing_pads;
3563 new_region->landing_pads = lp;
3565 /* Delete the RESX that was matched within the empty handler block. */
3566 gsi = gsi_last_bb (bb);
3567 mark_virtual_ops_for_renaming (gsi_stmt (gsi));
3568 gsi_remove (&gsi, true);
3570 /* Clean up E_OUT for the fallthru. */
3571 e_out->flags = (e_out->flags & ~EDGE_EH) | EDGE_FALLTHRU;
3572 e_out->probability = REG_BR_PROB_BASE;
3575 /* A subroutine of cleanup_empty_eh. Handle more complex cases of
3576 unsplitting than unsplit_eh was prepared to handle, e.g. when
3577 multiple incoming edges and phis are involved. */
3580 cleanup_empty_eh_unsplit (basic_block bb, edge e_out, eh_landing_pad olp)
3582 gimple_stmt_iterator gsi;
3586 /* We really ought not have totally lost everything following
3587 a landing pad label. Given that BB is empty, there had better
3589 gcc_assert (e_out != NULL);
3591 /* Look for an EH label in the successor block. */
3593 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
3595 gimple stmt = gsi_stmt (gsi);
/* Labels only occur at the head of a block; stop searching at the
   first non-label.  */
3596 if (gimple_code (stmt) != GIMPLE_LABEL)
3598 lab = gimple_label_label (stmt);
3599 if (EH_LANDING_PAD_NR (lab))
3605 /* The other label had better be part of the same EH region. Given that
3606 we've not lowered RESX, there should be no way to have a totally empty
3607 landing pad that crosses to another EH region. */
3608 nlp = get_eh_landing_pad_from_number (EH_LANDING_PAD_NR (lab));
3609 gcc_assert (nlp->region == olp->region);
3611 /* Attempt to move the PHIs into the successor block. */
3612 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out))
3614 if (dump_file && (dump_flags & TDF_DETAILS))
3616 "Unsplit EH landing pad %d to block %d via lp %d.\n",
3617 olp->index, e_out->dest->index, nlp->index);
/* The old landing pad is now fully subsumed by NLP.  */
3619 remove_eh_landing_pad (olp);
3626 /* Examine the block associated with LP to determine if it's an empty
3627 handler for its EH region. If so, attempt to redirect EH edges to
3628 an outer region. Return true if the CFG was updated in any way. This
3629 is similar to jump forwarding, just across EH edges. */
3632 cleanup_empty_eh (eh_landing_pad lp)
3634 basic_block bb = label_to_block (lp->post_landing_pad);
3635 gimple_stmt_iterator gsi;
3637 eh_region new_region;
3640 bool has_non_eh_pred;
3643 /* There can be zero or one edges out of BB. This is the quickest test. */
3644 switch (EDGE_COUNT (bb->succs))
3650 e_out = EDGE_SUCC (bb, 0);
3655 gsi = gsi_after_labels (bb);
3657 /* Make sure to skip debug statements. */
3658 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
3659 gsi_next_nondebug (&gsi);
3661 /* If the block is totally empty, look for more unsplitting cases. */
3662 if (gsi_end_p (gsi))
3663 return cleanup_empty_eh_unsplit (bb, e_out, lp);
3665 /* The block should consist only of a single RESX statement. */
3666 resx = gsi_stmt (gsi);
3667 if (!is_gimple_resx (resx))
3669 gcc_assert (gsi_one_before_end_p (gsi));
3671 /* Determine if there are non-EH edges, or resx edges into the handler. */
3672 has_non_eh_pred = false;
3673 FOR_EACH_EDGE (e, ei, bb->preds)
3674 if (!(e->flags & EDGE_EH))
3675 has_non_eh_pred = true;
3677 /* Find the handler that's outer of the empty handler by looking at
3678 where the RESX instruction was vectored. */
3679 new_lp_nr = lookup_stmt_eh_lp (resx);
3680 new_region = get_eh_region_from_lp_number (new_lp_nr);
3682 /* If there's no destination region within the current function,
3683 redirection is trivial via removing the throwing statements from
3684 the EH region, removing the EH edges, and allowing the block
3685 to go unreachable. */
3686 if (new_region == NULL)
3688 gcc_assert (e_out == NULL);
3689 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
3690 if (e->flags & EDGE_EH)
3692 gimple stmt = last_stmt (e->src);
3693 remove_stmt_from_eh_lp (stmt);
3701 /* If the destination region is a MUST_NOT_THROW, allow the runtime
3702 to handle the abort and allow the blocks to go unreachable. */
3703 if (new_region->type == ERT_MUST_NOT_THROW)
3705 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
3706 if (e->flags & EDGE_EH)
3708 gimple stmt = last_stmt (e->src);
3709 remove_stmt_from_eh_lp (stmt);
/* Re-register the throwing statement against the MUST_NOT_THROW
   region so the runtime aborts directly.  */
3710 add_stmt_to_eh_lp (stmt, new_lp_nr);
3718 /* Try to redirect the EH edges and merge the PHIs into the destination
3719 landing pad block. If the merge succeeds, we'll already have redirected
3720 all the EH edges. The handler itself will go unreachable if there were
3722 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out))
3725 /* Finally, if all input edges are EH edges, then we can (potentially)
3726 reduce the number of transfers from the runtime by moving the landing
3727 pad from the original region to the new region. This is a win when
3728 we remove the last CLEANUP region along a particular exception
3729 propagation path. Since nothing changes except for the region with
3730 which the landing pad is associated, the PHI nodes do not need to be
3732 if (!has_non_eh_pred)
3734 cleanup_empty_eh_move_lp (bb, e_out, lp, new_region);
3735 if (dump_file && (dump_flags & TDF_DETAILS))
3736 fprintf (dump_file, "Empty EH handler %i moved to EH region %i.\n",
3737 lp->index, new_region->index);
3739 /* ??? The CFG didn't change, but we may have rendered the
3740 old EH region unreachable. Trigger a cleanup there. */
3747 if (dump_file && (dump_flags & TDF_DETAILS))
3748 fprintf (dump_file, "Empty EH handler %i removed.\n", lp->index);
3749 remove_eh_landing_pad (lp);
3753 /* Do a post-order traversal of the EH region tree. Examine each
3754 post_landing_pad block and see if we can eliminate it as empty. */
3757 cleanup_all_empty_eh (void)
3759 bool changed = false;
/* Landing pad 0 is unused; iterate from index 1.  */
3763 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
3765 changed |= cleanup_empty_eh (lp);
3770 /* Perform cleanups and lowering of exception handling
3771 1) cleanups regions with handlers doing nothing are optimized out
3772 2) MUST_NOT_THROW regions that became dead because of 1) are optimized out
3773 3) Info about regions that are containing instructions, and regions
3774 reachable via local EH edges is collected
3775 4) Eh tree is pruned for regions no longer necessary.
3777 TODO: Push MUST_NOT_THROW regions to the root of the EH tree.
3778 Unify those that have the same failure decl and locus.
3782 execute_cleanup_eh (void)
3784 /* Do this first: unsplit_all_eh and cleanup_all_empty_eh can die
3785 looking up unreachable landing pads. */
3786 remove_unreachable_handlers ();
3788 /* Watch out for the region tree vanishing due to all unreachable. */
3789 if (cfun->eh->region_tree && optimize)
3791 bool changed = false;
3793 changed |= unsplit_all_eh ();
3794 changed |= cleanup_all_empty_eh ();
/* Edge redirections above invalidate any cached dominator info.  */
3798 free_dominance_info (CDI_DOMINATORS);
3799 free_dominance_info (CDI_POST_DOMINATORS);
3801 /* We delayed all basic block deletion, as we may have performed
3802 cleanups on EH edges while non-EH edges were still present. */
3803 delete_unreachable_blocks ();
3805 /* We manipulated the landing pads. Remove any region that no
3806 longer has a landing pad. */
3807 remove_unreachable_handlers_no_lp ();
3809 return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
/* Gate: skip the cleanup pass entirely when the function has no EH.  */
3817 gate_cleanup_eh (void)
3819 return cfun->eh != NULL && cfun->eh->region_tree != NULL;
/* Pass descriptor for the EH cleanup pass.  */
3822 struct gimple_opt_pass pass_cleanup_eh = {
3825 "ehcleanup", /* name */
3826 gate_cleanup_eh, /* gate */
3827 execute_cleanup_eh, /* execute */
3830 0, /* static_pass_number */
3831 TV_TREE_EH, /* tv_id */
3832 PROP_gimple_lcf, /* properties_required */
3833 0, /* properties_provided */
3834 0, /* properties_destroyed */
3835 0, /* todo_flags_start */
3836 TODO_dump_func /* todo_flags_finish */
3840 /* Verify that BB containing STMT as the last statement, has precisely the
3841 edge that make_eh_edges would create. */
3844 verify_eh_edges (gimple stmt)
3846 basic_block bb = gimple_bb (stmt);
3847 eh_landing_pad lp = NULL;
/* Resolve the landing pad STMT is registered against, if any.  */
3852 lp_nr = lookup_stmt_eh_lp (stmt);
3854 lp = get_eh_landing_pad_from_number (lp_nr);
/* Scan successors: there must be at most one EH edge.  */
3857 FOR_EACH_EDGE (e, ei, bb->succs)
3859 if (e->flags & EDGE_EH)
3863 error ("BB %i has multiple EH edges", bb->index);
/* No landing pad: any EH edge present is an error.  */
3875 error ("BB %i can not throw but has an EH edge", bb->index);
/* Landing pad present: the statement must actually be able to
   throw, an EH edge must exist, and it must target the pad's
   post-landing-pad block.  */
3881 if (!stmt_could_throw_p (stmt))
3883 error ("BB %i last statement has incorrectly set lp", bb->index);
3887 if (eh_edge == NULL)
3889 error ("BB %i is missing an EH edge", bb->index);
3893 if (eh_edge->dest != label_to_block (lp->post_landing_pad))
3895 error ("Incorrect EH edge %i->%i", bb->index, eh_edge->dest->index);
3902 /* Similarly, but handle GIMPLE_EH_DISPATCH specifically. */
3905 verify_eh_dispatch_edge (gimple stmt)
3909 basic_block src, dst;
3910 bool want_fallthru = true;
3914 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
3915 src = gimple_bb (stmt);
3917 FOR_EACH_EDGE (e, ei, src->succs)
3918 gcc_assert (e->aux == NULL);
3923 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
3925 dst = label_to_block (c->label);
3926 e = find_edge (src, dst);
3929 error ("BB %i is missing an edge", src->index);
3934 /* A catch-all handler doesn't have a fallthru. */
3935 if (c->type_list == NULL)
3937 want_fallthru = false;
3943 case ERT_ALLOWED_EXCEPTIONS:
3944 dst = label_to_block (r->u.allowed.label);
3945 e = find_edge (src, dst);
3948 error ("BB %i is missing an edge", src->index);
3959 FOR_EACH_EDGE (e, ei, src->succs)
3961 if (e->flags & EDGE_FALLTHRU)
3963 if (fall_edge != NULL)
3965 error ("BB %i too many fallthru edges", src->index);
3974 error ("BB %i has incorrect edge", src->index);
3978 if ((fall_edge != NULL) ^ want_fallthru)
3980 error ("BB %i has incorrect fallthru edge", src->index);