/* Exception handling semantics and decomposition for trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "pointer-set.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "diagnostic-core.h"
/* In some instances a tree and a gimple need to be stored in the same
   table, i.e. in hash tables.  This is a structure to do this.  */
typedef union {tree *tp; tree t; gimple g;} treemple;

/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}
/* Misc functions used in this file.  */

/* Compare and hash for any structure which begins with a canonical
   pointer.  Assumes all pointers are interchangeable, which is sort
   of already assumed by gcc elsewhere IIRC.  */

int
struct_ptr_eq (const void *a, const void *b)
{
  const void * const * x = (const void * const *) a;
  const void * const * y = (const void * const *) b;
  return *x == *y;
}

hashval_t
struct_ptr_hash (const void *a)
{
  const void * const * x = (const void * const *) a;
  return (size_t)*x >> 4;
}
/* Remember and lookup EH landing pad data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
   we get to rtl.  Once we're done with lowering here, if we lose
   the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
   compared to those that can.  We should be saving some amount
   of space by only allocating memory for those that can throw.  */
/* Add statement T in function IFUN to landing pad NUM.  */

void
add_stmt_to_eh_lp_fn (struct function *ifun, gimple t, int num)
{
  struct throw_stmt_node *n;
  void **slot;

  gcc_assert (num != 0);

  n = ggc_alloc_throw_stmt_node ();
  n->stmt = t;
  n->lp_nr = num;

  if (!get_eh_throw_stmt_table (ifun))
    set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,
						    struct_ptr_eq,
						    NULL));

  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}

/* Add statement T in the current function (cfun) to EH landing pad NUM.  */

void
add_stmt_to_eh_lp (gimple t, int num)
{
  add_stmt_to_eh_lp_fn (cfun, t, num);
}
/* Add statement T to the single EH landing pad in REGION.  */

static void
record_stmt_eh_region (eh_region region, gimple t)
{
  if (region == NULL)
    return;
  if (region->type == ERT_MUST_NOT_THROW)
    add_stmt_to_eh_lp_fn (cfun, t, -region->index);
  else
    {
      eh_landing_pad lp = region->landing_pads;
      if (lp == NULL)
	lp = gen_eh_landing_pad (region);
      else
	gcc_assert (lp->next_lp == NULL);
      add_stmt_to_eh_lp_fn (cfun, t, lp->index);
    }
}
/* Remove statement T in function IFUN from its EH landing pad.  */

bool
remove_stmt_from_eh_lp_fn (struct function *ifun, gimple t)
{
  struct throw_stmt_node dummy;
  void **slot;

  if (!get_eh_throw_stmt_table (ifun))
    return false;

  dummy.stmt = t;
  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
			 NO_INSERT);
  if (slot)
    {
      htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
      return true;
    }
  return false;
}

/* Remove statement T in the current function (cfun) from its
   EH landing pad.  */

bool
remove_stmt_from_eh_lp (gimple t)
{
  return remove_stmt_from_eh_lp_fn (cfun, t);
}
/* Determine if statement T is inside an EH region in function IFUN.
   Positive numbers indicate a landing pad index; negative numbers
   indicate a MUST_NOT_THROW region index; zero indicates that the
   statement is not recorded in the region table.  */
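
/* For example (an illustrative sketch, not code from this file):

     int lp_nr = lookup_stmt_eh_lp_fn (cfun, stmt);
     if (lp_nr > 0)
       ...	stmt may throw to the landing pad with index LP_NR
     else if (lp_nr < 0)
       ...	stmt lies in the MUST_NOT_THROW region with index -LP_NR
     else
       ...	stmt is not recorded in the region table at all.  */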
int
lookup_stmt_eh_lp_fn (struct function *ifun, gimple t)
{
  struct throw_stmt_node *p, n;

  if (ifun->eh->throw_stmt_table == NULL)
    return 0;

  n.stmt = t;
  p = (struct throw_stmt_node *) htab_find (ifun->eh->throw_stmt_table, &n);
  return p ? p->lp_nr : 0;
}

/* Likewise, but always use the current function.  */

int
lookup_stmt_eh_lp (gimple t)
{
  /* We can get called from initialized data when -fnon-call-exceptions
     is on; prevent crash.  */
  if (cfun == NULL)
    return 0;
  return lookup_stmt_eh_lp_fn (cfun, t);
}
/* First pass of EH node decomposition.  Build up a tree of GIMPLE_TRY_FINALLY
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */
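
/* For instance, given the (illustrative) source

	try { goto out; } finally { cleanup (); }
	out:;

   the goto leaves the body of the TRY_FINALLY, so the second phase must
   route it through the finally block, whereas a goto whose LABEL_DECL was
   recorded under the same GIMPLE_TRY node never escapes and needs no
   rewriting.  */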
struct finally_tree_node
{
  /* When storing a GIMPLE_TRY, we have to record a gimple.  However
     when deciding whether a GOTO to a certain LABEL_DECL (which is a
     tree) leaves the TRY block, it's necessary to record a tree in
     this field.  Thus a treemple is used.  */
  treemple child;
  gimple parent;
};

/* Note that this table is *not* marked GTY.  It is short-lived.  */
static htab_t finally_tree;
static void
record_in_finally_tree (treemple child, gimple parent)
{
  struct finally_tree_node *n;
  void **slot;

  n = XNEW (struct finally_tree_node);
  n->child = child;
  n->parent = parent;

  slot = htab_find_slot (finally_tree, n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}
static void
collect_finally_tree (gimple stmt, gimple region);

/* Go through the gimple sequence.  Works with collect_finally_tree to
   record all GIMPLE_LABEL and GIMPLE_TRY statements.  */

static void
collect_finally_tree_1 (gimple_seq seq, gimple region)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    collect_finally_tree (gsi_stmt (gsi), region);
}
static void
collect_finally_tree (gimple stmt, gimple region)
{
  treemple temp;

  switch (gimple_code (stmt))
    {
    case GIMPLE_LABEL:
      temp.t = gimple_label_label (stmt);
      record_in_finally_tree (temp, region);
      break;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
	{
	  temp.g = stmt;
	  record_in_finally_tree (temp, region);
	  collect_finally_tree_1 (gimple_try_eval (stmt), stmt);
	  collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
	}
      else if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	{
	  collect_finally_tree_1 (gimple_try_eval (stmt), region);
	  collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
	}
      break;

    case GIMPLE_CATCH:
      collect_finally_tree_1 (gimple_catch_handler (stmt), region);
      break;

    case GIMPLE_EH_FILTER:
      collect_finally_tree_1 (gimple_eh_filter_failure (stmt), region);
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
	 interested in.  Don't walk them.  */
      break;
    }
}
/* Use the finally tree to determine if a jump from START to TARGET
   would leave the try_finally node that START lives in.  */

static bool
outside_finally_tree (treemple start, gimple target)
{
  struct finally_tree_node n, *p;

  do
    {
      n.child = start;
      p = (struct finally_tree_node *) htab_find (finally_tree, &n);
      if (!p)
	return true;
      start.g = p->parent;
    }
  while (start.g != target);
  return false;
}
/* Second pass of EH node decomposition.  Actually transform the GIMPLE_TRY
   nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straightforward, but frobbing all the gotos
   and such into shape isn't.  */

/* The sequence into which we record all EH stuff.  This will be
   placed at the end of the function when we're all done.  */
static gimple_seq eh_seq;

/* Record whether an EH region contains something that can throw,
   indexed by EH region number.  */
static bitmap eh_region_may_contain_throw_map;
/* The GOTO_QUEUE is an array of GIMPLE_GOTO and GIMPLE_RETURN
   statements that are seen to escape this GIMPLE_TRY_FINALLY node.
   The idea is to record a gimple statement for everything except for
   the conditionals, which get their labels recorded.  Since labels are
   of type 'tree', we need this node to store both gimple and tree
   objects.  REPL_STMT is the sequence used to replace the goto/return
   statement.  CONT_STMT is used to store the statement that allows
   the return/goto to jump to the original destination.  */
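
/* An illustrative sketch: for

	try { goto out; } finally { F; }

   the escaping "goto out;" is replaced by REPL_STMT (a "goto
   finally_label;" sequence that enters the finally block), while
   CONT_STMT keeps the original "goto out;" so that it can be re-emitted
   after a copy of F to reach the original destination.  */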
struct goto_queue_node
{
  treemple stmt;
  gimple_seq repl_stmt;
  gimple cont_stmt;
  int index;
  /* This is used when index >= 0 to indicate that stmt is a label (as
     opposed to a goto stmt).  */
  bool is_label;
};
/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  eh_region cur_region;

  /* What's "current" for the purposes of __builtin_eh_pointer.  For
     a CATCH, this is the associated TRY.  For an EH_FILTER, this is
     the associated ALLOWED_EXCEPTIONS, etc.  */
  eh_region ehp_region;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
};
struct leh_tf_state
{
  /* Pointer to the GIMPLE_TRY_FINALLY node under discussion.  The
     try_finally_expr is the original GIMPLE_TRY_FINALLY.  We need to retain
     this so that outside_finally_tree can reliably reference the tree used
     in the collect_finally_tree data structures.  */
  gimple try_finally_expr;
  gimple top_p;

  /* While lowering a top_p usually it is expanded into multiple statements,
     thus we need the following field to store them.  */
  gimple_seq top_p_seq;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  eh_region region;

  /* The goto queue.  */
  struct goto_queue_node *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* Pointer map to help in searching goto_queue when it is large.  */
  struct pointer_map_t *goto_queue_map;

  /* The set of unique labels seen as entries in the goto queue.  */
  VEC(tree,heap) *dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a GIMPLE_RETURN.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};
static gimple_seq lower_eh_must_not_throw (struct leh_state *, gimple);

/* Search for STMT in the goto queue.  Return the replacement,
   or null if the statement isn't in the queue.  */

#define LARGE_GOTO_QUEUE 20

static void lower_eh_constructs_1 (struct leh_state *state, gimple_seq seq);
static gimple_seq
find_goto_replacement (struct leh_tf_state *tf, treemple stmt)
{
  unsigned int i;
  void **slot;

  if (tf->goto_queue_active < LARGE_GOTO_QUEUE)
    {
      for (i = 0; i < tf->goto_queue_active; i++)
	if (tf->goto_queue[i].stmt.g == stmt.g)
	  return tf->goto_queue[i].repl_stmt;
      return NULL;
    }

  /* If we have a large number of entries in the goto_queue, create a
     pointer map and use that for searching.  */

  if (!tf->goto_queue_map)
    {
      tf->goto_queue_map = pointer_map_create ();
      for (i = 0; i < tf->goto_queue_active; i++)
	{
	  slot = pointer_map_insert (tf->goto_queue_map,
				     tf->goto_queue[i].stmt.g);
	  gcc_assert (*slot == NULL);
	  *slot = &tf->goto_queue[i];
	}
    }

  slot = pointer_map_contains (tf->goto_queue_map, stmt.g);
  if (slot != NULL)
    return (((struct goto_queue_node *) *slot)->repl_stmt);

  return NULL;
}
/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered GIMPLE_COND.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the GIMPLE_COND and redirect.  */
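
/* E.g. (a sketch with made-up labels): if the queued replacement for the
   true edge is just "goto L2;", then

	if (a) goto L1; else goto L0;

   is rewritten in place into "if (a) goto L2; else goto L0;".  If the
   replacement is a longer sequence S, we instead create a fresh label L3,
   emit "L3: S" after the GIMPLE_COND, and point the true edge at L3.  */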
static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
				gimple_stmt_iterator *gsi)
{
  tree label;
  gimple_seq new_seq;
  treemple temp;
  location_t loc = gimple_location (gsi_stmt (*gsi));

  temp.tp = tp;
  new_seq = find_goto_replacement (tf, temp);
  if (!new_seq)
    return;

  if (gimple_seq_singleton_p (new_seq)
      && gimple_code (gimple_seq_first_stmt (new_seq)) == GIMPLE_GOTO)
    {
      *tp = gimple_goto_dest (gimple_seq_first_stmt (new_seq));
      return;
    }

  label = create_artificial_label (loc);
  /* Set the new label for the GIMPLE_COND.  */
  *tp = label;

  gsi_insert_after (gsi, gimple_build_label (label), GSI_CONTINUE_LINKING);
  gsi_insert_seq_after (gsi, gimple_seq_copy (new_seq), GSI_CONTINUE_LINKING);
}
/* The real work of replace_goto_queue.  Returns with TSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (gimple_seq, struct leh_tf_state *);
static void
replace_goto_queue_1 (gimple stmt, struct leh_tf_state *tf,
		      gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  treemple temp;
  temp.g = NULL;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
      temp.g = stmt;
      seq = find_goto_replacement (tf, temp);
      if (seq)
	{
	  gsi_insert_seq_before (gsi, gimple_seq_copy (seq), GSI_SAME_STMT);
	  gsi_remove (gsi, false);
	  return;
	}
      break;

    case GIMPLE_COND:
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 2), tf, gsi);
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 3), tf, gsi);
      break;

    case GIMPLE_TRY:
      replace_goto_queue_stmt_list (gimple_try_eval (stmt), tf);
      replace_goto_queue_stmt_list (gimple_try_cleanup (stmt), tf);
      break;
    case GIMPLE_CATCH:
      replace_goto_queue_stmt_list (gimple_catch_handler (stmt), tf);
      break;
    case GIMPLE_EH_FILTER:
      replace_goto_queue_stmt_list (gimple_eh_filter_failure (stmt), tf);
      break;

    default:
      /* These won't have gotos in them.  */
      break;
    }

  gsi_next (gsi);
}
/* A subroutine of replace_goto_queue.  Handles GIMPLE_SEQ.  */

static void
replace_goto_queue_stmt_list (gimple_seq seq, struct leh_tf_state *tf)
{
  gimple_stmt_iterator gsi = gsi_start (seq);

  while (!gsi_end_p (gsi))
    replace_goto_queue_1 (gsi_stmt (gsi), tf, &gsi);
}
/* Replace all goto queue members.  */

static void
replace_goto_queue (struct leh_tf_state *tf)
{
  if (tf->goto_queue_active == 0)
    return;
  replace_goto_queue_stmt_list (tf->top_p_seq, tf);
  replace_goto_queue_stmt_list (eh_seq, tf);
}
/* Add a new record to the goto queue contained in TF.  NEW_STMT is the
   data to be added, IS_LABEL indicates whether NEW_STMT is a label or
   a gimple statement.  */

static void
record_in_goto_queue (struct leh_tf_state *tf,
		      treemple new_stmt,
		      int index,
		      bool is_label)
{
  size_t active, size;
  struct goto_queue_node *q;

  gcc_assert (!tf->goto_queue_map);

  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
  if (active >= size)
    {
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
      tf->goto_queue
	= XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
    }

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->stmt = new_stmt;
  q->index = index;
  q->is_label = is_label;
}
/* Record the LABEL label in the goto queue contained in TF.
   TF is not null.  */

static void
record_in_goto_queue_label (struct leh_tf_state *tf, treemple stmt, tree label)
{
  int index;
  treemple temp, new_stmt;

  if (!label)
    return;

  /* Computed and non-local gotos do not get processed.  Given
     their nature we can neither tell whether we've escaped the
     finally block nor redirect them if we knew.  */
  if (TREE_CODE (label) != LABEL_DECL)
    return;

  /* No need to record gotos that don't leave the try block.  */
  temp.t = label;
  if (!outside_finally_tree (temp, tf->try_finally_expr))
    return;

  if (! tf->dest_array)
    {
      tf->dest_array = VEC_alloc (tree, heap, 10);
      VEC_quick_push (tree, tf->dest_array, label);
      index = 0;
    }
  else
    {
      int n = VEC_length (tree, tf->dest_array);
      for (index = 0; index < n; ++index)
	if (VEC_index (tree, tf->dest_array, index) == label)
	  break;
      if (index == n)
	VEC_safe_push (tree, heap, tf->dest_array, label);
    }

  /* In the case of a GOTO we want to record the destination label,
     since with a GIMPLE_COND we have an easy access to the then/else
     labels.  */
  new_stmt = stmt;
  record_in_goto_queue (tf, new_stmt, index, true);
}
/* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */

static void
maybe_record_in_goto_queue (struct leh_state *state, gimple stmt)
{
  struct leh_tf_state *tf = state->tf;
  treemple new_stmt;

  if (!tf)
    return;

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      new_stmt.tp = gimple_op_ptr (stmt, 2);
      record_in_goto_queue_label (tf, new_stmt, gimple_cond_true_label (stmt));
      new_stmt.tp = gimple_op_ptr (stmt, 3);
      record_in_goto_queue_label (tf, new_stmt, gimple_cond_false_label (stmt));
      break;
    case GIMPLE_GOTO:
      new_stmt.g = stmt;
      record_in_goto_queue_label (tf, new_stmt, gimple_goto_dest (stmt));
      break;

    case GIMPLE_RETURN:
      tf->may_return = true;
      new_stmt.g = stmt;
      record_in_goto_queue (tf, new_stmt, -1, false);
      break;

    default:
      gcc_unreachable ();
    }
}
#ifdef ENABLE_CHECKING
/* We do not process GIMPLE_SWITCHes for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer GIMPLE_TRY_FINALLY nodes.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state, gimple switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;

  if (!tf)
    return;

  n = gimple_switch_num_labels (switch_expr);

  for (i = 0; i < n; ++i)
    {
      treemple temp;
      tree lab = CASE_LABEL (gimple_switch_label (switch_expr, i));
      temp.t = lab;
      gcc_assert (!outside_finally_tree (temp, tf->try_finally_expr));
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif
/* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB.  Place in CONT_P
   whatever is needed to finish the return.  If MOD is non-null, insert it
   before the new branch.  RETURN_VALUE_P is a cache containing a temporary
   variable to be used in manipulating the value returned from the function.  */

static void
do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
		       tree *return_value_p)
{
  tree ret_expr;
  gimple x;

  /* In the case of a return, the queue node must be a gimple statement.  */
  gcc_assert (!q->is_label);

  ret_expr = gimple_return_retval (q->stmt.g);

  if (ret_expr)
    {
      if (!*return_value_p)
	*return_value_p = ret_expr;
      else
	gcc_assert (*return_value_p == ret_expr);
      q->cont_stmt = q->stmt.g;
      /* The nasty part about redirecting the return value is that the
	 return value itself is to be computed before the FINALLY block
	 executed.  e.g.

		int x;
		int foo (void)
		{
		  x = 0;
		  try {
		    return x;
		  } finally {
		    x++;
		  }
		}

	 should return 0, not 1.  Arrange for this to happen by copying
	 the computed return value into a local temporary.  This also
	 allows us to redirect multiple return statements through the
	 same destination block; whether this is a net win or not really
	 depends, I guess, but it does make generation of the switch in
	 lower_try_finally_switch easier.  */

      if (TREE_CODE (ret_expr) == RESULT_DECL)
	{
	  if (!*return_value_p)
	    *return_value_p = ret_expr;
	  else
	    gcc_assert (*return_value_p == ret_expr);
	  q->cont_stmt = q->stmt.g;
	}
      else
	gcc_unreachable ();
    }
  else
    /* If we don't return a value, all return statements are the same.  */
    q->cont_stmt = q->stmt.g;

  q->repl_stmt = gimple_seq_alloc ();

  if (mod)
    gimple_seq_add_seq (&q->repl_stmt, mod);

  x = gimple_build_goto (finlab);
  gimple_seq_add_stmt (&q->repl_stmt, x);
}
/* Similar, but easier, for GIMPLE_GOTO.  */

static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
		     struct leh_tf_state *tf)
{
  gimple x;

  gcc_assert (q->is_label);

  q->repl_stmt = gimple_seq_alloc ();

  q->cont_stmt = gimple_build_goto (VEC_index (tree, tf->dest_array, q->index));

  if (mod)
    gimple_seq_add_seq (&q->repl_stmt, mod);

  x = gimple_build_goto (finlab);
  gimple_seq_add_stmt (&q->repl_stmt, x);
}
/* Emit a standard landing pad sequence into SEQ for REGION.  */

static void
emit_post_landing_pad (gimple_seq *seq, eh_region region)
{
  eh_landing_pad lp = region->landing_pads;
  gimple x;

  if (lp == NULL)
    lp = gen_eh_landing_pad (region);

  lp->post_landing_pad = create_artificial_label (UNKNOWN_LOCATION);
  EH_LANDING_PAD_NR (lp->post_landing_pad) = lp->index;

  x = gimple_build_label (lp->post_landing_pad);
  gimple_seq_add_stmt (seq, x);
}
/* Emit a RESX statement into SEQ for REGION.  */

static void
emit_resx (gimple_seq *seq, eh_region region)
{
  gimple x = gimple_build_resx (region->index);
  gimple_seq_add_stmt (seq, x);
  if (region->outer)
    record_stmt_eh_region (region->outer, x);
}

/* Emit an EH_DISPATCH statement into SEQ for REGION.  */

static void
emit_eh_dispatch (gimple_seq *seq, eh_region region)
{
  gimple x = gimple_build_eh_dispatch (region->index);
  gimple_seq_add_stmt (seq, x);
}
/* Note that the current EH region may contain a throw, or a
   call to a function which itself may contain a throw.  */

static void
note_eh_region_may_contain_throw (eh_region region)
{
  while (bitmap_set_bit (eh_region_may_contain_throw_map, region->index))
    {
      region = region->outer;
      if (region == NULL)
	break;
    }
}
/* Check if REGION has been marked as containing a throw.  If REGION is
   NULL, this predicate is false.  */

static bool
eh_region_may_contain_throw (eh_region r)
{
  return r && bitmap_bit_p (eh_region_may_contain_throw_map, r->index);
}
/* We want to transform
	try { body; } catch { stuff; }
   to
	normal_sequence:
	  body;
	  over:
	eh_sequence:
	  landing_pad:
	  stuff;
	  goto over;

   TP is a GIMPLE_TRY node.  REGION is the region whose post_landing_pad
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */
static gimple_seq
frob_into_branch_around (gimple tp, eh_region region, tree over)
{
  gimple x;
  gimple_seq cleanup, result;
  location_t loc = gimple_location (tp);

  cleanup = gimple_try_cleanup (tp);
  result = gimple_try_eval (tp);

  if (region)
    emit_post_landing_pad (&eh_seq, region);

  if (gimple_seq_may_fallthru (cleanup))
    {
      if (!over)
	over = create_artificial_label (loc);
      x = gimple_build_goto (over);
      gimple_seq_add_stmt (&cleanup, x);
    }
  gimple_seq_add_seq (&eh_seq, cleanup);

  if (over)
    {
      x = gimple_build_label (over);
      gimple_seq_add_stmt (&result, x);
    }
  return result;
}
/* A subroutine of lower_try_finally.  Duplicate the tree rooted at T.
   Make sure to record all new labels found.  */

static gimple_seq
lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state)
{
  gimple region = NULL;
  gimple_seq new_seq;

  new_seq = copy_gimple_seq_and_replace_locals (seq);

  if (outer_state->tf)
    region = outer_state->tf->try_finally_expr;
  collect_finally_tree_1 (new_seq, region);

  return new_seq;
}
/* A subroutine of lower_try_finally.  Create a fallthru label for
   the given try_finally state.  The only tricky bit here is that
   we have to make sure to record the label in our outer context.  */

static tree
lower_try_finally_fallthru_label (struct leh_tf_state *tf)
{
  tree label = tf->fallthru_label;
  treemple temp;

  if (!label)
    {
      label = create_artificial_label (gimple_location (tf->try_finally_expr));
      tf->fallthru_label = label;
      if (tf->outer->tf)
	{
	  temp.t = label;
	  record_in_finally_tree (temp, tf->outer->tf->try_finally_expr);
	}
    }
  return label;
}
/* A subroutine of lower_try_finally.  If the eh_protect_cleanup_actions
   langhook returns non-null, then the language requires that the exception
   path out of a try_finally be treated specially.  To wit: the code within
   the finally block may not itself throw an exception.  We have two choices
   here.  First we can duplicate the finally block and wrap it in a
   must_not_throw region.  Second, we can generate code like

	try {
	  finally_block;
	} catch {
	  if (fintmp == eh_edge)
	    protect_cleanup_actions;
	}

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the
   first option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */
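
/* With the first (duplication) choice the exception path becomes,
   schematically:

	eh_sequence:
	  post_landing_pad:
	  try { copy_of_finally_block; }
	  catch MUST_NOT_THROW { protect_cleanup_actions; }
	  resx;

   which is what the code below constructs.  */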
static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
			       struct leh_state *this_state,
			       struct leh_tf_state *tf)
{
  tree protect_cleanup_actions;
  gimple_stmt_iterator gsi;
  bool finally_may_fallthru;
  gimple_seq finally;
  gimple x;

  /* First check for nothing to do.  */
  if (lang_hooks.eh_protect_cleanup_actions == NULL)
    return;
  protect_cleanup_actions = lang_hooks.eh_protect_cleanup_actions ();
  if (protect_cleanup_actions == NULL)
    return;

  finally = gimple_try_cleanup (tf->top_p);
  finally_may_fallthru = gimple_seq_may_fallthru (finally);

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  */
  if (this_state)
    finally = lower_try_finally_dup_block (finally, outer_state);

  /* If this cleanup consists of a TRY_CATCH_EXPR with TRY_CATCH_IS_CLEANUP
     set, the handler of the TRY_CATCH_EXPR is another cleanup which ought
     to be in an enclosing scope, but needs to be implemented at this level
     to avoid a nesting violation (see wrap_temporary_cleanups in
     cp/decl.c).  Since it's logically at an outer level, we should call
     terminate before we get to it, so strip it away before adding the
     MUST_NOT_THROW filter.  */
  gsi = gsi_start (finally);
  x = gsi_stmt (gsi);
  if (gimple_code (x) == GIMPLE_TRY
      && gimple_try_kind (x) == GIMPLE_TRY_CATCH
      && gimple_try_catch_is_cleanup (x))
    {
      gsi_insert_seq_before (&gsi, gimple_try_eval (x), GSI_SAME_STMT);
      gsi_remove (&gsi, false);
    }

  /* Wrap the block with protect_cleanup_actions as the action.  */
  x = gimple_build_eh_must_not_throw (protect_cleanup_actions);
  x = gimple_build_try (finally, gimple_seq_alloc_with_stmt (x),
			GIMPLE_TRY_CATCH);
  finally = lower_eh_must_not_throw (outer_state, x);

  /* Drop all of this into the exception sequence.  */
  emit_post_landing_pad (&eh_seq, tf->region);
  gimple_seq_add_seq (&eh_seq, finally);
  if (finally_may_fallthru)
    emit_resx (&eh_seq, tf->region);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}
/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_nofallthru (struct leh_state *state,
			      struct leh_tf_state *tf)
{
  tree lab, return_val;
  gimple x;
  gimple_seq finally;
  struct goto_queue_node *q, *qe;

  lab = create_artificial_label (gimple_location (tf->try_finally_expr));

  /* We expect that tf->top_p is a GIMPLE_TRY.  */
  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  x = gimple_build_label (lab);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  return_val = NULL;
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL, &return_val);
    else
      do_goto_redirection (q, lab, NULL, tf);

  replace_goto_queue (tf);

  lower_eh_constructs_1 (state, finally);
  gimple_seq_add_seq (&tf->top_p_seq, finally);

  if (tf->may_throw)
    {
      emit_post_landing_pad (&eh_seq, tf->region);

      x = gimple_build_goto (lab);
      gimple_seq_add_stmt (&eh_seq, x);
    }
}
/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  gimple x;
  gimple_seq finally;
  tree finally_label;
  location_t loc = gimple_location (tf->try_finally_expr);

  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  lower_eh_constructs_1 (state, finally);

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
         the head of the FINALLY block.  Append a RESX at the end.  */
      emit_post_landing_pad (&eh_seq, tf->region);
      gimple_seq_add_seq (&eh_seq, finally);
      emit_resx (&eh_seq, tf->region);
      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
	 the two blocks run together; we'll fall out the bottom.  */
      gimple_seq_add_seq (&tf->top_p_seq, finally);
      return;
    }

  finally_label = create_artificial_label (loc);
  x = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  gimple_seq_add_seq (&tf->top_p_seq, finally);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      tree return_val = NULL;
      for (; q < qe; ++q)
	do_return_redirection (q, finally_label, NULL, &return_val);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
	do_goto_redirection (q, finally_label, NULL, tf);
      replace_goto_queue (tf);

      if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label)
	{
	  /* Reachable by goto to fallthru label only.  Redirect it
	     to the new label (already created, sadly), and do not
	     emit the final branch out, or the fallthru label.  */
	  tf->fallthru_label = NULL;
	  return;
	}
    }

  /* Place the original return/goto to the original destination
     immediately after the finally block.  */
  x = tf->goto_queue[0].cont_stmt;
  gimple_seq_add_stmt (&tf->top_p_seq, x);
  maybe_record_in_goto_queue (state, x);
}
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */

static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  gimple_seq finally;
  gimple_seq new_stmt;
  gimple_seq seq;
  gimple x;
  tree tmp;
  location_t tf_loc = gimple_location (tf->try_finally_expr);

  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);
  new_stmt = NULL;

  if (tf->may_fallthru)
    {
      seq = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, seq);
      gimple_seq_add_seq (&new_stmt, seq);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_seq_add_stmt (&new_stmt, x);
    }

  if (tf->may_throw)
    {
      seq = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, seq);

      emit_post_landing_pad (&eh_seq, tf->region);
      gimple_seq_add_seq (&eh_seq, seq);
      emit_resx (&eh_seq, tf->region);
    }

  if (tf->goto_queue)
    {
      struct goto_queue_node *q, *qe;
      tree return_val = NULL;
      int return_index, index;
      struct labels_s
      {
	struct goto_queue_node *q;
	tree label;
      } *labels;

      return_index = VEC_length (tree, tf->dest_array);
      labels = XCNEWVEC (struct labels_s, return_index + 1);

      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
	{
	  index = q->index < 0 ? return_index : q->index;

	  if (!labels[index].q)
	    labels[index].q = q;
	}

      for (index = 0; index < return_index + 1; index++)
	{
	  tree lab;

	  q = labels[index].q;
	  if (! q)
	    continue;

	  lab = labels[index].label
	    = create_artificial_label (tf_loc);

	  if (index == return_index)
	    do_return_redirection (q, lab, NULL, &return_val);
	  else
	    do_goto_redirection (q, lab, NULL, tf);

	  x = gimple_build_label (lab);
	  gimple_seq_add_stmt (&new_stmt, x);

	  seq = lower_try_finally_dup_block (finally, state);
	  lower_eh_constructs_1 (state, seq);
	  gimple_seq_add_seq (&new_stmt, seq);

	  gimple_seq_add_stmt (&new_stmt, q->cont_stmt);
	  maybe_record_in_goto_queue (state, q->cont_stmt);
	}

      for (q = tf->goto_queue; q < qe; q++)
	{
	  tree lab;

	  index = q->index < 0 ? return_index : q->index;

	  if (labels[index].q == q)
	    continue;

	  lab = labels[index].label;

	  if (index == return_index)
	    do_return_redirection (q, lab, NULL, &return_val);
	  else
	    do_goto_redirection (q, lab, NULL, tf);
	}

      replace_goto_queue (tf);
      free (labels);
    }

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_seq (&tf->top_p_seq, new_stmt);
}
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by instrumenting
   each incoming edge and creating a switch statement at the end of the
   finally block that branches to the appropriate destination.  */
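
/* Schematically (an illustrative sketch; the indices are assigned in the
   code below):

	<each incoming edge>	finally_tmp = <edge index>;
				goto finally_label;
	finally_label:
	<finally block>
	switch (finally_tmp)
	  {
	  case fallthru_index:	goto fallthru_label;
	  case return_index:	<original return>;
	  ...
	  }  */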
static void
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree return_val = NULL;
  tree finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  tree last_case;
  VEC (tree,heap) *case_label_vec;
  gimple_seq switch_body;
  gimple x;
  gimple switch_stmt;
  gimple_seq finally;
  tree tmp;
  struct pointer_map_t *cont_map = NULL;
  /* The location of the TRY_FINALLY stmt.  */
  location_t tf_loc = gimple_location (tf->try_finally_expr);
  /* The location of the finally block.  */
  location_t finally_loc;

  switch_body = gimple_seq_alloc ();

  /* Mash the TRY block to the head of the chain.  */
  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  /* The location of the finally is either the last stmt in the finally
     block or the location of the TRY_FINALLY itself.  */
  finally_loc = gimple_seq_last_stmt (tf->top_p_seq) != NULL
		? gimple_location (gimple_seq_last_stmt (tf->top_p_seq))
		: tf_loc;

  /* Lower the finally block itself.  */
  lower_eh_constructs_1 (state, finally);

  /* Prepare for switch statement generation.  */
  nlabels = VEC_length (tree, tf->dest_array);
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + tf->may_throw;
  ndests = fallthru_index + tf->may_fallthru;

  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label (finally_loc);

  /* We use VEC_quick_push on case_label_vec throughout this function,
     since we know the size in advance and allocate precisely as much
     space as we need.  */
  case_label_vec = VEC_alloc (tree, heap, ndests);
  last_case = NULL;
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence the code is
     laid out.  */

  if (tf->may_fallthru)
    {
      x = gimple_build_assign (finally_tmp,
			       build_int_cst (NULL, fallthru_index));
      gimple_seq_add_stmt (&tf->top_p_seq, x);

      last_case = build3 (CASE_LABEL_EXPR, void_type_node,
			  build_int_cst (NULL, fallthru_index),
			  NULL, create_artificial_label (tf_loc));
      VEC_quick_push (tree, case_label_vec, last_case);
      last_case_index++;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&switch_body, x);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_seq_add_stmt (&switch_body, x);
    }

  if (tf->may_throw)
    {
      emit_post_landing_pad (&eh_seq, tf->region);

      x = gimple_build_assign (finally_tmp,
			       build_int_cst (NULL, eh_index));
      gimple_seq_add_stmt (&eh_seq, x);

      x = gimple_build_goto (finally_label);
      gimple_seq_add_stmt (&eh_seq, x);

      last_case = build3 (CASE_LABEL_EXPR, void_type_node,
			  build_int_cst (NULL, eh_index),
			  NULL, create_artificial_label (tf_loc));
      VEC_quick_push (tree, case_label_vec, last_case);
      last_case_index++;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&eh_seq, x);
      emit_resx (&eh_seq, tf->region);
    }

  x = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  gimple_seq_add_seq (&tf->top_p_seq, finally);

  /* Redirect each incoming goto edge.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  /* Prepare the assignments to finally_tmp that are executed upon the
     entrance through a particular edge.  */
  for (; q < qe; ++q)
    {
      gimple_seq mod;
      int switch_id;
      unsigned int case_index;

      mod = gimple_seq_alloc ();

      if (q->index < 0)
	{
	  x = gimple_build_assign (finally_tmp,
				   build_int_cst (NULL, return_index));
	  gimple_seq_add_stmt (&mod, x);
	  do_return_redirection (q, finally_label, mod, &return_val);
	  switch_id = return_index;
	}
      else
	{
	  x = gimple_build_assign (finally_tmp,
				   build_int_cst (NULL, q->index));
	  gimple_seq_add_stmt (&mod, x);
	  do_goto_redirection (q, finally_label, mod, tf);
	  switch_id = q->index;
	}

      case_index = j + q->index;
      if (VEC_length (tree, case_label_vec) <= case_index
	  || !VEC_index (tree, case_label_vec, case_index))
	{
	  tree case_lab;
	  void **slot;
	  case_lab = build3 (CASE_LABEL_EXPR, void_type_node,
			     build_int_cst (NULL, switch_id),
			     NULL, NULL);
	  /* We store the cont_stmt in the pointer map, so that we can recover
	     it in the loop below.  We don't create the new label while
	     walking the goto_queue because pointers don't offer a stable
	     order.  */
	  if (!cont_map)
	    cont_map = pointer_map_create ();
	  slot = pointer_map_insert (cont_map, case_lab);
	  *slot = q->cont_stmt;
	  VEC_quick_push (tree, case_label_vec, case_lab);
	}
    }
  for (j = last_case_index; j < last_case_index + nlabels; j++)
    {
      gimple cont_stmt;
      tree label;
      void **slot;

      last_case = VEC_index (tree, case_label_vec, j);

      gcc_assert (last_case);
      gcc_assert (cont_map);

      slot = pointer_map_contains (cont_map, last_case);
      /* As the comment above suggests, CASE_LABEL (last_case) was just a
	 placeholder, it does not store an actual label, yet.  */
      gcc_assert (slot);
      cont_stmt = *(gimple *) slot;

      label = create_artificial_label (tf_loc);
      CASE_LABEL (last_case) = label;

      x = gimple_build_label (label);
      gimple_seq_add_stmt (&switch_body, x);
      gimple_seq_add_stmt (&switch_body, cont_stmt);
      maybe_record_in_goto_queue (state, cont_stmt);
    }
  if (cont_map)
    pointer_map_destroy (cont_map);

  replace_goto_queue (tf);

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  sort_case_labels (case_label_vec);

  /* Build the switch statement, setting last_case to be the default
     label.  */
  switch_stmt = gimple_build_switch_vec (finally_tmp, last_case,
					 case_label_vec);
  gimple_set_location (switch_stmt, finally_loc);

  /* Need to link SWITCH_STMT after running replace_goto_queue
     due to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_stmt (&tf->top_p_seq, switch_stmt);
  gimple_seq_add_seq (&tf->top_p_seq, switch_body);
}
/* Decide whether or not we are going to duplicate the finally block.
   There are several considerations.

   First, if this is Java, then the finally block contains code
   written by the user.  It has line numbers associated with it,
   so duplicating the block means it's difficult to set a breakpoint.
   Since controlling code generation via -g is verboten, we simply
   never duplicate code without optimization.

   Second, we'd like to prevent egregious code growth.  One way to
   do this is to estimate the size of the finally block, multiply
   that by the number of copies we'd need to make, and compare against
   the estimate of the size of the switch machinery we'd have to add.  */
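
/* For example (illustrative numbers only): a finally block of estimated
   size 8 with 3 destinations gives f_estimate = (8 + 1) * 3 = 27 against
   sw_estimate = 10 + 2 * 3 = 16, so when optimizing for size we choose
   the switch (27 < 16 is false); a tiny 2-insn cleanup gives
   (2 + 1) * 3 = 9 < 16 and is duplicated instead.  */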
static bool
decide_copy_try_finally (int ndests, gimple_seq finally)
{
  int f_estimate, sw_estimate;

  /* Finally estimate N times, plus N gotos.  */
  f_estimate = count_insns_seq (finally, &eni_size_weights);
  f_estimate = (f_estimate + 1) * ndests;

  /* Switch statement (cost 10), N variable assignments, N gotos.  */
  sw_estimate = 10 + 2 * ndests;

  /* Optimize for size clearly wants our best guess.  */
  if (optimize_function_for_size_p (cfun))
    return f_estimate < sw_estimate;

  /* ??? These numbers are completely made up so far.  */
  if (optimize > 1)
    return f_estimate < 100 || f_estimate < sw_estimate * 2;
  else
    return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
}
/* REG is the enclosing region for a possible cleanup region, or the region
   itself.  Returns TRUE if such a region would be unreachable.

   Cleanup regions within a must-not-throw region aren't actually reachable
   even if there are throwing stmts within them, because the personality
   routine will call terminate before unwinding.  */

static bool
cleanup_is_dead_in (eh_region reg)
{
  while (reg && reg->type == ERT_CLEANUP)
    reg = reg->outer;
  return (reg && reg->type == ERT_MUST_NOT_THROW);
}
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_FINALLY node
   to a sequence of labels and blocks, plus the exception region trees
   that record all the magic.  This is complicated by the need to
   arrange for the FINALLY block to be executed on all exits.  */

static gimple_seq
lower_try_finally (struct leh_state *state, gimple tp)
{
  struct leh_tf_state this_tf;
  struct leh_state this_state;
  int ndests;
  gimple_seq old_eh_seq;

  /* Process the try block.  */

  memset (&this_tf, 0, sizeof (this_tf));
  this_tf.try_finally_expr = tp;
  this_tf.top_p = tp;
  this_tf.outer = state;
  if (using_eh_for_cleanups_p && !cleanup_is_dead_in (state->cur_region))
    {
      this_tf.region = gen_eh_region_cleanup (state->cur_region);
      this_state.cur_region = this_tf.region;
    }
  else
    {
      this_tf.region = NULL;
      this_state.cur_region = state->cur_region;
    }

  this_state.ehp_region = state->ehp_region;
  this_state.tf = &this_tf;

  old_eh_seq = eh_seq;
  eh_seq = NULL;

  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));

  /* Determine if the try block is escaped through the bottom.  */
  this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));

  /* Determine if any exceptions are possible within the try block.  */
  if (this_tf.region)
    this_tf.may_throw = eh_region_may_contain_throw (this_tf.region);
  if (this_tf.may_throw)
    honor_protect_cleanup_actions (state, &this_state, &this_tf);

  /* Determine how many edges (still) reach the finally block.  Or rather,
     how many destinations are reached by the finally block.  Use this to
     determine how we process the finally block itself.  */

  ndests = VEC_length (tree, this_tf.dest_array);
  ndests += this_tf.may_fallthru;
  ndests += this_tf.may_return;
  ndests += this_tf.may_throw;

  /* If the FINALLY block is not reachable, dike it out.  */
  if (ndests == 0)
    {
      gimple_seq_add_seq (&this_tf.top_p_seq, gimple_try_eval (tp));
      gimple_try_set_cleanup (tp, NULL);
    }
  /* If the finally block doesn't fall through, then any destination
     we might try to impose there isn't reached either.  There may be
     some minor amount of cleanup and redirection still needed.  */
  else if (!gimple_seq_may_fallthru (gimple_try_cleanup (tp)))
    lower_try_finally_nofallthru (state, &this_tf);

  /* We can easily special-case redirection to a single destination.  */
  else if (ndests == 1)
    lower_try_finally_onedest (state, &this_tf);
  else if (decide_copy_try_finally (ndests, gimple_try_cleanup (tp)))
    lower_try_finally_copy (state, &this_tf);
  else
    lower_try_finally_switch (state, &this_tf);

  /* If someone requested we add a label at the end of the transformed
     block, do so.  */
  if (this_tf.fallthru_label)
    {
      /* This must be reached only if ndests == 0.  */
      gimple x = gimple_build_label (this_tf.fallthru_label);
      gimple_seq_add_stmt (&this_tf.top_p_seq, x);
    }

  VEC_free (tree, heap, this_tf.dest_array);
  if (this_tf.goto_queue)
    free (this_tf.goto_queue);
  if (this_tf.goto_queue_map)
    pointer_map_destroy (this_tf.goto_queue_map);

  /* If there was an old (aka outer) eh_seq, append the current eh_seq.
     If there was no old eh_seq, then the append is trivially already done.  */
  if (old_eh_seq)
    {
      if (eh_seq == NULL)
	eh_seq = old_eh_seq;
      else
	{
	  gimple_seq new_eh_seq = eh_seq;
	  eh_seq = old_eh_seq;
	  gimple_seq_add_seq (&eh_seq, new_eh_seq);
	}
    }

  return this_tf.top_p_seq;
}
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_CATCH with a
   list of GIMPLE_CATCH to a sequence of labels and blocks, plus the
   exception region trees that record all the magic.  */

static gimple_seq
lower_catch (struct leh_state *state, gimple tp)
{
  eh_region try_region = NULL;
  struct leh_state this_state = *state;
  gimple_stmt_iterator gsi;
  tree out_label;
  gimple_seq new_seq;
  gimple x;
  location_t try_catch_loc = gimple_location (tp);

  if (flag_exceptions)
    {
      try_region = gen_eh_region_try (state->cur_region);
      this_state.cur_region = try_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));

  if (!eh_region_may_contain_throw (try_region))
    return gimple_try_eval (tp);

  new_seq = NULL;
  emit_eh_dispatch (&new_seq, try_region);
  emit_resx (&new_seq, try_region);

  this_state.cur_region = state->cur_region;
  this_state.ehp_region = try_region;

  out_label = NULL;
  for (gsi = gsi_start (gimple_try_cleanup (tp));
       !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      eh_catch c;
      gimple gcatch;
      gimple_seq handler;

      gcatch = gsi_stmt (gsi);
      c = gen_eh_region_catch (try_region, gimple_catch_types (gcatch));

      handler = gimple_catch_handler (gcatch);
      lower_eh_constructs_1 (&this_state, handler);

      c->label = create_artificial_label (UNKNOWN_LOCATION);
      x = gimple_build_label (c->label);
      gimple_seq_add_stmt (&new_seq, x);

      gimple_seq_add_seq (&new_seq, handler);

      if (gimple_seq_may_fallthru (new_seq))
	{
	  if (!out_label)
	    out_label = create_artificial_label (try_catch_loc);

	  x = gimple_build_goto (out_label);
	  gimple_seq_add_stmt (&new_seq, x);
	}
      if (!c->type_list)
	break;
    }

  gimple_try_set_cleanup (tp, new_seq);

  return frob_into_branch_around (tp, try_region, out_label);
}
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY with a
   GIMPLE_EH_FILTER to a sequence of labels and blocks, plus the exception
   region trees that record all the magic.  */

static gimple_seq
lower_eh_filter (struct leh_state *state, gimple tp)
{
  struct leh_state this_state = *state;
  eh_region this_region = NULL;
  gimple inner, x;
  gimple_seq new_seq;

  inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));

  if (flag_exceptions)
    {
      this_region = gen_eh_region_allowed (state->cur_region,
					   gimple_eh_filter_types (inner));
      this_state.cur_region = this_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));

  if (!eh_region_may_contain_throw (this_region))
    return gimple_try_eval (tp);

  new_seq = NULL;
  this_state.cur_region = state->cur_region;
  this_state.ehp_region = this_region;

  emit_eh_dispatch (&new_seq, this_region);
  emit_resx (&new_seq, this_region);

  this_region->u.allowed.label = create_artificial_label (UNKNOWN_LOCATION);
  x = gimple_build_label (this_region->u.allowed.label);
  gimple_seq_add_stmt (&new_seq, x);

  lower_eh_constructs_1 (&this_state, gimple_eh_filter_failure (inner));
  gimple_seq_add_seq (&new_seq, gimple_eh_filter_failure (inner));

  gimple_try_set_cleanup (tp, new_seq);

  return frob_into_branch_around (tp, this_region, NULL);
}
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY with
   a GIMPLE_EH_MUST_NOT_THROW to a sequence of labels and blocks,
   plus the exception region trees that record all the magic.  */

static gimple_seq
lower_eh_must_not_throw (struct leh_state *state, gimple tp)
{
  struct leh_state this_state = *state;

  if (flag_exceptions)
    {
      gimple inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
      eh_region this_region;

      this_region = gen_eh_region_must_not_throw (state->cur_region);
      this_region->u.must_not_throw.failure_decl
	= gimple_eh_must_not_throw_fndecl (inner);
      this_region->u.must_not_throw.failure_loc = gimple_location (tp);

      /* In order to get mangling applied to this decl, we must mark it
	 used now.  Otherwise, pass_ipa_free_lang_data won't think it
	 needs to happen.  */
      TREE_USED (this_region->u.must_not_throw.failure_decl) = 1;

      this_state.cur_region = this_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));

  return gimple_try_eval (tp);
}
/* Implement a cleanup expression.  This is similar to try-finally,
   except that we only execute the cleanup block for exception edges.  */

static gimple_seq
lower_cleanup (struct leh_state *state, gimple tp)
{
  struct leh_state this_state = *state;
  eh_region this_region = NULL;
  struct leh_tf_state fake_tf;
  gimple_seq result;
  bool cleanup_dead = cleanup_is_dead_in (state->cur_region);

  if (flag_exceptions && !cleanup_dead)
    {
      this_region = gen_eh_region_cleanup (state->cur_region);
      this_state.cur_region = this_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));

  if (cleanup_dead || !eh_region_may_contain_throw (this_region))
    return gimple_try_eval (tp);

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.top_p = fake_tf.try_finally_expr = tp;
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
  fake_tf.may_throw = true;

  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  if (fake_tf.may_throw)
    {
      /* In this case honor_protect_cleanup_actions had nothing to do,
	 and we should process this normally.  */
      lower_eh_constructs_1 (state, gimple_try_cleanup (tp));
      result = frob_into_branch_around (tp, this_region,
					fake_tf.fallthru_label);
    }
  else
    {
      /* In this case honor_protect_cleanup_actions did nearly all of
	 the work.  All we have left is to append the fallthru_label.  */

      result = gimple_try_eval (tp);
      if (fake_tf.fallthru_label)
	{
	  gimple x = gimple_build_label (fake_tf.fallthru_label);
	  gimple_seq_add_stmt (&result, x);
	}
    }
  return result;
}
/* Main loop for lowering eh constructs.  Also moves gsi to the next
   statement.  */

static void
lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi)
{
  gimple_seq replace;
  gimple x;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      {
	tree fndecl = gimple_call_fndecl (stmt);
	tree rhs, lhs;

	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_EH_POINTER:
	      /* The front end may have generated a call to
		 __builtin_eh_pointer (0) within a catch region.  Replace
		 this zero argument with the current catch region number.  */
	      if (state->ehp_region)
		{
		  tree nr = build_int_cst (NULL, state->ehp_region->index);
		  gimple_call_set_arg (stmt, 0, nr);
		}
	      else
		{
		  /* The user has done something silly.  Remove it.  */
		  rhs = null_pointer_node;
		  goto do_replace;
		}
	      break;

	    case BUILT_IN_EH_FILTER:
	      /* ??? This should never appear, but since it's a builtin it
		 is accessible to abuse by users.  Just remove it and
		 replace the use with the arbitrary value zero.  */
	      rhs = build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
	    do_replace:
	      lhs = gimple_call_lhs (stmt);
	      x = gimple_build_assign (lhs, rhs);
	      gsi_insert_before (gsi, x, GSI_SAME_STMT);
	      /* FALLTHRU */

	    case BUILT_IN_EH_COPY_VALUES:
	      /* Likewise this should not appear.  Remove it.  */
	      gsi_remove (gsi, true);
	      return;

	    default:
	      break;
	    }
      }
      /* FALLTHRU */

    case GIMPLE_ASSIGN:
      /* If the stmt can throw use a new temporary for the assignment
	 to a LHS.  This makes sure the old value of the LHS is
	 available on the EH edge.  Only do so for statements that
	 potentially fall thru (no noreturn calls e.g.), otherwise
	 this new assignment might create fake fallthru regions.  */
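      /* E.g. (a sketch): a potentially-throwing "x = foo ();" becomes

		tmp = foo ();	<- may throw; x keeps its old value
				   on the EH edge
		x = tmp;	*/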
      if (stmt_could_throw_p (stmt)
	  && gimple_has_lhs (stmt)
	  && gimple_stmt_may_fallthru (stmt)
	  && !tree_could_throw_p (gimple_get_lhs (stmt))
	  && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
	  gimple s = gimple_build_assign (lhs, tmp);
	  gimple_set_location (s, gimple_location (stmt));
	  gimple_set_block (s, gimple_block (stmt));
	  gimple_set_lhs (stmt, tmp);
	  if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
	      || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
	    DECL_GIMPLE_REG_P (tmp) = 1;
	  gsi_insert_after (gsi, s, GSI_SAME_STMT);
	}
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && stmt_could_throw_p (stmt))
	{
	  record_stmt_eh_region (state->cur_region, stmt);
	  note_eh_region_may_contain_throw (state->cur_region);
	}
      break;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
      maybe_record_in_goto_queue (state, stmt);
      break;
    case GIMPLE_SWITCH:
      verify_norecord_switch_expr (state, stmt);
      break;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
	replace = lower_try_finally (state, stmt);
      else
	{
	  x = gimple_seq_first_stmt (gimple_try_cleanup (stmt));
	  if (!x)
	    {
	      replace = gimple_try_eval (stmt);
	      lower_eh_constructs_1 (state, replace);
	    }
	  else
	    switch (gimple_code (x))
	      {
	      case GIMPLE_CATCH:
		replace = lower_catch (state, stmt);
		break;
	      case GIMPLE_EH_FILTER:
		replace = lower_eh_filter (state, stmt);
		break;
	      case GIMPLE_EH_MUST_NOT_THROW:
		replace = lower_eh_must_not_throw (state, stmt);
		break;
	      default:
		replace = lower_cleanup (state, stmt);
		break;
	      }
	}

      /* Remove the old stmt and insert the transformed sequence
	 instead.  */
      gsi_insert_seq_before (gsi, replace, GSI_SAME_STMT);
      gsi_remove (gsi, true);

      /* Return since we don't want gsi_next ().  */
      return;

    default:
      /* A type, a decl, or some kind of statement that we're not
	 interested in.  Don't walk them.  */
      break;
    }

  gsi_next (gsi);
}
/* A helper to unwrap a gimple_seq and feed stmts to lower_eh_constructs_2.  */

static void
lower_eh_constructs_1 (struct leh_state *state, gimple_seq seq)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (seq); !gsi_end_p (gsi);)
    lower_eh_constructs_2 (state, &gsi);
}
static unsigned int
lower_eh_constructs (void)
{
  struct leh_state null_state;
  gimple_seq bodyp;

  bodyp = gimple_body (current_function_decl);
  if (bodyp == NULL)
    return 0;

  finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
  eh_region_may_contain_throw_map = BITMAP_ALLOC (NULL);
  memset (&null_state, 0, sizeof (null_state));

  collect_finally_tree_1 (bodyp, NULL);
  lower_eh_constructs_1 (&null_state, bodyp);

  /* We assume there's a return statement, or something, at the end of
     the function, and thus plopping the EH sequence afterward won't
     change anything.  */
  gcc_assert (!gimple_seq_may_fallthru (bodyp));
  gimple_seq_add_seq (&bodyp, eh_seq);

  /* We assume that since BODYP already existed, adding EH_SEQ to it
     didn't change its value, and we don't have to re-set the function.  */
  gcc_assert (bodyp == gimple_body (current_function_decl));

  htab_delete (finally_tree);
  BITMAP_FREE (eh_region_may_contain_throw_map);
  eh_seq = NULL;

  /* If this function needs a language specific EH personality routine
     and the frontend didn't already set one do so now.  */
  if (function_needs_eh_personality (cfun) == eh_personality_lang
      && !DECL_FUNCTION_PERSONALITY (current_function_decl))
    DECL_FUNCTION_PERSONALITY (current_function_decl)
      = lang_hooks.eh_personality ();

  return 0;
}
struct gimple_opt_pass pass_lower_eh =
{
 {
  GIMPLE_PASS,
  "eh",					/* name */
  NULL,					/* gate */
  lower_eh_constructs,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_EH,				/* tv_id */
  PROP_gimple_lcf,			/* properties_required */
  PROP_gimple_leh,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func			/* todo_flags_finish */
 }
};
/* Create the multiple edges from an EH_DISPATCH statement to all of
   the possible handlers for its EH region.  Return true if there's
   no fallthru edge; false if there is.  */

bool
make_eh_dispatch_edges (gimple stmt)
{
  eh_region r;
  eh_catch c;
  basic_block src, dst;

  r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
  src = gimple_bb (stmt);

  switch (r->type)
    {
    case ERT_TRY:
      for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	{
	  dst = label_to_block (c->label);
	  make_edge (src, dst, 0);

	  /* A catch-all handler doesn't have a fallthru.  */
	  if (c->type_list == NULL)
	    return false;
	}
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      dst = label_to_block (r->u.allowed.label);
      make_edge (src, dst, 0);
      break;

    default:
      gcc_unreachable ();
    }

  return true;
}
/* Create the single EH edge from STMT to its nearest landing pad,
   if there is such a landing pad within the current function.  */

void
make_eh_edges (gimple stmt)
{
  basic_block src, dst;
  eh_landing_pad lp;
  int lp_nr;

  lp_nr = lookup_stmt_eh_lp (stmt);
  if (lp_nr <= 0)
    return;

  lp = get_eh_landing_pad_from_number (lp_nr);
  gcc_assert (lp != NULL);

  src = gimple_bb (stmt);
  dst = label_to_block (lp->post_landing_pad);
  make_edge (src, dst, EDGE_EH);
}
/* Do the work in redirecting EDGE_IN to NEW_BB within the EH region tree;
   do not actually perform the final edge redirection.

   CHANGE_REGION is true when we're being called from cleanup_empty_eh and
   we intend to change the destination EH region as well; this means
   EH_LANDING_PAD_NR must already be set on the destination block label.
   If false, we're being called from generic cfg manipulation code and we
   should preserve our place within the region tree.  */

static void
redirect_eh_edge_1 (edge edge_in, basic_block new_bb, bool change_region)
{
  eh_landing_pad old_lp, new_lp;
  basic_block old_bb;
  gimple throw_stmt;
  int old_lp_nr, new_lp_nr;
  tree old_label, new_label;
  edge_iterator ei;
  edge e;

  old_bb = edge_in->dest;
  old_label = gimple_block_label (old_bb);
  old_lp_nr = EH_LANDING_PAD_NR (old_label);
  gcc_assert (old_lp_nr > 0);
  old_lp = get_eh_landing_pad_from_number (old_lp_nr);

  throw_stmt = last_stmt (edge_in->src);
  gcc_assert (lookup_stmt_eh_lp (throw_stmt) == old_lp_nr);

  new_label = gimple_block_label (new_bb);

  /* Look for an existing region that might be using NEW_BB already.  */
  new_lp_nr = EH_LANDING_PAD_NR (new_label);
  if (new_lp_nr)
    {
      new_lp = get_eh_landing_pad_from_number (new_lp_nr);
      gcc_assert (new_lp);

      /* Unless CHANGE_REGION is true, the new and old landing pad
	 had better be associated with the same EH region.  */
      gcc_assert (change_region || new_lp->region == old_lp->region);
    }
  else
    {
      new_lp = NULL;
      gcc_assert (!change_region);
    }

  /* Notice when we redirect the last EH edge away from OLD_BB.  */
  FOR_EACH_EDGE (e, ei, old_bb->preds)
    if (e != edge_in && (e->flags & EDGE_EH))
      break;

  if (new_lp)
    {
      /* NEW_LP already exists.  If there are still edges into OLD_LP,
	 there's nothing to do with the EH tree.  If there are no more
	 edges into OLD_LP, then we want to remove OLD_LP as it is unused.
	 If CHANGE_REGION is true, then our caller is expecting to remove
	 the landing pad.  */
      if (e == NULL && !change_region)
	remove_eh_landing_pad (old_lp);
    }
  else
    {
      /* No correct landing pad exists.  If there are no more edges
	 into OLD_LP, then we can simply re-use the existing landing pad.
	 Otherwise, we have to create a new landing pad.  */
      if (e == NULL)
	{
	  EH_LANDING_PAD_NR (old_lp->post_landing_pad) = 0;
	  new_lp = old_lp;
	}
      else
	new_lp = gen_eh_landing_pad (old_lp->region);
      new_lp->post_landing_pad = new_label;
      EH_LANDING_PAD_NR (new_label) = new_lp->index;
    }

  /* Maybe move the throwing statement to the new region.  */
  if (old_lp != new_lp)
    {
      remove_stmt_from_eh_lp (throw_stmt);
      add_stmt_to_eh_lp (throw_stmt, new_lp->index);
    }
}
/* Redirect EH edge E to NEW_BB.  */

edge
redirect_eh_edge (edge edge_in, basic_block new_bb)
{
  redirect_eh_edge_1 (edge_in, new_bb, false);
  return ssa_redirect_edge (edge_in, new_bb);
}
/* This is a subroutine of gimple_redirect_edge_and_branch.  Update the
   labels for redirecting a non-fallthru EH_DISPATCH edge E to NEW_BB.
   The actual edge update will happen in the caller.  */

void
redirect_eh_dispatch_edge (gimple stmt, edge e, basic_block new_bb)
{
  tree new_lab = gimple_block_label (new_bb);
  bool any_changed = false;
  basic_block old_bb;
  eh_region r;
  eh_catch c;

  r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
  switch (r->type)
    {
    case ERT_TRY:
      for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	{
	  old_bb = label_to_block (c->label);
	  if (old_bb == e->dest)
	    {
	      c->label = new_lab;
	      any_changed = true;
	    }
	}
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      old_bb = label_to_block (r->u.allowed.label);
      gcc_assert (old_bb == e->dest);
      r->u.allowed.label = new_lab;
      any_changed = true;
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (any_changed);
}
2263 /* Helper function for operation_could_trap_p and stmt_could_throw_p. */
2266 operation_could_trap_helper_p (enum tree_code op,
2277 case TRUNC_DIV_EXPR:
2279 case FLOOR_DIV_EXPR:
2280 case ROUND_DIV_EXPR:
2281 case EXACT_DIV_EXPR:
2283 case FLOOR_MOD_EXPR:
2284 case ROUND_MOD_EXPR:
2285 case TRUNC_MOD_EXPR:
2287 if (honor_snans || honor_trapv)
2290 return flag_trapping_math;
2291 if (!TREE_CONSTANT (divisor) || integer_zerop (divisor))
2300 /* Some floating point comparisons may trap. */
2305 case UNORDERED_EXPR:
2315 case FIX_TRUNC_EXPR:
2316 /* Conversion of floating point might trap. */
2322 /* These operations don't trap with floating point. */
2330 /* Any floating arithmetic may trap. */
2331 if (fp_operation && flag_trapping_math)
2338 /* Any floating arithmetic may trap. */
2339 if (fp_operation && flag_trapping_math)
2347 /* Return true if operation OP may trap. FP_OPERATION is true if OP is applied
2348 to floating-point values. HONOR_TRAPV is true if OP is applied to integer
2349 type operands that may trap. If OP is a division operator, DIVISOR contains
2350 the value of the divisor. */
2353 operation_could_trap_p (enum tree_code op, bool fp_operation, bool honor_trapv,
2356 bool honor_nans = (fp_operation && flag_trapping_math
2357 && !flag_finite_math_only);
2358 bool honor_snans = fp_operation && flag_signaling_nans != 0;
2361 if (TREE_CODE_CLASS (op) != tcc_comparison
2362 && TREE_CODE_CLASS (op) != tcc_unary
2363 && TREE_CODE_CLASS (op) != tcc_binary)
2366 return operation_could_trap_helper_p (op, fp_operation, honor_trapv,
2367 honor_nans, honor_snans, divisor,
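/* Informal examples of the resulting classification (assuming default
   flags unless noted): "x / y" with non-constant Y may trap on division
   by zero, while "x / 4" may not; "a + b" on doubles counts as trapping
   only with -ftrapping-math; and with HONOR_TRAPV even signed integer
   addition is considered potentially trapping.  */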
2371 /* Return true if EXPR can trap, as in dereferencing an invalid pointer
2372 location or floating point arithmetic. Cf. the rtl version, may_trap_p.
2373 This routine expects only GIMPLE lhs or rhs input. */
2376 tree_could_trap_p (tree expr)
2378 enum tree_code code;
2379 bool fp_operation = false;
2380 bool honor_trapv = false;
2381 tree t, base, div = NULL_TREE;
2386 code = TREE_CODE (expr);
2387 t = TREE_TYPE (expr);
2391 if (COMPARISON_CLASS_P (expr))
2392 fp_operation = FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)));
2394 fp_operation = FLOAT_TYPE_P (t);
2395 honor_trapv = INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t);
2398 if (TREE_CODE_CLASS (code) == tcc_binary)
2399 div = TREE_OPERAND (expr, 1);
2400 if (operation_could_trap_p (code, fp_operation, honor_trapv, div))
2406 case TARGET_MEM_REF:
2407 if (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR
2408 && !TMR_INDEX (expr) && !TMR_INDEX2 (expr))
2410 return !TREE_THIS_NOTRAP (expr);
2416 case VIEW_CONVERT_EXPR:
2417 case WITH_SIZE_EXPR:
2418 expr = TREE_OPERAND (expr, 0);
2419 code = TREE_CODE (expr);
2422 case ARRAY_RANGE_REF:
2423 base = TREE_OPERAND (expr, 0);
2424 if (tree_could_trap_p (base))
2426 if (TREE_THIS_NOTRAP (expr))
2428 return !range_in_array_bounds_p (expr);
2431 base = TREE_OPERAND (expr, 0);
2432 if (tree_could_trap_p (base))
2434 if (TREE_THIS_NOTRAP (expr))
2436 return !in_array_bounds_p (expr);
2439 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
2443 case MISALIGNED_INDIRECT_REF:
2444 return !TREE_THIS_NOTRAP (expr);
2447 return TREE_THIS_VOLATILE (expr);
2450 t = get_callee_fndecl (expr);
2451 /* Assume that calls to weak functions may trap. */
2452 if (!t || !DECL_P (t) || DECL_WEAK (t))
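/* Summarizing by example (informal, not exhaustive): "*p" traps unless
   TREE_THIS_NOTRAP says the pointer is known valid; "a[i]" traps unless
   the index is provably in bounds; volatile-flagged asms, calls to weak
   functions that may be undefined, and any arithmetic flagged by
   operation_could_trap_p above are likewise considered trapping.  */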
2462 /* Helper for stmt_could_throw_p. Return true if STMT (assumed to be
2463 an assignment or a conditional) may throw. */
2466 stmt_could_throw_1_p (gimple stmt)
2468 enum tree_code code = gimple_expr_code (stmt);
2469 bool honor_nans = false;
2470 bool honor_snans = false;
2471 bool fp_operation = false;
2472 bool honor_trapv = false;
2477 if (TREE_CODE_CLASS (code) == tcc_comparison
2478 || TREE_CODE_CLASS (code) == tcc_unary
2479 || TREE_CODE_CLASS (code) == tcc_binary)
2481 t = gimple_expr_type (stmt);
2482 fp_operation = FLOAT_TYPE_P (t);
2485 honor_nans = flag_trapping_math && !flag_finite_math_only;
2486 honor_snans = flag_signaling_nans != 0;
2488 else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
2492 /* Check if the main expression may trap. */
2493 t = is_gimple_assign (stmt) ? gimple_assign_rhs2 (stmt) : NULL;
2494 ret = operation_could_trap_helper_p (code, fp_operation, honor_trapv,
2495 honor_nans, honor_snans, t,
2500 /* If the expression does not trap, see if any of the individual operands may trap. */
2502 for (i = 0; i < gimple_num_ops (stmt); i++)
2503 if (tree_could_trap_p (gimple_op (stmt, i)))
2510 /* Return true if statement STMT could throw an exception. */
2513 stmt_could_throw_p (gimple stmt)
2515 if (!flag_exceptions)
2518 /* The only statements that can throw an exception are assignments,
2519 conditionals, calls, resx, and asms. */
2520 switch (gimple_code (stmt))
2526 return !gimple_call_nothrow_p (stmt);
2530 if (!cfun->can_throw_non_call_exceptions)
2532 return stmt_could_throw_1_p (stmt);
2535 if (!cfun->can_throw_non_call_exceptions)
2537 return gimple_asm_volatile_p (stmt);
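/* A rough picture of the classification (a sketch, not a specification):
   a GIMPLE_CALL throws unless marked nothrow; GIMPLE_RESX throws by
   construction; an assignment such as "x = *p" or "z = a / b" and a
   volatile asm can throw only when can_throw_non_call_exceptions is set,
   via the trap tests above; labels, debug statements and the like never
   throw.  */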
2545 /* Return true if expression T could throw an exception. */
2548 tree_could_throw_p (tree t)
2550 if (!flag_exceptions)
2552 if (TREE_CODE (t) == MODIFY_EXPR)
2554 if (cfun->can_throw_non_call_exceptions
2555 && tree_could_trap_p (TREE_OPERAND (t, 0)))
2557 t = TREE_OPERAND (t, 1);
2560 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2561 t = TREE_OPERAND (t, 0);
2562 if (TREE_CODE (t) == CALL_EXPR)
2563 return (call_expr_flags (t) & ECF_NOTHROW) == 0;
2564 if (cfun->can_throw_non_call_exceptions)
2565 return tree_could_trap_p (t);
2569 /* Return true if STMT can throw an exception that is not caught within
2570 the current function (CFUN). */
2573 stmt_can_throw_external (gimple stmt)
2577 if (!stmt_could_throw_p (stmt))
2580 lp_nr = lookup_stmt_eh_lp (stmt);
2584 /* Return true if STMT can throw an exception that is caught within
2585 the current function (CFUN). */
2588 stmt_can_throw_internal (gimple stmt)
2592 if (!stmt_could_throw_p (stmt))
2595 lp_nr = lookup_stmt_eh_lp (stmt);
2599 /* Given a statement STMT in IFUN, if STMT can no longer throw, then
2600 remove any entry it might have from the EH table. Return true if
2601 any change was made. */
2604 maybe_clean_eh_stmt_fn (struct function *ifun, gimple stmt)
2606 if (stmt_could_throw_p (stmt))
2608 return remove_stmt_from_eh_lp_fn (ifun, stmt);
2611 /* Likewise, but always use the current function. */
2614 maybe_clean_eh_stmt (gimple stmt)
2616 return maybe_clean_eh_stmt_fn (cfun, stmt);
2619 /* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
2620 OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
2621 in the table if it should be in there. Return TRUE if a replacement was
2622 done that may require an EH edge purge. */
2625 maybe_clean_or_replace_eh_stmt (gimple old_stmt, gimple new_stmt)
2627 int lp_nr = lookup_stmt_eh_lp (old_stmt);
2631 bool new_stmt_could_throw = stmt_could_throw_p (new_stmt);
2633 if (new_stmt == old_stmt && new_stmt_could_throw)
2636 remove_stmt_from_eh_lp (old_stmt);
2637 if (new_stmt_could_throw)
2639 add_stmt_to_eh_lp (new_stmt, lp_nr);
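/* A typical caller pattern, sketched (OLD_STMT, NEW_STMT and CFG_CHANGED
   are placeholders): after folding rewrites a possibly-throwing statement,

       if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt)
           && gimple_purge_dead_eh_edges (gimple_bb (new_stmt)))
         cfg_changed = true;

   removes any EH successor edges that the new statement can no longer
   take.  */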
2649 /* Given a statement OLD_STMT in OLD_FUN and a duplicate statement NEW_STMT
2650 in NEW_FUN, copy the EH table data from OLD_STMT to NEW_STMT. The MAP
2651 operand is the return value of duplicate_eh_regions. */
2654 maybe_duplicate_eh_stmt_fn (struct function *new_fun, gimple new_stmt,
2655 struct function *old_fun, gimple old_stmt,
2656 struct pointer_map_t *map, int default_lp_nr)
2658 int old_lp_nr, new_lp_nr;
2661 if (!stmt_could_throw_p (new_stmt))
2664 old_lp_nr = lookup_stmt_eh_lp_fn (old_fun, old_stmt);
2667 if (default_lp_nr == 0)
2669 new_lp_nr = default_lp_nr;
2671 else if (old_lp_nr > 0)
2673 eh_landing_pad old_lp, new_lp;
2675 old_lp = VEC_index (eh_landing_pad, old_fun->eh->lp_array, old_lp_nr);
2676 slot = pointer_map_contains (map, old_lp);
2677 new_lp = (eh_landing_pad) *slot;
2678 new_lp_nr = new_lp->index;
2682 eh_region old_r, new_r;
2684 old_r = VEC_index (eh_region, old_fun->eh->region_array, -old_lp_nr);
2685 slot = pointer_map_contains (map, old_r);
2686 new_r = (eh_region) *slot;
2687 new_lp_nr = -new_r->index;
2690 add_stmt_to_eh_lp_fn (new_fun, new_stmt, new_lp_nr);
2694 /* Similar, but both OLD_STMT and NEW_STMT are within the current function,
2695 and thus no remapping is required. */
2698 maybe_duplicate_eh_stmt (gimple new_stmt, gimple old_stmt)
2702 if (!stmt_could_throw_p (new_stmt))
2705 lp_nr = lookup_stmt_eh_lp (old_stmt);
2709 add_stmt_to_eh_lp (new_stmt, lp_nr);
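/* Sketch of the intended use (simplified): a pass that copies a statement
   within the current function just mirrors its EH table entry,

       copy = gimple_copy (stmt);
       maybe_duplicate_eh_stmt (copy, stmt);

   while cross-function duplication goes through the _fn variant above
   with the pointer map produced by duplicate_eh_regions.  */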
2713 /* Returns TRUE if oneh and twoh are exception handlers (gimple_try_cleanup of
2714 GIMPLE_TRY) that are similar enough to be considered the same. Currently
2715 this only handles handlers consisting of a single call, as that's the
2716 important case for C++: a destructor call for a particular object showing
2717 up in multiple handlers. */
2720 same_handler_p (gimple_seq oneh, gimple_seq twoh)
2722 gimple_stmt_iterator gsi;
2726 gsi = gsi_start (oneh);
2727 if (!gsi_one_before_end_p (gsi))
2729 ones = gsi_stmt (gsi);
2731 gsi = gsi_start (twoh);
2732 if (!gsi_one_before_end_p (gsi))
2734 twos = gsi_stmt (gsi);
2736 if (!is_gimple_call (ones)
2737 || !is_gimple_call (twos)
2738 || gimple_call_lhs (ones)
2739 || gimple_call_lhs (twos)
2740 || gimple_call_chain (ones)
2741 || gimple_call_chain (twos)
2742 || !operand_equal_p (gimple_call_fn (ones), gimple_call_fn (twos), 0)
2743 || gimple_call_num_args (ones) != gimple_call_num_args (twos))
2746 for (ai = 0; ai < gimple_call_num_args (ones); ++ai)
2747 if (!operand_equal_p (gimple_call_arg (ones, ai),
2748 gimple_call_arg (twos, ai), 0))
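/* For instance (informal): two cleanups that each consist of the single
   call "Obj::~Obj (&o)" with identical arguments compare equal here,
   which is what lets optimize_double_finally below fire on the common
   C++ destructor pattern.  */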
2754 /* Optimize
2755 try { A() } finally { try { ~B() } catch { ~A() } }
2756 try { ... } finally { ~A() }
2757 into
2758 try { A() } catch { ~B() }
2759 try { ~B() ... } finally { ~A() }
2761 This occurs frequently in C++, where A is a local variable and B is a
2762 temporary used in the initializer for A. */
2765 optimize_double_finally (gimple one, gimple two)
2768 gimple_stmt_iterator gsi;
2770 gsi = gsi_start (gimple_try_cleanup (one));
2771 if (!gsi_one_before_end_p (gsi))
2774 oneh = gsi_stmt (gsi);
2775 if (gimple_code (oneh) != GIMPLE_TRY
2776 || gimple_try_kind (oneh) != GIMPLE_TRY_CATCH)
2779 if (same_handler_p (gimple_try_cleanup (oneh), gimple_try_cleanup (two)))
2781 gimple_seq seq = gimple_try_eval (oneh);
2783 gimple_try_set_cleanup (one, seq);
2784 gimple_try_set_kind (one, GIMPLE_TRY_CATCH);
2785 seq = copy_gimple_seq_and_replace_locals (seq);
2786 gimple_seq_add_seq (&seq, gimple_try_eval (two));
2787 gimple_try_set_eval (two, seq);
2791 /* Perform EH refactoring optimizations that are simpler to do when code
2792 flow has been lowered but EH structures haven't. */
2795 refactor_eh_r (gimple_seq seq)
2797 gimple_stmt_iterator gsi;
2802 gsi = gsi_start (seq);
2806 if (gsi_end_p (gsi))
2809 two = gsi_stmt (gsi);
2812 && gimple_code (one) == GIMPLE_TRY
2813 && gimple_code (two) == GIMPLE_TRY
2814 && gimple_try_kind (one) == GIMPLE_TRY_FINALLY
2815 && gimple_try_kind (two) == GIMPLE_TRY_FINALLY)
2816 optimize_double_finally (one, two);
2818 switch (gimple_code (one))
2821 refactor_eh_r (gimple_try_eval (one));
2822 refactor_eh_r (gimple_try_cleanup (one));
2825 refactor_eh_r (gimple_catch_handler (one));
2827 case GIMPLE_EH_FILTER:
2828 refactor_eh_r (gimple_eh_filter_failure (one));
2843 refactor_eh_r (gimple_body (current_function_decl));
2848 gate_refactor_eh (void)
2850 return flag_exceptions != 0;
2853 struct gimple_opt_pass pass_refactor_eh =
2858 gate_refactor_eh, /* gate */
2859 refactor_eh, /* execute */
2862 0, /* static_pass_number */
2863 TV_TREE_EH, /* tv_id */
2864 PROP_gimple_lcf, /* properties_required */
2865 0, /* properties_provided */
2866 0, /* properties_destroyed */
2867 0, /* todo_flags_start */
2868 TODO_dump_func /* todo_flags_finish */
2872 /* At the end of gimple optimization, we can lower RESX. */
2875 lower_resx (basic_block bb, gimple stmt, struct pointer_map_t *mnt_map)
2878 eh_region src_r, dst_r;
2879 gimple_stmt_iterator gsi;
2884 lp_nr = lookup_stmt_eh_lp (stmt);
2886 dst_r = get_eh_region_from_lp_number (lp_nr);
2890 src_r = get_eh_region_from_number (gimple_resx_region (stmt));
2891 gsi = gsi_last_bb (bb);
2895 /* We can wind up with no source region when pass_cleanup_eh shows
2896 that there are no entries into an eh region and deletes it, but
2897 then the block that contains the resx isn't removed. This can
2898 happen without optimization when the switch statement created by
2899 lower_try_finally_switch isn't simplified to remove the eh case.
2901 Resolve this by expanding the resx node to an abort. */
2903 fn = implicit_built_in_decls[BUILT_IN_TRAP];
2904 x = gimple_build_call (fn, 0);
2905 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
2907 while (EDGE_COUNT (bb->succs) > 0)
2908 remove_edge (EDGE_SUCC (bb, 0));
2912 /* When we have a destination region, we resolve this by copying
2913 the excptr and filter values into place, and changing the edge
2914 to immediately after the landing pad. */
2923 /* We are resuming into a MUST_NOT_THROW region. Expand a call to
2924 the failure decl into a new block, if needed. */
2925 gcc_assert (dst_r->type == ERT_MUST_NOT_THROW);
2927 slot = pointer_map_contains (mnt_map, dst_r);
2930 gimple_stmt_iterator gsi2;
2932 new_bb = create_empty_bb (bb);
2933 lab = gimple_block_label (new_bb);
2934 gsi2 = gsi_start_bb (new_bb);
2936 fn = dst_r->u.must_not_throw.failure_decl;
2937 x = gimple_build_call (fn, 0);
2938 gimple_set_location (x, dst_r->u.must_not_throw.failure_loc);
2939 gsi_insert_after (&gsi2, x, GSI_CONTINUE_LINKING);
2941 slot = pointer_map_insert (mnt_map, dst_r);
2947 new_bb = label_to_block (lab);
2950 gcc_assert (EDGE_COUNT (bb->succs) == 0);
2951 e = make_edge (bb, new_bb, EDGE_FALLTHRU);
2952 e->count = bb->count;
2953 e->probability = REG_BR_PROB_BASE;
2958 tree dst_nr = build_int_cst (NULL, dst_r->index);
2960 fn = implicit_built_in_decls[BUILT_IN_EH_COPY_VALUES];
2961 src_nr = build_int_cst (NULL, src_r->index);
2962 x = gimple_build_call (fn, 2, dst_nr, src_nr);
2963 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
2965 /* Update the flags for the outgoing edge. */
2966 e = single_succ_edge (bb);
2967 gcc_assert (e->flags & EDGE_EH);
2968 e->flags = (e->flags & ~EDGE_EH) | EDGE_FALLTHRU;
2970 /* If there are no more EH users of the landing pad, delete it. */
2971 FOR_EACH_EDGE (e, ei, e->dest->preds)
2972 if (e->flags & EDGE_EH)
2976 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2977 remove_eh_landing_pad (lp);
2987 /* When we don't have a destination region, this exception escapes
2988 up the call chain. We resolve this by generating a call to the
2989 _Unwind_Resume library function. */
2991 /* The ARM EABI redefines _Unwind_Resume as __cxa_end_cleanup
2992 with no arguments for C++ and Java. Check for that. */
2993 if (src_r->use_cxa_end_cleanup)
2995 fn = implicit_built_in_decls[BUILT_IN_CXA_END_CLEANUP];
2996 x = gimple_build_call (fn, 0);
2997 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3001 fn = implicit_built_in_decls[BUILT_IN_EH_POINTER];
3002 src_nr = build_int_cst (NULL, src_r->index);
3003 x = gimple_build_call (fn, 1, src_nr);
3004 var = create_tmp_var (ptr_type_node, NULL);
3005 var = make_ssa_name (var, x);
3006 gimple_call_set_lhs (x, var);
3007 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3009 fn = implicit_built_in_decls[BUILT_IN_UNWIND_RESUME];
3010 x = gimple_build_call (fn, 1, var);
3011 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3014 gcc_assert (EDGE_COUNT (bb->succs) == 0);
3017 gsi_remove (&gsi, true);
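/* A sketch of the possible rewrites of "resx 1" (illustrative, not
   literal dump output):

     source region already deleted      ==>  __builtin_trap ();
     destination is MUST_NOT_THROW      ==>  call of its failure_decl
     destination landing pad 2 in CFUN  ==>  __builtin_eh_copy_values (2, 1);
                                             goto <post_landing_pad 2>;
     exception escapes the function     ==>  ptr = __builtin_eh_pointer (1);
                                             _Unwind_Resume (ptr);
                                             (__cxa_end_cleanup on ARM EABI)  */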
3023 execute_lower_resx (void)
3026 struct pointer_map_t *mnt_map;
3027 bool dominance_invalidated = false;
3028 bool any_rewritten = false;
3030 mnt_map = pointer_map_create ();
3034 gimple last = last_stmt (bb);
3035 if (last && is_gimple_resx (last))
3037 dominance_invalidated |= lower_resx (bb, last, mnt_map);
3038 any_rewritten = true;
3042 pointer_map_destroy (mnt_map);
3044 if (dominance_invalidated)
3046 free_dominance_info (CDI_DOMINATORS);
3047 free_dominance_info (CDI_POST_DOMINATORS);
3050 return any_rewritten ? TODO_update_ssa_only_virtuals : 0;
3054 gate_lower_resx (void)
3056 return flag_exceptions != 0;
3059 struct gimple_opt_pass pass_lower_resx =
3064 gate_lower_resx, /* gate */
3065 execute_lower_resx, /* execute */
3068 0, /* static_pass_number */
3069 TV_TREE_EH, /* tv_id */
3070 PROP_gimple_lcf, /* properties_required */
3071 0, /* properties_provided */
3072 0, /* properties_destroyed */
3073 0, /* todo_flags_start */
3074 TODO_dump_func | TODO_verify_flow /* todo_flags_finish */
3079 /* At the end of inlining, we can lower EH_DISPATCH. Return true when
3080 we have found some duplicate labels and removed some edges. */
3083 lower_eh_dispatch (basic_block src, gimple stmt)
3085 gimple_stmt_iterator gsi;
3090 bool redirected = false;
3092 region_nr = gimple_eh_dispatch_region (stmt);
3093 r = get_eh_region_from_number (region_nr);
3095 gsi = gsi_last_bb (src);
3101 VEC (tree, heap) *labels = NULL;
3102 tree default_label = NULL;
3106 struct pointer_set_t *seen_values = pointer_set_create ();
3108 /* Collect the labels for a switch. Zero the post_landing_pad
3109 field because we'll no longer have anything keeping these labels
3110 in existence and the optimizer will be free to merge these blocks. */
3112 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
3114 tree tp_node, flt_node, lab = c->label;
3115 bool have_label = false;
3118 tp_node = c->type_list;
3119 flt_node = c->filter_list;
3121 if (tp_node == NULL)
3123 default_label = lab;
3128 /* Filter out duplicate labels that arise when this handler
3129 is shadowed by an earlier one. When no labels are
3130 attached to the handler anymore, we remove
3131 the corresponding edge and then we delete unreachable
3132 blocks at the end of this pass. */
3133 if (! pointer_set_contains (seen_values, TREE_VALUE (flt_node)))
3135 tree t = build3 (CASE_LABEL_EXPR, void_type_node,
3136 TREE_VALUE (flt_node), NULL, lab);
3137 VEC_safe_push (tree, heap, labels, t);
3138 pointer_set_insert (seen_values, TREE_VALUE (flt_node));
3142 tp_node = TREE_CHAIN (tp_node);
3143 flt_node = TREE_CHAIN (flt_node);
3148 remove_edge (find_edge (src, label_to_block (lab)));
3153 /* Clean up the edge flags. */
3154 FOR_EACH_EDGE (e, ei, src->succs)
3156 if (e->flags & EDGE_FALLTHRU)
3158 /* If there was no catch-all, use the fallthru edge. */
3159 if (default_label == NULL)
3160 default_label = gimple_block_label (e->dest);
3161 e->flags &= ~EDGE_FALLTHRU;
3164 gcc_assert (default_label != NULL);
3166 /* Don't generate a switch if there's only a default case.
3167 This is common in the form of try { A; } catch (...) { B; }. */
3170 e = single_succ_edge (src);
3171 e->flags |= EDGE_FALLTHRU;
3175 fn = implicit_built_in_decls[BUILT_IN_EH_FILTER];
3176 x = gimple_build_call (fn, 1, build_int_cst (NULL, region_nr));
3177 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)), NULL);
3178 filter = make_ssa_name (filter, x);
3179 gimple_call_set_lhs (x, filter);
3180 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3182 /* Turn the default label into a default case. */
3183 default_label = build3 (CASE_LABEL_EXPR, void_type_node,
3184 NULL, NULL, default_label);
3185 sort_case_labels (labels);
3187 x = gimple_build_switch_vec (filter, default_label, labels);
3188 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3190 VEC_free (tree, heap, labels);
3192 pointer_set_destroy (seen_values);
3196 case ERT_ALLOWED_EXCEPTIONS:
3198 edge b_e = BRANCH_EDGE (src);
3199 edge f_e = FALLTHRU_EDGE (src);
3201 fn = implicit_built_in_decls[BUILT_IN_EH_FILTER];
3202 x = gimple_build_call (fn, 1, build_int_cst (NULL, region_nr));
3203 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)), NULL);
3204 filter = make_ssa_name (filter, x);
3205 gimple_call_set_lhs (x, filter);
3206 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3208 r->u.allowed.label = NULL;
3209 x = gimple_build_cond (EQ_EXPR, filter,
3210 build_int_cst (TREE_TYPE (filter),
3211 r->u.allowed.filter),
3212 NULL_TREE, NULL_TREE);
3213 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3215 b_e->flags = b_e->flags | EDGE_TRUE_VALUE;
3216 f_e->flags = (f_e->flags & ~EDGE_FALLTHRU) | EDGE_FALSE_VALUE;
3224 /* Replace the EH_DISPATCH with the SWITCH or COND generated above. */
3225 gsi_remove (&gsi, true);
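/* A sketch of the lowering for a hypothetical ERT_TRY region 1 with two
   typed handlers and a catch-all (not literal dump output):

       eh_dispatch 1;            filter.1 = __builtin_eh_filter (1);
                         ==>     switch (filter.1)
                                   <case 1: goto <catch A>;
                                    case 2: goto <catch B>;
                                    default: goto <catch ...>>

   An ERT_ALLOWED_EXCEPTIONS region instead becomes a GIMPLE_COND
   comparing the filter value with the region's allowed filter.  */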
3230 execute_lower_eh_dispatch (void)
3233 bool any_rewritten = false;
3234 bool redirected = false;
3236 assign_filter_values ();
3240 gimple last = last_stmt (bb);
3241 if (last && gimple_code (last) == GIMPLE_EH_DISPATCH)
3243 redirected |= lower_eh_dispatch (bb, last);
3244 any_rewritten = true;
3249 delete_unreachable_blocks ();
3250 return any_rewritten ? TODO_update_ssa_only_virtuals : 0;
3254 gate_lower_eh_dispatch (void)
3256 return cfun->eh->region_tree != NULL;
3259 struct gimple_opt_pass pass_lower_eh_dispatch =
3263 "ehdisp", /* name */
3264 gate_lower_eh_dispatch, /* gate */
3265 execute_lower_eh_dispatch, /* execute */
3268 0, /* static_pass_number */
3269 TV_TREE_EH, /* tv_id */
3270 PROP_gimple_lcf, /* properties_required */
3271 0, /* properties_provided */
3272 0, /* properties_destroyed */
3273 0, /* todo_flags_start */
3274 TODO_dump_func | TODO_verify_flow /* todo_flags_finish */
3278 /* Walk statements, see what regions are really referenced and remove
3279 those that are unused. */
3282 remove_unreachable_handlers (void)
3284 sbitmap r_reachable, lp_reachable;
3290 r_reachable = sbitmap_alloc (VEC_length (eh_region, cfun->eh->region_array));
3292 lp_reachable = sbitmap_alloc (VEC_length (eh_landing_pad, cfun->eh->lp_array));
3293 sbitmap_zero (r_reachable);
3294 sbitmap_zero (lp_reachable);
3298 gimple_stmt_iterator gsi;
3300 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3302 gimple stmt = gsi_stmt (gsi);
3303 lp_nr = lookup_stmt_eh_lp (stmt);
3305 /* Negative LP numbers are MUST_NOT_THROW regions which
3306 are not considered BB enders. */
3308 SET_BIT (r_reachable, -lp_nr);
3310 /* Positive LP numbers are real landing pads, and are BB enders. */
3313 gcc_assert (gsi_one_before_end_p (gsi));
3314 region = get_eh_region_from_lp_number (lp_nr);
3315 SET_BIT (r_reachable, region->index);
3316 SET_BIT (lp_reachable, lp_nr);
3323 fprintf (dump_file, "Before removal of unreachable regions:\n");
3324 dump_eh_tree (dump_file, cfun);
3325 fprintf (dump_file, "Reachable regions: ");
3326 dump_sbitmap_file (dump_file, r_reachable);
3327 fprintf (dump_file, "Reachable landing pads: ");
3328 dump_sbitmap_file (dump_file, lp_reachable);
3332 VEC_iterate (eh_region, cfun->eh->region_array, r_nr, region); ++r_nr)
3333 if (region && !TEST_BIT (r_reachable, r_nr))
3336 fprintf (dump_file, "Removing unreachable region %d\n", r_nr);
3337 remove_eh_handler (region);
3341 VEC_iterate (eh_landing_pad, cfun->eh->lp_array, lp_nr, lp); ++lp_nr)
3342 if (lp && !TEST_BIT (lp_reachable, lp_nr))
3345 fprintf (dump_file, "Removing unreachable landing pad %d\n", lp_nr);
3346 remove_eh_landing_pad (lp);
3351 fprintf (dump_file, "\n\nAfter removal of unreachable regions:\n");
3352 dump_eh_tree (dump_file, cfun);
3353 fprintf (dump_file, "\n\n");
3356 sbitmap_free (r_reachable);
3357 sbitmap_free (lp_reachable);
3359 #ifdef ENABLE_CHECKING
3360 verify_eh_tree (cfun);
3364 /* Remove regions that do not have landing pads. This assumes
3365 that remove_unreachable_handlers has already been run, and
3366 that we've just manipulated the landing pads since then. */
3369 remove_unreachable_handlers_no_lp (void)
3374 for (i = 1; VEC_iterate (eh_region, cfun->eh->region_array, i, r); ++i)
3375 if (r && r->landing_pads == NULL && r->type != ERT_MUST_NOT_THROW)
3378 fprintf (dump_file, "Removing unreachable region %d\n", i);
3379 remove_eh_handler (r);
3383 /* Undo critical edge splitting on an EH landing pad. Earlier, we
3384 optimistically split all sorts of edges, including EH edges. The
3385 optimization passes in between may not have needed them; if not,
3386 we should undo the split.
3388 Recognize this case by having one EH edge incoming to the BB and
3389 one normal edge outgoing; BB should be empty apart from the
3390 post_landing_pad label.
3392 Note that this is slightly different from the empty handler case
3393 handled by cleanup_empty_eh, in that the actual handler may yet
3394 have actual code but the landing pad has been separated from the
3395 handler. As such, cleanup_empty_eh relies on this transformation
3396 having been done first. */
3399 unsplit_eh (eh_landing_pad lp)
3401 basic_block bb = label_to_block (lp->post_landing_pad);
3402 gimple_stmt_iterator gsi;
3405 /* Quickly check the edge counts on BB for singularity. */
3406 if (EDGE_COUNT (bb->preds) != 1 || EDGE_COUNT (bb->succs) != 1)
3408 e_in = EDGE_PRED (bb, 0);
3409 e_out = EDGE_SUCC (bb, 0);
3411 /* Input edge must be EH and output edge must be normal. */
3412 if ((e_in->flags & EDGE_EH) == 0 || (e_out->flags & EDGE_EH) != 0)
3415 /* The block must be empty except for the labels and debug insns. */
3416 gsi = gsi_after_labels (bb);
3417 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
3418 gsi_next_nondebug (&gsi);
3419 if (!gsi_end_p (gsi))
3422 /* The destination block must not already have a landing pad
3423 for a different region. */
3424 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
3426 gimple stmt = gsi_stmt (gsi);
3430 if (gimple_code (stmt) != GIMPLE_LABEL)
3432 lab = gimple_label_label (stmt);
3433 lp_nr = EH_LANDING_PAD_NR (lab);
3434 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
3438 /* The new destination block must not already be a destination of
3439 the source block, lest we merge fallthru and eh edges and get
3440 all sorts of confusion. */
3441 if (find_edge (e_in->src, e_out->dest))
3444 /* ??? We can get degenerate phis due to cfg cleanups. I would have
3445 thought this should have been cleaned up by a phicprop pass, but
3446 that doesn't appear to handle virtuals. Propagate by hand. */
3447 if (!gimple_seq_empty_p (phi_nodes (bb)))
3449 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); )
3451 gimple use_stmt, phi = gsi_stmt (gsi);
3452 tree lhs = gimple_phi_result (phi);
3453 tree rhs = gimple_phi_arg_def (phi, 0);
3454 use_operand_p use_p;
3455 imm_use_iterator iter;
3457 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
3459 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
3460 SET_USE (use_p, rhs);
3463 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3464 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1;
3466 remove_phi_node (&gsi, true);
3470 if (dump_file && (dump_flags & TDF_DETAILS))
3471 fprintf (dump_file, "Unsplit EH landing pad %d to block %i.\n",
3472 lp->index, e_out->dest->index);
3474 /* Redirect the edge. Since redirect_eh_edge_1 expects to be moving
3475 a successor edge, humor it. But do the real CFG change with the
3476 predecessor of E_OUT in order to preserve the ordering of arguments
3477 to the PHI nodes in E_OUT->DEST. */
3478 redirect_eh_edge_1 (e_in, e_out->dest, false);
3479 redirect_edge_pred (e_out, e_in->src);
3480 e_out->flags = e_in->flags;
3481 e_out->probability = e_in->probability;
3482 e_out->count = e_in->count;
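/* Pictorially (a sketch): the optimistically split landing pad

       throw site --EH--> <post_landing_pad N, empty> --fallthru--> handler

   is collapsed back to

       throw site --EH--> handler

   after which the now-empty block is removed as unreachable.  */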
3488 /* Examine each landing pad block and see if it matches unsplit_eh. */
3491 unsplit_all_eh (void)
3493 bool changed = false;
3497 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
3499 changed |= unsplit_eh (lp);
3504 /* A subroutine of cleanup_empty_eh. Redirect all EH edges incoming
3505 to OLD_BB to NEW_BB; return true on success, false on failure.
3507 OLD_BB_OUT is the edge into NEW_BB from OLD_BB, so if we miss any
3508 PHI variables from OLD_BB we can pick them up from OLD_BB_OUT.
3509 Virtual PHIs may be deleted and marked for renaming. */
3512 cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb,
3513 edge old_bb_out, bool change_region)
3515 gimple_stmt_iterator ngsi, ogsi;
3518 bitmap rename_virts;
3519 bitmap ophi_handled;
3521 FOR_EACH_EDGE (e, ei, old_bb->preds)
3522 redirect_edge_var_map_clear (e);
3524 ophi_handled = BITMAP_ALLOC (NULL);
3525 rename_virts = BITMAP_ALLOC (NULL);
3527 /* First, iterate through the PHIs on NEW_BB and set up the edge_var_map
3528 for the edges we're going to move. */
3529 for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); gsi_next (&ngsi))
3531 gimple ophi, nphi = gsi_stmt (ngsi);
3534 nresult = gimple_phi_result (nphi);
3535 nop = gimple_phi_arg_def (nphi, old_bb_out->dest_idx);
3537 /* Find the corresponding PHI in OLD_BB so we can forward-propagate
3538 the source ssa_name. */
3540 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
3542 ophi = gsi_stmt (ogsi);
3543 if (gimple_phi_result (ophi) == nop)
3548 /* If we did find the corresponding PHI, copy those inputs. */
3551 bitmap_set_bit (ophi_handled, SSA_NAME_VERSION (nop));
3552 FOR_EACH_EDGE (e, ei, old_bb->preds)
3557 if ((e->flags & EDGE_EH) == 0)
3559 oop = gimple_phi_arg_def (ophi, e->dest_idx);
3560 oloc = gimple_phi_arg_location (ophi, e->dest_idx);
3561 redirect_edge_var_map_add (e, nresult, oop, oloc);
3564 /* If we didn't find the PHI, but it's a VOP, remember to rename
3565 it later, assuming all other tests succeed. */
3566 else if (!is_gimple_reg (nresult))
3567 bitmap_set_bit (rename_virts, SSA_NAME_VERSION (nresult));
3568 /* If we didn't find the PHI, and it's a real variable, we know
3569 from the fact that OLD_BB is tree_empty_eh_handler_p that the
3570 variable is unchanged from input to the block and we can simply
3571 re-use the input to NEW_BB from the OLD_BB_OUT edge. */
3575 = gimple_phi_arg_location (nphi, old_bb_out->dest_idx);
3576 FOR_EACH_EDGE (e, ei, old_bb->preds)
3577 redirect_edge_var_map_add (e, nresult, nop, nloc);
3581 /* Second, verify that all PHIs from OLD_BB have been handled. If not,
3582 we don't know what values from the other edges into NEW_BB to use. */
3583 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
3585 gimple ophi = gsi_stmt (ogsi);
3586 tree oresult = gimple_phi_result (ophi);
3587 if (!bitmap_bit_p (ophi_handled, SSA_NAME_VERSION (oresult)))
3591 /* At this point we know that the merge will succeed. Remove the PHI
3592 nodes for the virtuals that we want to rename. */
3593 if (!bitmap_empty_p (rename_virts))
3595 for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); )
3597 gimple nphi = gsi_stmt (ngsi);
3598 tree nresult = gimple_phi_result (nphi);
3599 if (bitmap_bit_p (rename_virts, SSA_NAME_VERSION (nresult)))
3601 mark_virtual_phi_result_for_renaming (nphi);
3602 remove_phi_node (&ngsi, true);
3609 /* Finally, move the edges and update the PHIs. */
3610 for (ei = ei_start (old_bb->preds); (e = ei_safe_edge (ei)); )
3611 if (e->flags & EDGE_EH)
3613 redirect_eh_edge_1 (e, new_bb, change_region);
3614 redirect_edge_succ (e, new_bb);
3615 flush_pending_stmts (e);
3620 BITMAP_FREE (ophi_handled);
3621 BITMAP_FREE (rename_virts);
3625 FOR_EACH_EDGE (e, ei, old_bb->preds)
3626 redirect_edge_var_map_clear (e);
3627 BITMAP_FREE (ophi_handled);
3628 BITMAP_FREE (rename_virts);
3632 /* A subroutine of cleanup_empty_eh. Move a landing pad LP from its
3633 old region to NEW_REGION at BB. */
3636 cleanup_empty_eh_move_lp (basic_block bb, edge e_out,
3637 eh_landing_pad lp, eh_region new_region)
3639 gimple_stmt_iterator gsi;
3642 for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
3646 lp->region = new_region;
3647 lp->next_lp = new_region->landing_pads;
3648 new_region->landing_pads = lp;
3650 /* Delete the RESX that was matched within the empty handler block. */
3651 gsi = gsi_last_bb (bb);
3652 mark_virtual_ops_for_renaming (gsi_stmt (gsi));
3653 gsi_remove (&gsi, true);
3655 /* Clean up E_OUT for the fallthru. */
3656 e_out->flags = (e_out->flags & ~EDGE_EH) | EDGE_FALLTHRU;
3657 e_out->probability = REG_BR_PROB_BASE;
3660 /* A subroutine of cleanup_empty_eh. Handle more complex cases of
3661 unsplitting than unsplit_eh was prepared to handle, e.g. when
3662 multiple incoming edges and phis are involved. */
3665 cleanup_empty_eh_unsplit (basic_block bb, edge e_out, eh_landing_pad lp)
3667 gimple_stmt_iterator gsi;
3670 /* We really ought not have totally lost everything following
3671 a landing pad label. Given that BB is empty, there had better be a successor. */
3673 gcc_assert (e_out != NULL);
3675 /* The destination block must not already have a landing pad
3676 for a different region. */
3678 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
3680 gimple stmt = gsi_stmt (gsi);
3683 if (gimple_code (stmt) != GIMPLE_LABEL)
3685 lab = gimple_label_label (stmt);
3686 lp_nr = EH_LANDING_PAD_NR (lab);
3687 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
3691 /* Attempt to move the PHIs into the successor block. */
3692 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, false))
3694 if (dump_file && (dump_flags & TDF_DETAILS))
3696 "Unsplit EH landing pad %d to block %i "
3697 "(via cleanup_empty_eh).\n",
3698 lp->index, e_out->dest->index);
3705 /* Examine the block associated with LP to determine if it's an empty
3706 handler for its EH region. If so, attempt to redirect EH edges to
3707 an outer region. Return true if the CFG was updated in any way. This
3708 is similar to jump forwarding, just across EH edges. */
3711 cleanup_empty_eh (eh_landing_pad lp)
3713 basic_block bb = label_to_block (lp->post_landing_pad);
3714 gimple_stmt_iterator gsi;
3716 eh_region new_region;
3719 bool has_non_eh_pred;
3722 /* There can be zero or one edges out of BB. This is the quickest test. */
3723 switch (EDGE_COUNT (bb->succs))
3729 e_out = EDGE_SUCC (bb, 0);
3734 gsi = gsi_after_labels (bb);
3736 /* Make sure to skip debug statements. */
3737 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
3738 gsi_next_nondebug (&gsi);
3740 /* If the block is totally empty, look for more unsplitting cases. */
3741 if (gsi_end_p (gsi))
3742 return cleanup_empty_eh_unsplit (bb, e_out, lp);
3744 /* The block should consist only of a single RESX statement. */
3745 resx = gsi_stmt (gsi);
3746 if (!is_gimple_resx (resx))
3748 gcc_assert (gsi_one_before_end_p (gsi));
3750 /* Determine if there are non-EH edges, or resx edges into the handler. */
3751 has_non_eh_pred = false;
3752 FOR_EACH_EDGE (e, ei, bb->preds)
3753 if (!(e->flags & EDGE_EH))
3754 has_non_eh_pred = true;
3756 /* Find the handler that's outer of the empty handler by looking at
3757 where the RESX instruction was vectored. */
3758 new_lp_nr = lookup_stmt_eh_lp (resx);
3759 new_region = get_eh_region_from_lp_number (new_lp_nr);
3761 /* If there's no destination region within the current function,
3762 redirection is trivial via removing the throwing statements from
3763 the EH region, removing the EH edges, and allowing the block
3764 to go unreachable. */
3765 if (new_region == NULL)
3767 gcc_assert (e_out == NULL);
3768 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
3769 if (e->flags & EDGE_EH)
3771 gimple stmt = last_stmt (e->src);
3772 remove_stmt_from_eh_lp (stmt);
3780 /* If the destination region is a MUST_NOT_THROW, allow the runtime
3781 to handle the abort and allow the blocks to go unreachable. */
3782 if (new_region->type == ERT_MUST_NOT_THROW)
3784 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
3785 if (e->flags & EDGE_EH)
3787 gimple stmt = last_stmt (e->src);
3788 remove_stmt_from_eh_lp (stmt);
3789 add_stmt_to_eh_lp (stmt, new_lp_nr);
3797 /* Try to redirect the EH edges and merge the PHIs into the destination
3798 landing pad block. If the merge succeeds, we'll already have redirected
3799 all the EH edges. The handler itself will go unreachable if there were no normal edges into it. */
3801 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, true))
3804 /* Finally, if all input edges are EH edges, then we can (potentially)
3805 reduce the number of transfers from the runtime by moving the landing
3806 pad from the original region to the new region. This is a win when
3807 we remove the last CLEANUP region along a particular exception
3808 propagation path. Since nothing changes except for the region with
3809 which the landing pad is associated, the PHI nodes do not need to be
3811 if (!has_non_eh_pred)
3813 cleanup_empty_eh_move_lp (bb, e_out, lp, new_region);
3814 if (dump_file && (dump_flags & TDF_DETAILS))
3815 fprintf (dump_file, "Empty EH handler %i moved to EH region %i.\n",
3816 lp->index, new_region->index);
3818 /* ??? The CFG didn't change, but we may have rendered the
3819 old EH region unreachable. Trigger a cleanup there. */
3826 if (dump_file && (dump_flags & TDF_DETAILS))
3827 fprintf (dump_file, "Empty EH handler %i removed.\n", lp->index);
3828 remove_eh_landing_pad (lp);
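/* Roughly, in source terms (an informal example): a cleanup whose body
   was optimized away, such as an inlined empty destructor in

       { Obj o; f (); }

   leaves a landing pad whose handler is a lone RESX.  Depending on where
   that RESX resumes, the code above forwards the EH edges to the outer
   landing pad, re-associates the throwing statements with a
   MUST_NOT_THROW region, or simply drops the edges when the exception
   would escape the function.  */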
3832 /* Do a post-order traversal of the EH region tree. Examine each
3833 post_landing_pad block and see if we can eliminate it as empty. */
3836 cleanup_all_empty_eh (void)
3838 bool changed = false;
3842 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
3844 changed |= cleanup_empty_eh (lp);
3849 /* Perform cleanups and lowering of exception handling:
3850 1) cleanup regions whose handlers do nothing are optimized out
3851 2) MUST_NOT_THROW regions that became dead because of 1) are optimized out
3852 3) info about regions containing instructions, and about regions
3853 reachable via local EH edges, is collected
3854 4) the EH tree is pruned of regions that are no longer necessary.
3856 TODO: Push MUST_NOT_THROW regions to the root of the EH tree.
3857 Unify those that have the same failure decl and locus.
3861 execute_cleanup_eh_1 (void)
3863 /* Do this first: unsplit_all_eh and cleanup_all_empty_eh can die
3864 looking up unreachable landing pads. */
3865 remove_unreachable_handlers ();
3867 /* Watch out for the region tree vanishing because all of its regions were unreachable. */
3868 if (cfun->eh->region_tree && optimize)
3870 bool changed = false;
3872 changed |= unsplit_all_eh ();
3873 changed |= cleanup_all_empty_eh ();
3877 free_dominance_info (CDI_DOMINATORS);
3878 free_dominance_info (CDI_POST_DOMINATORS);
3880 /* We delayed all basic block deletion, as we may have performed
3881 cleanups on EH edges while non-EH edges were still present. */
3882 delete_unreachable_blocks ();
3884 /* We manipulated the landing pads. Remove any region that no
3885 longer has a landing pad. */
3886 remove_unreachable_handlers_no_lp ();
3888 return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
3896 execute_cleanup_eh (void)
3898 int ret = execute_cleanup_eh_1 ();
3900 /* If the function no longer needs an EH personality routine,
3901 clear it. This exposes cross-language inlining opportunities
3902 and avoids references to a never defined personality routine. */
3903 if (DECL_FUNCTION_PERSONALITY (current_function_decl)
3904 && function_needs_eh_personality (cfun) != eh_personality_lang)
3905 DECL_FUNCTION_PERSONALITY (current_function_decl) = NULL_TREE;
3911 gate_cleanup_eh (void)
3913 return cfun->eh != NULL && cfun->eh->region_tree != NULL;
3916 struct gimple_opt_pass pass_cleanup_eh = {
3919 "ehcleanup", /* name */
3920 gate_cleanup_eh, /* gate */
3921 execute_cleanup_eh, /* execute */
3924 0, /* static_pass_number */
3925 TV_TREE_EH, /* tv_id */
3926 PROP_gimple_lcf, /* properties_required */
3927 0, /* properties_provided */
3928 0, /* properties_destroyed */
3929 0, /* todo_flags_start */
3930 TODO_dump_func /* todo_flags_finish */
3934 /* Verify that BB, which contains STMT as its last statement, has precisely
3935 the edge that make_eh_edges would create. */
3938 verify_eh_edges (gimple stmt)
3940 basic_block bb = gimple_bb (stmt);
3941 eh_landing_pad lp = NULL;
3946 lp_nr = lookup_stmt_eh_lp (stmt);
3948 lp = get_eh_landing_pad_from_number (lp_nr);
3951 FOR_EACH_EDGE (e, ei, bb->succs)
3953 if (e->flags & EDGE_EH)
3957 error ("BB %i has multiple EH edges", bb->index);
3969 error ("BB %i cannot throw but has an EH edge", bb->index);
3975 if (!stmt_could_throw_p (stmt))
3977 error ("BB %i last statement has incorrectly set lp", bb->index);
3981 if (eh_edge == NULL)
3983 error ("BB %i is missing an EH edge", bb->index);
3987 if (eh_edge->dest != label_to_block (lp->post_landing_pad))
3989 error ("Incorrect EH edge %i->%i", bb->index, eh_edge->dest->index);
3996 /* Similarly, but handle GIMPLE_EH_DISPATCH specifically. */
3999 verify_eh_dispatch_edge (gimple stmt)
4003 basic_block src, dst;
4004 bool want_fallthru = true;
4008 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
4009 src = gimple_bb (stmt);
4011 FOR_EACH_EDGE (e, ei, src->succs)
4012 gcc_assert (e->aux == NULL);
4017 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
4019 dst = label_to_block (c->label);
4020 e = find_edge (src, dst);
4023 error ("BB %i is missing an edge", src->index);
4028 /* A catch-all handler doesn't have a fallthru. */
4029 if (c->type_list == NULL)
4031 want_fallthru = false;
4037 case ERT_ALLOWED_EXCEPTIONS:
4038 dst = label_to_block (r->u.allowed.label);
4039 e = find_edge (src, dst);
4042 error ("BB %i is missing an edge", src->index);
4053 FOR_EACH_EDGE (e, ei, src->succs)
4055 if (e->flags & EDGE_FALLTHRU)
4057 if (fall_edge != NULL)
4059 error ("BB %i has too many fallthru edges", src->index);
4068 error ("BB %i has incorrect edge", src->index);
4072 if ((fall_edge != NULL) ^ want_fallthru)
4074 error ("BB %i has incorrect fallthru edge", src->index);