/* Exception handling semantics and decomposition for trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "except.h"
#include "pointer-set.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "timevar.h"
#include "langhooks.h"
#include "ggc.h"
#include "diagnostic-core.h"
#include "gimple.h"
/* In some instances a tree and a gimple need to be stored in the same
   table, i.e. in hash tables.  This is a structure to do this.  */
typedef union {tree *tp; tree t; gimple g;} treemple;
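
/* For example (illustrative): the finally_tree below keys its hash table
   off a treemple, storing a LABEL_DECL via the T member for GIMPLE_LABELs
   and the statement itself via the G member for GIMPLE_TRY nodes.  */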
/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}
/* Misc functions used in this file.  */

/* Compare and hash for any structure which begins with a canonical
   pointer.  Assumes all pointers are interchangeable, which is sort
   of already assumed by gcc elsewhere IIRC.  */

static int
struct_ptr_eq (const void *a, const void *b)
{
  const void * const * x = (const void * const *) a;
  const void * const * y = (const void * const *) b;
  return *x == *y;
}

static hashval_t
struct_ptr_hash (const void *a)
{
  const void * const * x = (const void * const *) a;
  return (size_t)*x >> 4;
}
/* Remember and lookup EH landing pad data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
   we get to rtl.  Once we're done with lowering here, if we lose
   the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
   compared to those that can.  We should be saving some amount
   of space by only allocating memory for those that can throw.  */
/* Add statement T in function IFUN to landing pad NUM.  */

void
add_stmt_to_eh_lp_fn (struct function *ifun, gimple t, int num)
{
  struct throw_stmt_node *n;
  void **slot;

  gcc_assert (num != 0);

  n = ggc_alloc_throw_stmt_node ();
  n->stmt = t;
  n->lp_nr = num;

  if (!get_eh_throw_stmt_table (ifun))
    set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,
                                                    struct_ptr_eq,
                                                    NULL));

  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}
/* Add statement T in the current function (CFUN) to EH landing pad NUM.  */

void
add_stmt_to_eh_lp (gimple t, int num)
{
  add_stmt_to_eh_lp_fn (cfun, t, num);
}
/* Add statement T to the single EH landing pad in REGION.  */

static void
record_stmt_eh_region (eh_region region, gimple t)
{
  if (region == NULL)
    return;
  if (region->type == ERT_MUST_NOT_THROW)
    add_stmt_to_eh_lp_fn (cfun, t, -region->index);
  else
    {
      eh_landing_pad lp = region->landing_pads;
      if (lp == NULL)
        lp = gen_eh_landing_pad (region);
      else
        gcc_assert (lp->next_lp == NULL);
      add_stmt_to_eh_lp_fn (cfun, t, lp->index);
    }
}
/* Remove statement T in function IFUN from its EH landing pad.  */

bool
remove_stmt_from_eh_lp_fn (struct function *ifun, gimple t)
{
  struct throw_stmt_node dummy;
  void **slot;

  if (!get_eh_throw_stmt_table (ifun))
    return false;

  dummy.stmt = t;
  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
                         NO_INSERT);
  if (slot)
    {
      htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
      return true;
    }
  else
    return false;
}
/* Remove statement T in the current function (CFUN) from its
   EH landing pad.  */

bool
remove_stmt_from_eh_lp (gimple t)
{
  return remove_stmt_from_eh_lp_fn (cfun, t);
}
/* Determine if statement T is inside an EH region in function IFUN.
   Positive numbers indicate a landing pad index; negative numbers
   indicate a MUST_NOT_THROW region index; zero indicates that the
   statement is not recorded in the region table.  */

int
lookup_stmt_eh_lp_fn (struct function *ifun, gimple t)
{
  struct throw_stmt_node *p, n;

  if (ifun->eh->throw_stmt_table == NULL)
    return 0;

  n.stmt = t;
  p = (struct throw_stmt_node *) htab_find (ifun->eh->throw_stmt_table, &n);
  return p ? p->lp_nr : 0;
}
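
/* A usage sketch (hypothetical caller) of the encoding above:

     int lp_nr = lookup_stmt_eh_lp_fn (cfun, stmt);
     if (lp_nr > 0)
       ...   stmt may throw to the landing pad numbered lp_nr;
     else if (lp_nr < 0)
       ...   stmt sits inside the MUST_NOT_THROW region numbered -lp_nr;
     else
       ...   stmt is not recorded as throwing at all.  */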
/* Likewise, but always use the current function.  */

int
lookup_stmt_eh_lp (gimple t)
{
  /* We can get called from initialized data when -fnon-call-exceptions
     is on; prevent crash.  */
  if (cfun == NULL)
    return 0;
  return lookup_stmt_eh_lp_fn (cfun, t);
}
/* First pass of EH node decomposition.  Build up a tree of GIMPLE_TRY_FINALLY
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */

struct finally_tree_node
{
  /* When storing a GIMPLE_TRY, we have to record a gimple.  However
     when deciding whether a GOTO to a certain LABEL_DECL (which is a
     tree) leaves the TRY block, it is necessary to record a tree in
     this field.  Thus a treemple is used.  */
  treemple child;
  gimple parent;
};
/* Note that this table is *not* marked GTY.  It is short-lived.  */
static htab_t finally_tree;

static void
record_in_finally_tree (treemple child, gimple parent)
{
  struct finally_tree_node *n;
  void **slot;

  n = XNEW (struct finally_tree_node);
  n->child = child;
  n->parent = parent;

  slot = htab_find_slot (finally_tree, n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}
static void
collect_finally_tree (gimple stmt, gimple region);

/* Go through the gimple sequence.  Works with collect_finally_tree to
   record all GIMPLE_LABEL and GIMPLE_TRY statements.  */

static void
collect_finally_tree_1 (gimple_seq seq, gimple region)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    collect_finally_tree (gsi_stmt (gsi), region);
}
static void
collect_finally_tree (gimple stmt, gimple region)
{
  treemple temp;

  switch (gimple_code (stmt))
    {
    case GIMPLE_LABEL:
      temp.t = gimple_label_label (stmt);
      record_in_finally_tree (temp, region);
      break;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
        {
          temp.g = stmt;
          record_in_finally_tree (temp, region);
          collect_finally_tree_1 (gimple_try_eval (stmt), stmt);
          collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
        }
      else if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
        {
          collect_finally_tree_1 (gimple_try_eval (stmt), region);
          collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
        }
      break;

    case GIMPLE_CATCH:
      collect_finally_tree_1 (gimple_catch_handler (stmt), region);
      break;

    case GIMPLE_EH_FILTER:
      collect_finally_tree_1 (gimple_eh_filter_failure (stmt), region);
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }
}
/* Use the finally tree to determine if a jump from START to TARGET
   would leave the try_finally node that START lives in.  */

static bool
outside_finally_tree (treemple start, gimple target)
{
  struct finally_tree_node n, *p;

  do
    {
      n.child = start;
      p = (struct finally_tree_node *) htab_find (finally_tree, &n);
      if (!p)
        return true;
      start.g = p->parent;
    }
  while (start.g != target);

  return false;
}
/* Second pass of EH node decomposition.  Actually transform the GIMPLE_TRY
   nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straightforward, but frobbing all the gotos
   and such into shape isn't.  */

/* The sequence into which we record all EH stuff.  This will be
   placed at the end of the function when we're all done.  */
static gimple_seq eh_seq;

/* Record whether an EH region contains something that can throw,
   indexed by EH region number.  */
static bitmap eh_region_may_contain_throw_map;
/* The GOTO_QUEUE is an array of GIMPLE_GOTO and GIMPLE_RETURN
   statements that are seen to escape this GIMPLE_TRY_FINALLY node.
   The idea is to record a gimple statement for everything except for
   the conditionals, which get their labels recorded.  Since labels are
   of type 'tree', we need this node to store both gimple and tree
   objects.  REPL_STMT is the sequence used to replace the goto/return
   statement.  CONT_STMT is used to store the statement that allows
   the return/goto to jump to the original destination.  */
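
/* For example (illustrative): in

	try { if (p) goto out; return; } finally { ... }
     out:;

   the queue would hold one labeled entry for the goto (its destination
   OUT recorded in dest_array) and one entry with index -1 for the
   return statement.  */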
struct goto_queue_node
{
  treemple stmt;
  gimple_seq repl_stmt;
  gimple cont_stmt;
  int index;
  /* This is used when index >= 0 to indicate that stmt is a label (as
     opposed to a goto stmt).  */
  bool is_label;
};
/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  eh_region cur_region;

  /* What's "current" for the purposes of __builtin_eh_pointer.  For
     a CATCH, this is the associated TRY.  For an EH_FILTER, this is
     the associated ALLOWED_EXCEPTIONS, etc.  */
  eh_region ehp_region;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
};
struct leh_tf_state
{
  /* Pointer to the GIMPLE_TRY_FINALLY node under discussion.  The
     try_finally_expr is the original GIMPLE_TRY_FINALLY.  We need to retain
     this so that outside_finally_tree can reliably reference the tree used
     in the collect_finally_tree data structures.  */
  gimple try_finally_expr;
  gimple top_p;

  /* While lowering, a top_p is usually expanded into multiple statements,
     so we need the following field to store them.  */
  gimple_seq top_p_seq;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  eh_region region;

  /* The goto queue.  */
  struct goto_queue_node *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* Pointer map to help in searching goto_queue when it is large.  */
  struct pointer_map_t *goto_queue_map;

  /* The set of unique labels seen as entries in the goto queue.  */
  VEC(tree,heap) *dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a GIMPLE_RETURN.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};
static gimple_seq lower_eh_must_not_throw (struct leh_state *, gimple);

/* Search for STMT in the goto queue.  Return the replacement,
   or null if the statement isn't in the queue.  */

#define LARGE_GOTO_QUEUE 20

static void lower_eh_constructs_1 (struct leh_state *state, gimple_seq seq);
static gimple_seq
find_goto_replacement (struct leh_tf_state *tf, treemple stmt)
{
  unsigned int i;
  void **slot;

  if (tf->goto_queue_active < LARGE_GOTO_QUEUE)
    {
      for (i = 0; i < tf->goto_queue_active; i++)
        if (tf->goto_queue[i].stmt.g == stmt.g)
          return tf->goto_queue[i].repl_stmt;
      return NULL;
    }

  /* If we have a large number of entries in the goto_queue, create a
     pointer map and use that for searching.  */

  if (!tf->goto_queue_map)
    {
      tf->goto_queue_map = pointer_map_create ();
      for (i = 0; i < tf->goto_queue_active; i++)
        {
          slot = pointer_map_insert (tf->goto_queue_map,
                                     tf->goto_queue[i].stmt.g);
          gcc_assert (*slot == NULL);
          *slot = &tf->goto_queue[i];
        }
    }

  slot = pointer_map_contains (tf->goto_queue_map, stmt.g);
  if (slot != NULL)
    return ((struct goto_queue_node *) *slot)->repl_stmt;

  return NULL;
}
/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered GIMPLE_COND.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the GIMPLE_COND and redirect.  */

static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
                                gimple_stmt_iterator *gsi)
{
  tree label;
  gimple_seq new_seq;
  treemple temp;
  location_t loc = gimple_location (gsi_stmt (*gsi));

  temp.tp = tp;
  new_seq = find_goto_replacement (tf, temp);
  if (!new_seq)
    return;

  if (gimple_seq_singleton_p (new_seq)
      && gimple_code (gimple_seq_first_stmt (new_seq)) == GIMPLE_GOTO)
    {
      *tp = gimple_goto_dest (gimple_seq_first_stmt (new_seq));
      return;
    }

  label = create_artificial_label (loc);
  /* Set the new label for the GIMPLE_COND.  */
  *tp = label;

  gsi_insert_after (gsi, gimple_build_label (label), GSI_CONTINUE_LINKING);
  gsi_insert_seq_after (gsi, gimple_seq_copy (new_seq), GSI_CONTINUE_LINKING);
}
/* The real work of replace_goto_queue.  Returns with TSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (gimple_seq, struct leh_tf_state *);

static void
replace_goto_queue_1 (gimple stmt, struct leh_tf_state *tf,
                      gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  treemple temp;
  temp.g = NULL;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
      temp.g = stmt;
      seq = find_goto_replacement (tf, temp);
      if (seq)
        {
          gsi_insert_seq_before (gsi, gimple_seq_copy (seq), GSI_SAME_STMT);
          gsi_remove (gsi, false);
          return;
        }
      break;

    case GIMPLE_COND:
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 2), tf, gsi);
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 3), tf, gsi);
      break;

    case GIMPLE_TRY:
      replace_goto_queue_stmt_list (gimple_try_eval (stmt), tf);
      replace_goto_queue_stmt_list (gimple_try_cleanup (stmt), tf);
      break;
    case GIMPLE_CATCH:
      replace_goto_queue_stmt_list (gimple_catch_handler (stmt), tf);
      break;
    case GIMPLE_EH_FILTER:
      replace_goto_queue_stmt_list (gimple_eh_filter_failure (stmt), tf);
      break;

    default:
      /* These won't have gotos in them.  */
      break;
    }

  gsi_next (gsi);
}
/* A subroutine of replace_goto_queue.  Handles GIMPLE_SEQ.  */

static void
replace_goto_queue_stmt_list (gimple_seq seq, struct leh_tf_state *tf)
{
  gimple_stmt_iterator gsi = gsi_start (seq);

  while (!gsi_end_p (gsi))
    replace_goto_queue_1 (gsi_stmt (gsi), tf, &gsi);
}

/* Replace all goto queue members.  */

static void
replace_goto_queue (struct leh_tf_state *tf)
{
  if (tf->goto_queue_active == 0)
    return;
  replace_goto_queue_stmt_list (tf->top_p_seq, tf);
  replace_goto_queue_stmt_list (eh_seq, tf);
}
/* Add a new record to the goto queue contained in TF.  NEW_STMT is the
   data to be added, IS_LABEL indicates whether NEW_STMT is a label or
   a gimple statement.  */

static void
record_in_goto_queue (struct leh_tf_state *tf,
                      treemple new_stmt,
                      int index,
                      bool is_label)
{
  size_t active, size;
  struct goto_queue_node *q;

  gcc_assert (!tf->goto_queue_map);

  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
  if (active >= size)
    {
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
      tf->goto_queue
        = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
    }

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->stmt = new_stmt;
  q->index = index;
  q->is_label = is_label;
}
/* Record label LABEL in the goto queue contained in TF.
   TF is not null.  */

static void
record_in_goto_queue_label (struct leh_tf_state *tf, treemple stmt, tree label)
{
  int index;
  treemple temp, new_stmt;

  if (!label)
    return;

  /* Computed and non-local gotos do not get processed.  Given
     their nature we can neither tell whether we've escaped the
     finally block nor redirect them if we knew.  */
  if (TREE_CODE (label) != LABEL_DECL)
    return;

  /* No need to record gotos that don't leave the try block.  */
  temp.t = label;
  if (!outside_finally_tree (temp, tf->try_finally_expr))
    return;

  if (! tf->dest_array)
    {
      tf->dest_array = VEC_alloc (tree, heap, 10);
      VEC_quick_push (tree, tf->dest_array, label);
      index = 0;
    }
  else
    {
      int n = VEC_length (tree, tf->dest_array);
      for (index = 0; index < n; ++index)
        if (VEC_index (tree, tf->dest_array, index) == label)
          break;
      if (index == n)
        VEC_safe_push (tree, heap, tf->dest_array, label);
    }

  /* In the case of a GOTO we want to record the destination label,
     since with a GIMPLE_COND we have an easy access to the then/else
     labels.  */
  new_stmt = stmt;
  record_in_goto_queue (tf, new_stmt, index, true);
}
/* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */

static void
maybe_record_in_goto_queue (struct leh_state *state, gimple stmt)
{
  struct leh_tf_state *tf = state->tf;
  treemple new_stmt;

  if (!tf)
    return;

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      new_stmt.tp = gimple_op_ptr (stmt, 2);
      record_in_goto_queue_label (tf, new_stmt, gimple_cond_true_label (stmt));
      new_stmt.tp = gimple_op_ptr (stmt, 3);
      record_in_goto_queue_label (tf, new_stmt, gimple_cond_false_label (stmt));
      break;
    case GIMPLE_GOTO:
      new_stmt.g = stmt;
      record_in_goto_queue_label (tf, new_stmt, gimple_goto_dest (stmt));
      break;

    case GIMPLE_RETURN:
      tf->may_return = true;
      new_stmt.g = stmt;
      record_in_goto_queue (tf, new_stmt, -1, false);
      break;

    default:
      gcc_unreachable ();
    }
}
#ifdef ENABLE_CHECKING
/* We do not process GIMPLE_SWITCHes for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer GIMPLE_TRY_FINALLY nodes.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state, gimple switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;

  if (!tf)
    return;

  n = gimple_switch_num_labels (switch_expr);

  for (i = 0; i < n; ++i)
    {
      treemple temp;
      tree lab = CASE_LABEL (gimple_switch_label (switch_expr, i));
      temp.t = lab;
      gcc_assert (!outside_finally_tree (temp, tf->try_finally_expr));
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif
/* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB.  Place in CONT_P
   whatever is needed to finish the return.  If MOD is non-null, insert it
   before the new branch.  RETURN_VALUE_P is a cache containing a temporary
   variable to be used in manipulating the value returned from the function.  */

static void
do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
                       tree *return_value_p)
{
  tree ret_expr;
  gimple x;

  /* In the case of a return, the queue node must be a gimple statement.  */
  gcc_assert (!q->is_label);

  ret_expr = gimple_return_retval (q->stmt.g);

  if (ret_expr)
    {
      if (!*return_value_p)
        *return_value_p = ret_expr;
      else
        gcc_assert (*return_value_p == ret_expr);
      q->cont_stmt = q->stmt.g;
      /* The nasty part about redirecting the return value is that the
         return value itself is to be computed before the FINALLY block
         is executed.  e.g.

		int x;
		int foo (void)
		{
		  x = 0;
		  try {
		    return x;
		  } finally {
		    x++;
		  }
		}

         should return 0, not 1.  Arrange for this to happen by copying
         the computed return value into a local temporary.  This also
         allows us to redirect multiple return statements through the
         same destination block; whether this is a net win or not really
         depends, I guess, but it does make generation of the switch in
         lower_try_finally_switch easier.  */

      if (TREE_CODE (ret_expr) == RESULT_DECL)
        {
          if (!*return_value_p)
            *return_value_p = ret_expr;
          else
            gcc_assert (*return_value_p == ret_expr);
          q->cont_stmt = q->stmt.g;
        }
      else
        gcc_unreachable ();
    }
  else
    /* If we don't return a value, all return statements are the same.  */
    q->cont_stmt = q->stmt.g;

  q->repl_stmt = gimple_seq_alloc ();

  if (mod)
    gimple_seq_add_seq (&q->repl_stmt, mod);

  x = gimple_build_goto (finlab);
  gimple_seq_add_stmt (&q->repl_stmt, x);
}
/* Similar, but easier, for GIMPLE_GOTO.  */

static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
                     struct leh_tf_state *tf)
{
  gimple x;

  gcc_assert (q->is_label);

  q->repl_stmt = gimple_seq_alloc ();

  q->cont_stmt = gimple_build_goto (VEC_index (tree, tf->dest_array, q->index));

  if (mod)
    gimple_seq_add_seq (&q->repl_stmt, mod);

  x = gimple_build_goto (finlab);
  gimple_seq_add_stmt (&q->repl_stmt, x);
}
/* Emit a standard landing pad sequence into SEQ for REGION.  */

static void
emit_post_landing_pad (gimple_seq *seq, eh_region region)
{
  eh_landing_pad lp = region->landing_pads;
  gimple x;

  if (lp == NULL)
    lp = gen_eh_landing_pad (region);

  lp->post_landing_pad = create_artificial_label (UNKNOWN_LOCATION);
  EH_LANDING_PAD_NR (lp->post_landing_pad) = lp->index;

  x = gimple_build_label (lp->post_landing_pad);
  gimple_seq_add_stmt (seq, x);
}
/* Emit a RESX statement into SEQ for REGION.  */

static void
emit_resx (gimple_seq *seq, eh_region region)
{
  gimple x = gimple_build_resx (region->index);
  gimple_seq_add_stmt (seq, x);
  if (region->outer)
    record_stmt_eh_region (region->outer, x);
}
/* Emit an EH_DISPATCH statement into SEQ for REGION.  */

static void
emit_eh_dispatch (gimple_seq *seq, eh_region region)
{
  gimple x = gimple_build_eh_dispatch (region->index);
  gimple_seq_add_stmt (seq, x);
}
/* Note that the current EH region may contain a throw, or a
   call to a function which itself may contain a throw.  */

static void
note_eh_region_may_contain_throw (eh_region region)
{
  while (bitmap_set_bit (eh_region_may_contain_throw_map, region->index))
    {
      if (region->type == ERT_MUST_NOT_THROW)
        break;
      region = region->outer;
      if (region == NULL)
        break;
    }
}
/* Check if REGION has been marked as containing a throw.  If REGION is
   NULL, this predicate is false.  */

static inline bool
eh_region_may_contain_throw (eh_region r)
{
  return r && bitmap_bit_p (eh_region_may_contain_throw_map, r->index);
}
/* We want to transform
	try { body; } catch { stuff; }
   to
	normal_sequence:
	  body;
	  over:
	eh_sequence:
	  landing_pad:
	  stuff;
	  goto over;

   TP is a GIMPLE_TRY node.  REGION is the region whose post_landing_pad
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */

static gimple_seq
frob_into_branch_around (gimple tp, eh_region region, tree over)
{
  gimple x;
  gimple_seq cleanup, result;
  location_t loc = gimple_location (tp);

  cleanup = gimple_try_cleanup (tp);
  result = gimple_try_eval (tp);

  if (region)
    emit_post_landing_pad (&eh_seq, region);

  if (gimple_seq_may_fallthru (cleanup))
    {
      if (!over)
        over = create_artificial_label (loc);
      x = gimple_build_goto (over);
      gimple_seq_add_stmt (&cleanup, x);
    }
  gimple_seq_add_seq (&eh_seq, cleanup);

  if (over)
    {
      x = gimple_build_label (over);
      gimple_seq_add_stmt (&result, x);
    }
  return result;
}
/* A subroutine of lower_try_finally.  Duplicate the tree rooted at T.
   Make sure to record all new labels found.  */

static gimple_seq
lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state)
{
  gimple region = NULL;
  gimple_seq new_seq;

  new_seq = copy_gimple_seq_and_replace_locals (seq);

  if (outer_state->tf)
    region = outer_state->tf->try_finally_expr;
  collect_finally_tree_1 (new_seq, region);

  return new_seq;
}
/* A subroutine of lower_try_finally.  Create a fallthru label for
   the given try_finally state.  The only tricky bit here is that
   we have to make sure to record the label in our outer context.  */

static tree
lower_try_finally_fallthru_label (struct leh_tf_state *tf)
{
  tree label = tf->fallthru_label;
  treemple temp;

  if (!label)
    {
      label = create_artificial_label (gimple_location (tf->try_finally_expr));
      tf->fallthru_label = label;
      if (tf->outer->tf)
        {
          temp.t = label;
          record_in_finally_tree (temp, tf->outer->tf->try_finally_expr);
        }
    }
  return label;
}
/* A subroutine of lower_try_finally.  If the eh_protect_cleanup_actions
   langhook returns non-null, then the language requires that the exception
   path out of a try_finally be treated specially.  To wit: the code within
   the finally block may not itself throw an exception.  We have two choices
   here.  First we can duplicate the finally block and wrap it in a
   must_not_throw region.  Second, we can generate code like

	try {
	  finally_block;
	} catch {
	  if (fintmp == eh_edge)
	    protect_cleanup_actions;
	}

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */

static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
                               struct leh_state *this_state,
                               struct leh_tf_state *tf)
{
  tree protect_cleanup_actions;
  gimple_stmt_iterator gsi;
  bool finally_may_fallthru;
  gimple_seq finally;
  gimple x;

  /* First check for nothing to do.  */
  if (lang_hooks.eh_protect_cleanup_actions == NULL)
    return;
  protect_cleanup_actions = lang_hooks.eh_protect_cleanup_actions ();
  if (protect_cleanup_actions == NULL)
    return;

  finally = gimple_try_cleanup (tf->top_p);
  finally_may_fallthru = gimple_seq_may_fallthru (finally);

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  */
  if (this_state)
    finally = lower_try_finally_dup_block (finally, outer_state);

  /* If this cleanup consists of a TRY_CATCH_EXPR with TRY_CATCH_IS_CLEANUP
     set, the handler of the TRY_CATCH_EXPR is another cleanup which ought
     to be in an enclosing scope, but needs to be implemented at this level
     to avoid a nesting violation (see wrap_temporary_cleanups in
     cp/decl.c).  Since it's logically at an outer level, we should call
     terminate before we get to it, so strip it away before adding the
     MUST_NOT_THROW filter.  */
  gsi = gsi_start (finally);
  x = gsi_stmt (gsi);
  if (gimple_code (x) == GIMPLE_TRY
      && gimple_try_kind (x) == GIMPLE_TRY_CATCH
      && gimple_try_catch_is_cleanup (x))
    {
      gsi_insert_seq_before (&gsi, gimple_try_eval (x), GSI_SAME_STMT);
      gsi_remove (&gsi, false);
    }

  /* Wrap the block with protect_cleanup_actions as the action.  */
  x = gimple_build_eh_must_not_throw (protect_cleanup_actions);
  x = gimple_build_try (finally, gimple_seq_alloc_with_stmt (x),
                        GIMPLE_TRY_CATCH);
  finally = lower_eh_must_not_throw (outer_state, x);

  /* Drop all of this into the exception sequence.  */
  emit_post_landing_pad (&eh_seq, tf->region);
  gimple_seq_add_seq (&eh_seq, finally);
  if (finally_may_fallthru)
    emit_resx (&eh_seq, tf->region);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}
/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_nofallthru (struct leh_state *state,
                              struct leh_tf_state *tf)
{
  tree lab, return_val;
  gimple x;
  gimple_seq finally;
  struct goto_queue_node *q, *qe;

  lab = create_artificial_label (gimple_location (tf->try_finally_expr));

  /* We expect that tf->top_p is a GIMPLE_TRY.  */
  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  x = gimple_build_label (lab);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  return_val = NULL;
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL, &return_val);
    else
      do_goto_redirection (q, lab, NULL, tf);

  replace_goto_queue (tf);

  lower_eh_constructs_1 (state, finally);
  gimple_seq_add_seq (&tf->top_p_seq, finally);

  if (tf->may_throw)
    {
      emit_post_landing_pad (&eh_seq, tf->region);

      x = gimple_build_goto (lab);
      gimple_seq_add_stmt (&eh_seq, x);
    }
}
/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  gimple x;
  gimple_seq finally;
  tree finally_label;
  location_t loc = gimple_location (tf->try_finally_expr);

  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  lower_eh_constructs_1 (state, finally);

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
         the head of the FINALLY block.  Append a RESX at the end.  */
      emit_post_landing_pad (&eh_seq, tf->region);
      gimple_seq_add_seq (&eh_seq, finally);
      emit_resx (&eh_seq, tf->region);
      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
         the two blocks run together; we'll fall out the bottom.  */
      gimple_seq_add_seq (&tf->top_p_seq, finally);
      return;
    }

  finally_label = create_artificial_label (loc);
  x = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  gimple_seq_add_seq (&tf->top_p_seq, finally);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      tree return_val = NULL;
      for (; q < qe; ++q)
        do_return_redirection (q, finally_label, NULL, &return_val);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
        do_goto_redirection (q, finally_label, NULL, tf);
      replace_goto_queue (tf);

      if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label)
        {
          /* Reachable by goto to fallthru label only.  Redirect it
             to the new label (already created, sadly), and do not
             emit the final branch out, or the fallthru label.  */
          tf->fallthru_label = NULL;
          return;
        }
    }

  /* Place the original return/goto to the original destination
     immediately after the finally block.  */
  x = tf->goto_queue[0].cont_stmt;
  gimple_seq_add_stmt (&tf->top_p_seq, x);
  maybe_record_in_goto_queue (state, x);
}
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */

static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  gimple_seq finally;
  gimple_seq new_stmt;
  gimple_seq seq;
  gimple x;
  tree tmp;
  location_t tf_loc = gimple_location (tf->try_finally_expr);

  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);
  new_stmt = NULL;

  if (tf->may_fallthru)
    {
      seq = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, seq);
      gimple_seq_add_seq (&new_stmt, seq);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_seq_add_stmt (&new_stmt, x);
    }

  if (tf->may_throw)
    {
      seq = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, seq);

      emit_post_landing_pad (&eh_seq, tf->region);
      gimple_seq_add_seq (&eh_seq, seq);
      emit_resx (&eh_seq, tf->region);
    }

  if (tf->goto_queue)
    {
      struct goto_queue_node *q, *qe;
      tree return_val = NULL;
      int return_index, index;
      struct labels_s
      {
        struct goto_queue_node *q;
        tree label;
      } *labels;

      return_index = VEC_length (tree, tf->dest_array);
      labels = XCNEWVEC (struct labels_s, return_index + 1);

      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
        {
          index = q->index < 0 ? return_index : q->index;

          if (!labels[index].q)
            labels[index].q = q;
        }

      for (index = 0; index < return_index + 1; index++)
        {
          tree lab;

          q = labels[index].q;
          if (! q)
            continue;

          lab = labels[index].label
            = create_artificial_label (tf_loc);

          if (index == return_index)
            do_return_redirection (q, lab, NULL, &return_val);
          else
            do_goto_redirection (q, lab, NULL, tf);

          x = gimple_build_label (lab);
          gimple_seq_add_stmt (&new_stmt, x);

          seq = lower_try_finally_dup_block (finally, state);
          lower_eh_constructs_1 (state, seq);
          gimple_seq_add_seq (&new_stmt, seq);

          gimple_seq_add_stmt (&new_stmt, q->cont_stmt);
          maybe_record_in_goto_queue (state, q->cont_stmt);
        }

      for (q = tf->goto_queue; q < qe; q++)
        {
          tree lab;

          index = q->index < 0 ? return_index : q->index;

          if (labels[index].q == q)
            continue;

          lab = labels[index].label;

          if (index == return_index)
            do_return_redirection (q, lab, NULL, &return_val);
          else
            do_goto_redirection (q, lab, NULL, tf);
        }

      replace_goto_queue (tf);
      free (labels);
    }

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_seq (&tf->top_p_seq, new_stmt);
}
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by instrumenting
   each incoming edge and creating a switch statement at the end of the
   finally block that branches to the appropriate destination.  */
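
/* A sketch (not actual pass output) of the shape produced below: with a
   fallthru edge and one goto edge escaping the try block, each entry
   assigns a distinct constant to finally_tmp and branches to the finally
   label; after the finally body, a statement of the form

	switch (finally_tmp) { case 0: goto dest; case 1: goto fallthru; }

   dispatches to the destination the incoming edge originally wanted.  */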
static void
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree return_val = NULL;
  tree finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  tree last_case;
  VEC (tree,heap) *case_label_vec;
  gimple_seq switch_body;
  gimple x;
  tree tmp;
  gimple switch_stmt;
  gimple_seq finally;
  struct pointer_map_t *cont_map = NULL;
  /* The location of the TRY_FINALLY stmt.  */
  location_t tf_loc = gimple_location (tf->try_finally_expr);
  /* The location of the finally block.  */
  location_t finally_loc;

  switch_body = gimple_seq_alloc ();

  /* Mash the TRY block to the head of the chain.  */
  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  /* The location of the finally is either the last stmt in the finally
     block or the location of the TRY_FINALLY itself.  */
  finally_loc = gimple_seq_last_stmt (tf->top_p_seq) != NULL ?
    gimple_location (gimple_seq_last_stmt (tf->top_p_seq))
    : tf_loc;

  /* Lower the finally block itself.  */
  lower_eh_constructs_1 (state, finally);

  /* Prepare for switch statement generation.  */
  nlabels = VEC_length (tree, tf->dest_array);
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + tf->may_throw;
  ndests = fallthru_index + tf->may_fallthru;

  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label (finally_loc);

  /* We use VEC_quick_push on case_label_vec throughout this function,
     since we know the size in advance and allocate precisely as much
     space as needed.  */
  case_label_vec = VEC_alloc (tree, heap, ndests);
  last_case = NULL;
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence the code is
     laid out.  */

  if (tf->may_fallthru)
    {
      x = gimple_build_assign (finally_tmp,
                               build_int_cst (NULL, fallthru_index));
      gimple_seq_add_stmt (&tf->top_p_seq, x);

      last_case = build3 (CASE_LABEL_EXPR, void_type_node,
                          build_int_cst (NULL, fallthru_index),
                          NULL, create_artificial_label (tf_loc));
      VEC_quick_push (tree, case_label_vec, last_case);
      last_case_index++;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&switch_body, x);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_seq_add_stmt (&switch_body, x);
    }

  if (tf->may_throw)
    {
      emit_post_landing_pad (&eh_seq, tf->region);

      x = gimple_build_assign (finally_tmp,
                               build_int_cst (NULL, eh_index));
      gimple_seq_add_stmt (&eh_seq, x);

      x = gimple_build_goto (finally_label);
      gimple_seq_add_stmt (&eh_seq, x);

      last_case = build3 (CASE_LABEL_EXPR, void_type_node,
                          build_int_cst (NULL, eh_index),
                          NULL, create_artificial_label (tf_loc));
      VEC_quick_push (tree, case_label_vec, last_case);
      last_case_index++;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&eh_seq, x);
      emit_resx (&eh_seq, tf->region);
    }

  x = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  gimple_seq_add_seq (&tf->top_p_seq, finally);

  /* Redirect each incoming goto edge.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  /* Prepare the assignments to finally_tmp that are executed upon the
     entrance through a particular edge.  */
  for (; q < qe; ++q)
    {
      gimple_seq mod;
      int switch_id;
      unsigned int case_index;

      mod = gimple_seq_alloc ();

      if (q->index < 0)
        {
          x = gimple_build_assign (finally_tmp,
                                   build_int_cst (NULL, return_index));
          gimple_seq_add_stmt (&mod, x);
          do_return_redirection (q, finally_label, mod, &return_val);
          switch_id = return_index;
        }
      else
        {
          x = gimple_build_assign (finally_tmp,
                                   build_int_cst (NULL, q->index));
          gimple_seq_add_stmt (&mod, x);
          do_goto_redirection (q, finally_label, mod, tf);
          switch_id = q->index;
        }

      case_index = j + q->index;
      if (VEC_length (tree, case_label_vec) <= case_index
          || !VEC_index (tree, case_label_vec, case_index))
        {
          tree case_lab;
          void **slot;
          case_lab = build3 (CASE_LABEL_EXPR, void_type_node,
                             build_int_cst (NULL, switch_id),
                             NULL, NULL);
          /* We store the cont_stmt in the pointer map, so that we can recover
             it in the loop below.  We don't create the new label while
             walking the goto_queue because pointers don't offer a stable
             order.  */
          if (!cont_map)
            cont_map = pointer_map_create ();
          slot = pointer_map_insert (cont_map, case_lab);
          *slot = q->cont_stmt;
          VEC_quick_push (tree, case_label_vec, case_lab);
        }
    }
  for (j = last_case_index; j < last_case_index + nlabels; j++)
    {
      tree label;
      gimple cont_stmt;
      void **slot;

      last_case = VEC_index (tree, case_label_vec, j);

      gcc_assert (last_case);
      gcc_assert (cont_map);

      slot = pointer_map_contains (cont_map, last_case);
      /* As the comment above suggests, CASE_LABEL (last_case) was just a
         placeholder, it does not store an actual label, yet.  */
      gcc_assert (slot);
      cont_stmt = *(gimple *) slot;

      label = create_artificial_label (tf_loc);
      CASE_LABEL (last_case) = label;

      x = gimple_build_label (label);
      gimple_seq_add_stmt (&switch_body, x);
      gimple_seq_add_stmt (&switch_body, cont_stmt);
      maybe_record_in_goto_queue (state, cont_stmt);
    }
  if (cont_map)
    pointer_map_destroy (cont_map);

  replace_goto_queue (tf);

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  sort_case_labels (case_label_vec);

  /* Build the switch statement, setting last_case to be the default
     label.  */
  switch_stmt = gimple_build_switch_vec (finally_tmp, last_case,
                                         case_label_vec);
  gimple_set_location (switch_stmt, finally_loc);

  /* Need to link SWITCH_STMT after running replace_goto_queue
     due to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_stmt (&tf->top_p_seq, switch_stmt);
  gimple_seq_add_seq (&tf->top_p_seq, switch_body);
}
/* Decide whether or not we are going to duplicate the finally block.
   There are several considerations.

   First, if this is Java, then the finally block contains code
   written by the user.  It has line numbers associated with it,
   so duplicating the block means it's difficult to set a breakpoint.
   Since controlling code generation via -g is verboten, we simply
   never duplicate code without optimization.

   Second, we'd like to prevent egregious code growth.  One way to
   do this is to estimate the size of the finally block, multiply
   that by the number of copies we'd need to make, and compare against
   the estimate of the size of the switch machinery we'd have to add.  */

static bool
decide_copy_try_finally (int ndests, gimple_seq finally)
{
  int f_estimate, sw_estimate;

  if (!optimize)
    return false;

  /* Finally estimate N times, plus N gotos.  */
  f_estimate = count_insns_seq (finally, &eni_size_weights);
  f_estimate = (f_estimate + 1) * ndests;

  /* Switch statement (cost 10), N variable assignments, N gotos.  */
  sw_estimate = 10 + 2 * ndests;

  /* Optimize for size clearly wants our best guess.  */
  if (optimize_function_for_size_p (cfun))
    return f_estimate < sw_estimate;

  /* ??? These numbers are completely made up so far.  */
  if (optimize > 1)
    return f_estimate < 100 || f_estimate < sw_estimate * 2;
  else
    return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
}
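
/* Worked example (illustrative numbers): a finally block of 10 insns with
   ndests == 3 gives f_estimate = (10 + 1) * 3 = 33 and sw_estimate =
   10 + 2 * 3 = 16.  When optimizing for size, 33 < 16 is false, so the
   switch form is used; at -O2, 33 < 100 holds and the block is copied.  */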
/* REG is the enclosing region for a possible cleanup region, or the region
   itself.  Returns TRUE if such a region would be unreachable.

   Cleanup regions within a must-not-throw region aren't actually reachable
   even if there are throwing stmts within them, because the personality
   routine will call terminate before unwinding.  */

static bool
cleanup_is_dead_in (eh_region reg)
{
  while (reg && reg->type == ERT_CLEANUP)
    reg = reg->outer;
  return (reg && reg->type == ERT_MUST_NOT_THROW);
}
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_FINALLY node
   to a sequence of labels and blocks, plus the exception region trees
   that record all the magic.  This is complicated by the need to
   arrange for the FINALLY block to be executed on all exits.  */

static gimple_seq
lower_try_finally (struct leh_state *state, gimple tp)
{
  struct leh_tf_state this_tf;
  struct leh_state this_state;
  int ndests;
  gimple_seq old_eh_seq;

  /* Process the try block.  */

  memset (&this_tf, 0, sizeof (this_tf));
  this_tf.try_finally_expr = tp;
  this_tf.top_p = tp;
  this_tf.outer = state;
  if (using_eh_for_cleanups_p && !cleanup_is_dead_in (state->cur_region))
    {
      this_tf.region = gen_eh_region_cleanup (state->cur_region);
      this_state.cur_region = this_tf.region;
    }
  else
    {
      this_tf.region = NULL;
      this_state.cur_region = state->cur_region;
    }

  this_state.ehp_region = state->ehp_region;
  this_state.tf = &this_tf;

  old_eh_seq = eh_seq;
  eh_seq = NULL;

  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));

  /* Determine if the try block is escaped through the bottom.  */
  this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));

  /* Determine if any exceptions are possible within the try block.  */
  if (this_tf.region)
    this_tf.may_throw = eh_region_may_contain_throw (this_tf.region);
  if (this_tf.may_throw)
    honor_protect_cleanup_actions (state, &this_state, &this_tf);

  /* Determine how many edges (still) reach the finally block.  Or rather,
     how many destinations are reached by the finally block.  Use this to
     determine how we process the finally block itself.  */

  ndests = VEC_length (tree, this_tf.dest_array);
  ndests += this_tf.may_fallthru;
  ndests += this_tf.may_return;
  ndests += this_tf.may_throw;

  /* If the FINALLY block is not reachable, dike it out.  */
  if (ndests == 0)
    {
      gimple_seq_add_seq (&this_tf.top_p_seq, gimple_try_eval (tp));
      gimple_try_set_cleanup (tp, NULL);
    }
  /* If the finally block doesn't fall through, then any destination
     we might try to impose there isn't reached either.  There may be
     some minor amount of cleanup and redirection still needed.  */
  else if (!gimple_seq_may_fallthru (gimple_try_cleanup (tp)))
    lower_try_finally_nofallthru (state, &this_tf);

  /* We can easily special-case redirection to a single destination.  */
  else if (ndests == 1)
    lower_try_finally_onedest (state, &this_tf);
  else if (decide_copy_try_finally (ndests, gimple_try_cleanup (tp)))
    lower_try_finally_copy (state, &this_tf);
  else
    lower_try_finally_switch (state, &this_tf);

  /* If someone requested we add a label at the end of the transformed
     block, do so.  */
  if (this_tf.fallthru_label)
    {
      /* This must be reached only if ndests == 0.  */
      gimple x = gimple_build_label (this_tf.fallthru_label);
      gimple_seq_add_stmt (&this_tf.top_p_seq, x);
    }

  VEC_free (tree, heap, this_tf.dest_array);
  if (this_tf.goto_queue)
    free (this_tf.goto_queue);
  if (this_tf.goto_queue_map)
    pointer_map_destroy (this_tf.goto_queue_map);

  /* If there was an old (aka outer) eh_seq, append the current eh_seq.
     If there was no old eh_seq, then the append is trivially already done.  */
  if (old_eh_seq)
    {
      if (eh_seq == NULL)
        eh_seq = old_eh_seq;
      else
        {
          gimple_seq new_eh_seq = eh_seq;
          eh_seq = old_eh_seq;
          gimple_seq_add_seq (&eh_seq, new_eh_seq);
        }
    }

  return this_tf.top_p_seq;
}
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_CATCH with a
   list of GIMPLE_CATCH to a sequence of labels and blocks, plus the
   exception region trees that record all the magic.  */

static gimple_seq
lower_catch (struct leh_state *state, gimple tp)
{
  eh_region try_region = NULL;
  struct leh_state this_state = *state;
  gimple_stmt_iterator gsi;
  tree out_label;
  gimple_seq new_seq;
  gimple x;
  location_t try_catch_loc = gimple_location (tp);

  if (flag_exceptions)
    {
      try_region = gen_eh_region_try (state->cur_region);
      this_state.cur_region = try_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));

  if (!eh_region_may_contain_throw (try_region))
    return gimple_try_eval (tp);

  new_seq = NULL;
  emit_eh_dispatch (&new_seq, try_region);
  emit_resx (&new_seq, try_region);

  this_state.cur_region = state->cur_region;
  this_state.ehp_region = try_region;

  out_label = NULL;
  for (gsi = gsi_start (gimple_try_cleanup (tp));
       !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      eh_catch c;
      gimple gcatch;
      gimple_seq handler;

      gcatch = gsi_stmt (gsi);
      c = gen_eh_region_catch (try_region, gimple_catch_types (gcatch));

      handler = gimple_catch_handler (gcatch);
      lower_eh_constructs_1 (&this_state, handler);

      c->label = create_artificial_label (UNKNOWN_LOCATION);
      x = gimple_build_label (c->label);
      gimple_seq_add_stmt (&new_seq, x);

      gimple_seq_add_seq (&new_seq, handler);

      if (gimple_seq_may_fallthru (new_seq))
        {
          if (!out_label)
            out_label = create_artificial_label (try_catch_loc);

          x = gimple_build_goto (out_label);
          gimple_seq_add_stmt (&new_seq, x);
        }
      if (!c->type_list)
        break;
    }

  gimple_try_set_cleanup (tp, new_seq);

  return frob_into_branch_around (tp, try_region, out_label);
}
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY with a
   GIMPLE_EH_FILTER to a sequence of labels and blocks, plus the exception
   region trees that record all the magic.  */

static gimple_seq
lower_eh_filter (struct leh_state *state, gimple tp)
{
  struct leh_state this_state = *state;
  eh_region this_region = NULL;
  gimple inner, x;
  gimple_seq new_seq;

  inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));

  if (flag_exceptions)
    {
      this_region = gen_eh_region_allowed (state->cur_region,
                                           gimple_eh_filter_types (inner));
      this_state.cur_region = this_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));

  if (!eh_region_may_contain_throw (this_region))
    return gimple_try_eval (tp);

  new_seq = NULL;
  this_state.cur_region = state->cur_region;
  this_state.ehp_region = this_region;

  emit_eh_dispatch (&new_seq, this_region);
  emit_resx (&new_seq, this_region);

  this_region->u.allowed.label = create_artificial_label (UNKNOWN_LOCATION);
  x = gimple_build_label (this_region->u.allowed.label);
  gimple_seq_add_stmt (&new_seq, x);

  lower_eh_constructs_1 (&this_state, gimple_eh_filter_failure (inner));
  gimple_seq_add_seq (&new_seq, gimple_eh_filter_failure (inner));

  gimple_try_set_cleanup (tp, new_seq);

  return frob_into_branch_around (tp, this_region, NULL);
}
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY with
   a GIMPLE_EH_MUST_NOT_THROW to a sequence of labels and blocks,
   plus the exception region trees that record all the magic.  */

static gimple_seq
lower_eh_must_not_throw (struct leh_state *state, gimple tp)
{
  struct leh_state this_state = *state;

  if (flag_exceptions)
    {
      gimple inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
      eh_region this_region;

      this_region = gen_eh_region_must_not_throw (state->cur_region);
      this_region->u.must_not_throw.failure_decl
        = gimple_eh_must_not_throw_fndecl (inner);
      this_region->u.must_not_throw.failure_loc = gimple_location (tp);

      /* In order to get mangling applied to this decl, we must mark it
         used now.  Otherwise, pass_ipa_free_lang_data won't think it
         needs to happen.  */
      TREE_USED (this_region->u.must_not_throw.failure_decl) = 1;

      this_state.cur_region = this_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));

  return gimple_try_eval (tp);
}
/* Implement a cleanup expression.  This is similar to try-finally,
   except that we only execute the cleanup block for exception edges.  */

static gimple_seq
lower_cleanup (struct leh_state *state, gimple tp)
{
  struct leh_state this_state = *state;
  eh_region this_region = NULL;
  struct leh_tf_state fake_tf;
  gimple_seq result;
  bool cleanup_dead = cleanup_is_dead_in (state->cur_region);

  if (flag_exceptions && !cleanup_dead)
    {
      this_region = gen_eh_region_cleanup (state->cur_region);
      this_state.cur_region = this_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));

  if (cleanup_dead || !eh_region_may_contain_throw (this_region))
    return gimple_try_eval (tp);

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.top_p = fake_tf.try_finally_expr = tp;
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
  fake_tf.may_throw = true;

  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  if (fake_tf.may_throw)
    {
      /* In this case honor_protect_cleanup_actions had nothing to do,
         and we should process this normally.  */
      lower_eh_constructs_1 (state, gimple_try_cleanup (tp));
      result = frob_into_branch_around (tp, this_region,
                                        fake_tf.fallthru_label);
    }
  else
    {
      /* In this case honor_protect_cleanup_actions did nearly all of
         the work.  All we have left is to append the fallthru_label.  */

      result = gimple_try_eval (tp);
      if (fake_tf.fallthru_label)
        {
          gimple x = gimple_build_label (fake_tf.fallthru_label);
          gimple_seq_add_stmt (&result, x);
        }
    }
  return result;
}
/* Main loop for lowering eh constructs.  Also moves gsi to the next
   statement.  */

static void
lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi)
{
  gimple_seq replace;
  gimple x;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      {
        tree fndecl = gimple_call_fndecl (stmt);
        tree rhs, lhs;

        if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
          switch (DECL_FUNCTION_CODE (fndecl))
            {
            case BUILT_IN_EH_POINTER:
              /* The front end may have generated a call to
                 __builtin_eh_pointer (0) within a catch region.  Replace
                 this zero argument with the current catch region number.  */
              if (state->ehp_region)
                {
                  tree nr = build_int_cst (NULL, state->ehp_region->index);
                  gimple_call_set_arg (stmt, 0, nr);
                }
              else
                {
                  /* The user has done something silly.  Remove it.  */
                  rhs = null_pointer_node;
                  goto do_replace;
                }
              break;

            case BUILT_IN_EH_FILTER:
              /* ??? This should never appear, but since it's a builtin it
                 is accessible to abuse by users.  Just remove it and
                 replace the use with the arbitrary value zero.  */
              rhs = build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
            do_replace:
              lhs = gimple_call_lhs (stmt);
              x = gimple_build_assign (lhs, rhs);
              gsi_insert_before (gsi, x, GSI_SAME_STMT);
              /* FALLTHRU */

            case BUILT_IN_EH_COPY_VALUES:
              /* Likewise this should not appear.  Remove it.  */
              gsi_remove (gsi, true);
              return;

            default:
              break;
            }
      }
      /* FALLTHRU */

    case GIMPLE_ASSIGN:
      /* If the stmt can throw use a new temporary for the assignment
         to a LHS.  This makes sure the old value of the LHS is
         available on the EH edge.  Only do so for statements that
         potentially fall thru (no noreturn calls e.g.), otherwise
         this new assignment might create fake fallthru regions.  */
      if (stmt_could_throw_p (stmt)
          && gimple_has_lhs (stmt)
          && gimple_stmt_may_fallthru (stmt)
          && !tree_could_throw_p (gimple_get_lhs (stmt))
          && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
        {
          tree lhs = gimple_get_lhs (stmt);
          tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
          gimple s = gimple_build_assign (lhs, tmp);
          gimple_set_location (s, gimple_location (stmt));
          gimple_set_block (s, gimple_block (stmt));
          gimple_set_lhs (stmt, tmp);
          if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
              || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
            DECL_GIMPLE_REG_P (tmp) = 1;
          gsi_insert_after (gsi, s, GSI_SAME_STMT);
        }
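      /* For example (illustrative): a throwing statement "lhs = call ();"
         becomes "tmp = call (); lhs = tmp;", so the old value of LHS is
         still available along the exception edge.  */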
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && stmt_could_throw_p (stmt))
        {
          record_stmt_eh_region (state->cur_region, stmt);
          note_eh_region_may_contain_throw (state->cur_region);
        }
      break;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
      maybe_record_in_goto_queue (state, stmt);
      break;

    case GIMPLE_SWITCH:
      verify_norecord_switch_expr (state, stmt);
      break;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
        replace = lower_try_finally (state, stmt);
      else
        {
          x = gimple_seq_first_stmt (gimple_try_cleanup (stmt));
          if (!x)
            {
              replace = gimple_try_eval (stmt);
              lower_eh_constructs_1 (state, replace);
            }
          else
            switch (gimple_code (x))
              {
              case GIMPLE_CATCH:
                replace = lower_catch (state, stmt);
                break;
              case GIMPLE_EH_FILTER:
                replace = lower_eh_filter (state, stmt);
                break;
              case GIMPLE_EH_MUST_NOT_THROW:
                replace = lower_eh_must_not_throw (state, stmt);
                break;
              default:
                replace = lower_cleanup (state, stmt);
                break;
              }
        }

      /* Remove the old stmt and insert the transformed sequence
         instead.  */
      gsi_insert_seq_before (gsi, replace, GSI_SAME_STMT);
      gsi_remove (gsi, true);

      /* Return since we don't want gsi_next ().  */
      return;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }

  gsi_next (gsi);
}
/* A helper to unwrap a gimple_seq and feed stmts to lower_eh_constructs_2.  */

static void
lower_eh_constructs_1 (struct leh_state *state, gimple_seq seq)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (seq); !gsi_end_p (gsi);)
    lower_eh_constructs_2 (state, &gsi);
}
static unsigned int
lower_eh_constructs (void)
{
  struct leh_state null_state;
  gimple_seq bodyp;

  bodyp = gimple_body (current_function_decl);
  if (bodyp == NULL)
    return 0;

  finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
  eh_region_may_contain_throw_map = BITMAP_ALLOC (NULL);
  memset (&null_state, 0, sizeof (null_state));

  collect_finally_tree_1 (bodyp, NULL);
  lower_eh_constructs_1 (&null_state, bodyp);

  /* We assume there's a return statement, or something, at the end of
     the function, and thus plopping the EH sequence afterward won't
     change anything.  */
  gcc_assert (!gimple_seq_may_fallthru (bodyp));
  gimple_seq_add_seq (&bodyp, eh_seq);

  /* We assume that since BODYP already existed, adding EH_SEQ to it
     didn't change its value, and we don't have to re-set the function.  */
  gcc_assert (bodyp == gimple_body (current_function_decl));

  htab_delete (finally_tree);
  BITMAP_FREE (eh_region_may_contain_throw_map);
  eh_seq = NULL;

  /* If this function needs a language specific EH personality routine
     and the frontend didn't already set one do so now.  */
  if (function_needs_eh_personality (cfun) == eh_personality_lang
      && !DECL_FUNCTION_PERSONALITY (current_function_decl))
    DECL_FUNCTION_PERSONALITY (current_function_decl)
      = lang_hooks.eh_personality ();

  return 0;
}
struct gimple_opt_pass pass_lower_eh =
{
 {
  GIMPLE_PASS,
  "eh",					/* name */
  NULL,					/* gate */
  lower_eh_constructs,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_EH,				/* tv_id */
  PROP_gimple_lcf,			/* properties_required */
  PROP_gimple_leh,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func			/* todo_flags_finish */
 }
};
/* Create the multiple edges from an EH_DISPATCH statement to all of
   the possible handlers for its EH region.  Return true if there's
   no fallthru edge; false if there is.  */

bool
make_eh_dispatch_edges (gimple stmt)
{
  eh_region r;
  eh_catch c;
  basic_block src, dst;

  r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
  src = gimple_bb (stmt);

  switch (r->type)
    {
    case ERT_TRY:
      for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
        {
          dst = label_to_block (c->label);
          make_edge (src, dst, 0);

          /* A catch-all handler doesn't have a fallthru.  */
          if (c->type_list == NULL)
            return false;
        }
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      dst = label_to_block (r->u.allowed.label);
      make_edge (src, dst, 0);
      break;

    default:
      gcc_unreachable ();
    }

  return true;
}
/* Create the single EH edge from STMT to its nearest landing pad,
   if there is such a landing pad within the current function.  */

void
make_eh_edges (gimple stmt)
{
  basic_block src, dst;
  eh_landing_pad lp;
  int lp_nr;

  lp_nr = lookup_stmt_eh_lp (stmt);
  if (lp_nr <= 0)
    return;

  lp = get_eh_landing_pad_from_number (lp_nr);
  gcc_assert (lp != NULL);

  src = gimple_bb (stmt);
  dst = label_to_block (lp->post_landing_pad);
  make_edge (src, dst, EDGE_EH);
}
/* Do the work in redirecting EDGE_IN to NEW_BB within the EH region tree;
   do not actually perform the final edge redirection.

   CHANGE_REGION is true when we're being called from cleanup_empty_eh and
   we intend to change the destination EH region as well; this means
   EH_LANDING_PAD_NR must already be set on the destination block label.
   If false, we're being called from generic cfg manipulation code and we
   should preserve our place within the region tree.  */

static void
redirect_eh_edge_1 (edge edge_in, basic_block new_bb, bool change_region)
{
  eh_landing_pad old_lp, new_lp;
  basic_block old_bb;
  gimple throw_stmt;
  int old_lp_nr, new_lp_nr;
  tree old_label, new_label;
  edge_iterator ei;
  edge e;

  old_bb = edge_in->dest;
  old_label = gimple_block_label (old_bb);
  old_lp_nr = EH_LANDING_PAD_NR (old_label);
  gcc_assert (old_lp_nr > 0);
  old_lp = get_eh_landing_pad_from_number (old_lp_nr);

  throw_stmt = last_stmt (edge_in->src);
  gcc_assert (lookup_stmt_eh_lp (throw_stmt) == old_lp_nr);

  new_label = gimple_block_label (new_bb);

  /* Look for an existing region that might be using NEW_BB already.  */
  new_lp_nr = EH_LANDING_PAD_NR (new_label);
  if (new_lp_nr)
    {
      new_lp = get_eh_landing_pad_from_number (new_lp_nr);
      gcc_assert (new_lp);

      /* Unless CHANGE_REGION is true, the new and old landing pad
         had better be associated with the same EH region.  */
      gcc_assert (change_region || new_lp->region == old_lp->region);
    }
  else
    {
      new_lp = NULL;
      gcc_assert (!change_region);
    }

  /* Notice when we redirect the last EH edge away from OLD_BB.  */
  FOR_EACH_EDGE (e, ei, old_bb->preds)
    if (e != edge_in && (e->flags & EDGE_EH))
      break;

  if (new_lp)
    {
      /* NEW_LP already exists.  If there are still edges into OLD_LP,
         there's nothing to do with the EH tree.  If there are no more
         edges into OLD_LP, then we want to remove OLD_LP as it is unused.
         If CHANGE_REGION is true, then our caller is expecting to remove
         the landing pad.  */
      if (e == NULL && !change_region)
        remove_eh_landing_pad (old_lp);
    }
  else
    {
      /* No correct landing pad exists.  If there are no more edges
         into OLD_LP, then we can simply re-use the existing landing pad.
         Otherwise, we have to create a new landing pad.  */
      if (e == NULL)
        {
          EH_LANDING_PAD_NR (old_lp->post_landing_pad) = 0;
          new_lp = old_lp;
        }
      else
        new_lp = gen_eh_landing_pad (old_lp->region);
      new_lp->post_landing_pad = new_label;
      EH_LANDING_PAD_NR (new_label) = new_lp->index;
    }

  /* Maybe move the throwing statement to the new region.  */
  if (old_lp != new_lp)
    {
      remove_stmt_from_eh_lp (throw_stmt);
      add_stmt_to_eh_lp (throw_stmt, new_lp->index);
    }
}
/* Redirect EH edge E to NEW_BB.  */

edge
redirect_eh_edge (edge edge_in, basic_block new_bb)
{
  redirect_eh_edge_1 (edge_in, new_bb, false);
  return ssa_redirect_edge (edge_in, new_bb);
}
/* This is a subroutine of gimple_redirect_edge_and_branch.  Update the
   labels for redirecting a non-fallthru EH_DISPATCH edge E to NEW_BB.
   The actual edge update will happen in the caller.  */

void
redirect_eh_dispatch_edge (gimple stmt, edge e, basic_block new_bb)
{
  tree new_lab = gimple_block_label (new_bb);
  bool any_changed = false;
  basic_block old_bb;
  eh_region r;
  eh_catch c;

  r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
  switch (r->type)
    {
    case ERT_TRY:
      for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
        {
          old_bb = label_to_block (c->label);
          if (old_bb == e->dest)
            {
              c->label = new_lab;
              any_changed = true;
            }
        }
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      old_bb = label_to_block (r->u.allowed.label);
      gcc_assert (old_bb == e->dest);
      r->u.allowed.label = new_lab;
      any_changed = true;
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (any_changed);
}
2265 /* Helper function for operation_could_trap_p and stmt_could_throw_p. */
2268 operation_could_trap_helper_p (enum tree_code op,
2279 case TRUNC_DIV_EXPR:
2281 case FLOOR_DIV_EXPR:
2282 case ROUND_DIV_EXPR:
2283 case EXACT_DIV_EXPR:
2285 case FLOOR_MOD_EXPR:
2286 case ROUND_MOD_EXPR:
2287 case TRUNC_MOD_EXPR:
2289 if (honor_snans || honor_trapv)
2292 return flag_trapping_math;
2293 if (!TREE_CONSTANT (divisor) || integer_zerop (divisor))
2302 /* Some floating point comparisons may trap. */
2307 case UNORDERED_EXPR:
2317 case FIX_TRUNC_EXPR:
2318 /* Conversion of floating point might trap. */
2324 /* These operations don't trap with floating point. */
2332 /* Any floating arithmetic may trap. */
2333 if (fp_operation && flag_trapping_math)
2341 /* Constructing an object cannot trap. */
2345 /* Any floating arithmetic may trap. */
2346 if (fp_operation && flag_trapping_math)
2354 /* Return true if operation OP may trap. FP_OPERATION is true if OP is applied
2355 to floating-point values. HONOR_TRAPV is true if OP is applied to integer
2356 type operands that may trap. If OP is a division operator, DIVISOR contains
2357 the value of the divisor. */
2360 operation_could_trap_p (enum tree_code op, bool fp_operation, bool honor_trapv,
2363 bool honor_nans = (fp_operation && flag_trapping_math
2364 && !flag_finite_math_only);
2365 bool honor_snans = fp_operation && flag_signaling_nans != 0;
2368 if (TREE_CODE_CLASS (op) != tcc_comparison
2369 && TREE_CODE_CLASS (op) != tcc_unary
2370 && TREE_CODE_CLASS (op) != tcc_binary)
2373 return operation_could_trap_helper_p (op, fp_operation, honor_trapv,
2374 honor_nans, honor_snans, divisor,
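/* For illustration (editor's sketch, not part of the original sources):
   given the semantics above, one would expect

     operation_could_trap_p (TRUNC_DIV_EXPR, false, false, integer_zero_node)

   to return true, since integer division by a zero (or non-constant)
   divisor traps, while

     operation_could_trap_p (PLUS_EXPR, false, false, NULL_TREE)

   returns false for plain integer addition and becomes true for the same
   PLUS_EXPR when FP_OPERATION is set and -ftrapping-math is in effect,
   because any floating arithmetic may then trap.  */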
2378 /* Return true if EXPR can trap, as in dereferencing an invalid pointer
2379 location or floating point arithmetic. Cf. the RTL version, may_trap_p.
2380 This routine expects only GIMPLE lhs or rhs input. */
2383 tree_could_trap_p (tree expr)
2385 enum tree_code code;
2386 bool fp_operation = false;
2387 bool honor_trapv = false;
2388 tree t, base, div = NULL_TREE;
2393 code = TREE_CODE (expr);
2394 t = TREE_TYPE (expr);
2398 if (COMPARISON_CLASS_P (expr))
2399 fp_operation = FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)));
2401 fp_operation = FLOAT_TYPE_P (t);
2402 honor_trapv = INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t);
2405 if (TREE_CODE_CLASS (code) == tcc_binary)
2406 div = TREE_OPERAND (expr, 1);
2407 if (operation_could_trap_p (code, fp_operation, honor_trapv, div))
2413 case TARGET_MEM_REF:
2414 if (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR
2415 && !TMR_INDEX (expr) && !TMR_INDEX2 (expr))
2417 return !TREE_THIS_NOTRAP (expr);
2423 case VIEW_CONVERT_EXPR:
2424 case WITH_SIZE_EXPR:
2425 expr = TREE_OPERAND (expr, 0);
2426 code = TREE_CODE (expr);
2429 case ARRAY_RANGE_REF:
2430 base = TREE_OPERAND (expr, 0);
2431 if (tree_could_trap_p (base))
2433 if (TREE_THIS_NOTRAP (expr))
2435 return !range_in_array_bounds_p (expr);
2438 base = TREE_OPERAND (expr, 0);
2439 if (tree_could_trap_p (base))
2441 if (TREE_THIS_NOTRAP (expr))
2443 return !in_array_bounds_p (expr);
2446 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
2450 return !TREE_THIS_NOTRAP (expr);
2453 return TREE_THIS_VOLATILE (expr);
2456 t = get_callee_fndecl (expr);
2457 /* Assume that calls to weak functions may trap. */
2458 if (!t || !DECL_P (t) || DECL_WEAK (t))
2464 /* Assume that accesses to weak vars or functions may trap. */
2465 if (DECL_WEAK (expr))
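/* For illustration (editor's sketch, not part of the original sources):
   under the rules above, a dereference such as *p could trap unless
   marked TREE_THIS_NOTRAP, an ARRAY_REF a[i] could trap unless the index
   is provably in bounds, and any access to a DECL_WEAK variable or
   function could trap because the symbol may be unresolved at run time,
   whereas taking an address or ordinary integer arithmetic could not.  */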
2475 /* Helper for stmt_could_throw_p. Return true if STMT (assumed to be
2476 an assignment or a conditional) may throw. */
2479 stmt_could_throw_1_p (gimple stmt)
2481 enum tree_code code = gimple_expr_code (stmt);
2482 bool honor_nans = false;
2483 bool honor_snans = false;
2484 bool fp_operation = false;
2485 bool honor_trapv = false;
2490 if (TREE_CODE_CLASS (code) == tcc_comparison
2491 || TREE_CODE_CLASS (code) == tcc_unary
2492 || TREE_CODE_CLASS (code) == tcc_binary)
2494 t = gimple_expr_type (stmt);
2495 fp_operation = FLOAT_TYPE_P (t);
2498 honor_nans = flag_trapping_math && !flag_finite_math_only;
2499 honor_snans = flag_signaling_nans != 0;
2501 else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
2505 /* Check if the main expression may trap. */
2506 t = is_gimple_assign (stmt) ? gimple_assign_rhs2 (stmt) : NULL;
2507 ret = operation_could_trap_helper_p (code, fp_operation, honor_trapv,
2508 honor_nans, honor_snans, t,
2513 /* If the expression does not trap, see if any of the individual operands may
2514 trap. */
2515 for (i = 0; i < gimple_num_ops (stmt); i++)
2516 if (tree_could_trap_p (gimple_op (stmt, i)))
2523 /* Return true if statement STMT could throw an exception. */
2526 stmt_could_throw_p (gimple stmt)
2528 if (!flag_exceptions)
2531 /* The only statements that can throw an exception are assignments,
2532 conditionals, calls, resx, and asms. */
2533 switch (gimple_code (stmt))
2539 return !gimple_call_nothrow_p (stmt);
2543 if (!cfun->can_throw_non_call_exceptions)
2545 return stmt_could_throw_1_p (stmt);
2548 if (!cfun->can_throw_non_call_exceptions)
2550 return gimple_asm_volatile_p (stmt);
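/* For illustration (editor's sketch, not part of the original sources):
   with -fexceptions alone, only calls not marked nothrow are treated as
   throwing here; adding -fnon-call-exceptions (the
   can_throw_non_call_exceptions checks above) also lets a trapping
   assignment such as x = *p or y = a / b, or a volatile asm, throw.  */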
2558 /* Return true if expression T could throw an exception. */
2561 tree_could_throw_p (tree t)
2563 if (!flag_exceptions)
2565 if (TREE_CODE (t) == MODIFY_EXPR)
2567 if (cfun->can_throw_non_call_exceptions
2568 && tree_could_trap_p (TREE_OPERAND (t, 0)))
2570 t = TREE_OPERAND (t, 1);
2573 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2574 t = TREE_OPERAND (t, 0);
2575 if (TREE_CODE (t) == CALL_EXPR)
2576 return (call_expr_flags (t) & ECF_NOTHROW) == 0;
2577 if (cfun->can_throw_non_call_exceptions)
2578 return tree_could_trap_p (t);
2582 /* Return true if STMT can throw an exception that is not caught within
2583 the current function (CFUN). */
2586 stmt_can_throw_external (gimple stmt)
2590 if (!stmt_could_throw_p (stmt))
2593 lp_nr = lookup_stmt_eh_lp (stmt);
2597 /* Return true if STMT can throw an exception that is caught within
2598 the current function (CFUN). */
2601 stmt_can_throw_internal (gimple stmt)
2605 if (!stmt_could_throw_p (stmt))
2608 lp_nr = lookup_stmt_eh_lp (stmt);
2612 /* Given a statement STMT in IFUN, if STMT can no longer throw, then
2613 remove any entry it might have from the EH table. Return true if
2614 any change was made. */
2617 maybe_clean_eh_stmt_fn (struct function *ifun, gimple stmt)
2619 if (stmt_could_throw_p (stmt))
2621 return remove_stmt_from_eh_lp_fn (ifun, stmt);
2624 /* Likewise, but always use the current function. */
2627 maybe_clean_eh_stmt (gimple stmt)
2629 return maybe_clean_eh_stmt_fn (cfun, stmt);
2632 /* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
2633 OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
2634 in the table if it should be in there. Return TRUE if a replacement was
2635 done that may require an EH edge purge. */
2638 maybe_clean_or_replace_eh_stmt (gimple old_stmt, gimple new_stmt)
2640 int lp_nr = lookup_stmt_eh_lp (old_stmt);
2644 bool new_stmt_could_throw = stmt_could_throw_p (new_stmt);
2646 if (new_stmt == old_stmt && new_stmt_could_throw)
2649 remove_stmt_from_eh_lp (old_stmt);
2650 if (new_stmt_could_throw)
2652 add_stmt_to_eh_lp (new_stmt, lp_nr);
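/* For illustration (editor's usage sketch, not part of the original
   sources): a caller that folds a statement in place would typically do

     if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
       cfg_changed |= gimple_purge_dead_eh_edges (gimple_bb (stmt));

   i.e. once the statement can no longer throw, the now-dead EH edges
   out of its block must be purged by the caller.  */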
2662 /* Given a statement OLD_STMT in OLD_FUN and a duplicate statement NEW_STMT
2663 in NEW_FUN, copy the EH table data from OLD_STMT to NEW_STMT. The MAP
2664 operand is the return value of duplicate_eh_regions. */
2667 maybe_duplicate_eh_stmt_fn (struct function *new_fun, gimple new_stmt,
2668 struct function *old_fun, gimple old_stmt,
2669 struct pointer_map_t *map, int default_lp_nr)
2671 int old_lp_nr, new_lp_nr;
2674 if (!stmt_could_throw_p (new_stmt))
2677 old_lp_nr = lookup_stmt_eh_lp_fn (old_fun, old_stmt);
2680 if (default_lp_nr == 0)
2682 new_lp_nr = default_lp_nr;
2684 else if (old_lp_nr > 0)
2686 eh_landing_pad old_lp, new_lp;
2688 old_lp = VEC_index (eh_landing_pad, old_fun->eh->lp_array, old_lp_nr);
2689 slot = pointer_map_contains (map, old_lp);
2690 new_lp = (eh_landing_pad) *slot;
2691 new_lp_nr = new_lp->index;
2695 eh_region old_r, new_r;
2697 old_r = VEC_index (eh_region, old_fun->eh->region_array, -old_lp_nr);
2698 slot = pointer_map_contains (map, old_r);
2699 new_r = (eh_region) *slot;
2700 new_lp_nr = -new_r->index;
2703 add_stmt_to_eh_lp_fn (new_fun, new_stmt, new_lp_nr);
2707 /* Similar, but both OLD_STMT and NEW_STMT are within the current function,
2708 and thus no remapping is required. */
2711 maybe_duplicate_eh_stmt (gimple new_stmt, gimple old_stmt)
2715 if (!stmt_could_throw_p (new_stmt))
2718 lp_nr = lookup_stmt_eh_lp (old_stmt);
2722 add_stmt_to_eh_lp (new_stmt, lp_nr);
2726 /* Returns TRUE if oneh and twoh are exception handlers (gimple_try_cleanup of
2727 GIMPLE_TRY) that are similar enough to be considered the same. Currently
2728 this only handles handlers consisting of a single call, as that's the
2729 important case for C++: a destructor call for a particular object showing
2730 up in multiple handlers. */
2733 same_handler_p (gimple_seq oneh, gimple_seq twoh)
2735 gimple_stmt_iterator gsi;
2739 gsi = gsi_start (oneh);
2740 if (!gsi_one_before_end_p (gsi))
2742 ones = gsi_stmt (gsi);
2744 gsi = gsi_start (twoh);
2745 if (!gsi_one_before_end_p (gsi))
2747 twos = gsi_stmt (gsi);
2749 if (!is_gimple_call (ones)
2750 || !is_gimple_call (twos)
2751 || gimple_call_lhs (ones)
2752 || gimple_call_lhs (twos)
2753 || gimple_call_chain (ones)
2754 || gimple_call_chain (twos)
2755 || !operand_equal_p (gimple_call_fn (ones), gimple_call_fn (twos), 0)
2756 || gimple_call_num_args (ones) != gimple_call_num_args (twos))
2759 for (ai = 0; ai < gimple_call_num_args (ones); ++ai)
2760 if (!operand_equal_p (gimple_call_arg (ones, ai),
2761 gimple_call_arg (twos, ai), 0))
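/* For illustration (editor's sketch, not part of the original sources):
   the typical match is two cleanup sequences that both reduce to the
   single call

     A::~A (&a);

   with no LHS, no static chain, the same callee, and the same argument,
   which the tests above accept as the same handler.  */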
2767 /* Optimize
2768 try { A() } finally { try { ~B() } catch { ~A() } }
2769 try { ... } finally { ~A() }
2770 into
2771 try { A() } catch { ~B() }
2772 try { ~B() ... } finally { ~A() }
2774 This occurs frequently in C++, where A is a local variable and B is a
2775 temporary used in the initializer for A. */
2778 optimize_double_finally (gimple one, gimple two)
2781 gimple_stmt_iterator gsi;
2783 gsi = gsi_start (gimple_try_cleanup (one));
2784 if (!gsi_one_before_end_p (gsi))
2787 oneh = gsi_stmt (gsi);
2788 if (gimple_code (oneh) != GIMPLE_TRY
2789 || gimple_try_kind (oneh) != GIMPLE_TRY_CATCH)
2792 if (same_handler_p (gimple_try_cleanup (oneh), gimple_try_cleanup (two)))
2794 gimple_seq seq = gimple_try_eval (oneh);
2796 gimple_try_set_cleanup (one, seq);
2797 gimple_try_set_kind (one, GIMPLE_TRY_CATCH);
2798 seq = copy_gimple_seq_and_replace_locals (seq);
2799 gimple_seq_add_seq (&seq, gimple_try_eval (two));
2800 gimple_try_set_eval (two, seq);
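/* For illustration (editor's sketch, not part of the original sources):
   the nested try/finally shape above is what the C++ front end emits
   for code like

     struct B { B (); ~B (); };
     struct A { A (const B &); ~A (); };
     void f (void) { A a = B (); }

   where ~B() must run whether or not A's initialization throws and
   ~A() must run afterwards; the rewrite lets the single ~A() cleanup
   cover both regions instead of duplicating it.  */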
2804 /* Perform EH refactoring optimizations that are simpler to do when code
2805 flow has been lowered but EH structures haven't. */
2808 refactor_eh_r (gimple_seq seq)
2810 gimple_stmt_iterator gsi;
2815 gsi = gsi_start (seq);
2819 if (gsi_end_p (gsi))
2822 two = gsi_stmt (gsi);
2825 && gimple_code (one) == GIMPLE_TRY
2826 && gimple_code (two) == GIMPLE_TRY
2827 && gimple_try_kind (one) == GIMPLE_TRY_FINALLY
2828 && gimple_try_kind (two) == GIMPLE_TRY_FINALLY)
2829 optimize_double_finally (one, two);
2831 switch (gimple_code (one))
2834 refactor_eh_r (gimple_try_eval (one));
2835 refactor_eh_r (gimple_try_cleanup (one));
2838 refactor_eh_r (gimple_catch_handler (one));
2840 case GIMPLE_EH_FILTER:
2841 refactor_eh_r (gimple_eh_filter_failure (one));
2856 refactor_eh_r (gimple_body (current_function_decl));
2861 gate_refactor_eh (void)
2863 return flag_exceptions != 0;
2866 struct gimple_opt_pass pass_refactor_eh =
2871 gate_refactor_eh, /* gate */
2872 refactor_eh, /* execute */
2875 0, /* static_pass_number */
2876 TV_TREE_EH, /* tv_id */
2877 PROP_gimple_lcf, /* properties_required */
2878 0, /* properties_provided */
2879 0, /* properties_destroyed */
2880 0, /* todo_flags_start */
2881 TODO_dump_func /* todo_flags_finish */
2885 /* At the end of gimple optimization, we can lower RESX. */
2888 lower_resx (basic_block bb, gimple stmt, struct pointer_map_t *mnt_map)
2891 eh_region src_r, dst_r;
2892 gimple_stmt_iterator gsi;
2897 lp_nr = lookup_stmt_eh_lp (stmt);
2899 dst_r = get_eh_region_from_lp_number (lp_nr);
2903 src_r = get_eh_region_from_number (gimple_resx_region (stmt));
2904 gsi = gsi_last_bb (bb);
2908 /* We can wind up with no source region when pass_cleanup_eh shows
2909 that there are no entries into an eh region and deletes it, but
2910 then the block that contains the resx isn't removed. This can
2911 happen without optimization when the switch statement created by
2912 lower_try_finally_switch isn't simplified to remove the eh case.
2914 Resolve this by expanding the resx node to an abort. */
2916 fn = implicit_built_in_decls[BUILT_IN_TRAP];
2917 x = gimple_build_call (fn, 0);
2918 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
2920 while (EDGE_COUNT (bb->succs) > 0)
2921 remove_edge (EDGE_SUCC (bb, 0));
2925 /* When we have a destination region, we resolve this by copying
2926 the exception pointer and filter values into place, and changing
2927 the edge to immediately after the landing pad. */
2936 /* We are resuming into a MUST_NOT_THROW region. Expand a call to
2937 the failure decl into a new block, if needed. */
2938 gcc_assert (dst_r->type == ERT_MUST_NOT_THROW);
2940 slot = pointer_map_contains (mnt_map, dst_r);
2943 gimple_stmt_iterator gsi2;
2945 new_bb = create_empty_bb (bb);
2946 lab = gimple_block_label (new_bb);
2947 gsi2 = gsi_start_bb (new_bb);
2949 fn = dst_r->u.must_not_throw.failure_decl;
2950 x = gimple_build_call (fn, 0);
2951 gimple_set_location (x, dst_r->u.must_not_throw.failure_loc);
2952 gsi_insert_after (&gsi2, x, GSI_CONTINUE_LINKING);
2954 slot = pointer_map_insert (mnt_map, dst_r);
2960 new_bb = label_to_block (lab);
2963 gcc_assert (EDGE_COUNT (bb->succs) == 0);
2964 e = make_edge (bb, new_bb, EDGE_FALLTHRU);
2965 e->count = bb->count;
2966 e->probability = REG_BR_PROB_BASE;
2971 tree dst_nr = build_int_cst (NULL, dst_r->index);
2973 fn = implicit_built_in_decls[BUILT_IN_EH_COPY_VALUES];
2974 src_nr = build_int_cst (NULL, src_r->index);
2975 x = gimple_build_call (fn, 2, dst_nr, src_nr);
2976 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
2978 /* Update the flags for the outgoing edge. */
2979 e = single_succ_edge (bb);
2980 gcc_assert (e->flags & EDGE_EH);
2981 e->flags = (e->flags & ~EDGE_EH) | EDGE_FALLTHRU;
2983 /* If there are no more EH users of the landing pad, delete it. */
2984 FOR_EACH_EDGE (e, ei, e->dest->preds)
2985 if (e->flags & EDGE_EH)
2989 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2990 remove_eh_landing_pad (lp);
3000 /* When we don't have a destination region, this exception escapes
3001 up the call chain. We resolve this by generating a call to the
3002 _Unwind_Resume library function. */
3004 /* The ARM EABI redefines _Unwind_Resume as __cxa_end_cleanup
3005 with no arguments for C++ and Java. Check for that. */
3006 if (src_r->use_cxa_end_cleanup)
3008 fn = implicit_built_in_decls[BUILT_IN_CXA_END_CLEANUP];
3009 x = gimple_build_call (fn, 0);
3010 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3014 fn = implicit_built_in_decls[BUILT_IN_EH_POINTER];
3015 src_nr = build_int_cst (NULL, src_r->index);
3016 x = gimple_build_call (fn, 1, src_nr);
3017 var = create_tmp_var (ptr_type_node, NULL);
3018 var = make_ssa_name (var, x);
3019 gimple_call_set_lhs (x, var);
3020 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3022 fn = implicit_built_in_decls[BUILT_IN_UNWIND_RESUME];
3023 x = gimple_build_call (fn, 1, var);
3024 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3027 gcc_assert (EDGE_COUNT (bb->succs) == 0);
3030 gsi_remove (&gsi, true);
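/* For illustration (editor's sketch; the GIMPLE dump syntax here is
   approximate): when the exception escapes the function, lowering
   rewrites

     resx 1;

   into roughly

     _1 = __builtin_eh_pointer (1);
     __builtin_unwind_resume (_1);

   while a resx that transfers to an outer landing pad in the same
   function instead becomes a __builtin_eh_copy_values call followed by
   a fallthru edge to just past that landing pad, and a resx into a
   MUST_NOT_THROW region becomes a call to the region's failure decl.  */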
3036 execute_lower_resx (void)
3039 struct pointer_map_t *mnt_map;
3040 bool dominance_invalidated = false;
3041 bool any_rewritten = false;
3043 mnt_map = pointer_map_create ();
3047 gimple last = last_stmt (bb);
3048 if (last && is_gimple_resx (last))
3050 dominance_invalidated |= lower_resx (bb, last, mnt_map);
3051 any_rewritten = true;
3055 pointer_map_destroy (mnt_map);
3057 if (dominance_invalidated)
3059 free_dominance_info (CDI_DOMINATORS);
3060 free_dominance_info (CDI_POST_DOMINATORS);
3063 return any_rewritten ? TODO_update_ssa_only_virtuals : 0;
3067 gate_lower_resx (void)
3069 return flag_exceptions != 0;
3072 struct gimple_opt_pass pass_lower_resx =
3077 gate_lower_resx, /* gate */
3078 execute_lower_resx, /* execute */
3081 0, /* static_pass_number */
3082 TV_TREE_EH, /* tv_id */
3083 PROP_gimple_lcf, /* properties_required */
3084 0, /* properties_provided */
3085 0, /* properties_destroyed */
3086 0, /* todo_flags_start */
3087 TODO_dump_func | TODO_verify_flow /* todo_flags_finish */
3092 /* At the end of inlining, we can lower EH_DISPATCH. Return true when
3093 we have found some duplicate labels and removed some edges. */
3096 lower_eh_dispatch (basic_block src, gimple stmt)
3098 gimple_stmt_iterator gsi;
3103 bool redirected = false;
3105 region_nr = gimple_eh_dispatch_region (stmt);
3106 r = get_eh_region_from_number (region_nr);
3108 gsi = gsi_last_bb (src);
3114 VEC (tree, heap) *labels = NULL;
3115 tree default_label = NULL;
3119 struct pointer_set_t *seen_values = pointer_set_create ();
3121 /* Collect the labels for a switch. Zero the post_landing_pad
3122 field because we'll no longer have anything keeping these labels
3123 in existence and the optimizer will be free to merge these
3124 blocks at will. */
3125 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
3127 tree tp_node, flt_node, lab = c->label;
3128 bool have_label = false;
3131 tp_node = c->type_list;
3132 flt_node = c->filter_list;
3134 if (tp_node == NULL)
3136 default_label = lab;
3141 /* Filter out duplicate labels that arise when this handler
3142 is shadowed by an earlier one. When no labels are
3143 attached to the handler anymore, we remove
3144 the corresponding edge and then we delete unreachable
3145 blocks at the end of this pass. */
3146 if (! pointer_set_contains (seen_values, TREE_VALUE (flt_node)))
3148 tree t = build3 (CASE_LABEL_EXPR, void_type_node,
3149 TREE_VALUE (flt_node), NULL, lab);
3150 VEC_safe_push (tree, heap, labels, t);
3151 pointer_set_insert (seen_values, TREE_VALUE (flt_node));
3155 tp_node = TREE_CHAIN (tp_node);
3156 flt_node = TREE_CHAIN (flt_node);
3161 remove_edge (find_edge (src, label_to_block (lab)));
3166 /* Clean up the edge flags. */
3167 FOR_EACH_EDGE (e, ei, src->succs)
3169 if (e->flags & EDGE_FALLTHRU)
3171 /* If there was no catch-all, use the fallthru edge. */
3172 if (default_label == NULL)
3173 default_label = gimple_block_label (e->dest);
3174 e->flags &= ~EDGE_FALLTHRU;
3177 gcc_assert (default_label != NULL);
3179 /* Don't generate a switch if there's only a default case.
3180 This is common in the form of try { A; } catch (...) { B; }. */
3183 e = single_succ_edge (src);
3184 e->flags |= EDGE_FALLTHRU;
3188 fn = implicit_built_in_decls[BUILT_IN_EH_FILTER];
3189 x = gimple_build_call (fn, 1, build_int_cst (NULL, region_nr));
3190 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)), NULL);
3191 filter = make_ssa_name (filter, x);
3192 gimple_call_set_lhs (x, filter);
3193 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3195 /* Turn the default label into a default case. */
3196 default_label = build3 (CASE_LABEL_EXPR, void_type_node,
3197 NULL, NULL, default_label);
3198 sort_case_labels (labels);
3200 x = gimple_build_switch_vec (filter, default_label, labels);
3201 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3203 VEC_free (tree, heap, labels);
3205 pointer_set_destroy (seen_values);
3209 case ERT_ALLOWED_EXCEPTIONS:
3211 edge b_e = BRANCH_EDGE (src);
3212 edge f_e = FALLTHRU_EDGE (src);
3214 fn = implicit_built_in_decls[BUILT_IN_EH_FILTER];
3215 x = gimple_build_call (fn, 1, build_int_cst (NULL, region_nr));
3216 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)), NULL);
3217 filter = make_ssa_name (filter, x);
3218 gimple_call_set_lhs (x, filter);
3219 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3221 r->u.allowed.label = NULL;
3222 x = gimple_build_cond (EQ_EXPR, filter,
3223 build_int_cst (TREE_TYPE (filter),
3224 r->u.allowed.filter),
3225 NULL_TREE, NULL_TREE);
3226 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3228 b_e->flags = b_e->flags | EDGE_TRUE_VALUE;
3229 f_e->flags = (f_e->flags & ~EDGE_FALLTHRU) | EDGE_FALSE_VALUE;
3237 /* Replace the EH_DISPATCH with the SWITCH or COND generated above. */
3238 gsi_remove (&gsi, true);
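/* For illustration (editor's sketch; the GIMPLE dump syntax here is
   approximate): a dispatch for a try/catch region, say

     .eh_dispatch 1;

   is lowered to a comparison against the assigned filter values,
   roughly

     _1 = __builtin_eh_filter (1);
     switch (_1) <default: Ldef, case 1: Lcatch>

   while an ERT_ALLOWED_EXCEPTIONS region gets a two-way conditional
   testing its single allowed filter value, as built above.  */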
3243 execute_lower_eh_dispatch (void)
3246 bool any_rewritten = false;
3247 bool redirected = false;
3249 assign_filter_values ();
3253 gimple last = last_stmt (bb);
3254 if (last && gimple_code (last) == GIMPLE_EH_DISPATCH)
3256 redirected |= lower_eh_dispatch (bb, last);
3257 any_rewritten = true;
3262 delete_unreachable_blocks ();
3263 return any_rewritten ? TODO_update_ssa_only_virtuals : 0;
3267 gate_lower_eh_dispatch (void)
3269 return cfun->eh->region_tree != NULL;
3272 struct gimple_opt_pass pass_lower_eh_dispatch =
3276 "ehdisp", /* name */
3277 gate_lower_eh_dispatch, /* gate */
3278 execute_lower_eh_dispatch, /* execute */
3281 0, /* static_pass_number */
3282 TV_TREE_EH, /* tv_id */
3283 PROP_gimple_lcf, /* properties_required */
3284 0, /* properties_provided */
3285 0, /* properties_destroyed */
3286 0, /* todo_flags_start */
3287 TODO_dump_func | TODO_verify_flow /* todo_flags_finish */
3291 /* Walk statements, see what regions are really referenced and remove
3292 those that are unused. */
3295 remove_unreachable_handlers (void)
3297 sbitmap r_reachable, lp_reachable;
3303 r_reachable = sbitmap_alloc (VEC_length (eh_region, cfun->eh->region_array));
3305 = sbitmap_alloc (VEC_length (eh_landing_pad, cfun->eh->lp_array));
3306 sbitmap_zero (r_reachable);
3307 sbitmap_zero (lp_reachable);
3311 gimple_stmt_iterator gsi = gsi_start_bb (bb);
3313 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3315 gimple stmt = gsi_stmt (gsi);
3316 lp_nr = lookup_stmt_eh_lp (stmt);
3318 /* Negative LP numbers are MUST_NOT_THROW regions which
3319 are not considered BB enders. */
3321 SET_BIT (r_reachable, -lp_nr);
3323 /* Positive LP numbers are real landing pads, and are BB enders. */
3326 gcc_assert (gsi_one_before_end_p (gsi));
3327 region = get_eh_region_from_lp_number (lp_nr);
3328 SET_BIT (r_reachable, region->index);
3329 SET_BIT (lp_reachable, lp_nr);
3332 /* Avoid removing regions referenced from RESX/EH_DISPATCH. */
3333 switch (gimple_code (stmt))
3336 SET_BIT (r_reachable, gimple_resx_region (stmt));
3338 case GIMPLE_EH_DISPATCH:
3339 SET_BIT (r_reachable, gimple_eh_dispatch_region (stmt));
3349 fprintf (dump_file, "Before removal of unreachable regions:\n");
3350 dump_eh_tree (dump_file, cfun);
3351 fprintf (dump_file, "Reachable regions: ");
3352 dump_sbitmap_file (dump_file, r_reachable);
3353 fprintf (dump_file, "Reachable landing pads: ");
3354 dump_sbitmap_file (dump_file, lp_reachable);
3358 VEC_iterate (eh_region, cfun->eh->region_array, r_nr, region); ++r_nr)
3359 if (region && !TEST_BIT (r_reachable, r_nr))
3362 fprintf (dump_file, "Removing unreachable region %d\n", r_nr);
3363 remove_eh_handler (region);
3367 VEC_iterate (eh_landing_pad, cfun->eh->lp_array, lp_nr, lp); ++lp_nr)
3368 if (lp && !TEST_BIT (lp_reachable, lp_nr))
3371 fprintf (dump_file, "Removing unreachable landing pad %d\n", lp_nr);
3372 remove_eh_landing_pad (lp);
3377 fprintf (dump_file, "\n\nAfter removal of unreachable regions:\n");
3378 dump_eh_tree (dump_file, cfun);
3379 fprintf (dump_file, "\n\n");
3382 sbitmap_free (r_reachable);
3383 sbitmap_free (lp_reachable);
3385 #ifdef ENABLE_CHECKING
3386 verify_eh_tree (cfun);
3390 /* Remove regions that do not have landing pads. This assumes
3391 that remove_unreachable_handlers has already been run, and
3392 that we've just manipulated the landing pads since then. */
3395 remove_unreachable_handlers_no_lp (void)
3400 for (i = 1; VEC_iterate (eh_region, cfun->eh->region_array, i, r); ++i)
3401 if (r && r->landing_pads == NULL && r->type != ERT_MUST_NOT_THROW)
3404 fprintf (dump_file, "Removing unreachable region %d\n", i);
3405 remove_eh_handler (r);
3409 /* Undo critical edge splitting on an EH landing pad. Earlier, we
3410 optimistically split all sorts of edges, including EH edges. The
3411 optimization passes in between may not have needed them; if not,
3412 we should undo the split.
3414 Recognize this case by having one EH edge incoming to the BB and
3415 one normal edge outgoing; BB should be empty apart from the
3416 post_landing_pad label.
3418 Note that this is slightly different from the empty handler case
3419 handled by cleanup_empty_eh, in that the actual handler may yet
3420 have actual code but the landing pad has been separated from the
3421 handler. As such, cleanup_empty_eh relies on this transformation
3422 having been done first. */
3425 unsplit_eh (eh_landing_pad lp)
3427 basic_block bb = label_to_block (lp->post_landing_pad);
3428 gimple_stmt_iterator gsi;
3431 /* Quickly check the edge counts on BB for singularity. */
3432 if (EDGE_COUNT (bb->preds) != 1 || EDGE_COUNT (bb->succs) != 1)
3434 e_in = EDGE_PRED (bb, 0);
3435 e_out = EDGE_SUCC (bb, 0);
3437 /* Input edge must be EH and output edge must be normal. */
3438 if ((e_in->flags & EDGE_EH) == 0 || (e_out->flags & EDGE_EH) != 0)
3441 /* The block must be empty except for the labels and debug insns. */
3442 gsi = gsi_after_labels (bb);
3443 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
3444 gsi_next_nondebug (&gsi);
3445 if (!gsi_end_p (gsi))
3448 /* The destination block must not already have a landing pad
3449 for a different region. */
3450 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
3452 gimple stmt = gsi_stmt (gsi);
3456 if (gimple_code (stmt) != GIMPLE_LABEL)
3458 lab = gimple_label_label (stmt);
3459 lp_nr = EH_LANDING_PAD_NR (lab);
3460 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
3464 /* The new destination block must not already be a destination of
3465 the source block, lest we merge fallthru and EH edges and get
3466 all sorts of confusion. */
3467 if (find_edge (e_in->src, e_out->dest))
3470 /* ??? We can get degenerate phis due to cfg cleanups. I would have
3471 thought this should have been cleaned up by a phicprop pass, but
3472 that doesn't appear to handle virtuals. Propagate by hand. */
3473 if (!gimple_seq_empty_p (phi_nodes (bb)))
3475 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); )
3477 gimple use_stmt, phi = gsi_stmt (gsi);
3478 tree lhs = gimple_phi_result (phi);
3479 tree rhs = gimple_phi_arg_def (phi, 0);
3480 use_operand_p use_p;
3481 imm_use_iterator iter;
3483 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
3485 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
3486 SET_USE (use_p, rhs);
3489 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3490 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1;
3492 remove_phi_node (&gsi, true);
3496 if (dump_file && (dump_flags & TDF_DETAILS))
3497 fprintf (dump_file, "Unsplit EH landing pad %d to block %i.\n",
3498 lp->index, e_out->dest->index);
3500 /* Redirect the edge. Since redirect_eh_edge_1 expects to be moving
3501 a successor edge, humor it. But do the real CFG change with the
3502 predecessor of E_OUT in order to preserve the ordering of arguments
3503 to the PHI nodes in E_OUT->DEST. */
3504 redirect_eh_edge_1 (e_in, e_out->dest, false);
3505 redirect_edge_pred (e_out, e_in->src);
3506 e_out->flags = e_in->flags;
3507 e_out->probability = e_in->probability;
3508 e_out->count = e_in->count;
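/* For illustration (editor's sketch, not part of the original sources):
   the shape unsplit_eh undoes is

     [throwing stmt] --EH--> [post_landing_pad label only] --fallthru--> [handler]

   left over from critical edge splitting; afterwards the EH edge runs
   directly from the throwing statement's block to the handler block.  */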
3514 /* Examine each landing pad block and see if it matches unsplit_eh. */
3517 unsplit_all_eh (void)
3519 bool changed = false;
3523 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
3525 changed |= unsplit_eh (lp);
3530 /* A subroutine of cleanup_empty_eh. Redirect all EH edges incoming
3531 to OLD_BB to NEW_BB; return true on success, false on failure.
3533 OLD_BB_OUT is the edge into NEW_BB from OLD_BB, so if we miss any
3534 PHI variables from OLD_BB we can pick them up from OLD_BB_OUT.
3535 Virtual PHIs may be deleted and marked for renaming. */
3538 cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb,
3539 edge old_bb_out, bool change_region)
3541 gimple_stmt_iterator ngsi, ogsi;
3544 bitmap rename_virts;
3545 bitmap ophi_handled;
3547 FOR_EACH_EDGE (e, ei, old_bb->preds)
3548 redirect_edge_var_map_clear (e);
3550 ophi_handled = BITMAP_ALLOC (NULL);
3551 rename_virts = BITMAP_ALLOC (NULL);
3553 /* First, iterate through the PHIs on NEW_BB and set up the edge_var_map
3554 for the edges we're going to move. */
3555 for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); gsi_next (&ngsi))
3557 gimple ophi, nphi = gsi_stmt (ngsi);
3560 nresult = gimple_phi_result (nphi);
3561 nop = gimple_phi_arg_def (nphi, old_bb_out->dest_idx);
3563 /* Find the corresponding PHI in OLD_BB so we can forward-propagate
3564 the source ssa_name. */
3566 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
3568 ophi = gsi_stmt (ogsi);
3569 if (gimple_phi_result (ophi) == nop)
3574 /* If we did find the corresponding PHI, copy those inputs. */
3577 /* If NOP is used somewhere else beyond phis in new_bb, give up. */
3578 if (!has_single_use (nop))
3580 imm_use_iterator imm_iter;
3581 use_operand_p use_p;
3583 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, nop)
3585 if (!gimple_debug_bind_p (USE_STMT (use_p))
3586 && (gimple_code (USE_STMT (use_p)) != GIMPLE_PHI
3587 || gimple_bb (USE_STMT (use_p)) != new_bb))
3591 bitmap_set_bit (ophi_handled, SSA_NAME_VERSION (nop));
3592 FOR_EACH_EDGE (e, ei, old_bb->preds)
3597 if ((e->flags & EDGE_EH) == 0)
3599 oop = gimple_phi_arg_def (ophi, e->dest_idx);
3600 oloc = gimple_phi_arg_location (ophi, e->dest_idx);
3601 redirect_edge_var_map_add (e, nresult, oop, oloc);
3604 /* If we didn't find the PHI, but it's a VOP, remember to rename
3605 it later, assuming all other tests succeed. */
3606 else if (!is_gimple_reg (nresult))
3607 bitmap_set_bit (rename_virts, SSA_NAME_VERSION (nresult));
3608 /* If we didn't find the PHI, and it's a real variable, we know
3609 from the fact that OLD_BB is tree_empty_eh_handler_p that the
3610 variable is unchanged from input to the block and we can simply
3611 re-use the input to NEW_BB from the OLD_BB_OUT edge. */
3615 = gimple_phi_arg_location (nphi, old_bb_out->dest_idx);
3616 FOR_EACH_EDGE (e, ei, old_bb->preds)
3617 redirect_edge_var_map_add (e, nresult, nop, nloc);
3621 /* Second, verify that all PHIs from OLD_BB have been handled. If not,
3622 we don't know what values from the other edges into NEW_BB to use. */
3623 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
3625 gimple ophi = gsi_stmt (ogsi);
3626 tree oresult = gimple_phi_result (ophi);
3627 if (!bitmap_bit_p (ophi_handled, SSA_NAME_VERSION (oresult)))
3631 /* At this point we know that the merge will succeed. Remove the PHI
3632 nodes for the virtuals that we want to rename. */
3633 if (!bitmap_empty_p (rename_virts))
3635 for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); )
3637 gimple nphi = gsi_stmt (ngsi);
3638 tree nresult = gimple_phi_result (nphi);
3639 if (bitmap_bit_p (rename_virts, SSA_NAME_VERSION (nresult)))
3641 mark_virtual_phi_result_for_renaming (nphi);
3642 remove_phi_node (&ngsi, true);
3649 /* Finally, move the edges and update the PHIs. */
3650 for (ei = ei_start (old_bb->preds); (e = ei_safe_edge (ei)); )
3651 if (e->flags & EDGE_EH)
3653 redirect_eh_edge_1 (e, new_bb, change_region);
3654 redirect_edge_succ (e, new_bb);
3655 flush_pending_stmts (e);
3660 BITMAP_FREE (ophi_handled);
3661 BITMAP_FREE (rename_virts);
3665 FOR_EACH_EDGE (e, ei, old_bb->preds)
3666 redirect_edge_var_map_clear (e);
3667 BITMAP_FREE (ophi_handled);
3668 BITMAP_FREE (rename_virts);
3672 /* A subroutine of cleanup_empty_eh. Move a landing pad LP from its
3673 old region to NEW_REGION at BB. */
3676 cleanup_empty_eh_move_lp (basic_block bb, edge e_out,
3677 eh_landing_pad lp, eh_region new_region)
3679 gimple_stmt_iterator gsi;
3682 for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
3686 lp->region = new_region;
3687 lp->next_lp = new_region->landing_pads;
3688 new_region->landing_pads = lp;
3690 /* Delete the RESX that was matched within the empty handler block. */
3691 gsi = gsi_last_bb (bb);
3692 mark_virtual_ops_for_renaming (gsi_stmt (gsi));
3693 gsi_remove (&gsi, true);
3695 /* Clean up E_OUT for the fallthru. */
3696 e_out->flags = (e_out->flags & ~EDGE_EH) | EDGE_FALLTHRU;
3697 e_out->probability = REG_BR_PROB_BASE;
3700 /* A subroutine of cleanup_empty_eh. Handle more complex cases of
3701 unsplitting than unsplit_eh was prepared to handle, e.g. when
3702 multiple incoming edges and phis are involved. */
3705 cleanup_empty_eh_unsplit (basic_block bb, edge e_out, eh_landing_pad lp)
3707 gimple_stmt_iterator gsi;
3712 /* We really ought not have totally lost everything following
3713 a landing pad label. Given that BB is empty, there had better
3714 be a successor. */
3715 gcc_assert (e_out != NULL);
3717 /* The destination block must not already have a landing pad
3718 for a different region. */
3720 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
3722 gimple stmt = gsi_stmt (gsi);
3725 if (gimple_code (stmt) != GIMPLE_LABEL)
3727 lab = gimple_label_label (stmt);
3728 lp_nr = EH_LANDING_PAD_NR (lab);
3729 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
3733 /* The destination block must not be a regular successor for any
3734 of the preds of the landing pad. Thus, avoid turning
3735 <..>
3736 | \ EH
3737 | <..>
3738 | /
3739 <..>
3740 into
3741 <..>
3742 | | EH
3743 <..>
3744 which CFG verification would choke on. See PR45172. */
3745 FOR_EACH_EDGE (e, ei, bb->preds)
3746 if (find_edge (e->src, e_out->dest))
3749 /* Attempt to move the PHIs into the successor block. */
3750 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, false))
3752 if (dump_file && (dump_flags & TDF_DETAILS))
3754 "Unsplit EH landing pad %d to block %i "
3755 "(via cleanup_empty_eh).\n",
3756 lp->index, e_out->dest->index);
3763 /* Return true if edge E_FIRST is part of an empty infinite loop
3764 or leads to such a loop through a series of single successor
3765 blocks. */
3768 infinite_empty_loop_p (edge e_first)
3770 bool inf_loop = false;
3773 if (e_first->dest == e_first->src)
3776 e_first->src->aux = (void *) 1;
3777 for (e = e_first; single_succ_p (e->dest); e = single_succ_edge (e->dest))
3779 gimple_stmt_iterator gsi;
3785 e->dest->aux = (void *) 1;
3786 gsi = gsi_after_labels (e->dest);
3787 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
3788 gsi_next_nondebug (&gsi);
3789 if (!gsi_end_p (gsi))
3792 e_first->src->aux = NULL;
3793 for (e = e_first; e->dest->aux; e = single_succ_edge (e->dest))
3794 e->dest->aux = NULL;
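/* For illustration (editor's sketch, not part of the original sources):
   the degenerate shape detected above is an empty self-loop such as

     L: goto L;

   or a chain of empty single-successor blocks ending in one;
   cleanup_empty_eh bails out on this rather than trying to unsplit
   through it.  */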
3799 /* Examine the block associated with LP to determine if it's an empty
3800 handler for its EH region. If so, attempt to redirect EH edges to
3801 an outer region. Return true if the CFG was updated in any way. This
3802 is similar to jump forwarding, just across EH edges. */
3805 cleanup_empty_eh (eh_landing_pad lp)
3807 basic_block bb = label_to_block (lp->post_landing_pad);
3808 gimple_stmt_iterator gsi;
3810 eh_region new_region;
3813 bool has_non_eh_pred;
3816 /* There can be zero or one edges out of BB. This is the quickest test. */
3817 switch (EDGE_COUNT (bb->succs))
3823 e_out = EDGE_SUCC (bb, 0);
3828 gsi = gsi_after_labels (bb);
3830 /* Make sure to skip debug statements. */
3831 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
3832 gsi_next_nondebug (&gsi);
3834 /* If the block is totally empty, look for more unsplitting cases. */
3835 if (gsi_end_p (gsi))
3838 /* For the degenerate case of an infinite loop, bail out. */
3838 if (infinite_empty_loop_p (e_out))
3841 return cleanup_empty_eh_unsplit (bb, e_out, lp);
3844 /* The block should consist only of a single RESX statement. */
3845 resx = gsi_stmt (gsi);
3846 if (!is_gimple_resx (resx))
3848 gcc_assert (gsi_one_before_end_p (gsi));
3850 /* Determine if there are non-EH edges, or resx edges into the handler. */
3851 has_non_eh_pred = false;
3852 FOR_EACH_EDGE (e, ei, bb->preds)
3853 if (!(e->flags & EDGE_EH))
3854 has_non_eh_pred = true;
3856 /* Find the handler that's outer of the empty handler by looking at
3857 where the RESX instruction was vectored. */
3858 new_lp_nr = lookup_stmt_eh_lp (resx);
3859 new_region = get_eh_region_from_lp_number (new_lp_nr);
3861 /* If there's no destination region within the current function,
3862 redirection is trivial via removing the throwing statements from
3863 the EH region, removing the EH edges, and allowing the block
3864 to go unreachable. */
3865 if (new_region == NULL)
3867 gcc_assert (e_out == NULL);
3868 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
3869 if (e->flags & EDGE_EH)
3871 gimple stmt = last_stmt (e->src);
3872 remove_stmt_from_eh_lp (stmt);
3880 /* If the destination region is a MUST_NOT_THROW, allow the runtime
3881 to handle the abort and allow the blocks to go unreachable. */
3882 if (new_region->type == ERT_MUST_NOT_THROW)
3884 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
3885 if (e->flags & EDGE_EH)
3887 gimple stmt = last_stmt (e->src);
3888 remove_stmt_from_eh_lp (stmt);
3889 add_stmt_to_eh_lp (stmt, new_lp_nr);
3897 /* Try to redirect the EH edges and merge the PHIs into the destination
3898 landing pad block. If the merge succeeds, we'll already have redirected
3899 all the EH edges. The handler itself will go unreachable if there were
3900 no normal edges. */
3901 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, true))
3904 /* Finally, if all input edges are EH edges, then we can (potentially)
3905 reduce the number of transfers from the runtime by moving the landing
3906 pad from the original region to the new region. This is a win when
3907 we remove the last CLEANUP region along a particular exception
3908 propagation path. Since nothing changes except for the region with
3909 which the landing pad is associated, the PHI nodes do not need to be
3910 updated. */
3911 if (!has_non_eh_pred)
3913 cleanup_empty_eh_move_lp (bb, e_out, lp, new_region);
3914 if (dump_file && (dump_flags & TDF_DETAILS))
3915 fprintf (dump_file, "Empty EH handler %i moved to EH region %i.\n",
3916 lp->index, new_region->index);
3918 /* ??? The CFG didn't change, but we may have rendered the
3919 old EH region unreachable. Trigger a cleanup there. */
3926 if (dump_file && (dump_flags & TDF_DETAILS))
3927 fprintf (dump_file, "Empty EH handler %i removed.\n", lp->index);
3928 remove_eh_landing_pad (lp);
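/* For illustration (editor's sketch, not part of the original sources):
   the canonical input here is a cleanup whose body was optimized away,
   i.e. a landing pad block containing nothing but

     <Lpad>: resx 2;

   forwarding straight to an outer region; the code above redirects the
   incoming EH edges to that outer landing pad, or simply deletes them
   when the exception must leave the function or hit a MUST_NOT_THROW.  */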
3932 /* Do a post-order traversal of the EH region tree. Examine each
3933 post_landing_pad block and see if we can eliminate it as empty. */
3936 cleanup_all_empty_eh (void)
3938 bool changed = false;
3942 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
3944 changed |= cleanup_empty_eh (lp);
3949 /* Perform cleanups and lowering of exception handling:
3950 1) cleanup regions with handlers doing nothing are optimized out
3951 2) MUST_NOT_THROW regions that became dead because of 1) are optimized out
3952 3) info about regions containing instructions, and regions
3953 reachable via local EH edges, is collected
3954 4) the EH tree is pruned of regions no longer necessary.
3956 TODO: Push MUST_NOT_THROW regions to the root of the EH tree.
3957 Unify those that have the same failure decl and locus.
3961 execute_cleanup_eh_1 (void)
3963 /* Do this first: unsplit_all_eh and cleanup_all_empty_eh can die
3964 looking up unreachable landing pads. */
3965 remove_unreachable_handlers ();
3967 /* Watch out for the region tree vanishing because everything was unreachable. */
3968 if (cfun->eh->region_tree && optimize)
3970 bool changed = false;
3972 changed |= unsplit_all_eh ();
3973 changed |= cleanup_all_empty_eh ();
3977 free_dominance_info (CDI_DOMINATORS);
3978 free_dominance_info (CDI_POST_DOMINATORS);
3980 /* We delayed all basic block deletion, as we may have performed
3981 cleanups on EH edges while non-EH edges were still present. */
3982 delete_unreachable_blocks ();
3984 /* We manipulated the landing pads. Remove any region that no
3985 longer has a landing pad. */
3986 remove_unreachable_handlers_no_lp ();
3988 return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
3996 execute_cleanup_eh (void)
3998 int ret = execute_cleanup_eh_1 ();
4000 /* If the function no longer needs an EH personality routine,
4001 clear it. This exposes cross-language inlining opportunities
4002 and avoids references to a never-defined personality routine. */
4003 if (DECL_FUNCTION_PERSONALITY (current_function_decl)
4004 && function_needs_eh_personality (cfun) != eh_personality_lang)
4005 DECL_FUNCTION_PERSONALITY (current_function_decl) = NULL_TREE;
4011 gate_cleanup_eh (void)
4013 return cfun->eh != NULL && cfun->eh->region_tree != NULL;
4016 struct gimple_opt_pass pass_cleanup_eh = {
4019 "ehcleanup", /* name */
4020 gate_cleanup_eh, /* gate */
4021 execute_cleanup_eh, /* execute */
4024 0, /* static_pass_number */
4025 TV_TREE_EH, /* tv_id */
4026 PROP_gimple_lcf, /* properties_required */
4027 0, /* properties_provided */
4028 0, /* properties_destroyed */
4029 0, /* todo_flags_start */
4030 TODO_dump_func /* todo_flags_finish */
4034 /* Verify that BB, which contains STMT as its last statement, has
4035 precisely the edge that make_eh_edges would create. */
4038 verify_eh_edges (gimple stmt)
4040 basic_block bb = gimple_bb (stmt);
4041 eh_landing_pad lp = NULL;
4046 lp_nr = lookup_stmt_eh_lp (stmt);
4048 lp = get_eh_landing_pad_from_number (lp_nr);
4051 FOR_EACH_EDGE (e, ei, bb->succs)
4053 if (e->flags & EDGE_EH)
4057 error ("BB %i has multiple EH edges", bb->index);
4069 error ("BB %i cannot throw but has an EH edge", bb->index);
4075 if (!stmt_could_throw_p (stmt))
4077 error ("BB %i last statement has incorrectly set lp", bb->index);
4081 if (eh_edge == NULL)
4083 error ("BB %i is missing an EH edge", bb->index);
4087 if (eh_edge->dest != label_to_block (lp->post_landing_pad))
4089 error ("Incorrect EH edge %i->%i", bb->index, eh_edge->dest->index);
4096 /* Similarly, but handle GIMPLE_EH_DISPATCH specifically. */
4099 verify_eh_dispatch_edge (gimple stmt)
4103 basic_block src, dst;
4104 bool want_fallthru = true;
4108 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
4109 src = gimple_bb (stmt);
4111 FOR_EACH_EDGE (e, ei, src->succs)
4112 gcc_assert (e->aux == NULL);
4117 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
4119 dst = label_to_block (c->label);
4120 e = find_edge (src, dst);
4123 error ("BB %i is missing an edge", src->index);
4128 /* A catch-all handler doesn't have a fallthru. */
4129 if (c->type_list == NULL)
4131 want_fallthru = false;
4137 case ERT_ALLOWED_EXCEPTIONS:
4138 dst = label_to_block (r->u.allowed.label);
4139 e = find_edge (src, dst);
4142 error ("BB %i is missing an edge", src->index);
4153 FOR_EACH_EDGE (e, ei, src->succs)
4155 if (e->flags & EDGE_FALLTHRU)
4157 if (fall_edge != NULL)
4159 error ("BB %i has too many fallthru edges", src->index);
4168 error ("BB %i has incorrect edge", src->index);
4172 if ((fall_edge != NULL) ^ want_fallthru)
4174 error ("BB %i has incorrect fallthru edge", src->index);