/* Exception handling semantics and decomposition for trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "except.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "timevar.h"
#include "langhooks.h"
#include "ggc.h"
#include "toplev.h"
#include "gimple.h"
/* In some instances a tree and a gimple statement need to be stored in
   the same hash table.  This union makes that possible.  */

typedef union { tree *tp; tree t; gimple g; } treemple;
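
/* For illustration (this example is not from the original sources): a
   treemple is written and read through the member that matches the
   object at hand, e.g.

     treemple tmp;
     tmp.t = some_label_decl;    record a tree
     tmp.g = some_gimple_stmt;   or a gimple statement

   The reader must know which member is active; the surrounding code
   encodes that in context (e.g. the is_label flag used later in this
   file).  */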
/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}
/* Misc functions used in this file.  */

/* Compare and hash for any structure which begins with a canonical
   pointer.  Assumes all pointers are interchangeable, which is sort
   of already assumed by gcc elsewhere IIRC.  */

static int
struct_ptr_eq (const void *a, const void *b)
{
  const void * const * x = (const void * const *) a;
  const void * const * y = (const void * const *) b;
  return *x == *y;
}

static hashval_t
struct_ptr_hash (const void *a)
{
  const void * const * x = (const void * const *) a;
  /* Discard the low bits, which are zero due to pointer alignment.  */
  return (size_t) *x >> 4;
}
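
/* A minimal usage sketch (an illustration, not from the original
   sources): these two callbacks plug directly into libiberty's
   hashtab, e.g.

     htab_t t = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);

   and any record whose first field is the canonical pointer can then
   serve as both lookup key and table entry.  */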
/* Remember and lookup EH region data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
   we get to rtl.  Once we're done with lowering here, if we lose
   the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
   compared to those that can.  We should be saving some amount
   of space by only allocating memory for those that can throw.  */
static void
record_stmt_eh_region (struct eh_region_d *region, gimple t)
{
  if (!region)
    return;

  add_stmt_to_eh_region (t, get_eh_region_number (region));
}
/* Add statement T in function IFUN to EH region NUM.  */

void
add_stmt_to_eh_region_fn (struct function *ifun, gimple t, int num)
{
  struct throw_stmt_node *n;
  void **slot;

  gcc_assert (num >= 0);
  gcc_assert (gimple_code (t) != GIMPLE_RESX);

  n = GGC_NEW (struct throw_stmt_node);
  n->stmt = t;
  n->region_nr = num;

  if (!get_eh_throw_stmt_table (ifun))
    set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,
                                                    struct_ptr_eq,
                                                    ggc_free));

  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}
/* Add statement T in the current function (cfun) to EH region number
   NUM.  */

void
add_stmt_to_eh_region (gimple t, int num)
{
  add_stmt_to_eh_region_fn (cfun, t, num);
}
/* Remove statement T in function IFUN from the EH region holding it.  */

bool
remove_stmt_from_eh_region_fn (struct function *ifun, gimple t)
{
  struct throw_stmt_node dummy;
  void **slot;

  if (!get_eh_throw_stmt_table (ifun))
    return false;

  dummy.stmt = t;
  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
                         NO_INSERT);
  if (slot)
    {
      htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
      return true;
    }
  else
    return false;
}
/* Remove statement T in the current function (cfun) from the EH
   region holding it.  */

bool
remove_stmt_from_eh_region (gimple t)
{
  return remove_stmt_from_eh_region_fn (cfun, t);
}
/* Determine if statement T is inside an EH region in function IFUN.
   Return the EH region number if found, return -2 if IFUN does not
   have an EH table and -1 if T could not be found in IFUN's EH region
   table.  */

int
lookup_stmt_eh_region_fn (struct function *ifun, gimple t)
{
  struct throw_stmt_node *p, n;

  if (!get_eh_throw_stmt_table (ifun))
    return -2;

  n.stmt = t;
  p = (struct throw_stmt_node *) htab_find (get_eh_throw_stmt_table (ifun), &n);
  return (p ? p->region_nr : -1);
}
/* Determine if statement T is inside an EH region in the current
   function (cfun).  Return the EH region number if found, return -2
   if cfun does not have an EH table and -1 if T could not be found in
   cfun's EH region table.  */

int
lookup_stmt_eh_region (gimple t)
{
  /* We can get called from initialized data when -fnon-call-exceptions
     is on; prevent crash.  */
  if (!cfun)
    return -1;
  return lookup_stmt_eh_region_fn (cfun, t);
}
/* Determine if expression T is inside an EH region in the current
   function (cfun).  Return the EH region number if found, return -2
   if cfun does not have an EH table and -1 if T could not be found in
   cfun's EH region table.  */

int
lookup_expr_eh_region (tree t)
{
  /* We can get called from initialized data when -fnon-call-exceptions
     is on; prevent crash.  */
  if (!cfun)
    return -1;

  if (!get_eh_throw_stmt_table (cfun))
    return -2;

  if (t && EXPR_P (t))
    {
      tree_ann_common_t ann = tree_common_ann (t);
      if (ann)
        return (int) ann->rn;
    }

  return -1;
}
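
/* For illustration (not in the original sources), the lookup contract
   shared by these routines is:

     int rn = lookup_stmt_eh_region (stmt);
     if (rn >= 0)          the statement lives in EH region RN
     else if (rn == -1)    no region was recorded for the statement
     else                  rn == -2: the function has no EH table

   so a plain "rn >= 0" test distinguishes "inside a region" from both
   failure modes.  */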
/* First pass of EH node decomposition.  Build up a tree of GIMPLE_TRY_FINALLY
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */

struct finally_tree_node
{
  /* When storing a GIMPLE_TRY, we have to record a gimple.  However,
     when deciding whether a GOTO to a certain LABEL_DECL (which is a
     tree) leaves the TRY block, it's necessary to record a tree in
     this field.  Thus a treemple is used.  */
  treemple child;
  gimple parent;
};

/* Note that this table is *not* marked GTY.  It is short-lived.  */
static htab_t finally_tree;
static void
record_in_finally_tree (treemple child, gimple parent)
{
  struct finally_tree_node *n;
  void **slot;

  n = XNEW (struct finally_tree_node);
  n->child = child;
  n->parent = parent;

  slot = htab_find_slot (finally_tree, n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}

static void
collect_finally_tree (gimple stmt, gimple region);
/* Go through the gimple sequence.  Works with collect_finally_tree to
   record all GIMPLE_LABEL and GIMPLE_TRY statements.  */

static void
collect_finally_tree_1 (gimple_seq seq, gimple region)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    collect_finally_tree (gsi_stmt (gsi), region);
}
static void
collect_finally_tree (gimple stmt, gimple region)
{
  treemple temp;

  switch (gimple_code (stmt))
    {
    case GIMPLE_LABEL:
      temp.t = gimple_label_label (stmt);
      record_in_finally_tree (temp, region);
      break;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
        {
          temp.g = stmt;
          record_in_finally_tree (temp, region);
          collect_finally_tree_1 (gimple_try_eval (stmt), stmt);
          collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
        }
      else if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
        {
          collect_finally_tree_1 (gimple_try_eval (stmt), region);
          collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
        }
      break;

    case GIMPLE_CATCH:
      collect_finally_tree_1 (gimple_catch_handler (stmt), region);
      break;

    case GIMPLE_EH_FILTER:
      collect_finally_tree_1 (gimple_eh_filter_failure (stmt), region);
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }
}
/* Use the finally tree to determine if a jump from START to TARGET
   would leave the try_finally node that START lives in.  */

static bool
outside_finally_tree (treemple start, gimple target)
{
  struct finally_tree_node n, *p;

  do
    {
      n.child = start;
      p = (struct finally_tree_node *) htab_find (finally_tree, &n);
      if (!p)
        return true;
      start.g = p->parent;
    }
  while (start.g != target);
  return false;
}
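
/* An illustrative walk (this example is not from the original
   sources):

     try {              <-- T1
       try {            <-- T2
         goto L;
       } finally { ... }
       L:;
     } finally { ... }

   The label L was recorded with parent T1.  Starting from L, the
   recorded parent chain reaches T1 without ever passing through T2,
   so outside_finally_tree (L, T2) returns true (the goto escapes T2's
   try block), while outside_finally_tree (L, T1) returns false.  */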
/* Second pass of EH node decomposition.  Actually transform the GIMPLE_TRY
   nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straightforward, but frobbing all the gotos
   and such into shape isn't.  */

/* The GOTO_QUEUE is an array of GIMPLE_GOTO and GIMPLE_RETURN
   statements that are seen to escape this GIMPLE_TRY_FINALLY node.
   The idea is to record a gimple statement for everything except for
   the conditionals, which get their labels recorded.  Since labels are
   of type 'tree', we need this node to store both gimple and tree
   objects.  REPL_STMT is the sequence used to replace the goto/return
   statement.  CONT_STMT is used to store the statement that allows
   the return/goto to jump to the original destination.  */

struct goto_queue_node
{
  treemple stmt;
  gimple_seq repl_stmt;
  gimple cont_stmt;
  int index;
  /* This is used when index >= 0 to indicate that stmt is a label (as
     opposed to a goto stmt).  */
  bool is_label;
};
/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  struct eh_region_d *cur_region;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
};
struct leh_tf_state
{
  /* Pointer to the GIMPLE_TRY_FINALLY node under discussion.  The
     try_finally_expr is the original GIMPLE_TRY_FINALLY.  We need to retain
     this so that outside_finally_tree can reliably reference the tree used
     in the collect_finally_tree data structures.  */
  gimple try_finally_expr;
  gimple top_p;

  /* While lowering a top_p usually it is expanded into multiple statements,
     thus we need the following field to store them.  */
  gimple_seq top_p_seq;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  struct eh_region_d *region;

  /* The goto queue.  */
  struct goto_queue_node *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* Pointer map to help in searching goto_queue when it is large.  */
  struct pointer_map_t *goto_queue_map;

  /* The set of unique labels seen as entries in the goto queue.  */
  VEC(tree,heap) *dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* A label that has been registered with except.c to be the
     landing pad for this try block.  */
  tree eh_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a GIMPLE_RETURN.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};
static gimple_seq lower_eh_filter (struct leh_state *, gimple);

/* Search for STMT in the goto queue.  Return the replacement,
   or null if the statement isn't in the queue.  */

#define LARGE_GOTO_QUEUE 20

static void lower_eh_constructs_1 (struct leh_state *state, gimple_seq seq);
static gimple_seq
find_goto_replacement (struct leh_tf_state *tf, treemple stmt)
{
  unsigned int i;
  void **slot;

  if (tf->goto_queue_active < LARGE_GOTO_QUEUE)
    {
      for (i = 0; i < tf->goto_queue_active; i++)
        if (tf->goto_queue[i].stmt.g == stmt.g)
          return tf->goto_queue[i].repl_stmt;
      return NULL;
    }

  /* If we have a large number of entries in the goto_queue, create a
     pointer map and use that for searching.  */

  if (!tf->goto_queue_map)
    {
      tf->goto_queue_map = pointer_map_create ();
      for (i = 0; i < tf->goto_queue_active; i++)
        {
          slot = pointer_map_insert (tf->goto_queue_map,
                                     tf->goto_queue[i].stmt.g);
          gcc_assert (*slot == NULL);
          *slot = &tf->goto_queue[i];
        }
    }

  slot = pointer_map_contains (tf->goto_queue_map, stmt.g);
  if (slot != NULL)
    return (((struct goto_queue_node *) *slot)->repl_stmt);

  return NULL;
}
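
/* A sketch of the intended complexity trade-off (illustrative, not
   from the original sources): queues shorter than LARGE_GOTO_QUEUE
   are scanned linearly, so lookup is O(active) with no setup cost.
   Once a queue reaches 20 entries, a pointer_map is built lazily on
   the first lookup, after which each query is a single hash probe.
   Note that record_in_goto_queue asserts the map has not yet been
   created, so all recording must precede the first large lookup.  */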
/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered GIMPLE_COND.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the GIMPLE_COND and redirect.  */

static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
                                gimple_stmt_iterator *gsi)
{
  tree label;
  gimple_seq new_seq;
  treemple temp;
  location_t loc = gimple_location (gsi_stmt (*gsi));

  temp.tp = tp;
  new_seq = find_goto_replacement (tf, temp);
  if (!new_seq)
    return;

  if (gimple_seq_singleton_p (new_seq)
      && gimple_code (gimple_seq_first_stmt (new_seq)) == GIMPLE_GOTO)
    {
      *tp = gimple_goto_dest (gimple_seq_first_stmt (new_seq));
      return;
    }

  label = create_artificial_label (loc);
  /* Set the new label for the GIMPLE_COND.  */
  *tp = label;

  gsi_insert_after (gsi, gimple_build_label (label), GSI_CONTINUE_LINKING);
  gsi_insert_seq_after (gsi, gimple_seq_copy (new_seq), GSI_CONTINUE_LINKING);
}
/* The real work of replace_goto_queue.  Returns with TSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (gimple_seq, struct leh_tf_state *);

static void
replace_goto_queue_1 (gimple stmt, struct leh_tf_state *tf,
                      gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  treemple temp;
  temp.g = NULL;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
      temp.g = stmt;
      seq = find_goto_replacement (tf, temp);
      if (seq)
        {
          gsi_insert_seq_before (gsi, gimple_seq_copy (seq), GSI_SAME_STMT);
          gsi_remove (gsi, false);
          return;
        }
      break;

    case GIMPLE_COND:
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 2), tf, gsi);
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 3), tf, gsi);
      break;

    case GIMPLE_TRY:
      replace_goto_queue_stmt_list (gimple_try_eval (stmt), tf);
      replace_goto_queue_stmt_list (gimple_try_cleanup (stmt), tf);
      break;
    case GIMPLE_CATCH:
      replace_goto_queue_stmt_list (gimple_catch_handler (stmt), tf);
      break;
    case GIMPLE_EH_FILTER:
      replace_goto_queue_stmt_list (gimple_eh_filter_failure (stmt), tf);
      break;

    default:
      /* These won't have gotos in them.  */
      break;
    }

  gsi_next (gsi);
}
/* A subroutine of replace_goto_queue.  Handles GIMPLE_SEQ.  */

static void
replace_goto_queue_stmt_list (gimple_seq seq, struct leh_tf_state *tf)
{
  gimple_stmt_iterator gsi = gsi_start (seq);

  while (!gsi_end_p (gsi))
    replace_goto_queue_1 (gsi_stmt (gsi), tf, &gsi);
}

/* Replace all goto queue members.  */

static void
replace_goto_queue (struct leh_tf_state *tf)
{
  if (tf->goto_queue_active == 0)
    return;
  replace_goto_queue_stmt_list (tf->top_p_seq, tf);
}
/* Add a new record to the goto queue contained in TF.  NEW_STMT is the
   data to be added, IS_LABEL indicates whether NEW_STMT is a label or
   a gimple statement.  */

static void
record_in_goto_queue (struct leh_tf_state *tf,
                      treemple new_stmt,
                      int index,
                      bool is_label)
{
  size_t active, size;
  struct goto_queue_node *q;

  gcc_assert (!tf->goto_queue_map);

  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
  if (active >= size)
    {
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
      tf->goto_queue
        = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
    }

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->stmt = new_stmt;
  q->index = index;
  q->is_label = is_label;
}
/* Record the LABEL label in the goto queue contained in TF.
   TF is not null.  */

static void
record_in_goto_queue_label (struct leh_tf_state *tf, treemple stmt, tree label)
{
  int index;
  treemple temp, new_stmt;

  if (!label)
    return;

  /* Computed and non-local gotos do not get processed.  Given
     their nature we can neither tell whether we've escaped the
     finally block nor redirect them if we knew.  */
  if (TREE_CODE (label) != LABEL_DECL)
    return;

  /* No need to record gotos that don't leave the try block.  */
  temp.t = label;
  if (!outside_finally_tree (temp, tf->try_finally_expr))
    return;

  if (! tf->dest_array)
    {
      tf->dest_array = VEC_alloc (tree, heap, 10);
      VEC_quick_push (tree, tf->dest_array, label);
      index = 0;
    }
  else
    {
      int n = VEC_length (tree, tf->dest_array);
      for (index = 0; index < n; ++index)
        if (VEC_index (tree, tf->dest_array, index) == label)
          break;
      if (index == n)
        VEC_safe_push (tree, heap, tf->dest_array, label);
    }

  /* In the case of a GOTO we want to record the destination label,
     since with a GIMPLE_COND we have easy access to the then/else
     labels.  */
  new_stmt = stmt;
  record_in_goto_queue (tf, new_stmt, index, true);
}
/* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */

static void
maybe_record_in_goto_queue (struct leh_state *state, gimple stmt)
{
  struct leh_tf_state *tf = state->tf;
  treemple new_stmt;

  if (!tf)
    return;

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      new_stmt.tp = gimple_op_ptr (stmt, 2);
      record_in_goto_queue_label (tf, new_stmt, gimple_cond_true_label (stmt));
      new_stmt.tp = gimple_op_ptr (stmt, 3);
      record_in_goto_queue_label (tf, new_stmt, gimple_cond_false_label (stmt));
      break;
    case GIMPLE_GOTO:
      new_stmt.g = stmt;
      record_in_goto_queue_label (tf, new_stmt, gimple_goto_dest (stmt));
      break;

    case GIMPLE_RETURN:
      tf->may_return = true;
      new_stmt.g = stmt;
      record_in_goto_queue (tf, new_stmt, -1, false);
      break;

    default:
      gcc_unreachable ();
    }
}
#ifdef ENABLE_CHECKING
/* We do not process GIMPLE_SWITCHes for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer GIMPLE_TRY_FINALLY nodes.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state, gimple switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;

  if (!tf)
    return;

  n = gimple_switch_num_labels (switch_expr);

  for (i = 0; i < n; ++i)
    {
      treemple temp;
      tree lab = CASE_LABEL (gimple_switch_label (switch_expr, i));
      temp.t = lab;
      gcc_assert (!outside_finally_tree (temp, tf->try_finally_expr));
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif
/* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB.  Place in CONT_P
   whatever is needed to finish the return.  If MOD is non-null, insert it
   before the new branch.  RETURN_VALUE_P is a cache containing a temporary
   variable to be used in manipulating the value returned from the function.  */

static void
do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
                       tree *return_value_p)
{
  tree ret_expr;
  gimple x;

  /* In the case of a return, the queue node must be a gimple statement.  */
  gcc_assert (!q->is_label);

  ret_expr = gimple_return_retval (q->stmt.g);

  if (ret_expr)
    {
      if (!*return_value_p)
        *return_value_p = ret_expr;
      else
        gcc_assert (*return_value_p == ret_expr);
      q->cont_stmt = q->stmt.g;
      /* The nasty part about redirecting the return value is that the
         return value itself is to be computed before the FINALLY block
         is executed.  e.g.

                int x;
                int foo (void)
                {
                  x = 0;
                  try {
                    return x;
                  } finally {
                    x++;
                  }
                }

         should return 0, not 1.  Arrange for this to happen by copying
         the computed return value into a local temporary.  This also
         allows us to redirect multiple return statements through the
         same destination block; whether this is a net win or not really
         depends, I guess, but it does make generation of the switch in
         lower_try_finally_switch easier.  */

      if (TREE_CODE (ret_expr) == RESULT_DECL)
        {
          if (!*return_value_p)
            *return_value_p = ret_expr;
          else
            gcc_assert (*return_value_p == ret_expr);
          q->cont_stmt = q->stmt.g;
        }
      else
        gcc_unreachable ();
    }
  else
    /* If we don't return a value, all return statements are the same.  */
    q->cont_stmt = q->stmt.g;

  q->repl_stmt = gimple_seq_alloc ();

  if (mod)
    gimple_seq_add_seq (&q->repl_stmt, mod);

  x = gimple_build_goto (finlab);
  gimple_seq_add_stmt (&q->repl_stmt, x);
}
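
/* Schematically (an illustration, not from the original sources), a
   "return x;" escaping a try/finally is split in two:

     repl_stmt:  [MOD, if given]  goto FINLAB;  -- replaces the return
     cont_stmt:  return x;                      -- emitted after FINALLY

   so the finally block runs between the two halves of the original
   return.  */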
/* Similar, but easier, for GIMPLE_GOTO.  */

static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
                     struct leh_tf_state *tf)
{
  gimple x;

  gcc_assert (q->is_label);

  q->repl_stmt = gimple_seq_alloc ();

  q->cont_stmt = gimple_build_goto (VEC_index (tree, tf->dest_array, q->index));

  if (mod)
    gimple_seq_add_seq (&q->repl_stmt, mod);

  x = gimple_build_goto (finlab);
  gimple_seq_add_stmt (&q->repl_stmt, x);
}
/* We want to transform
        try { body; } catch { stuff; }
   into
        body; goto over; lab: stuff; over:

   TP is a GIMPLE_TRY node.  LAB is the label that
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */

static gimple_seq
frob_into_branch_around (gimple tp, tree lab, tree over)
{
  gimple x;
  gimple_seq cleanup, result;
  location_t loc = gimple_location (tp);

  cleanup = gimple_try_cleanup (tp);
  result = gimple_try_eval (tp);

  if (gimple_seq_may_fallthru (result))
    {
      if (!over)
        over = create_artificial_label (loc);
      x = gimple_build_goto (over);
      gimple_seq_add_stmt (&result, x);
    }

  if (lab)
    {
      x = gimple_build_label (lab);
      gimple_seq_add_stmt (&result, x);
    }

  gimple_seq_add_seq (&result, cleanup);

  if (over)
    {
      x = gimple_build_label (over);
      gimple_seq_add_stmt (&result, x);
    }
  return result;
}
/* A subroutine of lower_try_finally.  Duplicate the tree rooted at T.
   Make sure to record all new labels found.  */

static gimple_seq
lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state)
{
  gimple region = NULL;
  gimple_seq new_seq;

  new_seq = copy_gimple_seq_and_replace_locals (seq);

  if (outer_state->tf)
    region = outer_state->tf->try_finally_expr;
  collect_finally_tree_1 (new_seq, region);

  return new_seq;
}
/* A subroutine of lower_try_finally.  Create a fallthru label for
   the given try_finally state.  The only tricky bit here is that
   we have to make sure to record the label in our outer context.  */

static tree
lower_try_finally_fallthru_label (struct leh_tf_state *tf)
{
  tree label = tf->fallthru_label;
  treemple temp;

  if (!label)
    {
      label = create_artificial_label (gimple_location (tf->try_finally_expr));
      tf->fallthru_label = label;
      if (tf->outer->tf)
        {
          temp.t = label;
          record_in_finally_tree (temp, tf->outer->tf->try_finally_expr);
        }
    }
  return label;
}
/* A subroutine of lower_try_finally.  If lang_protect_cleanup_actions
   returns non-null, then the language requires that the exception path out
   of a try_finally be treated specially.  To wit: the code within the
   finally block may not itself throw an exception.  We have two choices here.
   First we can duplicate the finally block and wrap it in a must_not_throw
   region.  Second, we can generate code like

        try {
          finally_block;
        } catch {
          if (fintmp == eh_edge)
            protect_cleanup_actions;
        }

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */
static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
                               struct leh_state *this_state,
                               struct leh_tf_state *tf)
{
  gimple protect_cleanup_actions;
  gimple_stmt_iterator gsi;
  bool finally_may_fallthru;
  gimple_seq finally;
  gimple x;

  /* First check for nothing to do.  */
  if (lang_protect_cleanup_actions)
    protect_cleanup_actions = lang_protect_cleanup_actions ();
  else
    protect_cleanup_actions = NULL;

  finally = gimple_try_cleanup (tf->top_p);

  /* If the EH case of the finally block can fall through, this may be a
     structure of the form
        try {
          try {
            throw ...;
          } cleanup {
            try {
              throw ...;
            } catch (...) {
            }
          }
        } catch (...) {
          yyy;
        }
     E.g. with an inline destructor with an embedded try block.  In this
     case we must save the runtime EH data around the nested exception.

     This complication means that any time the previous runtime data might
     be used (via fallthru from the finally) we handle the eh case here,
     whether or not protect_cleanup_actions is active.  */

  finally_may_fallthru = gimple_seq_may_fallthru (finally);
  if (!finally_may_fallthru && !protect_cleanup_actions)
    return;

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  */
  if (this_state)
    finally = lower_try_finally_dup_block (finally, outer_state);

  /* If this cleanup consists of a TRY_CATCH_EXPR with TRY_CATCH_IS_CLEANUP
     set, the handler of the TRY_CATCH_EXPR is another cleanup which ought
     to be in an enclosing scope, but needs to be implemented at this level
     to avoid a nesting violation (see wrap_temporary_cleanups in
     cp/decl.c).  Since it's logically at an outer level, we should call
     terminate before we get to it, so strip it away before adding the
     MUST_NOT_THROW filter.  */
  gsi = gsi_start (finally);
  x = gsi_stmt (gsi);
  if (protect_cleanup_actions
      && gimple_code (x) == GIMPLE_TRY
      && gimple_try_kind (x) == GIMPLE_TRY_CATCH
      && gimple_try_catch_is_cleanup (x))
    {
      gsi_insert_seq_before (&gsi, gimple_try_eval (x), GSI_SAME_STMT);
      gsi_remove (&gsi, false);
    }

  /* Resume execution after the exception.  Adding this now lets
     lower_eh_filter not add unnecessary gotos, as it is clear that
     we never fallthru from this copy of the finally block.  */
  if (finally_may_fallthru)
    {
      tree save_eptr, save_filt;
      tree tmp;

      save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
      save_filt = create_tmp_var (integer_type_node, "save_filt");

      gsi = gsi_start (finally);
      tmp = build0 (EXC_PTR_EXPR, ptr_type_node);
      x = gimple_build_assign (save_eptr, tmp);
      gsi_insert_before (&gsi, x, GSI_CONTINUE_LINKING);

      tmp = build0 (FILTER_EXPR, integer_type_node);
      x = gimple_build_assign (save_filt, tmp);
      gsi_insert_before (&gsi, x, GSI_CONTINUE_LINKING);

      gsi = gsi_last (finally);
      tmp = build0 (EXC_PTR_EXPR, ptr_type_node);
      x = gimple_build_assign (tmp, save_eptr);
      gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);

      tmp = build0 (FILTER_EXPR, integer_type_node);
      x = gimple_build_assign (tmp, save_filt);
      gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);

      x = gimple_build_resx (get_eh_region_number (tf->region));
      gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);
    }

  /* Wrap the block with protect_cleanup_actions as the action.  */
  if (protect_cleanup_actions)
    {
      gimple_seq seq = NULL, failure = NULL;

      gimple_seq_add_stmt (&failure, protect_cleanup_actions);
      x = gimple_build_eh_filter (NULL, failure);
      gimple_eh_filter_set_must_not_throw (x, 1);

      gimple_seq_add_stmt (&seq, x);
      x = gimple_build_try (finally, seq, GIMPLE_TRY_CATCH);
      finally = lower_eh_filter (outer_state, x);
    }
  else
    lower_eh_constructs_1 (outer_state, finally);

  /* Hook this up to the end of the existing try block.  If we
     previously fell through the end, we'll have to branch around.
     This means adding a new goto, and adding it to the queue.  */

  gsi = gsi_last (gimple_try_eval (tf->top_p));

  if (tf->may_fallthru)
    {
      tree tmp;
      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);

      if (this_state)
        maybe_record_in_goto_queue (this_state, x);

      tf->may_fallthru = false;
    }

  x = gimple_build_label (tf->eh_label);
  gsi_insert_after (&gsi, x, GSI_CONTINUE_LINKING);
  gsi_insert_seq_after (&gsi, finally, GSI_CONTINUE_LINKING);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}
/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_nofallthru (struct leh_state *state,
                              struct leh_tf_state *tf)
{
  tree lab, return_val;
  gimple x;
  gimple_seq finally;
  struct goto_queue_node *q, *qe;

  if (tf->may_throw)
    lab = tf->eh_label;
  else
    lab = create_artificial_label (gimple_location (tf->try_finally_expr));

  /* We expect that tf->top_p is a GIMPLE_TRY.  */
  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  x = gimple_build_label (lab);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  return_val = NULL;
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL, &return_val);
    else
      do_goto_redirection (q, lab, NULL, tf);

  replace_goto_queue (tf);

  lower_eh_constructs_1 (state, finally);
  gimple_seq_add_seq (&tf->top_p_seq, finally);
}
/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  gimple x;
  gimple_seq finally;
  tree finally_label;
  location_t loc = gimple_location (tf->try_finally_expr);

  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  lower_eh_constructs_1 (state, finally);

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
         the head of the FINALLY block.  Append a RESX at the end.  */

      x = gimple_build_label (tf->eh_label);
      gimple_seq_add_stmt (&tf->top_p_seq, x);

      gimple_seq_add_seq (&tf->top_p_seq, finally);

      x = gimple_build_resx (get_eh_region_number (tf->region));

      gimple_seq_add_stmt (&tf->top_p_seq, x);

      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
         the two blocks run together; we'll fall out the bottom.  */
      gimple_seq_add_seq (&tf->top_p_seq, finally);
      return;
    }

  finally_label = create_artificial_label (loc);
  x = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  gimple_seq_add_seq (&tf->top_p_seq, finally);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      tree return_val = NULL;
      for (; q < qe; ++q)
        do_return_redirection (q, finally_label, NULL, &return_val);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
        do_goto_redirection (q, finally_label, NULL, tf);
      replace_goto_queue (tf);

      if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label)
        {
          /* Reachable by goto to fallthru label only.  Redirect it
             to the new label (already created, sadly), and do not
             emit the final branch out, or the fallthru label.  */
          tf->fallthru_label = NULL;
          return;
        }
    }

  /* Place the original return/goto to the original destination
     immediately after the finally block.  */
  x = tf->goto_queue[0].cont_stmt;
  gimple_seq_add_stmt (&tf->top_p_seq, x);
  maybe_record_in_goto_queue (state, x);
}
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */

static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  gimple_seq finally;
  gimple_seq new_stmt;
  gimple_seq seq;
  gimple x;
  tree tmp;
  location_t tf_loc = gimple_location (tf->try_finally_expr);

  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);
  new_stmt = NULL;

  if (tf->may_fallthru)
    {
      seq = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, seq);
      gimple_seq_add_seq (&new_stmt, seq);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_seq_add_stmt (&new_stmt, x);
    }

  if (tf->may_throw)
    {
      x = gimple_build_label (tf->eh_label);
      gimple_seq_add_stmt (&new_stmt, x);

      seq = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, seq);
      gimple_seq_add_seq (&new_stmt, seq);

      x = gimple_build_resx (get_eh_region_number (tf->region));
      gimple_seq_add_stmt (&new_stmt, x);
    }

  if (tf->goto_queue)
    {
      struct goto_queue_node *q, *qe;
      tree return_val = NULL;
      int return_index, index;
      struct labels_s
      {
        struct goto_queue_node *q;
        tree label;
      } *labels;

      return_index = VEC_length (tree, tf->dest_array);
      labels = XCNEWVEC (struct labels_s, return_index + 1);

      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
        {
          index = q->index < 0 ? return_index : q->index;

          if (!labels[index].q)
            labels[index].q = q;
        }

      for (index = 0; index < return_index + 1; index++)
        {
          tree lab;

          q = labels[index].q;
          if (! q)
            continue;

          lab = labels[index].label
            = create_artificial_label (tf_loc);

          if (index == return_index)
            do_return_redirection (q, lab, NULL, &return_val);
          else
            do_goto_redirection (q, lab, NULL, tf);

          x = gimple_build_label (lab);
          gimple_seq_add_stmt (&new_stmt, x);

          seq = lower_try_finally_dup_block (finally, state);
          lower_eh_constructs_1 (state, seq);
          gimple_seq_add_seq (&new_stmt, seq);

          gimple_seq_add_stmt (&new_stmt, q->cont_stmt);
          maybe_record_in_goto_queue (state, q->cont_stmt);
        }

      for (q = tf->goto_queue; q < qe; q++)
        {
          tree lab;

          index = q->index < 0 ? return_index : q->index;

          if (labels[index].q == q)
            continue;

          lab = labels[index].label;

          if (index == return_index)
            do_return_redirection (q, lab, NULL, &return_val);
          else
            do_goto_redirection (q, lab, NULL, tf);
        }

      replace_goto_queue (tf);
      free (labels);
    }

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_seq (&tf->top_p_seq, new_stmt);
}
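
/* Schematically (an illustration, not from the original sources),
   with two escaping destinations D1 and D2, the finally body F is
   copied once per destination:

     try-body
     L1: F; goto D1;
     L2: F; goto D2;

   where each escaping goto in the try body was redirected to the
   matching Li, trading code size for straight-line control flow.  */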
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by instrumenting
   each incoming edge and creating a switch statement at the end of the
   finally block that branches to the appropriate destination.  */

static void
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree return_val = NULL;
  tree finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  tree last_case;
  VEC (tree,heap) *case_label_vec;
  gimple_seq switch_body;
  gimple x;
  tree tmp;
  gimple switch_stmt;
  gimple_seq finally;
  struct pointer_map_t *cont_map = NULL;
  /* The location of the TRY_FINALLY stmt.  */
  location_t tf_loc = gimple_location (tf->try_finally_expr);
  /* The location of the finally block.  */
  location_t finally_loc;

  switch_body = gimple_seq_alloc ();

  /* Mash the TRY block to the head of the chain.  */
  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  /* The location of the finally is either the last stmt in the finally
     block or the location of the TRY_FINALLY itself.  */
  finally_loc = gimple_seq_last_stmt (tf->top_p_seq) != NULL ?
    gimple_location (gimple_seq_last_stmt (tf->top_p_seq))
    : tf_loc;

  /* Lower the finally block itself.  */
  lower_eh_constructs_1 (state, finally);

  /* Prepare for switch statement generation.  */
  nlabels = VEC_length (tree, tf->dest_array);
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + tf->may_throw;
  ndests = fallthru_index + tf->may_fallthru;

  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label (finally_loc);

  /* We use VEC_quick_push on case_label_vec throughout this function,
     since we know the size in advance and allocate precisely as much
     space as needed.  */
  case_label_vec = VEC_alloc (tree, heap, ndests);
  last_case = NULL;
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence the code is
     laid out.  */

  if (tf->may_fallthru)
    {
      x = gimple_build_assign (finally_tmp, build_int_cst (integer_type_node,
                                                           fallthru_index));
      gimple_seq_add_stmt (&tf->top_p_seq, x);

      if (tf->may_throw)
        {
          x = gimple_build_goto (finally_label);
          gimple_seq_add_stmt (&tf->top_p_seq, x);
        }

      last_case = build3 (CASE_LABEL_EXPR, void_type_node,
                          build_int_cst (NULL_TREE, fallthru_index), NULL,
                          create_artificial_label (tf_loc));
      VEC_quick_push (tree, case_label_vec, last_case);
      last_case_index++;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&switch_body, x);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_seq_add_stmt (&switch_body, x);
    }

  if (tf->may_throw)
    {
      x = gimple_build_label (tf->eh_label);
      gimple_seq_add_stmt (&tf->top_p_seq, x);

      x = gimple_build_assign (finally_tmp, build_int_cst (integer_type_node,
                                                           eh_index));
      gimple_seq_add_stmt (&tf->top_p_seq, x);

      last_case = build3 (CASE_LABEL_EXPR, void_type_node,
                          build_int_cst (NULL_TREE, eh_index), NULL,
                          create_artificial_label (tf_loc));
      VEC_quick_push (tree, case_label_vec, last_case);
      last_case_index++;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&switch_body, x);
      x = gimple_build_resx (get_eh_region_number (tf->region));
      gimple_seq_add_stmt (&switch_body, x);
    }

  x = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  gimple_seq_add_seq (&tf->top_p_seq, finally);

  /* Redirect each incoming goto edge.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  /* Prepare the assignments to finally_tmp that are executed upon the
     entrance through a particular edge.  */
  for (; q < qe; ++q)
    {
      gimple_seq mod;
      int switch_id;
      unsigned int case_index;

      mod = gimple_seq_alloc ();

      if (q->index < 0)
        {
          x = gimple_build_assign (finally_tmp,
                                   build_int_cst (integer_type_node,
                                                  return_index));
          gimple_seq_add_stmt (&mod, x);
          do_return_redirection (q, finally_label, mod, &return_val);
          switch_id = return_index;
        }
      else
        {
          x = gimple_build_assign (finally_tmp,
                                   build_int_cst (integer_type_node, q->index));
          gimple_seq_add_stmt (&mod, x);
          do_goto_redirection (q, finally_label, mod, tf);
          switch_id = q->index;
        }

      case_index = j + q->index;
      if (VEC_length (tree, case_label_vec) <= case_index
          || !VEC_index (tree, case_label_vec, case_index))
        {
          tree case_lab;
          void **slot;
          case_lab = build3 (CASE_LABEL_EXPR, void_type_node,
                             build_int_cst (NULL_TREE, switch_id), NULL,
                             NULL);
          /* We store the cont_stmt in the pointer map, so that we can recover
             it in the loop below.  We don't create the new label while
             walking the goto_queue because pointers don't offer a stable
             order.  */
          if (!cont_map)
            cont_map = pointer_map_create ();
          slot = pointer_map_insert (cont_map, case_lab);
          *slot = q->cont_stmt;
          VEC_quick_push (tree, case_label_vec, case_lab);
        }
    }
  for (j = last_case_index; j < last_case_index + nlabels; j++)
    {
      tree label;
      gimple cont_stmt;
      void **slot;

      last_case = VEC_index (tree, case_label_vec, j);

      gcc_assert (last_case);
      gcc_assert (cont_map);

      slot = pointer_map_contains (cont_map, last_case);
      /* As the comment above suggests, CASE_LABEL (last_case) was just a
         placeholder, it does not store an actual label, yet.  */
      gcc_assert (slot);
      cont_stmt = *(gimple *) slot;

      label = create_artificial_label (tf_loc);
      CASE_LABEL (last_case) = label;

      x = gimple_build_label (label);
      gimple_seq_add_stmt (&switch_body, x);
      gimple_seq_add_stmt (&switch_body, cont_stmt);
      maybe_record_in_goto_queue (state, cont_stmt);
    }
  if (cont_map)
    pointer_map_destroy (cont_map);

  replace_goto_queue (tf);

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  sort_case_labels (case_label_vec);

  /* Build the switch statement, setting last_case to be the default
     label.  */
  switch_stmt = gimple_build_switch_vec (finally_tmp, last_case,
                                         case_label_vec);
  gimple_set_location (switch_stmt, finally_loc);

  /* Need to link SWITCH_STMT after running replace_goto_queue
     due to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_stmt (&tf->top_p_seq, switch_stmt);
  gimple_seq_add_seq (&tf->top_p_seq, switch_body);
}
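
/* Schematically (an illustration, not from the original sources), the
   switch strategy emits one copy of the finally body F and dispatches
   on finally_tmp afterwards:

     try-body: ... finally_tmp = K; goto finally_label; ...
     finally_label: F;
     switch (finally_tmp)
       {
       case 0: goto D1;
       case 1: goto D2;
       default: goto fallthru;
       }

   one case per destination, at the cost of the temporary and the
   dispatch.  */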
/* Decide whether or not we are going to duplicate the finally block.
   There are several considerations.

   First, if this is Java, then the finally block contains code
   written by the user.  It has line numbers associated with it,
   so duplicating the block means it's difficult to set a breakpoint.
   Since controlling code generation via -g is verboten, we simply
   never duplicate code without optimization.

   Second, we'd like to prevent egregious code growth.  One way to
   do this is to estimate the size of the finally block, multiply
   that by the number of copies we'd need to make, and compare against
   the estimate of the size of the switch machinery we'd have to add.  */

static bool
decide_copy_try_finally (int ndests, gimple_seq finally)
{
  int f_estimate, sw_estimate;

  if (!optimize)
    return false;

  /* Finally estimate N times, plus N gotos.  */
  f_estimate = count_insns_seq (finally, &eni_size_weights);
  f_estimate = (f_estimate + 1) * ndests;

  /* Switch statement (cost 10), N variable assignments, N gotos.  */
  sw_estimate = 10 + 2 * ndests;

  /* Optimize for size clearly wants our best guess.  */
  if (optimize_function_for_size_p (cfun))
    return f_estimate < sw_estimate;

  /* ??? These numbers are completely made up so far.  */
  if (optimize > 1)
    return f_estimate < 100 || f_estimate < sw_estimate * 2;
  else
    return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
}
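
/* A worked example (illustrative, not from the original sources): a
   finally block of 12 size-weighted insns with ndests == 3 gives

     f_estimate  = (12 + 1) * 3 = 39
     sw_estimate = 10 + 2 * 3   = 16

   At -O2 the copy strategy still wins (39 < 100); under
   optimize_function_for_size_p it loses (39 >= 16) and the switch
   form is used instead.  */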
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_FINALLY node
   to a sequence of labels and blocks, plus the exception region trees
   that record all the magic.  This is complicated by the need to
   arrange for the FINALLY block to be executed on all exits.  */

static gimple_seq
lower_try_finally (struct leh_state *state, gimple tp)
{
  struct leh_tf_state this_tf;
  struct leh_state this_state;
  int ndests;
  location_t tf_loc = gimple_location (tp);

  /* Process the try block.  */

  memset (&this_tf, 0, sizeof (this_tf));
  this_tf.try_finally_expr = tp;
  this_tf.top_p = tp;
  this_tf.outer = state;
  if (using_eh_for_cleanups_p)
    this_tf.region
      = gen_eh_region_cleanup (state->cur_region);
  else
    this_tf.region = NULL;

  this_state.cur_region = this_tf.region;
  this_state.tf = &this_tf;

  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));

  /* Determine if the try block is escaped through the bottom.  */
  this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));

  /* Determine if any exceptions are possible within the try block.  */
  if (using_eh_for_cleanups_p)
    this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region);
  if (this_tf.may_throw)
    {
      this_tf.eh_label = create_artificial_label (tf_loc);
      set_eh_region_tree_label (this_tf.region, this_tf.eh_label);
      honor_protect_cleanup_actions (state, &this_state, &this_tf);
    }

  /* Determine how many edges (still) reach the finally block.  Or rather,
     how many destinations are reached by the finally block.  Use this to
     determine how we process the finally block itself.  */

  ndests = VEC_length (tree, this_tf.dest_array);
  ndests += this_tf.may_fallthru;
  ndests += this_tf.may_return;
  ndests += this_tf.may_throw;

  /* If the FINALLY block is not reachable, dike it out.  */
  if (ndests == 0)
    {
      gimple_seq_add_seq (&this_tf.top_p_seq, gimple_try_eval (tp));
      gimple_try_set_cleanup (tp, NULL);
    }
  /* If the finally block doesn't fall through, then any destination
     we might try to impose there isn't reached either.  There may be
     some minor amount of cleanup and redirection still needed.  */
  else if (!gimple_seq_may_fallthru (gimple_try_cleanup (tp)))
    lower_try_finally_nofallthru (state, &this_tf);

  /* We can easily special-case redirection to a single destination.  */
  else if (ndests == 1)
    lower_try_finally_onedest (state, &this_tf);
  else if (decide_copy_try_finally (ndests, gimple_try_cleanup (tp)))
    lower_try_finally_copy (state, &this_tf);
  else
    lower_try_finally_switch (state, &this_tf);

  /* If someone requested we add a label at the end of the transformed
     block, do so.  */
  if (this_tf.fallthru_label)
    {
      /* This must be reached only if ndests == 0.  */
      gimple x = gimple_build_label (this_tf.fallthru_label);
      gimple_seq_add_stmt (&this_tf.top_p_seq, x);
    }

  VEC_free (tree, heap, this_tf.dest_array);
  if (this_tf.goto_queue)
    free (this_tf.goto_queue);
  if (this_tf.goto_queue_map)
    pointer_map_destroy (this_tf.goto_queue_map);

  return this_tf.top_p_seq;
}
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_CATCH with a
   list of GIMPLE_CATCH statements to a sequence of labels and blocks, plus
   the exception region trees that record all the magic.  */

static gimple_seq
lower_catch (struct leh_state *state, gimple tp)
{
  struct eh_region_d *try_region;
  struct leh_state this_state;
  gimple_stmt_iterator gsi;
  tree out_label;
  location_t try_catch_loc = gimple_location (tp);

  try_region = gen_eh_region_try (state->cur_region);
  this_state.cur_region = try_region;
  this_state.tf = state->tf;

  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));

  if (!get_eh_region_may_contain_throw (try_region))
    {
      return gimple_try_eval (tp);
    }

  out_label = NULL;
  for (gsi = gsi_start (gimple_try_cleanup (tp)); !gsi_end_p (gsi); )
    {
      struct eh_region_d *catch_region;
      tree eh_label;
      gimple x, gcatch;

      gcatch = gsi_stmt (gsi);
      catch_region = gen_eh_region_catch (try_region,
                                          gimple_catch_types (gcatch));

      this_state.cur_region = catch_region;
      lower_eh_constructs_1 (&this_state, gimple_catch_handler (gcatch));

      eh_label = create_artificial_label (try_catch_loc);
      set_eh_region_tree_label (catch_region, eh_label);

      x = gimple_build_label (eh_label);
      gsi_insert_before (&gsi, x, GSI_SAME_STMT);

      if (gimple_seq_may_fallthru (gimple_catch_handler (gcatch)))
        {
          if (!out_label)
            out_label = create_artificial_label (try_catch_loc);

          x = gimple_build_goto (out_label);
          gimple_seq_add_stmt (gimple_catch_handler_ptr (gcatch), x);
        }

      gsi_insert_seq_before (&gsi, gimple_catch_handler (gcatch),
                             GSI_SAME_STMT);
      gsi_remove (&gsi, false);
    }

  return frob_into_branch_around (tp, NULL, out_label);
}
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY with a
   GIMPLE_EH_FILTER to a sequence of labels and blocks, plus the exception
   region trees that record all the magic.  */

static gimple_seq
lower_eh_filter (struct leh_state *state, gimple tp)
{
  struct leh_state this_state;
  struct eh_region_d *this_region;
  gimple inner;
  tree eh_label;

  inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));

  if (gimple_eh_filter_must_not_throw (inner))
    this_region = gen_eh_region_must_not_throw (state->cur_region);
  else
    this_region = gen_eh_region_allowed (state->cur_region,
                                         gimple_eh_filter_types (inner));
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      return gimple_try_eval (tp);
    }

  lower_eh_constructs_1 (state, gimple_eh_filter_failure (inner));
  gimple_try_set_cleanup (tp, gimple_eh_filter_failure (inner));

  eh_label = create_artificial_label (gimple_location (inner));
  set_eh_region_tree_label (this_region, eh_label);

  return frob_into_branch_around (tp, eh_label, NULL);
}
/* Implement a cleanup expression.  This is similar to try-finally,
   except that we only execute the cleanup block for exception edges.  */

static gimple_seq
lower_cleanup (struct leh_state *state, gimple tp)
{
  struct leh_state this_state;
  struct eh_region_d *this_region;
  struct leh_tf_state fake_tf;
  gimple_seq result;

  /* If not using eh, then exception-only cleanups are no-ops.  */
  if (!flag_exceptions)
    {
      result = gimple_try_eval (tp);
      lower_eh_constructs_1 (state, result);
      return result;
    }

  this_region = gen_eh_region_cleanup (state->cur_region);
  this_state = *state;
  this_state.cur_region = this_region;

  lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));

  if (!get_eh_region_may_contain_throw (this_region))
    {
      return gimple_try_eval (tp);
    }

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.top_p = fake_tf.try_finally_expr = tp;
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
  fake_tf.may_throw = true;

  fake_tf.eh_label = create_artificial_label (gimple_location (tp));
  set_eh_region_tree_label (this_region, fake_tf.eh_label);

  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  if (fake_tf.may_throw)
    {
      /* In this case honor_protect_cleanup_actions had nothing to do,
         and we should process this normally.  */
      lower_eh_constructs_1 (state, gimple_try_cleanup (tp));
      result = frob_into_branch_around (tp, fake_tf.eh_label,
                                        fake_tf.fallthru_label);
    }
  else
    {
      /* In this case honor_protect_cleanup_actions did nearly all of
         the work.  All we have left is to append the fallthru_label.  */

      result = gimple_try_eval (tp);
      if (fake_tf.fallthru_label)
        {
          gimple x = gimple_build_label (fake_tf.fallthru_label);
          gimple_seq_add_stmt (&result, x);
        }
    }
  return result;
}
/* Main loop for lowering eh constructs.  Also moves gsi to the next
   statement.  */

static void
lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi)
{
  gimple_seq replace;
  gimple x;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
    case GIMPLE_ASSIGN:
      /* If the stmt can throw use a new temporary for the assignment
         to a LHS.  This makes sure the old value of the LHS is
         available on the EH edge.  */
      if (stmt_could_throw_p (stmt)
          && gimple_has_lhs (stmt)
          && !tree_could_throw_p (gimple_get_lhs (stmt))
          && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
        {
          tree lhs = gimple_get_lhs (stmt);
          tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
          gimple s = gimple_build_assign (lhs, tmp);
          gimple_set_location (s, gimple_location (stmt));
          gimple_set_block (s, gimple_block (stmt));
          gimple_set_lhs (stmt, tmp);
          if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
              || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
            DECL_GIMPLE_REG_P (tmp) = 1;
          gsi_insert_after (gsi, s, GSI_SAME_STMT);
        }
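      /* Schematically (an illustration, not from the original
         sources): a throwing assignment "lhs = expr" becomes

           tmp = expr;   -- may throw; lhs untouched on the EH edge
           lhs = tmp;    -- executed only on the fallthru edge

         so consumers on the EH path still see the old value of LHS.  */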
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && stmt_could_throw_p (stmt))
        {
          record_stmt_eh_region (state->cur_region, stmt);
          note_eh_region_may_contain_throw (state->cur_region);
        }
      break;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
      maybe_record_in_goto_queue (state, stmt);
      break;

    case GIMPLE_SWITCH:
      verify_norecord_switch_expr (state, stmt);
      break;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
        replace = lower_try_finally (state, stmt);
      else
        {
          x = gimple_seq_first_stmt (gimple_try_cleanup (stmt));
          switch (gimple_code (x))
            {
            case GIMPLE_CATCH:
              replace = lower_catch (state, stmt);
              break;
            case GIMPLE_EH_FILTER:
              replace = lower_eh_filter (state, stmt);
              break;
            default:
              replace = lower_cleanup (state, stmt);
              break;
            }
        }

      /* Remove the old stmt and insert the transformed sequence
         instead.  */
      gsi_insert_seq_before (gsi, replace, GSI_SAME_STMT);
      gsi_remove (gsi, true);

      /* Return since we don't want gsi_next ().  */
      return;

    default:
      /* A type, a decl, or some kind of statement that we're not
         interested in.  Don't walk them.  */
      break;
    }

  gsi_next (gsi);
}
/* A helper to unwrap a gimple_seq and feed stmts to lower_eh_constructs_2.  */

static void
lower_eh_constructs_1 (struct leh_state *state, gimple_seq seq)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (seq); !gsi_end_p (gsi);)
    lower_eh_constructs_2 (state, &gsi);
}
static unsigned int
lower_eh_constructs (void)
{
  struct leh_state null_state;

  gimple_seq bodyp = gimple_body (current_function_decl);

  finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);

  collect_finally_tree_1 (bodyp, NULL);

  memset (&null_state, 0, sizeof (null_state));
  lower_eh_constructs_1 (&null_state, bodyp);

  htab_delete (finally_tree);

  collect_eh_region_array ();

  /* If this function needs a language specific EH personality routine
     and the frontend didn't already set one do so now.  */
  if (function_needs_eh_personality (cfun) == eh_personality_lang
      && !DECL_FUNCTION_PERSONALITY (current_function_decl))
    DECL_FUNCTION_PERSONALITY (current_function_decl)
      = lang_hooks.eh_personality ();

  return 0;
}
struct gimple_opt_pass pass_lower_eh =
{
 {
  GIMPLE_PASS,
  "eh",					/* name */
  NULL,					/* gate */
  lower_eh_constructs,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_EH,				/* tv_id */
  PROP_gimple_lcf,			/* properties_required */
  PROP_gimple_leh,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func			/* todo_flags_finish */
 }
};
/* Construct EH edges for STMT.  */

static void
make_eh_edge (struct eh_region_d *region, void *data)
{
  gimple stmt;
  tree lab;
  basic_block src, dst;

  stmt = (gimple) data;
  lab = get_eh_region_tree_label (region);

  src = gimple_bb (stmt);
  dst = label_to_block (lab);

  make_edge (src, dst, EDGE_EH);
}
/* See if STMT is a call that might be inlined.  */

static bool
inlinable_call_p (gimple stmt)
{
  tree decl;
  if (gimple_code (stmt) != GIMPLE_CALL)
    return false;
  if (cfun->after_inlining)
    return false;
  /* Indirect calls can be propagated to direct call
     by constant propagation.  */
  decl = gimple_call_fndecl (stmt);
  if (!decl)
    return true;
  if (cgraph_function_flags_ready
      && cgraph_function_body_availability (cgraph_node (decl))
      < AVAIL_OVERWRITABLE)
    return false;
  return !DECL_UNINLINABLE (decl);
}
void
make_eh_edges (gimple stmt)
{
  int region_nr;
  bool is_resx;
  bool inlinable = false;
  basic_block bb;

  if (gimple_code (stmt) == GIMPLE_RESX)
    {
      region_nr = gimple_resx_region (stmt);
      is_resx = true;
    }
  else
    {
      region_nr = lookup_stmt_eh_region (stmt);
      if (region_nr < 0)
        return;
      is_resx = false;
      inlinable = inlinable_call_p (stmt);
    }

  foreach_reachable_handler (region_nr, is_resx, inlinable, make_eh_edge, stmt);

  /* Make the CFG profile more consistent assuming that the exception will
     resume to the first available EH handler.  In practice this makes little
     difference, but we get fewer consistency errors in the dumps.  */
  bb = gimple_bb (stmt);
  if (is_resx && EDGE_COUNT (bb->succs))
    EDGE_SUCC (bb, 0)->probability = REG_BR_PROB_BASE;
}
/* Redirect EH edge E to NEW_BB.  */

edge
redirect_eh_edge (edge e, basic_block new_bb)
{
  gimple stmt = gsi_stmt (gsi_last_bb (e->src));
  int region_nr, new_region_nr;
  bool is_resx;
  bool inlinable = false;
  tree label = gimple_block_label (new_bb);
  struct eh_region_d *r;

  if (gimple_code (stmt) == GIMPLE_RESX)
    {
      region_nr = gimple_resx_region (stmt);
      is_resx = true;
    }
  else
    {
      region_nr = lookup_stmt_eh_region (stmt);
      gcc_assert (region_nr >= 0);
      is_resx = false;
      inlinable = inlinable_call_p (stmt);
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Redirecting EH edge %i->%i to %i, region %i, resx %i\n",
             e->src->index, e->dest->index, new_bb->index, region_nr, is_resx);
  r = redirect_eh_edge_to_label (e, label, is_resx, inlinable, region_nr);
  new_region_nr = get_eh_region_number (r);
  if (new_region_nr != region_nr)
    {
      if (is_resx)
        gimple_resx_set_region (stmt, new_region_nr);
      else
        {
          remove_stmt_from_eh_region (stmt);
          add_stmt_to_eh_region (stmt, new_region_nr);
        }
    }
  e = ssa_redirect_edge (e, new_bb);
  return e;
}
static bool mark_eh_edge_found_error;

/* Mark the edge make_eh_edge would create for a given region by setting its
   aux field; output an error if something goes wrong.  */

static void
mark_eh_edge (struct eh_region_d *region, void *data)
{
  gimple stmt;
  tree lab;
  basic_block src, dst;
  edge e;

  stmt = (gimple) data;
  lab = get_eh_region_tree_label (region);

  src = gimple_bb (stmt);
  dst = label_to_block (lab);

  e = find_edge (src, dst);
  if (!e)
    {
      error ("EH edge %i->%i is missing", src->index, dst->index);
      mark_eh_edge_found_error = true;
    }
  else if (!(e->flags & EDGE_EH))
    {
      error ("EH edge %i->%i miss EH flag", src->index, dst->index);
      mark_eh_edge_found_error = true;
    }
  else if (e->aux)
    {
      /* ??? might not be mistake.  */
      error ("EH edge %i->%i has duplicated regions", src->index, dst->index);
      mark_eh_edge_found_error = true;
    }
  else
    e->aux = (void *) 1;
}
/* Verify that BB containing STMT as the last statement, has precisely the
   edges that make_eh_edges would create.  */

bool
verify_eh_edges (gimple stmt)
{
  int region_nr;
  bool is_resx;
  basic_block bb = gimple_bb (stmt);
  edge_iterator ei;
  edge e;
  bool inlinable = false;

  FOR_EACH_EDGE (e, ei, bb->succs)
    gcc_assert (!e->aux);
  mark_eh_edge_found_error = false;
  if (gimple_code (stmt) == GIMPLE_RESX)
    {
      region_nr = gimple_resx_region (stmt);
      is_resx = true;
    }
  else
    {
      region_nr = lookup_stmt_eh_region (stmt);
      if (region_nr < 0)
        {
          FOR_EACH_EDGE (e, ei, bb->succs)
            if (e->flags & EDGE_EH)
              {
                error ("BB %i can not throw but has EH edges", bb->index);
                return true;
              }
          return false;
        }
      if (!stmt_could_throw_p (stmt))
        {
          error ("BB %i last statement has incorrectly set region", bb->index);
          return true;
        }
      is_resx = false;
      inlinable = inlinable_call_p (stmt);
    }

  foreach_reachable_handler (region_nr, is_resx, inlinable, mark_eh_edge, stmt);
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if ((e->flags & EDGE_EH) && !e->aux)
        {
          error ("unnecessary EH edge %i->%i", bb->index, e->dest->index);
          mark_eh_edge_found_error = true;
          return true;
        }
      e->aux = NULL;
    }

  return mark_eh_edge_found_error;
}
/* Helper function for operation_could_trap_p and stmt_could_throw_p.  */

static bool
operation_could_trap_helper_p (enum tree_code op,
                               bool fp_operation,
                               bool honor_trapv,
                               bool honor_nans,
                               bool honor_snans,
                               tree divisor,
                               bool *handled)
{
  *handled = true;
  switch (op)
    {
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
      if (honor_snans || honor_trapv)
        return true;
      if (fp_operation)
        return flag_trapping_math;
      if (!TREE_CONSTANT (divisor) || integer_zerop (divisor))
        return true;
      return false;

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case LTGT_EXPR:
      /* Some floating point comparisons may trap.  */
      return honor_nans;

    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      return honor_snans;

    CASE_CONVERT:
    case FIX_TRUNC_EXPR:
      /* Conversion of floating point might trap.  */
      return honor_nans;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case CONJ_EXPR:
      /* These operations don't trap with floating point.  */
      if (honor_trapv)
        return true;
      return false;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
        return true;
      if (honor_trapv)
        return true;
      return false;

    default:
      /* Any floating arithmetic may trap.  */
      if (fp_operation && flag_trapping_math)
        return true;

      *handled = false;
      return false;
    }
}
/* Return true if operation OP may trap.  FP_OPERATION is true if OP is applied
   on floating-point values.  HONOR_TRAPV is true if OP is applied on integer
   type operands that may trap.  If OP is a division operator, DIVISOR contains
   the value of the divisor.  */

bool
operation_could_trap_p (enum tree_code op, bool fp_operation, bool honor_trapv,
                        tree divisor)
{
  bool honor_nans = (fp_operation && flag_trapping_math
                     && !flag_finite_math_only);
  bool honor_snans = fp_operation && flag_signaling_nans != 0;
  bool handled;

  if (TREE_CODE_CLASS (op) != tcc_comparison
      && TREE_CODE_CLASS (op) != tcc_unary
      && TREE_CODE_CLASS (op) != tcc_binary)
    return false;

  return operation_could_trap_helper_p (op, fp_operation, honor_trapv,
                                        honor_nans, honor_snans, divisor,
                                        &handled);
}
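
/* For illustration (not in the original sources): integer division by
   a non-constant or zero divisor is flagged as potentially trapping,

     operation_could_trap_p (TRUNC_DIV_EXPR, false, false,
                             integer_zero_node)  == true

   while, say, PLUS_EXPR on ordinary integers without -ftrapv reports
   false.  */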
2296 /* Return true if EXPR can trap, as in dereferencing an invalid pointer
2297 location or floating point arithmetic. C.f. the rtl version, may_trap_p.
2298 This routine expects only GIMPLE lhs or rhs input. */
2301 tree_could_trap_p (tree expr)
2303 enum tree_code code;
2304 bool fp_operation = false;
2305 bool honor_trapv = false;
2306 tree t, base, div = NULL_TREE;
2311 code = TREE_CODE (expr);
2312 t = TREE_TYPE (expr);
2316 if (COMPARISON_CLASS_P (expr))
2317 fp_operation = FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)));
2319 fp_operation = FLOAT_TYPE_P (t);
2320 honor_trapv = INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t);
2323 if (TREE_CODE_CLASS (code) == tcc_binary)
2324 div = TREE_OPERAND (expr, 1);
2325 if (operation_could_trap_p (code, fp_operation, honor_trapv, div))
2331 case TARGET_MEM_REF:
2332 /* For TARGET_MEM_REFs use the information based on the original reference. */
2334 expr = TMR_ORIGINAL (expr);
2335 code = TREE_CODE (expr);
2342 case VIEW_CONVERT_EXPR:
2343 case WITH_SIZE_EXPR:
2344 expr = TREE_OPERAND (expr, 0);
2345 code = TREE_CODE (expr);
2348 case ARRAY_RANGE_REF:
2349 base = TREE_OPERAND (expr, 0);
2350 if (tree_could_trap_p (base))
2353 if (TREE_THIS_NOTRAP (expr))
2356 return !range_in_array_bounds_p (expr);
2359 base = TREE_OPERAND (expr, 0);
2360 if (tree_could_trap_p (base))
2363 if (TREE_THIS_NOTRAP (expr))
2366 return !in_array_bounds_p (expr);
2369 case ALIGN_INDIRECT_REF:
2370 case MISALIGNED_INDIRECT_REF:
2371 return !TREE_THIS_NOTRAP (expr);
2374 return TREE_THIS_VOLATILE (expr);
2378 t = get_callee_fndecl (expr);
2379 /* Assume that calls to weak functions may trap. */
2380 if (!t || !DECL_P (t) || DECL_WEAK (t))
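/* Assumed usage sketch (editorial): code-motion passes consult this
   predicate before speculating a memory reference, e.g.

     tree ref = gimple_assign_rhs1 (load_stmt);
     if (!tree_could_trap_p (ref))
       ...the load may be hoisted out of its guarding condition...
*/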
2390 /* Helper for stmt_could_throw_p. Return true if STMT (assumed to be an
2391 assignment or a conditional) may throw. */
2394 stmt_could_throw_1_p (gimple stmt)
2396 enum tree_code code = gimple_expr_code (stmt);
2397 bool honor_nans = false;
2398 bool honor_snans = false;
2399 bool fp_operation = false;
2400 bool honor_trapv = false;
2405 if (TREE_CODE_CLASS (code) == tcc_comparison
2406 || TREE_CODE_CLASS (code) == tcc_unary
2407 || TREE_CODE_CLASS (code) == tcc_binary)
2409 t = gimple_expr_type (stmt);
2410 fp_operation = FLOAT_TYPE_P (t);
2413 honor_nans = flag_trapping_math && !flag_finite_math_only;
2414 honor_snans = flag_signaling_nans != 0;
2416 else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
2420 /* Check if the main expression may trap. */
2421 t = is_gimple_assign (stmt) ? gimple_assign_rhs2 (stmt) : NULL;
2422 ret = operation_could_trap_helper_p (code, fp_operation, honor_trapv,
2423 honor_nans, honor_snans, t,
2428 /* If the expression does not trap, see if any of the individual operands may trap. */
2430 for (i = 0; i < gimple_num_ops (stmt); i++)
2431 if (tree_could_trap_p (gimple_op (stmt, i)))
2438 /* Return true if statement STMT could throw an exception. */
2441 stmt_could_throw_p (gimple stmt)
2443 enum gimple_code code;
2445 if (!flag_exceptions)
2448 /* The only statements that can throw an exception are assignments,
2449 conditionals, calls and asms. */
2450 code = gimple_code (stmt);
2451 if (code != GIMPLE_ASSIGN
2452 && code != GIMPLE_COND
2453 && code != GIMPLE_CALL
2454 && code != GIMPLE_ASM)
2457 /* If exceptions can only be thrown by function calls and STMT is not a
2458 GIMPLE_CALL, the statement cannot throw. */
2459 if (!flag_non_call_exceptions && code != GIMPLE_CALL)
2462 if (code == GIMPLE_ASSIGN || code == GIMPLE_COND)
2463 return stmt_could_throw_1_p (stmt);
2464 else if (is_gimple_call (stmt))
2465 return (gimple_call_flags (stmt) & ECF_NOTHROW) == 0;
2466 else if (gimple_code (stmt) == GIMPLE_ASM)
2467 return (gimple_asm_volatile_p (stmt));
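/* Illustrative pairing with the EH table (the surrounding caller is an
   assumption; the helpers are from this file): when a statement is
   created inside region REGION_NR, one typically does

     if (stmt_could_throw_p (stmt))
       add_stmt_to_eh_region (stmt, region_nr);

   so that later queries such as lookup_stmt_eh_region find it.  */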
2475 /* Return true if expression T could throw an exception. */
2478 tree_could_throw_p (tree t)
2480 if (!flag_exceptions)
2482 if (TREE_CODE (t) == MODIFY_EXPR)
2484 if (flag_non_call_exceptions
2485 && tree_could_trap_p (TREE_OPERAND (t, 0)))
2487 t = TREE_OPERAND (t, 1);
2490 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2491 t = TREE_OPERAND (t, 0);
2492 if (TREE_CODE (t) == CALL_EXPR)
2493 return (call_expr_flags (t) & ECF_NOTHROW) == 0;
2494 if (flag_non_call_exceptions)
2495 return tree_could_trap_p (t);
2499 /* Return true if STMT can throw an exception that is not caught within
2500 the current function (CFUN). */
2503 stmt_can_throw_external (gimple stmt)
2506 bool is_resx = false;
2507 bool inlinable_call = false;
2509 if (!stmt_could_throw_p (stmt))
2512 if (gimple_code (stmt) == GIMPLE_RESX)
2514 region_nr = gimple_resx_region (stmt);
2518 region_nr = lookup_stmt_eh_region (stmt);
2523 return can_throw_external_1 (region_nr, is_resx, inlinable_call);
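/* Editorial sketch (assumed caller): analyses that compute the nothrow
   property scan every statement, along the lines of

     if (stmt_can_throw_external (stmt))
       ...the enclosing function cannot be marked nothrow...
*/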
2526 /* Return true if STMT can throw an exception that is caught within
2527 the current function (CFUN). */
2530 stmt_can_throw_internal (gimple stmt)
2533 bool is_resx = false;
2534 bool inlinable_call = false;
2536 if (gimple_code (stmt) == GIMPLE_RESX)
2538 region_nr = gimple_resx_region (stmt);
2543 region_nr = lookup_stmt_eh_region (stmt);
2544 inlinable_call = inlinable_call_p (stmt);
2550 return can_throw_internal_1 (region_nr, is_resx, inlinable_call);
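/* Sketch of a typical client (assumed): dead-EH-edge purging keeps an
   EH edge only while its source block can still throw internally, e.g.

     gimple last = last_stmt (bb);
     if (!last || !stmt_can_throw_internal (last))
       ...remove the EH successor edges of BB...
*/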
2554 /* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
2555 OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
2556 in the table if it should be in there. Return TRUE if a replacement was
2557 done that may require an EH edge purge. */
2560 maybe_clean_or_replace_eh_stmt (gimple old_stmt, gimple new_stmt)
2562 int region_nr = lookup_stmt_eh_region (old_stmt);
2566 bool new_stmt_could_throw = stmt_could_throw_p (new_stmt);
2568 if (new_stmt == old_stmt && new_stmt_could_throw)
2571 remove_stmt_from_eh_region (old_stmt);
2572 if (new_stmt_could_throw)
2574 add_stmt_to_eh_region (new_stmt, region_nr);
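/* Typical call pattern (assumed; statement-folding code is one such
   caller):

     if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt)
	 && gimple_purge_dead_eh_edges (gimple_bb (new_stmt)))
       ...EH edges were removed; a CFG cleanup may be needed...

   The purge is the follow-up that this function's return value asks
   for.  */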
2584 /* Returns TRUE if oneh and twoh are exception handlers (gimple_try_cleanup of
2585 GIMPLE_TRY) that are similar enough to be considered the same. Currently
2586 this only handles handlers consisting of a single call, as that's the
2587 important case for C++: a destructor call for a particular object showing
2588 up in multiple handlers. */
2591 same_handler_p (gimple_seq oneh, gimple_seq twoh)
2593 gimple_stmt_iterator gsi;
2597 gsi = gsi_start (oneh);
2598 if (!gsi_one_before_end_p (gsi))
2600 ones = gsi_stmt (gsi);
2602 gsi = gsi_start (twoh);
2603 if (!gsi_one_before_end_p (gsi))
2605 twos = gsi_stmt (gsi);
2607 if (!is_gimple_call (ones)
2608 || !is_gimple_call (twos)
2609 || gimple_call_lhs (ones)
2610 || gimple_call_lhs (twos)
2611 || gimple_call_chain (ones)
2612 || gimple_call_chain (twos)
2613 || !operand_equal_p (gimple_call_fn (ones), gimple_call_fn (twos), 0)
2614 || gimple_call_num_args (ones) != gimple_call_num_args (twos))
2617 for (ai = 0; ai < gimple_call_num_args (ones); ++ai)
2618 if (!operand_equal_p (gimple_call_arg (ones, ai),
2619 gimple_call_arg (twos, ai), 0))
/* Optimize
2626    try { A() } finally { try { ~B() } catch { ~A() } }
2627    try { ... } finally { ~A() }
   into
2629    try { A() } catch { ~B() }
2630    try { ~B() ... } finally { ~A() }
2632 This occurs frequently in C++, where A is a local variable and B is a
2633 temporary used in the initializer for A. */
2636 optimize_double_finally (gimple one, gimple two)
2639 gimple_stmt_iterator gsi;
2641 gsi = gsi_start (gimple_try_cleanup (one));
2642 if (!gsi_one_before_end_p (gsi))
2645 oneh = gsi_stmt (gsi);
2646 if (gimple_code (oneh) != GIMPLE_TRY
2647 || gimple_try_kind (oneh) != GIMPLE_TRY_CATCH)
2650 if (same_handler_p (gimple_try_cleanup (oneh), gimple_try_cleanup (two)))
2652 gimple_seq seq = gimple_try_eval (oneh);
2654 gimple_try_set_cleanup (one, seq);
2655 gimple_try_set_kind (one, GIMPLE_TRY_CATCH);
2656 seq = copy_gimple_seq_and_replace_locals (seq);
2657 gimple_seq_add_seq (&seq, gimple_try_eval (two));
2658 gimple_try_set_eval (two, seq);
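/* Source-level illustration (editorial, assumed example): C++ code like

     struct B { B (); ~B (); };
     struct A { A (const B &); ~A (); };
     A a = B ();

   materializes a temporary B whose cleanup nests inside A's, yielding
   exactly the double-finally shape rewritten above.  */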
2662 /* Perform EH refactoring optimizations that are simpler to do when code
2663 flow has been lowered but EH structures haven't. */
2666 refactor_eh_r (gimple_seq seq)
2668 gimple_stmt_iterator gsi;
2673 gsi = gsi_start (seq);
2677 if (gsi_end_p (gsi))
2680 two = gsi_stmt (gsi);
2683 && gimple_code (one) == GIMPLE_TRY
2684 && gimple_code (two) == GIMPLE_TRY
2685 && gimple_try_kind (one) == GIMPLE_TRY_FINALLY
2686 && gimple_try_kind (two) == GIMPLE_TRY_FINALLY)
2687 optimize_double_finally (one, two);
2689 switch (gimple_code (one))
2692 refactor_eh_r (gimple_try_eval (one));
2693 refactor_eh_r (gimple_try_cleanup (one));
2696 refactor_eh_r (gimple_catch_handler (one));
2698 case GIMPLE_EH_FILTER:
2699 refactor_eh_r (gimple_eh_filter_failure (one));
2714 refactor_eh_r (gimple_body (current_function_decl));
2718 struct gimple_opt_pass pass_refactor_eh =
2724 refactor_eh, /* execute */
2727 0, /* static_pass_number */
2728 TV_TREE_EH, /* tv_id */
2729 PROP_gimple_lcf, /* properties_required */
2730 0, /* properties_provided */
2731 0, /* properties_destroyed */
2732 0, /* todo_flags_start */
2733 TODO_dump_func /* todo_flags_finish */
2737 /* Walk the statements, see which regions are really referenced and remove the unreachable ones. */
2740 tree_remove_unreachable_handlers (void)
2742 sbitmap reachable, contains_stmt;
2743 VEC(int,heap) * label_to_region;
2746 label_to_region = label_to_region_map ();
2747 reachable = sbitmap_alloc (num_eh_regions ());
2748 sbitmap_zero (reachable);
2749 contains_stmt = sbitmap_alloc (num_eh_regions ());
2750 sbitmap_zero (contains_stmt);
2754 gimple_stmt_iterator gsi;
2756 bool has_eh_preds = bb_has_eh_pred (bb);
2758 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2760 gimple stmt = gsi_stmt (gsi);
2762 if (gimple_code (stmt) == GIMPLE_LABEL && has_eh_preds)
2764 int uid = LABEL_DECL_UID (gimple_label_label (stmt));
2767 for (region = VEC_index (int, label_to_region, uid);
2768 region; region = get_next_region_sharing_label (region))
2769 SET_BIT (reachable, region);
2771 if (gimple_code (stmt) == GIMPLE_RESX)
2773 VEC_index (eh_region, cfun->eh->region_array,
2774 gimple_resx_region (stmt))->region_number);
2775 if ((region = lookup_stmt_eh_region (stmt)) >= 0)
2776 SET_BIT (contains_stmt, region);
2782 fprintf (dump_file, "Before removal of unreachable regions:\n");
2783 dump_eh_tree (dump_file, cfun);
2784 fprintf (dump_file, "Reachable regions: ");
2785 dump_sbitmap_file (dump_file, reachable);
2786 fprintf (dump_file, "Regions containing insns: ");
2787 dump_sbitmap_file (dump_file, contains_stmt);
2790 remove_unreachable_regions (reachable, contains_stmt);
2791 sbitmap_free (reachable);
2792 sbitmap_free (contains_stmt);
2793 VEC_free (int, heap, label_to_region);
2796 fprintf (dump_file, "\n\nAfter removal of unreachable regions:\n");
2797 dump_eh_tree (dump_file, cfun);
2798 fprintf (dump_file, "\n\n");
2802 /* Pattern-match an empty EH receiver looking like:
2804 save_filt.6352_662 = [filter_expr] <<<filter object>>>;
2805 save_eptr.6351_663 = [exc_ptr_expr] <<<exception object>>>;
2806 <<<exception object>>> = save_eptr.6351_663;
2807 <<<filter object>>> = save_filt.6352_662;
2810 And various minor variants after DCE or copy propagation.
2814 tree_empty_eh_handler_p (basic_block bb)
2816 gimple_stmt_iterator gsi;
2820 use_operand_p imm_use;
2824 gsi = gsi_last_bb (bb);
2827 if (gsi_end_p (gsi))
2829 if (gimple_code (gsi_stmt (gsi)) != GIMPLE_RESX)
2831 region = gimple_resx_region (gsi_stmt (gsi));
2833 /* filter_object set. */
2834 gsi_prev_nondebug (&gsi);
2835 if (gsi_end_p (gsi))
2837 if (gimple_code (gsi_stmt (gsi)) == GIMPLE_ASSIGN)
2842 if (TREE_CODE (gimple_assign_lhs (gsi_stmt (gsi))) != FILTER_EXPR)
2844 filter_tmp = gimple_assign_rhs1 (gsi_stmt (gsi));
2846 /* exc_ptr set. */
2847 gsi_prev_nondebug (&gsi);
2848 if (gsi_end_p (gsi))
2850 if (gimple_code (gsi_stmt (gsi)) != GIMPLE_ASSIGN)
2852 if (TREE_CODE (gimple_assign_lhs (gsi_stmt (gsi))) != EXC_PTR_EXPR)
2854 exc_ptr_tmp = gimple_assign_rhs1 (gsi_stmt (gsi));
2857 if (TREE_CODE (exc_ptr_tmp) != EXC_PTR_EXPR)
2859 gsi_prev_nondebug (&gsi);
2860 if (gsi_end_p (gsi))
2862 if (gimple_code (gsi_stmt (gsi)) != GIMPLE_ASSIGN)
2864 if (TREE_CODE (gimple_assign_rhs1 (gsi_stmt (gsi))) != EXC_PTR_EXPR)
2866 if (exc_ptr_tmp != gimple_assign_lhs (gsi_stmt (gsi)))
2868 if (!single_imm_use (exc_ptr_tmp, &imm_use, &use_stmt))
2872 /* filter_object get. */
2873 if (TREE_CODE (filter_tmp) != FILTER_EXPR)
2875 gsi_prev_nondebug (&gsi);
2876 if (gsi_end_p (gsi))
2878 if (gimple_code (gsi_stmt (gsi)) != GIMPLE_ASSIGN)
2880 if (TREE_CODE (gimple_assign_rhs1 (gsi_stmt (gsi))) != FILTER_EXPR)
2882 if (filter_tmp != gimple_assign_lhs (gsi_stmt (gsi)))
2884 if (!single_imm_use (filter_tmp, &imm_use, &use_stmt))
2889 gsi_prev_nondebug (&gsi);
2890 if (gsi_end_p (gsi))
2893 if (gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2896 /* Be sure that there is at least one EH region reaching the block directly.
2897 After EH edge redirection, it is possible that the block is reached by one
2898 handler but resumed by a different one. */
2899 FOR_EACH_EDGE (e, ei, bb->preds)
2900 if ((e->flags & EDGE_EH))
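/* For concreteness (editorial sketch): after DCE or copy propagation the
   accepted receiver can shrink to just

     <Lnn>:
       resx N;

   i.e. a label and the final RESX, which is why the save/restore
   statements above are matched only if they are still present.  */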
2907 /* Return true if it is possible to remove basic block BB and propagate
   through its PHIs.
2910 This means that every PHI in BB has all uses such that they are PHIs
2911 of basic blocks reachable through BB and they appear only in uses
2912 reachable by the edge from BB to the block containing the use.
2914 This is the same as in the merge-phi code, but in a slightly more general
2915 setting because BB can have multiple successors. */
2918 all_phis_safe_to_merge (basic_block bb)
2920 gimple_stmt_iterator si;
2923 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2925 gimple phi = gsi_stmt (si);
2926 tree result = gimple_phi_result (phi);
2928 use_operand_p imm_use;
2929 imm_use_iterator imm_iter;
2931 /* If the PHI's result is never used, then we can just remove it. */
2933 if (has_zero_uses (result))
2935 /* We can always rebuild virtuals if needed. */
2936 if (!is_gimple_reg (result))
2938 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, result)
2940 if (gimple_code (stmt) != GIMPLE_PHI)
2942 if (dump_file && (dump_flags & TDF_DETAILS))
2944 "PHI result has use in non-PHI statement.\n");
2946 BREAK_FROM_IMM_USE_STMT (imm_iter);
2949 FOR_EACH_IMM_USE_ON_STMT (imm_use, imm_iter)
2952 e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (imm_use));
2955 if (dump_file && (dump_flags & TDF_DETAILS))
2956 fprintf (dump_file, "PHI has use in PHI not reached from "
2957 "empty cleanup itself.\n");
2963 BREAK_FROM_IMM_USE_STMT (imm_iter);
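/* Example of the check (editorial illustration): given

     BB:  x_1 = PHI <x_2 (E1), x_3 (E2)>

   a use of x_1 in a PHI of a successor S is safe only if it occurs in
   the argument coming along the BB->S edge; any other use would lose
   its reaching definition once BB is removed, so false is returned.  */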
2971 static bool dominance_info_invalidated;
2973 /* Information to pass into make_eh_edge_and_update_phi. */
2977 basic_block bb_to_remove, bb;
2978 edge edge_to_remove;
2981 /* DATA points to an update_info structure.
2982 Like make_eh_edge, create an EH edge from DATA->bb to the basic block
2983 containing the handler of REGION. In addition, update PHI operands by
2984 copying operands from DATA->bb_to_remove. */
2987 make_eh_edge_and_update_phi (struct eh_region_d *region, void *data)
2989 struct update_info *info = (struct update_info *) data;
2992 basic_block src, dst;
2993 gimple_stmt_iterator si;
2995 lab = get_eh_region_tree_label (region);
2998 dst = label_to_block (lab);
3000 e = find_edge (src, dst);
3003 gcc_assert (e->flags & EDGE_EH);
3007 dominance_info_invalidated = true;
3008 e2 = find_edge (info->bb_to_remove, dst);
3009 e = make_edge (src, dst, EDGE_EH);
3012 for (si = gsi_start_phis (dst); !gsi_end_p (si); gsi_next (&si))
3014 gimple phi = gsi_stmt (si);
3015 tree use = USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e2));
3016 gimple def = (TREE_CODE (use) == SSA_NAME
3017 ? SSA_NAME_DEF_STMT (use) : NULL);
3019 if (def && gimple_bb (def) == info->bb_to_remove)
3021 use = USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (def,
3022 info->edge_to_remove));
3023 gcc_assert (info->bb_to_remove == info->edge_to_remove->dest);
3024 def = TREE_CODE (use) == SSA_NAME ? SSA_NAME_DEF_STMT (use) : NULL;
3026 || gimple_bb (def) != info->bb_to_remove
3027 || !is_gimple_reg (use));
3029 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e), use);
3033 /* Make EH edges corresponding to STMT while updating PHI nodes after the
3034 removal of empty cleanup BB_TO_REMOVE joined to the BB containing STMT.
3037 Return true if EDGE_TO_REMOVE was really removed. It might stay reachable
3038 when not all EH regions are cleaned up. */
3041 update_eh_edges (gimple stmt, basic_block bb_to_remove, edge edge_to_remove)
3045 bool inlinable = false;
3046 struct update_info info;
3049 int probability_sum = 0;
3050 bool removed = false;
3052 info.bb_to_remove = bb_to_remove;
3053 info.bb = gimple_bb (stmt);
3054 info.edge_to_remove = edge_to_remove;
3056 if (gimple_code (stmt) == GIMPLE_RESX)
3058 region_nr = gimple_resx_region (stmt);
3063 region_nr = lookup_stmt_eh_region (stmt);
3065 inlinable = inlinable_call_p (stmt);
3068 /* First add new edges as necessary. */
3069 foreach_reachable_handler (region_nr, is_resx, inlinable,
3070 make_eh_edge_and_update_phi, &info);
3072 /* And remove the edges we didn't mark. */
3073 for (ei = ei_start (info.bb->succs); (e = ei_safe_edge (ei)); )
3075 if ((e->flags & EDGE_EH) && !e->aux)
3077 dominance_info_invalidated = true;
3078 if (e == edge_to_remove)
3085 probability_sum += e->probability;
3090 /* Make the CFG profile more consistent, assuming that the exception will
3091 resume in the first available EH handler. In practice this makes little
3092 difference, but we get fewer consistency errors in the dumps. */
3093 if (is_resx && EDGE_COUNT (info.bb->succs) && !probability_sum)
3094 EDGE_SUCC (info.bb, 0)->probability = REG_BR_PROB_BASE;
3098 /* Look for basic blocks containing an empty exception handler and remove
3099 them. This is similar to jump forwarding, just across EH edges. */
3102 cleanup_empty_eh (basic_block bb, VEC(int,heap) * label_to_region)
3105 gimple_stmt_iterator si;
3108 /* When the handler of an EH region winds up being empty, we can safely
3109 remove it. This causes inner EH regions to be redirected to the
3110 outer one, if present in the function. So we need to rebuild
3111 EH edges in all sources. */
3112 if ((region = tree_empty_eh_handler_p (bb))
3113 && all_phis_safe_to_merge (bb))
3116 bool found = false, removed_some = false, has_non_eh_preds = false;
3117 gimple_stmt_iterator gsi;
3119 /* Look for all EH regions sharing the label of this block.
3120 If they are not the same as REGION, remove them and replace them
3121 by the outer region of REGION. Also note whether REGION itself is one of them. */
3124 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3125 if (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
3127 int uid = LABEL_DECL_UID (gimple_label_label (gsi_stmt (gsi)));
3128 int r = VEC_index (int, label_to_region, uid);
3133 next = get_next_region_sharing_label (r);
3138 removed_some = true;
3139 remove_eh_region_and_replace_by_outer_of (r, region);
3140 if (dump_file && (dump_flags & TDF_DETAILS))
3141 fprintf (dump_file, "Empty EH handler %i removed and "
3142 "replaced by %i\n", r, region);
3150 gcc_assert (found || removed_some);
3151 FOR_EACH_EDGE (e, ei, bb->preds)
3152 if (!(e->flags & EDGE_EH))
3153 has_non_eh_preds = true;
3155 /* When the block is an empty EH cleanup, but it is reachable via non-EH
3156 code too, we cannot remove the region it is resumed via, because doing so
3157 will lead to redirection of its RESX edges.
3159 This case will be handled later, after edge forwarding, if the EH
   cleanup is really dead. */
3162 if (found && !has_non_eh_preds)
3164 if (dump_file && (dump_flags & TDF_DETAILS))
3165 fprintf (dump_file, "Empty EH handler %i removed.\n", region);
3166 remove_eh_region (region);
3168 else if (!removed_some)
3171 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
3173 basic_block src = e->src;
3174 if (!(e->flags & EDGE_EH))
3179 if (stmt_can_throw_internal (last_stmt (src)))
3181 if (!update_eh_edges (last_stmt (src), bb, e))
3188 /* Verify that we eliminated all uses of the PHIs we are going to remove.
3189 If we didn't, rebuild SSA for the affected variable (this is allowed only
   for virtuals). */
3191 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
3193 gimple phi = gsi_stmt (si);
3194 tree result = gimple_phi_result (phi);
3195 if (!has_zero_uses (result))
3197 use_operand_p use_p;
3198 imm_use_iterator iter;
3201 FOR_EACH_IMM_USE_STMT (stmt, iter, result)
3203 /* We have a use; see if it won't disappear after removing BB. */
3205 if (gimple_bb (stmt) == bb)
3207 if (gimple_code (stmt) == GIMPLE_PHI)
3211 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
3212 if (gimple_phi_arg_edge (stmt,
3213 PHI_ARG_INDEX_FROM_USE (use_p))->src != bb)
3223 gcc_assert (!is_gimple_reg (result));
3224 mark_sym_for_renaming (SSA_NAME_VAR (result));
3225 /* As we are going to delete this block we will release all
3226 defs which makes the immediate uses on use stmts invalid.
3227 Avoid that by replacing all uses with the bare variable
3228 and updating the stmts. */
3229 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
3230 SET_USE (use_p, SSA_NAME_VAR (result));
3235 if (!ei_safe_edge (ei_start (bb->preds)))
3236 delete_basic_block (bb);
3243 /* Perform cleanups and lowering of exception handling:
3244 1) cleanup regions with handlers doing nothing are optimized out;
3245 2) MUST_NOT_THROW regions that became dead because of 1) are optimized out;
3246 3) info about regions that contain instructions, and regions
3247 reachable via local EH edges, is collected;
3248 4) the EH tree is pruned of regions that are no longer necessary. */
3254 bool changed = false;
3256 VEC(int,heap) * label_to_region;
3263 fprintf (dump_file, "Before cleanups:\n");
3264 dump_eh_tree (dump_file, cfun);
3269 label_to_region = label_to_region_map ();
3270 dominance_info_invalidated = false;
3271 /* We cannot use FOR_EACH_BB, since the basic blocks may get removed. */
3272 for (i = NUM_FIXED_BLOCKS; i < last_basic_block; i++)
3274 bb = BASIC_BLOCK (i);
3276 changed |= cleanup_empty_eh (bb, label_to_region);
3278 VEC_free (int, heap, label_to_region);
3279 if (dominance_info_invalidated)
3281 free_dominance_info (CDI_DOMINATORS);
3282 free_dominance_info (CDI_POST_DOMINATORS);
3285 /* Removing a contained cleanup can render MUST_NOT_THROW regions empty. */
3287 delete_unreachable_blocks ();
3290 tree_remove_unreachable_handlers ();
3293 fprintf (dump_file, "After cleanups:\n");
3294 dump_eh_tree (dump_file, cfun);
3297 return (changed ? TODO_cleanup_cfg | TODO_update_ssa : 0);
3300 struct gimple_opt_pass pass_cleanup_eh = {
3303 "ehcleanup", /* name */
3305 cleanup_eh, /* execute */
3308 0, /* static_pass_number */
3309 TV_TREE_EH, /* tv_id */
3310 PROP_gimple_lcf, /* properties_required */
3311 0, /* properties_provided */
3312 0, /* properties_destroyed */
3313 0, /* todo_flags_start */
3314 TODO_dump_func /* todo_flags_finish */