1 /* Exception handling semantics and decomposition for trees.
2 Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
23 #include "coretypes.h"
31 #include "tree-flow.h"
32 #include "tree-dump.h"
33 #include "tree-inline.h"
34 #include "tree-iterator.h"
35 #include "tree-pass.h"
37 #include "langhooks.h"
/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

/* Record that the front end wants cleanup actions (e.g. destructors)
   to be run via the exception handling machinery.  */

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}
/* Misc functions used in this file.  */

/* Compare and hash for any structure which begins with a canonical
   pointer.  Assumes all pointers are interchangeable, which is sort
   of already assumed by gcc elsewhere IIRC.  */

static int
struct_ptr_eq (const void *a, const void *b)
{
  const void * const * x = a;
  const void * const * y = b;
  /* Equality is identity of the leading canonical pointer.  */
  return *x == *y;
}
/* Hash companion to struct_ptr_eq above: hash on the canonical pointer
   stored at the start of the structure.  NOTE(review): fragment -- the
   return type (a hash-value type) and the function braces are elided
   from this listing; confirm against the full source.  */
65 struct_ptr_hash (const void *a)
67 const void * const * x = a;
/* Discard the low bits -- presumably constant for aligned allocations
   and therefore useless as hash entropy.  */
68 return (size_t)*x >> 4;
72 /* Remember and lookup EH region data for arbitrary statements.
73 Really this means any statement that could_throw_p. We could
74 stuff this information into the stmt_ann data structure, but:
76 (1) We absolutely rely on this information being kept until
77 we get to rtl. Once we're done with lowering here, if we lose
78 the information there's no way to recover it!
80 (2) There are many more statements that *cannot* throw as
81 compared to those that can. We should be saving some amount
82 of space by only allocating memory for those that can throw. */
/* Map statement T to EH region REGION in the current function's
   throw-stmt hash table.  NOTE(review): the embedded line numbers jump
   here (83-84, 86, 88-92, 94, 96, 98-102), so this listing shows only
   a fragment -- the return type, braces, the SLOT declaration and the
   final store into *SLOT are elided; confirm against the full source
   before editing.  */
85 record_stmt_eh_region (struct eh_region *region, tree t)
87 struct throw_stmt_node *n;
/* GC allocation: the mapping must survive until RTL expansion (see
   point (1) above).  */
93 n = ggc_alloc (sizeof (*n));
95 n->region_nr = get_eh_region_number (region);
97 slot = htab_find_slot (get_eh_throw_stmt_table (cfun), n, INSERT);
/* Record that statement T lies in EH region number NUM of function
   IFUN.  NOTE(review): fragment -- braces, the SLOT declaration, the
   stores into the new node and the *SLOT assignment are elided from
   this listing.  */
103 add_stmt_to_eh_region_fn (struct function *ifun, tree t, int num)
105 struct throw_stmt_node *n;
/* RESX_EXPRs carry their region number in the statement itself, which
   is presumably why they must not be entered in the table -- confirm
   against except.c.  */
108 gcc_assert (num >= 0);
109 gcc_assert (TREE_CODE (t) != RESX_EXPR);
111 n = ggc_alloc (sizeof (*n));
115 slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
/* Convenience wrapper: record T in region NUM of the current function
   (cfun).  */
121 add_stmt_to_eh_region (tree t, int num)
123 add_stmt_to_eh_region_fn (cfun, t, num);
/* Remove statement T from the EH region table of IFUN, if present.
   Apparently non-void (the wrapper below returns its result -- the
   found/not-found return paths are elided from this listing, along
   with the braces and the dummy-key setup).  */
127 remove_stmt_from_eh_region_fn (struct function *ifun, tree t)
129 struct throw_stmt_node dummy;
/* No table at all means nothing to remove.  */
132 if (!get_eh_throw_stmt_table (ifun))
136 slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
140 htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
/* Convenience wrapper for the current function.  */
148 remove_stmt_from_eh_region (tree t)
150 return remove_stmt_from_eh_region_fn (cfun, t);
/* Look up the EH region number for statement T in IFUN; returns -1
   when T is not in the table.  NOTE(review): fragment -- braces, the
   key setup (n.stmt = t, presumably) and the early-return value are
   elided from this listing.  */
154 lookup_stmt_eh_region_fn (struct function *ifun, tree t)
156 struct throw_stmt_node *p, n;
158 if (!get_eh_throw_stmt_table (ifun))
162 p = htab_find (get_eh_throw_stmt_table (ifun), &n);
164 return (p ? p->region_nr : -1);
/* Same, for the current function.  The elided lines presumably guard
   against a null cfun, per the comment below.  */
168 lookup_stmt_eh_region (tree t)
170 /* We can get called from initialized data when -fnon-call-exceptions
171 is on; prevent crash. */
174 return lookup_stmt_eh_region_fn (cfun, t);
178 /* First pass of EH node decomposition. Build up a tree of TRY_FINALLY_EXPR
179 nodes and LABEL_DECL nodes. We will use this during the second phase to
180 determine if a goto leaves the body of a TRY_FINALLY_EXPR node. */
/* NOTE(review): the struct's members are elided from this listing;
   judging by record_in_finally_tree's parameters they are child/parent
   tree fields -- confirm against the full source.  */
182 struct finally_tree_node
187 /* Note that this table is *not* marked GTY. It is short-lived. */
188 static htab_t finally_tree;
/* Enter a CHILD -> PARENT relationship into the finally_tree table.
   Allocated with xmalloc rather than ggc, consistent with the table
   being short-lived (see comment on finally_tree).  NOTE(review):
   fragment -- braces, the SLOT declaration, the field assignments and
   the *SLOT store are elided from this listing.  */
191 record_in_finally_tree (tree child, tree parent)
193 struct finally_tree_node *n;
196 n = xmalloc (sizeof (*n));
200 slot = htab_find_slot (finally_tree, n, INSERT);
/* Walk statement tree T, recording every LABEL_EXPR label and nested
   TRY_FINALLY_EXPR together with its innermost enclosing try-finally
   REGION.  NOTE(review): fragment -- the function header, the outer
   loop/tail-recursion structure and several case labels are elided
   from this listing.  */
206 collect_finally_tree (tree t, tree region)
209 switch (TREE_CODE (t))
212 record_in_finally_tree (LABEL_EXPR_LABEL (t), region);
215 case TRY_FINALLY_EXPR:
/* The try body is walked with this node as its region; T is then
   advanced to the finally body (handled by the elided loop).  */
216 record_in_finally_tree (t, region);
217 collect_finally_tree (TREE_OPERAND (t, 0), t);
218 t = TREE_OPERAND (t, 1);
222 collect_finally_tree (TREE_OPERAND (t, 0), region);
223 t = TREE_OPERAND (t, 1);
231 t = EH_FILTER_FAILURE (t);
236 tree_stmt_iterator i;
237 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
238 collect_finally_tree (tsi_stmt (i), region);
243 /* A type, a decl, or some kind of statement that we're not
244 interested in. Don't walk them. */
249 /* Use the finally tree to determine if a jump from START to TARGET
250 would leave the try_finally node that START lives in. */
/* NOTE(review): fragment -- the function header, the hash key setup
   and both return statements are elided; the visible do/while tail
   walks parent links until TARGET is reached (or, presumably, returns
   true when no entry is found).  */
253 outside_finally_tree (tree start, tree target)
255 struct finally_tree_node n, *p;
260 p = htab_find (finally_tree, &n);
265 while (start != target);
270 /* Second pass of EH node decomposition. Actually transform the TRY_FINALLY
271 and TRY_CATCH nodes into a set of gotos, magic labels, and eh regions.
272 The eh region creation is straight-forward, but frobbing all the gotos
273 and such into shape isn't. */
275 /* State of the world while lowering. */
/* NOTE(review): the struct leh_state header itself is elided from this
   listing; only its commented members are visible below.  */
279 /* What's "current" while constructing the eh region tree. These
280 correspond to variables of the same name in cfun->eh, which we
281 don't have easy access to. */
282 struct eh_region *cur_region;
283 struct eh_region *prev_try;
285 /* Processing of TRY_FINALLY requires a bit more state. This is
286 split out into a separate structure so that we don't have to
287 copy so much when processing other nodes. */
288 struct leh_tf_state *tf;
/* NOTE(review): likewise the struct leh_tf_state header is elided, as
   are several members referenced later in this file (top_p, goto_queue,
   eh_label, may_fallthru, may_return, may_throw, fallthru_label) --
   confirm against the full source.  */
293 /* Pointer to the TRY_FINALLY node under discussion. The try_finally_expr
294 is the original TRY_FINALLY_EXPR. We need to retain this so that
295 outside_finally_tree can reliably reference the tree used in the
296 collect_finally_tree data structures. */
297 tree try_finally_expr;
300 /* The state outside this try_finally node. */
301 struct leh_state *outer;
303 /* The exception region created for it. */
304 struct eh_region *region;
306 /* The GOTO_QUEUE is an array of GOTO_EXPR and RETURN_EXPR statements
307 that are seen to escape this TRY_FINALLY_EXPR node. */
308 struct goto_queue_node {
314 size_t goto_queue_size;
315 size_t goto_queue_active;
317 /* The set of unique labels seen as entries in the goto queue. */
318 varray_type dest_array;
320 /* A label to be added at the end of the completed transformed
321 sequence. It will be set if may_fallthru was true *at one time*,
322 though subsequent transformations may have cleared that flag. */
325 /* A label that has been registered with except.c to be the
326 landing pad for this try block. */
329 /* True if it is possible to fall out the bottom of the try block.
330 Cleared if the fallthru is converted to a goto. */
333 /* True if any entry in goto_queue is a RETURN_EXPR. */
336 /* True if the finally block can receive an exception edge.
337 Cleared if the exception case is handled by code duplication. */
/* Forward declarations: filter lowering and the general statement
   walker are mutually recursive with the helpers below.  */
341 static void lower_eh_filter (struct leh_state *, tree *);
342 static void lower_eh_constructs_1 (struct leh_state *, tree *);
344 /* Comparison function for qsort/bsearch. We're interested in
345 searching goto queue elements for source statements. */
348 goto_queue_cmp (const void *x, const void *y)
350 tree a = ((const struct goto_queue_node *)x)->stmt;
351 tree b = ((const struct goto_queue_node *)y)->stmt;
352 return (a == b ? 0 : a < b ? -1 : 1);
355 /* Search for STMT in the goto queue. Return the replacement,
356 or null if the statement isn't in the queue. */
359 find_goto_replacement (struct leh_tf_state *tf, tree stmt)
361 struct goto_queue_node tmp, *ret;
363 ret = bsearch (&tmp, tf->goto_queue, tf->goto_queue_active,
364 sizeof (struct goto_queue_node), goto_queue_cmp);
365 return (ret ? ret->repl_stmt : NULL);
368 /* A subroutine of replace_goto_queue_1. Handles the sub-clauses of a
369 lowered COND_EXPR. If, by chance, the replacement is a simple goto,
370 then we can just splat it in, otherwise we add the new stmts immediately
371 after the COND_EXPR and redirect. */
/* NOTE(review): fragment -- the function braces, the early return when
   no replacement exists, and the splat-in path for a lone GOTO_EXPR
   are elided from this listing.  */
374 replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
375 tree_stmt_iterator *tsi)
377 tree new, one, label;
379 new = find_goto_replacement (tf, *tp);
383 one = expr_only (new);
384 if (one && TREE_CODE (one) == GOTO_EXPR)
/* General case: jump to a fresh label placed after the COND_EXPR,
   followed by the replacement statements.  */
390 label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
391 *tp = build_and_jump (&LABEL_EXPR_LABEL (label));
393 tsi_link_after (tsi, label, TSI_CONTINUE_LINKING);
394 tsi_link_after (tsi, new, TSI_CONTINUE_LINKING);
397 /* The real work of replace_goto_queue. Returns with TSI updated to
398 point to the next statement. */
400 static void replace_goto_queue_stmt_list (tree, struct leh_tf_state *);
/* Dispatch on statement code, substituting queued replacements for
   GOTO_EXPR/RETURN_EXPR and recursing into compound statements.
   NOTE(review): fragment -- the function braces, several case labels
   and the break/advance statements are elided from this listing.  */
403 replace_goto_queue_1 (tree t, struct leh_tf_state *tf, tree_stmt_iterator *tsi)
405 switch (TREE_CODE (t))
409 t = find_goto_replacement (tf, t);
412 tsi_link_before (tsi, t, TSI_SAME_STMT);
419 replace_goto_queue_cond_clause (&COND_EXPR_THEN (t), tf, tsi);
420 replace_goto_queue_cond_clause (&COND_EXPR_ELSE (t), tf, tsi);
423 case TRY_FINALLY_EXPR:
425 replace_goto_queue_stmt_list (TREE_OPERAND (t, 0), tf);
426 replace_goto_queue_stmt_list (TREE_OPERAND (t, 1), tf);
429 replace_goto_queue_stmt_list (CATCH_BODY (t), tf);
432 replace_goto_queue_stmt_list (EH_FILTER_FAILURE (t), tf);
439 /* These won't have gotos in them. */
446 /* A subroutine of replace_goto_queue. Handles STATEMENT_LISTs. */
/* Note: replace_goto_queue_1 advances the iterator itself (see its
   header comment), so the loop body performs no tsi_next.  */
449 replace_goto_queue_stmt_list (tree t, struct leh_tf_state *tf)
451 tree_stmt_iterator i = tsi_start (t);
452 while (!tsi_end_p (i))
453 replace_goto_queue_1 (tsi_stmt (i), tf, &i);
456 /* Replace all goto queue members. */
/* Fast path: nothing to do when the queue is empty.  */
459 replace_goto_queue (struct leh_tf_state *tf)
461 if (tf->goto_queue_active == 0)
463 replace_goto_queue_stmt_list (*tf->top_p, tf);
466 /* For any GOTO_EXPR or RETURN_EXPR, decide whether it leaves a try_finally
467 node, and if so record that fact in the goto queue associated with that
/* NOTE(review): lines are elided throughout this function (the header
   comment above is itself cut short); in particular the early return
   when STATE has no tf, the index bookkeeping between the label search
   and the push, and the final q->stmt/q->index assignments do not
   appear in this listing.  */
471 maybe_record_in_goto_queue (struct leh_state *state, tree stmt)
473 struct leh_tf_state *tf = state->tf;
474 struct goto_queue_node *q;
481 switch (TREE_CODE (stmt))
485 tree lab = GOTO_DESTINATION (stmt);
487 /* Computed and non-local gotos do not get processed. Given
488 their nature we can neither tell whether we've escaped the
489 finally block nor redirect them if we knew. */
490 if (TREE_CODE (lab) != LABEL_DECL)
493 /* No need to record gotos that don't leave the try block. */
494 if (! outside_finally_tree (lab, tf->try_finally_expr))
/* First escaping label: lazily create the destination array.  */
497 if (! tf->dest_array)
499 VARRAY_TREE_INIT (tf->dest_array, 10, "dest_array");
500 VARRAY_PUSH_TREE (tf->dest_array, lab);
/* Otherwise push LAB only if it is not already present.  */
505 int n = VARRAY_ACTIVE_SIZE (tf->dest_array);
506 for (index = 0; index < n; ++index)
507 if (VARRAY_TREE (tf->dest_array, index) == lab)
510 VARRAY_PUSH_TREE (tf->dest_array, lab);
516 tf->may_return = true;
/* Grow the goto queue geometrically as needed.  */
524 active = tf->goto_queue_active;
525 size = tf->goto_queue_size;
528 size = (size ? size * 2 : 32);
529 tf->goto_queue_size = size;
531 = xrealloc (tf->goto_queue, size * sizeof (struct goto_queue_node));
534 q = &tf->goto_queue[active];
535 tf->goto_queue_active = active + 1;
537 memset (q, 0, sizeof (*q));
542 #ifdef ENABLE_CHECKING
543 /* We do not process SWITCH_EXPRs for now. As long as the original source
544 was in fact structured, and we've not yet done jump threading, then none
545 of the labels will leave outer TRY_FINALLY_EXPRs. Verify this. */
/* Checking-only sanity pass over a SWITCH_EXPR's labels; compiled to
   nothing (the empty macro below) when ENABLE_CHECKING is off.
   NOTE(review): fragment -- the function header, the declarations of
   VEC/N/I and the early return when TF is null are elided.  */
548 verify_norecord_switch_expr (struct leh_state *state, tree switch_expr)
550 struct leh_tf_state *tf = state->tf;
557 vec = SWITCH_LABELS (switch_expr);
558 n = TREE_VEC_LENGTH (vec);
560 for (i = 0; i < n; ++i)
562 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
563 gcc_assert (!outside_finally_tree (lab, tf->try_finally_expr));
567 #define verify_norecord_switch_expr(state, switch_expr)
570 /* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB. Place in CONT_P
571 whatever is needed to finish the return. If MOD is non-null, insert it
572 before the new branch. RETURN_VALUE_P is a cache containing a temporary
573 variable to be used in manipulating the value returned from the function. */
/* NOTE(review): fragment -- the function braces, the no-return-value
   early path, the switch's case labels and its default handling, and a
   large part of the explanatory comment (original lines 586-598) are
   elided from this listing.  */
576 do_return_redirection (struct goto_queue_node *q, tree finlab, tree mod,
577 tree *return_value_p)
579 tree ret_expr = TREE_OPERAND (q->stmt, 0);
584 /* The nasty part about redirecting the return value is that the
585 return value itself is to be computed before the FINALLY block
599 should return 0, not 1. Arrange for this to happen by copying
600 computed the return value into a local temporary. This also
601 allows us to redirect multiple return statements through the
602 same destination block; whether this is a net win or not really
603 depends, I guess, but it does make generation of the switch in
604 lower_try_finally_switch easier. */
606 switch (TREE_CODE (ret_expr))
/* Returning the result decl directly: cache it as the canonical
   return value and reuse the original statement as continuation.  */
609 if (!*return_value_p)
610 *return_value_p = ret_expr;
612 gcc_assert (*return_value_p == ret_expr);
613 q->cont_stmt = q->stmt;
618 tree result = TREE_OPERAND (ret_expr, 0);
619 tree new, old = TREE_OPERAND (ret_expr, 1);
621 if (!*return_value_p)
623 if (aggregate_value_p (TREE_TYPE (result),
624 TREE_TYPE (current_function_decl)))
625 /* If this function returns in memory, copy the argument
626 into the return slot now. Otherwise, we might need to
627 worry about magic return semantics, so we need to use a
628 temporary to hold the value until we're actually ready
632 new = create_tmp_var (TREE_TYPE (old), "rettmp");
633 *return_value_p = new;
636 new = *return_value_p;
/* Stash the computed value into the temporary before the branch...  */
638 x = build (MODIFY_EXPR, TREE_TYPE (new), new, old);
639 append_to_statement_list (x, &q->repl_stmt);
/* ... and rebuild the real return from it as the continuation.  */
644 x = build (MODIFY_EXPR, TREE_TYPE (result), result, new);
645 q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
654 /* If we don't return a value, all return statements are the same. */
655 q->cont_stmt = q->stmt;
659 append_to_statement_list (mod, &q->repl_stmt);
661 x = build1 (GOTO_EXPR, void_type_node, finlab);
662 append_to_statement_list (x, &q->repl_stmt);
665 /* Similar, but easier, for GOTO_EXPR. */
668 do_goto_redirection (struct goto_queue_node *q, tree finlab, tree mod)
672 q->cont_stmt = q->stmt;
674 append_to_statement_list (mod, &q->repl_stmt);
676 x = build1 (GOTO_EXPR, void_type_node, finlab);
677 append_to_statement_list (x, &q->repl_stmt);
680 /* We want to transform
681 try { body; } catch { stuff; }
683 body; goto over; lab: stuff; over:
685 T is a TRY_FINALLY or TRY_CATCH node. LAB is the label that
686 should be placed before the second operand, or NULL. OVER is
687 an existing label that should be put at the exit, or NULL. */
690 frob_into_branch_around (tree *tp, tree lab, tree over)
694 op1 = TREE_OPERAND (*tp, 1);
695 *tp = TREE_OPERAND (*tp, 0);
697 if (block_may_fallthru (*tp))
700 over = create_artificial_label ();
701 x = build1 (GOTO_EXPR, void_type_node, over);
702 append_to_statement_list (x, tp);
707 x = build1 (LABEL_EXPR, void_type_node, lab);
708 append_to_statement_list (x, tp);
711 append_to_statement_list (op1, tp);
715 x = build1 (LABEL_EXPR, void_type_node, over);
716 append_to_statement_list (x, tp);
720 /* A subroutine of lower_try_finally. Duplicate the tree rooted at T.
721 Make sure to record all new labels found. */
/* NOTE(review): fragment -- the function header, the REGION
   declaration/guard and the final return of the duplicated tree are
   elided from this listing.  */
724 lower_try_finally_dup_block (tree t, struct leh_state *outer_state)
728 t = unsave_expr_now (t);
/* Labels in the copy must be visible to outside_finally_tree, so the
   duplicate is re-walked by collect_finally_tree.  */
731 region = outer_state->tf->try_finally_expr;
732 collect_finally_tree (t, region);
737 /* A subroutine of lower_try_finally. Create a fallthru label for
738 the given try_finally state. The only tricky bit here is that
739 we have to make sure to record the label in our outer context. */
742 lower_try_finally_fallthru_label (struct leh_tf_state *tf)
744 tree label = tf->fallthru_label;
747 label = create_artificial_label ();
748 tf->fallthru_label = label;
750 record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
755 /* A subroutine of lower_try_finally. If lang_protect_cleanup_actions
756 returns non-null, then the language requires that the exception path out
757 of a try_finally be treated specially. To wit: the code within the
758 finally block may not itself throw an exception. We have two choices here.
759 First we can duplicate the finally block and wrap it in a must_not_throw
760 region. Second, we can generate code like
765 if (fintmp == eh_edge)
766 protect_cleanup_actions;
769 where "fintmp" is the temporary used in the switch statement generation
770 alternative considered below. For the nonce, we always choose the first
773 THIS_STATE may be null if this is a try-cleanup, not a try-finally. */
/* NOTE(review): lines are elided throughout this function (including
   parts of the header comment, the function's return type and braces,
   an else branch around line 787, and the example in the comment at
   original lines 794-805); confirm against the full source before
   editing.  */
776 honor_protect_cleanup_actions (struct leh_state *outer_state,
777 struct leh_state *this_state,
778 struct leh_tf_state *tf)
780 tree protect_cleanup_actions, finally, x;
781 tree_stmt_iterator i;
782 bool finally_may_fallthru;
784 /* First check for nothing to do. */
785 if (lang_protect_cleanup_actions)
786 protect_cleanup_actions = lang_protect_cleanup_actions ();
788 protect_cleanup_actions = NULL;
790 finally = TREE_OPERAND (*tf->top_p, 1);
792 /* If the EH case of the finally block can fall through, this may be a
793 structure of the form
806 E.g. with an inline destructor with an embedded try block. In this
807 case we must save the runtime EH data around the nested exception.
809 This complication means that any time the previous runtime data might
810 be used (via fallthru from the finally) we handle the eh case here,
811 whether or not protect_cleanup_actions is active. */
813 finally_may_fallthru = block_may_fallthru (finally);
814 if (!finally_may_fallthru && !protect_cleanup_actions)
817 /* Duplicate the FINALLY block. Only need to do this for try-finally,
818 and not for cleanups. */
820 finally = lower_try_finally_dup_block (finally, outer_state);
822 /* Resume execution after the exception. Adding this now lets
823 lower_eh_filter not add unnecessary gotos, as it is clear that
824 we never fallthru from this copy of the finally block. */
825 if (finally_may_fallthru)
827 tree save_eptr, save_filt;
/* Save the runtime exception pointer and filter value on entry to the
   duplicated finally block...  */
829 save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
830 save_filt = create_tmp_var (integer_type_node, "save_filt");
832 i = tsi_start (finally);
833 x = build (EXC_PTR_EXPR, ptr_type_node);
834 x = build (MODIFY_EXPR, void_type_node, save_eptr, x);
835 tsi_link_before (&i, x, TSI_CONTINUE_LINKING);
837 x = build (FILTER_EXPR, integer_type_node);
838 x = build (MODIFY_EXPR, void_type_node, save_filt, x);
839 tsi_link_before (&i, x, TSI_CONTINUE_LINKING);
/* ... restore them at the end, then resume the propagation that this
   finally block interrupted.  */
841 i = tsi_last (finally);
842 x = build (EXC_PTR_EXPR, ptr_type_node);
843 x = build (MODIFY_EXPR, void_type_node, x, save_eptr);
844 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
846 x = build (FILTER_EXPR, integer_type_node);
847 x = build (MODIFY_EXPR, void_type_node, x, save_filt);
848 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
850 x = build_resx (get_eh_region_number (tf->region));
851 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
854 /* Wrap the block with protect_cleanup_actions as the action. */
855 if (protect_cleanup_actions)
857 x = build (EH_FILTER_EXPR, void_type_node, NULL, NULL);
858 append_to_statement_list (protect_cleanup_actions, &EH_FILTER_FAILURE (x));
859 EH_FILTER_MUST_NOT_THROW (x) = 1;
860 finally = build (TRY_CATCH_EXPR, void_type_node, finally, x);
861 lower_eh_filter (outer_state, &finally);
864 lower_eh_constructs_1 (outer_state, &finally);
866 /* Hook this up to the end of the existing try block. If we
867 previously fell through the end, we'll have to branch around.
868 This means adding a new goto, and adding it to the queue. */
870 i = tsi_last (TREE_OPERAND (*tf->top_p, 0));
872 if (tf->may_fallthru)
874 x = lower_try_finally_fallthru_label (tf);
875 x = build1 (GOTO_EXPR, void_type_node, x);
876 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
879 maybe_record_in_goto_queue (this_state, x);
881 tf->may_fallthru = false;
884 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
885 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
886 tsi_link_after (&i, finally, TSI_CONTINUE_LINKING);
888 /* Having now been handled, EH isn't to be considered with
889 the rest of the outgoing edges. */
890 tf->may_throw = false;
893 /* A subroutine of lower_try_finally. We have determined that there is
894 no fallthru edge out of the finally block. This means that there is
895 no outgoing edge corresponding to any incoming edge. Restructure the
896 try_finally node for this special case. */
/* NOTE(review): fragment -- the function header, the return_val
   initialization, the goto-queue loop header and the q->index test
   selecting between return and goto redirection are elided from this
   listing.  */
899 lower_try_finally_nofallthru (struct leh_state *state, struct leh_tf_state *tf)
901 tree x, finally, lab, return_val;
902 struct goto_queue_node *q, *qe;
907 lab = create_artificial_label ();
/* Detach the finally block and splice the try body into place, then
   terminate it with the shared redirection label.  */
909 finally = TREE_OPERAND (*tf->top_p, 1);
910 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
912 x = build1 (LABEL_EXPR, void_type_node, lab);
913 append_to_statement_list (x, tf->top_p);
917 qe = q + tf->goto_queue_active;
920 do_return_redirection (q, lab, NULL, &return_val);
922 do_goto_redirection (q, lab, NULL);
924 replace_goto_queue (tf);
926 lower_eh_constructs_1 (state, &finally);
927 append_to_statement_list (finally, tf->top_p);
930 /* A subroutine of lower_try_finally. We have determined that there is
931 exactly one destination of the finally block. Restructure the
932 try_finally node for this special case. */
/* NOTE(review): fragment -- the function header, the branch structure
   selecting among the throw/fallthru/return/goto cases, the early
   returns, and the goto-queue setup are elided from this listing.  */
935 lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
937 struct goto_queue_node *q, *qe;
938 tree x, finally, finally_label;
940 finally = TREE_OPERAND (*tf->top_p, 1);
941 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
943 lower_eh_constructs_1 (state, &finally);
947 /* Only reachable via the exception edge. Add the given label to
948 the head of the FINALLY block. Append a RESX at the end. */
950 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
951 append_to_statement_list (x, tf->top_p);
953 append_to_statement_list (finally, tf->top_p);
955 x = build_resx (get_eh_region_number (tf->region));
957 append_to_statement_list (x, tf->top_p);
962 if (tf->may_fallthru)
964 /* Only reachable via the fallthru edge. Do nothing but let
965 the two blocks run together; we'll fall out the bottom. */
966 append_to_statement_list (finally, tf->top_p);
/* Otherwise, the single destination is a redirected return or goto:
   label the finally block and send all queue entries to it.  */
970 finally_label = create_artificial_label ();
971 x = build1 (LABEL_EXPR, void_type_node, finally_label);
972 append_to_statement_list (x, tf->top_p);
974 append_to_statement_list (finally, tf->top_p);
977 qe = q + tf->goto_queue_active;
981 /* Reachable by return expressions only. Redirect them. */
982 tree return_val = NULL;
984 do_return_redirection (q, finally_label, NULL, &return_val);
985 replace_goto_queue (tf);
989 /* Reachable by goto expressions only. Redirect them. */
991 do_goto_redirection (q, finally_label, NULL);
992 replace_goto_queue (tf);
994 if (VARRAY_TREE (tf->dest_array, 0) == tf->fallthru_label)
996 /* Reachable by goto to fallthru label only. Redirect it
997 to the new label (already created, sadly), and do not
998 emit the final branch out, or the fallthru label. */
999 tf->fallthru_label = NULL;
1004 append_to_statement_list (tf->goto_queue[0].cont_stmt, tf->top_p);
1005 maybe_record_in_goto_queue (state, tf->goto_queue[0].cont_stmt);
1008 /* A subroutine of lower_try_finally. There are multiple edges incoming
1009 and outgoing from the finally block. Implement this by duplicating the
1010 finally block for every destination. */
/* NOTE(review): fragment -- the function header, the declaration of X
   and the LABELS helper-struct/array declaration, the loop headers for
   the first queue walk, and several braces/continue statements are
   elided from this listing.  */
1013 lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
1015 tree finally, new_stmt;
1018 finally = TREE_OPERAND (*tf->top_p, 1);
1019 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
1021 new_stmt = NULL_TREE;
/* One copy of FINALLY for the fallthru edge...  */
1023 if (tf->may_fallthru)
1025 x = lower_try_finally_dup_block (finally, state);
1026 lower_eh_constructs_1 (state, &x);
1027 append_to_statement_list (x, &new_stmt);
1029 x = lower_try_finally_fallthru_label (tf);
1030 x = build1 (GOTO_EXPR, void_type_node, x);
1031 append_to_statement_list (x, &new_stmt);
/* ... one for the exception edge, ending in a RESX...  */
1036 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
1037 append_to_statement_list (x, &new_stmt);
1039 x = lower_try_finally_dup_block (finally, state);
1040 lower_eh_constructs_1 (state, &x);
1041 append_to_statement_list (x, &new_stmt);
1043 x = build_resx (get_eh_region_number (tf->region));
1044 append_to_statement_list (x, &new_stmt);
/* ... and one per unique goto/return destination.  */
1049 struct goto_queue_node *q, *qe;
1050 tree return_val = NULL;
1051 int return_index, index;
1054 struct goto_queue_node *q;
1059 return_index = VARRAY_ACTIVE_SIZE (tf->dest_array);
1062 labels = xcalloc (sizeof (*labels), return_index + 1);
/* First pass: remember the first queue entry for each destination.  */
1065 qe = q + tf->goto_queue_active;
1068 index = q->index < 0 ? return_index : q->index;
1070 if (!labels[index].q)
1071 labels[index].q = q;
/* Emit a labeled copy of FINALLY for each destination seen.  */
1074 for (index = 0; index < return_index + 1; index++)
1078 q = labels[index].q;
1082 lab = labels[index].label = create_artificial_label ();
1084 if (index == return_index)
1085 do_return_redirection (q, lab, NULL, &return_val);
1087 do_goto_redirection (q, lab, NULL);
1089 x = build1 (LABEL_EXPR, void_type_node, lab);
1090 append_to_statement_list (x, &new_stmt);
1092 x = lower_try_finally_dup_block (finally, state);
1093 lower_eh_constructs_1 (state, &x);
1094 append_to_statement_list (x, &new_stmt);
1096 append_to_statement_list (q->cont_stmt, &new_stmt);
1097 maybe_record_in_goto_queue (state, q->cont_stmt);
/* Second pass: redirect the remaining queue entries to the labels
   created above.  */
1100 for (q = tf->goto_queue; q < qe; q++)
1104 index = q->index < 0 ? return_index : q->index;
1106 if (labels[index].q == q)
1109 lab = labels[index].label;
1111 if (index == return_index)
1112 do_return_redirection (q, lab, NULL, &return_val);
1114 do_goto_redirection (q, lab, NULL);
1117 replace_goto_queue (tf);
1121 /* Need to link new stmts after running replace_goto_queue due
1122 to not wanting to process the same goto stmts twice. */
1123 append_to_statement_list (new_stmt, tf->top_p);
1126 /* A subroutine of lower_try_finally. There are multiple edges incoming
1127 and outgoing from the finally block. Implement this by instrumenting
1128 each incoming edge and creating a switch statement at the end of the
1129 finally block that branches to the appropriate destination. */
/* NOTE(review): fragment -- the function header, the declarations of X
   and of the per-entry MOD/CONT_STMT/LABEL variables, several braces,
   increments of last_case_index/j, and the tail of the CASE_LABEL_EXPR
   build around original line 1261 are elided from this listing.  */
1132 lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
1134 struct goto_queue_node *q, *qe;
1135 tree return_val = NULL;
1136 tree finally, finally_tmp, finally_label;
1137 int return_index, eh_index, fallthru_index;
1138 int nlabels, ndests, j, last_case_index;
1139 tree case_label_vec, switch_stmt, last_case, switch_body;
1142 /* Mash the TRY block to the head of the chain. */
1143 finally = TREE_OPERAND (*tf->top_p, 1);
1144 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
1146 /* Lower the finally block itself. */
1147 lower_eh_constructs_1 (state, &finally);
1149 /* Prepare for switch statement generation. */
/* Index layout: goto destinations first, then (optionally) return,
   eh, and fallthru slots, each present only if the corresponding
   may_* flag is set.  */
1151 nlabels = VARRAY_ACTIVE_SIZE (tf->dest_array);
1154 return_index = nlabels;
1155 eh_index = return_index + tf->may_return;
1156 fallthru_index = eh_index + tf->may_throw;
1157 ndests = fallthru_index + tf->may_fallthru;
1159 finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
1160 finally_label = create_artificial_label ();
1162 case_label_vec = make_tree_vec (ndests);
1163 switch_stmt = build (SWITCH_EXPR, integer_type_node, finally_tmp,
1164 NULL_TREE, case_label_vec);
1167 last_case_index = 0;
1169 /* Begin inserting code for getting to the finally block. Things
1170 are done in this order to correspond to the sequence the code is
/* Fallthru edge: set the discriminator and jump to the finally.  */
1173 if (tf->may_fallthru)
1175 x = build (MODIFY_EXPR, void_type_node, finally_tmp,
1176 build_int_cst (NULL_TREE, fallthru_index));
1177 append_to_statement_list (x, tf->top_p);
1181 x = build1 (GOTO_EXPR, void_type_node, finally_label);
1182 append_to_statement_list (x, tf->top_p);
1186 last_case = build (CASE_LABEL_EXPR, void_type_node,
1187 build_int_cst (NULL_TREE, fallthru_index), NULL,
1188 create_artificial_label ());
1189 TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
1192 x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
1193 append_to_statement_list (x, &switch_body);
1195 x = lower_try_finally_fallthru_label (tf);
1196 x = build1 (GOTO_EXPR, void_type_node, x);
1197 append_to_statement_list (x, &switch_body);
/* Exception edge: the case body resumes propagation with a RESX.  */
1202 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
1203 append_to_statement_list (x, tf->top_p);
1205 x = build (MODIFY_EXPR, void_type_node, finally_tmp,
1206 build_int_cst (NULL_TREE, eh_index));
1207 append_to_statement_list (x, tf->top_p);
1209 last_case = build (CASE_LABEL_EXPR, void_type_node,
1210 build_int_cst (NULL_TREE, eh_index), NULL,
1211 create_artificial_label ());
1212 TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
1215 x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
1216 append_to_statement_list (x, &switch_body);
1217 x = build_resx (get_eh_region_number (tf->region));
1218 append_to_statement_list (x, &switch_body);
1221 x = build1 (LABEL_EXPR, void_type_node, finally_label);
1222 append_to_statement_list (x, tf->top_p);
1224 append_to_statement_list (finally, tf->top_p);
1226 /* Redirect each incoming goto edge. */
1228 qe = q + tf->goto_queue_active;
1229 j = last_case_index + tf->may_return;
1233 int switch_id, case_index;
1237 mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
1238 build_int_cst (NULL_TREE, return_index));
1239 do_return_redirection (q, finally_label, mod, &return_val);
1240 switch_id = return_index;
1244 mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
1245 build_int_cst (NULL_TREE, q->index));
1246 do_goto_redirection (q, finally_label, mod);
1247 switch_id = q->index;
1250 case_index = j + q->index;
1251 if (!TREE_VEC_ELT (case_label_vec, case_index))
1252 TREE_VEC_ELT (case_label_vec, case_index)
1253 = build (CASE_LABEL_EXPR, void_type_node,
1254 build_int_cst (NULL_TREE, switch_id), NULL,
1255 /* We store the cont_stmt in the
1256 CASE_LABEL, so that we can recover it
1257 in the loop below. We don't create
1258 the new label while walking the
1259 goto_queue because pointers don't
1260 offer a stable order. */
/* Now that the case order is fixed, swap each stashed cont_stmt out
   of CASE_LABEL for a real label and emit the continuation.  */
1263 for (j = last_case_index; j < last_case_index + nlabels; j++)
1268 last_case = TREE_VEC_ELT (case_label_vec, j);
1270 gcc_assert (last_case);
1272 cont_stmt = CASE_LABEL (last_case);
1274 label = create_artificial_label ();
1275 CASE_LABEL (last_case) = label;
1277 x = build (LABEL_EXPR, void_type_node, label);
1278 append_to_statement_list (x, &switch_body);
1279 append_to_statement_list (cont_stmt, &switch_body);
1280 maybe_record_in_goto_queue (state, cont_stmt);
1282 replace_goto_queue (tf);
1284 /* Make sure that the last case is the default label, as one is required.
1285 Then sort the labels, which is also required in GIMPLE. */
1286 CASE_LOW (last_case) = NULL;
1287 sort_case_labels (case_label_vec);
1289 /* Need to link switch_stmt after running replace_goto_queue due
1290 to not wanting to process the same goto stmts twice. */
1291 append_to_statement_list (switch_stmt, tf->top_p);
1292 append_to_statement_list (switch_body, tf->top_p);
1295 /* Decide whether or not we are going to duplicate the finally block.
1296 There are several considerations.
1298 First, if this is Java, then the finally block contains code
1299 written by the user. It has line numbers associated with it,
1300 so duplicating the block means it's difficult to set a breakpoint.
1301 Since controlling code generation via -g is verboten, we simply
1302 never duplicate code without optimization.
1304 Second, we'd like to prevent egregious code growth. One way to
1305 do this is to estimate the size of the finally block, multiply
1306 that by the number of copies we'd need to make, and compare against
1307 the estimate of the size of the switch machinery we'd have to add. */
/* NOTE(review): fragment -- the function's return type, braces, and
   the if-conditions selecting among the three return expressions
   (presumably tests of the optimization level, per the comment above)
   are elided from this listing.  */
1310 decide_copy_try_finally (int ndests, tree finally)
1312 int f_estimate, sw_estimate;
1317 /* Finally estimate N times, plus N gotos. */
1318 f_estimate = estimate_num_insns (finally);
1319 f_estimate = (f_estimate + 1) * ndests;
1321 /* Switch statement (cost 10), N variable assignments, N gotos. */
1322 sw_estimate = 10 + 2 * ndests;
1324 /* Optimize for size clearly wants our best guess. */
1326 return f_estimate < sw_estimate;
1328 /* ??? These numbers are completely made up so far. */
1330 return f_estimate < 100 || f_estimate < sw_estimate * 2;
1332 return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
1335 /* A subroutine of lower_eh_constructs_1. Lower a TRY_FINALLY_EXPR nodes
1336 to a sequence of labels and blocks, plus the exception region trees
1337 that record all the magic. This is complicated by the need to
1338 arrange for the FINALLY block to be executed on all exits. */
/* NOTE(review): elided listing -- braces, else-branches and some statements
   between the numbered lines are missing; do not assume the visible
   statements are contiguous.  */
1341 lower_try_finally (struct leh_state *state, tree *tp)
1343 struct leh_tf_state this_tf;
1344 struct leh_state this_state;
/* Set up per-try-finally lowering state; *tp is the TRY_FINALLY_EXPR.  */
1347 /* Process the try block. */
1349 memset (&this_tf, 0, sizeof (this_tf));
1350 this_tf.try_finally_expr = *tp;
1352 this_tf.outer = state;
1353 if (using_eh_for_cleanups_p)
1355 = gen_eh_region_cleanup (state->cur_region, state->prev_try);
1357 this_tf.region = NULL;
1359 this_state.cur_region = this_tf.region;
1360 this_state.prev_try = state->prev_try;
1361 this_state.tf = &this_tf;
/* Recursively lower the try body (operand 0).  */
1363 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
1365 /* Determine if the try block is escaped through the bottom. */
1366 this_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
1368 /* Determine if any exceptions are possible within the try block. */
1369 if (using_eh_for_cleanups_p)
1370 this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region);
1371 if (this_tf.may_throw)
1373 this_tf.eh_label = create_artificial_label ();
1374 set_eh_region_tree_label (this_tf.region, this_tf.eh_label);
1375 honor_protect_cleanup_actions (state, &this_state, &this_tf);
1378 /* Sort the goto queue for efficient searching later. */
1379 if (this_tf.goto_queue_active > 1)
1380 qsort (this_tf.goto_queue, this_tf.goto_queue_active,
1381 sizeof (struct goto_queue_node), goto_queue_cmp);
1383 /* Determine how many edges (still) reach the finally block. Or rather,
1384 how many destinations are reached by the finally block. Use this to
1385 determine how we process the finally block itself. */
1387 if (this_tf.dest_array)
1388 ndests = VARRAY_ACTIVE_SIZE (this_tf.dest_array);
1391 ndests += this_tf.may_fallthru;
1392 ndests += this_tf.may_return;
1393 ndests += this_tf.may_throw;
1395 /* If the FINALLY block is not reachable, dike it out. */
1397 *tp = TREE_OPERAND (*tp, 0);
1399 /* If the finally block doesn't fall through, then any destination
1400 we might try to impose there isn't reached either. There may be
1401 some minor amount of cleanup and redirection still needed. */
1402 else if (!block_may_fallthru (TREE_OPERAND (*tp, 1)))
1403 lower_try_finally_nofallthru (state, &this_tf);
1405 /* We can easily special-case redirection to a single destination. */
1406 else if (ndests == 1)
1407 lower_try_finally_onedest (state, &this_tf);
/* Otherwise either duplicate the finally per destination or build a
   dispatch switch, per the size heuristic above.  */
1409 else if (decide_copy_try_finally (ndests, TREE_OPERAND (*tp, 1)))
1410 lower_try_finally_copy (state, &this_tf);
1412 lower_try_finally_switch (state, &this_tf);
1414 /* If someone requested we add a label at the end of the transformed
1416 if (this_tf.fallthru_label)
1418 tree x = build1 (LABEL_EXPR, void_type_node, this_tf.fallthru_label);
1419 append_to_statement_list (x, tp);
/* The goto queue was malloc'd during lowering; release it here.  */
1422 if (this_tf.goto_queue)
1423 free (this_tf.goto_queue);
1426 /* A subroutine of lower_eh_constructs_1. Lower a TRY_CATCH_EXPR with a
1427 list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
1428 exception region trees that record all the magic. */
/* NOTE(review): elided listing -- intervening lines (braces, out_label
   declaration, tsi_delink of the catch handler, etc.) are missing.  */
1431 lower_catch (struct leh_state *state, tree *tp)
1433 struct eh_region *try_region;
1434 struct leh_state this_state;
1435 tree_stmt_iterator i;
/* Open a TRY region and lower the protected body (operand 0) inside it.  */
1438 try_region = gen_eh_region_try (state->cur_region);
1439 this_state.cur_region = try_region;
1440 this_state.prev_try = try_region;
1441 this_state.tf = state->tf;
1443 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
/* If nothing in the body can throw, the whole construct reduces to the
   body alone.  */
1445 if (!get_eh_region_may_contain_throw (try_region))
1447 *tp = TREE_OPERAND (*tp, 0);
/* Walk each CATCH_EXPR handler in the handler list (operand 1).  */
1452 for (i = tsi_start (TREE_OPERAND (*tp, 1)); !tsi_end_p (i); )
1454 struct eh_region *catch_region;
1455 tree catch, x, eh_label;
1457 catch = tsi_stmt (i);
1458 catch_region = gen_eh_region_catch (try_region, CATCH_TYPES (catch));
/* Handler bodies nest in the catch region, but a throw from within a
   handler unwinds to the enclosing try, not this one.  */
1460 this_state.cur_region = catch_region;
1461 this_state.prev_try = state->prev_try;
1462 lower_eh_constructs_1 (&this_state, &CATCH_BODY (catch));
1464 eh_label = create_artificial_label ();
1465 set_eh_region_tree_label (catch_region, eh_label);
1467 x = build1 (LABEL_EXPR, void_type_node, eh_label);
1468 tsi_link_before (&i, x, TSI_SAME_STMT);
/* If the handler can fall off its end, branch to the shared out label.  */
1470 if (block_may_fallthru (CATCH_BODY (catch)))
1473 out_label = create_artificial_label ();
1475 x = build1 (GOTO_EXPR, void_type_node, out_label);
1476 append_to_statement_list (x, &CATCH_BODY (catch));
1479 tsi_link_before (&i, CATCH_BODY (catch), TSI_SAME_STMT);
1483 frob_into_branch_around (tp, NULL, out_label);
1486 /* A subroutine of lower_eh_constructs_1. Lower a TRY_CATCH_EXPR with a
1487 EH_FILTER_EXPR to a sequence of labels and blocks, plus the exception
1488 region trees that record all the magic. */
/* NOTE(review): elided listing -- the else keyword before line 1501, the
   eh_label declaration, and surrounding braces are among the missing
   lines.  */
1491 lower_eh_filter (struct leh_state *state, tree *tp)
1493 struct leh_state this_state;
1494 struct eh_region *this_region;
1495 tree inner = expr_first (TREE_OPERAND (*tp, 1));
/* MUST_NOT_THROW filters get their own region kind; otherwise build an
   "allowed exceptions" region from the filter's type list.  */
1498 if (EH_FILTER_MUST_NOT_THROW (inner))
1499 this_region = gen_eh_region_must_not_throw (state->cur_region);
1501 this_region = gen_eh_region_allowed (state->cur_region,
1502 EH_FILTER_TYPES (inner));
1503 this_state = *state;
1504 this_state.cur_region = this_region;
1506 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
/* Nothing can throw: the construct reduces to the protected body.  */
1508 if (!get_eh_region_may_contain_throw (this_region))
1510 *tp = TREE_OPERAND (*tp, 0);
/* Lower the failure action in the OUTER state and make it the handler.  */
1514 lower_eh_constructs_1 (state, &EH_FILTER_FAILURE (inner));
1515 TREE_OPERAND (*tp, 1) = EH_FILTER_FAILURE (inner);
1517 eh_label = create_artificial_label ();
1518 set_eh_region_tree_label (this_region, eh_label);
1520 frob_into_branch_around (tp, eh_label, NULL);
1523 /* Implement a cleanup expression. This is similar to try-finally,
1524 except that we only execute the cleanup block for exception edges. */
/* NOTE(review): elided listing -- returns, braces and else keywords between
   the numbered lines are missing from this view.  */
1527 lower_cleanup (struct leh_state *state, tree *tp)
1529 struct leh_state this_state;
1530 struct eh_region *this_region;
1531 struct leh_tf_state fake_tf;
1533 /* If not using eh, then exception-only cleanups are no-ops. */
1534 if (!flag_exceptions)
1536 *tp = TREE_OPERAND (*tp, 0);
1537 lower_eh_constructs_1 (state, tp);
/* Open a cleanup region and lower the protected body inside it.  */
1541 this_region = gen_eh_region_cleanup (state->cur_region, state->prev_try);
1542 this_state = *state;
1543 this_state.cur_region = this_region;
1545 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
/* If the body cannot throw, the cleanup is dead; keep just the body.  */
1547 if (!get_eh_region_may_contain_throw (this_region))
1549 *tp = TREE_OPERAND (*tp, 0);
1553 /* Build enough of a try-finally state so that we can reuse
1554 honor_protect_cleanup_actions. */
1555 memset (&fake_tf, 0, sizeof (fake_tf));
1557 fake_tf.outer = state;
1558 fake_tf.region = this_region;
1559 fake_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
1560 fake_tf.may_throw = true;
1562 fake_tf.eh_label = create_artificial_label ();
1563 set_eh_region_tree_label (this_region, fake_tf.eh_label);
/* honor_protect_cleanup_actions may consume the cleanup, clearing
   may_throw as its signal that it did so.  */
1565 honor_protect_cleanup_actions (state, NULL, &fake_tf);
1567 if (fake_tf.may_throw)
1569 /* In this case honor_protect_cleanup_actions had nothing to do,
1570 and we should process this normally. */
1571 lower_eh_constructs_1 (state, &TREE_OPERAND (*tp, 1));
1572 frob_into_branch_around (tp, fake_tf.eh_label, fake_tf.fallthru_label);
1576 /* In this case honor_protect_cleanup_actions did nearly all of
1577 the work. All we have left is to append the fallthru_label. */
1579 *tp = TREE_OPERAND (*tp, 0);
1580 if (fake_tf.fallthru_label)
1582 tree x = build1 (LABEL_EXPR, void_type_node, fake_tf.fallthru_label);
1583 append_to_statement_list (x, tp);
1588 /* Main loop for lowering eh constructs. */
/* Dispatches on TREE_CODE (*tp) and rewrites EH constructs in place.
   NOTE(review): elided listing -- several case labels (e.g. the ones
   introducing the two "can throw" arms around lines 1604 and 1613),
   break statements and braces are missing between the numbered lines,
   so the visible switch arms are not contiguous.  */
1591 lower_eh_constructs_1 (struct leh_state *state, tree *tp)
1593 tree_stmt_iterator i;
1596 switch (TREE_CODE (t))
/* Conditionals: recurse into both arms.  */
1599 lower_eh_constructs_1 (state, &COND_EXPR_THEN (t));
1600 lower_eh_constructs_1 (state, &COND_EXPR_ELSE (t));
1604 /* Look for things that can throw exceptions, and record them. */
1605 if (state->cur_region && tree_could_throw_p (t))
1607 record_stmt_eh_region (state->cur_region, t);
1608 note_eh_region_may_contain_throw (state->cur_region);
1613 /* Look for things that can throw exceptions, and record them. */
1614 if (state->cur_region && tree_could_throw_p (t))
1618 record_stmt_eh_region (state->cur_region, t);
1619 note_eh_region_may_contain_throw (state->cur_region);
1621 /* ??? For the benefit of calls.c, converting all this to rtl,
1622 we need to record the call expression, not just the outer
1623 modify statement. */
1624 op = get_call_expr_in (t);
1626 record_stmt_eh_region (state->cur_region, op);
/* Gotos/returns may need redirection through an enclosing finally.  */
1632 maybe_record_in_goto_queue (state, t);
1635 verify_norecord_switch_expr (state, t);
1638 case TRY_FINALLY_EXPR:
1639 lower_try_finally (state, tp);
/* TRY_CATCH_EXPR: the first statement of the handler list decides
   whether this is a real catch list, an EH filter, or a cleanup.  */
1642 case TRY_CATCH_EXPR:
1643 i = tsi_start (TREE_OPERAND (t, 1));
1644 switch (TREE_CODE (tsi_stmt (i)))
1647 lower_catch (state, tp);
1649 case EH_FILTER_EXPR:
1650 lower_eh_filter (state, tp);
1653 lower_cleanup (state, tp);
1658 case STATEMENT_LIST:
1659 for (i = tsi_start (t); !tsi_end_p (i); )
1661 lower_eh_constructs_1 (state, tsi_stmt_ptr (i));
/* A lowered element may itself have become a STATEMENT_LIST;
   splice it inline rather than nesting lists.  */
1663 if (TREE_CODE (t) == STATEMENT_LIST)
1665 tsi_link_before (&i, t, TSI_SAME_STMT);
1674 /* A type, a decl, or some kind of statement that we're not
1675 interested in. Don't walk them. */
/* Pass entry point: lower all EH constructs in the current function's
   saved tree.  NOTE(review): elided listing -- the return type line, the
   second argument of htab_create_ggc, and closing braces are missing.  */
1681 lower_eh_constructs (void)
1683 struct leh_state null_state;
1684 tree *tp = &DECL_SAVED_TREE (current_function_decl);
/* finally_tree maps statements to enclosing TRY_FINALLY nodes; the
   throw-stmt table is GC-allocated since it must survive to RTL.  */
1686 finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
1687 set_eh_throw_stmt_table (cfun, htab_create_ggc (31, struct_ptr_hash,
1691 collect_finally_tree (*tp, NULL);
/* Start lowering with an empty (outermost) state.  */
1693 memset (&null_state, 0, sizeof (null_state));
1694 lower_eh_constructs_1 (&null_state, tp);
1696 htab_delete (finally_tree);
1698 collect_eh_region_array ();
/* Pass descriptor for the EH lowering pass.  NOTE(review): elided
   listing -- several initializer fields (name, gate, sub/next pass
   pointers, etc.) are missing between the numbered lines.  */
1701 struct tree_opt_pass pass_lower_eh =
1705 lower_eh_constructs, /* execute */
1708 0, /* static_pass_number */
1709 TV_TREE_EH, /* tv_id */
1710 PROP_gimple_lcf, /* properties_required */
1711 PROP_gimple_leh, /* properties_provided */
1712 PROP_gimple_lcf, /* properties_destroyed */
1713 0, /* todo_flags_start */
1714 TODO_dump_func, /* todo_flags_finish */
1719 /* Construct EH edges for STMT. */
/* Callback for foreach_reachable_handler: add one abnormal EH edge from
   STMT's block to REGION's handler label block.  DATA is the statement.
   NOTE(review): elided listing -- declarations of lab/stmt and braces
   are missing between the numbered lines.  */
1722 make_eh_edge (struct eh_region *region, void *data)
1725 basic_block src, dst;
1728 lab = get_eh_region_tree_label (region);
1730 src = bb_for_stmt (stmt);
1731 dst = label_to_block (lab);
1733 make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
/* Create all EH edges reachable from STMT.  A RESX_EXPR names its region
   directly via its operand; any other statement is looked up in the
   throw-stmt table.  NOTE(review): elided listing -- the is_resx flag
   assignments and the early return for unregistered statements are
   among the missing lines.  */
1737 make_eh_edges (tree stmt)
1742 if (TREE_CODE (stmt) == RESX_EXPR)
1744 region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
1749 region_nr = lookup_stmt_eh_region (stmt);
1755 foreach_reachable_handler (region_nr, is_resx, make_eh_edge, stmt);
/* Set by mark_eh_edge when verification finds a problem; read back by
   verify_eh_edges.  */
1758 static bool mark_eh_edge_found_error;
1760 /* Mark edge make_eh_edge would create for given region by setting it aux
1761 field, output error if something goes wrong. */
/* NOTE(review): elided listing -- declarations, braces, and the final
   e->aux marking are missing between the numbered lines.  Also note a
   visible defect on line 1778: src and dst are basic_block pointers
   passed for "%i" -- a format/argument mismatch (presumably the indices
   were intended); flagged here since a doc-only edit cannot change the
   call.  */
1763 mark_eh_edge (struct eh_region *region, void *data)
1766 basic_block src, dst;
1770 lab = get_eh_region_tree_label (region);
1772 src = bb_for_stmt (stmt);
1773 dst = label_to_block (lab);
1775 e = find_edge (src, dst);
1778 error ("EH edge %i->%i is missing %i %i.", src->index, dst->index, src, dst);
1779 mark_eh_edge_found_error = true;
1781 else if (!(e->flags & EDGE_EH))
1783 error ("EH edge %i->%i miss EH flag.", src->index, dst->index);
1784 mark_eh_edge_found_error = true;
1788 /* ??? might not be mistake. */
1789 error ("EH edge %i->%i has duplicated regions.", src->index, dst->index);
1790 mark_eh_edge_found_error = true;
1796 /* Verify that BB containing stmt as last stmt has precisely the edges
1797 make_eh_edges would create. */
/* Returns true iff an inconsistency was diagnosed.  NOTE(review): elided
   listing -- declarations (e, ei, region_nr, is_resx), returns, braces
   and the aux-clearing loop are missing between the numbered lines.
   Also: "Unnecesary" in the line-1841 diagnostic is a typo in a runtime
   string; a doc-only edit cannot correct it.  */
1799 verify_eh_edges (tree stmt)
1803 basic_block bb = bb_for_stmt (stmt);
/* No successor may carry stale aux data before we begin marking.  */
1807 FOR_EACH_EDGE (e, ei, bb->succs)
1808 gcc_assert (!e->aux);
1809 mark_eh_edge_found_error = false;
1810 if (TREE_CODE (stmt) == RESX_EXPR)
1812 region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
1817 region_nr = lookup_stmt_eh_region (stmt);
/* A statement with no EH region must have no EH successor edges.  */
1820 FOR_EACH_EDGE (e, ei, bb->succs)
1821 if (e->flags & EDGE_EH)
1823 error ("BB %i can not throw but has EH edges", bb->index);
1828 if (!tree_could_throw_p (stmt))
1830 error ("BB %i last statement has incorrectly set region", bb->index);
/* Mark every edge make_eh_edges would create, then any EH edge left
   unmarked is superfluous.  */
1836 foreach_reachable_handler (region_nr, is_resx, mark_eh_edge, stmt);
1837 FOR_EACH_EDGE (e, ei, bb->succs)
1839 if ((e->flags & EDGE_EH) && !e->aux)
1841 error ("Unnecesary EH edge %i->%i", bb->index, e->dest->index);
1842 mark_eh_edge_found_error = true;
1847 return mark_eh_edge_found_error;
1851 /* Return true if the expr can trap, as in dereferencing an invalid pointer
1852 location or floating point arithmetic. C.f. the rtl version, may_trap_p.
1853 This routine expects only GIMPLE lhs or rhs input. */
/* NOTE(review): elided listing -- the restart label, many case labels
   (ARRAY_REF, INDIRECT_REF, the comparison/conversion/arithmetic groups),
   returns and braces are missing between the numbered lines; the visible
   case arms are NOT contiguous.  */
1856 tree_could_trap_p (tree expr)
1858 enum tree_code code = TREE_CODE (expr);
1859 bool honor_nans = false;
1860 bool honor_snans = false;
1861 bool fp_operation = false;
1862 bool honor_trapv = false;
/* For expression codes, derive trap-relevant properties from the type:
   FP operations honor NaN/sNaN per -ftrapping-math / -fsignaling-nans,
   trapping-signed integers honor overflow.  */
1865 if (TREE_CODE_CLASS (code) == tcc_comparison
1866 || TREE_CODE_CLASS (code) == tcc_unary
1867 || TREE_CODE_CLASS (code) == tcc_binary)
1869 t = TREE_TYPE (expr);
1870 fp_operation = FLOAT_TYPE_P (t);
1873 honor_nans = flag_trapping_math && !flag_finite_math_only;
1874 honor_snans = flag_signaling_nans != 0;
1876 else if (INTEGRAL_TYPE_P (t) && TYPE_TRAP_SIGNED (t))
/* WITH_SIZE_EXPR is transparent: examine its operand instead.  */
1887 case WITH_SIZE_EXPR:
1888 expr = TREE_OPERAND (expr, 0);
1889 code = TREE_CODE (expr);
1892 case ARRAY_RANGE_REF:
1893 /* Let us be conservative here for now. We might be checking bounds of
1894 the access similarly to the case below. */
1895 if (!TREE_THIS_NOTRAP (expr))
1898 base = TREE_OPERAND (expr, 0);
1899 return tree_could_trap_p (base);
/* (Array reference:) trapping base traps; otherwise trust NOTRAP or
   fall back to a static bounds check.  */
1902 base = TREE_OPERAND (expr, 0);
1903 if (tree_could_trap_p (base))
1906 if (TREE_THIS_NOTRAP (expr))
1909 return !in_array_bounds_p (expr);
1912 case ALIGN_INDIRECT_REF:
1913 case MISALIGNED_INDIRECT_REF:
1914 return !TREE_THIS_NOTRAP (expr);
1917 return TREE_THIS_VOLATILE (expr);
1919 case TRUNC_DIV_EXPR:
1921 case FLOOR_DIV_EXPR:
1922 case ROUND_DIV_EXPR:
1923 case EXACT_DIV_EXPR:
1925 case FLOOR_MOD_EXPR:
1926 case ROUND_MOD_EXPR:
1927 case TRUNC_MOD_EXPR:
/* Division/modulo: sNaN or trapping-signed types may trap outright;
   otherwise only a non-constant or zero divisor can trap.  */
1929 if (honor_snans || honor_trapv)
1932 return flag_trapping_math;
1933 t = TREE_OPERAND (expr, 1);
1934 if (!TREE_CONSTANT (t) || integer_zerop (t))
1943 /* Some floating point comparisons may trap. */
1948 case UNORDERED_EXPR:
1958 case FIX_TRUNC_EXPR:
1960 case FIX_FLOOR_EXPR:
1961 case FIX_ROUND_EXPR:
1962 /* Conversion of floating point might trap. */
1968 /* These operations don't trap with floating point. */
1976 /* Any floating arithmetic may trap. */
1977 if (fp_operation && flag_trapping_math)
/* (Call case:) */
1984 t = get_callee_fndecl (expr);
1985 /* Assume that calls to weak functions may trap. */
1986 if (!t || !DECL_P (t) || DECL_WEAK (t))
1991 /* Any floating arithmetic may trap. */
1992 if (fp_operation && flag_trapping_math)
/* Return true if statement T could throw an exception.  Throwing is
   possible only with -fexceptions; a MODIFY_EXPR can throw via a trapping
   store (with -fnon-call-exceptions) or via its RHS; calls throw unless
   marked ECF_NOTHROW; anything else throws only if it can trap and
   non-call exceptions are enabled.  NOTE(review): elided listing --
   returns, braces and part of the final condition are missing between
   the numbered lines.  */
1999 tree_could_throw_p (tree t)
2001 if (!flag_exceptions)
2003 if (TREE_CODE (t) == MODIFY_EXPR)
2005 if (flag_non_call_exceptions
2006 && tree_could_trap_p (TREE_OPERAND (t, 0)))
2008 t = TREE_OPERAND (t, 1);
2011 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2012 t = TREE_OPERAND (t, 0);
2013 if (TREE_CODE (t) == CALL_EXPR)
2014 return (call_expr_flags (t) & ECF_NOTHROW) == 0;
2015 if (flag_non_call_exceptions
2016 return tree_could_trap_p (t);
/* Return true if STMT can throw to a handler within this function.
   NOTE(review): elided listing -- the guard between lines 2023 and 2026
   (presumably an early return for region_nr < 0) is missing.  */
2021 tree_can_throw_internal (tree stmt)
2023 int region_nr = lookup_stmt_eh_region (stmt);
2026 return can_throw_internal_1 (region_nr);
/* Return true if STMT can throw past this function's frame.
   NOTE(review): elided listing -- the guard between lines 2032 and 2035
   (presumably handling region_nr < 0) is missing.  */
2030 tree_can_throw_external (tree stmt)
2032 int region_nr = lookup_stmt_eh_region (stmt);
2035 return can_throw_external_1 (region_nr);
2039 maybe_clean_eh_stmt (tree stmt)
2041 if (!tree_could_throw_p (stmt))
2042 if (remove_stmt_from_eh_region (stmt))