1 /* Exception handling semantics and decomposition for trees.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
30 #include "tree-flow.h"
31 #include "tree-dump.h"
32 #include "tree-inline.h"
33 #include "tree-iterator.h"
34 #include "tree-pass.h"
36 #include "langhooks.h"
41 /* Nonzero if we are using EH to handle cleanups. */
42 static int using_eh_for_cleanups_p = 0;
/* Called by front ends that want cleanups implemented via EH regions;
   simply latches the flag above.  (Function body partially elided in
   this view.)  */
45 using_eh_for_cleanups (void)
47 using_eh_for_cleanups_p = 1;
50 /* Misc functions used in this file. */
52 /* Compare and hash for any structure which begins with a canonical
53 pointer. Assumes all pointers are interchangeable, which is sort
54 of already assumed by gcc elsewhere IIRC. */
/* Hashtable equality callback: compare the leading pointers of the two
   structures.  (Return statement elided in this view.)  */
57 struct_ptr_eq (const void *a, const void *b)
59 const void * const * x = (const void * const *) a;
60 const void * const * y = (const void * const *) b;
/* Hashtable hash callback: hash on the leading pointer value, shifting
   away the low 4 bits -- presumably zero due to alignment; confirm
   against the allocator in use.  */
65 struct_ptr_hash (const void *a)
67 const void * const * x = (const void * const *) a;
68 return (size_t)*x >> 4;
72 /* Remember and lookup EH region data for arbitrary statements.
73 Really this means any statement that could_throw_p. We could
74 stuff this information into the stmt_ann data structure, but:
76 (1) We absolutely rely on this information being kept until
77 we get to rtl. Once we're done with lowering here, if we lose
78 the information there's no way to recover it!
80 (2) There are many more statements that *cannot* throw as
81 compared to those that can. We should be saving some amount
82 of space by only allocating memory for those that can throw. */
/* Record REGION as the EH region of statement T in the current
   function, going through the region-number interface.  */
85 record_stmt_eh_region (struct eh_region *region, tree t)
90 add_stmt_to_eh_region (t, get_eh_region_number (region));
/* Enter statement T of function IFUN into EH region NUM.  Allocates a
   GC'd throw_stmt_node and creates IFUN's throw-stmt hash table lazily
   on first use.  RESX_EXPRs are asserted out -- they are handled by
   other means.  */
94 add_stmt_to_eh_region_fn (struct function *ifun, tree t, int num)
96 struct throw_stmt_node *n;
99 gcc_assert (num >= 0);
100 gcc_assert (TREE_CODE (t) != RESX_EXPR);
102 n = GGC_NEW (struct throw_stmt_node);
106 if (!get_eh_throw_stmt_table (ifun))
107 set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,
111 slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
/* Convenience wrapper operating on the current function (cfun).  */
117 add_stmt_to_eh_region (tree t, int num)
119 add_stmt_to_eh_region_fn (cfun, t, num);
/* Remove statement T of function IFUN from the throw-stmt table, if the
   table exists and contains it.  (Early-return and slot-check lines are
   elided in this view.)  */
123 remove_stmt_from_eh_region_fn (struct function *ifun, tree t)
125 struct throw_stmt_node dummy;
128 if (!get_eh_throw_stmt_table (ifun))
132 slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
136 htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
/* Convenience wrapper operating on the current function (cfun).  */
144 remove_stmt_from_eh_region (tree t)
146 return remove_stmt_from_eh_region_fn (cfun, t);
/* Return the EH region number recorded for statement T in IFUN, or -1
   when T has no entry (or the table does not exist yet).  */
150 lookup_stmt_eh_region_fn (struct function *ifun, tree t)
152 struct throw_stmt_node *p, n;
154 if (!get_eh_throw_stmt_table (ifun))
158 p = (struct throw_stmt_node *) htab_find (get_eh_throw_stmt_table (ifun),
161 return (p ? p->region_nr : -1);
/* Convenience wrapper operating on the current function (cfun).  */
165 lookup_stmt_eh_region (tree t)
167 /* We can get called from initialized data when -fnon-call-exceptions
168 is on; prevent crash. */
171 return lookup_stmt_eh_region_fn (cfun, t);
175 /* First pass of EH node decomposition. Build up a tree of TRY_FINALLY_EXPR
176 nodes and LABEL_DECL nodes. We will use this during the second phase to
177 determine if a goto leaves the body of a TRY_FINALLY_EXPR node. */
179 struct finally_tree_node
184 /* Note that this table is *not* marked GTY. It is short-lived. */
185 static htab_t finally_tree;
/* Enter the CHILD -> PARENT containment edge into the finally_tree hash
   table.  CHILD is a LABEL_DECL or TRY_FINALLY_EXPR; PARENT is the
   enclosing TRY_FINALLY_EXPR (or NULL at the outermost level --
   TODO confirm, callers are partially elided here).  */
188 record_in_finally_tree (tree child, tree parent)
190 struct finally_tree_node *n;
193 n = XNEW (struct finally_tree_node);
197 slot = htab_find_slot (finally_tree, n, INSERT);
/* Recursively walk statement T, recording every label and nested
   TRY_FINALLY_EXPR against the innermost enclosing try-finally REGION.
   Several case labels and the surrounding loop/brace structure are
   elided in this view.  */
203 collect_finally_tree (tree t, tree region)
206 switch (TREE_CODE (t))
209 record_in_finally_tree (LABEL_EXPR_LABEL (t), region);
212 case TRY_FINALLY_EXPR:
/* The try body is walked with T itself as its region; the cleanup
   (operand 1) continues under the outer REGION.  */
213 record_in_finally_tree (t, region);
214 collect_finally_tree (TREE_OPERAND (t, 0), t);
215 t = TREE_OPERAND (t, 1);
219 collect_finally_tree (TREE_OPERAND (t, 0), region);
220 t = TREE_OPERAND (t, 1);
228 t = EH_FILTER_FAILURE (t);
233 tree_stmt_iterator i;
234 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
235 collect_finally_tree (tsi_stmt (i), region);
240 /* A type, a decl, or some kind of statement that we're not
241 interested in. Don't walk them. */
246 /* Use the finally tree to determine if a jump from START to TARGET
247 would leave the try_finally node that START lives in. */
250 outside_finally_tree (tree start, tree target)
252 struct finally_tree_node n, *p;
/* Walk up the parent chain recorded by collect_finally_tree; presumably
   returns true when the chain ends before reaching TARGET.  NOTE(review):
   the loop body and return are elided here -- confirm against the full
   source.  */
257 p = (struct finally_tree_node *) htab_find (finally_tree, &n);
262 while (start != target);
267 /* Second pass of EH node decomposition. Actually transform the TRY_FINALLY
268 and TRY_CATCH nodes into a set of gotos, magic labels, and eh regions.
269 The eh region creation is straight-forward, but frobbing all the gotos
270 and such into shape isn't. */
272 /* State of the world while lowering. */
276 /* What's "current" while constructing the eh region tree. These
277 correspond to variables of the same name in cfun->eh, which we
278 don't have easy access to. */
279 struct eh_region *cur_region;
280 struct eh_region *prev_try;
282 /* Processing of TRY_FINALLY requires a bit more state. This is
283 split out into a separate structure so that we don't have to
284 copy so much when processing other nodes. */
285 struct leh_tf_state *tf;
/* Extra lowering state for a TRY_FINALLY_EXPR (struct leh_tf_state).
   Field declarations for several of the documented members are elided
   in this view.  */
290 /* Pointer to the TRY_FINALLY node under discussion. The try_finally_expr
291 is the original TRY_FINALLY_EXPR. We need to retain this so that
292 outside_finally_tree can reliably reference the tree used in the
293 collect_finally_tree data structures. */
294 tree try_finally_expr;
297 /* The state outside this try_finally node. */
298 struct leh_state *outer;
300 /* The exception region created for it. */
301 struct eh_region *region;
303 /* The GOTO_QUEUE is an array of GOTO_EXPR and RETURN_EXPR statements
304 that are seen to escape this TRY_FINALLY_EXPR node. */
305 struct goto_queue_node {
311 size_t goto_queue_size;
312 size_t goto_queue_active;
314 /* The set of unique labels seen as entries in the goto queue. */
315 VEC(tree,heap) *dest_array;
317 /* A label to be added at the end of the completed transformed
318 sequence. It will be set if may_fallthru was true *at one time*,
319 though subsequent transformations may have cleared that flag. */
322 /* A label that has been registered with except.c to be the
323 landing pad for this try block. */
326 /* True if it is possible to fall out the bottom of the try block.
327 Cleared if the fallthru is converted to a goto. */
330 /* True if any entry in goto_queue is a RETURN_EXPR. */
333 /* True if the finally block can receive an exception edge.
334 Cleared if the exception case is handled by code duplication. */
338 static void lower_eh_filter (struct leh_state *, tree *);
339 static void lower_eh_constructs_1 (struct leh_state *, tree *);
341 /* Comparison function for qsort/bsearch. We're interested in
342 searching goto queue elements for source statements. */
/* Orders goto_queue_node entries by the raw address of their source
   statement; only consistency matters, not the ordering itself.  */
345 goto_queue_cmp (const void *x, const void *y)
347 tree a = ((const struct goto_queue_node *)x)->stmt;
348 tree b = ((const struct goto_queue_node *)y)->stmt;
349 return (a == b ? 0 : a < b ? -1 : 1);
352 /* Search for STMT in the goto queue. Return the replacement,
353 or null if the statement isn't in the queue. */
/* Requires the queue to have been sorted with goto_queue_cmp (bsearch
   precondition).  */
356 find_goto_replacement (struct leh_tf_state *tf, tree stmt)
358 struct goto_queue_node tmp, *ret;
360 ret = (struct goto_queue_node *)
361 bsearch (&tmp, tf->goto_queue, tf->goto_queue_active,
362 sizeof (struct goto_queue_node), goto_queue_cmp);
363 return (ret ? ret->repl_stmt : NULL);
366 /* A subroutine of replace_goto_queue_1. Handles the sub-clauses of a
367 lowered COND_EXPR. If, by chance, the replacement is a simple goto,
368 then we can just splat it in, otherwise we add the new stmts immediately
369 after the COND_EXPR and redirect. */
372 replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
373 tree_stmt_iterator *tsi)
375 tree new, one, label;
377 new = find_goto_replacement (tf, *tp);
/* Single-statement replacement that is itself a GOTO_EXPR: substitute
   it directly for the clause.  (The substitution and early return are
   elided in this view.)  */
381 one = expr_only (new);
382 if (one && TREE_CODE (one) == GOTO_EXPR)
/* Otherwise: invent a fresh label, make the clause jump to it, and link
   the label plus the replacement statements after the COND_EXPR.  */
388 label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
389 *tp = build_and_jump (&LABEL_EXPR_LABEL (label));
391 tsi_link_after (tsi, label, TSI_CONTINUE_LINKING);
392 tsi_link_after (tsi, new, TSI_CONTINUE_LINKING);
395 /* The real work of replace_goto_queue. Returns with TSI updated to
396 point to the next statement. */
398 static void replace_goto_queue_stmt_list (tree, struct leh_tf_state *);
/* Dispatch on the statement code: substitute queued GOTO/RETURN
   replacements, and recurse into compound statements that can contain
   gotos.  Several case labels are elided in this view.  */
401 replace_goto_queue_1 (tree t, struct leh_tf_state *tf, tree_stmt_iterator *tsi)
403 switch (TREE_CODE (t))
407 t = find_goto_replacement (tf, t);
410 tsi_link_before (tsi, t, TSI_SAME_STMT);
417 replace_goto_queue_cond_clause (&COND_EXPR_THEN (t), tf, tsi);
418 replace_goto_queue_cond_clause (&COND_EXPR_ELSE (t), tf, tsi);
421 case TRY_FINALLY_EXPR:
423 replace_goto_queue_stmt_list (TREE_OPERAND (t, 0), tf);
424 replace_goto_queue_stmt_list (TREE_OPERAND (t, 1), tf);
427 replace_goto_queue_stmt_list (CATCH_BODY (t), tf);
430 replace_goto_queue_stmt_list (EH_FILTER_FAILURE (t), tf);
437 /* These won't have gotos in them. */
444 /* A subroutine of replace_goto_queue. Handles STATEMENT_LISTs. */
447 replace_goto_queue_stmt_list (tree t, struct leh_tf_state *tf)
449 tree_stmt_iterator i = tsi_start (t);
450 while (!tsi_end_p (i))
451 replace_goto_queue_1 (tsi_stmt (i), tf, &i);
454 /* Replace all goto queue members. */
457 replace_goto_queue (struct leh_tf_state *tf)
459 if (tf->goto_queue_active == 0)
461 replace_goto_queue_stmt_list (*tf->top_p, tf);
464 /* For any GOTO_EXPR or RETURN_EXPR, decide whether it leaves a try_finally
465 node, and if so record that fact in the goto queue associated with that
   try_finally node.  */
469 maybe_record_in_goto_queue (struct leh_state *state, tree stmt)
471 struct leh_tf_state *tf = state->tf;
472 struct goto_queue_node *q;
479 switch (TREE_CODE (stmt))
483 tree lab = GOTO_DESTINATION (stmt);
485 /* Computed and non-local gotos do not get processed. Given
486 their nature we can neither tell whether we've escaped the
487 finally block nor redirect them if we knew. */
488 if (TREE_CODE (lab) != LABEL_DECL)
491 /* No need to record gotos that don't leave the try block. */
492 if (! outside_finally_tree (lab, tf->try_finally_expr))
/* First escaping goto: allocate the destination vector lazily.  */
495 if (! tf->dest_array)
497 tf->dest_array = VEC_alloc (tree, heap, 10);
498 VEC_quick_push (tree, tf->dest_array, lab);
/* Otherwise scan for an existing entry so each destination label is
   recorded only once.  */
503 int n = VEC_length (tree, tf->dest_array);
504 for (index = 0; index < n; ++index)
505 if (VEC_index (tree, tf->dest_array, index) == lab)
508 VEC_safe_push (tree, heap, tf->dest_array, lab);
514 tf->may_return = true;
/* Grow the goto queue geometrically as needed and append a zeroed
   entry for this statement.  */
522 active = tf->goto_queue_active;
523 size = tf->goto_queue_size;
526 size = (size ? size * 2 : 32);
527 tf->goto_queue_size = size;
529 = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
532 q = &tf->goto_queue[active];
533 tf->goto_queue_active = active + 1;
535 memset (q, 0, sizeof (*q));
540 #ifdef ENABLE_CHECKING
541 /* We do not process SWITCH_EXPRs for now. As long as the original source
542 was in fact structured, and we've not yet done jump threading, then none
543 of the labels will leave outer TRY_FINALLY_EXPRs. Verify this. */
/* Checking-only assertion walk over all case labels of SWITCH_EXPR;
   compiled to an empty macro when ENABLE_CHECKING is off (below).  */
546 verify_norecord_switch_expr (struct leh_state *state, tree switch_expr)
548 struct leh_tf_state *tf = state->tf;
555 vec = SWITCH_LABELS (switch_expr);
556 n = TREE_VEC_LENGTH (vec);
558 for (i = 0; i < n; ++i)
560 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
561 gcc_assert (!outside_finally_tree (lab, tf->try_finally_expr));
565 #define verify_norecord_switch_expr(state, switch_expr)
568 /* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB. Place in CONT_P
569 whatever is needed to finish the return. If MOD is non-null, insert it
570 before the new branch. RETURN_VALUE_P is a cache containing a temporary
571 variable to be used in manipulating the value returned from the function. */
574 do_return_redirection (struct goto_queue_node *q, tree finlab, tree mod,
575 tree *return_value_p)
577 tree ret_expr = TREE_OPERAND (q->stmt, 0);
582 /* The nasty part about redirecting the return value is that the
583 return value itself is to be computed before the FINALLY block
597 should return 0, not 1. Arrange for this to happen by copying
598 the computed return value into a local temporary. This also
599 allows us to redirect multiple return statements through the
600 same destination block; whether this is a net win or not really
601 depends, I guess, but it does make generation of the switch in
602 lower_try_finally_switch easier. */
604 switch (TREE_CODE (ret_expr))
/* Bare RESULT_DECL return: all such returns share the same value, so
   the original statement can serve as the continuation.  */
607 if (!*return_value_p)
608 *return_value_p = ret_expr;
610 gcc_assert (*return_value_p == ret_expr);
611 q->cont_stmt = q->stmt;
614 case GIMPLE_MODIFY_STMT:
616 tree result = GIMPLE_STMT_OPERAND (ret_expr, 0);
617 tree new, old = GIMPLE_STMT_OPERAND (ret_expr, 1);
619 if (!*return_value_p)
621 if (aggregate_value_p (TREE_TYPE (result),
622 TREE_TYPE (current_function_decl)))
623 /* If this function returns in memory, copy the argument
624 into the return slot now. Otherwise, we might need to
625 worry about magic return semantics, so we need to use a
626 temporary to hold the value until we're actually ready
   to return.  */
630 new = create_tmp_var (TREE_TYPE (old), "rettmp");
631 *return_value_p = new;
634 new = *return_value_p;
/* Stash the value into the cached temporary now; the real return via
   RESULT_DECL happens in the continuation statement.  */
636 x = build_gimple_modify_stmt (new, old);
637 append_to_statement_list (x, &q->repl_stmt);
642 x = build_gimple_modify_stmt (result, new);
643 q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
652 /* If we don't return a value, all return statements are the same. */
653 q->cont_stmt = q->stmt;
/* Replacement = optional MOD, then a branch to the finally label.  */
657 append_to_statement_list (mod, &q->repl_stmt);
659 x = build1 (GOTO_EXPR, void_type_node, finlab);
660 append_to_statement_list (x, &q->repl_stmt);
663 /* Similar, but easier, for GOTO_EXPR. */
/* Keep the original goto as the continuation; replace it in place with
   optional MOD followed by a branch to FINLAB.  */
666 do_goto_redirection (struct goto_queue_node *q, tree finlab, tree mod)
670 q->cont_stmt = q->stmt;
672 append_to_statement_list (mod, &q->repl_stmt);
674 x = build1 (GOTO_EXPR, void_type_node, finlab);
675 append_to_statement_list (x, &q->repl_stmt);
678 /* We want to transform
679 try { body; } catch { stuff; }
   into
681 body; goto over; lab: stuff; over:
683 T is a TRY_FINALLY or TRY_CATCH node. LAB is the label that
684 should be placed before the second operand, or NULL. OVER is
685 an existing label that should be put at the exit, or NULL. */
688 frob_into_branch_around (tree *tp, tree lab, tree over)
/* Replace *TP with the try body, then append: (goto over, if the body
   can fall through), LAB, the handler, and the OVER label.  */
692 op1 = TREE_OPERAND (*tp, 1);
693 *tp = TREE_OPERAND (*tp, 0);
695 if (block_may_fallthru (*tp))
698 over = create_artificial_label ();
699 x = build1 (GOTO_EXPR, void_type_node, over);
700 append_to_statement_list (x, tp);
705 x = build1 (LABEL_EXPR, void_type_node, lab);
706 append_to_statement_list (x, tp);
709 append_to_statement_list (op1, tp);
713 x = build1 (LABEL_EXPR, void_type_node, over);
714 append_to_statement_list (x, tp);
718 /* A subroutine of lower_try_finally. Duplicate the tree rooted at T.
719 Make sure to record all new labels found. */
722 lower_try_finally_dup_block (tree t, struct leh_state *outer_state)
726 t = unsave_expr_now (t);
/* Re-run collect_finally_tree on the copy so the duplicated labels are
   known to outside_finally_tree.  (The region is taken from the outer
   state's try-finally, when one exists -- guard lines elided here.)  */
729 region = outer_state->tf->try_finally_expr;
730 collect_finally_tree (t, region);
735 /* A subroutine of lower_try_finally. Create a fallthru label for
736 the given try_finally state. The only tricky bit here is that
737 we have to make sure to record the label in our outer context. */
/* The label is created lazily and cached in tf->fallthru_label.  */
740 lower_try_finally_fallthru_label (struct leh_tf_state *tf)
742 tree label = tf->fallthru_label;
745 label = create_artificial_label ();
746 tf->fallthru_label = label;
748 record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
753 /* A subroutine of lower_try_finally. If lang_protect_cleanup_actions
754 returns non-null, then the language requires that the exception path out
755 of a try_finally be treated specially. To wit: the code within the
756 finally block may not itself throw an exception. We have two choices here.
757 First we can duplicate the finally block and wrap it in a must_not_throw
758 region. Second, we can generate code like
763 if (fintmp == eh_edge)
764 protect_cleanup_actions;
767 where "fintmp" is the temporary used in the switch statement generation
768 alternative considered below. For the nonce, we always choose the first
   option.
771 THIS_STATE may be null if this is a try-cleanup, not a try-finally. */
774 honor_protect_cleanup_actions (struct leh_state *outer_state,
775 struct leh_state *this_state,
776 struct leh_tf_state *tf)
778 tree protect_cleanup_actions, finally, x;
779 tree_stmt_iterator i;
780 bool finally_may_fallthru;
782 /* First check for nothing to do. */
783 if (lang_protect_cleanup_actions)
784 protect_cleanup_actions = lang_protect_cleanup_actions ();
786 protect_cleanup_actions = NULL;
788 finally = TREE_OPERAND (*tf->top_p, 1);
790 /* If the EH case of the finally block can fall through, this may be a
791 structure of the form
804 E.g. with an inline destructor with an embedded try block. In this
805 case we must save the runtime EH data around the nested exception.
807 This complication means that any time the previous runtime data might
808 be used (via fallthru from the finally) we handle the eh case here,
809 whether or not protect_cleanup_actions is active. */
811 finally_may_fallthru = block_may_fallthru (finally);
812 if (!finally_may_fallthru && !protect_cleanup_actions)
815 /* Duplicate the FINALLY block. Only need to do this for try-finally,
816 and not for cleanups. */
818 finally = lower_try_finally_dup_block (finally, outer_state);
820 /* Resume execution after the exception. Adding this now lets
821 lower_eh_filter not add unnecessary gotos, as it is clear that
822 we never fallthru from this copy of the finally block. */
823 if (finally_may_fallthru)
825 tree save_eptr, save_filt;
/* Save the live exception pointer and filter value on entry to the
   duplicated finally ...  */
827 save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
828 save_filt = create_tmp_var (integer_type_node, "save_filt");
830 i = tsi_start (finally);
831 x = build0 (EXC_PTR_EXPR, ptr_type_node);
832 x = build_gimple_modify_stmt (save_eptr, x);
833 tsi_link_before (&i, x, TSI_CONTINUE_LINKING);
835 x = build0 (FILTER_EXPR, integer_type_node);
836 x = build_gimple_modify_stmt (save_filt, x);
837 tsi_link_before (&i, x, TSI_CONTINUE_LINKING);
/* ... and restore them at the end, then resume the exception.  */
839 i = tsi_last (finally);
840 x = build0 (EXC_PTR_EXPR, ptr_type_node);
841 x = build_gimple_modify_stmt (x, save_eptr);
842 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
844 x = build0 (FILTER_EXPR, integer_type_node);
845 x = build_gimple_modify_stmt (x, save_filt);
846 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
848 x = build_resx (get_eh_region_number (tf->region));
849 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
852 /* Wrap the block with protect_cleanup_actions as the action. */
853 if (protect_cleanup_actions)
855 x = build2 (EH_FILTER_EXPR, void_type_node, NULL, NULL);
856 append_to_statement_list (protect_cleanup_actions, &EH_FILTER_FAILURE (x));
857 EH_FILTER_MUST_NOT_THROW (x) = 1;
858 finally = build2 (TRY_CATCH_EXPR, void_type_node, finally, x);
859 lower_eh_filter (outer_state, &finally);
862 lower_eh_constructs_1 (outer_state, &finally);
864 /* Hook this up to the end of the existing try block. If we
865 previously fell through the end, we'll have to branch around.
866 This means adding a new goto, and adding it to the queue. */
868 i = tsi_last (TREE_OPERAND (*tf->top_p, 0));
870 if (tf->may_fallthru)
872 x = lower_try_finally_fallthru_label (tf);
873 x = build1 (GOTO_EXPR, void_type_node, x);
874 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
877 maybe_record_in_goto_queue (this_state, x);
879 tf->may_fallthru = false;
882 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
883 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
884 tsi_link_after (&i, finally, TSI_CONTINUE_LINKING);
886 /* Having now been handled, EH isn't to be considered with
887 the rest of the outgoing edges. */
888 tf->may_throw = false;
891 /* A subroutine of lower_try_finally. We have determined that there is
892 no fallthru edge out of the finally block. This means that there is
893 no outgoing edge corresponding to any incoming edge. Restructure the
894 try_finally node for this special case. */
897 lower_try_finally_nofallthru (struct leh_state *state, struct leh_tf_state *tf)
899 tree x, finally, lab, return_val;
900 struct goto_queue_node *q, *qe;
/* Single shared entry label LAB; every queued goto/return is redirected
   to it, then the finally block is appended once after the try body.  */
905 lab = create_artificial_label ();
907 finally = TREE_OPERAND (*tf->top_p, 1);
908 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
910 x = build1 (LABEL_EXPR, void_type_node, lab);
911 append_to_statement_list (x, tf->top_p);
915 qe = q + tf->goto_queue_active;
918 do_return_redirection (q, lab, NULL, &return_val);
920 do_goto_redirection (q, lab, NULL);
922 replace_goto_queue (tf);
924 lower_eh_constructs_1 (state, &finally);
925 append_to_statement_list (finally, tf->top_p);
928 /* A subroutine of lower_try_finally. We have determined that there is
929 exactly one destination of the finally block. Restructure the
930 try_finally node for this special case. */
933 lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
935 struct goto_queue_node *q, *qe;
936 tree x, finally, finally_label;
938 finally = TREE_OPERAND (*tf->top_p, 1);
939 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
941 lower_eh_constructs_1 (state, &finally);
945 /* Only reachable via the exception edge. Add the given label to
946 the head of the FINALLY block. Append a RESX at the end. */
948 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
949 append_to_statement_list (x, tf->top_p);
951 append_to_statement_list (finally, tf->top_p);
953 x = build_resx (get_eh_region_number (tf->region));
955 append_to_statement_list (x, tf->top_p);
960 if (tf->may_fallthru)
962 /* Only reachable via the fallthru edge. Do nothing but let
963 the two blocks run together; we'll fall out the bottom. */
964 append_to_statement_list (finally, tf->top_p);
/* Exactly one goto or return destination: label the finally block and
   redirect the (single kind of) queued edge at it.  */
968 finally_label = create_artificial_label ();
969 x = build1 (LABEL_EXPR, void_type_node, finally_label);
970 append_to_statement_list (x, tf->top_p);
972 append_to_statement_list (finally, tf->top_p);
975 qe = q + tf->goto_queue_active;
979 /* Reachable by return expressions only. Redirect them. */
980 tree return_val = NULL;
982 do_return_redirection (q, finally_label, NULL, &return_val);
983 replace_goto_queue (tf);
987 /* Reachable by goto expressions only. Redirect them. */
989 do_goto_redirection (q, finally_label, NULL);
990 replace_goto_queue (tf);
992 if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label)
994 /* Reachable by goto to fallthru label only. Redirect it
995 to the new label (already created, sadly), and do not
996 emit the final branch out, or the fallthru label. */
997 tf->fallthru_label = NULL;
/* Emit the continuation of the sole destination after the finally.  */
1002 append_to_statement_list (tf->goto_queue[0].cont_stmt, tf->top_p);
1003 maybe_record_in_goto_queue (state, tf->goto_queue[0].cont_stmt);
1006 /* A subroutine of lower_try_finally. There are multiple edges incoming
1007 and outgoing from the finally block. Implement this by duplicating the
1008 finally block for every destination. */
1011 lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
1013 tree finally, new_stmt;
1016 finally = TREE_OPERAND (*tf->top_p, 1);
1017 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
1019 new_stmt = NULL_TREE;
/* One copy of the finally for the fallthru edge, ending in a goto to
   the shared fallthru label.  */
1021 if (tf->may_fallthru)
1023 x = lower_try_finally_dup_block (finally, state);
1024 lower_eh_constructs_1 (state, &x);
1025 append_to_statement_list (x, &new_stmt);
1027 x = lower_try_finally_fallthru_label (tf);
1028 x = build1 (GOTO_EXPR, void_type_node, x);
1029 append_to_statement_list (x, &new_stmt);
/* One copy for the exception edge, ending in a RESX.  */
1034 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
1035 append_to_statement_list (x, &new_stmt);
1037 x = lower_try_finally_dup_block (finally, state);
1038 lower_eh_constructs_1 (state, &x);
1039 append_to_statement_list (x, &new_stmt);
1041 x = build_resx (get_eh_region_number (tf->region));
1042 append_to_statement_list (x, &new_stmt);
1047 struct goto_queue_node *q, *qe;
1048 tree return_val = NULL;
1049 int return_index, index;
1052 struct goto_queue_node *q;
/* labels[] has one slot per goto destination plus one (return_index)
   for returns; labels[i].q remembers the first queue entry for each
   destination.  */
1056 return_index = VEC_length (tree, tf->dest_array);
1057 labels = XCNEWVEC (struct labels_s, return_index + 1);
1060 qe = q + tf->goto_queue_active;
1063 index = q->index < 0 ? return_index : q->index;
1065 if (!labels[index].q)
1066 labels[index].q = q;
/* First pass: for each destination actually used, emit a label, a copy
   of the finally, and that destination's continuation.  */
1069 for (index = 0; index < return_index + 1; index++)
1073 q = labels[index].q;
1077 lab = labels[index].label = create_artificial_label ();
1079 if (index == return_index)
1080 do_return_redirection (q, lab, NULL, &return_val);
1082 do_goto_redirection (q, lab, NULL);
1084 x = build1 (LABEL_EXPR, void_type_node, lab);
1085 append_to_statement_list (x, &new_stmt);
1087 x = lower_try_finally_dup_block (finally, state);
1088 lower_eh_constructs_1 (state, &x);
1089 append_to_statement_list (x, &new_stmt);
1091 append_to_statement_list (q->cont_stmt, &new_stmt);
1092 maybe_record_in_goto_queue (state, q->cont_stmt);
/* Second pass: redirect the remaining queue entries to the labels
   created above.  */
1095 for (q = tf->goto_queue; q < qe; q++)
1099 index = q->index < 0 ? return_index : q->index;
1101 if (labels[index].q == q)
1104 lab = labels[index].label;
1106 if (index == return_index)
1107 do_return_redirection (q, lab, NULL, &return_val);
1109 do_goto_redirection (q, lab, NULL);
1112 replace_goto_queue (tf);
1116 /* Need to link new stmts after running replace_goto_queue due
1117 to not wanting to process the same goto stmts twice. */
1118 append_to_statement_list (new_stmt, tf->top_p);
1121 /* A subroutine of lower_try_finally. There are multiple edges incoming
1122 and outgoing from the finally block. Implement this by instrumenting
1123 each incoming edge and creating a switch statement at the end of the
1124 finally block that branches to the appropriate destination. */
1127 lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
1129 struct goto_queue_node *q, *qe;
1130 tree return_val = NULL;
1131 tree finally, finally_tmp, finally_label;
1132 int return_index, eh_index, fallthru_index;
1133 int nlabels, ndests, j, last_case_index;
1134 tree case_label_vec, switch_stmt, last_case, switch_body;
1137 /* Mash the TRY block to the head of the chain. */
1138 finally = TREE_OPERAND (*tf->top_p, 1);
1139 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
1141 /* Lower the finally block itself. */
1142 lower_eh_constructs_1 (state, &finally);
1144 /* Prepare for switch statement generation. */
/* Switch-index layout: [0, nlabels) are goto destinations, then one
   slot each (if used) for return, EH, and fallthru edges.  */
1145 nlabels = VEC_length (tree, tf->dest_array);
1146 return_index = nlabels;
1147 eh_index = return_index + tf->may_return;
1148 fallthru_index = eh_index + tf->may_throw;
1149 ndests = fallthru_index + tf->may_fallthru;
1151 finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
1152 finally_label = create_artificial_label ();
1154 case_label_vec = make_tree_vec (ndests);
1155 switch_stmt = build3 (SWITCH_EXPR, integer_type_node, finally_tmp,
1156 NULL_TREE, case_label_vec);
1159 last_case_index = 0;
1161 /* Begin inserting code for getting to the finally block. Things
1162 are done in this order to correspond to the sequence the code is
   laid out.  */
/* Fallthru edge: set finally_tmp, jump to the finally, and add the
   corresponding case that jumps on to the fallthru label.  */
1165 if (tf->may_fallthru)
1167 x = build_gimple_modify_stmt (finally_tmp,
1168 build_int_cst (integer_type_node,
1170 append_to_statement_list (x, tf->top_p);
1174 x = build1 (GOTO_EXPR, void_type_node, finally_label);
1175 append_to_statement_list (x, tf->top_p);
1179 last_case = build3 (CASE_LABEL_EXPR, void_type_node,
1180 build_int_cst (NULL_TREE, fallthru_index), NULL,
1181 create_artificial_label ());
1182 TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
1185 x = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
1186 append_to_statement_list (x, &switch_body);
1188 x = lower_try_finally_fallthru_label (tf);
1189 x = build1 (GOTO_EXPR, void_type_node, x);
1190 append_to_statement_list (x, &switch_body);
/* EH edge: the landing-pad label, set finally_tmp, and a case whose
   body resumes the exception with RESX.  */
1195 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
1196 append_to_statement_list (x, tf->top_p);
1198 x = build_gimple_modify_stmt (finally_tmp,
1199 build_int_cst (integer_type_node,
1201 append_to_statement_list (x, tf->top_p);
1203 last_case = build3 (CASE_LABEL_EXPR, void_type_node,
1204 build_int_cst (NULL_TREE, eh_index), NULL,
1205 create_artificial_label ());
1206 TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
1209 x = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
1210 append_to_statement_list (x, &switch_body);
1211 x = build_resx (get_eh_region_number (tf->region));
1212 append_to_statement_list (x, &switch_body);
1215 x = build1 (LABEL_EXPR, void_type_node, finally_label);
1216 append_to_statement_list (x, tf->top_p);
1218 append_to_statement_list (finally, tf->top_p);
1220 /* Redirect each incoming goto edge. */
1222 qe = q + tf->goto_queue_active;
1223 j = last_case_index + tf->may_return;
/* For each queued edge: prepend a finally_tmp assignment to the
   redirection, and create its case label on first sight.  */
1227 int switch_id, case_index;
1231 mod = build_gimple_modify_stmt (finally_tmp,
1232 build_int_cst (integer_type_node,
1234 do_return_redirection (q, finally_label, mod, &return_val);
1235 switch_id = return_index;
1239 mod = build_gimple_modify_stmt (finally_tmp,
1240 build_int_cst (integer_type_node,
1242 do_goto_redirection (q, finally_label, mod);
1243 switch_id = q->index;
1246 case_index = j + q->index;
1247 if (!TREE_VEC_ELT (case_label_vec, case_index))
1248 TREE_VEC_ELT (case_label_vec, case_index)
1249 = build3 (CASE_LABEL_EXPR, void_type_node,
1250 build_int_cst (NULL_TREE, switch_id), NULL,
1251 /* We store the cont_stmt in the
1252 CASE_LABEL, so that we can recover it
1253 in the loop below. We don't create
1254 the new label while walking the
1255 goto_queue because pointers don't
1256 offer a stable order. */
/* Now that case ordering is fixed, materialize a real label for each
   case, emit it plus the stashed continuation into the switch body.  */
1259 for (j = last_case_index; j < last_case_index + nlabels; j++)
1264 last_case = TREE_VEC_ELT (case_label_vec, j);
1266 gcc_assert (last_case);
1268 cont_stmt = CASE_LABEL (last_case);
1270 label = create_artificial_label ();
1271 CASE_LABEL (last_case) = label;
1273 x = build1 (LABEL_EXPR, void_type_node, label);
1274 append_to_statement_list (x, &switch_body);
1275 append_to_statement_list (cont_stmt, &switch_body);
1276 maybe_record_in_goto_queue (state, cont_stmt);
1278 replace_goto_queue (tf);
1280 /* Make sure that the last case is the default label, as one is required.
1281 Then sort the labels, which is also required in GIMPLE. */
1282 CASE_LOW (last_case) = NULL;
1283 sort_case_labels (case_label_vec);
1285 /* Need to link switch_stmt after running replace_goto_queue due
1286 to not wanting to process the same goto stmts twice. */
1287 append_to_statement_list (switch_stmt, tf->top_p);
1288 append_to_statement_list (switch_body, tf->top_p);
1291 /* Decide whether or not we are going to duplicate the finally block.
1292 There are several considerations.
1294 First, if this is Java, then the finally block contains code
1295 written by the user. It has line numbers associated with it,
1296 so duplicating the block means it's difficult to set a breakpoint.
1297 Since controlling code generation via -g is verboten, we simply
1298 never duplicate code without optimization.
1300 Second, we'd like to prevent egregious code growth. One way to
1301 do this is to estimate the size of the finally block, multiply
1302 that by the number of copies we'd need to make, and compare against
1303 the estimate of the size of the switch machinery we'd have to add. */
/* Returns nonzero to copy, zero to use the switch.  The thresholds
   below are heuristic; the early "no optimization" return is elided in
   this view.  */
1306 decide_copy_try_finally (int ndests, tree finally)
1308 int f_estimate, sw_estimate;
1313 /* Finally estimate N times, plus N gotos. */
1314 f_estimate = estimate_num_insns (finally, &eni_size_weights);
1315 f_estimate = (f_estimate + 1) * ndests;
1317 /* Switch statement (cost 10), N variable assignments, N gotos. */
1318 sw_estimate = 10 + 2 * ndests;
1320 /* Optimize for size clearly wants our best guess. */
1322 return f_estimate < sw_estimate;
1324 /* ??? These numbers are completely made up so far. */
1326 return f_estimate < 100 || f_estimate < sw_estimate * 2;
1328 return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
1331 /* A subroutine of lower_eh_constructs_1. Lower a TRY_FINALLY_EXPR nodes
1332 to a sequence of labels and blocks, plus the exception region trees
1333 that record all the magic. This is complicated by the need to
1334 arrange for the FINALLY block to be executed on all exits. */
/* Lower the TRY_FINALLY_EXPR at *TP.  Lowers the try body, counts how
   many distinct destinations (fallthru, returns, gotos, EH) must pass
   through the FINALLY block, and dispatches to one of the specialized
   strategies: drop the finally entirely, nofallthru, onedest, copy,
   or switch.  */
1337 lower_try_finally (struct leh_state *state, tree *tp)
1339 struct leh_tf_state this_tf;
1340 struct leh_state this_state;
1343 /* Process the try block. */
1345 memset (&this_tf, 0, sizeof (this_tf));
1346 this_tf.try_finally_expr = *tp;
1348 this_tf.outer = state;
1349 if (using_eh_for_cleanups_p)
/* NOTE(review): the assignment target of the next line (presumably
   this_tf.region) is split across source lines not visible here.  */
1351 = gen_eh_region_cleanup (state->cur_region, state->prev_try);
1353 this_tf.region = NULL;
1355 this_state.cur_region = this_tf.region;
1356 this_state.prev_try = state->prev_try;
1357 this_state.tf = &this_tf;
1359 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
1361 /* Determine if the try block is escaped through the bottom. */
1362 this_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
1364 /* Determine if any exceptions are possible within the try block. */
1365 if (using_eh_for_cleanups_p)
1366 this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region);
1367 if (this_tf.may_throw)
1369 this_tf.eh_label = create_artificial_label ();
1370 set_eh_region_tree_label (this_tf.region, this_tf.eh_label);
1371 honor_protect_cleanup_actions (state, &this_state, &this_tf);
1374 /* Sort the goto queue for efficient searching later. */
1375 if (this_tf.goto_queue_active > 1)
1376 qsort (this_tf.goto_queue, this_tf.goto_queue_active,
1377 sizeof (struct goto_queue_node), goto_queue_cmp);
1379 /* Determine how many edges (still) reach the finally block. Or rather,
1380 how many destinations are reached by the finally block. Use this to
1381 determine how we process the finally block itself. */
1383 ndests = VEC_length (tree, this_tf.dest_array);
1384 ndests += this_tf.may_fallthru;
1385 ndests += this_tf.may_return;
1386 ndests += this_tf.may_throw;
1388 /* If the FINALLY block is not reachable, dike it out. */
1390 *tp = TREE_OPERAND (*tp, 0);
1392 /* If the finally block doesn't fall through, then any destination
1393 we might try to impose there isn't reached either. There may be
1394 some minor amount of cleanup and redirection still needed. */
1395 else if (!block_may_fallthru (TREE_OPERAND (*tp, 1)))
1396 lower_try_finally_nofallthru (state, &this_tf);
1398 /* We can easily special-case redirection to a single destination. */
1399 else if (ndests == 1)
1400 lower_try_finally_onedest (state, &this_tf);
1402 else if (decide_copy_try_finally (ndests, TREE_OPERAND (*tp, 1)))
1403 lower_try_finally_copy (state, &this_tf);
1405 lower_try_finally_switch (state, &this_tf);
1407 /* If someone requested we add a label at the end of the transformed
1409 if (this_tf.fallthru_label)
1411 tree x = build1 (LABEL_EXPR, void_type_node, this_tf.fallthru_label);
1412 append_to_statement_list (x, tp);
/* Release per-construct scratch state.  */
1415 VEC_free (tree, heap, this_tf.dest_array);
1416 if (this_tf.goto_queue)
1417 free (this_tf.goto_queue);
1420 /* A subroutine of lower_eh_constructs_1. Lower a TRY_CATCH_EXPR with a
1421 list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
1422 exception region trees that record all the magic. */
/* Lower a TRY_CATCH_EXPR at *TP whose handler is a list of CATCH_EXPR
   nodes: create a try region, lower the body, then give each catch
   clause its own EH region and landing-pad label.  */
1425 lower_catch (struct leh_state *state, tree *tp)
1427 struct eh_region *try_region;
1428 struct leh_state this_state;
1429 tree_stmt_iterator i;
1432 try_region = gen_eh_region_try (state->cur_region);
1433 this_state.cur_region = try_region;
1434 this_state.prev_try = try_region;
1435 this_state.tf = state->tf;
1437 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
/* If nothing in the body can throw, the construct reduces to its body.  */
1439 if (!get_eh_region_may_contain_throw (try_region))
1441 *tp = TREE_OPERAND (*tp, 0);
1446 for (i = tsi_start (TREE_OPERAND (*tp, 1)); !tsi_end_p (i); )
1448 struct eh_region *catch_region;
1449 tree catch, x, eh_label;
1451 catch = tsi_stmt (i);
1452 catch_region = gen_eh_region_catch (try_region, CATCH_TYPES (catch));
1454 this_state.cur_region = catch_region;
1455 this_state.prev_try = state->prev_try;
1456 lower_eh_constructs_1 (&this_state, &CATCH_BODY (catch));
/* Emit the landing-pad label for this handler ahead of its body.  */
1458 eh_label = create_artificial_label ();
1459 set_eh_region_tree_label (catch_region, eh_label);
1461 x = build1 (LABEL_EXPR, void_type_node, eh_label);
1462 tsi_link_before (&i, x, TSI_SAME_STMT);
/* A handler that can fall through must jump past the remaining
   handlers to a shared out_label.  */
1464 if (block_may_fallthru (CATCH_BODY (catch)))
1467 out_label = create_artificial_label ();
1469 x = build1 (GOTO_EXPR, void_type_node, out_label);
1470 append_to_statement_list (x, &CATCH_BODY (catch));
1473 tsi_link_before (&i, CATCH_BODY (catch), TSI_SAME_STMT);
1477 frob_into_branch_around (tp, NULL, out_label);
1480 /* A subroutine of lower_eh_constructs_1. Lower a TRY_CATCH_EXPR with a
1481 EH_FILTER_EXPR to a sequence of labels and blocks, plus the exception
1482 region trees that record all the magic. */
/* Lower a TRY_CATCH_EXPR at *TP whose handler is an EH_FILTER_EXPR:
   build either a must-not-throw or an allowed-exceptions region,
   lower body and failure actions, and branch around the handler.  */
1485 lower_eh_filter (struct leh_state *state, tree *tp)
1487 struct leh_state this_state;
1488 struct eh_region *this_region;
1489 tree inner = expr_first (TREE_OPERAND (*tp, 1));
1492 if (EH_FILTER_MUST_NOT_THROW (inner))
1493 this_region = gen_eh_region_must_not_throw (state->cur_region);
1495 this_region = gen_eh_region_allowed (state->cur_region,
1496 EH_FILTER_TYPES (inner));
1497 this_state = *state;
1498 this_state.cur_region = this_region;
1499 /* For must not throw regions any cleanup regions inside it
1500 can't reach outer catch regions. */
1501 if (EH_FILTER_MUST_NOT_THROW (inner))
1502 this_state.prev_try = NULL;
1504 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
/* If the body cannot throw, the filter collapses to the body.  */
1506 if (!get_eh_region_may_contain_throw (this_region))
1508 *tp = TREE_OPERAND (*tp, 0);
/* The failure actions are lowered in the OUTER state, not inside
   the filter region.  */
1512 lower_eh_constructs_1 (state, &EH_FILTER_FAILURE (inner));
1513 TREE_OPERAND (*tp, 1) = EH_FILTER_FAILURE (inner);
1515 eh_label = create_artificial_label ();
1516 set_eh_region_tree_label (this_region, eh_label);
1518 frob_into_branch_around (tp, eh_label, NULL);
1521 /* Implement a cleanup expression. This is similar to try-finally,
1522 except that we only execute the cleanup block for exception edges. */
/* Lower a cleanup expression at *TP (like try-finally, but the cleanup
   runs only on exception edges).  Builds a cleanup region and reuses
   honor_protect_cleanup_actions via a fake leh_tf_state.  */
1525 lower_cleanup (struct leh_state *state, tree *tp)
1527 struct leh_state this_state;
1528 struct eh_region *this_region;
1529 struct leh_tf_state fake_tf;
1531 /* If not using eh, then exception-only cleanups are no-ops. */
1532 if (!flag_exceptions)
1534 *tp = TREE_OPERAND (*tp, 0);
1535 lower_eh_constructs_1 (state, tp);
1539 this_region = gen_eh_region_cleanup (state->cur_region, state->prev_try);
1540 this_state = *state;
1541 this_state.cur_region = this_region;
1543 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
/* If the protected body cannot throw, the cleanup is dead code.  */
1545 if (!get_eh_region_may_contain_throw (this_region))
1547 *tp = TREE_OPERAND (*tp, 0);
1551 /* Build enough of a try-finally state so that we can reuse
1552 honor_protect_cleanup_actions. */
1553 memset (&fake_tf, 0, sizeof (fake_tf));
1555 fake_tf.outer = state;
1556 fake_tf.region = this_region;
1557 fake_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
1558 fake_tf.may_throw = true;
1560 fake_tf.eh_label = create_artificial_label ();
1561 set_eh_region_tree_label (this_region, fake_tf.eh_label);
1563 honor_protect_cleanup_actions (state, NULL, &fake_tf);
1565 if (fake_tf.may_throw)
1567 /* In this case honor_protect_cleanup_actions had nothing to do,
1568 and we should process this normally. */
1569 lower_eh_constructs_1 (state, &TREE_OPERAND (*tp, 1));
1570 frob_into_branch_around (tp, fake_tf.eh_label, fake_tf.fallthru_label);
1574 /* In this case honor_protect_cleanup_actions did nearly all of
1575 the work. All we have left is to append the fallthru_label. */
1577 *tp = TREE_OPERAND (*tp, 0);
1578 if (fake_tf.fallthru_label)
1580 tree x = build1 (LABEL_EXPR, void_type_node, fake_tf.fallthru_label);
1581 append_to_statement_list (x, tp);
1586 /* Main loop for lowering eh constructs. */
/* Recursive workhorse of EH lowering: walk the statement tree at *TP,
   record potentially-throwing statements in the current region, and
   dispatch TRY_FINALLY_EXPR / TRY_CATCH_EXPR nodes to their specific
   lowering routines.  */
1589 lower_eh_constructs_1 (struct leh_state *state, tree *tp)
1591 tree_stmt_iterator i;
1594 switch (TREE_CODE (t))
1597 lower_eh_constructs_1 (state, &COND_EXPR_THEN (t));
1598 lower_eh_constructs_1 (state, &COND_EXPR_ELSE (t));
1602 /* Look for things that can throw exceptions, and record them. */
1603 if (state->cur_region && tree_could_throw_p (t))
1605 record_stmt_eh_region (state->cur_region, t);
1606 note_eh_region_may_contain_throw (state->cur_region);
1610 case GIMPLE_MODIFY_STMT:
1611 /* Look for things that can throw exceptions, and record them. */
1612 if (state->cur_region && tree_could_throw_p (t))
1614 record_stmt_eh_region (state->cur_region, t);
1615 note_eh_region_may_contain_throw (state->cur_region);
/* Gotos/returns through an active try-finally are queued for
   later redirection through the finally block.  */
1621 maybe_record_in_goto_queue (state, t);
1624 verify_norecord_switch_expr (state, t);
1627 case TRY_FINALLY_EXPR:
1628 lower_try_finally (state, tp);
1631 case TRY_CATCH_EXPR:
/* The first statement of the handler operand determines which
   flavor of TRY_CATCH this is.  */
1632 i = tsi_start (TREE_OPERAND (t, 1));
1633 switch (TREE_CODE (tsi_stmt (i)))
1636 lower_catch (state, tp);
1638 case EH_FILTER_EXPR:
1639 lower_eh_filter (state, tp);
1642 lower_cleanup (state, tp);
1647 case STATEMENT_LIST:
1648 for (i = tsi_start (t); !tsi_end_p (i); )
1650 lower_eh_constructs_1 (state, tsi_stmt_ptr (i));
/* Lowering may have replaced a statement with a whole list;
   splice it inline rather than nesting lists.  */
1652 if (TREE_CODE (t) == STATEMENT_LIST)
1654 tsi_link_before (&i, t, TSI_SAME_STMT);
1663 /* A type, a decl, or some kind of statement that we're not
1664 interested in. Don't walk them. */
/* Pass entry point: lower all EH constructs in the current function.
   Collects the try-finally tree, runs the recursive lowering with an
   empty outer state, then finalizes the EH region array.  */
1670 lower_eh_constructs (void)
1672 struct leh_state null_state;
1673 tree *tp = &DECL_SAVED_TREE (current_function_decl);
1675 finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
1677 collect_finally_tree (*tp, NULL);
1679 memset (&null_state, 0, sizeof (null_state));
1680 lower_eh_constructs_1 (&null_state, tp);
/* finally_tree is only needed during lowering; free it now.  */
1682 htab_delete (finally_tree);
1684 collect_eh_region_array ();
/* Pass descriptor for EH lowering: requires gimplified control flow
   (PROP_gimple_lcf) and provides lowered EH (PROP_gimple_leh).  */
1688 struct tree_opt_pass pass_lower_eh =
1692 lower_eh_constructs, /* execute */
1695 0, /* static_pass_number */
1696 TV_TREE_EH, /* tv_id */
1697 PROP_gimple_lcf, /* properties_required */
1698 PROP_gimple_leh, /* properties_provided */
1699 0, /* properties_destroyed */
1700 0, /* todo_flags_start */
1701 TODO_dump_func, /* todo_flags_finish */
1706 /* Construct EH edges for STMT. */
/* Callback for foreach_reachable_handler: add an abnormal EH edge from
   the block of the statement in DATA to the block of REGION's label.  */
1709 make_eh_edge (struct eh_region *region, void *data)
1712 basic_block src, dst;
1715 lab = get_eh_region_tree_label (region);
1717 src = bb_for_stmt (stmt);
1718 dst = label_to_block (lab);
1720 make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
/* Create EH edges for STMT: determine its EH region (from the RESX
   operand, or the statement's recorded region) and add an edge to
   every reachable handler.  */
1724 make_eh_edges (tree stmt)
1729 if (TREE_CODE (stmt) == RESX_EXPR)
1731 region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
1736 region_nr = lookup_stmt_eh_region (stmt);
1742 foreach_reachable_handler (region_nr, is_resx, make_eh_edge, stmt);
/* Set by mark_eh_edge when a verification error is diagnosed.  */
static bool mark_eh_edge_found_error;
1747 /* Mark the edge that make_eh_edge would create for REGION by setting
1748 its aux field; report an error if the edge is missing, lacks the EH
flag, or was already marked (duplicated regions). */
1750 mark_eh_edge (struct eh_region *region, void *data)
1753 basic_block src, dst;
1757 lab = get_eh_region_tree_label (region);
1759 src = bb_for_stmt (stmt);
1760 dst = label_to_block (lab);
1762 e = find_edge (src, dst);
1765 error ("EH edge %i->%i is missing", src->index, dst->index);
1766 mark_eh_edge_found_error = true;
1768 else if (!(e->flags & EDGE_EH))
1770 error ("EH edge %i->%i miss EH flag", src->index, dst->index);
1771 mark_eh_edge_found_error = true;
1775 /* ??? Reaching an already-marked edge might not actually be a mistake. */
1776 error ("EH edge %i->%i has duplicated regions", src->index, dst->index);
1777 mark_eh_edge_found_error = true;
1783 /* Verify that BB containing stmt as last stmt has precisely the edges
1784 make_eh_edges would create.  Returns true if any error was found.  */
1786 verify_eh_edges (tree stmt)
1790 basic_block bb = bb_for_stmt (stmt);
/* All aux fields must start clear; mark_eh_edge will set them.  */
1794 FOR_EACH_EDGE (e, ei, bb->succs)
1795 gcc_assert (!e->aux);
1796 mark_eh_edge_found_error = false;
1797 if (TREE_CODE (stmt) == RESX_EXPR)
1799 region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
1804 region_nr = lookup_stmt_eh_region (stmt);
/* A statement with no EH region must have no EH successor edges.  */
1807 FOR_EACH_EDGE (e, ei, bb->succs)
1808 if (e->flags & EDGE_EH)
1810 error ("BB %i can not throw but has EH edges", bb->index);
1815 if (!tree_could_throw_p (stmt))
1817 error ("BB %i last statement has incorrectly set region", bb->index);
/* Mark every edge the EH machinery expects, then complain about
   EH edges that were not marked (i.e. unnecessary ones).  */
1823 foreach_reachable_handler (region_nr, is_resx, mark_eh_edge, stmt);
1824 FOR_EACH_EDGE (e, ei, bb->succs)
1826 if ((e->flags & EDGE_EH) && !e->aux)
1828 error ("unnecessary EH edge %i->%i", bb->index, e->dest->index);
1829 mark_eh_edge_found_error = true;
1834 return mark_eh_edge_found_error;
1838 /* Return true if the expr can trap, as in dereferencing an invalid pointer
1839 location or floating point arithmetic. C.f. the rtl version, may_trap_p.
1840 This routine expects only GIMPLE lhs or rhs input. */
1843 tree_could_trap_p (tree expr)
1845 enum tree_code code = TREE_CODE (expr);
1846 bool honor_nans = false;
1847 bool honor_snans = false;
1848 bool fp_operation = false;
1849 bool honor_trapv = false;
/* For arithmetic/comparison codes, decide up front whether FP NaN
   handling or trapping integer overflow is in effect.  */
1852 if (TREE_CODE_CLASS (code) == tcc_comparison
1853 || TREE_CODE_CLASS (code) == tcc_unary
1854 || TREE_CODE_CLASS (code) == tcc_binary)
1856 t = TREE_TYPE (expr);
1857 fp_operation = FLOAT_TYPE_P (t);
1860 honor_nans = flag_trapping_math && !flag_finite_math_only;
1861 honor_snans = flag_signaling_nans != 0;
1863 else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
1870 case TARGET_MEM_REF:
1871 /* For TARGET_MEM_REFs use the information based on the original
1873 expr = TMR_ORIGINAL (expr);
1874 code = TREE_CODE (expr);
/* Strip wrappers and re-dispatch on the underlying expression.  */
1881 case VIEW_CONVERT_EXPR:
1882 case WITH_SIZE_EXPR:
1883 expr = TREE_OPERAND (expr, 0);
1884 code = TREE_CODE (expr);
/* Array references trap if the base traps or the index may be
   out of bounds (unless marked TREE_THIS_NOTRAP).  */
1887 case ARRAY_RANGE_REF:
1888 base = TREE_OPERAND (expr, 0);
1889 if (tree_could_trap_p (base))
1892 if (TREE_THIS_NOTRAP (expr))
1895 return !range_in_array_bounds_p (expr);
1898 base = TREE_OPERAND (expr, 0);
1899 if (tree_could_trap_p (base))
1902 if (TREE_THIS_NOTRAP (expr))
1905 return !in_array_bounds_p (expr);
1908 case ALIGN_INDIRECT_REF:
1909 case MISALIGNED_INDIRECT_REF:
1910 return !TREE_THIS_NOTRAP (expr);
1913 return TREE_THIS_VOLATILE (expr);
/* Division and modulus trap on zero divisors (and with -ftrapv
   or signaling NaNs); a nonzero constant divisor is safe.  */
1915 case TRUNC_DIV_EXPR:
1917 case FLOOR_DIV_EXPR:
1918 case ROUND_DIV_EXPR:
1919 case EXACT_DIV_EXPR:
1921 case FLOOR_MOD_EXPR:
1922 case ROUND_MOD_EXPR:
1923 case TRUNC_MOD_EXPR:
1925 if (honor_snans || honor_trapv)
1928 return flag_trapping_math;
1929 t = TREE_OPERAND (expr, 1);
1930 if (!TREE_CONSTANT (t) || integer_zerop (t))
1939 /* Some floating point comparisons may trap. */
1944 case UNORDERED_EXPR:
1954 case FIX_TRUNC_EXPR:
1955 /* Conversion of floating point might trap. */
1961 /* These operations don't trap with floating point. */
1969 /* Any floating arithmetic may trap. */
1970 if (fp_operation && flag_trapping_math)
1977 t = get_callee_fndecl (expr);
1978 /* Assume that calls to weak functions may trap. */
1979 if (!t || !DECL_P (t) || DECL_WEAK (t))
1984 /* Any floating arithmetic may trap. */
1985 if (fp_operation && flag_trapping_math)
/* Return true if statement T could throw an exception: never when
   -fno-exceptions; for an assignment, when either side could trap
   under -fnon-call-exceptions; for a call, unless it is ECF_NOTHROW.  */
1992 tree_could_throw_p (tree t)
1994 if (!flag_exceptions)
1996 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
1998 if (flag_non_call_exceptions
1999 && tree_could_trap_p (GIMPLE_STMT_OPERAND (t, 0)))
/* Continue with the RHS of the assignment.  */
2001 t = GIMPLE_STMT_OPERAND (t, 1);
2004 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2005 t = TREE_OPERAND (t, 0);
2006 if (TREE_CODE (t) == CALL_EXPR)
2007 return (call_expr_flags (t) & ECF_NOTHROW) == 0;
2008 if (flag_non_call_exceptions)
2009 return tree_could_trap_p (t);
/* Return true if STMT can throw to a handler within this function,
   based on its EH region (taken from the RESX operand or the EH
   region table).  */
2014 tree_can_throw_internal (tree stmt)
2017 bool is_resx = false;
2019 if (TREE_CODE (stmt) == RESX_EXPR)
2020 region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0)), is_resx = true;
2022 region_nr = lookup_stmt_eh_region (stmt);
2025 return can_throw_internal_1 (region_nr, is_resx);
/* Return true if STMT can throw an exception that propagates out of
   this function.  A statement with no recorded region falls back to
   the conservative tree_could_throw_p answer.  */
2029 tree_can_throw_external (tree stmt)
2032 bool is_resx = false;
2034 if (TREE_CODE (stmt) == RESX_EXPR)
2035 region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0)), is_resx = true;
2037 region_nr = lookup_stmt_eh_region (stmt);
2039 return tree_could_throw_p (stmt);
2041 return can_throw_external_1 (region_nr, is_resx);
2044 /* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
2045 OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
2046 in the table if it should be in there. Return TRUE if a replacement was
2047 done that my require an EH edge purge. */
2050 maybe_clean_or_replace_eh_stmt (tree old_stmt, tree new_stmt)
2052 int region_nr = lookup_stmt_eh_region (old_stmt);
2056 bool new_stmt_could_throw = tree_could_throw_p (new_stmt);
2058 if (new_stmt == old_stmt && new_stmt_could_throw)
2061 remove_stmt_from_eh_region (old_stmt);
2062 if (new_stmt_could_throw)
2064 add_stmt_to_eh_region (new_stmt, region_nr);