1 /* Exception handling semantics and decomposition for trees.
2 Copyright (C) 2003, 2004 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
23 #include "coretypes.h"
31 #include "tree-flow.h"
32 #include "tree-dump.h"
33 #include "tree-inline.h"
34 #include "tree-iterator.h"
35 #include "tree-pass.h"
37 #include "langhooks.h"
40 /* In some circumstances we have to save EH data around a nested
41 exception. The EXC_PTR_EXPR and FILTER_EXPR values are saved
42 into these _DECL nodes.
44 We lazily create this pair of _DECL nodes once per function rather
45 than creating a new pair of _DECLs each time we need to save the
46 EXC_PTR and FILTER. This can save us literally thousands of _DECL
47 nodes when we have many inline destructors with an embedded try block.
49 This is safe as we know the lifetime of the values in these _DECL nodes.
50 Their lifetimes also ensure that globbing these uses into a single
51 pair of _DECL nodes requires no additional PHI_NODEs or SSA_NAMEs when
52 compared to having a pair of _DECL nodes per inline destructor with
53 an embedded try block. */
/* Created on demand in honor_protect_cleanup_actions and then reused
   for every subsequent try-finally in the current function.  */
54 static tree save_eptr, save_filt;
56 /* Nonzero if we are using EH to handle cleanups. */
57 static int using_eh_for_cleanups_p = 0;
/* Called by front ends that implement cleanup actions (e.g. destructors)
   via exception handling regions; flips the flag tested throughout the
   lowering code below.  */
60 using_eh_for_cleanups (void)
62 using_eh_for_cleanups_p = 1;
65 /* Misc functions used in this file. */
67 /* Compare and hash for any structure which begins with a canonical
68 pointer. Assumes all pointers are interchangeable, which is sort
69 of already assumed by gcc elsewhere IIRC. */
/* Hash-table equality callback: A and B point at structures whose first
   field is the canonical pointer used as the key.  */
72 struct_ptr_eq (const void *a, const void *b)
74 const void * const * x = a;
75 const void * const * y = b;
/* Hash-table hash callback paired with struct_ptr_eq: hash the leading
   pointer of the structure.  The >> 4 discards low-order bits that are
   mostly alignment and carry little entropy.  */
80 struct_ptr_hash (const void *a)
82 const void * const * x = a;
83 return (size_t)*x >> 4;
87 /* Remember and lookup EH region data for arbitrary statements.
88 Really this means any statement that could_throw_p. We could
89 stuff this information into the stmt_ann data structure, but:
91 (1) We absolutely rely on this information being kept until
92 we get to rtl. Once we're done with lowering here, if we lose
93 the information there's no way to recover it!
95 (2) There are many more statements that *cannot* throw as
96 compared to those that can. We should be saving some amount
97 of space by only allocating memory for those that can throw. */
/* Table entry mapping one statement to its EH region number; keyed by
   the statement pointer (see struct_ptr_hash/struct_ptr_eq).  */
99 struct throw_stmt_node GTY(())
/* Statement -> region-number map.  GC-marked so the data survives
   through to RTL expansion (see point (1) above).  */
105 static GTY((param_is (struct throw_stmt_node))) htab_t throw_stmt_table;
/* Record that statement T lives in EH region REGION by inserting a new
   node into throw_stmt_table.  */
108 record_stmt_eh_region (struct eh_region *region, tree t)
110 struct throw_stmt_node *n;
116 n = ggc_alloc (sizeof (*n));
118 n->region_nr = get_eh_region_number (region);
120 slot = htab_find_slot (throw_stmt_table, n, INSERT);
/* Associate statement T with EH region number NUM directly.
   NUM must be non-negative.  */
126 add_stmt_to_eh_region (tree t, int num)
128 struct throw_stmt_node *n;
131 gcc_assert (num >= 0);
133 n = ggc_alloc (sizeof (*n));
137 slot = htab_find_slot (throw_stmt_table, n, INSERT);
/* Remove any EH region annotation for statement T.  Safe to call before
   throw_stmt_table has been created.  */
143 remove_stmt_from_eh_region (tree t)
145 struct throw_stmt_node dummy;
148 if (!throw_stmt_table)
/* NO_INSERT: only clear an existing slot, never allocate one.  */
152 slot = htab_find_slot (throw_stmt_table, &dummy, NO_INSERT);
155 htab_clear_slot (throw_stmt_table, slot);
/* Return the EH region number recorded for statement T, or -1 when T
   has no entry (or no table exists yet).  */
163 lookup_stmt_eh_region (tree t)
165 struct throw_stmt_node *p, n;
167 if (!throw_stmt_table)
171 p = htab_find (throw_stmt_table, &n);
173 return (p ? p->region_nr : -1);
178 /* First pass of EH node decomposition. Build up a tree of TRY_FINALLY_EXPR
179 nodes and LABEL_DECL nodes. We will use this during the second phase to
180 determine if a goto leaves the body of a TRY_FINALLY_EXPR node. */
/* One parent link in that tree: a LABEL_DECL or TRY_FINALLY_EXPR child
   mapped to its innermost enclosing TRY_FINALLY_EXPR.  */
182 struct finally_tree_node
187 /* Note that this table is *not* marked GTY. It is short-lived. */
188 static htab_t finally_tree;
/* Enter the pair (CHILD, PARENT) into finally_tree.  CHILD is a
   LABEL_DECL or a TRY_FINALLY_EXPR; PARENT is the enclosing
   TRY_FINALLY_EXPR.  Allocated with xmalloc since the table is
   short-lived (not GC'd).  */
191 record_in_finally_tree (tree child, tree parent)
193 struct finally_tree_node *n;
196 n = xmalloc (sizeof (*n));
200 slot = htab_find_slot (finally_tree, n, INSERT);
/* Walk the statement tree T, recording every LABEL_EXPR's label and
   every nested TRY_FINALLY_EXPR against REGION, the innermost enclosing
   TRY_FINALLY_EXPR.  Uses tail-assignment to T instead of recursion
   where only one operand remains to be walked.  */
206 collect_finally_tree (tree t, tree region)
209 switch (TREE_CODE (t))
212 record_in_finally_tree (LABEL_EXPR_LABEL (t), region);
215 case TRY_FINALLY_EXPR:
/* The try-finally node itself is recorded, then its body is walked
   with this node as the new enclosing region.  */
216 record_in_finally_tree (t, region);
217 collect_finally_tree (TREE_OPERAND (t, 0), t);
218 t = TREE_OPERAND (t, 1);
222 collect_finally_tree (TREE_OPERAND (t, 0), region);
223 t = TREE_OPERAND (t, 1);
231 t = EH_FILTER_FAILURE (t);
236 tree_stmt_iterator i;
237 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
238 collect_finally_tree (tsi_stmt (i), region);
243 /* A type, a decl, or some kind of statement that we're not
244 interested in. Don't walk them. */
249 /* Use the finally tree to determine if a jump from START to TARGET
250 would leave the try_finally node that START lives in. */
/* Walks parent links from START; true when TARGET is never reached,
   i.e. the jump escapes the try-finally (see callers in
   maybe_record_in_goto_queue / verify_norecord_switch_expr).  */
253 outside_finally_tree (tree start, tree target)
255 struct finally_tree_node n, *p;
260 p = htab_find (finally_tree, &n);
265 while (start != target);
270 /* Second pass of EH node decomposition. Actually transform the TRY_FINALLY
271 and TRY_CATCH nodes into a set of gotos, magic labels, and eh regions.
272 The eh region creation is straight-forward, but frobbing all the gotos
273 and such into shape isn't. */
275 /* State of the world while lowering. */
279 /* What's "current" while constructing the eh region tree. These
280 correspond to variables of the same name in cfun->eh, which we
281 don't have easy access to. */
282 struct eh_region *cur_region;
283 struct eh_region *prev_try;
285 /* Processing of TRY_FINALLY requires a bit more state. This is
286 split out into a separate structure so that we don't have to
287 copy so much when processing other nodes. */
288 struct leh_tf_state *tf;
293 /* Pointer to the TRY_FINALLY node under discussion. The try_finally_expr
294 is the original TRY_FINALLY_EXPR. We need to retain this so that
295 outside_finally_tree can reliably reference the tree used in the
296 collect_finally_tree data structures. */
297 tree try_finally_expr;
300 /* The state outside this try_finally node. */
301 struct leh_state *outer;
303 /* The exception region created for it. */
304 struct eh_region *region;
306 /* The GOTO_QUEUE is an array of GOTO_EXPR and RETURN_EXPR statements
307 that are seen to escape this TRY_FINALLY_EXPR node. */
308 struct goto_queue_node {
/* Allocated capacity of, and number of live entries in, goto_queue.  */
314 size_t goto_queue_size;
315 size_t goto_queue_active;
317 /* The set of unique labels seen as entries in the goto queue. */
318 varray_type dest_array;
320 /* A label to be added at the end of the completed transformed
321 sequence. It will be set if may_fallthru was true *at one time*,
322 though subsequent transformations may have cleared that flag. */
325 /* A label that has been registered with except.c to be the
326 landing pad for this try block. */
329 /* True if it is possible to fall out the bottom of the try block.
330 Cleared if the fallthru is converted to a goto. */
333 /* True if any entry in goto_queue is a RETURN_EXPR. */
336 /* True if the finally block can receive an exception edge.
337 Cleared if the exception case is handled by code duplication. */
/* Forward declarations for the mutually-referencing lowering routines.  */
341 static void lower_eh_filter (struct leh_state *, tree *);
342 static void lower_eh_constructs_1 (struct leh_state *, tree *);
344 /* Comparison function for qsort/bsearch. We're interested in
345 searching goto queue elements for source statements. */
/* Orders goto_queue_node entries by the address of their source
   statement; returns <0, 0 or >0 in the usual qsort convention.  */
348 goto_queue_cmp (const void *x, const void *y)
350 tree a = ((const struct goto_queue_node *)x)->stmt;
351 tree b = ((const struct goto_queue_node *)y)->stmt;
352 return (a == b ? 0 : a < b ? -1 : 1);
355 /* Search for STMT in the goto queue. Return the replacement,
356 or null if the statement isn't in the queue. */
/* Binary search: the queue must already be sorted with goto_queue_cmp
   (done in lower_try_finally before replacement begins).  */
359 find_goto_replacement (struct leh_tf_state *tf, tree stmt)
361 struct goto_queue_node tmp, *ret;
363 ret = bsearch (&tmp, tf->goto_queue, tf->goto_queue_active,
364 sizeof (struct goto_queue_node), goto_queue_cmp);
365 return (ret ? ret->repl_stmt : NULL);
368 /* A subroutine of replace_goto_queue_1. Handles the sub-clauses of a
369 lowered COND_EXPR. If, by chance, the replacement is a simple goto,
370 then we can just splat it in, otherwise we add the new stmts immediately
371 after the COND_EXPR and redirect. */
374 replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
375 tree_stmt_iterator *tsi)
377 tree new, one, label;
379 new = find_goto_replacement (tf, *tp);
/* A replacement consisting of a single GOTO_EXPR is substituted for
   the clause directly.  */
383 one = expr_only (new);
384 if (one && TREE_CODE (one) == GOTO_EXPR)
/* Otherwise redirect the clause to a fresh label placed just after the
   COND_EXPR, followed by the full replacement sequence.  */
390 label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
391 *tp = build_and_jump (&LABEL_EXPR_LABEL (label));
393 tsi_link_after (tsi, label, TSI_CONTINUE_LINKING);
394 tsi_link_after (tsi, new, TSI_CONTINUE_LINKING);
397 /* The real work of replace_goto_queue. Returns with TSI updated to
398 point to the next statement. */
400 static void replace_goto_queue_stmt_list (tree, struct leh_tf_state *);
/* Dispatch on the statement code, splicing in queued replacements for
   gotos/returns and recursing into compound constructs.  */
403 replace_goto_queue_1 (tree t, struct leh_tf_state *tf, tree_stmt_iterator *tsi)
405 switch (TREE_CODE (t))
409 t = find_goto_replacement (tf, t);
412 tsi_link_before (tsi, t, TSI_SAME_STMT);
419 replace_goto_queue_cond_clause (&COND_EXPR_THEN (t), tf, tsi);
420 replace_goto_queue_cond_clause (&COND_EXPR_ELSE (t), tf, tsi);
423 case TRY_FINALLY_EXPR:
425 replace_goto_queue_stmt_list (TREE_OPERAND (t, 0), tf);
426 replace_goto_queue_stmt_list (TREE_OPERAND (t, 1), tf);
429 replace_goto_queue_stmt_list (CATCH_BODY (t), tf);
432 replace_goto_queue_stmt_list (EH_FILTER_FAILURE (t), tf);
439 /* These won't have gotos in them. */
446 /* A subroutine of replace_goto_queue. Handles STATEMENT_LISTs. */
449 replace_goto_queue_stmt_list (tree t, struct leh_tf_state *tf)
451 tree_stmt_iterator i = tsi_start (t);
452 while (!tsi_end_p (i))
/* replace_goto_queue_1 advances I past whatever it processed.  */
453 replace_goto_queue_1 (tsi_stmt (i), tf, &i);
456 /* Replace all goto queue members. */
/* Entry point: rewrite every queued goto/return throughout the whole
   lowered sequence rooted at *TF->TOP_P.  */
459 replace_goto_queue (struct leh_tf_state *tf)
461 replace_goto_queue_stmt_list (*tf->top_p, tf);
464 /* For any GOTO_EXPR or RETURN_EXPR, decide whether it leaves a try_finally
465 node, and if so record that fact in the goto queue associated with that
469 maybe_record_in_goto_queue (struct leh_state *state, tree stmt)
471 struct leh_tf_state *tf = state->tf;
472 struct goto_queue_node *q;
479 switch (TREE_CODE (stmt))
483 tree lab = GOTO_DESTINATION (stmt);
485 /* Computed and non-local gotos do not get processed. Given
486 their nature we can neither tell whether we've escaped the
487 finally block nor redirect them if we knew. */
488 if (TREE_CODE (lab) != LABEL_DECL)
491 /* No need to record gotos that don't leave the try block. */
492 if (! outside_finally_tree (lab, tf->try_finally_expr))
/* First escaping goto: create the destination array lazily.  */
495 if (! tf->dest_array)
497 VARRAY_TREE_INIT (tf->dest_array, 10, "dest_array");
498 VARRAY_PUSH_TREE (tf->dest_array, lab);
/* Otherwise scan for an existing entry so each destination label is
   recorded in dest_array only once.  */
503 int n = VARRAY_ACTIVE_SIZE (tf->dest_array);
504 for (index = 0; index < n; ++index)
505 if (VARRAY_TREE (tf->dest_array, index) == lab)
508 VARRAY_PUSH_TREE (tf->dest_array, lab);
514 tf->may_return = true;
/* Grow the goto queue geometrically (start at 32, then double).  */
522 active = tf->goto_queue_active;
523 size = tf->goto_queue_size;
526 size = (size ? size * 2 : 32);
527 tf->goto_queue_size = size;
529 = xrealloc (tf->goto_queue, size * sizeof (struct goto_queue_node));
/* Claim and zero the next queue slot for this statement.  */
532 q = &tf->goto_queue[active];
533 tf->goto_queue_active = active + 1;
535 memset (q, 0, sizeof (*q));
540 #ifdef ENABLE_CHECKING
541 /* We do not process SWITCH_EXPRs for now. As long as the original source
542 was in fact structured, and we've not yet done jump threading, then none
543 of the labels will leave outer TRY_FINALLY_EXPRs. Verify this. */
546 verify_norecord_switch_expr (struct leh_state *state, tree switch_expr)
548 struct leh_tf_state *tf = state->tf;
555 vec = SWITCH_LABELS (switch_expr);
556 n = TREE_VEC_LENGTH (vec);
/* Assert that no case label jumps out of the current try-finally.  */
558 for (i = 0; i < n; ++i)
560 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
561 gcc_assert (!outside_finally_tree (lab, tf->try_finally_expr));
/* Without ENABLE_CHECKING the verification compiles to nothing.  */
565 #define verify_norecord_switch_expr(state, switch_expr)
568 /* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB. Place in CONT_P
569 whatever is needed to finish the return. If MOD is non-null, insert it
570 before the new branch. RETURN_VALUE_P is a cache containing a temporary
571 variable to be used in manipulating the value returned from the function. */
574 do_return_redirection (struct goto_queue_node *q, tree finlab, tree mod,
575 tree *return_value_p)
577 tree ret_expr = TREE_OPERAND (q->stmt, 0);
582 /* The nasty part about redirecting the return value is that the
583 return value itself is to be computed before the FINALLY block
597 should return 0, not 1. Arrange for this to happen by copying
598 the computed return value into a local temporary. This also
599 allows us to redirect multiple return statements through the
600 same destination block; whether this is a net win or not really
601 depends, I guess, but it does make generation of the switch in
602 lower_try_finally_switch easier. */
604 switch (TREE_CODE (ret_expr))
/* Plain result: cache it (all returns must agree) and reuse the
   original statement as the continuation.  */
607 if (!*return_value_p)
608 *return_value_p = ret_expr;
610 gcc_assert (*return_value_p == ret_expr);
611 q->cont_stmt = q->stmt;
/* MODIFY_EXPR return: copy the value into the cached temporary now,
   and rebuild a RETURN_EXPR from that temporary for the continuation.  */
616 tree result = TREE_OPERAND (ret_expr, 0);
617 tree new, old = TREE_OPERAND (ret_expr, 1);
619 if (!*return_value_p)
621 if (aggregate_value_p (TREE_TYPE (result),
622 TREE_TYPE (current_function_decl)))
623 /* If this function returns in memory, copy the argument
624 into the return slot now. Otherwise, we might need to
625 worry about magic return semantics, so we need to use a
626 temporary to hold the value until we're actually ready
630 new = create_tmp_var (TREE_TYPE (old), "rettmp");
631 *return_value_p = new;
634 new = *return_value_p;
636 x = build (MODIFY_EXPR, TREE_TYPE (new), new, old);
637 append_to_statement_list (x, &q->repl_stmt);
642 x = build (MODIFY_EXPR, TREE_TYPE (result), result, new);
643 q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
652 /* If we don't return a value, all return statements are the same. */
653 q->cont_stmt = q->stmt;
/* Emit the optional MOD statement, then the branch to FINLAB, as the
   replacement for the original return.  */
657 append_to_statement_list (mod, &q->repl_stmt);
659 x = build1 (GOTO_EXPR, void_type_node, finlab);
660 append_to_statement_list (x, &q->repl_stmt);
663 /* Similar, but easier, for GOTO_EXPR. */
/* Redirect the goto in Q to FINLAB: keep the original goto as the
   continuation and replace it with MOD (if non-null) followed by a
   branch to FINLAB.  */
666 do_goto_redirection (struct goto_queue_node *q, tree finlab, tree mod)
670 q->cont_stmt = q->stmt;
672 append_to_statement_list (mod, &q->repl_stmt);
674 x = build1 (GOTO_EXPR, void_type_node, finlab);
675 append_to_statement_list (x, &q->repl_stmt);
678 /* We want to transform
679 try { body; } catch { stuff; }
681 body; goto over; lab: stuff; over:
683 T is a TRY_FINALLY or TRY_CATCH node. LAB is the label that
684 should be placed before the second operand, or NULL. OVER is
685 an existing label that should be put at the exit, or NULL. */
688 frob_into_branch_around (tree *tp, tree lab, tree over)
/* Split the try node: OP1 is the handler, *TP becomes the body.  */
692 op1 = TREE_OPERAND (*tp, 1);
693 *tp = TREE_OPERAND (*tp, 0);
/* If the body can fall off its end, branch around the handler.  */
695 if (block_may_fallthru (*tp))
698 over = create_artificial_label ();
699 x = build1 (GOTO_EXPR, void_type_node, over);
700 append_to_statement_list (x, tp);
/* Emit LAB (when given), then the handler statements.  */
705 x = build1 (LABEL_EXPR, void_type_node, lab);
706 append_to_statement_list (x, tp);
709 append_to_statement_list (op1, tp);
/* Finally place the exit label.  */
713 x = build1 (LABEL_EXPR, void_type_node, over);
714 append_to_statement_list (x, tp);
718 /* A subroutine of lower_try_finally. Duplicate the tree rooted at T.
719 Make sure to record all new labels found. */
722 lower_try_finally_dup_block (tree t, struct leh_state *outer_state)
/* unsave_expr_now performs the deep copy; the copy's labels are then
   registered in the finally tree under the enclosing try-finally.  */
726 t = unsave_expr_now (t);
729 region = outer_state->tf->try_finally_expr;
730 collect_finally_tree (t, region);
735 /* A subroutine of lower_try_finally. Create a fallthru label for
736 the given try_finally state. The only tricky bit here is that
737 we have to make sure to record the label in our outer context. */
740 lower_try_finally_fallthru_label (struct leh_tf_state *tf)
742 tree label = tf->fallthru_label;
/* Create and memoize the label on first use only.  */
745 label = create_artificial_label ();
746 tf->fallthru_label = label;
/* Register the fresh label with the outer try-finally so that jumps to
   it are classified correctly by outside_finally_tree.  */
748 record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
753 /* A subroutine of lower_try_finally. If lang_protect_cleanup_actions
754 returns non-null, then the language requires that the exception path out
755 of a try_finally be treated specially. To wit: the code within the
756 finally block may not itself throw an exception. We have two choices here.
757 First we can duplicate the finally block and wrap it in a must_not_throw
758 region. Second, we can generate code like
763 if (fintmp == eh_edge)
764 protect_cleanup_actions;
767 where "fintmp" is the temporary used in the switch statement generation
768 alternative considered below. For the nonce, we always choose the first
771 THIS_STATE may be null if this is a try-cleanup, not a try-finally. */
774 honor_protect_cleanup_actions (struct leh_state *outer_state,
775 struct leh_state *this_state,
776 struct leh_tf_state *tf)
778 tree protect_cleanup_actions, finally, x;
779 tree_stmt_iterator i;
780 bool finally_may_fallthru;
782 /* First check for nothing to do. */
783 if (lang_protect_cleanup_actions)
784 protect_cleanup_actions = lang_protect_cleanup_actions ();
786 protect_cleanup_actions = NULL;
788 finally = TREE_OPERAND (*tf->top_p, 1);
790 /* If the EH case of the finally block can fall through, this may be a
791 structure of the form
804 E.g. with an inline destructor with an embedded try block. In this
805 case we must save the runtime EH data around the nested exception.
807 This complication means that any time the previous runtime data might
808 be used (via fallthru from the finally) we handle the eh case here,
809 whether or not protect_cleanup_actions is active. */
811 finally_may_fallthru = block_may_fallthru (finally);
812 if (!finally_may_fallthru && !protect_cleanup_actions)
815 /* Duplicate the FINALLY block. Only need to do this for try-finally,
816 and not for cleanups. */
818 finally = lower_try_finally_dup_block (finally, outer_state);
820 /* Resume execution after the exception. Adding this now lets
821 lower_eh_filter not add unnecessary gotos, as it is clear that
822 we never fallthru from this copy of the finally block. */
823 if (finally_may_fallthru)
825 /* If we have not created _DECLs for saving the EXC_PTR
826 and FILTER_EXPR, create them now. */
829 save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
830 save_filt = create_tmp_var (integer_type_node, "save_filt");
/* Save the live EH data at the head of the duplicated finally ...  */
833 i = tsi_start (finally);
834 x = build (EXC_PTR_EXPR, ptr_type_node);
835 x = build (MODIFY_EXPR, void_type_node, save_eptr, x);
836 tsi_link_before (&i, x, TSI_CONTINUE_LINKING);
838 x = build (FILTER_EXPR, integer_type_node);
839 x = build (MODIFY_EXPR, void_type_node, save_filt, x);
840 tsi_link_before (&i, x, TSI_CONTINUE_LINKING);
/* ... restore it at the tail, then resume the original exception
   via RESX_EXPR for this region.  */
842 i = tsi_last (finally);
843 x = build (EXC_PTR_EXPR, ptr_type_node);
844 x = build (MODIFY_EXPR, void_type_node, x, save_eptr);
845 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
847 x = build (FILTER_EXPR, integer_type_node);
848 x = build (MODIFY_EXPR, void_type_node, x, save_filt);
849 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
851 x = build1 (RESX_EXPR, void_type_node,
852 build_int_cst (NULL_TREE,
853 get_eh_region_number (tf->region)));
854 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
857 /* Wrap the block with protect_cleanup_actions as the action. */
858 if (protect_cleanup_actions)
860 x = build (EH_FILTER_EXPR, void_type_node, NULL, NULL);
861 append_to_statement_list (protect_cleanup_actions, &EH_FILTER_FAILURE (x));
862 EH_FILTER_MUST_NOT_THROW (x) = 1;
863 finally = build (TRY_CATCH_EXPR, void_type_node, finally, x);
864 lower_eh_filter (outer_state, &finally);
/* No filter needed: just lower the duplicated block as-is.  */
867 lower_eh_constructs_1 (outer_state, &finally);
869 /* Hook this up to the end of the existing try block. If we
870 previously fell through the end, we'll have to branch around.
871 This means adding a new goto, and adding it to the queue. */
873 i = tsi_last (TREE_OPERAND (*tf->top_p, 0));
875 if (tf->may_fallthru)
877 x = lower_try_finally_fallthru_label (tf);
878 x = build1 (GOTO_EXPR, void_type_node, x);
879 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
882 maybe_record_in_goto_queue (this_state, x);
884 tf->may_fallthru = false;
/* Place the EH landing-pad label, then this EH-only copy of finally.  */
887 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
888 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
889 tsi_link_after (&i, finally, TSI_CONTINUE_LINKING);
891 /* Having now been handled, EH isn't to be considered with
892 the rest of the outgoing edges. */
893 tf->may_throw = false;
896 /* A subroutine of lower_try_finally. We have determined that there is
897 no fallthru edge out of the finally block. This means that there is
898 no outgoing edge corresponding to any incoming edge. Restructure the
899 try_finally node for this special case. */
902 lower_try_finally_nofallthru (struct leh_state *state, struct leh_tf_state *tf)
904 tree x, finally, lab, return_val;
905 struct goto_queue_node *q, *qe;
910 lab = create_artificial_label ();
/* Detach the finally block; terminate the try body with LAB, the single
   entry point into the finally code.  */
912 finally = TREE_OPERAND (*tf->top_p, 1);
913 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
915 x = build1 (LABEL_EXPR, void_type_node, lab);
916 append_to_statement_list (x, tf->top_p);
/* Redirect every queued return and goto to LAB.  */
920 qe = q + tf->goto_queue_active;
923 do_return_redirection (q, lab, NULL, &return_val);
925 do_goto_redirection (q, lab, NULL);
927 replace_goto_queue (tf);
/* Lower the finally block itself and append it; control never leaves
   out its bottom.  */
929 lower_eh_constructs_1 (state, &finally);
930 append_to_statement_list (finally, tf->top_p);
933 /* A subroutine of lower_try_finally. We have determined that there is
934 exactly one destination of the finally block. Restructure the
935 try_finally node for this special case. */
938 lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
940 struct goto_queue_node *q, *qe;
941 tree x, finally, finally_label;
943 finally = TREE_OPERAND (*tf->top_p, 1);
944 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
946 lower_eh_constructs_1 (state, &finally);
950 /* Only reachable via the exception edge. Add the given label to
951 the head of the FINALLY block. Append a RESX at the end. */
953 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
954 append_to_statement_list (x, tf->top_p);
956 append_to_statement_list (finally, tf->top_p);
958 x = build1 (RESX_EXPR, void_type_node,
959 build_int_cst (NULL_TREE,
960 get_eh_region_number (tf->region)));
961 append_to_statement_list (x, tf->top_p);
966 if (tf->may_fallthru)
968 /* Only reachable via the fallthru edge. Do nothing but let
969 the two blocks run together; we'll fall out the bottom. */
970 append_to_statement_list (finally, tf->top_p);
/* Single goto/return destination: label the finally block and emit it.  */
974 finally_label = create_artificial_label ();
975 x = build1 (LABEL_EXPR, void_type_node, finally_label);
976 append_to_statement_list (x, tf->top_p);
978 append_to_statement_list (finally, tf->top_p);
981 qe = q + tf->goto_queue_active;
985 /* Reachable by return expressions only. Redirect them. */
986 tree return_val = NULL;
988 do_return_redirection (q, finally_label, NULL, &return_val);
989 replace_goto_queue (tf);
993 /* Reachable by goto expressions only. Redirect them. */
995 do_goto_redirection (q, finally_label, NULL);
996 replace_goto_queue (tf);
998 if (VARRAY_TREE (tf->dest_array, 0) == tf->fallthru_label)
1000 /* Reachable by goto to fallthru label only. Redirect it
1001 to the new label (already created, sadly), and do not
1002 emit the final branch out, or the fallthru label. */
1003 tf->fallthru_label = NULL;
/* Re-emit the single continuation statement after the finally block,
   letting it be re-queued if it still escapes.  */
1008 append_to_statement_list (tf->goto_queue[0].cont_stmt, tf->top_p);
1009 maybe_record_in_goto_queue (state, tf->goto_queue[0].cont_stmt);
1012 /* A subroutine of lower_try_finally. There are multiple edges incoming
1013 and outgoing from the finally block. Implement this by duplicating the
1014 finally block for every destination. */
1017 lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
1019 tree finally, new_stmt;
1022 finally = TREE_OPERAND (*tf->top_p, 1);
1023 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
1025 new_stmt = NULL_TREE;
/* One lowered copy of FINALLY for the fallthru edge, ending in a jump
   to the fallthru label.  */
1027 if (tf->may_fallthru)
1029 x = lower_try_finally_dup_block (finally, state);
1030 lower_eh_constructs_1 (state, &x);
1031 append_to_statement_list (x, &new_stmt);
1033 x = lower_try_finally_fallthru_label (tf);
1034 x = build1 (GOTO_EXPR, void_type_node, x);
1035 append_to_statement_list (x, &new_stmt);
/* One copy for the exception edge, headed by the landing-pad label and
   ending in a RESX to rethrow.  */
1040 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
1041 append_to_statement_list (x, &new_stmt);
1043 x = lower_try_finally_dup_block (finally, state);
1044 lower_eh_constructs_1 (state, &x);
1045 append_to_statement_list (x, &new_stmt);
1047 x = build1 (RESX_EXPR, void_type_node,
1048 build_int_cst (NULL_TREE,
1049 get_eh_region_number (tf->region)));
1050 append_to_statement_list (x, &new_stmt);
/* Now the goto/return edges: share one copy per destination, creating
   each destination's label and copy lazily on first use.  */
1055 struct goto_queue_node *q, *qe;
1056 tree return_val = NULL;
/* Returns are folded into a pseudo-destination one past dest_array.  */
1061 return_index = VARRAY_ACTIVE_SIZE (tf->dest_array);
1064 labels = xcalloc (sizeof (tree), return_index + 1);
1067 qe = q + tf->goto_queue_active;
1070 int index = q->index < 0 ? return_index : q->index;
1071 tree lab = labels[index];
1072 bool build_p = false;
1076 labels[index] = lab = create_artificial_label ();
1080 if (index == return_index)
1081 do_return_redirection (q, lab, NULL, &return_val);
1083 do_goto_redirection (q, lab, NULL);
/* First queue entry for this destination: emit label, a fresh lowered
   copy of FINALLY, and the continuation statement.  */
1087 x = build1 (LABEL_EXPR, void_type_node, lab);
1088 append_to_statement_list (x, &new_stmt);
1090 x = lower_try_finally_dup_block (finally, state);
1091 lower_eh_constructs_1 (state, &x);
1092 append_to_statement_list (x, &new_stmt);
1094 append_to_statement_list (q->cont_stmt, &new_stmt);
1095 maybe_record_in_goto_queue (state, q->cont_stmt);
1098 replace_goto_queue (tf);
1102 /* Need to link new stmts after running replace_goto_queue due
1103 to not wanting to process the same goto stmts twice. */
1104 append_to_statement_list (new_stmt, tf->top_p);
1107 /* A subroutine of lower_try_finally. There are multiple edges incoming
1108 and outgoing from the finally block. Implement this by instrumenting
1109 each incoming edge and creating a switch statement at the end of the
1110 finally block that branches to the appropriate destination. */
1113 lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
1115 struct goto_queue_node *q, *qe;
1116 tree return_val = NULL;
1117 tree finally, finally_tmp, finally_label;
1118 int return_index, eh_index, fallthru_index;
1119 int nlabels, ndests, j, last_case_index;
1120 tree case_label_vec, switch_stmt, last_case, switch_body;
1123 /* Mash the TRY block to the head of the chain. */
1124 finally = TREE_OPERAND (*tf->top_p, 1);
1125 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
1127 /* Lower the finally block itself. */
1128 lower_eh_constructs_1 (state, &finally);
1129 /* Prepare for switch statement generation. */
/* Index layout: goto destinations first, then (optionally) one slot
   each for return, exception, and fallthru, in that order.  */
1132 nlabels = VARRAY_ACTIVE_SIZE (tf->dest_array);
1135 return_index = nlabels;
1136 eh_index = return_index + tf->may_return;
1137 fallthru_index = eh_index + tf->may_throw;
1138 ndests = fallthru_index + tf->may_fallthru;
1140 finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
1141 finally_label = create_artificial_label ();
1143 case_label_vec = make_tree_vec (ndests);
1144 switch_stmt = build (SWITCH_EXPR, integer_type_node, finally_tmp,
1145 NULL_TREE, case_label_vec);
1148 last_case_index = 0;
1150 /* Begin inserting code for getting to the finally block. Things
1151 are done in this order to correspond to the sequence the code is
/* Fallthru edge: set finally_tmp, jump into the shared finally block,
   and emit the matching case in the eventual switch body.  */
1154 if (tf->may_fallthru)
1156 x = build (MODIFY_EXPR, void_type_node, finally_tmp,
1157 build_int_cst (NULL_TREE, fallthru_index));
1158 append_to_statement_list (x, tf->top_p);
1162 x = build1 (GOTO_EXPR, void_type_node, finally_label);
1163 append_to_statement_list (x, tf->top_p);
1167 last_case = build (CASE_LABEL_EXPR, void_type_node,
1168 build_int_cst (NULL_TREE, fallthru_index), NULL,
1169 create_artificial_label ());
1170 TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
1173 x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
1174 append_to_statement_list (x, &switch_body);
1176 x = lower_try_finally_fallthru_label (tf);
1177 x = build1 (GOTO_EXPR, void_type_node, x);
1178 append_to_statement_list (x, &switch_body);
/* Exception edge: landing-pad label, set finally_tmp, and a case whose
   body rethrows via RESX.  */
1183 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
1184 append_to_statement_list (x, tf->top_p);
1186 x = build (MODIFY_EXPR, void_type_node, finally_tmp,
1187 build_int_cst (NULL_TREE, eh_index));
1188 append_to_statement_list (x, tf->top_p);
1190 last_case = build (CASE_LABEL_EXPR, void_type_node,
1191 build_int_cst (NULL_TREE, eh_index), NULL,
1192 create_artificial_label ());
1193 TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
1196 x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
1197 append_to_statement_list (x, &switch_body);
1198 x = build1 (RESX_EXPR, void_type_node,
1199 build_int_cst (NULL_TREE,
1200 get_eh_region_number (tf->region)));
1201 append_to_statement_list (x, &switch_body);
/* The single shared copy of the finally block itself.  */
1204 x = build1 (LABEL_EXPR, void_type_node, finally_label);
1205 append_to_statement_list (x, tf->top_p);
1207 append_to_statement_list (finally, tf->top_p);
1209 /* Redirect each incoming goto edge. */
1211 qe = q + tf->goto_queue_active;
1212 j = last_case_index + tf->may_return;
1213 last_case_index += nlabels;
1217 int switch_id, case_index;
/* Each queued return/goto sets finally_tmp to its switch id before
   branching into the finally block.  */
1221 mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
1222 build_int_cst (NULL_TREE, return_index));
1223 do_return_redirection (q, finally_label, mod, &return_val);
1224 switch_id = return_index;
1228 mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
1229 build_int_cst (NULL_TREE, q->index));
1230 do_goto_redirection (q, finally_label, mod);
1231 switch_id = q->index;
/* Build each destination's case lazily, then append the continuation.  */
1234 case_index = j + q->index;
1235 if (!TREE_VEC_ELT (case_label_vec, case_index))
1237 last_case = build (CASE_LABEL_EXPR, void_type_node,
1238 build_int_cst (NULL_TREE, switch_id), NULL,
1239 create_artificial_label ());
1240 TREE_VEC_ELT (case_label_vec, case_index) = last_case;
1242 x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
1243 append_to_statement_list (x, &switch_body);
1244 append_to_statement_list (q->cont_stmt, &switch_body);
1245 maybe_record_in_goto_queue (state, q->cont_stmt);
1248 replace_goto_queue (tf);
1249 last_case_index += nlabels;
1251 /* Make sure that the last case is the default label, as one is required.
1252 Then sort the labels, which is also required in GIMPLE. */
1253 CASE_LOW (last_case) = NULL;
1254 sort_case_labels (case_label_vec);
1256 /* Need to link switch_stmt after running replace_goto_queue due
1257 to not wanting to process the same goto stmts twice. */
1258 append_to_statement_list (switch_stmt, tf->top_p);
1259 append_to_statement_list (switch_body, tf->top_p);
1262 /* Decide whether or not we are going to duplicate the finally block.
1263 There are several considerations.
1265 First, if this is Java, then the finally block contains code
1266 written by the user. It has line numbers associated with it,
1267 so duplicating the block means it's difficult to set a breakpoint.
1268 Since controlling code generation via -g is verboten, we simply
1269 never duplicate code without optimization.
1271 Second, we'd like to prevent egregious code growth. One way to
1272 do this is to estimate the size of the finally block, multiply
1273 that by the number of copies we'd need to make, and compare against
1274 the estimate of the size of the switch machinery we'd have to add. */
/* Returns nonzero to duplicate (lower_try_finally_copy), zero to use
   the switch strategy (lower_try_finally_switch).  */
1277 decide_copy_try_finally (int ndests, tree finally)
1279 int f_estimate, sw_estimate;
1284 /* Finally estimate N times, plus N gotos. */
1285 f_estimate = estimate_num_insns (finally);
1286 f_estimate = (f_estimate + 1) * ndests;
1288 /* Switch statement (cost 10), N variable assignments, N gotos. */
1289 sw_estimate = 10 + 2 * ndests;
1291 /* Optimize for size clearly wants our best guess. */
1293 return f_estimate < sw_estimate;
1295 /* ??? These numbers are completely made up so far. */
1297 return f_estimate < 100 || f_estimate < sw_estimate * 2;
1299 return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
1302 /* A subroutine of lower_eh_constructs_1. Lower a TRY_FINALLY_EXPR nodes
1303 to a sequence of labels and blocks, plus the exception region trees
1304 that record all the magic. This is complicated by the need to
1305 arrange for the FINALLY block to be executed on all exits. */
/* NOTE(review): this listing omits braces, the declaration of NDESTS, and
   some else-arms (e.g. between lines 1377 and 1379); do not infer control
   structure solely from what is shown here.  */
1308 lower_try_finally (struct leh_state *state, tree *tp)
1310 struct leh_tf_state this_tf;
1311 struct leh_state this_state;
1314 /* Process the try block. */
1316 memset (&this_tf, 0, sizeof (this_tf));
1317 this_tf.try_finally_expr = *tp;
1319 this_tf.outer = state;
/* Only wrap the try body in a cleanup EH region when EH is actually used
   for cleanups; otherwise the region stays NULL (line 1324).  */
1320 if (using_eh_for_cleanups_p)
1322 = gen_eh_region_cleanup (state->cur_region, state->prev_try);
1324 this_tf.region = NULL;
1326 this_state.cur_region = this_tf.region;
1327 this_state.prev_try = state->prev_try;
1328 this_state.tf = &this_tf;
1330 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
1332 /* Determine if the try block is escaped through the bottom. */
1333 this_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
1335 /* Determine if any exceptions are possible within the try block. */
1336 if (using_eh_for_cleanups_p)
1337 this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region);
/* If the try block can throw, give the region a landing label and let
   honor_protect_cleanup_actions deal with nested cleanup semantics.  */
1338 if (this_tf.may_throw)
1340 this_tf.eh_label = create_artificial_label ();
1341 set_eh_region_tree_label (this_tf.region, this_tf.eh_label);
1342 honor_protect_cleanup_actions (state, &this_state, &this_tf);
1345 /* Sort the goto queue for efficient searching later. */
1346 if (this_tf.goto_queue_active > 1)
1347 qsort (this_tf.goto_queue, this_tf.goto_queue_active,
1348 sizeof (struct goto_queue_node), goto_queue_cmp);
1350 /* Determine how many edges (still) reach the finally block. Or rather,
1351 how many destinations are reached by the finally block. Use this to
1352 determine how we process the finally block itself. */
1354 if (this_tf.dest_array)
1355 ndests = VARRAY_ACTIVE_SIZE (this_tf.dest_array);
1358 ndests += this_tf.may_fallthru;
1359 ndests += this_tf.may_return;
1360 ndests += this_tf.may_throw;
1362 /* If the FINALLY block is not reachable, dike it out. */
1364 *tp = TREE_OPERAND (*tp, 0);
1366 /* If the finally block doesn't fall through, then any destination
1367 we might try to impose there isn't reached either. There may be
1368 some minor amount of cleanup and redirection still needed. */
1369 else if (!block_may_fallthru (TREE_OPERAND (*tp, 1)))
1370 lower_try_finally_nofallthru (state, &this_tf);
1372 /* We can easily special-case redirection to a single destination. */
1373 else if (ndests == 1)
1374 lower_try_finally_onedest (state, &this_tf);
/* Otherwise, choose between duplicating the finally block per destination
   and emitting switch-based dispatch, based on the size heuristic.  */
1376 else if (decide_copy_try_finally (ndests, TREE_OPERAND (*tp, 1)))
1377 lower_try_finally_copy (state, &this_tf);
1379 lower_try_finally_switch (state, &this_tf);
1381 /* If someone requested we add a label at the end of the transformed
1383 if (this_tf.fallthru_label)
1385 tree x = build1 (LABEL_EXPR, void_type_node, this_tf.fallthru_label);
1386 append_to_statement_list (x, tp);
/* The goto queue was heap-allocated while lowering the try body.  */
1389 if (this_tf.goto_queue)
1390 free (this_tf.goto_queue);
1393 /* A subroutine of lower_eh_constructs_1. Lower a TRY_CATCH_EXPR with a
1394 list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
1395 exception region trees that record all the magic. */
/* NOTE(review): listing is incomplete -- the declaration of out_label, the
   braces, and the tsi_delink/advance step of the catch-handler loop are
   not visible here.  */
1398 lower_catch (struct leh_state *state, tree *tp)
1400 struct eh_region *try_region;
1401 struct leh_state this_state;
1402 tree_stmt_iterator i;
1405 try_region = gen_eh_region_try (state->cur_region);
1406 this_state.cur_region = try_region;
1407 this_state.prev_try = try_region;
1408 this_state.tf = state->tf;
1410 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
/* If nothing inside the try body can throw, the whole construct reduces
   to the body itself.  */
1412 if (!get_eh_region_may_contain_throw (try_region))
1414 *tp = TREE_OPERAND (*tp, 0);
/* Lower each CATCH_EXPR in the handler list: create a catch region, lower
   its body, and prefix it with an artificial landing label.  */
1419 for (i = tsi_start (TREE_OPERAND (*tp, 1)); !tsi_end_p (i); )
1421 struct eh_region *catch_region;
1422 tree catch, x, eh_label;
1424 catch = tsi_stmt (i);
1425 catch_region = gen_eh_region_catch (try_region, CATCH_TYPES (catch));
1427 this_state.cur_region = catch_region;
1428 this_state.prev_try = state->prev_try;
1429 lower_eh_constructs_1 (&this_state, &CATCH_BODY (catch));
1431 eh_label = create_artificial_label ();
1432 set_eh_region_tree_label (catch_region, eh_label);
1434 x = build1 (LABEL_EXPR, void_type_node, eh_label);
1435 tsi_link_before (&i, x, TSI_SAME_STMT);
/* A handler that can fall off its end must branch to the shared exit
   label past all the handlers.  */
1437 if (block_may_fallthru (CATCH_BODY (catch)))
1440 out_label = create_artificial_label ();
1442 x = build1 (GOTO_EXPR, void_type_node, out_label);
1443 append_to_statement_list (x, &CATCH_BODY (catch));
1446 tsi_link_before (&i, CATCH_BODY (catch), TSI_SAME_STMT);
1450 frob_into_branch_around (tp, NULL, out_label);
1453 /* A subroutine of lower_eh_constructs_1. Lower a TRY_CATCH_EXPR with a
1454 EH_FILTER_EXPR to a sequence of labels and blocks, plus the exception
1455 region trees that record all the magic. */
/* NOTE(review): listing is incomplete -- braces, the eh_label declaration,
   and any early return after line 1477 are not visible here.  */
1458 lower_eh_filter (struct leh_state *state, tree *tp)
1460 struct leh_state this_state;
1461 struct eh_region *this_region;
1462 tree inner = expr_first (TREE_OPERAND (*tp, 1));
/* An EH_FILTER with MUST_NOT_THROW becomes a must-not-throw region;
   otherwise it becomes an allowed-exceptions region over its type list.  */
1465 if (EH_FILTER_MUST_NOT_THROW (inner))
1466 this_region = gen_eh_region_must_not_throw (state->cur_region);
1468 this_region = gen_eh_region_allowed (state->cur_region,
1469 EH_FILTER_TYPES (inner));
1470 this_state = *state;
1471 this_state.cur_region = this_region;
1473 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
/* If the body cannot throw, the filter is dead: keep only the body.  */
1475 if (!get_eh_region_may_contain_throw (this_region))
1477 *tp = TREE_OPERAND (*tp, 0);
/* The failure actions are lowered in the OUTER state, not inside the
   filter's own region.  */
1481 lower_eh_constructs_1 (state, &EH_FILTER_FAILURE (inner));
1482 TREE_OPERAND (*tp, 1) = EH_FILTER_FAILURE (inner);
1484 eh_label = create_artificial_label ();
1485 set_eh_region_tree_label (this_region, eh_label);
1487 frob_into_branch_around (tp, eh_label, NULL);
1490 /* Implement a cleanup expression. This is similar to try-finally,
1491 except that we only execute the cleanup block for exception edges. */
/* NOTE(review): listing is incomplete -- braces, returns, and the else
   between the two tails (lines 1536-1539 vs 1543-1550) are not visible.  */
1494 lower_cleanup (struct leh_state *state, tree *tp)
1496 struct leh_state this_state;
1497 struct eh_region *this_region;
1498 struct leh_tf_state fake_tf;
1500 /* If not using eh, then exception-only cleanups are no-ops. */
1501 if (!flag_exceptions)
1503 *tp = TREE_OPERAND (*tp, 0);
1504 lower_eh_constructs_1 (state, tp);
1508 this_region = gen_eh_region_cleanup (state->cur_region, state->prev_try);
1509 this_state = *state;
1510 this_state.cur_region = this_region;
1512 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
/* If the protected body cannot throw, the cleanup is unreachable.  */
1514 if (!get_eh_region_may_contain_throw (this_region))
1516 *tp = TREE_OPERAND (*tp, 0);
1520 /* Build enough of a try-finally state so that we can reuse
1521 honor_protect_cleanup_actions. */
1522 memset (&fake_tf, 0, sizeof (fake_tf));
1524 fake_tf.outer = state;
1525 fake_tf.region = this_region;
1526 fake_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
1527 fake_tf.may_throw = true;
1529 fake_tf.eh_label = create_artificial_label ();
1530 set_eh_region_tree_label (this_region, fake_tf.eh_label);
1532 honor_protect_cleanup_actions (state, NULL, &fake_tf);
/* honor_protect_cleanup_actions clears may_throw when it has consumed
   the cleanup itself; check which case we are in.  */
1534 if (fake_tf.may_throw)
1536 /* In this case honor_protect_cleanup_actions had nothing to do,
1537 and we should process this normally. */
1538 lower_eh_constructs_1 (state, &TREE_OPERAND (*tp, 1));
1539 frob_into_branch_around (tp, fake_tf.eh_label, fake_tf.fallthru_label);
1543 /* In this case honor_protect_cleanup_actions did nearly all of
1544 the work. All we have left is to append the fallthru_label. */
1546 *tp = TREE_OPERAND (*tp, 0);
1547 if (fake_tf.fallthru_label)
1549 tree x = build1 (LABEL_EXPR, void_type_node, fake_tf.fallthru_label);
1550 append_to_statement_list (x, tp);
1555 /* Main loop for lowering eh constructs. */
/* Dispatch on TREE_CODE (*tp) and lower each EH-relevant construct in
   place.  NOTE(review): this listing is incomplete -- the declarations of
   T and OP, most `case` labels, `break`s, and braces are missing; the
   comments below mark which arm each fragment appears to belong to, but
   confirm against the full source.  */
1558 lower_eh_constructs_1 (struct leh_state *state, tree *tp)
1560 tree_stmt_iterator i;
1563 switch (TREE_CODE (t))
/* COND_EXPR arm (presumably): recurse into both branches.  */
1566 lower_eh_constructs_1 (state, &COND_EXPR_THEN (t));
1567 lower_eh_constructs_1 (state, &COND_EXPR_ELSE (t));
1571 /* Look for things that can throw exceptions, and record them. */
1572 if (state->cur_region && tree_could_throw_p (t))
1574 record_stmt_eh_region (state->cur_region, t);
1575 note_eh_region_may_contain_throw (state->cur_region);
/* MODIFY_EXPR arm (presumably): same recording, plus the embedded call.  */
1580 /* Look for things that can throw exceptions, and record them. */
1581 if (state->cur_region && tree_could_throw_p (t))
1585 record_stmt_eh_region (state->cur_region, t);
1586 note_eh_region_may_contain_throw (state->cur_region);
1588 /* ??? For the benefit of calls.c, converting all this to rtl,
1589 we need to record the call expression, not just the outer
1590 modify statement. */
1591 op = get_call_expr_in (t);
1593 record_stmt_eh_region (state->cur_region, op);
/* GOTO_EXPR/RETURN_EXPR arm (presumably): queue for try-finally fixup.  */
1599 maybe_record_in_goto_queue (state, t);
/* SWITCH_EXPR arm (presumably).  */
1602 verify_norecord_switch_expr (state, t);
1605 case TRY_FINALLY_EXPR:
1606 lower_try_finally (state, tp);
/* TRY_CATCH_EXPR: the first statement of operand 1 determines whether
   this is a catch list, an EH filter, or a cleanup.  */
1609 case TRY_CATCH_EXPR:
1610 i = tsi_start (TREE_OPERAND (t, 1));
1611 switch (TREE_CODE (tsi_stmt (i)))
1614 lower_catch (state, tp);
1616 case EH_FILTER_EXPR:
1617 lower_eh_filter (state, tp);
1620 lower_cleanup (state, tp);
1625 case STATEMENT_LIST:
1626 for (i = tsi_start (t); !tsi_end_p (i); )
1628 lower_eh_constructs_1 (state, tsi_stmt_ptr (i));
/* A child statement may have been replaced by a nested list; splice it
   inline rather than leaving a STATEMENT_LIST node in place.  */
1630 if (TREE_CODE (t) == STATEMENT_LIST)
1632 tsi_link_before (&i, t, TSI_SAME_STMT);
1641 /* A type, a decl, or some kind of statement that we're not
1642 interested in. Don't walk them. */
/* Pass entry point: lower all EH constructs in the current function body.
   NOTE(review): return type, braces, and the statements that reset the
   saved EXC_PTR/FILTER decls (after line 1667) are not visible in this
   listing.  */
1648 lower_eh_constructs (void)
1650 struct leh_state null_state;
1651 tree *tp = &DECL_SAVED_TREE (current_function_decl);
/* finally_tree is transient (freed below); throw_stmt_table is
   GC-allocated and survives the pass.  */
1653 finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
1654 throw_stmt_table = htab_create_ggc (31, struct_ptr_hash, struct_ptr_eq,
1657 collect_finally_tree (*tp, NULL);
1659 memset (&null_state, 0, sizeof (null_state));
1660 lower_eh_constructs_1 (&null_state, tp);
1662 htab_delete (finally_tree);
1664 collect_eh_region_array ();
1666 /* Wipe the DECLs we use for saving the EXC_PTR and FILTER_EXPR
1667 to ensure we create new ones for the next function. */
/* Pass descriptor for the EH-lowering pass.  Requires gimple-lcf form,
   provides gimple-leh.  NOTE(review): several initializer fields (name,
   gate, sub/next, properties) are omitted from this listing.  */
1672 struct tree_opt_pass pass_lower_eh =
1676 lower_eh_constructs, /* execute */
1679 0, /* static_pass_number */
1680 TV_TREE_EH, /* tv_id */
1681 PROP_gimple_lcf, /* properties_required */
1682 PROP_gimple_leh, /* properties_provided */
1683 PROP_gimple_lcf, /* properties_destroyed */
1684 0, /* todo_flags_start */
1685 TODO_dump_func, /* todo_flags_finish */
1690 /* Construct EH edges for STMT. */
/* Callback for foreach_reachable_handler: add an abnormal EH edge from
   the block containing DATA (the statement) to the block holding REGION's
   landing label.  NOTE(review): the declarations of LAB and STMT (cast
   from DATA) are not visible in this listing.  */
1693 make_eh_edge (struct eh_region *region, void *data)
1696 basic_block src, dst;
1699 lab = get_eh_region_tree_label (region);
1701 src = bb_for_stmt (stmt);
1702 dst = label_to_block (lab);
1704 make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
/* Create EH edges for STMT: a RESX_EXPR carries its region number as
   operand 0; any other throwing statement is looked up in the EH-region
   table.  NOTE(review): the is_resx declaration and the early return for
   statements outside any region are not visible in this listing.  */
1708 make_eh_edges (tree stmt)
1713 if (TREE_CODE (stmt) == RESX_EXPR)
1715 region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
1720 region_nr = lookup_stmt_eh_region (stmt);
1726 foreach_reachable_handler (region_nr, is_resx, make_eh_edge, stmt);
1731 /* Return true if the expr can trap, as in dereferencing an invalid pointer
1732 location or floating point arithmetic. C.f. the rtl version, may_trap_p.
1733 This routine expects only GIMPLE lhs or rhs input. */
/* NOTE(review): this listing omits the `t', `base' and `idx' declarations,
   several `case' labels and `break'/`return' statements, and the final
   default handling -- the fragments below are annotated with the arm they
   appear to belong to.  */
1736 tree_could_trap_p (tree expr)
1738 enum tree_code code = TREE_CODE (expr);
1739 bool honor_nans = false;
1740 bool honor_snans = false;
1741 bool fp_operation = false;
1742 bool honor_trapv = false;
/* For arithmetic/comparison codes, derive FP/trapping-signed properties
   from the operand type.  */
1745 if (TREE_CODE_CLASS (code) == tcc_comparison
1746 || TREE_CODE_CLASS (code) == tcc_unary
1747 || TREE_CODE_CLASS (code) == tcc_binary)
1749 t = TREE_TYPE (expr);
1750 fp_operation = FLOAT_TYPE_P (t);
1753 honor_nans = flag_trapping_math && !flag_finite_math_only;
1754 honor_snans = flag_signaling_nans != 0;
1756 else if (INTEGRAL_TYPE_P (t) && TYPE_TRAP_SIGNED (t))
1767 case WITH_SIZE_EXPR:
/* Strip WITH_SIZE_EXPR and re-dispatch on the wrapped expression.  */
1768 expr = TREE_OPERAND (expr, 0);
1769 code = TREE_CODE (expr);
1772 case ARRAY_RANGE_REF:
1773 /* Let us be conservative here for now. We might be checking bounds of
1774 the access similarly to the case below. */
1775 if (!TREE_THIS_NOTRAP (expr))
1778 base = TREE_OPERAND (expr, 0);
1779 return tree_could_trap_p (base);
/* ARRAY_REF arm (presumably): trap if the base traps or the constant
   index is out of bounds.  */
1782 base = TREE_OPERAND (expr, 0);
1783 idx = TREE_OPERAND (expr, 1);
1784 if (tree_could_trap_p (base))
1787 if (TREE_THIS_NOTRAP (expr))
1790 return !in_array_bounds_p (expr);
1793 case ALIGN_INDIRECT_REF:
1794 case MISALIGNED_INDIRECT_REF:
1795 return !TREE_THIS_NOTRAP (expr);
/* ASM_EXPR arm (presumably): volatile asm may trap.  */
1798 return TREE_THIS_VOLATILE (expr);
1800 case TRUNC_DIV_EXPR:
1802 case FLOOR_DIV_EXPR:
1803 case ROUND_DIV_EXPR:
1804 case EXACT_DIV_EXPR:
1806 case FLOOR_MOD_EXPR:
1807 case ROUND_MOD_EXPR:
1808 case TRUNC_MOD_EXPR:
/* Division/modulus traps for signaling NaNs, trapping-signed overflow,
   trapping FP math, and non-constant or zero divisors.  */
1810 if (honor_snans || honor_trapv)
1812 if (fp_operation && flag_trapping_math)
1814 t = TREE_OPERAND (expr, 1);
1815 if (!TREE_CONSTANT (t) || integer_zerop (t))
1824 /* Some floating point comparisons may trap. */
1829 case UNORDERED_EXPR:
1839 case FIX_TRUNC_EXPR:
1841 case FIX_FLOOR_EXPR:
1842 case FIX_ROUND_EXPR:
1843 /* Conversion of floating point might trap. */
1849 /* These operations don't trap with floating point. */
1857 /* Any floating arithmetic may trap. */
1858 if (fp_operation && flag_trapping_math)
1865 /* Any floating arithmetic may trap. */
1866 if (fp_operation && flag_trapping_math)
/* Return true if statement T could throw an exception: a call without
   ECF_NOTHROW, or (with -fnon-call-exceptions) any trapping expression.
   Returns false immediately when exceptions are disabled.
   NOTE(review): return type, braces, and the final return are not visible
   in this listing.  */
1873 tree_could_throw_p (tree t)
1875 if (!flag_exceptions)
/* For an assignment, check the store destination first, then analyze the
   right-hand side.  */
1877 if (TREE_CODE (t) == MODIFY_EXPR)
1879 if (flag_non_call_exceptions
1880 && tree_could_trap_p (TREE_OPERAND (t, 0)))
1882 t = TREE_OPERAND (t, 1);
1885 if (TREE_CODE (t) == WITH_SIZE_EXPR)
1886 t = TREE_OPERAND (t, 0);
1887 if (TREE_CODE (t) == CALL_EXPR)
1888 return (call_expr_flags (t) & ECF_NOTHROW) == 0;
1889 if (flag_non_call_exceptions)
1890 return tree_could_trap_p (t);
/* Return true if STMT can throw to a handler within this function.
   NOTE(review): the return type and the guard for STMT not being in any
   EH region (region_nr < 0, presumably) are not visible in this
   listing.  */
1895 tree_can_throw_internal (tree stmt)
1897 int region_nr = lookup_stmt_eh_region (stmt);
1900 return can_throw_internal_1 (region_nr);
/* Return true if STMT can throw out of this function entirely.
   NOTE(review): the return type and the guard for STMT not being in any
   EH region (region_nr < 0, presumably) are not visible in this
   listing.  */
1904 tree_can_throw_external (tree stmt)
1906 int region_nr = lookup_stmt_eh_region (stmt);
1909 return can_throw_external_1 (region_nr);
/* If STMT can no longer throw, remove its EH-region annotation.
   NOTE(review): the return type and return statements are not visible in
   this listing; presumably the result reports whether an entry was
   actually removed -- confirm against the full source.  */
1913 maybe_clean_eh_stmt (tree stmt)
1915 if (!tree_could_throw_p (stmt))
1916 if (remove_stmt_from_eh_region (stmt))
1921 #include "gt-tree-eh.h"