/* Exception handling semantics and decomposition for trees.
- Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
+ Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
Free Software Foundation, Inc.
This file is part of GCC.
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
-#include "rtl.h"
-#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "except.h"
#include "timevar.h"
#include "langhooks.h"
#include "ggc.h"
-#include "toplev.h"
+#include "diagnostic-core.h"
#include "gimple.h"
#include "target.h"
gcc_assert (num != 0);
- n = GGC_NEW (struct throw_stmt_node);
+ n = ggc_alloc_throw_stmt_node ();
n->stmt = t;
n->lp_nr = num;
static void
note_eh_region_may_contain_throw (eh_region region)
{
- while (!bitmap_bit_p (eh_region_may_contain_throw_map, region->index))
+ while (bitmap_set_bit (eh_region_may_contain_throw_map, region->index))
{
- bitmap_set_bit (eh_region_may_contain_throw_map, region->index);
region = region->outer;
if (region == NULL)
break;
return label;
}
-/* A subroutine of lower_try_finally. If lang_protect_cleanup_actions
- returns non-null, then the language requires that the exception path out
- of a try_finally be treated specially. To wit: the code within the
- finally block may not itself throw an exception. We have two choices here.
- First we can duplicate the finally block and wrap it in a must_not_throw
- region. Second, we can generate code like
+/* A subroutine of lower_try_finally. If the eh_protect_cleanup_actions
+ langhook returns non-null, then the language requires that the exception
+ path out of a try_finally be treated specially. To wit: the code within
+ the finally block may not itself throw an exception. We have two choices
+ here. First we can duplicate the finally block and wrap it in a
+ must_not_throw region. Second, we can generate code like
try {
finally_block;
gimple x;
/* First check for nothing to do. */
- if (lang_protect_cleanup_actions == NULL)
+ if (lang_hooks.eh_protect_cleanup_actions == NULL)
return;
- protect_cleanup_actions = lang_protect_cleanup_actions ();
+ protect_cleanup_actions = lang_hooks.eh_protect_cleanup_actions ();
if (protect_cleanup_actions == NULL)
return;
return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
}
+/* Given REG, the innermost enclosing region of a prospective cleanup
+   region (or that region itself), return TRUE when such a cleanup
+   could never actually run.
+
+   A cleanup nested (possibly through further cleanups) inside a
+   must-not-throw region is dead code: if anything within it throws,
+   the personality routine calls terminate before any unwinding
+   reaches the cleanup.  */
+
+static bool
+cleanup_is_dead_in (eh_region reg)
+{
+  eh_region r;
+
+  /* Step outward past any directly nested cleanup regions.  */
+  for (r = reg; r != NULL && r->type == ERT_CLEANUP; r = r->outer)
+    continue;
+
+  return r != NULL && r->type == ERT_MUST_NOT_THROW;
+}
/* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY_FINALLY nodes
to a sequence of labels and blocks, plus the exception region trees
this_tf.try_finally_expr = tp;
this_tf.top_p = tp;
this_tf.outer = state;
- if (using_eh_for_cleanups_p)
- this_tf.region = gen_eh_region_cleanup (state->cur_region);
+ if (using_eh_for_cleanups_p && !cleanup_is_dead_in (state->cur_region))
+ {
+ this_tf.region = gen_eh_region_cleanup (state->cur_region);
+ this_state.cur_region = this_tf.region;
+ }
else
- this_tf.region = NULL;
+ {
+ this_tf.region = NULL;
+ this_state.cur_region = state->cur_region;
+ }
- this_state.cur_region = this_tf.region;
this_state.ehp_region = state->ehp_region;
this_state.tf = &this_tf;
this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
/* Determine if any exceptions are possible within the try block. */
- if (using_eh_for_cleanups_p)
+ if (this_tf.region)
this_tf.may_throw = eh_region_may_contain_throw (this_tf.region);
if (this_tf.may_throw)
honor_protect_cleanup_actions (state, &this_state, &this_tf);
eh_region this_region = NULL;
struct leh_tf_state fake_tf;
gimple_seq result;
+ bool cleanup_dead = cleanup_is_dead_in (state->cur_region);
- if (flag_exceptions)
+ if (flag_exceptions && !cleanup_dead)
{
this_region = gen_eh_region_cleanup (state->cur_region);
this_state.cur_region = this_region;
lower_eh_constructs_1 (&this_state, gimple_try_eval (tp));
- if (!eh_region_may_contain_throw (this_region))
+ if (cleanup_dead || !eh_region_may_contain_throw (this_region))
return gimple_try_eval (tp);
/* Build enough of a try-finally state so that we can reuse
else
{
      /* The user has done something silly.  Remove it.  */
- rhs = build_int_cst (ptr_type_node, 0);
+ rhs = null_pointer_node;
goto do_replace;
}
break;
return true;
return false;
+ case COMPLEX_EXPR:
+ case CONSTRUCTOR:
+ /* Constructing an object cannot trap. */
+ return false;
+
default:
/* Any floating arithmetic may trap. */
if (fp_operation && flag_trapping_math)
switch (code)
{
case TARGET_MEM_REF:
- /* For TARGET_MEM_REFs use the information based on the original
- reference. */
- expr = TMR_ORIGINAL (expr);
- code = TREE_CODE (expr);
- goto restart;
+ if (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR
+ && !TMR_INDEX (expr) && !TMR_INDEX2 (expr))
+ return false;
+ return !TREE_THIS_NOTRAP (expr);
case COMPONENT_REF:
case REALPART_EXPR:
return false;
return !in_array_bounds_p (expr);
+ case MEM_REF:
+ if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
+ return false;
+ /* Fallthru. */
case INDIRECT_REF:
- case ALIGN_INDIRECT_REF:
- case MISALIGNED_INDIRECT_REF:
return !TREE_THIS_NOTRAP (expr);
case ASM_EXPR:
case GIMPLE_ASSIGN:
case GIMPLE_COND:
- if (!flag_non_call_exceptions)
+ if (!cfun->can_throw_non_call_exceptions)
return false;
return stmt_could_throw_1_p (stmt);
case GIMPLE_ASM:
- if (!flag_non_call_exceptions)
+ if (!cfun->can_throw_non_call_exceptions)
return false;
return gimple_asm_volatile_p (stmt);
return false;
if (TREE_CODE (t) == MODIFY_EXPR)
{
- if (flag_non_call_exceptions
+ if (cfun->can_throw_non_call_exceptions
&& tree_could_trap_p (TREE_OPERAND (t, 0)))
return true;
t = TREE_OPERAND (t, 1);
t = TREE_OPERAND (t, 0);
if (TREE_CODE (t) == CALL_EXPR)
return (call_expr_flags (t) & ECF_NOTHROW) == 0;
- if (flag_non_call_exceptions)
+ if (cfun->can_throw_non_call_exceptions)
return tree_could_trap_p (t);
return false;
}
/* If we did find the corresponding PHI, copy those inputs. */
if (ophi)
{
+ /* If NOP is used somewhere else beyond phis in new_bb, give up. */
+ if (!has_single_use (nop))
+ {
+ imm_use_iterator imm_iter;
+ use_operand_p use_p;
+
+ FOR_EACH_IMM_USE_FAST (use_p, imm_iter, nop)
+ {
+ if (!gimple_debug_bind_p (USE_STMT (use_p))
+ && (gimple_code (USE_STMT (use_p)) != GIMPLE_PHI
+ || gimple_bb (USE_STMT (use_p)) != new_bb))
+ goto fail;
+ }
+ }
bitmap_set_bit (ophi_handled, SSA_NAME_VERSION (nop));
FOR_EACH_EDGE (e, ei, old_bb->preds)
{
{
gimple_stmt_iterator gsi;
tree lab;
+ edge_iterator ei;
+ edge e;
/* We really ought not have totally lost everything following
a landing pad label. Given that BB is empty, there had better
return false;
}
+ /* The destination block must not be a regular successor for any
+ of the preds of the landing pad. Thus, avoid turning
+ <..>
+ | \ EH
+ | <..>
+ | /
+ <..>
+ into
+ <..>
+ | | EH
+ <..>
+ which CFG verification would choke on. See PR45172. */
+ FOR_EACH_EDGE (e, ei, bb->preds)
+ if (find_edge (e->src, e_out->dest))
+ return false;
+
/* Attempt to move the PHIs into the successor block. */
if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, false))
{
return false;
}
+/* Return true if edge E_FIRST is part of an empty infinite loop
+   or leads to such a loop through a series of single successor
+   empty bbs.  */
+
+static bool
+infinite_empty_loop_p (edge e_first)
+{
+  bool inf_loop = false;
+  edge e;
+
+  /* A self-edge is trivially such a loop.  */
+  if (e_first->dest == e_first->src)
+    return true;
+
+  /* Walk the chain of single-successor blocks, marking each visited
+     block through its AUX field.  Reaching an already-marked block
+     means the chain closed back on itself through empty blocks only.  */
+  e_first->src->aux = (void *) 1;
+  for (e = e_first; single_succ_p (e->dest); e = single_succ_edge (e->dest))
+    {
+      gimple_stmt_iterator gsi;
+      if (e->dest->aux)
+	{
+	  inf_loop = true;
+	  break;
+	}
+      e->dest->aux = (void *) 1;
+      /* A block counts as empty if it holds nothing but labels and
+	 debug stmts; anything else ends the chain.  */
+      gsi = gsi_after_labels (e->dest);
+      if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
+	gsi_next_nondebug (&gsi);
+      if (!gsi_end_p (gsi))
+	break;
+    }
+  /* Undo the AUX markings made above; the second walk stops at the
+     first block the marking loop never reached.  */
+  e_first->src->aux = NULL;
+  for (e = e_first; e->dest->aux; e = single_succ_edge (e->dest))
+    e->dest->aux = NULL;
+
+  return inf_loop;
+}
+
/* Examine the block associated with LP to determine if it's an empty
handler for its EH region. If so, attempt to redirect EH edges to
an outer region. Return true the CFG was updated in any way. This
/* If the block is totally empty, look for more unsplitting cases. */
if (gsi_end_p (gsi))
- return cleanup_empty_eh_unsplit (bb, e_out, lp);
+ {
+ /* For the degenerate case of an infinite loop bail out. */
+ if (infinite_empty_loop_p (e_out))
+ return false;
+
+ return cleanup_empty_eh_unsplit (bb, e_out, lp);
+ }
/* The block should consist only of a single RESX statement. */
resx = gsi_stmt (gsi);
*/
static unsigned int
-execute_cleanup_eh (void)
+execute_cleanup_eh_1 (void)
{
/* Do this first: unsplit_all_eh and cleanup_all_empty_eh can die
looking up unreachable landing pads. */
return 0;
}
+/* Wrapper around execute_cleanup_eh_1 that additionally drops the
+   function's EH personality routine when cleanup has left nothing
+   that requires the language-specific personality.  Clearing it
+   exposes cross-language inlining opportunities and avoids emitting
+   references to a personality routine that is never defined.  */
+
+static unsigned int
+execute_cleanup_eh (void)
+{
+  tree fndecl = current_function_decl;
+  int changed = execute_cleanup_eh_1 ();
+
+  if (DECL_FUNCTION_PERSONALITY (fndecl) != NULL_TREE
+      && function_needs_eh_personality (cfun) != eh_personality_lang)
+    DECL_FUNCTION_PERSONALITY (fndecl) = NULL_TREE;
+
+  return changed;
+}
+
static bool
gate_cleanup_eh (void)
{
/* Verify that BB containing STMT as the last statement, has precisely the
edge that make_eh_edges would create. */
-bool
+DEBUG_FUNCTION bool
verify_eh_edges (gimple stmt)
{
basic_block bb = gimple_bb (stmt);
/* Similarly, but handle GIMPLE_EH_DISPATCH specifically. */
-bool
+DEBUG_FUNCTION bool
verify_eh_dispatch_edge (gimple stmt)
{
eh_region r;