/* Exception handling semantics and decomposition for trees.
- Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
+ Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
Free Software Foundation, Inc.
This file is part of GCC.
#define verify_norecord_switch_expr(state, switch_expr)
#endif
-/* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB. Place in CONT_P
- whatever is needed to finish the return. If MOD is non-null, insert it
- before the new branch. RETURN_VALUE_P is a cache containing a temporary
- variable to be used in manipulating the value returned from the function. */
+/* Redirect a RETURN_EXPR pointed to by Q to FINLAB. If MOD is
+ non-null, insert it before the new branch. */
static void
-do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
- tree *return_value_p)
+do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod)
{
- tree ret_expr;
gimple x;
- /* In the case of a return, the queue node must be a gimple statement. */
+ /* In the case of a return, the queue node must be a gimple statement. */
gcc_assert (!q->is_label);
- ret_expr = gimple_return_retval (q->stmt.g);
+ /* Note that the return value may have already been computed, e.g.,
- if (ret_expr)
- {
- if (!*return_value_p)
- *return_value_p = ret_expr;
- else
- gcc_assert (*return_value_p == ret_expr);
- q->cont_stmt = q->stmt.g;
- /* The nasty part about redirecting the return value is that the
- return value itself is to be computed before the FINALLY block
- is executed. e.g.
-
- int x;
- int foo (void)
- {
- x = 0;
- try {
- return x;
- } finally {
- x++;
- }
- }
-
- should return 0, not 1. Arrange for this to happen by copying
- computed the return value into a local temporary. This also
- allows us to redirect multiple return statements through the
- same destination block; whether this is a net win or not really
- depends, I guess, but it does make generation of the switch in
- lower_try_finally_switch easier. */
-
- if (TREE_CODE (ret_expr) == RESULT_DECL)
+ int x;
+ int foo (void)
{
- if (!*return_value_p)
- *return_value_p = ret_expr;
- else
- gcc_assert (*return_value_p == ret_expr);
- q->cont_stmt = q->stmt.g;
+ x = 0;
+ try {
+ return x;
+ } finally {
+ x++;
+ }
}
- else
- gcc_unreachable ();
- }
- else
- /* If we don't return a value, all return statements are the same. */
- q->cont_stmt = q->stmt.g;
+
+     should return 0, not 1.  We don't have to do anything to make
+     this happen because the return value has been placed in the
+     RESULT_DECL already.  */
+
+ q->cont_stmt = q->stmt.g;
if (!q->repl_stmt)
q->repl_stmt = gimple_seq_alloc ();
static void
note_eh_region_may_contain_throw (eh_region region)
{
- while (!bitmap_bit_p (eh_region_may_contain_throw_map, region->index))
+ while (bitmap_set_bit (eh_region_may_contain_throw_map, region->index))
{
- bitmap_set_bit (eh_region_may_contain_throw_map, region->index);
+ if (region->type == ERT_MUST_NOT_THROW)
+ break;
region = region->outer;
if (region == NULL)
break;
lower_try_finally_nofallthru (struct leh_state *state,
struct leh_tf_state *tf)
{
- tree lab, return_val;
+ tree lab;
gimple x;
gimple_seq finally;
struct goto_queue_node *q, *qe;
x = gimple_build_label (lab);
gimple_seq_add_stmt (&tf->top_p_seq, x);
- return_val = NULL;
q = tf->goto_queue;
qe = q + tf->goto_queue_active;
for (; q < qe; ++q)
if (q->index < 0)
- do_return_redirection (q, lab, NULL, &return_val);
+ do_return_redirection (q, lab, NULL);
else
do_goto_redirection (q, lab, NULL, tf);
if (tf->may_return)
{
/* Reachable by return expressions only. Redirect them. */
- tree return_val = NULL;
for (; q < qe; ++q)
- do_return_redirection (q, finally_label, NULL, &return_val);
+ do_return_redirection (q, finally_label, NULL);
replace_goto_queue (tf);
}
else
if (tf->goto_queue)
{
struct goto_queue_node *q, *qe;
- tree return_val = NULL;
int return_index, index;
struct labels_s
{
= create_artificial_label (tf_loc);
if (index == return_index)
- do_return_redirection (q, lab, NULL, &return_val);
+ do_return_redirection (q, lab, NULL);
else
do_goto_redirection (q, lab, NULL, tf);
lab = labels[index].label;
if (index == return_index)
- do_return_redirection (q, lab, NULL, &return_val);
+ do_return_redirection (q, lab, NULL);
else
do_goto_redirection (q, lab, NULL, tf);
}
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
struct goto_queue_node *q, *qe;
- tree return_val = NULL;
tree finally_tmp, finally_label;
int return_index, eh_index, fallthru_index;
int nlabels, ndests, j, last_case_index;
if (tf->may_fallthru)
{
x = gimple_build_assign (finally_tmp,
- build_int_cst (NULL, fallthru_index));
+ build_int_cst (integer_type_node,
+ fallthru_index));
gimple_seq_add_stmt (&tf->top_p_seq, x);
- last_case = build3 (CASE_LABEL_EXPR, void_type_node,
- build_int_cst (NULL, fallthru_index),
- NULL, create_artificial_label (tf_loc));
+ tmp = build_int_cst (integer_type_node, fallthru_index);
+ last_case = build_case_label (tmp, NULL,
+ create_artificial_label (tf_loc));
VEC_quick_push (tree, case_label_vec, last_case);
last_case_index++;
emit_post_landing_pad (&eh_seq, tf->region);
x = gimple_build_assign (finally_tmp,
- build_int_cst (NULL, eh_index));
+ build_int_cst (integer_type_node, eh_index));
gimple_seq_add_stmt (&eh_seq, x);
x = gimple_build_goto (finally_label);
gimple_seq_add_stmt (&eh_seq, x);
- last_case = build3 (CASE_LABEL_EXPR, void_type_node,
- build_int_cst (NULL, eh_index),
- NULL, create_artificial_label (tf_loc));
+ tmp = build_int_cst (integer_type_node, eh_index);
+ last_case = build_case_label (tmp, NULL,
+ create_artificial_label (tf_loc));
VEC_quick_push (tree, case_label_vec, last_case);
last_case_index++;
if (q->index < 0)
{
x = gimple_build_assign (finally_tmp,
- build_int_cst (NULL, return_index));
+ build_int_cst (integer_type_node,
+ return_index));
gimple_seq_add_stmt (&mod, x);
- do_return_redirection (q, finally_label, mod, &return_val);
+ do_return_redirection (q, finally_label, mod);
switch_id = return_index;
}
else
{
x = gimple_build_assign (finally_tmp,
- build_int_cst (NULL, q->index));
+ build_int_cst (integer_type_node, q->index));
gimple_seq_add_stmt (&mod, x);
do_goto_redirection (q, finally_label, mod, tf);
switch_id = q->index;
{
tree case_lab;
void **slot;
- case_lab = build3 (CASE_LABEL_EXPR, void_type_node,
- build_int_cst (NULL, switch_id),
- NULL, NULL);
+ tmp = build_int_cst (integer_type_node, switch_id);
+ case_lab = build_case_label (tmp, NULL,
+ create_artificial_label (tf_loc));
/* We store the cont_stmt in the pointer map, so that we can recover
- it in the loop below. We don't create the new label while
- walking the goto_queue because pointers don't offer a stable
- order. */
+ it in the loop below. */
if (!cont_map)
cont_map = pointer_map_create ();
slot = pointer_map_insert (cont_map, case_lab);
}
for (j = last_case_index; j < last_case_index + nlabels; j++)
{
- tree label;
gimple cont_stmt;
void **slot;
gcc_assert (cont_map);
slot = pointer_map_contains (cont_map, last_case);
- /* As the comment above suggests, CASE_LABEL (last_case) was just a
- placeholder, it does not store an actual label, yet. */
gcc_assert (slot);
cont_stmt = *(gimple *) slot;
- label = create_artificial_label (tf_loc);
- CASE_LABEL (last_case) = label;
-
- x = gimple_build_label (label);
+ x = gimple_build_label (CASE_LABEL (last_case));
gimple_seq_add_stmt (&switch_body, x);
gimple_seq_add_stmt (&switch_body, cont_stmt);
maybe_record_in_goto_queue (state, cont_stmt);
}
VEC_free (tree, heap, this_tf.dest_array);
- if (this_tf.goto_queue)
- free (this_tf.goto_queue);
+ free (this_tf.goto_queue);
if (this_tf.goto_queue_map)
pointer_map_destroy (this_tf.goto_queue_map);
this zero argument with the current catch region number. */
if (state->ehp_region)
{
- tree nr = build_int_cst (NULL, state->ehp_region->index);
+ tree nr = build_int_cst (integer_type_node,
+ state->ehp_region->index);
gimple_call_set_arg (stmt, 0, nr);
}
else
PROP_gimple_leh, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func /* todo_flags_finish */
+ 0 /* todo_flags_finish */
}
};
\f
return true;
return false;
+ case COMPLEX_EXPR:
+ case CONSTRUCTOR:
+ /* Constructing an object cannot trap. */
+ return false;
+
default:
/* Any floating arithmetic may trap. */
if (fp_operation && flag_trapping_math)
switch (code)
{
case TARGET_MEM_REF:
- if (TMR_SYMBOL (expr)
- && !TMR_INDEX (expr))
+ if (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR
+ && !TMR_INDEX (expr) && !TMR_INDEX2 (expr))
return false;
return !TREE_THIS_NOTRAP (expr);
return false;
/* Fallthru. */
case INDIRECT_REF:
- case MISALIGNED_INDIRECT_REF:
return !TREE_THIS_NOTRAP (expr);
case ASM_EXPR:
case CALL_EXPR:
t = get_callee_fndecl (expr);
/* Assume that calls to weak functions may trap. */
- if (!t || !DECL_P (t) || DECL_WEAK (t))
+ if (!t || !DECL_P (t))
return true;
+ if (DECL_WEAK (t))
+ return tree_could_trap_p (t);
+ return false;
+
+ case FUNCTION_DECL:
+ /* Assume that accesses to weak functions may trap, unless we know
+ they are certainly defined in current TU or in some other
+ LTO partition. */
+ if (DECL_WEAK (expr))
+ {
+ struct cgraph_node *node;
+ if (!DECL_EXTERNAL (expr))
+ return false;
+ node = cgraph_function_node (cgraph_get_node (expr), NULL);
+ if (node && node->in_other_partition)
+ return false;
+ return true;
+ }
+ return false;
+
+ case VAR_DECL:
+ /* Assume that accesses to weak vars may trap, unless we know
+ they are certainly defined in current TU or in some other
+ LTO partition. */
+ if (DECL_WEAK (expr))
+ {
+ struct varpool_node *node;
+ if (!DECL_EXTERNAL (expr))
+ return false;
+ node = varpool_variable_node (varpool_get_node (expr), NULL);
+ if (node && node->in_other_partition)
+ return false;
+ return true;
+ }
return false;
default:
|| TREE_CODE_CLASS (code) == tcc_unary
|| TREE_CODE_CLASS (code) == tcc_binary)
{
- t = gimple_expr_type (stmt);
+ if (is_gimple_assign (stmt)
+ && TREE_CODE_CLASS (code) == tcc_comparison)
+ t = TREE_TYPE (gimple_assign_rhs1 (stmt));
+ else if (gimple_code (stmt) == GIMPLE_COND)
+ t = TREE_TYPE (gimple_cond_lhs (stmt));
+ else
+ t = gimple_expr_type (stmt);
fp_operation = FLOAT_TYPE_P (t);
if (fp_operation)
{
|| gimple_call_lhs (twos)
|| gimple_call_chain (ones)
|| gimple_call_chain (twos)
- || !operand_equal_p (gimple_call_fn (ones), gimple_call_fn (twos), 0)
+ || !gimple_call_same_target_p (ones, twos)
|| gimple_call_num_args (ones) != gimple_call_num_args (twos))
return false;
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func /* todo_flags_finish */
+ 0 /* todo_flags_finish */
}
};
\f
Resolve this by expanding the resx node to an abort. */
- fn = implicit_built_in_decls[BUILT_IN_TRAP];
+ fn = builtin_decl_implicit (BUILT_IN_TRAP);
x = gimple_build_call (fn, 0);
gsi_insert_before (&gsi, x, GSI_SAME_STMT);
else
{
edge_iterator ei;
- tree dst_nr = build_int_cst (NULL, dst_r->index);
+ tree dst_nr = build_int_cst (integer_type_node, dst_r->index);
- fn = implicit_built_in_decls[BUILT_IN_EH_COPY_VALUES];
- src_nr = build_int_cst (NULL, src_r->index);
+ fn = builtin_decl_implicit (BUILT_IN_EH_COPY_VALUES);
+ src_nr = build_int_cst (integer_type_node, src_r->index);
x = gimple_build_call (fn, 2, dst_nr, src_nr);
gsi_insert_before (&gsi, x, GSI_SAME_STMT);
with no arguments for C++ and Java. Check for that. */
if (src_r->use_cxa_end_cleanup)
{
- fn = implicit_built_in_decls[BUILT_IN_CXA_END_CLEANUP];
+ fn = builtin_decl_implicit (BUILT_IN_CXA_END_CLEANUP);
x = gimple_build_call (fn, 0);
gsi_insert_before (&gsi, x, GSI_SAME_STMT);
}
else
{
- fn = implicit_built_in_decls[BUILT_IN_EH_POINTER];
- src_nr = build_int_cst (NULL, src_r->index);
+ fn = builtin_decl_implicit (BUILT_IN_EH_POINTER);
+ src_nr = build_int_cst (integer_type_node, src_r->index);
x = gimple_build_call (fn, 1, src_nr);
var = create_tmp_var (ptr_type_node, NULL);
var = make_ssa_name (var, x);
gimple_call_set_lhs (x, var);
gsi_insert_before (&gsi, x, GSI_SAME_STMT);
- fn = implicit_built_in_decls[BUILT_IN_UNWIND_RESUME];
+ fn = builtin_decl_implicit (BUILT_IN_UNWIND_RESUME);
x = gimple_build_call (fn, 1, var);
gsi_insert_before (&gsi, x, GSI_SAME_STMT);
}
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func | TODO_verify_flow /* todo_flags_finish */
+ TODO_verify_flow /* todo_flags_finish */
}
};
blocks at the end of this pass. */
if (! pointer_set_contains (seen_values, TREE_VALUE (flt_node)))
{
- tree t = build3 (CASE_LABEL_EXPR, void_type_node,
- TREE_VALUE (flt_node), NULL, lab);
+ tree t = build_case_label (TREE_VALUE (flt_node),
+ NULL, lab);
VEC_safe_push (tree, heap, labels, t);
pointer_set_insert (seen_values, TREE_VALUE (flt_node));
have_label = true;
}
else
{
- fn = implicit_built_in_decls[BUILT_IN_EH_FILTER];
- x = gimple_build_call (fn, 1, build_int_cst (NULL, region_nr));
+ fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
+ x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
+ region_nr));
filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)), NULL);
filter = make_ssa_name (filter, x);
gimple_call_set_lhs (x, filter);
gsi_insert_before (&gsi, x, GSI_SAME_STMT);
/* Turn the default label into a default case. */
- default_label = build3 (CASE_LABEL_EXPR, void_type_node,
- NULL, NULL, default_label);
+ default_label = build_case_label (NULL, NULL, default_label);
sort_case_labels (labels);
x = gimple_build_switch_vec (filter, default_label, labels);
edge b_e = BRANCH_EDGE (src);
edge f_e = FALLTHRU_EDGE (src);
- fn = implicit_built_in_decls[BUILT_IN_EH_FILTER];
- x = gimple_build_call (fn, 1, build_int_cst (NULL, region_nr));
+ fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
+ x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
+ region_nr));
filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)), NULL);
filter = make_ssa_name (filter, x);
gimple_call_set_lhs (x, filter);
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func | TODO_verify_flow /* todo_flags_finish */
+ TODO_verify_flow /* todo_flags_finish */
}
};
\f
FOR_EACH_BB (bb)
{
- gimple_stmt_iterator gsi = gsi_start_bb (bb);
+ gimple_stmt_iterator gsi;
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
SET_BIT (r_reachable, region->index);
SET_BIT (lp_reachable, lp_nr);
}
+
+ /* Avoid removing regions referenced from RESX/EH_DISPATCH. */
+ switch (gimple_code (stmt))
+ {
+ case GIMPLE_RESX:
+ SET_BIT (r_reachable, gimple_resx_region (stmt));
+ break;
+ case GIMPLE_EH_DISPATCH:
+ SET_BIT (r_reachable, gimple_eh_dispatch_region (stmt));
+ break;
+ default:
+ break;
+ }
}
}
/* If we did find the corresponding PHI, copy those inputs. */
if (ophi)
{
+ /* If NOP is used somewhere else beyond phis in new_bb, give up. */
+ if (!has_single_use (nop))
+ {
+ imm_use_iterator imm_iter;
+ use_operand_p use_p;
+
+ FOR_EACH_IMM_USE_FAST (use_p, imm_iter, nop)
+ {
+ if (!gimple_debug_bind_p (USE_STMT (use_p))
+ && (gimple_code (USE_STMT (use_p)) != GIMPLE_PHI
+ || gimple_bb (USE_STMT (use_p)) != new_bb))
+ goto fail;
+ }
+ }
bitmap_set_bit (ophi_handled, SSA_NAME_VERSION (nop));
FOR_EACH_EDGE (e, ei, old_bb->preds)
{
{
gimple_stmt_iterator gsi;
tree lab;
+ edge_iterator ei;
+ edge e;
/* We really ought not have totally lost everything following
a landing pad label. Given that BB is empty, there had better
return false;
}
+ /* The destination block must not be a regular successor for any
+ of the preds of the landing pad. Thus, avoid turning
+ <..>
+ | \ EH
+ | <..>
+ | /
+ <..>
+ into
+ <..>
+ | | EH
+ <..>
+ which CFG verification would choke on. See PR45172. */
+ FOR_EACH_EDGE (e, ei, bb->preds)
+ if (find_edge (e->src, e_out->dest))
+ return false;
+
/* Attempt to move the PHIs into the successor block. */
if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, false))
{
return false;
}
+/* Return true if edge E_FIRST is part of an empty infinite loop
+   or leads to such a loop through a series of single successor
+   empty bbs.  Uses the bb->aux field as a visited marker; the
+   field is restored to NULL for every block touched before
+   returning.  */
+
+static bool
+infinite_empty_loop_p (edge e_first)
+{
+  bool inf_loop = false;
+  edge e;
+
+  /* Trivial self-loop: the edge's destination is its own source.  */
+  if (e_first->dest == e_first->src)
+    return true;
+
+  /* Mark the starting block and walk the chain of single-successor
+     blocks, flagging each visited block via its aux field.  */
+  e_first->src->aux = (void *) 1;
+  for (e = e_first; single_succ_p (e->dest); e = single_succ_edge (e->dest))
+    {
+      gimple_stmt_iterator gsi;
+      /* Revisiting a marked block means the chain closed into a cycle.  */
+      if (e->dest->aux)
+	{
+	  inf_loop = true;
+	  break;
+	}
+      e->dest->aux = (void *) 1;
+      /* The block must be empty apart from labels and debug stmts;
+	 any real statement ends the walk.  */
+      gsi = gsi_after_labels (e->dest);
+      if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
+	gsi_next_nondebug (&gsi);
+      if (!gsi_end_p (gsi))
+	break;
+    }
+  /* Clear all visited markers so aux is NULL again on exit.  */
+  e_first->src->aux = NULL;
+  for (e = e_first; e->dest->aux; e = single_succ_edge (e->dest))
+    e->dest->aux = NULL;
+
+  return inf_loop;
+}
+
/* Examine the block associated with LP to determine if it's an empty
handler for its EH region. If so, attempt to redirect EH edges to
an outer region. Return true the CFG was updated in any way. This
/* If the block is totally empty, look for more unsplitting cases. */
if (gsi_end_p (gsi))
- return cleanup_empty_eh_unsplit (bb, e_out, lp);
+ {
+ /* For the degenerate case of an infinite loop bail out. */
+ if (infinite_empty_loop_p (e_out))
+ return false;
- /* The block should consist only of a single RESX statement. */
+ return cleanup_empty_eh_unsplit (bb, e_out, lp);
+ }
+
+ /* The block should consist only of a single RESX statement, modulo a
+ preceding call to __builtin_stack_restore if there is no outgoing
+ edge, since the call can be eliminated in this case. */
resx = gsi_stmt (gsi);
+ if (!e_out && gimple_call_builtin_p (resx, BUILT_IN_STACK_RESTORE))
+ {
+ gsi_next (&gsi);
+ resx = gsi_stmt (gsi);
+ }
if (!is_gimple_resx (resx))
return false;
gcc_assert (gsi_one_before_end_p (gsi));
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func /* todo_flags_finish */
+ 0 /* todo_flags_finish */
}
};
\f