/* Exception handling semantics and decomposition for trees.
- Copyright (C) 2003 Free Software Foundation, Inc.
+ Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
This file is part of GCC.
#include "langhooks.h"
#include "ggc.h"
-/* HACK */
-extern int using_eh_for_cleanups_p;
+\f
+/* Nonzero if we are using EH to handle cleanups. */
+static int using_eh_for_cleanups_p = 0;
+
+/* Enable the use of EH regions for running cleanup actions; the
+   lowering code consults using_eh_for_cleanups_p to decide.  Called
+   by front ends that want cleanups run via exception handling.  */
+void
+using_eh_for_cleanups (void)
+{
+ using_eh_for_cleanups_p = 1;
+}
\f
/* Misc functions used in this file. */
we get to rtl. Once we're done with lowering here, if we lose
the information there's no way to recover it!
- (2) There are many more statements that *cannot* throw as
+ (2) There are many more statements that *cannot* throw as
compared to those that can. We should be saving some amount
of space by only allocating memory for those that can throw. */
n->region_nr = get_eh_region_number (region);
slot = htab_find_slot (throw_stmt_table, n, INSERT);
- if (*slot)
- abort ();
+ gcc_assert (!*slot);
*slot = n;
}
struct throw_stmt_node *n;
void **slot;
- if (num < 0)
- abort ();
+ gcc_assert (num >= 0);
n = ggc_alloc (sizeof (*n));
n->stmt = t;
n->region_nr = num;
slot = htab_find_slot (throw_stmt_table, n, INSERT);
- if (*slot)
- abort ();
+ gcc_assert (!*slot);
*slot = n;
}
-
+
+/* Remove statement T from the hash table that maps statements to EH
+   regions.  Return true if an entry for T was found and cleared,
+   false if the table has not been created or T has no entry.  Uses
+   NO_INSERT so a miss does not grow the table.  */
+bool
+remove_stmt_from_eh_region (tree t)
+{
+ struct throw_stmt_node dummy;
+ void **slot;
+
+ if (!throw_stmt_table)
+ return false;
+
+ dummy.stmt = t;
+ slot = htab_find_slot (throw_stmt_table, &dummy, NO_INSERT);
+ if (slot)
+ {
+ htab_clear_slot (throw_stmt_table, slot);
+ return true;
+ }
+ else
+ return false;
+}
+
int
lookup_stmt_eh_region (tree t)
{
n->parent = parent;
slot = htab_find_slot (finally_tree, n, INSERT);
- if (*slot)
- abort ();
+ gcc_assert (!*slot);
*slot = n;
}
struct leh_state
{
- /* What's "current" while constructing the eh region tree. These
+ /* What's "current" while constructing the eh region tree. These
correspond to variables of the same name in cfun->eh, which we
don't have easy access to. */
struct eh_region *cur_region;
struct leh_tf_state
{
/* Pointer to the TRY_FINALLY node under discussion. The try_finally_expr
- is the original TRY_FINALLY_EXPR. We need to retain this so that
+ is the original TRY_FINALLY_EXPR. We need to retain this so that
outside_finally_tree can reliably reference the tree used in the
collect_finally_tree data structures. */
tree try_finally_expr;
though subsequent transformations may have cleared that flag. */
tree fallthru_label;
- /* A label that has been registered with except.c to be the
+ /* A label that has been registered with except.c to be the
landing pad for this try block. */
tree eh_label;
static void lower_eh_filter (struct leh_state *, tree *);
static void lower_eh_constructs_1 (struct leh_state *, tree *);
-/* Comparison function for qsort/bsearch. We're interested in
+/* Comparison function for qsort/bsearch. We're interested in
searching goto queue elements for source statements. */
static int
tsi_link_after (tsi, new, TSI_CONTINUE_LINKING);
}
-/* The real work of replace_goto_queue. Returns with TSI updated to
+/* The real work of replace_goto_queue. Returns with TSI updated to
point to the next statement. */
static void replace_goto_queue_stmt_list (tree, struct leh_tf_state *);
break;
case STATEMENT_LIST:
- abort ();
+ gcc_unreachable ();
default:
/* These won't have gotos in them. */
static void
replace_goto_queue (struct leh_tf_state *tf)
{
+ if (tf->goto_queue_active == 0)
+ return;
replace_goto_queue_stmt_list (*tf->top_p, tf);
}
{
tree lab = GOTO_DESTINATION (stmt);
- /* Computed and non-local gotos do not get processed. Given
+ /* Computed and non-local gotos do not get processed. Given
their nature we can neither tell whether we've escaped the
finally block nor redirect them if we knew. */
if (TREE_CODE (lab) != LABEL_DECL)
/* No need to record gotos that don't leave the try block. */
if (! outside_finally_tree (lab, tf->try_finally_expr))
return;
-
+
if (! tf->dest_array)
{
VARRAY_TREE_INIT (tf->dest_array, 10, "dest_array");
break;
default:
- abort ();
+ gcc_unreachable ();
}
active = tf->goto_queue_active;
q = &tf->goto_queue[active];
tf->goto_queue_active = active + 1;
-
+
memset (q, 0, sizeof (*q));
q->stmt = stmt;
q->index = index;
for (i = 0; i < n; ++i)
{
tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
- if (outside_finally_tree (lab, tf->try_finally_expr))
- abort ();
+ gcc_assert (!outside_finally_tree (lab, tf->try_finally_expr));
}
}
#else
/* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB. Place in CONT_P
whatever is needed to finish the return. If MOD is non-null, insert it
before the new branch. RETURN_VALUE_P is a cache containing a temporary
- variable to be used in manipulating the value returned from the function. */
+ variable to be used in manipulating the value returned from the function. */
static void
do_return_redirection (struct goto_queue_node *q, tree finlab, tree mod,
depends, I guess, but it does make generation of the switch in
lower_try_finally_switch easier. */
- if (TREE_CODE (ret_expr) == RESULT_DECL)
+ switch (TREE_CODE (ret_expr))
{
+ case RESULT_DECL:
if (!*return_value_p)
*return_value_p = ret_expr;
- else if (*return_value_p != ret_expr)
- abort ();
- q->cont_stmt = q->stmt;
- }
- else if (TREE_CODE (ret_expr) == MODIFY_EXPR)
- {
- tree result = TREE_OPERAND (ret_expr, 0);
- tree new, old = TREE_OPERAND (ret_expr, 1);
-
- if (!*return_value_p)
- {
- if (aggregate_value_p (TREE_TYPE (result),
- TREE_TYPE (current_function_decl)))
- /* If this function returns in memory, copy the argument
- into the return slot now. Otherwise, we might need to
- worry about magic return semantics, so we need to use a
- temporary to hold the value until we're actually ready
- to return. */
- new = result;
- else
- new = create_tmp_var (TREE_TYPE (old), "rettmp");
- *return_value_p = new;
- }
else
- new = *return_value_p;
+ gcc_assert (*return_value_p == ret_expr);
+ q->cont_stmt = q->stmt;
+ break;
- x = build (MODIFY_EXPR, TREE_TYPE (new), new, old);
- append_to_statement_list (x, &q->repl_stmt);
+ case MODIFY_EXPR:
+ {
+ tree result = TREE_OPERAND (ret_expr, 0);
+ tree new, old = TREE_OPERAND (ret_expr, 1);
+
+ if (!*return_value_p)
+ {
+ if (aggregate_value_p (TREE_TYPE (result),
+ TREE_TYPE (current_function_decl)))
+ /* If this function returns in memory, copy the argument
+ into the return slot now. Otherwise, we might need to
+ worry about magic return semantics, so we need to use a
+ temporary to hold the value until we're actually ready
+ to return. */
+ new = result;
+ else
+ new = create_tmp_var (TREE_TYPE (old), "rettmp");
+ *return_value_p = new;
+ }
+ else
+ new = *return_value_p;
+
+ x = build (MODIFY_EXPR, TREE_TYPE (new), new, old);
+ append_to_statement_list (x, &q->repl_stmt);
+
+ if (new == result)
+ x = result;
+ else
+ x = build (MODIFY_EXPR, TREE_TYPE (result), result, new);
+ q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
+ }
- if (new == result)
- x = result;
- else
- x = build (MODIFY_EXPR, TREE_TYPE (result), result, new);
- q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
+ default:
+ gcc_unreachable ();
}
- else
- abort ();
}
else
{
{
tree region = NULL;
- t = lhd_unsave_expr_now (t);
+ t = unsave_expr_now (t);
if (outer_state->tf)
region = outer_state->tf->try_finally_expr;
label = create_artificial_label ();
tf->fallthru_label = label;
if (tf->outer->tf)
- record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
+ record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
}
return label;
}
where "fintmp" is the temporary used in the switch statement generation
alternative considered below. For the nonce, we always choose the first
- option.
+ option.
- THIS_STATE may be null if if this is a try-cleanup, not a try-finally. */
+ THIS_STATE may be null if this is a try-cleanup, not a try-finally. */
static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
x = build1 (RESX_EXPR, void_type_node,
- build_int_2 (get_eh_region_number (tf->region), 0));
+ build_int_cst (NULL_TREE,
+ get_eh_region_number (tf->region)));
tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
}
append_to_statement_list (x, tf->top_p);
append_to_statement_list (finally, tf->top_p);
-
+
x = build1 (RESX_EXPR, void_type_node,
- build_int_2 (get_eh_region_number (tf->region), 0));
+ build_int_cst (NULL_TREE,
+ get_eh_region_number (tf->region)));
append_to_statement_list (x, tf->top_p);
return;
for (; q < qe; ++q)
do_goto_redirection (q, finally_label, NULL);
replace_goto_queue (tf);
-
+
if (VARRAY_TREE (tf->dest_array, 0) == tf->fallthru_label)
{
/* Reachable by goto to fallthru label only. Redirect it
append_to_statement_list (x, &new_stmt);
x = build1 (RESX_EXPR, void_type_node,
- build_int_2 (get_eh_region_number (tf->region), 0));
+ build_int_cst (NULL_TREE,
+ get_eh_region_number (tf->region)));
append_to_statement_list (x, &new_stmt);
}
if (tf->may_fallthru)
{
x = build (MODIFY_EXPR, void_type_node, finally_tmp,
- build_int_2 (fallthru_index, 0));
+ build_int_cst (NULL_TREE, fallthru_index));
append_to_statement_list (x, tf->top_p);
if (tf->may_throw)
last_case = build (CASE_LABEL_EXPR, void_type_node,
- build_int_2 (fallthru_index, 0), NULL,
+ build_int_cst (NULL_TREE, fallthru_index), NULL,
create_artificial_label ());
TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
last_case_index++;
append_to_statement_list (x, tf->top_p);
x = build (MODIFY_EXPR, void_type_node, finally_tmp,
- build_int_2 (eh_index, 0));
+ build_int_cst (NULL_TREE, eh_index));
append_to_statement_list (x, tf->top_p);
last_case = build (CASE_LABEL_EXPR, void_type_node,
- build_int_2 (eh_index, 0), NULL,
+ build_int_cst (NULL_TREE, eh_index), NULL,
create_artificial_label ());
TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
last_case_index++;
x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
append_to_statement_list (x, &switch_body);
x = build1 (RESX_EXPR, void_type_node,
- build_int_2 (get_eh_region_number (tf->region), 0));
+ build_int_cst (NULL_TREE,
+ get_eh_region_number (tf->region)));
append_to_statement_list (x, &switch_body);
}
if (q->index < 0)
{
mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
- build_int_2 (return_index, 0));
+ build_int_cst (NULL_TREE, return_index));
do_return_redirection (q, finally_label, mod, &return_val);
switch_id = return_index;
}
else
{
mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
- build_int_2 (q->index, 0));
+ build_int_cst (NULL_TREE, q->index));
do_goto_redirection (q, finally_label, mod);
switch_id = q->index;
}
if (!TREE_VEC_ELT (case_label_vec, case_index))
{
last_case = build (CASE_LABEL_EXPR, void_type_node,
- build_int_2 (switch_id, 0), NULL,
+ build_int_cst (NULL_TREE, switch_id), NULL,
create_artificial_label ());
TREE_VEC_ELT (case_label_vec, case_index) = last_case;
/* ??? These numbers are completely made up so far. */
if (optimize > 1)
- return f_estimate < 100 || f_estimate * 2 < sw_estimate;
+ return f_estimate < 100 || f_estimate < sw_estimate * 2;
else
- return f_estimate < 40 || f_estimate * 3 < sw_estimate * 2;
+ return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
}
/* A subroutine of lower_eh_constructs_1. Lower a TRY_FINALLY_EXPR nodes
to a sequence of labels and blocks, plus the exception region trees
- that record all the magic. This is complicated by the need to
+ that record all the magic. This is complicated by the need to
arrange for the FINALLY block to be executed on all exits. */
static void
}
/* A subroutine of lower_eh_constructs_1. Lower a TRY_CATCH_EXPR with a
- list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
+ list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
exception region trees that record all the magic. */
static void
struct eh_region *this_region;
tree inner = expr_first (TREE_OPERAND (*tp, 1));
tree eh_label;
-
+
if (EH_FILTER_MUST_NOT_THROW (inner))
this_region = gen_eh_region_must_not_throw (state->cur_region);
else
EH_FILTER_TYPES (inner));
this_state = *state;
this_state.cur_region = this_region;
-
+
lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
if (!get_eh_region_may_contain_throw (this_region))
/* Look for things that can throw exceptions, and record them. */
if (state->cur_region && tree_could_throw_p (t))
{
+ tree op;
+
record_stmt_eh_region (state->cur_region, t);
note_eh_region_may_contain_throw (state->cur_region);
- /* ??? For the benefit of calls.c, converting all this to rtl,
+ /* ??? For the benefit of calls.c, converting all this to rtl,
we need to record the call expression, not just the outer
modify statement. */
- if (TREE_CODE (TREE_OPERAND (t, 1)) == CALL_EXPR)
- record_stmt_eh_region (state->cur_region, TREE_OPERAND (t, 1));
+ op = get_call_expr_in (t);
+ if (op)
+ record_stmt_eh_region (state->cur_region, op);
}
break;
tree *tp = &DECL_SAVED_TREE (current_function_decl);
finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
- throw_stmt_table = htab_create_ggc (31, struct_ptr_hash, struct_ptr_eq, free);
+ throw_stmt_table = htab_create_ggc (31, struct_ptr_hash, struct_ptr_eq,
+ ggc_free);
collect_finally_tree (*tp, NULL);
collect_eh_region_array ();
}
-struct tree_opt_pass pass_lower_eh =
+struct tree_opt_pass pass_lower_eh =
{
"eh", /* name */
NULL, /* gate */
PROP_gimple_leh, /* properties_provided */
PROP_gimple_lcf, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func /* todo_flags_finish */
+ TODO_dump_func, /* todo_flags_finish */
+ 0 /* letter */
};
\f
make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
}
-
+
void
make_eh_edges (tree stmt)
{
\f
-/* Return true if the expr can trap, as in dereferencing an
- invalid pointer location. */
+/* Return true if the expr can trap, as in dereferencing an invalid pointer
+ location or floating point arithmetic.  Cf. the rtl version, may_trap_p.
+ This routine expects only GIMPLE lhs or rhs input. */
bool
tree_could_trap_p (tree expr)
{
enum tree_code code = TREE_CODE (expr);
- tree t;
+ bool honor_nans = false;
+ bool honor_snans = false;
+ bool fp_operation = false;
+ bool honor_trapv = false;
+ tree t, base;
+
+ if (TREE_CODE_CLASS (code) == tcc_comparison
+ || TREE_CODE_CLASS (code) == tcc_unary
+ || TREE_CODE_CLASS (code) == tcc_binary)
+ {
+ t = TREE_TYPE (expr);
+ fp_operation = FLOAT_TYPE_P (t);
+ if (fp_operation)
+ {
+ honor_nans = flag_trapping_math && !flag_finite_math_only;
+ honor_snans = flag_signaling_nans != 0;
+ }
+ else if (INTEGRAL_TYPE_P (t) && TYPE_TRAP_SIGNED (t))
+ honor_trapv = true;
+ }
+ restart:
switch (code)
{
- case ARRAY_REF:
case COMPONENT_REF:
case REALPART_EXPR:
case IMAGPART_EXPR:
case BIT_FIELD_REF:
- t = get_base_address (expr);
- return !t || TREE_CODE (t) == INDIRECT_REF;
+ case WITH_SIZE_EXPR:
+ expr = TREE_OPERAND (expr, 0);
+ code = TREE_CODE (expr);
+ goto restart;
+
+ case ARRAY_RANGE_REF:
+ /* Let us be conservative here for now. We might be checking bounds of
+ the access similarly to the case below. */
+ if (!TREE_THIS_NOTRAP (expr))
+ return true;
+
+ base = TREE_OPERAND (expr, 0);
+ return tree_could_trap_p (base);
+
+ case ARRAY_REF:
+ base = TREE_OPERAND (expr, 0);
+ if (tree_could_trap_p (base))
+ return true;
+
+ if (TREE_THIS_NOTRAP (expr))
+ return false;
+
+ return !in_array_bounds_p (expr);
case INDIRECT_REF:
+ case ALIGN_INDIRECT_REF:
+ case MISALIGNED_INDIRECT_REF:
+ return !TREE_THIS_NOTRAP (expr);
+
+ case ASM_EXPR:
+ return TREE_THIS_VOLATILE (expr);
+
case TRUNC_DIV_EXPR:
case CEIL_DIV_EXPR:
case FLOOR_DIV_EXPR:
case FLOOR_MOD_EXPR:
case ROUND_MOD_EXPR:
case TRUNC_MOD_EXPR:
- return true;
+ case RDIV_EXPR:
+ if (honor_snans || honor_trapv)
+ return true;
+ if (fp_operation && flag_trapping_math)
+ return true;
+ t = TREE_OPERAND (expr, 1);
+ if (!TREE_CONSTANT (t) || integer_zerop (t))
+ return true;
+ return false;
+
+ case LT_EXPR:
+ case LE_EXPR:
+ case GT_EXPR:
+ case GE_EXPR:
+ case LTGT_EXPR:
+ /* Some floating point comparisons may trap. */
+ return honor_nans;
+
+ case EQ_EXPR:
+ case NE_EXPR:
+ case UNORDERED_EXPR:
+ case ORDERED_EXPR:
+ case UNLT_EXPR:
+ case UNLE_EXPR:
+ case UNGT_EXPR:
+ case UNGE_EXPR:
+ case UNEQ_EXPR:
+ return honor_snans;
+
+ case CONVERT_EXPR:
+ case FIX_TRUNC_EXPR:
+ case FIX_CEIL_EXPR:
+ case FIX_FLOOR_EXPR:
+ case FIX_ROUND_EXPR:
+ /* Conversion of floating point might trap. */
+ return honor_nans;
+
+ case NEGATE_EXPR:
+ case ABS_EXPR:
+ case CONJ_EXPR:
+ /* These operations don't trap with floating point. */
+ if (honor_trapv)
+ return true;
+ return false;
+
+ case PLUS_EXPR:
+ case MINUS_EXPR:
+ case MULT_EXPR:
+ /* Any floating arithmetic may trap. */
+ if (fp_operation && flag_trapping_math)
+ return true;
+ if (honor_trapv)
+ return true;
+ return false;
+
+ case CALL_EXPR:
+ t = get_callee_fndecl (expr);
+ /* Assume that calls to weak functions may trap. */
+ if (!t || !DECL_P (t) || DECL_WEAK (t))
+ return true;
+ return false;
default:
- break;
+ /* Any floating arithmetic may trap. */
+ if (fp_operation && flag_trapping_math)
+ return true;
+ return false;
}
-
- return false;
}
-
bool
tree_could_throw_p (tree t)
{
return false;
if (TREE_CODE (t) == MODIFY_EXPR)
{
- tree sub = TREE_OPERAND (t, 1);
- if (TREE_CODE (sub) == CALL_EXPR)
- t = sub;
- else
- {
- if (flag_non_call_exceptions)
- {
- if (tree_could_trap_p (sub))
- return true;
- return tree_could_trap_p (TREE_OPERAND (t, 0));
- }
- return false;
- }
+ if (flag_non_call_exceptions
+ && tree_could_trap_p (TREE_OPERAND (t, 0)))
+ return true;
+ t = TREE_OPERAND (t, 1);
}
+ if (TREE_CODE (t) == WITH_SIZE_EXPR)
+ t = TREE_OPERAND (t, 0);
if (TREE_CODE (t) == CALL_EXPR)
return (call_expr_flags (t) & ECF_NOTHROW) == 0;
-
+ if (flag_non_call_exceptions)
+ return tree_could_trap_p (t);
return false;
}
return can_throw_external_1 (region_nr);
}
+/* If statement STMT can no longer throw (per tree_could_throw_p),
+   drop its entry from the statement-to-EH-region table.  Return true
+   if an entry was actually removed, false otherwise.  */
+bool
+maybe_clean_eh_stmt (tree stmt)
+{
+ if (!tree_could_throw_p (stmt))
+ if (remove_stmt_from_eh_region (stmt))
+ return true;
+ return false;
+}
+
+
#include "gt-tree-eh.h"