/* Exception handling semantics and decomposition for trees.
- Copyright (C) 2003 Free Software Foundation, Inc.
+ Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
This file is part of GCC.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to
-the Free Software Foundation, 59 Temple Place - Suite 330,
-Boston, MA 02111-1307, USA. */
+the Free Software Foundation, 51 Franklin Street, Fifth Floor,
+Boston, MA 02110-1301, USA. */
#include "config.h"
#include "system.h"
#include "timevar.h"
#include "langhooks.h"
#include "ggc.h"
+#include "toplev.h"
-/* HACK */
-extern int using_eh_for_cleanups_p;
+\f
+/* Nonzero if we are using EH to handle cleanups. */
+static int using_eh_for_cleanups_p = 0;
+
+/* Record that EH regions should be used to implement cleanup actions;
+   simply raises the file-local flag using_eh_for_cleanups_p.  */
+void
+using_eh_for_cleanups (void)
+{
+ using_eh_for_cleanups_p = 1;
+}
\f
/* Misc functions used in this file. */
/* Compare and hash for any structure which begins with a canonical
- pointer. Assumes all pointers are interchangable, which is sort
+ pointer. Assumes all pointers are interchangeable, which is sort
of already assumed by gcc elsewhere IIRC. */
static int
we get to rtl. Once we're done with lowering here, if we lose
the information there's no way to recover it!
- (2) There are many more statements that *cannot* throw as
+ (2) There are many more statements that *cannot* throw as
compared to those that can. We should be saving some amount
of space by only allocating memory for those that can throw. */
-struct throw_stmt_node GTY(())
+/* Associate statement T with eh REGION by recording its region number;
+   a no-op when REGION is null.  */
+static void
+record_stmt_eh_region (struct eh_region *region, tree t)
{
- tree stmt;
- int region_nr;
-};
+ if (!region)
+ return;
-static GTY((param_is (struct throw_stmt_node))) htab_t throw_stmt_table;
+ add_stmt_to_eh_region (t, get_eh_region_number (region));
+}
-static void
-record_stmt_eh_region (struct eh_region *region, tree t)
+/* Record that statement T in function IFUN belongs to EH region NUM.
+   Creates IFUN's throw-stmt hash table on first use; the CALL_EXPR
+   contained in a MODIFY_EXPR is recorded as well.  */
+void
+add_stmt_to_eh_region_fn (struct function *ifun, tree t, int num)
{
struct throw_stmt_node *n;
void **slot;
- if (!region)
- return;
+ gcc_assert (num >= 0);
+ gcc_assert (TREE_CODE (t) != RESX_EXPR);
n = ggc_alloc (sizeof (*n));
n->stmt = t;
- n->region_nr = get_eh_region_number (region);
+ n->region_nr = num;
+
+ if (!get_eh_throw_stmt_table (ifun))
+ set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,
+ struct_ptr_eq,
+ ggc_free));
- slot = htab_find_slot (throw_stmt_table, n, INSERT);
- if (*slot)
- abort ();
+ slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
+ gcc_assert (!*slot);
*slot = n;
+ /* ??? For the benefit of calls.c, converting all this to rtl,
+ we need to record the call expression, not just the outer
+ modify statement. */
+ if (TREE_CODE (t) == MODIFY_EXPR
+ && (t = get_call_expr_in (t)))
+ add_stmt_to_eh_region_fn (ifun, t, num);
}
+/* As add_stmt_to_eh_region_fn, but for the current function.  */
void
add_stmt_to_eh_region (tree t, int num)
{
- struct throw_stmt_node *n;
+ add_stmt_to_eh_region_fn (cfun, t, num);
+}
+
+/* Remove statement T in function IFUN from its EH region table,
+   returning true if it was found there; the contained CALL_EXPR of
+   a MODIFY_EXPR is removed as well.  */
+bool
+remove_stmt_from_eh_region_fn (struct function *ifun, tree t)
+{
+ struct throw_stmt_node dummy;
void **slot;
- if (num < 0)
- abort ();
+ if (!get_eh_throw_stmt_table (ifun))
+ return false;
- n = ggc_alloc (sizeof (*n));
- n->stmt = t;
- n->region_nr = num;
+ dummy.stmt = t;
+ slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
+ NO_INSERT);
+ if (slot)
+ {
+ htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
+ /* ??? For the benefit of calls.c, converting all this to rtl,
+ we need to record the call expression, not just the outer
+ modify statement. */
+ if (TREE_CODE (t) == MODIFY_EXPR
+ && (t = get_call_expr_in (t)))
+ remove_stmt_from_eh_region_fn (ifun, t);
+ return true;
+ }
+ else
+ return false;
+}
- slot = htab_find_slot (throw_stmt_table, n, INSERT);
- if (*slot)
- abort ();
- *slot = n;
+/* As remove_stmt_from_eh_region_fn, but for the current function.  */
+bool
+remove_stmt_from_eh_region (tree t)
+{
+ return remove_stmt_from_eh_region_fn (cfun, t);
}
-
+
+/* Return the EH region number recorded for statement T in function
+   IFUN: -2 when IFUN has no throw-stmt table, -1 when T is absent.  */
int
-lookup_stmt_eh_region (tree t)
+lookup_stmt_eh_region_fn (struct function *ifun, tree t)
{
struct throw_stmt_node *p, n;
- if (!throw_stmt_table)
+ if (!get_eh_throw_stmt_table (ifun))
return -2;
n.stmt = t;
- p = htab_find (throw_stmt_table, &n);
+ p = htab_find (get_eh_throw_stmt_table (ifun), &n);
return (p ? p->region_nr : -1);
}
+/* As lookup_stmt_eh_region_fn, but for the current function; returns
+   -1 when no function context is available.  */
+int
+lookup_stmt_eh_region (tree t)
+{
+ /* We can get called from initialized data when -fnon-call-exceptions
+ is on; prevent crash. */
+ if (!cfun)
+ return -1;
+ return lookup_stmt_eh_region_fn (cfun, t);
+}
\f
/* First pass of EH node decomposition. Build up a tree of TRY_FINALLY_EXPR
n->parent = parent;
slot = htab_find_slot (finally_tree, n, INSERT);
- if (*slot)
- abort ();
+ gcc_assert (!*slot);
*slot = n;
}
struct leh_state
{
- /* What's "current" while constructing the eh region tree. These
+ /* What's "current" while constructing the eh region tree. These
correspond to variables of the same name in cfun->eh, which we
don't have easy access to. */
struct eh_region *cur_region;
struct leh_tf_state
{
/* Pointer to the TRY_FINALLY node under discussion. The try_finally_expr
- is the original TRY_FINALLY_EXPR. We need to retain this so that
+ is the original TRY_FINALLY_EXPR. We need to retain this so that
outside_finally_tree can reliably reference the tree used in the
collect_finally_tree data structures. */
tree try_finally_expr;
size_t goto_queue_active;
/* The set of unique labels seen as entries in the goto queue. */
- varray_type dest_array;
+ VEC(tree,heap) *dest_array;
/* A label to be added at the end of the completed transformed
sequence. It will be set if may_fallthru was true *at one time*,
though subsequent transformations may have cleared that flag. */
tree fallthru_label;
- /* A label that has been registered with except.c to be the
+ /* A label that has been registered with except.c to be the
landing pad for this try block. */
tree eh_label;
static void lower_eh_filter (struct leh_state *, tree *);
static void lower_eh_constructs_1 (struct leh_state *, tree *);
-/* Comparison function for qsort/bsearch. We're interested in
+/* Comparison function for qsort/bsearch. We're interested in
searching goto queue elements for source statements. */
static int
tsi_link_after (tsi, new, TSI_CONTINUE_LINKING);
}
-/* The real work of replace_goto_queue. Returns with TSI updated to
+/* The real work of replace_goto_queue. Returns with TSI updated to
point to the next statement. */
static void replace_goto_queue_stmt_list (tree, struct leh_tf_state *);
break;
case STATEMENT_LIST:
- abort ();
+ gcc_unreachable ();
default:
/* These won't have gotos in them. */
static void
replace_goto_queue (struct leh_tf_state *tf)
{
+ if (tf->goto_queue_active == 0)
+ return;
replace_goto_queue_stmt_list (*tf->top_p, tf);
}
{
tree lab = GOTO_DESTINATION (stmt);
- /* Computed and non-local gotos do not get processed. Given
+ /* Computed and non-local gotos do not get processed. Given
their nature we can neither tell whether we've escaped the
finally block nor redirect them if we knew. */
if (TREE_CODE (lab) != LABEL_DECL)
/* No need to record gotos that don't leave the try block. */
if (! outside_finally_tree (lab, tf->try_finally_expr))
return;
-
+
if (! tf->dest_array)
{
- VARRAY_TREE_INIT (tf->dest_array, 10, "dest_array");
- VARRAY_PUSH_TREE (tf->dest_array, lab);
+ tf->dest_array = VEC_alloc (tree, heap, 10);
+ VEC_quick_push (tree, tf->dest_array, lab);
index = 0;
}
else
{
- int n = VARRAY_ACTIVE_SIZE (tf->dest_array);
+ int n = VEC_length (tree, tf->dest_array);
for (index = 0; index < n; ++index)
- if (VARRAY_TREE (tf->dest_array, index) == lab)
+ if (VEC_index (tree, tf->dest_array, index) == lab)
break;
if (index == n)
- VARRAY_PUSH_TREE (tf->dest_array, lab);
+ VEC_safe_push (tree, heap, tf->dest_array, lab);
}
}
break;
break;
default:
- abort ();
+ gcc_unreachable ();
}
active = tf->goto_queue_active;
q = &tf->goto_queue[active];
tf->goto_queue_active = active + 1;
-
+
memset (q, 0, sizeof (*q));
q->stmt = stmt;
q->index = index;
for (i = 0; i < n; ++i)
{
tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
- if (outside_finally_tree (lab, tf->try_finally_expr))
- abort ();
+ gcc_assert (!outside_finally_tree (lab, tf->try_finally_expr));
}
}
#else
depends, I guess, but it does make generation of the switch in
lower_try_finally_switch easier. */
- if (TREE_CODE (ret_expr) == RESULT_DECL)
+ switch (TREE_CODE (ret_expr))
{
+ case RESULT_DECL:
if (!*return_value_p)
*return_value_p = ret_expr;
- else if (*return_value_p != ret_expr)
- abort ();
- q->cont_stmt = q->stmt;
- }
- else if (TREE_CODE (ret_expr) == MODIFY_EXPR)
- {
- tree result = TREE_OPERAND (ret_expr, 0);
- tree new, old = TREE_OPERAND (ret_expr, 1);
-
- if (!*return_value_p)
- {
- if (aggregate_value_p (TREE_TYPE (result),
- TREE_TYPE (current_function_decl)))
- /* If this function returns in memory, copy the argument
- into the return slot now. Otherwise, we might need to
- worry about magic return semantics, so we need to use a
- temporary to hold the value until we're actually ready
- to return. */
- new = result;
- else
- new = create_tmp_var (TREE_TYPE (old), "rettmp");
- *return_value_p = new;
- }
else
- new = *return_value_p;
+ gcc_assert (*return_value_p == ret_expr);
+ q->cont_stmt = q->stmt;
+ break;
- x = build (MODIFY_EXPR, TREE_TYPE (new), new, old);
- append_to_statement_list (x, &q->repl_stmt);
+ case MODIFY_EXPR:
+ {
+ tree result = TREE_OPERAND (ret_expr, 0);
+ tree new, old = TREE_OPERAND (ret_expr, 1);
+
+ if (!*return_value_p)
+ {
+ if (aggregate_value_p (TREE_TYPE (result),
+ TREE_TYPE (current_function_decl)))
+ /* If this function returns in memory, copy the argument
+ into the return slot now. Otherwise, we might need to
+ worry about magic return semantics, so we need to use a
+ temporary to hold the value until we're actually ready
+ to return. */
+ new = result;
+ else
+ new = create_tmp_var (TREE_TYPE (old), "rettmp");
+ *return_value_p = new;
+ }
+ else
+ new = *return_value_p;
+
+ x = build2 (MODIFY_EXPR, TREE_TYPE (new), new, old);
+ append_to_statement_list (x, &q->repl_stmt);
+
+ if (new == result)
+ x = result;
+ else
+ x = build2 (MODIFY_EXPR, TREE_TYPE (result), result, new);
+ q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
+ }
- if (new == result)
- x = result;
- else
- x = build (MODIFY_EXPR, TREE_TYPE (result), result, new);
- q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
+ default:
+ gcc_unreachable ();
}
- else
- abort ();
}
else
{
{
tree region = NULL;
- t = lhd_unsave_expr_now (t);
+ t = unsave_expr_now (t);
if (outer_state->tf)
region = outer_state->tf->try_finally_expr;
label = create_artificial_label ();
tf->fallthru_label = label;
if (tf->outer->tf)
- record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
+ record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
}
return label;
}
where "fintmp" is the temporary used in the switch statement generation
alternative considered below. For the nonce, we always choose the first
- option.
+ option.
- THIS_STATE may be null if if this is a try-cleanup, not a try-finally. */
+ THIS_STATE may be null if this is a try-cleanup, not a try-finally. */
static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
save_filt = create_tmp_var (integer_type_node, "save_filt");
i = tsi_start (finally);
- x = build (EXC_PTR_EXPR, ptr_type_node);
- x = build (MODIFY_EXPR, void_type_node, save_eptr, x);
+ x = build0 (EXC_PTR_EXPR, ptr_type_node);
+ x = build2 (MODIFY_EXPR, void_type_node, save_eptr, x);
tsi_link_before (&i, x, TSI_CONTINUE_LINKING);
- x = build (FILTER_EXPR, integer_type_node);
- x = build (MODIFY_EXPR, void_type_node, save_filt, x);
+ x = build0 (FILTER_EXPR, integer_type_node);
+ x = build2 (MODIFY_EXPR, void_type_node, save_filt, x);
tsi_link_before (&i, x, TSI_CONTINUE_LINKING);
i = tsi_last (finally);
- x = build (EXC_PTR_EXPR, ptr_type_node);
- x = build (MODIFY_EXPR, void_type_node, x, save_eptr);
+ x = build0 (EXC_PTR_EXPR, ptr_type_node);
+ x = build2 (MODIFY_EXPR, void_type_node, x, save_eptr);
tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
- x = build (FILTER_EXPR, integer_type_node);
- x = build (MODIFY_EXPR, void_type_node, x, save_filt);
+ x = build0 (FILTER_EXPR, integer_type_node);
+ x = build2 (MODIFY_EXPR, void_type_node, x, save_filt);
tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
- x = build1 (RESX_EXPR, void_type_node,
- build_int_2 (get_eh_region_number (tf->region), 0));
+ x = build_resx (get_eh_region_number (tf->region));
tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
}
/* Wrap the block with protect_cleanup_actions as the action. */
if (protect_cleanup_actions)
{
- x = build (EH_FILTER_EXPR, void_type_node, NULL, NULL);
+ x = build2 (EH_FILTER_EXPR, void_type_node, NULL, NULL);
append_to_statement_list (protect_cleanup_actions, &EH_FILTER_FAILURE (x));
EH_FILTER_MUST_NOT_THROW (x) = 1;
- finally = build (TRY_CATCH_EXPR, void_type_node, finally, x);
+ finally = build2 (TRY_CATCH_EXPR, void_type_node, finally, x);
lower_eh_filter (outer_state, &finally);
}
else
append_to_statement_list (x, tf->top_p);
append_to_statement_list (finally, tf->top_p);
-
- x = build1 (RESX_EXPR, void_type_node,
- build_int_2 (get_eh_region_number (tf->region), 0));
+
+ x = build_resx (get_eh_region_number (tf->region));
+
append_to_statement_list (x, tf->top_p);
return;
for (; q < qe; ++q)
do_goto_redirection (q, finally_label, NULL);
replace_goto_queue (tf);
-
- if (VARRAY_TREE (tf->dest_array, 0) == tf->fallthru_label)
+
+ if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label)
{
/* Reachable by goto to fallthru label only. Redirect it
to the new label (already created, sadly), and do not
lower_eh_constructs_1 (state, &x);
append_to_statement_list (x, &new_stmt);
- x = build1 (RESX_EXPR, void_type_node,
- build_int_2 (get_eh_region_number (tf->region), 0));
+ x = build_resx (get_eh_region_number (tf->region));
append_to_statement_list (x, &new_stmt);
}
{
struct goto_queue_node *q, *qe;
tree return_val = NULL;
- int return_index;
- tree *labels;
+ int return_index, index;
+ struct
+ {
+ struct goto_queue_node *q;
+ tree label;
+ } *labels;
- if (tf->dest_array)
- return_index = VARRAY_ACTIVE_SIZE (tf->dest_array);
- else
- return_index = 0;
- labels = xcalloc (sizeof (tree), return_index + 1);
+ return_index = VEC_length (tree, tf->dest_array);
+ labels = xcalloc (sizeof (*labels), return_index + 1);
q = tf->goto_queue;
qe = q + tf->goto_queue_active;
for (; q < qe; q++)
{
- int index = q->index < 0 ? return_index : q->index;
- tree lab = labels[index];
- bool build_p = false;
+ index = q->index < 0 ? return_index : q->index;
- if (!lab)
- {
- labels[index] = lab = create_artificial_label ();
- build_p = true;
- }
+ if (!labels[index].q)
+ labels[index].q = q;
+ }
+
+ for (index = 0; index < return_index + 1; index++)
+ {
+ tree lab;
+
+ q = labels[index].q;
+ if (! q)
+ continue;
+
+ lab = labels[index].label = create_artificial_label ();
if (index == return_index)
do_return_redirection (q, lab, NULL, &return_val);
else
do_goto_redirection (q, lab, NULL);
- if (build_p)
- {
- x = build1 (LABEL_EXPR, void_type_node, lab);
- append_to_statement_list (x, &new_stmt);
+ x = build1 (LABEL_EXPR, void_type_node, lab);
+ append_to_statement_list (x, &new_stmt);
- x = lower_try_finally_dup_block (finally, state);
- lower_eh_constructs_1 (state, &x);
- append_to_statement_list (x, &new_stmt);
+ x = lower_try_finally_dup_block (finally, state);
+ lower_eh_constructs_1 (state, &x);
+ append_to_statement_list (x, &new_stmt);
- append_to_statement_list (q->cont_stmt, &new_stmt);
- maybe_record_in_goto_queue (state, q->cont_stmt);
- }
+ append_to_statement_list (q->cont_stmt, &new_stmt);
+ maybe_record_in_goto_queue (state, q->cont_stmt);
+ }
+
+ for (q = tf->goto_queue; q < qe; q++)
+ {
+ tree lab;
+
+ index = q->index < 0 ? return_index : q->index;
+
+ if (labels[index].q == q)
+ continue;
+
+ lab = labels[index].label;
+
+ if (index == return_index)
+ do_return_redirection (q, lab, NULL, &return_val);
+ else
+ do_goto_redirection (q, lab, NULL);
}
+
replace_goto_queue (tf);
free (labels);
}
lower_eh_constructs_1 (state, &finally);
/* Prepare for switch statement generation. */
- if (tf->dest_array)
- nlabels = VARRAY_ACTIVE_SIZE (tf->dest_array);
- else
- nlabels = 0;
+ nlabels = VEC_length (tree, tf->dest_array);
return_index = nlabels;
eh_index = return_index + tf->may_return;
fallthru_index = eh_index + tf->may_throw;
finally_label = create_artificial_label ();
case_label_vec = make_tree_vec (ndests);
- switch_stmt = build (SWITCH_EXPR, integer_type_node, finally_tmp,
- NULL_TREE, case_label_vec);
+ switch_stmt = build3 (SWITCH_EXPR, integer_type_node, finally_tmp,
+ NULL_TREE, case_label_vec);
switch_body = NULL;
last_case = NULL;
last_case_index = 0;
if (tf->may_fallthru)
{
- x = build (MODIFY_EXPR, void_type_node, finally_tmp,
- build_int_2 (fallthru_index, 0));
+ x = build2 (MODIFY_EXPR, void_type_node, finally_tmp,
+ build_int_cst (NULL_TREE, fallthru_index));
append_to_statement_list (x, tf->top_p);
if (tf->may_throw)
}
- last_case = build (CASE_LABEL_EXPR, void_type_node,
- build_int_2 (fallthru_index, 0), NULL,
- create_artificial_label ());
+ last_case = build3 (CASE_LABEL_EXPR, void_type_node,
+ build_int_cst (NULL_TREE, fallthru_index), NULL,
+ create_artificial_label ());
TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
last_case_index++;
- x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
+ x = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
append_to_statement_list (x, &switch_body);
x = lower_try_finally_fallthru_label (tf);
x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
append_to_statement_list (x, tf->top_p);
- x = build (MODIFY_EXPR, void_type_node, finally_tmp,
- build_int_2 (eh_index, 0));
+ x = build2 (MODIFY_EXPR, void_type_node, finally_tmp,
+ build_int_cst (NULL_TREE, eh_index));
append_to_statement_list (x, tf->top_p);
- last_case = build (CASE_LABEL_EXPR, void_type_node,
- build_int_2 (eh_index, 0), NULL,
- create_artificial_label ());
+ last_case = build3 (CASE_LABEL_EXPR, void_type_node,
+ build_int_cst (NULL_TREE, eh_index), NULL,
+ create_artificial_label ());
TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
last_case_index++;
- x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
+ x = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
append_to_statement_list (x, &switch_body);
- x = build1 (RESX_EXPR, void_type_node,
- build_int_2 (get_eh_region_number (tf->region), 0));
+ x = build_resx (get_eh_region_number (tf->region));
append_to_statement_list (x, &switch_body);
}
q = tf->goto_queue;
qe = q + tf->goto_queue_active;
j = last_case_index + tf->may_return;
- last_case_index += nlabels;
for (; q < qe; ++q)
{
tree mod;
if (q->index < 0)
{
- mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
- build_int_2 (return_index, 0));
+ mod = build2 (MODIFY_EXPR, void_type_node, finally_tmp,
+ build_int_cst (NULL_TREE, return_index));
do_return_redirection (q, finally_label, mod, &return_val);
switch_id = return_index;
}
else
{
- mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
- build_int_2 (q->index, 0));
+ mod = build2 (MODIFY_EXPR, void_type_node, finally_tmp,
+ build_int_cst (NULL_TREE, q->index));
do_goto_redirection (q, finally_label, mod);
switch_id = q->index;
}
case_index = j + q->index;
if (!TREE_VEC_ELT (case_label_vec, case_index))
- {
- last_case = build (CASE_LABEL_EXPR, void_type_node,
- build_int_2 (switch_id, 0), NULL,
- create_artificial_label ());
- TREE_VEC_ELT (case_label_vec, case_index) = last_case;
-
- x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
- append_to_statement_list (x, &switch_body);
- append_to_statement_list (q->cont_stmt, &switch_body);
- maybe_record_in_goto_queue (state, q->cont_stmt);
- }
+ TREE_VEC_ELT (case_label_vec, case_index)
+ = build3 (CASE_LABEL_EXPR, void_type_node,
+ build_int_cst (NULL_TREE, switch_id), NULL,
+ /* We store the cont_stmt in the
+ CASE_LABEL, so that we can recover it
+ in the loop below. We don't create
+ the new label while walking the
+ goto_queue because pointers don't
+ offer a stable order. */
+ q->cont_stmt);
+ }
+ for (j = last_case_index; j < last_case_index + nlabels; j++)
+ {
+ tree label;
+ tree cont_stmt;
+
+ last_case = TREE_VEC_ELT (case_label_vec, j);
+
+ gcc_assert (last_case);
+
+ cont_stmt = CASE_LABEL (last_case);
+
+ label = create_artificial_label ();
+ CASE_LABEL (last_case) = label;
+
+ x = build1 (LABEL_EXPR, void_type_node, label);
+ append_to_statement_list (x, &switch_body);
+ append_to_statement_list (cont_stmt, &switch_body);
+ maybe_record_in_goto_queue (state, cont_stmt);
}
replace_goto_queue (tf);
- last_case_index += nlabels;
/* Make sure that the last case is the default label, as one is required.
Then sort the labels, which is also required in GIMPLE. */
/* A subroutine of lower_eh_constructs_1. Lower a TRY_FINALLY_EXPR nodes
to a sequence of labels and blocks, plus the exception region trees
- that record all the magic. This is complicated by the need to
+ that record all the magic. This is complicated by the need to
arrange for the FINALLY block to be executed on all exits. */
static void
how many destinations are reached by the finally block. Use this to
determine how we process the finally block itself. */
- if (this_tf.dest_array)
- ndests = VARRAY_ACTIVE_SIZE (this_tf.dest_array);
- else
- ndests = 0;
+ ndests = VEC_length (tree, this_tf.dest_array);
ndests += this_tf.may_fallthru;
ndests += this_tf.may_return;
ndests += this_tf.may_throw;
append_to_statement_list (x, tp);
}
+ VEC_free (tree, heap, this_tf.dest_array);
if (this_tf.goto_queue)
free (this_tf.goto_queue);
}
/* A subroutine of lower_eh_constructs_1. Lower a TRY_CATCH_EXPR with a
- list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
+ list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
exception region trees that record all the magic. */
static void
struct eh_region *this_region;
tree inner = expr_first (TREE_OPERAND (*tp, 1));
tree eh_label;
-
+
if (EH_FILTER_MUST_NOT_THROW (inner))
this_region = gen_eh_region_must_not_throw (state->cur_region);
else
EH_FILTER_TYPES (inner));
this_state = *state;
this_state.cur_region = this_region;
-
+
lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
if (!get_eh_region_may_contain_throw (this_region))
{
record_stmt_eh_region (state->cur_region, t);
note_eh_region_may_contain_throw (state->cur_region);
-
- /* ??? For the benefit of calls.c, converting all this to rtl,
- we need to record the call expression, not just the outer
- modify statement. */
- if (TREE_CODE (TREE_OPERAND (t, 1)) == CALL_EXPR)
- record_stmt_eh_region (state->cur_region, TREE_OPERAND (t, 1));
}
break;
tree *tp = &DECL_SAVED_TREE (current_function_decl);
finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
- throw_stmt_table = htab_create_ggc (31, struct_ptr_hash, struct_ptr_eq, free);
collect_finally_tree (*tp, NULL);
collect_eh_region_array ();
}
-struct tree_opt_pass pass_lower_eh =
+struct tree_opt_pass pass_lower_eh =
{
"eh", /* name */
NULL, /* gate */
PROP_gimple_leh, /* properties_provided */
PROP_gimple_lcf, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func /* todo_flags_finish */
+ TODO_dump_func, /* todo_flags_finish */
+ 0 /* letter */
};
\f
make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
}
-
+
void
make_eh_edges (tree stmt)
{
foreach_reachable_handler (region_nr, is_resx, make_eh_edge, stmt);
}
+/* Set when mark_eh_edge diagnoses a problem; read by verify_eh_edges.  */
+static bool mark_eh_edge_found_error;
+
+/* Mark the edge make_eh_edge would create for the given REGION by
+   setting its aux field; report an error when the expected edge is
+   missing or malformed.  DATA is the throwing statement.  */
+static void
+mark_eh_edge (struct eh_region *region, void *data)
+{
+ tree stmt, lab;
+ basic_block src, dst;
+ edge e;
+
+ stmt = data;
+ lab = get_eh_region_tree_label (region);
+
+ src = bb_for_stmt (stmt);
+ dst = label_to_block (lab);
+
+ e = find_edge (src, dst);
+ if (!e)
+ {
+ error ("EH edge %i->%i is missing", src->index, dst->index);
+ mark_eh_edge_found_error = true;
+ }
+ else if (!(e->flags & EDGE_EH))
+ {
+ error ("EH edge %i->%i miss EH flag", src->index, dst->index);
+ mark_eh_edge_found_error = true;
+ }
+ else if (e->aux)
+ {
+ /* ??? Two regions reaching the same edge might not be a mistake.  */
+ error ("EH edge %i->%i has duplicated regions", src->index, dst->index);
+ mark_eh_edge_found_error = true;
+ }
+ else
+ e->aux = (void *)1;
+}
+
+/* Verify that the basic block containing STMT as its last statement
+   has exactly the EH edges make_eh_edges would create; return true
+   if any problem was diagnosed.  */
+bool
+verify_eh_edges (tree stmt)
+{
+ int region_nr;
+ bool is_resx;
+ basic_block bb = bb_for_stmt (stmt);
+ edge_iterator ei;
+ edge e;
+
+ FOR_EACH_EDGE (e, ei, bb->succs)
+ gcc_assert (!e->aux);
+ mark_eh_edge_found_error = false;
+ if (TREE_CODE (stmt) == RESX_EXPR)
+ {
+ region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
+ is_resx = true;
+ }
+ else
+ {
+ region_nr = lookup_stmt_eh_region (stmt);
+ if (region_nr < 0)
+ {
+ FOR_EACH_EDGE (e, ei, bb->succs)
+ if (e->flags & EDGE_EH)
+ {
+ error ("BB %i can not throw but has EH edges", bb->index);
+ return true;
+ }
+ return false;
+ }
+ if (!tree_could_throw_p (stmt))
+ {
+ error ("BB %i last statement has incorrectly set region", bb->index);
+ return true;
+ }
+ is_resx = false;
+ }
+
+ foreach_reachable_handler (region_nr, is_resx, mark_eh_edge, stmt);
+ /* Any EH edge left without an aux mark is unwanted; clear marks.  */
+ FOR_EACH_EDGE (e, ei, bb->succs)
+ {
+ if ((e->flags & EDGE_EH) && !e->aux)
+ {
+ error ("unnecessary EH edge %i->%i", bb->index, e->dest->index);
+ mark_eh_edge_found_error = true;
+ return true;
+ }
+ e->aux = NULL;
+ }
+ return mark_eh_edge_found_error;
+}
\f
-/* Return true if the expr can trap, as in dereferencing an
- invalid pointer location. */
+/* Return true if the expr can trap, as in dereferencing an invalid pointer
+ location or floating point arithmetic. C.f. the rtl version, may_trap_p.
+ This routine expects only GIMPLE lhs or rhs input. */
bool
tree_could_trap_p (tree expr)
{
enum tree_code code = TREE_CODE (expr);
- tree t;
+ bool honor_nans = false;
+ bool honor_snans = false;
+ bool fp_operation = false;
+ bool honor_trapv = false;
+ tree t, base;
+
+ if (TREE_CODE_CLASS (code) == tcc_comparison
+ || TREE_CODE_CLASS (code) == tcc_unary
+ || TREE_CODE_CLASS (code) == tcc_binary)
+ {
+ t = TREE_TYPE (expr);
+ fp_operation = FLOAT_TYPE_P (t);
+ if (fp_operation)
+ {
+ honor_nans = flag_trapping_math && !flag_finite_math_only;
+ honor_snans = flag_signaling_nans != 0;
+ }
+ else if (INTEGRAL_TYPE_P (t) && TYPE_TRAP_SIGNED (t))
+ honor_trapv = true;
+ }
+ restart:
switch (code)
{
- case ARRAY_REF:
+ case TARGET_MEM_REF:
+ /* For TARGET_MEM_REFs use the information based on the original
+ reference. */
+ expr = TMR_ORIGINAL (expr);
+ code = TREE_CODE (expr);
+ goto restart;
+
case COMPONENT_REF:
case REALPART_EXPR:
case IMAGPART_EXPR:
case BIT_FIELD_REF:
- t = get_base_address (expr);
- return !t || TREE_CODE (t) == INDIRECT_REF;
+ case WITH_SIZE_EXPR:
+ expr = TREE_OPERAND (expr, 0);
+ code = TREE_CODE (expr);
+ goto restart;
+
+ case ARRAY_RANGE_REF:
+ /* Let us be conservative here for now. We might be checking bounds of
+ the access similarly to the case below. */
+ if (!TREE_THIS_NOTRAP (expr))
+ return true;
+
+ base = TREE_OPERAND (expr, 0);
+ return tree_could_trap_p (base);
+
+ case ARRAY_REF:
+ base = TREE_OPERAND (expr, 0);
+ if (tree_could_trap_p (base))
+ return true;
+
+ if (TREE_THIS_NOTRAP (expr))
+ return false;
+
+ return !in_array_bounds_p (expr);
case INDIRECT_REF:
+ case ALIGN_INDIRECT_REF:
+ case MISALIGNED_INDIRECT_REF:
+ return !TREE_THIS_NOTRAP (expr);
+
+ case ASM_EXPR:
+ return TREE_THIS_VOLATILE (expr);
+
case TRUNC_DIV_EXPR:
case CEIL_DIV_EXPR:
case FLOOR_DIV_EXPR:
case FLOOR_MOD_EXPR:
case ROUND_MOD_EXPR:
case TRUNC_MOD_EXPR:
- return true;
+ case RDIV_EXPR:
+ if (honor_snans || honor_trapv)
+ return true;
+ if (fp_operation)
+ return flag_trapping_math;
+ t = TREE_OPERAND (expr, 1);
+ if (!TREE_CONSTANT (t) || integer_zerop (t))
+ return true;
+ return false;
+
+ case LT_EXPR:
+ case LE_EXPR:
+ case GT_EXPR:
+ case GE_EXPR:
+ case LTGT_EXPR:
+ /* Some floating point comparisons may trap. */
+ return honor_nans;
+
+ case EQ_EXPR:
+ case NE_EXPR:
+ case UNORDERED_EXPR:
+ case ORDERED_EXPR:
+ case UNLT_EXPR:
+ case UNLE_EXPR:
+ case UNGT_EXPR:
+ case UNGE_EXPR:
+ case UNEQ_EXPR:
+ return honor_snans;
+
+ case CONVERT_EXPR:
+ case FIX_TRUNC_EXPR:
+ case FIX_CEIL_EXPR:
+ case FIX_FLOOR_EXPR:
+ case FIX_ROUND_EXPR:
+ /* Conversion of floating point might trap. */
+ return honor_nans;
+
+ case NEGATE_EXPR:
+ case ABS_EXPR:
+ case CONJ_EXPR:
+ /* These operations don't trap with floating point. */
+ if (honor_trapv)
+ return true;
+ return false;
+
+ case PLUS_EXPR:
+ case MINUS_EXPR:
+ case MULT_EXPR:
+ /* Any floating arithmetic may trap. */
+ if (fp_operation && flag_trapping_math)
+ return true;
+ if (honor_trapv)
+ return true;
+ return false;
+
+ case CALL_EXPR:
+ t = get_callee_fndecl (expr);
+ /* Assume that calls to weak functions may trap. */
+ if (!t || !DECL_P (t) || DECL_WEAK (t))
+ return true;
+ return false;
default:
- break;
+ /* Any floating arithmetic may trap. */
+ if (fp_operation && flag_trapping_math)
+ return true;
+ return false;
}
-
- return false;
}
-
bool
tree_could_throw_p (tree t)
{
t = TREE_OPERAND (t, 1);
}
+ if (TREE_CODE (t) == WITH_SIZE_EXPR)
+ t = TREE_OPERAND (t, 0);
if (TREE_CODE (t) == CALL_EXPR)
return (call_expr_flags (t) & ECF_NOTHROW) == 0;
if (flag_non_call_exceptions)
+/* Return true if STMT can throw to a handler within this function.
+   A RESX_EXPR carries its region number as operand 0.  */
bool
tree_can_throw_internal (tree stmt)
{
- int region_nr = lookup_stmt_eh_region (stmt);
+ int region_nr;
+ bool is_resx = false;
+
+ if (TREE_CODE (stmt) == RESX_EXPR)
+ region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0)), is_resx = true;
+ else
+ region_nr = lookup_stmt_eh_region (stmt);
if (region_nr < 0)
return false;
- return can_throw_internal_1 (region_nr);
+ return can_throw_internal_1 (region_nr, is_resx);
}
+/* Return true if STMT can throw out of this function.  Statements
+   with no recorded region fall back to tree_could_throw_p.  */
bool
tree_can_throw_external (tree stmt)
{
- int region_nr = lookup_stmt_eh_region (stmt);
+ int region_nr;
+ bool is_resx = false;
+
+ if (TREE_CODE (stmt) == RESX_EXPR)
+ region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0)), is_resx = true;
+ else
+ region_nr = lookup_stmt_eh_region (stmt);
if (region_nr < 0)
- return false;
- return can_throw_external_1 (region_nr);
+ return tree_could_throw_p (stmt);
+ else
+ return can_throw_external_1 (region_nr, is_resx);
}
-#include "gt-tree-eh.h"
+/* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
+ OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
+ in the table if it should be in there. Return TRUE if a replacement was
+ done that may require an EH edge purge. */
+
+bool
+maybe_clean_or_replace_eh_stmt (tree old_stmt, tree new_stmt)
+{
+ int region_nr = lookup_stmt_eh_region (old_stmt);
+
+ if (region_nr >= 0)
+ {
+ bool new_stmt_could_throw = tree_could_throw_p (new_stmt);
+
+ if (new_stmt == old_stmt && new_stmt_could_throw)
+ return false;
+
+ remove_stmt_from_eh_region (old_stmt);
+ if (new_stmt_could_throw)
+ {
+ add_stmt_to_eh_region (new_stmt, region_nr);
+ return false;
+ }
+ else
+ return true;
+ }
+
+ return false;
+}