/* Exception handling semantics and decomposition for trees.
- Copyright (C) 2003 Free Software Foundation, Inc.
+ Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008 Free Software
+ Foundation, Inc.
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
-the Free Software Foundation; either version 2, or (at your option)
+the Free Software Foundation; either version 3, or (at your option)
any later version.
GCC is distributed in the hope that it will be useful,
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING. If not, write to
-the Free Software Foundation, 59 Temple Place - Suite 330,
-Boston, MA 02111-1307, USA. */
+along with GCC; see the file COPYING3. If not see
+<http://www.gnu.org/licenses/>. */
#include "config.h"
#include "system.h"
#include "timevar.h"
#include "langhooks.h"
#include "ggc.h"
+#include "toplev.h"
+#include "pointer-set.h"
-/* HACK */
-extern int using_eh_for_cleanups_p;
+\f
+/* Nonzero if we are using EH to handle cleanups. */
+static int using_eh_for_cleanups_p = 0;
+
+void
+using_eh_for_cleanups (void)
+{
+ using_eh_for_cleanups_p = 1;
+}
\f
/* Misc functions used in this file. */
/* Compare and hash for any structure which begins with a canonical
- pointer. Assumes all pointers are interchangable, which is sort
+ pointer. Assumes all pointers are interchangeable, which is sort
of already assumed by gcc elsewhere IIRC. */
static int
struct_ptr_eq (const void *a, const void *b)
{
- const void * const * x = a;
- const void * const * y = b;
+ const void * const * x = (const void * const *) a;
+ const void * const * y = (const void * const *) b;
return *x == *y;
}
static hashval_t
struct_ptr_hash (const void *a)
{
- const void * const * x = a;
+ const void * const * x = (const void * const *) a;
return (size_t)*x >> 4;
}
we get to rtl. Once we're done with lowering here, if we lose
the information there's no way to recover it!
- (2) There are many more statements that *cannot* throw as
+ (2) There are many more statements that *cannot* throw as
compared to those that can. We should be saving some amount
of space by only allocating memory for those that can throw. */
-struct throw_stmt_node GTY(())
-{
- tree stmt;
- int region_nr;
-};
-
-static GTY((param_is (struct throw_stmt_node))) htab_t throw_stmt_table;
-
static void
record_stmt_eh_region (struct eh_region *region, tree t)
{
- struct throw_stmt_node *n;
- void **slot;
-
if (!region)
return;
- n = ggc_alloc (sizeof (*n));
- n->stmt = t;
- n->region_nr = get_eh_region_number (region);
-
- slot = htab_find_slot (throw_stmt_table, n, INSERT);
- if (*slot)
- abort ();
- *slot = n;
+ add_stmt_to_eh_region (t, get_eh_region_number (region));
}
void
-add_stmt_to_eh_region (tree t, int num)
+add_stmt_to_eh_region_fn (struct function *ifun, tree t, int num)
{
struct throw_stmt_node *n;
void **slot;
- if (num < 0)
- abort ();
+ gcc_assert (num >= 0);
+ gcc_assert (TREE_CODE (t) != RESX_EXPR);
- n = ggc_alloc (sizeof (*n));
+ n = GGC_NEW (struct throw_stmt_node);
n->stmt = t;
n->region_nr = num;
- slot = htab_find_slot (throw_stmt_table, n, INSERT);
- if (*slot)
- abort ();
+ if (!get_eh_throw_stmt_table (ifun))
+ set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,
+ struct_ptr_eq,
+ ggc_free));
+
+ slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
+ gcc_assert (!*slot);
*slot = n;
}
+void
+add_stmt_to_eh_region (tree t, int num)
+{
+ add_stmt_to_eh_region_fn (cfun, t, num);
+}
+
bool
-remove_stmt_from_eh_region (tree t)
+remove_stmt_from_eh_region_fn (struct function *ifun, tree t)
{
struct throw_stmt_node dummy;
void **slot;
- if (!throw_stmt_table)
+ if (!get_eh_throw_stmt_table (ifun))
return false;
dummy.stmt = t;
- slot = htab_find_slot (throw_stmt_table, &dummy, NO_INSERT);
+ slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
+ NO_INSERT);
if (slot)
{
- htab_clear_slot (throw_stmt_table, slot);
+ htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
return true;
}
else
return false;
}
+bool
+remove_stmt_from_eh_region (tree t)
+{
+ return remove_stmt_from_eh_region_fn (cfun, t);
+}
+
int
-lookup_stmt_eh_region (tree t)
+lookup_stmt_eh_region_fn (struct function *ifun, const_tree t)
{
struct throw_stmt_node *p, n;
- if (!throw_stmt_table)
+ if (!get_eh_throw_stmt_table (ifun))
return -2;
- n.stmt = t;
- p = htab_find (throw_stmt_table, &n);
+ /* The CONST_CAST is okay because we don't modify n.stmt throughout
+ its scope, or the scope of p. */
+ n.stmt = CONST_CAST_TREE (t);
+ p = (struct throw_stmt_node *) htab_find (get_eh_throw_stmt_table (ifun),
+ &n);
return (p ? p->region_nr : -1);
}
+int
+lookup_stmt_eh_region (const_tree t)
+{
+ /* We can get called from initialized data when -fnon-call-exceptions
+ is on; prevent crash. */
+ if (!cfun)
+ return -1;
+ return lookup_stmt_eh_region_fn (cfun, t);
+}
\f
/* First pass of EH node decomposition. Build up a tree of TRY_FINALLY_EXPR
struct finally_tree_node *n;
void **slot;
- n = xmalloc (sizeof (*n));
+ n = XNEW (struct finally_tree_node);
n->child = child;
n->parent = parent;
slot = htab_find_slot (finally_tree, n, INSERT);
- if (*slot)
- abort ();
+ gcc_assert (!*slot);
*slot = n;
}
do
{
n.child = start;
- p = htab_find (finally_tree, &n);
+ p = (struct finally_tree_node *) htab_find (finally_tree, &n);
if (!p)
return true;
start = p->parent;
struct leh_state
{
- /* What's "current" while constructing the eh region tree. These
+ /* What's "current" while constructing the eh region tree. These
correspond to variables of the same name in cfun->eh, which we
don't have easy access to. */
struct eh_region *cur_region;
struct leh_tf_state
{
/* Pointer to the TRY_FINALLY node under discussion. The try_finally_expr
- is the original TRY_FINALLY_EXPR. We need to retain this so that
+ is the original TRY_FINALLY_EXPR. We need to retain this so that
outside_finally_tree can reliably reference the tree used in the
collect_finally_tree data structures. */
tree try_finally_expr;
size_t goto_queue_size;
size_t goto_queue_active;
+ /* Pointer map to help in searching goto_queue when it is large. */
+ struct pointer_map_t *goto_queue_map;
+
/* The set of unique labels seen as entries in the goto queue. */
- varray_type dest_array;
+ VEC(tree,heap) *dest_array;
/* A label to be added at the end of the completed transformed
sequence. It will be set if may_fallthru was true *at one time*,
though subsequent transformations may have cleared that flag. */
tree fallthru_label;
- /* A label that has been registered with except.c to be the
+ /* A label that has been registered with except.c to be the
landing pad for this try block. */
tree eh_label;
static void lower_eh_filter (struct leh_state *, tree *);
static void lower_eh_constructs_1 (struct leh_state *, tree *);
-/* Comparison function for qsort/bsearch. We're interested in
- searching goto queue elements for source statements. */
-
-static int
-goto_queue_cmp (const void *x, const void *y)
-{
- tree a = ((const struct goto_queue_node *)x)->stmt;
- tree b = ((const struct goto_queue_node *)y)->stmt;
- return (a == b ? 0 : a < b ? -1 : 1);
-}
-
/* Search for STMT in the goto queue. Return the replacement,
or null if the statement isn't in the queue. */
+#define LARGE_GOTO_QUEUE 20
+
static tree
find_goto_replacement (struct leh_tf_state *tf, tree stmt)
{
- struct goto_queue_node tmp, *ret;
- tmp.stmt = stmt;
- ret = bsearch (&tmp, tf->goto_queue, tf->goto_queue_active,
- sizeof (struct goto_queue_node), goto_queue_cmp);
- return (ret ? ret->repl_stmt : NULL);
+ unsigned int i;
+ void **slot;
+
+ if (tf->goto_queue_active < LARGE_GOTO_QUEUE)
+ {
+ for (i = 0; i < tf->goto_queue_active; i++)
+ if (tf->goto_queue[i].stmt == stmt)
+ return tf->goto_queue[i].repl_stmt;
+ return NULL;
+ }
+
+ /* If we have a large number of entries in the goto_queue, create a
+ pointer map and use that for searching. */
+
+ if (!tf->goto_queue_map)
+ {
+ tf->goto_queue_map = pointer_map_create ();
+ for (i = 0; i < tf->goto_queue_active; i++)
+ {
+ slot = pointer_map_insert (tf->goto_queue_map, tf->goto_queue[i].stmt);
+ gcc_assert (*slot == NULL);
+ *slot = (void *) &tf->goto_queue[i];
+ }
+ }
+
+ slot = pointer_map_contains (tf->goto_queue_map, stmt);
+ if (slot != NULL)
+ return (((struct goto_queue_node *) *slot)->repl_stmt);
+
+ return NULL;
}
/* A subroutine of replace_goto_queue_1. Handles the sub-clauses of a
tsi_link_after (tsi, new, TSI_CONTINUE_LINKING);
}
-/* The real work of replace_goto_queue. Returns with TSI updated to
+/* The real work of replace_goto_queue. Returns with TSI updated to
point to the next statement. */
static void replace_goto_queue_stmt_list (tree, struct leh_tf_state *);
break;
case STATEMENT_LIST:
- abort ();
+ gcc_unreachable ();
default:
/* These won't have gotos in them. */
static void
replace_goto_queue (struct leh_tf_state *tf)
{
+ if (tf->goto_queue_active == 0)
+ return;
replace_goto_queue_stmt_list (*tf->top_p, tf);
}
{
tree lab = GOTO_DESTINATION (stmt);
- /* Computed and non-local gotos do not get processed. Given
+ /* Computed and non-local gotos do not get processed. Given
their nature we can neither tell whether we've escaped the
finally block nor redirect them if we knew. */
if (TREE_CODE (lab) != LABEL_DECL)
/* No need to record gotos that don't leave the try block. */
if (! outside_finally_tree (lab, tf->try_finally_expr))
return;
-
+
if (! tf->dest_array)
{
- VARRAY_TREE_INIT (tf->dest_array, 10, "dest_array");
- VARRAY_PUSH_TREE (tf->dest_array, lab);
+ tf->dest_array = VEC_alloc (tree, heap, 10);
+ VEC_quick_push (tree, tf->dest_array, lab);
index = 0;
}
else
{
- int n = VARRAY_ACTIVE_SIZE (tf->dest_array);
+ int n = VEC_length (tree, tf->dest_array);
for (index = 0; index < n; ++index)
- if (VARRAY_TREE (tf->dest_array, index) == lab)
+ if (VEC_index (tree, tf->dest_array, index) == lab)
break;
if (index == n)
- VARRAY_PUSH_TREE (tf->dest_array, lab);
+ VEC_safe_push (tree, heap, tf->dest_array, lab);
}
}
break;
break;
default:
- abort ();
+ gcc_unreachable ();
}
+ gcc_assert (!tf->goto_queue_map);
+
active = tf->goto_queue_active;
size = tf->goto_queue_size;
if (active >= size)
size = (size ? size * 2 : 32);
tf->goto_queue_size = size;
tf->goto_queue
- = xrealloc (tf->goto_queue, size * sizeof (struct goto_queue_node));
+ = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
}
q = &tf->goto_queue[active];
tf->goto_queue_active = active + 1;
-
+
memset (q, 0, sizeof (*q));
q->stmt = stmt;
q->index = index;
for (i = 0; i < n; ++i)
{
tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
- if (outside_finally_tree (lab, tf->try_finally_expr))
- abort ();
+ gcc_assert (!outside_finally_tree (lab, tf->try_finally_expr));
}
}
#else
depends, I guess, but it does make generation of the switch in
lower_try_finally_switch easier. */
- if (TREE_CODE (ret_expr) == RESULT_DECL)
+ switch (TREE_CODE (ret_expr))
{
+ case RESULT_DECL:
if (!*return_value_p)
*return_value_p = ret_expr;
- else if (*return_value_p != ret_expr)
- abort ();
- q->cont_stmt = q->stmt;
- }
- else if (TREE_CODE (ret_expr) == MODIFY_EXPR)
- {
- tree result = TREE_OPERAND (ret_expr, 0);
- tree new, old = TREE_OPERAND (ret_expr, 1);
-
- if (!*return_value_p)
- {
- if (aggregate_value_p (TREE_TYPE (result),
- TREE_TYPE (current_function_decl)))
- /* If this function returns in memory, copy the argument
- into the return slot now. Otherwise, we might need to
- worry about magic return semantics, so we need to use a
- temporary to hold the value until we're actually ready
- to return. */
- new = result;
- else
- new = create_tmp_var (TREE_TYPE (old), "rettmp");
- *return_value_p = new;
- }
else
- new = *return_value_p;
+ gcc_assert (*return_value_p == ret_expr);
+ q->cont_stmt = q->stmt;
+ break;
- x = build (MODIFY_EXPR, TREE_TYPE (new), new, old);
- append_to_statement_list (x, &q->repl_stmt);
+ case GIMPLE_MODIFY_STMT:
+ {
+ tree result = GIMPLE_STMT_OPERAND (ret_expr, 0);
+ tree new, old = GIMPLE_STMT_OPERAND (ret_expr, 1);
+
+ if (!*return_value_p)
+ {
+ if (aggregate_value_p (TREE_TYPE (result),
+ TREE_TYPE (current_function_decl)))
+ /* If this function returns in memory, copy the argument
+ into the return slot now. Otherwise, we might need to
+ worry about magic return semantics, so we need to use a
+ temporary to hold the value until we're actually ready
+ to return. */
+ new = result;
+ else
+ new = create_tmp_var (TREE_TYPE (old), "rettmp");
+ *return_value_p = new;
+ }
+ else
+ new = *return_value_p;
+
+ x = build_gimple_modify_stmt (new, old);
+ append_to_statement_list (x, &q->repl_stmt);
+
+ if (new == result)
+ x = result;
+ else
+ x = build_gimple_modify_stmt (result, new);
+ q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
+ }
- if (new == result)
- x = result;
- else
- x = build (MODIFY_EXPR, TREE_TYPE (result), result, new);
- q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
+ default:
+ gcc_unreachable ();
}
- else
- abort ();
}
else
{
{
tree region = NULL;
- t = lhd_unsave_expr_now (t);
+ t = unsave_expr_now (t);
if (outer_state->tf)
region = outer_state->tf->try_finally_expr;
label = create_artificial_label ();
tf->fallthru_label = label;
if (tf->outer->tf)
- record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
+ record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
}
return label;
}
where "fintmp" is the temporary used in the switch statement generation
alternative considered below. For the nonce, we always choose the first
- option.
+ option.
- THIS_STATE may be null if if this is a try-cleanup, not a try-finally. */
+ THIS_STATE may be null if this is a try-cleanup, not a try-finally. */
static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
if (this_state)
finally = lower_try_finally_dup_block (finally, outer_state);
+ /* If this cleanup consists of a TRY_CATCH_EXPR with TRY_CATCH_IS_CLEANUP
+ set, the handler of the TRY_CATCH_EXPR is another cleanup which ought
+ to be in an enclosing scope, but needs to be implemented at this level
+ to avoid a nesting violation (see wrap_temporary_cleanups in
+ cp/decl.c). Since it's logically at an outer level, we should call
+ terminate before we get to it, so strip it away before adding the
+ MUST_NOT_THROW filter. */
+ i = tsi_start (finally);
+ x = tsi_stmt (i);
+ if (protect_cleanup_actions
+ && TREE_CODE (x) == TRY_CATCH_EXPR
+ && TRY_CATCH_IS_CLEANUP (x))
+ {
+ tsi_link_before (&i, TREE_OPERAND (x, 0), TSI_SAME_STMT);
+ tsi_delink (&i);
+ }
+
/* Resume execution after the exception. Adding this now lets
lower_eh_filter not add unnecessary gotos, as it is clear that
we never fallthru from this copy of the finally block. */
save_filt = create_tmp_var (integer_type_node, "save_filt");
i = tsi_start (finally);
- x = build (EXC_PTR_EXPR, ptr_type_node);
- x = build (MODIFY_EXPR, void_type_node, save_eptr, x);
+ x = build0 (EXC_PTR_EXPR, ptr_type_node);
+ x = build_gimple_modify_stmt (save_eptr, x);
tsi_link_before (&i, x, TSI_CONTINUE_LINKING);
- x = build (FILTER_EXPR, integer_type_node);
- x = build (MODIFY_EXPR, void_type_node, save_filt, x);
+ x = build0 (FILTER_EXPR, integer_type_node);
+ x = build_gimple_modify_stmt (save_filt, x);
tsi_link_before (&i, x, TSI_CONTINUE_LINKING);
i = tsi_last (finally);
- x = build (EXC_PTR_EXPR, ptr_type_node);
- x = build (MODIFY_EXPR, void_type_node, x, save_eptr);
+ x = build0 (EXC_PTR_EXPR, ptr_type_node);
+ x = build_gimple_modify_stmt (x, save_eptr);
tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
- x = build (FILTER_EXPR, integer_type_node);
- x = build (MODIFY_EXPR, void_type_node, x, save_filt);
+ x = build0 (FILTER_EXPR, integer_type_node);
+ x = build_gimple_modify_stmt (x, save_filt);
tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
- x = build1 (RESX_EXPR, void_type_node,
- build_int_2 (get_eh_region_number (tf->region), 0));
+ x = build_resx (get_eh_region_number (tf->region));
tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
}
/* Wrap the block with protect_cleanup_actions as the action. */
if (protect_cleanup_actions)
{
- x = build (EH_FILTER_EXPR, void_type_node, NULL, NULL);
+ x = build2 (EH_FILTER_EXPR, void_type_node, NULL, NULL);
append_to_statement_list (protect_cleanup_actions, &EH_FILTER_FAILURE (x));
EH_FILTER_MUST_NOT_THROW (x) = 1;
- finally = build (TRY_CATCH_EXPR, void_type_node, finally, x);
+ finally = build2 (TRY_CATCH_EXPR, void_type_node, finally, x);
lower_eh_filter (outer_state, &finally);
}
else
append_to_statement_list (x, tf->top_p);
append_to_statement_list (finally, tf->top_p);
-
- x = build1 (RESX_EXPR, void_type_node,
- build_int_2 (get_eh_region_number (tf->region), 0));
+
+ x = build_resx (get_eh_region_number (tf->region));
+
append_to_statement_list (x, tf->top_p);
return;
for (; q < qe; ++q)
do_goto_redirection (q, finally_label, NULL);
replace_goto_queue (tf);
-
- if (VARRAY_TREE (tf->dest_array, 0) == tf->fallthru_label)
+
+ if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label)
{
/* Reachable by goto to fallthru label only. Redirect it
to the new label (already created, sadly), and do not
}
}
+ /* Reset the locus of the goto since we're moving
+ goto to a different block which might be on a different line. */
+ SET_EXPR_LOCUS (tf->goto_queue[0].cont_stmt, NULL);
append_to_statement_list (tf->goto_queue[0].cont_stmt, tf->top_p);
maybe_record_in_goto_queue (state, tf->goto_queue[0].cont_stmt);
}
lower_eh_constructs_1 (state, &x);
append_to_statement_list (x, &new_stmt);
- x = build1 (RESX_EXPR, void_type_node,
- build_int_2 (get_eh_region_number (tf->region), 0));
+ x = build_resx (get_eh_region_number (tf->region));
append_to_statement_list (x, &new_stmt);
}
{
struct goto_queue_node *q, *qe;
tree return_val = NULL;
- int return_index;
- tree *labels;
+ int return_index, index;
+ struct labels_s
+ {
+ struct goto_queue_node *q;
+ tree label;
+ } *labels;
- if (tf->dest_array)
- return_index = VARRAY_ACTIVE_SIZE (tf->dest_array);
- else
- return_index = 0;
- labels = xcalloc (sizeof (tree), return_index + 1);
+ return_index = VEC_length (tree, tf->dest_array);
+ labels = XCNEWVEC (struct labels_s, return_index + 1);
q = tf->goto_queue;
qe = q + tf->goto_queue_active;
for (; q < qe; q++)
{
- int index = q->index < 0 ? return_index : q->index;
- tree lab = labels[index];
- bool build_p = false;
+ index = q->index < 0 ? return_index : q->index;
- if (!lab)
- {
- labels[index] = lab = create_artificial_label ();
- build_p = true;
- }
+ if (!labels[index].q)
+ labels[index].q = q;
+ }
+
+ for (index = 0; index < return_index + 1; index++)
+ {
+ tree lab;
+
+ q = labels[index].q;
+ if (! q)
+ continue;
+
+ lab = labels[index].label = create_artificial_label ();
if (index == return_index)
do_return_redirection (q, lab, NULL, &return_val);
else
do_goto_redirection (q, lab, NULL);
- if (build_p)
- {
- x = build1 (LABEL_EXPR, void_type_node, lab);
- append_to_statement_list (x, &new_stmt);
+ x = build1 (LABEL_EXPR, void_type_node, lab);
+ append_to_statement_list (x, &new_stmt);
- x = lower_try_finally_dup_block (finally, state);
- lower_eh_constructs_1 (state, &x);
- append_to_statement_list (x, &new_stmt);
+ x = lower_try_finally_dup_block (finally, state);
+ lower_eh_constructs_1 (state, &x);
+ append_to_statement_list (x, &new_stmt);
- append_to_statement_list (q->cont_stmt, &new_stmt);
- maybe_record_in_goto_queue (state, q->cont_stmt);
- }
+ append_to_statement_list (q->cont_stmt, &new_stmt);
+ maybe_record_in_goto_queue (state, q->cont_stmt);
}
+
+ for (q = tf->goto_queue; q < qe; q++)
+ {
+ tree lab;
+
+ index = q->index < 0 ? return_index : q->index;
+
+ if (labels[index].q == q)
+ continue;
+
+ lab = labels[index].label;
+
+ if (index == return_index)
+ do_return_redirection (q, lab, NULL, &return_val);
+ else
+ do_goto_redirection (q, lab, NULL);
+ }
+
replace_goto_queue (tf);
free (labels);
}
lower_eh_constructs_1 (state, &finally);
/* Prepare for switch statement generation. */
- if (tf->dest_array)
- nlabels = VARRAY_ACTIVE_SIZE (tf->dest_array);
- else
- nlabels = 0;
+ nlabels = VEC_length (tree, tf->dest_array);
return_index = nlabels;
eh_index = return_index + tf->may_return;
fallthru_index = eh_index + tf->may_throw;
finally_label = create_artificial_label ();
case_label_vec = make_tree_vec (ndests);
- switch_stmt = build (SWITCH_EXPR, integer_type_node, finally_tmp,
- NULL_TREE, case_label_vec);
+ switch_stmt = build3 (SWITCH_EXPR, integer_type_node, finally_tmp,
+ NULL_TREE, case_label_vec);
switch_body = NULL;
last_case = NULL;
last_case_index = 0;
if (tf->may_fallthru)
{
- x = build (MODIFY_EXPR, void_type_node, finally_tmp,
- build_int_2 (fallthru_index, 0));
+ x = build_gimple_modify_stmt (finally_tmp,
+ build_int_cst (integer_type_node,
+ fallthru_index));
append_to_statement_list (x, tf->top_p);
if (tf->may_throw)
}
- last_case = build (CASE_LABEL_EXPR, void_type_node,
- build_int_2 (fallthru_index, 0), NULL,
- create_artificial_label ());
+ last_case = build3 (CASE_LABEL_EXPR, void_type_node,
+ build_int_cst (NULL_TREE, fallthru_index), NULL,
+ create_artificial_label ());
TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
last_case_index++;
- x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
+ x = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
append_to_statement_list (x, &switch_body);
x = lower_try_finally_fallthru_label (tf);
x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
append_to_statement_list (x, tf->top_p);
- x = build (MODIFY_EXPR, void_type_node, finally_tmp,
- build_int_2 (eh_index, 0));
+ x = build_gimple_modify_stmt (finally_tmp,
+ build_int_cst (integer_type_node,
+ eh_index));
append_to_statement_list (x, tf->top_p);
- last_case = build (CASE_LABEL_EXPR, void_type_node,
- build_int_2 (eh_index, 0), NULL,
- create_artificial_label ());
+ last_case = build3 (CASE_LABEL_EXPR, void_type_node,
+ build_int_cst (NULL_TREE, eh_index), NULL,
+ create_artificial_label ());
TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
last_case_index++;
- x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
+ x = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
append_to_statement_list (x, &switch_body);
- x = build1 (RESX_EXPR, void_type_node,
- build_int_2 (get_eh_region_number (tf->region), 0));
+ x = build_resx (get_eh_region_number (tf->region));
append_to_statement_list (x, &switch_body);
}
q = tf->goto_queue;
qe = q + tf->goto_queue_active;
j = last_case_index + tf->may_return;
- last_case_index += nlabels;
for (; q < qe; ++q)
{
tree mod;
if (q->index < 0)
{
- mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
- build_int_2 (return_index, 0));
+ mod = build_gimple_modify_stmt (finally_tmp,
+ build_int_cst (integer_type_node,
+ return_index));
do_return_redirection (q, finally_label, mod, &return_val);
switch_id = return_index;
}
else
{
- mod = build (MODIFY_EXPR, void_type_node, finally_tmp,
- build_int_2 (q->index, 0));
+ mod = build_gimple_modify_stmt (finally_tmp,
+ build_int_cst (integer_type_node,
+ q->index));
do_goto_redirection (q, finally_label, mod);
switch_id = q->index;
}
case_index = j + q->index;
if (!TREE_VEC_ELT (case_label_vec, case_index))
- {
- last_case = build (CASE_LABEL_EXPR, void_type_node,
- build_int_2 (switch_id, 0), NULL,
- create_artificial_label ());
- TREE_VEC_ELT (case_label_vec, case_index) = last_case;
-
- x = build (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
- append_to_statement_list (x, &switch_body);
- append_to_statement_list (q->cont_stmt, &switch_body);
- maybe_record_in_goto_queue (state, q->cont_stmt);
- }
+ TREE_VEC_ELT (case_label_vec, case_index)
+ = build3 (CASE_LABEL_EXPR, void_type_node,
+ build_int_cst (NULL_TREE, switch_id), NULL,
+ /* We store the cont_stmt in the
+ CASE_LABEL, so that we can recover it
+ in the loop below. We don't create
+ the new label while walking the
+ goto_queue because pointers don't
+ offer a stable order. */
+ q->cont_stmt);
+ }
+ for (j = last_case_index; j < last_case_index + nlabels; j++)
+ {
+ tree label;
+ tree cont_stmt;
+
+ last_case = TREE_VEC_ELT (case_label_vec, j);
+
+ gcc_assert (last_case);
+
+ cont_stmt = CASE_LABEL (last_case);
+
+ label = create_artificial_label ();
+ CASE_LABEL (last_case) = label;
+
+ x = build1 (LABEL_EXPR, void_type_node, label);
+ append_to_statement_list (x, &switch_body);
+ append_to_statement_list (cont_stmt, &switch_body);
+ maybe_record_in_goto_queue (state, cont_stmt);
}
replace_goto_queue (tf);
- last_case_index += nlabels;
/* Make sure that the last case is the default label, as one is required.
Then sort the labels, which is also required in GIMPLE. */
return false;
/* Finally estimate N times, plus N gotos. */
- f_estimate = estimate_num_insns (finally);
+ f_estimate = estimate_num_insns (finally, &eni_size_weights);
f_estimate = (f_estimate + 1) * ndests;
/* Switch statement (cost 10), N variable assignments, N gotos. */
/* A subroutine of lower_eh_constructs_1. Lower a TRY_FINALLY_EXPR nodes
to a sequence of labels and blocks, plus the exception region trees
- that record all the magic. This is complicated by the need to
+ that record all the magic. This is complicated by the need to
arrange for the FINALLY block to be executed on all exits. */
static void
honor_protect_cleanup_actions (state, &this_state, &this_tf);
}
- /* Sort the goto queue for efficient searching later. */
- if (this_tf.goto_queue_active > 1)
- qsort (this_tf.goto_queue, this_tf.goto_queue_active,
- sizeof (struct goto_queue_node), goto_queue_cmp);
-
/* Determine how many edges (still) reach the finally block. Or rather,
how many destinations are reached by the finally block. Use this to
determine how we process the finally block itself. */
- if (this_tf.dest_array)
- ndests = VARRAY_ACTIVE_SIZE (this_tf.dest_array);
- else
- ndests = 0;
+ ndests = VEC_length (tree, this_tf.dest_array);
ndests += this_tf.may_fallthru;
ndests += this_tf.may_return;
ndests += this_tf.may_throw;
append_to_statement_list (x, tp);
}
+ VEC_free (tree, heap, this_tf.dest_array);
if (this_tf.goto_queue)
free (this_tf.goto_queue);
+ if (this_tf.goto_queue_map)
+ pointer_map_destroy (this_tf.goto_queue_map);
}
/* A subroutine of lower_eh_constructs_1. Lower a TRY_CATCH_EXPR with a
- list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
+ list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
exception region trees that record all the magic. */
static void
struct eh_region *this_region;
tree inner = expr_first (TREE_OPERAND (*tp, 1));
tree eh_label;
-
+
if (EH_FILTER_MUST_NOT_THROW (inner))
this_region = gen_eh_region_must_not_throw (state->cur_region);
else
EH_FILTER_TYPES (inner));
this_state = *state;
this_state.cur_region = this_region;
-
+ /* For must not throw regions any cleanup regions inside it
+ can't reach outer catch regions. */
+ if (EH_FILTER_MUST_NOT_THROW (inner))
+ this_state.prev_try = NULL;
+
lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
if (!get_eh_region_may_contain_throw (this_region))
}
break;
- case MODIFY_EXPR:
+ case GIMPLE_MODIFY_STMT:
/* Look for things that can throw exceptions, and record them. */
if (state->cur_region && tree_could_throw_p (t))
{
- tree op;
-
record_stmt_eh_region (state->cur_region, t);
note_eh_region_may_contain_throw (state->cur_region);
-
- /* ??? For the benefit of calls.c, converting all this to rtl,
- we need to record the call expression, not just the outer
- modify statement. */
- op = get_call_expr_in (t);
- if (op)
- record_stmt_eh_region (state->cur_region, op);
}
break;
}
}
-static void
+static unsigned int
lower_eh_constructs (void)
{
struct leh_state null_state;
tree *tp = &DECL_SAVED_TREE (current_function_decl);
finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
- throw_stmt_table = htab_create_ggc (31, struct_ptr_hash, struct_ptr_eq,
- ggc_free);
collect_finally_tree (*tp, NULL);
htab_delete (finally_tree);
collect_eh_region_array ();
+ return 0;
}
-struct tree_opt_pass pass_lower_eh =
+struct gimple_opt_pass pass_lower_eh =
{
+ {
+ GIMPLE_PASS,
"eh", /* name */
NULL, /* gate */
lower_eh_constructs, /* execute */
TV_TREE_EH, /* tv_id */
PROP_gimple_lcf, /* properties_required */
PROP_gimple_leh, /* properties_provided */
- PROP_gimple_lcf, /* properties_destroyed */
+ 0, /* properties_destroyed */
0, /* todo_flags_start */
TODO_dump_func /* todo_flags_finish */
+ }
};
\f
tree stmt, lab;
basic_block src, dst;
- stmt = data;
+ stmt = (tree) data;
lab = get_eh_region_tree_label (region);
src = bb_for_stmt (stmt);
make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
}
-
+
void
make_eh_edges (tree stmt)
{
foreach_reachable_handler (region_nr, is_resx, make_eh_edge, stmt);
}
+static bool mark_eh_edge_found_error;
+
+/* Mark the edge that make_eh_edge would create for the given region by
+ setting its aux field; output an error if something goes wrong. */
+static void
+mark_eh_edge (struct eh_region *region, void *data)
+{
+ tree stmt, lab;
+ basic_block src, dst;
+ edge e;
+
+ stmt = (tree) data;
+ lab = get_eh_region_tree_label (region);
+
+ src = bb_for_stmt (stmt);
+ dst = label_to_block (lab);
+
+ e = find_edge (src, dst);
+ if (!e)
+ {
+ error ("EH edge %i->%i is missing", src->index, dst->index);
+ mark_eh_edge_found_error = true;
+ }
+ else if (!(e->flags & EDGE_EH))
+ {
+ error ("EH edge %i->%i miss EH flag", src->index, dst->index);
+ mark_eh_edge_found_error = true;
+ }
+ else if (e->aux)
+ {
+ /* ??? This might not actually be a mistake. */
+ error ("EH edge %i->%i has duplicated regions", src->index, dst->index);
+ mark_eh_edge_found_error = true;
+ }
+ else
+ e->aux = (void *)1;
+}
+
+/* Verify that the basic block whose last statement is STMT has precisely
+ the edges that make_eh_edges would create. */
+bool
+verify_eh_edges (tree stmt)
+{
+ int region_nr;
+ bool is_resx;
+ basic_block bb = bb_for_stmt (stmt);
+ edge_iterator ei;
+ edge e;
+
+ FOR_EACH_EDGE (e, ei, bb->succs)
+ gcc_assert (!e->aux);
+ mark_eh_edge_found_error = false;
+ if (TREE_CODE (stmt) == RESX_EXPR)
+ {
+ region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
+ is_resx = true;
+ }
+ else
+ {
+ region_nr = lookup_stmt_eh_region (stmt);
+ if (region_nr < 0)
+ {
+ FOR_EACH_EDGE (e, ei, bb->succs)
+ if (e->flags & EDGE_EH)
+ {
+ error ("BB %i can not throw but has EH edges", bb->index);
+ return true;
+ }
+ return false;
+ }
+ if (!tree_could_throw_p (stmt))
+ {
+ error ("BB %i last statement has incorrectly set region", bb->index);
+ return true;
+ }
+ is_resx = false;
+ }
+
+ foreach_reachable_handler (region_nr, is_resx, mark_eh_edge, stmt);
+ FOR_EACH_EDGE (e, ei, bb->succs)
+ {
+ if ((e->flags & EDGE_EH) && !e->aux)
+ {
+ error ("unnecessary EH edge %i->%i", bb->index, e->dest->index);
+ mark_eh_edge_found_error = true;
+ return true;
+ }
+ e->aux = NULL;
+ }
+ return mark_eh_edge_found_error;
+}
\f
/* Return true if the expr can trap, as in dereferencing an invalid pointer
bool honor_nans = false;
bool honor_snans = false;
bool fp_operation = false;
- tree t, base, idx;
+ bool honor_trapv = false;
+ tree t, base;
- if (TREE_CODE_CLASS (code) == '<'
- || TREE_CODE_CLASS (code) == '1'
- || TREE_CODE_CLASS (code) == '2')
+ if (TREE_CODE_CLASS (code) == tcc_comparison
+ || TREE_CODE_CLASS (code) == tcc_unary
+ || TREE_CODE_CLASS (code) == tcc_binary)
{
t = TREE_TYPE (expr);
- fp_operation = FLOAT_TYPE_P (t);
+ if (COMPARISON_CLASS_P (expr))
+ fp_operation = FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)));
+ else
+ fp_operation = FLOAT_TYPE_P (t);
if (fp_operation)
{
honor_nans = flag_trapping_math && !flag_finite_math_only;
honor_snans = flag_signaling_nans != 0;
}
+ else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
+ honor_trapv = true;
}
+ restart:
switch (code)
{
+ case TARGET_MEM_REF:
+ /* For TARGET_MEM_REFs use the information based on the original
+ reference. */
+ expr = TMR_ORIGINAL (expr);
+ code = TREE_CODE (expr);
+ goto restart;
+
case COMPONENT_REF:
case REALPART_EXPR:
case IMAGPART_EXPR:
case BIT_FIELD_REF:
- t = TREE_OPERAND (expr, 0);
- return tree_could_trap_p (t);
+ case VIEW_CONVERT_EXPR:
+ case WITH_SIZE_EXPR:
+ expr = TREE_OPERAND (expr, 0);
+ code = TREE_CODE (expr);
+ goto restart;
case ARRAY_RANGE_REF:
- /* Let us be conservative here for now. We might be checking bounds of
- the access similarly to the case below. */
- if (!TREE_THIS_NOTRAP (expr))
+ base = TREE_OPERAND (expr, 0);
+ if (tree_could_trap_p (base))
return true;
- base = TREE_OPERAND (expr, 0);
- return tree_could_trap_p (base);
+ if (TREE_THIS_NOTRAP (expr))
+ return false;
+
+ return !range_in_array_bounds_p (expr);
case ARRAY_REF:
base = TREE_OPERAND (expr, 0);
- idx = TREE_OPERAND (expr, 1);
if (tree_could_trap_p (base))
return true;
return !in_array_bounds_p (expr);
case INDIRECT_REF:
+ case ALIGN_INDIRECT_REF:
+ case MISALIGNED_INDIRECT_REF:
return !TREE_THIS_NOTRAP (expr);
case ASM_EXPR:
case ROUND_MOD_EXPR:
case TRUNC_MOD_EXPR:
case RDIV_EXPR:
- if (honor_snans)
- return true;
- if (fp_operation && flag_trapping_math)
+ if (honor_snans || honor_trapv)
return true;
+ if (fp_operation)
+ return flag_trapping_math;
t = TREE_OPERAND (expr, 1);
if (!TREE_CONSTANT (t) || integer_zerop (t))
return true;
case CONVERT_EXPR:
case FIX_TRUNC_EXPR:
- case FIX_CEIL_EXPR:
- case FIX_FLOOR_EXPR:
- case FIX_ROUND_EXPR:
/* Conversion of floating point might trap. */
return honor_nans;
case NEGATE_EXPR:
case ABS_EXPR:
case CONJ_EXPR:
- /* These operations don't trap even with floating point. */
+ /* These operations don't trap with floating point. */
+ if (honor_trapv)
+ return true;
+ return false;
+
+ case PLUS_EXPR:
+ case MINUS_EXPR:
+ case MULT_EXPR:
+ /* Any floating arithmetic may trap. */
+ if (fp_operation && flag_trapping_math)
+ return true;
+ if (honor_trapv)
+ return true;
+ return false;
+
+ case CALL_EXPR:
+ t = get_callee_fndecl (expr);
+ /* Assume that calls to weak functions may trap. */
+ if (!t || !DECL_P (t) || DECL_WEAK (t))
+ return true;
return false;
default:
{
if (!flag_exceptions)
return false;
- if (TREE_CODE (t) == MODIFY_EXPR)
+ if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
{
if (flag_non_call_exceptions
- && tree_could_trap_p (TREE_OPERAND (t, 0)))
+ && tree_could_trap_p (GIMPLE_STMT_OPERAND (t, 0)))
return true;
- t = TREE_OPERAND (t, 1);
+ t = GIMPLE_STMT_OPERAND (t, 1);
}
+ if (TREE_CODE (t) == WITH_SIZE_EXPR)
+ t = TREE_OPERAND (t, 0);
if (TREE_CODE (t) == CALL_EXPR)
return (call_expr_flags (t) & ECF_NOTHROW) == 0;
if (flag_non_call_exceptions)
}
bool
-tree_can_throw_internal (tree stmt)
+tree_can_throw_internal (const_tree stmt)
{
- int region_nr = lookup_stmt_eh_region (stmt);
+ int region_nr;
+ bool is_resx = false;
+
+ /* A RESX_EXPR names its EH region directly in operand 0; any other
+ statement is looked up in the per-function EH region table. */
+ if (TREE_CODE (stmt) == RESX_EXPR)
+ region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0)), is_resx = true;
+ else
+ region_nr = lookup_stmt_eh_region (stmt);
if (region_nr < 0)
return false;
- return can_throw_internal_1 (region_nr);
+ return can_throw_internal_1 (region_nr, is_resx);
}
bool
tree_can_throw_external (tree stmt)
{
- int region_nr = lookup_stmt_eh_region (stmt);
+ int region_nr;
+ bool is_resx = false;
+
+ /* As in tree_can_throw_internal: RESX_EXPR encodes its region in
+ operand 0, everything else uses the EH region table. */
+ if (TREE_CODE (stmt) == RESX_EXPR)
+ region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0)), is_resx = true;
+ else
+ region_nr = lookup_stmt_eh_region (stmt);
if (region_nr < 0)
- return false;
- return can_throw_external_1 (region_nr);
+ /* Not in any EH region: conservatively say it throws externally
+ whenever it could throw at all. */
+ return tree_could_throw_p (stmt);
+ else
+ return can_throw_external_1 (region_nr, is_resx);
}
-bool
-maybe_clean_eh_stmt (tree stmt)
+/* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
+ OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
+ in the table if it should be in there. Return TRUE if a replacement was
+ done that may require an EH edge purge. */
+
+bool
+maybe_clean_or_replace_eh_stmt (tree old_stmt, tree new_stmt)
{
- if (!tree_could_throw_p (stmt))
- if (remove_stmt_from_eh_region (stmt))
- return true;
+ int region_nr = lookup_stmt_eh_region (old_stmt);
+
+ if (region_nr >= 0)
+ {
+ bool new_stmt_could_throw = tree_could_throw_p (new_stmt);
+
+ /* In-place update of a still-throwing statement: nothing to do. */
+ if (new_stmt == old_stmt && new_stmt_could_throw)
+ return false;
+
+ remove_stmt_from_eh_region (old_stmt);
+ if (new_stmt_could_throw)
+ {
+ /* The replacement inherits OLD_STMT's region; no edges change. */
+ add_stmt_to_eh_region (new_stmt, region_nr);
+ return false;
+ }
+ else
+ /* NEW_STMT cannot throw, so the EH edges out of its block may
+ now be dead — signal the caller to purge them. */
+ return true;
+ }
+
return false;
}
+\f
+/* Returns TRUE if oneh and twoh are exception handlers (op 1 of
+ TRY_CATCH_EXPR or TRY_FINALLY_EXPR) that are similar enough to be
+ considered the same. Currently this only handles handlers consisting of
+ a single call, as that's the important case for C++: a destructor call
+ for a particular object showing up in multiple handlers. */
+
+static bool
+same_handler_p (tree oneh, tree twoh)
+{
+ tree_stmt_iterator i;
+ tree ones, twos;
+ int ai;
+
+ /* Each handler must consist of exactly one statement. */
+ i = tsi_start (oneh);
+ if (!tsi_one_before_end_p (i))
+ return false;
+ ones = tsi_stmt (i);
+
+ i = tsi_start (twoh);
+ if (!tsi_one_before_end_p (i))
+ return false;
+ twos = tsi_stmt (i);
+
+ /* Both must be calls to the same function with the same arity... */
+ if (TREE_CODE (ones) != CALL_EXPR
+ || TREE_CODE (twos) != CALL_EXPR
+ || !operand_equal_p (CALL_EXPR_FN (ones), CALL_EXPR_FN (twos), 0)
+ || call_expr_nargs (ones) != call_expr_nargs (twos))
+ return false;
+
+ /* ...and with pairwise-equal arguments. */
+ for (ai = 0; ai < call_expr_nargs (ones); ++ai)
+ if (!operand_equal_p (CALL_EXPR_ARG (ones, ai),
+ CALL_EXPR_ARG (twos, ai), 0))
+ return false;
+
+ return true;
+}
+
+/* Optimize
+ try { A() } finally { try { ~B() } catch { ~A() } }
+ try { ... } finally { ~A() }
+ into
+ try { A() } catch { ~B() }
+ try { ~B() ... } finally { ~A() }
+
+ This occurs frequently in C++, where A is a local variable and B is a
+ temporary used in the initializer for A. */
-#include "gt-tree-eh.h"
+static void
+optimize_double_finally (tree one, tree two)
+{
+ tree oneh;
+ tree_stmt_iterator i;
+
+ /* ONE's finally clause must be a lone TRY_CATCH_EXPR. */
+ i = tsi_start (TREE_OPERAND (one, 1));
+ if (!tsi_one_before_end_p (i))
+ return;
+
+ oneh = tsi_stmt (i);
+ if (TREE_CODE (oneh) != TRY_CATCH_EXPR)
+ return;
+
+ /* If that inner catch is the same handler as TWO's finally (~A above),
+ hoist the inner try body (~B) into TWO and demote ONE to try/catch. */
+ if (same_handler_p (TREE_OPERAND (oneh, 1), TREE_OPERAND (two, 1)))
+ {
+ tree b = TREE_OPERAND (oneh, 0);
+ TREE_OPERAND (one, 1) = b;
+ TREE_SET_CODE (one, TRY_CATCH_EXPR);
+
+ /* B now also runs on the fall-through path, so prepend an unshared
+ copy of it to TWO's body. */
+ i = tsi_start (TREE_OPERAND (two, 0));
+ tsi_link_before (&i, unsave_expr_now (b), TSI_SAME_STMT);
+ }
+}
+
+/* Perform EH refactoring optimizations that are simpler to do when code
+ flow has been lowered but EH structures haven't. Recurses into T,
+ applying optimize_double_finally to adjacent TRY_FINALLY_EXPR pairs. */
+
+static void
+refactor_eh_r (tree t)
+{
+ /* Iterate on the last child instead of recursing, to bound stack use
+ on deeply right-nested trees. */
+ tailrecurse:
+ switch (TREE_CODE (t))
+ {
+ case TRY_FINALLY_EXPR:
+ case TRY_CATCH_EXPR:
+ refactor_eh_r (TREE_OPERAND (t, 0));
+ t = TREE_OPERAND (t, 1);
+ goto tailrecurse;
+
+ case CATCH_EXPR:
+ t = CATCH_BODY (t);
+ goto tailrecurse;
+
+ case EH_FILTER_EXPR:
+ t = EH_FILTER_FAILURE (t);
+ goto tailrecurse;
+
+ case STATEMENT_LIST:
+ {
+ tree_stmt_iterator i;
+ tree one = NULL_TREE, two = NULL_TREE;
+ /* Try to refactor double try/finally. ONE/TWO slide over each
+ adjacent pair of statements in the list; ONE is recursed into
+ only after the pair has had a chance to be combined. */
+ for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
+ {
+ one = two;
+ two = tsi_stmt (i);
+ if (one && two
+ && TREE_CODE (one) == TRY_FINALLY_EXPR
+ && TREE_CODE (two) == TRY_FINALLY_EXPR)
+ optimize_double_finally (one, two);
+ if (one)
+ refactor_eh_r (one);
+ }
+ /* The final statement is handled by the tail-recursion loop. */
+ if (two)
+ {
+ t = two;
+ goto tailrecurse;
+ }
+ }
+ break;
+
+ default:
+ /* A type, a decl, or some kind of statement that we're not
+ interested in. Don't walk them. */
+ break;
+ }
+}
+
+/* Gate-less entry point for the ehopt pass: refactor the EH constructs
+ in the current function's saved tree. Returns 0 (no extra TODOs). */
+
+static unsigned
+refactor_eh (void)
+{
+ refactor_eh_r (DECL_SAVED_TREE (current_function_decl));
+ return 0;
+}
+
+/* Pass descriptor for the EH refactoring ("ehopt") pass; requires
+ lowered control flow (PROP_gimple_lcf) and runs unconditionally. */
+
+struct gimple_opt_pass pass_refactor_eh =
+{
+ {
+ GIMPLE_PASS,
+ "ehopt", /* name */
+ NULL, /* gate */
+ refactor_eh, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ TV_TREE_EH, /* tv_id */
+ PROP_gimple_lcf, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ TODO_dump_func /* todo_flags_finish */
+ }
+};