/* Exception handling semantics and decomposition for trees.
- Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
+ Copyright (C) 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
-the Free Software Foundation; either version 2, or (at your option)
+the Free Software Foundation; either version 3, or (at your option)
any later version.
GCC is distributed in the hope that it will be useful,
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING. If not, write to
-the Free Software Foundation, 51 Franklin Street, Fifth Floor,
-Boston, MA 02110-1301, USA. */
+along with GCC; see the file COPYING3. If not see
+<http://www.gnu.org/licenses/>. */
#include "config.h"
#include "system.h"
#include "langhooks.h"
#include "ggc.h"
#include "toplev.h"
+#include "pointer-set.h"
\f
/* Nonzero if we are using EH to handle cleanups. */
slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
gcc_assert (!*slot);
*slot = n;
- /* ??? For the benefit of calls.c, converting all this to rtl,
- we need to record the call expression, not just the outer
- modify statement. */
- if (TREE_CODE (t) == MODIFY_EXPR
- && (t = get_call_expr_in (t)))
- add_stmt_to_eh_region_fn (ifun, t, num);
}
void
if (slot)
{
htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
- /* ??? For the benefit of calls.c, converting all this to rtl,
- we need to record the call expression, not just the outer
- modify statement. */
- if (TREE_CODE (t) == MODIFY_EXPR
- && (t = get_call_expr_in (t)))
- remove_stmt_from_eh_region_fn (ifun, t);
return true;
}
else
}
int
-lookup_stmt_eh_region_fn (struct function *ifun, tree t)
+lookup_stmt_eh_region_fn (struct function *ifun, const_tree t)
{
struct throw_stmt_node *p, n;
if (!get_eh_throw_stmt_table (ifun))
return -2;
- n.stmt = t;
+ /* The CONST_CAST is okay because we don't modify n.stmt throughout
+ its scope, or the scope of p. */
+ n.stmt = CONST_CAST_TREE (t);
p = (struct throw_stmt_node *) htab_find (get_eh_throw_stmt_table (ifun),
&n);
}
int
-lookup_stmt_eh_region (tree t)
+lookup_stmt_eh_region (const_tree t)
{
/* We can get called from initialized data when -fnon-call-exceptions
is on; prevent crash. */
size_t goto_queue_size;
size_t goto_queue_active;
+ /* Pointer map to help in searching goto_queue when it is large. */
+ struct pointer_map_t *goto_queue_map;
+
/* The set of unique labels seen as entries in the goto queue. */
VEC(tree,heap) *dest_array;
static void lower_eh_filter (struct leh_state *, tree *);
static void lower_eh_constructs_1 (struct leh_state *, tree *);
-/* Comparison function for qsort/bsearch. We're interested in
- searching goto queue elements for source statements. */
-
-static int
-goto_queue_cmp (const void *x, const void *y)
-{
- tree a = ((const struct goto_queue_node *)x)->stmt;
- tree b = ((const struct goto_queue_node *)y)->stmt;
- return (a == b ? 0 : a < b ? -1 : 1);
-}
-
/* Search for STMT in the goto queue. Return the replacement,
or null if the statement isn't in the queue. */
+#define LARGE_GOTO_QUEUE 20
+
static tree
find_goto_replacement (struct leh_tf_state *tf, tree stmt)
{
- struct goto_queue_node tmp, *ret;
- tmp.stmt = stmt;
- ret = (struct goto_queue_node *)
- bsearch (&tmp, tf->goto_queue, tf->goto_queue_active,
- sizeof (struct goto_queue_node), goto_queue_cmp);
- return (ret ? ret->repl_stmt : NULL);
+ unsigned int i;
+ void **slot;
+
+ if (tf->goto_queue_active < LARGE_GOTO_QUEUE)
+ {
+ for (i = 0; i < tf->goto_queue_active; i++)
+ if (tf->goto_queue[i].stmt == stmt)
+ return tf->goto_queue[i].repl_stmt;
+ return NULL;
+ }
+
+ /* If we have a large number of entries in the goto_queue, create a
+ pointer map and use that for searching. */
+
+ if (!tf->goto_queue_map)
+ {
+ tf->goto_queue_map = pointer_map_create ();
+ for (i = 0; i < tf->goto_queue_active; i++)
+ {
+ slot = pointer_map_insert (tf->goto_queue_map, tf->goto_queue[i].stmt);
+ gcc_assert (*slot == NULL);
+ *slot = (void *) &tf->goto_queue[i];
+ }
+ }
+
+ slot = pointer_map_contains (tf->goto_queue_map, stmt);
+ if (slot != NULL)
+ return (((struct goto_queue_node *) *slot)->repl_stmt);
+
+ return NULL;
}
/* A subroutine of replace_goto_queue_1. Handles the sub-clauses of a
gcc_unreachable ();
}
+ gcc_assert (!tf->goto_queue_map);
+
active = tf->goto_queue_active;
size = tf->goto_queue_size;
if (active >= size)
q->cont_stmt = q->stmt;
break;
- case MODIFY_EXPR:
+ case GIMPLE_MODIFY_STMT:
{
- tree result = TREE_OPERAND (ret_expr, 0);
- tree new, old = TREE_OPERAND (ret_expr, 1);
+ tree result = GIMPLE_STMT_OPERAND (ret_expr, 0);
+ tree new, old = GIMPLE_STMT_OPERAND (ret_expr, 1);
if (!*return_value_p)
{
else
new = *return_value_p;
- x = build2 (MODIFY_EXPR, TREE_TYPE (new), new, old);
+ x = build_gimple_modify_stmt (new, old);
append_to_statement_list (x, &q->repl_stmt);
if (new == result)
x = result;
else
- x = build2 (MODIFY_EXPR, TREE_TYPE (result), result, new);
+ x = build_gimple_modify_stmt (result, new);
q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
}
i = tsi_start (finally);
x = build0 (EXC_PTR_EXPR, ptr_type_node);
- x = build2 (MODIFY_EXPR, void_type_node, save_eptr, x);
+ x = build_gimple_modify_stmt (save_eptr, x);
tsi_link_before (&i, x, TSI_CONTINUE_LINKING);
x = build0 (FILTER_EXPR, integer_type_node);
- x = build2 (MODIFY_EXPR, void_type_node, save_filt, x);
+ x = build_gimple_modify_stmt (save_filt, x);
tsi_link_before (&i, x, TSI_CONTINUE_LINKING);
i = tsi_last (finally);
x = build0 (EXC_PTR_EXPR, ptr_type_node);
- x = build2 (MODIFY_EXPR, void_type_node, x, save_eptr);
+ x = build_gimple_modify_stmt (x, save_eptr);
tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
x = build0 (FILTER_EXPR, integer_type_node);
- x = build2 (MODIFY_EXPR, void_type_node, x, save_filt);
+ x = build_gimple_modify_stmt (x, save_filt);
tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
x = build_resx (get_eh_region_number (tf->region));
}
}
+ /* Reset the locus of the goto since we're moving the goto
+ to a different block which might be on a different line. */
+ SET_EXPR_LOCUS (tf->goto_queue[0].cont_stmt, NULL);
append_to_statement_list (tf->goto_queue[0].cont_stmt, tf->top_p);
maybe_record_in_goto_queue (state, tf->goto_queue[0].cont_stmt);
}
if (tf->may_fallthru)
{
- x = build2 (MODIFY_EXPR, void_type_node, finally_tmp,
- build_int_cst (NULL_TREE, fallthru_index));
+ x = build_gimple_modify_stmt (finally_tmp,
+ build_int_cst (integer_type_node,
+ fallthru_index));
append_to_statement_list (x, tf->top_p);
if (tf->may_throw)
x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
append_to_statement_list (x, tf->top_p);
- x = build2 (MODIFY_EXPR, void_type_node, finally_tmp,
- build_int_cst (NULL_TREE, eh_index));
+ x = build_gimple_modify_stmt (finally_tmp,
+ build_int_cst (integer_type_node,
+ eh_index));
append_to_statement_list (x, tf->top_p);
last_case = build3 (CASE_LABEL_EXPR, void_type_node,
if (q->index < 0)
{
- mod = build2 (MODIFY_EXPR, void_type_node, finally_tmp,
- build_int_cst (NULL_TREE, return_index));
+ mod = build_gimple_modify_stmt (finally_tmp,
+ build_int_cst (integer_type_node,
+ return_index));
do_return_redirection (q, finally_label, mod, &return_val);
switch_id = return_index;
}
else
{
- mod = build2 (MODIFY_EXPR, void_type_node, finally_tmp,
- build_int_cst (NULL_TREE, q->index));
+ mod = build_gimple_modify_stmt (finally_tmp,
+ build_int_cst (integer_type_node,
+ q->index));
do_goto_redirection (q, finally_label, mod);
switch_id = q->index;
}
return false;
/* Finally estimate N times, plus N gotos. */
- f_estimate = estimate_num_insns (finally);
+ f_estimate = estimate_num_insns (finally, &eni_size_weights);
f_estimate = (f_estimate + 1) * ndests;
/* Switch statement (cost 10), N variable assignments, N gotos. */
honor_protect_cleanup_actions (state, &this_state, &this_tf);
}
- /* Sort the goto queue for efficient searching later. */
- if (this_tf.goto_queue_active > 1)
- qsort (this_tf.goto_queue, this_tf.goto_queue_active,
- sizeof (struct goto_queue_node), goto_queue_cmp);
-
/* Determine how many edges (still) reach the finally block. Or rather,
how many destinations are reached by the finally block. Use this to
determine how we process the finally block itself. */
VEC_free (tree, heap, this_tf.dest_array);
if (this_tf.goto_queue)
free (this_tf.goto_queue);
+ if (this_tf.goto_queue_map)
+ pointer_map_destroy (this_tf.goto_queue_map);
}
/* A subroutine of lower_eh_constructs_1. Lower a TRY_CATCH_EXPR with a
EH_FILTER_TYPES (inner));
this_state = *state;
this_state.cur_region = this_region;
+ /* For must-not-throw regions, any cleanup regions inside them
+ can't reach outer catch regions. */
+ if (EH_FILTER_MUST_NOT_THROW (inner))
+ this_state.prev_try = NULL;
lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
}
break;
- case MODIFY_EXPR:
+ case GIMPLE_MODIFY_STMT:
/* Look for things that can throw exceptions, and record them. */
if (state->cur_region && tree_could_throw_p (t))
{
}
}
-static void
+static unsigned int
lower_eh_constructs (void)
{
struct leh_state null_state;
htab_delete (finally_tree);
collect_eh_region_array ();
+ return 0;
}
struct tree_opt_pass pass_lower_eh =
TV_TREE_EH, /* tv_id */
PROP_gimple_lcf, /* properties_required */
PROP_gimple_leh, /* properties_provided */
- PROP_gimple_lcf, /* properties_destroyed */
+ 0, /* properties_destroyed */
0, /* todo_flags_start */
TODO_dump_func, /* todo_flags_finish */
0 /* letter */
honor_nans = flag_trapping_math && !flag_finite_math_only;
honor_snans = flag_signaling_nans != 0;
}
- else if (INTEGRAL_TYPE_P (t) && TYPE_TRAP_SIGNED (t))
+ else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
honor_trapv = true;
}
case REALPART_EXPR:
case IMAGPART_EXPR:
case BIT_FIELD_REF:
+ case VIEW_CONVERT_EXPR:
case WITH_SIZE_EXPR:
expr = TREE_OPERAND (expr, 0);
code = TREE_CODE (expr);
goto restart;
case ARRAY_RANGE_REF:
- /* Let us be conservative here for now. We might be checking bounds of
- the access similarly to the case below. */
- if (!TREE_THIS_NOTRAP (expr))
+ base = TREE_OPERAND (expr, 0);
+ if (tree_could_trap_p (base))
return true;
- base = TREE_OPERAND (expr, 0);
- return tree_could_trap_p (base);
+ if (TREE_THIS_NOTRAP (expr))
+ return false;
+
+ return !range_in_array_bounds_p (expr);
case ARRAY_REF:
base = TREE_OPERAND (expr, 0);
case CONVERT_EXPR:
case FIX_TRUNC_EXPR:
- case FIX_CEIL_EXPR:
- case FIX_FLOOR_EXPR:
- case FIX_ROUND_EXPR:
/* Conversion of floating point might trap. */
return honor_nans;
{
if (!flag_exceptions)
return false;
- if (TREE_CODE (t) == MODIFY_EXPR)
+ if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
{
if (flag_non_call_exceptions
- && tree_could_trap_p (TREE_OPERAND (t, 0)))
+ && tree_could_trap_p (GIMPLE_STMT_OPERAND (t, 0)))
return true;
- t = TREE_OPERAND (t, 1);
+ t = GIMPLE_STMT_OPERAND (t, 1);
}
if (TREE_CODE (t) == WITH_SIZE_EXPR)
}
bool
-tree_can_throw_internal (tree stmt)
+tree_can_throw_internal (const_tree stmt)
{
int region_nr;
bool is_resx = false;