/* Tail call optimization on trees.
- Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
+ Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012
Free Software Foundation, Inc.
This file is part of GCC.
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
-#include "rtl.h"
#include "tm_p.h"
-#include "hard-reg-set.h"
#include "basic-block.h"
#include "function.h"
#include "tree-flow.h"
#include "tree-dump.h"
-#include "diagnostic.h"
+#include "gimple-pretty-print.h"
#include "except.h"
#include "tree-pass.h"
#include "flags.h"
#include "langhooks.h"
#include "dbgcnt.h"
+#include "target.h"
+#include "common/common-target.h"
/* The file implements the tail recursion elimination. It is also used to
analyze the tail calls in general, passing the results to the rtl level
+/* Returns true if the current function is suitable for tail call
+   optimization at all.  After this patch only varargs functions
+   (cfun->stdarg) are rejected wholesale here; the old blanket scan
+   rejecting any call-used local variable is deleted — a finer-grained
+   per-call check of local variables is added in find_tail_calls
+   instead (see the FOR_EACH_REFERENCED_VAR hunk added there).  */
static bool
suitable_for_tail_opt_p (void)
{
- referenced_var_iterator rvi;
- tree var;
-
 if (cfun->stdarg)
 return false;
- /* No local variable nor structure field should be call-used. */
- FOR_EACH_REFERENCED_VAR (var, rvi)
- {
- if (!is_global_var (var)
- && is_call_used (var))
- return false;
- }
-
 return true;
}
/* Returns false when the function is not suitable for tail call optimization
/* If we are using sjlj exceptions, we may need to add a call to
_Unwind_SjLj_Unregister at exit of the function. Which means
that we cannot do any sibcall transformations. */
- if (USING_SJLJ_EXCEPTIONS && current_function_has_exception_handlers ())
+ if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
+ && current_function_has_exception_handlers ())
return false;
/* Any function that calls setjmp might have longjmp called from
but not in all cases. See PR15387 and PR19616. Revisit for 4.1. */
for (param = DECL_ARGUMENTS (current_function_decl);
param;
- param = TREE_CHAIN (param))
+ param = DECL_CHAIN (param))
if (TREE_ADDRESSABLE (param))
return false;
process_assignment (gimple stmt, gimple_stmt_iterator call, tree *m,
tree *a, tree *ass_var)
{
- tree op0, op1, non_ass_var;
+ tree op0, op1 = NULL_TREE, non_ass_var = NULL_TREE;
tree dest = gimple_assign_lhs (stmt);
enum tree_code code = gimple_assign_rhs_code (stmt);
enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
return true;
}
- if (rhs_class != GIMPLE_BINARY_RHS)
- return false;
+ switch (rhs_class)
+ {
+ case GIMPLE_BINARY_RHS:
+ op1 = gimple_assign_rhs2 (stmt);
+
+ /* Fall through. */
+
+ case GIMPLE_UNARY_RHS:
+ op0 = gimple_assign_rhs1 (stmt);
+ break;
+
+ default:
+ return false;
+ }
/* Accumulator optimizations will reverse the order of operations.
We can only do that for floating-point types if we're assuming
if (FLOAT_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
return false;
- /* We only handle the code like
-
- x = call ();
- y = m * x;
- z = y + a;
- return z;
-
- TODO -- Extend it for cases where the linear transformation of the output
- is expressed in a more complicated way. */
-
- op0 = gimple_assign_rhs1 (stmt);
- op1 = gimple_assign_rhs2 (stmt);
-
- if (op0 == *ass_var
+ if (rhs_class == GIMPLE_UNARY_RHS)
+ ;
+ else if (op0 == *ass_var
&& (non_ass_var = independent_of_stmt_p (op1, stmt, call)))
;
else if (op1 == *ass_var
*ass_var = dest;
return true;
- /* TODO -- Handle other codes (NEGATE_EXPR, MINUS_EXPR,
- POINTER_PLUS_EXPR). */
+ case NEGATE_EXPR:
+ if (FLOAT_TYPE_P (TREE_TYPE (op0)))
+ *m = build_real (TREE_TYPE (op0), dconstm1);
+ else if (INTEGRAL_TYPE_P (TREE_TYPE (op0)))
+ *m = build_int_cst (TREE_TYPE (op0), -1);
+ else
+ return false;
+
+ *ass_var = dest;
+ return true;
+
+ case MINUS_EXPR:
+ if (*ass_var == op0)
+ *a = fold_build1 (NEGATE_EXPR, TREE_TYPE (non_ass_var), non_ass_var);
+ else
+ {
+ if (FLOAT_TYPE_P (TREE_TYPE (non_ass_var)))
+ *m = build_real (TREE_TYPE (non_ass_var), dconstm1);
+ else if (INTEGRAL_TYPE_P (TREE_TYPE (non_ass_var)))
+ *m = build_int_cst (TREE_TYPE (non_ass_var), -1);
+ else
+ return false;
+
+ *a = fold_build1 (NEGATE_EXPR, TREE_TYPE (non_ass_var), non_ass_var);
+ }
+
+ *ass_var = dest;
+ return true;
+
+ /* TODO -- Handle POINTER_PLUS_EXPR. */
default:
return false;
tree m, a;
basic_block abb;
size_t idx;
+ tree var;
+ referenced_var_iterator rvi;
if (!single_succ_p (bb))
return;
{
stmt = gsi_stmt (gsi);
- /* Ignore labels. */
- if (gimple_code (stmt) == GIMPLE_LABEL || is_gimple_debug (stmt))
+ /* Ignore labels, returns, clobbers and debug stmts. */
+ if (gimple_code (stmt) == GIMPLE_LABEL
+ || gimple_code (stmt) == GIMPLE_RETURN
+ || gimple_clobber_p (stmt)
+ || is_gimple_debug (stmt))
continue;
/* Check for a call. */
if (func == current_function_decl)
{
tree arg;
+
for (param = DECL_ARGUMENTS (func), idx = 0;
param && idx < gimple_call_num_args (call);
- param = TREE_CHAIN (param), idx ++)
+ param = DECL_CHAIN (param), idx ++)
{
arg = gimple_call_arg (call, idx);
if (param != arg)
tail_recursion = true;
}
+ /* Make sure the tail invocation of this function does not refer
+ to local variables. */
+ FOR_EACH_REFERENCED_VAR (cfun, var, rvi)
+ {
+ if (TREE_CODE (var) != PARM_DECL
+ && auto_var_in_fn_p (var, cfun->decl)
+ && (ref_maybe_used_by_stmt_p (call, var)
+ || call_may_clobber_ref_p (call, var)))
+ return;
+ }
+
/* Now check the statements after the call. None of them has virtual
operands, so they may only depend on the call through its return
value. The return value should also be dependent on each of them,
if (gimple_code (stmt) == GIMPLE_RETURN)
break;
+ if (gimple_clobber_p (stmt))
+ continue;
+
if (is_gimple_debug (stmt))
continue;
if (tmp_a)
{
+ tree type = TREE_TYPE (tmp_a);
if (a)
- a = fold_build2 (PLUS_EXPR, TREE_TYPE (tmp_a), a, tmp_a);
+ a = fold_build2 (PLUS_EXPR, type, fold_convert (type, a), tmp_a);
else
a = tmp_a;
}
if (tmp_m)
{
+ tree type = TREE_TYPE (tmp_m);
if (m)
- m = fold_build2 (MULT_EXPR, TREE_TYPE (tmp_m), m, tmp_m);
+ m = fold_build2 (MULT_EXPR, type, fold_convert (type, m), tmp_m);
else
m = tmp_m;
if (a)
- a = fold_build2 (MULT_EXPR, TREE_TYPE (tmp_m), a, tmp_m);
+ a = fold_build2 (MULT_EXPR, type, fold_convert (type, a), tmp_m);
}
}
if (!tail_recursion && (m || a))
return;
+ /* For pointers don't allow additions or multiplications. */
+ if ((m || a)
+ && POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
+ return;
+
nw = XNEW (struct tailcall);
nw->call_gsi = gsi;
}
/* Creates a GIMPLE statement which computes the operation specified by
- CODE, OP0 and OP1 to a new variable with name LABEL and inserts the
- statement in the position specified by GSI and UPDATE. Returns the
+ CODE, ACC and OP1 to a new variable with name LABEL and inserts the
+ statement in the position specified by GSI. Returns the
tree node of the statement's result. */
static tree
{
tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
- tree tmp = create_tmp_var (ret_type, label);
+ tree tmp = create_tmp_reg (ret_type, label);
gimple stmt;
tree result;
- if (TREE_CODE (ret_type) == COMPLEX_TYPE
- || TREE_CODE (ret_type) == VECTOR_TYPE)
- DECL_GIMPLE_REG_P (tmp) = 1;
add_referenced_var (tmp);
if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
fold_convert (TREE_TYPE (op1), acc),
op1));
rhs = force_gimple_operand_gsi (&gsi, rhs,
- false, NULL, true, GSI_CONTINUE_LINKING);
+ false, NULL, true, GSI_SAME_STMT);
stmt = gimple_build_assign (NULL_TREE, rhs);
}
for (param = DECL_ARGUMENTS (current_function_decl),
idx = 0, gsi = gsi_start_phis (first);
param;
- param = TREE_CHAIN (param), idx++)
+ param = DECL_CHAIN (param), idx++)
{
if (!arg_needs_copy_p (param))
continue;
this, we cannot do much better than to rebuild the ssa form for
possibly affected virtual ssa names from scratch. */
- FOR_EACH_REFERENCED_VAR (var, rvi)
+ FOR_EACH_REFERENCED_VAR (cfun, var, rvi)
{
if (!is_gimple_reg (var) && gimple_default_def (cfun, var) != NULL_TREE)
mark_sym_for_renaming (var);
create_tailcall_accumulator (const char *label, basic_block bb, tree init)
{
tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
- tree tmp = create_tmp_var (ret_type, label);
+ tree tmp = create_tmp_reg (ret_type, label);
gimple phi;
- if (TREE_CODE (ret_type) == COMPLEX_TYPE
- || TREE_CODE (ret_type) == VECTOR_TYPE)
- DECL_GIMPLE_REG_P (tmp) = 1;
add_referenced_var (tmp);
phi = create_phi_node (tmp, bb);
/* RET_TYPE can be a float when -ffast-maths is enabled. */
/* Copy the args if needed. */
for (param = DECL_ARGUMENTS (current_function_decl);
param;
- param = TREE_CHAIN (param))
+ param = DECL_CHAIN (param))
if (arg_needs_copy_p (param))
{
tree name = gimple_default_def (cfun, param);
integer_one_node);
}
+ if (a_acc || m_acc)
+ {
+ /* When the tail call elimination using accumulators is performed,
+ statements adding the accumulated value are inserted at all exits.
+ This turns all other tail calls to non-tail ones. */
+ opt_tailcalls = false;
+ }
+
for (; tailcalls; tailcalls = next)
{
next = tailcalls->next;
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func | TODO_verify_ssa /* todo_flags_finish */
+ TODO_verify_ssa /* todo_flags_finish */
}
};
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func | TODO_verify_ssa /* todo_flags_finish */
+ TODO_verify_ssa /* todo_flags_finish */
}
};