/* Tail call optimization on trees.
- Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
+ Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
Free Software Foundation, Inc.
This file is part of GCC.
return acc;
}
- To do this, we maintain two accumulators (a_acc and m_acc) that indicate
+ To do this, we maintain two accumulators (a_acc and m_acc) that indicate
when we reach the return x statement, we should return a_acc + x * m_acc
instead. They are initially initialized to 0 and 1, respectively,
so the semantics of the function is obviously preserved. If we are
1) Just return x, where x is not in any of the remaining special shapes.
We rewrite this to a gimple equivalent of return m_acc * x + a_acc.
-
+
2) return f (...), where f is the current function, is rewritten in a
classical tail-recursion elimination way, into assignment of arguments
and jump to the start of the function. Values of the accumulators
are unchanged.
-
+
3) return a + m * f(...), where a and m do not depend on call to f.
To preserve the semantics described before we want this to be rewritten
in such a way that we finally return
static bool
suitable_for_tail_opt_p (void)
{
- referenced_var_iterator rvi;
- tree var;
-
if (cfun->stdarg)
return false;
- /* No local variable nor structure field should be call-used. */
- FOR_EACH_REFERENCED_VAR (var, rvi)
- {
- if (!is_global_var (var)
- && is_call_used (var))
- return false;
- }
-
return true;
}
/* Returns false when the function is not suitable for tail call optimization
bb->aux = &bb->aux;
while (1)
- {
+ {
at = SSA_NAME_DEF_STMT (expr);
bb = gimple_bb (at);
enum tree_code code = gimple_assign_rhs_code (stmt);
enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
tree src_var = gimple_assign_rhs1 (stmt);
-
+
/* See if this is a simple copy operation of an SSA name to the function
result. In that case we may have a simple tail call. Ignore type
conversions that can never produce extra code between the function
switch (code)
{
case PLUS_EXPR:
- /* There should be no previous addition. TODO -- it should be fairly
- straightforward to lift this restriction -- just allow storing
- more complicated expressions in *A, and gimplify it in
- adjust_accumulator_values. */
- if (*a)
- return false;
*a = non_ass_var;
*ass_var = dest;
return true;
case MULT_EXPR:
- /* Similar remark applies here. Handling multiplication after addition
- is just slightly more complicated -- we need to multiply both *A and
- *M. */
- if (*a || *m)
- return false;
*m = non_ass_var;
*ass_var = dest;
return true;
{
basic_block dest = e->dest;
gimple_stmt_iterator gsi;
-
+
for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple phi = gsi_stmt (gsi);
tree m, a;
basic_block abb;
size_t idx;
+ tree var;
+ referenced_var_iterator rvi;
if (!single_succ_p (bb))
return;
return;
}
- /* If the LHS of our call is not just a simple register, we can't
+ /* If the LHS of our call is not just a simple register, we can't
transform this into a tail or sibling call. This situation happens,
in (e.g.) "*p = foo()" where foo returns a struct. In this case
we won't have a temporary here, but we need to carry out the side
if (func == current_function_decl)
{
tree arg;
+
for (param = DECL_ARGUMENTS (func), idx = 0;
param && idx < gimple_call_num_args (call);
param = TREE_CHAIN (param), idx ++)
tail_recursion = true;
}
+ /* Make sure the tail invocation of this function does not refer
+ to local variables. */
+ FOR_EACH_REFERENCED_VAR (var, rvi)
+ {
+ if (TREE_CODE (var) != PARM_DECL
+ && auto_var_in_fn_p (var, cfun->decl)
+ && (ref_maybe_used_by_stmt_p (call, var)
+ || call_may_clobber_ref_p (call, var)))
+ return;
+ }
+
/* Now check the statements after the call. None of them has virtual
operands, so they may only depend on the call through its return
value. The return value should also be dependent on each of them,
agsi = gsi;
while (1)
{
+ tree tmp_a = NULL_TREE;
+ tree tmp_m = NULL_TREE;
gsi_next (&agsi);
while (gsi_end_p (agsi))
return;
/* This is a gimple assign. */
- if (! process_assignment (stmt, gsi, &m, &a, &ass_var))
+ if (! process_assignment (stmt, gsi, &tmp_m, &tmp_a, &ass_var))
return;
+
+ if (tmp_a)
+ {
+ if (a)
+ a = fold_build2 (PLUS_EXPR, TREE_TYPE (tmp_a), a, tmp_a);
+ else
+ a = tmp_a;
+ }
+ if (tmp_m)
+ {
+ if (m)
+ m = fold_build2 (MULT_EXPR, TREE_TYPE (tmp_m), m, tmp_m);
+ else
+ m = tmp_m;
+
+ if (a)
+ a = fold_build2 (MULT_EXPR, TREE_TYPE (tmp_m), a, tmp_m);
+ }
}
/* See if this is a tail call we can handle. */
tree node of the statement's result. */
static tree
-adjust_return_value_with_ops (enum tree_code code, const char *label,
- tree op0, tree op1, gimple_stmt_iterator gsi,
- enum gsi_iterator_update update)
+adjust_return_value_with_ops (enum tree_code code, const char *label,
+ tree acc, tree op1, gimple_stmt_iterator gsi)
{
tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
- tree tmp = create_tmp_var (ret_type, label);
- gimple stmt = gimple_build_assign_with_ops (code, tmp, op0, op1);
+ tree tmp = create_tmp_reg (ret_type, label);
+ gimple stmt;
tree result;
- if (TREE_CODE (ret_type) == COMPLEX_TYPE
- || TREE_CODE (ret_type) == VECTOR_TYPE)
- DECL_GIMPLE_REG_P (tmp) = 1;
add_referenced_var (tmp);
+
+ if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
+ stmt = gimple_build_assign_with_ops (code, tmp, acc, op1);
+ else
+ {
+ tree rhs = fold_convert (TREE_TYPE (acc),
+ fold_build2 (code,
+ TREE_TYPE (op1),
+ fold_convert (TREE_TYPE (op1), acc),
+ op1));
+ rhs = force_gimple_operand_gsi (&gsi, rhs,
+ false, NULL, true, GSI_CONTINUE_LINKING);
+ stmt = gimple_build_assign (NULL_TREE, rhs);
+ }
+
result = make_ssa_name (tmp, stmt);
gimple_assign_set_lhs (stmt, result);
update_stmt (stmt);
- gsi_insert_before (&gsi, stmt, update);
+ gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
return result;
}
-/* Creates a new GIMPLE statement that adjusts the value of accumulator ACC by
+/* Creates a new GIMPLE statement that adjusts the value of accumulator ACC by
the computation specified by CODE and OP1 and insert the statement
at the position specified by GSI as a new statement. Returns new SSA name
of updated accumulator. */
update_accumulator_with_ops (enum tree_code code, tree acc, tree op1,
gimple_stmt_iterator gsi)
{
- gimple stmt = gimple_build_assign_with_ops (code, SSA_NAME_VAR (acc), acc,
- op1);
- tree var = make_ssa_name (SSA_NAME_VAR (acc), stmt);
+ gimple stmt;
+ tree var;
+ if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
+ stmt = gimple_build_assign_with_ops (code, SSA_NAME_VAR (acc), acc, op1);
+ else
+ {
+ tree rhs = fold_convert (TREE_TYPE (acc),
+ fold_build2 (code,
+ TREE_TYPE (op1),
+ fold_convert (TREE_TYPE (op1), acc),
+ op1));
+ rhs = force_gimple_operand_gsi (&gsi, rhs,
+ false, NULL, false, GSI_CONTINUE_LINKING);
+ stmt = gimple_build_assign (NULL_TREE, rhs);
+ }
+ var = make_ssa_name (SSA_NAME_VAR (acc), stmt);
gimple_assign_set_lhs (stmt, var);
update_stmt (stmt);
gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
static void
adjust_accumulator_values (gimple_stmt_iterator gsi, tree m, tree a, edge back)
{
- tree var, a_acc_arg = a_acc, m_acc_arg = m_acc;
+ tree var, a_acc_arg, m_acc_arg;
+ if (m)
+ m = force_gimple_operand_gsi (&gsi, m, true, NULL, true, GSI_SAME_STMT);
+ if (a)
+ a = force_gimple_operand_gsi (&gsi, a, true, NULL, true, GSI_SAME_STMT);
+
+ a_acc_arg = a_acc;
+ m_acc_arg = m_acc;
if (a)
{
if (m_acc)
var = m_acc;
else
var = adjust_return_value_with_ops (MULT_EXPR, "acc_tmp", m_acc,
- a, gsi, GSI_NEW_STMT);
+ a, gsi);
}
else
var = a;
if (m)
retval = adjust_return_value_with_ops (MULT_EXPR, "mul_tmp", m_acc, retval,
- gsi, GSI_SAME_STMT);
+ gsi);
if (a)
retval = adjust_return_value_with_ops (PLUS_EXPR, "acc_tmp", a_acc, retval,
- gsi, GSI_SAME_STMT);
+ gsi);
gimple_return_set_retval (ret_stmt, retval);
update_stmt (ret_stmt);
}
if (!is_gimple_reg (param) || !var_ann (param))
return false;
-
+
/* Parameters that are only defined but never used need not be copied. */
def = gimple_default_def (cfun, param);
if (!def)
create_tailcall_accumulator (const char *label, basic_block bb, tree init)
{
tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
- tree tmp = create_tmp_var (ret_type, label);
+ tree tmp = create_tmp_reg (ret_type, label);
gimple phi;
- if (TREE_CODE (ret_type) == COMPLEX_TYPE
- || TREE_CODE (ret_type) == VECTOR_TYPE)
- DECL_GIMPLE_REG_P (tmp) = 1;
add_referenced_var (tmp);
phi = create_phi_node (tmp, bb);
/* RET_TYPE can be a float when -ffast-math is enabled.  */
UNKNOWN_LOCATION);
return PHI_RESULT (phi);
}
-
+
/* Optimizes tail calls in the function, turning the tail recursion
into iteration. */
set_default_def (param, new_name);
phi = create_phi_node (name, first);
SSA_NAME_DEF_STMT (name) = phi;
- add_phi_arg (phi, new_name, single_pred_edge (first),
+ add_phi_arg (phi, new_name, single_pred_edge (first),
EXPR_LOCATION (param));
}
phis_constructed = true;
return tree_optimize_tail_calls_1 (true);
}
-struct gimple_opt_pass pass_tail_recursion =
+struct gimple_opt_pass pass_tail_recursion =
{
{
GIMPLE_PASS,
}
};
-struct gimple_opt_pass pass_tail_calls =
+struct gimple_opt_pass pass_tail_calls =
{
{
GIMPLE_PASS,