/* Tail call optimization on trees.
- Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
+ Copyright (C) 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
-the Free Software Foundation; either version 2, or (at your option)
+the Free Software Foundation; either version 3, or (at your option)
any later version.
GCC is distributed in the hope that it will be useful,
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING. If not, write to
-the Free Software Foundation, 51 Franklin Street, Fifth Floor,
-Boston, MA 02110-1301, USA. */
+along with GCC; see the file COPYING3. If not see
+<http://www.gnu.org/licenses/>. */
#include "config.h"
#include "system.h"
#include "tree-pass.h"
#include "flags.h"
#include "langhooks.h"
+#include "dbgcnt.h"
/* The file implements the tail recursion elimination. It is also used to
analyze the tail calls in general, passing the results to the rtl level
FOR_EACH_REFERENCED_VAR (var, rvi)
{
-
if (!is_global_var (var)
&& (!MTAG_P (var) || TREE_CODE (var) == STRUCT_FIELD_TAG)
- && is_call_clobbered (var))
+ && (gimple_aliases_computed_p (cfun) ? is_call_clobbered (var)
+ : TREE_ADDRESSABLE (var)))
return false;
}
tree *a, tree *ass_var)
{
tree op0, op1, non_ass_var;
- tree dest = TREE_OPERAND (ass, 0);
- tree src = TREE_OPERAND (ass, 1);
+ tree dest = GIMPLE_STMT_OPERAND (ass, 0);
+ tree src = GIMPLE_STMT_OPERAND (ass, 1);
enum tree_code code = TREE_CODE (src);
tree src_var = src;
/* Accumulator optimizations will reverse the order of operations.
We can only do that for floating-point types if we're assuming
that addition and multiplication are associative. */
- if (!flag_unsafe_math_optimizations)
+ if (!flag_associative_math)
if (FLOAT_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
return false;
*ass_var = dest;
return true;
- /* TODO -- Handle other codes (NEGATE_EXPR, MINUS_EXPR). */
+ /* TODO -- Handle other codes (NEGATE_EXPR, MINUS_EXPR, POINTER_PLUS_EXPR). */
default:
return false;
static void
find_tail_calls (basic_block bb, struct tailcall **ret)
{
- tree ass_var, ret_var, stmt, func, param, args, call = NULL_TREE;
+ tree ass_var, ret_var, stmt, func, param, call = NULL_TREE;
block_stmt_iterator bsi, absi;
bool tail_recursion;
struct tailcall *nw;
continue;
/* Check for a call. */
- if (TREE_CODE (stmt) == MODIFY_EXPR)
+ if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
{
- ass_var = TREE_OPERAND (stmt, 0);
- call = TREE_OPERAND (stmt, 1);
+ ass_var = GIMPLE_STMT_OPERAND (stmt, 0);
+ call = GIMPLE_STMT_OPERAND (stmt, 1);
if (TREE_CODE (call) == WITH_SIZE_EXPR)
call = TREE_OPERAND (call, 0);
}
/* If the statement has virtual or volatile operands, fail. */
ann = stmt_ann (stmt);
if (!ZERO_SSA_OPERANDS (stmt, (SSA_OP_VUSE | SSA_OP_VIRTUAL_DEFS))
- || ann->has_volatile_ops)
+ || ann->has_volatile_ops
+ || (!gimple_aliases_computed_p (cfun) && ann->references_memory))
return;
}
func = get_callee_fndecl (call);
if (func == current_function_decl)
{
- for (param = DECL_ARGUMENTS (func), args = TREE_OPERAND (call, 1);
- param && args;
- param = TREE_CHAIN (param), args = TREE_CHAIN (args))
+ call_expr_arg_iterator iter;
+ tree arg;
+ for (param = DECL_ARGUMENTS (func),
+ arg = first_call_expr_arg (call, &iter);
+ param && arg;
+ param = TREE_CHAIN (param), arg = next_call_expr_arg (&iter))
{
- tree arg = TREE_VALUE (args);
if (param != arg)
{
/* Make sure there are no problems with copying. The parameter
equivalent types. The latter requirement could be relaxed if
we emitted a suitable type conversion statement. */
if (!is_gimple_reg_type (TREE_TYPE (param))
- || !lang_hooks.types_compatible_p (TREE_TYPE (param),
- TREE_TYPE (arg)))
+ || !useless_type_conversion_p (TREE_TYPE (param),
+ TREE_TYPE (arg)))
break;
/* The parameter should be a real operand, so that phi node
of copying the value. This test implies is_gimple_reg_type
from the previous condition, however this one could be
relaxed by being more careful with copying the new value
- of the parameter (emitting appropriate MODIFY_EXPR and
+ of the parameter (emitting appropriate GIMPLE_MODIFY_STMT and
updating the virtual operands). */
if (!is_gimple_reg (param))
break;
}
}
- if (!args && !param)
+ if (!arg && !param)
tail_recursion = true;
}
if (TREE_CODE (stmt) == RETURN_EXPR)
break;
- if (TREE_CODE (stmt) != MODIFY_EXPR)
+ if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
return;
if (!process_assignment (stmt, stmt, bsi, &m, &a, &ass_var))
/* See if this is a tail call we can handle. */
ret_var = TREE_OPERAND (stmt, 0);
if (ret_var
- && TREE_CODE (ret_var) == MODIFY_EXPR)
+ && TREE_CODE (ret_var) == GIMPLE_MODIFY_STMT)
{
- tree ret_op = TREE_OPERAND (ret_var, 1);
+ tree ret_op = GIMPLE_STMT_OPERAND (ret_var, 1);
STRIP_NOPS (ret_op);
if (!tail_recursion
&& TREE_CODE (ret_op) != SSA_NAME)
if (!process_assignment (ret_var, stmt, bsi, &m, &a, &ass_var))
return;
- ret_var = TREE_OPERAND (ret_var, 0);
+ ret_var = GIMPLE_STMT_OPERAND (ret_var, 0);
}
/* We may proceed if there either is no return value, or the return value
var = m_acc;
else
{
- stmt = build2 (MODIFY_EXPR, ret_type, NULL_TREE,
- build2 (MULT_EXPR, ret_type, m_acc, a));
+ stmt = build_gimple_modify_stmt (NULL_TREE,
+ build2 (MULT_EXPR, ret_type,
+ m_acc, a));
tmp = create_tmp_var (ret_type, "acc_tmp");
- add_referenced_tmp_var (tmp);
+ add_referenced_var (tmp);
var = make_ssa_name (tmp, stmt);
- TREE_OPERAND (stmt, 0) = var;
+ GIMPLE_STMT_OPERAND (stmt, 0) = var;
bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
}
}
else
var = a;
- stmt = build2 (MODIFY_EXPR, ret_type, NULL_TREE,
- build2 (PLUS_EXPR, ret_type, a_acc, var));
+ stmt = build_gimple_modify_stmt (NULL_TREE, build2 (PLUS_EXPR, ret_type,
+ a_acc, var));
var = make_ssa_name (SSA_NAME_VAR (a_acc), stmt);
- TREE_OPERAND (stmt, 0) = var;
+ GIMPLE_STMT_OPERAND (stmt, 0) = var;
bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
a_acc_arg = var;
}
if (m)
{
- stmt = build2 (MODIFY_EXPR, ret_type, NULL_TREE,
- build2 (MULT_EXPR, ret_type, m_acc, m));
+ stmt = build_gimple_modify_stmt (NULL_TREE,
+ build2 (MULT_EXPR, ret_type,
+ m_acc, m));
var = make_ssa_name (SSA_NAME_VAR (m_acc), stmt);
- TREE_OPERAND (stmt, 0) = var;
+ GIMPLE_STMT_OPERAND (stmt, 0) = var;
bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
m_acc_arg = var;
}
{
tree ret_stmt = last_stmt (bb), ret_var, var, stmt, tmp;
tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
+ tree *ret_op;
block_stmt_iterator bsi = bsi_last (bb);
gcc_assert (TREE_CODE (ret_stmt) == RETURN_EXPR);
if (!ret_var)
return;
- if (TREE_CODE (ret_var) == MODIFY_EXPR)
+ if (TREE_CODE (ret_var) == GIMPLE_MODIFY_STMT)
{
- ret_var->common.ann = (tree_ann_t) stmt_ann (ret_stmt);
- bsi_replace (&bsi, ret_var, true);
- SSA_NAME_DEF_STMT (TREE_OPERAND (ret_var, 0)) = ret_var;
- ret_var = TREE_OPERAND (ret_var, 0);
- ret_stmt = build1 (RETURN_EXPR, TREE_TYPE (ret_stmt), ret_var);
- bsi_insert_after (&bsi, ret_stmt, BSI_NEW_STMT);
+ ret_op = &GIMPLE_STMT_OPERAND (ret_var, 1);
+ ret_var = *ret_op;
}
+ else
+ ret_op = &TREE_OPERAND (ret_stmt, 0);
if (m)
{
- stmt = build2 (MODIFY_EXPR, ret_type, NULL_TREE,
- build2 (MULT_EXPR, ret_type, m_acc, ret_var));
+ stmt = build_gimple_modify_stmt (NULL_TREE,
+ build2 (MULT_EXPR, ret_type,
+ m_acc, ret_var));
tmp = create_tmp_var (ret_type, "acc_tmp");
- add_referenced_tmp_var (tmp);
+ add_referenced_var (tmp);
var = make_ssa_name (tmp, stmt);
- TREE_OPERAND (stmt, 0) = var;
+ GIMPLE_STMT_OPERAND (stmt, 0) = var;
bsi_insert_before (&bsi, stmt, BSI_SAME_STMT);
}
else
if (a)
{
- stmt = build2 (MODIFY_EXPR, ret_type, NULL_TREE,
- build2 (PLUS_EXPR, ret_type, a_acc, var));
+ stmt = build_gimple_modify_stmt (NULL_TREE,
+ build2 (PLUS_EXPR, ret_type,
+ a_acc, var));
tmp = create_tmp_var (ret_type, "acc_tmp");
- add_referenced_tmp_var (tmp);
+ add_referenced_var (tmp);
var = make_ssa_name (tmp, stmt);
- TREE_OPERAND (stmt, 0) = var;
+ GIMPLE_STMT_OPERAND (stmt, 0) = var;
bsi_insert_before (&bsi, stmt, BSI_SAME_STMT);
}
- TREE_OPERAND (ret_stmt, 0) = var;
+ *ret_op = var;
update_stmt (ret_stmt);
}
e->count = 0;
}
+/* Returns true if argument PARAM of the tail recursive call needs to be copied
+   when the call is eliminated, i.e. when the eliminated recursion must carry
+   the argument's value through a PHI node at the loop header.  */
+
+static bool
+arg_needs_copy_p (tree param)
+{
+ tree def;
+
+ /* Only GIMPLE registers known to the referenced-vars machinery can be
+    carried through a PHI node; anything else is not copied here.  */
+ if (!is_gimple_reg (param) || !var_ann (param))
+ return false;
+
+ /* Parameters that are only defined but never used need not be copied. */
+ def = gimple_default_def (cfun, param);
+ if (!def)
+ return false;
+
+ return true;
+}
+
/* Eliminates tail call described by T. TMP_VARS is a list of
temporary variables used to copy the function arguments. */
static void
eliminate_tail_call (struct tailcall *t)
{
- tree param, stmt, args, rslt, call;
+ tree param, stmt, rslt, call;
+ tree arg;
+ call_expr_arg_iterator iter;
basic_block bb, first;
edge e;
tree phi;
block_stmt_iterator bsi;
- use_operand_p mayuse;
- def_operand_p maydef;
- ssa_op_iter iter;
tree orig_stmt;
stmt = orig_stmt = bsi_stmt (t->call_bsi);
fprintf (dump_file, "\n");
}
- if (TREE_CODE (stmt) == MODIFY_EXPR)
- stmt = TREE_OPERAND (stmt, 1);
+ if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
+ stmt = GIMPLE_STMT_OPERAND (stmt, 1);
first = single_succ (ENTRY_BLOCK_PTR);
if (TREE_CODE (t) == RETURN_EXPR)
break;
- bsi_remove (&bsi);
+ bsi_remove (&bsi, true);
release_defs (t);
}
gcc_assert (e);
PENDING_STMT (e) = NULL_TREE;
- /* Add phi node entries for arguments. Not every PHI node corresponds to
- a function argument (there may be PHI nodes for virtual definitions of the
- eliminated calls), so we search for a PHI corresponding to each argument
- rather than searching for which argument a PHI node corresponds to. */
-
+ /* Add phi node entries for arguments. The ordering of the phi nodes should
+ be the same as the ordering of the arguments. */
for (param = DECL_ARGUMENTS (current_function_decl),
- args = TREE_OPERAND (stmt, 1);
+ arg = first_call_expr_arg (stmt, &iter),
+ phi = phi_nodes (first);
param;
- param = TREE_CHAIN (param),
- args = TREE_CHAIN (args))
+ param = TREE_CHAIN (param), arg = next_call_expr_arg (&iter))
{
-
- for (phi = phi_nodes (first); phi; phi = PHI_CHAIN (phi))
- if (param == SSA_NAME_VAR (PHI_RESULT (phi)))
- break;
-
- /* The phi node indeed does not have to be there, in case the operand is
- invariant in the function. */
- if (!phi)
+ if (!arg_needs_copy_p (param))
continue;
+ gcc_assert (param == SSA_NAME_VAR (PHI_RESULT (phi)));
- add_phi_arg (phi, TREE_VALUE (args), e);
- }
-
- /* Add phi nodes for the call clobbered variables. */
- FOR_EACH_SSA_MAYDEF_OPERAND (maydef, mayuse, orig_stmt, iter)
- {
- param = SSA_NAME_VAR (DEF_FROM_PTR (maydef));
- for (phi = phi_nodes (first); phi; phi = PHI_CHAIN (phi))
- if (param == SSA_NAME_VAR (PHI_RESULT (phi)))
- break;
-
- if (!phi)
- {
- tree name = default_def (param);
- tree new_name;
-
- if (!name)
- {
- /* It may happen that the tag does not have a default_def in case
- when all uses of it are dominated by a MUST_DEF. This however
- means that it is not necessary to add a phi node for this
- tag. */
- continue;
- }
- new_name = make_ssa_name (param, SSA_NAME_DEF_STMT (name));
-
- set_default_def (param, new_name);
- phi = create_phi_node (name, first);
- SSA_NAME_DEF_STMT (name) = phi;
- add_phi_arg (phi, new_name, single_succ_edge (ENTRY_BLOCK_PTR));
-
- /* For all calls the same set of variables should be clobbered. This
- means that there always should be the appropriate phi node except
- for the first time we eliminate the call. */
- gcc_assert (EDGE_COUNT (first->preds) <= 2);
- }
-
- add_phi_arg (phi, USE_FROM_PTR (mayuse), e);
+ add_phi_arg (phi, arg, e);
+ phi = PHI_CHAIN (phi);
}
/* Update the values of accumulators. */
adjust_accumulator_values (t->call_bsi, t->mult, t->add, e);
call = bsi_stmt (t->call_bsi);
- if (TREE_CODE (call) == MODIFY_EXPR)
+ if (TREE_CODE (call) == GIMPLE_MODIFY_STMT)
{
- rslt = TREE_OPERAND (call, 0);
+ rslt = GIMPLE_STMT_OPERAND (call, 0);
/* Result of the call will no longer be defined. So adjust the
SSA_NAME_DEF_STMT accordingly. */
SSA_NAME_DEF_STMT (rslt) = build_empty_stmt ();
}
- bsi_remove (&t->call_bsi);
+ bsi_remove (&t->call_bsi, true);
release_defs (call);
}
+/* Add phi nodes for the virtual operands defined in the function to the
+   header of the loop created by tail recursion elimination.
+
+   Originally, we used to add phi nodes only for call clobbered variables,
+   as the value of the non-call clobbered ones obviously cannot be used
+   or changed within the recursive call.  However, the local variables
+   from multiple calls now share the same location, so the virtual ssa form
+   requires us to say that the location dies on further iterations of the
+   loop, which requires adding phi nodes.  */
+static void
+add_virtual_phis (void)
+{
+ referenced_var_iterator rvi;
+ tree var;
+
+ /* The problematic part is that there is no way to know what to put into
+    the phi nodes (in fact, such an SSA name need not even be available).
+    A solution would be to have an artificial use/kill for all virtual
+    operands in the EXIT node.  Unless we have this, we cannot do much
+    better than to rebuild the ssa form for possibly affected virtual
+    ssa names from scratch.  */
+
+ FOR_EACH_REFERENCED_VAR (var, rvi)
+ {
+ /* Virtual operands are the non-register variables; only those with a
+    default definition actually reach the loop and need renaming.  */
+ if (!is_gimple_reg (var) && gimple_default_def (cfun, var) != NULL_TREE)
+ mark_sym_for_renaming (var);
+ }
+}
+
/* Optimizes the tailcall described by T. If OPT_TAILCALLS is true, also
mark the tailcalls for the sibcall optimization. */
/* Optimizes tail calls in the function, turning the tail recursion
into iteration. */
-static void
+static unsigned int
tree_optimize_tail_calls_1 (bool opt_tailcalls)
{
edge e;
edge_iterator ei;
if (!suitable_for_tail_opt_p ())
- return;
+ return 0;
if (opt_tailcalls)
opt_tailcalls = suitable_for_tail_call_opt_p ();
if (!phis_constructed)
{
- tree name;
/* Ensure that there is only one predecessor of the block. */
if (!single_pred_p (first))
first = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
for (param = DECL_ARGUMENTS (current_function_decl);
param;
param = TREE_CHAIN (param))
- if (is_gimple_reg (param)
- && var_ann (param)
- /* Also parameters that are only defined but never used need not
- be copied. */
- && ((name = default_def (param))
- && TREE_CODE (name) == SSA_NAME))
- {
- tree new_name = make_ssa_name (param, SSA_NAME_DEF_STMT (name));
- tree phi;
-
- set_default_def (param, new_name);
- phi = create_phi_node (name, first);
- SSA_NAME_DEF_STMT (name) = phi;
- add_phi_arg (phi, new_name, single_pred_edge (first));
- }
+ if (arg_needs_copy_p (param))
+ {
+ tree name = gimple_default_def (cfun, param);
+ tree new_name = make_ssa_name (param, SSA_NAME_DEF_STMT (name));
+ tree phi;
+
+ set_default_def (param, new_name);
+ phi = create_phi_node (name, first);
+ SSA_NAME_DEF_STMT (name) = phi;
+ add_phi_arg (phi, new_name, single_pred_edge (first));
+ }
phis_constructed = true;
}
ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
tmp = create_tmp_var (ret_type, "add_acc");
- add_referenced_tmp_var (tmp);
+ add_referenced_var (tmp);
phi = create_phi_node (tmp, first);
add_phi_arg (phi,
ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
tmp = create_tmp_var (ret_type, "mult_acc");
- add_referenced_tmp_var (tmp);
+ add_referenced_var (tmp);
phi = create_phi_node (tmp, first);
add_phi_arg (phi,
}
}
+
+ if (phis_constructed)
+ {
+ /* Reverse the order of the phi nodes, so that it matches the order
+ of operands of the function, as assumed by eliminate_tail_call. */
+ set_phi_nodes (first, phi_reverse (phi_nodes (first)));
+ }
+
for (; tailcalls; tailcalls = next)
{
next = tailcalls->next;
}
if (changed)
- {
- free_dominance_info (CDI_DOMINATORS);
- cleanup_tree_cfg ();
- }
+ free_dominance_info (CDI_DOMINATORS);
+
+ if (phis_constructed)
+ add_virtual_phis ();
+ if (changed)
+ return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
+ return 0;
}
-static void
+static unsigned int
execute_tail_recursion (void)
{
- tree_optimize_tail_calls_1 (false);
+ return tree_optimize_tail_calls_1 (false);
}
static bool
gate_tail_calls (void)
{
- return flag_optimize_sibling_calls != 0;
+ return flag_optimize_sibling_calls != 0 && dbg_cnt (tail_call);
}
-static void
+static unsigned int
execute_tail_calls (void)
{
- tree_optimize_tail_calls_1 (true);
+ return tree_optimize_tail_calls_1 (true);
}
struct tree_opt_pass pass_tail_recursion =
{
"tailr", /* name */
- NULL, /* gate */
+ gate_tail_calls, /* gate */
execute_tail_recursion, /* execute */
NULL, /* sub */
NULL, /* next */
0, /* static_pass_number */
0, /* tv_id */
- PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
+ PROP_cfg | PROP_ssa, /* properties_required */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */