/* Tail call optimization on trees.
- Copyright (C) 2003 Free Software Foundation, Inc.
+ Copyright (C) 2003, 2004 Free Software Foundation, Inc.
This file is part of GCC.
{
basic_block bb, call_bb, at_bb;
edge e;
+ edge_iterator ei;
if (is_gimple_min_invariant (expr))
return expr;
/* Mark the blocks in the chain leading to the end. */
at_bb = bb_for_stmt (at);
call_bb = bb_for_stmt (bsi_stmt (bsi));
- for (bb = call_bb; bb != at_bb; bb = bb->succ->dest)
+ for (bb = call_bb; bb != at_bb; bb = EDGE_SUCC (bb, 0)->dest)
bb->aux = &bb->aux;
bb->aux = &bb->aux;
break;
}
- for (e = bb->pred; e; e = e->pred_next)
+ FOR_EACH_EDGE (e, ei, bb->preds)
if (e->src->aux)
break;
- if (!e)
- abort ();
+ gcc_assert (e);
expr = PHI_ARG_DEF_FROM_EDGE (at, e);
if (TREE_CODE (expr) != SSA_NAME)
}
/* Unmark the blocks. */
- for (bb = call_bb; bb != at_bb; bb = bb->succ->dest)
+ for (bb = call_bb; bb != at_bb; bb = EDGE_SUCC (bb, 0)->dest)
bb->aux = NULL;
bb->aux = NULL;
return true;
}
- if (TREE_CODE_CLASS (code) != '2')
+ if (TREE_CODE_CLASS (code) != tcc_binary)
return false;
+ /* Accumulator optimizations will reverse the order of operations.
+ We can only do that for floating-point types if we're assuming
+ that addition and multiplication are associative. */
+ if (!flag_unsafe_math_optimizations)
+ if (FLOAT_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
+ return false;
+
/* We only handle the code like
x = call ();
basic_block abb;
stmt_ann_t ann;
- if (bb->succ->succ_next)
+ if (EDGE_COUNT (bb->succs) > 1)
return;
for (bsi = bsi_last (bb); !bsi_end_p (bsi); bsi_prev (&bsi))
if (bsi_end_p (bsi))
{
+ edge_iterator ei;
/* Recurse to the predecessors. */
- for (e = bb->pred; e; e = e->pred_next)
+ FOR_EACH_EDGE (e, ei, bb->preds)
find_tail_calls (e->src, ret);
return;
param = TREE_CHAIN (param), args = TREE_CHAIN (args))
{
tree arg = TREE_VALUE (args);
- if (param != arg
- /* Make sure there are no problems with copying. Note we must
+ if (param != arg)
+ {
+ /* Make sure there are no problems with copying. The parameter
have a copyable type and the two arguments must have reasonably
equivalent types. The latter requirement could be relaxed if
we emitted a suitable type conversion statement. */
- && (!is_gimple_reg_type (TREE_TYPE (param))
+ if (!is_gimple_reg_type (TREE_TYPE (param))
|| !lang_hooks.types_compatible_p (TREE_TYPE (param),
- TREE_TYPE (arg))))
- break;
+ TREE_TYPE (arg)))
+ break;
+
+ /* The parameter should be a real operand, so that phi node
+ created for it at the start of the function has the meaning
+ of copying the value. This test implies is_gimple_reg_type
+ from the previous condition, however this one could be
+ relaxed by being more careful with copying the new value
+ of the parameter (emitting appropriate MODIFY_EXPR and
+ updating the virtual operands). */
+ if (!is_gimple_reg (param))
+ break;
+ }
}
if (!args && !param)
tail_recursion = true;
while (bsi_end_p (absi))
{
- ass_var = propagate_through_phis (ass_var, abb->succ);
- abb = abb->succ->dest;
+ ass_var = propagate_through_phis (ass_var, EDGE_SUCC (abb, 0));
+ abb = EDGE_SUCC (abb, 0)->dest;
absi = bsi_start (abb);
}
if (PHI_RESULT (phi) == a_acc)
break;
- add_phi_arg (&phi, a_acc_arg, back);
+ add_phi_arg (phi, a_acc_arg, back);
}
if (m_acc)
if (PHI_RESULT (phi) == m_acc)
break;
- add_phi_arg (&phi, m_acc_arg, back);
+ add_phi_arg (phi, m_acc_arg, back);
}
}
tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
block_stmt_iterator bsi = bsi_last (bb);
- if (TREE_CODE (ret_stmt) != RETURN_EXPR)
- abort ();
+ gcc_assert (TREE_CODE (ret_stmt) == RETURN_EXPR);
ret_var = TREE_OPERAND (ret_stmt, 0);
if (!ret_var)
var = make_ssa_name (tmp, stmt);
TREE_OPERAND (stmt, 0) = var;
- bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
+ bsi_insert_before (&bsi, stmt, BSI_SAME_STMT);
}
else
var = ret_var;
var = make_ssa_name (tmp, stmt);
TREE_OPERAND (stmt, 0) = var;
- bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
+ bsi_insert_before (&bsi, stmt, BSI_SAME_STMT);
}
TREE_OPERAND (ret_stmt, 0) = var;
if (TREE_CODE (stmt) == MODIFY_EXPR)
stmt = TREE_OPERAND (stmt, 1);
- first = ENTRY_BLOCK_PTR->succ->dest;
+ first = EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest;
/* Remove the code after call_bsi that will become unreachable. The
possibly unreachable code in other blocks is removed later in
bsi_next (&bsi);
while (!bsi_end_p (bsi))
{
+ tree t = bsi_stmt (bsi);
/* Do not remove the return statement, so that redirect_edge_and_branch
sees how the block ends. */
- if (TREE_CODE (bsi_stmt (bsi)) == RETURN_EXPR)
+ if (TREE_CODE (t) == RETURN_EXPR)
break;
bsi_remove (&bsi);
+ release_defs (t);
}
/* Replace the call by a jump to the start of function. */
- e = redirect_edge_and_branch (t->call_block->succ, first);
- if (!e)
- abort ();
+ e = redirect_edge_and_branch (EDGE_SUCC (t->call_block, 0), first);
+ gcc_assert (e);
PENDING_STMT (e) = NULL_TREE;
/* Add phi node entries for arguments. Not every PHI node corresponds to
if (!phi)
continue;
- add_phi_arg (&phi, TREE_VALUE (args), e);
+ add_phi_arg (phi, TREE_VALUE (args), e);
}
/* Add phi nodes for the call clobbered variables. */
var_ann (param)->default_def = new_name;
phi = create_phi_node (name, first);
SSA_NAME_DEF_STMT (name) = phi;
- add_phi_arg (&phi, new_name, ENTRY_BLOCK_PTR->succ);
+ add_phi_arg (phi, new_name, EDGE_SUCC (ENTRY_BLOCK_PTR, 0));
/* For all calls the same set of variables should be clobbered. This
means that there always should be the appropriate phi node except
for the first time we eliminate the call. */
- if (first->pred->pred_next->pred_next)
- abort ();
+ gcc_assert (EDGE_COUNT (first->preds) <= 2);
}
- add_phi_arg (&phi, V_MAY_DEF_OP (v_may_defs, i), e);
+ add_phi_arg (phi, V_MAY_DEF_OP (v_may_defs, i), e);
}
/* Update the values of accumulators. */
}
bsi_remove (&t->call_bsi);
+ release_defs (call);
}
/* Optimizes the tailcall described by T. If OPT_TAILCALLS is true, also
bool phis_constructed = false;
struct tailcall *tailcalls = NULL, *act, *next;
bool changed = false;
- basic_block first = ENTRY_BLOCK_PTR->succ->dest;
+ basic_block first = EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest;
tree stmt, param, ret_type, tmp, phi;
+ edge_iterator ei;
if (!suitable_for_tail_opt_p ())
return;
if (opt_tailcalls)
opt_tailcalls = suitable_for_tail_call_opt_p ();
- for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
+ FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
{
/* Only traverse the normal exits, i.e. those that end with return
statement. */
if (!phis_constructed)
{
/* Ensure that there is only one predecessor of the block. */
- if (first->pred->pred_next)
- first = split_edge (ENTRY_BLOCK_PTR->succ);
+ if (EDGE_COUNT (first->preds) > 1)
+ first = split_edge (EDGE_SUCC (ENTRY_BLOCK_PTR, 0));
/* Copy the args if needed. */
for (param = DECL_ARGUMENTS (current_function_decl);
param;
param = TREE_CHAIN (param))
- if (var_ann (param)
+ if (is_gimple_reg (param)
+ && var_ann (param)
/* Also parameters that are only defined but never used need not
be copied. */
&& (var_ann (param)->default_def
var_ann (param)->default_def = new_name;
phi = create_phi_node (name, first);
SSA_NAME_DEF_STMT (name) = phi;
- add_phi_arg (&phi, new_name, first->pred);
+ add_phi_arg (phi, new_name, EDGE_PRED (first, 0));
}
phis_constructed = true;
}
add_referenced_tmp_var (tmp);
phi = create_phi_node (tmp, first);
- add_phi_arg (&phi, fold_convert (ret_type, integer_zero_node),
- first->pred);
+ add_phi_arg (phi,
+ /* RET_TYPE can be a float when -ffast-math is
+ enabled. */
+ fold_convert (ret_type, integer_zero_node),
+ EDGE_PRED (first, 0));
a_acc = PHI_RESULT (phi);
}
add_referenced_tmp_var (tmp);
phi = create_phi_node (tmp, first);
- add_phi_arg (&phi, fold_convert (ret_type, integer_one_node),
- first->pred);
+ add_phi_arg (phi,
+ /* RET_TYPE can be a float when -ffast-math is
+ enabled. */
+ fold_convert (ret_type, integer_one_node),
+ EDGE_PRED (first, 0));
m_acc = PHI_RESULT (phi);
}
}
if (a_acc || m_acc)
{
/* Modify the remaining return statements. */
- for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
+ FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
{
stmt = last_stmt (e->src);
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func | TODO_verify_ssa /* todo_flags_finish */
+ TODO_dump_func | TODO_verify_ssa, /* todo_flags_finish */
+ 0 /* letter */
};
struct tree_opt_pass pass_tail_calls =
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func | TODO_verify_ssa /* todo_flags_finish */
+ TODO_dump_func | TODO_verify_ssa, /* todo_flags_finish */
+ 0 /* letter */
};