/* Tail call optimization on trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "gimple-pretty-print.h"
#include "except.h"
#include "tree-pass.h"
#include "flags.h"
#include "langhooks.h"
#include "dbgcnt.h"
#include "target.h"
#include "common/common-target.h"
/* This file implements tail recursion elimination.  It is also used to
   analyze tail calls in general, passing the results to the RTL level,
   where they are used for sibcall optimization.

   In addition to the standard tail recursion elimination, we handle the most
   trivial cases of making the call tail recursive by creating accumulators.
   For example the following function

   int sum (int n)
   {
     if (n > 0)
       return n + sum (n - 1);
     else
       return 0;
   }

   is transformed into

   int sum (int n)
   {
     int acc = 0;

     while (n > 0)
       acc += n--;

     return acc;
   }

   To do this, we maintain two accumulators (a_acc and m_acc) indicating
   that, when we reach a "return x" statement, we should return
   a_acc + x * m_acc instead.  They are initialized to 0 and 1, respectively,
   so the semantics of the function are obviously preserved.  If we are
   guaranteed that the value of an accumulator never changes, we
   omit that accumulator.

   There are three ways the function may exit.  The first one is
   handled in adjust_return_value, the other two in adjust_accumulator_values
   (the second case is actually a special case of the third one and we
   present it separately just for clarity):

   1) Just return x, where x is not in any of the remaining special shapes.
      We rewrite this to a gimple equivalent of return m_acc * x + a_acc.

   2) return f (...), where f is the current function, is rewritten in a
      classical tail-recursion elimination way, into assignment of arguments
      and jump to the start of the function.  Values of the accumulators
      are unchanged.

   3) return a + m * f (...), where a and m do not depend on the call to f.
      To preserve the semantics described before, we want this to be rewritten
      in such a way that we finally return

      a_acc + (a + m * f (...)) * m_acc = (a_acc + a * m_acc) + (m * m_acc) * f (...).

      I.e. we increase a_acc by a * m_acc, multiply m_acc by m and
      eliminate the tail call to f.  Special cases when the value is just
      added or just multiplied are obtained by setting a = 0 or m = 1.

   TODO -- it is possible to do similar tricks for other operations.  */
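/* As a concrete illustration of case 3 (an added sketch, not part of the
   original sources), consider the classic factorial, whose recursive return
   matches a = 0 and m = n:

   int fact (int n)
   {
     if (n <= 1)
       return 1;
     return n * fact (n - 1);
   }

   Eliminating the tail call multiplies m_acc by n on every iteration, and
   case 1 then rewrites the remaining "return 1" into the equivalent of
   "return m_acc * 1 + a_acc", so the function effectively becomes

   int fact (int n)
   {
     int m_acc = 1;

     while (n > 1)
       m_acc *= n--;

     return m_acc * 1 + 0;
   }  */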
/* A structure that describes the tailcall.  */

struct tailcall
{
  /* The iterator pointing to the call statement.  */
  gimple_stmt_iterator call_gsi;

  /* True if it is a call to the current function.  */
  bool tail_recursion;

  /* The return value of the caller is mult * f + add, where f is the return
     value of the call.  */
  tree mult, add;

  /* Next tailcall in the chain.  */
  struct tailcall *next;
};

/* The variables holding the values of the multiplicative and additive
   accumulators.  */
static tree m_acc, a_acc;
static bool suitable_for_tail_opt_p (void);
static bool optimize_tail_call (struct tailcall *, bool);
static void eliminate_tail_call (struct tailcall *);
static void find_tail_calls (basic_block, struct tailcall **);
/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).  */

static bool
suitable_for_tail_opt_p (void)
{
  if (cfun->stdarg)
    return false;

  return true;
}
/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).
   This test must pass in addition to suitable_for_tail_opt_p in order to make
   tail call discovery happen.  */

static bool
suitable_for_tail_call_opt_p (void)
{
  tree param;

  /* alloca (until we have stack slot life analysis) inhibits
     sibling call optimizations, but not tail recursion.  */
  if (cfun->calls_alloca)
    return false;

  /* If we are using sjlj exceptions, we may need to add a call to
     _Unwind_SjLj_Unregister at exit of the function.  Which means
     that we cannot do any sibcall transformations.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
      && current_function_has_exception_handlers ())
    return false;

  /* Any function that calls setjmp might have longjmp called from
     any called function.  ??? We really should represent this
     properly in the CFG so that this needn't be special cased.  */
  if (cfun->calls_setjmp)
    return false;

  /* ??? It is OK if the argument of a function is taken in some cases,
     but not in all cases.  See PR15387 and PR19616.  Revisit for 4.1.  */
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    if (TREE_ADDRESSABLE (param))
      return false;

  return true;
}
/* Checks whether the expression EXPR in stmt AT is independent of the
   statement pointed to by GSI (in the sense that we already know EXPR's value
   at GSI).  We use the fact that we are only called from the chain of
   basic blocks that have only a single successor.  Returns the expression
   containing the value of EXPR at GSI.  */
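/* For instance (an illustrative sketch, not part of the original sources):
   given the chain

     bb_call:  t_1 = f (...);                          <-- GSI
     bb_mid:   x_2 = PHI <7 (bb_call), x_3 (bb_other)>
               a_4 = t_1 + x_2;                        <-- AT

   asking whether x_2 is independent of the call follows the PHI argument on
   the edge coming from bb_call and yields the constant 7, which is exactly
   the value x_2 has at GSI.  */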
static tree
independent_of_stmt_p (tree expr, gimple at, gimple_stmt_iterator gsi)
{
  basic_block bb, call_bb, at_bb;
  edge e;
  edge_iterator ei;

  if (is_gimple_min_invariant (expr))
    return expr;

  if (TREE_CODE (expr) != SSA_NAME)
    return NULL_TREE;

  /* Mark the blocks in the chain leading to the end.  */
  at_bb = gimple_bb (at);
  call_bb = gimple_bb (gsi_stmt (gsi));
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = &bb->aux;
  bb->aux = &bb->aux;

  while (1)
    {
      at = SSA_NAME_DEF_STMT (expr);
      bb = gimple_bb (at);

      /* The default definition or defined before the chain.  */
      if (!bb || !bb->aux)
        break;

      if (bb == call_bb)
        {
          for (; !gsi_end_p (gsi); gsi_next (&gsi))
            if (gsi_stmt (gsi) == at)
              break;

          if (!gsi_end_p (gsi))
            expr = NULL_TREE;
          break;
        }

      if (gimple_code (at) != GIMPLE_PHI)
        {
          expr = NULL_TREE;
          break;
        }

      FOR_EACH_EDGE (e, ei, bb->preds)
        if (e->src->aux)
          break;
      gcc_assert (e);

      expr = PHI_ARG_DEF_FROM_EDGE (at, e);
      if (TREE_CODE (expr) != SSA_NAME)
        {
          /* The value is a constant.  */
          break;
        }
    }

  /* Unmark the blocks.  */
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = NULL;
  bb->aux = NULL;

  return expr;
}
/* Simulates the effect of an assignment STMT on the return value of the tail
   recursive CALL passed in ASS_VAR.  M and A are the multiplicative and the
   additive factor for the real return value.  */
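/* For instance (an illustrative sketch): if the call result is tracked in
   t_1 (*ass_var == t_1), then for the statement "u_2 = t_1 * c_3" this
   function sets *m to c_3 and *ass_var to u_2, provided c_3 is independent
   of the call; the caller composes such factors across the whole statement
   chain.  */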
static bool
process_assignment (gimple stmt, gimple_stmt_iterator call, tree *m,
                    tree *a, tree *ass_var)
{
  tree op0, op1 = NULL_TREE, non_ass_var = NULL_TREE;
  tree dest = gimple_assign_lhs (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
  tree src_var = gimple_assign_rhs1 (stmt);

  /* See if this is a simple copy operation of an SSA name to the function
     result.  In that case we may have a simple tail call.  Ignore type
     conversions that can never produce extra code between the function
     call and the function return.  */
  if ((rhs_class == GIMPLE_SINGLE_RHS || gimple_assign_cast_p (stmt))
      && (TREE_CODE (src_var) == SSA_NAME))
    {
      /* Reject a tailcall if the type conversion might need
         additional code.  */
      if (gimple_assign_cast_p (stmt)
          && TYPE_MODE (TREE_TYPE (dest)) != TYPE_MODE (TREE_TYPE (src_var)))
        return false;

      if (src_var != *ass_var)
        return false;

      *ass_var = dest;
      return true;
    }
  switch (rhs_class)
    {
    case GIMPLE_BINARY_RHS:
      op1 = gimple_assign_rhs2 (stmt);

      /* Fall through.  */

    case GIMPLE_UNARY_RHS:
      op0 = gimple_assign_rhs1 (stmt);
      break;

    default:
      return false;
    }

  /* Accumulator optimizations will reverse the order of operations.
     We can only do that for floating-point types if we're assuming
     that addition and multiplication are associative.  */
  if (!flag_associative_math)
    if (FLOAT_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
      return false;

  if (rhs_class == GIMPLE_UNARY_RHS)
    ;
  else if (op0 == *ass_var
           && (non_ass_var = independent_of_stmt_p (op1, stmt, call)))
    ;
  else if (op1 == *ass_var
           && (non_ass_var = independent_of_stmt_p (op0, stmt, call)))
    ;
  else
    return false;

  switch (code)
    {
    case PLUS_EXPR:
      *a = non_ass_var;
      *ass_var = dest;
      return true;

    case MULT_EXPR:
      *m = non_ass_var;
      *ass_var = dest;
      return true;

    case NEGATE_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        *m = build_real (TREE_TYPE (op0), dconstm1);
      else if (INTEGRAL_TYPE_P (TREE_TYPE (op0)))
        *m = build_int_cst (TREE_TYPE (op0), -1);
      else
        return false;

      *ass_var = dest;
      return true;

    case MINUS_EXPR:
      if (*ass_var == op0)
        *a = fold_build1 (NEGATE_EXPR, TREE_TYPE (non_ass_var), non_ass_var);
      else
        {
          /* Here the result is non_ass_var - f.  Both *m and *a are
             negated; find_tail_calls later multiplies *a by *m, yielding
             the correct overall factors m = -1, a = non_ass_var.  */
          if (FLOAT_TYPE_P (TREE_TYPE (non_ass_var)))
            *m = build_real (TREE_TYPE (non_ass_var), dconstm1);
          else if (INTEGRAL_TYPE_P (TREE_TYPE (non_ass_var)))
            *m = build_int_cst (TREE_TYPE (non_ass_var), -1);
          else
            return false;

          *a = fold_build1 (NEGATE_EXPR, TREE_TYPE (non_ass_var), non_ass_var);
        }

      *ass_var = dest;
      return true;

      /* TODO -- Handle POINTER_PLUS_EXPR.  */

    default:
      return false;
    }
}
/* Propagate VAR through phis on edge E.  */

static tree
propagate_through_phis (tree var, edge e)
{
  basic_block dest = e->dest;
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      if (PHI_ARG_DEF_FROM_EDGE (phi, e) == var)
        return PHI_RESULT (phi);
    }

  return var;
}
/* Finds tailcalls falling into basic block BB.  The list of found tailcalls is
   added to the start of RET.  */
static void
find_tail_calls (basic_block bb, struct tailcall **ret)
{
  tree ass_var = NULL_TREE, ret_var, func, param;
  gimple stmt, call = NULL;
  gimple_stmt_iterator gsi, agsi;
  bool tail_recursion;
  struct tailcall *nw;
  edge e;
  tree m, a;
  basic_block abb;
  size_t idx;
  tree var;
  referenced_var_iterator rvi;

  if (!single_succ_p (bb))
    return;
  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);

      /* Ignore labels, returns, clobbers and debug stmts.  */
      if (gimple_code (stmt) == GIMPLE_LABEL
          || gimple_code (stmt) == GIMPLE_RETURN
          || gimple_clobber_p (stmt)
          || is_gimple_debug (stmt))
        continue;

      /* Check for a call.  */
      if (is_gimple_call (stmt))
        {
          call = stmt;
          ass_var = gimple_call_lhs (stmt);
          break;
        }

      /* If the statement references memory or volatile operands, fail.  */
      if (gimple_references_memory_p (stmt)
          || gimple_has_volatile_ops (stmt))
        return;
    }

  if (gsi_end_p (gsi))
    {
      edge_iterator ei;
      /* Recurse to the predecessors.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
        find_tail_calls (e->src, ret);

      return;
    }
  /* If the LHS of our call is not just a simple register, we can't
     transform this into a tail or sibling call.  This situation happens in,
     e.g., "*p = foo ()" where foo returns a struct.  In this case
     we won't have a temporary here, but we need to carry out the side
     effect anyway, so a tailcall is impossible.

     ??? In some situations (when the struct is returned in memory via
     invisible argument) we could deal with this, e.g. by passing 'p'
     itself as that argument to foo, but it's too early to do this here,
     and expand_call () will not handle it anyway.  If it ever can, then
     we need to revisit this here, to allow that situation.  */
  if (ass_var && !is_gimple_reg (ass_var))
    return;
  /* We found the call, check whether it is suitable.  */
  tail_recursion = false;
  func = gimple_call_fndecl (call);
  if (func == current_function_decl)
    {
      tree arg;

      for (param = DECL_ARGUMENTS (func), idx = 0;
           param && idx < gimple_call_num_args (call);
           param = DECL_CHAIN (param), idx++)
        {
          arg = gimple_call_arg (call, idx);
          if (param != arg)
            {
              /* Make sure there are no problems with copying.  The parameter
                 must have a copyable type and the two arguments must have
                 reasonably equivalent types.  The latter requirement could be
                 relaxed if we emitted a suitable type conversion statement.  */
              if (!is_gimple_reg_type (TREE_TYPE (param))
                  || !useless_type_conversion_p (TREE_TYPE (param),
                                                 TREE_TYPE (arg)))
                break;

              /* The parameter should be a real operand, so that the phi node
                 created for it at the start of the function has the meaning
                 of copying the value.  This test implies is_gimple_reg_type
                 from the previous condition, however this one could be
                 relaxed by being more careful with copying the new value
                 of the parameter (emitting appropriate GIMPLE_ASSIGN and
                 updating the virtual operands).  */
              if (!is_gimple_reg (param))
                break;
            }
        }
      if (idx == gimple_call_num_args (call) && !param)
        tail_recursion = true;
    }
  /* Make sure the tail invocation of this function does not refer
     to local variables.  */
  FOR_EACH_REFERENCED_VAR (cfun, var, rvi)
    {
      if (TREE_CODE (var) != PARM_DECL
          && auto_var_in_fn_p (var, cfun->decl)
          && (ref_maybe_used_by_stmt_p (call, var)
              || call_may_clobber_ref_p (call, var)))
        return;
    }
  /* Now check the statements after the call.  None of them has virtual
     operands, so they may only depend on the call through its return
     value.  The return value should also be dependent on each of them,
     since we are running after dce.  */
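  /* For example (an illustrative sketch, not part of the original sources),
     for the statement chain

       x = f (n);
       y = x * 3;
       z = y + 2;
       return z;

     the loop below accumulates m = 3 and a = 2, i.e. the tail call is
     recognized as returning 3 * f (n) + 2.  */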
  m = NULL_TREE;
  a = NULL_TREE;

  abb = bb;
  agsi = gsi;
  while (1)
    {
      tree tmp_a = NULL_TREE;
      tree tmp_m = NULL_TREE;
      gsi_next (&agsi);

      while (gsi_end_p (agsi))
        {
          ass_var = propagate_through_phis (ass_var, single_succ_edge (abb));
          abb = single_succ (abb);
          agsi = gsi_start_bb (abb);
        }

      stmt = gsi_stmt (agsi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
        continue;

      if (gimple_code (stmt) == GIMPLE_RETURN)
        break;

      if (gimple_clobber_p (stmt))
        continue;

      if (is_gimple_debug (stmt))
        continue;

      if (gimple_code (stmt) != GIMPLE_ASSIGN)
        return;

      /* This is a gimple assign.  */
      if (! process_assignment (stmt, gsi, &tmp_m, &tmp_a, &ass_var))
        return;

      if (tmp_a)
        {
          tree type = TREE_TYPE (tmp_a);
          if (a)
            a = fold_build2 (PLUS_EXPR, type, fold_convert (type, a), tmp_a);
          else
            a = tmp_a;
        }
      if (tmp_m)
        {
          tree type = TREE_TYPE (tmp_m);
          if (m)
            m = fold_build2 (MULT_EXPR, type, fold_convert (type, m), tmp_m);
          else
            m = tmp_m;

          if (a)
            a = fold_build2 (MULT_EXPR, type, fold_convert (type, a), tmp_m);
        }
    }
  /* See if this is a tail call we can handle.  */
  ret_var = gimple_return_retval (stmt);

  /* We may proceed if there either is no return value, or the return value
     is identical to the call's return.  */
  if (ret_var
      && (ret_var != ass_var))
    return;

  /* If this is not a tail recursive call, we cannot handle addends or
     multiplicands.  */
  if (!tail_recursion && (m || a))
    return;

  nw = XNEW (struct tailcall);

  nw->call_gsi = gsi;

  nw->tail_recursion = tail_recursion;

  nw->mult = m;
  nw->add = a;

  nw->next = *ret;
  *ret = nw;
}
/* Helper to insert PHI_ARG to the phi of VAR in the destination of edge E.  */

static void
add_successor_phi_arg (edge e, tree var, tree phi_arg)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    if (PHI_RESULT (gsi_stmt (gsi)) == var)
      break;

  gcc_assert (!gsi_end_p (gsi));
  add_phi_arg (gsi_stmt (gsi), phi_arg, e, UNKNOWN_LOCATION);
}
/* Creates a GIMPLE statement which computes the operation specified by
   CODE on ACC and OP1, assigns the result to a new variable with name LABEL,
   and inserts the statement at the position specified by GSI.  Returns the
   tree node of the statement's result.  */
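/* For instance (an illustrative sketch): a call such as

     adjust_return_value_with_ops (MULT_EXPR, "acc_tmp", m_acc, x, gsi)

   emits the GIMPLE equivalent of "acc_tmp_N = m_acc * x;" in front of GSI
   and returns the new SSA name acc_tmp_N.  */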
static tree
adjust_return_value_with_ops (enum tree_code code, const char *label,
                              tree acc, tree op1, gimple_stmt_iterator gsi)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  tree tmp = create_tmp_reg (ret_type, label);
  gimple stmt;
  tree result;

  add_referenced_var (tmp);

  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
    stmt = gimple_build_assign_with_ops (code, tmp, acc, op1);
  else
    {
      tree rhs = fold_convert (TREE_TYPE (acc),
                               fold_build2 (code,
                                            TREE_TYPE (op1),
                                            fold_convert (TREE_TYPE (op1), acc),
                                            op1));
      rhs = force_gimple_operand_gsi (&gsi, rhs,
                                      false, NULL, true, GSI_SAME_STMT);
      stmt = gimple_build_assign (NULL_TREE, rhs);
    }

  result = make_ssa_name (tmp, stmt);
  gimple_assign_set_lhs (stmt, result);
  update_stmt (stmt);
  gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);

  return result;
}
/* Creates a new GIMPLE statement that adjusts the value of accumulator ACC by
   the computation specified by CODE and OP1 and inserts the statement
   at the position specified by GSI as a new statement.  Returns the new SSA
   name of the updated accumulator.  */

static tree
update_accumulator_with_ops (enum tree_code code, tree acc, tree op1,
                             gimple_stmt_iterator gsi)
{
  gimple stmt;
  tree var;

  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
    stmt = gimple_build_assign_with_ops (code, SSA_NAME_VAR (acc), acc, op1);
  else
    {
      tree rhs = fold_convert (TREE_TYPE (acc),
                               fold_build2 (code,
                                            TREE_TYPE (op1),
                                            fold_convert (TREE_TYPE (op1), acc),
                                            op1));
      rhs = force_gimple_operand_gsi (&gsi, rhs,
                                      false, NULL, false, GSI_CONTINUE_LINKING);
      stmt = gimple_build_assign (NULL_TREE, rhs);
    }
  var = make_ssa_name (SSA_NAME_VAR (acc), stmt);
  gimple_assign_set_lhs (stmt, var);
  update_stmt (stmt);
  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);

  return var;
}
/* Adjust the accumulator values according to A and M after GSI, and update
   the phi nodes on edge BACK.  */
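/* Note (added for clarity): in terms of the header comment, the updates
   below amount to a_acc += a * m_acc (or just a_acc += a when there is no
   multiplicative accumulator) and m_acc *= m, with the updated values also
   recorded in the accumulator phis along the back edge BACK.  */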
static void
adjust_accumulator_values (gimple_stmt_iterator gsi, tree m, tree a, edge back)
{
  tree var, a_acc_arg, m_acc_arg;

  if (m)
    m = force_gimple_operand_gsi (&gsi, m, true, NULL, true, GSI_SAME_STMT);
  if (a)
    a = force_gimple_operand_gsi (&gsi, a, true, NULL, true, GSI_SAME_STMT);

  a_acc_arg = a_acc;
  m_acc_arg = m_acc;
  if (a)
    {
      if (m_acc)
        {
          if (integer_onep (a))
            var = m_acc;
          else
            var = adjust_return_value_with_ops (MULT_EXPR, "acc_tmp", m_acc,
                                                a, gsi);
        }
      else
        var = a;

      a_acc_arg = update_accumulator_with_ops (PLUS_EXPR, a_acc, var, gsi);
    }

  if (m)
    m_acc_arg = update_accumulator_with_ops (MULT_EXPR, m_acc, m, gsi);

  if (a_acc)
    add_successor_phi_arg (back, a_acc, a_acc_arg);

  if (m_acc)
    add_successor_phi_arg (back, m_acc, m_acc_arg);
}
/* Adjust the value of the return at the end of BB according to the M and A
   accumulators.  */

static void
adjust_return_value (basic_block bb, tree m, tree a)
{
  tree retval;
  gimple ret_stmt = gimple_seq_last_stmt (bb_seq (bb));
  gimple_stmt_iterator gsi = gsi_last_bb (bb);

  gcc_assert (gimple_code (ret_stmt) == GIMPLE_RETURN);

  retval = gimple_return_retval (ret_stmt);
  if (!retval || retval == error_mark_node)
    return;

  if (m)
    retval = adjust_return_value_with_ops (MULT_EXPR, "mul_tmp", m_acc, retval,
                                           gsi);
  if (a)
    retval = adjust_return_value_with_ops (PLUS_EXPR, "acc_tmp", a_acc, retval,
                                           gsi);
  gimple_return_set_retval (ret_stmt, retval);
  update_stmt (ret_stmt);
}
/* Subtract COUNT and FREQUENCY from the basic block and its
   outgoing edge.  */
static void
decrease_profile (basic_block bb, gcov_type count, int frequency)
{
  edge e;
  bb->count -= count;
  if (bb->count < 0)
    bb->count = 0;
  bb->frequency -= frequency;
  if (bb->frequency < 0)
    bb->frequency = 0;
  if (!single_succ_p (bb))
    {
      gcc_assert (!EDGE_COUNT (bb->succs));
      return;
    }
  e = single_succ_edge (bb);
  e->count -= count;
  if (e->count < 0)
    e->count = 0;
}
/* Returns true if argument PARAM of the tail recursive call needs to be copied
   when the call is eliminated.  */

static bool
arg_needs_copy_p (tree param)
{
  tree def;

  if (!is_gimple_reg (param) || !var_ann (param))
    return false;

  /* Parameters that are only defined but never used need not be copied.  */
  def = gimple_default_def (cfun, param);
  if (!def)
    return false;

  return true;
}
/* Eliminates the tail call described by T, replacing it by a jump back to
   the start of the function and copies of the arguments.  */
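/* As an illustration (a sketch, not part of the original sources): for

     int sum (int n) { return n > 0 ? n + sum (n - 1) : 0; }

   the block containing the recursive call stops calling sum and instead
   branches back to the block at the start of the function, with the phi
   node created for n receiving n - 1 on the back edge.  */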
static void
eliminate_tail_call (struct tailcall *t)
{
  tree param, rslt;
  gimple stmt, call;
  tree arg;
  size_t idx;
  basic_block bb, first;
  edge e;
  gimple phi;
  gimple_stmt_iterator gsi;
  gimple orig_stmt;

  stmt = orig_stmt = gsi_stmt (t->call_gsi);
  bb = gsi_bb (t->call_gsi);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Eliminated tail recursion in bb %d : ",
               bb->index);
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gcc_assert (is_gimple_call (stmt));

  first = single_succ (ENTRY_BLOCK_PTR);

  /* Remove the code after call_gsi that will become unreachable.  The
     possibly unreachable code in other blocks is removed later in
     cfg cleanup.  */
  gsi = t->call_gsi;
  gsi_next (&gsi);
  while (!gsi_end_p (gsi))
    {
      gimple t = gsi_stmt (gsi);
      /* Do not remove the return statement, so that redirect_edge_and_branch
         sees how the block ends.  */
      if (gimple_code (t) == GIMPLE_RETURN)
        break;

      gsi_remove (&gsi, true);
      release_defs (t);
    }

  /* The number of executions of the function has been reduced by the
     tailcall.  */
  e = single_succ_edge (gsi_bb (t->call_gsi));
  decrease_profile (EXIT_BLOCK_PTR, e->count, EDGE_FREQUENCY (e));
  decrease_profile (ENTRY_BLOCK_PTR, e->count, EDGE_FREQUENCY (e));
  if (e->dest != EXIT_BLOCK_PTR)
    decrease_profile (e->dest, e->count, EDGE_FREQUENCY (e));

  /* Replace the call by a jump to the start of function.  */
  e = redirect_edge_and_branch (single_succ_edge (gsi_bb (t->call_gsi)),
                                first);
  gcc_assert (e);
  PENDING_STMT (e) = NULL;

  /* Add phi node entries for arguments.  The ordering of the phi nodes should
     be the same as the ordering of the arguments.  */
  for (param = DECL_ARGUMENTS (current_function_decl),
         idx = 0, gsi = gsi_start_phis (first);
       param;
       param = DECL_CHAIN (param), idx++)
    {
      if (!arg_needs_copy_p (param))
        continue;

      arg = gimple_call_arg (stmt, idx);
      phi = gsi_stmt (gsi);
      gcc_assert (param == SSA_NAME_VAR (PHI_RESULT (phi)));

      add_phi_arg (phi, arg, e, gimple_location (stmt));
      gsi_next (&gsi);
    }

  /* Update the values of accumulators.  */
  adjust_accumulator_values (t->call_gsi, t->mult, t->add, e);

  call = gsi_stmt (t->call_gsi);
  rslt = gimple_call_lhs (call);
  if (rslt != NULL_TREE)
    {
      /* The result of the call will no longer be defined, so adjust the
         SSA_NAME_DEF_STMT accordingly.  */
      SSA_NAME_DEF_STMT (rslt) = gimple_build_nop ();
    }

  gsi_remove (&t->call_gsi, true);
  release_defs (call);
}
/* Add phi nodes for the virtual operands defined in the function to the
   header of the loop created by tail recursion elimination.

   Originally, we used to add phi nodes only for call clobbered variables,
   as the value of the non-call clobbered ones obviously cannot be used
   or changed within the recursive call.  However, the local variables
   from multiple calls now share the same location, so the virtual ssa form
   requires us to say that the location dies on further iterations of the
   loop, which requires adding phi nodes.  */

static void
add_virtual_phis (void)
{
  referenced_var_iterator rvi;
  tree var;

  /* The problematic part is that there is no way to know what to put into
     the phi nodes (there in fact does not have to be such an ssa name
     available).  A solution would be to have an artificial use/kill for all
     virtual operands in the EXIT node.  Unless we have this, we cannot do
     much better than to rebuild the ssa form for possibly affected virtual
     ssa names from scratch.  */

  FOR_EACH_REFERENCED_VAR (cfun, var, rvi)
    {
      if (!is_gimple_reg (var) && gimple_default_def (cfun, var) != NULL_TREE)
        mark_sym_for_renaming (var);
    }
}
/* Optimizes the tailcall described by T.  If OPT_TAILCALLS is true, also
   mark the tailcalls for the sibcall optimization.  Returns true if a
   change was made.  */

static bool
optimize_tail_call (struct tailcall *t, bool opt_tailcalls)
{
  if (t->tail_recursion)
    {
      eliminate_tail_call (t);
      return true;
    }

  if (opt_tailcalls)
    {
      gimple stmt = gsi_stmt (t->call_gsi);

      gimple_call_set_tail (stmt, true);
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Found tail call ");
          print_gimple_stmt (dump_file, stmt, 0, dump_flags);
          fprintf (dump_file, " in bb %i\n", (gsi_bb (t->call_gsi))->index);
        }
    }

  return false;
}
/* Creates a tail-call accumulator of the same type as the return type of the
   current function.  LABEL is the name used when creating the temporary
   variable for the accumulator.  The accumulator will be inserted in the
   phis of basic block BB, which must have a single predecessor, with an
   initial value INIT converted to the current function's return type.  */
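/* E.g. (an illustrative sketch) the additive accumulator starts out as a phi
   node of the form

     add_acc_1 = PHI <0 (entry edge)>

   and later receives its back-edge argument from adjust_accumulator_values
   via add_successor_phi_arg when a tail call is eliminated.  */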
static tree
create_tailcall_accumulator (const char *label, basic_block bb, tree init)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  tree tmp = create_tmp_reg (ret_type, label);
  gimple phi;

  add_referenced_var (tmp);
  phi = create_phi_node (tmp, bb);
  /* RET_TYPE can be a float when -ffast-math is enabled.  */
  add_phi_arg (phi, fold_convert (ret_type, init), single_pred_edge (bb),
               UNKNOWN_LOCATION);
  return PHI_RESULT (phi);
}
/* Optimizes tail calls in the function, turning the tail recursion
   into iteration.  */

static unsigned int
tree_optimize_tail_calls_1 (bool opt_tailcalls)
{
  edge e;
  bool phis_constructed = false;
  struct tailcall *tailcalls = NULL, *act, *next;
  bool changed = false;
  basic_block first = single_succ (ENTRY_BLOCK_PTR);
  tree param;
  gimple stmt;
  edge_iterator ei;

  if (!suitable_for_tail_opt_p ())
    return 0;
  if (opt_tailcalls)
    opt_tailcalls = suitable_for_tail_call_opt_p ();

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    {
      /* Only traverse the normal exits, i.e. those that end with a return
         statement.  */
      stmt = last_stmt (e->src);

      if (stmt
          && gimple_code (stmt) == GIMPLE_RETURN)
        find_tail_calls (e->src, &tailcalls);
    }

  /* Construct the phi nodes and accumulators if necessary.  */
  a_acc = m_acc = NULL_TREE;
  for (act = tailcalls; act; act = act->next)
    {
      if (!act->tail_recursion)
        continue;

      if (!phis_constructed)
        {
          /* Ensure that the block has a single predecessor and carries no
             pre-existing (possibly degenerate) PHI nodes; otherwise split
             the edge from the entry block.  */
          if (!single_pred_p (first)
              || !gimple_seq_empty_p (phi_nodes (first)))
            first = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));

          /* Copy the args if needed.  */
          for (param = DECL_ARGUMENTS (current_function_decl);
               param;
               param = DECL_CHAIN (param))
            if (arg_needs_copy_p (param))
              {
                tree name = gimple_default_def (cfun, param);
                tree new_name = make_ssa_name (param, SSA_NAME_DEF_STMT (name));
                gimple phi;

                set_default_def (param, new_name);
                phi = create_phi_node (name, first);
                SSA_NAME_DEF_STMT (name) = phi;
                add_phi_arg (phi, new_name, single_pred_edge (first),
                             EXPR_LOCATION (param));
              }
          phis_constructed = true;
        }

      if (act->add && !a_acc)
        a_acc = create_tailcall_accumulator ("add_acc", first,
                                             integer_zero_node);

      if (act->mult && !m_acc)
        m_acc = create_tailcall_accumulator ("mult_acc", first,
                                             integer_one_node);
    }

  if (a_acc || m_acc)
    {
      /* When the tail call elimination using accumulators is performed,
         statements adding the accumulated value are inserted at all exits.
         This turns all other tail calls to non-tail ones.  */
      opt_tailcalls = false;
    }

  for (; tailcalls; tailcalls = next)
    {
      next = tailcalls->next;
      changed |= optimize_tail_call (tailcalls, opt_tailcalls);
      free (tailcalls);
    }

  if (a_acc || m_acc)
    {
      /* Modify the remaining return statements.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        {
          stmt = last_stmt (e->src);

          if (stmt
              && gimple_code (stmt) == GIMPLE_RETURN)
            adjust_return_value (e->src, m_acc, a_acc);
        }
    }

  if (changed)
    free_dominance_info (CDI_DOMINATORS);

  if (phis_constructed)
    add_virtual_phis ();

  if (changed)
    return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
  return 0;
}
static unsigned int
execute_tail_recursion (void)
{
  return tree_optimize_tail_calls_1 (false);
}

static bool
gate_tail_calls (void)
{
  return flag_optimize_sibling_calls != 0 && dbg_cnt (tail_call);
}

static unsigned int
execute_tail_calls (void)
{
  return tree_optimize_tail_calls_1 (true);
}
struct gimple_opt_pass pass_tail_recursion =
{
 {
  GIMPLE_PASS,
  "tailr",                              /* name */
  gate_tail_calls,                      /* gate */
  execute_tail_recursion,               /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_ssa                       /* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_tail_calls =
{
 {
  GIMPLE_PASS,
  "tailc",                              /* name */
  gate_tail_calls,                      /* gate */
  execute_tail_calls,                   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_ssa                       /* todo_flags_finish */
 }
};