/* Tail call optimization on trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "diagnostic.h"
#include "tree-pass.h"
#include "langhooks.h"
/* This file implements tail recursion elimination.  It is also used to
   analyze tail calls in general, passing the results to the rtl level
   where they are used for sibcall optimization.
   In addition to the standard tail recursion elimination, we handle the most
   trivial cases of making the call tail recursive by creating accumulators.
   For example the following function

   int sum (int n)
   {
     if (n > 0)
       return n + sum (n - 1);
     else
       return 0;
   }

   is transformed into

   int sum (int n)
   {
     int acc = 0;

     while (n > 0)
       acc += n--;

     return acc;
   }
   To do this, we maintain two accumulators (a_acc and m_acc) that indicate
   that, when we reach a return x statement, we should return a_acc + x * m_acc
   instead.  They are initialized to 0 and 1, respectively, so the semantics
   of the function are obviously preserved.  If we are guaranteed that the
   value of an accumulator never changes, we omit that accumulator.

   There are three ways in which the function may exit.  The first one is
   handled in adjust_return_value, the other two in adjust_accumulator_values
   (the second case is actually a special case of the third one and we
   present it separately just for clarity):

   1) Just return x, where x is not in any of the remaining special shapes.
      We rewrite this to a gimple equivalent of return m_acc * x + a_acc.

   2) return f (...), where f is the current function, is rewritten in a
      classical tail-recursion elimination way, into assignment of arguments
      and a jump to the start of the function.  Values of the accumulators
      are unchanged.

   3) return a + m * f (...), where a and m do not depend on the call to f.
      To preserve the semantics described before we want this to be rewritten
      in such a way that we finally return

        a_acc + (a + m * f (...)) * m_acc = (a_acc + a * m_acc) + (m * m_acc) * f (...).

      I.e. we increase a_acc by a * m_acc, multiply m_acc by m and
      eliminate the tail call to f.  Special cases when the value is just
      added or just multiplied are obtained by setting a = 0 or m = 1.
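      For instance, under these rules a statement of the form

        return 1 + 2 * f (n - 1);

      is handled by updating the accumulators as

        a_acc = a_acc + 1 * m_acc;
        m_acc = m_acc * 2;

      and then eliminating the recursive call as in case 2, while each
      remaining plain return x becomes return a_acc + x * m_acc (an
      illustrative sketch of the net effect, not literal GIMPLE).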
   TODO -- it is possible to do similar tricks for other operations.  */

/* A structure that describes the tailcall.  */

  /* The iterator pointing to the call statement.  */
  gimple_stmt_iterator call_gsi;

  /* True if it is a call to the current function.  */

  /* The return value of the caller is mult * f + add, where f is the return
     value of the call.  */

  /* Next tailcall in the chain.  */
  struct tailcall *next;

/* The variables holding the values of the multiplicative and additive
   accumulators, respectively.  */
static tree m_acc, a_acc;

static bool suitable_for_tail_opt_p (void);
static bool optimize_tail_call (struct tailcall *, bool);
static void eliminate_tail_call (struct tailcall *);
static void find_tail_calls (basic_block, struct tailcall **);
/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).  */

suitable_for_tail_opt_p (void)

/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).
   This test must pass in addition to suitable_for_tail_opt_p in order to make
   tail call discovery happen.  */

suitable_for_tail_call_opt_p (void)
  /* alloca (until we have stack slot life analysis) inhibits
     sibling call optimizations, but not tail recursion.  */
  if (cfun->calls_alloca)

  /* If we are using sjlj exceptions, we may need to add a call to
     _Unwind_SjLj_Unregister at exit of the function.  Which means
     that we cannot do any sibcall transformations.  */
  if (USING_SJLJ_EXCEPTIONS && current_function_has_exception_handlers ())

  /* Any function that calls setjmp might have longjmp called from
     any called function.  ??? We really should represent this
     properly in the CFG so that this needn't be special cased.  */
  if (cfun->calls_setjmp)
  /* ??? It is OK if the address of an argument is taken in some cases,
     but not in all cases.  See PR15387 and PR19616.  Revisit for 4.1.  */
  for (param = DECL_ARGUMENTS (current_function_decl);
       param = TREE_CHAIN (param))
    if (TREE_ADDRESSABLE (param))
/* Checks whether the expression EXPR in stmt AT is independent of the
   statement pointed to by GSI (in the sense that we already know EXPR's value
   at GSI).  We use the fact that we are only called from the chain of
   basic blocks that have only a single successor.  Returns the expression
   containing the value of EXPR at GSI.  */
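/* For example, if EXPR is defined by a phi node on that chain, say

     x_3 = PHI <x_1 (pred on the chain from the call), x_2 (elsewhere)>

   the walk below follows the argument coming from the call's side, i.e. x_1,
   until it reaches a constant or a name defined before the chain (a
   hypothetical illustration; the SSA names are made up).  */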
independent_of_stmt_p (tree expr, gimple at, gimple_stmt_iterator gsi)
  basic_block bb, call_bb, at_bb;

  if (is_gimple_min_invariant (expr))

  if (TREE_CODE (expr) != SSA_NAME)

  /* Mark the blocks in the chain leading to the end.  */
  at_bb = gimple_bb (at);
  call_bb = gimple_bb (gsi_stmt (gsi));
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))

      at = SSA_NAME_DEF_STMT (expr);

      /* The default definition or defined before the chain.  */

      for (; !gsi_end_p (gsi); gsi_next (&gsi))
        if (gsi_stmt (gsi) == at)

      if (!gsi_end_p (gsi))

      if (gimple_code (at) != GIMPLE_PHI)

      FOR_EACH_EDGE (e, ei, bb->preds)

      expr = PHI_ARG_DEF_FROM_EDGE (at, e);
      if (TREE_CODE (expr) != SSA_NAME)
          /* The value is a constant.  */

  /* Unmark the blocks.  */
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
/* Simulates the effect of an assignment STMT on the return value of the tail
   recursive CALL passed in ASS_VAR.  M and A are the multiplicative and the
   additive factors for the real return value.  */

process_assignment (gimple stmt, gimple_stmt_iterator call, tree *m,
                    tree *a, tree *ass_var)
  tree op0, op1, non_ass_var;
  tree dest = gimple_assign_lhs (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
  tree src_var = gimple_assign_rhs1 (stmt);

  /* See if this is a simple copy operation of an SSA name to the function
     result.  In that case we may have a simple tail call.  Ignore type
     conversions that can never produce extra code between the function
     call and the function return.  */
  if ((rhs_class == GIMPLE_SINGLE_RHS || gimple_assign_cast_p (stmt))
      && (TREE_CODE (src_var) == SSA_NAME))
      /* Reject a tailcall if the type conversion might need
         additional code.  */
      if (gimple_assign_cast_p (stmt)
          && TYPE_MODE (TREE_TYPE (dest)) != TYPE_MODE (TREE_TYPE (src_var)))

      if (src_var != *ass_var)

  if (rhs_class != GIMPLE_BINARY_RHS)
  /* Accumulator optimizations will reverse the order of operations.
     We can only do that for floating-point types if we're assuming
     that addition and multiplication are associative.  */
  if (!flag_associative_math)
    if (FLOAT_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
  /* We only handle the code like

       x = call ();
       y = m * x;
       z = y + a;
       return z;

     TODO -- Extend it for cases where the linear transformation of the output
     is expressed in a more complicated way.  */
  op0 = gimple_assign_rhs1 (stmt);
  op1 = gimple_assign_rhs2 (stmt);

      && (non_ass_var = independent_of_stmt_p (op1, stmt, call)))
  else if (op1 == *ass_var
           && (non_ass_var = independent_of_stmt_p (op0, stmt, call)))

  /* TODO -- Handle other codes (NEGATE_EXPR, MINUS_EXPR,
     POINTER_PLUS_EXPR).  */
/* Propagate VAR through phis on edge E.  */

propagate_through_phis (tree var, edge e)
  basic_block dest = e->dest;
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
      gimple phi = gsi_stmt (gsi);
      if (PHI_ARG_DEF_FROM_EDGE (phi, e) == var)
        return PHI_RESULT (phi);

/* Finds tailcalls falling into basic block BB.  The list of found tailcalls is
   added to the start of RET.  */
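/* For example, a block ending in

     x_1 = foo (n_2);
     return x_1;

   yields a record for the call to foo, provided the checks below succeed
   (an illustrative sketch; the SSA names are made up).  */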
find_tail_calls (basic_block bb, struct tailcall **ret)
  tree ass_var = NULL_TREE, ret_var, func, param;
  gimple stmt, call = NULL;
  gimple_stmt_iterator gsi, agsi;
  referenced_var_iterator rvi;

  if (!single_succ_p (bb))

  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
      stmt = gsi_stmt (gsi);

      if (gimple_code (stmt) == GIMPLE_LABEL || is_gimple_debug (stmt))

      /* Check for a call.  */
      if (is_gimple_call (stmt))
          ass_var = gimple_call_lhs (stmt);

      /* If the statement references memory or volatile operands, fail.  */
      if (gimple_references_memory_p (stmt)
          || gimple_has_volatile_ops (stmt))

  /* Recurse to the predecessors.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    find_tail_calls (e->src, ret);
  /* If the LHS of our call is not just a simple register, we can't
     transform this into a tail or sibling call.  This situation happens in,
     e.g., "*p = foo ()" where foo returns a struct.  In this case
     we won't have a temporary here, but we need to carry out the side
     effect anyway, so a tailcall is impossible.

     ??? In some situations (when the struct is returned in memory via
     invisible argument) we could deal with this, e.g. by passing 'p'
     itself as that argument to foo, but it's too early to do this here,
     and expand_call () will not handle it anyway.  If it ever can, then
     we need to revisit this here, to allow that situation.  */
  if (ass_var && !is_gimple_reg (ass_var))
  /* We found the call, check whether it is suitable.  */
  tail_recursion = false;
  func = gimple_call_fndecl (call);
  if (func == current_function_decl)

      for (param = DECL_ARGUMENTS (func), idx = 0;
           param && idx < gimple_call_num_args (call);
           param = TREE_CHAIN (param), idx++)
          arg = gimple_call_arg (call, idx);
          /* Make sure there are no problems with copying.  The parameter
             must have a copyable type and the two arguments must have
             reasonably equivalent types.  The latter requirement could be
             relaxed if we emitted a suitable type conversion statement.  */
          if (!is_gimple_reg_type (TREE_TYPE (param))
              || !useless_type_conversion_p (TREE_TYPE (param),

          /* The parameter should be a real operand, so that the phi node
             created for it at the start of the function has the meaning
             of copying the value.  This test implies is_gimple_reg_type
             from the previous condition, however this one could be
             relaxed by being more careful with copying the new value
             of the parameter (emitting appropriate GIMPLE_ASSIGN and
             updating the virtual operands).  */
          if (!is_gimple_reg (param))
      if (idx == gimple_call_num_args (call) && !param)
        tail_recursion = true;

  /* Make sure the tail invocation of this function does not refer
     to local variables.  */
  FOR_EACH_REFERENCED_VAR (var, rvi)
      if (!is_global_var (var)
          && ref_maybe_used_by_stmt_p (call, var))
  /* Now check the statements after the call.  None of them has virtual
     operands, so they may only depend on the call through its return
     value.  The return value should also be dependent on each of them,
     since we are running after dce.  */
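  /* For instance, given

       x_1 = foo (n_2);
       y_3 = x_1 * 4;
       z_4 = y_3 + 1;
       return z_4;

     the scan below ends up with m == 4 and a == 1, i.e. the caller returns
     4 * foo (n_2) + 1 (an illustrative sketch; the SSA names are made up).  */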
      tree tmp_a = NULL_TREE;
      tree tmp_m = NULL_TREE;

      while (gsi_end_p (agsi))
          ass_var = propagate_through_phis (ass_var, single_succ_edge (abb));
          abb = single_succ (abb);
          agsi = gsi_start_bb (abb);

      stmt = gsi_stmt (agsi);

      if (gimple_code (stmt) == GIMPLE_LABEL)

      if (gimple_code (stmt) == GIMPLE_RETURN)

      if (is_gimple_debug (stmt))

      if (gimple_code (stmt) != GIMPLE_ASSIGN)

      /* This is a gimple assign.  */
      if (! process_assignment (stmt, gsi, &tmp_m, &tmp_a, &ass_var))

          a = fold_build2 (PLUS_EXPR, TREE_TYPE (tmp_a), a, tmp_a);

          m = fold_build2 (MULT_EXPR, TREE_TYPE (tmp_m), m, tmp_m);

          a = fold_build2 (MULT_EXPR, TREE_TYPE (tmp_m), a, tmp_m);
  /* See if this is a tail call we can handle.  */
  ret_var = gimple_return_retval (stmt);

  /* We may proceed if either there is no return value, or the return value
     is identical to the call's return value.  */
      && (ret_var != ass_var))
  /* If this is not a tail recursive call, we cannot handle addends or
     multiplicands.  */
  if (!tail_recursion && (m || a))

  nw = XNEW (struct tailcall);

  nw->tail_recursion = tail_recursion;

/* Helper to insert PHI_ARG into the phi of VAR in the destination of
   edge E.  */

add_successor_phi_arg (edge e, tree var, tree phi_arg)
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    if (PHI_RESULT (gsi_stmt (gsi)) == var)

  gcc_assert (!gsi_end_p (gsi));
  add_phi_arg (gsi_stmt (gsi), phi_arg, e, UNKNOWN_LOCATION);
/* Creates a GIMPLE statement which computes the operation specified by
   CODE, ACC and OP1 to a new variable with name LABEL and inserts the
   statement at the position specified by GSI.  Returns the
   tree node of the statement's result.  */
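/* For instance, a call like adjust_return_value_with_ops (MULT_EXPR,
   "mul_tmp", m_acc, retval, gsi), as done in adjust_return_value below,
   would emit roughly

     mul_tmp_5 = m_acc_2 * retval_1;

   before GSI and return the new name (a sketch only; the SSA names are
   made up).  */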
adjust_return_value_with_ops (enum tree_code code, const char *label,
                              tree acc, tree op1, gimple_stmt_iterator gsi)
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  tree tmp = create_tmp_var (ret_type, label);

  if (TREE_CODE (ret_type) == COMPLEX_TYPE
      || TREE_CODE (ret_type) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (tmp) = 1;
  add_referenced_var (tmp);

  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
    stmt = gimple_build_assign_with_ops (code, tmp, acc, op1);

      tree rhs = fold_convert (TREE_TYPE (acc),
                               fold_convert (TREE_TYPE (op1), acc),
      rhs = force_gimple_operand_gsi (&gsi, rhs,
                                      false, NULL, true, GSI_CONTINUE_LINKING);
      stmt = gimple_build_assign (NULL_TREE, rhs);

  result = make_ssa_name (tmp, stmt);
  gimple_assign_set_lhs (stmt, result);

  gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
/* Creates a new GIMPLE statement that adjusts the value of accumulator ACC by
   the computation specified by CODE and OP1 and inserts the statement
   at the position specified by GSI as a new statement.  Returns the new SSA
   name of the updated accumulator.  */
update_accumulator_with_ops (enum tree_code code, tree acc, tree op1,
                             gimple_stmt_iterator gsi)
  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
    stmt = gimple_build_assign_with_ops (code, SSA_NAME_VAR (acc), acc, op1);

      tree rhs = fold_convert (TREE_TYPE (acc),
                               fold_convert (TREE_TYPE (op1), acc),
      rhs = force_gimple_operand_gsi (&gsi, rhs,
                                      false, NULL, false, GSI_CONTINUE_LINKING);
      stmt = gimple_build_assign (NULL_TREE, rhs);

  var = make_ssa_name (SSA_NAME_VAR (acc), stmt);
  gimple_assign_set_lhs (stmt, var);

  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
/* Adjust the accumulator values according to A and M after GSI, and update
   the phi nodes on edge BACK.  */
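/* In terms of the overview at the top of the file, this amounts to

     a_acc = a_acc + A * m_acc;
     m_acc = m_acc * M;

   with the parts that are not needed (e.g. when A is 1 or an accumulator is
   absent) omitted -- a rough summary rather than the exact generated code.  */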
adjust_accumulator_values (gimple_stmt_iterator gsi, tree m, tree a, edge back)
  tree var, a_acc_arg, m_acc_arg;

  m = force_gimple_operand_gsi (&gsi, m, true, NULL, true, GSI_SAME_STMT);

  a = force_gimple_operand_gsi (&gsi, a, true, NULL, true, GSI_SAME_STMT);

      if (integer_onep (a))

        var = adjust_return_value_with_ops (MULT_EXPR, "acc_tmp", m_acc,

      a_acc_arg = update_accumulator_with_ops (PLUS_EXPR, a_acc, var, gsi);

    m_acc_arg = update_accumulator_with_ops (MULT_EXPR, m_acc, m, gsi);

    add_successor_phi_arg (back, a_acc, a_acc_arg);

    add_successor_phi_arg (back, m_acc, m_acc_arg);
/* Adjust value of the return at the end of BB according to M and A
   accumulators.  */

adjust_return_value (basic_block bb, tree m, tree a)
  gimple ret_stmt = gimple_seq_last_stmt (bb_seq (bb));
  gimple_stmt_iterator gsi = gsi_last_bb (bb);

  gcc_assert (gimple_code (ret_stmt) == GIMPLE_RETURN);

  retval = gimple_return_retval (ret_stmt);
  if (!retval || retval == error_mark_node)

    retval = adjust_return_value_with_ops (MULT_EXPR, "mul_tmp", m_acc, retval,

    retval = adjust_return_value_with_ops (PLUS_EXPR, "acc_tmp", a_acc, retval,

  gimple_return_set_retval (ret_stmt, retval);
  update_stmt (ret_stmt);
/* Subtract COUNT and FREQUENCY from the basic block and its
   outgoing edge.  */

decrease_profile (basic_block bb, gcov_type count, int frequency)
  bb->frequency -= frequency;
  if (bb->frequency < 0)

  if (!single_succ_p (bb))
      gcc_assert (!EDGE_COUNT (bb->succs));

  e = single_succ_edge (bb);
/* Returns true if argument PARAM of the tail recursive call needs to be copied
   when the call is eliminated.  */

arg_needs_copy_p (tree param)
  if (!is_gimple_reg (param) || !var_ann (param))

  /* Parameters that are only defined but never used need not be copied.  */
  def = gimple_default_def (cfun, param);
/* Eliminates the tail call described by T.  */

eliminate_tail_call (struct tailcall *t)
  basic_block bb, first;
  gimple_stmt_iterator gsi;

  stmt = orig_stmt = gsi_stmt (t->call_gsi);
  bb = gsi_bb (t->call_gsi);

  if (dump_file && (dump_flags & TDF_DETAILS))
      fprintf (dump_file, "Eliminated tail recursion in bb %d : ",
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");

  gcc_assert (is_gimple_call (stmt));

  first = single_succ (ENTRY_BLOCK_PTR);
  /* Remove the code after call_gsi that will become unreachable.  The
     possibly unreachable code in other blocks is removed later in
     cfg cleanup.  */
  while (!gsi_end_p (gsi))
      gimple t = gsi_stmt (gsi);
      /* Do not remove the return statement, so that redirect_edge_and_branch
         sees how the block ends.  */
      if (gimple_code (t) == GIMPLE_RETURN)

      gsi_remove (&gsi, true);
  /* The number of executions of the function has been reduced by the
     tailcall.  */
  e = single_succ_edge (gsi_bb (t->call_gsi));
  decrease_profile (EXIT_BLOCK_PTR, e->count, EDGE_FREQUENCY (e));
  decrease_profile (ENTRY_BLOCK_PTR, e->count, EDGE_FREQUENCY (e));
  if (e->dest != EXIT_BLOCK_PTR)
    decrease_profile (e->dest, e->count, EDGE_FREQUENCY (e));
  /* Replace the call by a jump to the start of function.  */
  e = redirect_edge_and_branch (single_succ_edge (gsi_bb (t->call_gsi)),

  PENDING_STMT (e) = NULL;
  /* Add phi node entries for arguments.  The ordering of the phi nodes should
     be the same as the ordering of the arguments.  */
  for (param = DECL_ARGUMENTS (current_function_decl),
         idx = 0, gsi = gsi_start_phis (first);
       param = TREE_CHAIN (param), idx++)
      if (!arg_needs_copy_p (param))

      arg = gimple_call_arg (stmt, idx);
      phi = gsi_stmt (gsi);
      gcc_assert (param == SSA_NAME_VAR (PHI_RESULT (phi)));
      add_phi_arg (phi, arg, e, gimple_location (stmt));

  /* Update the values of accumulators.  */
  adjust_accumulator_values (t->call_gsi, t->mult, t->add, e);

  call = gsi_stmt (t->call_gsi);
  rslt = gimple_call_lhs (call);
  if (rslt != NULL_TREE)
      /* Result of the call will no longer be defined.  So adjust the
         SSA_NAME_DEF_STMT accordingly.  */
      SSA_NAME_DEF_STMT (rslt) = gimple_build_nop ();

  gsi_remove (&t->call_gsi, true);
/* Add phi nodes for the virtual operands defined in the function to the
   header of the loop created by tail recursion elimination.

   Originally, we used to add phi nodes only for call clobbered variables,
   as the value of the non-call clobbered ones obviously cannot be used
   or changed within the recursive call.  However, the local variables
   from multiple calls now share the same location, so the virtual ssa form
   requires us to say that the location dies on further iterations of the loop,
   which requires adding phi nodes.  */

add_virtual_phis (void)
  referenced_var_iterator rvi;
  /* The problematic part is that there is no way to know what to put into
     the phi nodes (in fact, a suitable ssa name does not even have to be
     available).  A solution would be to have an artificial use/kill for all
     virtual operands in the EXIT node.  Unless we have this, we cannot do
     much better than to rebuild the ssa form for the possibly affected
     virtual ssa names from scratch.  */

  FOR_EACH_REFERENCED_VAR (var, rvi)
      if (!is_gimple_reg (var) && gimple_default_def (cfun, var) != NULL_TREE)
        mark_sym_for_renaming (var);
/* Optimizes the tailcall described by T.  If OPT_TAILCALLS is true, also
   mark the tailcalls for the sibcall optimization.  */

optimize_tail_call (struct tailcall *t, bool opt_tailcalls)
  if (t->tail_recursion)
      eliminate_tail_call (t);

      gimple stmt = gsi_stmt (t->call_gsi);

      gimple_call_set_tail (stmt, true);
      if (dump_file && (dump_flags & TDF_DETAILS))
          fprintf (dump_file, "Found tail call ");
          print_gimple_stmt (dump_file, stmt, 0, dump_flags);
          fprintf (dump_file, " in bb %i\n", (gsi_bb (t->call_gsi))->index);
/* Creates a tail-call accumulator of the same type as the return type of the
   current function.  LABEL is the name used when creating the temporary
   variable for the accumulator.  The accumulator will be inserted in the
   phis of a basic block BB with a single predecessor, with an initial value
   INIT converted to the current function return type.  */

create_tailcall_accumulator (const char *label, basic_block bb, tree init)
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  tree tmp = create_tmp_var (ret_type, label);

  if (TREE_CODE (ret_type) == COMPLEX_TYPE
      || TREE_CODE (ret_type) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (tmp) = 1;
  add_referenced_var (tmp);

  phi = create_phi_node (tmp, bb);
  /* RET_TYPE can be a float when -ffast-math is enabled.  */
  add_phi_arg (phi, fold_convert (ret_type, init), single_pred_edge (bb),

  return PHI_RESULT (phi);
/* Optimizes tail calls in the function, turning the tail recursion
   into iteration.  */

tree_optimize_tail_calls_1 (bool opt_tailcalls)
  bool phis_constructed = false;
  struct tailcall *tailcalls = NULL, *act, *next;
  bool changed = false;
  basic_block first = single_succ (ENTRY_BLOCK_PTR);

  if (!suitable_for_tail_opt_p ())

  opt_tailcalls = suitable_for_tail_call_opt_p ();
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
      /* Only traverse the normal exits, i.e. those that end with a return
         statement.  */
      stmt = last_stmt (e->src);

          && gimple_code (stmt) == GIMPLE_RETURN)
        find_tail_calls (e->src, &tailcalls);

  /* Construct the phi nodes and accumulators if necessary.  */
  a_acc = m_acc = NULL_TREE;
  for (act = tailcalls; act; act = act->next)
      if (!act->tail_recursion)

      if (!phis_constructed)
          /* Ensure that the block has only a single predecessor and contains
             no PHI nodes; otherwise split the edge leaving the entry block.  */
          if (!single_pred_p (first)
              || !gimple_seq_empty_p (phi_nodes (first)))
            first = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
          /* Copy the args if needed.  */
          for (param = DECL_ARGUMENTS (current_function_decl);
               param = TREE_CHAIN (param))
            if (arg_needs_copy_p (param))
                tree name = gimple_default_def (cfun, param);
                tree new_name = make_ssa_name (param, SSA_NAME_DEF_STMT (name));

                set_default_def (param, new_name);
                phi = create_phi_node (name, first);
                SSA_NAME_DEF_STMT (name) = phi;
                add_phi_arg (phi, new_name, single_pred_edge (first),
                             EXPR_LOCATION (param));

          phis_constructed = true;
      if (act->add && !a_acc)
        a_acc = create_tailcall_accumulator ("add_acc", first,

      if (act->mult && !m_acc)
        m_acc = create_tailcall_accumulator ("mult_acc", first,
  for (; tailcalls; tailcalls = next)
      next = tailcalls->next;
      changed |= optimize_tail_call (tailcalls, opt_tailcalls);

  /* Modify the remaining return statements.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
      stmt = last_stmt (e->src);

          && gimple_code (stmt) == GIMPLE_RETURN)
        adjust_return_value (e->src, m_acc, a_acc);

  free_dominance_info (CDI_DOMINATORS);

  if (phis_constructed)
    add_virtual_phis ();

  return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
execute_tail_recursion (void)
  return tree_optimize_tail_calls_1 (false);

gate_tail_calls (void)
  return flag_optimize_sibling_calls != 0 && dbg_cnt (tail_call);

execute_tail_calls (void)
  return tree_optimize_tail_calls_1 (true);

struct gimple_opt_pass pass_tail_recursion =
  gate_tail_calls,                      /* gate */
  execute_tail_recursion,               /* execute */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa      /* todo_flags_finish */

struct gimple_opt_pass pass_tail_calls =
  gate_tail_calls,                      /* gate */
  execute_tail_calls,                   /* execute */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa      /* todo_flags_finish */