1 /* Perform optimizations on tree structure.
2 Copyright (C) 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
3 Written by Mark Mitchell (mark@codesourcery.com).
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
27 #include "insn-config.h"
29 #include "integrate.h"
38 o In order to make inlining-on-trees work, we pessimized
39 function-local static constants. In particular, they are now
40 always output, even when not addressed. Fix this by treating
41 function-local static constants just like global static
42 constants; the back-end already knows not to output them if they
45 o Provide heuristics to clamp inlining of recursive template
48 /* Data required for function inlining. */
50 typedef struct inline_data
/* Bookkeeping shared by all of the tree-inlining walkers below.
   NOTE(review): the opening brace and several field declarations
   (e.g. the `fns' varray, `ret_label', `decl_map', `inlined_stmts',
   the cloning flag, and the `tree_pruner' hash table) are elided
   from this excerpt -- confirm against the full file.  */
52 /* A stack of the functions we are inlining. For example, if we are
53 compiling `f', which calls `g', which calls `h', and we are
54 inlining the body of `h', the stack will contain, `h', followed
55 by `g', followed by `f'. The first few elements of the stack may
56 contain other functions that we know we should not recurse into,
57 even though they are not directly being inlined. */
59 /* The index of the first element of FNS that really represents an
61 unsigned first_inlined_fn;
62 /* The label to jump to when a return statement is encountered. If
63 this value is NULL, then return statements will simply be
64 remapped as return statements, rather than as jumps. */
66 /* The map from local declarations in the inlined function to
67 equivalents in the function into which it is being inlined. */
69 /* Nonzero if we are currently within the cleanup for a
71 int in_target_cleanup_p;
72 /* A stack of the TARGET_EXPRs that we are currently processing. */
73 varray_type target_exprs;
74 /* A list of the functions the current function has inlined. */
75 varray_type inlined_fns;
76 /* The approximate number of statements we have inlined in the
77 current call stack. */
79 /* We use the same mechanism to build clones that we do to perform
80 inlining. However, there are a few places where we need to
81 distinguish between those two situations. This flag is true if
82 we are cloning, rather than inlining. */
84 /* Hash table used to prevent walk_tree from visiting the same node
85 umpteen million times. */
/* Prototypes for the local functions defined below.  */
91 static tree initialize_inlined_parameters PARAMS ((inline_data *, tree, tree));
92 static tree declare_return_variable PARAMS ((inline_data *, tree *));
93 static tree copy_body_r PARAMS ((tree *, int *, void *));
94 static tree copy_body PARAMS ((inline_data *));
95 static tree expand_call_inline PARAMS ((tree *, int *, void *));
96 static void expand_calls_inline PARAMS ((tree *, inline_data *));
97 static int inlinable_function_p PARAMS ((tree, inline_data *));
98 static tree remap_decl PARAMS ((tree, inline_data *));
99 static void remap_block PARAMS ((tree, tree, inline_data *));
100 static void copy_scope_stmt PARAMS ((tree *, int *, inline_data *));
101 static tree calls_setjmp_r PARAMS ((tree *, int *, void *));
102 static void update_cloned_parm PARAMS ((tree, tree));
104 /* The approximate number of instructions per statement. This number
105 need not be particularly accurate; it is used only to make
106 decisions about when a function is too big to inline. */
107 #define INSNS_PER_STMT (10)
109 /* Remap DECL during the copying of the BLOCK tree for the function.
   Returns the copy recorded in ID->DECL_MAP, creating it on first
   use; non-local and static declarations are returned unchanged.  */
112 remap_decl (decl, id)
119 /* We only remap local variables in the current function. */
120 fn = VARRAY_TOP_TREE (id->fns);
121 if (!nonstatic_local_decl_p (decl) || DECL_CONTEXT (decl) != fn)
124 /* See if we have remapped this declaration. */
125 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
126 /* If we didn't already have an equivalent for this declaration,
132 /* Make a copy of the variable or label. */
133 t = copy_decl_for_inlining (decl, fn,
134 VARRAY_TREE (id->fns, 0));
136 /* The decl T could be a dynamic array or other variable size type,
137 in which case some fields need to be remapped because they may
138 contain SAVE_EXPRs. */
139 walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
140 walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);
141 if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE
142 && TYPE_DOMAIN (TREE_TYPE (t)))
144 TREE_TYPE (t) = copy_node (TREE_TYPE (t));
145 TYPE_DOMAIN (TREE_TYPE (t))
146 = copy_node (TYPE_DOMAIN (TREE_TYPE (t)));
147 walk_tree (&TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (t))),
148 copy_body_r, id, NULL);
151 if (!DECL_NAME (t) && TREE_TYPE (t)
152 && ANON_AGGR_TYPE_P (TREE_TYPE ((t))))
154 /* For a VAR_DECL of anonymous type, we must also copy the
155 member VAR_DECLS here and rechain the
156 DECL_ANON_UNION_ELEMS. */
160 for (src = DECL_ANON_UNION_ELEMS (t); src;
161 src = TREE_CHAIN (src))
163 tree member = remap_decl (TREE_VALUE (src), id);
165 my_friendly_assert (!TREE_PURPOSE (src), 20010529);
166 members = tree_cons (NULL, member, members);
168 DECL_ANON_UNION_ELEMS (t) = nreverse (members);
171 /* Remember it, so that if we encounter this local entity
172 again we can reuse this copy. */
173 n = splay_tree_insert (id->decl_map,
174 (splay_tree_key) decl,
175 (splay_tree_value) t);
178 return (tree) n->value;
181 /* Copy the SCOPE_STMT_BLOCK associated with SCOPE_STMT to contain
182 remapped versions of the variables therein. And hook the new block
183 into the block-tree. If non-NULL, the DECLS are declarations to
184 add to use instead of the BLOCK_VARS in the old block. */
187 remap_block (scope_stmt, decls, id)
192 /* We cannot do this in the cleanup for a TARGET_EXPR since we do
193 not know whether or not expand_expr will actually write out the
194 code we put there. If it does not, then we'll have more BLOCKs
195 than block-notes, and things will go awry. At some point, we
196 should make the back-end handle BLOCK notes in a tidier way,
197 without requiring a strict correspondence to the block-tree; then
198 this check can go. */
199 if (id->in_target_cleanup_p)
201 SCOPE_STMT_BLOCK (scope_stmt) = NULL_TREE;
205 /* If this is the beginning of a scope, remap the associated BLOCK. */
206 if (SCOPE_BEGIN_P (scope_stmt) && SCOPE_STMT_BLOCK (scope_stmt))
213 /* Make the new block. */
214 old_block = SCOPE_STMT_BLOCK (scope_stmt);
215 new_block = make_node (BLOCK);
216 TREE_USED (new_block) = TREE_USED (old_block);
217 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
218 SCOPE_STMT_BLOCK (scope_stmt) = new_block;
220 /* Remap its variables. */
221 for (old_var = decls ? decls : BLOCK_VARS (old_block);
223 old_var = TREE_CHAIN (old_var))
227 /* Remap the variable. */
228 new_var = remap_decl (old_var, id);
229 /* If we didn't remap this variable, then we can't mess with
230 its TREE_CHAIN. If we remapped this variable to
231 something other than a declaration (say, if we mapped it
232 to a constant), then we must similarly omit any mention
234 if (!new_var || !DECL_P (new_var))
238 TREE_CHAIN (new_var) = BLOCK_VARS (new_block);
239 BLOCK_VARS (new_block) = new_var;
242 /* We put the BLOCK_VARS in reverse order; fix that now. */
243 BLOCK_VARS (new_block) = nreverse (BLOCK_VARS (new_block));
244 fn = VARRAY_TREE (id->fns, 0);
246 /* We're building a clone; DECL_INITIAL is still
247 error_mark_node, and current_binding_level is the parm
249 insert_block (new_block);
252 /* Attach this new block after the DECL_INITIAL block for the
253 function into which this block is being inlined. In
254 rest_of_compilation we will straighten out the BLOCK tree. */
256 if (DECL_INITIAL (fn))
257 first_block = &BLOCK_CHAIN (DECL_INITIAL (fn));
259 first_block = &DECL_INITIAL (fn);
260 BLOCK_CHAIN (new_block) = *first_block;
261 *first_block = new_block;
263 /* Remember the remapped block. */
264 splay_tree_insert (id->decl_map,
265 (splay_tree_key) old_block,
266 (splay_tree_value) new_block);
268 /* If this is the end of a scope, set the SCOPE_STMT_BLOCK to be the
270 else if (SCOPE_END_P (scope_stmt) && SCOPE_STMT_BLOCK (scope_stmt))
274 /* Find this block in the table of remapped things. */
275 n = splay_tree_lookup (id->decl_map,
276 (splay_tree_key) SCOPE_STMT_BLOCK (scope_stmt));
277 my_friendly_assert (n != NULL, 19991203);
278 SCOPE_STMT_BLOCK (scope_stmt) = (tree) n->value;
282 /* Copy the SCOPE_STMT pointed to by TP, preserving its
   SCOPE_STMT_BLOCK across the generic copy, and remap the
   associated BLOCK via remap_block.  */
285 copy_scope_stmt (tp, walk_subtrees, id)
292 /* Remember whether or not this statement was nullified. When
293 making a copy, copy_tree_r always sets SCOPE_NULLIFIED_P (and
294 doesn't copy the SCOPE_STMT_BLOCK) to free callers from having to
295 deal with copying BLOCKs if they do not wish to do so. */
296 block = SCOPE_STMT_BLOCK (*tp);
297 /* Copy (and replace) the statement. */
298 copy_tree_r (tp, walk_subtrees, NULL);
299 /* Restore the SCOPE_STMT_BLOCK. */
300 SCOPE_STMT_BLOCK (*tp) = block;
302 /* Remap the associated block. */
303 remap_block (*tp, NULL_TREE, id);
306 /* Called from copy_body via walk_tree. DATA is really an
   `inline_data *'.  Remaps declarations, rewrites RETURN_STMTs as
   jumps to ID->RET_LABEL, and copies every other node.  */
310 copy_body_r (tp, walk_subtrees, data)
319 id = (inline_data *) data;
320 fn = VARRAY_TOP_TREE (id->fns);
322 /* All automatic variables should have a DECL_CONTEXT indicating
323 what function they come from. */
324 if ((TREE_CODE (*tp) == VAR_DECL || TREE_CODE (*tp) == LABEL_DECL)
325 && DECL_NAMESPACE_SCOPE_P (*tp))
326 my_friendly_assert (DECL_EXTERNAL (*tp) || TREE_STATIC (*tp),
329 /* If this is a RETURN_STMT, change it into an EXPR_STMT and a
330 GOTO_STMT with the RET_LABEL as its target. */
331 if (TREE_CODE (*tp) == RETURN_STMT && id->ret_label)
333 tree return_stmt = *tp;
336 /* Build the GOTO_STMT. */
337 goto_stmt = build_stmt (GOTO_STMT, id->ret_label);
338 TREE_CHAIN (goto_stmt) = TREE_CHAIN (return_stmt);
340 /* If we're returning something, just turn that into an
341 assignment into the equivalent of the original
343 if (RETURN_EXPR (return_stmt))
345 *tp = build_stmt (EXPR_STMT,
346 RETURN_EXPR (return_stmt));
347 STMT_IS_FULL_EXPR_P (*tp) = 1;
348 /* And then jump to the end of the function. */
349 TREE_CHAIN (*tp) = goto_stmt;
351 /* If we're not returning anything just do the jump. */
355 /* Local variables and labels need to be replaced by equivalent
356 variables. We don't want to copy static variables; there's only
357 one of those, no matter how many times we inline the containing
359 else if (nonstatic_local_decl_p (*tp) && DECL_CONTEXT (*tp) == fn)
363 /* Remap the declaration. */
364 new_decl = remap_decl (*tp, id);
365 my_friendly_assert (new_decl != NULL_TREE, 19991203);
366 /* Replace this variable with the copy. */
367 STRIP_TYPE_NOPS (new_decl);
370 else if (nonstatic_local_decl_p (*tp)
371 && DECL_CONTEXT (*tp) != VARRAY_TREE (id->fns, 0))
372 my_friendly_abort (0);
373 else if (TREE_CODE (*tp) == SAVE_EXPR)
374 remap_save_expr (tp, id->decl_map, VARRAY_TREE (id->fns, 0),
376 else if (TREE_CODE (*tp) == UNSAVE_EXPR)
377 /* UNSAVE_EXPRs should not be generated until expansion time. */
378 my_friendly_abort (19991113);
379 /* For a SCOPE_STMT, we must copy the associated block so that we
380 can write out debugging information for the inlined variables. */
381 else if (TREE_CODE (*tp) == SCOPE_STMT && !id->in_target_cleanup_p)
382 copy_scope_stmt (tp, walk_subtrees, id);
383 /* Otherwise, just copy the node. Note that copy_tree_r already
384 knows not to copy VAR_DECLs, etc., so this is safe. */
387 copy_tree_r (tp, walk_subtrees, NULL);
389 /* The copied TARGET_EXPR has never been expanded, even if the
390 original node was expanded already. */
391 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
393 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
394 TREE_OPERAND (*tp, 3) = NULL_TREE;
396 else if (TREE_CODE (*tp) == MODIFY_EXPR
397 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
398 && nonstatic_local_decl_p (TREE_OPERAND (*tp, 0))
399 && DECL_CONTEXT (TREE_OPERAND (*tp, 0)) == fn)
401 /* Some assignments VAR = VAR; don't generate any rtl code
402 and thus don't count as variable modification. Avoid
403 keeping bogosities like 0 = 0. */
404 tree decl = TREE_OPERAND (*tp, 0), value;
407 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
410 value = (tree) n->value;
411 STRIP_TYPE_NOPS (value);
412 if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
418 /* Keep iterating. */
422 /* Make a copy of the body of FN so that it can be inserted inline in
   another function.  Walks DECL_SAVED_TREE of the function on top of
   ID->FNS with copy_body_r.  */
431 body = DECL_SAVED_TREE (VARRAY_TOP_TREE (id->fns));
432 walk_tree (&body, copy_body_r, id, NULL);
437 /* Generate code to initialize the parameters of the function at the
438 top of the stack in ID from the ARGS (presented as a TREE_LIST).
   Returns the initialization statements in source order.  */
441 initialize_inlined_parameters (id, args, fn)
451 /* Figure out what the parameters are. */
452 parms = DECL_ARGUMENTS (fn);
454 /* Start with no initializations whatsoever. */
455 init_stmts = NULL_TREE;
457 /* Loop through the parameter declarations, replacing each with an
458 equivalent VAR_DECL, appropriately initialized. */
459 for (p = parms, a = args; p; a = TREE_CHAIN (a), p = TREE_CHAIN (p))
465 /* Find the initializer. */
466 value = TREE_VALUE (a);
467 /* If the parameter is never assigned to, we may not need to
468 create a new variable here at all. Instead, we may be able
469 to just use the argument value. */
470 if (TREE_READONLY (p)
471 && !TREE_ADDRESSABLE (p)
472 && !TREE_SIDE_EFFECTS (value))
474 /* Simplify the value, if possible. */
475 value = fold (decl_constant_value (value));
477 /* We can't risk substituting complex expressions. They
478 might contain variables that will be assigned to later.
479 Theoretically, we could check the expression to see if
480 all of the variables that determine its value are
481 read-only, but we don't bother. */
482 if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
484 /* If this is a declaration, wrap it in a NOP_EXPR so that
485 we don't try to put the VALUE on the list of
488 value = build1 (NOP_EXPR, TREE_TYPE (value), value);
490 splay_tree_insert (id->decl_map,
492 (splay_tree_value) value);
497 /* Make an equivalent VAR_DECL. */
498 var = copy_decl_for_inlining (p, fn, VARRAY_TREE (id->fns, 0));
499 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
500 that way, when the PARM_DECL is encountered, it will be
501 automatically replaced by the VAR_DECL. */
502 splay_tree_insert (id->decl_map,
504 (splay_tree_value) var);
506 /* Declare this new variable. */
507 init_stmt = build_stmt (DECL_STMT, var);
508 TREE_CHAIN (init_stmt) = init_stmts;
509 init_stmts = init_stmt;
511 /* Initialize this VAR_DECL from the equivalent argument. If
512 the argument is an object, created via a constructor or copy,
513 this will not result in an extra copy: the TARGET_EXPR
514 representing the argument will be bound to VAR, and the
515 object will be constructed in VAR. */
516 if (! TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
517 DECL_INITIAL (var) = value;
520 /* Even if P was TREE_READONLY, the new VAR should not be.
521 In the original code, we would have constructed a
522 temporary, and then the function body would have never
523 changed the value of P. However, now, we will be
524 constructing VAR directly. The constructor body may
525 change its value multiple times as it is being
526 constructed. Therefore, it must not be TREE_READONLY;
527 the back-end assumes that a TREE_READONLY variable is
528 assigned to only once. */
529 TREE_READONLY (var) = 0;
531 /* Build a run-time initialization. */
532 init_stmt = build_stmt (EXPR_STMT,
533 build (INIT_EXPR, TREE_TYPE (p),
535 /* Add this initialization to the list. Note that we want the
536 declaration *after* the initialization because we are going
537 to reverse all the initialization statements below. */
538 TREE_CHAIN (init_stmt) = init_stmts;
539 init_stmts = init_stmt;
543 /* The initialization statements have been built up in reverse
544 order. Straighten them out now. */
545 return nreverse (init_stmts);
548 /* Declare a return variable to replace the RESULT_DECL for the
549 function we are calling. An appropriate DECL_STMT is returned.
550 The USE_STMT is filled in to contain a use of the declaration to
551 indicate the return value of the function. */
554 declare_return_variable (id, use_stmt)
555 struct inline_data *id;
558 tree fn = VARRAY_TOP_TREE (id->fns);
559 tree result = DECL_RESULT (fn);
561 int aggregate_return_p;
563 /* We don't need to do anything for functions that don't return
565 if (!result || VOID_TYPE_P (TREE_TYPE (result)))
567 *use_stmt = NULL_TREE;
571 /* Figure out whether or not FN returns an aggregate. */
572 aggregate_return_p = IS_AGGR_TYPE (TREE_TYPE (result));
574 /* If FN returns an aggregate then the caller will always create the
575 temporary (using a TARGET_EXPR) and the call will be the
576 initializing expression for the TARGET_EXPR. If we were just to
577 create a new VAR_DECL here, then the result of this function
578 would be copied (bitwise) into the variable initialized by the
579 TARGET_EXPR. That's incorrect, so we must transform any
580 references to the RESULT into references to the target. */
581 if (aggregate_return_p)
583 my_friendly_assert (VARRAY_ACTIVE_SIZE (id->target_exprs) != 0,
585 var = TREE_OPERAND (VARRAY_TOP_TREE (id->target_exprs), 0);
587 (same_type_ignoring_top_level_qualifiers_p (TREE_TYPE (var),
591 /* Otherwise, make an appropriate copy. */
593 var = copy_decl_for_inlining (result, fn, VARRAY_TREE (id->fns, 0));
595 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
596 way, when the RESULT_DECL is encountered, it will be
597 automatically replaced by the VAR_DECL. */
598 splay_tree_insert (id->decl_map,
599 (splay_tree_key) result,
600 (splay_tree_value) var);
602 /* Build the USE_STMT. */
603 *use_stmt = build_stmt (EXPR_STMT, var);
605 /* Build the declaration statement if FN does not return an
607 if (!aggregate_return_p)
608 return build_stmt (DECL_STMT, var);
609 /* If FN does return an aggregate, there's no need to declare the
610 return variable; we're using a variable in our caller's frame. */
615 /* Returns non-zero if FN is a function that can be inlined into the
   inlining context represented by ID.  Caches the per-function part
   of the answer in DECL_UNINLINABLE.  */
618 inlinable_function_p (fn, id)
624 /* If we've already decided this function shouldn't be inlined,
625 there's no need to check again. */
626 if (DECL_UNINLINABLE (fn))
629 /* Assume it is not inlinable. */
632 /* If we're not inlining things, then nothing is inlinable. */
633 if (!flag_inline_trees)
635 /* If the function was not declared `inline', then we don't inline
637 else if (!DECL_INLINE (fn))
639 /* We can't inline varargs functions. */
640 else if (varargs_function_p (fn))
642 /* We can't inline functions that are too big. */
643 else if (DECL_NUM_STMTS (fn) * INSNS_PER_STMT > MAX_INLINE_INSNS)
645 /* All is well. We can inline this function. Traditionally, GCC
646 has refused to inline functions using alloca, or functions whose
647 values are returned in a PARALLEL, and a few other such obscure
648 conditions. We are not equally constrained at the tree level. */
652 /* Squirrel away the result so that we don't have to check again. */
653 DECL_UNINLINABLE (fn) = !inlinable;
655 /* Even if this function is not itself too big to inline, it might
656 be that we've done so much inlining already that we don't want to
657 risk inlining any more. */
658 if ((DECL_NUM_STMTS (fn) + id->inlined_stmts) * INSNS_PER_STMT
662 /* We can inline a template instantiation only if it's fully
665 && DECL_TEMPLATE_INFO (fn)
666 && TI_PENDING_TEMPLATE_FLAG (DECL_TEMPLATE_INFO (fn)))
668 fn = instantiate_decl (fn, /*defer_ok=*/0);
669 inlinable = !TI_PENDING_TEMPLATE_FLAG (DECL_TEMPLATE_INFO (fn));
672 /* If we don't have the function body available, we can't inline
674 if (!DECL_SAVED_TREE (fn))
677 /* Don't do recursive inlining, either. We don't record this in
678 DECL_UNINLINABLE; we may be able to inline this function later. */
683 for (i = 0; i < VARRAY_ACTIVE_SIZE (id->fns); ++i)
684 if (VARRAY_TREE (id->fns, i) == fn)
687 if (inlinable && DECL_LANG_SPECIFIC (fn) && DECL_INLINED_FNS (fn))
690 tree inlined_fns = DECL_INLINED_FNS (fn);
/* Scan FN's own inlined-function list; if it has already inlined
   the outermost function, inlining FN here would be indirectly
   recursive. */
692 for (j = 0; j < TREE_VEC_LENGTH (inlined_fns); ++j)
693 if (TREE_VEC_ELT (inlined_fns, j) == VARRAY_TREE (id->fns, 0))
698 /* Return the result. */
702 /* If *TP is a CALL_EXPR, replace it with its inline expansion.
   Called via walk_tree with DATA being the `inline_data *'.  */
705 expand_call_inline (tp, walk_subtrees, data)
721 /* See what we've got. */
722 id = (inline_data *) data;
725 /* Recurse, but letting recursive invocations know that we are
726 inside the body of a TARGET_EXPR. */
727 if (TREE_CODE (*tp) == TARGET_EXPR)
729 int i, len = first_rtl_op (TARGET_EXPR);
731 /* We're walking our own subtrees. */
734 /* Push *TP on the stack of pending TARGET_EXPRs. */
735 VARRAY_PUSH_TREE (id->target_exprs, *tp);
737 /* Actually walk over them. This loop is the body of
738 walk_trees, omitting the case where the TARGET_EXPR
739 itself is handled. */
740 for (i = 0; i < len; ++i)
743 ++id->in_target_cleanup_p;
744 walk_tree (&TREE_OPERAND (*tp, i), expand_call_inline, data,
747 --id->in_target_cleanup_p;
750 /* We're done with this TARGET_EXPR now. */
751 VARRAY_POP (id->target_exprs);
757 /* Because types were not copied in copy_body, CALL_EXPRs beneath
758 them should not be expanded. This can happen if the type is a
759 dynamic array type, for example. */
762 /* From here on, we're only interested in CALL_EXPRs. */
763 if (TREE_CODE (t) != CALL_EXPR)
766 /* First, see if we can figure out what function is being called.
767 If we cannot, then there is no hope of inlining the function. */
768 fn = get_callee_fndecl (t);
772 /* Don't try to inline functions that are not well-suited to
774 if (!inlinable_function_p (fn, id))
777 /* Set the current filename and line number to the function we are
778 inlining so that when we create new _STMT nodes here they get
779 line numbers corresponding to the function we are calling. We
780 wrap the whole inlined body in an EXPR_WITH_FILE_AND_LINE as well
781 because individual statements don't record the filename. */
782 push_srcloc (fn->decl.filename, fn->decl.linenum);
784 /* Build a statement-expression containing code to initialize the
785 arguments, the actual inline expansion of the body, and a label
786 for the return statements within the function to jump to. The
787 type of the statement expression is the return type of the
789 expr = build_min (STMT_EXPR, TREE_TYPE (TREE_TYPE (fn)), NULL_TREE);
791 /* Local declarations will be replaced by their equivalents in this
794 id->decl_map = splay_tree_new (splay_tree_compare_pointers,
797 /* Initialize the parameters. */
798 arg_inits = initialize_inlined_parameters (id, TREE_OPERAND (t, 1), fn);
799 /* Expand any inlined calls in the initializers. Do this before we
800 push FN on the stack of functions we are inlining; we want to
801 inline calls to FN that appear in the initializers for the
803 expand_calls_inline (&arg_inits, id);
804 /* And add them to the tree. */
805 STMT_EXPR_STMT (expr) = chainon (STMT_EXPR_STMT (expr), arg_inits);
807 /* Record the function we are about to inline so that we can avoid
808 recursing into it. */
809 VARRAY_PUSH_TREE (id->fns, fn);
811 /* Record the function we are about to inline if optimize_function
812 has not been called on it yet and we don't have it in the list. */
813 if (DECL_LANG_SPECIFIC (fn) && !DECL_INLINED_FNS (fn))
817 for (i = VARRAY_ACTIVE_SIZE (id->inlined_fns) - 1; i >= 0; i--)
818 if (VARRAY_TREE (id->inlined_fns, i) == fn)
821 VARRAY_PUSH_TREE (id->inlined_fns, fn);
824 /* Return statements in the function body will be replaced by jumps
826 id->ret_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
827 DECL_CONTEXT (id->ret_label) = VARRAY_TREE (id->fns, 0);
829 /* Create a block to put the parameters in. We have to do this
830 after the parameters have been remapped because remapping
831 parameters is different from remapping ordinary variables. */
832 scope_stmt = build_stmt (SCOPE_STMT, DECL_INITIAL (fn));
833 SCOPE_BEGIN_P (scope_stmt) = 1;
834 SCOPE_NO_CLEANUPS_P (scope_stmt) = 1;
835 remap_block (scope_stmt, DECL_ARGUMENTS (fn), id);
836 TREE_CHAIN (scope_stmt) = STMT_EXPR_STMT (expr);
837 STMT_EXPR_STMT (expr) = scope_stmt;
839 /* Tell the debugging backends that this block represents the
840 outermost scope of the inlined function. */
841 if (SCOPE_STMT_BLOCK (scope_stmt))
842 BLOCK_ABSTRACT_ORIGIN (SCOPE_STMT_BLOCK (scope_stmt)) = DECL_ORIGIN (fn);
844 /* Declare the return variable for the function. */
845 STMT_EXPR_STMT (expr)
846 = chainon (STMT_EXPR_STMT (expr),
847 declare_return_variable (id, &use_stmt));
849 /* After we've initialized the parameters, we insert the body of the
851 inlined_body = &STMT_EXPR_STMT (expr);
852 while (*inlined_body)
853 inlined_body = &TREE_CHAIN (*inlined_body);
854 *inlined_body = copy_body (id);
856 /* Close the block for the parameters. */
857 scope_stmt = build_stmt (SCOPE_STMT, DECL_INITIAL (fn));
858 SCOPE_NO_CLEANUPS_P (scope_stmt) = 1;
859 my_friendly_assert (DECL_INITIAL (fn)
860 && TREE_CODE (DECL_INITIAL (fn)) == BLOCK,
862 remap_block (scope_stmt, NULL_TREE, id);
863 STMT_EXPR_STMT (expr)
864 = chainon (STMT_EXPR_STMT (expr), scope_stmt);
866 /* After the body of the function comes the RET_LABEL. This must come
867 before we evaluate the returned value below, because that evaluation
868 may cause RTL to be generated. */
869 STMT_EXPR_STMT (expr)
870 = chainon (STMT_EXPR_STMT (expr),
871 build_stmt (LABEL_STMT, id->ret_label));
873 /* Finally, mention the returned value so that the value of the
874 statement-expression is the returned value of the function. */
875 STMT_EXPR_STMT (expr) = chainon (STMT_EXPR_STMT (expr), use_stmt);
878 splay_tree_delete (id->decl_map);
881 /* The new expression has side-effects if the old one did. */
882 TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (t);
884 /* Replace the call by the inlined body. Wrap it in an
885 EXPR_WITH_FILE_LOCATION so that we'll get debugging line notes
886 pointing to the right place. */
887 chain = TREE_CHAIN (*tp);
888 *tp = build_expr_wfl (expr, DECL_SOURCE_FILE (fn), DECL_SOURCE_LINE (fn),
890 EXPR_WFL_EMIT_LINE_NOTE (*tp) = 1;
891 TREE_CHAIN (*tp) = chain;
894 /* If the value of the new expression is ignored, that's OK. We
895 don't warn about this for CALL_EXPRs, so we shouldn't warn about
896 the equivalent inlined version either. */
899 /* Our function now has more statements than it did before. */
900 DECL_NUM_STMTS (VARRAY_TREE (id->fns, 0)) += DECL_NUM_STMTS (fn);
901 id->inlined_stmts += DECL_NUM_STMTS (fn);
903 /* Recurse into the body of the just inlined function. */
904 expand_calls_inline (inlined_body, id);
905 VARRAY_POP (id->fns);
907 /* If we've returned to the top level, clear out the record of how
908 much inlining has been done. */
909 if (VARRAY_ACTIVE_SIZE (id->fns) == id->first_inlined_fn)
910 id->inlined_stmts = 0;
912 /* Don't walk into subtrees. We've already handled them above. */
915 /* Keep iterating. */
919 /* Walk over the entire tree *TP, replacing CALL_EXPRs with inline
920 expansions as appropriate. */
923 expand_calls_inline (tp, id)
927 /* Search through *TP, replacing all calls to inline functions by
928 appropriate equivalents. Use walk_tree in no-duplicates mode
929 to avoid exponential time complexity. (We can't just use
930 walk_tree_without_duplicates, because of the special TARGET_EXPR
931 handling in expand_calls. The hash table is set up in
932 optimize_function.) */
933 walk_tree (tp, expand_call_inline, id, id->tree_pruner);
936 /* Optimize the body of FN.  Currently the only optimization
   performed here is tree-level inlining, guarded by
   flag_inline_trees.  */
939 optimize_function (fn)
942 /* While in this function, we may choose to go off and compile
943 another function. For example, we might instantiate a function
944 in the hopes of inlining it. Normally, that wouldn't trigger any
945 actual RTL code-generation -- but it will if the template is
946 actually needed. (For example, if its address is taken, or if
947 some other function already refers to the template.) If
948 code-generation occurs, then garbage collection will occur, so we
949 must protect ourselves, just as we do while building up the body
953 /* Expand calls to inline functions. */
954 if (flag_inline_trees)
958 struct saved_scope *s;
961 memset (&id, 0, sizeof (id));
963 /* Don't allow recursion into FN. */
964 VARRAY_TREE_INIT (id.fns, 32, "fns");
965 VARRAY_PUSH_TREE (id.fns, fn);
966 /* Or any functions that aren't finished yet. */
968 if (current_function_decl)
970 VARRAY_PUSH_TREE (id.fns, current_function_decl);
971 prev_fn = current_function_decl;
973 for (s = scope_chain; s; s = s->prev)
974 if (s->function_decl && s->function_decl != prev_fn)
976 VARRAY_PUSH_TREE (id.fns, s->function_decl);
977 prev_fn = s->function_decl;
980 /* Create the stack of TARGET_EXPRs. */
981 VARRAY_TREE_INIT (id.target_exprs, 32, "target_exprs");
983 /* Create the list of functions this call will inline. */
984 VARRAY_TREE_INIT (id.inlined_fns, 32, "inlined_fns");
986 /* Keep track of the low-water mark, i.e., the point where
987 the first real inlining is represented in ID.FNS. */
988 id.first_inlined_fn = VARRAY_ACTIVE_SIZE (id.fns);
990 /* Replace all calls to inline functions with the bodies of those
992 id.tree_pruner = htab_create (37, htab_hash_pointer,
993 htab_eq_pointer, NULL);
994 expand_calls_inline (&DECL_SAVED_TREE (fn), &id);
/* Clean up: the hash table and varrays are only live for this
   invocation. */
997 htab_delete (id.tree_pruner);
998 VARRAY_FREE (id.fns);
999 VARRAY_FREE (id.target_exprs);
/* Record, on FN itself, the functions inlined into it so that later
   inlining decisions (see inlinable_function_p) can detect indirect
   recursion. */
1000 if (DECL_LANG_SPECIFIC (fn))
1002 tree ifn = make_tree_vec (VARRAY_ACTIVE_SIZE (id.inlined_fns));
1004 memcpy (&TREE_VEC_ELT (ifn, 0), &VARRAY_TREE (id.inlined_fns, 0),
1005 VARRAY_ACTIVE_SIZE (id.inlined_fns) * sizeof (tree));
1006 DECL_INLINED_FNS (fn) = ifn;
1008 VARRAY_FREE (id.inlined_fns);
1011 /* Undo the call to ggc_push_context above. */
1015 /* Called from calls_setjmp_p via walk_tree.  Returns the
   FUNCTION_DECL (terminating the walk) if *TP is a call to a
   setjmp-like function, NULL_TREE otherwise.  */
1018 calls_setjmp_r (tp, walk_subtrees, data)
1020 int *walk_subtrees ATTRIBUTE_UNUSED;
1021 void *data ATTRIBUTE_UNUSED;
1023 /* We're only interested in FUNCTION_DECLS. */
1024 if (TREE_CODE (*tp) != FUNCTION_DECL)
1027 return setjmp_call_p (*tp) ? *tp : NULL_TREE;
1030 /* Returns non-zero if FN calls `setjmp' or some other function that
1031 can return more than once. This function is conservative; it may
1032 occasionally return a non-zero value even when FN does not actually
/* NOTE(review): the function header is elided from this excerpt; the
   body simply walks FN's saved tree with calls_setjmp_r. */
1039 return walk_tree_without_duplicates (&DECL_SAVED_TREE (fn),
1044 /* CLONED_PARM is a copy of CLONE, generated for a cloned constructor
1045 or destructor. Update it to ensure that the source-position for
1046 the cloned parameter matches that for the original, and that the
1047 debugging generation code will be able to find the original PARM. */
1050 update_cloned_parm (parm, cloned_parm)
/* Point the clone back at the original so debug output can find it. */
1054 DECL_ABSTRACT_ORIGIN (cloned_parm) = parm;
1056 /* We may have taken its address. */
1057 TREE_ADDRESSABLE (cloned_parm) = TREE_ADDRESSABLE (parm);
1059 /* The definition might have different constness. */
1060 TREE_READONLY (cloned_parm) = TREE_READONLY (parm);
1062 TREE_USED (cloned_parm) = TREE_USED (parm);
1064 /* The name may have changed from the declaration. */
1065 DECL_NAME (cloned_parm) = DECL_NAME (parm);
1066 DECL_SOURCE_FILE (cloned_parm) = DECL_SOURCE_FILE (parm);
1067 DECL_SOURCE_LINE (cloned_parm) = DECL_SOURCE_LINE (parm);
1070 /* FN is a function that has a complete body. Clone the body as
1071 necessary. Returns non-zero if there's no longer any need to
1072 process the main body. */
1075 maybe_clone_body (fn)
1082 /* We only clone constructors and destructors. */
1083 if (!DECL_MAYBE_IN_CHARGE_CONSTRUCTOR_P (fn)
1084 && !DECL_MAYBE_IN_CHARGE_DESTRUCTOR_P (fn))
1087 /* Emit the DWARF1 abstract instance. */
1088 note_deferral_of_defined_inline_function (fn);
1090 /* We know that any clones immediately follow FN in the TYPE_METHODS
1092 for (clone = TREE_CHAIN (fn);
1093 clone && DECL_CLONED_FUNCTION_P (clone);
1094 clone = TREE_CHAIN (clone), first = 0)
/* Copy CLONE's source position, inlining, linkage, and templateness
   attributes from FN, so the clone is emitted under the same rules as
   the function it was cloned from.  */
1100 /* Update CLONE's source position information to match FN's. */
1101 DECL_SOURCE_FILE (clone) = DECL_SOURCE_FILE (fn);
1102 DECL_SOURCE_LINE (clone) = DECL_SOURCE_LINE (fn);
1103 DECL_INLINE (clone) = DECL_INLINE (fn);
1104 DECL_DECLARED_INLINE_P (clone) = DECL_DECLARED_INLINE_P (fn);
1105 DECL_COMDAT (clone) = DECL_COMDAT (fn);
1106 DECL_WEAK (clone) = DECL_WEAK (fn);
1107 DECL_ONE_ONLY (clone) = DECL_ONE_ONLY (fn);
1108 DECL_SECTION_NAME (clone) = DECL_SECTION_NAME (fn);
1109 DECL_USE_TEMPLATE (clone) = DECL_USE_TEMPLATE (fn);
1110 DECL_EXTERNAL (clone) = DECL_EXTERNAL (fn);
1111 DECL_INTERFACE_KNOWN (clone) = DECL_INTERFACE_KNOWN (fn);
1112 DECL_NOT_REALLY_EXTERN (clone) = DECL_NOT_REALLY_EXTERN (fn);
1113 TREE_PUBLIC (clone) = TREE_PUBLIC (fn);
1115 /* Adjust the parameter names and locations. */
1116 parm = DECL_ARGUMENTS (fn);
1117 clone_parm = DECL_ARGUMENTS (clone);
1118 /* Update the `this' parameter, which is always first.
1119 Sometimes, we end up updating the `this' parameter twice because
1120 we process it again in the loop below. That is harmless. */
1121 update_cloned_parm (parm, clone_parm);
/* Skip FN's in-charge and VTT parameters (and CLONE's VTT parameter,
   if any) so the loop below pairs the remaining parameters of FN and
   CLONE one-to-one.  */
1122 if (DECL_HAS_IN_CHARGE_PARM_P (fn))
1123 parm = TREE_CHAIN (parm);
1124 if (DECL_HAS_VTT_PARM_P (fn))
1125 parm = TREE_CHAIN (parm);
1126 if (DECL_HAS_VTT_PARM_P (clone))
1127 clone_parm = TREE_CHAIN (clone_parm);
1129 parm = TREE_CHAIN (parm), clone_parm = TREE_CHAIN (clone_parm))
1131 /* Update this parameter. */
1132 update_cloned_parm (parm, clone_parm);
1133 /* We should only give unused information for one clone. */
1135 TREE_USED (clone_parm) = 1;
1138 /* Start processing the function. */
1139 push_to_top_level ();
1140 start_function (NULL_TREE, clone, NULL_TREE, SF_PRE_PARSED);
1142 /* Just clone the body, as if we were making an inline call.
1143 But, remap the parameters in the callee to the parameters of
1144 caller. If there's an in-charge parameter, map it to an
1145 appropriate constant. */
1146 memset (&id, 0, sizeof (id));
/* Set up the tree copier's stack of functions: the clone, then FN,
   whose saved body is what actually gets copied.  */
1147 VARRAY_TREE_INIT (id.fns, 2, "fns");
1148 VARRAY_PUSH_TREE (id.fns, clone);
1149 VARRAY_PUSH_TREE (id.fns, fn);
1151 /* Cloning is treated slightly differently from inlining. Set
1152 CLONING_P so that it's clear which operation we're performing. */
1153 id.cloning_p = true;
1155 /* Remap the parameters. */
1156 id.decl_map = splay_tree_new (splay_tree_compare_pointers,
1159 parm = DECL_ARGUMENTS (fn),
1160 clone_parm = DECL_ARGUMENTS (clone);
1163 parm = TREE_CHAIN (parm))
1165 /* Map the in-charge parameter to an appropriate constant. */
1166 if (DECL_HAS_IN_CHARGE_PARM_P (fn) && parmno == 1)
1169 in_charge = in_charge_arg_for_name (DECL_NAME (clone));
1170 splay_tree_insert (id.decl_map,
1171 (splay_tree_key) parm,
1172 (splay_tree_value) in_charge);
1174 else if (DECL_ARTIFICIAL (parm)
1175 && DECL_NAME (parm) == vtt_parm_identifier)
1177 /* For a subobject constructor or destructor, the next
1178 argument is the VTT parameter. Remap the VTT_PARM
1179 from the CLONE to this parameter. */
1180 if (DECL_HAS_VTT_PARM_P (clone))
1182 DECL_ABSTRACT_ORIGIN (clone_parm) = parm;
1183 splay_tree_insert (id.decl_map,
1184 (splay_tree_key) parm,
1185 (splay_tree_value) clone_parm);
1186 clone_parm = TREE_CHAIN (clone_parm);
1188 /* Otherwise, map the VTT parameter to `NULL'. */
1191 splay_tree_insert (id.decl_map,
1192 (splay_tree_key) parm,
1193 (splay_tree_value) null_pointer_node);
1196 /* Map other parameters to their equivalents in the cloned
1200 splay_tree_insert (id.decl_map,
1201 (splay_tree_key) parm,
1202 (splay_tree_value) clone_parm);
1203 clone_parm = TREE_CHAIN (clone_parm);
1207 /* Actually copy the body. */
1208 TREE_CHAIN (DECL_SAVED_TREE (clone)) = copy_body (&id);
1210 /* There are as many statements in the clone as in the
1212 DECL_NUM_STMTS (clone) = DECL_NUM_STMTS (fn);
/* Clean up the per-clone remapping data.  */
1215 splay_tree_delete (id.decl_map);
1216 VARRAY_FREE (id.fns);
1218 /* Now, expand this function into RTL, if appropriate. */
1219 finish_function (0);
/* Record FN's outermost block as the abstract origin of CLONE's, so
   the debug output can tie the clone back to the abstract instance.  */
1220 BLOCK_ABSTRACT_ORIGIN (DECL_INITIAL (clone)) = DECL_INITIAL (fn);
1221 expand_body (clone);
1222 pop_from_top_level ();
1225 /* We don't need to process the original function any further. */