1 /* Perform optimizations on tree structure.
2 Copyright (C) 1998, 1999, 2000 Free Software Foundation, Inc.
3 Written by Mark Mitchell (mark@codesourcery.com).
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
27 #include "insn-config.h"
29 #include "integrate.h"
34 o In order to make inlining-on-trees work, we pessimized
35 function-local static constants. In particular, they are now
36 always output, even when not addressed. Fix this by treating
37 function-local static constants just like global static
38 constants; the back-end already knows not to output them if they
41 o Provide heuristics to clamp inlining of recursive template
44 /* Data required for function inlining. */
/* NOTE(review): this numbered listing is elided -- the embedded original
   line numbers jump, so several member declarations (e.g. the `fns'
   stack, `ret_label', and `decl_map' referred to by the comments below)
   are not visible here.  */
46 typedef struct inline_data
48 /* A stack of the functions we are inlining. For example, if we are
49 compiling `f', which calls `g', which calls `h', and we are
50 inlining the body of `h', the stack will contain, `h', followed
51 by `g', followed by `f'. */
53 /* The label to jump to when a return statement is encountered. If
54 this value is NULL, then return statements will simply be
55 remapped as return statements, rather than as jumps. */
57 /* The map from local declarations in the inlined function to
58 equivalents in the function into which it is being inlined. */
60 /* Nonzero if we are currently within the cleanup for a
   TARGET_EXPR.  */
62 int in_target_cleanup_p;
63 /* A stack of the TARGET_EXPRs that we are currently processing. */
64 varray_type target_exprs;
/* Forward declarations for the static helpers defined below in this
   file.  */
69 static tree initialize_inlined_parameters PARAMS ((inline_data *, tree, tree));
70 static tree declare_return_variable PARAMS ((inline_data *, tree *));
71 static tree copy_body_r PARAMS ((tree *, int *, void *));
72 static tree copy_body PARAMS ((inline_data *));
73 static tree expand_call_inline PARAMS ((tree *, int *, void *));
74 static void expand_calls_inline PARAMS ((tree *, inline_data *));
75 static int inlinable_function_p PARAMS ((tree, inline_data *));
76 static tree remap_decl PARAMS ((tree, inline_data *));
77 static void remap_block PARAMS ((tree, tree, inline_data *));
78 static void copy_scope_stmt PARAMS ((tree *, int *, inline_data *));
79 static tree calls_setjmp_r PARAMS ((tree *, int *, void *));
81 /* Remap DECL during the copying of the BLOCK tree for the function.
82 DATA is really an `inline_data *'. */
/* NOTE(review): elided listing -- the function signature, local
   declarations, and some statements of remap_decl are missing between
   the lines below.  */
92 /* We only remap local variables in the current function. */
93 fn = VARRAY_TOP_TREE (id->fns);
94 if (!nonstatic_local_decl_p (decl) || DECL_CONTEXT (decl) != fn)
97 /* See if we have remapped this declaration. */
98 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
99 /* If we didn't already have an equivalent for this declaration,
   create one now.  */
105 /* Make a copy of the variable or label. */
106 t = copy_decl_for_inlining (decl, fn,
107 VARRAY_TREE (id->fns, 0));
109 /* The decl T could be a dynamic array or other variable size type,
110 in which case some fields need to be remapped because they may
111 contain SAVE_EXPRs. */
112 walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
113 walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);
114 if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE
115 && TYPE_DOMAIN (TREE_TYPE (t)))
/* Copy the type and its domain so the remapped SAVE_EXPRs in
   TYPE_MAX_VALUE do not clobber the original type.  */
117 TREE_TYPE (t) = copy_node (TREE_TYPE (t));
118 TYPE_DOMAIN (TREE_TYPE (t))
119 = copy_node (TYPE_DOMAIN (TREE_TYPE (t)));
120 walk_tree (&TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (t))),
121 copy_body_r, id, NULL);
124 /* Remember it, so that if we encounter this local entity
125 again we can reuse this copy. */
126 n = splay_tree_insert (id->decl_map,
127 (splay_tree_key) decl,
128 (splay_tree_value) t);
/* Return the remapped equivalent stored in the splay tree.  */
131 return (tree) n->value;
134 /* Copy the SCOPE_STMT_BLOCK associated with SCOPE_STMT to contain
135 remapped versions of the variables therein. And hook the new block
136 into the block-tree. If non-NULL, the DECLS are declarations to
137 add to use instead of the BLOCK_VARS in the old block. */
/* NOTE(review): elided listing -- parameter declarations, braces, and
   some statements of remap_block are missing between the lines below.  */
140 remap_block (scope_stmt, decls, id)
145 /* We cannot do this in the cleanup for a TARGET_EXPR since we do
146 not know whether or not expand_expr will actually write out the
147 code we put there. If it does not, then we'll have more BLOCKs
148 than block-notes, and things will go awry. At some point, we
149 should make the back-end handle BLOCK notes in a tidier way,
150 without requiring a strict correspondence to the block-tree; then
151 this check can go. */
152 if (id->in_target_cleanup_p)
154 SCOPE_STMT_BLOCK (scope_stmt) = NULL_TREE;
158 /* If this is the beginning of a scope, remap the associated BLOCK. */
159 if (SCOPE_BEGIN_P (scope_stmt) && SCOPE_STMT_BLOCK (scope_stmt))
167 /* Make the new block. */
168 old_block = SCOPE_STMT_BLOCK (scope_stmt);
169 new_block = make_node (BLOCK);
170 TREE_USED (new_block) = TREE_USED (old_block);
171 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
172 SCOPE_STMT_BLOCK (scope_stmt) = new_block;
174 /* Remap its variables. */
175 for (old_var = decls ? decls : BLOCK_VARS (old_block);
177 old_var = TREE_CHAIN (old_var))
181 /* Remap the variable. */
182 new_var = remap_decl (old_var, id);
183 /* If we didn't remap this variable, we can't mess with
184 its TREE_CHAIN. If we remapped this variable to
185 something other than a declaration (say, if we mapped it
186 to a constant), then we must similarly omit any mention
   of it here.  */
188 if (!new_var || !DECL_P (new_var))
/* Prepend the remapped variable; the list is reversed below.  */
192 TREE_CHAIN (new_var) = BLOCK_VARS (new_block);
193 BLOCK_VARS (new_block) = new_var;
196 /* We put the BLOCK_VARS in reverse order; fix that now. */
197 BLOCK_VARS (new_block) = nreverse (BLOCK_VARS (new_block));
198 /* Attach this new block after the DECL_INITIAL block for the
199 function into which this block is being inlined. In
200 rest_of_compilation we will straighten out the BLOCK tree. */
201 fn = VARRAY_TREE (id->fns, 0);
202 if (DECL_INITIAL (fn))
203 first_block = &BLOCK_CHAIN (DECL_INITIAL (fn));
205 first_block = &DECL_INITIAL (fn);
206 BLOCK_CHAIN (new_block) = *first_block;
207 *first_block = new_block;
208 /* Remember the remapped block. */
209 splay_tree_insert (id->decl_map,
210 (splay_tree_key) old_block,
211 (splay_tree_value) new_block);
213 /* If this is the end of a scope, set the SCOPE_STMT_BLOCK to be the
   remapped block recorded when the scope began.  */
215 else if (SCOPE_END_P (scope_stmt) && SCOPE_STMT_BLOCK (scope_stmt))
219 /* Find this block in the table of remapped things. */
220 n = splay_tree_lookup (id->decl_map,
221 (splay_tree_key) SCOPE_STMT_BLOCK (scope_stmt));
222 my_friendly_assert (n != NULL, 19991203);
223 SCOPE_STMT_BLOCK (scope_stmt) = (tree) n->value;
227 /* Copy the SCOPE_STMT pointed to by TP. */
/* NOTE(review): elided listing -- parameter declarations and braces of
   copy_scope_stmt are missing between the lines below.  */
230 copy_scope_stmt (tp, walk_subtrees, id)
237 /* Remember whether or not this statement was nullified. When
238 making a copy, copy_tree_r always sets SCOPE_NULLIFIED_P (and
239 doesn't copy the SCOPE_STMT_BLOCK) to free callers from having to
240 deal with copying BLOCKs if they do not wish to do so. */
241 block = SCOPE_STMT_BLOCK (*tp);
242 /* Copy (and replace) the statement. */
243 copy_tree_r (tp, walk_subtrees, NULL);
244 /* Restore the SCOPE_STMT_BLOCK. */
245 SCOPE_STMT_BLOCK (*tp) = block;
247 /* Remap the associated block. */
248 remap_block (*tp, NULL_TREE, id);
251 /* Called from copy_body via walk_tree. DATA is really an
   `inline_data *'.  */
/* NOTE(review): elided listing -- the signature, local declarations,
   braces, and some statements of copy_body_r are missing between the
   lines below.  */
255 copy_body_r (tp, walk_subtrees, data)
264 id = (inline_data *) data;
265 fn = VARRAY_TOP_TREE (id->fns);
267 /* All automatic variables should have a DECL_CONTEXT indicating
268 what function they come from. */
269 if ((TREE_CODE (*tp) == VAR_DECL || TREE_CODE (*tp) == LABEL_DECL)
270 && DECL_NAMESPACE_SCOPE_P (*tp))
271 my_friendly_assert (DECL_EXTERNAL (*tp) || TREE_STATIC (*tp),
274 /* If this is a RETURN_STMT, change it into an EXPR_STMT and a
275 GOTO_STMT with the RET_LABEL as its target. */
276 if (TREE_CODE (*tp) == RETURN_STMT && id->ret_label)
278 tree return_stmt = *tp;
281 /* Build the GOTO_STMT. */
282 goto_stmt = build_stmt (GOTO_STMT, id->ret_label);
283 TREE_CHAIN (goto_stmt) = TREE_CHAIN (return_stmt);
285 /* If we're returning something, just turn that into an
286 assignment into the equivalent of the original
   RESULT_DECL.  */
288 if (RETURN_EXPR (return_stmt))
290 *tp = build_stmt (EXPR_STMT,
291 RETURN_EXPR (return_stmt));
292 /* And then jump to the end of the function. */
293 TREE_CHAIN (*tp) = goto_stmt;
295 /* If we're not returning anything just do the jump. */
299 /* Local variables and labels need to be replaced by equivalent
300 variables. We don't want to copy static variables; there's only
301 one of those, no matter how many times we inline the containing
   function.  */
303 else if (nonstatic_local_decl_p (*tp) && DECL_CONTEXT (*tp) == fn)
307 /* Remap the declaration. */
308 new_decl = remap_decl (*tp, id);
309 my_friendly_assert (new_decl != NULL_TREE, 19991203);
310 /* Replace this variable with the copy. */
311 STRIP_TYPE_NOPS (new_decl);
/* A nonstatic local from some function other than the one at the
   bottom of the inlining stack should never be seen here.  */
314 else if (nonstatic_local_decl_p (*tp)
315 && DECL_CONTEXT (*tp) != VARRAY_TREE (id->fns, 0))
316 my_friendly_abort (0);
317 else if (TREE_CODE (*tp) == SAVE_EXPR)
318 remap_save_expr (tp, id->decl_map, VARRAY_TREE (id->fns, 0),
320 else if (TREE_CODE (*tp) == UNSAVE_EXPR)
321 my_friendly_abort (19991113);
322 /* For a SCOPE_STMT, we must copy the associated block so that we
323 can write out debugging information for the inlined variables. */
324 else if (TREE_CODE (*tp) == SCOPE_STMT && !id->in_target_cleanup_p)
325 copy_scope_stmt (tp, walk_subtrees, id);
326 /* Otherwise, just copy the node. Note that copy_tree_r already
327 knows not to copy VAR_DECLs, etc., so this is safe. */
330 copy_tree_r (tp, walk_subtrees, NULL);
332 /* The copied TARGET_EXPR has never been expanded, even if the
333 original node was expanded already. */
334 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
336 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
337 TREE_OPERAND (*tp, 3) = NULL_TREE;
339 /* Similarly, if we're copying a CALL_EXPR, the RTL for the
340 result is no longer valid. */
341 else if (TREE_CODE (*tp) == CALL_EXPR)
342 CALL_EXPR_RTL (*tp) = NULL_RTX;
345 /* Keep iterating. */
349 /* Make a copy of the body of FN so that it can be inserted inline in
   another function.  */
/* NOTE(review): elided listing -- the signature, braces, and return
   statement of copy_body are missing around the lines below.  */
358 body = DECL_SAVED_TREE (VARRAY_TOP_TREE (id->fns));
359 walk_tree (&body, copy_body_r, id, NULL);
364 /* Generate code to initialize the parameters of the function at the
365 top of the stack in ID from the ARGS (presented as a TREE_LIST). */
/* NOTE(review): elided listing -- parameter declarations, braces, and
   some statements of initialize_inlined_parameters are missing between
   the lines below.  */
368 initialize_inlined_parameters (id, args, fn)
378 /* Figure out what the parameters are. */
379 parms = DECL_ARGUMENTS (fn);
381 /* Start with no initializations whatsoever. */
382 init_stmts = NULL_TREE;
384 /* Loop through the parameter declarations, replacing each with an
385 equivalent VAR_DECL, appropriately initialized. */
386 for (p = parms, a = args; p; a = TREE_CHAIN (a), p = TREE_CHAIN (p))
392 /* Find the initializer. */
393 value = TREE_VALUE (a);
394 /* If the parameter is never assigned to, we may not need to
395 create a new variable here at all. Instead, we may be able
396 to just use the argument value. */
397 if (TREE_READONLY (p)
398 && !TREE_ADDRESSABLE (p)
399 && !TREE_SIDE_EFFECTS (value))
401 /* Simplify the value, if possible. */
402 value = fold (decl_constant_value (value));
404 /* We can't risk substituting complex expressions. They
405 might contain variables that will be assigned to later.
406 Theoretically, we could check the expression to see if
407 all of the variables that determine its value are
408 read-only, but we don't bother. */
409 if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
411 /* If this is a declaration, wrap it in a NOP_EXPR so that
412 we don't try to put the VALUE on the list of
   BLOCK_VARS.  NOTE(review): comment completion assumed -- the
   original continuation lines are elided from this listing.  */
415 value = build1 (NOP_EXPR, TREE_TYPE (value), value);
/* Map the PARM_DECL directly to the argument VALUE; no VAR_DECL
   is created in this case.  */
417 splay_tree_insert (id->decl_map,
419 (splay_tree_value) value);
424 /* Make an equivalent VAR_DECL. */
425 var = copy_decl_for_inlining (p, fn, VARRAY_TREE (id->fns, 0));
426 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
427 that way, when the PARM_DECL is encountered, it will be
428 automatically replaced by the VAR_DECL. */
429 splay_tree_insert (id->decl_map,
431 (splay_tree_value) var);
433 /* Declare this new variable. */
434 init_stmt = build_stmt (DECL_STMT, var);
435 TREE_CHAIN (init_stmt) = init_stmts;
436 init_stmts = init_stmt;
438 /* Initialize this VAR_DECL from the equivalent argument. If
439 the argument is an object, created via a constructor or copy,
440 this will not result in an extra copy: the TARGET_EXPR
441 representing the argument will be bound to VAR, and the
442 object will be constructed in VAR. */
443 if (! TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
444 DECL_INITIAL (var) = value;
447 init_stmt = build_stmt (EXPR_STMT,
448 build (INIT_EXPR, TREE_TYPE (p),
450 /* Add this initialization to the list. Note that we want the
451 declaration *after* the initialization because we are going
452 to reverse all the initialization statements below. */
453 TREE_CHAIN (init_stmt) = init_stmts;
454 init_stmts = init_stmt;
458 /* The initialization statements have been built up in reverse
459 order. Straighten them out now. */
460 return nreverse (init_stmts);
463 /* Declare a return variable to replace the RESULT_DECL for the
464 function we are calling. An appropriate DECL_STMT is returned.
465 The USE_STMT is filled in to contain a use of the declaration to
466 indicate the return value of the function. */
/* NOTE(review): elided listing -- some declarations, braces, and
   statements of declare_return_variable are missing between the lines
   below.  */
469 declare_return_variable (id, use_stmt)
470 struct inline_data *id;
473 tree fn = VARRAY_TOP_TREE (id->fns);
474 tree result = DECL_RESULT (fn);
476 int aggregate_return_p;
478 /* We don't need to do anything for functions that don't return
   anything.  */
480 if (!result || VOID_TYPE_P (TREE_TYPE (result)))
482 *use_stmt = NULL_TREE;
486 /* Figure out whether or not FN returns an aggregate. */
487 aggregate_return_p = IS_AGGR_TYPE (TREE_TYPE (result));
489 /* If FN returns an aggregate then the caller will always create the
490 temporary (using a TARGET_EXPR) and the call will be the
491 initializing expression for the TARGET_EXPR. If we were just to
492 create a new VAR_DECL here, then the result of this function
493 would be copied (bitwise) into the variable initialized by the
494 TARGET_EXPR. That's incorrect, so we must transform any
495 references to the RESULT into references to the target. */
496 if (aggregate_return_p)
498 my_friendly_assert (id->target_exprs->elements_used != 0,
500 var = TREE_OPERAND (VARRAY_TOP_TREE (id->target_exprs), 0);
502 (same_type_ignoring_top_level_qualifiers_p (TREE_TYPE (var),
506 /* Otherwise, make an appropriate copy. */
508 var = copy_decl_for_inlining (result, fn, VARRAY_TREE (id->fns, 0));
510 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
511 way, when the RESULT_DECL is encountered, it will be
512 automatically replaced by the VAR_DECL. */
513 splay_tree_insert (id->decl_map,
514 (splay_tree_key) result,
515 (splay_tree_value) var);
517 /* Build the USE_STMT. */
518 *use_stmt = build_stmt (EXPR_STMT, var);
520 /* Build the declaration statement if FN does not return an
   aggregate.  */
522 if (!aggregate_return_p)
523 return build_stmt (DECL_STMT, var);
524 /* If FN does return an aggregate, there's no need to declare the
525 return variable; we're using a variable in our caller's frame. */
530 /* Returns non-zero if FN is a function that can be inlined. */
/* NOTE(review): elided listing -- the signature, declarations, braces,
   and several statements (including the early returns and the setting
   of `inlinable') are missing between the lines below.  */
533 inlinable_function_p (fn, id)
539 /* If we've already decided this function shouldn't be inlined,
540 there's no need to check again. */
541 if (DECL_UNINLINABLE (fn))
544 /* Assume it is not inlinable. */
547 /* If we're not inlining things, then nothing is inlinable. */
548 if (!flag_inline_trees)
550 /* If the function was not declared `inline', then we don't inline
   it.  */
552 else if (!DECL_INLINE (fn))
554 /* We can't inline varargs functions. */
555 else if (varargs_function_p (fn))
557 /* All is well. We can inline this function. Traditionally, GCC
558 has refused to inline functions using alloca, or functions whose
559 values are returned in a PARALLEL, and a few other such obscure
560 conditions. We are not equally constrained at the tree level. */
564 /* Squirrel away the result so that we don't have to check again. */
565 DECL_UNINLINABLE (fn) = !inlinable;
567 /* We can inline a template instantiation only if it's fully
   instantiated.  */
570 && DECL_TEMPLATE_INFO (fn)
571 && TI_PENDING_TEMPLATE_FLAG (DECL_TEMPLATE_INFO (fn)))
573 fn = instantiate_decl (fn, /*defer_ok=*/0);
574 inlinable = !TI_PENDING_TEMPLATE_FLAG (DECL_TEMPLATE_INFO (fn));
577 /* If we don't have the function body available, we can't inline
   it.  */
579 if (!DECL_SAVED_TREE (fn))
582 /* Don't do recursive inlining, either. We don't record this in
583 DECL_UNINLINABLE; we may be able to inline this function later. */
588 for (i = 0; i < id->fns->elements_used; ++i)
589 if (VARRAY_TREE (id->fns, i) == fn)
593 /* Return the result. */
597 /* If *TP is a CALL_EXPR, replace it with its inline expansion. */
/* NOTE(review): elided listing -- the signature, local declarations,
   braces, and some statements of expand_call_inline are missing between
   the lines below.  */
600 expand_call_inline (tp, walk_subtrees, data)
616 /* See what we've got. */
617 id = (inline_data *) data;
620 /* Recurse, but letting recursive invocations know that we are
621 inside the body of a TARGET_EXPR. */
622 if (TREE_CODE (*tp) == TARGET_EXPR)
624 int i, len = first_rtl_op (TARGET_EXPR);
626 /* We're walking our own subtrees. */
629 /* Push *TP on the stack of pending TARGET_EXPRs. */
630 VARRAY_PUSH_TREE (id->target_exprs, *tp);
632 /* Actually walk over them. This loop is the body of
633 walk_trees, omitting the case where the TARGET_EXPR
634 itself is handled. */
635 for (i = 0; i < len; ++i)
/* NOTE(review): the in_target_cleanup_p bump appears to bracket only
   the cleanup operand's walk -- the conditional selecting that operand
   is elided from this listing; confirm against the full source.  */
638 ++id->in_target_cleanup_p;
639 walk_tree (&TREE_OPERAND (*tp, i), expand_call_inline, data,
642 --id->in_target_cleanup_p;
645 /* We're done with this TARGET_EXPR now. */
646 VARRAY_POP (id->target_exprs);
651 /* From here on, we're only interested in CALL_EXPRs. */
652 if (TREE_CODE (t) != CALL_EXPR)
655 /* First, see if we can figure out what function is being called.
656 If we cannot, then there is no hope of inlining the function. */
657 fn = get_callee_fndecl (t);
661 /* Don't try to inline functions that are not well-suited to
   inlining.  */
663 if (!inlinable_function_p (fn, id))
666 /* Set the current filename and line number to the function we are
667 inlining so that when we create new _STMT nodes here they get
668 line numbers corresponding to the function we are calling. We
669 wrap the whole inlined body in an EXPR_WITH_FILE_AND_LINE as well
670 because individual statements don't record the filename. */
671 push_srcloc (fn->decl.filename, fn->decl.linenum);
673 /* Build a statement-expression containing code to initialize the
674 arguments, the actual inline expansion of the body, and a label
675 for the return statements within the function to jump to. The
676 type of the statement expression is the return type of the
   function.  */
678 expr = build_min (STMT_EXPR, TREE_TYPE (TREE_TYPE (fn)), NULL_TREE);
680 /* Local declarations will be replaced by their equivalents in this
   map.  */
683 id->decl_map = splay_tree_new (splay_tree_compare_pointers,
686 /* Initialize the parameters. */
687 arg_inits = initialize_inlined_parameters (id, TREE_OPERAND (t, 1), fn);
688 /* Expand any inlined calls in the initializers. Do this before we
689 push FN on the stack of functions we are inlining; we want to
690 inline calls to FN that appear in the initializers for the
   parameters.  */
692 expand_calls_inline (&arg_inits, id);
693 /* And add them to the tree. */
694 STMT_EXPR_STMT (expr) = chainon (STMT_EXPR_STMT (expr), arg_inits);
696 /* Record the function we are about to inline so that we can avoid
697 recursing into it. */
698 VARRAY_PUSH_TREE (id->fns, fn);
700 /* Return statements in the function body will be replaced by jumps
   to the RET_LABEL.  */
702 id->ret_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
703 DECL_CONTEXT (id->ret_label) = VARRAY_TREE (id->fns, 0);
705 /* Create a block to put the parameters in. We have to do this
706 after the parameters have been remapped because remapping
707 parameters is different from remapping ordinary variables. */
708 scope_stmt = build_stmt (SCOPE_STMT, DECL_INITIAL (fn));
709 SCOPE_BEGIN_P (scope_stmt) = 1;
710 SCOPE_NO_CLEANUPS_P (scope_stmt) = 1;
711 remap_block (scope_stmt, DECL_ARGUMENTS (fn), id);
712 TREE_CHAIN (scope_stmt) = STMT_EXPR_STMT (expr);
713 STMT_EXPR_STMT (expr) = scope_stmt;
715 /* Tell the debugging backends that this block represents the
716 outermost scope of the inlined function. */
717 if (SCOPE_STMT_BLOCK (scope_stmt))
718 BLOCK_ABSTRACT_ORIGIN (SCOPE_STMT_BLOCK (scope_stmt)) = DECL_ORIGIN (fn);
720 /* Declare the return variable for the function. */
721 STMT_EXPR_STMT (expr)
722 = chainon (STMT_EXPR_STMT (expr),
723 declare_return_variable (id, &use_stmt));
725 /* After we've initialized the parameters, we insert the body of the
   function itself.  */
727 inlined_body = &STMT_EXPR_STMT (expr);
728 while (*inlined_body)
729 inlined_body = &TREE_CHAIN (*inlined_body);
730 *inlined_body = copy_body (id);
732 /* Close the block for the parameters. */
733 scope_stmt = build_stmt (SCOPE_STMT, DECL_INITIAL (fn));
734 SCOPE_NO_CLEANUPS_P (scope_stmt) = 1;
735 my_friendly_assert (DECL_INITIAL (fn)
736 && TREE_CODE (DECL_INITIAL (fn)) == BLOCK,
738 remap_block (scope_stmt, NULL_TREE, id);
739 STMT_EXPR_STMT (expr)
740 = chainon (STMT_EXPR_STMT (expr), scope_stmt);
742 /* After the body of the function comes the RET_LABEL. This must come
743 before we evaluate the returned value below, because that evaluation
744 may cause RTL to be generated. */
745 STMT_EXPR_STMT (expr)
746 = chainon (STMT_EXPR_STMT (expr),
747 build_stmt (LABEL_STMT, id->ret_label));
749 /* Finally, mention the returned value so that the value of the
750 statement-expression is the returned value of the function. */
751 STMT_EXPR_STMT (expr) = chainon (STMT_EXPR_STMT (expr), use_stmt);
/* The declaration map is per-call; discard it now that the body has
   been copied.  */
754 splay_tree_delete (id->decl_map);
757 /* The new expression has side-effects if the old one did. */
758 TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (t);
760 /* Replace the call by the inlined body. Wrap it in an
761 EXPR_WITH_FILE_LOCATION so that we'll get debugging line notes
762 pointing to the right place. */
763 chain = TREE_CHAIN (*tp);
764 *tp = build_expr_wfl (expr, DECL_SOURCE_FILE (fn), DECL_SOURCE_LINE (fn),
766 EXPR_WFL_EMIT_LINE_NOTE (*tp) = 1;
767 TREE_CHAIN (*tp) = chain;
770 /* If the value of the new expression is ignored, that's OK. We
771 don't warn about this for CALL_EXPRs, so we shouldn't warn about
772 the equivalent inlined version either. */
775 /* Recurse into the body of the just inlined function. */
776 expand_calls_inline (inlined_body, id);
777 VARRAY_POP (id->fns);
779 /* Don't walk into subtrees. We've already handled them above. */
782 /* Keep iterating. */
786 /* Walk over the entire tree *TP, replacing CALL_EXPRs with inline
787 expansions as appropriate. */
/* NOTE(review): elided listing -- parameter declarations and braces of
   expand_calls_inline are missing around the lines below.  */
790 expand_calls_inline (tp, id)
794 /* Search through *TP, replacing all calls to inline functions by
795 appropriate equivalents. */
796 walk_tree (tp, expand_call_inline, id, NULL);
799 /* Optimize the body of FN. */
/* NOTE(review): elided listing -- the signature, braces, the
   ggc_push_context call referred to below, and other statements of
   optimize_function are missing between the lines below.  */
802 optimize_function (fn)
805 /* While in this function, we may choose to go off and compile
806 another function. For example, we might instantiate a function
807 in the hopes of inlining it. Normally, that wouldn't trigger any
808 actual RTL code-generation -- but it will if the template is
809 actually needed. (For example, if its address is taken, or if
810 some other function already refers to the template.) If
811 code-generation occurs, then garbage collection will occur, so we
812 must protect ourselves, just as we do while building up the body
   of the function.  */
816 /* Expand calls to inline functions. */
817 if (flag_inline_trees)
821 struct saved_scope *s;
824 memset (&id, 0, sizeof (id));
826 /* Don't allow recursion into FN. */
827 VARRAY_TREE_INIT (id.fns, 32, "fns");
828 VARRAY_PUSH_TREE (id.fns, fn);
829 /* Or any functions that aren't finished yet. */
831 if (current_function_decl)
833 VARRAY_PUSH_TREE (id.fns, current_function_decl);
834 prev_fn = current_function_decl;
/* Also bar every in-progress function saved on the scope chain.  */
836 for (s = scope_chain; s; s = s->prev)
837 if (s->function_decl && s->function_decl != prev_fn)
839 VARRAY_PUSH_TREE (id.fns, s->function_decl);
840 prev_fn = s->function_decl;
843 /* Create the stack of TARGET_EXPRs. */
844 VARRAY_TREE_INIT (id.target_exprs, 32, "target_exprs");
846 /* Replace all calls to inline functions with the bodies of those
   functions.  */
848 expand_calls_inline (&DECL_SAVED_TREE (fn), &id);
/* Release the per-function inlining state.  */
851 VARRAY_FREE (id.fns);
852 VARRAY_FREE (id.target_exprs);
855 /* Undo the call to ggc_push_context above. */
859 /* Called from calls_setjmp_p via walk_tree. Returns the offending
   FUNCTION_DECL (stopping the walk) if *TP can return more than once,
   NULL_TREE otherwise.  */
/* NOTE(review): elided listing -- the signature line and braces of
   calls_setjmp_r are missing around the lines below.  */
862 calls_setjmp_r (tp, walk_subtrees, data)
864 int *walk_subtrees ATTRIBUTE_UNUSED;
865 void *data ATTRIBUTE_UNUSED;
867 /* We're only interested in FUNCTION_DECLS. */
868 if (TREE_CODE (*tp) != FUNCTION_DECL)
871 return setjmp_call_p (*tp) ? *tp : NULL_TREE;
874 /* Returns non-zero if FN calls `setjmp' or some other function that
875 can return more than once. This function is conservative; it may
876 occasionally return a non-zero value even when FN does not actually
   call `setjmp'.  */
/* NOTE(review): elided listing -- the signature, braces, and the
   trailing arguments of the walk call are missing around the lines
   below.  */
883 return walk_tree_without_duplicates (&DECL_SAVED_TREE (fn),
888 /* FN is a function that has a complete body. Clone the body as
889 necessary. Returns non-zero if there's no longer any need to
890 process the main body. */
/* NOTE(review): elided listing -- the signature, declarations, braces,
   loop-condition lines, and some statements of maybe_clone_body are
   missing between the lines below.  */
893 maybe_clone_body (fn)
899 /* We don't clone constructors and destructors under the old ABI. */
903 /* We only clone constructors and destructors. */
904 if (!DECL_MAYBE_IN_CHARGE_CONSTRUCTOR_P (fn)
905 && !DECL_MAYBE_IN_CHARGE_DESTRUCTOR_P (fn))
908 /* We know that any clones immediately follow FN in the TYPE_METHODS
   list.  */
910 for (clone = TREE_CHAIN (fn);
911 clone && DECL_CLONED_FUNCTION_P (clone);
912 clone = TREE_CHAIN (clone))
918 /* Update CLONE's source position information to match FN's. */
919 DECL_SOURCE_FILE (clone) = DECL_SOURCE_FILE (fn);
920 DECL_SOURCE_LINE (clone) = DECL_SOURCE_LINE (fn);
921 DECL_INLINE (clone) = DECL_INLINE (fn);
922 DECL_THIS_INLINE (clone) = DECL_THIS_INLINE (fn);
923 DECL_COMDAT (clone) = DECL_COMDAT (fn);
924 DECL_WEAK (clone) = DECL_WEAK (fn);
925 DECL_ONE_ONLY (clone) = DECL_ONE_ONLY (fn);
926 DECL_SECTION_NAME (clone) = DECL_SECTION_NAME (fn);
927 DECL_USE_TEMPLATE (clone) = DECL_USE_TEMPLATE (fn);
928 DECL_EXTERNAL (clone) = DECL_EXTERNAL (fn);
929 DECL_INTERFACE_KNOWN (clone) = DECL_INTERFACE_KNOWN (fn);
930 DECL_NOT_REALLY_EXTERN (clone) = DECL_NOT_REALLY_EXTERN (fn);
932 /* Start processing the function. */
933 push_to_top_level ();
934 start_function (NULL_TREE, clone, NULL_TREE, SF_PRE_PARSED);
936 /* Just clone the body, as if we were making an inline call.
937 But, remap the parameters in the callee to the parameters of
938 the caller. If there's an in-charge parameter, map it to an
939 appropriate constant. */
940 memset (&id, 0, sizeof (id));
941 VARRAY_TREE_INIT (id.fns, 2, "fns");
942 VARRAY_PUSH_TREE (id.fns, clone);
943 VARRAY_PUSH_TREE (id.fns, fn);
945 /* Remap the parameters. */
946 id.decl_map = splay_tree_new (splay_tree_compare_pointers,
949 parm = DECL_ARGUMENTS (fn),
950 clone_parm = DECL_ARGUMENTS (clone);
953 parm = TREE_CHAIN (parm))
955 /* Map the in-charge parameter to an appropriate constant. */
956 if (DECL_HAS_IN_CHARGE_PARM_P (fn) && parmno == 1)
959 in_charge = in_charge_arg_for_name (DECL_NAME (clone));
960 splay_tree_insert (id.decl_map,
961 (splay_tree_key) parm,
962 (splay_tree_value) in_charge);
964 /* For a subobject constructor or destructor, the next
965 argument is the VTT parameter. Remap the VTT_PARM
966 from the CLONE to this parameter. */
967 if (DECL_NEEDS_VTT_PARM_P (clone))
969 splay_tree_insert (id.decl_map,
970 (splay_tree_key) DECL_VTT_PARM (fn),
971 (splay_tree_value) clone_parm);
972 splay_tree_insert (id.decl_map,
973 (splay_tree_key) DECL_USE_VTT_PARM (fn),
974 (splay_tree_value) boolean_true_node);
975 clone_parm = TREE_CHAIN (clone_parm);
977 /* Otherwise, map the VTT parameter to `NULL'. */
978 else if (DECL_VTT_PARM (fn))
980 splay_tree_insert (id.decl_map,
981 (splay_tree_key) DECL_VTT_PARM (fn),
982 (splay_tree_value) null_pointer_node);
983 splay_tree_insert (id.decl_map,
984 (splay_tree_key) DECL_USE_VTT_PARM (fn),
985 (splay_tree_value) boolean_false_node);
988 /* Map other parameters to their equivalents in the cloned
   function.  */
992 splay_tree_insert (id.decl_map,
993 (splay_tree_key) parm,
994 (splay_tree_value) clone_parm);
995 clone_parm = TREE_CHAIN (clone_parm);
999 /* Actually copy the body. */
1000 TREE_CHAIN (DECL_SAVED_TREE (clone)) = copy_body (&id);
/* Release the per-clone remapping state.  */
1003 splay_tree_delete (id.decl_map);
1004 VARRAY_FREE (id.fns);
1006 /* Now, expand this function into RTL, if appropriate. */
1007 function_name_declared_p = 1;
1008 expand_body (finish_function (0));
1009 pop_from_top_level ();
1012 /* We don't need to process the original function any further. */