1 /* Perform optimizations on tree structure.
2 Copyright (C) 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
3 Written by Mark Mitchell (mark@codesourcery.com).
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
27 #include "insn-config.h"
29 #include "integrate.h"
34 o In order to make inlining-on-trees work, we pessimized
35 function-local static constants. In particular, they are now
36 always output, even when not addressed. Fix this by treating
37 function-local static constants just like global static
38 constants; the back-end already knows not to output them if they
41 o Provide heuristics to clamp inlining of recursive template
44 /* Data required for function inlining. */
46 typedef struct inline_data
48 /* A stack of the functions we are inlining. For example, if we are
49 compiling `f', which calls `g', which calls `h', and we are
50 inlining the body of `h', the stack will contain, `h', followed
51 by `g', followed by `f'. */
/* NOTE(review): the field declarations for the function stack (`fns'),
   the return label (`ret_label') and the declaration map (`decl_map')
   are missing from this excerpt (the original line numbering jumps);
   only the comments that described them survive.  */
53 /* The label to jump to when a return statement is encountered. If
54 this value is NULL, then return statements will simply be
55 remapped as return statements, rather than as jumps. */
57 /* The map from local declarations in the inlined function to
58 equivalents in the function into which it is being inlined. */
60 /* Nonzero if we are currently within the cleanup for a
62 int in_target_cleanup_p;
63 /* A stack of the TARGET_EXPRs that we are currently processing. */
64 varray_type target_exprs;
69 static tree initialize_inlined_parameters PARAMS ((inline_data *, tree, tree));
70 static tree declare_return_variable PARAMS ((inline_data *, tree *));
71 static tree copy_body_r PARAMS ((tree *, int *, void *));
72 static tree copy_body PARAMS ((inline_data *));
73 static tree expand_call_inline PARAMS ((tree *, int *, void *));
74 static void expand_calls_inline PARAMS ((tree *, inline_data *));
75 static int inlinable_function_p PARAMS ((tree, inline_data *));
76 static tree remap_decl PARAMS ((tree, inline_data *));
77 static void remap_block PARAMS ((tree, tree, inline_data *));
78 static void copy_scope_stmt PARAMS ((tree *, int *, inline_data *));
79 static tree calls_setjmp_r PARAMS ((tree *, int *, void *));
81 /* Remap DECL during the copying of the BLOCK tree for the function.
82 DATA is really an `inline_data *'. */
/* NOTE(review): the function signature and several statements are not
   visible in this excerpt (original numbering jumps); the visible code
   looks up DECL in the inliner's decl_map, creating and caching a copy
   on first sight, and returns the mapped tree.  */
92 /* We only remap local variables in the current function. */
93 fn = VARRAY_TOP_TREE (id->fns);
94 if (!nonstatic_local_decl_p (decl) || DECL_CONTEXT (decl) != fn)
97 /* See if we have remapped this declaration. */
98 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
99 /* If we didn't already have an equivalent for this declaration,
105 /* Make a copy of the variable or label. */
106 t = copy_decl_for_inlining (decl, fn,
107 VARRAY_TREE (id->fns, 0));
109 /* The decl T could be a dynamic array or other variable size type,
110 in which case some fields need to be remapped because they may
111 contain SAVE_EXPRs. */
112 walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
113 walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);
114 if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE
115 && TYPE_DOMAIN (TREE_TYPE (t)))
/* Copy the type and its domain so the remapped TYPE_MAX_VALUE does not
   clobber the original type shared with the inlined function.  */
117 TREE_TYPE (t) = copy_node (TREE_TYPE (t));
118 TYPE_DOMAIN (TREE_TYPE (t))
119 = copy_node (TYPE_DOMAIN (TREE_TYPE (t)));
120 walk_tree (&TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (t))),
121 copy_body_r, id, NULL);
124 /* Remember it, so that if we encounter this local entity
125 again we can reuse this copy. */
126 n = splay_tree_insert (id->decl_map,
127 (splay_tree_key) decl,
128 (splay_tree_value) t);
131 return (tree) n->value;
134 /* Copy the SCOPE_STMT_BLOCK associated with SCOPE_STMT to contain
135 remapped versions of the variables therein. And hook the new block
136 into the block-tree. If non-NULL, the DECLS are declarations to
137 add to use instead of the BLOCK_VARS in the old block. */
/* NOTE(review): the return-type line, local declarations and some braces
   are missing from this excerpt (original numbering jumps).  */
140 remap_block (scope_stmt, decls, id)
145 /* We cannot do this in the cleanup for a TARGET_EXPR since we do
146 not know whether or not expand_expr will actually write out the
147 code we put there. If it does not, then we'll have more BLOCKs
148 than block-notes, and things will go awry. At some point, we
149 should make the back-end handle BLOCK notes in a tidier way,
150 without requiring a strict correspondence to the block-tree; then
151 this check can go. */
152 if (id->in_target_cleanup_p)
154 SCOPE_STMT_BLOCK (scope_stmt) = NULL_TREE;
158 /* If this is the beginning of a scope, remap the associated BLOCK. */
159 if (SCOPE_BEGIN_P (scope_stmt) && SCOPE_STMT_BLOCK (scope_stmt))
167 /* Make the new block. */
168 old_block = SCOPE_STMT_BLOCK (scope_stmt);
169 new_block = make_node (BLOCK);
170 TREE_USED (new_block) = TREE_USED (old_block);
171 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
172 SCOPE_STMT_BLOCK (scope_stmt) = new_block;
174 /* Remap its variables. */
175 for (old_var = decls ? decls : BLOCK_VARS (old_block);
177 old_var = TREE_CHAIN (old_var))
181 /* Remap the variable. */
182 new_var = remap_decl (old_var, id);
183 /* If we didn't remap this variable, we can't mess with
184 its TREE_CHAIN. If we remapped this variable to
185 something other than a declaration (say, if we mapped it
186 to a constant), then we must similarly omit any mention
188 if (!new_var || !DECL_P (new_var))
192 TREE_CHAIN (new_var) = BLOCK_VARS (new_block);
193 BLOCK_VARS (new_block) = new_var;
196 /* We put the BLOCK_VARS in reverse order; fix that now. */
197 BLOCK_VARS (new_block) = nreverse (BLOCK_VARS (new_block));
198 /* Attach this new block after the DECL_INITIAL block for the
199 function into which this block is being inlined. In
200 rest_of_compilation we will straighten out the BLOCK tree. */
201 fn = VARRAY_TREE (id->fns, 0);
202 if (DECL_INITIAL (fn))
203 first_block = &BLOCK_CHAIN (DECL_INITIAL (fn));
205 first_block = &DECL_INITIAL (fn);
206 BLOCK_CHAIN (new_block) = *first_block;
207 *first_block = new_block;
208 /* Remember the remapped block. */
209 splay_tree_insert (id->decl_map,
210 (splay_tree_key) old_block,
211 (splay_tree_value) new_block);
213 /* If this is the end of a scope, set the SCOPE_STMT_BLOCK to be the
215 else if (SCOPE_END_P (scope_stmt) && SCOPE_STMT_BLOCK (scope_stmt))
219 /* Find this block in the table of remapped things. */
220 n = splay_tree_lookup (id->decl_map,
221 (splay_tree_key) SCOPE_STMT_BLOCK (scope_stmt));
222 my_friendly_assert (n != NULL, 19991203);
223 SCOPE_STMT_BLOCK (scope_stmt) = (tree) n->value;
227 /* Copy the SCOPE_STMT pointed to by TP. */
/* NOTE(review): return type, parameter declarations and the `block'
   local are not visible in this excerpt.  */
230 copy_scope_stmt (tp, walk_subtrees, id)
237 /* Remember whether or not this statement was nullified. When
238 making a copy, copy_tree_r always sets SCOPE_NULLIFIED_P (and
239 doesn't copy the SCOPE_STMT_BLOCK) to free callers from having to
240 deal with copying BLOCKs if they do not wish to do so. */
241 block = SCOPE_STMT_BLOCK (*tp);
242 /* Copy (and replace) the statement. */
243 copy_tree_r (tp, walk_subtrees, NULL);
244 /* Restore the SCOPE_STMT_BLOCK. */
245 SCOPE_STMT_BLOCK (*tp) = block;
247 /* Remap the associated block. */
248 remap_block (*tp, NULL_TREE, id);
251 /* Called from copy_body via walk_tree. DATA is really an
/* NOTE(review): the signature's type lines and several statements
   (including some braces and `return' lines) are missing from this
   excerpt; the visible code rewrites RETURN_STMTs into assignments
   plus GOTO_STMTs, remaps local decls via remap_decl, and copies
   other nodes with copy_tree_r.  */
255 copy_body_r (tp, walk_subtrees, data)
264 id = (inline_data *) data;
265 fn = VARRAY_TOP_TREE (id->fns);
267 /* All automatic variables should have a DECL_CONTEXT indicating
268 what function they come from. */
269 if ((TREE_CODE (*tp) == VAR_DECL || TREE_CODE (*tp) == LABEL_DECL)
270 && DECL_NAMESPACE_SCOPE_P (*tp))
271 my_friendly_assert (DECL_EXTERNAL (*tp) || TREE_STATIC (*tp),
274 /* If this is a RETURN_STMT, change it into an EXPR_STMT and a
275 GOTO_STMT with the RET_LABEL as its target. */
276 if (TREE_CODE (*tp) == RETURN_STMT && id->ret_label)
278 tree return_stmt = *tp;
281 /* Build the GOTO_STMT. */
282 goto_stmt = build_stmt (GOTO_STMT, id->ret_label);
283 TREE_CHAIN (goto_stmt) = TREE_CHAIN (return_stmt);
285 /* If we're returning something, just turn that into an
286 assignment into the equivalent of the original
288 if (RETURN_EXPR (return_stmt))
290 *tp = build_stmt (EXPR_STMT,
291 RETURN_EXPR (return_stmt));
292 STMT_IS_FULL_EXPR_P (*tp) = 1;
293 /* And then jump to the end of the function. */
294 TREE_CHAIN (*tp) = goto_stmt;
296 /* If we're not returning anything just do the jump. */
300 /* Local variables and labels need to be replaced by equivalent
301 variables. We don't want to copy static variables; there's only
302 one of those, no matter how many times we inline the containing
304 else if (nonstatic_local_decl_p (*tp) && DECL_CONTEXT (*tp) == fn)
308 /* Remap the declaration. */
309 new_decl = remap_decl (*tp, id);
310 my_friendly_assert (new_decl != NULL_TREE, 19991203);
311 /* Replace this variable with the copy. */
312 STRIP_TYPE_NOPS (new_decl);
315 else if (nonstatic_local_decl_p (*tp)
316 && DECL_CONTEXT (*tp) != VARRAY_TREE (id->fns, 0))
317 my_friendly_abort (0);
318 else if (TREE_CODE (*tp) == SAVE_EXPR)
319 remap_save_expr (tp, id->decl_map, VARRAY_TREE (id->fns, 0),
321 else if (TREE_CODE (*tp) == UNSAVE_EXPR)
322 /* UNSAVE_EXPRs should not be generated until expansion time. */
323 my_friendly_abort (19991113);
324 /* For a SCOPE_STMT, we must copy the associated block so that we
325 can write out debugging information for the inlined variables. */
326 else if (TREE_CODE (*tp) == SCOPE_STMT && !id->in_target_cleanup_p)
327 copy_scope_stmt (tp, walk_subtrees, id);
328 /* Otherwise, just copy the node. Note that copy_tree_r already
329 knows not to copy VAR_DECLs, etc., so this is safe. */
332 copy_tree_r (tp, walk_subtrees, NULL);
334 /* The copied TARGET_EXPR has never been expanded, even if the
335 original node was expanded already. */
336 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
338 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
339 TREE_OPERAND (*tp, 3) = NULL_TREE;
341 else if (TREE_CODE (*tp) == MODIFY_EXPR
342 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
343 && nonstatic_local_decl_p (TREE_OPERAND (*tp, 0))
344 && DECL_CONTEXT (TREE_OPERAND (*tp, 0)) == fn)
346 /* Some assignments VAR = VAR; don't generate any rtl code
347 and thus don't count as variable modification. Avoid
348 keeping bogosities like 0 = 0. */
349 tree decl = TREE_OPERAND (*tp, 0), value;
352 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
355 value = (tree) n->value;
356 STRIP_TYPE_NOPS (value);
357 if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
363 /* Keep iterating. */
367 /* Make a copy of the body of FN so that it can be inserted inline in
/* NOTE(review): the function's opening, local declaration of `body'
   and the final return are missing from this excerpt.  */
376 body = DECL_SAVED_TREE (VARRAY_TOP_TREE (id->fns));
377 walk_tree (&body, copy_body_r, id, NULL);
382 /* Generate code to initialize the parameters of the function at the
383 top of the stack in ID from the ARGS (presented as a TREE_LIST). */
/* NOTE(review): the return-type line, parameter type declarations and
   locals are missing from this excerpt (original numbering jumps).  */
386 initialize_inlined_parameters (id, args, fn)
396 /* Figure out what the parameters are. */
397 parms = DECL_ARGUMENTS (fn);
399 /* Start with no initializations whatsoever. */
400 init_stmts = NULL_TREE;
402 /* Loop through the parameter declarations, replacing each with an
403 equivalent VAR_DECL, appropriately initialized. */
404 for (p = parms, a = args; p; a = TREE_CHAIN (a), p = TREE_CHAIN (p))
410 /* Find the initializer. */
411 value = TREE_VALUE (a);
412 /* If the parameter is never assigned to, we may not need to
413 create a new variable here at all. Instead, we may be able
414 to just use the argument value. */
415 if (TREE_READONLY (p)
416 && !TREE_ADDRESSABLE (p)
417 && !TREE_SIDE_EFFECTS (value))
419 /* Simplify the value, if possible. */
420 value = fold (decl_constant_value (value));
422 /* We can't risk substituting complex expressions. They
423 might contain variables that will be assigned to later.
424 Theoretically, we could check the expression to see if
425 all of the variables that determine its value are
426 read-only, but we don't bother. */
427 if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
429 /* If this is a declaration, wrap it in a NOP_EXPR so that
430 we don't try to put the VALUE on the list of
433 value = build1 (NOP_EXPR, TREE_TYPE (value), value);
435 splay_tree_insert (id->decl_map,
437 (splay_tree_value) value);
442 /* Make an equivalent VAR_DECL. */
443 var = copy_decl_for_inlining (p, fn, VARRAY_TREE (id->fns, 0));
444 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
445 that way, when the PARM_DECL is encountered, it will be
446 automatically replaced by the VAR_DECL. */
447 splay_tree_insert (id->decl_map,
449 (splay_tree_value) var);
451 /* Declare this new variable. */
452 init_stmt = build_stmt (DECL_STMT, var);
453 TREE_CHAIN (init_stmt) = init_stmts;
454 init_stmts = init_stmt;
456 /* Initialize this VAR_DECL from the equivalent argument. If
457 the argument is an object, created via a constructor or copy,
458 this will not result in an extra copy: the TARGET_EXPR
459 representing the argument will be bound to VAR, and the
460 object will be constructed in VAR. */
461 if (! TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
462 DECL_INITIAL (var) = value;
465 init_stmt = build_stmt (EXPR_STMT,
466 build (INIT_EXPR, TREE_TYPE (p),
468 /* Add this initialization to the list. Note that we want the
469 declaration *after* the initialization because we are going
470 to reverse all the initialization statements below. */
471 TREE_CHAIN (init_stmt) = init_stmts;
472 init_stmts = init_stmt;
476 /* The initialization statements have been built up in reverse
477 order. Straighten them out now. */
478 return nreverse (init_stmts);
481 /* Declare a return variable to replace the RESULT_DECL for the
482 function we are calling. An appropriate DECL_STMT is returned.
483 The USE_STMT is filled in to contain a use of the declaration to
484 indicate the return value of the function. */
/* NOTE(review): the return-type line, the `use_stmt' parameter
   declaration and several statements are missing from this excerpt.  */
487 declare_return_variable (id, use_stmt)
488 struct inline_data *id;
491 tree fn = VARRAY_TOP_TREE (id->fns);
492 tree result = DECL_RESULT (fn);
494 int aggregate_return_p;
496 /* We don't need to do anything for functions that don't return
498 if (!result || VOID_TYPE_P (TREE_TYPE (result)))
500 *use_stmt = NULL_TREE;
504 /* Figure out whether or not FN returns an aggregate. */
505 aggregate_return_p = IS_AGGR_TYPE (TREE_TYPE (result));
507 /* If FN returns an aggregate then the caller will always create the
508 temporary (using a TARGET_EXPR) and the call will be the
509 initializing expression for the TARGET_EXPR. If we were just to
510 create a new VAR_DECL here, then the result of this function
511 would be copied (bitwise) into the variable initialized by the
512 TARGET_EXPR. That's incorrect, so we must transform any
513 references to the RESULT into references to the target. */
514 if (aggregate_return_p)
516 my_friendly_assert (id->target_exprs->elements_used != 0,
518 var = TREE_OPERAND (VARRAY_TOP_TREE (id->target_exprs), 0);
520 (same_type_ignoring_top_level_qualifiers_p (TREE_TYPE (var),
524 /* Otherwise, make an appropriate copy. */
526 var = copy_decl_for_inlining (result, fn, VARRAY_TREE (id->fns, 0));
528 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
529 way, when the RESULT_DECL is encountered, it will be
530 automatically replaced by the VAR_DECL. */
531 splay_tree_insert (id->decl_map,
532 (splay_tree_key) result,
533 (splay_tree_value) var);
535 /* Build the USE_STMT. */
536 *use_stmt = build_stmt (EXPR_STMT, var);
538 /* Build the declaration statement if FN does not return an
540 if (!aggregate_return_p)
541 return build_stmt (DECL_STMT, var);
542 /* If FN does return an aggregate, there's no need to declare the
543 return variable; we're using a variable in our caller's frame. */
548 /* Returns non-zero if FN is a function that can be inlined. */
/* NOTE(review): return-type line, locals (`inlinable', `i') and the
   final return statement are missing from this excerpt.  */
551 inlinable_function_p (fn, id)
557 /* If we've already decided this function shouldn't be inlined,
558 there's no need to check again. */
559 if (DECL_UNINLINABLE (fn))
562 /* Assume it is not inlinable. */
565 /* If we're not inlining things, then nothing is inlinable. */
566 if (!flag_inline_trees)
568 /* If the function was not declared `inline', then we don't inline
570 else if (!DECL_INLINE (fn))
572 /* We can't inline varargs functions. */
573 else if (varargs_function_p (fn))
575 /* All is well. We can inline this function. Traditionally, GCC
576 has refused to inline functions using alloca, or functions whose
577 values are returned in a PARALLEL, and a few other such obscure
578 conditions. We are not equally constrained at the tree level. */
582 /* Squirrel away the result so that we don't have to check again. */
583 DECL_UNINLINABLE (fn) = !inlinable;
585 /* We can inline a template instantiation only if it's fully
588 && DECL_TEMPLATE_INFO (fn)
589 && TI_PENDING_TEMPLATE_FLAG (DECL_TEMPLATE_INFO (fn)))
591 fn = instantiate_decl (fn, /*defer_ok=*/0);
592 inlinable = !TI_PENDING_TEMPLATE_FLAG (DECL_TEMPLATE_INFO (fn));
595 /* If we don't have the function body available, we can't inline
597 if (!DECL_SAVED_TREE (fn))
600 /* Don't do recursive inlining, either. We don't record this in
601 DECL_UNINLINABLE; we may be able to inline this function later. */
606 for (i = 0; i < id->fns->elements_used; ++i)
607 if (VARRAY_TREE (id->fns, i) == fn)
611 /* Return the result. */
615 /* If *TP is a CALL_EXPR, replace it with its inline expansion. */
/* NOTE(review): the signature's type lines, local declarations
   (`t', `fn', `expr', `chain', `scope_stmt', `use_stmt',
   `arg_inits', `inlined_body') and several statements — including the
   early `return' lines and the matching pop_srcloc — are missing from
   this excerpt (original numbering jumps).  */
618 expand_call_inline (tp, walk_subtrees, data)
634 /* See what we've got. */
635 id = (inline_data *) data;
638 /* Recurse, but letting recursive invocations know that we are
639 inside the body of a TARGET_EXPR. */
640 if (TREE_CODE (*tp) == TARGET_EXPR)
642 int i, len = first_rtl_op (TARGET_EXPR);
644 /* We're walking our own subtrees. */
647 /* Push *TP on the stack of pending TARGET_EXPRs. */
648 VARRAY_PUSH_TREE (id->target_exprs, *tp);
650 /* Actually walk over them. This loop is the body of
651 walk_trees, omitting the case where the TARGET_EXPR
652 itself is handled. */
653 for (i = 0; i < len; ++i)
656 ++id->in_target_cleanup_p;
657 walk_tree (&TREE_OPERAND (*tp, i), expand_call_inline, data,
660 --id->in_target_cleanup_p;
663 /* We're done with this TARGET_EXPR now. */
664 VARRAY_POP (id->target_exprs);
669 /* From here on, we're only interested in CALL_EXPRs. */
670 if (TREE_CODE (t) != CALL_EXPR)
673 /* First, see if we can figure out what function is being called.
674 If we cannot, then there is no hope of inlining the function. */
675 fn = get_callee_fndecl (t);
679 /* Don't try to inline functions that are not well-suited to
681 if (!inlinable_function_p (fn, id))
684 /* Set the current filename and line number to the function we are
685 inlining so that when we create new _STMT nodes here they get
686 line numbers corresponding to the function we are calling. We
687 wrap the whole inlined body in an EXPR_WITH_FILE_AND_LINE as well
688 because individual statements don't record the filename. */
689 push_srcloc (fn->decl.filename, fn->decl.linenum);
691 /* Build a statement-expression containing code to initialize the
692 arguments, the actual inline expansion of the body, and a label
693 for the return statements within the function to jump to. The
694 type of the statement expression is the return type of the
696 expr = build_min (STMT_EXPR, TREE_TYPE (TREE_TYPE (fn)), NULL_TREE);
698 /* Local declarations will be replaced by their equivalents in this
701 id->decl_map = splay_tree_new (splay_tree_compare_pointers,
704 /* Initialize the parameters. */
705 arg_inits = initialize_inlined_parameters (id, TREE_OPERAND (t, 1), fn);
706 /* Expand any inlined calls in the initializers. Do this before we
707 push FN on the stack of functions we are inlining; we want to
708 inline calls to FN that appear in the initializers for the
710 expand_calls_inline (&arg_inits, id);
711 /* And add them to the tree. */
712 STMT_EXPR_STMT (expr) = chainon (STMT_EXPR_STMT (expr), arg_inits);
714 /* Record the function we are about to inline so that we can avoid
715 recursing into it. */
716 VARRAY_PUSH_TREE (id->fns, fn);
718 /* Return statements in the function body will be replaced by jumps
720 id->ret_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
721 DECL_CONTEXT (id->ret_label) = VARRAY_TREE (id->fns, 0);
723 /* Create a block to put the parameters in. We have to do this
724 after the parameters have been remapped because remapping
725 parameters is different from remapping ordinary variables. */
726 scope_stmt = build_stmt (SCOPE_STMT, DECL_INITIAL (fn));
727 SCOPE_BEGIN_P (scope_stmt) = 1;
728 SCOPE_NO_CLEANUPS_P (scope_stmt) = 1;
729 remap_block (scope_stmt, DECL_ARGUMENTS (fn), id);
730 TREE_CHAIN (scope_stmt) = STMT_EXPR_STMT (expr);
731 STMT_EXPR_STMT (expr) = scope_stmt;
733 /* Tell the debugging backends that this block represents the
734 outermost scope of the inlined function. */
735 if (SCOPE_STMT_BLOCK (scope_stmt))
736 BLOCK_ABSTRACT_ORIGIN (SCOPE_STMT_BLOCK (scope_stmt)) = DECL_ORIGIN (fn);
738 /* Declare the return variable for the function. */
739 STMT_EXPR_STMT (expr)
740 = chainon (STMT_EXPR_STMT (expr),
741 declare_return_variable (id, &use_stmt));
743 /* After we've initialized the parameters, we insert the body of the
745 inlined_body = &STMT_EXPR_STMT (expr);
746 while (*inlined_body)
747 inlined_body = &TREE_CHAIN (*inlined_body);
748 *inlined_body = copy_body (id);
750 /* Close the block for the parameters. */
751 scope_stmt = build_stmt (SCOPE_STMT, DECL_INITIAL (fn));
752 SCOPE_NO_CLEANUPS_P (scope_stmt) = 1;
753 my_friendly_assert (DECL_INITIAL (fn)
754 && TREE_CODE (DECL_INITIAL (fn)) == BLOCK,
756 remap_block (scope_stmt, NULL_TREE, id);
757 STMT_EXPR_STMT (expr)
758 = chainon (STMT_EXPR_STMT (expr), scope_stmt);
760 /* After the body of the function comes the RET_LABEL. This must come
761 before we evaluate the returned value below, because that evaluation
762 may cause RTL to be generated. */
763 STMT_EXPR_STMT (expr)
764 = chainon (STMT_EXPR_STMT (expr),
765 build_stmt (LABEL_STMT, id->ret_label));
767 /* Finally, mention the returned value so that the value of the
768 statement-expression is the returned value of the function. */
769 STMT_EXPR_STMT (expr) = chainon (STMT_EXPR_STMT (expr), use_stmt);
772 splay_tree_delete (id->decl_map);
775 /* The new expression has side-effects if the old one did. */
776 TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (t);
778 /* Replace the call by the inlined body. Wrap it in an
779 EXPR_WITH_FILE_LOCATION so that we'll get debugging line notes
780 pointing to the right place. */
781 chain = TREE_CHAIN (*tp);
782 *tp = build_expr_wfl (expr, DECL_SOURCE_FILE (fn), DECL_SOURCE_LINE (fn),
784 EXPR_WFL_EMIT_LINE_NOTE (*tp) = 1;
785 TREE_CHAIN (*tp) = chain;
788 /* If the value of the new expression is ignored, that's OK. We
789 don't warn about this for CALL_EXPRs, so we shouldn't warn about
790 the equivalent inlined version either. */
793 /* Recurse into the body of the just inlined function. */
794 expand_calls_inline (inlined_body, id);
795 VARRAY_POP (id->fns);
797 /* Don't walk into subtrees. We've already handled them above. */
800 /* Keep iterating. */
804 /* Walk over the entire tree *TP, replacing CALL_EXPRs with inline
805 expansions as appropriate. */
/* NOTE(review): the return-type line and parameter declarations are
   missing from this excerpt.  */
808 expand_calls_inline (tp, id)
812 /* Search through *TP, replacing all calls to inline functions by
813 appropriate equivalents. */
814 walk_tree (tp, expand_call_inline, id, NULL);
817 /* Optimize the body of FN. */
/* NOTE(review): the return-type line, locals (`id', `prev_fn') and the
   ggc_push_context call referenced by the closing comment are missing
   from this excerpt (original numbering jumps).  */
820 optimize_function (fn)
823 /* While in this function, we may choose to go off and compile
824 another function. For example, we might instantiate a function
825 in the hopes of inlining it. Normally, that wouldn't trigger any
826 actual RTL code-generation -- but it will if the template is
827 actually needed. (For example, if its address is taken, or if
828 some other function already refers to the template.) If
829 code-generation occurs, then garbage collection will occur, so we
830 must protect ourselves, just as we do while building up the body
834 /* Expand calls to inline functions. */
835 if (flag_inline_trees)
839 struct saved_scope *s;
842 memset (&id, 0, sizeof (id));
844 /* Don't allow recursion into FN. */
845 VARRAY_TREE_INIT (id.fns, 32, "fns");
846 VARRAY_PUSH_TREE (id.fns, fn);
847 /* Or any functions that aren't finished yet. */
849 if (current_function_decl)
851 VARRAY_PUSH_TREE (id.fns, current_function_decl);
852 prev_fn = current_function_decl;
854 for (s = scope_chain; s; s = s->prev)
855 if (s->function_decl && s->function_decl != prev_fn)
857 VARRAY_PUSH_TREE (id.fns, s->function_decl);
858 prev_fn = s->function_decl;
861 /* Create the stack of TARGET_EXPRs. */
862 VARRAY_TREE_INIT (id.target_exprs, 32, "target_exprs");
864 /* Replace all calls to inline functions with the bodies of those
866 expand_calls_inline (&DECL_SAVED_TREE (fn), &id);
869 VARRAY_FREE (id.fns);
870 VARRAY_FREE (id.target_exprs);
873 /* Undo the call to ggc_push_context above. */
877 /* Called from calls_setjmp_p via walk_tree. */
/* NOTE(review): the return-type line and the `tp' parameter declaration
   are missing from this excerpt; returns *TP (non-NULL stops the walk)
   when it names a function that may return more than once.  */
880 calls_setjmp_r (tp, walk_subtrees, data)
882 int *walk_subtrees ATTRIBUTE_UNUSED;
883 void *data ATTRIBUTE_UNUSED;
885 /* We're only interested in FUNCTION_DECLS. */
886 if (TREE_CODE (*tp) != FUNCTION_DECL)
889 return setjmp_call_p (*tp) ? *tp : NULL_TREE;
892 /* Returns non-zero if FN calls `setjmp' or some other function that
893 can return more than once. This function is conservative; it may
894 occasionally return a non-zero value even when FN does not actually
/* NOTE(review): the signature and the remaining arguments of the
   walk_tree_without_duplicates call are missing from this excerpt.  */
901 return walk_tree_without_duplicates (&DECL_SAVED_TREE (fn),
906 /* FN is a function that has a complete body. Clone the body as
907 necessary. Returns non-zero if there's no longer any need to
908 process the main body. */
/* NOTE(review): the return-type line, locals (`clone', `id', `parm',
   `clone_parm', `parmno', `in_charge', etc.), the early `return 0',
   the inner parameter-loop header and the final `return 1' are missing
   from this excerpt (original numbering jumps).  */
911 maybe_clone_body (fn)
917 /* We only clone constructors and destructors. */
918 if (!DECL_MAYBE_IN_CHARGE_CONSTRUCTOR_P (fn)
919 && !DECL_MAYBE_IN_CHARGE_DESTRUCTOR_P (fn))
922 /* We know that any clones immediately follow FN in the TYPE_METHODS
924 for (clone = TREE_CHAIN (fn);
925 clone && DECL_CLONED_FUNCTION_P (clone);
926 clone = TREE_CHAIN (clone))
932 /* Update CLONE's source position information to match FN's. */
933 DECL_SOURCE_FILE (clone) = DECL_SOURCE_FILE (fn);
934 DECL_SOURCE_LINE (clone) = DECL_SOURCE_LINE (fn);
935 DECL_INLINE (clone) = DECL_INLINE (fn);
936 DECL_THIS_INLINE (clone) = DECL_THIS_INLINE (fn);
937 DECL_COMDAT (clone) = DECL_COMDAT (fn);
938 DECL_WEAK (clone) = DECL_WEAK (fn);
939 DECL_ONE_ONLY (clone) = DECL_ONE_ONLY (fn);
940 DECL_SECTION_NAME (clone) = DECL_SECTION_NAME (fn);
941 DECL_USE_TEMPLATE (clone) = DECL_USE_TEMPLATE (fn);
942 DECL_EXTERNAL (clone) = DECL_EXTERNAL (fn);
943 DECL_INTERFACE_KNOWN (clone) = DECL_INTERFACE_KNOWN (fn);
944 DECL_NOT_REALLY_EXTERN (clone) = DECL_NOT_REALLY_EXTERN (fn);
946 /* Start processing the function. */
947 push_to_top_level ();
948 start_function (NULL_TREE, clone, NULL_TREE, SF_PRE_PARSED);
950 /* Just clone the body, as if we were making an inline call.
951 But, remap the parameters in the callee to the parameters of
952 the caller. If there's an in-charge parameter, map it to an
953 appropriate constant. */
954 memset (&id, 0, sizeof (id));
955 VARRAY_TREE_INIT (id.fns, 2, "fns");
956 VARRAY_PUSH_TREE (id.fns, clone);
957 VARRAY_PUSH_TREE (id.fns, fn);
959 /* Remap the parameters. */
960 id.decl_map = splay_tree_new (splay_tree_compare_pointers,
963 parm = DECL_ARGUMENTS (fn),
964 clone_parm = DECL_ARGUMENTS (clone);
967 parm = TREE_CHAIN (parm))
969 /* Map the in-charge parameter to an appropriate constant. */
970 if (DECL_HAS_IN_CHARGE_PARM_P (fn) && parmno == 1)
973 in_charge = in_charge_arg_for_name (DECL_NAME (clone));
974 splay_tree_insert (id.decl_map,
975 (splay_tree_key) parm,
976 (splay_tree_value) in_charge);
978 /* For a subobject constructor or destructor, the next
979 argument is the VTT parameter. Remap the VTT_PARM
980 from the CLONE to this parameter. */
981 if (DECL_NEEDS_VTT_PARM_P (clone))
983 splay_tree_insert (id.decl_map,
984 (splay_tree_key) DECL_VTT_PARM (fn),
985 (splay_tree_value) clone_parm);
986 splay_tree_insert (id.decl_map,
987 (splay_tree_key) DECL_USE_VTT_PARM (fn),
988 (splay_tree_value) boolean_true_node);
989 clone_parm = TREE_CHAIN (clone_parm);
991 /* Otherwise, map the VTT parameter to `NULL'. */
992 else if (DECL_VTT_PARM (fn))
994 splay_tree_insert (id.decl_map,
995 (splay_tree_key) DECL_VTT_PARM (fn),
996 (splay_tree_value) null_pointer_node);
997 splay_tree_insert (id.decl_map,
998 (splay_tree_key) DECL_USE_VTT_PARM (fn),
999 (splay_tree_value) boolean_false_node);
1002 /* Map other parameters to their equivalents in the cloned
1006 splay_tree_insert (id.decl_map,
1007 (splay_tree_key) parm,
1008 (splay_tree_value) clone_parm);
1009 clone_parm = TREE_CHAIN (clone_parm);
1013 /* Actually copy the body. */
1014 TREE_CHAIN (DECL_SAVED_TREE (clone)) = copy_body (&id);
1017 splay_tree_delete (id.decl_map);
1018 VARRAY_FREE (id.fns);
1020 /* Now, expand this function into RTL, if appropriate. */
1021 function_name_declared_p = 1;
1022 expand_body (finish_function (0));
1023 pop_from_top_level ();
1026 /* We don't need to process the original function any further. */