1 /* Perform optimizations on tree structure.
2 Copyright (C) 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
3 Written by Mark Mitchell (mark@codesourcery.com).
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
27 #include "insn-config.h"
29 #include "integrate.h"
38 o In order to make inlining-on-trees work, we pessimized
39 function-local static constants. In particular, they are now
40 always output, even when not addressed. Fix this by treating
41 function-local static constants just like global static
42 constants; the back-end already knows not to output them if they
45 o Provide heuristics to clamp inlining of recursive template
48 /* Data required for function inlining. */
/* NOTE(review): this listing is missing several lines of this struct
   (among them the `fns' varray, the return label, and the decl map
   fields that the functions below reference as id->fns, id->ret_label
   and id->decl_map) -- confirm against the full source. */
50 typedef struct inline_data
52 /* A stack of the functions we are inlining. For example, if we are
53 compiling `f', which calls `g', which calls `h', and we are
54 inlining the body of `h', the stack will contain, `h', followed
55 by `g', followed by `f'. The first few elements of the stack may
56 contain other functions that we know we should not recurse into,
57 even though they are not directly being inlined. */
59 /* The index of the first element of FNS that really represents an
61 unsigned first_inlined_fn;
62 /* The label to jump to when a return statement is encountered. If
63 this value is NULL, then return statements will simply be
64 remapped as return statements, rather than as jumps. */
66 /* The map from local declarations in the inlined function to
67 equivalents in the function into which it is being inlined. */
69 /* Nonzero if we are currently within the cleanup for a
71 int in_target_cleanup_p;
72 /* A stack of the TARGET_EXPRs that we are currently processing. */
73 varray_type target_exprs;
74 /* A list of the functions current function has inlined. */
75 varray_type inlined_fns;
76 /* The approximate number of statements we have inlined in the
77 current call stack. */
79 /* We use the same mechanism to build clones that we do to perform
80 inlining. However, there are a few places where we need to
81 distinguish between those two situations. This flag is true if
82 we are cloning, rather than inlining. */
84 /* Hash table used to prevent walk_tree from visiting the same node
85 umpteen million times. */
/* Prototypes for the file-local helpers defined below.  PARAMS is the
   pre-C89-compatibility prototype macro used throughout old GCC. */
91 static tree initialize_inlined_parameters PARAMS ((inline_data *, tree, tree));
92 static tree declare_return_variable PARAMS ((inline_data *, tree *));
93 static tree copy_body_r PARAMS ((tree *, int *, void *));
94 static tree copy_body PARAMS ((inline_data *));
95 static tree expand_call_inline PARAMS ((tree *, int *, void *));
96 static void expand_calls_inline PARAMS ((tree *, inline_data *));
97 static int inlinable_function_p PARAMS ((tree, inline_data *));
98 static tree remap_decl PARAMS ((tree, inline_data *));
99 static void remap_block PARAMS ((tree, tree, inline_data *));
100 static void copy_scope_stmt PARAMS ((tree *, int *, inline_data *));
101 static void optimize_inline_calls PARAMS ((tree));
102 static tree calls_setjmp_r PARAMS ((tree *, int *, void *));
103 static void update_cloned_parm PARAMS ((tree, tree));
104 static void dump_function PARAMS ((enum tree_dump_index, tree));
106 /* The approximate number of instructions per statement. This number
107 need not be particularly accurate; it is used only to make
108 decisions about when a function is too big to inline. */
109 #define INSNS_PER_STMT (10)
111 /* Remap DECL during the copying of the BLOCK tree for the function. */
/* Returns the copy of DECL recorded in ID->decl_map, creating and
   recording the copy on first encounter; decls that are not
   non-static locals of the current function are left alone.
   NOTE(review): the parameter declarations, local declarations and
   several braces of this function are missing from this listing. */
114 remap_decl (decl, id)
121 /* We only remap local variables in the current function. */
122 fn = VARRAY_TOP_TREE (id->fns);
123 if (!nonstatic_local_decl_p (decl) || DECL_CONTEXT (decl) != fn)
126 /* See if we have remapped this declaration. */
127 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
128 /* If we didn't already have an equivalent for this declaration,
134 /* Make a copy of the variable or label. */
135 t = copy_decl_for_inlining (decl, fn,
136 VARRAY_TREE (id->fns, 0));
138 /* The decl T could be a dynamic array or other variable size type,
139 in which case some fields need to be remapped because they may
140 contain SAVE_EXPRs. */
141 walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
142 walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);
143 if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE
144 && TYPE_DOMAIN (TREE_TYPE (t)))
/* Copy the array type and its domain so the remapped TYPE_MAX_VALUE
   does not clobber the original type shared with other decls. */
146 TREE_TYPE (t) = copy_node (TREE_TYPE (t));
147 TYPE_DOMAIN (TREE_TYPE (t))
148 = copy_node (TYPE_DOMAIN (TREE_TYPE (t)));
149 walk_tree (&TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (t))),
150 copy_body_r, id, NULL);
153 if (!DECL_NAME (t) && TREE_TYPE (t)
154 && ANON_AGGR_TYPE_P (TREE_TYPE ((t))))
156 /* For a VAR_DECL of anonymous type, we must also copy the
157 member VAR_DECLS here and rechain the
158 DECL_ANON_UNION_ELEMS. */
162 for (src = DECL_ANON_UNION_ELEMS (t); src;
163 src = TREE_CHAIN (src))
165 tree member = remap_decl (TREE_VALUE (src), id);
167 my_friendly_assert (!TREE_PURPOSE (src), 20010529);
168 members = tree_cons (NULL, member, members);
170 DECL_ANON_UNION_ELEMS (t) = nreverse (members);
173 /* Remember it, so that if we encounter this local entity
174 again we can reuse this copy. */
175 n = splay_tree_insert (id->decl_map,
176 (splay_tree_key) decl,
177 (splay_tree_value) t);
/* Return whatever is now recorded in the map for DECL. */
180 return (tree) n->value;
183 /* Copy the SCOPE_STMT_BLOCK associated with SCOPE_STMT to contain
184 remapped versions of the variables therein. And hook the new block
185 into the block-tree. If non-NULL, the DECLS are declarations to
186 add to use instead of the BLOCK_VARS in the old block. */
/* NOTE(review): this listing is missing the parameter/local
   declarations and some braces of this function; the two main arms are
   SCOPE_BEGIN (build + record a new BLOCK) and SCOPE_END (look up the
   BLOCK recorded at the matching begin). */
189 remap_block (scope_stmt, decls, id)
194 /* We cannot do this in the cleanup for a TARGET_EXPR since we do
195 not know whether or not expand_expr will actually write out the
196 code we put there. If it does not, then we'll have more BLOCKs
197 than block-notes, and things will go awry. At some point, we
198 should make the back-end handle BLOCK notes in a tidier way,
199 without requiring a strict correspondence to the block-tree; then
200 this check can go. */
201 if (id->in_target_cleanup_p)
203 SCOPE_STMT_BLOCK (scope_stmt) = NULL_TREE;
207 /* If this is the beginning of a scope, remap the associated BLOCK. */
208 if (SCOPE_BEGIN_P (scope_stmt) && SCOPE_STMT_BLOCK (scope_stmt))
215 /* Make the new block. */
216 old_block = SCOPE_STMT_BLOCK (scope_stmt);
217 new_block = make_node (BLOCK);
218 TREE_USED (new_block) = TREE_USED (old_block);
219 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
220 SCOPE_STMT_BLOCK (scope_stmt) = new_block;
222 /* Remap its variables. */
223 for (old_var = decls ? decls : BLOCK_VARS (old_block);
225 old_var = TREE_CHAIN (old_var))
229 /* Remap the variable. */
230 new_var = remap_decl (old_var, id);
231 /* If we didn't remap this variable, so we can't mess with
232 its TREE_CHAIN. If we remapped this variable to
233 something other than a declaration (say, if we mapped it
234 to a constant), then we must similarly omit any mention
236 if (!new_var || !DECL_P (new_var))
240 TREE_CHAIN (new_var) = BLOCK_VARS (new_block);
241 BLOCK_VARS (new_block) = new_var;
244 /* We put the BLOCK_VARS in reverse order; fix that now. */
245 BLOCK_VARS (new_block) = nreverse (BLOCK_VARS (new_block));
246 fn = VARRAY_TREE (id->fns, 0);
248 /* We're building a clone; DECL_INITIAL is still
249 error_mark_node, and current_binding_level is the parm
251 insert_block (new_block);
254 /* Attach this new block after the DECL_INITIAL block for the
255 function into which this block is being inlined. In
256 rest_of_compilation we will straighten out the BLOCK tree. */
258 if (DECL_INITIAL (fn))
259 first_block = &BLOCK_CHAIN (DECL_INITIAL (fn));
261 first_block = &DECL_INITIAL (fn);
262 BLOCK_CHAIN (new_block) = *first_block;
263 *first_block = new_block;
265 /* Remember the remapped block. */
266 splay_tree_insert (id->decl_map,
267 (splay_tree_key) old_block,
268 (splay_tree_value) new_block);
270 /* If this is the end of a scope, set the SCOPE_STMT_BLOCK to be the
272 else if (SCOPE_END_P (scope_stmt) && SCOPE_STMT_BLOCK (scope_stmt))
276 /* Find this block in the table of remapped things. */
277 n = splay_tree_lookup (id->decl_map,
278 (splay_tree_key) SCOPE_STMT_BLOCK (scope_stmt));
/* The begin-scope statement must already have recorded the block. */
279 my_friendly_assert (n != NULL, 19991203);
280 SCOPE_STMT_BLOCK (scope_stmt) = (tree) n->value;
284 /* Copy the SCOPE_STMT pointed to by TP. */
/* Copies the statement node in place (via copy_tree_r), preserves its
   SCOPE_STMT_BLOCK across the copy, then remaps the block with
   remap_block.  NOTE(review): parameter declarations and the function
   braces are missing from this listing. */
287 copy_scope_stmt (tp, walk_subtrees, id)
294 /* Remember whether or not this statement was nullified. When
295 making a copy, copy_tree_r always sets SCOPE_NULLIFIED_P (and
296 doesn't copy the SCOPE_STMT_BLOCK) to free callers from having to
297 deal with copying BLOCKs if they do not wish to do so. */
298 block = SCOPE_STMT_BLOCK (*tp);
299 /* Copy (and replace) the statement. */
300 copy_tree_r (tp, walk_subtrees, NULL);
301 /* Restore the SCOPE_STMT_BLOCK. */
302 SCOPE_STMT_BLOCK (*tp) = block;
304 /* Remap the associated block. */
305 remap_block (*tp, NULL_TREE, id);
308 /* Called from copy_body via walk_tree. DATA is really an
/* walk_tree callback: rewrites one node of the inlined body --
   RETURN_STMTs become assignment + goto to id->ret_label, local decls
   are remapped, and everything else is copied.  NOTE(review): the
   parameter/local declarations and several lines are missing from
   this listing. */
312 copy_body_r (tp, walk_subtrees, data)
321 id = (inline_data *) data;
322 fn = VARRAY_TOP_TREE (id->fns);
324 /* All automatic variables should have a DECL_CONTEXT indicating
325 what function they come from. */
326 if ((TREE_CODE (*tp) == VAR_DECL || TREE_CODE (*tp) == LABEL_DECL)
327 && DECL_NAMESPACE_SCOPE_P (*tp))
328 my_friendly_assert (DECL_EXTERNAL (*tp) || TREE_STATIC (*tp),
331 /* If this is a RETURN_STMT, change it into an EXPR_STMT and a
332 GOTO_STMT with the RET_LABEL as its target. */
333 if (TREE_CODE (*tp) == RETURN_STMT && id->ret_label)
335 tree return_stmt = *tp;
338 /* Build the GOTO_STMT. */
339 goto_stmt = build_stmt (GOTO_STMT, id->ret_label);
340 TREE_CHAIN (goto_stmt) = TREE_CHAIN (return_stmt);
342 /* If we're returning something, just turn that into an
343 assignment into the equivalent of the original
345 if (RETURN_EXPR (return_stmt))
347 *tp = build_stmt (EXPR_STMT,
348 RETURN_EXPR (return_stmt));
349 STMT_IS_FULL_EXPR_P (*tp) = 1;
350 /* And then jump to the end of the function. */
351 TREE_CHAIN (*tp) = goto_stmt;
353 /* If we're not returning anything just do the jump. */
357 /* Local variables and labels need to be replaced by equivalent
358 variables. We don't want to copy static variables; there's only
359 one of those, no matter how many times we inline the containing
361 else if (nonstatic_local_decl_p (*tp) && DECL_CONTEXT (*tp) == fn)
365 /* Remap the declaration. */
366 new_decl = remap_decl (*tp, id);
367 my_friendly_assert (new_decl != NULL_TREE, 19991203);
368 /* Replace this variable with the copy. */
369 STRIP_TYPE_NOPS (new_decl);
/* A non-static local belonging to neither the inlined function nor
   the destination function should never be encountered here. */
372 else if (nonstatic_local_decl_p (*tp)
373 && DECL_CONTEXT (*tp) != VARRAY_TREE (id->fns, 0))
374 my_friendly_abort (0);
375 else if (TREE_CODE (*tp) == SAVE_EXPR)
376 remap_save_expr (tp, id->decl_map, VARRAY_TREE (id->fns, 0),
378 else if (TREE_CODE (*tp) == UNSAVE_EXPR)
379 /* UNSAVE_EXPRs should not be generated until expansion time. */
380 my_friendly_abort (19991113);
381 /* For a SCOPE_STMT, we must copy the associated block so that we
382 can write out debugging information for the inlined variables. */
383 else if (TREE_CODE (*tp) == SCOPE_STMT && !id->in_target_cleanup_p)
384 copy_scope_stmt (tp, walk_subtrees, id);
385 /* Otherwise, just copy the node. Note that copy_tree_r already
386 knows not to copy VAR_DECLs, etc., so this is safe. */
389 copy_tree_r (tp, walk_subtrees, NULL);
391 /* The copied TARGET_EXPR has never been expanded, even if the
392 original node was expanded already. */
393 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
395 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
396 TREE_OPERAND (*tp, 3) = NULL_TREE;
398 else if (TREE_CODE (*tp) == MODIFY_EXPR
399 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
400 && nonstatic_local_decl_p (TREE_OPERAND (*tp, 0))
401 && DECL_CONTEXT (TREE_OPERAND (*tp, 0)) == fn)
403 /* Some assignments VAR = VAR; don't generate any rtl code
404 and thus don't count as variable modification. Avoid
405 keeping bogosities like 0 = 0. */
406 tree decl = TREE_OPERAND (*tp, 0), value;
409 n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
412 value = (tree) n->value;
413 STRIP_TYPE_NOPS (value);
414 if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
420 /* Keep iterating. */
424 /* Make a copy of the body of FN so that it can be inserted inline in
/* Copies DECL_SAVED_TREE of the function on top of id->fns, rewriting
   every node through copy_body_r.  NOTE(review): the function header,
   local declarations and return statement are missing from this
   listing. */
433 body = DECL_SAVED_TREE (VARRAY_TOP_TREE (id->fns));
434 walk_tree (&body, copy_body_r, id, NULL);
439 /* Generate code to initialize the parameters of the function at the
440 top of the stack in ID from the ARGS (presented as a TREE_LIST). */
/* Returns a chain of statements (in source order) that declare and
   initialize a VAR_DECL stand-in for each PARM_DECL of FN; read-only,
   non-addressable parameters bound to constant arguments are mapped
   directly to the argument value instead, avoiding the temporary.
   NOTE(review): parameter/local declarations and some lines are
   missing from this listing. */
443 initialize_inlined_parameters (id, args, fn)
453 /* Figure out what the parameters are. */
454 parms = DECL_ARGUMENTS (fn);
456 /* Start with no initializations whatsoever. */
457 init_stmts = NULL_TREE;
459 /* Loop through the parameter declarations, replacing each with an
460 equivalent VAR_DECL, appropriately initialized. */
461 for (p = parms, a = args; p; a = TREE_CHAIN (a), p = TREE_CHAIN (p))
467 /* Find the initializer. */
468 value = TREE_VALUE (a);
469 /* If the parameter is never assigned to, we may not need to
470 create a new variable here at all. Instead, we may be able
471 to just use the argument value. */
472 if (TREE_READONLY (p)
473 && !TREE_ADDRESSABLE (p)
474 && !TREE_SIDE_EFFECTS (value))
476 /* Simplify the value, if possible. */
477 value = fold (decl_constant_value (value));
479 /* We can't risk substituting complex expressions. They
480 might contain variables that will be assigned to later.
481 Theoretically, we could check the expression to see if
482 all of the variables that determine its value are
483 read-only, but we don't bother. */
484 if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
486 /* If this is a declaration, wrap it a NOP_EXPR so that
487 we don't try to put the VALUE on the list of
490 value = build1 (NOP_EXPR, TREE_TYPE (value), value);
492 splay_tree_insert (id->decl_map,
494 (splay_tree_value) value);
499 /* Make an equivalent VAR_DECL. */
500 var = copy_decl_for_inlining (p, fn, VARRAY_TREE (id->fns, 0));
501 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
502 that way, when the PARM_DECL is encountered, it will be
503 automatically replaced by the VAR_DECL. */
504 splay_tree_insert (id->decl_map,
506 (splay_tree_value) var);
508 /* Declare this new variable. */
509 init_stmt = build_stmt (DECL_STMT, var);
510 TREE_CHAIN (init_stmt) = init_stmts;
511 init_stmts = init_stmt;
513 /* Initialize this VAR_DECL from the equivalent argument. If
514 the argument is an object, created via a constructor or copy,
515 this will not result in an extra copy: the TARGET_EXPR
516 representing the argument will be bound to VAR, and the
517 object will be constructed in VAR. */
518 if (! TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
519 DECL_INITIAL (var) = value;
522 /* Even if P was TREE_READONLY, the new VAR should not be.
523 In the original code, we would have constructed a
524 temporary, and then the function body would have never
525 changed the value of P. However, now, we will be
526 constructing VAR directly. The constructor body may
527 change its value multiple times as it is being
528 constructed. Therefore, it must not be TREE_READONLY;
529 the back-end assumes that TREE_READONLY variable is
530 assigned to only once. */
531 TREE_READONLY (var) = 0;
533 /* Build a run-time initialization. */
534 init_stmt = build_stmt (EXPR_STMT,
535 build (INIT_EXPR, TREE_TYPE (p),
537 /* Add this initialization to the list. Note that we want the
538 declaration *after* the initialization because we are going
539 to reverse all the initialization statements below. */
540 TREE_CHAIN (init_stmt) = init_stmts;
541 init_stmts = init_stmt;
545 /* The initialization statements have been built up in reverse
546 order. Straighten them out now. */
547 return nreverse (init_stmts);
550 /* Declare a return variable to replace the RESULT_DECL for the
551 function we are calling. An appropriate DECL_STMT is returned.
552 The USE_STMT is filled in to contain a use of the declaration to
553 indicate the return value of the function. */
/* Returns NULL_TREE for void functions; for aggregate returns the
   "variable" is the slot of the enclosing TARGET_EXPR and no DECL_STMT
   is emitted.  NOTE(review): some declarations and lines are missing
   from this listing. */
556 declare_return_variable (id, use_stmt)
557 struct inline_data *id;
560 tree fn = VARRAY_TOP_TREE (id->fns);
561 tree result = DECL_RESULT (fn);
563 int aggregate_return_p;
565 /* We don't need to do anything for functions that don't return
567 if (!result || VOID_TYPE_P (TREE_TYPE (result)))
569 *use_stmt = NULL_TREE;
573 /* Figure out whether or not FN returns an aggregate. */
574 aggregate_return_p = IS_AGGR_TYPE (TREE_TYPE (result));
576 /* If FN returns an aggregate then the caller will always create the
577 temporary (using a TARGET_EXPR) and the call will be the
578 initializing expression for the TARGET_EXPR. If we were just to
579 create a new VAR_DECL here, then the result of this function
580 would be copied (bitwise) into the variable initialized by the
581 TARGET_EXPR. That's incorrect, so we must transform any
582 references to the RESULT into references to the target. */
583 if (aggregate_return_p)
585 my_friendly_assert (VARRAY_ACTIVE_SIZE (id->target_exprs) != 0,
587 var = TREE_OPERAND (VARRAY_TOP_TREE (id->target_exprs), 0);
589 (same_type_ignoring_top_level_qualifiers_p (TREE_TYPE (var),
593 /* Otherwise, make an appropriate copy. */
595 var = copy_decl_for_inlining (result, fn, VARRAY_TREE (id->fns, 0));
597 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
598 way, when the RESULT_DECL is encountered, it will be
599 automatically replaced by the VAR_DECL. */
600 splay_tree_insert (id->decl_map,
601 (splay_tree_key) result,
602 (splay_tree_value) var);
604 /* Build the USE_STMT. */
605 *use_stmt = build_stmt (EXPR_STMT, var);
607 /* Build the declaration statement if FN does not return an
609 if (!aggregate_return_p)
610 return build_stmt (DECL_STMT, var);
611 /* If FN does return an aggregate, there's no need to declare the
612 return variable; we're using a variable in our caller's frame. */
617 /* Returns non-zero if FN is a function that can be inlined. */
/* Applies the permanent criteria first (inline keyword, varargs, size
   vs. MAX_INLINE_INSNS) and caches the verdict in DECL_UNINLINABLE;
   the context-dependent checks (cumulative inlining budget, recursion,
   pending template instantiation) are re-evaluated every call.
   NOTE(review): parameter/local declarations and some lines are
   missing from this listing. */
620 inlinable_function_p (fn, id)
626 /* If we've already decided this function shouldn't be inlined,
627 there's no need to check again. */
628 if (DECL_UNINLINABLE (fn))
631 /* Assume it is not inlinable. */
634 /* If we're not inlining things, then nothing is inlinable. */
635 if (!flag_inline_trees)
637 /* If the function was not declared `inline', then we don't inline
639 else if (!DECL_INLINE (fn))
641 /* We can't inline varargs functions. */
642 else if (varargs_function_p (fn))
644 /* We can't inline functions that are too big. */
645 else if (DECL_NUM_STMTS (fn) * INSNS_PER_STMT > MAX_INLINE_INSNS)
647 /* All is well. We can inline this function. Traditionally, GCC
648 has refused to inline functions using alloca, or functions whose
649 values are returned in a PARALLEL, and a few other such obscure
650 conditions. We are not equally constrained at the tree level. */
654 /* Squirrel away the result so that we don't have to check again. */
655 DECL_UNINLINABLE (fn) = !inlinable;
657 /* Even if this function is not itself too big to inline, it might
658 be that we've done so much inlining already that we don't want to
659 risk inlining any more. */
660 if ((DECL_NUM_STMTS (fn) + id->inlined_stmts) * INSNS_PER_STMT
664 /* We can inline a template instantiation only if it's fully
667 && DECL_TEMPLATE_INFO (fn)
668 && TI_PENDING_TEMPLATE_FLAG (DECL_TEMPLATE_INFO (fn)))
670 fn = instantiate_decl (fn, /*defer_ok=*/0);
671 inlinable = !TI_PENDING_TEMPLATE_FLAG (DECL_TEMPLATE_INFO (fn));
674 /* If we don't have the function body available, we can't inline
676 if (!DECL_SAVED_TREE (fn))
679 /* Don't do recursive inlining, either. We don't record this in
680 DECL_UNINLINABLE; we may be able to inline this function later. */
685 for (i = 0; i < VARRAY_ACTIVE_SIZE (id->fns); ++i)
686 if (VARRAY_TREE (id->fns, i) == fn)
/* Also refuse if FN is already known to (transitively) inline the
   function we started from -- that, too, would be mutual recursion. */
689 if (inlinable && DECL_LANG_SPECIFIC (fn) && DECL_INLINED_FNS (fn))
692 tree inlined_fns = DECL_INLINED_FNS (fn);
694 for (j = 0; j < TREE_VEC_LENGTH (inlined_fns); ++j)
695 if (TREE_VEC_ELT (inlined_fns, j) == VARRAY_TREE (id->fns, 0))
700 /* Return the result. */
704 /* If *TP is a CALL_EXPR, replace it with its inline expansion. */
/* walk_tree callback.  Builds a STMT_EXPR containing: parameter
   initializations, an opening SCOPE_STMT, the copied body (with
   returns redirected to a fresh ret_label), the closing SCOPE_STMT,
   the label, and a final use of the return variable; then replaces the
   CALL_EXPR with that expression wrapped in an EXPR_WITH_FILE_LOCATION.
   NOTE(review): many declarations and lines are missing from this
   listing. */
707 expand_call_inline (tp, walk_subtrees, data)
723 /* See what we've got. */
724 id = (inline_data *) data;
727 /* Recurse, but letting recursive invocations know that we are
728 inside the body of a TARGET_EXPR. */
729 if (TREE_CODE (*tp) == TARGET_EXPR)
731 int i, len = first_rtl_op (TARGET_EXPR);
733 /* We're walking our own subtrees. */
736 /* Push *TP on the stack of pending TARGET_EXPRs. */
737 VARRAY_PUSH_TREE (id->target_exprs, *tp);
739 /* Actually walk over them. This loop is the body of
740 walk_trees, omitting the case where the TARGET_EXPR
741 itself is handled. */
742 for (i = 0; i < len; ++i)
/* The in_target_cleanup_p counter brackets each operand walk; see
   remap_block for why cleanups must not get remapped BLOCKs. */
745 ++id->in_target_cleanup_p;
746 walk_tree (&TREE_OPERAND (*tp, i), expand_call_inline, data,
749 --id->in_target_cleanup_p;
752 /* We're done with this TARGET_EXPR now. */
753 VARRAY_POP (id->target_exprs);
759 /* Because types were not copied in copy_body, CALL_EXPRs beneath
760 them should not be expanded. This can happen if the type is a
761 dynamic array type, for example. */
764 /* From here on, we're only interested in CALL_EXPRs. */
765 if (TREE_CODE (t) != CALL_EXPR)
768 /* First, see if we can figure out what function is being called.
769 If we cannot, then there is no hope of inlining the function. */
770 fn = get_callee_fndecl (t);
774 /* Don't try to inline functions that are not well-suited to
776 if (!inlinable_function_p (fn, id))
779 /* Set the current filename and line number to the function we are
780 inlining so that when we create new _STMT nodes here they get
781 line numbers corresponding to the function we are calling. We
782 wrap the whole inlined body in an EXPR_WITH_FILE_AND_LINE as well
783 because individual statements don't record the filename. */
784 push_srcloc (fn->decl.filename, fn->decl.linenum);
786 /* Build a statement-expression containing code to initialize the
787 arguments, the actual inline expansion of the body, and a label
788 for the return statements within the function to jump to. The
789 type of the statement expression is the return type of the
791 expr = build_min (STMT_EXPR, TREE_TYPE (TREE_TYPE (fn)), NULL_TREE);
793 /* Local declarations will be replaced by their equivalents in this
796 id->decl_map = splay_tree_new (splay_tree_compare_pointers,
799 /* Initialize the parameters. */
800 arg_inits = initialize_inlined_parameters (id, TREE_OPERAND (t, 1), fn);
801 /* Expand any inlined calls in the initializers. Do this before we
802 push FN on the stack of functions we are inlining; we want to
803 inline calls to FN that appear in the initializers for the
805 expand_calls_inline (&arg_inits, id);
806 /* And add them to the tree. */
807 STMT_EXPR_STMT (expr) = chainon (STMT_EXPR_STMT (expr), arg_inits);
809 /* Record the function we are about to inline so that we can avoid
810 recursing into it. */
811 VARRAY_PUSH_TREE (id->fns, fn);
813 /* Record the function we are about to inline if optimize_function
814 has not been called on it yet and we don't have it in the list. */
815 if (DECL_LANG_SPECIFIC (fn) && !DECL_INLINED_FNS (fn))
819 for (i = VARRAY_ACTIVE_SIZE (id->inlined_fns) - 1; i >= 0; i--)
820 if (VARRAY_TREE (id->inlined_fns, i) == fn)
823 VARRAY_PUSH_TREE (id->inlined_fns, fn);
826 /* Return statements in the function body will be replaced by jumps
828 id->ret_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
829 DECL_CONTEXT (id->ret_label) = VARRAY_TREE (id->fns, 0);
831 /* Create a block to put the parameters in. We have to do this
832 after the parameters have been remapped because remapping
833 parameters is different from remapping ordinary variables. */
834 scope_stmt = build_stmt (SCOPE_STMT, DECL_INITIAL (fn));
835 SCOPE_BEGIN_P (scope_stmt) = 1;
836 SCOPE_NO_CLEANUPS_P (scope_stmt) = 1;
837 remap_block (scope_stmt, DECL_ARGUMENTS (fn), id);
838 TREE_CHAIN (scope_stmt) = STMT_EXPR_STMT (expr);
839 STMT_EXPR_STMT (expr) = scope_stmt;
841 /* Tell the debugging backends that this block represents the
842 outermost scope of the inlined function. */
843 if (SCOPE_STMT_BLOCK (scope_stmt))
844 BLOCK_ABSTRACT_ORIGIN (SCOPE_STMT_BLOCK (scope_stmt)) = DECL_ORIGIN (fn);
846 /* Declare the return variable for the function. */
847 STMT_EXPR_STMT (expr)
848 = chainon (STMT_EXPR_STMT (expr),
849 declare_return_variable (id, &use_stmt));
851 /* After we've initialized the parameters, we insert the body of the
853 inlined_body = &STMT_EXPR_STMT (expr);
854 while (*inlined_body)
855 inlined_body = &TREE_CHAIN (*inlined_body);
856 *inlined_body = copy_body (id);
858 /* Close the block for the parameters. */
859 scope_stmt = build_stmt (SCOPE_STMT, DECL_INITIAL (fn));
860 SCOPE_NO_CLEANUPS_P (scope_stmt) = 1;
861 my_friendly_assert (DECL_INITIAL (fn)
862 && TREE_CODE (DECL_INITIAL (fn)) == BLOCK,
864 remap_block (scope_stmt, NULL_TREE, id);
865 STMT_EXPR_STMT (expr)
866 = chainon (STMT_EXPR_STMT (expr), scope_stmt);
868 /* After the body of the function comes the RET_LABEL. This must come
869 before we evaluate the returned value below, because that evalulation
870 may cause RTL to be generated. */
871 STMT_EXPR_STMT (expr)
872 = chainon (STMT_EXPR_STMT (expr),
873 build_stmt (LABEL_STMT, id->ret_label));
875 /* Finally, mention the returned value so that the value of the
876 statement-expression is the returned value of the function. */
877 STMT_EXPR_STMT (expr) = chainon (STMT_EXPR_STMT (expr), use_stmt);
/* The per-call decl map is dead now; expand_call_inline creates a
   fresh one for each call it inlines. */
880 splay_tree_delete (id->decl_map);
883 /* The new expression has side-effects if the old one did. */
884 TREE_SIDE_EFFECTS (expr) = TREE_SIDE_EFFECTS (t);
886 /* Replace the call by the inlined body. Wrap it in an
887 EXPR_WITH_FILE_LOCATION so that we'll get debugging line notes
888 pointing to the right place. */
889 chain = TREE_CHAIN (*tp);
890 *tp = build_expr_wfl (expr, DECL_SOURCE_FILE (fn), DECL_SOURCE_LINE (fn),
892 EXPR_WFL_EMIT_LINE_NOTE (*tp) = 1;
893 TREE_CHAIN (*tp) = chain;
896 /* If the value of the new expression is ignored, that's OK. We
897 don't warn about this for CALL_EXPRs, so we shouldn't warn about
898 the equivalent inlined version either. */
901 /* Our function now has more statements than it did before. */
902 DECL_NUM_STMTS (VARRAY_TREE (id->fns, 0)) += DECL_NUM_STMTS (fn);
903 id->inlined_stmts += DECL_NUM_STMTS (fn);
905 /* Recurse into the body of the just inlined function. */
906 expand_calls_inline (inlined_body, id);
907 VARRAY_POP (id->fns);
909 /* If we've returned to the top level, clear out the record of how
910 much inlining has been done. */
911 if (VARRAY_ACTIVE_SIZE (id->fns) == id->first_inlined_fn)
912 id->inlined_stmts = 0;
914 /* Don't walk into subtrees. We've already handled them above. */
917 /* Keep iterating. */
921 /* Walk over the entire tree *TP, replacing CALL_EXPRs with inline
922 expansions as appropriate. */
/* Thin driver around walk_tree using ID->tree_pruner as the
   no-duplicates hash table.  NOTE(review): the parameter declarations
   are missing from this listing. */
925 expand_calls_inline (tp, id)
929 /* Search through *TP, replacing all calls to inline functions by
930 appropriate equivalents. Use walk_tree in no-duplicates mode
931 to avoid exponential time complexity. (We can't just use
932 walk_tree_without_duplicates, because of the special TARGET_EXPR
933 handling in expand_calls. The hash table is set up in
934 optimize_function. */
935 walk_tree (tp, expand_call_inline, id, id->tree_pruner);
938 /* Expand calls to inline functions in the body of FN. */
/* Sets up an inline_data for FN (seeding id.fns with FN and every
   still-open function on the scope chain so we never recurse into
   them), runs expand_calls_inline over FN's saved tree, records the
   set of functions inlined in DECL_INLINED_FNS, and frees all the
   per-run data.  NOTE(review): local declarations and some lines are
   missing from this listing. */
941 optimize_inline_calls (fn)
946 struct saved_scope *s;
949 memset (&id, 0, sizeof (id));
951 /* Don't allow recursion into FN. */
952 VARRAY_TREE_INIT (id.fns, 32, "fns");
953 VARRAY_PUSH_TREE (id.fns, fn);
954 /* Or any functions that aren't finished yet. */
956 if (current_function_decl)
958 VARRAY_PUSH_TREE (id.fns, current_function_decl);
959 prev_fn = current_function_decl;
961 for (s = scope_chain; s; s = s->prev)
962 if (s->function_decl && s->function_decl != prev_fn)
964 VARRAY_PUSH_TREE (id.fns, s->function_decl);
965 prev_fn = s->function_decl;
968 /* Create the stack of TARGET_EXPRs. */
969 VARRAY_TREE_INIT (id.target_exprs, 32, "target_exprs");
971 /* Create the list of functions this call will inline. */
972 VARRAY_TREE_INIT (id.inlined_fns, 32, "inlined_fns");
974 /* Keep track of the low-water mark, i.e., the point where the first
975 real inlining is represented in ID.FNS. */
976 id.first_inlined_fn = VARRAY_ACTIVE_SIZE (id.fns);
978 /* Replace all calls to inline functions with the bodies of those
980 id.tree_pruner = htab_create (37, htab_hash_pointer,
981 htab_eq_pointer, NULL);
982 expand_calls_inline (&DECL_SAVED_TREE (fn), &id);
985 htab_delete (id.tree_pruner);
986 VARRAY_FREE (id.fns);
987 VARRAY_FREE (id.target_exprs);
988 if (DECL_LANG_SPECIFIC (fn))
/* Snapshot the inlined-function list into a TREE_VEC so that
   inlinable_function_p can later detect mutual recursion via
   DECL_INLINED_FNS. */
990 tree ifn = make_tree_vec (VARRAY_ACTIVE_SIZE (id.inlined_fns));
992 memcpy (&TREE_VEC_ELT (ifn, 0), &VARRAY_TREE (id.inlined_fns, 0),
993 VARRAY_ACTIVE_SIZE (id.inlined_fns) * sizeof (tree));
994 DECL_INLINED_FNS (fn) = ifn;
996 VARRAY_FREE (id.inlined_fns);
998 dump_function (TDI_inlined, fn);
1001 /* Optimize the body of FN. */
/* Dumps the original tree, runs tree inlining (unless FN is a thunk),
   and dumps the optimized tree.  NOTE(review): some lines, including
   the ggc_push_context call referred to below, are missing from this
   listing. */
1004 optimize_function (fn)
1007 dump_function (TDI_original, fn);
1009 /* While in this function, we may choose to go off and compile
1010 another function. For example, we might instantiate a function
1011 in the hopes of inlining it. Normally, that wouldn't trigger any
1012 actual RTL code-generation -- but it will if the template is
1013 actually needed. (For example, if it's address is taken, or if
1014 some other function already refers to the template.) If
1015 code-generation occurs, then garbage collection will occur, so we
1016 must protect ourselves, just as we do while building up the body
1020 if (flag_inline_trees
1021 /* We do not inline thunks, as (a) the backend tries to optimize
1022 the call to the thunkee, (b) tree based inlining breaks that
1023 optimization, (c) virtual functions are rarely inlineable,
1024 and (d) ASM_OUTPUT_MI_THUNK is there to DTRT anyway. */
1025 && !DECL_THUNK_P (fn))
1026 optimize_inline_calls (fn);
1028 /* Undo the call to ggc_push_context above. */
1031 dump_function (TDI_optimized, fn);
1034 /* Called from calls_setjmp_p via walk_tree. */
/* Returns *TP (stopping the walk) when *TP is a FUNCTION_DECL that
   setjmp_call_p classifies as a returns-twice function; otherwise
   NULL_TREE to keep walking. */
1037 calls_setjmp_r (tp, walk_subtrees, data)
1039 int *walk_subtrees ATTRIBUTE_UNUSED;
1040 void *data ATTRIBUTE_UNUSED;
1042 /* We're only interested in FUNCTION_DECLS. */
1043 if (TREE_CODE (*tp) != FUNCTION_DECL)
1046 return setjmp_call_p (*tp) ? *tp : NULL_TREE;
1049 /* Returns non-zero if FN calls `setjmp' or some other function that
1050 can return more than once. This function is conservative; it may
1051 occasionally return a non-zero value even when FN does not actually
/* NOTE(review): the function header line and the tail of this comment
   are missing from this listing; the visible body walks FN's saved
   tree with calls_setjmp_r. */
1058 return walk_tree_without_duplicates (&DECL_SAVED_TREE (fn),
1063 /* CLONED_PARM is a copy of CLONE, generated for a cloned constructor
1064 or destructor. Update it to ensure that the source-position for
1065 the cloned parameter matches that for the original, and that the
1066 debugging generation code will be able to find the original PARM. */
/* NOTE(review): the parameter declarations and braces are missing
   from this listing; the body copies addressability, constness,
   usedness, name and source position from PARM onto CLONED_PARM and
   points DECL_ABSTRACT_ORIGIN back at PARM. */
1069 update_cloned_parm (parm, cloned_parm)
1073 DECL_ABSTRACT_ORIGIN (cloned_parm) = parm;
1075 /* We may have taken its address. */
1076 TREE_ADDRESSABLE (cloned_parm) = TREE_ADDRESSABLE (parm);
1078 /* The definition might have different constness. */
1079 TREE_READONLY (cloned_parm) = TREE_READONLY (parm);
1081 TREE_USED (cloned_parm) = TREE_USED (parm);
1083 /* The name may have changed from the declaration. */
1084 DECL_NAME (cloned_parm) = DECL_NAME (parm);
1085 DECL_SOURCE_FILE (cloned_parm) = DECL_SOURCE_FILE (parm);
1086 DECL_SOURCE_LINE (cloned_parm) = DECL_SOURCE_LINE (parm);
1089 /* FN is a function that has a complete body. Clone the body as
1090 necessary. Returns non-zero if there's no longer any need to
1091 process the main body. */
1094 maybe_clone_body (fn)
1101 /* We only clone constructors and destructors. */
1102 if (!DECL_MAYBE_IN_CHARGE_CONSTRUCTOR_P (fn)
1103 && !DECL_MAYBE_IN_CHARGE_DESTRUCTOR_P (fn))
1106 /* Emit the DWARF1 abstract instance. */
1107 note_deferral_of_defined_inline_function (fn);
1109 /* We know that any clones immediately follow FN in the TYPE_METHODS
1111 for (clone = TREE_CHAIN (fn);
1112 clone && DECL_CLONED_FUNCTION_P (clone);
1113 clone = TREE_CHAIN (clone), first = 0)
1119 /* Update CLONE's source position information to match FN's. */
1120 DECL_SOURCE_FILE (clone) = DECL_SOURCE_FILE (fn);
1121 DECL_SOURCE_LINE (clone) = DECL_SOURCE_LINE (fn);
1122 DECL_INLINE (clone) = DECL_INLINE (fn);
1123 DECL_DECLARED_INLINE_P (clone) = DECL_DECLARED_INLINE_P (fn);
/* Copy linkage and section attributes so the clone is emitted the
   same way as the function it was cloned from.  */
1124 DECL_COMDAT (clone) = DECL_COMDAT (fn);
1125 DECL_WEAK (clone) = DECL_WEAK (fn);
1126 DECL_ONE_ONLY (clone) = DECL_ONE_ONLY (fn);
1127 DECL_SECTION_NAME (clone) = DECL_SECTION_NAME (fn);
1128 DECL_USE_TEMPLATE (clone) = DECL_USE_TEMPLATE (fn);
1129 DECL_EXTERNAL (clone) = DECL_EXTERNAL (fn);
1130 DECL_INTERFACE_KNOWN (clone) = DECL_INTERFACE_KNOWN (fn);
1131 DECL_NOT_REALLY_EXTERN (clone) = DECL_NOT_REALLY_EXTERN (fn);
1132 TREE_PUBLIC (clone) = TREE_PUBLIC (fn);
1134 /* Adjust the parameter names and locations. */
1135 parm = DECL_ARGUMENTS (fn);
1136 clone_parm = DECL_ARGUMENTS (clone);
1137 /* Update the `this' parameter, which is always first.
1138 Sometimes, we end up updating the `this' parameter twice because
1139 we process it again in the loop below. That is harmless. */
1140 update_cloned_parm (parm, clone_parm);
/* Skip past the artificial in-charge and VTT parameters; they do not
   pair up one-for-one between FN and CLONE.  */
1141 if (DECL_HAS_IN_CHARGE_PARM_P (fn))
1142 parm = TREE_CHAIN (parm);
1143 if (DECL_HAS_VTT_PARM_P (fn))
1144 parm = TREE_CHAIN (parm);
1145 if (DECL_HAS_VTT_PARM_P (clone))
1146 clone_parm = TREE_CHAIN (clone_parm);
1148 parm = TREE_CHAIN (parm), clone_parm = TREE_CHAIN (clone_parm))
1150 /* Update this parameter. */
1151 update_cloned_parm (parm, clone_parm);
1152 /* We should only give unused information for one clone. */
1154 TREE_USED (clone_parm) = 1;
1157 /* Start processing the function. */
1158 push_to_top_level ();
1159 start_function (NULL_TREE, clone, NULL_TREE, SF_PRE_PARSED);
1161 /* Just clone the body, as if we were making an inline call.
1162 But, remap the parameters in the callee to the parameters of
1163 caller. If there's an in-charge parameter, map it to an
1164 appropriate constant. */
1165 memset (&id, 0, sizeof (id));
/* id.fns is the inlining stack: CLONE (the "caller" receiving the
   body) followed by FN (the "callee" being copied).  */
1166 VARRAY_TREE_INIT (id.fns, 2, "fns");
1167 VARRAY_PUSH_TREE (id.fns, clone);
1168 VARRAY_PUSH_TREE (id.fns, fn);
1170 /* Cloning is treated slightly differently from inlining. Set
1171 CLONING_P so that it's clear which operation we're performing. */
1172 id.cloning_p = true;
1174 /* Remap the parameters. */
1175 id.decl_map = splay_tree_new (splay_tree_compare_pointers,
1178 parm = DECL_ARGUMENTS (fn),
1179 clone_parm = DECL_ARGUMENTS (clone);
1182 parm = TREE_CHAIN (parm))
1184 /* Map the in-charge parameter to an appropriate constant. */
1185 if (DECL_HAS_IN_CHARGE_PARM_P (fn) && parmno == 1)
/* The clone's mangled name determines which in-charge constant
   this clone stands for.  */
1188 in_charge = in_charge_arg_for_name (DECL_NAME (clone));
1189 splay_tree_insert (id.decl_map,
1190 (splay_tree_key) parm,
1191 (splay_tree_value) in_charge);
1193 else if (DECL_ARTIFICIAL (parm)
1194 && DECL_NAME (parm) == vtt_parm_identifier)
1196 /* For a subobject constructor or destructor, the next
1197 argument is the VTT parameter. Remap the VTT_PARM
1198 from the CLONE to this parameter. */
1199 if (DECL_HAS_VTT_PARM_P (clone))
1201 DECL_ABSTRACT_ORIGIN (clone_parm) = parm;
1202 splay_tree_insert (id.decl_map,
1203 (splay_tree_key) parm,
1204 (splay_tree_value) clone_parm);
1205 clone_parm = TREE_CHAIN (clone_parm);
1207 /* Otherwise, map the VTT parameter to `NULL'. */
1210 splay_tree_insert (id.decl_map,
1211 (splay_tree_key) parm,
1212 (splay_tree_value) null_pointer_node);
1215 /* Map other parameters to their equivalents in the cloned
1219 splay_tree_insert (id.decl_map,
1220 (splay_tree_key) parm,
1221 (splay_tree_value) clone_parm);
1222 clone_parm = TREE_CHAIN (clone_parm);
1226 /* Actually copy the body. */
1227 TREE_CHAIN (DECL_SAVED_TREE (clone)) = copy_body (&id);
1229 /* There are as many statements in the clone as in the
1231 DECL_NUM_STMTS (clone) = DECL_NUM_STMTS (fn);
/* Release the per-clone remapping state before the next iteration.  */
1234 splay_tree_delete (id.decl_map);
1235 VARRAY_FREE (id.fns);
1237 /* Now, expand this function into RTL, if appropriate. */
1238 finish_function (0);
/* Let debug info treat the clone's outermost block as an instance of
   FN's.  */
1239 BLOCK_ABSTRACT_ORIGIN (DECL_INITIAL (clone)) = DECL_INITIAL (fn);
1240 expand_body (clone);
1241 pop_from_top_level ();
1244 /* We don't need to process the original function any further. */
1248 /* Dump FUNCTION_DECL FN as tree dump PHASE. */
1251 dump_function (phase, fn)
1252 enum tree_dump_index phase;
/* Open the dump stream for PHASE; FLAGS receives the dump modifiers
   requested for this phase.  */
1258 stream = dump_begin (phase, &flags);
/* Write a banner identifying the function by its source name and its
   assembler (mangled) name, plus the switch that enabled the dump.  */
1261 fprintf (stream, "\n;; Function %s",
1262 decl_as_string (fn, TFF_DECL_SPECIFIERS));
1263 fprintf (stream, " (%s)\n",
1264 decl_as_string (DECL_ASSEMBLER_NAME (fn), 0));
1265 fprintf (stream, ";; enabled by -%s\n", dump_flag_name (phase));
1266 fprintf (stream, "\n");
/* Dump the tree for FN itself, honoring any user-supplied FLAGS.  */
1268 dump_node (fn, TDF_SLIM | flags, stream);
1269 dump_end (phase, stream);