/* Tree inlining.
   Copyright 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
   Contributed by Alexandre Oliva <aoliva@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "tree.h"
#include "tree-inline.h"
#include "rtl.h"
#include "expr.h"
#include "flags.h"
#include "params.h"
#include "input.h"
#include "insn-config.h"
#include "varray.h"
#include "hashtab.h"
#include "splay-tree.h"
#include "langhooks.h"
#include "basic-block.h"
#include "tree-iterator.h"
#include "cgraph.h"
#include "intl.h"
#include "tree-mudflap.h"
#include "tree-flow.h"
#include "function.h"
#include "ggc.h"
#include "diagnostic.h"
#include "except.h"
#include "debug.h"
#include "pointer-set.h"
#include "integrate.h"
/* I'm not real happy about this, but we need to handle gimple and
   non-gimple trees.  */
#include "tree-gimple.h"
/* Inlining, Saving, Cloning

   Inlining: a function body is duplicated, but the PARM_DECLs are
   remapped into VAR_DECLs, and non-void RETURN_EXPRs become
   MODIFY_EXPRs that store to a dedicated returned-value variable.
   The duplicated eh_region info of the copy will later be appended
   to the info for the caller; the eh_region info in copied throwing
   statements and RESX_EXPRs is adjusted accordingly.

   Saving: make a semantically-identical copy of the function body.
   Necessary when we want to generate code for the body (a destructive
   operation), but we expect to need this body in the future (e.g. for
   inlining into another function).

   Cloning: (only in C++) We have one body for a con/de/structor, and
   multiple function decls, each with a unique parameter list.
   Duplicate the body, using the given splay tree; some parameters
   will become constants (like 0 or 1).

   All of these will simultaneously look up any callgraph edges.  If
   we're going to inline the duplicated function body, and the given
   function has some cloned callgraph nodes (one for each place this
   function will be inlined) those callgraph edges will be duplicated.
   If we're saving or cloning the body, those callgraph edges will be
   updated to point into the new body.  (Note that the original
   callgraph node and edge list will not be altered.)

   See the CALL_EXPR handling case in copy_body_r ().  */
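/* As a rough sketch of the inlining transformation described above
   (the temporary names are invented here for exposition only; they
   are not what the compiler actually generates):

     int square (int x) { return x * x; }
     ...
     y = square (n);

   becomes, once the body has been duplicated into the caller,

     {
       int x.1;                  PARM_DECL remapped to a VAR_DECL
       int retval.2;             the dedicated returned-value variable
       x.1 = n;
       retval.2 = x.1 * x.1;     the RETURN_EXPR became a MODIFY_EXPR
       y = retval.2;             the call now reads the return variable
     }  */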
/* 0 if we should not perform inlining.
   1 if we should expand function calls inline at the tree level.
   2 if we should consider *all* functions to be inline
   candidates.  */

int flag_inline_trees = 0;
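/* (Paraphrasing the option handling rather than quoting it: compiling
   with optimization enabled normally sets this to 1, and
   -finline-functions raises it to 2; the authoritative wiring lives
   where the command-line options are processed.)  */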
/* To Do:

   o In order to make inlining-on-trees work, we pessimized
     function-local static constants.  In particular, they are now
     always output, even when not addressed.  Fix this by treating
     function-local static constants just like global static
     constants; the back-end already knows not to output them if they
     are not needed.

   o Provide heuristics to clamp inlining of recursive template
     calls?  */
/* Data required for function inlining.  */

typedef struct inline_data
{
  /* FUNCTION_DECL for function being inlined.  */
  tree callee;
  /* FUNCTION_DECL for function being inlined into.  */
  tree caller;
  /* struct function for function being inlined.  Usually this is the same
     as DECL_STRUCT_FUNCTION (callee), but can be different if saved_cfg
     and saved_eh are in use.  */
  struct function *callee_cfun;
  /* The VAR_DECL for the return value.  */
  tree retvar;
  /* The map from local declarations in the inlined function to
     equivalents in the function into which it is being inlined.  */
  splay_tree decl_map;
  /* We use the same mechanism to build clones that we do to perform
     inlining.  However, there are a few places where we need to
     distinguish between those two situations.  This flag is true if
     we are cloning, rather than inlining.  */
  bool cloning_p;
  /* Similarly for saving function body.  */
  bool saving_p;
  /* Callgraph node of function we are inlining into.  */
  struct cgraph_node *node;
  /* Callgraph node of currently inlined function.  */
  struct cgraph_node *current_node;
  /* Current BLOCK.  */
  tree block;
  /* Exception region the inlined call lies in.  */
  int eh_region;
  /* Take region number in the function being copied, add this value and
     get eh region number of the duplicate in the function we inline into.  */
  int eh_region_offset;
} inline_data;
/* Prototypes.  */

static tree declare_return_variable (inline_data *, tree, tree, tree *);
static tree copy_body_r (tree *, int *, void *);
static tree copy_generic_body (inline_data *);
static bool inlinable_function_p (tree);
static tree remap_decl (tree, inline_data *);
static tree remap_type (tree, inline_data *);
static void remap_block (tree *, inline_data *);
static tree remap_decls (tree, inline_data *);
static void copy_bind_expr (tree *, int *, inline_data *);
static tree mark_local_for_remap_r (tree *, int *, void *);
static void unsave_expr_1 (tree);
static tree unsave_r (tree *, int *, void *);
static void declare_inline_vars (tree, tree);
static void remap_save_expr (tree *, void *, int *);

static inline bool inlining_p (inline_data *id);
/* Insert a tree->tree mapping for ID.  Although the name suggests
   that the trees should be variables, this map is used for more than
   that.  */

static void
insert_decl_map (inline_data *id, tree key, tree value)
{
  splay_tree_insert (id->decl_map, (splay_tree_key) key,
		     (splay_tree_value) value);

  /* Always insert an identity map as well.  If we see this same new
     node again, we won't want to duplicate it a second time.  */
  if (key != value)
    splay_tree_insert (id->decl_map, (splay_tree_key) value,
		       (splay_tree_value) value);
}
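/* For example, once an old PARM_DECL P has been mapped to a fresh
   VAR_DECL V, we also record V -> V; if a later walk over the copied
   body reaches V again, the lookup succeeds and V is reused rather
   than being copied a second time.  */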
/* Remap DECL during the copying of the BLOCK tree for the function.  */

static tree
remap_decl (tree decl, inline_data *id)
{
  splay_tree_node n;
  tree fn;

  /* We only remap local variables in the current function.  */
  fn = id->callee;

  /* See if we have remapped this declaration.  */
  n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);

  /* If we didn't already have an equivalent for this declaration,
     create one now.  */
  if (!n)
    {
      /* Make a copy of the variable or label.  */
      tree t;
      t = copy_decl_for_inlining (decl, fn, id->caller);

      /* Remember it, so that if we encounter this local entity again
	 we can reuse this copy.  Do this early because remap_type may
	 need this decl for TYPE_STUB_DECL.  */
      insert_decl_map (id, decl, t);

      /* Remap types, if necessary.  */
      TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
      if (TREE_CODE (t) == TYPE_DECL)
	DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
      else if (TREE_CODE (t) == PARM_DECL)
	DECL_ARG_TYPE_AS_WRITTEN (t)
	  = remap_type (DECL_ARG_TYPE_AS_WRITTEN (t), id);

      /* Remap sizes as necessary.  */
      walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
      walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);

      /* If fields, do likewise for offset and qualifier.  */
      if (TREE_CODE (t) == FIELD_DECL)
	{
	  walk_tree (&DECL_FIELD_OFFSET (t), copy_body_r, id, NULL);
	  if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
	    walk_tree (&DECL_QUALIFIER (t), copy_body_r, id, NULL);
	}

      /* FIXME handle anon aggrs.  */
      if (! DECL_NAME (t) && TREE_TYPE (t)
	  && lang_hooks.tree_inlining.anon_aggr_type_p (TREE_TYPE (t)))
	{
	  /* For a VAR_DECL of anonymous type, we must also copy the
	     member VAR_DECLS here and rechain the DECL_ANON_UNION_ELEMS.  */
	  tree members = NULL;
	  tree src;

	  for (src = DECL_ANON_UNION_ELEMS (t); src;
	       src = TREE_CHAIN (src))
	    {
	      tree member = remap_decl (TREE_VALUE (src), id);

	      gcc_assert (!TREE_PURPOSE (src));
	      members = tree_cons (NULL, member, members);
	    }
	  DECL_ANON_UNION_ELEMS (t) = nreverse (members);
	}
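      /* A sketch of the situation handled above (hypothetical input):

	   void f (void) { union { int i; float g; }; i = 3; }

	 The members of the anonymous union are themselves VAR_DECLs,
	 so each of them has to be remapped along with the union
	 object itself.  */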
      /* If we are inlining and this is a variable (not a label), declare the
	 remapped variable in the caller's body.  */
      if (inlining_p (id)
	  && (TREE_CODE (t) == VAR_DECL
	      || TREE_CODE (t) == PARM_DECL))
	declare_inline_vars (id->block, t);

      /* Remember it, so that if we encounter this local entity
	 again we can reuse this copy.  */
      insert_decl_map (id, decl, t);
      return t;
    }

  return unshare_expr ((tree) n->value);
}
/* Remap TYPE during the copying of the function body.  */

static tree
remap_type (tree type, inline_data *id)
{
  splay_tree_node node;
  tree new, t;

  if (type == NULL)
    return type;

  /* See if we have remapped this type.  */
  node = splay_tree_lookup (id->decl_map, (splay_tree_key) type);
  if (node)
    return (tree) node->value;

  /* The type only needs remapping if it's variably modified.  */
  if (! variably_modified_type_p (type, id->callee))
    {
      insert_decl_map (id, type, type);
      return type;
    }

  /* We do need a copy.  build and register it now.  If this is a pointer or
     reference type, remap the designated type and make a new pointer or
     reference type.  */
  if (TREE_CODE (type) == POINTER_TYPE)
    {
      new = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
					 TYPE_MODE (type),
					 TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new);
      return new;
    }
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      new = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
					   TYPE_MODE (type),
					   TYPE_REF_CAN_ALIAS_ALL (type));
      insert_decl_map (id, type, new);
      return new;
    }
  else
    new = copy_node (type);

  insert_decl_map (id, type, new);

  /* This is a new type, not a copy of an old type.  Need to reassociate
     variants.  We can handle everything except the main variant lazily.  */
  t = TYPE_MAIN_VARIANT (type);
  if (type != t)
    {
      t = remap_type (t, id);
      TYPE_MAIN_VARIANT (new) = t;
      TYPE_NEXT_VARIANT (new) = TYPE_MAIN_VARIANT (t);
      TYPE_NEXT_VARIANT (t) = new;
    }
  else
    {
      TYPE_MAIN_VARIANT (new) = new;
      TYPE_NEXT_VARIANT (new) = NULL;
    }

  if (TYPE_STUB_DECL (type))
    TYPE_STUB_DECL (new) = remap_decl (TYPE_STUB_DECL (type), id);

  /* Lazily create pointer and reference types.  */
  TYPE_POINTER_TO (new) = NULL;
  TYPE_REFERENCE_TO (new) = NULL;

  switch (TREE_CODE (new))
    {
    case INTEGER_TYPE:
    case REAL_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case CHAR_TYPE:
      t = TYPE_MIN_VALUE (new);
      if (t && TREE_CODE (t) != INTEGER_CST)
	walk_tree (&TYPE_MIN_VALUE (new), copy_body_r, id, NULL);

      t = TYPE_MAX_VALUE (new);
      if (t && TREE_CODE (t) != INTEGER_CST)
	walk_tree (&TYPE_MAX_VALUE (new), copy_body_r, id, NULL);
      return new;

    case FUNCTION_TYPE:
      TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
      walk_tree (&TYPE_ARG_TYPES (new), copy_body_r, id, NULL);
      return new;

    case ARRAY_TYPE:
      TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
      TYPE_DOMAIN (new) = remap_type (TYPE_DOMAIN (new), id);
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      walk_tree (&TYPE_FIELDS (new), copy_body_r, id, NULL);
      break;

    default:
      /* Shouldn't have been thought variable sized.  */
      gcc_unreachable ();
    }

  walk_tree (&TYPE_SIZE (new), copy_body_r, id, NULL);
  walk_tree (&TYPE_SIZE_UNIT (new), copy_body_r, id, NULL);

  return new;
}
/* Remap the declarations in DECLS, returning the new chain.  */

static tree
remap_decls (tree decls, inline_data *id)
{
  tree old_var;
  tree new_decls = NULL_TREE;

  /* Remap its variables.  */
  for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
    {
      tree new_var;

      /* Remap the variable.  */
      new_var = remap_decl (old_var, id);

      /* If we didn't remap this variable, we can't mess with its
	 TREE_CHAIN.  If we remapped this variable to the return slot, it's
	 already declared somewhere else, so don't declare it here.  */
      if (!new_var || new_var == id->retvar)
	;
      else
	{
	  gcc_assert (DECL_P (new_var));
	  TREE_CHAIN (new_var) = new_decls;
	  new_decls = new_var;
	}
    }

  return nreverse (new_decls);
}
/* Copy the BLOCK to contain remapped versions of the variables
   therein.  And hook the new block into the block-tree.  */

static void
remap_block (tree *block, inline_data *id)
{
  tree old_block;
  tree new_block;
  tree fn;

  /* Make the new block.  */
  old_block = *block;
  new_block = make_node (BLOCK);
  TREE_USED (new_block) = TREE_USED (old_block);
  BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
  *block = new_block;

  /* Remap its variables.  */
  BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block), id);

  fn = id->caller;

  /* FIXME!  It shouldn't be so hard to manage blocks.  Rebuilding them in
     rest_of_compilation is a good start.  */
  if (id->cloning_p)
    /* We're building a clone; DECL_INITIAL is still
       error_mark_node, and current_binding_level is the parm
       binding level.  */
    lang_hooks.decls.insert_block (new_block);
  else
    {
      /* Attach this new block after the DECL_INITIAL block for the
	 function into which this block is being inlined.  In
	 rest_of_compilation we will straighten out the BLOCK tree.  */
      tree *first_block;
      if (DECL_INITIAL (fn))
	first_block = &BLOCK_CHAIN (DECL_INITIAL (fn));
      else
	first_block = &DECL_INITIAL (fn);
      BLOCK_CHAIN (new_block) = *first_block;
      *first_block = new_block;
    }

  /* Remember the remapped block.  */
  insert_decl_map (id, old_block, new_block);
}
/* Make a fresh copy of the STATEMENT_LIST at *TP.  */

static void
copy_statement_list (tree *tp)
{
  tree_stmt_iterator oi, ni;
  tree new;

  new = alloc_stmt_list ();
  ni = tsi_start (new);
  oi = tsi_start (*tp);
  *tp = new;

  for (; !tsi_end_p (oi); tsi_next (&oi))
    tsi_link_after (&ni, tsi_stmt (oi), TSI_NEW_STMT);
}
/* Copy the BIND_EXPR at *TP, remapping its block and variables.  */

static void
copy_bind_expr (tree *tp, int *walk_subtrees, inline_data *id)
{
  tree block = BIND_EXPR_BLOCK (*tp);
  /* Copy (and replace) the statement.  */
  copy_tree_r (tp, walk_subtrees, NULL);
  if (block)
    {
      remap_block (&block, id);
      BIND_EXPR_BLOCK (*tp) = block;
    }

  if (BIND_EXPR_VARS (*tp))
    /* This will remap a lot of the same decls again, but this should be
       harmless.  */
    BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), id);
}
/* Called via walk_tree when copying a function body.  DATA is really
   an `inline_data *'.  */

static tree
copy_body_r (tree *tp, int *walk_subtrees, void *data)
{
  inline_data *id = (inline_data *) data;
  tree fn = id->callee;

  /* Begin by recognizing trees that we'll completely rewrite for the
     inlining context.  Our output for these trees is completely
     different from our input (e.g. RETURN_EXPR is deleted, and morphs
     into an edge).  Further down, we'll handle trees that get
     duplicated and/or tweaked.  */

  /* If this is a RETURN_EXPR, strip it: either replace it with the
     MODIFY_EXPR it contains, or delete it outright; the control-flow
     aspect of the return is handled by the CFG edges built when the
     body is copied.  */
  if (TREE_CODE (*tp) == RETURN_EXPR && inlining_p (id))
    {
      tree assignment = TREE_OPERAND (*tp, 0);

      /* If we're returning something, just turn that into an
	 assignment into the equivalent of the original RESULT_DECL.
	 If the "assignment" is just the result decl, the result
	 decl has already been set (e.g. a recent "foo (&result_decl,
	 ...)"); just toss the entire RETURN_EXPR.  */
      if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
	{
	  /* Replace the RETURN_EXPR with (a copy of) the
	     MODIFY_EXPR hanging underneath.  */
	  *tp = copy_node (assignment);
	}
      else /* Else the RETURN_EXPR returns no value.  */
	{
	  *tp = NULL;
	  return (tree) (void *) 1;
	}
    }
  /* Local variables and labels need to be replaced by equivalent
     variables.  We don't want to copy static variables; there's only
     one of those, no matter how many times we inline the containing
     function.  Similarly for globals from an outer function.  */
  else if (lang_hooks.tree_inlining.auto_var_in_fn_p (*tp, fn))
    {
      tree new_decl;

      /* Remap the declaration.  */
      new_decl = remap_decl (*tp, id);
      gcc_assert (new_decl);
      /* Replace this variable with the copy.  */
      STRIP_TYPE_NOPS (new_decl);
      *tp = new_decl;
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    remap_save_expr (tp, id->decl_map, walk_subtrees);
  else if (TREE_CODE (*tp) == LABEL_DECL)
    /* These may need to be remapped for EH handling.  */
    remap_decl (*tp, id);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  /* Types may need remapping as well.  */
  else if (TYPE_P (*tp))
    *tp = remap_type (*tp, id);

  /* If this is a constant, we have to copy the node iff the type will be
     remapped.  copy_tree_r will not copy a constant.  */
  else if (CONSTANT_CLASS_P (*tp))
    {
      tree new_type = remap_type (TREE_TYPE (*tp), id);

      if (new_type == TREE_TYPE (*tp))
	*walk_subtrees = 0;

      else if (TREE_CODE (*tp) == INTEGER_CST)
	*tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
				  TREE_INT_CST_HIGH (*tp));
      else
	{
	  *tp = copy_node (*tp);
	  TREE_TYPE (*tp) = new_type;
	}
    }

  /* Otherwise, just copy the node.  Note that copy_tree_r already
     knows not to copy VAR_DECLs, etc., so this is safe.  */
  else
    {
      tree old_node = *tp;

      /* Here we handle trees that are not completely rewritten.
	 First we detect some inlining-induced bogosities for
	 discarding.  */
      if (TREE_CODE (*tp) == MODIFY_EXPR
	  && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
	  && (lang_hooks.tree_inlining.auto_var_in_fn_p
	      (TREE_OPERAND (*tp, 0), fn)))
	{
	  /* Some assignments VAR = VAR; don't generate any rtl code
	     and thus don't count as variable modification.  Avoid
	     keeping bogosities like 0 = 0.  */
	  tree decl = TREE_OPERAND (*tp, 0), value;
	  splay_tree_node n;

	  n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
	  if (n)
	    {
	      value = (tree) n->value;
	      STRIP_TYPE_NOPS (value);
	      if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
		{
		  *tp = build_empty_stmt ();
		  return copy_body_r (tp, walk_subtrees, data);
		}
	    }
	}
      else if (TREE_CODE (*tp) == INDIRECT_REF)
	{
	  /* Get rid of *& from inline substitutions that can happen when a
	     pointer argument is an ADDR_EXPR.  */
	  tree decl = TREE_OPERAND (*tp, 0);
	  splay_tree_node n;

	  n = splay_tree_lookup (id->decl_map, (splay_tree_key) decl);
	  if (n)
	    {
	      /* If we happen to get an ADDR_EXPR in n->value, strip
		 it manually here as we'll eventually get ADDR_EXPRs
		 which lie about their types pointed to.  In this case
		 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
		 but we absolutely rely on that.  */
	      if (TREE_CODE ((tree) n->value) == ADDR_EXPR)
		*tp = TREE_OPERAND ((tree) n->value, 0);
	      else
		*tp = build1 (INDIRECT_REF,
			      TREE_TYPE (TREE_TYPE ((tree) n->value)),
			      (tree) n->value);
	      *walk_subtrees = 0;
	      return NULL;
	    }
	}
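      /* A sketch of what the code above cleans up (the names are
	 illustrative): inlining "void g (int *p) { use (*p); }" at the
	 call "g (&v)" maps P to &V, so *P momentarily becomes *&V; the
	 ADDR_EXPR case folds that back to plain V, and the else branch
	 rebuilds a valid INDIRECT_REF for any other replacement
	 value.  */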
      /* Here is the "usual case".  Copy this tree node, and then
	 tweak some special cases.  */
      copy_tree_r (tp, walk_subtrees, NULL);
      if (id->block
	  && IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (TREE_CODE (*tp))))
	TREE_BLOCK (*tp) = id->block;

      /* We're duplicating a CALL_EXPR.  Find any corresponding
	 callgraph edges and update or duplicate them.  */
      if (TREE_CODE (*tp) == CALL_EXPR && id->node && get_callee_fndecl (*tp))
	{
	  if (id->saving_p)
	    {
	      struct cgraph_node *node;
	      struct cgraph_edge *edge;

	      /* We're saving a copy of the body, so we'll update the
		 callgraph nodes in place.  Note that we avoid
		 altering the original callgraph node; we begin with
		 the first clone.  */
	      for (node = id->node->next_clone;
		   node;
		   node = node->next_clone)
		{
		  edge = cgraph_edge (node, old_node);
		  gcc_assert (edge);
		  edge->call_expr = *tp;
		}
	    }
	  else
	    {
	      struct cgraph_edge *edge;

	      /* We're cloning or inlining this body; duplicate the
		 associated callgraph edges.  */
	      edge = cgraph_edge (id->current_node, old_node);
	      if (edge)
		cgraph_clone_edge (edge, id->node, *tp,
				   REG_BR_PROB_BASE, 1);
	    }
	}
      else if (TREE_CODE (*tp) == RESX_EXPR && id->eh_region_offset)
	TREE_OPERAND (*tp, 0) =
	  build_int_cst
	    (NULL_TREE,
	     id->eh_region_offset + TREE_INT_CST_LOW (TREE_OPERAND (*tp, 0)));

      TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);

      /* The copied TARGET_EXPR has never been expanded, even if the
	 original node was expanded already.  */
      if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
	{
	  TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
	  TREE_OPERAND (*tp, 3) = NULL_TREE;
	}

      /* Variable substitution need not be simple; consider the
	 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
	 and friends are up-to-date.  */
      else if (TREE_CODE (*tp) == ADDR_EXPR)
	{
	  walk_tree (&TREE_OPERAND (*tp, 0), copy_body_r, id, NULL);
	  recompute_tree_invarant_for_addr_expr (*tp);
	  *walk_subtrees = 0;
	}
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
/* Copy basic block, scale profile accordingly.  Edges will be taken care of
   later.  */

static basic_block
copy_bb (inline_data *id, basic_block bb, int frequency_scale, int count_scale)
{
  block_stmt_iterator bsi, copy_bsi;
  basic_block copy_basic_block;

  /* create_basic_block() will append every new block to
     basic_block_info automatically.  */
  copy_basic_block = create_basic_block (NULL, (void *) 0, bb->prev_bb->aux);
  copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
  copy_basic_block->frequency = (bb->frequency
				 * frequency_scale / REG_BR_PROB_BASE);
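  /* Illustrative arithmetic, with made-up numbers: if this block ran
     1000 times in the callee but the call site accounts for only half
     of the callee's entry count, count_scale is REG_BR_PROB_BASE / 2
     and the copied block is credited with 500 executions.  */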
  copy_bsi = bsi_start (copy_basic_block);

  for (bsi = bsi_start (bb);
       !bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree stmt = bsi_stmt (bsi);
      tree orig_stmt = stmt;

      walk_tree (&stmt, copy_body_r, id, NULL);

      /* RETURN_EXPR might be removed,
	 this is signalled by making stmt pointer NULL.  */
      if (stmt)
	{
	  bsi_insert_after (&copy_bsi, stmt, BSI_NEW_STMT);
	  /* If you think we can abort here, you are wrong.
	     There is no region 0 in tree land.  */
	  gcc_assert (lookup_stmt_eh_region_fn (id->callee_cfun, orig_stmt)
		      != 0);

	  if (tree_could_throw_p (stmt))
	    {
	      int region = lookup_stmt_eh_region_fn (id->callee_cfun, orig_stmt);
	      /* Add an entry for the copied tree in the EH hashtable.
		 When saving or cloning or versioning, use the hashtable in
		 cfun, and just copy the EH number.  When inlining, use the
		 hashtable in the caller, and adjust the region number.  */
	      if (region > 0)
		add_stmt_to_eh_region (stmt, region + id->eh_region_offset);

	      /* If this tree doesn't have a region associated with it,
		 and there is a "current region,"
		 then associate this tree with the current region
		 and add edges associated with this region.  */
	      if ((lookup_stmt_eh_region_fn (id->callee_cfun,
					     orig_stmt) <= 0
		   && id->eh_region > 0)
		  && tree_could_throw_p (stmt))
		add_stmt_to_eh_region (stmt, id->eh_region);
	    }
	}
    }
  return copy_basic_block;
}
/* Copy edges from BB into its copy constructed earlier, scale profile
   accordingly.  Edges will be taken care of later.  Assume aux
   pointers to point to the copies of each BB.  */

static void
copy_edges_for_bb (basic_block bb, int count_scale)
{
  basic_block new_bb = bb->aux;
  edge_iterator ei;
  edge old_edge;
  block_stmt_iterator bsi;
  int flags;

  /* Use the indices from the original blocks to create edges for the
     new ones.  */
  FOR_EACH_EDGE (old_edge, ei, bb->succs)
    if (!(old_edge->flags & EDGE_EH))
      {
	edge new;

	flags = old_edge->flags;

	/* Return edges do get a FALLTHRU flag when they get inlined.  */
	if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
	    && old_edge->dest->aux != EXIT_BLOCK_PTR)
	  flags |= EDGE_FALLTHRU;
	new = make_edge (new_bb, old_edge->dest->aux, flags);
	new->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
	new->probability = old_edge->probability;
      }

  if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
    return;

  tree_purge_dead_eh_edges (new_bb);
  for (bsi = bsi_start (new_bb); !bsi_end_p (bsi);)
    {
      tree copy_stmt;

      copy_stmt = bsi_stmt (bsi);
      update_stmt (copy_stmt);
      /* Do this before the possible split_block.  */
      bsi_next (&bsi);

      /* If this tree could throw an exception, there are two
	 cases where we need to add abnormal edge(s): the
	 tree wasn't in a region and there is a "current
	 region" in the caller; or the original tree had
	 EH edges.  In both cases split the block after the tree,
	 and add abnormal edge(s) as needed; we need both
	 those from the callee and the caller.
	 We check whether the copy can throw, because the const
	 propagation can change an INDIRECT_REF which throws
	 into a COMPONENT_REF which doesn't.  If the copy
	 can throw, the original could also throw.  */

      if (TREE_CODE (copy_stmt) == RESX_EXPR
	  || (tree_could_throw_p (copy_stmt)
	      && lookup_stmt_eh_region (copy_stmt) > 0))
	{
	  if (!bsi_end_p (bsi))
	    /* Note that bb's predecessor edges aren't necessarily
	       right at this point; split_block doesn't care.  */
	    {
	      edge e = split_block (new_bb, copy_stmt);
	      new_bb = e->dest;
	      bsi = bsi_start (new_bb);
	    }

	  make_eh_edges (copy_stmt);
	}
    }
}
/* Wrapper for remap_decl so it can be used as a callback.  */

static tree
remap_decl_1 (tree decl, void *data)
{
  return remap_decl (decl, data);
}
/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  Walks FN via CFG, returns new fndecl.  */

static tree
copy_cfg_body (inline_data *id, gcov_type count, int frequency,
	       basic_block entry_block_map, basic_block exit_block_map)
{
  tree callee_fndecl = id->callee;
  /* Original cfun for the callee, doesn't change.  */
  struct function *callee_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
  /* Copy, built by this function.  */
  struct function *new_cfun;
  /* Place to copy from; when a copy of the function was saved off earlier,
     use that instead of the main copy.  */
  struct function *cfun_to_copy =
    (struct function *) ggc_alloc_cleared (sizeof (struct function));
  basic_block bb;
  tree new_fndecl = NULL;
  bool saving_or_cloning;
  int count_scale, frequency_scale;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count)
    count_scale = (REG_BR_PROB_BASE * count
		   / ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count);
  else
    count_scale = 1;

  if (ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->frequency)
    frequency_scale = (REG_BR_PROB_BASE * frequency
		       /
		       ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->frequency);
  else
    frequency_scale = count_scale;

  /* Register specific tree functions.  */
  tree_register_cfg_hooks ();

  /* Must have a CFG here at this point.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
	      (DECL_STRUCT_FUNCTION (callee_fndecl)));

  *cfun_to_copy = *DECL_STRUCT_FUNCTION (callee_fndecl);

  /* If there is a saved_cfg+saved_args lurking in the
     struct function, a copy of the callee body was saved there, and
     the 'struct cgraph edge' nodes have been fudged to point into the
     saved body.  Accordingly, we want to copy that saved body so the
     callgraph edges will be recognized and cloned properly.  */
  if (cfun_to_copy->saved_cfg)
    {
      cfun_to_copy->cfg = cfun_to_copy->saved_cfg;
      cfun_to_copy->eh = cfun_to_copy->saved_eh;
    }
  id->callee_cfun = cfun_to_copy;

  /* If saving or cloning a function body, create new basic_block_info
     and label_to_block_maps.  Otherwise, we're duplicating a function
     body for inlining; insert our new blocks and labels into the
     existing varrays.  */
  saving_or_cloning = (id->saving_p || id->cloning_p);
  if (saving_or_cloning)
    {
      new_cfun =
	(struct function *) ggc_alloc_cleared (sizeof (struct function));
      *new_cfun = *DECL_STRUCT_FUNCTION (callee_fndecl);
      new_cfun->cfg = NULL;
      new_cfun->decl = new_fndecl = copy_node (callee_fndecl);
      new_cfun->ib_boundaries_block = (varray_type) 0;
      DECL_STRUCT_FUNCTION (new_fndecl) = new_cfun;
      push_cfun (new_cfun);
      init_empty_tree_cfg ();

      ENTRY_BLOCK_PTR->count =
	(ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count * count_scale /
	 REG_BR_PROB_BASE);
      ENTRY_BLOCK_PTR->frequency =
	(ENTRY_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->frequency *
	 frequency_scale / REG_BR_PROB_BASE);
      EXIT_BLOCK_PTR->count =
	(EXIT_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->count * count_scale /
	 REG_BR_PROB_BASE);
      EXIT_BLOCK_PTR->frequency =
	(EXIT_BLOCK_PTR_FOR_FUNCTION (callee_cfun)->frequency *
	 frequency_scale / REG_BR_PROB_BASE);

      entry_block_map = ENTRY_BLOCK_PTR;
      exit_block_map = EXIT_BLOCK_PTR;
    }

  ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
  EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;

  /* Duplicate any exception-handling regions.  */
  if (cfun->eh)
    {
      if (saving_or_cloning)
	init_eh_for_function ();
      id->eh_region_offset = duplicate_eh_regions (cfun_to_copy,
						   remap_decl_1,
						   id, id->eh_region);
      gcc_assert (inlining_p (id) || !id->eh_region_offset);
    }
  /* Use aux pointers to map the original blocks to copy.  */
  FOR_EACH_BB_FN (bb, cfun_to_copy)
    bb->aux = copy_bb (id, bb, frequency_scale, count_scale);
  /* Now that we've duplicated the blocks, duplicate their edges.  */
  FOR_ALL_BB_FN (bb, cfun_to_copy)
    copy_edges_for_bb (bb, count_scale);
  FOR_ALL_BB_FN (bb, cfun_to_copy)
    bb->aux = NULL;

  if (saving_or_cloning)
    pop_cfun ();

  return new_fndecl;
}
/* Make a copy of the body of FN so that it can be inserted inline in
   another function.  */

static tree
copy_generic_body (inline_data *id)
{
  tree body;
  tree fndecl = id->callee;

  body = DECL_SAVED_TREE (fndecl);
  walk_tree (&body, copy_body_r, id, NULL);

  return body;
}

static tree
copy_body (inline_data *id, gcov_type count, int frequency,
	   basic_block entry_block_map, basic_block exit_block_map)
{
  tree fndecl = id->callee;
  tree body;

  /* If this body has a CFG, walk CFG and copy.  */
  gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
  body = copy_cfg_body (id, count, frequency, entry_block_map, exit_block_map);

  return body;
}
/* Return true if VALUE is an ADDR_EXPR of an automatic variable
   defined in function FN, or of a data member thereof.  */

static bool
self_inlining_addr_expr (tree value, tree fn)
{
  tree var;

  if (TREE_CODE (value) != ADDR_EXPR)
    return false;

  var = get_base_address (TREE_OPERAND (value, 0));

  return var && lang_hooks.tree_inlining.auto_var_in_fn_p (var, fn);
}
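/* Why this predicate matters (an illustrative scenario): if f contains
   the recursive call "f (&local)", we must not substitute &local
   directly into the inlined copy, because the copy receives its own
   remapped LOCAL; the address of the outer LOCAL would name the wrong
   object.  setup_one_parameter therefore checks this predicate before
   propagating an ADDR_EXPR argument as a constant.  */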
/* Initialize parameter P of the inlined function with VALUE; FN is the
   callee.  Remapped variables are accumulated on *VARS, and any needed
   initialization statement is appended to BB.  */

static void
setup_one_parameter (inline_data *id, tree p, tree value, tree fn,
		     basic_block bb, tree *vars)
{
  tree init_stmt;
  tree var;
  tree var_sub;

  /* If the parameter is never assigned to, we may not need to
     create a new variable here at all.  Instead, we may be able
     to just use the argument value.  */
  if (TREE_READONLY (p)
      && !TREE_ADDRESSABLE (p)
      && value && !TREE_SIDE_EFFECTS (value))
    {
      /* We may produce non-gimple trees by adding NOPs or introduce
	 invalid sharing when operand is not really constant.
	 It is not a big deal to prohibit constant propagation here as
	 we will constant propagate in DOM1 pass anyway.  */
      if (is_gimple_min_invariant (value)
	  && lang_hooks.types_compatible_p (TREE_TYPE (value), TREE_TYPE (p))
	  /* We have to be very careful about ADDR_EXPR.  Make sure
	     the base variable isn't a local variable of the inlined
	     function, e.g., when doing recursive inlining, direct or
	     mutually-recursive or whatever, which is why we don't
	     just test whether fn == current_function_decl.  */
	  && ! self_inlining_addr_expr (value, fn))
	{
	  insert_decl_map (id, p, value);
	  return;
	}
    }
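  /* A sketch of the fast path above (hypothetical input): for

       static int inc (const int c) { return c + 1; }  ...  inc (42);

     C is TREE_READONLY, never has its address taken, and 42 is a
     gimple invariant, so we simply record the mapping C -> 42 and
     materialize no variable in the caller at all.  */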
  /* Make an equivalent VAR_DECL.  Note that we must NOT remap the type
     here since the type of this decl must be visible to the calling
     function.  */
  var = copy_decl_for_inlining (p, fn, id->caller);

  /* See if the frontend wants to pass this by invisible reference.  If
     so, our new VAR_DECL will have REFERENCE_TYPE, and we need to
     replace uses of the PARM_DECL with dereferences.  */
  if (TREE_TYPE (var) != TREE_TYPE (p)
      && POINTER_TYPE_P (TREE_TYPE (var))
      && TREE_TYPE (TREE_TYPE (var)) == TREE_TYPE (p))
    {
      insert_decl_map (id, var, var);
      var_sub = build_fold_indirect_ref (var);
    }
  else
    var_sub = var;

  /* Register the VAR_DECL as the equivalent for the PARM_DECL;
     that way, when the PARM_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, p, var_sub);

  /* Declare this new variable.  */
  TREE_CHAIN (var) = *vars;
  *vars = var;

  /* Make gimplifier happy about this variable.  */
  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;

  /* Even if P was TREE_READONLY, the new VAR should not be.
     In the original code, we would have constructed a
     temporary, and then the function body would have never
     changed the value of P.  However, now, we will be
     constructing VAR directly.  The constructor body may
     change its value multiple times as it is being
     constructed.  Therefore, it must not be TREE_READONLY;
     the back-end assumes that TREE_READONLY variable is
     assigned to only once.  */
  if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
    TREE_READONLY (var) = 0;

  /* Initialize this VAR_DECL from the equivalent argument.  Convert
     the argument to the proper type in case it was promoted.  */
  if (value)
    {
      tree rhs = fold_convert (TREE_TYPE (var), value);
      block_stmt_iterator bsi = bsi_last (bb);

      if (rhs == error_mark_node)
	return;

      /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
	 keep our trees in gimple form.  */
      init_stmt = build (MODIFY_EXPR, TREE_TYPE (var), var, rhs);

      /* If we did not create a gimple value and we did not create a gimple
	 cast of a gimple value, then we will need to gimplify INIT_STMTS
	 at the end.  Note that is_gimple_cast only checks the outer
	 tree code, not its operand.  Thus the explicit check that its
	 operand is a gimple value.  */
      if (!is_gimple_val (rhs)
	  && (!is_gimple_cast (rhs)
	      || !is_gimple_val (TREE_OPERAND (rhs, 0))))
	gimplify_stmt (&init_stmt);
      bsi_insert_after (&bsi, init_stmt, BSI_NEW_STMT);
    }
}
/* Generate code to initialize the parameters of the function at the
   top of the stack in ID from the ARGS (presented as a TREE_LIST).  */

static void
initialize_inlined_parameters (inline_data *id, tree args, tree static_chain,
			       tree fn, basic_block bb)
{
  tree parms;
  tree a;
  tree p;
  tree vars = NULL_TREE;
  int argnum = 0;

  /* Figure out what the parameters are.  */
  parms = DECL_ARGUMENTS (fn);
  if (fn == current_function_decl)
    parms = cfun->saved_args;

  /* Loop through the parameter declarations, replacing each with an
     equivalent VAR_DECL, appropriately initialized.  */
  for (p = parms, a = args; p;
       a = a ? TREE_CHAIN (a) : a, p = TREE_CHAIN (p))
    {
      tree value;

      ++argnum;

      /* Find the initializer.  */
      value = lang_hooks.tree_inlining.convert_parm_for_inlining
	      (p, a ? TREE_VALUE (a) : NULL_TREE, fn, argnum);

      setup_one_parameter (id, p, value, fn, bb, &vars);
    }

  /* Initialize the static chain.  */
  p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
  if (fn == current_function_decl)
    p = DECL_STRUCT_FUNCTION (fn)->saved_static_chain_decl;
  if (p)
    {
      /* No static chain?  Seems like a bug in tree-nested.c.  */
      gcc_assert (static_chain);

      setup_one_parameter (id, p, static_chain, fn, bb, &vars);
    }

  declare_inline_vars (id->block, vars);
}
/* Declare a return variable to replace the RESULT_DECL for the
   function we are calling.

   RETURN_SLOT_ADDR, if non-null, was a fake parameter that
   took the address of the result.  MODIFY_DEST, if non-null, was the LHS of
   the MODIFY_EXPR to which this call is the RHS.

   The return value is a (possibly null) value that is the result of the
   function as seen by the callee.  *USE_P is a (possibly null) value that
   holds the result as seen by the caller.  */
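/* An illustrative case of the MODIFY_DEST reuse implemented below
   (the names are made up): for a gimple assignment "s = make_s ();",
   MODIFY_DEST is S; if the callee's return type matches and S is
   neither static nor addressable, the callee's RESULT_DECL is mapped
   directly onto S and no temporary return variable is created.  */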
static tree
declare_return_variable (inline_data *id, tree return_slot_addr,
			 tree modify_dest, tree *use_p)
{
  tree callee = id->callee;
  tree caller = id->caller;
  tree result = DECL_RESULT (callee);
  tree callee_type = TREE_TYPE (result);
  tree caller_type = TREE_TYPE (TREE_TYPE (callee));
  tree var, use;

  /* We don't need to do anything for functions that don't return
     anything.  */
  if (!result || VOID_TYPE_P (callee_type))
    {
      *use_p = NULL_TREE;
      return NULL_TREE;
    }

  /* If there was a return slot, then the return value is the
     dereferenced address of that object.  */
  if (return_slot_addr)
    {
      /* The front end shouldn't have used both return_slot_addr and
	 a modify expression.  */
      gcc_assert (!modify_dest);
      if (DECL_BY_REFERENCE (result))
	var = return_slot_addr;
      else
	var = build_fold_indirect_ref (return_slot_addr);
      use = NULL;
      goto done;
    }

  /* All types requiring non-trivial constructors should have been handled.  */
  gcc_assert (!TREE_ADDRESSABLE (callee_type));

  /* Attempt to avoid creating a new temporary variable.  */
  if (modify_dest)
    {
      bool use_it = false;

      /* We can't use MODIFY_DEST if there's type promotion involved.  */
      if (!lang_hooks.types_compatible_p (caller_type, callee_type))
	use_it = false;

      /* ??? If we're assigning to a variable sized type, then we must
	 reuse the destination variable, because we've no good way to
	 create variable sized temporaries at this point.  */
      else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
	use_it = true;

      /* If the callee cannot possibly modify MODIFY_DEST, then we can
	 reuse it as the result of the call directly.  Don't do this if
	 it would promote MODIFY_DEST to addressable.  */
      else if (!TREE_STATIC (modify_dest)
	       && !TREE_ADDRESSABLE (modify_dest)
	       && !TREE_ADDRESSABLE (result))
	use_it = true;

      if (use_it)
	{
	  var = modify_dest;
	  use = NULL;
	  goto done;
	}
    }

  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);

  var = copy_decl_for_inlining (result, callee, caller);

  DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
  DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list
    = tree_cons (NULL_TREE, var,
		 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list);

  /* Do not have the rest of GCC warn about this variable as it should
     not be visible to the user.  */
  TREE_NO_WARNING (var) = 1;

  /* Build the use expr.  If the return type of the function was
     promoted, convert it back to the expected type.  */
  use = var;
  if (!lang_hooks.types_compatible_p (TREE_TYPE (var), caller_type))
    use = fold_convert (caller_type, var);

 done:
  /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
     way, when the RESULT_DECL is encountered, it will be
     automatically replaced by the VAR_DECL.  */
  insert_decl_map (id, result, var);

  /* Remember this so we can ignore it in remap_decls.  */
  id->retvar = var;

  *use_p = use;
  return var;
}
/* Returns nonzero if a function can be inlined as a tree.  */

bool
tree_inlinable_function_p (tree fn)
{
  return inlinable_function_p (fn);
}
static const char *inline_forbidden_reason;

static tree
inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
		      void *fnp)
{
  tree node = *nodep;
  tree fn = (tree) fnp;
  tree t;

  switch (TREE_CODE (node))
    {
    case CALL_EXPR:
      /* Refuse to inline an alloca call unless the user has explicitly
	 forced it, as this may drastically change the program's memory
	 overhead when the function using alloca is called in a loop.
	 (In GCC itself, as present in SPEC2000, inlining into
	 schedule_block caused it to require 2GB of RAM instead of
	 256MB.)  */
      if (alloca_call_p (node)
	  && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
	{
	  inline_forbidden_reason
	    = N_("%Jfunction %qF can never be inlined because it uses "
		 "alloca (override using the always_inline attribute)");
	  return node;
	}
      t = get_callee_fndecl (node);
      if (! t)
	break;

      /* We cannot inline functions that call setjmp.  */
      if (setjmp_call_p (t))
	{
	  inline_forbidden_reason
	    = N_("%Jfunction %qF can never be inlined because it uses setjmp");
	  return node;
	}

      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (t))
	  {
	    /* We cannot inline functions that take a variable number of
	       arguments.  */
	  case BUILT_IN_VA_START:
	  case BUILT_IN_STDARG_START:
	  case BUILT_IN_NEXT_ARG:
	  case BUILT_IN_VA_END:
	    inline_forbidden_reason
	      = N_("%Jfunction %qF can never be inlined because it "
		   "uses variable argument lists");
	    return node;

	  case BUILT_IN_LONGJMP:
	    /* We can't inline functions that call __builtin_longjmp at
	       all.  The non-local goto machinery really requires the
	       destination be in a different function.  If we allow the
	       function calling __builtin_longjmp to be inlined into the
	       function calling __builtin_setjmp, Things will Go Awry.  */
	    inline_forbidden_reason
	      = N_("%Jfunction %qF can never be inlined because "
		   "it uses setjmp-longjmp exception handling");
	    return node;

	  case BUILT_IN_NONLOCAL_GOTO:
	    /* Similarly.  */
	    inline_forbidden_reason
	      = N_("%Jfunction %qF can never be inlined because "
		   "it uses non-local goto");
	    return node;

	  case BUILT_IN_RETURN:
	  case BUILT_IN_APPLY_ARGS:
	    /* If a __builtin_apply_args caller would be inlined,
	       it would be saving arguments of the function it has
	       been inlined into.  Similarly __builtin_return would
	       return from the function the inline has been inlined into.  */
	    inline_forbidden_reason
	      = N_("%Jfunction %qF can never be inlined because "
		   "it uses __builtin_return or __builtin_apply_args");
	    return node;

	  default:
	    break;
	  }
      break;
    case GOTO_EXPR:
      t = TREE_OPERAND (node, 0);

      /* We will not inline a function which uses computed goto.  The
	 addresses of its local labels, which may be tucked into
	 global storage, are of course not constant across
	 instantiations, which causes unexpected behavior.  */
      if (TREE_CODE (t) != LABEL_DECL)
	{
	  inline_forbidden_reason
	    = N_("%Jfunction %qF can never be inlined "
		 "because it contains a computed goto");
	  return node;
	}
      break;

    case LABEL_EXPR:
      t = TREE_OPERAND (node, 0);
      if (DECL_NONLOCAL (t))
	{
	  /* We cannot inline a function that receives a non-local goto
	     because we cannot remap the destination label used in the
	     function that is performing the non-local goto.  */
	  inline_forbidden_reason
	    = N_("%Jfunction %qF can never be inlined "
		 "because it receives a non-local goto");
	  return node;
	}
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
      /* We cannot inline a function of the form

	   void F (int i) { struct S { int ar[i]; } s; }

	 Attempting to do so produces a catch-22.
	 If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
	 UNION_TYPE nodes, then it goes into infinite recursion on a
	 structure containing a pointer to its own type.  If it doesn't,
	 then the type node for S doesn't get adjusted properly when
	 F is inlined.

	 ??? This is likely no longer true, but it's too late in the 4.0
	 cycle to try to find out.  This should be checked for 4.1.  */
      for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
	if (variably_modified_type_p (TREE_TYPE (t), NULL))
	  {
	    inline_forbidden_reason
	      = N_("%Jfunction %qF can never be inlined "
		   "because it uses variable sized variables");
	    return node;
	  }

    default:
      break;
    }

  return NULL_TREE;
}
/* Return subexpression representing possible alloca call, if any.  */

static tree
inline_forbidden_p (tree fndecl)
{
  location_t saved_loc = input_location;
  block_stmt_iterator bsi;
  basic_block bb;
  tree ret = NULL_TREE;

  FOR_EACH_BB_FN (bb, DECL_STRUCT_FUNCTION (fndecl))
    for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
      {
	ret = walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
					    inline_forbidden_p_1, fndecl);
	if (ret)
	  goto egress;
      }

 egress:
  input_location = saved_loc;
  return ret;
}
/* Returns nonzero if FN is a function that does not have any
   fundamental inline blocking properties.  */

static bool
inlinable_function_p (tree fn)
{
  bool inlinable = true;

  /* If we've already decided this function shouldn't be inlined,
     there's no need to check again.  */
  if (DECL_UNINLINABLE (fn))
    return false;

  /* See if there is any language-specific reason it cannot be
     inlined.  (It is important that this hook be called early because
     in C++ it may result in template instantiation.)
     If the function is not inlinable for language-specific reasons,
     it is left up to the langhook to explain why.  */
  inlinable = !lang_hooks.tree_inlining.cannot_inline_tree_fn (&fn);

  /* If we don't have the function body available, we can't inline it.
     However, this should not be recorded since we also get here for
     forward declared inline functions.  Therefore, return at once.  */
  if (!DECL_SAVED_TREE (fn))
    return false;

  /* If we're not inlining at all, then we cannot inline this function.  */
  else if (!flag_inline_trees)
    inlinable = false;

  /* Only try to inline functions if DECL_INLINE is set.  This should be
     true for all functions declared `inline', and for all other functions
     as well with -finline-functions.

     Don't think of disregarding DECL_INLINE when flag_inline_trees == 2;
     it's the front-end that must set DECL_INLINE in this case, because
     dwarf2out loses if a function that does not have DECL_INLINE set is
     inlined anyway.  That is why we have both DECL_INLINE and
     DECL_DECLARED_INLINE_P.  */
  /* FIXME: When flag_inline_trees dies, the check for flag_unit_at_a_time
	    here should be redundant.  */
  else if (!DECL_INLINE (fn) && !flag_unit_at_a_time)
    inlinable = false;

  else if (inline_forbidden_p (fn))
    {
      /* See if we should warn about uninlinable functions.  Previously,
	 some of these warnings would be issued while trying to expand
	 the function inline, but that would cause multiple warnings
	 about functions that would for example call alloca.  But since
	 this is a property of the function, just one warning is enough.
	 As a bonus we can now give more details about the reason why a
	 function is not inlinable.
	 We only warn for functions declared `inline' by the user.  */
      bool do_warning = (warn_inline
			 && DECL_INLINE (fn)
			 && DECL_DECLARED_INLINE_P (fn)
			 && !DECL_IN_SYSTEM_HEADER (fn));

      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
	sorry (inline_forbidden_reason, fn, fn);
      else if (do_warning)
	warning (0, inline_forbidden_reason, fn, fn);

      inlinable = false;
    }

  /* Squirrel away the result so that we don't have to check again.  */
  DECL_UNINLINABLE (fn) = !inlinable;

  return inlinable;
}
/* Estimate the cost of a memory move.  Use machine dependent
   word size and take possible memcpy call into account.  */

static int
estimate_move_cost (tree type)
{
  HOST_WIDE_INT size;

  size = int_size_in_bytes (type);

  if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO)
    /* Cost of a memcpy call, 3 arguments and the call.  */
    return 4;
  else
    return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
}
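/* A worked example (MOVE_MAX_PIECES is target-dependent; 8 is used
   here purely for illustration): a 20-byte structure costs
   (20 + 8 - 1) / 8 = 3 move instructions, while anything larger than
   MOVE_MAX_PIECES * MOVE_RATIO is assumed to become a memcpy call and
   is charged the flat cost of 4 above.  */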
/* Used by estimate_num_insns.  Estimate number of instructions seen
   by given statement.  */

static tree
estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
{
  int *count = (int *) data;
  tree x = *tp;

  if (IS_TYPE_OR_DECL_P (x))
    {
      *walk_subtrees = 0;
      return NULL;
    }
  /* Assume that constants and references count as nothing.  These should
     be dominated by the operations among them, which we do count, and
     they are a common target of CSE and similar optimizations.  */
  else if (CONSTANT_CLASS_P (x) || REFERENCE_CLASS_P (x))
    return NULL;

  switch (TREE_CODE (x))
    {
    /* Containers have no cost.  */
    case TREE_LIST:
    case TREE_VEC:
    case BLOCK:
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case OBJ_TYPE_REF:
    case EXC_PTR_EXPR: /* ??? */
    case FILTER_EXPR: /* ??? */
    case COMPOUND_EXPR:
    case BIND_EXPR:
    case WITH_CLEANUP_EXPR:
    case NOP_EXPR:
    case VIEW_CONVERT_EXPR:
    case SAVE_EXPR:
    case ADDR_EXPR:
    case COMPLEX_EXPR:
    case RANGE_EXPR:
    case CASE_LABEL_EXPR:
    case SSA_NAME:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case STATEMENT_LIST:
    case ERROR_MARK:
    case NON_LVALUE_EXPR:
    case FDESC_EXPR:
    case VA_ARG_EXPR:
    case TRY_CATCH_EXPR:
    case TRY_FINALLY_EXPR:
    case LABEL_EXPR:
    case GOTO_EXPR:
    case RETURN_EXPR:
    case EXIT_EXPR:
    case LOOP_EXPR:
    case PHI_NODE:
    case WITH_SIZE_EXPR:
      break;

    /* We don't account constants for now.  Assume that the cost is amortized
       by operations that do use them.  We may re-consider this decision once
       we are able to optimize the tree before estimating its size and break
       out static initializers.  */
    case IDENTIFIER_NODE:
    case INTEGER_CST:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case STRING_CST:
      *walk_subtrees = 0;
      return NULL;

    /* Try to estimate the cost of assignments.  We have three cases to
       deal with:
       1) Simple assignments to registers;
       2) Stores to things that must live in memory.  This includes
	  "normal" stores to scalars, but also assignments of large
	  structures, or constructors of big arrays;
       3) TARGET_EXPRs.

       Let us look at the first two cases, assuming we have "a = b + C":
       <modify_expr <var_decl "a"> <plus_expr <var_decl "b"> <constant C>>
       If "a" is a GIMPLE register, the assignment to it is free on almost
       any target, because "a" usually ends up in a real register.  Hence
       the only cost of this expression comes from the PLUS_EXPR, and we
       can ignore the MODIFY_EXPR.
       If "a" is not a GIMPLE register, the assignment to "a" will most
       likely be a real store, so the cost of the MODIFY_EXPR is the cost
       of moving something into "a", which we compute using the function
       estimate_move_cost.

       The third case deals with TARGET_EXPRs, for which the semantics are
       that a temporary is assigned, unless the TARGET_EXPR itself is being
       assigned to something else.  In the latter case we do not need the
       temporary.  E.g. in <modify_expr <var_decl "a"> <target_expr>>, the
       MODIFY_EXPR is free.  */
    case INIT_EXPR:
    case MODIFY_EXPR:
      /* Is the right-hand side a TARGET_EXPR?  */
      if (TREE_CODE (TREE_OPERAND (x, 1)) == TARGET_EXPR)
	break;
      /* ... fall through ...  */

    case TARGET_EXPR:
      x = TREE_OPERAND (x, 0);
      /* Is this an assignment to a register?  */
      if (is_gimple_reg (x))
	break;
      /* Otherwise it's a store, so fall through to compute the move cost.  */

    case CONSTRUCTOR:
      *count += estimate_move_cost (TREE_TYPE (x));
      break;

    /* Assign cost of 1 to usual operations.
       ??? We may consider mapping RTL costs to this.  */
    case COND_EXPR:
    case VEC_COND_EXPR:

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:

    case FIX_TRUNC_EXPR:
    case FIX_CEIL_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_ROUND_EXPR:

    case NEGATE_EXPR:
    case FLOAT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case ABS_EXPR:

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case BIT_NOT_EXPR:

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_NOT_EXPR:

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:

    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:

    case CONVERT_EXPR:

    case CONJ_EXPR:

    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:

    case SWITCH_EXPR:

    case ASM_EXPR:

    case REALIGN_LOAD_EXPR:

    case RESX_EXPR:
      *count += 1;
      break;

    /* Few special cases of expensive operations.  This is useful
       to avoid inlining on functions having too many of these.  */
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
      *count += 10;
      break;

    case CALL_EXPR:
      {
	tree decl = get_callee_fndecl (x);
	tree arg;

	if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (decl))
	    {
	    case BUILT_IN_CONSTANT_P:
	      *walk_subtrees = 0;
	      return NULL_TREE;
	    case BUILT_IN_EXPECT:
	      return NULL_TREE;
	    default:
	      break;
	    }

	/* Our cost must be kept in sync with cgraph_estimate_size_after_inlining
	   that does use function declaration to figure out the arguments.  */
	if (!decl || DECL_ARGUMENTS (decl) == NULL_TREE)
	  {
	    for (arg = TREE_OPERAND (x, 1); arg; arg = TREE_CHAIN (arg))
	      *count += estimate_move_cost (TREE_TYPE (TREE_VALUE (arg)));
	  }
	else
	  {
	    for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
	      *count += estimate_move_cost (TREE_TYPE (arg));
	  }

	*count += PARAM_VALUE (PARAM_INLINE_CALL_COST);
	break;
      }

    default:
      gcc_unreachable ();
    }
  return NULL;
}
/* Estimate number of instructions that will be created by expanding EXPR.  */

int
estimate_num_insns (tree expr)
{
  int num = 0;
  struct pointer_set_t *visited_nodes;
  basic_block bb;
  block_stmt_iterator bsi;
  struct function *my_function;

  /* If we're given an entire function, walk the CFG.  */
  if (TREE_CODE (expr) == FUNCTION_DECL)
    {
      my_function = DECL_STRUCT_FUNCTION (expr);
      gcc_assert (my_function && my_function->cfg);
      visited_nodes = pointer_set_create ();
      FOR_EACH_BB_FN (bb, my_function)
	{
	  for (bsi = bsi_start (bb);
	       !bsi_end_p (bsi);
	       bsi_next (&bsi))
	    {
	      walk_tree (bsi_stmt_ptr (bsi), estimate_num_insns_1,
			 &num, visited_nodes);
	    }
	}
      pointer_set_destroy (visited_nodes);
    }
  else
    walk_tree_without_duplicates (&expr, estimate_num_insns_1, &num);

  return num;
}
/* Initialized with NOGC, making this poisonous to the garbage collector.  */
static varray_type cfun_stack;

void
push_cfun (struct function *new_cfun)
{
  static bool initialized = false;

  if (!initialized)
    {
      VARRAY_GENERIC_PTR_NOGC_INIT (cfun_stack, 20, "cfun_stack");
      initialized = true;
    }
  VARRAY_PUSH_GENERIC_PTR (cfun_stack, cfun);
  cfun = new_cfun;
}

void
pop_cfun (void)
{
  cfun = (struct function *) VARRAY_TOP_GENERIC_PTR (cfun_stack);
  VARRAY_POP (cfun_stack);
}
/* Install new lexical TREE_BLOCK underneath 'current_block'.  */
static void
add_lexical_block (tree current_block, tree new_block)
{
  tree *blk_p;

  /* Walk to the last sub-block.  */
  for (blk_p = &BLOCK_SUBBLOCKS (current_block);
       *blk_p;
       blk_p = &TREE_CHAIN (*blk_p))
    ;
  *blk_p = new_block;
  BLOCK_SUPERCONTEXT (new_block) = current_block;
  BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
}
1858 /* If *TP is a CALL_EXPR, replace it with its inline expansion. */
1861 expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
1869 tree return_slot_addr;
1871 location_t saved_location;
1872 struct cgraph_edge *cg_edge;
1874 basic_block return_block;
1876 block_stmt_iterator bsi, stmt_bsi;
1877 bool successfully_inlined = FALSE;
1880 struct cgraph_node *old_node;
1883 /* See what we've got. */
1884 id = (inline_data *) data;
1887 /* Set input_location here so we get the right instantiation context
1888 if we call instantiate_decl from inlinable_function_p. */
1889 saved_location = input_location;
1890 if (EXPR_HAS_LOCATION (t))
1891 input_location = EXPR_LOCATION (t);
1893 /* From here on, we're only interested in CALL_EXPRs. */
1894 if (TREE_CODE (t) != CALL_EXPR)
1897 /* First, see if we can figure out what function is being called.
1898 If we cannot, then there is no hope of inlining the function. */
1899 fn = get_callee_fndecl (t);
1903 /* Turn forward declarations into real ones. */
1904 fn = cgraph_node (fn)->decl;
1906 /* If fn is a declaration of a function in a nested scope that was
1907 globally declared inline, we don't set its DECL_INITIAL.
1908 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
1909 C++ front-end uses it for cdtors to refer to their internal
1910 declarations, that are not real functions. Fortunately those
1911 don't have trees to be saved, so we can tell by checking their
1913 if (! DECL_INITIAL (fn)
1914 && DECL_ABSTRACT_ORIGIN (fn)
1915 && DECL_SAVED_TREE (DECL_ABSTRACT_ORIGIN (fn)))
1916 fn = DECL_ABSTRACT_ORIGIN (fn);
1918 /* Objective C and fortran still calls tree_rest_of_compilation directly.
1919 Kill this check once this is fixed. */
1920 if (!id->current_node->analyzed)
1923 cg_edge = cgraph_edge (id->current_node, t);
1925 /* Constant propagation on argument done during previous inlining
1926 may create new direct call. Produce an edge for it. */
1929 struct cgraph_node *dest = cgraph_node (fn);
1931 /* We have missing edge in the callgraph. This can happen in one case
1932 where previous inlining turned indirect call into direct call by
1933 constant propagating arguments. In all other cases we hit a bug
1934 (incorrect node sharing is most common reason for missing edges. */
1935 gcc_assert (dest->needed || !flag_unit_at_a_time);
1936 cgraph_create_edge (id->node, dest, t,
1937 bb->count, bb->loop_depth)->inline_failed
1938 = N_("originally indirect function call not considered for inlining");
1942 /* Don't try to inline functions that are not well-suited to
1944 if (!cgraph_inline_p (cg_edge, &reason))
1946 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
1948 sorry ("%Jinlining failed in call to %qF: %s", fn, fn, reason);
1949 sorry ("called from here");
1951 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
1952 && !DECL_IN_SYSTEM_HEADER (fn)
1954 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn)))
1956 warning (0, "%Jinlining failed in call to %qF: %s", fn, fn, reason);
1957 warning (0, "called from here");
1962 #ifdef ENABLE_CHECKING
1963 if (cg_edge->callee->decl != id->node->decl)
1964 verify_cgraph_node (cg_edge->callee);
1967 /* We will be inlining this callee. */
1969 id->eh_region = lookup_stmt_eh_region (stmt);
1971 /* Split the block holding the CALL_EXPR. */
1973 e = split_block (bb, stmt);
1975 return_block = e->dest;
1978 /* split_block splits before the statement, work around this by moving
1979 the call into the first half_bb. Not pretty, but seems easier than
1980 doing the CFG manipulation by hand when the CALL_EXPR is in the last
1982 stmt_bsi = bsi_last (bb);
1983 bsi = bsi_start (return_block);
1984 if (!bsi_end_p (bsi))
1985 bsi_move_before (&stmt_bsi, &bsi);
1988 tree stmt = bsi_stmt (stmt_bsi);
1989 bsi_remove (&stmt_bsi);
1990 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
1992 stmt_bsi = bsi_start (return_block);
1994 /* Build a block containing code to initialize the arguments, the
1995 actual inline expansion of the body, and a label for the return
1996 statements within the function to jump to. The type of the
1997 statement expression is the return type of the function call. */
1998 id->block = make_node (BLOCK);
1999 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
2000 add_lexical_block (TREE_BLOCK (stmt), id->block);
2003 /* Local declarations will be replaced by their equivalents in this
2006 id->decl_map = splay_tree_new (splay_tree_compare_pointers,
2009 /* Initialize the parameters. */
2010 args = TREE_OPERAND (t, 1);
2011 if (CALL_EXPR_HAS_RETURN_SLOT_ADDR (t))
2013 return_slot_addr = TREE_VALUE (args);
2014 args = TREE_CHAIN (args);
2017 return_slot_addr = NULL_TREE;
2019 initialize_inlined_parameters (id, args, TREE_OPERAND (t, 2), fn, bb);
2021 /* Record the function we are about to inline. */
2024 /* Return statements in the function body will be replaced by jumps
2025 to the RET_LABEL. */
2027 gcc_assert (DECL_INITIAL (fn));
2028 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
2030 /* Find the lhs to which the result of this call is assigned. */
2032 if (TREE_CODE (modify_dest) == MODIFY_EXPR)
2034 modify_dest = TREE_OPERAND (modify_dest, 0);
2036 /* The function which we are inlining might not return a value,
2037 in which case we should issue a warning that the function
2038 does not return a value. In that case the optimizers will
2039 see that the variable to which the value is assigned was not
2040 initialized. We do not want to issue a warning about that
2041 uninitialized variable. */
2042 if (DECL_P (modify_dest))
2043 TREE_NO_WARNING (modify_dest) = 1;
2048 /* Declare the return variable for the function. */
2049 decl = declare_return_variable (id, return_slot_addr,
2050 modify_dest, &use_retvar);
2051 /* Do this only if declare_return_variable created a new one. */
2052 if (decl && !return_slot_addr && decl != modify_dest)
2053 declare_inline_vars (id->block, decl);
2055 /* After we've initialized the parameters, we insert the body of the
2057 old_node = id->current_node;
2059 /* Anoint the callee-to-be-duplicated as the "current_node." When
2060 CALL_EXPRs within callee are duplicated, the edges from callee to
2061 callee's callees (caller's grandchildren) will be cloned. */
2062 id->current_node = cg_edge->callee;
2064 /* This is it. Duplicate the callee body. Assume callee is
2065 pre-gimplified. Note that we must not alter the caller
2066 function in any way before this point, as this CALL_EXPR may be
2067 a self-referential call; if we're calling ourselves, we need to
2068 duplicate our body before altering anything. */
2069 copy_body (id, bb->count, bb->frequency, bb, return_block);
2070 id->current_node = old_node;
  /* Clean up.  */
  splay_tree_delete (id->decl_map);
  id->decl_map = st;

  /* If the inlined function returns a result that we care about,
     clobber the CALL_EXPR with a reference to the return variable.  */
  if (use_retvar && (TREE_CODE (bsi_stmt (stmt_bsi)) != CALL_EXPR))
    {
      *tp = use_retvar;
      maybe_clean_or_replace_eh_stmt (stmt, stmt);
    }
  else
    /* We're modifying a TSI owned by gimple_expand_calls_inline ();
       tsi_delink () will leave the iterator in a sane state.  */
    bsi_remove (&stmt_bsi);

  bsi_next (&bsi);
  if (bsi_end_p (bsi))
    tree_purge_dead_eh_edges (return_block);
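
  /* Why purge here, for illustration only: the throwing CALL_EXPR that
     used to end this region is gone or has been replaced, so EH edges
     that it justified may now be stale; tree_purge_dead_eh_edges drops
     EH edges of RETURN_BLOCK whose last statement can no longer
     throw.  */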
  /* If the value of the new expression is ignored, that's OK.  We
     don't warn about this for CALL_EXPRs, so we shouldn't warn about
     the equivalent inlined version either.  */
  TREE_USED (*tp) = 1;
  /* Output the inlining info for this abstract function, since it has been
     inlined.  If we don't do this now, we can lose the information about the
     variables in the function when the blocks get blown away as soon as we
     remove the cgraph node.  */
  (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);

  /* Update callgraph if needed.  */
  cgraph_remove_node (cg_edge->callee);
  /* Declare the 'auto' variables added with this inlined body.  */
  record_vars (BLOCK_VARS (id->block));
  id->block = NULL_TREE;

  /* Add local static vars in this inlined callee to caller.  */
  for (t_step = id->callee_cfun->unexpanded_var_list;
       t_step;
       t_step = TREE_CHAIN (t_step))
    {
      var = TREE_VALUE (t_step);
      if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
	record_vars (var);
    }
  successfully_inlined = TRUE;

 egress:
  input_location = saved_location;
  return successfully_inlined;
}
/* Expand call statements reachable from basic block BB.
   We can only have CALL_EXPRs as the "toplevel" tree code or nested
   in a MODIFY_EXPR.  See tree-gimple.c:get_call_expr_in().  We can
   unfortunately not use that function here because we need a pointer
   to the CALL_EXPR, not the tree itself.  */

static bool
gimple_expand_calls_inline (basic_block bb, inline_data *id)
{
  block_stmt_iterator bsi;

  /* Register specific tree functions.  */
  tree_register_cfg_hooks ();
  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree *expr_p = bsi_stmt_ptr (bsi);
      tree stmt = *expr_p;

      if (TREE_CODE (*expr_p) == MODIFY_EXPR)
	expr_p = &TREE_OPERAND (*expr_p, 1);
      if (TREE_CODE (*expr_p) == WITH_SIZE_EXPR)
	expr_p = &TREE_OPERAND (*expr_p, 0);
      if (TREE_CODE (*expr_p) == CALL_EXPR)
	if (expand_call_inline (bb, stmt, expr_p, id))
	  return true;
    }
  return false;
}
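
/* The three statement shapes handled above, for illustration only:

       f (a);				     plain CALL_EXPR
       x = f (a);			     CALL_EXPR under a MODIFY_EXPR
       x = WITH_SIZE_EXPR <f (a), n>;	     variable-sized return value

   In each case EXPR_P ends up pointing at the CALL_EXPR itself, which
   is why get_call_expr_in (which returns the tree, not a pointer to
   it) cannot be used here.  */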
/* Expand calls to inline functions in the body of FN.  */

void
optimize_inline_calls (tree fn)
{
  inline_data id;
  tree prev_fn;
  basic_block bb;

  /* There is no point in performing inlining if errors have already
     occurred -- and we might crash if we try to inline invalid
     code.  */
  if (errorcount || sorrycount)
    return;

  /* Clear out ID.  */
  memset (&id, 0, sizeof (id));

  id.current_node = id.node = cgraph_node (fn);
  id.caller = fn;
  /* The caller might itself be a function that isn't finished yet.  */
  prev_fn = NULL_TREE;
  if (current_function_decl)
    {
      id.caller = current_function_decl;
      prev_fn = current_function_decl;
    }
  push_gimplify_context ();
  /* Reach the trees by walking over the CFG, and note the
     enclosing basic-blocks in the call edges.  */
  /* We walk the blocks going forward, because inlined function bodies
     will split id->current_basic_block, and the new blocks will
     follow it; we'll trudge through them, processing their CALL_EXPRs
     along the way.  */
  FOR_EACH_BB (bb)
    gimple_expand_calls_inline (bb, &id);

  pop_gimplify_context (NULL);
  /* Renumber the (code) basic_blocks consecutively.  */
  compact_blocks ();
  /* Renumber the lexical scoping (non-code) blocks consecutively.  */
  number_blocks (fn);
#ifdef ENABLE_CHECKING
    {
      struct cgraph_edge *e;

      verify_cgraph_node (id.node);

      /* Double check that we inlined everything we are supposed to inline.  */
      for (e = id.node->callees; e; e = e->next_callee)
	gcc_assert (e->inline_failed);
    }
#endif
  /* We need to rescale frequencies again to peak at REG_BR_PROB_BASE
     as inlining loops might increase the maximum.  */
  if (ENTRY_BLOCK_PTR->count)
    counts_to_freqs ();
  fold_cond_expr_cond ();
}
/* FN is a function that has a complete body, and CLONE is a function whose
   body is to be set to a copy of FN, mapping argument declarations according
   to the ARG_MAP splay_tree.  */

void
clone_body (tree clone, tree fn, void *arg_map)
{
  inline_data id;

  /* Clone the body, as if we were making an inline call.  But, remap
     the parameters in the callee to the parameters of caller.  */
  memset (&id, 0, sizeof (id));
  id.caller = clone;
  id.callee = fn;
  id.callee_cfun = DECL_STRUCT_FUNCTION (fn);
  id.decl_map = (splay_tree) arg_map;
  /* Cloning is treated slightly differently from inlining.  Set
     CLONING_P so that it's clear which operation we're performing.  */
  id.cloning_p = true;

  /* We're not inside any EH region.  */
  id.eh_region = -1;

  /* Actually copy the body.  */
  append_to_statement_list_force (copy_generic_body (&id),
				  &DECL_SAVED_TREE (clone));
}
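
/* Note on ARG_MAP, for illustration only: the caller is expected to
   have pre-seeded the splay tree with PARM_DECL -> replacement
   mappings (FN's parameters mapped to CLONE's parameters, or to
   constants), so decl remapping during the walk simply looks nodes up
   in id.decl_map instead of creating fresh copies.  */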
/* Make and return a duplicate of the body of FN.  Put copies of
   DECL_ARGUMENTS in *ARG_COPY and of the static chain, if any, in
   *SC_COPY.  */

tree
save_body (tree fn, tree *arg_copy, tree *sc_copy)
{
  inline_data id;
  tree newdecl, *parg;
  basic_block fn_entry_block;

  memset (&id, 0, sizeof (id));
  id.callee = fn;
  id.callee_cfun = DECL_STRUCT_FUNCTION (fn);
  id.caller = fn;
  id.node = cgraph_node (fn);
  id.saving_p = true;
  id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
  *arg_copy = DECL_ARGUMENTS (fn);

  for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
    {
      tree new = copy_node (*parg);

      lang_hooks.dup_lang_specific_decl (new);
      DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*parg);
      insert_decl_map (&id, *parg, new);
      TREE_CHAIN (new) = TREE_CHAIN (*parg);
      *parg = new;
    }

  *sc_copy = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
  if (*sc_copy)
    {
      tree new = copy_node (*sc_copy);

      lang_hooks.dup_lang_specific_decl (new);
      DECL_ABSTRACT_ORIGIN (new) = DECL_ORIGIN (*sc_copy);
      insert_decl_map (&id, *sc_copy, new);
      TREE_CHAIN (new) = TREE_CHAIN (*sc_copy);
      *sc_copy = new;
    }
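
  /* For illustration only: after the copying above, *ARG_COPY heads a
     fresh chain of PARM_DECL copies mirroring DECL_ARGUMENTS (fn)
     one-for-one; each copy points back at its original via
     DECL_ABSTRACT_ORIGIN, and id.decl_map sends each original to its
     copy, so the duplicated body will refer only to the copies.  */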
  /* We're not inside any EH region.  */
  id.eh_region = -1;

  /* Map the result to itself, so the saved body shares FN's
     RESULT_DECL.  */
  insert_decl_map (&id, DECL_RESULT (fn), DECL_RESULT (fn));

  /* Actually copy the body, including a new (struct function *) and CFG.
     EH info is also duplicated so its labels point into the copied
     CFG, not the original.  */
  fn_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fn));
  newdecl = copy_body (&id, fn_entry_block->count, fn_entry_block->frequency,
		       NULL, NULL);
  DECL_STRUCT_FUNCTION (fn)->saved_cfg = DECL_STRUCT_FUNCTION (newdecl)->cfg;
  DECL_STRUCT_FUNCTION (fn)->saved_eh = DECL_STRUCT_FUNCTION (newdecl)->eh;

  /* Clean up.  */
  splay_tree_delete (id.decl_map);
  return newdecl;
}
/* Passed to walk_tree.  Copies the node pointed to, if appropriate.  */

tree
copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  enum tree_code code = TREE_CODE (*tp);

  /* We make copies of most nodes.  */
  if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
      || code == TREE_LIST
      || code == TREE_VEC
      || code == TYPE_DECL)
    {
      /* Because the chain gets clobbered when we make a copy, we save it
	 here.  */
      tree chain = TREE_CHAIN (*tp);
      tree new;

      /* Copy the node.  */
      new = copy_node (*tp);

      /* Propagate mudflap marked-ness.  */
      if (flag_mudflap && mf_marked_p (*tp))
	mf_mark (new);

      *tp = new;

      /* Now, restore the chain, if appropriate.  That will cause
	 walk_tree to walk into the chain as well.  */
      if (code == PARM_DECL || code == TREE_LIST)
	TREE_CHAIN (*tp) = chain;

      /* For now, we don't update BLOCKs when we make copies.  So, we
	 have to nullify all BIND_EXPRs.  */
      if (TREE_CODE (*tp) == BIND_EXPR)
	BIND_EXPR_BLOCK (*tp) = NULL_TREE;
    }
  /* Types, declarations and constants are shared, not copied; don't
     walk into them.  */
  else if (TREE_CODE_CLASS (code) == tcc_type)
    *walk_subtrees = 0;
  else if (TREE_CODE_CLASS (code) == tcc_declaration)
    *walk_subtrees = 0;
  else if (TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;
  else
    gcc_assert (code != STATEMENT_LIST);

  return NULL_TREE;
}
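
/* For illustration only: copying the expression "a + b" via
   walk_tree (&t, copy_tree_r, NULL, NULL) duplicates the PLUS_EXPR
   node itself, but the VAR_DECLs for "a" and "b" and their types are
   shared with the original tree; deep-copying declarations is the job
   of the decl_map machinery, not of copy_tree_r.  */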
/* The SAVE_EXPR pointed to by TP is being copied.  If ST contains
   information indicating to what new SAVE_EXPR this one should be
   mapped, use that one.  Otherwise, create a new node and enter it in
   ST.  */

static void
remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
{
  splay_tree st = (splay_tree) st_;
  splay_tree_node n;
  tree t;

  /* See if we already encountered this SAVE_EXPR.  */
  n = splay_tree_lookup (st, (splay_tree_key) *tp);

  /* If we didn't already remap this SAVE_EXPR, do so now.  */
  if (!n)
    {
      t = copy_node (*tp);

      /* Remember this SAVE_EXPR.  */
      splay_tree_insert (st, (splay_tree_key) *tp, (splay_tree_value) t);
      /* Make sure we don't remap an already-remapped SAVE_EXPR.  */
      splay_tree_insert (st, (splay_tree_key) t, (splay_tree_value) t);
    }
  else
    {
      /* We've already walked into this SAVE_EXPR; don't do it again.  */
      *walk_subtrees = 0;
      t = (tree) n->value;
    }

  /* Replace this SAVE_EXPR with the copy.  */
  *tp = t;
}
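
/* Why both encounters must agree, for illustration only: in a tree
   such as

       SAVE_EXPR <a + b> * SAVE_EXPR <a + b>

   where both operands are the *same* SAVE_EXPR node, the two visits
   during the copy must yield the same replacement node, or the copy
   would evaluate "a + b" twice and break the evaluate-once semantics
   of SAVE_EXPR.  */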
/* Called via walk_tree.  If *TP points to a LABEL_EXPR for a local
   label, copies the declaration and enters it in the splay_tree in
   DATA (which is really an `inline_data *').  */

static tree
mark_local_for_remap_r (tree *tp, int *walk_subtrees, void *data)
{
  inline_data *id = (inline_data *) data;

  /* Don't walk into types.  */
  if (TYPE_P (*tp))
    *walk_subtrees = 0;

  else if (TREE_CODE (*tp) == LABEL_EXPR)
    {
      tree decl = TREE_OPERAND (*tp, 0);

      /* Copy the decl and remember the copy.  */
      insert_decl_map (id, decl,
		       copy_decl_for_inlining (decl, DECL_CONTEXT (decl),
					       DECL_CONTEXT (decl)));
    }

  return NULL_TREE;
}
/* Perform any modifications to EXPR required when it is unsaved.  Does
   not recurse into EXPR's subtrees.  */

static void
unsave_expr_1 (tree expr)
{
  switch (TREE_CODE (expr))
    {
    case TARGET_EXPR:
      /* Don't mess with a TARGET_EXPR that hasn't been expanded.
	 It's OK for this to happen if it was part of a subtree that
	 isn't immediately expanded, such as operand 2 of another
	 TARGET_EXPR.  */
      if (TREE_OPERAND (expr, 1))
	break;

      TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
      TREE_OPERAND (expr, 3) = NULL_TREE;
      break;

    default:
      break;
    }
}
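
/* Background, for illustration only: in a TARGET_EXPR, operand 1 is
   the initializer and operand 3 holds a saved copy of it.  Once the
   initializer has been consumed (operand 1 cleared), unsaving re-arms
   the node by moving the saved copy back into operand 1, so the fresh
   copy of the tree can be expanded again.  */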
/* Called via walk_tree when an expression is unsaved.  Using the
   decl_map in DATA (which is really an `inline_data *'), remaps all
   local declarations to appropriate replacements.  */

static tree
unsave_r (tree *tp, int *walk_subtrees, void *data)
{
  inline_data *id = (inline_data *) data;
  splay_tree st = id->decl_map;
  splay_tree_node n;

  /* Only a local declaration (variable or label).  */
  if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
      || TREE_CODE (*tp) == LABEL_DECL)
    {
      /* Lookup the declaration.  */
      n = splay_tree_lookup (st, (splay_tree_key) *tp);

      /* If it's there, remap it.  */
      if (n)
	*tp = (tree) n->value;
    }
  else if (TREE_CODE (*tp) == STATEMENT_LIST)
    copy_statement_list (tp);
  else if (TREE_CODE (*tp) == BIND_EXPR)
    copy_bind_expr (tp, walk_subtrees, id);
  else if (TREE_CODE (*tp) == SAVE_EXPR)
    remap_save_expr (tp, st, walk_subtrees);
  else
    {
      copy_tree_r (tp, walk_subtrees, NULL);

      /* Do whatever unsaving is required.  */
      unsave_expr_1 (*tp);
    }

  /* Keep iterating.  */
  return NULL_TREE;
}
/* Copies everything in EXPR and replaces variables, labels
   and SAVE_EXPRs local to EXPR.  */

tree
unsave_expr_now (tree expr)
{
  inline_data id;

  /* There's nothing to do for NULL_TREE.  */
  if (expr == 0)
    return expr;

  /* Set up ID.  */
  memset (&id, 0, sizeof (id));
  id.callee = current_function_decl;
  id.caller = current_function_decl;
  id.decl_map = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);

  /* Walk the tree once to find local labels.  */
  walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);

  /* Walk the tree again, copying, remapping, and unsaving.  */
  walk_tree (&expr, unsave_r, &id, NULL);

  /* Clean up.  */
  splay_tree_delete (id.decl_map);

  return expr;
}
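
/* Why two passes, for illustration only: a GOTO_EXPR can reference a
   local label before the walk reaches the corresponding LABEL_EXPR, so
   the label copies must all be entered in the decl_map up front;
   otherwise a goto in the copy could still point at the original
   LABEL_DECL.  */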
/* Allow someone to determine if SEARCH is a child of TOP from gdb.  */

static tree
debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
{
  if (*tp == data)
    return (tree) data;
  else
    return NULL;
}

bool
debug_find_tree (tree top, tree search)
{
  return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
}
/* Declare the variables created by the inliner.  Add all the variables in
   VARS to BLOCK.  */

void
declare_inline_vars (tree block, tree vars)
{
  tree t;

  for (t = vars; t; t = TREE_CHAIN (t))
    DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

  if (block)
    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
}
/* Returns true if we're inlining.  */

static inline bool
inlining_p (inline_data *id)
{
  return (!id->saving_p && !id->cloning_p);
}