2 Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Contributed by Alexandre Oliva <aoliva@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "tree-inline.h"
34 #include "insn-config.h"
36 #include "langhooks.h"
37 #include "basic-block.h"
38 #include "tree-iterator.h"
41 #include "tree-mudflap.h"
42 #include "tree-flow.h"
46 #include "diagnostic.h"
49 #include "pointer-set.h"
51 #include "value-prof.h"
52 #include "tree-pass.h"
54 #include "integrate.h"
56 /* I'm not real happy about this, but we need to handle gimple and non-gimple trees.  */
60 /* Inlining, Cloning, Versioning, Parallelization
62 Inlining: a function body is duplicated, but the PARM_DECLs are
63 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
64 MODIFY_EXPRs that store to a dedicated returned-value variable.
65 The duplicated eh_region info of the copy will later be appended
66 to the info for the caller; the eh_region info in copied throwing
67 statements and RESX statements is adjusted accordingly.
69 Cloning: (only in C++) We have one body for a con/de/structor, and
70 multiple function decls, each with a unique parameter list.
71 Duplicate the body, using the given splay tree; some parameters
72 will become constants (like 0 or 1).
74 Versioning: a function body is duplicated, and the result is a new
75 function rather than blocks inlined into an existing function as with
76 inlining.  Some parameters will become constants.
78 Parallelization: a region of a function is duplicated resulting in
79 a new function. Variables may be replaced with complex expressions
80 to enable shared variable semantics.
82 All of these will simultaneously look up any callgraph edges.  If
83 we're going to inline the duplicated function body, and the given
84 function has some cloned callgraph nodes (one for each place this
85 function will be inlined) those callgraph edges will be duplicated.
86 If we're cloning the body, those callgraph edges will be
87 updated to point into the new body. (Note that the original
88 callgraph node and edge list will not be altered.)
90 See the CALL_EXPR handling case in copy_tree_body_r (). */
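/* As a rough illustration of the inlining transform described above
   (the identifiers below are made up for the example), inlining

       int sq (int x) { return x * x; }

   at a call site "y = sq (a);" copies the body of sq, remaps the
   PARM_DECL x to a fresh local VAR_DECL x.1 initialized from a, and
   turns the RETURN_EXPR into an assignment to a dedicated return
   variable, roughly:

       x.1 = a;  retval.2 = x.1 * x.1;  y = retval.2;  */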
94 o In order to make inlining-on-trees work, we pessimized
95 function-local static constants. In particular, they are now
96 always output, even when not addressed. Fix this by treating
97 function-local static constants just like global static
98 constants; the back-end already knows not to output them if they are not needed.
101 o Provide heuristics to clamp inlining of recursive template calls?
105 /* Weights that estimate_num_insns uses for heuristics in inlining. */
107 eni_weights eni_inlining_weights;
109 /* Weights that estimate_num_insns uses to estimate the size of the produced code.  */
112 eni_weights eni_size_weights;
114 /* Weights that estimate_num_insns uses to estimate the time necessary
115 to execute the produced code. */
117 eni_weights eni_time_weights;
121 static tree declare_return_variable (copy_body_data *, tree, tree, tree *);
122 static void remap_block (tree *, copy_body_data *);
123 static void copy_bind_expr (tree *, int *, copy_body_data *);
124 static tree mark_local_for_remap_r (tree *, int *, void *);
125 static void unsave_expr_1 (tree);
126 static tree unsave_r (tree *, int *, void *);
127 static void declare_inline_vars (tree, tree);
128 static void remap_save_expr (tree *, void *, int *);
129 static void prepend_lexical_block (tree current_block, tree new_block);
130 static tree copy_decl_to_var (tree, copy_body_data *);
131 static tree copy_result_decl_to_var (tree, copy_body_data *);
132 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
133 static gimple remap_gimple_stmt (gimple, copy_body_data *);
134 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
136 /* Insert a tree->tree mapping for ID.  Although the name suggests
137 that the trees should be variables, this is used for more than that.  */
140 insert_decl_map (copy_body_data *id, tree key, tree value)
142 *pointer_map_insert (id->decl_map, key) = value;
144 /* Always insert an identity map as well. If we see this same new
145 node again, we won't want to duplicate it a second time. */
147 *pointer_map_insert (id->decl_map, value) = value;
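/* For example, remapping a callee PARM_DECL "n" to a fresh VAR_DECL "n.3"
   (made-up names) records both n -> n.3 and n.3 -> n.3, so a later walk
   over the already-copied body that meets n.3 maps it to itself instead
   of duplicating it again.  */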
150 /* Insert a tree->tree mapping for ID. This is only used for
154 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
156 if (!gimple_in_ssa_p (id->src_cfun))
159 if (!MAY_HAVE_DEBUG_STMTS)
162 if (!target_for_debug_bind (key))
165 gcc_assert (TREE_CODE (key) == PARM_DECL);
166 gcc_assert (TREE_CODE (value) == VAR_DECL);
169 id->debug_map = pointer_map_create ();
171 *pointer_map_insert (id->debug_map, key) = value;
174 /* Construct new SSA name for old NAME. ID is the inline context. */
177 remap_ssa_name (tree name, copy_body_data *id)
182 gcc_assert (TREE_CODE (name) == SSA_NAME);
184 n = (tree *) pointer_map_contains (id->decl_map, name);
186 return unshare_expr (*n);
188 /* Do not set DEF_STMT yet as the statement is not copied yet.  We do that in copy_bb.  */
190 new_tree = remap_decl (SSA_NAME_VAR (name), id);
192 /* We might've substituted a constant or another SSA_NAME for the variable.
195 Replace the SSA name representing RESULT_DECL by the variable during
196 inlining: this saves us from the need to introduce a PHI node in case the
197 return value is only partly initialized.  */
198 if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
199 && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
200 || !id->transform_return_to_modify))
202 new_tree = make_ssa_name (new_tree, NULL);
203 insert_decl_map (id, name, new_tree);
204 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
205 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
206 TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
207 if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
209 /* By inlining a function that has an uninitialized variable, we might
210 extend its lifetime (the variable might get reused).  This causes an
211 ICE when we end up extending the lifetime of an SSA name across an
212 abnormal edge, and it also increases register pressure.
214 We simply initialize all uninitialized vars to 0, except for the case
215 where we are inlining into the very first BB.  We could avoid
216 this for all BBs that are not inside strongly connected
217 regions of the CFG, but this is expensive to test.  */
219 && is_gimple_reg (SSA_NAME_VAR (name))
220 && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
221 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
222 || EDGE_COUNT (id->entry_bb->preds) != 1))
224 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
227 init_stmt = gimple_build_assign (new_tree,
228 fold_convert (TREE_TYPE (new_tree),
230 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
231 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
235 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
236 if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name))
238 set_default_def (SSA_NAME_VAR (new_tree), new_tree);
243 insert_decl_map (id, name, new_tree);
247 /* If nonzero, we're remapping the contents of inlined debug
248 statements. If negative, an error has occurred, such as a
249 reference to a variable that isn't available in the inlined function.  */
251 int processing_debug_stmt = 0;
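/* A sketch of how this flag is used below: copy_debug_stmt sets it to 1
   around remapping the operands of a debug bind; if remap_decl then meets
   a decl with no mapping, it flips the flag to -1 instead of creating a
   copy, and copy_debug_stmt responds by dropping the bound value
   (gimple_debug_bind_reset_value) and resetting the flag to 0.  */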
253 /* Remap DECL during the copying of the BLOCK tree for the function. */
256 remap_decl (tree decl, copy_body_data *id)
261 /* We only remap local variables in the current function. */
264 /* See if we have remapped this declaration. */
266 n = (tree *) pointer_map_contains (id->decl_map, decl);
268 if (!n && processing_debug_stmt)
270 processing_debug_stmt = -1;
274 /* If we didn't already have an equivalent for this declaration, create one now.  */
278 /* Make a copy of the variable or label. */
279 tree t = id->copy_decl (decl, id);
281 /* Remember it, so that if we encounter this local entity again
282 we can reuse this copy. Do this early because remap_type may
283 need this decl for TYPE_STUB_DECL. */
284 insert_decl_map (id, decl, t);
289 /* Remap types, if necessary. */
290 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
291 if (TREE_CODE (t) == TYPE_DECL)
292 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
294 /* Remap sizes as necessary. */
295 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
296 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
298 /* If fields, do likewise for offset and qualifier. */
299 if (TREE_CODE (t) == FIELD_DECL)
301 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
302 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
303 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
306 if (cfun && gimple_in_ssa_p (cfun)
307 && (TREE_CODE (t) == VAR_DECL
308 || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
310 tree def = gimple_default_def (id->src_cfun, decl);
312 if (TREE_CODE (decl) != PARM_DECL && def)
314 tree map = remap_ssa_name (def, id);
315 /* Watch out for RESULT_DECLs whose SSA names map directly to them.  */
317 if (TREE_CODE (map) == SSA_NAME
318 && gimple_nop_p (SSA_NAME_DEF_STMT (map)))
319 set_default_def (t, map);
321 add_referenced_var (t);
326 if (id->do_not_unshare)
329 return unshare_expr (*n);
333 remap_type_1 (tree type, copy_body_data *id)
337 /* We do need a copy; build and register it now.  If this is a pointer or
338 reference type, remap the designated type and make a new pointer or reference type.  */
340 if (TREE_CODE (type) == POINTER_TYPE)
342 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
344 TYPE_REF_CAN_ALIAS_ALL (type));
345 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
346 new_tree = build_type_attribute_qual_variant (new_tree,
347 TYPE_ATTRIBUTES (type),
349 insert_decl_map (id, type, new_tree);
352 else if (TREE_CODE (type) == REFERENCE_TYPE)
354 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
356 TYPE_REF_CAN_ALIAS_ALL (type));
357 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
358 new_tree = build_type_attribute_qual_variant (new_tree,
359 TYPE_ATTRIBUTES (type),
361 insert_decl_map (id, type, new_tree);
365 new_tree = copy_node (type);
367 insert_decl_map (id, type, new_tree);
369 /* This is a new type, not a copy of an old type. Need to reassociate
370 variants. We can handle everything except the main variant lazily. */
371 t = TYPE_MAIN_VARIANT (type);
374 t = remap_type (t, id);
375 TYPE_MAIN_VARIANT (new_tree) = t;
376 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
377 TYPE_NEXT_VARIANT (t) = new_tree;
381 TYPE_MAIN_VARIANT (new_tree) = new_tree;
382 TYPE_NEXT_VARIANT (new_tree) = NULL;
385 if (TYPE_STUB_DECL (type))
386 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
388 /* Lazily create pointer and reference types. */
389 TYPE_POINTER_TO (new_tree) = NULL;
390 TYPE_REFERENCE_TO (new_tree) = NULL;
392 switch (TREE_CODE (new_tree))
396 case FIXED_POINT_TYPE:
399 t = TYPE_MIN_VALUE (new_tree);
400 if (t && TREE_CODE (t) != INTEGER_CST)
401 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
403 t = TYPE_MAX_VALUE (new_tree);
404 if (t && TREE_CODE (t) != INTEGER_CST)
405 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
409 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
410 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
414 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
415 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
420 case QUAL_UNION_TYPE:
424 for (f = TYPE_FIELDS (new_tree); f ; f = TREE_CHAIN (f))
426 t = remap_decl (f, id);
427 DECL_CONTEXT (t) = new_tree;
431 TYPE_FIELDS (new_tree) = nreverse (nf);
437 /* Shouldn't have been thought variable sized. */
441 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
442 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
448 remap_type (tree type, copy_body_data *id)
456 /* See if we have remapped this type. */
457 node = (tree *) pointer_map_contains (id->decl_map, type);
461 /* The type only needs remapping if it's variably modified. */
462 if (! variably_modified_type_p (type, id->src_fn))
464 insert_decl_map (id, type, type);
468 id->remapping_type_depth++;
469 tmp = remap_type_1 (type, id);
470 id->remapping_type_depth--;
475 /* Return the previously remapped type of TYPE in ID.  Return NULL if TYPE
476 is NULL or has not been remapped before.  */
479 remapped_type (tree type, copy_body_data *id)
486 /* See if we have remapped this type. */
487 node = (tree *) pointer_map_contains (id->decl_map, type);
495 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
498 can_be_nonlocal (tree decl, copy_body_data *id)
500 /* We can not duplicate function decls. */
501 if (TREE_CODE (decl) == FUNCTION_DECL)
504 /* Local static vars must be non-local or we get multiple declaration problems.  */
506 if (TREE_CODE (decl) == VAR_DECL
507 && !auto_var_in_fn_p (decl, id->src_fn))
510 /* At the moment dwarf2out can handle only these types of nodes. We
511 can support more later. */
512 if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != PARM_DECL)
515 /* We must use the global type.  We call remapped_type instead of
516 remap_type since we don't want to remap this type here if it
517 hasn't been remapped before. */
518 if (TREE_TYPE (decl) != remapped_type (TREE_TYPE (decl), id))
521 /* Without SSA we can't tell if a variable is used.  */
522 if (!gimple_in_ssa_p (cfun))
525 /* Live variables must be copied so we can attach DECL_RTL. */
533 remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
536 tree new_decls = NULL_TREE;
538 /* Remap its variables. */
539 for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
542 tree origin_var = DECL_ORIGIN (old_var);
544 if (can_be_nonlocal (old_var, id))
546 if (TREE_CODE (old_var) == VAR_DECL
547 && ! DECL_EXTERNAL (old_var)
548 && (var_ann (old_var) || !gimple_in_ssa_p (cfun)))
549 cfun->local_decls = tree_cons (NULL_TREE, old_var,
551 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
552 && !DECL_IGNORED_P (old_var)
553 && nonlocalized_list)
554 VEC_safe_push (tree, gc, *nonlocalized_list, origin_var);
558 /* Remap the variable. */
559 new_var = remap_decl (old_var, id);
561 /* If we didn't remap this variable, we can't mess with its
562 TREE_CHAIN. If we remapped this variable to the return slot, it's
563 already declared somewhere else, so don't declare it here. */
565 if (new_var == id->retvar)
569 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
570 && !DECL_IGNORED_P (old_var)
571 && nonlocalized_list)
572 VEC_safe_push (tree, gc, *nonlocalized_list, origin_var);
576 gcc_assert (DECL_P (new_var));
577 TREE_CHAIN (new_var) = new_decls;
582 return nreverse (new_decls);
585 /* Copy the BLOCK to contain remapped versions of the variables
586 therein. And hook the new block into the block-tree. */
589 remap_block (tree *block, copy_body_data *id)
595 /* Make the new block. */
597 new_block = make_node (BLOCK);
598 TREE_USED (new_block) = TREE_USED (old_block);
599 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
600 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
601 BLOCK_NONLOCALIZED_VARS (new_block)
602 = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
605 /* Remap its variables. */
606 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
607 &BLOCK_NONLOCALIZED_VARS (new_block),
612 if (id->transform_lang_insert_block)
613 id->transform_lang_insert_block (new_block);
615 /* Remember the remapped block. */
616 insert_decl_map (id, old_block, new_block);
619 /* Copy the whole block tree and root it in id->block. */
621 remap_blocks (tree block, copy_body_data *id)
624 tree new_tree = block;
629 remap_block (&new_tree, id);
630 gcc_assert (new_tree != block);
631 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
632 prepend_lexical_block (new_tree, remap_blocks (t, id));
633 /* Blocks are in arbitrary order, but make things slightly prettier and do
634 not swap order when producing a copy. */
635 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
640 copy_statement_list (tree *tp)
642 tree_stmt_iterator oi, ni;
645 new_tree = alloc_stmt_list ();
646 ni = tsi_start (new_tree);
647 oi = tsi_start (*tp);
648 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
651 for (; !tsi_end_p (oi); tsi_next (&oi))
653 tree stmt = tsi_stmt (oi);
654 if (TREE_CODE (stmt) == STATEMENT_LIST)
655 copy_statement_list (&stmt);
656 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
661 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
663 tree block = BIND_EXPR_BLOCK (*tp);
664 /* Copy (and replace) the statement. */
665 copy_tree_r (tp, walk_subtrees, NULL);
668 remap_block (&block, id);
669 BIND_EXPR_BLOCK (*tp) = block;
672 if (BIND_EXPR_VARS (*tp))
673 /* This will remap a lot of the same decls again, but this should be harmless.  */
675 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
679 /* Create a new gimple_seq by remapping all the statements in BODY
680 using the inlining information in ID. */
683 remap_gimple_seq (gimple_seq body, copy_body_data *id)
685 gimple_stmt_iterator si;
686 gimple_seq new_body = NULL;
688 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
690 gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
691 gimple_seq_add_stmt (&new_body, new_stmt);
698 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
699 block using the mapping information in ID. */
702 copy_gimple_bind (gimple stmt, copy_body_data *id)
705 tree new_block, new_vars;
706 gimple_seq body, new_body;
708 /* Copy the statement. Note that we purposely don't use copy_stmt
709 here because we need to remap statements as we copy. */
710 body = gimple_bind_body (stmt);
711 new_body = remap_gimple_seq (body, id);
713 new_block = gimple_bind_block (stmt);
715 remap_block (&new_block, id);
717 /* This will remap a lot of the same decls again, but this should be harmless.  */
719 new_vars = gimple_bind_vars (stmt);
721 new_vars = remap_decls (new_vars, NULL, id);
723 new_bind = gimple_build_bind (new_vars, new_body, new_block);
729 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
730 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
731 WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
732 recursing into the children of *TP.  */
735 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
737 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
738 copy_body_data *id = (copy_body_data *) wi_p->info;
739 tree fn = id->src_fn;
741 if (TREE_CODE (*tp) == SSA_NAME)
743 *tp = remap_ssa_name (*tp, id);
747 else if (auto_var_in_fn_p (*tp, fn))
749 /* Local variables and labels need to be replaced by equivalent
750 variables. We don't want to copy static variables; there's
751 only one of those, no matter how many times we inline the
752 containing function.  Similarly for globals from an outer function.  */
756 /* Remap the declaration. */
757 new_decl = remap_decl (*tp, id);
758 gcc_assert (new_decl);
759 /* Replace this variable with the copy. */
760 STRIP_TYPE_NOPS (new_decl);
761 /* ??? The C++ frontend uses void * pointer zero to initialize
762 any other type. This confuses the middle-end type verification.
763 As cloned bodies do not go through gimplification again the fixup
764 there doesn't trigger. */
765 if (TREE_CODE (new_decl) == INTEGER_CST
766 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
767 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
771 else if (TREE_CODE (*tp) == STATEMENT_LIST)
773 else if (TREE_CODE (*tp) == SAVE_EXPR)
775 else if (TREE_CODE (*tp) == LABEL_DECL
776 && (!DECL_CONTEXT (*tp)
777 || decl_function_context (*tp) == id->src_fn))
778 /* These may need to be remapped for EH handling. */
779 *tp = remap_decl (*tp, id);
780 else if (TYPE_P (*tp))
781 /* Types may need remapping as well. */
782 *tp = remap_type (*tp, id);
783 else if (CONSTANT_CLASS_P (*tp))
785 /* If this is a constant, we have to copy the node iff the type
786 will be remapped. copy_tree_r will not copy a constant. */
787 tree new_type = remap_type (TREE_TYPE (*tp), id);
789 if (new_type == TREE_TYPE (*tp))
792 else if (TREE_CODE (*tp) == INTEGER_CST)
793 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
794 TREE_INT_CST_HIGH (*tp));
797 *tp = copy_node (*tp);
798 TREE_TYPE (*tp) = new_type;
803 /* Otherwise, just copy the node. Note that copy_tree_r already
804 knows not to copy VAR_DECLs, etc., so this is safe. */
805 if (TREE_CODE (*tp) == INDIRECT_REF)
807 /* Get rid of *& from inline substitutions that can happen when a
808 pointer argument is an ADDR_EXPR. */
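/* E.g. (an illustrative, made-up example): inlining

       void set (int *p) { *p = 1; }

   at a call "set (&v);" substitutes &v for p, so the copied *p would
   read *&v; the folding below reduces that back to plain v.  */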
809 tree decl = TREE_OPERAND (*tp, 0);
812 n = (tree *) pointer_map_contains (id->decl_map, decl);
815 tree type, new_tree, old;
817 /* If we happen to get an ADDR_EXPR in n->value, strip
818 it manually here as we'll eventually get ADDR_EXPRs
819 which lie about their types pointed to. In this case
820 build_fold_indirect_ref wouldn't strip the
821 INDIRECT_REF, but we absolutely rely on that. As
822 fold_indirect_ref does other useful transformations,
823 try that first, though. */
824 type = TREE_TYPE (TREE_TYPE (*n));
825 new_tree = unshare_expr (*n);
827 *tp = gimple_fold_indirect_ref (new_tree);
830 if (TREE_CODE (new_tree) == ADDR_EXPR)
832 *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
834 /* ??? We should either assert here or build
835 a VIEW_CONVERT_EXPR instead of blindly leaking
836 incompatible types to our IL. */
838 *tp = TREE_OPERAND (new_tree, 0);
842 *tp = build1 (INDIRECT_REF, type, new_tree);
843 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
844 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
852 /* Here is the "usual case". Copy this tree node, and then
853 tweak some special cases. */
854 copy_tree_r (tp, walk_subtrees, NULL);
856 /* Global variables we haven't seen yet need to go into referenced
857 vars, unless they are referenced only from types.  */
858 if (gimple_in_ssa_p (cfun)
859 && TREE_CODE (*tp) == VAR_DECL
860 && id->remapping_type_depth == 0
861 && !processing_debug_stmt)
862 add_referenced_var (*tp);
864 /* We should never have TREE_BLOCK set on non-statements. */
866 gcc_assert (!TREE_BLOCK (*tp));
868 if (TREE_CODE (*tp) != OMP_CLAUSE)
869 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
871 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
873 /* The copied TARGET_EXPR has never been expanded, even if the
874 original node was expanded already. */
875 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
876 TREE_OPERAND (*tp, 3) = NULL_TREE;
878 else if (TREE_CODE (*tp) == ADDR_EXPR)
880 /* Variable substitution need not be simple.  In particular, consider
881 the INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
882 TREE_CONSTANT and friends are up-to-date. But make sure
883 to not improperly set TREE_BLOCK on some sub-expressions. */
884 int invariant = is_gimple_min_invariant (*tp);
885 tree block = id->block;
886 id->block = NULL_TREE;
887 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
890 /* Handle the case where we substituted an INDIRECT_REF
891 into the operand of the ADDR_EXPR. */
892 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
893 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
895 recompute_tree_invariant_for_addr_expr (*tp);
897 /* If this used to be invariant, but is not any longer,
898 then regimplification is probably needed. */
899 if (invariant && !is_gimple_min_invariant (*tp))
900 id->regimplify = true;
906 /* Keep iterating. */
911 /* Called from copy_body_id via walk_tree. DATA is really a
912 `copy_body_data *'. */
915 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
917 copy_body_data *id = (copy_body_data *) data;
918 tree fn = id->src_fn;
921 /* Begin by recognizing trees that we'll completely rewrite for the
922 inlining context. Our output for these trees is completely
923 different from our input (e.g. RETURN_EXPR is deleted, and morphs
924 into an edge). Further down, we'll handle trees that get
925 duplicated and/or tweaked. */
927 /* When requested, RETURN_EXPRs should be transformed to just the
928 contained MODIFY_EXPR. The branch semantics of the return will
929 be handled elsewhere by manipulating the CFG rather than a statement. */
930 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
932 tree assignment = TREE_OPERAND (*tp, 0);
934 /* If we're returning something, just turn that into an
935 assignment into the equivalent of the original RESULT_DECL.
936 If the "assignment" is just the result decl, the result
937 decl has already been set (e.g. a recent "foo (&result_decl,
938 ...)"); just toss the entire RETURN_EXPR. */
939 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
941 /* Replace the RETURN_EXPR with (a copy of) the
942 MODIFY_EXPR hanging underneath. */
943 *tp = copy_node (assignment);
945 else /* Else the RETURN_EXPR returns no value. */
948 return (tree) (void *)1;
951 else if (TREE_CODE (*tp) == SSA_NAME)
953 *tp = remap_ssa_name (*tp, id);
958 /* Local variables and labels need to be replaced by equivalent
959 variables. We don't want to copy static variables; there's only
960 one of those, no matter how many times we inline the containing
961 function. Similarly for globals from an outer function. */
962 else if (auto_var_in_fn_p (*tp, fn))
966 /* Remap the declaration. */
967 new_decl = remap_decl (*tp, id);
968 gcc_assert (new_decl);
969 /* Replace this variable with the copy. */
970 STRIP_TYPE_NOPS (new_decl);
974 else if (TREE_CODE (*tp) == STATEMENT_LIST)
975 copy_statement_list (tp);
976 else if (TREE_CODE (*tp) == SAVE_EXPR
977 || TREE_CODE (*tp) == TARGET_EXPR)
978 remap_save_expr (tp, id->decl_map, walk_subtrees);
979 else if (TREE_CODE (*tp) == LABEL_DECL
980 && (! DECL_CONTEXT (*tp)
981 || decl_function_context (*tp) == id->src_fn))
982 /* These may need to be remapped for EH handling. */
983 *tp = remap_decl (*tp, id);
984 else if (TREE_CODE (*tp) == BIND_EXPR)
985 copy_bind_expr (tp, walk_subtrees, id);
986 /* Types may need remapping as well. */
987 else if (TYPE_P (*tp))
988 *tp = remap_type (*tp, id);
990 /* If this is a constant, we have to copy the node iff the type will be
991 remapped. copy_tree_r will not copy a constant. */
992 else if (CONSTANT_CLASS_P (*tp))
994 tree new_type = remap_type (TREE_TYPE (*tp), id);
996 if (new_type == TREE_TYPE (*tp))
999 else if (TREE_CODE (*tp) == INTEGER_CST)
1000 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
1001 TREE_INT_CST_HIGH (*tp));
1004 *tp = copy_node (*tp);
1005 TREE_TYPE (*tp) = new_type;
1009 /* Otherwise, just copy the node. Note that copy_tree_r already
1010 knows not to copy VAR_DECLs, etc., so this is safe. */
1013 /* Here we handle trees that are not completely rewritten.
1014 First we detect some inlining-induced bogosities for discarding.  */
1016 if (TREE_CODE (*tp) == MODIFY_EXPR
1017 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1018 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1020 /* Some assignments VAR = VAR; don't generate any rtl code
1021 and thus don't count as variable modification. Avoid
1022 keeping bogosities like 0 = 0. */
1023 tree decl = TREE_OPERAND (*tp, 0), value;
1026 n = (tree *) pointer_map_contains (id->decl_map, decl);
1030 STRIP_TYPE_NOPS (value);
1031 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1033 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1034 return copy_tree_body_r (tp, walk_subtrees, data);
1038 else if (TREE_CODE (*tp) == INDIRECT_REF)
1040 /* Get rid of *& from inline substitutions that can happen when a
1041 pointer argument is an ADDR_EXPR. */
1042 tree decl = TREE_OPERAND (*tp, 0);
1045 n = (tree *) pointer_map_contains (id->decl_map, decl);
1050 /* If we happen to get an ADDR_EXPR in n->value, strip
1051 it manually here as we'll eventually get ADDR_EXPRs
1052 which lie about their types pointed to. In this case
1053 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1054 but we absolutely rely on that. As fold_indirect_ref
1055 does other useful transformations, try that first, though. */
1056 tree type = TREE_TYPE (TREE_TYPE (*n));
1057 if (id->do_not_unshare)
1060 new_tree = unshare_expr (*n);
1062 *tp = gimple_fold_indirect_ref (new_tree);
1065 if (TREE_CODE (new_tree) == ADDR_EXPR)
1067 *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
1069 /* ??? We should either assert here or build
1070 a VIEW_CONVERT_EXPR instead of blindly leaking
1071 incompatible types to our IL. */
1073 *tp = TREE_OPERAND (new_tree, 0);
1077 *tp = build1 (INDIRECT_REF, type, new_tree);
1078 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1079 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1087 /* Here is the "usual case". Copy this tree node, and then
1088 tweak some special cases. */
1089 copy_tree_r (tp, walk_subtrees, NULL);
1091 /* Global variables we haven't seen yet need to go into referenced
1092 vars, unless they are referenced only from types or debug stmts. */
1093 if (gimple_in_ssa_p (cfun)
1094 && TREE_CODE (*tp) == VAR_DECL
1095 && id->remapping_type_depth == 0
1096 && !processing_debug_stmt)
1097 add_referenced_var (*tp);
1099 /* If EXPR has a block defined, map it to the newly constructed block.
1100 When inlining we want EXPRs without a block to appear in the block
1101 of the function call.  */
1104 new_block = id->block;
1105 if (TREE_BLOCK (*tp))
1108 n = (tree *) pointer_map_contains (id->decl_map,
1113 TREE_BLOCK (*tp) = new_block;
1116 if (TREE_CODE (*tp) != OMP_CLAUSE)
1117 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1119 /* The copied TARGET_EXPR has never been expanded, even if the
1120 original node was expanded already. */
1121 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1123 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1124 TREE_OPERAND (*tp, 3) = NULL_TREE;
1127 /* Variable substitution need not be simple.  In particular, consider the
1128 INDIRECT_REF substitution above.  Make sure that TREE_CONSTANT
1129 and friends are up-to-date. */
1130 else if (TREE_CODE (*tp) == ADDR_EXPR)
1132 int invariant = is_gimple_min_invariant (*tp);
1133 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1135 /* Handle the case where we substituted an INDIRECT_REF
1136 into the operand of the ADDR_EXPR. */
1137 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1138 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1140 recompute_tree_invariant_for_addr_expr (*tp);
1142 /* If this used to be invariant, but is not any longer,
1143 then regimplification is probably needed. */
1144 if (invariant && !is_gimple_min_invariant (*tp))
1145 id->regimplify = true;
1151 /* Keep iterating. */
1155 /* Helper for remap_gimple_stmt. Given an EH region number for the
1156 source function, map that to the duplicate EH region number in
1157 the destination function. */
1160 remap_eh_region_nr (int old_nr, copy_body_data *id)
1162 eh_region old_r, new_r;
1165 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1166 slot = pointer_map_contains (id->eh_map, old_r);
1167 new_r = (eh_region) *slot;
1169 return new_r->index;
1172 /* Similar, but operate on INTEGER_CSTs. */
1175 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1179 old_nr = tree_low_cst (old_t_nr, 0);
1180 new_nr = remap_eh_region_nr (old_nr, id);
1182 return build_int_cst (NULL, new_nr);
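/* E.g. if EH region 3 of the callee was duplicated as region 7 of the
   caller (made-up region numbers), a copied "__builtin_eh_pointer (3)"
   has its argument rewritten to 7 by the callers in remap_gimple_stmt
   below.  */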
1185 /* Helper for copy_bb. Remap statement STMT using the inlining
1186 information in ID. Return the new statement copy. */
1189 remap_gimple_stmt (gimple stmt, copy_body_data *id)
1192 struct walk_stmt_info wi;
1194 bool skip_first = false;
1196 /* Begin by recognizing trees that we'll completely rewrite for the
1197 inlining context. Our output for these trees is completely
1198 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1199 into an edge). Further down, we'll handle trees that get
1200 duplicated and/or tweaked. */
1202 /* When requested, GIMPLE_RETURNs should be transformed to just the
1203 contained GIMPLE_ASSIGN. The branch semantics of the return will
1204 be handled elsewhere by manipulating the CFG rather than the statement.  */
1206 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1208 tree retval = gimple_return_retval (stmt);
1210 /* If we're returning something, just turn that into an
1211 assignment into the equivalent of the original RESULT_DECL.
1212 If RETVAL is just the result decl, the result decl has
1213 already been set (e.g. a recent "foo (&result_decl, ...)");
1214 just toss the entire GIMPLE_RETURN. */
1215 if (retval && TREE_CODE (retval) != RESULT_DECL)
1217 copy = gimple_build_assign (id->retvar, retval);
1218 /* id->retvar is already substituted. Skip it on later remapping. */
1222 return gimple_build_nop ();
1224 else if (gimple_has_substatements (stmt))
1228 /* When cloning bodies from the C++ front end, we will be handed bodies
1229 in High GIMPLE form. Handle here all the High GIMPLE statements that
1230 have embedded statements. */
1231 switch (gimple_code (stmt))
1234 copy = copy_gimple_bind (stmt, id);
1238 s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
1239 copy = gimple_build_catch (gimple_catch_types (stmt), s1);
1242 case GIMPLE_EH_FILTER:
1243 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1244 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1248 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1249 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1250 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1253 case GIMPLE_WITH_CLEANUP_EXPR:
1254 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1255 copy = gimple_build_wce (s1);
1258 case GIMPLE_OMP_PARALLEL:
1259 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1260 copy = gimple_build_omp_parallel
1262 gimple_omp_parallel_clauses (stmt),
1263 gimple_omp_parallel_child_fn (stmt),
1264 gimple_omp_parallel_data_arg (stmt));
1267 case GIMPLE_OMP_TASK:
1268 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1269 copy = gimple_build_omp_task
1271 gimple_omp_task_clauses (stmt),
1272 gimple_omp_task_child_fn (stmt),
1273 gimple_omp_task_data_arg (stmt),
1274 gimple_omp_task_copy_fn (stmt),
1275 gimple_omp_task_arg_size (stmt),
1276 gimple_omp_task_arg_align (stmt));
1279 case GIMPLE_OMP_FOR:
1280 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1281 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1282 copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
1283 gimple_omp_for_collapse (stmt), s2);
1286 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1288 gimple_omp_for_set_index (copy, i,
1289 gimple_omp_for_index (stmt, i));
1290 gimple_omp_for_set_initial (copy, i,
1291 gimple_omp_for_initial (stmt, i));
1292 gimple_omp_for_set_final (copy, i,
1293 gimple_omp_for_final (stmt, i));
1294 gimple_omp_for_set_incr (copy, i,
1295 gimple_omp_for_incr (stmt, i));
1296 gimple_omp_for_set_cond (copy, i,
1297 gimple_omp_for_cond (stmt, i));
1302 case GIMPLE_OMP_MASTER:
1303 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1304 copy = gimple_build_omp_master (s1);
1307 case GIMPLE_OMP_ORDERED:
1308 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1309 copy = gimple_build_omp_ordered (s1);
1312 case GIMPLE_OMP_SECTION:
1313 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1314 copy = gimple_build_omp_section (s1);
1317 case GIMPLE_OMP_SECTIONS:
1318 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1319 copy = gimple_build_omp_sections
1320 (s1, gimple_omp_sections_clauses (stmt));
1323 case GIMPLE_OMP_SINGLE:
1324 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1325 copy = gimple_build_omp_single
1326 (s1, gimple_omp_single_clauses (stmt));
1329 case GIMPLE_OMP_CRITICAL:
1330 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1332 = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
1341 if (gimple_assign_copy_p (stmt)
1342 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1343 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1345 /* Here we handle statements that are not completely rewritten.
1346 First we detect some inlining-induced bogosities for discarding.  */
1349 /* Some assignments VAR = VAR; don't generate any rtl code
1350 and thus don't count as variable modification. Avoid
1351 keeping bogosities like 0 = 0. */
1352 tree decl = gimple_assign_lhs (stmt), value;
1355 n = (tree *) pointer_map_contains (id->decl_map, decl);
1359 STRIP_TYPE_NOPS (value);
1360 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1361 return gimple_build_nop ();
1365 if (gimple_debug_bind_p (stmt))
1367 copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1368 gimple_debug_bind_get_value (stmt),
1370 VEC_safe_push (gimple, heap, id->debug_stmts, copy);
1374 /* Create a new deep copy of the statement. */
1375 copy = gimple_copy (stmt);
1377 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1378 RESX and EH_DISPATCH. */
1380 switch (gimple_code (copy))
1384 tree r, fndecl = gimple_call_fndecl (copy);
1385 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1386 switch (DECL_FUNCTION_CODE (fndecl))
1388 case BUILT_IN_EH_COPY_VALUES:
1389 r = gimple_call_arg (copy, 1);
1390 r = remap_eh_region_tree_nr (r, id);
1391 gimple_call_set_arg (copy, 1, r);
1394 case BUILT_IN_EH_POINTER:
1395 case BUILT_IN_EH_FILTER:
1396 r = gimple_call_arg (copy, 0);
1397 r = remap_eh_region_tree_nr (r, id);
1398 gimple_call_set_arg (copy, 0, r);
1409 int r = gimple_resx_region (copy);
1410 r = remap_eh_region_nr (r, id);
1411 gimple_resx_set_region (copy, r);
1415 case GIMPLE_EH_DISPATCH:
1417 int r = gimple_eh_dispatch_region (copy);
1418 r = remap_eh_region_nr (r, id);
1419 gimple_eh_dispatch_set_region (copy, r);
1428 /* If STMT has a block defined, map it to the newly constructed
1429 block. When inlining we want statements without a block to
1430 appear in the block of the function call. */
1431 new_block = id->block;
1432 if (gimple_block (copy))
1435 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
1440 gimple_set_block (copy, new_block);
1442 if (gimple_debug_bind_p (copy))
1445 /* Remap all the operands in COPY. */
1446 memset (&wi, 0, sizeof (wi));
1449 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1451 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1453 /* Clear the copied virtual operands. We are not remapping them here
1454 but are going to recreate them from scratch. */
1455 if (gimple_has_mem_ops (copy))
1457 gimple_set_vdef (copy, NULL_TREE);
1458 gimple_set_vuse (copy, NULL_TREE);
1465 /* Copy basic block, scale profile accordingly.  Edges will be taken care of later.  */
1469 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1470 gcov_type count_scale)
1472 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1473 basic_block copy_basic_block;
1477 /* create_basic_block() will append every new block to
1478 basic_block_info automatically. */
1479 copy_basic_block = create_basic_block (NULL, (void *) 0,
1480 (basic_block) bb->prev_bb->aux);
1481 copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
1483 /* We are going to rebuild frequencies from scratch.  These values
1484 are of only minor importance for driving canonicalize_loop_headers.  */
1485 freq = ((gcov_type)bb->frequency * frequency_scale / REG_BR_PROB_BASE);
1487 /* We recompute frequencies after inlining, so this is quite safe. */
1488 if (freq > BB_FREQ_MAX)
1490 copy_basic_block->frequency = freq;
1492 copy_gsi = gsi_start_bb (copy_basic_block);
1494 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1496 gimple stmt = gsi_stmt (gsi);
1497 gimple orig_stmt = stmt;
1499 id->regimplify = false;
1500 stmt = remap_gimple_stmt (stmt, id);
1501 if (gimple_nop_p (stmt))
1504 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
1507 /* With return slot optimization we can end up with
1508 non-gimple (foo *)&this->m, fix that here. */
1509 if (is_gimple_assign (stmt)
1510 && gimple_assign_rhs_code (stmt) == NOP_EXPR
1511 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1514 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1515 gimple_assign_rhs1 (stmt),
1516 true, NULL, false, GSI_NEW_STMT);
1517 gimple_assign_set_rhs1 (stmt, new_rhs);
1518 id->regimplify = false;
1521 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1524 gimple_regimplify_operands (stmt, &seq_gsi);
1526 /* If copy_basic_block has been empty at the start of this iteration,
1527 call gsi_start_bb again to get at the newly added statements. */
1528 if (gsi_end_p (copy_gsi))
1529 copy_gsi = gsi_start_bb (copy_basic_block);
1531 gsi_next (&copy_gsi);
1533 /* Process the new statement. The call to gimple_regimplify_operands
1534 possibly turned the statement into multiple statements; we
1535 need to process all of them. */
1540 stmt = gsi_stmt (copy_gsi);
1541 if (is_gimple_call (stmt)
1542 && gimple_call_va_arg_pack_p (stmt)
1545 /* __builtin_va_arg_pack () should be replaced by
1546 all arguments corresponding to ... in the caller. */
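/* An illustrative, made-up example: if

       static inline int log1 (int level, ...)
       { return logv (level, __builtin_va_arg_pack ()); }

   is inlined at the call "log1 (1, 2, 3)", the inner call becomes
   "logv (1, 2, 3)": the named arguments of the copied call are kept
   and the caller's anonymous arguments 2 and 3 are appended.  */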
1549 VEC(tree, heap) *argarray;
1550 size_t nargs = gimple_call_num_args (id->gimple_call);
1553 for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
1556 /* Create the new array of arguments. */
1557 n = nargs + gimple_call_num_args (stmt);
1558 argarray = VEC_alloc (tree, heap, n);
1559 VEC_safe_grow (tree, heap, argarray, n);
1561 /* Copy all the arguments before '...' */
1562 memcpy (VEC_address (tree, argarray),
1563 gimple_call_arg_ptr (stmt, 0),
1564 gimple_call_num_args (stmt) * sizeof (tree));
1566 /* Append the arguments passed in '...' */
1567 memcpy (VEC_address(tree, argarray) + gimple_call_num_args (stmt),
1568 gimple_call_arg_ptr (id->gimple_call, 0)
1569 + (gimple_call_num_args (id->gimple_call) - nargs),
1570 nargs * sizeof (tree));
1572 new_call = gimple_build_call_vec (gimple_call_fn (stmt),
1575 VEC_free (tree, heap, argarray);
1577 /* Copy all GIMPLE_CALL flags, location and block, except
1578 GF_CALL_VA_ARG_PACK. */
1579 gimple_call_copy_flags (new_call, stmt);
1580 gimple_call_set_va_arg_pack (new_call, false);
1581 gimple_set_location (new_call, gimple_location (stmt));
1582 gimple_set_block (new_call, gimple_block (stmt));
1583 gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));
1585 gsi_replace (&copy_gsi, new_call, false);
1586 gimple_set_bb (stmt, NULL);
1589 else if (is_gimple_call (stmt)
1591 && (decl = gimple_call_fndecl (stmt))
1592 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1593 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1595 /* __builtin_va_arg_pack_len () should be replaced by
1596 the number of anonymous arguments. */
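/* With a caller like "log1 (1, 2, 3)" from the made-up example above,
   __builtin_va_arg_pack_len () folds to the constant 2, the number of
   arguments that landed in the caller's "...".  */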
1597 size_t nargs = gimple_call_num_args (id->gimple_call);
1601 for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
1604 count = build_int_cst (integer_type_node, nargs);
1605 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1606 gsi_replace (&copy_gsi, new_stmt, false);
1610 /* Statements produced by inlining can be unfolded, especially
1611 when we constant propagated some operands.  We can't fold
1612 them right now for two reasons:
1613 1) folding requires SSA_NAME_DEF_STMTs to be correct
1614 2) we can't change function calls to builtins.
1615 So we just mark the statement for later folding.  We mark
1616 all new statements, instead of just the statements that changed
1617 through some nontrivial substitution, so that even statements made
1618 foldable indirectly are updated.  If this turns out to be
1619 expensive, copy_body can be told to watch for nontrivial changes.  */
1621 if (id->statements_to_fold)
1622 pointer_set_insert (id->statements_to_fold, stmt);
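/* E.g. (made-up names) a copied statement that became "tmp = 1 + 2"
   after constant arguments were substituted is recorded here and only
   folded to "tmp = 3" once the whole body has been copied.  */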
1624 /* We're duplicating a CALL_EXPR. Find any corresponding
1625 callgraph edges and update or duplicate them. */
1626 if (is_gimple_call (stmt))
1628 struct cgraph_edge *edge;
1631 switch (id->transform_call_graph_edges)
1633 case CB_CGE_DUPLICATE:
1634 edge = cgraph_edge (id->src_node, orig_stmt);
1637 int edge_freq = edge->frequency;
1638 edge = cgraph_clone_edge (edge, id->dst_node, stmt,
1640 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1641 edge->frequency, true);
1642 /* We could also just rescale the frequency, but
1643 doing so would introduce roundoff errors and make
1644 the verifier unhappy. */
1646 = compute_call_stmt_bb_frequency (id->dst_node->decl,
1649 && profile_status_for_function (cfun) != PROFILE_ABSENT
1650 && (edge_freq > edge->frequency + 10
1651 || edge_freq < edge->frequency - 10))
1653 fprintf (dump_file, "Edge frequency estimated by "
1654 "cgraph %i diverge from inliner's estimate %i\n",
1658 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
1661 copy_basic_block->frequency);
1666 case CB_CGE_MOVE_CLONES:
1667 cgraph_set_call_stmt_including_clones (id->dst_node,
1669 edge = cgraph_edge (id->dst_node, stmt);
1673 edge = cgraph_edge (id->dst_node, orig_stmt);
1675 cgraph_set_call_stmt (edge, stmt);
1682 /* Constant propagation on arguments done during inlining
1683 may create a new direct call.  Produce an edge for it. */
1685 || (edge->indirect_call
1686 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
1687 && is_gimple_call (stmt)
1688 && (fn = gimple_call_fndecl (stmt)) != NULL)
1690 struct cgraph_node *dest = cgraph_node (fn);
1692 /* We have a missing edge in the callgraph.  This can happen
1693 when previous inlining turned an indirect call into a
1694 direct call by constant propagating arguments, or when we are
1695 producing a dead clone (for further cloning).  In all
1696 other cases we hit a bug (incorrect node sharing is the
1697 most common reason for missing edges). */
1698 gcc_assert (dest->needed || !dest->analyzed
1699 || !id->src_node->analyzed);
1700 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
1701 cgraph_create_edge_including_clones
1702 (id->dst_node, dest, stmt, bb->count,
1703 compute_call_stmt_bb_frequency (id->dst_node->decl,
1705 bb->loop_depth, CIF_ORIGINALLY_INDIRECT_CALL);
1707 cgraph_create_edge (id->dst_node, dest, stmt,
1708 bb->count, CGRAPH_FREQ_BASE,
1709 bb->loop_depth)->inline_failed
1710 = CIF_ORIGINALLY_INDIRECT_CALL;
1713 fprintf (dump_file, "Created new direct edge to %s",
1714 cgraph_node_name (dest));
1718 flags = gimple_call_flags (stmt);
1719 if (flags & ECF_MAY_BE_ALLOCA)
1720 cfun->calls_alloca = true;
1721 if (flags & ECF_RETURNS_TWICE)
1722 cfun->calls_setjmp = true;
1725 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
1726 id->eh_map, id->eh_lp_nr);
1728 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
1733 find_new_referenced_vars (gsi_stmt (copy_gsi));
1734 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
1735 if (TREE_CODE (def) == SSA_NAME)
1736 SSA_NAME_DEF_STMT (def) = stmt;
1739 gsi_next (&copy_gsi);
1741 while (!gsi_end_p (copy_gsi));
1743 copy_gsi = gsi_last_bb (copy_basic_block);
1746 return copy_basic_block;
1749 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
1750 form is quite easy, since the dominator relationship for old basic blocks does not change.
1753 There is, however, an exception where inlining might change the dominator relation
1754 across EH edges from basic blocks within inlined functions destined
1755 for landing pads in the function we inline into.
1757 The function fills in PHI_RESULTs of such PHI nodes if they refer
1758 to gimple regs.  Otherwise, the function marks the PHI_RESULT of such
1759 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
1760 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1761 set, and this means that there will be no overlapping live ranges
1762 for the underlying symbol.
1764 This might change in the future if we allow redirecting of EH edges, and
1765 then we might want to change the way the CFG is built pre-inlining to include
1766 all the possible edges. */
1768 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1769 bool can_throw, bool nonlocal_goto)
1774 FOR_EACH_EDGE (e, ei, bb->succs)
1776 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1779 gimple_stmt_iterator si;
1782 gcc_assert (e->flags & EDGE_EH);
1785 gcc_assert (!(e->flags & EDGE_EH));
1787 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
1791 phi = gsi_stmt (si);
1793 /* There shouldn't be any PHI nodes in the ENTRY_BLOCK. */
1794 gcc_assert (!e->dest->aux);
1796 gcc_assert ((e->flags & EDGE_EH)
1797 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
1799 if (!is_gimple_reg (PHI_RESULT (phi)))
1801 mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi)));
1805 re = find_edge (ret_bb, e->dest);
1807 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
1808 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
1810 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1811 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
1817 /* Copy edges from BB into its copy constructed earlier, scale profile
1818 accordingly.  Edges will be taken care of later.  Assume the aux
1819 pointers point to the copies of each BB. */
1822 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
1824 basic_block new_bb = (basic_block) bb->aux;
1827 gimple_stmt_iterator si;
1830 /* Use the indices from the original blocks to create edges for the new ones.  */
1832 FOR_EACH_EDGE (old_edge, ei, bb->succs)
1833 if (!(old_edge->flags & EDGE_EH))
1837 flags = old_edge->flags;
1839 /* Return edges do get a FALLTHRU flag when they get inlined. */
1840 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1841 && old_edge->dest->aux != EXIT_BLOCK_PTR)
1842 flags |= EDGE_FALLTHRU;
1843 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1844 new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
1845 new_edge->probability = old_edge->probability;
1848 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1851 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
1854 bool can_throw, nonlocal_goto;
1856 copy_stmt = gsi_stmt (si);
1857 if (!is_gimple_debug (copy_stmt))
1859 update_stmt (copy_stmt);
1860 if (gimple_in_ssa_p (cfun))
1861 mark_symbols_for_renaming (copy_stmt);
1864 /* Do this before the possible split_block. */
1867 /* If this tree could throw an exception, there are two
1868 cases where we need to add abnormal edge(s): the
1869 tree wasn't in a region and there is a "current
1870 region" in the caller; or the original tree had
1871 EH edges. In both cases split the block after the tree,
1872 and add abnormal edge(s) as needed; we need both
1873 those from the callee and the caller.
1874 We check whether the copy can throw, because the const
1875 propagation can change an INDIRECT_REF which throws
1876 into a COMPONENT_REF which doesn't. If the copy
1877 can throw, the original could also throw. */
1878 can_throw = stmt_can_throw_internal (copy_stmt);
1879 nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);
1881 if (can_throw || nonlocal_goto)
1883 if (!gsi_end_p (si))
1884 /* Note that bb's predecessor edges aren't necessarily
1885 right at this point; split_block doesn't care. */
1887 edge e = split_block (new_bb, copy_stmt);
1890 new_bb->aux = e->src->aux;
1891 si = gsi_start_bb (new_bb);
1895 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
1896 make_eh_dispatch_edges (copy_stmt);
1898 make_eh_edges (copy_stmt);
1901 make_abnormal_goto_edges (gimple_bb (copy_stmt), true);
1903 if ((can_throw || nonlocal_goto)
1904 && gimple_in_ssa_p (cfun))
1905 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
1906 can_throw, nonlocal_goto);
1910 /* Copy the PHIs.  All blocks and edges are copied, some blocks
1911 were possibly split and new outgoing EH edges inserted.
1912 BB points to the block of the original function and AUX pointers link
1913 the original and newly copied blocks. */
1916 copy_phis_for_bb (basic_block bb, copy_body_data *id)
1918 basic_block const new_bb = (basic_block) bb->aux;
1921 gimple_stmt_iterator si;
1923 for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si))
1929 phi = gsi_stmt (si);
1930 res = PHI_RESULT (phi);
1932 if (is_gimple_reg (res))
1934 walk_tree (&new_res, copy_tree_body_r, id, NULL);
1935 SSA_NAME_DEF_STMT (new_res)
1936 = new_phi = create_phi_node (new_res, new_bb);
1937 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
1940 = find_edge ((basic_block) new_edge->src->aux, bb);
1941 tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
1943 tree block = id->block;
1944 id->block = NULL_TREE;
1945 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
1947 gcc_assert (new_arg);
1948 /* With return slot optimization we can end up with
1949 non-gimple (foo *)&this->m, fix that here. */
1950 if (TREE_CODE (new_arg) != SSA_NAME
1951 && TREE_CODE (new_arg) != FUNCTION_DECL
1952 && !is_gimple_val (new_arg))
1954 gimple_seq stmts = NULL;
1955 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
1956 gsi_insert_seq_on_edge_immediate (new_edge, stmts);
1958 add_phi_arg (new_phi, new_arg, new_edge,
1959 gimple_phi_arg_location_from_edge (phi, old_edge));
1966 /* Wrapper for remap_decl so it can be used as a callback. */
1969 remap_decl_1 (tree decl, void *data)
1971 return remap_decl (decl, (copy_body_data *) data);
1974 /* Build struct function and associated datastructures for the new clone
1975 NEW_FNDECL to be built.  CALLEE_FNDECL is the original.  */
1978 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
1980 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
1981 gcov_type count_scale;
1983 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
1984 count_scale = (REG_BR_PROB_BASE * count
1985 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
1987 count_scale = REG_BR_PROB_BASE;
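/* count_scale is a fixed-point fraction with REG_BR_PROB_BASE as its
   denominator.  For instance, if the callee's entry block count is 1000
   and the clone is built for a count of 250, count_scale represents 1/4
   and every copied count below is scaled to a quarter of its original
   value.  */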
1989 /* Register specific tree functions. */
1990 gimple_register_cfg_hooks ();
1992 /* Get clean struct function. */
1993 push_struct_function (new_fndecl);
1995 /* We will rebuild these, so just sanity check that they are empty. */
1996 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
1997 gcc_assert (cfun->local_decls == NULL);
1998 gcc_assert (cfun->cfg == NULL);
1999 gcc_assert (cfun->decl == new_fndecl);
2001 /* Copy items we preserve during cloning. */
2002 cfun->static_chain_decl = src_cfun->static_chain_decl;
2003 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2004 cfun->function_end_locus = src_cfun->function_end_locus;
2005 cfun->curr_properties = src_cfun->curr_properties;
2006 cfun->last_verified = src_cfun->last_verified;
2007 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2008 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2009 cfun->function_frequency = src_cfun->function_frequency;
2010 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2011 cfun->stdarg = src_cfun->stdarg;
2012 cfun->dont_save_pending_sizes_p = src_cfun->dont_save_pending_sizes_p;
2013 cfun->after_inlining = src_cfun->after_inlining;
2014 cfun->returns_struct = src_cfun->returns_struct;
2015 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2016 cfun->after_tree_profile = src_cfun->after_tree_profile;
2018 init_empty_tree_cfg ();
2020 profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
2021 ENTRY_BLOCK_PTR->count =
2022 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2024 ENTRY_BLOCK_PTR->frequency
2025 = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2026 EXIT_BLOCK_PTR->count =
2027 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2029 EXIT_BLOCK_PTR->frequency =
2030 EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2032 init_eh_for_function ();
2034 if (src_cfun->gimple_df)
2036 init_tree_ssa (cfun);
2037 cfun->gimple_df->in_ssa_p = true;
2038 init_ssa_operands ();
2043 /* Make a copy of the body of FN so that it can be inserted inline in
2044 another function. Walks FN via CFG, returns new fndecl. */
2047 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2048 basic_block entry_block_map, basic_block exit_block_map)
2050 tree callee_fndecl = id->src_fn;
2051 /* Original cfun for the callee, doesn't change. */
2052 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2053 struct function *cfun_to_copy;
2055 tree new_fndecl = NULL;
2056 gcov_type count_scale;
2059 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2060 count_scale = (REG_BR_PROB_BASE * count
2061 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2063 count_scale = REG_BR_PROB_BASE;
2065 /* Register specific tree functions. */
2066 gimple_register_cfg_hooks ();
2068 /* Must have a CFG here at this point. */
2069 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
2070 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2072 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
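/* Cross-link the original entry/exit blocks and the blocks they map to in
   the new function through the AUX fields, so that edge and PHI copying
   below can find the corresponding copies.  */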
2074 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
2075 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
2076 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2077 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2079 /* Duplicate any exception-handling regions. */
2081 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2084 /* Use aux pointers to map the original blocks to their copies.  */
2085 FOR_EACH_BB_FN (bb, cfun_to_copy)
2087 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2092 last = last_basic_block;
2094 /* Now that we've duplicated the blocks, duplicate their edges. */
2095 FOR_ALL_BB_FN (bb, cfun_to_copy)
2096 copy_edges_for_bb (bb, count_scale, exit_block_map);
2098 if (gimple_in_ssa_p (cfun))
2099 FOR_ALL_BB_FN (bb, cfun_to_copy)
2100 copy_phis_for_bb (bb, id);
2102 FOR_ALL_BB_FN (bb, cfun_to_copy)
2104 ((basic_block)bb->aux)->aux = NULL;
2108 /* Zero out AUX fields of newly created blocks during EH edge insertion.  */
2110 for (; last < last_basic_block; last++)
2111 BASIC_BLOCK (last)->aux = NULL;
2112 entry_block_map->aux = NULL;
2113 exit_block_map->aux = NULL;
2117 pointer_map_destroy (id->eh_map);
2124 /* Copy the debug STMT using ID. We deal with these statements in a
2125 special way: if any variable in their VALUE expression wasn't
2126 remapped yet, we won't remap it, because that would get decl uids
2127 out of sync, causing codegen differences between -g and -g0. If
2128 this arises, we drop the VALUE expression altogether. */
2131 copy_debug_stmt (gimple stmt, copy_body_data *id)
2134 struct walk_stmt_info wi;
2137 if (gimple_block (stmt))
2140 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2144 gimple_set_block (stmt, t);
2146 /* Remap all the operands in COPY. */
2147 memset (&wi, 0, sizeof (wi));
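/* remap_gimple_op_r consults this flag: rather than creating new decls for
   variables that were never remapped, it records the failure (the flag goes
   negative) so the bind value can be dropped below.  */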
2150 processing_debug_stmt = 1;
2152 t = gimple_debug_bind_get_var (stmt);
2154 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2155 && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2157 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2161 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2163 gimple_debug_bind_set_var (stmt, t);
2165 if (gimple_debug_bind_has_value_p (stmt))
2166 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2167 remap_gimple_op_r, &wi, NULL);
2169 /* Punt if any decl couldn't be remapped. */
2170 if (processing_debug_stmt < 0)
2171 gimple_debug_bind_reset_value (stmt);
2173 processing_debug_stmt = 0;
2176 if (gimple_in_ssa_p (cfun))
2177 mark_symbols_for_renaming (stmt);
2180 /* Process deferred debug stmts. In order to give values better odds
2181 of being successfully remapped, we delay the processing of debug
2182 stmts until all other stmts that might require remapping are processed.  */
2186 copy_debug_stmts (copy_body_data *id)
2191 if (!id->debug_stmts)
2194 for (i = 0; VEC_iterate (gimple, id->debug_stmts, i, stmt); i++)
2195 copy_debug_stmt (stmt, id);
2197 VEC_free (gimple, heap, id->debug_stmts);
2200 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2201 another function. */
2204 copy_tree_body (copy_body_data *id)
2206 tree fndecl = id->src_fn;
2207 tree body = DECL_SAVED_TREE (fndecl);
2209 walk_tree (&body, copy_tree_body_r, id, NULL);
2214 /* Make a copy of the body of FN so that it can be inserted inline in
2215 another function. */
2218 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2219 basic_block entry_block_map, basic_block exit_block_map)
2221 tree fndecl = id->src_fn;
2224 /* If this body has a CFG, walk CFG and copy. */
2225 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
2226 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map);
2227 copy_debug_stmts (id);
2232 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2233 defined in function FN, or of a data member thereof. */
2236 self_inlining_addr_expr (tree value, tree fn)
2240 if (TREE_CODE (value) != ADDR_EXPR)
2243 var = get_base_address (TREE_OPERAND (value, 0));
2245 return var && auto_var_in_fn_p (var, fn);
2248 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2249 lexical block and line number information from base_stmt, if given,
2250 or from the last stmt of the block otherwise. */
2253 insert_init_debug_bind (copy_body_data *id,
2254 basic_block bb, tree var, tree value,
2258 gimple_stmt_iterator gsi;
2261 if (!gimple_in_ssa_p (id->src_cfun))
2264 if (!MAY_HAVE_DEBUG_STMTS)
2267 tracked_var = target_for_debug_bind (var);
2273 gsi = gsi_last_bb (bb);
2274 if (!base_stmt && !gsi_end_p (gsi))
2275 base_stmt = gsi_stmt (gsi);
2278 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2282 if (!gsi_end_p (gsi))
2283 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2285 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2292 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2294 /* If VAR represents a zero-sized variable, it's possible that the
2295 assignment statement may result in no gimple statements. */
2298 gimple_stmt_iterator si = gsi_last_bb (bb);
2300 /* We can end up with init statements that store to a non-register
2301 from a rhs with a conversion. Handle that here by forcing the
2302 rhs into a temporary. gimple_regimplify_operands is not
2303 prepared to do this for us. */
2304 if (!is_gimple_debug (init_stmt)
2305 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2306 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2307 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2309 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2310 gimple_expr_type (init_stmt),
2311 gimple_assign_rhs1 (init_stmt));
2312 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2314 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2315 gimple_assign_set_rhs1 (init_stmt, rhs);
2317 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2318 gimple_regimplify_operands (init_stmt, &si);
2319 mark_symbols_for_renaming (init_stmt);
2321 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2323 tree var, def = gimple_assign_lhs (init_stmt);
2325 if (TREE_CODE (def) == SSA_NAME)
2326 var = SSA_NAME_VAR (def);
2330 insert_init_debug_bind (id, bb, var, def, init_stmt);
2335 /* Initialize parameter P with VALUE. If needed, produce init statement
2336 at the end of BB.  When BB is NULL, we return the init statement to be output later.  */
2339 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2340 basic_block bb, tree *vars)
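/* Roughly: P is remapped to a fresh local VAR_DECL (or, in SSA form, its
   default definition may be mapped directly to the argument), and when an
   initialization is needed a statement assigning VALUE to the new variable
   is appended to BB -- e.g. inlining "int f (int p)" at "f (x + 1)" yields
   something like "p.1 = x + 1;" (the temporary name here is illustrative).  */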
2342 gimple init_stmt = NULL;
2345 tree def = (gimple_in_ssa_p (cfun)
2346 ? gimple_default_def (id->src_cfun, p) : NULL);
2349 && value != error_mark_node
2350 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2352 if (fold_convertible_p (TREE_TYPE (p), value))
2353 rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value);
2355 /* ??? For valid (GIMPLE) programs we should not end up here.
2356 Still if something has gone wrong and we end up with truly
2357 mismatched types here, fall back to using a VIEW_CONVERT_EXPR
2358 to not leak invalid GIMPLE to the following passes. */
2359 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2362 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2363 here since the type of this decl must be visible to the calling function.  */
2365 var = copy_decl_to_var (p, id);
2367 /* We're actually using the newly-created var. */
2368 if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
2371 add_referenced_var (var);
2374 /* Declare this new variable. */
2375 TREE_CHAIN (var) = *vars;
2378 /* Make gimplifier happy about this variable. */
2379 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2381 /* If the parameter is never assigned to, has no SSA_NAMEs created,
2382 we would not need to create a new variable here at all, if it
2383 weren't for debug info.  Still, we can just use the argument value.  */
2385 if (TREE_READONLY (p)
2386 && !TREE_ADDRESSABLE (p)
2387 && value && !TREE_SIDE_EFFECTS (value)
2390 /* We may produce non-gimple trees by adding NOPs or introduce
2391 invalid sharing when the operand is not really constant.
2392 It is not a big deal to prohibit constant propagation here as
2393 we will constant propagate in the DOM1 pass anyway. */
2394 if (is_gimple_min_invariant (value)
2395 && useless_type_conversion_p (TREE_TYPE (p),
2397 /* We have to be very careful about ADDR_EXPR. Make sure
2398 the base variable isn't a local variable of the inlined
2399 function, e.g., when doing recursive inlining, direct or
2400 mutually-recursive or whatever, which is why we don't
2401 just test whether fn == current_function_decl. */
2402 && ! self_inlining_addr_expr (value, fn))
2404 insert_decl_map (id, p, value);
2405 insert_debug_decl_map (id, p, var);
2406 return insert_init_debug_bind (id, bb, var, value, NULL);
2410 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2411 that way, when the PARM_DECL is encountered, it will be
2412 automatically replaced by the VAR_DECL. */
2413 insert_decl_map (id, p, var);
2415 /* Even if P was TREE_READONLY, the new VAR should not be.
2416 In the original code, we would have constructed a
2417 temporary, and then the function body would have never
2418 changed the value of P. However, now, we will be
2419 constructing VAR directly. The constructor body may
2420 change its value multiple times as it is being
2421 constructed. Therefore, it must not be TREE_READONLY;
2422 the back-end assumes that TREE_READONLY variable is
2423 assigned to only once. */
2424 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2425 TREE_READONLY (var) = 0;
2427 /* If there is no setup required and we are in SSA, take the easy route
2428 replacing all SSA names representing the function parameter by the
2429 SSA name passed to the function.
2431 We need to construct a map for the variable anyway as it might be used
2432 in different SSA names when the parameter is set in the function.
2434 Do the replacement at -O0 for const arguments replaced by a constant.
2435 This is important for builtin_constant_p and other constructs requiring
2436 a constant argument to be visible in the inlined function body. */
2437 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2439 || (TREE_READONLY (p)
2440 && is_gimple_min_invariant (rhs)))
2441 && (TREE_CODE (rhs) == SSA_NAME
2442 || is_gimple_min_invariant (rhs))
2443 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2445 insert_decl_map (id, def, rhs);
2446 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2449 /* If the value of the argument is never used, don't care about initializing it.  */
2451 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2453 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2454 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2457 /* Initialize this VAR_DECL from the equivalent argument. Convert
2458 the argument to the proper type in case it was promoted. */
2461 if (rhs == error_mark_node)
2463 insert_decl_map (id, p, var);
2464 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2467 STRIP_USELESS_TYPE_CONVERSION (rhs);
2469 /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
2470 keep our trees in gimple form. */
2471 if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2473 def = remap_ssa_name (def, id);
2474 init_stmt = gimple_build_assign (def, rhs);
2475 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2476 set_default_def (var, NULL);
2479 init_stmt = gimple_build_assign (var, rhs);
2481 if (bb && init_stmt)
2482 insert_init_stmt (id, bb, init_stmt);
2487 /* Generate code to initialize the parameters of the function at the
2488 top of the stack in ID from the GIMPLE_CALL STMT. */
2491 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
2492 tree fn, basic_block bb)
2497 tree vars = NULL_TREE;
2498 tree static_chain = gimple_call_chain (stmt);
2500 /* Figure out what the parameters are. */
2501 parms = DECL_ARGUMENTS (fn);
2503 /* Loop through the parameter declarations, replacing each with an
2504 equivalent VAR_DECL, appropriately initialized. */
2505 for (p = parms, i = 0; p; p = TREE_CHAIN (p), i++)
2508 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2509 setup_one_parameter (id, p, val, fn, bb, &vars);
2512 /* Initialize the static chain. */
2513 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
2514 gcc_assert (fn != current_function_decl);
2517 /* No static chain? Seems like a bug in tree-nested.c. */
2518 gcc_assert (static_chain);
2520 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
2523 declare_inline_vars (id->block, vars);
2527 /* Declare a return variable to replace the RESULT_DECL for the
2528 function we are calling. An appropriate DECL_STMT is returned.
2529 The USE_STMT is filled to contain a use of the declaration to
2530 indicate the return value of the function.
2532 RETURN_SLOT, if non-null, is the place where the result is to be stored.  It
2533 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
2534 was the LHS of the MODIFY_EXPR to which this call is the RHS.
2536 The return value is a (possibly null) value that is the result of the
2537 function as seen by the callee. *USE_P is a (possibly null) value that
2538 holds the result as seen by the caller. */
2541 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
2544 tree callee = id->src_fn;
2545 tree caller = id->dst_fn;
2546 tree result = DECL_RESULT (callee);
2547 tree callee_type = TREE_TYPE (result);
2548 tree caller_type = TREE_TYPE (TREE_TYPE (callee));
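/* CALLEE_TYPE is the type of the value as the callee's RESULT_DECL declares
   it; CALLER_TYPE is the return type of the call as the caller sees it.
   The two may differ when the return value was promoted.  */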
2551 /* We don't need to do anything for functions that don't return anything.  */
2553 if (!result || VOID_TYPE_P (callee_type))
2559 /* If there was a return slot, then the return value is the
2560 dereferenced address of that object. */
2563 /* The front end shouldn't have used both return_slot and
2564 a modify expression. */
2565 gcc_assert (!modify_dest);
2566 if (DECL_BY_REFERENCE (result))
2568 tree return_slot_addr = build_fold_addr_expr (return_slot);
2569 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
2571 /* We are going to construct *&return_slot and we can't do that
2572 for variables believed not to be addressable.
2574 FIXME: This check can possibly match, because values returned
2575 via the return slot optimization are not believed to have their address
2576 taken by alias analysis. */
2577 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
2578 if (gimple_in_ssa_p (cfun))
2580 HOST_WIDE_INT bitsize;
2581 HOST_WIDE_INT bitpos;
2583 enum machine_mode mode;
2587 base = get_inner_reference (return_slot, &bitsize, &bitpos,
2589 &mode, &unsignedp, &volatilep,
2591 if (TREE_CODE (base) == INDIRECT_REF)
2592 base = TREE_OPERAND (base, 0);
2593 if (TREE_CODE (base) == SSA_NAME)
2594 base = SSA_NAME_VAR (base);
2595 mark_sym_for_renaming (base);
2597 var = return_slot_addr;
2602 gcc_assert (TREE_CODE (var) != SSA_NAME);
2603 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
2605 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2606 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2607 && !DECL_GIMPLE_REG_P (result)
2609 DECL_GIMPLE_REG_P (var) = 0;
2614 /* All types requiring non-trivial constructors should have been handled. */
2615 gcc_assert (!TREE_ADDRESSABLE (callee_type));
2617 /* Attempt to avoid creating a new temporary variable. */
2619 && TREE_CODE (modify_dest) != SSA_NAME)
2621 bool use_it = false;
2623 /* We can't use MODIFY_DEST if there's type promotion involved. */
2624 if (!useless_type_conversion_p (callee_type, caller_type))
2627 /* ??? If we're assigning to a variable sized type, then we must
2628 reuse the destination variable, because we've no good way to
2629 create variable sized temporaries at this point. */
2630 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
2633 /* If the callee cannot possibly modify MODIFY_DEST, then we can
2634 reuse it as the result of the call directly. Don't do this if
2635 it would promote MODIFY_DEST to addressable. */
2636 else if (TREE_ADDRESSABLE (result))
2640 tree base_m = get_base_address (modify_dest);
2642 /* If the base isn't a decl, then it's a pointer, and we don't
2643 know where that's going to go. */
2644 if (!DECL_P (base_m))
2646 else if (is_global_var (base_m))
2648 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2649 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2650 && !DECL_GIMPLE_REG_P (result)
2651 && DECL_GIMPLE_REG_P (base_m))
2653 else if (!TREE_ADDRESSABLE (base_m))
2665 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
2667 var = copy_result_decl_to_var (result, id);
2668 if (gimple_in_ssa_p (cfun))
2671 add_referenced_var (var);
2674 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2675 DECL_STRUCT_FUNCTION (caller)->local_decls
2676 = tree_cons (NULL_TREE, var,
2677 DECL_STRUCT_FUNCTION (caller)->local_decls);
2679 /* Do not have the rest of GCC warn about this variable as it should
2680 not be visible to the user. */
2681 TREE_NO_WARNING (var) = 1;
2683 declare_inline_vars (id->block, var);
2685 /* Build the use expr. If the return type of the function was
2686 promoted, convert it back to the expected type. */
2688 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
2689 use = fold_convert (caller_type, var);
2691 STRIP_USELESS_TYPE_CONVERSION (use);
2693 if (DECL_BY_REFERENCE (result))
2695 TREE_ADDRESSABLE (var) = 1;
2696 var = build_fold_addr_expr (var);
2700 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
2701 way, when the RESULT_DECL is encountered, it will be
2702 automatically replaced by the VAR_DECL. */
2703 insert_decl_map (id, result, var);
2705 /* Remember this so we can ignore it in remap_decls. */
2712 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
2713 to a local label. */
2716 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
2719 tree fn = (tree) fnp;
2721 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
2730 /* Callback through walk_tree. Determine if we've got an aggregate
2731 type that we can't support; return non-null if so. */
2734 cannot_copy_type_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
2735 void *data ATTRIBUTE_UNUSED)
2737 tree t, node = *nodep;
2739 if (TREE_CODE (node) == RECORD_TYPE || TREE_CODE (node) == UNION_TYPE)
2741 /* We cannot inline a function of the form
2743 void F (int i) { struct S { int ar[i]; } s; }
2745 Attempting to do so produces a catch-22.
2746 If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
2747 UNION_TYPE nodes, then it goes into infinite recursion on a
2748 structure containing a pointer to its own type. If it doesn't,
2749 then the type node for S doesn't get adjusted properly when F is inlined.
2752 ??? This is likely no longer true, but it's too late in the 4.0
2753 cycle to try to find out. This should be checked for 4.1. */
2754 for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
2755 if (variably_modified_type_p (TREE_TYPE (t), NULL))
2763 /* Determine if the function can be copied.  If so, return NULL.  If
2764 not, return a string describing the reason for failure. */
2767 copy_forbidden (struct function *fun, tree fndecl)
2769 const char *reason = fun->cannot_be_copied_reason;
2772 /* Only examine the function once. */
2773 if (fun->cannot_be_copied_set)
2776 /* We cannot copy a function that receives a non-local goto
2777 because we cannot remap the destination label used in the
2778 function that is performing the non-local goto. */
2779 /* ??? Actually, this should be possible, if we work at it.
2780 No doubt there's just a handful of places that simply
2781 assume it doesn't happen and don't substitute properly. */
2782 if (fun->has_nonlocal_label)
2784 reason = G_("function %q+F can never be copied "
2785 "because it receives a non-local goto");
2789 for (step = fun->local_decls; step; step = TREE_CHAIN (step))
2791 tree decl = TREE_VALUE (step);
2793 if (TREE_CODE (decl) == VAR_DECL
2794 && TREE_STATIC (decl)
2795 && !DECL_EXTERNAL (decl)
2796 && DECL_INITIAL (decl)
2797 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
2798 has_label_address_in_static_1,
2801 reason = G_("function %q+F can never be copied because it saves "
2802 "address of local label in a static variable");
2806 if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2807 && variably_modified_type_p (TREE_TYPE (decl), NULL)
2808 && walk_tree_without_duplicates (&TREE_TYPE (decl),
2809 cannot_copy_type_1, NULL))
2811 reason = G_("function %q+F can never be copied "
2812 "because it uses variable sized variables");
2818 fun->cannot_be_copied_reason = reason;
2819 fun->cannot_be_copied_set = true;
2824 static const char *inline_forbidden_reason;
2826 /* A callback for walk_gimple_seq to handle statements. Returns non-null
2827 iff a function cannot be inlined.  Also sets the reason why. */
2830 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2831 struct walk_stmt_info *wip)
2833 tree fn = (tree) wip->info;
2835 gimple stmt = gsi_stmt (*gsi);
2837 switch (gimple_code (stmt))
2840 /* Refuse to inline an alloca call unless the user explicitly forced it, as
2841 this may change the program's memory overhead drastically when the
2842 function using alloca is called in a loop.  In the GCC present in
2843 SPEC2000, inlining into schedule_block caused it to require 2GB of
2844 RAM instead of 256MB. */
2845 if (gimple_alloca_call_p (stmt)
2846 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
2848 inline_forbidden_reason
2849 = G_("function %q+F can never be inlined because it uses "
2850 "alloca (override using the always_inline attribute)");
2851 *handled_ops_p = true;
2855 t = gimple_call_fndecl (stmt);
2859 /* We cannot inline functions that call setjmp. */
2860 if (setjmp_call_p (t))
2862 inline_forbidden_reason
2863 = G_("function %q+F can never be inlined because it uses setjmp");
2864 *handled_ops_p = true;
2868 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
2869 switch (DECL_FUNCTION_CODE (t))
2871 /* We cannot inline functions that take a variable number of arguments.  */
2873 case BUILT_IN_VA_START:
2874 case BUILT_IN_NEXT_ARG:
2875 case BUILT_IN_VA_END:
2876 inline_forbidden_reason
2877 = G_("function %q+F can never be inlined because it "
2878 "uses variable argument lists");
2879 *handled_ops_p = true;
2882 case BUILT_IN_LONGJMP:
2883 /* We can't inline functions that call __builtin_longjmp at
2884 all. The non-local goto machinery really requires the
2885 destination be in a different function. If we allow the
2886 function calling __builtin_longjmp to be inlined into the
2887 function calling __builtin_setjmp, Things will Go Awry. */
2888 inline_forbidden_reason
2889 = G_("function %q+F can never be inlined because "
2890 "it uses setjmp-longjmp exception handling");
2891 *handled_ops_p = true;
2894 case BUILT_IN_NONLOCAL_GOTO:
2896 inline_forbidden_reason
2897 = G_("function %q+F can never be inlined because "
2898 "it uses non-local goto");
2899 *handled_ops_p = true;
2902 case BUILT_IN_RETURN:
2903 case BUILT_IN_APPLY_ARGS:
2904 /* If a __builtin_apply_args caller would be inlined,
2905 it would be saving arguments of the function it has
2906 been inlined into. Similarly __builtin_return would
2907 return from the function the inline has been inlined into. */
2908 inline_forbidden_reason
2909 = G_("function %q+F can never be inlined because "
2910 "it uses __builtin_return or __builtin_apply_args");
2911 *handled_ops_p = true;
2920 t = gimple_goto_dest (stmt);
2922 /* We will not inline a function which uses computed goto. The
2923 addresses of its local labels, which may be tucked into
2924 global storage, are of course not constant across
2925 instantiations, which causes unexpected behavior. */
2926 if (TREE_CODE (t) != LABEL_DECL)
2928 inline_forbidden_reason
2929 = G_("function %q+F can never be inlined "
2930 "because it contains a computed goto");
2931 *handled_ops_p = true;
2940 *handled_ops_p = false;
2944 /* Return true if FNDECL is a function that cannot be inlined into another one.  */
2948 inline_forbidden_p (tree fndecl)
2950 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
2951 struct walk_stmt_info wi;
2952 struct pointer_set_t *visited_nodes;
2954 bool forbidden_p = false;
2956 /* First check for shared reasons not to copy the code. */
2957 inline_forbidden_reason = copy_forbidden (fun, fndecl);
2958 if (inline_forbidden_reason != NULL)
2961 /* Next, walk the statements of the function looking for
2962 constructs we can't handle, or that are non-optimal for inlining. */
2963 visited_nodes = pointer_set_create ();
2964 memset (&wi, 0, sizeof (wi));
2965 wi.info = (void *) fndecl;
2966 wi.pset = visited_nodes;
2968 FOR_EACH_BB_FN (bb, fun)
2971 gimple_seq seq = bb_seq (bb);
2972 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
2973 forbidden_p = (ret != NULL);
2978 pointer_set_destroy (visited_nodes);
2982 /* Returns nonzero if FN is a function that does not have any
2983 fundamental inline blocking properties. */
2986 tree_inlinable_function_p (tree fn)
2988 bool inlinable = true;
2992 /* If we've already decided this function shouldn't be inlined,
2993 there's no need to check again. */
2994 if (DECL_UNINLINABLE (fn))
2997 /* We only warn for functions declared `inline' by the user. */
2998 do_warning = (warn_inline
2999 && DECL_DECLARED_INLINE_P (fn)
3000 && !DECL_NO_INLINE_WARNING_P (fn)
3001 && !DECL_IN_SYSTEM_HEADER (fn));
3003 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3006 && always_inline == NULL)
3009 warning (OPT_Winline, "function %q+F can never be inlined because it "
3010 "is suppressed using -fno-inline", fn);
3014 /* Don't auto-inline anything that might not be bound within
3015 this unit of translation. */
3016 else if (!DECL_DECLARED_INLINE_P (fn)
3017 && DECL_REPLACEABLE_P (fn))
3020 else if (!function_attribute_inlinable_p (fn))
3023 warning (OPT_Winline, "function %q+F can never be inlined because it "
3024 "uses attributes conflicting with inlining", fn);
3028 else if (inline_forbidden_p (fn))
3030 /* See if we should warn about uninlinable functions. Previously,
3031 some of these warnings would be issued while trying to expand
3032 the function inline, but that would cause multiple warnings
3033 about functions that would for example call alloca. But since
3034 this is a property of the function, just one warning is enough.
3035 As a bonus we can now give more details about the reason why a
3036 function is not inlinable. */
3038 sorry (inline_forbidden_reason, fn);
3039 else if (do_warning)
3040 warning (OPT_Winline, inline_forbidden_reason, fn);
3045 /* Squirrel away the result so that we don't have to check again. */
3046 DECL_UNINLINABLE (fn) = !inlinable;
3051 /* Estimate the cost of a memory move.  Use machine-dependent
3052 word size and take a possible memcpy call into account. */
3055 estimate_move_cost (tree type)
3059 gcc_assert (!VOID_TYPE_P (type));
3061 size = int_size_in_bytes (type);
3063 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3064 /* Cost of a memcpy call, 3 arguments and the call. */
3067 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3070 /* Returns the cost of operation CODE, according to WEIGHTS.  */
3073 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3074 tree op1 ATTRIBUTE_UNUSED, tree op2)
3078 /* These are "free" conversions, or their presumed cost
3079 is folded into other operations. */
3086 /* Assign cost of 1 to usual operations.
3087 ??? We may consider mapping RTL costs to this. */
3092 case POINTER_PLUS_EXPR:
3096 case ADDR_SPACE_CONVERT_EXPR:
3097 case FIXED_CONVERT_EXPR:
3098 case FIX_TRUNC_EXPR:
3110 case VEC_LSHIFT_EXPR:
3111 case VEC_RSHIFT_EXPR:
3118 case TRUTH_ANDIF_EXPR:
3119 case TRUTH_ORIF_EXPR:
3120 case TRUTH_AND_EXPR:
3122 case TRUTH_XOR_EXPR:
3123 case TRUTH_NOT_EXPR:
3132 case UNORDERED_EXPR:
3143 case PREDECREMENT_EXPR:
3144 case PREINCREMENT_EXPR:
3145 case POSTDECREMENT_EXPR:
3146 case POSTINCREMENT_EXPR:
3148 case REALIGN_LOAD_EXPR:
3150 case REDUC_MAX_EXPR:
3151 case REDUC_MIN_EXPR:
3152 case REDUC_PLUS_EXPR:
3153 case WIDEN_SUM_EXPR:
3154 case WIDEN_MULT_EXPR:
3157 case VEC_WIDEN_MULT_HI_EXPR:
3158 case VEC_WIDEN_MULT_LO_EXPR:
3159 case VEC_UNPACK_HI_EXPR:
3160 case VEC_UNPACK_LO_EXPR:
3161 case VEC_UNPACK_FLOAT_HI_EXPR:
3162 case VEC_UNPACK_FLOAT_LO_EXPR:
3163 case VEC_PACK_TRUNC_EXPR:
3164 case VEC_PACK_SAT_EXPR:
3165 case VEC_PACK_FIX_TRUNC_EXPR:
3166 case VEC_EXTRACT_EVEN_EXPR:
3167 case VEC_EXTRACT_ODD_EXPR:
3168 case VEC_INTERLEAVE_HIGH_EXPR:
3169 case VEC_INTERLEAVE_LOW_EXPR:
3173 /* A few special cases of expensive operations.  This is useful
3174 to avoid inlining functions having too many of these. */
3175 case TRUNC_DIV_EXPR:
3177 case FLOOR_DIV_EXPR:
3178 case ROUND_DIV_EXPR:
3179 case EXACT_DIV_EXPR:
3180 case TRUNC_MOD_EXPR:
3182 case FLOOR_MOD_EXPR:
3183 case ROUND_MOD_EXPR:
3185 if (TREE_CODE (op2) != INTEGER_CST)
3186 return weights->div_mod_cost;
3190 /* We expect a copy assignment with no operator. */
3191 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3197 /* Estimate number of instructions that will be created by expanding
3198 the statements in the statement sequence STMTS.
3199 WEIGHTS contains weights attributed to various constructs. */
3202 int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3205 gimple_stmt_iterator gsi;
3208 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3209 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3215 /* Estimate number of instructions that will be created by expanding STMT.
3216 WEIGHTS contains weights attributed to various constructs. */
3219 estimate_num_insns (gimple stmt, eni_weights *weights)
3222 enum gimple_code code = gimple_code (stmt);
3229 /* Try to estimate the cost of assignments.  We have three cases to deal with:
3231 1) Simple assignments to registers;
3232 2) Stores to things that must live in memory. This includes
3233 "normal" stores to scalars, but also assignments of large
3234 structures, or constructors of big arrays;
3236 Let us look at the first two cases, assuming we have "a = b + C":
3237 <GIMPLE_ASSIGN <var_decl "a">
3238 <plus_expr <var_decl "b"> <constant C>>
3239 If "a" is a GIMPLE register, the assignment to it is free on almost
3240 any target, because "a" usually ends up in a real register. Hence
3241 the only cost of this expression comes from the PLUS_EXPR, and we
3242 can ignore the GIMPLE_ASSIGN.
3243 If "a" is not a GIMPLE register, the assignment to "a" will most
3244 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3245 of moving something into "a", which we compute using the function
3246 estimate_move_cost. */
3247 lhs = gimple_assign_lhs (stmt);
3248 rhs = gimple_assign_rhs1 (stmt);
3250 if (is_gimple_reg (lhs))
3253 cost = estimate_move_cost (TREE_TYPE (lhs));
3255 if (!is_gimple_reg (rhs) && !is_gimple_min_invariant (rhs))
3256 cost += estimate_move_cost (TREE_TYPE (rhs));
3258 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3259 gimple_assign_rhs1 (stmt),
3260 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3261 == GIMPLE_BINARY_RHS
3262 ? gimple_assign_rhs2 (stmt) : NULL);
3266 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3267 gimple_op (stmt, 0),
3268 gimple_op (stmt, 1));
3272 /* Take into account the cost of the switch + guess 2 conditional jumps for each case label.
3275 TODO: once the switch expansion logic is sufficiently separated, we can
3276 do a better job of estimating the cost of the switch. */
3277 if (weights->time_based)
3278 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3280 cost = gimple_switch_num_labels (stmt) * 2;
3285 tree decl = gimple_call_fndecl (stmt);
3286 tree addr = gimple_call_fn (stmt);
3287 tree funtype = TREE_TYPE (addr);
3289 if (POINTER_TYPE_P (funtype))
3290 funtype = TREE_TYPE (funtype);
3292 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
3293 cost = weights->target_builtin_call_cost;
3295 cost = weights->call_cost;
3297 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3298 switch (DECL_FUNCTION_CODE (decl))
3300 case BUILT_IN_CONSTANT_P:
3302 case BUILT_IN_EXPECT:
3305 /* Prefetch instruction is not expensive. */
3306 case BUILT_IN_PREFETCH:
3307 cost = weights->target_builtin_call_cost;
3315 funtype = TREE_TYPE (decl);
3317 if (!VOID_TYPE_P (TREE_TYPE (funtype)))
3318 cost += estimate_move_cost (TREE_TYPE (funtype));
3319 /* Our cost must be kept in sync with
3320 cgraph_estimate_size_after_inlining, which does use the function
3321 declaration to figure out the arguments. */
3322 if (decl && DECL_ARGUMENTS (decl))
3325 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3326 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3327 cost += estimate_move_cost (TREE_TYPE (arg));
3329 else if (funtype && prototype_p (funtype))
3332 for (t = TYPE_ARG_TYPES (funtype); t && t != void_list_node;
3334 if (!VOID_TYPE_P (TREE_VALUE (t)))
3335 cost += estimate_move_cost (TREE_VALUE (t));
3339 for (i = 0; i < gimple_call_num_args (stmt); i++)
3341 tree arg = gimple_call_arg (stmt, i);
3342 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3343 cost += estimate_move_cost (TREE_TYPE (arg));
3355 case GIMPLE_PREDICT:
3360 return asm_str_count (gimple_asm_string (stmt));
3363 /* This is either going to be an external function call with one
3364 argument, or two register copy statements plus a goto. */
3367 case GIMPLE_EH_DISPATCH:
3368 /* ??? This is going to turn into a switch statement. Ideally
3369 we'd have a look at the eh region and estimate the number of edges involved.  */
3374 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3376 case GIMPLE_EH_FILTER:
3377 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3380 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3383 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3384 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3386 /* OpenMP directives are generally very expensive. */
3388 case GIMPLE_OMP_RETURN:
3389 case GIMPLE_OMP_SECTIONS_SWITCH:
3390 case GIMPLE_OMP_ATOMIC_STORE:
3391 case GIMPLE_OMP_CONTINUE:
3392 /* ...except these, which are cheap. */
3395 case GIMPLE_OMP_ATOMIC_LOAD:
3396 return weights->omp_cost;
3398 case GIMPLE_OMP_FOR:
3399 return (weights->omp_cost
3400 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3401 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3403 case GIMPLE_OMP_PARALLEL:
3404 case GIMPLE_OMP_TASK:
3405 case GIMPLE_OMP_CRITICAL:
3406 case GIMPLE_OMP_MASTER:
3407 case GIMPLE_OMP_ORDERED:
3408 case GIMPLE_OMP_SECTION:
3409 case GIMPLE_OMP_SECTIONS:
3410 case GIMPLE_OMP_SINGLE:
3411 return (weights->omp_cost
3412 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3421 /* Estimate number of instructions that will be created by expanding
3422 function FNDECL.  WEIGHTS contains weights attributed to various constructs.  */
3426 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3428 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3429 gimple_stmt_iterator bsi;
3433 gcc_assert (my_function && my_function->cfg);
3434 FOR_EACH_BB_FN (bb, my_function)
3436 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3437 n += estimate_num_insns (gsi_stmt (bsi), weights);
3444 /* Initializes weights used by estimate_num_insns. */
3447 init_inline_once (void)
3449 eni_size_weights.call_cost = 1;
3450 eni_size_weights.target_builtin_call_cost = 1;
3451 eni_size_weights.div_mod_cost = 1;
3452 eni_size_weights.omp_cost = 40;
3453 eni_size_weights.time_based = false;
3455 /* Estimating time for call is difficult, since we have no idea what the
3456 called function does. In the current uses of eni_time_weights,
3457 underestimating the cost does less harm than overestimating it, so
3458 we choose a rather small value here. */
3459 eni_time_weights.call_cost = 10;
3460 eni_time_weights.target_builtin_call_cost = 10;
3461 eni_time_weights.div_mod_cost = 10;
3462 eni_time_weights.omp_cost = 40;
3463 eni_time_weights.time_based = true;
3466 /* Estimate the number of instructions in a gimple_seq. */
3469 count_insns_seq (gimple_seq seq, eni_weights *weights)
3471 gimple_stmt_iterator gsi;
3473 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3474 n += estimate_num_insns (gsi_stmt (gsi), weights);
3480 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
3483 prepend_lexical_block (tree current_block, tree new_block)
3485 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3486 BLOCK_SUBBLOCKS (current_block) = new_block;
3487 BLOCK_SUPERCONTEXT (new_block) = current_block;
3490 /* Fetch callee declaration from the call graph edge going from NODE and
3491 associated with the STMT call statement.  Return NULL_TREE if not found. */
3493 get_indirect_callee_fndecl (struct cgraph_node *node, gimple stmt)
3495 struct cgraph_edge *cs;
3497 cs = cgraph_edge (node, stmt);
3499 return cs->callee->decl;
3504 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
3507 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
3509 tree retvar, use_retvar;
3511 struct pointer_map_t *st, *dst;
3514 location_t saved_location;
3515 struct cgraph_edge *cg_edge;
3516 cgraph_inline_failed_t reason;
3517 basic_block return_block;
3519 gimple_stmt_iterator gsi, stmt_gsi;
3520 bool successfully_inlined = FALSE;
3521 bool purge_dead_abnormal_edges;
3525 /* Set input_location here so we get the right instantiation context
3526 if we call instantiate_decl from inlinable_function_p. */
3527 saved_location = input_location;
3528 if (gimple_has_location (stmt))
3529 input_location = gimple_location (stmt);
3531 /* From here on, we're only interested in CALL_EXPRs. */
3532 if (gimple_code (stmt) != GIMPLE_CALL)
3535 /* First, see if we can figure out what function is being called.
3536 If we cannot, then there is no hope of inlining the function. */
3537 fn = gimple_call_fndecl (stmt);
3540 fn = get_indirect_callee_fndecl (id->dst_node, stmt);
3545 /* Turn forward declarations into real ones. */
3546 fn = cgraph_node (fn)->decl;
3548 /* If FN is a declaration of a function in a nested scope that was
3549 globally declared inline, we don't set its DECL_INITIAL.
3550 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
3551 C++ front-end uses it for cdtors to refer to their internal
3552 declarations, which are not real functions.  Fortunately those
3553 don't have trees to be saved, so we can tell by checking their gimple_body.  */
3555 if (!DECL_INITIAL (fn)
3556 && DECL_ABSTRACT_ORIGIN (fn)
3557 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
3558 fn = DECL_ABSTRACT_ORIGIN (fn);
3560 /* Objective-C and Fortran still call tree_rest_of_compilation directly.
3561 Kill this check once this is fixed. */
3562 if (!id->dst_node->analyzed)
3565 cg_edge = cgraph_edge (id->dst_node, stmt);
3567 /* Don't inline functions with different EH personalities. */
3568 if (DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3569 && DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl)
3570 && (DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3571 != DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl)))
3574 /* Don't try to inline functions that are not well-suited to inlining.  */
3576 if (!cgraph_inline_p (cg_edge, &reason))
3578 /* If this call was originally indirect, we do not want to emit any
3579 inlining related warnings or sorry messages because there are no
3580 guarantees regarding those. */
3581 if (cg_edge->indirect_call)
3584 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
3585 /* Avoid warnings during early inline pass. */
3586 && cgraph_global_info_ready)
3588 sorry ("inlining failed in call to %q+F: %s", fn,
3589 cgraph_inline_failed_string (reason));
3590 sorry ("called from here");
3592 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
3593 && !DECL_IN_SYSTEM_HEADER (fn)
3594 && reason != CIF_UNSPECIFIED
3595 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
3596 /* Avoid warnings during early inline pass. */
3597 && cgraph_global_info_ready)
3599 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
3600 fn, cgraph_inline_failed_string (reason));
3601 warning (OPT_Winline, "called from here");
3605 fn = cg_edge->callee->decl;
3607 #ifdef ENABLE_CHECKING
3608 if (cg_edge->callee->decl != id->dst_node->decl)
3609 verify_cgraph_node (cg_edge->callee);
3612 /* We will be inlining this callee. */
3613 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
3615 /* Update the caller's EH personality. */
3616 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
3617 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3618 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
3620 /* Split the block holding the GIMPLE_CALL. */
3621 e = split_block (bb, stmt);
3623 return_block = e->dest;
3626 /* split_block splits after the statement; work around this by
3627 moving the call into the second block manually. Not pretty,
3628 but seems easier than doing the CFG manipulation by hand
3629 when the GIMPLE_CALL is in the last statement of BB. */
3630 stmt_gsi = gsi_last_bb (bb);
3631 gsi_remove (&stmt_gsi, false);
3633 /* If the GIMPLE_CALL was in the last statement of BB, it may have
3634 been the source of abnormal edges. In this case, schedule
3635 the removal of dead abnormal edges. */
3636 gsi = gsi_start_bb (return_block);
3637 if (gsi_end_p (gsi))
3639 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3640 purge_dead_abnormal_edges = true;
3644 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
3645 purge_dead_abnormal_edges = false;
3648 stmt_gsi = gsi_start_bb (return_block);
3650 /* Build a block containing code to initialize the arguments, the
3651 actual inline expansion of the body, and a label for the return
3652 statements within the function to jump to. The type of the
3653 statement expression is the return type of the function call. */
3654 id->block = make_node (BLOCK);
3655 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
3656 BLOCK_SOURCE_LOCATION (id->block) = input_location;
3657 prepend_lexical_block (gimple_block (stmt), id->block);
3659 /* Local declarations will be replaced by their equivalents in this map.  */
3662 id->decl_map = pointer_map_create ();
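/* Stash any debug-decl map left over from an outer inlining context; it is
   restored once this body has been copied.  */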
3663 dst = id->debug_map;
3664 id->debug_map = NULL;
3666 /* Record the function we are about to inline. */
3668 id->src_node = cg_edge->callee;
3669 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
3670 id->gimple_call = stmt;
3672 gcc_assert (!id->src_cfun->after_inlining);
3675 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
3677 gimple_stmt_iterator si = gsi_last_bb (bb);
3678 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
3682 initialize_inlined_parameters (id, stmt, fn, bb);
3684 if (DECL_INITIAL (fn))
3685 prepend_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
3687 /* Return statements in the function body will be replaced by jumps
3688 to the RET_LABEL. */
3689 gcc_assert (DECL_INITIAL (fn));
3690 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
3692 /* Find the LHS to which the result of this call is assigned. */
3694 if (gimple_call_lhs (stmt))
3696 modify_dest = gimple_call_lhs (stmt);
3698 /* The function which we are inlining might not return a value,
3699 in which case we should issue a warning that the function
3700 does not return a value. In that case the optimizers will
3701 see that the variable to which the value is assigned was not
3702 initialized. We do not want to issue a warning about that
3703 uninitialized variable. */
3704 if (DECL_P (modify_dest))
3705 TREE_NO_WARNING (modify_dest) = 1;
3707 if (gimple_call_return_slot_opt_p (stmt))
3709 return_slot = modify_dest;
3716 /* If we are inlining a call to the C++ operator new, we don't want
3717 to use type based alias analysis on the return value. Otherwise
3718 we may get confused if the compiler sees that the inlined new
3719 function returns a pointer which was just deleted. See bug
3721 if (DECL_IS_OPERATOR_NEW (fn))
3727 /* Declare the return variable for the function. */
3728 retvar = declare_return_variable (id, return_slot, modify_dest, &use_retvar);
3730 /* Add local vars in this inlined callee to caller. */
3731 t_step = id->src_cfun->local_decls;
3732 for (; t_step; t_step = TREE_CHAIN (t_step))
3734 var = TREE_VALUE (t_step);
3735 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
3737 if (var_ann (var) && add_referenced_var (var))
3738 cfun->local_decls = tree_cons (NULL_TREE, var,
3741 else if (!can_be_nonlocal (var, id))
3742 cfun->local_decls = tree_cons (NULL_TREE, remap_decl (var, id),
3746 if (dump_file && (dump_flags & TDF_DETAILS))
3748 fprintf (dump_file, "Inlining ");
3749 print_generic_expr (dump_file, id->src_fn, 0);
3750 fprintf (dump_file, " to ");
3751 print_generic_expr (dump_file, id->dst_fn, 0);
3752 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
3755 /* This is it. Duplicate the callee body. Assume callee is
3756 pre-gimplified. Note that we must not alter the caller
3757 function in any way before this point, as this CALL_EXPR may be
3758 a self-referential call; if we're calling ourselves, we need to
3759 duplicate our body before altering anything. */
3760 copy_body (id, bb->count,
3761 cg_edge->frequency * REG_BR_PROB_BASE / CGRAPH_FREQ_BASE,
3764 /* Reset the escaped and callused solutions. */
3765 if (cfun->gimple_df)
3767 pt_solution_reset (&cfun->gimple_df->escaped);
3768 pt_solution_reset (&cfun->gimple_df->callused);
3774 pointer_map_destroy (id->debug_map);
3775 id->debug_map = dst;
3777 pointer_map_destroy (id->decl_map);
3780 /* Unlink the call's virtual operands before replacing it. */
3781 unlink_stmt_vdef (stmt);
3783 /* If the inlined function returns a result that we care about,
3784 substitute the GIMPLE_CALL with an assignment of the return
3785 variable to the LHS of the call. That is, if STMT was
3786 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
3787 if (use_retvar && gimple_call_lhs (stmt))
3789 gimple old_stmt = stmt;
3790 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
3791 gsi_replace (&stmt_gsi, stmt, false);
3792 if (gimple_in_ssa_p (cfun))
3793 mark_symbols_for_renaming (stmt);
3794 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
3798 /* Handle the case of inlining a function with no return
3799 statement, which causes the return value to become undefined. */
3800 if (gimple_call_lhs (stmt)
3801 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
3803 tree name = gimple_call_lhs (stmt);
3804 tree var = SSA_NAME_VAR (name);
3805 tree def = gimple_default_def (cfun, var);
3809 /* If the variable is used undefined, make this name
3810 undefined via a move. */
3811 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
3812 gsi_replace (&stmt_gsi, stmt, true);
3816 /* Otherwise make this variable undefined. */
3817 gsi_remove (&stmt_gsi, true);
3818 set_default_def (var, name);
3819 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
3823 gsi_remove (&stmt_gsi, true);
3826 if (purge_dead_abnormal_edges)
3827 gimple_purge_dead_abnormal_call_edges (return_block);
3829 /* If the value of the new expression is ignored, that's OK. We
3830 don't warn about this for CALL_EXPRs, so we shouldn't warn about
3831 the equivalent inlined version either. */
3832 if (is_gimple_assign (stmt))
3834 gcc_assert (gimple_assign_single_p (stmt)
3835 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
3836 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
3839 /* Output the inlining info for this abstract function, since it has been
3840 inlined. If we don't do this now, we can lose the information about the
3841 variables in the function when the blocks get blown away as soon as we
3842 remove the cgraph node. */
3843 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
3845 /* Update callgraph if needed. */
3846 if (cg_edge->callee->clone_of
3847 && !cg_edge->callee->clone_of->next_sibling_clone
3848 && !cg_edge->callee->analyzed)
3849 cgraph_remove_node (cg_edge->callee);
3850 cgraph_remove_node (cg_edge->callee);
3852 id->block = NULL_TREE;
3853 successfully_inlined = TRUE;
3856 input_location = saved_location;
3857 return successfully_inlined;
3860 /* Expand call statements reachable from STMT_P.
3861 We can only have CALL_EXPRs as the "toplevel" tree code or nested
3862 in a MODIFY_EXPR. See tree-gimple.c:get_call_expr_in(). We can
3863 unfortunately not use that function here because we need a pointer
3864 to the CALL_EXPR, not the tree itself. */
3867 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
3869 gimple_stmt_iterator gsi;
3871 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3873 gimple stmt = gsi_stmt (gsi);
3875 if (is_gimple_call (stmt)
3876 && expand_call_inline (bb, stmt, id))
3884 /* Walk all basic blocks created after FIRST and try to fold every statement
3885 in the STATEMENTS pointer set. */
3888 fold_marked_statements (int first, struct pointer_set_t *statements)
3890 for (; first < n_basic_blocks; first++)
3891 if (BASIC_BLOCK (first))
3893 gimple_stmt_iterator gsi;
3895 for (gsi = gsi_start_bb (BASIC_BLOCK (first));
3898 if (pointer_set_contains (statements, gsi_stmt (gsi)))
3900 gimple old_stmt = gsi_stmt (gsi);
3901 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
3903 if (old_decl && DECL_BUILT_IN (old_decl))
3905 /* Folding builtins can create multiple instructions;
3906 we need to look at all of them. */
3907 gimple_stmt_iterator i2 = gsi;
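/* Remember where we were, so that every statement produced by folding
   the builtin below can be revisited and its call edges updated.  */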
3909 if (fold_stmt (&gsi))
3913 i2 = gsi_start_bb (BASIC_BLOCK (first));
3918 new_stmt = gsi_stmt (i2);
3919 update_stmt (new_stmt);
3920 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
3923 if (new_stmt == gsi_stmt (gsi))
3925 /* It is okay to check only for the very last
3926 of these statements.  If it is a throwing
3927 statement, nothing will change.  If it isn't,
3928 this can remove EH edges.  The only problematic
3929 case would be if some intermediate stmts could
3930 throw, but not the last one.  That would mean
3931 we'd have to split the block, which we can't do
3932 here and we'd lose anyway.  And as builtins
3933 probably never throw, this is all moot anyway.  */
3935 if (maybe_clean_or_replace_eh_stmt (old_stmt,
3937 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
3944 else if (fold_stmt (&gsi))
3946 /* Re-read the statement from GSI as fold_stmt() may have changed it.  */
3948 gimple new_stmt = gsi_stmt (gsi);
3949 update_stmt (new_stmt);
3951 if (is_gimple_call (old_stmt)
3952 || is_gimple_call (new_stmt))
3953 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
3956 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
3957 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
3963 /* Return true if BB has at least one abnormal outgoing edge. */
3966 has_abnormal_outgoing_edge_p (basic_block bb)
3971 FOR_EACH_EDGE (e, ei, bb->succs)
3972 if (e->flags & EDGE_ABNORMAL)
3978 /* Expand calls to inline functions in the body of FN. */
3981 optimize_inline_calls (tree fn)
3986 int last = n_basic_blocks;
3987 struct gimplify_ctx gctx;
3989 /* There is no point in performing inlining if errors have already
3990 occurred -- and we might crash if we try to inline invalid code.  */
3992 if (errorcount || sorrycount)
3996 memset (&id, 0, sizeof (id));
3998 id.src_node = id.dst_node = cgraph_node (fn);
4000 /* Or any functions that aren't finished yet. */
4001 prev_fn = NULL_TREE;
4002 if (current_function_decl)
4004 id.dst_fn = current_function_decl;
4005 prev_fn = current_function_decl;
4008 id.copy_decl = copy_decl_maybe_to_var;
4009 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4010 id.transform_new_cfg = false;
4011 id.transform_return_to_modify = true;
4012 id.transform_lang_insert_block = NULL;
4013 id.statements_to_fold = pointer_set_create ();
4015 push_gimplify_context (&gctx);
4017 /* We make no attempts to keep dominance info up-to-date. */
4018 free_dominance_info (CDI_DOMINATORS);
4019 free_dominance_info (CDI_POST_DOMINATORS);
4021 /* Register specific gimple functions. */
4022 gimple_register_cfg_hooks ();
4024 /* Reach the trees by walking over the CFG, and note the
4025 enclosing basic-blocks in the call edges. */
4026 /* We walk the blocks going forward, because inlined function bodies
4027 will split id->current_basic_block, and the new blocks will
4028 follow it; we'll trudge through them, processing their CALL_EXPRs along the way. */
4031 gimple_expand_calls_inline (bb, &id);
4033 pop_gimplify_context (NULL);
4035 #ifdef ENABLE_CHECKING
4037 struct cgraph_edge *e;
4039 verify_cgraph_node (id.dst_node);
4041 /* Double check that we inlined everything we are supposed to inline. */
4042 for (e = id.dst_node->callees; e; e = e->next_callee)
4043 gcc_assert (e->inline_failed);
4047 /* Fold the statements before compacting/renumbering the basic blocks. */
4048 fold_marked_statements (last, id.statements_to_fold);
4049 pointer_set_destroy (id.statements_to_fold);
4051 gcc_assert (!id.debug_stmts);
4053 /* Renumber the (code) basic_blocks consecutively. */
4055 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4058 fold_cond_expr_cond ();
4059 delete_unreachable_blocks_update_callgraph (&id);
4060 #ifdef ENABLE_CHECKING
4061 verify_cgraph_node (id.dst_node);
4064 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4065 not possible yet - the IPA passes might make various functions not
4066 throw and they don't care to proactively update local EH info.  This is
4067 done later in the fixup_cfg pass that also executes the verification. */
4068 return (TODO_update_ssa
4070 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4071 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
4074 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4077 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4079 enum tree_code code = TREE_CODE (*tp);
4080 enum tree_code_class cl = TREE_CODE_CLASS (code);
4082 /* We make copies of most nodes. */
4083 if (IS_EXPR_CODE_CLASS (cl)
4084 || code == TREE_LIST
4086 || code == TYPE_DECL
4087 || code == OMP_CLAUSE)
4089 /* Because the chain gets clobbered when we make a copy, we save it here.  */
4091 tree chain = NULL_TREE, new_tree;
4093 chain = TREE_CHAIN (*tp);
4095 /* Copy the node. */
4096 new_tree = copy_node (*tp);
4098 /* Propagate mudflap marked-ness. */
4099 if (flag_mudflap && mf_marked_p (*tp))
4104 /* Now, restore the chain, if appropriate. That will cause
4105 walk_tree to walk into the chain as well. */
4106 if (code == PARM_DECL
4107 || code == TREE_LIST
4108 || code == OMP_CLAUSE)
4109 TREE_CHAIN (*tp) = chain;
4111 /* For now, we don't update BLOCKs when we make copies. So, we
4112 have to nullify all BIND_EXPRs. */
4113 if (TREE_CODE (*tp) == BIND_EXPR)
4114 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4116 else if (code == CONSTRUCTOR)
4118 /* CONSTRUCTOR nodes need special handling because
4119 we need to duplicate the vector of elements. */
4122 new_tree = copy_node (*tp);
4124 /* Propagate mudflap marked-ness. */
4125 if (flag_mudflap && mf_marked_p (*tp))
4128 CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc,
4129 CONSTRUCTOR_ELTS (*tp));
4132 else if (TREE_CODE_CLASS (code) == tcc_type)
4134 else if (TREE_CODE_CLASS (code) == tcc_declaration)
4136 else if (TREE_CODE_CLASS (code) == tcc_constant)
4139 gcc_assert (code != STATEMENT_LIST);
4143 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
4144 information indicating to what new SAVE_EXPR this one should be mapped,
4145 use that one. Otherwise, create a new node and enter it in ST. FN is
4146 the function into which the copy will be placed. */
4149 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
4151 struct pointer_map_t *st = (struct pointer_map_t *) st_;
4155 /* See if we already encountered this SAVE_EXPR. */
4156 n = (tree *) pointer_map_contains (st, *tp);
4158 /* If we didn't already remap this SAVE_EXPR, do so now. */
4161 t = copy_node (*tp);
4163 /* Remember this SAVE_EXPR. */
4164 *pointer_map_insert (st, *tp) = t;
4165 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
4166 *pointer_map_insert (st, t) = t;
4170 /* We've already walked into this SAVE_EXPR; don't do it again. */
4175 /* Replace this SAVE_EXPR with the copy. */
4179 /* Called via walk_tree. If *TP points to a DECL_STMT for a local label,
4180 copies the declaration and enters it in the splay_tree in DATA (which is
4181 really a `copy_body_data *'). */
4184 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
4187 copy_body_data *id = (copy_body_data *) data;
4189 /* Don't walk into types. */
4193 else if (TREE_CODE (*tp) == LABEL_EXPR)
4195 tree decl = TREE_OPERAND (*tp, 0);
4197 /* Copy the decl and remember the copy. */
4198 insert_decl_map (id, decl, id->copy_decl (decl, id));
4204 /* Perform any modifications to EXPR required when it is unsaved. Does
4205 not recurse into EXPR's subtrees. */
4208 unsave_expr_1 (tree expr)
4210 switch (TREE_CODE (expr))
4213 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4214 It's OK for this to happen if it was part of a subtree that
4215 isn't immediately expanded, such as operand 2 of another TARGET_EXPR. */
4217 if (TREE_OPERAND (expr, 1))
4220 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4221 TREE_OPERAND (expr, 3) = NULL_TREE;
4229 /* Called via walk_tree when an expression is unsaved. Using the decl map
4230 of the `copy_body_data *' passed in DATA, remaps all local declarations
4231 to their replacements. */
4234 unsave_r (tree *tp, int *walk_subtrees, void *data)
4236 copy_body_data *id = (copy_body_data *) data;
4237 struct pointer_map_t *st = id->decl_map;
4240 /* Only a local declaration (variable or label). */
4241 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
4242 || TREE_CODE (*tp) == LABEL_DECL)
4244 /* Lookup the declaration. */
4245 n = (tree *) pointer_map_contains (st, *tp);
4247 /* If it's there, remap it. */
4252 else if (TREE_CODE (*tp) == STATEMENT_LIST)
4254 else if (TREE_CODE (*tp) == BIND_EXPR)
4255 copy_bind_expr (tp, walk_subtrees, id);
4256 else if (TREE_CODE (*tp) == SAVE_EXPR
4257 || TREE_CODE (*tp) == TARGET_EXPR)
4258 remap_save_expr (tp, st, walk_subtrees);
4261 copy_tree_r (tp, walk_subtrees, NULL);
4263 /* Do whatever unsaving is required. */
4264 unsave_expr_1 (*tp);
4267 /* Keep iterating. */
4271 /* Copies everything in EXPR and replaces variables, labels
4272 and SAVE_EXPRs local to EXPR. */
4275 unsave_expr_now (tree expr)
4279 /* There's nothing to do for NULL_TREE. */
4284 memset (&id, 0, sizeof (id));
4285 id.src_fn = current_function_decl;
4286 id.dst_fn = current_function_decl;
4287 id.decl_map = pointer_map_create ();
4288 id.debug_map = NULL;
4290 id.copy_decl = copy_decl_no_change;
4291 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4292 id.transform_new_cfg = false;
4293 id.transform_return_to_modify = false;
4294 id.transform_lang_insert_block = NULL;
4296 /* Walk the tree once to find local labels. */
4297 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
4299 /* Walk the tree again, copying, remapping, and unsaving. */
4300 walk_tree (&expr, unsave_r, &id, NULL);
4303 pointer_map_destroy (id.decl_map);
4305 pointer_map_destroy (id.debug_map);
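/* A minimal usage sketch (hypothetical caller, not part of the original
   file): a front end that needs to emit one GENERIC expression in two
   places can take an unsaved copy first, so that SAVE_EXPRs and local
   labels are not shared between the two uses.  */

static void
example_emit_twice (tree expr, tree *first_use, tree *second_use)
{
  *first_use = expr;
  *second_use = unsave_expr_now (expr);
}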
4310 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
4311 label, copies the declaration and enters it in the decl map in DATA (which
4312 is really a 'copy_body_data *'). */
4315 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4316 bool *handled_ops_p ATTRIBUTE_UNUSED,
4317 struct walk_stmt_info *wi)
4319 copy_body_data *id = (copy_body_data *) wi->info;
4320 gimple stmt = gsi_stmt (*gsip);
4322 if (gimple_code (stmt) == GIMPLE_LABEL)
4324 tree decl = gimple_label_label (stmt);
4326 /* Copy the decl and remember the copy. */
4327 insert_decl_map (id, decl, id->copy_decl (decl, id));
4334 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4335 Using the decl map of the copy_body_data passed via WI->info, remaps
4336 all local declarations in gimple operands to their replacements. */
4340 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4342 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4343 copy_body_data *id = (copy_body_data *) wi->info;
4344 struct pointer_map_t *st = id->decl_map;
4348 /* Only a local declaration (variable or label). */
4349 if ((TREE_CODE (expr) == VAR_DECL
4350 && !TREE_STATIC (expr))
4351 || TREE_CODE (expr) == LABEL_DECL)
4353 /* Lookup the declaration. */
4354 n = (tree *) pointer_map_contains (st, expr);
4356 /* If it's there, remap it. */
4361 else if (TREE_CODE (expr) == STATEMENT_LIST
4362 || TREE_CODE (expr) == BIND_EXPR
4363 || TREE_CODE (expr) == SAVE_EXPR)
4365 else if (TREE_CODE (expr) == TARGET_EXPR)
4367 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4368 It's OK for this to happen if it was part of a subtree that
4369 isn't immediately expanded, such as operand 2 of another TARGET_EXPR. */
4371 if (!TREE_OPERAND (expr, 1))
4373 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4374 TREE_OPERAND (expr, 3) = NULL_TREE;
4378 /* Keep iterating. */
4383 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4384 Using the decl map of the copy_body_data passed via WI->info, remaps
4385 all local declarations in gimple statements to their replacements. */
4389 replace_locals_stmt (gimple_stmt_iterator *gsip,
4390 bool *handled_ops_p ATTRIBUTE_UNUSED,
4391 struct walk_stmt_info *wi)
4393 copy_body_data *id = (copy_body_data *) wi->info;
4394 gimple stmt = gsi_stmt (*gsip);
4396 if (gimple_code (stmt) == GIMPLE_BIND)
4398 tree block = gimple_bind_block (stmt);
4402 remap_block (&block, id);
4403 gimple_bind_set_block (stmt, block);
4406 /* This will remap a lot of the same decls again, but this should be harmless. */
4408 if (gimple_bind_vars (stmt))
4409 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), NULL, id));
4412 /* Keep iterating. */
4417 /* Copies everything in SEQ and replaces variables and labels local to
4418 current_function_decl. */
4421 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4424 struct walk_stmt_info wi;
4425 struct pointer_set_t *visited;
4428 /* There's nothing to do for an empty sequence. */
4433 memset (&id, 0, sizeof (id));
4434 id.src_fn = current_function_decl;
4435 id.dst_fn = current_function_decl;
4436 id.decl_map = pointer_map_create ();
4437 id.debug_map = NULL;
4439 id.copy_decl = copy_decl_no_change;
4440 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4441 id.transform_new_cfg = false;
4442 id.transform_return_to_modify = false;
4443 id.transform_lang_insert_block = NULL;
4445 /* Walk the tree once to find local labels. */
4446 memset (&wi, 0, sizeof (wi));
4447 visited = pointer_set_create ();
4450 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4451 pointer_set_destroy (visited);
4453 copy = gimple_seq_copy (seq);
4455 /* Walk the copy, remapping decls. */
4456 memset (&wi, 0, sizeof (wi));
4458 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4461 pointer_map_destroy (id.decl_map);
4463 pointer_map_destroy (id.debug_map);
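/* A minimal usage sketch (hypothetical caller, not part of the original
   file): a lowering pass that must instantiate the same statement sequence
   twice within the current function can obtain a second copy whose local
   decls and labels are freshly remapped.  */

static gimple_seq
example_second_instance (gimple_seq seq)
{
  /* SEQ itself stays usable; the returned copy shares no locals with it.  */
  return copy_gimple_seq_and_replace_locals (seq);
}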
4469 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
4472 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4481 debug_find_tree (tree top, tree search)
4483 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
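/* For example, from a gdb session one can check whether SEARCH occurs
   somewhere below TOP with

     (gdb) p debug_find_tree (top, search)

   where both arguments are any expressions of type tree.  */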
4487 /* Declare the variables created by the inliner. Add all the variables in
4488 VARS to BLOCK and record them in the function's local decls. */
4491 declare_inline_vars (tree block, tree vars)
4494 for (t = vars; t; t = TREE_CHAIN (t))
4496 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4497 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4498 cfun->local_decls = tree_cons (NULL_TREE, t, cfun->local_decls);
4502 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4505 /* Finish the copy COPY of DECL: propagate flags from DECL, record DECL
4506 as COPY's abstract origin, clear any stale RTL, and give COPY the
4507 appropriate DECL_CONTEXT (usually ID->dst_fn). */
4510 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
4512 /* Don't generate debug information for the copy if we wouldn't have
4513 generated it for the original. */
4514 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4515 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4517 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4518 declaration inspired this copy. */
4519 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4521 /* The new variable/label has no RTL, yet. */
4522 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4523 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
4524 SET_DECL_RTL (copy, NULL_RTX);
4526 /* These args would always appear unused, if not for this. */
4527 TREE_USED (copy) = 1;
4529 /* Set the context for the new declaration. */
4530 if (!DECL_CONTEXT (decl))
4531 /* Globals stay global. */
4533 else if (DECL_CONTEXT (decl) != id->src_fn)
4534 /* Things that weren't in the scope of the function we're inlining
4535 from aren't in the scope we're inlining to, either. */
4537 else if (TREE_STATIC (decl))
4538 /* Function-scoped static variables should stay in the original function. */
4542 /* Ordinary automatic local variables are now in the scope of the new function. */
4544 DECL_CONTEXT (copy) = id->dst_fn;
4550 copy_decl_to_var (tree decl, copy_body_data *id)
4554 gcc_assert (TREE_CODE (decl) == PARM_DECL
4555 || TREE_CODE (decl) == RESULT_DECL);
4557 type = TREE_TYPE (decl);
4559 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4560 VAR_DECL, DECL_NAME (decl), type);
4561 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4562 TREE_READONLY (copy) = TREE_READONLY (decl);
4563 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4564 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4566 return copy_decl_for_dup_finish (id, decl, copy);
4569 /* Like copy_decl_to_var, but create a return slot object instead of a
4570 pointer variable for return by invisible reference. */
4573 copy_result_decl_to_var (tree decl, copy_body_data *id)
4577 gcc_assert (TREE_CODE (decl) == PARM_DECL
4578 || TREE_CODE (decl) == RESULT_DECL);
4580 type = TREE_TYPE (decl);
4581 if (DECL_BY_REFERENCE (decl))
4582 type = TREE_TYPE (type);
4584 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4585 VAR_DECL, DECL_NAME (decl), type);
4586 TREE_READONLY (copy) = TREE_READONLY (decl);
4587 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4588 if (!DECL_BY_REFERENCE (decl))
4590 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4591 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4594 return copy_decl_for_dup_finish (id, decl, copy);
4598 copy_decl_no_change (tree decl, copy_body_data *id)
4602 copy = copy_node (decl);
4604 /* The COPY is not abstract; it will be generated in DST_FN. */
4605 DECL_ABSTRACT (copy) = 0;
4606 lang_hooks.dup_lang_specific_decl (copy);
4608 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
4609 been taken; it's for internal bookkeeping in expand_goto_internal. */
4610 if (TREE_CODE (copy) == LABEL_DECL)
4612 TREE_ADDRESSABLE (copy) = 0;
4613 LABEL_DECL_UID (copy) = -1;
4616 return copy_decl_for_dup_finish (id, decl, copy);
4620 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
4622 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
4623 return copy_decl_to_var (decl, id);
4625 return copy_decl_no_change (decl, id);
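/* A minimal sketch (hypothetical helper, not part of the original file) of
   how the helpers above are chosen as the copy_decl callback of a
   copy_body_data: inlining-style copies remap PARM_DECLs and RESULT_DECLs
   to VAR_DECLs, while cloning and versioning keep the declaration kind
   unchanged.  */

static void
example_select_copy_decl_hook (copy_body_data *id, bool inlining_p)
{
  id->copy_decl = inlining_p ? copy_decl_maybe_to_var : copy_decl_no_change;
}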
4628 /* Return a copy of the function's argument tree. */
4630 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
4631 bitmap args_to_skip, tree *vars)
4634 tree new_parm = NULL;
4639 for (arg = orig_parm; arg; arg = TREE_CHAIN (arg), i++)
4640 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
4642 tree new_tree = remap_decl (arg, id);
4643 lang_hooks.dup_lang_specific_decl (new_tree);
4645 parg = &TREE_CHAIN (new_tree);
4647 else if (!pointer_map_contains (id->decl_map, arg))
4649 /* Make an equivalent VAR_DECL. If the argument was used
4650 as a temporary variable later in the function, its uses will be
4651 replaced by this local variable. */
4652 tree var = copy_decl_to_var (arg, id);
4654 add_referenced_var (var);
4655 insert_decl_map (id, arg, var);
4656 /* Declare this new variable. */
4657 TREE_CHAIN (var) = *vars;
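/* A minimal sketch (hypothetical helper, not part of the original file):
   ARGS_TO_SKIP is indexed by argument position, so a caller that wants the
   clone to drop the first parameter could build the bitmap like this.  */

static bitmap
example_skip_first_argument (void)
{
  bitmap args_to_skip = BITMAP_ALLOC (NULL);
  bitmap_set_bit (args_to_skip, 0);  /* Skip argument number 0.  */
  return args_to_skip;
}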
4663 /* Return a copy of the function's static chain. */
4665 copy_static_chain (tree static_chain, copy_body_data * id)
4667 tree *chain_copy, *pvar;
4669 chain_copy = &static_chain;
4670 for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
4672 tree new_tree = remap_decl (*pvar, id);
4673 lang_hooks.dup_lang_specific_decl (new_tree);
4674 TREE_CHAIN (new_tree) = TREE_CHAIN (*pvar);
4677 return static_chain;
4680 /* Return true if the function is allowed to be versioned.
4681 This is a guard for the versioning functionality. */
4684 tree_versionable_function_p (tree fndecl)
4686 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
4687 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
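/* For example, a function declared as

     int f (int) __attribute__ ((noclone));

   is rejected here, as is any function for which copy_forbidden reports a
   construct that prevents duplicating its body.  */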
4690 /* Delete all unreachable basic blocks and update the callgraph.
4691 Doing so is somewhat nontrivial because we need to update all clones and
4692 remove inline functions that become unreachable. */
4695 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
4697 bool changed = false;
4698 basic_block b, next_bb;
4700 find_unreachable_blocks ();
4702 /* Delete all unreachable basic blocks. */
4704 for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
4706 next_bb = b->next_bb;
4708 if (!(b->flags & BB_REACHABLE))
4710 gimple_stmt_iterator bsi;
4712 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
4713 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL)
4715 struct cgraph_edge *e;
4716 struct cgraph_node *node;
4718 if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
4720 if (!e->inline_failed)
4721 cgraph_remove_node_and_inline_clones (e->callee);
4723 cgraph_remove_edge (e);
4725 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
4726 && id->dst_node->clones)
4727 for (node = id->dst_node->clones; node != id->dst_node;)
4729 if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
4731 if (!e->inline_failed)
4732 cgraph_remove_node_and_inline_clones (e->callee);
4734 cgraph_remove_edge (e);
4738 node = node->clones;
4739 else if (node->next_sibling_clone)
4740 node = node->next_sibling_clone;
4743 while (node != id->dst_node && !node->next_sibling_clone)
4744 node = node->clone_of;
4745 if (node != id->dst_node)
4746 node = node->next_sibling_clone;
4750 delete_basic_block (b);
4756 tidy_fallthru_edges ();
4760 /* Update clone info after duplication. */
4763 update_clone_info (copy_body_data * id)
4765 struct cgraph_node *node;
4766 if (!id->dst_node->clones)
4768 for (node = id->dst_node->clones; node != id->dst_node;)
4770 /* First update replace maps to match the new body. */
4771 if (node->clone.tree_map)
4774 for (i = 0; i < VEC_length (ipa_replace_map_p, node->clone.tree_map); i++)
4776 struct ipa_replace_map *replace_info;
4777 replace_info = VEC_index (ipa_replace_map_p, node->clone.tree_map, i);
4778 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
4779 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
4783 node = node->clones;
4784 else if (node->next_sibling_clone)
4785 node = node->next_sibling_clone;
4788 while (node != id->dst_node && !node->next_sibling_clone)
4789 node = node->clone_of;
4790 if (node != id->dst_node)
4791 node = node->next_sibling_clone;
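/* The two loops above walk the clone tree of id->dst_node in preorder.
   The same traversal, written as a standalone helper (hypothetical, not
   part of the original file), looks like this.  */

static void
example_walk_clone_tree (struct cgraph_node *root,
			 void (*visit) (struct cgraph_node *))
{
  struct cgraph_node *node;

  if (!root->clones)
    return;
  for (node = root->clones; node != root;)
    {
      visit (node);
      /* Descend into clones of clones first, then move to the next sibling;
	 once a subtree is exhausted, climb back via clone_of until an
	 unvisited sibling is found.  */
      if (node->clones)
	node = node->clones;
      else if (node->next_sibling_clone)
	node = node->next_sibling_clone;
      else
	{
	  while (node != root && !node->next_sibling_clone)
	    node = node->clone_of;
	  if (node != root)
	    node = node->next_sibling_clone;
	}
    }
}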
4796 /* Create a copy of a function's tree.
4797 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
4798 of the original function and the new copied function
4799 respectively. In case we want to replace a DECL
4800 tree with another tree while duplicating the function's
4801 body, TREE_MAP represents the mapping between these
4802 trees. If UPDATE_CLONES is set, the call_stmt fields
4803 of edges of clones of the function will be updated. */
4805 tree_function_versioning (tree old_decl, tree new_decl,
4806 VEC(ipa_replace_map_p,gc)* tree_map,
4807 bool update_clones, bitmap args_to_skip)
4809 struct cgraph_node *old_version_node;
4810 struct cgraph_node *new_version_node;
4814 struct ipa_replace_map *replace_info;
4815 basic_block old_entry_block, bb;
4816 VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10);
4819 tree old_current_function_decl = current_function_decl;
4820 tree vars = NULL_TREE;
4822 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
4823 && TREE_CODE (new_decl) == FUNCTION_DECL);
4824 DECL_POSSIBLY_INLINED (old_decl) = 1;
4826 old_version_node = cgraph_node (old_decl);
4827 new_version_node = cgraph_node (new_decl);
4829 /* Output the inlining info for this abstract function, since it has been
4830 inlined. If we don't do this now, we can lose the information about the
4831 variables in the function when the blocks get blown away as soon as we
4832 remove the cgraph node. */
4833 (*debug_hooks->outlining_inline_function) (old_decl);
4835 DECL_ARTIFICIAL (new_decl) = 1;
4836 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
4837 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
4839 /* Prepare the data structures for the tree copy. */
4840 memset (&id, 0, sizeof (id));
4842 /* Create the set used to collect statements that need folding after copying. */
4843 id.statements_to_fold = pointer_set_create ();
4845 id.decl_map = pointer_map_create ();
4846 id.debug_map = NULL;
4847 id.src_fn = old_decl;
4848 id.dst_fn = new_decl;
4849 id.src_node = old_version_node;
4850 id.dst_node = new_version_node;
4851 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
4852 if (id.src_node->ipa_transforms_to_apply)
4854 VEC(ipa_opt_pass,heap) * old_transforms_to_apply = id.dst_node->ipa_transforms_to_apply;
4857 id.dst_node->ipa_transforms_to_apply = VEC_copy (ipa_opt_pass, heap,
4858 id.src_node->ipa_transforms_to_apply);
4859 for (i = 0; i < VEC_length (ipa_opt_pass, old_transforms_to_apply); i++)
4860 VEC_safe_push (ipa_opt_pass, heap, id.dst_node->ipa_transforms_to_apply,
4861 VEC_index (ipa_opt_pass,
4862 old_transforms_to_apply,
4866 id.copy_decl = copy_decl_no_change;
4867 id.transform_call_graph_edges
4868 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
4869 id.transform_new_cfg = true;
4870 id.transform_return_to_modify = false;
4871 id.transform_lang_insert_block = NULL;
4873 current_function_decl = new_decl;
4874 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
4875 (DECL_STRUCT_FUNCTION (old_decl));
4876 initialize_cfun (new_decl, old_decl,
4877 old_entry_block->count);
4878 push_cfun (DECL_STRUCT_FUNCTION (new_decl));
4880 /* Copy the function's static chain. */
4881 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
4883 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
4884 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
4887 /* If there's a tree_map, prepare for substitution. */
4889 for (i = 0; i < VEC_length (ipa_replace_map_p, tree_map); i++)
4892 replace_info = VEC_index (ipa_replace_map_p, tree_map, i);
4893 if (replace_info->replace_p)
4895 tree op = replace_info->new_tree;
4899 if (TREE_CODE (op) == VIEW_CONVERT_EXPR)
4900 op = TREE_OPERAND (op, 0);
4902 if (TREE_CODE (op) == ADDR_EXPR)
4904 op = TREE_OPERAND (op, 0);
4905 while (handled_component_p (op))
4906 op = TREE_OPERAND (op, 0);
4907 if (TREE_CODE (op) == VAR_DECL)
4908 add_referenced_var (op);
4910 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
4911 init = setup_one_parameter (&id, replace_info->old_tree,
4912 replace_info->new_tree, id.src_fn,
4916 VEC_safe_push (gimple, heap, init_stmts, init);
4919 /* Copy the function's arguments. */
4920 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
4921 DECL_ARGUMENTS (new_decl) =
4922 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
4923 args_to_skip, &vars);
4925 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
4927 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4928 number_blocks (id.dst_fn);
4930 declare_inline_vars (DECL_INITIAL (new_decl), vars);
4932 if (DECL_STRUCT_FUNCTION (old_decl)->local_decls != NULL_TREE)
4933 /* Add local vars. */
4934 for (t_step = DECL_STRUCT_FUNCTION (old_decl)->local_decls;
4935 t_step; t_step = TREE_CHAIN (t_step))
4937 tree var = TREE_VALUE (t_step);
4938 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
4939 cfun->local_decls = tree_cons (NULL_TREE, var, cfun->local_decls);
4940 else if (!can_be_nonlocal (var, &id))
4942 tree_cons (NULL_TREE, remap_decl (var, &id),
4946 /* Copy the function's body. */
4947 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
4948 ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR);
4950 if (DECL_RESULT (old_decl) != NULL_TREE)
4952 tree *res_decl = &DECL_RESULT (old_decl);
4953 DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
4954 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
4957 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4958 number_blocks (new_decl);
4960 /* We want to create the BB unconditionally, so that the addition of
4961 debug stmts doesn't affect BB count, which may in the end cause
4962 codegen differences. */
4963 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
4964 while (VEC_length (gimple, init_stmts))
4965 insert_init_stmt (&id, bb, VEC_pop (gimple, init_stmts));
4966 update_clone_info (&id);
4968 /* Remap the nonlocal_goto_save_area, if any. */
4969 if (cfun->nonlocal_goto_save_area)
4971 struct walk_stmt_info wi;
4973 memset (&wi, 0, sizeof (wi));
4975 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
4979 pointer_map_destroy (id.decl_map);
4981 pointer_map_destroy (id.debug_map);
4982 free_dominance_info (CDI_DOMINATORS);
4983 free_dominance_info (CDI_POST_DOMINATORS);
4985 fold_marked_statements (0, id.statements_to_fold);
4986 pointer_set_destroy (id.statements_to_fold);
4987 fold_cond_expr_cond ();
4988 delete_unreachable_blocks_update_callgraph (&id);
4989 update_ssa (TODO_update_ssa);
4990 free_dominance_info (CDI_DOMINATORS);
4991 free_dominance_info (CDI_POST_DOMINATORS);
4993 gcc_assert (!id.debug_stmts);
4994 VEC_free (gimple, heap, init_stmts);
4996 current_function_decl = old_current_function_decl;
4997 gcc_assert (!current_function_decl
4998 || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
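/* A minimal sketch (hypothetical caller, not part of the original file) of
   how a TREE_MAP entry can be built so the new version sees its first
   parameter replaced by the constant zero.  The field layout of
   ipa_replace_map and the GGC_NEW allocator are assumed to be the ones
   used by ipa-cp in this source tree.  */

static VEC(ipa_replace_map_p,gc) *
example_replace_first_param_with_zero (tree fndecl)
{
  VEC(ipa_replace_map_p,gc) *tree_map = VEC_alloc (ipa_replace_map_p, gc, 1);
  struct ipa_replace_map *map = GGC_NEW (struct ipa_replace_map);

  map->old_tree = DECL_ARGUMENTS (fndecl);  /* The first PARM_DECL.  */
  map->new_tree = build_int_cst (TREE_TYPE (map->old_tree), 0);
  map->replace_p = true;
  map->ref_p = false;
  VEC_safe_push (ipa_replace_map_p, gc, tree_map, map);
  return tree_map;
}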
5002 /* EXP is a CALL_EXPR present in a GENERIC expression tree. Try to integrate
5003 the callee and return the inlined body on success. */
5006 maybe_inline_call_in_expr (tree exp)
5008 tree fn = get_callee_fndecl (exp);
5010 /* We can only try to inline "const" functions. */
5011 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
5013 struct pointer_map_t *decl_map = pointer_map_create ();
5014 call_expr_arg_iterator iter;
5018 /* Remap the parameters. */
5019 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5021 param = TREE_CHAIN (param), arg = next_call_expr_arg (&iter))
5022 *pointer_map_insert (decl_map, param) = arg;
5024 memset (&id, 0, sizeof (id));
5026 id.dst_fn = current_function_decl;
5027 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5028 id.decl_map = decl_map;
5030 id.copy_decl = copy_decl_no_change;
5031 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5032 id.transform_new_cfg = false;
5033 id.transform_return_to_modify = true;
5034 id.transform_lang_insert_block = false;
5036 /* Make sure not to unshare trees behind the front-end's back
5037 since front-end specific mechanisms may rely on sharing. */
5038 id.regimplify = false;
5039 id.do_not_unshare = true;
5041 /* We're not inside any EH region. */
5044 t = copy_tree_body (&id);
5045 pointer_map_destroy (decl_map);
5047 /* We can only return something suitable for use in a GENERIC expression tree. */
5049 if (TREE_CODE (t) == MODIFY_EXPR)
5050 return TREE_OPERAND (t, 1);
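/* A minimal usage sketch (hypothetical caller, not part of the original
   file): a front end folding a GENERIC expression can try to replace a call
   to a "const" function whose body is still available in DECL_SAVED_TREE.  */

static tree
example_fold_const_call (tree call_expr)
{
  tree inlined = maybe_inline_call_in_expr (call_expr);
  return inlined ? inlined : call_expr;
}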
5056 /* Duplicate a type, fields and all. */
5059 build_duplicate_type (tree type)
5061 struct copy_body_data id;
5063 memset (&id, 0, sizeof (id));
5064 id.src_fn = current_function_decl;
5065 id.dst_fn = current_function_decl;
5067 id.decl_map = pointer_map_create ();
5068 id.debug_map = NULL;
5069 id.copy_decl = copy_decl_no_change;
5071 type = remap_type_1 (type, &id);
5073 pointer_map_destroy (id.decl_map);
5075 pointer_map_destroy (id.debug_map);
5077 TYPE_CANONICAL (type) = type;
5082 /* Return whether it is safe to inline the call in E: the caller and callee
5083 must use compatible target-specific options and EH personalities, and the
5084 call-site argument types must match the callee's parameter types. */
5086 tree_can_inline_p (struct cgraph_edge *e)
5089 /* This causes a regression in SPEC in that it prevents a cold function from
5090 inlining a hot function. Perhaps this should only apply to functions
5091 that the user declares hot/cold/optimize explicitly. */
5093 /* Don't inline a function with a higher optimization level than the
5094 caller, or with different space constraints (hot/cold functions). */
5095 tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (caller);
5096 tree callee_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (callee);
5098 if (caller_tree != callee_tree)
5100 struct cl_optimization *caller_opt
5101 = TREE_OPTIMIZATION ((caller_tree)
5103 : optimization_default_node);
5105 struct cl_optimization *callee_opt
5106 = TREE_OPTIMIZATION ((callee_tree)
5108 : optimization_default_node);
5110 if ((caller_opt->optimize > callee_opt->optimize)
5111 || (caller_opt->optimize_size != callee_opt->optimize_size))
5115 tree caller, callee;
5117 caller = e->caller->decl;
5118 callee = e->callee->decl;
5120 /* We cannot inline a function that uses a different EH personality than the caller. */
5122 if (DECL_FUNCTION_PERSONALITY (caller)
5123 && DECL_FUNCTION_PERSONALITY (callee)
5124 && (DECL_FUNCTION_PERSONALITY (caller)
5125 != DECL_FUNCTION_PERSONALITY (callee)))
5127 e->inline_failed = CIF_UNSPECIFIED;
5128 gimple_call_set_cannot_inline (e->call_stmt, true);
5132 /* Allow the backend to decide if inlining is ok. */
5133 if (!targetm.target_option.can_inline_p (caller, callee))
5135 e->inline_failed = CIF_TARGET_OPTION_MISMATCH;
5136 gimple_call_set_cannot_inline (e->call_stmt, true);
5137 e->call_stmt_cannot_inline_p = true;
5142 && !gimple_check_call_args (e->call_stmt))
5144 e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
5145 gimple_call_set_cannot_inline (e->call_stmt, true);
5146 e->call_stmt_cannot_inline_p = true;