2 Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Alexandre Oliva <aoliva@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "tree-inline.h"
34 #include "insn-config.h"
36 #include "langhooks.h"
37 #include "basic-block.h"
38 #include "tree-iterator.h"
41 #include "tree-mudflap.h"
42 #include "tree-flow.h"
45 #include "tree-flow.h"
46 #include "diagnostic.h"
49 #include "pointer-set.h"
51 #include "value-prof.h"
52 #include "tree-pass.h"
54 #include "integrate.h"
56 /* I'm not real happy about this, but we need to handle gimple and
60 /* Inlining, Cloning, Versioning, Parallelization
62 Inlining: a function body is duplicated, but the PARM_DECLs are
63 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
64 MODIFY_EXPRs that store to a dedicated returned-value variable.
65 The duplicated eh_region info of the copy will later be appended
66 to the info for the caller; the eh_region info in copied throwing
67 statements and RESX statements is adjusted accordingly.
69 Cloning: (only in C++) We have one body for a con/de/structor, and
70 multiple function decls, each with a unique parameter list.
71 Duplicate the body, using the given splay tree; some parameters
72 will become constants (like 0 or 1).
74 Versioning: a function body is duplicated and the result is a new
75 function, rather than being merged into the blocks of an existing
76 function as with inlining. Some parameters will become constants.
78 Parallelization: a region of a function is duplicated resulting in
79 a new function. Variables may be replaced with complex expressions
80 to enable shared variable semantics.
82 All of these will simultaneously look up any callgraph edges. If
83 we're going to inline the duplicated function body, and the given
84 function has some cloned callgraph nodes (one for each place this
85 function will be inlined) those callgraph edges will be duplicated.
86 If we're cloning the body, those callgraph edges will be
87 updated to point into the new body. (Note that the original
88 callgraph node and edge list will not be altered.)
90 See the CALL_EXPR handling case in copy_tree_body_r (). */
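/* For illustration only -- a rough sketch with made-up names, not the
   output of any particular dump.  When a call

       y = sq (a);

   to

       int sq (int x) { return x * x; }

   is inlined, the PARM_DECL x is remapped to a local VAR_DECL and the
   RETURN_EXPR becomes an assignment to the dedicated returned-value
   variable, so the caller roughly ends up with

       x.1 = a;
       retval.2 = x.1 * x.1;
       y = retval.2;

   where x.1 and retval.2 stand for the remapped parameter and the
   returned-value variable mentioned above.  */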
94 o In order to make inlining-on-trees work, we pessimized
95 function-local static constants. In particular, they are now
96 always output, even when not addressed. Fix this by treating
97 function-local static constants just like global static
98 constants; the back-end already knows not to output them if they
101 o Provide heuristics to clamp inlining of recursive template
105 /* Weights that estimate_num_insns uses for heuristics in inlining. */
107 eni_weights eni_inlining_weights;
109 /* Weights that estimate_num_insns uses to estimate the size of the
112 eni_weights eni_size_weights;
114 /* Weights that estimate_num_insns uses to estimate the time necessary
115 to execute the produced code. */
117 eni_weights eni_time_weights;
121 static tree declare_return_variable (copy_body_data *, tree, tree);
122 static void remap_block (tree *, copy_body_data *);
123 static void copy_bind_expr (tree *, int *, copy_body_data *);
124 static tree mark_local_for_remap_r (tree *, int *, void *);
125 static void unsave_expr_1 (tree);
126 static tree unsave_r (tree *, int *, void *);
127 static void declare_inline_vars (tree, tree);
128 static void remap_save_expr (tree *, void *, int *);
129 static void prepend_lexical_block (tree current_block, tree new_block);
130 static tree copy_decl_to_var (tree, copy_body_data *);
131 static tree copy_result_decl_to_var (tree, copy_body_data *);
132 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
133 static gimple remap_gimple_stmt (gimple, copy_body_data *);
134 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
136 /* Insert a tree->tree mapping for ID. Although the name suggests
137 that the trees should be variables, it is used for more than that. */
140 insert_decl_map (copy_body_data *id, tree key, tree value)
142 *pointer_map_insert (id->decl_map, key) = value;
144 /* Always insert an identity map as well. If we see this same new
145 node again, we won't want to duplicate it a second time. */
147 *pointer_map_insert (id->decl_map, value) = value;
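/* For example (names made up): if remap_decl has just copied the local
   VAR_DECL a to a.1, we map a -> a.1 and also a.1 -> a.1, so that running
   into a.1 again simply hands back a.1 instead of producing yet another
   copy.  */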
150 /* Insert a tree->tree mapping for ID. This is only used for
154 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
156 if (!gimple_in_ssa_p (id->src_cfun))
159 if (!MAY_HAVE_DEBUG_STMTS)
162 if (!target_for_debug_bind (key))
165 gcc_assert (TREE_CODE (key) == PARM_DECL);
166 gcc_assert (TREE_CODE (value) == VAR_DECL);
169 id->debug_map = pointer_map_create ();
171 *pointer_map_insert (id->debug_map, key) = value;
174 /* If nonzero, we're remapping the contents of inlined debug
175 statements. If negative, an error has occurred, such as a
176 reference to a variable that isn't available in the inlined
178 static int processing_debug_stmt = 0;
180 /* Construct new SSA name for old NAME. ID is the inline context. */
183 remap_ssa_name (tree name, copy_body_data *id)
188 gcc_assert (TREE_CODE (name) == SSA_NAME);
190 n = (tree *) pointer_map_contains (id->decl_map, name);
192 return unshare_expr (*n);
194 if (processing_debug_stmt)
196 processing_debug_stmt = -1;
200 /* Do not set DEF_STMT yet as statement is not copied yet. We do that
202 new_tree = remap_decl (SSA_NAME_VAR (name), id);
204 /* We might've substituted a constant or another SSA_NAME for
207 Replace the SSA name representing the RESULT_DECL by the variable during
208 inlining: this saves us from needing to introduce a PHI node in the case
209 where the return value is only partly initialized. */
210 if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
211 && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
212 || !id->transform_return_to_modify))
214 new_tree = make_ssa_name (new_tree, NULL);
215 insert_decl_map (id, name, new_tree);
216 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
217 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
218 TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
219 if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
221 /* By inlining a function having an uninitialized variable, we might
222 extend its lifetime (the variable might get reused). This causes an
223 ICE in the case we end up extending the lifetime of an SSA name across
224 an abnormal edge, and it also increases register pressure.
226 We simply initialize all uninitialized vars by 0, except
227 for the case where we are inlining into the very first BB. We could avoid
228 this for all BBs that are not inside strongly connected
229 regions of the CFG, but this is expensive to test.
231 && is_gimple_reg (SSA_NAME_VAR (name))
232 && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
233 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
234 || EDGE_COUNT (id->entry_bb->preds) != 1))
236 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
239 init_stmt = gimple_build_assign (new_tree,
240 fold_convert (TREE_TYPE (new_tree),
242 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
243 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
247 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
248 if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name))
250 set_default_def (SSA_NAME_VAR (new_tree), new_tree);
255 insert_decl_map (id, name, new_tree);
259 /* Remap DECL during the copying of the BLOCK tree for the function. */
262 remap_decl (tree decl, copy_body_data *id)
266 /* We only remap local variables in the current function. */
268 /* See if we have remapped this declaration. */
270 n = (tree *) pointer_map_contains (id->decl_map, decl);
272 if (!n && processing_debug_stmt)
274 processing_debug_stmt = -1;
278 /* If we didn't already have an equivalent for this declaration,
282 /* Make a copy of the variable or label. */
283 tree t = id->copy_decl (decl, id);
285 /* Remember it, so that if we encounter this local entity again
286 we can reuse this copy. Do this early because remap_type may
287 need this decl for TYPE_STUB_DECL. */
288 insert_decl_map (id, decl, t);
293 /* Remap types, if necessary. */
294 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
295 if (TREE_CODE (t) == TYPE_DECL)
296 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
298 /* Remap sizes as necessary. */
299 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
300 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
302 /* If fields, do likewise for offset and qualifier. */
303 if (TREE_CODE (t) == FIELD_DECL)
305 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
306 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
307 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
310 if (cfun && gimple_in_ssa_p (cfun)
311 && (TREE_CODE (t) == VAR_DECL
312 || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
315 add_referenced_var (t);
320 if (id->do_not_unshare)
323 return unshare_expr (*n);
327 remap_type_1 (tree type, copy_body_data *id)
331 /* We do need a copy. Build and register it now. If this is a pointer or
332 reference type, remap the designated type and make a new pointer or
334 if (TREE_CODE (type) == POINTER_TYPE)
336 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
338 TYPE_REF_CAN_ALIAS_ALL (type));
339 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
340 new_tree = build_type_attribute_qual_variant (new_tree,
341 TYPE_ATTRIBUTES (type),
343 insert_decl_map (id, type, new_tree);
346 else if (TREE_CODE (type) == REFERENCE_TYPE)
348 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
350 TYPE_REF_CAN_ALIAS_ALL (type));
351 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
352 new_tree = build_type_attribute_qual_variant (new_tree,
353 TYPE_ATTRIBUTES (type),
355 insert_decl_map (id, type, new_tree);
359 new_tree = copy_node (type);
361 insert_decl_map (id, type, new_tree);
363 /* This is a new type, not a copy of an old type. Need to reassociate
364 variants. We can handle everything except the main variant lazily. */
365 t = TYPE_MAIN_VARIANT (type);
368 t = remap_type (t, id);
369 TYPE_MAIN_VARIANT (new_tree) = t;
370 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
371 TYPE_NEXT_VARIANT (t) = new_tree;
375 TYPE_MAIN_VARIANT (new_tree) = new_tree;
376 TYPE_NEXT_VARIANT (new_tree) = NULL;
379 if (TYPE_STUB_DECL (type))
380 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
382 /* Lazily create pointer and reference types. */
383 TYPE_POINTER_TO (new_tree) = NULL;
384 TYPE_REFERENCE_TO (new_tree) = NULL;
386 switch (TREE_CODE (new_tree))
390 case FIXED_POINT_TYPE:
393 t = TYPE_MIN_VALUE (new_tree);
394 if (t && TREE_CODE (t) != INTEGER_CST)
395 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
397 t = TYPE_MAX_VALUE (new_tree);
398 if (t && TREE_CODE (t) != INTEGER_CST)
399 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
403 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
404 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
408 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
409 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
414 case QUAL_UNION_TYPE:
418 for (f = TYPE_FIELDS (new_tree); f ; f = TREE_CHAIN (f))
420 t = remap_decl (f, id);
421 DECL_CONTEXT (t) = new_tree;
425 TYPE_FIELDS (new_tree) = nreverse (nf);
431 /* Shouldn't have been thought variable sized. */
435 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
436 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
442 remap_type (tree type, copy_body_data *id)
450 /* See if we have remapped this type. */
451 node = (tree *) pointer_map_contains (id->decl_map, type);
455 /* The type only needs remapping if it's variably modified. */
456 if (! variably_modified_type_p (type, id->src_fn))
458 insert_decl_map (id, type, type);
462 id->remapping_type_depth++;
463 tmp = remap_type_1 (type, id);
464 id->remapping_type_depth--;
469 /* Return previously remapped type of TYPE in ID. Return NULL if TYPE
470 is NULL or TYPE has not been remapped before. */
473 remapped_type (tree type, copy_body_data *id)
480 /* See if we have remapped this type. */
481 node = (tree *) pointer_map_contains (id->decl_map, type);
488 /* The type only needs remapping if it's variably modified. */
489 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
492 can_be_nonlocal (tree decl, copy_body_data *id)
494 /* We can not duplicate function decls. */
495 if (TREE_CODE (decl) == FUNCTION_DECL)
498 /* Local static vars must be non-local or we get multiple declaration
500 if (TREE_CODE (decl) == VAR_DECL
501 && !auto_var_in_fn_p (decl, id->src_fn))
504 /* At the moment dwarf2out can handle only these types of nodes. We
505 can support more later. */
506 if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != PARM_DECL)
509 /* We must use global type. We call remapped_type instead of
510 remap_type since we don't want to remap this type here if it
511 hasn't been remapped before. */
512 if (TREE_TYPE (decl) != remapped_type (TREE_TYPE (decl), id))
515 /* Without SSA we can't tell if the variable is used. */
516 if (!gimple_in_ssa_p (cfun))
519 /* Live variables must be copied so we can attach DECL_RTL. */
527 remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
530 tree new_decls = NULL_TREE;
532 /* Remap its variables. */
533 for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
537 if (can_be_nonlocal (old_var, id))
539 if (TREE_CODE (old_var) == VAR_DECL
540 && ! DECL_EXTERNAL (old_var)
541 && (var_ann (old_var) || !gimple_in_ssa_p (cfun)))
542 cfun->local_decls = tree_cons (NULL_TREE, old_var,
544 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
545 && !DECL_IGNORED_P (old_var)
546 && nonlocalized_list)
547 VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
551 /* Remap the variable. */
552 new_var = remap_decl (old_var, id);
554 /* If we didn't remap this variable, we can't mess with its
555 TREE_CHAIN. If we remapped this variable to the return slot, it's
556 already declared somewhere else, so don't declare it here. */
558 if (new_var == id->retvar)
562 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
563 && !DECL_IGNORED_P (old_var)
564 && nonlocalized_list)
565 VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
569 gcc_assert (DECL_P (new_var));
570 TREE_CHAIN (new_var) = new_decls;
575 return nreverse (new_decls);
578 /* Copy the BLOCK to contain remapped versions of the variables
579 therein, and hook the new block into the block-tree. */
582 remap_block (tree *block, copy_body_data *id)
587 /* Make the new block. */
589 new_block = make_node (BLOCK);
590 TREE_USED (new_block) = TREE_USED (old_block);
591 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
592 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
593 BLOCK_NONLOCALIZED_VARS (new_block)
594 = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
597 /* Remap its variables. */
598 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
599 &BLOCK_NONLOCALIZED_VARS (new_block),
602 if (id->transform_lang_insert_block)
603 id->transform_lang_insert_block (new_block);
605 /* Remember the remapped block. */
606 insert_decl_map (id, old_block, new_block);
609 /* Copy the whole block tree and root it in id->block. */
611 remap_blocks (tree block, copy_body_data *id)
614 tree new_tree = block;
619 remap_block (&new_tree, id);
620 gcc_assert (new_tree != block);
621 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
622 prepend_lexical_block (new_tree, remap_blocks (t, id));
623 /* Blocks are in arbitrary order, but make things slightly prettier and do
624 not swap order when producing a copy. */
625 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
630 copy_statement_list (tree *tp)
632 tree_stmt_iterator oi, ni;
635 new_tree = alloc_stmt_list ();
636 ni = tsi_start (new_tree);
637 oi = tsi_start (*tp);
638 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
641 for (; !tsi_end_p (oi); tsi_next (&oi))
643 tree stmt = tsi_stmt (oi);
644 if (TREE_CODE (stmt) == STATEMENT_LIST)
645 copy_statement_list (&stmt);
646 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
651 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
653 tree block = BIND_EXPR_BLOCK (*tp);
654 /* Copy (and replace) the statement. */
655 copy_tree_r (tp, walk_subtrees, NULL);
658 remap_block (&block, id);
659 BIND_EXPR_BLOCK (*tp) = block;
662 if (BIND_EXPR_VARS (*tp))
663 /* This will remap a lot of the same decls again, but this should be
665 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
669 /* Create a new gimple_seq by remapping all the statements in BODY
670 using the inlining information in ID. */
673 remap_gimple_seq (gimple_seq body, copy_body_data *id)
675 gimple_stmt_iterator si;
676 gimple_seq new_body = NULL;
678 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
680 gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
681 gimple_seq_add_stmt (&new_body, new_stmt);
688 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
689 block using the mapping information in ID. */
692 copy_gimple_bind (gimple stmt, copy_body_data *id)
695 tree new_block, new_vars;
696 gimple_seq body, new_body;
698 /* Copy the statement. Note that we purposely don't use copy_stmt
699 here because we need to remap statements as we copy. */
700 body = gimple_bind_body (stmt);
701 new_body = remap_gimple_seq (body, id);
703 new_block = gimple_bind_block (stmt);
705 remap_block (&new_block, id);
707 /* This will remap a lot of the same decls again, but this should be
709 new_vars = gimple_bind_vars (stmt);
711 new_vars = remap_decls (new_vars, NULL, id);
713 new_bind = gimple_build_bind (new_vars, new_body, new_block);
719 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
720 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
721 WALK_SUBTREES is used to indicate walk_gimple_op whether to keep
722 recursing into the children nodes of *TP. */
725 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
727 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
728 copy_body_data *id = (copy_body_data *) wi_p->info;
729 tree fn = id->src_fn;
731 if (TREE_CODE (*tp) == SSA_NAME)
733 *tp = remap_ssa_name (*tp, id);
737 else if (auto_var_in_fn_p (*tp, fn))
739 /* Local variables and labels need to be replaced by equivalent
740 variables. We don't want to copy static variables; there's
741 only one of those, no matter how many times we inline the
742 containing function. Similarly for globals from an outer
746 /* Remap the declaration. */
747 new_decl = remap_decl (*tp, id);
748 gcc_assert (new_decl);
749 /* Replace this variable with the copy. */
750 STRIP_TYPE_NOPS (new_decl);
751 /* ??? The C++ frontend uses void * pointer zero to initialize
752 any other type. This confuses the middle-end type verification.
753 As cloned bodies do not go through gimplification again the fixup
754 there doesn't trigger. */
755 if (TREE_CODE (new_decl) == INTEGER_CST
756 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
757 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
761 else if (TREE_CODE (*tp) == STATEMENT_LIST)
763 else if (TREE_CODE (*tp) == SAVE_EXPR)
765 else if (TREE_CODE (*tp) == LABEL_DECL
766 && (!DECL_CONTEXT (*tp)
767 || decl_function_context (*tp) == id->src_fn))
768 /* These may need to be remapped for EH handling. */
769 *tp = remap_decl (*tp, id);
770 else if (TYPE_P (*tp))
771 /* Types may need remapping as well. */
772 *tp = remap_type (*tp, id);
773 else if (CONSTANT_CLASS_P (*tp))
775 /* If this is a constant, we have to copy the node iff the type
776 will be remapped. copy_tree_r will not copy a constant. */
777 tree new_type = remap_type (TREE_TYPE (*tp), id);
779 if (new_type == TREE_TYPE (*tp))
782 else if (TREE_CODE (*tp) == INTEGER_CST)
783 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
784 TREE_INT_CST_HIGH (*tp));
787 *tp = copy_node (*tp);
788 TREE_TYPE (*tp) = new_type;
793 /* Otherwise, just copy the node. Note that copy_tree_r already
794 knows not to copy VAR_DECLs, etc., so this is safe. */
795 if (TREE_CODE (*tp) == INDIRECT_REF)
797 /* Get rid of *& from inline substitutions that can happen when a
798 pointer argument is an ADDR_EXPR. */
799 tree decl = TREE_OPERAND (*tp, 0);
802 n = (tree *) pointer_map_contains (id->decl_map, decl);
805 tree type, new_tree, old;
807 /* If we happen to get an ADDR_EXPR in n->value, strip
808 it manually here as we'll eventually get ADDR_EXPRs
809 which lie about their types pointed to. In this case
810 build_fold_indirect_ref wouldn't strip the
811 INDIRECT_REF, but we absolutely rely on that. As
812 fold_indirect_ref does other useful transformations,
813 try that first, though. */
814 type = TREE_TYPE (TREE_TYPE (*n));
815 new_tree = unshare_expr (*n);
817 *tp = gimple_fold_indirect_ref (new_tree);
820 if (TREE_CODE (new_tree) == ADDR_EXPR)
822 *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
824 /* ??? We should either assert here or build
825 a VIEW_CONVERT_EXPR instead of blindly leaking
826 incompatible types to our IL. */
828 *tp = TREE_OPERAND (new_tree, 0);
832 *tp = build1 (INDIRECT_REF, type, new_tree);
833 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
834 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
842 /* Here is the "usual case". Copy this tree node, and then
843 tweak some special cases. */
844 copy_tree_r (tp, walk_subtrees, NULL);
846 /* Global variables we haven't seen yet need to go into referenced
847 vars, unless they are referenced from types only. */
848 if (gimple_in_ssa_p (cfun)
849 && TREE_CODE (*tp) == VAR_DECL
850 && id->remapping_type_depth == 0
851 && !processing_debug_stmt)
852 add_referenced_var (*tp);
854 /* We should never have TREE_BLOCK set on non-statements. */
856 gcc_assert (!TREE_BLOCK (*tp));
858 if (TREE_CODE (*tp) != OMP_CLAUSE)
859 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
861 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
863 /* The copied TARGET_EXPR has never been expanded, even if the
864 original node was expanded already. */
865 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
866 TREE_OPERAND (*tp, 3) = NULL_TREE;
868 else if (TREE_CODE (*tp) == ADDR_EXPR)
870 /* Variable substitution need not be simple. In particular,
871 the INDIRECT_REF substitution above. Make sure that
872 TREE_CONSTANT and friends are up-to-date. But make sure
873 to not improperly set TREE_BLOCK on some sub-expressions. */
874 int invariant = is_gimple_min_invariant (*tp);
875 tree block = id->block;
876 id->block = NULL_TREE;
877 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
880 /* Handle the case where we substituted an INDIRECT_REF
881 into the operand of the ADDR_EXPR. */
882 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
883 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
885 recompute_tree_invariant_for_addr_expr (*tp);
887 /* If this used to be invariant, but is not any longer,
888 then regimplification is probably needed. */
889 if (invariant && !is_gimple_min_invariant (*tp))
890 id->regimplify = true;
896 /* Keep iterating. */
901 /* Called from copy_body_id via walk_tree. DATA is really a
902 `copy_body_data *'. */
905 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
907 copy_body_data *id = (copy_body_data *) data;
908 tree fn = id->src_fn;
911 /* Begin by recognizing trees that we'll completely rewrite for the
912 inlining context. Our output for these trees is completely
913 different from our input (e.g. RETURN_EXPR is deleted, and morphs
914 into an edge). Further down, we'll handle trees that get
915 duplicated and/or tweaked. */
917 /* When requested, RETURN_EXPRs should be transformed to just the
918 contained MODIFY_EXPR. The branch semantics of the return will
919 be handled elsewhere by manipulating the CFG rather than a statement. */
920 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
922 tree assignment = TREE_OPERAND (*tp, 0);
924 /* If we're returning something, just turn that into an
925 assignment into the equivalent of the original RESULT_DECL.
926 If the "assignment" is just the result decl, the result
927 decl has already been set (e.g. a recent "foo (&result_decl,
928 ...)"); just toss the entire RETURN_EXPR. */
929 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
931 /* Replace the RETURN_EXPR with (a copy of) the
932 MODIFY_EXPR hanging underneath. */
933 *tp = copy_node (assignment);
935 else /* Else the RETURN_EXPR returns no value. */
938 return (tree) (void *)1;
941 else if (TREE_CODE (*tp) == SSA_NAME)
943 *tp = remap_ssa_name (*tp, id);
948 /* Local variables and labels need to be replaced by equivalent
949 variables. We don't want to copy static variables; there's only
950 one of those, no matter how many times we inline the containing
951 function. Similarly for globals from an outer function. */
952 else if (auto_var_in_fn_p (*tp, fn))
956 /* Remap the declaration. */
957 new_decl = remap_decl (*tp, id);
958 gcc_assert (new_decl);
959 /* Replace this variable with the copy. */
960 STRIP_TYPE_NOPS (new_decl);
964 else if (TREE_CODE (*tp) == STATEMENT_LIST)
965 copy_statement_list (tp);
966 else if (TREE_CODE (*tp) == SAVE_EXPR
967 || TREE_CODE (*tp) == TARGET_EXPR)
968 remap_save_expr (tp, id->decl_map, walk_subtrees);
969 else if (TREE_CODE (*tp) == LABEL_DECL
970 && (! DECL_CONTEXT (*tp)
971 || decl_function_context (*tp) == id->src_fn))
972 /* These may need to be remapped for EH handling. */
973 *tp = remap_decl (*tp, id);
974 else if (TREE_CODE (*tp) == BIND_EXPR)
975 copy_bind_expr (tp, walk_subtrees, id);
976 /* Types may need remapping as well. */
977 else if (TYPE_P (*tp))
978 *tp = remap_type (*tp, id);
980 /* If this is a constant, we have to copy the node iff the type will be
981 remapped. copy_tree_r will not copy a constant. */
982 else if (CONSTANT_CLASS_P (*tp))
984 tree new_type = remap_type (TREE_TYPE (*tp), id);
986 if (new_type == TREE_TYPE (*tp))
989 else if (TREE_CODE (*tp) == INTEGER_CST)
990 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
991 TREE_INT_CST_HIGH (*tp));
994 *tp = copy_node (*tp);
995 TREE_TYPE (*tp) = new_type;
999 /* Otherwise, just copy the node. Note that copy_tree_r already
1000 knows not to copy VAR_DECLs, etc., so this is safe. */
1003 /* Here we handle trees that are not completely rewritten.
1004 First we detect some inlining-induced bogosities for
1006 if (TREE_CODE (*tp) == MODIFY_EXPR
1007 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1008 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1010 /* Some assignments VAR = VAR; don't generate any rtl code
1011 and thus don't count as variable modification. Avoid
1012 keeping bogosities like 0 = 0. */
1013 tree decl = TREE_OPERAND (*tp, 0), value;
1016 n = (tree *) pointer_map_contains (id->decl_map, decl);
1020 STRIP_TYPE_NOPS (value);
1021 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1023 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1024 return copy_tree_body_r (tp, walk_subtrees, data);
1028 else if (TREE_CODE (*tp) == INDIRECT_REF)
1030 /* Get rid of *& from inline substitutions that can happen when a
1031 pointer argument is an ADDR_EXPR. */
1032 tree decl = TREE_OPERAND (*tp, 0);
1035 n = (tree *) pointer_map_contains (id->decl_map, decl);
1040 /* If we happen to get an ADDR_EXPR in n->value, strip
1041 it manually here as we'll eventually get ADDR_EXPRs
1042 which lie about their types pointed to. In this case
1043 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1044 but we absolutely rely on that. As fold_indirect_ref
1045 does other useful transformations, try that first, though. */
1046 tree type = TREE_TYPE (TREE_TYPE (*n));
1047 if (id->do_not_unshare)
1050 new_tree = unshare_expr (*n);
1052 *tp = gimple_fold_indirect_ref (new_tree);
1055 if (TREE_CODE (new_tree) == ADDR_EXPR)
1057 *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
1059 /* ??? We should either assert here or build
1060 a VIEW_CONVERT_EXPR instead of blindly leaking
1061 incompatible types to our IL. */
1063 *tp = TREE_OPERAND (new_tree, 0);
1067 *tp = build1 (INDIRECT_REF, type, new_tree);
1068 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1069 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1077 /* Here is the "usual case". Copy this tree node, and then
1078 tweak some special cases. */
1079 copy_tree_r (tp, walk_subtrees, NULL);
1081 /* Global variables we haven't seen yet need to go into referenced
1082 vars, unless they are referenced from types or debug stmts only. */
1083 if (gimple_in_ssa_p (cfun)
1084 && TREE_CODE (*tp) == VAR_DECL
1085 && id->remapping_type_depth == 0
1086 && !processing_debug_stmt)
1087 add_referenced_var (*tp);
1089 /* If EXPR has a block defined, map it to the newly constructed block.
1090 When inlining we want EXPRs without a block to appear in the block
1091 of the function call if we are not remapping a type. */
1094 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1095 if (TREE_BLOCK (*tp))
1098 n = (tree *) pointer_map_contains (id->decl_map,
1103 TREE_BLOCK (*tp) = new_block;
1106 if (TREE_CODE (*tp) != OMP_CLAUSE)
1107 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1109 /* The copied TARGET_EXPR has never been expanded, even if the
1110 original node was expanded already. */
1111 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1113 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1114 TREE_OPERAND (*tp, 3) = NULL_TREE;
1117 /* Variable substitution need not be simple. In particular, the
1118 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1119 and friends are up-to-date. */
1120 else if (TREE_CODE (*tp) == ADDR_EXPR)
1122 int invariant = is_gimple_min_invariant (*tp);
1123 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1125 /* Handle the case where we substituted an INDIRECT_REF
1126 into the operand of the ADDR_EXPR. */
1127 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1128 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1130 recompute_tree_invariant_for_addr_expr (*tp);
1132 /* If this used to be invariant, but is not any longer,
1133 then regimplification is probably needed. */
1134 if (invariant && !is_gimple_min_invariant (*tp))
1135 id->regimplify = true;
1141 /* Keep iterating. */
1145 /* Helper for remap_gimple_stmt. Given an EH region number for the
1146 source function, map that to the duplicate EH region number in
1147 the destination function. */
1150 remap_eh_region_nr (int old_nr, copy_body_data *id)
1152 eh_region old_r, new_r;
1155 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1156 slot = pointer_map_contains (id->eh_map, old_r);
1157 new_r = (eh_region) *slot;
1159 return new_r->index;
1162 /* Similar, but operate on INTEGER_CSTs. */
1165 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1169 old_nr = tree_low_cst (old_t_nr, 0);
1170 new_nr = remap_eh_region_nr (old_nr, id);
1172 return build_int_cst (NULL, new_nr);
1175 /* Helper for copy_bb. Remap statement STMT using the inlining
1176 information in ID. Return the new statement copy. */
1179 remap_gimple_stmt (gimple stmt, copy_body_data *id)
1182 struct walk_stmt_info wi;
1184 bool skip_first = false;
1186 /* Begin by recognizing trees that we'll completely rewrite for the
1187 inlining context. Our output for these trees is completely
1188 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1189 into an edge). Further down, we'll handle trees that get
1190 duplicated and/or tweaked. */
1192 /* When requested, GIMPLE_RETURNs should be transformed to just the
1193 contained GIMPLE_ASSIGN. The branch semantics of the return will
1194 be handled elsewhere by manipulating the CFG rather than the
1196 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1198 tree retval = gimple_return_retval (stmt);
1200 /* If we're returning something, just turn that into an
1201 assignment into the equivalent of the original RESULT_DECL.
1202 If RETVAL is just the result decl, the result decl has
1203 already been set (e.g. a recent "foo (&result_decl, ...)");
1204 just toss the entire GIMPLE_RETURN. */
1205 if (retval && TREE_CODE (retval) != RESULT_DECL)
1207 copy = gimple_build_assign (id->retvar, retval);
1208 /* id->retvar is already substituted. Skip it on later remapping. */
1212 return gimple_build_nop ();
1214 else if (gimple_has_substatements (stmt))
1218 /* When cloning bodies from the C++ front end, we will be handed bodies
1219 in High GIMPLE form. Handle here all the High GIMPLE statements that
1220 have embedded statements. */
1221 switch (gimple_code (stmt))
1224 copy = copy_gimple_bind (stmt, id);
1228 s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
1229 copy = gimple_build_catch (gimple_catch_types (stmt), s1);
1232 case GIMPLE_EH_FILTER:
1233 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1234 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1238 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1239 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1240 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1243 case GIMPLE_WITH_CLEANUP_EXPR:
1244 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1245 copy = gimple_build_wce (s1);
1248 case GIMPLE_OMP_PARALLEL:
1249 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1250 copy = gimple_build_omp_parallel
1252 gimple_omp_parallel_clauses (stmt),
1253 gimple_omp_parallel_child_fn (stmt),
1254 gimple_omp_parallel_data_arg (stmt));
1257 case GIMPLE_OMP_TASK:
1258 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1259 copy = gimple_build_omp_task
1261 gimple_omp_task_clauses (stmt),
1262 gimple_omp_task_child_fn (stmt),
1263 gimple_omp_task_data_arg (stmt),
1264 gimple_omp_task_copy_fn (stmt),
1265 gimple_omp_task_arg_size (stmt),
1266 gimple_omp_task_arg_align (stmt));
1269 case GIMPLE_OMP_FOR:
1270 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1271 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1272 copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
1273 gimple_omp_for_collapse (stmt), s2);
1276 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1278 gimple_omp_for_set_index (copy, i,
1279 gimple_omp_for_index (stmt, i));
1280 gimple_omp_for_set_initial (copy, i,
1281 gimple_omp_for_initial (stmt, i));
1282 gimple_omp_for_set_final (copy, i,
1283 gimple_omp_for_final (stmt, i));
1284 gimple_omp_for_set_incr (copy, i,
1285 gimple_omp_for_incr (stmt, i));
1286 gimple_omp_for_set_cond (copy, i,
1287 gimple_omp_for_cond (stmt, i));
1292 case GIMPLE_OMP_MASTER:
1293 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1294 copy = gimple_build_omp_master (s1);
1297 case GIMPLE_OMP_ORDERED:
1298 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1299 copy = gimple_build_omp_ordered (s1);
1302 case GIMPLE_OMP_SECTION:
1303 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1304 copy = gimple_build_omp_section (s1);
1307 case GIMPLE_OMP_SECTIONS:
1308 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1309 copy = gimple_build_omp_sections
1310 (s1, gimple_omp_sections_clauses (stmt));
1313 case GIMPLE_OMP_SINGLE:
1314 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1315 copy = gimple_build_omp_single
1316 (s1, gimple_omp_single_clauses (stmt));
1319 case GIMPLE_OMP_CRITICAL:
1320 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1322 = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
1331 if (gimple_assign_copy_p (stmt)
1332 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1333 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1335 /* Here we handle statements that are not completely rewritten.
1336 First we detect some inlining-induced bogosities for
1339 /* Some assignments VAR = VAR; don't generate any rtl code
1340 and thus don't count as variable modification. Avoid
1341 keeping bogosities like 0 = 0. */
1342 tree decl = gimple_assign_lhs (stmt), value;
1345 n = (tree *) pointer_map_contains (id->decl_map, decl);
1349 STRIP_TYPE_NOPS (value);
1350 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1351 return gimple_build_nop ();
1355 if (gimple_debug_bind_p (stmt))
1357 copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1358 gimple_debug_bind_get_value (stmt),
1360 VEC_safe_push (gimple, heap, id->debug_stmts, copy);
1364 /* Create a new deep copy of the statement. */
1365 copy = gimple_copy (stmt);
1367 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1368 RESX and EH_DISPATCH. */
1370 switch (gimple_code (copy))
1374 tree r, fndecl = gimple_call_fndecl (copy);
1375 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1376 switch (DECL_FUNCTION_CODE (fndecl))
1378 case BUILT_IN_EH_COPY_VALUES:
1379 r = gimple_call_arg (copy, 1);
1380 r = remap_eh_region_tree_nr (r, id);
1381 gimple_call_set_arg (copy, 1, r);
1384 case BUILT_IN_EH_POINTER:
1385 case BUILT_IN_EH_FILTER:
1386 r = gimple_call_arg (copy, 0);
1387 r = remap_eh_region_tree_nr (r, id);
1388 gimple_call_set_arg (copy, 0, r);
1399 int r = gimple_resx_region (copy);
1400 r = remap_eh_region_nr (r, id);
1401 gimple_resx_set_region (copy, r);
1405 case GIMPLE_EH_DISPATCH:
1407 int r = gimple_eh_dispatch_region (copy);
1408 r = remap_eh_region_nr (r, id);
1409 gimple_eh_dispatch_set_region (copy, r);
1418 /* If STMT has a block defined, map it to the newly constructed
1419 block. When inlining we want statements without a block to
1420 appear in the block of the function call. */
1421 new_block = id->block;
1422 if (gimple_block (copy))
1425 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
1430 gimple_set_block (copy, new_block);
1432 if (gimple_debug_bind_p (copy))
1435 /* Remap all the operands in COPY. */
1436 memset (&wi, 0, sizeof (wi));
1439 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1441 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1443 /* Clear the copied virtual operands. We are not remapping them here
1444 but are going to recreate them from scratch. */
1445 if (gimple_has_mem_ops (copy))
1447 gimple_set_vdef (copy, NULL_TREE);
1448 gimple_set_vuse (copy, NULL_TREE);
1455 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1459 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1460 gcov_type count_scale)
1462 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1463 basic_block copy_basic_block;
1467 /* create_basic_block() will append every new block to
1468 basic_block_info automatically. */
1469 copy_basic_block = create_basic_block (NULL, (void *) 0,
1470 (basic_block) bb->prev_bb->aux);
1471 copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
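/* A small worked example of the scaling, assuming the usual
   REG_BR_PROB_BASE of 10000 and made-up profile numbers: if the call
   site's count is 500 and the callee's entry block count is 1000, the
   caller of copy_bb computed count_scale = 10000 * 500 / 1000 = 5000,
   so each copied block gets bb->count * 5000 / 10000, i.e. half of its
   original execution count.  */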
1473 /* We are going to rebuild frequencies from scratch. These values
1474 have only minor importance for driving canonicalize_loop_headers.
1475 freq = ((gcov_type)bb->frequency * frequency_scale / REG_BR_PROB_BASE);
1477 /* We recompute frequencies after inlining, so this is quite safe. */
1478 if (freq > BB_FREQ_MAX)
1480 copy_basic_block->frequency = freq;
1482 copy_gsi = gsi_start_bb (copy_basic_block);
1484 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1486 gimple stmt = gsi_stmt (gsi);
1487 gimple orig_stmt = stmt;
1489 id->regimplify = false;
1490 stmt = remap_gimple_stmt (stmt, id);
1491 if (gimple_nop_p (stmt))
1494 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
1497 /* With return slot optimization we can end up with
1498 non-gimple (foo *)&this->m, fix that here. */
1499 if (is_gimple_assign (stmt)
1500 && gimple_assign_rhs_code (stmt) == NOP_EXPR
1501 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1504 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1505 gimple_assign_rhs1 (stmt),
1506 true, NULL, false, GSI_NEW_STMT);
1507 gimple_assign_set_rhs1 (stmt, new_rhs);
1508 id->regimplify = false;
1511 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1514 gimple_regimplify_operands (stmt, &seq_gsi);
1516 /* If copy_basic_block has been empty at the start of this iteration,
1517 call gsi_start_bb again to get at the newly added statements. */
1518 if (gsi_end_p (copy_gsi))
1519 copy_gsi = gsi_start_bb (copy_basic_block);
1521 gsi_next (&copy_gsi);
1523 /* Process the new statement. The call to gimple_regimplify_operands
1524 possibly turned the statement into multiple statements; we
1525 need to process all of them. */
1530 stmt = gsi_stmt (copy_gsi);
1531 if (is_gimple_call (stmt)
1532 && gimple_call_va_arg_pack_p (stmt)
1535 /* __builtin_va_arg_pack () should be replaced by
1536 all arguments corresponding to ... in the caller. */
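/* Sketch with made-up code: if the function being inlined is

       static inline int f (int x, ...)
       { return g (x, __builtin_va_arg_pack ()); }

   and the call being inlined is f (1, 2, 3), then the copied call to g
   is rebuilt below as g (<remapped x>, 2, 3): its own arguments are
   copied first and the caller's anonymous arguments are appended in
   place of the pack.  */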
1539 VEC(tree, heap) *argarray;
1540 size_t nargs = gimple_call_num_args (id->gimple_call);
1543 for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
1546 /* Create the new array of arguments. */
1547 n = nargs + gimple_call_num_args (stmt);
1548 argarray = VEC_alloc (tree, heap, n);
1549 VEC_safe_grow (tree, heap, argarray, n);
1551 /* Copy all the arguments before '...' */
1552 memcpy (VEC_address (tree, argarray),
1553 gimple_call_arg_ptr (stmt, 0),
1554 gimple_call_num_args (stmt) * sizeof (tree));
1556 /* Append the arguments passed in '...' */
1557 memcpy (VEC_address(tree, argarray) + gimple_call_num_args (stmt),
1558 gimple_call_arg_ptr (id->gimple_call, 0)
1559 + (gimple_call_num_args (id->gimple_call) - nargs),
1560 nargs * sizeof (tree));
1562 new_call = gimple_build_call_vec (gimple_call_fn (stmt),
1565 VEC_free (tree, heap, argarray);
1567 /* Copy all GIMPLE_CALL flags, location and block, except
1568 GF_CALL_VA_ARG_PACK. */
1569 gimple_call_copy_flags (new_call, stmt);
1570 gimple_call_set_va_arg_pack (new_call, false);
1571 gimple_set_location (new_call, gimple_location (stmt));
1572 gimple_set_block (new_call, gimple_block (stmt));
1573 gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));
1575 gsi_replace (&copy_gsi, new_call, false);
1576 gimple_set_bb (stmt, NULL);
1579 else if (is_gimple_call (stmt)
1581 && (decl = gimple_call_fndecl (stmt))
1582 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1583 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1585 /* __builtin_va_arg_pack_len () should be replaced by
1586 the number of anonymous arguments. */
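/* Continuing the made-up example above: for the call f (1, 2, 3) to
   f (int x, ...), the call to __builtin_va_arg_pack_len () is replaced
   by the constant 2, the number of arguments passed for the "...".  */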
1587 size_t nargs = gimple_call_num_args (id->gimple_call);
1591 for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
1594 count = build_int_cst (integer_type_node, nargs);
1595 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1596 gsi_replace (&copy_gsi, new_stmt, false);
1600 /* Statements produced by inlining can be unfolded, especially
1601 when we constant propagated some operands. We can't fold
1602 them right now for two reasons:
1603 1) folding requires SSA_NAME_DEF_STMTs to be correct
1604 2) we can't change function calls to builtins.
1605 So we just mark the statement for later folding. We mark
1606 all new statements, instead of just the statements that have changed
1607 by some nontrivial substitution, so even statements made
1608 foldable indirectly are updated. If this turns out to be
1609 expensive, copy_body can be told to watch for nontrivial
1611 if (id->statements_to_fold)
1612 pointer_set_insert (id->statements_to_fold, stmt);
1614 /* We're duplicating a CALL_EXPR. Find any corresponding
1615 callgraph edges and update or duplicate them. */
1616 if (is_gimple_call (stmt))
1618 struct cgraph_edge *edge;
1621 switch (id->transform_call_graph_edges)
1623 case CB_CGE_DUPLICATE:
1624 edge = cgraph_edge (id->src_node, orig_stmt);
1627 int edge_freq = edge->frequency;
1628 edge = cgraph_clone_edge (edge, id->dst_node, stmt,
1630 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1631 edge->frequency, true);
1632 /* We could also just rescale the frequency, but
1633 doing so would introduce roundoff errors and make
1634 the verifier unhappy. */
1636 = compute_call_stmt_bb_frequency (id->dst_node->decl,
1639 && profile_status_for_function (cfun) != PROFILE_ABSENT
1640 && (edge_freq > edge->frequency + 10
1641 || edge_freq < edge->frequency - 10))
1643 fprintf (dump_file, "Edge frequency estimated by "
1644 "cgraph %i diverge from inliner's estimate %i\n",
1648 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
1651 copy_basic_block->frequency);
1656 case CB_CGE_MOVE_CLONES:
1657 cgraph_set_call_stmt_including_clones (id->dst_node,
1659 edge = cgraph_edge (id->dst_node, stmt);
1663 edge = cgraph_edge (id->dst_node, orig_stmt);
1665 cgraph_set_call_stmt (edge, stmt);
1672 /* Constant propagation on arguments done during inlining
1673 may create new direct calls. Produce an edge for them. */
1675 || (edge->indirect_call
1676 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
1677 && is_gimple_call (stmt)
1678 && (fn = gimple_call_fndecl (stmt)) != NULL)
1680 struct cgraph_node *dest = cgraph_node (fn);
1682 /* We have a missing edge in the callgraph. This can happen
1683 when a previous inlining turned an indirect call into a
1684 direct call by constant propagating arguments, or when we are
1685 producing a dead clone (for further cloning). In all
1686 other cases we hit a bug (incorrect node sharing is the
1687 most common reason for missing edges). */
1688 gcc_assert (dest->needed || !dest->analyzed
1689 || !id->src_node->analyzed);
1690 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
1691 cgraph_create_edge_including_clones
1692 (id->dst_node, dest, orig_stmt, stmt, bb->count,
1693 compute_call_stmt_bb_frequency (id->dst_node->decl,
1695 bb->loop_depth, CIF_ORIGINALLY_INDIRECT_CALL);
1697 cgraph_create_edge (id->dst_node, dest, stmt,
1699 compute_call_stmt_bb_frequency
1700 (id->dst_node->decl, copy_basic_block),
1701 bb->loop_depth)->inline_failed
1702 = CIF_ORIGINALLY_INDIRECT_CALL;
1705 fprintf (dump_file, "Created new direct edge to %s",
1706 cgraph_node_name (dest));
1710 flags = gimple_call_flags (stmt);
1711 if (flags & ECF_MAY_BE_ALLOCA)
1712 cfun->calls_alloca = true;
1713 if (flags & ECF_RETURNS_TWICE)
1714 cfun->calls_setjmp = true;
1717 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
1718 id->eh_map, id->eh_lp_nr);
1720 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
1725 find_new_referenced_vars (gsi_stmt (copy_gsi));
1726 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
1727 if (TREE_CODE (def) == SSA_NAME)
1728 SSA_NAME_DEF_STMT (def) = stmt;
1731 gsi_next (&copy_gsi);
1733 while (!gsi_end_p (copy_gsi));
1735 copy_gsi = gsi_last_bb (copy_basic_block);
1738 return copy_basic_block;
1741 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
1742 form is quite easy, since the dominator relationship for the old basic blocks does
1745 There is however an exception where inlining might change the dominator relation
1746 across EH edges from basic blocks within inlined functions destined
1747 for landing pads in the function we inline into.
1749 The function fills in PHI_RESULTs of such PHI nodes if they refer
1750 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
1751 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
1752 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1753 set, and this means that there will be no overlapping live ranges
1754 for the underlying symbol.
1756 This might change in the future if we allow redirecting of EH edges and
1757 we might want to change the way we build the CFG pre-inlining to include
1758 all the possible edges then. */
1760 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1761 bool can_throw, bool nonlocal_goto)
1766 FOR_EACH_EDGE (e, ei, bb->succs)
1768 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1771 gimple_stmt_iterator si;
1774 gcc_assert (e->flags & EDGE_EH);
1777 gcc_assert (!(e->flags & EDGE_EH));
1779 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
1783 phi = gsi_stmt (si);
1785 /* There shouldn't be any PHI nodes in the ENTRY_BLOCK. */
1786 gcc_assert (!e->dest->aux);
1788 gcc_assert ((e->flags & EDGE_EH)
1789 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
1791 if (!is_gimple_reg (PHI_RESULT (phi)))
1793 mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi)));
1797 re = find_edge (ret_bb, e->dest);
1799 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
1800 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
1802 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1803 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
1809 /* Copy edges from BB into its copy constructed earlier, scale profile
1810 accordingly. Edges will be taken care of later. Assume aux
1811 pointers to point to the copies of each BB. */
1814 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
1816 basic_block new_bb = (basic_block) bb->aux;
1819 gimple_stmt_iterator si;
1822 /* Use the indices from the original blocks to create edges for the
1824 FOR_EACH_EDGE (old_edge, ei, bb->succs)
1825 if (!(old_edge->flags & EDGE_EH))
1829 flags = old_edge->flags;
1831 /* Return edges do get a FALLTHRU flag when they get inlined. */
1832 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1833 && old_edge->dest->aux != EXIT_BLOCK_PTR)
1834 flags |= EDGE_FALLTHRU;
1835 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1836 new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
1837 new_edge->probability = old_edge->probability;
1840 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1843 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
1846 bool can_throw, nonlocal_goto;
1848 copy_stmt = gsi_stmt (si);
1849 if (!is_gimple_debug (copy_stmt))
1851 update_stmt (copy_stmt);
1852 if (gimple_in_ssa_p (cfun))
1853 mark_symbols_for_renaming (copy_stmt);
1856 /* Do this before the possible split_block. */
1859 /* If this tree could throw an exception, there are two
1860 cases where we need to add abnormal edge(s): the
1861 tree wasn't in a region and there is a "current
1862 region" in the caller; or the original tree had
1863 EH edges. In both cases split the block after the tree,
1864 and add abnormal edge(s) as needed; we need both
1865 those from the callee and the caller.
1866 We check whether the copy can throw, because the const
1867 propagation can change an INDIRECT_REF which throws
1868 into a COMPONENT_REF which doesn't. If the copy
1869 can throw, the original could also throw. */
1870 can_throw = stmt_can_throw_internal (copy_stmt);
1871 nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);
1873 if (can_throw || nonlocal_goto)
1875 if (!gsi_end_p (si))
1876 /* Note that bb's predecessor edges aren't necessarily
1877 right at this point; split_block doesn't care. */
1879 edge e = split_block (new_bb, copy_stmt);
1882 new_bb->aux = e->src->aux;
1883 si = gsi_start_bb (new_bb);
1887 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
1888 make_eh_dispatch_edges (copy_stmt);
1890 make_eh_edges (copy_stmt);
1893 make_abnormal_goto_edges (gimple_bb (copy_stmt), true);
1895 if ((can_throw || nonlocal_goto)
1896 && gimple_in_ssa_p (cfun))
1897 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
1898 can_throw, nonlocal_goto);
1902 /* Copy the PHIs. All blocks and edges are copied, some blocks
1903 were possibly split and new outgoing EH edges inserted.
1904 BB points to the block of the original function and AUX pointers link
1905 the original and newly copied blocks. */
1908 copy_phis_for_bb (basic_block bb, copy_body_data *id)
1910 basic_block const new_bb = (basic_block) bb->aux;
1913 gimple_stmt_iterator si;
1915 for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si))
1921 phi = gsi_stmt (si);
1922 res = PHI_RESULT (phi);
1924 if (is_gimple_reg (res))
1926 walk_tree (&new_res, copy_tree_body_r, id, NULL);
1927 SSA_NAME_DEF_STMT (new_res)
1928 = new_phi = create_phi_node (new_res, new_bb);
1929 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
1932 = find_edge ((basic_block) new_edge->src->aux, bb);
1933 tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
1935 tree block = id->block;
1936 id->block = NULL_TREE;
1937 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
1939 gcc_assert (new_arg);
1940 /* With return slot optimization we can end up with
1941 non-gimple (foo *)&this->m, fix that here. */
1942 if (TREE_CODE (new_arg) != SSA_NAME
1943 && TREE_CODE (new_arg) != FUNCTION_DECL
1944 && !is_gimple_val (new_arg))
1946 gimple_seq stmts = NULL;
1947 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
1948 gsi_insert_seq_on_edge_immediate (new_edge, stmts);
1950 add_phi_arg (new_phi, new_arg, new_edge,
1951 gimple_phi_arg_location_from_edge (phi, old_edge));
1958 /* Wrapper for remap_decl so it can be used as a callback. */
1961 remap_decl_1 (tree decl, void *data)
1963 return remap_decl (decl, (copy_body_data *) data);
1966 /* Build struct function and associated data structures for the new clone
1967 NEW_FNDECL to be built. CALLEE_FNDECL is the original. */
1970 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
1972 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
1973 gcov_type count_scale;
1975 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
1976 count_scale = (REG_BR_PROB_BASE * count
1977 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
1979 count_scale = REG_BR_PROB_BASE;
1981 /* Register specific tree functions. */
1982 gimple_register_cfg_hooks ();
1984 /* Get clean struct function. */
1985 push_struct_function (new_fndecl);
1987 /* We will rebuild these, so just sanity check that they are empty. */
1988 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
1989 gcc_assert (cfun->local_decls == NULL);
1990 gcc_assert (cfun->cfg == NULL);
1991 gcc_assert (cfun->decl == new_fndecl);
1993 /* Copy items we preserve during cloning. */
1994 cfun->static_chain_decl = src_cfun->static_chain_decl;
1995 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
1996 cfun->function_end_locus = src_cfun->function_end_locus;
1997 cfun->curr_properties = src_cfun->curr_properties;
1998 cfun->last_verified = src_cfun->last_verified;
1999 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2000 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2001 cfun->function_frequency = src_cfun->function_frequency;
2002 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2003 cfun->stdarg = src_cfun->stdarg;
2004 cfun->dont_save_pending_sizes_p = src_cfun->dont_save_pending_sizes_p;
2005 cfun->after_inlining = src_cfun->after_inlining;
2006 cfun->returns_struct = src_cfun->returns_struct;
2007 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2008 cfun->after_tree_profile = src_cfun->after_tree_profile;
2010 init_empty_tree_cfg ();
2012 profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
2013 ENTRY_BLOCK_PTR->count =
2014 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2016 ENTRY_BLOCK_PTR->frequency
2017 = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2018 EXIT_BLOCK_PTR->count =
2019 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2021 EXIT_BLOCK_PTR->frequency =
2022 EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2024 init_eh_for_function ();
2026 if (src_cfun->gimple_df)
2028 init_tree_ssa (cfun);
2029 cfun->gimple_df->in_ssa_p = true;
2030 init_ssa_operands ();
2035 /* Make a copy of the body of FN so that it can be inserted inline in
2036 another function. Walks FN via CFG, returns new fndecl. */
2039 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2040 basic_block entry_block_map, basic_block exit_block_map)
2042 tree callee_fndecl = id->src_fn;
2043 /* Original cfun for the callee, doesn't change. */
2044 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2045 struct function *cfun_to_copy;
2047 tree new_fndecl = NULL;
2048 gcov_type count_scale;
2051 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2052 count_scale = (REG_BR_PROB_BASE * count
2053 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2055 count_scale = REG_BR_PROB_BASE;
2057 /* Register specific tree functions. */
2058 gimple_register_cfg_hooks ();
2060 /* Must have a CFG here at this point. */
2061 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
2062 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2064 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2066 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
2067 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
2068 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2069 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2071 /* Duplicate any exception-handling regions. */
2073 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2076 /* Use aux pointers to map the original blocks to copy. */
2077 FOR_EACH_BB_FN (bb, cfun_to_copy)
2079 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2084 last = last_basic_block;
2086 /* Now that we've duplicated the blocks, duplicate their edges. */
2087 FOR_ALL_BB_FN (bb, cfun_to_copy)
2088 copy_edges_for_bb (bb, count_scale, exit_block_map);
2090 if (gimple_in_ssa_p (cfun))
2091 FOR_ALL_BB_FN (bb, cfun_to_copy)
2092 copy_phis_for_bb (bb, id);
2094 FOR_ALL_BB_FN (bb, cfun_to_copy)
2096 ((basic_block)bb->aux)->aux = NULL;
2100 /* Zero out AUX fields of newly created blocks during EH edge insertion. */
2102 for (; last < last_basic_block; last++)
2103 BASIC_BLOCK (last)->aux = NULL;
2104 entry_block_map->aux = NULL;
2105 exit_block_map->aux = NULL;
2109 pointer_map_destroy (id->eh_map);
2116 /* Copy the debug STMT using ID. We deal with these statements in a
2117 special way: if any variable in their VALUE expression wasn't
2118 remapped yet, we won't remap it, because that would get decl uids
2119 out of sync, causing codegen differences between -g and -g0. If
2120 this arises, we drop the VALUE expression altogether. */
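/* Sketch of the effect (hypothetical GIMPLE): a copied debug bind such as
     # DEBUG x => a_3 + 1
   keeps its value only if the decls behind it were already remapped; when
   one of them was not, processing_debug_stmt goes negative and the bind is
   reset below to
     # DEBUG x => NULL
   rather than remapping the decl on demand and perturbing decl uids.  */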
2123 copy_debug_stmt (gimple stmt, copy_body_data *id)
2126 struct walk_stmt_info wi;
2129 if (gimple_block (stmt))
2132 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2136 gimple_set_block (stmt, t);
2138 /* Remap all the operands in COPY. */
2139 memset (&wi, 0, sizeof (wi));
2142 processing_debug_stmt = 1;
2144 t = gimple_debug_bind_get_var (stmt);
2146 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2147 && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2149 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2152 else if (TREE_CODE (t) == VAR_DECL
2154 && gimple_in_ssa_p (cfun)
2155 && !pointer_map_contains (id->decl_map, t)
2157 /* T is a non-localized variable. */;
2159 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2161 gimple_debug_bind_set_var (stmt, t);
2163 if (gimple_debug_bind_has_value_p (stmt))
2164 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2165 remap_gimple_op_r, &wi, NULL);
2167 /* Punt if any decl couldn't be remapped. */
2168 if (processing_debug_stmt < 0)
2169 gimple_debug_bind_reset_value (stmt);
2171 processing_debug_stmt = 0;
2174 if (gimple_in_ssa_p (cfun))
2175 mark_symbols_for_renaming (stmt);
2178 /* Process deferred debug stmts. In order to give values better odds
2179 of being successfully remapped, we delay the processing of debug
2180 stmts until all other stmts that might require remapping are processed. */
2184 copy_debug_stmts (copy_body_data *id)
2189 if (!id->debug_stmts)
2192 for (i = 0; VEC_iterate (gimple, id->debug_stmts, i, stmt); i++)
2193 copy_debug_stmt (stmt, id);
2195 VEC_free (gimple, heap, id->debug_stmts);
2198 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2199 another function. */
2202 copy_tree_body (copy_body_data *id)
2204 tree fndecl = id->src_fn;
2205 tree body = DECL_SAVED_TREE (fndecl);
2207 walk_tree (&body, copy_tree_body_r, id, NULL);
2212 /* Make a copy of the body of FN so that it can be inserted inline in
2213 another function. */
2216 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2217 basic_block entry_block_map, basic_block exit_block_map)
2219 tree fndecl = id->src_fn;
2222 /* If this body has a CFG, walk CFG and copy. */
2223 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
2224 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map);
2225 copy_debug_stmts (id);
2230 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2231 defined in function FN, or of a data member thereof. */
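/* Sketch: when F is being inlined into itself (direct or mutual recursion),
   an argument such as &local, where LOCAL is an automatic variable of F,
   must not be treated as an invariant value for the parameter, because the
   inlined copy of the body gets its own distinct LOCAL.  This predicate is
   what setup_one_parameter uses to detect that case.  */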
2234 self_inlining_addr_expr (tree value, tree fn)
2238 if (TREE_CODE (value) != ADDR_EXPR)
2241 var = get_base_address (TREE_OPERAND (value, 0));
2243 return var && auto_var_in_fn_p (var, fn);
2246 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2247 lexical block and line number information from base_stmt, if given,
2248 or from the last stmt of the block otherwise. */
2251 insert_init_debug_bind (copy_body_data *id,
2252 basic_block bb, tree var, tree value,
2256 gimple_stmt_iterator gsi;
2259 if (!gimple_in_ssa_p (id->src_cfun))
2262 if (!MAY_HAVE_DEBUG_STMTS)
2265 tracked_var = target_for_debug_bind (var);
2271 gsi = gsi_last_bb (bb);
2272 if (!base_stmt && !gsi_end_p (gsi))
2273 base_stmt = gsi_stmt (gsi);
2276 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2280 if (!gsi_end_p (gsi))
2281 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2283 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2290 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2292 /* If VAR represents a zero-sized variable, it's possible that the
2293 assignment statement may result in no gimple statements. */
2296 gimple_stmt_iterator si = gsi_last_bb (bb);
2298 /* We can end up with init statements that store to a non-register
2299 from a rhs with a conversion. Handle that here by forcing the
2300 rhs into a temporary. gimple_regimplify_operands is not
2301 prepared to do this for us. */
2302 if (!is_gimple_debug (init_stmt)
2303 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2304 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2305 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2307 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2308 gimple_expr_type (init_stmt),
2309 gimple_assign_rhs1 (init_stmt));
2310 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2312 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2313 gimple_assign_set_rhs1 (init_stmt, rhs);
2315 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2316 gimple_regimplify_operands (init_stmt, &si);
2317 mark_symbols_for_renaming (init_stmt);
2319 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2321 tree var, def = gimple_assign_lhs (init_stmt);
2323 if (TREE_CODE (def) == SSA_NAME)
2324 var = SSA_NAME_VAR (def);
2328 insert_init_debug_bind (id, bb, var, def, init_stmt);
2333 /* Initialize parameter P with VALUE. If needed, produce init statement
2334 at the end of BB. When BB is NULL, we return the init statement to be output later. */
2337 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2338 basic_block bb, tree *vars)
2340 gimple init_stmt = NULL;
2343 tree def = (gimple_in_ssa_p (cfun)
2344 ? gimple_default_def (id->src_cfun, p) : NULL);
2347 && value != error_mark_node
2348 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2350 if (fold_convertible_p (TREE_TYPE (p), value))
2351 rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value);
2353 /* ??? For valid (GIMPLE) programs we should not end up here.
2354 Still if something has gone wrong and we end up with truly
2355 mismatched types here, fall back to using a VIEW_CONVERT_EXPR
2356 to not leak invalid GIMPLE to the following passes. */
2357 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2360 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2361 here since the type of this decl must be visible to the calling function. */
2363 var = copy_decl_to_var (p, id);
2365 /* We're actually using the newly-created var. */
2366 if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
2369 add_referenced_var (var);
2372 /* Declare this new variable. */
2373 TREE_CHAIN (var) = *vars;
2376 /* Make gimplifier happy about this variable. */
2377 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2379 /* If the parameter is never assigned to, has no SSA_NAMEs created,
2380 we would not need to create a new variable here at all, if it
2381 weren't for debug info. Still, we can just use the argument value. */
2383 if (TREE_READONLY (p)
2384 && !TREE_ADDRESSABLE (p)
2385 && value && !TREE_SIDE_EFFECTS (value)
2388 /* We may produce non-gimple trees by adding NOPs or introduce
2389 invalid sharing when operand is not really constant.
2390 It is not a big deal to prohibit constant propagation here as
2391 we will constant propagate in DOM1 pass anyway. */
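/* Sketch: for a call  f (&x)  where F's parameter is read-only and not
   addressable, and &x is a minimal invariant that does not point back into
   the function being inlined, the PARM_DECL is simply mapped to &x below and
   no initialization statement needs to be emitted at all.  */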
2392 if (is_gimple_min_invariant (value)
2393 && useless_type_conversion_p (TREE_TYPE (p),
2395 /* We have to be very careful about ADDR_EXPR. Make sure
2396 the base variable isn't a local variable of the inlined
2397 function, e.g., when doing recursive inlining, direct or
2398 mutually-recursive or whatever, which is why we don't
2399 just test whether fn == current_function_decl. */
2400 && ! self_inlining_addr_expr (value, fn))
2402 insert_decl_map (id, p, value);
2403 insert_debug_decl_map (id, p, var);
2404 return insert_init_debug_bind (id, bb, var, value, NULL);
2408 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2409 that way, when the PARM_DECL is encountered, it will be
2410 automatically replaced by the VAR_DECL. */
2411 insert_decl_map (id, p, var);
2413 /* Even if P was TREE_READONLY, the new VAR should not be.
2414 In the original code, we would have constructed a
2415 temporary, and then the function body would have never
2416 changed the value of P. However, now, we will be
2417 constructing VAR directly. The constructor body may
2418 change its value multiple times as it is being
2419 constructed. Therefore, it must not be TREE_READONLY;
2420 the back-end assumes that TREE_READONLY variable is
2421 assigned to only once. */
2422 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2423 TREE_READONLY (var) = 0;
2425 /* If there is no setup required and we are in SSA, take the easy route
2426 replacing all SSA names representing the function parameter by the
2427 SSA name passed to function.
2429 We need to construct map for the variable anyway as it might be used
2430 in different SSA names when parameter is set in function.
2432 Do replacement at -O0 for const arguments replaced by constant.
2433 This is important for builtin_constant_p and other constructs requiring
2434 a constant argument to be visible in the inlined function body. */
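/* Sketch (hypothetical user code): given
     static inline int f (const int n) { return __builtin_constant_p (n); }
   a call  f (42)  maps the parameter's default SSA name directly to 42, even
   at -O0 for the read-only parameter, which is what allows
   __builtin_constant_p to fold to 1 inside the inlined body.  */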
2435 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2437 || (TREE_READONLY (p)
2438 && is_gimple_min_invariant (rhs)))
2439 && (TREE_CODE (rhs) == SSA_NAME
2440 || is_gimple_min_invariant (rhs))
2441 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2443 insert_decl_map (id, def, rhs);
2444 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2447 /* If the value of the argument is never used, don't care about initializing it. */
2449 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2451 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2452 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2455 /* Initialize this VAR_DECL from the equivalent argument. Convert
2456 the argument to the proper type in case it was promoted. */
2459 if (rhs == error_mark_node)
2461 insert_decl_map (id, p, var);
2462 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2465 STRIP_USELESS_TYPE_CONVERSION (rhs);
2467 /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
2468 keep our trees in gimple form. */
2469 if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2471 def = remap_ssa_name (def, id);
2472 init_stmt = gimple_build_assign (def, rhs);
2473 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2474 set_default_def (var, NULL);
2477 init_stmt = gimple_build_assign (var, rhs);
2479 if (bb && init_stmt)
2480 insert_init_stmt (id, bb, init_stmt);
2485 /* Generate code to initialize the parameters of the function at the
2486 top of the stack in ID from the GIMPLE_CALL STMT. */
2489 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
2490 tree fn, basic_block bb)
2495 tree vars = NULL_TREE;
2496 tree static_chain = gimple_call_chain (stmt);
2498 /* Figure out what the parameters are. */
2499 parms = DECL_ARGUMENTS (fn);
2501 /* Loop through the parameter declarations, replacing each with an
2502 equivalent VAR_DECL, appropriately initialized. */
2503 for (p = parms, i = 0; p; p = TREE_CHAIN (p), i++)
2506 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2507 setup_one_parameter (id, p, val, fn, bb, &vars);
2510 /* Initialize the static chain. */
2511 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
2512 gcc_assert (fn != current_function_decl);
2515 /* No static chain? Seems like a bug in tree-nested.c. */
2516 gcc_assert (static_chain);
2518 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
2521 declare_inline_vars (id->block, vars);
2525 /* Declare a return variable to replace the RESULT_DECL for the
2526 function we are calling. An appropriate DECL_STMT is returned.
2527 The USE_STMT is filled to contain a use of the declaration to
2528 indicate the return value of the function.
2530 RETURN_SLOT, if non-null, is the place where to store the result. It
2531 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
2532 was the LHS of the MODIFY_EXPR to which this call is the RHS.
2534 The return value is a (possibly null) value that holds the result
2535 as seen by the caller. */
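/* Sketch: for  x = f ();  MODIFY_DEST is "x", and when no type promotion or
   addressability issue gets in the way the code below reuses "x" itself, so
   the callee's RETURN_EXPRs simply become assignments to "x".  For
   s = g ();  compiled with the return slot optimization, RETURN_SLOT is "s"
   and the RESULT_DECL is mapped to (a dereference of the address of) "s"
   instead.  */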
2538 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest)
2540 tree callee = id->src_fn;
2541 tree caller = id->dst_fn;
2542 tree result = DECL_RESULT (callee);
2543 tree callee_type = TREE_TYPE (result);
2547 /* Handle type-mismatches in the function declaration return type
2548 vs. the call expression. */
2550 caller_type = TREE_TYPE (modify_dest);
2552 caller_type = TREE_TYPE (TREE_TYPE (callee));
2554 /* We don't need to do anything for functions that don't return
2556 if (!result || VOID_TYPE_P (callee_type))
2559 /* If there was a return slot, then the return value is the
2560 dereferenced address of that object. */
2563 /* The front end shouldn't have used both return_slot and
2564 a modify expression. */
2565 gcc_assert (!modify_dest);
2566 if (DECL_BY_REFERENCE (result))
2568 tree return_slot_addr = build_fold_addr_expr (return_slot);
2569 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
2571 /* We are going to construct *&return_slot and we can't do that
2572 for variables believed to be not addressable.
2574 FIXME: This check possibly can match, because values returned
2575 via return slot optimization are not believed to have address
2576 taken by alias analysis. */
2577 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
2578 if (gimple_in_ssa_p (cfun))
2580 HOST_WIDE_INT bitsize;
2581 HOST_WIDE_INT bitpos;
2583 enum machine_mode mode;
2587 base = get_inner_reference (return_slot, &bitsize, &bitpos,
2589 &mode, &unsignedp, &volatilep,
2591 if (TREE_CODE (base) == INDIRECT_REF)
2592 base = TREE_OPERAND (base, 0);
2593 if (TREE_CODE (base) == SSA_NAME)
2594 base = SSA_NAME_VAR (base);
2595 mark_sym_for_renaming (base);
2597 var = return_slot_addr;
2602 gcc_assert (TREE_CODE (var) != SSA_NAME);
2603 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
2605 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2606 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2607 && !DECL_GIMPLE_REG_P (result)
2609 DECL_GIMPLE_REG_P (var) = 0;
2614 /* All types requiring non-trivial constructors should have been handled. */
2615 gcc_assert (!TREE_ADDRESSABLE (callee_type));
2617 /* Attempt to avoid creating a new temporary variable. */
2619 && TREE_CODE (modify_dest) != SSA_NAME)
2621 bool use_it = false;
2623 /* We can't use MODIFY_DEST if there's type promotion involved. */
2624 if (!useless_type_conversion_p (callee_type, caller_type))
2627 /* ??? If we're assigning to a variable sized type, then we must
2628 reuse the destination variable, because we've no good way to
2629 create variable sized temporaries at this point. */
2630 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
2633 /* If the callee cannot possibly modify MODIFY_DEST, then we can
2634 reuse it as the result of the call directly. Don't do this if
2635 it would promote MODIFY_DEST to addressable. */
2636 else if (TREE_ADDRESSABLE (result))
2640 tree base_m = get_base_address (modify_dest);
2642 /* If the base isn't a decl, then it's a pointer, and we don't
2643 know where that's going to go. */
2644 if (!DECL_P (base_m))
2646 else if (is_global_var (base_m))
2648 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2649 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2650 && !DECL_GIMPLE_REG_P (result)
2651 && DECL_GIMPLE_REG_P (base_m))
2653 else if (!TREE_ADDRESSABLE (base_m))
2665 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
2667 var = copy_result_decl_to_var (result, id);
2668 if (gimple_in_ssa_p (cfun))
2671 add_referenced_var (var);
2674 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2675 DECL_STRUCT_FUNCTION (caller)->local_decls
2676 = tree_cons (NULL_TREE, var,
2677 DECL_STRUCT_FUNCTION (caller)->local_decls);
2679 /* Do not have the rest of GCC warn about this variable as it should
2680 not be visible to the user. */
2681 TREE_NO_WARNING (var) = 1;
2683 declare_inline_vars (id->block, var);
2685 /* Build the use expr. If the return type of the function was
2686 promoted, convert it back to the expected type. */
2688 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
2689 use = fold_convert (caller_type, var);
2691 STRIP_USELESS_TYPE_CONVERSION (use);
2693 if (DECL_BY_REFERENCE (result))
2695 TREE_ADDRESSABLE (var) = 1;
2696 var = build_fold_addr_expr (var);
2700 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
2701 way, when the RESULT_DECL is encountered, it will be
2702 automatically replaced by the VAR_DECL. */
2703 insert_decl_map (id, result, var);
2705 /* Remember this so we can ignore it in remap_decls. */
2711 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
2712 to a local label. */
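/* Sketch of what this detects (relies on the GCC label-address extension):
     void f (void)
     {
       static void *resume = &&l;   -- DECL_INITIAL records local label L
     l:
       do_something ();
     }
   copy_forbidden below refuses to copy such a function.  */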
2715 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
2718 tree fn = (tree) fnp;
2720 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
2729 /* Determine if the function can be copied. If so return NULL. If
2730 not return a string describing the reason for failure. */
2733 copy_forbidden (struct function *fun, tree fndecl)
2735 const char *reason = fun->cannot_be_copied_reason;
2738 /* Only examine the function once. */
2739 if (fun->cannot_be_copied_set)
2742 /* We cannot copy a function that receives a non-local goto
2743 because we cannot remap the destination label used in the
2744 function that is performing the non-local goto. */
2745 /* ??? Actually, this should be possible, if we work at it.
2746 No doubt there's just a handful of places that simply
2747 assume it doesn't happen and don't substitute properly. */
2748 if (fun->has_nonlocal_label)
2750 reason = G_("function %q+F can never be copied "
2751 "because it receives a non-local goto");
2755 for (step = fun->local_decls; step; step = TREE_CHAIN (step))
2757 tree decl = TREE_VALUE (step);
2759 if (TREE_CODE (decl) == VAR_DECL
2760 && TREE_STATIC (decl)
2761 && !DECL_EXTERNAL (decl)
2762 && DECL_INITIAL (decl)
2763 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
2764 has_label_address_in_static_1,
2767 reason = G_("function %q+F can never be copied because it saves "
2768 "address of local label in a static variable");
2774 fun->cannot_be_copied_reason = reason;
2775 fun->cannot_be_copied_set = true;
2780 static const char *inline_forbidden_reason;
2782 /* A callback for walk_gimple_seq to handle statements. Returns non-null
2783 iff a function cannot be inlined. Also sets the reason why. */
2786 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2787 struct walk_stmt_info *wip)
2789 tree fn = (tree) wip->info;
2791 gimple stmt = gsi_stmt (*gsi);
2793 switch (gimple_code (stmt))
2796 /* Refuse to inline an alloca call unless the user explicitly forced it, as
2797 this may change the program's memory overhead drastically when the
2798 function using alloca is called in a loop. In the GCC version present in
2799 SPEC2000, inlining into schedule_block caused it to require 2GB of
2800 RAM instead of 256MB. */
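/* For instance, a function like
     void *grab (unsigned n) { return __builtin_alloca (n); }
   would be rejected here unless it carries the always_inline attribute.  */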
2801 if (gimple_alloca_call_p (stmt)
2802 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
2804 inline_forbidden_reason
2805 = G_("function %q+F can never be inlined because it uses "
2806 "alloca (override using the always_inline attribute)");
2807 *handled_ops_p = true;
2811 t = gimple_call_fndecl (stmt);
2815 /* We cannot inline functions that call setjmp. */
2816 if (setjmp_call_p (t))
2818 inline_forbidden_reason
2819 = G_("function %q+F can never be inlined because it uses setjmp");
2820 *handled_ops_p = true;
2824 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
2825 switch (DECL_FUNCTION_CODE (t))
2827 /* We cannot inline functions that take a variable number of
2829 case BUILT_IN_VA_START:
2830 case BUILT_IN_NEXT_ARG:
2831 case BUILT_IN_VA_END:
2832 inline_forbidden_reason
2833 = G_("function %q+F can never be inlined because it "
2834 "uses variable argument lists");
2835 *handled_ops_p = true;
2838 case BUILT_IN_LONGJMP:
2839 /* We can't inline functions that call __builtin_longjmp at
2840 all. The non-local goto machinery really requires the
2841 destination be in a different function. If we allow the
2842 function calling __builtin_longjmp to be inlined into the
2843 function calling __builtin_setjmp, Things will Go Awry. */
2844 inline_forbidden_reason
2845 = G_("function %q+F can never be inlined because "
2846 "it uses setjmp-longjmp exception handling");
2847 *handled_ops_p = true;
2850 case BUILT_IN_NONLOCAL_GOTO:
2852 inline_forbidden_reason
2853 = G_("function %q+F can never be inlined because "
2854 "it uses non-local goto");
2855 *handled_ops_p = true;
2858 case BUILT_IN_RETURN:
2859 case BUILT_IN_APPLY_ARGS:
2860 /* If a __builtin_apply_args caller would be inlined,
2861 it would be saving arguments of the function it has
2862 been inlined into. Similarly __builtin_return would
2863 return from the function it has been inlined into. */
2864 inline_forbidden_reason
2865 = G_("function %q+F can never be inlined because "
2866 "it uses __builtin_return or __builtin_apply_args");
2867 *handled_ops_p = true;
2876 t = gimple_goto_dest (stmt);
2878 /* We will not inline a function which uses computed goto. The
2879 addresses of its local labels, which may be tucked into
2880 global storage, are of course not constant across
2881 instantiations, which causes unexpected behavior. */
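/* Sketch: the classic pattern this rejects is a threaded interpreter loop,
     static void *dispatch[] = { &&op_add, &&op_done };
     goto *dispatch[opcode];
   whose stored label addresses could not be remapped into a second
   instantiation of the body.  */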
2882 if (TREE_CODE (t) != LABEL_DECL)
2884 inline_forbidden_reason
2885 = G_("function %q+F can never be inlined "
2886 "because it contains a computed goto");
2887 *handled_ops_p = true;
2896 *handled_ops_p = false;
2900 /* Return true if FNDECL is a function that cannot be inlined into
2904 inline_forbidden_p (tree fndecl)
2906 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
2907 struct walk_stmt_info wi;
2908 struct pointer_set_t *visited_nodes;
2910 bool forbidden_p = false;
2912 /* First check for shared reasons not to copy the code. */
2913 inline_forbidden_reason = copy_forbidden (fun, fndecl);
2914 if (inline_forbidden_reason != NULL)
2917 /* Next, walk the statements of the function looking for
2918 constructs we can't handle, or that are non-optimal for inlining. */
2919 visited_nodes = pointer_set_create ();
2920 memset (&wi, 0, sizeof (wi));
2921 wi.info = (void *) fndecl;
2922 wi.pset = visited_nodes;
2924 FOR_EACH_BB_FN (bb, fun)
2927 gimple_seq seq = bb_seq (bb);
2928 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
2929 forbidden_p = (ret != NULL);
2934 pointer_set_destroy (visited_nodes);
2938 /* Returns nonzero if FN is a function that does not have any
2939 fundamental inline blocking properties. */
2942 tree_inlinable_function_p (tree fn)
2944 bool inlinable = true;
2948 /* If we've already decided this function shouldn't be inlined,
2949 there's no need to check again. */
2950 if (DECL_UNINLINABLE (fn))
2953 /* We only warn for functions declared `inline' by the user. */
2954 do_warning = (warn_inline
2955 && DECL_DECLARED_INLINE_P (fn)
2956 && !DECL_NO_INLINE_WARNING_P (fn)
2957 && !DECL_IN_SYSTEM_HEADER (fn));
2959 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
2962 && always_inline == NULL)
2965 warning (OPT_Winline, "function %q+F can never be inlined because it "
2966 "is suppressed using -fno-inline", fn);
2970 /* Don't auto-inline anything that might not be bound within
2971 this unit of translation. */
2972 else if (!DECL_DECLARED_INLINE_P (fn)
2973 && DECL_REPLACEABLE_P (fn))
2976 else if (!function_attribute_inlinable_p (fn))
2979 warning (OPT_Winline, "function %q+F can never be inlined because it "
2980 "uses attributes conflicting with inlining", fn);
2984 else if (inline_forbidden_p (fn))
2986 /* See if we should warn about uninlinable functions. Previously,
2987 some of these warnings would be issued while trying to expand
2988 the function inline, but that would cause multiple warnings
2989 about functions that would for example call alloca. But since
2990 this is a property of the function, just one warning is enough.
2991 As a bonus we can now give more details about the reason why a
2992 function is not inlinable. */
2994 sorry (inline_forbidden_reason, fn);
2995 else if (do_warning)
2996 warning (OPT_Winline, inline_forbidden_reason, fn);
3001 /* Squirrel away the result so that we don't have to check again. */
3002 DECL_UNINLINABLE (fn) = !inlinable;
3007 /* Estimate the cost of a memory move. Use the machine-dependent
3008 word size and take a possible memcpy call into account. */
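/* Illustrative arithmetic (hypothetical MOVE_MAX_PIECES of 8): copying a
   24-byte structure is charged (24 + 8 - 1) / 8 == 3 units, while a
   variable-sized type (size < 0) or anything larger than
   MOVE_MAX_PIECES * MOVE_RATIO falls back to the flat cost of a memcpy
   call, i.e. its three argument moves plus the call itself.  */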
3011 estimate_move_cost (tree type)
3015 gcc_assert (!VOID_TYPE_P (type));
3017 size = int_size_in_bytes (type);
3019 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3020 /* Cost of a memcpy call, 3 arguments and the call. */
3023 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3026 /* Returns the cost of operation CODE, according to WEIGHTS. */
3029 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3030 tree op1 ATTRIBUTE_UNUSED, tree op2)
3034 /* These are "free" conversions, or their presumed cost
3035 is folded into other operations. */
3042 /* Assign cost of 1 to usual operations.
3043 ??? We may consider mapping RTL costs to this. */
3048 case POINTER_PLUS_EXPR:
3052 case ADDR_SPACE_CONVERT_EXPR:
3053 case FIXED_CONVERT_EXPR:
3054 case FIX_TRUNC_EXPR:
3066 case VEC_LSHIFT_EXPR:
3067 case VEC_RSHIFT_EXPR:
3074 case TRUTH_ANDIF_EXPR:
3075 case TRUTH_ORIF_EXPR:
3076 case TRUTH_AND_EXPR:
3078 case TRUTH_XOR_EXPR:
3079 case TRUTH_NOT_EXPR:
3088 case UNORDERED_EXPR:
3099 case PREDECREMENT_EXPR:
3100 case PREINCREMENT_EXPR:
3101 case POSTDECREMENT_EXPR:
3102 case POSTINCREMENT_EXPR:
3104 case REALIGN_LOAD_EXPR:
3106 case REDUC_MAX_EXPR:
3107 case REDUC_MIN_EXPR:
3108 case REDUC_PLUS_EXPR:
3109 case WIDEN_SUM_EXPR:
3110 case WIDEN_MULT_EXPR:
3113 case VEC_WIDEN_MULT_HI_EXPR:
3114 case VEC_WIDEN_MULT_LO_EXPR:
3115 case VEC_UNPACK_HI_EXPR:
3116 case VEC_UNPACK_LO_EXPR:
3117 case VEC_UNPACK_FLOAT_HI_EXPR:
3118 case VEC_UNPACK_FLOAT_LO_EXPR:
3119 case VEC_PACK_TRUNC_EXPR:
3120 case VEC_PACK_SAT_EXPR:
3121 case VEC_PACK_FIX_TRUNC_EXPR:
3122 case VEC_EXTRACT_EVEN_EXPR:
3123 case VEC_EXTRACT_ODD_EXPR:
3124 case VEC_INTERLEAVE_HIGH_EXPR:
3125 case VEC_INTERLEAVE_LOW_EXPR:
3129 /* A few special cases of expensive operations. This is useful
3130 to avoid inlining functions having too many of these. */
3131 case TRUNC_DIV_EXPR:
3133 case FLOOR_DIV_EXPR:
3134 case ROUND_DIV_EXPR:
3135 case EXACT_DIV_EXPR:
3136 case TRUNC_MOD_EXPR:
3138 case FLOOR_MOD_EXPR:
3139 case ROUND_MOD_EXPR:
3141 if (TREE_CODE (op2) != INTEGER_CST)
3142 return weights->div_mod_cost;
3146 /* We expect a copy assignment with no operator. */
3147 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3153 /* Estimate number of instructions that will be created by expanding
3154 the statements in the statement sequence STMTS.
3155 WEIGHTS contains weights attributed to various constructs. */
3158 int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3161 gimple_stmt_iterator gsi;
3164 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3165 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3171 /* Estimate number of instructions that will be created by expanding STMT.
3172 WEIGHTS contains weights attributed to various constructs. */
3175 estimate_num_insns (gimple stmt, eni_weights *weights)
3178 enum gimple_code code = gimple_code (stmt);
3185 /* Try to estimate the cost of assignments. We have three cases to
3187 1) Simple assignments to registers;
3188 2) Stores to things that must live in memory. This includes
3189 "normal" stores to scalars, but also assignments of large
3190 structures, or constructors of big arrays;
3191 3) TARGET_EXPRs.
3192 Let us look at the first two cases, assuming we have "a = b + C":
3193 <GIMPLE_ASSIGN <var_decl "a">
3194 <plus_expr <var_decl "b"> <constant C>>
3195 If "a" is a GIMPLE register, the assignment to it is free on almost
3196 any target, because "a" usually ends up in a real register. Hence
3197 the only cost of this expression comes from the PLUS_EXPR, and we
3198 can ignore the GIMPLE_ASSIGN.
3199 If "a" is not a GIMPLE register, the assignment to "a" will most
3200 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3201 of moving something into "a", which we compute using the function
3202 estimate_move_cost. */
3203 lhs = gimple_assign_lhs (stmt);
3204 rhs = gimple_assign_rhs1 (stmt);
3206 if (is_gimple_reg (lhs))
3209 cost = estimate_move_cost (TREE_TYPE (lhs));
3211 if (!is_gimple_reg (rhs) && !is_gimple_min_invariant (rhs))
3212 cost += estimate_move_cost (TREE_TYPE (rhs));
3214 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3215 gimple_assign_rhs1 (stmt),
3216 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3217 == GIMPLE_BINARY_RHS
3218 ? gimple_assign_rhs2 (stmt) : NULL);
3222 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3223 gimple_op (stmt, 0),
3224 gimple_op (stmt, 1));
3228 /* Take into account the cost of the switch + guess 2 conditional jumps for
3229 each case label.
3231 TODO: once the switch expansion logic is sufficiently separated, we can
3232 do a better job of estimating the cost of the switch. */
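/* Illustrative arithmetic: a switch with 16 case labels is charged
   floor_log2 (16) * 2 == 8 when estimating time (roughly a balanced
   decision tree) but 16 * 2 == 32 when estimating size.  */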
3233 if (weights->time_based)
3234 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3236 cost = gimple_switch_num_labels (stmt) * 2;
3241 tree decl = gimple_call_fndecl (stmt);
3242 tree addr = gimple_call_fn (stmt);
3243 tree funtype = TREE_TYPE (addr);
3245 if (POINTER_TYPE_P (funtype))
3246 funtype = TREE_TYPE (funtype);
3248 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
3249 cost = weights->target_builtin_call_cost;
3251 cost = weights->call_cost;
3253 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3254 switch (DECL_FUNCTION_CODE (decl))
3256 case BUILT_IN_CONSTANT_P:
3258 case BUILT_IN_EXPECT:
3261 /* Prefetch instruction is not expensive. */
3262 case BUILT_IN_PREFETCH:
3263 cost = weights->target_builtin_call_cost;
3266 /* Exception state returns or moves registers around. */
3267 case BUILT_IN_EH_FILTER:
3268 case BUILT_IN_EH_POINTER:
3269 case BUILT_IN_EH_COPY_VALUES:
3277 funtype = TREE_TYPE (decl);
3279 if (!VOID_TYPE_P (TREE_TYPE (funtype)))
3280 cost += estimate_move_cost (TREE_TYPE (funtype));
3281 /* Our cost must be kept in sync with
3282 cgraph_estimate_size_after_inlining, which does use the function
3283 declaration to figure out the arguments. */
3284 if (decl && DECL_ARGUMENTS (decl))
3287 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3288 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3289 cost += estimate_move_cost (TREE_TYPE (arg));
3291 else if (funtype && prototype_p (funtype))
3294 for (t = TYPE_ARG_TYPES (funtype); t && t != void_list_node;
3296 if (!VOID_TYPE_P (TREE_VALUE (t)))
3297 cost += estimate_move_cost (TREE_VALUE (t));
3301 for (i = 0; i < gimple_call_num_args (stmt); i++)
3303 tree arg = gimple_call_arg (stmt, i);
3304 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3305 cost += estimate_move_cost (TREE_TYPE (arg));
3317 case GIMPLE_PREDICT:
3322 return asm_str_count (gimple_asm_string (stmt));
3325 /* This is either going to be an external function call with one
3326 argument, or two register copy statements plus a goto. */
3329 case GIMPLE_EH_DISPATCH:
3330 /* ??? This is going to turn into a switch statement. Ideally
3331 we'd have a look at the eh region and estimate the number of edges involved. */
3336 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3338 case GIMPLE_EH_FILTER:
3339 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3342 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3345 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3346 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3348 /* OpenMP directives are generally very expensive. */
3350 case GIMPLE_OMP_RETURN:
3351 case GIMPLE_OMP_SECTIONS_SWITCH:
3352 case GIMPLE_OMP_ATOMIC_STORE:
3353 case GIMPLE_OMP_CONTINUE:
3354 /* ...except these, which are cheap. */
3357 case GIMPLE_OMP_ATOMIC_LOAD:
3358 return weights->omp_cost;
3360 case GIMPLE_OMP_FOR:
3361 return (weights->omp_cost
3362 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3363 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3365 case GIMPLE_OMP_PARALLEL:
3366 case GIMPLE_OMP_TASK:
3367 case GIMPLE_OMP_CRITICAL:
3368 case GIMPLE_OMP_MASTER:
3369 case GIMPLE_OMP_ORDERED:
3370 case GIMPLE_OMP_SECTION:
3371 case GIMPLE_OMP_SECTIONS:
3372 case GIMPLE_OMP_SINGLE:
3373 return (weights->omp_cost
3374 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3383 /* Estimate number of instructions that will be created by expanding
3384 function FNDECL. WEIGHTS contains weights attributed to various
3388 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3390 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3391 gimple_stmt_iterator bsi;
3395 gcc_assert (my_function && my_function->cfg);
3396 FOR_EACH_BB_FN (bb, my_function)
3398 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3399 n += estimate_num_insns (gsi_stmt (bsi), weights);
3406 /* Initializes weights used by estimate_num_insns. */
3409 init_inline_once (void)
3411 eni_size_weights.call_cost = 1;
3412 eni_size_weights.target_builtin_call_cost = 1;
3413 eni_size_weights.div_mod_cost = 1;
3414 eni_size_weights.omp_cost = 40;
3415 eni_size_weights.time_based = false;
3417 /* Estimating the time for a call is difficult, since we have no idea what the
3418 called function does. In the current uses of eni_time_weights,
3419 underestimating the cost does less harm than overestimating it, so
3420 we choose a rather small value here. */
3421 eni_time_weights.call_cost = 10;
3422 eni_time_weights.target_builtin_call_cost = 10;
3423 eni_time_weights.div_mod_cost = 10;
3424 eni_time_weights.omp_cost = 40;
3425 eni_time_weights.time_based = true;
3428 /* Estimate the number of instructions in a gimple_seq. */
3431 count_insns_seq (gimple_seq seq, eni_weights *weights)
3433 gimple_stmt_iterator gsi;
3435 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3436 n += estimate_num_insns (gsi_stmt (gsi), weights);
3442 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
3445 prepend_lexical_block (tree current_block, tree new_block)
3447 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3448 BLOCK_SUBBLOCKS (current_block) = new_block;
3449 BLOCK_SUPERCONTEXT (new_block) = current_block;
3452 /* Fetch the callee declaration from the call graph edge going from NODE and
3453 associated with the call statement STMT. Return NULL_TREE if not found. */
3455 get_indirect_callee_fndecl (struct cgraph_node *node, gimple stmt)
3457 struct cgraph_edge *cs;
3459 cs = cgraph_edge (node, stmt);
3461 return cs->callee->decl;
3466 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
3469 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
3473 struct pointer_map_t *st, *dst;
3476 location_t saved_location;
3477 struct cgraph_edge *cg_edge;
3478 cgraph_inline_failed_t reason;
3479 basic_block return_block;
3481 gimple_stmt_iterator gsi, stmt_gsi;
3482 bool successfully_inlined = FALSE;
3483 bool purge_dead_abnormal_edges;
3487 /* Set input_location here so we get the right instantiation context
3488 if we call instantiate_decl from inlinable_function_p. */
3489 saved_location = input_location;
3490 if (gimple_has_location (stmt))
3491 input_location = gimple_location (stmt);
3493 /* From here on, we're only interested in CALL_EXPRs. */
3494 if (gimple_code (stmt) != GIMPLE_CALL)
3497 /* First, see if we can figure out what function is being called.
3498 If we cannot, then there is no hope of inlining the function. */
3499 fn = gimple_call_fndecl (stmt);
3502 fn = get_indirect_callee_fndecl (id->dst_node, stmt);
3507 /* Turn forward declarations into real ones. */
3508 fn = cgraph_node (fn)->decl;
3510 /* If FN is a declaration of a function in a nested scope that was
3511 globally declared inline, we don't set its DECL_INITIAL.
3512 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
3513 C++ front-end uses it for cdtors to refer to their internal
3514 declarations, that are not real functions. Fortunately those
3515 don't have trees to be saved, so we can tell by checking their
3517 if (!DECL_INITIAL (fn)
3518 && DECL_ABSTRACT_ORIGIN (fn)
3519 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
3520 fn = DECL_ABSTRACT_ORIGIN (fn);
3522 /* Objective-C and Fortran still call tree_rest_of_compilation directly.
3523 Kill this check once this is fixed. */
3524 if (!id->dst_node->analyzed)
3527 cg_edge = cgraph_edge (id->dst_node, stmt);
3529 /* Don't inline functions with different EH personalities. */
3530 if (DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3531 && DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl)
3532 && (DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3533 != DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl)))
3536 /* Don't try to inline functions that are not well-suited to
3538 if (!cgraph_inline_p (cg_edge, &reason))
3540 /* If this call was originally indirect, we do not want to emit any
3541 inlining related warnings or sorry messages because there are no
3542 guarantees regarding those. */
3543 if (cg_edge->indirect_call)
3546 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
3547 /* Avoid warnings during early inline pass. */
3548 && cgraph_global_info_ready)
3550 sorry ("inlining failed in call to %q+F: %s", fn,
3551 cgraph_inline_failed_string (reason));
3552 sorry ("called from here");
3554 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
3555 && !DECL_IN_SYSTEM_HEADER (fn)
3556 && reason != CIF_UNSPECIFIED
3557 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
3558 /* Avoid warnings during early inline pass. */
3559 && cgraph_global_info_ready)
3561 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
3562 fn, cgraph_inline_failed_string (reason));
3563 warning (OPT_Winline, "called from here");
3567 fn = cg_edge->callee->decl;
3569 #ifdef ENABLE_CHECKING
3570 if (cg_edge->callee->decl != id->dst_node->decl)
3571 verify_cgraph_node (cg_edge->callee);
3574 /* We will be inlining this callee. */
3575 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
3577 /* Update the callers EH personality. */
3578 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
3579 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3580 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
3582 /* Split the block holding the GIMPLE_CALL. */
3583 e = split_block (bb, stmt);
3585 return_block = e->dest;
3588 /* split_block splits after the statement; work around this by
3589 moving the call into the second block manually. Not pretty,
3590 but seems easier than doing the CFG manipulation by hand
3591 when the GIMPLE_CALL is in the last statement of BB. */
3592 stmt_gsi = gsi_last_bb (bb);
3593 gsi_remove (&stmt_gsi, false);
3595 /* If the GIMPLE_CALL was in the last statement of BB, it may have
3596 been the source of abnormal edges. In this case, schedule
3597 the removal of dead abnormal edges. */
3598 gsi = gsi_start_bb (return_block);
3599 if (gsi_end_p (gsi))
3601 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3602 purge_dead_abnormal_edges = true;
3606 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
3607 purge_dead_abnormal_edges = false;
3610 stmt_gsi = gsi_start_bb (return_block);
3612 /* Build a block containing code to initialize the arguments, the
3613 actual inline expansion of the body, and a label for the return
3614 statements within the function to jump to. The type of the
3615 statement expression is the return type of the function call. */
3616 id->block = make_node (BLOCK);
3617 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
3618 BLOCK_SOURCE_LOCATION (id->block) = input_location;
3619 prepend_lexical_block (gimple_block (stmt), id->block);
3621 /* Local declarations will be replaced by their equivalents in this map. */
3624 id->decl_map = pointer_map_create ();
3625 dst = id->debug_map;
3626 id->debug_map = NULL;
3628 /* Record the function we are about to inline. */
3630 id->src_node = cg_edge->callee;
3631 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
3632 id->gimple_call = stmt;
3634 gcc_assert (!id->src_cfun->after_inlining);
3637 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
3639 gimple_stmt_iterator si = gsi_last_bb (bb);
3640 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
3644 initialize_inlined_parameters (id, stmt, fn, bb);
3646 if (DECL_INITIAL (fn))
3647 prepend_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
3649 /* Return statements in the function body will be replaced by jumps
3650 to the RET_LABEL. */
3651 gcc_assert (DECL_INITIAL (fn));
3652 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
3654 /* Find the LHS to which the result of this call is assigned. */
3656 if (gimple_call_lhs (stmt))
3658 modify_dest = gimple_call_lhs (stmt);
3660 /* The function which we are inlining might not return a value,
3661 in which case we should issue a warning that the function
3662 does not return a value. In that case the optimizers will
3663 see that the variable to which the value is assigned was not
3664 initialized. We do not want to issue a warning about that
3665 uninitialized variable. */
3666 if (DECL_P (modify_dest))
3667 TREE_NO_WARNING (modify_dest) = 1;
3669 if (gimple_call_return_slot_opt_p (stmt))
3671 return_slot = modify_dest;
3678 /* If we are inlining a call to the C++ operator new, we don't want
3679 to use type based alias analysis on the return value. Otherwise
3680 we may get confused if the compiler sees that the inlined new
3681 function returns a pointer which was just deleted. See bug
3683 if (DECL_IS_OPERATOR_NEW (fn))
3689 /* Declare the return variable for the function. */
3690 use_retvar = declare_return_variable (id, return_slot, modify_dest);
3692 /* Add local vars in this inlined callee to caller. */
3693 t_step = id->src_cfun->local_decls;
3694 for (; t_step; t_step = TREE_CHAIN (t_step))
3696 var = TREE_VALUE (t_step);
3697 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
3699 if (var_ann (var) && add_referenced_var (var))
3700 cfun->local_decls = tree_cons (NULL_TREE, var,
3703 else if (!can_be_nonlocal (var, id))
3704 cfun->local_decls = tree_cons (NULL_TREE, remap_decl (var, id),
3708 if (dump_file && (dump_flags & TDF_DETAILS))
3710 fprintf (dump_file, "Inlining ");
3711 print_generic_expr (dump_file, id->src_fn, 0);
3712 fprintf (dump_file, " to ");
3713 print_generic_expr (dump_file, id->dst_fn, 0);
3714 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
3717 /* This is it. Duplicate the callee body. Assume callee is
3718 pre-gimplified. Note that we must not alter the caller
3719 function in any way before this point, as this CALL_EXPR may be
3720 a self-referential call; if we're calling ourselves, we need to
3721 duplicate our body before altering anything. */
3722 copy_body (id, bb->count,
3723 cg_edge->frequency * REG_BR_PROB_BASE / CGRAPH_FREQ_BASE,
3726 /* Reset the escaped and callused solutions. */
3727 if (cfun->gimple_df)
3729 pt_solution_reset (&cfun->gimple_df->escaped);
3730 pt_solution_reset (&cfun->gimple_df->callused);
3736 pointer_map_destroy (id->debug_map);
3737 id->debug_map = dst;
3739 pointer_map_destroy (id->decl_map);
3742 /* Unlink the call's virtual operands before replacing it. */
3743 unlink_stmt_vdef (stmt);
3745 /* If the inlined function returns a result that we care about,
3746 substitute the GIMPLE_CALL with an assignment of the return
3747 variable to the LHS of the call. That is, if STMT was
3748 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
3749 if (use_retvar && gimple_call_lhs (stmt))
3751 gimple old_stmt = stmt;
3752 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
3753 gsi_replace (&stmt_gsi, stmt, false);
3754 if (gimple_in_ssa_p (cfun))
3755 mark_symbols_for_renaming (stmt);
3756 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
3760 /* Handle the case of inlining a function with no return
3761 statement, which causes the return value to become undefined. */
3762 if (gimple_call_lhs (stmt)
3763 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
3765 tree name = gimple_call_lhs (stmt);
3766 tree var = SSA_NAME_VAR (name);
3767 tree def = gimple_default_def (cfun, var);
3771 /* If the variable is used undefined, make this name
3772 undefined via a move. */
3773 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
3774 gsi_replace (&stmt_gsi, stmt, true);
3778 /* Otherwise make this variable undefined. */
3779 gsi_remove (&stmt_gsi, true);
3780 set_default_def (var, name);
3781 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
3785 gsi_remove (&stmt_gsi, true);
3788 if (purge_dead_abnormal_edges)
3789 gimple_purge_dead_abnormal_call_edges (return_block);
3791 /* If the value of the new expression is ignored, that's OK. We
3792 don't warn about this for CALL_EXPRs, so we shouldn't warn about
3793 the equivalent inlined version either. */
3794 if (is_gimple_assign (stmt))
3796 gcc_assert (gimple_assign_single_p (stmt)
3797 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
3798 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
3801 /* Output the inlining info for this abstract function, since it has been
3802 inlined. If we don't do this now, we can lose the information about the
3803 variables in the function when the blocks get blown away as soon as we
3804 remove the cgraph node. */
3805 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
3807 /* Update callgraph if needed. */
3808 cgraph_remove_node (cg_edge->callee);
3810 id->block = NULL_TREE;
3811 successfully_inlined = TRUE;
3814 input_location = saved_location;
3815 return successfully_inlined;
3818 /* Expand call statements reachable from STMT_P.
3819 We can only have CALL_EXPRs as the "toplevel" tree code or nested
3820 in a MODIFY_EXPR. See tree-gimple.c:get_call_expr_in(). Unfortunately, we
3821 cannot use that function here because we need a pointer
3822 to the CALL_EXPR, not the tree itself. */
3825 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
3827 gimple_stmt_iterator gsi;
3829 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3831 gimple stmt = gsi_stmt (gsi);
3833 if (is_gimple_call (stmt)
3834 && expand_call_inline (bb, stmt, id))
3842 /* Walk all basic blocks created after FIRST and try to fold every statement
3843 in the STATEMENTS pointer set. */
3846 fold_marked_statements (int first, struct pointer_set_t *statements)
3848 for (; first < n_basic_blocks; first++)
3849 if (BASIC_BLOCK (first))
3851 gimple_stmt_iterator gsi;
3853 for (gsi = gsi_start_bb (BASIC_BLOCK (first));
3856 if (pointer_set_contains (statements, gsi_stmt (gsi)))
3858 gimple old_stmt = gsi_stmt (gsi);
3859 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
3861 if (old_decl && DECL_BUILT_IN (old_decl))
3863 /* Folding builtins can create multiple instructions,
3864 we need to look at all of them. */
3865 gimple_stmt_iterator i2 = gsi;
3867 if (fold_stmt (&gsi))
3871 i2 = gsi_start_bb (BASIC_BLOCK (first));
3876 new_stmt = gsi_stmt (i2);
3877 update_stmt (new_stmt);
3878 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
3881 if (new_stmt == gsi_stmt (gsi))
3883 /* It is okay to check only for the very last
3884 of these statements. If it is a throwing
3885 statement nothing will change. If it isn't,
3886 this can remove EH edges. The problematic
3887 case would be if some intermediate stmts
3888 threw, but not the last one; that would mean
3889 we'd have to split the block, which we can't
3890 do here and we'd lose anyway. And as builtins
3891 probably never throw, this is all
3892 moot anyway. */
3893 if (maybe_clean_or_replace_eh_stmt (old_stmt,
3895 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
3902 else if (fold_stmt (&gsi))
3904 /* Re-read the statement from GSI as fold_stmt() may have changed it. */
3906 gimple new_stmt = gsi_stmt (gsi);
3907 update_stmt (new_stmt);
3909 if (is_gimple_call (old_stmt)
3910 || is_gimple_call (new_stmt))
3911 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
3914 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
3915 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
3921 /* Return true if BB has at least one abnormal outgoing edge. */
3924 has_abnormal_outgoing_edge_p (basic_block bb)
3929 FOR_EACH_EDGE (e, ei, bb->succs)
3930 if (e->flags & EDGE_ABNORMAL)
3936 /* Expand calls to inline functions in the body of FN. */
3939 optimize_inline_calls (tree fn)
3943 int last = n_basic_blocks;
3944 struct gimplify_ctx gctx;
3946 /* There is no point in performing inlining if errors have already
3947 occurred -- and we might crash if we try to inline invalid code. */
3949 if (errorcount || sorrycount)
3953 memset (&id, 0, sizeof (id));
3955 id.src_node = id.dst_node = cgraph_node (fn);
3957 /* Or any functions that aren't finished yet. */
3958 if (current_function_decl)
3959 id.dst_fn = current_function_decl;
3961 id.copy_decl = copy_decl_maybe_to_var;
3962 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
3963 id.transform_new_cfg = false;
3964 id.transform_return_to_modify = true;
3965 id.transform_lang_insert_block = NULL;
3966 id.statements_to_fold = pointer_set_create ();
3968 push_gimplify_context (&gctx);
3970 /* We make no attempts to keep dominance info up-to-date. */
3971 free_dominance_info (CDI_DOMINATORS);
3972 free_dominance_info (CDI_POST_DOMINATORS);
3974 /* Register specific gimple functions. */
3975 gimple_register_cfg_hooks ();
3977 /* Reach the trees by walking over the CFG, and note the
3978 enclosing basic-blocks in the call edges. */
3979 /* We walk the blocks going forward, because inlined function bodies
3980 will split id->current_basic_block, and the new blocks will
3981 follow it; we'll trudge through them, processing their CALL_EXPRs along the way. */
3984 gimple_expand_calls_inline (bb, &id);
3986 pop_gimplify_context (NULL);
3988 #ifdef ENABLE_CHECKING
3990 struct cgraph_edge *e;
3992 verify_cgraph_node (id.dst_node);
3994 /* Double check that we inlined everything we are supposed to inline. */
3995 for (e = id.dst_node->callees; e; e = e->next_callee)
3996 gcc_assert (e->inline_failed);
4000 /* Fold the statements before compacting/renumbering the basic blocks. */
4001 fold_marked_statements (last, id.statements_to_fold);
4002 pointer_set_destroy (id.statements_to_fold);
4004 gcc_assert (!id.debug_stmts);
4006 /* Renumber the (code) basic_blocks consecutively. */
4008 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4011 fold_cond_expr_cond ();
4012 delete_unreachable_blocks_update_callgraph (&id);
4013 #ifdef ENABLE_CHECKING
4014 verify_cgraph_node (id.dst_node);
4017 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4018 not possible yet - the IPA passes might make various functions not
4019 throw, and they don't care to proactively update local EH info. This is
4020 done later in the fixup_cfg pass, which also executes the verification. */
4021 return (TODO_update_ssa
4023 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4024 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
4027 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4030 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4032 enum tree_code code = TREE_CODE (*tp);
4033 enum tree_code_class cl = TREE_CODE_CLASS (code);
4035 /* We make copies of most nodes. */
4036 if (IS_EXPR_CODE_CLASS (cl)
4037 || code == TREE_LIST
4039 || code == TYPE_DECL
4040 || code == OMP_CLAUSE)
4042 /* Because the chain gets clobbered when we make a copy, we save it here. */
4044 tree chain = NULL_TREE, new_tree;
4046 chain = TREE_CHAIN (*tp);
4048 /* Copy the node. */
4049 new_tree = copy_node (*tp);
4051 /* Propagate mudflap marked-ness. */
4052 if (flag_mudflap && mf_marked_p (*tp))
4057 /* Now, restore the chain, if appropriate. That will cause
4058 walk_tree to walk into the chain as well. */
4059 if (code == PARM_DECL
4060 || code == TREE_LIST
4061 || code == OMP_CLAUSE)
4062 TREE_CHAIN (*tp) = chain;
4064 /* For now, we don't update BLOCKs when we make copies. So, we
4065 have to nullify all BIND_EXPRs. */
4066 if (TREE_CODE (*tp) == BIND_EXPR)
4067 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4069 else if (code == CONSTRUCTOR)
4071 /* CONSTRUCTOR nodes need special handling because
4072 we need to duplicate the vector of elements. */
4075 new_tree = copy_node (*tp);
4077 /* Propagate mudflap marked-ness. */
4078 if (flag_mudflap && mf_marked_p (*tp))
4081 CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc,
4082 CONSTRUCTOR_ELTS (*tp));
4085 else if (TREE_CODE_CLASS (code) == tcc_type)
4087 else if (TREE_CODE_CLASS (code) == tcc_declaration)
4089 else if (TREE_CODE_CLASS (code) == tcc_constant)
4092 gcc_assert (code != STATEMENT_LIST);
4096 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
4097 information indicating to what new SAVE_EXPR this one should be mapped,
4098 use that one. Otherwise, create a new node and enter it in ST. FN is
4099 the function into which the copy will be placed. */
4102 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
4104 struct pointer_map_t *st = (struct pointer_map_t *) st_;
4108 /* See if we already encountered this SAVE_EXPR. */
4109 n = (tree *) pointer_map_contains (st, *tp);
4111 /* If we didn't already remap this SAVE_EXPR, do so now. */
4114 t = copy_node (*tp);
4116 /* Remember this SAVE_EXPR. */
4117 *pointer_map_insert (st, *tp) = t;
4118 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
4119 *pointer_map_insert (st, t) = t;
4123 /* We've already walked into this SAVE_EXPR; don't do it again. */
4128 /* Replace this SAVE_EXPR with the copy. */
4132 /* Called via walk_tree. If *TP points to a DECL_STMT for a local label,
4133 copies the declaration and enters it in the splay_tree in DATA (which is
4134 really a `copy_body_data *'). */
4137 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
4140 copy_body_data *id = (copy_body_data *) data;
4142 /* Don't walk into types. */
4146 else if (TREE_CODE (*tp) == LABEL_EXPR)
4148 tree decl = TREE_OPERAND (*tp, 0);
4150 /* Copy the decl and remember the copy. */
4151 insert_decl_map (id, decl, id->copy_decl (decl, id));
4157 /* Perform any modifications to EXPR required when it is unsaved. Does
4158 not recurse into EXPR's subtrees. */
4161 unsave_expr_1 (tree expr)
4163 switch (TREE_CODE (expr))
4166 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4167 It's OK for this to happen if it was part of a subtree that
4168 isn't immediately expanded, such as operand 2 of another TARGET_EXPR. */
4170 if (TREE_OPERAND (expr, 1))
4173 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4174 TREE_OPERAND (expr, 3) = NULL_TREE;
4182 /* Called via walk_tree when an expression is unsaved. Using the
4183 splay_tree pointed to by ST (which is really a `splay_tree'),
4184 remaps all local declarations to appropriate replacements. */
4187 unsave_r (tree *tp, int *walk_subtrees, void *data)
4189 copy_body_data *id = (copy_body_data *) data;
4190 struct pointer_map_t *st = id->decl_map;
4193 /* Only a local declaration (variable or label). */
4194 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
4195 || TREE_CODE (*tp) == LABEL_DECL)
4197 /* Lookup the declaration. */
4198 n = (tree *) pointer_map_contains (st, *tp);
4200 /* If it's there, remap it. */
4205 else if (TREE_CODE (*tp) == STATEMENT_LIST)
4207 else if (TREE_CODE (*tp) == BIND_EXPR)
4208 copy_bind_expr (tp, walk_subtrees, id);
4209 else if (TREE_CODE (*tp) == SAVE_EXPR
4210 || TREE_CODE (*tp) == TARGET_EXPR)
4211 remap_save_expr (tp, st, walk_subtrees);
4214 copy_tree_r (tp, walk_subtrees, NULL);
4216 /* Do whatever unsaving is required. */
4217 unsave_expr_1 (*tp);
4220 /* Keep iterating. */
4224 /* Copies everything in EXPR and replaces variables, labels
4225 and SAVE_EXPRs local to EXPR. */
4228 unsave_expr_now (tree expr)
4232 /* There's nothing to do for NULL_TREE. */
4237 memset (&id, 0, sizeof (id));
4238 id.src_fn = current_function_decl;
4239 id.dst_fn = current_function_decl;
4240 id.decl_map = pointer_map_create ();
4241 id.debug_map = NULL;
4243 id.copy_decl = copy_decl_no_change;
4244 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4245 id.transform_new_cfg = false;
4246 id.transform_return_to_modify = false;
4247 id.transform_lang_insert_block = NULL;
4249 /* Walk the tree once to find local labels. */
4250 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
4252 /* Walk the tree again, copying, remapping, and unsaving. */
4253 walk_tree (&expr, unsave_r, &id, NULL);
4256 pointer_map_destroy (id.decl_map);
4258 pointer_map_destroy (id.debug_map);
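/* A hedged usage sketch, not called anywhere in GCC: unsave_expr_now is the
   entry point a front end can use to obtain a private copy of an expression
   in which SAVE_EXPRs, TARGET_EXPRs, local variables and local labels are
   replaced by fresh copies, following the two walks above (collect locals
   with mark_local_for_remap_r, then copy and remap with unsave_r).  The
   wrapper and parameter names here are hypothetical.  */

static tree
example_private_copy (tree shared_expr)
{
  /* The result can be modified freely without disturbing SHARED_EXPR.  */
  return unsave_expr_now (shared_expr);
}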
4263 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
4264 label, copies the declaration and enters it in the decl map in DATA (which
4265 is really a `copy_body_data *').  */
4268 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4269 bool *handled_ops_p ATTRIBUTE_UNUSED,
4270 struct walk_stmt_info *wi)
4272 copy_body_data *id = (copy_body_data *) wi->info;
4273 gimple stmt = gsi_stmt (*gsip);
4275 if (gimple_code (stmt) == GIMPLE_LABEL)
4277 tree decl = gimple_label_label (stmt);
4279 /* Copy the decl and remember the copy. */
4280 insert_decl_map (id, decl, id->copy_decl (decl, id));
4287 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4288 Using the decl map in the `copy_body_data' carried by WI->info,
4289 remaps all local declarations to appropriate replacements in gimple
4290 operands.  */
4293 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4295 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4296 copy_body_data *id = (copy_body_data *) wi->info;
4297 struct pointer_map_t *st = id->decl_map;
4301 /* Only a local declaration (variable or label). */
4302 if ((TREE_CODE (expr) == VAR_DECL
4303 && !TREE_STATIC (expr))
4304 || TREE_CODE (expr) == LABEL_DECL)
4306 /* Lookup the declaration. */
4307 n = (tree *) pointer_map_contains (st, expr);
4309 /* If it's there, remap it. */
4314 else if (TREE_CODE (expr) == STATEMENT_LIST
4315 || TREE_CODE (expr) == BIND_EXPR
4316 || TREE_CODE (expr) == SAVE_EXPR)
4318 else if (TREE_CODE (expr) == TARGET_EXPR)
4320 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4321 It's OK for this to happen if it was part of a subtree that
4322 isn't immediately expanded, such as operand 2 of another
4323 TARGET_EXPR.  */
4324 if (!TREE_OPERAND (expr, 1))
4326 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4327 TREE_OPERAND (expr, 3) = NULL_TREE;
4331 /* Keep iterating. */
4336 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4337 Using the decl map in the `copy_body_data' carried by WI->info,
4338 remaps all local declarations to appropriate replacements in gimple
4339 statements.  */
4342 replace_locals_stmt (gimple_stmt_iterator *gsip,
4343 bool *handled_ops_p ATTRIBUTE_UNUSED,
4344 struct walk_stmt_info *wi)
4346 copy_body_data *id = (copy_body_data *) wi->info;
4347 gimple stmt = gsi_stmt (*gsip);
4349 if (gimple_code (stmt) == GIMPLE_BIND)
4351 tree block = gimple_bind_block (stmt);
4355 remap_block (&block, id);
4356 gimple_bind_set_block (stmt, block);
4359 /* This will remap a lot of the same decls again, but this should be
4360 harmless.  */
4361 if (gimple_bind_vars (stmt))
4362 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), NULL, id));
4365 /* Keep iterating. */
4370 /* Copies everything in SEQ and replaces variables and labels local to
4371 current_function_decl. */
4374 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4377 struct walk_stmt_info wi;
4378 struct pointer_set_t *visited;
4381 /* There's nothing to do for NULL_TREE. */
4386 memset (&id, 0, sizeof (id));
4387 id.src_fn = current_function_decl;
4388 id.dst_fn = current_function_decl;
4389 id.decl_map = pointer_map_create ();
4390 id.debug_map = NULL;
4392 id.copy_decl = copy_decl_no_change;
4393 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4394 id.transform_new_cfg = false;
4395 id.transform_return_to_modify = false;
4396 id.transform_lang_insert_block = NULL;
4398 /* Walk the tree once to find local labels. */
4399 memset (&wi, 0, sizeof (wi));
4400 visited = pointer_set_create ();
4403 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4404 pointer_set_destroy (visited);
4406 copy = gimple_seq_copy (seq);
4408 /* Walk the copy, remapping decls. */
4409 memset (&wi, 0, sizeof (wi));
4411 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4414 pointer_map_destroy (id.decl_map);
4416 pointer_map_destroy (id.debug_map);
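/* A hedged usage sketch, not called anywhere in GCC: a pass that wants to
   emit a statement sequence on a second path can duplicate it with
   copy_gimple_seq_and_replace_locals so the duplicate's local variables and
   labels do not alias the original's.  The wrapper name is hypothetical.  */

static gimple_seq
example_duplicate_seq (gimple_seq body)
{
  /* Internally this is the two walk_gimple_seq passes above: collect the
     local labels, copy the sequence, then remap decls in the copy.  */
  return copy_gimple_seq_and_replace_locals (body);
}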
4422 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
4425 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4434 debug_find_tree (tree top, tree search)
4436 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
4440 /* Declare the variables created by the inliner. Add all the variables in
4441 VARS to BLOCK.  */
4444 declare_inline_vars (tree block, tree vars)
4447 for (t = vars; t; t = TREE_CHAIN (t))
4449 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4450 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4451 cfun->local_decls = tree_cons (NULL_TREE, t, cfun->local_decls);
4455 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4458 /* Finish setting up COPY, a duplicate of DECL.  DECL originally lived in
4459 ID->src_fn; give COPY the attributes, debug origin and context it needs
4460 to live in ID->dst_fn, and return it.  */
4463 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
4465 /* Don't generate debug information for the copy if we wouldn't have
4466 generated it for the original.  */
4467 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4468 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4470 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4471 declaration inspired this copy. */
4472 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4474 /* The new variable/label has no RTL, yet. */
4475 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4476 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
4477 SET_DECL_RTL (copy, NULL_RTX);
4479 /* These args would always appear unused, if not for this. */
4480 TREE_USED (copy) = 1;
4482 /* Set the context for the new declaration. */
4483 if (!DECL_CONTEXT (decl))
4484 /* Globals stay global. */
4486 else if (DECL_CONTEXT (decl) != id->src_fn)
4487 /* Things that weren't in the scope of the function we're inlining
4488 from aren't in the scope we're inlining to, either. */
4490 else if (TREE_STATIC (decl))
4491 /* Function-scoped static variables should stay in the original
4492 function.  */
4493 ;
4494 else
4495 /* Ordinary automatic local variables are now in the scope of the
4496 new function.  */
4497 DECL_CONTEXT (copy) = id->dst_fn;
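/* An illustrative, self-contained sketch (not GCC API) of the context
   decision copy_decl_for_dup_finish makes above: only ordinary automatics
   move into the destination function; globals, declarations from unrelated
   scopes, and function-local statics keep their original context.  The
   types and names below are hypothetical stand-ins.  */

struct example_fn;
struct example_decl
{
  struct example_fn *context;   /* 0 for globals */
  int is_static;
};

static struct example_fn *
example_context_for_copy (const struct example_decl *decl,
                          struct example_fn *src_fn,
                          struct example_fn *dst_fn)
{
  if (!decl->context)
    return 0;                   /* Globals stay global.  */
  if (decl->context != src_fn)
    return decl->context;       /* Not in the source function's scope.  */
  if (decl->is_static)
    return decl->context;       /* Function-local statics stay put.  */
  return dst_fn;                /* Ordinary automatics move.  */
}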
4503 copy_decl_to_var (tree decl, copy_body_data *id)
4507 gcc_assert (TREE_CODE (decl) == PARM_DECL
4508 || TREE_CODE (decl) == RESULT_DECL);
4510 type = TREE_TYPE (decl);
4512 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4513 VAR_DECL, DECL_NAME (decl), type);
4514 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4515 TREE_READONLY (copy) = TREE_READONLY (decl);
4516 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4517 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4519 return copy_decl_for_dup_finish (id, decl, copy);
4522 /* Like copy_decl_to_var, but create a return slot object instead of a
4523 pointer variable for return by invisible reference. */
4526 copy_result_decl_to_var (tree decl, copy_body_data *id)
4530 gcc_assert (TREE_CODE (decl) == PARM_DECL
4531 || TREE_CODE (decl) == RESULT_DECL);
4533 type = TREE_TYPE (decl);
4534 if (DECL_BY_REFERENCE (decl))
4535 type = TREE_TYPE (type);
4537 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4538 VAR_DECL, DECL_NAME (decl), type);
4539 TREE_READONLY (copy) = TREE_READONLY (decl);
4540 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4541 if (!DECL_BY_REFERENCE (decl))
4543 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4544 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4547 return copy_decl_for_dup_finish (id, decl, copy);
4551 copy_decl_no_change (tree decl, copy_body_data *id)
4555 copy = copy_node (decl);
4557 /* The COPY is not abstract; it will be generated in DST_FN. */
4558 DECL_ABSTRACT (copy) = 0;
4559 lang_hooks.dup_lang_specific_decl (copy);
4561 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
4562 been taken; it's for internal bookkeeping in expand_goto_internal. */
4563 if (TREE_CODE (copy) == LABEL_DECL)
4565 TREE_ADDRESSABLE (copy) = 0;
4566 LABEL_DECL_UID (copy) = -1;
4569 return copy_decl_for_dup_finish (id, decl, copy);
4573 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
4575 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
4576 return copy_decl_to_var (decl, id);
4578 return copy_decl_no_change (decl, id);
4581 /* Return a copy of the function's argument tree. */
4583 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
4584 bitmap args_to_skip, tree *vars)
4587 tree new_parm = NULL;
4592 for (arg = orig_parm; arg; arg = TREE_CHAIN (arg), i++)
4593 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
4595 tree new_tree = remap_decl (arg, id);
4596 lang_hooks.dup_lang_specific_decl (new_tree);
4598 parg = &TREE_CHAIN (new_tree);
4600 else if (!pointer_map_contains (id->decl_map, arg))
4602 /* Make an equivalent VAR_DECL. If the argument was used
4603 as a temporary variable later in the function, the uses will be
4604 replaced by the local variable.  */
4605 tree var = copy_decl_to_var (arg, id);
4607 add_referenced_var (var);
4608 insert_decl_map (id, arg, var);
4609 /* Declare this new variable. */
4610 TREE_CHAIN (var) = *vars;
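/* An illustrative, self-contained sketch (not GCC API) of the filtering
   shape of copy_arguments_for_versioning above: arguments whose index is
   not in the skip set are chained into the new parameter list, while the
   rest are marked so the caller can give them a substitute local variable.
   In GCC the kept arguments are remapped copies; here, to stay standalone,
   the original nodes are reused.  All names are hypothetical.  */

struct example_arg { struct example_arg *next; int kept; };

static struct example_arg *
example_filter_args (struct example_arg *args,
                     const unsigned char *skip /* skip[i] != 0 drops arg i */)
{
  struct example_arg *new_list = 0, **tail = &new_list;
  int i = 0;

  for (; args; args = args->next, i++)
    if (!skip || !skip[i])
      {
        args->kept = 1;         /* Becomes a parameter of the new version.  */
        *tail = args;
        tail = &args->next;
      }
    else
      args->kept = 0;           /* Would become a local variable instead.  */

  *tail = 0;
  return new_list;
}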
4616 /* Return a copy of the function's static chain. */
4618 copy_static_chain (tree static_chain, copy_body_data * id)
4620 tree *chain_copy, *pvar;
4622 chain_copy = &static_chain;
4623 for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
4625 tree new_tree = remap_decl (*pvar, id);
4626 lang_hooks.dup_lang_specific_decl (new_tree);
4627 TREE_CHAIN (new_tree) = TREE_CHAIN (*pvar);
4630 return static_chain;
4633 /* Return true if the function is allowed to be versioned.
4634 This is a guard for the versioning functionality. */
4637 tree_versionable_function_p (tree fndecl)
4639 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
4640 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
4643 /* Delete all unreachable basic blocks and update callgraph.
4644 Doing so is somewhat nontrivial because we need to update all clones and
4645 remove inline functions that become unreachable.  */
4648 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
4650 bool changed = false;
4651 basic_block b, next_bb;
4653 find_unreachable_blocks ();
4655 /* Delete all unreachable basic blocks. */
4657 for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
4659 next_bb = b->next_bb;
4661 if (!(b->flags & BB_REACHABLE))
4663 gimple_stmt_iterator bsi;
4665 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
4666 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL)
4668 struct cgraph_edge *e;
4669 struct cgraph_node *node;
4671 if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
4673 if (!e->inline_failed)
4674 cgraph_remove_node_and_inline_clones (e->callee);
4676 cgraph_remove_edge (e);
4678 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
4679 && id->dst_node->clones)
4680 for (node = id->dst_node->clones; node != id->dst_node;)
4682 if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
4684 if (!e->inline_failed)
4685 cgraph_remove_node_and_inline_clones (e->callee);
4687 cgraph_remove_edge (e);
4691 node = node->clones;
4692 else if (node->next_sibling_clone)
4693 node = node->next_sibling_clone;
4696 while (node != id->dst_node && !node->next_sibling_clone)
4697 node = node->clone_of;
4698 if (node != id->dst_node)
4699 node = node->next_sibling_clone;
4703 delete_basic_block (b);
4709 tidy_fallthru_edges ();
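/* An illustrative, self-contained sketch (not GCC API) of the clone-tree
   walk used above and again in update_clone_info below.  Clones form a
   tree: `clones' is the first child, `next_sibling_clone' the next sibling
   and `clone_of' the parent, so the walk descends first, then moves to a
   sibling, and otherwise climbs until it finds a sibling or returns to the
   root.  The structure and names below are hypothetical stand-ins.  */

struct example_clone
{
  struct example_clone *clones;                 /* first child */
  struct example_clone *next_sibling_clone;     /* next sibling */
  struct example_clone *clone_of;               /* parent */
};

static void
example_walk_clones (struct example_clone *root,
                     void (*visit) (struct example_clone *))
{
  struct example_clone *node;

  if (!root->clones)
    return;
  for (node = root->clones; node != root;)
    {
      visit (node);
      if (node->clones)
        node = node->clones;
      else if (node->next_sibling_clone)
        node = node->next_sibling_clone;
      else
        {
          while (node != root && !node->next_sibling_clone)
            node = node->clone_of;
          if (node != root)
            node = node->next_sibling_clone;
        }
    }
}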
4713 /* Update clone info after duplication. */
4716 update_clone_info (copy_body_data * id)
4718 struct cgraph_node *node;
4719 if (!id->dst_node->clones)
4721 for (node = id->dst_node->clones; node != id->dst_node;)
4723 /* First update replace maps to match the new body. */
4724 if (node->clone.tree_map)
4727 for (i = 0; i < VEC_length (ipa_replace_map_p, node->clone.tree_map); i++)
4729 struct ipa_replace_map *replace_info;
4730 replace_info = VEC_index (ipa_replace_map_p, node->clone.tree_map, i);
4731 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
4732 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
4736 node = node->clones;
4737 else if (node->next_sibling_clone)
4738 node = node->next_sibling_clone;
4741 while (node != id->dst_node && !node->next_sibling_clone)
4742 node = node->clone_of;
4743 if (node != id->dst_node)
4744 node = node->next_sibling_clone;
4749 /* Create a copy of a function's tree.
4750 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
4751 of the original function and the new copied function
4752 respectively.  If a DECL tree is to be replaced with another tree while
4753 duplicating the function's body, TREE_MAP gives the mapping between these
4754 trees.  If UPDATE_CLONES is set, the call_stmt fields of the edges of the
4755 function's clones will be updated.  ARGS_TO_SKIP, if non-NULL, marks the
4756 arguments that the new version does not take.  */
4758 tree_function_versioning (tree old_decl, tree new_decl,
4759 VEC(ipa_replace_map_p,gc)* tree_map,
4760 bool update_clones, bitmap args_to_skip)
4762 struct cgraph_node *old_version_node;
4763 struct cgraph_node *new_version_node;
4767 struct ipa_replace_map *replace_info;
4768 basic_block old_entry_block, bb;
4769 VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10);
4772 tree old_current_function_decl = current_function_decl;
4773 tree vars = NULL_TREE;
4775 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
4776 && TREE_CODE (new_decl) == FUNCTION_DECL);
4777 DECL_POSSIBLY_INLINED (old_decl) = 1;
4779 old_version_node = cgraph_node (old_decl);
4780 new_version_node = cgraph_node (new_decl);
4782 /* Output the inlining info for this abstract function, since it has been
4783 inlined. If we don't do this now, we can lose the information about the
4784 variables in the function when the blocks get blown away as soon as we
4785 remove the cgraph node. */
4786 (*debug_hooks->outlining_inline_function) (old_decl);
4788 DECL_ARTIFICIAL (new_decl) = 1;
4789 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
4790 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
4792 /* Prepare the data structures for the tree copy. */
4793 memset (&id, 0, sizeof (id));
4795 /* Set up the bookkeeping used to fold statements in the new version.  */
4796 id.statements_to_fold = pointer_set_create ();
4798 id.decl_map = pointer_map_create ();
4799 id.debug_map = NULL;
4800 id.src_fn = old_decl;
4801 id.dst_fn = new_decl;
4802 id.src_node = old_version_node;
4803 id.dst_node = new_version_node;
4804 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
4805 if (id.src_node->ipa_transforms_to_apply)
4807 VEC(ipa_opt_pass,heap) * old_transforms_to_apply = id.dst_node->ipa_transforms_to_apply;
4810 id.dst_node->ipa_transforms_to_apply = VEC_copy (ipa_opt_pass, heap,
4811 id.src_node->ipa_transforms_to_apply);
4812 for (i = 0; i < VEC_length (ipa_opt_pass, old_transforms_to_apply); i++)
4813 VEC_safe_push (ipa_opt_pass, heap, id.dst_node->ipa_transforms_to_apply,
4814 VEC_index (ipa_opt_pass,
4815 old_transforms_to_apply,
4819 id.copy_decl = copy_decl_no_change;
4820 id.transform_call_graph_edges
4821 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
4822 id.transform_new_cfg = true;
4823 id.transform_return_to_modify = false;
4824 id.transform_lang_insert_block = NULL;
4826 current_function_decl = new_decl;
4827 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
4828 (DECL_STRUCT_FUNCTION (old_decl));
4829 initialize_cfun (new_decl, old_decl,
4830 old_entry_block->count);
4831 push_cfun (DECL_STRUCT_FUNCTION (new_decl));
4833 /* Copy the function's static chain. */
4834 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
4836 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
4837 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
4840 /* If there's a tree_map, prepare for substitution. */
4842 for (i = 0; i < VEC_length (ipa_replace_map_p, tree_map); i++)
4845 replace_info = VEC_index (ipa_replace_map_p, tree_map, i);
4846 if (replace_info->replace_p)
4848 tree op = replace_info->new_tree;
4852 if (TREE_CODE (op) == VIEW_CONVERT_EXPR)
4853 op = TREE_OPERAND (op, 0);
4855 if (TREE_CODE (op) == ADDR_EXPR)
4857 op = TREE_OPERAND (op, 0);
4858 while (handled_component_p (op))
4859 op = TREE_OPERAND (op, 0);
4860 if (TREE_CODE (op) == VAR_DECL)
4861 add_referenced_var (op);
4863 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
4864 init = setup_one_parameter (&id, replace_info->old_tree,
4865 replace_info->new_tree, id.src_fn,
4869 VEC_safe_push (gimple, heap, init_stmts, init);
4872 /* Copy the function's arguments. */
4873 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
4874 DECL_ARGUMENTS (new_decl) =
4875 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
4876 args_to_skip, &vars);
4878 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
4880 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4881 number_blocks (id.dst_fn);
4883 declare_inline_vars (DECL_INITIAL (new_decl), vars);
4885 if (DECL_STRUCT_FUNCTION (old_decl)->local_decls != NULL_TREE)
4886 /* Add local vars. */
4887 for (t_step = DECL_STRUCT_FUNCTION (old_decl)->local_decls;
4888 t_step; t_step = TREE_CHAIN (t_step))
4890 tree var = TREE_VALUE (t_step);
4891 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
4892 cfun->local_decls = tree_cons (NULL_TREE, var, cfun->local_decls);
4893 else if (!can_be_nonlocal (var, &id))
4895 tree_cons (NULL_TREE, remap_decl (var, &id),
4899 /* Copy the function's body.  */
4900 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
4901 ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR);
4903 if (DECL_RESULT (old_decl) != NULL_TREE)
4905 tree *res_decl = &DECL_RESULT (old_decl);
4906 DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
4907 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
4910 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4911 number_blocks (new_decl);
4913 /* We want to create the BB unconditionally, so that the addition of
4914 debug stmts doesn't affect BB count, which may in the end cause
4915 codegen differences. */
4916 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
4917 while (VEC_length (gimple, init_stmts))
4918 insert_init_stmt (&id, bb, VEC_pop (gimple, init_stmts));
4919 update_clone_info (&id);
4921 /* Remap the nonlocal_goto_save_area, if any. */
4922 if (cfun->nonlocal_goto_save_area)
4924 struct walk_stmt_info wi;
4926 memset (&wi, 0, sizeof (wi));
4928 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
4932 pointer_map_destroy (id.decl_map);
4934 pointer_map_destroy (id.debug_map);
4935 free_dominance_info (CDI_DOMINATORS);
4936 free_dominance_info (CDI_POST_DOMINATORS);
4938 fold_marked_statements (0, id.statements_to_fold);
4939 pointer_set_destroy (id.statements_to_fold);
4940 fold_cond_expr_cond ();
4941 delete_unreachable_blocks_update_callgraph (&id);
4942 update_ssa (TODO_update_ssa);
4943 free_dominance_info (CDI_DOMINATORS);
4944 free_dominance_info (CDI_POST_DOMINATORS);
4946 gcc_assert (!id.debug_stmts);
4947 VEC_free (gimple, heap, init_stmts);
4949 current_function_decl = old_current_function_decl;
4950 gcc_assert (!current_function_decl
4951 || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
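/* A hedged usage sketch, not called anywhere in GCC: how an IPA pass might
   use tree_function_versioning to produce a plain copy of a function.  The
   new FUNCTION_DECL would normally come from the cgraph cloning machinery;
   the wrapper name here is hypothetical, and no replacement map or skipped
   arguments are supplied.  */

static void
example_make_plain_version (tree old_fndecl, tree new_fndecl)
{
  /* NULL tree_map: no parameter is replaced by another tree.
     false: create a fresh version rather than updating existing clones.
     NULL args_to_skip: the new version keeps every argument.  */
  tree_function_versioning (old_fndecl, new_fndecl, NULL, false, NULL);
}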
4955 /* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
4956 the callee and return the inlined body on success. */
4959 maybe_inline_call_in_expr (tree exp)
4961 tree fn = get_callee_fndecl (exp);
4963 /* We can only try to inline "const" functions. */
4964 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
4966 struct pointer_map_t *decl_map = pointer_map_create ();
4967 call_expr_arg_iterator iter;
4971 /* Remap the parameters. */
4972 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
4974 param = TREE_CHAIN (param), arg = next_call_expr_arg (&iter))
4975 *pointer_map_insert (decl_map, param) = arg;
4977 memset (&id, 0, sizeof (id));
4979 id.dst_fn = current_function_decl;
4980 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
4981 id.decl_map = decl_map;
4983 id.copy_decl = copy_decl_no_change;
4984 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4985 id.transform_new_cfg = false;
4986 id.transform_return_to_modify = true;
4987 id.transform_lang_insert_block = NULL;
4989 /* Make sure not to unshare trees behind the front-end's back
4990 since front-end specific mechanisms may rely on sharing. */
4991 id.regimplify = false;
4992 id.do_not_unshare = true;
4994 /* We're not inside any EH region. */
4997 t = copy_tree_body (&id);
4998 pointer_map_destroy (decl_map);
5000 /* We can only return something suitable for use in a GENERIC
5001 expression tree.  */
5002 if (TREE_CODE (t) == MODIFY_EXPR)
5003 return TREE_OPERAND (t, 1);
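/* A hedged usage sketch, not called anywhere in GCC: a front end folding a
   GENERIC CALL_EXPR to a "const" function with a saved body can try
   maybe_inline_call_in_expr and keep the original call otherwise.  The
   wrapper name is hypothetical, and the sketch assumes the function yields
   NULL_TREE when it cannot substitute the body.  */

static tree
example_fold_const_call (tree call)
{
  tree inlined = maybe_inline_call_in_expr (call);
  return inlined ? inlined : call;
}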
5009 /* Duplicate a type, fields and all. */
5012 build_duplicate_type (tree type)
5014 struct copy_body_data id;
5016 memset (&id, 0, sizeof (id));
5017 id.src_fn = current_function_decl;
5018 id.dst_fn = current_function_decl;
5020 id.decl_map = pointer_map_create ();
5021 id.debug_map = NULL;
5022 id.copy_decl = copy_decl_no_change;
5024 type = remap_type_1 (type, &id);
5026 pointer_map_destroy (id.decl_map);
5028 pointer_map_destroy (id.debug_map);
5030 TYPE_CANONICAL (type) = type;
5035 /* Return whether it is safe to inline a function: the callee must not use
5036 incompatible target-specific options, and the call site's actual argument
5037 types must match the parameter types.  E is the call edge to be checked.  */
5039 tree_can_inline_p (struct cgraph_edge *e)
5042 /* This causes a regression in SPEC in that it prevents a cold function from
5043 inlining a hot function. Perhaps this should only apply to functions
5044 that the user declares hot/cold/optimize explicitly. */
5046 /* Don't inline a function with a higher optimization level than the
5047 caller, or with different space constraints (hot/cold functions). */
5048 tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (caller);
5049 tree callee_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (callee);
5051 if (caller_tree != callee_tree)
5053 struct cl_optimization *caller_opt
5054 = TREE_OPTIMIZATION ((caller_tree)
5056 : optimization_default_node);
5058 struct cl_optimization *callee_opt
5059 = TREE_OPTIMIZATION ((callee_tree)
5061 : optimization_default_node);
5063 if ((caller_opt->optimize > callee_opt->optimize)
5064 || (caller_opt->optimize_size != callee_opt->optimize_size))
5068 tree caller, callee;
5070 caller = e->caller->decl;
5071 callee = e->callee->decl;
5073 /* We cannot inline a function that uses a different EH personality
5074 than the caller.  */
5075 if (DECL_FUNCTION_PERSONALITY (caller)
5076 && DECL_FUNCTION_PERSONALITY (callee)
5077 && (DECL_FUNCTION_PERSONALITY (caller)
5078 != DECL_FUNCTION_PERSONALITY (callee)))
5080 e->inline_failed = CIF_UNSPECIFIED;
5081 gimple_call_set_cannot_inline (e->call_stmt, true);
5085 /* Allow the backend to decide if inlining is ok. */
5086 if (!targetm.target_option.can_inline_p (caller, callee))
5088 e->inline_failed = CIF_TARGET_OPTION_MISMATCH;
5089 gimple_call_set_cannot_inline (e->call_stmt, true);
5090 e->call_stmt_cannot_inline_p = true;
5095 && !gimple_check_call_args (e->call_stmt))
5097 e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
5098 gimple_call_set_cannot_inline (e->call_stmt, true);
5099 e->call_stmt_cannot_inline_p = true;