2 Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Contributed by Alexandre Oliva <aoliva@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "tree-inline.h"
34 #include "insn-config.h"
36 #include "langhooks.h"
37 #include "basic-block.h"
38 #include "tree-iterator.h"
41 #include "tree-mudflap.h"
42 #include "tree-flow.h"
46 #include "diagnostic.h"
49 #include "pointer-set.h"
51 #include "value-prof.h"
52 #include "tree-pass.h"
54 #include "integrate.h"
56 /* I'm not real happy about this, but we need to handle gimple and
60 /* Inlining, Cloning, Versioning, Parallelization
62 Inlining: a function body is duplicated, but the PARM_DECLs are
63 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
64 MODIFY_EXPRs that store to a dedicated returned-value variable.
65 The duplicated eh_region info of the copy will later be appended
66 to the info for the caller; the eh_region info in copied throwing
67 statements and RESX statements are adjusted accordingly.
69 Cloning: (only in C++) We have one body for a con/de/structor, and
70 multiple function decls, each with a unique parameter list.
71 Duplicate the body, using the given splay tree; some parameters
72 will become constants (like 0 or 1).
74 Versioning: a function body is duplicated and the result is a new
75 function, rather than being inserted into blocks of an existing
76 function as with inlining. Some parameters will become constants.
78 Parallelization: a region of a function is duplicated resulting in
79 a new function. Variables may be replaced with complex expressions
80 to enable shared variable semantics.
82 All of these will simultaneously look up any callgraph edges. If
83 we're going to inline the duplicated function body, and the given
84 function has some cloned callgraph nodes (one for each place this
85 function will be inlined) those callgraph edges will be duplicated.
86 If we're cloning the body, those callgraph edges will be
87 updated to point into the new body. (Note that the original
88 callgraph node and edge list will not be altered.)
90 See the CALL_EXPR handling case in copy_tree_body_r (). */
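/* For illustration only (not taken from any particular testcase), inlining

     int inc (int x) { return x + 1; }
     ...
     y = inc (3);

   conceptually rewrites the call site along these lines: the PARM_DECL x
   becomes a local VAR_DECL initialized from the argument, and the
   RETURN_EXPR becomes an assignment to a dedicated return variable:

     x.1 = 3;
     retval.2 = x.1 + 1;
     y = retval.2;

   The temporaries x.1 and retval.2 are hypothetical names used only for
   this sketch.  */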
94 o In order to make inlining-on-trees work, we pessimized
95 function-local static constants. In particular, they are now
96 always output, even when not addressed. Fix this by treating
97 function-local static constants just like global static
98 constants; the back-end already knows not to output them if they
101 o Provide heuristics to clamp inlining of recursive template
105 /* Weights that estimate_num_insns uses for heuristics in inlining. */
107 eni_weights eni_inlining_weights;
109 /* Weights that estimate_num_insns uses to estimate the size of the
112 eni_weights eni_size_weights;
114 /* Weights that estimate_num_insns uses to estimate the time necessary
115 to execute the produced code. */
117 eni_weights eni_time_weights;
121 static tree declare_return_variable (copy_body_data *, tree, tree);
122 static void remap_block (tree *, copy_body_data *);
123 static void copy_bind_expr (tree *, int *, copy_body_data *);
124 static tree mark_local_for_remap_r (tree *, int *, void *);
125 static void unsave_expr_1 (tree);
126 static tree unsave_r (tree *, int *, void *);
127 static void declare_inline_vars (tree, tree);
128 static void remap_save_expr (tree *, void *, int *);
129 static void prepend_lexical_block (tree current_block, tree new_block);
130 static tree copy_decl_to_var (tree, copy_body_data *);
131 static tree copy_result_decl_to_var (tree, copy_body_data *);
132 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
133 static gimple remap_gimple_stmt (gimple, copy_body_data *);
134 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
136 /* Insert a tree->tree mapping for ID. Although the name suggests
137 that the trees should be variables, it is used for more than that. */
140 insert_decl_map (copy_body_data *id, tree key, tree value)
142 *pointer_map_insert (id->decl_map, key) = value;
144 /* Always insert an identity map as well. If we see this same new
145 node again, we won't want to duplicate it a second time. */
147 *pointer_map_insert (id->decl_map, value) = value;
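/* Illustrative example: after remapping OLD_VAR to NEW_VAR the map holds
   both OLD_VAR -> NEW_VAR and NEW_VAR -> NEW_VAR, so a later walk that
   reaches NEW_VAR simply maps it to itself instead of creating yet
   another copy.  */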
150 /* Insert a tree->tree mapping for ID. This is only used for
154 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
156 if (!gimple_in_ssa_p (id->src_cfun))
159 if (!MAY_HAVE_DEBUG_STMTS)
162 if (!target_for_debug_bind (key))
165 gcc_assert (TREE_CODE (key) == PARM_DECL);
166 gcc_assert (TREE_CODE (value) == VAR_DECL);
169 id->debug_map = pointer_map_create ();
171 *pointer_map_insert (id->debug_map, key) = value;
174 /* Construct new SSA name for old NAME. ID is the inline context. */
177 remap_ssa_name (tree name, copy_body_data *id)
182 gcc_assert (TREE_CODE (name) == SSA_NAME);
184 n = (tree *) pointer_map_contains (id->decl_map, name);
186 return unshare_expr (*n);
188 /* Do not set DEF_STMT yet as statement is not copied yet. We do that
190 new_tree = remap_decl (SSA_NAME_VAR (name), id);
192 /* We might've substituted constant or another SSA_NAME for
195 Replace the SSA name representing RESULT_DECL by a variable during
196 inlining: this saves us from the need to introduce a PHI node in case
197 the return value is only partly initialized. */
198 if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
199 && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
200 || !id->transform_return_to_modify))
202 new_tree = make_ssa_name (new_tree, NULL);
203 insert_decl_map (id, name, new_tree);
204 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
205 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
206 TREE_TYPE (new_tree) = TREE_TYPE (SSA_NAME_VAR (new_tree));
207 if (gimple_nop_p (SSA_NAME_DEF_STMT (name)))
209 /* By inlining a function having an uninitialized variable, we might
210 extend its lifetime (the variable might get reused). This causes
211 an ICE in the case we end up extending the lifetime of an SSA name
212 across an abnormal edge, and it also increases register pressure.
214 We simply initialize all uninitialized vars with 0, except
215 when we are inlining into the very first BB. We could avoid
216 this for all BBs that are not inside strongly connected
217 regions of the CFG, but this is expensive to test. */
219 && is_gimple_reg (SSA_NAME_VAR (name))
220 && TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL
221 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR, 0)->dest
222 || EDGE_COUNT (id->entry_bb->preds) != 1))
224 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
227 init_stmt = gimple_build_assign (new_tree,
228 fold_convert (TREE_TYPE (new_tree),
230 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
231 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
235 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
236 if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name))
238 set_default_def (SSA_NAME_VAR (new_tree), new_tree);
243 insert_decl_map (id, name, new_tree);
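/* An illustrative example of the zero-initialization above: when inlining
   something like

     int f (int c) { int x; if (c) x = 1; return x; }

   a use of the possibly uninitialized x reaches the inlined copy, so an
   assignment such as x_5 = 0 is emitted in the entry block of the inlined
   body rather than letting a default definition leak across abnormal
   edges.  The name x_5 is hypothetical.  */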
247 /* If nonzero, we're remapping the contents of inlined debug
248 statements. If negative, an error has occurred, such as a
249 reference to a variable that isn't available in the inlined
251 int processing_debug_stmt = 0;
253 /* Remap DECL during the copying of the BLOCK tree for the function. */
256 remap_decl (tree decl, copy_body_data *id)
260 /* We only remap local variables in the current function. */
262 /* See if we have remapped this declaration. */
264 n = (tree *) pointer_map_contains (id->decl_map, decl);
266 if (!n && processing_debug_stmt)
268 processing_debug_stmt = -1;
272 /* If we didn't already have an equivalent for this declaration,
276 /* Make a copy of the variable or label. */
277 tree t = id->copy_decl (decl, id);
279 /* Remember it, so that if we encounter this local entity again
280 we can reuse this copy. Do this early because remap_type may
281 need this decl for TYPE_STUB_DECL. */
282 insert_decl_map (id, decl, t);
287 /* Remap types, if necessary. */
288 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
289 if (TREE_CODE (t) == TYPE_DECL)
290 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
292 /* Remap sizes as necessary. */
293 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
294 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
296 /* If fields, do likewise for offset and qualifier. */
297 if (TREE_CODE (t) == FIELD_DECL)
299 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
300 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
301 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
304 if (cfun && gimple_in_ssa_p (cfun)
305 && (TREE_CODE (t) == VAR_DECL
306 || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
308 tree def = gimple_default_def (id->src_cfun, decl);
310 if (TREE_CODE (decl) != PARM_DECL && def)
312 tree map = remap_ssa_name (def, id);
313 /* Watch out for RESULT_DECLs whose SSA names map directly
315 if (TREE_CODE (map) == SSA_NAME
316 && gimple_nop_p (SSA_NAME_DEF_STMT (map)))
317 set_default_def (t, map);
319 add_referenced_var (t);
324 if (id->do_not_unshare)
327 return unshare_expr (*n);
331 remap_type_1 (tree type, copy_body_data *id)
335 /* We do need a copy. Build and register it now. If this is a pointer or
336 reference type, remap the designated type and make a new pointer or
338 if (TREE_CODE (type) == POINTER_TYPE)
340 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
342 TYPE_REF_CAN_ALIAS_ALL (type));
343 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
344 new_tree = build_type_attribute_qual_variant (new_tree,
345 TYPE_ATTRIBUTES (type),
347 insert_decl_map (id, type, new_tree);
350 else if (TREE_CODE (type) == REFERENCE_TYPE)
352 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
354 TYPE_REF_CAN_ALIAS_ALL (type));
355 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
356 new_tree = build_type_attribute_qual_variant (new_tree,
357 TYPE_ATTRIBUTES (type),
359 insert_decl_map (id, type, new_tree);
363 new_tree = copy_node (type);
365 insert_decl_map (id, type, new_tree);
367 /* This is a new type, not a copy of an old type. Need to reassociate
368 variants. We can handle everything except the main variant lazily. */
369 t = TYPE_MAIN_VARIANT (type);
372 t = remap_type (t, id);
373 TYPE_MAIN_VARIANT (new_tree) = t;
374 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
375 TYPE_NEXT_VARIANT (t) = new_tree;
379 TYPE_MAIN_VARIANT (new_tree) = new_tree;
380 TYPE_NEXT_VARIANT (new_tree) = NULL;
383 if (TYPE_STUB_DECL (type))
384 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
386 /* Lazily create pointer and reference types. */
387 TYPE_POINTER_TO (new_tree) = NULL;
388 TYPE_REFERENCE_TO (new_tree) = NULL;
390 switch (TREE_CODE (new_tree))
394 case FIXED_POINT_TYPE:
397 t = TYPE_MIN_VALUE (new_tree);
398 if (t && TREE_CODE (t) != INTEGER_CST)
399 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
401 t = TYPE_MAX_VALUE (new_tree);
402 if (t && TREE_CODE (t) != INTEGER_CST)
403 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
407 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
408 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
412 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
413 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
418 case QUAL_UNION_TYPE:
422 for (f = TYPE_FIELDS (new_tree); f ; f = TREE_CHAIN (f))
424 t = remap_decl (f, id);
425 DECL_CONTEXT (t) = new_tree;
429 TYPE_FIELDS (new_tree) = nreverse (nf);
435 /* Shouldn't have been thought variable sized. */
439 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
440 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
446 remap_type (tree type, copy_body_data *id)
454 /* See if we have remapped this type. */
455 node = (tree *) pointer_map_contains (id->decl_map, type);
459 /* The type only needs remapping if it's variably modified. */
460 if (! variably_modified_type_p (type, id->src_fn))
462 insert_decl_map (id, type, type);
466 id->remapping_type_depth++;
467 tmp = remap_type_1 (type, id);
468 id->remapping_type_depth--;
473 /* Return previously remapped type of TYPE in ID. Return NULL if TYPE
474 is NULL or TYPE has not been remapped before. */
477 remapped_type (tree type, copy_body_data *id)
484 /* See if we have remapped this type. */
485 node = (tree *) pointer_map_contains (id->decl_map, type);
492 /* The type only needs remapping if it's variably modified. */
493 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
496 can_be_nonlocal (tree decl, copy_body_data *id)
498 /* We cannot duplicate function decls. */
499 if (TREE_CODE (decl) == FUNCTION_DECL)
502 /* Local static vars must be non-local or we get multiple declaration
504 if (TREE_CODE (decl) == VAR_DECL
505 && !auto_var_in_fn_p (decl, id->src_fn))
508 /* At the moment dwarf2out can handle only these types of nodes. We
509 can support more later. */
510 if (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != PARM_DECL)
513 /* We must use the global type. We call remapped_type instead of
514 remap_type since we don't want to remap this type here if it
515 hasn't been remapped before. */
516 if (TREE_TYPE (decl) != remapped_type (TREE_TYPE (decl), id))
519 /* Without SSA we can't tell if the variable is used. */
520 if (!gimple_in_ssa_p (cfun))
523 /* Live variables must be copied so we can attach DECL_RTL. */
531 remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
534 tree new_decls = NULL_TREE;
536 /* Remap its variables. */
537 for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
540 tree origin_var = DECL_ORIGIN (old_var);
542 if (can_be_nonlocal (old_var, id))
544 if (TREE_CODE (old_var) == VAR_DECL
545 && ! DECL_EXTERNAL (old_var)
546 && (var_ann (old_var) || !gimple_in_ssa_p (cfun)))
547 cfun->local_decls = tree_cons (NULL_TREE, old_var,
549 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
550 && !DECL_IGNORED_P (old_var)
551 && nonlocalized_list)
552 VEC_safe_push (tree, gc, *nonlocalized_list, origin_var);
556 /* Remap the variable. */
557 new_var = remap_decl (old_var, id);
559 /* If we didn't remap this variable, we can't mess with its
560 TREE_CHAIN. If we remapped this variable to the return slot, it's
561 already declared somewhere else, so don't declare it here. */
563 if (new_var == id->retvar)
567 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
568 && !DECL_IGNORED_P (old_var)
569 && nonlocalized_list)
570 VEC_safe_push (tree, gc, *nonlocalized_list, origin_var);
574 gcc_assert (DECL_P (new_var));
575 TREE_CHAIN (new_var) = new_decls;
580 return nreverse (new_decls);
583 /* Copy the BLOCK to contain remapped versions of the variables
584 therein. And hook the new block into the block-tree. */
587 remap_block (tree *block, copy_body_data *id)
592 /* Make the new block. */
594 new_block = make_node (BLOCK);
595 TREE_USED (new_block) = TREE_USED (old_block);
596 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
597 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
598 BLOCK_NONLOCALIZED_VARS (new_block)
599 = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
602 /* Remap its variables. */
603 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
604 &BLOCK_NONLOCALIZED_VARS (new_block),
607 if (id->transform_lang_insert_block)
608 id->transform_lang_insert_block (new_block);
610 /* Remember the remapped block. */
611 insert_decl_map (id, old_block, new_block);
614 /* Copy the whole block tree and root it in id->block. */
616 remap_blocks (tree block, copy_body_data *id)
619 tree new_tree = block;
624 remap_block (&new_tree, id);
625 gcc_assert (new_tree != block);
626 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
627 prepend_lexical_block (new_tree, remap_blocks (t, id));
628 /* Blocks are in arbitrary order, but to make things slightly prettier, do
629 not swap the order when producing a copy. */
630 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
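/* For example, subblocks A, B, C are prepended one by one, leaving the
   chain as C, B, A; the blocks_nreverse call above restores the original
   A, B, C order in the copy.  */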
635 copy_statement_list (tree *tp)
637 tree_stmt_iterator oi, ni;
640 new_tree = alloc_stmt_list ();
641 ni = tsi_start (new_tree);
642 oi = tsi_start (*tp);
643 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
646 for (; !tsi_end_p (oi); tsi_next (&oi))
648 tree stmt = tsi_stmt (oi);
649 if (TREE_CODE (stmt) == STATEMENT_LIST)
650 copy_statement_list (&stmt);
651 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
656 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
658 tree block = BIND_EXPR_BLOCK (*tp);
659 /* Copy (and replace) the statement. */
660 copy_tree_r (tp, walk_subtrees, NULL);
663 remap_block (&block, id);
664 BIND_EXPR_BLOCK (*tp) = block;
667 if (BIND_EXPR_VARS (*tp))
668 /* This will remap a lot of the same decls again, but this should be
670 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
674 /* Create a new gimple_seq by remapping all the statements in BODY
675 using the inlining information in ID. */
678 remap_gimple_seq (gimple_seq body, copy_body_data *id)
680 gimple_stmt_iterator si;
681 gimple_seq new_body = NULL;
683 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
685 gimple new_stmt = remap_gimple_stmt (gsi_stmt (si), id);
686 gimple_seq_add_stmt (&new_body, new_stmt);
693 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
694 block using the mapping information in ID. */
697 copy_gimple_bind (gimple stmt, copy_body_data *id)
700 tree new_block, new_vars;
701 gimple_seq body, new_body;
703 /* Copy the statement. Note that we purposely don't use copy_stmt
704 here because we need to remap statements as we copy. */
705 body = gimple_bind_body (stmt);
706 new_body = remap_gimple_seq (body, id);
708 new_block = gimple_bind_block (stmt);
710 remap_block (&new_block, id);
712 /* This will remap a lot of the same decls again, but this should be
714 new_vars = gimple_bind_vars (stmt);
716 new_vars = remap_decls (new_vars, NULL, id);
718 new_bind = gimple_build_bind (new_vars, new_body, new_block);
724 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
725 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
726 WALK_SUBTREES is used to indicate to walk_gimple_op whether to keep
727 recursing into the child nodes of *TP. */
730 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
732 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
733 copy_body_data *id = (copy_body_data *) wi_p->info;
734 tree fn = id->src_fn;
736 if (TREE_CODE (*tp) == SSA_NAME)
738 *tp = remap_ssa_name (*tp, id);
742 else if (auto_var_in_fn_p (*tp, fn))
744 /* Local variables and labels need to be replaced by equivalent
745 variables. We don't want to copy static variables; there's
746 only one of those, no matter how many times we inline the
747 containing function. Similarly for globals from an outer
751 /* Remap the declaration. */
752 new_decl = remap_decl (*tp, id);
753 gcc_assert (new_decl);
754 /* Replace this variable with the copy. */
755 STRIP_TYPE_NOPS (new_decl);
756 /* ??? The C++ frontend uses void * pointer zero to initialize
757 any other type. This confuses the middle-end type verification.
758 As cloned bodies do not go through gimplification again the fixup
759 there doesn't trigger. */
760 if (TREE_CODE (new_decl) == INTEGER_CST
761 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
762 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
766 else if (TREE_CODE (*tp) == STATEMENT_LIST)
768 else if (TREE_CODE (*tp) == SAVE_EXPR)
770 else if (TREE_CODE (*tp) == LABEL_DECL
771 && (!DECL_CONTEXT (*tp)
772 || decl_function_context (*tp) == id->src_fn))
773 /* These may need to be remapped for EH handling. */
774 *tp = remap_decl (*tp, id);
775 else if (TYPE_P (*tp))
776 /* Types may need remapping as well. */
777 *tp = remap_type (*tp, id);
778 else if (CONSTANT_CLASS_P (*tp))
780 /* If this is a constant, we have to copy the node iff the type
781 will be remapped. copy_tree_r will not copy a constant. */
782 tree new_type = remap_type (TREE_TYPE (*tp), id);
784 if (new_type == TREE_TYPE (*tp))
787 else if (TREE_CODE (*tp) == INTEGER_CST)
788 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
789 TREE_INT_CST_HIGH (*tp));
792 *tp = copy_node (*tp);
793 TREE_TYPE (*tp) = new_type;
798 /* Otherwise, just copy the node. Note that copy_tree_r already
799 knows not to copy VAR_DECLs, etc., so this is safe. */
800 if (TREE_CODE (*tp) == INDIRECT_REF)
802 /* Get rid of *& from inline substitutions that can happen when a
803 pointer argument is an ADDR_EXPR. */
804 tree decl = TREE_OPERAND (*tp, 0);
807 n = (tree *) pointer_map_contains (id->decl_map, decl);
810 tree type, new_tree, old;
812 /* If we happen to get an ADDR_EXPR in n->value, strip
813 it manually here as we'll eventually get ADDR_EXPRs
814 which lie about their types pointed to. In this case
815 build_fold_indirect_ref wouldn't strip the
816 INDIRECT_REF, but we absolutely rely on that. As
817 fold_indirect_ref does other useful transformations,
818 try that first, though. */
819 type = TREE_TYPE (TREE_TYPE (*n));
820 new_tree = unshare_expr (*n);
822 *tp = gimple_fold_indirect_ref (new_tree);
825 if (TREE_CODE (new_tree) == ADDR_EXPR)
827 *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
829 /* ??? We should either assert here or build
830 a VIEW_CONVERT_EXPR instead of blindly leaking
831 incompatible types to our IL. */
833 *tp = TREE_OPERAND (new_tree, 0);
837 *tp = build1 (INDIRECT_REF, type, new_tree);
838 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
839 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
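/* Illustrative example of the *& case above: if a call passes &a for a
   pointer parameter p, the inlined body's use of *p is first rewritten to
   *&a, which the code above folds back to plain a (or, when the fold is
   not possible, an INDIRECT_REF of the remapped address is built
   instead).  */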
847 /* Here is the "usual case". Copy this tree node, and then
848 tweak some special cases. */
849 copy_tree_r (tp, walk_subtrees, NULL);
851 /* Global variables we haven't seen yet need to go into referenced
852 vars, unless they are referenced only from types. */
853 if (gimple_in_ssa_p (cfun)
854 && TREE_CODE (*tp) == VAR_DECL
855 && id->remapping_type_depth == 0
856 && !processing_debug_stmt)
857 add_referenced_var (*tp);
859 /* We should never have TREE_BLOCK set on non-statements. */
861 gcc_assert (!TREE_BLOCK (*tp));
863 if (TREE_CODE (*tp) != OMP_CLAUSE)
864 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
866 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
868 /* The copied TARGET_EXPR has never been expanded, even if the
869 original node was expanded already. */
870 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
871 TREE_OPERAND (*tp, 3) = NULL_TREE;
873 else if (TREE_CODE (*tp) == ADDR_EXPR)
875 /* Variable substitution need not be simple. In particular,
876 the INDIRECT_REF substitution above. Make sure that
877 TREE_CONSTANT and friends are up-to-date. But make sure
878 to not improperly set TREE_BLOCK on some sub-expressions. */
879 int invariant = is_gimple_min_invariant (*tp);
880 tree block = id->block;
881 id->block = NULL_TREE;
882 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
885 /* Handle the case where we substituted an INDIRECT_REF
886 into the operand of the ADDR_EXPR. */
887 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
888 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
890 recompute_tree_invariant_for_addr_expr (*tp);
892 /* If this used to be invariant, but is not any longer,
893 then regimplification is probably needed. */
894 if (invariant && !is_gimple_min_invariant (*tp))
895 id->regimplify = true;
901 /* Keep iterating. */
906 /* Called from copy_body_id via walk_tree. DATA is really a
907 `copy_body_data *'. */
910 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
912 copy_body_data *id = (copy_body_data *) data;
913 tree fn = id->src_fn;
916 /* Begin by recognizing trees that we'll completely rewrite for the
917 inlining context. Our output for these trees is completely
918 different from our input (e.g. RETURN_EXPR is deleted, and morphs
919 into an edge). Further down, we'll handle trees that get
920 duplicated and/or tweaked. */
922 /* When requested, RETURN_EXPRs should be transformed to just the
923 contained MODIFY_EXPR. The branch semantics of the return will
924 be handled elsewhere by manipulating the CFG rather than a statement. */
925 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
927 tree assignment = TREE_OPERAND (*tp, 0);
929 /* If we're returning something, just turn that into an
930 assignment into the equivalent of the original RESULT_DECL.
931 If the "assignment" is just the result decl, the result
932 decl has already been set (e.g. a recent "foo (&result_decl,
933 ...)"); just toss the entire RETURN_EXPR. */
934 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
936 /* Replace the RETURN_EXPR with (a copy of) the
937 MODIFY_EXPR hanging underneath. */
938 *tp = copy_node (assignment);
940 else /* Else the RETURN_EXPR returns no value. */
943 return (tree) (void *)1;
946 else if (TREE_CODE (*tp) == SSA_NAME)
948 *tp = remap_ssa_name (*tp, id);
953 /* Local variables and labels need to be replaced by equivalent
954 variables. We don't want to copy static variables; there's only
955 one of those, no matter how many times we inline the containing
956 function. Similarly for globals from an outer function. */
957 else if (auto_var_in_fn_p (*tp, fn))
961 /* Remap the declaration. */
962 new_decl = remap_decl (*tp, id);
963 gcc_assert (new_decl);
964 /* Replace this variable with the copy. */
965 STRIP_TYPE_NOPS (new_decl);
969 else if (TREE_CODE (*tp) == STATEMENT_LIST)
970 copy_statement_list (tp);
971 else if (TREE_CODE (*tp) == SAVE_EXPR
972 || TREE_CODE (*tp) == TARGET_EXPR)
973 remap_save_expr (tp, id->decl_map, walk_subtrees);
974 else if (TREE_CODE (*tp) == LABEL_DECL
975 && (! DECL_CONTEXT (*tp)
976 || decl_function_context (*tp) == id->src_fn))
977 /* These may need to be remapped for EH handling. */
978 *tp = remap_decl (*tp, id);
979 else if (TREE_CODE (*tp) == BIND_EXPR)
980 copy_bind_expr (tp, walk_subtrees, id);
981 /* Types may need remapping as well. */
982 else if (TYPE_P (*tp))
983 *tp = remap_type (*tp, id);
985 /* If this is a constant, we have to copy the node iff the type will be
986 remapped. copy_tree_r will not copy a constant. */
987 else if (CONSTANT_CLASS_P (*tp))
989 tree new_type = remap_type (TREE_TYPE (*tp), id);
991 if (new_type == TREE_TYPE (*tp))
994 else if (TREE_CODE (*tp) == INTEGER_CST)
995 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
996 TREE_INT_CST_HIGH (*tp));
999 *tp = copy_node (*tp);
1000 TREE_TYPE (*tp) = new_type;
1004 /* Otherwise, just copy the node. Note that copy_tree_r already
1005 knows not to copy VAR_DECLs, etc., so this is safe. */
1008 /* Here we handle trees that are not completely rewritten.
1009 First we detect some inlining-induced bogosities for
1011 if (TREE_CODE (*tp) == MODIFY_EXPR
1012 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1013 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1015 /* Some assignments VAR = VAR; don't generate any rtl code
1016 and thus don't count as variable modification. Avoid
1017 keeping bogosities like 0 = 0. */
1018 tree decl = TREE_OPERAND (*tp, 0), value;
1021 n = (tree *) pointer_map_contains (id->decl_map, decl);
1025 STRIP_TYPE_NOPS (value);
1026 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1028 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1029 return copy_tree_body_r (tp, walk_subtrees, data);
1033 else if (TREE_CODE (*tp) == INDIRECT_REF)
1035 /* Get rid of *& from inline substitutions that can happen when a
1036 pointer argument is an ADDR_EXPR. */
1037 tree decl = TREE_OPERAND (*tp, 0);
1040 n = (tree *) pointer_map_contains (id->decl_map, decl);
1045 /* If we happen to get an ADDR_EXPR in n->value, strip
1046 it manually here as we'll eventually get ADDR_EXPRs
1047 which lie about their types pointed to. In this case
1048 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1049 but we absolutely rely on that. As fold_indirect_ref
1050 does other useful transformations, try that first, though. */
1051 tree type = TREE_TYPE (TREE_TYPE (*n));
1052 if (id->do_not_unshare)
1055 new_tree = unshare_expr (*n);
1057 *tp = gimple_fold_indirect_ref (new_tree);
1060 if (TREE_CODE (new_tree) == ADDR_EXPR)
1062 *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
1064 /* ??? We should either assert here or build
1065 a VIEW_CONVERT_EXPR instead of blindly leaking
1066 incompatible types to our IL. */
1068 *tp = TREE_OPERAND (new_tree, 0);
1072 *tp = build1 (INDIRECT_REF, type, new_tree);
1073 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1074 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1082 /* Here is the "usual case". Copy this tree node, and then
1083 tweak some special cases. */
1084 copy_tree_r (tp, walk_subtrees, NULL);
1086 /* Global variables we haven't seen yet need to go into referenced
1087 vars, unless they are referenced only from types or debug stmts. */
1088 if (gimple_in_ssa_p (cfun)
1089 && TREE_CODE (*tp) == VAR_DECL
1090 && id->remapping_type_depth == 0
1091 && !processing_debug_stmt)
1092 add_referenced_var (*tp);
1094 /* If EXPR has a block defined, map it to the newly constructed block.
1095 When inlining we want EXPRs without a block to appear in the block
1096 of the function call if we are not remapping a type. */
1099 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1100 if (TREE_BLOCK (*tp))
1103 n = (tree *) pointer_map_contains (id->decl_map,
1108 TREE_BLOCK (*tp) = new_block;
1111 if (TREE_CODE (*tp) != OMP_CLAUSE)
1112 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1114 /* The copied TARGET_EXPR has never been expanded, even if the
1115 original node was expanded already. */
1116 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1118 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1119 TREE_OPERAND (*tp, 3) = NULL_TREE;
1122 /* Variable substitution need not be simple. In particular, the
1123 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1124 and friends are up-to-date. */
1125 else if (TREE_CODE (*tp) == ADDR_EXPR)
1127 int invariant = is_gimple_min_invariant (*tp);
1128 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1130 /* Handle the case where we substituted an INDIRECT_REF
1131 into the operand of the ADDR_EXPR. */
1132 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1133 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1135 recompute_tree_invariant_for_addr_expr (*tp);
1137 /* If this used to be invariant, but is not any longer,
1138 then regimplification is probably needed. */
1139 if (invariant && !is_gimple_min_invariant (*tp))
1140 id->regimplify = true;
1146 /* Keep iterating. */
1150 /* Helper for remap_gimple_stmt. Given an EH region number for the
1151 source function, map that to the duplicate EH region number in
1152 the destination function. */
1155 remap_eh_region_nr (int old_nr, copy_body_data *id)
1157 eh_region old_r, new_r;
1160 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1161 slot = pointer_map_contains (id->eh_map, old_r);
1162 new_r = (eh_region) *slot;
1164 return new_r->index;
1167 /* Similar, but operate on INTEGER_CSTs. */
1170 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1174 old_nr = tree_low_cst (old_t_nr, 0);
1175 new_nr = remap_eh_region_nr (old_nr, id);
1177 return build_int_cst (NULL, new_nr);
1180 /* Helper for copy_bb. Remap statement STMT using the inlining
1181 information in ID. Return the new statement copy. */
1184 remap_gimple_stmt (gimple stmt, copy_body_data *id)
1187 struct walk_stmt_info wi;
1189 bool skip_first = false;
1191 /* Begin by recognizing trees that we'll completely rewrite for the
1192 inlining context. Our output for these trees is completely
1193 different from our input (e.g. RETURN_EXPR is deleted, and morphs
1194 into an edge). Further down, we'll handle trees that get
1195 duplicated and/or tweaked. */
1197 /* When requested, GIMPLE_RETURNs should be transformed to just the
1198 contained GIMPLE_ASSIGN. The branch semantics of the return will
1199 be handled elsewhere by manipulating the CFG rather than the
1201 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1203 tree retval = gimple_return_retval (stmt);
1205 /* If we're returning something, just turn that into an
1206 assignment into the equivalent of the original RESULT_DECL.
1207 If RETVAL is just the result decl, the result decl has
1208 already been set (e.g. a recent "foo (&result_decl, ...)");
1209 just toss the entire GIMPLE_RETURN. */
1210 if (retval && TREE_CODE (retval) != RESULT_DECL)
1212 copy = gimple_build_assign (id->retvar, retval);
1213 /* id->retvar is already substituted. Skip it on later remapping. */
1217 return gimple_build_nop ();
1219 else if (gimple_has_substatements (stmt))
1223 /* When cloning bodies from the C++ front end, we will be handed bodies
1224 in High GIMPLE form. Handle here all the High GIMPLE statements that
1225 have embedded statements. */
1226 switch (gimple_code (stmt))
1229 copy = copy_gimple_bind (stmt, id);
1233 s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
1234 copy = gimple_build_catch (gimple_catch_types (stmt), s1);
1237 case GIMPLE_EH_FILTER:
1238 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1239 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1243 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1244 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1245 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1248 case GIMPLE_WITH_CLEANUP_EXPR:
1249 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1250 copy = gimple_build_wce (s1);
1253 case GIMPLE_OMP_PARALLEL:
1254 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1255 copy = gimple_build_omp_parallel
1257 gimple_omp_parallel_clauses (stmt),
1258 gimple_omp_parallel_child_fn (stmt),
1259 gimple_omp_parallel_data_arg (stmt));
1262 case GIMPLE_OMP_TASK:
1263 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1264 copy = gimple_build_omp_task
1266 gimple_omp_task_clauses (stmt),
1267 gimple_omp_task_child_fn (stmt),
1268 gimple_omp_task_data_arg (stmt),
1269 gimple_omp_task_copy_fn (stmt),
1270 gimple_omp_task_arg_size (stmt),
1271 gimple_omp_task_arg_align (stmt));
1274 case GIMPLE_OMP_FOR:
1275 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1276 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1277 copy = gimple_build_omp_for (s1, gimple_omp_for_clauses (stmt),
1278 gimple_omp_for_collapse (stmt), s2);
1281 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1283 gimple_omp_for_set_index (copy, i,
1284 gimple_omp_for_index (stmt, i));
1285 gimple_omp_for_set_initial (copy, i,
1286 gimple_omp_for_initial (stmt, i));
1287 gimple_omp_for_set_final (copy, i,
1288 gimple_omp_for_final (stmt, i));
1289 gimple_omp_for_set_incr (copy, i,
1290 gimple_omp_for_incr (stmt, i));
1291 gimple_omp_for_set_cond (copy, i,
1292 gimple_omp_for_cond (stmt, i));
1297 case GIMPLE_OMP_MASTER:
1298 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1299 copy = gimple_build_omp_master (s1);
1302 case GIMPLE_OMP_ORDERED:
1303 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1304 copy = gimple_build_omp_ordered (s1);
1307 case GIMPLE_OMP_SECTION:
1308 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1309 copy = gimple_build_omp_section (s1);
1312 case GIMPLE_OMP_SECTIONS:
1313 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1314 copy = gimple_build_omp_sections
1315 (s1, gimple_omp_sections_clauses (stmt));
1318 case GIMPLE_OMP_SINGLE:
1319 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1320 copy = gimple_build_omp_single
1321 (s1, gimple_omp_single_clauses (stmt));
1324 case GIMPLE_OMP_CRITICAL:
1325 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1327 = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
1336 if (gimple_assign_copy_p (stmt)
1337 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1338 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1340 /* Here we handle statements that are not completely rewritten.
1341 First we detect some inlining-induced bogosities for
1344 /* Some assignments VAR = VAR; don't generate any rtl code
1345 and thus don't count as variable modification. Avoid
1346 keeping bogosities like 0 = 0. */
1347 tree decl = gimple_assign_lhs (stmt), value;
1350 n = (tree *) pointer_map_contains (id->decl_map, decl);
1354 STRIP_TYPE_NOPS (value);
1355 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1356 return gimple_build_nop ();
1360 if (gimple_debug_bind_p (stmt))
1362 copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1363 gimple_debug_bind_get_value (stmt),
1365 VEC_safe_push (gimple, heap, id->debug_stmts, copy);
1369 /* Create a new deep copy of the statement. */
1370 copy = gimple_copy (stmt);
1372 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1373 RESX and EH_DISPATCH. */
1375 switch (gimple_code (copy))
1379 tree r, fndecl = gimple_call_fndecl (copy);
1380 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1381 switch (DECL_FUNCTION_CODE (fndecl))
1383 case BUILT_IN_EH_COPY_VALUES:
1384 r = gimple_call_arg (copy, 1);
1385 r = remap_eh_region_tree_nr (r, id);
1386 gimple_call_set_arg (copy, 1, r);
1389 case BUILT_IN_EH_POINTER:
1390 case BUILT_IN_EH_FILTER:
1391 r = gimple_call_arg (copy, 0);
1392 r = remap_eh_region_tree_nr (r, id);
1393 gimple_call_set_arg (copy, 0, r);
1404 int r = gimple_resx_region (copy);
1405 r = remap_eh_region_nr (r, id);
1406 gimple_resx_set_region (copy, r);
1410 case GIMPLE_EH_DISPATCH:
1412 int r = gimple_eh_dispatch_region (copy);
1413 r = remap_eh_region_nr (r, id);
1414 gimple_eh_dispatch_set_region (copy, r);
1423 /* If STMT has a block defined, map it to the newly constructed
1424 block. When inlining we want statements without a block to
1425 appear in the block of the function call. */
1426 new_block = id->block;
1427 if (gimple_block (copy))
1430 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (copy));
1435 gimple_set_block (copy, new_block);
1437 if (gimple_debug_bind_p (copy))
1440 /* Remap all the operands in COPY. */
1441 memset (&wi, 0, sizeof (wi));
1444 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1446 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1448 /* Clear the copied virtual operands. We are not remapping them here
1449 but are going to recreate them from scratch. */
1450 if (gimple_has_mem_ops (copy))
1452 gimple_set_vdef (copy, NULL_TREE);
1453 gimple_set_vuse (copy, NULL_TREE);
1460 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1464 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1465 gcov_type count_scale)
1467 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1468 basic_block copy_basic_block;
1472 /* create_basic_block() will append every new block to
1473 basic_block_info automatically. */
1474 copy_basic_block = create_basic_block (NULL, (void *) 0,
1475 (basic_block) bb->prev_bb->aux);
1476 copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
1478 /* We are going to rebuild frequencies from scratch. These values
1479 have only minor importance for driving canonicalize_loop_headers. */
1480 freq = ((gcov_type)bb->frequency * frequency_scale / REG_BR_PROB_BASE);
1482 /* We recompute frequencies after inlining, so this is quite safe. */
1483 if (freq > BB_FREQ_MAX)
1485 copy_basic_block->frequency = freq;
1487 copy_gsi = gsi_start_bb (copy_basic_block);
1489 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1491 gimple stmt = gsi_stmt (gsi);
1492 gimple orig_stmt = stmt;
1494 id->regimplify = false;
1495 stmt = remap_gimple_stmt (stmt, id);
1496 if (gimple_nop_p (stmt))
1499 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
1502 /* With return slot optimization we can end up with
1503 non-gimple (foo *)&this->m, fix that here. */
1504 if (is_gimple_assign (stmt)
1505 && gimple_assign_rhs_code (stmt) == NOP_EXPR
1506 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1509 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1510 gimple_assign_rhs1 (stmt),
1511 true, NULL, false, GSI_NEW_STMT);
1512 gimple_assign_set_rhs1 (stmt, new_rhs);
1513 id->regimplify = false;
1516 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1519 gimple_regimplify_operands (stmt, &seq_gsi);
1521 /* If copy_basic_block has been empty at the start of this iteration,
1522 call gsi_start_bb again to get at the newly added statements. */
1523 if (gsi_end_p (copy_gsi))
1524 copy_gsi = gsi_start_bb (copy_basic_block);
1526 gsi_next (&copy_gsi);
1528 /* Process the new statement. The call to gimple_regimplify_operands
1529 possibly turned the statement into multiple statements; we
1530 need to process all of them. */
1535 stmt = gsi_stmt (copy_gsi);
1536 if (is_gimple_call (stmt)
1537 && gimple_call_va_arg_pack_p (stmt)
1540 /* __builtin_va_arg_pack () should be replaced by
1541 all arguments corresponding to ... in the caller. */
1544 VEC(tree, heap) *argarray;
1545 size_t nargs = gimple_call_num_args (id->gimple_call);
1548 for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
1551 /* Create the new array of arguments. */
1552 n = nargs + gimple_call_num_args (stmt);
1553 argarray = VEC_alloc (tree, heap, n);
1554 VEC_safe_grow (tree, heap, argarray, n);
1556 /* Copy all the arguments before '...' */
1557 memcpy (VEC_address (tree, argarray),
1558 gimple_call_arg_ptr (stmt, 0),
1559 gimple_call_num_args (stmt) * sizeof (tree));
1561 /* Append the arguments passed in '...' */
1562 memcpy (VEC_address(tree, argarray) + gimple_call_num_args (stmt),
1563 gimple_call_arg_ptr (id->gimple_call, 0)
1564 + (gimple_call_num_args (id->gimple_call) - nargs),
1565 nargs * sizeof (tree));
1567 new_call = gimple_build_call_vec (gimple_call_fn (stmt),
1570 VEC_free (tree, heap, argarray);
1572 /* Copy all GIMPLE_CALL flags, location and block, except
1573 GF_CALL_VA_ARG_PACK. */
1574 gimple_call_copy_flags (new_call, stmt);
1575 gimple_call_set_va_arg_pack (new_call, false);
1576 gimple_set_location (new_call, gimple_location (stmt));
1577 gimple_set_block (new_call, gimple_block (stmt));
1578 gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));
1580 gsi_replace (&copy_gsi, new_call, false);
1581 gimple_set_bb (stmt, NULL);
1584 else if (is_gimple_call (stmt)
1586 && (decl = gimple_call_fndecl (stmt))
1587 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1588 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1590 /* __builtin_va_arg_pack_len () should be replaced by
1591 the number of anonymous arguments. */
1592 size_t nargs = gimple_call_num_args (id->gimple_call);
1596 for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
1599 count = build_int_cst (integer_type_node, nargs);
1600 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1601 gsi_replace (&copy_gsi, new_stmt, false);
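/* Illustrative example (hypothetical functions): when

     void err (int code, ...) { log (code, __builtin_va_arg_pack ()); }

   is inlined into the call err (1, "x", 2), the inner call becomes
   log (1, "x", 2), and a use of __builtin_va_arg_pack_len () in the same
   body would be replaced by the constant 2, the number of anonymous
   arguments at that call site.  */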
1605 /* Statements produced by inlining can be unfolded, especially
1606 when we constant propagated some operands. We can't fold
1607 them right now for two reasons:
1608 1) folding requires SSA_NAME_DEF_STMTs to be correct
1609 2) we can't change function calls to builtins.
1610 So we just mark the statement for later folding. We mark
1611 all new statements, instead of just statements that have changed
1612 by some nontrivial substitution, so even statements made
1613 foldable indirectly are updated. If this turns out to be
1614 expensive, copy_body can be told to watch for nontrivial
1616 if (id->statements_to_fold)
1617 pointer_set_insert (id->statements_to_fold, stmt);
1619 /* We're duplicating a CALL_EXPR. Find any corresponding
1620 callgraph edges and update or duplicate them. */
1621 if (is_gimple_call (stmt))
1623 struct cgraph_edge *edge;
1626 switch (id->transform_call_graph_edges)
1628 case CB_CGE_DUPLICATE:
1629 edge = cgraph_edge (id->src_node, orig_stmt);
1632 int edge_freq = edge->frequency;
1633 edge = cgraph_clone_edge (edge, id->dst_node, stmt,
1635 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1636 edge->frequency, true);
1637 /* We could also just rescale the frequency, but
1638 doing so would introduce roundoff errors and make
1639 the verifier unhappy. */
1641 = compute_call_stmt_bb_frequency (id->dst_node->decl,
1644 && profile_status_for_function (cfun) != PROFILE_ABSENT
1645 && (edge_freq > edge->frequency + 10
1646 || edge_freq < edge->frequency - 10))
1648 fprintf (dump_file, "Edge frequency estimated by "
1649 "cgraph %i diverge from inliner's estimate %i\n",
1653 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
1656 copy_basic_block->frequency);
1661 case CB_CGE_MOVE_CLONES:
1662 cgraph_set_call_stmt_including_clones (id->dst_node,
1664 edge = cgraph_edge (id->dst_node, stmt);
1668 edge = cgraph_edge (id->dst_node, orig_stmt);
1670 cgraph_set_call_stmt (edge, stmt);
1677 /* Constant propagation on arguments done during inlining
1678 may create a new direct call. Produce an edge for it. */
1680 || (edge->indirect_call
1681 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
1682 && is_gimple_call (stmt)
1683 && (fn = gimple_call_fndecl (stmt)) != NULL)
1685 struct cgraph_node *dest = cgraph_node (fn);
1687 /* We have a missing edge in the callgraph. This can happen
1688 when previous inlining turned an indirect call into a
1689 direct call by constant propagating arguments, or we are
1690 producing a dead clone (for further cloning). In all
1691 other cases we hit a bug (incorrect node sharing is the
1692 most common reason for missing edges). */
1693 gcc_assert (dest->needed || !dest->analyzed
1694 || !id->src_node->analyzed);
1695 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
1696 cgraph_create_edge_including_clones
1697 (id->dst_node, dest, orig_stmt, stmt, bb->count,
1698 compute_call_stmt_bb_frequency (id->dst_node->decl,
1700 bb->loop_depth, CIF_ORIGINALLY_INDIRECT_CALL);
1702 cgraph_create_edge (id->dst_node, dest, stmt,
1704 compute_call_stmt_bb_frequency
1705 (id->dst_node->decl, copy_basic_block),
1706 bb->loop_depth)->inline_failed
1707 = CIF_ORIGINALLY_INDIRECT_CALL;
1710 fprintf (dump_file, "Created new direct edge to %s",
1711 cgraph_node_name (dest));
1715 flags = gimple_call_flags (stmt);
1716 if (flags & ECF_MAY_BE_ALLOCA)
1717 cfun->calls_alloca = true;
1718 if (flags & ECF_RETURNS_TWICE)
1719 cfun->calls_setjmp = true;
1722 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
1723 id->eh_map, id->eh_lp_nr);
1725 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
1730 find_new_referenced_vars (gsi_stmt (copy_gsi));
1731 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
1732 if (TREE_CODE (def) == SSA_NAME)
1733 SSA_NAME_DEF_STMT (def) = stmt;
1736 gsi_next (&copy_gsi);
1738 while (!gsi_end_p (copy_gsi));
1740 copy_gsi = gsi_last_bb (copy_basic_block);
1743 return copy_basic_block;
1746 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
1747 form is quite easy, since the dominator relationship for old basic blocks does
1750 There is, however, an exception where inlining might change the dominator relation
1751 across EH edges from basic blocks within inlined functions destined
1752 for landing pads in the function we inline into.
1754 The function fills in PHI_RESULTs of such PHI nodes if they refer
1755 to gimple regs. Otherwise, the function marks PHI_RESULT of such
1756 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
1757 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1758 set, and this means that there will be no overlapping live ranges
1759 for the underlying symbol.
1761 This might change in the future if we allow redirecting of EH edges and
1762 we might want to change the way we build the CFG pre-inlining to include
1763 all the possible edges then. */
1765 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1766 bool can_throw, bool nonlocal_goto)
1771 FOR_EACH_EDGE (e, ei, bb->succs)
1773 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1776 gimple_stmt_iterator si;
1779 gcc_assert (e->flags & EDGE_EH);
1782 gcc_assert (!(e->flags & EDGE_EH));
1784 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
1788 phi = gsi_stmt (si);
1790 /* There shouldn't be any PHI nodes in the ENTRY_BLOCK. */
1791 gcc_assert (!e->dest->aux);
1793 gcc_assert ((e->flags & EDGE_EH)
1794 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
1796 if (!is_gimple_reg (PHI_RESULT (phi)))
1798 mark_sym_for_renaming (SSA_NAME_VAR (PHI_RESULT (phi)));
1802 re = find_edge (ret_bb, e->dest);
1804 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
1805 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
1807 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1808 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
1814 /* Copy edges from BB into its copy constructed earlier, scale profile
1815 accordingly. Edges will be taken care of later. Assume aux
1816 pointers to point to the copies of each BB. */
1819 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb)
1821 basic_block new_bb = (basic_block) bb->aux;
1824 gimple_stmt_iterator si;
1827 /* Use the indices from the original blocks to create edges for the
1829 FOR_EACH_EDGE (old_edge, ei, bb->succs)
1830 if (!(old_edge->flags & EDGE_EH))
1834 flags = old_edge->flags;
1836 /* Return edges do get a FALLTHRU flag when they get inlined. */
1837 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1838 && old_edge->dest->aux != EXIT_BLOCK_PTR)
1839 flags |= EDGE_FALLTHRU;
1840 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1841 new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
1842 new_edge->probability = old_edge->probability;
1845 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1848 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
1851 bool can_throw, nonlocal_goto;
1853 copy_stmt = gsi_stmt (si);
1854 if (!is_gimple_debug (copy_stmt))
1856 update_stmt (copy_stmt);
1857 if (gimple_in_ssa_p (cfun))
1858 mark_symbols_for_renaming (copy_stmt);
1861 /* Do this before the possible split_block. */
1864 /* If this tree could throw an exception, there are two
1865 cases where we need to add abnormal edge(s): the
1866 tree wasn't in a region and there is a "current
1867 region" in the caller; or the original tree had
1868 EH edges. In both cases split the block after the tree,
1869 and add abnormal edge(s) as needed; we need both
1870 those from the callee and the caller.
1871 We check whether the copy can throw, because the const
1872 propagation can change an INDIRECT_REF which throws
1873 into a COMPONENT_REF which doesn't. If the copy
1874 can throw, the original could also throw. */
1875 can_throw = stmt_can_throw_internal (copy_stmt);
1876 nonlocal_goto = stmt_can_make_abnormal_goto (copy_stmt);
1878 if (can_throw || nonlocal_goto)
1880 if (!gsi_end_p (si))
1881 /* Note that bb's predecessor edges aren't necessarily
1882 right at this point; split_block doesn't care. */
1884 edge e = split_block (new_bb, copy_stmt);
1887 new_bb->aux = e->src->aux;
1888 si = gsi_start_bb (new_bb);
1892 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
1893 make_eh_dispatch_edges (copy_stmt);
1895 make_eh_edges (copy_stmt);
1898 make_abnormal_goto_edges (gimple_bb (copy_stmt), true);
1900 if ((can_throw || nonlocal_goto)
1901 && gimple_in_ssa_p (cfun))
1902 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
1903 can_throw, nonlocal_goto);
1907 /* Copy the PHIs. All blocks and edges are copied; some blocks
1908 were possibly split and new outgoing EH edges inserted.
1909 BB points to the block of the original function and AUX pointers link
1910 the original and newly copied blocks. */
1913 copy_phis_for_bb (basic_block bb, copy_body_data *id)
1915 basic_block const new_bb = (basic_block) bb->aux;
1918 gimple_stmt_iterator si;
1920 for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si))
1926 phi = gsi_stmt (si);
1927 res = PHI_RESULT (phi);
1929 if (is_gimple_reg (res))
1931 walk_tree (&new_res, copy_tree_body_r, id, NULL);
1932 SSA_NAME_DEF_STMT (new_res)
1933 = new_phi = create_phi_node (new_res, new_bb);
1934 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
1937 = find_edge ((basic_block) new_edge->src->aux, bb);
1938 tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
1940 tree block = id->block;
1941 id->block = NULL_TREE;
1942 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
1944 gcc_assert (new_arg);
1945 /* With return slot optimization we can end up with
1946 non-gimple (foo *)&this->m, fix that here. */
1947 if (TREE_CODE (new_arg) != SSA_NAME
1948 && TREE_CODE (new_arg) != FUNCTION_DECL
1949 && !is_gimple_val (new_arg))
1951 gimple_seq stmts = NULL;
1952 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
1953 gsi_insert_seq_on_edge_immediate (new_edge, stmts);
1955 add_phi_arg (new_phi, new_arg, new_edge,
1956 gimple_phi_arg_location_from_edge (phi, old_edge));
1963 /* Wrapper for remap_decl so it can be used as a callback. */
1966 remap_decl_1 (tree decl, void *data)
1968 return remap_decl (decl, (copy_body_data *) data);
1971 /* Build struct function and associated data structures for the new clone
1972 NEW_FNDECL to be built. CALLEE_FNDECL is the original. */
1975 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
1977 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
1978 gcov_type count_scale;
1980 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
1981 count_scale = (REG_BR_PROB_BASE * count
1982 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
1984 count_scale = REG_BR_PROB_BASE;
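/* Worked example (assuming REG_BR_PROB_BASE is 10000): if the callee's
   entry block count is 4000 and the requested COUNT for the clone is 1000,
   count_scale is 10000 * 1000 / 4000 = 2500, so every copied block count
   below is multiplied by 2500 / 10000, i.e. scaled down to a quarter.  */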
1986 /* Register specific tree functions. */
1987 gimple_register_cfg_hooks ();
1989 /* Get clean struct function. */
1990 push_struct_function (new_fndecl);
1992 /* We will rebuild these, so just sanity check that they are empty. */
1993 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
1994 gcc_assert (cfun->local_decls == NULL);
1995 gcc_assert (cfun->cfg == NULL);
1996 gcc_assert (cfun->decl == new_fndecl);
1998 /* Copy items we preserve during cloning. */
1999 cfun->static_chain_decl = src_cfun->static_chain_decl;
2000 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2001 cfun->function_end_locus = src_cfun->function_end_locus;
2002 cfun->curr_properties = src_cfun->curr_properties;
2003 cfun->last_verified = src_cfun->last_verified;
2004 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2005 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2006 cfun->function_frequency = src_cfun->function_frequency;
2007 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2008 cfun->stdarg = src_cfun->stdarg;
2009 cfun->dont_save_pending_sizes_p = src_cfun->dont_save_pending_sizes_p;
2010 cfun->after_inlining = src_cfun->after_inlining;
2011 cfun->returns_struct = src_cfun->returns_struct;
2012 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2013 cfun->after_tree_profile = src_cfun->after_tree_profile;
2015 init_empty_tree_cfg ();
2017 profile_status_for_function (cfun) = profile_status_for_function (src_cfun);
2018 ENTRY_BLOCK_PTR->count =
2019 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2021 ENTRY_BLOCK_PTR->frequency
2022 = ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2023 EXIT_BLOCK_PTR->count =
2024 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
2026 EXIT_BLOCK_PTR->frequency =
2027 EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency;
2029 init_eh_for_function ();
2031 if (src_cfun->gimple_df)
2033 init_tree_ssa (cfun);
2034 cfun->gimple_df->in_ssa_p = true;
2035 init_ssa_operands ();
2040 /* Make a copy of the body of FN so that it can be inserted inline in
2041 another function. Walks FN via CFG, returns new fndecl. */
2044 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2045 basic_block entry_block_map, basic_block exit_block_map)
2047 tree callee_fndecl = id->src_fn;
2048 /* Original cfun for the callee, doesn't change. */
2049 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2050 struct function *cfun_to_copy;
2052 tree new_fndecl = NULL;
2053 gcov_type count_scale;
2056 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
2057 count_scale = (REG_BR_PROB_BASE * count
2058 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
2060 count_scale = REG_BR_PROB_BASE;
2062 /* Register specific tree functions. */
2063 gimple_register_cfg_hooks ();
2065 /* Must have a CFG here at this point. */
2066 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
2067 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2069 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2071 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
2072 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
2073 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2074 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
2076 /* Duplicate any exception-handling regions. */
2078 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2081 /* Use aux pointers to map the original blocks to copy. */
2082 FOR_EACH_BB_FN (bb, cfun_to_copy)
2084 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2089 last = last_basic_block;
2091 /* Now that we've duplicated the blocks, duplicate their edges. */
2092 FOR_ALL_BB_FN (bb, cfun_to_copy)
2093 copy_edges_for_bb (bb, count_scale, exit_block_map);
2095 if (gimple_in_ssa_p (cfun))
2096 FOR_ALL_BB_FN (bb, cfun_to_copy)
2097 copy_phis_for_bb (bb, id);
2099 FOR_ALL_BB_FN (bb, cfun_to_copy)
2101 ((basic_block)bb->aux)->aux = NULL;
2105 /* Zero out AUX fields of newly created blocks during EH edge insertion. */
2107 for (; last < last_basic_block; last++)
2108 BASIC_BLOCK (last)->aux = NULL;
2109 entry_block_map->aux = NULL;
2110 exit_block_map->aux = NULL;
2114 pointer_map_destroy (id->eh_map);
2121 /* Copy the debug STMT using ID. We deal with these statements in a
2122 special way: if any variable in their VALUE expression wasn't
2123 remapped yet, we won't remap it, because that would get decl uids
2124 out of sync, causing codegen differences between -g and -g0. If
2125 this arises, we drop the VALUE expression altogether. */
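/* Illustrative sketch (not part of the original sources; "x" and "y" are
   made-up names): suppose the callee contains the debug bind

     # DEBUG x => y + 1

   and "y" has no mapping yet when the bind is copied.  Rather than
   remapping "y" on demand -- which would allocate a decl uid that a -g0
   build never allocates -- the copy keeps the bound variable but resets
   its value, effectively turning it into "# DEBUG x => NULL".  */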
2128 copy_debug_stmt (gimple stmt, copy_body_data *id)
2131 struct walk_stmt_info wi;
2134 if (gimple_block (stmt))
2137 n = (tree *) pointer_map_contains (id->decl_map, gimple_block (stmt));
2141 gimple_set_block (stmt, t);
2143 /* Remap all the operands in COPY. */
2144 memset (&wi, 0, sizeof (wi));
2147 processing_debug_stmt = 1;
2149 t = gimple_debug_bind_get_var (stmt);
2151 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2152 && (n = (tree *) pointer_map_contains (id->debug_map, t)))
2154 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2158 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2160 gimple_debug_bind_set_var (stmt, t);
2162 if (gimple_debug_bind_has_value_p (stmt))
2163 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2164 remap_gimple_op_r, &wi, NULL);
2166 /* Punt if any decl couldn't be remapped. */
2167 if (processing_debug_stmt < 0)
2168 gimple_debug_bind_reset_value (stmt);
2170 processing_debug_stmt = 0;
2173 if (gimple_in_ssa_p (cfun))
2174 mark_symbols_for_renaming (stmt);
2177 /* Process deferred debug stmts. In order to give values better odds
2178 of being successfully remapped, we delay the processing of debug
2179 stmts until all other stmts that might require remapping are processed. */
2183 copy_debug_stmts (copy_body_data *id)
2188 if (!id->debug_stmts)
2191 for (i = 0; VEC_iterate (gimple, id->debug_stmts, i, stmt); i++)
2192 copy_debug_stmt (stmt, id);
2194 VEC_free (gimple, heap, id->debug_stmts);
2197 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2198 another function. */
2201 copy_tree_body (copy_body_data *id)
2203 tree fndecl = id->src_fn;
2204 tree body = DECL_SAVED_TREE (fndecl);
2206 walk_tree (&body, copy_tree_body_r, id, NULL);
2211 /* Make a copy of the body of FN so that it can be inserted inline in
2212 another function. */
2215 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2216 basic_block entry_block_map, basic_block exit_block_map)
2218 tree fndecl = id->src_fn;
2221 /* If this body has a CFG, walk CFG and copy. */
2222 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
2223 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map);
2224 copy_debug_stmts (id);
2229 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2230 defined in function FN, or of a data member thereof. */
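/* Hypothetical user-level example of the case this catches (names are
   invented for illustration):

     int f (int *p, int depth)
     {
       int local = depth;
       return depth ? f (&local, 0) : *p;
     }

   When the recursive call is inlined into F itself, the argument
   "&local" is an ADDR_EXPR of an automatic variable of the function
   being inlined, so it must not be propagated as an invariant into the
   inlined body.  */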
2233 self_inlining_addr_expr (tree value, tree fn)
2237 if (TREE_CODE (value) != ADDR_EXPR)
2240 var = get_base_address (TREE_OPERAND (value, 0));
2242 return var && auto_var_in_fn_p (var, fn);
2245 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2246 lexical block and line number information from base_stmt, if given,
2247 or from the last stmt of the block otherwise. */
2250 insert_init_debug_bind (copy_body_data *id,
2251 basic_block bb, tree var, tree value,
2255 gimple_stmt_iterator gsi;
2258 if (!gimple_in_ssa_p (id->src_cfun))
2261 if (!MAY_HAVE_DEBUG_STMTS)
2264 tracked_var = target_for_debug_bind (var);
2270 gsi = gsi_last_bb (bb);
2271 if (!base_stmt && !gsi_end_p (gsi))
2272 base_stmt = gsi_stmt (gsi);
2275 note = gimple_build_debug_bind (tracked_var, value, base_stmt);
2279 if (!gsi_end_p (gsi))
2280 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
2282 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
2289 insert_init_stmt (copy_body_data *id, basic_block bb, gimple init_stmt)
2291 /* If VAR represents a zero-sized variable, it's possible that the
2292 assignment statement may result in no gimple statements. */
2295 gimple_stmt_iterator si = gsi_last_bb (bb);
2297 /* We can end up with init statements that store to a non-register
2298 from a rhs with a conversion. Handle that here by forcing the
2299 rhs into a temporary. gimple_regimplify_operands is not
2300 prepared to do this for us. */
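/* Illustrative sketch of the situation handled below (assumed example,
   not from the original sources): an init statement such as
   "param_copy = (float) arg", where PARAM_COPY lives in memory, stores
   a converted value directly; the conversion is first forced into a
   temporary, leaving a plain copy from that temporary as the init
   statement.  */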
2301 if (!is_gimple_debug (init_stmt)
2302 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
2303 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
2304 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
2306 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
2307 gimple_expr_type (init_stmt),
2308 gimple_assign_rhs1 (init_stmt));
2309 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
2311 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
2312 gimple_assign_set_rhs1 (init_stmt, rhs);
2314 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
2315 gimple_regimplify_operands (init_stmt, &si);
2316 mark_symbols_for_renaming (init_stmt);
2318 if (!is_gimple_debug (init_stmt) && MAY_HAVE_DEBUG_STMTS)
2320 tree var, def = gimple_assign_lhs (init_stmt);
2322 if (TREE_CODE (def) == SSA_NAME)
2323 var = SSA_NAME_VAR (def);
2327 insert_init_debug_bind (id, bb, var, def, init_stmt);
2332 /* Initialize parameter P with VALUE. If needed, produce init statement
2333 at the end of BB. When BB is NULL, we return the init statement so the caller can emit it later. */
2336 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
2337 basic_block bb, tree *vars)
2339 gimple init_stmt = NULL;
2342 tree def = (gimple_in_ssa_p (cfun)
2343 ? gimple_default_def (id->src_cfun, p) : NULL);
2346 && value != error_mark_node
2347 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
2349 if (fold_convertible_p (TREE_TYPE (p), value))
2350 rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value);
2352 /* ??? For valid (GIMPLE) programs we should not end up here.
2353 Still if something has gone wrong and we end up with truly
2354 mismatched types here, fall back to using a VIEW_CONVERT_EXPR
2355 to not leak invalid GIMPLE to the following passes. */
2356 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
2359 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
2360 here since the type of this decl must be visible to the calling function. */
2362 var = copy_decl_to_var (p, id);
2364 /* We're actually using the newly-created var. */
2365 if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
2368 add_referenced_var (var);
2371 /* Declare this new variable. */
2372 TREE_CHAIN (var) = *vars;
2375 /* Make gimplifier happy about this variable. */
2376 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2378 /* If the parameter is never assigned to, has no SSA_NAMEs created,
2379 we would not need to create a new variable here at all, if it
2380 weren't for debug info. Still, we can just use the argument value. */
2382 if (TREE_READONLY (p)
2383 && !TREE_ADDRESSABLE (p)
2384 && value && !TREE_SIDE_EFFECTS (value)
2387 /* We may produce non-gimple trees by adding NOPs or introduce
2388 invalid sharing when the operand is not really constant.
2389 It is not a big deal to prohibit constant propagation here, as
2390 we will constant propagate in the DOM1 pass anyway. */
2391 if (is_gimple_min_invariant (value)
2392 && useless_type_conversion_p (TREE_TYPE (p),
2394 /* We have to be very careful about ADDR_EXPR. Make sure
2395 the base variable isn't a local variable of the inlined
2396 function, e.g., when doing recursive inlining, direct or
2397 mutually-recursive or whatever, which is why we don't
2398 just test whether fn == current_function_decl. */
2399 && ! self_inlining_addr_expr (value, fn))
2401 insert_decl_map (id, p, value);
2402 insert_debug_decl_map (id, p, var);
2403 return insert_init_debug_bind (id, bb, var, value, NULL);
2407 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
2408 that way, when the PARM_DECL is encountered, it will be
2409 automatically replaced by the VAR_DECL. */
2410 insert_decl_map (id, p, var);
2412 /* Even if P was TREE_READONLY, the new VAR should not be.
2413 In the original code, we would have constructed a
2414 temporary, and then the function body would have never
2415 changed the value of P. However, now, we will be
2416 constructing VAR directly. The constructor body may
2417 change its value multiple times as it is being
2418 constructed. Therefore, it must not be TREE_READONLY;
2419 the back-end assumes that TREE_READONLY variable is
2420 assigned to only once. */
2421 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
2422 TREE_READONLY (var) = 0;
2424 /* If there is no setup required and we are in SSA, take the easy route
2425 replacing all SSA names representing the function parameter by the
2426 SSA name passed to the function.
2428 We need to construct a map for the variable anyway, as it might be
2429 used in different SSA names when the parameter is set in the function.
2431 Do the replacement at -O0 for const arguments replaced by a constant.
2432 This is important for builtin_constant_p and other constructs requiring
2433 a constant argument to be visible in the inlined function body. */
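/* Illustration (an assumed example, not from the original sources):

     static inline int is_const (int x) { return __builtin_constant_p (x); }
     ... is_const (42) ...

   Mapping the parameter's default SSA name directly to the constant 42
   lets __builtin_constant_p see a constant inside the inlined body,
   which is the intent described above even at -O0.  */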
2434 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
2436 || (TREE_READONLY (p)
2437 && is_gimple_min_invariant (rhs)))
2438 && (TREE_CODE (rhs) == SSA_NAME
2439 || is_gimple_min_invariant (rhs))
2440 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2442 insert_decl_map (id, def, rhs);
2443 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2446 /* If the value of the argument is never used, there is no need to initialize it. */
2448 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
2450 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
2451 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2454 /* Initialize this VAR_DECL from the equivalent argument. Convert
2455 the argument to the proper type in case it was promoted. */
2458 if (rhs == error_mark_node)
2460 insert_decl_map (id, p, var);
2461 return insert_init_debug_bind (id, bb, var, rhs, NULL);
2464 STRIP_USELESS_TYPE_CONVERSION (rhs);
2466 /* We want to use MODIFY_EXPR, not INIT_EXPR here so that we
2467 keep our trees in gimple form. */
2468 if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
2470 def = remap_ssa_name (def, id);
2471 init_stmt = gimple_build_assign (def, rhs);
2472 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
2473 set_default_def (var, NULL);
2476 init_stmt = gimple_build_assign (var, rhs);
2478 if (bb && init_stmt)
2479 insert_init_stmt (id, bb, init_stmt);
2484 /* Generate code to initialize the parameters of the function at the
2485 top of the stack in ID from the GIMPLE_CALL STMT. */
2488 initialize_inlined_parameters (copy_body_data *id, gimple stmt,
2489 tree fn, basic_block bb)
2494 tree vars = NULL_TREE;
2495 tree static_chain = gimple_call_chain (stmt);
2497 /* Figure out what the parameters are. */
2498 parms = DECL_ARGUMENTS (fn);
2500 /* Loop through the parameter declarations, replacing each with an
2501 equivalent VAR_DECL, appropriately initialized. */
2502 for (p = parms, i = 0; p; p = TREE_CHAIN (p), i++)
2505 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
2506 setup_one_parameter (id, p, val, fn, bb, &vars);
2509 /* Initialize the static chain. */
2510 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
2511 gcc_assert (fn != current_function_decl);
2514 /* No static chain? Seems like a bug in tree-nested.c. */
2515 gcc_assert (static_chain);
2517 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
2520 declare_inline_vars (id->block, vars);
2524 /* Declare a return variable to replace the RESULT_DECL for the
2525 function we are calling. An appropriate DECL_STMT is returned.
2526 The USE_STMT is filled to contain a use of the declaration to
2527 indicate the return value of the function.
2529 RETURN_SLOT, if non-null, is the place where the result should be stored. It
2530 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
2531 was the LHS of the MODIFY_EXPR to which this call is the RHS.
2533 The return value is a (possibly null) value that holds the result
2534 as seen by the caller. */
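/* Illustration (assumed examples, not from the original sources): for
   "s = callee ()" where S is a large aggregate and the call uses the
   return slot optimization, RETURN_SLOT is "s" and the callee's
   RESULT_DECL maps onto it directly; for a scalar "x = callee ()",
   MODIFY_DEST is "x" and, when that is safe, it is reused instead of
   introducing a temporary.  */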
2537 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest)
2539 tree callee = id->src_fn;
2540 tree caller = id->dst_fn;
2541 tree result = DECL_RESULT (callee);
2542 tree callee_type = TREE_TYPE (result);
2543 tree caller_type = TREE_TYPE (TREE_TYPE (callee));
2546 /* We don't need to do anything for functions that don't return anything. */
2548 if (!result || VOID_TYPE_P (callee_type))
2551 /* If there was a return slot, then the return value is the
2552 dereferenced address of that object. */
2555 /* The front end shouldn't have used both return_slot and
2556 a modify expression. */
2557 gcc_assert (!modify_dest);
2558 if (DECL_BY_REFERENCE (result))
2560 tree return_slot_addr = build_fold_addr_expr (return_slot);
2561 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
2563 /* We are going to construct *&return_slot and we can't do that
2564 for variables believed to be not addressable.
2566 FIXME: This check possibly can match, because values returned
2567 via return slot optimization are not believed to have address
2568 taken by alias analysis. */
2569 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
2570 if (gimple_in_ssa_p (cfun))
2572 HOST_WIDE_INT bitsize;
2573 HOST_WIDE_INT bitpos;
2575 enum machine_mode mode;
2579 base = get_inner_reference (return_slot, &bitsize, &bitpos,
2581 &mode, &unsignedp, &volatilep,
2583 if (TREE_CODE (base) == INDIRECT_REF)
2584 base = TREE_OPERAND (base, 0);
2585 if (TREE_CODE (base) == SSA_NAME)
2586 base = SSA_NAME_VAR (base);
2587 mark_sym_for_renaming (base);
2589 var = return_slot_addr;
2594 gcc_assert (TREE_CODE (var) != SSA_NAME);
2595 TREE_ADDRESSABLE (var) |= TREE_ADDRESSABLE (result);
2597 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2598 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2599 && !DECL_GIMPLE_REG_P (result)
2601 DECL_GIMPLE_REG_P (var) = 0;
2606 /* All types requiring non-trivial constructors should have been handled. */
2607 gcc_assert (!TREE_ADDRESSABLE (callee_type));
2609 /* Attempt to avoid creating a new temporary variable. */
2611 && TREE_CODE (modify_dest) != SSA_NAME)
2613 bool use_it = false;
2615 /* We can't use MODIFY_DEST if there's type promotion involved. */
2616 if (!useless_type_conversion_p (callee_type, caller_type))
2619 /* ??? If we're assigning to a variable sized type, then we must
2620 reuse the destination variable, because we've no good way to
2621 create variable sized temporaries at this point. */
2622 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
2625 /* If the callee cannot possibly modify MODIFY_DEST, then we can
2626 reuse it as the result of the call directly. Don't do this if
2627 it would promote MODIFY_DEST to addressable. */
2628 else if (TREE_ADDRESSABLE (result))
2632 tree base_m = get_base_address (modify_dest);
2634 /* If the base isn't a decl, then it's a pointer, and we don't
2635 know where that's going to go. */
2636 if (!DECL_P (base_m))
2638 else if (is_global_var (base_m))
2640 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
2641 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
2642 && !DECL_GIMPLE_REG_P (result)
2643 && DECL_GIMPLE_REG_P (base_m))
2645 else if (!TREE_ADDRESSABLE (base_m))
2657 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
2659 var = copy_result_decl_to_var (result, id);
2660 if (gimple_in_ssa_p (cfun))
2663 add_referenced_var (var);
2666 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
2667 DECL_STRUCT_FUNCTION (caller)->local_decls
2668 = tree_cons (NULL_TREE, var,
2669 DECL_STRUCT_FUNCTION (caller)->local_decls);
2671 /* Do not have the rest of GCC warn about this variable as it should
2672 not be visible to the user. */
2673 TREE_NO_WARNING (var) = 1;
2675 declare_inline_vars (id->block, var);
2677 /* Build the use expr. If the return type of the function was
2678 promoted, convert it back to the expected type. */
2680 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
2681 use = fold_convert (caller_type, var);
2683 STRIP_USELESS_TYPE_CONVERSION (use);
2685 if (DECL_BY_REFERENCE (result))
2687 TREE_ADDRESSABLE (var) = 1;
2688 var = build_fold_addr_expr (var);
2692 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
2693 way, when the RESULT_DECL is encountered, it will be
2694 automatically replaced by the VAR_DECL. */
2695 insert_decl_map (id, result, var);
2697 /* Remember this so we can ignore it in remap_decls. */
2703 /* Callback through walk_tree. Determine if a DECL_INITIAL makes reference
2704 to a local label. */
2707 has_label_address_in_static_1 (tree *nodep, int *walk_subtrees, void *fnp)
2710 tree fn = (tree) fnp;
2712 if (TREE_CODE (node) == LABEL_DECL && DECL_CONTEXT (node) == fn)
2721 /* Callback through walk_tree. Determine if we've got an aggregate
2722 type that we can't support; return non-null if so. */
2725 cannot_copy_type_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
2726 void *data ATTRIBUTE_UNUSED)
2728 tree t, node = *nodep;
2730 if (TREE_CODE (node) == RECORD_TYPE || TREE_CODE (node) == UNION_TYPE)
2732 /* We cannot inline a function of the form
2734 void F (int i) { struct S { int ar[i]; } s; }
2736 Attempting to do so produces a catch-22.
2737 If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
2738 UNION_TYPE nodes, then it goes into infinite recursion on a
2739 structure containing a pointer to its own type. If it doesn't,
2740 then the type node for S doesn't get adjusted properly when F is inlined.
2743 ??? This is likely no longer true, but it's too late in the 4.0
2744 cycle to try to find out. This should be checked for 4.1. */
2745 for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
2746 if (variably_modified_type_p (TREE_TYPE (t), NULL))
2754 /* Determine if the function can be copied. If so return NULL. If
2755 not return a string describing the reason for failure. */
2758 copy_forbidden (struct function *fun, tree fndecl)
2760 const char *reason = fun->cannot_be_copied_reason;
2763 /* Only examine the function once. */
2764 if (fun->cannot_be_copied_set)
2767 /* We cannot copy a function that receives a non-local goto
2768 because we cannot remap the destination label used in the
2769 function that is performing the non-local goto. */
2770 /* ??? Actually, this should be possible, if we work at it.
2771 No doubt there's just a handful of places that simply
2772 assume it doesn't happen and don't substitute properly. */
2773 if (fun->has_nonlocal_label)
2775 reason = G_("function %q+F can never be copied "
2776 "because it receives a non-local goto");
2780 for (step = fun->local_decls; step; step = TREE_CHAIN (step))
2782 tree decl = TREE_VALUE (step);
2784 if (TREE_CODE (decl) == VAR_DECL
2785 && TREE_STATIC (decl)
2786 && !DECL_EXTERNAL (decl)
2787 && DECL_INITIAL (decl)
2788 && walk_tree_without_duplicates (&DECL_INITIAL (decl),
2789 has_label_address_in_static_1,
2792 reason = G_("function %q+F can never be copied because it saves "
2793 "address of local label in a static variable");
2797 if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
2798 && variably_modified_type_p (TREE_TYPE (decl), NULL)
2799 && walk_tree_without_duplicates (&TREE_TYPE (decl),
2800 cannot_copy_type_1, NULL))
2802 reason = G_("function %q+F can never be copied "
2803 "because it uses variable sized variables");
2809 fun->cannot_be_copied_reason = reason;
2810 fun->cannot_be_copied_set = true;
2815 static const char *inline_forbidden_reason;
2817 /* A callback for walk_gimple_seq to handle statements. Returns non-null
2818 iff a function cannot be inlined. Also sets the reason why. */
2821 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
2822 struct walk_stmt_info *wip)
2824 tree fn = (tree) wip->info;
2826 gimple stmt = gsi_stmt (*gsi);
2828 switch (gimple_code (stmt))
2831 /* Refuse to inline an alloca call unless the user explicitly forced it,
2832 as this may change the program's memory overhead drastically when the
2833 function using alloca is called in a loop. In the GCC present in
2834 SPEC2000, inlining into schedule_block caused it to require 2GB of
2835 RAM instead of 256MB. */
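/* User-level sketch of the hazard (illustrative only; "use" is a
   hypothetical function):

     void leaf (int n) { char *p = __builtin_alloca (n); use (p); }
     void driver (void) { int i; for (i = 0; i < 1000000; i++) leaf (i); }

   Normally each return from LEAF releases its alloca'd block; if LEAF
   were inlined, the blocks would accumulate until DRIVER returns, so
   stack usage would grow with the trip count.  */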
2836 if (gimple_alloca_call_p (stmt)
2837 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
2839 inline_forbidden_reason
2840 = G_("function %q+F can never be inlined because it uses "
2841 "alloca (override using the always_inline attribute)");
2842 *handled_ops_p = true;
2846 t = gimple_call_fndecl (stmt);
2850 /* We cannot inline functions that call setjmp. */
2851 if (setjmp_call_p (t))
2853 inline_forbidden_reason
2854 = G_("function %q+F can never be inlined because it uses setjmp");
2855 *handled_ops_p = true;
2859 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
2860 switch (DECL_FUNCTION_CODE (t))
2862 /* We cannot inline functions that take a variable number of arguments. */
2864 case BUILT_IN_VA_START:
2865 case BUILT_IN_NEXT_ARG:
2866 case BUILT_IN_VA_END:
2867 inline_forbidden_reason
2868 = G_("function %q+F can never be inlined because it "
2869 "uses variable argument lists");
2870 *handled_ops_p = true;
2873 case BUILT_IN_LONGJMP:
2874 /* We can't inline functions that call __builtin_longjmp at
2875 all. The non-local goto machinery really requires the
2876 destination be in a different function. If we allow the
2877 function calling __builtin_longjmp to be inlined into the
2878 function calling __builtin_setjmp, Things will Go Awry. */
2879 inline_forbidden_reason
2880 = G_("function %q+F can never be inlined because "
2881 "it uses setjmp-longjmp exception handling");
2882 *handled_ops_p = true;
2885 case BUILT_IN_NONLOCAL_GOTO:
2887 inline_forbidden_reason
2888 = G_("function %q+F can never be inlined because "
2889 "it uses non-local goto");
2890 *handled_ops_p = true;
2893 case BUILT_IN_RETURN:
2894 case BUILT_IN_APPLY_ARGS:
2895 /* If a __builtin_apply_args caller would be inlined,
2896 it would be saving arguments of the function it has
2897 been inlined into. Similarly, __builtin_return would
2898 return from the function that the call has been inlined into. */
2899 inline_forbidden_reason
2900 = G_("function %q+F can never be inlined because "
2901 "it uses __builtin_return or __builtin_apply_args");
2902 *handled_ops_p = true;
2911 t = gimple_goto_dest (stmt);
2913 /* We will not inline a function which uses computed goto. The
2914 addresses of its local labels, which may be tucked into
2915 global storage, are of course not constant across
2916 instantiations, which causes unexpected behavior. */
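/* Illustrative example (not from the original sources):

     void f (int i)
     {
       static void *tbl[] = { &&a, &&b };
       goto *tbl[i];
     a: ...;
     b: ...;
     }

   The label addresses stored in TBL keep pointing at F's original
   labels; a copy of the body gets fresh labels, so the stored
   addresses would jump back into the original body.  */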
2917 if (TREE_CODE (t) != LABEL_DECL)
2919 inline_forbidden_reason
2920 = G_("function %q+F can never be inlined "
2921 "because it contains a computed goto");
2922 *handled_ops_p = true;
2931 *handled_ops_p = false;
2935 /* Return true if FNDECL is a function that cannot be inlined into another one. */
2939 inline_forbidden_p (tree fndecl)
2941 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
2942 struct walk_stmt_info wi;
2943 struct pointer_set_t *visited_nodes;
2945 bool forbidden_p = false;
2947 /* First check for shared reasons not to copy the code. */
2948 inline_forbidden_reason = copy_forbidden (fun, fndecl);
2949 if (inline_forbidden_reason != NULL)
2952 /* Next, walk the statements of the function looking for
2953 constructs we can't handle or that are non-optimal for inlining. */
2954 visited_nodes = pointer_set_create ();
2955 memset (&wi, 0, sizeof (wi));
2956 wi.info = (void *) fndecl;
2957 wi.pset = visited_nodes;
2959 FOR_EACH_BB_FN (bb, fun)
2962 gimple_seq seq = bb_seq (bb);
2963 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
2964 forbidden_p = (ret != NULL);
2969 pointer_set_destroy (visited_nodes);
2973 /* Returns nonzero if FN is a function that does not have any
2974 fundamental inline blocking properties. */
2977 tree_inlinable_function_p (tree fn)
2979 bool inlinable = true;
2983 /* If we've already decided this function shouldn't be inlined,
2984 there's no need to check again. */
2985 if (DECL_UNINLINABLE (fn))
2988 /* We only warn for functions declared `inline' by the user. */
2989 do_warning = (warn_inline
2990 && DECL_DECLARED_INLINE_P (fn)
2991 && !DECL_NO_INLINE_WARNING_P (fn)
2992 && !DECL_IN_SYSTEM_HEADER (fn));
2994 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
2997 && always_inline == NULL)
3000 warning (OPT_Winline, "function %q+F can never be inlined because it "
3001 "is suppressed using -fno-inline", fn);
3005 /* Don't auto-inline anything that might not be bound within
3006 this unit of translation. */
3007 else if (!DECL_DECLARED_INLINE_P (fn)
3008 && DECL_REPLACEABLE_P (fn))
3011 else if (!function_attribute_inlinable_p (fn))
3014 warning (OPT_Winline, "function %q+F can never be inlined because it "
3015 "uses attributes conflicting with inlining", fn);
3019 else if (inline_forbidden_p (fn))
3021 /* See if we should warn about uninlinable functions. Previously,
3022 some of these warnings would be issued while trying to expand
3023 the function inline, but that would cause multiple warnings
3024 about functions that would for example call alloca. But since
3025 this is a property of the function, just one warning is enough.
3026 As a bonus we can now give more details about the reason why a
3027 function is not inlinable. */
3029 sorry (inline_forbidden_reason, fn);
3030 else if (do_warning)
3031 warning (OPT_Winline, inline_forbidden_reason, fn);
3036 /* Squirrel away the result so that we don't have to check again. */
3037 DECL_UNINLINABLE (fn) = !inlinable;
3042 /* Estimate the cost of a memory move. Use machine dependent
3043 word size and take possible memcpy call into account. */
3046 estimate_move_cost (tree type)
3050 gcc_assert (!VOID_TYPE_P (type));
3052 size = int_size_in_bytes (type);
3054 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
3055 /* Cost of a memcpy call, 3 arguments and the call. */
3058 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
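/* Worked example of the formula above (illustrative; the actual
   MOVE_MAX_PIECES value is target-dependent): assuming
   MOVE_MAX_PIECES == 8, a 24-byte aggregate costs (24 + 7) / 8 == 3
   move "instructions", while sizes above MOVE_MAX_PIECES * MOVE_RATIO
   are charged the flat memcpy-call cost by the branch above.  */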
3061 /* Returns cost of operation CODE, according to WEIGHTS */
3064 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3065 tree op1 ATTRIBUTE_UNUSED, tree op2)
3069 /* These are "free" conversions, or their presumed cost
3070 is folded into other operations. */
3077 /* Assign cost of 1 to usual operations.
3078 ??? We may consider mapping RTL costs to this. */
3083 case POINTER_PLUS_EXPR:
3087 case ADDR_SPACE_CONVERT_EXPR:
3088 case FIXED_CONVERT_EXPR:
3089 case FIX_TRUNC_EXPR:
3101 case VEC_LSHIFT_EXPR:
3102 case VEC_RSHIFT_EXPR:
3109 case TRUTH_ANDIF_EXPR:
3110 case TRUTH_ORIF_EXPR:
3111 case TRUTH_AND_EXPR:
3113 case TRUTH_XOR_EXPR:
3114 case TRUTH_NOT_EXPR:
3123 case UNORDERED_EXPR:
3134 case PREDECREMENT_EXPR:
3135 case PREINCREMENT_EXPR:
3136 case POSTDECREMENT_EXPR:
3137 case POSTINCREMENT_EXPR:
3139 case REALIGN_LOAD_EXPR:
3141 case REDUC_MAX_EXPR:
3142 case REDUC_MIN_EXPR:
3143 case REDUC_PLUS_EXPR:
3144 case WIDEN_SUM_EXPR:
3145 case WIDEN_MULT_EXPR:
3148 case VEC_WIDEN_MULT_HI_EXPR:
3149 case VEC_WIDEN_MULT_LO_EXPR:
3150 case VEC_UNPACK_HI_EXPR:
3151 case VEC_UNPACK_LO_EXPR:
3152 case VEC_UNPACK_FLOAT_HI_EXPR:
3153 case VEC_UNPACK_FLOAT_LO_EXPR:
3154 case VEC_PACK_TRUNC_EXPR:
3155 case VEC_PACK_SAT_EXPR:
3156 case VEC_PACK_FIX_TRUNC_EXPR:
3157 case VEC_EXTRACT_EVEN_EXPR:
3158 case VEC_EXTRACT_ODD_EXPR:
3159 case VEC_INTERLEAVE_HIGH_EXPR:
3160 case VEC_INTERLEAVE_LOW_EXPR:
3164 /* A few special cases of expensive operations. This is useful
3165 for avoiding the inlining of functions having too many of these. */
3166 case TRUNC_DIV_EXPR:
3168 case FLOOR_DIV_EXPR:
3169 case ROUND_DIV_EXPR:
3170 case EXACT_DIV_EXPR:
3171 case TRUNC_MOD_EXPR:
3173 case FLOOR_MOD_EXPR:
3174 case ROUND_MOD_EXPR:
3176 if (TREE_CODE (op2) != INTEGER_CST)
3177 return weights->div_mod_cost;
3181 /* We expect a copy assignment with no operator. */
3182 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3188 /* Estimate number of instructions that will be created by expanding
3189 the statements in the statement sequence STMTS.
3190 WEIGHTS contains weights attributed to various constructs. */
3193 int estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3196 gimple_stmt_iterator gsi;
3199 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3200 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3206 /* Estimate number of instructions that will be created by expanding STMT.
3207 WEIGHTS contains weights attributed to various constructs. */
3210 estimate_num_insns (gimple stmt, eni_weights *weights)
3213 enum gimple_code code = gimple_code (stmt);
3220 /* Try to estimate the cost of assignments. We have three cases to deal with:
3222 1) Simple assignments to registers;
3223 2) Stores to things that must live in memory. This includes
3224 "normal" stores to scalars, but also assignments of large
3225 structures, or constructors of big arrays;
3227 Let us look at the first two cases, assuming we have "a = b + C":
3228 <GIMPLE_ASSIGN <var_decl "a">
3229 <plus_expr <var_decl "b"> <constant C>>
3230 If "a" is a GIMPLE register, the assignment to it is free on almost
3231 any target, because "a" usually ends up in a real register. Hence
3232 the only cost of this expression comes from the PLUS_EXPR, and we
3233 can ignore the GIMPLE_ASSIGN.
3234 If "a" is not a GIMPLE register, the assignment to "a" will most
3235 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
3236 of moving something into "a", which we compute using the function
3237 estimate_move_cost. */
3238 lhs = gimple_assign_lhs (stmt);
3239 rhs = gimple_assign_rhs1 (stmt);
3241 if (is_gimple_reg (lhs))
3244 cost = estimate_move_cost (TREE_TYPE (lhs));
3246 if (!is_gimple_reg (rhs) && !is_gimple_min_invariant (rhs))
3247 cost += estimate_move_cost (TREE_TYPE (rhs));
3249 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
3250 gimple_assign_rhs1 (stmt),
3251 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
3252 == GIMPLE_BINARY_RHS
3253 ? gimple_assign_rhs2 (stmt) : NULL);
3257 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
3258 gimple_op (stmt, 0),
3259 gimple_op (stmt, 1));
3263 /* Take into account the cost of the switch + guess 2 conditional jumps for each case label.
3266 TODO: once the switch expansion logic is sufficiently separated, we can
3267 do a better job of estimating the cost of the switch. */
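/* Worked example of the two estimates below (illustrative): a switch
   with 16 labels is charged floor_log2 (16) * 2 == 8 when estimating
   time (roughly a balanced decision tree), but 16 * 2 == 32 when
   estimating size.  */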
3268 if (weights->time_based)
3269 cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
3271 cost = gimple_switch_num_labels (stmt) * 2;
3276 tree decl = gimple_call_fndecl (stmt);
3277 tree addr = gimple_call_fn (stmt);
3278 tree funtype = TREE_TYPE (addr);
3280 if (POINTER_TYPE_P (funtype))
3281 funtype = TREE_TYPE (funtype);
3283 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
3284 cost = weights->target_builtin_call_cost;
3286 cost = weights->call_cost;
3288 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
3289 switch (DECL_FUNCTION_CODE (decl))
3291 case BUILT_IN_CONSTANT_P:
3293 case BUILT_IN_EXPECT:
3296 /* Prefetch instruction is not expensive. */
3297 case BUILT_IN_PREFETCH:
3298 cost = weights->target_builtin_call_cost;
3306 funtype = TREE_TYPE (decl);
3308 if (!VOID_TYPE_P (TREE_TYPE (funtype)))
3309 cost += estimate_move_cost (TREE_TYPE (funtype));
3310 /* Our cost must be kept in sync with
3311 cgraph_estimate_size_after_inlining that does use function
3312 declaration to figure out the arguments. */
3313 if (decl && DECL_ARGUMENTS (decl))
3316 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
3317 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3318 cost += estimate_move_cost (TREE_TYPE (arg));
3320 else if (funtype && prototype_p (funtype))
3323 for (t = TYPE_ARG_TYPES (funtype); t && t != void_list_node;
3325 if (!VOID_TYPE_P (TREE_VALUE (t)))
3326 cost += estimate_move_cost (TREE_VALUE (t));
3330 for (i = 0; i < gimple_call_num_args (stmt); i++)
3332 tree arg = gimple_call_arg (stmt, i);
3333 if (!VOID_TYPE_P (TREE_TYPE (arg)))
3334 cost += estimate_move_cost (TREE_TYPE (arg));
3346 case GIMPLE_PREDICT:
3351 return asm_str_count (gimple_asm_string (stmt));
3354 /* This is either going to be an external function call with one
3355 argument, or two register copy statements plus a goto. */
3358 case GIMPLE_EH_DISPATCH:
3359 /* ??? This is going to turn into a switch statement. Ideally
3360 we'd have a look at the eh region and estimate the number of edges involved. */
3365 return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
3367 case GIMPLE_EH_FILTER:
3368 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
3371 return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
3374 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
3375 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
3377 /* OpenMP directives are generally very expensive. */
3379 case GIMPLE_OMP_RETURN:
3380 case GIMPLE_OMP_SECTIONS_SWITCH:
3381 case GIMPLE_OMP_ATOMIC_STORE:
3382 case GIMPLE_OMP_CONTINUE:
3383 /* ...except these, which are cheap. */
3386 case GIMPLE_OMP_ATOMIC_LOAD:
3387 return weights->omp_cost;
3389 case GIMPLE_OMP_FOR:
3390 return (weights->omp_cost
3391 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
3392 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
3394 case GIMPLE_OMP_PARALLEL:
3395 case GIMPLE_OMP_TASK:
3396 case GIMPLE_OMP_CRITICAL:
3397 case GIMPLE_OMP_MASTER:
3398 case GIMPLE_OMP_ORDERED:
3399 case GIMPLE_OMP_SECTION:
3400 case GIMPLE_OMP_SECTIONS:
3401 case GIMPLE_OMP_SINGLE:
3402 return (weights->omp_cost
3403 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
3412 /* Estimate number of instructions that will be created by expanding
3413 function FNDECL. WEIGHTS contains weights attributed to various constructs. */
3417 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
3419 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
3420 gimple_stmt_iterator bsi;
3424 gcc_assert (my_function && my_function->cfg);
3425 FOR_EACH_BB_FN (bb, my_function)
3427 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
3428 n += estimate_num_insns (gsi_stmt (bsi), weights);
3435 /* Initializes weights used by estimate_num_insns. */
3438 init_inline_once (void)
3440 eni_size_weights.call_cost = 1;
3441 eni_size_weights.target_builtin_call_cost = 1;
3442 eni_size_weights.div_mod_cost = 1;
3443 eni_size_weights.omp_cost = 40;
3444 eni_size_weights.time_based = false;
3446 /* Estimating time for call is difficult, since we have no idea what the
3447 called function does. In the current uses of eni_time_weights,
3448 underestimating the cost does less harm than overestimating it, so
3449 we choose a rather small value here. */
3450 eni_time_weights.call_cost = 10;
3451 eni_time_weights.target_builtin_call_cost = 10;
3452 eni_time_weights.div_mod_cost = 10;
3453 eni_time_weights.omp_cost = 40;
3454 eni_time_weights.time_based = true;
3457 /* Estimate the number of instructions in a gimple_seq. */
3460 count_insns_seq (gimple_seq seq, eni_weights *weights)
3462 gimple_stmt_iterator gsi;
3464 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
3465 n += estimate_num_insns (gsi_stmt (gsi), weights);
3471 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
3474 prepend_lexical_block (tree current_block, tree new_block)
3476 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
3477 BLOCK_SUBBLOCKS (current_block) = new_block;
3478 BLOCK_SUPERCONTEXT (new_block) = current_block;
3481 /* Fetch callee declaration from the call graph edge going from NODE and
3482 associated with the STMT call statement. Return NULL_TREE if not found. */
3484 get_indirect_callee_fndecl (struct cgraph_node *node, gimple stmt)
3486 struct cgraph_edge *cs;
3488 cs = cgraph_edge (node, stmt);
3490 return cs->callee->decl;
3495 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
3498 expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
3502 struct pointer_map_t *st, *dst;
3505 location_t saved_location;
3506 struct cgraph_edge *cg_edge;
3507 cgraph_inline_failed_t reason;
3508 basic_block return_block;
3510 gimple_stmt_iterator gsi, stmt_gsi;
3511 bool successfully_inlined = FALSE;
3512 bool purge_dead_abnormal_edges;
3516 /* Set input_location here so we get the right instantiation context
3517 if we call instantiate_decl from inlinable_function_p. */
3518 saved_location = input_location;
3519 if (gimple_has_location (stmt))
3520 input_location = gimple_location (stmt);
3522 /* From here on, we're only interested in CALL_EXPRs. */
3523 if (gimple_code (stmt) != GIMPLE_CALL)
3526 /* First, see if we can figure out what function is being called.
3527 If we cannot, then there is no hope of inlining the function. */
3528 fn = gimple_call_fndecl (stmt);
3531 fn = get_indirect_callee_fndecl (id->dst_node, stmt);
3536 /* Turn forward declarations into real ones. */
3537 fn = cgraph_node (fn)->decl;
3539 /* If FN is a declaration of a function in a nested scope that was
3540 globally declared inline, we don't set its DECL_INITIAL.
3541 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
3542 C++ front-end uses it for cdtors to refer to their internal
3543 declarations, which are not real functions. Fortunately those
3544 don't have trees to be saved, so we can tell by checking their gimple_body. */
3546 if (!DECL_INITIAL (fn)
3547 && DECL_ABSTRACT_ORIGIN (fn)
3548 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
3549 fn = DECL_ABSTRACT_ORIGIN (fn);
3551 /* Objective C and Fortran still call tree_rest_of_compilation directly.
3552 Kill this check once this is fixed. */
3553 if (!id->dst_node->analyzed)
3556 cg_edge = cgraph_edge (id->dst_node, stmt);
3558 /* Don't inline functions with different EH personalities. */
3559 if (DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3560 && DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl)
3561 && (DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3562 != DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl)))
3565 /* Don't try to inline functions that are not well-suited to inlining. */
3567 if (!cgraph_inline_p (cg_edge, &reason))
3569 /* If this call was originally indirect, we do not want to emit any
3570 inlining related warnings or sorry messages because there are no
3571 guarantees regarding those. */
3572 if (cg_edge->indirect_call)
3575 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
3576 /* Avoid warnings during early inline pass. */
3577 && cgraph_global_info_ready)
3579 sorry ("inlining failed in call to %q+F: %s", fn,
3580 cgraph_inline_failed_string (reason));
3581 sorry ("called from here");
3583 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
3584 && !DECL_IN_SYSTEM_HEADER (fn)
3585 && reason != CIF_UNSPECIFIED
3586 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
3587 /* Avoid warnings during early inline pass. */
3588 && cgraph_global_info_ready)
3590 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
3591 fn, cgraph_inline_failed_string (reason));
3592 warning (OPT_Winline, "called from here");
3596 fn = cg_edge->callee->decl;
3598 #ifdef ENABLE_CHECKING
3599 if (cg_edge->callee->decl != id->dst_node->decl)
3600 verify_cgraph_node (cg_edge->callee);
3603 /* We will be inlining this callee. */
3604 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
3606 /* Update the caller's EH personality. */
3607 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
3608 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
3609 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
3611 /* Split the block holding the GIMPLE_CALL. */
3612 e = split_block (bb, stmt);
3614 return_block = e->dest;
3617 /* split_block splits after the statement; work around this by
3618 moving the call into the second block manually. Not pretty,
3619 but seems easier than doing the CFG manipulation by hand
3620 when the GIMPLE_CALL is in the last statement of BB. */
3621 stmt_gsi = gsi_last_bb (bb);
3622 gsi_remove (&stmt_gsi, false);
3624 /* If the GIMPLE_CALL was in the last statement of BB, it may have
3625 been the source of abnormal edges. In this case, schedule
3626 the removal of dead abnormal edges. */
3627 gsi = gsi_start_bb (return_block);
3628 if (gsi_end_p (gsi))
3630 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3631 purge_dead_abnormal_edges = true;
3635 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
3636 purge_dead_abnormal_edges = false;
3639 stmt_gsi = gsi_start_bb (return_block);
3641 /* Build a block containing code to initialize the arguments, the
3642 actual inline expansion of the body, and a label for the return
3643 statements within the function to jump to. The type of the
3644 statement expression is the return type of the function call. */
3645 id->block = make_node (BLOCK);
3646 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
3647 BLOCK_SOURCE_LOCATION (id->block) = input_location;
3648 prepend_lexical_block (gimple_block (stmt), id->block);
3650 /* Local declarations will be replaced by their equivalents in this map. */
3653 id->decl_map = pointer_map_create ();
3654 dst = id->debug_map;
3655 id->debug_map = NULL;
3657 /* Record the function we are about to inline. */
3659 id->src_node = cg_edge->callee;
3660 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
3661 id->gimple_call = stmt;
3663 gcc_assert (!id->src_cfun->after_inlining);
3666 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
3668 gimple_stmt_iterator si = gsi_last_bb (bb);
3669 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
3673 initialize_inlined_parameters (id, stmt, fn, bb);
3675 if (DECL_INITIAL (fn))
3676 prepend_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
3678 /* Return statements in the function body will be replaced by jumps
3679 to the RET_LABEL. */
3680 gcc_assert (DECL_INITIAL (fn));
3681 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
3683 /* Find the LHS to which the result of this call is assigned. */
3685 if (gimple_call_lhs (stmt))
3687 modify_dest = gimple_call_lhs (stmt);
3689 /* The function which we are inlining might not return a value,
3690 in which case we should issue a warning that the function
3691 does not return a value. In that case the optimizers will
3692 see that the variable to which the value is assigned was not
3693 initialized. We do not want to issue a warning about that
3694 uninitialized variable. */
3695 if (DECL_P (modify_dest))
3696 TREE_NO_WARNING (modify_dest) = 1;
3698 if (gimple_call_return_slot_opt_p (stmt))
3700 return_slot = modify_dest;
3707 /* If we are inlining a call to the C++ operator new, we don't want
3708 to use type based alias analysis on the return value. Otherwise
3709 we may get confused if the compiler sees that the inlined new
3710 function returns a pointer which was just deleted. See bug
3712 if (DECL_IS_OPERATOR_NEW (fn))
3718 /* Declare the return variable for the function. */
3719 use_retvar = declare_return_variable (id, return_slot, modify_dest);
3721 /* Add local vars in this inlined callee to caller. */
3722 t_step = id->src_cfun->local_decls;
3723 for (; t_step; t_step = TREE_CHAIN (t_step))
3725 var = TREE_VALUE (t_step);
3726 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
3728 if (var_ann (var) && add_referenced_var (var))
3729 cfun->local_decls = tree_cons (NULL_TREE, var,
3732 else if (!can_be_nonlocal (var, id))
3733 cfun->local_decls = tree_cons (NULL_TREE, remap_decl (var, id),
3737 if (dump_file && (dump_flags & TDF_DETAILS))
3739 fprintf (dump_file, "Inlining ");
3740 print_generic_expr (dump_file, id->src_fn, 0);
3741 fprintf (dump_file, " to ");
3742 print_generic_expr (dump_file, id->dst_fn, 0);
3743 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
3746 /* This is it. Duplicate the callee body. Assume callee is
3747 pre-gimplified. Note that we must not alter the caller
3748 function in any way before this point, as this CALL_EXPR may be
3749 a self-referential call; if we're calling ourselves, we need to
3750 duplicate our body before altering anything. */
3751 copy_body (id, bb->count,
3752 cg_edge->frequency * REG_BR_PROB_BASE / CGRAPH_FREQ_BASE,
3755 /* Reset the escaped and callused solutions. */
3756 if (cfun->gimple_df)
3758 pt_solution_reset (&cfun->gimple_df->escaped);
3759 pt_solution_reset (&cfun->gimple_df->callused);
3765 pointer_map_destroy (id->debug_map);
3766 id->debug_map = dst;
3768 pointer_map_destroy (id->decl_map);
3771 /* Unlink the call's virtual operands before replacing it. */
3772 unlink_stmt_vdef (stmt);
3774 /* If the inlined function returns a result that we care about,
3775 substitute the GIMPLE_CALL with an assignment of the return
3776 variable to the LHS of the call. That is, if STMT was
3777 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
3778 if (use_retvar && gimple_call_lhs (stmt))
3780 gimple old_stmt = stmt;
3781 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
3782 gsi_replace (&stmt_gsi, stmt, false);
3783 if (gimple_in_ssa_p (cfun))
3784 mark_symbols_for_renaming (stmt);
3785 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
3789 /* Handle the case of inlining a function with no return
3790 statement, which causes the return value to become undefined. */
3791 if (gimple_call_lhs (stmt)
3792 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
3794 tree name = gimple_call_lhs (stmt);
3795 tree var = SSA_NAME_VAR (name);
3796 tree def = gimple_default_def (cfun, var);
3800 /* If the variable is used undefined, make this name
3801 undefined via a move. */
3802 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
3803 gsi_replace (&stmt_gsi, stmt, true);
3807 /* Otherwise make this variable undefined. */
3808 gsi_remove (&stmt_gsi, true);
3809 set_default_def (var, name);
3810 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
3814 gsi_remove (&stmt_gsi, true);
3817 if (purge_dead_abnormal_edges)
3818 gimple_purge_dead_abnormal_call_edges (return_block);
3820 /* If the value of the new expression is ignored, that's OK. We
3821 don't warn about this for CALL_EXPRs, so we shouldn't warn about
3822 the equivalent inlined version either. */
3823 if (is_gimple_assign (stmt))
3825 gcc_assert (gimple_assign_single_p (stmt)
3826 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
3827 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
3830 /* Output the inlining info for this abstract function, since it has been
3831 inlined. If we don't do this now, we can lose the information about the
3832 variables in the function when the blocks get blown away as soon as we
3833 remove the cgraph node. */
3834 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
3836 /* Update callgraph if needed. */
3837 cgraph_remove_node (cg_edge->callee);
3839 id->block = NULL_TREE;
3840 successfully_inlined = TRUE;
3843 input_location = saved_location;
3844 return successfully_inlined;
3847 /* Expand call statements reachable from STMT_P.
3848 We can only have CALL_EXPRs as the "toplevel" tree code or nested
3849 in a MODIFY_EXPR. See tree-gimple.c:get_call_expr_in(). We can
3850 unfortunately not use that function here because we need a pointer
3851 to the CALL_EXPR, not the tree itself. */
3854 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
3856 gimple_stmt_iterator gsi;
3858 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3860 gimple stmt = gsi_stmt (gsi);
3862 if (is_gimple_call (stmt)
3863 && expand_call_inline (bb, stmt, id))
3871 /* Walk all basic blocks created after FIRST and try to fold every statement
3872 in the STATEMENTS pointer set. */
3875 fold_marked_statements (int first, struct pointer_set_t *statements)
3877 for (; first < n_basic_blocks; first++)
3878 if (BASIC_BLOCK (first))
3880 gimple_stmt_iterator gsi;
3882 for (gsi = gsi_start_bb (BASIC_BLOCK (first));
3885 if (pointer_set_contains (statements, gsi_stmt (gsi)))
3887 gimple old_stmt = gsi_stmt (gsi);
3888 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
3890 if (old_decl && DECL_BUILT_IN (old_decl))
3892 /* Folding builtins can create multiple instructions,
3893 we need to look at all of them. */
3894 gimple_stmt_iterator i2 = gsi;
3896 if (fold_stmt (&gsi))
3900 i2 = gsi_start_bb (BASIC_BLOCK (first));
3905 new_stmt = gsi_stmt (i2);
3906 update_stmt (new_stmt);
3907 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
3910 if (new_stmt == gsi_stmt (gsi))
3912 /* It is okay to check only the very last
3913 of these statements. If it is a throwing
3914 statement, nothing will change. If it isn't,
3915 this can remove EH edges. The only way this
3916 could be incorrect is if some intermediate
3917 statement threw but the last one did not; then
3918 we would have to split the block, which we
3919 can't do here and would lose anyway. And as
3920 builtins probably never throw, this all is moot anyway. */
3922 if (maybe_clean_or_replace_eh_stmt (old_stmt,
3924 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
3931 else if (fold_stmt (&gsi))
3933 /* Re-read the statement from GSI as fold_stmt() may have changed it. */
3935 gimple new_stmt = gsi_stmt (gsi);
3936 update_stmt (new_stmt);
3938 if (is_gimple_call (old_stmt)
3939 || is_gimple_call (new_stmt))
3940 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
3943 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
3944 gimple_purge_dead_eh_edges (BASIC_BLOCK (first));
3950 /* Return true if BB has at least one abnormal outgoing edge. */
3953 has_abnormal_outgoing_edge_p (basic_block bb)
3958 FOR_EACH_EDGE (e, ei, bb->succs)
3959 if (e->flags & EDGE_ABNORMAL)
3965 /* Expand calls to inline functions in the body of FN. */
3968 optimize_inline_calls (tree fn)
3972 int last = n_basic_blocks;
3973 struct gimplify_ctx gctx;
3975 /* There is no point in performing inlining if errors have already
3976 occurred -- and we might crash if we try to inline invalid code. */
3978 if (errorcount || sorrycount)
3982 memset (&id, 0, sizeof (id));
3984 id.src_node = id.dst_node = cgraph_node (fn);
3986 /* Or any functions that aren't finished yet. */
3987 if (current_function_decl)
3988 id.dst_fn = current_function_decl;
3990 id.copy_decl = copy_decl_maybe_to_var;
3991 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
3992 id.transform_new_cfg = false;
3993 id.transform_return_to_modify = true;
3994 id.transform_lang_insert_block = NULL;
3995 id.statements_to_fold = pointer_set_create ();
3997 push_gimplify_context (&gctx);
3999 /* We make no attempts to keep dominance info up-to-date. */
4000 free_dominance_info (CDI_DOMINATORS);
4001 free_dominance_info (CDI_POST_DOMINATORS);
4003 /* Register specific gimple functions. */
4004 gimple_register_cfg_hooks ();
4006 /* Reach the trees by walking over the CFG, and note the
4007 enclosing basic-blocks in the call edges. */
4008 /* We walk the blocks going forward, because inlined function bodies
4009 will split id->current_basic_block, and the new blocks will
4010 follow it; we'll trudge through them, processing their CALL_EXPRs along the way. */
4013 gimple_expand_calls_inline (bb, &id);
4015 pop_gimplify_context (NULL);
4017 #ifdef ENABLE_CHECKING
4019 struct cgraph_edge *e;
4021 verify_cgraph_node (id.dst_node);
4023 /* Double check that we inlined everything we are supposed to inline. */
4024 for (e = id.dst_node->callees; e; e = e->next_callee)
4025 gcc_assert (e->inline_failed);
4029 /* Fold the statements before compacting/renumbering the basic blocks. */
4030 fold_marked_statements (last, id.statements_to_fold);
4031 pointer_set_destroy (id.statements_to_fold);
4033 gcc_assert (!id.debug_stmts);
4035 /* Renumber the (code) basic_blocks consecutively. */
4037 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4040 fold_cond_expr_cond ();
4041 delete_unreachable_blocks_update_callgraph (&id);
4042 #ifdef ENABLE_CHECKING
4043 verify_cgraph_node (id.dst_node);
4046 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4047 not possible yet - the IPA passes might make various functions not
4048 throw, and they don't care to proactively update local EH info. This is
4049 done later in the fixup_cfg pass, which also executes the verification. */
4050 return (TODO_update_ssa
4052 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4053 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
4056 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
4059 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
4061 enum tree_code code = TREE_CODE (*tp);
4062 enum tree_code_class cl = TREE_CODE_CLASS (code);
4064 /* We make copies of most nodes. */
4065 if (IS_EXPR_CODE_CLASS (cl)
4066 || code == TREE_LIST
4068 || code == TYPE_DECL
4069 || code == OMP_CLAUSE)
4071 /* Because the chain gets clobbered when we make a copy, we save it here. */
4073 tree chain = NULL_TREE, new_tree;
4075 chain = TREE_CHAIN (*tp);
4077 /* Copy the node. */
4078 new_tree = copy_node (*tp);
4080 /* Propagate mudflap marked-ness. */
4081 if (flag_mudflap && mf_marked_p (*tp))
4086 /* Now, restore the chain, if appropriate. That will cause
4087 walk_tree to walk into the chain as well. */
4088 if (code == PARM_DECL
4089 || code == TREE_LIST
4090 || code == OMP_CLAUSE)
4091 TREE_CHAIN (*tp) = chain;
4093 /* For now, we don't update BLOCKs when we make copies. So, we
4094 have to nullify all BIND_EXPRs. */
4095 if (TREE_CODE (*tp) == BIND_EXPR)
4096 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
4098 else if (code == CONSTRUCTOR)
4100 /* CONSTRUCTOR nodes need special handling because
4101 we need to duplicate the vector of elements. */
4104 new_tree = copy_node (*tp);
4106 /* Propagate mudflap marked-ness. */
4107 if (flag_mudflap && mf_marked_p (*tp))
4110 CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc,
4111 CONSTRUCTOR_ELTS (*tp));
4114 else if (TREE_CODE_CLASS (code) == tcc_type)
4116 else if (TREE_CODE_CLASS (code) == tcc_declaration)
4118 else if (TREE_CODE_CLASS (code) == tcc_constant)
4121 gcc_assert (code != STATEMENT_LIST);
4125 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
4126 information indicating to what new SAVE_EXPR this one should be mapped,
4127 use that one. Otherwise, create a new node and enter it in ST. FN is
4128 the function into which the copy will be placed. */
4131 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
4133 struct pointer_map_t *st = (struct pointer_map_t *) st_;
4137 /* See if we already encountered this SAVE_EXPR. */
4138 n = (tree *) pointer_map_contains (st, *tp);
4140 /* If we didn't already remap this SAVE_EXPR, do so now. */
4143 t = copy_node (*tp);
4145 /* Remember this SAVE_EXPR. */
4146 *pointer_map_insert (st, *tp) = t;
4147 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
4148 *pointer_map_insert (st, t) = t;
4152 /* We've already walked into this SAVE_EXPR; don't do it again. */
4157 /* Replace this SAVE_EXPR with the copy. */
4161 /* Called via walk_tree. If *TP points to a DECL_STMT for a local label,
4162 copies the declaration and enters it in the splay_tree in DATA (which is
4163 really a `copy_body_data *'). */
4166 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
4169 copy_body_data *id = (copy_body_data *) data;
4171 /* Don't walk into types. */
4175 else if (TREE_CODE (*tp) == LABEL_EXPR)
4177 tree decl = TREE_OPERAND (*tp, 0);
4179 /* Copy the decl and remember the copy. */
4180 insert_decl_map (id, decl, id->copy_decl (decl, id));
4186 /* Perform any modifications to EXPR required when it is unsaved. Does
4187 not recurse into EXPR's subtrees. */
4190 unsave_expr_1 (tree expr)
4192 switch (TREE_CODE (expr))
4195 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4196 It's OK for this to happen if it was part of a subtree that
4197 isn't immediately expanded, such as operand 2 of another TARGET_EXPR. */
4199 if (TREE_OPERAND (expr, 1))
4202 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4203 TREE_OPERAND (expr, 3) = NULL_TREE;
4211 /* Called via walk_tree when an expression is unsaved.  Using the
4212 pointer map pointed to by ST (really the decl map in DATA),
4213 remaps all local declarations to appropriate replacements. */
4216 unsave_r (tree *tp, int *walk_subtrees, void *data)
4218 copy_body_data *id = (copy_body_data *) data;
4219 struct pointer_map_t *st = id->decl_map;
4222 /* Only a local declaration (variable or label). */
4223 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
4224 || TREE_CODE (*tp) == LABEL_DECL)
4226 /* Lookup the declaration. */
4227 n = (tree *) pointer_map_contains (st, *tp);
4229 /* If it's there, remap it. */
4234 else if (TREE_CODE (*tp) == STATEMENT_LIST)
4236 else if (TREE_CODE (*tp) == BIND_EXPR)
4237 copy_bind_expr (tp, walk_subtrees, id);
4238 else if (TREE_CODE (*tp) == SAVE_EXPR
4239 || TREE_CODE (*tp) == TARGET_EXPR)
4240 remap_save_expr (tp, st, walk_subtrees);
4243 copy_tree_r (tp, walk_subtrees, NULL);
4245 /* Do whatever unsaving is required. */
4246 unsave_expr_1 (*tp);
4249 /* Keep iterating. */
4253 /* Copies everything in EXPR and replaces variables, labels
4254 and SAVE_EXPRs local to EXPR. */
4257 unsave_expr_now (tree expr)
4261 /* There's nothing to do for NULL_TREE. */
4266 memset (&id, 0, sizeof (id));
4267 id.src_fn = current_function_decl;
4268 id.dst_fn = current_function_decl;
4269 id.decl_map = pointer_map_create ();
4270 id.debug_map = NULL;
4272 id.copy_decl = copy_decl_no_change;
4273 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4274 id.transform_new_cfg = false;
4275 id.transform_return_to_modify = false;
4276 id.transform_lang_insert_block = NULL;
4278 /* Walk the tree once to find local labels. */
4279 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
4281 /* Walk the tree again, copying, remapping, and unsaving. */
4282 walk_tree (&expr, unsave_r, &id, NULL);
4285 pointer_map_destroy (id.decl_map);
4287 pointer_map_destroy (id.debug_map);
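/* A minimal caller sketch (hypothetical, for illustration only): obtain a
   private, unshared copy of an expression before emitting it a second time,
   so that SAVE_EXPRs, local labels and local variables are not shared
   between the two uses.  */

static tree
example_duplicate_for_second_use (tree expr)
{
  return unsave_expr_now (expr);
}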
4292 /* Called via walk_gimple_seq.  If *GSIP points to a GIMPLE_LABEL for a local
4293 label, copies the declaration and enters it in the decl map in DATA (which
4294 is really a 'copy_body_data *'). */
4297 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
4298 bool *handled_ops_p ATTRIBUTE_UNUSED,
4299 struct walk_stmt_info *wi)
4301 copy_body_data *id = (copy_body_data *) wi->info;
4302 gimple stmt = gsi_stmt (*gsip);
4304 if (gimple_code (stmt) == GIMPLE_LABEL)
4306 tree decl = gimple_label_label (stmt);
4308 /* Copy the decl and remember the copy. */
4309 insert_decl_map (id, decl, id->copy_decl (decl, id));
4316 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4317 Using the pointer map pointed to by ST (the decl map of the copy),
4318 remaps all local declarations to appropriate replacements in gimple operands. */
4322 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
4324 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
4325 copy_body_data *id = (copy_body_data *) wi->info;
4326 struct pointer_map_t *st = id->decl_map;
4330 /* Only a local declaration (variable or label). */
4331 if ((TREE_CODE (expr) == VAR_DECL
4332 && !TREE_STATIC (expr))
4333 || TREE_CODE (expr) == LABEL_DECL)
4335 /* Lookup the declaration. */
4336 n = (tree *) pointer_map_contains (st, expr);
4338 /* If it's there, remap it. */
4343 else if (TREE_CODE (expr) == STATEMENT_LIST
4344 || TREE_CODE (expr) == BIND_EXPR
4345 || TREE_CODE (expr) == SAVE_EXPR)
4347 else if (TREE_CODE (expr) == TARGET_EXPR)
4349 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
4350 It's OK for this to happen if it was part of a subtree that
4351 isn't immediately expanded, such as operand 2 of another TARGET_EXPR. */
4353 if (!TREE_OPERAND (expr, 1))
4355 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
4356 TREE_OPERAND (expr, 3) = NULL_TREE;
4360 /* Keep iterating. */
4365 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_locals.
4366 Using the pointer map pointed to by ST (the decl map of the copy),
4367 remaps all local declarations to appropriate replacements in gimple statements. */
4371 replace_locals_stmt (gimple_stmt_iterator *gsip,
4372 bool *handled_ops_p ATTRIBUTE_UNUSED,
4373 struct walk_stmt_info *wi)
4375 copy_body_data *id = (copy_body_data *) wi->info;
4376 gimple stmt = gsi_stmt (*gsip);
4378 if (gimple_code (stmt) == GIMPLE_BIND)
4380 tree block = gimple_bind_block (stmt);
4384 remap_block (&block, id);
4385 gimple_bind_set_block (stmt, block);
4388 /* This will remap a lot of the same decls again, but this should be harmless. */
4390 if (gimple_bind_vars (stmt))
4391 gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), NULL, id));
4394 /* Keep iterating. */
4399 /* Copies everything in SEQ and replaces variables and labels local to
4400 current_function_decl. */
4403 copy_gimple_seq_and_replace_locals (gimple_seq seq)
4406 struct walk_stmt_info wi;
4407 struct pointer_set_t *visited;
4410 /* There's nothing to do for an empty sequence. */
4415 memset (&id, 0, sizeof (id));
4416 id.src_fn = current_function_decl;
4417 id.dst_fn = current_function_decl;
4418 id.decl_map = pointer_map_create ();
4419 id.debug_map = NULL;
4421 id.copy_decl = copy_decl_no_change;
4422 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4423 id.transform_new_cfg = false;
4424 id.transform_return_to_modify = false;
4425 id.transform_lang_insert_block = NULL;
4427 /* Walk the tree once to find local labels. */
4428 memset (&wi, 0, sizeof (wi));
4429 visited = pointer_set_create ();
4432 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
4433 pointer_set_destroy (visited);
4435 copy = gimple_seq_copy (seq);
4437 /* Walk the copy, remapping decls. */
4438 memset (&wi, 0, sizeof (wi));
4440 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
4443 pointer_map_destroy (id.decl_map);
4445 pointer_map_destroy (id.debug_map);
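/* A minimal usage sketch (hypothetical helper, for illustration only):
   duplicate the body of a GIMPLE_BIND so that it can be inserted elsewhere
   in the current function without sharing local decls or labels with the
   original sequence.  */

static gimple_seq
example_duplicate_bind_body (gimple bind_stmt)
{
  return copy_gimple_seq_and_replace_locals (gimple_bind_body (bind_stmt));
}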
4451 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
4454 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
4463 debug_find_tree (tree top, tree search)
4465 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
4469 /* Declare the variables created by the inliner. Add all the variables in
4470 VARS to BLOCK. */
4473 declare_inline_vars (tree block, tree vars)
4476 for (t = vars; t; t = TREE_CHAIN (t))
4478 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
4479 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
4480 cfun->local_decls = tree_cons (NULL_TREE, t, cfun->local_decls);
4484 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
4487 /* Finish setting up COPY, a duplicate of DECL.  DECL originally lived in
4488 ID->src_fn, but the copy will live in ID->dst_fn: fix up its flags,
4489 abstract origin, RTL and context accordingly, and return it. */
4492 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
4494 /* Don't generate debug information for the copy if we wouldn't have
4495 generated it for the original either. */
4496 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
4497 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
4499 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
4500 declaration inspired this copy. */
4501 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
4503 /* The new variable/label has no RTL, yet. */
4504 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
4505 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
4506 SET_DECL_RTL (copy, NULL_RTX);
4508 /* These args would always appear unused, if not for this. */
4509 TREE_USED (copy) = 1;
4511 /* Set the context for the new declaration. */
4512 if (!DECL_CONTEXT (decl))
4513 /* Globals stay global. */
4515 else if (DECL_CONTEXT (decl) != id->src_fn)
4516 /* Things that weren't in the scope of the function we're inlining
4517 from aren't in the scope we're inlining to, either. */
4519 else if (TREE_STATIC (decl))
4520 /* Function-scoped static variables should stay in the original function. */
4524 /* Ordinary automatic local variables are now in the scope of the new function. */
4526 DECL_CONTEXT (copy) = id->dst_fn;
4532 copy_decl_to_var (tree decl, copy_body_data *id)
4536 gcc_assert (TREE_CODE (decl) == PARM_DECL
4537 || TREE_CODE (decl) == RESULT_DECL);
4539 type = TREE_TYPE (decl);
4541 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4542 VAR_DECL, DECL_NAME (decl), type);
4543 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4544 TREE_READONLY (copy) = TREE_READONLY (decl);
4545 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4546 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4548 return copy_decl_for_dup_finish (id, decl, copy);
4551 /* Like copy_decl_to_var, but create a return slot object instead of a
4552 pointer variable for return by invisible reference. */
4555 copy_result_decl_to_var (tree decl, copy_body_data *id)
4559 gcc_assert (TREE_CODE (decl) == PARM_DECL
4560 || TREE_CODE (decl) == RESULT_DECL);
4562 type = TREE_TYPE (decl);
4563 if (DECL_BY_REFERENCE (decl))
4564 type = TREE_TYPE (type);
4566 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
4567 VAR_DECL, DECL_NAME (decl), type);
4568 TREE_READONLY (copy) = TREE_READONLY (decl);
4569 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
4570 if (!DECL_BY_REFERENCE (decl))
4572 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
4573 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
4576 return copy_decl_for_dup_finish (id, decl, copy);
4580 copy_decl_no_change (tree decl, copy_body_data *id)
4584 copy = copy_node (decl);
4586 /* The COPY is not abstract; it will be generated in DST_FN. */
4587 DECL_ABSTRACT (copy) = 0;
4588 lang_hooks.dup_lang_specific_decl (copy);
4590 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
4591 been taken; it's for internal bookkeeping in expand_goto_internal. */
4592 if (TREE_CODE (copy) == LABEL_DECL)
4594 TREE_ADDRESSABLE (copy) = 0;
4595 LABEL_DECL_UID (copy) = -1;
4598 return copy_decl_for_dup_finish (id, decl, copy);
4602 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
4604 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
4605 return copy_decl_to_var (decl, id);
4607 return copy_decl_no_change (decl, id);
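/* Illustration (a sketch under stated assumptions, not code used here): the
   copy_decl_* routines above are installed as the ID->copy_decl callback by
   the various entry points.  Versioning keeps decls unchanged, whereas
   inline copies may need PARM_DECLs and RESULT_DECLs turned into VAR_DECLs.
   The flag FOR_VERSIONING_P below is hypothetical and exists only for this
   example.  */

static void
example_select_copy_decl_hook (copy_body_data *id, bool for_versioning_p)
{
  id->copy_decl = for_versioning_p
		  ? copy_decl_no_change : copy_decl_maybe_to_var;
}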
4610 /* Return a copy of the function's argument tree. */
4612 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
4613 bitmap args_to_skip, tree *vars)
4616 tree new_parm = NULL;
4621 for (arg = orig_parm; arg; arg = TREE_CHAIN (arg), i++)
4622 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
4624 tree new_tree = remap_decl (arg, id);
4625 lang_hooks.dup_lang_specific_decl (new_tree);
4627 parg = &TREE_CHAIN (new_tree);
4629 else if (!pointer_map_contains (id->decl_map, arg))
4631 /* Make an equivalent VAR_DECL.  If the argument was used
4632 as a temporary variable later in the function, the uses will be
4633 replaced by a local variable. */
4634 tree var = copy_decl_to_var (arg, id);
4636 add_referenced_var (var);
4637 insert_decl_map (id, arg, var);
4638 /* Declare this new variable. */
4639 TREE_CHAIN (var) = *vars;
4645 /* Return a copy of the function's static chain. */
4647 copy_static_chain (tree static_chain, copy_body_data * id)
4649 tree *chain_copy, *pvar;
4651 chain_copy = &static_chain;
4652 for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
4654 tree new_tree = remap_decl (*pvar, id);
4655 lang_hooks.dup_lang_specific_decl (new_tree);
4656 TREE_CHAIN (new_tree) = TREE_CHAIN (*pvar);
4659 return static_chain;
4662 /* Return true if the function is allowed to be versioned.
4663 This is a guard for the versioning functionality. */
4666 tree_versionable_function_p (tree fndecl)
4668 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
4669 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl), fndecl) == NULL);
4672 /* Delete all unreachable basic blocks and update callgraph.
4673 Doing so is somewhat nontrivial because we need to update all clones and
4674 remove inline functions that become unreachable. */
4677 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
4679 bool changed = false;
4680 basic_block b, next_bb;
4682 find_unreachable_blocks ();
4684 /* Delete all unreachable basic blocks. */
4686 for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
4688 next_bb = b->next_bb;
4690 if (!(b->flags & BB_REACHABLE))
4692 gimple_stmt_iterator bsi;
4694 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
4695 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL)
4697 struct cgraph_edge *e;
4698 struct cgraph_node *node;
4700 if ((e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
4702 if (!e->inline_failed)
4703 cgraph_remove_node_and_inline_clones (e->callee);
4705 cgraph_remove_edge (e);
4707 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
4708 && id->dst_node->clones)
4709 for (node = id->dst_node->clones; node != id->dst_node;)
4711 if ((e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
4713 if (!e->inline_failed)
4714 cgraph_remove_node_and_inline_clones (e->callee);
4716 cgraph_remove_edge (e);
4720 node = node->clones;
4721 else if (node->next_sibling_clone)
4722 node = node->next_sibling_clone;
4725 while (node != id->dst_node && !node->next_sibling_clone)
4726 node = node->clone_of;
4727 if (node != id->dst_node)
4728 node = node->next_sibling_clone;
4732 delete_basic_block (b);
4738 tidy_fallthru_edges ();
4742 /* Update clone info after duplication. */
4745 update_clone_info (copy_body_data * id)
4747 struct cgraph_node *node;
4748 if (!id->dst_node->clones)
4750 for (node = id->dst_node->clones; node != id->dst_node;)
4752 /* First update replace maps to match the new body. */
4753 if (node->clone.tree_map)
4756 for (i = 0; i < VEC_length (ipa_replace_map_p, node->clone.tree_map); i++)
4758 struct ipa_replace_map *replace_info;
4759 replace_info = VEC_index (ipa_replace_map_p, node->clone.tree_map, i);
4760 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
4761 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
4765 node = node->clones;
4766 else if (node->next_sibling_clone)
4767 node = node->next_sibling_clone;
4770 while (node != id->dst_node && !node->next_sibling_clone)
4771 node = node->clone_of;
4772 if (node != id->dst_node)
4773 node = node->next_sibling_clone;
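/* The two functions above share the same pre-order walk over the clone tree
   rooted at ID->dst_node.  A minimal sketch of that traversal (hypothetical
   helper, shown only to make the iteration pattern explicit): descend into
   node->clones first, then advance to node->next_sibling_clone, and when a
   subtree is exhausted climb back up through node->clone_of until a sibling
   is found or the root is reached again.  */

static void
example_walk_clone_tree (struct cgraph_node *root)
{
  struct cgraph_node *node;

  for (node = root->clones; node && node != root;)
    {
      /* ... visit NODE here ...  */

      if (node->clones)
	node = node->clones;
      else if (node->next_sibling_clone)
	node = node->next_sibling_clone;
      else
	{
	  while (node != root && !node->next_sibling_clone)
	    node = node->clone_of;
	  if (node != root)
	    node = node->next_sibling_clone;
	}
    }
}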
4778 /* Create a copy of a function's tree.
4779 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
4780 of the original function and the new copied function
4781 respectively. In case we want to replace a DECL
4782 tree with another tree while duplicating the function's
4783 body, TREE_MAP represents the mapping between these
4784 trees. If UPDATE_CLONES is set, the call_stmt fields
4785 of edges of clones of the function will be updated. */
4787 tree_function_versioning (tree old_decl, tree new_decl,
4788 VEC(ipa_replace_map_p,gc)* tree_map,
4789 bool update_clones, bitmap args_to_skip)
4791 struct cgraph_node *old_version_node;
4792 struct cgraph_node *new_version_node;
4796 struct ipa_replace_map *replace_info;
4797 basic_block old_entry_block, bb;
4798 VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10);
4801 tree old_current_function_decl = current_function_decl;
4802 tree vars = NULL_TREE;
4804 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
4805 && TREE_CODE (new_decl) == FUNCTION_DECL);
4806 DECL_POSSIBLY_INLINED (old_decl) = 1;
4808 old_version_node = cgraph_node (old_decl);
4809 new_version_node = cgraph_node (new_decl);
4811 /* Output the inlining info for this abstract function, since it has been
4812 inlined. If we don't do this now, we can lose the information about the
4813 variables in the function when the blocks get blown away as soon as we
4814 remove the cgraph node. */
4815 (*debug_hooks->outlining_inline_function) (old_decl);
4817 DECL_ARTIFICIAL (new_decl) = 1;
4818 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
4819 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
4821 /* Prepare the data structures for the tree copy. */
4822 memset (&id, 0, sizeof (id));
4824 /* Record statements that will need folding after the body is copied. */
4825 id.statements_to_fold = pointer_set_create ();
4827 id.decl_map = pointer_map_create ();
4828 id.debug_map = NULL;
4829 id.src_fn = old_decl;
4830 id.dst_fn = new_decl;
4831 id.src_node = old_version_node;
4832 id.dst_node = new_version_node;
4833 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
4834 if (id.src_node->ipa_transforms_to_apply)
4836 VEC(ipa_opt_pass,heap) * old_transforms_to_apply = id.dst_node->ipa_transforms_to_apply;
4839 id.dst_node->ipa_transforms_to_apply = VEC_copy (ipa_opt_pass, heap,
4840 id.src_node->ipa_transforms_to_apply);
4841 for (i = 0; i < VEC_length (ipa_opt_pass, old_transforms_to_apply); i++)
4842 VEC_safe_push (ipa_opt_pass, heap, id.dst_node->ipa_transforms_to_apply,
4843 VEC_index (ipa_opt_pass,
4844 old_transforms_to_apply,
4848 id.copy_decl = copy_decl_no_change;
4849 id.transform_call_graph_edges
4850 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
4851 id.transform_new_cfg = true;
4852 id.transform_return_to_modify = false;
4853 id.transform_lang_insert_block = NULL;
4855 current_function_decl = new_decl;
4856 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
4857 (DECL_STRUCT_FUNCTION (old_decl));
4858 initialize_cfun (new_decl, old_decl,
4859 old_entry_block->count);
4860 push_cfun (DECL_STRUCT_FUNCTION (new_decl));
4862 /* Copy the function's static chain. */
4863 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
4865 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
4866 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
4869 /* If there's a tree_map, prepare for substitution. */
4871 for (i = 0; i < VEC_length (ipa_replace_map_p, tree_map); i++)
4874 replace_info = VEC_index (ipa_replace_map_p, tree_map, i);
4875 if (replace_info->replace_p)
4877 tree op = replace_info->new_tree;
4881 if (TREE_CODE (op) == VIEW_CONVERT_EXPR)
4882 op = TREE_OPERAND (op, 0);
4884 if (TREE_CODE (op) == ADDR_EXPR)
4886 op = TREE_OPERAND (op, 0);
4887 while (handled_component_p (op))
4888 op = TREE_OPERAND (op, 0);
4889 if (TREE_CODE (op) == VAR_DECL)
4890 add_referenced_var (op);
4892 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
4893 init = setup_one_parameter (&id, replace_info->old_tree,
4894 replace_info->new_tree, id.src_fn,
4898 VEC_safe_push (gimple, heap, init_stmts, init);
4901 /* Copy the function's arguments. */
4902 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
4903 DECL_ARGUMENTS (new_decl) =
4904 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
4905 args_to_skip, &vars);
4907 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
4909 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4910 number_blocks (id.dst_fn);
4912 declare_inline_vars (DECL_INITIAL (new_decl), vars);
4914 if (DECL_STRUCT_FUNCTION (old_decl)->local_decls != NULL_TREE)
4915 /* Add local vars. */
4916 for (t_step = DECL_STRUCT_FUNCTION (old_decl)->local_decls;
4917 t_step; t_step = TREE_CHAIN (t_step))
4919 tree var = TREE_VALUE (t_step);
4920 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
4921 cfun->local_decls = tree_cons (NULL_TREE, var, cfun->local_decls);
4922 else if (!can_be_nonlocal (var, &id))
4924 tree_cons (NULL_TREE, remap_decl (var, &id),
4928 /* Copy the function's body. */
4929 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
4930 ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR);
4932 if (DECL_RESULT (old_decl) != NULL_TREE)
4934 tree *res_decl = &DECL_RESULT (old_decl);
4935 DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
4936 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
4939 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4940 number_blocks (new_decl);
4942 /* We want to create the BB unconditionally, so that the addition of
4943 debug stmts doesn't affect BB count, which may in the end cause
4944 codegen differences. */
4945 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
4946 while (VEC_length (gimple, init_stmts))
4947 insert_init_stmt (&id, bb, VEC_pop (gimple, init_stmts));
4948 update_clone_info (&id);
4950 /* Remap the nonlocal_goto_save_area, if any. */
4951 if (cfun->nonlocal_goto_save_area)
4953 struct walk_stmt_info wi;
4955 memset (&wi, 0, sizeof (wi));
4957 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
4961 pointer_map_destroy (id.decl_map);
4963 pointer_map_destroy (id.debug_map);
4964 free_dominance_info (CDI_DOMINATORS);
4965 free_dominance_info (CDI_POST_DOMINATORS);
4967 fold_marked_statements (0, id.statements_to_fold);
4968 pointer_set_destroy (id.statements_to_fold);
4969 fold_cond_expr_cond ();
4970 delete_unreachable_blocks_update_callgraph (&id);
4971 update_ssa (TODO_update_ssa);
4972 free_dominance_info (CDI_DOMINATORS);
4973 free_dominance_info (CDI_POST_DOMINATORS);
4975 gcc_assert (!id.debug_stmts);
4976 VEC_free (gimple, heap, init_stmts);
4978 current_function_decl = old_current_function_decl;
4979 gcc_assert (!current_function_decl
4980 || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
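/* A minimal driver sketch (hypothetical, for illustration only): produce a
   specialized version of FNDECL with its first argument dropped.  NEW_DECL
   is assumed to have been created beforehand, e.g. by the cgraph cloning
   machinery that builds the clone's FUNCTION_DECL and adjusts its type.  */

static void
example_version_without_first_arg (tree fndecl, tree new_decl)
{
  bitmap args_to_skip = BITMAP_ALLOC (NULL);

  bitmap_set_bit (args_to_skip, 0);
  if (tree_versionable_function_p (fndecl))
    tree_function_versioning (fndecl, new_decl, NULL /* tree_map */,
			      false /* update_clones */, args_to_skip);
  BITMAP_FREE (args_to_skip);
}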
4984 /* EXP is a CALL_EXPR present in a GENERIC expression tree.  Try to integrate
4985 the callee and return the inlined body on success. */
4988 maybe_inline_call_in_expr (tree exp)
4990 tree fn = get_callee_fndecl (exp);
4992 /* We can only try to inline "const" functions. */
4993 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
4995 struct pointer_map_t *decl_map = pointer_map_create ();
4996 call_expr_arg_iterator iter;
5000 /* Remap the parameters. */
5001 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
5003 param = TREE_CHAIN (param), arg = next_call_expr_arg (&iter))
5004 *pointer_map_insert (decl_map, param) = arg;
5006 memset (&id, 0, sizeof (id));
5008 id.dst_fn = current_function_decl;
5009 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
5010 id.decl_map = decl_map;
5012 id.copy_decl = copy_decl_no_change;
5013 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5014 id.transform_new_cfg = false;
5015 id.transform_return_to_modify = true;
5016 id.transform_lang_insert_block = false;
5018 /* Make sure not to unshare trees behind the front-end's back
5019 since front-end specific mechanisms may rely on sharing. */
5020 id.regimplify = false;
5021 id.do_not_unshare = true;
5023 /* We're not inside any EH region. */
5026 t = copy_tree_body (&id);
5027 pointer_map_destroy (decl_map);
5029 /* We can only return something suitable for use in a GENERIC expression tree. */
5031 if (TREE_CODE (t) == MODIFY_EXPR)
5032 return TREE_OPERAND (t, 1);
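/* A minimal caller sketch (hypothetical): fold away a call to a "const"
   function whose body is available, falling back to the original CALL_EXPR
   when the call cannot be integrated.  */

static tree
example_fold_const_call (tree call_expr)
{
  tree inlined = maybe_inline_call_in_expr (call_expr);
  return inlined ? inlined : call_expr;
}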
5038 /* Duplicate a type, fields and all. */
5041 build_duplicate_type (tree type)
5043 struct copy_body_data id;
5045 memset (&id, 0, sizeof (id));
5046 id.src_fn = current_function_decl;
5047 id.dst_fn = current_function_decl;
5049 id.decl_map = pointer_map_create ();
5050 id.debug_map = NULL;
5051 id.copy_decl = copy_decl_no_change;
5053 type = remap_type_1 (type, &id);
5055 pointer_map_destroy (id.decl_map);
5057 pointer_map_destroy (id.debug_map);
5059 TYPE_CANONICAL (type) = type;
5064 /* Return whether it is safe to inline the call in edge E: the callee must
5065 not use incompatible target-specific options or a different EH personality,
5066 and the call-site argument types must match the parameter types. */
5068 tree_can_inline_p (struct cgraph_edge *e)
5071 /* This causes a regression in SPEC in that it prevents a cold function from
5072 inlining a hot function. Perhaps this should only apply to functions
5073 that the user declares hot/cold/optimize explicitly. */
5075 /* Don't inline a function with a higher optimization level than the
5076 caller, or with different space constraints (hot/cold functions). */
5077 tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (caller);
5078 tree callee_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (callee);
5080 if (caller_tree != callee_tree)
5082 struct cl_optimization *caller_opt
5083 = TREE_OPTIMIZATION ((caller_tree)
5085 : optimization_default_node);
5087 struct cl_optimization *callee_opt
5088 = TREE_OPTIMIZATION ((callee_tree)
5090 : optimization_default_node);
5092 if ((caller_opt->optimize > callee_opt->optimize)
5093 || (caller_opt->optimize_size != callee_opt->optimize_size))
5097 tree caller, callee;
5099 caller = e->caller->decl;
5100 callee = e->callee->decl;
5102 /* We cannot inline a function that uses a different EH personality than the caller. */
5104 if (DECL_FUNCTION_PERSONALITY (caller)
5105 && DECL_FUNCTION_PERSONALITY (callee)
5106 && (DECL_FUNCTION_PERSONALITY (caller)
5107 != DECL_FUNCTION_PERSONALITY (callee)))
5109 e->inline_failed = CIF_UNSPECIFIED;
5110 gimple_call_set_cannot_inline (e->call_stmt, true);
5114 /* Allow the backend to decide if inlining is ok. */
5115 if (!targetm.target_option.can_inline_p (caller, callee))
5117 e->inline_failed = CIF_TARGET_OPTION_MISMATCH;
5118 gimple_call_set_cannot_inline (e->call_stmt, true);
5119 e->call_stmt_cannot_inline_p = true;
5124 && !gimple_check_call_args (e->call_stmt))
5126 e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
5127 gimple_call_set_cannot_inline (e->call_stmt, true);
5128 e->call_stmt_cannot_inline_p = true;