2 Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Alexandre Oliva <aoliva@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "tree-inline.h"
34 #include "insn-config.h"
37 #include "langhooks.h"
38 #include "basic-block.h"
39 #include "tree-iterator.h"
42 #include "tree-mudflap.h"
43 #include "tree-flow.h"
46 #include "tree-flow.h"
47 #include "diagnostic.h"
50 #include "pointer-set.h"
52 #include "value-prof.h"
53 #include "tree-pass.h"
55 #include "integrate.h"
57 /* I'm not real happy about this, but we need to handle gimple and
59 #include "tree-gimple.h"
61 /* Inlining, Cloning, Versioning, Parallelization
63 Inlining: a function body is duplicated, but the PARM_DECLs are
64 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
65 GIMPLE_MODIFY_STMTs that store to a dedicated returned-value variable.
66 The duplicated eh_region info of the copy will later be appended
67 to the info for the caller; the eh_region info in copied throwing
68 statements and RESX_EXPRs is adjusted accordingly.
70 Cloning: (only in C++) We have one body for a con/de/structor, and
71 multiple function decls, each with a unique parameter list.
72 Duplicate the body, using the given splay tree; some parameters
73 will become constants (like 0 or 1).
75 Versioning: a function body is duplicated and the result is a new
76 function, rather than being inserted into blocks of an existing function
77 as with inlining. Some parameters will become constants.
79 Parallelization: a region of a function is duplicated resulting in
80 a new function. Variables may be replaced with complex expressions
81 to enable shared variable semantics.
83 All of these will simultaneously look up any callgraph edges. If
84 we're going to inline the duplicated function body, and the given
85 function has some cloned callgraph nodes (one for each place this
86 function will be inlined), those callgraph edges will be duplicated.
87 If we're cloning the body, those callgraph edges will be
88 updated to point into the new body. (Note that the original
89 callgraph node and edge list will not be altered.)
91 See the CALL_EXPR handling case in copy_body_r (). */
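/* A minimal source-level sketch (illustrative only; the functions below are
   hypothetical and not part of GCC) of the inlining transform described
   above: the callee's PARM_DECL becomes a local VAR_DECL and the non-void
   RETURN_EXPR becomes an assignment to a dedicated returned-value
   variable.  */
#if 0
static int square (int x) { return x * x; }

int caller (int a)
{
  return square (a + 1);	/* Before inlining.  */
}

int caller_after_inlining (int a)	/* Conceptually, after inlining.  */
{
  int x = a + 1;		/* PARM_DECL remapped to a VAR_DECL.  */
  int retval;			/* Dedicated returned-value variable.  */
  retval = x * x;		/* RETURN_EXPR turned into an assignment.  */
  return retval;
}
#endif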
93 /* 0 if we should not perform inlining.
94 1 if we should expand function calls inline at the tree level.
95 2 if we should consider *all* functions to be inline
98 int flag_inline_trees = 0;
102 o In order to make inlining-on-trees work, we pessimized
103 function-local static constants. In particular, they are now
104 always output, even when not addressed. Fix this by treating
105 function-local static constants just like global static
106 constants; the back-end already knows not to output them if they
109 o Provide heuristics to clamp inlining of recursive template
113 /* Weights that estimate_num_insns uses for heuristics in inlining. */
115 eni_weights eni_inlining_weights;
117 /* Weights that estimate_num_insns uses to estimate the size of the
120 eni_weights eni_size_weights;
122 /* Weights that estimate_num_insns uses to estimate the time necessary
123 to execute the produced code. */
125 eni_weights eni_time_weights;
129 static tree declare_return_variable (copy_body_data *, tree, tree, tree *);
130 static tree copy_generic_body (copy_body_data *);
131 static bool inlinable_function_p (tree);
132 static void remap_block (tree *, copy_body_data *);
133 static tree remap_decls (tree, copy_body_data *);
134 static void copy_bind_expr (tree *, int *, copy_body_data *);
135 static tree mark_local_for_remap_r (tree *, int *, void *);
136 static void unsave_expr_1 (tree);
137 static tree unsave_r (tree *, int *, void *);
138 static void declare_inline_vars (tree, tree);
139 static void remap_save_expr (tree *, void *, int *);
140 static void add_lexical_block (tree current_block, tree new_block);
141 static tree copy_decl_to_var (tree, copy_body_data *);
142 static tree copy_result_decl_to_var (tree, copy_body_data *);
143 static tree copy_decl_no_change (tree, copy_body_data *);
144 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
146 /* Insert a tree->tree mapping for ID. Although the name suggests
147 that the trees should be variables, it is used for more than that. */
150 insert_decl_map (copy_body_data *id, tree key, tree value)
152 *pointer_map_insert (id->decl_map, key) = value;
154 /* Always insert an identity map as well. If we see this same new
155 node again, we won't want to duplicate it a second time. */
157 *pointer_map_insert (id->decl_map, value) = value;
160 /* Construct new SSA name for old NAME. ID is the inline context. */
163 remap_ssa_name (tree name, copy_body_data *id)
168 gcc_assert (TREE_CODE (name) == SSA_NAME);
170 n = (tree *) pointer_map_contains (id->decl_map, name);
174 /* Do not set DEF_STMT yet as statement is not copied yet. We do that
176 new = remap_decl (SSA_NAME_VAR (name), id);
177 /* We might have substituted a constant or another SSA_NAME for
180 the variable. Replace the SSA name representing RESULT_DECL by a
181 variable during inlining: this saves us from having to introduce a
182 PHI node in case the return value is only partly initialized. */
183 if ((TREE_CODE (new) == VAR_DECL || TREE_CODE (new) == PARM_DECL)
184 && (TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
185 || !id->transform_return_to_modify))
187 new = make_ssa_name (new, NULL);
188 insert_decl_map (id, name, new);
189 if (IS_EMPTY_STMT (SSA_NAME_DEF_STMT (name)))
191 SSA_NAME_DEF_STMT (new) = build_empty_stmt ();
192 if (gimple_default_def (id->src_cfun, SSA_NAME_VAR (name)) == name)
193 set_default_def (SSA_NAME_VAR (new), new);
195 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new)
196 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
197 TREE_TYPE (new) = TREE_TYPE (SSA_NAME_VAR (new));
200 insert_decl_map (id, name, new);
204 /* Remap DECL during the copying of the BLOCK tree for the function. */
207 remap_decl (tree decl, copy_body_data *id)
212 /* We only remap local variables in the current function. */
215 /* See if we have remapped this declaration. */
217 n = (tree *) pointer_map_contains (id->decl_map, decl);
219 /* If we didn't already have an equivalent for this declaration,
223 /* Make a copy of the variable or label. */
224 tree t = id->copy_decl (decl, id);
226 /* Remember it, so that if we encounter this local entity again
227 we can reuse this copy. Do this early because remap_type may
228 need this decl for TYPE_STUB_DECL. */
229 insert_decl_map (id, decl, t);
234 /* Remap types, if necessary. */
235 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
236 if (TREE_CODE (t) == TYPE_DECL)
237 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
239 /* Remap sizes as necessary. */
240 walk_tree (&DECL_SIZE (t), copy_body_r, id, NULL);
241 walk_tree (&DECL_SIZE_UNIT (t), copy_body_r, id, NULL);
243 /* If fields, do likewise for offset and qualifier. */
244 if (TREE_CODE (t) == FIELD_DECL)
246 walk_tree (&DECL_FIELD_OFFSET (t), copy_body_r, id, NULL);
247 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
248 walk_tree (&DECL_QUALIFIER (t), copy_body_r, id, NULL);
251 if (cfun && gimple_in_ssa_p (cfun)
252 && (TREE_CODE (t) == VAR_DECL
253 || TREE_CODE (t) == RESULT_DECL || TREE_CODE (t) == PARM_DECL))
255 tree def = gimple_default_def (id->src_cfun, decl);
257 if (TREE_CODE (decl) != PARM_DECL && def)
259 tree map = remap_ssa_name (def, id);
260 /* Watch out for RESULT_DECLs whose SSA names map directly
262 if (TREE_CODE (map) == SSA_NAME)
263 set_default_def (t, map);
265 add_referenced_var (t);
270 return unshare_expr (*n);
274 remap_type_1 (tree type, copy_body_data *id)
282 /* See if we have remapped this type. */
283 node = (tree *) pointer_map_contains (id->decl_map, type);
287 /* The type only needs remapping if it's variably modified. */
288 if (! variably_modified_type_p (type, id->src_fn))
290 insert_decl_map (id, type, type);
294 /* We do need a copy. Build and register it now. If this is a pointer or
295 reference type, remap the designated type and make a new pointer or
297 if (TREE_CODE (type) == POINTER_TYPE)
299 new = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
301 TYPE_REF_CAN_ALIAS_ALL (type));
302 insert_decl_map (id, type, new);
305 else if (TREE_CODE (type) == REFERENCE_TYPE)
307 new = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
309 TYPE_REF_CAN_ALIAS_ALL (type));
310 insert_decl_map (id, type, new);
314 new = copy_node (type);
316 insert_decl_map (id, type, new);
318 /* This is a new type, not a copy of an old type. Need to reassociate
319 variants. We can handle everything except the main variant lazily. */
320 t = TYPE_MAIN_VARIANT (type);
323 t = remap_type (t, id);
324 TYPE_MAIN_VARIANT (new) = t;
325 TYPE_NEXT_VARIANT (new) = TYPE_NEXT_VARIANT (t);
326 TYPE_NEXT_VARIANT (t) = new;
330 TYPE_MAIN_VARIANT (new) = new;
331 TYPE_NEXT_VARIANT (new) = NULL;
334 if (TYPE_STUB_DECL (type))
335 TYPE_STUB_DECL (new) = remap_decl (TYPE_STUB_DECL (type), id);
337 /* Lazily create pointer and reference types. */
338 TYPE_POINTER_TO (new) = NULL;
339 TYPE_REFERENCE_TO (new) = NULL;
341 switch (TREE_CODE (new))
345 case FIXED_POINT_TYPE:
348 t = TYPE_MIN_VALUE (new);
349 if (t && TREE_CODE (t) != INTEGER_CST)
350 walk_tree (&TYPE_MIN_VALUE (new), copy_body_r, id, NULL);
352 t = TYPE_MAX_VALUE (new);
353 if (t && TREE_CODE (t) != INTEGER_CST)
354 walk_tree (&TYPE_MAX_VALUE (new), copy_body_r, id, NULL);
358 TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
359 walk_tree (&TYPE_ARG_TYPES (new), copy_body_r, id, NULL);
363 TREE_TYPE (new) = remap_type (TREE_TYPE (new), id);
364 TYPE_DOMAIN (new) = remap_type (TYPE_DOMAIN (new), id);
369 case QUAL_UNION_TYPE:
373 for (f = TYPE_FIELDS (new); f ; f = TREE_CHAIN (f))
375 t = remap_decl (f, id);
376 DECL_CONTEXT (t) = new;
380 TYPE_FIELDS (new) = nreverse (nf);
386 /* Shouldn't have been thought variable sized. */
390 walk_tree (&TYPE_SIZE (new), copy_body_r, id, NULL);
391 walk_tree (&TYPE_SIZE_UNIT (new), copy_body_r, id, NULL);
397 remap_type (tree type, copy_body_data *id)
404 /* See if we have remapped this type. */
405 node = (tree *) pointer_map_contains (id->decl_map, type);
409 /* The type only needs remapping if it's variably modified. */
410 if (! variably_modified_type_p (type, id->src_fn))
412 insert_decl_map (id, type, type);
416 return remap_type_1 (type, id);
420 remap_decls (tree decls, copy_body_data *id)
423 tree new_decls = NULL_TREE;
425 /* Remap its variables. */
426 for (old_var = decls; old_var; old_var = TREE_CHAIN (old_var))
430 /* We cannot remap the local static declarations, since we cannot duplicate
431 them without breaking the one-decl rule. Instead, go ahead and link
432 the originals directly into cfun's unexpanded_var_list. */
433 if (!auto_var_in_fn_p (old_var, id->src_fn)
434 && !DECL_EXTERNAL (old_var))
436 cfun->unexpanded_var_list = tree_cons (NULL_TREE, old_var,
437 cfun->unexpanded_var_list);
441 /* Remap the variable. */
442 new_var = remap_decl (old_var, id);
444 /* If we didn't remap this variable, we can't mess with its
445 TREE_CHAIN. If we remapped this variable to the return slot, it's
446 already declared somewhere else, so don't declare it here. */
447 if (!new_var || new_var == id->retvar)
451 gcc_assert (DECL_P (new_var));
452 TREE_CHAIN (new_var) = new_decls;
457 return nreverse (new_decls);
460 /* Copy the BLOCK to contain remapped versions of the variables
461 therein. And hook the new block into the block-tree. */
464 remap_block (tree *block, copy_body_data *id)
470 /* Make the new block. */
472 new_block = make_node (BLOCK);
473 TREE_USED (new_block) = TREE_USED (old_block);
474 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
475 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
478 /* Remap its variables. */
479 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block), id);
483 if (id->transform_lang_insert_block)
484 lang_hooks.decls.insert_block (new_block);
486 /* Remember the remapped block. */
487 insert_decl_map (id, old_block, new_block);
490 /* Copy the whole block tree and root it in id->block. */
492 remap_blocks (tree block, copy_body_data *id)
500 remap_block (&new, id);
501 gcc_assert (new != block);
502 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
503 add_lexical_block (new, remap_blocks (t, id));
508 copy_statement_list (tree *tp)
510 tree_stmt_iterator oi, ni;
513 new = alloc_stmt_list ();
514 ni = tsi_start (new);
515 oi = tsi_start (*tp);
518 for (; !tsi_end_p (oi); tsi_next (&oi))
519 tsi_link_after (&ni, tsi_stmt (oi), TSI_NEW_STMT);
523 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
525 tree block = BIND_EXPR_BLOCK (*tp);
526 /* Copy (and replace) the statement. */
527 copy_tree_r (tp, walk_subtrees, NULL);
530 remap_block (&block, id);
531 BIND_EXPR_BLOCK (*tp) = block;
534 if (BIND_EXPR_VARS (*tp))
535 /* This will remap a lot of the same decls again, but this should be
537 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), id);
540 /* Called from copy_body_id via walk_tree. DATA is really a
541 `copy_body_data *'. */
544 copy_body_r (tree *tp, int *walk_subtrees, void *data)
546 copy_body_data *id = (copy_body_data *) data;
547 tree fn = id->src_fn;
550 /* Begin by recognizing trees that we'll completely rewrite for the
551 inlining context. Our output for these trees is completely
552 different from our input (e.g. RETURN_EXPR is deleted, and morphs
553 into an edge). Further down, we'll handle trees that get
554 duplicated and/or tweaked. */
556 /* When requested, RETURN_EXPRs should be transformed to just the
557 contained GIMPLE_MODIFY_STMT. The branch semantics of the return will
558 be handled elsewhere by manipulating the CFG rather than a statement. */
559 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
561 tree assignment = TREE_OPERAND (*tp, 0);
563 /* If we're returning something, just turn that into an
564 assignment into the equivalent of the original RESULT_DECL.
565 If the "assignment" is just the result decl, the result
566 decl has already been set (e.g. a recent "foo (&result_decl,
567 ...)"); just toss the entire RETURN_EXPR. */
568 if (assignment && TREE_CODE (assignment) == GIMPLE_MODIFY_STMT)
570 /* Replace the RETURN_EXPR with (a copy of) the
571 GIMPLE_MODIFY_STMT hanging underneath. */
572 *tp = copy_node (assignment);
574 else /* Else the RETURN_EXPR returns no value. */
577 return (tree) (void *)1;
580 else if (TREE_CODE (*tp) == SSA_NAME)
582 *tp = remap_ssa_name (*tp, id);
587 /* Local variables and labels need to be replaced by equivalent
588 variables. We don't want to copy static variables; there's only
589 one of those, no matter how many times we inline the containing
590 function. Similarly for globals from an outer function. */
591 else if (auto_var_in_fn_p (*tp, fn))
595 /* Remap the declaration. */
596 new_decl = remap_decl (*tp, id);
597 gcc_assert (new_decl);
598 /* Replace this variable with the copy. */
599 STRIP_TYPE_NOPS (new_decl);
603 else if (TREE_CODE (*tp) == STATEMENT_LIST)
604 copy_statement_list (tp);
605 else if (TREE_CODE (*tp) == SAVE_EXPR)
606 remap_save_expr (tp, id->decl_map, walk_subtrees);
607 else if (TREE_CODE (*tp) == LABEL_DECL
608 && (! DECL_CONTEXT (*tp)
609 || decl_function_context (*tp) == id->src_fn))
610 /* These may need to be remapped for EH handling. */
611 *tp = remap_decl (*tp, id);
612 else if (TREE_CODE (*tp) == BIND_EXPR)
613 copy_bind_expr (tp, walk_subtrees, id);
614 /* Types may need remapping as well. */
615 else if (TYPE_P (*tp))
616 *tp = remap_type (*tp, id);
618 /* If this is a constant, we have to copy the node iff the type will be
619 remapped. copy_tree_r will not copy a constant. */
620 else if (CONSTANT_CLASS_P (*tp))
622 tree new_type = remap_type (TREE_TYPE (*tp), id);
624 if (new_type == TREE_TYPE (*tp))
627 else if (TREE_CODE (*tp) == INTEGER_CST)
628 *tp = build_int_cst_wide (new_type, TREE_INT_CST_LOW (*tp),
629 TREE_INT_CST_HIGH (*tp));
632 *tp = copy_node (*tp);
633 TREE_TYPE (*tp) = new_type;
637 /* Otherwise, just copy the node. Note that copy_tree_r already
638 knows not to copy VAR_DECLs, etc., so this is safe. */
641 /* Here we handle trees that are not completely rewritten.
642 First we detect some inlining-induced bogosities for
644 if (TREE_CODE (*tp) == GIMPLE_MODIFY_STMT
645 && GIMPLE_STMT_OPERAND (*tp, 0) == GIMPLE_STMT_OPERAND (*tp, 1)
646 && (auto_var_in_fn_p (GIMPLE_STMT_OPERAND (*tp, 0), fn)))
648 /* Some assignments VAR = VAR; don't generate any rtl code
649 and thus don't count as variable modification. Avoid
650 keeping bogosities like 0 = 0. */
651 tree decl = GIMPLE_STMT_OPERAND (*tp, 0), value;
654 n = (tree *) pointer_map_contains (id->decl_map, decl);
658 STRIP_TYPE_NOPS (value);
659 if (TREE_CONSTANT (value) || TREE_READONLY_DECL_P (value))
661 *tp = build_empty_stmt ();
662 return copy_body_r (tp, walk_subtrees, data);
666 else if (TREE_CODE (*tp) == INDIRECT_REF)
668 /* Get rid of *& from inline substitutions that can happen when a
669 pointer argument is an ADDR_EXPR. */
670 tree decl = TREE_OPERAND (*tp, 0);
673 n = (tree *) pointer_map_contains (id->decl_map, decl);
678 /* If we happen to get an ADDR_EXPR in n->value, strip
679 it manually here as we'll eventually get ADDR_EXPRs
680 which lie about their types pointed to. In this case
681 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
682 but we absolutely rely on that. As fold_indirect_ref
683 does other useful transformations, try that first, though. */
684 tree type = TREE_TYPE (TREE_TYPE (*n));
685 new = unshare_expr (*n);
687 *tp = fold_indirect_ref_1 (type, new);
690 if (TREE_CODE (new) == ADDR_EXPR)
691 *tp = TREE_OPERAND (new, 0);
694 *tp = build1 (INDIRECT_REF, type, new);
695 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
703 /* Here is the "usual case". Copy this tree node, and then
704 tweak some special cases. */
705 copy_tree_r (tp, walk_subtrees, NULL);
707 /* Global variables we haven't seen yet need to go into the referenced
709 if (gimple_in_ssa_p (cfun) && TREE_CODE (*tp) == VAR_DECL)
710 add_referenced_var (*tp);
712 /* If EXPR has a block defined, map it to the newly constructed block.
713 When inlining, we want EXPRs without a block to appear in the block
715 if (EXPR_P (*tp) || GIMPLE_STMT_P (*tp))
717 new_block = id->block;
718 if (TREE_BLOCK (*tp))
721 n = (tree *) pointer_map_contains (id->decl_map,
726 TREE_BLOCK (*tp) = new_block;
729 if (TREE_CODE (*tp) == RESX_EXPR && id->eh_region_offset)
730 TREE_OPERAND (*tp, 0) =
733 id->eh_region_offset + TREE_INT_CST_LOW (TREE_OPERAND (*tp, 0)));
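/* Illustrative example (assumed numbers): if the caller already had EH
   regions 1..3 before the callee's regions were appended, then
   id->eh_region_offset is 3, and a copied RESX_EXPR that referred to
   region 2 in the callee now refers to region 5. */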
735 if (!GIMPLE_TUPLE_P (*tp) && TREE_CODE (*tp) != OMP_CLAUSE)
736 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
738 /* The copied TARGET_EXPR has never been expanded, even if the
739 original node was expanded already. */
740 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
742 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
743 TREE_OPERAND (*tp, 3) = NULL_TREE;
746 /* Variable substitution need not be simple; consider, in particular,
747 the INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
748 and friends are up-to-date. */
749 else if (TREE_CODE (*tp) == ADDR_EXPR)
751 walk_tree (&TREE_OPERAND (*tp, 0), copy_body_r, id, NULL);
752 /* Handle the case where we substituted an INDIRECT_REF
753 into the operand of the ADDR_EXPR. */
754 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
755 *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
757 recompute_tree_invariant_for_addr_expr (*tp);
762 /* Keep iterating. */
766 /* Copy basic block, scale profile accordingly. Edges will be taken care of
770 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale, int count_scale)
772 block_stmt_iterator bsi, copy_bsi;
773 basic_block copy_basic_block;
775 /* create_basic_block() will append every new block to
776 basic_block_info automatically. */
777 copy_basic_block = create_basic_block (NULL, (void *) 0,
778 (basic_block) bb->prev_bb->aux);
779 copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
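/* Worked example (illustrative numbers): count_scale is computed elsewhere as
   REG_BR_PROB_BASE * call_count / callee_entry_count, so if the callee's
   entry count is 1000, this call site's count is 250, and BB's count is 400,
   the copied block gets a count of 400 * 250 / 1000 == 100. */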
781 /* We are going to rebuild frequencies from scratch. These values have only
782 minor importance for driving canonicalize_loop_headers. */
783 copy_basic_block->frequency = ((gcov_type)bb->frequency
784 * frequency_scale / REG_BR_PROB_BASE);
785 if (copy_basic_block->frequency > BB_FREQ_MAX)
786 copy_basic_block->frequency = BB_FREQ_MAX;
787 copy_bsi = bsi_start (copy_basic_block);
789 for (bsi = bsi_start (bb);
790 !bsi_end_p (bsi); bsi_next (&bsi))
792 tree stmt = bsi_stmt (bsi);
793 tree orig_stmt = stmt;
795 walk_tree (&stmt, copy_body_r, id, NULL);
797 /* A RETURN_EXPR might have been removed;
798 this is signalled by the stmt pointer being made NULL. */
803 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun, orig_stmt);
805 /* With return slot optimization we can end up with
806 non-gimple (foo *)&this->m, fix that here. */
807 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
808 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == NOP_EXPR
809 && !is_gimple_val (TREE_OPERAND (GIMPLE_STMT_OPERAND (stmt, 1), 0)))
810 gimplify_stmt (&stmt);
812 bsi_insert_after (©_bsi, stmt, BSI_NEW_STMT);
814 /* Process the new statement. gimplify_stmt possibly turned the statement
815 into multiple statements; we need to process all of them. */
816 while (!bsi_end_p (copy_bsi))
818 tree *stmtp = bsi_stmt_ptr (copy_bsi);
820 call = get_call_expr_in (stmt);
822 if (call && CALL_EXPR_VA_ARG_PACK (call) && id->call_expr)
824 /* __builtin_va_arg_pack () should be replaced by
825 all arguments corresponding to ... in the caller. */
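/* Illustrative example (hypothetical wrapper, not from this file): given
     static inline __attribute__ ((always_inline))
     int log_err (const char *fmt, ...)
     { return fprintf (stderr, fmt, __builtin_va_arg_pack ()); }
   inlining the call log_err ("%d\n", i) rewrites the inner call into
   fprintf (stderr, "%d\n", i): the caller's trailing arguments are
   spliced in place of the builtin. */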
826 tree p, *argarray, new_call, *call_ptr;
827 int nargs = call_expr_nargs (id->call_expr);
829 for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
832 argarray = (tree *) alloca ((nargs + call_expr_nargs (call))
835 memcpy (argarray, CALL_EXPR_ARGP (call),
836 call_expr_nargs (call) * sizeof (*argarray));
837 memcpy (argarray + call_expr_nargs (call),
838 CALL_EXPR_ARGP (id->call_expr)
839 + (call_expr_nargs (id->call_expr) - nargs),
840 nargs * sizeof (*argarray));
842 new_call = build_call_array (TREE_TYPE (call),
844 nargs + call_expr_nargs (call),
846 /* Copy all CALL_EXPR flags, locus and block, except
847 CALL_EXPR_VA_ARG_PACK flag. */
848 CALL_EXPR_STATIC_CHAIN (new_call)
849 = CALL_EXPR_STATIC_CHAIN (call);
850 CALL_EXPR_TAILCALL (new_call) = CALL_EXPR_TAILCALL (call);
851 CALL_EXPR_RETURN_SLOT_OPT (new_call)
852 = CALL_EXPR_RETURN_SLOT_OPT (call);
853 CALL_FROM_THUNK_P (new_call) = CALL_FROM_THUNK_P (call);
854 CALL_CANNOT_INLINE_P (new_call)
855 = CALL_CANNOT_INLINE_P (call);
856 TREE_NOTHROW (new_call) = TREE_NOTHROW (call);
857 SET_EXPR_LOCUS (new_call, EXPR_LOCUS (call));
858 TREE_BLOCK (new_call) = TREE_BLOCK (call);
861 if (TREE_CODE (*call_ptr) == GIMPLE_MODIFY_STMT)
862 call_ptr = &GIMPLE_STMT_OPERAND (*call_ptr, 1);
863 if (TREE_CODE (*call_ptr) == WITH_SIZE_EXPR)
864 call_ptr = &TREE_OPERAND (*call_ptr, 0);
865 gcc_assert (*call_ptr == call);
866 if (call_ptr == stmtp)
868 bsi_replace (©_bsi, new_call, true);
869 stmtp = bsi_stmt_ptr (copy_bsi);
874 *call_ptr = new_call;
881 && (decl = get_callee_fndecl (call))
882 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
883 && DECL_FUNCTION_CODE (decl)
884 == BUILT_IN_VA_ARG_PACK_LEN)
886 /* __builtin_va_arg_pack_len () should be replaced by
887 the number of anonymous arguments. */
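/* Illustrative example (hypothetical): if an always_inline varargs wrapper
   like the one sketched above also calls __builtin_va_arg_pack_len (),
   inlining a call that passes two anonymous arguments, e.g.
   wrapper ("%d %d\n", i, j), replaces the builtin with the constant 2. */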
888 int nargs = call_expr_nargs (id->call_expr);
889 tree count, *call_ptr, p;
891 for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
894 count = build_int_cst (integer_type_node, nargs);
896 if (TREE_CODE (*call_ptr) == GIMPLE_MODIFY_STMT)
897 call_ptr = &GIMPLE_STMT_OPERAND (*call_ptr, 1);
898 if (TREE_CODE (*call_ptr) == WITH_SIZE_EXPR)
899 call_ptr = &TREE_OPERAND (*call_ptr, 0);
900 gcc_assert (*call_ptr == call && call_ptr != stmtp);
907 /* Statements produced by inlining can be unfolded, especially
908 when we have constant propagated some operands. We can't fold
909 them right now for two reasons:
910 1) folding requires SSA_NAME_DEF_STMTs to be correct
911 2) we can't change function calls to builtins.
912 So we just mark statements for later folding. We mark
913 all new statements, not just the statements that have changed
914 through some nontrivial substitution, so that even statements made
915 foldable indirectly are updated. If this turns out to be
916 expensive, copy_body can be told to watch for nontrivial
918 if (id->statements_to_fold)
919 pointer_set_insert (id->statements_to_fold, stmt);
920 /* We're duplicating a CALL_EXPR. Find any corresponding
921 callgraph edges and update or duplicate them. */
922 if (call && (decl = get_callee_fndecl (call)))
924 struct cgraph_node *node;
925 struct cgraph_edge *edge;
927 switch (id->transform_call_graph_edges)
929 case CB_CGE_DUPLICATE:
930 edge = cgraph_edge (id->src_node, orig_stmt);
932 cgraph_clone_edge (edge, id->dst_node, stmt,
933 REG_BR_PROB_BASE, 1, edge->frequency, true);
936 case CB_CGE_MOVE_CLONES:
937 for (node = id->dst_node->next_clone;
939 node = node->next_clone)
941 edge = cgraph_edge (node, orig_stmt);
943 cgraph_set_call_stmt (edge, stmt);
948 edge = cgraph_edge (id->dst_node, orig_stmt);
950 cgraph_set_call_stmt (edge, stmt);
957 /* If you think we can abort here, you are wrong.
958 There is no region 0 in tree land. */
959 gcc_assert (lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt)
962 if (tree_could_throw_p (stmt)
963 /* When we are cloning for inlining, we are supposed to
964 construct a clone that calls precisely the same functions
965 as the original. However, IPA optimizers might have proved
966 earlier that some function calls are non-trapping, which might
967 render some basic blocks dead that might become
970 We can't update SSA with unreachable blocks in the CFG, and thus
971 we prevent this scenario by preserving even the "dead" EH
972 edges until the point where they are later removed by
974 || (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
975 && lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt) > 0))
977 int region = lookup_stmt_eh_region_fn (id->src_cfun, orig_stmt);
978 /* Add an entry for the copied tree in the EH hashtable.
979 When cloning or versioning, use the hashtable in
980 cfun, and just copy the EH number. When inlining, use the
981 hashtable in the caller, and adjust the region number. */
983 add_stmt_to_eh_region (stmt, region + id->eh_region_offset);
985 /* If this tree doesn't have a region associated with it,
986 and there is a "current region,"
987 then associate this tree with the current region
988 and add edges associated with this region. */
989 if ((lookup_stmt_eh_region_fn (id->src_cfun,
991 && id->eh_region > 0)
992 && tree_could_throw_p (stmt))
993 add_stmt_to_eh_region (stmt, id->eh_region);
995 if (gimple_in_ssa_p (cfun))
1000 find_new_referenced_vars (bsi_stmt_ptr (copy_bsi));
1001 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
1002 if (TREE_CODE (def) == SSA_NAME)
1003 SSA_NAME_DEF_STMT (def) = stmt;
1005 bsi_next (©_bsi);
1007 copy_bsi = bsi_last (copy_basic_block);
1010 return copy_basic_block;
1013 /* Inserting a Single Entry Multiple Exit region in SSA form into code in SSA
1014 form is quite easy, since the dominator relationship for the old basic blocks does
1017 There is, however, an exception: inlining might change the dominator relation
1018 across EH edges from basic blocks within the inlined function
1019 to landing pads in the function we inline into.
1021 The function fills in PHI_RESULTs of such PHI nodes if they refer
1022 to gimple regs. Otherwise, the function marks the PHI_RESULT of such
1023 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
1024 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
1025 set, and this means that there will be no overlapping live ranges
1026 for the underlying symbol.
1028 This might change in the future if we allow redirecting of EH edges and
1029 we might want to change the way we build the CFG pre-inlining to include
1030 all the possible edges then. */
1032 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
1033 bool can_throw, bool nonlocal_goto)
1038 FOR_EACH_EDGE (e, ei, bb->succs)
1040 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
1044 gcc_assert (e->flags & EDGE_ABNORMAL);
1046 gcc_assert (e->flags & EDGE_EH);
1048 gcc_assert (!(e->flags & EDGE_EH));
1049 for (phi = phi_nodes (e->dest); phi; phi = PHI_CHAIN (phi))
1053 /* There shouldn't be any PHI nodes in the ENTRY_BLOCK. */
1054 gcc_assert (!e->dest->aux);
1056 gcc_assert (SSA_NAME_OCCURS_IN_ABNORMAL_PHI
1057 (PHI_RESULT (phi)));
1059 if (!is_gimple_reg (PHI_RESULT (phi)))
1061 mark_sym_for_renaming
1062 (SSA_NAME_VAR (PHI_RESULT (phi)));
1066 re = find_edge (ret_bb, e->dest);
1069 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
1070 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
1072 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1073 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
1078 /* Copy edges from BB into its copy constructed earlier, scale profile
1079 accordingly. Edges will be taken care of later. Assume the aux
1080 pointers point to the copies of each BB. */
1082 copy_edges_for_bb (basic_block bb, int count_scale, basic_block ret_bb)
1084 basic_block new_bb = (basic_block) bb->aux;
1087 block_stmt_iterator bsi;
1090 /* Use the indices from the original blocks to create edges for the
1092 FOR_EACH_EDGE (old_edge, ei, bb->succs)
1093 if (!(old_edge->flags & EDGE_EH))
1097 flags = old_edge->flags;
1099 /* Return edges do get a FALLTHRU flag when they get inlined. */
1100 if (old_edge->dest->index == EXIT_BLOCK && !old_edge->flags
1101 && old_edge->dest->aux != EXIT_BLOCK_PTR)
1102 flags |= EDGE_FALLTHRU;
1103 new = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
1104 new->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
1105 new->probability = old_edge->probability;
1108 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
1111 for (bsi = bsi_start (new_bb); !bsi_end_p (bsi);)
1114 bool can_throw, nonlocal_goto;
1116 copy_stmt = bsi_stmt (bsi);
1117 update_stmt (copy_stmt);
1118 if (gimple_in_ssa_p (cfun))
1119 mark_symbols_for_renaming (copy_stmt);
1120 /* Do this before the possible split_block. */
1123 /* If this tree could throw an exception, there are two
1124 cases where we need to add abnormal edge(s): the
1125 tree wasn't in a region and there is a "current
1126 region" in the caller; or the original tree had
1127 EH edges. In both cases split the block after the tree,
1128 and add abnormal edge(s) as needed; we need both
1129 those from the callee and the caller.
1130 We check whether the copy can throw, because the const
1131 propagation can change an INDIRECT_REF which throws
1132 into a COMPONENT_REF which doesn't. If the copy
1133 can throw, the original could also throw. */
1135 can_throw = tree_can_throw_internal (copy_stmt);
1136 nonlocal_goto = tree_can_make_abnormal_goto (copy_stmt);
1138 if (can_throw || nonlocal_goto)
1140 if (!bsi_end_p (bsi))
1141 /* Note that bb's predecessor edges aren't necessarily
1142 right at this point; split_block doesn't care. */
1144 edge e = split_block (new_bb, copy_stmt);
1147 new_bb->aux = e->src->aux;
1148 bsi = bsi_start (new_bb);
1153 make_eh_edges (copy_stmt);
1156 make_abnormal_goto_edges (bb_for_stmt (copy_stmt), true);
1158 if ((can_throw || nonlocal_goto)
1159 && gimple_in_ssa_p (cfun))
1160 update_ssa_across_abnormal_edges (bb_for_stmt (copy_stmt), ret_bb,
1161 can_throw, nonlocal_goto);
1165 /* Copy the PHIs. All blocks and edges are copied; some blocks
1166 were possibly split and new outgoing EH edges inserted.
1167 BB points to the block of the original function, and AUX pointers link
1168 the original and newly copied blocks. */
1171 copy_phis_for_bb (basic_block bb, copy_body_data *id)
1173 basic_block new_bb = bb->aux;
1177 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
1179 tree res = PHI_RESULT (phi);
1184 if (is_gimple_reg (res))
1186 walk_tree (&new_res, copy_body_r, id, NULL);
1187 SSA_NAME_DEF_STMT (new_res)
1188 = new_phi = create_phi_node (new_res, new_bb);
1189 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
1191 edge old_edge = find_edge (new_edge->src->aux, bb);
1192 tree arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
1195 walk_tree (&new_arg, copy_body_r, id, NULL);
1196 gcc_assert (new_arg);
1197 add_phi_arg (new_phi, new_arg, new_edge);
1203 /* Wrapper for remap_decl so it can be used as a callback. */
1205 remap_decl_1 (tree decl, void *data)
1207 return remap_decl (decl, (copy_body_data *) data);
1210 /* Build the struct function and associated data structures for the new clone
1211 NEW_FNDECL to be built. CALLEE_FNDECL is the original. */
1214 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count,
1217 struct function *new_cfun
1218 = (struct function *) ggc_alloc_cleared (sizeof (struct function));
1219 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
1220 int count_scale, frequency_scale;
1222 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
1223 count_scale = (REG_BR_PROB_BASE * count
1224 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
1228 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
1229 frequency_scale = (REG_BR_PROB_BASE * frequency
1231 ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
1233 frequency_scale = count_scale;
1235 /* Register specific tree functions. */
1236 tree_register_cfg_hooks ();
1237 *new_cfun = *DECL_STRUCT_FUNCTION (callee_fndecl);
1238 new_cfun->funcdef_no = get_next_funcdef_no ();
1239 VALUE_HISTOGRAMS (new_cfun) = NULL;
1240 new_cfun->unexpanded_var_list = NULL;
1241 new_cfun->cfg = NULL;
1242 new_cfun->decl = new_fndecl /*= copy_node (callee_fndecl)*/;
1243 DECL_STRUCT_FUNCTION (new_fndecl) = new_cfun;
1244 push_cfun (new_cfun);
1245 init_empty_tree_cfg ();
1247 ENTRY_BLOCK_PTR->count =
1248 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
1250 ENTRY_BLOCK_PTR->frequency =
1251 (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
1252 frequency_scale / REG_BR_PROB_BASE);
1253 EXIT_BLOCK_PTR->count =
1254 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count * count_scale /
1256 EXIT_BLOCK_PTR->frequency =
1257 (EXIT_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency *
1258 frequency_scale / REG_BR_PROB_BASE);
1260 init_eh_for_function ();
1262 if (src_cfun->gimple_df)
1265 cfun->gimple_df->in_ssa_p = true;
1266 init_ssa_operands ();
1271 /* Make a copy of the body of FN so that it can be inserted inline in
1272 another function. Walks FN via CFG, returns new fndecl. */
1275 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency,
1276 basic_block entry_block_map, basic_block exit_block_map)
1278 tree callee_fndecl = id->src_fn;
1279 /* Original cfun for the callee, doesn't change. */
1280 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
1281 struct function *cfun_to_copy;
1283 tree new_fndecl = NULL;
1284 int count_scale, frequency_scale;
1287 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count)
1288 count_scale = (REG_BR_PROB_BASE * count
1289 / ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->count);
1293 if (ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency)
1294 frequency_scale = (REG_BR_PROB_BASE * frequency
1296 ENTRY_BLOCK_PTR_FOR_FUNCTION (src_cfun)->frequency);
1298 frequency_scale = count_scale;
1300 /* Register specific tree functions. */
1301 tree_register_cfg_hooks ();
1303 /* Must have a CFG here at this point. */
1304 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION
1305 (DECL_STRUCT_FUNCTION (callee_fndecl)));
1307 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
1310 ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = entry_block_map;
1311 EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy)->aux = exit_block_map;
1312 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
1313 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FUNCTION (cfun_to_copy);
1315 /* Duplicate any exception-handling regions. */
1318 id->eh_region_offset
1319 = duplicate_eh_regions (cfun_to_copy, remap_decl_1, id,
1322 /* Use aux pointers to map the original blocks to copy. */
1323 FOR_EACH_BB_FN (bb, cfun_to_copy)
1325 basic_block new = copy_bb (id, bb, frequency_scale, count_scale);
1330 last = last_basic_block;
1331 /* Now that we've duplicated the blocks, duplicate their edges. */
1332 FOR_ALL_BB_FN (bb, cfun_to_copy)
1333 copy_edges_for_bb (bb, count_scale, exit_block_map);
1334 if (gimple_in_ssa_p (cfun))
1335 FOR_ALL_BB_FN (bb, cfun_to_copy)
1336 copy_phis_for_bb (bb, id);
1337 FOR_ALL_BB_FN (bb, cfun_to_copy)
1339 ((basic_block)bb->aux)->aux = NULL;
1342 /* Zero out AUX fields of newly created block during EH edge
1344 for (; last < last_basic_block; last++)
1345 BASIC_BLOCK (last)->aux = NULL;
1346 entry_block_map->aux = NULL;
1347 exit_block_map->aux = NULL;
1352 /* Make a copy of the body of FN so that it can be inserted inline in
1353 another function. */
1356 copy_generic_body (copy_body_data *id)
1359 tree fndecl = id->src_fn;
1361 body = DECL_SAVED_TREE (fndecl);
1362 walk_tree (&body, copy_body_r, id, NULL);
1368 copy_body (copy_body_data *id, gcov_type count, int frequency,
1369 basic_block entry_block_map, basic_block exit_block_map)
1371 tree fndecl = id->src_fn;
1374 /* If this body has a CFG, walk CFG and copy. */
1375 gcc_assert (ENTRY_BLOCK_PTR_FOR_FUNCTION (DECL_STRUCT_FUNCTION (fndecl)));
1376 body = copy_cfg_body (id, count, frequency, entry_block_map, exit_block_map);
1381 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
1382 defined in function FN, or of a data member thereof. */
1385 self_inlining_addr_expr (tree value, tree fn)
1389 if (TREE_CODE (value) != ADDR_EXPR)
1392 var = get_base_address (TREE_OPERAND (value, 0));
1394 return var && auto_var_in_fn_p (var, fn);
1398 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
1399 basic_block bb, tree *vars)
1405 tree def = (gimple_in_ssa_p (cfun)
1406 ? gimple_default_def (id->src_cfun, p) : NULL);
1409 && value != error_mark_node
1410 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
1411 rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value);
1413 /* If the parameter is never assigned to, has no SSA_NAMEs created,
1414 we may not need to create a new variable here at all. Instead, we may
1415 be able to just use the argument value. */
1416 if (TREE_READONLY (p)
1417 && !TREE_ADDRESSABLE (p)
1418 && value && !TREE_SIDE_EFFECTS (value)
1421 /* We may produce non-gimple trees by adding NOPs or introduce
1422 invalid sharing when the operand is not really constant.
1423 It is not a big deal to prohibit constant propagation here, as
1424 we will constant propagate in the DOM1 pass anyway. */
1425 if (is_gimple_min_invariant (value)
1426 && useless_type_conversion_p (TREE_TYPE (p),
1428 /* We have to be very careful about ADDR_EXPR. Make sure
1429 the base variable isn't a local variable of the inlined
1430 function, e.g., when doing recursive inlining, direct or
1431 mutually-recursive or whatever, which is why we don't
1432 just test whether fn == current_function_decl. */
1433 && ! self_inlining_addr_expr (value, fn))
1435 insert_decl_map (id, p, value);
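/* Illustrative example (assumed values): when inlining a call such as
   foo (42) where the read-only parameter P is never written, this maps P
   directly to the constant 42, so no initialization statement is emitted
   and uses of P in the copied body simply become 42. */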
1440 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
1441 here since the type of this decl must be visible to the calling
1443 var = copy_decl_to_var (p, id);
1444 if (gimple_in_ssa_p (cfun) && TREE_CODE (var) == VAR_DECL)
1447 add_referenced_var (var);
1450 /* See if the frontend wants to pass this by invisible reference. If
1451 so, our new VAR_DECL will have REFERENCE_TYPE, and we need to
1452 replace uses of the PARM_DECL with dereferences. */
1453 if (TREE_TYPE (var) != TREE_TYPE (p)
1454 && POINTER_TYPE_P (TREE_TYPE (var))
1455 && TREE_TYPE (TREE_TYPE (var)) == TREE_TYPE (p))
1457 insert_decl_map (id, var, var);
1458 var_sub = build_fold_indirect_ref (var);
1463 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
1464 that way, when the PARM_DECL is encountered, it will be
1465 automatically replaced by the VAR_DECL. */
1466 insert_decl_map (id, p, var_sub);
1468 /* Declare this new variable. */
1469 TREE_CHAIN (var) = *vars;
1472 /* Make gimplifier happy about this variable. */
1473 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
1475 /* Even if P was TREE_READONLY, the new VAR should not be.
1476 In the original code, we would have constructed a
1477 temporary, and then the function body would have never
1478 changed the value of P. However, now, we will be
1479 constructing VAR directly. The constructor body may
1480 change its value multiple times as it is being
1481 constructed. Therefore, it must not be TREE_READONLY;
1482 the back-end assumes that a TREE_READONLY variable is
1483 assigned to only once. */
1484 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
1485 TREE_READONLY (var) = 0;
1487 /* If there is no setup required and we are in SSA, take the easy route,
1488 replacing all SSA names representing the function parameter by the
1489 SSA name passed to the function.
1491 We need to construct a map for the variable anyway, as it might be used
1492 in different SSA names when the parameter is set in the function.
1494 FIXME: This usually kills the last connection between the inlined
1495 function parameter and the actual value in debug info. Can we do
1496 better here? If we just inserted the statement, copy propagation
1497 would kill it anyway, as it always did in older versions of GCC.
1499 We might want to introduce a notion that a single SSA_NAME might
1500 represent multiple variables for purposes of debugging. */
1501 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
1502 && (TREE_CODE (rhs) == SSA_NAME
1503 || is_gimple_min_invariant (rhs))
1504 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
1506 insert_decl_map (id, def, rhs);
1510 /* Initialize this VAR_DECL from the equivalent argument. Convert
1511 the argument to the proper type in case it was promoted. */
1514 block_stmt_iterator bsi = bsi_last (bb);
1516 if (rhs == error_mark_node)
1518 insert_decl_map (id, p, var_sub);
1522 STRIP_USELESS_TYPE_CONVERSION (rhs);
1524 /* We want to use GIMPLE_MODIFY_STMT, not INIT_EXPR here so that we
1525 keep our trees in gimple form. */
1526 if (def && gimple_in_ssa_p (cfun) && is_gimple_reg (p))
1528 def = remap_ssa_name (def, id);
1529 init_stmt = build_gimple_modify_stmt (def, rhs);
1530 SSA_NAME_DEF_STMT (def) = init_stmt;
1531 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
1532 set_default_def (var, NULL);
1535 init_stmt = build_gimple_modify_stmt (var, rhs);
1537 /* If we did not create a gimple value and we did not create a gimple
1538 cast of a gimple value, then we will need to gimplify INIT_STMTS
1539 at the end. Note that is_gimple_cast only checks the outer
1540 tree code, not its operand. Thus the explicit check that its
1541 operand is a gimple value. */
1542 if ((!is_gimple_val (rhs)
1543 && (!is_gimple_cast (rhs)
1544 || !is_gimple_val (TREE_OPERAND (rhs, 0))))
1545 || !is_gimple_reg (var))
1547 tree_stmt_iterator i;
1549 push_gimplify_context ();
1550 gimplify_stmt (&init_stmt);
1551 if (gimple_in_ssa_p (cfun)
1552 && init_stmt && TREE_CODE (init_stmt) == STATEMENT_LIST)
1554 /* The replacement can expose previously unreferenced
1556 for (i = tsi_start (init_stmt); !tsi_end_p (i); tsi_next (&i))
1557 find_new_referenced_vars (tsi_stmt_ptr (i));
1559 pop_gimplify_context (NULL);
1562 /* If VAR represents a zero-sized variable, it's possible that the
1563 assignment statement may result in no gimple statements. */
1565 bsi_insert_after (&bsi, init_stmt, BSI_NEW_STMT);
1566 if (gimple_in_ssa_p (cfun))
1567 for (;!bsi_end_p (bsi); bsi_next (&bsi))
1568 mark_symbols_for_renaming (bsi_stmt (bsi));
1572 /* Generate code to initialize the parameters of the function at the
1573 top of the stack in ID from the CALL_EXPR EXP. */
1576 initialize_inlined_parameters (copy_body_data *id, tree exp,
1577 tree fn, basic_block bb)
1582 tree vars = NULL_TREE;
1583 call_expr_arg_iterator iter;
1584 tree static_chain = CALL_EXPR_STATIC_CHAIN (exp);
1586 /* Figure out what the parameters are. */
1587 parms = DECL_ARGUMENTS (fn);
1589 /* Loop through the parameter declarations, replacing each with an
1590 equivalent VAR_DECL, appropriately initialized. */
1591 for (p = parms, a = first_call_expr_arg (exp, &iter); p;
1592 a = next_call_expr_arg (&iter), p = TREE_CHAIN (p))
1593 setup_one_parameter (id, p, a, fn, bb, &vars);
1595 /* Initialize the static chain. */
1596 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
1597 gcc_assert (fn != current_function_decl);
1600 /* No static chain? Seems like a bug in tree-nested.c. */
1601 gcc_assert (static_chain);
1603 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
1606 declare_inline_vars (id->block, vars);
1609 /* Declare a return variable to replace the RESULT_DECL for the
1610 function we are calling. An appropriate DECL_STMT is returned.
1611 The USE_STMT is filled to contain a use of the declaration to
1612 indicate the return value of the function.
1614 RETURN_SLOT, if non-null, is the place where to store the result. It
1615 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
1616 was the LHS of the GIMPLE_MODIFY_STMT to which this call is the RHS.
1618 The return value is a (possibly null) value that is the result of the
1619 function as seen by the callee. *USE_P is a (possibly null) value that
1620 holds the result as seen by the caller. */
1623 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
1626 tree callee = id->src_fn;
1627 tree caller = id->dst_fn;
1628 tree result = DECL_RESULT (callee);
1629 tree callee_type = TREE_TYPE (result);
1630 tree caller_type = TREE_TYPE (TREE_TYPE (callee));
1633 /* We don't need to do anything for functions that don't return
1635 if (!result || VOID_TYPE_P (callee_type))
1641 /* If there was a return slot, then the return value is the
1642 dereferenced address of that object. */
1645 /* The front end shouldn't have used both return_slot and
1646 a modify expression. */
1647 gcc_assert (!modify_dest);
1648 if (DECL_BY_REFERENCE (result))
1650 tree return_slot_addr = build_fold_addr_expr (return_slot);
1651 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
1653 /* We are going to construct *&return_slot and we can't do that
1654 for variables not believed to be addressable.
1656 FIXME: This check can possibly trigger, because values returned
1657 via the return slot optimization are not believed by alias analysis
1658 to have their address taken. */
1659 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
1660 if (gimple_in_ssa_p (cfun))
1662 HOST_WIDE_INT bitsize;
1663 HOST_WIDE_INT bitpos;
1665 enum machine_mode mode;
1669 base = get_inner_reference (return_slot, &bitsize, &bitpos,
1671 &mode, &unsignedp, &volatilep,
1673 if (TREE_CODE (base) == INDIRECT_REF)
1674 base = TREE_OPERAND (base, 0);
1675 if (TREE_CODE (base) == SSA_NAME)
1676 base = SSA_NAME_VAR (base);
1677 mark_sym_for_renaming (base);
1679 var = return_slot_addr;
1684 gcc_assert (TREE_CODE (var) != SSA_NAME);
1686 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
1687 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
1688 && !DECL_GIMPLE_REG_P (result)
1690 DECL_GIMPLE_REG_P (var) = 0;
1695 /* All types requiring non-trivial constructors should have been handled. */
1696 gcc_assert (!TREE_ADDRESSABLE (callee_type));
1698 /* Attempt to avoid creating a new temporary variable. */
1700 && TREE_CODE (modify_dest) != SSA_NAME)
1702 bool use_it = false;
1704 /* We can't use MODIFY_DEST if there's type promotion involved. */
1705 if (!useless_type_conversion_p (callee_type, caller_type))
1708 /* ??? If we're assigning to a variable sized type, then we must
1709 reuse the destination variable, because we've no good way to
1710 create variable sized temporaries at this point. */
1711 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
1714 /* If the callee cannot possibly modify MODIFY_DEST, then we can
1715 reuse it as the result of the call directly. Don't do this if
1716 it would promote MODIFY_DEST to addressable. */
1717 else if (TREE_ADDRESSABLE (result))
1721 tree base_m = get_base_address (modify_dest);
1723 /* If the base isn't a decl, then it's a pointer, and we don't
1724 know where that's going to go. */
1725 if (!DECL_P (base_m))
1727 else if (is_global_var (base_m))
1729 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
1730 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
1731 && !DECL_GIMPLE_REG_P (result)
1732 && DECL_GIMPLE_REG_P (base_m))
1734 else if (!TREE_ADDRESSABLE (base_m))
1746 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
1748 var = copy_result_decl_to_var (result, id);
1749 if (gimple_in_ssa_p (cfun))
1752 add_referenced_var (var);
1755 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
1756 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list
1757 = tree_cons (NULL_TREE, var,
1758 DECL_STRUCT_FUNCTION (caller)->unexpanded_var_list);
1760 /* Do not have the rest of GCC warn about this variable as it should
1761 not be visible to the user. */
1762 TREE_NO_WARNING (var) = 1;
1764 declare_inline_vars (id->block, var);
1766 /* Build the use expr. If the return type of the function was
1767 promoted, convert it back to the expected type. */
1769 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
1770 use = fold_convert (caller_type, var);
1772 STRIP_USELESS_TYPE_CONVERSION (use);
1774 if (DECL_BY_REFERENCE (result))
1775 var = build_fold_addr_expr (var);
1778 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
1779 way, when the RESULT_DECL is encountered, it will be
1780 automatically replaced by the VAR_DECL. */
1781 insert_decl_map (id, result, var);
1783 /* Remember this so we can ignore it in remap_decls. */
1790 /* Returns nonzero if a function can be inlined as a tree. */
1793 tree_inlinable_function_p (tree fn)
1795 return inlinable_function_p (fn);
1798 static const char *inline_forbidden_reason;
1801 inline_forbidden_p_1 (tree *nodep, int *walk_subtrees ATTRIBUTE_UNUSED,
1805 tree fn = (tree) fnp;
1808 switch (TREE_CODE (node))
1811 /* Refuse to inline an alloca call unless the user explicitly forced it, as
1812 this may change the program's memory overhead drastically when the
1813 function using alloca is called in a loop. In the GCC benchmark present in
1814 SPEC2000, inlining into schedule_block caused it to require 2GB of
1815 RAM instead of 256MB. */
1816 if (alloca_call_p (node)
1817 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
1819 inline_forbidden_reason
1820 = G_("function %q+F can never be inlined because it uses "
1821 "alloca (override using the always_inline attribute)");
1824 t = get_callee_fndecl (node);
1828 /* We cannot inline functions that call setjmp. */
1829 if (setjmp_call_p (t))
1831 inline_forbidden_reason
1832 = G_("function %q+F can never be inlined because it uses setjmp");
1836 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
1837 switch (DECL_FUNCTION_CODE (t))
1839 /* We cannot inline functions that take a variable number of
1841 case BUILT_IN_VA_START:
1842 case BUILT_IN_STDARG_START:
1843 case BUILT_IN_NEXT_ARG:
1844 case BUILT_IN_VA_END:
1845 inline_forbidden_reason
1846 = G_("function %q+F can never be inlined because it "
1847 "uses variable argument lists");
1850 case BUILT_IN_LONGJMP:
1851 /* We can't inline functions that call __builtin_longjmp at
1852 all. The non-local goto machinery really requires the
1853 destination be in a different function. If we allow the
1854 function calling __builtin_longjmp to be inlined into the
1855 function calling __builtin_setjmp, Things will Go Awry. */
1856 inline_forbidden_reason
1857 = G_("function %q+F can never be inlined because "
1858 "it uses setjmp-longjmp exception handling");
1861 case BUILT_IN_NONLOCAL_GOTO:
1863 inline_forbidden_reason
1864 = G_("function %q+F can never be inlined because "
1865 "it uses non-local goto");
1868 case BUILT_IN_RETURN:
1869 case BUILT_IN_APPLY_ARGS:
1870 /* If a __builtin_apply_args caller would be inlined,
1871 it would be saving arguments of the function it has
1872 been inlined into. Similarly __builtin_return would
1873 return from the function the inline has been inlined into. */
1874 inline_forbidden_reason
1875 = G_("function %q+F can never be inlined because "
1876 "it uses __builtin_return or __builtin_apply_args");
1885 t = TREE_OPERAND (node, 0);
1887 /* We will not inline a function which uses computed goto. The
1888 addresses of its local labels, which may be tucked into
1889 global storage, are of course not constant across
1890 instantiations, which causes unexpected behavior. */
1891 if (TREE_CODE (t) != LABEL_DECL)
1893 inline_forbidden_reason
1894 = G_("function %q+F can never be inlined "
1895 "because it contains a computed goto");
1901 t = TREE_OPERAND (node, 0);
1902 if (DECL_NONLOCAL (t))
1904 /* We cannot inline a function that receives a non-local goto
1905 because we cannot remap the destination label used in the
1906 function that is performing the non-local goto. */
1907 inline_forbidden_reason
1908 = G_("function %q+F can never be inlined "
1909 "because it receives a non-local goto");
1916 /* We cannot inline a function of the form
1918 void F (int i) { struct S { int ar[i]; } s; }
1920 Attempting to do so produces a catch-22.
1921 If walk_tree examines the TYPE_FIELDS chain of RECORD_TYPE/
1922 UNION_TYPE nodes, then it goes into infinite recursion on a
1923 structure containing a pointer to its own type. If it doesn't,
1924 then the type node for S doesn't get adjusted properly when
1927 ??? This is likely no longer true, but it's too late in the 4.0
1928 cycle to try to find out. This should be checked for 4.1. */
1929 for (t = TYPE_FIELDS (node); t; t = TREE_CHAIN (t))
1930 if (variably_modified_type_p (TREE_TYPE (t), NULL))
1932 inline_forbidden_reason
1933 = G_("function %q+F can never be inlined "
1934 "because it uses variable sized variables");
1945 /* Return subexpression representing possible alloca call, if any. */
1947 inline_forbidden_p (tree fndecl)
1949 location_t saved_loc = input_location;
1950 block_stmt_iterator bsi;
1952 tree ret = NULL_TREE;
1954 FOR_EACH_BB_FN (bb, DECL_STRUCT_FUNCTION (fndecl))
1955 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1957 ret = walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
1958 inline_forbidden_p_1, fndecl);
1964 input_location = saved_loc;
1968 /* Returns nonzero if FN is a function that does not have any
1969 fundamental inline blocking properties. */
1972 inlinable_function_p (tree fn)
1974 bool inlinable = true;
1978 /* If we've already decided this function shouldn't be inlined,
1979 there's no need to check again. */
1980 if (DECL_UNINLINABLE (fn))
1983 /* We only warn for functions declared `inline' by the user. */
1984 do_warning = (warn_inline
1986 && DECL_DECLARED_INLINE_P (fn)
1987 && !DECL_IN_SYSTEM_HEADER (fn));
1989 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
1991 if (flag_really_no_inline
1992 && always_inline == NULL)
1995 warning (OPT_Winline, "function %q+F can never be inlined because it "
1996 "is suppressed using -fno-inline", fn);
2000 /* Don't auto-inline anything that might not be bound within
2001 this unit of translation. */
2002 else if (!DECL_DECLARED_INLINE_P (fn)
2003 && DECL_REPLACEABLE_P (fn))
2006 else if (!function_attribute_inlinable_p (fn))
2009 warning (OPT_Winline, "function %q+F can never be inlined because it "
2010 "uses attributes conflicting with inlining", fn);
2014 /* If we don't have the function body available, we can't inline it.
2015 However, this should not be recorded since we also get here for
2016 forward declared inline functions. Therefore, return at once. */
2017 if (!DECL_SAVED_TREE (fn))
2020 /* If we're not inlining at all, then we cannot inline this function. */
2021 else if (!flag_inline_trees)
2024 /* Only try to inline functions if DECL_INLINE is set. This should be
2025 true for all functions declared `inline', and for all other functions
2026 as well with -finline-functions.
2028 Don't think of disregarding DECL_INLINE when flag_inline_trees == 2;
2029 it's the front-end that must set DECL_INLINE in this case, because
2030 dwarf2out loses if a function that does not have DECL_INLINE set is
2031 inlined anyway. That is why we have both DECL_INLINE and
2032 DECL_DECLARED_INLINE_P. */
2033 /* FIXME: When flag_inline_trees dies, the check for flag_unit_at_a_time
2034 here should be redundant. */
2035 else if (!DECL_INLINE (fn) && !flag_unit_at_a_time)
2038 else if (inline_forbidden_p (fn))
2040 /* See if we should warn about uninlinable functions. Previously,
2041 some of these warnings would be issued while trying to expand
2042 the function inline, but that would cause multiple warnings
2043 about functions that would, for example, call alloca. But since
2044 this is a property of the function, just one warning is enough.
2045 As a bonus we can now give more details about the reason why a
2046 function is not inlinable. */
2048 sorry (inline_forbidden_reason, fn);
2049 else if (do_warning)
2050 warning (OPT_Winline, inline_forbidden_reason, fn);
2055 /* Squirrel away the result so that we don't have to check again. */
2056 DECL_UNINLINABLE (fn) = !inlinable;
2061 /* Estimate the cost of a memory move. Use the machine-dependent
2062 word size and take a possible memcpy call into account. */
2065 estimate_move_cost (tree type)
2069 size = int_size_in_bytes (type);
2071 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO)
2072 /* Cost of a memcpy call, 3 arguments and the call. */
2075 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
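/* A hedged worked example (the macro values are hypothetical and vary per
   target): with MOVE_MAX_PIECES == 8 and MOVE_RATIO == 4, a 24-byte
   structure costs (24 + 8 - 1) / 8 == 3 units, while a 64-byte structure
   exceeds the 8 * 4 == 32 byte threshold and is charged as a memcpy call
   instead (three argument moves plus the call itself).  */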
2078 /* Arguments for estimate_num_insns_1. */
2082 /* Used to return the number of insns. */
2085 /* Weights of various constructs. */
2086 eni_weights *weights;
2089 /* Used by estimate_num_insns. Estimate the number of instructions needed
2090 for the given statement. */
2093 estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
2095 struct eni_data *d = data;
2099 if (IS_TYPE_OR_DECL_P (x))
2104 /* Assume that constants and references cost nothing. Their cost should
2105 be dominated by the operations that use them, which we count later,
2106 and they are a common target of CSE and similar optimizations. */
2107 else if (CONSTANT_CLASS_P (x) || REFERENCE_CLASS_P (x))
2110 switch (TREE_CODE (x))
2112 /* Containers have no cost. */
2119 case ALIGN_INDIRECT_REF:
2120 case MISALIGNED_INDIRECT_REF:
2122 case ARRAY_RANGE_REF:
2124 case EXC_PTR_EXPR: /* ??? */
2125 case FILTER_EXPR: /* ??? */
2128 case WITH_CLEANUP_EXPR:
2131 case VIEW_CONVERT_EXPR:
2136 case CASE_LABEL_EXPR:
2139 case EH_FILTER_EXPR:
2140 case STATEMENT_LIST:
2142 case NON_LVALUE_EXPR:
2145 case TRY_CATCH_EXPR:
2146 case TRY_FINALLY_EXPR:
2153 case WITH_SIZE_EXPR:
2157 case OMP_SECTIONS_SWITCH:
2160 /* We don't account constants for now. Assume that the cost is amortized
2161 by operations that do use them. We may re-consider this decision once
2162 we are able to optimize the tree before estimating its size and break
2163 out static initializers. */
2164 case IDENTIFIER_NODE:
2174 /* CHANGE_DYNAMIC_TYPE_EXPR explicitly expands to nothing. */
2175 case CHANGE_DYNAMIC_TYPE_EXPR:
2179 /* Try to estimate the cost of assignments. We have three cases to deal with:
2181 1) Simple assignments to registers;
2182 2) Stores to things that must live in memory. This includes
2183 "normal" stores to scalars, but also assignments of large
2184 structures, or constructors of big arrays;
3) TARGET_EXPRs.
2187 Let us look at the first two cases, assuming we have "a = b + C":
2188 <GIMPLE_MODIFY_STMT <var_decl "a">
2189 <plus_expr <var_decl "b"> <constant C>>
2190 If "a" is a GIMPLE register, the assignment to it is free on almost
2191 any target, because "a" usually ends up in a real register. Hence
2192 the only cost of this expression comes from the PLUS_EXPR, and we
2193 can ignore the GIMPLE_MODIFY_STMT.
2194 If "a" is not a GIMPLE register, the assignment to "a" will most
2195 likely be a real store, so the cost of the GIMPLE_MODIFY_STMT is the cost
2196 of moving something into "a", which we compute using the function estimate_move_cost.
2199 The third case deals with TARGET_EXPRs, for which the semantics are
2200 that a temporary is assigned, unless the TARGET_EXPR itself is being
2201 assigned to something else. In the latter case we do not need the temporary. E.g. in:
2203 <GIMPLE_MODIFY_STMT <var_decl "a"> <target_expr>>, the
2204 GIMPLE_MODIFY_STMT is free. */
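/* A hedged illustration of the third case: a C++ declaration such as
     S s = f ();
   where "f" returns S by value puts a TARGET_EXPR on the right-hand side.
   Because that TARGET_EXPR is assigned directly to "s", no separate
   temporary is needed and the enclosing GIMPLE_MODIFY_STMT itself adds no
   cost; the cost comes from the TARGET_EXPR and the call underneath it.  */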
2206 case GIMPLE_MODIFY_STMT:
2207 /* Is the right-hand side a TARGET_EXPR? */
2208 if (TREE_CODE (GENERIC_TREE_OPERAND (x, 1)) == TARGET_EXPR)
2210 /* ... fall through ... */
2213 x = GENERIC_TREE_OPERAND (x, 0);
2214 /* Is this an assignment to a register? */
2215 if (is_gimple_reg (x))
2217 /* Otherwise it's a store, so fall through to compute the move cost. */
2220 d->count += estimate_move_cost (TREE_TYPE (x));
2223 /* Assign cost of 1 to usual operations.
2224 ??? We may consider mapping RTL costs to this. */
2229 case POINTER_PLUS_EXPR:
2233 case FIXED_CONVERT_EXPR:
2234 case FIX_TRUNC_EXPR:
2246 case VEC_LSHIFT_EXPR:
2247 case VEC_RSHIFT_EXPR:
2254 case TRUTH_ANDIF_EXPR:
2255 case TRUTH_ORIF_EXPR:
2256 case TRUTH_AND_EXPR:
2258 case TRUTH_XOR_EXPR:
2259 case TRUTH_NOT_EXPR:
2268 case UNORDERED_EXPR:
2279 case PREDECREMENT_EXPR:
2280 case PREINCREMENT_EXPR:
2281 case POSTDECREMENT_EXPR:
2282 case POSTINCREMENT_EXPR:
2286 case REALIGN_LOAD_EXPR:
2288 case REDUC_MAX_EXPR:
2289 case REDUC_MIN_EXPR:
2290 case REDUC_PLUS_EXPR:
2291 case WIDEN_SUM_EXPR:
2293 case VEC_WIDEN_MULT_HI_EXPR:
2294 case VEC_WIDEN_MULT_LO_EXPR:
2295 case VEC_UNPACK_HI_EXPR:
2296 case VEC_UNPACK_LO_EXPR:
2297 case VEC_UNPACK_FLOAT_HI_EXPR:
2298 case VEC_UNPACK_FLOAT_LO_EXPR:
2299 case VEC_PACK_TRUNC_EXPR:
2300 case VEC_PACK_SAT_EXPR:
2301 case VEC_PACK_FIX_TRUNC_EXPR:
2303 case WIDEN_MULT_EXPR:
2305 case VEC_EXTRACT_EVEN_EXPR:
2306 case VEC_EXTRACT_ODD_EXPR:
2307 case VEC_INTERLEAVE_HIGH_EXPR:
2308 case VEC_INTERLEAVE_LOW_EXPR:
2315 /* TODO: Cost of a switch should be derived from the number of branches. */
2317 d->count += d->weights->switch_cost;
2320 /* A few special cases of expensive operations. This is useful
2321 to avoid inlining functions that contain too many of these. */
2322 case TRUNC_DIV_EXPR:
2324 case FLOOR_DIV_EXPR:
2325 case ROUND_DIV_EXPR:
2326 case EXACT_DIV_EXPR:
2327 case TRUNC_MOD_EXPR:
2329 case FLOOR_MOD_EXPR:
2330 case ROUND_MOD_EXPR:
2332 d->count += d->weights->div_mod_cost;
2336 tree decl = get_callee_fndecl (x);
2338 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
2339 cost = d->weights->target_builtin_call_cost;
2341 cost = d->weights->call_cost;
2343 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
2344 switch (DECL_FUNCTION_CODE (decl))
2346 case BUILT_IN_CONSTANT_P:
2349 case BUILT_IN_EXPECT:
2351 /* Prefetch instruction is not expensive. */
2352 case BUILT_IN_PREFETCH:
2359 /* Our cost must be kept in sync with cgraph_estimate_size_after_inlining,
2360 which uses the function declaration to figure out the arguments. */
2364 call_expr_arg_iterator iter;
2365 FOR_EACH_CALL_EXPR_ARG (a, iter, x)
2366 d->count += estimate_move_cost (TREE_TYPE (a));
2371 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2372 d->count += estimate_move_cost (TREE_TYPE (arg));
2388 /* OpenMP directives are generally very expensive. */
2389 d->count += d->weights->omp_cost;
2398 /* Estimate number of instructions that will be created by expanding EXPR.
2399 WEIGHTS contains weights attributed to various constructs. */
2402 estimate_num_insns (tree expr, eni_weights *weights)
2404 struct pointer_set_t *visited_nodes;
2406 block_stmt_iterator bsi;
2407 struct function *my_function;
2408 struct eni_data data;
2411 data.weights = weights;
2413 /* If we're given an entire function, walk the CFG. */
2414 if (TREE_CODE (expr) == FUNCTION_DECL)
2416 my_function = DECL_STRUCT_FUNCTION (expr);
2417 gcc_assert (my_function && my_function->cfg);
2418 visited_nodes = pointer_set_create ();
2419 FOR_EACH_BB_FN (bb, my_function)
2421 for (bsi = bsi_start (bb);
2425 walk_tree (bsi_stmt_ptr (bsi), estimate_num_insns_1,
2426 &data, visited_nodes);
2429 pointer_set_destroy (visited_nodes);
2432 walk_tree_without_duplicates (&expr, estimate_num_insns_1, &data);
2437 /* Initializes weights used by estimate_num_insns. */
2440 init_inline_once (void)
2442 eni_inlining_weights.call_cost = PARAM_VALUE (PARAM_INLINE_CALL_COST);
2443 eni_inlining_weights.target_builtin_call_cost = 1;
2444 eni_inlining_weights.div_mod_cost = 10;
2445 eni_inlining_weights.switch_cost = 1;
2446 eni_inlining_weights.omp_cost = 40;
2448 eni_size_weights.call_cost = 1;
2449 eni_size_weights.target_builtin_call_cost = 1;
2450 eni_size_weights.div_mod_cost = 1;
2451 eni_size_weights.switch_cost = 10;
2452 eni_size_weights.omp_cost = 40;
2454 /* Estimating the time for a call is difficult, since we have no idea what the
2455 called function does. In the current uses of eni_time_weights,
2456 underestimating the cost does less harm than overestimating it, so
2457 we choose a rather small value here. */
2458 eni_time_weights.call_cost = 10;
2459 eni_time_weights.target_builtin_call_cost = 10;
2460 eni_time_weights.div_mod_cost = 10;
2461 eni_time_weights.switch_cost = 4;
2462 eni_time_weights.omp_cost = 40;
2465 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
2467 add_lexical_block (tree current_block, tree new_block)
2471 /* Walk to the last sub-block. */
2472 for (blk_p = &BLOCK_SUBBLOCKS (current_block);
2474 blk_p = &TREE_CHAIN (*blk_p))
2477 BLOCK_SUPERCONTEXT (new_block) = current_block;
2480 /* If *TP is a CALL_EXPR, replace it with its inline expansion. */
2483 expand_call_inline (basic_block bb, tree stmt, tree *tp, void *data)
2489 struct pointer_map_t *st;
2492 location_t saved_location;
2493 struct cgraph_edge *cg_edge;
2495 basic_block return_block;
2497 block_stmt_iterator bsi, stmt_bsi;
2498 bool successfully_inlined = FALSE;
2499 bool purge_dead_abnormal_edges;
2503 /* See what we've got. */
2504 id = (copy_body_data *) data;
2507 /* Set input_location here so we get the right instantiation context
2508 if we call instantiate_decl from inlinable_function_p. */
2509 saved_location = input_location;
2510 if (EXPR_HAS_LOCATION (t))
2511 input_location = EXPR_LOCATION (t);
2513 /* From here on, we're only interested in CALL_EXPRs. */
2514 if (TREE_CODE (t) != CALL_EXPR)
2517 /* First, see if we can figure out what function is being called.
2518 If we cannot, then there is no hope of inlining the function. */
2519 fn = get_callee_fndecl (t);
2523 /* Turn forward declarations into real ones. */
2524 fn = cgraph_node (fn)->decl;
2526 /* If fn is a declaration of a function in a nested scope that was
2527 globally declared inline, we don't set its DECL_INITIAL.
2528 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
2529 C++ front-end uses it for cdtors to refer to their internal
2530 declarations, which are not real functions. Fortunately those
2531 don't have trees to be saved, so we can tell by checking their DECL_SAVED_TREE. */
2533 if (! DECL_INITIAL (fn)
2534 && DECL_ABSTRACT_ORIGIN (fn)
2535 && DECL_SAVED_TREE (DECL_ABSTRACT_ORIGIN (fn)))
2536 fn = DECL_ABSTRACT_ORIGIN (fn);
2538 /* Objective-C and Fortran still call tree_rest_of_compilation directly.
2539 Kill this check once this is fixed. */
2540 if (!id->dst_node->analyzed)
2543 cg_edge = cgraph_edge (id->dst_node, stmt);
2545 /* Constant propagation on arguments done during previous inlining
2546 may create a new direct call. Produce an edge for it. */
2549 struct cgraph_node *dest = cgraph_node (fn);
2551 /* We have a missing edge in the callgraph. This can happen in one case,
2552 where previous inlining turned an indirect call into a direct call by
2553 constant-propagating arguments. In all other cases we hit a bug
2554 (incorrect node sharing is the most common reason for missing edges). */
2555 gcc_assert (dest->needed || !flag_unit_at_a_time);
2556 cgraph_create_edge (id->dst_node, dest, stmt,
2557 bb->count, CGRAPH_FREQ_BASE,
2558 bb->loop_depth)->inline_failed
2559 = N_("originally indirect function call not considered for inlining");
2562 fprintf (dump_file, "Created new direct edge to %s",
2563 cgraph_node_name (dest));
2568 /* Don't try to inline functions that are not well-suited to inlining. */
2570 if (!cgraph_inline_p (cg_edge, &reason))
2572 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
2573 /* Avoid warnings during early inline pass. */
2574 && (!flag_unit_at_a_time || cgraph_global_info_ready))
2576 sorry ("inlining failed in call to %q+F: %s", fn, reason);
2577 sorry ("called from here");
2579 else if (warn_inline && DECL_DECLARED_INLINE_P (fn)
2580 && !DECL_IN_SYSTEM_HEADER (fn)
2582 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
2583 /* Avoid warnings during early inline pass. */
2584 && (!flag_unit_at_a_time || cgraph_global_info_ready))
2586 warning (OPT_Winline, "inlining failed in call to %q+F: %s",
2588 warning (OPT_Winline, "called from here");
2592 fn = cg_edge->callee->decl;
2594 #ifdef ENABLE_CHECKING
2595 if (cg_edge->callee->decl != id->dst_node->decl)
2596 verify_cgraph_node (cg_edge->callee);
2599 /* We will be inlining this callee. */
2600 id->eh_region = lookup_stmt_eh_region (stmt);
2602 /* Split the block holding the CALL_EXPR. */
2603 e = split_block (bb, stmt);
2605 return_block = e->dest;
2608 /* split_block splits after the statement; work around this by
2609 moving the call into the second block manually. Not pretty,
2610 but seems easier than doing the CFG manipulation by hand
2611 when the CALL_EXPR is in the last statement of BB. */
2612 stmt_bsi = bsi_last (bb);
2613 bsi_remove (&stmt_bsi, false);
2615 /* If the CALL_EXPR was in the last statement of BB, it may have
2616 been the source of abnormal edges. In this case, schedule
2617 the removal of dead abnormal edges. */
2618 bsi = bsi_start (return_block);
2619 if (bsi_end_p (bsi))
2621 bsi_insert_after (&bsi, stmt, BSI_NEW_STMT);
2622 purge_dead_abnormal_edges = true;
2626 bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
2627 purge_dead_abnormal_edges = false;
2630 stmt_bsi = bsi_start (return_block);
2632 /* Build a block containing code to initialize the arguments, the
2633 actual inline expansion of the body, and a label for the return
2634 statements within the function to jump to. The type of the
2635 statement expression is the return type of the function call. */
2636 id->block = make_node (BLOCK);
2637 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
2638 BLOCK_SOURCE_LOCATION (id->block) = input_location;
2639 add_lexical_block (TREE_BLOCK (stmt), id->block);
2641 /* Local declarations will be replaced by their equivalents in this map. */
2644 id->decl_map = pointer_map_create ();
2646 /* Record the function we are about to inline. */
2648 id->src_node = cg_edge->callee;
2649 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
2652 initialize_inlined_parameters (id, t, fn, bb);
2654 if (DECL_INITIAL (fn))
2655 add_lexical_block (id->block, remap_blocks (DECL_INITIAL (fn), id));
2657 /* Return statements in the function body will be replaced by jumps
2658 to the RET_LABEL. */
2660 gcc_assert (DECL_INITIAL (fn));
2661 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
2663 /* Find the lhs to which the result of this call is assigned. */
2665 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
2667 modify_dest = GIMPLE_STMT_OPERAND (stmt, 0);
2669 /* The function which we are inlining might not return a value,
2670 in which case we should issue a warning that the function
2671 does not return a value. In that case the optimizers will
2672 see that the variable to which the value is assigned was not
2673 initialized. We do not want to issue a warning about that
2674 uninitialized variable. */
2675 if (DECL_P (modify_dest))
2676 TREE_NO_WARNING (modify_dest) = 1;
2677 if (CALL_EXPR_RETURN_SLOT_OPT (t))
2679 return_slot = modify_dest;
2686 /* Declare the return variable for the function. */
2687 declare_return_variable (id, return_slot,
2688 modify_dest, &use_retvar);
2690 /* This is it. Duplicate the callee body. Assume callee is
2691 pre-gimplified. Note that we must not alter the caller
2692 function in any way before this point, as this CALL_EXPR may be
2693 a self-referential call; if we're calling ourselves, we need to
2694 duplicate our body before altering anything. */
2695 copy_body (id, bb->count, bb->frequency, bb, return_block);
2697 /* Add local vars in this inlined callee to caller. */
2698 t_step = id->src_cfun->unexpanded_var_list;
2699 for (; t_step; t_step = TREE_CHAIN (t_step))
2701 var = TREE_VALUE (t_step);
2702 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
2703 cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
2704 cfun->unexpanded_var_list);
2706 cfun->unexpanded_var_list = tree_cons (NULL_TREE, remap_decl (var, id),
2707 cfun->unexpanded_var_list);
2711 pointer_map_destroy (id->decl_map);
2714 /* If the inlined function returns a result that we care about,
2715 clobber the CALL_EXPR with a reference to the return variable. */
2716 if (use_retvar && (TREE_CODE (bsi_stmt (stmt_bsi)) != CALL_EXPR))
2719 if (gimple_in_ssa_p (cfun))
2722 mark_symbols_for_renaming (stmt);
2724 maybe_clean_or_replace_eh_stmt (stmt, stmt);
2727 /* We're modifying a TSI owned by gimple_expand_calls_inline();
2728 tsi_delink() will leave the iterator in a sane state. */
2730 /* Handle the case of inlining a function that is missing a return statement,
2731 so the return value becomes undefined. */
2732 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
2733 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 0)) == SSA_NAME)
2735 tree name = TREE_OPERAND (stmt, 0);
2736 tree var = SSA_NAME_VAR (TREE_OPERAND (stmt, 0));
2737 tree def = gimple_default_def (cfun, var);
2739 /* If the variable is used undefined, make this name undefined via a move. */
2743 TREE_OPERAND (stmt, 1) = def;
2746 /* Otherwise make this variable undefined. */
2749 bsi_remove (&stmt_bsi, true);
2750 set_default_def (var, name);
2751 SSA_NAME_DEF_STMT (name) = build_empty_stmt ();
2755 bsi_remove (&stmt_bsi, true);
2758 if (purge_dead_abnormal_edges)
2759 tree_purge_dead_abnormal_call_edges (return_block);
2761 /* If the value of the new expression is ignored, that's OK. We
2762 don't warn about this for CALL_EXPRs, so we shouldn't warn about
2763 the equivalent inlined version either. */
2764 TREE_USED (*tp) = 1;
2766 /* Output the inlining info for this abstract function, since it has been
2767 inlined. If we don't do this now, we can lose the information about the
2768 variables in the function when the blocks get blown away as soon as we
2769 remove the cgraph node. */
2770 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
2772 /* Update callgraph if needed. */
2773 cgraph_remove_node (cg_edge->callee);
2775 id->block = NULL_TREE;
2776 successfully_inlined = TRUE;
2779 input_location = saved_location;
2780 return successfully_inlined;
2783 /* Expand call statements reachable from STMT_P.
2784 We can only have CALL_EXPRs as the "toplevel" tree code or nested
2785 in a GIMPLE_MODIFY_STMT. See tree-gimple.c:get_call_expr_in(). We can
2786 unfortunately not use that function here because we need a pointer
2787 to the CALL_EXPR, not the tree itself. */
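/* For example (illustrative only), a statement of the form
     lhs = WITH_SIZE_EXPR <CALL_EXPR <f, args>, size>
   is unwrapped in two steps below: first past the GIMPLE_MODIFY_STMT, then
   past the WITH_SIZE_EXPR, leaving expr_p pointing at the CALL_EXPR itself
   before expand_call_inline is invoked.  */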
2790 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
2792 block_stmt_iterator bsi;
2794 /* Register specific tree functions. */
2795 tree_register_cfg_hooks ();
2796 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2798 tree *expr_p = bsi_stmt_ptr (bsi);
2799 tree stmt = *expr_p;
2801 if (TREE_CODE (*expr_p) == GIMPLE_MODIFY_STMT)
2802 expr_p = &GIMPLE_STMT_OPERAND (*expr_p, 1);
2803 if (TREE_CODE (*expr_p) == WITH_SIZE_EXPR)
2804 expr_p = &TREE_OPERAND (*expr_p, 0);
2805 if (TREE_CODE (*expr_p) == CALL_EXPR)
2806 if (expand_call_inline (bb, stmt, expr_p, id))
2812 /* Walk all basic blocks created after FIRST and try to fold every statement
2813 in the STATEMENTS pointer set. */
2815 fold_marked_statements (int first, struct pointer_set_t *statements)
2817 for (; first < n_basic_blocks; first++)
2818 if (BASIC_BLOCK (first))
2820 block_stmt_iterator bsi;
2821 for (bsi = bsi_start (BASIC_BLOCK (first));
2822 !bsi_end_p (bsi); bsi_next (&bsi))
2823 if (pointer_set_contains (statements, bsi_stmt (bsi)))
2825 tree old_stmt = bsi_stmt (bsi);
2826 if (fold_stmt (bsi_stmt_ptr (bsi)))
2828 update_stmt (bsi_stmt (bsi));
2829 if (maybe_clean_or_replace_eh_stmt (old_stmt, bsi_stmt (bsi)))
2830 tree_purge_dead_eh_edges (BASIC_BLOCK (first));
2836 /* Return true if BB has at least one abnormal outgoing edge. */
2839 has_abnormal_outgoing_edge_p (basic_block bb)
2844 FOR_EACH_EDGE (e, ei, bb->succs)
2845 if (e->flags & EDGE_ABNORMAL)
2851 /* Expand calls to inline functions in the body of FN. */
2854 optimize_inline_calls (tree fn)
2859 int last = n_basic_blocks;
2860 /* There is no point in performing inlining if errors have already
2861 occurred -- and we might crash if we try to inline invalid code. */
2863 if (errorcount || sorrycount)
2867 memset (&id, 0, sizeof (id));
2869 id.src_node = id.dst_node = cgraph_node (fn);
2871 /* Or any functions that aren't finished yet. */
2872 prev_fn = NULL_TREE;
2873 if (current_function_decl)
2875 id.dst_fn = current_function_decl;
2876 prev_fn = current_function_decl;
2879 id.copy_decl = copy_decl_maybe_to_var;
2880 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
2881 id.transform_new_cfg = false;
2882 id.transform_return_to_modify = true;
2883 id.transform_lang_insert_block = false;
2884 id.statements_to_fold = pointer_set_create ();
2886 push_gimplify_context ();
2888 /* We make no attempts to keep dominance info up-to-date. */
2889 free_dominance_info (CDI_DOMINATORS);
2890 free_dominance_info (CDI_POST_DOMINATORS);
2892 /* Reach the trees by walking over the CFG, and note the
2893 enclosing basic-blocks in the call edges. */
2894 /* We walk the blocks going forward, because inlined function bodies
2895 will split id->current_basic_block, and the new blocks will
2896 follow it; we'll trudge through them, processing their CALL_EXPRs along the way. */
2899 gimple_expand_calls_inline (bb, &id);
2901 pop_gimplify_context (NULL);
2903 #ifdef ENABLE_CHECKING
2905 struct cgraph_edge *e;
2907 verify_cgraph_node (id.dst_node);
2909 /* Double check that we inlined everything we are supposed to inline. */
2910 for (e = id.dst_node->callees; e; e = e->next_callee)
2911 gcc_assert (e->inline_failed);
2915 /* Fold the statements before compacting/renumbering the basic blocks. */
2916 fold_marked_statements (last, id.statements_to_fold);
2917 pointer_set_destroy (id.statements_to_fold);
2919 /* Renumber the (code) basic_blocks consecutively. */
2921 /* Renumber the lexical scoping (non-code) blocks consecutively. */
2924 /* We are not going to keep the cgraph edges up to date.
2925 Kill them so they won't confuse us. */
2926 cgraph_node_remove_callees (id.dst_node);
2928 fold_cond_expr_cond ();
2929 /* It would be nice to check SSA/CFG/statement consistency here, but it is
2930 not possible yet - the IPA passes might mark various functions as non-throwing
2931 and they don't care to proactively update local EH info. This is
2932 done later in the fixup_cfg pass, which also executes the verification. */
2933 return (TODO_update_ssa | TODO_cleanup_cfg
2934 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
2935 | (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
2938 /* FN is a function that has a complete body, and CLONE is a function whose
2939 body is to be set to a copy of FN, mapping argument declarations according
2940 to the ARG_MAP pointer map. */
2943 clone_body (tree clone, tree fn, void *arg_map)
2947 /* Clone the body, as if we were making an inline call. But, remap the
2948 parameters in the callee to the parameters of caller. */
2949 memset (&id, 0, sizeof (id));
2952 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
2953 id.decl_map = (struct pointer_map_t *)arg_map;
2955 id.copy_decl = copy_decl_no_change;
2956 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
2957 id.transform_new_cfg = true;
2958 id.transform_return_to_modify = false;
2959 id.transform_lang_insert_block = true;
2961 /* We're not inside any EH region. */
2964 /* Actually copy the body. */
2965 append_to_statement_list_force (copy_generic_body (&id), &DECL_SAVED_TREE (clone));
2968 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
2971 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2973 enum tree_code code = TREE_CODE (*tp);
2974 enum tree_code_class cl = TREE_CODE_CLASS (code);
2976 /* We make copies of most nodes. */
2977 if (IS_EXPR_CODE_CLASS (cl)
2978 || IS_GIMPLE_STMT_CODE_CLASS (cl)
2979 || code == TREE_LIST
2981 || code == TYPE_DECL
2982 || code == OMP_CLAUSE)
2984 /* Because the chain gets clobbered when we make a copy, we save it here. */
2986 tree chain = NULL_TREE, new;
2988 if (!GIMPLE_TUPLE_P (*tp))
2989 chain = TREE_CHAIN (*tp);
2991 /* Copy the node. */
2992 new = copy_node (*tp);
2994 /* Propagate mudflap marked-ness. */
2995 if (flag_mudflap && mf_marked_p (*tp))
3000 /* Now, restore the chain, if appropriate. That will cause
3001 walk_tree to walk into the chain as well. */
3002 if (code == PARM_DECL
3003 || code == TREE_LIST
3004 || code == OMP_CLAUSE)
3005 TREE_CHAIN (*tp) = chain;
3007 /* For now, we don't update BLOCKs when we make copies. So, we
3008 have to nullify all BIND_EXPRs. */
3009 if (TREE_CODE (*tp) == BIND_EXPR)
3010 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
3012 else if (code == CONSTRUCTOR)
3014 /* CONSTRUCTOR nodes need special handling because
3015 we need to duplicate the vector of elements. */
3018 new = copy_node (*tp);
3020 /* Propagate mudflap marked-ness. */
3021 if (flag_mudflap && mf_marked_p (*tp))
3024 CONSTRUCTOR_ELTS (new) = VEC_copy (constructor_elt, gc,
3025 CONSTRUCTOR_ELTS (*tp));
3028 else if (TREE_CODE_CLASS (code) == tcc_type)
3030 else if (TREE_CODE_CLASS (code) == tcc_declaration)
3032 else if (TREE_CODE_CLASS (code) == tcc_constant)
3035 gcc_assert (code != STATEMENT_LIST);
3039 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
3040 information indicating to what new SAVE_EXPR this one should be mapped,
3041 use that one. Otherwise, create a new node and enter it in ST. */
3045 remap_save_expr (tree *tp, void *st_, int *walk_subtrees)
3047 struct pointer_map_t *st = (struct pointer_map_t *) st_;
3051 /* See if we already encountered this SAVE_EXPR. */
3052 n = (tree *) pointer_map_contains (st, *tp);
3054 /* If we didn't already remap this SAVE_EXPR, do so now. */
3057 t = copy_node (*tp);
3059 /* Remember this SAVE_EXPR. */
3060 *pointer_map_insert (st, *tp) = t;
3061 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
3062 *pointer_map_insert (st, t) = t;
3066 /* We've already walked into this SAVE_EXPR; don't do it again. */
3071 /* Replace this SAVE_EXPR with the copy. */
3075 /* Called via walk_tree. If *TP points to a LABEL_EXPR for a local label,
3076 copies the label declaration and enters it in the decl map of DATA
3077 (which is really a `copy_body_data *'). */
3080 mark_local_for_remap_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
3083 copy_body_data *id = (copy_body_data *) data;
3085 /* Don't walk into types. */
3089 else if (TREE_CODE (*tp) == LABEL_EXPR)
3091 tree decl = TREE_OPERAND (*tp, 0);
3093 /* Copy the decl and remember the copy. */
3094 insert_decl_map (id, decl, id->copy_decl (decl, id));
3100 /* Perform any modifications to EXPR required when it is unsaved. Does
3101 not recurse into EXPR's subtrees. */
3104 unsave_expr_1 (tree expr)
3106 switch (TREE_CODE (expr))
3109 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
3110 It's OK for this to happen if it was part of a subtree that
3111 isn't immediately expanded, such as operand 2 of another TARGET_EXPR. */
3113 if (TREE_OPERAND (expr, 1))
3116 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
3117 TREE_OPERAND (expr, 3) = NULL_TREE;
3125 /* Called via walk_tree when an expression is unsaved. Using the
3126 decl map in the `copy_body_data' pointed to by DATA, remaps all
3127 local declarations to appropriate replacements. */
3130 unsave_r (tree *tp, int *walk_subtrees, void *data)
3132 copy_body_data *id = (copy_body_data *) data;
3133 struct pointer_map_t *st = id->decl_map;
3136 /* Only a local declaration (variable or label). */
3137 if ((TREE_CODE (*tp) == VAR_DECL && !TREE_STATIC (*tp))
3138 || TREE_CODE (*tp) == LABEL_DECL)
3140 /* Lookup the declaration. */
3141 n = (tree *) pointer_map_contains (st, *tp);
3143 /* If it's there, remap it. */
3148 else if (TREE_CODE (*tp) == STATEMENT_LIST)
3149 copy_statement_list (tp);
3150 else if (TREE_CODE (*tp) == BIND_EXPR)
3151 copy_bind_expr (tp, walk_subtrees, id);
3152 else if (TREE_CODE (*tp) == SAVE_EXPR)
3153 remap_save_expr (tp, st, walk_subtrees);
3156 copy_tree_r (tp, walk_subtrees, NULL);
3158 /* Do whatever unsaving is required. */
3159 unsave_expr_1 (*tp);
3162 /* Keep iterating. */
3166 /* Copies everything in EXPR and replaces variables, labels
3167 and SAVE_EXPRs local to EXPR. */
3170 unsave_expr_now (tree expr)
3174 /* There's nothing to do for NULL_TREE. */
3179 memset (&id, 0, sizeof (id));
3180 id.src_fn = current_function_decl;
3181 id.dst_fn = current_function_decl;
3182 id.decl_map = pointer_map_create ();
3184 id.copy_decl = copy_decl_no_change;
3185 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
3186 id.transform_new_cfg = false;
3187 id.transform_return_to_modify = false;
3188 id.transform_lang_insert_block = false;
3190 /* Walk the tree once to find local labels. */
3191 walk_tree_without_duplicates (&expr, mark_local_for_remap_r, &id);
3193 /* Walk the tree again, copying, remapping, and unsaving. */
3194 walk_tree (&expr, unsave_r, &id, NULL);
3197 pointer_map_destroy (id.decl_map);
3202 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
3205 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
3214 debug_find_tree (tree top, tree search)
3216 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
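/* A hypothetical debugging session might use this as:
     (gdb) call debug_find_tree (body, suspect)
   where "body" and "suspect" are tree values visible in the current frame;
   the result is nonzero when SUSPECT occurs somewhere within BODY.  */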
3220 /* Declare the variables created by the inliner. Add all the variables in
3221 VARS to BLOCK. */
3224 declare_inline_vars (tree block, tree vars)
3227 for (t = vars; t; t = TREE_CHAIN (t))
3229 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
3230 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
3231 cfun->unexpanded_var_list =
3232 tree_cons (NULL_TREE, t,
3233 cfun->unexpanded_var_list);
3237 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
3241 /* Finish up COPY, the new copy of DECL, for use in the destination function
3242 of ID: copy the debug-related flags, record the declaration that inspired
3243 the copy, clear any stale RTL, and set DECL_CONTEXT appropriately. */
3246 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
3248 /* Don't generate debug information for the copy if we wouldn't have
3249 generated it for the original either. */
3250 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
3251 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
3253 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
3254 declaration inspired this copy. */
3255 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
3257 /* The new variable/label has no RTL, yet. */
3258 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
3259 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
3260 SET_DECL_RTL (copy, NULL_RTX);
3262 /* These args would always appear unused, if not for this. */
3263 TREE_USED (copy) = 1;
3265 /* Set the context for the new declaration. */
3266 if (!DECL_CONTEXT (decl))
3267 /* Globals stay global. */
3269 else if (DECL_CONTEXT (decl) != id->src_fn)
3270 /* Things that weren't in the scope of the function we're inlining
3271 from aren't in the scope we're inlining to, either. */
3273 else if (TREE_STATIC (decl))
3274 /* Function-scoped static variables should stay in the original function. */
3278 /* Ordinary automatic local variables are now in the scope of the new function. */
3280 DECL_CONTEXT (copy) = id->dst_fn;
3286 copy_decl_to_var (tree decl, copy_body_data *id)
3290 gcc_assert (TREE_CODE (decl) == PARM_DECL
3291 || TREE_CODE (decl) == RESULT_DECL);
3293 type = TREE_TYPE (decl);
3295 copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
3296 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
3297 TREE_READONLY (copy) = TREE_READONLY (decl);
3298 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
3299 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
3300 DECL_NO_TBAA_P (copy) = DECL_NO_TBAA_P (decl);
3302 return copy_decl_for_dup_finish (id, decl, copy);
3305 /* Like copy_decl_to_var, but create a return slot object instead of a
3306 pointer variable for return by invisible reference. */
3309 copy_result_decl_to_var (tree decl, copy_body_data *id)
3313 gcc_assert (TREE_CODE (decl) == PARM_DECL
3314 || TREE_CODE (decl) == RESULT_DECL);
3316 type = TREE_TYPE (decl);
3317 if (DECL_BY_REFERENCE (decl))
3318 type = TREE_TYPE (type);
3320 copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
3321 TREE_READONLY (copy) = TREE_READONLY (decl);
3322 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
3323 if (!DECL_BY_REFERENCE (decl))
3325 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
3326 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
3327 DECL_NO_TBAA_P (copy) = DECL_NO_TBAA_P (decl);
3330 return copy_decl_for_dup_finish (id, decl, copy);
3335 copy_decl_no_change (tree decl, copy_body_data *id)
3339 copy = copy_node (decl);
3341 /* The COPY is not abstract; it will be generated in DST_FN. */
3342 DECL_ABSTRACT (copy) = 0;
3343 lang_hooks.dup_lang_specific_decl (copy);
3345 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
3346 been taken; it's for internal bookkeeping in expand_goto_internal. */
3347 if (TREE_CODE (copy) == LABEL_DECL)
3349 TREE_ADDRESSABLE (copy) = 0;
3350 LABEL_DECL_UID (copy) = -1;
3353 return copy_decl_for_dup_finish (id, decl, copy);
3357 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
3359 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
3360 return copy_decl_to_var (decl, id);
3362 return copy_decl_no_change (decl, id);
3365 /* Return a copy of the function's argument tree. */
3367 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id)
3369 tree *arg_copy, *parg;
3371 arg_copy = &orig_parm;
3372 for (parg = arg_copy; *parg; parg = &TREE_CHAIN (*parg))
3374 tree new = remap_decl (*parg, id);
3375 lang_hooks.dup_lang_specific_decl (new);
3376 TREE_CHAIN (new) = TREE_CHAIN (*parg);
3382 /* Return a copy of the function's static chain. */
3384 copy_static_chain (tree static_chain, copy_body_data * id)
3386 tree *chain_copy, *pvar;
3388 chain_copy = &static_chain;
3389 for (pvar = chain_copy; *pvar; pvar = &TREE_CHAIN (*pvar))
3391 tree new = remap_decl (*pvar, id);
3392 lang_hooks.dup_lang_specific_decl (new);
3393 TREE_CHAIN (new) = TREE_CHAIN (*pvar);
3396 return static_chain;
3399 /* Return true if the function is allowed to be versioned.
3400 This is a guard for the versioning functionality. */
3402 tree_versionable_function_p (tree fndecl)
3404 if (fndecl == NULL_TREE)
3406 /* ??? There are cases where a function is
3407 uninlinable but can be versioned. */
3408 if (!tree_inlinable_function_p (fndecl))
3414 /* Create a copy of a function's tree.
3415 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
3416 of the original function and the new copied function
3417 respectively. In case we want to replace a DECL
3418 tree with another tree while duplicating the function's
3419 body, TREE_MAP represents the mapping between these
3420 trees. If UPDATE_CLONES is set, the call_stmt fields
3421 of edges of clones of the function will be updated. */
3423 tree_function_versioning (tree old_decl, tree new_decl, varray_type tree_map,
3426 struct cgraph_node *old_version_node;
3427 struct cgraph_node *new_version_node;
3431 struct ipa_replace_map *replace_info;
3432 basic_block old_entry_block;
3434 tree old_current_function_decl = current_function_decl;
3436 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
3437 && TREE_CODE (new_decl) == FUNCTION_DECL);
3438 DECL_POSSIBLY_INLINED (old_decl) = 1;
3440 old_version_node = cgraph_node (old_decl);
3441 new_version_node = cgraph_node (new_decl);
3443 DECL_ARTIFICIAL (new_decl) = 1;
3444 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
3446 /* Prepare the data structures for the tree copy. */
3447 memset (&id, 0, sizeof (id));
3449 /* Generate a new name for the new version. */
3452 DECL_NAME (new_decl) = create_tmp_var_name (NULL);
3453 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
3454 SET_DECL_RTL (new_decl, NULL_RTX);
3455 id.statements_to_fold = pointer_set_create ();
3458 id.decl_map = pointer_map_create ();
3459 id.src_fn = old_decl;
3460 id.dst_fn = new_decl;
3461 id.src_node = old_version_node;
3462 id.dst_node = new_version_node;
3463 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
3465 id.copy_decl = copy_decl_no_change;
3466 id.transform_call_graph_edges
3467 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
3468 id.transform_new_cfg = true;
3469 id.transform_return_to_modify = false;
3470 id.transform_lang_insert_block = false;
3472 current_function_decl = new_decl;
3473 old_entry_block = ENTRY_BLOCK_PTR_FOR_FUNCTION
3474 (DECL_STRUCT_FUNCTION (old_decl));
3475 initialize_cfun (new_decl, old_decl,
3476 old_entry_block->count,
3477 old_entry_block->frequency);
3478 push_cfun (DECL_STRUCT_FUNCTION (new_decl));
3480 /* Copy the function's static chain. */
3481 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
3483 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl =
3484 copy_static_chain (DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl,
3486 /* Copy the function's arguments. */
3487 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
3488 DECL_ARGUMENTS (new_decl) =
3489 copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id);
3491 /* If there's a tree_map, prepare for substitution. */
3493 for (i = 0; i < VARRAY_ACTIVE_SIZE (tree_map); i++)
3495 replace_info = VARRAY_GENERIC_PTR (tree_map, i);
3496 if (replace_info->replace_p)
3497 insert_decl_map (&id, replace_info->old_tree,
3498 replace_info->new_tree);
3501 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
3503 /* Renumber the lexical scoping (non-code) blocks consecutively. */
3504 number_blocks (id.dst_fn);
3506 if (DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list != NULL_TREE)
3507 /* Add local vars. */
3508 for (t_step = DECL_STRUCT_FUNCTION (old_decl)->unexpanded_var_list;
3509 t_step; t_step = TREE_CHAIN (t_step))
3511 tree var = TREE_VALUE (t_step);
3512 if (TREE_STATIC (var) && !TREE_ASM_WRITTEN (var))
3513 cfun->unexpanded_var_list = tree_cons (NULL_TREE, var,
3514 cfun->unexpanded_var_list);
3516 cfun->unexpanded_var_list =
3517 tree_cons (NULL_TREE, remap_decl (var, &id),
3518 cfun->unexpanded_var_list);
3522 /* Copy the function's body. */
3522 copy_body (&id, old_entry_block->count, old_entry_block->frequency, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR);
3524 if (DECL_RESULT (old_decl) != NULL_TREE)
3526 tree *res_decl = &DECL_RESULT (old_decl);
3527 DECL_RESULT (new_decl) = remap_decl (*res_decl, &id);
3528 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
3531 /* Renumber the lexical scoping (non-code) blocks consecutively. */
3532 number_blocks (new_decl);
3535 pointer_map_destroy (id.decl_map);
3538 fold_marked_statements (0, id.statements_to_fold);
3539 pointer_set_destroy (id.statements_to_fold);
3540 fold_cond_expr_cond ();
3542 if (gimple_in_ssa_p (cfun))
3544 free_dominance_info (CDI_DOMINATORS);
3545 free_dominance_info (CDI_POST_DOMINATORS);
3547 delete_unreachable_blocks ();
3548 update_ssa (TODO_update_ssa);
3551 fold_cond_expr_cond ();
3552 if (need_ssa_update_p ())
3553 update_ssa (TODO_update_ssa);
3556 free_dominance_info (CDI_DOMINATORS);
3557 free_dominance_info (CDI_POST_DOMINATORS);
3559 current_function_decl = old_current_function_decl;
3560 gcc_assert (!current_function_decl
3561 || DECL_STRUCT_FUNCTION (current_function_decl) == cfun);
3565 /* Duplicate a type, fields and all. */
3568 build_duplicate_type (tree type)
3570 struct copy_body_data id;
3572 memset (&id, 0, sizeof (id));
3573 id.src_fn = current_function_decl;
3574 id.dst_fn = current_function_decl;
3576 id.decl_map = pointer_map_create ();
3578 type = remap_type_1 (type, &id);
3580 pointer_map_destroy (id.decl_map);