1 /* Callgraph based interprocedural optimizations.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
3 2011 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This module implements main driver of compilation process as well as
23 few basic interprocedural optimizers.
25 The main scope of this file is to act as an interface in between
26 tree based frontends and the backend (and middle end)
28 The front-end is supposed to use following functionality:
30 - cgraph_finalize_function
32 This function is called once front-end has parsed whole body of function
33 and it is certain that the function body nor the declaration will change.
35 (There is one exception needed for implementing GCC extern inline
38 - varpool_finalize_variable
40 This function has same behavior as the above but is used for static
43 - cgraph_finalize_compilation_unit
45 This function is called once (source level) compilation unit is finalized
46 and it will no longer change.
48 In the call-graph construction and local function
49 analysis takes place here. Bodies of unreachable functions are released
50 to conserve memory usage.
52 The function can be called multiple times when multiple source level
53 compilation units are combined (such as in C frontend)
57 In this unit-at-a-time compilation the intra procedural analysis takes
58 place here. In particular the static functions whose address is never
59 taken are marked as local. Backend can then use this information to
60 modify calling conventions, do better inlining or similar optimizations.
62 - cgraph_mark_needed_node
63 - varpool_mark_needed_node
65 When function or variable is referenced by some hidden way the call-graph
66 data structure must be updated accordingly by this function.
67 There should be little need to call this function and all the references
68 should be made explicit to cgraph code. At present these functions are
69 used by C++ frontend to explicitly mark the keyed methods.
71 - analyze_expr callback
73 This function is responsible for lowering tree nodes not understood by
74 generic code into understandable ones or alternatively marking
75 callgraph and varpool nodes referenced by the as needed.
77 ??? On the tree-ssa genericizing should take place here and we will avoid
78 need for these hooks (replacing them by genericizing hook)
80 Analyzing of all functions is deferred
81 to cgraph_finalize_compilation_unit and expansion into cgraph_optimize.
83 In cgraph_finalize_compilation_unit the reachable functions are
84 analyzed. During analysis the call-graph edges from reachable
85 functions are constructed and their destinations are marked as
86 reachable. References to functions and variables are discovered too
87 and variables found to be needed output to the assembly file. Via
88 mark_referenced call in assemble_variable functions referenced by
89 static variables are noticed too.
91 The intra-procedural information is produced and its existence
92 indicated by global_info_ready. Once this flag is set it is impossible
93 to change function from !reachable to reachable and thus
94 assemble_variable no longer call mark_referenced.
96 Finally the call-graph is topologically sorted and all reachable functions
97 that have not been completely inlined or are not external are output.
99 ??? It is possible that reference to function or variable is optimized
100 out. We can not deal with this nicely because topological order is not
101 suitable for it. For tree-ssa we may consider another pass doing
102 optimization and re-discovering reachable functions.
104 ??? Reorganize code so variables are output very last and only if they
105 really have been referenced by produced code, so we catch more cases
106 where reference has been optimized out. */
111 #include "coretypes.h"
115 #include "tree-flow.h"
116 #include "tree-inline.h"
117 #include "langhooks.h"
118 #include "pointer-set.h"
125 #include "diagnostic.h"
126 #include "tree-pretty-print.h"
127 #include "gimple-pretty-print.h"
132 #include "function.h"
133 #include "ipa-prop.h"
135 #include "tree-iterator.h"
136 #include "tree-pass.h"
137 #include "tree-dump.h"
139 #include "coverage.h"
/* Forward declarations for the static driver routines defined below.  */
142 static void cgraph_expand_all_functions (void);
143 static void cgraph_mark_functions_to_output (void);
144 static void cgraph_expand_function (struct cgraph_node *);
145 static void cgraph_output_pending_asms (void);
146 static void cgraph_analyze_function (struct cgraph_node *);
/* Stream for callgraph debug dumps; code below always tests this for
   non-NULL before writing, so it may legitimately stay NULL when
   dumping is disabled.  */
148 FILE *cgraph_dump_file;
150 /* Used for vtable lookup in thunk adjusting.  Built lazily on first
   use (see thunk_adjust) and registered with the GC via GTY.  */
151 static GTY (()) tree vtable_entry_type;
153 /* Determine if function DECL is needed. That is, visible to something
154 either outside this translation unit, something magic in the system
   configury.  NODE is the callgraph node for DECL.  */
158 cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
160 /* If the user told us it is used, then it must be so. */
161 if (node->local.externally_visible)
164 /* ??? If the assembler name is set by hand, it is possible to assemble
165 the name later after finalizing the function and the fact is noticed
166 in assemble_name then. This is arguably a bug. */
167 if (DECL_ASSEMBLER_NAME_SET_P (decl)
168 && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
171 /* With -fkeep-inline-functions we are keeping all inline functions except
172 for extern inline ones. */
173 if (flag_keep_inline_functions
174 && DECL_DECLARED_INLINE_P (decl)
175 && !DECL_EXTERNAL (decl)
176 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl)))
179 /* If we decided it was needed before, but at the time we didn't have
180 the body of the function available, then it's still needed. We have
181 to go back and re-check its dependencies now. */
185 /* Externally visible functions must be output. The exception is
186 COMDAT functions that must be output only when they are needed.
188 When not optimizing, also output the static functions. (see
189 PR24561), but don't do so for always_inline functions, functions
190 declared inline and nested functions. These were optimized out
191 in the original implementation and it is unclear whether we want
192 to change the behavior here. */
193 if (((TREE_PUBLIC (decl)
195 && !node->local.disregard_inline_limits
196 && !DECL_DECLARED_INLINE_P (decl)
197 && !(DECL_CONTEXT (decl)
198 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)))
199 && !flag_whole_program
201 && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
207 /* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
208 functions into callgraph in a way so they look like ordinary reachable
209 functions inserted into callgraph already at construction time. */
212 cgraph_process_new_functions (void)
216 struct cgraph_node *node;
218 varpool_analyze_pending_decls ();
219 /* Note that this queue may grow as it's being processed, as the new
220 functions may generate new ones. */
221 while (cgraph_new_nodes)
223 node = cgraph_new_nodes;
225 cgraph_new_nodes = cgraph_new_nodes->next_needed;
/* How much processing is needed depends on how far compilation
   has progressed when the new function appeared.  */
226 switch (cgraph_state)
228 case CGRAPH_STATE_CONSTRUCTION:
229 /* At construction time we just need to finalize function and move
230 it into reachable functions list. */
232 node->next_needed = NULL;
233 cgraph_finalize_function (fndecl, false);
234 cgraph_mark_reachable_node (node);
238 case CGRAPH_STATE_IPA:
239 case CGRAPH_STATE_IPA_SSA:
240 /* When IPA optimization already started, do all essential
241 transformations that have been already performed on the whole
242 cgraph but not on this function. */
244 gimple_register_cfg_hooks ();
246 cgraph_analyze_function (node);
247 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
248 current_function_decl = fndecl;
249 compute_inline_parameters (node);
250 if ((cgraph_state == CGRAPH_STATE_IPA_SSA
251 && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
252 /* When not optimizing, be sure we run early local passes anyway
   to expand OMP.  */
255 execute_pass_list (pass_early_local_passes.pass.sub);
/* Dominance info computed by the early passes is stale once we
   leave this function; drop it.  */
256 free_dominance_info (CDI_POST_DOMINATORS);
257 free_dominance_info (CDI_DOMINATORS);
259 current_function_decl = NULL;
262 case CGRAPH_STATE_EXPANSION:
263 /* Functions created during expansion shall be compiled
   directly.  */
266 cgraph_expand_function (node);
273 cgraph_call_function_insertion_hooks (node);
274 varpool_analyze_pending_decls ();
279 /* As an GCC extension we allow redefinition of the function. The
280 semantics when both copies of bodies differ is not well defined.
281 We replace the old body with new body so in unit at a time mode
282 we always use new body, while in normal mode we may end up with
283 old body inlined into some functions and new body expanded and
286 ??? It may make more sense to use one body for inlining and other
287 body for expanding the function but this is difficult to do. */
290 cgraph_reset_node (struct cgraph_node *node)
292 /* If node->process is set, then we have already begun whole-unit analysis.
293 This is *not* testing for whether we've already emitted the function.
294 That case can be sort-of legitimately seen with real function redefinition
295 errors. I would argue that the front end should never present us with
296 such a case, but don't enforce that for now. */
297 gcc_assert (!node->process);
299 /* Reset our data structures so we can analyze the function again. */
300 memset (&node->local, 0, sizeof (node->local));
301 memset (&node->global, 0, sizeof (node->global));
302 memset (&node->rtl, 0, sizeof (node->rtl));
303 node->analyzed = false;
304 node->local.redefined_extern_inline = true;
305 node->local.finalized = false;
/* Drop existing call edges; they will be rebuilt when the new body
   is analyzed.  */
307 cgraph_node_remove_callees (node);
309 /* We may need to re-queue the node for assembling in case
310 we already proceeded it and ignored as not needed or got
311 a re-declaration in IMA mode. */
314 struct cgraph_node *n;
316 for (n = cgraph_nodes_queue; n; n = n->next_needed)
/* Bring the function body of NODE to lowered GIMPLE form: lower any
   nested functions first (after which NODE must have no nested
   children), then run the tree lowering passes and record that the
   node is lowered.  */
325 cgraph_lower_function (struct cgraph_node *node)
331 lower_nested_functions (node->decl);
332 gcc_assert (!node->nested);
334 tree_lowering_passes (node->decl);
335 node->lowered = true;
338 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
339 logic in effect. If NESTED is true, then our caller cannot stand to have
340 the garbage collector run at the moment. We would need to either create
341 a new GC context, or just not compile right now. */
344 cgraph_finalize_function (tree decl, bool nested)
346 struct cgraph_node *node = cgraph_node (decl);
/* A second finalization means the front end redefined the function;
   reset the node so it can be analyzed afresh.  */
348 if (node->local.finalized)
349 cgraph_reset_node (node);
351 node->pid = cgraph_max_pid ++;
352 notice_global_symbol (decl);
353 node->local.finalized = true;
354 node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
355 node->finalized_by_frontend = true;
357 if (cgraph_decide_is_function_needed (node, decl))
358 cgraph_mark_needed_node (node);
360 /* Since we reclaim unreachable nodes at the end of every language
361 level unit, we need to be conservative about possible entry points
   there.  */
363 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
364 || DECL_STATIC_CONSTRUCTOR (decl)
365 || DECL_STATIC_DESTRUCTOR (decl)
366 /* COMDAT virtual functions may be referenced by vtable from
367 other compilation unit. Still we want to devirtualize calls
368 to those so we need to analyze them.
369 FIXME: We should introduce may edges for this purpose and update
370 their handling in unreachable function removal and inliner too. */
371 || (DECL_VIRTUAL_P (decl) && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl))))
372 cgraph_mark_reachable_node (node);
374 /* If we've not yet emitted decl, tell the debug info about it. */
375 if (!TREE_ASM_WRITTEN (decl))
376 (*debug_hooks->deferred_inline_function) (decl);
378 /* Possibly warn about unused parameters. */
379 if (warn_unused_parameter)
380 do_warn_unused_parameter (decl);
386 /* C99 extern inline keywords allow changing of declaration after function
387 has been finalized. We need to re-decide if we want to mark the function as
   needed then.  */
391 cgraph_mark_if_needed (tree decl)
393 struct cgraph_node *node = cgraph_node (decl);
/* Only finalized functions can be re-evaluated here.  */
394 if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
395 cgraph_mark_needed_node (node);
398 /* Return TRUE if NODE2 is equivalent to NODE or its clone. */
/* Walk the clone_of chain upward from NODE2; success iff NODE is
   reached before the chain runs out.  */
400 clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
402 while (node != node2 && node2)
403 node2 = node2->clone_of;
404 return node2 != NULL;
407 /* Verify edge E count and frequency.  Diagnoses problems via error ()
   and tracks whether any were found in ERROR_FOUND.  */
410 verify_edge_count_and_frequency (struct cgraph_edge *e)
412 bool error_found = false;
415 error ("caller edge count is negative");
418 if (e->frequency < 0)
420 error ("caller edge frequency is negative");
423 if (e->frequency > CGRAPH_FREQ_MAX)
425 error ("caller edge frequency is too large");
/* For edges with a body available (and not inlined away), the stored
   frequency must match the one recomputed from the call's basic
   block.  */
428 if (gimple_has_body_p (e->caller->decl)
429 && !e->caller->global.inlined_to
431 != compute_call_stmt_bb_frequency (e->caller->decl,
432 gimple_bb (e->call_stmt))))
434 error ("caller edge frequency %i does not match BB freqency %i",
436 compute_call_stmt_bb_frequency (e->caller->decl,
437 gimple_bb (e->call_stmt)));
443 /* Verify internal consistency of the callgraph data for NODE:
   edge lists, inline-clone invariants, clone-tree linkage,
   same_comdat_group lists, and agreement between the GIMPLE body
   and the recorded call edges.  */
445 verify_cgraph_node (struct cgraph_node *node)
447 struct cgraph_edge *e;
448 struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
449 struct function *saved_cfun = cfun;
450 basic_block this_block;
451 gimple_stmt_iterator gsi;
452 bool error_found = false;
457 timevar_push (TV_CGRAPH_VERIFY);
458 /* debug_generic_stmt needs correct cfun */
459 set_cfun (this_cfun);
/* Callee edges must have clean aux fields.  */
460 for (e = node->callees; e; e = e->next_callee)
463 error ("aux field set for edge %s->%s",
464 identifier_to_locale (cgraph_node_name (e->caller)),
465 identifier_to_locale (cgraph_node_name (e->callee)));
470 error ("execution count is negative");
/* Inline clones must not be externally visible, have their address
   taken, or be marked needed.  */
473 if (node->global.inlined_to && node->local.externally_visible)
475 error ("externally visible inline clone");
478 if (node->global.inlined_to && node->address_taken)
480 error ("inline clone with address taken");
483 if (node->global.inlined_to && node->needed)
485 error ("inline clone is needed");
488 for (e = node->indirect_calls; e; e = e->next_callee)
492 error ("aux field set for indirect edge from %s",
493 identifier_to_locale (cgraph_node_name (e->caller)));
496 if (!e->indirect_unknown_callee
497 || !e->indirect_info)
499 error ("An indirect edge from %s is not marked as indirect or has "
500 "associated indirect_info, the corresponding statement is: ",
501 identifier_to_locale (cgraph_node_name (e->caller)));
502 debug_gimple_stmt (e->call_stmt);
/* Check caller edges and the inlined_to invariants.  */
506 for (e = node->callers; e; e = e->next_caller)
508 if (verify_edge_count_and_frequency (e))
510 if (!e->inline_failed)
512 if (node->global.inlined_to
513 != (e->caller->global.inlined_to
514 ? e->caller->global.inlined_to : e->caller))
516 error ("inlined_to pointer is wrong");
519 if (node->callers->next_caller)
521 error ("multiple inline callers");
526 if (node->global.inlined_to)
528 error ("inlined_to pointer set for noninline callers");
532 for (e = node->indirect_calls; e; e = e->next_callee)
533 if (verify_edge_count_and_frequency (e))
535 if (!node->callers && node->global.inlined_to)
537 error ("inlined_to pointer is set but no predecessors found");
540 if (node->global.inlined_to == node)
542 error ("inlined_to pointer refers to itself");
546 if (!cgraph_node (node->decl))
548 error ("node not found in cgraph_hash");
/* Check clone-tree linkage in both directions.  */
554 struct cgraph_node *n;
555 for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
560 error ("node has wrong clone_of");
566 struct cgraph_node *n;
567 for (n = node->clones; n; n = n->next_sibling_clone)
568 if (n->clone_of != node)
572 error ("node has wrong clone list");
576 if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
578 error ("node is in clone list but it is not clone");
581 if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
583 error ("node has wrong prev_clone pointer");
586 if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
588 error ("double linked list of clones corrupted");
/* same_comdat_group must be a circular list of DECL_ONE_ONLY nodes
   with more than one member.  */
591 if (node->same_comdat_group)
593 struct cgraph_node *n = node->same_comdat_group;
595 if (!DECL_ONE_ONLY (node->decl))
597 error ("non-DECL_ONE_ONLY node in a same_comdat_group list");
602 error ("node is alone in a comdat group");
607 if (!n->same_comdat_group)
609 error ("same_comdat_group is not a circular list");
613 n = n->same_comdat_group;
/* For analyzed nodes with a body, cross-check the GIMPLE call
   statements against the recorded call edges.  */
618 if (node->analyzed && gimple_has_body_p (node->decl)
619 && !TREE_ASM_WRITTEN (node->decl)
620 && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
625 /* The nodes we're interested in are never shared, so walk
626 the tree ignoring duplicates. */
627 struct pointer_set_t *visited_nodes = pointer_set_create ();
628 /* Reach the trees by walking over the CFG, and note the
629 enclosing basic-blocks in the call edges. */
630 FOR_EACH_BB_FN (this_block, this_cfun)
631 for (gsi = gsi_start_bb (this_block);
635 gimple stmt = gsi_stmt (gsi);
636 if (is_gimple_call (stmt))
638 struct cgraph_edge *e = cgraph_edge (node, stmt);
639 tree decl = gimple_call_fndecl (stmt);
644 error ("shared call_stmt:");
645 debug_gimple_stmt (stmt);
648 if (!e->indirect_unknown_callee)
650 struct cgraph_node *n;
652 if (e->callee->same_body_alias)
654 error ("edge points to same body alias:");
655 debug_tree (e->callee->decl);
658 else if (!e->callee->global.inlined_to
660 && cgraph_get_node (decl)
661 && (e->callee->former_clone_of
662 != cgraph_get_node (decl)->decl)
663 && !clone_of_p (cgraph_node (decl),
666 error ("edge points to wrong declaration:");
667 debug_tree (e->callee->decl);
668 fprintf (stderr," Instead of:");
673 && (n = cgraph_get_node_or_alias (decl))
674 && (n->same_body_alias
675 && n->thunk.thunk_p))
677 error ("a call to thunk improperly represented "
678 "in the call graph:");
679 debug_gimple_stmt (stmt);
684 error ("an indirect edge with unknown callee "
685 "corresponding to a call_stmt with "
686 "a known declaration:");
688 debug_gimple_stmt (e->call_stmt);
694 error ("missing callgraph edge for call stmt:");
695 debug_gimple_stmt (stmt);
700 pointer_set_destroy (visited_nodes);
703 /* No CFG available?! */
/* Conversely, every recorded edge must have a call_stmt seen above.  */
706 for (e = node->callees; e; e = e->next_callee)
710 error ("edge %s->%s has no corresponding call_stmt",
711 identifier_to_locale (cgraph_node_name (e->caller)),
712 identifier_to_locale (cgraph_node_name (e->callee)));
713 debug_gimple_stmt (e->call_stmt);
718 for (e = node->indirect_calls; e; e = e->next_callee)
722 error ("an indirect edge from %s has no corresponding call_stmt",
723 identifier_to_locale (cgraph_node_name (e->caller)));
724 debug_gimple_stmt (e->call_stmt);
/* Any recorded error is fatal for the verifier.  */
732 dump_cgraph_node (stderr, node);
733 internal_error ("verify_cgraph_node failed");
735 set_cfun (saved_cfun);
736 timevar_pop (TV_CGRAPH_VERIFY);
739 /* Verify whole cgraph structure by running verify_cgraph_node on
   every node in the graph.  */
743 struct cgraph_node *node;
748 for (node = cgraph_nodes; node; node = node->next)
749 verify_cgraph_node (node);
752 /* Output all asm statements we have stored up to be output. */
755 cgraph_output_pending_asms (void)
757 struct cgraph_asm_node *can;
762 for (can = cgraph_asm_nodes; can; can = can->next)
763 assemble_asm (can->asm_str);
/* The queue has been consumed; clear it so asms are not emitted twice.  */
764 cgraph_asm_nodes = NULL;
767 /* Analyze the function scheduled to be output: gimplify its body if
   needed, lower it, and mark NODE as analyzed.  Saves and restores
   current_function_decl around the work.  */
769 cgraph_analyze_function (struct cgraph_node *node)
771 tree save = current_function_decl;
772 tree decl = node->decl;
774 current_function_decl = decl;
775 push_cfun (DECL_STRUCT_FUNCTION (decl));
777 assign_assembler_name_if_neeeded (node->decl);
779 /* Make sure to gimplify bodies only once. During analyzing a
780 function we lower it, which will require gimplified nested
781 functions, so we can end up here with an already gimplified
   body.  */
783 if (!gimple_body (decl))
784 gimplify_function_tree (decl);
785 dump_function (TDI_generic, decl);
787 cgraph_lower_function (node);
788 node->analyzed = true;
791 current_function_decl = save;
794 /* Look for externally_visible and used attributes and mark cgraph nodes
797 We cannot mark the nodes at the point the attributes are processed (in
798 handle_*_attribute) because the copy of the declarations available at that
799 point may not be canonical. For example, in:
802 void f() __attribute__((used));
804 the declaration we see in handle_used_attribute will be the second
805 declaration -- but the front end will subsequently merge that declaration
806 with the original declaration and discard the second declaration.
808 Furthermore, we can't mark these nodes in cgraph_finalize_function because:
811 void f() __attribute__((externally_visible));
815 So, we walk the nodes at the end of the translation unit, applying the
816 attributes at that point. */
819 process_function_and_variable_attributes (struct cgraph_node *first,
820 struct varpool_node *first_var)
822 struct cgraph_node *node;
823 struct varpool_node *vnode;
/* First pass: function (cgraph) nodes added since FIRST.  */
825 for (node = cgraph_nodes; node != first; node = node->next)
827 tree decl = node->decl;
828 if (DECL_PRESERVE_P (decl))
829 cgraph_mark_needed_node (node);
830 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
831 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
832 && TREE_PUBLIC (node->decl))
834 if (node->local.finalized)
835 cgraph_mark_needed_node (node);
837 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
839 if (! TREE_PUBLIC (node->decl))
840 warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
841 "%<externally_visible%>"
842 " attribute have effect only on public objects");
843 else if (node->local.finalized)
844 cgraph_mark_needed_node (node);
/* Second pass: variable (varpool) nodes added since FIRST_VAR, with
   the analogous checks.  */
847 for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
849 tree decl = vnode->decl;
850 if (DECL_PRESERVE_P (decl))
852 vnode->force_output = true;
853 if (vnode->finalized)
854 varpool_mark_needed_node (vnode);
856 if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
857 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (decl))
858 && TREE_PUBLIC (vnode->decl))
860 if (vnode->finalized)
861 varpool_mark_needed_node (vnode);
863 else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
865 if (! TREE_PUBLIC (vnode->decl))
866 warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
867 "%<externally_visible%>"
868 " attribute have effect only on public objects");
869 else if (vnode->finalized)
870 varpool_mark_needed_node (vnode);
875 /* Process CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
876 each reachable functions) and build cgraph.
877 The function can be called multiple times after inserting new nodes
878 into beginning of queue. Just the new part of queue is re-scanned then. */
881 cgraph_analyze_functions (void)
883 /* Keep track of already processed nodes when called multiple times for
884 intermodule optimization. */
885 static struct cgraph_node *first_analyzed;
886 struct cgraph_node *first_processed = first_analyzed;
887 static struct varpool_node *first_analyzed_var;
888 struct cgraph_node *node, *next;
890 bitmap_obstack_initialize (NULL);
891 process_function_and_variable_attributes (first_processed,
893 first_processed = cgraph_nodes;
894 first_analyzed_var = varpool_nodes;
895 varpool_analyze_pending_decls ();
896 if (cgraph_dump_file)
898 fprintf (cgraph_dump_file, "Initial entry points:");
899 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
901 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
902 fprintf (cgraph_dump_file, "\n");
904 cgraph_process_new_functions ();
906 /* Propagate reachability flag and lower representation of all reachable
907 functions. In the future, lowering will introduce new functions and
908 new entry points on the way (by template instantiation and virtual
909 method table generation for instance). */
910 while (cgraph_nodes_queue)
912 struct cgraph_edge *edge;
913 tree decl = cgraph_nodes_queue->decl;
/* Pop the head of the worklist.  */
915 node = cgraph_nodes_queue;
916 cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
917 node->next_needed = NULL;
919 /* ??? It is possible to create extern inline function and later using
920 weak alias attribute to kill its body. See
921 gcc.c-torture/compile/20011119-1.c */
922 if (!DECL_STRUCT_FUNCTION (decl))
924 cgraph_reset_node (node);
929 cgraph_analyze_function (node);
/* Everything this function calls becomes reachable.  */
931 for (edge = node->callees; edge; edge = edge->next_callee)
932 if (!edge->callee->reachable)
933 cgraph_mark_reachable_node (edge->callee);
/* Members of the same comdat group are kept together.  */
935 if (node->same_comdat_group)
937 for (next = node->same_comdat_group;
939 next = next->same_comdat_group)
940 cgraph_mark_reachable_node (next);
943 /* If decl is a clone of an abstract function, mark that abstract
944 function so that we don't release its body. The DECL_INITIAL() of that
945 abstract function declaration will be later needed to output debug info. */
946 if (DECL_ABSTRACT_ORIGIN (decl))
948 struct cgraph_node *origin_node = cgraph_node (DECL_ABSTRACT_ORIGIN (decl));
949 origin_node->abstract_and_needed = true;
952 /* We finalize local static variables during constructing callgraph
953 edges. Process their attributes too. */
954 process_function_and_variable_attributes (first_processed,
956 first_processed = cgraph_nodes;
957 first_analyzed_var = varpool_nodes;
958 varpool_analyze_pending_decls ();
959 cgraph_process_new_functions ();
962 /* Collect entry points to the unit. */
963 if (cgraph_dump_file)
965 fprintf (cgraph_dump_file, "Unit entry points:");
966 for (node = cgraph_nodes; node != first_analyzed; node = node->next)
968 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
969 fprintf (cgraph_dump_file, "\n\nInitial ");
970 dump_cgraph (cgraph_dump_file);
971 dump_varpool (cgraph_dump_file);
974 if (cgraph_dump_file)
975 fprintf (cgraph_dump_file, "\nReclaiming functions:");
/* Release nodes that were never reached, to conserve memory.  */
977 for (node = cgraph_nodes; node != first_analyzed; node = next)
979 tree decl = node->decl;
982 if (node->local.finalized && !gimple_has_body_p (decl))
983 cgraph_reset_node (node);
985 if (!node->reachable && gimple_has_body_p (decl))
987 if (cgraph_dump_file)
988 fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
989 cgraph_remove_node (node);
993 node->next_needed = NULL;
994 gcc_assert (!node->local.finalized || gimple_has_body_p (decl));
995 gcc_assert (node->analyzed == node->local.finalized);
997 if (cgraph_dump_file)
999 fprintf (cgraph_dump_file, "\n\nReclaimed ");
1000 dump_cgraph (cgraph_dump_file);
1001 dump_varpool (cgraph_dump_file);
1003 bitmap_obstack_release (NULL);
/* Remember where this run stopped so a later call rescans only new nodes.  */
1004 first_analyzed = cgraph_nodes;
1009 /* Analyze the whole compilation unit once it is parsed completely. */
1012 cgraph_finalize_compilation_unit (void)
1014 timevar_push (TV_CGRAPH);
1016 /* Do not skip analyzing the functions if there were errors, we
1017 miss diagnostics for following functions otherwise. */
1019 /* Emit size functions we didn't inline. */
1020 finalize_size_functions ();
1022 /* Mark alias targets necessary and emit diagnostics. */
1023 finish_aliases_1 ();
1027 fprintf (stderr, "\nAnalyzing compilation unit\n");
1031 /* Gimplify and lower all functions, compute reachability and
1032 remove unreachable nodes. */
1033 cgraph_analyze_functions ();
1035 /* Mark alias targets necessary and emit diagnostics. */
1036 finish_aliases_1 ();
1038 /* Gimplify and lower thunks. */
1039 cgraph_analyze_functions ();
1041 /* Finally drive the pass manager. */
1044 timevar_pop (TV_CGRAPH);
1048 /* Figure out what functions we want to assemble, setting node->process
   on them.  With checking enabled, also verify that everything not
   selected was legitimately reclaimable.  */
1051 cgraph_mark_functions_to_output (void)
1053 struct cgraph_node *node;
1054 #ifdef ENABLE_CHECKING
1055 bool check_same_comdat_groups = false;
1057 for (node = cgraph_nodes; node; node = node->next)
1058 gcc_assert (!node->process);
1061 for (node = cgraph_nodes; node; node = node->next)
1063 tree decl = node->decl;
1064 struct cgraph_edge *e;
1066 gcc_assert (!node->process || node->same_comdat_group);
/* Find a caller edge that was not inlined, if any.  */
1070 for (e = node->callers; e; e = e->next_caller)
1071 if (e->inline_failed)
1074 /* We need to output all local functions that are used and not
1075 always inlined, as well as those that are reachable from
1076 outside the current compilation unit. */
1078 && !node->global.inlined_to
1079 && (!cgraph_only_called_directly_p (node)
1080 || (e && node->reachable))
1081 && !TREE_ASM_WRITTEN (decl)
1082 && !DECL_EXTERNAL (decl))
/* Output the whole comdat group together.  */
1085 if (node->same_comdat_group)
1087 struct cgraph_node *next;
1088 for (next = node->same_comdat_group;
1090 next = next->same_comdat_group)
1094 else if (node->same_comdat_group)
1096 #ifdef ENABLE_CHECKING
1097 check_same_comdat_groups = true;
1102 /* We should've reclaimed all functions that are not needed. */
1103 #ifdef ENABLE_CHECKING
1104 if (!node->global.inlined_to
1105 && gimple_has_body_p (decl)
1106 /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
1107 are inside partition, we can end up not removing the body since we no longer
1108 have analyzed node pointing to it. */
1109 && !node->in_other_partition
1110 && !DECL_EXTERNAL (decl))
1112 dump_cgraph_node (stderr, node);
1113 internal_error ("failed to reclaim unneeded function");
1116 gcc_assert (node->global.inlined_to
1117 || !gimple_has_body_p (decl)
1118 || node->in_other_partition
1119 || DECL_EXTERNAL (decl));
1124 #ifdef ENABLE_CHECKING
/* Re-check comdat groups containing members that were not selected.  */
1125 if (check_same_comdat_groups)
1126 for (node = cgraph_nodes; node; node = node->next)
1127 if (node->same_comdat_group && !node->process)
1129 tree decl = node->decl;
1130 if (!node->global.inlined_to
1131 && gimple_has_body_p (decl)
1132 /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
1133 are inside partition, we can end up not removing the body since we no longer
1134 have analyzed node pointing to it. */
1135 && !node->in_other_partition
1136 && !DECL_EXTERNAL (decl))
1138 dump_cgraph_node (stderr, node);
1139 internal_error ("failed to reclaim unneeded function");
1145 /* DECL is FUNCTION_DECL. Initialize datastructures so DECL is a function
1146 in lowered gimple form.
1148 Set current_function_decl and cfun to newly constructed empty function body.
1149 return basic block in the function body. */
1152 init_lowered_empty_function (tree decl)
1156 current_function_decl = decl;
1157 allocate_struct_function (decl, false);
1158 gimple_register_cfg_hooks ();
/* Build an empty CFG/SSA skeleton for the new body.  */
1159 init_empty_tree_cfg ();
1160 init_tree_ssa (cfun);
1161 init_ssa_operands ();
1162 cfun->gimple_df->in_ssa_p = true;
1163 DECL_INITIAL (decl) = make_node (BLOCK);
/* error_mark_node marks the body as already-lowered to the gimplifier.  */
1165 DECL_SAVED_TREE (decl) = error_mark_node;
1166 cfun->curr_properties |=
1167 (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
1170 /* Create BB for body of the function and connect it properly. */
1171 bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
1172 make_edge (ENTRY_BLOCK_PTR, bb, 0);
1173 make_edge (bb, EXIT_BLOCK_PTR, 0);
1178 /* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
1179 offset indicated by VIRTUAL_OFFSET, if that is
1180 non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
1181 zero for a result adjusting thunk. */
/* The adjusted value is returned via a fresh "adjusted_this" temporary
   (see line 1296 below); statements are emitted after *BSI.  */
1184 thunk_adjust (gimple_stmt_iterator * bsi,
1185 tree ptr, bool this_adjusting,
1186 HOST_WIDE_INT fixed_offset, tree virtual_offset)
/* For a this-adjusting thunk the constant offset is applied BEFORE the
   virtual (vtable) adjustment; for a result-adjusting thunk it is
   applied after (see the second fixed_offset block below).  */
1192 && fixed_offset != 0)
1194 stmt = gimple_build_assign (ptr,
1195 fold_build2_loc (input_location,
1197 TREE_TYPE (ptr), ptr,
1198 size_int (fixed_offset)));
1199 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1202 /* If there's a virtual offset, look up that value in the vtable and
1203 adjust the pointer again. */
/* Lazily build a generic vtable-entry pointer type the first time a
   virtual offset is needed; cached in the global vtable_entry_type.  */
1211 if (!vtable_entry_type)
1213 tree vfunc_type = make_node (FUNCTION_TYPE);
1214 TREE_TYPE (vfunc_type) = integer_type_node;
1215 TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
1216 layout_type (vfunc_type);
1218 vtable_entry_type = build_pointer_type (vfunc_type);
1222 create_tmp_var (build_pointer_type
1223 (build_pointer_type (vtable_entry_type)), "vptr")
1225 /* The vptr is always at offset zero in the object. */
1226 stmt = gimple_build_assign (vtabletmp,
1227 build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
1229 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1230 mark_symbols_for_renaming (stmt);
1231 find_referenced_vars_in (stmt);
1233 /* Form the vtable address. */
1234 vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
1236 stmt = gimple_build_assign (vtabletmp2,
1237 build_simple_mem_ref (vtabletmp));
1238 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1239 mark_symbols_for_renaming (stmt);
1240 find_referenced_vars_in (stmt);
1242 /* Find the entry with the vcall offset. */
1243 stmt = gimple_build_assign (vtabletmp2,
1244 fold_build2_loc (input_location,
1246 TREE_TYPE (vtabletmp2),
1248 fold_convert (sizetype,
1250 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1252 /* Get the offset itself. */
1253 vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
1255 stmt = gimple_build_assign (vtabletmp3,
1256 build_simple_mem_ref (vtabletmp2));
1257 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1258 mark_symbols_for_renaming (stmt);
1259 find_referenced_vars_in (stmt);
1261 /* Cast to sizetype. */
1262 offsettmp = create_tmp_var (sizetype, "offset");
1263 stmt = gimple_build_assign (offsettmp, fold_convert (sizetype, vtabletmp3));
1264 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1265 mark_symbols_for_renaming (stmt);
1266 find_referenced_vars_in (stmt);
1268 /* Adjust the `this' pointer. */
1269 ptr = fold_build2_loc (input_location,
1270 POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
/* Result-adjusting path: apply the constant offset after the virtual
   adjustment.  */
1275 && fixed_offset != 0)
1276 /* Adjust the pointer by the constant. */
/* If PTR is a bare VAR_DECL, copy it into a temporary first so the
   POINTER_PLUS_EXPR below operates on a gimple value.  */
1280 if (TREE_CODE (ptr) == VAR_DECL)
1284 ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
1285 stmt = gimple_build_assign (ptrtmp, ptr);
1286 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1287 mark_symbols_for_renaming (stmt);
1288 find_referenced_vars_in (stmt);
1290 ptr = fold_build2_loc (input_location,
1291 POINTER_PLUS_EXPR, TREE_TYPE (ptrtmp), ptrtmp,
1292 size_int (fixed_offset));
1295 /* Emit the statement and gimplify the adjustment expression. */
1296 ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
1297 stmt = gimple_build_assign (ret, ptr);
1298 mark_symbols_for_renaming (stmt);
1299 find_referenced_vars_in (stmt);
1300 gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
1305 /* Produce assembler for thunk NODE. */
/* Two strategies: if the target can emit the thunk directly
   (can_output_mi_thunk), stream assembly via output_mi_thunk; otherwise
   synthesize a GIMPLE body that calls the thunked-to function ALIAS,
   adjusting `this' (this_adjusting) or the returned pointer otherwise.  */
1308 assemble_thunk (struct cgraph_node *node)
1310 bool this_adjusting = node->thunk.this_adjusting;
1311 HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
1312 HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
1313 tree virtual_offset = NULL;
1314 tree alias = node->thunk.alias;
1315 tree thunk_fndecl = node->decl;
1316 tree a = DECL_ARGUMENTS (thunk_fndecl);
1318 current_function_decl = thunk_fndecl;
1320 /* Ensure thunks are emitted in their correct sections. */
1321 resolve_unique_section (thunk_fndecl, 0, flag_function_sections);
/* Fast path: the target backend knows how to emit this thunk directly.  */
1324 && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
1325 virtual_value, alias))
1330 DECL_RESULT (thunk_fndecl)
1331 = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
1332 RESULT_DECL, 0, integer_type_node);
1333 fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
1335 /* The back end expects DECL_INITIAL to contain a BLOCK, so we
1337 fn_block = make_node (BLOCK);
1338 BLOCK_VARS (fn_block) = a;
1339 DECL_INITIAL (thunk_fndecl) = fn_block;
1340 init_function_start (thunk_fndecl);
1342 assemble_start_function (thunk_fndecl, fnname);
1344 targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
1345 fixed_offset, virtual_value, alias);
1347 assemble_end_function (thunk_fndecl, fnname);
1348 init_insn_lengths ();
1349 free_after_compilation (cfun);
1351 TREE_ASM_WRITTEN (thunk_fndecl) = 1;
/* Slow path: build the thunk as an ordinary GIMPLE function body.  */
1356 basic_block bb, then_bb, else_bb, return_bb;
1357 gimple_stmt_iterator bsi;
1363 VEC(tree, heap) *vargs;
1368 DECL_IGNORED_P (thunk_fndecl) = 1;
1369 bitmap_obstack_initialize (NULL);
1371 if (node->thunk.virtual_offset_p)
1372 virtual_offset = size_int (virtual_value);
1374 /* Build the return declaration for the function. */
1375 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
1376 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
1378 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
1379 DECL_ARTIFICIAL (resdecl) = 1;
1380 DECL_IGNORED_P (resdecl) = 1;
1381 DECL_RESULT (thunk_fndecl) = resdecl;
1384 resdecl = DECL_RESULT (thunk_fndecl);
/* All four BB handles start out referring to the single empty block;
   then/else/return blocks are only split off for the pointer-adjusting
   return path further below.  */
1386 bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);
1388 bsi = gsi_start_bb (bb);
1390 /* Build call to the function being thunked. */
1391 if (!VOID_TYPE_P (restype))
1393 if (!is_gimple_reg_type (restype))
1396 add_local_decl (cfun, restmp);
1397 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
1400 restmp = create_tmp_var_raw (restype, "retval");
/* Count the formal parameters, then forward them all to ALIAS.  */
1403 for (arg = a; arg; arg = DECL_CHAIN (arg))
1405 vargs = VEC_alloc (tree, heap, nargs);
1407 VEC_quick_push (tree, vargs,
1412 VEC_quick_push (tree, vargs, a);
1413 for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
1414 VEC_quick_push (tree, vargs, arg);
1415 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
1416 VEC_free (tree, heap, vargs);
1417 gimple_call_set_cannot_inline (call, true);
1418 gimple_call_set_from_thunk (call, true);
1420 gimple_call_set_lhs (call, restmp);
1421 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
1422 mark_symbols_for_renaming (call);
1423 find_referenced_vars_in (call);
/* Result-adjusting thunk: fix up the value returned by the call.  */
1426 if (restmp && !this_adjusting)
1428 tree true_label = NULL_TREE;
1430 if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
1433 /* If the return type is a pointer, we need to
1434 protect against NULL. We know there will be an
1435 adjustment, because that's why we're emitting a
/* NOTE(review): else_bb still equals bb at this point (set at line
   1386), so the new else block is placed after bb — subtle; confirm
   this placement is intentional before touching it.  */
1437 then_bb = create_basic_block (NULL, (void *) 0, bb);
1438 return_bb = create_basic_block (NULL, (void *) 0, then_bb);
1439 else_bb = create_basic_block (NULL, (void *) 0, else_bb);
1440 remove_edge (single_succ_edge (bb));
1441 true_label = gimple_block_label (then_bb);
1442 stmt = gimple_build_cond (NE_EXPR, restmp,
1443 build_zero_cst (TREE_TYPE (restmp)),
1444 NULL_TREE, NULL_TREE);
1445 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1446 make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1447 make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1448 make_edge (return_bb, EXIT_BLOCK_PTR, 0);
1449 make_edge (then_bb, return_bb, EDGE_FALLTHRU);
1450 make_edge (else_bb, return_bb, EDGE_FALLTHRU);
1451 bsi = gsi_last_bb (then_bb);
1454 restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
1455 fixed_offset, virtual_offset);
/* A NULL incoming pointer stays NULL: the else branch bypasses the
   adjustment and stores zero.  */
1459 bsi = gsi_last_bb (else_bb);
1460 stmt = gimple_build_assign (restmp,
1461 build_zero_cst (TREE_TYPE (restmp)));
1462 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
1463 bsi = gsi_last_bb (return_bb);
1467 gimple_call_set_tail (call, true);
1469 /* Build return value. */
1470 ret = gimple_build_return (restmp);
1471 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
1473 delete_unreachable_blocks ();
1474 update_ssa (TODO_update_ssa);
1476 cgraph_remove_same_body_alias (node);
1477 /* Since we want to emit the thunk, we explicitly mark its name as
1479 cgraph_add_new_function (thunk_fndecl, true);
1480 bitmap_obstack_release (NULL);
1482 current_function_decl = NULL;
1485 /* Expand function specified by NODE. */
/* Emits same-body aliases and thunks attached to NODE, generates RTL
   for the body, then releases the GIMPLE body and call edges.  */
1488 cgraph_expand_function (struct cgraph_node *node)
1490 tree decl = node->decl;
1492 /* We ought to not compile any inline clones. */
1493 gcc_assert (!node->global.inlined_to);
1495 announce_function (decl);
1497 if (node->same_body)
1499 struct cgraph_node *alias, *next;
1500 bool saved_alias = node->alias;
/* First advance to the tail of the same_body list ...  */
1501 for (alias = node->same_body;
1502 alias && alias->next; alias = alias->next)
1504 /* Walk aliases in the order they were created; it is possible that
1505 thunks refer to the aliases made earlier. */
/* ... then walk backwards via the previous links so creation order is
   preserved.  */
1506 for (; alias; alias = next)
1508 next = alias->previous;
1509 if (!alias->thunk.thunk_p)
1510 assemble_alias (alias->decl,
1511 DECL_ASSEMBLER_NAME (alias->thunk.alias));
1513 assemble_thunk (alias);
1515 node->alias = saved_alias;
1516 cgraph_process_new_functions ();
1519 gcc_assert (node->lowered);
1521 /* Generate RTL for the body of DECL. */
1522 tree_rest_of_compilation (decl);
1524 /* Make sure that BE didn't give up on compiling. */
1525 gcc_assert (TREE_ASM_WRITTEN (decl));
1526 current_function_decl = NULL;
1527 gcc_assert (!cgraph_preserve_function_body_p (decl));
1528 cgraph_release_function_body (node);
1529 /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
1530 points to the dead function body. */
1531 cgraph_node_remove_callees (node);
1533 cgraph_function_flags_ready = true;
1536 /* Return true when inlining edge E has not been marked as failed;
   store the recorded failure reason (if any) in *REASON.
   NOTE(review): previous wording referred to CALLER_DECL/CALLEE_DECL,
   which do not match this edge-based signature. */
1539 cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
1541 *reason = e->inline_failed;
1542 return !e->inline_failed;
1547 /* Expand all functions that must be output.
1549 Attempt to topologically sort the nodes so function is output when
1550 all called functions are already assembled to allow data to be
1551 propagated across the callgraph. Use a stack to get smaller distance
1552 between a function and its callees (later we may choose to use a more
1553 sophisticated algorithm for function reordering; we will likely want
1554 to use subsections to make the output functions appear in top-down
1558 cgraph_expand_all_functions (void)
1560 struct cgraph_node *node;
1561 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1562 int order_pos, new_order_pos = 0;
1565 order_pos = cgraph_postorder (order);
1566 gcc_assert (order_pos == cgraph_n_nodes);
1568 /* Garbage collector may remove inline clones we eliminate during
1569 optimization. So we must be sure to not reference them. */
/* Compact the postorder array to only the nodes flagged for output.  */
1570 for (i = 0; i < order_pos; i++)
1571 if (order[i]->process)
1572 order[new_order_pos++] = order[i];
/* Iterate in reverse postorder so callees are emitted before callers.  */
1574 for (i = new_order_pos - 1; i >= 0; i--)
1579 gcc_assert (node->reachable);
1581 cgraph_expand_function (node);
1584 cgraph_process_new_functions ();
1590 /* This is used to sort the node types by the cgraph order number. */
1592 enum cgraph_order_sort_kind
1594 ORDER_UNDEFINED = 0,
/* Tagged entry for the order-sorted output array used by
   cgraph_output_in_order: KIND selects which union member is valid.  */
1600 struct cgraph_order_sort
1602 enum cgraph_order_sort_kind kind;
1605 struct cgraph_node *f;
1606 struct varpool_node *v;
1607 struct cgraph_asm_node *a;
1611 /* Output all functions, variables, and asm statements in the order
1612 according to their order fields, which is the order in which they
1613 appeared in the file. This implements -fno-toplevel-reorder. In
1614 this mode we may output functions and variables which don't really
1615 need to be output. */
1618 cgraph_output_in_order (void)
1621 struct cgraph_order_sort *nodes;
1623 struct cgraph_node *pf;
1624 struct varpool_node *pv;
1625 struct cgraph_asm_node *pa;
/* Bucket every function, variable, and asm node into NODES by its
   recorded order number; each slot may be claimed exactly once.  */
1628 nodes = XCNEWVEC (struct cgraph_order_sort, max);
1630 varpool_analyze_pending_decls ();
1632 for (pf = cgraph_nodes; pf; pf = pf->next)
1637 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1638 nodes[i].kind = ORDER_FUNCTION;
1643 for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
1646 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1647 nodes[i].kind = ORDER_VAR;
1651 for (pa = cgraph_asm_nodes; pa; pa = pa->next)
1654 gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
1655 nodes[i].kind = ORDER_ASM;
1659 /* In toplevel reorder mode we output all statics; mark them as needed. */
/* NOTE(review): the wording above looks inverted — this function runs
   under -fno-toplevel-reorder (see the header comment); confirm intent
   before rewording further.  */
1660 for (i = 0; i < max; ++i)
1662 if (nodes[i].kind == ORDER_VAR)
1664 varpool_mark_needed_node (nodes[i].u.v);
1667 varpool_empty_needed_queue ();
/* Second pass: emit every slot in file order.  */
1669 for (i = 0; i < max; ++i)
1671 switch (nodes[i].kind)
1673 case ORDER_FUNCTION:
1674 nodes[i].u.f->process = 0;
1675 cgraph_expand_function (nodes[i].u.f);
1679 varpool_assemble_decl (nodes[i].u.v);
1683 assemble_asm (nodes[i].u.a->asm_str);
1686 case ORDER_UNDEFINED:
/* All asm nodes have been consumed above.  */
1694 cgraph_asm_nodes = NULL;
1698 /* Return true when function body of DECL still needs to be kept around
1699 for later re-use. */
1701 cgraph_preserve_function_body_p (tree decl)
1703 struct cgraph_node *node;
/* Only valid after global cgraph info has been computed.  */
1705 gcc_assert (cgraph_global_info_ready);
1706 /* Look if there is any clone around. */
1707 node = cgraph_node (decl);
/* NOTE(review): the enclosing function header is not visible in this
   listing; judging by its body this is the IPA pass driver (runs small
   IPA passes, IPA summaries, LTO summary streaming, then regular IPA
   passes) — confirm against the full file.  */
1717 current_function_decl = NULL;
1718 gimple_register_cfg_hooks ();
1719 bitmap_obstack_initialize (NULL);
1721 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
1725 execute_ipa_pass_list (all_small_ipa_passes);
1730 /* If pass_all_early_optimizations was not scheduled, the state of
1731 the cgraph will not be properly updated. Update it now. */
1732 if (cgraph_state < CGRAPH_STATE_IPA_SSA)
1733 cgraph_state = CGRAPH_STATE_IPA_SSA;
1737 /* Generate coverage variables and constructors. */
1740 /* Process new functions added. */
1742 current_function_decl = NULL;
1743 cgraph_process_new_functions ();
/* Compute per-function summaries for the regular IPA passes before
   they are executed (or streamed for LTO).  */
1745 execute_ipa_summary_passes
1746 ((struct ipa_opt_pass_d *) all_regular_ipa_passes);
1749 /* Some targets need to handle LTO assembler output specially. */
1750 if (flag_generate_lto)
1751 targetm.asm_out.lto_start ();
1753 execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);
1756 ipa_write_summaries ();
1758 if (flag_generate_lto)
1759 targetm.asm_out.lto_end ();
1762 execute_ipa_pass_list (all_regular_ipa_passes);
1763 invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
1765 bitmap_obstack_release (NULL);
1769 /* Perform simple optimizations based on callgraph. */
/* Main driver: runs IPA optimizations, materializes clones, then emits
   everything (functions, variables, asms) and verifies cleanup.  */
1772 cgraph_optimize (void)
1777 #ifdef ENABLE_CHECKING
1781 /* Frontend may output common variables after the unit has been finalized.
1782 It is safe to deal with them here as they are always zero initialized. */
1783 varpool_analyze_pending_decls ();
1785 timevar_push (TV_CGRAPHOPT);
1786 if (pre_ipa_mem_report)
1788 fprintf (stderr, "Memory consumption before IPA\n");
1789 dump_memory_report (false);
1792 fprintf (stderr, "Performing interprocedural optimizations\n");
1793 cgraph_state = CGRAPH_STATE_IPA;
1795 /* Don't run the IPA passes if there was any error or sorry messages. */
1799 /* Do nothing else if any IPA pass found errors. */
1802 timevar_pop (TV_CGRAPHOPT);
1806 /* This pass removes bodies of extern inline functions we never inlined.
1807 Do this later so other IPA passes see what is really going on. */
1808 cgraph_remove_unreachable_nodes (false, dump_file);
1809 cgraph_global_info_ready = true;
1810 if (cgraph_dump_file)
1812 fprintf (cgraph_dump_file, "Optimized ");
1813 dump_cgraph (cgraph_dump_file);
1814 dump_varpool (cgraph_dump_file);
1816 if (post_ipa_mem_report)
1818 fprintf (stderr, "Memory consumption after IPA\n");
1819 dump_memory_report (false);
1821 timevar_pop (TV_CGRAPHOPT);
1823 /* Output everything. */
1824 (*debug_hooks->assembly_start) ();
1826 fprintf (stderr, "Assembling functions:\n");
1827 #ifdef ENABLE_CHECKING
/* Clones must exist as real bodies before functions are chosen for
   output and expanded.  */
1831 cgraph_materialize_all_clones ();
1832 cgraph_mark_functions_to_output ();
1834 cgraph_state = CGRAPH_STATE_EXPANSION;
1835 if (!flag_toplevel_reorder)
1836 cgraph_output_in_order ();
1839 cgraph_output_pending_asms ();
1841 cgraph_expand_all_functions ();
1842 varpool_remove_unreferenced_decls ();
1844 varpool_assemble_pending_decls ();
1846 cgraph_process_new_functions ();
1847 cgraph_state = CGRAPH_STATE_FINISHED;
1849 if (cgraph_dump_file)
1851 fprintf (cgraph_dump_file, "\nFinal ");
1852 dump_cgraph (cgraph_dump_file);
1853 dump_varpool (cgraph_dump_file);
1855 #ifdef ENABLE_CHECKING
1857 /* Double check that all inline clones are gone and that all
1858 function bodies have been released from memory. */
1861 struct cgraph_node *node;
1862 bool error_found = false;
1864 for (node = cgraph_nodes; node; node = node->next)
1866 && (node->global.inlined_to
1867 || gimple_has_body_p (node->decl)))
1870 dump_cgraph_node (stderr, node);
1873 internal_error ("nodes with unreleased memory found");
1881 if (!cgraph_dump_file)
1882 cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
1885 /* The edges representing the callers of the NEW_VERSION node were
1886 fixed by cgraph_function_versioning (), now the call_expr in their
1887 respective tree code should be updated to call the NEW_VERSION. */
1890 update_call_expr (struct cgraph_node *new_version)
1892 struct cgraph_edge *e;
1894 gcc_assert (new_version);
1896 /* Update the call expr on the edges to call the new version. */
1897 for (e = new_version->callers; e; e = e->next_caller)
1899 struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
1900 gimple_call_set_fndecl (e->call_stmt, new_version->decl);
/* Redirecting the call may make its EH landing pad dead in the
   caller; clean it up in the caller's function context.  */
1901 maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
1906 /* Create a new cgraph node which is the new version of
1907 OLD_VERSION node. REDIRECT_CALLERS holds the callers
1908 edges which should be redirected to point to
1909 NEW_VERSION. ALL the callees edges of OLD_VERSION
1910 are cloned to the new version node. Return the new
1913 If non-NULL BLOCK_TO_COPY determines which basic blocks
1914 are copied, to prevent duplication of calls that are dead
1917 static struct cgraph_node *
1918 cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
1920 VEC(cgraph_edge_p,heap) *redirect_callers,
1923 struct cgraph_node *new_version;
1924 struct cgraph_edge *e;
1927 gcc_assert (old_version);
1929 new_version = cgraph_node (new_decl);
/* The new version is local to this unit: not externally visible, not a
   vtable method, but inherits the rest of the local/global info.  */
1931 new_version->analyzed = true;
1932 new_version->local = old_version->local;
1933 new_version->local.externally_visible = false;
1934 new_version->local.local = true;
1935 new_version->local.vtable_method = false;
1936 new_version->global = old_version->global;
1937 new_version->rtl = old_version->rtl;
1938 new_version->reachable = true;
1939 new_version->count = old_version->count;
/* Clone direct callee edges, skipping calls in blocks not selected by
   bbs_to_copy.  */
1941 for (e = old_version->callees; e; e=e->next_callee)
1943 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
1944 cgraph_clone_edge (e, new_version, e->call_stmt,
1945 e->lto_stmt_uid, REG_BR_PROB_BASE,
1947 e->loop_nest, true);
/* Likewise for indirect call edges.  */
1948 for (e = old_version->indirect_calls; e; e=e->next_callee)
1950 || bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
1951 cgraph_clone_edge (e, new_version, e->call_stmt,
1952 e->lto_stmt_uid, REG_BR_PROB_BASE,
1954 e->loop_nest, true);
1955 FOR_EACH_VEC_ELT (cgraph_edge_p, redirect_callers, i, e)
1957 /* Redirect calls to the old version node to point to its new
1959 cgraph_redirect_edge_callee (e, new_version);
1965 /* Perform function versioning.
1966 Function versioning includes copying of the tree and
1967 a callgraph update (creating a new cgraph node and updating
1968 its callees and callers).
1970 REDIRECT_CALLERS varray includes the edges to be redirected
1973 TREE_MAP is a mapping of tree nodes we want to replace with
1974 new ones (according to results of prior analysis).
1975 OLD_VERSION_NODE is the node that is versioned.
1976 It returns the new version's cgraph node.
1977 If non-NULL ARGS_TO_SKIP determine function parameters to remove
1979 If non-NULL BLOCK_TO_COPY determine what basic blocks to copy.
1980 If non-NULL NEW_ENTRY determine new entry BB of the clone. */
1982 struct cgraph_node *
1983 cgraph_function_versioning (struct cgraph_node *old_version_node,
1984 VEC(cgraph_edge_p,heap) *redirect_callers,
1985 VEC (ipa_replace_map_p,gc)* tree_map,
1986 bitmap args_to_skip,
1988 basic_block new_entry_block,
1989 const char *clone_name)
1991 tree old_decl = old_version_node->decl;
1992 struct cgraph_node *new_version_node = NULL;
/* Bail out (returning NULL) for functions that cannot be versioned.  */
1995 if (!tree_versionable_function_p (old_decl))
1998 /* Make a new FUNCTION_DECL tree node for the
/* Either a plain copy or a decl with the skipped parameters removed.  */
2001 new_decl = copy_node (old_decl);
2003 new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
2005 /* Generate a new name for the new version. */
2006 DECL_NAME (new_decl) = clone_function_name (old_decl, clone_name);
2007 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
2008 SET_DECL_RTL (new_decl, NULL);
2010 /* Create the new version's call-graph node.
2011 and update the edges of the new node. */
2013 cgraph_copy_node_for_versioning (old_version_node, new_decl,
2014 redirect_callers, bbs_to_copy);
2016 /* Copy the OLD_VERSION_NODE function tree to the new version. */
2017 tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
2018 bbs_to_copy, new_entry_block);
2020 /* Update the new version's properties.
2021 Make the new version visible only within this translation unit. Make sure
2022 that is not weak also.
2023 ??? We cannot use COMDAT linkage because there is no
2024 ABI support for this. */
2025 cgraph_make_decl_local (new_version_node->decl);
2026 DECL_VIRTUAL_P (new_version_node->decl) = 0;
2027 new_version_node->local.externally_visible = 0;
2028 new_version_node->local.local = 1;
2029 new_version_node->lowered = true;
2031 /* Update the call_expr on the edges to call the new version node. */
2032 update_call_expr (new_version_node);
2034 cgraph_call_function_insertion_hooks (new_version_node);
2035 return new_version_node;
2038 /* Produce separate function body for inline clones so the offline copy can be
2039 modified without affecting them. */
/* The first clone of NODE is promoted to own a fresh copy of the decl
   and body; the remaining clones are re-parented under it.  Returns
   (per the visible tail) the promoted clone.  */
2040 struct cgraph_node *
2041 save_inline_function_body (struct cgraph_node *node)
2043 struct cgraph_node *first_clone, *n;
2045 gcc_assert (node == cgraph_node (node->decl));
2047 cgraph_lower_function (node);
2049 first_clone = node->clones;
/* Give the first clone its own FUNCTION_DECL so it can be looked up
   independently of NODE.  */
2051 first_clone->decl = copy_node (node->decl);
2052 cgraph_insert_node_to_hashtable (first_clone);
2053 gcc_assert (first_clone == cgraph_node (first_clone->decl));
2054 if (first_clone->next_sibling_clone)
/* Splice the remaining sibling clones out of NODE's clone list and
   attach them as clones of first_clone instead.  */
2056 for (n = first_clone->next_sibling_clone; n->next_sibling_clone; n = n->next_sibling_clone)
2057 n->clone_of = first_clone;
2058 n->clone_of = first_clone;
2059 n->next_sibling_clone = first_clone->clones;
2060 if (first_clone->clones)
2061 first_clone->clones->prev_sibling_clone = n;
2062 first_clone->clones = first_clone->next_sibling_clone;
2063 first_clone->next_sibling_clone->prev_sibling_clone = NULL;
2064 first_clone->next_sibling_clone = NULL;
2065 gcc_assert (!first_clone->prev_sibling_clone);
2067 first_clone->clone_of = NULL;
2068 node->clones = NULL;
/* Walk first_clone's clone tree (depth first, without recursion) and
   repoint every decl from NODE's to first_clone's.  */
2070 if (first_clone->clones)
2071 for (n = first_clone->clones; n != first_clone;)
2073 gcc_assert (n->decl == node->decl);
2074 n->decl = first_clone->decl;
2077 else if (n->next_sibling_clone)
2078 n = n->next_sibling_clone;
2081 while (n != first_clone && !n->next_sibling_clone)
2083 if (n != first_clone)
2084 n = n->next_sibling_clone;
2088 /* Copy the OLD_VERSION_NODE function tree to the new version. */
2089 tree_function_versioning (node->decl, first_clone->decl, NULL, true, NULL,
/* The saved body is a private, non-exported copy.  */
2092 DECL_EXTERNAL (first_clone->decl) = 0;
2093 DECL_COMDAT_GROUP (first_clone->decl) = NULL_TREE;
2094 TREE_PUBLIC (first_clone->decl) = 0;
2095 DECL_COMDAT (first_clone->decl) = 0;
2096 VEC_free (ipa_opt_pass, heap,
2097 first_clone->ipa_transforms_to_apply);
2098 first_clone->ipa_transforms_to_apply = NULL;
2100 #ifdef ENABLE_CHECKING
2101 verify_cgraph_node (first_clone);
2106 /* Given virtual clone, turn it into actual clone. */
/* Copies the clone_of body into NODE (applying tree_map/args_to_skip),
   detaches NODE from the clone tree, and releases the origin's body
   when nothing else needs it.  */
2108 cgraph_materialize_clone (struct cgraph_node *node)
2110 bitmap_obstack_initialize (NULL);
/* Remember which decl this clone came from, following through an
   already-materialized origin if there is one.  */
2111 node->former_clone_of = node->clone_of->decl;
2112 if (node->clone_of->former_clone_of)
2113 node->former_clone_of = node->clone_of->former_clone_of;
2114 /* Copy the OLD_VERSION_NODE function tree to the new version. */
2115 tree_function_versioning (node->clone_of->decl, node->decl,
2116 node->clone.tree_map, true,
2117 node->clone.args_to_skip, NULL, NULL);
2118 if (cgraph_dump_file)
2120 dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
2121 dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
2124 /* Function is no longer clone. */
/* Unlink NODE from the doubly-linked sibling-clone list.  */
2125 if (node->next_sibling_clone)
2126 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
2127 if (node->prev_sibling_clone)
2128 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
2130 node->clone_of->clones = node->next_sibling_clone;
2131 node->next_sibling_clone = NULL;
2132 node->prev_sibling_clone = NULL;
/* If the origin has no body of interest and no remaining clones, its
   body and references can be dropped now.  */
2133 if (!node->clone_of->analyzed && !node->clone_of->clones)
2135 cgraph_release_function_body (node->clone_of);
2136 cgraph_node_remove_callees (node->clone_of);
2137 ipa_remove_all_references (&node->clone_of->ref_list);
2139 node->clone_of = NULL;
2140 bitmap_obstack_release (NULL);
2143 /* If necessary, change the function declaration in the call statement
2144 associated with E so that it corresponds to the edge callee. */
/* Returns the (possibly replaced) call statement.  Handles thunk-delta
   `this' adjustment and argument skipping for clones.  */
2147 cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
2149 tree decl = gimple_call_fndecl (e->call_stmt);
2151 gimple_stmt_iterator gsi;
2152 bool gsi_computed = false;
2153 #ifdef ENABLE_CHECKING
2154 struct cgraph_node *node;
/* Nothing to do for indirect calls or when the statement already calls
   the edge's callee (directly or via a same-body alias).  */
2157 if (e->indirect_unknown_callee
2158 || decl == e->callee->decl
2159 /* Don't update call from same body alias to the real function. */
2160 || (decl && cgraph_get_node (decl) == cgraph_get_node (e->callee->decl)))
2161 return e->call_stmt;
2163 #ifdef ENABLE_CHECKING
2166 node = cgraph_get_node (decl);
2167 gcc_assert (!node || !node->clone.combined_args_to_skip);
2171 if (cgraph_dump_file)
2173 fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
2174 cgraph_node_name (e->caller), e->caller->uid,
2175 cgraph_node_name (e->callee), e->callee->uid);
2176 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
2177 if (e->callee->clone.combined_args_to_skip)
2179 fprintf (cgraph_dump_file, " combined args to skip: ");
2180 dump_bitmap (cgraph_dump_file,
2181 e->callee->clone.combined_args_to_skip);
/* Apply a pending thunk delta to `this' unless the `this' argument
   (arg 0) is itself being skipped.  */
2185 if (e->indirect_info &&
2186 e->indirect_info->thunk_delta != 0
2187 && (!e->callee->clone.combined_args_to_skip
2188 || !bitmap_bit_p (e->callee->clone.combined_args_to_skip, 0)))
2190 if (cgraph_dump_file)
2191 fprintf (cgraph_dump_file, " Thunk delta is "
2192 HOST_WIDE_INT_PRINT_DEC "\n", e->indirect_info->thunk_delta);
2193 gsi = gsi_for_stmt (e->call_stmt);
2194 gsi_computed = true;
2195 gimple_adjust_this_by_delta (&gsi,
2196 build_int_cst (sizetype,
2197 e->indirect_info->thunk_delta));
2198 e->indirect_info->thunk_delta = 0;
/* Callee drops arguments: rebuild the call without them and replace
   the old statement in place.  */
2201 if (e->callee->clone.combined_args_to_skip)
2206 = gimple_call_copy_skip_args (e->call_stmt,
2207 e->callee->clone.combined_args_to_skip);
2208 gimple_call_set_fndecl (new_stmt, e->callee->decl);
2210 if (gimple_vdef (new_stmt)
2211 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
2212 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
/* NOTE(review): the elided line(s) before this presumably guard on
   !gsi_computed so the iterator is not recomputed — confirm in the
   full source.  */
2215 gsi = gsi_for_stmt (e->call_stmt);
2216 gsi_replace (&gsi, new_stmt, false);
2217 /* We need to defer cleaning EH info on the new statement to
2218 fixup-cfg. We may not have dominator information at this point
2219 and thus would end up with unreachable blocks and have no way
2220 to communicate that we need to run CFG cleanup then. */
2221 lp_nr = lookup_stmt_eh_lp (e->call_stmt);
2224 remove_stmt_from_eh_lp (e->call_stmt);
2225 add_stmt_to_eh_lp (new_stmt, lp_nr);
/* Simple case: no arguments to skip, just retarget the fndecl.  */
2230 new_stmt = e->call_stmt;
2231 gimple_call_set_fndecl (new_stmt, e->callee->decl);
2232 update_stmt (new_stmt);
2235 cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt);
2237 if (cgraph_dump_file)
2239 fprintf (cgraph_dump_file, " updated to:");
2240 print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
2245 /* Once all functions from compilation unit are in memory, produce all clones
2246 and update all calls. We might also do this on demand if we don't want to
2247 bring all functions to memory prior compilation, but current WHOPR
2248 implementation does that and it is a bit easier to keep everything right in
2251 cgraph_materialize_all_clones (void)
2253 struct cgraph_node *node;
2254 bool stabilized = false;
2256 if (cgraph_dump_file)
2257 fprintf (cgraph_dump_file, "Materializing clones\n");
2258 #ifdef ENABLE_CHECKING
2262 /* We can also do topological order, but number of iterations should be
2263 bounded by number of IPA passes since single IPA pass is probably not
2264 going to create clones of clones it created itself. */
/* Iterate until no clone was materialized in a pass (stabilized); a
   clone is ready once its origin's body is available.  */
2268 for (node = cgraph_nodes; node; node = node->next)
2270 if (node->clone_of && node->decl != node->clone_of->decl
2271 && !gimple_has_body_p (node->decl))
2273 if (gimple_has_body_p (node->clone_of->decl))
2275 if (cgraph_dump_file)
2277 fprintf (cgraph_dump_file, "clonning %s to %s\n",
2278 cgraph_node_name (node->clone_of),
2279 cgraph_node_name (node));
2280 if (node->clone.tree_map)
2283 fprintf (cgraph_dump_file, " replace map: ");
2284 for (i = 0; i < VEC_length (ipa_replace_map_p,
2285 node->clone.tree_map);
2288 struct ipa_replace_map *replace_info;
2289 replace_info = VEC_index (ipa_replace_map_p,
2290 node->clone.tree_map,
2292 print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
2293 fprintf (cgraph_dump_file, " -> ");
2294 print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
2295 fprintf (cgraph_dump_file, "%s%s;",
2296 replace_info->replace_p ? "(replace)":"",
2297 replace_info->ref_p ? "(ref)":"");
2299 fprintf (cgraph_dump_file, "\n");
2301 if (node->clone.args_to_skip)
2303 fprintf (cgraph_dump_file, " args_to_skip: ");
2304 dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
/* NOTE(review): this guard repeats args_to_skip but the dump below is
   of combined_args_to_skip — the condition likely should test
   node->clone.combined_args_to_skip (dump_bitmap on a NULL bitmap
   would otherwise be reachable).  Confirm against upstream.  */
2306 if (node->clone.args_to_skip)
2308 fprintf (cgraph_dump_file, " combined_args_to_skip:");
2309 dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
2312 cgraph_materialize_clone (node);
/* Drop stale callee edges of nodes that are no longer analyzed.  */
2318 for (node = cgraph_nodes; node; node = node->next)
2319 if (!node->analyzed && node->callees)
2320 cgraph_node_remove_callees (node);
2321 if (cgraph_dump_file)
2322 fprintf (cgraph_dump_file, "Materialization Call site updates done.\n");
2323 #ifdef ENABLE_CHECKING
2326 cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
2329 #include "gt-cgraphunit.h"