-/* Callgraph based intraprocedural optimizations.
- Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
+/* Callgraph based interprocedural optimizations.
+ Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
+ Free Software Foundation, Inc.
Contributed by Jan Hubicka
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
-Software Foundation; either version 2, or (at your option) any later
+Software Foundation; either version 3, or (at your option) any later
version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
for more details.
You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 59 Temple Place - Suite 330, Boston, MA
-02111-1307, USA. */
+along with GCC; see the file COPYING3. If not see
+<http://www.gnu.org/licenses/>. */
/* This module implements main driver of compilation process as well as
- few basic intraprocedural optimizers.
+ a few basic interprocedural optimizers.
The main scope of this file is to act as an interface in between
tree based frontends and the backend (and middle end)
This function is called once front-end has parsed whole body of function
and it is certain that the function body nor the declaration will change.
- (There is one exception needed for implementing GCC extern inline function.)
+ (There is one exception needed for implementing GCC extern inline
+ function.)
- - cgraph_varpool_finalize_variable
+ - varpool_finalize_variable
This function has same behavior as the above but is used for static
variables.
- cgraph_finalize_compilation_unit
- This function is called once compilation unit is finalized and it will
- no longer change.
+ This function is called once the (source level) compilation unit is
+ finalized and it will no longer change.
- In the unit-at-a-time the call-graph construction and local function
+ The call-graph construction and local function
analysis takes place here. Bodies of unreachable functions are released
to conserve memory usage.
- ??? The compilation unit in this point of view should be compilation
- unit as defined by the language - for instance C frontend allows multiple
- compilation units to be parsed at once and it should call function each
- time parsing is done so we save memory.
+ The function can be called multiple times when multiple source level
+ compilation units are combined (such as in the C frontend).
- cgraph_optimize
taken are marked as local. Backend can then use this information to
modify calling conventions, do better inlining or similar optimizations.
- - cgraph_assemble_pending_functions
- - cgraph_varpool_assemble_pending_variables
-
- In non-unit-at-a-time mode these functions can be used to force compilation
- of functions or variables that are known to be needed at given stage
- of compilation
-
- cgraph_mark_needed_node
- - cgraph_varpool_mark_needed_node
+ - varpool_mark_needed_node
- When function or variable is referenced by some hidden way (for instance
- via assembly code and marked by attribute "used"), the call-graph data structure
- must be updated accordingly by this function.
+ When a function or variable is referenced in some hidden way, the
+ call-graph data structure must be updated accordingly by this function.
+ There should be little need to call this function; all the references
+ should be made explicit to the cgraph code. At present these functions
+ are used by the C++ frontend to explicitly mark the keyed methods.
- analyze_expr callback
??? On the tree-ssa genericizing should take place here and we will avoid
need for these hooks (replacing them by genericizing hook)
- - expand_function callback
-
- This function is used to expand function and pass it into RTL back-end.
- Front-end should not make any assumptions about when this function can be
- called. In particular cgraph_assemble_pending_functions,
- cgraph_varpool_assemble_pending_variables, cgraph_finalize_function,
- cgraph_varpool_finalize_function, cgraph_optimize can cause arbitrarily
- previously finalized functions to be expanded.
-
- We implement two compilation modes.
-
- - unit-at-a-time: In this mode analyzing of all functions is deferred
+ Analysis of all functions is deferred
to cgraph_finalize_compilation_unit and expansion into cgraph_optimize.
In cgraph_finalize_compilation_unit the reachable functions are
??? Reorganize code so variables are output very last and only if they
really has been referenced by produced code, so we catch more cases
- where reference has been optimized out.
-
- - non-unit-at-a-time
-
- All functions are variables are output as early as possible to conserve
- memory consumption. This may or may not result in less memory used but
- it is still needed for some legacy code that rely on particular ordering
- of things output from the compiler.
-
- Varpool data structures are not used and variables are output directly.
-
- Functions are output early using call of
- cgraph_assemble_pending_function from cgraph_finalize_function. The
- decision on whether function is needed is made more conservative so
- uninlininable static functions are needed too. During the call-graph
- construction the edge destinations are not marked as reachable and it
- is completely relied upn assemble_variable to mark them.
-
- Inlining decision heuristics
- ??? Move this to separate file after tree-ssa merge.
-
- We separate inlining decisions from the inliner itself and store it
- inside callgraph as so called inline plan. Refer to cgraph.c
- documentation about particular representation of inline plans in the
- callgraph
-
- The implementation of particular heuristics is separated from
- the rest of code to make it easier to replace it with more complicated
- implementation in the future. The rest of inlining code acts as a
- library aimed to modify the callgraph and verify that the parameters
- on code size growth fits.
-
- To mark given call inline, use cgraph_mark_inline function, the
- verification is performed by cgraph_default_inline_p and
- cgraph_check_inline_limits.
-
- The heuristics implements simple knapsack style algorithm ordering
- all functions by their "profitability" (estimated by code size growth)
- and inlining them in priority order.
-
- cgraph_decide_inlining implements heuristics taking whole callgraph
- into account, while cgraph_decide_inlining_incrementally considers
- only one function at a time and is used in non-unit-at-a-time mode. */
+ where reference has been optimized out. */
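+
+/* Illustrative sketch, not a precise contract: a frontend typically drives
+ the entry points above roughly as follows.
+
+   for each parsed function body:
+     cgraph_finalize_function (fndecl, false);
+   for each finalized static variable:
+     varpool_finalize_variable (see above);
+   once the whole source level unit is parsed:
+     cgraph_finalize_compilation_unit ();
+
+ cgraph_finalize_compilation_unit analyzes the reachable functions and ends
+ by calling cgraph_optimize, which drives the pass manager and expands the
+ functions that need to be output. */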
#include "config.h"
#include "timevar.h"
#include "params.h"
#include "fibheap.h"
-#include "c-common.h"
#include "intl.h"
#include "function.h"
-#include "tree-gimple.h"
+#include "ipa-prop.h"
+#include "gimple.h"
+#include "tree-iterator.h"
+#include "tree-pass.h"
+#include "tree-dump.h"
#include "output.h"
-
-#define INSNS_PER_CALL 10
+#include "coverage.h"
+#include "plugin.h"
static void cgraph_expand_all_functions (void);
static void cgraph_mark_functions_to_output (void);
static void cgraph_expand_function (struct cgraph_node *);
-static tree record_call_1 (tree *, int *, void *);
-static void cgraph_mark_local_functions (void);
-static bool cgraph_default_inline_p (struct cgraph_node *n);
-static void cgraph_analyze_function (struct cgraph_node *node);
-static void cgraph_decide_inlining_incrementally (struct cgraph_node *);
-
-/* Statistics we collect about inlining algorithm. */
-static int ncalls_inlined;
-static int nfunctions_inlined;
-static int initial_insns;
-static int overall_insns;
-
-/* Records tree nodes seen in cgraph_create_edges. Simply using
- walk_tree_without_duplicates doesn't guarantee each node is visited
- once because it gets a new htab upon each recursive call from
- record_calls_1. */
-static struct pointer_set_t *visited_nodes;
+static void cgraph_output_pending_asms (void);
+static void cgraph_analyze_function (struct cgraph_node *);
static FILE *cgraph_dump_file;
+/* A vector of FUNCTION_DECLs declared as static constructors. */
+static GTY (()) VEC(tree, gc) *static_ctors;
+/* A vector of FUNCTION_DECLs declared as static destructors. */
+static GTY (()) VEC(tree, gc) *static_dtors;
+
+/* Used for vtable lookup in thunk adjusting. */
+static GTY (()) tree vtable_entry_type;
+
+/* When the target does not have ctors and dtors, we call all constructors
+ and destructors through special initialization/destruction functions
+ recognized by collect2.
+
+ When we are going to build these functions, collect all constructors and
+ destructors here and turn them into normal functions. */
+
+static void
+record_cdtor_fn (tree fndecl)
+{
+ struct cgraph_node *node;
+ if (targetm.have_ctors_dtors
+ || (!DECL_STATIC_CONSTRUCTOR (fndecl)
+ && !DECL_STATIC_DESTRUCTOR (fndecl)))
+ return;
+
+ if (DECL_STATIC_CONSTRUCTOR (fndecl))
+ {
+ VEC_safe_push (tree, gc, static_ctors, fndecl);
+ DECL_STATIC_CONSTRUCTOR (fndecl) = 0;
+ }
+ if (DECL_STATIC_DESTRUCTOR (fndecl))
+ {
+ VEC_safe_push (tree, gc, static_dtors, fndecl);
+ DECL_STATIC_DESTRUCTOR (fndecl) = 0;
+ }
+ node = cgraph_node (fndecl);
+ node->local.disregard_inline_limits = 1;
+ cgraph_mark_reachable_node (node);
+}
+
+/* Define global constructor/destructor functions for the CDTORS, of
+ which there are LEN. The CDTORS are sorted by initialization
+ priority. If CTOR_P is true, these are constructors; otherwise,
+ they are destructors. */
+
+static void
+build_cdtor (bool ctor_p, tree *cdtors, size_t len)
+{
+ size_t i;
+
+ i = 0;
+ while (i < len)
+ {
+ tree body;
+ tree fn;
+ priority_type priority;
+
+ priority = 0;
+ body = NULL_TREE;
+ /* Find the next batch of constructors/destructors with the same
+ initialization priority. */
+ do
+ {
+ priority_type p;
+ fn = cdtors[i];
+ p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
+ if (!body)
+ priority = p;
+ else if (p != priority)
+ break;
+ append_to_statement_list (build_function_call_expr (UNKNOWN_LOCATION,
+ fn, 0),
+ &body);
+ ++i;
+ }
+ while (i < len);
+ gcc_assert (body != NULL_TREE);
+ /* Generate a function to call all the functions of like
+ priority. */
+ cgraph_build_static_cdtor (ctor_p ? 'I' : 'D', body, priority);
+ }
+}
+
+/* Comparison function for qsort. P1 and P2 are actually of type
+ "tree *" and point to static constructors. DECL_INIT_PRIORITY is
+ used to determine the sort order. */
+
+static int
+compare_ctor (const void *p1, const void *p2)
+{
+ tree f1;
+ tree f2;
+ int priority1;
+ int priority2;
+
+ f1 = *(const tree *)p1;
+ f2 = *(const tree *)p2;
+ priority1 = DECL_INIT_PRIORITY (f1);
+ priority2 = DECL_INIT_PRIORITY (f2);
+
+ if (priority1 < priority2)
+ return -1;
+ else if (priority1 > priority2)
+ return 1;
+ else
+ /* Ensure a stable sort. */
+ return (const tree *)p1 - (const tree *)p2;
+}
+
+/* Comparison function for qsort. P1 and P2 are actually of type
+ "tree *" and point to static destructors. DECL_FINI_PRIORITY is
+ used to determine the sort order. */
+
+static int
+compare_dtor (const void *p1, const void *p2)
+{
+ tree f1;
+ tree f2;
+ int priority1;
+ int priority2;
+
+ f1 = *(const tree *)p1;
+ f2 = *(const tree *)p2;
+ priority1 = DECL_FINI_PRIORITY (f1);
+ priority2 = DECL_FINI_PRIORITY (f2);
+
+ if (priority1 < priority2)
+ return -1;
+ else if (priority1 > priority2)
+ return 1;
+ else
+ /* Ensure a stable sort. */
+ return (const tree *)p1 - (const tree *)p2;
+}
+
+/* Generate functions to call static constructors and destructors
+ for targets that do not support .ctors/.dtors sections. These
+ functions have magic names which are detected by collect2. */
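+/* (Illustrative note: on typical configurations these wrappers receive
+ reserved names along the lines of _GLOBAL__I_* for constructors and
+ _GLOBAL__D_* for destructors, matching the 'I'/'D' character passed to
+ cgraph_build_static_cdtor above; the exact spelling is target
+ dependent.) */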
+
+static void
+cgraph_build_cdtor_fns (void)
+{
+ if (!VEC_empty (tree, static_ctors))
+ {
+ gcc_assert (!targetm.have_ctors_dtors);
+ qsort (VEC_address (tree, static_ctors),
+ VEC_length (tree, static_ctors),
+ sizeof (tree),
+ compare_ctor);
+ build_cdtor (/*ctor_p=*/true,
+ VEC_address (tree, static_ctors),
+ VEC_length (tree, static_ctors));
+ VEC_truncate (tree, static_ctors, 0);
+ }
+
+ if (!VEC_empty (tree, static_dtors))
+ {
+ gcc_assert (!targetm.have_ctors_dtors);
+ qsort (VEC_address (tree, static_dtors),
+ VEC_length (tree, static_dtors),
+ sizeof (tree),
+ compare_dtor);
+ build_cdtor (/*ctor_p=*/false,
+ VEC_address (tree, static_dtors),
+ VEC_length (tree, static_dtors));
+ VEC_truncate (tree, static_dtors, 0);
+ }
+}
+
/* Determine if function DECL is needed. That is, visible to something
either outside this translation unit, something magic in the system
- configury, or (if not doing unit-at-a-time) to something we havn't
- seen yet. */
+ configury. */
-static bool
-decide_is_function_needed (struct cgraph_node *node, tree decl)
+bool
+cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
{
- tree origin;
+ /* If the user told us it is used, then it must be so. */
+ if (node->local.externally_visible)
+ return true;
+
+ /* ??? If the assembler name is set by hand, it is possible to assemble
+ the name later after finalizing the function and the fact is noticed
+ in assemble_name then. This is arguably a bug. */
+ if (DECL_ASSEMBLER_NAME_SET_P (decl)
+ && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
+ return true;
+
+ /* With -fkeep-inline-functions we are keeping all inline functions except
+ for extern inline ones. */
+ if (flag_keep_inline_functions
+ && DECL_DECLARED_INLINE_P (decl)
+ && !DECL_EXTERNAL (decl)
+ && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl)))
+ return true;
/* If we decided it was needed before, but at the time we didn't have
the body of the function available, then it's still needed. We have
return true;
/* Externally visible functions must be output. The exception is
- COMDAT functions that must be output only when they are needed. */
- if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
+ COMDAT functions that must be output only when they are needed.
+
+ When not optimizing, also output the static functions (see
+ PR24561), but don't do so for always_inline functions, functions
+ declared inline and nested functions. These were optimized out
+ in the original implementation and it is unclear whether we want
+ to change the behavior here. */
+ if (((TREE_PUBLIC (decl)
+ || (!optimize && !node->local.disregard_inline_limits
+ && !DECL_DECLARED_INLINE_P (decl)
+ && !node->origin))
+ && !flag_whole_program
+ && !flag_lto
+ && !flag_whopr)
+ && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
return true;
/* Constructors and destructors are reachable from the runtime by
if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
return true;
- /* If the user told us it is used, then it must be so. */
- if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
- return true;
-
- /* ??? If the assembler name is set by hand, it is possible to assemble
- the name later after finalizing the function and the fact is noticed
- in assemble_name then. This is arguably a bug. */
- if (DECL_ASSEMBLER_NAME_SET_P (decl)
- && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
- return true;
-
- if (flag_unit_at_a_time)
- return false;
-
- /* If not doing unit at a time, then we'll only defer this function
- if its marked for inlining. Otherwise we want to emit it now. */
-
- /* "extern inline" functions are never output locally. */
- if (DECL_EXTERNAL (decl))
- return false;
- /* Nested functions of extern inline function shall not be emit unless
- we inlined the origin. */
- for (origin = decl_function_context (decl); origin;
- origin = decl_function_context (origin))
- if (DECL_EXTERNAL (origin))
- return false;
- /* We want to emit COMDAT functions only when absolutely necessary. */
- if (DECL_COMDAT (decl))
- return false;
- if (!DECL_INLINE (decl)
- || (!node->local.disregard_inline_limits
- /* When declared inline, defer even the uninlinable functions.
- This allows them to be eliminated when unused. */
- && !DECL_DECLARED_INLINE_P (decl)
- && (!node->local.inlinable || !cgraph_default_inline_p (node))))
- return true;
-
return false;
}
-/* Walk the decls we marked as neccesary and see if they reference new variables
- or functions and add them into the worklists. */
-static bool
-cgraph_varpool_analyze_pending_decls (void)
+/* Process CGRAPH_NEW_FUNCTIONS and perform the actions necessary to add
+ these functions into the callgraph so that they look like ordinary
+ reachable functions inserted into the callgraph already at construction
+ time. */
+
+bool
+cgraph_process_new_functions (void)
{
- bool changed = false;
- timevar_push (TV_CGRAPH);
+ bool output = false;
+ tree fndecl;
+ struct cgraph_node *node;
- while (cgraph_varpool_first_unanalyzed_node)
+ /* Note that this queue may grow as it is being processed, as the new
+ functions may generate new ones. */
+ while (cgraph_new_nodes)
{
- tree decl = cgraph_varpool_first_unanalyzed_node->decl;
+ node = cgraph_new_nodes;
+ fndecl = node->decl;
+ cgraph_new_nodes = cgraph_new_nodes->next_needed;
+ switch (cgraph_state)
+ {
+ case CGRAPH_STATE_CONSTRUCTION:
+ /* At construction time we just need to finalize the function and move
+ it into the reachable functions list. */
+
+ node->next_needed = NULL;
+ cgraph_finalize_function (fndecl, false);
+ cgraph_mark_reachable_node (node);
+ output = true;
+ break;
- cgraph_varpool_first_unanalyzed_node->analyzed = true;
+ case CGRAPH_STATE_IPA:
+ case CGRAPH_STATE_IPA_SSA:
+ /* When IPA optimization has already started, do all essential
+ transformations that have already been performed on the whole
+ cgraph but not on this function. */
+
+ gimple_register_cfg_hooks ();
+ if (!node->analyzed)
+ cgraph_analyze_function (node);
+ push_cfun (DECL_STRUCT_FUNCTION (fndecl));
+ current_function_decl = fndecl;
+ compute_inline_parameters (node);
+ if ((cgraph_state == CGRAPH_STATE_IPA_SSA
+ && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
+ /* When not optimizing, be sure we run early local passes anyway
+ to expand OMP. */
+ || !optimize)
+ execute_pass_list (pass_early_local_passes.pass.sub);
+ free_dominance_info (CDI_POST_DOMINATORS);
+ free_dominance_info (CDI_DOMINATORS);
+ pop_cfun ();
+ current_function_decl = NULL;
+ break;
- cgraph_varpool_first_unanalyzed_node = cgraph_varpool_first_unanalyzed_node->next_needed;
+ case CGRAPH_STATE_EXPANSION:
+ /* Functions created during expansion shall be compiled
+ directly. */
+ node->process = 0;
+ cgraph_expand_function (node);
+ break;
- if (DECL_INITIAL (decl))
- cgraph_create_edges (NULL, DECL_INITIAL (decl));
- changed = true;
+ default:
+ gcc_unreachable ();
+ break;
+ }
+ cgraph_call_function_insertion_hooks (node);
}
- timevar_pop (TV_CGRAPH);
- return changed;
+ return output;
}
-/* Optimization of function bodies might've rendered some variables as
- unnecesary so we want to avoid these from being compiled.
+/* As a GCC extension we allow redefinition of the function. The
+ semantics when the two bodies differ are not well defined.
+ We replace the old body with the new body so that in unit-at-a-time
+ mode we always use the new body, while in normal mode we may end up
+ with the old body inlined into some functions and the new body
+ expanded and inlined in others.
+
+ ??? It may make more sense to use one body for inlining and the other
+ body for expanding the function but this is difficult to do. */
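+
+/* Hypothetical example, for illustration only: with the GNU "extern inline"
+ extension a unit may first supply an inline-only body and later redefine
+ the function with the body that is actually emitted,
+
+   extern inline int f (void) { return 1; }
+   int f (void) { return 2; }
+
+ in which case cgraph_reset_node below discards what was learned about the
+ first body so that the second definition is used from then on. */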
- This is done by prunning the queue and keeping only the variables that
- really appear needed (ie thery are either externally visible or referenced
- by compiled function). Re-doing the reachability analysis on variables
- brings back the remaining variables referenced by these. */
static void
-cgraph_varpool_remove_unreferenced_decls (void)
+cgraph_reset_node (struct cgraph_node *node)
{
- struct cgraph_varpool_node *next, *node = cgraph_varpool_nodes_queue;
-
- cgraph_varpool_reset_queue ();
-
- if (errorcount || sorrycount)
- return;
-
- while (node)
+ /* If node->process is set, then we have already begun whole-unit analysis.
+ This is *not* testing for whether we've already emitted the function.
+ That case can be sort-of legitimately seen with real function redefinition
+ errors. I would argue that the front end should never present us with
+ such a case, but don't enforce that for now. */
+ gcc_assert (!node->process);
+
+ /* Reset our data structures so we can analyze the function again. */
+ memset (&node->local, 0, sizeof (node->local));
+ memset (&node->global, 0, sizeof (node->global));
+ memset (&node->rtl, 0, sizeof (node->rtl));
+ node->analyzed = false;
+ node->local.redefined_extern_inline = true;
+ node->local.finalized = false;
+
+ cgraph_node_remove_callees (node);
+
+ /* We may need to re-queue the node for assembling in case
+ we already processed it and ignored it as not needed or got
+ a re-declaration in IMA mode. */
+ if (node->reachable)
{
- tree decl = node->decl;
- next = node->next_needed;
- node->needed = 0;
+ struct cgraph_node *n;
- if (node->finalized
- && ((DECL_ASSEMBLER_NAME_SET_P (decl)
- && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
- || node->force_output
- || decide_is_variable_needed (node, decl)))
- cgraph_varpool_mark_needed_node (node);
-
- node = next;
+ for (n = cgraph_nodes_queue; n; n = n->next_needed)
+ if (n == node)
+ break;
+ if (!n)
+ node->reachable = 0;
}
- cgraph_varpool_analyze_pending_decls ();
}
-
-/* When not doing unit-at-a-time, output all functions enqueued.
- Return true when such a functions were found. */
-
-bool
-cgraph_assemble_pending_functions (void)
+static void
+cgraph_lower_function (struct cgraph_node *node)
{
- bool output = false;
-
- if (flag_unit_at_a_time)
- return false;
-
- while (cgraph_nodes_queue)
- {
- struct cgraph_node *n = cgraph_nodes_queue;
+ if (node->lowered)
+ return;
- cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
- n->next_needed = NULL;
- if (!n->global.inlined_to
- && !n->alias
- && !DECL_EXTERNAL (n->decl))
- {
- cgraph_expand_function (n);
- output = true;
- }
- }
+ if (node->nested)
+ lower_nested_functions (node->decl);
+ gcc_assert (!node->nested);
- return output;
+ tree_lowering_passes (node->decl);
+ node->lowered = true;
}
/* DECL has been parsed. Take it, queue it, compile it at the whim of the
struct cgraph_node *node = cgraph_node (decl);
if (node->local.finalized)
- {
- /* As an GCC extension we allow redefinition of the function. The
- semantics when both copies of bodies differ is not well defined.
- We replace the old body with new body so in unit at a time mode
- we always use new body, while in normal mode we may end up with
- old body inlined into some functions and new body expanded and
- inlined in others.
-
- ??? It may make more sense to use one body for inlining and other
- body for expanding the function but this is difficult to do. */
-
- /* If node->output is set, then this is a unit-at-a-time compilation
- and we have already begun whole-unit analysis. This is *not*
- testing for whether we've already emitted the function. That
- case can be sort-of legitimately seen with real function
- redefinition errors. I would argue that the front end should
- never present us with such a case, but don't enforce that for now. */
- gcc_assert (!node->output);
-
- /* Reset our data structures so we can analyze the function again. */
- memset (&node->local, 0, sizeof (node->local));
- memset (&node->global, 0, sizeof (node->global));
- memset (&node->rtl, 0, sizeof (node->rtl));
- node->analyzed = false;
- node->local.redefined_extern_inline = true;
-
- if (!flag_unit_at_a_time)
- {
- struct cgraph_node *n;
-
- for (n = cgraph_nodes; n; n = n->next)
- if (n->global.inlined_to == node)
- cgraph_remove_node (n);
- }
-
- cgraph_node_remove_callees (node);
-
- /* We may need to re-queue the node for assembling in case
- we already proceeded it and ignored as not needed. */
- if (node->reachable && !flag_unit_at_a_time)
- {
- struct cgraph_node *n;
-
- for (n = cgraph_nodes_queue; n; n = n->next_needed)
- if (n == node)
- break;
- if (!n)
- node->reachable = 0;
- }
- }
+ cgraph_reset_node (node);
+ node->pid = cgraph_max_pid ++;
notice_global_symbol (decl);
- node->decl = decl;
node->local.finalized = true;
- if (node->nested)
- lower_nested_functions (decl);
- gcc_assert (!node->nested);
-
- /* If not unit at a time, then we need to create the call graph
- now, so that called functions can be queued and emitted now. */
- if (!flag_unit_at_a_time)
- {
- cgraph_analyze_function (node);
- cgraph_decide_inlining_incrementally (node);
- }
+ node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
+ node->finalized_by_frontend = true;
+ record_cdtor_fn (node->decl);
- if (decide_is_function_needed (node, decl))
+ if (cgraph_decide_is_function_needed (node, decl))
cgraph_mark_needed_node (node);
- /* If not unit at a time, go ahead and emit everything we've found
- to be reachable at this time. */
- if (!nested)
- {
- if (!cgraph_assemble_pending_functions ())
- ggc_collect ();
- }
+ /* Since we reclaim unreachable nodes at the end of every language
+ level unit, we need to be conservative about possible entry points
+ there. */
+ if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
+ cgraph_mark_reachable_node (node);
/* If we've not yet emitted decl, tell the debug info about it. */
if (!TREE_ASM_WRITTEN (decl))
/* Possibly warn about unused parameters. */
if (warn_unused_parameter)
do_warn_unused_parameter (decl);
-}
-
-/* Walk tree and record all calls. Called via walk_tree. */
-static tree
-record_call_1 (tree *tp, int *walk_subtrees, void *data)
-{
- tree t = *tp;
-
- switch (TREE_CODE (t))
- {
- case VAR_DECL:
- /* ??? Really, we should mark this decl as *potentially* referenced
- by this function and re-examine whether the decl is actually used
- after rtl has been generated. */
- if (TREE_STATIC (t) || DECL_EXTERNAL (t))
- {
- cgraph_varpool_mark_needed_node (cgraph_varpool_node (t));
- if (lang_hooks.callgraph.analyze_expr)
- return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees,
- data);
- }
- break;
-
- case ADDR_EXPR:
- if (flag_unit_at_a_time)
- {
- /* Record dereferences to the functions. This makes the
- functions reachable unconditionally. */
- tree decl = TREE_OPERAND (*tp, 0);
- if (TREE_CODE (decl) == FUNCTION_DECL)
- cgraph_mark_needed_node (cgraph_node (decl));
- }
- break;
-
- case CALL_EXPR:
- {
- tree decl = get_callee_fndecl (*tp);
- if (decl && TREE_CODE (decl) == FUNCTION_DECL)
- {
- cgraph_create_edge (data, cgraph_node (decl), *tp);
-
- /* When we see a function call, we don't want to look at the
- function reference in the ADDR_EXPR that is hanging from
- the CALL_EXPR we're examining here, because we would
- conclude incorrectly that the function's address could be
- taken by something that is not a function call. So only
- walk the function parameter list, skip the other subtrees. */
-
- walk_tree (&TREE_OPERAND (*tp, 1), record_call_1, data,
- visited_nodes);
- *walk_subtrees = 0;
- }
- break;
- }
-
- default:
- /* Save some cycles by not walking types and declaration as we
- won't find anything useful there anyway. */
- if (IS_TYPE_OR_DECL_P (*tp))
- {
- *walk_subtrees = 0;
- break;
- }
- if ((unsigned int) TREE_CODE (t) >= LAST_AND_UNUSED_TREE_CODE)
- return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees, data);
- break;
- }
-
- return NULL;
+ if (!nested)
+ ggc_collect ();
}
-/* Create cgraph edges for function calls inside BODY from NODE. */
+/* C99 extern inline keywords allow changing of a declaration after the
+ function has been finalized. We need to re-decide then whether we want
+ to mark the function as needed. */
void
-cgraph_create_edges (struct cgraph_node *node, tree body)
+cgraph_mark_if_needed (tree decl)
{
- /* The nodes we're interested in are never shared, so walk
- the tree ignoring duplicates. */
- visited_nodes = pointer_set_create ();
- walk_tree (&body, record_call_1, node, visited_nodes);
- pointer_set_destroy (visited_nodes);
- visited_nodes = NULL;
+ struct cgraph_node *node = cgraph_node (decl);
+ if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
+ cgraph_mark_needed_node (node);
}
-static bool error_found;
-
-/* Callback of verify_cgraph_node. Check that all call_exprs have
- cgraph nodes. */
-
-static tree
-verify_cgraph_node_1 (tree *tp, int *walk_subtrees, void *data)
+/* Return TRUE if NODE2 is equivalent to NODE or its clone. */
+static bool
+clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
{
- tree t = *tp;
- tree decl;
-
- if (TREE_CODE (t) == CALL_EXPR && (decl = get_callee_fndecl (t)))
- {
- struct cgraph_edge *e = cgraph_edge (data, t);
- if (e)
- {
- if (e->aux)
- {
- error ("Shared call_expr:");
- debug_tree (t);
- error_found = true;
- }
- if (e->callee->decl != cgraph_node (decl)->decl)
- {
- error ("Edge points to wrong declaration:");
- debug_tree (e->callee->decl);
- fprintf (stderr," Instead of:");
- debug_tree (decl);
- }
- e->aux = (void *)1;
- }
- else
- {
- error ("Missing callgraph edge for call expr:");
- debug_tree (t);
- error_found = true;
- }
- }
-
- /* Save some cycles by not walking types and declaration as we
- won't find anything useful there anyway. */
- if (IS_TYPE_OR_DECL_P (*tp))
- *walk_subtrees = 0;
-
- return NULL_TREE;
+ while (node != node2 && node2)
+ node2 = node2->clone_of;
+ return node2 != NULL;
}
/* Verify cgraph nodes of given cgraph node. */
verify_cgraph_node (struct cgraph_node *node)
{
struct cgraph_edge *e;
- struct cgraph_node *main_clone;
+ struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
+ struct function *saved_cfun = cfun;
+ basic_block this_block;
+ gimple_stmt_iterator gsi;
+ bool error_found = false;
+
+ if (errorcount || sorrycount)
+ return;
timevar_push (TV_CGRAPH_VERIFY);
- error_found = false;
+ /* debug_generic_stmt needs the correct cfun. */
+ set_cfun (this_cfun);
for (e = node->callees; e; e = e->next_callee)
if (e->aux)
{
- error ("Aux field set for edge %s->%s",
- cgraph_node_name (e->caller), cgraph_node_name (e->callee));
+ error ("aux field set for edge %s->%s",
+ identifier_to_locale (cgraph_node_name (e->caller)),
+ identifier_to_locale (cgraph_node_name (e->callee)));
error_found = true;
}
+ if (node->count < 0)
+ {
+ error ("Execution count is negative");
+ error_found = true;
+ }
+ if (node->global.inlined_to && node->local.externally_visible)
+ {
+ error ("Externally visible inline clone");
+ error_found = true;
+ }
+ if (node->global.inlined_to && node->address_taken)
+ {
+ error ("Inline clone with address taken");
+ error_found = true;
+ }
+ if (node->global.inlined_to && node->needed)
+ {
+ error ("Inline clone is needed");
+ error_found = true;
+ }
for (e = node->callers; e; e = e->next_caller)
{
+ if (e->count < 0)
+ {
+ error ("caller edge count is negative");
+ error_found = true;
+ }
+ if (e->frequency < 0)
+ {
+ error ("caller edge frequency is negative");
+ error_found = true;
+ }
+ if (e->frequency > CGRAPH_FREQ_MAX)
+ {
+ error ("caller edge frequency is too large");
+ error_found = true;
+ }
+ if (gimple_has_body_p (e->caller->decl)
+ && !e->caller->global.inlined_to
+ && (e->frequency
+ != compute_call_stmt_bb_frequency (e->caller->decl,
+ gimple_bb (e->call_stmt))))
+ {
+ error ("caller edge frequency %i does not match BB freqency %i",
+ e->frequency,
+ compute_call_stmt_bb_frequency (e->caller->decl,
+ gimple_bb (e->call_stmt)));
+ error_found = true;
+ }
if (!e->inline_failed)
{
if (node->global.inlined_to
!= (e->caller->global.inlined_to
? e->caller->global.inlined_to : e->caller))
{
- error ("Inlined_to pointer is wrong");
+ error ("inlined_to pointer is wrong");
error_found = true;
}
if (node->callers->next_caller)
{
- error ("Multiple inline callers");
+ error ("multiple inline callers");
error_found = true;
}
}
else
if (node->global.inlined_to)
{
- error ("Inlined_to pointer set for noninline callers");
+ error ("inlined_to pointer set for noninline callers");
error_found = true;
}
}
if (!node->callers && node->global.inlined_to)
{
- error ("Inlined_to pointer is set but no predecesors found");
+ error ("inlined_to pointer is set but no predecessors found");
error_found = true;
}
if (node->global.inlined_to == node)
{
- error ("Inlined_to pointer reffers to itself");
+ error ("inlined_to pointer refers to itself");
error_found = true;
}
- for (main_clone = cgraph_node (node->decl); main_clone;
- main_clone = main_clone->next_clone)
- if (main_clone == node)
- break;
- if (!node)
+ if (!cgraph_node (node->decl))
{
- error ("Node not found in DECL_ASSEMBLER_NAME hash");
+ error ("node not found in cgraph_hash");
error_found = true;
}
-
- if (node->analyzed
- && DECL_SAVED_TREE (node->decl) && !TREE_ASM_WRITTEN (node->decl)
- && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to))
+
+ if (node->clone_of)
{
- walk_tree_without_duplicates (&DECL_SAVED_TREE (node->decl),
- verify_cgraph_node_1, node);
- for (e = node->callees; e; e = e->next_callee)
+ struct cgraph_node *n;
+ for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
+ if (n == node)
+ break;
+ if (!n)
{
- if (!e->aux)
- {
- error ("Edge %s->%s has no corresponding call_expr",
- cgraph_node_name (e->caller),
- cgraph_node_name (e->callee));
- error_found = true;
- }
- e->aux = 0;
+ error ("node has wrong clone_of");
+ error_found = true;
}
}
- if (error_found)
+ if (node->clones)
{
- dump_cgraph_node (stderr, node);
- internal_error ("verify_cgraph_node failed.");
+ struct cgraph_node *n;
+ for (n = node->clones; n; n = n->next_sibling_clone)
+ if (n->clone_of != node)
+ break;
+ if (n)
+ {
+ error ("node has wrong clone list");
+ error_found = true;
+ }
+ }
+ if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
+ {
+ error ("node is in clone list but it is not clone");
+ error_found = true;
+ }
+ if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
+ {
+ error ("node has wrong prev_clone pointer");
+ error_found = true;
+ }
+ if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
+ {
+ error ("double linked list of clones corrupted");
+ error_found = true;
}
- timevar_pop (TV_CGRAPH_VERIFY);
-}
-
-/* Verify whole cgraph structure. */
-void
-verify_cgraph (void)
-{
- struct cgraph_node *node;
+
+ if (node->analyzed && gimple_has_body_p (node->decl)
+ && !TREE_ASM_WRITTEN (node->decl)
+ && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
+ && !flag_wpa)
+ {
+ if (this_cfun->cfg)
+ {
+ /* The nodes we're interested in are never shared, so walk
+ the tree ignoring duplicates. */
+ struct pointer_set_t *visited_nodes = pointer_set_create ();
+ /* Reach the trees by walking over the CFG, and note the
+ enclosing basic-blocks in the call edges. */
+ FOR_EACH_BB_FN (this_block, this_cfun)
+ for (gsi = gsi_start_bb (this_block);
+ !gsi_end_p (gsi);
+ gsi_next (&gsi))
+ {
+ gimple stmt = gsi_stmt (gsi);
+ tree decl;
+ if (is_gimple_call (stmt) && (decl = gimple_call_fndecl (stmt)))
+ {
+ struct cgraph_edge *e = cgraph_edge (node, stmt);
+ if (e)
+ {
+ if (e->aux)
+ {
+ error ("shared call_stmt:");
+ debug_gimple_stmt (stmt);
+ error_found = true;
+ }
+ if (e->callee->same_body_alias)
+ {
+ error ("edge points to same body alias:");
+ debug_tree (e->callee->decl);
+ error_found = true;
+ }
+ else if (!node->global.inlined_to
+ && !e->callee->global.inlined_to
+ && !clone_of_p (cgraph_node (decl), e->callee))
+ {
+ error ("edge points to wrong declaration:");
+ debug_tree (e->callee->decl);
+ fprintf (stderr," Instead of:");
+ debug_tree (decl);
+ error_found = true;
+ }
+ e->aux = (void *)1;
+ }
+ else
+ {
+ error ("missing callgraph edge for call stmt:");
+ debug_gimple_stmt (stmt);
+ error_found = true;
+ }
+ }
+ }
+ pointer_set_destroy (visited_nodes);
+ }
+ else
+ /* No CFG available?! */
+ gcc_unreachable ();
+
+ for (e = node->callees; e; e = e->next_callee)
+ {
+ if (!e->aux && !e->indirect_call)
+ {
+ error ("edge %s->%s has no corresponding call_stmt",
+ identifier_to_locale (cgraph_node_name (e->caller)),
+ identifier_to_locale (cgraph_node_name (e->callee)));
+ debug_gimple_stmt (e->call_stmt);
+ error_found = true;
+ }
+ e->aux = 0;
+ }
+ }
+ if (error_found)
+ {
+ dump_cgraph_node (stderr, node);
+ internal_error ("verify_cgraph_node failed");
+ }
+ set_cfun (saved_cfun);
+ timevar_pop (TV_CGRAPH_VERIFY);
+}
+
+/* Verify whole cgraph structure. */
+void
+verify_cgraph (void)
+{
+ struct cgraph_node *node;
if (sorrycount || errorcount)
return;
verify_cgraph_node (node);
}
+/* Output all asm statements we have stored up to be output. */
-/* Output all variables enqueued to be assembled. */
-bool
-cgraph_varpool_assemble_pending_decls (void)
+static void
+cgraph_output_pending_asms (void)
{
- bool changed = false;
+ struct cgraph_asm_node *can;
if (errorcount || sorrycount)
- return false;
-
- /* EH might mark decls as needed during expansion. This should be safe since
- we don't create references to new function, but it should not be used
- elsewhere. */
- cgraph_varpool_analyze_pending_decls ();
-
- while (cgraph_varpool_nodes_queue)
- {
- tree decl = cgraph_varpool_nodes_queue->decl;
- struct cgraph_varpool_node *node = cgraph_varpool_nodes_queue;
+ return;
- cgraph_varpool_nodes_queue = cgraph_varpool_nodes_queue->next_needed;
- if (!TREE_ASM_WRITTEN (decl) && !node->alias && !DECL_EXTERNAL (decl))
- {
- assemble_variable (decl, 0, 1, 0);
- changed = true;
- }
- node->next_needed = NULL;
- }
- return changed;
+ for (can = cgraph_asm_nodes; can; can = can->next)
+ assemble_asm (can->asm_str);
+ cgraph_asm_nodes = NULL;
}
/* Analyze the function scheduled to be output. */
static void
cgraph_analyze_function (struct cgraph_node *node)
{
+ tree save = current_function_decl;
tree decl = node->decl;
- struct cgraph_edge *e;
current_function_decl = decl;
+ push_cfun (DECL_STRUCT_FUNCTION (decl));
- /* First kill forward declaration so reverse inlining works properly. */
- cgraph_create_edges (node, DECL_SAVED_TREE (decl));
+ assign_assembler_name_if_neeeded (node->decl);
- node->local.inlinable = tree_inlinable_function_p (decl);
- node->local.self_insns = estimate_num_insns (DECL_SAVED_TREE (decl));
- if (node->local.inlinable)
- node->local.disregard_inline_limits
- = lang_hooks.tree_inlining.disregard_inline_limits (decl);
- for (e = node->callers; e; e = e->next_caller)
- {
- if (node->local.redefined_extern_inline)
- e->inline_failed = N_("redefined extern inline functions are not "
- "considered for inlining");
- else if (!node->local.inlinable)
- e->inline_failed = N_("function not inlinable");
- else
- e->inline_failed = N_("function not considered for inlining");
- }
- if (flag_really_no_inline && !node->local.disregard_inline_limits)
- node->local.inlinable = 0;
- /* Inlining characteristics are maintained by the cgraph_mark_inline. */
- node->global.insns = node->local.self_insns;
+ /* Make sure to gimplify bodies only once. While analyzing a
+ function we lower it, which will require gimplified nested
+ functions, so we can end up here with an already gimplified
+ body. */
+ if (!gimple_body (decl))
+ gimplify_function_tree (decl);
+ dump_function (TDI_generic, decl);
+ cgraph_lower_function (node);
node->analyzed = true;
- current_function_decl = NULL;
+
+ pop_cfun ();
+ current_function_decl = save;
}
-/* Analyze the whole compilation unit once it is parsed completely. */
+/* Look for externally_visible and used attributes and mark cgraph nodes
+ accordingly.
-void
-cgraph_finalize_compilation_unit (void)
+ We cannot mark the nodes at the point the attributes are processed (in
+ handle_*_attribute) because the copy of the declarations available at that
+ point may not be canonical. For example, in:
+
+ void f();
+ void f() __attribute__((used));
+
+ the declaration we see in handle_used_attribute will be the second
+ declaration -- but the front end will subsequently merge that declaration
+ with the original declaration and discard the second declaration.
+
+ Furthermore, we can't mark these nodes in cgraph_finalize_function because:
+
+ void f() {}
+ void f() __attribute__((externally_visible));
+
+ is valid.
+
+ So, we walk the nodes at the end of the translation unit, applying the
+ attributes at that point. */
+
+static void
+process_function_and_variable_attributes (struct cgraph_node *first,
+ struct varpool_node *first_var)
{
struct cgraph_node *node;
- /* Keep track of already processed nodes when called multiple times for
- intermodule optmization. */
- static struct cgraph_node *first_analyzed;
-
- finish_aliases_1 ();
+ struct varpool_node *vnode;
- if (!flag_unit_at_a_time)
+ for (node = cgraph_nodes; node != first; node = node->next)
{
- cgraph_assemble_pending_functions ();
- return;
+ tree decl = node->decl;
+ if (DECL_PRESERVE_P (decl))
+ {
+ mark_decl_referenced (decl);
+ if (node->local.finalized)
+ cgraph_mark_needed_node (node);
+ }
+ if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
+ {
+ if (! TREE_PUBLIC (node->decl))
+ warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
+ "%<externally_visible%>"
+ " attribute have effect only on public objects");
+ else if (node->local.finalized)
+ cgraph_mark_needed_node (node);
+ }
}
-
- if (!quiet_flag)
+ for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
{
- fprintf (stderr, "\nAnalyzing compilation unit");
- fflush (stderr);
+ tree decl = vnode->decl;
+ if (DECL_PRESERVE_P (decl))
+ {
+ mark_decl_referenced (decl);
+ vnode->force_output = true;
+ if (vnode->finalized)
+ varpool_mark_needed_node (vnode);
+ }
+ if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
+ {
+ if (! TREE_PUBLIC (vnode->decl))
+ warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
+ "%<externally_visible%>"
+ " attribute have effect only on public objects");
+ else if (vnode->finalized)
+ varpool_mark_needed_node (vnode);
+ }
}
+}
- timevar_push (TV_CGRAPH);
- cgraph_varpool_analyze_pending_decls ();
+/* Process CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
+ each reachable function) and build the cgraph.
+ The function can be called multiple times after inserting new nodes
+ into the beginning of the queue. Just the new part of the queue is
+ re-scanned then. */
+
+static void
+cgraph_analyze_functions (void)
+{
+ /* Keep track of already processed nodes when called multiple times for
+ intermodule optimization. */
+ static struct cgraph_node *first_analyzed;
+ struct cgraph_node *first_processed = first_analyzed;
+ static struct varpool_node *first_analyzed_var;
+ struct cgraph_node *node, *next;
+
+ process_function_and_variable_attributes (first_processed,
+ first_analyzed_var);
+ first_processed = cgraph_nodes;
+ first_analyzed_var = varpool_nodes;
+ varpool_analyze_pending_decls ();
if (cgraph_dump_file)
{
fprintf (cgraph_dump_file, "Initial entry points:");
for (node = cgraph_nodes; node != first_analyzed; node = node->next)
- if (node->needed && DECL_SAVED_TREE (node->decl))
+ if (node->needed)
fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
fprintf (cgraph_dump_file, "\n");
}
+ cgraph_process_new_functions ();
/* Propagate reachability flag and lower representation of all reachable
functions. In the future, lowering will introduce new functions and
node->next_needed = NULL;
/* ??? It is possible to create extern inline function and later using
- weak alas attribute to kill its body. See
+ weak alias attribute to kill its body. See
gcc.c-torture/compile/20011119-1.c */
- if (!DECL_SAVED_TREE (decl))
- continue;
-
- gcc_assert (!node->analyzed && node->reachable);
- gcc_assert (DECL_SAVED_TREE (decl));
+ if (!DECL_STRUCT_FUNCTION (decl))
+ {
+ cgraph_reset_node (node);
+ continue;
+ }
- cgraph_analyze_function (node);
+ if (!node->analyzed)
+ cgraph_analyze_function (node);
for (edge = node->callees; edge; edge = edge->next_callee)
if (!edge->callee->reachable)
cgraph_mark_reachable_node (edge->callee);
- cgraph_varpool_analyze_pending_decls ();
+ if (node->same_comdat_group)
+ {
+ for (next = node->same_comdat_group;
+ next != node;
+ next = next->same_comdat_group)
+ cgraph_mark_reachable_node (next);
+ }
+
+ /* If decl is a clone of an abstract function, mark that abstract
+ function so that we don't release its body. The DECL_INITIAL() of that
+ abstract function declaration will be needed later to output debug info. */
+ if (DECL_ABSTRACT_ORIGIN (decl))
+ {
+ struct cgraph_node *origin_node = cgraph_node (DECL_ABSTRACT_ORIGIN (decl));
+ origin_node->abstract_and_needed = true;
+ }
+
+ /* We finalize local static variables while constructing callgraph
+ edges. Process their attributes too. */
+ process_function_and_variable_attributes (first_processed,
+ first_analyzed_var);
+ first_processed = cgraph_nodes;
+ first_analyzed_var = varpool_nodes;
+ varpool_analyze_pending_decls ();
+ cgraph_process_new_functions ();
}
/* Collect entry points to the unit. */
-
if (cgraph_dump_file)
{
fprintf (cgraph_dump_file, "Unit entry points:");
for (node = cgraph_nodes; node != first_analyzed; node = node->next)
- if (node->needed && DECL_SAVED_TREE (node->decl))
+ if (node->needed)
fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
fprintf (cgraph_dump_file, "\n\nInitial ");
dump_cgraph (cgraph_dump_file);
if (cgraph_dump_file)
fprintf (cgraph_dump_file, "\nReclaiming functions:");
- for (node = cgraph_nodes; node != first_analyzed; node = node->next)
+ for (node = cgraph_nodes; node != first_analyzed; node = next)
{
tree decl = node->decl;
+ next = node->next;
+
+ if (node->local.finalized && !gimple_has_body_p (decl))
+ cgraph_reset_node (node);
- if (!node->reachable && DECL_SAVED_TREE (decl))
+ if (!node->reachable && gimple_has_body_p (decl))
{
if (cgraph_dump_file)
fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
cgraph_remove_node (node);
+ continue;
}
else
node->next_needed = NULL;
+ gcc_assert (!node->local.finalized || gimple_has_body_p (decl));
+ gcc_assert (node->analyzed == node->local.finalized);
}
if (cgraph_dump_file)
{
}
first_analyzed = cgraph_nodes;
ggc_collect ();
+}
+
+
+/* Analyze the whole compilation unit once it is parsed completely. */
+
+void
+cgraph_finalize_compilation_unit (void)
+{
+ timevar_push (TV_CGRAPH);
+
+ /* Do not skip analyzing the functions if there were errors; we
+ would miss diagnostics for the following functions otherwise. */
+
+ /* Emit size functions we didn't inline. */
+ finalize_size_functions ();
+
+ /* Call functions declared with the "constructor" or "destructor"
+ attribute. */
+ cgraph_build_cdtor_fns ();
+
+ /* Mark alias targets necessary and emit diagnostics. */
+ finish_aliases_1 ();
+
+ if (!quiet_flag)
+ {
+ fprintf (stderr, "\nAnalyzing compilation unit\n");
+ fflush (stderr);
+ }
+
+ /* Gimplify and lower all functions, compute reachability and
+ remove unreachable nodes. */
+ cgraph_analyze_functions ();
+
+ /* Mark alias targets necessary and emit diagnostics. */
+ finish_aliases_1 ();
+
+ /* Gimplify and lower thunks. */
+ cgraph_analyze_functions ();
+
+ /* Finally drive the pass manager. */
+ cgraph_optimize ();
+
timevar_pop (TV_CGRAPH);
}
+
+
/* Figure out what functions we want to assemble. */
static void
cgraph_mark_functions_to_output (void)
{
struct cgraph_node *node;
+#ifdef ENABLE_CHECKING
+ bool check_same_comdat_groups = false;
+
+ for (node = cgraph_nodes; node; node = node->next)
+ gcc_assert (!node->process);
+#endif
for (node = cgraph_nodes; node; node = node->next)
{
tree decl = node->decl;
struct cgraph_edge *e;
-
- gcc_assert (!node->output);
+
+ gcc_assert (!node->process || node->same_comdat_group);
+ if (node->process)
+ continue;
for (e = node->callers; e; e = e->next_caller)
if (e->inline_failed)
/* We need to output all local functions that are used and not
always inlined, as well as those that are reachable from
outside the current compilation unit. */
- if (DECL_SAVED_TREE (decl)
+ if (node->analyzed
&& !node->global.inlined_to
&& (node->needed
|| (e && node->reachable))
&& !TREE_ASM_WRITTEN (decl)
&& !DECL_EXTERNAL (decl))
- node->output = 1;
+ {
+ node->process = 1;
+ if (node->same_comdat_group)
+ {
+ struct cgraph_node *next;
+ for (next = node->same_comdat_group;
+ next != node;
+ next = next->same_comdat_group)
+ next->process = 1;
+ }
+ }
+ else if (node->same_comdat_group)
+ {
+#ifdef ENABLE_CHECKING
+ check_same_comdat_groups = true;
+#endif
+ }
else
{
/* We should've reclaimed all functions that are not needed. */
#ifdef ENABLE_CHECKING
- if (!node->global.inlined_to && DECL_SAVED_TREE (decl)
+ if (!node->global.inlined_to
+ && gimple_has_body_p (decl)
&& !DECL_EXTERNAL (decl))
{
dump_cgraph_node (stderr, node);
internal_error ("failed to reclaim unneeded function");
}
#endif
- gcc_assert (node->global.inlined_to || !DECL_SAVED_TREE (decl)
+ gcc_assert (node->global.inlined_to
+ || !gimple_has_body_p (decl)
|| DECL_EXTERNAL (decl));
}
-
- }
-}
-
-/* Expand function specified by NODE. */
-
-static void
-cgraph_expand_function (struct cgraph_node *node)
-{
- tree decl = node->decl;
-
- /* We ought to not compile any inline clones. */
- gcc_assert (!node->global.inlined_to);
- if (flag_unit_at_a_time)
- announce_function (decl);
-
- /* Generate RTL for the body of DECL. */
- lang_hooks.callgraph.expand_function (decl);
-
- /* Make sure that BE didn't give up on compiling. */
- /* ??? Can happen with nested function of extern inline. */
- gcc_assert (TREE_ASM_WRITTEN (node->decl));
-
- current_function_decl = NULL;
- if (!cgraph_preserve_function_body_p (node->decl))
- {
- DECL_SAVED_TREE (node->decl) = NULL;
- DECL_STRUCT_FUNCTION (node->decl) = NULL;
- DECL_INITIAL (node->decl) = error_mark_node;
- /* Eliminate all call edges. This is important so the call_expr no longer
- points to the dead function body. */
- cgraph_node_remove_callees (node);
}
-}
-
-/* Fill array order with all nodes with output flag set in the reverse
- topological order. */
-
-static int
-cgraph_postorder (struct cgraph_node **order)
-{
- struct cgraph_node *node, *node2;
- int stack_size = 0;
- int order_pos = 0;
- struct cgraph_edge *edge, last;
-
- struct cgraph_node **stack =
- xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
-
- /* We have to deal with cycles nicely, so use a depth first traversal
- output algorithm. Ignore the fact that some functions won't need
- to be output and put them into order as well, so we get dependencies
- right through intline functions. */
- for (node = cgraph_nodes; node; node = node->next)
- node->aux = NULL;
- for (node = cgraph_nodes; node; node = node->next)
- if (!node->aux)
- {
- node2 = node;
- if (!node->callers)
- node->aux = &last;
- else
- node->aux = node->callers;
- while (node2)
- {
- while (node2->aux != &last)
- {
- edge = node2->aux;
- if (edge->next_caller)
- node2->aux = edge->next_caller;
- else
- node2->aux = &last;
- if (!edge->caller->aux)
- {
- if (!edge->caller->callers)
- edge->caller->aux = &last;
- else
- edge->caller->aux = edge->caller->callers;
- stack[stack_size++] = node2;
- node2 = edge->caller;
- break;
- }
- }
- if (node2->aux == &last)
- {
- order[order_pos++] = node2;
- if (stack_size)
- node2 = stack[--stack_size];
- else
- node2 = NULL;
- }
- }
- }
- free (stack);
- return order_pos;
-}
-
-
-/* Perform reachability analysis and reclaim all unreachable nodes.
- This function also remove unneeded bodies of extern inline functions
- and thus needs to be done only after inlining decisions has been made. */
-static bool
-cgraph_remove_unreachable_nodes (void)
-{
- struct cgraph_node *first = (void *) 1;
- struct cgraph_node *node;
- bool changed = false;
- int insns = 0;
-
-#ifdef ENABLE_CHECKING
- verify_cgraph ();
-#endif
- if (cgraph_dump_file)
- fprintf (cgraph_dump_file, "\nReclaiming functions:");
#ifdef ENABLE_CHECKING
- for (node = cgraph_nodes; node; node = node->next)
- gcc_assert (!node->aux);
-#endif
- for (node = cgraph_nodes; node; node = node->next)
- if (node->needed && !node->global.inlined_to
- && (!DECL_EXTERNAL (node->decl) || !node->analyzed))
- {
- node->aux = first;
- first = node;
- }
- else
- gcc_assert (!node->aux);
-
- /* Perform reachability analysis. As a special case do not consider
- extern inline functions not inlined as live because we won't output
- them at all. */
- while (first != (void *) 1)
- {
- struct cgraph_edge *e;
- node = first;
- first = first->aux;
-
- for (e = node->callees; e; e = e->next_callee)
- if (!e->callee->aux
- && node->analyzed
- && (!e->inline_failed || !e->callee->analyzed
- || !DECL_EXTERNAL (e->callee->decl)))
- {
- e->callee->aux = first;
- first = e->callee;
- }
- }
-
- /* Remove unreachable nodes. Extern inline functions need special care;
- Unreachable extern inline functions shall be removed.
- Reachable extern inline functions we never inlined shall get their bodies
- eliminated.
- Reachable extern inline functions we sometimes inlined will be turned into
- unanalyzed nodes so they look like for true extern functions to the rest
- of code. Body of such functions is released via remove_node once the
- inline clones are eliminated. */
- for (node = cgraph_nodes; node; node = node->next)
- {
- if (!node->aux)
+ if (check_same_comdat_groups)
+ for (node = cgraph_nodes; node; node = node->next)
+ if (node->same_comdat_group && !node->process)
{
- int local_insns;
tree decl = node->decl;
-
- node->global.inlined_to = NULL;
- if (DECL_STRUCT_FUNCTION (decl))
- local_insns = node->local.self_insns;
- else
- local_insns = 0;
- if (cgraph_dump_file)
- fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
- if (!node->analyzed || !DECL_EXTERNAL (node->decl))
- cgraph_remove_node (node);
- else
+ if (!node->global.inlined_to
+ && gimple_has_body_p (decl)
+ && !DECL_EXTERNAL (decl))
{
- struct cgraph_edge *e;
-
- for (e = node->callers; e; e = e->next_caller)
- if (e->caller->aux)
- break;
- if (e || node->needed)
- {
- struct cgraph_node *clone;
-
- for (clone = node->next_clone; clone;
- clone = clone->next_clone)
- if (clone->aux)
- break;
- if (!clone)
- {
- DECL_SAVED_TREE (node->decl) = NULL;
- DECL_STRUCT_FUNCTION (node->decl) = NULL;
- DECL_INITIAL (node->decl) = error_mark_node;
- }
- cgraph_node_remove_callees (node);
- node->analyzed = false;
- }
- else
- cgraph_remove_node (node);
+ dump_cgraph_node (stderr, node);
+ internal_error ("failed to reclaim unneeded function");
}
- if (!DECL_SAVED_TREE (decl))
- insns += local_insns;
- changed = true;
}
- }
- for (node = cgraph_nodes; node; node = node->next)
- node->aux = NULL;
- if (cgraph_dump_file)
- fprintf (cgraph_dump_file, "\nReclaimed %i insns", insns);
- return changed;
-}
-
-/* Estimate size of the function after inlining WHAT into TO. */
-
-static int
-cgraph_estimate_size_after_inlining (int times, struct cgraph_node *to,
- struct cgraph_node *what)
-{
- tree fndecl = what->decl;
- tree arg;
- int call_insns = PARAM_VALUE (PARAM_INLINE_CALL_COST);
- for (arg = DECL_ARGUMENTS (fndecl); arg; arg = TREE_CHAIN (arg))
- call_insns += estimate_move_cost (TREE_TYPE (arg));
- return (what->global.insns - call_insns) * times + to->global.insns;
-}
-
-/* Estimate the growth caused by inlining NODE into all callees. */
-
-static int
-cgraph_estimate_growth (struct cgraph_node *node)
-{
- int growth = 0;
- struct cgraph_edge *e;
-
- for (e = node->callers; e; e = e->next_caller)
- if (e->inline_failed)
- growth += (cgraph_estimate_size_after_inlining (1, e->caller, node)
- - e->caller->global.insns);
-
- /* ??? Wrong for self recursive functions or cases where we decide to not
- inline for different reasons, but it is not big deal as in that case
- we will keep the body around, but we will also avoid some inlining. */
- if (!node->needed && !DECL_EXTERNAL (node->decl))
- growth -= node->global.insns;
-
- return growth;
-}
-
-/* E is expected to be an edge being inlined. Clone destination node of
- the edge and redirect it to the new clone.
- DUPLICATE is used for bookkeeping on whether we are actually creating new
- clones or re-using node originally representing out-of-line function call.
- */
-void
-cgraph_clone_inlined_nodes (struct cgraph_edge *e, bool duplicate)
-{
- struct cgraph_node *n;
-
- /* We may eliminate the need for out-of-line copy to be output. In that
- case just go ahead and re-use it. */
- if (!e->callee->callers->next_caller
- && (!e->callee->needed || DECL_EXTERNAL (e->callee->decl))
- && duplicate
- && flag_unit_at_a_time)
- {
- gcc_assert (!e->callee->global.inlined_to);
- if (!DECL_EXTERNAL (e->callee->decl))
- overall_insns -= e->callee->global.insns, nfunctions_inlined++;
- duplicate = 0;
- }
- else if (duplicate)
- {
- n = cgraph_clone_node (e->callee);
- cgraph_redirect_edge_callee (e, n);
- }
-
- if (e->caller->global.inlined_to)
- e->callee->global.inlined_to = e->caller->global.inlined_to;
- else
- e->callee->global.inlined_to = e->caller;
-
- /* Recursively clone all bodies. */
- for (e = e->callee->callees; e; e = e->next_callee)
- if (!e->inline_failed)
- cgraph_clone_inlined_nodes (e, duplicate);
-}
-
-/* Mark edge E as inlined and update callgraph accordingly. */
-
-void
-cgraph_mark_inline_edge (struct cgraph_edge *e)
-{
- int old_insns = 0, new_insns = 0;
- struct cgraph_node *to = NULL, *what;
-
- gcc_assert (e->inline_failed);
- e->inline_failed = NULL;
-
- if (!e->callee->global.inlined && flag_unit_at_a_time)
- DECL_POSSIBLY_INLINED (e->callee->decl) = true;
- e->callee->global.inlined = true;
-
- cgraph_clone_inlined_nodes (e, true);
-
- what = e->callee;
-
- /* Now update size of caller and all functions caller is inlined into. */
- for (;e && !e->inline_failed; e = e->caller->callers)
- {
- old_insns = e->caller->global.insns;
- new_insns = cgraph_estimate_size_after_inlining (1, e->caller,
- what);
- gcc_assert (new_insns >= 0);
- to = e->caller;
- to->global.insns = new_insns;
- }
- gcc_assert (what->global.inlined_to == to);
- if (new_insns > old_insns)
- overall_insns += new_insns - old_insns;
- ncalls_inlined++;
-}
-
-/* Mark all calls of EDGE->CALLEE inlined into EDGE->CALLER.
- Return following unredirected edge in the list of callers
- of EDGE->CALLEE */
-
-static struct cgraph_edge *
-cgraph_mark_inline (struct cgraph_edge *edge)
-{
- struct cgraph_node *to = edge->caller;
- struct cgraph_node *what = edge->callee;
- struct cgraph_edge *e, *next;
- int times = 0;
-
- /* Look for all calls, mark them inline and clone recursively
- all inlined functions. */
- for (e = what->callers; e; e = next)
- {
- next = e->next_caller;
- if (e->caller == to && e->inline_failed)
- {
- cgraph_mark_inline_edge (e);
- if (e == edge)
- edge = next;
- times++;
- }
- }
- gcc_assert (times);
- return edge;
-}
-
-/* Return false when inlining WHAT into TO is not good idea
- as it would cause too large growth of function bodies. */
-
-static bool
-cgraph_check_inline_limits (struct cgraph_node *to, struct cgraph_node *what,
- const char **reason)
-{
- int times = 0;
- struct cgraph_edge *e;
- int newsize;
- int limit;
-
- if (to->global.inlined_to)
- to = to->global.inlined_to;
-
- for (e = to->callees; e; e = e->next_callee)
- if (e->callee == what)
- times++;
-
- /* When inlining large function body called once into small function,
- take the inlined function as base for limiting the growth. */
- if (to->local.self_insns > what->local.self_insns)
- limit = to->local.self_insns;
- else
- limit = what->local.self_insns;
-
- limit += limit * PARAM_VALUE (PARAM_LARGE_FUNCTION_GROWTH) / 100;
-
- newsize = cgraph_estimate_size_after_inlining (times, to, what);
- if (newsize > PARAM_VALUE (PARAM_LARGE_FUNCTION_INSNS)
- && newsize > limit)
- {
- if (reason)
- *reason = N_("--param large-function-growth limit reached");
- return false;
- }
- return true;
-}
-
-/* Return true when function N is small enough to be inlined. */
-
-static bool
-cgraph_default_inline_p (struct cgraph_node *n)
-{
- if (!DECL_INLINE (n->decl) || !DECL_SAVED_TREE (n->decl))
- return false;
- if (DECL_DECLARED_INLINE_P (n->decl))
- return n->global.insns < MAX_INLINE_INSNS_SINGLE;
- else
- return n->global.insns < MAX_INLINE_INSNS_AUTO;
-}
-
-/* Return true when inlining WHAT would create recursive inlining.
- We call recursive inlining all cases where same function appears more than
- once in the single recursion nest path in the inline graph. */
-
-static bool
-cgraph_recursive_inlining_p (struct cgraph_node *to,
- struct cgraph_node *what,
- const char **reason)
-{
- bool recursive;
- if (to->global.inlined_to)
- recursive = what->decl == to->global.inlined_to->decl;
- else
- recursive = what->decl == to->decl;
- /* Marking recursive function inline has sane semantic and thus we should
- not warn on it. */
- if (recursive && reason)
- *reason = (what->local.disregard_inline_limits
- ? N_("recursive inlining") : "");
- return recursive;
-}
-
-/* Recompute heap nodes for each of callees. */
-static void
-update_callee_keys (fibheap_t heap, struct fibnode **heap_node,
- struct cgraph_node *node)
-{
- struct cgraph_edge *e;
-
- for (e = node->callees; e; e = e->next_callee)
- if (e->inline_failed && heap_node[e->callee->uid])
- fibheap_replace_key (heap, heap_node[e->callee->uid],
- cgraph_estimate_growth (e->callee));
- else if (!e->inline_failed)
- update_callee_keys (heap, heap_node, e->callee);
-}
-
-/* Enqueue all recursive calls from NODE into queue linked via aux pointers
- in between FIRST and LAST. WHERE is used for bookkeeping while looking
- int calls inlined within NODE. */
-static void
-lookup_recursive_calls (struct cgraph_node *node, struct cgraph_node *where,
- struct cgraph_edge **first, struct cgraph_edge **last)
-{
- struct cgraph_edge *e;
- for (e = where->callees; e; e = e->next_callee)
- if (e->callee == node)
- {
- if (!*first)
- *first = e;
- else
- (*last)->aux = e;
- *last = e;
- }
- for (e = where->callees; e; e = e->next_callee)
- if (!e->inline_failed)
- lookup_recursive_calls (node, e->callee, first, last);
-}
-
-/* Decide on recursive inlining: in the case function has recursive calls,
- inline until body size reaches given argument. */
-static void
-cgraph_decide_recursive_inlining (struct cgraph_node *node)
-{
- int limit = PARAM_VALUE (PARAM_MAX_INLINE_INSNS_RECURSIVE_AUTO);
- int max_depth = PARAM_VALUE (PARAM_MAX_INLINE_RECURSIVE_DEPTH_AUTO);
- struct cgraph_edge *first_call = NULL, *last_call = NULL;
- struct cgraph_edge *last_in_current_depth;
- struct cgraph_edge *e;
- struct cgraph_node *master_clone;
- int depth = 0;
- int n = 0;
-
- if (DECL_DECLARED_INLINE_P (node->decl))
- {
- limit = PARAM_VALUE (PARAM_MAX_INLINE_INSNS_RECURSIVE);
- max_depth = PARAM_VALUE (PARAM_MAX_INLINE_RECURSIVE_DEPTH);
- }
-
- /* Make sure that function is small enough to be considered for inlining. */
- if (!max_depth
- || cgraph_estimate_size_after_inlining (1, node, node) >= limit)
- return;
- lookup_recursive_calls (node, node, &first_call, &last_call);
- if (!first_call)
- return;
-
- if (cgraph_dump_file)
- fprintf (cgraph_dump_file,
- "\nPerforming recursive inlining on %s\n",
- cgraph_node_name (node));
-
- /* We need original clone to copy around. */
- master_clone = cgraph_clone_node (node);
- master_clone->needed = true;
- for (e = master_clone->callees; e; e = e->next_callee)
- if (!e->inline_failed)
- cgraph_clone_inlined_nodes (e, true);
-
- /* Do the inlining and update list of recursive call during process. */
- last_in_current_depth = last_call;
- while (first_call
- && cgraph_estimate_size_after_inlining (1, node, master_clone) <= limit)
- {
- struct cgraph_edge *curr = first_call;
-
- first_call = first_call->aux;
- curr->aux = NULL;
-
- cgraph_redirect_edge_callee (curr, master_clone);
- cgraph_mark_inline_edge (curr);
- lookup_recursive_calls (node, curr->callee, &first_call, &last_call);
-
- if (last_in_current_depth
- && ++depth >= max_depth)
- break;
- n++;
- }
-
- /* Cleanup queue pointers. */
- while (first_call)
- {
- struct cgraph_edge *next = first_call->aux;
- first_call->aux = NULL;
- first_call = next;
- }
- if (cgraph_dump_file)
- fprintf (cgraph_dump_file,
- "\n Inlined %i times, body grown from %i to %i insns\n", n,
- master_clone->global.insns, node->global.insns);
-
- /* Remove master clone we used for inlining. We rely that clones inlined
- into master clone gets queued just before master clone so we don't
- need recursion. */
- for (node = cgraph_nodes; node != master_clone;
- node = node->next)
- if (node->global.inlined_to == master_clone)
- cgraph_remove_node (node);
- cgraph_remove_node (master_clone);
-}
-
-/* Set inline_failed for all callers of given function to REASON. */
-
-static void
-cgraph_set_inline_failed (struct cgraph_node *node, const char *reason)
-{
- struct cgraph_edge *e;
-
- if (cgraph_dump_file)
- fprintf (cgraph_dump_file, "Inlining failed: %s\n", reason);
- for (e = node->callers; e; e = e->next_caller)
- if (e->inline_failed)
- e->inline_failed = reason;
+#endif
}
-/* We use greedy algorithm for inlining of small functions:
- All inline candidates are put into prioritized heap based on estimated
- growth of the overall number of instructions and then update the estimates.
-
- INLINED and INLINED_CALEES are just pointers to arrays large enough
- to be passed to cgraph_inlined_into and cgraph_inlined_callees. */
+/* DECL is FUNCTION_DECL. Initialize data structures so DECL is a function
+ in lowered gimple form.
+
+ Set current_function_decl and cfun to the newly constructed empty function
+ body. Return the basic block in the function body. */
-static void
-cgraph_decide_inlining_of_small_functions (void)
+static basic_block
+init_lowered_empty_function (tree decl)
{
- struct cgraph_node *node;
- fibheap_t heap = fibheap_new ();
- struct fibnode **heap_node =
- xcalloc (cgraph_max_uid, sizeof (struct fibnode *));
- int max_insns = ((HOST_WIDEST_INT) initial_insns
- * (100 + PARAM_VALUE (PARAM_INLINE_UNIT_GROWTH)) / 100);
-
- /* Put all inline candidates into the heap. */
+ basic_block bb;
- for (node = cgraph_nodes; node; node = node->next)
- {
- if (!node->local.inlinable || !node->callers
- || node->local.disregard_inline_limits)
- continue;
-
- if (!cgraph_default_inline_p (node))
- {
- cgraph_set_inline_failed (node,
- N_("--param max-inline-insns-single limit reached"));
- continue;
- }
- heap_node[node->uid] =
- fibheap_insert (heap, cgraph_estimate_growth (node), node);
- }
-
- if (cgraph_dump_file)
- fprintf (cgraph_dump_file, "\nDeciding on smaller functions:\n");
- while (overall_insns <= max_insns && (node = fibheap_extract_min (heap)))
- {
- struct cgraph_edge *e, *next;
- int old_insns = overall_insns;
-
- heap_node[node->uid] = NULL;
- if (cgraph_dump_file)
- fprintf (cgraph_dump_file,
- "\nConsidering %s with %i insns\n"
- " Estimated growth is %+i insns.\n",
- cgraph_node_name (node), node->global.insns,
- cgraph_estimate_growth (node));
- if (!cgraph_default_inline_p (node))
- {
- cgraph_set_inline_failed (node,
- N_("--param max-inline-insns-single limit reached after inlining into the callee"));
- continue;
- }
- for (e = node->callers; e; e = next)
- {
- next = e->next_caller;
- if (e->inline_failed)
- {
- struct cgraph_node *where;
-
- if (cgraph_recursive_inlining_p (e->caller, e->callee,
- &e->inline_failed)
- || !cgraph_check_inline_limits (e->caller, e->callee,
- &e->inline_failed))
- {
- if (cgraph_dump_file)
- fprintf (cgraph_dump_file, " Not inlining into %s:%s.\n",
- cgraph_node_name (e->caller), e->inline_failed);
- continue;
- }
- next = cgraph_mark_inline (e);
- where = e->caller;
- if (where->global.inlined_to)
- where = where->global.inlined_to;
-
- if (heap_node[where->uid])
- fibheap_replace_key (heap, heap_node[where->uid],
- cgraph_estimate_growth (where));
-
- if (cgraph_dump_file)
- fprintf (cgraph_dump_file,
- " Inlined into %s which now has %i insns.\n",
- cgraph_node_name (e->caller),
- e->caller->global.insns);
- }
- }
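+ /* Set up an empty struct function in SSA form: register the GIMPLE CFG
+ hooks, create an empty CFG and initialize the SSA operand machinery. */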
+ current_function_decl = decl;
+ allocate_struct_function (decl, false);
+ gimple_register_cfg_hooks ();
+ init_empty_tree_cfg ();
+ init_tree_ssa (cfun);
+ init_ssa_operands ();
+ cfun->gimple_df->in_ssa_p = true;
+ DECL_INITIAL (decl) = make_node (BLOCK);
- cgraph_decide_recursive_inlining (node);
+ DECL_SAVED_TREE (decl) = error_mark_node;
+ cfun->curr_properties |=
+ (PROP_gimple_lcf | PROP_gimple_leh | PROP_cfg | PROP_referenced_vars |
+ PROP_ssa);
- /* Similarly all functions called by the function we just inlined
- are now called more times; update keys. */
- update_callee_keys (heap, heap_node, node);
+ /* Create BB for body of the function and connect it properly. */
+ bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR);
+ make_edge (ENTRY_BLOCK_PTR, bb, 0);
+ make_edge (bb, EXIT_BLOCK_PTR, 0);
- if (cgraph_dump_file)
- fprintf (cgraph_dump_file,
- " Inlined for a net change of %+i insns.\n",
- overall_insns - old_insns);
- }
- while ((node = fibheap_extract_min (heap)) != NULL)
- if (!node->local.disregard_inline_limits)
- cgraph_set_inline_failed (node, N_("--param inline-unit-growth limit reached"));
- fibheap_delete (heap);
- free (heap_node);
+ return bb;
}
-/* Decide on the inlining. We do so in the topological order to avoid
- expenses on updating data structures. */
+/* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
+ offset indicated by VIRTUAL_OFFSET, if that is
+ non-null. THIS_ADJUSTING is nonzero for a this adjusting thunk and
+ zero for a result adjusting thunk. */
-static void
-cgraph_decide_inlining (void)
+static tree
+thunk_adjust (gimple_stmt_iterator * bsi,
+ tree ptr, bool this_adjusting,
+ HOST_WIDE_INT fixed_offset, tree virtual_offset)
{
- struct cgraph_node *node;
- int nnodes;
- struct cgraph_node **order =
- xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
- int old_insns = 0;
- int i;
+ gimple stmt;
+ tree ret;
- for (node = cgraph_nodes; node; node = node->next)
- initial_insns += node->local.self_insns;
- overall_insns = initial_insns;
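+ /* For a this-adjusting thunk, apply the constant offset to PTR first. */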
+ if (this_adjusting
+ && fixed_offset != 0)
+ {
+ stmt = gimple_build_assign (ptr,
+ fold_build2_loc (input_location,
+ POINTER_PLUS_EXPR,
+ TREE_TYPE (ptr), ptr,
+ size_int (fixed_offset)));
+ gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
+ }
- nnodes = cgraph_postorder (order);
+ /* If there's a virtual offset, look up that value in the vtable and
+ adjust the pointer again. */
+ if (virtual_offset)
+ {
+ tree vtabletmp;
+ tree vtabletmp2;
+ tree vtabletmp3;
+ tree offsettmp;
- if (cgraph_dump_file)
- fprintf (cgraph_dump_file,
- "\nDeciding on inlining. Starting with %i insns.\n",
- initial_insns);
+ if (!vtable_entry_type)
+ {
+ tree vfunc_type = make_node (FUNCTION_TYPE);
+ TREE_TYPE (vfunc_type) = integer_type_node;
+ TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
+ layout_type (vfunc_type);
- for (node = cgraph_nodes; node; node = node->next)
- node->aux = 0;
+ vtable_entry_type = build_pointer_type (vfunc_type);
+ }
- if (cgraph_dump_file)
- fprintf (cgraph_dump_file, "\nInlining always_inline functions:\n");
+ vtabletmp =
+ create_tmp_var (build_pointer_type
+ (build_pointer_type (vtable_entry_type)), "vptr");
+
+ /* The vptr is always at offset zero in the object. */
+ stmt = gimple_build_assign (vtabletmp,
+ build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
+ ptr));
+ gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
+ mark_symbols_for_renaming (stmt);
+ find_referenced_vars_in (stmt);
+
+ /* Form the vtable address. */
+ vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
+ "vtableaddr");
+ stmt = gimple_build_assign (vtabletmp2,
+ build1 (INDIRECT_REF,
+ TREE_TYPE (vtabletmp2), vtabletmp));
+ gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
+ mark_symbols_for_renaming (stmt);
+ find_referenced_vars_in (stmt);
+
+ /* Find the entry with the vcall offset. */
+ stmt = gimple_build_assign (vtabletmp2,
+ fold_build2_loc (input_location,
+ POINTER_PLUS_EXPR,
+ TREE_TYPE (vtabletmp2),
+ vtabletmp2,
+ fold_convert (sizetype,
+ virtual_offset)));
+ gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
+
+ /* Get the offset itself. */
+ vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
+ "vcalloffset");
+ stmt = gimple_build_assign (vtabletmp3,
+ build1 (INDIRECT_REF,
+ TREE_TYPE (vtabletmp3),
+ vtabletmp2));
+ gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
+ mark_symbols_for_renaming (stmt);
+ find_referenced_vars_in (stmt);
+
+ /* Cast to sizetype. */
+ offsettmp = create_tmp_var (sizetype, "offset");
+ stmt = gimple_build_assign (offsettmp, fold_convert (sizetype, vtabletmp3));
+ gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
+ mark_symbols_for_renaming (stmt);
+ find_referenced_vars_in (stmt);
+
+ /* Adjust the `this' pointer. */
+ ptr = fold_build2_loc (input_location,
+ POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
+ offsettmp);
+ }
- /* In the first pass mark all always_inline edges. Do this with a priority
- so none of our later choices will make this impossible. */
- for (i = nnodes - 1; i >= 0; i--)
+ if (!this_adjusting
+ && fixed_offset != 0)
+ /* Adjust the pointer by the constant. */
{
- struct cgraph_edge *e, *next;
-
- node = order[i];
+ tree ptrtmp;
- if (!node->local.disregard_inline_limits)
- continue;
- if (cgraph_dump_file)
- fprintf (cgraph_dump_file,
- "\nConsidering %s %i insns (always inline)\n",
- cgraph_node_name (node), node->global.insns);
- old_insns = overall_insns;
- for (e = node->callers; e; e = next)
- {
- next = e->next_caller;
- if (!e->inline_failed)
- continue;
- if (cgraph_recursive_inlining_p (e->caller, e->callee,
- &e->inline_failed))
- continue;
- cgraph_mark_inline_edge (e);
- if (cgraph_dump_file)
- fprintf (cgraph_dump_file,
- " Inlined into %s which now has %i insns.\n",
- cgraph_node_name (e->caller),
- e->caller->global.insns);
+ if (TREE_CODE (ptr) == VAR_DECL)
+ ptrtmp = ptr;
+ else
+ {
+ ptrtmp = create_tmp_var (TREE_TYPE (ptr), "ptr");
+ stmt = gimple_build_assign (ptrtmp, ptr);
+ gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
+ mark_symbols_for_renaming (stmt);
+ find_referenced_vars_in (stmt);
}
- if (cgraph_dump_file)
- fprintf (cgraph_dump_file,
- " Inlined for a net change of %+i insns.\n",
- overall_insns - old_insns);
+ ptr = fold_build2_loc (input_location,
+ POINTER_PLUS_EXPR, TREE_TYPE (ptrtmp), ptrtmp,
+ size_int (fixed_offset));
}
- if (!flag_really_no_inline)
- {
- cgraph_decide_inlining_of_small_functions ();
+ /* Emit the statement and gimplify the adjustment expression. */
+ ret = create_tmp_var (TREE_TYPE (ptr), "adjusted_this");
+ stmt = gimple_build_assign (ret, ptr);
+ mark_symbols_for_renaming (stmt);
+ find_referenced_vars_in (stmt);
+ gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
- if (cgraph_dump_file)
- fprintf (cgraph_dump_file, "\nDeciding on functions called once:\n");
+ return ret;
+}
- /* And finally decide what functions are called once. */
+/* Produce assembler for thunk NODE. */
- for (i = nnodes - 1; i >= 0; i--)
+static void
+assemble_thunk (struct cgraph_node *node)
+{
+ bool this_adjusting = node->thunk.this_adjusting;
+ HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
+ HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
+ tree virtual_offset = NULL;
+ tree alias = node->thunk.alias;
+ tree thunk_fndecl = node->decl;
+ tree a = DECL_ARGUMENTS (thunk_fndecl);
+
+ current_function_decl = thunk_fndecl;
+
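+ /* Let the target emit the thunk directly when it knows how to output a
+ this-adjusting thunk with these offsets; otherwise build it as GIMPLE. */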
+ if (this_adjusting
+ && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
+ virtual_value, alias))
+ {
+ const char *fnname;
+ tree fn_block;
+
+ DECL_RESULT (thunk_fndecl)
+ = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
+ RESULT_DECL, 0, integer_type_node);
+ fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
+
+ /* The back end expects DECL_INITIAL to contain a BLOCK, so we
+ create one. */
+ fn_block = make_node (BLOCK);
+ BLOCK_VARS (fn_block) = a;
+ DECL_INITIAL (thunk_fndecl) = fn_block;
+ init_function_start (thunk_fndecl);
+ cfun->is_thunk = 1;
+ assemble_start_function (thunk_fndecl, fnname);
+
+ targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
+ fixed_offset, virtual_value, alias);
+
+ assemble_end_function (thunk_fndecl, fnname);
+ init_insn_lengths ();
+ free_after_compilation (cfun);
+ set_cfun (NULL);
+ TREE_ASM_WRITTEN (thunk_fndecl) = 1;
+ }
+ else
+ {
+ tree restype;
+ basic_block bb, then_bb, else_bb, return_bb;
+ gimple_stmt_iterator bsi;
+ int nargs = 0;
+ tree arg;
+ int i;
+ tree resdecl;
+ tree restmp = NULL;
+ VEC(tree, heap) *vargs;
+
+ gimple call;
+ gimple ret;
+
+ DECL_IGNORED_P (thunk_fndecl) = 1;
+ bitmap_obstack_initialize (NULL);
+
+ if (node->thunk.virtual_offset_p)
+ virtual_offset = size_int (virtual_value);
+
+ /* Build the return declaration for the function. */
+ restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
+ if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
{
- node = order[i];
+ resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
+ DECL_ARTIFICIAL (resdecl) = 1;
+ DECL_IGNORED_P (resdecl) = 1;
+ DECL_RESULT (thunk_fndecl) = resdecl;
+ }
+ else
+ resdecl = DECL_RESULT (thunk_fndecl);
- if (node->callers && !node->callers->next_caller && !node->needed
- && node->local.inlinable && node->callers->inline_failed
- && !DECL_EXTERNAL (node->decl) && !DECL_COMDAT (node->decl))
+ bb = then_bb = else_bb = return_bb = init_lowered_empty_function (thunk_fndecl);
+
+ bsi = gsi_start_bb (bb);
+
+ /* Build call to the function being thunked. */
+ if (!VOID_TYPE_P (restype))
+ {
+ if (!is_gimple_reg_type (restype))
{
- bool ok = true;
- struct cgraph_node *node1;
-
- /* Verify that we won't duplicate the caller. */
- for (node1 = node->callers->caller;
- node1->callers && !node1->callers->inline_failed
- && ok; node1 = node1->callers->caller)
- if (node1->callers->next_caller || node1->needed)
- ok = false;
- if (ok)
- {
- if (cgraph_dump_file)
- fprintf (cgraph_dump_file,
- "\nConsidering %s %i insns.\n"
- " Called once from %s %i insns.\n",
- cgraph_node_name (node), node->global.insns,
- cgraph_node_name (node->callers->caller),
- node->callers->caller->global.insns);
+ restmp = resdecl;
+ cfun->local_decls = tree_cons (NULL_TREE, restmp, cfun->local_decls);
+ BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
+ }
+ else
+ restmp = create_tmp_var_raw (restype, "retval");
+ }
- old_insns = overall_insns;
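+ /* Collect the call arguments, replacing the incoming this pointer by its
+ adjusted value for a this-adjusting thunk. */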
+ for (arg = a; arg; arg = TREE_CHAIN (arg))
+ nargs++;
+ vargs = VEC_alloc (tree, heap, nargs);
+ if (this_adjusting)
+ VEC_quick_push (tree, vargs,
+ thunk_adjust (&bsi,
+ a, 1, fixed_offset,
+ virtual_offset));
+ else
+ VEC_quick_push (tree, vargs, a);
+ for (i = 1, arg = TREE_CHAIN (a); i < nargs; i++, arg = TREE_CHAIN (arg))
+ VEC_quick_push (tree, vargs, arg);
+ call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
+ VEC_free (tree, heap, vargs);
+ gimple_call_set_cannot_inline (call, true);
+ gimple_call_set_from_thunk (call, true);
+ if (restmp)
+ gimple_call_set_lhs (call, restmp);
+ gsi_insert_after (&bsi, call, GSI_NEW_STMT);
+ mark_symbols_for_renaming (call);
+ find_referenced_vars_in (call);
+ update_stmt (call);
+
+ if (restmp && !this_adjusting)
+ {
+ tree true_label = NULL_TREE;
+
+ if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
+ {
+ gimple stmt;
+ /* If the return type is a pointer, we need to
+ protect against NULL. We know there will be an
+ adjustment, because that's why we're emitting a
+ thunk. */
+ then_bb = create_basic_block (NULL, (void *) 0, bb);
+ return_bb = create_basic_block (NULL, (void *) 0, then_bb);
+ else_bb = create_basic_block (NULL, (void *) 0, else_bb);
+ remove_edge (single_succ_edge (bb));
+ true_label = gimple_block_label (then_bb);
+ stmt = gimple_build_cond (NE_EXPR, restmp,
+ fold_convert (TREE_TYPE (restmp),
+ integer_zero_node),
+ NULL_TREE, NULL_TREE);
+ gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
+ make_edge (bb, then_bb, EDGE_TRUE_VALUE);
+ make_edge (bb, else_bb, EDGE_FALSE_VALUE);
+ make_edge (return_bb, EXIT_BLOCK_PTR, 0);
+ make_edge (then_bb, return_bb, EDGE_FALLTHRU);
+ make_edge (else_bb, return_bb, EDGE_FALLTHRU);
+ bsi = gsi_last_bb (then_bb);
+ }
- if (cgraph_check_inline_limits (node->callers->caller, node,
- NULL))
- {
- cgraph_mark_inline (node->callers);
- if (cgraph_dump_file)
- fprintf (cgraph_dump_file,
- " Inlined into %s which now has %i insns"
- " for a net change of %+i insns.\n",
- cgraph_node_name (node->callers->caller),
- node->callers->caller->global.insns,
- overall_insns - old_insns);
- }
- else
- {
- if (cgraph_dump_file)
- fprintf (cgraph_dump_file,
- " Inline limit reached, not inlined.\n");
- }
- }
+ restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
+ fixed_offset, virtual_offset);
+ if (true_label)
+ {
+ gimple stmt;
+ bsi = gsi_last_bb (else_bb);
+ stmt = gimple_build_assign (restmp, fold_convert (TREE_TYPE (restmp),
+ integer_zero_node));
+ gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
+ bsi = gsi_last_bb (return_bb);
}
}
- }
+ else
+ gimple_call_set_tail (call, true);
- /* We will never output extern functions we didn't inline.
- ??? Perhaps we can prevent accounting of growth of external
- inline functions. */
- cgraph_remove_unreachable_nodes ();
+ /* Build return value. */
+ ret = gimple_build_return (restmp);
+ gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
- if (cgraph_dump_file)
- fprintf (cgraph_dump_file,
- "\nInlined %i calls, eliminated %i functions, "
- "%i insns turned to %i insns.\n\n",
- ncalls_inlined, nfunctions_inlined, initial_insns,
- overall_insns);
- free (order);
+ delete_unreachable_blocks ();
+ update_ssa (TODO_update_ssa);
+
+ cgraph_remove_same_body_alias (node);
+ /* Since we want to emit the thunk, we explicitly mark its name as
+ referenced. */
+ mark_decl_referenced (thunk_fndecl);
+ cgraph_add_new_function (thunk_fndecl, true);
+ bitmap_obstack_release (NULL);
+ }
+ current_function_decl = NULL;
}
-/* Decide on the inlining. We do so in the topological order to avoid
- expenses on updating data structures. */
+/* Expand function specified by NODE. */
static void
-cgraph_decide_inlining_incrementally (struct cgraph_node *node)
+cgraph_expand_function (struct cgraph_node *node)
{
- struct cgraph_edge *e;
+ tree decl = node->decl;
- /* First of all look for always inline functions. */
- for (e = node->callees; e; e = e->next_callee)
- if (e->callee->local.disregard_inline_limits
- && e->inline_failed
- && !cgraph_recursive_inlining_p (node, e->callee, &e->inline_failed)
- /* ??? It is possible that renaming variable removed the function body
- in duplicate_decls. See gcc.c-torture/compile/20011119-2.c */
- && DECL_SAVED_TREE (e->callee->decl))
- cgraph_mark_inline (e);
-
- /* Now do the automatic inlining. */
- if (!flag_really_no_inline)
- for (e = node->callees; e; e = e->next_callee)
- if (e->callee->local.inlinable
- && e->inline_failed
- && !e->callee->local.disregard_inline_limits
- && !cgraph_recursive_inlining_p (node, e->callee, &e->inline_failed)
- && cgraph_check_inline_limits (node, e->callee, &e->inline_failed)
- && DECL_SAVED_TREE (e->callee->decl))
- {
- if (cgraph_default_inline_p (e->callee))
- cgraph_mark_inline (e);
+ /* We ought to not compile any inline clones. */
+ gcc_assert (!node->global.inlined_to);
+
+ announce_function (decl);
+ node->process = 0;
+
+ gcc_assert (node->lowered);
+
+ /* Generate RTL for the body of DECL. */
+ tree_rest_of_compilation (decl);
+
+ /* Make sure that BE didn't give up on compiling. */
+ gcc_assert (TREE_ASM_WRITTEN (decl));
+ current_function_decl = NULL;
+ if (node->same_body)
+ {
+ struct cgraph_node *alias, *next;
+ bool saved_alias = node->alias;
+ for (alias = node->same_body;
+ alias && alias->next; alias = alias->next)
+ ;
+ /* Walk aliases in the order they were created; it is possible that
+ thunks refer to the aliases made earlier. */
+ for (; alias; alias = next)
+ {
+ next = alias->previous;
+ if (!alias->thunk.thunk_p)
+ assemble_alias (alias->decl,
+ DECL_ASSEMBLER_NAME (alias->thunk.alias));
else
- e->inline_failed
- = N_("--param max-inline-insns-single limit reached");
+ assemble_thunk (alias);
}
-}
+ node->alias = saved_alias;
+ }
+ gcc_assert (!cgraph_preserve_function_body_p (decl));
+ cgraph_release_function_body (node);
+ /* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
+ points to the dead function body. */
+ cgraph_node_remove_callees (node);
+ cgraph_function_flags_ready = true;
+}
/* Return true when CALLER_DECL should be inlined into CALLEE_DECL. */
bool
-cgraph_inline_p (struct cgraph_edge *e, const char **reason)
+cgraph_inline_p (struct cgraph_edge *e, cgraph_inline_failed_t *reason)
{
*reason = e->inline_failed;
return !e->inline_failed;
cgraph_expand_all_functions (void)
{
struct cgraph_node *node;
- struct cgraph_node **order =
- xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
- int order_pos = 0, new_order_pos = 0;
+ struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
+ int order_pos, new_order_pos = 0;
int i;
order_pos = cgraph_postorder (order);
/* Garbage collector may remove inline clones we eliminate during
optimization. So we must be sure to not reference them. */
for (i = 0; i < order_pos; i++)
- if (order[i]->output)
+ if (order[i]->process)
order[new_order_pos++] = order[i];
for (i = new_order_pos - 1; i >= 0; i--)
{
node = order[i];
- if (node->output)
+ if (node->process)
{
gcc_assert (node->reachable);
- node->output = 0;
+ node->process = 0;
cgraph_expand_function (node);
}
}
+ cgraph_process_new_functions ();
+
free (order);
+
}
-/* Mark all local functions.
-
- A local function is one whose calls can occur only in the current
- compilation unit and all its calls are explicit, so we can change
- its calling convention. We simply mark all static functions whose
- address is not taken as local. */
+/* This is used to sort the node types by the cgraph order number. */
+
+enum cgraph_order_sort_kind
+{
+ ORDER_UNDEFINED = 0,
+ ORDER_FUNCTION,
+ ORDER_VAR,
+ ORDER_ASM
+};
+
+struct cgraph_order_sort
+{
+ enum cgraph_order_sort_kind kind;
+ union
+ {
+ struct cgraph_node *f;
+ struct varpool_node *v;
+ struct cgraph_asm_node *a;
+ } u;
+};
+
+/* Output all functions, variables, and asm statements in the order
+ according to their order fields, which is the order in which they
+ appeared in the file. This implements -fno-toplevel-reorder. In
+ this mode we may output functions and variables which don't really
+ need to be output. */
static void
-cgraph_mark_local_functions (void)
+cgraph_output_in_order (void)
{
- struct cgraph_node *node;
+ int max;
+ struct cgraph_order_sort *nodes;
+ int i;
+ struct cgraph_node *pf;
+ struct varpool_node *pv;
+ struct cgraph_asm_node *pa;
- /* Figure out functions we want to assemble. */
- for (node = cgraph_nodes; node; node = node->next)
+ max = cgraph_order;
+ nodes = XCNEWVEC (struct cgraph_order_sort, max);
+
+ varpool_analyze_pending_decls ();
+
+ for (pf = cgraph_nodes; pf; pf = pf->next)
{
- node->local.local = (!node->needed
- && DECL_SAVED_TREE (node->decl)
- && !TREE_PUBLIC (node->decl));
+ if (pf->process)
+ {
+ i = pf->order;
+ gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
+ nodes[i].kind = ORDER_FUNCTION;
+ nodes[i].u.f = pf;
+ }
}
- if (cgraph_dump_file)
+ for (pv = varpool_nodes_queue; pv; pv = pv->next_needed)
{
- fprintf (cgraph_dump_file, "\nMarking local functions:");
- for (node = cgraph_nodes; node; node = node->next)
- if (node->local.local)
- fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
- fprintf (cgraph_dump_file, "\n\n");
+ i = pv->order;
+ gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
+ nodes[i].kind = ORDER_VAR;
+ nodes[i].u.v = pv;
+ }
+
+ for (pa = cgraph_asm_nodes; pa; pa = pa->next)
+ {
+ i = pa->order;
+ gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
+ nodes[i].kind = ORDER_ASM;
+ nodes[i].u.a = pa;
+ }
+
+ /* In no-toplevel-reorder mode we output all statics; mark them as needed. */
+ for (i = 0; i < max; ++i)
+ {
+ if (nodes[i].kind == ORDER_VAR)
+ {
+ varpool_mark_needed_node (nodes[i].u.v);
+ }
+ }
+ varpool_empty_needed_queue ();
+
+ for (i = 0; i < max; ++i)
+ {
+ switch (nodes[i].kind)
+ {
+ case ORDER_FUNCTION:
+ nodes[i].u.f->process = 0;
+ cgraph_expand_function (nodes[i].u.f);
+ break;
+
+ case ORDER_VAR:
+ varpool_assemble_decl (nodes[i].u.v);
+ break;
+
+ case ORDER_ASM:
+ assemble_asm (nodes[i].u.a->asm_str);
+ break;
+
+ case ORDER_UNDEFINED:
+ break;
+
+ default:
+ gcc_unreachable ();
+ }
}
+
+ cgraph_asm_nodes = NULL;
+ free (nodes);
}
/* Return true when function body of DECL still needs to be kept around
cgraph_preserve_function_body_p (tree decl)
{
struct cgraph_node *node;
- /* Keep the body; we're going to dump it. */
- if (dump_enabled_p (TDI_tree_all))
- return true;
- if (!cgraph_global_info_ready)
- return (DECL_INLINE (decl) && !flag_really_no_inline);
+
+ gcc_assert (cgraph_global_info_ready);
/* Look if there is any clone around. */
- for (node = cgraph_node (decl); node; node = node->next_clone)
- if (node->global.inlined_to)
- return true;
+ node = cgraph_node (decl);
+ if (node->clones)
+ return true;
return false;
}
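+/* Run the queued interprocedural passes: the small IPA passes, the IPA
+ summary and LTO generation passes and, unless this is an LTRANS stage,
+ the regular IPA passes. */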
+static void
+ipa_passes (void)
+{
+ set_cfun (NULL);
+ current_function_decl = NULL;
+ gimple_register_cfg_hooks ();
+ bitmap_obstack_initialize (NULL);
+
+ invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);
+
+ if (!in_lto_p)
+ execute_ipa_pass_list (all_small_ipa_passes);
+
+ /* If pass_all_early_optimizations was not scheduled, the state of
+ the cgraph will not be properly updated. Update it now. */
+ if (cgraph_state < CGRAPH_STATE_IPA_SSA)
+ cgraph_state = CGRAPH_STATE_IPA_SSA;
+
+ if (!in_lto_p)
+ {
+ /* Generate coverage variables and constructors. */
+ coverage_finish ();
+
+ /* Process new functions added. */
+ set_cfun (NULL);
+ current_function_decl = NULL;
+ cgraph_process_new_functions ();
+
+ execute_ipa_summary_passes
+ ((struct ipa_opt_pass_d *) all_regular_ipa_passes);
+ }
+ execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);
+
+ if (!in_lto_p)
+ ipa_write_summaries ();
+
+ if (!flag_ltrans)
+ execute_ipa_pass_list (all_regular_ipa_passes);
+ invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
+
+ bitmap_obstack_release (NULL);
+}
+
+
/* Perform simple optimizations based on callgraph. */
void
cgraph_optimize (void)
{
+ if (errorcount || sorrycount)
+ return;
+
#ifdef ENABLE_CHECKING
verify_cgraph ();
#endif
- if (!flag_unit_at_a_time)
- {
- cgraph_varpool_assemble_pending_decls ();
- return;
- }
- process_pending_assemble_externals ();
-
/* Frontend may output common variables after the unit has been finalized.
It is safe to deal with them here as they are always zero initialized. */
- cgraph_varpool_analyze_pending_decls ();
+ varpool_analyze_pending_decls ();
timevar_push (TV_CGRAPHOPT);
+ if (pre_ipa_mem_report)
+ {
+ fprintf (stderr, "Memory consumption before IPA\n");
+ dump_memory_report (false);
+ }
if (!quiet_flag)
- fprintf (stderr, "Performing intraprocedural optimizations\n");
+ fprintf (stderr, "Performing interprocedural optimizations\n");
+ cgraph_state = CGRAPH_STATE_IPA;
- cgraph_mark_local_functions ();
- if (cgraph_dump_file)
+ /* Don't run the IPA passes if there was any error or sorry messages. */
+ if (errorcount == 0 && sorrycount == 0)
+ ipa_passes ();
+
+ /* Do nothing else if any IPA pass found errors. */
+ if (errorcount || sorrycount)
{
- fprintf (cgraph_dump_file, "Marked ");
- dump_cgraph (cgraph_dump_file);
+ timevar_pop (TV_CGRAPHOPT);
+ return;
}
- if (flag_inline_trees)
- cgraph_decide_inlining ();
+ /* This pass remove bodies of extern inline functions we never inlined.
+ Do this later so other IPA passes see what is really going on. */
+ cgraph_remove_unreachable_nodes (false, dump_file);
cgraph_global_info_ready = true;
if (cgraph_dump_file)
{
dump_cgraph (cgraph_dump_file);
dump_varpool (cgraph_dump_file);
}
+ if (post_ipa_mem_report)
+ {
+ fprintf (stderr, "Memory consumption after IPA\n");
+ dump_memory_report (false);
+ }
timevar_pop (TV_CGRAPHOPT);
/* Output everything. */
+ (*debug_hooks->assembly_start) ();
if (!quiet_flag)
fprintf (stderr, "Assembling functions:\n");
#ifdef ENABLE_CHECKING
verify_cgraph ();
#endif
-
+
+ cgraph_materialize_all_clones ();
cgraph_mark_functions_to_output ();
- cgraph_expand_all_functions ();
- cgraph_varpool_remove_unreferenced_decls ();
- cgraph_varpool_assemble_pending_decls ();
+ cgraph_state = CGRAPH_STATE_EXPANSION;
+ if (!flag_toplevel_reorder)
+ cgraph_output_in_order ();
+ else
+ {
+ cgraph_output_pending_asms ();
+
+ cgraph_expand_all_functions ();
+ varpool_remove_unreferenced_decls ();
+
+ varpool_assemble_pending_decls ();
+ }
+ cgraph_process_new_functions ();
+ cgraph_state = CGRAPH_STATE_FINISHED;
if (cgraph_dump_file)
{
verify_cgraph ();
/* Double check that all inline clones are gone and that all
function bodies have been released from memory. */
- if (flag_unit_at_a_time
- && !dump_enabled_p (TDI_tree_all)
- && !(sorrycount || errorcount))
+ if (!(sorrycount || errorcount))
{
struct cgraph_node *node;
bool error_found = false;
for (node = cgraph_nodes; node; node = node->next)
if (node->analyzed
&& (node->global.inlined_to
- || DECL_SAVED_TREE (node->decl)))
+ || gimple_has_body_p (node->decl)))
{
error_found = true;
dump_cgraph_node (stderr, node);
- }
+ }
if (error_found)
- internal_error ("Nodes with no released memory found.");
+ internal_error ("nodes with unreleased memory found");
}
#endif
}
-/* Generate and emit a static constructor or destructor. WHICH must be
- one of 'I' or 'D'. BODY should be a STATEMENT_LIST containing
- GENERIC statements. */
+
+/* Generate and emit a static constructor or destructor. WHICH must
+ be one of 'I' (for a constructor) or 'D' (for a destructor). BODY
+ is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the
+ initialization priority for this constructor or destructor. */
void
cgraph_build_static_cdtor (char which, tree body, int priority)
char which_buf[16];
tree decl, name, resdecl;
- sprintf (which_buf, "%c_%d", which, counter++);
- name = get_file_function_name_long (which_buf);
+ /* The priority is encoded in the constructor or destructor name.
+ collect2 will sort the names and arrange that they are called at
+ program startup. */
+ sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
+ name = get_file_function_name (which_buf);
- decl = build_decl (FUNCTION_DECL, name,
+ decl = build_decl (input_location, FUNCTION_DECL, name,
build_function_type (void_type_node, void_list_node));
current_function_decl = decl;
- resdecl = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
+ resdecl = build_decl (input_location,
+ RESULT_DECL, NULL_TREE, void_type_node);
DECL_ARTIFICIAL (resdecl) = 1;
- DECL_IGNORED_P (resdecl) = 1;
DECL_RESULT (decl) = resdecl;
+ DECL_CONTEXT (resdecl) = decl;
- allocate_struct_function (decl);
+ allocate_struct_function (decl, false);
TREE_STATIC (decl) = 1;
TREE_USED (decl) = 1;
DECL_ARTIFICIAL (decl) = 1;
- DECL_IGNORED_P (decl) = 1;
DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
DECL_SAVED_TREE (decl) = body;
- TREE_PUBLIC (decl) = ! targetm.have_ctors_dtors;
+ if (!targetm.have_ctors_dtors)
+ {
+ TREE_PUBLIC (decl) = 1;
+ DECL_PRESERVE_P (decl) = 1;
+ }
DECL_UNINLINABLE (decl) = 1;
DECL_INITIAL (decl) = make_node (BLOCK);
{
case 'I':
DECL_STATIC_CONSTRUCTOR (decl) = 1;
+ decl_init_priority_insert (decl, priority);
break;
case 'D':
DECL_STATIC_DESTRUCTOR (decl) = 1;
+ decl_fini_priority_insert (decl, priority);
break;
default:
gcc_unreachable ();
gimplify_function_tree (decl);
- /* ??? We will get called LATE in the compilation process. */
- if (cgraph_global_info_ready)
- tree_rest_of_compilation (decl);
+ cgraph_add_new_function (decl, false);
+ cgraph_mark_needed_node (cgraph_node (decl));
+ set_cfun (NULL);
+}
+
+void
+init_cgraph (void)
+{
+ cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
+}
+
+/* The edges representing the callers of the NEW_VERSION node were
+ fixed by cgraph_function_versioning (); now the call_expr in their
+ respective tree code should be updated to call the NEW_VERSION. */
+
+static void
+update_call_expr (struct cgraph_node *new_version)
+{
+ struct cgraph_edge *e;
+
+ gcc_assert (new_version);
+
+ /* Update the call expr on the edges to call the new version. */
+ for (e = new_version->callers; e; e = e->next_caller)
+ {
+ struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
+ gimple_call_set_fndecl (e->call_stmt, new_version->decl);
+ maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
+ }
+}
+
+
+/* Create a new cgraph node which is the new version of
+ OLD_VERSION node. REDIRECT_CALLERS holds the callers
+ edges which should be redirected to point to
+ NEW_VERSION. ALL the callees edges of OLD_VERSION
+ are cloned to the new version node. Return the new
+ version node. */
+
+static struct cgraph_node *
+cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
+ tree new_decl,
+ VEC(cgraph_edge_p,heap) *redirect_callers)
+ {
+ struct cgraph_node *new_version;
+ struct cgraph_edge *e, *new_e;
+ struct cgraph_edge *next_callee;
+ unsigned i;
+
+ gcc_assert (old_version);
+
+ new_version = cgraph_node (new_decl);
+
+ new_version->analyzed = true;
+ new_version->local = old_version->local;
+ new_version->global = old_version->global;
+ new_version->rtl = old_version->rtl;
+ new_version->reachable = true;
+ new_version->count = old_version->count;
+
+ /* Clone the old node callees. Recursive calls are
+ also cloned. */
+ for (e = old_version->callees; e; e = e->next_callee)
+ {
+ new_e = cgraph_clone_edge (e, new_version, e->call_stmt,
+ e->lto_stmt_uid, 0, e->frequency,
+ e->loop_nest, true);
+ new_e->count = e->count;
+ }
+ /* Fix recursive calls.
+ If OLD_VERSION has a recursive call after the
+ previous edge cloning, the new version will have an edge
+ pointing to the old version, which is wrong.
+ Redirect it to point to the new version. */
+ for (e = new_version->callees ; e; e = next_callee)
+ {
+ next_callee = e->next_callee;
+ if (e->callee == old_version)
+ cgraph_redirect_edge_callee (e, new_version);
+
+ if (!next_callee)
+ break;
+ }
+ for (i = 0; VEC_iterate (cgraph_edge_p, redirect_callers, i, e); i++)
+ {
+ /* Redirect calls to the old version node to point to its new
+ version. */
+ cgraph_redirect_edge_callee (e, new_version);
+ }
+
+ return new_version;
+ }
+
+ /* Perform function versioning.
+ Function versioning includes copying of the tree and
+ a callgraph update (creating a new cgraph node and updating
+ its callees and callers).
+
+ REDIRECT_CALLERS is a vector of the edges to be redirected
+ to the new version.
+
+ TREE_MAP is a mapping of tree nodes we want to replace with
+ new ones (according to results of prior analysis).
+ OLD_VERSION_NODE is the node that is versioned.
+ It returns the new version's cgraph node.
+ ARGS_TO_SKIP lists the arguments to be omitted from the new version. */
+
+struct cgraph_node *
+cgraph_function_versioning (struct cgraph_node *old_version_node,
+ VEC(cgraph_edge_p,heap) *redirect_callers,
+ VEC (ipa_replace_map_p,gc)* tree_map,
+ bitmap args_to_skip)
+{
+ tree old_decl = old_version_node->decl;
+ struct cgraph_node *new_version_node = NULL;
+ tree new_decl;
+
+ if (!tree_versionable_function_p (old_decl))
+ return NULL;
+
+ /* Make a new FUNCTION_DECL tree node for the
+ new version. */
+ if (!args_to_skip)
+ new_decl = copy_node (old_decl);
else
- cgraph_finalize_function (decl, 0);
-
- if (targetm.have_ctors_dtors)
+ new_decl = build_function_decl_skip_args (old_decl, args_to_skip);
+
+ /* Create the new version's call-graph node.
+ and update the edges of the new node. */
+ new_version_node =
+ cgraph_copy_node_for_versioning (old_version_node, new_decl,
+ redirect_callers);
+
+ /* Copy the OLD_VERSION_NODE function tree to the new version. */
+ tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip);
+
+ /* Update the new version's properties.
+ Make the new version visible only within this translation unit. Make sure
+ it is not weak either.
+ ??? We cannot use COMDAT linkage because there is no
+ ABI support for this. */
+ cgraph_make_decl_local (new_version_node->decl);
+ DECL_VIRTUAL_P (new_version_node->decl) = 0;
+ new_version_node->local.externally_visible = 0;
+ new_version_node->local.local = 1;
+ new_version_node->lowered = true;
+
+ /* Update the call_expr on the edges to call the new version node. */
+ update_call_expr (new_version_node);
+
+ cgraph_call_function_insertion_hooks (new_version_node);
+ return new_version_node;
+}
+
+/* Produce separate function body for inline clones so the offline copy can be
+ modified without affecting them. */
+struct cgraph_node *
+save_inline_function_body (struct cgraph_node *node)
+{
+ struct cgraph_node *first_clone, *n;
+
+ gcc_assert (node == cgraph_node (node->decl));
+
+ cgraph_lower_function (node);
+
+ first_clone = node->clones;
+
+ first_clone->decl = copy_node (node->decl);
+ cgraph_insert_node_to_hashtable (first_clone);
+ gcc_assert (first_clone == cgraph_node (first_clone->decl));
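+ /* The remaining clones of NODE become clones of FIRST_CLONE: splice them
+ from NODE's sibling list into FIRST_CLONE's clone list. */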
+ if (first_clone->next_sibling_clone)
{
- void (*fn) (rtx, int);
+ for (n = first_clone->next_sibling_clone; n->next_sibling_clone; n = n->next_sibling_clone)
+ n->clone_of = first_clone;
+ n->clone_of = first_clone;
+ n->next_sibling_clone = first_clone->clones;
+ if (first_clone->clones)
+ first_clone->clones->prev_sibling_clone = n;
+ first_clone->clones = first_clone->next_sibling_clone;
+ first_clone->next_sibling_clone->prev_sibling_clone = NULL;
+ first_clone->next_sibling_clone = NULL;
+ gcc_assert (!first_clone->prev_sibling_clone);
+ }
+ first_clone->clone_of = NULL;
+ node->clones = NULL;
- if (which == 'I')
- fn = targetm.asm_out.constructor;
- else
- fn = targetm.asm_out.destructor;
- fn (XEXP (DECL_RTL (decl), 0), priority);
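+ /* Walk the whole clone tree rooted at FIRST_CLONE and make every clone
+ refer to the new function decl. */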
+ if (first_clone->clones)
+ for (n = first_clone->clones; n != first_clone;)
+ {
+ gcc_assert (n->decl == node->decl);
+ n->decl = first_clone->decl;
+ if (n->clones)
+ n = n->clones;
+ else if (n->next_sibling_clone)
+ n = n->next_sibling_clone;
+ else
+ {
+ while (n != first_clone && !n->next_sibling_clone)
+ n = n->clone_of;
+ if (n != first_clone)
+ n = n->next_sibling_clone;
+ }
+ }
+
+ /* Copy the body of NODE to the new decl used by FIRST_CLONE. */
+ tree_function_versioning (node->decl, first_clone->decl, NULL, true, NULL);
+
+ DECL_EXTERNAL (first_clone->decl) = 0;
+ DECL_COMDAT_GROUP (first_clone->decl) = NULL_TREE;
+ TREE_PUBLIC (first_clone->decl) = 0;
+ DECL_COMDAT (first_clone->decl) = 0;
+ VEC_free (ipa_opt_pass, heap,
+ first_clone->ipa_transforms_to_apply);
+ first_clone->ipa_transforms_to_apply = NULL;
+
+#ifdef ENABLE_CHECKING
+ verify_cgraph_node (first_clone);
+#endif
+ return first_clone;
+}
+
+/* Given virtual clone, turn it into actual clone. */
+static void
+cgraph_materialize_clone (struct cgraph_node *node)
+{
+ bitmap_obstack_initialize (NULL);
+ /* Copy the function body of the node being cloned into the clone. */
+ tree_function_versioning (node->clone_of->decl, node->decl,
+ node->clone.tree_map, true,
+ node->clone.args_to_skip);
+ if (cgraph_dump_file)
+ {
+ dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
+ dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
+ }
+
+ /* Function is no longer clone. */
+ if (node->next_sibling_clone)
+ node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
+ if (node->prev_sibling_clone)
+ node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
+ else
+ node->clone_of->clones = node->next_sibling_clone;
+ node->next_sibling_clone = NULL;
+ node->prev_sibling_clone = NULL;
+ if (!node->clone_of->analyzed && !node->clone_of->clones)
+ cgraph_remove_node (node->clone_of);
+ node->clone_of = NULL;
+ bitmap_obstack_release (NULL);
+}
+
+/* If necessary, change the function declaration in the call statement
+ associated with E so that it corresponds to the edge callee. */
+
+gimple
+cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *e)
+{
+ tree decl = gimple_call_fndecl (e->call_stmt);
+ gimple new_stmt;
+ gimple_stmt_iterator gsi;
+
+ if (!decl || decl == e->callee->decl
+ /* Don't update call from same body alias to the real function. */
+ || cgraph_get_node (decl) == cgraph_get_node (e->callee->decl))
+ return e->call_stmt;
+
+ if (cgraph_dump_file)
+ {
+ fprintf (cgraph_dump_file, "updating call of %s/%i -> %s/%i: ",
+ cgraph_node_name (e->caller), e->caller->uid,
+ cgraph_node_name (e->callee), e->callee->uid);
+ print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
+ }
+
+ if (e->callee->clone.combined_args_to_skip)
+ new_stmt = gimple_call_copy_skip_args (e->call_stmt,
+ e->callee->clone.combined_args_to_skip);
+ else
+ new_stmt = e->call_stmt;
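+ /* Make NEW_STMT the defining statement of its virtual operand; this
+ matters when the call statement was copied above. */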
+ if (gimple_vdef (new_stmt)
+ && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
+ SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
+ gimple_call_set_fndecl (new_stmt, e->callee->decl);
+
+ gsi = gsi_for_stmt (e->call_stmt);
+ gsi_replace (&gsi, new_stmt, true);
+
+ /* Update EH information too, just in case. */
+ maybe_clean_or_replace_eh_stmt (e->call_stmt, new_stmt);
+
+ cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt);
+
+ if (cgraph_dump_file)
+ {
+ fprintf (cgraph_dump_file, " updated to:");
+ print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
}
+ return new_stmt;
}
+/* Once all functions from the compilation unit are in memory, produce all
+ clones and update all calls. We might also do this on demand if we don't
+ want to bring all functions to memory prior to compilation, but the current
+ WHOPR implementation does that and it is a bit easier to keep everything
+ right in this order. */
void
-init_cgraph (void)
+cgraph_materialize_all_clones (void)
{
- cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
+ struct cgraph_node *node;
+ bool stabilized = false;
+
+ if (cgraph_dump_file)
+ fprintf (cgraph_dump_file, "Materializing clones\n");
+#ifdef ENABLE_CHECKING
+ verify_cgraph ();
+#endif
+
+ /* We can also do topological order, but number of iterations should be
+ bounded by number of IPA passes since single IPA pass is probably not
+ going to create clones of clones it created itself. */
+ while (!stabilized)
+ {
+ stabilized = true;
+ for (node = cgraph_nodes; node; node = node->next)
+ {
+ if (node->clone_of && node->decl != node->clone_of->decl
+ && !gimple_has_body_p (node->decl))
+ {
+ if (gimple_has_body_p (node->clone_of->decl))
+ {
+ if (cgraph_dump_file)
+ {
+ fprintf (cgraph_dump_file, "clonning %s to %s\n",
+ cgraph_node_name (node->clone_of),
+ cgraph_node_name (node));
+ if (node->clone.tree_map)
+ {
+ unsigned int i;
+ fprintf (cgraph_dump_file, " replace map: ");
+ for (i = 0; i < VEC_length (ipa_replace_map_p,
+ node->clone.tree_map);
+ i++)
+ {
+ struct ipa_replace_map *replace_info;
+ replace_info = VEC_index (ipa_replace_map_p,
+ node->clone.tree_map,
+ i);
+ print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
+ fprintf (cgraph_dump_file, " -> ");
+ print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
+ fprintf (cgraph_dump_file, "%s%s;",
+ replace_info->replace_p ? "(replace)":"",
+ replace_info->ref_p ? "(ref)":"");
+ }
+ fprintf (cgraph_dump_file, "\n");
+ }
+ if (node->clone.args_to_skip)
+ {
+ fprintf (cgraph_dump_file, " args_to_skip: ");
+ dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
+ }
+ if (node->clone.combined_args_to_skip)
+ {
+ fprintf (cgraph_dump_file, " combined_args_to_skip:");
+ dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
+ }
+ }
+ cgraph_materialize_clone (node);
+ }
+ else
+ stabilized = false;
+ }
+ }
+ }
+ for (node = cgraph_nodes; node; node = node->next)
+ if (!node->analyzed && node->callees)
+ cgraph_node_remove_callees (node);
+ if (cgraph_dump_file)
+ fprintf (cgraph_dump_file, "Updating call sites\n");
+ for (node = cgraph_nodes; node; node = node->next)
+ if (node->analyzed && !node->clone_of
+ && gimple_has_body_p (node->decl))
+ {
+ struct cgraph_edge *e;
+
+ current_function_decl = node->decl;
+ push_cfun (DECL_STRUCT_FUNCTION (node->decl));
+ for (e = node->callees; e; e = e->next_callee)
+ cgraph_redirect_edge_call_stmt_to_callee (e);
+ pop_cfun ();
+ current_function_decl = NULL;
+#ifdef ENABLE_CHECKING
+ verify_cgraph_node (node);
+#endif
+ }
+ if (cgraph_dump_file)
+ fprintf (cgraph_dump_file, "Materialization Call site updates done.\n");
+ /* All changes to parameters have been performed. In order not to
+ incorrectly repeat them, we simply dispose of the bitmaps that drive the
+ changes. */
+ for (node = cgraph_nodes; node; node = node->next)
+ node->clone.combined_args_to_skip = NULL;
+#ifdef ENABLE_CHECKING
+ verify_cgraph ();
+#endif
+ cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
}
+
+#include "gt-cgraphunit.h"