/* Callgraph based intraprocedural optimizations.
- Copyright (C) 2003 Free Software Foundation, Inc.
+ Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
Contributed by Jan Hubicka
This file is part of GCC.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 59 Temple Place - Suite 330, Boston, MA
-02111-1307, USA. */
+Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
+02110-1301, USA. */
+
+/* This module implements main driver of compilation process as well as
+ few basic intraprocedural optimizers.
+
+ The main scope of this file is to act as an interface in between
+ tree based frontends and the backend (and middle end)
+
+ The front-end is supposed to use following functionality:
+
+ - cgraph_finalize_function
+
+ This function is called once front-end has parsed whole body of function
+ and it is certain that neither the function body nor the declaration will change.
+
+ (There is one exception needed for implementing GCC extern inline function.)
+
+ - cgraph_varpool_finalize_variable
+
+ This function has the same behavior as the above but is used for static
+ variables.
+
+ - cgraph_finalize_compilation_unit
+
+ This function is called once compilation unit is finalized and it will
+ no longer change.
+
+ In the unit-at-a-time the call-graph construction and local function
+ analysis takes place here. Bodies of unreachable functions are released
+ to conserve memory usage.
+
+ ??? The compilation unit in this point of view should be compilation
+ unit as defined by the language - for instance C frontend allows multiple
+ compilation units to be parsed at once and it should call this function each
+ time parsing is done so we save memory.
+
+ - cgraph_optimize
+
+ In this unit-at-a-time compilation the intra procedural analysis takes
+ place here. In particular the static functions whose address is never
+ taken are marked as local. Backend can then use this information to
+ modify calling conventions, do better inlining or similar optimizations.
+
+ - cgraph_assemble_pending_functions
+ - cgraph_varpool_assemble_pending_variables
+
+ In non-unit-at-a-time mode these functions can be used to force compilation
+ of functions or variables that are known to be needed at given stage
+ of compilation
+
+ - cgraph_mark_needed_node
+ - cgraph_varpool_mark_needed_node
+
+ When function or variable is referenced by some hidden way (for instance
+ via assembly code and marked by attribute "used"), the call-graph data structure
+ must be updated accordingly by this function.
+
+ - analyze_expr callback
+
+ This function is responsible for lowering tree nodes not understood by
+ generic code into understandable ones or alternatively marking
+ callgraph and varpool nodes referenced by them as needed.
+
+ ??? On the tree-ssa genericizing should take place here and we will avoid
+ need for these hooks (replacing them by genericizing hook)
+
+ - expand_function callback
+
+ This function is used to expand function and pass it into RTL back-end.
+ Front-end should not make any assumptions about when this function can be
+ called. In particular cgraph_assemble_pending_functions,
+ cgraph_varpool_assemble_pending_variables, cgraph_finalize_function,
+ cgraph_varpool_finalize_function, cgraph_optimize can cause arbitrarily
+ previously finalized functions to be expanded.
+
+ We implement two compilation modes.
+
+ - unit-at-a-time: In this mode analyzing of all functions is deferred
+ to cgraph_finalize_compilation_unit and expansion into cgraph_optimize.
+
+ In cgraph_finalize_compilation_unit the reachable functions are
+ analyzed. During analysis the call-graph edges from reachable
+ functions are constructed and their destinations are marked as
+ reachable. References to functions and variables are discovered too
+ and variables found to be needed output to the assembly file. Via
+ mark_referenced call in assemble_variable functions referenced by
+ static variables are noticed too.
+
+ The intra-procedural information is produced and its existence
+ indicated by global_info_ready. Once this flag is set it is impossible
+ to change function from !reachable to reachable and thus
+ assemble_variable no longer calls mark_referenced.
+
+ Finally the call-graph is topologically sorted and all reachable functions
+ that have not been completely inlined or are not external are output.
+
+ ??? It is possible that reference to function or variable is optimized
+ out. We can not deal with this nicely because topological order is not
+ suitable for it. For tree-ssa we may consider another pass doing
+ optimization and re-discovering reachable functions.
+
+ ??? Reorganize code so variables are output very last and only if they
+ really have been referenced by produced code, so we catch more cases
+ where reference has been optimized out.
+
+ - non-unit-at-a-time
+
+ All functions and variables are output as early as possible to conserve
+ memory consumption. This may or may not result in less memory used but
+ it is still needed for some legacy code that relies on particular ordering
+ of things output from the compiler.
+
+ Varpool data structures are not used and variables are output directly.
+
+ Functions are output early using call of
+ cgraph_assemble_pending_function from cgraph_finalize_function. The
+ decision on whether function is needed is made more conservative so
+ uninlinable static functions are needed too. During the call-graph
+ construction the edge destinations are not marked as reachable and it
+ is completely relied upon assemble_variable to mark them. */
+
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
+#include "rtl.h"
+#include "tree-flow.h"
#include "tree-inline.h"
#include "langhooks.h"
-#include "hashtab.h"
+#include "pointer-set.h"
#include "toplev.h"
#include "flags.h"
#include "ggc.h"
#include "params.h"
#include "fibheap.h"
#include "c-common.h"
+#include "intl.h"
+#include "function.h"
+#include "tree-gimple.h"
+#include "tree-pass.h"
+#include "output.h"
-#define INSNS_PER_CALL 10
-
-static void cgraph_expand_functions (void);
+static void cgraph_expand_all_functions (void);
static void cgraph_mark_functions_to_output (void);
static void cgraph_expand_function (struct cgraph_node *);
-static tree record_call_1 (tree *, int *, void *);
-static void cgraph_mark_local_functions (void);
-static void cgraph_optimize_function (struct cgraph_node *);
-static bool cgraph_default_inline_p (struct cgraph_node *n);
+static tree record_reference (tree *, int *, void *);
static void cgraph_analyze_function (struct cgraph_node *node);
-/* Statistics we collect about inlining algorithm. */
-static int ncalls_inlined;
-static int nfunctions_inlined;
-static int initial_insns;
-static int overall_insns;
-
-/* Records tree nodes seen in cgraph_create_edges. Simply using
+/* Records tree nodes seen in record_reference. Simply using
walk_tree_without_duplicates doesn't guarantee each node is visited
once because it gets a new htab upon each recursive call from
- record_calls_1. */
-static htab_t visited_nodes;
+ record_reference itself. */
+static struct pointer_set_t *visited_nodes;
+
+static FILE *cgraph_dump_file;
/* Determine if function DECL is needed. That is, visible to something
either outside this translation unit, something magic in the system
static bool
decide_is_function_needed (struct cgraph_node *node, tree decl)
{
+ tree origin;
+ if (MAIN_NAME_P (DECL_NAME (decl))
+ && TREE_PUBLIC (decl))
+ {
+ node->local.externally_visible = true;
+ return true;
+ }
+
+ /* If the user told us it is used, then it must be so. */
+ if (node->local.externally_visible
+ || lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
+ return true;
+
+ /* ??? If the assembler name is set by hand, it is possible to assemble
+ the name later after finalizing the function and the fact is noticed
+ in assemble_name then. This is arguably a bug. */
+ if (DECL_ASSEMBLER_NAME_SET_P (decl)
+ && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
+ return true;
+
/* If we decided it was needed before, but at the time we didn't have
the body of the function available, then it's still needed. We have
to go back and re-check its dependencies now. */
/* Externally visible functions must be output. The exception is
COMDAT functions that must be output only when they are needed. */
- if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
+ if ((TREE_PUBLIC (decl) && !flag_whole_program)
+ && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
return true;
/* Constructors and destructors are reachable from the runtime by
if (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl))
return true;
- /* If the user told us it is used, then it must be so. */
- if (lookup_attribute ("used", DECL_ATTRIBUTES (decl)))
- return true;
-
- /* ??? If the assembler name is set by hand, it is possible to assemble
- the name later after finalizing the function and the fact is noticed
- in assemble_name then. This is arguably a bug. */
- if (DECL_ASSEMBLER_NAME_SET_P (decl)
- && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
- return true;
-
if (flag_unit_at_a_time)
return false;
/* "extern inline" functions are never output locally. */
if (DECL_EXTERNAL (decl))
return false;
- /* We want to emit COMDAT functions only when they turns out to be neccesary. */
+ /* Nested functions of extern inline function shall not be emitted unless
+ we inlined the origin. */
+ for (origin = decl_function_context (decl); origin;
+ origin = decl_function_context (origin))
+ if (DECL_EXTERNAL (origin))
+ return false;
+ /* We want to emit COMDAT functions only when absolutely necessary. */
if (DECL_COMDAT (decl))
return false;
if (!DECL_INLINE (decl)
|| (!node->local.disregard_inline_limits
/* When declared inline, defer even the uninlinable functions.
- This allows them to be elliminated when unused. */
+ This allows them to be eliminated when unused. */
&& !DECL_DECLARED_INLINE_P (decl)
- && (node->local.inlinable || !cgraph_default_inline_p (node))))
+ && (!node->local.inlinable || !cgraph_default_inline_p (node))))
return true;
return false;
}
+/* Walk the decls we marked as necessary and see if they reference new
+ variables or functions and add them into the worklists. */
+static bool
+cgraph_varpool_analyze_pending_decls (void)
+{
+ bool changed = false;
+ timevar_push (TV_CGRAPH);
+
+ while (cgraph_varpool_first_unanalyzed_node)
+ {
+ tree decl = cgraph_varpool_first_unanalyzed_node->decl;
+
+ cgraph_varpool_first_unanalyzed_node->analyzed = true;
+
+ cgraph_varpool_first_unanalyzed_node = cgraph_varpool_first_unanalyzed_node->next_needed;
+
+ if (DECL_INITIAL (decl))
+ {
+ visited_nodes = pointer_set_create ();
+ walk_tree (&DECL_INITIAL (decl), record_reference, NULL, visited_nodes);
+ pointer_set_destroy (visited_nodes);
+ visited_nodes = NULL;
+ }
+ changed = true;
+ }
+ timevar_pop (TV_CGRAPH);
+ return changed;
+}
+
+/* Optimization of function bodies might've rendered some variables as
+ unnecessary so we want to avoid these from being compiled.
+
+ This is done by pruning the queue and keeping only the variables that
+ really appear needed (ie they are either externally visible or referenced
+ by compiled function). Re-doing the reachability analysis on variables
+ brings back the remaining variables referenced by these. */
+static void
+cgraph_varpool_remove_unreferenced_decls (void)
+{
+ struct cgraph_varpool_node *next, *node = cgraph_varpool_nodes_queue;
+
+ cgraph_varpool_reset_queue ();
+
+ if (errorcount || sorrycount)
+ return;
+
+ while (node)
+ {
+ tree decl = node->decl;
+ next = node->next_needed;
+ node->needed = 0;
+
+ if (node->finalized
+ && ((DECL_ASSEMBLER_NAME_SET_P (decl)
+ && TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)))
+ || node->force_output
+ || decide_is_variable_needed (node, decl)))
+ cgraph_varpool_mark_needed_node (node);
+
+ node = next;
+ }
+ cgraph_varpool_analyze_pending_decls ();
+}
+
+
/* When not doing unit-at-a-time, output all functions enqueued.
Return true when such functions were found. */
-static bool
+
+bool
cgraph_assemble_pending_functions (void)
{
bool output = false;
struct cgraph_node *n = cgraph_nodes_queue;
cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
- if (!n->origin && !DECL_EXTERNAL (n->decl))
- cgraph_expand_function (n);
- output = true;
+ n->next_needed = NULL;
+ if (!n->global.inlined_to
+ && !n->alias
+ && !DECL_EXTERNAL (n->decl))
+ {
+ cgraph_expand_function (n);
+ output = true;
+ }
}
+
return output;
}
+/* As a GCC extension we allow redefinition of the function. The
+ semantics when both copies of bodies differ is not well defined.
+ We replace the old body with new body so in unit at a time mode
+ we always use new body, while in normal mode we may end up with
+ old body inlined into some functions and new body expanded and
+ inlined in others.
-/* Analyze function once it is parsed. Set up the local information
- available - create cgraph edges for function calls via BODY. */
+ ??? It may make more sense to use one body for inlining and other
+ body for expanding the function but this is difficult to do. */
+
+static void
+cgraph_reset_node (struct cgraph_node *node)
+{
+ /* If node->output is set, then this is a unit-at-a-time compilation
+ and we have already begun whole-unit analysis. This is *not*
+ testing for whether we've already emitted the function. That
+ case can be sort-of legitimately seen with real function
+ redefinition errors. I would argue that the front end should
+ never present us with such a case, but don't enforce that for now. */
+ gcc_assert (!node->output);
+
+ /* Reset our data structures so we can analyze the function again. */
+ memset (&node->local, 0, sizeof (node->local));
+ memset (&node->global, 0, sizeof (node->global));
+ memset (&node->rtl, 0, sizeof (node->rtl));
+ node->analyzed = false;
+ node->local.redefined_extern_inline = true;
+ node->local.finalized = false;
+
+ if (!flag_unit_at_a_time)
+ {
+ struct cgraph_node *n;
+
+ for (n = cgraph_nodes; n; n = n->next)
+ if (n->global.inlined_to == node)
+ cgraph_remove_node (n);
+ }
+
+ cgraph_node_remove_callees (node);
+
+ /* We may need to re-queue the node for assembling in case
+ we already processed it and ignored it as not needed. */
+ if (node->reachable && !flag_unit_at_a_time)
+ {
+ struct cgraph_node *n;
+
+ for (n = cgraph_nodes_queue; n; n = n->next_needed)
+ if (n == node)
+ break;
+ if (!n)
+ node->reachable = 0;
+ }
+}
+
+/* DECL has been parsed. Take it, queue it, compile it at the whim of the
+ logic in effect. If NESTED is true, then our caller cannot stand to have
+ the garbage collector run at the moment. We would need to either create
+ a new GC context, or just not compile right now. */
void
-cgraph_finalize_function (tree decl)
+cgraph_finalize_function (tree decl, bool nested)
{
struct cgraph_node *node = cgraph_node (decl);
if (node->local.finalized)
- {
- /* As an GCC extension we allow redefinition of the function. The
- semantics when both copies of bodies differ is not well defined. We
- replace the old body with new body so in unit at a time mode we always
- use new body, while in normal mode we may end up with old body inlined
- into some functions and new body expanded and inlined in others.
-
- ??? It may make more sense to use one body for inlining and other body
- for expanding the function but this is dificult to do. */
- /* Reset our datastructures so we can analyze the function body
- again. */
- memset (&node->local, 0, sizeof (node->local));
- memset (&node->global, 0, sizeof (node->global));
- memset (&node->rtl, 0, sizeof (node->rtl));
- node->lowered = false;
- if (node->output)
- abort ();
- while (node->callees)
- cgraph_remove_call (node->decl, node->callees->callee->decl);
- /* We may need to re-queue the node for assembling in case
- we already proceeded it and ignored as not needed. */
- if (node->reachable && !flag_unit_at_a_time)
- {
- struct cgraph_node *n;
+ cgraph_reset_node (node);
- for (n = cgraph_nodes_queue; n; n = n->next_needed)
- if (n == node)
- break;
- if (!n)
- node->reachable = 0;
- }
- }
notice_global_symbol (decl);
node->decl = decl;
node->local.finalized = true;
+ node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
+ if (node->nested)
+ lower_nested_functions (decl);
+ gcc_assert (!node->nested);
/* If not unit at a time, then we need to create the call graph
now, so that called functions can be queued and emitted now. */
if (!flag_unit_at_a_time)
- cgraph_analyze_function (node);
+ {
+ cgraph_analyze_function (node);
+ cgraph_decide_inlining_incrementally (node, false);
+ }
if (decide_is_function_needed (node, decl))
cgraph_mark_needed_node (node);
- /* If not unit at a time, go ahead and emit everything we've
- found to be reachable at this time. Do this only at top-level. */
- if (!node->origin)
- cgraph_assemble_pending_functions ();
+ /* Since we reclaim unreachable nodes at the end of every language
+ level unit, we need to be conservative about possible entry points
+ there. */
+ if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl)))
+ cgraph_mark_reachable_node (node);
+
+ /* If not unit at a time, go ahead and emit everything we've found
+ to be reachable at this time. */
+ if (!nested)
+ {
+ if (!cgraph_assemble_pending_functions ())
+ ggc_collect ();
+ }
/* If we've not yet emitted decl, tell the debug info about it. */
- if (flag_unit_at_a_time || !node->reachable)
+ if (!TREE_ASM_WRITTEN (decl))
(*debug_hooks->deferred_inline_function) (decl);
+
+ /* Possibly warn about unused parameters. */
+ if (warn_unused_parameter)
+ do_warn_unused_parameter (decl);
+}
+
+void
+cgraph_lower_function (struct cgraph_node *node)
+{
+ if (node->lowered)
+ return;
+ tree_lowering_passes (node->decl);
+ node->lowered = true;
}
/* Walk tree and record all calls. Called via walk_tree. */
static tree
-record_call_1 (tree *tp, int *walk_subtrees, void *data)
+record_reference (tree *tp, int *walk_subtrees, void *data)
{
- if (TREE_CODE (*tp) == VAR_DECL && TREE_STATIC (*tp))
- cgraph_varpool_mark_needed_node (cgraph_varpool_node (*tp));
- /* Record dereferences to the functions. This makes the functions
- reachable unconditionally. */
- else if (TREE_CODE (*tp) == ADDR_EXPR && flag_unit_at_a_time)
- {
- tree decl = TREE_OPERAND (*tp, 0);
- if (TREE_CODE (decl) == FUNCTION_DECL)
- cgraph_mark_needed_node (cgraph_node (decl));
- }
- else if (TREE_CODE (*tp) == CALL_EXPR)
+ tree t = *tp;
+
+ switch (TREE_CODE (t))
{
- tree decl = get_callee_fndecl (*tp);
- if (decl && TREE_CODE (decl) == FUNCTION_DECL)
+ case VAR_DECL:
+ /* ??? Really, we should mark this decl as *potentially* referenced
+ by this function and re-examine whether the decl is actually used
+ after rtl has been generated. */
+ if (TREE_STATIC (t) || DECL_EXTERNAL (t))
+ {
+ cgraph_varpool_mark_needed_node (cgraph_varpool_node (t));
+ if (lang_hooks.callgraph.analyze_expr)
+ return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees,
+ data);
+ }
+ break;
+
+ case FDESC_EXPR:
+ case ADDR_EXPR:
+ if (flag_unit_at_a_time)
+ {
+ /* Record dereferences to the functions. This makes the
+ functions reachable unconditionally. */
+ tree decl = TREE_OPERAND (*tp, 0);
+ if (TREE_CODE (decl) == FUNCTION_DECL)
+ cgraph_mark_needed_node (cgraph_node (decl));
+ }
+ break;
+
+ default:
+ /* Save some cycles by not walking types and declarations as we
+ won't find anything useful there anyway. */
+ if (IS_TYPE_OR_DECL_P (*tp))
{
- if (DECL_BUILT_IN (decl))
- return NULL;
- cgraph_record_call (data, decl);
-
- /* When we see a function call, we don't want to look at the
- function reference in the ADDR_EXPR that is hanging from
- the CALL_EXPR we're examining here, because we would
- conclude incorrectly that the function's address could be
- taken by something that is not a function call. So only
- walk the function parameter list, skip the other subtrees. */
-
- walk_tree (&TREE_OPERAND (*tp, 1), record_call_1, data,
- visited_nodes);
*walk_subtrees = 0;
+ break;
}
+
+ if ((unsigned int) TREE_CODE (t) >= LAST_AND_UNUSED_TREE_CODE)
+ return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees, data);
+ break;
}
- /* Save some cycles by not walking types and declaration as we won't find anything
- usefull there anyway. */
- if (DECL_P (*tp) || TYPE_P (*tp))
- *walk_subtrees = 0;
+
return NULL;
}
-/* Create cgraph edges for function calls inside BODY from DECL. */
+/* Create cgraph edges for function calls inside BODY from NODE. */
-void
-cgraph_create_edges (tree decl, tree body)
+static void
+cgraph_create_edges (struct cgraph_node *node, tree body)
{
- /* The nodes we're interested in are never shared, so walk
- the tree ignoring duplicates. */
- visited_nodes = htab_create (37, htab_hash_pointer,
- htab_eq_pointer, NULL);
- walk_tree (&body, record_call_1, decl, visited_nodes);
- htab_delete (visited_nodes);
+ basic_block bb;
+
+ struct function *this_cfun = DECL_STRUCT_FUNCTION (body);
+ block_stmt_iterator bsi;
+ tree step;
+ visited_nodes = pointer_set_create ();
+
+ /* Reach the trees by walking over the CFG, and note the
+ enclosing basic-blocks in the call edges. */
+ FOR_EACH_BB_FN (bb, this_cfun)
+ for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+ {
+ tree stmt = bsi_stmt (bsi);
+ tree call = get_call_expr_in (stmt);
+ tree decl;
+
+ if (call && (decl = get_callee_fndecl (call)))
+ {
+ cgraph_create_edge (node, cgraph_node (decl), stmt,
+ bb->count,
+ bb->loop_depth);
+ walk_tree (&TREE_OPERAND (call, 1),
+ record_reference, node, visited_nodes);
+ if (TREE_CODE (stmt) == MODIFY_EXPR)
+ walk_tree (&TREE_OPERAND (stmt, 0),
+ record_reference, node, visited_nodes);
+ }
+ else
+ walk_tree (bsi_stmt_ptr (bsi), record_reference, node, visited_nodes);
+ }
+
+ /* Look for initializers of constant variables and private statics. */
+ for (step = DECL_STRUCT_FUNCTION (body)->unexpanded_var_list;
+ step;
+ step = TREE_CHAIN (step))
+ {
+ tree decl = TREE_VALUE (step);
+ if (TREE_CODE (decl) == VAR_DECL
+ && (TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
+ && flag_unit_at_a_time)
+ cgraph_varpool_finalize_decl (decl);
+ else if (TREE_CODE (decl) == VAR_DECL && DECL_INITIAL (decl))
+ walk_tree (&DECL_INITIAL (decl), record_reference, node, visited_nodes);
+ }
+
+ pointer_set_destroy (visited_nodes);
visited_nodes = NULL;
}
+/* Give initial reasons why inlining would fail. These get
+ either NULLified or usually overwritten by a more precise reason
+ later. */
+static void
+initialize_inline_failed (struct cgraph_node *node)
+{
+ struct cgraph_edge *e;
+
+ for (e = node->callers; e; e = e->next_caller)
+ {
+ gcc_assert (!e->callee->global.inlined_to);
+ gcc_assert (e->inline_failed);
+ if (node->local.redefined_extern_inline)
+ e->inline_failed = N_("redefined extern inline functions are not "
+ "considered for inlining");
+ else if (!node->local.inlinable)
+ e->inline_failed = N_("function not inlinable");
+ else
+ e->inline_failed = N_("function not considered for inlining");
+ }
+}
+
+/* Rebuild call edges from current function after passes not aware
+ of cgraph updating. */
+static void
+rebuild_cgraph_edges (void)
+{
+ basic_block bb;
+ struct cgraph_node *node = cgraph_node (current_function_decl);
+ block_stmt_iterator bsi;
+
+ cgraph_node_remove_callees (node);
+
+ node->count = ENTRY_BLOCK_PTR->count;
+
+ FOR_EACH_BB (bb)
+ for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+ {
+ tree stmt = bsi_stmt (bsi);
+ tree call = get_call_expr_in (stmt);
+ tree decl;
+
+ if (call && (decl = get_callee_fndecl (call)))
+ cgraph_create_edge (node, cgraph_node (decl), stmt,
+ bb->count,
+ bb->loop_depth);
+ }
+ initialize_inline_failed (node);
+ gcc_assert (!node->global.inlined_to);
+}
+
+struct tree_opt_pass pass_rebuild_cgraph_edges =
+{
+ NULL, /* name */
+ NULL, /* gate */
+ rebuild_cgraph_edges, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ 0, /* tv_id */
+ PROP_cfg, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ 0, /* todo_flags_finish */
+ 0 /* letter */
+};
+
+/* Verify cgraph nodes of given cgraph node. */
+void
+verify_cgraph_node (struct cgraph_node *node)
+{
+ struct cgraph_edge *e;
+ struct cgraph_node *main_clone;
+ struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
+ basic_block this_block;
+ block_stmt_iterator bsi;
+ bool error_found = false;
+
+ timevar_push (TV_CGRAPH_VERIFY);
+ for (e = node->callees; e; e = e->next_callee)
+ if (e->aux)
+ {
+ error ("Aux field set for edge %s->%s",
+ cgraph_node_name (e->caller), cgraph_node_name (e->callee));
+ error_found = true;
+ }
+ for (e = node->callers; e; e = e->next_caller)
+ {
+ if (!e->inline_failed)
+ {
+ if (node->global.inlined_to
+ != (e->caller->global.inlined_to
+ ? e->caller->global.inlined_to : e->caller))
+ {
+ error ("Inlined_to pointer is wrong");
+ error_found = true;
+ }
+ if (node->callers->next_caller)
+ {
+ error ("Multiple inline callers");
+ error_found = true;
+ }
+ }
+ else
+ if (node->global.inlined_to)
+ {
+ error ("Inlined_to pointer set for noninline callers");
+ error_found = true;
+ }
+ }
+ if (!node->callers && node->global.inlined_to)
+ {
+ error ("Inlined_to pointer is set but no predecesors found");
+ error_found = true;
+ }
+ if (node->global.inlined_to == node)
+ {
+ error ("Inlined_to pointer refers to itself");
+ error_found = true;
+ }
+
+ for (main_clone = cgraph_node (node->decl); main_clone;
+ main_clone = main_clone->next_clone)
+ if (main_clone == node)
+ break;
+ if (!node)
+ {
+ error ("Node not found in DECL_ASSEMBLER_NAME hash");
+ error_found = true;
+ }
+
+ if (node->analyzed
+ && DECL_SAVED_TREE (node->decl) && !TREE_ASM_WRITTEN (node->decl)
+ && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to))
+ {
+ if (this_cfun->cfg)
+ {
+ /* The nodes we're interested in are never shared, so walk
+ the tree ignoring duplicates. */
+ visited_nodes = pointer_set_create ();
+ /* Reach the trees by walking over the CFG, and note the
+ enclosing basic-blocks in the call edges. */
+ FOR_EACH_BB_FN (this_block, this_cfun)
+ for (bsi = bsi_start (this_block); !bsi_end_p (bsi); bsi_next (&bsi))
+ {
+ tree stmt = bsi_stmt (bsi);
+ tree call = get_call_expr_in (stmt);
+ tree decl;
+ if (call && (decl = get_callee_fndecl (call)))
+ {
+ struct cgraph_edge *e = cgraph_edge (node, stmt);
+ if (e)
+ {
+ if (e->aux)
+ {
+ error ("Shared call_stmt:");
+ debug_generic_stmt (stmt);
+ error_found = true;
+ }
+ if (e->callee->decl != cgraph_node (decl)->decl)
+ {
+ error ("Edge points to wrong declaration:");
+ debug_tree (e->callee->decl);
+ fprintf (stderr," Instead of:");
+ debug_tree (decl);
+ }
+ e->aux = (void *)1;
+ }
+ else
+ {
+ error ("Missing callgraph edge for call stmt:");
+ debug_generic_stmt (stmt);
+ error_found = true;
+ }
+ }
+ }
+ pointer_set_destroy (visited_nodes);
+ visited_nodes = NULL;
+ }
+ else
+ /* No CFG available?! */
+ gcc_unreachable ();
+
+ for (e = node->callees; e; e = e->next_callee)
+ {
+ if (!e->aux)
+ {
+ error ("Edge %s->%s has no corresponding call_stmt",
+ cgraph_node_name (e->caller),
+ cgraph_node_name (e->callee));
+ debug_generic_stmt (e->call_stmt);
+ error_found = true;
+ }
+ e->aux = 0;
+ }
+ }
+ if (error_found)
+ {
+ dump_cgraph_node (stderr, node);
+ internal_error ("verify_cgraph_node failed.");
+ }
+ timevar_pop (TV_CGRAPH_VERIFY);
+}
+
+/* Verify whole cgraph structure. */
+void
+verify_cgraph (void)
+{
+ struct cgraph_node *node;
+
+ if (sorrycount || errorcount)
+ return;
+
+ for (node = cgraph_nodes; node; node = node->next)
+ verify_cgraph_node (node);
+}
+
+
+/* Output all variables enqueued to be assembled. */
+bool
+cgraph_varpool_assemble_pending_decls (void)
+{
+ bool changed = false;
+
+ if (errorcount || sorrycount)
+ return false;
+
+ /* EH might mark decls as needed during expansion. This should be safe since
+ we don't create references to new functions, but it should not be used
+ elsewhere. */
+ cgraph_varpool_analyze_pending_decls ();
+
+ while (cgraph_varpool_nodes_queue)
+ {
+ tree decl = cgraph_varpool_nodes_queue->decl;
+ struct cgraph_varpool_node *node = cgraph_varpool_nodes_queue;
+
+ cgraph_varpool_nodes_queue = cgraph_varpool_nodes_queue->next_needed;
+ if (!TREE_ASM_WRITTEN (decl) && !node->alias && !DECL_EXTERNAL (decl))
+ {
+ assemble_variable (decl, 0, 1, 0);
+ /* Local static variables are never seen by check_global_declarations
+ so we need to output debug info by hand. */
+ if (decl_function_context (decl) && errorcount == 0 && sorrycount == 0)
+ {
+ timevar_push (TV_SYMOUT);
+ (*debug_hooks->global_decl) (decl);
+ timevar_pop (TV_SYMOUT);
+ }
+ changed = true;
+ }
+ node->next_needed = NULL;
+ }
+ return changed;
+}
+
/* Analyze the function scheduled to be output. */
static void
cgraph_analyze_function (struct cgraph_node *node)
{
tree decl = node->decl;
- if (lang_hooks.callgraph.lower_function)
- (*lang_hooks.callgraph.lower_function) (decl);
-
- current_function_decl = node->decl;
+ current_function_decl = decl;
+ push_cfun (DECL_STRUCT_FUNCTION (decl));
+ cgraph_lower_function (node);
/* First kill forward declaration so reverse inlining works properly. */
- cgraph_create_edges (decl, DECL_SAVED_TREE (decl));
+ cgraph_create_edges (node, decl);
node->local.inlinable = tree_inlinable_function_p (decl);
- if (!DECL_ESTIMATED_INSNS (decl))
- DECL_ESTIMATED_INSNS (decl)
- = (*lang_hooks.tree_inlining.estimate_num_insns) (decl);
- node->local.self_insns = DECL_ESTIMATED_INSNS (decl);
+ node->local.self_insns = estimate_num_insns (decl);
if (node->local.inlinable)
node->local.disregard_inline_limits
- = (*lang_hooks.tree_inlining.disregard_inline_limits) (decl);
-
+ = lang_hooks.tree_inlining.disregard_inline_limits (decl);
+ initialize_inline_failed (node);
+ if (flag_really_no_inline && !node->local.disregard_inline_limits)
+ node->local.inlinable = 0;
/* Inlining characteristics are maintained by the cgraph_mark_inline. */
node->global.insns = node->local.self_insns;
- if (!DECL_EXTERNAL (node->decl))
- {
- node->global.cloned_times = 1;
- node->global.will_be_output = true;
- }
- node->lowered = true;
+ node->analyzed = true;
+ pop_cfun ();
current_function_decl = NULL;
}
cgraph_finalize_compilation_unit (void)
{
struct cgraph_node *node;
+ /* Keep track of already processed nodes when called multiple times for
+ intermodule optimization. */
+ static struct cgraph_node *first_analyzed;
+
+ finish_aliases_1 ();
if (!flag_unit_at_a_time)
{
return;
}
- cgraph_varpool_assemble_pending_decls ();
if (!quiet_flag)
- fprintf (stderr, "\nAnalyzing compilation unit\n");
+ {
+ fprintf (stderr, "\nAnalyzing compilation unit");
+ fflush (stderr);
+ }
timevar_push (TV_CGRAPH);
+ cgraph_varpool_analyze_pending_decls ();
if (cgraph_dump_file)
{
- fprintf (cgraph_dump_file, "\nInitial entry points:");
- for (node = cgraph_nodes; node; node = node->next)
+ fprintf (cgraph_dump_file, "Initial entry points:");
+ for (node = cgraph_nodes; node != first_analyzed; node = node->next)
if (node->needed && DECL_SAVED_TREE (node->decl))
fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
fprintf (cgraph_dump_file, "\n");
node = cgraph_nodes_queue;
cgraph_nodes_queue = cgraph_nodes_queue->next_needed;
+ node->next_needed = NULL;
/* ??? It is possible to create extern inline function and later using
- weak alas attribute to kill it's body. See
+ weak alias attribute to kill its body. See
gcc.c-torture/compile/20011119-1.c */
if (!DECL_SAVED_TREE (decl))
- continue;
+ {
+ cgraph_reset_node (node);
+ continue;
+ }
- if (node->lowered || !node->reachable || !DECL_SAVED_TREE (decl))
- abort ();
+ gcc_assert (!node->analyzed && node->reachable);
+ gcc_assert (DECL_SAVED_TREE (decl));
cgraph_analyze_function (node);
if (!edge->callee->reachable)
cgraph_mark_reachable_node (edge->callee);
- cgraph_varpool_assemble_pending_decls ();
+ cgraph_varpool_analyze_pending_decls ();
}
/* Collect entry points to the unit. */
if (cgraph_dump_file)
{
- fprintf (cgraph_dump_file, "\nUnit entry points:");
- for (node = cgraph_nodes; node; node = node->next)
+ fprintf (cgraph_dump_file, "Unit entry points:");
+ for (node = cgraph_nodes; node != first_analyzed; node = node->next)
if (node->needed && DECL_SAVED_TREE (node->decl))
fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
- fprintf (cgraph_dump_file, "\n");
+ fprintf (cgraph_dump_file, "\n\nInitial ");
dump_cgraph (cgraph_dump_file);
}
if (cgraph_dump_file)
fprintf (cgraph_dump_file, "\nReclaiming functions:");
- for (node = cgraph_nodes; node; node = node->next)
+ for (node = cgraph_nodes; node != first_analyzed; node = node->next)
{
tree decl = node->decl;
+ if (node->local.finalized && !DECL_SAVED_TREE (decl))
+ cgraph_reset_node (node);
+
if (!node->reachable && DECL_SAVED_TREE (decl))
{
- cgraph_remove_node (node);
if (cgraph_dump_file)
fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
+ cgraph_remove_node (node);
+ continue;
}
+ else
+ node->next_needed = NULL;
+ gcc_assert (!node->local.finalized || DECL_SAVED_TREE (decl));
+ gcc_assert (node->analyzed == node->local.finalized);
}
if (cgraph_dump_file)
- fprintf (cgraph_dump_file, "\n");
+ {
+ fprintf (cgraph_dump_file, "\n\nReclaimed ");
+ dump_cgraph (cgraph_dump_file);
+ }
+ first_analyzed = cgraph_nodes;
ggc_collect ();
timevar_pop (TV_CGRAPH);
}
-
/* Figure out what functions we want to assemble. */
static void
{
tree decl = node->decl;
struct cgraph_edge *e;
- if (node->output)
- abort ();
+
+ gcc_assert (!node->output);
for (e = node->callers; e; e = e->next_caller)
- if (!e->inline_call)
+ if (e->inline_failed)
break;
/* We need to output all local functions that are used and not
always inlined, as well as those that are reachable from
outside the current compilation unit. */
if (DECL_SAVED_TREE (decl)
+ && !node->global.inlined_to
&& (node->needed
|| (e && node->reachable))
- && !TREE_ASM_WRITTEN (decl) && !node->origin
+ && !TREE_ASM_WRITTEN (decl)
&& !DECL_EXTERNAL (decl))
node->output = 1;
- }
-}
-
-/* Optimize the function before expansion. */
-
-static void
-cgraph_optimize_function (struct cgraph_node *node)
-{
- tree decl = node->decl;
+ else
+ {
+ /* We should've reclaimed all functions that are not needed. */
+#ifdef ENABLE_CHECKING
+ if (!node->global.inlined_to && DECL_SAVED_TREE (decl)
+ && !DECL_EXTERNAL (decl))
+ {
+ dump_cgraph_node (stderr, node);
+ internal_error ("failed to reclaim unneeded function");
+ }
+#endif
+ gcc_assert (node->global.inlined_to || !DECL_SAVED_TREE (decl)
+ || DECL_EXTERNAL (decl));
- timevar_push (TV_INTEGRATION);
- /* optimize_inline_calls avoids inlining of current_function_decl. */
- current_function_decl = decl;
- if (flag_inline_trees)
- optimize_inline_calls (decl);
- if (node->nested)
- {
- for (node = node->nested; node; node = node->next_nested)
- cgraph_optimize_function (node);
+ }
+
}
- timevar_pop (TV_INTEGRATION);
}
/* Expand function specified by NODE. */
cgraph_expand_function (struct cgraph_node *node)
{
tree decl = node->decl;
- struct cgraph_edge *e;
- announce_function (decl);
+ /* We ought to not compile any inline clones. */
+ gcc_assert (!node->global.inlined_to);
- cgraph_optimize_function (node);
+ if (flag_unit_at_a_time)
+ announce_function (decl);
- /* Generate RTL for the body of DECL. Nested functions are expanded
- via lang_expand_decl_stmt. */
- (*lang_hooks.callgraph.expand_function) (decl);
+ cgraph_lower_function (node);
- if (!flag_unit_at_a_time)
- {
- if (!node->local.inlinable
- || (!node->local.disregard_inline_limits
- && !cgraph_default_inline_p (node)))
- DECL_SAVED_TREE (node->decl) = NULL;
- }
- else
- {
- for (e = node->callers; e; e = e->next_caller)
- if (e->inline_call)
- break;
- if (!e)
- DECL_SAVED_TREE (decl) = NULL;
- }
- current_function_decl = NULL;
-}
+ /* Generate RTL for the body of DECL. */
+ lang_hooks.callgraph.expand_function (decl);
-/* Fill array order with all nodes with output flag set in the reverse
- topological order. */
-static int
-cgraph_postorder (struct cgraph_node **order)
-{
- struct cgraph_node *node, *node2;
- int stack_size = 0;
- int order_pos = 0;
- struct cgraph_edge *edge, last;
+ /* Make sure that BE didn't give up on compiling. */
+ /* ??? Can happen with nested function of extern inline. */
+ gcc_assert (TREE_ASM_WRITTEN (node->decl));
- struct cgraph_node **stack =
- xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
+ current_function_decl = NULL;
+ if (!cgraph_preserve_function_body_p (node->decl))
+ {
+ DECL_SAVED_TREE (node->decl) = NULL;
+ DECL_STRUCT_FUNCTION (node->decl) = NULL;
+ DECL_INITIAL (node->decl) = error_mark_node;
+ /* Eliminate all call edges. This is important so the call_expr no longer
+ points to the dead function body. */
+ cgraph_node_remove_callees (node);
+ }
- /* We have to deal with cycles nicely, so use a depth first traversal
- output algorithm. Ignore the fact that some functions won't need
- to be output and put them into order as well, so we get dependencies
- right through intline functions. */
- for (node = cgraph_nodes; node; node = node->next)
- node->aux = NULL;
- for (node = cgraph_nodes; node; node = node->next)
- if (!node->aux)
- {
- node2 = node;
- if (!node->callers)
- node->aux = &last;
- else
- node->aux = node->callers;
- while (node2)
- {
- while (node2->aux != &last)
- {
- edge = node2->aux;
- if (edge->next_caller)
- node2->aux = edge->next_caller;
- else
- node2->aux = &last;
- if (!edge->caller->aux)
- {
- if (!edge->caller->callers)
- edge->caller->aux = &last;
- else
- edge->caller->aux = edge->caller->callers;
- stack[stack_size++] = node2;
- node2 = edge->caller;
- break;
- }
- }
- if (node2->aux == &last)
- {
- order[order_pos++] = node2;
- if (stack_size)
- node2 = stack[--stack_size];
- else
- node2 = NULL;
- }
- }
- }
- free (stack);
- return order_pos;
+ cgraph_function_flags_ready = true;
}
-#define INLINED_TIMES(node) ((size_t)(node)->aux)
-#define SET_INLINED_TIMES(node,times) ((node)->aux = (void *)(times))
-
-/* Return list of nodes we decided to inline NODE into, set their output
- flag and compute INLINED_TIMES.
-
- We do simple backtracing to get INLINED_TIMES right. This should not be
- expensive as we limit the amount of inlining. Alternatively we may first
- discover set of nodes, topologically sort these and propagate
- INLINED_TIMES */
+/* Return true when the call along edge E is inlined; otherwise set
+   *REASON to the recorded inline-failure string. */
-static int
-cgraph_inlined_into (struct cgraph_node *node, struct cgraph_node **array)
+bool
+cgraph_inline_p (struct cgraph_edge *e, const char **reason)
{
- int nfound = 0;
- struct cgraph_edge **stack;
- struct cgraph_edge *e, *e1;
- int sp;
- int i;
-
- /* Fast path: since we traverse in mostly topological order, we will likely
- find no edges. */
- for (e = node->callers; e; e = e->next_caller)
- if (e->inline_call)
- break;
-
- if (!e)
- return 0;
-
- /* Allocate stack for back-tracking up callgraph. */
- stack = xmalloc ((cgraph_n_nodes + 1) * sizeof (struct cgraph_edge));
- sp = 0;
+ *reason = e->inline_failed;
+ return !e->inline_failed;
+}
- /* Push the first edge on to the stack. */
- stack[sp++] = e;
- while (sp)
- {
- struct cgraph_node *caller;
- /* Look at the edge on the top of the stack. */
- e = stack[sp - 1];
- caller = e->caller;
+/* Expand all functions that must be output.
- /* Check if the caller destination has been visited yet. */
- if (!caller->output)
- {
- array[nfound++] = e->caller;
- /* Mark that we have visited the destination. */
- caller->output = true;
- SET_INLINED_TIMES (caller, 0);
- }
- SET_INLINED_TIMES (caller, INLINED_TIMES (caller) + 1);
+ Attempt to topologically sort the nodes so function is output when
+ all called functions are already assembled to allow data to be
+ propagated across the callgraph. Use a stack to get smaller distance
+ between a function and its callees (later we may choose to use a more
+ sophisticated algorithm for function reordering; we will likely want
+ to use subsections to make the output functions appear in top-down
+ order). */
- for (e1 = caller->callers; e1; e1 = e1->next_caller)
- if (e1->inline_call)
- break;
- if (e1)
- stack[sp++] = e1;
- else
- {
- while (true)
- {
- for (e1 = e->next_caller; e1; e1 = e1->next_caller)
- if (e1->inline_call)
- break;
-
- if (e1)
- {
- stack[sp - 1] = e1;
- break;
- }
- else
- {
- sp--;
- if (!sp)
- break;
- e = stack[sp - 1];
- }
- }
- }
- }
+static void
+cgraph_expand_all_functions (void)
+{
+ struct cgraph_node *node;
+ struct cgraph_node **order =
+ xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
+ int order_pos = 0, new_order_pos = 0;
+ int i;
- free (stack);
+ order_pos = cgraph_postorder (order);
+ gcc_assert (order_pos == cgraph_n_nodes);
+ /* Garbage collector may remove inline clones we eliminate during
+ optimization. So we must be sure to not reference them. */
+ for (i = 0; i < order_pos; i++)
+ if (order[i]->output)
+ order[new_order_pos++] = order[i];
- if (cgraph_dump_file)
+ for (i = new_order_pos - 1; i >= 0; i--)
{
- fprintf (cgraph_dump_file, "Found inline predecesors of %s:",
- cgraph_node_name (node));
- for (i = 0; i < nfound; i++)
+ node = order[i];
+ if (node->output)
{
- fprintf (cgraph_dump_file, " %s", cgraph_node_name (array[i]));
- if (INLINED_TIMES (array[i]) != 1)
- fprintf (cgraph_dump_file, " (%i times)",
- (int)INLINED_TIMES (array[i]));
+ gcc_assert (node->reachable);
+ node->output = 0;
+ cgraph_expand_function (node);
}
- fprintf (cgraph_dump_file, "\n");
}
-
- return nfound;
+ free (order);
}
-/* Return list of nodes we decided to inline into NODE, set their output
- flag and compute INLINED_TIMES.
+/* Mark visibility of all functions.
+
+ A local function is one whose calls can occur only in the current
+ compilation unit and all its calls are explicit, so we can change
+ its calling convention. We simply mark all static functions whose
+ address is not taken as local.
- This function is identical to cgraph_inlined_into with callers and callees
- nodes swapped. */
+ We also change the TREE_PUBLIC flag of all declarations that are public
+ in language point of view but we want to overwrite this default
+ via visibilities for the backend point of view. */
-static int
-cgraph_inlined_callees (struct cgraph_node *node, struct cgraph_node **array)
+static void
+cgraph_function_and_variable_visibility (void)
{
- int nfound = 0;
- struct cgraph_edge **stack;
- struct cgraph_edge *e, *e1;
- int sp;
- int i;
-
- /* Fast path: since we traverse in mostly topological order, we will likely
- find no edges. */
- for (e = node->callees; e; e = e->next_callee)
- if (e->inline_call)
- break;
-
- if (!e)
- return 0;
-
- /* Allocate stack for back-tracking up callgraph. */
- stack = xmalloc ((cgraph_n_nodes + 1) * sizeof (struct cgraph_edge));
- sp = 0;
-
- /* Push the first edge on to the stack. */
- stack[sp++] = e;
+ struct cgraph_node *node;
+ struct cgraph_varpool_node *vnode;
- while (sp)
+ for (node = cgraph_nodes; node; node = node->next)
{
- struct cgraph_node *callee;
-
- /* Look at the edge on the top of the stack. */
- e = stack[sp - 1];
- callee = e->callee;
-
- /* Check if the callee destination has been visited yet. */
- if (!callee->output)
+ if (node->reachable
+ && (DECL_COMDAT (node->decl)
+ || (TREE_PUBLIC (node->decl) && !DECL_EXTERNAL (node->decl))))
+ node->local.externally_visible = true;
+ if (!node->local.externally_visible && node->analyzed
+ && !DECL_EXTERNAL (node->decl))
{
- array[nfound++] = e->callee;
- /* Mark that we have visited the destination. */
- callee->output = true;
- SET_INLINED_TIMES (callee, 0);
+ gcc_assert (flag_whole_program || !TREE_PUBLIC (node->decl));
+ TREE_PUBLIC (node->decl) = 0;
}
- SET_INLINED_TIMES (callee, INLINED_TIMES (callee) + 1);
-
- for (e1 = callee->callees; e1; e1 = e1->next_callee)
- if (e1->inline_call)
- break;
- if (e1)
- stack[sp++] = e1;
- else
+ node->local.local = (!node->needed
+ && node->analyzed
+ && !DECL_EXTERNAL (node->decl)
+ && !node->local.externally_visible);
+ }
+ for (vnode = cgraph_varpool_nodes_queue; vnode; vnode = vnode->next_needed)
+ {
+ if (vnode->needed
+ && (DECL_COMDAT (vnode->decl) || TREE_PUBLIC (vnode->decl)))
+ vnode->externally_visible = 1;
+ if (!vnode->externally_visible)
{
- while (true)
- {
- for (e1 = e->next_callee; e1; e1 = e1->next_callee)
- if (e1->inline_call)
- break;
-
- if (e1)
- {
- stack[sp - 1] = e1;
- break;
- }
- else
- {
- sp--;
- if (!sp)
- break;
- e = stack[sp - 1];
- }
- }
+ gcc_assert (flag_whole_program || !TREE_PUBLIC (vnode->decl));
+ TREE_PUBLIC (vnode->decl) = 0;
}
+ gcc_assert (TREE_STATIC (vnode->decl));
}
- free (stack);
+ /* Because we have to be conservative on the boundaries of source
+ level units, it is possible that we marked some functions as
+ reachable just because they might be used later via external
+ linkage, but after making them local they are really unreachable
+ now. */
+ cgraph_remove_unreachable_nodes (true, cgraph_dump_file);
if (cgraph_dump_file)
{
- fprintf (cgraph_dump_file, "Found inline successors of %s:",
- cgraph_node_name (node));
- for (i = 0; i < nfound; i++)
- {
- fprintf (cgraph_dump_file, " %s", cgraph_node_name (array[i]));
- if (INLINED_TIMES (array[i]) != 1)
- fprintf (cgraph_dump_file, " (%i times)",
- (int)INLINED_TIMES (array[i]));
- }
- fprintf (cgraph_dump_file, "\n");
+ fprintf (cgraph_dump_file, "\nMarking local functions:");
+ for (node = cgraph_nodes; node; node = node->next)
+ if (node->local.local)
+ fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
+ fprintf (cgraph_dump_file, "\n\n");
+ fprintf (cgraph_dump_file, "\nMarking externally visible functions:");
+ for (node = cgraph_nodes; node; node = node->next)
+ if (node->local.externally_visible)
+ fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
+ fprintf (cgraph_dump_file, "\n\n");
}
-
- return nfound;
+ cgraph_function_flags_ready = true;
}
-/* Estimate size of the function after inlining WHAT into TO. */
-
-static int
-cgraph_estimate_size_after_inlining (int times, struct cgraph_node *to,
- struct cgraph_node *what)
-{
- return (what->global.insns - INSNS_PER_CALL) *times + to->global.insns;
-}
-
-/* Estimate the growth caused by inlining NODE into all callees. */
-
-static int
-cgraph_estimate_growth (struct cgraph_node *node)
+/* Return true when function body of DECL still needs to be kept around
+ for later re-use. */
+bool
+cgraph_preserve_function_body_p (tree decl)
{
- int growth = 0;
- int calls_saved = 0;
- int clones_added = 0;
- struct cgraph_edge *e;
-
- for (e = node->callers; e; e = e->next_caller)
- if (!e->inline_call)
- {
- growth += ((cgraph_estimate_size_after_inlining (1, e->caller, node)
- -
- e->caller->global.insns) *e->caller->global.cloned_times);
- calls_saved += e->caller->global.cloned_times;
- clones_added += e->caller->global.cloned_times;
- }
-
- /* ??? Wrong for self recursive functions or cases where we decide to not
- inline for different reasons, but it is not big deal as in that case
- we will keep the body around, but we will also avoid some inlining. */
- if (!node->needed && !node->origin && !DECL_EXTERNAL (node->decl))
- growth -= node->global.insns, clones_added--;
-
- if (!calls_saved)
- calls_saved = 1;
-
- return growth;
+ struct cgraph_node *node;
+ /* Keep the body; we're going to dump it. */
+ if (dump_enabled_p (TDI_tree_all))
+ return true;
+ if (!cgraph_global_info_ready)
+ return (DECL_INLINE (decl) && !flag_really_no_inline);
+ /* Look if there is any clone around. */
+ for (node = cgraph_node (decl); node; node = node->next_clone)
+ if (node->global.inlined_to)
+ return true;
+ return false;
}
-/* Update insn sizes after inlining WHAT into TO that is already inlined into
- all nodes in INLINED array. */
+/* Perform simple optimizations based on callgraph. */
-static void
-cgraph_mark_inline (struct cgraph_node *to, struct cgraph_node *what,
- struct cgraph_node **inlined, int ninlined,
- struct cgraph_node **inlined_callees,
- int ninlined_callees)
+void
+cgraph_optimize (void)
{
- int i;
- int times = 0;
- int clones = 0;
- struct cgraph_edge *e;
- bool called = false;
- int new_insns;
-
- for (e = what->callers; e; e = e->next_caller)
+#ifdef ENABLE_CHECKING
+ verify_cgraph ();
+#endif
+ if (!flag_unit_at_a_time)
{
- if (e->caller == to)
- {
- if (e->inline_call)
- abort ();
- e->inline_call = true;
- times++;
- clones += e->caller->global.cloned_times;
- }
- else if (!e->inline_call)
- called = true;
+ cgraph_varpool_assemble_pending_decls ();
+ return;
}
- if (!times)
- abort ();
- ncalls_inlined += times;
- new_insns = cgraph_estimate_size_after_inlining (times, to, what);
- if (to->global.will_be_output)
- overall_insns += new_insns - to->global.insns;
- to->global.insns = new_insns;
+ process_pending_assemble_externals ();
+
+ /* Frontend may output common variables after the unit has been finalized.
+ It is safe to deal with them here as they are always zero initialized. */
+ cgraph_varpool_analyze_pending_decls ();
- if (!called && !what->needed && !what->origin
- && !DECL_EXTERNAL (what->decl))
- {
- if (!what->global.will_be_output)
- abort ();
- clones--;
- nfunctions_inlined++;
- what->global.will_be_output = 0;
- overall_insns -= what->global.insns;
- }
- what->global.cloned_times += clones;
- for (i = 0; i < ninlined; i++)
+ timevar_push (TV_CGRAPHOPT);
+ if (!quiet_flag)
+ fprintf (stderr, "Performing intraprocedural optimizations\n");
+
+ cgraph_function_and_variable_visibility ();
+ if (cgraph_dump_file)
{
- new_insns =
- cgraph_estimate_size_after_inlining (INLINED_TIMES (inlined[i]) *
- times, inlined[i], what);
- if (inlined[i]->global.will_be_output)
- overall_insns += new_insns - inlined[i]->global.insns;
- inlined[i]->global.insns = new_insns;
+ fprintf (cgraph_dump_file, "Marked ");
+ dump_cgraph (cgraph_dump_file);
}
- for (i = 0; i < ninlined_callees; i++)
+ ipa_passes ();
+ /* This pass removes bodies of extern inline functions we never inlined.
+ Do this later so other IPA passes see what is really going on. */
+ cgraph_remove_unreachable_nodes (false, dump_file);
+ cgraph_global_info_ready = true;
+ if (cgraph_dump_file)
{
- inlined_callees[i]->global.cloned_times +=
- INLINED_TIMES (inlined_callees[i]) * clones;
+ fprintf (cgraph_dump_file, "Optimized ");
+ dump_cgraph (cgraph_dump_file);
+ dump_varpool (cgraph_dump_file);
}
-}
-
-/* Return false when inlining WHAT into TO is not good idea as it would cause
- too large growth of function bodies. */
-
-static bool
-cgraph_check_inline_limits (struct cgraph_node *to, struct cgraph_node *what,
- struct cgraph_node **inlined, int ninlined)
-{
- int i;
- int times = 0;
- struct cgraph_edge *e;
- int newsize;
- int limit;
-
- for (e = to->callees; e; e = e->next_callee)
- if (e->callee == what)
- times++;
+ timevar_pop (TV_CGRAPHOPT);
- /* When inlining large function body called once into small function,
- take the inlined function as base for limiting the growth. */
- if (to->local.self_insns > what->local.self_insns)
- limit = to->local.self_insns;
- else
- limit = what->local.self_insns;
+ /* Output everything. */
+ if (!quiet_flag)
+ fprintf (stderr, "Assembling functions:\n");
+#ifdef ENABLE_CHECKING
+ verify_cgraph ();
+#endif
+
+ cgraph_mark_functions_to_output ();
+ cgraph_expand_all_functions ();
+ cgraph_varpool_remove_unreferenced_decls ();
- limit += limit * PARAM_VALUE (PARAM_LARGE_FUNCTION_GROWTH) / 100;
+ cgraph_varpool_assemble_pending_decls ();
- newsize = cgraph_estimate_size_after_inlining (times, to, what);
- if (newsize > PARAM_VALUE (PARAM_LARGE_FUNCTION_INSNS)
- && newsize > limit)
- return false;
- for (i = 0; i < ninlined; i++)
+ if (cgraph_dump_file)
{
- newsize =
- cgraph_estimate_size_after_inlining (INLINED_TIMES (inlined[i]) *
- times, inlined[i], what);
- if (newsize > PARAM_VALUE (PARAM_LARGE_FUNCTION_INSNS)
- && newsize >
- inlined[i]->local.self_insns *
- (100 + PARAM_VALUE (PARAM_LARGE_FUNCTION_GROWTH)) / 100)
- return false;
+ fprintf (cgraph_dump_file, "\nFinal ");
+ dump_cgraph (cgraph_dump_file);
}
- return true;
-}
-
-/* Return true when function N is small enought to be inlined. */
-
-static bool
-cgraph_default_inline_p (struct cgraph_node *n)
-{
- if (!DECL_INLINE (n->decl) || !DECL_SAVED_TREE (n->decl))
- return false;
- if (DECL_DECLARED_INLINE_P (n->decl))
- return n->global.insns < MAX_INLINE_INSNS_SINGLE;
- else
- return n->global.insns < MAX_INLINE_INSNS_AUTO;
-}
-
-/* We use greedy algorithm for inlining of small functions:
- All inline candidates are put into prioritized heap based on estimated
- growth of the overall number of instructions and then update the estimates.
-
- INLINED and INLINED_CALEES are just pointers to arrays large enought
- to be passed to cgraph_inlined_into and cgraph_inlined_callees. */
-
-static void
-cgraph_decide_inlining_of_small_functions (struct cgraph_node **inlined,
- struct cgraph_node **inlined_callees)
-{
- int i;
- struct cgraph_node *node;
- fibheap_t heap = fibheap_new ();
- struct fibnode **heap_node =
- xcalloc (cgraph_max_uid, sizeof (struct fibnode *));
- int ninlined, ninlined_callees;
- int max_insns = ((HOST_WIDEST_INT) initial_insns
- * (100 + PARAM_VALUE (PARAM_INLINE_UNIT_GROWTH)) / 100);
-
- /* Put all inline candidates into the heap. */
-
- for (node = cgraph_nodes; node; node = node->next)
+#ifdef ENABLE_CHECKING
+ verify_cgraph ();
+ /* Double check that all inline clones are gone and that all
+ function bodies have been released from memory. */
+ if (flag_unit_at_a_time
+ && !dump_enabled_p (TDI_tree_all)
+ && !(sorrycount || errorcount))
{
- struct cgraph_edge *e;
+ struct cgraph_node *node;
+ bool error_found = false;
- if (!node->local.inlinable || !node->callers
- || !cgraph_default_inline_p (node))
- continue;
-
- /* Rule out always_inline functions we dealt with earlier. */
- for (e = node->callers; e; e = e->next_caller)
- if (e->inline_call)
- break;
- if (e)
- continue;
- heap_node[node->uid] =
- fibheap_insert (heap, cgraph_estimate_growth (node), node);
- }
-
- if (cgraph_dump_file)
- fprintf (cgraph_dump_file, "\n\nDeciding on inlining: ");
- while ((node = fibheap_extract_min (heap)) && overall_insns <= max_insns)
- {
- struct cgraph_edge *e;
- int old_insns = overall_insns;
-
- heap_node[node->uid] = NULL;
- if (cgraph_dump_file)
- fprintf (cgraph_dump_file, "Considering %s %i insns, growth %i.\n",
- cgraph_node_name (node), node->global.insns,
- cgraph_estimate_growth (node));
- if (!cgraph_default_inline_p (node))
- {
- if (cgraph_dump_file)
- fprintf (cgraph_dump_file, "Function too large.\n");
- continue;
- }
- ninlined_callees = cgraph_inlined_callees (node, inlined_callees);
- for (e = node->callers; e; e = e->next_caller)
- if (!e->inline_call && e->caller != node)
+ for (node = cgraph_nodes; node; node = node->next)
+ if (node->analyzed
+ && (node->global.inlined_to
+ || DECL_SAVED_TREE (node->decl)))
{
- ninlined = cgraph_inlined_into (e->caller, inlined);
- if (e->callee->output
- || !cgraph_check_inline_limits (e->caller, node, inlined,
- ninlined))
- {
- for (i = 0; i < ninlined; i++)
- inlined[i]->output = 0, node->aux = 0;
- if (cgraph_dump_file)
- fprintf (cgraph_dump_file, "Not inlining into %s\n",
- cgraph_node_name (e->caller));
- continue;
- }
- cgraph_mark_inline (e->caller, node, inlined, ninlined,
- inlined_callees, ninlined_callees);
- if (heap_node[e->caller->uid])
- fibheap_replace_key (heap, heap_node[e->caller->uid],
- cgraph_estimate_growth (e->caller));
-
- /* Size of the functions we updated into has changed, so update
- the keys. */
- for (i = 0; i < ninlined; i++)
- {
- inlined[i]->output = 0, node->aux = 0;
- if (heap_node[inlined[i]->uid])
- fibheap_replace_key (heap, heap_node[inlined[i]->uid],
- cgraph_estimate_growth (inlined[i]));
- }
- }
-
- /* Similarly all functions called by function we just inlined
- are now called more times; update keys. */
-
- for (e = node->callees; e; e = e->next_callee)
- if (!e->inline_call && heap_node[e->callee->uid])
- fibheap_replace_key (heap, heap_node[e->callee->uid],
- cgraph_estimate_growth (e->callee));
-
- for (i = 0; i < ninlined_callees; i++)
- {
- struct cgraph_edge *e;
-
- for (e = inlined_callees[i]->callees; e; e = e->next_callee)
- if (!e->inline_call && heap_node[e->callee->uid])
- fibheap_replace_key (heap, heap_node[e->callee->uid],
- cgraph_estimate_growth (e->callee));
-
- inlined_callees[i]->output = 0, node->aux = 0;
- }
- if (cgraph_dump_file)
- fprintf (cgraph_dump_file,
- "Created %i clones, Num insns:%i (%+i), %.2f%%.\n\n",
- node->global.cloned_times - 1,
- overall_insns, overall_insns - old_insns,
- overall_insns * 100.0 / initial_insns);
+ error_found = true;
+ dump_cgraph_node (stderr, node);
+ }
+ if (error_found)
+ internal_error ("Nodes with no released memory found.");
}
- if (cgraph_dump_file && !fibheap_empty (heap))
- fprintf (cgraph_dump_file, "inline-unit-growth limit reached.\n");
- fibheap_delete (heap);
- free (heap_node);
+#endif
}
-/* Decide on the inlining. We do so in the topological order to avoid
- expenses on updating datastructures. */
+/* Generate and emit a static constructor or destructor. WHICH must be
+ one of 'I' or 'D'. BODY should be a STATEMENT_LIST containing
+ GENERIC statements. */
-static void
-cgraph_decide_inlining (void)
+void
+cgraph_build_static_cdtor (char which, tree body, int priority)
{
- struct cgraph_node *node;
- int nnodes;
- struct cgraph_node **order =
- xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
- struct cgraph_node **inlined =
- xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
- struct cgraph_node **inlined_callees =
- xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
- int ninlined;
- int ninlined_callees;
- int i, y;
-
- for (node = cgraph_nodes; node; node = node->next)
- initial_insns += node->local.self_insns;
- overall_insns = initial_insns;
+ static int counter = 0;
+ char which_buf[16];
+ tree decl, name, resdecl;
- nnodes = cgraph_postorder (order);
+ sprintf (which_buf, "%c_%d", which, counter++);
+ name = get_file_function_name_long (which_buf);
- for (node = cgraph_nodes; node; node = node->next)
- node->aux = 0;
-
- if (cgraph_dump_file)
- fprintf (cgraph_dump_file, "\n\nDeciding on always_inline functions:\n");
-
- /* In the first pass mark all always_inline edges. Do this with a priority
- so no our decisions makes this impossible. */
- for (i = nnodes - 1; i >= 0; i--)
- {
- struct cgraph_edge *e;
+ decl = build_decl (FUNCTION_DECL, name,
+ build_function_type (void_type_node, void_list_node));
+ current_function_decl = decl;
- node = order[i];
+ resdecl = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
+ DECL_ARTIFICIAL (resdecl) = 1;
+ DECL_IGNORED_P (resdecl) = 1;
+ DECL_RESULT (decl) = resdecl;
- for (e = node->callees; e; e = e->next_callee)
- if (e->callee->local.disregard_inline_limits)
- break;
- if (!e)
- continue;
- if (cgraph_dump_file)
- fprintf (cgraph_dump_file,
- "Considering %s %i insns (always inline)\n",
- cgraph_node_name (node), node->global.insns);
- ninlined = cgraph_inlined_into (order[i], inlined);
- for (; e; e = e->next_callee)
- {
- if (e->inline_call || !e->callee->local.disregard_inline_limits)
- continue;
- if (e->callee->output || e->callee == node)
- continue;
- ninlined_callees =
- cgraph_inlined_callees (e->callee, inlined_callees);
- cgraph_mark_inline (node, e->callee, inlined, ninlined,
- inlined_callees, ninlined_callees);
- for (y = 0; y < ninlined_callees; y++)
- inlined_callees[y]->output = 0, node->aux = 0;
- if (cgraph_dump_file)
- fprintf (cgraph_dump_file, "Inlined %i times. Now %i insns\n\n",
- node->global.cloned_times, overall_insns);
- }
- for (y = 0; y < ninlined; y++)
- inlined[y]->output = 0, node->aux = 0;
- }
+ allocate_struct_function (decl);
- cgraph_decide_inlining_of_small_functions (inlined, inlined_callees);
+ TREE_STATIC (decl) = 1;
+ TREE_USED (decl) = 1;
+ DECL_ARTIFICIAL (decl) = 1;
+ DECL_IGNORED_P (decl) = 1;
+ DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
+ DECL_SAVED_TREE (decl) = body;
+ TREE_PUBLIC (decl) = ! targetm.have_ctors_dtors;
+ DECL_UNINLINABLE (decl) = 1;
- if (cgraph_dump_file)
- fprintf (cgraph_dump_file, "\n\nFunctions to inline once:\n");
+ DECL_INITIAL (decl) = make_node (BLOCK);
+ TREE_USED (DECL_INITIAL (decl)) = 1;
- /* And finally decide what functions are called once. */
+ DECL_SOURCE_LOCATION (decl) = input_location;
+ cfun->function_end_locus = input_location;
- for (i = nnodes - 1; i >= 0; i--)
+ switch (which)
{
- node = order[i];
-
- if (node->callers && !node->callers->next_caller && !node->needed
- && node->local.inlinable && !node->callers->inline_call
- && !DECL_EXTERNAL (node->decl) && !DECL_COMDAT (node->decl))
- {
- bool ok = true;
- struct cgraph_node *node1;
-
- /* Verify that we won't duplicate the caller. */
- for (node1 = node->callers->caller;
- node1->callers && node1->callers->inline_call
- && ok; node1 = node1->callers->caller)
- if (node1->callers->next_caller || node1->needed)
- ok = false;
- if (ok)
- {
- if (cgraph_dump_file)
- fprintf (cgraph_dump_file,
- "Considering %s %i insns (called once)\n",
- cgraph_node_name (node), node->global.insns);
- ninlined = cgraph_inlined_into (node->callers->caller, inlined);
- if (cgraph_check_inline_limits
- (node->callers->caller, node, inlined, ninlined))
- {
- ninlined_callees =
- cgraph_inlined_callees (node, inlined_callees);
- cgraph_mark_inline (node->callers->caller, node, inlined,
- ninlined, inlined_callees,
- ninlined_callees);
- for (y = 0; y < ninlined_callees; y++)
- inlined_callees[y]->output = 0, node->aux = 0;
- if (cgraph_dump_file)
- fprintf (cgraph_dump_file, "Inlined. Now %i insns\n\n", overall_insns);
- }
- for (y = 0; y < ninlined; y++)
- inlined[y]->output = 0, node->aux = 0;
- }
- }
+ case 'I':
+ DECL_STATIC_CONSTRUCTOR (decl) = 1;
+ break;
+ case 'D':
+ DECL_STATIC_DESTRUCTOR (decl) = 1;
+ break;
+ default:
+ gcc_unreachable ();
}
- if (cgraph_dump_file)
- fprintf (cgraph_dump_file,
- "\nInlined %i calls, elliminated %i functions, %i insns turned to %i insns.\n",
- ncalls_inlined, nfunctions_inlined, initial_insns,
- overall_insns);
- free (order);
- free (inlined);
- free (inlined_callees);
-}
-
-/* Return true when CALLER_DECL should be inlined into CALLEE_DECL. */
-
-bool
-cgraph_inline_p (tree caller_decl, tree callee_decl)
-{
- struct cgraph_node *caller = cgraph_node (caller_decl);
- struct cgraph_node *callee = cgraph_node (callee_decl);
- struct cgraph_edge *e;
-
- for (e = caller->callees; e; e = e->next_callee)
- if (e->callee == callee)
- return e->inline_call;
- /* We do not record builtins in the callgraph. Perhaps it would make more
- sense to do so and then prune out those not overwritten by explicit
- function body. */
- return false;
-}
-/* Expand all functions that must be output.
-
- Attempt to topologically sort the nodes so function is output when
- all called functions are already assembled to allow data to be
- propagated across the callgraph. Use a stack to get smaller distance
- between a function and it's callees (later we may choose to use a more
- sophisticated algorithm for function reordering; we will likely want
- to use subsections to make the output functions appear in top-down
- order). */
-
-static void
-cgraph_expand_functions (void)
-{
- struct cgraph_node *node;
- struct cgraph_node **order =
- xcalloc (cgraph_n_nodes, sizeof (struct cgraph_node *));
- int order_pos = 0;
- int i;
-
- cgraph_mark_functions_to_output ();
-
- order_pos = cgraph_postorder (order);
+ gimplify_function_tree (decl);
- for (i = order_pos - 1; i >= 0; i--)
+ /* ??? We will get called LATE in the compilation process. */
+ if (cgraph_global_info_ready)
{
- node = order[i];
- if (node->output)
- {
- if (!node->reachable)
- abort ();
- node->output = 0;
- cgraph_expand_function (node);
- }
+ tree_lowering_passes (decl);
+ tree_rest_of_compilation (decl);
}
- free (order);
-}
-
-/* Mark all local functions.
-
- A local function is one whose calls can occur only in the
- current compilation unit, so we change its calling convention.
- We simply mark all static functions whose address is not taken
- as local. */
-
-static void
-cgraph_mark_local_functions (void)
-{
- struct cgraph_node *node;
-
- if (cgraph_dump_file)
- fprintf (cgraph_dump_file, "Marking local functions:");
-
- /* Figure out functions we want to assemble. */
- for (node = cgraph_nodes; node; node = node->next)
+ else
+ cgraph_finalize_function (decl, 0);
+
+ if (targetm.have_ctors_dtors)
{
- node->local.local = (!node->needed
- && DECL_SAVED_TREE (node->decl)
- && !TREE_PUBLIC (node->decl));
- if (cgraph_dump_file && node->local.local)
- fprintf (cgraph_dump_file, " %s", cgraph_node_name (node));
+ void (*fn) (rtx, int);
+
+ if (which == 'I')
+ fn = targetm.asm_out.constructor;
+ else
+ fn = targetm.asm_out.destructor;
+ fn (XEXP (DECL_RTL (decl), 0), priority);
}
- if (cgraph_dump_file)
- fprintf (cgraph_dump_file, "\n");
}
-/* Perform simple optimizations based on callgraph. */
-
void
-cgraph_optimize (void)
+init_cgraph (void)
{
- if (!flag_unit_at_a_time)
- return;
- timevar_push (TV_CGRAPHOPT);
- if (!quiet_flag)
- fprintf (stderr, "Performing intraprocedural optimizations\n");
- if (cgraph_dump_file)
- {
- fprintf (cgraph_dump_file, "Initial callgraph:");
- dump_cgraph (cgraph_dump_file);
- }
- cgraph_mark_local_functions ();
-
- cgraph_decide_inlining ();
-
- cgraph_global_info_ready = true;
- if (cgraph_dump_file)
- {
- fprintf (cgraph_dump_file, "Optimized callgraph:");
- dump_cgraph (cgraph_dump_file);
- }
- timevar_pop (TV_CGRAPHOPT);
- if (!quiet_flag)
- fprintf (stderr, "Assembling functions:");
-
- /* Output everything. */
- cgraph_expand_functions ();
- if (cgraph_dump_file)
- {
- fprintf (cgraph_dump_file, "Final callgraph:");
- dump_cgraph (cgraph_dump_file);
- }
+ cgraph_dump_file = dump_begin (TDI_cgraph, NULL);
}