#include "gimple.h"
#include "tree-iterator.h"
#include "tree-pass.h"
+#include "tree-dump.h"
#include "output.h"
#include "coverage.h"
static void cgraph_mark_functions_to_output (void);
static void cgraph_expand_function (struct cgraph_node *);
static void cgraph_output_pending_asms (void);
+static void cgraph_analyze_function (struct cgraph_node *);
static FILE *cgraph_dump_file;
priority = p;
else if (p != priority)
break;
- append_to_statement_list (build_function_call_expr (fn, 0),
+ append_to_statement_list (build_function_call_expr (UNKNOWN_LOCATION,
+ fn, 0),
&body);
++i;
}
either outside this translation unit, something magic in the system
configury. */
-static bool
-decide_is_function_needed (struct cgraph_node *node, tree decl)
+bool
+cgraph_decide_is_function_needed (struct cgraph_node *node, tree decl)
{
- if (MAIN_NAME_P (DECL_NAME (decl))
- && TREE_PUBLIC (decl))
- {
- node->local.externally_visible = true;
- return true;
- }
-
/* If the user told us it is used, then it must be so. */
if (node->local.externally_visible)
return true;
|| (!optimize && !node->local.disregard_inline_limits
&& !DECL_DECLARED_INLINE_P (decl)
&& !node->origin))
- && !flag_whole_program)
+ && !flag_whole_program
+ && !flag_lto
+ && !flag_whopr)
&& !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
return true;
{
if (node->lowered)
return;
+
+ if (node->nested)
+ lower_nested_functions (node->decl);
+ gcc_assert (!node->nested);
+
tree_lowering_passes (node->decl);
node->lowered = true;
}
notice_global_symbol (decl);
node->local.finalized = true;
node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
+ node->finalized_by_frontend = true;
record_cdtor_fn (node->decl);
- if (node->nested)
- lower_nested_functions (decl);
- gcc_assert (!node->nested);
- if (decide_is_function_needed (node, decl))
+ if (cgraph_decide_is_function_needed (node, decl))
cgraph_mark_needed_node (node);
/* Since we reclaim unreachable nodes at the end of every language
cgraph_mark_if_needed (tree decl)
{
struct cgraph_node *node = cgraph_node (decl);
- if (node->local.finalized && decide_is_function_needed (node, decl))
+ if (node->local.finalized && cgraph_decide_is_function_needed (node, decl))
cgraph_mark_needed_node (node);
}
error ("Execution count is negative");
error_found = true;
}
+ if (node->global.inlined_to && node->local.externally_visible)
+ {
+ error ("Externally visible inline clone");
+ error_found = true;
+ }
+ if (node->global.inlined_to && node->address_taken)
+ {
+ error ("Inline clone with address taken");
+ error_found = true;
+ }
+ if (node->global.inlined_to && node->needed)
+ {
+ error ("Inline clone is needed");
+ error_found = true;
+ }
for (e = node->callers; e; e = e->next_caller)
{
if (e->count < 0)
if (node->analyzed && gimple_has_body_p (node->decl)
&& !TREE_ASM_WRITTEN (node->decl)
- && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to))
+ && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
+ && !flag_wpa)
{
if (this_cfun->cfg)
{
}
/* Analyze the function scheduled to be output. */
-void
+static void
cgraph_analyze_function (struct cgraph_node *node)
{
+ tree save = current_function_decl;
tree decl = node->decl;
current_function_decl = decl;
push_cfun (DECL_STRUCT_FUNCTION (decl));
+
+ /* Make sure to gimplify bodies only once. During analyzing a
+ function we lower it, which will require gimplified nested
+ functions, so we can end up here with an already gimplified
+ body. */
+ if (!gimple_body (decl))
+ gimplify_function_tree (decl);
+ dump_function (TDI_generic, decl);
+
cgraph_lower_function (node);
node->analyzed = true;
pop_cfun ();
- current_function_decl = NULL;
+ current_function_decl = save;
}
/* Look for externally_visible and used attributes and mark cgraph nodes
if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
{
if (! TREE_PUBLIC (node->decl))
- warning (OPT_Wattributes,
- "%J%<externally_visible%> attribute have effect only on public objects",
- node->decl);
- else
- {
- if (node->local.finalized)
- cgraph_mark_needed_node (node);
- node->local.externally_visible = true;
- }
+ warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
+ "%<externally_visible%>"
+		    " attribute has effect only on public objects");
+ else if (node->local.finalized)
+ cgraph_mark_needed_node (node);
}
}
for (vnode = varpool_nodes; vnode != first_var; vnode = vnode->next)
if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
{
if (! TREE_PUBLIC (vnode->decl))
- warning (OPT_Wattributes,
- "%J%<externally_visible%> attribute have effect only on public objects",
- vnode->decl);
- else
- {
- if (vnode->finalized)
- varpool_mark_needed_node (vnode);
- vnode->externally_visible = true;
- }
+ warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
+ "%<externally_visible%>"
+		    " attribute has effect only on public objects");
+ else if (vnode->finalized)
+ varpool_mark_needed_node (vnode);
}
}
}
continue;
}
- gcc_assert (!node->analyzed && node->reachable);
- gcc_assert (gimple_body (decl));
-
- cgraph_analyze_function (node);
+ if (!node->analyzed)
+ cgraph_analyze_function (node);
for (edge = node->callees; edge; edge = edge->next_callee)
if (!edge->callee->reachable)
ggc_collect ();
}
+
+/* Emit thunks for every node in the cgraph.
+ FIXME: We really ought to emit thunks only for functions that are needed. */
+
+static void
+cgraph_emit_thunks (void)
+{
+ struct cgraph_node *n;
+
+ for (n = cgraph_nodes; n; n = n->next)
+ {
+ /* Only emit thunks on functions defined in this TU.
+ Note that this may emit more thunks than strictly necessary.
+ During optimization some nodes may disappear. It would be
+	 nice to emit thunks only for the functions that will be
+ emitted, but we cannot know that until the inliner and other
+ IPA passes have run (see the sequencing of the call to
+ cgraph_mark_functions_to_output in cgraph_optimize). */
+ if (n->reachable
+ && !DECL_EXTERNAL (n->decl))
+ lang_hooks.callgraph.emit_associated_thunks (n->decl);
+ }
+}
+
+
/* Analyze the whole compilation unit once it is parsed completely. */
void
cgraph_finalize_compilation_unit (void)
{
- if (errorcount || sorrycount)
- return;
+ timevar_push (TV_CGRAPH);
+
+  /* Do not skip analyzing the functions if there were errors; we
+ miss diagnostics for following functions otherwise. */
+ /* Emit size functions we didn't inline. */
+ finalize_size_functions ();
+
+ /* Call functions declared with the "constructor" or "destructor"
+ attribute. */
+ cgraph_build_cdtor_fns ();
+
+ /* Mark alias targets necessary and emit diagnostics. */
finish_aliases_1 ();
if (!quiet_flag)
fflush (stderr);
}
- timevar_push (TV_CGRAPH);
+ /* Gimplify and lower all functions, compute reachability and
+ remove unreachable nodes. */
+ cgraph_analyze_functions ();
+
+ /* Emit thunks for reachable nodes, if needed. */
+ if (lang_hooks.callgraph.emit_associated_thunks)
+ cgraph_emit_thunks ();
+
+ /* Mark alias targets necessary and emit diagnostics. */
+ finish_aliases_1 ();
+
+ /* Gimplify and lower thunks. */
cgraph_analyze_functions ();
+
+ /* Finally drive the pass manager. */
+ cgraph_optimize ();
+
timevar_pop (TV_CGRAPH);
}
gcc_assert (node->lowered);
/* Generate RTL for the body of DECL. */
- if (lang_hooks.callgraph.emit_associated_thunks)
- lang_hooks.callgraph.emit_associated_thunks (decl);
tree_rest_of_compilation (decl);
/* Make sure that BE didn't give up on compiling. */
/* This is used to sort the node types by the cgraph order number. */
+enum cgraph_order_sort_kind
+{
+ ORDER_UNDEFINED = 0,
+ ORDER_FUNCTION,
+ ORDER_VAR,
+ ORDER_ASM
+};
+
struct cgraph_order_sort
{
- enum { ORDER_UNDEFINED = 0, ORDER_FUNCTION, ORDER_VAR, ORDER_ASM } kind;
+ enum cgraph_order_sort_kind kind;
union
{
struct cgraph_node *f;
current_function_decl = NULL;
gimple_register_cfg_hooks ();
bitmap_obstack_initialize (NULL);
- execute_ipa_pass_list (all_ipa_passes);
- /* Generate coverage variables and constructors. */
- coverage_finish ();
+ if (!in_lto_p)
+ execute_ipa_pass_list (all_small_ipa_passes);
- /* Process new functions added. */
- set_cfun (NULL);
- current_function_decl = NULL;
- cgraph_process_new_functions ();
+ /* If pass_all_early_optimizations was not scheduled, the state of
+ the cgraph will not be properly updated. Update it now. */
+ if (cgraph_state < CGRAPH_STATE_IPA_SSA)
+ cgraph_state = CGRAPH_STATE_IPA_SSA;
+
+ if (!in_lto_p)
+ {
+ /* Generate coverage variables and constructors. */
+ coverage_finish ();
+
+ /* Process new functions added. */
+ set_cfun (NULL);
+ current_function_decl = NULL;
+ cgraph_process_new_functions ();
+ }
+
+ execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_regular_ipa_passes);
+ execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);
+
+ if (!in_lto_p)
+ ipa_write_summaries ();
+
+ execute_ipa_pass_list (all_regular_ipa_passes);
bitmap_obstack_release (NULL);
}
+
/* Perform simple optimizations based on callgraph. */
void
verify_cgraph ();
#endif
- /* Call functions declared with the "constructor" or "destructor"
- attribute. */
- cgraph_build_cdtor_fns ();
-
/* Frontend may output common variables after the unit has been finalized.
It is safe to deal with them here as they are always zero initialized. */
varpool_analyze_pending_decls ();
- cgraph_analyze_functions ();
timevar_push (TV_CGRAPHOPT);
if (pre_ipa_mem_report)
if (errorcount == 0 && sorrycount == 0)
ipa_passes ();
+ /* Do nothing else if any IPA pass found errors. */
+ if (errorcount || sorrycount)
+ return;
+
/* This pass remove bodies of extern inline functions we never inlined.
Do this later so other IPA passes see what is really going on. */
cgraph_remove_unreachable_nodes (false, dump_file);
}
#endif
}
+
+
/* Generate and emit a static constructor or destructor. WHICH must
be one of 'I' (for a constructor) or 'D' (for a destructor). BODY
is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the
sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
name = get_file_function_name (which_buf);
- decl = build_decl (FUNCTION_DECL, name,
+ decl = build_decl (input_location, FUNCTION_DECL, name,
build_function_type (void_type_node, void_list_node));
current_function_decl = decl;
- resdecl = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
+ resdecl = build_decl (input_location,
+ RESULT_DECL, NULL_TREE, void_type_node);
DECL_ARTIFICIAL (resdecl) = 1;
DECL_RESULT (decl) = resdecl;
DECL_CONTEXT (resdecl) = decl;
{
struct function *inner_function = DECL_STRUCT_FUNCTION (e->caller->decl);
gimple_call_set_fndecl (e->call_stmt, new_version->decl);
- /* Update EH information too, just in case. */
- if (!stmt_could_throw_p (e->call_stmt)
- && lookup_stmt_eh_region_fn (inner_function, e->call_stmt))
- remove_stmt_from_eh_region_fn (inner_function, e->call_stmt);
+ maybe_clean_eh_stmt_fn (inner_function, e->call_stmt);
}
}
also cloned. */
for (e = old_version->callees;e; e=e->next_callee)
{
- new_e = cgraph_clone_edge (e, new_version, e->call_stmt, 0, e->frequency,
+ new_e = cgraph_clone_edge (e, new_version, e->call_stmt,
+ e->lto_stmt_uid, 0, e->frequency,
e->loop_nest, true);
new_e->count = e->count;
}
??? We cannot use COMDAT linkage because there is no
ABI support for this. */
DECL_EXTERNAL (new_version_node->decl) = 0;
- DECL_ONE_ONLY (new_version_node->decl) = 0;
+ DECL_COMDAT_GROUP (new_version_node->decl) = NULL_TREE;
TREE_PUBLIC (new_version_node->decl) = 0;
DECL_COMDAT (new_version_node->decl) = 0;
DECL_WEAK (new_version_node->decl) = 0;
tree_function_versioning (node->decl, first_clone->decl, NULL, true, NULL);
DECL_EXTERNAL (first_clone->decl) = 0;
- DECL_ONE_ONLY (first_clone->decl) = 0;
+ DECL_COMDAT_GROUP (first_clone->decl) = NULL_TREE;
TREE_PUBLIC (first_clone->decl) = 0;
DECL_COMDAT (first_clone->decl) = 0;
+ VEC_free (ipa_opt_pass, heap,
+ DECL_STRUCT_FUNCTION (first_clone->decl)->ipa_transforms_to_apply);
+ DECL_STRUCT_FUNCTION (first_clone->decl)->ipa_transforms_to_apply = NULL;
#ifdef ENABLE_CHECKING
verify_cgraph_node (first_clone);
tree_function_versioning (node->clone_of->decl, node->decl,
node->clone.tree_map, true,
node->clone.args_to_skip);
+ if (cgraph_dump_file)
+ {
+ dump_function_to_file (node->clone_of->decl, cgraph_dump_file, dump_flags);
+ dump_function_to_file (node->decl, cgraph_dump_file, dump_flags);
+ }
/* Function is no longer clone. */
if (node->next_sibling_clone)
if (gimple_has_body_p (node->clone_of->decl))
{
if (cgraph_dump_file)
- fprintf (cgraph_dump_file, " clonning %s to %s",
- cgraph_node_name (node->clone_of),
- cgraph_node_name (node));
+ {
+	  fprintf (cgraph_dump_file, "cloning %s to %s\n",
+ cgraph_node_name (node->clone_of),
+ cgraph_node_name (node));
+ if (node->clone.tree_map)
+ {
+ unsigned int i;
+ fprintf (cgraph_dump_file, " replace map: ");
+ for (i = 0; i < VEC_length (ipa_replace_map_p,
+ node->clone.tree_map);
+ i++)
+ {
+ struct ipa_replace_map *replace_info;
+ replace_info = VEC_index (ipa_replace_map_p,
+ node->clone.tree_map,
+ i);
+ print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
+ fprintf (cgraph_dump_file, " -> ");
+ print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
+ fprintf (cgraph_dump_file, "%s%s;",
+ replace_info->replace_p ? "(replace)":"",
+ replace_info->ref_p ? "(ref)":"");
+ }
+ fprintf (cgraph_dump_file, "\n");
+ }
+ if (node->clone.args_to_skip)
+ {
+ fprintf (cgraph_dump_file, " args_to_skip: ");
+ dump_bitmap (cgraph_dump_file, node->clone.args_to_skip);
+ }
+	  if (node->clone.combined_args_to_skip)
+ {
+ fprintf (cgraph_dump_file, " combined_args_to_skip:");
+ dump_bitmap (cgraph_dump_file, node->clone.combined_args_to_skip);
+ }
+ }
cgraph_materialize_clone (node);
}
else
for (e = node->callees; e; e = e->next_callee)
{
tree decl = gimple_call_fndecl (e->call_stmt);
- if (decl != e->callee->decl)
+      /* When a function gets inlined, indirect inlining might've invented
+	 a new edge for an originally indirect stmt.  Since we are not
+ preserving clones in the original form, we must not update here
+ since other inline clones don't need to contain call to the same
+ call. Inliner will do the substitution for us later. */
+ if (decl && decl != e->callee->decl)
{
gimple new_stmt;
gimple_stmt_iterator gsi;
print_gimple_stmt (cgraph_dump_file, e->call_stmt, 0, dump_flags);
}
- if (e->callee->clone.args_to_skip)
+ if (e->callee->clone.combined_args_to_skip)
new_stmt = gimple_call_copy_skip_args (e->call_stmt,
- e->callee->clone.args_to_skip);
+ e->callee->clone.combined_args_to_skip);
else
new_stmt = e->call_stmt;
if (gimple_vdef (new_stmt)
gsi_replace (&gsi, new_stmt, true);
/* Update EH information too, just in case. */
- if (!stmt_could_throw_p (new_stmt)
- && lookup_stmt_eh_region (new_stmt))
- remove_stmt_from_eh_region (new_stmt);
+ maybe_clean_or_replace_eh_stmt (e->call_stmt, new_stmt);
cgraph_set_call_stmt_including_clones (node, e->call_stmt, new_stmt);
verify_cgraph_node (node);
#endif
}
+#ifdef ENABLE_CHECKING
+ verify_cgraph ();
+#endif
cgraph_remove_unreachable_nodes (false, cgraph_dump_file);
}