tree fndecl;
struct cgraph_node *node;
+ varpool_analyze_pending_decls ();
/* Note that this queue may grow as it's being processed, as the new
functions may generate new ones. */
while (cgraph_new_nodes)
break;
}
cgraph_call_function_insertion_hooks (node);
+ varpool_analyze_pending_decls ();
}
return output;
}
{
tree decl = node->decl;
if (DECL_PRESERVE_P (decl))
- {
- mark_decl_referenced (decl);
- if (node->local.finalized)
- cgraph_mark_needed_node (node);
- }
+ cgraph_mark_needed_node (node);
if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
{
if (! TREE_PUBLIC (node->decl))
tree decl = vnode->decl;
if (DECL_PRESERVE_P (decl))
{
- mark_decl_referenced (decl);
vnode->force_output = true;
if (vnode->finalized)
varpool_mark_needed_node (vnode);
if (node->analyzed
&& !node->global.inlined_to
&& (node->needed || node->reachable_from_other_partition
+ || node->address_taken
|| (e && node->reachable))
&& !TREE_ASM_WRITTEN (decl)
&& !DECL_EXTERNAL (decl))
cgraph_remove_same_body_alias (node);
/* Since we want to emit the thunk, we explicitly mark its name as
referenced. */
- mark_decl_referenced (thunk_fndecl);
cgraph_add_new_function (thunk_fndecl, true);
bitmap_obstack_release (NULL);
}
execute_ipa_summary_passes
((struct ipa_opt_pass_d *) all_regular_ipa_passes);
}
+
+ /* Some targets need to handle LTO assembler output specially. */
+ if (flag_generate_lto)
+ targetm.asm_out.lto_start ();
+
execute_ipa_summary_passes ((struct ipa_opt_pass_d *) all_lto_gen_passes);
if (!in_lto_p)
ipa_write_summaries ();
+ if (flag_generate_lto)
+ targetm.asm_out.lto_end ();
+
if (!flag_ltrans)
execute_ipa_pass_list (all_regular_ipa_passes);
invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);
}
}
cgraph_materialize_clone (node);
+ stabilized = false;
}
- else
- stabilized = false;
}
}
}