1 /* Write and read the cgraph to the memory mapped representation of a
4 Copyright 2009 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
34 #include "langhooks.h"
35 #include "basic-block.h"
36 #include "tree-flow.h"
40 #include "diagnostic.h"
45 #include "pointer-set.h"
46 #include "lto-streamer.h"
49 static void output_varpool (cgraph_node_set, varpool_node_set);
51 /* Cgraph streaming is organized as a set of records whose type
52 is indicated by a tag. */
55 /* Must leave 0 for the stopper. */
57 /* Cgraph node without body available. */
58 LTO_cgraph_unavail_node = 1,
59 /* Cgraph node with function body. */
60 LTO_cgraph_analyzed_node,
/* Call edge whose callee is unknown at compile time (indirect call). */
63 LTO_cgraph_indirect_edge
66 /* Create a new cgraph encoder.  The encoder maps cgraph nodes to small
   integer references via a pointer map, and records the nodes in a vector
   indexed by that reference.  */
69 lto_cgraph_encoder_new (void)
/* XCNEW zero-initializes, so all fields not set below start as 0/NULL.  */
71 lto_cgraph_encoder_t encoder = XCNEW (struct lto_cgraph_encoder_d);
72 encoder->map = pointer_map_create ();
73 encoder->nodes = NULL;
78 /* Delete ENCODER and its components (the node vector and the
   node-to-reference pointer map).  The encoded nodes themselves
   are not freed.  */
81 lto_cgraph_encoder_delete (lto_cgraph_encoder_t encoder)
83 VEC_free (cgraph_node_ptr, heap, encoder->nodes);
84 pointer_map_destroy (encoder->map);
89 /* Return the existing reference number of NODE in the cgraph encoder in
90 output block OB. Assign a new reference if this is the first time
   NODE is encoded.  References are assigned densely in encoding order,
   so they double as indices into ENCODER->nodes.  */
94 lto_cgraph_encoder_encode (lto_cgraph_encoder_t encoder,
95 struct cgraph_node *node)
100 slot = pointer_map_contains (encoder->map, node);
/* Not seen before: next reference is the current vector length.  */
103 ref = VEC_length (cgraph_node_ptr, encoder->nodes);
104 slot = pointer_map_insert (encoder->map, node);
105 *slot = (void *) (intptr_t) ref;
106 VEC_safe_push (cgraph_node_ptr, heap, encoder->nodes, node);
/* Already encoded: recover the reference stored in the map slot.  */
109 ref = (int) (intptr_t) *slot;
/* Sentinel returned by the lookup functions for nodes that have not
   been encoded.  Must not collide with any valid reference (refs are
   non-negative vector indices).  */
114 #define LCC_NOT_FOUND (-1)
116 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
117 or LCC_NOT_FOUND if it is not there. */
120 lto_cgraph_encoder_lookup (lto_cgraph_encoder_t encoder,
121 struct cgraph_node *node)
123 void **slot = pointer_map_contains (encoder->map, node);
124 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
128 /* Return the cgraph node corresponding to REF using ENCODER.
    REF is an index previously produced by lto_cgraph_encoder_encode;
    LCC_NOT_FOUND is handled explicitly.  */
131 lto_cgraph_encoder_deref (lto_cgraph_encoder_t encoder, int ref)
133 if (ref == LCC_NOT_FOUND)
136 return VEC_index (cgraph_node_ptr, encoder->nodes, ref);
140 /* Return number of encoded nodes in ENCODER. */
143 lto_cgraph_encoder_size (lto_cgraph_encoder_t encoder)
145 return VEC_length (cgraph_node_ptr, encoder->nodes);
148 /* Create a new varpool encoder.  Like the cgraph encoder, but also
    tracks (in INITIALIZER) the subset of variables whose initializers
    should be streamed.  */
150 lto_varpool_encoder_t
151 lto_varpool_encoder_new (void)
153 lto_varpool_encoder_t encoder = XCNEW (struct lto_varpool_encoder_d);
154 encoder->map = pointer_map_create ();
155 encoder->initializer = pointer_set_create ();
156 encoder->nodes = NULL;
161 /* Delete ENCODER and its components (node vector, reference map, and
    the initializer pointer set).  The varpool nodes themselves are not
    freed.  */
164 lto_varpool_encoder_delete (lto_varpool_encoder_t encoder)
166 VEC_free (varpool_node_ptr, heap, encoder->nodes);
167 pointer_map_destroy (encoder->map);
168 pointer_set_destroy (encoder->initializer);
173 /* Return the existing reference number of NODE in the varpool encoder in
174 output block OB. Assign a new reference if this is the first time
    NODE is encoded.  Mirrors lto_cgraph_encoder_encode for varpool
    nodes.  */
178 lto_varpool_encoder_encode (lto_varpool_encoder_t encoder,
179 struct varpool_node *node)
184 slot = pointer_map_contains (encoder->map, node);
/* First encounter: assign the next dense reference number.  */
187 ref = VEC_length (varpool_node_ptr, encoder->nodes);
188 slot = pointer_map_insert (encoder->map, node);
189 *slot = (void *) (intptr_t) ref;
190 VEC_safe_push (varpool_node_ptr, heap, encoder->nodes, node);
/* Already encoded: reuse the stored reference.  */
193 ref = (int) (intptr_t) *slot;
198 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
199 or LCC_NOT_FOUND if it is not there. */
202 lto_varpool_encoder_lookup (lto_varpool_encoder_t encoder,
203 struct varpool_node *node)
205 void **slot = pointer_map_contains (encoder->map, node);
206 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
210 /* Return the varpool node corresponding to REF using ENCODER.
    REF is an index assigned by lto_varpool_encoder_encode.  */
212 struct varpool_node *
213 lto_varpool_encoder_deref (lto_varpool_encoder_t encoder, int ref)
215 if (ref == LCC_NOT_FOUND)
218 return VEC_index (varpool_node_ptr, encoder->nodes, ref);
222 /* Return number of encoded nodes in ENCODER. */
225 lto_varpool_encoder_size (lto_varpool_encoder_t encoder)
227 return VEC_length (varpool_node_ptr, encoder->nodes);
230 /* Return TRUE if we should encode initializer of NODE (if any),
    i.e. NODE has been registered via
    lto_set_varpool_encoder_encode_initializer.  */
233 lto_varpool_encoder_encode_initializer_p (lto_varpool_encoder_t encoder,
234 struct varpool_node *node)
236 return pointer_set_contains (encoder->initializer, node);
239 /* Mark NODE so that its initializer (if any) will be encoded;
    lto_varpool_encoder_encode_initializer_p will then return true
    for NODE.  */
242 lto_set_varpool_encoder_encode_initializer (lto_varpool_encoder_t encoder,
243 struct varpool_node *node)
245 pointer_set_insert (encoder->initializer, node);
248 /* Output the cgraph EDGE to OB using ENCODER.  The record layout
    (tag, caller ref, [callee ref], count, bitpack) must be kept in
    sync with input_edge.  */
251 lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
252 lto_cgraph_encoder_t encoder)
256 struct bitpack_d *bp;
/* The tag tells the reader whether a callee reference follows.  */
258 if (edge->indirect_unknown_callee)
259 lto_output_uleb128_stream (ob->main_stream, LTO_cgraph_indirect_edge)
261 lto_output_uleb128_stream (ob->main_stream, LTO_cgraph_edge);
/* Both caller and callee must already be in the encoder; edges are
   streamed only after all nodes have been encoded.  */
263 ref = lto_cgraph_encoder_lookup (encoder, edge->caller);
264 gcc_assert (ref != LCC_NOT_FOUND);
265 lto_output_sleb128_stream (ob->main_stream, ref);
267 if (!edge->indirect_unknown_callee)
269 ref = lto_cgraph_encoder_lookup (encoder, edge->callee);
270 gcc_assert (ref != LCC_NOT_FOUND);
271 lto_output_sleb128_stream (ob->main_stream, ref);
274 lto_output_sleb128_stream (ob->main_stream, edge->count);
276 bp = bitpack_create ();
/* In WPA mode the statement is not available, so stream the saved uid.  */
277 uid = flag_wpa ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt);
278 bp_pack_value (bp, uid, HOST_BITS_PER_INT);
279 bp_pack_value (bp, edge->inline_failed, HOST_BITS_PER_INT);
280 bp_pack_value (bp, edge->frequency, HOST_BITS_PER_INT);
281 bp_pack_value (bp, edge->loop_nest, 30);
282 bp_pack_value (bp, edge->indirect_inlining_edge, 1);
283 bp_pack_value (bp, edge->call_stmt_cannot_inline_p, 1);
284 bp_pack_value (bp, edge->can_throw_external, 1);
285 if (edge->indirect_unknown_callee)
/* For indirect edges, stream the ECF flags as individual bits.  */
287 int flags = edge->indirect_info->ecf_flags;
288 bp_pack_value (bp, (flags & ECF_CONST) != 0, 1);
289 bp_pack_value (bp, (flags & ECF_PURE) != 0, 1);
290 bp_pack_value (bp, (flags & ECF_NORETURN) != 0, 1);
291 bp_pack_value (bp, (flags & ECF_MALLOC) != 0, 1);
292 bp_pack_value (bp, (flags & ECF_NOTHROW) != 0, 1);
293 bp_pack_value (bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
294 /* Flags that should not appear on indirect calls. */
295 gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
300 lto_output_bitpack (ob->main_stream, bp);
304 /* Return true if LIST contains references from other partitions,
    i.e. from a cgraph node not in SET or a varpool node not in VSET.  */
306 referenced_from_other_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
307 varpool_node_set vset)
311 for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
313 if (ref->refering_type == IPA_REF_CGRAPH)
315 if (!cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
320 if (!varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
328 /* Return true when node is reachable from other partition, i.e. it
    has a caller outside SET.  */
331 reachable_from_other_partition_p (struct cgraph_node *node, cgraph_node_set set)
333 struct cgraph_edge *e;
/* Inline clones are materialized into their caller's body, so they
   are only reachable through the node they are inlined to.  */
336 if (node->global.inlined_to)
338 for (e = node->callers; e; e = e->next_caller)
339 if (!cgraph_node_in_set_p (e->caller, set))
344 /* Output the cgraph NODE to OB. ENCODER is used to find the
345 reference number of NODE->inlined_to. SET is the set of nodes we
346 are writing to the current file. If NODE is not in SET, then NODE
347 is a boundary of a cgraph_node_set and we pretend NODE just has a
348 decl and no callees. WRITTEN_DECLS is the set of FUNCTION_DECLs
349 that have had their callgraph node written so far. This is used to
350 determine if NODE is a clone of a previously written node.
    The record layout must be kept in sync with input_node /
    input_overwrite_node.  */
353 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
354 lto_cgraph_encoder_t encoder, cgraph_node_set set,
355 varpool_node_set vset,
356 bitmap written_decls)
359 struct bitpack_d *bp;
360 bool boundary_p, wrote_decl_p;
362 bool in_other_partition = false;
364 boundary_p = !cgraph_node_in_set_p (node, set);
365 wrote_decl_p = bitmap_bit_p (written_decls, DECL_UID (node->decl));
/* Boundary nodes are streamed without a body even when analyzed.  */
367 if (node->analyzed && !boundary_p)
368 tag = LTO_cgraph_analyzed_node;
370 tag = LTO_cgraph_unavail_node;
372 lto_output_uleb128_stream (ob->main_stream, tag);
374 /* In WPA mode, we only output part of the call-graph. Also, we
375 fake cgraph node attributes. There are two cases that we care.
377 Boundary nodes: There are nodes that are not part of SET but are
378 called from within SET. We artificially make them look like
379 externally visible nodes with no function body.
381 Cherry-picked nodes: These are nodes we pulled from other
382 translation units into SET during IPA-inlining. We make them as
383 local static nodes to prevent clashes with other local statics. */
384 if (boundary_p && node->analyzed)
386 /* Inline clones can not be part of boundary.
387 gcc_assert (!node->global.inlined_to);
389 FIXME: At the moment they can be, when partition contains an inline
390 clone that is clone of inline clone from outside partition. We can
391 reshape the clone tree and make other tree to be the root, but it
392 needs a bit extra work and will be promplty done by cgraph_remove_node
393 after reading back. */
394 in_other_partition = 1;
397 lto_output_uleb128_stream (ob->main_stream, wrote_decl_p);
400 bitmap_set_bit (written_decls, DECL_UID (node->decl));
402 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
403 lto_output_sleb128_stream (ob->main_stream, node->count);
/* Bitpack of node flags; unpack order in input_overwrite_node must
   match the pack order here exactly.  */
405 bp = bitpack_create ();
406 bp_pack_value (bp, node->local.local, 1);
407 bp_pack_value (bp, node->local.externally_visible, 1);
408 bp_pack_value (bp, node->local.finalized, 1);
409 bp_pack_value (bp, node->local.inlinable, 1);
410 bp_pack_value (bp, node->local.disregard_inline_limits, 1);
411 bp_pack_value (bp, node->local.redefined_extern_inline, 1);
412 bp_pack_value (bp, node->local.vtable_method, 1);
413 bp_pack_value (bp, node->needed, 1);
414 bp_pack_value (bp, node->address_taken, 1);
415 bp_pack_value (bp, node->abstract_and_needed, 1);
/* "Reachable from other partition" is computed here rather than stored
   on the node: an analyzed, non-external node that is called or
   referenced from outside SET/VSET.  */
416 bp_pack_value (bp, tag == LTO_cgraph_analyzed_node
417 && !DECL_EXTERNAL (node->decl)
418 && (reachable_from_other_partition_p (node, set)
419 || referenced_from_other_partition_p (&node->ref_list, set, vset)), 1);
420 bp_pack_value (bp, node->lowered, 1);
421 bp_pack_value (bp, in_other_partition, 1);
422 bp_pack_value (bp, node->alias, 1);
423 bp_pack_value (bp, node->finalized_by_frontend, 1);
424 bp_pack_value (bp, node->frequency, 2);
425 lto_output_bitpack (ob->main_stream, bp);
/* Inline summary and inlined_to/comdat references are only streamed
   for nodes with bodies.  */
428 if (tag == LTO_cgraph_analyzed_node)
430 lto_output_sleb128_stream (ob->main_stream,
431 node->local.inline_summary.estimated_self_stack_size);
432 lto_output_sleb128_stream (ob->main_stream,
433 node->local.inline_summary.self_size);
434 lto_output_sleb128_stream (ob->main_stream,
435 node->local.inline_summary.size_inlining_benefit);
436 lto_output_sleb128_stream (ob->main_stream,
437 node->local.inline_summary.self_time);
438 lto_output_sleb128_stream (ob->main_stream,
439 node->local.inline_summary.time_inlining_benefit);
440 if (node->global.inlined_to)
442 ref = lto_cgraph_encoder_lookup (encoder, node->global.inlined_to);
443 gcc_assert (ref != LCC_NOT_FOUND);
448 lto_output_sleb128_stream (ob->main_stream, ref);
451 if (node->same_comdat_group && !boundary_p)
453 ref = lto_cgraph_encoder_lookup (encoder, node->same_comdat_group);
454 gcc_assert (ref != LCC_NOT_FOUND);
458 lto_output_sleb128_stream (ob->main_stream, ref);
/* Stream same-body aliases and thunks attached to this node.
   NOTE(review): alias_count starts at 1 and the loop below advances
   while alias->next — presumably counting the list; confirm against
   the elided surrounding lines.  */
462 struct cgraph_node *alias;
463 unsigned long alias_count = 1;
464 for (alias = node->same_body; alias->next; alias = alias->next)
466 lto_output_uleb128_stream (ob->main_stream, alias_count);
469 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
471 if (alias->thunk.thunk_p)
/* Encode thunk-ness plus the this_adjusting/virtual_offset_p flags
   into a single nonzero uleb128 (bit 0 set marks a thunk).  */
473 lto_output_uleb128_stream
475 1 + (alias->thunk.this_adjusting != 0) * 2
476 + (alias->thunk.virtual_offset_p != 0) * 4);
477 lto_output_uleb128_stream (ob->main_stream,
478 alias->thunk.fixed_offset);
479 lto_output_uleb128_stream (ob->main_stream,
480 alias->thunk.virtual_value);
481 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
/* Plain (non-thunk) alias: zero marker, then the aliased decl.  */
486 lto_output_uleb128_stream (ob->main_stream, 0);
487 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
/* Aliases are emitted from the tail of the list backwards.  */
490 alias = alias->previous;
495 lto_output_uleb128_stream (ob->main_stream, 0);
498 /* Output the varpool NODE to OB.
499 If NODE is not in SET, then NODE is a boundary.  Layout must stay in
    sync with input_varpool_node.  */
502 lto_output_varpool_node (struct lto_simple_output_block *ob, struct varpool_node *node,
503 cgraph_node_set set, varpool_node_set vset)
505 bool boundary_p = !varpool_node_in_set_p (node, vset) && node->analyzed;
506 struct bitpack_d *bp;
507 struct varpool_node *alias;
510 lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
/* Flag bitpack; unpack order in input_varpool_node must match.  */
511 bp = bitpack_create ();
512 bp_pack_value (bp, node->externally_visible, 1);
513 bp_pack_value (bp, node->force_output, 1);
514 bp_pack_value (bp, node->finalized, 1);
515 bp_pack_value (bp, node->alias, 1);
516 gcc_assert (!node->alias || !node->extra_name);
517 gcc_assert (node->finalized || !node->analyzed);
518 gcc_assert (node->needed);
519 /* Constant pool initializers can be de-unified into individual ltrans units.
520 FIXME: Alternatively at -Os we may want to avoid generating for them the local
521 labels and share them across LTRANS partitions. */
522 if (DECL_IN_CONSTANT_POOL (node->decl))
524 bp_pack_value (bp, 0, 1); /* used_from_other_parition. */
525 bp_pack_value (bp, 0, 1); /* in_other_partition. */
529 bp_pack_value (bp, node->analyzed
530 && referenced_from_other_partition_p (&node->ref_list,
532 bp_pack_value (bp, boundary_p, 1); /* in_other_partition. */
534 /* Also emit any extra name aliases. */
535 for (alias = node->extra_name; alias; alias = alias->next)
/* A single bit tells the reader whether an alias count follows.  */
537 bp_pack_value (bp, count != 0, 1);
538 lto_output_bitpack (ob->main_stream, bp);
543 lto_output_uleb128_stream (ob->main_stream, count);
544 for (alias = node->extra_name; alias; alias = alias->next)
545 lto_output_var_decl_index (ob->decl_state, ob->main_stream, alias->decl);
549 /* Output the IPA reference REF to OB, using ENCODER for cgraph
550 referents and VARPOOL_ENCODER for varpool referents.  Layout must
    stay in sync with input_ref.  */
553 lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
554 lto_cgraph_encoder_t encoder,
555 lto_varpool_encoder_t varpool_encoder)
557 struct bitpack_d *bp = bitpack_create ();
558 bp_pack_value (bp, ref->refered_type, 1);
559 bp_pack_value (bp, ref->use, 2);
560 lto_output_bitpack (ob->main_stream, bp);
/* The referent must already be present in the appropriate encoder.  */
562 if (ref->refered_type == IPA_REF_CGRAPH)
564 int nref = lto_cgraph_encoder_lookup (encoder, ipa_ref_node (ref));
565 gcc_assert (nref != LCC_NOT_FOUND);
566 lto_output_sleb128_stream (ob->main_stream, nref);
570 int nref = lto_varpool_encoder_lookup (varpool_encoder,
571 ipa_ref_varpool_node (ref));
572 gcc_assert (nref != LCC_NOT_FOUND);
573 lto_output_sleb128_stream (ob->main_stream, nref);
577 /* Stream out profile_summary to OB.  A leading zero uleb128 is
    written when no profile data is present.  */
580 output_profile_summary (struct lto_simple_output_block *ob)
584 /* We do not output num, it is not terribly useful. */
585 gcc_assert (profile_info->runs);
586 lto_output_uleb128_stream (ob->main_stream, profile_info->runs);
587 lto_output_sleb128_stream (ob->main_stream, profile_info->sum_all);
588 lto_output_sleb128_stream (ob->main_stream, profile_info->run_max);
589 lto_output_sleb128_stream (ob->main_stream, profile_info->sum_max);
/* No profile info: emit 0 so the reader knows nothing follows.  */
592 lto_output_uleb128_stream (ob->main_stream, 0);
595 /* Add NODE into encoder as well as nodes it is cloned from.
596 Do it in a way so clones appear first (the recursive call encodes
    the clone_of ancestor chain before NODE itself).  */
598 add_node_to (lto_cgraph_encoder_t encoder, struct cgraph_node *node)
601 add_node_to (encoder, node->clone_of);
602 lto_cgraph_encoder_encode (encoder, node);
605 /* Add all references in LIST to the appropriate encoders: cgraph
    referents to ENCODER (with their clone ancestry), varpool
    referents to VARPOOL_ENCODER.  */
608 add_references (lto_cgraph_encoder_t encoder,
609 lto_varpool_encoder_t varpool_encoder,
610 struct ipa_ref_list *list)
614 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
615 if (ref->refered_type == IPA_REF_CGRAPH)
616 add_node_to (encoder, ipa_ref_node (ref))
619 struct varpool_node *vnode = ipa_ref_varpool_node (ref);
620 lto_varpool_encoder_encode (varpool_encoder, vnode);
624 /* Output all callees or indirect outgoing edges. EDGE must be the first such
    edge in the caller's list (callees or indirect_calls).  */
628 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
629 struct lto_simple_output_block *ob,
630 lto_cgraph_encoder_t encoder)
635 /* Output edges in backward direction, so the reconstructed callgraph match
636 and it is easy to associate call sites in the IPA pass summaries. */
637 while (edge->next_callee)
638 edge = edge->next_callee;
639 for (; edge; edge = edge->prev_callee)
640 lto_output_edge (ob, edge, encoder);
643 /* Output the IPA reference lists of all cgraph nodes in SET and all
    varpool nodes in VSET to the LTO_section_refs section.  Each record
    is: reference count, owner's encoder reference, then the refs; a
    trailing zero count terminates each list.  */
646 output_refs (cgraph_node_set set, varpool_node_set vset,
647 lto_cgraph_encoder_t encoder,
648 lto_varpool_encoder_t varpool_encoder)
650 cgraph_node_set_iterator csi;
651 varpool_node_set_iterator vsi;
652 struct lto_simple_output_block *ob;
657 ob = lto_create_simple_output_block (LTO_section_refs);
/* First: references owned by cgraph nodes.  */
659 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
661 struct cgraph_node *node = csi_node (csi);
663 count = ipa_ref_list_nreferences (&node->ref_list);
666 lto_output_uleb128_stream (ob->main_stream, count);
667 lto_output_uleb128_stream (ob->main_stream,
668 lto_cgraph_encoder_lookup (encoder, node));
669 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
670 lto_output_ref (ob, ref, encoder, varpool_encoder);
/* Zero count terminates the cgraph portion.  */
674 lto_output_uleb128_stream (ob->main_stream, 0);
/* Second: references owned by varpool nodes.  */
676 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
678 struct varpool_node *node = vsi_node (vsi);
680 count = ipa_ref_list_nreferences (&node->ref_list);
683 lto_output_uleb128_stream (ob->main_stream, count);
684 lto_output_uleb128_stream (ob->main_stream,
685 lto_varpool_encoder_lookup (varpool_encoder,
687 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
688 lto_output_ref (ob, ref, encoder, varpool_encoder);
692 lto_output_uleb128_stream (ob->main_stream, 0);
694 lto_destroy_simple_output_block (ob);
698 /* Output the part of the cgraph in SET.  Top-level driver: encodes
    all nodes (including boundary callees and referenced variables),
    then streams nodes, edges, toplevel asms, and finally the varpool
    and reference sections.  */
701 output_cgraph (cgraph_node_set set, varpool_node_set vset)
703 struct cgraph_node *node;
704 struct lto_simple_output_block *ob;
705 cgraph_node_set_iterator csi;
706 varpool_node_set_iterator vsi;
707 struct cgraph_edge *edge;
709 bitmap written_decls;
710 lto_cgraph_encoder_t encoder;
711 lto_varpool_encoder_t varpool_encoder;
712 struct cgraph_asm_node *can;
714 ob = lto_create_simple_output_block (LTO_section_cgraph);
716 output_profile_summary (ob);
718 /* An encoder for cgraph nodes should have been created by
719 ipa_write_summaries_1. */
720 gcc_assert (ob->decl_state->cgraph_node_encoder);
721 gcc_assert (ob->decl_state->varpool_node_encoder);
722 encoder = ob->decl_state->cgraph_node_encoder;
723 varpool_encoder = ob->decl_state->varpool_node_encoder;
725 /* The FUNCTION_DECLs for which we have written a node. The first
726 node found is written as the "original" node, the remaining nodes
727 are considered its clones. */
728 written_decls = lto_bitmap_alloc ();
730 /* Go over all the nodes in SET and assign references. */
731 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
733 node = csi_node (csi);
734 add_node_to (encoder, node);
735 add_references (encoder, varpool_encoder, &node->ref_list);
/* Encode the variables of VSET; their initializers are always
   streamed (lto_set_varpool_encoder_encode_initializer).  */
737 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
739 struct varpool_node *vnode = vsi_node (vsi);
740 gcc_assert (!vnode->alias);
741 lto_varpool_encoder_encode (varpool_encoder, vnode);
742 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
743 add_references (encoder, varpool_encoder, &vnode->ref_list);
745 /* Pickle in also the initializer of all referenced readonly variables
746 to help folding. Constant pool variables are not shared, so we must
    stream their initializers too.  NOTE: the loop bound is re-evaluated
    each iteration because add_references may grow the encoder.  */
748 for (i = 0; i < lto_varpool_encoder_size (varpool_encoder); i++)
750 struct varpool_node *vnode = lto_varpool_encoder_deref (varpool_encoder, i);
751 if (DECL_INITIAL (vnode->decl)
752 && !lto_varpool_encoder_encode_initializer_p (varpool_encoder,
754 && (DECL_IN_CONSTANT_POOL (vnode->decl)
755 || TREE_READONLY (vnode->decl)))
757 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
758 add_references (encoder, varpool_encoder, &vnode->ref_list);
762 /* Go over all the nodes again to include callees that are not in
    SET (boundary nodes).  */
764 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
766 node = csi_node (csi);
767 for (edge = node->callees; edge; edge = edge->next_callee)
769 struct cgraph_node *callee = edge->callee;
770 if (!cgraph_node_in_set_p (callee, set))
772 /* We should have moved all the inlines. */
773 gcc_assert (!callee->global.inlined_to);
774 add_node_to (encoder, callee);
779 /* Write out the nodes. We must first output a node and then its clones,
780 otherwise at a time reading back the node there would be nothing to clone
    from (add_node_to guarantees that ordering).  */
782 n_nodes = lto_cgraph_encoder_size (encoder);
783 for (i = 0; i < n_nodes; i++)
785 node = lto_cgraph_encoder_deref (encoder, i);
786 lto_output_node (ob, node, encoder, set, vset, written_decls);
789 lto_bitmap_free (written_decls);
791 /* Go over the nodes in SET again to write edges. */
792 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
794 node = csi_node (csi);
795 output_outgoing_cgraph_edges (node->callees, ob, encoder);
796 output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
/* Zero tag terminates the node/edge records (see input_cgraph_1).  */
799 lto_output_uleb128_stream (ob->main_stream, 0);
801 /* Emit toplevel asms. */
802 for (can = cgraph_asm_nodes; can; can = can->next)
804 int len = TREE_STRING_LENGTH (can->asm_str);
805 lto_output_uleb128_stream (ob->main_stream, len);
806 for (i = 0; i < len; ++i)
807 lto_output_1_stream (ob->main_stream,
808 TREE_STRING_POINTER (can->asm_str)[i]);
/* Zero length terminates the asm list.  */
811 lto_output_uleb128_stream (ob->main_stream, 0);
813 lto_destroy_simple_output_block (ob);
814 output_varpool (set, vset);
815 output_refs (set, vset, encoder, varpool_encoder);
818 /* Overwrite the information in NODE based on FILE_DATA, TAG, FLAGS,
819 STACK_SIZE, SELF_TIME and SELF_SIZE. This is called either to initialize
820 NODE or to replace the values in it, for instance because the first
821 time we saw it, the function body was not available but now it
822 is. BP is a bitpack with all the bitflags for NODE read from the
    stream; the unpack order below must mirror the pack order in
    lto_output_node exactly.  */
826 input_overwrite_node (struct lto_file_decl_data *file_data,
827 struct cgraph_node *node,
828 enum LTO_cgraph_tags tag,
829 struct bitpack_d *bp,
830 unsigned int stack_size,
831 unsigned int self_time,
832 unsigned int time_inlining_benefit,
833 unsigned int self_size,
834 unsigned int size_inlining_benefit)
/* Stash the tag in aux so re-reads of the same node can be detected
   (see input_node).  */
836 node->aux = (void *) tag;
837 node->local.inline_summary.estimated_self_stack_size = stack_size;
838 node->local.inline_summary.self_time = self_time;
839 node->local.inline_summary.time_inlining_benefit = time_inlining_benefit;
840 node->local.inline_summary.self_size = self_size;
841 node->local.inline_summary.size_inlining_benefit = size_inlining_benefit;
/* Seed the global (post-inlining) estimates from the self values.  */
842 node->global.time = self_time;
843 node->global.size = self_size;
844 node->global.estimated_stack_size = stack_size;
845 node->global.estimated_growth = INT_MIN;
846 node->local.lto_file_data = file_data;
848 node->local.local = bp_unpack_value (bp, 1);
849 node->local.externally_visible = bp_unpack_value (bp, 1);
850 node->local.finalized = bp_unpack_value (bp, 1);
851 node->local.inlinable = bp_unpack_value (bp, 1);
852 node->local.disregard_inline_limits = bp_unpack_value (bp, 1);
853 node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
854 node->local.vtable_method = bp_unpack_value (bp, 1);
855 node->needed = bp_unpack_value (bp, 1);
856 node->address_taken = bp_unpack_value (bp, 1);
857 node->abstract_and_needed = bp_unpack_value (bp, 1);
858 node->reachable_from_other_partition = bp_unpack_value (bp, 1);
859 node->lowered = bp_unpack_value (bp, 1);
/* analyzed is derived from the tag, not streamed as a bit.  */
860 node->analyzed = tag == LTO_cgraph_analyzed_node;
861 node->in_other_partition = bp_unpack_value (bp, 1);
862 node->alias = bp_unpack_value (bp, 1);
863 node->finalized_by_frontend = bp_unpack_value (bp, 1);
864 node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
867 /* Output the varpool nodes recorded in the varpool encoder to the
    LTO_section_varpool section: a node count followed by one record
    per node (see lto_output_varpool_node).  */
870 output_varpool (cgraph_node_set set, varpool_node_set vset)
872 struct lto_simple_output_block *ob = lto_create_simple_output_block (LTO_section_varpool);
873 lto_varpool_encoder_t varpool_encoder = ob->decl_state->varpool_node_encoder;
874 int len = lto_varpool_encoder_size (varpool_encoder), i;
876 lto_output_uleb128_stream (ob->main_stream, len);
878 /* Write out the nodes. We must first output a node and then its clones,
879 otherwise at a time reading back the node there would be nothing to clone
    from.  */
881 for (i = 0; i < len; i++)
883 lto_output_varpool_node (ob, lto_varpool_encoder_deref (varpool_encoder, i),
887 lto_destroy_simple_output_block (ob);
890 /* Read a node from input_block IB. TAG is the node's tag just read.
891 Return the node read or overwriten.  The read order must mirror the
    write order in lto_output_node.  */
893 static struct cgraph_node *
894 input_node (struct lto_file_decl_data *file_data,
895 struct lto_input_block *ib,
896 enum LTO_cgraph_tags tag)
899 struct cgraph_node *node;
900 struct bitpack_d *bp;
904 int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
907 int time_inlining_benefit = 0;
908 int size_inlining_benefit = 0;
909 unsigned long same_body_count = 0;
/* Nonzero clone flag corresponds to wrote_decl_p on the writer side:
   the decl was already seen, so materialize this node as a clone.  */
911 clone_p = (lto_input_uleb128 (ib) != 0);
913 decl_index = lto_input_uleb128 (ib);
914 fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
917 node = cgraph_clone_node (cgraph_node (fn_decl), 0,
918 CGRAPH_FREQ_BASE, 0, false, NULL);
921 node = cgraph_node (fn_decl);
923 node->count = lto_input_sleb128 (ib);
924 bp = lto_input_bitpack (ib);
/* Inline summary and inlined_to/comdat refs only exist for analyzed
   nodes (see lto_output_node).  */
926 if (tag == LTO_cgraph_analyzed_node)
928 stack_size = lto_input_sleb128 (ib);
929 self_size = lto_input_sleb128 (ib);
930 size_inlining_benefit = lto_input_sleb128 (ib);
931 self_time = lto_input_sleb128 (ib);
932 time_inlining_benefit = lto_input_sleb128 (ib);
934 ref = lto_input_sleb128 (ib);
937 ref2 = lto_input_sleb128 (ib);
938 same_body_count = lto_input_uleb128 (ib);
940 /* Make sure that we have not read this node before. Nodes that
941 have already been read will have their tag stored in the 'aux'
942 field. Since built-in functions can be referenced in multiple
943 functions, they are expected to be read more than once. */
944 if (node->aux && !DECL_IS_BUILTIN (node->decl))
945 internal_error ("bytecode stream: found multiple instances of cgraph "
946 "node %d", node->uid);
948 input_overwrite_node (file_data, node, tag, bp, stack_size, self_time,
949 time_inlining_benefit, self_size,
950 size_inlining_benefit);
953 /* Store a reference for now, and fix up later to be a pointer. */
954 node->global.inlined_to = (cgraph_node_ptr) (intptr_t) ref;
956 /* Store a reference for now, and fix up later to be a pointer. */
957 node->same_comdat_group = (cgraph_node_ptr) (intptr_t) ref2;
/* Read back the same-body aliases and thunks written by
   lto_output_node.  */
959 while (same_body_count-- > 0)
963 decl_index = lto_input_uleb128 (ib);
964 alias_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
/* TYPE encodes: 0 = plain alias; otherwise a thunk, with bit 1 =
   this_adjusting and bit 2 = virtual_offset_p (see writer).  */
965 type = lto_input_uleb128 (ib);
969 decl_index = lto_input_uleb128 (ib);
970 real_alias = lto_file_decl_data_get_fn_decl (file_data, decl_index);
971 cgraph_same_body_alias (alias_decl, real_alias);
975 HOST_WIDE_INT fixed_offset = lto_input_uleb128 (ib);
976 HOST_WIDE_INT virtual_value = lto_input_uleb128 (ib);
978 decl_index = lto_input_uleb128 (ib);
979 real_alias = lto_file_decl_data_get_fn_decl (file_data, decl_index);
980 cgraph_add_thunk (alias_decl, fn_decl, type & 2, fixed_offset,
982 (type & 4) ? size_int (virtual_value) : NULL_TREE,
989 /* Read a varpool node from input_block IB using the decl info in
990 FILE_DATA.  Return the node read or overwritten.  The read order
    must mirror lto_output_varpool_node.  */
992 static struct varpool_node *
993 input_varpool_node (struct lto_file_decl_data *file_data,
994 struct lto_input_block *ib)
998 struct varpool_node *node;
999 struct bitpack_d *bp;
1003 decl_index = lto_input_uleb128 (ib);
1004 var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1005 node = varpool_node (var_decl);
/* Unpack the flag bitpack in the same order it was packed.  */
1007 bp = lto_input_bitpack (ib);
1008 node->externally_visible = bp_unpack_value (bp, 1);
1009 node->force_output = bp_unpack_value (bp, 1);
1010 node->finalized = bp_unpack_value (bp, 1);
1011 node->alias = bp_unpack_value (bp, 1);
1012 node->analyzed = node->finalized;
1013 node->used_from_other_partition = bp_unpack_value (bp, 1);
1014 node->in_other_partition = bp_unpack_value (bp, 1);
/* Nonzero when an extra-name alias count and decl list follow.  */
1015 aliases_p = bp_unpack_value (bp, 1);
1016 if (node->finalized)
1017 varpool_mark_needed_node (node);
1018 bitpack_delete (bp);
1021 count = lto_input_uleb128 (ib);
1022 for (; count > 0; count --)
1024 tree decl = lto_file_decl_data_get_var_decl (file_data,
1025 lto_input_uleb128 (ib));
1026 varpool_extra_name_alias (decl, var_decl);
1032 /* Read one IPA reference record from IB and register it.  The owner
1033 is either REFERING_NODE or REFERING_VARPOOL_NODE; the referent is
     decoded against the NODES / VARPOOL_NODES vectors read earlier.
     Must mirror lto_output_ref.  */
1036 input_ref (struct lto_input_block *ib,
1037 struct cgraph_node *refering_node,
1038 struct varpool_node *refering_varpool_node,
1039 VEC(cgraph_node_ptr, heap) *nodes,
1040 VEC(varpool_node_ptr, heap) *varpool_nodes)
1042 struct cgraph_node *node = NULL;
1043 struct varpool_node *varpool_node = NULL;
1044 struct bitpack_d *bp;
1045 enum ipa_ref_type type;
1046 enum ipa_ref_use use;
1048 bp = lto_input_bitpack (ib);
1049 type = (enum ipa_ref_type) bp_unpack_value (bp, 1);
1050 use = (enum ipa_ref_use) bp_unpack_value (bp, 2);
1051 bitpack_delete (bp);
/* Exactly one of node / varpool_node is set, matching TYPE.  */
1052 if (type == IPA_REF_CGRAPH)
1053 node = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1055 varpool_node = VEC_index (varpool_node_ptr, varpool_nodes, lto_input_sleb128 (ib));
1056 ipa_record_reference (refering_node, refering_varpool_node,
1057 node, varpool_node, use, NULL);
1060 /* Read an edge from IB. NODES points to a vector of previously read nodes for
1061 decoding caller and callee of the edge to be read. If INDIRECT is true, the
1062 edge being read is indirect (in the sense that it has
1063 indirect_unknown_callee set).  Must mirror lto_output_edge.  */
1066 input_edge (struct lto_input_block *ib, VEC(cgraph_node_ptr, heap) *nodes,
1069 struct cgraph_node *caller, *callee;
1070 struct cgraph_edge *edge;
1071 unsigned int stmt_id;
1075 cgraph_inline_failed_t inline_failed;
1076 struct bitpack_d *bp;
1077 enum ld_plugin_symbol_resolution caller_resolution;
1080 caller = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1081 if (caller == NULL || caller->decl == NULL_TREE)
1082 internal_error ("bytecode stream: no caller found while reading edge");
/* A callee reference is present only for direct edges.  */
1086 callee = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1087 if (callee == NULL || callee->decl == NULL_TREE)
1088 internal_error ("bytecode stream: no callee found while reading edge");
1093 count = (gcov_type) lto_input_sleb128 (ib);
/* Unpack in the exact order lto_output_edge packed.  */
1095 bp = lto_input_bitpack (ib);
1096 stmt_id = (unsigned int) bp_unpack_value (bp, HOST_BITS_PER_INT);
1097 inline_failed = (cgraph_inline_failed_t) bp_unpack_value (bp,
1099 freq = (int) bp_unpack_value (bp, HOST_BITS_PER_INT);
1100 nest = (unsigned) bp_unpack_value (bp, 30);
1102 /* If the caller was preempted, don't create the edge.
1103 ??? Should we ever have edges from a preempted caller? */
1104 caller_resolution = lto_symtab_get_resolution (caller->decl);
1105 if (caller_resolution == LDPR_PREEMPTED_REG
1106 || caller_resolution == LDPR_PREEMPTED_IR)
1110 edge = cgraph_create_indirect_edge (caller, NULL, 0, count, freq, nest);
1112 edge = cgraph_create_edge (caller, callee, NULL, count, freq, nest);
1114 edge->indirect_inlining_edge = bp_unpack_value (bp, 1);
1115 edge->lto_stmt_uid = stmt_id;
1116 edge->inline_failed = inline_failed;
1117 edge->call_stmt_cannot_inline_p = bp_unpack_value (bp, 1);
1118 edge->can_throw_external = bp_unpack_value (bp, 1);
/* For indirect edges, reassemble the ECF flags from individual bits
   (same order as the writer).  */
1121 if (bp_unpack_value (bp, 1))
1122 ecf_flags |= ECF_CONST;
1123 if (bp_unpack_value (bp, 1))
1124 ecf_flags |= ECF_PURE;
1125 if (bp_unpack_value (bp, 1))
1126 ecf_flags |= ECF_NORETURN;
1127 if (bp_unpack_value (bp, 1))
1128 ecf_flags |= ECF_MALLOC;
1129 if (bp_unpack_value (bp, 1))
1130 ecf_flags |= ECF_NOTHROW;
1131 if (bp_unpack_value (bp, 1))
1132 ecf_flags |= ECF_RETURNS_TWICE;
1133 edge->indirect_info->ecf_flags = ecf_flags;
1135 bitpack_delete (bp);
/* Read a cgraph from IB using the info in FILE_DATA.  */
static VEC(cgraph_node_ptr, heap) *
input_cgraph_1 (struct lto_file_decl_data *file_data,
		struct lto_input_block *ib)
  enum LTO_cgraph_tags tag;
  VEC(cgraph_node_ptr, heap) *nodes = NULL;
  struct cgraph_node *node;
  unsigned HOST_WIDE_INT len;
  /* Records are dispatched on their tag; a tag of 0 is the stopper.  */
  tag = (enum LTO_cgraph_tags) lto_input_uleb128 (ib);
  if (tag == LTO_cgraph_edge)
    input_edge (ib, nodes, false);
  else if (tag == LTO_cgraph_indirect_edge)
    input_edge (ib, nodes, true);
  /* Any other non-zero tag is a cgraph node record.  */
  node = input_node (file_data, ib, tag);
  if (node == NULL || node->decl == NULL_TREE)
    internal_error ("bytecode stream: found empty cgraph node");
  /* Remember the node so later edge and reference records can refer to
     it by its index in NODES, and register it with the encoder.  */
  VEC_safe_push (cgraph_node_ptr, heap, nodes, node);
  lto_cgraph_encoder_encode (file_data->cgraph_node_encoder, node);
  tag = (enum LTO_cgraph_tags) lto_input_uleb128 (ib);
  /* Input toplevel asms.  */
  len = lto_input_uleb128 (ib);
  char *str = (char *)xmalloc (len + 1);
  for (i = 0; i < len; ++i)
    str[i] = lto_input_1_unsigned (ib);
  cgraph_add_asm_node (build_string (len, str));
  len = lto_input_uleb128 (ib);
  /* The stream encodes inlined_to/same_comdat_group as node indices;
     now that all nodes exist, turn those indices into pointers.  */
  for (i = 0; VEC_iterate (cgraph_node_ptr, nodes, i, node); i++)
    int ref = (int) (intptr_t) node->global.inlined_to;
    /* Fixup inlined_to from reference to pointer.  */
    if (ref != LCC_NOT_FOUND)
      node->global.inlined_to = VEC_index (cgraph_node_ptr, nodes, ref);
    node->global.inlined_to = NULL;
    ref = (int) (intptr_t) node->same_comdat_group;
    /* Fixup same_comdat_group from reference to pointer.  */
    if (ref != LCC_NOT_FOUND)
      node->same_comdat_group = VEC_index (cgraph_node_ptr, nodes, ref);
    node->same_comdat_group = NULL;
/* Read a varpool from IB using the info in FILE_DATA.  Returns a vector of
   the nodes read, in stream order, so that reference records can address
   them by index.  */
static VEC(varpool_node_ptr, heap) *
input_varpool_1 (struct lto_file_decl_data *file_data,
		 struct lto_input_block *ib)
  unsigned HOST_WIDE_INT len;
  VEC(varpool_node_ptr, heap) *varpool = NULL;
  /* LEN is the number of varpool node records that follow.  */
  len = lto_input_uleb128 (ib);
  /* Read each node and append it so its index matches the stream order.  */
  VEC_safe_push (varpool_node_ptr, heap, varpool,
		 input_varpool_node (file_data, ib));
/* Input ipa_refs.  Reads two groups of reference lists: first those hanging
   off cgraph nodes, then those hanging off varpool nodes.  NODES and VARPOOL
   are the index-to-node maps built while reading the respective sections.  */
input_refs (struct lto_input_block *ib,
	    VEC(cgraph_node_ptr, heap) *nodes,
	    VEC(varpool_node_ptr, heap) *varpool)
  struct cgraph_node *node;
  /* COUNT references belong to the cgraph node selected by IDX.  */
  count = lto_input_uleb128 (ib);
  idx = lto_input_uleb128 (ib);
  node = VEC_index (cgraph_node_ptr, nodes, idx);
  input_ref (ib, node, NULL, nodes, varpool);
  struct varpool_node *node;
  /* Likewise for reference lists owned by varpool nodes.  */
  count = lto_input_uleb128 (ib);
  node = VEC_index (varpool_node_ptr, varpool, lto_input_uleb128 (ib));
  input_ref (ib, NULL, node, nodes, varpool);
1262 static struct gcov_ctr_summary lto_gcov_summary;
1264 /* Input profile_info from IB. */
1266 input_profile_summary (struct lto_input_block *ib)
1268 unsigned int runs = lto_input_uleb128 (ib);
1273 profile_info = <o_gcov_summary;
1274 lto_gcov_summary.runs = runs;
1275 lto_gcov_summary.sum_all = lto_input_sleb128 (ib);
1276 lto_gcov_summary.run_max = lto_input_sleb128 (ib);
1277 lto_gcov_summary.sum_max = lto_input_sleb128 (ib);
1279 /* We can support this by scaling all counts to nearest common multiple
1280 of all different runs, but it is perhaps not worth the effort. */
1281 else if (profile_info->runs != runs
1282 || profile_info->sum_all != lto_input_sleb128 (ib)
1283 || profile_info->run_max != lto_input_sleb128 (ib)
1284 || profile_info->sum_max != lto_input_sleb128 (ib))
1285 sorry ("Combining units with different profiles is not supported.");
1286 /* We allow some units to have profile and other to not have one. This will
1287 just make unprofiled units to be size optimized that is sane. */
1292 /* Input and merge the cgraph from each of the .o files passed to
1298 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1299 struct lto_file_decl_data *file_data;
1301 struct cgraph_node *node;
1303 while ((file_data = file_data_vec[j++]))
1307 struct lto_input_block *ib;
1308 VEC(cgraph_node_ptr, heap) *nodes;
1309 VEC(varpool_node_ptr, heap) *varpool;
1311 ib = lto_create_simple_input_block (file_data, LTO_section_cgraph,
1313 input_profile_summary (ib);
1314 file_data->cgraph_node_encoder = lto_cgraph_encoder_new ();
1315 nodes = input_cgraph_1 (file_data, ib);
1316 lto_destroy_simple_input_block (file_data, LTO_section_cgraph,
1319 ib = lto_create_simple_input_block (file_data, LTO_section_varpool,
1321 varpool = input_varpool_1 (file_data, ib);
1322 lto_destroy_simple_input_block (file_data, LTO_section_varpool,
1325 ib = lto_create_simple_input_block (file_data, LTO_section_refs,
1327 input_refs (ib, nodes, varpool);
1328 lto_destroy_simple_input_block (file_data, LTO_section_refs,
1330 VEC_free (cgraph_node_ptr, heap, nodes);
1331 VEC_free (varpool_node_ptr, heap, varpool);
1334 /* Clear out the aux field that was used to store enough state to
1335 tell which nodes should be overwritten. */
1336 for (node = cgraph_nodes; node; node = node->next)
1338 /* Some nodes may have been created by cgraph_node. This
1339 happens when the callgraph contains nested functions. If the
1340 node for the parent function was never emitted to the gimple
1341 file, cgraph_node will create a node for it when setting the
1342 context of the nested function. */
1343 if (node->local.lto_file_data)