1 /* Write and read the cgraph to the memory mapped representation of a
4 Copyright 2009 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
34 #include "langhooks.h"
35 #include "basic-block.h"
36 #include "tree-flow.h"
40 #include "diagnostic.h"
45 #include "pointer-set.h"
46 #include "lto-streamer.h"
49 static void output_varpool (cgraph_node_set, varpool_node_set);
/* Cgraph streaming is organized as a set of records whose type
   is indicated by a tag.  */
/* Tags introducing each record kind in the streamed cgraph section.  */
enum LTO_cgraph_tags
{
  /* Must leave 0 for the stopper.  */

  /* Cgraph node without body available.  */
  LTO_cgraph_unavail_node = 1,
  /* Cgraph node with function body.  */
  LTO_cgraph_analyzed_node,
  /* Direct call edge (callee known).  */
  LTO_cgraph_edge,
  /* Indirect call edge (callee unknown at stream-out time).  */
  LTO_cgraph_indirect_edge
};
66 /* Create a new cgraph encoder. */
69 lto_cgraph_encoder_new (void)
71 lto_cgraph_encoder_t encoder = XCNEW (struct lto_cgraph_encoder_d);
72 encoder->map = pointer_map_create ();
73 encoder->nodes = NULL;
74 encoder->body = pointer_set_create ();
79 /* Delete ENCODER and its components. */
82 lto_cgraph_encoder_delete (lto_cgraph_encoder_t encoder)
84 VEC_free (cgraph_node_ptr, heap, encoder->nodes);
85 pointer_map_destroy (encoder->map);
86 pointer_set_destroy (encoder->body);
/* Return the existing reference number of NODE in the cgraph encoder in
   output block OB.  Assign a new reference if this is the first time
   NODE is encountered.  */
96 lto_cgraph_encoder_encode (lto_cgraph_encoder_t encoder,
97 struct cgraph_node *node)
102 slot = pointer_map_contains (encoder->map, node);
105 ref = VEC_length (cgraph_node_ptr, encoder->nodes);
106 slot = pointer_map_insert (encoder->map, node);
107 *slot = (void *) (intptr_t) ref;
108 VEC_safe_push (cgraph_node_ptr, heap, encoder->nodes, node);
111 ref = (int) (intptr_t) *slot;
/* Sentinel reference value meaning "not present in the encoder".  */
116 #define LCC_NOT_FOUND (-1)
118 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
119 or LCC_NOT_FOUND if it is not there. */
122 lto_cgraph_encoder_lookup (lto_cgraph_encoder_t encoder,
123 struct cgraph_node *node)
125 void **slot = pointer_map_contains (encoder->map, node);
126 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
130 /* Return the cgraph node corresponding to REF using ENCODER. */
133 lto_cgraph_encoder_deref (lto_cgraph_encoder_t encoder, int ref)
135 if (ref == LCC_NOT_FOUND)
138 return VEC_index (cgraph_node_ptr, encoder->nodes, ref);
/* Return TRUE if we should encode the body of NODE (if any).  */
145 lto_cgraph_encoder_encode_body_p (lto_cgraph_encoder_t encoder,
146 struct cgraph_node *node)
148 return pointer_set_contains (encoder->body, node);
/* Mark NODE so its body will be encoded.  */
154 lto_set_cgraph_encoder_encode_body (lto_cgraph_encoder_t encoder,
155 struct cgraph_node *node)
157 pointer_set_insert (encoder->body, node);
160 /* Create a new varpool encoder. */
162 lto_varpool_encoder_t
163 lto_varpool_encoder_new (void)
165 lto_varpool_encoder_t encoder = XCNEW (struct lto_varpool_encoder_d);
166 encoder->map = pointer_map_create ();
167 encoder->initializer = pointer_set_create ();
168 encoder->nodes = NULL;
173 /* Delete ENCODER and its components. */
176 lto_varpool_encoder_delete (lto_varpool_encoder_t encoder)
178 VEC_free (varpool_node_ptr, heap, encoder->nodes);
179 pointer_map_destroy (encoder->map);
180 pointer_set_destroy (encoder->initializer);
/* Return the existing reference number of NODE in the varpool encoder in
   output block OB.  Assign a new reference if this is the first time
   NODE is encountered.  */
190 lto_varpool_encoder_encode (lto_varpool_encoder_t encoder,
191 struct varpool_node *node)
196 slot = pointer_map_contains (encoder->map, node);
199 ref = VEC_length (varpool_node_ptr, encoder->nodes);
200 slot = pointer_map_insert (encoder->map, node);
201 *slot = (void *) (intptr_t) ref;
202 VEC_safe_push (varpool_node_ptr, heap, encoder->nodes, node);
205 ref = (int) (intptr_t) *slot;
210 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
211 or LCC_NOT_FOUND if it is not there. */
214 lto_varpool_encoder_lookup (lto_varpool_encoder_t encoder,
215 struct varpool_node *node)
217 void **slot = pointer_map_contains (encoder->map, node);
218 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
222 /* Return the varpool node corresponding to REF using ENCODER. */
224 struct varpool_node *
225 lto_varpool_encoder_deref (lto_varpool_encoder_t encoder, int ref)
227 if (ref == LCC_NOT_FOUND)
230 return VEC_index (varpool_node_ptr, encoder->nodes, ref);
234 /* Return number of encoded nodes in ENCODER. */
237 lto_varpool_encoder_size (lto_varpool_encoder_t encoder)
239 return VEC_length (varpool_node_ptr, encoder->nodes);
242 /* Return TRUE if we should encode initializer of NODE (if any). */
245 lto_varpool_encoder_encode_initializer_p (lto_varpool_encoder_t encoder,
246 struct varpool_node *node)
248 return pointer_set_contains (encoder->initializer, node);
/* Mark NODE so its initializer will be encoded.  */
254 lto_set_varpool_encoder_encode_initializer (lto_varpool_encoder_t encoder,
255 struct varpool_node *node)
257 pointer_set_insert (encoder->initializer, node);
260 /* Output the cgraph EDGE to OB using ENCODER. */
263 lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
264 lto_cgraph_encoder_t encoder)
268 struct bitpack_d *bp;
/* The tag tells the reader whether a callee reference follows.  */
270 if (edge->indirect_unknown_callee)
271 lto_output_uleb128_stream (ob->main_stream, LTO_cgraph_indirect_edge);
273 lto_output_uleb128_stream (ob->main_stream, LTO_cgraph_edge);
/* Both endpoints are streamed as encoder reference numbers; they must
   already have been encoded.  */
275 ref = lto_cgraph_encoder_lookup (encoder, edge->caller);
276 gcc_assert (ref != LCC_NOT_FOUND);
277 lto_output_sleb128_stream (ob->main_stream, ref);
279 if (!edge->indirect_unknown_callee)
281 ref = lto_cgraph_encoder_lookup (encoder, edge->callee);
282 gcc_assert (ref != LCC_NOT_FOUND);
283 lto_output_sleb128_stream (ob->main_stream, ref);
/* Profile count, then the remaining edge attributes packed as bits.  */
286 lto_output_sleb128_stream (ob->main_stream, edge->count);
288 bp = bitpack_create ();
/* In WPA mode the statement is not available, so stream the saved uid.  */
289 uid = flag_wpa ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt);
290 bp_pack_value (bp, uid, HOST_BITS_PER_INT);
291 bp_pack_value (bp, edge->inline_failed, HOST_BITS_PER_INT);
292 bp_pack_value (bp, edge->frequency, HOST_BITS_PER_INT);
293 bp_pack_value (bp, edge->loop_nest, 30);
294 bp_pack_value (bp, edge->indirect_inlining_edge, 1);
295 bp_pack_value (bp, edge->call_stmt_cannot_inline_p, 1);
296 bp_pack_value (bp, edge->can_throw_external, 1);
297 if (edge->indirect_unknown_callee)
/* For indirect edges, stream the ECF flags of the unknown callee one
   bit at a time; the reader reconstructs the flag word.  */
299 int flags = edge->indirect_info->ecf_flags;
300 bp_pack_value (bp, (flags & ECF_CONST) != 0, 1);
301 bp_pack_value (bp, (flags & ECF_PURE) != 0, 1);
302 bp_pack_value (bp, (flags & ECF_NORETURN) != 0, 1);
303 bp_pack_value (bp, (flags & ECF_MALLOC) != 0, 1);
304 bp_pack_value (bp, (flags & ECF_NOTHROW) != 0, 1);
305 bp_pack_value (bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
306 /* Flags that should not appear on indirect calls.  */
307 gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
312 lto_output_bitpack (ob->main_stream, bp);
/* Return TRUE if LIST contains references from other partitions.  */
319 referenced_from_other_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
320 varpool_node_set vset)
/* Walk everything that refers to LIST's owner; any referrer outside
   SET/VSET means a cross-partition reference.  */
324 for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
326 if (ref->refering_type == IPA_REF_CGRAPH)
328 if (!cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
333 if (!varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
341 /* Return true when node is reachable from other partition. */
344 reachable_from_other_partition_p (struct cgraph_node *node, cgraph_node_set set)
346 struct cgraph_edge *e;
/* Inline clones are materialized inside their inlined_to root, so they
   are never called directly from another partition.  */
349 if (node->global.inlined_to)
/* Any caller outside SET makes NODE reachable from elsewhere.  */
351 for (e = node->callers; e; e = e->next_caller)
352 if (!cgraph_node_in_set_p (e->caller, set))
/* Return TRUE if LIST contains references from this partition.  */
360 referenced_from_this_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
361 varpool_node_set vset)
/* Dual of referenced_from_other_partition_p: succeed on the first
   referrer that IS inside SET/VSET.  */
365 for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
367 if (ref->refering_type == IPA_REF_CGRAPH)
369 if (cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
374 if (varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
/* Return true when NODE is reachable from within this partition.  */
385 reachable_from_this_partition_p (struct cgraph_node *node, cgraph_node_set set)
387 struct cgraph_edge *e;
/* See reachable_from_other_partition_p; inline clones are handled via
   their inlined_to root.  */
390 if (node->global.inlined_to)
/* Succeed on the first caller that is inside SET.  */
392 for (e = node->callers; e; e = e->next_caller)
393 if (cgraph_node_in_set_p (e->caller, set))
398 /* Output the cgraph NODE to OB. ENCODER is used to find the
399 reference number of NODE->inlined_to. SET is the set of nodes we
400 are writing to the current file. If NODE is not in SET, then NODE
401 is a boundary of a cgraph_node_set and we pretend NODE just has a
402 decl and no callees. WRITTEN_DECLS is the set of FUNCTION_DECLs
403 that have had their callgraph node written so far. This is used to
404 determine if NODE is a clone of a previously written node. */
407 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
408 lto_cgraph_encoder_t encoder, cgraph_node_set set,
409 varpool_node_set vset)
412 struct bitpack_d *bp;
415 bool in_other_partition = false;
416 struct cgraph_node *clone_of;
/* A boundary node is referenced from SET but not itself a member; it is
   streamed without a body.  */
418 boundary_p = !cgraph_node_in_set_p (node, set);
420 if (node->analyzed && !boundary_p)
421 tag = LTO_cgraph_analyzed_node;
423 tag = LTO_cgraph_unavail_node;
425 lto_output_uleb128_stream (ob->main_stream, tag);
427 /* In WPA mode, we only output part of the call-graph.  Also, we
428 fake cgraph node attributes.  There are two cases that we care.
430 Boundary nodes: There are nodes that are not part of SET but are
431 called from within SET.  We artificially make them look like
432 externally visible nodes with no function body.
434 Cherry-picked nodes: These are nodes we pulled from other
435 translation units into SET during IPA-inlining.  We make them as
436 local static nodes to prevent clashes with other local statics.  */
437 if (boundary_p && node->analyzed)
439 /* Inline clones can not be part of boundary.
440 gcc_assert (!node->global.inlined_to);
442 FIXME: At the moment they can be, when partition contains an inline
443 clone that is clone of inline clone from outside partition.  We can
444 reshape the clone tree and make other tree to be the root, but it
445 needs a bit extra work and will be promptly done by cgraph_remove_node
446 after reading back.  */
447 in_other_partition = 1;
/* Find the nearest ancestor clone that is itself in the encoder, so the
   reader knows what to clone this node from; LCC_NOT_FOUND otherwise.  */
450 clone_of = node->clone_of;
452 && (ref = lto_cgraph_encoder_lookup (encoder, node->clone_of)) == LCC_NOT_FOUND)
453 if (clone_of->prev_sibling_clone)
454 clone_of = clone_of->prev_sibling_clone;
456 clone_of = clone_of->clone_of;
458 lto_output_sleb128_stream (ob->main_stream, LCC_NOT_FOUND);
460 lto_output_sleb128_stream (ob->main_stream, ref);
/* Function decl index, profile count, then the node flags as a bitpack;
   input_overwrite_node must unpack these bits in the same order.  */
463 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
464 lto_output_sleb128_stream (ob->main_stream, node->count);
466 bp = bitpack_create ();
467 bp_pack_value (bp, node->local.local, 1);
468 bp_pack_value (bp, node->local.externally_visible, 1);
469 bp_pack_value (bp, node->local.finalized, 1);
470 bp_pack_value (bp, node->local.inlinable, 1);
471 bp_pack_value (bp, node->local.versionable, 1);
472 bp_pack_value (bp, node->local.disregard_inline_limits, 1);
473 bp_pack_value (bp, node->local.redefined_extern_inline, 1);
474 bp_pack_value (bp, node->local.vtable_method, 1);
475 bp_pack_value (bp, node->needed, 1);
476 bp_pack_value (bp, node->address_taken, 1);
477 bp_pack_value (bp, node->abstract_and_needed, 1);
/* Computed reachable_from_other_partition bit.  */
478 bp_pack_value (bp, tag == LTO_cgraph_analyzed_node
479 && !DECL_EXTERNAL (node->decl)
480 && (reachable_from_other_partition_p (node, set)
481 || referenced_from_other_partition_p (&node->ref_list, set, vset)), 1);
482 bp_pack_value (bp, node->lowered, 1);
483 bp_pack_value (bp, in_other_partition, 1);
484 bp_pack_value (bp, node->alias, 1);
485 bp_pack_value (bp, node->finalized_by_frontend, 1);
486 bp_pack_value (bp, node->frequency, 2);
487 lto_output_bitpack (ob->main_stream, bp);
/* Inline summary and intra-partition references only exist for nodes
   with an analyzed body.  */
490 if (tag == LTO_cgraph_analyzed_node)
492 lto_output_sleb128_stream (ob->main_stream,
493 node->local.inline_summary.estimated_self_stack_size);
494 lto_output_sleb128_stream (ob->main_stream,
495 node->local.inline_summary.self_size);
496 lto_output_sleb128_stream (ob->main_stream,
497 node->local.inline_summary.size_inlining_benefit);
498 lto_output_sleb128_stream (ob->main_stream,
499 node->local.inline_summary.self_time);
500 lto_output_sleb128_stream (ob->main_stream,
501 node->local.inline_summary.time_inlining_benefit);
502 if (node->global.inlined_to)
504 ref = lto_cgraph_encoder_lookup (encoder, node->global.inlined_to);
505 gcc_assert (ref != LCC_NOT_FOUND);
510 lto_output_sleb128_stream (ob->main_stream, ref);
513 if (node->same_comdat_group && !boundary_p)
515 ref = lto_cgraph_encoder_lookup (encoder, node->same_comdat_group);
516 gcc_assert (ref != LCC_NOT_FOUND);
520 lto_output_sleb128_stream (ob->main_stream, ref);
/* Stream same-body aliases and thunks, walked back to front so the
   reader recreates them in original order.  */
524 struct cgraph_node *alias;
525 unsigned long alias_count = 1;
526 for (alias = node->same_body; alias->next; alias = alias->next)
528 lto_output_uleb128_stream (ob->main_stream, alias_count);
531 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
/* Thunks additionally carry an encoded kind byte (bit 0 set, bit 1 =
   this_adjusting, bit 2 = virtual_offset_p) plus their offsets.  */
533 if (alias->thunk.thunk_p)
535 lto_output_uleb128_stream
537 1 + (alias->thunk.this_adjusting != 0) * 2
538 + (alias->thunk.virtual_offset_p != 0) * 4);
539 lto_output_uleb128_stream (ob->main_stream,
540 alias->thunk.fixed_offset);
541 lto_output_uleb128_stream (ob->main_stream,
542 alias->thunk.virtual_value);
543 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
/* Plain aliases are marked with a 0 kind byte.  */
548 lto_output_uleb128_stream (ob->main_stream, 0);
549 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
552 alias = alias->previous;
/* No same-body aliases.  */
557 lto_output_uleb128_stream (ob->main_stream, 0);
560 /* Output the varpool NODE to OB.
561 If NODE is not in SET, then NODE is a boundary. */
564 lto_output_varpool_node (struct lto_simple_output_block *ob, struct varpool_node *node,
565 cgraph_node_set set, varpool_node_set vset)
/* Boundary: referenced from VSET but not a member of it.  */
567 bool boundary_p = !varpool_node_in_set_p (node, vset) && node->analyzed;
568 struct bitpack_d *bp;
569 struct varpool_node *alias;
/* Var decl index followed by the flag bitpack; input_varpool_node must
   unpack in the same order.  */
572 lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
573 bp = bitpack_create ();
574 bp_pack_value (bp, node->externally_visible, 1);
575 bp_pack_value (bp, node->force_output, 1);
576 bp_pack_value (bp, node->finalized, 1);
577 bp_pack_value (bp, node->alias, 1);
578 gcc_assert (!node->alias || !node->extra_name);
579 gcc_assert (node->finalized || !node->analyzed);
580 gcc_assert (node->needed);
581 /* Constant pool initializers can be de-unified into individual ltrans units.
582 FIXME: Alternatively at -Os we may want to avoid generating for them the local
583 labels and share them across LTRANS partitions.  */
584 if (DECL_IN_CONSTANT_POOL (node->decl))
586 bp_pack_value (bp, 0, 1); /* used_from_other_partition.  */
587 bp_pack_value (bp, 0, 1); /* in_other_partition.  */
/* used_from_other_partition bit for ordinary variables.  */
591 bp_pack_value (bp, node->analyzed
592 && referenced_from_other_partition_p (&node->ref_list,
594 bp_pack_value (bp, boundary_p, 1); /* in_other_partition.  */
596 /* Also emit any extra name aliases.  */
597 for (alias = node->extra_name; alias; alias = alias->next)
599 bp_pack_value (bp, count != 0, 1);
600 lto_output_bitpack (ob->main_stream, bp);
605 lto_output_uleb128_stream (ob->main_stream, count);
606 for (alias = node->extra_name; alias; alias = alias->next)
607 lto_output_var_decl_index (ob->decl_state, ob->main_stream, alias->decl);
/* Output the IPA reference REF to OB, using ENCODER and VARPOOL_ENCODER
   to translate the referred node into a reference number.  */
615 lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
616 lto_cgraph_encoder_t encoder,
617 lto_varpool_encoder_t varpool_encoder)
/* Referred-node kind (1 bit) and use kind (2 bits) first, then the
   reference number in the matching encoder.  */
619 struct bitpack_d *bp = bitpack_create ();
620 bp_pack_value (bp, ref->refered_type, 1);
621 bp_pack_value (bp, ref->use, 2);
622 lto_output_bitpack (ob->main_stream, bp);
624 if (ref->refered_type == IPA_REF_CGRAPH)
626 int nref = lto_cgraph_encoder_lookup (encoder, ipa_ref_node (ref));
627 gcc_assert (nref != LCC_NOT_FOUND);
628 lto_output_sleb128_stream (ob->main_stream, nref);
632 int nref = lto_varpool_encoder_lookup (varpool_encoder,
633 ipa_ref_varpool_node (ref));
634 gcc_assert (nref != LCC_NOT_FOUND);
635 lto_output_sleb128_stream (ob->main_stream, nref);
639 /* Stream out profile_summary to OB. */
642 output_profile_summary (struct lto_simple_output_block *ob)
/* NOTE(review): the branch structure between these statements is not
   visible here; presumably the runs/sums are emitted only when profile
   data exists and a lone 0 otherwise — confirm against full source.  */
646 /* We do not output num, it is not terribly useful.  */
647 gcc_assert (profile_info->runs);
648 lto_output_uleb128_stream (ob->main_stream, profile_info->runs);
649 lto_output_sleb128_stream (ob->main_stream, profile_info->sum_all);
650 lto_output_sleb128_stream (ob->main_stream, profile_info->run_max);
651 lto_output_sleb128_stream (ob->main_stream, profile_info->sum_max);
/* No profile data: stream a zero run count.  */
654 lto_output_uleb128_stream (ob->main_stream, 0);
657 /* Add NODE into encoder as well as nodes it is cloned from.
658 Do it in a way so clones appear first. */
661 add_node_to (lto_cgraph_encoder_t encoder, struct cgraph_node *node,
/* Recurse through the clone_of chain first so origins are encoded
   before their clones appear in the node vector.  */
665 add_node_to (encoder, node->clone_of, include_body);
666 else if (include_body)
667 lto_set_cgraph_encoder_encode_body (encoder, node);
668 lto_cgraph_encoder_encode (encoder, node);
671 /* Add all references in LIST to encoders. */
674 add_references (lto_cgraph_encoder_t encoder,
675 lto_varpool_encoder_t varpool_encoder,
676 struct ipa_ref_list *list)
/* Referenced cgraph nodes go into the cgraph encoder (without body);
   referenced variables go into the varpool encoder.  */
680 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
681 if (ref->refered_type == IPA_REF_CGRAPH)
682 add_node_to (encoder, ipa_ref_node (ref), false);
685 struct varpool_node *vnode = ipa_ref_varpool_node (ref);
686 lto_varpool_encoder_encode (varpool_encoder, vnode);
/* Output all callees or indirect outgoing edges.  EDGE must be the first
   such edge on the node's list.  */
694 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
695 struct lto_simple_output_block *ob,
696 lto_cgraph_encoder_t encoder)
701 /* Output edges in backward direction, so the reconstructed callgraph
702 matches and it is easy to associate call sites in the IPA pass summaries.  */
/* Walk to the tail of the callee list, then emit via prev_callee.  */
703 while (edge->next_callee)
704 edge = edge->next_callee;
705 for (; edge; edge = edge->prev_callee)
706 lto_output_edge (ob, edge, encoder);
/* Output the IPA reference lists of the nodes in SET and VSET.  */
712 output_refs (cgraph_node_set set, varpool_node_set vset,
713 lto_cgraph_encoder_t encoder,
714 lto_varpool_encoder_t varpool_encoder)
716 cgraph_node_set_iterator csi;
717 varpool_node_set_iterator vsi;
718 struct lto_simple_output_block *ob;
723 ob = lto_create_simple_output_block (LTO_section_refs);
/* For each cgraph node: reference count, owning node's reference
   number, then the individual references.  */
725 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
727 struct cgraph_node *node = csi_node (csi);
729 count = ipa_ref_list_nreferences (&node->ref_list);
732 lto_output_uleb128_stream (ob->main_stream, count);
733 lto_output_uleb128_stream (ob->main_stream,
734 lto_cgraph_encoder_lookup (encoder, node));
735 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
736 lto_output_ref (ob, ref, encoder, varpool_encoder);
/* Zero count terminates the cgraph part.  */
740 lto_output_uleb128_stream (ob->main_stream, 0);
/* Same layout for varpool nodes.  */
742 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
744 struct varpool_node *node = vsi_node (vsi);
746 count = ipa_ref_list_nreferences (&node->ref_list);
749 lto_output_uleb128_stream (ob->main_stream, count);
750 lto_output_uleb128_stream (ob->main_stream,
751 lto_varpool_encoder_lookup (varpool_encoder,
753 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
754 lto_output_ref (ob, ref, encoder, varpool_encoder);
758 lto_output_uleb128_stream (ob->main_stream, 0);
760 lto_destroy_simple_output_block (ob);
763 /* Find out all cgraph and varpool nodes we want to encode in current unit
764 and insert them to encoders. */
766 compute_ltrans_boundary (struct lto_out_decl_state *state,
767 cgraph_node_set set, varpool_node_set vset)
769 struct cgraph_node *node;
770 cgraph_node_set_iterator csi;
771 varpool_node_set_iterator vsi;
772 struct cgraph_edge *edge;
774 lto_cgraph_encoder_t encoder;
775 lto_varpool_encoder_t varpool_encoder;
/* Fresh encoders are stored into STATE for later streaming phases.  */
777 encoder = state->cgraph_node_encoder = lto_cgraph_encoder_new ();
778 varpool_encoder = state->varpool_node_encoder = lto_varpool_encoder_new ();
780 /* Go over all the nodes in SET and assign references.  */
781 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
783 node = csi_node (csi);
784 add_node_to (encoder, node, true);
785 add_references (encoder, varpool_encoder, &node->ref_list);
/* Encode every variable in VSET with its initializer.  */
787 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
789 struct varpool_node *vnode = vsi_node (vsi);
790 gcc_assert (!vnode->alias);
791 lto_varpool_encoder_encode (varpool_encoder, vnode);
792 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
793 add_references (encoder, varpool_encoder, &vnode->ref_list);
795 /* Pickle in also the initializer of all referenced readonly variables
796 to help folding.  Constant pool variables are not shared, so we must
/* Note: the loop below may grow the encoder via add_references, and the
   size is re-evaluated each iteration, so new entries are processed.  */
798 for (i = 0; i < lto_varpool_encoder_size (varpool_encoder); i++)
800 struct varpool_node *vnode = lto_varpool_encoder_deref (varpool_encoder, i);
801 if (DECL_INITIAL (vnode->decl)
802 && !lto_varpool_encoder_encode_initializer_p (varpool_encoder,
804 && (DECL_IN_CONSTANT_POOL (vnode->decl)
805 || TREE_READONLY (vnode->decl)))
807 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
808 add_references (encoder, varpool_encoder, &vnode->ref_list);
812 /* Go over all the nodes again to include callees that are not in
/* Boundary callees are encoded without a body.  */
814 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
816 node = csi_node (csi);
817 for (edge = node->callees; edge; edge = edge->next_callee)
819 struct cgraph_node *callee = edge->callee;
820 if (!cgraph_node_in_set_p (callee, set))
822 /* We should have moved all the inlines.  */
823 gcc_assert (!callee->global.inlined_to);
824 add_node_to (encoder, callee, false);
830 /* Output the part of the cgraph in SET. */
833 output_cgraph (cgraph_node_set set, varpool_node_set vset)
835 struct cgraph_node *node;
836 struct lto_simple_output_block *ob;
837 cgraph_node_set_iterator csi;
839 lto_cgraph_encoder_t encoder;
840 lto_varpool_encoder_t varpool_encoder;
841 struct cgraph_asm_node *can;
843 ob = lto_create_simple_output_block (LTO_section_cgraph);
845 output_profile_summary (ob);
847 /* An encoder for cgraph nodes should have been created by
848 ipa_write_summaries_1.  */
849 gcc_assert (ob->decl_state->cgraph_node_encoder);
850 gcc_assert (ob->decl_state->varpool_node_encoder);
851 encoder = ob->decl_state->cgraph_node_encoder;
852 varpool_encoder = ob->decl_state->varpool_node_encoder;
854 /* Write out the nodes.  We must first output a node and then its clones,
855 otherwise at the time of reading back the node there would be nothing to clone
/* Encoder order already guarantees origins precede clones
   (see add_node_to).  */
857 n_nodes = lto_cgraph_encoder_size (encoder);
858 for (i = 0; i < n_nodes; i++)
860 node = lto_cgraph_encoder_deref (encoder, i);
861 lto_output_node (ob, node, encoder, set, vset);
864 /* Go over the nodes in SET again to write edges.  */
865 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
867 node = csi_node (csi);
868 output_outgoing_cgraph_edges (node->callees, ob, encoder);
869 output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
/* Stopper tag (0) terminates the node/edge records.  */
872 lto_output_uleb128_stream (ob->main_stream, 0);
874 /* Emit toplevel asms.  Each as a length followed by raw bytes.  */
875 for (can = cgraph_asm_nodes; can; can = can->next)
877 int len = TREE_STRING_LENGTH (can->asm_str);
878 lto_output_uleb128_stream (ob->main_stream, len);
879 for (i = 0; i < len; ++i)
880 lto_output_1_stream (ob->main_stream,
881 TREE_STRING_POINTER (can->asm_str)[i]);
/* Zero length terminates the asm list.  */
884 lto_output_uleb128_stream (ob->main_stream, 0);
886 lto_destroy_simple_output_block (ob);
887 output_varpool (set, vset);
888 output_refs (set, vset, encoder, varpool_encoder);
891 /* Overwrite the information in NODE based on FILE_DATA, TAG, FLAGS,
892 STACK_SIZE, SELF_TIME and SELF_SIZE. This is called either to initialize
893 NODE or to replace the values in it, for instance because the first
894 time we saw it, the function body was not available but now it
895 is. BP is a bitpack with all the bitflags for NODE read from the
899 input_overwrite_node (struct lto_file_decl_data *file_data,
900 struct cgraph_node *node,
901 enum LTO_cgraph_tags tag,
902 struct bitpack_d *bp,
903 unsigned int stack_size,
904 unsigned int self_time,
905 unsigned int time_inlining_benefit,
906 unsigned int self_size,
907 unsigned int size_inlining_benefit)
/* Stash the tag in aux; input_node uses it to detect duplicates.  */
909 node->aux = (void *) tag;
910 node->local.inline_summary.estimated_self_stack_size = stack_size;
911 node->local.inline_summary.self_time = self_time;
912 node->local.inline_summary.time_inlining_benefit = time_inlining_benefit;
913 node->local.inline_summary.self_size = self_size;
914 node->local.inline_summary.size_inlining_benefit = size_inlining_benefit;
/* Seed the global summary from the local one; growth is recomputed.  */
915 node->global.time = self_time;
916 node->global.size = self_size;
917 node->global.estimated_stack_size = stack_size;
918 node->global.estimated_growth = INT_MIN;
919 node->local.lto_file_data = file_data;
/* Unpack flags in exactly the order lto_output_node packed them.  */
921 node->local.local = bp_unpack_value (bp, 1);
922 node->local.externally_visible = bp_unpack_value (bp, 1);
923 node->local.finalized = bp_unpack_value (bp, 1);
924 node->local.inlinable = bp_unpack_value (bp, 1);
925 node->local.versionable = bp_unpack_value (bp, 1);
926 node->local.disregard_inline_limits = bp_unpack_value (bp, 1);
927 node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
928 node->local.vtable_method = bp_unpack_value (bp, 1);
929 node->needed = bp_unpack_value (bp, 1);
930 node->address_taken = bp_unpack_value (bp, 1);
931 node->abstract_and_needed = bp_unpack_value (bp, 1);
932 node->reachable_from_other_partition = bp_unpack_value (bp, 1);
933 node->lowered = bp_unpack_value (bp, 1);
934 node->analyzed = tag == LTO_cgraph_analyzed_node;
935 node->in_other_partition = bp_unpack_value (bp, 1);
936 node->alias = bp_unpack_value (bp, 1);
937 node->finalized_by_frontend = bp_unpack_value (bp, 1);
938 node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
/* Output the part of the varpool recorded in the varpool encoder.  */
944 output_varpool (cgraph_node_set set, varpool_node_set vset)
946 struct lto_simple_output_block *ob = lto_create_simple_output_block (LTO_section_varpool);
947 lto_varpool_encoder_t varpool_encoder = ob->decl_state->varpool_node_encoder;
948 int len = lto_varpool_encoder_size (varpool_encoder), i;
/* Node count first so the reader knows how many records follow.  */
950 lto_output_uleb128_stream (ob->main_stream, len);
952 /* Write out the nodes.  We must first output a node and then its clones,
953 otherwise at the time of reading back the node there would be nothing to clone
955 for (i = 0; i < len; i++)
957 lto_output_varpool_node (ob, lto_varpool_encoder_deref (varpool_encoder, i),
961 lto_destroy_simple_output_block (ob);
/* Read a node from input_block IB.  TAG is the node's tag just read.
   Return the node read or overwritten.  */
967 static struct cgraph_node *
968 input_node (struct lto_file_decl_data *file_data,
969 struct lto_input_block *ib,
970 enum LTO_cgraph_tags tag,
971 VEC(cgraph_node_ptr, heap) *nodes)
974 struct cgraph_node *node;
975 struct bitpack_d *bp;
978 int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
981 int time_inlining_benefit = 0;
982 int size_inlining_benefit = 0;
983 unsigned long same_body_count = 0;
/* Reference of the clone origin, or LCC_NOT_FOUND for a plain node.  */
986 clone_ref = lto_input_sleb128 (ib);
988 decl_index = lto_input_uleb128 (ib);
989 fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
991 if (clone_ref != LCC_NOT_FOUND)
/* Materialize the node as a clone of the already-read origin.  */
993 node = cgraph_clone_node (VEC_index (cgraph_node_ptr, nodes, clone_ref), fn_decl,
994 0, CGRAPH_FREQ_BASE, 0, false, NULL);
997 node = cgraph_node (fn_decl);
/* Read fields in the order lto_output_node wrote them.  */
999 node->count = lto_input_sleb128 (ib);
1000 bp = lto_input_bitpack (ib);
1002 if (tag == LTO_cgraph_analyzed_node)
1004 stack_size = lto_input_sleb128 (ib);
1005 self_size = lto_input_sleb128 (ib);
1006 size_inlining_benefit = lto_input_sleb128 (ib);
1007 self_time = lto_input_sleb128 (ib);
1008 time_inlining_benefit = lto_input_sleb128 (ib);
1010 ref = lto_input_sleb128 (ib);
1013 ref2 = lto_input_sleb128 (ib);
1014 same_body_count = lto_input_uleb128 (ib);
1016 /* Make sure that we have not read this node before.  Nodes that
1017 have already been read will have their tag stored in the 'aux'
1018 field.  Since built-in functions can be referenced in multiple
1019 functions, they are expected to be read more than once.  */
1020 if (node->aux && !DECL_IS_BUILTIN (node->decl))
1021 internal_error ("bytecode stream: found multiple instances of cgraph "
1022 "node %d", node->uid);
1024 input_overwrite_node (file_data, node, tag, bp, stack_size, self_time,
1025 time_inlining_benefit, self_size,
1026 size_inlining_benefit);
1027 bitpack_delete (bp);
1029 /* Store a reference for now, and fix up later to be a pointer.  */
1030 node->global.inlined_to = (cgraph_node_ptr) (intptr_t) ref;
1032 /* Store a reference for now, and fix up later to be a pointer.  */
1033 node->same_comdat_group = (cgraph_node_ptr) (intptr_t) ref2;
/* Recreate same-body aliases and thunks; TYPE is the kind byte written
   by lto_output_node (0 = alias, bit 0 set = thunk).  */
1035 while (same_body_count-- > 0)
1039 decl_index = lto_input_uleb128 (ib);
1040 alias_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1041 type = lto_input_uleb128 (ib);
1045 decl_index = lto_input_uleb128 (ib);
1046 real_alias = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1047 cgraph_same_body_alias (alias_decl, real_alias);
1051 HOST_WIDE_INT fixed_offset = lto_input_uleb128 (ib);
1052 HOST_WIDE_INT virtual_value = lto_input_uleb128 (ib);
1054 decl_index = lto_input_uleb128 (ib);
1055 real_alias = lto_file_decl_data_get_fn_decl (file_data, decl_index);
/* Bit 1 of TYPE = this_adjusting, bit 2 = virtual_offset_p.  */
1056 cgraph_add_thunk (alias_decl, fn_decl, type & 2, fixed_offset,
1058 (type & 4) ? size_int (virtual_value) : NULL_TREE,
/* Read a varpool node from input_block IB.
   Return the node read or overwritten.  */
1068 static struct varpool_node *
1069 input_varpool_node (struct lto_file_decl_data *file_data,
1070 struct lto_input_block *ib)
1074 struct varpool_node *node;
1075 struct bitpack_d *bp;
1079 decl_index = lto_input_uleb128 (ib);
1080 var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1081 node = varpool_node (var_decl);
/* Unpack flags in the order lto_output_varpool_node packed them.  */
1083 bp = lto_input_bitpack (ib);
1084 node->externally_visible = bp_unpack_value (bp, 1);
1085 node->force_output = bp_unpack_value (bp, 1);
1086 node->finalized = bp_unpack_value (bp, 1);
1087 node->alias = bp_unpack_value (bp, 1);
1088 node->analyzed = node->finalized;
1089 node->used_from_other_partition = bp_unpack_value (bp, 1);
1090 node->in_other_partition = bp_unpack_value (bp, 1);
1091 aliases_p = bp_unpack_value (bp, 1);
1092 if (node->finalized)
1093 varpool_mark_needed_node (node);
1094 bitpack_delete (bp);
/* Recreate any extra name aliases streamed after the bitpack.  */
1097 count = lto_input_uleb128 (ib);
1098 for (; count > 0; count --)
1100 tree decl = lto_file_decl_data_get_var_decl (file_data,
1101 lto_input_uleb128 (ib));
1102 varpool_extra_name_alias (decl, var_decl);
/* Read an IPA reference from IB and record it against REFERING_NODE or
   REFERING_VARPOOL_NODE, resolving the referred node via NODES or
   VARPOOL_NODES.  */
1112 input_ref (struct lto_input_block *ib,
1113 struct cgraph_node *refering_node,
1114 struct varpool_node *refering_varpool_node,
1115 VEC(cgraph_node_ptr, heap) *nodes,
1116 VEC(varpool_node_ptr, heap) *varpool_nodes)
1118 struct cgraph_node *node = NULL;
1119 struct varpool_node *varpool_node = NULL;
1120 struct bitpack_d *bp;
1121 enum ipa_ref_type type;
1122 enum ipa_ref_use use;
/* Kind (1 bit) and use (2 bits) mirror lto_output_ref's bitpack.  */
1124 bp = lto_input_bitpack (ib);
1125 type = (enum ipa_ref_type) bp_unpack_value (bp, 1);
1126 use = (enum ipa_ref_use) bp_unpack_value (bp, 2);
1127 bitpack_delete (bp);
/* The reference number indexes the appropriate already-read vector.  */
1128 if (type == IPA_REF_CGRAPH)
1129 node = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1131 varpool_node = VEC_index (varpool_node_ptr, varpool_nodes, lto_input_sleb128 (ib));
1132 ipa_record_reference (refering_node, refering_varpool_node,
1133 node, varpool_node, use, NULL);
1136 /* Read an edge from IB. NODES points to a vector of previously read nodes for
1137 decoding caller and callee of the edge to be read. If INDIRECT is true, the
1138 edge being read is indirect (in the sense that it has
1139 indirect_unknown_callee set). */
1142 input_edge (struct lto_input_block *ib, VEC(cgraph_node_ptr, heap) *nodes,
1145 struct cgraph_node *caller, *callee;
1146 struct cgraph_edge *edge;
1147 unsigned int stmt_id;
1151 cgraph_inline_failed_t inline_failed;
1152 struct bitpack_d *bp;
1153 enum ld_plugin_symbol_resolution caller_resolution;
/* The caller is streamed as an index into NODES; a missing node or one
   without a decl means the bytecode stream is corrupt.  */
1156 caller = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1157 if (caller == NULL || caller->decl == NULL_TREE)
1158 internal_error ("bytecode stream: no caller found while reading edge");
/* Direct edges additionally stream the callee index.  */
1162 callee = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1163 if (callee == NULL || callee->decl == NULL_TREE)
1164 internal_error ("bytecode stream: no callee found while reading edge");
1169 count = (gcov_type) lto_input_sleb128 (ib);
/* Unpack the edge attributes; the unpack order must mirror the
   output side exactly.  */
1171 bp = lto_input_bitpack (ib);
1172 stmt_id = (unsigned int) bp_unpack_value (bp, HOST_BITS_PER_INT);
1173 inline_failed = (cgraph_inline_failed_t) bp_unpack_value (bp,
1175 freq = (int) bp_unpack_value (bp, HOST_BITS_PER_INT);
1176 nest = (unsigned) bp_unpack_value (bp, 30);
1178 /* If the caller was preempted, don't create the edge.
1179 ??? Should we ever have edges from a preempted caller? */
1180 caller_resolution = lto_symtab_get_resolution (caller->decl);
1181 if (caller_resolution == LDPR_PREEMPTED_REG
1182 || caller_resolution == LDPR_PREEMPTED_IR)
/* Recreate the edge with the profile data read above.  */
1186 edge = cgraph_create_indirect_edge (caller, NULL, 0, count, freq, nest);
1188 edge = cgraph_create_edge (caller, callee, NULL, count, freq, nest);
1190 edge->indirect_inlining_edge = bp_unpack_value (bp, 1);
1191 edge->lto_stmt_uid = stmt_id;
1192 edge->inline_failed = inline_failed;
1193 edge->call_stmt_cannot_inline_p = bp_unpack_value (bp, 1);
1194 edge->can_throw_external = bp_unpack_value (bp, 1);
/* ECF flags of the (unknown) callee, rebuilt one streamed bit at a
   time and stored in the edge's indirect_info.  */
1197 if (bp_unpack_value (bp, 1))
1198 ecf_flags |= ECF_CONST;
1199 if (bp_unpack_value (bp, 1))
1200 ecf_flags |= ECF_PURE;
1201 if (bp_unpack_value (bp, 1))
1202 ecf_flags |= ECF_NORETURN;
1203 if (bp_unpack_value (bp, 1))
1204 ecf_flags |= ECF_MALLOC;
1205 if (bp_unpack_value (bp, 1))
1206 ecf_flags |= ECF_NOTHROW;
1207 if (bp_unpack_value (bp, 1))
1208 ecf_flags |= ECF_RETURNS_TWICE;
1209 edge->indirect_info->ecf_flags = ecf_flags;
1211 bitpack_delete (bp);
1215 /* Read a cgraph from IB using the info in FILE_DATA.  Returns the vector
1216 of nodes read, in stream order (edge records index into it).  */
1217 static VEC(cgraph_node_ptr, heap) *
1218 input_cgraph_1 (struct lto_file_decl_data *file_data,
1219 struct lto_input_block *ib)
1221 enum LTO_cgraph_tags tag;
1222 VEC(cgraph_node_ptr, heap) *nodes = NULL;
1223 struct cgraph_node *node;
1225 unsigned HOST_WIDE_INT len;
/* Records are consumed in stream order until the zero stopper tag
   (tag 0 is reserved for the stopper; see the enum above).  */
1227 tag = (enum LTO_cgraph_tags) lto_input_uleb128 (ib);
1230 if (tag == LTO_cgraph_edge)
1231 input_edge (ib, nodes, false);
1232 else if (tag == LTO_cgraph_indirect_edge)
1233 input_edge (ib, nodes, true);
/* Anything else is a node record; its position in NODES is the
   reference index used by subsequent edge/ref records.  */
1236 node = input_node (file_data, ib, tag,nodes);
1237 if (node == NULL || node->decl == NULL_TREE)
1238 internal_error ("bytecode stream: found empty cgraph node");
1239 VEC_safe_push (cgraph_node_ptr, heap, nodes, node);
1240 lto_cgraph_encoder_encode (file_data->cgraph_node_encoder, node);
1243 tag = (enum LTO_cgraph_tags) lto_input_uleb128 (ib);
1246 /* Input toplevel asms: each is a length-prefixed raw byte string.  */
1247 len = lto_input_uleb128 (ib);
1250 char *str = (char *)xmalloc (len + 1);
1251 for (i = 0; i < len; ++i)
1252 str[i] = lto_input_1_unsigned (ib);
1253 cgraph_add_asm_node (build_string (len, str));
1256 len = lto_input_uleb128 (ib);
/* Nodes were streamed with inlined_to/same_comdat_group stored as
   vector indices; now that every node exists, turn those indices
   back into pointers.  */
1259 for (i = 0; VEC_iterate (cgraph_node_ptr, nodes, i, node); i++)
1261 int ref = (int) (intptr_t) node->global.inlined_to;
1263 /* Fixup inlined_to from reference to pointer. */
1264 if (ref != LCC_NOT_FOUND)
1265 node->global.inlined_to = VEC_index (cgraph_node_ptr, nodes, ref);
1267 node->global.inlined_to = NULL;
1269 ref = (int) (intptr_t) node->same_comdat_group;
1271 /* Fixup same_comdat_group from reference to pointer. */
1272 if (ref != LCC_NOT_FOUND)
1273 node->same_comdat_group = VEC_index (cgraph_node_ptr, nodes, ref);
1275 node->same_comdat_group = NULL;
1280 /* Read a varpool from IB using the info in FILE_DATA.  Returns the
1281 vector of varpool nodes read, in stream order.  */
1282 static VEC(varpool_node_ptr, heap) *
1283 input_varpool_1 (struct lto_file_decl_data *file_data,
1284 struct lto_input_block *ib)
1286 unsigned HOST_WIDE_INT len;
1287 VEC(varpool_node_ptr, heap) *varpool = NULL;
/* The section starts with a node count followed by that many varpool
   node records.  */
1289 len = lto_input_uleb128 (ib);
1292 VEC_safe_push (varpool_node_ptr, heap, varpool,
1293 input_varpool_node (file_data, ib));
1299 /* Input all IPA reference records from IB, resolving streamed indices
1300 against NODES (cgraph) and VARPOOL (variables). */
1302 input_refs (struct lto_input_block *ib,
1303 VEC(cgraph_node_ptr, heap) *nodes,
1304 VEC(varpool_node_ptr, heap) *varpool)
/* First pass: references whose referring object is a cgraph node.
   Each group is a count, the referring node's index, then the refs.  */
1310 struct cgraph_node *node;
1311 count = lto_input_uleb128 (ib);
1314 idx = lto_input_uleb128 (ib);
1315 node = VEC_index (cgraph_node_ptr, nodes, idx);
1318 input_ref (ib, node, NULL, nodes, varpool);
/* Second pass: references whose referring object is a varpool node.  */
1324 struct varpool_node *node;
1325 count = lto_input_uleb128 (ib);
1328 node = VEC_index (varpool_node_ptr, varpool, lto_input_uleb128 (ib));
1331 input_ref (ib, NULL, node, nodes, varpool);
1338 static struct gcov_ctr_summary lto_gcov_summary;
1340 /* Input profile_info from IB. */
1342 input_profile_summary (struct lto_input_block *ib)
1344 unsigned int runs = lto_input_uleb128 (ib);
1349 profile_info = <o_gcov_summary;
1350 lto_gcov_summary.runs = runs;
1351 lto_gcov_summary.sum_all = lto_input_sleb128 (ib);
1352 lto_gcov_summary.run_max = lto_input_sleb128 (ib);
1353 lto_gcov_summary.sum_max = lto_input_sleb128 (ib);
1355 /* We can support this by scaling all counts to nearest common multiple
1356 of all different runs, but it is perhaps not worth the effort. */
1357 else if (profile_info->runs != runs
1358 || profile_info->sum_all != lto_input_sleb128 (ib)
1359 || profile_info->run_max != lto_input_sleb128 (ib)
1360 || profile_info->sum_max != lto_input_sleb128 (ib))
1361 sorry ("Combining units with different profiles is not supported.");
1362 /* We allow some units to have profile and other to not have one. This will
1363 just make unprofiled units to be size optimized that is sane. */
1368 /* Input and merge the cgraph from each of the .o files passed to
1374 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1375 struct lto_file_decl_data *file_data;
1377 struct cgraph_node *node;
1379 while ((file_data = file_data_vec[j++]))
1383 struct lto_input_block *ib;
1384 VEC(cgraph_node_ptr, heap) *nodes;
1385 VEC(varpool_node_ptr, heap) *varpool;
1387 ib = lto_create_simple_input_block (file_data, LTO_section_cgraph,
1389 input_profile_summary (ib);
1390 file_data->cgraph_node_encoder = lto_cgraph_encoder_new ();
1391 nodes = input_cgraph_1 (file_data, ib);
1392 lto_destroy_simple_input_block (file_data, LTO_section_cgraph,
1395 ib = lto_create_simple_input_block (file_data, LTO_section_varpool,
1397 varpool = input_varpool_1 (file_data, ib);
1398 lto_destroy_simple_input_block (file_data, LTO_section_varpool,
1401 ib = lto_create_simple_input_block (file_data, LTO_section_refs,
1403 input_refs (ib, nodes, varpool);
1404 lto_destroy_simple_input_block (file_data, LTO_section_refs,
1406 VEC_free (cgraph_node_ptr, heap, nodes);
1407 VEC_free (varpool_node_ptr, heap, varpool);
1410 /* Clear out the aux field that was used to store enough state to
1411 tell which nodes should be overwritten. */
1412 for (node = cgraph_nodes; node; node = node->next)
1414 /* Some nodes may have been created by cgraph_node. This
1415 happens when the callgraph contains nested functions. If the
1416 node for the parent function was never emitted to the gimple
1417 file, cgraph_node will create a node for it when setting the
1418 context of the nested function. */
1419 if (node->local.lto_file_data)