1 /* Write and read the cgraph to the memory mapped representation of a
4 Copyright 2009 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
34 #include "langhooks.h"
35 #include "basic-block.h"
36 #include "tree-flow.h"
40 #include "diagnostic.h"
45 #include "pointer-set.h"
46 #include "lto-streamer.h"
49 static void output_varpool (cgraph_node_set, varpool_node_set);
51 /* Cgraph streaming is organized as set of record whose type
52 is indicated by a tag. */
/* NOTE(review): the 'enum LTO_cgraph_tags {' opener and the
   LTO_cgraph_edge enumerator used by lto_output_edge below are not
   visible in this extract -- confirm against the full source.  */
55 /* Must leave 0 for the stopper. */
57 /* Cgraph node without body available. */
58 LTO_cgraph_unavail_node = 1,
59 /* Cgraph node with function body. */
60 LTO_cgraph_analyzed_node,
/* Tag for an indirect call edge record (see lto_output_edge).  */
63 LTO_cgraph_indirect_edge
66 /* Create a new cgraph encoder. */
/* NOTE(review): this extract appears line-sampled; return types,
   'return' statements and braces are missing from the helpers below.
   Comments describe only the statements that are visible.  */
69 lto_cgraph_encoder_new (void)
/* Allocate a zero-initialized encoder with an empty node->ref map,
   an empty node vector and an empty "encode body" set.  */
71 lto_cgraph_encoder_t encoder = XCNEW (struct lto_cgraph_encoder_d);
72 encoder->map = pointer_map_create ();
73 encoder->nodes = NULL;
74 encoder->body = pointer_set_create ();
79 /* Delete ENCODER and its components. */
82 lto_cgraph_encoder_delete (lto_cgraph_encoder_t encoder)
84 VEC_free (cgraph_node_ptr, heap, encoder->nodes);
85 pointer_map_destroy (encoder->map);
86 pointer_set_destroy (encoder->body);
91 /* Return the existing reference number of NODE in the cgraph encoder in
92 output block OB. Assign a new reference if this is the first time
   NODE is used.  */
96 lto_cgraph_encoder_encode (lto_cgraph_encoder_t encoder,
97 struct cgraph_node *node)
/* Probe the map first; only on a miss is a fresh reference number
   (the current vector length) assigned and NODE appended.  */
102 slot = pointer_map_contains (encoder->map, node);
105 ref = VEC_length (cgraph_node_ptr, encoder->nodes);
106 slot = pointer_map_insert (encoder->map, node);
107 *slot = (void *) (intptr_t) ref;
108 VEC_safe_push (cgraph_node_ptr, heap, encoder->nodes, node);
111 ref = (int) (intptr_t) *slot;
116 #define LCC_NOT_FOUND (-1)
118 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
119 or LCC_NOT_FOUND if it is not there. */
122 lto_cgraph_encoder_lookup (lto_cgraph_encoder_t encoder,
123 struct cgraph_node *node)
125 void **slot = pointer_map_contains (encoder->map, node);
126 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
130 /* Return the cgraph node corresponding to REF using ENCODER. */
133 lto_cgraph_encoder_deref (lto_cgraph_encoder_t encoder, int ref)
135 if (ref == LCC_NOT_FOUND)
138 return VEC_index (cgraph_node_ptr, encoder->nodes, ref);
142 /* Return TRUE if we should encode initializer of NODE (if any). */
/* (The comment above looks stale: this predicate tests membership in
   the BODY set, not the initializer set.)  */
145 lto_cgraph_encoder_encode_body_p (lto_cgraph_encoder_t encoder,
146 struct cgraph_node *node)
148 return pointer_set_contains (encoder->body, node);
151 /* Return TRUE if we should encode body of NODE (if any). */
/* (Stale comment above: this SETTER marks NODE's body for streaming;
   it does not return a flag.)  */
154 lto_set_cgraph_encoder_encode_body (lto_cgraph_encoder_t encoder,
155 struct cgraph_node *node)
157 pointer_set_insert (encoder->body, node);
160 /* Create a new varpool encoder. */
/* Mirrors the cgraph encoder above, but tracks varpool nodes and an
   "encode initializer" set instead of a body set.  */
162 lto_varpool_encoder_t
163 lto_varpool_encoder_new (void)
165 lto_varpool_encoder_t encoder = XCNEW (struct lto_varpool_encoder_d);
166 encoder->map = pointer_map_create ();
167 encoder->initializer = pointer_set_create ();
168 encoder->nodes = NULL;
173 /* Delete ENCODER and its components. */
176 lto_varpool_encoder_delete (lto_varpool_encoder_t encoder)
178 VEC_free (varpool_node_ptr, heap, encoder->nodes);
179 pointer_map_destroy (encoder->map);
180 pointer_set_destroy (encoder->initializer);
185 /* Return the existing reference number of NODE in the varpool encoder in
186 output block OB. Assign a new reference if this is the first time
   NODE is used.  */
190 lto_varpool_encoder_encode (lto_varpool_encoder_t encoder,
191 struct varpool_node *node)
/* Same probe-then-insert pattern as lto_cgraph_encoder_encode.  */
196 slot = pointer_map_contains (encoder->map, node);
199 ref = VEC_length (varpool_node_ptr, encoder->nodes);
200 slot = pointer_map_insert (encoder->map, node);
201 *slot = (void *) (intptr_t) ref;
202 VEC_safe_push (varpool_node_ptr, heap, encoder->nodes, node);
205 ref = (int) (intptr_t) *slot;
210 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
211 or LCC_NOT_FOUND if it is not there. */
214 lto_varpool_encoder_lookup (lto_varpool_encoder_t encoder,
215 struct varpool_node *node)
217 void **slot = pointer_map_contains (encoder->map, node);
218 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
222 /* Return the varpool node corresponding to REF using ENCODER. */
224 struct varpool_node *
225 lto_varpool_encoder_deref (lto_varpool_encoder_t encoder, int ref)
227 if (ref == LCC_NOT_FOUND)
230 return VEC_index (varpool_node_ptr, encoder->nodes, ref);
234 /* Return number of encoded nodes in ENCODER. */
237 lto_varpool_encoder_size (lto_varpool_encoder_t encoder)
239 return VEC_length (varpool_node_ptr, encoder->nodes);
242 /* Return TRUE if we should encode initializer of NODE (if any). */
245 lto_varpool_encoder_encode_initializer_p (lto_varpool_encoder_t encoder,
246 struct varpool_node *node)
248 return pointer_set_contains (encoder->initializer, node);
251 /* Return TRUE if we should encode initializer of NODE (if any). */
/* (Stale comment above: this SETTER marks NODE's initializer for
   streaming; it does not return a flag.)  */
254 lto_set_varpool_encoder_encode_initializer (lto_varpool_encoder_t encoder,
255 struct varpool_node *node)
257 pointer_set_insert (encoder->initializer, node);
260 /* Output the cgraph EDGE to OB using ENCODER. */
263 lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
264 lto_cgraph_encoder_t encoder)
268 struct bitpack_d *bp;
/* Tag the record first: indirect edges get their own tag so the reader
   knows no callee reference follows.  */
270 if (edge->indirect_unknown_callee)
271 lto_output_uleb128_stream (ob->main_stream, LTO_cgraph_indirect_edge)
273 lto_output_uleb128_stream (ob->main_stream, LTO_cgraph_edge);
/* Caller (and, for direct edges, callee) must already be in the
   encoder -- edges are only streamed for nodes in the partition.  */
275 ref = lto_cgraph_encoder_lookup (encoder, edge->caller);
276 gcc_assert (ref != LCC_NOT_FOUND);
277 lto_output_sleb128_stream (ob->main_stream, ref);
279 if (!edge->indirect_unknown_callee)
281 ref = lto_cgraph_encoder_lookup (encoder, edge->callee);
282 gcc_assert (ref != LCC_NOT_FOUND);
283 lto_output_sleb128_stream (ob->main_stream, ref);
286 lto_output_sleb128_stream (ob->main_stream, edge->count);
/* Bit-pack the scalar edge attributes; input_edge must unpack in the
   same order and widths.  In WPA mode the call statement is not in
   memory, so the stored uid is streamed instead of gimple_uid.  */
288 bp = bitpack_create ();
289 uid = flag_wpa ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt);
290 bp_pack_value (bp, uid, HOST_BITS_PER_INT);
291 bp_pack_value (bp, edge->inline_failed, HOST_BITS_PER_INT);
292 bp_pack_value (bp, edge->frequency, HOST_BITS_PER_INT);
293 bp_pack_value (bp, edge->loop_nest, 30);
294 bp_pack_value (bp, edge->indirect_inlining_edge, 1);
295 bp_pack_value (bp, edge->call_stmt_cannot_inline_p, 1);
296 bp_pack_value (bp, edge->can_throw_external, 1);
297 if (edge->indirect_unknown_callee)
/* Indirect edges additionally stream selected ECF_* flags of the
   (unknown) callee as single bits.  */
299 int flags = edge->indirect_info->ecf_flags;
300 bp_pack_value (bp, (flags & ECF_CONST) != 0, 1);
301 bp_pack_value (bp, (flags & ECF_PURE) != 0, 1);
302 bp_pack_value (bp, (flags & ECF_NORETURN) != 0, 1);
303 bp_pack_value (bp, (flags & ECF_MALLOC) != 0, 1);
304 bp_pack_value (bp, (flags & ECF_NOTHROW) != 0, 1);
305 bp_pack_value (bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
306 /* Flags that should not appear on indirect calls. */
307 gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
/* NOTE(review): the rest of the asserted flag mask and its closing
   parentheses are missing from this extract.  */
312 lto_output_bitpack (ob->main_stream, bp);
316 /* Return if LIST contain references from other partitions. */
319 referenced_from_other_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
320 varpool_node_set vset)
/* Walk everything refering to LIST's owner; a referer outside both SET
   and VSET indicates a cross-partition reference.  NOTE(review): the
   'return' statements of these predicates are missing from this
   extract.  */
324 for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
326 if (ref->refering_type == IPA_REF_CGRAPH)
328 if (!cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
333 if (!varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
341 /* Return true when node is reachable from other partition. */
344 reachable_from_other_partition_p (struct cgraph_node *node, cgraph_node_set set)
346 struct cgraph_edge *e;
/* Inline clones are materialized inside their caller, so only callers
   determine reachability.  */
349 if (node->global.inlined_to)
351 for (e = node->callers; e; e = e->next_caller)
352 if (!cgraph_node_in_set_p (e->caller, set))
357 /* Return if LIST contain references from other partitions. */
/* (This variant tests for a referer INSIDE the current partition --
   note the un-negated set membership tests below.)  */
360 referenced_from_this_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
361 varpool_node_set vset)
365 for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
367 if (ref->refering_type == IPA_REF_CGRAPH)
369 if (cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
374 if (varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
382 /* Return true when node is reachable from other partition. */
/* (This variant tests for a caller inside SET.)  */
385 reachable_from_this_partition_p (struct cgraph_node *node, cgraph_node_set set)
387 struct cgraph_edge *e;
390 if (node->global.inlined_to)
392 for (e = node->callers; e; e = e->next_caller)
393 if (cgraph_node_in_set_p (e->caller, set))
398 /* Output the cgraph NODE to OB. ENCODER is used to find the
399 reference number of NODE->inlined_to. SET is the set of nodes we
400 are writing to the current file. If NODE is not in SET, then NODE
401 is a boundary of a cgraph_node_set and we pretend NODE just has a
402 decl and no callees. WRITTEN_DECLS is the set of FUNCTION_DECLs
403 that have had their callgraph node written so far. This is used to
404 determine if NODE is a clone of a previously written node. */
407 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
408 lto_cgraph_encoder_t encoder, cgraph_node_set set,
409 varpool_node_set vset)
412 struct bitpack_d *bp;
415 bool in_other_partition = false;
416 struct cgraph_node *clone_of;
418 boundary_p = !cgraph_node_in_set_p (node, set);
/* Analyzed nodes inside the partition carry a full summary; everything
   else is written as an unavailable (declaration-only) node.  */
420 if (node->analyzed && !boundary_p)
421 tag = LTO_cgraph_analyzed_node;
423 tag = LTO_cgraph_unavail_node;
425 lto_output_uleb128_stream (ob->main_stream, tag);
427 /* In WPA mode, we only output part of the call-graph. Also, we
428 fake cgraph node attributes. There are two cases that we care.
430 Boundary nodes: There are nodes that are not part of SET but are
431 called from within SET. We artificially make them look like
432 externally visible nodes with no function body.
434 Cherry-picked nodes: These are nodes we pulled from other
435 translation units into SET during IPA-inlining. We make them as
436 local static nodes to prevent clashes with other local statics. */
437 if (boundary_p && node->analyzed)
439 /* Inline clones can not be part of boundary.
440 gcc_assert (!node->global.inlined_to);
442 FIXME: At the moment they can be, when partition contains an inline
443 clone that is clone of inline clone from outside partition. We can
444 reshape the clone tree and make other tree to be the root, but it
445 needs a bit extra work and will be promplty done by cgraph_remove_node
446 after reading back. */
447 in_other_partition = 1;
/* Walk up the clone tree to the nearest ancestor that is itself in the
   encoder; LCC_NOT_FOUND is streamed when there is none.  */
450 clone_of = node->clone_of;
452 && (ref = lto_cgraph_encoder_lookup (encoder, node->clone_of)) == LCC_NOT_FOUND)
453 if (clone_of->prev_sibling_clone)
454 clone_of = clone_of->prev_sibling_clone;
456 clone_of = clone_of->clone_of;
458 lto_output_sleb128_stream (ob->main_stream, LCC_NOT_FOUND);
460 lto_output_sleb128_stream (ob->main_stream, ref);
463 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
464 lto_output_sleb128_stream (ob->main_stream, node->count);
/* Bit-pack the boolean node flags in a fixed order; the reader
   (input_overwrite_node) must unpack in exactly the same order.  */
466 bp = bitpack_create ();
467 bp_pack_value (bp, node->local.local, 1);
468 bp_pack_value (bp, node->local.externally_visible, 1);
469 bp_pack_value (bp, node->local.finalized, 1);
470 bp_pack_value (bp, node->local.inlinable, 1);
471 bp_pack_value (bp, node->local.disregard_inline_limits, 1);
472 bp_pack_value (bp, node->local.redefined_extern_inline, 1);
473 bp_pack_value (bp, node->local.vtable_method, 1);
474 bp_pack_value (bp, node->needed, 1);
475 bp_pack_value (bp, node->address_taken, 1);
476 bp_pack_value (bp, node->abstract_and_needed, 1);
/* "Used from other partition": analyzed, non-external, and either
   called or referenced from outside this partition.  */
477 bp_pack_value (bp, tag == LTO_cgraph_analyzed_node
478 && !DECL_EXTERNAL (node->decl)
479 && (reachable_from_other_partition_p (node, set)
480 || referenced_from_other_partition_p (&node->ref_list, set, vset)), 1);
481 bp_pack_value (bp, node->lowered, 1);
482 bp_pack_value (bp, in_other_partition, 1);
483 bp_pack_value (bp, node->alias, 1);
484 bp_pack_value (bp, node->finalized_by_frontend, 1);
485 bp_pack_value (bp, node->frequency, 2);
486 lto_output_bitpack (ob->main_stream, bp);
/* Analyzed nodes also stream the inline summary and (if inlined) the
   reference of the node they were inlined into.  */
489 if (tag == LTO_cgraph_analyzed_node)
491 lto_output_sleb128_stream (ob->main_stream,
492 node->local.inline_summary.estimated_self_stack_size);
493 lto_output_sleb128_stream (ob->main_stream,
494 node->local.inline_summary.self_size);
495 lto_output_sleb128_stream (ob->main_stream,
496 node->local.inline_summary.size_inlining_benefit);
497 lto_output_sleb128_stream (ob->main_stream,
498 node->local.inline_summary.self_time);
499 lto_output_sleb128_stream (ob->main_stream,
500 node->local.inline_summary.time_inlining_benefit);
501 if (node->global.inlined_to)
503 ref = lto_cgraph_encoder_lookup (encoder, node->global.inlined_to);
504 gcc_assert (ref != LCC_NOT_FOUND);
509 lto_output_sleb128_stream (ob->main_stream, ref);
512 if (node->same_comdat_group && !boundary_p)
514 ref = lto_cgraph_encoder_lookup (encoder, node->same_comdat_group);
515 gcc_assert (ref != LCC_NOT_FOUND);
519 lto_output_sleb128_stream (ob->main_stream, ref);
/* Stream the same-body aliases/thunks hanging off NODE, in reverse
   list order (via alias->previous below).  */
523 struct cgraph_node *alias;
524 unsigned long alias_count = 1;
525 for (alias = node->same_body; alias->next; alias = alias->next)
527 lto_output_uleb128_stream (ob->main_stream, alias_count);
530 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
532 if (alias->thunk.thunk_p)
/* Encode thunk_p together with the this_adjusting and
   virtual_offset_p bits in a single uleb128.  */
534 lto_output_uleb128_stream
536 1 + (alias->thunk.this_adjusting != 0) * 2
537 + (alias->thunk.virtual_offset_p != 0) * 4);
538 lto_output_uleb128_stream (ob->main_stream,
539 alias->thunk.fixed_offset);
540 lto_output_uleb128_stream (ob->main_stream,
541 alias->thunk.virtual_value);
542 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
547 lto_output_uleb128_stream (ob->main_stream, 0);
548 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
551 alias = alias->previous;
/* Terminating zero: no (more) same-body aliases for this node.  */
556 lto_output_uleb128_stream (ob->main_stream, 0);
559 /* Output the varpool NODE to OB.
560 If NODE is not in SET, then NODE is a boundary. */
563 lto_output_varpool_node (struct lto_simple_output_block *ob, struct varpool_node *node,
564 cgraph_node_set set, varpool_node_set vset)
566 bool boundary_p = !varpool_node_in_set_p (node, vset) && node->analyzed;
567 struct bitpack_d *bp;
568 struct varpool_node *alias;
571 lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
/* Bit-pack the varpool flags; input_varpool_node unpacks in the same
   order.  */
572 bp = bitpack_create ();
573 bp_pack_value (bp, node->externally_visible, 1);
574 bp_pack_value (bp, node->force_output, 1);
575 bp_pack_value (bp, node->finalized, 1);
576 bp_pack_value (bp, node->alias, 1);
577 gcc_assert (!node->alias || !node->extra_name);
578 gcc_assert (node->finalized || !node->analyzed);
579 gcc_assert (node->needed);
580 /* Constant pool initializers can be de-unified into individual ltrans units.
581 FIXME: Alternatively at -Os we may want to avoid generating for them the local
582 labels and share them across LTRANS partitions. */
583 if (DECL_IN_CONSTANT_POOL (node->decl))
585 bp_pack_value (bp, 0, 1); /* used_from_other_parition. */
586 bp_pack_value (bp, 0, 1); /* in_other_partition. */
590 bp_pack_value (bp, node->analyzed
591 && referenced_from_other_partition_p (&node->ref_list,
593 bp_pack_value (bp, boundary_p, 1); /* in_other_partition. */
595 /* Also emit any extra name aliases. */
/* NOTE(review): the loop body incrementing 'count' is missing from
   this extract; 'count' is presumably the number of extra-name
   aliases -- confirm against the full source.  */
596 for (alias = node->extra_name; alias; alias = alias->next)
598 bp_pack_value (bp, count != 0, 1);
599 lto_output_bitpack (ob->main_stream, bp);
/* Stream the alias count followed by one var-decl index per alias.  */
604 lto_output_uleb128_stream (ob->main_stream, count);
605 for (alias = node->extra_name; alias; alias = alias->next)
606 lto_output_var_decl_index (ob->decl_state, ob->main_stream, alias->decl);
610 /* Output the varpool NODE to OB.
611 If NODE is not in SET, then NODE is a boundary. */
/* (The comment above looks copy-pasted: this streams one IPA reference
   REF -- its target kind, use kind, and encoder index.)  */
614 lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
615 lto_cgraph_encoder_t encoder,
616 lto_varpool_encoder_t varpool_encoder)
618 struct bitpack_d *bp = bitpack_create ();
619 bp_pack_value (bp, ref->refered_type, 1);
620 bp_pack_value (bp, ref->use, 2);
621 lto_output_bitpack (ob->main_stream, bp);
/* The referred node must already be in the relevant encoder; stream
   its index.  */
623 if (ref->refered_type == IPA_REF_CGRAPH)
625 int nref = lto_cgraph_encoder_lookup (encoder, ipa_ref_node (ref));
626 gcc_assert (nref != LCC_NOT_FOUND);
627 lto_output_sleb128_stream (ob->main_stream, nref);
631 int nref = lto_varpool_encoder_lookup (varpool_encoder,
632 ipa_ref_varpool_node (ref));
633 gcc_assert (nref != LCC_NOT_FOUND);
634 lto_output_sleb128_stream (ob->main_stream, nref);
638 /* Stream out profile_summary to OB. */
641 output_profile_summary (struct lto_simple_output_block *ob)
/* NOTE(review): the 'if (profile_info)' / 'else' lines appear to be
   missing from this extract; the trailing zero below is presumably the
   no-profile marker -- confirm against the full source.  */
645 /* We do not output num, it is not terribly useful. */
646 gcc_assert (profile_info->runs);
647 lto_output_uleb128_stream (ob->main_stream, profile_info->runs);
648 lto_output_sleb128_stream (ob->main_stream, profile_info->sum_all);
649 lto_output_sleb128_stream (ob->main_stream, profile_info->run_max);
650 lto_output_sleb128_stream (ob->main_stream, profile_info->sum_max);
653 lto_output_uleb128_stream (ob->main_stream, 0);
656 /* Add NODE into encoder as well as nodes it is cloned from.
657 Do it in a way so clones appear first. */
660 add_node_to (lto_cgraph_encoder_t encoder, struct cgraph_node *node,
/* Recurse to the clone origin first so ancestors get smaller encoder
   indices than their clones.  NOTE(review): the condition guarding this
   recursion is missing from this extract.  */
664 add_node_to (encoder, node->clone_of, include_body);
665 else if (include_body)
666 lto_set_cgraph_encoder_encode_body (encoder, node);
667 lto_cgraph_encoder_encode (encoder, node);
670 /* Add all references in LIST to encoders. */
673 add_references (lto_cgraph_encoder_t encoder,
674 lto_varpool_encoder_t varpool_encoder,
675 struct ipa_ref_list *list)
/* Function targets go into the cgraph encoder (without body); variable
   targets into the varpool encoder.  */
679 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
680 if (ref->refered_type == IPA_REF_CGRAPH)
681 add_node_to (encoder, ipa_ref_node (ref), false);
684 struct varpool_node *vnode = ipa_ref_varpool_node (ref);
685 lto_varpool_encoder_encode (varpool_encoder, vnode);
689 /* Output all callees or indirect outgoing edges. EDGE must be the first such
   edge.  */
693 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
694 struct lto_simple_output_block *ob,
695 lto_cgraph_encoder_t encoder)
700 /* Output edges in backward direction, so the reconstructed callgraph match
701 and it is easy to associate call sites in the IPA pass summaries. */
/* Advance to the tail of the callee list, then walk back via
   prev_callee, emitting each edge.  */
702 while (edge->next_callee)
703 edge = edge->next_callee;
704 for (; edge; edge = edge->prev_callee)
705 lto_output_edge (ob, edge, encoder);
708 /* Output the part of the cgraph in SET. */
/* (More precisely: stream the LTO_section_refs section -- for every
   cgraph and varpool node in the partition, its reference count,
   encoder index, and each reference.)  */
711 output_refs (cgraph_node_set set, varpool_node_set vset,
712 lto_cgraph_encoder_t encoder,
713 lto_varpool_encoder_t varpool_encoder)
715 cgraph_node_set_iterator csi;
716 varpool_node_set_iterator vsi;
717 struct lto_simple_output_block *ob;
722 ob = lto_create_simple_output_block (LTO_section_refs);
/* First the function (cgraph) nodes...  */
724 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
726 struct cgraph_node *node = csi_node (csi);
728 count = ipa_ref_list_nreferences (&node->ref_list);
731 lto_output_uleb128_stream (ob->main_stream, count);
732 lto_output_uleb128_stream (ob->main_stream,
733 lto_cgraph_encoder_lookup (encoder, node));
734 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
735 lto_output_ref (ob, ref, encoder, varpool_encoder);
/* Zero terminates the cgraph part of the section.  */
739 lto_output_uleb128_stream (ob->main_stream, 0);
/* ...then the variable (varpool) nodes, same layout.  */
741 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
743 struct varpool_node *node = vsi_node (vsi);
745 count = ipa_ref_list_nreferences (&node->ref_list);
748 lto_output_uleb128_stream (ob->main_stream, count);
749 lto_output_uleb128_stream (ob->main_stream,
750 lto_varpool_encoder_lookup (varpool_encoder,
752 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
753 lto_output_ref (ob, ref, encoder, varpool_encoder);
757 lto_output_uleb128_stream (ob->main_stream, 0);
759 lto_destroy_simple_output_block (ob);
762 /* Find out all cgraph and varpool nodes we want to encode in current unit
763 and insert them to encoders. */
765 compute_ltrans_boundary (struct lto_out_decl_state *state,
766 cgraph_node_set set, varpool_node_set vset)
768 struct cgraph_node *node;
769 cgraph_node_set_iterator csi;
770 varpool_node_set_iterator vsi;
771 struct cgraph_edge *edge;
773 lto_cgraph_encoder_t encoder;
774 lto_varpool_encoder_t varpool_encoder;
776 encoder = state->cgraph_node_encoder = lto_cgraph_encoder_new ();
777 varpool_encoder = state->varpool_node_encoder = lto_varpool_encoder_new ();
779 /* Go over all the nodes in SET and assign references. */
780 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
782 node = csi_node (csi);
783 add_node_to (encoder, node, true);
784 add_references (encoder, varpool_encoder, &node->ref_list);
/* Same for the variables in VSET; their initializers are always
   streamed.  */
786 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
788 struct varpool_node *vnode = vsi_node (vsi);
789 gcc_assert (!vnode->alias);
790 lto_varpool_encoder_encode (varpool_encoder, vnode);
791 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
792 add_references (encoder, varpool_encoder, &vnode->ref_list);
794 /* Pickle in also the initializer of all referenced readonly variables
795 to help folding. Constant pool variables are not shared, so we must
   pickle those too.  */
/* Note: lto_varpool_encoder_size is re-evaluated each iteration on
   purpose -- add_references below can append new varpool nodes that
   then get processed by later iterations.  */
797 for (i = 0; i < lto_varpool_encoder_size (varpool_encoder); i++)
799 struct varpool_node *vnode = lto_varpool_encoder_deref (varpool_encoder, i);
800 if (DECL_INITIAL (vnode->decl)
801 && !lto_varpool_encoder_encode_initializer_p (varpool_encoder,
803 && (DECL_IN_CONSTANT_POOL (vnode->decl)
804 || TREE_READONLY (vnode->decl)))
806 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
807 add_references (encoder, varpool_encoder, &vnode->ref_list);
811 /* Go over all the nodes again to include callees that are not in
   the set (boundary nodes), streamed without bodies.  */
813 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
815 node = csi_node (csi);
816 for (edge = node->callees; edge; edge = edge->next_callee)
818 struct cgraph_node *callee = edge->callee;
819 if (!cgraph_node_in_set_p (callee, set))
821 /* We should have moved all the inlines. */
822 gcc_assert (!callee->global.inlined_to);
823 add_node_to (encoder, callee, false);
829 /* Output the part of the cgraph in SET. */
/* Top-level writer for the LTO_section_cgraph section: profile summary,
   nodes, edges, toplevel asms; then delegates to output_varpool and
   output_refs for the companion sections.  */
832 output_cgraph (cgraph_node_set set, varpool_node_set vset)
834 struct cgraph_node *node;
835 struct lto_simple_output_block *ob;
836 cgraph_node_set_iterator csi;
838 lto_cgraph_encoder_t encoder;
839 lto_varpool_encoder_t varpool_encoder;
840 struct cgraph_asm_node *can;
842 ob = lto_create_simple_output_block (LTO_section_cgraph);
844 output_profile_summary (ob);
846 /* An encoder for cgraph nodes should have been created by
847 ipa_write_summaries_1. */
848 gcc_assert (ob->decl_state->cgraph_node_encoder);
849 gcc_assert (ob->decl_state->varpool_node_encoder);
850 encoder = ob->decl_state->cgraph_node_encoder;
851 varpool_encoder = ob->decl_state->varpool_node_encoder;
853 /* Write out the nodes. We must first output a node and then its clones,
854 otherwise at a time reading back the node there would be nothing to clone
   from.  */
856 n_nodes = lto_cgraph_encoder_size (encoder);
857 for (i = 0; i < n_nodes; i++)
859 node = lto_cgraph_encoder_deref (encoder, i);
860 lto_output_node (ob, node, encoder, set, vset);
863 /* Go over the nodes in SET again to write edges. */
864 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
866 node = csi_node (csi);
867 output_outgoing_cgraph_edges (node->callees, ob, encoder);
868 output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
/* Zero tag terminates the node/edge records (the enum's stopper).  */
871 lto_output_uleb128_stream (ob->main_stream, 0);
873 /* Emit toplevel asms. */
874 for (can = cgraph_asm_nodes; can; can = can->next)
876 int len = TREE_STRING_LENGTH (can->asm_str);
877 lto_output_uleb128_stream (ob->main_stream, len);
878 for (i = 0; i < len; ++i)
879 lto_output_1_stream (ob->main_stream,
880 TREE_STRING_POINTER (can->asm_str)[i]);
/* Zero length terminates the asm list.  */
883 lto_output_uleb128_stream (ob->main_stream, 0);
885 lto_destroy_simple_output_block (ob);
886 output_varpool (set, vset);
887 output_refs (set, vset, encoder, varpool_encoder);
890 /* Overwrite the information in NODE based on FILE_DATA, TAG, FLAGS,
891 STACK_SIZE, SELF_TIME and SELF_SIZE. This is called either to initialize
892 NODE or to replace the values in it, for instance because the first
893 time we saw it, the function body was not available but now it
894 is. BP is a bitpack with all the bitflags for NODE read from the
   memory stream.  */
898 input_overwrite_node (struct lto_file_decl_data *file_data,
899 struct cgraph_node *node,
900 enum LTO_cgraph_tags tag,
901 struct bitpack_d *bp,
902 unsigned int stack_size,
903 unsigned int self_time,
904 unsigned int time_inlining_benefit,
905 unsigned int self_size,
906 unsigned int size_inlining_benefit)
/* Stash the tag in aux so duplicate reads can be detected (see
   input_node).  */
908 node->aux = (void *) tag;
909 node->local.inline_summary.estimated_self_stack_size = stack_size;
910 node->local.inline_summary.self_time = self_time;
911 node->local.inline_summary.time_inlining_benefit = time_inlining_benefit;
912 node->local.inline_summary.self_size = self_size;
913 node->local.inline_summary.size_inlining_benefit = size_inlining_benefit;
/* Global estimates start equal to the self values; estimated_growth is
   reset to "unknown".  */
914 node->global.time = self_time;
915 node->global.size = self_size;
916 node->global.estimated_stack_size = stack_size;
917 node->global.estimated_growth = INT_MIN;
918 node->local.lto_file_data = file_data;
/* Unpack the flags in exactly the order lto_output_node packed them.  */
920 node->local.local = bp_unpack_value (bp, 1);
921 node->local.externally_visible = bp_unpack_value (bp, 1);
922 node->local.finalized = bp_unpack_value (bp, 1);
923 node->local.inlinable = bp_unpack_value (bp, 1);
924 node->local.disregard_inline_limits = bp_unpack_value (bp, 1);
925 node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
926 node->local.vtable_method = bp_unpack_value (bp, 1);
927 node->needed = bp_unpack_value (bp, 1);
928 node->address_taken = bp_unpack_value (bp, 1);
929 node->abstract_and_needed = bp_unpack_value (bp, 1);
930 node->reachable_from_other_partition = bp_unpack_value (bp, 1);
931 node->lowered = bp_unpack_value (bp, 1);
932 node->analyzed = tag == LTO_cgraph_analyzed_node;
933 node->in_other_partition = bp_unpack_value (bp, 1);
934 node->alias = bp_unpack_value (bp, 1);
935 node->finalized_by_frontend = bp_unpack_value (bp, 1);
936 node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
939 /* Output the part of the cgraph in SET. */
/* (More precisely: stream the LTO_section_varpool section -- the node
   count followed by every encoded varpool node.)  */
942 output_varpool (cgraph_node_set set, varpool_node_set vset)
944 struct lto_simple_output_block *ob = lto_create_simple_output_block (LTO_section_varpool);
945 lto_varpool_encoder_t varpool_encoder = ob->decl_state->varpool_node_encoder;
946 int len = lto_varpool_encoder_size (varpool_encoder), i;
948 lto_output_uleb128_stream (ob->main_stream, len);
950 /* Write out the nodes. We must first output a node and then its clones,
951 otherwise at a time reading back the node there would be nothing to clone
   from.  */
953 for (i = 0; i < len; i++)
955 lto_output_varpool_node (ob, lto_varpool_encoder_deref (varpool_encoder, i),
959 lto_destroy_simple_output_block (ob);
962 /* Read a node from input_block IB. TAG is the node's tag just read.
963 Return the node read or overwriten. */
965 static struct cgraph_node *
966 input_node (struct lto_file_decl_data *file_data,
967 struct lto_input_block *ib,
968 enum LTO_cgraph_tags tag,
969 VEC(cgraph_node_ptr, heap) *nodes)
972 struct cgraph_node *node;
973 struct bitpack_d *bp;
976 int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
979 int time_inlining_benefit = 0;
980 int size_inlining_benefit = 0;
981 unsigned long same_body_count = 0;
/* Read back in the order lto_output_node wrote: clone-origin ref,
   decl index, count, flag bitpack, then the analyzed-only summary.  */
984 clone_ref = lto_input_sleb128 (ib);
986 decl_index = lto_input_uleb128 (ib);
987 fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
989 if (clone_ref != LCC_NOT_FOUND)
/* The origin was streamed earlier in NODES; materialize this node as
   its clone.  */
991 node = cgraph_clone_node (VEC_index (cgraph_node_ptr, nodes, clone_ref), fn_decl,
992 0, CGRAPH_FREQ_BASE, 0, false, NULL);
995 node = cgraph_node (fn_decl);
997 node->count = lto_input_sleb128 (ib);
998 bp = lto_input_bitpack (ib);
1000 if (tag == LTO_cgraph_analyzed_node)
1002 stack_size = lto_input_sleb128 (ib);
1003 self_size = lto_input_sleb128 (ib);
1004 size_inlining_benefit = lto_input_sleb128 (ib);
1005 self_time = lto_input_sleb128 (ib);
1006 time_inlining_benefit = lto_input_sleb128 (ib);
1008 ref = lto_input_sleb128 (ib);
/* NOTE(review): lines reading the same_comdat_group guard appear to be
   missing here in this extract.  */
1011 ref2 = lto_input_sleb128 (ib);
1012 same_body_count = lto_input_uleb128 (ib);
1014 /* Make sure that we have not read this node before. Nodes that
1015 have already been read will have their tag stored in the 'aux'
1016 field. Since built-in functions can be referenced in multiple
1017 functions, they are expected to be read more than once. */
1018 if (node->aux && !DECL_IS_BUILTIN (node->decl))
1019 internal_error ("bytecode stream: found multiple instances of cgraph "
1020 "node %d", node->uid);
1022 input_overwrite_node (file_data, node, tag, bp, stack_size, self_time,
1023 time_inlining_benefit, self_size,
1024 size_inlining_benefit);
1025 bitpack_delete (bp);
1027 /* Store a reference for now, and fix up later to be a pointer. */
1028 node->global.inlined_to = (cgraph_node_ptr) (intptr_t) ref;
1030 /* Store a reference for now, and fix up later to be a pointer. */
1031 node->same_comdat_group = (cgraph_node_ptr) (intptr_t) ref2;
/* Reconstruct the same-body aliases/thunks streamed by
   lto_output_node; 'type' encodes thunk_p (bit 0), this_adjusting
   (bit 1) and virtual_offset_p (bit 2).  */
1033 while (same_body_count-- > 0)
1037 decl_index = lto_input_uleb128 (ib);
1038 alias_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1039 type = lto_input_uleb128 (ib);
/* type == 0: a plain same-body alias.  */
1043 decl_index = lto_input_uleb128 (ib);
1044 real_alias = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1045 cgraph_same_body_alias (alias_decl, real_alias);
/* Nonzero type: a thunk with fixed/virtual offsets.  */
1049 HOST_WIDE_INT fixed_offset = lto_input_uleb128 (ib);
1050 HOST_WIDE_INT virtual_value = lto_input_uleb128 (ib);
1052 decl_index = lto_input_uleb128 (ib);
1053 real_alias = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1054 cgraph_add_thunk (alias_decl, fn_decl, type & 2, fixed_offset,
1056 (type & 4) ? size_int (virtual_value) : NULL_TREE,
1063 /* Read a node from input_block IB. TAG is the node's tag just read.
1064 Return the node read or overwriten. */
1066 static struct varpool_node *
1067 input_varpool_node (struct lto_file_decl_data *file_data,
1068 struct lto_input_block *ib)
1072 struct varpool_node *node;
1073 struct bitpack_d *bp;
1077 decl_index = lto_input_uleb128 (ib);
1078 var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1079 node = varpool_node (var_decl);
/* Unpack the flags in the order lto_output_varpool_node packed them.  */
1081 bp = lto_input_bitpack (ib);
1082 node->externally_visible = bp_unpack_value (bp, 1);
1083 node->force_output = bp_unpack_value (bp, 1);
1084 node->finalized = bp_unpack_value (bp, 1);
1085 node->alias = bp_unpack_value (bp, 1);
1086 node->analyzed = node->finalized;
1087 node->used_from_other_partition = bp_unpack_value (bp, 1);
1088 node->in_other_partition = bp_unpack_value (bp, 1);
1089 aliases_p = bp_unpack_value (bp, 1);
1090 if (node->finalized)
1091 varpool_mark_needed_node (node);
1092 bitpack_delete (bp);
/* When the writer flagged extra-name aliases, read the count and
   recreate each alias decl.  NOTE(review): the 'if (aliases_p)' guard
   appears to be missing from this extract.  */
1095 count = lto_input_uleb128 (ib);
1096 for (; count > 0; count --)
1098 tree decl = lto_file_decl_data_get_var_decl (file_data,
1099 lto_input_uleb128 (ib));
1100 varpool_extra_name_alias (decl, var_decl);
1106 /* Read a node from input_block IB. TAG is the node's tag just read.
1107 Return the node read or overwriten. */
/* (The comment above looks copy-pasted: this reads one IPA reference
   record written by lto_output_ref and re-registers it on the refering
   cgraph or varpool node.)  */
1110 input_ref (struct lto_input_block *ib,
1111 struct cgraph_node *refering_node,
1112 struct varpool_node *refering_varpool_node,
1113 VEC(cgraph_node_ptr, heap) *nodes,
1114 VEC(varpool_node_ptr, heap) *varpool_nodes)
1116 struct cgraph_node *node = NULL;
1117 struct varpool_node *varpool_node = NULL;
1118 struct bitpack_d *bp;
1119 enum ipa_ref_type type;
1120 enum ipa_ref_use use;
1122 bp = lto_input_bitpack (ib);
1123 type = (enum ipa_ref_type) bp_unpack_value (bp, 1);
1124 use = (enum ipa_ref_use) bp_unpack_value (bp, 2);
1125 bitpack_delete (bp);
/* The streamed index refers into the node vectors built while reading
   this section.  */
1126 if (type == IPA_REF_CGRAPH)
1127 node = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
1129 varpool_node = VEC_index (varpool_node_ptr, varpool_nodes, lto_input_sleb128 (ib));
1130 ipa_record_reference (refering_node, refering_varpool_node,
1131 node, varpool_node, use, NULL);
1134 /* Read an edge from IB. NODES points to a vector of previously read nodes for
1135 decoding caller and callee of the edge to be read. If INDIRECT is true, the
1136 edge being read is indirect (in the sense that it has
1137 indirect_unknown_callee set). */
input_edge (struct lto_input_block *ib, VEC(cgraph_node_ptr, heap) *nodes,
  struct cgraph_node *caller, *callee;
  struct cgraph_edge *edge;
  unsigned int stmt_id;
  cgraph_inline_failed_t inline_failed;
  struct bitpack_d *bp;
  enum ld_plugin_symbol_resolution caller_resolution;
  /* Caller and callee are streamed as indices into NODES, the vector
     of nodes read so far; a corrupted index is a hard error.  */
  caller = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
  if (caller == NULL || caller->decl == NULL_TREE)
    internal_error ("bytecode stream: no caller found while reading edge");
  callee = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
  if (callee == NULL || callee->decl == NULL_TREE)
    internal_error ("bytecode stream: no callee found while reading edge");
  count = (gcov_type) lto_input_sleb128 (ib);
  /* Remaining edge attributes are packed into one bitpack; the unpack
     order must mirror the writer.  */
  bp = lto_input_bitpack (ib);
  stmt_id = (unsigned int) bp_unpack_value (bp, HOST_BITS_PER_INT);
  inline_failed = (cgraph_inline_failed_t) bp_unpack_value (bp,
  freq = (int) bp_unpack_value (bp, HOST_BITS_PER_INT);
  nest = (unsigned) bp_unpack_value (bp, 30);
  /* If the caller was preempted, don't create the edge.
     ??? Should we ever have edges from a preempted caller?  */
  caller_resolution = lto_symtab_get_resolution (caller->decl);
  if (caller_resolution == LDPR_PREEMPTED_REG
      || caller_resolution == LDPR_PREEMPTED_IR)
  /* Indirect edges carry no callee; direct edges connect caller and
     callee read above.  */
  edge = cgraph_create_indirect_edge (caller, NULL, 0, count, freq, nest);
  edge = cgraph_create_edge (caller, callee, NULL, count, freq, nest);
  edge->indirect_inlining_edge = bp_unpack_value (bp, 1);
  /* The statement itself is not available yet; remember its uid so the
     call statement can be recovered when the function body is read.  */
  edge->lto_stmt_uid = stmt_id;
  edge->inline_failed = inline_failed;
  edge->call_stmt_cannot_inline_p = bp_unpack_value (bp, 1);
  edge->can_throw_external = bp_unpack_value (bp, 1);
  /* For indirect edges, reconstruct the ECF flags of the (unknown)
     callee, one bit per flag, in the writer's order.  */
  if (bp_unpack_value (bp, 1))
    ecf_flags |= ECF_CONST;
  if (bp_unpack_value (bp, 1))
    ecf_flags |= ECF_PURE;
  if (bp_unpack_value (bp, 1))
    ecf_flags |= ECF_NORETURN;
  if (bp_unpack_value (bp, 1))
    ecf_flags |= ECF_MALLOC;
  if (bp_unpack_value (bp, 1))
    ecf_flags |= ECF_NOTHROW;
  if (bp_unpack_value (bp, 1))
    ecf_flags |= ECF_RETURNS_TWICE;
  edge->indirect_info->ecf_flags = ecf_flags;
  bitpack_delete (bp);
1213 /* Read a cgraph from IB using the info in FILE_DATA. */
static VEC(cgraph_node_ptr, heap) *
input_cgraph_1 (struct lto_file_decl_data *file_data,
		struct lto_input_block *ib)
  enum LTO_cgraph_tags tag;
  VEC(cgraph_node_ptr, heap) *nodes = NULL;
  struct cgraph_node *node;
  unsigned HOST_WIDE_INT len;
  /* The stream is a sequence of tagged records terminated by tag 0
     (see the LTO_cgraph_tags comment at the top of the file).  */
  tag = (enum LTO_cgraph_tags) lto_input_uleb128 (ib);
  if (tag == LTO_cgraph_edge)
    input_edge (ib, nodes, false);
  else if (tag == LTO_cgraph_indirect_edge)
    input_edge (ib, nodes, true);
  /* Any other non-zero tag is a node record.  */
  node = input_node (file_data, ib, tag,nodes);
  if (node == NULL || node->decl == NULL_TREE)
    internal_error ("bytecode stream: found empty cgraph node");
  /* Nodes are referenced by their position in this vector; edges and
     fixups below rely on that ordering.  */
  VEC_safe_push (cgraph_node_ptr, heap, nodes, node);
  lto_cgraph_encoder_encode (file_data->cgraph_node_encoder, node);
  tag = (enum LTO_cgraph_tags) lto_input_uleb128 (ib);
  /* Input toplevel asms.  */
  len = lto_input_uleb128 (ib);
  char *str = (char *)xmalloc (len + 1);
  for (i = 0; i < len; ++i)
    str[i] = lto_input_1_unsigned (ib);
  cgraph_add_asm_node (build_string (len, str));
  len = lto_input_uleb128 (ib);
  /* During streaming, inlined_to and same_comdat_group were stored as
     vector indices; now that all nodes exist, turn them back into
     pointers.  */
  for (i = 0; VEC_iterate (cgraph_node_ptr, nodes, i, node); i++)
      int ref = (int) (intptr_t) node->global.inlined_to;
      /* Fixup inlined_to from reference to pointer.  */
      if (ref != LCC_NOT_FOUND)
	node->global.inlined_to = VEC_index (cgraph_node_ptr, nodes, ref);
	node->global.inlined_to = NULL;
      ref = (int) (intptr_t) node->same_comdat_group;
      /* Fixup same_comdat_group from reference to pointer.  */
      if (ref != LCC_NOT_FOUND)
	node->same_comdat_group = VEC_index (cgraph_node_ptr, nodes, ref);
	node->same_comdat_group = NULL;
1278 /* Read a varpool from IB using the info in FILE_DATA. */
static VEC(varpool_node_ptr, heap) *
input_varpool_1 (struct lto_file_decl_data *file_data,
		 struct lto_input_block *ib)
  unsigned HOST_WIDE_INT len;
  VEC(varpool_node_ptr, heap) *varpool = NULL;
  /* The varpool section is a count followed by that many node
     records; collect them in order so later reference records can
     index into the returned vector.  */
  len = lto_input_uleb128 (ib);
  VEC_safe_push (varpool_node_ptr, heap, varpool,
		 input_varpool_node (file_data, ib));
1297 /* Input ipa_refs. */
input_refs (struct lto_input_block *ib,
	    VEC(cgraph_node_ptr, heap) *nodes,
	    VEC(varpool_node_ptr, heap) *varpool)
  /* First the references whose source is a cgraph node: each group is
     a count, the owning node's index into NODES, then the records.  */
  struct cgraph_node *node;
  count = lto_input_uleb128 (ib);
  idx = lto_input_uleb128 (ib);
  node = VEC_index (cgraph_node_ptr, nodes, idx);
  input_ref (ib, node, NULL, nodes, varpool);
  /* Then the references whose source is a varpool node, indexed into
     VARPOOL the same way.  */
  struct varpool_node *node;
  count = lto_input_uleb128 (ib);
  node = VEC_index (varpool_node_ptr, varpool, lto_input_uleb128 (ib));
  input_ref (ib, NULL, node, nodes, varpool);
/* Profile summary filled in from the input stream by
   input_profile_summary.  */
static struct gcov_ctr_summary lto_gcov_summary;
1338 /* Input profile_info from IB. */
1340 input_profile_summary (struct lto_input_block *ib)
1342 unsigned int runs = lto_input_uleb128 (ib);
1347 profile_info = <o_gcov_summary;
1348 lto_gcov_summary.runs = runs;
1349 lto_gcov_summary.sum_all = lto_input_sleb128 (ib);
1350 lto_gcov_summary.run_max = lto_input_sleb128 (ib);
1351 lto_gcov_summary.sum_max = lto_input_sleb128 (ib);
1353 /* We can support this by scaling all counts to nearest common multiple
1354 of all different runs, but it is perhaps not worth the effort. */
1355 else if (profile_info->runs != runs
1356 || profile_info->sum_all != lto_input_sleb128 (ib)
1357 || profile_info->run_max != lto_input_sleb128 (ib)
1358 || profile_info->sum_max != lto_input_sleb128 (ib))
1359 sorry ("Combining units with different profiles is not supported.");
1360 /* We allow some units to have profile and other to not have one. This will
1361 just make unprofiled units to be size optimized that is sane. */
1366 /* Input and merge the cgraph from each of the .o files passed to
1372 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1373 struct lto_file_decl_data *file_data;
1375 struct cgraph_node *node;
1377 while ((file_data = file_data_vec[j++]))
1381 struct lto_input_block *ib;
1382 VEC(cgraph_node_ptr, heap) *nodes;
1383 VEC(varpool_node_ptr, heap) *varpool;
1385 ib = lto_create_simple_input_block (file_data, LTO_section_cgraph,
1387 input_profile_summary (ib);
1388 file_data->cgraph_node_encoder = lto_cgraph_encoder_new ();
1389 nodes = input_cgraph_1 (file_data, ib);
1390 lto_destroy_simple_input_block (file_data, LTO_section_cgraph,
1393 ib = lto_create_simple_input_block (file_data, LTO_section_varpool,
1395 varpool = input_varpool_1 (file_data, ib);
1396 lto_destroy_simple_input_block (file_data, LTO_section_varpool,
1399 ib = lto_create_simple_input_block (file_data, LTO_section_refs,
1401 input_refs (ib, nodes, varpool);
1402 lto_destroy_simple_input_block (file_data, LTO_section_refs,
1404 VEC_free (cgraph_node_ptr, heap, nodes);
1405 VEC_free (varpool_node_ptr, heap, varpool);
1408 /* Clear out the aux field that was used to store enough state to
1409 tell which nodes should be overwritten. */
1410 for (node = cgraph_nodes; node; node = node->next)
1412 /* Some nodes may have been created by cgraph_node. This
1413 happens when the callgraph contains nested functions. If the
1414 node for the parent function was never emitted to the gimple
1415 file, cgraph_node will create a node for it when setting the
1416 context of the nested function. */
1417 if (node->local.lto_file_data)