1 /* Write and read the cgraph to the memory mapped representation of a
4 Copyright 2009, 2010, 2011 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
33 #include "langhooks.h"
34 #include "basic-block.h"
35 #include "tree-flow.h"
39 #include "diagnostic-core.h"
44 #include "pointer-set.h"
45 #include "lto-streamer.h"
46 #include "data-streamer.h"
47 #include "tree-streamer.h"
50 static void output_varpool (cgraph_node_set, varpool_node_set);
51 static void output_cgraph_opt_summary (cgraph_node_set set);
52 static void input_cgraph_opt_summary (VEC (cgraph_node_ptr, heap) * nodes);
54 /* Number of LDPR values known to GCC. */
55 #define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1)
57 /* Cgraph streaming is organized as set of record whose type
58 is indicated by a tag. */
61 /* Must leave 0 for the stopper. */
63 /* Cgraph node without body available. */
64 LTO_cgraph_unavail_node = 1,
65 /* Cgraph node with function body. */
66 LTO_cgraph_analyzed_node,
69 LTO_cgraph_indirect_edge,
73 /* Create a new cgraph encoder. */
/* NOTE(review): this dump drops interior lines (numbering jumps 73->76,
   81->86): the return-type line, braces and the trailing `return encoder;`
   are missing here — restore from the upstream file before compiling.  */
76 lto_cgraph_encoder_new (void)
78 lto_cgraph_encoder_t encoder = XCNEW (struct lto_cgraph_encoder_d);
79 encoder->map = pointer_map_create ();
/* nodes vector starts empty; entries are appended by the encode routine.  */
80 encoder->nodes = NULL;
/* Set of nodes whose function bodies should also be streamed.  */
81 encoder->body = pointer_set_create ();
86 /* Delete ENCODER and its components. */
/* NOTE(review): lines missing in this dump (89->91, 93->98); the free of
   ENCODER itself is presumably among them — confirm against upstream.  */
89 lto_cgraph_encoder_delete (lto_cgraph_encoder_t encoder)
91 VEC_free (cgraph_node_ptr, heap, encoder->nodes);
92 pointer_map_destroy (encoder->map);
93 pointer_set_destroy (encoder->body);
98 /* Return the existing reference number of NODE in the cgraph encoder in
99 output block OB. Assign a new reference if this is the first time
103 lto_cgraph_encoder_encode (lto_cgraph_encoder_t encoder,
104 struct cgraph_node *node)
/* NOTE(review): local declarations and the `if (!slot)` / `else` structure
   fall in the dropped lines (104->109, 115->118) — the two code runs below
   are the "not yet encoded" and "already encoded" branches respectively.  */
109 slot = pointer_map_contains (encoder->map, node);
/* First encounter: next index of the nodes vector becomes the reference.  */
112 ref = VEC_length (cgraph_node_ptr, encoder->nodes);
113 slot = pointer_map_insert (encoder->map, node);
114 *slot = (void *) (intptr_t) ref;
115 VEC_safe_push (cgraph_node_ptr, heap, encoder->nodes, node);
/* Already encoded: reuse the reference stored in the map.  */
118 ref = (int) (intptr_t) *slot;
/* Sentinel reference meaning "node not present in the encoder".  */
123 #define LCC_NOT_FOUND (-1)
125 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
126 or LCC_NOT_FOUND if it is not there. */
129 lto_cgraph_encoder_lookup (lto_cgraph_encoder_t encoder,
130 struct cgraph_node *node)
132 void **slot = pointer_map_contains (encoder->map, node);
/* The map stores the small-integer reference smuggled through the pointer.  */
133 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
137 /* Return the cgraph node corresponding to REF using ENCODER. */
140 lto_cgraph_encoder_deref (lto_cgraph_encoder_t encoder, int ref)
142 if (ref == LCC_NOT_FOUND)
/* NOTE(review): the `return NULL;` for the LCC_NOT_FOUND case sits in the
   dropped lines (142->145) — verify against upstream.  */
145 return VEC_index (cgraph_node_ptr, encoder->nodes, ref);
149 /* Return TRUE if we should encode initializer of NODE (if any). */
/* NOTE(review): the comment above looks copy-pasted from the varpool
   variant; this predicate tests body membership, not initializers.  */
152 lto_cgraph_encoder_encode_body_p (lto_cgraph_encoder_t encoder,
153 struct cgraph_node *node)
155 return pointer_set_contains (encoder->body, node);
158 /* Mark body of NODE to be encoded with this encoder.  (Setter for the
   predicate above; the original "Return TRUE" comment was stale.)  */
161 lto_set_cgraph_encoder_encode_body (lto_cgraph_encoder_t encoder,
162 struct cgraph_node *node)
164 pointer_set_insert (encoder->body, node);
167 /* Create a new varpool encoder.  Mirrors lto_cgraph_encoder_new but the
   auxiliary set tracks variable initializers instead of function bodies. */
169 lto_varpool_encoder_t
170 lto_varpool_encoder_new (void)
172 lto_varpool_encoder_t encoder = XCNEW (struct lto_varpool_encoder_d);
173 encoder->map = pointer_map_create ();
174 encoder->initializer = pointer_set_create ();
175 encoder->nodes = NULL;
/* NOTE(review): `return encoder;` and the braces are in the lines this
   dump drops (175->180).  */
180 /* Delete ENCODER and its components. */
183 lto_varpool_encoder_delete (lto_varpool_encoder_t encoder)
185 VEC_free (varpool_node_ptr, heap, encoder->nodes);
186 pointer_map_destroy (encoder->map);
187 pointer_set_destroy (encoder->initializer);
/* NOTE(review): freeing of ENCODER itself is presumably in the dropped
   lines (187->192) — confirm upstream.  */
192 /* Return the existing reference number of NODE in the varpool encoder in
193 output block OB. Assign a new reference if this is the first time
197 lto_varpool_encoder_encode (lto_varpool_encoder_t encoder,
198 struct varpool_node *node)
/* NOTE(review): exact varpool twin of lto_cgraph_encoder_encode; the
   declarations and branch structure are in the dropped lines.  */
203 slot = pointer_map_contains (encoder->map, node);
/* First encounter: append NODE, its index becomes the reference.  */
206 ref = VEC_length (varpool_node_ptr, encoder->nodes);
207 slot = pointer_map_insert (encoder->map, node);
208 *slot = (void *) (intptr_t) ref;
209 VEC_safe_push (varpool_node_ptr, heap, encoder->nodes, node);
/* Already encoded: reuse the stored reference.  */
212 ref = (int) (intptr_t) *slot;
217 /* Look up NODE in encoder. Return NODE's reference if it has been encoded
218 or LCC_NOT_FOUND if it is not there. */
221 lto_varpool_encoder_lookup (lto_varpool_encoder_t encoder,
222 struct varpool_node *node)
224 void **slot = pointer_map_contains (encoder->map, node);
225 return (slot ? (int) (intptr_t) *slot : LCC_NOT_FOUND);
229 /* Return the varpool node corresponding to REF using ENCODER. */
231 struct varpool_node *
232 lto_varpool_encoder_deref (lto_varpool_encoder_t encoder, int ref)
234 if (ref == LCC_NOT_FOUND)
/* NOTE(review): `return NULL;` for this case is in the dropped lines
   (234->237).  */
237 return VEC_index (varpool_node_ptr, encoder->nodes, ref);
241 /* Return TRUE if we should encode initializer of NODE (if any). */
244 lto_varpool_encoder_encode_initializer_p (lto_varpool_encoder_t encoder,
245 struct varpool_node *node)
247 return pointer_set_contains (encoder->initializer, node);
250 /* Mark initializer of NODE to be encoded with this encoder.  (Setter for
   the predicate above; the original "Return TRUE" comment was stale.)  */
253 lto_set_varpool_encoder_encode_initializer (lto_varpool_encoder_t encoder,
254 struct varpool_node *node)
256 pointer_set_insert (encoder->initializer, node);
259 /* Output the cgraph EDGE to OB using ENCODER. */
/* NOTE(review): local declarations (ref, uid, bp) and several braces fall
   in the lines this dump drops; the streaming layout below must stay in
   lock-step with input_edge, which unpacks in the same order.  */
262 lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
263 lto_cgraph_encoder_t encoder)
/* Tag first: distinguishes indirect edges from ordinary call edges.  */
269 if (edge->indirect_unknown_callee)
270 streamer_write_enum (ob->main_stream, LTO_cgraph_tags, LTO_cgraph_last_tag,
271 LTO_cgraph_indirect_edge)!;
273 streamer_write_enum (ob->main_stream, LTO_cgraph_tags, LTO_cgraph_last_tag,
/* Caller reference must already be in the encoder.  */
276 ref = lto_cgraph_encoder_lookup (encoder, edge->caller);
277 gcc_assert (ref != LCC_NOT_FOUND);
278 streamer_write_hwi_stream (ob->main_stream, ref);
/* Only direct edges carry a callee reference.  */
280 if (!edge->indirect_unknown_callee)
282 ref = lto_cgraph_encoder_lookup (encoder, edge->callee);
283 gcc_assert (ref != LCC_NOT_FOUND);
284 streamer_write_hwi_stream (ob->main_stream, ref);
287 streamer_write_hwi_stream (ob->main_stream, edge->count);
289 bp = bitpack_create (ob->main_stream);
/* If the caller's body is not streamed, fall back to the saved LTO uid.  */
290 uid = (!gimple_has_body_p (edge->caller->decl)
291 ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt));
292 bp_pack_enum (&bp, cgraph_inline_failed_enum,
293 CIF_N_REASONS, edge->inline_failed);
294 bp_pack_var_len_unsigned (&bp, uid);
295 bp_pack_var_len_unsigned (&bp, edge->frequency);
296 bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
297 bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
298 bp_pack_value (&bp, edge->can_throw_external, 1);
/* Indirect edges additionally carry selected ECF_* flags as single bits.  */
299 if (edge->indirect_unknown_callee)
301 int flags = edge->indirect_info->ecf_flags;
302 bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
303 bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
304 bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
305 bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
306 bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
307 bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
308 /* Flags that should not appear on indirect calls. */
309 gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
315 streamer_write_bitpack (&bp);
318 /* Return true if LIST contains references from other partitions.  */
321 referenced_from_other_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
322 varpool_node_set vset)
/* Walk everything that refers to LIST's owner; a referrer counts as "other
   partition" when flagged in_other_partition or absent from SET/VSET.  */
326 for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
328 if (ref->refering_type == IPA_REF_CGRAPH)
330 if (ipa_ref_refering_node (ref)->in_other_partition
331 || !cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
/* NOTE(review): `return true` statements and the varpool branch header are
   in the dropped lines (331->336, 337->345).  */
336 if (ipa_ref_refering_varpool_node (ref)->in_other_partition
337 || !varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
345 /* Return true when node is reachable from other partition. */
348 reachable_from_other_partition_p (struct cgraph_node *node, cgraph_node_set set)
350 struct cgraph_edge *e;
/* Inline clones are materialized inside their caller; presumably the body
   here returns false for them — the branch body is in the dropped lines.  */
353 if (node->global.inlined_to)
/* Any caller outside SET (or in another partition) makes NODE reachable.  */
355 for (e = node->callers; e; e = e->next_caller)
356 if (e->caller->in_other_partition
357 || !cgraph_node_in_set_p (e->caller, set))
362 /* Return true if LIST contains references from THIS partition (SET/VSET).
   (Original comment said "other partitions" — stale copy-paste from the
   sibling predicate above.)  */
365 referenced_from_this_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
366 varpool_node_set vset)
370 for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
372 if (ref->refering_type == IPA_REF_CGRAPH)
374 if (cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
379 if (varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
387 /* Return true when NODE has a caller in THIS partition (SET).  (Original
   comment said "other partition" — stale copy-paste.)  */
390 reachable_from_this_partition_p (struct cgraph_node *node, cgraph_node_set set)
392 struct cgraph_edge *e;
393 for (e = node->callers; e; e = e->next_caller)
394 if (cgraph_node_in_set_p (e->caller, set))
399 /* Output the cgraph NODE to OB. ENCODER is used to find the
400 reference number of NODE->inlined_to. SET is the set of nodes we
401 are writing to the current file. If NODE is not in SET, then NODE
402 is a boundary of a cgraph_node_set and we pretend NODE just has a
403 decl and no callees. WRITTEN_DECLS is the set of FUNCTION_DECLs
404 that have had their callgraph node written so far. This is used to
405 determine if NODE is a clone of a previously written node. */
408 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
409 lto_cgraph_encoder_t encoder, cgraph_node_set set,
410 varpool_node_set vset)
416 bool in_other_partition = false;
417 struct cgraph_node *clone_of;
419 boundary_p = !cgraph_node_in_set_p (node, set);
/* Analyzed nodes inside SET get the full record; everything else is
   streamed as an unavailable (declaration-only) node.  */
421 if (node->analyzed && !boundary_p)
422 tag = LTO_cgraph_analyzed_node;
424 tag = LTO_cgraph_unavail_node;
426 streamer_write_enum (ob->main_stream, LTO_cgraph_tags, LTO_cgraph_last_tag,
429 /* In WPA mode, we only output part of the call-graph. Also, we
430 fake cgraph node attributes. There are two cases that we care.
432 Boundary nodes: There are nodes that are not part of SET but are
433 called from within SET. We artificially make them look like
434 externally visible nodes with no function body.
436 Cherry-picked nodes: These are nodes we pulled from other
437 translation units into SET during IPA-inlining. We make them as
438 local static nodes to prevent clashes with other local statics. */
439 if (boundary_p && node->analyzed)
441 /* Inline clones can not be part of boundary.
442 gcc_assert (!node->global.inlined_to);
444 FIXME: At the moment they can be, when partition contains an inline
445 clone that is clone of inline clone from outside partition. We can
446 reshape the clone tree and make other tree to be the root, but it
447 needs a bit extra work and will be promplty done by cgraph_remove_node
448 after reading back. */
449 in_other_partition = 1;
/* Walk up the clone tree to the nearest ancestor that is itself encoded,
   so the reader can clone from something it will actually have.  */
452 clone_of = node->clone_of;
454 && (ref = lto_cgraph_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
455 if (clone_of->prev_sibling_clone)
456 clone_of = clone_of->prev_sibling_clone;
458 clone_of = clone_of->clone_of;
/* NOTE(review): the condition below tests the enum constant itself and is
   therefore always true; upstream reads `if (tag == LTO_cgraph_analyzed_node)`
   — likely text lost in this dump, verify before use.  */
460 if (LTO_cgraph_analyzed_node)
461 gcc_assert (clone_of || !node->clone_of);
463 streamer_write_hwi_stream (ob->main_stream, LCC_NOT_FOUND);
465 streamer_write_hwi_stream (ob->main_stream, ref);
468 lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
469 streamer_write_hwi_stream (ob->main_stream, node->count);
470 streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);
472 if (tag == LTO_cgraph_analyzed_node)
474 if (node->global.inlined_to)
476 ref = lto_cgraph_encoder_lookup (encoder, node->global.inlined_to);
477 gcc_assert (ref != LCC_NOT_FOUND);
482 streamer_write_hwi_stream (ob->main_stream, ref);
485 if (node->same_comdat_group && !boundary_p)
487 ref = lto_cgraph_encoder_lookup (encoder, node->same_comdat_group);
488 gcc_assert (ref != LCC_NOT_FOUND);
492 streamer_write_hwi_stream (ob->main_stream, ref);
/* Bit-packed flags.  Pack order must mirror the unpack order in
   input_overwrite_node exactly.  */
494 bp = bitpack_create (ob->main_stream);
495 bp_pack_value (&bp, node->local.local, 1);
496 bp_pack_value (&bp, node->local.externally_visible, 1);
497 bp_pack_value (&bp, node->local.finalized, 1);
498 bp_pack_value (&bp, node->local.versionable, 1);
499 bp_pack_value (&bp, node->local.can_change_signature, 1);
500 bp_pack_value (&bp, node->local.redefined_extern_inline, 1);
501 bp_pack_value (&bp, node->needed, 1);
502 bp_pack_value (&bp, node->address_taken, 1);
503 bp_pack_value (&bp, node->abstract_and_needed, 1);
/* reachable_from_other_partition: computed, not copied from the node.  */
504 bp_pack_value (&bp, tag == LTO_cgraph_analyzed_node
505 && !DECL_EXTERNAL (node->decl)
506 && !DECL_COMDAT (node->decl)
507 && (reachable_from_other_partition_p (node, set)
508 || referenced_from_other_partition_p (&node->ref_list, set, vset)), 1);
509 bp_pack_value (&bp, node->lowered, 1);
510 bp_pack_value (&bp, in_other_partition, 1);
511 bp_pack_value (&bp, node->alias && !boundary_p, 1);
512 bp_pack_value (&bp, node->frequency, 2);
513 bp_pack_value (&bp, node->only_called_at_startup, 1);
514 bp_pack_value (&bp, node->only_called_at_exit, 1);
515 bp_pack_value (&bp, node->thunk.thunk_p && !boundary_p, 1);
516 bp_pack_enum (&bp, ld_plugin_symbol_resolution,
517 LDPR_NUM_KNOWN, node->resolution);
518 streamer_write_bitpack (&bp);
/* Thunk descriptor: type bits (1 | this_adjusting<<1 | virtual_offset_p<<2)
   followed by the two offsets; input_node decodes the same layout.  */
520 if (node->thunk.thunk_p && !boundary_p)
522 streamer_write_uhwi_stream
524 1 + (node->thunk.this_adjusting != 0) * 2
525 + (node->thunk.virtual_offset_p != 0) * 4);
526 streamer_write_uhwi_stream (ob->main_stream, node->thunk.fixed_offset);
527 streamer_write_uhwi_stream (ob->main_stream, node->thunk.virtual_value);
529 if ((node->alias || node->thunk.thunk_p) && !boundary_p)
531 streamer_write_hwi_in_range (ob->main_stream, 0, 1,
532 node->thunk.alias != NULL);
533 if (node->thunk.alias != NULL)
534 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
539 /* Output the varpool NODE to OB.
540 If NODE is not in SET, then NODE is a boundary. */
543 lto_output_varpool_node (struct lto_simple_output_block *ob, struct varpool_node *node,
544 lto_varpool_encoder_t varpool_encoder,
545 cgraph_node_set set, varpool_node_set vset)
547 bool boundary_p = !varpool_node_in_set_p (node, vset) && node->analyzed;
551 lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
/* Pack order must mirror the unpack order in input_varpool_node.  */
552 bp = bitpack_create (ob->main_stream);
553 bp_pack_value (&bp, node->externally_visible, 1);
554 bp_pack_value (&bp, node->force_output, 1);
555 bp_pack_value (&bp, node->finalized, 1);
556 bp_pack_value (&bp, node->alias, 1);
557 bp_pack_value (&bp, node->alias_of != NULL, 1);
558 gcc_assert (node->finalized || !node->analyzed);
559 gcc_assert (node->needed);
560 /* Constant pool initializers can be de-unified into individual ltrans units.
561 FIXME: Alternatively at -Os we may want to avoid generating for them the local
562 labels and share them across LTRANS partitions. */
563 if (DECL_IN_CONSTANT_POOL (node->decl)
564 && !DECL_COMDAT (node->decl))
566 bp_pack_value (&bp, 0, 1); /* used_from_other_parition. */
567 bp_pack_value (&bp, 0, 1); /* in_other_partition. */
/* Non-constant-pool case: the partition bits are computed for real.  */
571 bp_pack_value (&bp, node->analyzed
572 && referenced_from_other_partition_p (&node->ref_list,
574 bp_pack_value (&bp, boundary_p, 1); /* in_other_partition. */
576 streamer_write_bitpack (&bp);
/* Alias target decl, written only when alias_of is set (guard is in the
   dropped lines 576->578).  */
578 lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->alias_of);
579 if (node->same_comdat_group && !boundary_p)
581 ref = lto_varpool_encoder_lookup (varpool_encoder, node->same_comdat_group);
582 gcc_assert (ref != LCC_NOT_FOUND);
586 streamer_write_hwi_stream (ob->main_stream, ref);
587 streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
588 LDPR_NUM_KNOWN, node->resolution);
591 /* Output the IPA reference REF to OB, resolving its target through the
   matching encoder.  (Original comment "Output the varpool NODE" was a
   stale copy-paste.)  */
595 lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
596 lto_cgraph_encoder_t encoder,
597 lto_varpool_encoder_t varpool_encoder)
600 bp = bitpack_create (ob->main_stream);
601 bp_pack_value (&bp, ref->refered_type, 1);
602 bp_pack_value (&bp, ref->use, 2);
603 streamer_write_bitpack (&bp);
/* Target is either a cgraph node or a varpool node; both must already be
   present in their encoder.  */
604 if (ref->refered_type == IPA_REF_CGRAPH)
606 int nref = lto_cgraph_encoder_lookup (encoder, ipa_ref_node (ref));
607 gcc_assert (nref != LCC_NOT_FOUND);
608 streamer_write_hwi_stream (ob->main_stream, nref);
612 int nref = lto_varpool_encoder_lookup (varpool_encoder,
613 ipa_ref_varpool_node (ref));
614 gcc_assert (nref != LCC_NOT_FOUND);
615 streamer_write_hwi_stream (ob->main_stream, nref);
619 /* Stream out profile_summary to OB. */
622 output_profile_summary (struct lto_simple_output_block *ob)
/* The `if (profile_info)` guard is in the dropped lines; the final write of
   0 below is presumably the no-profile else-branch — confirm upstream.  */
626 /* We do not output num, sum_all and run_max, they are not used by
627 GCC profile feedback and they are difficult to merge from multiple
629 gcc_assert (profile_info->runs);
630 streamer_write_uhwi_stream (ob->main_stream, profile_info->runs);
631 streamer_write_uhwi_stream (ob->main_stream, profile_info->sum_max);
634 streamer_write_uhwi_stream (ob->main_stream, 0);
637 /* Add NODE into encoder as well as nodes it is cloned from.
638 Do it in a way so clones appear first. */
641 add_node_to (lto_cgraph_encoder_t encoder, struct cgraph_node *node,
/* Recurse on the clone origin first (guard condition is in the dropped
   lines 641->645), then encode NODE itself.  */
645 add_node_to (encoder, node->clone_of, include_body);
646 else if (include_body)
647 lto_set_cgraph_encoder_encode_body (encoder, node);
648 lto_cgraph_encoder_encode (encoder, node);
651 /* Add all references in LIST to encoders. */
654 add_references (lto_cgraph_encoder_t encoder,
655 lto_varpool_encoder_t varpool_encoder,
656 struct ipa_ref_list *list)
/* Function targets go into the cgraph encoder (without body); variable
   targets into the varpool encoder.  */
660 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
661 if (ref->refered_type == IPA_REF_CGRAPH)
662 add_node_to (encoder, ipa_ref_node (ref), false);
665 struct varpool_node *vnode = ipa_ref_varpool_node (ref);
666 lto_varpool_encoder_encode (varpool_encoder, vnode);
670 /* Output all callees or indirect outgoing edges. EDGE must be the first such
674 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
675 struct lto_simple_output_block *ob,
676 lto_cgraph_encoder_t encoder)
/* NULL-edge early return is in the dropped lines (676->681).  */
681 /* Output edges in backward direction, so the reconstructed callgraph match
682 and it is easy to associate call sites in the IPA pass summaries. */
683 while (edge->next_callee)
684 edge = edge->next_callee;
685 for (; edge; edge = edge->prev_callee)
686 lto_output_edge (ob, edge, encoder);
689 /* Output the IPA references of all nodes in SET and VSET to the
   LTO_section_refs section.  (Original comment "Output the part of the
   cgraph in SET" was a stale copy-paste.)  */
692 output_refs (cgraph_node_set set, varpool_node_set vset,
693 lto_cgraph_encoder_t encoder,
694 lto_varpool_encoder_t varpool_encoder)
696 cgraph_node_set_iterator csi;
697 varpool_node_set_iterator vsi;
698 struct lto_simple_output_block *ob;
703 ob = lto_create_simple_output_block (LTO_section_refs);
/* Per-node record: reference count, node reference, then the refs.  The
   `if (count)` guards are in the dropped lines.  */
705 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
707 struct cgraph_node *node = csi_node (csi);
709 count = ipa_ref_list_nreferences (&node->ref_list);
712 streamer_write_uhwi_stream (ob->main_stream, count);
713 streamer_write_uhwi_stream (ob->main_stream,
714 lto_cgraph_encoder_lookup (encoder, node));
715 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
716 lto_output_ref (ob, ref, encoder, varpool_encoder);
/* Zero count terminates the cgraph part of the section.  */
720 streamer_write_uhwi_stream (ob->main_stream, 0);
722 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
724 struct varpool_node *node = vsi_node (vsi);
726 count = ipa_ref_list_nreferences (&node->ref_list);
729 streamer_write_uhwi_stream (ob->main_stream, count);
730 streamer_write_uhwi_stream (ob->main_stream,
731 lto_varpool_encoder_lookup (varpool_encoder,
733 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
734 lto_output_ref (ob, ref, encoder, varpool_encoder);
/* Zero count terminates the varpool part as well.  */
738 streamer_write_uhwi_stream (ob->main_stream, 0);
740 lto_destroy_simple_output_block (ob);
743 /* Find out all cgraph and varpool nodes we want to encode in current unit
744 and insert them to encoders. */
746 compute_ltrans_boundary (struct lto_out_decl_state *state,
747 cgraph_node_set set, varpool_node_set vset)
749 struct cgraph_node *node;
750 cgraph_node_set_iterator csi;
751 varpool_node_set_iterator vsi;
752 struct cgraph_edge *edge;
754 lto_cgraph_encoder_t encoder;
755 lto_varpool_encoder_t varpool_encoder;
757 encoder = state->cgraph_node_encoder = lto_cgraph_encoder_new ();
758 varpool_encoder = state->varpool_node_encoder = lto_varpool_encoder_new ();
760 /* Go over all the nodes in SET and assign references. */
761 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
763 node = csi_node (csi);
/* Nodes in SET get their bodies streamed (include_body = true).  */
764 add_node_to (encoder, node, true);
765 add_references (encoder, varpool_encoder, &node->ref_list);
767 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
769 struct varpool_node *vnode = vsi_node (vsi);
770 gcc_assert (!vnode->alias || vnode->alias_of);
771 lto_varpool_encoder_encode (varpool_encoder, vnode);
772 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
773 add_references (encoder, varpool_encoder, &vnode->ref_list);
775 /* Pickle in also the initializer of all referenced readonly variables
776 to help folding. Constant pool variables are not shared, so we must
778 for (i = 0; i < lto_varpool_encoder_size (varpool_encoder); i++)
/* Note: this loop iterates while add_references below may grow the
   encoder, so newly added variables are processed too.  */
780 struct varpool_node *vnode = lto_varpool_encoder_deref (varpool_encoder, i);
781 if (DECL_INITIAL (vnode->decl)
782 && !lto_varpool_encoder_encode_initializer_p (varpool_encoder,
784 && const_value_known_p (vnode->decl))
786 lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
787 add_references (encoder, varpool_encoder, &vnode->ref_list);
791 /* Go over all the nodes again to include callees that are not in
793 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
795 node = csi_node (csi);
796 for (edge = node->callees; edge; edge = edge->next_callee)
798 struct cgraph_node *callee = edge->callee;
799 if (!cgraph_node_in_set_p (callee, set))
801 /* We should have moved all the inlines. */
802 gcc_assert (!callee->global.inlined_to);
/* Boundary callee: encode declaration only, no body.  */
803 add_node_to (encoder, callee, false);
809 /* Output the part of the cgraph in SET. */
812 output_cgraph (cgraph_node_set set, varpool_node_set vset)
814 struct cgraph_node *node;
815 struct lto_simple_output_block *ob;
816 cgraph_node_set_iterator csi;
818 lto_cgraph_encoder_t encoder;
819 lto_varpool_encoder_t varpool_encoder;
/* Ensures toplevel asms are emitted exactly once across all partitions.  */
820 static bool asm_nodes_output = false;
/* The `if (flag_wpa)` guard around this call is in the dropped lines
   (820->823) — confirm upstream.  */
823 output_cgraph_opt_summary (set);
825 ob = lto_create_simple_output_block (LTO_section_cgraph);
827 output_profile_summary (ob);
829 /* An encoder for cgraph nodes should have been created by
830 ipa_write_summaries_1. */
831 gcc_assert (ob->decl_state->cgraph_node_encoder);
832 gcc_assert (ob->decl_state->varpool_node_encoder);
833 encoder = ob->decl_state->cgraph_node_encoder;
834 varpool_encoder = ob->decl_state->varpool_node_encoder;
836 /* Write out the nodes. We must first output a node and then its clones,
837 otherwise at a time reading back the node there would be nothing to clone
839 n_nodes = lto_cgraph_encoder_size (encoder);
840 for (i = 0; i < n_nodes; i++)
842 node = lto_cgraph_encoder_deref (encoder, i);
843 lto_output_node (ob, node, encoder, set, vset);
846 /* Go over the nodes in SET again to write edges. */
847 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
849 node = csi_node (csi);
850 output_outgoing_cgraph_edges (node->callees, ob, encoder);
851 output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
/* Zero tag terminates the node/edge stream (matches "Must leave 0 for the
   stopper" in the tag enum).  */
854 streamer_write_uhwi_stream (ob->main_stream, 0);
856 lto_destroy_simple_output_block (ob);
858 /* Emit toplevel asms.
859 When doing WPA we must output every asm just once. Since we do not partition asm
860 nodes at all, output them to first output. This is kind of hack, but should work
862 if (!asm_nodes_output)
864 asm_nodes_output = true;
865 lto_output_toplevel_asms ();
868 output_varpool (set, vset);
869 output_refs (set, vset, encoder, varpool_encoder);
872 /* Overwrite the information in NODE based on FILE_DATA, TAG, FLAGS,
873 STACK_SIZE, SELF_TIME and SELF_SIZE. This is called either to initialize
874 NODE or to replace the values in it, for instance because the first
875 time we saw it, the function body was not available but now it
876 is. BP is a bitpack with all the bitflags for NODE read from the
880 input_overwrite_node (struct lto_file_decl_data *file_data,
881 struct cgraph_node *node,
882 enum LTO_cgraph_tags tag,
883 struct bitpack_d *bp)
/* aux temporarily holds the tag; used later to detect duplicate reads.  */
885 node->aux = (void *) tag;
886 node->local.lto_file_data = file_data;
/* Unpack order must mirror the pack order in lto_output_node exactly.  */
888 node->local.local = bp_unpack_value (bp, 1);
889 node->local.externally_visible = bp_unpack_value (bp, 1);
890 node->local.finalized = bp_unpack_value (bp, 1);
891 node->local.versionable = bp_unpack_value (bp, 1);
892 node->local.can_change_signature = bp_unpack_value (bp, 1);
893 node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
894 node->needed = bp_unpack_value (bp, 1);
895 node->address_taken = bp_unpack_value (bp, 1);
896 node->abstract_and_needed = bp_unpack_value (bp, 1);
897 node->reachable_from_other_partition = bp_unpack_value (bp, 1);
898 node->lowered = bp_unpack_value (bp, 1);
899 node->analyzed = tag == LTO_cgraph_analyzed_node;
900 node->in_other_partition = bp_unpack_value (bp, 1);
901 if (node->in_other_partition
902 /* Avoid updating decl when we are seeing just inline clone.
903 When inlining function that has functions already inlined into it,
904 we produce clones of inline clones.
906 WPA partitioning might put each clone into different unit and
907 we might end up streaming inline clone from other partition
908 to support clone we are interested in. */
910 || node->clone_of->decl != node->decl))
/* Demote the decl to an external declaration in this partition.  */
912 DECL_EXTERNAL (node->decl) = 1;
913 TREE_STATIC (node->decl) = 0;
915 node->alias = bp_unpack_value (bp, 1);
916 node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
917 node->only_called_at_startup = bp_unpack_value (bp, 1);
918 node->only_called_at_exit = bp_unpack_value (bp, 1);
919 node->thunk.thunk_p = bp_unpack_value (bp, 1);
920 node->resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
924 /* Output the encoded varpool nodes of SET/VSET to the LTO_section_varpool
   section.  (Original comment "Output the part of the cgraph" was a stale
   copy-paste.)  */
927 output_varpool (cgraph_node_set set, varpool_node_set vset)
929 struct lto_simple_output_block *ob = lto_create_simple_output_block (LTO_section_varpool);
930 lto_varpool_encoder_t varpool_encoder = ob->decl_state->varpool_node_encoder;
931 int len = lto_varpool_encoder_size (varpool_encoder), i;
/* Node count first, so the reader knows how many records follow.  */
933 streamer_write_uhwi_stream (ob->main_stream, len);
935 /* Write out the nodes. We must first output a node and then its clones,
936 otherwise at a time reading back the node there would be nothing to clone
938 for (i = 0; i < len; i++)
940 lto_output_varpool_node (ob, lto_varpool_encoder_deref (varpool_encoder, i),
945 lto_destroy_simple_output_block (ob);
948 /* Read a node from input_block IB. TAG is the node's tag just read.
949 Return the node read or overwriten. */
951 static struct cgraph_node *
952 input_node (struct lto_file_decl_data *file_data,
953 struct lto_input_block *ib,
954 enum LTO_cgraph_tags tag,
955 VEC(cgraph_node_ptr, heap) *nodes)
958 struct cgraph_node *node;
961 int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
/* Read order mirrors lto_output_node: clone ref, decl index, counts,
   inlined_to/comdat refs, bitpack, thunk data, alias data.  */
964 clone_ref = streamer_read_hwi (ib);
966 decl_index = streamer_read_uhwi (ib);
967 fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
/* A valid clone reference means this node was written as a clone of an
   earlier node in NODES; materialize it by cloning that node.  */
969 if (clone_ref != LCC_NOT_FOUND)
971 node = cgraph_clone_node (VEC_index (cgraph_node_ptr, nodes, clone_ref), fn_decl,
972 0, CGRAPH_FREQ_BASE, false, NULL, false);
975 node = cgraph_get_create_node (fn_decl);
977 node->count = streamer_read_hwi (ib);
978 node->count_materialization_scale = streamer_read_hwi (ib);
980 if (tag == LTO_cgraph_analyzed_node)
981 ref = streamer_read_hwi (ib);
983 ref2 = streamer_read_hwi (ib);
985 /* Make sure that we have not read this node before. Nodes that
986 have already been read will have their tag stored in the 'aux'
987 field. Since built-in functions can be referenced in multiple
988 functions, they are expected to be read more than once. */
989 if (node->aux && !DECL_BUILT_IN (node->decl))
990 internal_error ("bytecode stream: found multiple instances of cgraph "
991 "node %d", node->uid);
993 bp = streamer_read_bitpack (ib);
994 input_overwrite_node (file_data, node, tag, &bp);
996 /* Store a reference for now, and fix up later to be a pointer. */
997 node->global.inlined_to = (cgraph_node_ptr) (intptr_t) ref;
999 /* Store a reference for now, and fix up later to be a pointer. */
1000 node->same_comdat_group = (cgraph_node_ptr) (intptr_t) ref2;
1002 if (node->thunk.thunk_p)
/* Decode the thunk type word written by lto_output_node:
   bit 1 = this_adjusting, bit 2 = virtual_offset_p.  */
1004 int type = streamer_read_uhwi (ib);
1005 HOST_WIDE_INT fixed_offset = streamer_read_uhwi (ib);
1006 HOST_WIDE_INT virtual_value = streamer_read_uhwi (ib);
1008 node->thunk.fixed_offset = fixed_offset;
1009 node->thunk.this_adjusting = (type & 2);
1010 node->thunk.virtual_value = virtual_value;
1011 node->thunk.virtual_offset_p = (type & 4);
1013 if (node->thunk.thunk_p || node->alias)
1015 if (streamer_read_hwi_in_range (ib, "alias nonzero flag", 0, 1))
1017 decl_index = streamer_read_uhwi (ib);
1018 node->thunk.alias = lto_file_decl_data_get_fn_decl (file_data,
1025 /* Read a varpool node from input_block IB.
1026 Return the node read or overwriten. */
1028 static struct varpool_node *
1029 input_varpool_node (struct lto_file_decl_data *file_data,
1030 struct lto_input_block *ib)
1034 struct varpool_node *node;
1035 struct bitpack_d bp;
1036 int ref = LCC_NOT_FOUND;
1037 bool non_null_aliasof;
1039 decl_index = streamer_read_uhwi (ib);
1040 var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1041 node = varpool_node (var_decl);
1042 node->lto_file_data = file_data;
/* Unpack order mirrors the pack order in lto_output_varpool_node.  */
1044 bp = streamer_read_bitpack (ib);
1045 node->externally_visible = bp_unpack_value (&bp, 1);
1046 node->force_output = bp_unpack_value (&bp, 1);
1047 node->finalized = bp_unpack_value (&bp, 1);
1048 node->alias = bp_unpack_value (&bp, 1);
1049 non_null_aliasof = bp_unpack_value (&bp, 1);
1050 node->analyzed = node->finalized;
1051 node->used_from_other_partition = bp_unpack_value (&bp, 1);
1052 node->in_other_partition = bp_unpack_value (&bp, 1);
/* Variable lives in another partition: demote to external declaration.  */
1053 if (node->in_other_partition)
1055 DECL_EXTERNAL (node->decl) = 1;
1056 TREE_STATIC (node->decl) = 0;
1058 if (node->finalized)
1059 varpool_mark_needed_node (node);
1060 if (non_null_aliasof)
1062 decl_index = streamer_read_uhwi (ib);
1063 node->alias_of = lto_file_decl_data_get_var_decl (file_data, decl_index);
1065 ref = streamer_read_hwi (ib);
1066 /* Store a reference for now, and fix up later to be a pointer. */
1067 node->same_comdat_group = (struct varpool_node *) (intptr_t) ref;
1068 node->resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
1074 /* Read one IPA reference from IB and record it against the refering
   cgraph or varpool node, resolving the target through NODES /
   VARPOOL_NODES.  (Original "Read a node" comment was a stale
   copy-paste.)  */
1078 input_ref (struct lto_input_block *ib,
1079 struct cgraph_node *refering_node,
1080 struct varpool_node *refering_varpool_node,
1081 VEC(cgraph_node_ptr, heap) *nodes,
1082 VEC(varpool_node_ptr, heap) *varpool_nodes)
1084 struct cgraph_node *node = NULL;
1085 struct varpool_node *varpool_node = NULL;
1086 struct bitpack_d bp;
1087 enum ipa_ref_type type;
1088 enum ipa_ref_use use;
/* Unpack order mirrors lto_output_ref: 1-bit type, 2-bit use, then the
   target reference as an HWI.  */
1090 bp = streamer_read_bitpack (ib);
1091 type = (enum ipa_ref_type) bp_unpack_value (&bp, 1);
1092 use = (enum ipa_ref_use) bp_unpack_value (&bp, 2);
1093 if (type == IPA_REF_CGRAPH)
1094 node = VEC_index (cgraph_node_ptr, nodes, streamer_read_hwi (ib));
1096 varpool_node = VEC_index (varpool_node_ptr, varpool_nodes,
1097 streamer_read_hwi (ib));
1098 ipa_record_reference (refering_node, refering_varpool_node,
1099 node, varpool_node, use, NULL);
1102 /* Read an edge from IB. NODES points to a vector of previously read nodes for
1103 decoding caller and callee of the edge to be read. If INDIRECT is true, the
1104 edge being read is indirect (in the sense that it has
1105 indirect_unknown_callee set). */
1108 input_edge (struct lto_input_block *ib, VEC(cgraph_node_ptr, heap) *nodes,
1111 struct cgraph_node *caller, *callee;
1112 struct cgraph_edge *edge;
1113 unsigned int stmt_id;
1116 cgraph_inline_failed_t inline_failed;
1117 struct bitpack_d bp;
1120 caller = VEC_index (cgraph_node_ptr, nodes, streamer_read_hwi (ib));
1121 if (caller == NULL || caller->decl == NULL_TREE)
1122 internal_error ("bytecode stream: no caller found while reading edge");
1126 callee = VEC_index (cgraph_node_ptr, nodes, streamer_read_hwi (ib));
1127 if (callee == NULL || callee->decl == NULL_TREE)
1128 internal_error ("bytecode stream: no callee found while reading edge");
1133 count = (gcov_type) streamer_read_hwi (ib);
1135 bp = streamer_read_bitpack (ib);
1136 inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_enum, CIF_N_REASONS);
1137 stmt_id = bp_unpack_var_len_unsigned (&bp);
1138 freq = (int) bp_unpack_var_len_unsigned (&bp);
1141 edge = cgraph_create_indirect_edge (caller, NULL, 0, count, freq);
1143 edge = cgraph_create_edge (caller, callee, NULL, count, freq);
1145 edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
1146 edge->lto_stmt_uid = stmt_id;
1147 edge->inline_failed = inline_failed;
1148 edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
1149 edge->can_throw_external = bp_unpack_value (&bp, 1);
1152 if (bp_unpack_value (&bp, 1))
1153 ecf_flags |= ECF_CONST;
1154 if (bp_unpack_value (&bp, 1))
1155 ecf_flags |= ECF_PURE;
1156 if (bp_unpack_value (&bp, 1))
1157 ecf_flags |= ECF_NORETURN;
1158 if (bp_unpack_value (&bp, 1))
1159 ecf_flags |= ECF_MALLOC;
1160 if (bp_unpack_value (&bp, 1))
1161 ecf_flags |= ECF_NOTHROW;
1162 if (bp_unpack_value (&bp, 1))
1163 ecf_flags |= ECF_RETURNS_TWICE;
1164 edge->indirect_info->ecf_flags = ecf_flags;
1169 /* Read a cgraph from IB using the info in FILE_DATA. */
1171 static VEC(cgraph_node_ptr, heap) *
1172 input_cgraph_1 (struct lto_file_decl_data *file_data,
1173 struct lto_input_block *ib)
1175 enum LTO_cgraph_tags tag;
1176 VEC(cgraph_node_ptr, heap) *nodes = NULL;
1177 struct cgraph_node *node;
1180 tag = streamer_read_enum (ib, LTO_cgraph_tags, LTO_cgraph_last_tag);
1183 if (tag == LTO_cgraph_edge)
1184 input_edge (ib, nodes, false);
1185 else if (tag == LTO_cgraph_indirect_edge)
1186 input_edge (ib, nodes, true);
1189 node = input_node (file_data, ib, tag,nodes);
1190 if (node == NULL || node->decl == NULL_TREE)
1191 internal_error ("bytecode stream: found empty cgraph node");
1192 VEC_safe_push (cgraph_node_ptr, heap, nodes, node);
1193 lto_cgraph_encoder_encode (file_data->cgraph_node_encoder, node);
1196 tag = streamer_read_enum (ib, LTO_cgraph_tags, LTO_cgraph_last_tag);
1199 lto_input_toplevel_asms (file_data);
1201 /* AUX pointers should be all non-zero for nodes read from the stream. */
1202 #ifdef ENABLE_CHECKING
1203 FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
1204 gcc_assert (node->aux);
1206 FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
1208 int ref = (int) (intptr_t) node->global.inlined_to;
1210 /* We share declaration of builtins, so we may read same node twice. */
1215 /* Fixup inlined_to from reference to pointer. */
1216 if (ref != LCC_NOT_FOUND)
1217 node->global.inlined_to = VEC_index (cgraph_node_ptr, nodes, ref);
1219 node->global.inlined_to = NULL;
1221 ref = (int) (intptr_t) node->same_comdat_group;
1223 /* Fixup same_comdat_group from reference to pointer. */
1224 if (ref != LCC_NOT_FOUND)
1225 node->same_comdat_group = VEC_index (cgraph_node_ptr, nodes, ref);
1227 node->same_comdat_group = NULL;
1229 FOR_EACH_VEC_ELT (cgraph_node_ptr, nodes, i, node)
1230 node->aux = (void *)1;
1234 /* Read a varpool from IB using the info in FILE_DATA. */
1236 static VEC(varpool_node_ptr, heap) *
1237 input_varpool_1 (struct lto_file_decl_data *file_data,
1238 struct lto_input_block *ib)
1240 unsigned HOST_WIDE_INT len;
1241 VEC(varpool_node_ptr, heap) *varpool = NULL;
1243 struct varpool_node *node;
1245 len = streamer_read_uhwi (ib);
1248 VEC_safe_push (varpool_node_ptr, heap, varpool,
1249 input_varpool_node (file_data, ib));
1252 #ifdef ENABLE_CHECKING
1253 FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
1254 gcc_assert (!node->aux);
1256 FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
1258 int ref = (int) (intptr_t) node->same_comdat_group;
1259 /* We share declaration of builtins, so we may read same node twice. */
1262 node->aux = (void *)1;
1264 /* Fixup same_comdat_group from reference to pointer. */
1265 if (ref != LCC_NOT_FOUND)
1266 node->same_comdat_group = VEC_index (varpool_node_ptr, varpool, ref);
1268 node->same_comdat_group = NULL;
1270 FOR_EACH_VEC_ELT (varpool_node_ptr, varpool, i, node)
1275 /* Input ipa_refs. */
1278 input_refs (struct lto_input_block *ib,
1279 VEC(cgraph_node_ptr, heap) *nodes,
1280 VEC(varpool_node_ptr, heap) *varpool)
1286 struct cgraph_node *node;
1287 count = streamer_read_uhwi (ib);
1290 idx = streamer_read_uhwi (ib);
1291 node = VEC_index (cgraph_node_ptr, nodes, idx);
1294 input_ref (ib, node, NULL, nodes, varpool);
1300 struct varpool_node *node;
1301 count = streamer_read_uhwi (ib);
1304 node = VEC_index (varpool_node_ptr, varpool,
1305 streamer_read_uhwi (ib));
1308 input_ref (ib, NULL, node, nodes, varpool);
1315 static struct gcov_ctr_summary lto_gcov_summary;
1317 /* Input profile_info from IB. */
1319 input_profile_summary (struct lto_input_block *ib,
1320 struct lto_file_decl_data *file_data)
1322 unsigned int runs = streamer_read_uhwi (ib);
1325 file_data->profile_info.runs = runs;
1326 file_data->profile_info.sum_max = streamer_read_uhwi (ib);
1331 /* Rescale profile summaries to the same number of runs in the whole unit. */
1334 merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1336 struct lto_file_decl_data *file_data;
1338 gcov_unsigned_t max_runs = 0;
1339 struct cgraph_node *node;
1340 struct cgraph_edge *edge;
1342 /* Find unit with maximal number of runs. If we ever get serious about
1343 roundoff errors, we might also consider computing smallest common
1345 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1346 if (max_runs < file_data->profile_info.runs)
1347 max_runs = file_data->profile_info.runs;
1352 /* Simple overflow check. We probably don't need to support that many train
1353 runs. Such a large value probably imply data corruption anyway. */
1354 if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1356 sorry ("At most %i profile runs is supported. Perhaps corrupted profile?",
1357 INT_MAX / REG_BR_PROB_BASE);
1361 profile_info = <o_gcov_summary;
1362 lto_gcov_summary.runs = max_runs;
1363 lto_gcov_summary.sum_max = 0;
1365 /* Rescale all units to the maximal number of runs.
1366 sum_max can not be easily merged, as we have no idea what files come from
1367 the same run. We do not use the info anyway, so leave it 0. */
1368 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1369 if (file_data->profile_info.runs)
1371 int scale = ((REG_BR_PROB_BASE * max_runs
1372 + file_data->profile_info.runs / 2)
1373 / file_data->profile_info.runs);
1374 lto_gcov_summary.sum_max = MAX (lto_gcov_summary.sum_max,
1375 (file_data->profile_info.sum_max
1377 + REG_BR_PROB_BASE / 2)
1378 / REG_BR_PROB_BASE);
1381 /* Watch roundoff errors. */
1382 if (lto_gcov_summary.sum_max < max_runs)
1383 lto_gcov_summary.sum_max = max_runs;
1385 /* If merging already happent at WPA time, we are done. */
1389 /* Now compute count_materialization_scale of each node.
1390 During LTRANS we already have values of count_materialization_scale
1391 computed, so just update them. */
1392 for (node = cgraph_nodes; node; node = node->next)
1393 if (node->local.lto_file_data
1394 && node->local.lto_file_data->profile_info.runs)
1399 ((node->count_materialization_scale * max_runs
1400 + node->local.lto_file_data->profile_info.runs / 2)
1401 / node->local.lto_file_data->profile_info.runs);
1402 node->count_materialization_scale = scale;
1404 fatal_error ("Profile information in %s corrupted",
1405 file_data->file_name);
1407 if (scale == REG_BR_PROB_BASE)
1409 for (edge = node->callees; edge; edge = edge->next_callee)
1410 edge->count = ((edge->count * scale + REG_BR_PROB_BASE / 2)
1411 / REG_BR_PROB_BASE);
1412 node->count = ((node->count * scale + REG_BR_PROB_BASE / 2)
1413 / REG_BR_PROB_BASE);
1417 /* Input and merge the cgraph from each of the .o files passed to
1423 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1424 struct lto_file_decl_data *file_data;
1426 struct cgraph_node *node;
1428 while ((file_data = file_data_vec[j++]))
1432 struct lto_input_block *ib;
1433 VEC(cgraph_node_ptr, heap) *nodes;
1434 VEC(varpool_node_ptr, heap) *varpool;
1436 ib = lto_create_simple_input_block (file_data, LTO_section_cgraph,
1439 fatal_error ("cannot find LTO cgraph in %s", file_data->file_name);
1440 input_profile_summary (ib, file_data);
1441 file_data->cgraph_node_encoder = lto_cgraph_encoder_new ();
1442 nodes = input_cgraph_1 (file_data, ib);
1443 lto_destroy_simple_input_block (file_data, LTO_section_cgraph,
1446 ib = lto_create_simple_input_block (file_data, LTO_section_varpool,
1449 fatal_error ("cannot find LTO varpool in %s", file_data->file_name);
1450 varpool = input_varpool_1 (file_data, ib);
1451 lto_destroy_simple_input_block (file_data, LTO_section_varpool,
1454 ib = lto_create_simple_input_block (file_data, LTO_section_refs,
1457 fatal_error("cannot find LTO section refs in %s", file_data->file_name);
1458 input_refs (ib, nodes, varpool);
1459 lto_destroy_simple_input_block (file_data, LTO_section_refs,
1462 input_cgraph_opt_summary (nodes);
1463 VEC_free (cgraph_node_ptr, heap, nodes);
1464 VEC_free (varpool_node_ptr, heap, varpool);
1467 merge_profile_summaries (file_data_vec);
1469 /* Clear out the aux field that was used to store enough state to
1470 tell which nodes should be overwritten. */
1471 for (node = cgraph_nodes; node; node = node->next)
1473 /* Some nodes may have been created by cgraph_node. This
1474 happens when the callgraph contains nested functions. If the
1475 node for the parent function was never emitted to the gimple
1476 file, cgraph_node will create a node for it when setting the
1477 context of the nested function. */
1478 if (node->local.lto_file_data)
1483 /* True when we need optimization summary for NODE. */
1486 output_cgraph_opt_summary_p (struct cgraph_node *node,
1487 cgraph_node_set set ATTRIBUTE_UNUSED)
1489 return (node->clone_of
1490 && (node->clone.tree_map
1491 || node->clone.args_to_skip
1492 || node->clone.combined_args_to_skip));
1495 /* Output optimization summary for EDGE to OB. */
1497 output_edge_opt_summary (struct output_block *ob ATTRIBUTE_UNUSED,
1498 struct cgraph_edge *edge ATTRIBUTE_UNUSED)
1502 /* Output optimization summary for NODE to OB. */
1505 output_node_opt_summary (struct output_block *ob,
1506 struct cgraph_node *node,
1507 cgraph_node_set set)
1511 struct ipa_replace_map *map;
1512 struct bitpack_d bp;
1514 struct cgraph_edge *e;
1516 if (node->clone.args_to_skip)
1518 streamer_write_uhwi (ob, bitmap_count_bits (node->clone.args_to_skip));
1519 EXECUTE_IF_SET_IN_BITMAP (node->clone.args_to_skip, 0, index, bi)
1520 streamer_write_uhwi (ob, index);
1523 streamer_write_uhwi (ob, 0);
1524 if (node->clone.combined_args_to_skip)
1526 streamer_write_uhwi (ob, bitmap_count_bits (node->clone.combined_args_to_skip));
1527 EXECUTE_IF_SET_IN_BITMAP (node->clone.combined_args_to_skip, 0, index, bi)
1528 streamer_write_uhwi (ob, index);
1531 streamer_write_uhwi (ob, 0);
1532 streamer_write_uhwi (ob, VEC_length (ipa_replace_map_p,
1533 node->clone.tree_map));
1534 FOR_EACH_VEC_ELT (ipa_replace_map_p, node->clone.tree_map, i, map)
1539 for (parm_num = 0, parm = DECL_ARGUMENTS (node->decl); parm;
1540 parm = DECL_CHAIN (parm), parm_num++)
1541 if (map->old_tree == parm)
1543 /* At the moment we assume all old trees to be PARM_DECLs, because we have no
1544 mechanism to store function local declarations into summaries. */
1546 streamer_write_uhwi (ob, parm_num);
1547 stream_write_tree (ob, map->new_tree, true);
1548 bp = bitpack_create (ob->main_stream);
1549 bp_pack_value (&bp, map->replace_p, 1);
1550 bp_pack_value (&bp, map->ref_p, 1);
1551 streamer_write_bitpack (&bp);
1554 if (cgraph_node_in_set_p (node, set))
1556 for (e = node->callees; e; e = e->next_callee)
1557 output_edge_opt_summary (ob, e);
1558 for (e = node->indirect_calls; e; e = e->next_callee)
1559 output_edge_opt_summary (ob, e);
1563 /* Output optimization summaries stored in callgraph.
1564 At the moment it is the clone info structure. */
1567 output_cgraph_opt_summary (cgraph_node_set set)
1569 struct cgraph_node *node;
1571 lto_cgraph_encoder_t encoder;
1572 struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
1575 ob->cgraph_node = NULL;
1576 encoder = ob->decl_state->cgraph_node_encoder;
1577 n_nodes = lto_cgraph_encoder_size (encoder);
1578 for (i = 0; i < n_nodes; i++)
1579 if (output_cgraph_opt_summary_p (lto_cgraph_encoder_deref (encoder, i),
1582 streamer_write_uhwi (ob, count);
1583 for (i = 0; i < n_nodes; i++)
1585 node = lto_cgraph_encoder_deref (encoder, i);
1586 if (output_cgraph_opt_summary_p (node, set))
1588 streamer_write_uhwi (ob, i);
1589 output_node_opt_summary (ob, node, set);
1592 produce_asm (ob, NULL);
1593 destroy_output_block (ob);
1596 /* Input optimisation summary of EDGE. */
1599 input_edge_opt_summary (struct cgraph_edge *edge ATTRIBUTE_UNUSED,
1600 struct lto_input_block *ib_main ATTRIBUTE_UNUSED)
1604 /* Input optimisation summary of NODE. */
1607 input_node_opt_summary (struct cgraph_node *node,
1608 struct lto_input_block *ib_main,
1609 struct data_in *data_in)
1614 struct bitpack_d bp;
1615 struct cgraph_edge *e;
1617 count = streamer_read_uhwi (ib_main);
1619 node->clone.args_to_skip = BITMAP_GGC_ALLOC ();
1620 for (i = 0; i < count; i++)
1622 bit = streamer_read_uhwi (ib_main);
1623 bitmap_set_bit (node->clone.args_to_skip, bit);
1625 count = streamer_read_uhwi (ib_main);
1627 node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
1628 for (i = 0; i < count; i++)
1630 bit = streamer_read_uhwi (ib_main);
1631 bitmap_set_bit (node->clone.combined_args_to_skip, bit);
1633 count = streamer_read_uhwi (ib_main);
1634 for (i = 0; i < count; i++)
1638 struct ipa_replace_map *map = ggc_alloc_ipa_replace_map ();
1640 VEC_safe_push (ipa_replace_map_p, gc, node->clone.tree_map, map);
1641 for (parm_num = 0, parm = DECL_ARGUMENTS (node->decl); parm_num;
1642 parm = DECL_CHAIN (parm))
1644 map->parm_num = streamer_read_uhwi (ib_main);
1645 map->old_tree = NULL;
1646 map->new_tree = stream_read_tree (ib_main, data_in);
1647 bp = streamer_read_bitpack (ib_main);
1648 map->replace_p = bp_unpack_value (&bp, 1);
1649 map->ref_p = bp_unpack_value (&bp, 1);
1651 for (e = node->callees; e; e = e->next_callee)
1652 input_edge_opt_summary (e, ib_main);
1653 for (e = node->indirect_calls; e; e = e->next_callee)
1654 input_edge_opt_summary (e, ib_main);
1657 /* Read section in file FILE_DATA of length LEN with data DATA. */
1660 input_cgraph_opt_section (struct lto_file_decl_data *file_data,
1661 const char *data, size_t len, VEC (cgraph_node_ptr,
1664 const struct lto_function_header *header =
1665 (const struct lto_function_header *) data;
1666 const int32_t cfg_offset = sizeof (struct lto_function_header);
1667 const int32_t main_offset = cfg_offset + header->cfg_size;
1668 const int32_t string_offset = main_offset + header->main_size;
1669 struct data_in *data_in;
1670 struct lto_input_block ib_main;
1674 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
1678 lto_data_in_create (file_data, (const char *) data + string_offset,
1679 header->string_size, NULL);
1680 count = streamer_read_uhwi (&ib_main);
1682 for (i = 0; i < count; i++)
1684 int ref = streamer_read_uhwi (&ib_main);
1685 input_node_opt_summary (VEC_index (cgraph_node_ptr, nodes, ref),
1688 lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
1690 lto_data_in_delete (data_in);
1693 /* Input optimization summary of cgraph. */
1696 input_cgraph_opt_summary (VEC (cgraph_node_ptr, heap) * nodes)
1698 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1699 struct lto_file_decl_data *file_data;
1702 while ((file_data = file_data_vec[j++]))
1706 lto_get_section_data (file_data, LTO_section_cgraph_opt_sum, NULL,
1710 input_cgraph_opt_section (file_data, data, len, nodes);