1 /* Top-level LTO routines.
2 Copyright 2009, 2010 Free Software Foundation, Inc.
3 Contributed by CodeSourcery, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "diagnostic-core.h"
29 #include "libiberty.h"
32 #include "tree-ssa-operands.h"
33 #include "tree-pass.h"
34 #include "langhooks.h"
37 #include "pointer-set.h"
45 #include "lto-streamer.h"
46 #include "splay-tree.h"
49 /* This needs to be included after config.h. Otherwise, _GNU_SOURCE will not
50 be defined in time to set __USE_GNU in the system headers, and strsignal
51 will not be declared. */
56 /* Handle opening elf files on hosts, such as Windows, that may use
57 text file handling that will break binary access. */
/* Personality routine of the first function body read in that has one;
   NOTE(review): presumably used later as the default personality for the
   unit -- confirm against the elided remainder of this file.  */
63 static GTY(()) tree first_personality_decl;
65 /* Returns a hash code for P. */
/* htab hash callback for the section hash table: P is a
   struct lto_section_slot; the hash is taken over its NAME string.  */
68 hash_name (const void *p)
70 const struct lto_section_slot *ds = (const struct lto_section_slot *) p;
71 return (hashval_t) htab_hash_string (ds->name);
75 /* Returns nonzero if P1 and P2 are equal. */
/* htab equality callback paired with hash_name: two section slots are
   equal when their NAME strings compare equal.  */
78 eq_name (const void *p1, const void *p2)
80 const struct lto_section_slot *s1 =
81 (const struct lto_section_slot *) p1;
82 const struct lto_section_slot *s2 =
83 (const struct lto_section_slot *) p2;
85 return strcmp (s1->name, s2->name) == 0;
88 /* Free lto_section_slot */
/* htab delete callback: release the malloc'ed section name owned by the
   slot.  CONST_CAST is needed because NAME is stored const.
   NOTE(review): the statement freeing the slot itself is elided from
   this listing.  */
91 free_with_string (void *arg)
93 struct lto_section_slot *s = (struct lto_section_slot *)arg;
95 free (CONST_CAST (char *, s->name));
99 /* Create section hash table */
/* 37 is just a small initial bucket count; free_with_string releases
   each slot's name when the table is deleted.  */
102 lto_obj_create_section_hash_table (void)
104 return htab_create (37, hash_name, eq_name, free_with_string);
107 /* Read the constructors and inits. */
/* Stream in the static-initializer section of FILE_DATA, hand it to the
   streamer, then release the section buffer.  */
110 lto_materialize_constructors_and_inits (struct lto_file_decl_data * file_data)
113 const char *data = lto_get_section_data (file_data,
114 LTO_section_static_initializer,
116 lto_input_constructors_and_inits (file_data, data);
117 lto_free_section_data (file_data, LTO_section_static_initializer, NULL,
121 /* Return true when NODE has a clone that is analyzed (i.e. we need
122 to load its body even if the node itself is not needed). */
/* Iterative walk over the clone tree rooted at NODE using the
   next_sibling_clone / clone_of links; the loop below is the "move back
   up until a sibling exists" step of that traversal.  */
125 has_analyzed_clone_p (struct cgraph_node *node)
127 struct cgraph_node *orig = node;
136 else if (node->next_sibling_clone)
137 node = node->next_sibling_clone;
140 while (node != orig && !node->next_sibling_clone)
141 node = node->clone_of;
143 node = node->next_sibling_clone;
149 /* Read the function body for the function associated with NODE. */
152 lto_materialize_function (struct cgraph_node *node)
155 struct lto_file_decl_data *file_data;
156 const char *data, *name;
160 /* Read in functions with body (analyzed nodes)
161 and also functions that are needed to produce virtual clones. */
162 if (node->analyzed || has_analyzed_clone_p (node))
164 /* Clones don't need to be read. */
167 file_data = node->local.lto_file_data;
168 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
170 /* We may have renamed the declaration, e.g., a static function. */
171 name = lto_get_decl_name_mapping (file_data, name);
/* Locate the body section by assembler name; a missing section is a
   hard error since this node was deemed needed.  */
173 data = lto_get_section_data (file_data, LTO_section_function_body,
176 fatal_error ("%s: section %s is missing",
177 file_data->file_name,
180 gcc_assert (DECL_STRUCT_FUNCTION (decl) == NULL);
182 /* Load the function body only if not operating in WPA mode. In
183 WPA mode, the body of the function is not needed. */
186 allocate_struct_function (decl, false);
187 announce_function (decl);
188 lto_input_function_body (file_data, decl, data);
/* Remember the first personality routine seen while reading bodies.  */
189 if (DECL_FUNCTION_PERSONALITY (decl) && !first_personality_decl)
190 first_personality_decl = DECL_FUNCTION_PERSONALITY (decl);
191 lto_stats.num_function_bodies++;
194 lto_free_section_data (file_data, LTO_section_function_body, name,
200 /* Let the middle end know about the function. */
201 rest_of_decl_compilation (decl, 1, 0);
205 /* Decode the content of memory pointed to by DATA in the
206 in decl state object STATE. DATA_IN points to a data_in structure for
207 decoding. Return the address after the decoded object in the input. */
209 static const uint32_t *
210 lto_read_in_decl_state (struct data_in *data_in, const uint32_t *data,
211 struct lto_in_decl_state *state)
/* First word: streamer-cache index of the function decl this state
   belongs to; void_type_node stands for the global decl state.  */
218 decl = lto_streamer_cache_get (data_in->reader_cache, (int) ix);
219 if (TREE_CODE (decl) != FUNCTION_DECL)
221 gcc_assert (decl == void_type_node);
224 state->fn_decl = decl;
/* Then one stream per LTO_N_DECL_STREAMS: a size word followed by that
   many cache indices.  Each referenced tree is fetched from the reader
   cache; types are additionally merged through the global type table.  */
226 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
228 uint32_t size = *data++;
229 tree *decls = ggc_alloc_vec_tree (size);
231 for (j = 0; j < size; j++)
233 decls[j] = lto_streamer_cache_get (data_in->reader_cache, data[j]);
235 /* Register every type in the global type table. If the
236 type existed already, use the existing type. */
237 if (TYPE_P (decls[j]))
238 decls[j] = gimple_register_type (decls[j]);
241 state->streams[i].size = size;
242 state->streams[i].trees = decls;
250 /* Read all the symbols from buffer DATA, using descriptors in DECL_DATA.
251 RESOLUTIONS is the set of symbols picked by the linker (read from the
252 resolution file when the linker plugin is being used). */
255 lto_read_decls (struct lto_file_decl_data *decl_data, const void *data,
256 VEC(ld_plugin_symbol_resolution_t,heap) *resolutions)
258 const struct lto_decl_header *header = (const struct lto_decl_header *) data;
/* Section layout: header, decl-state words, main tree stream, string
   table; the offsets below are derived from the header sizes.  */
259 const int32_t decl_offset = sizeof (struct lto_decl_header);
260 const int32_t main_offset = decl_offset + header->decl_state_size;
261 const int32_t string_offset = main_offset + header->main_size;
262 struct lto_input_block ib_main;
263 struct data_in *data_in;
265 const uint32_t *data_ptr, *data_end;
266 uint32_t num_decl_states;
268 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
271 data_in = lto_data_in_create (decl_data, (const char *) data + string_offset,
272 header->string_size, resolutions);
274 /* Read the global declarations and types. */
275 while (ib_main.p < ib_main.len)
277 tree t = lto_input_tree (&ib_main, data_in);
278 gcc_assert (t && ib_main.p <= ib_main.len);
281 /* Read in lto_in_decl_state objects. */
282 data_ptr = (const uint32_t *) ((const char*) data + decl_offset);
284 (const uint32_t *) ((const char*) data_ptr + header->decl_state_size);
285 num_decl_states = *data_ptr++;
/* State 0 is always the global decl state; states 1..N-1 are
   per-function and are entered into a hash table keyed by fn_decl.  */
287 gcc_assert (num_decl_states > 0);
288 decl_data->global_decl_state = lto_new_in_decl_state ();
289 data_ptr = lto_read_in_decl_state (data_in, data_ptr,
290 decl_data->global_decl_state);
292 /* Read in per-function decl states and enter them in hash table. */
293 decl_data->function_decl_states =
294 htab_create_ggc (37, lto_hash_in_decl_state, lto_eq_in_decl_state, NULL);
296 for (i = 1; i < num_decl_states; i++)
298 struct lto_in_decl_state *state = lto_new_in_decl_state ();
301 data_ptr = lto_read_in_decl_state (data_in, data_ptr, state);
302 slot = htab_find_slot (decl_data->function_decl_states, state, INSERT);
303 gcc_assert (*slot == NULL);
307 if (data_ptr != data_end)
308 internal_error ("bytecode stream: garbage at the end of symbols section");
310 /* Set the current decl state to be the global state. */
311 decl_data->current_decl_state = decl_data->global_decl_state;
313 lto_data_in_delete (data_in);
316 /* strtoll is not portable. */
/* Parse a hexadecimal number from the NUL-terminated string P, aborting
   via internal_error on any non-hex character.  NOTE(review): the
   accumulate/shift statements are elided from this listing.  */
318 lto_parse_hex (const char *p) {
320 for (; *p != '\0'; ++p)
325 if (c >= '0' && c <= '9')
327 else if (c >= 'a' && c <= 'f')
329 else if (c >= 'A' && c <= 'F')
332 internal_error ("could not parse hex number");
338 /* Read resolution for file named FILE_NAME. The resolution is read from
/* NOTE(review): the fscanf/fread calls below ignore their return
   values; a short read would only be caught indirectly by the strcmp
   and parse checks that follow.  */
342 lto_resolution_read (splay_tree file_ids, FILE *resolution, lto_file *file)
344 /* We require that objects in the resolution file are in the same
345 order as the lto1 command line. */
346 unsigned int name_len;
348 unsigned int num_symbols;
350 struct lto_file_decl_data *file_data;
351 unsigned max_index = 0;
352 splay_tree_node nd = NULL;
357 name_len = strlen (file->filename);
358 obj_name = XNEWVEC (char, name_len + 1);
359 fscanf (resolution, " "); /* Read white space. */
361 fread (obj_name, sizeof (char), name_len, resolution);
362 obj_name[name_len] = '\0';
363 if (strcmp (obj_name, file->filename) != 0)
364 internal_error ("unexpected file name %s in linker resolution file. "
365 "Expected %s", obj_name, file->filename);
/* For archive members / fat objects, the offset recorded by the linker
   plugin must match the offset of the file we are reading.  */
366 if (file->offset != 0)
371 t = fscanf (resolution, "@0x%16s", offset_p);
373 internal_error ("could not parse file offset");
374 offset = lto_parse_hex (offset_p);
375 if (offset != file->offset)
376 internal_error ("unexpected offset");
381 fscanf (resolution, "%u", &num_symbols);
383 for (i = 0; i < num_symbols; i++)
388 enum ld_plugin_symbol_resolution r = (enum ld_plugin_symbol_resolution) 0;
390 unsigned int lto_resolution_str_len =
391 sizeof (lto_resolution_str) / sizeof (char *);
393 t = fscanf (resolution, "%u %x %26s %*[^\n]\n", &index, &id, r_str);
395 internal_error ("Invalid line in the resolution file.");
396 if (index > max_index)
/* Map the textual resolution back to the enum value by linear search
   over the lto_resolution_str name table.  */
399 for (j = 0; j < lto_resolution_str_len; j++)
401 if (strcmp (lto_resolution_str[j], r_str) == 0)
403 r = (enum ld_plugin_symbol_resolution) j;
407 if (j == lto_resolution_str_len)
408 internal_error ("Invalid resolution in the resolution file.");
/* ND caches the last splay-tree node so consecutive symbols for the
   same sub id avoid repeated lookups.  */
410 if (!(nd && nd->key == id))
412 nd = splay_tree_lookup (file_ids, id);
414 internal_error ("Resolution sub id %x not in object file", id);
417 file_data = (struct lto_file_decl_data *)nd->value;
418 if (cgraph_dump_file)
419 fprintf (cgraph_dump_file, "Adding resolution %u %u to id %x\n",
420 index, r, file_data->id);
421 VEC_safe_grow_cleared (ld_plugin_symbol_resolution_t, heap,
422 file_data->resolutions,
424 VEC_replace (ld_plugin_symbol_resolution_t,
425 file_data->resolutions, index, r);
429 /* Is the name for a id'ed LTO section? */
/* Return true iff NAME is an LTO section name carrying a sub-module id
   suffix (".<hex>" after the LTO section prefix); on success the id is
   stored through *ID.  */
432 lto_section_with_id (const char *name, unsigned *id)
436 if (strncmp (name, LTO_SECTION_NAME_PREFIX, strlen (LTO_SECTION_NAME_PREFIX)))
438 s = strrchr (name, '.');
439 return s && sscanf (s, ".%x", id) == 1;
442 /* Create file_data of each sub file id */
/* htab_traverse callback: distribute the section in *SLOT into the
   per-sub-id section hash table kept in the splay tree DATA, creating
   the lto_file_decl_data for a sub id the first time it is seen.  */
445 create_subid_section_table (void **slot, void *data)
447 struct lto_section_slot s_slot, *new_slot;
448 struct lto_section_slot *ls = *(struct lto_section_slot **)slot;
449 splay_tree file_ids = (splay_tree)data;
454 struct lto_file_decl_data *file_data;
456 if (!lto_section_with_id (ls->name, &id))
459 /* Find hash table of sub module id */
460 nd = splay_tree_lookup (file_ids, id);
463 file_data = (struct lto_file_decl_data *)nd->value;
467 file_data = ggc_alloc_lto_file_decl_data ();
468 memset(file_data, 0, sizeof (struct lto_file_decl_data));
/* NOTE(review): stray ';;' below is a harmless empty statement.  */
470 file_data->section_hash_table = lto_obj_create_section_hash_table ();;
471 splay_tree_insert (file_ids, id, (splay_tree_value)file_data);
474 /* Copy section into sub module hash table */
475 new_name = XDUPVEC (char, ls->name, strlen (ls->name) + 1);
476 s_slot.name = new_name;
477 hash_slot = htab_find_slot (file_data->section_hash_table, &s_slot, INSERT);
478 gcc_assert (*hash_slot == NULL);
/* The new slot takes ownership of NEW_NAME; free_with_string releases
   it when the sub table is deleted.  */
480 new_slot = XDUP (struct lto_section_slot, ls);
481 new_slot->name = new_name;
482 *hash_slot = new_slot;
486 /* Read declarations and other initializations for a FILE_DATA. */
489 lto_file_finalize (struct lto_file_decl_data *file_data, lto_file *file)
494 file_data->renaming_hash_table = lto_create_renaming_table ();
495 file_data->file_name = file->filename;
/* The decls section must be present in every LTO object.  */
496 data = lto_get_section_data (file_data, LTO_section_decls, NULL, &len);
499 internal_error ("Cannot read LTO decls from %s", file_data->file_name);
502 lto_read_decls (file_data, data, file_data->resolutions);
503 lto_free_section_data (file_data, LTO_section_decls, NULL, data, len);
509 struct lto_file_decl_data **file_data;
513 /* Traverse ids and create a list of file_datas out of it. */
/* splay_tree_foreach callback: finalize each sub-file and prepend it to
   the caller's linked list (so the list ends up in reverse traversal
   order).  */
515 static int lto_create_files_from_ids (splay_tree_node node, void *data)
517 struct lwstate *lw = (struct lwstate *)data;
518 struct lto_file_decl_data *file_data = (struct lto_file_decl_data *)node->value;
520 lto_file_finalize (file_data, lw->file);
521 if (cgraph_dump_file)
522 fprintf (cgraph_dump_file, "Creating file %s with sub id %x\n",
523 file_data->file_name, file_data->id);
524 file_data->next = *lw->file_data;
525 *lw->file_data = file_data;
530 /* Generate a TREE representation for all types and external decls
533 Read all of the globals out of the file. Then read the cgraph
534 and process the .o index into the cgraph nodes so that it can open
535 the .o file to load the functions and ipa information. */
537 static struct lto_file_decl_data *
538 lto_file_read (lto_file *file, FILE *resolution_file, int *count)
540 struct lto_file_decl_data *file_data = NULL;
542 htab_t section_hash_table;
543 struct lwstate state;
545 section_hash_table = lto_obj_build_section_table (file);
547 /* Find all sub modules in the object and put their sections into new hash
548 tables in a splay tree. */
549 file_ids = splay_tree_new (splay_tree_compare_ints, NULL, NULL);
550 htab_traverse (section_hash_table, create_subid_section_table, file_ids);
552 /* Add resolutions to file ids */
553 lto_resolution_read (file_ids, resolution_file, file);
555 /* Finalize each lto file for each submodule in the merged object
556 and create list for returning. */
558 state.file_data = &file_data;
560 splay_tree_foreach (file_ids, lto_create_files_from_ids, &state);
/* The splay tree and the outer section table were only needed while
   building the per-sub-id tables; discard them now.  */
562 splay_tree_delete (file_ids);
563 htab_delete (section_hash_table);
568 #if HAVE_MMAP_FILE && HAVE_SYSCONF && defined _SC_PAGE_SIZE
569 #define LTO_MMAP_IO 1
573 /* Page size of machine is used for mmap and munmap calls. */
/* Stored as a mask, ~(page_size - 1); computed lazily on first use.  */
574 static size_t page_mask;
577 /* Get the section data of length LEN from FILENAME starting at
578 OFFSET. The data segment must be freed by the caller when the
579 caller is finished. Returns NULL if all was not well. */
582 lto_read_section_data (struct lto_file_decl_data *file_data,
583 intptr_t offset, size_t len)
587 static char *fd_name;
589 intptr_t computed_len;
590 intptr_t computed_offset;
594 /* Keep a single-entry file-descriptor cache. The last file we
595 touched will get closed at exit.
596 ??? Eventually we want to add a more sophisticated larger cache
597 or rather fix function body streaming to not stream them in
598 practically random order. */
600 && strcmp (fd_name, file_data->file_name) != 0)
608 fd_name = xstrdup (file_data->file_name);
/* O_BINARY matters on hosts whose default text mode would mangle the
   byte stream (e.g. Windows).  */
609 fd = open (file_data->file_name, O_RDONLY|O_BINARY);
617 size_t page_size = sysconf (_SC_PAGE_SIZE);
618 page_mask = ~(page_size - 1);
/* mmap requires a page-aligned offset: round OFFSET down, map the extra
   DIFF bytes too, and return a pointer past them.  */
621 computed_offset = offset & page_mask;
622 diff = offset - computed_offset;
623 computed_len = len + diff;
625 result = (char *) mmap (NULL, computed_len, PROT_READ, MAP_PRIVATE,
626 fd, computed_offset);
627 if (result == MAP_FAILED)
630 return result + diff;
/* Non-mmap fallback: plain lseek + read into xmalloc'ed memory.  */
632 result = (char *) xmalloc (len);
633 if (lseek (fd, offset, SEEK_SET) != offset
634 || read (fd, result, len) != (ssize_t) len)
645 /* Get the section data from FILE_DATA of SECTION_TYPE with NAME.
646 NAME will be NULL unless the section type is for a function
650 get_section_data (struct lto_file_decl_data *file_data,
651 enum lto_section_type section_type,
655 htab_t section_hash_table = file_data->section_hash_table;
656 struct lto_section_slot *f_slot;
657 struct lto_section_slot s_slot;
658 const char *section_name = lto_get_section_name (section_type, name, file_data);
/* Look the section up by name; on a hit, read its bytes from disk at
   the recorded start/len.  */
662 s_slot.name = section_name;
663 f_slot = (struct lto_section_slot *) htab_find (section_hash_table, &s_slot);
666 data = lto_read_section_data (file_data, f_slot->start, f_slot->len);
/* SECTION_NAME was allocated by lto_get_section_name; release it on
   every path.  */
670 free (CONST_CAST (char *, section_name));
675 /* Free the section data from FILE_DATA of SECTION_TYPE with NAME that
676 starts at OFFSET and has LEN bytes. */
679 free_section_data (struct lto_file_decl_data *file_data ATTRIBUTE_UNUSED,
680 enum lto_section_type section_type ATTRIBUTE_UNUSED,
681 const char *name ATTRIBUTE_UNUSED,
682 const char *offset, size_t len ATTRIBUTE_UNUSED)
685 intptr_t computed_len;
686 intptr_t computed_offset;
/* Mirror of the page rounding done in lto_read_section_data: recover
   the page-aligned base address and length before munmap.  */
691 computed_offset = ((intptr_t) offset) & page_mask;
692 diff = (intptr_t) offset - computed_offset;
693 computed_len = len + diff;
695 munmap ((caddr_t) computed_offset, computed_len);
/* Non-mmap fallback: the buffer came from xmalloc.  */
697 free (CONST_CAST(char *, offset));
701 /* Structure describing ltrans partitions. */
703 struct GTY (()) ltrans_partition_def
705 cgraph_node_set cgraph_set;
706 varpool_node_set varpool_set;
707 const char * GTY ((skip)) name;
711 typedef struct ltrans_partition_def *ltrans_partition;
712 DEF_VEC_P(ltrans_partition);
713 DEF_VEC_ALLOC_P(ltrans_partition,gc);
/* All partitions created so far, in creation order; GC-rooted.  */
715 static GTY (()) VEC(ltrans_partition, gc) *ltrans_partitions;
/* Forward declarations: the two add_* routines below are mutually
   recursive through add_references_to_partition.  */
717 static void add_cgraph_node_to_partition (ltrans_partition part, struct cgraph_node *node);
718 static void add_varpool_node_to_partition (ltrans_partition part, struct varpool_node *vnode);
720 /* Create new partition with name NAME. */
/* Allocates the partition in GC memory, gives it fresh empty cgraph and
   varpool sets, and registers it in ltrans_partitions.  */
721 static ltrans_partition
722 new_partition (const char *name)
724 ltrans_partition part = ggc_alloc_ltrans_partition_def ();
725 part->cgraph_set = cgraph_node_set_new ();
726 part->varpool_set = varpool_node_set_new ();
729 VEC_safe_push (ltrans_partition, gc, ltrans_partitions, part);
733 /* See all references that go to comdat objects and bring them into partition too. */
/* COMDATs are duplicated into every partition that needs them, so pull
   referenced comdat functions and variables in recursively.  */
735 add_references_to_partition (ltrans_partition part, struct ipa_ref_list *refs)
739 for (i = 0; ipa_ref_list_reference_iterate (refs, i, ref); i++)
741 if (ref->refered_type == IPA_REF_CGRAPH
742 && DECL_COMDAT (ipa_ref_node (ref)->decl)
743 && !cgraph_node_in_set_p (ipa_ref_node (ref), part->cgraph_set))
744 add_cgraph_node_to_partition (part, ipa_ref_node (ref));
746 if (ref->refered_type == IPA_REF_VARPOOL
747 && DECL_COMDAT (ipa_ref_varpool_node (ref)->decl)
748 && !varpool_node_in_set_p (ipa_ref_varpool_node (ref), part->varpool_set))
749 add_varpool_node_to_partition (part, ipa_ref_varpool_node (ref));
753 /* Add NODE to partition as well as the inline callees and referred comdats into partition PART. */
756 add_cgraph_node_to_partition (ltrans_partition part, struct cgraph_node *node)
758 struct cgraph_edge *e;
760 part->insns += node->local.inline_summary.self_size;
/* AUX counts in how many partitions the node currently lives.  */
763 node->in_other_partition = 1;
764 node->aux = (void *)((size_t)node->aux + 1);
766 cgraph_node_set_add (part->cgraph_set, node);
/* Inlined callees must live in the same partition; comdat callees get
   duplicated into it.  */
768 for (e = node->callees; e; e = e->next_callee)
769 if ((!e->inline_failed || DECL_COMDAT (e->callee->decl))
770 && !cgraph_node_in_set_p (e->callee, part->cgraph_set))
771 add_cgraph_node_to_partition (part, e->callee);
773 add_references_to_partition (part, &node->ref_list);
/* Keep whole comdat groups together.  */
775 if (node->same_comdat_group
776 && !cgraph_node_in_set_p (node->same_comdat_group, part->cgraph_set))
777 add_cgraph_node_to_partition (part, node->same_comdat_group);
780 /* Add VNODE to partition as well as comdat references partition PART. */
783 add_varpool_node_to_partition (ltrans_partition part, struct varpool_node *vnode)
785 varpool_node_set_add (part->varpool_set, vnode);
/* AUX counts in how many partitions the node currently lives.  */
788 vnode->in_other_partition = 1;
789 vnode->aux = (void *)((size_t)vnode->aux + 1);
791 add_references_to_partition (part, &vnode->ref_list);
/* Keep whole comdat groups together.  */
793 if (vnode->same_comdat_group
794 && !varpool_node_in_set_p (vnode->same_comdat_group, part->varpool_set))
795 add_varpool_node_to_partition (part, vnode->same_comdat_group);
798 /* Undo all additions until number of cgraph nodes in PARTITION is N_CGRAPH_NODES
799 and number of varpool nodes is N_VARPOOL_NODES. */
/* Pops nodes off the end of both sets, reversing the bookkeeping done
   by the add_* routines (the insns total and the AUX partition count).  */
802 undo_partition (ltrans_partition partition, unsigned int n_cgraph_nodes,
803 unsigned int n_varpool_nodes)
805 while (VEC_length (cgraph_node_ptr, partition->cgraph_set->nodes) >
808 struct cgraph_node *node = VEC_index (cgraph_node_ptr,
809 partition->cgraph_set->nodes,
811 partition->insns -= node->local.inline_summary.self_size;
812 cgraph_node_set_remove (partition->cgraph_set, node);
813 node->aux = (void *)((size_t)node->aux - 1);
815 while (VEC_length (varpool_node_ptr, partition->varpool_set->nodes) >
818 struct varpool_node *node = VEC_index (varpool_node_ptr,
819 partition->varpool_set->nodes,
821 varpool_node_set_remove (partition->varpool_set, node);
822 node->aux = (void *)((size_t)node->aux - 1);
826 /* Return true if NODE should be partitioned.
827 This means that the partitioning algorithm should put NODE into one of the partitions.
828 This applies to most functions with bodies. Functions that are not partitioned
829 are put into every unit needing them. This is the case of e.g. COMDATs. */
832 partition_cgraph_node_p (struct cgraph_node *node)
834 /* We will get proper partition based on function they are inlined to. */
835 if (node->global.inlined_to)
837 /* Nodes without a body do not need partitioning. */
840 /* Extern inlines and comdat are always only in partitions they are needed. */
841 if (DECL_EXTERNAL (node->decl)
842 || (DECL_COMDAT (node->decl)
843 && !cgraph_used_from_object_file_p (node)))
848 /* Return true if VNODE should be partitioned.
849 This means that the partitioning algorithm should put VNODE into one of the partitions. */
852 partition_varpool_node_p (struct varpool_node *vnode)
/* Aliases and unneeded variables are never partitioned.  */
854 if (vnode->alias || !vnode->needed)
856 /* Constant pool and comdat are always only in partitions they are needed. */
857 if (DECL_IN_CONSTANT_POOL (vnode->decl)
858 || (DECL_COMDAT (vnode->decl)
859 && !varpool_used_from_object_file_p (vnode)))
864 /* Group cgraph nodes by input files. This is used mainly for testing
868 lto_1_to_1_map (void)
870 struct cgraph_node *node;
871 struct varpool_node *vnode;
872 struct lto_file_decl_data *file_data;
873 struct pointer_map_t *pmap;
874 ltrans_partition partition;
878 timevar_push (TV_WHOPR_WPA);
/* PMAP maps lto_file_decl_data to the partition created for it, so all
   nodes from one input file land in the same partition.  */
880 pmap = pointer_map_create ();
882 for (node = cgraph_nodes; node; node = node->next)
884 if (!partition_cgraph_node_p (node))
887 file_data = node->local.lto_file_data;
888 gcc_assert (!node->same_body_alias);
892 slot = pointer_map_contains (pmap, file_data);
894 partition = (ltrans_partition) *slot;
897 partition = new_partition (file_data->file_name);
898 slot = pointer_map_insert (pmap, file_data);
/* Nodes without file data go to the first existing partition, or to a
   fresh unnamed one keyed by NULL.  */
904 && VEC_length (ltrans_partition, ltrans_partitions))
905 partition = VEC_index (ltrans_partition, ltrans_partitions, 0);
908 partition = new_partition ("");
909 slot = pointer_map_insert (pmap, NULL);
914 add_cgraph_node_to_partition (partition, node);
/* Same mapping for variables.  */
917 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
919 if (!partition_varpool_node_p (vnode))
921 file_data = vnode->lto_file_data;
922 slot = pointer_map_contains (pmap, file_data);
924 partition = (ltrans_partition) *slot;
927 partition = new_partition (file_data->file_name);
928 slot = pointer_map_insert (pmap, file_data);
933 add_varpool_node_to_partition (partition, vnode);
/* Clear the AUX partition counters set by the add_* routines.  */
935 for (node = cgraph_nodes; node; node = node->next)
937 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
940 /* If the cgraph is empty, create one cgraph node set so that there is still
941 an output file for any variables that need to be exported in a DSO. */
943 new_partition ("empty");
945 pointer_map_destroy (pmap);
947 timevar_pop (TV_WHOPR_WPA);
949 lto_stats.num_cgraph_partitions += VEC_length (ltrans_partition,
954 /* Group cgraph nodes into equally sized partitions.
956 The algorithm deciding partitions is simple: nodes are taken in a predefined
957 order. The order corresponds to the order we wish to have functions in the final
958 output. In future this will be given by function reordering pass, but at
959 the moment we use topological order that serves as a good approximation.
961 The goal is to partition this linear order into intervals (partitions) such
962 that all partitions have approximately the same size and that the number of
963 callgraph or IPA reference edges crossing boundaries is minimal.
965 This is a lot faster (O(n) in size of callgraph) than algorithms doing
966 priority based graph clustering that are generally O(n^2) and since WHOPR
967 is designed to make things go well across partitions, it leads to good results.
969 We compute the expected size of partition as
970 max (total_size / lto_partitions, min_partition_size).
971 We use dynamic expected size of partition, so small programs
972 are partitioned into enough partitions to allow use of multiple CPUs while
973 large programs are not partitioned too much. Creating too many partitions
974 increases streaming overhead significantly.
976 In the future we would like to bound maximal size of partition to avoid
977 ltrans stage consuming too much memory. At the moment however WPA stage is
978 most memory intensive phase at large benchmark since too many types and
979 declarations are read into memory.
981 The function implements a simple greedy algorithm. Nodes are added into the
982 current partition until 3/4th of expected partition size is reached.
983 After this threshold we keep track of boundary size (number of edges going to
984 other partitions) and continue adding functions until the current partition
985 grows into a double of expected partition size. Then the process is undone
986 till the point when the minimal ratio of boundary size and in-partition calls
990 lto_balanced_map (void)
993 struct cgraph_node **postorder =
994 XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
995 struct cgraph_node **order = XNEWVEC (struct cgraph_node *, cgraph_max_uid);
996 int i, postorder_len;
997 struct cgraph_node *node;
1000 ltrans_partition partition;
1001 unsigned int last_visited_cgraph_node = 0, last_visited_varpool_node = 0;
1002 struct varpool_node *vnode;
1003 int cost = 0, internal = 0;
1004 int best_n_nodes = 0, best_n_varpool_nodes = 0, best_i = 0, best_cost =
1005 INT_MAX, best_internal = 0;
1008 /* Until we have better ordering facility, use toplogical order.
1009 Include only nodes we will partition and compute estimate of program
1010 size. Note that since nodes that are not partitioned might be put into
1011 multiple partitions, this is just an estimate of real size. This is why
1012 we keep partition_size updated after every partition is finalized. */
1013 postorder_len = cgraph_postorder (postorder);
1014 for (i = 0; i < postorder_len; i++)
1016 node = postorder[i];
1017 if (partition_cgraph_node_p (node))
1019 order[n_nodes++] = node;
1020 total_size += node->local.inline_summary.self_size;
1025 /* Compute partition size and create the first partition. */
1026 partition_size = total_size / PARAM_VALUE (PARAM_LTO_PARTITIONS);
1027 if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
1028 partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
1030 partition = new_partition ("");
1031 if (cgraph_dump_file)
1032 fprintf (cgraph_dump_file, "Total unit size: %i, partition size: %i\n",
1033 total_size, partition_size);
1035 for (i = 0; i < n_nodes; i++)
1037 add_cgraph_node_to_partition (partition, order[i]);
1039 /* Once we added a new node to the partition, we also want to add
1040 all referenced variables unless they was already added into some
1042 add_cgraph_node_to_partition adds possibly multiple nodes and
1043 variables that are needed to satisfy needs of ORDER[i].
1044 We remember last visited cgraph and varpool node from last iteration
1045 of outer loop that allows us to process every new addition.
1047 At the same time we compute size of the boundary into COST. Every
1048 callgraph or IPA reference edge leaving the partition contributes into
1049 COST. Every edge inside partition was earlier computed as one leaving
1050 it and thus we need to subtract it from COST. */
1051 while (last_visited_cgraph_node <
1052 VEC_length (cgraph_node_ptr, partition->cgraph_set->nodes)
1053 || last_visited_varpool_node < VEC_length (varpool_node_ptr,
1054 partition->varpool_set->
1057 struct ipa_ref_list *refs;
1059 struct ipa_ref *ref;
1060 bool cgraph_p = false;
1062 if (last_visited_cgraph_node <
1063 VEC_length (cgraph_node_ptr, partition->cgraph_set->nodes))
1065 struct cgraph_edge *edge;
1068 node = VEC_index (cgraph_node_ptr, partition->cgraph_set->nodes,
1069 last_visited_cgraph_node);
1070 refs = &node->ref_list;
1072 total_size -= node->local.inline_summary.self_size;
1073 last_visited_cgraph_node++;
1075 gcc_assert (node->analyzed);
1077 /* Compute boundary cost of callgrpah edges. */
1078 for (edge = node->callees; edge; edge = edge->next_callee)
1079 if (edge->callee->analyzed)
1081 int edge_cost = edge->frequency;
1082 cgraph_node_set_iterator csi;
1086 gcc_assert (edge_cost > 0);
1087 csi = cgraph_node_set_find (partition->cgraph_set, edge->callee);
1088 if (!csi_end_p (csi)
1089 && csi.index < last_visited_cgraph_node - 1)
1090 cost -= edge_cost, internal+= edge_cost;
1094 for (edge = node->callers; edge; edge = edge->next_caller)
1096 int edge_cost = edge->frequency;
1097 cgraph_node_set_iterator csi;
1099 gcc_assert (edge->caller->analyzed);
1102 gcc_assert (edge_cost > 0);
1103 csi = cgraph_node_set_find (partition->cgraph_set, edge->caller);
1104 if (!csi_end_p (csi)
1105 && csi.index < last_visited_cgraph_node)
1114 &VEC_index (varpool_node_ptr, partition->varpool_set->nodes,
1115 last_visited_varpool_node)->ref_list;
1116 last_visited_varpool_node++;
1119 /* Compute boundary cost of IPA REF edges and at the same time look into
1120 variables referenced from current partition and try to add them. */
1121 for (j = 0; ipa_ref_list_reference_iterate (refs, j, ref); j++)
1122 if (ref->refered_type == IPA_REF_VARPOOL)
1124 varpool_node_set_iterator vsi;
1126 vnode = ipa_ref_varpool_node (ref);
1127 if (!vnode->finalized)
1129 if (!vnode->aux && partition_varpool_node_p (vnode))
1130 add_varpool_node_to_partition (partition, vnode);
1131 vsi = varpool_node_set_find (partition->varpool_set, vnode);
1132 if (!vsi_end_p (vsi)
1133 && vsi.index < last_visited_varpool_node - !cgraph_p)
1140 cgraph_node_set_iterator csi;
1142 node = ipa_ref_node (ref);
1143 if (!node->analyzed)
1145 csi = cgraph_node_set_find (partition->cgraph_set, node);
1146 if (!csi_end_p (csi)
1147 && csi.index < last_visited_cgraph_node - cgraph_p)
1152 for (j = 0; ipa_ref_list_refering_iterate (refs, j, ref); j++)
1153 if (ref->refering_type == IPA_REF_VARPOOL)
1155 varpool_node_set_iterator vsi;
1157 vnode = ipa_ref_refering_varpool_node (ref);
1158 gcc_assert (vnode->finalized);
1159 if (!vnode->aux && partition_varpool_node_p (vnode))
1160 add_varpool_node_to_partition (partition, vnode);
1161 vsi = varpool_node_set_find (partition->varpool_set, vnode);
1162 if (!vsi_end_p (vsi)
1163 && vsi.index < last_visited_varpool_node)
1170 cgraph_node_set_iterator csi;
1172 node = ipa_ref_refering_node (ref);
1173 gcc_assert (node->analyzed);
1174 csi = cgraph_node_set_find (partition->cgraph_set, node);
1175 if (!csi_end_p (csi)
1176 && csi.index < last_visited_cgraph_node)
1183 /* If the partition is large enough, start looking for smallest boundary cost. */
1184 if (partition->insns < partition_size * 3 / 4
1185 || best_cost == INT_MAX
1187 || (best_internal * (HOST_WIDE_INT) cost
1188 > (internal * (HOST_WIDE_INT)best_cost)))
1189 && partition->insns < partition_size * 5 / 4))
1192 best_internal = internal;
1194 best_n_nodes = VEC_length (cgraph_node_ptr,
1195 partition->cgraph_set->nodes);
1196 best_n_varpool_nodes = VEC_length (varpool_node_ptr,
1197 partition->varpool_set->nodes);
1199 if (cgraph_dump_file)
1200 fprintf (cgraph_dump_file, "Step %i: added %s, size %i, cost %i/%i best %i/%i, step %i\n", i,
1201 cgraph_node_name (order[i]), partition->insns, cost, internal,
1202 best_cost, best_internal, best_i);
1203 /* Partition is too large, unwind into step when best cost was reached and
1204 start new partition. */
1205 if (partition->insns > 2 * partition_size)
1209 if (cgraph_dump_file)
1210 fprintf (cgraph_dump_file, "Unwinding %i insertions to step %i\n",
1211 i - best_i, best_i);
1212 undo_partition (partition, best_n_nodes, best_n_varpool_nodes);
1215 partition = new_partition ("");
1216 last_visited_cgraph_node = 0;
1217 last_visited_varpool_node = 0;
1220 if (cgraph_dump_file)
1221 fprintf (cgraph_dump_file, "New partition\n");
1223 best_n_varpool_nodes = 0;
1224 best_cost = INT_MAX;
1226 /* Since the size of partitions is just approximate, update the size after
1227 we finished current one. */
1228 if (npartitions < PARAM_VALUE (PARAM_LTO_PARTITIONS))
1229 partition_size = total_size
1230 / (PARAM_VALUE (PARAM_LTO_PARTITIONS) - npartitions);
1232 partition_size = INT_MAX;
1234 if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
1235 partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
1240 /* Variables that are not reachable from the code go into last partition. */
1241 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
1242 if (partition_varpool_node_p (vnode) && !vnode->aux)
1243 add_varpool_node_to_partition (partition, vnode);
1247 /* Promote variable VNODE to be static. */
1250 promote_var (struct varpool_node *vnode)
1252 if (TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl))
1254 gcc_assert (flag_wpa);
1255 TREE_PUBLIC (vnode->decl) = 1;
1256 DECL_VISIBILITY (vnode->decl) = VISIBILITY_HIDDEN;
1257 DECL_VISIBILITY_SPECIFIED (vnode->decl) = true;
1258 if (cgraph_dump_file)
1259 fprintf (cgraph_dump_file,
1260 "Promoting var as hidden: %s\n", varpool_node_name (vnode));
1264 /* Promote function NODE to be static. */
1267 promote_fn (struct cgraph_node *node)
1269 gcc_assert (flag_wpa);
1270 if (TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl))
1272 TREE_PUBLIC (node->decl) = 1;
1273 DECL_VISIBILITY (node->decl) = VISIBILITY_HIDDEN;
1274 DECL_VISIBILITY_SPECIFIED (node->decl) = true;
1275 if (node->same_body)
1277 struct cgraph_node *alias;
1278 for (alias = node->same_body;
1279 alias; alias = alias->next)
1281 TREE_PUBLIC (alias->decl) = 1;
1282 DECL_VISIBILITY (alias->decl) = VISIBILITY_HIDDEN;
1283 DECL_VISIBILITY_SPECIFIED (alias->decl) = true;
1286 if (cgraph_dump_file)
1287 fprintf (cgraph_dump_file,
1288 "Promoting function as hidden: %s/%i\n",
1289 cgraph_node_name (node), node->uid);
1293 /* Find out all static decls that need to be promoted to global because
1294 of cross file sharing. This function must be run in the WPA mode after
1295 all inlinees are added. */
1298 lto_promote_cross_file_statics (void)
1300 struct varpool_node *vnode;
1302 cgraph_node_set set;
1303 varpool_node_set vset;
1304 cgraph_node_set_iterator csi;
1305 varpool_node_set_iterator vsi;
1306 VEC(varpool_node_ptr, heap) *promoted_initializers = NULL;
1307 struct pointer_set_t *inserted = pointer_set_create ();
1309 gcc_assert (flag_wpa);
1311 n_sets = VEC_length (ltrans_partition, ltrans_partitions);
1312 for (i = 0; i < n_sets; i++)
1314 ltrans_partition part = VEC_index (ltrans_partition, ltrans_partitions, i);
1315 set = part->cgraph_set;
1316 vset = part->varpool_set;
1318 /* If node has either address taken (and we have no clue from where)
1319 or it is called from other partition, it needs to be globalized. */
1320 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
1322 struct cgraph_node *node = csi_node (csi);
1323 if (node->local.externally_visible)
1325 if (node->global.inlined_to)
1327 if ((!DECL_EXTERNAL (node->decl) && !DECL_COMDAT (node->decl))
1328 && (referenced_from_other_partition_p (&node->ref_list, set, vset)
1329 || reachable_from_other_partition_p (node, set)))
1332 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
1334 vnode = vsi_node (vsi);
1335 /* Constant pool references use internal labels and thus can not
1336 be made global. It is sensible to keep those ltrans local to
1337 allow better optimization. */
1338 if (!DECL_IN_CONSTANT_POOL (vnode->decl) && !DECL_COMDAT (vnode->decl)
1339 && !vnode->externally_visible && vnode->analyzed
1340 && referenced_from_other_partition_p (&vnode->ref_list,
1342 promote_var (vnode);
1345 /* We export initializers of read-only var into each partition
1346 referencing it. Folding might take declarations from the
1347 initializers and use it; so everything referenced from the
1348 initializers needs can be accessed from this partition after
1351 This means that we need to promote all variables and functions
1352 referenced from all initializers from readonly vars referenced
1353 from this partition that are not in this partition.
1354 This needs to be done recursively. */
1355 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
1356 if (const_value_known_p (vnode->decl)
1357 && DECL_INITIAL (vnode->decl)
1358 && !varpool_node_in_set_p (vnode, vset)
1359 && referenced_from_this_partition_p (&vnode->ref_list, set, vset)
1360 && !pointer_set_insert (inserted, vnode))
1361 VEC_safe_push (varpool_node_ptr, heap, promoted_initializers, vnode);
1362 while (!VEC_empty (varpool_node_ptr, promoted_initializers))
1365 struct ipa_ref *ref;
1367 vnode = VEC_pop (varpool_node_ptr, promoted_initializers);
1368 for (i = 0; ipa_ref_list_reference_iterate (&vnode->ref_list, i, ref); i++)
1370 if (ref->refered_type == IPA_REF_CGRAPH)
1372 struct cgraph_node *n = ipa_ref_node (ref);
1373 gcc_assert (!n->global.inlined_to);
1374 if (!n->local.externally_visible
1375 && !cgraph_node_in_set_p (n, set))
1380 struct varpool_node *v = ipa_ref_varpool_node (ref);
1381 if (varpool_node_in_set_p (v, vset))
1383 /* Constant pool references use internal labels and thus can not
1384 be made global. It is sensible to keep those ltrans local to
1385 allow better optimization. */
1386 if (DECL_IN_CONSTANT_POOL (v->decl))
1388 if (!pointer_set_insert (inserted, vnode))
1389 VEC_safe_push (varpool_node_ptr, heap,
1390 promoted_initializers, v);
1392 else if (!DECL_IN_CONSTANT_POOL (v->decl)
1393 && !v->externally_visible && v->analyzed)
1396 && DECL_INITIAL (v->decl)
1397 && const_value_known_p (v->decl)
1398 && !pointer_set_insert (inserted, vnode))
1399 VEC_safe_push (varpool_node_ptr, heap,
1400 promoted_initializers, v);
1406 pointer_set_destroy (inserted);
1409 static lto_file *current_lto_file;
1411 /* Helper for qsort; compare partitions and return one with smaller size.
1412 We sort from greatest to smallest so parallel build doesn't stale on the
1413 longest compilation being executed too late. */
1416 cmp_partitions (const void *a, const void *b)
1418 const struct ltrans_partition_def *pa
1419 = *(struct ltrans_partition_def *const *)a;
1420 const struct ltrans_partition_def *pb
1421 = *(struct ltrans_partition_def *const *)b;
1422 return pb->insns - pa->insns;
1425 /* Write all output files in WPA mode and the file with the list of
1429 lto_wpa_write_files (void)
1433 cgraph_node_set set;
1434 varpool_node_set vset;
1435 ltrans_partition part;
1436 FILE *ltrans_output_list_stream;
1437 char *temp_filename;
1440 /* Open the LTRANS output list. */
1441 if (!ltrans_output_list)
1442 fatal_error ("no LTRANS output list filename provided");
1443 ltrans_output_list_stream = fopen (ltrans_output_list, "w");
1444 if (ltrans_output_list_stream == NULL)
1445 fatal_error ("opening LTRANS output list %s: %m", ltrans_output_list);
1447 timevar_push (TV_WHOPR_WPA);
1449 FOR_EACH_VEC_ELT (ltrans_partition, ltrans_partitions, i, part)
1450 lto_stats.num_output_cgraph_nodes += VEC_length (cgraph_node_ptr,
1451 part->cgraph_set->nodes);
1453 /* Find out statics that need to be promoted
1454 to globals with hidden visibility because they are accessed from multiple
1456 lto_promote_cross_file_statics ();
1458 timevar_pop (TV_WHOPR_WPA);
1460 timevar_push (TV_WHOPR_WPA_IO);
1462 /* Generate a prefix for the LTRANS unit files. */
1463 blen = strlen (ltrans_output_list);
1464 temp_filename = (char *) xmalloc (blen + sizeof ("2147483648.o"));
1465 strcpy (temp_filename, ltrans_output_list);
1466 if (blen > sizeof (".out")
1467 && strcmp (temp_filename + blen - sizeof (".out") + 1,
1469 temp_filename[blen - sizeof (".out") + 1] = '\0';
1470 blen = strlen (temp_filename);
1472 n_sets = VEC_length (ltrans_partition, ltrans_partitions);
1473 qsort (VEC_address (ltrans_partition, ltrans_partitions), n_sets,
1474 sizeof (ltrans_partition), cmp_partitions);
1475 for (i = 0; i < n_sets; i++)
1478 ltrans_partition part = VEC_index (ltrans_partition, ltrans_partitions, i);
1480 set = part->cgraph_set;
1481 vset = part->varpool_set;
1483 /* Write all the nodes in SET. */
1484 sprintf (temp_filename + blen, "%u.o", i);
1485 file = lto_obj_file_open (temp_filename, true);
1487 fatal_error ("lto_obj_file_open() failed");
1490 fprintf (stderr, " %s (%s %i insns)", temp_filename, part->name, part->insns);
1491 if (cgraph_dump_file)
1493 fprintf (cgraph_dump_file, "Writting partition %s to file %s, %i insns\n",
1494 part->name, temp_filename, part->insns);
1495 fprintf (cgraph_dump_file, "cgraph nodes:");
1496 dump_cgraph_node_set (cgraph_dump_file, set);
1497 fprintf (cgraph_dump_file, "varpool nodes:");
1498 dump_varpool_node_set (cgraph_dump_file, vset);
1500 gcc_assert (cgraph_node_set_nonempty_p (set)
1501 || varpool_node_set_nonempty_p (vset) || !i);
1503 lto_set_current_out_file (file);
1505 ipa_write_optimization_summaries (set, vset);
1507 lto_set_current_out_file (NULL);
1508 lto_obj_file_close (file);
1510 len = strlen (temp_filename);
1511 if (fwrite (temp_filename, 1, len, ltrans_output_list_stream) < len
1512 || fwrite ("\n", 1, 1, ltrans_output_list_stream) < 1)
1513 fatal_error ("writing to LTRANS output list %s: %m",
1514 ltrans_output_list);
1517 lto_stats.num_output_files += n_sets;
1519 /* Close the LTRANS output list. */
1520 if (fclose (ltrans_output_list_stream))
1521 fatal_error ("closing LTRANS output list %s: %m", ltrans_output_list);
1523 timevar_pop (TV_WHOPR_WPA_IO);
1528 struct pointer_set_t *seen;
1531 #define LTO_FIXUP_SUBTREE(t) \
1533 walk_tree (&(t), lto_fixup_tree, data, NULL); \
1536 #define LTO_REGISTER_TYPE_AND_FIXUP_SUBTREE(t) \
1540 (t) = gimple_register_type (t); \
1541 walk_tree (&(t), lto_fixup_tree, data, NULL); \
1545 static tree lto_fixup_tree (tree *, int *, void *);
1547 /* Return true if T does not need to be fixed up recursively. */
1553 || CONSTANT_CLASS_P (t)
1554 || TREE_CODE (t) == IDENTIFIER_NODE);
1557 /* Fix up fields of a tree_common T. DATA points to fix-up states. */
1560 lto_fixup_common (tree t, void *data)
1562 /* The following re-creates the TYPE_REFERENCE_TO and TYPE_POINTER_TO
1563 lists. We do not stream TYPE_REFERENCE_TO, TYPE_POINTER_TO or
1564 TYPE_NEXT_PTR_TO and TYPE_NEXT_REF_TO.
1565 First remove us from any pointer list we are on. */
1566 if (TREE_CODE (t) == POINTER_TYPE)
1568 if (TYPE_POINTER_TO (TREE_TYPE (t)) == t)
1569 TYPE_POINTER_TO (TREE_TYPE (t)) = TYPE_NEXT_PTR_TO (t);
1572 tree tem = TYPE_POINTER_TO (TREE_TYPE (t));
1573 while (tem && TYPE_NEXT_PTR_TO (tem) != t)
1574 tem = TYPE_NEXT_PTR_TO (tem);
1576 TYPE_NEXT_PTR_TO (tem) = TYPE_NEXT_PTR_TO (t);
1578 TYPE_NEXT_PTR_TO (t) = NULL_TREE;
1580 else if (TREE_CODE (t) == REFERENCE_TYPE)
1582 if (TYPE_REFERENCE_TO (TREE_TYPE (t)) == t)
1583 TYPE_REFERENCE_TO (TREE_TYPE (t)) = TYPE_NEXT_REF_TO (t);
1586 tree tem = TYPE_REFERENCE_TO (TREE_TYPE (t));
1587 while (tem && TYPE_NEXT_REF_TO (tem) != t)
1588 tem = TYPE_NEXT_REF_TO (tem);
1590 TYPE_NEXT_REF_TO (tem) = TYPE_NEXT_REF_TO (t);
1592 TYPE_NEXT_REF_TO (t) = NULL_TREE;
1595 /* Fixup our type. */
1596 LTO_REGISTER_TYPE_AND_FIXUP_SUBTREE (TREE_TYPE (t));
1598 /* Second put us on the list of pointers of the new pointed-to type
1599 if we are a main variant. This is done in lto_fixup_type after
1600 fixing up our main variant. */
1602 /* This is not very efficient because we cannot do tail-recursion with
1603 a long chain of trees. */
1604 LTO_FIXUP_SUBTREE (TREE_CHAIN (t));
1607 /* Fix up fields of a decl_minimal T. DATA points to fix-up states. */
1610 lto_fixup_decl_minimal (tree t, void *data)
1612 lto_fixup_common (t, data);
1613 LTO_FIXUP_SUBTREE (DECL_NAME (t));
1614 LTO_FIXUP_SUBTREE (DECL_CONTEXT (t));
1617 /* Fix up fields of a decl_common T. DATA points to fix-up states. */
1620 lto_fixup_decl_common (tree t, void *data)
1622 lto_fixup_decl_minimal (t, data);
1623 LTO_FIXUP_SUBTREE (DECL_SIZE (t));
1624 LTO_FIXUP_SUBTREE (DECL_SIZE_UNIT (t));
1625 LTO_FIXUP_SUBTREE (DECL_INITIAL (t));
1626 LTO_FIXUP_SUBTREE (DECL_ATTRIBUTES (t));
1627 LTO_FIXUP_SUBTREE (DECL_ABSTRACT_ORIGIN (t));
1630 /* Fix up fields of a decl_with_vis T. DATA points to fix-up states. */
1633 lto_fixup_decl_with_vis (tree t, void *data)
1635 lto_fixup_decl_common (t, data);
1637 /* Accessor macro has side-effects, use field-name here. */
1638 LTO_FIXUP_SUBTREE (t->decl_with_vis.assembler_name);
1640 gcc_assert (no_fixup_p (DECL_SECTION_NAME (t)));
1643 /* Fix up fields of a decl_non_common T. DATA points to fix-up states. */
1646 lto_fixup_decl_non_common (tree t, void *data)
1648 lto_fixup_decl_with_vis (t, data);
1649 LTO_FIXUP_SUBTREE (DECL_ARGUMENT_FLD (t));
1650 LTO_FIXUP_SUBTREE (DECL_RESULT_FLD (t));
1651 LTO_FIXUP_SUBTREE (DECL_VINDEX (t));
1653 /* SAVED_TREE should not cleared by now. Also no accessor for base type. */
1654 gcc_assert (no_fixup_p (t->decl_non_common.saved_tree));
1657 /* Fix up fields of a decl_non_common T. DATA points to fix-up states. */
1660 lto_fixup_function (tree t, void *data)
1662 lto_fixup_decl_non_common (t, data);
1663 LTO_FIXUP_SUBTREE (DECL_FUNCTION_PERSONALITY (t));
1666 /* Fix up fields of a field_decl T. DATA points to fix-up states. */
1669 lto_fixup_field_decl (tree t, void *data)
1671 lto_fixup_decl_common (t, data);
1672 LTO_FIXUP_SUBTREE (DECL_FIELD_OFFSET (t));
1673 LTO_FIXUP_SUBTREE (DECL_BIT_FIELD_TYPE (t));
1674 LTO_FIXUP_SUBTREE (DECL_QUALIFIER (t));
1675 gcc_assert (no_fixup_p (DECL_FIELD_BIT_OFFSET (t)));
1676 LTO_FIXUP_SUBTREE (DECL_FCONTEXT (t));
1679 /* Fix up fields of a type T. DATA points to fix-up states. */
1682 lto_fixup_type (tree t, void *data)
1686 lto_fixup_common (t, data);
1687 LTO_FIXUP_SUBTREE (TYPE_CACHED_VALUES (t));
1688 LTO_FIXUP_SUBTREE (TYPE_SIZE (t));
1689 LTO_FIXUP_SUBTREE (TYPE_SIZE_UNIT (t));
1690 LTO_FIXUP_SUBTREE (TYPE_ATTRIBUTES (t));
1691 LTO_FIXUP_SUBTREE (TYPE_NAME (t));
1693 /* Accessors are for derived node types only. */
1694 if (!POINTER_TYPE_P (t))
1695 LTO_FIXUP_SUBTREE (t->type.minval);
1696 LTO_FIXUP_SUBTREE (t->type.maxval);
1698 /* Accessor is for derived node types only. */
1699 LTO_FIXUP_SUBTREE (t->type.binfo);
1701 if (TYPE_CONTEXT (t))
1703 if (TYPE_P (TYPE_CONTEXT (t)))
1704 LTO_REGISTER_TYPE_AND_FIXUP_SUBTREE (TYPE_CONTEXT (t));
1706 LTO_FIXUP_SUBTREE (TYPE_CONTEXT (t));
1709 /* TYPE_CANONICAL does not need to be fixed up, instead it should
1710 always point to ourselves at this time as we never fixup
1711 non-canonical ones. */
1712 gcc_assert (TYPE_CANONICAL (t) == t);
1714 /* The following re-creates proper variant lists while fixing up
1715 the variant leaders. We do not stream TYPE_NEXT_VARIANT so the
1716 variant list state before fixup is broken. */
1718 /* Remove us from our main variant list if we are not the variant leader. */
1719 if (TYPE_MAIN_VARIANT (t) != t)
1721 tem = TYPE_MAIN_VARIANT (t);
1722 while (tem && TYPE_NEXT_VARIANT (tem) != t)
1723 tem = TYPE_NEXT_VARIANT (tem);
1725 TYPE_NEXT_VARIANT (tem) = TYPE_NEXT_VARIANT (t);
1726 TYPE_NEXT_VARIANT (t) = NULL_TREE;
1729 /* Query our new main variant. */
1730 mv = gimple_register_type (TYPE_MAIN_VARIANT (t));
1732 /* If we were the variant leader and we get replaced ourselves drop
1733 all variants from our list. */
1734 if (TYPE_MAIN_VARIANT (t) == t
1740 tree tem2 = TYPE_NEXT_VARIANT (tem);
1741 TYPE_NEXT_VARIANT (tem) = NULL_TREE;
1746 /* If we are not our own variant leader link us into our new leaders
1750 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
1751 TYPE_NEXT_VARIANT (mv) = t;
1754 /* Finally adjust our main variant and fix it up. */
1755 TYPE_MAIN_VARIANT (t) = mv;
1756 LTO_FIXUP_SUBTREE (TYPE_MAIN_VARIANT (t));
1758 /* As the second step of reconstructing the pointer chains put us
1759 on the list of pointers of the new pointed-to type
1760 if we are a main variant. See lto_fixup_common for the first step. */
1761 if (TREE_CODE (t) == POINTER_TYPE
1762 && TYPE_MAIN_VARIANT (t) == t)
1764 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (TREE_TYPE (t));
1765 TYPE_POINTER_TO (TREE_TYPE (t)) = t;
1767 else if (TREE_CODE (t) == REFERENCE_TYPE
1768 && TYPE_MAIN_VARIANT (t) == t)
1770 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (TREE_TYPE (t));
1771 TYPE_REFERENCE_TO (TREE_TYPE (t)) = t;
1775 /* Fix up fields of a BINFO T. DATA points to fix-up states. */
1778 lto_fixup_binfo (tree t, void *data)
1780 unsigned HOST_WIDE_INT i, n;
1781 tree base, saved_base;
1783 lto_fixup_common (t, data);
1784 gcc_assert (no_fixup_p (BINFO_OFFSET (t)));
1785 LTO_FIXUP_SUBTREE (BINFO_VTABLE (t));
1786 LTO_FIXUP_SUBTREE (BINFO_VIRTUALS (t));
1787 LTO_FIXUP_SUBTREE (BINFO_VPTR_FIELD (t));
1788 n = VEC_length (tree, BINFO_BASE_ACCESSES (t));
1789 for (i = 0; i < n; i++)
1791 saved_base = base = BINFO_BASE_ACCESS (t, i);
1792 LTO_FIXUP_SUBTREE (base);
1793 if (base != saved_base)
1794 VEC_replace (tree, BINFO_BASE_ACCESSES (t), i, base);
1796 LTO_FIXUP_SUBTREE (BINFO_INHERITANCE_CHAIN (t));
1797 LTO_FIXUP_SUBTREE (BINFO_SUBVTT_INDEX (t));
1798 LTO_FIXUP_SUBTREE (BINFO_VPTR_INDEX (t));
1799 n = BINFO_N_BASE_BINFOS (t);
1800 for (i = 0; i < n; i++)
1802 saved_base = base = BINFO_BASE_BINFO (t, i);
1803 LTO_FIXUP_SUBTREE (base);
1804 if (base != saved_base)
1805 VEC_replace (tree, BINFO_BASE_BINFOS (t), i, base);
1809 /* Fix up fields of a CONSTRUCTOR T. DATA points to fix-up states. */
1812 lto_fixup_constructor (tree t, void *data)
1814 unsigned HOST_WIDE_INT idx;
1815 constructor_elt *ce;
1817 LTO_REGISTER_TYPE_AND_FIXUP_SUBTREE (TREE_TYPE (t));
1820 VEC_iterate(constructor_elt, CONSTRUCTOR_ELTS (t), idx, ce);
1823 LTO_FIXUP_SUBTREE (ce->index);
1824 LTO_FIXUP_SUBTREE (ce->value);
1828 /* A walk_tree callback used by lto_fixup_state. TP is the pointer to the
1829 current tree. WALK_SUBTREES indicates if the subtrees will be walked.
1830 DATA is a pointer set to record visited nodes. */
1833 lto_fixup_tree (tree *tp, int *walk_subtrees, void *data)
1836 lto_fixup_data_t *fixup_data = (lto_fixup_data_t *) data;
1841 if (!t || pointer_set_contains (fixup_data->seen, t))
1844 if (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == FUNCTION_DECL)
1846 prevailing = lto_symtab_prevailing_decl (t);
1848 if (t != prevailing)
1850 /* Also replace t with prevailing defintion. We don't want to
1851 insert the other defintion in the seen set as we want to
1852 replace all instances of it. */
1857 else if (TYPE_P (t))
1859 /* Replace t with the prevailing type. We don't want to insert the
1860 other type in the seen set as we want to replace all instances of it. */
1861 t = gimple_register_type (t);
1865 if (pointer_set_insert (fixup_data->seen, t))
1868 /* walk_tree does not visit all reachable nodes that need to be fixed up.
1869 Hence we do special processing here for those kind of nodes. */
1870 switch (TREE_CODE (t))
1873 lto_fixup_field_decl (t, data);
1881 lto_fixup_decl_common (t, data);
1885 lto_fixup_decl_with_vis (t, data);
1889 lto_fixup_decl_non_common (t, data);
1893 lto_fixup_function (t, data);
1897 lto_fixup_binfo (t, data);
1902 lto_fixup_type (t, data);
1903 else if (TREE_CODE (t) == CONSTRUCTOR)
1904 lto_fixup_constructor (t, data);
1905 else if (CONSTANT_CLASS_P (t))
1906 LTO_REGISTER_TYPE_AND_FIXUP_SUBTREE (TREE_TYPE (t));
1907 else if (EXPR_P (t))
1909 /* walk_tree only handles TREE_OPERANDs. Do the rest here. */
1910 lto_fixup_common (t, data);
1911 LTO_FIXUP_SUBTREE (t->exp.block);
1916 /* Let walk_tree handle sub-trees. */
1924 /* Helper function of lto_fixup_decls. Walks the var and fn streams in STATE,
1925 replaces var and function decls with the corresponding prevailing def and
1926 records the old decl in the free-list in DATA. We also record visted nodes
1927 in the seen-set in DATA to avoid multiple visit for nodes that need not
1931 lto_fixup_state (struct lto_in_decl_state *state, lto_fixup_data_t *data)
1934 struct lto_tree_ref_table *table;
1936 /* Although we only want to replace FUNCTION_DECLs and VAR_DECLs,
1937 we still need to walk from all DECLs to find the reachable
1938 FUNCTION_DECLs and VAR_DECLs. */
1939 for (si = 0; si < LTO_N_DECL_STREAMS; si++)
1941 table = &state->streams[si];
1942 for (i = 0; i < table->size; i++)
1943 walk_tree (table->trees + i, lto_fixup_tree, data, NULL);
1947 /* A callback of htab_traverse. Just extract a state from SLOT and the
1948 lto_fixup_data_t object from AUX and calls lto_fixup_state. */
1951 lto_fixup_state_aux (void **slot, void *aux)
1953 struct lto_in_decl_state *state = (struct lto_in_decl_state *) *slot;
1954 lto_fixup_state (state, (lto_fixup_data_t *) aux);
1958 /* Fix the decls from all FILES. Replaces each decl with the corresponding
1962 lto_fixup_decls (struct lto_file_decl_data **files)
1966 struct pointer_set_t *seen = pointer_set_create ();
1967 lto_fixup_data_t data;
1970 for (i = 0; files[i]; i++)
1972 struct lto_file_decl_data *file = files[i];
1973 struct lto_in_decl_state *state = file->global_decl_state;
1974 lto_fixup_state (state, &data);
1976 htab_traverse (file->function_decl_states, lto_fixup_state_aux, &data);
1979 FOR_EACH_VEC_ELT (tree, lto_global_var_decls, i, decl)
1981 tree saved_decl = decl;
1982 walk_tree (&decl, lto_fixup_tree, &data, NULL);
1983 if (decl != saved_decl)
1984 VEC_replace (tree, lto_global_var_decls, i, decl);
1987 pointer_set_destroy (seen);
1990 /* Read the options saved from each file in the command line. Called
1991 from lang_hooks.post_options which is called by process_options
1992 right before all the options are used to initialize the compiler.
1993 This assumes that decode_options has already run, so the
1994 num_in_fnames and in_fnames are properly set.
1996 Note that this assumes that all the files had been compiled with
1997 the same options, which is not a good assumption. In general,
1998 options ought to be read from all the files in the set and merged.
1999 However, it is still unclear what the merge rules should be. */
2002 lto_read_all_file_options (void)
2006 /* Clear any file options currently saved. */
2007 lto_clear_file_options ();
2009 /* Set the hooks to read ELF sections. */
2010 lto_set_in_hooks (NULL, get_section_data, free_section_data);
2012 fprintf (stderr, "Reading command line options:");
2014 for (i = 0; i < num_in_fnames; i++)
2016 struct lto_file_decl_data *file_data;
2017 lto_file *file = lto_obj_file_open (in_fnames[i], false);
2022 fprintf (stderr, " %s", in_fnames[i]);
2026 file_data = XCNEW (struct lto_file_decl_data);
2027 file_data->file_name = file->filename;
2028 file_data->section_hash_table = lto_obj_build_section_table (file);
2030 lto_read_file_options (file_data);
2032 lto_obj_file_close (file);
2033 htab_delete (file_data->section_hash_table);
2038 fprintf (stderr, "\n");
2040 /* Apply globally the options read from all the files. */
2041 lto_reissue_options ();
2044 static GTY((length ("lto_stats.num_input_files + 1"))) struct lto_file_decl_data **all_file_decl_data;
2046 /* Turn file datas for sub files into a single array, so that they look
2047 like separate files for further passes. */
2050 lto_flatten_files (struct lto_file_decl_data **orig, int count, int last_file_ix)
2052 struct lto_file_decl_data *n, *next;
2055 lto_stats.num_input_files = count;
2057 = ggc_alloc_cleared_vec_lto_file_decl_data_ptr (count + 1);
2058 /* Set the hooks so that all of the ipa passes can read in their data. */
2059 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
2060 for (i = 0, k = 0; i < last_file_ix; i++)
2062 for (n = orig[i]; n != NULL; n = next)
2064 all_file_decl_data[k++] = n;
2069 all_file_decl_data[k] = NULL;
2070 gcc_assert (k == count);
2073 /* Input file data before flattening (i.e. splitting them to subfiles to support
2074 incremental linking. */
2075 static int real_file_count;
2076 static GTY((length ("real_file_count + 1"))) struct lto_file_decl_data **real_file_decl_data;
2078 /* Read all the symbols from the input files FNAMES. NFILES is the
2079 number of files requested in the command line. Instantiate a
2080 global call graph by aggregating all the sub-graphs found in each
2084 read_cgraph_and_symbols (unsigned nfiles, const char **fnames)
2086 unsigned int i, last_file_ix;
2088 struct cgraph_node *node;
2090 struct lto_file_decl_data **decl_data;
2094 timevar_push (TV_IPA_LTO_DECL_IN);
2097 = decl_data = ggc_alloc_cleared_vec_lto_file_decl_data_ptr (nfiles + 1);
2098 real_file_count = nfiles;
2100 /* Read the resolution file. */
2102 if (resolution_file_name)
2105 unsigned num_objects;
2107 resolution = fopen (resolution_file_name, "r");
2108 if (resolution == NULL)
2109 fatal_error ("could not open symbol resolution file: %m");
2111 t = fscanf (resolution, "%u", &num_objects);
2112 gcc_assert (t == 1);
2114 /* True, since the plugin splits the archives. */
2115 gcc_assert (num_objects == nfiles);
2119 fprintf (stderr, "Reading object files:");
2121 /* Read all of the object files specified on the command line. */
2122 for (i = 0, last_file_ix = 0; i < nfiles; ++i)
2124 struct lto_file_decl_data *file_data = NULL;
2127 fprintf (stderr, " %s", fnames[i]);
2131 current_lto_file = lto_obj_file_open (fnames[i], false);
2132 if (!current_lto_file)
2135 file_data = lto_file_read (current_lto_file, resolution, &count);
2139 decl_data[last_file_ix++] = file_data;
2141 lto_obj_file_close (current_lto_file);
2142 current_lto_file = NULL;
2146 lto_flatten_files (decl_data, count, last_file_ix);
2147 lto_stats.num_input_files = count;
2148 ggc_free(decl_data);
2149 real_file_decl_data = NULL;
2151 if (resolution_file_name)
2152 fclose (resolution);
2154 /* Set the hooks so that all of the ipa passes can read in their data. */
2155 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
2157 timevar_pop (TV_IPA_LTO_DECL_IN);
2160 fprintf (stderr, "\nReading the callgraph\n");
2162 timevar_push (TV_IPA_LTO_CGRAPH_IO);
2163 /* Read the callgraph. */
2165 timevar_pop (TV_IPA_LTO_CGRAPH_IO);
2168 fprintf (stderr, "Merging declarations\n");
2170 timevar_push (TV_IPA_LTO_DECL_MERGE);
2171 /* Merge global decls. */
2172 lto_symtab_merge_decls ();
2174 /* Fixup all decls and types and free the type hash tables. */
2175 lto_fixup_decls (all_file_decl_data);
2176 free_gimple_type_tables ();
2179 timevar_pop (TV_IPA_LTO_DECL_MERGE);
2180 /* Each pass will set the appropriate timer. */
2183 fprintf (stderr, "Reading summaries\n");
2185 /* Read the IPA summary data. */
2187 ipa_read_optimization_summaries ();
2189 ipa_read_summaries ();
2191 /* Finally merge the cgraph according to the decl merging decisions. */
2192 timevar_push (TV_IPA_LTO_CGRAPH_MERGE);
2193 if (cgraph_dump_file)
2195 fprintf (cgraph_dump_file, "Before merging:\n");
2196 dump_cgraph (cgraph_dump_file);
2197 dump_varpool (cgraph_dump_file);
2199 lto_symtab_merge_cgraph_nodes ();
2203 for (node = cgraph_nodes; node; node = node->next)
2205 /* FIXME: ipa_transforms_to_apply holds list of passes that have optimization
2206 summaries computed and needs to apply changes. At the moment WHOPR only
2207 supports inlining, so we can push it here by hand. In future we need to stream
2208 this field into ltrans compilation. */
2210 VEC_safe_push (ipa_opt_pass, heap,
2211 node->ipa_transforms_to_apply,
2212 (ipa_opt_pass)&pass_ipa_inline);
2216 timevar_pop (TV_IPA_LTO_CGRAPH_MERGE);
2218 timevar_push (TV_IPA_LTO_DECL_INIT_IO);
2220 /* FIXME lto. This loop needs to be changed to use the pass manager to
2221 call the ipa passes directly. */
2223 for (i = 0; i < last_file_ix; i++)
2225 struct lto_file_decl_data *file_data = all_file_decl_data [i];
2226 lto_materialize_constructors_and_inits (file_data);
2229 /* Indicate that the cgraph is built and ready. */
2230 cgraph_function_flags_ready = true;
2232 timevar_pop (TV_IPA_LTO_DECL_INIT_IO);
2233 ggc_free (all_file_decl_data);
2234 all_file_decl_data = NULL;
2238 /* Materialize all the bodies for all the nodes in the callgraph. */
2241 materialize_cgraph (void)
2244 struct cgraph_node *node;
2246 timevar_id_t lto_timer;
2250 flag_wpa ? "Materializing decls:" : "Reading function bodies:");
2253 /* Now that we have input the cgraph, we need to clear all of the aux
2254 nodes and read the functions if we are not running in WPA mode. */
2255 timevar_push (TV_IPA_LTO_GIMPLE_IN);
2257 for (node = cgraph_nodes; node; node = node->next)
2259 if (node->local.lto_file_data)
2261 lto_materialize_function (node);
2262 lto_stats.num_input_cgraph_nodes++;
2266 timevar_pop (TV_IPA_LTO_GIMPLE_IN);
2268 /* Start the appropriate timer depending on the mode that we are
2270 lto_timer = (flag_wpa) ? TV_WHOPR_WPA
2271 : (flag_ltrans) ? TV_WHOPR_LTRANS
2273 timevar_push (lto_timer);
2275 current_function_decl = NULL;
2278 /* Inform the middle end about the global variables we have seen. */
2279 FOR_EACH_VEC_ELT (tree, lto_global_var_decls, i, decl)
2280 rest_of_decl_compilation (decl, 1, 0);
2283 fprintf (stderr, "\n");
2285 timevar_pop (lto_timer);
2289 /* Perform whole program analysis (WPA) on the callgraph and write out the
2290 optimization plan. */
2293 do_whole_program_analysis (void)
2295 /* Note that since we are in WPA mode, materialize_cgraph will not
2296 actually read in all the function bodies. It only materializes
2297 the decls and cgraph nodes so that analysis can be performed. */
2298 materialize_cgraph ();
2300 /* Reading in the cgraph uses different timers, start timing WPA now. */
2301 timevar_push (TV_WHOPR_WPA);
2303 if (pre_ipa_mem_report)
2305 fprintf (stderr, "Memory consumption before IPA\n");
2306 dump_memory_report (false);
2309 if (cgraph_dump_file)
2311 dump_cgraph (cgraph_dump_file);
2312 dump_varpool (cgraph_dump_file);
2315 cgraph_function_flags_ready = true;
2316 bitmap_obstack_initialize (NULL);
2317 ipa_register_cgraph_hooks ();
2318 cgraph_state = CGRAPH_STATE_IPA_SSA;
2320 execute_ipa_pass_list (all_regular_ipa_passes);
2322 if (cgraph_dump_file)
2324 fprintf (cgraph_dump_file, "Optimized ");
2325 dump_cgraph (cgraph_dump_file);
2326 dump_varpool (cgraph_dump_file);
2329 bitmap_obstack_release (NULL);
2331 /* We are about to launch the final LTRANS phase, stop the WPA timer. */
2332 timevar_pop (TV_WHOPR_WPA);
2334 if (flag_lto_partition_1to1)
2337 lto_balanced_map ();
2341 fprintf (stderr, "\nStreaming out");
2344 lto_wpa_write_files ();
2347 fprintf (stderr, "\n");
2349 if (post_ipa_mem_report)
2351 fprintf (stderr, "Memory consumption after IPA\n");
2352 dump_memory_report (false);
2355 /* Show the LTO report before launching LTRANS. */
2356 if (flag_lto_report)
2357 print_lto_report ();
2361 static GTY(()) tree lto_eh_personality_decl;
2363 /* Return the LTO personality function decl. */
2366 lto_eh_personality (void)
2368 if (!lto_eh_personality_decl)
2370 /* Use the first personality DECL for our personality if we don't
2371 support multiple ones. This ensures that we don't artificially
2372 create the need for them in a single-language program. */
2373 if (first_personality_decl && !dwarf2out_do_cfi_asm ())
2374 lto_eh_personality_decl = first_personality_decl;
2376 lto_eh_personality_decl = lhd_gcc_personality ();
2379 return lto_eh_personality_decl;
2382 /* Set the process name based on the LTO mode. */
2385 lto_process_name (void)
2388 setproctitle ("lto1-lto");
2390 setproctitle ("lto1-wpa");
2392 setproctitle ("lto1-ltrans");
2395 /* Main entry point for the GIMPLE front end. This front end has
2396 three main personalities:
2398 - LTO (-flto). All the object files on the command line are
2399 loaded in memory and processed as a single translation unit.
2400 This is the traditional link-time optimization behavior.
2402 - WPA (-fwpa). Only the callgraph and summary information for
2403 files in the command file are loaded. A single callgraph
2404 (without function bodies) is instantiated for the whole set of
2405 files. IPA passes are only allowed to analyze the call graph
2406 and make transformation decisions. The callgraph is
2407 partitioned, each partition is written to a new object file
2408 together with the transformation decisions.
2410 - LTRANS (-fltrans). Similar to -flto but it prevents the IPA
2411 summary files from running again. Since WPA computed summary
2412 information and decided what transformations to apply, LTRANS
2413 simply applies them. */
2416 lto_main (int debug_p ATTRIBUTE_UNUSED)
2418 lto_process_name ();
2422 /* Read all the symbols and call graph from all the files in the
2424 read_cgraph_and_symbols (num_in_fnames, in_fnames);
2428 /* If WPA is enabled analyze the whole call graph and create an
2429 optimization plan. Otherwise, read in all the function
2430 bodies and continue with optimization. */
2432 do_whole_program_analysis ();
2435 materialize_cgraph ();
2437 /* Let the middle end know that we have read and merged all of
2441 /* FIXME lto, if the processes spawned by WPA fail, we miss
2442 the chance to print WPA's report, so WPA will call
2443 print_lto_report before launching LTRANS. If LTRANS was
2444 launched directly by the driver we would not need to do
2446 if (flag_lto_report)
2447 print_lto_report ();
2452 #include "gt-lto-lto.h"