1 /* Top-level LTO routines.
2 Copyright 2009, 2010 Free Software Foundation, Inc.
3 Contributed by CodeSourcery, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "diagnostic-core.h"
29 #include "libiberty.h"
32 #include "tree-ssa-operands.h"
33 #include "tree-pass.h"
34 #include "langhooks.h"
37 #include "pointer-set.h"
45 #include "lto-streamer.h"
46 #include "splay-tree.h"
49 /* This needs to be included after config.h. Otherwise, _GNU_SOURCE will not
50 be defined in time to set __USE_GNU in the system headers, and strsignal
51 will not be declared. */
56 /* Handle opening elf files on hosts, such as Windows, that may use
57 text file handling that will break binary access. */
/* First DECL_FUNCTION_PERSONALITY seen while materializing function
   bodies; recorded in lto_materialize_function and GC-rooted via GTY.  */
63 static GTY(()) tree first_personality_decl;
65 /* Hashtab hash callback: returns a hash code for P, a struct
   lto_section_slot, keyed on its section NAME string.  */
68 hash_name (const void *p)
70 const struct lto_section_slot *ds = (const struct lto_section_slot *) p;
71 return (hashval_t) htab_hash_string (ds->name);
75 /* Hashtab equality callback: returns nonzero if the section slots
   P1 and P2 carry the same section name.  */
78 eq_name (const void *p1, const void *p2)
80 const struct lto_section_slot *s1 =
81 (const struct lto_section_slot *) p1;
82 const struct lto_section_slot *s2 =
83 (const struct lto_section_slot *) p2;
85 return strcmp (s1->name, s2->name) == 0;
88 /* Hashtab del callback: free an lto_section_slot, including the name
   string it owns.  (The slot itself is presumably freed on a line not
   shown here — TODO confirm against the full source.)  */
91 free_with_string (void *arg)
93 struct lto_section_slot *s = (struct lto_section_slot *)arg;
95 free (CONST_CAST (char *, s->name));
99 /* Create a fresh section hash table keyed by section name, using the
   hash/equality/free callbacks defined above.  37 is just an initial
   size hint for htab_create.  */
102 lto_obj_create_section_hash_table (void)
104 return htab_create (37, hash_name, eq_name, free_with_string);
107 /* Read the static constructors and initializers from FILE_DATA's
   LTO_section_static_initializer section, stream them in, and release
   the section buffer again.  */
110 lto_materialize_constructors_and_inits (struct lto_file_decl_data * file_data)
113 const char *data = lto_get_section_data (file_data,
114 LTO_section_static_initializer,
116 lto_input_constructors_and_inits (file_data, data);
117 lto_free_section_data (file_data, LTO_section_static_initializer, NULL,
121 /* Return true when NODE has a clone that is analyzed (i.e. we need
122 to load its body even if the node itself is not needed).
   Walks the clone tree rooted at NODE without recursion: descend via
   next_sibling_clone, and when a subtree is exhausted climb back up
   through clone_of until another sibling is found or we return to
   the original node.  */
125 has_analyzed_clone_p (struct cgraph_node *node)
127 struct cgraph_node *orig = node;
136 else if (node->next_sibling_clone)
137 node = node->next_sibling_clone;
      /* Subtree done: climb until a sibling exists or we are back at ORIG.  */
140 while (node != orig && !node->next_sibling_clone)
141 node = node->clone_of;
143 node = node->next_sibling_clone;
149 /* Read the function body for the function associated with NODE.
   The body is read (and streamed in unless running in WPA mode) when
   NODE is analyzed or needed to produce a virtual clone; records the
   first personality routine seen in first_personality_decl.  */
152 lto_materialize_function (struct cgraph_node *node)
155 struct lto_file_decl_data *file_data;
156 const char *data, *name;
160 /* Read in functions with body (analyzed nodes)
161 and also functions that are needed to produce virtual clones. */
162 if (node->analyzed || has_analyzed_clone_p (node))
164 /* Clones don't need to be read. */
167 file_data = node->local.lto_file_data;
168 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
170 /* We may have renamed the declaration, e.g., a static function. */
171 name = lto_get_decl_name_mapping (file_data, name);
      /* Missing body section is a hard error: the IL is unusable.  */
173 data = lto_get_section_data (file_data, LTO_section_function_body,
176 fatal_error ("%s: section %s is missing",
177 file_data->file_name,
180 gcc_assert (DECL_STRUCT_FUNCTION (decl) == NULL);
182 /* Load the function body only if not operating in WPA mode. In
183 WPA mode, the body of the function is not needed. */
186 allocate_struct_function (decl, false);
187 announce_function (decl);
188 lto_input_function_body (file_data, decl, data);
      /* Remember the first personality routine; used as the default later.  */
189 if (DECL_FUNCTION_PERSONALITY (decl) && !first_personality_decl)
190 first_personality_decl = DECL_FUNCTION_PERSONALITY (decl);
191 lto_stats.num_function_bodies++;
194 lto_free_section_data (file_data, LTO_section_function_body, name,
200 /* Let the middle end know about the function. */
201 rest_of_decl_compilation (decl, 1, 0);
205 /* Decode the content of memory pointed to by DATA in the
206 in decl state object STATE. DATA_IN points to a data_in structure for
207 decoding. Return the address after the decoded object in the input. */
209 static const uint32_t *
210 lto_read_in_decl_state (struct data_in *data_in, const uint32_t *data,
211 struct lto_in_decl_state *state)
      /* First word indexes the owning FUNCTION_DECL in the reader cache;
         the global decl state uses void_type_node as a placeholder.  */
218 decl = lto_streamer_cache_get (data_in->reader_cache, (int) ix);
219 if (TREE_CODE (decl) != FUNCTION_DECL)
221 gcc_assert (decl == void_type_node);
224 state->fn_decl = decl;
      /* Then one stream per LTO decl stream: a size word followed by
         SIZE cache indices.  */
226 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
228 uint32_t size = *data++;
229 tree *decls = ggc_alloc_vec_tree (size);
231 for (j = 0; j < size; j++)
233 decls[j] = lto_streamer_cache_get (data_in->reader_cache, data[j]);
235 /* Register every type in the global type table. If the
236 type existed already, use the existing type. */
237 if (TYPE_P (decls[j]))
238 decls[j] = gimple_register_type (decls[j]);
241 state->streams[i].size = size;
242 state->streams[i].trees = decls;
250 /* Read all the symbols from buffer DATA, using descriptors in DECL_DATA.
251 RESOLUTIONS is the set of symbols picked by the linker (read from the
252 resolution file when the linker plugin is being used).
   The buffer layout is: lto_decl_header, decl-state block, main tree
   stream, string table — offsets are computed from the header.  */
255 lto_read_decls (struct lto_file_decl_data *decl_data, const void *data,
256 VEC(ld_plugin_symbol_resolution_t,heap) *resolutions)
258 const struct lto_decl_header *header = (const struct lto_decl_header *) data;
259 const int32_t decl_offset = sizeof (struct lto_decl_header);
260 const int32_t main_offset = decl_offset + header->decl_state_size;
261 const int32_t string_offset = main_offset + header->main_size;
262 struct lto_input_block ib_main;
263 struct data_in *data_in;
265 const uint32_t *data_ptr, *data_end;
266 uint32_t num_decl_states;
268 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
271 data_in = lto_data_in_create (decl_data, (const char *) data + string_offset,
272 header->string_size, resolutions);
274 /* Read the global declarations and types. */
275 while (ib_main.p < ib_main.len)
277 tree t = lto_input_tree (&ib_main, data_in);
278 gcc_assert (t && ib_main.p <= ib_main.len);
281 /* Read in lto_in_decl_state objects. */
282 data_ptr = (const uint32_t *) ((const char*) data + decl_offset);
284 (const uint32_t *) ((const char*) data_ptr + header->decl_state_size);
285 num_decl_states = *data_ptr++;
      /* State 0 is always the global decl state.  */
287 gcc_assert (num_decl_states > 0);
288 decl_data->global_decl_state = lto_new_in_decl_state ();
289 data_ptr = lto_read_in_decl_state (data_in, data_ptr,
290 decl_data->global_decl_state);
292 /* Read in per-function decl states and enter them in hash table. */
293 decl_data->function_decl_states =
294 htab_create_ggc (37, lto_hash_in_decl_state, lto_eq_in_decl_state, NULL);
296 for (i = 1; i < num_decl_states; i++)
298 struct lto_in_decl_state *state = lto_new_in_decl_state ();
301 data_ptr = lto_read_in_decl_state (data_in, data_ptr, state);
302 slot = htab_find_slot (decl_data->function_decl_states, state, INSERT);
303 gcc_assert (*slot == NULL);
      /* Sanity check: the decl-state block must be consumed exactly.  */
307 if (data_ptr != data_end)
308 internal_error ("bytecode stream: garbage at the end of symbols section");
310 /* Set the current decl state to be the global state. */
311 decl_data->current_decl_state = decl_data->global_decl_state;
313 lto_data_in_delete (data_in);
316 /* Parse the hexadecimal number at P.  Hand-rolled because strtoll is
   not portable; aborts with internal_error on a non-hex character.  */
318 lto_parse_hex (const char *p) {
320 for (; *p != '\0'; ++p)
325 if (c >= '0' && c <= '9')
327 else if (c >= 'a' && c <= 'f')
329 else if (c >= 'A' && c <= 'F')
332 internal_error ("could not parse hex number");
338 /* Read resolution for file named FILE_NAME. The resolution is read from
   RESOLUTION and attached, per sub-module id, to the file_data objects
   stored in the FILE_IDS splay tree.  */
342 lto_resolution_read (splay_tree file_ids, FILE *resolution, lto_file *file)
344 /* We require that objects in the resolution file are in the same
345 order as the lto1 command line. */
346 unsigned int name_len;
348 unsigned int num_symbols;
350 struct lto_file_decl_data *file_data;
351 unsigned max_index = 0;
352 splay_tree_node nd = NULL;
357 name_len = strlen (file->filename);
358 obj_name = XNEWVEC (char, name_len + 1);
    /* NOTE(review): the return values of fscanf/fread below are ignored;
       a short read would be caught only by the filename comparison that
       follows — consider checking them explicitly.  */
359 fscanf (resolution, " "); /* Read white space. */
361 fread (obj_name, sizeof (char), name_len, resolution);
362 obj_name[name_len] = '\0';
363 if (strcmp (obj_name, file->filename) != 0)
364 internal_error ("unexpected file name %s in linker resolution file. "
365 "Expected %s", obj_name, file->filename);
    /* For archive members, verify the recorded "@0x<offset>" marker.  */
366 if (file->offset != 0)
371 t = fscanf (resolution, "@0x%16s", offset_p);
373 internal_error ("could not parse file offset");
374 offset = lto_parse_hex (offset_p);
375 if (offset != file->offset)
376 internal_error ("unexpected offset");
381 fscanf (resolution, "%u", &num_symbols);
383 for (i = 0; i < num_symbols; i++)
388 enum ld_plugin_symbol_resolution r = (enum ld_plugin_symbol_resolution) 0;
390 unsigned int lto_resolution_str_len =
391 sizeof (lto_resolution_str) / sizeof (char *);
      /* Each line: symbol index, sub-module id, resolution keyword.  */
393 t = fscanf (resolution, "%u %x %26s %*[^\n]\n", &index, &id, r_str);
395 internal_error ("Invalid line in the resolution file.");
396 if (index > max_index)
      /* Map the keyword back to the ld_plugin_symbol_resolution enum.  */
399 for (j = 0; j < lto_resolution_str_len; j++)
401 if (strcmp (lto_resolution_str[j], r_str) == 0)
403 r = (enum ld_plugin_symbol_resolution) j;
407 if (j == lto_resolution_str_len)
408 internal_error ("Invalid resolution in the resolution file.");
      /* Cache the last splay-tree node; consecutive lines usually share
         the same sub-module id.  */
410 if (!(nd && nd->key == id))
412 nd = splay_tree_lookup (file_ids, id);
414 internal_error ("Resolution sub id %x not in object file", id);
417 file_data = (struct lto_file_decl_data *)nd->value;
418 if (cgraph_dump_file)
419 fprintf (cgraph_dump_file, "Adding resolution %u %u to id %x\n",
420 index, r, file_data->id);
421 VEC_safe_grow_cleared (ld_plugin_symbol_resolution_t, heap,
422 file_data->resolutions,
424 VEC_replace (ld_plugin_symbol_resolution_t,
425 file_data->resolutions, index, r);
429 /* Is NAME an id'ed LTO section?  If so, store the sub-module id
   (the hex suffix after the last '.') in *ID and return nonzero.  */
432 lto_section_with_id (const char *name, unsigned *id)
436 if (strncmp (name, LTO_SECTION_NAME_PREFIX, strlen (LTO_SECTION_NAME_PREFIX)))
438 s = strrchr (name, '.');
439 return s && sscanf (s, ".%x", id) == 1;
442 /* htab_traverse callback: create the file_data of each sub-file id.
   SLOT holds an lto_section_slot; DATA is the splay tree mapping
   sub-module ids to their lto_file_decl_data.  Copies the section
   into the per-sub-module section hash table.  */
445 create_subid_section_table (void **slot, void *data)
447 struct lto_section_slot s_slot, *new_slot;
448 struct lto_section_slot *ls = *(struct lto_section_slot **)slot;
449 splay_tree file_ids = (splay_tree)data;
454 struct lto_file_decl_data *file_data;
    /* Sections without a sub-module id suffix are skipped.  */
456 if (!lto_section_with_id (ls->name, &id))
459 /* Find hash table of sub module id */
460 nd = splay_tree_lookup (file_ids, id);
463 file_data = (struct lto_file_decl_data *)nd->value;
    /* First section seen for this id: allocate and register its file_data.  */
467 file_data = ggc_alloc_lto_file_decl_data ();
468 memset(file_data, 0, sizeof (struct lto_file_decl_data));
470 file_data->section_hash_table = lto_obj_create_section_hash_table ();
471 splay_tree_insert (file_ids, id, (splay_tree_value)file_data);
474 /* Copy section into sub module hash table */
475 new_name = XDUPVEC (char, ls->name, strlen (ls->name) + 1);
476 s_slot.name = new_name;
477 hash_slot = htab_find_slot (file_data->section_hash_table, &s_slot, INSERT);
478 gcc_assert (*hash_slot == NULL);
480 new_slot = XDUP (struct lto_section_slot, ls);
481 new_slot->name = new_name;
482 *hash_slot = new_slot;
486 /* Read declarations and other initializations for a FILE_DATA:
   set up its renaming table, read the LTO_section_decls section and
   stream in the global decls/types, then release the section.  */
489 lto_file_finalize (struct lto_file_decl_data *file_data, lto_file *file)
494 file_data->renaming_hash_table = lto_create_renaming_table ();
495 file_data->file_name = file->filename;
496 data = lto_get_section_data (file_data, LTO_section_decls, NULL, &len);
499 internal_error ("Cannot read LTO decls from %s", file_data->file_name);
502 lto_read_decls (file_data, data, file_data->resolutions);
503 lto_free_section_data (file_data, LTO_section_decls, NULL, data, len);
/* Field of struct lwstate (header not visible in this excerpt):
   head pointer of the file_data list being built — TODO confirm.  */
509 struct lto_file_decl_data **file_data;
513 /* splay_tree_foreach callback: traverse ids and create a list of
   file_datas out of it.  Finalizes each sub-module's file_data and
   prepends it to the list held in the lwstate DATA.  */
515 static int lto_create_files_from_ids (splay_tree_node node, void *data)
517 struct lwstate *lw = (struct lwstate *)data;
518 struct lto_file_decl_data *file_data = (struct lto_file_decl_data *)node->value;
520 lto_file_finalize (file_data, lw->file);
521 if (cgraph_dump_file)
522 fprintf (cgraph_dump_file, "Creating file %s with sub id %x\n",
523 file_data->file_name, file_data->id);
    /* Push onto the front of the result list.  */
524 file_data->next = *lw->file_data;
525 *lw->file_data = file_data;
530 /* Generate a TREE representation for all types and external decls
533 Read all of the globals out of the file. Then read the cgraph
534 and process the .o index into the cgraph nodes so that it can open
535 the .o file to load the functions and ipa information.
   Returns the list of lto_file_decl_data for every sub-module found
   in FILE; linker resolutions are read from RESOLUTION_FILE.  */
537 static struct lto_file_decl_data *
538 lto_file_read (lto_file *file, FILE *resolution_file, int *count)
540 struct lto_file_decl_data *file_data = NULL;
542 htab_t section_hash_table;
543 struct lwstate state;
545 section_hash_table = lto_obj_build_section_table (file);
547 /* Find all sub modules in the object and put their sections into new hash
548 tables in a splay tree. */
549 file_ids = splay_tree_new (splay_tree_compare_ints, NULL, NULL);
550 htab_traverse (section_hash_table, create_subid_section_table, file_ids);
552 /* Add resolutions to file ids */
553 lto_resolution_read (file_ids, resolution_file, file);
555 /* Finalize each lto file for each submodule in the merged object
556 and create list for returning. */
558 state.file_data = &file_data;
560 splay_tree_foreach (file_ids, lto_create_files_from_ids, &state);
    /* Per-id data now lives in the returned list; drop the temporaries.  */
562 splay_tree_delete (file_ids);
563 htab_delete (section_hash_table);
/* Use mmap-based section I/O only when the host provides mmap and a
   queryable page size.  */
568 #if HAVE_MMAP_FILE && HAVE_SYSCONF && defined _SC_PAGE_SIZE
569 #define LTO_MMAP_IO 1
573 /* Page size of machine is used for mmap and munmap calls. */
574 static size_t page_mask;
577 /* Get the section data of length LEN from FILENAME starting at
578 OFFSET. The data segment must be freed by the caller when the
579 caller is finished. Returns NULL if all was not well.
   Uses mmap when LTO_MMAP_IO is available, otherwise lseek+read into
   an xmalloc'd buffer.  */
582 lto_read_section_data (struct lto_file_decl_data *file_data,
583 intptr_t offset, size_t len)
587 static char *fd_name;
589 intptr_t computed_len;
590 intptr_t computed_offset;
594 /* Keep a single-entry file-descriptor cache. The last file we
595 touched will get closed at exit.
596 ??? Eventually we want to add a more sophisticated larger cache
597 or rather fix function body streaming to not stream them in
598 practically random order. */
600 && strcmp (fd_name, file_data->file_name) != 0)
608 fd_name = xstrdup (file_data->file_name);
609 fd = open (file_data->file_name, O_RDONLY|O_BINARY);
    /* Lazily compute the page mask on first use.  */
617 size_t page_size = sysconf (_SC_PAGE_SIZE);
618 page_mask = ~(page_size - 1);
    /* mmap requires page-aligned offsets; round down and remember the
       difference so the caller still gets a pointer to OFFSET.  */
621 computed_offset = offset & page_mask;
622 diff = offset - computed_offset;
623 computed_len = len + diff;
625 result = (char *) mmap (NULL, computed_len, PROT_READ, MAP_PRIVATE,
626 fd, computed_offset);
627 if (result == MAP_FAILED)
630 return result + diff;
    /* Fallback path: plain read into a heap buffer.  */
632 result = (char *) xmalloc (len);
633 if (lseek (fd, offset, SEEK_SET) != offset
634 || read (fd, result, len) != (ssize_t) len)
645 /* Get the section data from FILE_DATA of SECTION_TYPE with NAME.
646 NAME will be NULL unless the section type is for a function
   body.  Looks the section up in the per-file section hash table and
   reads its bytes via lto_read_section_data.  */
650 get_section_data (struct lto_file_decl_data *file_data,
651 enum lto_section_type section_type,
655 htab_t section_hash_table = file_data->section_hash_table;
656 struct lto_section_slot *f_slot;
657 struct lto_section_slot s_slot;
658 const char *section_name = lto_get_section_name (section_type, name, file_data);
662 s_slot.name = section_name;
663 f_slot = (struct lto_section_slot *) htab_find (section_hash_table, &s_slot);
666 data = lto_read_section_data (file_data, f_slot->start, f_slot->len);
    /* lto_get_section_name allocates; we own the string.  */
670 free (CONST_CAST (char *, section_name));
675 /* Free the section data from FILE_DATA of SECTION_TYPE with NAME that
676 starts at OFFSET and has LEN bytes.
   Mirrors get_section_data: munmap when the data was mmapped
   (LTO_MMAP_IO), plain free otherwise.  */
679 free_section_data (struct lto_file_decl_data *file_data ATTRIBUTE_UNUSED,
680 enum lto_section_type section_type ATTRIBUTE_UNUSED,
681 const char *name ATTRIBUTE_UNUSED,
682 const char *offset, size_t len ATTRIBUTE_UNUSED)
685 intptr_t computed_len;
686 intptr_t computed_offset;
    /* Recover the page-aligned base and length that were mmapped.  */
691 computed_offset = ((intptr_t) offset) & page_mask;
692 diff = (intptr_t) offset - computed_offset;
693 computed_len = len + diff;
695 munmap ((caddr_t) computed_offset, computed_len);
697 free (CONST_CAST(char *, offset));
701 /* Structure describing ltrans partitions: the cgraph and varpool
   node sets that make up one LTRANS unit, plus its name (not
   GC-traced).  */
703 struct GTY (()) ltrans_partition_def
705 cgraph_node_set cgraph_set;
706 varpool_node_set varpool_set;
707 const char * GTY ((skip)) name;
711 typedef struct ltrans_partition_def *ltrans_partition;
712 DEF_VEC_P(ltrans_partition);
713 DEF_VEC_ALLOC_P(ltrans_partition,gc);
/* All partitions created so far, in creation order.  */
715 static GTY (()) VEC(ltrans_partition, gc) *ltrans_partitions;
/* Forward declarations; the two adders are mutually recursive through
   add_references_to_partition.  */
717 static void add_cgraph_node_to_partition (ltrans_partition part, struct cgraph_node *node);
718 static void add_varpool_node_to_partition (ltrans_partition part, struct varpool_node *vnode);
720 /* Create a new, empty partition named NAME, register it in
   ltrans_partitions and return it.  */
721 static ltrans_partition
722 new_partition (const char *name)
724 ltrans_partition part = ggc_alloc_ltrans_partition_def ();
725 part->cgraph_set = cgraph_node_set_new ();
726 part->varpool_set = varpool_node_set_new ();
729 VEC_safe_push (ltrans_partition, gc, ltrans_partitions, part);
733 /* See all references in REFS that go to comdat objects and bring them
   into partition PART too, so each using unit gets its own copy.  */
735 add_references_to_partition (ltrans_partition part, struct ipa_ref_list *refs)
739 for (i = 0; ipa_ref_list_reference_iterate (refs, i, ref); i++)
      /* Comdat function referenced and not yet in the partition.  */
741 if (ref->refered_type == IPA_REF_CGRAPH
742 && DECL_COMDAT (ipa_ref_node (ref)->decl)
743 && !cgraph_node_in_set_p (ipa_ref_node (ref), part->cgraph_set))
744 add_cgraph_node_to_partition (part, ipa_ref_node (ref));
      /* Likewise for comdat variables.  */
746 if (ref->refered_type == IPA_REF_VARPOOL
747 && DECL_COMDAT (ipa_ref_varpool_node (ref)->decl)
748 && !varpool_node_in_set_p (ipa_ref_varpool_node (ref), part->varpool_set))
749 add_varpool_node_to_partition (part, ipa_ref_varpool_node (ref));
753 /* Add NODE to partition as well as the inline callees and referred comdats into partition PART. */
756 add_cgraph_node_to_partition (ltrans_partition part, struct cgraph_node *node)
758 struct cgraph_edge *e;
760 part->insns += node->local.inline_summary.self_size;
763 node->in_other_partition = 1;
    /* AUX doubles as a reference count of partitions containing NODE.  */
764 node->aux = (void *)((size_t)node->aux + 1);
766 cgraph_node_set_add (part->cgraph_set, node);
    /* Pull in inlined callees and comdat callees.  */
768 for (e = node->callees; e; e = e->next_callee)
769 if ((!e->inline_failed || DECL_COMDAT (e->callee->decl))
770 && !cgraph_node_in_set_p (e->callee, part->cgraph_set))
771 add_cgraph_node_to_partition (part, e->callee);
773 add_references_to_partition (part, &node->ref_list);
    /* Keep the whole comdat group together.  */
775 if (node->same_comdat_group
776 && !cgraph_node_in_set_p (node->same_comdat_group, part->cgraph_set))
777 add_cgraph_node_to_partition (part, node->same_comdat_group);
780 /* Add VNODE to partition as well as comdat references partition PART. */
783 add_varpool_node_to_partition (ltrans_partition part, struct varpool_node *vnode)
785 varpool_node_set_add (part->varpool_set, vnode);
788 vnode->in_other_partition = 1;
    /* AUX doubles as a reference count of partitions containing VNODE.  */
789 vnode->aux = (void *)((size_t)vnode->aux + 1);
791 add_references_to_partition (part, &vnode->ref_list);
    /* Keep the whole comdat group together.  */
793 if (vnode->same_comdat_group
794 && !varpool_node_in_set_p (vnode->same_comdat_group, part->varpool_set))
795 add_varpool_node_to_partition (part, vnode->same_comdat_group);
798 /* Undo all additions until number of cgraph nodes in PARITION is N_CGRAPH_NODES
799 and number of varpool nodes is N_VARPOOL_NODES.
   Pops nodes off the back of both sets, reversing the bookkeeping done
   by the add_*_to_partition functions (insns count and AUX refcount).  */
802 undo_partition (ltrans_partition partition, unsigned int n_cgraph_nodes,
803 unsigned int n_varpool_nodes)
805 while (VEC_length (cgraph_node_ptr, partition->cgraph_set->nodes) >
808 struct cgraph_node *node = VEC_index (cgraph_node_ptr,
809 partition->cgraph_set->nodes,
811 partition->insns -= node->local.inline_summary.self_size;
812 cgraph_node_set_remove (partition->cgraph_set, node);
813 node->aux = (void *)((size_t)node->aux - 1);
815 while (VEC_length (varpool_node_ptr, partition->varpool_set->nodes) >
818 struct varpool_node *node = VEC_index (varpool_node_ptr,
819 partition->varpool_set->nodes,
821 varpool_node_set_remove (partition->varpool_set, node);
822 node->aux = (void *)((size_t)node->aux - 1);
826 /* Return true if NODE should be partitioned.
827 This means that partitioning algorithm should put NODE into one of partitions.
828 This applies to most functions with bodies. Functions that are not partitioned
829 are put into every unit needing them. This is the case of i.e. COMDATs. */
832 partition_cgraph_node_p (struct cgraph_node *node)
834 /* We will get proper partition based on function they are inlined to. */
835 if (node->global.inlined_to)
837 /* Nodes without a body do not need partitioning. */
840 /* Extern inlines and comdat are always only in partitions they are needed. */
841 if (DECL_EXTERNAL (node->decl)
842 || (DECL_COMDAT (node->decl)
843 && !cgraph_used_from_object_file_p (node)))
848 /* Return true if VNODE should be partitioned.
849 This means that partitioning algorithm should put VNODE into one of partitions. */
852 partition_varpool_node_p (struct varpool_node *vnode)
    /* Aliases and unneeded variables are not partitioned.  */
854 if (vnode->alias || !vnode->needed)
856 /* Constant pool and comdat are always only in partitions they are needed. */
857 if (DECL_IN_CONSTANT_POOL (vnode->decl)
858 || (DECL_COMDAT (vnode->decl)
859 && !varpool_used_from_object_file_p (vnode)))
864 /* Group cgraph nodes by input files. This is used mainly for testing:
   each original object file becomes one partition, mapped via a
   pointer map from lto_file_decl_data to its partition.  */
868 lto_1_to_1_map (void)
870 struct cgraph_node *node;
871 struct varpool_node *vnode;
872 struct lto_file_decl_data *file_data;
873 struct pointer_map_t *pmap;
874 ltrans_partition partition;
878 timevar_push (TV_WHOPR_WPA);
880 pmap = pointer_map_create ();
    /* Distribute functions: one partition per source file_data.  */
882 for (node = cgraph_nodes; node; node = node->next)
884 if (!partition_cgraph_node_p (node))
887 file_data = node->local.lto_file_data;
888 gcc_assert (!node->same_body_alias);
892 slot = pointer_map_contains (pmap, file_data);
894 partition = (ltrans_partition) *slot;
897 partition = new_partition (file_data->file_name);
898 slot = pointer_map_insert (pmap, file_data);
      /* Nodes without file_data (e.g. created during WPA) go into the
         first available partition — presumably; confirm against full
         source.  */
904 && VEC_length (ltrans_partition, ltrans_partitions))
905 partition = VEC_index (ltrans_partition, ltrans_partitions, 0);
908 partition = new_partition ("");
909 slot = pointer_map_insert (pmap, NULL);
914 add_cgraph_node_to_partition (partition, node);
    /* Distribute variables the same way.  */
917 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
919 if (!partition_varpool_node_p (vnode))
921 file_data = vnode->lto_file_data;
922 slot = pointer_map_contains (pmap, file_data);
924 partition = (ltrans_partition) *slot;
927 partition = new_partition (file_data->file_name);
928 slot = pointer_map_insert (pmap, file_data);
933 add_varpool_node_to_partition (partition, vnode);
935 for (node = cgraph_nodes; node; node = node->next)
937 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
940 /* If the cgraph is empty, create one cgraph node set so that there is still
941 an output file for any variables that need to be exported in a DSO. */
943 new_partition ("empty");
945 pointer_map_destroy (pmap);
947 timevar_pop (TV_WHOPR_WPA);
949 lto_stats.num_cgraph_partitions += VEC_length (ltrans_partition,
954 /* Group cgraph nodes into equally sized partitions.
956 The algorithm deciding partitions is simple: nodes are taken in predefined
957 order. The order correspond to order we wish to have functions in final
958 output. In future this will be given by function reordering pass, but at
959 the moment we use topological order that serve a good approximation.
961 The goal is to partition this linear order into intervals (partitions) such
962 that all partitions have approximately the same size and that the number of
963 callgraph or IPA reference edges crossing boundaries is minimal.
965 This is a lot faster (O(n) in size of callgraph) than algorithms doing
966 priority based graph clustering that are generally O(n^2) and since WHOPR
967 is designed to make things go well across partitions, it leads to good results.
969 We compute the expected size of partition as
970 max (total_size / lto_partitions, min_partition_size).
971 We use dynamic expected size of partition, so small programs
972 are partitioning into enough partitions to allow use of multiple CPUs while
973 large programs are not partitioned too much. Creating too many partitions
974 increases streaming overhead significantly.
976 In the future we would like to bound maximal size of partition to avoid
977 ltrans stage consuming too much memory. At the moment however WPA stage is
978 most memory intensive phase at large benchmark since too many types and
979 declarations are read into memory.
981 The function implements a simple greedy algorithm. Nodes are added into the
982 current partition until 3/4th of expected partition size is reached.
983 After this threshold we keep track of boundary size (number of edges going to
984 other partitions) and continue adding functions until the current partition
985 grows into a double of expected partition size. Then the process is undone
986 till the point when the minimal ratio of boundary size and in partition calls
/* Greedy balanced partitioner: walk nodes in topological order, growing
   the current partition and tracking the boundary cost (edges leaving
   the partition); when a partition grows past 2x the target size, unwind
   to the best (lowest boundary/internal ratio) point seen and start a
   new partition.  See the comment block above for the full rationale.  */
990 lto_balanced_map (void)
993 struct cgraph_node **postorder =
994 XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
995 struct cgraph_node **order = XNEWVEC (struct cgraph_node *, cgraph_max_uid);
996 int i, postorder_len;
997 struct cgraph_node *node;
998 int total_size = 0, best_total_size = 0;
1000 ltrans_partition partition;
1001 unsigned int last_visited_cgraph_node = 0, last_visited_varpool_node = 0;
1002 struct varpool_node *vnode;
1003 int cost = 0, internal = 0;
1004 int best_n_nodes = 0, best_n_varpool_nodes = 0, best_i = 0, best_cost =
1005 INT_MAX, best_internal = 0;
1008 /* Until we have better ordering facility, use topological order.
1009 Include only nodes we will partition and compute estimate of program
1010 size. Note that since nodes that are not partitioned might be put into
1011 multiple partitions, this is just an estimate of real size. This is why
1012 we keep partition_size updated after every partition is finalized. */
1013 postorder_len = cgraph_postorder (postorder);
1014 for (i = 0; i < postorder_len; i++)
1016 node = postorder[i];
1017 if (partition_cgraph_node_p (node))
1019 order[n_nodes++] = node;
1020 total_size += node->global.size;
1025 /* Compute partition size and create the first partition. */
1026 partition_size = total_size / PARAM_VALUE (PARAM_LTO_PARTITIONS);
1027 if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
1028 partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
1030 partition = new_partition ("");
1031 if (cgraph_dump_file)
1032 fprintf (cgraph_dump_file, "Total unit size: %i, partition size: %i\n",
1033 total_size, partition_size);
1035 for (i = 0; i < n_nodes; i++)
1037 add_cgraph_node_to_partition (partition, order[i]);
1038 total_size -= order[i]->global.size;
1040 /* Once we added a new node to the partition, we also want to add
1041 all referenced variables unless they were already added into some
1043 add_cgraph_node_to_partition adds possibly multiple nodes and
1044 variables that are needed to satisfy needs of ORDER[i].
1045 We remember last visited cgraph and varpool node from last iteration
1046 of outer loop that allows us to process every new addition.
1048 At the same time we compute size of the boundary into COST. Every
1049 callgraph or IPA reference edge leaving the partition contributes into
1050 COST. Every edge inside partition was earlier computed as one leaving
1051 it and thus we need to subtract it from COST. */
1052 while (last_visited_cgraph_node <
1053 VEC_length (cgraph_node_ptr, partition->cgraph_set->nodes)
1054 || last_visited_varpool_node < VEC_length (varpool_node_ptr,
1055 partition->varpool_set->
1058 struct ipa_ref_list *refs;
1060 struct ipa_ref *ref;
1061 bool cgraph_p = false;
1063 if (last_visited_cgraph_node <
1064 VEC_length (cgraph_node_ptr, partition->cgraph_set->nodes))
1066 struct cgraph_edge *edge;
1069 node = VEC_index (cgraph_node_ptr, partition->cgraph_set->nodes,
1070 last_visited_cgraph_node);
1071 refs = &node->ref_list;
1073 last_visited_cgraph_node++;
1075 gcc_assert (node->analyzed);
1077 /* Compute boundary cost of callgraph edges. */
1078 for (edge = node->callees; edge; edge = edge->next_callee)
1079 if (edge->callee->analyzed)
1081 int edge_cost = edge->frequency;
1082 cgraph_node_set_iterator csi;
1086 gcc_assert (edge_cost > 0);
1087 csi = cgraph_node_set_find (partition->cgraph_set, edge->callee);
1088 if (!csi_end_p (csi)
1089 && csi.index < last_visited_cgraph_node - 1)
1090 cost -= edge_cost, internal+= edge_cost;
1094 for (edge = node->callers; edge; edge = edge->next_caller)
1096 int edge_cost = edge->frequency;
1097 cgraph_node_set_iterator csi;
1099 gcc_assert (edge->caller->analyzed);
1102 gcc_assert (edge_cost > 0);
1103 csi = cgraph_node_set_find (partition->cgraph_set, edge->caller);
1104 if (!csi_end_p (csi)
1105 && csi.index < last_visited_cgraph_node)
      /* Otherwise process the next unvisited varpool node's references.  */
1114 &VEC_index (varpool_node_ptr, partition->varpool_set->nodes,
1115 last_visited_varpool_node)->ref_list;
1116 last_visited_varpool_node++;
1119 /* Compute boundary cost of IPA REF edges and at the same time look into
1120 variables referenced from current partition and try to add them. */
1121 for (j = 0; ipa_ref_list_reference_iterate (refs, j, ref); j++)
1122 if (ref->refered_type == IPA_REF_VARPOOL)
1124 varpool_node_set_iterator vsi;
1126 vnode = ipa_ref_varpool_node (ref);
1127 if (!vnode->finalized)
1129 if (!vnode->aux && partition_varpool_node_p (vnode))
1130 add_varpool_node_to_partition (partition, vnode);
1131 vsi = varpool_node_set_find (partition->varpool_set, vnode);
1132 if (!vsi_end_p (vsi)
1133 && vsi.index < last_visited_varpool_node - !cgraph_p)
1140 cgraph_node_set_iterator csi;
1142 node = ipa_ref_node (ref);
1143 if (!node->analyzed)
1145 csi = cgraph_node_set_find (partition->cgraph_set, node);
1146 if (!csi_end_p (csi)
1147 && csi.index < last_visited_cgraph_node - cgraph_p)
      /* Mirror pass over incoming references.  */
1152 for (j = 0; ipa_ref_list_refering_iterate (refs, j, ref); j++)
1153 if (ref->refering_type == IPA_REF_VARPOOL)
1155 varpool_node_set_iterator vsi;
1157 vnode = ipa_ref_refering_varpool_node (ref);
1158 gcc_assert (vnode->finalized);
1159 if (!vnode->aux && partition_varpool_node_p (vnode))
1160 add_varpool_node_to_partition (partition, vnode);
1161 vsi = varpool_node_set_find (partition->varpool_set, vnode);
1162 if (!vsi_end_p (vsi)
1163 && vsi.index < last_visited_varpool_node)
1170 cgraph_node_set_iterator csi;
1172 node = ipa_ref_refering_node (ref);
1173 gcc_assert (node->analyzed);
1174 csi = cgraph_node_set_find (partition->cgraph_set, node);
1175 if (!csi_end_p (csi)
1176 && csi.index < last_visited_cgraph_node)
1183 /* If the partition is large enough, start looking for smallest boundary cost. */
1184 if (partition->insns < partition_size * 3 / 4
1185 || best_cost == INT_MAX
1187 || (best_internal * (HOST_WIDE_INT) cost
1188 > (internal * (HOST_WIDE_INT)best_cost)))
1189 && partition->insns < partition_size * 5 / 4))
1192 best_internal = internal;
1194 best_n_nodes = VEC_length (cgraph_node_ptr,
1195 partition->cgraph_set->nodes);
1196 best_n_varpool_nodes = VEC_length (varpool_node_ptr,
1197 partition->varpool_set->nodes);
1198 best_total_size = total_size;
1200 if (cgraph_dump_file)
1201 fprintf (cgraph_dump_file, "Step %i: added %s, size %i, cost %i/%i best %i/%i, step %i\n", i,
1202 cgraph_node_name (order[i]), partition->insns, cost, internal,
1203 best_cost, best_internal, best_i);
1204 /* Partition is too large, unwind into step when best cost was reached and
1205 start new partition. */
1206 if (partition->insns > 2 * partition_size)
1210 if (cgraph_dump_file)
1211 fprintf (cgraph_dump_file, "Unwinding %i insertions to step %i\n",
1212 i - best_i, best_i);
1213 undo_partition (partition, best_n_nodes, best_n_varpool_nodes);
1216 /* When we are finished, avoid creating empty partition. */
1217 if (i == n_nodes - 1)
1219 partition = new_partition ("");
1220 last_visited_cgraph_node = 0;
1221 last_visited_varpool_node = 0;
1222 total_size = best_total_size;
1225 if (cgraph_dump_file)
1226 fprintf (cgraph_dump_file, "New partition\n");
1228 best_n_varpool_nodes = 0;
1229 best_cost = INT_MAX;
1231 /* Since the size of partitions is just approximate, update the size after
1232 we finished current one. */
1233 if (npartitions < PARAM_VALUE (PARAM_LTO_PARTITIONS))
1234 partition_size = total_size
1235 / (PARAM_VALUE (PARAM_LTO_PARTITIONS) - npartitions);
1237 partition_size = INT_MAX;
1239 if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
1240 partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
1245 /* Variables that are not reachable from the code go into last partition. */
1246 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
1247 if (partition_varpool_node_p (vnode) && !vnode->aux)
1248 add_varpool_node_to_partition (partition, vnode);
1252 /* Promote variable VNODE to be static. */
/* Despite the wording above, this actually promotes a file-local variable
   to a GLOBAL symbol with hidden visibility, so that other LTRANS
   partitions can reference it without it escaping the DSO.  */
1255 promote_var (struct varpool_node *vnode)
/* Already-public or external decls need no promotion; the early-return
   lines are elided in this excerpt -- presumably `return false;` here.  */
1257 if (TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl))
/* Promotion only makes sense during whole-program analysis.  */
1259 gcc_assert (flag_wpa);
1260 TREE_PUBLIC (vnode->decl) = 1;
1261 DECL_VISIBILITY (vnode->decl) = VISIBILITY_HIDDEN;
1262 DECL_VISIBILITY_SPECIFIED (vnode->decl) = true;
1263 if (cgraph_dump_file)
1264 fprintf (cgraph_dump_file,
1265 "Promoting var as hidden: %s\n", varpool_node_name (vnode));
1269 /* Promote function NODE to be static. */
/* As with promote_var above, this makes the function decl global with
   hidden visibility so sibling LTRANS partitions can call it.  Same-body
   aliases must be promoted in lockstep with the function itself.  */
1272 promote_fn (struct cgraph_node *node)
1274 gcc_assert (flag_wpa);
/* Nothing to do for already-public or external functions.  */
1275 if (TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl))
1277 TREE_PUBLIC (node->decl) = 1;
1278 DECL_VISIBILITY (node->decl) = VISIBILITY_HIDDEN;
1279 DECL_VISIBILITY_SPECIFIED (node->decl) = true;
/* Keep every same-body alias consistent with the promoted decl,
   otherwise references through the alias would stay local.  */
1280 if (node->same_body)
1282 struct cgraph_node *alias;
1283 for (alias = node->same_body;
1284 alias; alias = alias->next)
1286 TREE_PUBLIC (alias->decl) = 1;
1287 DECL_VISIBILITY (alias->decl) = VISIBILITY_HIDDEN;
1288 DECL_VISIBILITY_SPECIFIED (alias->decl) = true;
1291 if (cgraph_dump_file)
1292 fprintf (cgraph_dump_file,
1293 "Promoting function as hidden: %s/%i\n",
1294 cgraph_node_name (node), node->uid);
1298 /* Find out all static decls that need to be promoted to global because
1299 of cross file sharing. This function must be run in the WPA mode after
1300 all inlinees are added. */
1303 lto_promote_cross_file_statics (void)
1305 struct varpool_node *vnode;
1307 cgraph_node_set set;
1308 varpool_node_set vset;
1309 cgraph_node_set_iterator csi;
1310 varpool_node_set_iterator vsi;
/* Worklist of read-only variables whose initializers have been exported
   into this partition and whose references still need promotion.  */
1311 VEC(varpool_node_ptr, heap) *promoted_initializers = NULL;
/* Guards against pushing the same varpool node onto the worklist twice.  */
1312 struct pointer_set_t *inserted = pointer_set_create ();
1314 gcc_assert (flag_wpa);
/* Walk every LTRANS partition independently.  */
1316 n_sets = VEC_length (ltrans_partition, ltrans_partitions);
1317 for (i = 0; i < n_sets; i++)
1319 ltrans_partition part = VEC_index (ltrans_partition, ltrans_partitions, i);
1320 set = part->cgraph_set;
1321 vset = part->varpool_set;
1323 /* If node has either address taken (and we have no clue from where)
1324 or it is called from other partition, it needs to be globalized. */
1325 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
1327 struct cgraph_node *node = csi_node (csi);
1328 if (node->local.externally_visible)
1330 if (node->global.inlined_to)
1332 if ((!DECL_EXTERNAL (node->decl) && !DECL_COMDAT (node->decl))
1333 && (referenced_from_other_partition_p (&node->ref_list, set, vset)
1334 || reachable_from_other_partition_p (node, set)))
/* Same treatment for variables referenced across partitions.  */
1337 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
1339 vnode = vsi_node (vsi);
1340 /* Constant pool references use internal labels and thus can not
1341 be made global. It is sensible to keep those ltrans local to
1342 allow better optimization. */
1343 if (!DECL_IN_CONSTANT_POOL (vnode->decl) && !DECL_COMDAT (vnode->decl)
1344 && !vnode->externally_visible && vnode->analyzed
1345 && referenced_from_other_partition_p (&vnode->ref_list,
1347 promote_var (vnode);
1350 /* We export initializers of read-only var into each partition
1351 referencing it. Folding might take declarations from the
1352 initializers and use it; so everything referenced from the
1353 initializers needs to be accessible from this partition after
1356 This means that we need to promote all variables and functions
1357 referenced from all initializers from readonly vars referenced
1358 from this partition that are not in this partition.
1359 This needs to be done recursively. */
1360 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
1361 if (const_value_known_p (vnode->decl)
1362 && DECL_INITIAL (vnode->decl)
1363 && !varpool_node_in_set_p (vnode, vset)
1364 && referenced_from_this_partition_p (&vnode->ref_list, set, vset)
1365 && !pointer_set_insert (inserted, vnode))
1366 VEC_safe_push (varpool_node_ptr, heap, promoted_initializers, vnode);
/* Drain the worklist: each popped variable may drag in further
   functions/variables referenced from its initializer.  */
1367 while (!VEC_empty (varpool_node_ptr, promoted_initializers))
1370 struct ipa_ref *ref;
1372 vnode = VEC_pop (varpool_node_ptr, promoted_initializers);
1373 for (i = 0; ipa_ref_list_reference_iterate (&vnode->ref_list, i, ref); i++)
1375 if (ref->refered_type == IPA_REF_CGRAPH)
1377 struct cgraph_node *n = ipa_ref_node (ref);
1378 gcc_assert (!n->global.inlined_to);
1379 if (!n->local.externally_visible
1380 && !cgraph_node_in_set_p (n, set))
1385 struct varpool_node *v = ipa_ref_varpool_node (ref);
1386 if (varpool_node_in_set_p (v, vset))
1388 /* Constant pool references use internal labels and thus can not
1389 be made global. It is sensible to keep those ltrans local to
1390 allow better optimization. */
1391 if (DECL_IN_CONSTANT_POOL (v->decl))
1393 if (!pointer_set_insert (inserted, vnode))
1394 VEC_safe_push (varpool_node_ptr, heap,
1395 promoted_initializers, v);
1397 else if (!DECL_IN_CONSTANT_POOL (v->decl)
1398 && !v->externally_visible && v->analyzed)
/* Readonly vars with known initializers recurse: their own
   initializer references must be promoted too.  */
1401 && DECL_INITIAL (v->decl)
1402 && const_value_known_p (v->decl)
1403 && !pointer_set_insert (inserted, vnode))
1404 VEC_safe_push (varpool_node_ptr, heap,
1405 promoted_initializers, v);
1411 pointer_set_destroy (inserted);
/* LTO file currently being read/written; cleared when processing of a
   file finishes (see read_cgraph_and_symbols).  */
1414 static lto_file *current_lto_file;
1416 /* Helper for qsort; compare partitions and return one with smaller size.
1417 We sort from greatest to smallest so parallel build doesn't stall on the
1418 longest compilation being executed too late. */
1421 cmp_partitions (const void *a, const void *b)
1423 const struct ltrans_partition_def *pa
1424 = *(struct ltrans_partition_def *const *)a;
1425 const struct ltrans_partition_def *pb
1426 = *(struct ltrans_partition_def *const *)b;
/* Descending order by instruction count (qsort contract: <0, 0, >0).  */
1427 return pb->insns - pa->insns;
1430 /* Write all output files in WPA mode and the file with the list of
LTRANS units (one object file per partition) for the driver to spawn.  */
1434 lto_wpa_write_files (void)
1438 cgraph_node_set set;
1439 varpool_node_set vset;
1440 ltrans_partition part;
1441 FILE *ltrans_output_list_stream;
1442 char *temp_filename;
1445 /* Open the LTRANS output list. */
1446 if (!ltrans_output_list)
1447 fatal_error ("no LTRANS output list filename provided");
1448 ltrans_output_list_stream = fopen (ltrans_output_list, "w");
1449 if (ltrans_output_list_stream == NULL)
1450 fatal_error ("opening LTRANS output list %s: %m", ltrans_output_list);
1452 timevar_push (TV_WHOPR_WPA);
/* Account output cgraph nodes for -flto-report statistics.  */
1454 FOR_EACH_VEC_ELT (ltrans_partition, ltrans_partitions, i, part)
1455 lto_stats.num_output_cgraph_nodes += VEC_length (cgraph_node_ptr,
1456 part->cgraph_set->nodes);
1458 /* Find out statics that need to be promoted
1459 to globals with hidden visibility because they are accessed from multiple
partitions.  */
1461 lto_promote_cross_file_statics ();
1463 timevar_pop (TV_WHOPR_WPA);
1465 timevar_push (TV_WHOPR_WPA_IO);
1467 /* Generate a prefix for the LTRANS unit files. */
/* sizeof ("2147483648.o") reserves room for the largest "%u.o" suffix.  */
1468 blen = strlen (ltrans_output_list);
1469 temp_filename = (char *) xmalloc (blen + sizeof ("2147483648.o"));
1470 strcpy (temp_filename, ltrans_output_list);
/* Strip a trailing ".out" from the list filename before appending
   the per-partition "%u.o" suffix.  */
1471 if (blen > sizeof (".out")
1472 && strcmp (temp_filename + blen - sizeof (".out") + 1,
1474 temp_filename[blen - sizeof (".out") + 1] = '\0';
1475 blen = strlen (temp_filename);
/* Emit the largest partitions first so a parallel build is not stalled
   by the longest compile starting last (see cmp_partitions).  */
1477 n_sets = VEC_length (ltrans_partition, ltrans_partitions);
1478 VEC_qsort (ltrans_partition, ltrans_partitions, cmp_partitions);
1479 for (i = 0; i < n_sets; i++)
1482 ltrans_partition part = VEC_index (ltrans_partition, ltrans_partitions, i);
1484 set = part->cgraph_set;
1485 vset = part->varpool_set;
1487 /* Write all the nodes in SET. */
1488 sprintf (temp_filename + blen, "%u.o", i);
1489 file = lto_obj_file_open (temp_filename, true);
1491 fatal_error ("lto_obj_file_open() failed");
1494 fprintf (stderr, " %s (%s %i insns)", temp_filename, part->name, part->insns);
1495 if (cgraph_dump_file)
/* NOTE(review): "Writting" typo lives in the dump string below; left
   untouched here since it is runtime output text.  */
1497 fprintf (cgraph_dump_file, "Writting partition %s to file %s, %i insns\n",
1498 part->name, temp_filename, part->insns);
1499 fprintf (cgraph_dump_file, "cgraph nodes:");
1500 dump_cgraph_node_set (cgraph_dump_file, set);
1501 fprintf (cgraph_dump_file, "varpool nodes:");
1502 dump_varpool_node_set (cgraph_dump_file, vset);
/* Only the very first partition may legitimately be empty.  */
1504 gcc_assert (cgraph_node_set_nonempty_p (set)
1505 || varpool_node_set_nonempty_p (vset) || !i);
1507 lto_set_current_out_file (file);
1509 ipa_write_optimization_summaries (set, vset);
1511 lto_set_current_out_file (NULL);
1512 lto_obj_file_close (file);
/* Record the partition's object filename, one per line, for the driver.  */
1514 len = strlen (temp_filename);
1515 if (fwrite (temp_filename, 1, len, ltrans_output_list_stream) < len
1516 || fwrite ("\n", 1, 1, ltrans_output_list_stream) < 1)
1517 fatal_error ("writing to LTRANS output list %s: %m",
1518 ltrans_output_list);
1521 lto_stats.num_output_files += n_sets;
1523 /* Close the LTRANS output list. */
/* fclose also flushes; its return value must be checked for write streams.  */
1524 if (fclose (ltrans_output_list_stream))
1525 fatal_error ("closing LTRANS output list %s: %m", ltrans_output_list);
1527 timevar_pop (TV_WHOPR_WPA_IO);
/* State threaded through the tree-fixup walk; SEEN records nodes already
   visited so shared trees are processed only once.  */
1532 struct pointer_set_t *seen;
/* Walk subtree T with lto_fixup_tree, replacing decls/types in place.
   DATA must be the surrounding walk's lto_fixup_data_t pointer.  */
1535 #define LTO_FIXUP_SUBTREE(t) \
1537 walk_tree (&(t), lto_fixup_tree, data, NULL); \
/* Like LTO_FIXUP_SUBTREE, but first canonicalize T through the merged
   gimple type table so equivalent types become pointer-identical.  */
1540 #define LTO_REGISTER_TYPE_AND_FIXUP_SUBTREE(t) \
1544 (t) = gimple_register_type (t); \
1545 walk_tree (&(t), lto_fixup_tree, data, NULL); \
/* Forward declaration: the walk_tree callback defined further below.  */
1549 static tree lto_fixup_tree (tree *, int *, void *);
1551 /* Return true if T does not need to be fixed up recursively. */
/* Constants and identifiers carry no decl/type links that merging changes.  */
1557 || CONSTANT_CLASS_P (t)
1558 || TREE_CODE (t) == IDENTIFIER_NODE);
1561 /* Fix up fields of a tree_common T. DATA points to fix-up states. */
1564 lto_fixup_common (tree t, void *data)
1566 /* The following re-creates the TYPE_REFERENCE_TO and TYPE_POINTER_TO
1567 lists. We do not stream TYPE_REFERENCE_TO, TYPE_POINTER_TO or
1568 TYPE_NEXT_PTR_TO and TYPE_NEXT_REF_TO.
1569 First remove us from any pointer list we are on. */
1570 if (TREE_CODE (t) == POINTER_TYPE)
/* Unlink T from the pointed-to type's pointer chain: either we are the
   list head or we must find our predecessor and splice around us.  */
1572 if (TYPE_POINTER_TO (TREE_TYPE (t)) == t)
1573 TYPE_POINTER_TO (TREE_TYPE (t)) = TYPE_NEXT_PTR_TO (t);
1576 tree tem = TYPE_POINTER_TO (TREE_TYPE (t));
1577 while (tem && TYPE_NEXT_PTR_TO (tem) != t)
1578 tem = TYPE_NEXT_PTR_TO (tem);
1580 TYPE_NEXT_PTR_TO (tem) = TYPE_NEXT_PTR_TO (t);
1582 TYPE_NEXT_PTR_TO (t) = NULL_TREE;
/* Mirror logic for REFERENCE_TYPEs and the TYPE_REFERENCE_TO chain.  */
1584 else if (TREE_CODE (t) == REFERENCE_TYPE)
1586 if (TYPE_REFERENCE_TO (TREE_TYPE (t)) == t)
1587 TYPE_REFERENCE_TO (TREE_TYPE (t)) = TYPE_NEXT_REF_TO (t);
1590 tree tem = TYPE_REFERENCE_TO (TREE_TYPE (t));
1591 while (tem && TYPE_NEXT_REF_TO (tem) != t)
1592 tem = TYPE_NEXT_REF_TO (tem);
1594 TYPE_NEXT_REF_TO (tem) = TYPE_NEXT_REF_TO (t);
1596 TYPE_NEXT_REF_TO (t) = NULL_TREE;
1599 /* Fixup our type. */
1600 LTO_REGISTER_TYPE_AND_FIXUP_SUBTREE (TREE_TYPE (t));
1602 /* Second put us on the list of pointers of the new pointed-to type
1603 if we are a main variant. This is done in lto_fixup_type after
1604 fixing up our main variant. */
1606 /* This is not very efficient because we cannot do tail-recursion with
1607 a long chain of trees. */
1608 LTO_FIXUP_SUBTREE (TREE_CHAIN (t));
1611 /* Fix up fields of a decl_minimal T. DATA points to fix-up states. */
1614 lto_fixup_decl_minimal (tree t, void *data)
/* Base-class fields first, then the decl_minimal-specific links.  */
1616 lto_fixup_common (t, data);
1617 LTO_FIXUP_SUBTREE (DECL_NAME (t));
1618 LTO_FIXUP_SUBTREE (DECL_CONTEXT (t));
1621 /* Fix up fields of a decl_common T. DATA points to fix-up states. */
1624 lto_fixup_decl_common (tree t, void *data)
/* Chain to the decl_minimal fixup, then handle decl_common fields.  */
1626 lto_fixup_decl_minimal (t, data);
1627 LTO_FIXUP_SUBTREE (DECL_SIZE (t));
1628 LTO_FIXUP_SUBTREE (DECL_SIZE_UNIT (t));
1629 LTO_FIXUP_SUBTREE (DECL_INITIAL (t));
1630 LTO_FIXUP_SUBTREE (DECL_ATTRIBUTES (t));
1631 LTO_FIXUP_SUBTREE (DECL_ABSTRACT_ORIGIN (t));
1634 /* Fix up fields of a decl_with_vis T. DATA points to fix-up states. */
1637 lto_fixup_decl_with_vis (tree t, void *data)
1639 lto_fixup_decl_common (t, data);
1641 /* Accessor macro has side-effects, use field-name here. */
/* DECL_ASSEMBLER_NAME would lazily compute the name; touch the raw field.  */
1642 LTO_FIXUP_SUBTREE (t->decl_with_vis.assembler_name);
/* Section names are identifiers/constants and never need fixing up.  */
1644 gcc_assert (no_fixup_p (DECL_SECTION_NAME (t)));
1647 /* Fix up fields of a decl_non_common T. DATA points to fix-up states. */
1650 lto_fixup_decl_non_common (tree t, void *data)
1652 lto_fixup_decl_with_vis (t, data);
1653 LTO_FIXUP_SUBTREE (DECL_ARGUMENT_FLD (t));
1654 LTO_FIXUP_SUBTREE (DECL_RESULT_FLD (t));
1655 LTO_FIXUP_SUBTREE (DECL_VINDEX (t));
1657 /* SAVED_TREE should not be cleared by now. Also no accessor for base type. */
1658 gcc_assert (no_fixup_p (t->decl_non_common.saved_tree));
1661 /* Fix up fields of a FUNCTION_DECL T. DATA points to fix-up states. */
1664 lto_fixup_function (tree t, void *data)
1666 lto_fixup_decl_non_common (t, data);
/* The personality routine is the only extra link on FUNCTION_DECLs.  */
1667 LTO_FIXUP_SUBTREE (DECL_FUNCTION_PERSONALITY (t));
1670 /* Fix up fields of a field_decl T. DATA points to fix-up states. */
1673 lto_fixup_field_decl (tree t, void *data)
1675 lto_fixup_decl_common (t, data);
1676 LTO_FIXUP_SUBTREE (DECL_FIELD_OFFSET (t));
1677 LTO_FIXUP_SUBTREE (DECL_BIT_FIELD_TYPE (t));
1678 LTO_FIXUP_SUBTREE (DECL_QUALIFIER (t));
/* Bit offsets are INTEGER_CSTs, which never need fixup (see no_fixup_p).  */
1679 gcc_assert (no_fixup_p (DECL_FIELD_BIT_OFFSET (t)));
1680 LTO_FIXUP_SUBTREE (DECL_FCONTEXT (t));
1683 /* Fix up fields of a type T. DATA points to fix-up states. */
1686 lto_fixup_type (tree t, void *data)
1690 lto_fixup_common (t, data);
1691 LTO_FIXUP_SUBTREE (TYPE_CACHED_VALUES (t));
1692 LTO_FIXUP_SUBTREE (TYPE_SIZE (t));
1693 LTO_FIXUP_SUBTREE (TYPE_SIZE_UNIT (t));
1694 LTO_FIXUP_SUBTREE (TYPE_ATTRIBUTES (t));
1695 LTO_FIXUP_SUBTREE (TYPE_NAME (t));
1697 /* Accessors are for derived node types only. */
/* minval/maxval overlay pointer-type fields, hence the POINTER_TYPE_P guard.  */
1698 if (!POINTER_TYPE_P (t))
1699 LTO_FIXUP_SUBTREE (t->type.minval);
1700 LTO_FIXUP_SUBTREE (t->type.maxval);
1702 /* Accessor is for derived node types only. */
1703 LTO_FIXUP_SUBTREE (t->type.binfo);
1705 if (TYPE_CONTEXT (t))
/* A type context must itself be registered; other contexts (decls,
   translation units) only need the plain walk.  */
1707 if (TYPE_P (TYPE_CONTEXT (t)))
1708 LTO_REGISTER_TYPE_AND_FIXUP_SUBTREE (TYPE_CONTEXT (t));
1710 LTO_FIXUP_SUBTREE (TYPE_CONTEXT (t));
1713 /* Compute the canonical type of t and fix that up. From this point
1714 there are no longer any types with TYPE_STRUCTURAL_EQUALITY_P
1715 and its type-based alias problems. */
1716 if (!TYPE_CANONICAL (t))
1718 TYPE_CANONICAL (t) = gimple_register_canonical_type (t);
1719 LTO_FIXUP_SUBTREE (TYPE_CANONICAL (t));
1722 /* The following re-creates proper variant lists while fixing up
1723 the variant leaders. We do not stream TYPE_NEXT_VARIANT so the
1724 variant list state before fixup is broken. */
1726 /* Remove us from our main variant list if we are not the variant leader. */
1727 if (TYPE_MAIN_VARIANT (t) != t)
/* Find our predecessor on the leader's variant chain and splice us out.  */
1729 tem = TYPE_MAIN_VARIANT (t);
1730 while (tem && TYPE_NEXT_VARIANT (tem) != t)
1731 tem = TYPE_NEXT_VARIANT (tem);
1733 TYPE_NEXT_VARIANT (tem) = TYPE_NEXT_VARIANT (t);
1734 TYPE_NEXT_VARIANT (t) = NULL_TREE;
1737 /* Query our new main variant. */
1738 mv = gimple_register_type (TYPE_MAIN_VARIANT (t));
1740 /* If we were the variant leader and we get replaced ourselves drop
1741 all variants from our list. */
1742 if (TYPE_MAIN_VARIANT (t) == t
1748 tree tem2 = TYPE_NEXT_VARIANT (tem);
1749 TYPE_NEXT_VARIANT (tem) = NULL_TREE;
1754 /* If we are not our own variant leader link us into our new leaders
variant list.  */
1758 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
1759 TYPE_NEXT_VARIANT (mv) = t;
1762 /* Finally adjust our main variant and fix it up. */
1763 TYPE_MAIN_VARIANT (t) = mv;
1764 LTO_FIXUP_SUBTREE (TYPE_MAIN_VARIANT (t));
1766 /* As the second step of reconstructing the pointer chains put us
1767 on the list of pointers of the new pointed-to type
1768 if we are a main variant. See lto_fixup_common for the first step. */
1769 if (TREE_CODE (t) == POINTER_TYPE
1770 && TYPE_MAIN_VARIANT (t) == t)
/* Push onto the head of the pointed-to type's pointer list.  */
1772 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (TREE_TYPE (t));
1773 TYPE_POINTER_TO (TREE_TYPE (t)) = t;
1775 else if (TREE_CODE (t) == REFERENCE_TYPE
1776 && TYPE_MAIN_VARIANT (t) == t)
1778 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (TREE_TYPE (t));
1779 TYPE_REFERENCE_TO (TREE_TYPE (t)) = t;
1783 /* Fix up fields of a BINFO T. DATA points to fix-up states. */
1786 lto_fixup_binfo (tree t, void *data)
1788 unsigned HOST_WIDE_INT i, n;
1789 tree base, saved_base;
1791 lto_fixup_common (t, data);
/* Offsets are INTEGER_CSTs and never need replacement.  */
1792 gcc_assert (no_fixup_p (BINFO_OFFSET (t)));
1793 LTO_FIXUP_SUBTREE (BINFO_VTABLE (t));
1794 LTO_FIXUP_SUBTREE (BINFO_VIRTUALS (t));
1795 LTO_FIXUP_SUBTREE (BINFO_VPTR_FIELD (t));
/* Base-access entries live in a VEC; walk each element and write it
   back only when the walk substituted a different tree.  */
1796 n = VEC_length (tree, BINFO_BASE_ACCESSES (t));
1797 for (i = 0; i < n; i++)
1799 saved_base = base = BINFO_BASE_ACCESS (t, i);
1800 LTO_FIXUP_SUBTREE (base);
1801 if (base != saved_base)
1802 VEC_replace (tree, BINFO_BASE_ACCESSES (t), i, base);
1804 LTO_FIXUP_SUBTREE (BINFO_INHERITANCE_CHAIN (t));
1805 LTO_FIXUP_SUBTREE (BINFO_SUBVTT_INDEX (t));
1806 LTO_FIXUP_SUBTREE (BINFO_VPTR_INDEX (t));
/* Same read/fix/write-back pattern for the base BINFOs themselves.  */
1807 n = BINFO_N_BASE_BINFOS (t);
1808 for (i = 0; i < n; i++)
1810 saved_base = base = BINFO_BASE_BINFO (t, i);
1811 LTO_FIXUP_SUBTREE (base);
1812 if (base != saved_base)
1813 VEC_replace (tree, BINFO_BASE_BINFOS (t), i, base);
1817 /* Fix up fields of a CONSTRUCTOR T. DATA points to fix-up states. */
1820 lto_fixup_constructor (tree t, void *data)
1822 unsigned HOST_WIDE_INT idx;
1823 constructor_elt *ce;
/* Register/fix the constructor's type, then both halves of every element.  */
1825 LTO_REGISTER_TYPE_AND_FIXUP_SUBTREE (TREE_TYPE (t));
1828 VEC_iterate(constructor_elt, CONSTRUCTOR_ELTS (t), idx, ce);
1831 LTO_FIXUP_SUBTREE (ce->index);
1832 LTO_FIXUP_SUBTREE (ce->value);
1836 /* A walk_tree callback used by lto_fixup_state. TP is the pointer to the
1837 current tree. WALK_SUBTREES indicates if the subtrees will be walked.
1838 DATA is a pointer set to record visited nodes. */
1841 lto_fixup_tree (tree *tp, int *walk_subtrees, void *data)
1844 lto_fixup_data_t *fixup_data = (lto_fixup_data_t *) data;
/* Skip NULL and anything we have already processed.  */
1849 if (!t || pointer_set_contains (fixup_data->seen, t))
/* Decls are replaced by their prevailing (merged) definition ...  */
1852 if (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == FUNCTION_DECL)
1854 prevailing = lto_symtab_prevailing_decl (t);
1856 if (t != prevailing)
1858 /* Also replace t with prevailing definition. We don't want to
1859 insert the other definition in the seen set as we want to
1860 replace all instances of it. */
1865 else if (TYPE_P (t))
1867 /* Replace t with the prevailing type. We don't want to insert the
1868 other type in the seen set as we want to replace all instances of it. */
1869 t = gimple_register_type (t);
/* Mark T visited; a nonzero return means it was already in the set.  */
1873 if (pointer_set_insert (fixup_data->seen, t))
1876 /* walk_tree does not visit all reachable nodes that need to be fixed up.
1877 Hence we do special processing here for those kind of nodes. */
1878 switch (TREE_CODE (t))
1881 lto_fixup_field_decl (t, data);
1889 lto_fixup_decl_common (t, data);
1893 lto_fixup_decl_with_vis (t, data);
1897 lto_fixup_decl_non_common (t, data);
1901 lto_fixup_function (t, data);
1905 lto_fixup_binfo (t, data);
1910 lto_fixup_type (t, data);
1911 else if (TREE_CODE (t) == CONSTRUCTOR)
1912 lto_fixup_constructor (t, data);
1913 else if (CONSTANT_CLASS_P (t))
1914 LTO_REGISTER_TYPE_AND_FIXUP_SUBTREE (TREE_TYPE (t));
1915 else if (EXPR_P (t))
1917 /* walk_tree only handles TREE_OPERANDs. Do the rest here. */
1918 lto_fixup_common (t, data);
1919 LTO_FIXUP_SUBTREE (t->exp.block);
1924 /* Let walk_tree handle sub-trees. */
1932 /* Helper function of lto_fixup_decls. Walks the var and fn streams in STATE,
1933 replaces var and function decls with the corresponding prevailing def and
1934 records the old decl in the free-list in DATA. We also record visited nodes
1935 in the seen-set in DATA to avoid multiple visits for nodes that need not
be fixed up.  */
1939 lto_fixup_state (struct lto_in_decl_state *state, lto_fixup_data_t *data)
1942 struct lto_tree_ref_table *table;
1944 /* Although we only want to replace FUNCTION_DECLs and VAR_DECLs,
1945 we still need to walk from all DECLs to find the reachable
1946 FUNCTION_DECLs and VAR_DECLs. */
1947 for (si = 0; si < LTO_N_DECL_STREAMS; si++)
1949 table = &state->streams[si];
/* walk_tree takes a tree*, so pass the slot's address for in-place update.  */
1950 for (i = 0; i < table->size; i++)
1951 walk_tree (table->trees + i, lto_fixup_tree, data, NULL);
1955 /* A callback of htab_traverse. Just extract a state from SLOT and the
1956 lto_fixup_data_t object from AUX and calls lto_fixup_state. */
1959 lto_fixup_state_aux (void **slot, void *aux)
1961 struct lto_in_decl_state *state = (struct lto_in_decl_state *) *slot;
1962 lto_fixup_state (state, (lto_fixup_data_t *) aux);
1966 /* Fix the decls from all FILES. Replaces each decl with the corresponding
prevailing decl and canonical type.  */
1970 lto_fixup_decls (struct lto_file_decl_data **files)
/* SEEN is shared across all files so a tree shared between decl states
   is processed exactly once.  */
1974 struct pointer_set_t *seen = pointer_set_create ();
1975 lto_fixup_data_t data;
1978 for (i = 0; files[i]; i++)
1980 struct lto_file_decl_data *file = files[i];
/* Fix the global decl stream, then every per-function decl state.  */
1981 struct lto_in_decl_state *state = file->global_decl_state;
1982 lto_fixup_state (state, &data);
1984 htab_traverse (file->function_decl_states, lto_fixup_state_aux, &data);
/* Global variables recorded outside any decl state also need fixing;
   write back only if the walk substituted a different decl.  */
1987 FOR_EACH_VEC_ELT (tree, lto_global_var_decls, i, decl)
1989 tree saved_decl = decl;
1990 walk_tree (&decl, lto_fixup_tree, &data, NULL);
1991 if (decl != saved_decl)
1992 VEC_replace (tree, lto_global_var_decls, i, decl);
1995 pointer_set_destroy (seen);
1998 /* Read the options saved from each file in the command line. Called
1999 from lang_hooks.post_options which is called by process_options
2000 right before all the options are used to initialize the compiler.
2001 This assumes that decode_options has already run, so the
2002 num_in_fnames and in_fnames are properly set.
2004 Note that this assumes that all the files had been compiled with
2005 the same options, which is not a good assumption. In general,
2006 options ought to be read from all the files in the set and merged.
2007 However, it is still unclear what the merge rules should be. */
2010 lto_read_all_file_options (void)
2014 /* Clear any file options currently saved. */
2015 lto_clear_file_options ();
2017 /* Set the hooks to read ELF sections. */
2018 lto_set_in_hooks (NULL, get_section_data, free_section_data);
2020 fprintf (stderr, "Reading command line options:");
2022 for (i = 0; i < num_in_fnames; i++)
2024 struct lto_file_decl_data *file_data;
/* false: the file is opened for reading, not writing.  */
2025 lto_file *file = lto_obj_file_open (in_fnames[i], false);
2030 fprintf (stderr, " %s", in_fnames[i]);
/* Build a throwaway decl-data just to locate and read the saved
   option section; it is torn down again below.  */
2034 file_data = XCNEW (struct lto_file_decl_data);
2035 file_data->file_name = file->filename;
2036 file_data->section_hash_table = lto_obj_build_section_table (file);
2038 lto_read_file_options (file_data);
2040 lto_obj_file_close (file);
2041 htab_delete (file_data->section_hash_table);
2046 fprintf (stderr, "\n");
2048 /* Apply globally the options read from all the files. */
2049 lto_reissue_options ();
/* GC-rooted, NULL-terminated array of all decl data after flattening;
   the GTY length expression keeps the collector walking the right span.  */
2052 static GTY((length ("lto_stats.num_input_files + 1"))) struct lto_file_decl_data **all_file_decl_data;
2054 /* Turn file datas for sub files into a single array, so that they look
2055 like separate files for further passes. */
2058 lto_flatten_files (struct lto_file_decl_data **orig, int count, int last_file_ix)
2060 struct lto_file_decl_data *n, *next;
2063 lto_stats.num_input_files = count;
/* +1 leaves room for the terminating NULL sentinel.  */
2065 = ggc_alloc_cleared_vec_lto_file_decl_data_ptr (count + 1);
2066 /* Set the hooks so that all of the ipa passes can read in their data. */
2067 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
/* Copy each per-file chain of sub-file decl datas into the flat array.  */
2068 for (i = 0, k = 0; i < last_file_ix; i++)
2070 for (n = orig[i]; n != NULL; n = next)
2072 all_file_decl_data[k++] = n;
2077 all_file_decl_data[k] = NULL;
/* COUNT must equal the total number of sub-files copied.  */
2078 gcc_assert (k == count);
2081 /* Input file data before flattening (i.e. splitting them to subfiles to support
2082 incremental linking). */
2083 static int real_file_count;
/* GC root; length expression tracks real_file_count plus NULL sentinel.  */
2084 static GTY((length ("real_file_count + 1"))) struct lto_file_decl_data **real_file_decl_data;
2086 /* Read all the symbols from the input files FNAMES. NFILES is the
2087 number of files requested in the command line. Instantiate a
2088 global call graph by aggregating all the sub-graphs found in each
file.  */
2092 read_cgraph_and_symbols (unsigned nfiles, const char **fnames)
2094 unsigned int i, last_file_ix;
2096 struct cgraph_node *node;
2098 struct lto_file_decl_data **decl_data;
2102 timevar_push (TV_IPA_LTO_DECL_IN);
/* Pre-flattening storage; +1 for the NULL terminator.  */
2105 = decl_data = ggc_alloc_cleared_vec_lto_file_decl_data_ptr (nfiles + 1);
2106 real_file_count = nfiles;
2108 /* Read the resolution file. */
2110 if (resolution_file_name)
2113 unsigned num_objects;
2115 resolution = fopen (resolution_file_name, "r");
2116 if (resolution == NULL)
2117 fatal_error ("could not open symbol resolution file: %m");
2119 t = fscanf (resolution, "%u", &num_objects);
2120 gcc_assert (t == 1);
2122 /* True, since the plugin splits the archives. */
2123 gcc_assert (num_objects == nfiles);
2127 fprintf (stderr, "Reading object files:");
2129 /* Read all of the object files specified on the command line. */
2130 for (i = 0, last_file_ix = 0; i < nfiles; ++i)
2132 struct lto_file_decl_data *file_data = NULL;
2135 fprintf (stderr, " %s", fnames[i]);
2139 current_lto_file = lto_obj_file_open (fnames[i], false);
2140 if (!current_lto_file)
2143 file_data = lto_file_read (current_lto_file, resolution, &count);
2147 decl_data[last_file_ix++] = file_data;
2149 lto_obj_file_close (current_lto_file);
2150 current_lto_file = NULL;
/* Flatten per-file sub-file chains into all_file_decl_data and drop
   the temporary pre-flattening array.  */
2154 lto_flatten_files (decl_data, count, last_file_ix);
2155 lto_stats.num_input_files = count;
2156 ggc_free(decl_data);
2157 real_file_decl_data = NULL;
2159 if (resolution_file_name)
2160 fclose (resolution);
2162 /* Set the hooks so that all of the ipa passes can read in their data. */
2163 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
2165 timevar_pop (TV_IPA_LTO_DECL_IN);
2168 fprintf (stderr, "\nReading the callgraph\n");
2170 timevar_push (TV_IPA_LTO_CGRAPH_IO);
2171 /* Read the callgraph. */
2173 timevar_pop (TV_IPA_LTO_CGRAPH_IO);
2176 fprintf (stderr, "Merging declarations\n");
2178 timevar_push (TV_IPA_LTO_DECL_MERGE);
2179 /* Merge global decls. */
2180 lto_symtab_merge_decls ();
2182 /* Fixup all decls and types and free the type hash tables. */
2183 lto_fixup_decls (all_file_decl_data);
2184 free_gimple_type_tables ();
2187 timevar_pop (TV_IPA_LTO_DECL_MERGE);
2188 /* Each pass will set the appropriate timer. */
2191 fprintf (stderr, "Reading summaries\n");
2193 /* Read the IPA summary data. */
/* Which summary reader runs depends on the LTO mode -- presumably the
   elided condition here selects LTRANS vs. WPA/LTO; confirm upstream.  */
2195 ipa_read_optimization_summaries ();
2197 ipa_read_summaries ();
2199 /* Finally merge the cgraph according to the decl merging decisions. */
2200 timevar_push (TV_IPA_LTO_CGRAPH_MERGE);
2201 if (cgraph_dump_file)
2203 fprintf (cgraph_dump_file, "Before merging:\n");
2204 dump_cgraph (cgraph_dump_file);
2205 dump_varpool (cgraph_dump_file);
2207 lto_symtab_merge_cgraph_nodes ();
2211 for (node = cgraph_nodes; node; node = node->next)
2213 /* FIXME: ipa_transforms_to_apply holds list of passes that have optimization
2214 summaries computed and needs to apply changes. At the moment WHOPR only
2215 supports inlining, so we can push it here by hand. In future we need to stream
2216 this field into ltrans compilation. */
2218 VEC_safe_push (ipa_opt_pass, heap,
2219 node->ipa_transforms_to_apply,
2220 (ipa_opt_pass)&pass_ipa_inline);
2224 timevar_pop (TV_IPA_LTO_CGRAPH_MERGE);
2226 timevar_push (TV_IPA_LTO_DECL_INIT_IO);
2228 /* FIXME lto. This loop needs to be changed to use the pass manager to
2229 call the ipa passes directly. */
2231 for (i = 0; i < last_file_ix; i++)
2233 struct lto_file_decl_data *file_data = all_file_decl_data [i];
2234 lto_materialize_constructors_and_inits (file_data);
2237 /* Indicate that the cgraph is built and ready. */
2238 cgraph_function_flags_ready = true;
2240 timevar_pop (TV_IPA_LTO_DECL_INIT_IO);
2241 ggc_free (all_file_decl_data);
2242 all_file_decl_data = NULL;
2246 /* Materialize all the bodies for all the nodes in the callgraph. */
2249 materialize_cgraph (void)
2252 struct cgraph_node *node;
2254 timevar_id_t lto_timer;
2258 flag_wpa ? "Materializing decls:" : "Reading function bodies:");
2261 /* Now that we have input the cgraph, we need to clear all of the aux
2262 nodes and read the functions if we are not running in WPA mode. */
2263 timevar_push (TV_IPA_LTO_GIMPLE_IN);
2265 for (node = cgraph_nodes; node; node = node->next)
/* Only nodes that came from an LTO file have bodies to materialize.  */
2267 if (node->local.lto_file_data)
2269 lto_materialize_function (node);
2270 lto_stats.num_input_cgraph_nodes++;
2274 timevar_pop (TV_IPA_LTO_GIMPLE_IN);
2276 /* Start the appropriate timer depending on the mode that we are
operating in: WPA, LTRANS, or plain LTO.  */
2278 lto_timer = (flag_wpa) ? TV_WHOPR_WPA
2279 : (flag_ltrans) ? TV_WHOPR_LTRANS
2281 timevar_push (lto_timer);
2283 current_function_decl = NULL;
2286 /* Inform the middle end about the global variables we have seen. */
2287 FOR_EACH_VEC_ELT (tree, lto_global_var_decls, i, decl)
2288 rest_of_decl_compilation (decl, 1, 0);
2291 fprintf (stderr, "\n");
2293 timevar_pop (lto_timer);
2297 /* Perform whole program analysis (WPA) on the callgraph and write out the
2298 optimization plan. */
2301 do_whole_program_analysis (void)
2303 /* Note that since we are in WPA mode, materialize_cgraph will not
2304 actually read in all the function bodies. It only materializes
2305 the decls and cgraph nodes so that analysis can be performed. */
2306 materialize_cgraph ();
2308 /* Reading in the cgraph uses different timers, start timing WPA now. */
2309 timevar_push (TV_WHOPR_WPA);
2311 if (pre_ipa_mem_report)
2313 fprintf (stderr, "Memory consumption before IPA\n");
2314 dump_memory_report (false);
2317 if (cgraph_dump_file)
2319 dump_cgraph (cgraph_dump_file);
2320 dump_varpool (cgraph_dump_file);
/* Run the regular IPA passes over the merged callgraph.  */
2323 cgraph_function_flags_ready = true;
2324 bitmap_obstack_initialize (NULL);
2325 ipa_register_cgraph_hooks ();
2326 cgraph_state = CGRAPH_STATE_IPA_SSA;
2328 execute_ipa_pass_list (all_regular_ipa_passes);
2330 if (cgraph_dump_file)
2332 fprintf (cgraph_dump_file, "Optimized ");
2333 dump_cgraph (cgraph_dump_file);
2334 dump_varpool (cgraph_dump_file);
2337 bitmap_obstack_release (NULL);
2339 /* We are about to launch the final LTRANS phase, stop the WPA timer. */
2340 timevar_pop (TV_WHOPR_WPA);
/* Partition: 1-to-1 (one partition per input file) or balanced map.  */
2342 if (flag_lto_partition_1to1)
2345 lto_balanced_map ();
2349 fprintf (stderr, "\nStreaming out");
2352 lto_wpa_write_files ();
2355 fprintf (stderr, "\n");
2357 if (post_ipa_mem_report)
2359 fprintf (stderr, "Memory consumption after IPA\n");
2360 dump_memory_report (false);
2363 /* Show the LTO report before launching LTRANS. */
2364 if (flag_lto_report)
2365 print_lto_report ();
/* Cached EH personality decl, computed lazily below; GC root.  */
2369 static GTY(()) tree lto_eh_personality_decl;
2371 /* Return the LTO personality function decl. */
2374 lto_eh_personality (void)
2376 if (!lto_eh_personality_decl)
2378 /* Use the first personality DECL for our personality if we don't
2379 support multiple ones. This ensures that we don't artificially
2380 create the need for them in a single-language program. */
2381 if (first_personality_decl && !dwarf2out_do_cfi_asm ())
2382 lto_eh_personality_decl = first_personality_decl;
/* Otherwise fall back to the generic GCC personality routine.  */
2384 lto_eh_personality_decl = lhd_gcc_personality ();
2387 return lto_eh_personality_decl;
2390 /* Set the process name based on the LTO mode. */
/* Makes ps/top show which of the three lto1 personalities is running;
   the flag checks selecting each branch are elided in this excerpt.  */
2393 lto_process_name (void)
2396 setproctitle ("lto1-lto");
2398 setproctitle ("lto1-wpa");
2400 setproctitle ("lto1-ltrans");
2403 /* Main entry point for the GIMPLE front end. This front end has
2404 three main personalities:
2406 - LTO (-flto). All the object files on the command line are
2407 loaded in memory and processed as a single translation unit.
2408 This is the traditional link-time optimization behavior.
2410 - WPA (-fwpa). Only the callgraph and summary information for
2411 files in the command file are loaded. A single callgraph
2412 (without function bodies) is instantiated for the whole set of
2413 files. IPA passes are only allowed to analyze the call graph
2414 and make transformation decisions. The callgraph is
2415 partitioned, each partition is written to a new object file
2416 together with the transformation decisions.
2418 - LTRANS (-fltrans). Similar to -flto but it prevents the IPA
2419 summary files from running again. Since WPA computed summary
2420 information and decided what transformations to apply, LTRANS
2421 simply applies them. */
2424 lto_main (int debug_p ATTRIBUTE_UNUSED)
2426 lto_process_name ();
2430 /* Read all the symbols and call graph from all the files in the
command line.  */
2432 read_cgraph_and_symbols (num_in_fnames, in_fnames);
2436 /* If WPA is enabled analyze the whole call graph and create an
2437 optimization plan. Otherwise, read in all the function
2438 bodies and continue with optimization. */
2440 do_whole_program_analysis ();
2443 materialize_cgraph ();
2445 /* Let the middle end know that we have read and merged all of
the input files.  */
2449 /* FIXME lto, if the processes spawned by WPA fail, we miss
2450 the chance to print WPA's report, so WPA will call
2451 print_lto_report before launching LTRANS. If LTRANS was
2452 launched directly by the driver we would not need to do
2454 if (flag_lto_report)
2455 print_lto_report ();
2460 #include "gt-lto-lto.h"