1 /* Top-level LTO routines.
2 Copyright 2009, 2010 Free Software Foundation, Inc.
3 Contributed by CodeSourcery, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "diagnostic-core.h"
29 #include "libiberty.h"
32 #include "tree-ssa-operands.h"
33 #include "tree-pass.h"
34 #include "langhooks.h"
37 #include "pointer-set.h"
45 #include "lto-streamer.h"
46 #include "splay-tree.h"
49 /* This needs to be included after config.h. Otherwise, _GNU_SOURCE will not
50 be defined in time to set __USE_GNU in the system headers, and strsignal
51 will not be declared. */
56 /* Handle opening elf files on hosts, such as Windows, that may use
57 text file handling that will break binary access. */
/* NOTE(review): this listing is an excerpt -- the embedded line numbers jump,
   so return types, braces and some statements are missing.  Comments describe
   only what the visible code shows; verify against the complete source.  */
/* First personality routine seen in any input function body; recorded so a
   default personality can be chosen later (set in lto_materialize_function).  */
63 static GTY(()) tree first_personality_decl;
65 /* Returns a hash code for P. */
/* htab hash callback: P is an lto_section_slot; hash its NAME string.  */
68 hash_name (const void *p)
70 const struct lto_section_slot *ds = (const struct lto_section_slot *) p;
71 return (hashval_t) htab_hash_string (ds->name);
75 /* Returns nonzero if P1 and P2 are equal. */
/* htab equality callback: two section slots are equal iff their NAMEs match.  */
78 eq_name (const void *p1, const void *p2)
80 const struct lto_section_slot *s1 =
81 (const struct lto_section_slot *) p1;
82 const struct lto_section_slot *s2 =
83 (const struct lto_section_slot *) p2;
85 return strcmp (s1->name, s2->name) == 0;
88 /* Free lto_section_slot */
/* htab deletion callback: release the slot's owned NAME string (the slot
   itself is presumably freed on a line not visible here -- confirm).  */
91 free_with_string (void *arg)
93 struct lto_section_slot *s = (struct lto_section_slot *)arg;
95 free (CONST_CAST (char *, s->name));
99 /* Create section hash table */
/* Return a fresh hash table keyed by section name using the callbacks
   above; 37 is only the initial size hint.  */
102 lto_obj_create_section_hash_table (void)
104 return htab_create (37, hash_name, eq_name, free_with_string);
107 /* Read the constructors and inits. */
/* Fetch the static-initializer section of FILE_DATA, stream in the
   constructors/initializers it contains, then release the section data.
   (Excerpt: the LEN argument lines are missing from this listing.)  */
110 lto_materialize_constructors_and_inits (struct lto_file_decl_data * file_data)
113 const char *data = lto_get_section_data (file_data,
114 LTO_section_static_initializer,
116 lto_input_constructors_and_inits (file_data, data);
117 lto_free_section_data (file_data, LTO_section_static_initializer, NULL,
121 /* Return true when NODE has a clone that is analyzed (i.e. we need
122 to load its body even if the node itself is not needed). */
/* Walks NODE's clone tree via clone_of / next_sibling_clone without
   recursion.  The descent step (following node->clones) and the analyzed
   check are on lines missing from this excerpt -- verify against full
   source.  The visible part is the "advance to next sibling, backing up
   through parents" traversal, terminating when we return to ORIG.  */
125 has_analyzed_clone_p (struct cgraph_node *node)
127 struct cgraph_node *orig = node;
136 else if (node->next_sibling_clone)
137 node = node->next_sibling_clone;
/* No sibling: climb toward ORIG until a parent with an unvisited sibling
   is found, then step to that sibling.  */
140 while (node != orig && !node->next_sibling_clone)
141 node = node->clone_of;
143 node = node->next_sibling_clone;
149 /* Read the function body for the function associated with NODE. */
/* Locates the LTO_section_function_body section for NODE's decl (using the
   possibly-renamed assembler name), streams the body in (except in WPA
   mode), records the first personality routine seen, and finally announces
   the decl to the middle end.  Excerpt: the declarations of DECL and LEN,
   and several braces/conditions, are on lines missing from this listing.  */
152 lto_materialize_function (struct cgraph_node *node)
155 struct lto_file_decl_data *file_data;
156 const char *data, *name;
160 /* Read in functions with body (analyzed nodes)
161 and also functions that are needed to produce virtual clones. */
162 if (node->analyzed || has_analyzed_clone_p (node))
164 /* Clones don't need to be read. */
167 file_data = node->local.lto_file_data;
168 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
170 /* We may have renamed the declaration, e.g., a static function. */
171 name = lto_get_decl_name_mapping (file_data, name);
173 data = lto_get_section_data (file_data, LTO_section_function_body,
/* A missing body section is unrecoverable -- the object file is broken.  */
176 fatal_error ("%s: section %s is missing",
177 file_data->file_name,
180 gcc_assert (DECL_STRUCT_FUNCTION (decl) == NULL);
182 /* Load the function body only if not operating in WPA mode. In
183 WPA mode, the body of the function is not needed. */
186 allocate_struct_function (decl, false);
187 announce_function (decl);
188 lto_input_function_body (file_data, decl, data);
/* Remember the first personality routine so it can serve as the
   default for functions lacking one.  */
189 if (DECL_FUNCTION_PERSONALITY (decl) && !first_personality_decl)
190 first_personality_decl = DECL_FUNCTION_PERSONALITY (decl);
191 lto_stats.num_function_bodies++;
194 lto_free_section_data (file_data, LTO_section_function_body, name,
200 /* Let the middle end know about the function. */
201 rest_of_decl_compilation (decl, 1, 0);
205 /* Decode the content of memory pointed to by DATA into the
206 in-decl-state object STATE. DATA_IN points to a data_in structure for
207 decoding. Return the address after the decoded object in the input. */
209 static const uint32_t *
210 lto_read_in_decl_state (struct data_in *data_in, const uint32_t *data,
211 struct lto_in_decl_state *state)
/* First word (read on a line missing from this excerpt) is the cache index
   IX of the state's function decl; the global state uses void_type_node as
   a sentinel instead of a FUNCTION_DECL.  */
218 decl = lto_streamer_cache_get (data_in->reader_cache, (int) ix);
219 if (TREE_CODE (decl) != FUNCTION_DECL)
221 gcc_assert (decl == void_type_node);
224 state->fn_decl = decl;
/* Each of the LTO_N_DECL_STREAMS streams is encoded as a 32-bit count
   followed by that many cache indices.  */
226 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
228 uint32_t size = *data++;
229 tree *decls = ggc_alloc_vec_tree (size);
231 for (j = 0; j < size; j++)
233 decls[j] = lto_streamer_cache_get (data_in->reader_cache, data[j]);
235 /* Register every type in the global type table. If the
236 type existed already, use the existing type. */
237 if (TYPE_P (decls[j]))
238 decls[j] = gimple_register_type (decls[j]);
241 state->streams[i].size = size;
242 state->streams[i].trees = decls;
250 /* Read all the symbols from buffer DATA, using descriptors in DECL_DATA.
251 RESOLUTIONS is the set of symbols picked by the linker (read from the
252 resolution file when the linker plugin is being used). */
/* Section layout: lto_decl_header, then decl-state words, then the main
   tree stream, then the string table.  Reads all global trees, then the
   global decl state followed by per-function decl states.  Excerpt: the
   declarations of I and SLOT and some loop bodies are missing here.  */
255 lto_read_decls (struct lto_file_decl_data *decl_data, const void *data,
256 VEC(ld_plugin_symbol_resolution_t,heap) *resolutions)
258 const struct lto_decl_header *header = (const struct lto_decl_header *) data;
259 const int32_t decl_offset = sizeof (struct lto_decl_header);
260 const int32_t main_offset = decl_offset + header->decl_state_size;
261 const int32_t string_offset = main_offset + header->main_size;
262 struct lto_input_block ib_main;
263 struct data_in *data_in;
265 const uint32_t *data_ptr, *data_end;
266 uint32_t num_decl_states;
268 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
271 data_in = lto_data_in_create (decl_data, (const char *) data + string_offset,
272 header->string_size, resolutions);
274 /* Read the global declarations and types. */
275 while (ib_main.p < ib_main.len)
277 tree t = lto_input_tree (&ib_main, data_in);
278 gcc_assert (t && ib_main.p <= ib_main.len);
281 /* Read in lto_in_decl_state objects. */
282 data_ptr = (const uint32_t *) ((const char*) data + decl_offset);
284 (const uint32_t *) ((const char*) data_ptr + header->decl_state_size);
285 num_decl_states = *data_ptr++;
/* State 0 is always the global state.  */
287 gcc_assert (num_decl_states > 0);
288 decl_data->global_decl_state = lto_new_in_decl_state ();
289 data_ptr = lto_read_in_decl_state (data_in, data_ptr,
290 decl_data->global_decl_state);
292 /* Read in per-function decl states and enter them in hash table. */
293 decl_data->function_decl_states =
294 htab_create_ggc (37, lto_hash_in_decl_state, lto_eq_in_decl_state, NULL);
296 for (i = 1; i < num_decl_states; i++)
298 struct lto_in_decl_state *state = lto_new_in_decl_state ();
301 data_ptr = lto_read_in_decl_state (data_in, data_ptr, state);
302 slot = htab_find_slot (decl_data->function_decl_states, state, INSERT);
303 gcc_assert (*slot == NULL);
/* Any leftover bytes mean the writer and reader disagree on layout.  */
307 if (data_ptr != data_end)
308 internal_error ("bytecode stream: garbage at the end of symbols section");
310 /* Set the current decl state to be the global state. */
311 decl_data->current_decl_state = decl_data->global_decl_state;
313 lto_data_in_delete (data_in);
316 /* strtoll is not portable. */
/* Hand-rolled hex parser: accumulates *P's hex digits into a wide integer
   (accumulator declaration and return are on lines missing from this
   excerpt) and aborts via internal_error on any non-hex character.  */
318 lto_parse_hex (const char *p) {
320 for (; *p != '\0'; ++p)
325 if (c >= '0' && c <= '9')
327 else if (c >= 'a' && c <= 'f')
329 else if (c >= 'A' && c <= 'F')
332 internal_error ("could not parse hex number");
338 /* Read resolution for file named FILE_NAME. The resolution is read from
342 lto_resolution_read (splay_tree file_ids, FILE *resolution, lto_file *file)
344 /* We require that objects in the resolution file are in the same
345 order as the lto1 command line. */
346 unsigned int name_len;
348 unsigned int num_symbols;
350 struct lto_file_decl_data *file_data;
351 unsigned max_index = 0;
352 splay_tree_node nd = NULL;
/* Match the next record in the resolution file against FILE's name.  */
357 name_len = strlen (file->filename);
358 obj_name = XNEWVEC (char, name_len + 1);
359 fscanf (resolution, " "); /* Read white space. */
/* NOTE(review): the fread result is unchecked -- a short read would leave
   OBJ_NAME partly uninitialized before the strcmp below.  */
361 fread (obj_name, sizeof (char), name_len, resolution);
362 obj_name[name_len] = '\0';
363 if (strcmp (obj_name, file->filename) != 0)
364 internal_error ("unexpected file name %s in linker resolution file. "
365 "Expected %s", obj_name, file->filename);
/* Archive members carry an @0x<offset> suffix that must match too.  */
366 if (file->offset != 0)
371 t = fscanf (resolution, "@0x%16s", offset_p);
373 internal_error ("could not parse file offset");
374 offset = lto_parse_hex (offset_p);
375 if (offset != file->offset)
376 internal_error ("unexpected offset");
/* NOTE(review): this fscanf result is unchecked; NUM_SYMBOLS may be
   left uninitialized on malformed input.  */
381 fscanf (resolution, "%u", &num_symbols);
383 for (i = 0; i < num_symbols; i++)
388 enum ld_plugin_symbol_resolution r = (enum ld_plugin_symbol_resolution) 0;
390 unsigned int lto_resolution_str_len =
391 sizeof (lto_resolution_str) / sizeof (char *);
/* Each symbol line: index, sub-module id (hex), resolution keyword.
   %26s presumably bounds R_STR's buffer -- confirm its declared size.  */
393 t = fscanf (resolution, "%u %x %26s %*[^\n]\n", &index, &id, r_str);
395 internal_error ("invalid line in the resolution file");
396 if (index > max_index)
/* Translate the keyword into the enum via the name table.  */
399 for (j = 0; j < lto_resolution_str_len; j++)
401 if (strcmp (lto_resolution_str[j], r_str) == 0)
403 r = (enum ld_plugin_symbol_resolution) j;
407 if (j == lto_resolution_str_len)
408 internal_error ("invalid resolution in the resolution file");
/* Cache the last splay-tree hit: consecutive symbols usually share an id.  */
410 if (!(nd && nd->key == id))
412 nd = splay_tree_lookup (file_ids, id);
414 internal_error ("resolution sub id %x not in object file", id);
417 file_data = (struct lto_file_decl_data *)nd->value;
418 if (cgraph_dump_file)
419 fprintf (cgraph_dump_file, "Adding resolution %u %u to id %x\n",
420 index, r, file_data->id);
421 VEC_safe_grow_cleared (ld_plugin_symbol_resolution_t, heap,
422 file_data->resolutions,
424 VEC_replace (ld_plugin_symbol_resolution_t,
425 file_data->resolutions, index, r);
429 /* Is the name for an id'ed LTO section? */
/* Returns nonzero iff NAME starts with the LTO section prefix and ends in
   ".<hex>"; on success stores the parsed hex sub-module id in *ID.  */
432 lto_section_with_id (const char *name, unsigned *id)
436 if (strncmp (name, LTO_SECTION_NAME_PREFIX, strlen (LTO_SECTION_NAME_PREFIX)))
438 s = strrchr (name, '.');
439 return s && sscanf (s, ".%x", id) == 1;
442 /* Create file_data of each sub file id */
/* htab_traverse callback: SLOT is an lto_section_slot, DATA the splay tree
   mapping sub-module id -> lto_file_decl_data.  Looks up (or lazily
   creates) the file_data for the section's id and copies the section slot
   into that file_data's own section hash table, duplicating the name so
   each table owns its string.  Excerpt: declarations of ID, ND, HASH_SLOT
   and NEW_NAME are on lines missing from this listing.  */
445 create_subid_section_table (void **slot, void *data)
447 struct lto_section_slot s_slot, *new_slot;
448 struct lto_section_slot *ls = *(struct lto_section_slot **)slot;
449 splay_tree file_ids = (splay_tree)data;
454 struct lto_file_decl_data *file_data;
/* Sections without an id suffix are skipped.  */
456 if (!lto_section_with_id (ls->name, &id))
459 /* Find hash table of sub module id */
460 nd = splay_tree_lookup (file_ids, id);
463 file_data = (struct lto_file_decl_data *)nd->value;
/* First section seen for this id: create its file_data lazily.  */
467 file_data = ggc_alloc_lto_file_decl_data ();
468 memset(file_data, 0, sizeof (struct lto_file_decl_data));
470 file_data->section_hash_table = lto_obj_create_section_hash_table ();
471 splay_tree_insert (file_ids, id, (splay_tree_value)file_data);
474 /* Copy section into sub module hash table */
475 new_name = XDUPVEC (char, ls->name, strlen (ls->name) + 1);
476 s_slot.name = new_name;
477 hash_slot = htab_find_slot (file_data->section_hash_table, &s_slot, INSERT);
478 gcc_assert (*hash_slot == NULL);
480 new_slot = XDUP (struct lto_section_slot, ls);
481 new_slot->name = new_name;
482 *hash_slot = new_slot;
486 /* Read declarations and other initializations for a FILE_DATA. */
/* Sets up the renaming table, pulls the decls section out of FILE_DATA,
   streams the declarations in, and frees the raw section data.  */
489 lto_file_finalize (struct lto_file_decl_data *file_data, lto_file *file)
494 file_data->renaming_hash_table = lto_create_renaming_table ();
495 file_data->file_name = file->filename;
496 data = lto_get_section_data (file_data, LTO_section_decls, NULL, &len);
499 internal_error ("cannot read LTO decls from %s", file_data->file_name);
502 lto_read_decls (file_data, data, file_data->resolutions);
503 lto_free_section_data (file_data, LTO_section_decls, NULL, data, len);
/* State threaded through splay_tree_foreach below: the lto_file being
   read and the head of the file_data list being built (the enclosing
   struct declaration is on lines missing from this excerpt).  */
509 struct lto_file_decl_data **file_data;
513 /* Traverse ids and create a list of file_datas out of it. */
/* splay_tree_foreach callback: finalize the file_data stored in NODE's
   value and push it onto the front of the caller's list.  */
515 static int lto_create_files_from_ids (splay_tree_node node, void *data)
517 struct lwstate *lw = (struct lwstate *)data;
518 struct lto_file_decl_data *file_data = (struct lto_file_decl_data *)node->value;
520 lto_file_finalize (file_data, lw->file);
521 if (cgraph_dump_file)
522 fprintf (cgraph_dump_file, "Creating file %s with sub id %x\n",
523 file_data->file_name, file_data->id);
524 file_data->next = *lw->file_data;
525 *lw->file_data = file_data;
530 /* Generate a TREE representation for all types and external decls
533 Read all of the globals out of the file. Then read the cgraph
534 and process the .o index into the cgraph nodes so that it can open
535 the .o file to load the functions and ipa information. */
/* Top-level per-object-file reader: builds the section table, splits it by
   sub-module id into a splay tree, attaches linker resolutions, finalizes
   each sub-module's file_data into a linked list, and cleans up.  Excerpt:
   FILE_IDS declaration, COUNT handling and the return are on lines
   missing from this listing.  */
537 static struct lto_file_decl_data *
538 lto_file_read (lto_file *file, FILE *resolution_file, int *count)
540 struct lto_file_decl_data *file_data = NULL;
542 htab_t section_hash_table;
543 struct lwstate state;
545 section_hash_table = lto_obj_build_section_table (file);
547 /* Find all sub modules in the object and put their sections into new hash
548 tables in a splay tree. */
549 file_ids = splay_tree_new (splay_tree_compare_ints, NULL, NULL);
550 htab_traverse (section_hash_table, create_subid_section_table, file_ids);
552 /* Add resolutions to file ids */
553 lto_resolution_read (file_ids, resolution_file, file);
555 /* Finalize each lto file for each submodule in the merged object
556 and create list for returning. */
558 state.file_data = &file_data;
560 splay_tree_foreach (file_ids, lto_create_files_from_ids, &state);
562 splay_tree_delete (file_ids);
563 htab_delete (section_hash_table);
568 #if HAVE_MMAP_FILE && HAVE_SYSCONF && defined _SC_PAGE_SIZE
569 #define LTO_MMAP_IO 1
573 /* Page size of machine is used for mmap and munmap calls. */
574 static size_t page_mask;
577 /* Get the section data of length LEN from FILENAME starting at
578 OFFSET. The data segment must be freed by the caller when the
579 caller is finished. Returns NULL if all was not well. */
/* Two code paths: mmap (LTO_MMAP_IO) rounds OFFSET down to a page boundary
   and returns a pointer DIFF bytes into the mapping; the fallback lseek+read
   path heap-allocates.  Freeing is matched by free_section_data below.
   Excerpt: FD/RESULT/DIFF declarations and several branches are on lines
   missing from this listing.  */
582 lto_read_section_data (struct lto_file_decl_data *file_data,
583 intptr_t offset, size_t len)
587 static char *fd_name;
589 intptr_t computed_len;
590 intptr_t computed_offset;
594 /* Keep a single-entry file-descriptor cache. The last file we
595 touched will get closed at exit.
596 ??? Eventually we want to add a more sophisticated larger cache
597 or rather fix function body streaming to not stream them in
598 practically random order. */
600 && strcmp (fd_name, file_data->file_name) != 0)
608 fd_name = xstrdup (file_data->file_name);
609 fd = open (file_data->file_name, O_RDONLY|O_BINARY);
/* Lazily compute the page mask on first use.  */
617 size_t page_size = sysconf (_SC_PAGE_SIZE);
618 page_mask = ~(page_size - 1);
621 computed_offset = offset & page_mask;
622 diff = offset - computed_offset;
623 computed_len = len + diff;
625 result = (char *) mmap (NULL, computed_len, PROT_READ, MAP_PRIVATE,
626 fd, computed_offset);
627 if (result == MAP_FAILED)
630 return result + diff;
/* Non-mmap fallback: read the section into a fresh buffer.  */
632 result = (char *) xmalloc (len);
633 if (lseek (fd, offset, SEEK_SET) != offset
634 || read (fd, result, len) != (ssize_t) len)
645 /* Get the section data from FILE_DATA of SECTION_TYPE with NAME.
646 NAME will be NULL unless the section type is for a function
/* Builds the full section name, looks it up in FILE_DATA's section table,
   reads the bytes via lto_read_section_data, and frees the temporary name.
   Excerpt: the DATA declaration and not-found handling are on lines
   missing from this listing.  */
650 get_section_data (struct lto_file_decl_data *file_data,
651 enum lto_section_type section_type,
655 htab_t section_hash_table = file_data->section_hash_table;
656 struct lto_section_slot *f_slot;
657 struct lto_section_slot s_slot;
658 const char *section_name = lto_get_section_name (section_type, name, file_data);
662 s_slot.name = section_name;
663 f_slot = (struct lto_section_slot *) htab_find (section_hash_table, &s_slot);
666 data = lto_read_section_data (file_data, f_slot->start, f_slot->len);
/* The composed name was only needed for the lookup.  */
670 free (CONST_CAST (char *, section_name));
675 /* Free the section data from FILE_DATA of SECTION_TYPE with NAME that
676 starts at OFFSET and has LEN bytes. */
/* Mirror of lto_read_section_data: under LTO_MMAP_IO, recompute the
   page-aligned base the mapping actually started at and munmap it;
   otherwise the buffer was xmalloc'ed, so plain free suffices.  */
679 free_section_data (struct lto_file_decl_data *file_data ATTRIBUTE_UNUSED,
680 enum lto_section_type section_type ATTRIBUTE_UNUSED,
681 const char *name ATTRIBUTE_UNUSED,
682 const char *offset, size_t len ATTRIBUTE_UNUSED)
685 intptr_t computed_len;
686 intptr_t computed_offset;
/* OFFSET points DIFF bytes past the page-aligned mapping base.  */
691 computed_offset = ((intptr_t) offset) & page_mask;
692 diff = (intptr_t) offset - computed_offset;
693 computed_len = len + diff;
695 munmap ((caddr_t) computed_offset, computed_len);
697 free (CONST_CAST(char *, offset));
701 /* Structure describing ltrans partitions. */
/* One LTRANS partition: the cgraph and varpool nodes assigned to it, its
   display NAME (not traced by GC), plus -- on lines missing from this
   excerpt -- presumably an instruction-count field (see part->insns uses
   below).  */
703 struct GTY (()) ltrans_partition_def
705 cgraph_node_set cgraph_set;
706 varpool_node_set varpool_set;
707 const char * GTY ((skip)) name;
711 typedef struct ltrans_partition_def *ltrans_partition;
712 DEF_VEC_P(ltrans_partition);
713 DEF_VEC_ALLOC_P(ltrans_partition,gc);
/* All partitions created so far, in creation order.  */
715 static GTY (()) VEC(ltrans_partition, gc) *ltrans_partitions;
/* Forward declarations: the two adders recurse into each other via
   add_references_to_partition.  */
717 static void add_cgraph_node_to_partition (ltrans_partition part, struct cgraph_node *node);
718 static void add_varpool_node_to_partition (ltrans_partition part, struct varpool_node *vnode);
720 /* Create new partition with name NAME. */
/* Allocates a GC'ed partition with empty node sets, registers it in
   ltrans_partitions, and (on lines missing from this excerpt) presumably
   stores NAME and returns the partition.  */
721 static ltrans_partition
722 new_partition (const char *name)
724 ltrans_partition part = ggc_alloc_ltrans_partition_def ();
725 part->cgraph_set = cgraph_node_set_new ();
726 part->varpool_set = varpool_node_set_new ();
729 VEC_safe_push (ltrans_partition, gc, ltrans_partitions, part);
733 /* See all references that go to comdat objects and bring them into partition too. */
/* For every IPA reference in REFS targeting a COMDAT function or variable
   not yet in PART, pull the target in.  COMDATs are duplicated into each
   partition that needs them rather than being partitioned themselves.  */
735 add_references_to_partition (ltrans_partition part, struct ipa_ref_list *refs)
739 for (i = 0; ipa_ref_list_reference_iterate (refs, i, ref); i++)
741 if (ref->refered_type == IPA_REF_CGRAPH
742 && DECL_COMDAT (ipa_ref_node (ref)->decl)
743 && !cgraph_node_in_set_p (ipa_ref_node (ref), part->cgraph_set))
744 add_cgraph_node_to_partition (part, ipa_ref_node (ref));
746 if (ref->refered_type == IPA_REF_VARPOOL
747 && DECL_COMDAT (ipa_ref_varpool_node (ref)->decl)
748 && !varpool_node_in_set_p (ipa_ref_varpool_node (ref), part->varpool_set))
749 add_varpool_node_to_partition (part, ipa_ref_varpool_node (ref));
753 /* Add NODE to partition as well as the inline callees and referred comdats into partition PART. */
/* Accounts NODE's size into PART, records multi-partition membership
   (node->aux counts how many partitions contain the node), and recursively
   pulls in inlined callees, COMDAT callees, referenced COMDATs, and the
   rest of NODE's comdat group.  Excerpt: the membership test guarding the
   in_other_partition marking is on lines missing from this listing.  */
756 add_cgraph_node_to_partition (ltrans_partition part, struct cgraph_node *node)
758 struct cgraph_edge *e;
760 part->insns += node->local.inline_summary.self_size;
764 node->in_other_partition = 1;
765 if (cgraph_dump_file)
766 fprintf (cgraph_dump_file, "Node %s/%i now used in multiple partitions\n",
767 cgraph_node_name (node), node->uid);
/* aux is used as a reference count of containing partitions.  */
769 node->aux = (void *)((size_t)node->aux + 1);
771 cgraph_node_set_add (part->cgraph_set, node);
/* Inlined callees must live in the same partition; COMDAT callees are
   duplicated wherever used.  */
773 for (e = node->callees; e; e = e->next_callee)
774 if ((!e->inline_failed || DECL_COMDAT (e->callee->decl))
775 && !cgraph_node_in_set_p (e->callee, part->cgraph_set))
776 add_cgraph_node_to_partition (part, e->callee);
778 add_references_to_partition (part, &node->ref_list);
/* Keep whole comdat groups together.  */
780 if (node->same_comdat_group
781 && !cgraph_node_in_set_p (node->same_comdat_group, part->cgraph_set))
782 add_cgraph_node_to_partition (part, node->same_comdat_group);
785 /* Add VNODE to partition as well as comdat references partition PART. */
/* Varpool analogue of add_cgraph_node_to_partition: adds VNODE to PART's
   varpool set, tracks multi-partition membership via vnode->aux, and pulls
   in referenced COMDATs and the comdat group.  Excerpt: the guard before
   the in_other_partition marking is on lines missing from this listing.  */
788 add_varpool_node_to_partition (ltrans_partition part, struct varpool_node *vnode)
790 varpool_node_set_add (part->varpool_set, vnode);
794 vnode->in_other_partition = 1;
795 if (cgraph_dump_file)
796 fprintf (cgraph_dump_file, "Varpool node %s now used in multiple partitions\n",
797 varpool_node_name (vnode));
/* aux counts how many partitions contain this node.  */
799 vnode->aux = (void *)((size_t)vnode->aux + 1);
801 add_references_to_partition (part, &vnode->ref_list);
803 if (vnode->same_comdat_group
804 && !varpool_node_in_set_p (vnode->same_comdat_group, part->varpool_set))
805 add_varpool_node_to_partition (part, vnode->same_comdat_group);
808 /* Undo all additions until number of cgraph nodes in PARTITION is N_CGRAPH_NODES
809 and number of varpool nodes is N_VARPOOL_NODES. */
/* Used by lto_balanced_map to rewind a partition to its best-cost point:
   pops nodes off the tail of both node sets, reversing the size accounting
   and the aux membership counts maintained by the adders above.  */
812 undo_partition (ltrans_partition partition, unsigned int n_cgraph_nodes,
813 unsigned int n_varpool_nodes)
815 while (VEC_length (cgraph_node_ptr, partition->cgraph_set->nodes) >
818 struct cgraph_node *node = VEC_index (cgraph_node_ptr,
819 partition->cgraph_set->nodes,
821 partition->insns -= node->local.inline_summary.self_size;
822 cgraph_node_set_remove (partition->cgraph_set, node);
823 node->aux = (void *)((size_t)node->aux - 1);
825 while (VEC_length (varpool_node_ptr, partition->varpool_set->nodes) >
828 struct varpool_node *node = VEC_index (varpool_node_ptr,
829 partition->varpool_set->nodes,
831 varpool_node_set_remove (partition->varpool_set, node);
832 node->aux = (void *)((size_t)node->aux - 1);
836 /* Return true if NODE should be partitioned.
837 This means that partitioning algorithm should put NODE into one of partitions.
838 This apply to most functions with bodies. Functions that are not partitions
839 are put into every unit needing them. This is the case of i.e. COMDATs. */
/* Excludes: inlined nodes (they follow their inline host), bodyless nodes,
   extern inlines, and COMDATs not referenced from a non-LTO object --
   all of these are instead copied into whichever partitions need them.  */
842 partition_cgraph_node_p (struct cgraph_node *node)
844 /* We will get proper partition based on function they are inlined to. */
845 if (node->global.inlined_to)
847 /* Nodes without a body do not need partitioning. */
850 /* Extern inlines and comdat are always only in partitions they are needed. */
851 if (DECL_EXTERNAL (node->decl)
852 || (DECL_COMDAT (node->decl)
853 && !cgraph_used_from_object_file_p (node)))
858 /* Return true if VNODE should be partitioned.
859 This means that partitioning algorithm should put VNODE into one of partitions. */
/* Excludes aliases, unneeded variables, constant-pool entries, and
   COMDAT variables that are neither force-output nor referenced from a
   non-LTO object -- those get duplicated on demand instead.  */
862 partition_varpool_node_p (struct varpool_node *vnode)
864 if (vnode->alias || !vnode->needed)
866 /* Constant pool and comdat are always only in partitions they are needed. */
867 if (DECL_IN_CONSTANT_POOL (vnode->decl)
868 || (DECL_COMDAT (vnode->decl)
869 && !vnode->force_output
870 && !varpool_used_from_object_file_p (vnode)))
875 /* Group cgrah nodes by input files. This is used mainly for testing
879 lto_1_to_1_map (void)
881 struct cgraph_node *node;
882 struct varpool_node *vnode;
883 struct lto_file_decl_data *file_data;
884 struct pointer_map_t *pmap;
885 ltrans_partition partition;
889 timevar_push (TV_WHOPR_WPA);
891 pmap = pointer_map_create ();
893 for (node = cgraph_nodes; node; node = node->next)
895 if (!partition_cgraph_node_p (node))
898 file_data = node->local.lto_file_data;
899 gcc_assert (!node->same_body_alias);
903 slot = pointer_map_contains (pmap, file_data);
905 partition = (ltrans_partition) *slot;
908 partition = new_partition (file_data->file_name);
909 slot = pointer_map_insert (pmap, file_data);
915 && VEC_length (ltrans_partition, ltrans_partitions))
916 partition = VEC_index (ltrans_partition, ltrans_partitions, 0);
919 partition = new_partition ("");
920 slot = pointer_map_insert (pmap, NULL);
926 add_cgraph_node_to_partition (partition, node);
929 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
931 if (!partition_varpool_node_p (vnode))
933 file_data = vnode->lto_file_data;
934 slot = pointer_map_contains (pmap, file_data);
936 partition = (ltrans_partition) *slot;
939 partition = new_partition (file_data->file_name);
940 slot = pointer_map_insert (pmap, file_data);
946 add_varpool_node_to_partition (partition, vnode);
948 for (node = cgraph_nodes; node; node = node->next)
950 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
953 /* If the cgraph is empty, create one cgraph node set so that there is still
954 an output file for any variables that need to be exported in a DSO. */
956 new_partition ("empty");
958 pointer_map_destroy (pmap);
960 timevar_pop (TV_WHOPR_WPA);
962 lto_stats.num_cgraph_partitions += VEC_length (ltrans_partition,
967 /* Group cgraph nodes into equally sized partitions.
969 The algorithm deciding partitions is simple: nodes are taken in a predefined
970 order. The order corresponds to the order we wish to have functions in the final
971 output. In the future this will be given by a function reordering pass, but at
972 the moment we use topological order, which serves as a good approximation.
974 The goal is to partition this linear order into intervals (partitions) such
975 that all partitions have approximately the same size and that the number of
976 callgraph or IPA reference edges crossing boundaries is minimal.
978 This is a lot faster (O(n) in size of callgraph) than algorithms doing
979 priority based graph clustering that are generally O(n^2) and since WHOPR
980 is designed to make things go well across partitions, it leads to good results.
982 We compute the expected size of partition as
983 max (total_size / lto_partitions, min_partition_size).
984 We use dynamic expected size of partition, so small programs
985 are partitioned into enough partitions to allow use of multiple CPUs while
986 large programs are not partitioned too much. Creating too many partitions
987 increases streaming overhead significantly.
989 In the future we would like to bound maximal size of partition to avoid
990 ltrans stage consuming too much memory. At the moment however WPA stage is
991 most memory intensive phase at large benchmark since too many types and
992 declarations are read into memory.
994 The function implements a simple greedy algorithm. Nodes are added into the
995 current partition until 3/4 of the expected partition size is reached.
996 After this threshold we keep track of boundary size (number of edges going to
997 other partitions) and continue adding functions until the current partition
998 grows into a double of expected partition size. Then the process is undone
999 till the point when the minimal ratio of boundary size to in-partition calls
1003 lto_balanced_map (void)
1006 struct cgraph_node **postorder =
1007 XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1008 struct cgraph_node **order = XNEWVEC (struct cgraph_node *, cgraph_max_uid);
1009 int i, postorder_len;
1010 struct cgraph_node *node;
1011 int total_size = 0, best_total_size = 0;
1013 ltrans_partition partition;
1014 unsigned int last_visited_cgraph_node = 0, last_visited_varpool_node = 0;
1015 struct varpool_node *vnode;
1016 int cost = 0, internal = 0;
1017 int best_n_nodes = 0, best_n_varpool_nodes = 0, best_i = 0, best_cost =
1018 INT_MAX, best_internal = 0;
1021 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
1022 gcc_assert (!vnode->aux);
1023 /* Until we have better ordering facility, use toplogical order.
1024 Include only nodes we will partition and compute estimate of program
1025 size. Note that since nodes that are not partitioned might be put into
1026 multiple partitions, this is just an estimate of real size. This is why
1027 we keep partition_size updated after every partition is finalized. */
1028 postorder_len = cgraph_postorder (postorder);
1029 for (i = 0; i < postorder_len; i++)
1031 node = postorder[i];
1032 if (partition_cgraph_node_p (node))
1034 order[n_nodes++] = node;
1035 total_size += node->global.size;
1040 /* Compute partition size and create the first partition. */
1041 partition_size = total_size / PARAM_VALUE (PARAM_LTO_PARTITIONS);
1042 if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
1043 partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
1045 partition = new_partition ("");
1046 if (cgraph_dump_file)
1047 fprintf (cgraph_dump_file, "Total unit size: %i, partition size: %i\n",
1048 total_size, partition_size);
1050 for (i = 0; i < n_nodes; i++)
1053 add_cgraph_node_to_partition (partition, order[i]);
1054 total_size -= order[i]->global.size;
1056 /* Once we added a new node to the partition, we also want to add
1057 all referenced variables unless they was already added into some
1059 add_cgraph_node_to_partition adds possibly multiple nodes and
1060 variables that are needed to satisfy needs of ORDER[i].
1061 We remember last visited cgraph and varpool node from last iteration
1062 of outer loop that allows us to process every new addition.
1064 At the same time we compute size of the boundary into COST. Every
1065 callgraph or IPA reference edge leaving the partition contributes into
1066 COST. Every edge inside partition was earlier computed as one leaving
1067 it and thus we need to subtract it from COST. */
1068 while (last_visited_cgraph_node <
1069 VEC_length (cgraph_node_ptr, partition->cgraph_set->nodes)
1070 || last_visited_varpool_node < VEC_length (varpool_node_ptr,
1071 partition->varpool_set->
1074 struct ipa_ref_list *refs;
1076 struct ipa_ref *ref;
1077 bool cgraph_p = false;
1079 if (last_visited_cgraph_node <
1080 VEC_length (cgraph_node_ptr, partition->cgraph_set->nodes))
1082 struct cgraph_edge *edge;
1085 node = VEC_index (cgraph_node_ptr, partition->cgraph_set->nodes,
1086 last_visited_cgraph_node);
1087 refs = &node->ref_list;
1089 last_visited_cgraph_node++;
1091 gcc_assert (node->analyzed);
1093 /* Compute boundary cost of callgrpah edges. */
1094 for (edge = node->callees; edge; edge = edge->next_callee)
1095 if (edge->callee->analyzed)
1097 int edge_cost = edge->frequency;
1098 cgraph_node_set_iterator csi;
1102 gcc_assert (edge_cost > 0);
1103 csi = cgraph_node_set_find (partition->cgraph_set, edge->callee);
1104 if (!csi_end_p (csi)
1105 && csi.index < last_visited_cgraph_node - 1)
1106 cost -= edge_cost, internal+= edge_cost;
1110 for (edge = node->callers; edge; edge = edge->next_caller)
1112 int edge_cost = edge->frequency;
1113 cgraph_node_set_iterator csi;
1115 gcc_assert (edge->caller->analyzed);
1118 gcc_assert (edge_cost > 0);
1119 csi = cgraph_node_set_find (partition->cgraph_set, edge->caller);
1120 if (!csi_end_p (csi)
1121 && csi.index < last_visited_cgraph_node)
1130 &VEC_index (varpool_node_ptr, partition->varpool_set->nodes,
1131 last_visited_varpool_node)->ref_list;
1132 last_visited_varpool_node++;
1135 /* Compute boundary cost of IPA REF edges and at the same time look into
1136 variables referenced from current partition and try to add them. */
1137 for (j = 0; ipa_ref_list_reference_iterate (refs, j, ref); j++)
1138 if (ref->refered_type == IPA_REF_VARPOOL)
1140 varpool_node_set_iterator vsi;
1142 vnode = ipa_ref_varpool_node (ref);
1143 if (!vnode->finalized)
1145 if (!vnode->aux && partition_varpool_node_p (vnode))
1146 add_varpool_node_to_partition (partition, vnode);
1147 vsi = varpool_node_set_find (partition->varpool_set, vnode);
1148 if (!vsi_end_p (vsi)
1149 && vsi.index < last_visited_varpool_node - !cgraph_p)
1156 cgraph_node_set_iterator csi;
1158 node = ipa_ref_node (ref);
1159 if (!node->analyzed)
1161 csi = cgraph_node_set_find (partition->cgraph_set, node);
1162 if (!csi_end_p (csi)
1163 && csi.index < last_visited_cgraph_node - cgraph_p)
1168 for (j = 0; ipa_ref_list_refering_iterate (refs, j, ref); j++)
1169 if (ref->refering_type == IPA_REF_VARPOOL)
1171 varpool_node_set_iterator vsi;
1173 vnode = ipa_ref_refering_varpool_node (ref);
1174 gcc_assert (vnode->finalized);
1175 if (!vnode->aux && partition_varpool_node_p (vnode))
1176 add_varpool_node_to_partition (partition, vnode);
1177 vsi = varpool_node_set_find (partition->varpool_set, vnode);
1178 if (!vsi_end_p (vsi)
1179 && vsi.index < last_visited_varpool_node)
1186 cgraph_node_set_iterator csi;
1188 node = ipa_ref_refering_node (ref);
1189 gcc_assert (node->analyzed);
1190 csi = cgraph_node_set_find (partition->cgraph_set, node);
1191 if (!csi_end_p (csi)
1192 && csi.index < last_visited_cgraph_node)
1199 /* If the partition is large enough, start looking for smallest boundary cost. */
1200 if (partition->insns < partition_size * 3 / 4
1201 || best_cost == INT_MAX
1203 || (best_internal * (HOST_WIDE_INT) cost
1204 > (internal * (HOST_WIDE_INT)best_cost)))
1205 && partition->insns < partition_size * 5 / 4))
1208 best_internal = internal;
1210 best_n_nodes = VEC_length (cgraph_node_ptr,
1211 partition->cgraph_set->nodes);
1212 best_n_varpool_nodes = VEC_length (varpool_node_ptr,
1213 partition->varpool_set->nodes);
1214 best_total_size = total_size;
1216 if (cgraph_dump_file)
1217 fprintf (cgraph_dump_file, "Step %i: added %s/%i, size %i, cost %i/%i best %i/%i, step %i\n", i,
1218 cgraph_node_name (order[i]), order[i]->uid, partition->insns, cost, internal,
1219 best_cost, best_internal, best_i);
1220 /* Partition is too large, unwind into step when best cost was reached and
1221 start new partition. */
1222 if (partition->insns > 2 * partition_size)
1226 if (cgraph_dump_file)
1227 fprintf (cgraph_dump_file, "Unwinding %i insertions to step %i\n",
1228 i - best_i, best_i);
1229 undo_partition (partition, best_n_nodes, best_n_varpool_nodes);
1232 /* When we are finished, avoid creating empty partition. */
1233 if (i == n_nodes - 1)
1235 partition = new_partition ("");
1236 last_visited_cgraph_node = 0;
1237 last_visited_varpool_node = 0;
1238 total_size = best_total_size;
1241 if (cgraph_dump_file)
1242 fprintf (cgraph_dump_file, "New partition\n");
1244 best_n_varpool_nodes = 0;
1245 best_cost = INT_MAX;
1247 /* Since the size of partitions is just approximate, update the size after
1248 we finished current one. */
1249 if (npartitions < PARAM_VALUE (PARAM_LTO_PARTITIONS))
1250 partition_size = total_size
1251 / (PARAM_VALUE (PARAM_LTO_PARTITIONS) - npartitions);
1253 partition_size = INT_MAX;
1255 if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
1256 partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
1261 /* Variables that are not reachable from the code go into last partition. */
1262 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
1263 if (partition_varpool_node_p (vnode) && !vnode->aux)
1264 add_varpool_node_to_partition (partition, vnode);
1268 /* Promote variable VNODE to be static. */
1271 promote_var (struct varpool_node *vnode)
1273 if (TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl))
1275 gcc_assert (flag_wpa);
1276 TREE_PUBLIC (vnode->decl) = 1;
1277 DECL_VISIBILITY (vnode->decl) = VISIBILITY_HIDDEN;
1278 DECL_VISIBILITY_SPECIFIED (vnode->decl) = true;
1279 if (cgraph_dump_file)
1280 fprintf (cgraph_dump_file,
1281 "Promoting var as hidden: %s\n", varpool_node_name (vnode));
1285 /* Promote function NODE to be static. */
1288 promote_fn (struct cgraph_node *node)
1290 gcc_assert (flag_wpa);
1291 if (TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl))
1293 TREE_PUBLIC (node->decl) = 1;
1294 DECL_VISIBILITY (node->decl) = VISIBILITY_HIDDEN;
1295 DECL_VISIBILITY_SPECIFIED (node->decl) = true;
1296 if (node->same_body)
1298 struct cgraph_node *alias;
1299 for (alias = node->same_body;
1300 alias; alias = alias->next)
1302 TREE_PUBLIC (alias->decl) = 1;
1303 DECL_VISIBILITY (alias->decl) = VISIBILITY_HIDDEN;
1304 DECL_VISIBILITY_SPECIFIED (alias->decl) = true;
1307 if (cgraph_dump_file)
1308 fprintf (cgraph_dump_file,
1309 "Promoting function as hidden: %s/%i\n",
1310 cgraph_node_name (node), node->uid);
1314 /* Find out all static decls that need to be promoted to global because
1315 of cross file sharing. This function must be run in the WPA mode after
1316 all inlinees are added. */
1319 lto_promote_cross_file_statics (void)
1321 struct varpool_node *vnode;
1323 cgraph_node_set set;
1324 varpool_node_set vset;
1325 cgraph_node_set_iterator csi;
1326 varpool_node_set_iterator vsi;
1327 VEC(varpool_node_ptr, heap) *promoted_initializers = NULL;
1328 struct pointer_set_t *inserted = pointer_set_create ();
1330 gcc_assert (flag_wpa);
1332 n_sets = VEC_length (ltrans_partition, ltrans_partitions);
1333 for (i = 0; i < n_sets; i++)
1335 ltrans_partition part = VEC_index (ltrans_partition, ltrans_partitions, i);
1336 set = part->cgraph_set;
1337 vset = part->varpool_set;
1339 /* If node has either address taken (and we have no clue from where)
1340 or it is called from other partition, it needs to be globalized. */
1341 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
1343 struct cgraph_node *node = csi_node (csi);
1344 if (node->local.externally_visible)
1346 if (node->global.inlined_to)
1348 if ((!DECL_EXTERNAL (node->decl) && !DECL_COMDAT (node->decl))
1349 && (referenced_from_other_partition_p (&node->ref_list, set, vset)
1350 || reachable_from_other_partition_p (node, set)))
1353 for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
1355 vnode = vsi_node (vsi);
1356 /* Constant pool references use internal labels and thus can not
1357 be made global. It is sensible to keep those ltrans local to
1358 allow better optimization. */
1359 if (!DECL_IN_CONSTANT_POOL (vnode->decl) && !DECL_COMDAT (vnode->decl)
1360 && !vnode->externally_visible && vnode->analyzed
1361 && referenced_from_other_partition_p (&vnode->ref_list,
1363 promote_var (vnode);
1366 /* We export initializers of read-only var into each partition
1367 referencing it. Folding might take declarations from the
1368 initializers and use it; so everything referenced from the
1369 initializers needs can be accessed from this partition after
1372 This means that we need to promote all variables and functions
1373 referenced from all initializers from readonly vars referenced
1374 from this partition that are not in this partition.
1375 This needs to be done recursively. */
1376 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
1377 if (const_value_known_p (vnode->decl)
1378 && DECL_INITIAL (vnode->decl)
1379 && !varpool_node_in_set_p (vnode, vset)
1380 && referenced_from_this_partition_p (&vnode->ref_list, set, vset)
1381 && !pointer_set_insert (inserted, vnode))
1382 VEC_safe_push (varpool_node_ptr, heap, promoted_initializers, vnode);
1383 while (!VEC_empty (varpool_node_ptr, promoted_initializers))
1386 struct ipa_ref *ref;
1388 vnode = VEC_pop (varpool_node_ptr, promoted_initializers);
1389 for (i = 0; ipa_ref_list_reference_iterate (&vnode->ref_list, i, ref); i++)
1391 if (ref->refered_type == IPA_REF_CGRAPH)
1393 struct cgraph_node *n = ipa_ref_node (ref);
1394 gcc_assert (!n->global.inlined_to);
1395 if (!n->local.externally_visible
1396 && !cgraph_node_in_set_p (n, set))
1401 struct varpool_node *v = ipa_ref_varpool_node (ref);
1402 if (varpool_node_in_set_p (v, vset))
1404 /* Constant pool references use internal labels and thus can not
1405 be made global. It is sensible to keep those ltrans local to
1406 allow better optimization. */
1407 if (DECL_IN_CONSTANT_POOL (v->decl))
1409 if (!pointer_set_insert (inserted, vnode))
1410 VEC_safe_push (varpool_node_ptr, heap,
1411 promoted_initializers, v);
1413 else if (!DECL_IN_CONSTANT_POOL (v->decl)
1414 && !v->externally_visible && v->analyzed)
1417 && DECL_INITIAL (v->decl)
1418 && const_value_known_p (v->decl)
1419 && !pointer_set_insert (inserted, vnode))
1420 VEC_safe_push (varpool_node_ptr, heap,
1421 promoted_initializers, v);
1427 pointer_set_destroy (inserted);
1430 static lto_file *current_lto_file;
1432 /* Helper for qsort; compare partitions and return one with smaller size.
1433 We sort from greatest to smallest so parallel build doesn't stale on the
1434 longest compilation being executed too late. */
1437 cmp_partitions (const void *a, const void *b)
1439 const struct ltrans_partition_def *pa
1440 = *(struct ltrans_partition_def *const *)a;
1441 const struct ltrans_partition_def *pb
1442 = *(struct ltrans_partition_def *const *)b;
1443 return pb->insns - pa->insns;
1446 /* Write all output files in WPA mode and the file with the list of
1450 lto_wpa_write_files (void)
1454 cgraph_node_set set;
1455 varpool_node_set vset;
1456 ltrans_partition part;
1457 FILE *ltrans_output_list_stream;
1458 char *temp_filename;
1461 /* Open the LTRANS output list. */
1462 if (!ltrans_output_list)
1463 fatal_error ("no LTRANS output list filename provided");
1464 ltrans_output_list_stream = fopen (ltrans_output_list, "w");
1465 if (ltrans_output_list_stream == NULL)
1466 fatal_error ("opening LTRANS output list %s: %m", ltrans_output_list);
1468 timevar_push (TV_WHOPR_WPA);
1470 FOR_EACH_VEC_ELT (ltrans_partition, ltrans_partitions, i, part)
1471 lto_stats.num_output_cgraph_nodes += VEC_length (cgraph_node_ptr,
1472 part->cgraph_set->nodes);
1474 /* Find out statics that need to be promoted
1475 to globals with hidden visibility because they are accessed from multiple
1477 lto_promote_cross_file_statics ();
1479 timevar_pop (TV_WHOPR_WPA);
1481 timevar_push (TV_WHOPR_WPA_IO);
1483 /* Generate a prefix for the LTRANS unit files. */
1484 blen = strlen (ltrans_output_list);
1485 temp_filename = (char *) xmalloc (blen + sizeof ("2147483648.o"));
1486 strcpy (temp_filename, ltrans_output_list);
1487 if (blen > sizeof (".out")
1488 && strcmp (temp_filename + blen - sizeof (".out") + 1,
1490 temp_filename[blen - sizeof (".out") + 1] = '\0';
1491 blen = strlen (temp_filename);
1493 n_sets = VEC_length (ltrans_partition, ltrans_partitions);
1494 VEC_qsort (ltrans_partition, ltrans_partitions, cmp_partitions);
1495 for (i = 0; i < n_sets; i++)
1498 ltrans_partition part = VEC_index (ltrans_partition, ltrans_partitions, i);
1500 set = part->cgraph_set;
1501 vset = part->varpool_set;
1503 /* Write all the nodes in SET. */
1504 sprintf (temp_filename + blen, "%u.o", i);
1505 file = lto_obj_file_open (temp_filename, true);
1507 fatal_error ("lto_obj_file_open() failed");
1510 fprintf (stderr, " %s (%s %i insns)", temp_filename, part->name, part->insns);
1511 if (cgraph_dump_file)
1513 fprintf (cgraph_dump_file, "Writting partition %s to file %s, %i insns\n",
1514 part->name, temp_filename, part->insns);
1515 fprintf (cgraph_dump_file, "cgraph nodes:");
1516 dump_cgraph_node_set (cgraph_dump_file, set);
1517 fprintf (cgraph_dump_file, "varpool nodes:");
1518 dump_varpool_node_set (cgraph_dump_file, vset);
1520 gcc_assert (cgraph_node_set_nonempty_p (set)
1521 || varpool_node_set_nonempty_p (vset) || !i);
1523 lto_set_current_out_file (file);
1525 ipa_write_optimization_summaries (set, vset);
1527 lto_set_current_out_file (NULL);
1528 lto_obj_file_close (file);
1530 len = strlen (temp_filename);
1531 if (fwrite (temp_filename, 1, len, ltrans_output_list_stream) < len
1532 || fwrite ("\n", 1, 1, ltrans_output_list_stream) < 1)
1533 fatal_error ("writing to LTRANS output list %s: %m",
1534 ltrans_output_list);
1537 lto_stats.num_output_files += n_sets;
1539 /* Close the LTRANS output list. */
1540 if (fclose (ltrans_output_list_stream))
1541 fatal_error ("closing LTRANS output list %s: %m", ltrans_output_list);
1543 timevar_pop (TV_WHOPR_WPA_IO);
1548 struct pointer_set_t *seen;
1551 #define LTO_FIXUP_SUBTREE(t) \
1553 walk_tree (&(t), lto_fixup_tree, data, NULL); \
1556 #define LTO_REGISTER_TYPE_AND_FIXUP_SUBTREE(t) \
1560 (t) = gimple_register_type (t); \
1561 walk_tree (&(t), lto_fixup_tree, data, NULL); \
1565 static tree lto_fixup_tree (tree *, int *, void *);
1567 /* Return true if T does not need to be fixed up recursively. */
1573 || CONSTANT_CLASS_P (t)
1574 || TREE_CODE (t) == IDENTIFIER_NODE);
1577 /* Fix up fields of a tree_common T. DATA points to fix-up states. */
1580 lto_fixup_common (tree t, void *data)
1582 /* The following re-creates the TYPE_REFERENCE_TO and TYPE_POINTER_TO
1583 lists. We do not stream TYPE_REFERENCE_TO, TYPE_POINTER_TO or
1584 TYPE_NEXT_PTR_TO and TYPE_NEXT_REF_TO.
1585 First remove us from any pointer list we are on. */
1586 if (TREE_CODE (t) == POINTER_TYPE)
1588 if (TYPE_POINTER_TO (TREE_TYPE (t)) == t)
1589 TYPE_POINTER_TO (TREE_TYPE (t)) = TYPE_NEXT_PTR_TO (t);
1592 tree tem = TYPE_POINTER_TO (TREE_TYPE (t));
1593 while (tem && TYPE_NEXT_PTR_TO (tem) != t)
1594 tem = TYPE_NEXT_PTR_TO (tem);
1596 TYPE_NEXT_PTR_TO (tem) = TYPE_NEXT_PTR_TO (t);
1598 TYPE_NEXT_PTR_TO (t) = NULL_TREE;
1600 else if (TREE_CODE (t) == REFERENCE_TYPE)
1602 if (TYPE_REFERENCE_TO (TREE_TYPE (t)) == t)
1603 TYPE_REFERENCE_TO (TREE_TYPE (t)) = TYPE_NEXT_REF_TO (t);
1606 tree tem = TYPE_REFERENCE_TO (TREE_TYPE (t));
1607 while (tem && TYPE_NEXT_REF_TO (tem) != t)
1608 tem = TYPE_NEXT_REF_TO (tem);
1610 TYPE_NEXT_REF_TO (tem) = TYPE_NEXT_REF_TO (t);
1612 TYPE_NEXT_REF_TO (t) = NULL_TREE;
1615 /* Fixup our type. */
1616 LTO_REGISTER_TYPE_AND_FIXUP_SUBTREE (TREE_TYPE (t));
1618 /* Second put us on the list of pointers of the new pointed-to type
1619 if we are a main variant. This is done in lto_fixup_type after
1620 fixing up our main variant. */
1622 /* This is not very efficient because we cannot do tail-recursion with
1623 a long chain of trees. */
1624 LTO_FIXUP_SUBTREE (TREE_CHAIN (t));
1627 /* Fix up fields of a decl_minimal T. DATA points to fix-up states. */
1630 lto_fixup_decl_minimal (tree t, void *data)
1632 lto_fixup_common (t, data);
1633 LTO_FIXUP_SUBTREE (DECL_NAME (t));
1634 LTO_FIXUP_SUBTREE (DECL_CONTEXT (t));
1637 /* Fix up fields of a decl_common T. DATA points to fix-up states. */
1640 lto_fixup_decl_common (tree t, void *data)
1642 lto_fixup_decl_minimal (t, data);
1643 LTO_FIXUP_SUBTREE (DECL_SIZE (t));
1644 LTO_FIXUP_SUBTREE (DECL_SIZE_UNIT (t));
1645 LTO_FIXUP_SUBTREE (DECL_INITIAL (t));
1646 LTO_FIXUP_SUBTREE (DECL_ATTRIBUTES (t));
1647 LTO_FIXUP_SUBTREE (DECL_ABSTRACT_ORIGIN (t));
1650 /* Fix up fields of a decl_with_vis T. DATA points to fix-up states. */
1653 lto_fixup_decl_with_vis (tree t, void *data)
1655 lto_fixup_decl_common (t, data);
1657 /* Accessor macro has side-effects, use field-name here. */
1658 LTO_FIXUP_SUBTREE (t->decl_with_vis.assembler_name);
1660 gcc_assert (no_fixup_p (DECL_SECTION_NAME (t)));
1663 /* Fix up fields of a decl_non_common T. DATA points to fix-up states. */
1666 lto_fixup_decl_non_common (tree t, void *data)
1668 lto_fixup_decl_with_vis (t, data);
1669 LTO_FIXUP_SUBTREE (DECL_ARGUMENT_FLD (t));
1670 LTO_FIXUP_SUBTREE (DECL_RESULT_FLD (t));
1671 LTO_FIXUP_SUBTREE (DECL_VINDEX (t));
1673 /* SAVED_TREE should not cleared by now. Also no accessor for base type. */
1674 gcc_assert (no_fixup_p (t->decl_non_common.saved_tree));
1677 /* Fix up fields of a decl_non_common T. DATA points to fix-up states. */
1680 lto_fixup_function (tree t, void *data)
1682 lto_fixup_decl_non_common (t, data);
1683 LTO_FIXUP_SUBTREE (DECL_FUNCTION_PERSONALITY (t));
1686 /* Fix up fields of a field_decl T. DATA points to fix-up states. */
1689 lto_fixup_field_decl (tree t, void *data)
1691 lto_fixup_decl_common (t, data);
1692 LTO_FIXUP_SUBTREE (DECL_FIELD_OFFSET (t));
1693 LTO_FIXUP_SUBTREE (DECL_BIT_FIELD_TYPE (t));
1694 LTO_FIXUP_SUBTREE (DECL_QUALIFIER (t));
1695 gcc_assert (no_fixup_p (DECL_FIELD_BIT_OFFSET (t)));
1696 LTO_FIXUP_SUBTREE (DECL_FCONTEXT (t));
1699 /* Fix up fields of a type T. DATA points to fix-up states. */
1702 lto_fixup_type (tree t, void *data)
1706 lto_fixup_common (t, data);
1707 LTO_FIXUP_SUBTREE (TYPE_CACHED_VALUES (t));
1708 LTO_FIXUP_SUBTREE (TYPE_SIZE (t));
1709 LTO_FIXUP_SUBTREE (TYPE_SIZE_UNIT (t));
1710 LTO_FIXUP_SUBTREE (TYPE_ATTRIBUTES (t));
1711 LTO_FIXUP_SUBTREE (TYPE_NAME (t));
1713 /* Accessors are for derived node types only. */
1714 if (!POINTER_TYPE_P (t))
1715 LTO_FIXUP_SUBTREE (t->type.minval);
1716 LTO_FIXUP_SUBTREE (t->type.maxval);
1718 /* Accessor is for derived node types only. */
1719 LTO_FIXUP_SUBTREE (t->type.binfo);
1721 if (TYPE_CONTEXT (t))
1723 if (TYPE_P (TYPE_CONTEXT (t)))
1724 LTO_REGISTER_TYPE_AND_FIXUP_SUBTREE (TYPE_CONTEXT (t));
1726 LTO_FIXUP_SUBTREE (TYPE_CONTEXT (t));
1729 /* Compute the canonical type of t and fix that up. From this point
1730 there are no longer any types with TYPE_STRUCTURAL_EQUALITY_P
1731 and its type-based alias problems. */
1732 if (!TYPE_CANONICAL (t))
1734 TYPE_CANONICAL (t) = gimple_register_canonical_type (t);
1735 LTO_FIXUP_SUBTREE (TYPE_CANONICAL (t));
1738 /* The following re-creates proper variant lists while fixing up
1739 the variant leaders. We do not stream TYPE_NEXT_VARIANT so the
1740 variant list state before fixup is broken. */
1742 /* Remove us from our main variant list if we are not the variant leader. */
1743 if (TYPE_MAIN_VARIANT (t) != t)
1745 tem = TYPE_MAIN_VARIANT (t);
1746 while (tem && TYPE_NEXT_VARIANT (tem) != t)
1747 tem = TYPE_NEXT_VARIANT (tem);
1749 TYPE_NEXT_VARIANT (tem) = TYPE_NEXT_VARIANT (t);
1750 TYPE_NEXT_VARIANT (t) = NULL_TREE;
1753 /* Query our new main variant. */
1754 mv = gimple_register_type (TYPE_MAIN_VARIANT (t));
1756 /* If we were the variant leader and we get replaced ourselves drop
1757 all variants from our list. */
1758 if (TYPE_MAIN_VARIANT (t) == t
1764 tree tem2 = TYPE_NEXT_VARIANT (tem);
1765 TYPE_NEXT_VARIANT (tem) = NULL_TREE;
1770 /* If we are not our own variant leader link us into our new leaders
1774 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (mv);
1775 TYPE_NEXT_VARIANT (mv) = t;
1778 /* Finally adjust our main variant and fix it up. */
1779 TYPE_MAIN_VARIANT (t) = mv;
1780 LTO_FIXUP_SUBTREE (TYPE_MAIN_VARIANT (t));
1782 /* As the second step of reconstructing the pointer chains put us
1783 on the list of pointers of the new pointed-to type
1784 if we are a main variant. See lto_fixup_common for the first step. */
1785 if (TREE_CODE (t) == POINTER_TYPE
1786 && TYPE_MAIN_VARIANT (t) == t)
1788 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (TREE_TYPE (t));
1789 TYPE_POINTER_TO (TREE_TYPE (t)) = t;
1791 else if (TREE_CODE (t) == REFERENCE_TYPE
1792 && TYPE_MAIN_VARIANT (t) == t)
1794 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (TREE_TYPE (t));
1795 TYPE_REFERENCE_TO (TREE_TYPE (t)) = t;
1799 /* Fix up fields of a BINFO T. DATA points to fix-up states. */
1802 lto_fixup_binfo (tree t, void *data)
1804 unsigned HOST_WIDE_INT i, n;
1805 tree base, saved_base;
1807 lto_fixup_common (t, data);
1808 gcc_assert (no_fixup_p (BINFO_OFFSET (t)));
1809 LTO_FIXUP_SUBTREE (BINFO_VTABLE (t));
1810 LTO_FIXUP_SUBTREE (BINFO_VIRTUALS (t));
1811 LTO_FIXUP_SUBTREE (BINFO_VPTR_FIELD (t));
1812 n = VEC_length (tree, BINFO_BASE_ACCESSES (t));
1813 for (i = 0; i < n; i++)
1815 saved_base = base = BINFO_BASE_ACCESS (t, i);
1816 LTO_FIXUP_SUBTREE (base);
1817 if (base != saved_base)
1818 VEC_replace (tree, BINFO_BASE_ACCESSES (t), i, base);
1820 LTO_FIXUP_SUBTREE (BINFO_INHERITANCE_CHAIN (t));
1821 LTO_FIXUP_SUBTREE (BINFO_SUBVTT_INDEX (t));
1822 LTO_FIXUP_SUBTREE (BINFO_VPTR_INDEX (t));
1823 n = BINFO_N_BASE_BINFOS (t);
1824 for (i = 0; i < n; i++)
1826 saved_base = base = BINFO_BASE_BINFO (t, i);
1827 LTO_FIXUP_SUBTREE (base);
1828 if (base != saved_base)
1829 VEC_replace (tree, BINFO_BASE_BINFOS (t), i, base);
1833 /* Fix up fields of a CONSTRUCTOR T. DATA points to fix-up states. */
1836 lto_fixup_constructor (tree t, void *data)
1838 unsigned HOST_WIDE_INT idx;
1839 constructor_elt *ce;
1841 LTO_REGISTER_TYPE_AND_FIXUP_SUBTREE (TREE_TYPE (t));
1844 VEC_iterate(constructor_elt, CONSTRUCTOR_ELTS (t), idx, ce);
1847 LTO_FIXUP_SUBTREE (ce->index);
1848 LTO_FIXUP_SUBTREE (ce->value);
1852 /* A walk_tree callback used by lto_fixup_state. TP is the pointer to the
1853 current tree. WALK_SUBTREES indicates if the subtrees will be walked.
1854 DATA is a pointer set to record visited nodes. */
1857 lto_fixup_tree (tree *tp, int *walk_subtrees, void *data)
1860 lto_fixup_data_t *fixup_data = (lto_fixup_data_t *) data;
1865 if (!t || pointer_set_contains (fixup_data->seen, t))
1868 if (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == FUNCTION_DECL)
1870 prevailing = lto_symtab_prevailing_decl (t);
1872 if (t != prevailing)
1874 /* Also replace t with prevailing defintion. We don't want to
1875 insert the other defintion in the seen set as we want to
1876 replace all instances of it. */
1881 else if (TYPE_P (t))
1883 /* Replace t with the prevailing type. We don't want to insert the
1884 other type in the seen set as we want to replace all instances of it. */
1885 t = gimple_register_type (t);
1889 if (pointer_set_insert (fixup_data->seen, t))
1892 /* walk_tree does not visit all reachable nodes that need to be fixed up.
1893 Hence we do special processing here for those kind of nodes. */
1894 switch (TREE_CODE (t))
1897 lto_fixup_field_decl (t, data);
1905 lto_fixup_decl_common (t, data);
1909 lto_fixup_decl_with_vis (t, data);
1913 lto_fixup_decl_non_common (t, data);
1917 lto_fixup_function (t, data);
1921 lto_fixup_binfo (t, data);
1926 lto_fixup_type (t, data);
1927 else if (TREE_CODE (t) == CONSTRUCTOR)
1928 lto_fixup_constructor (t, data);
1929 else if (CONSTANT_CLASS_P (t))
1930 LTO_REGISTER_TYPE_AND_FIXUP_SUBTREE (TREE_TYPE (t));
1931 else if (EXPR_P (t))
1933 /* walk_tree only handles TREE_OPERANDs. Do the rest here. */
1934 lto_fixup_common (t, data);
1935 LTO_FIXUP_SUBTREE (t->exp.block);
1940 /* Let walk_tree handle sub-trees. */
1948 /* Helper function of lto_fixup_decls. Walks the var and fn streams in STATE,
1949 replaces var and function decls with the corresponding prevailing def and
1950 records the old decl in the free-list in DATA. We also record visted nodes
1951 in the seen-set in DATA to avoid multiple visit for nodes that need not
1955 lto_fixup_state (struct lto_in_decl_state *state, lto_fixup_data_t *data)
1958 struct lto_tree_ref_table *table;
1960 /* Although we only want to replace FUNCTION_DECLs and VAR_DECLs,
1961 we still need to walk from all DECLs to find the reachable
1962 FUNCTION_DECLs and VAR_DECLs. */
1963 for (si = 0; si < LTO_N_DECL_STREAMS; si++)
1965 table = &state->streams[si];
1966 for (i = 0; i < table->size; i++)
1967 walk_tree (table->trees + i, lto_fixup_tree, data, NULL);
1971 /* A callback of htab_traverse. Just extract a state from SLOT and the
1972 lto_fixup_data_t object from AUX and calls lto_fixup_state. */
1975 lto_fixup_state_aux (void **slot, void *aux)
1977 struct lto_in_decl_state *state = (struct lto_in_decl_state *) *slot;
1978 lto_fixup_state (state, (lto_fixup_data_t *) aux);
1982 /* Fix the decls from all FILES. Replaces each decl with the corresponding
1986 lto_fixup_decls (struct lto_file_decl_data **files)
1990 struct pointer_set_t *seen = pointer_set_create ();
1991 lto_fixup_data_t data;
1994 for (i = 0; files[i]; i++)
1996 struct lto_file_decl_data *file = files[i];
1997 struct lto_in_decl_state *state = file->global_decl_state;
1998 lto_fixup_state (state, &data);
2000 htab_traverse (file->function_decl_states, lto_fixup_state_aux, &data);
2003 FOR_EACH_VEC_ELT (tree, lto_global_var_decls, i, decl)
2005 tree saved_decl = decl;
2006 walk_tree (&decl, lto_fixup_tree, &data, NULL);
2007 if (decl != saved_decl)
2008 VEC_replace (tree, lto_global_var_decls, i, decl);
2011 pointer_set_destroy (seen);
2014 /* Read the options saved from each file in the command line. Called
2015 from lang_hooks.post_options which is called by process_options
2016 right before all the options are used to initialize the compiler.
2017 This assumes that decode_options has already run, so the
2018 num_in_fnames and in_fnames are properly set.
2020 Note that this assumes that all the files had been compiled with
2021 the same options, which is not a good assumption. In general,
2022 options ought to be read from all the files in the set and merged.
2023 However, it is still unclear what the merge rules should be. */
2026 lto_read_all_file_options (void)
2030 /* Clear any file options currently saved. */
2031 lto_clear_file_options ();
2033 /* Set the hooks to read ELF sections. */
2034 lto_set_in_hooks (NULL, get_section_data, free_section_data);
2036 fprintf (stderr, "Reading command line options:");
2038 for (i = 0; i < num_in_fnames; i++)
2040 struct lto_file_decl_data *file_data;
2041 lto_file *file = lto_obj_file_open (in_fnames[i], false);
2046 fprintf (stderr, " %s", in_fnames[i]);
2050 file_data = XCNEW (struct lto_file_decl_data);
2051 file_data->file_name = file->filename;
2052 file_data->section_hash_table = lto_obj_build_section_table (file);
2054 lto_read_file_options (file_data);
2056 lto_obj_file_close (file);
2057 htab_delete (file_data->section_hash_table);
2062 fprintf (stderr, "\n");
2064 /* Apply globally the options read from all the files. */
2065 lto_reissue_options ();
2068 static GTY((length ("lto_stats.num_input_files + 1"))) struct lto_file_decl_data **all_file_decl_data;
2070 /* Turn file datas for sub files into a single array, so that they look
2071 like separate files for further passes. */
2074 lto_flatten_files (struct lto_file_decl_data **orig, int count, int last_file_ix)
2076 struct lto_file_decl_data *n, *next;
2079 lto_stats.num_input_files = count;
2081 = ggc_alloc_cleared_vec_lto_file_decl_data_ptr (count + 1);
2082 /* Set the hooks so that all of the ipa passes can read in their data. */
2083 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
2084 for (i = 0, k = 0; i < last_file_ix; i++)
2086 for (n = orig[i]; n != NULL; n = next)
2088 all_file_decl_data[k++] = n;
2093 all_file_decl_data[k] = NULL;
2094 gcc_assert (k == count);
2097 /* Input file data before flattening (i.e. splitting them to subfiles to support
2098 incremental linking. */
2099 static int real_file_count;
2100 static GTY((length ("real_file_count + 1"))) struct lto_file_decl_data **real_file_decl_data;
2102 /* Read all the symbols from the input files FNAMES. NFILES is the
2103 number of files requested in the command line. Instantiate a
2104 global call graph by aggregating all the sub-graphs found in each
2108 read_cgraph_and_symbols (unsigned nfiles, const char **fnames)
2110 unsigned int i, last_file_ix;
2112 struct cgraph_node *node;
2114 struct lto_file_decl_data **decl_data;
2118 timevar_push (TV_IPA_LTO_DECL_IN);
2121 = decl_data = ggc_alloc_cleared_vec_lto_file_decl_data_ptr (nfiles + 1);
2122 real_file_count = nfiles;
2124 /* Read the resolution file. */
2126 if (resolution_file_name)
2129 unsigned num_objects;
2131 resolution = fopen (resolution_file_name, "r");
2132 if (resolution == NULL)
2133 fatal_error ("could not open symbol resolution file: %m");
2135 t = fscanf (resolution, "%u", &num_objects);
2136 gcc_assert (t == 1);
2138 /* True, since the plugin splits the archives. */
2139 gcc_assert (num_objects == nfiles);
2143 fprintf (stderr, "Reading object files:");
2145 /* Read all of the object files specified on the command line. */
2146 for (i = 0, last_file_ix = 0; i < nfiles; ++i)
2148 struct lto_file_decl_data *file_data = NULL;
2151 fprintf (stderr, " %s", fnames[i]);
2155 current_lto_file = lto_obj_file_open (fnames[i], false);
2156 if (!current_lto_file)
2159 file_data = lto_file_read (current_lto_file, resolution, &count);
2163 decl_data[last_file_ix++] = file_data;
2165 lto_obj_file_close (current_lto_file);
2166 current_lto_file = NULL;
2170 lto_flatten_files (decl_data, count, last_file_ix);
2171 lto_stats.num_input_files = count;
2172 ggc_free(decl_data);
2173 real_file_decl_data = NULL;
2175 if (resolution_file_name)
2176 fclose (resolution);
2178 /* Set the hooks so that all of the ipa passes can read in their data. */
2179 lto_set_in_hooks (all_file_decl_data, get_section_data, free_section_data);
2181 timevar_pop (TV_IPA_LTO_DECL_IN);
2184 fprintf (stderr, "\nReading the callgraph\n");
2186 timevar_push (TV_IPA_LTO_CGRAPH_IO);
2187 /* Read the callgraph. */
2189 timevar_pop (TV_IPA_LTO_CGRAPH_IO);
2192 fprintf (stderr, "Merging declarations\n");
2194 timevar_push (TV_IPA_LTO_DECL_MERGE);
2195 /* Merge global decls. */
2196 lto_symtab_merge_decls ();
2198 /* If there were errors during symbol merging bail out, we have no
2199 good way to recover here. */
2201 fatal_error ("errors during merging of translation units");
2203 /* Fixup all decls and types and free the type hash tables. */
2204 lto_fixup_decls (all_file_decl_data);
2205 free_gimple_type_tables ();
2208 timevar_pop (TV_IPA_LTO_DECL_MERGE);
2209 /* Each pass will set the appropriate timer. */
2212 fprintf (stderr, "Reading summaries\n");
2214 /* Read the IPA summary data. */
2216 ipa_read_optimization_summaries ();
2218 ipa_read_summaries ();
2220 /* Finally merge the cgraph according to the decl merging decisions. */
2221 timevar_push (TV_IPA_LTO_CGRAPH_MERGE);
2222 if (cgraph_dump_file)
2224 fprintf (cgraph_dump_file, "Before merging:\n");
2225 dump_cgraph (cgraph_dump_file);
2226 dump_varpool (cgraph_dump_file);
2228 lto_symtab_merge_cgraph_nodes ();
2232 for (node = cgraph_nodes; node; node = node->next)
2234 /* FIXME: ipa_transforms_to_apply holds list of passes that have optimization
2235 summaries computed and needs to apply changes. At the moment WHOPR only
2236 supports inlining, so we can push it here by hand. In future we need to stream
2237 this field into ltrans compilation. */
2239 VEC_safe_push (ipa_opt_pass, heap,
2240 node->ipa_transforms_to_apply,
2241 (ipa_opt_pass)&pass_ipa_inline);
2245 timevar_pop (TV_IPA_LTO_CGRAPH_MERGE);
2247 timevar_push (TV_IPA_LTO_DECL_INIT_IO);
2249 /* FIXME lto. This loop needs to be changed to use the pass manager to
2250 call the ipa passes directly. */
2252 for (i = 0; i < last_file_ix; i++)
2254 struct lto_file_decl_data *file_data = all_file_decl_data [i];
2255 lto_materialize_constructors_and_inits (file_data);
2258 /* Indicate that the cgraph is built and ready. */
2259 cgraph_function_flags_ready = true;
2261 timevar_pop (TV_IPA_LTO_DECL_INIT_IO);
2262 ggc_free (all_file_decl_data);
2263 all_file_decl_data = NULL;
2267 /* Materialize all the bodies for all the nodes in the callgraph. */
2270 materialize_cgraph (void)
2273 struct cgraph_node *node;
2275 timevar_id_t lto_timer;
2279 flag_wpa ? "Materializing decls:" : "Reading function bodies:");
2282 /* Now that we have input the cgraph, we need to clear all of the aux
2283 nodes and read the functions if we are not running in WPA mode. */
2284 timevar_push (TV_IPA_LTO_GIMPLE_IN);
2286 for (node = cgraph_nodes; node; node = node->next)
2288 if (node->local.lto_file_data)
2290 lto_materialize_function (node);
2291 lto_stats.num_input_cgraph_nodes++;
2295 timevar_pop (TV_IPA_LTO_GIMPLE_IN);
2297 /* Start the appropriate timer depending on the mode that we are
2299 lto_timer = (flag_wpa) ? TV_WHOPR_WPA
2300 : (flag_ltrans) ? TV_WHOPR_LTRANS
2302 timevar_push (lto_timer);
2304 current_function_decl = NULL;
2307 /* Inform the middle end about the global variables we have seen. */
2308 FOR_EACH_VEC_ELT (tree, lto_global_var_decls, i, decl)
2309 rest_of_decl_compilation (decl, 1, 0);
2312 fprintf (stderr, "\n");
2314 timevar_pop (lto_timer);
2318 /* Perform whole program analysis (WPA) on the callgraph and write out the
2319 optimization plan. */
2322 do_whole_program_analysis (void)
2324 /* Note that since we are in WPA mode, materialize_cgraph will not
2325 actually read in all the function bodies. It only materializes
2326 the decls and cgraph nodes so that analysis can be performed. */
2327 materialize_cgraph ();
2329 /* Reading in the cgraph uses different timers, start timing WPA now. */
2330 timevar_push (TV_WHOPR_WPA);
2332 if (pre_ipa_mem_report)
2334 fprintf (stderr, "Memory consumption before IPA\n");
2335 dump_memory_report (false);
2338 cgraph_function_flags_ready = true;
2340 if (cgraph_dump_file)
2342 dump_cgraph (cgraph_dump_file);
2343 dump_varpool (cgraph_dump_file);
2345 bitmap_obstack_initialize (NULL);
2346 ipa_register_cgraph_hooks ();
2347 cgraph_state = CGRAPH_STATE_IPA_SSA;
2349 execute_ipa_pass_list (all_regular_ipa_passes);
2351 if (cgraph_dump_file)
2353 fprintf (cgraph_dump_file, "Optimized ");
2354 dump_cgraph (cgraph_dump_file);
2355 dump_varpool (cgraph_dump_file);
2358 bitmap_obstack_release (NULL);
2360 /* We are about to launch the final LTRANS phase, stop the WPA timer. */
2361 timevar_pop (TV_WHOPR_WPA);
2363 if (flag_lto_partition_1to1)
2366 lto_balanced_map ();
2370 fprintf (stderr, "\nStreaming out");
2373 lto_wpa_write_files ();
2376 fprintf (stderr, "\n");
2378 if (post_ipa_mem_report)
2380 fprintf (stderr, "Memory consumption after IPA\n");
2381 dump_memory_report (false);
2384 /* Show the LTO report before launching LTRANS. */
2385 if (flag_lto_report)
2386 print_lto_report ();
/* Cached personality function decl returned by lto_eh_personality;
   GC-rooted via GTY so the garbage collector keeps it alive.  */
2390 static GTY(()) tree lto_eh_personality_decl;
2392 /* Return the LTO personality function decl. */
/* NOTE(review): the function's return-type line and braces are missing
   from this extract; code kept byte-identical.  */
2395 lto_eh_personality (void)
/* Compute the decl lazily on first use, then cache it.  */
2397 if (!lto_eh_personality_decl)
2399 /* Use the first personality DECL for our personality if we don't
2400 support multiple ones. This ensures that we don't artificially
2401 create the need for them in a single-language program. */
2402 if (first_personality_decl && !dwarf2out_do_cfi_asm ())
2403 lto_eh_personality_decl = first_personality_decl;
/* NOTE(review): an "else" line (original line 2404) appears to be
   missing here — the lhd_gcc_personality fallback is presumably the
   else branch of the conditional above.  Confirm against the
   unextracted source.  */
2405 lto_eh_personality_decl = lhd_gcc_personality ();
2408 return lto_eh_personality_decl;
2411 /* Set the process name based on the LTO mode. */
/* NOTE(review): the function's return-type line, braces, and the
   mode-selecting conditionals (original lines around 2416/2418/2420,
   presumably testing flag_lto / flag_wpa / flag_ltrans) are missing
   from this extract — only the three setproctitle calls survive.
   Code kept byte-identical; confirm structure against the unextracted
   source.  */
2414 lto_process_name (void)
2417 setproctitle ("lto1-lto");
2419 setproctitle ("lto1-wpa");
2421 setproctitle ("lto1-ltrans");
2424 /* Main entry point for the GIMPLE front end. This front end has
2425 three main personalities:
2427 - LTO (-flto). All the object files on the command line are
2428 loaded in memory and processed as a single translation unit.
2429 This is the traditional link-time optimization behavior.
2431 - WPA (-fwpa). Only the callgraph and summary information for
2432 files in the command file are loaded. A single callgraph
2433 (without function bodies) is instantiated for the whole set of
2434 files. IPA passes are only allowed to analyze the call graph
2435 and make transformation decisions. The callgraph is
2436 partitioned, each partition is written to a new object file
2437 together with the transformation decisions.
2439 - LTRANS (-fltrans). Similar to -flto but it prevents the IPA
2440 summary files from running again. Since WPA computed summary
2441 information and decided what transformations to apply, LTRANS
2442 simply applies them. */
/* NOTE(review): the lto_main function header line itself (original
   lines around 2444-2446) is missing from this extract; the statements
   below are its body fragment, kept byte-identical with comments
   added.  Several guarding conditionals (e.g. the flag_wpa test that
   selects do_whole_program_analysis vs. materialize_cgraph) are also
   missing — confirm against the unextracted source.  */
2447 lto_process_name ();
/* NOTE(review): the comment starting at original line 2451 is
   truncated here (its continuation and closing delimiter were lost in
   extraction).  */
2451 /* Read all the symbols and call graph from all the files in the
2453 read_cgraph_and_symbols (num_in_fnames, in_fnames);
2457 /* If WPA is enabled analyze the whole call graph and create an
2458 optimization plan. Otherwise, read in all the function
2459 bodies and continue with optimization. */
2461 do_whole_program_analysis ();
2464 materialize_cgraph ();
/* NOTE(review): the comment starting at original line 2466 is also
   truncated mid-sentence by the extraction.  */
2466 /* Let the middle end know that we have read and merged all of
2470 /* FIXME lto, if the processes spawned by WPA fail, we miss
2471 the chance to print WPA's report, so WPA will call
2472 print_lto_report before launching LTRANS. If LTRANS was
2473 launched directly by the driver we would not need to do
2475 if (flag_lto_report)
2476 print_lto_report ();
2481 #include "gt-lto-lto.h"