1 /* Basic IPA optimizations and utilities.
2 Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
26 #include "tree-pass.h"
31 #include "pointer-set.h"
33 #include "tree-iterator.h"
35 /* Fill array order with all nodes with output flag set in the reverse
39 cgraph_postorder (struct cgraph_node **order)
41 struct cgraph_node *node, *node2;
44 struct cgraph_edge *edge, last;
47 struct cgraph_node **stack =
48 XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
50 /* We have to deal with cycles nicely, so use a depth first traversal
51 output algorithm. Ignore the fact that some functions won't need
52 to be output and put them into order as well, so we get dependencies
53 right through inline functions. */
54 for (node = cgraph_nodes; node; node = node->next)
56 for (pass = 0; pass < 2; pass++)
57 for (node = cgraph_nodes; node; node = node->next)
60 || (!cgraph_only_called_directly_p (node)
61 && !node->address_taken)))
67 node->aux = node->callers;
70 while (node2->aux != &last)
72 edge = (struct cgraph_edge *) node2->aux;
73 if (edge->next_caller)
74 node2->aux = edge->next_caller;
77 /* Break possible cycles involving always-inline
78 functions by ignoring edges from always-inline
79 functions to non-always-inline functions. */
80 if (edge->caller->local.disregard_inline_limits
81 && !edge->callee->local.disregard_inline_limits)
83 if (!edge->caller->aux)
85 if (!edge->caller->callers)
86 edge->caller->aux = &last;
88 edge->caller->aux = edge->caller->callers;
89 stack[stack_size++] = node2;
94 if (node2->aux == &last)
96 order[order_pos++] = node2;
98 node2 = stack[--stack_size];
105 for (node = cgraph_nodes; node; node = node->next)
110 /* Look for all functions inlined to NODE and update their inlined_to pointers
114 update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
116 struct cgraph_edge *e;
117 for (e = node->callees; e; e = e->next_callee)
118 if (e->callee->global.inlined_to)
120 e->callee->global.inlined_to = inlined_to;
121 update_inlined_to_pointer (e->callee, inlined_to);
125 /* Add cgraph NODE to queue starting at FIRST.
127 The queue is linked via AUX pointers and terminated by pointer to 1.
128 We enqueue nodes at two occasions: when we find them reachable or when we find
129 their bodies needed for further clonning. In the second case we mark them
130 by pointer to 2 after processing so they are re-queue when they become
134 enqueue_cgraph_node (struct cgraph_node *node, struct cgraph_node **first)
136 /* Node is still in queue; do nothing. */
137 if (node->aux && node->aux != (void *) 2)
139 /* Node was already processed as unreachable, re-enqueue
140 only if it became reachable now. */
141 if (node->aux == (void *)2 && !node->reachable)
147 /* Add varpool NODE to queue starting at FIRST. */
150 enqueue_varpool_node (struct varpool_node *node, struct varpool_node **first)
156 /* Process references. */
159 process_references (struct ipa_ref_list *list,
160 struct cgraph_node **first,
161 struct varpool_node **first_varpool,
162 bool before_inlining_p)
166 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
168 if (ref->refered_type == IPA_REF_CGRAPH)
170 struct cgraph_node *node = ipa_ref_node (ref);
172 && (!DECL_EXTERNAL (node->decl)
173 || before_inlining_p))
175 node->reachable = true;
176 enqueue_cgraph_node (node, first);
181 struct varpool_node *node = ipa_ref_varpool_node (ref);
184 varpool_mark_needed_node (node);
185 enqueue_varpool_node (node, first_varpool);
191 /* Return true when function NODE can be removed from callgraph
192 if all direct calls are eliminated. */
195 varpool_can_remove_if_no_refs (struct varpool_node *node)
197 return (!node->force_output && !node->used_from_other_partition
198 && (DECL_COMDAT (node->decl) || !node->externally_visible));
201 /* Return true when function can be marked local. */
204 cgraph_local_node_p (struct cgraph_node *node)
206 return (cgraph_only_called_directly_p (node)
208 && !DECL_EXTERNAL (node->decl)
209 && !node->local.externally_visible
210 && !node->reachable_from_other_partition
211 && !node->in_other_partition);
214 /* Perform reachability analysis and reclaim all unreachable nodes.
215 If BEFORE_INLINING_P is true this function is called before inlining
216 decisions has been made. If BEFORE_INLINING_P is false this function also
217 removes unneeded bodies of extern inline functions. */
220 cgraph_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
222 struct cgraph_node *first = (struct cgraph_node *) (void *) 1;
223 struct varpool_node *first_varpool = (struct varpool_node *) (void *) 1;
224 struct cgraph_node *node, *next;
225 struct varpool_node *vnode, *vnext;
226 bool changed = false;
228 #ifdef ENABLE_CHECKING
232 fprintf (file, "\nReclaiming functions:");
233 #ifdef ENABLE_CHECKING
234 for (node = cgraph_nodes; node; node = node->next)
235 gcc_assert (!node->aux);
236 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
237 gcc_assert (!vnode->aux);
239 varpool_reset_queue ();
240 for (node = cgraph_nodes; node; node = node->next)
241 if ((!cgraph_can_remove_if_no_direct_calls_and_refs_p (node)
242 /* Keep around virtual functions for possible devirtualization. */
243 || (!before_inlining_p
244 && !node->global.inlined_to
245 && DECL_VIRTUAL_P (node->decl)
246 && (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))))
247 && ((!DECL_EXTERNAL (node->decl))
248 || before_inlining_p))
250 gcc_assert (!node->global.inlined_to);
251 enqueue_cgraph_node (node, &first);
252 node->reachable = true;
256 gcc_assert (!node->aux);
257 node->reachable = false;
259 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
261 vnode->next_needed = NULL;
262 vnode->prev_needed = NULL;
263 if (!varpool_can_remove_if_no_refs (vnode))
265 vnode->needed = false;
266 varpool_mark_needed_node (vnode);
267 enqueue_varpool_node (vnode, &first_varpool);
270 vnode->needed = false;
273 /* Perform reachability analysis. As a special case do not consider
274 extern inline functions not inlined as live because we won't output
277 We maintain two worklist, one for cgraph nodes other for varpools and
278 are finished once both are empty. */
280 while (first != (struct cgraph_node *) (void *) 1
281 || first_varpool != (struct varpool_node *) (void *) 1)
283 if (first != (struct cgraph_node *) (void *) 1)
285 struct cgraph_edge *e;
287 first = (struct cgraph_node *) first->aux;
288 if (!node->reachable)
289 node->aux = (void *)2;
291 /* If we found this node reachable, first mark on the callees
292 reachable too, unless they are direct calls to extern inline functions
293 we decided to not inline. */
296 for (e = node->callees; e; e = e->next_callee)
297 if (!e->callee->reachable
299 && (!e->inline_failed || !e->callee->analyzed
300 || (!DECL_EXTERNAL (e->callee->decl))
301 || before_inlining_p))
303 e->callee->reachable = true;
304 enqueue_cgraph_node (e->callee, &first);
306 process_references (&node->ref_list, &first, &first_varpool, before_inlining_p);
309 /* If any function in a comdat group is reachable, force
310 all other functions in the same comdat group to be
312 if (node->same_comdat_group
314 && !node->global.inlined_to)
316 for (next = node->same_comdat_group;
318 next = next->same_comdat_group)
319 if (!next->reachable)
321 next->reachable = true;
322 enqueue_cgraph_node (next, &first);
326 /* We can freely remove inline clones even if they are cloned, however if
327 function is clone of real clone, we must keep it around in order to
328 make materialize_clones produce function body with the changes
330 while (node->clone_of && !node->clone_of->aux
331 && !gimple_has_body_p (node->decl))
333 bool noninline = node->clone_of->decl != node->decl;
334 node = node->clone_of;
335 if (noninline && !node->reachable && !node->aux)
337 enqueue_cgraph_node (node, &first);
342 if (first_varpool != (struct varpool_node *) (void *) 1)
344 vnode = first_varpool;
345 first_varpool = (struct varpool_node *)first_varpool->aux;
347 process_references (&vnode->ref_list, &first, &first_varpool, before_inlining_p);
348 /* If any function in a comdat group is reachable, force
349 all other functions in the same comdat group to be
351 if (vnode->same_comdat_group)
353 struct varpool_node *next;
354 for (next = vnode->same_comdat_group;
356 next = next->same_comdat_group)
359 varpool_mark_needed_node (next);
360 enqueue_varpool_node (next, &first_varpool);
366 /* Remove unreachable nodes.
368 Completely unreachable functions can be fully removed from the callgraph.
369 Extern inline functions that we decided to not inline need to become unanalyzed nodes of
370 callgraph (so we still have edges to them). We remove function body then.
372 Also we need to care functions that are unreachable but we need to keep them around
373 for later clonning. In this case we also turn them to unanalyzed nodes, but
374 keep the body around. */
375 for (node = cgraph_nodes; node; node = next)
378 if (node->aux && !node->reachable)
380 cgraph_node_remove_callees (node);
381 ipa_remove_all_references (&node->ref_list);
382 node->analyzed = false;
383 node->local.inlinable = false;
387 struct cgraph_edge *e;
392 node->global.inlined_to = NULL;
394 fprintf (file, " %s", cgraph_node_name (node));
395 /* See if there is reachable caller. */
396 for (e = node->callers; e && !found; e = e->next_caller)
397 if (e->caller->reachable)
399 for (i = 0; (ipa_ref_list_refering_iterate (&node->ref_list, i, ref)
401 if (ref->refering_type == IPA_REF_CGRAPH
402 && ipa_ref_refering_node (ref)->reachable)
404 else if (ref->refering_type == IPA_REF_VARPOOL
405 && ipa_ref_refering_varpool_node (ref)->needed)
408 /* If so, we need to keep node in the callgraph. */
409 if (found || node->needed)
413 struct cgraph_node *clone;
415 /* If there are still clones, we must keep body around.
416 Otherwise we can just remove the body but keep the clone. */
417 for (clone = node->clones; clone;
418 clone = clone->next_sibling_clone)
423 cgraph_release_function_body (node);
424 node->local.inlinable = false;
425 if (node->prev_sibling_clone)
426 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
427 else if (node->clone_of)
428 node->clone_of->clones = node->next_sibling_clone;
429 if (node->next_sibling_clone)
430 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
431 #ifdef ENABLE_CHECKING
433 node->former_clone_of = node->clone_of->decl;
435 node->clone_of = NULL;
436 node->next_sibling_clone = NULL;
437 node->prev_sibling_clone = NULL;
440 gcc_assert (!clone->in_other_partition);
441 node->analyzed = false;
443 cgraph_node_remove_callees (node);
444 ipa_remove_all_references (&node->ref_list);
449 cgraph_remove_node (node);
454 for (node = cgraph_nodes; node; node = node->next)
456 /* Inline clones might be kept around so their materializing allows further
457 cloning. If the function the clone is inlined into is removed, we need
458 to turn it into normal cone. */
459 if (node->global.inlined_to
462 gcc_assert (node->clones);
463 node->global.inlined_to = NULL;
464 update_inlined_to_pointer (node, node);
470 fprintf (file, "\n");
472 /* We must release unused extern inlines or sanity checking will fail. Rest of transformations
473 are undesirable at -O0 since we do not want to remove anything. */
478 fprintf (file, "Reclaiming variables:");
479 for (vnode = varpool_nodes; vnode; vnode = vnext)
485 fprintf (file, " %s", varpool_node_name (vnode));
486 varpool_remove_node (vnode);
491 /* Now update address_taken flags and try to promote functions to be local. */
494 fprintf (file, "\nClearing address taken flags:");
495 for (node = cgraph_nodes; node; node = node->next)
496 if (node->address_taken
497 && !node->reachable_from_other_partition)
502 for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref)
505 gcc_assert (ref->use == IPA_REF_ADDR);
511 fprintf (file, " %s", cgraph_node_name (node));
512 node->address_taken = false;
514 if (cgraph_local_node_p (node))
516 node->local.local = true;
518 fprintf (file, " (local)");
523 #ifdef ENABLE_CHECKING
527 /* Reclaim alias pairs for functions that have disappeared from the
529 remove_unreachable_alias_pairs ();
534 /* Discover variables that have no longer address taken or that are read only
535 and update their flags.
537 FIXME: This can not be done in between gimplify and omp_expand since
538 readonly flag plays role on what is shared and what is not. Currently we do
539 this transformation as part of whole program visibility and re-do at
540 ipa-reference pass (to take into account clonning), but it would
541 make sense to do it before early optimizations. */
544 ipa_discover_readonly_nonaddressable_vars (void)
546 struct varpool_node *vnode;
548 fprintf (dump_file, "Clearing variable flags:");
549 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
550 if (vnode->finalized && varpool_all_refs_explicit_p (vnode)
551 && (TREE_ADDRESSABLE (vnode->decl) || !TREE_READONLY (vnode->decl)))
553 bool written = false;
554 bool address_taken = false;
557 for (i = 0; ipa_ref_list_refering_iterate (&vnode->ref_list, i, ref)
558 && (!written || !address_taken); i++)
562 address_taken = true;
570 if (TREE_ADDRESSABLE (vnode->decl) && !address_taken)
573 fprintf (dump_file, " %s (addressable)", varpool_node_name (vnode));
574 TREE_ADDRESSABLE (vnode->decl) = 0;
576 if (!TREE_READONLY (vnode->decl) && !address_taken && !written
577 /* Making variable in explicit section readonly can cause section
579 See e.g. gcc.c-torture/compile/pr23237.c */
580 && DECL_SECTION_NAME (vnode->decl) == NULL)
583 fprintf (dump_file, " %s (read-only)", varpool_node_name (vnode));
584 TREE_READONLY (vnode->decl) = 1;
588 fprintf (dump_file, "\n");
591 /* Return true when function NODE should be considered externally visible. */
594 cgraph_externally_visible_p (struct cgraph_node *node, bool whole_program, bool aliased)
596 if (!node->local.finalized)
598 if (!DECL_COMDAT (node->decl)
599 && (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl)))
602 /* Do not even try to be smart about aliased nodes. Until we properly
603 represent everything by same body alias, these are just evil. */
607 /* If linker counts on us, we must preserve the function. */
608 if (cgraph_used_from_object_file_p (node))
610 if (DECL_PRESERVE_P (node->decl))
612 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (node->decl)))
615 /* When doing link time optimizations, hidden symbols become local. */
617 && (DECL_VISIBILITY (node->decl) == VISIBILITY_HIDDEN
618 || DECL_VISIBILITY (node->decl) == VISIBILITY_INTERNAL)
619 /* Be sure that node is defined in IR file, not in other object
620 file. In that case we don't set used_from_other_object_file. */
623 else if (!whole_program)
625 /* COMDAT functions must be shared only if they have address taken,
626 otherwise we can produce our own private implementation with
628 else if (DECL_COMDAT (node->decl))
630 if (node->address_taken || !node->analyzed)
632 if (node->same_comdat_group)
634 struct cgraph_node *next;
636 /* If more than one function is in the same COMDAT group, it must
637 be shared even if just one function in the comdat group has
639 for (next = node->same_comdat_group;
641 next = next->same_comdat_group)
642 if (next->address_taken || !next->analyzed)
647 if (MAIN_NAME_P (DECL_NAME (node->decl)))
653 /* Return true when variable VNODE should be considered externally visible. */
656 varpool_externally_visible_p (struct varpool_node *vnode, bool aliased)
658 if (!DECL_COMDAT (vnode->decl) && !TREE_PUBLIC (vnode->decl))
661 /* Do not even try to be smart about aliased nodes. Until we properly
662 represent everything by same body alias, these are just evil. */
666 /* If linker counts on us, we must preserve the function. */
667 if (varpool_used_from_object_file_p (vnode))
670 if (DECL_PRESERVE_P (vnode->decl))
672 if (lookup_attribute ("externally_visible",
673 DECL_ATTRIBUTES (vnode->decl)))
676 /* See if we have linker information about symbol not being used or
677 if we need to make guess based on the declaration.
679 Even if the linker clams the symbol is unused, never bring internal
680 symbols that are declared by user as used or externally visible.
681 This is needed for i.e. references from asm statements. */
682 if (varpool_used_from_object_file_p (vnode))
685 /* When doing link time optimizations, hidden symbols become local. */
687 && (DECL_VISIBILITY (vnode->decl) == VISIBILITY_HIDDEN
688 || DECL_VISIBILITY (vnode->decl) == VISIBILITY_INTERNAL)
689 /* Be sure that node is defined in IR file, not in other object
690 file. In that case we don't set used_from_other_object_file. */
693 else if (!flag_whole_program)
696 /* Do not attempt to privatize COMDATS by default.
697 This would break linking with C++ libraries sharing
700 FIXME: We can do so for readonly vars with no address taken and
701 possibly also for vtables since no direct pointer comparsion is done.
702 It might be interesting to do so to reduce linking overhead. */
703 if (DECL_COMDAT (vnode->decl) || DECL_WEAK (vnode->decl))
708 /* Dissolve the same_comdat_group list in which NODE resides. */
711 dissolve_same_comdat_group_list (struct cgraph_node *node)
713 struct cgraph_node *n = node, *next;
716 next = n->same_comdat_group;
717 n->same_comdat_group = NULL;
723 /* Mark visibility of all functions.
725 A local function is one whose calls can occur only in the current
726 compilation unit and all its calls are explicit, so we can change
727 its calling convention. We simply mark all static functions whose
728 address is not taken as local.
730 We also change the TREE_PUBLIC flag of all declarations that are public
731 in language point of view but we want to overwrite this default
732 via visibilities for the backend point of view. */
735 function_and_variable_visibility (bool whole_program)
737 struct cgraph_node *node;
738 struct varpool_node *vnode;
739 struct pointer_set_t *aliased_nodes = pointer_set_create ();
740 struct pointer_set_t *aliased_vnodes = pointer_set_create ();
744 /* Discover aliased nodes. */
745 FOR_EACH_VEC_ELT (alias_pair, alias_pairs, i, p)
748 fprintf (dump_file, "Alias %s->%s",
749 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (p->decl)),
750 IDENTIFIER_POINTER (p->target));
752 if ((node = cgraph_node_for_asm (p->target)) != NULL)
754 gcc_assert (node->needed);
755 pointer_set_insert (aliased_nodes, node);
757 fprintf (dump_file, " node %s/%i",
758 cgraph_node_name (node), node->uid);
760 else if ((vnode = varpool_node_for_asm (p->target)) != NULL)
762 gcc_assert (vnode->needed);
763 pointer_set_insert (aliased_vnodes, vnode);
765 fprintf (dump_file, " varpool node %s",
766 varpool_node_name (vnode));
769 fprintf (dump_file, "\n");
772 for (node = cgraph_nodes; node; node = node->next)
774 /* C++ FE on lack of COMDAT support create local COMDAT functions
775 (that ought to be shared but can not due to object format
776 limitations). It is neccesary to keep the flag to make rest of C++ FE
777 happy. Clear the flag here to avoid confusion in middle-end. */
778 if (DECL_COMDAT (node->decl) && !TREE_PUBLIC (node->decl))
779 DECL_COMDAT (node->decl) = 0;
780 /* For external decls stop tracking same_comdat_group, it doesn't matter
781 what comdat group they are in when they won't be emitted in this TU,
782 and simplifies later passes. */
783 if (node->same_comdat_group && DECL_EXTERNAL (node->decl))
785 #ifdef ENABLE_CHECKING
786 struct cgraph_node *n;
788 for (n = node->same_comdat_group;
790 n = n->same_comdat_group)
791 /* If at least one of same comdat group functions is external,
792 all of them have to be, otherwise it is a front-end bug. */
793 gcc_assert (DECL_EXTERNAL (n->decl));
795 dissolve_same_comdat_group_list (node);
797 gcc_assert ((!DECL_WEAK (node->decl) && !DECL_COMDAT (node->decl))
798 || TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl));
799 if (cgraph_externally_visible_p (node, whole_program,
800 pointer_set_contains (aliased_nodes,
803 gcc_assert (!node->global.inlined_to);
804 node->local.externally_visible = true;
807 node->local.externally_visible = false;
808 if (!node->local.externally_visible && node->analyzed
809 && !DECL_EXTERNAL (node->decl))
811 struct cgraph_node *alias;
812 gcc_assert (whole_program || in_lto_p || !TREE_PUBLIC (node->decl));
813 cgraph_make_decl_local (node->decl);
814 node->resolution = LDPR_PREVAILING_DEF_IRONLY;
815 for (alias = node->same_body; alias; alias = alias->next)
816 cgraph_make_decl_local (alias->decl);
817 if (node->same_comdat_group)
818 /* cgraph_externally_visible_p has already checked all other nodes
819 in the group and they will all be made local. We need to
820 dissolve the group at once so that the predicate does not
822 dissolve_same_comdat_group_list (node);
824 node->local.local = cgraph_local_node_p (node);
826 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
828 /* weak flag makes no sense on local variables. */
829 gcc_assert (!DECL_WEAK (vnode->decl)
830 || TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl));
831 /* In several cases declarations can not be common:
833 - when declaration has initializer
835 - when it has specific section
836 - when it resides in non-generic address space.
837 - if declaration is local, it will get into .local common section
838 so common flag is not needed. Frontends still produce these in
839 certain cases, such as for:
841 static int a __attribute__ ((common))
843 Canonicalize things here and clear the redundant flag. */
844 if (DECL_COMMON (vnode->decl)
845 && (!(TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl))
846 || (DECL_INITIAL (vnode->decl)
847 && DECL_INITIAL (vnode->decl) != error_mark_node)
848 || DECL_WEAK (vnode->decl)
849 || DECL_SECTION_NAME (vnode->decl) != NULL
850 || ! (ADDR_SPACE_GENERIC_P
851 (TYPE_ADDR_SPACE (TREE_TYPE (vnode->decl))))))
852 DECL_COMMON (vnode->decl) = 0;
854 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
856 if (!vnode->finalized)
859 && varpool_externally_visible_p
861 pointer_set_contains (aliased_vnodes, vnode)))
862 vnode->externally_visible = true;
864 vnode->externally_visible = false;
865 if (!vnode->externally_visible)
867 gcc_assert (in_lto_p || whole_program || !TREE_PUBLIC (vnode->decl));
868 cgraph_make_decl_local (vnode->decl);
869 vnode->resolution = LDPR_PREVAILING_DEF_IRONLY;
871 gcc_assert (TREE_STATIC (vnode->decl));
873 pointer_set_destroy (aliased_nodes);
874 pointer_set_destroy (aliased_vnodes);
878 fprintf (dump_file, "\nMarking local functions:");
879 for (node = cgraph_nodes; node; node = node->next)
880 if (node->local.local)
881 fprintf (dump_file, " %s", cgraph_node_name (node));
882 fprintf (dump_file, "\n\n");
883 fprintf (dump_file, "\nMarking externally visible functions:");
884 for (node = cgraph_nodes; node; node = node->next)
885 if (node->local.externally_visible)
886 fprintf (dump_file, " %s", cgraph_node_name (node));
887 fprintf (dump_file, "\n\n");
888 fprintf (dump_file, "\nMarking externally visible variables:");
889 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
890 if (vnode->externally_visible)
891 fprintf (dump_file, " %s", varpool_node_name (vnode));
892 fprintf (dump_file, "\n\n");
894 cgraph_function_flags_ready = true;
898 /* Local function pass handling visibilities. This happens before LTO streaming
899 so in particular -fwhole-program should be ignored at this level. */
902 local_function_and_variable_visibility (void)
904 return function_and_variable_visibility (flag_whole_program && !flag_lto && !flag_whopr);
907 struct simple_ipa_opt_pass pass_ipa_function_and_variable_visibility =
911 "visibility", /* name */
913 local_function_and_variable_visibility,/* execute */
916 0, /* static_pass_number */
917 TV_CGRAPHOPT, /* tv_id */
918 0, /* properties_required */
919 0, /* properties_provided */
920 0, /* properties_destroyed */
921 0, /* todo_flags_start */
922 TODO_remove_functions | TODO_dump_cgraph
923 | TODO_ggc_collect /* todo_flags_finish */
927 /* Do not re-run on ltrans stage. */
930 gate_whole_program_function_and_variable_visibility (void)
935 /* Bring functionss local at LTO time whith -fwhole-program. */
938 whole_program_function_and_variable_visibility (void)
940 struct cgraph_node *node;
941 struct varpool_node *vnode;
943 function_and_variable_visibility (flag_whole_program);
945 for (node = cgraph_nodes; node; node = node->next)
946 if ((node->local.externally_visible && !DECL_COMDAT (node->decl))
947 && node->local.finalized)
948 cgraph_mark_needed_node (node);
949 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
950 if (vnode->externally_visible && !DECL_COMDAT (vnode->decl))
951 varpool_mark_needed_node (vnode);
954 fprintf (dump_file, "\nNeeded variables:");
955 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
957 fprintf (dump_file, " %s", varpool_node_name (vnode));
958 fprintf (dump_file, "\n\n");
961 ipa_discover_readonly_nonaddressable_vars ();
965 struct ipa_opt_pass_d pass_ipa_whole_program_visibility =
969 "whole-program", /* name */
970 gate_whole_program_function_and_variable_visibility,/* gate */
971 whole_program_function_and_variable_visibility,/* execute */
974 0, /* static_pass_number */
975 TV_CGRAPHOPT, /* tv_id */
976 0, /* properties_required */
977 0, /* properties_provided */
978 0, /* properties_destroyed */
979 0, /* todo_flags_start */
980 TODO_remove_functions | TODO_dump_cgraph
981 | TODO_ggc_collect /* todo_flags_finish */
983 NULL, /* generate_summary */
984 NULL, /* write_summary */
985 NULL, /* read_summary */
986 NULL, /* write_optimization_summary */
987 NULL, /* read_optimization_summary */
988 NULL, /* stmt_fixup */
990 NULL, /* function_transform */
991 NULL, /* variable_transform */
994 /* Hash a cgraph node set element. */
997 hash_cgraph_node_set_element (const void *p)
999 const_cgraph_node_set_element element = (const_cgraph_node_set_element) p;
1000 return htab_hash_pointer (element->node);
1003 /* Compare two cgraph node set elements. */
1006 eq_cgraph_node_set_element (const void *p1, const void *p2)
1008 const_cgraph_node_set_element e1 = (const_cgraph_node_set_element) p1;
1009 const_cgraph_node_set_element e2 = (const_cgraph_node_set_element) p2;
1011 return e1->node == e2->node;
1014 /* Create a new cgraph node set. */
1017 cgraph_node_set_new (void)
1019 cgraph_node_set new_node_set;
1021 new_node_set = ggc_alloc_cgraph_node_set_def ();
1022 new_node_set->hashtab = htab_create_ggc (10,
1023 hash_cgraph_node_set_element,
1024 eq_cgraph_node_set_element,
1026 new_node_set->nodes = NULL;
1027 return new_node_set;
1030 /* Add cgraph_node NODE to cgraph_node_set SET. */
1033 cgraph_node_set_add (cgraph_node_set set, struct cgraph_node *node)
1036 cgraph_node_set_element element;
1037 struct cgraph_node_set_element_def dummy;
1040 slot = htab_find_slot (set->hashtab, &dummy, INSERT);
1042 if (*slot != HTAB_EMPTY_ENTRY)
1044 element = (cgraph_node_set_element) *slot;
1045 gcc_assert (node == element->node
1046 && (VEC_index (cgraph_node_ptr, set->nodes, element->index)
1051 /* Insert node into hash table. */
1052 element = ggc_alloc_cgraph_node_set_element_def ();
1053 element->node = node;
1054 element->index = VEC_length (cgraph_node_ptr, set->nodes);
1057 /* Insert into node vector. */
1058 VEC_safe_push (cgraph_node_ptr, gc, set->nodes, node);
1061 /* Remove cgraph_node NODE from cgraph_node_set SET. */
1064 cgraph_node_set_remove (cgraph_node_set set, struct cgraph_node *node)
1066 void **slot, **last_slot;
1067 cgraph_node_set_element element, last_element;
1068 struct cgraph_node *last_node;
1069 struct cgraph_node_set_element_def dummy;
1072 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1076 element = (cgraph_node_set_element) *slot;
1077 gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
1080 /* Remove from vector. We do this by swapping node with the last element
1082 last_node = VEC_pop (cgraph_node_ptr, set->nodes);
1083 if (last_node != node)
1085 dummy.node = last_node;
1086 last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1087 last_element = (cgraph_node_set_element) *last_slot;
1088 gcc_assert (last_element);
1090 /* Move the last element to the original spot of NODE. */
1091 last_element->index = element->index;
1092 VEC_replace (cgraph_node_ptr, set->nodes, last_element->index,
1096 /* Remove element from hash table. */
1097 htab_clear_slot (set->hashtab, slot);
1101 /* Find NODE in SET and return an iterator to it if found. A null iterator
1102 is returned if NODE is not in SET. */
1104 cgraph_node_set_iterator
1105 cgraph_node_set_find (cgraph_node_set set, struct cgraph_node *node)
1108 struct cgraph_node_set_element_def dummy;
1109 cgraph_node_set_element element;
1110 cgraph_node_set_iterator csi;
1113 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1115 csi.index = (unsigned) ~0;
1118 element = (cgraph_node_set_element) *slot;
1119 gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
1121 csi.index = element->index;
1128 /* Dump content of SET to file F. */
1131 dump_cgraph_node_set (FILE *f, cgraph_node_set set)
1133 cgraph_node_set_iterator iter;
1135 for (iter = csi_start (set); !csi_end_p (iter); csi_next (&iter))
1137 struct cgraph_node *node = csi_node (iter);
1138 fprintf (f, " %s/%i", cgraph_node_name (node), node->uid);
1143 /* Dump content of SET to stderr. */
1146 debug_cgraph_node_set (cgraph_node_set set)
1148 dump_cgraph_node_set (stderr, set);
1151 /* Hash a varpool node set element. */
1154 hash_varpool_node_set_element (const void *p)
1156 const_varpool_node_set_element element = (const_varpool_node_set_element) p;
1157 return htab_hash_pointer (element->node);
1160 /* Compare two varpool node set elements. */
1163 eq_varpool_node_set_element (const void *p1, const void *p2)
1165 const_varpool_node_set_element e1 = (const_varpool_node_set_element) p1;
1166 const_varpool_node_set_element e2 = (const_varpool_node_set_element) p2;
1168 return e1->node == e2->node;
1171 /* Create a new varpool node set. */
1174 varpool_node_set_new (void)
1176 varpool_node_set new_node_set;
1178 new_node_set = ggc_alloc_varpool_node_set_def ();
1179 new_node_set->hashtab = htab_create_ggc (10,
1180 hash_varpool_node_set_element,
1181 eq_varpool_node_set_element,
1183 new_node_set->nodes = NULL;
1184 return new_node_set;
1187 /* Add varpool_node NODE to varpool_node_set SET. */
1190 varpool_node_set_add (varpool_node_set set, struct varpool_node *node)
1193 varpool_node_set_element element;
1194 struct varpool_node_set_element_def dummy;
1197 slot = htab_find_slot (set->hashtab, &dummy, INSERT);
1199 if (*slot != HTAB_EMPTY_ENTRY)
1201 element = (varpool_node_set_element) *slot;
1202 gcc_assert (node == element->node
1203 && (VEC_index (varpool_node_ptr, set->nodes, element->index)
1208 /* Insert node into hash table. */
1209 element = ggc_alloc_varpool_node_set_element_def ();
1210 element->node = node;
1211 element->index = VEC_length (varpool_node_ptr, set->nodes);
1214 /* Insert into node vector. */
1215 VEC_safe_push (varpool_node_ptr, gc, set->nodes, node);
1218 /* Remove varpool_node NODE from varpool_node_set SET. */
/* Removal is O(1): the vector is kept dense by popping the last node
   and moving it into the vacated slot of NODE, updating that node's
   recorded index in the hash table to match.  */
1221 varpool_node_set_remove (varpool_node_set set, struct varpool_node *node)
1223 void **slot, **last_slot;
1224 varpool_node_set_element element, last_element;
1225 struct varpool_node *last_node;
1226 struct varpool_node_set_element_def dummy;
     /* NOTE(review): the dummy.node = node setup is not visible in this
	view — presumed to precede the lookup.  */
1229 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1233 element = (varpool_node_set_element) *slot;
     /* Sanity check: the hash entry's index must point at NODE.  */
1234 gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
1237 /* Remove from vector. We do this by swapping node with the last element
1239 last_node = VEC_pop (varpool_node_ptr, set->nodes);
1240 if (last_node != node)
1242 dummy.node = last_node;
1243 last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1244 last_element = (varpool_node_set_element) *last_slot;
1245 gcc_assert (last_element);
1247 /* Move the last element to the original spot of NODE. */
1248 last_element->index = element->index;
1249 VEC_replace (varpool_node_ptr, set->nodes, last_element->index,
1253 /* Remove element from hash table. */
1254 htab_clear_slot (set->hashtab, slot);
1258 /* Find NODE in SET and return an iterator to it if found. A null iterator
1259 is returned if NODE is not in SET. */
/* A "null" iterator is one with index == (unsigned) ~0; callers test
   membership via vsi_end_p on the result.  */
1261 varpool_node_set_iterator
1262 varpool_node_set_find (varpool_node_set set, struct varpool_node *node)
1265 struct varpool_node_set_element_def dummy;
1266 varpool_node_set_element element;
1267 varpool_node_set_iterator vsi;
     /* NOTE(review): the dummy.node = node setup is not visible in this
	view — presumed to precede the lookup.  */
1270 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
     /* Default to the null iterator; overwritten below when found.  */
1272 vsi.index = (unsigned) ~0;
1275 element = (varpool_node_set_element) *slot;
     /* Consistency check between hash table and vector.  */
1276 gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
1278 vsi.index = element->index;
1285 /* Dump content of SET to file F. */
/* Prints each member's name on a single line, space separated, by
   walking the set's node vector in insertion order.  */
1288 dump_varpool_node_set (FILE *f, varpool_node_set set)
1290 varpool_node_set_iterator iter;
1292 for (iter = vsi_start (set); !vsi_end_p (iter); vsi_next (&iter))
1294 struct varpool_node *node = vsi_node (iter);
1295 fprintf (f, " %s", varpool_node_name (node));
1300 /* Dump content of SET to stderr. */
/* Debug convenience wrapper over dump_varpool_node_set, for use from
   a debugger.  */
1303 debug_varpool_node_set (varpool_node_set set)
1305 dump_varpool_node_set (stderr, set);
1309 /* Simple ipa profile pass propagating frequencies across the callgraph. */
/* Execute function of the ipa-profile pass (its signature line is not
   visible in this view).  Works in two phases over a reverse postorder
   of the callgraph: an initial sweep that propagates frequencies into
   local functions, then a fixed-point loop that re-propagates as long
   as something changed.  Nodes are marked pending via their AUX pointer
   ((void *)1) and unmarked once processed.  */
1314 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1315 struct cgraph_edge *e;
1317 bool something_changed = false;
1320 order_pos = cgraph_postorder (order);
     /* Walk in reverse postorder so callers are seen before callees.  */
1321 for (i = order_pos - 1; i >= 0; i--)
1323 if (order[i]->local.local && cgraph_propagate_frequency (order[i]))
     /* The node's frequency changed; queue all local callees not
	already pending.  */
1325 for (e = order[i]->callees; e; e = e->next_callee)
1326 if (e->callee->local.local && !e->callee->aux)
1328 something_changed = true;
1329 e->callee->aux = (void *)1;
1332 order[i]->aux = NULL;
     /* Iterate to a fixed point: repeat the sweep while any node's
	frequency keeps changing.  */
1335 while (something_changed)
1337 something_changed = false;
1338 for (i = order_pos - 1; i >= 0; i--)
1340 if (order[i]->aux && cgraph_propagate_frequency (order[i]))
1342 for (e = order[i]->callees; e; e = e->next_callee)
1343 if (e->callee->local.local && !e->callee->aux)
1345 something_changed = true;
1346 e->callee->aux = (void *)1;
1349 order[i]->aux = NULL;
/* Gate of the ipa-profile pass: run only when -fipa-profile is on.  */
1357 gate_ipa_profile (void)
1359 return flag_ipa_profile;
/* Pass descriptor for the ipa-profile pass.  This is a pure
   transformation pass: it carries no LTO summary streaming hooks
   (all NULL below).  */
1362 struct ipa_opt_pass_d pass_ipa_profile =
1366 "ipa-profile", /* name */
1367 gate_ipa_profile, /* gate */
1368 ipa_profile, /* execute */
1371 0, /* static_pass_number */
1372 TV_IPA_PROFILE, /* tv_id */
1373 0, /* properties_required */
1374 0, /* properties_provided */
1375 0, /* properties_destroyed */
1376 0, /* todo_flags_start */
1377 0 /* todo_flags_finish */
1379 NULL, /* generate_summary */
1380 NULL, /* write_summary */
1381 NULL, /* read_summary */
1382 NULL, /* write_optimization_summary */
1383 NULL, /* read_optimization_summary */
1384 NULL, /* stmt_fixup */
1386 NULL, /* function_transform */
1387 NULL /* variable_transform */
1390 /* Generate and emit a static constructor or destructor. WHICH must
1391 be one of 'I' (for a constructor) or 'D' (for a destructor). BODY
1392 is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the
1393 initialization priority for this constructor or destructor. */
1396 cgraph_build_static_cdtor (char which, tree body, int priority)
     /* Counter makes each generated function name unique within this
	translation unit.  */
1398 static int counter = 0;
1400 tree decl, name, resdecl;
1402 /* The priority is encoded in the constructor or destructor name.
1403 collect2 will sort the names and arrange that they are called at
1405 sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++);
1406 name = get_file_function_name (which_buf);
     /* Build a FUNCTION_DECL of type void(void) holding BODY.  */
1408 decl = build_decl (input_location, FUNCTION_DECL, name,
1409 build_function_type_list (void_type_node, NULL_TREE));
1410 current_function_decl = decl;
1412 resdecl = build_decl (input_location,
1413 RESULT_DECL, NULL_TREE, void_type_node);
1414 DECL_ARTIFICIAL (resdecl) = 1;
1415 DECL_RESULT (decl) = resdecl;
1416 DECL_CONTEXT (resdecl) = decl;
1418 allocate_struct_function (decl, false);
1420 TREE_STATIC (decl) = 1;
1421 TREE_USED (decl) = 1;
1422 DECL_ARTIFICIAL (decl) = 1;
1423 DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
1424 DECL_SAVED_TREE (decl) = body;
     /* Without native ctor/dtor support the function must survive to
	link time so collect2 can find it by name: make it public and
	prevent it from being optimized away.  */
1425 if (!targetm.have_ctors_dtors)
1427 TREE_PUBLIC (decl) = 1;
1428 DECL_PRESERVE_P (decl) = 1;
1430 DECL_UNINLINABLE (decl) = 1;
1432 DECL_INITIAL (decl) = make_node (BLOCK);
1433 TREE_USED (DECL_INITIAL (decl)) = 1;
1435 DECL_SOURCE_LOCATION (decl) = input_location;
1436 cfun->function_end_locus = input_location;
     /* NOTE(review): a dispatch on WHICH ('I' vs 'D') is not visible in
	this view; the two branches below mark the decl accordingly.  */
1441 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1442 decl_init_priority_insert (decl, priority);
1445 DECL_STATIC_DESTRUCTOR (decl) = 1;
1446 decl_fini_priority_insert (decl, priority);
     /* Lower the GENERIC body to GIMPLE and hand the new function to
	the callgraph for compilation.  */
1452 gimplify_function_tree (decl);
1454 cgraph_add_new_function (decl, false);
1457 current_function_decl = NULL;
1461 /* A vector of FUNCTION_DECLs declared as static constructors. */
1462 static VEC(tree, heap) *static_ctors;
1463 /* A vector of FUNCTION_DECLs declared as static destructors. */
1464 static VEC(tree, heap) *static_dtors;
/* Both vectors are populated by record_cdtor_fn and freed at the end
   of ipa_cdtor_merge; they hold no state across pass invocations.  */
1466 /* When target does not have ctors and dtors, we call all constructor
1467 and destructor by special initialization/destruction function
1468 recognized by collect2.
1470 When we are going to build this function, collect all constructors and
1471 destructors and turn them into normal functions. */
/* Record NODE's decl into static_ctors and/or static_dtors as
   appropriate, and mark it so the inliner will always inline it into
   the merged caller.  */
1474 record_cdtor_fn (struct cgraph_node *node)
1476 if (DECL_STATIC_CONSTRUCTOR (node->decl))
1477 VEC_safe_push (tree, heap, static_ctors, node->decl);
1478 if (DECL_STATIC_DESTRUCTOR (node->decl))
1479 VEC_safe_push (tree, heap, static_dtors, node->decl);
     /* Re-fetch the canonical node for the decl before flagging it.  */
1480 node = cgraph_node (node->decl);
1481 node->local.disregard_inline_limits = 1;
1484 /* Define global constructors/destructor functions for the CDTORS, of
1485 which they are LEN. The CDTORS are sorted by initialization
1486 priority. If CTOR_P is true, these are constructors; otherwise,
1487 they are destructors. */
/* For each run of CDTORS sharing one priority, emit a single merged
   function (via cgraph_build_static_cdtor) whose body calls each of
   them in order.  NOTE(review): the surrounding loop structure is
   partially elided in this view.  */
1490 build_cdtor (bool ctor_p, VEC (tree, heap) *cdtors)
1493 size_t len = VEC_length (tree, cdtors);
1500 priority_type priority;
     /* Scan forward to find where the current priority run ends.  */
1508 fn = VEC_index (tree, cdtors, j);
1509 p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
1512 else if (p != priority)
1518 /* When there is only one cdtor and target supports them, do nothing. */
1520 && targetm.have_ctors_dtors)
1525 /* Find the next batch of constructors/destructors with the same
1526 initialization priority. */
1530 fn = VEC_index (tree, cdtors, i);
1531 call = build_call_expr (fn, 0);
     /* Demote FN to an ordinary function: the merged wrapper takes
	over its ctor/dtor role.  */
1533 DECL_STATIC_CONSTRUCTOR (fn) = 0;
1535 DECL_STATIC_DESTRUCTOR (fn) = 0;
1536 /* We do not want to optimize away pure/const calls here.
1537 When optimizing, these should be already removed, when not
1538 optimizing, we want user to be able to breakpoint in them. */
1539 TREE_SIDE_EFFECTS (call) = 1;
1540 append_to_statement_list (call, &body);
1542 gcc_assert (body != NULL_TREE);
1543 /* Generate a function to call all the function of like
1545 cgraph_build_static_cdtor (ctor_p ? 'I' : 'D', body, priority);
1549 /* Comparison function for qsort. P1 and P2 are actually of type
1550 "tree *" and point to static constructors. DECL_INIT_PRIORITY is
1551 used to determine the sort order. */
1554 compare_ctor (const void *p1, const void *p2)
1561 f1 = *(const tree *)p1;
1562 f2 = *(const tree *)p2;
1563 priority1 = DECL_INIT_PRIORITY (f1);
1564 priority2 = DECL_INIT_PRIORITY (f2);
     /* Primary key: ascending initialization priority.  */
1566 if (priority1 < priority2)
1568 else if (priority1 > priority2)
1571 /* Ensure a stable sort.  Constructors are executed in backward
1572 order to make LTO initialize libraries first. */
1573 return DECL_UID (f2) - DECL_UID (f1);
1576 /* Comparison function for qsort. P1 and P2 are actually of type
1577 "tree *" and point to static destructors. DECL_FINI_PRIORITY is
1578 used to determine the sort order. */
1581 compare_dtor (const void *p1, const void *p2)
1588 f1 = *(const tree *)p1;
1589 f2 = *(const tree *)p2;
1590 priority1 = DECL_FINI_PRIORITY (f1);
1591 priority2 = DECL_FINI_PRIORITY (f2);
     /* Primary key: ascending finalization priority.  */
1593 if (priority1 < priority2)
1595 else if (priority1 > priority2)
1598 /* Ensure a stable sort.  Note the UID order is the opposite of
   compare_ctor: destructors run in forward UID order.  */
1599 return DECL_UID (f1) - DECL_UID (f2);
1602 /* Generate functions to call static constructors and destructors
1603 for targets that do not support .ctors/.dtors sections. These
1604 functions have magic names which are detected by collect2. */
1607 build_cdtor_fns (void)
1609 if (!VEC_empty (tree, static_ctors))
     /* We only get here when the target lacks native support or when
	merging at LTO time; assert that invariant.  */
1611 gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
     /* Sort by priority (then UID) before merging per-priority runs.  */
1612 VEC_qsort (tree, static_ctors, compare_ctor);
1613 build_cdtor (/*ctor_p=*/true, static_ctors);
1616 if (!VEC_empty (tree, static_dtors))
1618 gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
1619 VEC_qsort (tree, static_dtors, compare_dtor);
1620 build_cdtor (/*ctor_p=*/false, static_dtors);
1624 /* Look for constructors and destructors and produce function calling them.
1625 This is needed for targets not supporting ctors or dtors, but we perform the
1626 transformation also at linktime to merge possibly numerous
1627 constructors/destructors into single function to improve code locality and
/* Execute function of the pass_ipa_cdtor_merge pass.  Collects every
   ctor/dtor in the callgraph, builds the merged wrappers, then frees
   the scratch vectors.  NOTE(review): the call to build_cdtor_fns is
   not visible in this view — presumed between collection and the
   VEC_free calls.  */
1631 ipa_cdtor_merge (void)
1633 struct cgraph_node *node;
1634 for (node = cgraph_nodes; node; node = node->next)
1636 && (DECL_STATIC_CONSTRUCTOR (node->decl)
1637 || DECL_STATIC_DESTRUCTOR (node->decl)))
1638 record_cdtor_fn (node);
     /* Release the heap vectors; they carry no state between runs.  */
1640 VEC_free (tree, heap, static_ctors);
1641 VEC_free (tree, heap, static_dtors);
1645 /* Perform the pass when we have no ctors/dtors support
1646 or at LTO time to merge multiple constructors into single
/* Gate of pass_ipa_cdtor_merge: run when the target cannot emit
   .ctors/.dtors natively, or when optimizing at link time.  */
1650 gate_ipa_cdtor_merge (void)
1652 return !targetm.have_ctors_dtors || (optimize && in_lto_p);
1655 struct ipa_opt_pass_d pass_ipa_cdtor_merge =
1660 gate_ipa_cdtor_merge, /* gate */
1661 ipa_cdtor_merge, /* execute */
1664 0, /* static_pass_number */
1665 TV_CGRAPHOPT, /* tv_id */
1666 0, /* properties_required */
1667 0, /* properties_provided */
1668 0, /* properties_destroyed */
1669 0, /* todo_flags_start */
1670 0 /* todo_flags_finish */
1672 NULL, /* generate_summary */
1673 NULL, /* write_summary */
1674 NULL, /* read_summary */
1675 NULL, /* write_optimization_summary */
1676 NULL, /* read_optimization_summary */
1677 NULL, /* stmt_fixup */
1679 NULL, /* function_transform */
1680 NULL /* variable_transform */