1 /* Basic IPA optimizations and utilities.
2 Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
26 #include "tree-pass.h"
32 /* Fill array order with all nodes with output flag set in the reverse
   topological (post-) order, returning the number of nodes stored.
   NOTE(review): this listing has source lines elided (the embedded
   numbering skips), so only the visible code is described below.  */
36 cgraph_postorder (struct cgraph_node **order)
38 struct cgraph_node *node, *node2;
/* LAST is never dereferenced; its address serves purely as an
   end-of-callers sentinel stored in NODE->aux.  */
41 struct cgraph_edge *edge, last;
44 struct cgraph_node **stack =
45 XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
47 /* We have to deal with cycles nicely, so use a depth first traversal
48 output algorithm. Ignore the fact that some functions won't need
49 to be output and put them into order as well, so we get dependencies
50 right through inline functions. */
51 for (node = cgraph_nodes; node; node = node->next)
/* Two passes: presumably the first pass restricts the DFS roots (see
   the condition below) and the second picks up the remainder —
   the controlling condition is partly elided, so confirm against the
   full source.  */
53 for (pass = 0; pass < 2; pass++)
54 for (node = cgraph_nodes; node; node = node->next)
57 || (!cgraph_only_called_directly_p (node)
58 && !node->address_taken)))
/* AUX walks the caller list; &last in AUX means "all callers done".  */
64 node->aux = node->callers;
67 while (node2->aux != &last)
69 edge = (struct cgraph_edge *) node2->aux;
70 if (edge->next_caller)
71 node2->aux = edge->next_caller;
74 /* Break possible cycles involving always-inline
75 functions by ignoring edges from always-inline
76 functions to non-always-inline functions. */
77 if (edge->caller->local.disregard_inline_limits
78 && !edge->callee->local.disregard_inline_limits)
80 if (!edge->caller->aux)
82 if (!edge->caller->callers)
83 edge->caller->aux = &last;
85 edge->caller->aux = edge->caller->callers;
/* Push the current node and descend into the caller (DFS).  */
86 stack[stack_size++] = node2;
91 if (node2->aux == &last)
/* All callers of NODE2 processed: emit it and pop the DFS stack.  */
93 order[order_pos++] = node2;
95 node2 = stack[--stack_size];
/* Clear AUX pointers so later passes can reuse them.  */
102 for (node = cgraph_nodes; node; node = node->next)
107 /* Look for all functions inlined to NODE and update their inlined_to pointers
   to INLINED_TO, recursing through the whole inline tree.  */
111 update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
113 struct cgraph_edge *e;
114 for (e = node->callees; e; e = e->next_callee)
/* Only callees already marked as inlined somewhere belong to the
   inline tree rooted at NODE; retarget them and recurse.  */
115 if (e->callee->global.inlined_to)
117 e->callee->global.inlined_to = inlined_to;
118 update_inlined_to_pointer (e->callee, inlined_to);
122 /* Add cgraph NODE to queue starting at FIRST.
124 The queue is linked via AUX pointers and terminated by pointer to 1.
125 We enqueue nodes at two occasions: when we find them reachable or when we find
126 their bodies needed for further cloning. In the second case we mark them
127 by pointer to 2 after processing so they are re-queued only when they become
   reachable again.  */
131 enqueue_cgraph_node (struct cgraph_node *node, struct cgraph_node **first)
133 /* Node is still in queue; do nothing. */
134 if (node->aux && node->aux != (void *) 2)
136 /* Node was already processed as unreachable, re-enqueue
137 only if it became reachable now. */
138 if (node->aux == (void *)2 && !node->reachable)
144 /* Add varpool NODE to queue starting at FIRST. */
/* NOTE(review): the body is elided in this listing; presumably it links
   NODE into the AUX-chained worklist like enqueue_cgraph_node above —
   confirm against the full source.  */
147 enqueue_varpool_node (struct varpool_node *node, struct varpool_node **first)
153 /* Process references in LIST: mark referenced cgraph nodes reachable
   (enqueuing them on *FIRST) and referenced varpool nodes needed
   (enqueuing them on *FIRST_VARPOOL).  BEFORE_INLINING_P controls
   whether bodies of DECL_EXTERNAL functions are still considered.  */
156 process_references (struct ipa_ref_list *list,
157 struct cgraph_node **first,
158 struct varpool_node **first_varpool,
159 bool before_inlining_p)
163 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
165 if (ref->refered_type == IPA_REF_CGRAPH)
167 struct cgraph_node *node = ipa_ref_node (ref);
/* External functions are only live before inlining decisions;
   afterwards their bodies can be dropped.  */
169 && (!DECL_EXTERNAL (node->decl)
170 || before_inlining_p))
172 node->reachable = true;
173 enqueue_cgraph_node (node, first);
/* Otherwise the reference is to a variable.  */
178 struct varpool_node *node = ipa_ref_varpool_node (ref);
181 varpool_mark_needed_node (node);
182 enqueue_varpool_node (node, first_varpool);
188 /* Return true when variable NODE can be removed from the varpool
189 if all references to it are eliminated. */
192 varpool_can_remove_if_no_refs (struct varpool_node *node)
/* COMDAT variables may be privatized/removed even if externally
   visible, since other units carry their own copy.  */
194 return (!node->force_output && !node->used_from_other_partition
195 && (DECL_COMDAT (node->decl) || !node->externally_visible));
198 /* Return true when function NODE can be marked local: all calls are
   direct and visible within this compilation unit/partition, so its
   calling convention may be changed.  */
201 cgraph_local_node_p (struct cgraph_node *node)
203 return (cgraph_only_called_directly_p (node)
205 && !DECL_EXTERNAL (node->decl)
206 && !node->local.externally_visible
207 && !node->reachable_from_other_partition
208 && !node->in_other_partition);
211 /* Perform reachability analysis and reclaim all unreachable nodes.
212 If BEFORE_INLINING_P is true this function is called before inlining
213 decisions have been made. If BEFORE_INLINING_P is false this function also
214 removes unneeded bodies of extern inline functions. */
/* NOTE(review): this listing has source lines elided; in particular the
   #ifdef ENABLE_CHECKING blocks below appear without their matching
   #endif lines — verify against the full source.  */
217 cgraph_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
/* (void *) 1 is the worklist terminator; see enqueue_cgraph_node.  */
219 struct cgraph_node *first = (struct cgraph_node *) (void *) 1;
220 struct varpool_node *first_varpool = (struct varpool_node *) (void *) 1;
221 struct cgraph_node *node, *next;
222 struct varpool_node *vnode, *vnext;
223 bool changed = false;
225 #ifdef ENABLE_CHECKING
229 fprintf (file, "\nReclaiming functions:");
230 #ifdef ENABLE_CHECKING
/* AUX pointers must be free for use as worklist links.  */
231 for (node = cgraph_nodes; node; node = node->next)
232 gcc_assert (!node->aux);
233 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
234 gcc_assert (!vnode->aux);
236 varpool_reset_queue ();
/* Seed the function worklist with the roots: nodes that cannot be
   removed even with no direct calls or references.  */
237 for (node = cgraph_nodes; node; node = node->next)
238 if (!cgraph_can_remove_if_no_direct_calls_and_refs_p (node)
239 && ((!DECL_EXTERNAL (node->decl))
240 || before_inlining_p))
242 gcc_assert (!node->global.inlined_to);
243 enqueue_cgraph_node (node, &first);
244 node->reachable = true;
248 gcc_assert (!node->aux);
249 node->reachable = false;
/* Likewise seed the variable worklist with varpool roots.  */
251 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
253 vnode->next_needed = NULL;
254 vnode->prev_needed = NULL;
255 if (!varpool_can_remove_if_no_refs (vnode))
257 vnode->needed = false;
258 varpool_mark_needed_node (vnode);
259 enqueue_varpool_node (vnode, &first_varpool);
261 vnode->needed = false;
265 /* Perform reachability analysis. As a special case do not consider
266 extern inline functions not inlined as live because we won't output
269 We maintain two worklists, one for cgraph nodes and another for varpool
270 nodes, and are finished once both are empty. */
272 while (first != (struct cgraph_node *) (void *) 1
273 || first_varpool != (struct varpool_node *) (void *) 1)
275 if (first != (struct cgraph_node *) (void *) 1)
277 struct cgraph_edge *e;
/* Pop the head of the function worklist.  */
279 first = (struct cgraph_node *) first->aux;
/* Mark processed-but-unreachable with (void *) 2 so it can be
   re-queued if it later becomes reachable.  */
280 if (!node->reachable)
281 node->aux = (void *)2;
283 /* If we found this node reachable, first mark on the callees
284 reachable too, unless they are direct calls to extern inline functions
285 we decided to not inline. */
287 for (e = node->callees; e; e = e->next_callee)
288 if (!e->callee->reachable
290 && (!e->inline_failed || !e->callee->analyzed
291 || (!DECL_EXTERNAL (e->callee->decl))
292 || before_inlining_p))
294 e->callee->reachable = true;
295 enqueue_cgraph_node (e->callee, &first);
298 /* If any function in a comdat group is reachable, force
299 all other functions in the same comdat group to be
   reachable as well.  */
301 if (node->same_comdat_group
303 && !node->global.inlined_to)
305 for (next = node->same_comdat_group;
307 next = next->same_comdat_group)
308 if (!next->reachable)
310 next->reachable = true;
311 enqueue_cgraph_node (next, &first);
315 /* We can freely remove inline clones even if they are cloned, however if
316 function is clone of real clone, we must keep it around in order to
317 make materialize_clones produce function body with the changes
   applied.  */
319 while (node->clone_of && !node->clone_of->aux && !gimple_has_body_p (node->decl))
321 bool noninline = node->clone_of->decl != node->decl;
322 node = node->clone_of;
323 if (noninline && !node->reachable && !node->aux)
325 enqueue_cgraph_node (node, &first);
/* Everything this node references is live too.  */
329 process_references (&node->ref_list, &first, &first_varpool, before_inlining_p);
/* Pop the head of the variable worklist and process its refs.  */
331 if (first_varpool != (struct varpool_node *) (void *) 1)
333 vnode = first_varpool;
334 first_varpool = (struct varpool_node *)first_varpool->aux;
336 process_references (&vnode->ref_list, &first, &first_varpool, before_inlining_p);
340 /* Remove unreachable nodes.
342 Completely unreachable functions can be fully removed from the callgraph.
343 Extern inline functions that we decided to not inline need to become unanalyzed nodes of
344 callgraph (so we still have edges to them). We remove their function body then.
346 Also we need to care about functions that are unreachable but we need to keep around
347 for later cloning. In this case we also turn them to unanalyzed nodes, but
348 keep the body around. */
349 for (node = cgraph_nodes; node; node = next)
/* AUX set but not reachable: the node was visited only for cloning
   purposes — keep the node, drop its analysis.  */
352 if (node->aux && !node->reachable)
354 cgraph_node_remove_callees (node);
355 node->analyzed = false;
356 node->local.inlinable = false;
360 node->global.inlined_to = NULL;
362 fprintf (file, " %s", cgraph_node_name (node));
363 if (!node->analyzed || !DECL_EXTERNAL (node->decl) || before_inlining_p)
364 cgraph_remove_node (node);
367 struct cgraph_edge *e;
369 /* See if there is a reachable caller. */
370 for (e = node->callers; e; e = e->next_caller)
371 if (e->caller->reachable)
374 /* If so, we need to keep node in the callgraph. */
375 if (e || node->needed)
377 struct cgraph_node *clone;
379 /* If there are still clones, we must keep body around.
380 Otherwise we can just remove the body but keep the clone. */
381 for (clone = node->clones; clone;
382 clone = clone->next_sibling_clone)
387 cgraph_release_function_body (node);
388 node->analyzed = false;
389 node->local.inlinable = false;
/* Detach NODE from the clone tree (fix sibling/parent links).  */
392 gcc_assert (!clone->in_other_partition);
393 cgraph_node_remove_callees (node);
394 if (node->prev_sibling_clone)
395 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
396 else if (node->clone_of)
397 node->clone_of->clones = node->next_sibling_clone;
398 if (node->next_sibling_clone)
399 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
400 node->clone_of = NULL;
401 node->next_sibling_clone = NULL;
402 node->prev_sibling_clone = NULL;
/* No reachable caller and not needed: remove entirely.  */
405 cgraph_remove_node (node);
410 for (node = cgraph_nodes; node; node = node->next)
412 /* Inline clones might be kept around so their materializing allows further
413 cloning. If the function the clone is inlined into is removed, we need
414 to turn it into a normal clone. */
415 if (node->global.inlined_to
418 gcc_assert (node->clones);
419 node->global.inlined_to = NULL;
420 update_inlined_to_pointer (node, node);
426 fprintf (file, "\n");
428 /* We must release unused extern inlines or sanity checking will fail. Rest of transformations
429 are undesirable at -O0 since we do not want to remove anything. */
434 fprintf (file, "Reclaiming variables:");
435 for (vnode = varpool_nodes; vnode; vnode = vnext)
441 fprintf (file, " %s", varpool_node_name (vnode));
442 varpool_remove_node (vnode);
447 /* Now update address_taken flags and try to promote functions to be local. */
450 fprintf (file, "\nClearing address taken flags:");
451 for (node = cgraph_nodes; node; node = node->next)
452 if (node->address_taken
453 && !node->reachable_from_other_partition)
/* Scan incoming references; only address references can keep the
   address_taken flag set (the loop exit condition is elided here).  */
458 for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref)
461 gcc_assert (ref->use == IPA_REF_ADDR);
467 fprintf (file, " %s", cgraph_node_name (node));
468 node->address_taken = false;
470 if (cgraph_local_node_p (node))
472 node->local.local = true;
474 fprintf (file, " (local)");
479 #ifdef ENABLE_CHECKING
483 /* Reclaim alias pairs for functions that have disappeared from the
   callgraph.  */
485 remove_unreachable_alias_pairs ();
490 /* Discover variables that no longer have their address taken or that are read only
491 and update their TREE_ADDRESSABLE / TREE_READONLY flags.
493 FIXME: This can not be done in between gimplify and omp_expand since
494 readonly flag plays role on what is shared and what is not. Currently we do
495 this transformation as part of ipa-reference pass, but it would make sense
496 to do it before early optimizations. */
499 ipa_discover_readonly_nonaddressable_vars (void)
501 struct varpool_node *vnode;
503 fprintf (dump_file, "Clearing variable flags:");
/* Only consider finalized variables whose every reference is visible,
   and only those where a flag could actually be improved.  */
504 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
505 if (vnode->finalized && varpool_all_refs_explicit_p (vnode)
506 && (TREE_ADDRESSABLE (vnode->decl) || !TREE_READONLY (vnode->decl)))
508 bool written = false;
509 bool address_taken = false;
/* Scan references; stop early once both facts are established.  */
512 for (i = 0; ipa_ref_list_refering_iterate (&vnode->ref_list, i, ref)
513 && (!written || !address_taken); i++)
517 address_taken = true;
/* No reference takes the address: clear TREE_ADDRESSABLE.  */
525 if (TREE_ADDRESSABLE (vnode->decl) && !address_taken)
528 fprintf (dump_file, " %s (addressable)", varpool_node_name (vnode));
529 TREE_ADDRESSABLE (vnode->decl) = 0;
/* Never written nor address-taken: it can be promoted to read-only.  */
531 if (!TREE_READONLY (vnode->decl) && !address_taken && !written
532 /* Making a variable in an explicit section readonly can cause section
534 conflicts. See e.g. gcc.c-torture/compile/pr23237.c */
535 && DECL_SECTION_NAME (vnode->decl) == NULL)
538 fprintf (dump_file, " %s (read-only)", varpool_node_name (vnode));
539 TREE_READONLY (vnode->decl) = 1;
543 fprintf (dump_file, "\n");
546 /* Return true when function NODE should be considered externally visible. */
549 cgraph_externally_visible_p (struct cgraph_node *node, bool whole_program)
550 if (!node->local.finalized)
/* Note: the two conditions in this line pair are from the original;
   local.finalized check appears here with its result line elided.  */
553 if (!DECL_COMDAT (node->decl)
554 && (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl)))
/* Functions marked used (e.g. attribute((used))) must stay visible.  */
558 if (DECL_PRESERVE_P (node->decl))
560 /* COMDAT functions must be shared only if they have address taken,
561 otherwise we can produce our own private implementation with
   -fwhole-program.  */
563 if (DECL_COMDAT (node->decl))
565 if (node->address_taken || !node->analyzed)
567 if (node->same_comdat_group)
569 struct cgraph_node *next;
571 /* If more than one function is in the same COMDAT group, it must
572 be shared even if just one function in the comdat group has
   its address taken.  */
574 for (next = node->same_comdat_group;
576 next = next->same_comdat_group)
577 if (next->address_taken || !next->analyzed)
/* main() is always externally visible.  */
581 if (MAIN_NAME_P (DECL_NAME (node->decl)))
583 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (node->decl)))
588 /* Dissolve the same_comdat_group list in which NODE resides:
   walk the circular list and clear each member's link.  */
591 dissolve_same_comdat_group_list (struct cgraph_node *node)
593 struct cgraph_node *n = node, *next;
596 next = n->same_comdat_group;
597 n->same_comdat_group = NULL;
603 /* Mark visibility of all functions.
605 A local function is one whose calls can occur only in the current
606 compilation unit and all its calls are explicit, so we can change
607 its calling convention. We simply mark all static functions whose
608 address is not taken as local.
610 We also change the TREE_PUBLIC flag of all declarations that are public
611 in language point of view but we want to overwrite this default
612 via visibilities for the backend point of view. */
615 function_and_variable_visibility (bool whole_program)
617 struct cgraph_node *node;
618 struct varpool_node *vnode;
620 for (node = cgraph_nodes; node; node = node->next)
622 /* C++ FE on lack of COMDAT support create local COMDAT functions
623 (that ought to be shared but can not due to object format
624 limitations). It is necessary to keep the flag to make rest of C++ FE
625 happy. Clear the flag here to avoid confusion in middle-end. */
626 if (DECL_COMDAT (node->decl) && !TREE_PUBLIC (node->decl))
627 DECL_COMDAT (node->decl) = 0;
628 /* For external decls stop tracking same_comdat_group, it doesn't matter
629 what comdat group they are in when they won't be emitted in this TU,
630 and simplifies later passes. */
631 if (node->same_comdat_group && DECL_EXTERNAL (node->decl))
633 #ifdef ENABLE_CHECKING
634 struct cgraph_node *n;
636 for (n = node->same_comdat_group;
638 n = n->same_comdat_group)
639 /* If at least one of same comdat group functions is external,
640 all of them have to be, otherwise it is a front-end bug. */
641 gcc_assert (DECL_EXTERNAL (n->decl));
643 dissolve_same_comdat_group_list (node);
645 gcc_assert ((!DECL_WEAK (node->decl) && !DECL_COMDAT (node->decl))
646 || TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl));
647 if (cgraph_externally_visible_p (node, whole_program))
649 gcc_assert (!node->global.inlined_to);
650 node->local.externally_visible = true;
653 node->local.externally_visible = false;
/* Non-visible, analyzed, non-external functions can be made
   fully local (static).  */
654 if (!node->local.externally_visible && node->analyzed
655 && !DECL_EXTERNAL (node->decl))
657 struct cgraph_node *alias;
658 gcc_assert (whole_program || !TREE_PUBLIC (node->decl));
659 cgraph_make_decl_local (node->decl);
/* Same-body aliases must be localized together with the function.  */
660 for (alias = node->same_body; alias; alias = alias->next)
661 cgraph_make_decl_local (alias->decl);
662 if (node->same_comdat_group)
663 /* cgraph_externally_visible_p has already checked all other nodes
664 in the group and they will all be made local. We need to
665 dissolve the group at once so that the predicate does not
667 need to re-examine partially processed groups.  */
667 dissolve_same_comdat_group_list (node);
669 node->local.local = cgraph_local_node_p (node);
671 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
673 /* weak flag makes no sense on local variables. */
674 gcc_assert (!DECL_WEAK (vnode->decl)
675 || TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl));
676 /* In several cases declarations can not be common:
678 - when declaration has initializer
680 - when it has specific section
681 - when it resides in non-generic address space.
682 - if declaration is local, it will get into .local common section
683 so common flag is not needed. Frontends still produce these in
684 certain cases, such as for:
686 static int a __attribute__ ((common))
688 Canonicalize things here and clear the redundant flag. */
689 if (DECL_COMMON (vnode->decl)
690 && (!(TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl))
691 || (DECL_INITIAL (vnode->decl)
692 && DECL_INITIAL (vnode->decl) != error_mark_node)
693 || DECL_WEAK (vnode->decl)
694 || DECL_SECTION_NAME (vnode->decl) != NULL
695 || ! (ADDR_SPACE_GENERIC_P
696 (TYPE_ADDR_SPACE (TREE_TYPE (vnode->decl))))))
697 DECL_COMMON (vnode->decl) = 0;
/* Now decide visibility for queued variables.  */
699 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
701 if (!vnode->finalized)
704 && (DECL_COMDAT (vnode->decl) || TREE_PUBLIC (vnode->decl))
706 /* We can privatize comdat readonly variables whose address is not taken,
707 but doing so is not going to bring us optimization opportunities until
708 we start reordering data structures. */
709 || DECL_COMDAT (vnode->decl)
710 || DECL_WEAK (vnode->decl)
711 || lookup_attribute ("externally_visible",
712 DECL_ATTRIBUTES (vnode->decl))))
713 vnode->externally_visible = true;
715 vnode->externally_visible = false;
716 if (!vnode->externally_visible)
718 gcc_assert (whole_program || !TREE_PUBLIC (vnode->decl));
719 cgraph_make_decl_local (vnode->decl);
721 gcc_assert (TREE_STATIC (vnode->decl));
/* Dump the final decisions.  */
726 fprintf (dump_file, "\nMarking local functions:");
727 for (node = cgraph_nodes; node; node = node->next)
728 if (node->local.local)
729 fprintf (dump_file, " %s", cgraph_node_name (node));
730 fprintf (dump_file, "\n\n");
731 fprintf (dump_file, "\nMarking externally visible functions:");
732 for (node = cgraph_nodes; node; node = node->next)
733 if (node->local.externally_visible)
734 fprintf (dump_file, " %s", cgraph_node_name (node));
735 fprintf (dump_file, "\n\n");
736 fprintf (dump_file, "\nMarking externally visible variables:");
737 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
738 if (vnode->externally_visible)
739 fprintf (dump_file, " %s", varpool_node_name (vnode));
740 fprintf (dump_file, "\n\n");
742 cgraph_function_flags_ready = true;
746 /* Local function pass handling visibilities. This happens before LTO streaming,
747 so in particular -fwhole-program should be ignored at this level
   (it is only honored when neither -flto nor -fwhopr is in effect). */
750 local_function_and_variable_visibility (void)
752 return function_and_variable_visibility (flag_whole_program && !flag_lto && !flag_whopr);
/* Pass descriptor for the simple IPA visibility pass; runs
   local_function_and_variable_visibility as its execute hook.  */
755 struct simple_ipa_opt_pass pass_ipa_function_and_variable_visibility =
759 "visibility", /* name */
761 local_function_and_variable_visibility,/* execute */
764 0, /* static_pass_number */
765 TV_CGRAPHOPT, /* tv_id */
766 0, /* properties_required */
767 0, /* properties_provided */
768 0, /* properties_destroyed */
769 0, /* todo_flags_start */
770 TODO_remove_functions | TODO_dump_cgraph
771 | TODO_ggc_collect /* todo_flags_finish */
775 /* Gate: do not re-run the whole-program visibility pass on the ltrans stage. */
/* NOTE(review): the body is elided in this listing.  */
778 gate_whole_program_function_and_variable_visibility (void)
783 /* Bring functions local at LTO time with -fwhole-program. */
786 whole_program_function_and_variable_visibility (void)
788 struct cgraph_node *node;
789 struct varpool_node *vnode;
/* First compute visibility honoring -fwhole-program.  */
791 function_and_variable_visibility (flag_whole_program);
/* Externally visible non-COMDAT entities must be kept (needed).  */
793 for (node = cgraph_nodes; node; node = node->next)
794 if ((node->local.externally_visible && !DECL_COMDAT (node->decl))
795 && node->local.finalized)
796 cgraph_mark_needed_node (node);
797 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
798 if (vnode->externally_visible && !DECL_COMDAT (vnode->decl))
799 varpool_mark_needed_node (vnode);
802 fprintf (dump_file, "\nNeeded variables:");
803 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
805 fprintf (dump_file, " %s", varpool_node_name (vnode));
806 fprintf (dump_file, "\n\n");
/* Pass descriptor for whole-program visibility; gated to skip the
   ltrans stage (see gate function above).  */
811 struct ipa_opt_pass_d pass_ipa_whole_program_visibility =
815 "whole-program", /* name */
816 gate_whole_program_function_and_variable_visibility,/* gate */
817 whole_program_function_and_variable_visibility,/* execute */
820 0, /* static_pass_number */
821 TV_CGRAPHOPT, /* tv_id */
822 0, /* properties_required */
823 0, /* properties_provided */
824 0, /* properties_destroyed */
825 0, /* todo_flags_start */
826 TODO_remove_functions | TODO_dump_cgraph
827 | TODO_ggc_collect /* todo_flags_finish */
829 NULL, /* generate_summary */
830 NULL, /* write_summary */
831 NULL, /* read_summary */
832 NULL, /* write_optimization_summary */
833 NULL, /* read_optimization_summary */
834 NULL, /* stmt_fixup */
836 NULL, /* function_transform */
837 NULL, /* variable_transform */
840 /* Hash a cgraph node set element (htab callback): hash on the
   node pointer itself.  */
843 hash_cgraph_node_set_element (const void *p)
845 const_cgraph_node_set_element element = (const_cgraph_node_set_element) p;
846 return htab_hash_pointer (element->node);
849 /* Compare two cgraph node set elements (htab callback): equal iff
   they wrap the same node pointer.  */
852 eq_cgraph_node_set_element (const void *p1, const void *p2)
854 const_cgraph_node_set_element e1 = (const_cgraph_node_set_element) p1;
855 const_cgraph_node_set_element e2 = (const_cgraph_node_set_element) p2;
857 return e1->node == e2->node;
860 /* Create a new, empty cgraph node set (GC-allocated hash table plus
   parallel node vector).  */
863 cgraph_node_set_new (void)
865 cgraph_node_set new_node_set;
867 new_node_set = GGC_NEW (struct cgraph_node_set_def);
868 new_node_set->hashtab = htab_create_ggc (10,
869 hash_cgraph_node_set_element,
870 eq_cgraph_node_set_element,
872 new_node_set->nodes = NULL;
876 /* Add cgraph_node NODE to cgraph_node_set SET.  No-op (beyond a sanity
   check) when NODE is already a member.  */
879 cgraph_node_set_add (cgraph_node_set set, struct cgraph_node *node)
882 cgraph_node_set_element element;
883 struct cgraph_node_set_element_def dummy;
886 slot = htab_find_slot (set->hashtab, &dummy, INSERT);
/* Already present: the hash entry must agree with the vector.  */
888 if (*slot != HTAB_EMPTY_ENTRY)
890 element = (cgraph_node_set_element) *slot;
891 gcc_assert (node == element->node
892 && (VEC_index (cgraph_node_ptr, set->nodes, element->index)
897 /* Insert node into hash table. */
899 (cgraph_node_set_element) GGC_NEW (struct cgraph_node_set_element_def);
900 element->node = node;
/* Element records its position in the node vector.  */
901 element->index = VEC_length (cgraph_node_ptr, set->nodes);
904 /* Insert into node vector. */
905 VEC_safe_push (cgraph_node_ptr, gc, set->nodes, node);
908 /* Remove cgraph_node NODE from cgraph_node_set SET.  */
911 cgraph_node_set_remove (cgraph_node_set set, struct cgraph_node *node)
913 void **slot, **last_slot;
914 cgraph_node_set_element element, last_element;
915 struct cgraph_node *last_node;
916 struct cgraph_node_set_element_def dummy;
919 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
923 element = (cgraph_node_set_element) *slot;
924 gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
927 /* Remove from vector. We do this by swapping node with the last element
   of the vector and shrinking it — O(1) removal.  */
929 last_node = VEC_pop (cgraph_node_ptr, set->nodes);
930 if (last_node != node)
932 dummy.node = last_node;
933 last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
934 last_element = (cgraph_node_set_element) *last_slot;
935 gcc_assert (last_element);
937 /* Move the last element to the original spot of NODE. */
938 last_element->index = element->index;
939 VEC_replace (cgraph_node_ptr, set->nodes, last_element->index,
943 /* Remove element from hash table. */
944 htab_clear_slot (set->hashtab, slot);
948 /* Find NODE in SET and return an iterator to it if found. A null iterator
949 is returned if NODE is not in SET. */
951 cgraph_node_set_iterator
952 cgraph_node_set_find (cgraph_node_set set, struct cgraph_node *node)
955 struct cgraph_node_set_element_def dummy;
956 cgraph_node_set_element element;
957 cgraph_node_set_iterator csi;
960 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
/* ~0 is the "null iterator" sentinel index.  */
962 csi.index = (unsigned) ~0;
965 element = (cgraph_node_set_element) *slot;
966 gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
968 csi.index = element->index;
975 /* Dump content of SET to file F, one node at a time.  */
978 dump_cgraph_node_set (FILE *f, cgraph_node_set set)
980 cgraph_node_set_iterator iter;
982 for (iter = csi_start (set); !csi_end_p (iter); csi_next (&iter))
984 struct cgraph_node *node = csi_node (iter);
985 dump_cgraph_node (f, node);
989 /* Dump content of SET to stderr (debugger convenience wrapper). */
992 debug_cgraph_node_set (cgraph_node_set set)
994 dump_cgraph_node_set (stderr, set);
997 /* Hash a varpool node set element (htab callback): hash on the
   node pointer itself.  */
1000 hash_varpool_node_set_element (const void *p)
1002 const_varpool_node_set_element element = (const_varpool_node_set_element) p;
1003 return htab_hash_pointer (element->node);
1006 /* Compare two varpool node set elements (htab callback): equal iff
   they wrap the same node pointer.  */
1009 eq_varpool_node_set_element (const void *p1, const void *p2)
1011 const_varpool_node_set_element e1 = (const_varpool_node_set_element) p1;
1012 const_varpool_node_set_element e2 = (const_varpool_node_set_element) p2;
1014 return e1->node == e2->node;
1017 /* Create a new, empty varpool node set (GC-allocated hash table plus
   parallel node vector).  */
1020 varpool_node_set_new (void)
1022 varpool_node_set new_node_set;
1024 new_node_set = GGC_NEW (struct varpool_node_set_def);
1025 new_node_set->hashtab = htab_create_ggc (10,
1026 hash_varpool_node_set_element,
1027 eq_varpool_node_set_element,
1029 new_node_set->nodes = NULL;
1030 return new_node_set;
1033 /* Add varpool_node NODE to varpool_node_set SET.  No-op (beyond a
   sanity check) when NODE is already a member.  */
1036 varpool_node_set_add (varpool_node_set set, struct varpool_node *node)
1039 varpool_node_set_element element;
1040 struct varpool_node_set_element_def dummy;
1043 slot = htab_find_slot (set->hashtab, &dummy, INSERT);
/* Already present: the hash entry must agree with the vector.  */
1045 if (*slot != HTAB_EMPTY_ENTRY)
1047 element = (varpool_node_set_element) *slot;
1048 gcc_assert (node == element->node
1049 && (VEC_index (varpool_node_ptr, set->nodes, element->index)
1054 /* Insert node into hash table. */
1056 (varpool_node_set_element) GGC_NEW (struct varpool_node_set_element_def);
1057 element->node = node;
/* Element records its position in the node vector.  */
1058 element->index = VEC_length (varpool_node_ptr, set->nodes);
1061 /* Insert into node vector. */
1062 VEC_safe_push (varpool_node_ptr, gc, set->nodes, node);
1065 /* Remove varpool_node NODE from varpool_node_set SET.  */
1068 varpool_node_set_remove (varpool_node_set set, struct varpool_node *node)
1070 void **slot, **last_slot;
1071 varpool_node_set_element element, last_element;
1072 struct varpool_node *last_node;
1073 struct varpool_node_set_element_def dummy;
1076 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1080 element = (varpool_node_set_element) *slot;
1081 gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
1084 /* Remove from vector. We do this by swapping node with the last element
   of the vector and shrinking it — O(1) removal.  */
1086 last_node = VEC_pop (varpool_node_ptr, set->nodes);
1087 if (last_node != node)
1089 dummy.node = last_node;
1090 last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1091 last_element = (varpool_node_set_element) *last_slot;
1092 gcc_assert (last_element);
1094 /* Move the last element to the original spot of NODE. */
1095 last_element->index = element->index;
1096 VEC_replace (varpool_node_ptr, set->nodes, last_element->index,
1100 /* Remove element from hash table. */
1101 htab_clear_slot (set->hashtab, slot);
1105 /* Find NODE in SET and return an iterator to it if found. A null iterator
1106 is returned if NODE is not in SET. */
1108 varpool_node_set_iterator
1109 varpool_node_set_find (varpool_node_set set, struct varpool_node *node)
1112 struct varpool_node_set_element_def dummy;
1113 varpool_node_set_element element;
1114 varpool_node_set_iterator vsi;
1117 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
/* ~0 is the "null iterator" sentinel index.  */
1119 vsi.index = (unsigned) ~0;
1122 element = (varpool_node_set_element) *slot;
1123 gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
1125 vsi.index = element->index;
1132 /* Dump content of SET to file F, one node at a time.  */
1135 dump_varpool_node_set (FILE *f, varpool_node_set set)
1137 varpool_node_set_iterator iter;
1139 for (iter = vsi_start (set); !vsi_end_p (iter); vsi_next (&iter))
1141 struct varpool_node *node = vsi_node (iter);
1142 dump_varpool_node (f, node);
1146 /* Dump content of SET to stderr (debugger convenience wrapper). */
1149 debug_varpool_node_set (varpool_node_set set)
1151 dump_varpool_node_set (stderr, set);
1155 /* Simple ipa profile pass propagating frequencies across the callgraph. */
/* NOTE(review): the function signature line is elided in this listing;
   the body below uses AUX as a "needs revisiting" flag on callees.  */
1160 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1161 struct cgraph_edge *e;
1163 bool something_changed = false;
/* Process nodes in topological order (callers after callees).  */
1166 order_pos = cgraph_postorder (order);
1167 for (i = order_pos - 1; i >= 0; i--)
1169 if (order[i]->local.local && cgraph_propagate_frequency (order[i]))
/* Frequency changed: schedule local callees for re-propagation.  */
1171 for (e = order[i]->callees; e; e = e->next_callee)
1172 if (e->callee->local.local && !e->callee->aux)
1174 something_changed = true;
1175 e->callee->aux = (void *)1;
1178 order[i]->aux = NULL;
/* Iterate until a fixed point is reached.  */
1181 while (something_changed)
1183 something_changed = false;
1184 for (i = order_pos - 1; i >= 0; i--)
1186 if (order[i]->aux && cgraph_propagate_frequency (order[i]))
1188 for (e = order[i]->callees; e; e = e->next_callee)
1189 if (e->callee->local.local && !e->callee->aux)
1191 something_changed = true;
1192 e->callee->aux = (void *)1;
1195 order[i]->aux = NULL;
/* Gate for the IPA profile pass: run only when -fipa-profile is on.  */
1203 gate_ipa_profile (void)
1205 return flag_ipa_profile;
/* Pass descriptor for the IPA profile-propagation pass.
   NOTE(review): the initializer continues past the end of this
   excerpt.  */
1208 struct ipa_opt_pass_d pass_ipa_profile =
1212 "ipa-profile", /* name */
1213 gate_ipa_profile, /* gate */
1214 ipa_profile, /* execute */
1217 0, /* static_pass_number */
1218 TV_IPA_PROFILE, /* tv_id */
1219 0, /* properties_required */
1220 0, /* properties_provided */
1221 0, /* properties_destroyed */
1222 0, /* todo_flags_start */
1223 0 /* todo_flags_finish */
1225 NULL, /* generate_summary */
1226 NULL, /* write_summary */
1227 NULL, /* read_summary */
1228 NULL, /* write_optimization_summary */
1229 NULL, /* read_optimization_summary */
1230 NULL, /* stmt_fixup */
1232 NULL, /* function_transform */
1233 NULL /* variable_transform */