1 /* Basic IPA optimizations and utilities.
2 Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
26 #include "tree-pass.h"
/* Compute a post-order (callees emitted before callers) over the callgraph
   into ORDER, walking caller edges depth-first with an explicit stack.
   NOTE(review): this dump is missing intermediate source lines (braces,
   declarations, parts of loop bodies); comments below describe only the
   code that is visible — confirm against the full file.  */
32 /* Fill array order with all nodes with output flag set in the reverse
36 cgraph_postorder (struct cgraph_node **order)
38 struct cgraph_node *node, *node2;
41 struct cgraph_edge *edge, last;
/* Explicit DFS worklist sized for every callgraph node.  */
44 struct cgraph_node **stack =
45 XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
47 /* We have to deal with cycles nicely, so use a depth first traversal
48 output algorithm. Ignore the fact that some functions won't need
49 to be output and put them into order as well, so we get dependencies
50 right through inline functions. */
51 for (node = cgraph_nodes; node; node = node->next)
/* Two passes over all nodes; the pass-selection condition is partly
   missing from this dump — TODO confirm what distinguishes the passes.  */
53 for (pass = 0; pass < 2; pass++)
54 for (node = cgraph_nodes; node; node = node->next)
57 || (!cgraph_only_called_directly_p (node)
58 && !node->address_taken)))
/* node->aux holds the next caller edge still to visit; the sentinel
   &last marks "all callers processed".  */
64 node->aux = node->callers;
67 while (node2->aux != &last)
69 edge = (struct cgraph_edge *) node2->aux;
70 if (edge->next_caller)
71 node2->aux = edge->next_caller;
74 /* Break possible cycles involving always-inline
75 functions by ignoring edges from always-inline
76 functions to non-always-inline functions. */
77 if (edge->caller->local.disregard_inline_limits
78 && !edge->callee->local.disregard_inline_limits)
/* Caller not yet visited: start scanning its own caller list (or mark
   it done immediately when it has no callers) and descend into it.  */
80 if (!edge->caller->aux)
82 if (!edge->caller->callers)
83 edge->caller->aux = &last;
85 edge->caller->aux = edge->caller->callers;
86 stack[stack_size++] = node2;
/* Every caller of node2 has been visited — emit node2 and pop.  */
91 if (node2->aux == &last)
93 order[order_pos++] = node2;
95 node2 = stack[--stack_size];
/* Final sweep over all nodes (body not visible in this dump;
   presumably clears the aux markers).  */
102 for (node = cgraph_nodes; node; node = node->next)
/* Recursively repoint global.inlined_to of every function inlined
   (directly or transitively) into NODE at INLINED_TO.  */
107 /* Look for all functions inlined to NODE and update their inlined_to pointers
111 update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
113 struct cgraph_edge *e;
114 for (e = node->callees; e; e = e->next_callee)
/* Only callees that are themselves inline copies belong to the tree;
   ordinary (non-inlined) callees are left alone.  */
115 if (e->callee->global.inlined_to)
117 e->callee->global.inlined_to = inlined_to;
118 update_inlined_to_pointer (e->callee, inlined_to);
122 /* Add cgraph NODE to queue starting at FIRST.
124 The queue is linked via AUX pointers and terminated by pointer to 1.
125 We enqueue nodes at two occasions: when we find them reachable or when we find
126 their bodies needed for further clonning. In the second case we mark them
127 by pointer to 2 after processing so they are re-queue when they become
131 enqueue_cgraph_node (struct cgraph_node *node, struct cgraph_node **first)
133 /* Node is still in queue; do nothing. */
/* aux == (void *)2 means "already processed as unreachable"; any other
   non-NULL aux means the node currently sits in the worklist.  */
134 if (node->aux && node->aux != (void *) 2)
136 /* Node was already processed as unreachable, re-enqueue
137 only if it became reachable now. */
138 if (node->aux == (void *)2 && !node->reachable)
144 /* Add varpool NODE to queue starting at FIRST. */
/* Variable-pool counterpart of enqueue_cgraph_node: links NODE into the
   AUX-pointer worklist headed by *FIRST.  (Body not visible in this dump.)  */
147 enqueue_varpool_node (struct varpool_node *node, struct varpool_node **first)
153 /* Process references. */
/* Walk every reference in LIST, marking referenced cgraph nodes reachable
   and referenced varpool nodes needed, and pushing them onto the FIRST /
   FIRST_VARPOOL worklists.  BEFORE_INLINING_P relaxes the DECL_EXTERNAL
   filter: before inlining decisions, even external function bodies are
   still kept alive.  NOTE(review): parts of the conditions are missing
   from this dump.  */
156 process_references (struct ipa_ref_list *list,
157 struct cgraph_node **first,
158 struct varpool_node **first_varpool,
159 bool before_inlining_p)
163 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
165 if (ref->refered_type == IPA_REF_CGRAPH)
167 struct cgraph_node *node = ipa_ref_node (ref);
169 && (!DECL_EXTERNAL (node->decl)
170 || before_inlining_p))
172 node->reachable = true;
173 enqueue_cgraph_node (node, first);
/* Otherwise the reference targets a variable in the varpool.  */
178 struct varpool_node *node = ipa_ref_varpool_node (ref);
181 varpool_mark_needed_node (node);
182 enqueue_varpool_node (node, first_varpool);
188 /* Return true when function NODE can be removed from callgraph
189 if all direct calls are eliminated. */
/* NOTE(review): the comment above belongs to a cgraph predicate missing
   from this dump.  The function below is the varpool analogue: a variable
   can be removed when no references remain — i.e. nothing forces its
   output, no other LTO partition uses it, and it is either COMDAT or not
   externally visible.  */
192 varpool_can_remove_if_no_refs (struct varpool_node *node)
194 return (!node->force_output && !node->used_from_other_partition
195 && (DECL_COMDAT (node->decl) || !node->externally_visible));
198 /* Return true when function can be marked local. */
/* "Local" means every call site is a direct call inside this translation
   unit/partition, so the backend may freely change the calling convention.  */
201 cgraph_local_node_p (struct cgraph_node *node)
203 return (cgraph_only_called_directly_p (node)
205 && !DECL_EXTERNAL (node->decl)
206 && !node->local.externally_visible
207 && !node->reachable_from_other_partition
208 && !node->in_other_partition);
211 /* Perform reachability analysis and reclaim all unreachable nodes.
212 If BEFORE_INLINING_P is true this function is called before inlining
213 decisions has been made. If BEFORE_INLINING_P is false this function also
214 removes unneeded bodies of extern inline functions. */
/* Returns true when anything was removed or changed (via CHANGED below).
   FILE, when non-NULL, receives a dump of reclaimed symbols.
   NOTE(review): many lines (braces, else-branches, parts of conditions)
   are missing from this dump; comments describe the visible logic only.  */
217 cgraph_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
/* (void *)1 is the worklist terminator sentinel — see enqueue_cgraph_node.  */
219 struct cgraph_node *first = (struct cgraph_node *) (void *) 1;
220 struct varpool_node *first_varpool = (struct varpool_node *) (void *) 1;
221 struct cgraph_node *node, *next;
222 struct varpool_node *vnode, *vnext;
223 bool changed = false;
225 #ifdef ENABLE_CHECKING
229 fprintf (file, "\nReclaiming functions:");
/* With checking enabled, verify no stale aux pointers survive from a
   previous pass.  */
230 #ifdef ENABLE_CHECKING
231 for (node = cgraph_nodes; node; node = node->next)
232 gcc_assert (!node->aux);
233 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
234 gcc_assert (!vnode->aux);
236 varpool_reset_queue ();
/* Seed the worklist with the roots: functions that must be kept even
   with no direct calls or references.  */
237 for (node = cgraph_nodes; node; node = node->next)
238 if (!cgraph_can_remove_if_no_direct_calls_and_refs_p (node)
239 && ((!DECL_EXTERNAL (node->decl))
240 || before_inlining_p))
242 gcc_assert (!node->global.inlined_to);
243 enqueue_cgraph_node (node, &first);
244 node->reachable = true;
248 gcc_assert (!node->aux);
249 node->reachable = false;
/* Rebuild the needed-variable queue from scratch, seeding it with
   variables that cannot be removed.  */
251 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
253 vnode->next_needed = NULL;
254 vnode->prev_needed = NULL;
255 if (!varpool_can_remove_if_no_refs (vnode))
257 vnode->needed = false;
258 varpool_mark_needed_node (vnode);
259 enqueue_varpool_node (vnode, &first_varpool);
262 vnode->needed = false;
265 /* Perform reachability analysis. As a special case do not consider
266 extern inline functions not inlined as live because we won't output
269 We maintain two worklist, one for cgraph nodes other for varpools and
270 are finished once both are empty. */
272 while (first != (struct cgraph_node *) (void *) 1
273 || first_varpool != (struct varpool_node *) (void *) 1)
275 if (first != (struct cgraph_node *) (void *) 1)
277 struct cgraph_edge *e;
/* Pop the head of the cgraph worklist.  */
279 first = (struct cgraph_node *) first->aux;
/* Mark "processed but (so far) unreachable" — may be re-queued later.  */
280 if (!node->reachable)
281 node->aux = (void *)2;
283 /* If we found this node reachable, first mark on the callees
284 reachable too, unless they are direct calls to extern inline functions
285 we decided to not inline. */
287 for (e = node->callees; e; e = e->next_callee)
288 if (!e->callee->reachable
290 && (!e->inline_failed || !e->callee->analyzed
291 || (!DECL_EXTERNAL (e->callee->decl))
292 || before_inlining_p))
294 e->callee->reachable = true;
295 enqueue_cgraph_node (e->callee, &first);
298 /* If any function in a comdat group is reachable, force
299 all other functions in the same comdat group to be
301 if (node->same_comdat_group
303 && !node->global.inlined_to)
305 for (next = node->same_comdat_group;
307 next = next->same_comdat_group)
308 if (!next->reachable)
310 next->reachable = true;
311 enqueue_cgraph_node (next, &first);
315 /* We can freely remove inline clones even if they are cloned, however if
316 function is clone of real clone, we must keep it around in order to
317 make materialize_clones produce function body with the changes
/* Walk up the clone_of chain, enqueueing origin nodes whose bodies are
   needed for later clone materialization.  */
319 while (node->clone_of && !node->clone_of->aux && !gimple_has_body_p (node->decl))
321 bool noninline = node->clone_of->decl != node->decl;
322 node = node->clone_of;
323 if (noninline && !node->reachable && !node->aux)
325 enqueue_cgraph_node (node, &first);
329 process_references (&node->ref_list, &first, &first_varpool, before_inlining_p);
/* Pop and process the head of the varpool worklist, which may in turn
   feed both worklists via its references.  */
331 if (first_varpool != (struct varpool_node *) (void *) 1)
333 vnode = first_varpool;
334 first_varpool = (struct varpool_node *)first_varpool->aux;
336 process_references (&vnode->ref_list, &first, &first_varpool, before_inlining_p);
340 /* Remove unreachable nodes.
342 Completely unreachable functions can be fully removed from the callgraph.
343 Extern inline functions that we decided to not inline need to become unanalyzed nodes of
344 callgraph (so we still have edges to them). We remove function body then.
346 Also we need to care functions that are unreachable but we need to keep them around
347 for later clonning. In this case we also turn them to unanalyzed nodes, but
348 keep the body around. */
349 for (node = cgraph_nodes; node; node = next)
/* aux set but not reachable: the node was visited only as a clone origin;
   strip its callees and demote it to an unanalyzed node.  */
352 if (node->aux && !node->reachable)
354 cgraph_node_remove_callees (node);
355 node->analyzed = false;
356 node->local.inlinable = false;
360 node->global.inlined_to = NULL;
362 fprintf (file, " %s", cgraph_node_name (node));
363 if (!node->analyzed || !DECL_EXTERNAL (node->decl) || before_inlining_p)
364 cgraph_remove_node (node);
367 struct cgraph_edge *e;
369 /* See if there is reachable caller. */
370 for (e = node->callers; e; e = e->next_caller)
371 if (e->caller->reachable)
374 /* If so, we need to keep node in the callgraph. */
375 if (e || node->needed)
377 struct cgraph_node *clone;
379 /* If there are still clones, we must keep body around.
380 Otherwise we can just remove the body but keep the clone. */
381 for (clone = node->clones; clone;
382 clone = clone->next_sibling_clone)
387 cgraph_release_function_body (node);
388 node->analyzed = false;
389 node->local.inlinable = false;
392 gcc_assert (!clone->in_other_partition);
393 cgraph_node_remove_callees (node);
394 ipa_remove_all_references (&node->ref_list);
/* Unlink NODE from its clone-sibling doubly linked list before removal.  */
395 if (node->prev_sibling_clone)
396 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
397 else if (node->clone_of)
398 node->clone_of->clones = node->next_sibling_clone;
399 if (node->next_sibling_clone)
400 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
401 node->clone_of = NULL;
402 node->next_sibling_clone = NULL;
403 node->prev_sibling_clone = NULL;
406 cgraph_remove_node (node);
411 for (node = cgraph_nodes; node; node = node->next)
413 /* Inline clones might be kept around so their materializing allows further
414 cloning. If the function the clone is inlined into is removed, we need
415 to turn it into normal cone. */
416 if (node->global.inlined_to
419 gcc_assert (node->clones);
420 node->global.inlined_to = NULL;
421 update_inlined_to_pointer (node, node);
427 fprintf (file, "\n");
429 /* We must release unused extern inlines or sanity checking will fail. Rest of transformations
430 are undesirable at -O0 since we do not want to remove anything. */
435 fprintf (file, "Reclaiming variables:");
436 for (vnode = varpool_nodes; vnode; vnode = vnext)
442 fprintf (file, " %s", varpool_node_name (vnode));
443 varpool_remove_node (vnode);
448 /* Now update address_taken flags and try to promote functions to be local. */
451 fprintf (file, "\nClearing address taken flags:");
452 for (node = cgraph_nodes; node; node = node->next)
453 if (node->address_taken
454 && !node->reachable_from_other_partition)
/* Scan incoming references; the flag can be cleared only when no true
   address-taking (IPA_REF_ADDR) reference remains — the exit condition
   of this loop is missing from the dump.  */
459 for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref)
462 gcc_assert (ref->use == IPA_REF_ADDR);
468 fprintf (file, " %s", cgraph_node_name (node));
469 node->address_taken = false;
471 if (cgraph_local_node_p (node))
473 node->local.local = true;
475 fprintf (file, " (local)");
480 #ifdef ENABLE_CHECKING
484 /* Reclaim alias pairs for functions that have disappeared from the
486 remove_unreachable_alias_pairs ();
491 /* Discover variables that have no longer address taken or that are read only
492 and update their flags.
494 FIXME: This can not be done in between gimplify and omp_expand since
495 readonly flag plays role on what is shared and what is not. Currently we do
496 this transformation as part of ipa-reference pass, but it would make sense
497 to do it before early optimizations. */
500 ipa_discover_readonly_nonaddressable_vars (void)
502 struct varpool_node *vnode;
504 fprintf (dump_file, "Clearing variable flags:");
/* Only consider finalized variables whose every reference is visible to
   the IPA reference machinery, and that still carry a flag worth clearing.  */
505 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
506 if (vnode->finalized && varpool_all_refs_explicit_p (vnode)
507 && (TREE_ADDRESSABLE (vnode->decl) || !TREE_READONLY (vnode->decl)))
509 bool written = false;
510 bool address_taken = false;
/* Scan the referring references, stopping early once both facts are
   established (nothing further could change the outcome).  */
513 for (i = 0; ipa_ref_list_refering_iterate (&vnode->ref_list, i, ref)
514 && (!written || !address_taken); i++)
518 address_taken = true;
/* No reference takes the address: drop TREE_ADDRESSABLE.  */
526 if (TREE_ADDRESSABLE (vnode->decl) && !address_taken)
529 fprintf (dump_file, " %s (addressable)", varpool_node_name (vnode));
530 TREE_ADDRESSABLE (vnode->decl) = 0;
532 if (!TREE_READONLY (vnode->decl) && !address_taken && !written
533 /* Making variable in explicit section readonly can cause section
535 See e.g. gcc.c-torture/compile/pr23237.c */
536 && DECL_SECTION_NAME (vnode->decl) == NULL)
539 fprintf (dump_file, " %s (read-only)", varpool_node_name (vnode));
540 TREE_READONLY (vnode->decl) = 1;
544 fprintf (dump_file, "\n");
547 /* Return true when function NODE should be considered externally visible. */
/* NOTE(review): several early-return lines are missing from this dump;
   each visible test presumably decides visibility one way or the other —
   confirm the return values against the full file.  */
550 cgraph_externally_visible_p (struct cgraph_node *node, bool whole_program)
552 if (!node->local.finalized)
/* Plain static (or external-only) declarations are never visible.  */
554 if (!DECL_COMDAT (node->decl)
555 && (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl)))
559 if (DECL_PRESERVE_P (node->decl))
561 /* COMDAT functions must be shared only if they have address taken,
562 otherwise we can produce our own private implementation with
564 if (DECL_COMDAT (node->decl))
566 if (node->address_taken || !node->analyzed)
568 if (node->same_comdat_group)
570 struct cgraph_node *next;
572 /* If more than one function is in the same COMDAT group, it must
573 be shared even if just one function in the comdat group has
575 for (next = node->same_comdat_group;
577 next = next->same_comdat_group)
578 if (next->address_taken || !next->analyzed)
/* main() and explicitly-attributed functions stay visible.  */
582 if (MAIN_NAME_P (DECL_NAME (node->decl)))
584 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (node->decl)))
589 /* Dissolve the same_comdat_group list in which NODE resides. */
/* Walks the circular same_comdat_group list starting at NODE and clears
   each link.  (Loop construct itself is not visible in this dump.)  */
592 dissolve_same_comdat_group_list (struct cgraph_node *node)
594 struct cgraph_node *n = node, *next;
597 next = n->same_comdat_group;
598 n->same_comdat_group = NULL;
604 /* Mark visibility of all functions.
606 A local function is one whose calls can occur only in the current
607 compilation unit and all its calls are explicit, so we can change
608 its calling convention. We simply mark all static functions whose
609 address is not taken as local.
611 We also change the TREE_PUBLIC flag of all declarations that are public
612 in language point of view but we want to overwrite this default
613 via visibilities for the backend point of view. */
/* WHOLE_PROGRAM, when true, lets symbols be privatized across the whole
   program (passed through to cgraph_externally_visible_p).
   NOTE(review): this dump is missing many lines; comments describe
   visible logic only.  */
616 function_and_variable_visibility (bool whole_program)
618 struct cgraph_node *node;
619 struct varpool_node *vnode;
621 for (node = cgraph_nodes; node; node = node->next)
623 /* C++ FE on lack of COMDAT support create local COMDAT functions
624 (that ought to be shared but can not due to object format
625 limitations). It is neccesary to keep the flag to make rest of C++ FE
626 happy. Clear the flag here to avoid confusion in middle-end. */
627 if (DECL_COMDAT (node->decl) && !TREE_PUBLIC (node->decl))
628 DECL_COMDAT (node->decl) = 0;
629 /* For external decls stop tracking same_comdat_group, it doesn't matter
630 what comdat group they are in when they won't be emitted in this TU,
631 and simplifies later passes. */
632 if (node->same_comdat_group && DECL_EXTERNAL (node->decl))
634 #ifdef ENABLE_CHECKING
635 struct cgraph_node *n;
637 for (n = node->same_comdat_group;
639 n = n->same_comdat_group)
640 /* If at least one of same comdat group functions is external,
641 all of them have to be, otherwise it is a front-end bug. */
642 gcc_assert (DECL_EXTERNAL (n->decl));
644 dissolve_same_comdat_group_list (node);
646 gcc_assert ((!DECL_WEAK (node->decl) && !DECL_COMDAT (node->decl))
647 || TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl));
648 if (cgraph_externally_visible_p (node, whole_program))
650 gcc_assert (!node->global.inlined_to);
651 node->local.externally_visible = true;
654 node->local.externally_visible = false;
/* Functions not externally visible with a body in this TU are made
   local at the DECL level, including their same-body aliases.  */
655 if (!node->local.externally_visible && node->analyzed
656 && !DECL_EXTERNAL (node->decl))
658 struct cgraph_node *alias;
659 gcc_assert (whole_program || !TREE_PUBLIC (node->decl));
660 cgraph_make_decl_local (node->decl);
661 for (alias = node->same_body; alias; alias = alias->next)
662 cgraph_make_decl_local (alias->decl);
663 if (node->same_comdat_group)
664 /* cgraph_externally_visible_p has already checked all other nodes
665 in the group and they will all be made local. We need to
666 dissolve the group at once so that the predicate does not
668 dissolve_same_comdat_group_list (node);
670 node->local.local = cgraph_local_node_p (node);
672 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
674 /* weak flag makes no sense on local variables. */
675 gcc_assert (!DECL_WEAK (vnode->decl)
676 || TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl));
677 /* In several cases declarations can not be common:
679 - when declaration has initializer
681 - when it has specific section
682 - when it resides in non-generic address space.
683 - if declaration is local, it will get into .local common section
684 so common flag is not needed. Frontends still produce these in
685 certain cases, such as for:
687 static int a __attribute__ ((common))
689 Canonicalize things here and clear the redundant flag. */
690 if (DECL_COMMON (vnode->decl)
691 && (!(TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl))
692 || (DECL_INITIAL (vnode->decl)
693 && DECL_INITIAL (vnode->decl) != error_mark_node)
694 || DECL_WEAK (vnode->decl)
695 || DECL_SECTION_NAME (vnode->decl) != NULL
696 || ! (ADDR_SPACE_GENERIC_P
697 (TYPE_ADDR_SPACE (TREE_TYPE (vnode->decl))))))
698 DECL_COMMON (vnode->decl) = 0;
/* Decide external visibility of queued variables; part of the condition
   is missing from this dump.  */
700 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
702 if (!vnode->finalized)
705 && (DECL_COMDAT (vnode->decl) || TREE_PUBLIC (vnode->decl))
707 /* We can privatize comdat readonly variables whose address is not taken,
708 but doing so is not going to bring us optimization oppurtunities until
709 we start reordering datastructures. */
710 || DECL_COMDAT (vnode->decl)
711 || DECL_WEAK (vnode->decl)
712 || lookup_attribute ("externally_visible",
713 DECL_ATTRIBUTES (vnode->decl))))
714 vnode->externally_visible = true;
716 vnode->externally_visible = false;
717 if (!vnode->externally_visible)
719 gcc_assert (whole_program || !TREE_PUBLIC (vnode->decl));
720 cgraph_make_decl_local (vnode->decl);
722 gcc_assert (TREE_STATIC (vnode->decl));
/* Dump the final classification when a dump file is active.  */
727 fprintf (dump_file, "\nMarking local functions:");
728 for (node = cgraph_nodes; node; node = node->next)
729 if (node->local.local)
730 fprintf (dump_file, " %s", cgraph_node_name (node));
731 fprintf (dump_file, "\n\n");
732 fprintf (dump_file, "\nMarking externally visible functions:");
733 for (node = cgraph_nodes; node; node = node->next)
734 if (node->local.externally_visible)
735 fprintf (dump_file, " %s", cgraph_node_name (node));
736 fprintf (dump_file, "\n\n");
737 fprintf (dump_file, "\nMarking externally visible variables:");
738 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
739 if (vnode->externally_visible)
740 fprintf (dump_file, " %s", varpool_node_name (vnode));
741 fprintf (dump_file, "\n\n");
743 cgraph_function_flags_ready = true;
747 /* Local function pass handling visibilities. This happens before LTO streaming
748 so in particular -fwhole-program should be ignored at this level. */
/* Whole-program treatment is applied now only when no LTO/WHOPR streaming
   will happen later (flag_lto / flag_whopr both unset).  */
751 local_function_and_variable_visibility (void)
753 return function_and_variable_visibility (flag_whole_program && !flag_lto && !flag_whopr);
/* Pass descriptor for the simple IPA "visibility" pass; executes
   local_function_and_variable_visibility above.  (Several initializer
   fields are missing from this dump.)  */
756 struct simple_ipa_opt_pass pass_ipa_function_and_variable_visibility =
760 "visibility", /* name */
762 local_function_and_variable_visibility,/* execute */
765 0, /* static_pass_number */
766 TV_CGRAPHOPT, /* tv_id */
767 0, /* properties_required */
768 0, /* properties_provided */
769 0, /* properties_destroyed */
770 0, /* todo_flags_start */
771 TODO_remove_functions | TODO_dump_cgraph
772 | TODO_ggc_collect /* todo_flags_finish */
776 /* Do not re-run on ltrans stage. */
/* Gate for the whole-program visibility pass below.  (Return expression
   not visible in this dump.)  */
779 gate_whole_program_function_and_variable_visibility (void)
784 /* Bring functionss local at LTO time whith -fwhole-program. */
/* Runs the generic visibility computation with flag_whole_program, then
   marks surviving externally visible non-COMDAT symbols as needed so they
   are kept for output.  */
787 whole_program_function_and_variable_visibility (void)
789 struct cgraph_node *node;
790 struct varpool_node *vnode;
792 function_and_variable_visibility (flag_whole_program);
794 for (node = cgraph_nodes; node; node = node->next)
795 if ((node->local.externally_visible && !DECL_COMDAT (node->decl))
796 && node->local.finalized)
797 cgraph_mark_needed_node (node);
798 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
799 if (vnode->externally_visible && !DECL_COMDAT (vnode->decl))
800 varpool_mark_needed_node (vnode);
/* Dump the needed-variables queue when dumping is enabled.  */
803 fprintf (dump_file, "\nNeeded variables:");
804 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
806 fprintf (dump_file, " %s", varpool_node_name (vnode));
807 fprintf (dump_file, "\n\n");
/* Pass descriptor for the LTO-time "whole-program" visibility pass;
   gated so it does not re-run at ltrans stage.  (Some initializer fields
   are missing from this dump.)  */
812 struct ipa_opt_pass_d pass_ipa_whole_program_visibility =
816 "whole-program", /* name */
817 gate_whole_program_function_and_variable_visibility,/* gate */
818 whole_program_function_and_variable_visibility,/* execute */
821 0, /* static_pass_number */
822 TV_CGRAPHOPT, /* tv_id */
823 0, /* properties_required */
824 0, /* properties_provided */
825 0, /* properties_destroyed */
826 0, /* todo_flags_start */
827 TODO_remove_functions | TODO_dump_cgraph
828 | TODO_ggc_collect /* todo_flags_finish */
830 NULL, /* generate_summary */
831 NULL, /* write_summary */
832 NULL, /* read_summary */
833 NULL, /* write_optimization_summary */
834 NULL, /* read_optimization_summary */
835 NULL, /* stmt_fixup */
837 NULL, /* function_transform */
838 NULL, /* variable_transform */
841 /* Hash a cgraph node set element. */
/* htab hash callback: hashes the element by its node pointer identity.  */
844 hash_cgraph_node_set_element (const void *p)
846 const_cgraph_node_set_element element = (const_cgraph_node_set_element) p;
847 return htab_hash_pointer (element->node);
850 /* Compare two cgraph node set elements. */
/* htab equality callback: elements are equal iff they wrap the same node.  */
853 eq_cgraph_node_set_element (const void *p1, const void *p2)
855 const_cgraph_node_set_element e1 = (const_cgraph_node_set_element) p1;
856 const_cgraph_node_set_element e2 = (const_cgraph_node_set_element) p2;
858 return e1->node == e2->node;
861 /* Create a new cgraph node set. */
/* Allocates a GC-managed set backed by a pointer hash table (for O(1)
   membership) plus a node vector (for ordered iteration).  */
864 cgraph_node_set_new (void)
866 cgraph_node_set new_node_set;
868 new_node_set = GGC_NEW (struct cgraph_node_set_def);
869 new_node_set->hashtab = htab_create_ggc (10,
870 hash_cgraph_node_set_element,
871 eq_cgraph_node_set_element,
873 new_node_set->nodes = NULL;
877 /* Add cgraph_node NODE to cgraph_node_set SET. */
/* No-op (beyond a consistency assert) when NODE is already present.  */
880 cgraph_node_set_add (cgraph_node_set set, struct cgraph_node *node)
883 cgraph_node_set_element element;
884 struct cgraph_node_set_element_def dummy;
887 slot = htab_find_slot (set->hashtab, &dummy, INSERT);
/* Already in the set: verify the hash table and vector agree.  */
889 if (*slot != HTAB_EMPTY_ENTRY)
891 element = (cgraph_node_set_element) *slot;
892 gcc_assert (node == element->node
893 && (VEC_index (cgraph_node_ptr, set->nodes, element->index)
898 /* Insert node into hash table. */
900 (cgraph_node_set_element) GGC_NEW (struct cgraph_node_set_element_def);
901 element->node = node;
/* index records the node's position in the parallel vector.  */
902 element->index = VEC_length (cgraph_node_ptr, set->nodes);
905 /* Insert into node vector. */
906 VEC_safe_push (cgraph_node_ptr, gc, set->nodes, node);
909 /* Remove cgraph_node NODE from cgraph_node_set SET. */
/* Uses the swap-with-last trick so vector removal is O(1) and the
   element indices stay consistent with vector positions.  */
912 cgraph_node_set_remove (cgraph_node_set set, struct cgraph_node *node)
914 void **slot, **last_slot;
915 cgraph_node_set_element element, last_element;
916 struct cgraph_node *last_node;
917 struct cgraph_node_set_element_def dummy;
920 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
924 element = (cgraph_node_set_element) *slot;
925 gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
928 /* Remove from vector. We do this by swapping node with the last element
930 last_node = VEC_pop (cgraph_node_ptr, set->nodes);
931 if (last_node != node)
933 dummy.node = last_node;
934 last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
935 last_element = (cgraph_node_set_element) *last_slot;
936 gcc_assert (last_element);
938 /* Move the last element to the original spot of NODE. */
939 last_element->index = element->index;
940 VEC_replace (cgraph_node_ptr, set->nodes, last_element->index,
944 /* Remove element from hash table. */
945 htab_clear_slot (set->hashtab, slot);
949 /* Find NODE in SET and return an iterator to it if found. A null iterator
950 is returned if NODE is not in SET. */
952 cgraph_node_set_iterator
953 cgraph_node_set_find (cgraph_node_set set, struct cgraph_node *node)
956 struct cgraph_node_set_element_def dummy;
957 cgraph_node_set_element element;
958 cgraph_node_set_iterator csi;
961 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
/* Not found: ~0 is the "null iterator" sentinel index.  */
963 csi.index = (unsigned) ~0;
966 element = (cgraph_node_set_element) *slot;
967 gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
969 csi.index = element->index;
976 /* Dump content of SET to file F. */
979 dump_cgraph_node_set (FILE *f, cgraph_node_set set)
981 cgraph_node_set_iterator iter;
/* Iterate in vector order and dump each member node.  */
983 for (iter = csi_start (set); !csi_end_p (iter); csi_next (&iter))
985 struct cgraph_node *node = csi_node (iter);
986 dump_cgraph_node (f, node);
990 /* Dump content of SET to stderr. */
/* Debugger convenience wrapper around dump_cgraph_node_set.  */
993 debug_cgraph_node_set (cgraph_node_set set)
995 dump_cgraph_node_set (stderr, set);
998 /* Hash a varpool node set element. */
/* htab hash callback: hashes by the varpool node pointer identity.  */
1001 hash_varpool_node_set_element (const void *p)
1003 const_varpool_node_set_element element = (const_varpool_node_set_element) p;
1004 return htab_hash_pointer (element->node);
1007 /* Compare two varpool node set elements. */
/* htab equality callback: equal iff both wrap the same varpool node.  */
1010 eq_varpool_node_set_element (const void *p1, const void *p2)
1012 const_varpool_node_set_element e1 = (const_varpool_node_set_element) p1;
1013 const_varpool_node_set_element e2 = (const_varpool_node_set_element) p2;
1015 return e1->node == e2->node;
1018 /* Create a new varpool node set. */
/* Varpool counterpart of cgraph_node_set_new: GC-allocated hash table
   for membership plus vector for ordered iteration.  */
1021 varpool_node_set_new (void)
1023 varpool_node_set new_node_set;
1025 new_node_set = GGC_NEW (struct varpool_node_set_def);
1026 new_node_set->hashtab = htab_create_ggc (10,
1027 hash_varpool_node_set_element,
1028 eq_varpool_node_set_element,
1030 new_node_set->nodes = NULL;
1031 return new_node_set;
1034 /* Add varpool_node NODE to varpool_node_set SET. */
/* No-op (beyond a consistency assert) when NODE is already present.  */
1037 varpool_node_set_add (varpool_node_set set, struct varpool_node *node)
1040 varpool_node_set_element element;
1041 struct varpool_node_set_element_def dummy;
1044 slot = htab_find_slot (set->hashtab, &dummy, INSERT);
/* Already in the set: verify the hash table and vector agree.  */
1046 if (*slot != HTAB_EMPTY_ENTRY)
1048 element = (varpool_node_set_element) *slot;
1049 gcc_assert (node == element->node
1050 && (VEC_index (varpool_node_ptr, set->nodes, element->index)
1055 /* Insert node into hash table. */
1057 (varpool_node_set_element) GGC_NEW (struct varpool_node_set_element_def);
1058 element->node = node;
/* index records the node's position in the parallel vector.  */
1059 element->index = VEC_length (varpool_node_ptr, set->nodes);
1062 /* Insert into node vector. */
1063 VEC_safe_push (varpool_node_ptr, gc, set->nodes, node);
1066 /* Remove varpool_node NODE from varpool_node_set SET. */
/* Same swap-with-last removal scheme as cgraph_node_set_remove, keeping
   vector positions and element indices in sync.  */
1069 varpool_node_set_remove (varpool_node_set set, struct varpool_node *node)
1071 void **slot, **last_slot;
1072 varpool_node_set_element element, last_element;
1073 struct varpool_node *last_node;
1074 struct varpool_node_set_element_def dummy;
1077 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1081 element = (varpool_node_set_element) *slot;
1082 gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
1085 /* Remove from vector. We do this by swapping node with the last element
1087 last_node = VEC_pop (varpool_node_ptr, set->nodes);
1088 if (last_node != node)
1090 dummy.node = last_node;
1091 last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
1092 last_element = (varpool_node_set_element) *last_slot;
1093 gcc_assert (last_element);
1095 /* Move the last element to the original spot of NODE. */
1096 last_element->index = element->index;
1097 VEC_replace (varpool_node_ptr, set->nodes, last_element->index,
1101 /* Remove element from hash table. */
1102 htab_clear_slot (set->hashtab, slot);
1106 /* Find NODE in SET and return an iterator to it if found. A null iterator
1107 is returned if NODE is not in SET. */
1109 varpool_node_set_iterator
1110 varpool_node_set_find (varpool_node_set set, struct varpool_node *node)
1113 struct varpool_node_set_element_def dummy;
1114 varpool_node_set_element element;
1115 varpool_node_set_iterator vsi;
1118 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
/* Not found: ~0 is the "null iterator" sentinel index.  */
1120 vsi.index = (unsigned) ~0;
1123 element = (varpool_node_set_element) *slot;
1124 gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
1126 vsi.index = element->index;
1133 /* Dump content of SET to file F. */
1136 dump_varpool_node_set (FILE *f, varpool_node_set set)
1138 varpool_node_set_iterator iter;
/* Iterate in vector order and dump each member variable.  */
1140 for (iter = vsi_start (set); !vsi_end_p (iter); vsi_next (&iter))
1142 struct varpool_node *node = vsi_node (iter);
1143 dump_varpool_node (f, node);
1147 /* Dump content of SET to stderr. */
/* Debugger convenience wrapper around dump_varpool_node_set.  */
1150 debug_varpool_node_set (varpool_node_set set)
1152 dump_varpool_node_set (stderr, set);
1156 /* Simple ipa profile pass propagating frequencies across the callgraph. */
/* NOTE(review): the function signature itself (presumably ipa_profile) is
   missing from this dump; the lines below are its body.  Propagates
   execution-frequency information in reverse postorder, using node->aux
   as a "needs revisiting" flag, and iterates until a fixed point.  */
1161 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1162 struct cgraph_edge *e;
1164 bool something_changed = false;
1167 order_pos = cgraph_postorder (order);
/* First sweep: callers before callees (reverse of the computed order);
   when a local node's frequency changes, flag its local callees.  */
1168 for (i = order_pos - 1; i >= 0; i--)
1170 if (order[i]->local.local && cgraph_propagate_frequency (order[i]))
1172 for (e = order[i]->callees; e; e = e->next_callee)
1173 if (e->callee->local.local && !e->callee->aux)
1175 something_changed = true;
1176 e->callee->aux = (void *)1;
1179 order[i]->aux = NULL;
/* Re-sweep flagged nodes until no frequency changes propagate further.  */
1182 while (something_changed)
1184 something_changed = false;
1185 for (i = order_pos - 1; i >= 0; i--)
1187 if (order[i]->aux && cgraph_propagate_frequency (order[i]))
1189 for (e = order[i]->callees; e; e = e->next_callee)
1190 if (e->callee->local.local && !e->callee->aux)
1192 something_changed = true;
1193 e->callee->aux = (void *)1;
1196 order[i]->aux = NULL;
/* Gate: run the ipa-profile pass only when -fipa-profile is enabled.  */
1204 gate_ipa_profile (void)
1206 return flag_ipa_profile;
1209 struct ipa_opt_pass_d pass_ipa_profile =
1213 "ipa-profile", /* name */
1214 gate_ipa_profile, /* gate */
1215 ipa_profile, /* execute */
1218 0, /* static_pass_number */
1219 TV_IPA_PROFILE, /* tv_id */
1220 0, /* properties_required */
1221 0, /* properties_provided */
1222 0, /* properties_destroyed */
1223 0, /* todo_flags_start */
1224 0 /* todo_flags_finish */
1226 NULL, /* generate_summary */
1227 NULL, /* write_summary */
1228 NULL, /* read_summary */
1229 NULL, /* write_optimization_summary */
1230 NULL, /* read_optimization_summary */
1231 NULL, /* stmt_fixup */
1233 NULL, /* function_transform */
1234 NULL /* variable_transform */