1 /* Basic IPA optimizations and utilities.
2 Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
26 #include "tree-pass.h"
/* NOTE(review): this listing is missing lines (the embedded numbering is
   discontinuous); verify every block against the complete gcc/ipa.c.  */
31 /* Fill array order with all nodes with output flag set in the reverse
35 cgraph_postorder (struct cgraph_node **order)
/* Depth-first walk over the caller edges of each callgraph node, emitting
   nodes into ORDER in reverse postorder.  Uses node->aux as the per-node
   iteration cursor.  */
37 struct cgraph_node *node, *node2;
/* LAST is a dummy edge object whose address serves as the "all callers
   visited" sentinel stored in node->aux.  */
40 struct cgraph_edge *edge, last;
43 struct cgraph_node **stack =
44 XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
46 /* We have to deal with cycles nicely, so use a depth first traversal
47 output algorithm. Ignore the fact that some functions won't need
48 to be output and put them into order as well, so we get dependencies
49 right through inline functions. */
50 for (node = cgraph_nodes; node; node = node->next)
/* Two passes over all nodes; presumably the first pass is restricted to a
   subset (the guarding condition lines are missing here) -- TODO confirm.  */
52 for (pass = 0; pass < 2; pass++)
53 for (node = cgraph_nodes; node; node = node->next)
56 || (!cgraph_only_called_directly_p (node)
57 && !node->address_taken)))
/* Begin iterating this node's caller list via aux.  */
63 node->aux = node->callers;
66 while (node2->aux != &last)
68 edge = (struct cgraph_edge *) node2->aux;
69 if (edge->next_caller)
70 node2->aux = edge->next_caller;
73 /* Break possible cycles involving always-inline
74 functions by ignoring edges from always-inline
75 functions to non-always-inline functions. */
76 if (edge->caller->local.disregard_inline_limits
77 && !edge->callee->local.disregard_inline_limits)
79 if (!edge->caller->aux)
81 if (!edge->caller->callers)
82 edge->caller->aux = &last;
84 edge->caller->aux = edge->caller->callers;
/* Push the current node and descend into the caller.  */
85 stack[stack_size++] = node2;
/* All callers of NODE2 processed: emit it and pop back up the stack.  */
90 if (node2->aux == &last)
92 order[order_pos++] = node2;
94 node2 = stack[--stack_size];
/* Trailing cleanup loop (body missing in this listing); presumably clears
   node->aux on every node -- TODO confirm.  */
101 for (node = cgraph_nodes; node; node = node->next)
50 /* Look for all functions inlined to NODE and update their inlined_to pointers
   to INLINED_TO, recursing through the whole inlined subtree.  */
110 update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
112 struct cgraph_edge *e;
/* Only callees that are themselves inlined somewhere belong to the inline
   tree rooted at NODE; ordinary (non-inlined) callees are skipped.  */
113 for (e = node->callees; e; e = e->next_callee)
114 if (e->callee->global.inlined_to)
116 e->callee->global.inlined_to = inlined_to;
117 update_inlined_to_pointer (e->callee, inlined_to);
121 /* Add cgraph NODE to queue starting at FIRST.
123 The queue is linked via AUX pointers and terminated by pointer to 1.
124 We enqueue nodes at two occasions: when we find them reachable or when we find
125 their bodies needed for further cloning. In the second case we mark them
126 by pointer to 2 after processing so they are re-queued when they become
   reachable later.  */
130 enqueue_cgraph_node (struct cgraph_node *node, struct cgraph_node **first)
132 /* Node is still in queue; do nothing. */
133 if (node->aux && node->aux != (void *) 2)
135 /* Node was already processed as unreachable, re-enqueue
136 only if it became reachable now. */
137 if (node->aux == (void *)2 && !node->reachable)
143 /* Add varpool NODE to queue starting at FIRST.
   NOTE(review): the body lines of this function are missing from this
   listing; presumably it links NODE via its aux pointer the same way
   enqueue_cgraph_node does -- verify against the full source.  */
146 enqueue_varpool_node (struct varpool_node *node, struct varpool_node **first)
152 /* Process references.
   Walk every reference in LIST; mark referenced cgraph nodes reachable and
   referenced varpool nodes needed, enqueueing each onto the corresponding
   worklist (FIRST / FIRST_VARPOOL) for the reachability analysis.  */
155 process_references (struct ipa_ref_list *list,
156 struct cgraph_node **first,
157 struct varpool_node **first_varpool,
158 bool before_inlining_p)
162 for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
164 if (ref->refered_type == IPA_REF_CGRAPH)
166 struct cgraph_node *node = ipa_ref_node (ref);
/* External functions only keep their bodies before inlining decisions are
   made; afterwards referencing them does not make them reachable.  */
168 && (!DECL_EXTERNAL (node->decl)
169 || before_inlining_p))
171 node->reachable = true;
172 enqueue_cgraph_node (node, first);
/* Reference to a variable: mark it needed and queue it.  */
177 struct varpool_node *node = ipa_ref_varpool_node (ref);
180 varpool_mark_needed_node (node);
181 enqueue_varpool_node (node, first_varpool);
/* NOTE(review): the original comment below talks about removing a *function*
   if direct calls are eliminated, but the visible code is a varpool (variable)
   predicate.  A cgraph counterpart was most likely dropped from this listing
   between the comment and this function -- verify against the full source.  */
187 /* Return true when function NODE can be removed from callgraph
188 if all direct calls are eliminated. */
/* Return true when variable NODE can be removed from the varpool when all
   references to it are eliminated: it must not be forced to be output, not
   used from another LTO partition, and either COMDAT or not externally
   visible.  */
191 varpool_can_remove_if_no_refs (struct varpool_node *node)
193 return (!node->force_output && !node->used_from_other_partition
194 && (DECL_COMDAT (node->decl) || !node->externally_visible));
197 /* Perform reachability analysis and reclaim all unreachable nodes.
198 If BEFORE_INLINING_P is true this function is called before inlining
199 decisions have been made. If BEFORE_INLINING_P is false this function also
200 removes unneeded bodies of extern inline functions.
   Returns true when the callgraph changed.  FILE, when non-NULL, receives a
   dump of the reclaimed nodes.
   NOTE(review): several lines are missing from this listing (the embedded
   numbering skips); verify the control flow against the full gcc/ipa.c.  */
203 cgraph_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
/* The two worklists are terminated by the sentinel pointer 1 (see
   enqueue_cgraph_node above).  */
205 struct cgraph_node *first = (struct cgraph_node *) (void *) 1;
206 struct varpool_node *first_varpool = (struct varpool_node *) (void *) 1;
207 struct cgraph_node *node, *next;
208 struct varpool_node *vnode, *vnext;
209 bool changed = false;
211 #ifdef ENABLE_CHECKING
215 fprintf (file, "\nReclaiming functions:");
/* With checking enabled, assert that no stale aux pointers are left over
   from a previous traversal.  */
216 #ifdef ENABLE_CHECKING
217 for (node = cgraph_nodes; node; node = node->next)
218 gcc_assert (!node->aux);
219 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
220 gcc_assert (!vnode->aux);
222 varpool_reset_queue ();
/* Seed the worklist with all functions that must be kept regardless of
   callers/references (e.g. externally visible ones).  */
223 for (node = cgraph_nodes; node; node = node->next)
224 if (!cgraph_can_remove_if_no_direct_calls_and_refs_p (node)
225 && ((!DECL_EXTERNAL (node->decl))
226 || before_inlining_p))
228 gcc_assert (!node->global.inlined_to);
229 enqueue_cgraph_node (node, &first);
230 node->reachable = true;
/* Everything else starts out unreachable.  */
234 gcc_assert (!node->aux);
235 node->reachable = false;
/* Rebuild the varpool needed queue from scratch, seeding it with variables
   that cannot be removed even with zero references.  */
237 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
239 vnode->next_needed = NULL;
240 vnode->prev_needed = NULL;
241 if (!varpool_can_remove_if_no_refs (vnode))
243 vnode->needed = false;
244 varpool_mark_needed_node (vnode);
245 enqueue_varpool_node (vnode, &first_varpool);
248 vnode->needed = false;
251 /* Perform reachability analysis. As a special case do not consider
252 extern inline functions not inlined as live because we won't output
   them anyway.
255 We maintain two worklists, one for cgraph nodes, the other for varpool
256 nodes, and are finished once both are empty. */
258 while (first != (struct cgraph_node *) (void *) 1
259 || first_varpool != (struct varpool_node *) (void *) 1)
261 if (first != (struct cgraph_node *) (void *) 1)
263 struct cgraph_edge *e;
/* Pop the head of the cgraph worklist.  */
265 first = (struct cgraph_node *) first->aux;
/* Mark nodes processed-while-unreachable with pointer 2 so they can be
   re-queued if they later become reachable.  */
266 if (!node->reachable)
267 node->aux = (void *)2;
269 /* If we found this node reachable, first mark on the callees
270 reachable too, unless they are direct calls to extern inline functions
271 we decided to not inline. */
273 for (e = node->callees; e; e = e->next_callee)
274 if (!e->callee->reachable
276 && (!e->inline_failed || !e->callee->analyzed
277 || (!DECL_EXTERNAL (e->callee->decl))
278 || before_inlining_p))
280 e->callee->reachable = true;
281 enqueue_cgraph_node (e->callee, &first);
284 /* If any function in a comdat group is reachable, force
285 all other functions in the same comdat group to be
   reachable as well.  */
287 if (node->same_comdat_group
289 && !node->global.inlined_to)
291 for (next = node->same_comdat_group;
293 next = next->same_comdat_group)
294 if (!next->reachable)
296 next->reachable = true;
297 enqueue_cgraph_node (next, &first);
301 /* We can freely remove inline clones even if they are cloned, however if
302 function is clone of real clone, we must keep it around in order to
303 make materialize_clones produce function body with the changes
   applied.  */
305 while (node->clone_of && !node->clone_of->aux && !gimple_has_body_p (node->decl))
307 bool noninline = node->clone_of->decl != node->decl;
308 node = node->clone_of;
309 if (noninline && !node->reachable && !node->aux)
311 enqueue_cgraph_node (node, &first);
/* Propagate reachability through this node's IPA references.  */
315 process_references (&node->ref_list, &first, &first_varpool, before_inlining_p);
/* Pop and process the head of the varpool worklist.  */
317 if (first_varpool != (struct varpool_node *) (void *) 1)
319 vnode = first_varpool;
320 first_varpool = (struct varpool_node *)first_varpool->aux;
322 process_references (&vnode->ref_list, &first, &first_varpool, before_inlining_p);
326 /* Remove unreachable nodes.
328 Completely unreachable functions can be fully removed from the callgraph.
329 Extern inline functions that we decided to not inline need to become unanalyzed nodes of
330 callgraph (so we still have edges to them). We remove function body then.
332 Also we need to care about functions that are unreachable but we need to keep them around
333 for later cloning. In this case we also turn them to unanalyzed nodes, but
334 keep the body around. */
335 for (node = cgraph_nodes; node; node = next)
338 if (node->aux && !node->reachable)
340 cgraph_node_remove_callees (node);
341 node->analyzed = false;
342 node->local.inlinable = false;
346 node->global.inlined_to = NULL;
348 fprintf (file, " %s", cgraph_node_name (node));
349 if (!node->analyzed || !DECL_EXTERNAL (node->decl) || before_inlining_p)
350 cgraph_remove_node (node);
353 struct cgraph_edge *e;
355 /* See if there is reachable caller. */
356 for (e = node->callers; e; e = e->next_caller)
357 if (e->caller->reachable)
360 /* If so, we need to keep node in the callgraph. */
361 if (e || node->needed)
363 struct cgraph_node *clone;
365 /* If there are still clones, we must keep body around.
366 Otherwise we can just remove the body but keep the clone. */
367 for (clone = node->clones; clone;
368 clone = clone->next_sibling_clone)
373 cgraph_release_function_body (node);
374 node->analyzed = false;
375 node->local.inlinable = false;
/* Unreachable and not needed: unlink the node from the clone tree before
   removing it, keeping the sibling list and clone_of pointers consistent.  */
378 gcc_assert (!clone->in_other_partition);
379 cgraph_node_remove_callees (node);
380 ipa_remove_all_references (&node->ref_list);
381 if (node->prev_sibling_clone)
382 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
383 else if (node->clone_of)
384 node->clone_of->clones = node->next_sibling_clone;
385 if (node->next_sibling_clone)
386 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
387 node->clone_of = NULL;
388 node->next_sibling_clone = NULL;
389 node->prev_sibling_clone = NULL;
392 cgraph_remove_node (node);
397 for (node = cgraph_nodes; node; node = node->next)
399 /* Inline clones might be kept around so their materializing allows further
400 cloning. If the function the clone is inlined into is removed, we need
401 to turn it into normal clone. */
402 if (node->global.inlined_to
405 gcc_assert (node->clones);
406 node->global.inlined_to = NULL;
407 update_inlined_to_pointer (node, node);
/* Reclaim variables that ended up with no references.  */
412 fprintf (file, "\nReclaiming variables:");
413 for (vnode = varpool_nodes; vnode; vnode = vnext)
419 fprintf (file, " %s", varpool_node_name (vnode));
420 varpool_remove_node (vnode);
424 fprintf (file, "\nClearing address taken flags:");
/* Drop the address_taken flag on functions whose address is no longer
   referenced anywhere (the condition lines checking the references are
   partially missing from this listing).  */
425 for (node = cgraph_nodes; node; node = node->next)
426 if (node->address_taken
427 && !node->reachable_from_other_partition)
432 for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref)
438 fprintf (file, " %s", cgraph_node_name (node));
439 node->address_taken = false;
443 #ifdef ENABLE_CHECKING
447 /* Reclaim alias pairs for functions that have disappeared from the
   callgraph.  */
449 remove_unreachable_alias_pairs ();
/* Return true when function NODE should be considered externally visible,
   i.e. callable/addressable from outside this compilation unit (or outside
   the whole program when WHOLE_PROGRAM is set).  Several early-return lines
   are missing from this listing -- verify the exact returns against the full
   source.  */
455 cgraph_externally_visible_p (struct cgraph_node *node, bool whole_program)
457 if (!node->local.finalized)
/* Non-COMDAT statics and external declarations are never "visible" in the
   sense used here.  */
459 if (!DECL_COMDAT (node->decl)
460 && (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl)))
/* Explicitly preserved symbols (attribute used) must stay visible.  */
464 if (DECL_PRESERVE_P (node->decl))
466 /* COMDAT functions must be shared only if they have address taken,
467 otherwise we can produce our own private implementation with
   the same semantics.  */
469 if (DECL_COMDAT (node->decl))
471 if (node->address_taken || !node->analyzed)
473 if (node->same_comdat_group)
475 struct cgraph_node *next;
477 /* If more than one function is in the same COMDAT group, it must
478 be shared even if just one function in the comdat group has
   its address taken.  */
480 for (next = node->same_comdat_group;
482 next = next->same_comdat_group)
483 if (next->address_taken || !next->analyzed)
/* main() and symbols carrying the externally_visible attribute are always
   visible.  */
487 if (MAIN_NAME_P (DECL_NAME (node->decl)))
489 if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (node->decl)))
251 /* Dissolve the same_comdat_group list in which NODE resides.
   Walks the circular same_comdat_group list starting at NODE and clears the
   link on every member (the loop-control lines are partially missing from
   this listing).  */
497 dissolve_same_comdat_group_list (struct cgraph_node *node)
499 struct cgraph_node *n = node, *next;
502 next = n->same_comdat_group;
503 n->same_comdat_group = NULL;
509 /* Mark visibility of all functions.
511 A local function is one whose calls can occur only in the current
512 compilation unit and all its calls are explicit, so we can change
513 its calling convention. We simply mark all static functions whose
514 address is not taken as local.
516 We also change the TREE_PUBLIC flag of all declarations that are public
517 in language point of view but we want to overwrite this default
518 via visibilities for the backend point of view.
   WHOLE_PROGRAM enables -fwhole-program style privatization.  */
521 function_and_variable_visibility (bool whole_program)
523 struct cgraph_node *node;
524 struct varpool_node *vnode;
526 for (node = cgraph_nodes; node; node = node->next)
528 /* C++ FE on lack of COMDAT support create local COMDAT functions
529 (that ought to be shared but can not due to object format
530 limitations). It is necessary to keep the flag to make rest of C++ FE
531 happy. Clear the flag here to avoid confusion in middle-end. */
532 if (DECL_COMDAT (node->decl) && !TREE_PUBLIC (node->decl))
533 DECL_COMDAT (node->decl) = 0;
534 /* For external decls stop tracking same_comdat_group, it doesn't matter
535 what comdat group they are in when they won't be emitted in this TU,
536 and simplifies later passes. */
537 if (node->same_comdat_group && DECL_EXTERNAL (node->decl))
539 #ifdef ENABLE_CHECKING
540 struct cgraph_node *n;
542 for (n = node->same_comdat_group;
544 n = n->same_comdat_group)
545 /* If at least one of same comdat group functions is external,
546 all of them have to be, otherwise it is a front-end bug. */
547 gcc_assert (DECL_EXTERNAL (n->decl));
549 dissolve_same_comdat_group_list (node);
551 gcc_assert ((!DECL_WEAK (node->decl) && !DECL_COMDAT (node->decl))
552 || TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl));
553 if (cgraph_externally_visible_p (node, whole_program))
555 gcc_assert (!node->global.inlined_to);
556 node->local.externally_visible = true;
559 node->local.externally_visible = false;
/* Privatize everything that turned out not to be externally visible.  */
560 if (!node->local.externally_visible && node->analyzed
561 && !DECL_EXTERNAL (node->decl))
563 gcc_assert (whole_program || !TREE_PUBLIC (node->decl));
564 cgraph_make_decl_local (node->decl);
565 if (node->same_comdat_group)
566 /* cgraph_externally_visible_p has already checked all other nodes
567 in the group and they will all be made local. We need to
568 dissolve the group at once so that the predicate does not
   get confused by half-processed groups.  */
570 dissolve_same_comdat_group_list (node);
/* A function is "local" when all calls are direct and nothing external can
   reach it -- this permits changing its calling convention.  */
572 node->local.local = (cgraph_only_called_directly_p (node)
574 && !DECL_EXTERNAL (node->decl)
575 && !node->local.externally_visible);
577 for (vnode = varpool_nodes; vnode; vnode = vnode->next)
579 /* weak flag makes no sense on local variables. */
580 gcc_assert (!DECL_WEAK (vnode->decl)
581 || TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl))
582 /* In several cases declarations can not be common:
584 - when declaration has initializer
586 - when it has specific section
587 - when it resides in non-generic address space.
588 - if declaration is local, it will get into .local common section
589 so common flag is not needed. Frontends still produce these in
590 certain cases, such as for:
592 static int a __attribute__ ((common))
594 Canonicalize things here and clear the redundant flag. */
595 if (DECL_COMMON (vnode->decl)
596 && (!(TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl))
597 || (DECL_INITIAL (vnode->decl)
598 && DECL_INITIAL (vnode->decl) != error_mark_node)
599 || DECL_WEAK (vnode->decl)
600 || DECL_SECTION_NAME (vnode->decl) != NULL
601 || ! (ADDR_SPACE_GENERIC_P
602 (TYPE_ADDR_SPACE (TREE_TYPE (vnode->decl))))))
603 DECL_COMMON (vnode->decl) = 0;
/* Decide external visibility for each queued variable (some condition lines
   are missing from this listing).  */
605 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
607 if (!vnode->finalized)
610 && (DECL_COMDAT (vnode->decl) || TREE_PUBLIC (vnode->decl))
612 /* We can privatize comdat readonly variables whose address is not taken,
613 but doing so is not going to bring us optimization opportunities until
614 we start reordering data structures. */
615 || DECL_COMDAT (vnode->decl)
616 || DECL_WEAK (vnode->decl)
617 || lookup_attribute ("externally_visible",
618 DECL_ATTRIBUTES (vnode->decl))))
619 vnode->externally_visible = true;
621 vnode->externally_visible = false;
622 if (!vnode->externally_visible)
624 gcc_assert (whole_program || !TREE_PUBLIC (vnode->decl));
625 cgraph_make_decl_local (vnode->decl);
627 gcc_assert (TREE_STATIC (vnode->decl));
/* Dump the classification when a dump file is active.  */
632 fprintf (dump_file, "\nMarking local functions:");
633 for (node = cgraph_nodes; node; node = node->next)
634 if (node->local.local)
635 fprintf (dump_file, " %s", cgraph_node_name (node));
636 fprintf (dump_file, "\n\n");
637 fprintf (dump_file, "\nMarking externally visible functions:");
638 for (node = cgraph_nodes; node; node = node->next)
639 if (node->local.externally_visible)
640 fprintf (dump_file, " %s", cgraph_node_name (node));
641 fprintf (dump_file, "\n\n");
642 fprintf (dump_file, "\nMarking externally visible variables:");
643 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
644 if (vnode->externally_visible)
645 fprintf (dump_file, " %s", varpool_node_name (vnode));
646 fprintf (dump_file, "\n\n");
648 cgraph_function_flags_ready = true;
358 /* Local function pass handling visibilities. This happens before LTO streaming
653 so in particular -fwhole-program should be ignored at this level.
   (flag_whole_program is only honored when neither -flto nor -fwhopr is in
   effect.)  */
656 local_function_and_variable_visibility (void)
658 return function_and_variable_visibility (flag_whole_program && !flag_lto && !flag_whopr);
/* Pass descriptor for the early, per-TU visibility pass.  Some designated
   slots (gate, sub/next links) are missing from this listing.  */
661 struct simple_ipa_opt_pass pass_ipa_function_and_variable_visibility =
665 "visibility", /* name */
667 local_function_and_variable_visibility,/* execute */
670 0, /* static_pass_number */
671 TV_CGRAPHOPT, /* tv_id */
672 0, /* properties_required */
673 0, /* properties_provided */
674 0, /* properties_destroyed */
675 0, /* todo_flags_start */
676 TODO_remove_functions | TODO_dump_cgraph
677 | TODO_ggc_collect /* todo_flags_finish */
373 /* Do not re-run on ltrans stage.
   Gate for the whole-program visibility pass; the returned condition line is
   missing from this listing -- presumably it tests !flag_ltrans, verify
   against the full source.  */
684 gate_whole_program_function_and_variable_visibility (void)
375 /* Bring functions local at LTO time with -fwhole-program.  After recomputing
   visibility, mark everything that remains externally visible (and is not
   COMDAT) as needed so it is kept in the output.  */
692 whole_program_function_and_variable_visibility (void)
694 struct cgraph_node *node;
695 struct varpool_node *vnode;
697 function_and_variable_visibility (flag_whole_program);
699 for (node = cgraph_nodes; node; node = node->next)
700 if ((node->local.externally_visible && !DECL_COMDAT (node->decl))
701 && node->local.finalized)
702 cgraph_mark_needed_node (node);
703 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
704 if (vnode->externally_visible && !DECL_COMDAT (vnode->decl))
705 varpool_mark_needed_node (vnode);
/* Dump needed variables when a dump file is active.  */
708 fprintf (dump_file, "\nNeeded variables:");
709 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
711 fprintf (dump_file, " %s", varpool_node_name (vnode));
712 fprintf (dump_file, "\n\n");
/* Pass descriptor for the LTO-time whole-program visibility pass.  */
717 struct ipa_opt_pass_d pass_ipa_whole_program_visibility =
721 "whole-program", /* name */
722 gate_whole_program_function_and_variable_visibility,/* gate */
723 whole_program_function_and_variable_visibility,/* execute */
726 0, /* static_pass_number */
727 TV_CGRAPHOPT, /* tv_id */
728 0, /* properties_required */
729 0, /* properties_provided */
730 0, /* properties_destroyed */
731 0, /* todo_flags_start */
732 TODO_remove_functions | TODO_dump_cgraph
733 | TODO_ggc_collect /* todo_flags_finish */
735 NULL, /* generate_summary */
736 NULL, /* write_summary */
737 NULL, /* read_summary */
738 NULL, /* write_optimization_summary */
739 NULL, /* read_optimization_summary */
740 NULL, /* stmt_fixup */
742 NULL, /* function_transform */
743 NULL, /* variable_transform */
411 /* Hash a cgraph node set element.  Hashes on the node pointer itself,
   matching eq_cgraph_node_set_element below.  */
749 hash_cgraph_node_set_element (const void *p)
751 const_cgraph_node_set_element element = (const_cgraph_node_set_element) p;
752 return htab_hash_pointer (element->node);
415 /* Compare two cgraph node set elements.  Equality is pointer identity of
   the wrapped nodes.  */
758 eq_cgraph_node_set_element (const void *p1, const void *p2)
760 const_cgraph_node_set_element e1 = (const_cgraph_node_set_element) p1;
761 const_cgraph_node_set_element e2 = (const_cgraph_node_set_element) p2;
763 return e1->node == e2->node;
420 /* Create a new cgraph node set.  The set pairs a GC-allocated hash table
   (for O(1) membership) with a vector of nodes (for ordered iteration).  */
769 cgraph_node_set_new (void)
771 cgraph_node_set new_node_set;
773 new_node_set = GGC_NEW (struct cgraph_node_set_def);
774 new_node_set->hashtab = htab_create_ggc (10,
775 hash_cgraph_node_set_element,
776 eq_cgraph_node_set_element,
778 new_node_set->nodes = NULL;
428 /* Add cgraph_node NODE to cgraph_node_set SET.  A no-op (beyond an
   invariant check) if NODE is already present.  */
785 cgraph_node_set_add (cgraph_node_set set, struct cgraph_node *node)
788 cgraph_node_set_element element;
789 struct cgraph_node_set_element_def dummy;
/* Probe the hash table with a stack dummy keyed on NODE.  */
792 slot = htab_find_slot (set->hashtab, &dummy, INSERT);
794 if (*slot != HTAB_EMPTY_ENTRY)
796 element = (cgraph_node_set_element) *slot;
/* Already a member: the vector entry at its index must agree.  */
797 gcc_assert (node == element->node
798 && (VEC_index (cgraph_node_ptr, set->nodes, element->index)
803 /* Insert node into hash table. */
805 (cgraph_node_set_element) GGC_NEW (struct cgraph_node_set_element_def);
806 element->node = node;
807 element->index = VEC_length (cgraph_node_ptr, set->nodes);
810 /* Insert into node vector. */
811 VEC_safe_push (cgraph_node_ptr, gc, set->nodes, node);
443 /* Remove cgraph_node NODE from cgraph_node_set SET.
   O(1): the vacated vector slot is filled by swapping in the last element.  */
817 cgraph_node_set_remove (cgraph_node_set set, struct cgraph_node *node)
819 void **slot, **last_slot;
820 cgraph_node_set_element element, last_element;
821 struct cgraph_node *last_node;
822 struct cgraph_node_set_element_def dummy;
825 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
829 element = (cgraph_node_set_element) *slot;
830 gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
833 /* Remove from vector. We do this by swapping node with the last element
   of the vector.  */
835 last_node = VEC_pop (cgraph_node_ptr, set->nodes);
836 if (last_node != node)
838 dummy.node = last_node;
839 last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
840 last_element = (cgraph_node_set_element) *last_slot;
841 gcc_assert (last_element);
843 /* Move the last element to the original spot of NODE. */
844 last_element->index = element->index;
845 VEC_replace (cgraph_node_ptr, set->nodes, last_element->index,
849 /* Remove element from hash table. */
850 htab_clear_slot (set->hashtab, slot);
464 /* Find NODE in SET and return an iterator to it if found. A null iterator
465 is returned if NODE is not in SET.  (A null iterator has index ~0.)  */
857 cgraph_node_set_iterator
858 cgraph_node_set_find (cgraph_node_set set, struct cgraph_node *node)
861 struct cgraph_node_set_element_def dummy;
862 cgraph_node_set_element element;
863 cgraph_node_set_iterator csi;
866 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
/* Not found: signal with the ~0 sentinel index.  */
868 csi.index = (unsigned) ~0;
871 element = (cgraph_node_set_element) *slot;
872 gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
874 csi.index = element->index;
476 /* Dump content of SET to file F, one node at a time via dump_cgraph_node.  */
884 dump_cgraph_node_set (FILE *f, cgraph_node_set set)
886 cgraph_node_set_iterator iter;
888 for (iter = csi_start (set); !csi_end_p (iter); csi_next (&iter))
890 struct cgraph_node *node = csi_node (iter);
891 dump_cgraph_node (f, node);
482 /* Dump content of SET to stderr.  Debugger convenience wrapper.  */
898 debug_cgraph_node_set (cgraph_node_set set)
900 dump_cgraph_node_set (stderr, set);
485 /* Hash a varpool node set element.  Hashes on the node pointer itself,
   matching eq_varpool_node_set_element below.  */
906 hash_varpool_node_set_element (const void *p)
908 const_varpool_node_set_element element = (const_varpool_node_set_element) p;
909 return htab_hash_pointer (element->node);
489 /* Compare two varpool node set elements.  Equality is pointer identity of
   the wrapped nodes.  */
915 eq_varpool_node_set_element (const void *p1, const void *p2)
917 const_varpool_node_set_element e1 = (const_varpool_node_set_element) p1;
918 const_varpool_node_set_element e2 = (const_varpool_node_set_element) p2;
920 return e1->node == e2->node;
494 /* Create a new varpool node set.  Mirrors cgraph_node_set_new: a GC
   hash table for membership plus a vector for ordered iteration.  */
926 varpool_node_set_new (void)
928 varpool_node_set new_node_set;
930 new_node_set = GGC_NEW (struct varpool_node_set_def);
931 new_node_set->hashtab = htab_create_ggc (10,
932 hash_varpool_node_set_element,
933 eq_varpool_node_set_element,
935 new_node_set->nodes = NULL;
502 /* Add varpool_node NODE to varpool_node_set SET.  A no-op (beyond an
   invariant check) if NODE is already present.  */
942 varpool_node_set_add (varpool_node_set set, struct varpool_node *node)
945 varpool_node_set_element element;
946 struct varpool_node_set_element_def dummy;
/* Probe the hash table with a stack dummy keyed on NODE.  */
949 slot = htab_find_slot (set->hashtab, &dummy, INSERT);
951 if (*slot != HTAB_EMPTY_ENTRY)
953 element = (varpool_node_set_element) *slot;
/* Already a member: the vector entry at its index must agree.  */
954 gcc_assert (node == element->node
955 && (VEC_index (varpool_node_ptr, set->nodes, element->index)
960 /* Insert node into hash table. */
962 (varpool_node_set_element) GGC_NEW (struct varpool_node_set_element_def);
963 element->node = node;
964 element->index = VEC_length (varpool_node_ptr, set->nodes);
967 /* Insert into node vector. */
968 VEC_safe_push (varpool_node_ptr, gc, set->nodes, node);
517 /* Remove varpool_node NODE from varpool_node_set SET.
   O(1): the vacated vector slot is filled by swapping in the last element.  */
974 varpool_node_set_remove (varpool_node_set set, struct varpool_node *node)
976 void **slot, **last_slot;
977 varpool_node_set_element element, last_element;
978 struct varpool_node *last_node;
979 struct varpool_node_set_element_def dummy;
982 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
986 element = (varpool_node_set_element) *slot;
987 gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
990 /* Remove from vector. We do this by swapping node with the last element
   of the vector.  */
992 last_node = VEC_pop (varpool_node_ptr, set->nodes);
993 if (last_node != node)
995 dummy.node = last_node;
996 last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
997 last_element = (varpool_node_set_element) *last_slot;
998 gcc_assert (last_element);
1000 /* Move the last element to the original spot of NODE. */
1001 last_element->index = element->index;
1002 VEC_replace (varpool_node_ptr, set->nodes, last_element->index,
1006 /* Remove element from hash table. */
1007 htab_clear_slot (set->hashtab, slot);
538 /* Find NODE in SET and return an iterator to it if found. A null iterator
539 is returned if NODE is not in SET.  (A null iterator has index ~0.)  */
1014 varpool_node_set_iterator
1015 varpool_node_set_find (varpool_node_set set, struct varpool_node *node)
1018 struct varpool_node_set_element_def dummy;
1019 varpool_node_set_element element;
1020 varpool_node_set_iterator vsi;
1023 slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
/* Not found: signal with the ~0 sentinel index.  */
1025 vsi.index = (unsigned) ~0;
1028 element = (varpool_node_set_element) *slot;
1029 gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
1031 vsi.index = element->index;
550 /* Dump content of SET to file F, one node at a time via dump_varpool_node.  */
1041 dump_varpool_node_set (FILE *f, varpool_node_set set)
1043 varpool_node_set_iterator iter;
1045 for (iter = vsi_start (set); !vsi_end_p (iter); vsi_next (&iter))
1047 struct varpool_node *node = vsi_node (iter);
1048 dump_varpool_node (f, node);
556 /* Dump content of SET to stderr.  Debugger convenience wrapper.  */
1055 debug_varpool_node_set (varpool_node_set set)
1057 dump_varpool_node_set (stderr, set);
559 /* Simple ipa profile pass propagating frequencies across the callgraph.
   NOTE(review): the function's signature line is among the lines missing
   from this listing -- the body below presumably belongs to the ipa_profile
   execute function referenced by pass_ipa_profile; verify.  */
1066 struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1067 struct cgraph_edge *e;
1069 bool something_changed = false;
/* Process nodes in reverse postorder so callers are visited before their
   callees; aux == 1 flags a callee whose frequency must be recomputed.  */
1072 order_pos = cgraph_postorder (order);
1073 for (i = order_pos - 1; i >= 0; i--)
1075 if (order[i]->local.local && cgraph_propagate_frequency (order[i]))
1077 for (e = order[i]->callees; e; e = e->next_callee)
1078 if (e->callee->local.local && !e->callee->aux)
1080 something_changed = true;
1081 e->callee->aux = (void *)1;
1084 order[i]->aux = NULL;
/* Iterate to a fixed point: re-propagate for every node flagged in the
   previous sweep until nothing changes.  */
1087 while (something_changed)
1089 something_changed = false;
1090 for (i = order_pos - 1; i >= 0; i--)
1092 if (order[i]->aux && cgraph_propagate_frequency (order[i]))
1094 for (e = order[i]->callees; e; e = e->next_callee)
1095 if (e->callee->local.local && !e->callee->aux)
1097 something_changed = true;
1098 e->callee->aux = (void *)1;
1101 order[i]->aux = NULL;
/* Gate for pass_ipa_profile: run only when -fipa-profile is enabled.  */
1109 gate_ipa_profile (void)
1111 return flag_ipa_profile;
/* Pass descriptor for the IPA profile-propagation pass.  The trailing slots
   of the initializer appear to run past the end of this listing.  */
1114 struct ipa_opt_pass_d pass_ipa_profile =
1118 "ipa-profile", /* name */
1119 gate_ipa_profile, /* gate */
1120 ipa_profile, /* execute */
1123 0, /* static_pass_number */
1124 TV_IPA_PROFILE, /* tv_id */
1125 0, /* properties_required */
1126 0, /* properties_provided */
1127 0, /* properties_destroyed */
1128 0, /* todo_flags_start */
1129 0 /* todo_flags_finish */
1131 NULL, /* generate_summary */
1132 NULL, /* write_summary */
1133 NULL, /* read_summary */
1134 NULL, /* write_optimization_summary */
1135 NULL, /* read_optimization_summary */
1136 NULL, /* stmt_fixup */
1138 NULL, /* function_transform */
1139 NULL /* variable_transform */