/* Basic IPA optimizations and utilities.
   Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "cgraph.h"
#include "tree-pass.h"
#include "timevar.h"
#include "gimple.h"
#include "ggc.h"
/* Fill array ORDER with all nodes with output flag set in the reverse
   topological order.  */

int
cgraph_postorder (struct cgraph_node **order)
{
  struct cgraph_node *node, *node2;
  int stack_size = 0;
  int order_pos = 0;
  struct cgraph_edge *edge, last;
  int pass;

  struct cgraph_node **stack =
    XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);

  /* We have to deal with cycles nicely, so use a depth first traversal
     output algorithm.  Ignore the fact that some functions won't need
     to be output and put them into order as well, so we get dependencies
     right through inline functions.  */
  for (node = cgraph_nodes; node; node = node->next)
    node->aux = NULL;
  for (pass = 0; pass < 2; pass++)
    for (node = cgraph_nodes; node; node = node->next)
      if (!node->aux
          && (pass
              || (!cgraph_only_called_directly_p (node)
                  && !node->address_taken)))
        {
          node2 = node;
          if (!node->callers)
            node->aux = &last;
          else
            node->aux = node->callers;
          while (node2)
            {
              while (node2->aux != &last)
                {
                  edge = (struct cgraph_edge *) node2->aux;
                  if (edge->next_caller)
                    node2->aux = edge->next_caller;
                  else
                    node2->aux = &last;
                  /* Break possible cycles involving always-inline
                     functions by ignoring edges from always-inline
                     functions to non-always-inline functions.  */
                  if (edge->caller->local.disregard_inline_limits
                      && !edge->callee->local.disregard_inline_limits)
                    continue;
                  if (!edge->caller->aux)
                    {
                      if (!edge->caller->callers)
                        edge->caller->aux = &last;
                      else
                        edge->caller->aux = edge->caller->callers;
                      stack[stack_size++] = node2;
                      node2 = edge->caller;
                    }
                }
              if (node2->aux == &last)
                {
                  order[order_pos++] = node2;
                  if (stack_size)
                    node2 = stack[--stack_size];
                  else
                    node2 = NULL;
                }
            }
        }
  free (stack);
  for (node = cgraph_nodes; node; node = node->next)
    node->aux = NULL;
  return order_pos;
}
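/* A minimal usage sketch (mirroring ipa_profile at the end of this file);
   the caller owns the ORDER array, and the "process" step stands for
   whatever per-node work the caller wants to do:

     struct cgraph_node **order
       = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
     int i, n = cgraph_postorder (order);

     for (i = n - 1; i >= 0; i--)
       process (order[i]);
     free (order);  */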
/* Look for all functions inlined to NODE and update their inlined_to pointers
   to INLINED_TO.  */

static void
update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->global.inlined_to)
      {
        e->callee->global.inlined_to = inlined_to;
        update_inlined_to_pointer (e->callee, inlined_to);
      }
}
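/* Note that the walk only recurses into callees that are themselves inlined
   (global.inlined_to is non-NULL); offline copies reachable through the call
   edges are left untouched.  */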
/* Add cgraph NODE to queue starting at FIRST.

   The queue is linked via AUX pointers and terminated by pointer to 1.
   We enqueue nodes at two occasions: when we find them reachable or when we
   find their bodies needed for further cloning.  In the second case we mark
   them by pointer to 2 after processing so they are re-queued once they
   become reachable.  */

static void
enqueue_cgraph_node (struct cgraph_node *node, struct cgraph_node **first)
{
  /* Node is still in queue; do nothing.  */
  if (node->aux && node->aux != (void *) 2)
    return;
  /* Node was already processed as unreachable, re-enqueue
     only if it became reachable now.  */
  if (node->aux == (void *) 2 && !node->reachable)
    return;
  node->aux = *first;
  *first = node;
}
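/* The AUX encoding used above: (void *) 1 terminates the worklist,
   (void *) 2 marks a node that has already been processed while not (yet)
   reachable so that it can be re-enqueued later, and any other non-NULL
   value links to the next queued node.  */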
/* Add varpool NODE to queue starting at FIRST.  */

static void
enqueue_varpool_node (struct varpool_node *node, struct varpool_node **first)
{
  if (node->aux)
    return;
  node->aux = *first;
  *first = node;
}
/* Process references.  */

static void
process_references (struct ipa_ref_list *list,
                    struct cgraph_node **first,
                    struct varpool_node **first_varpool,
                    bool before_inlining_p)
{
  int i;
  struct ipa_ref *ref;
  for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
    {
      if (ref->refered_type == IPA_REF_CGRAPH)
        {
          struct cgraph_node *node = ipa_ref_node (ref);
          if (!node->reachable
              && (!DECL_EXTERNAL (node->decl)
                  || before_inlining_p))
            {
              node->reachable = true;
              enqueue_cgraph_node (node, first);
            }
        }
      else
        {
          struct varpool_node *node = ipa_ref_varpool_node (ref);
          if (!node->needed)
            {
              varpool_mark_needed_node (node);
              enqueue_varpool_node (node, first_varpool);
            }
        }
    }
}
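/* process_references feeds both worklists of cgraph_remove_unreachable_nodes
   below: function references go onto FIRST, variable references onto
   FIRST_VARPOOL.  */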
/* Return true when variable NODE can be removed from the variable pool
   if all direct references to it are eliminated.  */

static inline bool
varpool_can_remove_if_no_refs (struct varpool_node *node)
{
  return (!node->force_output && !node->used_from_other_partition
          && (DECL_COMDAT (node->decl) || !node->externally_visible));
}
/* Perform reachability analysis and reclaim all unreachable nodes.
   If BEFORE_INLINING_P is true this function is called before inlining
   decisions have been made.  If BEFORE_INLINING_P is false this function also
   removes unneeded bodies of extern inline functions.  */

bool
cgraph_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
{
  struct cgraph_node *first = (struct cgraph_node *) (void *) 1;
  struct varpool_node *first_varpool = (struct varpool_node *) (void *) 1;
  struct cgraph_node *node, *next;
  struct varpool_node *vnode, *vnext;
  bool changed = false;

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif
  if (file)
    fprintf (file, "\nReclaiming functions:");
#ifdef ENABLE_CHECKING
  for (node = cgraph_nodes; node; node = node->next)
    gcc_assert (!node->aux);
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    gcc_assert (!vnode->aux);
#endif
  varpool_reset_queue ();
  for (node = cgraph_nodes; node; node = node->next)
    if (!cgraph_can_remove_if_no_direct_calls_and_refs_p (node)
        && ((!DECL_EXTERNAL (node->decl))
            || before_inlining_p))
      {
        gcc_assert (!node->global.inlined_to);
        enqueue_cgraph_node (node, &first);
        node->reachable = true;
      }
    else
      {
        gcc_assert (!node->aux);
        node->reachable = false;
      }
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    {
      vnode->next_needed = NULL;
      vnode->prev_needed = NULL;
      if (!varpool_can_remove_if_no_refs (vnode))
        {
          vnode->needed = false;
          varpool_mark_needed_node (vnode);
          enqueue_varpool_node (vnode, &first_varpool);
        }
      else
        vnode->needed = false;
    }
  /* Perform reachability analysis.  As a special case do not consider
     extern inline functions not inlined as live because we won't output
     them to the object file in this case.

     We maintain two worklists, one for cgraph nodes and the other for
     varpool nodes, and we are finished once both are empty.  */

  while (first != (struct cgraph_node *) (void *) 1
         || first_varpool != (struct varpool_node *) (void *) 1)
    {
      if (first != (struct cgraph_node *) (void *) 1)
        {
          struct cgraph_edge *e;
          node = first;
          first = (struct cgraph_node *) first->aux;
          if (!node->reachable)
            node->aux = (void *) 2;

          /* If we found this node reachable, mark all of its callees
             reachable too, unless they are direct calls to extern inline
             functions we decided to not inline.  */
          if (node->reachable)
            for (e = node->callees; e; e = e->next_callee)
              if (!e->callee->reachable
                  && node->analyzed
                  && (!e->inline_failed || !e->callee->analyzed
                      || (!DECL_EXTERNAL (e->callee->decl))
                      || before_inlining_p))
                {
                  e->callee->reachable = true;
                  enqueue_cgraph_node (e->callee, &first);
                }

          /* If any function in a comdat group is reachable, force
             all other functions in the same comdat group to be
             reachable as well.  */
          if (node->same_comdat_group
              && node->reachable
              && !node->global.inlined_to)
            {
              for (next = node->same_comdat_group;
                   next != node;
                   next = next->same_comdat_group)
                if (!next->reachable)
                  {
                    next->reachable = true;
                    enqueue_cgraph_node (next, &first);
                  }
            }

          /* We can freely remove inline clones even if they are cloned, however if
             function is clone of real clone, we must keep it around in order to
             make materialize_clones produce function body with the changes
             applied.  */
          while (node->clone_of && !node->clone_of->aux
                 && !gimple_has_body_p (node->decl))
            {
              bool noninline = node->clone_of->decl != node->decl;
              node = node->clone_of;
              if (noninline && !node->reachable && !node->aux)
                {
                  enqueue_cgraph_node (node, &first);
                  break;
                }
            }
          process_references (&node->ref_list, &first, &first_varpool, before_inlining_p);
        }
      if (first_varpool != (struct varpool_node *) (void *) 1)
        {
          vnode = first_varpool;
          first_varpool = (struct varpool_node *) first_varpool->aux;
          vnode->aux = NULL;
          process_references (&vnode->ref_list, &first, &first_varpool, before_inlining_p);
          /* If any node in a comdat group is reachable, force
             all other nodes in the same comdat group to be
             reachable as well.  */
          if (vnode->same_comdat_group)
            {
              struct varpool_node *next;
              for (next = vnode->same_comdat_group;
                   next != vnode;
                   next = next->same_comdat_group)
                if (!next->needed)
                  {
                    varpool_mark_needed_node (next);
                    enqueue_varpool_node (next, &first_varpool);
                  }
            }
        }
    }
  /* Remove unreachable nodes.

     Completely unreachable functions can be fully removed from the callgraph.
     Extern inline functions that we decided to not inline need to become
     unanalyzed nodes of the callgraph (so we still have edges to them); we
     then remove their function bodies.

     We also need to take care of functions that are unreachable but that we
     need to keep around for later cloning.  In this case we also turn them
     into unanalyzed nodes, but keep the body around.  */
  for (node = cgraph_nodes; node; node = next)
    {
      next = node->next;
      if (node->aux && !node->reachable)
        {
          cgraph_node_remove_callees (node);
          node->analyzed = false;
          node->local.inlinable = false;
        }
      if (!node->aux)
        {
          node->global.inlined_to = NULL;
          if (file)
            fprintf (file, " %s", cgraph_node_name (node));
          if (!node->analyzed || !DECL_EXTERNAL (node->decl) || before_inlining_p)
            cgraph_remove_node (node);
          else
            {
              struct cgraph_edge *e;

              /* See if there is reachable caller.  */
              for (e = node->callers; e; e = e->next_caller)
                if (e->caller->reachable)
                  break;

              /* If so, we need to keep node in the callgraph.  */
              if (e || node->needed)
                {
                  struct cgraph_node *clone;

                  /* If there are still clones, we must keep body around.
                     Otherwise we can just remove the body but keep the clone.  */
                  for (clone = node->clones; clone;
                       clone = clone->next_sibling_clone)
                    if (clone->aux)
                      break;
                  if (!clone)
                    {
                      cgraph_release_function_body (node);
                      node->analyzed = false;
                      node->local.inlinable = false;
                    }
                  else
                    gcc_assert (!clone->in_other_partition);
                  cgraph_node_remove_callees (node);
                  ipa_remove_all_references (&node->ref_list);
                  if (node->prev_sibling_clone)
                    node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
                  else if (node->clone_of)
                    node->clone_of->clones = node->next_sibling_clone;
                  if (node->next_sibling_clone)
                    node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
                  node->clone_of = NULL;
                  node->next_sibling_clone = NULL;
                  node->prev_sibling_clone = NULL;
                }
              else
                cgraph_remove_node (node);
            }
          changed = true;
        }
    }
  for (node = cgraph_nodes; node; node = node->next)
    {
      /* Inline clones might be kept around so their materializing allows further
         cloning.  If the function the clone is inlined into is removed, we need
         to turn it into a normal clone.  */
      if (node->global.inlined_to
          && !node->callers)
        {
          gcc_assert (node->clones);
          node->global.inlined_to = NULL;
          update_inlined_to_pointer (node, node);
        }
      node->aux = NULL;
    }
  if (file)
    fprintf (file, "\nReclaiming variables:");
  for (vnode = varpool_nodes; vnode; vnode = vnext)
    {
      vnext = vnode->next;
      if (!vnode->needed)
        {
          if (file)
            fprintf (file, " %s", varpool_node_name (vnode));
          varpool_remove_node (vnode);
          changed = true;
        }
    }
  if (file)
    fprintf (file, "\nClearing address taken flags:");
  for (node = cgraph_nodes; node; node = node->next)
    if (node->address_taken
        && !node->reachable_from_other_partition)
      {
        int i;
        struct ipa_ref *ref;
        bool found = false;

        /* Keep the flag only if some address reference to NODE remains.  */
        for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref)
                    && !found; i++)
          if (ref->use == IPA_REF_ADDR)
            found = true;
        if (!found)
          {
            if (file)
              fprintf (file, " %s", cgraph_node_name (node));
            node->address_taken = false;
          }
      }

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* Reclaim alias pairs for functions that have disappeared from the
     callgraph.  */
  remove_unreachable_alias_pairs ();

  return changed;
}
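/* Callers run this analysis whenever the callgraph may have lost edges; as a
   minimal sketch (the actual call sites live elsewhere in the compiler):

     if (cgraph_remove_unreachable_nodes (true, dump_file))
       ;   the return value says whether anything was reclaimed  */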
/* Return true when function NODE should be considered externally visible.  */

static bool
cgraph_externally_visible_p (struct cgraph_node *node, bool whole_program)
{
  if (!node->local.finalized)
    return false;
  if (!DECL_COMDAT (node->decl)
      && (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl)))
    return false;
  if (!whole_program)
    return true;
  if (DECL_PRESERVE_P (node->decl))
    return true;
  /* COMDAT functions must be shared only if they have address taken,
     otherwise we can produce our own private implementation with
     -fwhole-program.  */
  if (DECL_COMDAT (node->decl))
    {
      if (node->address_taken || !node->analyzed)
        return true;
      if (node->same_comdat_group)
        {
          struct cgraph_node *next;

          /* If more than one function is in the same COMDAT group, it must
             be shared even if just one function in the comdat group has
             address taken.  */
          for (next = node->same_comdat_group;
               next != node;
               next = next->same_comdat_group)
            if (next->address_taken || !next->analyzed)
              return true;
        }
    }
  if (MAIN_NAME_P (DECL_NAME (node->decl)))
    return true;
  if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (node->decl)))
    return true;

  return false;
}
/* Dissolve the same_comdat_group list in which NODE resides.  */

static void
dissolve_same_comdat_group_list (struct cgraph_node *node)
{
  struct cgraph_node *n = node, *next;

  do
    {
      next = n->same_comdat_group;
      n->same_comdat_group = NULL;
      n = next;
    }
  while (n != node);
}
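/* The same_comdat_group links form a circular list, which is why a do/while
   walk starting and ending at NODE visits every member exactly once.  */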
/* Mark visibility of all functions.

   A local function is one whose calls can occur only in the current
   compilation unit and all its calls are explicit, so we can change
   its calling convention.  We simply mark all static functions whose
   address is not taken as local.

   We also change the TREE_PUBLIC flag of all declarations that are public
   from the language point of view but whose default we want to overwrite
   via visibilities for the backend's point of view.  */

static unsigned int
function_and_variable_visibility (bool whole_program)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;

  for (node = cgraph_nodes; node; node = node->next)
    {
      /* The C++ FE, lacking COMDAT support, creates local COMDAT functions
         (that ought to be shared but cannot be due to object format
         limitations).  It is necessary to keep the flag to make the rest of
         the C++ FE happy.  Clear the flag here to avoid confusion in the
         middle-end.  */
      if (DECL_COMDAT (node->decl) && !TREE_PUBLIC (node->decl))
        DECL_COMDAT (node->decl) = 0;
      /* For external decls stop tracking same_comdat_group, it doesn't matter
         what comdat group they are in when they won't be emitted in this TU,
         and simplifies later passes.  */
      if (node->same_comdat_group && DECL_EXTERNAL (node->decl))
        {
#ifdef ENABLE_CHECKING
          struct cgraph_node *n;

          for (n = node->same_comdat_group;
               n != node;
               n = n->same_comdat_group)
            /* If at least one of same comdat group functions is external,
               all of them have to be, otherwise it is a front-end bug.  */
            gcc_assert (DECL_EXTERNAL (n->decl));
#endif
          dissolve_same_comdat_group_list (node);
        }
      gcc_assert ((!DECL_WEAK (node->decl) && !DECL_COMDAT (node->decl))
                  || TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl));
      if (cgraph_externally_visible_p (node, whole_program))
        {
          gcc_assert (!node->global.inlined_to);
          node->local.externally_visible = true;
        }
      else
        node->local.externally_visible = false;
      if (!node->local.externally_visible && node->analyzed
          && !DECL_EXTERNAL (node->decl))
        {
          gcc_assert (whole_program || !TREE_PUBLIC (node->decl));
          cgraph_make_decl_local (node->decl);
          if (node->same_comdat_group)
            /* cgraph_externally_visible_p has already checked all other nodes
               in the group and they will all be made local.  We need to
               dissolve the group at once so that the predicate does not
               segfault though.  */
            dissolve_same_comdat_group_list (node);
        }
      node->local.local = (cgraph_only_called_directly_p (node)
                           && node->analyzed
                           && !DECL_EXTERNAL (node->decl)
                           && !node->local.externally_visible);
    }
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    {
      /* weak flag makes no sense on local variables.  */
      gcc_assert (!DECL_WEAK (vnode->decl)
                  || TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl));
      /* In several cases declarations cannot be common:

         - when declaration has initializer
         - when it is weak
         - when it has specific section
         - when it resides in non-generic address space.
         - if declaration is local, it will get into .local common section
           so common flag is not needed.  Frontends still produce these in
           certain cases, such as for:

             static int a __attribute__ ((common))

         Canonicalize things here and clear the redundant flag.  */
      if (DECL_COMMON (vnode->decl)
          && (!(TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl))
              || (DECL_INITIAL (vnode->decl)
                  && DECL_INITIAL (vnode->decl) != error_mark_node)
              || DECL_WEAK (vnode->decl)
              || DECL_SECTION_NAME (vnode->decl) != NULL
              || ! (ADDR_SPACE_GENERIC_P
                    (TYPE_ADDR_SPACE (TREE_TYPE (vnode->decl))))))
        DECL_COMMON (vnode->decl) = 0;
    }
  for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
    {
      if (!vnode->finalized)
        continue;
      if (vnode->needed
          && (DECL_COMDAT (vnode->decl) || TREE_PUBLIC (vnode->decl))
          && (!whole_program
              /* We can privatize comdat readonly variables whose address is
                 not taken, but doing so is not going to bring us optimization
                 opportunities until we start reordering data structures.  */
              || DECL_COMDAT (vnode->decl)
              || DECL_WEAK (vnode->decl)
              || lookup_attribute ("externally_visible",
                                   DECL_ATTRIBUTES (vnode->decl))))
        vnode->externally_visible = true;
      else
        vnode->externally_visible = false;
      if (!vnode->externally_visible)
        {
          gcc_assert (whole_program || !TREE_PUBLIC (vnode->decl));
          cgraph_make_decl_local (vnode->decl);
        }
      gcc_assert (TREE_STATIC (vnode->decl));
    }
  if (dump_file)
    {
      fprintf (dump_file, "\nMarking local functions:");
      for (node = cgraph_nodes; node; node = node->next)
        if (node->local.local)
          fprintf (dump_file, " %s", cgraph_node_name (node));
      fprintf (dump_file, "\n\n");
      fprintf (dump_file, "\nMarking externally visible functions:");
      for (node = cgraph_nodes; node; node = node->next)
        if (node->local.externally_visible)
          fprintf (dump_file, " %s", cgraph_node_name (node));
      fprintf (dump_file, "\n\n");
      fprintf (dump_file, "\nMarking externally visible variables:");
      for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
        if (vnode->externally_visible)
          fprintf (dump_file, " %s", varpool_node_name (vnode));
      fprintf (dump_file, "\n\n");
    }
  cgraph_function_flags_ready = true;
  return 0;
}
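/* After this pass node->local.local identifies static functions that are only
   ever called directly, so later IPA passes may freely change their calling
   convention, while *.externally_visible records the visibility the backend
   should honor.  */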
/* Local function pass handling visibilities.  This happens before LTO
   streaming, so in particular -fwhole-program should be ignored at this
   level.  */

static unsigned int
local_function_and_variable_visibility (void)
{
  return function_and_variable_visibility (flag_whole_program && !flag_lto && !flag_whopr);
}

struct simple_ipa_opt_pass pass_ipa_function_and_variable_visibility =
{
 {
  SIMPLE_IPA_PASS,
  "visibility",				/* name */
  NULL,					/* gate */
  local_function_and_variable_visibility,/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPHOPT,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_remove_functions | TODO_dump_cgraph
  | TODO_ggc_collect			/* todo_flags_finish */
 }
};
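/* Note the split between the two visibility passes: the one above is a simple
   IPA pass run per compilation unit, while pass_ipa_whole_program_visibility
   below is a full IPA pass (its LTO summary hooks are left NULL) gated so
   that it does not re-run at the ltrans stage.  */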
/* Do not re-run on ltrans stage.  */

static bool
gate_whole_program_function_and_variable_visibility (void)
{
  return !flag_ltrans;
}
/* Bring functions local at LTO time with -fwhole-program.  */

static unsigned int
whole_program_function_and_variable_visibility (void)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;

  function_and_variable_visibility (flag_whole_program);

  for (node = cgraph_nodes; node; node = node->next)
    if ((node->local.externally_visible && !DECL_COMDAT (node->decl))
        && node->local.finalized)
      cgraph_mark_needed_node (node);
  for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
    if (vnode->externally_visible && !DECL_COMDAT (vnode->decl))
      varpool_mark_needed_node (vnode);
  if (dump_file)
    {
      fprintf (dump_file, "\nNeeded variables:");
      for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
        if (vnode->needed)
          fprintf (dump_file, " %s", varpool_node_name (vnode));
      fprintf (dump_file, "\n\n");
    }
  return 0;
}
struct ipa_opt_pass_d pass_ipa_whole_program_visibility =
{
 {
  IPA_PASS,
  "whole-program",			/* name */
  gate_whole_program_function_and_variable_visibility,/* gate */
  whole_program_function_and_variable_visibility,/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPHOPT,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_remove_functions | TODO_dump_cgraph
  | TODO_ggc_collect			/* todo_flags_finish */
 },
 NULL,					/* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,					/* function_transform */
 NULL,					/* variable_transform */
};
/* Hash a cgraph node set element.  */

static hashval_t
hash_cgraph_node_set_element (const void *p)
{
  const_cgraph_node_set_element element = (const_cgraph_node_set_element) p;
  return htab_hash_pointer (element->node);
}

/* Compare two cgraph node set elements.  */

static int
eq_cgraph_node_set_element (const void *p1, const void *p2)
{
  const_cgraph_node_set_element e1 = (const_cgraph_node_set_element) p1;
  const_cgraph_node_set_element e2 = (const_cgraph_node_set_element) p2;

  return e1->node == e2->node;
}

/* Create a new cgraph node set.  */

cgraph_node_set
cgraph_node_set_new (void)
{
  cgraph_node_set new_node_set;

  new_node_set = GGC_NEW (struct cgraph_node_set_def);
  new_node_set->hashtab = htab_create_ggc (10,
					   hash_cgraph_node_set_element,
					   eq_cgraph_node_set_element,
					   NULL);
  new_node_set->nodes = NULL;
  return new_node_set;
}
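/* A minimal usage sketch of the node-set API below; the surrounding pass
   context and error handling are omitted:

     cgraph_node_set set = cgraph_node_set_new ();
     cgraph_node_set_add (set, node);
     if (!csi_end_p (cgraph_node_set_find (set, node)))
       cgraph_node_set_remove (set, node);
     dump_cgraph_node_set (stderr, set);  */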
/* Add cgraph_node NODE to cgraph_node_set SET.  */

void
cgraph_node_set_add (cgraph_node_set set, struct cgraph_node *node)
{
  void **slot;
  cgraph_node_set_element element;
  struct cgraph_node_set_element_def dummy;

  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, INSERT);

  if (*slot != HTAB_EMPTY_ENTRY)
    {
      element = (cgraph_node_set_element) *slot;
      gcc_assert (node == element->node
		  && (VEC_index (cgraph_node_ptr, set->nodes, element->index)
		      == node));
      return;
    }

  /* Insert node into hash table.  */
  element =
    (cgraph_node_set_element) GGC_NEW (struct cgraph_node_set_element_def);
  element->node = node;
  element->index = VEC_length (cgraph_node_ptr, set->nodes);
  *slot = element;

  /* Insert into node vector.  */
  VEC_safe_push (cgraph_node_ptr, gc, set->nodes, node);
}
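/* The set keeps two parallel views of its contents: the hash table maps a
   node to its element (for constant-time membership tests) and the nodes
   vector keeps the members in order, with element->index recording the
   position within that vector.  */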
/* Remove cgraph_node NODE from cgraph_node_set SET.  */

void
cgraph_node_set_remove (cgraph_node_set set, struct cgraph_node *node)
{
  void **slot, **last_slot;
  cgraph_node_set_element element, last_element;
  struct cgraph_node *last_node;
  struct cgraph_node_set_element_def dummy;

  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
  if (slot == NULL)
    return;

  element = (cgraph_node_set_element) *slot;
  gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
	      == node);

  /* Remove from vector.  We do this by swapping node with the last element
     of the vector.  */
  last_node = VEC_pop (cgraph_node_ptr, set->nodes);
  if (last_node != node)
    {
      dummy.node = last_node;
      last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
      last_element = (cgraph_node_set_element) *last_slot;
      gcc_assert (last_element);

      /* Move the last element to the original spot of NODE.  */
      last_element->index = element->index;
      VEC_replace (cgraph_node_ptr, set->nodes, last_element->index,
		   last_node);
    }

  /* Remove element from hash table.  */
  htab_clear_slot (set->hashtab, slot);
}
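/* Swapping with the last vector element keeps removal constant-time at the
   price of not preserving insertion order after a removal.  */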
/* Find NODE in SET and return an iterator to it if found.  A null iterator
   is returned if NODE is not in SET.  */

cgraph_node_set_iterator
cgraph_node_set_find (cgraph_node_set set, struct cgraph_node *node)
{
  void **slot;
  struct cgraph_node_set_element_def dummy;
  cgraph_node_set_element element;
  cgraph_node_set_iterator csi;

  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
  if (slot == NULL)
    csi.index = (unsigned) ~0;
  else
    {
      element = (cgraph_node_set_element) *slot;
      gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index)
		  == node);
      csi.index = element->index;
    }
  csi.set = set;

  return csi;
}
/* Dump content of SET to file F.  */

void
dump_cgraph_node_set (FILE *f, cgraph_node_set set)
{
  cgraph_node_set_iterator iter;

  for (iter = csi_start (set); !csi_end_p (iter); csi_next (&iter))
    {
      struct cgraph_node *node = csi_node (iter);
      dump_cgraph_node (f, node);
    }
}

/* Dump content of SET to stderr.  */

void
debug_cgraph_node_set (cgraph_node_set set)
{
  dump_cgraph_node_set (stderr, set);
}
/* Hash a varpool node set element.  */

static hashval_t
hash_varpool_node_set_element (const void *p)
{
  const_varpool_node_set_element element = (const_varpool_node_set_element) p;
  return htab_hash_pointer (element->node);
}

/* Compare two varpool node set elements.  */

static int
eq_varpool_node_set_element (const void *p1, const void *p2)
{
  const_varpool_node_set_element e1 = (const_varpool_node_set_element) p1;
  const_varpool_node_set_element e2 = (const_varpool_node_set_element) p2;

  return e1->node == e2->node;
}

/* Create a new varpool node set.  */

varpool_node_set
varpool_node_set_new (void)
{
  varpool_node_set new_node_set;

  new_node_set = GGC_NEW (struct varpool_node_set_def);
  new_node_set->hashtab = htab_create_ggc (10,
					   hash_varpool_node_set_element,
					   eq_varpool_node_set_element,
					   NULL);
  new_node_set->nodes = NULL;
  return new_node_set;
}
/* Add varpool_node NODE to varpool_node_set SET.  */

void
varpool_node_set_add (varpool_node_set set, struct varpool_node *node)
{
  void **slot;
  varpool_node_set_element element;
  struct varpool_node_set_element_def dummy;

  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, INSERT);

  if (*slot != HTAB_EMPTY_ENTRY)
    {
      element = (varpool_node_set_element) *slot;
      gcc_assert (node == element->node
		  && (VEC_index (varpool_node_ptr, set->nodes, element->index)
		      == node));
      return;
    }

  /* Insert node into hash table.  */
  element =
    (varpool_node_set_element) GGC_NEW (struct varpool_node_set_element_def);
  element->node = node;
  element->index = VEC_length (varpool_node_ptr, set->nodes);
  *slot = element;

  /* Insert into node vector.  */
  VEC_safe_push (varpool_node_ptr, gc, set->nodes, node);
}
/* Remove varpool_node NODE from varpool_node_set SET.  */

void
varpool_node_set_remove (varpool_node_set set, struct varpool_node *node)
{
  void **slot, **last_slot;
  varpool_node_set_element element, last_element;
  struct varpool_node *last_node;
  struct varpool_node_set_element_def dummy;

  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
  if (slot == NULL)
    return;

  element = (varpool_node_set_element) *slot;
  gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
	      == node);

  /* Remove from vector.  We do this by swapping node with the last element
     of the vector.  */
  last_node = VEC_pop (varpool_node_ptr, set->nodes);
  if (last_node != node)
    {
      dummy.node = last_node;
      last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
      last_element = (varpool_node_set_element) *last_slot;
      gcc_assert (last_element);

      /* Move the last element to the original spot of NODE.  */
      last_element->index = element->index;
      VEC_replace (varpool_node_ptr, set->nodes, last_element->index,
		   last_node);
    }

  /* Remove element from hash table.  */
  htab_clear_slot (set->hashtab, slot);
}
/* Find NODE in SET and return an iterator to it if found.  A null iterator
   is returned if NODE is not in SET.  */

varpool_node_set_iterator
varpool_node_set_find (varpool_node_set set, struct varpool_node *node)
{
  void **slot;
  struct varpool_node_set_element_def dummy;
  varpool_node_set_element element;
  varpool_node_set_iterator vsi;

  dummy.node = node;
  slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT);
  if (slot == NULL)
    vsi.index = (unsigned) ~0;
  else
    {
      element = (varpool_node_set_element) *slot;
      gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index)
		  == node);
      vsi.index = element->index;
    }
  vsi.set = set;

  return vsi;
}
/* Dump content of SET to file F.  */

void
dump_varpool_node_set (FILE *f, varpool_node_set set)
{
  varpool_node_set_iterator iter;

  for (iter = vsi_start (set); !vsi_end_p (iter); vsi_next (&iter))
    {
      struct varpool_node *node = vsi_node (iter);
      dump_varpool_node (f, node);
    }
}

/* Dump content of SET to stderr.  */

void
debug_varpool_node_set (varpool_node_set set)
{
  dump_varpool_node_set (stderr, set);
}
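/* The varpool_node_set_* functions above intentionally mirror the
   cgraph_node_set_* API: the same hash-table-plus-vector representation and
   the same swap-with-last removal, specialized to varpool nodes and the
   vsi_* iterator.  */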
/* Simple ipa profile pass propagating frequencies across the callgraph.  */

static unsigned int
ipa_profile (void)
{
  struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
  struct cgraph_edge *e;
  int order_pos;
  bool something_changed = false;
  int i;

  order_pos = cgraph_postorder (order);
  for (i = order_pos - 1; i >= 0; i--)
    {
      if (order[i]->local.local && cgraph_propagate_frequency (order[i]))
        {
          for (e = order[i]->callees; e; e = e->next_callee)
            if (e->callee->local.local && !e->callee->aux)
              {
                something_changed = true;
                e->callee->aux = (void *) 1;
              }
        }
      order[i]->aux = NULL;
    }

  while (something_changed)
    {
      something_changed = false;
      for (i = order_pos - 1; i >= 0; i--)
        {
          if (order[i]->aux && cgraph_propagate_frequency (order[i]))
            {
              for (e = order[i]->callees; e; e = e->next_callee)
                if (e->callee->local.local && !e->callee->aux)
                  {
                    something_changed = true;
                    e->callee->aux = (void *) 1;
                  }
            }
          order[i]->aux = NULL;
        }
    }
  free (order);
  return 0;
}
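/* The propagation runs to a fixed point: the first reverse-postorder sweep
   flags (via AUX) the local callees of every node whose frequency
   cgraph_propagate_frequency changed, and the while loop keeps re-propagating
   from flagged nodes until no further change is reported.  */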
static bool
gate_ipa_profile (void)
{
  return flag_ipa_profile;
}
struct ipa_opt_pass_d pass_ipa_profile =
{
 {
  IPA_PASS,
  "ipa-profile",			/* name */
  gate_ipa_profile,			/* gate */
  ipa_profile,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_IPA_PROFILE,			/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 },
 NULL,					/* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,					/* function_transform */
 NULL					/* variable_transform */
};