X-Git-Url: http://git.sourceforge.jp/view?p=pf3gnuchains%2Fgcc-fork.git;a=blobdiff_plain;f=gcc%2Fipa.c;h=8eadd36ca30a6c9b7688313535d0274d1345d4aa;hp=06f838cb07d4bfbfaa5aa6d23009e17794ff28cf;hb=7717ea00902734bd90371e34af23d0b73287f875;hpb=c4d6511c84d6077f81dacbe8d7683a56cd0a07e7 diff --git a/gcc/ipa.c b/gcc/ipa.c index 06f838cb07d..8eadd36ca30 100644 --- a/gcc/ipa.c +++ b/gcc/ipa.c @@ -1,5 +1,6 @@ /* Basic IPA optimizations and utilities. - Copyright (C) 2003, 2004, 2005, 2007 Free Software Foundation, Inc. + Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010 + Free Software Foundation, Inc. This file is part of GCC. @@ -24,6 +25,12 @@ along with GCC; see the file COPYING3. If not see #include "cgraph.h" #include "tree-pass.h" #include "timevar.h" +#include "gimple.h" +#include "ggc.h" +#include "flags.h" +#include "pointer-set.h" +#include "target.h" +#include "tree-iterator.h" /* Fill array order with all nodes with output flag set in the reverse topological order. */ @@ -35,6 +42,7 @@ cgraph_postorder (struct cgraph_node **order) int stack_size = 0; int order_pos = 0; struct cgraph_edge *edge, last; + int pass; struct cgraph_node **stack = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes); @@ -42,63 +50,179 @@ cgraph_postorder (struct cgraph_node **order) /* We have to deal with cycles nicely, so use a depth first traversal output algorithm. Ignore the fact that some functions won't need to be output and put them into order as well, so we get dependencies - right through intline functions. */ + right through inline functions. 
*/ for (node = cgraph_nodes; node; node = node->next) node->aux = NULL; - for (node = cgraph_nodes; node; node = node->next) - if (!node->aux) - { - node2 = node; - if (!node->callers) - node->aux = &last; - else - node->aux = node->callers; - while (node2) - { - while (node2->aux != &last) - { - edge = (struct cgraph_edge *) node2->aux; - if (edge->next_caller) - node2->aux = edge->next_caller; - else - node2->aux = &last; - if (!edge->caller->aux) - { - if (!edge->caller->callers) - edge->caller->aux = &last; - else - edge->caller->aux = edge->caller->callers; - stack[stack_size++] = node2; - node2 = edge->caller; - break; - } - } - if (node2->aux == &last) - { - order[order_pos++] = node2; - if (stack_size) - node2 = stack[--stack_size]; - else - node2 = NULL; - } - } - } + for (pass = 0; pass < 2; pass++) + for (node = cgraph_nodes; node; node = node->next) + if (!node->aux + && (pass + || (!cgraph_only_called_directly_p (node) + && !node->address_taken))) + { + node2 = node; + if (!node->callers) + node->aux = &last; + else + node->aux = node->callers; + while (node2) + { + while (node2->aux != &last) + { + edge = (struct cgraph_edge *) node2->aux; + if (edge->next_caller) + node2->aux = edge->next_caller; + else + node2->aux = &last; + /* Break possible cycles involving always-inline + functions by ignoring edges from always-inline + functions to non-always-inline functions. 
*/ + if (edge->caller->local.disregard_inline_limits + && !edge->callee->local.disregard_inline_limits) + continue; + if (!edge->caller->aux) + { + if (!edge->caller->callers) + edge->caller->aux = &last; + else + edge->caller->aux = edge->caller->callers; + stack[stack_size++] = node2; + node2 = edge->caller; + break; + } + } + if (node2->aux == &last) + { + order[order_pos++] = node2; + if (stack_size) + node2 = stack[--stack_size]; + else + node2 = NULL; + } + } + } free (stack); for (node = cgraph_nodes; node; node = node->next) node->aux = NULL; return order_pos; } +/* Look for all functions inlined to NODE and update their inlined_to pointers + to INLINED_TO. */ + +static void +update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to) +{ + struct cgraph_edge *e; + for (e = node->callees; e; e = e->next_callee) + if (e->callee->global.inlined_to) + { + e->callee->global.inlined_to = inlined_to; + update_inlined_to_pointer (e->callee, inlined_to); + } +} + +/* Add cgraph NODE to queue starting at FIRST. + + The queue is linked via AUX pointers and terminated by pointer to 1. + We enqueue nodes at two occasions: when we find them reachable or when we find + their bodies needed for further clonning. In the second case we mark them + by pointer to 2 after processing so they are re-queue when they become + reachable. */ + +static void +enqueue_cgraph_node (struct cgraph_node *node, struct cgraph_node **first) +{ + /* Node is still in queue; do nothing. */ + if (node->aux && node->aux != (void *) 2) + return; + /* Node was already processed as unreachable, re-enqueue + only if it became reachable now. */ + if (node->aux == (void *)2 && !node->reachable) + return; + node->aux = *first; + *first = node; +} + +/* Add varpool NODE to queue starting at FIRST. */ + +static void +enqueue_varpool_node (struct varpool_node *node, struct varpool_node **first) +{ + node->aux = *first; + *first = node; +} + +/* Process references. 
*/ + +static void +process_references (struct ipa_ref_list *list, + struct cgraph_node **first, + struct varpool_node **first_varpool, + bool before_inlining_p) +{ + int i; + struct ipa_ref *ref; + for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++) + { + if (ref->refered_type == IPA_REF_CGRAPH) + { + struct cgraph_node *node = ipa_ref_node (ref); + if (!node->reachable + && (!DECL_EXTERNAL (node->decl) + || before_inlining_p)) + { + node->reachable = true; + enqueue_cgraph_node (node, first); + } + } + else + { + struct varpool_node *node = ipa_ref_varpool_node (ref); + if (!node->needed) + { + varpool_mark_needed_node (node); + enqueue_varpool_node (node, first_varpool); + } + } + } +} + +/* Return true when function NODE can be removed from callgraph + if all direct calls are eliminated. */ + +static inline bool +varpool_can_remove_if_no_refs (struct varpool_node *node) +{ + return (!node->force_output && !node->used_from_other_partition + && (DECL_COMDAT (node->decl) || !node->externally_visible)); +} + +/* Return true when function can be marked local. */ + +static bool +cgraph_local_node_p (struct cgraph_node *node) +{ + return (cgraph_only_called_directly_p (node) + && node->analyzed + && !DECL_EXTERNAL (node->decl) + && !node->local.externally_visible + && !node->reachable_from_other_partition + && !node->in_other_partition); +} + /* Perform reachability analysis and reclaim all unreachable nodes. If BEFORE_INLINING_P is true this function is called before inlining - decisions has been made. If BEFORE_INLINING_P is false this function also + decisions has been made. If BEFORE_INLINING_P is false this function also removes unneeded bodies of extern inline functions. 
*/ bool cgraph_remove_unreachable_nodes (bool before_inlining_p, FILE *file) { struct cgraph_node *first = (struct cgraph_node *) (void *) 1; + struct varpool_node *first_varpool = (struct varpool_node *) (void *) 1; struct cgraph_node *node, *next; + struct varpool_node *vnode, *vnext; bool changed = false; #ifdef ENABLE_CHECKING @@ -109,82 +233,205 @@ cgraph_remove_unreachable_nodes (bool before_inlining_p, FILE *file) #ifdef ENABLE_CHECKING for (node = cgraph_nodes; node; node = node->next) gcc_assert (!node->aux); + for (vnode = varpool_nodes; vnode; vnode = vnode->next) + gcc_assert (!vnode->aux); #endif + varpool_reset_queue (); for (node = cgraph_nodes; node; node = node->next) - if (node->needed && !node->global.inlined_to - && ((!DECL_EXTERNAL (node->decl)) - || !node->analyzed + if ((!cgraph_can_remove_if_no_direct_calls_and_refs_p (node) + /* Keep around virtual functions for possible devirtualization. */ + || (!before_inlining_p + && !node->global.inlined_to + && DECL_VIRTUAL_P (node->decl) + && (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl)))) + && ((!DECL_EXTERNAL (node->decl)) || before_inlining_p)) { - node->aux = first; - first = node; + gcc_assert (!node->global.inlined_to); + enqueue_cgraph_node (node, &first); + node->reachable = true; } else - gcc_assert (!node->aux); + { + gcc_assert (!node->aux); + node->reachable = false; + } + for (vnode = varpool_nodes; vnode; vnode = vnode->next) + { + vnode->next_needed = NULL; + vnode->prev_needed = NULL; + if (!varpool_can_remove_if_no_refs (vnode)) + { + vnode->needed = false; + varpool_mark_needed_node (vnode); + enqueue_varpool_node (vnode, &first_varpool); + } + else + vnode->needed = false; + } /* Perform reachability analysis. As a special case do not consider extern inline functions not inlined as live because we won't output - them at all. */ - while (first != (void *) 1) + them at all. 
+ + We maintain two worklist, one for cgraph nodes other for varpools and + are finished once both are empty. */ + + while (first != (struct cgraph_node *) (void *) 1 + || first_varpool != (struct varpool_node *) (void *) 1) { - struct cgraph_edge *e; - node = first; - first = (struct cgraph_node *) first->aux; - - for (e = node->callees; e; e = e->next_callee) - if (!e->callee->aux - && node->analyzed - && (!e->inline_failed || !e->callee->analyzed - || (!DECL_EXTERNAL (e->callee->decl)) - || before_inlining_p)) - { - e->callee->aux = first; - first = e->callee; - } + if (first != (struct cgraph_node *) (void *) 1) + { + struct cgraph_edge *e; + node = first; + first = (struct cgraph_node *) first->aux; + if (!node->reachable) + node->aux = (void *)2; + + /* If we found this node reachable, first mark on the callees + reachable too, unless they are direct calls to extern inline functions + we decided to not inline. */ + if (node->reachable) + { + for (e = node->callees; e; e = e->next_callee) + if (!e->callee->reachable + && node->analyzed + && (!e->inline_failed || !e->callee->analyzed + || (!DECL_EXTERNAL (e->callee->decl)) + || before_inlining_p)) + { + e->callee->reachable = true; + enqueue_cgraph_node (e->callee, &first); + } + process_references (&node->ref_list, &first, &first_varpool, before_inlining_p); + } + + /* If any function in a comdat group is reachable, force + all other functions in the same comdat group to be + also reachable. */ + if (node->same_comdat_group + && node->reachable + && !node->global.inlined_to) + { + for (next = node->same_comdat_group; + next != node; + next = next->same_comdat_group) + if (!next->reachable) + { + next->reachable = true; + enqueue_cgraph_node (next, &first); + } + } + + /* We can freely remove inline clones even if they are cloned, however if + function is clone of real clone, we must keep it around in order to + make materialize_clones produce function body with the changes + applied. 
*/ + while (node->clone_of && !node->clone_of->aux + && !gimple_has_body_p (node->decl)) + { + bool noninline = node->clone_of->decl != node->decl; + node = node->clone_of; + if (noninline && !node->reachable && !node->aux) + { + enqueue_cgraph_node (node, &first); + break; + } + } + } + if (first_varpool != (struct varpool_node *) (void *) 1) + { + vnode = first_varpool; + first_varpool = (struct varpool_node *)first_varpool->aux; + vnode->aux = NULL; + process_references (&vnode->ref_list, &first, &first_varpool, before_inlining_p); + /* If any function in a comdat group is reachable, force + all other functions in the same comdat group to be + also reachable. */ + if (vnode->same_comdat_group) + { + struct varpool_node *next; + for (next = vnode->same_comdat_group; + next != vnode; + next = next->same_comdat_group) + if (!next->needed) + { + varpool_mark_needed_node (next); + enqueue_varpool_node (next, &first_varpool); + } + } + } } - /* Remove unreachable nodes. Extern inline functions need special care; - Unreachable extern inline functions shall be removed. - Reachable extern inline functions we never inlined shall get their bodies - eliminated. - Reachable extern inline functions we sometimes inlined will be turned into - unanalyzed nodes so they look like for true extern functions to the rest - of code. Body of such functions is released via remove_node once the - inline clones are eliminated. */ + /* Remove unreachable nodes. + + Completely unreachable functions can be fully removed from the callgraph. + Extern inline functions that we decided to not inline need to become unanalyzed nodes of + callgraph (so we still have edges to them). We remove function body then. + + Also we need to care functions that are unreachable but we need to keep them around + for later clonning. In this case we also turn them to unanalyzed nodes, but + keep the body around. 
*/ for (node = cgraph_nodes; node; node = next) { next = node->next; + if (node->aux && !node->reachable) + { + cgraph_node_remove_callees (node); + ipa_remove_all_references (&node->ref_list); + node->analyzed = false; + node->local.inlinable = false; + } if (!node->aux) { node->global.inlined_to = NULL; if (file) fprintf (file, " %s", cgraph_node_name (node)); - if (!node->analyzed || !DECL_EXTERNAL (node->decl) - || before_inlining_p) + if (!node->analyzed || !DECL_EXTERNAL (node->decl) || before_inlining_p) cgraph_remove_node (node); else { struct cgraph_edge *e; + /* See if there is reachable caller. */ for (e = node->callers; e; e = e->next_caller) - if (e->caller->aux) + if (e->caller->reachable) break; + + /* If so, we need to keep node in the callgraph. */ if (e || node->needed) { struct cgraph_node *clone; - for (clone = node->next_clone; clone; - clone = clone->next_clone) + /* If there are still clones, we must keep body around. + Otherwise we can just remove the body but keep the clone. 
*/ + for (clone = node->clones; clone; + clone = clone->next_sibling_clone) if (clone->aux) break; if (!clone) { cgraph_release_function_body (node); - node->analyzed = false; + node->local.inlinable = false; + if (node->prev_sibling_clone) + node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone; + else if (node->clone_of) + node->clone_of->clones = node->next_sibling_clone; + if (node->next_sibling_clone) + node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone; +#ifdef ENABLE_CHECKING + if (node->clone_of) + node->former_clone_of = node->clone_of->decl; +#endif + node->clone_of = NULL; + node->next_sibling_clone = NULL; + node->prev_sibling_clone = NULL; } - cgraph_node_remove_callees (node); + else + gcc_assert (!clone->in_other_partition); node->analyzed = false; - node->local.inlinable = false; + cgraph_node_remove_callees (node); + ipa_remove_all_references (&node->ref_list); } else cgraph_remove_node (node); @@ -193,13 +440,216 @@ cgraph_remove_unreachable_nodes (bool before_inlining_p, FILE *file) } } for (node = cgraph_nodes; node; node = node->next) - node->aux = NULL; + { + /* Inline clones might be kept around so their materializing allows further + cloning. If the function the clone is inlined into is removed, we need + to turn it into normal cone. */ + if (node->global.inlined_to + && !node->callers) + { + gcc_assert (node->clones); + node->global.inlined_to = NULL; + update_inlined_to_pointer (node, node); + } + node->aux = NULL; + } + + if (file) + fprintf (file, "\n"); + + /* We must release unused extern inlines or sanity checking will fail. Rest of transformations + are undesirable at -O0 since we do not want to remove anything. 
*/ + if (!optimize) + return changed; + + if (file) + fprintf (file, "Reclaiming variables:"); + for (vnode = varpool_nodes; vnode; vnode = vnext) + { + vnext = vnode->next; + if (!vnode->needed) + { + if (file) + fprintf (file, " %s", varpool_node_name (vnode)); + varpool_remove_node (vnode); + changed = true; + } + } + + /* Now update address_taken flags and try to promote functions to be local. */ + + if (file) + fprintf (file, "\nClearing address taken flags:"); + for (node = cgraph_nodes; node; node = node->next) + if (node->address_taken + && !node->reachable_from_other_partition) + { + int i; + struct ipa_ref *ref; + bool found = false; + for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref) + && !found; i++) + { + gcc_assert (ref->use == IPA_REF_ADDR); + found = true; + } + if (!found) + { + if (file) + fprintf (file, " %s", cgraph_node_name (node)); + node->address_taken = false; + changed = true; + if (cgraph_local_node_p (node)) + { + node->local.local = true; + if (file) + fprintf (file, " (local)"); + } + } + } + #ifdef ENABLE_CHECKING verify_cgraph (); #endif + + /* Reclaim alias pairs for functions that have disappeared from the + call graph. */ + remove_unreachable_alias_pairs (); + return changed; } +/* Discover variables that have no longer address taken or that are read only + and update their flags. + + FIXME: This can not be done in between gimplify and omp_expand since + readonly flag plays role on what is shared and what is not. Currently we do + this transformation as part of whole program visibility and re-do at + ipa-reference pass (to take into account clonning), but it would + make sense to do it before early optimizations. 
*/ + +void +ipa_discover_readonly_nonaddressable_vars (void) +{ + struct varpool_node *vnode; + if (dump_file) + fprintf (dump_file, "Clearing variable flags:"); + for (vnode = varpool_nodes; vnode; vnode = vnode->next) + if (vnode->finalized && varpool_all_refs_explicit_p (vnode) + && (TREE_ADDRESSABLE (vnode->decl) || !TREE_READONLY (vnode->decl))) + { + bool written = false; + bool address_taken = false; + int i; + struct ipa_ref *ref; + for (i = 0; ipa_ref_list_refering_iterate (&vnode->ref_list, i, ref) + && (!written || !address_taken); i++) + switch (ref->use) + { + case IPA_REF_ADDR: + address_taken = true; + break; + case IPA_REF_LOAD: + break; + case IPA_REF_STORE: + written = true; + break; + } + if (TREE_ADDRESSABLE (vnode->decl) && !address_taken) + { + if (dump_file) + fprintf (dump_file, " %s (addressable)", varpool_node_name (vnode)); + TREE_ADDRESSABLE (vnode->decl) = 0; + } + if (!TREE_READONLY (vnode->decl) && !address_taken && !written + /* Making variable in explicit section readonly can cause section + type conflict. + See e.g. gcc.c-torture/compile/pr23237.c */ + && DECL_SECTION_NAME (vnode->decl) == NULL) + { + if (dump_file) + fprintf (dump_file, " %s (read-only)", varpool_node_name (vnode)); + TREE_READONLY (vnode->decl) = 1; + } + } + if (dump_file) + fprintf (dump_file, "\n"); +} + +/* Return true when function NODE should be considered externally visible. */ + +static bool +cgraph_externally_visible_p (struct cgraph_node *node, bool whole_program, bool aliased) +{ + if (!node->local.finalized) + return false; + if (!DECL_COMDAT (node->decl) + && (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl))) + return false; + + /* Do not even try to be smart about aliased nodes. Until we properly + represent everything by same body alias, these are just evil. */ + if (aliased) + return true; + + /* If linker counts on us, we must preserve the function. 
*/ + if (cgraph_used_from_object_file_p (node)) + return true; + /* When doing link time optimizations, hidden symbols become local. */ + if (in_lto_p + && (DECL_VISIBILITY (node->decl) == VISIBILITY_HIDDEN + || DECL_VISIBILITY (node->decl) == VISIBILITY_INTERNAL) + /* Be sure that node is defined in IR file, not in other object + file. In that case we don't set used_from_other_object_file. */ + && node->analyzed) + ; + else if (!whole_program) + return true; + /* COMDAT functions must be shared only if they have address taken, + otherwise we can produce our own private implementation with + -fwhole-program. */ + else if (DECL_COMDAT (node->decl)) + { + if (node->address_taken || !node->analyzed) + return true; + if (node->same_comdat_group) + { + struct cgraph_node *next; + + /* If more than one function is in the same COMDAT group, it must + be shared even if just one function in the comdat group has + address taken. */ + for (next = node->same_comdat_group; + next != node; + next = next->same_comdat_group) + if (next->address_taken || !next->analyzed) + return true; + } + } + if (DECL_PRESERVE_P (node->decl)) + return true; + if (MAIN_NAME_P (DECL_NAME (node->decl))) + return true; + if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (node->decl))) + return true; + return false; +} + +/* Dissolve the same_comdat_group list in which NODE resides. */ + +static void +dissolve_same_comdat_group_list (struct cgraph_node *node) +{ + struct cgraph_node *n = node, *next; + do + { + next = n->same_comdat_group; + n->same_comdat_group = NULL; + n = next; + } + while (n != node); +} + /* Mark visibility of all functions. A local function is one whose calls can occur only in the current @@ -212,42 +662,164 @@ cgraph_remove_unreachable_nodes (bool before_inlining_p, FILE *file) via visibilities for the backend point of view. 
*/ static unsigned int -function_and_variable_visibility (void) +function_and_variable_visibility (bool whole_program) { struct cgraph_node *node; struct varpool_node *vnode; + struct pointer_set_t *aliased_nodes = pointer_set_create (); + struct pointer_set_t *aliased_vnodes = pointer_set_create (); + unsigned i; + alias_pair *p; + + /* Discover aliased nodes. */ + FOR_EACH_VEC_ELT (alias_pair, alias_pairs, i, p) + { + if (dump_file) + fprintf (dump_file, "Alias %s->%s", + IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (p->decl)), + IDENTIFIER_POINTER (p->target)); + + if ((node = cgraph_node_for_asm (p->target)) != NULL) + { + gcc_assert (node->needed); + pointer_set_insert (aliased_nodes, node); + if (dump_file) + fprintf (dump_file, " node %s/%i", + cgraph_node_name (node), node->uid); + } + else if ((vnode = varpool_node_for_asm (p->target)) != NULL) + { + gcc_assert (vnode->needed); + pointer_set_insert (aliased_vnodes, vnode); + if (dump_file) + fprintf (dump_file, " varpool node %s", + varpool_node_name (vnode)); + } + if (dump_file) + fprintf (dump_file, "\n"); + } for (node = cgraph_nodes; node; node = node->next) { - if (node->reachable - && (DECL_COMDAT (node->decl) - || (!flag_whole_program - && TREE_PUBLIC (node->decl) && !DECL_EXTERNAL (node->decl)))) - node->local.externally_visible = true; + /* C++ FE on lack of COMDAT support create local COMDAT functions + (that ought to be shared but can not due to object format + limitations). It is neccesary to keep the flag to make rest of C++ FE + happy. Clear the flag here to avoid confusion in middle-end. */ + if (DECL_COMDAT (node->decl) && !TREE_PUBLIC (node->decl)) + DECL_COMDAT (node->decl) = 0; + /* For external decls stop tracking same_comdat_group, it doesn't matter + what comdat group they are in when they won't be emitted in this TU, + and simplifies later passes. 
*/ + if (node->same_comdat_group && DECL_EXTERNAL (node->decl)) + { +#ifdef ENABLE_CHECKING + struct cgraph_node *n; + + for (n = node->same_comdat_group; + n != node; + n = n->same_comdat_group) + /* If at least one of same comdat group functions is external, + all of them have to be, otherwise it is a front-end bug. */ + gcc_assert (DECL_EXTERNAL (n->decl)); +#endif + dissolve_same_comdat_group_list (node); + } + gcc_assert ((!DECL_WEAK (node->decl) && !DECL_COMDAT (node->decl)) + || TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl)); + if (cgraph_externally_visible_p (node, whole_program, + pointer_set_contains (aliased_nodes, + node))) + { + gcc_assert (!node->global.inlined_to); + node->local.externally_visible = true; + } + else + node->local.externally_visible = false; if (!node->local.externally_visible && node->analyzed && !DECL_EXTERNAL (node->decl)) { - gcc_assert (flag_whole_program || !TREE_PUBLIC (node->decl)); - TREE_PUBLIC (node->decl) = 0; + struct cgraph_node *alias; + gcc_assert (whole_program || in_lto_p || !TREE_PUBLIC (node->decl)); + cgraph_make_decl_local (node->decl); + node->resolution = LDPR_PREVAILING_DEF_IRONLY; + for (alias = node->same_body; alias; alias = alias->next) + cgraph_make_decl_local (alias->decl); + if (node->same_comdat_group) + /* cgraph_externally_visible_p has already checked all other nodes + in the group and they will all be made local. We need to + dissolve the group at once so that the predicate does not + segfault though. */ + dissolve_same_comdat_group_list (node); } - node->local.local = (!node->needed - && node->analyzed - && !DECL_EXTERNAL (node->decl) - && !node->local.externally_visible); + node->local.local = cgraph_local_node_p (node); + } + for (vnode = varpool_nodes; vnode; vnode = vnode->next) + { + /* weak flag makes no sense on local variables. 
*/ + gcc_assert (!DECL_WEAK (vnode->decl) + || TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl)); + /* In several cases declarations can not be common: + + - when declaration has initializer + - when it is in weak + - when it has specific section + - when it resides in non-generic address space. + - if declaration is local, it will get into .local common section + so common flag is not needed. Frontends still produce these in + certain cases, such as for: + + static int a __attribute__ ((common)) + + Canonicalize things here and clear the redundant flag. */ + if (DECL_COMMON (vnode->decl) + && (!(TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl)) + || (DECL_INITIAL (vnode->decl) + && DECL_INITIAL (vnode->decl) != error_mark_node) + || DECL_WEAK (vnode->decl) + || DECL_SECTION_NAME (vnode->decl) != NULL + || ! (ADDR_SPACE_GENERIC_P + (TYPE_ADDR_SPACE (TREE_TYPE (vnode->decl)))))) + DECL_COMMON (vnode->decl) = 0; } for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed) { + if (!vnode->finalized) + continue; if (vnode->needed - && !flag_whole_program - && (DECL_COMDAT (vnode->decl) || TREE_PUBLIC (vnode->decl))) - vnode->externally_visible = 1; + && (DECL_COMDAT (vnode->decl) || TREE_PUBLIC (vnode->decl)) + && (((!whole_program + /* We can privatize comdat readonly variables whose address is + not taken, but doing so is not going to bring us + optimization oppurtunities until we start reordering + datastructures. */ + || DECL_COMDAT (vnode->decl) + || DECL_WEAK (vnode->decl)) + /* When doing linktime optimizations, all hidden symbols will + become local. */ + && (!in_lto_p + || (DECL_VISIBILITY (vnode->decl) != VISIBILITY_HIDDEN + && DECL_VISIBILITY (vnode->decl) != VISIBILITY_INTERNAL) + /* We can get prevailing decision in other object file. + In this case we do not sed used_from_object_file. 
*/ + || !vnode->finalized)) + || DECL_PRESERVE_P (vnode->decl) + || varpool_used_from_object_file_p (vnode) + || pointer_set_contains (aliased_vnodes, vnode) + || lookup_attribute ("externally_visible", + DECL_ATTRIBUTES (vnode->decl)))) + vnode->externally_visible = true; + else + vnode->externally_visible = false; if (!vnode->externally_visible) { - gcc_assert (flag_whole_program || !TREE_PUBLIC (vnode->decl)); - TREE_PUBLIC (vnode->decl) = 0; + gcc_assert (in_lto_p || whole_program || !TREE_PUBLIC (vnode->decl)); + cgraph_make_decl_local (vnode->decl); + vnode->resolution = LDPR_PREVAILING_DEF_IRONLY; } gcc_assert (TREE_STATIC (vnode->decl)); } + pointer_set_destroy (aliased_nodes); + pointer_set_destroy (aliased_vnodes); if (dump_file) { @@ -261,18 +833,32 @@ function_and_variable_visibility (void) if (node->local.externally_visible) fprintf (dump_file, " %s", cgraph_node_name (node)); fprintf (dump_file, "\n\n"); + fprintf (dump_file, "\nMarking externally visible variables:"); + for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed) + if (vnode->externally_visible) + fprintf (dump_file, " %s", varpool_node_name (vnode)); + fprintf (dump_file, "\n\n"); } cgraph_function_flags_ready = true; return 0; } -struct simple_ipa_opt_pass pass_ipa_function_and_variable_visibility = +/* Local function pass handling visibilities. This happens before LTO streaming + so in particular -fwhole-program should be ignored at this level. 
*/ + +static unsigned int +local_function_and_variable_visibility (void) +{ + return function_and_variable_visibility (flag_whole_program && !flag_lto && !flag_whopr); +} + +struct simple_ipa_opt_pass pass_ipa_function_and_variable_visibility = { { SIMPLE_IPA_PASS, "visibility", /* name */ NULL, /* gate */ - function_and_variable_visibility, /* execute */ + local_function_and_variable_visibility,/* execute */ NULL, /* sub */ NULL, /* next */ 0, /* static_pass_number */ @@ -281,6 +867,770 @@ struct simple_ipa_opt_pass pass_ipa_function_and_variable_visibility = 0, /* properties_provided */ 0, /* properties_destroyed */ 0, /* todo_flags_start */ - TODO_remove_functions | TODO_dump_cgraph/* todo_flags_finish */ + TODO_remove_functions | TODO_dump_cgraph + | TODO_ggc_collect /* todo_flags_finish */ } }; + +/* Do not re-run on ltrans stage. */ + +static bool +gate_whole_program_function_and_variable_visibility (void) +{ + return !flag_ltrans; +} + +/* Bring functionss local at LTO time whith -fwhole-program. 
*/ + +static unsigned int +whole_program_function_and_variable_visibility (void) +{ + struct cgraph_node *node; + struct varpool_node *vnode; + + function_and_variable_visibility (flag_whole_program); + + for (node = cgraph_nodes; node; node = node->next) + if ((node->local.externally_visible && !DECL_COMDAT (node->decl)) + && node->local.finalized) + cgraph_mark_needed_node (node); + for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed) + if (vnode->externally_visible && !DECL_COMDAT (vnode->decl)) + varpool_mark_needed_node (vnode); + if (dump_file) + { + fprintf (dump_file, "\nNeeded variables:"); + for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed) + if (vnode->needed) + fprintf (dump_file, " %s", varpool_node_name (vnode)); + fprintf (dump_file, "\n\n"); + } + if (optimize) + ipa_discover_readonly_nonaddressable_vars (); + return 0; +} + +struct ipa_opt_pass_d pass_ipa_whole_program_visibility = +{ + { + IPA_PASS, + "whole-program", /* name */ + gate_whole_program_function_and_variable_visibility,/* gate */ + whole_program_function_and_variable_visibility,/* execute */ + NULL, /* sub */ + NULL, /* next */ + 0, /* static_pass_number */ + TV_CGRAPHOPT, /* tv_id */ + 0, /* properties_required */ + 0, /* properties_provided */ + 0, /* properties_destroyed */ + 0, /* todo_flags_start */ + TODO_remove_functions | TODO_dump_cgraph + | TODO_ggc_collect /* todo_flags_finish */ + }, + NULL, /* generate_summary */ + NULL, /* write_summary */ + NULL, /* read_summary */ + NULL, /* write_optimization_summary */ + NULL, /* read_optimization_summary */ + NULL, /* stmt_fixup */ + 0, /* TODOs */ + NULL, /* function_transform */ + NULL, /* variable_transform */ +}; + +/* Hash a cgraph node set element. */ + +static hashval_t +hash_cgraph_node_set_element (const void *p) +{ + const_cgraph_node_set_element element = (const_cgraph_node_set_element) p; + return htab_hash_pointer (element->node); +} + +/* Compare two cgraph node set elements. 
*/ + +static int +eq_cgraph_node_set_element (const void *p1, const void *p2) +{ + const_cgraph_node_set_element e1 = (const_cgraph_node_set_element) p1; + const_cgraph_node_set_element e2 = (const_cgraph_node_set_element) p2; + + return e1->node == e2->node; +} + +/* Create a new cgraph node set. */ + +cgraph_node_set +cgraph_node_set_new (void) +{ + cgraph_node_set new_node_set; + + new_node_set = ggc_alloc_cgraph_node_set_def (); + new_node_set->hashtab = htab_create_ggc (10, + hash_cgraph_node_set_element, + eq_cgraph_node_set_element, + NULL); + new_node_set->nodes = NULL; + return new_node_set; +} + +/* Add cgraph_node NODE to cgraph_node_set SET. */ + +void +cgraph_node_set_add (cgraph_node_set set, struct cgraph_node *node) +{ + void **slot; + cgraph_node_set_element element; + struct cgraph_node_set_element_def dummy; + + dummy.node = node; + slot = htab_find_slot (set->hashtab, &dummy, INSERT); + + if (*slot != HTAB_EMPTY_ENTRY) + { + element = (cgraph_node_set_element) *slot; + gcc_assert (node == element->node + && (VEC_index (cgraph_node_ptr, set->nodes, element->index) + == node)); + return; + } + + /* Insert node into hash table. */ + element = ggc_alloc_cgraph_node_set_element_def (); + element->node = node; + element->index = VEC_length (cgraph_node_ptr, set->nodes); + *slot = element; + + /* Insert into node vector. */ + VEC_safe_push (cgraph_node_ptr, gc, set->nodes, node); +} + +/* Remove cgraph_node NODE from cgraph_node_set SET. */ + +void +cgraph_node_set_remove (cgraph_node_set set, struct cgraph_node *node) +{ + void **slot, **last_slot; + cgraph_node_set_element element, last_element; + struct cgraph_node *last_node; + struct cgraph_node_set_element_def dummy; + + dummy.node = node; + slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT); + if (slot == NULL) + return; + + element = (cgraph_node_set_element) *slot; + gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index) + == node); + + /* Remove from vector. 
We do this by swapping node with the last element + of the vector. */ + last_node = VEC_pop (cgraph_node_ptr, set->nodes); + if (last_node != node) + { + dummy.node = last_node; + last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT); + last_element = (cgraph_node_set_element) *last_slot; + gcc_assert (last_element); + + /* Move the last element to the original spot of NODE. */ + last_element->index = element->index; + VEC_replace (cgraph_node_ptr, set->nodes, last_element->index, + last_node); + } + + /* Remove element from hash table. */ + htab_clear_slot (set->hashtab, slot); + ggc_free (element); +} + +/* Find NODE in SET and return an iterator to it if found. A null iterator + is returned if NODE is not in SET. */ + +cgraph_node_set_iterator +cgraph_node_set_find (cgraph_node_set set, struct cgraph_node *node) +{ + void **slot; + struct cgraph_node_set_element_def dummy; + cgraph_node_set_element element; + cgraph_node_set_iterator csi; + + dummy.node = node; + slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT); + if (slot == NULL) + csi.index = (unsigned) ~0; + else + { + element = (cgraph_node_set_element) *slot; + gcc_assert (VEC_index (cgraph_node_ptr, set->nodes, element->index) + == node); + csi.index = element->index; + } + csi.set = set; + + return csi; +} + +/* Dump content of SET to file F. */ + +void +dump_cgraph_node_set (FILE *f, cgraph_node_set set) +{ + cgraph_node_set_iterator iter; + + for (iter = csi_start (set); !csi_end_p (iter); csi_next (&iter)) + { + struct cgraph_node *node = csi_node (iter); + fprintf (f, " %s/%i", cgraph_node_name (node), node->uid); + } + fprintf (f, "\n"); +} + +/* Dump content of SET to stderr. */ + +DEBUG_FUNCTION void +debug_cgraph_node_set (cgraph_node_set set) +{ + dump_cgraph_node_set (stderr, set); +} + +/* Hash a varpool node set element. 
*/ + +static hashval_t +hash_varpool_node_set_element (const void *p) +{ + const_varpool_node_set_element element = (const_varpool_node_set_element) p; + return htab_hash_pointer (element->node); +} + +/* Compare two varpool node set elements. */ + +static int +eq_varpool_node_set_element (const void *p1, const void *p2) +{ + const_varpool_node_set_element e1 = (const_varpool_node_set_element) p1; + const_varpool_node_set_element e2 = (const_varpool_node_set_element) p2; + + return e1->node == e2->node; +} + +/* Create a new varpool node set. */ + +varpool_node_set +varpool_node_set_new (void) +{ + varpool_node_set new_node_set; + + new_node_set = ggc_alloc_varpool_node_set_def (); + new_node_set->hashtab = htab_create_ggc (10, + hash_varpool_node_set_element, + eq_varpool_node_set_element, + NULL); + new_node_set->nodes = NULL; + return new_node_set; +} + +/* Add varpool_node NODE to varpool_node_set SET. */ + +void +varpool_node_set_add (varpool_node_set set, struct varpool_node *node) +{ + void **slot; + varpool_node_set_element element; + struct varpool_node_set_element_def dummy; + + dummy.node = node; + slot = htab_find_slot (set->hashtab, &dummy, INSERT); + + if (*slot != HTAB_EMPTY_ENTRY) + { + element = (varpool_node_set_element) *slot; + gcc_assert (node == element->node + && (VEC_index (varpool_node_ptr, set->nodes, element->index) + == node)); + return; + } + + /* Insert node into hash table. */ + element = ggc_alloc_varpool_node_set_element_def (); + element->node = node; + element->index = VEC_length (varpool_node_ptr, set->nodes); + *slot = element; + + /* Insert into node vector. */ + VEC_safe_push (varpool_node_ptr, gc, set->nodes, node); +} + +/* Remove varpool_node NODE from varpool_node_set SET. 
*/ + +void +varpool_node_set_remove (varpool_node_set set, struct varpool_node *node) +{ + void **slot, **last_slot; + varpool_node_set_element element, last_element; + struct varpool_node *last_node; + struct varpool_node_set_element_def dummy; + + dummy.node = node; + slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT); + if (slot == NULL) + return; + + element = (varpool_node_set_element) *slot; + gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index) + == node); + + /* Remove from vector. We do this by swapping node with the last element + of the vector. */ + last_node = VEC_pop (varpool_node_ptr, set->nodes); + if (last_node != node) + { + dummy.node = last_node; + last_slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT); + last_element = (varpool_node_set_element) *last_slot; + gcc_assert (last_element); + + /* Move the last element to the original spot of NODE. */ + last_element->index = element->index; + VEC_replace (varpool_node_ptr, set->nodes, last_element->index, + last_node); + } + + /* Remove element from hash table. */ + htab_clear_slot (set->hashtab, slot); + ggc_free (element); +} + +/* Find NODE in SET and return an iterator to it if found. A null iterator + is returned if NODE is not in SET. */ + +varpool_node_set_iterator +varpool_node_set_find (varpool_node_set set, struct varpool_node *node) +{ + void **slot; + struct varpool_node_set_element_def dummy; + varpool_node_set_element element; + varpool_node_set_iterator vsi; + + dummy.node = node; + slot = htab_find_slot (set->hashtab, &dummy, NO_INSERT); + if (slot == NULL) + vsi.index = (unsigned) ~0; + else + { + element = (varpool_node_set_element) *slot; + gcc_assert (VEC_index (varpool_node_ptr, set->nodes, element->index) + == node); + vsi.index = element->index; + } + vsi.set = set; + + return vsi; +} + +/* Dump content of SET to file F. 
*/ + +void +dump_varpool_node_set (FILE *f, varpool_node_set set) +{ + varpool_node_set_iterator iter; + + for (iter = vsi_start (set); !vsi_end_p (iter); vsi_next (&iter)) + { + struct varpool_node *node = vsi_node (iter); + fprintf (f, " %s", varpool_node_name (node)); + } + fprintf (f, "\n"); +} + +/* Dump content of SET to stderr. */ + +DEBUG_FUNCTION void +debug_varpool_node_set (varpool_node_set set) +{ + dump_varpool_node_set (stderr, set); +} + + +/* Simple ipa profile pass propagating frequencies across the callgraph. */ + +static unsigned int +ipa_profile (void) +{ + struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes); + struct cgraph_edge *e; + int order_pos; + bool something_changed = false; + int i; + + order_pos = cgraph_postorder (order); + for (i = order_pos - 1; i >= 0; i--) + { + if (order[i]->local.local && cgraph_propagate_frequency (order[i])) + { + for (e = order[i]->callees; e; e = e->next_callee) + if (e->callee->local.local && !e->callee->aux) + { + something_changed = true; + e->callee->aux = (void *)1; + } + } + order[i]->aux = NULL; + } + + while (something_changed) + { + something_changed = false; + for (i = order_pos - 1; i >= 0; i--) + { + if (order[i]->aux && cgraph_propagate_frequency (order[i])) + { + for (e = order[i]->callees; e; e = e->next_callee) + if (e->callee->local.local && !e->callee->aux) + { + something_changed = true; + e->callee->aux = (void *)1; + } + } + order[i]->aux = NULL; + } + } + free (order); + return 0; +} + +static bool +gate_ipa_profile (void) +{ + return flag_ipa_profile; +} + +struct ipa_opt_pass_d pass_ipa_profile = +{ + { + IPA_PASS, + "ipa-profile", /* name */ + gate_ipa_profile, /* gate */ + ipa_profile, /* execute */ + NULL, /* sub */ + NULL, /* next */ + 0, /* static_pass_number */ + TV_IPA_PROFILE, /* tv_id */ + 0, /* properties_required */ + 0, /* properties_provided */ + 0, /* properties_destroyed */ + 0, /* todo_flags_start */ + 0 /* todo_flags_finish */ + }, + NULL, /* 
generate_summary */ + NULL, /* write_summary */ + NULL, /* read_summary */ + NULL, /* write_optimization_summary */ + NULL, /* read_optimization_summary */ + NULL, /* stmt_fixup */ + 0, /* TODOs */ + NULL, /* function_transform */ + NULL /* variable_transform */ +}; + +/* Generate and emit a static constructor or destructor. WHICH must + be one of 'I' (for a constructor) or 'D' (for a destructor). BODY + is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the + initialization priority for this constructor or destructor. */ + +void +cgraph_build_static_cdtor (char which, tree body, int priority) +{ + static int counter = 0; + char which_buf[16]; + tree decl, name, resdecl; + + /* The priority is encoded in the constructor or destructor name. + collect2 will sort the names and arrange that they are called at + program startup. */ + sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++); + name = get_file_function_name (which_buf); + + decl = build_decl (input_location, FUNCTION_DECL, name, + build_function_type_list (void_type_node, NULL_TREE)); + current_function_decl = decl; + + resdecl = build_decl (input_location, + RESULT_DECL, NULL_TREE, void_type_node); + DECL_ARTIFICIAL (resdecl) = 1; + DECL_RESULT (decl) = resdecl; + DECL_CONTEXT (resdecl) = decl; + + allocate_struct_function (decl, false); + + TREE_STATIC (decl) = 1; + TREE_USED (decl) = 1; + DECL_ARTIFICIAL (decl) = 1; + DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1; + DECL_SAVED_TREE (decl) = body; + if (!targetm.have_ctors_dtors) + { + TREE_PUBLIC (decl) = 1; + DECL_PRESERVE_P (decl) = 1; + } + DECL_UNINLINABLE (decl) = 1; + + DECL_INITIAL (decl) = make_node (BLOCK); + TREE_USED (DECL_INITIAL (decl)) = 1; + + DECL_SOURCE_LOCATION (decl) = input_location; + cfun->function_end_locus = input_location; + + switch (which) + { + case 'I': + DECL_STATIC_CONSTRUCTOR (decl) = 1; + decl_init_priority_insert (decl, priority); + break; + case 'D': + DECL_STATIC_DESTRUCTOR (decl) = 1; + 
decl_fini_priority_insert (decl, priority);
+      break;
+    default:
+      gcc_unreachable ();
+    }
+
+  gimplify_function_tree (decl);
+
+  cgraph_add_new_function (decl, false);
+
+  set_cfun (NULL);
+  current_function_decl = NULL;
+}
+
+
+/* A vector of FUNCTION_DECLs declared as static constructors.  */
+static VEC(tree, heap) *static_ctors;
+/* A vector of FUNCTION_DECLs declared as static destructors.  */
+static VEC(tree, heap) *static_dtors;
+
+/* When the target does not support ctors and dtors, we call all
+   constructors and destructors from a special initialization/destruction
+   function recognized by collect2.
+
+   When we are going to build this function, collect all constructors and
+   destructors and turn them into normal functions.  */
+
+static void
+record_cdtor_fn (struct cgraph_node *node)
+{
+  if (DECL_STATIC_CONSTRUCTOR (node->decl))
+    VEC_safe_push (tree, heap, static_ctors, node->decl);
+  if (DECL_STATIC_DESTRUCTOR (node->decl))
+    VEC_safe_push (tree, heap, static_dtors, node->decl);
+  node = cgraph_node (node->decl);
+  node->local.disregard_inline_limits = 1;
+}
+
+/* Define global constructor/destructor functions for the CDTORS, of
+   which there are LEN.  The CDTORS are sorted by initialization
+   priority.  If CTOR_P is true, these are constructors; otherwise,
+   they are destructors.  */
+
+static void
+build_cdtor (bool ctor_p, VEC (tree, heap) *cdtors)
+{
+  size_t i,j;
+  size_t len = VEC_length (tree, cdtors);
+
+  i = 0;
+  while (i < len)
+    {
+      tree body;
+      tree fn;
+      priority_type priority;
+
+      priority = 0;
+      body = NULL_TREE;
+      j = i;
+      do
+        {
+          priority_type p;
+          fn = VEC_index (tree, cdtors, j);
+          p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
+          if (j == i)
+            priority = p;
+          else if (p != priority)
+            break;
+          j++;
+        }
+      while (j < len);
+
+      /* When there is only one cdtor and target supports them, do nothing. 
*/
+      if (j == i + 1
+          && targetm.have_ctors_dtors)
+        {
+          i++;
+          continue;
+        }
+      /* Find the next batch of constructors/destructors with the same
+         initialization priority.  */
+      for (;i < j; i++)
+        {
+          tree call;
+          fn = VEC_index (tree, cdtors, i);
+          call = build_call_expr (fn, 0);
+          if (ctor_p)
+            DECL_STATIC_CONSTRUCTOR (fn) = 0;
+          else
+            DECL_STATIC_DESTRUCTOR (fn) = 0;
+          /* We do not want to optimize away pure/const calls here.
+             When optimizing, these should be already removed, when not
+             optimizing, we want user to be able to breakpoint in them.  */
+          TREE_SIDE_EFFECTS (call) = 1;
+          append_to_statement_list (call, &body);
+        }
+        while (i < len);
+      gcc_assert (body != NULL_TREE);
+      /* Generate a function to call all the functions of like
+         priority.  */
+      cgraph_build_static_cdtor (ctor_p ? 'I' : 'D', body, priority);
+    }
+}
+
+/* Comparison function for qsort.  P1 and P2 are actually of type
+   "tree *" and point to static constructors.  DECL_INIT_PRIORITY is
+   used to determine the sort order.  */
+
+static int
+compare_ctor (const void *p1, const void *p2)
+{
+  tree f1;
+  tree f2;
+  int priority1;
+  int priority2;
+
+  f1 = *(const tree *)p1;
+  f2 = *(const tree *)p2;
+  priority1 = DECL_INIT_PRIORITY (f1);
+  priority2 = DECL_INIT_PRIORITY (f2);
+
+  if (priority1 < priority2)
+    return -1;
+  else if (priority1 > priority2)
+    return 1;
+  else
+    /* Ensure a stable sort.  Constructors are executed in reverse
+       order to make LTO initialize libraries first.  */
+    return DECL_UID (f2) - DECL_UID (f1);
+}
+
+/* Comparison function for qsort.  P1 and P2 are actually of type
+   "tree *" and point to static destructors.  DECL_FINI_PRIORITY is
+   used to determine the sort order. 
*/
+
+static int
+compare_dtor (const void *p1, const void *p2)
+{
+  tree f1;
+  tree f2;
+  int priority1;
+  int priority2;
+
+  f1 = *(const tree *)p1;
+  f2 = *(const tree *)p2;
+  priority1 = DECL_FINI_PRIORITY (f1);
+  priority2 = DECL_FINI_PRIORITY (f2);
+
+  if (priority1 < priority2)
+    return -1;
+  else if (priority1 > priority2)
+    return 1;
+  else
+    /* Ensure a stable sort.  */
+    return DECL_UID (f1) - DECL_UID (f2);
+}
+
+/* Generate functions to call static constructors and destructors
+   for targets that do not support .ctors/.dtors sections.  These
+   functions have magic names which are detected by collect2.  */
+
+static void
+build_cdtor_fns (void)
+{
+  if (!VEC_empty (tree, static_ctors))
+    {
+      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
+      qsort (VEC_address (tree, static_ctors),
+             VEC_length (tree, static_ctors),
+             sizeof (tree),
+             compare_ctor);
+      build_cdtor (/*ctor_p=*/true, static_ctors);
+    }
+
+  if (!VEC_empty (tree, static_dtors))
+    {
+      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
+      qsort (VEC_address (tree, static_dtors),
+             VEC_length (tree, static_dtors),
+             sizeof (tree),
+             compare_dtor);
+      build_cdtor (/*ctor_p=*/false, static_dtors);
+    }
+}
+
+/* Look for constructors and destructors and produce functions calling them.
+   This is needed for targets not supporting ctors or dtors, but we perform the
+   transformation also at link time to merge possibly numerous
+   constructors/destructors into a single function to improve code locality and
+   reduce size. 
*/ + +static unsigned int +ipa_cdtor_merge (void) +{ + struct cgraph_node *node; + for (node = cgraph_nodes; node; node = node->next) + if (node->analyzed + && (DECL_STATIC_CONSTRUCTOR (node->decl) + || DECL_STATIC_DESTRUCTOR (node->decl))) + record_cdtor_fn (node); + build_cdtor_fns (); + VEC_free (tree, heap, static_ctors); + VEC_free (tree, heap, static_dtors); + return 0; +} + +/* Perform the pass when we have no ctors/dtors support + or at LTO time to merge multiple constructors into single + function. */ + +static bool +gate_ipa_cdtor_merge (void) +{ + return !targetm.have_ctors_dtors || (optimize && in_lto_p); +} + +struct ipa_opt_pass_d pass_ipa_cdtor_merge = +{ + { + IPA_PASS, + "cdtor", /* name */ + gate_ipa_cdtor_merge, /* gate */ + ipa_cdtor_merge, /* execute */ + NULL, /* sub */ + NULL, /* next */ + 0, /* static_pass_number */ + TV_CGRAPHOPT, /* tv_id */ + 0, /* properties_required */ + 0, /* properties_provided */ + 0, /* properties_destroyed */ + 0, /* todo_flags_start */ + 0 /* todo_flags_finish */ + }, + NULL, /* generate_summary */ + NULL, /* write_summary */ + NULL, /* read_summary */ + NULL, /* write_optimization_summary */ + NULL, /* read_optimization_summary */ + NULL, /* stmt_fixup */ + 0, /* TODOs */ + NULL, /* function_transform */ + NULL /* variable_transform */ +};