+2010-05-06 Jan Hubicka <jh@suse.cz>
+
+ * cgraphbuild.c (record_reference_ctx): Add varpool_node.
+ (record_reference, mark_address, mark_load, mark_store): Record
+ references.
+ (record_references_in_initializer): Update use of record_reference_ctx.
+ (rebuild_cgraph_edges): Remove all references before rebuilding.
+ * cgraph.c (cgraph_create_node): Clear ref list.
+ (cgraph_remove_node): Remove references.
+ (dump_cgraph_node): Dump references.
+ (cgraph_clone_node): Clone references.
+ * cgraph.h: Include ipa-ref.h and ipa-ref-inline.h.
+ (struct cgraph_node, varpool_node): Add ref_list.
+ * ipa-ref.c: New file.
+ * ipa-ref.h: New file.
+ * ipa-ref-inline.h: New file.
+ * lto-cgraph.c (output_varpool): Take cgraph node set argument.
+ (referenced_from_other_partition_p): New function.
+ (lto_output_varpool_node): Take set argument; call
+ referenced_from_other_partition_p.
+ (lto_output_ref): New.
+ (add_references): New.
+ (output_refs): New.
+ (output_cgraph): Compute boundary based on references;
+ output refs.
+ (input_ref): New.
+ (input_refs): New.
+ (input_cgraph): Call input_refs.
+ * lto-section-in.c (lto_section_name): Add refs.
+ * Makefile.in (CGRAPH_H): Add ipa-ref.h and ipa-ref-inline.h.
+ (ipa-ref.o): New.
+ * varpool.c (varpool_node): Clear ipa ref list.
+ (varpool_remove_node): Remove references.
+ (dump_varpool_node): Dump references.
+ (varpool_assemble_decl): Only assemble finalized variables.
+ (varpool_extra_name_alias): Initialize ref list.
+ * lto-streamer.c (lto_get_section_name): Add .refs section.
+ * lto-streamer.h (lto_section_type): Add LTO_section_refs.
+ (referenced_from_other_partition_p): Declare.
+
2010-05-06 Ira Rosen <irar@il.ibm.com>
PR tree-optimization/43901
IPA_UTILS_H = ipa-utils.h $(TREE_H) $(CGRAPH_H)
IPA_REFERENCE_H = ipa-reference.h $(BITMAP_H) $(TREE_H)
IPA_TYPE_ESCAPE_H = ipa-type-escape.h $(TREE_H)
-CGRAPH_H = cgraph.h $(TREE_H) $(BASIC_BLOCK_H) cif-code.def
+CGRAPH_H = cgraph.h $(TREE_H) $(BASIC_BLOCK_H) cif-code.def ipa-ref.h ipa-ref-inline.h
DF_H = df.h $(BITMAP_H) $(BASIC_BLOCK_H) alloc-pool.h $(TIMEVAR_H)
RESOURCE_H = resource.h hard-reg-set.h $(DF_H)
DDG_H = ddg.h sbitmap.h $(DF_H)
ipa-prop.o \
ipa-pure-const.o \
ipa-reference.o \
+ ipa-ref.o \
ipa-struct-reorg.o \
ipa-type-escape.o \
ipa-utils.o \
langhooks.h $(GGC_H) $(TARGET_H) $(CGRAPH_H) $(IPA_PROP_H) $(DIAGNOSTIC_H) \
$(TREE_FLOW_H) $(TM_H) $(TREE_PASS_H) $(FLAGS_H) $(TREE_H) \
$(TREE_INLINE_H) $(TIMEVAR_H)
+ipa-ref.o : ipa-ref.c $(CONFIG_H) $(SYSTEM_H) coretypes.h \
+   langhooks.h $(GGC_H) $(TARGET_H) $(CGRAPH_H) $(TREE_H) \
+   $(TREE_FLOW_H) $(TM_H) $(TREE_PASS_H) $(FLAGS_H)
ipa-cp.o : ipa-cp.c $(CONFIG_H) $(SYSTEM_H) coretypes.h \
$(TREE_H) $(TARGET_H) $(CGRAPH_H) $(IPA_PROP_H) $(TREE_FLOW_H) \
$(TREE_PASS_H) $(FLAGS_H) $(TIMEVAR_H) $(DIAGNOSTIC_H) $(TREE_DUMP_H) \
$(srcdir)/real.h $(srcdir)/function.h $(srcdir)/insn-addr.h $(srcdir)/hwint.h \
$(srcdir)/fixed-value.h \
$(srcdir)/ipa-reference.h $(srcdir)/output.h $(srcdir)/cfgloop.h \
- $(srcdir)/cselib.h $(srcdir)/basic-block.h $(srcdir)/cgraph.h \
+ $(srcdir)/cselib.h $(srcdir)/basic-block.h $(srcdir)/ipa-ref.h $(srcdir)/cgraph.h \
$(srcdir)/reload.h $(srcdir)/caller-save.c \
$(srcdir)/alias.c $(srcdir)/bitmap.c $(srcdir)/cselib.c $(srcdir)/cgraph.c \
$(srcdir)/ipa-prop.c $(srcdir)/ipa-cp.c $(srcdir)/ipa-inline.c $(srcdir)/matrix-reorg.c \
node->previous = NULL;
node->global.estimated_growth = INT_MIN;
node->frequency = NODE_FREQUENCY_NORMAL;
+ ipa_empty_ref_list (&node->ref_list);
cgraph_nodes = node;
cgraph_n_nodes++;
return node;
cgraph_call_node_removal_hooks (node);
cgraph_node_remove_callers (node);
cgraph_node_remove_callees (node);
+ ipa_remove_all_references (&node->ref_list);
+ ipa_remove_all_refering (&node->ref_list);
VEC_free (ipa_opt_pass, heap,
node->ipa_transforms_to_apply);
fprintf(f, "(can throw external) ");
}
fprintf (f, "\n");
+ fprintf (f, " References: ");
+ ipa_dump_references (f, &node->ref_list);
+  fprintf (f, "  Referring this function: ");
+ ipa_dump_refering (f, &node->ref_list);
for (edge = node->indirect_calls; edge; edge = edge->next_callee)
indirect_calls_count++;
for (e = n->indirect_calls; e; e = e->next_callee)
cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
count_scale, freq, loop_nest, update_original);
+ ipa_clone_references (new_node, NULL, &n->ref_list);
new_node->next_sibling_clone = n->clones;
if (n->clones)
#define GCC_CGRAPH_H
#include "tree.h"
#include "basic-block.h"
+#include "ipa-ref.h"
enum availability
{
per-function in order to allow IPA passes to introduce new functions. */
VEC(ipa_opt_pass,heap) * GTY((skip)) ipa_transforms_to_apply;
+ struct ipa_ref_list ref_list;
struct cgraph_local_info local;
struct cgraph_global_info global;
struct cgraph_rtl_info rtl;
/* The varpool data structure.
Each static variable decl has assigned varpool_node. */
-struct GTY((chain_next ("%h.next"))) varpool_node {
+struct GTY((chain_next ("%h.next"), chain_prev ("%h.prev"))) varpool_node {
tree decl;
/* Pointer to the next function in varpool_nodes. */
struct varpool_node *next, *prev;
/* For normal nodes a pointer to the first extra name alias. For alias
nodes a pointer to the normal node. */
struct varpool_node *extra_name;
+ struct ipa_ref_list ref_list;
/* Ordering of all cgraph nodes. */
int order;
/* Constant pool accessor function. */
htab_t constant_pool_htab (void);
+#include "ipa-ref-inline.h"
+
#endif /* GCC_CGRAPH_H */
struct record_reference_ctx
{
bool only_vars;
+ struct varpool_node *varpool_node;
};
/* Walk tree and record all calls and references to functions/variables.
/* Record dereferences to the functions. This makes the
functions reachable unconditionally. */
decl = get_base_var (*tp);
- if (TREE_CODE (decl) == FUNCTION_DECL && !ctx->only_vars)
- cgraph_mark_address_taken_node (cgraph_node (decl));
+ if (TREE_CODE (decl) == FUNCTION_DECL)
+ {
+ if (!ctx->only_vars)
+ cgraph_mark_address_taken_node (cgraph_node (decl));
+ ipa_record_reference (NULL, ctx->varpool_node,
+ cgraph_node (decl), NULL,
+ IPA_REF_ADDR, NULL);
+ }
if (TREE_CODE (decl) == VAR_DECL)
{
- gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));
+ struct varpool_node *vnode = varpool_node (decl);
if (lang_hooks.callgraph.analyze_expr)
lang_hooks.callgraph.analyze_expr (&decl, walk_subtrees);
- varpool_mark_needed_node (varpool_node (decl));
+ varpool_mark_needed_node (vnode);
+ if (vnode->alias && vnode->extra_name)
+ vnode = vnode->extra_name;
+ ipa_record_reference (NULL, ctx->varpool_node,
+ NULL, vnode,
+ IPA_REF_ADDR, NULL);
}
*walk_subtrees = 0;
break;
{
struct cgraph_node *node = cgraph_node (addr);
cgraph_mark_address_taken_node (node);
+ ipa_record_reference ((struct cgraph_node *)data, NULL,
+ node, NULL,
+ IPA_REF_ADDR, stmt);
}
else
{
if (lang_hooks.callgraph.analyze_expr)
lang_hooks.callgraph.analyze_expr (&addr, &walk_subtrees);
varpool_mark_needed_node (vnode);
+ if (vnode->alias && vnode->extra_name)
+ vnode = vnode->extra_name;
+ ipa_record_reference ((struct cgraph_node *)data, NULL,
+ NULL, vnode,
+ IPA_REF_ADDR, stmt);
}
}
if (lang_hooks.callgraph.analyze_expr)
lang_hooks.callgraph.analyze_expr (&t, &walk_subtrees);
varpool_mark_needed_node (vnode);
+ if (vnode->alias && vnode->extra_name)
+ vnode = vnode->extra_name;
+ ipa_record_reference ((struct cgraph_node *)data, NULL,
+ NULL, vnode,
+ IPA_REF_LOAD, stmt);
}
return false;
}
if (lang_hooks.callgraph.analyze_expr)
lang_hooks.callgraph.analyze_expr (&t, &walk_subtrees);
varpool_mark_needed_node (vnode);
+ if (vnode->alias && vnode->extra_name)
+ vnode = vnode->extra_name;
+ ipa_record_reference ((struct cgraph_node *)data, NULL,
+ NULL, vnode,
+ IPA_REF_STORE, NULL);
}
return false;
}
record_references_in_initializer (tree decl, bool only_vars)
{
struct pointer_set_t *visited_nodes = pointer_set_create ();
- struct record_reference_ctx ctx = {false};
+ struct varpool_node *node = varpool_node (decl);
+ struct record_reference_ctx ctx = {false, NULL};
+ ctx.varpool_node = node;
ctx.only_vars = only_vars;
walk_tree (&DECL_INITIAL (decl), record_reference,
&ctx, visited_nodes);
gimple_stmt_iterator gsi;
cgraph_node_remove_callees (node);
+ ipa_remove_all_references (&node->ref_list);
node->count = ENTRY_BLOCK_PTR->count;
#include "lto-streamer.h"
#include "gcov-io.h"
-static void output_varpool (varpool_node_set);
+static void output_varpool (cgraph_node_set, varpool_node_set);
/* Cgraph streaming is organized as set of record whose type
is indicated by a tag. */
bitpack_delete (bp);
}
+/* Return true when LIST contains references from other partitions, i.e.
+   from nodes that are not in SET or from variables that are not in VSET. */
+bool
+referenced_from_other_partition_p (struct ipa_ref_list *list, cgraph_node_set set,
+ varpool_node_set vset)
+{
+ int i;
+ struct ipa_ref *ref;
+ for (i = 0; ipa_ref_list_refering_iterate (list, i, ref); i++)
+ {
+ if (ref->refering_type == IPA_REF_CGRAPH)
+ {
+ if (!cgraph_node_in_set_p (ipa_ref_refering_node (ref), set))
+ return true;
+ }
+ else
+ {
+ if (!varpool_node_in_set_p (ipa_ref_refering_varpool_node (ref),
+ vset))
+ return true;
+ }
+ }
+ return false;
+}
+
/* Return true when node is reachable from other partition. */
static bool
static void
lto_output_varpool_node (struct lto_simple_output_block *ob, struct varpool_node *node,
- varpool_node_set set)
+ cgraph_node_set set, varpool_node_set vset)
{
- bool boundary_p = !varpool_node_in_set_p (node, set) && node->analyzed;
+ bool boundary_p = !varpool_node_in_set_p (node, vset) && node->analyzed;
struct bitpack_d *bp;
struct varpool_node *alias;
int count = 0;
}
else
{
- /* FIXME: We have no idea how we move references around. For moment assume that
- everything is used externally. */
- bp_pack_value (bp, flag_wpa, 1); /* used_from_other_parition. */
+ bp_pack_value (bp, node->analyzed
+ && referenced_from_other_partition_p (&node->ref_list,
+ set, vset), 1);
bp_pack_value (bp, boundary_p, 1); /* in_other_partition. */
}
/* Also emit any extra name aliases. */
}
}
+/* Output ipa reference REF to output block OB.  ENCODER and VARPOOL_ENCODER
+   are used to look up the stream indices of the referred cgraph and varpool
+   nodes. */
+
+static void
+lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
+ lto_cgraph_encoder_t encoder,
+ lto_varpool_encoder_t varpool_encoder)
+{
+ struct bitpack_d *bp = bitpack_create ();
+ bp_pack_value (bp, ref->refered_type, 1);
+ bp_pack_value (bp, ref->use, 2);
+ lto_output_bitpack (ob->main_stream, bp);
+ bitpack_delete (bp);
+ if (ref->refered_type == IPA_REF_CGRAPH)
+ {
+ int nref = lto_cgraph_encoder_lookup (encoder, ipa_ref_node (ref));
+ gcc_assert (nref != LCC_NOT_FOUND);
+ lto_output_sleb128_stream (ob->main_stream, nref);
+ }
+ else
+ {
+ int nref = lto_varpool_encoder_lookup (varpool_encoder,
+ ipa_ref_varpool_node (ref));
+ gcc_assert (nref != LCC_NOT_FOUND);
+ lto_output_sleb128_stream (ob->main_stream, nref);
+ }
+}
+
/* Stream out profile_summary to OB. */
static void
lto_cgraph_encoder_encode (encoder, node);
}
+/* Add all references in LIST to encoders. */
+
+static void
+add_references (lto_cgraph_encoder_t encoder,
+ lto_varpool_encoder_t varpool_encoder,
+ struct ipa_ref_list *list)
+{
+ int i;
+ struct ipa_ref *ref;
+ for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
+ if (ref->refered_type == IPA_REF_CGRAPH)
+ add_node_to (encoder, ipa_ref_node (ref));
+ else
+ {
+ struct varpool_node *vnode = ipa_ref_varpool_node (ref);
+ lto_varpool_encoder_encode (varpool_encoder, vnode);
+ }
+}
+
/* Output all callees or indirect outgoing edges. EDGE must be the first such
edge. */
-/* Output the part of the cgraph in SET. */
+/* Output all references of the nodes in SET and the variables in VSET into
+   the LTO references section. */
+static void
+output_refs (cgraph_node_set set, varpool_node_set vset,
+ lto_cgraph_encoder_t encoder,
+ lto_varpool_encoder_t varpool_encoder)
+{
+ cgraph_node_set_iterator csi;
+ varpool_node_set_iterator vsi;
+ struct lto_simple_output_block *ob;
+ int count;
+ struct ipa_ref *ref;
+ int i;
+
+ ob = lto_create_simple_output_block (LTO_section_refs);
+
+ for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
+ {
+ struct cgraph_node *node = csi_node (csi);
+
+ count = ipa_ref_list_nreferences (&node->ref_list);
+ if (count)
+ {
+ lto_output_uleb128_stream (ob->main_stream, count);
+ lto_output_uleb128_stream (ob->main_stream,
+ lto_cgraph_encoder_lookup (encoder, node));
+ for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
+ lto_output_ref (ob, ref, encoder, varpool_encoder);
+ }
+ }
+
+ lto_output_uleb128_stream (ob->main_stream, 0);
+
+ for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
+ {
+ struct varpool_node *node = vsi_node (vsi);
+
+ count = ipa_ref_list_nreferences (&node->ref_list);
+ if (count)
+ {
+ lto_output_uleb128_stream (ob->main_stream, count);
+ lto_output_uleb128_stream (ob->main_stream,
+ lto_varpool_encoder_lookup (varpool_encoder,
+ node));
+ for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
+ lto_output_ref (ob, ref, encoder, varpool_encoder);
+ }
+ }
+
+ lto_output_uleb128_stream (ob->main_stream, 0);
+
+ lto_destroy_simple_output_block (ob);
+}
+
+
+/* Output the part of the cgraph in SET. */
+
void
output_cgraph (cgraph_node_set set, varpool_node_set vset)
{
{
node = csi_node (csi);
add_node_to (encoder, node);
+ add_references (encoder, varpool_encoder, &node->ref_list);
}
for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
{
gcc_assert (!vnode->alias);
lto_varpool_encoder_encode (varpool_encoder, vnode);
lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
+ add_references (encoder, varpool_encoder, &vnode->ref_list);
}
- /* FIXME: We do not track references, so for now we need to include all possibly
- used variables in the encoder set. */
+  /* FIXME: We cannot currently remove any varpool nodes, or we get an ICE
+     when merging binfos. */
for (vnode = varpool_nodes; vnode; vnode = vnode->next)
if (vnode->needed)
lto_varpool_encoder_encode (varpool_encoder, vnode);
|| TREE_READONLY (vnode->decl)))
{
lto_set_varpool_encoder_encode_initializer (varpool_encoder, vnode);
+ add_references (encoder, varpool_encoder, &vnode->ref_list);
}
}
lto_output_uleb128_stream (ob->main_stream, 0);
lto_destroy_simple_output_block (ob);
- output_varpool (vset);
+ output_varpool (set, vset);
+ output_refs (set, vset, encoder, varpool_encoder);
}
/* Overwrite the information in NODE based on FILE_DATA, TAG, FLAGS,
-/* Output the part of the cgraph in SET. */
+/* Output the varpool summary; SET and VSET are used to determine which
+   nodes are boundaries. */
static void
-output_varpool (varpool_node_set vset)
+output_varpool (cgraph_node_set set, varpool_node_set vset)
{
struct lto_simple_output_block *ob = lto_create_simple_output_block (LTO_section_varpool);
lto_varpool_encoder_t varpool_encoder = ob->decl_state->varpool_node_encoder;
for (i = 0; i < len; i++)
{
lto_output_varpool_node (ob, lto_varpool_encoder_deref (varpool_encoder, i),
- vset);
+ set, vset);
}
lto_destroy_simple_output_block (ob);
return node;
}
+/* Read a reference from input block IB and attach it to REFERING_NODE or
+   REFERING_VARPOOL_NODE.  NODES and VARPOOL_NODES map stream indices to the
+   referred cgraph and varpool nodes. */
+
+static void
+input_ref (struct lto_input_block *ib,
+ struct cgraph_node *refering_node,
+ struct varpool_node *refering_varpool_node,
+ VEC(cgraph_node_ptr, heap) *nodes,
+ VEC(varpool_node_ptr, heap) *varpool_nodes)
+{
+ struct cgraph_node *node = NULL;
+ struct varpool_node *varpool_node = NULL;
+ struct bitpack_d *bp;
+ enum ipa_ref_type type;
+ enum ipa_ref_use use;
+
+ bp = lto_input_bitpack (ib);
+ type = (enum ipa_ref_type) bp_unpack_value (bp, 1);
+ use = (enum ipa_ref_use) bp_unpack_value (bp, 2);
+ bitpack_delete (bp);
+ if (type == IPA_REF_CGRAPH)
+ node = VEC_index (cgraph_node_ptr, nodes, lto_input_sleb128 (ib));
+ else
+ varpool_node = VEC_index (varpool_node_ptr, varpool_nodes, lto_input_sleb128 (ib));
+ ipa_record_reference (refering_node, refering_varpool_node,
+ node, varpool_node, use, NULL);
+}
/* Read an edge from IB. NODES points to a vector of previously read nodes for
decoding caller and callee of the edge to be read. If INDIRECT is true, the
return varpool;
}
+/* Input ipa_refs. */
+
+static void
+input_refs (struct lto_input_block *ib,
+ VEC(cgraph_node_ptr, heap) *nodes,
+ VEC(varpool_node_ptr, heap) *varpool)
+{
+ int count;
+ int idx;
+ while (true)
+ {
+ struct cgraph_node *node;
+ count = lto_input_uleb128 (ib);
+ if (!count)
+ break;
+ idx = lto_input_uleb128 (ib);
+ node = VEC_index (cgraph_node_ptr, nodes, idx);
+ while (count)
+ {
+ input_ref (ib, node, NULL, nodes, varpool);
+ count--;
+ }
+ }
+ while (true)
+ {
+ struct varpool_node *node;
+ count = lto_input_uleb128 (ib);
+ if (!count)
+ break;
+ node = VEC_index (varpool_node_ptr, varpool, lto_input_uleb128 (ib));
+ while (count)
+ {
+ input_ref (ib, NULL, node, nodes, varpool);
+ count--;
+ }
+ }
+}
+
static struct gcov_ctr_summary lto_gcov_summary;
lto_destroy_simple_input_block (file_data, LTO_section_varpool,
ib, data, len);
+ ib = lto_create_simple_input_block (file_data, LTO_section_refs,
+ &data, &len);
+ input_refs (ib, nodes, varpool);
+ lto_destroy_simple_input_block (file_data, LTO_section_refs,
+ ib, data, len);
VEC_free (cgraph_node_ptr, heap, nodes);
VEC_free (varpool_node_ptr, heap, varpool);
}
"static_initializer",
"cgraph",
"varpool",
+  "refs",
-  "jump_funcs"
+  "jump_funcs",
"ipa_pure_const",
"ipa_reference",
case LTO_section_varpool:
return concat (LTO_SECTION_NAME_PREFIX, ".vars", NULL);
+ case LTO_section_refs:
+ return concat (LTO_SECTION_NAME_PREFIX, ".refs", NULL);
+
case LTO_section_jump_functions:
return concat (LTO_SECTION_NAME_PREFIX, ".jmpfuncs", NULL);
LTO_section_static_initializer,
LTO_section_cgraph,
LTO_section_varpool,
+ LTO_section_refs,
LTO_section_jump_functions,
LTO_section_ipa_pure_const,
LTO_section_ipa_reference,
struct varpool_node *);
void output_cgraph (cgraph_node_set, varpool_node_set);
void input_cgraph (void);
+bool referenced_from_other_partition_p (struct ipa_ref_list *,
+					 cgraph_node_set,
+					 varpool_node_set);
/* In lto-symtab.c. */
next = e->next_caller;
cgraph_redirect_edge_callee (e, prevailing_node);
}
+  /* Redirect incoming references. */
+ ipa_clone_refering (prevailing_node, NULL, &node->ref_list);
if (node->same_body)
{
gcc_assert (!vnode->finalized || prevailing_node->finalized);
gcc_assert (!vnode->analyzed || prevailing_node->analyzed);
+      /* When replacing by an alias, the references go to the original
+	 variable. */
+ if (prevailing_node->alias && prevailing_node->extra_name)
+ prevailing_node = prevailing_node->extra_name;
+ ipa_clone_refering (NULL, prevailing_node, &vnode->ref_list);
+
/* Finally remove the replaced node. */
varpool_remove_node (vnode);
}
2010-05-05 Jan Hubicka <jh@suse.cz>
+ * lto.c (lto_promote_cross_file_statics): Compute boundary based on refs.
+
+2010-05-05 Jan Hubicka <jh@suse.cz>
+
* lto.c (lto_1_to_1_map): Partition only needed nodes.
2010-04-30 Jan Hubicka <jh@suse.cz>
struct varpool_node *vnode;
unsigned i, n_sets;
cgraph_node_set set;
+ varpool_node_set vset;
cgraph_node_set_iterator csi;
+ varpool_node_set_iterator vsi;
gcc_assert (flag_wpa);
- /* At moment we make no attempt to figure out who is refering the variables,
- so all must become global.
-
- Constant pool references use internal labels and thus can not be made global.
- It is sensible to keep those ltrans local to allow better optimization. */
- for (vnode = varpool_nodes; vnode; vnode = vnode->next)
- if (!vnode->externally_visible && vnode->analyzed
- && !DECL_IN_CONSTANT_POOL (vnode->decl))
- {
- TREE_PUBLIC (vnode->decl) = 1;
- DECL_VISIBILITY (vnode->decl) = VISIBILITY_HIDDEN;
- }
n_sets = VEC_length (cgraph_node_set, lto_cgraph_node_sets);
for (i = 0; i < n_sets; i++)
{
set = VEC_index (cgraph_node_set, lto_cgraph_node_sets, i);
+ vset = VEC_index (varpool_node_set, lto_varpool_node_sets, i);
/* If node has either address taken (and we have no clue from where)
or it is called from other partition, it needs to be globalized. */
for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
{
struct cgraph_node *node = csi_node (csi);
- bool globalize = node->address_taken || node->local.vtable_method;
+ bool globalize = node->local.vtable_method;
struct cgraph_edge *e;
if (node->local.externally_visible)
continue;
+ if (!globalize
+ && referenced_from_other_partition_p (&node->ref_list, set, vset))
+ globalize = true;
for (e = node->callers; e && !globalize; e = e->next_caller)
{
struct cgraph_node *caller = e->caller;
}
if (globalize)
{
+ gcc_assert (flag_wpa);
TREE_PUBLIC (node->decl) = 1;
DECL_VISIBILITY (node->decl) = VISIBILITY_HIDDEN;
if (node->same_body)
}
}
}
+ for (vsi = vsi_start (vset); !vsi_end_p (vsi); vsi_next (&vsi))
+ {
+ vnode = vsi_node (vsi);
+      /* Constant pool references use internal labels and thus cannot
+	 be made global.  It is sensible to keep them local to the ltrans
+	 unit to allow better optimization. */
+ if (!DECL_IN_CONSTANT_POOL (vnode->decl)
+ && !vnode->externally_visible && vnode->analyzed
+ && referenced_from_other_partition_p (&vnode->ref_list, set, vset))
+ {
+ gcc_assert (flag_wpa);
+ TREE_PUBLIC (vnode->decl) = 1;
+ DECL_VISIBILITY (vnode->decl) = VISIBILITY_HIDDEN;
+ }
+ }
}
}
t = *tp;
*walk_subtrees = 0;
- if (pointer_set_contains (fixup_data->seen, t))
+ if (!t || pointer_set_contains (fixup_data->seen, t))
return NULL;
if (TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == FUNCTION_DECL)
node->decl = decl;
node->order = cgraph_order++;
node->next = varpool_nodes;
+ ipa_empty_ref_list (&node->ref_list);
if (varpool_nodes)
varpool_nodes->prev = node;
varpool_nodes = node;
gcc_assert (varpool_nodes_queue == node);
varpool_nodes_queue = node->next_needed;
}
+ ipa_remove_all_references (&node->ref_list);
+ ipa_remove_all_refering (&node->ref_list);
if (DECL_INITIAL (node->decl))
DECL_INITIAL (node->decl) = error_mark_node;
ggc_free (node);
else if (node->used_from_other_partition)
fprintf (f, " used_from_other_partition");
fprintf (f, "\n");
+ fprintf (f, " References: ");
+ ipa_dump_references (f, &node->ref_list);
+  fprintf (f, "  Referring this var: ");
+ ipa_dump_refering (f, &node->ref_list);
}
/* Dump the variable pool. */
alias_node->alias = 1;
alias_node->extra_name = decl_node;
alias_node->next = decl_node->extra_name;
+ ipa_empty_ref_list (&alias_node->ref_list);
if (decl_node->extra_name)
decl_node->extra_name->prev = alias_node;
decl_node->extra_name = alias_node;