/* Get a section name for a particular type or name. The NAME field
- is only used if SECTION_TYPE is LTO_section_function_body or
- LTO_static_initializer. For all others it is ignored. The callee
- of this function is responcible to free the returned name. */
+ is only used if SECTION_TYPE is LTO_section_function_body. For all
+ others it is ignored. The caller of this function is responsible
+ for freeing the returned name. */
char *
-lto_get_section_name (int section_type, const char *name)
+lto_get_section_name (int section_type, const char *name, struct lto_file_decl_data *f)
{
- switch (section_type)
+ const char *add;
+ char post[32];
+ const char *sep;
+
+ if (section_type == LTO_section_function_body)
{
- case LTO_section_function_body:
gcc_assert (name != NULL);
if (name[0] == '*')
name++;
- return concat (LTO_SECTION_NAME_PREFIX, name, NULL);
-
- case LTO_section_static_initializer:
- return concat (LTO_SECTION_NAME_PREFIX, ".statics", NULL);
-
- case LTO_section_symtab:
- return concat (LTO_SECTION_NAME_PREFIX, ".symtab", NULL);
-
- case LTO_section_decls:
- return concat (LTO_SECTION_NAME_PREFIX, ".decls", NULL);
-
- case LTO_section_cgraph:
- return concat (LTO_SECTION_NAME_PREFIX, ".cgraph", NULL);
-
- case LTO_section_varpool:
- return concat (LTO_SECTION_NAME_PREFIX, ".vars", NULL);
-
- case LTO_section_refs:
- return concat (LTO_SECTION_NAME_PREFIX, ".refs", NULL);
-
- case LTO_section_jump_functions:
- return concat (LTO_SECTION_NAME_PREFIX, ".jmpfuncs", NULL);
-
- case LTO_section_ipa_pure_const:
- return concat (LTO_SECTION_NAME_PREFIX, ".pureconst", NULL);
-
- case LTO_section_ipa_reference:
- return concat (LTO_SECTION_NAME_PREFIX, ".reference", NULL);
+ add = name;
+ sep = "";
+ }
+ else if (section_type < LTO_N_SECTION_TYPES)
+ {
+ add = lto_section_name[section_type];
+ sep = ".";
+ }
+ else
+ internal_error ("bytecode stream: unexpected LTO section %s", name);
- case LTO_section_opts:
- return concat (LTO_SECTION_NAME_PREFIX, ".opts", NULL);
+ /* Make the section name unique so that ld -r combining sections
+ doesn't confuse the reader with merged sections.
- case LTO_section_cgraph_opt_sum:
- return concat (LTO_SECTION_NAME_PREFIX, ".cgraphopt", NULL);
+ For options don't add an ID, the option reader cannot deal with them
+ and merging should be ok here.
- default:
- internal_error ("bytecode stream: unexpected LTO section %s", name);
- }
+ XXX: use crc64 to minimize collisions? */
+ if (section_type == LTO_section_opts)
+ strcpy (post, "");
+ else
+ sprintf (post, ".%x", f ? f->id : crc32_string(0, get_random_seed (false)));
+ return concat (LTO_SECTION_NAME_PREFIX, sep, add, post, NULL);
}
}
-/* Create a new bitpack. */
-
-struct bitpack_d *
-bitpack_create (void)
-{
- return XCNEW (struct bitpack_d);
-}
-
-
-/* Free the memory used by bitpack BP. */
-
-void
-bitpack_delete (struct bitpack_d *bp)
-{
- VEC_free (bitpack_word_t, heap, bp->values);
- free (bp);
-}
-
-
-/* Return an index to the word in bitpack BP that contains the
- next NBITS. */
-
-static inline unsigned
-bp_get_next_word (struct bitpack_d *bp, unsigned nbits)
-{
- unsigned last, ix;
-
- /* In principle, the next word to use is determined by the
- number of bits already processed in BP. */
- ix = bp->num_bits / BITS_PER_BITPACK_WORD;
-
- /* All the encoded bit patterns in BP are contiguous, therefore if
- the next NBITS would straddle over two different words, move the
- index to the next word and update the number of encoded bits
- by adding up the hole of unused bits created by this move. */
- bp->first_unused_bit %= BITS_PER_BITPACK_WORD;
- last = bp->first_unused_bit + nbits - 1;
- if (last >= BITS_PER_BITPACK_WORD)
- {
- ix++;
- bp->num_bits += (BITS_PER_BITPACK_WORD - bp->first_unused_bit);
- bp->first_unused_bit = 0;
- }
-
- return ix;
-}
-
-
-/* Pack NBITS of value VAL into bitpack BP. */
-
-void
-bp_pack_value (struct bitpack_d *bp, bitpack_word_t val, unsigned nbits)
-{
- unsigned ix;
- bitpack_word_t word;
-
- /* We cannot encode more bits than BITS_PER_BITPACK_WORD. */
- gcc_assert (nbits > 0 && nbits <= BITS_PER_BITPACK_WORD);
-
- /* Compute which word will contain the next NBITS. */
- ix = bp_get_next_word (bp, nbits);
- if (ix >= VEC_length (bitpack_word_t, bp->values))
- {
- /* If there is no room left in the last word of the values
- array, add a new word. Additionally, we should only
- need to add a single word, since every pack operation cannot
- use more bits than fit in a single word. */
- gcc_assert (ix < VEC_length (bitpack_word_t, bp->values) + 1);
- VEC_safe_push (bitpack_word_t, heap, bp->values, 0);
- }
-
- /* Grab the last word to pack VAL into. */
- word = VEC_index (bitpack_word_t, bp->values, ix);
-
- /* To fit VAL in WORD, we need to shift VAL to the left to
- skip the bottom BP->FIRST_UNUSED_BIT bits. */
- gcc_assert (BITS_PER_BITPACK_WORD >= bp->first_unused_bit + nbits);
- val <<= bp->first_unused_bit;
-
- /* Update WORD with VAL. */
- word |= val;
-
- /* Update BP. */
- VEC_replace (bitpack_word_t, bp->values, ix, word);
- bp->num_bits += nbits;
- bp->first_unused_bit += nbits;
-}
-
-
-/* Unpack the next NBITS from bitpack BP. */
-
-bitpack_word_t
-bp_unpack_value (struct bitpack_d *bp, unsigned nbits)
-{
- bitpack_word_t val, word, mask;
- unsigned ix;
-
- /* We cannot decode more bits than BITS_PER_BITPACK_WORD. */
- gcc_assert (nbits > 0 && nbits <= BITS_PER_BITPACK_WORD);
-
- /* Compute which word contains the next NBITS. */
- ix = bp_get_next_word (bp, nbits);
- word = VEC_index (bitpack_word_t, bp->values, ix);
-
- /* Compute the mask to get NBITS from WORD. */
- mask = (nbits == BITS_PER_BITPACK_WORD)
- ? (bitpack_word_t) -1
- : ((bitpack_word_t) 1 << nbits) - 1;
-
- /* Shift WORD to the right to skip over the bits already decoded
- in word. */
- word >>= bp->first_unused_bit;
-
- /* Apply the mask to obtain the requested value. */
- val = word & mask;
-
- /* Update BP->NUM_BITS for the next unpack operation. */
- bp->num_bits += nbits;
- bp->first_unused_bit += nbits;
-
- return val;
-}
-
-
/* Check that all the TS_* structures handled by the lto_output_* and
lto_input_* routines are exactly ALL the structures defined in
treestruct.def. */
/* These are the TS_* structures that are either handled or
explicitly ignored by the streamer routines. */
handled_p[TS_BASE] = true;
+ handled_p[TS_TYPED] = true;
handled_p[TS_COMMON] = true;
handled_p[TS_INT_CST] = true;
handled_p[TS_REAL_CST] = true;
handled_p[TS_OMP_CLAUSE] = true;
handled_p[TS_OPTIMIZATION] = true;
handled_p[TS_TARGET_OPTION] = true;
+ handled_p[TS_TRANSLATION_UNIT_DECL] = true;
/* Anything not marked above will trigger the following assertion.
If this assertion triggers, it means that there is a new TS_*
/* Helper for lto_streamer_cache_insert_1. Add T to CACHE->NODES at
- slot IX. Add OFFSET to CACHE->OFFSETS at slot IX. */
+ slot IX. */
static void
lto_streamer_cache_add_to_node_array (struct lto_streamer_cache_d *cache,
- int ix, tree t, unsigned offset)
+ unsigned ix, tree t)
{
- gcc_assert (ix >= 0);
+ /* Make sure we're either replacing an old element or
+ appending consecutively. */
+ gcc_assert (ix <= VEC_length (tree, cache->nodes));
- /* Grow the array of nodes and offsets to accomodate T at IX. */
- if (ix >= (int) VEC_length (tree, cache->nodes))
- {
- size_t sz = ix + (20 + ix) / 4;
- VEC_safe_grow_cleared (tree, heap, cache->nodes, sz);
- VEC_safe_grow_cleared (unsigned, heap, cache->offsets, sz);
- }
-
- VEC_replace (tree, cache->nodes, ix, t);
- VEC_replace (unsigned, cache->offsets, ix, offset);
+ if (ix == VEC_length (tree, cache->nodes))
+ VEC_safe_push (tree, heap, cache->nodes, t);
+ else
+ VEC_replace (tree, cache->nodes, ix, t);
}
/* Helper for lto_streamer_cache_insert and lto_streamer_cache_insert_at.
- CACHE, T, IX_P and OFFSET_P are as in lto_streamer_cache_insert.
+ CACHE, T, and IX_P are as in lto_streamer_cache_insert.
If INSERT_AT_NEXT_SLOT_P is true, T is inserted at the next available
slot in the cache. Otherwise, T is inserted at the position indicated
static bool
lto_streamer_cache_insert_1 (struct lto_streamer_cache_d *cache,
- tree t, int *ix_p, unsigned *offset_p,
+ tree t, unsigned *ix_p,
bool insert_at_next_slot_p)
{
void **slot;
- struct tree_int_map d_entry, *entry;
- int ix;
- unsigned offset;
+ unsigned ix;
bool existed_p;
gcc_assert (t);
- d_entry.base.from = t;
- slot = htab_find_slot (cache->node_map, &d_entry, INSERT);
- if (*slot == NULL)
+ slot = pointer_map_insert (cache->node_map, t);
+ if (!*slot)
{
/* Determine the next slot to use in the cache. */
if (insert_at_next_slot_p)
- ix = cache->next_slot++;
+ ix = VEC_length (tree, cache->nodes);
else
ix = *ix_p;
+ *slot = (void *)(size_t) (ix + 1);
- entry = (struct tree_int_map *)pool_alloc (cache->node_map_entries);
- entry->base.from = t;
- entry->to = (unsigned) ix;
- *slot = entry;
-
- /* If no offset was given, store the invalid offset -1. */
- offset = (offset_p) ? *offset_p : (unsigned) -1;
-
- lto_streamer_cache_add_to_node_array (cache, ix, t, offset);
+ lto_streamer_cache_add_to_node_array (cache, ix, t);
/* Indicate that the item was not present in the cache. */
existed_p = false;
}
else
{
- entry = (struct tree_int_map *) *slot;
- ix = (int) entry->to;
- offset = VEC_index (unsigned, cache->offsets, ix);
+ ix = (size_t) *slot - 1;
if (!insert_at_next_slot_p && ix != *ix_p)
{
/* If the caller wants to insert T at a specific slot
location, and ENTRY->TO does not match *IX_P, add T to
- the requested location slot. This situation arises when
- streaming builtin functions.
-
- For instance, on the writer side we could have two
- FUNCTION_DECLS T1 and T2 that are represented by the same
- builtin function. The reader will only instantiate the
- canonical builtin, but since T1 and T2 had been
- originally stored in different cache slots (S1 and S2),
- the reader must be able to find the canonical builtin
- function at slots S1 and S2. */
- gcc_assert (lto_stream_as_builtin_p (t));
+ the requested location slot. */
ix = *ix_p;
-
- /* Since we are storing a builtin, the offset into the
- stream is not necessary as we will not need to read
- forward in the stream. */
- lto_streamer_cache_add_to_node_array (cache, ix, t, -1);
+ lto_streamer_cache_add_to_node_array (cache, ix, t);
}
/* Indicate that T was already in the cache. */
if (ix_p)
*ix_p = ix;
- if (offset_p)
- *offset_p = offset;
-
return existed_p;
}
return true. Otherwise, return false.
If IX_P is non-null, update it with the index into the cache where
- T has been stored.
-
- *OFFSET_P represents the offset in the stream where T is physically
- written out. The first time T is added to the cache, *OFFSET_P is
- recorded in the cache together with T. But if T already existed
- in the cache, *OFFSET_P is updated with the value that was recorded
- the first time T was added to the cache.
-
- If OFFSET_P is NULL, it is ignored. */
+ T has been stored. */
bool
lto_streamer_cache_insert (struct lto_streamer_cache_d *cache, tree t,
- int *ix_p, unsigned *offset_p)
+ unsigned *ix_p)
{
- return lto_streamer_cache_insert_1 (cache, t, ix_p, offset_p, true);
+ return lto_streamer_cache_insert_1 (cache, t, ix_p, true);
}
bool
lto_streamer_cache_insert_at (struct lto_streamer_cache_d *cache,
- tree t, int ix)
+ tree t, unsigned ix)
{
- return lto_streamer_cache_insert_1 (cache, t, &ix, NULL, false);
+ return lto_streamer_cache_insert_1 (cache, t, &ix, false);
}
-/* Return true if tree node T exists in CACHE. If IX_P is
+/* Appends tree node T to CACHE, even if T already existed in it. */
+
+void
+lto_streamer_cache_append (struct lto_streamer_cache_d *cache, tree t)
+{
+ unsigned ix = VEC_length (tree, cache->nodes);
+ lto_streamer_cache_insert_1 (cache, t, &ix, false);
+}
+
+/* Return true if tree node T exists in CACHE, otherwise false. If IX_P is
not NULL, write to *IX_P the index into the cache where T is stored
- (-1 if T is not found). */
+ ((unsigned)-1 if T is not found). */
bool
lto_streamer_cache_lookup (struct lto_streamer_cache_d *cache, tree t,
- int *ix_p)
+ unsigned *ix_p)
{
void **slot;
- struct tree_int_map d_slot;
bool retval;
- int ix;
+ unsigned ix;
gcc_assert (t);
- d_slot.base.from = t;
- slot = htab_find_slot (cache->node_map, &d_slot, NO_INSERT);
+ slot = pointer_map_contains (cache->node_map, t);
if (slot == NULL)
{
retval = false;
else
{
retval = true;
- ix = (int) ((struct tree_int_map *) *slot)->to;
+ ix = (size_t) *slot - 1;
}
if (ix_p)
/* Return the tree node at slot IX in CACHE. */
tree
-lto_streamer_cache_get (struct lto_streamer_cache_d *cache, int ix)
+lto_streamer_cache_get (struct lto_streamer_cache_d *cache, unsigned ix)
{
gcc_assert (cache);
- /* If the reader is requesting an index beyond the length of the
- cache, it will need to read ahead. Return NULL_TREE to indicate
- that. */
- if ((unsigned) ix >= VEC_length (tree, cache->nodes))
- return NULL_TREE;
+ /* Make sure we're not requesting something we don't have. */
+ gcc_assert (ix < VEC_length (tree, cache->nodes));
- return VEC_index (tree, cache->nodes, (unsigned) ix);
+ return VEC_index (tree, cache->nodes, ix);
}
return;
if (TYPE_P (node))
- *nodep = node = gimple_register_type (node);
+ {
+ /* Type merging will get confused by the canonical types as they
+ are set by the middle-end. */
+ if (in_lto_p)
+ TYPE_CANONICAL (node) = NULL_TREE;
+ node = gimple_register_type (node);
+ TYPE_CANONICAL (node) = gimple_register_canonical_type (node);
+ if (in_lto_p)
+ TYPE_CANONICAL (*nodep) = TYPE_CANONICAL (node);
+ *nodep = node;
+ }
/* Return if node is already seen. */
if (pointer_set_insert (seen_nodes, node))
VEC_safe_push (tree, heap, *common_nodes, node);
- if (tree_node_can_be_shared (node))
- {
- if (POINTER_TYPE_P (node)
- || TREE_CODE (node) == COMPLEX_TYPE
- || TREE_CODE (node) == ARRAY_TYPE)
- lto_record_common_node (&TREE_TYPE (node), common_nodes, seen_nodes);
- }
+ if (POINTER_TYPE_P (node)
+ || TREE_CODE (node) == COMPLEX_TYPE
+ || TREE_CODE (node) == ARRAY_TYPE)
+ lto_record_common_node (&TREE_TYPE (node), common_nodes, seen_nodes);
}
/* Generate a vector of common nodes and make sure they are merged
- properly according to the the gimple type table. */
+ properly according to the gimple type table. */
static VEC(tree,heap) *
lto_get_common_nodes (void)
{
gcc_assert (t);
- lto_streamer_cache_insert (cache, t, NULL, NULL);
+ lto_streamer_cache_insert (cache, t, NULL);
/* The FIELD_DECLs of structures should be shared, so that every
COMPONENT_REF uses the same tree node when referencing a field.
cache = XCNEW (struct lto_streamer_cache_d);
- cache->node_map = htab_create (101, tree_int_map_hash, tree_int_map_eq, NULL);
-
- cache->node_map_entries = create_alloc_pool ("node map",
- sizeof (struct tree_int_map),
- 100);
+ cache->node_map = pointer_map_create ();
/* Load all the well-known tree nodes that are always created by
the compiler on startup. This prevents writing them out
unnecessarily. */
common_nodes = lto_get_common_nodes ();
- for (i = 0; VEC_iterate (tree, common_nodes, i, node); i++)
+ FOR_EACH_VEC_ELT (tree, common_nodes, i, node)
preload_common_node (cache, node);
VEC_free(tree, heap, common_nodes);
if (c == NULL)
return;
- htab_delete (c->node_map);
- free_alloc_pool (c->node_map_entries);
+ pointer_map_destroy (c->node_map);
VEC_free (tree, heap, c->nodes);
- VEC_free (unsigned, heap, c->offsets);
free (c);
}