/* "Bag-of-pages" garbage collector for the GNU compiler.
- Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2007, 2008
- Free Software Foundation, Inc.
+ Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009,
+ 2010 Free Software Foundation, Inc.
This file is part of GCC.
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
-#include "toplev.h"
+#include "toplev.h" /* exact_log2 */
+#include "diagnostic-core.h"
#include "flags.h"
#include "ggc.h"
+#include "ggc-internal.h"
#include "timevar.h"
#include "params.h"
#include "tree-flow.h"
+#include "cfgloop.h"
+#include "plugin.h"
/* Prefer MAP_ANON(YMOUS) to /dev/zero, since we don't need to keep a
file open. Prefer either to valloc. */
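+/* A sketch of the anonymous-mapping path, assuming a POSIX mmap (the
+   /dev/zero variant differs only in passing a real file descriptor):
+
+     page = mmap (pref, size, PROT_READ | PROT_WRITE,
+                  MAP_PRIVATE | MAP_ANON, -1, 0);
+
+   valloc comes last presumably because memory it returns can only go
+   back through free (), never page by page through munmap.  */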
#define OFFSET_TO_BIT(OFFSET, ORDER) \
(((OFFSET) * DIV_MULT (ORDER)) >> DIV_SHIFT (ORDER))
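+/* That is, OFFSET / OBJECT_SIZE (ORDER) computed without a division:
+   DIV_MULT and DIV_SHIFT hold a fixed-point reciprocal precomputed by
+   compute_inverse below.  Illustrative numbers only: for a 24-byte
+   order with DIV_MULT == 0x2aaaaaab and DIV_SHIFT == 34,
+   (48 * 0x2aaaaaab) >> 34 == 2, which is exactly 48 / 24.  */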
+/* We use this structure to determine the alignment required for
+ allocations. For power-of-two sized allocations, that's not a
+ problem, but it does matter for odd-sized allocations.
+ We do not care about alignment for floating-point types. */
+
+struct max_alignment {
+ char c;
+ union {
+ HOST_WIDEST_INT i;
+ void *p;
+ } u;
+};
+
+/* The biggest alignment required. */
+
+#define MAX_ALIGNMENT (offsetof (struct max_alignment, u))
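+/* Why this works: the compiler must pad between C and U so that U
+   starts on the union's alignment boundary, so the offset of U *is*
+   the strictest alignment among the union's members.  On a typical
+   LP64 host both HOST_WIDEST_INT and void * are 8 bytes, giving
+   MAX_ALIGNMENT == 8.  */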
+
+
/* The number of extra orders, not corresponding to power-of-two sized
objects. */
/* The Ith entry is the maximum size of an object to be stored in the
   Ith extra order.  Adding a new entry to this array is the *only*
   thing you need to do to add a new special allocation size.  */
static const size_t extra_order_size_table[] = {
- sizeof (struct var_ann_d),
+  /* Extra orders for small non-power-of-two multiples of MAX_ALIGNMENT.
+     There are a lot of structures with these sizes, and listing them
+     explicitly risks an order being dropped whenever one changes size
+     (see the note after this table).  */
+ MAX_ALIGNMENT * 3,
+ MAX_ALIGNMENT * 5,
+ MAX_ALIGNMENT * 6,
+ MAX_ALIGNMENT * 7,
+ MAX_ALIGNMENT * 9,
+ MAX_ALIGNMENT * 10,
+ MAX_ALIGNMENT * 11,
+ MAX_ALIGNMENT * 12,
+ MAX_ALIGNMENT * 13,
+ MAX_ALIGNMENT * 14,
+ MAX_ALIGNMENT * 15,
sizeof (struct tree_decl_non_common),
sizeof (struct tree_field_decl),
sizeof (struct tree_parm_decl),
sizeof (struct tree_var_decl),
- sizeof (struct tree_list),
- sizeof (struct tree_ssa_name),
+ sizeof (struct tree_type),
sizeof (struct function),
sizeof (struct basic_block_def),
- sizeof (bitmap_element),
- sizeof (bitmap_head),
- TREE_EXP_SIZE (2),
- RTL_SIZE (2), /* MEM, PLUS, etc. */
- RTL_SIZE (9), /* INSN */
+ sizeof (struct cgraph_node),
+ sizeof (struct loop),
};
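+/* For instance, assuming MAX_ALIGNMENT is 8 as on typical LP64 hosts,
+   the multiples above provide 24-, 40-, 48-, ..., 120-byte orders, so
+   a 24-byte object is carved from a 24-byte order instead of rounding
+   up to the 32-byte power-of-two order and wasting a quarter of it.  */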
/* The total number of orders. */
#define NUM_ORDERS (HOST_BITS_PER_PTR + NUM_EXTRA_ORDERS)
-/* We use this structure to determine the alignment required for
- allocations. For power-of-two sized allocations, that's not a
- problem, but it does matter for odd-sized allocations. */
-
-struct max_alignment {
- char c;
- union {
- HOST_WIDEST_INT i;
- long double d;
- } u;
-};
-
-/* The biggest alignment required. */
-
-#define MAX_ALIGNMENT (offsetof (struct max_alignment, u))
-
/* Compute the smallest nonnegative number which when added to X gives
a multiple of F. */
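+/* For example, X == 20 and F == 8 give 4, since 20 + 4 == 24 is the
+   next multiple of 8; an X already divisible by F gives 0.  */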
#endif
+#ifdef ENABLE_GC_ALWAYS_COLLECT
+/* List of free objects to be verified as actually free on the
+ next collection. */
+struct free_object
+{
+ void *object;
+ struct free_object *next;
+};
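+
+/* ggc_free threads freed objects onto this list rather than recycling
+   them at once, roughly:
+
+     struct free_object *fo = XNEW (struct free_object);
+     fo->object = p;
+     fo->next = G.free_object_list;
+     G.free_object_list = fo;
+
+   so the next collection can verify each one really is unreachable.  */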
+#endif
+
/* The rest of the global variables. */
static struct globals
{
#ifdef ENABLE_GC_ALWAYS_COLLECT
/* List of free objects to be verified as actually free on the
next collection. */
- struct free_object
- {
- void *object;
- struct free_object *next;
- } *free_object_list;
+ struct free_object *free_object_list;
#endif
#ifdef GATHER_STATISTICS
struct
{
- /* Total memory allocated with ggc_alloc. */
+ /* Total GC-allocated memory. */
unsigned long long total_allocated;
- /* Total overhead for memory to be allocated with ggc_alloc. */
+ /* Total overhead for GC-allocated memory. */
unsigned long long total_overhead;
/* Total allocations and overhead for sizes less than 32, 64 and 128.
These sizes are interesting because they are typical cache line
sizes. */
-
+
unsigned long long total_allocated_under32;
unsigned long long total_overhead_under32;
-
+
unsigned long long total_allocated_under64;
unsigned long long total_overhead_under64;
-
+
unsigned long long total_allocated_under128;
unsigned long long total_overhead_under128;
-
+
/* The allocations for each of the allocation orders. */
unsigned long long total_allocated_per_order[NUM_ORDERS];
/* Prints the page-entry for object size ORDER, for debugging. */
-void
+DEBUG_FUNCTION void
debug_print_page_list (int order)
{
page_entry *p;
G.bytes_mapped += size;
/* Pretend we don't have access to the allocated pages. We'll enable
- access to smaller pieces of the area in ggc_alloc. Discard the
+ access to smaller pieces of the area in ggc_internal_alloc. Discard the
handle to avoid handle leak. */
VALGRIND_DISCARD (VALGRIND_MAKE_MEM_NOACCESS (page, size));
/* We cannot free a page from a context deeper than the current
one. */
gcc_assert (entry->context_depth == top->context_depth);
-
+
/* Put top element into freed slot. */
G.by_depth[i] = top;
G.save_in_use[i] = G.save_in_use[G.by_depth_in_use-1];
ggc_alloc_typed_stat (enum gt_types_enum type ATTRIBUTE_UNUSED, size_t size
MEM_STAT_DECL)
{
- return ggc_alloc_stat (size PASS_MEM_STAT);
+ return ggc_internal_alloc_stat (size PASS_MEM_STAT);
}
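+/* Note: ggc_alloc_stat is now ggc_internal_alloc_stat; the public
+   ggc_alloc entry points are presumably the typed wrappers declared
+   in ggc.h, all of which funnel into this routine.  */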
/* Allocate a chunk of memory of SIZE bytes. Its contents are undefined. */
void *
-ggc_alloc_stat (size_t size MEM_STAT_DECL)
+ggc_internal_alloc_stat (size_t size MEM_STAT_DECL)
{
size_t order, word, bit, object_offset, object_size;
struct page_entry *entry;
#ifdef ENABLE_GC_ALWAYS_COLLECT
/* In the completely-anal-checking mode, we do *not* immediately free
- the data, but instead verify that the data is *actually* not
+ the data, but instead verify that the data is *actually* not
reachable the next time we collect. */
{
struct free_object *fo = XNEW (struct free_object);
/* If the page is completely full, then it's supposed to
be after all pages that aren't. Since we've freed one
object from a page that was full, we need to move the
- page to the head of the list.
+ page to the head of the list.
PE is the node we want to move. Q is the previous node
and P is the next node in the list. */
static void
compute_inverse (unsigned order)
{
- size_t size, inv;
+ size_t size, inv;
unsigned int e;
size = OBJECT_SIZE (order);
G.save_in_use = XNEWVEC (unsigned long *, G.by_depth_max);
}
-/* Start a new GGC zone. */
-
-struct alloc_zone *
-new_ggc_zone (const char *name ATTRIBUTE_UNUSED)
-{
- return NULL;
-}
-
-/* Destroy a GGC zone. */
-void
-destroy_ggc_zone (struct alloc_zone *zone ATTRIBUTE_UNUSED)
-{
-}
-
/* Merge the SAVE_IN_USE_P and IN_USE_P arrays in P so that IN_USE_P
reflects reality. Recalculate NUM_FREE_OBJECTS as well. */
G.pages[order] = next;
else
previous->next = next;
-
+
/* Splice P out of the back pointers too. */
if (next)
next->prev = previous;
/* Indicate that we've seen collections at this context depth. */
G.context_depth_collections = ((unsigned long)1 << (G.context_depth + 1)) - 1;
+ invoke_plugin_callbacks (PLUGIN_GGC_START, NULL);
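+
+  /* Plugins can observe the collector through these events; an
+     illustrative registration (the callback name is not from this
+     file) would be:
+
+       register_callback ("my-plugin", PLUGIN_GGC_START,
+                          my_ggc_start_cb, NULL);
+
+     where my_ggc_start_cb is a plugin_callback_func.  The matching
+     PLUGIN_GGC_END below fires after sweeping completes.  */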
+
clear_marks ();
ggc_mark_roots ();
#ifdef GATHER_STATISTICS
G.allocated_last_gc = G.allocated;
+ invoke_plugin_callbacks (PLUGIN_GGC_END, NULL);
+
timevar_pop (TV_GC);
if (!quiet_flag)
SCALE (G.allocated), STAT_LABEL(G.allocated),
SCALE (total_overhead), STAT_LABEL (total_overhead));
-#ifdef GATHER_STATISTICS
+#ifdef GATHER_STATISTICS
{
fprintf (stderr, "\nTotal allocations and overheads during the compilation process\n");
G.stats.total_overhead_under128);
fprintf (stderr, "Total Allocated under 128B: %10lld\n",
G.stats.total_allocated_under128);
-
+
for (i = 0; i < NUM_ORDERS; i++)
if (G.stats.total_allocated_per_order[i])
{
#endif
}
\f
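+/* State for precompiled headers: for each allocation order we record
+   how many objects of that order the PCH contains.  These totals are
+   what actually lands on disk; on PCH read they are used to size and
+   re-create the pages.  */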
+struct ggc_pch_ondisk
+{
+ unsigned totals[NUM_ORDERS];
+};
+
struct ggc_pch_data
{
- struct ggc_pch_ondisk
- {
- unsigned totals[NUM_ORDERS];
- } d;
+ struct ggc_pch_ondisk d;
size_t base[NUM_ORDERS];
size_t written[NUM_ORDERS];
};
size_t size, bool is_string ATTRIBUTE_UNUSED)
{
unsigned order;
- static const char emptyBytes[256];
+ static const char emptyBytes[256] = { 0 };
if (size < NUM_SIZE_LOOKUP)
order = size_lookup[size];
/* Update the statistics. */
G.allocated = G.allocated_last_gc = offs - (char *)addr;
}
+
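+/* Placeholder zones.  This collector has no zones, but these names
+   are referenced by clients of the zone collector's interface, so
+   empty stand-ins are provided; the dummy member merely keeps the
+   structs non-empty, which ISO C requires.  */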
+struct alloc_zone
+{
+ int dummy;
+};
+
+struct alloc_zone rtl_zone;
+struct alloc_zone tree_zone;
+struct alloc_zone tree_id_zone;