/* "Bag-of-pages" garbage collector for the GNU compiler.
- Copyright (C) 1999 Free Software Foundation, Inc.
+ Copyright (C) 1999, 2000 Free Software Foundation, Inc.
This file is part of GNU CC.
#include "flags.h"
#include "ggc.h"
-#ifdef HAVE_MMAP
+#ifdef HAVE_MMAP_ANYWHERE
#include <sys/mman.h>
#endif
+#ifndef MAP_FAILED
+#define MAP_FAILED -1
+#endif
+
+#if !defined (MAP_ANONYMOUS) && defined (MAP_ANON)
+#define MAP_ANONYMOUS MAP_ANON
+#endif
/* Stategy:
last collection. */
#undef GGC_ALWAYS_COLLECT
-/* If ENABLE_CHECKING is defined, enable GGC_POISON and
- GGC_ALWAYS_COLLECT automatically. */
-#ifdef ENABLE_CHECKING
+#ifdef ENABLE_GC_CHECKING
#define GGC_POISON
+#endif
+#ifdef ENABLE_GC_ALWAYS_COLLECT
#define GGC_ALWAYS_COLLECT
#endif
unsigned long *save_in_use_p;
/* Context depth of this page. */
- unsigned char context_depth;
-
- /* The lg of size of objects allocated from this page. */
- unsigned char order;
+ unsigned short context_depth;
/* The number of free objects remaining on this page. */
unsigned short num_free_objects;
next allocation from this page. */
unsigned short next_bit_hint;
- /* Saved number of free objects for pages that aren't in the topmost
- context during colleciton. */
- unsigned short save_num_free_objects;
+ /* The lg of size of objects allocated from this page. */
+ unsigned char order;
/* A bit vector indicating whether or not objects are in use. The
Nth bit is one if the Nth object on this page is allocated. This
/* Bytes currently allocated at the end of the last collection. */
size_t allocated_last_gc;
+ /* Total amount of memory mapped. */
+ size_t bytes_mapped;
+
/* The current depth in the context stack. */
- unsigned char context_depth;
+ unsigned short context_depth;
/* A file descriptor open to /dev/zero for reading. */
-#if defined (HAVE_MMAP) && !defined(MAP_ANONYMOUS)
+#if defined (HAVE_MMAP_ANYWHERE) && !defined(MAP_ANONYMOUS)
int dev_zero_fd;
#endif
/* Compute DIVIDEND / DIVISOR, rounded up. */
#define DIV_ROUND_UP(Dividend, Divisor) \
- ((Dividend + Divisor - 1) / Divisor)
+ (((Dividend) + (Divisor) - 1) / (Divisor))
/* The number of objects per allocation page, for objects of size
2^ORDER. */
#define GGC_MIN_LAST_ALLOCATED (4 * 1024 * 1024)
\f
-static int ggc_allocated_p PROTO ((const void *));
-static page_entry *lookup_page_table_entry PROTO ((const void *));
-static void set_page_table_entry PROTO ((void *, page_entry *));
-static char *alloc_anon PROTO ((char *, size_t));
-static struct page_entry * alloc_page PROTO ((unsigned));
-static void free_page PROTO ((struct page_entry *));
-static void release_pages PROTO ((void));
-static void clear_marks PROTO ((void));
-static void sweep_pages PROTO ((void));
+static int ggc_allocated_p PARAMS ((const void *));
+static page_entry *lookup_page_table_entry PARAMS ((const void *));
+static void set_page_table_entry PARAMS ((void *, page_entry *));
+static char *alloc_anon PARAMS ((char *, size_t));
+static struct page_entry * alloc_page PARAMS ((unsigned));
+static void free_page PARAMS ((struct page_entry *));
+static void release_pages PARAMS ((void));
+static void clear_marks PARAMS ((void));
+static void sweep_pages PARAMS ((void));
+static void ggc_recalculate_in_use_p PARAMS ((page_entry *));
#ifdef GGC_POISON
-static void poison PROTO ((void *, size_t));
-static void poison_pages PROTO ((void));
+static void poison_pages PARAMS ((void));
#endif
-void debug_print_page_list PROTO ((int));
+void debug_print_page_list PARAMS ((int));
\f
/* Returns non-zero if P was allocated in GC'able memory. */
return base[L1][L2];
}
-
/* Set the page table entry for a page. */
+
static void
set_page_table_entry(p, entry)
void *p;
base[L1][L2] = entry;
}
-
/* Prints the page-entry for object size ORDER, for debugging. */
+
void
debug_print_page_list (order)
int order;
fflush (stdout);
}
-#ifdef GGC_POISON
-/* `Poisons' the region of memory starting at START and extending for
- LEN bytes. */
-static inline void
-poison (start, len)
- void *start;
- size_t len;
-{
- memset (start, 0xa5, len);
-}
-#endif
-
/* Allocate SIZE bytes of anonymous memory, preferably near PREF,
(if non-null). */
+
static inline char *
alloc_anon (pref, size)
char *pref ATTRIBUTE_UNUSED;
{
char *page;
-#ifdef HAVE_MMAP
+#ifdef HAVE_MMAP_ANYWHERE
#ifdef MAP_ANONYMOUS
page = (char *) mmap (pref, size, PROT_READ | PROT_WRITE,
MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
exit(1);
}
#endif /* HAVE_VALLOC */
-#endif /* HAVE_MMAP */
+#endif /* HAVE_MMAP_ANYWHERE */
+
+ /* Remember that we allocated this memory. */
+ G.bytes_mapped += size;
return page;
}
/* Allocate a new page for allocating objects of size 2^ORDER,
and return an entry for it. The entry is not added to the
appropriate page_table list. */
+
static inline struct page_entry *
alloc_page (order)
unsigned order;
}
else
{
- /* Actually allocate the memory, using mmap. */
+ /* Actually allocate the memory. */
page = alloc_anon (NULL, entry_size);
}
return entry;
}
+/* For a page that is no longer needed, put it on the free page list. */
-/* Free a page when it's no longer needed. */
static inline void
free_page (entry)
page_entry *entry;
G.free_pages = entry;
}
+/* Release the free page cache to the system. */
-/* Release the page cache to the system. */
-static inline void
+static void
release_pages ()
{
-#ifdef HAVE_MMAP
+#ifdef HAVE_MMAP_ANYWHERE
page_entry *p, *next;
char *start;
size_t len;
else
{
munmap (start, len);
+ G.bytes_mapped -= len;
start = p->page;
len = p->bytes;
}
}
munmap (start, len);
+ G.bytes_mapped -= len;
#else
#ifdef HAVE_VALLOC
page_entry *p, *next;
{
next = p->next;
free (p->page);
+ G.bytes_mapped -= p->bytes;
free (p);
}
#endif /* HAVE_VALLOC */
-#endif /* HAVE_MMAP */
+#endif /* HAVE_MMAP_ANYWHERE */
G.free_pages = NULL;
}
-
/* This table provides a fast way to determine ceil(log_2(size)) for
allocation requests. The minimum allocation size is four bytes. */
+
static unsigned char const size_lookup[257] =
{
2, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4,
/* Allocate a chunk of memory of SIZE bytes. If ZERO is non-zero, the
memory is zeroed; otherwise, its contents are undefined. */
+
void *
ggc_alloc_obj (size, zero)
size_t size;
/* If there is no page for this object size, or all pages in this
context are full, allocate a new page. */
- if (entry == NULL
- || entry->num_free_objects == 0
- || entry->context_depth != G.context_depth)
+ if (entry == NULL || entry->num_free_objects == 0)
{
struct page_entry *new_entry;
new_entry = alloc_page (order);
#ifdef GGC_POISON
/* `Poison' the entire allocated object before zeroing the requested area,
so that bytes beyond the end, if any, will not necessarily be zero. */
- poison (result, 1 << order);
+ memset (result, 0xaf, 1 << order);
#endif
+
if (zero)
memset (result, 0, size);
return result;
}
-
-/* If P is not marked, marks it and returns 0. Otherwise returns 1.
+/* If P is not marked, marks it and return false. Otherwise return true.
P must have been allocated by the GC allocator; it mustn't point to
static objects, stack variables, or memory allocated with malloc. */
+
int
ggc_set_mark (p)
- void *p;
+ const void *p;
{
page_entry *entry;
unsigned bit, word;
/* Calculate the index of the object on the page; this is its bit
position in the in_use_p bitmap. */
- bit = (((char *) p) - entry->page) >> entry->order;
+ bit = (((const char *) p) - entry->page) >> entry->order;
word = bit / HOST_BITS_PER_LONG;
mask = (unsigned long) 1 << (bit % HOST_BITS_PER_LONG);
return 0;
}
+/* Mark P, but check first that it was allocated by the collector. */
+
void
ggc_mark_if_gcable (p)
- void *p;
+ const void *p;
{
if (p && ggc_allocated_p (p))
ggc_set_mark (p);
}
+
+/* Return the size of the gc-able object P.  Note that this is the
+   full allocated size -- 1 << order of P's page -- which may be
+   larger than the size originally requested, since allocations are
+   rounded up to a power of two.  */
+
+size_t
+ggc_get_size (p)
+ const void *p;
+{
+ /* All objects on a given page share the same power-of-two size,
+ recorded in the page entry's ORDER field.  */
+ page_entry *pe = lookup_page_table_entry (p);
+ return 1 << pe->order;
+}
\f
/* Initialize the ggc-mmap allocator. */
+
void
init_ggc ()
{
G.pagesize = getpagesize();
G.lg_pagesize = exact_log2 (G.pagesize);
-#if defined (HAVE_MMAP) && !defined(MAP_ANONYMOUS)
+#if defined (HAVE_MMAP_ANYWHERE) && !defined(MAP_ANONYMOUS)
G.dev_zero_fd = open ("/dev/zero", O_RDONLY);
if (G.dev_zero_fd == -1)
abort ();
G.allocated_last_gc = GGC_MIN_LAST_ALLOCATED;
+#ifdef HAVE_MMAP_ANYWHERE
+ /* StunOS has an amazing off-by-one error for the first mmap allocation
+ after fiddling with RLIMIT_STACK. The result, as hard as it is to
+ believe, is an unaligned page allocation, which would cause us to
+ hork badly if we tried to use it. */
+ {
+ char *p = alloc_anon (NULL, G.pagesize);
+ if ((size_t)p & (G.pagesize - 1))
+ {
+ /* How losing. Discard this one and try another. If we still
+ can't get something useful, give up. */
+
+ p = alloc_anon (NULL, G.pagesize);
+ if ((size_t)p & (G.pagesize - 1))
+ abort ();
+ }
+ munmap (p, G.pagesize);
+ }
+#endif
+
empty_string = ggc_alloc_string ("", 0);
ggc_add_string_root (&empty_string, 1);
}
+/* Increment the `GC context'. Objects allocated in an outer context
+ are never freed, eliminating the need to register their roots. */
void
ggc_push_context ()
abort ();
}
+/* Merge the SAVE_IN_USE_P and IN_USE_P arrays in P so that IN_USE_P
+   reflects reality.  An object is considered in use if it is marked
+   now, or was in use in some context further down the context stack.
+   Recalculate NUM_FREE_OBJECTS as well.  */
+
+static void
+ggc_recalculate_in_use_p (p)
+ page_entry *p;
+{
+ unsigned int i;
+ size_t num_objects;
+
+ /* Because the past-the-end bit in in_use_p is always set, we
+ pretend there is one additional object.  */
+ num_objects = OBJECTS_PER_PAGE (p->order) + 1;
+
+ /* Reset the free object count.  */
+ p->num_free_objects = num_objects;
+
+ /* Combine the IN_USE_P and SAVE_IN_USE_P arrays.  */
+ for (i = 0;
+ i < DIV_ROUND_UP (BITMAP_SIZE (num_objects),
+ sizeof (*p->in_use_p));
+ ++i)
+ {
+ unsigned long j;
+
+ /* Something is in use if it is marked, or if it was in use in a
+ context further down the context stack.  */
+ p->in_use_p[i] |= p->save_in_use_p[i];
+
+ /* Decrement the free object count for every object allocated.  */
+ for (j = p->in_use_p[i]; j; j >>= 1)
+ p->num_free_objects -= (j & 1);
+ }
+
+ /* Sanity check: the past-the-end bit guarantees at least one bit is
+ set, so the free count must be strictly less than NUM_OBJECTS.  */
+ if (p->num_free_objects >= num_objects)
+ abort ();
+}
+
+/* Decrement the `GC context'. All objects allocated since the
+ previous ggc_push_context are migrated to the outer context. */
void
ggc_pop_context ()
left over are imported into the previous context. */
for (order = 2; order < HOST_BITS_PER_PTR; order++)
{
- size_t num_objects = OBJECTS_PER_PAGE (order);
- size_t bitmap_size = BITMAP_SIZE (num_objects);
-
page_entry *p;
for (p = G.pages[order]; p != NULL; p = p->next)
{
if (p->context_depth > depth)
- {
- p->context_depth = depth;
- }
+ p->context_depth = depth;
/* If this page is now in the topmost context, and we'd
saved its allocation state, restore it. */
else if (p->context_depth == depth && p->save_in_use_p)
{
- memcpy (p->in_use_p, p->save_in_use_p, bitmap_size);
+ ggc_recalculate_in_use_p (p);
free (p->save_in_use_p);
p->save_in_use_p = 0;
- p->num_free_objects = p->save_num_free_objects;
}
}
}
}
\f
+/* Unmark all objects. */
+
static inline void
clear_marks ()
{
for (order = 2; order < HOST_BITS_PER_PTR; order++)
{
size_t num_objects = OBJECTS_PER_PAGE (order);
- size_t bitmap_size = BITMAP_SIZE (num_objects);
+ size_t bitmap_size = BITMAP_SIZE (num_objects + 1);
page_entry *p;
for (p = G.pages[order]; p != NULL; p = p->next)
/* Pages that aren't in the topmost context are not collected;
nevertheless, we need their in-use bit vectors to store GC
marks. So, back them up first. */
- if (p->context_depth < G.context_depth
- && ! p->save_in_use_p)
+ if (p->context_depth < G.context_depth)
{
- p->save_in_use_p = xmalloc (bitmap_size);
+ if (! p->save_in_use_p)
+ p->save_in_use_p = xmalloc (bitmap_size);
memcpy (p->save_in_use_p, p->in_use_p, bitmap_size);
- p->save_num_free_objects = p->num_free_objects;
}
/* Reset reset the number of free objects and clear the
}
}
+/* Free all empty pages. Partially empty pages need no attention
+ because the `mark' bit doubles as an `unused' bit. */
+
static inline void
sweep_pages ()
{
p = next;
}
while (! done);
+
+ /* Now, restore the in_use_p vectors for any pages from contexts
+ other than the current one. */
+ for (p = G.pages[order]; p; p = p->next)
+ if (p->context_depth != G.context_depth)
+ ggc_recalculate_in_use_p (p);
}
}
#ifdef GGC_POISON
+/* Clobber all free objects. */
+
static inline void
poison_pages ()
{
for (p = G.pages[order]; p != NULL; p = p->next)
{
size_t i;
+
+ if (p->context_depth != G.context_depth)
+ /* Since we don't do any collection for pages in pushed
+ contexts, there's no need to do any poisoning. And
+ besides, the IN_USE_P array isn't valid until we pop
+ contexts. */
+ continue;
+
for (i = 0; i < num_objects; i++)
{
size_t word, bit;
word = i / HOST_BITS_PER_LONG;
bit = i % HOST_BITS_PER_LONG;
if (((p->in_use_p[word] >> bit) & 1) == 0)
- poison (p->page + i * size, size);
+ memset (p->page + i * size, 0xa5, size);
}
}
}
}
#endif
+/* Top level mark-and-sweep routine. */
+
void
ggc_collect ()
{
clear_marks ();
ggc_mark_roots ();
- sweep_pages ();
#ifdef GGC_POISON
poison_pages ();
#endif
+ sweep_pages ();
+
G.allocated_last_gc = G.allocated;
if (G.allocated_last_gc < GGC_MIN_LAST_ALLOCATED)
G.allocated_last_gc = GGC_MIN_LAST_ALLOCATED;
(unsigned long) G.allocated / 1024, time * 1e-6);
}
}
+
+/* Print allocation statistics to stderr: a per-order table of bytes
+   allocated vs. bytes in use, followed by file-level totals.  */
+
+void
+ggc_page_print_statistics ()
+{
+ struct ggc_statistics stats;
+ unsigned int i;
+
+ /* Clear the statistics.  */
+ memset (&stats, 0, sizeof (stats));
+
+ /* Make sure collection will really occur.  Zeroing the threshold
+ means the next collection check cannot be skipped -- NOTE(review):
+ this presumes ggc_print_statistics triggers a collection; confirm
+ against its definition.  */
+ G.allocated_last_gc = 0;
+
+ /* Collect and print the statistics common across collectors.  */
+ ggc_print_statistics (stderr, &stats);
+
+ /* Release free pages so that we will not count the bytes allocated
+ there as part of the total allocated memory.  */
+ release_pages ();
+
+ /* Collect some information about the various sizes of
+ allocation.  */
+ fprintf (stderr, "\n%-4s%-16s%-16s\n", "Log", "Allocated", "Used");
+ for (i = 0; i < HOST_BITS_PER_PTR; ++i)
+ {
+ page_entry *p;
+ size_t allocated;
+ size_t in_use;
+
+ /* Skip empty entries.  */
+ if (!G.pages[i])
+ continue;
+
+ allocated = in_use = 0;
+
+ /* Figure out the total number of bytes allocated for objects of
+ this size, and how many of them are actually in use.  Each
+ object on a page of order I occupies 1 << I bytes.  */
+ for (p = G.pages[i]; p; p = p->next)
+ {
+ allocated += p->bytes;
+ in_use +=
+ (OBJECTS_PER_PAGE (i) - p->num_free_objects) * (1 << i);
+ }
+ fprintf (stderr, "%-3d %-15lu %-15lu\n", i,
+ (unsigned long) allocated, (unsigned long) in_use);
+ }
+
+ /* Print out some global information.  */
+ fprintf (stderr, "\nTotal bytes marked: %lu\n",
+ (unsigned long) G.allocated);
+ fprintf (stderr, "Total bytes mapped: %lu\n",
+ (unsigned long) G.bytes_mapped);
+}