/*
 * Copyright (c) 1996-1997
 * Silicon Graphics Computer Systems, Inc.
 *
 * Permission to use, copy, modify, distribute and sell this software
 * and its documentation for any purpose is hereby granted without fee,
 * provided that the above copyright notice appear in all copies and
 * that both that copyright notice and this permission notice appear
 * in supporting documentation.  Silicon Graphics makes no
 * representations about the suitability of this software for any
 * purpose.  It is provided "as is" without express or implied warranty.
 */

/* NOTE: This is an internal header file, included by other STL headers.
 *   You should not attempt to use it directly.
 */

#ifndef __SGI_STL_INTERNAL_ALLOC_H
#define __SGI_STL_INTERNAL_ALLOC_H
#ifdef __SUNPRO_CC
#  define __PRIVATE public
   // Extra access restrictions prevent us from really making some things
   // private.
#else
#  define __PRIVATE private
#endif

#ifdef __STL_STATIC_TEMPLATE_MEMBER_BUG
#  define __USE_MALLOC
#endif
// This implements some standard node allocators.  These are
// NOT the same as the allocators in the C++ draft standard or in
// the original STL.  They do not encapsulate different pointer
// types; indeed we assume that there is only one pointer type.
// The allocation primitives are intended to allocate individual objects,
// not larger arenas as with the original STL allocators.
#ifndef __THROW_BAD_ALLOC
#  if defined(__STL_NO_BAD_ALLOC) || !defined(__STL_USE_EXCEPTIONS)
#    include <bits/std_cstdio.h>
#    include <bits/std_cstdlib.h>
#    define __THROW_BAD_ALLOC fprintf(stderr, "out of memory\n"); exit(1)
#  else /* Standard conforming out-of-memory handling */
#    include <bits/std_new.h>
#    define __THROW_BAD_ALLOC throw std::bad_alloc()
#  endif
#endif
#include <bits/std_cstddef.h>
#include <bits/std_cstdlib.h>
#include <bits/std_cstring.h>
#include <bits/std_cassert.h>
#ifdef __STL_THREADS
# include <bits/stl_threads.h>
# define __NODE_ALLOCATOR_THREADS true
# ifdef __STL_SGI_THREADS
  // We test whether threads are in use before locking.
  // Perhaps this should be moved into stl_threads.h, but that
  // probably makes it harder to avoid the procedure call when
  // it isn't needed.
    extern "C" {
      extern int __us_rsthread_malloc;
    }
  // The above is copied from malloc.h.  Including <malloc.h>
  // would be cleaner but fails with certain levels of standard
  // conformance.
#   define __NODE_ALLOCATOR_LOCK if (threads && __us_rsthread_malloc) \
                { _S_node_allocator_lock._M_acquire_lock(); }
#   define __NODE_ALLOCATOR_UNLOCK if (threads && __us_rsthread_malloc) \
                { _S_node_allocator_lock._M_release_lock(); }
# else /* !__STL_SGI_THREADS */
#   define __NODE_ALLOCATOR_LOCK \
        { if (threads) _S_node_allocator_lock._M_acquire_lock(); }
#   define __NODE_ALLOCATOR_UNLOCK \
        { if (threads) _S_node_allocator_lock._M_release_lock(); }
# endif
#else
//  Thread-unsafe
#   define __NODE_ALLOCATOR_LOCK
#   define __NODE_ALLOCATOR_UNLOCK
#   define __NODE_ALLOCATOR_THREADS false
#endif
__STL_BEGIN_NAMESPACE

#if defined(__sgi) && !defined(__GNUC__) && (_MIPS_SIM != _MIPS_SIM_ABI32)
#pragma set woff 1174
#endif
// Malloc-based allocator.  Typically slower than default alloc below.
// Typically thread-safe and more storage efficient.
#ifdef __STL_STATIC_TEMPLATE_MEMBER_BUG
# ifdef __DECLARE_GLOBALS_HERE
    void (* __malloc_alloc_oom_handler)() = 0;
    // g++ 2.7.2 does not handle static template data members.
# else
    extern void (* __malloc_alloc_oom_handler)();
# endif
#endif
template <int __inst>
class __malloc_alloc_template {

private:

  static void* _S_oom_malloc(size_t);
  static void* _S_oom_realloc(void*, size_t);

#ifndef __STL_STATIC_TEMPLATE_MEMBER_BUG
  static void (* __malloc_alloc_oom_handler)();
#endif

public:

  static void* allocate(size_t __n)
  {
    void* __result = malloc(__n);
    if (0 == __result) __result = _S_oom_malloc(__n);
    return __result;
  }

  static void deallocate(void* __p, size_t /* __n */)
  {
    free(__p);
  }

  static void* reallocate(void* __p, size_t /* old_sz */, size_t __new_sz)
  {
    void* __result = realloc(__p, __new_sz);
    if (0 == __result) __result = _S_oom_realloc(__p, __new_sz);
    return __result;
  }

  static void (* __set_malloc_handler(void (*__f)()))()
  {
    void (* __old)() = __malloc_alloc_oom_handler;
    __malloc_alloc_oom_handler = __f;
    return(__old);
  }

};
// malloc_alloc out-of-memory handling

#ifndef __STL_STATIC_TEMPLATE_MEMBER_BUG
template <int __inst>
void (* __malloc_alloc_template<__inst>::__malloc_alloc_oom_handler)() = 0;
#endif
template <int __inst>
void*
__malloc_alloc_template<__inst>::_S_oom_malloc(size_t __n)
{
    void (* __my_malloc_handler)();
    void* __result;

    for (;;) {
        __my_malloc_handler = __malloc_alloc_oom_handler;
        if (0 == __my_malloc_handler) { __THROW_BAD_ALLOC; }
        (*__my_malloc_handler)();
        __result = malloc(__n);
        if (__result) return(__result);
    }
}
template <int __inst>
void* __malloc_alloc_template<__inst>::_S_oom_realloc(void* __p, size_t __n)
{
    void (* __my_malloc_handler)();
    void* __result;

    for (;;) {
        __my_malloc_handler = __malloc_alloc_oom_handler;
        if (0 == __my_malloc_handler) { __THROW_BAD_ALLOC; }
        (*__my_malloc_handler)();
        __result = realloc(__p, __n);
        if (__result) return(__result);
    }
}
typedef __malloc_alloc_template<0> malloc_alloc;
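// Example (illustrative sketch, not part of this header): installing an
// out-of-memory handler, analogous to set_new_handler.  The handler name
// __release_caches is hypothetical.
//
//   void __release_caches();  // hypothetically frees some memory
//   ...
//   malloc_alloc::__set_malloc_handler(__release_caches);
//   // allocate() will now call __release_caches and retry malloc in a
//   // loop instead of failing as soon as malloc returns 0.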
template<class _Tp, class _Alloc>
class simple_alloc {

public:
    static _Tp* allocate(size_t __n)
      { return 0 == __n ? 0 : (_Tp*) _Alloc::allocate(__n * sizeof (_Tp)); }
    static _Tp* allocate(void)
      { return (_Tp*) _Alloc::allocate(sizeof (_Tp)); }
    static void deallocate(_Tp* __p, size_t __n)
      { if (0 != __n) _Alloc::deallocate(__p, __n * sizeof (_Tp)); }
    static void deallocate(_Tp* __p)
      { _Alloc::deallocate(__p, sizeof (_Tp)); }
};
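// Example (illustrative sketch): a typed facade over a raw allocator.
//
//   typedef simple_alloc<int, alloc> _Int_alloc;
//   int* __buf = _Int_alloc::allocate(16);  // space for 16 ints
//   ...
//   _Int_alloc::deallocate(__buf, 16);      // must pass the same count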
// Allocator adaptor to check size arguments for debugging.
// Reports errors using assert.  Checking can be disabled with
// NDEBUG, but it's far better to just use the underlying allocator
// instead when no checking is desired.
// There is some evidence that this can confuse Purify.
template <class _Alloc>
class debug_alloc {

private:

  enum {_S_extra = 8};  // Size of space used to store size.  Note
                        // that this must be large enough to preserve
                        // alignment.

public:

  static void* allocate(size_t __n)
  {
    char* __result = (char*)_Alloc::allocate(__n + (int) _S_extra);
    *(size_t*)__result = __n;
    return __result + (int) _S_extra;
  }

  static void deallocate(void* __p, size_t __n)
  {
    char* __real_p = (char*)__p - (int) _S_extra;
    assert(*(size_t*)__real_p == __n);
    _Alloc::deallocate(__real_p, __n + (int) _S_extra);
  }

  static void* reallocate(void* __p, size_t __old_sz, size_t __new_sz)
  {
    char* __real_p = (char*)__p - (int) _S_extra;
    assert(*(size_t*)__real_p == __old_sz);
    char* __result = (char*)
      _Alloc::reallocate(__real_p, __old_sz + (int) _S_extra,
                                   __new_sz + (int) _S_extra);
    *(size_t*)__result = __new_sz;
    return __result + (int) _S_extra;
  }

};
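// Example (illustrative sketch): size-checked wrapper around malloc_alloc.
// Each block is over-allocated by _S_extra bytes to record its size.
//
//   typedef debug_alloc<malloc_alloc> checked_alloc;
//   void* __p = checked_alloc::allocate(100);
//   checked_alloc::deallocate(__p, 100);  // asserts if the size mismatches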
# ifdef __USE_MALLOC

typedef malloc_alloc alloc;
typedef malloc_alloc single_client_alloc;

# else
// Default node allocator.
// With a reasonable compiler, this should be roughly as fast as the
// original STL class-specific allocators, but with less fragmentation.
// Default_alloc_template parameters are experimental and MAY
// DISAPPEAR in the future.  Clients should just use alloc for now.
//
// Important implementation properties:
// 1. If the client requests an object of size > _MAX_BYTES, the resulting
//    object will be obtained directly from malloc.
// 2. In all other cases, we allocate an object of size exactly
//    _S_round_up(requested_size).  Thus the client has enough size
//    information that we can return the object to the proper free list
//    without permanently losing part of the object.
//
// The first template parameter specifies whether more than one thread
// may use this allocator.  It is safe to allocate an object from
// one instance of a default_alloc and deallocate it with another
// one.  This effectively transfers its ownership to the second one.
// This may have undesirable effects on reference locality.
// The second parameter is unreferenced and serves only to allow the
// creation of multiple default_alloc instances.
// Note that containers built on different allocator instances have
// different types, limiting the utility of this approach.
#if defined(__SUNPRO_CC) || defined(__GNUC__)
// breaks if we make these template class members:
  enum {_ALIGN = 8};
  enum {_MAX_BYTES = 128};
  enum {_NFREELISTS = 16}; // _MAX_BYTES/_ALIGN
#endif
template <bool threads, int inst>
class __default_alloc_template {

private:
  // Really we should use static const int x = N
  // instead of enum { x = N }, but few compilers accept the former.
#if ! (defined(__SUNPRO_CC) || defined(__GNUC__))
    enum {_ALIGN = 8};
    enum {_MAX_BYTES = 128};
    enum {_NFREELISTS = 16}; // _MAX_BYTES/_ALIGN
#endif
  static size_t
  _S_round_up(size_t __bytes)
    { return (((__bytes) + (size_t) _ALIGN-1) & ~((size_t) _ALIGN - 1)); }

__PRIVATE:
  union _Obj {
        union _Obj* _M_free_list_link;
        char _M_client_data[1];    /* The client sees this. */
  };

private:
# if defined(__SUNPRO_CC) || defined(__GNUC__) || defined(__HP_aCC)
    static _Obj* __STL_VOLATILE _S_free_list[];
        // Specifying a size results in duplicate def for 4.1
# else
    static _Obj* __STL_VOLATILE _S_free_list[_NFREELISTS];
# endif
  static size_t _S_freelist_index(size_t __bytes) {
        return (((__bytes) + (size_t)_ALIGN-1)/(size_t)_ALIGN - 1);
  }
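  // Worked example (illustrative): with _ALIGN == 8, _S_round_up(13)
  // yields 16 and _S_freelist_index(13) yields 1, so a 13-byte request
  // is served from the free list of 16-byte objects, _S_free_list[1].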
  // Returns an object of size __n, and optionally adds to size __n free list.
  static void* _S_refill(size_t __n);
  // Allocates a chunk for nobjs of size size.  nobjs may be reduced
  // if it is inconvenient to allocate the requested number.
  static char* _S_chunk_alloc(size_t __size, int& __nobjs);

  // Chunk allocation state.
  static char* _S_start_free;
  static char* _S_end_free;
  static size_t _S_heap_size;
# ifdef __STL_THREADS
    static _STL_mutex_lock _S_node_allocator_lock;
# endif

    // It would be nice to use _STL_auto_lock here.  But we
    // don't need the NULL check.  And we do need a test whether
    // threads have actually been started.
    class _Lock;
    friend class _Lock;
    class _Lock {
        public:
            _Lock() { __NODE_ALLOCATOR_LOCK; }
            ~_Lock() { __NODE_ALLOCATOR_UNLOCK; }
    };
public:

  /* __n must be > 0 */
  static void* allocate(size_t __n)
  {
    void* __ret = 0;

    if (__n > (size_t) _MAX_BYTES) {
      __ret = malloc_alloc::allocate(__n);
    }
    else {
      _Obj* __STL_VOLATILE* __my_free_list
          = _S_free_list + _S_freelist_index(__n);
      // Acquire the lock here with a constructor call.
      // This ensures that it is released in exit or during stack
      // unwinding.
#     ifndef _NOTHREADS
      /*REFERENCED*/
      _Lock __lock_instance;
#     endif
      _Obj* __RESTRICT __result = *__my_free_list;
      if (__result == 0)
        __ret = _S_refill(_S_round_up(__n));
      else {
        *__my_free_list = __result -> _M_free_list_link;
        __ret = __result;
      }
    }

    return __ret;
  }
  /* __p may not be 0 */
  static void deallocate(void* __p, size_t __n)
  {
    if (__n > (size_t) _MAX_BYTES)
      malloc_alloc::deallocate(__p, __n);
    else {
      _Obj* __STL_VOLATILE* __my_free_list
          = _S_free_list + _S_freelist_index(__n);
      _Obj* __q = (_Obj*)__p;

      // acquire lock
#     ifndef _NOTHREADS
      /*REFERENCED*/
      _Lock __lock_instance;
#     endif /* _NOTHREADS */
      __q -> _M_free_list_link = *__my_free_list;
      *__my_free_list = __q;
      // lock is released here
    }
  }

  static void* reallocate(void* __p, size_t __old_sz, size_t __new_sz);

};
typedef __default_alloc_template<__NODE_ALLOCATOR_THREADS, 0> alloc;
typedef __default_alloc_template<false, 0> single_client_alloc;
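// Example (illustrative sketch): small requests are served from the
// free lists; anything above _MAX_BYTES falls through to malloc_alloc.
//
//   void* __small = alloc::allocate(40);   // from _S_free_list[4]
//   void* __big   = alloc::allocate(256);  // > _MAX_BYTES: malloc_alloc
//   alloc::deallocate(__small, 40);        // back onto the free list
//   alloc::deallocate(__big, 256);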
template <bool __threads, int __inst>
inline bool operator==(const __default_alloc_template<__threads, __inst>&,
                       const __default_alloc_template<__threads, __inst>&)
{
  return true;
}

# ifdef __STL_FUNCTION_TMPL_PARTIAL_ORDER
template <bool __threads, int __inst>
inline bool operator!=(const __default_alloc_template<__threads, __inst>&,
                       const __default_alloc_template<__threads, __inst>&)
{
  return false;
}
# endif /* __STL_FUNCTION_TMPL_PARTIAL_ORDER */
/* We allocate memory in large chunks in order to avoid fragmenting */
/* the malloc heap too much.                                        */
/* We assume that size is properly aligned.                         */
/* We hold the allocation lock.                                     */
template <bool __threads, int __inst>
char*
__default_alloc_template<__threads, __inst>::_S_chunk_alloc(size_t __size,
                                                            int& __nobjs)
{
    char* __result;
    size_t __total_bytes = __size * __nobjs;
    size_t __bytes_left = _S_end_free - _S_start_free;

    if (__bytes_left >= __total_bytes) {
        __result = _S_start_free;
        _S_start_free += __total_bytes;
        return(__result);
    } else if (__bytes_left >= __size) {
        __nobjs = (int)(__bytes_left/__size);
        __total_bytes = __size * __nobjs;
        __result = _S_start_free;
        _S_start_free += __total_bytes;
        return(__result);
    } else {
        size_t __bytes_to_get =
          2 * __total_bytes + _S_round_up(_S_heap_size >> 4);
        // Try to make use of the left-over piece.
        if (__bytes_left > 0) {
            _Obj* __STL_VOLATILE* __my_free_list =
                        _S_free_list + _S_freelist_index(__bytes_left);

            ((_Obj*)_S_start_free) -> _M_free_list_link = *__my_free_list;
            *__my_free_list = (_Obj*)_S_start_free;
        }
        _S_start_free = (char*)malloc(__bytes_to_get);
        if (0 == _S_start_free) {
            size_t __i;
            _Obj* __STL_VOLATILE* __my_free_list;
            _Obj* __p;
            // Try to make do with what we have.  That can't
            // hurt.  We do not try smaller requests, since that tends
            // to result in disaster on multi-process machines.
            for (__i = __size;
                 __i <= (size_t) _MAX_BYTES;
                 __i += (size_t) _ALIGN) {
                __my_free_list = _S_free_list + _S_freelist_index(__i);
                __p = *__my_free_list;
                if (0 != __p) {
                    *__my_free_list = __p -> _M_free_list_link;
                    _S_start_free = (char*)__p;
                    _S_end_free = _S_start_free + __i;
                    return(_S_chunk_alloc(__size, __nobjs));
                    // Any leftover piece will eventually make it to the
                    // right free list.
                }
            }
            _S_end_free = 0;    // In case of exception.
            _S_start_free = (char*)malloc_alloc::allocate(__bytes_to_get);
            // This should either throw an
            // exception or remedy the situation.  Thus we assume it
            // succeeded.
        }
        _S_heap_size += __bytes_to_get;
        _S_end_free = _S_start_free + __bytes_to_get;
        return(_S_chunk_alloc(__size, __nobjs));
    }
}
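// Worked example (illustrative): on the very first refill of the 16-byte
// list, __nobjs == 20 and _S_heap_size == 0, so __bytes_to_get is
// 2 * 20 * 16 + 0 == 640.  320 bytes satisfy the current request and the
// remaining 320 stay in the pool; as _S_heap_size grows, the
// _S_round_up(_S_heap_size >> 4) term makes later chunks larger.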
/* Returns an object of size __n, and optionally adds to size __n free list.*/
/* We assume that __n is properly aligned.                                  */
/* We hold the allocation lock.                                             */
template <bool __threads, int __inst>
void*
__default_alloc_template<__threads, __inst>::_S_refill(size_t __n)
{
    int __nobjs = 20;
    char* __chunk = _S_chunk_alloc(__n, __nobjs);
    _Obj* __STL_VOLATILE* __my_free_list;
    _Obj* __result;
    _Obj* __current_obj;
    _Obj* __next_obj;
    int __i;

    if (1 == __nobjs) return(__chunk);
    __my_free_list = _S_free_list + _S_freelist_index(__n);

    /* Build free list in chunk */
      __result = (_Obj*)__chunk;
      *__my_free_list = __next_obj = (_Obj*)(__chunk + __n);
      for (__i = 1; ; __i++) {
        __current_obj = __next_obj;
        __next_obj = (_Obj*)((char*)__next_obj + __n);
        if (__nobjs - 1 == __i) {
            __current_obj -> _M_free_list_link = 0;
            break;
        } else {
            __current_obj -> _M_free_list_link = __next_obj;
        }
      }
    return(__result);
}
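// Illustrative sketch of the result of _S_refill(16) when 20 objects are
// obtained: object 0 is returned to the caller and objects 1..19 are
// chained onto the 16-byte free list:
//
//   _S_free_list[1] -> obj1 -> obj2 -> ... -> obj19 -> 0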
template <bool threads, int inst>
void*
__default_alloc_template<threads, inst>::reallocate(void* __p,
                                                    size_t __old_sz,
                                                    size_t __new_sz)
{
    void* __result;
    size_t __copy_sz;

    if (__old_sz > (size_t) _MAX_BYTES && __new_sz > (size_t) _MAX_BYTES) {
        return(realloc(__p, __new_sz));
    }
    if (_S_round_up(__old_sz) == _S_round_up(__new_sz)) return(__p);
    __result = allocate(__new_sz);
    __copy_sz = __new_sz > __old_sz? __old_sz : __new_sz;
    memcpy(__result, __p, __copy_sz);
    deallocate(__p, __old_sz);
    return(__result);
}
#ifdef __STL_THREADS
    template <bool __threads, int __inst>
    _STL_mutex_lock
    __default_alloc_template<__threads, __inst>::_S_node_allocator_lock
        __STL_MUTEX_INITIALIZER;
#endif
template <bool __threads, int __inst>
char* __default_alloc_template<__threads, __inst>::_S_start_free = 0;

template <bool __threads, int __inst>
char* __default_alloc_template<__threads, __inst>::_S_end_free = 0;

template <bool __threads, int __inst>
size_t __default_alloc_template<__threads, __inst>::_S_heap_size = 0;
template <bool __threads, int __inst>
typename __default_alloc_template<__threads, __inst>::_Obj* __STL_VOLATILE
__default_alloc_template<__threads, __inst> ::_S_free_list[
# if defined(__SUNPRO_CC) || defined(__GNUC__) || defined(__HP_aCC)
    _NFREELISTS
# else
    __default_alloc_template<__threads, __inst>::_NFREELISTS
# endif
] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, };
// The 16 zeros are necessary to make version 4.1 of the SunPro
// compiler happy.  Otherwise it appears to allocate too little
// space for the array.
#endif /* ! __USE_MALLOC */
// This implements allocators as specified in the C++ standard.
//
// Note that standard-conforming allocators use many language features
// that are not yet widely implemented.  In particular, they rely on
// member templates, partial specialization, partial ordering of function
// templates, the typename keyword, and the use of the template keyword
// to refer to a template member of a dependent type.
#ifdef __STL_USE_STD_ALLOCATORS

template <class _Tp>
class allocator {
  typedef alloc _Alloc;          // The underlying allocator.
public:
  typedef size_t     size_type;
  typedef ptrdiff_t  difference_type;
  typedef _Tp*       pointer;
  typedef const _Tp* const_pointer;
  typedef _Tp&       reference;
  typedef const _Tp& const_reference;
  typedef _Tp        value_type;

  template <class _Tp1> struct rebind {
    typedef allocator<_Tp1> other;
  };
  allocator() __STL_NOTHROW {}
  allocator(const allocator&) __STL_NOTHROW {}
  template <class _Tp1> allocator(const allocator<_Tp1>&) __STL_NOTHROW {}
  ~allocator() __STL_NOTHROW {}

  pointer address(reference __x) const { return &__x; }
  const_pointer address(const_reference __x) const { return &__x; }
  // __n is permitted to be 0.  The C++ standard says nothing about what
  // the return value is when __n == 0.
  _Tp* allocate(size_type __n, const void* = 0) {
    return __n != 0 ? static_cast<_Tp*>(_Alloc::allocate(__n * sizeof(_Tp)))
                    : 0;
  }
  // __p is not permitted to be a null pointer.
  void deallocate(pointer __p, size_type __n)
    { _Alloc::deallocate(__p, __n * sizeof(_Tp)); }

  size_type max_size() const __STL_NOTHROW
    { return size_t(-1) / sizeof(_Tp); }

  void construct(pointer __p, const _Tp& __val) { new(__p) _Tp(__val); }
  void destroy(pointer __p) { __p->~_Tp(); }
};
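// Example (illustrative sketch): the standard allocate / construct /
// destroy / deallocate cycle.
//
//   allocator<int> __a;
//   int* __p = __a.allocate(1);   // raw storage for one int
//   __a.construct(__p, 42);       // placement-new: *__p == 42
//   __a.destroy(__p);             // run the (trivial) destructor
//   __a.deallocate(__p, 1);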
template<>
class allocator<void> {
public:
  typedef size_t      size_type;
  typedef ptrdiff_t   difference_type;
  typedef void*       pointer;
  typedef const void* const_pointer;
  typedef void        value_type;

  template <class _Tp1> struct rebind {
    typedef allocator<_Tp1> other;
  };
};
template <class _T1, class _T2>
inline bool operator==(const allocator<_T1>&, const allocator<_T2>&)
{
  return true;
}

template <class _T1, class _T2>
inline bool operator!=(const allocator<_T1>&, const allocator<_T2>&)
{
  return false;
}
// Allocator adaptor to turn an SGI-style allocator (e.g. alloc, malloc_alloc)
// into a standard-conforming allocator.  Note that this adaptor does
// *not* assume that all objects of the underlying alloc class are
// identical, nor does it assume that all of the underlying alloc's
// member functions are static member functions.  Note, also, that
// __allocator<_Tp, alloc> is essentially the same thing as allocator<_Tp>.
template <class _Tp, class _Alloc>
struct __allocator {
  _Alloc __underlying_alloc;

  typedef size_t     size_type;
  typedef ptrdiff_t  difference_type;
  typedef _Tp*       pointer;
  typedef const _Tp* const_pointer;
  typedef _Tp&       reference;
  typedef const _Tp& const_reference;
  typedef _Tp        value_type;

  template <class _Tp1> struct rebind {
    typedef __allocator<_Tp1, _Alloc> other;
  };
  __allocator() __STL_NOTHROW {}
  __allocator(const __allocator& __a) __STL_NOTHROW
    : __underlying_alloc(__a.__underlying_alloc) {}
  template <class _Tp1>
  __allocator(const __allocator<_Tp1, _Alloc>& __a) __STL_NOTHROW
    : __underlying_alloc(__a.__underlying_alloc) {}
  ~__allocator() __STL_NOTHROW {}

  pointer address(reference __x) const { return &__x; }
  const_pointer address(const_reference __x) const { return &__x; }
  // __n is permitted to be 0.
  _Tp* allocate(size_type __n, const void* = 0) {
    return __n != 0
        ? static_cast<_Tp*>(__underlying_alloc.allocate(__n * sizeof(_Tp)))
        : 0;
  }
  // __p is not permitted to be a null pointer.
  void deallocate(pointer __p, size_type __n)
    { __underlying_alloc.deallocate(__p, __n * sizeof(_Tp)); }

  size_type max_size() const __STL_NOTHROW
    { return size_t(-1) / sizeof(_Tp); }

  void construct(pointer __p, const _Tp& __val) { new(__p) _Tp(__val); }
  void destroy(pointer __p) { __p->~_Tp(); }
};
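// Example (illustrative sketch): adapting the SGI-style malloc_alloc to
// the standard allocator interface.
//
//   typedef __allocator<int, malloc_alloc> _Std_malloc_alloc;
//   _Std_malloc_alloc __a;
//   int* __p = __a.allocate(8);
//   __a.deallocate(__p, 8);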
template <class _Alloc>
class __allocator<void, _Alloc> {
public:
  typedef size_t      size_type;
  typedef ptrdiff_t   difference_type;
  typedef void*       pointer;
  typedef const void* const_pointer;
  typedef void        value_type;

  template <class _Tp1> struct rebind {
    typedef __allocator<_Tp1, _Alloc> other;
  };
};
template <class _Tp, class _Alloc>
inline bool operator==(const __allocator<_Tp, _Alloc>& __a1,
                       const __allocator<_Tp, _Alloc>& __a2)
{
  return __a1.__underlying_alloc == __a2.__underlying_alloc;
}
#ifdef __STL_FUNCTION_TMPL_PARTIAL_ORDER
template <class _Tp, class _Alloc>
inline bool operator!=(const __allocator<_Tp, _Alloc>& __a1,
                       const __allocator<_Tp, _Alloc>& __a2)
{
  return __a1.__underlying_alloc != __a2.__underlying_alloc;
}
#endif /* __STL_FUNCTION_TMPL_PARTIAL_ORDER */
// Comparison operators for all of the predefined SGI-style allocators.
// This ensures that __allocator<malloc_alloc> (for example) will
// work correctly.

template <int inst>
inline bool operator==(const __malloc_alloc_template<inst>&,
                       const __malloc_alloc_template<inst>&)
{
  return true;
}
#ifdef __STL_FUNCTION_TMPL_PARTIAL_ORDER
template <int __inst>
inline bool operator!=(const __malloc_alloc_template<__inst>&,
                       const __malloc_alloc_template<__inst>&)
{
  return false;
}
#endif /* __STL_FUNCTION_TMPL_PARTIAL_ORDER */
template <class _Alloc>
inline bool operator==(const debug_alloc<_Alloc>&,
                       const debug_alloc<_Alloc>&) {
  return true;
}
#ifdef __STL_FUNCTION_TMPL_PARTIAL_ORDER
template <class _Alloc>
inline bool operator!=(const debug_alloc<_Alloc>&,
                       const debug_alloc<_Alloc>&) {
  return false;
}
#endif /* __STL_FUNCTION_TMPL_PARTIAL_ORDER */
// Another allocator adaptor: _Alloc_traits.  This serves two
// purposes.  First, make it possible to write containers that can use
// either SGI-style allocators or standard-conforming allocators.
// Second, provide a mechanism so that containers can query whether or
// not the allocator has distinct instances.  If not, the container
// can avoid wasting a word of memory to store an empty object.

// This adaptor uses partial specialization.  The general case of
// _Alloc_traits<_Tp, _Alloc> assumes that _Alloc is a
// standard-conforming allocator, possibly with non-equal instances
// and non-static members.  (It still behaves correctly even if _Alloc
// has static members and all instances are equal.  Refinements
// affect performance, not correctness.)

// There are always two members: allocator_type, which is a standard-
// conforming allocator type for allocating objects of type _Tp, and
// _S_instanceless, a static const member of type bool.  If
// _S_instanceless is true, this means that there is no difference
// between any two instances of type allocator_type.  Furthermore, if
// _S_instanceless is true, then _Alloc_traits has one additional
// member: _Alloc_type.  This type encapsulates allocation and
// deallocation of objects of type _Tp through a static interface; it
// has two member functions, whose signatures are
//    static _Tp* allocate(size_t)
//    static void deallocate(_Tp*, size_t)
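// Example (illustrative sketch): how a container might exploit
// _S_instanceless; the container and member names here are hypothetical.
//
//   template <class _Tp, class _Alloc>
//   class _Some_container {
//     typedef _Alloc_traits<_Tp, _Alloc> _Traits;
//     typedef typename _Traits::_Alloc_type _Alloc_type;
//     _Tp* _M_allocate(size_t __n) {
//       // When _Traits::_S_instanceless is true, _Alloc_type has a
//       // static interface, so no allocator object need be stored.
//       return _Alloc_type::allocate(__n);
//     }
//   };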
// The fully general version.

template <class _Tp, class _Allocator>
struct _Alloc_traits
{
  static const bool _S_instanceless = false;
  typedef typename _Allocator::__STL_TEMPLATE rebind<_Tp>::other
          allocator_type;
};

template <class _Tp, class _Allocator>
const bool _Alloc_traits<_Tp, _Allocator>::_S_instanceless;
// The version for the default allocator.

template <class _Tp, class _Tp1>
struct _Alloc_traits<_Tp, allocator<_Tp1> >
{
  static const bool _S_instanceless = true;
  typedef simple_alloc<_Tp, alloc> _Alloc_type;
  typedef allocator<_Tp> allocator_type;
};
// Versions for the predefined SGI-style allocators.

template <class _Tp, int __inst>
struct _Alloc_traits<_Tp, __malloc_alloc_template<__inst> >
{
  static const bool _S_instanceless = true;
  typedef simple_alloc<_Tp, __malloc_alloc_template<__inst> > _Alloc_type;
  typedef __allocator<_Tp, __malloc_alloc_template<__inst> > allocator_type;
};
template <class _Tp, bool __threads, int __inst>
struct _Alloc_traits<_Tp, __default_alloc_template<__threads, __inst> >
{
  static const bool _S_instanceless = true;
  typedef simple_alloc<_Tp, __default_alloc_template<__threads, __inst> >
          _Alloc_type;
  typedef __allocator<_Tp, __default_alloc_template<__threads, __inst> >
          allocator_type;
};
template <class _Tp, class _Alloc>
struct _Alloc_traits<_Tp, debug_alloc<_Alloc> >
{
  static const bool _S_instanceless = true;
  typedef simple_alloc<_Tp, debug_alloc<_Alloc> > _Alloc_type;
  typedef __allocator<_Tp, debug_alloc<_Alloc> > allocator_type;
};
// Versions for the __allocator adaptor used with the predefined
// SGI-style allocators.

template <class _Tp, class _Tp1, int __inst>
struct _Alloc_traits<_Tp,
                     __allocator<_Tp1, __malloc_alloc_template<__inst> > >
{
  static const bool _S_instanceless = true;
  typedef simple_alloc<_Tp, __malloc_alloc_template<__inst> > _Alloc_type;
  typedef __allocator<_Tp, __malloc_alloc_template<__inst> > allocator_type;
};
template <class _Tp, class _Tp1, bool __thr, int __inst>
struct _Alloc_traits<_Tp,
                     __allocator<_Tp1,
                                 __default_alloc_template<__thr, __inst> > >
{
  static const bool _S_instanceless = true;
  typedef simple_alloc<_Tp, __default_alloc_template<__thr,__inst> >
          _Alloc_type;
  typedef __allocator<_Tp, __default_alloc_template<__thr,__inst> >
          allocator_type;
};
template <class _Tp, class _Tp1, class _Alloc>
struct _Alloc_traits<_Tp, __allocator<_Tp1, debug_alloc<_Alloc> > >
{
  static const bool _S_instanceless = true;
  typedef simple_alloc<_Tp, debug_alloc<_Alloc> > _Alloc_type;
  typedef __allocator<_Tp, debug_alloc<_Alloc> > allocator_type;
};
#endif /* __STL_USE_STD_ALLOCATORS */
#if defined(__sgi) && !defined(__GNUC__) && (_MIPS_SIM != _MIPS_SIM_ABI32)
#pragma reset woff 1174
#endif

__STL_END_NAMESPACE

#undef __PRIVATE

#endif /* __SGI_STL_INTERNAL_ALLOC_H */