1 // Allocators -*- C++ -*-
3 // Copyright (C) 2001 Free Software Foundation, Inc.
5 // This file is part of the GNU ISO C++ Library. This library is free
6 // software; you can redistribute it and/or modify it under the
7 // terms of the GNU General Public License as published by the
8 // Free Software Foundation; either version 2, or (at your option)
11 // This library is distributed in the hope that it will be useful,
12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 // GNU General Public License for more details.
16 // You should have received a copy of the GNU General Public License along
17 // with this library; see the file COPYING. If not, write to the Free
18 // Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307,
21 // As a special exception, you may use this file as part of a free software
22 // library without restriction. Specifically, if other files instantiate
23 // templates or use macros or inline functions from this file, or you compile
24 // this file and link it with other files to produce an executable, this
25 // file does not by itself cause the resulting executable to be covered by
26 // the GNU General Public License. This exception does not however
27 // invalidate any other reasons why the executable file might be covered by
28 // the GNU General Public License.
31 * Copyright (c) 1996-1997
32 * Silicon Graphics Computer Systems, Inc.
34 * Permission to use, copy, modify, distribute and sell this software
35 * and its documentation for any purpose is hereby granted without fee,
36 * provided that the above copyright notice appear in all copies and
37 * that both that copyright notice and this permission notice appear
38 * in supporting documentation. Silicon Graphics makes no
39 * representations about the suitability of this software for any
40 * purpose. It is provided "as is" without express or implied warranty.
44 * This is an internal header file, included by other library headers.
45 * You should not attempt to use it directly.
48 #ifndef __SGI_STL_INTERNAL_ALLOC_H
49 #define __SGI_STL_INTERNAL_ALLOC_H
51 // This implements some standard node allocators. These are
// NOT the same as the allocators in the C++ draft standard or
53 // in the original STL. They do not encapsulate different pointer
54 // types; indeed we assume that there is only one pointer type.
55 // The allocation primitives are intended to allocate individual objects,
56 // not larger arenas as with the original STL allocators.
58 #include <bits/functexcept.h> // for __throw_bad_alloc
59 #include <bits/std_cstddef.h>
60 #include <bits/std_cstdlib.h>
61 #include <bits/std_cstring.h>
62 #include <bits/std_cassert.h>
// Threading support for the default node allocator.  NOTE(review):
// the enclosing preprocessor conditional is elided in this view; the
// trailing #define group is the no-threads fallback branch.
# include <bits/stl_threads.h>
# define __NODE_ALLOCATOR_THREADS true
# ifdef __STL_SGI_THREADS
// We test whether threads are in use before locking.
// Perhaps this should be moved into stl_threads.h, but that
// probably makes it harder to avoid the procedure call when
extern int __us_rsthread_malloc;
// The above is copied from malloc.h. Including <malloc.h>
// would be cleaner but fails with certain levels of standard
// SGI branch: lock only when the runtime flag reports that malloc is
// actually being used from multiple threads.
# define __NODE_ALLOCATOR_LOCK if (threads && __us_rsthread_malloc) \
{ _S_node_allocator_lock._M_acquire_lock(); }
# define __NODE_ALLOCATOR_UNLOCK if (threads && __us_rsthread_malloc) \
{ _S_node_allocator_lock._M_release_lock(); }
# else /* !__STL_SGI_THREADS */
// Generic threaded build: lock iff the allocator template was
// instantiated with threads == true.
# define __NODE_ALLOCATOR_LOCK \
{ if (threads) _S_node_allocator_lock._M_acquire_lock(); }
# define __NODE_ALLOCATOR_UNLOCK \
{ if (threads) _S_node_allocator_lock._M_release_lock(); }
// No-threads fallback: the lock/unlock operations compile away.
# define __NODE_ALLOCATOR_LOCK
# define __NODE_ALLOCATOR_UNLOCK
# define __NODE_ALLOCATOR_THREADS false
// A new-based allocator, as required by the standard.
// NOTE(review): the class head (__new_alloc, per later typedefs) and
// the allocate() signature are elided in this view.
// allocate: forwards to global operator new, which throws on failure.
{ return ::operator new(__n); }
// deallocate: the size argument is unused; operator delete frees.
deallocate(void* __p, size_t)
{ ::operator delete(__p); }
// Malloc-based allocator. Typically slower than default alloc below.
// Typically thread-safe and more storage efficient.
// NOTE(review): parts of the class body (access specifiers, member
// signatures, braces) are elided in this view.
template <int __inst>
class __malloc_alloc_template
// Out-of-memory helpers, defined out of line below: invoke the
// installed handler and retry the allocation.
static void* _S_oom_malloc(size_t);
static void* _S_oom_realloc(void*, size_t);
// Client-installable callback in the style of set_new_handler;
// one per <__inst> specialization.
static void (* __malloc_alloc_oom_handler)();
// allocate: plain malloc, falling back to the OOM protocol.
void* __result = malloc(__n);
if (0 == __result) __result = _S_oom_malloc(__n);
// deallocate: size argument is ignored (body elided in this view).
deallocate(void* __p, size_t /* __n */)
// reallocate: realloc with the same OOM fallback as allocate.
reallocate(void* __p, size_t /* old_sz */, size_t __new_sz)
void* __result = realloc(__p, __new_sz);
if (0 == __result) __result = _S_oom_realloc(__p, __new_sz);
// Installs a new OOM handler, returning the previous one.
static void (* __set_malloc_handler(void (*__f)()))()
void (* __old)() = __malloc_alloc_oom_handler;
__malloc_alloc_oom_handler = __f;
// malloc_alloc out-of-memory handling
// The handler starts out null, so the first allocation failure throws
// bad_alloc (see _S_oom_malloc below) unless the client installs a
// handler via __set_malloc_handler.
template <int __inst>
void (* __malloc_alloc_template<__inst>::__malloc_alloc_oom_handler)() = 0;
// Called when malloc fails: invoke the client's OOM handler and retry.
// NOTE(review): the return type, the surrounding retry loop, the
// __result declaration, and the final return are elided in this view.
template <int __inst>
__malloc_alloc_template<__inst>::_S_oom_malloc(size_t __n)
void (* __my_malloc_handler)();
// Re-read the handler each attempt in case it was replaced.
__my_malloc_handler = __malloc_alloc_oom_handler;
// No handler installed: give up and report exhaustion.
if (0 == __my_malloc_handler)
std::__throw_bad_alloc();
// Let the handler try to free memory, then retry the allocation.
(*__my_malloc_handler)();
__result = malloc(__n);
// Realloc counterpart of _S_oom_malloc: same handler-and-retry
// protocol.  NOTE(review): the return type, retry loop, __result
// declaration, and final return are elided in this view.
template <int __inst>
__malloc_alloc_template<__inst>::_S_oom_realloc(void* __p, size_t __n)
void (* __my_malloc_handler)();
// Re-read the handler each attempt in case it was replaced.
__my_malloc_handler = __malloc_alloc_oom_handler;
// No handler installed: give up and report exhaustion.
if (0 == __my_malloc_handler)
std::__throw_bad_alloc();
// Let the handler try to free memory, then retry the reallocation.
(*__my_malloc_handler)();
__result = realloc(__p, __n);
// Determines the underlying allocator choice.
// NOTE(review): these two typedefs are mutually exclusive branches of
// an elided preprocessor conditional (apparently __USE_MALLOC, per
// the #endif at the end of this section): malloc-based when defined,
// otherwise the operator-new-based __new_alloc.
typedef __malloc_alloc_template<0> __mem_interface;
typedef __new_alloc __mem_interface;
// Adaptor from the byte-oriented _Alloc interface to a typed,
// object-count interface.  NOTE(review): the class head (simple_alloc,
// per the _Alloc_traits uses below) is elided in this view.
template<class _Tp, class _Alloc>
// Allocate __n objects; a count of 0 yields a null pointer.
static _Tp* allocate(size_t __n)
{ return 0 == __n ? 0 : (_Tp*) _Alloc::allocate(__n * sizeof (_Tp)); }
// Allocate exactly one object.
static _Tp* allocate(void)
{ return (_Tp*) _Alloc::allocate(sizeof (_Tp)); }
// Release __n objects; a count of 0 is a no-op (matches allocate).
static void deallocate(_Tp* __p, size_t __n)
{ if (0 != __n) _Alloc::deallocate(__p, __n * sizeof (_Tp)); }
// Release exactly one object.
static void deallocate(_Tp* __p)
{ _Alloc::deallocate(__p, sizeof (_Tp)); }
// Allocator adaptor to check size arguments for debugging.
// Reports errors using assert. Checking can be disabled with
// NDEBUG, but it's far better to just use the underlying allocator
// instead when no checking is desired.
// There is some evidence that this can confuse Purify.
// NOTE(review): the class head (debug_alloc, per the operators below)
// and several member braces are elided in this view.
template <class _Alloc>
enum {_S_extra = 8}; // Size of space used to store size. Note
// that this must be large enough to preserve
// allocate: obtain __n + _S_extra bytes, stamp the requested size into
// the header, and hand the client the address just past the header.
static void* allocate(size_t __n)
char* __result = (char*)_Alloc::allocate(__n + (int) _S_extra);
*(size_t*)__result = __n;
return __result + (int) _S_extra;
// deallocate: step back to the header and assert that the size the
// caller claims matches the size recorded at allocation time.
static void deallocate(void* __p, size_t __n)
char* __real_p = (char*)__p - (int) _S_extra;
assert(*(size_t*)__real_p == __n);
_Alloc::deallocate(__real_p, __n + (int) _S_extra);
// reallocate: same header check on the old size, then delegate and
// re-stamp the new size into the (possibly moved) header.
static void* reallocate(void* __p, size_t __old_sz, size_t __new_sz)
char* __real_p = (char*)__p - (int) _S_extra;
assert(*(size_t*)__real_p == __old_sz);
char* __result = (char*)
_Alloc::reallocate(__real_p, __old_sz + (int) _S_extra,
__new_sz + (int) _S_extra);
*(size_t*)__result = __new_sz;
return __result + (int) _S_extra;
// Public allocator names.  NOTE(review): this appears to be the
// __USE_MALLOC branch (elided #ifdef), where both names map straight
// to the underlying __mem_interface with no free-list caching.
typedef __mem_interface alloc;
typedef __mem_interface single_client_alloc;
267 // Default node allocator.
268 // With a reasonable compiler, this should be roughly as fast as the
269 // original STL class-specific allocators, but with less fragmentation.
270 // Default_alloc_template parameters are experimental and MAY
271 // DISAPPEAR in the future. Clients should just use alloc for now.
273 // Important implementation properties:
// 1. If the client requests an object of size > _MAX_BYTES, the resulting
275 // object will be obtained directly from malloc.
276 // 2. In all other cases, we allocate an object of size exactly
277 // _S_round_up(requested_size). Thus the client has enough size
278 // information that we can return the object to the proper free list
279 // without permanently losing part of the object.
282 // The first template parameter specifies whether more than one thread
283 // may use this allocator. It is safe to allocate an object from
284 // one instance of a default_alloc and deallocate it with another
285 // one. This effectively transfers its ownership to the second one.
286 // This may have undesirable effects on reference locality.
287 // The second parameter is unreferenced and serves only to allow the
288 // creation of multiple default_alloc instances.
// Note that containers built on different allocator instances have
290 // different types, limiting the utility of this approach.
// NOTE(review): large parts of this class body (braces, access
// specifiers, preprocessor lines, several statements and returns) are
// elided in this view; the comments below describe only what survives.
template <bool threads, int inst>
class __default_alloc_template {
// Really we should use static const int x = N
// instead of enum { x = N }, but few compilers accept the former.
// Requests above _MAX_BYTES bypass the free lists and go straight to
// __mem_interface; _NFREELISTS buckets cover the small size classes.
enum {_MAX_BYTES = 128};
enum {_NFREELISTS = 16}; // _MAX_BYTES/_ALIGN
// Round __bytes up to the next multiple of _ALIGN.
_S_round_up(size_t __bytes)
{ return (((__bytes) + (size_t) _ALIGN-1) & ~((size_t) _ALIGN - 1)); }
// Free-list node: the link overlays the client data, so a free block
// costs no extra space.
union _Obj* _M_free_list_link;
char _M_client_data[1]; /* The client sees this. */
// One singly linked free list per size class.
static _Obj* volatile _S_free_list[];
// Specifying a size results in duplicate def for 4.1
// Map a byte size to its free-list bucket index.
static size_t _S_freelist_index(size_t __bytes) {
return (((__bytes) + (size_t)_ALIGN-1)/(size_t)_ALIGN - 1);
// Returns an object of size __n, and optionally adds to size __n free list.
static void* _S_refill(size_t __n);
// Allocates a chunk for nobjs of size size. nobjs may be reduced
// if it is inconvenient to allocate the requested number.
static char* _S_chunk_alloc(size_t __size, int& __nobjs);
// Chunk allocation state.
static char* _S_start_free;
static char* _S_end_free;
static size_t _S_heap_size;
# ifdef __STL_THREADS
static _STL_mutex_lock _S_node_allocator_lock;
// It would be nice to use _STL_auto_lock here. But we
// don't need the NULL check. And we do need a test whether
// threads have actually been started.
// Scope guard: constructor acquires the allocator lock, destructor
// releases it (both expand to nothing when threading is disabled).
_Lock() { __NODE_ALLOCATOR_LOCK; }
~_Lock() { __NODE_ALLOCATOR_UNLOCK; }
/* __n must be > 0 */
static void* allocate(size_t __n)
// Large request: delegate directly to the underlying allocator.
if (__n > (size_t) _MAX_BYTES)
__ret = __mem_interface::allocate(__n);
// Small request: pop the head of the matching free list.
_Obj* volatile* __my_free_list = _S_free_list + _S_freelist_index(__n);
// Acquire the lock here with a constructor call.
// This ensures that it is released in exit or during stack
_Lock __lock_instance;
_Obj* __RESTRICT __result = *__my_free_list;
// Empty list: refill it from the chunk pool.
__ret = _S_refill(_S_round_up(__n));
*__my_free_list = __result -> _M_free_list_link;
/* __p may not be 0 */
static void deallocate(void* __p, size_t __n)
// Large block: it was obtained directly from __mem_interface.
if (__n > (size_t) _MAX_BYTES)
__mem_interface::deallocate(__p, __n);
// Small block: push it back onto the front of its free list.
_Obj* volatile* __my_free_list
= _S_free_list + _S_freelist_index(__n);
_Obj* __q = (_Obj*)__p;
_Lock __lock_instance;
# endif /* _NOTHREADS */
__q -> _M_free_list_link = *__my_free_list;
*__my_free_list = __q;
// lock is released here
static void* reallocate(void* __p, size_t __old_sz, size_t __new_sz);
// The default allocator: thread-awareness follows the build; the
// single_client variant instantiates with threads == false so all
// locking compiles away.
typedef __default_alloc_template<__NODE_ALLOCATOR_THREADS, 0> alloc;
typedef __default_alloc_template<false, 0> single_client_alloc;
// All objects of one __default_alloc_template specialization share
// static state, so any two compare equal.  NOTE(review): the return
// statements are elided in this view.
template <bool __threads, int __inst>
inline bool operator==(const __default_alloc_template<__threads, __inst>&,
const __default_alloc_template<__threads, __inst>&)
template <bool __threads, int __inst>
inline bool operator!=(const __default_alloc_template<__threads, __inst>&,
const __default_alloc_template<__threads, __inst>&)
/* We allocate memory in large chunks in order to avoid fragmenting */
/* the malloc heap too much. */
/* We assume that size is properly aligned. */
/* We hold the allocation lock. */
// NOTE(review): the return type, the parameter-list tail, local
// declarations, and several braces are elided in this view.
template <bool __threads, int __inst>
__default_alloc_template<__threads, __inst>::_S_chunk_alloc(size_t __size,
size_t __total_bytes = __size * __nobjs;
size_t __bytes_left = _S_end_free - _S_start_free;
// Case 1: the current chunk satisfies the whole request.
if (__bytes_left >= __total_bytes)
__result = _S_start_free;
_S_start_free += __total_bytes;
// Case 2: the chunk can supply at least one object; shrink __nobjs.
else if (__bytes_left >= __size)
__nobjs = (int)(__bytes_left/__size);
__total_bytes = __size * __nobjs;
__result = _S_start_free;
_S_start_free += __total_bytes;
// Case 3: get a fresh chunk; the amount grows with heap usage.
size_t __bytes_to_get =
2 * __total_bytes + _S_round_up(_S_heap_size >> 4);
// Try to make use of the left-over piece.
if (__bytes_left > 0)
_Obj* volatile* __my_free_list =
_S_free_list + _S_freelist_index(__bytes_left);
((_Obj*)_S_start_free) -> _M_free_list_link = *__my_free_list;
*__my_free_list = (_Obj*)_S_start_free;
_S_start_free = (char*) __mem_interface::allocate(__bytes_to_get);
// Allocation failed: scavenge the free lists for a usable block.
if (0 == _S_start_free)
_Obj* volatile* __my_free_list;
// Try to make do with what we have. That can't hurt. We
// do not try smaller requests, since that tends to result
// in disaster on multi-process machines.
for (; __i <= (size_t) _MAX_BYTES; __i += (size_t) _ALIGN)
__my_free_list = _S_free_list + _S_freelist_index(__i);
__p = *__my_free_list;
// Found a free block: adopt it as the new chunk and retry.
*__my_free_list = __p -> _M_free_list_link;
_S_start_free = (char*)__p;
_S_end_free = _S_start_free + __i;
return(_S_chunk_alloc(__size, __nobjs));
// Any leftover piece will eventually make it to the
_S_end_free = 0; // In case of exception.
// Last resort: let the underlying allocator throw or recover.
_S_start_free = (char*)__mem_interface::allocate(__bytes_to_get);
// This should either throw an
// exception or remedy the situation. Thus we assume it
_S_heap_size += __bytes_to_get;
_S_end_free = _S_start_free + __bytes_to_get;
return(_S_chunk_alloc(__size, __nobjs));
/* Returns an object of size __n, and optionally adds to size __n free list.*/
/* We assume that __n is properly aligned. */
/* We hold the allocation lock. */
// NOTE(review): the return type, local declarations (__nobjs,
// __result, __current_obj, __next_obj, __i), and several braces are
// elided in this view.
template <bool __threads, int __inst>
__default_alloc_template<__threads, __inst>::_S_refill(size_t __n)
// Grab a batch of objects (__nobjs is initialized on an elided line
// and may be reduced by _S_chunk_alloc).
char* __chunk = _S_chunk_alloc(__n, __nobjs);
_Obj* volatile* __my_free_list;
// Only one object obtained: return it; there is nothing to chain.
if (1 == __nobjs) return(__chunk);
__my_free_list = _S_free_list + _S_freelist_index(__n);
/* Build free list in chunk */
// First object goes to the caller; the remainder are linked together
// and installed as the new free list for this size class.
__result = (_Obj*)__chunk;
*__my_free_list = __next_obj = (_Obj*)(__chunk + __n);
for (__i = 1; ; __i++) {
__current_obj = __next_obj;
__next_obj = (_Obj*)((char*)__next_obj + __n);
if (__nobjs - 1 == __i) {
// Last object terminates the chain.
__current_obj -> _M_free_list_link = 0;
__current_obj -> _M_free_list_link = __next_obj;
// Out-of-line reallocate for the default allocator.
// NOTE(review): the return type, the parameter-list tail, local
// declarations (__result, __copy_sz), and the final return are elided
// in this view.
template <bool threads, int inst>
__default_alloc_template<threads, inst>::reallocate(void* __p,
if (__old_sz > (size_t) _MAX_BYTES && __new_sz > (size_t) _MAX_BYTES) {
return(realloc(__p, __new_sz));
// Same size class: the existing block is already big enough.
if (_S_round_up(__old_sz) == _S_round_up(__new_sz)) return(__p);
// Different class: allocate, copy the smaller of the sizes, release.
__result = allocate(__new_sz);
__copy_sz = __new_sz > __old_sz? __old_sz : __new_sz;
memcpy(__result, __p, __copy_sz);
deallocate(__p, __old_sz);
// Definition of the shared allocator mutex.  NOTE(review): the type
// name preceding the qualified member name is elided in this view;
// the lock is declared under __STL_THREADS in the class above.
template <bool __threads, int __inst>
__default_alloc_template<__threads, __inst>::_S_node_allocator_lock
__STL_MUTEX_INITIALIZER;
// Chunk-pool state starts empty: the first small allocation forces a
// refill via _S_chunk_alloc.
template <bool __threads, int __inst>
char* __default_alloc_template<__threads, __inst>::_S_start_free = 0;
template <bool __threads, int __inst>
char* __default_alloc_template<__threads, __inst>::_S_end_free = 0;
// Running total of bytes obtained from the system; _S_chunk_alloc
// uses it to scale the size of future chunks.
template <bool __threads, int __inst>
size_t __default_alloc_template<__threads, __inst>::_S_heap_size = 0;
// All free lists start empty (one slot per size class).
template <bool __threads, int __inst>
typename __default_alloc_template<__threads, __inst>::_Obj* volatile
__default_alloc_template<__threads, __inst> ::_S_free_list[
__default_alloc_template<__threads, __inst>::_NFREELISTS
] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, };
// The 16 zeros are necessary to make version 4.1 of the SunPro
// compiler happy. Otherwise it appears to allocate too little
// space for the array.
575 #endif /* ! __USE_MALLOC */
// This implements allocators as specified in the C++ standard.
// Note that standard-conforming allocators use many language features
// that are not yet widely implemented. In particular, they rely on
// member templates, partial specialization, partial ordering of function
// templates, the typename keyword, and the use of the template keyword
// to refer to a template member of a dependent type.
// NOTE(review): the class head (template <class _Tp> class allocator)
// and part of allocate()'s body are elided in this view.
typedef alloc _Alloc; // The underlying allocator.
// Standard allocator member typedefs.
typedef size_t size_type;
typedef ptrdiff_t difference_type;
typedef _Tp* pointer;
typedef const _Tp* const_pointer;
typedef _Tp& reference;
typedef const _Tp& const_reference;
typedef _Tp value_type;
// rebind: obtain the analogous allocator for another value type.
template <class _Tp1> struct rebind {
typedef allocator<_Tp1> other;
// The allocator is stateless, so all these members are trivial.
allocator() throw() {}
allocator(const allocator&) throw() {}
template <class _Tp1> allocator(const allocator<_Tp1>&) throw() {}
~allocator() throw() {}
pointer address(reference __x) const { return &__x; }
const_pointer address(const_reference __x) const { return &__x; }
// __n is permitted to be 0. The C++ standard says nothing about what
// the return value is when __n == 0.
_Tp* allocate(size_type __n, const void* = 0) {
return __n != 0 ? static_cast<_Tp*>(_Alloc::allocate(__n * sizeof(_Tp)))
// __p is not permitted to be a null pointer.
void deallocate(pointer __p, size_type __n)
{ _Alloc::deallocate(__p, __n * sizeof(_Tp)); }
// Largest count whose byte size cannot overflow size_t arithmetic.
size_type max_size() const throw()
{ return size_t(-1) / sizeof(_Tp); }
// Placement-new construction and explicit destruction, per the
// standard allocator requirements.
void construct(pointer __p, const _Tp& __val) { new(__p) _Tp(__val); }
void destroy(pointer __p) { __p->~_Tp(); }
// Specialization for void: only the pointer typedefs and rebind —
// no references, no construct/destroy.  NOTE(review): the leading
// template <> line and the closing brace are elided in this view.
class allocator<void> {
typedef size_t size_type;
typedef ptrdiff_t difference_type;
typedef void* pointer;
typedef const void* const_pointer;
typedef void value_type;
template <class _Tp1> struct rebind {
typedef allocator<_Tp1> other;
// allocator<_Tp> is stateless, so any two instances — even of
// different value types — compare equal.  NOTE(review): the return
// statements are elided in this view.
template <class _T1, class _T2>
inline bool operator==(const allocator<_T1>&, const allocator<_T2>&)
template <class _T1, class _T2>
inline bool operator!=(const allocator<_T1>&, const allocator<_T2>&)
// Allocator adaptor to turn an SGI-style allocator (e.g. alloc, malloc_alloc)
// into a standard-conforming allocator. Note that this adaptor does
// *not* assume that all objects of the underlying alloc class are
// identical, nor does it assume that all of the underlying alloc's
// member functions are static member functions. Note, also, that
// __allocator<_Tp, alloc> is essentially the same thing as allocator<_Tp>.
// NOTE(review): the class head (__allocator) and part of allocate()'s
// body are elided in this view.
template <class _Tp, class _Alloc>
// The adapted allocator instance; calls below go through this member
// rather than through static functions.
_Alloc __underlying_alloc;
// Standard allocator member typedefs.
typedef size_t size_type;
typedef ptrdiff_t difference_type;
typedef _Tp* pointer;
typedef const _Tp* const_pointer;
typedef _Tp& reference;
typedef const _Tp& const_reference;
typedef _Tp value_type;
// rebind: same underlying allocator, different value type.
template <class _Tp1> struct rebind {
typedef __allocator<_Tp1, _Alloc> other;
// Copy and converting constructors carry the underlying instance.
__allocator() throw() {}
__allocator(const __allocator& __a) throw()
: __underlying_alloc(__a.__underlying_alloc) {}
template <class _Tp1>
__allocator(const __allocator<_Tp1, _Alloc>& __a) throw()
: __underlying_alloc(__a.__underlying_alloc) {}
~__allocator() throw() {}
pointer address(reference __x) const { return &__x; }
const_pointer address(const_reference __x) const { return &__x; }
// __n is permitted to be 0.
_Tp* allocate(size_type __n, const void* = 0) {
? static_cast<_Tp*>(__underlying_alloc.allocate(__n * sizeof(_Tp)))
// __p is not permitted to be a null pointer.
void deallocate(pointer __p, size_type __n)
{ __underlying_alloc.deallocate(__p, __n * sizeof(_Tp)); }
// Largest count whose byte size cannot overflow size_t arithmetic.
size_type max_size() const throw()
{ return size_t(-1) / sizeof(_Tp); }
// Placement-new construction and explicit destruction.
void construct(pointer __p, const _Tp& __val) { new(__p) _Tp(__val); }
void destroy(pointer __p) { __p->~_Tp(); }
// Specialization for void, mirroring allocator<void>.
// NOTE(review): the access specifier and closing brace are elided in
// this view.
template <class _Alloc>
class __allocator<void, _Alloc> {
typedef size_t size_type;
typedef ptrdiff_t difference_type;
typedef void* pointer;
typedef const void* const_pointer;
typedef void value_type;
template <class _Tp1> struct rebind {
typedef __allocator<_Tp1, _Alloc> other;
// Two __allocator adaptors compare as their underlying allocator
// instances do.
template <class _Tp, class _Alloc>
inline bool operator==(const __allocator<_Tp, _Alloc>& __a1,
const __allocator<_Tp, _Alloc>& __a2)
return __a1.__underlying_alloc == __a2.__underlying_alloc;
template <class _Tp, class _Alloc>
inline bool operator!=(const __allocator<_Tp, _Alloc>& __a1,
const __allocator<_Tp, _Alloc>& __a2)
return __a1.__underlying_alloc != __a2.__underlying_alloc;
// Comparison operators for all of the predefined SGI-style allocators.
// This ensures that __allocator<malloc_alloc> (for example) will
// work with containers that compare allocators.  NOTE(review): the
// template head of operator== and the return statements are elided
// in this view.
inline bool operator==(const __malloc_alloc_template<inst>&,
const __malloc_alloc_template<inst>&)
template <int __inst>
inline bool operator!=(const __malloc_alloc_template<__inst>&,
const __malloc_alloc_template<__inst>&)
// debug_alloc comparisons — all instances of one specialization are
// interchangeable.  NOTE(review): the return statements and closing
// braces are elided in this view.
template <class _Alloc>
inline bool operator==(const debug_alloc<_Alloc>&,
const debug_alloc<_Alloc>&) {
template <class _Alloc>
inline bool operator!=(const debug_alloc<_Alloc>&,
const debug_alloc<_Alloc>&) {
// Another allocator adaptor: _Alloc_traits. This serves two
// purposes. First, make it possible to write containers that can use
// either SGI-style allocators or standard-conforming allocator.
// Second, provide a mechanism so that containers can query whether or
// not the allocator has distinct instances. If not, the container
// can avoid wasting a word of memory to store an empty object.
// This adaptor uses partial specialization. The general case of
// _Alloc_traits<_Tp, _Alloc> assumes that _Alloc is a
// standard-conforming allocator, possibly with non-equal instances
// and non-static members. (It still behaves correctly even if _Alloc
// has static member and if all instances are equal. Refinements
// affect performance, not correctness.)
// There are always two members: allocator_type, which is a standard-
// conforming allocator type for allocating objects of type _Tp, and
// _S_instanceless, a static const member of type bool. If
// _S_instanceless is true, this means that there is no difference
// between any two instances of type allocator_type. Furthermore, if
// _S_instanceless is true, then _Alloc_traits has one additional
// member: _Alloc_type. This type encapsulates allocation and
// deallocation of objects of type _Tp through a static interface; it
// has two member functions, whose signatures are
// static _Tp* allocate(size_t)
// static void deallocate(_Tp*, size_t)
// The fully general version.
// NOTE(review): the struct head and closing brace are elided in this
// view.
template <class _Tp, class _Allocator>
static const bool _S_instanceless = false;
typedef typename _Allocator::template rebind<_Tp>::other allocator_type;
// Out-of-class definition required for the static const data member.
template <class _Tp, class _Allocator>
const bool _Alloc_traits<_Tp, _Allocator>::_S_instanceless;
// The version for the default allocator.
// Instanceless: allocator<_Tp1> is stateless, so containers need not
// store it; allocation goes through the static simple_alloc facade.
template <class _Tp, class _Tp1>
struct _Alloc_traits<_Tp, allocator<_Tp1> >
static const bool _S_instanceless = true;
typedef simple_alloc<_Tp, alloc> _Alloc_type;
typedef allocator<_Tp> allocator_type;
// Versions for the predefined SGI-style allocators.
// The malloc-based allocator is instanceless.
template <class _Tp, int __inst>
struct _Alloc_traits<_Tp, __malloc_alloc_template<__inst> >
static const bool _S_instanceless = true;
typedef simple_alloc<_Tp, __malloc_alloc_template<__inst> > _Alloc_type;
typedef __allocator<_Tp, __malloc_alloc_template<__inst> > allocator_type;
// The SGI default (free-list) allocator: also instanceless.
// NOTE(review): the trailing member names of the two typedefs are
// elided in this view.
template <class _Tp, bool __threads, int __inst>
struct _Alloc_traits<_Tp, __default_alloc_template<__threads, __inst> >
static const bool _S_instanceless = true;
typedef simple_alloc<_Tp, __default_alloc_template<__threads, __inst> >
typedef __allocator<_Tp, __default_alloc_template<__threads, __inst> >
// The debug_alloc wrapper: instanceless like the allocators it wraps.
template <class _Tp, class _Alloc>
struct _Alloc_traits<_Tp, debug_alloc<_Alloc> >
static const bool _S_instanceless = true;
typedef simple_alloc<_Tp, debug_alloc<_Alloc> > _Alloc_type;
typedef __allocator<_Tp, debug_alloc<_Alloc> > allocator_type;
// Versions for the __allocator adaptor used with the predefined
// SGI-style allocators.
// These strip the adaptor back off: allocation can still go through
// the static simple_alloc interface on the wrapped SGI allocator.
template <class _Tp, class _Tp1, int __inst>
struct _Alloc_traits<_Tp,
__allocator<_Tp1, __malloc_alloc_template<__inst> > >
static const bool _S_instanceless = true;
typedef simple_alloc<_Tp, __malloc_alloc_template<__inst> > _Alloc_type;
typedef __allocator<_Tp, __malloc_alloc_template<__inst> > allocator_type;
// __allocator over the default free-list allocator.
// NOTE(review): the middle line of the specialization head and the
// trailing member names of the two typedefs are elided in this view.
template <class _Tp, class _Tp1, bool __thr, int __inst>
struct _Alloc_traits<_Tp,
__default_alloc_template<__thr, __inst> > >
static const bool _S_instanceless = true;
typedef simple_alloc<_Tp, __default_alloc_template<__thr,__inst> >
typedef __allocator<_Tp, __default_alloc_template<__thr,__inst> >
// __allocator over debug_alloc: likewise instanceless.
template <class _Tp, class _Tp1, class _Alloc>
struct _Alloc_traits<_Tp, __allocator<_Tp1, debug_alloc<_Alloc> > >
static const bool _S_instanceless = true;
typedef simple_alloc<_Tp, debug_alloc<_Alloc> > _Alloc_type;
typedef __allocator<_Tp, debug_alloc<_Alloc> > allocator_type;
877 #endif /* __SGI_STL_INTERNAL_ALLOC_H */