stl_alloc.h

// Allocators -*- C++ -*-

// Copyright (C) 2001, 2002 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 2, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License along
// with this library; see the file COPYING.  If not, write to the Free
// Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307,
// USA.

// As a special exception, you may use this file as part of a free software
// library without restriction.  Specifically, if other files instantiate
// templates or use macros or inline functions from this file, or you compile
// this file and link it with other files to produce an executable, this
// file does not by itself cause the resulting executable to be covered by
// the GNU General Public License.  This exception does not however
// invalidate any other reasons why the executable file might be covered by
// the GNU General Public License.

/*
 * Copyright (c) 1996-1997
 * Silicon Graphics Computer Systems, Inc.
 *
 * Permission to use, copy, modify, distribute and sell this software
 * and its documentation for any purpose is hereby granted without fee,
 * provided that the above copyright notice appear in all copies and
 * that both that copyright notice and this permission notice appear
 * in supporting documentation.  Silicon Graphics makes no
 * representations about the suitability of this software for any
 * purpose.  It is provided "as is" without express or implied warranty.
 */

#ifndef __GLIBCPP_INTERNAL_ALLOC_H
#define __GLIBCPP_INTERNAL_ALLOC_H

#include <cstddef>
#include <cstdlib>
#include <cstring>
#include <cassert>
#include <bits/functexcept.h>   // For __throw_bad_alloc
#include <bits/stl_threads.h>

#include <bits/atomicity.h>

namespace std
{
  class __new_alloc
  {
  public:
    static void*
    allocate(size_t __n)
    { return ::operator new(__n); }

    static void
    deallocate(void* __p, size_t)
    { ::operator delete(__p); }
  };


  template<int __inst>
    class __malloc_alloc_template
    {
    private:
      static void* _S_oom_malloc(size_t);

      // _GLIBCPP_DEPRECATED
      static void* _S_oom_realloc(void*, size_t);

      static void (* __malloc_alloc_oom_handler)();

    public:
      static void*
      allocate(size_t __n)
      {
        void* __result = malloc(__n);
        if (__builtin_expect(__result == 0, 0))
          __result = _S_oom_malloc(__n);
        return __result;
      }

      static void
      deallocate(void* __p, size_t /* __n */)
      { free(__p); }

      // _GLIBCPP_DEPRECATED
      static void*
      reallocate(void* __p, size_t /* old_sz */, size_t __new_sz)
      {
        void* __result = realloc(__p, __new_sz);
        if (__builtin_expect(__result == 0, 0))
          __result = _S_oom_realloc(__p, __new_sz);
        return __result;
      }

      static void (* __set_malloc_handler(void (*__f)()))()
      {
        void (* __old)() = __malloc_alloc_oom_handler;
        __malloc_alloc_oom_handler = __f;
        return __old;
      }
    };

  // malloc_alloc out-of-memory handling
  template<int __inst>
    void (* __malloc_alloc_template<__inst>::__malloc_alloc_oom_handler)() = 0;

  template<int __inst>
    void*
    __malloc_alloc_template<__inst>::
    _S_oom_malloc(size_t __n)
    {
      void (* __my_malloc_handler)();
      void* __result;

      for (;;)
        {
          __my_malloc_handler = __malloc_alloc_oom_handler;
          if (__builtin_expect(__my_malloc_handler == 0, 0))
            __throw_bad_alloc();
          (*__my_malloc_handler)();
          __result = malloc(__n);
          if (__result)
            return __result;
        }
    }

  // _GLIBCPP_DEPRECATED
  template<int __inst>
    void*
    __malloc_alloc_template<__inst>::
    _S_oom_realloc(void* __p, size_t __n)
    {
      void (* __my_malloc_handler)();
      void* __result;

      for (;;)
        {
          __my_malloc_handler = __malloc_alloc_oom_handler;
          if (__builtin_expect(__my_malloc_handler == 0, 0))
            __throw_bad_alloc();
          (*__my_malloc_handler)();
          __result = realloc(__p, __n);
          if (__result)
            return __result;
        }
    }
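__malloc_alloc_template mirrors set_new_handler: allocate retries malloc through whatever handler was installed with __set_malloc_handler and throws bad_alloc only when no handler is set. For illustration only (this snippet is not part of stl_alloc.h), a minimal sketch of installing a handler; it assumes the internal names above are reachable through <memory>, which pulls in this header in this release of libstdc++:

    #include <cstdio>
    #include <memory>   // in this libstdc++ release, brings in <bits/stl_alloc.h>

    // Hypothetical handler: a real program would release spare memory here
    // so that the retried malloc() has a chance to succeed.
    static void release_emergency_reserve()
    { std::puts("malloc failed: releasing reserve"); }

    int main()
    {
      typedef std::__malloc_alloc_template<0> malloc_alloc;

      // Install the handler; the previous handler (initially 0) is returned.
      void (*old_handler)() =
        malloc_alloc::__set_malloc_handler(&release_emergency_reserve);

      void* p = malloc_alloc::allocate(64);   // retries via the handler on failure
      malloc_alloc::deallocate(p, 64);

      malloc_alloc::__set_malloc_handler(old_handler);   // restore
      return 0;
    }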
  // Should not be referenced within the library anymore.
  typedef __new_alloc __mem_interface;

  template<typename _Tp, typename _Alloc>
    class __simple_alloc
    {
    public:
      static _Tp*
      allocate(size_t __n)
      {
        _Tp* __ret = 0;
        if (__n)
          __ret = static_cast<_Tp*>(_Alloc::allocate(__n * sizeof(_Tp)));
        return __ret;
      }

      static _Tp*
      allocate()
      { return (_Tp*) _Alloc::allocate(sizeof(_Tp)); }

      static void
      deallocate(_Tp* __p, size_t __n)
      { if (0 != __n) _Alloc::deallocate(__p, __n * sizeof(_Tp)); }

      static void
      deallocate(_Tp* __p)
      { _Alloc::deallocate(__p, sizeof(_Tp)); }
    };


  template<typename _Alloc>
    class __debug_alloc
    {
    private:
      // Size of space used to store size.  Note that this must be
      // large enough to preserve alignment.
      enum {_S_extra = 8};

    public:
      static void*
      allocate(size_t __n)
      {
        char* __result = (char*)_Alloc::allocate(__n + (int) _S_extra);
        *(size_t*)__result = __n;
        return __result + (int) _S_extra;
      }

      static void
      deallocate(void* __p, size_t __n)
      {
        char* __real_p = (char*)__p - (int) _S_extra;
        assert(*(size_t*)__real_p == __n);
        _Alloc::deallocate(__real_p, __n + (int) _S_extra);
      }

      // _GLIBCPP_DEPRECATED
      static void*
      reallocate(void* __p, size_t __old_sz, size_t __new_sz)
      {
        char* __real_p = (char*)__p - (int) _S_extra;
        assert(*(size_t*)__real_p == __old_sz);
        char* __result = (char*) _Alloc::reallocate(__real_p,
                                                    __old_sz + (int) _S_extra,
                                                    __new_sz + (int) _S_extra);
        *(size_t*)__result = __new_sz;
        return __result + (int) _S_extra;
      }
    };
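__debug_alloc over-allocates each block by _S_extra bytes, stores the requested size in front of it, and asserts on deallocation that the caller passes back the same size. For illustration only (not part of the header), a minimal sketch wrapping __new_alloc:

    #include <memory>   // in this libstdc++ release, brings in <bits/stl_alloc.h>

    int main()
    {
      typedef std::__debug_alloc<std::__new_alloc> checked_alloc;

      void* p = checked_alloc::allocate(32);   // really obtains 32 + 8 bytes
      checked_alloc::deallocate(p, 32);        // sizes match: fine

      void* q = checked_alloc::allocate(32);
      // checked_alloc::deallocate(q, 16);     // size mismatch: would trip assert()
      checked_alloc::deallocate(q, 32);
      return 0;
    }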
  template<bool __threads, int __inst>
    class __default_alloc_template
    {
    private:
      enum {_ALIGN = 8};
      enum {_MAX_BYTES = 128};
      enum {_NFREELISTS = _MAX_BYTES / _ALIGN};

      union _Obj
      {
        union _Obj* _M_free_list_link;
        char        _M_client_data[1];    // The client sees this.
      };

      static _Obj* volatile _S_free_list[_NFREELISTS];

      // Chunk allocation state.
      static char*  _S_start_free;
      static char*  _S_end_free;
      static size_t _S_heap_size;

      static _STL_mutex_lock _S_node_allocator_lock;

      static size_t
      _S_round_up(size_t __bytes)
      { return (((__bytes) + (size_t) _ALIGN - 1) & ~((size_t) _ALIGN - 1)); }

      static size_t
      _S_freelist_index(size_t __bytes)
      { return (((__bytes) + (size_t)_ALIGN - 1)/(size_t)_ALIGN - 1); }

      // Returns an object of size __n, and optionally adds to size __n
      // free list.
      static void*
      _S_refill(size_t __n);

      // Allocates a chunk for nobjs of size size.  nobjs may be reduced
      // if it is inconvenient to allocate the requested number.
      static char*
      _S_chunk_alloc(size_t __size, int& __nobjs);

      // It would be nice to use _STL_auto_lock here.  But we need a
      // test whether threads are in use.
      struct _Lock
      {
        _Lock() { if (__threads) _S_node_allocator_lock._M_acquire_lock(); }
        ~_Lock() { if (__threads) _S_node_allocator_lock._M_release_lock(); }
      } __attribute__ ((__unused__));
      friend struct _Lock;

      static _Atomic_word _S_force_new;

    public:
      // __n must be > 0
      static void*
      allocate(size_t __n)
      {
        void* __ret = 0;

        // If there is a race through here, assume answer from getenv
        // will resolve in same direction.  Inspired by techniques
        // to efficiently support threading found in basic_string.h.
        if (_S_force_new == 0)
          {
            if (getenv("GLIBCPP_FORCE_NEW"))
              __atomic_add(&_S_force_new, 1);
            else
              __atomic_add(&_S_force_new, -1);
            // Trust but verify...
            assert(_S_force_new != 0);
          }

        if ((__n > (size_t) _MAX_BYTES) || (_S_force_new > 0))
          __ret = __new_alloc::allocate(__n);
        else
          {
            _Obj* volatile* __my_free_list = _S_free_list
                                             + _S_freelist_index(__n);
            // Acquire the lock here with a constructor call.  This
            // ensures that it is released in exit or during stack
            // unwinding.
            _Lock __lock_instance;
            _Obj* __restrict__ __result = *__my_free_list;
            if (__builtin_expect(__result == 0, 0))
              __ret = _S_refill(_S_round_up(__n));
            else
              {
                *__my_free_list = __result -> _M_free_list_link;
                __ret = __result;
              }
            if (__builtin_expect(__ret == 0, 0))
              __throw_bad_alloc();
          }
        return __ret;
      }

      // __p may not be 0
      static void
      deallocate(void* __p, size_t __n)
      {
        if ((__n > (size_t) _MAX_BYTES) || (_S_force_new > 0))
          __new_alloc::deallocate(__p, __n);
        else
          {
            _Obj* volatile* __my_free_list = _S_free_list
                                             + _S_freelist_index(__n);
            _Obj* __q = (_Obj*)__p;

            // Acquire the lock here with a constructor call.  This
            // ensures that it is released in exit or during stack
            // unwinding.
            _Lock __lock_instance;
            __q -> _M_free_list_link = *__my_free_list;
            *__my_free_list = __q;
          }
      }

      // _GLIBCPP_DEPRECATED
      static void*
      reallocate(void* __p, size_t __old_sz, size_t __new_sz);
    };

  template<bool __threads, int __inst> _Atomic_word
  __default_alloc_template<__threads, __inst>::_S_force_new = 0;

  template<bool __threads, int __inst>
    inline bool
    operator==(const __default_alloc_template<__threads,__inst>&,
               const __default_alloc_template<__threads,__inst>&)
    { return true; }

  template<bool __threads, int __inst>
    inline bool
    operator!=(const __default_alloc_template<__threads,__inst>&,
               const __default_alloc_template<__threads,__inst>&)
    { return false; }
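Requests of at most _MAX_BYTES (128) bytes are rounded up to a multiple of _ALIGN (8) and served from one of _NFREELISTS (16) free lists; larger requests, or any request when GLIBCPP_FORCE_NEW is set in the environment, go straight to operator new. The private helpers _S_round_up and _S_freelist_index implement that size-class mapping; since they are private, the standalone sketch below (for illustration only) simply repeats the same arithmetic:

    #include <cstddef>
    #include <cstdio>

    // Same constants and formulas as __default_alloc_template above,
    // duplicated only so this example is self-contained.
    enum { ALIGN = 8, MAX_BYTES = 128, NFREELISTS = MAX_BYTES / ALIGN };

    static std::size_t round_up(std::size_t bytes)
    { return (bytes + ALIGN - 1) & ~((std::size_t) ALIGN - 1); }

    static std::size_t freelist_index(std::size_t bytes)
    { return (bytes + ALIGN - 1) / ALIGN - 1; }

    int main()
    {
      // A 13-byte request is rounded up to 16 bytes and served from list 1;
      // list 0 holds 8-byte blocks, list 15 holds 128-byte blocks.
      std::printf("13 -> %lu bytes, list %lu\n",
                  (unsigned long) round_up(13),
                  (unsigned long) freelist_index(13));    // 16, list 1
      std::printf("128 -> %lu bytes, list %lu\n",
                  (unsigned long) round_up(128),
                  (unsigned long) freelist_index(128));   // 128, list 15
      return 0;
    }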
  // We allocate memory in large chunks in order to avoid fragmenting the
  // heap too much.  We assume that __size is properly aligned.  We hold
  // the allocation lock.
  template<bool __threads, int __inst>
    char*
    __default_alloc_template<__threads, __inst>::
    _S_chunk_alloc(size_t __size, int& __nobjs)
    {
      char* __result;
      size_t __total_bytes = __size * __nobjs;
      size_t __bytes_left = _S_end_free - _S_start_free;

      if (__bytes_left >= __total_bytes)
        {
          __result = _S_start_free;
          _S_start_free += __total_bytes;
          return __result;
        }
      else if (__bytes_left >= __size)
        {
          __nobjs = (int)(__bytes_left/__size);
          __total_bytes = __size * __nobjs;
          __result = _S_start_free;
          _S_start_free += __total_bytes;
          return __result;
        }
      else
        {
          size_t __bytes_to_get =
            2 * __total_bytes + _S_round_up(_S_heap_size >> 4);
          // Try to make use of the left-over piece.
          if (__bytes_left > 0)
            {
              _Obj* volatile* __my_free_list =
                _S_free_list + _S_freelist_index(__bytes_left);

              ((_Obj*)_S_start_free) -> _M_free_list_link = *__my_free_list;
              *__my_free_list = (_Obj*)_S_start_free;
            }
          _S_start_free = (char*) __new_alloc::allocate(__bytes_to_get);
          if (_S_start_free == 0)
            {
              size_t __i;
              _Obj* volatile* __my_free_list;
              _Obj* __p;
              // Try to make do with what we have.  That can't hurt.  We
              // do not try smaller requests, since that tends to result
              // in disaster on multi-process machines.
              __i = __size;
              for (; __i <= (size_t) _MAX_BYTES; __i += (size_t) _ALIGN)
                {
                  __my_free_list = _S_free_list + _S_freelist_index(__i);
                  __p = *__my_free_list;
                  if (__p != 0)
                    {
                      *__my_free_list = __p -> _M_free_list_link;
                      _S_start_free = (char*)__p;
                      _S_end_free = _S_start_free + __i;
                      return _S_chunk_alloc(__size, __nobjs);
                      // Any leftover piece will eventually make it to the
                      // right free list.
                    }
                }
              _S_end_free = 0;        // In case of exception.
              _S_start_free = (char*)__new_alloc::allocate(__bytes_to_get);
              // This should either throw an exception or remedy the situation.
              // Thus we assume it succeeded.
            }
          _S_heap_size += __bytes_to_get;
          _S_end_free = _S_start_free + __bytes_to_get;
          return _S_chunk_alloc(__size, __nobjs);
        }
    }


  // Returns an object of size __n, and optionally adds to "size
  // __n"'s free list.  We assume that __n is properly aligned.  We
  // hold the allocation lock.
  template<bool __threads, int __inst>
    void*
    __default_alloc_template<__threads, __inst>::_S_refill(size_t __n)
    {
      int __nobjs = 20;
      char* __chunk = _S_chunk_alloc(__n, __nobjs);
      _Obj* volatile* __my_free_list;
      _Obj* __result;
      _Obj* __current_obj;
      _Obj* __next_obj;
      int __i;

      if (1 == __nobjs)
        return __chunk;
      __my_free_list = _S_free_list + _S_freelist_index(__n);

      // Build free list in chunk.
      __result = (_Obj*)__chunk;
      *__my_free_list = __next_obj = (_Obj*)(__chunk + __n);
      for (__i = 1; ; __i++)
        {
          __current_obj = __next_obj;
          __next_obj = (_Obj*)((char*)__next_obj + __n);
          if (__nobjs - 1 == __i)
            {
              __current_obj -> _M_free_list_link = 0;
              break;
            }
          else
            __current_obj -> _M_free_list_link = __next_obj;
        }
      return __result;
    }


  // _GLIBCPP_DEPRECATED
  template<bool threads, int inst>
    void*
    __default_alloc_template<threads, inst>::
    reallocate(void* __p, size_t __old_sz, size_t __new_sz)
    {
      void* __result;
      size_t __copy_sz;

      if (__old_sz > (size_t) _MAX_BYTES && __new_sz > (size_t) _MAX_BYTES)
        return(realloc(__p, __new_sz));
      if (_S_round_up(__old_sz) == _S_round_up(__new_sz))
        return(__p);
      __result = allocate(__new_sz);
      __copy_sz = __new_sz > __old_sz ? __old_sz : __new_sz;
      memcpy(__result, __p, __copy_sz);
      deallocate(__p, __old_sz);
      return __result;
    }

  template<bool __threads, int __inst>
    _STL_mutex_lock
    __default_alloc_template<__threads,__inst>::_S_node_allocator_lock
    __STL_MUTEX_INITIALIZER;

  template<bool __threads, int __inst>
    char* __default_alloc_template<__threads,__inst>::_S_start_free = 0;

  template<bool __threads, int __inst>
    char* __default_alloc_template<__threads,__inst>::_S_end_free = 0;

  template<bool __threads, int __inst>
    size_t __default_alloc_template<__threads,__inst>::_S_heap_size = 0;

  template<bool __threads, int __inst>
    typename __default_alloc_template<__threads,__inst>::_Obj* volatile
    __default_alloc_template<__threads,__inst>::_S_free_list[_NFREELISTS];

  typedef __default_alloc_template<true,0>  __alloc;
  typedef __default_alloc_template<false,0> __single_client_alloc;


  template<typename _Tp>
    class allocator
    {
      typedef __alloc _Alloc;          // The underlying allocator.
    public:
      typedef size_t     size_type;
      typedef ptrdiff_t  difference_type;
      typedef _Tp*       pointer;
      typedef const _Tp* const_pointer;
      typedef _Tp&       reference;
      typedef const _Tp& const_reference;
      typedef _Tp        value_type;

      template<typename _Tp1>
        struct rebind
        { typedef allocator<_Tp1> other; };

      allocator() throw() {}
      allocator(const allocator&) throw() {}
      template<typename _Tp1>
        allocator(const allocator<_Tp1>&) throw() {}
      ~allocator() throw() {}

      pointer
      address(reference __x) const { return &__x; }

      const_pointer
      address(const_reference __x) const { return &__x; }

      // NB: __n is permitted to be 0.  The C++ standard says nothing
      // about what the return value is when __n == 0.
      _Tp*
      allocate(size_type __n, const void* = 0)
      {
        _Tp* __ret = 0;
        if (__n)
          {
            if (__n <= this->max_size())
              __ret = static_cast<_Tp*>(_Alloc::allocate(__n * sizeof(_Tp)));
            else
              __throw_bad_alloc();
          }
        return __ret;
      }

      // __p is not permitted to be a null pointer.
      void
      deallocate(pointer __p, size_type __n)
      { _Alloc::deallocate(__p, __n * sizeof(_Tp)); }

      size_type
      max_size() const throw() { return size_t(-1) / sizeof(_Tp); }

      void construct(pointer __p, const _Tp& __val) { new(__p) _Tp(__val); }
      void destroy(pointer __p) { __p->~_Tp(); }
    };

  template<>
    class allocator<void>
    {
    public:
      typedef size_t      size_type;
      typedef ptrdiff_t   difference_type;
      typedef void*       pointer;
      typedef const void* const_pointer;
      typedef void        value_type;

      template<typename _Tp1>
        struct rebind
        { typedef allocator<_Tp1> other; };
    };


  template<typename _T1, typename _T2>
    inline bool
    operator==(const allocator<_T1>&, const allocator<_T2>&)
    { return true; }

  template<typename _T1, typename _T2>
    inline bool
    operator!=(const allocator<_T1>&, const allocator<_T2>&)
    { return false; }
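allocator<_Tp> above is the standard-conforming interface; by default it draws its memory from __alloc, the thread-safe __default_alloc_template. For illustration only (not part of the header), the usual manual allocate/construct/destroy/deallocate cycle:

    #include <memory>
    #include <string>

    int main()
    {
      std::allocator<std::string> a;

      // Raw storage for three strings; no constructors have run yet.
      std::string* p = a.allocate(3);

      // Construct, then later destroy, each element explicitly.
      for (int i = 0; i < 3; ++i)
        a.construct(p + i, std::string("hello"));
      for (int i = 0; i < 3; ++i)
        a.destroy(p + i);

      // Return the storage with the same element count used to allocate it.
      a.deallocate(p, 3);
      return 0;
    }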
  template<typename _Tp, typename _Alloc>
    struct __allocator
    {
      _Alloc __underlying_alloc;

      typedef size_t     size_type;
      typedef ptrdiff_t  difference_type;
      typedef _Tp*       pointer;
      typedef const _Tp* const_pointer;
      typedef _Tp&       reference;
      typedef const _Tp& const_reference;
      typedef _Tp        value_type;

      template<typename _Tp1>
        struct rebind
        { typedef __allocator<_Tp1, _Alloc> other; };

      __allocator() throw() {}
      __allocator(const __allocator& __a) throw()
      : __underlying_alloc(__a.__underlying_alloc) {}

      template<typename _Tp1>
        __allocator(const __allocator<_Tp1, _Alloc>& __a) throw()
        : __underlying_alloc(__a.__underlying_alloc) {}

      ~__allocator() throw() {}

      pointer
      address(reference __x) const { return &__x; }

      const_pointer
      address(const_reference __x) const { return &__x; }

      // NB: __n is permitted to be 0.  The C++ standard says nothing
      // about what the return value is when __n == 0.
      _Tp*
      allocate(size_type __n, const void* = 0)
      {
        _Tp* __ret = 0;
        if (__n)
          __ret = static_cast<_Tp*>(_Alloc::allocate(__n * sizeof(_Tp)));
        return __ret;
      }

      // __p is not permitted to be a null pointer.
      void
      deallocate(pointer __p, size_type __n)
      { __underlying_alloc.deallocate(__p, __n * sizeof(_Tp)); }

      size_type
      max_size() const throw() { return size_t(-1) / sizeof(_Tp); }

      void
      construct(pointer __p, const _Tp& __val) { new(__p) _Tp(__val); }

      void
      destroy(pointer __p) { __p->~_Tp(); }
    };

  template<typename _Alloc>
    struct __allocator<void, _Alloc>
    {
      typedef size_t      size_type;
      typedef ptrdiff_t   difference_type;
      typedef void*       pointer;
      typedef const void* const_pointer;
      typedef void        value_type;

      template<typename _Tp1>
        struct rebind
        { typedef __allocator<_Tp1, _Alloc> other; };
    };

  template<typename _Tp, typename _Alloc>
    inline bool
    operator==(const __allocator<_Tp,_Alloc>& __a1,
               const __allocator<_Tp,_Alloc>& __a2)
    { return __a1.__underlying_alloc == __a2.__underlying_alloc; }

  template<typename _Tp, typename _Alloc>
    inline bool
    operator!=(const __allocator<_Tp, _Alloc>& __a1,
               const __allocator<_Tp, _Alloc>& __a2)
    { return __a1.__underlying_alloc != __a2.__underlying_alloc; }


  template<int inst>
    inline bool
    operator==(const __malloc_alloc_template<inst>&,
               const __malloc_alloc_template<inst>&)
    { return true; }

  template<int __inst>
    inline bool
    operator!=(const __malloc_alloc_template<__inst>&,
               const __malloc_alloc_template<__inst>&)
    { return false; }

  template<typename _Alloc>
    inline bool
    operator==(const __debug_alloc<_Alloc>&, const __debug_alloc<_Alloc>&)
    { return true; }

  template<typename _Alloc>
    inline bool
    operator!=(const __debug_alloc<_Alloc>&, const __debug_alloc<_Alloc>&)
    { return false; }
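__allocator wraps one of the SGI-style byte allocators in the standard allocator interface, so a container can be pointed at, say, raw malloc(). For illustration only, a sketch under the assumption that this release's containers accept such an allocator (the _Alloc_traits specializations below exist for exactly this case):

    #include <vector>

    int main()
    {
      // A standard-conforming allocator whose storage comes from
      // __malloc_alloc_template<0>, i.e. plain malloc()/free().
      typedef std::__allocator<int, std::__malloc_alloc_template<0> > malloc_based;

      std::vector<int, malloc_based> v;
      v.push_back(42);
      return 0;
    }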
  // The fully general version.
  template<typename _Tp, typename _Allocator>
    struct _Alloc_traits
    {
      static const bool _S_instanceless = false;
      typedef typename _Allocator::template rebind<_Tp>::other allocator_type;
    };

  template<typename _Tp, typename _Allocator>
    const bool _Alloc_traits<_Tp, _Allocator>::_S_instanceless;

  template<typename _Tp, typename _Tp1>
    struct _Alloc_traits<_Tp, allocator<_Tp1> >
    {
      static const bool _S_instanceless = true;
      typedef __simple_alloc<_Tp, __alloc> _Alloc_type;
      typedef allocator<_Tp> allocator_type;
    };

  template<typename _Tp, int __inst>
    struct _Alloc_traits<_Tp, __malloc_alloc_template<__inst> >
    {
      static const bool _S_instanceless = true;
      typedef __simple_alloc<_Tp, __malloc_alloc_template<__inst> > _Alloc_type;
      typedef __allocator<_Tp, __malloc_alloc_template<__inst> > allocator_type;
    };

  template<typename _Tp, bool __threads, int __inst>
    struct _Alloc_traits<_Tp, __default_alloc_template<__threads, __inst> >
    {
      static const bool _S_instanceless = true;
      typedef __simple_alloc<_Tp, __default_alloc_template<__threads, __inst> >
        _Alloc_type;
      typedef __allocator<_Tp, __default_alloc_template<__threads, __inst> >
        allocator_type;
    };

  template<typename _Tp, typename _Alloc>
    struct _Alloc_traits<_Tp, __debug_alloc<_Alloc> >
    {
      static const bool _S_instanceless = true;
      typedef __simple_alloc<_Tp, __debug_alloc<_Alloc> > _Alloc_type;
      typedef __allocator<_Tp, __debug_alloc<_Alloc> > allocator_type;
    };

  template<typename _Tp, typename _Tp1, int __inst>
    struct _Alloc_traits<_Tp,
                         __allocator<_Tp1, __malloc_alloc_template<__inst> > >
    {
      static const bool _S_instanceless = true;
      typedef __simple_alloc<_Tp, __malloc_alloc_template<__inst> > _Alloc_type;
      typedef __allocator<_Tp, __malloc_alloc_template<__inst> > allocator_type;
    };

  template<typename _Tp, typename _Tp1, bool __thr, int __inst>
    struct _Alloc_traits<_Tp,
                         __allocator<_Tp1,
                                     __default_alloc_template<__thr, __inst> > >
    {
      static const bool _S_instanceless = true;
      typedef __simple_alloc<_Tp, __default_alloc_template<__thr,__inst> >
        _Alloc_type;
      typedef __allocator<_Tp, __default_alloc_template<__thr,__inst> >
        allocator_type;
    };

  template<typename _Tp, typename _Tp1, typename _Alloc>
    struct _Alloc_traits<_Tp, __allocator<_Tp1, __debug_alloc<_Alloc> > >
    {
      static const bool _S_instanceless = true;
      typedef __simple_alloc<_Tp, __debug_alloc<_Alloc> > _Alloc_type;
      typedef __allocator<_Tp, __debug_alloc<_Alloc> > allocator_type;
    };

  // Inhibit implicit instantiations for required instantiations,
  // which are defined via explicit instantiations elsewhere.
  // NB: This syntax is a GNU extension.
  extern template class allocator<char>;
  extern template class allocator<wchar_t>;
  extern template class __default_alloc_template<true,0>;
} // namespace std

#endif
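A closing note on _Alloc_traits: for the instanceless allocators above (_S_instanceless == true) it lets the containers bypass the allocator object entirely and use the static __simple_alloc interface (_Alloc_type) for raw, correctly typed storage. For illustration only, a minimal sketch of how a node-based container might obtain node storage; Node is a made-up type, and the snippet assumes <memory> brings in this header in this release:

    #include <memory>   // in this libstdc++ release, brings in <bits/stl_alloc.h>

    // Hypothetical node type, purely for illustration.
    struct Node { int data; Node* next; };

    int main()
    {
      // For std::allocator<int>, the traits select the instanceless
      // __simple_alloc<Node, __alloc> interface for raw node storage.
      typedef std::_Alloc_traits<Node, std::allocator<int> > Traits;
      typedef Traits::_Alloc_type NodeAlloc;

      Node* n = NodeAlloc::allocate(1);   // storage for one node, uninitialized
      n->data = 7;
      n->next = 0;
      NodeAlloc::deallocate(n, 1);
      return 0;
    }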

Generated on Wed Sep 29 13:54:51 2004 for libstdc++-v3 Source by doxygen 1.3.7