00001
00002
00003
00004
00005
00006
00007
00008
00009
00010
00011
00012
00013
00014
00015
00016
00017
00018
00019
00020
00021
00022
00023
00024
00025
00026
00027
00028
00029
00030
00031
00032
00033
00034
00035
00036
00037
00038
00039
00040
00041
00042
00048
#ifndef _CPP_BITS_PTHREAD_ALLOCIMPL_H
00049
#define _CPP_BITS_PTHREAD_ALLOCIMPL_H 1
00050
00051
00052
00053
00054
00055
00056
00057
00058
00059
00060
00061
00062
00063
00064
#include <bits/c++config.h>
#include <cerrno>
#include <cstdlib>   // for abort and realloc
#include <cstring>   // for memset and memcpy
#include <bits/stl_alloc.h>
#ifndef __RESTRICT
#  define __RESTRICT
#endif

#include <new>
00072
00073
namespace std
00074 {
00075
00076
#define __STL_DATA_ALIGNMENT 8
00077
00078
// Free-list node.  While a block sits on a free list its bytes hold the
// link to the next free block; once handed to a client the same bytes
// are the client's data.  The union is exactly __STL_DATA_ALIGNMENT
// bytes, the smallest size class handed out.
union _Pthread_alloc_obj {
    union _Pthread_alloc_obj * __free_list_link;
    char __client_data[__STL_DATA_ALIGNMENT];    /* The client sees this. */
};
00082
00083
00084
00085
00086
00087
00088
// Per-thread allocator state: one free list per size class (size
// classes are multiples of __STL_DATA_ALIGNMENT up to _Max_size), plus
// a link used to chain retired states so a new thread can reuse one.
template<size_t _Max_size>
struct _Pthread_alloc_per_thread_state {
    typedef _Pthread_alloc_obj __obj;
    enum { _S_NFREELISTS = _Max_size/__STL_DATA_ALIGNMENT };
    // __free_list[i] serves requests of (i+1)*__STL_DATA_ALIGNMENT bytes.
    _Pthread_alloc_obj* volatile __free_list[_S_NFREELISTS];
    // Next state on the shared reuse list (see _S_free_per_thread_states).
    _Pthread_alloc_per_thread_state<_Max_size> * __next;

    // Start with every free list empty.
    _Pthread_alloc_per_thread_state() : __next(0)
    {
        memset((void *)__free_list, 0, (size_t) _S_NFREELISTS * sizeof(__obj *));
    }

    // Return one object of (rounded-up) size __n, refilling this
    // thread's free list for that size class from the shared pool.
    void *_M_refill(size_t __n);
};
00106
00107
00108
00109
00110
00111
// Pthread-based node allocator.  Each thread owns a set of free lists
// reached through pthread_getspecific, so the common allocate and
// deallocate paths touch no shared state; only carving new chunks out
// of the shared pool takes the global mutex (_S_chunk_allocator_lock).
// Requests larger than _Max_size go straight to malloc_alloc.
template <size_t _Max_size = 128>
class _Pthread_alloc_template {

public:

  typedef _Pthread_alloc_obj __obj;

  // Allocate memory for __nobjs objects of size __size.  __nobjs may be
  // reduced on return if fewer objects could conveniently be obtained.
  static char *_S_chunk_alloc(size_t __size, int &__nobjs);

  enum {_S_ALIGN = __STL_DATA_ALIGNMENT};

  // Round __bytes up to the next multiple of _S_ALIGN.
  static size_t _S_round_up(size_t __bytes) {
    return (((__bytes) + (int) _S_ALIGN-1) & ~((int) _S_ALIGN - 1));
  }
  // Index of the free list that serves requests of __bytes bytes.
  static size_t _S_freelist_index(size_t __bytes) {
    return (((__bytes) + (int) _S_ALIGN-1)/(int)_S_ALIGN - 1);
  }

private:

  // Shared chunk-pool state; every member below is guarded by
  // _S_chunk_allocator_lock.
  static pthread_mutex_t _S_chunk_allocator_lock;
  static char *_S_start_free;     // start of unparceled pool space
  static char *_S_end_free;       // end of unparceled pool space
  static size_t _S_heap_size;     // total bytes obtained from malloc so far
  // Per-thread states retired by exited threads, available for reuse.
  static _Pthread_alloc_per_thread_state<_Max_size>* _S_free_per_thread_states;
  static pthread_key_t _S_key;    // TLS key holding this thread's state
  static bool _S_key_initialized; // true once _S_key has been created

  // Thread-exit destructor registered with pthread_key_create: recycles
  // the exiting thread's state.
  static void _S_destructor(void *instance);

  // Obtain a per-thread state, reusing a retired one if available.
  static _Pthread_alloc_per_thread_state<_Max_size> *_S_new_per_thread_state();

  // Ensure the calling thread has a state bound to _S_key; return it.
  static _Pthread_alloc_per_thread_state<_Max_size> *_S_get_per_thread_state();

  // Scoped lock on _S_chunk_allocator_lock (RAII: released in dtor).
  class _M_lock;
  friend class _M_lock;
  class _M_lock {
  public:
    _M_lock () { pthread_mutex_lock(&_S_chunk_allocator_lock); }
    ~_M_lock () { pthread_mutex_unlock(&_S_chunk_allocator_lock); }
  };

public:

  // Return a block of at least __n bytes (__n must be > 0).
  static void * allocate(size_t __n)
  {
    __obj * volatile * __my_free_list;
    __obj * __RESTRICT __result;
    _Pthread_alloc_per_thread_state<_Max_size>* __a;

    // Large requests bypass the free lists entirely.
    if (__n > _Max_size) {
      return(malloc_alloc::allocate(__n));
    }
    // NOTE(review): _S_key_initialized is read here without holding the
    // lock and written under it in _S_get_per_thread_state — the
    // historical SGI idiom assumes that read is atomic enough; confirm
    // acceptable on the target platforms.
    if (!_S_key_initialized ||
        !(__a = (_Pthread_alloc_per_thread_state<_Max_size>*)
                 pthread_getspecific(_S_key))) {
      __a = _S_get_per_thread_state();
    }
    // Pop the head of this thread's free list for the size class,
    // refilling the list from the shared pool if it is empty.
    __my_free_list = __a -> __free_list + _S_freelist_index(__n);
    __result = *__my_free_list;
    if (__result == 0) {
      void *__r = __a -> _M_refill(_S_round_up(__n));
      return __r;
    }
    *__my_free_list = __result -> __free_list_link;
    return (__result);
  };

  // Return a block of __n bytes to the calling thread's free list.
  // __p must not be 0 and __n must match the original request size.
  static void deallocate(void *__p, size_t __n)
  {
    __obj *__q = (__obj *)__p;
    __obj * volatile * __my_free_list;
    _Pthread_alloc_per_thread_state<_Max_size>* __a;

    // Large blocks were obtained from malloc_alloc; return them there.
    if (__n > _Max_size) {
      malloc_alloc::deallocate(__p, __n);
      return;
    }
    // Blocks may be freed by a different thread than allocated them;
    // they simply join the freeing thread's list.
    if (!_S_key_initialized ||
        !(__a = (_Pthread_alloc_per_thread_state<_Max_size> *)
                 pthread_getspecific(_S_key))) {
      __a = _S_get_per_thread_state();
    }
    // Push the block onto the head of the appropriate free list.
    __my_free_list = __a->__free_list + _S_freelist_index(__n);
    __q -> __free_list_link = *__my_free_list;
    *__my_free_list = __q;
  }

  // Resize __p from __old_sz to __new_sz, preserving the common prefix.
  static void * reallocate(void *__p, size_t __old_sz, size_t __new_sz);

} ;
00210
00211
// Default instantiation: small-object threshold of 128 bytes.
typedef _Pthread_alloc_template<> pthread_alloc;
00212
00213
00214
template <size_t _Max_size>
00215
void _Pthread_alloc_template<_Max_size>::_S_destructor(
void * __instance)
00216 {
00217 _M_lock __lock_instance;
00218 _Pthread_alloc_per_thread_state<_Max_size>* __s =
00219 (_Pthread_alloc_per_thread_state<_Max_size> *)__instance;
00220 __s -> __next = _S_free_per_thread_states;
00221 _S_free_per_thread_states = __s;
00222 }
00223
00224
template <size_t _Max_size>
00225 _Pthread_alloc_per_thread_state<_Max_size> *
00226 _Pthread_alloc_template<_Max_size>::_S_new_per_thread_state()
00227 {
00228
00229
if (0 != _S_free_per_thread_states) {
00230 _Pthread_alloc_per_thread_state<_Max_size> *__result =
00231 _S_free_per_thread_states;
00232 _S_free_per_thread_states = _S_free_per_thread_states -> __next;
00233
return __result;
00234 }
else {
00235
return new _Pthread_alloc_per_thread_state<_Max_size>;
00236 }
00237 }
00238
00239
// Ensure the calling thread has a per-thread state bound to _S_key and
// return it.  Creates the TLS key on first use.  Throws bad_alloc on
// resource exhaustion; aborts on any other pthread_setspecific failure,
// since that indicates a programming error (e.g. invalid key).
template <size_t _Max_size>
_Pthread_alloc_per_thread_state<_Max_size> *
_Pthread_alloc_template<_Max_size>::_S_get_per_thread_state()
{
    _M_lock __lock_instance;    // held for key creation AND state setup
    int __ret_code;
    _Pthread_alloc_per_thread_state<_Max_size> * __result;
    // First caller creates the process-wide key, registering
    // _S_destructor so states are recycled on thread exit.
    if (!_S_key_initialized) {
        if (pthread_key_create(&_S_key, _S_destructor)) {
            std::__throw_bad_alloc();  // failed
        }
        // Written under the lock; read without it in allocate/deallocate
        // (historical SGI idiom — see note there).
        _S_key_initialized = true;
    }
    __result = _S_new_per_thread_state();
    __ret_code = pthread_setspecific(_S_key, __result);
    if (__ret_code) {
      if (__ret_code == ENOMEM) {
        std::__throw_bad_alloc();
      } else {
        // EINVAL or similar: unrecoverable misuse of the TLS API.
        abort();
      }
    }
    return __result;
}
00265
00266
00267
00268
00269
// Carve a run of __nobjs objects of size __size out of the shared pool,
// growing the pool from malloc_alloc (or memalign on SGI) when needed.
// __nobjs is reduced in place if the pool cannot supply the full count.
// __size is assumed to be properly aligned (callers pass rounded sizes).
// Holds _S_chunk_allocator_lock for the body; note the tail call at the
// bottom runs AFTER the lock is released (the extra brace scope exists
// precisely to end the _M_lock lifetime before recursing).
template <size_t _Max_size>
char *_Pthread_alloc_template<_Max_size>
::_S_chunk_alloc(size_t __size, int &__nobjs)
{
  {
    char * __result;
    size_t __total_bytes;
    size_t __bytes_left;
    _M_lock __lock_instance;         // Acquire lock for this routine

    __total_bytes = __size * __nobjs;
    __bytes_left = _S_end_free - _S_start_free;
    if (__bytes_left >= __total_bytes) {
        // Pool satisfies the full request.
        __result = _S_start_free;
        _S_start_free += __total_bytes;
        return(__result);
    } else if (__bytes_left >= __size) {
        // Pool satisfies at least one object: hand out what fits and
        // report the reduced count through __nobjs.
        __nobjs = __bytes_left/__size;
        __total_bytes = __size * __nobjs;
        __result = _S_start_free;
        _S_start_free += __total_bytes;
        return(__result);
    } else {
        // Pool exhausted: grow it.  Growth is proportional to total
        // heap usage so the pool scales with demand.
        size_t __bytes_to_get =
          2 * __total_bytes + _S_round_up(_S_heap_size >> 4);
        // Try to make use of the left-over piece: it is smaller than
        // __size, so donate it to the calling thread's free list for
        // its own (aligned) size.
        if (__bytes_left > 0) {
            _Pthread_alloc_per_thread_state<_Max_size>* __a =
                (_Pthread_alloc_per_thread_state<_Max_size>*)
                    pthread_getspecific(_S_key);
            __obj * volatile * __my_free_list =
                __a->__free_list + _S_freelist_index(__bytes_left);

            ((__obj *)_S_start_free) -> __free_list_link = *__my_free_list;
            *__my_free_list = (__obj *)_S_start_free;
        }
#       ifdef _SGI_SOURCE
        // Irix prefers cache-line-aligned chunks; fall back to the
        // plain allocator if memalign fails.
        {
            const int __cache_line_size = 128;
            __bytes_to_get &= ~(__cache_line_size-1);
            _S_start_free = (char *)memalign(__cache_line_size, __bytes_to_get);
            if (0 == _S_start_free) {
                _S_start_free = (char *)malloc_alloc::allocate(__bytes_to_get);
            }
        }
#       else
        // malloc_alloc::allocate throws (or invokes its handler) on
        // failure, so _S_start_free is valid past this point.
        _S_start_free = (char *)malloc_alloc::allocate(__bytes_to_get);
#       endif
        _S_heap_size += __bytes_to_get;
        _S_end_free = _S_start_free + __bytes_to_get;
    }
  }
  // lock is released here; retry now that the pool has grown.
  return(_S_chunk_alloc(__size, __nobjs));
}
00329
00330
00331
00332
00333
00334
// Called when this thread's free list for (aligned) size __n is empty:
// obtain up to 128 objects from the shared pool, return the first to
// the caller, and thread the remaining __nobjs-1 objects onto the free
// list.  __n is assumed to be a multiple of __STL_DATA_ALIGNMENT.
template <size_t _Max_size>
void *_Pthread_alloc_per_thread_state<_Max_size>
::_M_refill(size_t __n)
{
    int __nobjs = 128;  // request size; may be reduced by _S_chunk_alloc
    char * __chunk =
      _Pthread_alloc_template<_Max_size>::_S_chunk_alloc(__n, __nobjs);
    __obj * volatile * __my_free_list;
    __obj * __result;
    __obj * __current_obj, * __next_obj;
    int __i;

    // Only one object obtained: nothing to put on the free list.
    if (1 == __nobjs) {
        return(__chunk);
    }
    __my_free_list = __free_list
        + _Pthread_alloc_template<_Max_size>::_S_freelist_index(__n);

    /* Build free list in chunk: first object goes to the caller, the  */
    /* rest (starting at __chunk + __n) are linked in address order.   */
    __result = (__obj *)__chunk;
    *__my_free_list = __next_obj = (__obj *)(__chunk + __n);
    for (__i = 1; ; __i++) {
        __current_obj = __next_obj;
        __next_obj = (__obj *)((char *)__next_obj + __n);
        if (__nobjs - 1 == __i) {
            // Last object terminates the list.
            __current_obj -> __free_list_link = 0;
            break;
        } else {
            __current_obj -> __free_list_link = __next_obj;
        }
    }
    return(__result);
}
00367
00368
template <size_t _Max_size>
00369
void *_Pthread_alloc_template<_Max_size>
00370 ::reallocate(
void *__p, size_t __old_sz, size_t __new_sz)
00371 {
00372
void * __result;
00373 size_t __copy_sz;
00374
00375
if (__old_sz > _Max_size
00376 && __new_sz > _Max_size) {
00377
return(realloc(__p, __new_sz));
00378 }
00379
if (_S_round_up(__old_sz) == _S_round_up(__new_sz))
return(__p);
00380 __result = allocate(__new_sz);
00381 __copy_sz = __new_sz > __old_sz? __old_sz : __new_sz;
00382 memcpy(__result, __p, __copy_sz);
00383 deallocate(__p, __old_sz);
00384
return(__result);
00385 }
00386
00387
// Definitions of _Pthread_alloc_template's static members.

// No retired per-thread states initially.
template <size_t _Max_size>
_Pthread_alloc_per_thread_state<_Max_size> *
_Pthread_alloc_template<_Max_size>::_S_free_per_thread_states = 0;

// TLS key; valid only once _S_key_initialized is true.
template <size_t _Max_size>
pthread_key_t _Pthread_alloc_template<_Max_size>::_S_key;

template <size_t _Max_size>
bool _Pthread_alloc_template<_Max_size>::_S_key_initialized = false;

// Statically initialized mutex guarding the shared chunk pool.
template <size_t _Max_size>
pthread_mutex_t _Pthread_alloc_template<_Max_size>::_S_chunk_allocator_lock
= PTHREAD_MUTEX_INITIALIZER;

// The shared pool starts out empty.
template <size_t _Max_size>
char *_Pthread_alloc_template<_Max_size>
::_S_start_free = 0;

template <size_t _Max_size>
char *_Pthread_alloc_template<_Max_size>
::_S_end_free = 0;

template <size_t _Max_size>
size_t _Pthread_alloc_template<_Max_size>
::_S_heap_size = 0;
00412
00413
00414
template <
class _Tp>
00415
class pthread_allocator {
00416
typedef pthread_alloc _S_Alloc;
00417
public:
00418
typedef size_t size_type;
00419
typedef ptrdiff_t difference_type;
00420
typedef _Tp* pointer;
00421
typedef const _Tp* const_pointer;
00422
typedef _Tp& reference;
00423
typedef const _Tp& const_reference;
00424
typedef _Tp value_type;
00425
00426
template <
class _NewType>
struct rebind {
00427
typedef pthread_allocator<_NewType> other;
00428 };
00429
00430 pthread_allocator() throw() {}
00431 pthread_allocator(
const pthread_allocator& a)
throw() {}
00432
template <
class _OtherType>
00433 pthread_allocator(
const pthread_allocator<_OtherType>&)
00434 throw() {}
00435 ~pthread_allocator() throw() {}
00436
00437 pointer address(reference __x)
const {
return &__x; }
00438 const_pointer address(const_reference __x)
const {
return &__x; }
00439
00440
00441
00442 _Tp* allocate(size_type __n,
const void* = 0) {
00443
return __n != 0 ? static_cast<_Tp*>(_S_Alloc::allocate(__n *
sizeof(_Tp)))
00444 : 0;
00445 }
00446
00447
00448
void deallocate(pointer __p, size_type __n)
00449 { _S_Alloc::deallocate(__p, __n *
sizeof(_Tp)); }
00450
00451 size_type max_size() const throw()
00452 {
return size_t(-1) /
sizeof(_Tp); }
00453
00454
void construct(pointer __p,
const _Tp& __val) {
new(__p) _Tp(__val); }
00455
void destroy(pointer _p) { _p->~_Tp(); }
00456 };
00457
00458
// Specialization for void: supplies only the nested types needed for
// rebinding; void has no references and cannot be allocated.
template<>
class pthread_allocator<void> {
public:
  typedef size_t      size_type;
  typedef ptrdiff_t   difference_type;
  typedef void*       pointer;
  typedef const void* const_pointer;
  typedef void        value_type;

  template <class _NewType> struct rebind {
    typedef pthread_allocator<_NewType> other;
  };
};
00471
00472
template <size_t _Max_size>
00473
inline bool operator==(
const _Pthread_alloc_template<_Max_size>&,
00474
const _Pthread_alloc_template<_Max_size>&)
00475 {
00476
return true;
00477 }
00478
00479
template <
class _T1,
class _T2>
00480
inline bool operator==(
const pthread_allocator<_T1>&,
00481
const pthread_allocator<_T2>& a2)
00482 {
00483
return true;
00484 }
00485
00486
template <
class _T1,
class _T2>
00487
inline bool operator!=(
const pthread_allocator<_T1>&,
00488
const pthread_allocator<_T2>&)
00489 {
00490
return false;
00491 }
00492
00493
// _Alloc_traits specialization: a raw _Pthread_alloc_template is
// instanceless (all state is static), so containers may use the
// simple_alloc / __allocator adaptors instead of storing an instance.
template <class _Tp, size_t _Max_size>
struct _Alloc_traits<_Tp, _Pthread_alloc_template<_Max_size> >
{
  static const bool _S_instanceless = true;
  typedef simple_alloc<_Tp, _Pthread_alloc_template<_Max_size> > _Alloc_type;
  typedef __allocator<_Tp, _Pthread_alloc_template<_Max_size> >
          allocator_type;
};
00501
00502
// Same as above, for the __allocator-wrapped form (element type of the
// wrapper is irrelevant; rebinding keeps the same _Pthread_alloc pool).
template <class _Tp, class _Atype, size_t _Max>
struct _Alloc_traits<_Tp, __allocator<_Atype, _Pthread_alloc_template<_Max> > >
{
  static const bool _S_instanceless = true;
  typedef simple_alloc<_Tp, _Pthread_alloc_template<_Max> > _Alloc_type;
  typedef __allocator<_Tp, _Pthread_alloc_template<_Max> > allocator_type;
};
00509
00510
// And for the standard-conforming pthread_allocator facade, which maps
// back onto the default _Pthread_alloc_template<> instantiation.
template <class _Tp, class _Atype>
struct _Alloc_traits<_Tp, pthread_allocator<_Atype> >
{
  static const bool _S_instanceless = true;
  typedef simple_alloc<_Tp, _Pthread_alloc_template<> > _Alloc_type;
  typedef pthread_allocator<_Tp> allocator_type;
};
00517
00518
00519 }
00520
00521
#endif
00522
00523
00524
00525