42 #ifndef _POOL_ALLOCATOR_H 
   43 #define _POOL_ALLOCATOR_H 1 
   52 #if __cplusplus >= 201103L 
   53 #include <type_traits> 
   56 namespace __gnu_cxx _GLIBCXX_VISIBILITY(default)
 
   58 _GLIBCXX_BEGIN_NAMESPACE_VERSION
 
      // Size-class granularity: every pooled block size is a multiple of 8.
   82       enum { _S_align = 8 };

      // Requests larger than this bypass the pool and use operator new.
   83       enum { _S_max_bytes = 128 };

      // Number of per-size free lists (128 / 8 = 16).
   84       enum { _S_free_list_size = (size_t)_S_max_bytes / (
size_t)_S_align };

      // _Obj: free-list node overlaid on the client block itself -- while a
      // block is on a free list its first bytes hold the 'next' link.
   88     union _Obj* _M_free_list_link;

   89     char        _M_client_data[1];    

      // One singly linked free list per size class.
      // NOTE(review): extraction split this declaration; the element type is
      // '_Obj* volatile'.
   92       static _Obj* 
volatile         _S_free_list[_S_free_list_size];

      // Bounds of the current not-yet-carved chunk of heap memory.
   95       static char*                  _S_start_free;

   96       static char*                  _S_end_free;

      // Running total of heap bytes obtained so far.
      // NOTE(review): presumably used to scale the size of new chunks --
      // the consuming code is not visible here; confirm against _M_refill.
   97       static size_t                 _S_heap_size;     

      // Round __bytes up to the next multiple of _S_align (bit-mask trick;
      // relies on _S_align being a power of two).
  100       _M_round_up(
size_t __bytes)

  101       { 
return ((__bytes + (
size_t)_S_align - 1) & ~((
size_t)_S_align - 1)); }

      // Return the address of the free list serving requests of __bytes.
  103       _GLIBCXX_CONST _Obj* 
volatile*

  104       _M_get_free_list(
size_t __bytes) 
throw ();

      // Accessor for the single mutex guarding all free lists.
  107       _M_get_mutex() 
throw ();

      // Refill the free list for (rounded) size __n and hand back one
      // object from it; called by allocate() when a list is empty.
  112       _M_refill(
size_t __n);

      // Carve a chunk holding __nobjs objects of size __n out of the heap.
      // NOTE(review): __nobjs is passed by reference -- presumably in/out
      // (may be reduced when memory is tight); confirm against definition.
  117       _M_allocate_chunk(
size_t __n, 
int& __nobjs);
 
      // Primary allocator class template.
      // NOTE(review): the extraction dropped the class head; per the trailing
      // doc residue this is __pool_alloc, built on __pool_alloc_base above.
  125   template<
typename _Tp>

      // Tri-state flag for the GLIBCXX_FORCE_NEW environment check
      // (see allocate(): 0 = not yet checked, >0 = always use operator new,
      // <0 = use the pool).  One flag per specialization.
  129       static _Atomic_word       _S_force_new;

      // Standard allocator member typedefs.
  132       typedef size_t     size_type;

  133       typedef ptrdiff_t  difference_type;

  134       typedef _Tp*       pointer;

  135       typedef const _Tp* const_pointer;

  136       typedef _Tp&       reference;

  137       typedef const _Tp& const_reference;

  138       typedef _Tp        value_type;

      // rebind member template header (body dropped by the extraction;
      // conventionally rebind<_Tp1>::other names the _Tp1 specialization).
  140       template<
typename _Tp1>

  144 #if __cplusplus >= 201103L 
      // C++11: container move-assignment may move this allocator along.
  147       typedef std::true_type propagate_on_container_move_assignment;

      // Converting-constructor template header (body dropped).
  154       template<
typename _Tp1>

      // address(): true address of __x even if operator& is overloaded
      // (the trailing doc residue points at __addressof -- body dropped).
  160       address(reference __x) 
const _GLIBCXX_NOEXCEPT

  164       address(const_reference __x) 
const _GLIBCXX_NOEXCEPT

      // Largest count of _Tp objects representable in size_t bytes.
  168       max_size() 
const _GLIBCXX_USE_NOEXCEPT 

  169       { 
return size_t(-1) / 
sizeof(_Tp); }

  171 #if __cplusplus >= 201103L 
      // C++11 construct: placement-new with perfect forwarding.
  172       template<
typename _Up, 
typename... _Args>

  174         construct(_Up* __p, _Args&&... __args)

  175     { ::new((
void *)__p) _Up(std::forward<_Args>(__args)...); }

  177       template<
typename _Up>

      // C++11 destroy: explicit destructor call, no deallocation.
  179         destroy(_Up* __p) { __p->~_Up(); }

      // Pre-C++11 construct/destroy pair (copy-construct only).
  184       construct(pointer __p, 
const _Tp& __val) 

  185       { ::new((
void *)__p) _Tp(__val); }

  188       destroy(pointer __p) { __p->~_Tp(); }

      // Obtain raw storage for __n objects; defined out of line below.
      // The unused hint parameter keeps the classic allocator signature.
  192       allocate(size_type __n, 
const void* = 0);

      // Return storage for __n objects to the pool (or operator delete);
      // defined out of line below.
  195       deallocate(pointer __p, size_type __n);      
 
  198   template<
typename _Tp>
 
  203   template<
typename _Tp>
 
  205     operator!=(
const __pool_alloc<_Tp>&, 
const __pool_alloc<_Tp>&)
 
  208   template<
typename _Tp>
 
  210     __pool_alloc<_Tp>::_S_force_new;
 
  212   template<
typename _Tp>
 
  214     __pool_alloc<_Tp>::allocate(size_type __n, 
const void*)
 
  217       if (__builtin_expect(__n != 0, 
true))
 
  219       if (__n > this->max_size())
 
  220         std::__throw_bad_alloc();
 
  225       if (_S_force_new == 0)
 
  227           if (std::getenv(
"GLIBCXX_FORCE_NEW"))
 
  228         __atomic_add_dispatch(&_S_force_new, 1);
 
  230         __atomic_add_dispatch(&_S_force_new, -1);
 
  233       const size_t __bytes = __n * 
sizeof(_Tp);       
 
  234       if (__bytes > 
size_t(_S_max_bytes) || _S_force_new > 0)
 
  235         __ret = static_cast<_Tp*>(::
operator new(__bytes));
 
  238           _Obj* 
volatile* __free_list = _M_get_free_list(__bytes);
 
  240           __scoped_lock sentry(_M_get_mutex());
 
  241           _Obj* __restrict__ __result = *__free_list;
 
  242           if (__builtin_expect(__result == 0, 0))
 
  243         __ret = 
static_cast<_Tp*
>(_M_refill(_M_round_up(__bytes)));
 
  246           *__free_list = __result->_M_free_list_link;
 
  247           __ret = 
reinterpret_cast<_Tp*
>(__result);
 
  250         std::__throw_bad_alloc();
 
  256   template<
typename _Tp>
 
  258     __pool_alloc<_Tp>::deallocate(pointer __p, size_type __n)
 
  260       if (__builtin_expect(__n != 0 && __p != 0, 
true))
 
  262       const size_t __bytes = __n * 
sizeof(_Tp);
 
  263       if (__bytes > static_cast<size_t>(_S_max_bytes) || _S_force_new > 0)
 
  264         ::
operator delete(__p);
 
  267           _Obj* 
volatile* __free_list = _M_get_free_list(__bytes);
 
  268           _Obj* __q = 
reinterpret_cast<_Obj*
>(__p);
 
  270           __scoped_lock sentry(_M_get_mutex());
 
  271           __q ->_M_free_list_link = *__free_list;
 
  277 _GLIBCXX_END_NAMESPACE_VERSION
 
Allocator using a memory pool with a single lock. 
GNU extensions for public use. 
`_Tp* __addressof(_Tp& __r) noexcept` — same as C++11 `std::addressof`.
Base class for __pool_alloc.