stl_alloc.h
return _S_FREELIST_INDEX(__bytes);
}
// Returns an object of size __n, and optionally adds entries to the
// free list for objects of size __n.
static void* _S_refill(size_t __n);
// Allocates a chunk for __nobjs objects of __p_size bytes each. __nobjs
// may be reduced if it is inconvenient to allocate the requested number.
static char* _S_chunk_alloc(size_t __p_size, int& __nobjs);
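// The definitions of _S_refill and _S_chunk_alloc are not in this excerpt.
// In the classic SGI design (assumed here), _S_refill asks _S_chunk_alloc
// for room for up to 20 objects, hands one back to the caller, and threads
// the rest onto the matching free list; _S_chunk_alloc carves blocks out of
// the pool delimited by _S_start_free/_S_end_free below, growing it from
// the heap when the pool runs dry. Sketch (the batch size of 20 is an
// assumption from the SGI design, not shown in this excerpt):
//
//   int   __nobjs = 20;
//   char* __chunk = _S_chunk_alloc(__n, __nobjs);   // may lower __nobjs
//   // one __n-byte block is returned; the remaining __nobjs - 1 blocks are
//   // linked onto _S_free_list[_S_FREELIST_INDEX(__n)]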
// Chunk allocation state.
static char* _S_start_free;
static char* _S_end_free;
static size_t _S_heap_size;
# ifdef __STL_THREADS
static _STL_mutex_base _S_node_allocator_lock;
# endif
// It would be nice to use _STL_auto_lock here, but we don't need the
// NULL check, and we do need a test of whether threads have actually
// been started.
class _Lock;
friend class _Lock;
class _Lock {
public:
_Lock() __STL_NOTHROW { __NODE_ALLOCATOR_LOCK; }
~_Lock() __STL_NOTHROW { __NODE_ALLOCATOR_UNLOCK; }
};
public:
// this one is needed for proper simple_alloc wrapping
typedef char value_type;
# if defined (__STL_MEMBER_TEMPLATE_CLASSES)
template <class _Tp1> struct rebind {
typedef __allocator<_Tp1, __node_alloc<__threads, __inst> > other;
};
# endif
/* __n must be > 0 */
static inline void * allocate(size_t __n) {
void* __r;
if (__n > (size_t)_MAX_BYTES) {
# ifdef __STL_NODE_ALLOC_USE_MALLOC
__r = __STL_VENDOR_CSTD::malloc(__n);
# else
__r = __stl_new(__n);
# endif
} else {
_Obj * __STL_VOLATILE * __my_free_list = _S_free_list + _S_FREELIST_INDEX(__n);
// Acquire the lock here with a constructor call.
// This ensures that it is released on exit or during stack
// unwinding.
# ifndef _NOTHREADS
/*REFERENCED*/
_Lock __lock_instance;
# endif
if ( (__r = *__my_free_list) != 0 ) {
*__my_free_list = ((_Obj*)__r) -> _M_free_list_link;
} else {
__r = _S_refill(__n);
}
// lock is released here
}
return __r;
}
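// Illustrative use of the small-object path above (not part of this header).
// The constants are assumed from the classic SGI configuration defined
// elsewhere in this file: 8-byte alignment and _MAX_BYTES == 128.
//
//   void* __small = _Node_alloc::allocate(13);   // rounded up to 16 bytes and
//                                                // popped from free list 1,
//                                                // since (13 + 7) / 8 - 1 == 1
//   void* __large = _Node_alloc::allocate(200);  // > _MAX_BYTES, so it comes
//                                                // from __stl_new / malloc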
/* __p must not be 0 */
static inline void deallocate(void *__p, size_t __n) {
if (__n > (size_t) _MAX_BYTES) {
# ifdef __STL_NODE_ALLOC_USE_MALLOC
__STL_VENDOR_CSTD::free(__p);
# else
__stl_delete(__p);
# endif
} else {
_Obj * __STL_VOLATILE * __my_free_list = _S_free_list + _S_FREELIST_INDEX(__n);
// acquire lock
# ifndef _NOTHREADS
/*REFERENCED*/
_Lock __lock_instance;
# endif /* _NOTHREADS */
((_Obj *)__p) -> _M_free_list_link = *__my_free_list;
*__my_free_list = (_Obj *)__p;
// lock is released here
}
}
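// Continuing the sketch above: deallocate never returns a small block to the
// heap; it just pushes it back onto the head of its free list (LIFO).
//
//   _Node_alloc::deallocate(__small, 13);   // __small->_M_free_list_link is
//                                           // set to the old head of list 1,
//                                           // and __small becomes the new head
//   _Node_alloc::deallocate(__large, 200);  // > _MAX_BYTES: __stl_delete / free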
# if 0
static void * reallocate(void *__p, size_t __old_sz, size_t __new_sz);
# endif
} ;
typedef __node_alloc<__NODE_ALLOCATOR_THREADS, 0> _Node_alloc;
# if defined ( __STL_USE_NEWALLOC )
# if defined ( __STL_DEBUG_ALLOC )
typedef __debug_alloc<__new_alloc> __sgi_alloc;
# else
typedef __new_alloc __sgi_alloc;
# endif /* __STL_DEBUG_ALLOC */
typedef __new_alloc __single_client_alloc;
typedef __new_alloc __multithreaded_alloc;
# elif defined (__STL_USE_MALLOC)
# if defined ( __STL_DEBUG_ALLOC )
typedef __debug_alloc<__malloc_alloc<0> > __sgi_alloc;
# else
typedef __malloc_alloc<0> __sgi_alloc;
# endif /* __STL_DEBUG_ALLOC */
typedef __malloc_alloc<0> __single_client_alloc;
typedef __malloc_alloc<0> __multithreaded_alloc;
# else
# if defined ( __STL_DEBUG_ALLOC )
typedef __debug_alloc<_Node_alloc> __sgi_alloc;
# else
typedef _Node_alloc __sgi_alloc;
# endif
typedef __node_alloc<false, 0> __single_client_alloc;
typedef __node_alloc<true, 0> __multithreaded_alloc;
# endif /* __STL_USE_NEWALLOC */
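// Summary of the selection above: __sgi_alloc is the default allocator used
// by the containers, and resolves to
//   __new_alloc        when __STL_USE_NEWALLOC is defined,
//   __malloc_alloc<0>  when __STL_USE_MALLOC is defined,
//   _Node_alloc        (the pooling node allocator above) otherwise,
// each optionally wrapped in __debug_alloc<> when __STL_DEBUG_ALLOC is set.
// One typical way to switch, assuming the usual STLport configuration
// headers pick the macro up, is to define it before any library include:
//
//   #define __STL_USE_MALLOC   /* containers then allocate straight from malloc */
//   #include <vector>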
// This implements allocators as specified in the C++ standard.
//
// Note that standard-conforming allocators use many language features
// that are not yet widely implemented. In particular, they rely on
// member templates, partial specialization, partial ordering of function
// templates, the typename keyword, and the use of the template keyword
// to refer to a template member of a dependent type.
template <class _Tp>
class allocator {
public:
typedef size_t size_type;
typedef ptrdiff_t difference_type;
typedef _Tp* pointer;
typedef const _Tp* const_pointer;
typedef _Tp& reference;
typedef const _Tp& const_reference;
typedef _Tp value_type;
# if defined (__STL_MEMBER_TEMPLATE_CLASSES)
template <class _Tp1> struct rebind {
typedef allocator<_Tp1> other;
};
# endif
allocator() __STL_NOTHROW {}
# if defined (__STL_MEMBER_TEMPLATES)
template <class _Tp1> allocator(const allocator<_Tp1>&) __STL_NOTHROW {}
# endif
# if (!defined(__STL_MEMBER_TEMPLATES) || defined (__STL_FUNCTION_TMPL_PARTIAL_ORDER) )
allocator(const allocator<_Tp>&) __STL_NOTHROW {}
# endif
~allocator() __STL_NOTHROW {}
static pointer address(reference __x) { return &__x; }
# if !defined (__WATCOM_CPLUSPLUS__)
static const_pointer address(const_reference __x) { return &__x; }
# endif
// __n is permitted to be 0. The C++ standard says nothing about what
// the return value is when __n == 0.
_Tp* allocate(size_type __n, const void* = 0) const {
return __n != 0 ? __STATIC_CAST(value_type*,__sgi_alloc::allocate(__n * sizeof(value_type)))
: 0;
}
// __p is permitted to be a null pointer only if __n == 0.
static void deallocate(pointer __p, size_type __n) {
__stl_assert( (__p == 0) == (__n == 0) );
if (__p != 0)
__sgi_alloc::deallocate((void*)__p, __n * sizeof(value_type));
}
// backwards compatibility
static void deallocate(pointer __p)
{ if (__p != 0) __sgi_alloc::deallocate((void*)__p, sizeof(value_type)); }
static size_type max_size() __STL_NOTHROW
{ return size_t(-1) / sizeof(value_type); }
static void construct(pointer __p, const _Tp& __val) { __STLPORT_STD::construct(__p, __val); }
static void destroy(pointer __p) { __STLPORT_STD::destroy(__p); }
};
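// Usage sketch for the standard-conforming allocator above (illustrative,
// not part of this header): the canonical allocate / construct / destroy /
// deallocate sequence.
//
//   allocator<int> __a;
//   int* __buf = __a.allocate(3);            // raw memory for 3 ints
//   for (int __i = 0; __i < 3; ++__i)
//     __a.construct(__buf + __i, __i);       // placement-construct each element
//   for (int __i = 0; __i < 3; ++__i)
//     __a.destroy(__buf + __i);              // run destructors (no-op for int)
//   __a.deallocate(__buf, 3);                // return the raw memory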
__STL_TEMPLATE_NULL
class allocator<void> {
public:
typedef size_t size_type;
typedef ptrdiff_t difference_type;
typedef void* pointer;
typedef const void* const_pointer;
# if defined (__STL_CLASS_PARTIAL_SPECIALIZATION)
typedef void value_type;
# endif
# if defined (__STL_MEMBER_TEMPLATE_CLASSES)
template <class _Tp1> struct rebind {
typedef allocator<_Tp1> other;
};
# endif
};
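// Rebinding sketch: a container instantiated with allocator<int> can obtain
// an allocator for its internal node type through rebind (requires
// __STL_MEMBER_TEMPLATE_CLASSES; _ListNode is a hypothetical type used only
// for illustration).
//
//   typedef allocator<int>::rebind<_ListNode>::other _NodeAllocator;
//   // _NodeAllocator is allocator<_ListNode>: the same allocation policy,
//   // re-targeted at the node type that a list<int> actually stores.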
template <class _T1, class _T2>
inline bool operator==(const allocator<_T1>&, const allocator<_T2>&)
{
return true;
}
template <class _T1, class _T2>
inline bool operator!=(const allocator<_T1>&, const allocator<_T2>&)
{
return false;
}
template<class _Tp, class _Alloc>
class __simple_alloc {
typedef _Alloc __alloc_type;
public:
typedef typename _Alloc::value_type __alloc_value_type;
typedef _Tp value_type;
static size_t __chunk(size_t __n) {
return (sizeof(__alloc_value_type)==sizeof(value_type)) ? __n :
((__n*sizeof(value_type)+sizeof(__alloc_value_type)-1)/sizeof(__alloc_value_type));
}
static _Tp* allocate(size_t __n)
{ return 0 == __n ? 0 : (_Tp*) __alloc_type::allocate(__chunk(__n)); }
static void deallocate(_Tp * __p, size_t __n) {
__alloc_type::deallocate((__alloc_value_type*)__p, __chunk(__n)); }
};
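// Worked example of __chunk above: __node_alloc declares
// "typedef char value_type", so when it is the underlying allocator the unit
// is one byte and an element count is converted to a byte count (assuming a
// 4-byte int here).
//
//   __simple_alloc<int, _Node_alloc>::__chunk(10)
//     == (10 * sizeof(int) + sizeof(char) - 1) / sizeof(char) == 40
//   int* __p = __simple_alloc<int, _Node_alloc>::allocate(10);   // 40 bytes
//   __simple_alloc<int, _Node_alloc>::deallocate(__p, 10);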
// Allocator adaptor to turn an SGI-style allocator (e.g. alloc, malloc_alloc)
// into a standard-conforming allocator. Note that this adaptor does
// *not* assume that all objects of the underlying alloc class are
// identical, nor does it assume that all of the underlying alloc's
// member functions are static member functions. Note, also, that
// __allocator<_Tp, alloc> is essentially the same thing as allocator<_Tp>.
template <class _Tp, class _Alloc>
struct __allocator : public _Alloc {
typedef _Alloc __underlying_alloc;
typedef size_t size_type;
typedef ptrdiff_t difference_type;
typedef _Tp* pointer;
typedef const _Tp* const_pointer;
typedef _Tp& reference;
typedef const _Tp& const_reference;
typedef _Tp value_type;
# if defined (__STL_MEMBER_TEMPLATE_CLASSES)
template <class _Tp1> struct rebind {
typedef __allocator<_Tp1, _Alloc> other;
};
# endif
__allocator() __STL_NOTHROW {}
__allocator(const _Alloc& ) __STL_NOTHROW {}
__allocator(const __allocator<_Tp, _Alloc>& __a) __STL_NOTHROW
: _Alloc(__a) {}
# if defined (__STL_MEMBER_TEMPLATES) && defined (__STL_FUNCTION_TMPL_PARTIAL_ORDER)
template <class _Tp1>
__allocator(const __allocator<_Tp1, _Alloc>& __a) __STL_NOTHROW
: _Alloc(__a) {}
# endif
~__allocator() __STL_NOTHROW {}
pointer address(reference __x) const { return &__x; }
# if !defined (__WATCOM_CPLUSPLUS__)
const_pointer address(const_reference __x) const { return &__x; }
# endif
// __n is permitted to be 0.
_Tp* allocate(size_type __n, const void* = 0) {
return __n != 0
? __STATIC_CAST(_Tp*,__underlying_alloc::allocate(__n * sizeof(_Tp)))
: 0;
}
// __p is not permitted to be a null pointer.
void deallocate(pointer __p, size_type __n)
{ if (__p) __underlying_alloc::deallocate(__p, __n * sizeof(_Tp)); }
size_type max_size() const __STL_NOTHROW
{ return size_t(-1) / sizeof(_Tp); }
void construct(pointer __p, const _Tp& __val) { __STLPORT_STD::construct(__p, __val); }
void destroy(pointer __p) { __STLPORT_STD::destroy(__p); }
};
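// Adaptor sketch (illustrative): wrapping the SGI-style node allocator so it
// can be passed wherever a standard allocator is expected. Note that
// __node_alloc's own rebind member above produces exactly this kind of type.
//
//   typedef __allocator<int, _Node_alloc> _StdNodeAlloc;
//   _StdNodeAlloc __a;
//   int* __p = __a.allocate(4);      // 4 * sizeof(int) bytes from _Node_alloc
//   __a.construct(__p, 42);
//   __a.destroy(__p);
//   __a.deallocate(__p, 4);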
#ifdef __STL_CLASS_PARTIAL_SPECIALIZATION
template <class _Alloc>
class __allocator<void, _Alloc> {
typedef size_t size_type;
typedef ptrdiff_t difference_type;
typedef void* pointer;
typedef const void* const_pointer;
typedef void value_type;
#ifdef __STL_MEMBER_TEMPLATE_CLASSES
template <class _Tp1> struct rebind {
typedef __allocator<_Tp1, _Alloc> other;
};
#endif
};
#endif
template <class _Tp, class _Alloc>
inline bool operator==(const __allocator<_Tp, _Alloc>& __a1,
const __allocator<_Tp, _Alloc>& __a2)
{
return __a1.__underlying_alloc == __a2.__underlying_alloc;
}
#ifdef __STL_USE_SEPARATE_RELOPS_NAMESPACE
template <class _Tp, class _Alloc>
inline bool operator!=(const __allocator<_Tp, _Alloc>& __a1,
const __allocator<_Tp, _Alloc>& __a2)
{
return __a1.__underlying_alloc != __a2.__underlying_alloc;
}
#endif /* __STL_USE_SEPARATE_RELOPS_NAMESPACE */