// dlmalloc.cxx
#define IAV(i) bin_at(i), bin_at(i)
#ifndef CYGIMP_MEMALLOC_ALLOCATOR_DLMALLOC_SAFE_MULTIPLE
static mbinptr av_[CYGPRI_MEMALLOC_ALLOCATOR_DLMALLOC_NAV * 2 + 2] = {
0, 0,
IAV(0), IAV(1), IAV(2), IAV(3), IAV(4), IAV(5), IAV(6), IAV(7),
IAV(8), IAV(9), IAV(10), IAV(11), IAV(12), IAV(13), IAV(14), IAV(15),
IAV(16), IAV(17), IAV(18), IAV(19), IAV(20), IAV(21), IAV(22), IAV(23),
IAV(24), IAV(25), IAV(26), IAV(27), IAV(28), IAV(29), IAV(30), IAV(31),
IAV(32), IAV(33), IAV(34), IAV(35), IAV(36), IAV(37), IAV(38), IAV(39),
IAV(40), IAV(41), IAV(42), IAV(43), IAV(44), IAV(45), IAV(46), IAV(47),
IAV(48), IAV(49), IAV(50), IAV(51), IAV(52), IAV(53), IAV(54), IAV(55),
IAV(56), IAV(57), IAV(58), IAV(59), IAV(60), IAV(61), IAV(62), IAV(63),
IAV(64), IAV(65), IAV(66), IAV(67), IAV(68), IAV(69), IAV(70), IAV(71),
IAV(72), IAV(73), IAV(74), IAV(75), IAV(76), IAV(77), IAV(78), IAV(79),
IAV(80), IAV(81), IAV(82), IAV(83), IAV(84), IAV(85), IAV(86), IAV(87),
IAV(88), IAV(89), IAV(90), IAV(91), IAV(92), IAV(93), IAV(94), IAV(95),
IAV(96), IAV(97), IAV(98), IAV(99), IAV(100), IAV(101), IAV(102), IAV(103),
IAV(104), IAV(105), IAV(106), IAV(107), IAV(108), IAV(109), IAV(110), IAV(111),
IAV(112), IAV(113), IAV(114), IAV(115), IAV(116), IAV(117), IAV(118), IAV(119),
IAV(120), IAV(121), IAV(122), IAV(123), IAV(124), IAV(125), IAV(126), IAV(127)
};
#endif
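/*
   Note on the initializer above (a summary, assuming the stock dlmalloc
   bin layout): each bin occupies two consecutive words of av_, which act
   as the fd/bk pointers of a dummy chunk header addressed via bin_at().
   IAV(i) therefore points both links of bin i back at its own header, so
   every bin starts out as an empty circular doubly-linked list. Slots
   av_[0] and av_[1] are not a bin; av_[1] is overlaid by `binblocks' below.
*/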
/* field-extraction macros */
#define first(b) ((b)->fd)
#define last(b) ((b)->bk)
/*
Indexing into bins
*/
#define bin_index(sz)                                                         \
(((((unsigned long)(sz)) >> 9) ==    0) ?       (((unsigned long)(sz)) >>  3):\
 ((((unsigned long)(sz)) >> 9) <=    4) ?  56 + (((unsigned long)(sz)) >>  6):\
 ((((unsigned long)(sz)) >> 9) <=   20) ?  91 + (((unsigned long)(sz)) >>  9):\
 ((((unsigned long)(sz)) >> 9) <=   84) ? 110 + (((unsigned long)(sz)) >> 12):\
 ((((unsigned long)(sz)) >> 9) <=  340) ? 119 + (((unsigned long)(sz)) >> 15):\
 ((((unsigned long)(sz)) >> 9) <= 1364) ? 124 + (((unsigned long)(sz)) >> 18):\
                                          126)
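/*
   Worked examples of the mapping above (a sketch; sz in bytes):
     bin_index(40)    -> 40>>9 == 0,             index = 40>>3             == 5
     bin_index(600)   -> 600>>9 == 1  (<= 4),    index = 56 + (600>>6)     == 65
     bin_index(40000) -> 40000>>9 == 78 (<= 84), index = 110 + (40000>>12) == 119
   Any sz with (sz>>9) > 1364 (i.e. sz >= 1365*512) lands in the last bin, 126.
*/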
/*
bins for chunks < 512 are all spaced SMALLBIN_WIDTH bytes apart, and hold
identically sized chunks. This is exploited in malloc.
*/
#define MAX_SMALLBIN_SIZE 512
#define SMALLBIN_WIDTH 8
#define SMALLBIN_WIDTH_BITS 3
#define MAX_SMALLBIN ((MAX_SMALLBIN_SIZE / SMALLBIN_WIDTH) - 1)
#define smallbin_index(sz) (((unsigned long)(sz)) >> SMALLBIN_WIDTH_BITS)
/*
Requests are `small' if both the corresponding and the next bin are small
*/
#define is_small_request(nb) ((nb) < MAX_SMALLBIN_SIZE - SMALLBIN_WIDTH)
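/*
   Example, with the values above: MAX_SMALLBIN_SIZE - SMALLBIN_WIDTH == 504.
   A request of nb == 496 maps to bin 62 and could also be satisfied from
   bin 63, both small bins, so it is a small request. nb == 504 maps to
   bin 63, whose *next* bin (64) is no longer small, so it is not.
*/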
/*
To help compensate for the large number of bins, a one-level index
structure is used for bin-by-bin searching. `binblocks' is a
one-word bitvector recording whether groups of BINBLOCKWIDTH bins
have any (possibly) non-empty bins, so they can be skipped over
all at once during traversals. The bits are NOT always
cleared as soon as all bins in a block are empty, but instead only
when all are noticed to be empty during traversal in malloc.
*/
#define BINBLOCKWIDTH 4 /* bins per block */
#define binblocks (bin_at(0)->size) /* bitvector of nonempty blocks */
/* bin<->block macros */
#define idx2binblock(ix) ((unsigned long)1 << ((ix) / BINBLOCKWIDTH))
#define mark_binblock(ii) (binblocks |= idx2binblock(ii))
#define clear_binblock(ii) (binblocks &= ~(idx2binblock(ii)))
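/*
   Example: bin 10 belongs to block 10/BINBLOCKWIDTH == 2, so
   idx2binblock(10) == (1ul << 2) == 0x4. mark_binblock(10) sets that bit
   in binblocks, and a traversal that finds binblocks & 0x4 clear can skip
   bins 8..11 without touching them.
*/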
//----------------------------------------------------------------------------
/*
Debugging support
*/
#ifdef CYGDBG_MEMALLOC_ALLOCATOR_DLMALLOC_DEBUG
/*
These routines make a number of assertions about the states
of data structures that should be true at all times. If any
are not true, it's very likely that a user program has somehow
trashed memory. (It's also possible that there is a coding error
in malloc. In which case, please report it!)
*/
void
Cyg_Mempool_dlmalloc_Implementation::do_check_chunk( mchunkptr p )
{
    INTERNAL_SIZE_T sz = p->size & ~PREV_INUSE;

    /* Check for legal address ... */
    ASSERT((cyg_uint8 *)p >= arenabase);
    if (p != top)
        ASSERT((cyg_uint8 *)p + sz <= (cyg_uint8 *)top);
    else
        ASSERT((cyg_uint8 *)p + sz <= arenabase + arenasize);
} // Cyg_Mempool_dlmalloc_Implementation::do_check_chunk()
void
Cyg_Mempool_dlmalloc_Implementation::do_check_free_chunk(mchunkptr p)
{
    INTERNAL_SIZE_T sz = p->size & ~PREV_INUSE;
    mchunkptr next = chunk_at_offset(p, sz);

    do_check_chunk(p);

    /* Check whether it claims to be free ... */
    ASSERT(!inuse(p));

    /* Unless a special marker, must have OK fields */
    if ((long)sz >= (long)MINSIZE)
    {
        ASSERT((sz & MALLOC_ALIGN_MASK) == 0);
        ASSERT(aligned_OK(chunk2mem(p)));
        /* ... matching footer field */
        ASSERT(next->prev_size == sz);
        /* ... and is fully consolidated */
        ASSERT(prev_inuse(p));
        ASSERT(next == top || inuse(next));
        /* ... and has minimally sane links */
        ASSERT(p->fd->bk == p);
        ASSERT(p->bk->fd == p);
    }
    else /* markers are always of size SIZE_SZ */
        ASSERT(sz == SIZE_SZ);
} // Cyg_Mempool_dlmalloc_Implementation::do_check_free_chunk()
void
Cyg_Mempool_dlmalloc_Implementation::do_check_inuse_chunk(mchunkptr p)
{
    mchunkptr next = next_chunk(p);

    do_check_chunk(p);

    /* Check whether it claims to be in use ... */
    ASSERT(inuse(p));

    /* ... and is surrounded by OK chunks.
       Since more things can be checked with free chunks than inuse ones,
       if an inuse chunk borders them and debug is on, it's worth doing them.
    */
    if (!prev_inuse(p))
    {
        mchunkptr prv = prev_chunk(p);
        ASSERT(next_chunk(prv) == p);
        do_check_free_chunk(prv);
    }
    if (next == top)
    {
        ASSERT(prev_inuse(next));
        ASSERT(chunksize(next) >= MINSIZE);
    }
    else if (!inuse(next))
        do_check_free_chunk(next);
} // Cyg_Mempool_dlmalloc_Implementation::do_check_inuse_chunk()
void
Cyg_Mempool_dlmalloc_Implementation::do_check_malloced_chunk(mchunkptr p,
                                                             INTERNAL_SIZE_T s)
{
    INTERNAL_SIZE_T sz = p->size & ~PREV_INUSE;
    long room = long_sub_size_t(sz, s);

    do_check_inuse_chunk(p);

    /* Legal size ... */
    ASSERT((long)sz >= (long)MINSIZE);
    ASSERT((sz & MALLOC_ALIGN_MASK) == 0);
    ASSERT(room >= 0);
    ASSERT(room < (long)MINSIZE);

    /* ... and alignment */
    ASSERT(aligned_OK(chunk2mem(p)));

    /* ... and was allocated at front of an available chunk */
    ASSERT(prev_inuse(p));
} // Cyg_Mempool_dlmalloc_Implementation::do_check_malloced_chunk()
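/*
   The `room' checks above encode a useful invariant: a chunk handed back
   for a request of `s' bytes may be at most MINSIZE-1 bytes larger than
   asked for, because any excess of MINSIZE or more would have been split
   off by malloc as a separate free chunk.
*/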
#define check_free_chunk(P) do_check_free_chunk(P)
#define check_inuse_chunk(P) do_check_inuse_chunk(P)
#define check_chunk(P) do_check_chunk(P)
#define check_malloced_chunk(P,N) do_check_malloced_chunk(P,N)
#else
#define check_free_chunk(P)
#define check_inuse_chunk(P)
#define check_chunk(P)
#define check_malloced_chunk(P,N)
#endif
//----------------------------------------------------------------------------
/*
Macro-based internal utilities
*/
/*
Linking chunks in bin lists.
Call these only with variables, not arbitrary expressions, as arguments.
*/
/*
Place chunk p of size s in its bin, in size order,
putting it ahead of others of same size.
*/
#define frontlink(P, S, IDX, BK, FD)                                          \
{                                                                             \
  if (S < MAX_SMALLBIN_SIZE)                                                  \
  {                                                                           \
    IDX = smallbin_index(S);                                                  \
    mark_binblock(IDX);                                                       \
    BK = bin_at(IDX);                                                         \
    FD = BK->fd;                                                              \
    P->bk = BK;                                                               \
    P->fd = FD;                                                               \
    FD->bk = BK->fd = P;                                                      \
  }                                                                           \
  else                                                                        \
  {                                                                           \
    IDX = bin_index(S);                                                       \
    BK = bin_at(IDX);                                                         \
    FD = BK->fd;                                                              \
    if (FD == BK) mark_binblock(IDX);                                         \
    else                                                                      \
    {                                                                         \
      while (FD != BK && S < chunksize(FD)) FD = FD->fd;                      \
      BK = FD->bk;                                                            \
    }                                                                         \
    P->bk = BK;                                                               \
    P->fd = FD;                                                               \
    FD->bk = BK->fd = P;                                                      \
  }                                                                           \
}
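/*
   In other words (a summary, not part of the original): small bins are kept
   LIFO, since every chunk in a small bin has the same size, while the larger
   bins are kept sorted in decreasing size order. The scan above inserts P in
   front of the first chunk no larger than itself, so equal-sized chunks end
   up LIFO there too.
*/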
/* take a chunk off a list */
#define unlink(P, BK, FD)                                                     \
{                                                                             \
  BK = P->bk;                                                                 \
  FD = P->fd;                                                                 \
  FD->bk = BK;                                                                \
  BK->fd = FD;                                                                \
}
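/*
   Sketch of the splice: if a bin list runs  B <-> X <-> P  (circularly back
   to B), then unlink(P, BK, FD) captures P's neighbours in BK and FD and
   re-links them directly, leaving  B <-> X.  P's own fd/bk are left
   dangling, which is harmless because P is immediately reused or re-linked.
*/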
/* Place p as the last remainder */
#define link_last_remainder(P)                                                \
{                                                                             \
  last_remainder->fd = last_remainder->bk = P;                                \
  P->fd = P->bk = last_remainder;                                             \
}
/* Clear the last_remainder bin */
#define clear_last_remainder                                                  \
  (last_remainder->fd = last_remainder->bk = last_remainder)
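/*
   last_remainder is a one-chunk pseudo-bin: link_last_remainder() makes its
   header and P point at each other, and clear_last_remainder points the
   header back at itself (the same empty-bin convention as av_ above).
   Keeping the most recently split-off remainder here helps locality for
   runs of similar-sized requests.
*/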
//----------------------------------------------------------------------------
Cyg_Mempool_dlmalloc_Implementation::Cyg_Mempool_dlmalloc_Implementation(
        cyg_uint8 *base, cyg_int32 size, CYG_ADDRWORD /* argthru */ )
{
    arenabase = base;
    arenasize = size;

    CYG_ADDRESS front_misalign;
    cyg_int32 correction;

#ifdef CYGIMP_MEMALLOC_ALLOCATOR_DLMALLOC_SAFE_MULTIPLE
    cyg_ucount16 i;
    av_[0] = av_[1] = 0;
    for (i = 0; i < CYGPRI_MEMALLOC_ALLOCATOR_DLMALLOC_NAV; i++) {
        av_[ i*2+2 ] = av_[ i*2+3 ] = bin_at(i);
    } // for
#elif defined(CYGDBG_USE_ASSERTS)
    static int instances;
    if ( ++instances > 1 )
        CYG_FAIL( "Multiple dlmalloc instances but "
                  "CYGIMP_MEMALLOC_ALLOCATOR_DLMALLOC_SAFE_MULTIPLE "
                  "not defined" );
#endif

    front_misalign = (CYG_ADDRESS)chunk2mem(base) & MALLOC_ALIGN_MASK;
    if ( front_misalign > 0 ) {
        correction = (MALLOC_ALIGNMENT) - front_misalign;
    } else {
        correction = 0;
    }

    // too small to be useful?
    if ( correction + 2*(unsigned)MALLOC_ALIGNMENT > (unsigned) size )
        // help catch errors. Don't fail now.
        arenabase = NULL;
    else {
        top = (mchunkptr)(base + correction);
        set_head(top, arenasize | PREV_INUSE);
    }
}
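/*
   Worked example of the correction arithmetic (a sketch, assuming a typical
   32-bit build where SIZE_SZ == 4, MALLOC_ALIGNMENT == 8 and chunk2mem()
   offsets by 2*SIZE_SZ): for base == 0x1002, chunk2mem(base) == 0x100A, so
   front_misalign == 0x100A & 7 == 2 and correction == 6. top then starts at
   0x1008, making chunk2mem(top) == 0x1010, 8-byte aligned. If
   correction + 2*MALLOC_ALIGNMENT (here 22) exceeded size, the pool would
   be rejected by setting arenabase to NULL.
*/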
//----------------------------------------------------------------------------
/* Main public routines */
/*
Malloc Algorithm:
The requested size is first converted into a usable form, `nb'.
*/