dlmalloc.cxx
      /* Get to the next possibly nonempty block */

      if ( (block <<= 1) <= binblocks && (block != 0) )
      {
        while ((block & binblocks) == 0)
        {
          idx += BINBLOCKWIDTH;
          block <<= 1;
        }
      }
      else
        break;
    }
  }

  /* Try to use top chunk */

  /* Require that there be a remainder, ensuring top always exists */
  remainder_size = long_sub_size_t(chunksize(top), nb);
  if (chunksize(top) < nb || remainder_size < (long)MINSIZE)
  {
      //diag_printf("chunksize(top)=%ld, nb=%d, remainder=%ld\n", chunksize(top),
      //            nb, remainder_size);
      MALLOC_UNLOCK;
      return NULL; /* propagate failure */
  }

  victim = top;
  set_head(victim, nb | PREV_INUSE);
  top = chunk_at_offset(victim, nb);
  set_head(top, remainder_size | PREV_INUSE);
  check_malloced_chunk(victim, nb);
  MALLOC_UNLOCK;
  return chunk2mem(victim);

} // Cyg_Mempool_dlmalloc_Implementation::try_alloc()

//----------------------------------------------------------------------------

/* free() algorithm :

     cases:

       1. free(NULL) has no effect.

       2. Chunks are consolidated as they arrive, and
          placed in corresponding bins. (This includes the case of
          consolidating with the current `last_remainder').
*/

cyg_bool
Cyg_Mempool_dlmalloc_Implementation::free( cyg_uint8 *mem, cyg_int32 )
{
  mchunkptr p;            /* chunk corresponding to mem */
  INTERNAL_SIZE_T hd;     /* its head field */
  INTERNAL_SIZE_T sz;     /* its size */
  int idx;                /* its bin index */
  mchunkptr next;         /* next contiguous chunk */
  INTERNAL_SIZE_T nextsz; /* its size */
  INTERNAL_SIZE_T prevsz; /* size of previous contiguous chunk */
  mchunkptr bck;          /* misc temp for linking */
  mchunkptr fwd;          /* misc temp for linking */
  int islr;               /* track whether merging with last_remainder */

  if (mem == NULL)        /* free(NULL) has no effect */
    return false;

  MALLOC_LOCK;

  p = mem2chunk(mem);
  hd = p->size;

  check_inuse_chunk(p);

  sz = hd & ~PREV_INUSE;
  next = chunk_at_offset(p, sz);
  nextsz = chunksize(next);

  if (next == top)                         /* merge with top */
  {
    sz += nextsz;

    if (!(hd & PREV_INUSE))                /* consolidate backward */
    {
      prevsz = p->prev_size;
      p = chunk_at_offset(p, -((long) prevsz));
      sz += prevsz;
      unlink(p, bck, fwd);
    }

    set_head(p, sz | PREV_INUSE);
    top = p;
    MALLOC_UNLOCK;
    return true;
  }

  set_head(next, nextsz);                  /* clear inuse bit */

  islr = 0;

  if (!(hd & PREV_INUSE))                  /* consolidate backward */
  {
    prevsz = p->prev_size;
    p = chunk_at_offset(p, -((long) prevsz));
    sz += prevsz;

    if (p->fd == last_remainder)           /* keep as last_remainder */
      islr = 1;
    else
      unlink(p, bck, fwd);
  }

  if (!(inuse_bit_at_offset(next, nextsz))) /* consolidate forward */
  {
    sz += nextsz;

    if (!islr && next->fd == last_remainder) /* re-insert last_remainder */
    {
      islr = 1;
      link_last_remainder(p);
    }
    else
      unlink(next, bck, fwd);
  }

  set_head(p, sz | PREV_INUSE);
  set_foot(p, sz);
  if (!islr)
    frontlink(p, sz, idx, bck, fwd);

  MALLOC_UNLOCK;

  return true;
} // Cyg_Mempool_dlmalloc_Implementation::free()

//----------------------------------------------------------------------------
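// Illustrative sketch, not part of the original file: a self-contained model
// of the boundary-tag tests that drive the consolidation in free() above.
// The demo_chunk struct and demo_* helpers are hypothetical names for
// exposition; real chunks are malloc_chunk, with the same first two fields.
#if 0
#include <stddef.h>

struct demo_chunk {
    size_t prev_size;   /* size of previous chunk, valid only if it is free */
    size_t size;        /* this chunk's size; low bit == PREV_INUSE         */
};

#define DEMO_PREV_INUSE 0x1

/* Backward merge is legal when our own PREV_INUSE bit is clear,
   i.e. the chunk physically before us is free. */
static int demo_can_merge_back(const struct demo_chunk *p)
{
    return (p->size & DEMO_PREV_INUSE) == 0;
}

/* Forward merge is legal when `next' is itself free, which dlmalloc
   records as a clear PREV_INUSE bit in the chunk *after* `next'. */
static int demo_can_merge_fwd(const struct demo_chunk *next, size_t nextsz)
{
    const struct demo_chunk *after =
        (const struct demo_chunk *)((const char *)next + nextsz);
    return (after->size & DEMO_PREV_INUSE) == 0;
}
#endif

//----------------------------------------------------------------------------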
// Resize an existing allocation. If poldsize is non-NULL, the previous
// allocation size is placed into it. If the previous size is not available,
// it is set to 0. NB the previous allocation size may have been rounded up.
// Occasionally the allocation can be adjusted *backwards* as well as, or
// instead of, forwards, therefore the address of the resized allocation is
// returned, or NULL if no resizing was possible.
// Note that this differs from ::realloc() in that no attempt is made to
// call malloc() if resizing is not possible - that is left to higher
// layers. The data is copied from old to new though.
// The effects of oldmem==NULL or bytes==0 are undefined.

// DOCUMENTATION FROM ORIGINAL FILE:
// (some now irrelevant parts elided)
/* Realloc algorithm:

    If the reallocation is for additional space, and the
    chunk can be extended, it is, else a malloc-copy-free sequence is
    taken. There are several different ways that a chunk could be
    extended. All are tried:

       * Extending forward into following adjacent free chunk.
       * Shifting backwards, joining preceding adjacent space
       * Both shifting backwards and extending forward.

    If the reallocation is for less space, and the new request is for
    a `small' (<512 bytes) size, then the newly unused space is lopped
    off and freed.

    The old unix realloc convention of allowing the last-free'd chunk
    to be used as an argument to realloc is no longer supported.
    I don't know of any programs still relying on this feature,
    and allowing it would also allow too many other incorrect
    usages of realloc to be sensible.
*/

cyg_uint8 *
Cyg_Mempool_dlmalloc_Implementation::resize_alloc( cyg_uint8 *oldmem,
                                                   cyg_int32 bytes,
                                                   cyg_int32 *poldsize )
{
  INTERNAL_SIZE_T nb;              /* padded request size */

  mchunkptr oldp;                  /* chunk corresponding to oldmem */
  INTERNAL_SIZE_T oldsize;         /* its size */

  mchunkptr newp;                  /* chunk to return */
  INTERNAL_SIZE_T newsize;         /* its size */
  cyg_uint8* newmem;               /* corresponding user mem */

  mchunkptr next;                  /* next contiguous chunk after oldp */
  INTERNAL_SIZE_T nextsize;        /* its size */

  mchunkptr prev;                  /* previous contiguous chunk before oldp */
  INTERNAL_SIZE_T prevsize;        /* its size */

  mchunkptr remainder;             /* holds split off extra space from newp */
  INTERNAL_SIZE_T remainder_size;  /* its size */

  mchunkptr bck;                   /* misc temp for linking */
  mchunkptr fwd;                   /* misc temp for linking */

  MALLOC_LOCK;

  newp    = oldp    = mem2chunk(oldmem);
  newsize = oldsize = chunksize(oldp);

  if (NULL != poldsize)
      *poldsize = oldsize - SIZE_SZ;

  nb = request2size(bytes);

  check_inuse_chunk(oldp);

  if ((long)(oldsize) < (long)(nb))
  {
    /* Try expanding forward */

    next = chunk_at_offset(oldp, oldsize);
    if (next == top || !inuse(next))
    {
      nextsize = chunksize(next);

      /* Forward into top only if a remainder */
      if (next == top)
      {
        if ((long)(nextsize + newsize) >= (long)(nb + MINSIZE))
        {
          newsize += nextsize;
          top = chunk_at_offset(oldp, nb);
          set_head(top, (newsize - nb) | PREV_INUSE);
          set_head_size(oldp, nb);
          MALLOC_UNLOCK;
          return chunk2mem(oldp);
        }
      }

      /* Forward into next chunk */
      else if (((long)(nextsize + newsize) >= (long)(nb)))
      {
        unlink(next, bck, fwd);
        newsize += nextsize;
        goto split;
      }
    }
    else
    {
      next = 0;
      nextsize = 0;
    }

    /* Try shifting backwards. */

    if (!prev_inuse(oldp))
    {
      prev = prev_chunk(oldp);
      prevsize = chunksize(prev);

      /* try forward + backward first to save a later consolidation */

      if (next != 0)
      {
        /* into top */
        if (next == top)
        {
          if ((long)(nextsize + prevsize + newsize) >= (long)(nb + MINSIZE))
          {
            unlink(prev, bck, fwd);
            newp = prev;
            newsize += prevsize + nextsize;
            newmem = chunk2mem(newp);
            MALLOC_COPY(newmem, oldmem, oldsize - SIZE_SZ);
            top = chunk_at_offset(newp, nb);
            set_head(top, (newsize - nb) | PREV_INUSE);
            set_head_size(newp, nb);
            MALLOC_UNLOCK;
            return newmem;
          }
        }

        /* into next chunk */
        else if (((long)(nextsize + prevsize + newsize) >= (long)(nb)))
        {
          unlink(next, bck, fwd);
          unlink(prev, bck, fwd);
          newp = prev;
          newsize += nextsize + prevsize;
          newmem = chunk2mem(newp);
          MALLOC_COPY(newmem, oldmem, oldsize - SIZE_SZ);
          goto split;
        }
      }

      /* backward only */
      if (prev != 0 && (long)(prevsize + newsize) >= (long)nb)
      {
        unlink(prev, bck, fwd);
        newp = prev;
        newsize += prevsize;
        newmem = chunk2mem(newp);
        MALLOC_COPY(newmem, oldmem, oldsize - SIZE_SZ);
        goto split;
      }
    }

    // couldn't resize the allocation in any direction, so return failure
    MALLOC_UNLOCK;
    return NULL;
  }

 split:  /* split off extra room in old or expanded chunk */

  remainder_size = long_sub_size_t(newsize, nb);

  if (remainder_size >= (long)MINSIZE)     /* split off remainder */
  {
    remainder = chunk_at_offset(newp, nb);
    set_head_size(newp, nb);
    set_head(remainder, remainder_size | PREV_INUSE);
    set_inuse_bit_at_offset(remainder, remainder_size);
    /* let free() deal with it */
    Cyg_Mempool_dlmalloc_Implementation::free( chunk2mem(remainder) );
  }
  else
  {
    set_head_size(newp, newsize);
    set_inuse_bit_at_offset(newp, newsize);
  }

  check_inuse_chunk(newp);
  MALLOC_UNLOCK;
  return chunk2mem(newp);

} // Cyg_Mempool_dlmalloc_Implementation::resize_alloc()

//----------------------------------------------------------------------------
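// Illustrative sketch, not part of the original file: how a higher layer
// might build a full realloc() from resize_alloc(), which (as documented
// above) deliberately never falls back to malloc-copy-free itself.
// demo_realloc() is a hypothetical name, and the try_alloc(cyg_int32)
// signature is assumed from the function closed above.
#if 0
#include <string.h>

static cyg_uint8 *
demo_realloc( Cyg_Mempool_dlmalloc_Implementation &pool,
              cyg_uint8 *oldmem, cyg_int32 newbytes )
{
    cyg_int32 oldsize = 0;
    cyg_uint8 *p = pool.resize_alloc( oldmem, newbytes, &oldsize );
    if (p != NULL)
        return p;          /* resized in place (possibly shifted backwards) */

    /* Resizing failed: do the malloc-copy-free step that resize_alloc()
       leaves to higher layers. */
    p = pool.try_alloc( newbytes );
    if (p == NULL)
        return NULL;
    if (oldsize > 0)       /* oldsize == 0 means the old size was unknown */
        memcpy( p, oldmem, oldsize < newbytes ? oldsize : newbytes );
    pool.free( oldmem, oldsize );
    return p;
}
#endif

//----------------------------------------------------------------------------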
// Get memory pool status
// flags is a bitmask of requested fields to fill in. The flags are
// defined in common.hxx

void
Cyg_Mempool_dlmalloc_Implementation::get_status(
    cyg_mempool_status_flag_t flags, Cyg_Mempool_Status &status )
{
    if (0 != (flags&(CYG_MEMPOOL_STAT_FREEBLOCKS|CYG_MEMPOOL_STAT_TOTALFREE|
                     CYG_MEMPOOL_STAT_TOTALALLOCATED|CYG_MEMPOOL_STAT_MAXFREE)))
    {
        int i;
        mbinptr b;
        mchunkptr p;
        cyg_int32 chunksizep;
        cyg_int32 maxfree;
#ifdef CYGDBG_MEMALLOC_ALLOCATOR_DLMALLOC_DEBUG
        mchunkptr q;
#endif

        INTERNAL_SIZE_T avail = chunksize(top);
        int navail = ((long)(avail) >= (long)MINSIZE)? 1 : 0;
        maxfree = avail;

        for (i = 1; i < CYGPRI_MEMALLOC_ALLOCATOR_DLMALLOC_NAV; ++i)
        {
            b = bin_at(i);
            for (p = last(b); p != b; p = p->bk)
            {
#ifdef CYGDBG_MEMALLOC_ALLOCATOR_DLMALLOC_DEBUG
                check_free_chunk(p);
                for (q = next_chunk(p);
                     (q < top) && inuse(q) &&
                         (long)(chunksize(q)) >= (long)MINSIZE;
                     q = next_chunk(q))
                    check_inuse_chunk(q);
#endif
                chunksizep = chunksize(p);
                avail += chunksizep;
                if ( chunksizep > maxfree )
                    maxfree = chunksizep;
                navail++;
            }
        }

        if ( 0 != (flags & CYG_MEMPOOL_STAT_TOTALALLOCATED) )
            status.totalallocated = arenasize - avail;
        // as quick or quicker to just set most of these, rather than
        // test flag first
        status.totalfree = (avail & ~(MALLOC_ALIGN_MASK)) - SIZE_SZ - MINSIZE;
        CYG_ASSERT( ((avail + SIZE_SZ + MALLOC_ALIGN_MASK) & ~MALLOC_ALIGN_MASK)
                    >= MINSIZE, "free mem negative!" );
        status.freeblocks = navail;
        status.maxfree = (maxfree & ~(MALLOC_ALIGN_MASK)) - SIZE_SZ - MINSIZE;
        //diag_printf("raw mf: %d, ret mf: %d\n", maxfree, status.maxfree);
        CYG_ASSERT( ((maxfree + SIZE_SZ + MALLOC_ALIGN_MASK) & ~MALLOC_ALIGN_MASK)
                    >= MINSIZE, "max free block size negative!" );
    } // if

    // as quick or quicker to just set most of these, rather than
    // test flag first
    status.arenabase = status.origbase = arenabase;
    status.arenasize = status.origsize = arenasize;
    status.maxoverhead = MINSIZE + MALLOC_ALIGNMENT;

} // Cyg_Mempool_dlmalloc_Implementation::get_status()
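//----------------------------------------------------------------------------

// Illustrative sketch, not part of the original file: requesting only the
// statistics a caller needs, since any of the four CYG_MEMPOOL_STAT_* flags
// tested above triggers the full bin walk. demo_report() is a hypothetical
// name; the flags and Cyg_Mempool_Status come from common.hxx as noted above.
#if 0
static void
demo_report( Cyg_Mempool_dlmalloc_Implementation &pool )
{
    Cyg_Mempool_Status status;

    /* Only total free and largest free block are wanted here. */
    pool.get_status( CYG_MEMPOOL_STAT_TOTALFREE | CYG_MEMPOOL_STAT_MAXFREE,
                     status );

    diag_printf( "total free: %d, largest free block: %d\n",
                 (int)status.totalfree, (int)status.maxfree );
}
#endif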
//----------------------------------------------------------------------------

// EOF dlmalloc.cxx