g++malloc.c
  { { 0, &(av[63].hd), &(av[63].hd) }, 0 },  { { 0, &(av[64].hd), &(av[64].hd) }, 0 },
  { { 0, &(av[65].hd), &(av[65].hd) }, 0 },  { { 0, &(av[66].hd), &(av[66].hd) }, 0 },
  { { 0, &(av[67].hd), &(av[67].hd) }, 0 },  { { 0, &(av[68].hd), &(av[68].hd) }, 0 },
  { { 0, &(av[69].hd), &(av[69].hd) }, 0 },  { { 0, &(av[70].hd), &(av[70].hd) }, 0 },
  { { 0, &(av[71].hd), &(av[71].hd) }, 0 },  { { 0, &(av[72].hd), &(av[72].hd) }, 0 },
  { { 0, &(av[73].hd), &(av[73].hd) }, 0 },  { { 0, &(av[74].hd), &(av[74].hd) }, 0 },
  { { 0, &(av[75].hd), &(av[75].hd) }, 0 },  { { 0, &(av[76].hd), &(av[76].hd) }, 0 },
  { { 0, &(av[77].hd), &(av[77].hd) }, 0 },  { { 0, &(av[78].hd), &(av[78].hd) }, 0 },
  { { 0, &(av[79].hd), &(av[79].hd) }, 0 },  { { 0, &(av[80].hd), &(av[80].hd) }, 0 },
  { { 0, &(av[81].hd), &(av[81].hd) }, 0 },  { { 0, &(av[82].hd), &(av[82].hd) }, 0 },
  { { 0, &(av[83].hd), &(av[83].hd) }, 0 },  { { 0, &(av[84].hd), &(av[84].hd) }, 0 },
  { { 0, &(av[85].hd), &(av[85].hd) }, 0 },  { { 0, &(av[86].hd), &(av[86].hd) }, 0 },
  { { 0, &(av[87].hd), &(av[87].hd) }, 0 },  { { 0, &(av[88].hd), &(av[88].hd) }, 0 },
  { { 0, &(av[89].hd), &(av[89].hd) }, 0 },  { { 0, &(av[90].hd), &(av[90].hd) }, 0 },
  { { 0, &(av[91].hd), &(av[91].hd) }, 0 },  { { 0, &(av[92].hd), &(av[92].hd) }, 0 },
  { { 0, &(av[93].hd), &(av[93].hd) }, 0 },  { { 0, &(av[94].hd), &(av[94].hd) }, 0 },
  { { 0, &(av[95].hd), &(av[95].hd) }, 0 },  { { 0, &(av[96].hd), &(av[96].hd) }, 0 },
  { { 0, &(av[97].hd), &(av[97].hd) }, 0 },  { { 0, &(av[98].hd), &(av[98].hd) }, 0 },
  { { 0, &(av[99].hd), &(av[99].hd) }, 0 },  { { 0, &(av[100].hd), &(av[100].hd) }, 0 },
  { { 0, &(av[101].hd), &(av[101].hd) }, 0 },  { { 0, &(av[102].hd), &(av[102].hd) }, 0 },
  { { 0, &(av[103].hd), &(av[103].hd) }, 0 },  { { 0, &(av[104].hd), &(av[104].hd) }, 0 },
  { { 0, &(av[105].hd), &(av[105].hd) }, 0 },  { { 0, &(av[106].hd), &(av[106].hd) }, 0 },
  { { 0, &(av[107].hd), &(av[107].hd) }, 0 },  { { 0, &(av[108].hd), &(av[108].hd) }, 0 },
  { { 0, &(av[109].hd), &(av[109].hd) }, 0 },  { { 0, &(av[110].hd), &(av[110].hd) }, 0 },
  { { 0, &(av[111].hd), &(av[111].hd) }, 0 },  { { 0, &(av[112].hd), &(av[112].hd) }, 0 },
  { { 0, &(av[113].hd), &(av[113].hd) }, 0 },  { { 0, &(av[114].hd), &(av[114].hd) }, 0 },
  { { 0, &(av[115].hd), &(av[115].hd) }, 0 },  { { 0, &(av[116].hd), &(av[116].hd) }, 0 },
  { { 0, &(av[117].hd), &(av[117].hd) }, 0 },  { { 0, &(av[118].hd), &(av[118].hd) }, 0 },
  { { 0, &(av[119].hd), &(av[119].hd) }, 0 }
};


/* indexing into bins */

static inline mbinptr size2bin(unsigned int sz)
{
  mbinptr b = av;
  while (sz >= (MINSIZE * 2)) { b += 4; sz >>= 1; } /* find power of 2 */
  b += (sz - MINSIZE) >> 2;                         /* find quadrant */
  return b;
}


/* counts maintained if MALLOC_STATS defined */

#ifdef MALLOC_STATS

static unsigned int sbrked_mem;
static unsigned int requested_mem;
static unsigned int malloced_mem;
static unsigned int freed_mem;
static unsigned int max_used_mem;

static unsigned int n_sbrks;
static unsigned int n_mallocs;
static unsigned int n_frees;
static unsigned int n_reallocs;
static unsigned int n_reallocs_with_copy;
static unsigned int n_avail;
static unsigned int max_inuse;

static unsigned int n_malloc_chunks;
static unsigned int n_malloc_bins;

static unsigned int n_split;
static unsigned int n_consol;


static void do_malloc_stats(const mchunkptr p)
{
  ++n_mallocs;
  if ((n_mallocs - n_frees) > max_inuse)
    max_inuse = n_mallocs - n_frees;

  malloced_mem += (p->size & ~(INUSE));
  if (malloced_mem - freed_mem > max_used_mem)
    max_used_mem = malloced_mem - freed_mem;
}

static void do_free_stats(const mchunkptr p)
{
  ++n_frees;
  freed_mem += (p->size & ~(INUSE));
}

#endif
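/*
 * Illustrative sketch, not part of the original g++malloc.c: a standalone
 * version of the size2bin() arithmetic above, with MINSIZE assumed to be 16
 * purely for the worked examples.  Each doubling of the chunk size advances
 * four bins ("find power of 2"); the remaining size bits then select one of
 * the four quadrants within that power of two.  size2bin_index is a
 * hypothetical name used only here.
 */
static unsigned int size2bin_index(unsigned int sz)
{
  unsigned int ix = 0;
  while (sz >= (16 * 2)) { ix += 4; sz >>= 1; }  /* find power of 2 */
  return ix + ((sz - 16) >> 2);                  /* find quadrant */
}
/* e.g. sz == 20 -> bin 1,  sz == 40 -> bin 5,  sz == 100 -> bin 10 */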
/* Utilities needed below for memalign */
/* This is redundant with libg++ support, but not if used stand-alone */

static unsigned int gcd(unsigned int a, unsigned int b)
{
  unsigned int tmp;

  if (b > a)
  {
    tmp = a; a = b; b = tmp;
  }
  for (;;)
  {
    if (b == 0)
      return a;
    else if (b == 1)
      return b;
    else
    {
      tmp = b;
      b = a % b;
      a = tmp;
    }
  }
}

static inline unsigned int lcm(unsigned int x, unsigned int y)
{
  return x / gcd(x, y) * y;
}


/* maintaining INUSE via size field */

#define inuse(p)       ((p)->size & INUSE)
#define set_inuse(p)   ((p)->size |= INUSE)
#define clear_inuse(p) ((p)->size &= ~INUSE)


/* operations on malloc_chunk addresses */

/* return ptr to next physical malloc_chunk */
#define next_chunk(p) ((mchunkptr)((char*)(p) + (p)->size))

/* return ptr to previous physical malloc_chunk */
#define prev_chunk(p) ((mchunkptr)((char*)(p) - ((((int*)(p))[-1]) & ~(INUSE))))

/* place size at front and back of chunk */

static inline void set_size(mchunkptr p, unsigned int sz)
{
  p->size = *((int*)((char*)(p) + sz - SIZE_SZ)) = sz;
}


/* conversion from malloc headers to user pointers, and back */

static inline void* chunk2mem(mchunkptr p)
{
  void* mem;
  set_inuse(p);
  mem = (void*)((char*)(p) + SIZE_SZ);
  return mem;
}

/* xxxx my own */
mchunkptr sanity_check(void* mem)
{
  mchunkptr p = (mchunkptr)((char*)(mem) - SIZE_SZ);

  /* a quick sanity check */
  unsigned int sz = p->size & ~(INUSE);
  if (p->size == sz || sz != *((int*)((char*)(p) + sz - SIZE_SZ)))
    malloc_user_error();

  return p;
}

static inline mchunkptr mem2chunk(void* mem)
{
  mchunkptr p = (mchunkptr)((char*)(mem) - SIZE_SZ);

  /* a quick sanity check */
  unsigned int sz = p->size & ~(INUSE);
  if (p->size == sz || sz != *((int*)((char*)(p) + sz - SIZE_SZ)))
    malloc_user_error();

  p->size = sz;   /* clears INUSE */
  return p;
}


/* maintaining bins & pointers */

/* maximum bin actually used */

static mbinptr malloc_maxbin = FIRSTBIN;


/* operations on lists inside bins */

/* take a chunk off a list */

static inline void unlink(mchunkptr p)
{
  mchunkptr b = p->bk;
  mchunkptr f = p->fd;

  f->bk = b;  b->fd = f;

  UPDATE_STATS (--n_avail);
}

/* split a chunk and place on the back of a list */

static inline void split(mchunkptr p, unsigned int offset)
{
  unsigned int room = p->size - offset;
  if (room >= MINSIZE)
  {
    mbinptr   bn = size2bin(room);                   /* new bin */
    mchunkptr h  = &(bn->hd);                        /* its head */
    mchunkptr b  = h->bk;                            /* old back element */
    mchunkptr t  = (mchunkptr)((char*)(p) + offset); /* remaindered chunk */

    /* set size */
    t->size = *((int*)((char*)(t) + room - SIZE_SZ)) = room;

    /* link up */
    t->bk = b;  t->fd = h;  h->bk = b->fd = t;

    /* adjust maxbin (h == b means was empty) */
    if (h == b && bn > malloc_maxbin) malloc_maxbin = bn;

    /* adjust size of chunk to be returned */
    p->size = *((int*)((char*)(p) + offset - SIZE_SZ)) = offset;

    UPDATE_STATS ((++n_split, ++n_avail));
  }
}

/* place a consolidated chunk on the back of a list */
/* like above, except no split */

static inline void consollink(mchunkptr p)
{
  mbinptr   bn = size2bin(p->size);
  mchunkptr h  = &(bn->hd);
  mchunkptr b  = h->bk;

  p->bk = b;  p->fd = h;  h->bk = b->fd = p;

  if (h == b && bn > malloc_maxbin) malloc_maxbin = bn;

  UPDATE_STATS(++n_avail);
}

/* place a freed chunk on the front of a list */

static inline void frontlink(mchunkptr p)
{
  mbinptr   bn = size2bin(p->size);
  mchunkptr h  = &(bn->hd);
  mchunkptr f  = h->fd;

  p->bk = h;  p->fd = f;  f->bk = h->fd = p;

  if (h == f && bn > malloc_maxbin) malloc_maxbin = bn;

  bn->dirty = 1;

  UPDATE_STATS(++n_avail);
}
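/*
 * Illustrative sketch, not part of the original g++malloc.c: each bin above
 * is a circular doubly-linked list whose sentinel head initially points at
 * itself (that is what the &(av[i].hd) self-references in the initializer
 * provide).  frontlink() splices a freed chunk in at the head of its bin,
 * while split()/consollink() splice at the tail.  The toy_* names below are
 * hypothetical; only the pointer discipline mirrors the code above.
 */
struct toy_node { struct toy_node* fd; struct toy_node* bk; };

static void toy_push_front(struct toy_node* h, struct toy_node* p)
{
  struct toy_node* f = h->fd;     /* old first element */
  p->bk = h;  p->fd = f;
  f->bk = h->fd = p;              /* same splice as frontlink() */
}

static void toy_push_back(struct toy_node* h, struct toy_node* p)
{
  struct toy_node* b = h->bk;     /* old last element */
  p->bk = b;  p->fd = h;
  h->bk = b->fd = p;              /* same splice as split()/consollink() */
}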
/* Dealing with sbrk */

/* To link consecutive sbrk regions when possible */

static int* last_sbrk_end;

/* who to call when sbrk returns failure */

#ifndef NO_NEW_HANDLER
typedef volatile void (*vfp)();
#ifdef __cplusplus
extern "C" vfp __new_handler;
#else
extern vfp __new_handler;
#endif
#endif

static mchunkptr malloc_from_sys(unsigned nb)
{
  mchunkptr p;
  unsigned int sbrk_size;
  int* ip;

  /* Minimally, we need to pad with enough space */
  /* to place dummy size/use fields to ends if needed */

  sbrk_size = ((nb + SBRK_UNIT - 1 + SIZE_SZ + SIZE_SZ) / SBRK_UNIT) * SBRK_UNIT;

  ip = (int*)(sbrk(sbrk_size));
  if ((char*)ip == (char*)(-1)) /* sbrk returns -1 on failure */
  {
#ifndef NO_NEW_HANDLER
    (*__new_handler) ();
#endif
    return 0;
  }

  UPDATE_STATS ((++n_sbrks, sbrked_mem += sbrk_size));

  if (last_sbrk_end != &ip[-1])
  {
    /* It's either first time through or someone else called sbrk. */
    /* Arrange end-markers at front & back */

    /* Shouldn't be necessary, but better to be safe */
    while (!aligned_OK(ip)) { ++ip; sbrk_size -= SIZE_SZ; }

    /* Mark the front as in use to prevent merging. */
    /* Note we can get away with only 1 word, not MINSIZE overhead here */

    *ip++ = SIZE_SZ | INUSE;

    p = (mchunkptr)ip;
    set_size(p, sbrk_size - (SIZE_SZ + SIZE_SZ));
  }
  else
  {
    mchunkptr l;

    /* We can safely make the header start at end of prev sbrked chunk. */
    /* We will still have space left at the end from a previous call */
    /* to place the end marker, below */

    p = (mchunkptr)(last_sbrk_end);
    set_size(p, sbrk_size);

    /* Even better, maybe we can merge with last fragment: */

    l = prev_chunk(p);
    if (!inuse(l))
    {
      unlink(l);
      set_size(l, p->size + l->size);
      p = l;
    }
  }

  /* mark the end of sbrked space as in use to prevent merging */

  last_sbrk_end = (int*)((char*)p + p->size);
  *last_sbrk_end = SIZE_SZ | INUSE;

  UPDATE_STATS((++n_avail, ++n_malloc_chunks));

  /* make it safe to unlink in malloc */
  UPDATE_STATS(++n_avail);
  p->fd = p->bk = p;

  return p;
}


/* Consolidate dirty bins. */
/* Stop if found a chunk big enough to satisfy current malloc request */

/* (It requires much less bookkeeping to consolidate entire bins */
/* at once than to keep records of which chunks might be */
/* consolidatable. So long as the lists are short, which we */
/* try to ensure via small bin ranges, there is little wasted effort.) */

static mchunkptr malloc_find_space(unsigned int nb)
{
  mbinptr b;