void AllocPool::Free(void *inPtr)
{
#ifdef DISABLE_MEMORY_POOLS
    free(inPtr);
    return;
#endif

    check_pool();

    if (inPtr == 0) return; /* free(0) has no effect */

    AllocChunkPtr chunk = MemToChunk(inPtr);
    check_inuse_chunk(chunk);
    garbage_fill(chunk);

    size_t size = chunk->Size();

    if (!chunk->PrevInUse()) /* consolidate backward */
    {
        size_t prevSize = chunk->PrevSize();
        chunk = chunk->ChunkAtOffset(0L - prevSize);
        size += prevSize;
        UnlinkFree(chunk);
    }

    AllocChunkPtr next = chunk->ChunkAtOffset(size);
    if (!next->InUse()) /* consolidate forward */
    {
        size += next->Size();
        UnlinkFree(next);
    }

    chunk->SetSizeFree(size);
    if (mAreaMoreSize && chunk->IsArea()) {
        // whole area is free
        FreeArea(chunk);
    } else {
        LinkFree(chunk);
    }
    check_pool();
}
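/* ---- example: boundary-tag coalescing (illustrative sketch) ----
   AllocPool::Free above merges a freed chunk with free neighbours by
   reading size fields stored at the chunk boundaries.  Below is a
   minimal, self-contained sketch of that idea; the Chunk layout and
   names are hypothetical, not the SuperCollider AllocPool API. */
#include <assert.h>
#include <stddef.h>
#include <stdio.h>

#define INUSE 1u

typedef struct Chunk {
    size_t prev_size; /* size of previous chunk, valid only when it is free */
    size_t head;      /* this chunk's size, low bit = in-use flag */
} Chunk;

static size_t chunk_size(Chunk *c)         { return c->head & ~INUSE; }
static int    chunk_in_use(Chunk *c)       { return (int)(c->head & INUSE); }
static Chunk *chunk_at(Chunk *c, size_t o) { return (Chunk *)((char *)c + o); }

/* Forward consolidation: absorb a free successor into c.  Backward
   consolidation is symmetric, using prev_size to step to the previous
   header, as Free() does above. */
static Chunk *coalesce_forward(Chunk *c)
{
    Chunk *next = chunk_at(c, chunk_size(c));
    if (!chunk_in_use(next))
        c->head = (chunk_size(c) + chunk_size(next)) | (c->head & INUSE);
    return c;
}

int main(void)
{
    /* Two adjacent 32-byte chunks carved out of a static arena. */
    static unsigned char arena[64];
    Chunk *a = (Chunk *)arena;
    a->head = 32 | INUSE;
    Chunk *b = chunk_at(a, 32);
    b->head = 32;                 /* b is already free */

    a->head &= ~INUSE;            /* "free" a ... */
    coalesce_forward(a);          /* ... and absorb b */
    assert(chunk_size(a) == 64);
    printf("merged chunk size: %zu\n", chunk_size(a));
    return 0;
}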
/* ------------------------------ realloc ------------------------------ */
void* ulibc_realloc(void* oldmem, size_t bytes)
{
    mstate av;

    size_t nb;                    /* padded request size */

    mchunkptr oldp;               /* chunk corresponding to oldmem */
    size_t oldsize;               /* its size */

    mchunkptr newp;               /* chunk to return */
    size_t newsize;               /* its size */
    void* newmem;                 /* corresponding user mem */

    mchunkptr next;               /* next contiguous chunk after oldp */

    mchunkptr remainder;          /* extra space at end of newp */
    unsigned long remainder_size; /* its size */

    mchunkptr bck;                /* misc temp for linking */
    mchunkptr fwd;                /* misc temp for linking */

    unsigned long copysize;       /* bytes to copy */
    unsigned int ncopies;         /* size_t words to copy */
    size_t* s;                    /* copy source */
    size_t* d;                    /* copy destination */

    void *retval;

    /* Check for special cases. */
    if (!oldmem)
        return ulibc_malloc(bytes);
    if (!bytes) {
        ulibc_free(oldmem);
        return NULL;
    }

    av = get_malloc_state();
    checked_request2size(bytes, nb);

    oldp = mem2chunk(oldmem);
    oldsize = chunksize(oldp);

    check_inuse_chunk(oldp);

    if (!chunk_is_mmapped(oldp)) {

        if ((unsigned long)(oldsize) >= (unsigned long)(nb)) {
            /* already big enough; split below */
            newp = oldp;
            newsize = oldsize;
        }

        else {
            next = chunk_at_offset(oldp, oldsize);

            /* Try to expand forward into top */
            if (next == av->top &&
                    (unsigned long)(newsize = oldsize + chunksize(next)) >=
                    (unsigned long)(nb + MINSIZE)) {
                set_head_size(oldp, nb);
                av->top = chunk_at_offset(oldp, nb);
                set_head(av->top, (newsize - nb) | PREV_INUSE);
                retval = chunk2mem(oldp);
                goto DONE;
            }

            /* Try to expand forward into next chunk; split off remainder below */
            else if (next != av->top &&
                    !inuse(next) &&
                    (unsigned long)(newsize = oldsize + chunksize(next)) >=
                    (unsigned long)(nb)) {
                newp = oldp;
                unlink(next, bck, fwd);
            }

            /* allocate, copy, free */
            else {
                newmem = malloc(nb - MALLOC_ALIGN_MASK);
                if (newmem == 0) {
                    retval = 0; /* propagate failure */
                    goto DONE;
                }

                newp = mem2chunk(newmem);
                newsize = chunksize(newp);

                /* Avoid copy if newp is next chunk after oldp. */
                if (newp == next) {
                    newsize += oldsize;
                    newp = oldp;
                }
                else {
                    /*
                       Unroll copy of <= 36 bytes (72 if 8byte sizes).
                       We know that contents have an odd number of
                       size_t-sized words; minimally 3.
                    */
                    copysize = oldsize - (sizeof(size_t));
                    s = (size_t*)(oldmem);
                    d = (size_t*)(newmem);
                    ncopies = copysize / sizeof(size_t);
                    assert(ncopies >= 3);

                    if (ncopies > 9)
                        memcpy(d, s, copysize);
                    else {
                        *(d+0) = *(s+0);
                        *(d+1) = *(s+1);
                        *(d+2) = *(s+2);
                        if (ncopies > 4) {
                            *(d+3) = *(s+3);
                            *(d+4) = *(s+4);
                            if (ncopies > 6) {
                                *(d+5) = *(s+5);
                                *(d+6) = *(s+6);
                                if (ncopies > 8) {
                                    *(d+7) = *(s+7);
                                    *(d+8) = *(s+8);
                                }
                            }
                        }
                    }

                    ulibc_free(oldmem);
                    check_inuse_chunk(newp);
                    retval = chunk2mem(newp);
                    goto DONE;
                }
            }
        }

        /* If possible, free extra space in old or extended chunk */
        assert((unsigned long)(newsize) >= (unsigned long)(nb));

        remainder_size = newsize - nb;

        if (remainder_size < MINSIZE) { /* not enough extra to split off */
            set_head_size(newp, newsize);
            set_inuse_bit_at_offset(newp, newsize);
        }
        else { /* split remainder */
            remainder = chunk_at_offset(newp, nb);
            set_head_size(newp, nb);
            set_head(remainder, remainder_size | PREV_INUSE);
            /* Mark remainder as inuse so free() won't complain */
            set_inuse_bit_at_offset(remainder, remainder_size);
            ulibc_free(chunk2mem(remainder));
        }

        check_inuse_chunk(newp);
        retval = chunk2mem(newp);
        goto DONE;
    }

    /* Handle mmap cases */
    else {
        size_t offset = oldp->prev_size;
        size_t pagemask = av->pagesize - 1;
        char *cp;
        unsigned long sum;

        /* Note the extra (sizeof(size_t)) overhead */
        newsize = (nb + offset + (sizeof(size_t)) + pagemask) & ~pagemask;

        /* don't need to remap if still within same page */
        if (oldsize == newsize - offset) {
            retval = oldmem;
            goto DONE;
        }

        cp = (char*)mremap((char*)oldp - offset, oldsize + offset, newsize, 1);

        if (cp != (char*)MORECORE_FAILURE) {
            newp = (mchunkptr)(cp + offset);
            set_head(newp, (newsize - offset) | IS_MMAPPED);

            assert(aligned_OK(chunk2mem(newp)));
            assert((newp->prev_size == offset));

            /* update statistics */
            sum = av->mmapped_mem += newsize - oldsize;
            if (sum > (unsigned long)(av->max_mmapped_mem))
                av->max_mmapped_mem = sum;
            sum += av->sbrked_mem;
            if (sum > (unsigned long)(av->max_total_mem))
                av->max_total_mem = sum;

            retval = chunk2mem(newp);
            goto DONE;
        }

        /* Note the extra (sizeof(size_t)) overhead. */
        if ((unsigned long)(oldsize) >= (unsigned long)(nb + (sizeof(size_t))))
            newmem = oldmem; /* do nothing */
        else {
            /* Must alloc, copy, free. */
            newmem = malloc(nb - MALLOC_ALIGN_MASK);
            if (newmem != 0) {
                memcpy(newmem, oldmem, oldsize - 2*(sizeof(size_t)));
                ulibc_free(oldmem);
            }
        }
        retval = newmem;
    }

DONE:
    return retval;
}
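/* ---- example: the special cases at the top of ulibc_realloc ----
   A short usage sketch.  Per the code above, realloc(NULL, n) acts as
   malloc(n), and realloc(p, 0) frees p and returns NULL; ISO C leaves
   the size-0 case implementation-defined, so this demo relies only on
   the NULL-pointer case. */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

int main(void)
{
    char *p = realloc(NULL, 16);   /* equivalent to malloc(16) */
    if (!p) return 1;
    strcpy(p, "hello");
    char *q = realloc(p, 4096);    /* may move; contents are preserved */
    if (!q) { free(p); return 1; }
    printf("%s\n", q);             /* still prints "hello" */
    free(q);
    return 0;
}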
/* ------------------------------ free ------------------------------ */
void free(void* mem)
{
    mstate av;

    mchunkptr p;         /* chunk corresponding to mem */
    size_t size;         /* its size */
    mfastbinptr* fb;     /* associated fastbin */
    mchunkptr nextchunk; /* next contiguous chunk */
    size_t nextsize;     /* its size */
    int nextinuse;       /* true if nextchunk is used */
    size_t prevsize;     /* size of previous contiguous chunk */
    mchunkptr bck;       /* misc temp for linking */
    mchunkptr fwd;       /* misc temp for linking */

    /* free(0) has no effect */
    if (mem == NULL)
        return;

    __MALLOC_LOCK;
    av = get_malloc_state();
    p = mem2chunk(mem);
    size = chunksize(p);

    check_inuse_chunk(p);

    /*
       If eligible, place chunk on a fastbin so it can
       be found and used quickly in malloc.
    */
    if ((unsigned long)(size) <= (unsigned long)(av->max_fast)

#if TRIM_FASTBINS
            /* If TRIM_FASTBINS set, don't place chunks
               bordering top into fastbins */
            && (chunk_at_offset(p, size) != av->top)
#endif
       ) {

        set_fastchunks(av);
        fb = &(av->fastbins[fastbin_index(size)]);
        p->fd = *fb;
        *fb = p;
    }

    /*
       Consolidate other non-mmapped chunks as they arrive.
    */
    else if (!chunk_is_mmapped(p)) {
        set_anychunks(av);

        nextchunk = chunk_at_offset(p, size);
        nextsize = chunksize(nextchunk);

        /* consolidate backward */
        if (!prev_inuse(p)) {
            prevsize = p->prev_size;
            size += prevsize;
            p = chunk_at_offset(p, -((long) prevsize));
            unlink(p, bck, fwd);
        }

        if (nextchunk != av->top) {
            /* get and clear inuse bit */
            nextinuse = inuse_bit_at_offset(nextchunk, nextsize);
            set_head(nextchunk, nextsize);

            /* consolidate forward */
            if (!nextinuse) {
                unlink(nextchunk, bck, fwd);
                size += nextsize;
            }

            /*
               Place the chunk in unsorted chunk list. Chunks are
               not placed into regular bins until after they have
               been given one chance to be used in malloc.
            */
            bck = unsorted_chunks(av);
            fwd = bck->fd;
            p->bk = bck;
            p->fd = fwd;
            bck->fd = p;
            fwd->bk = p;

            set_head(p, size | PREV_INUSE);
            set_foot(p, size);

            check_free_chunk(p);
        }

        /*
           If the chunk borders the current high end of memory,
           consolidate into top
        */
        else {
            size += nextsize;
            set_head(p, size | PREV_INUSE);
            av->top = p;
            check_chunk(p);
        }

        /*
           If freeing a large space, consolidate possibly-surrounding
           chunks. Then, if the total unused topmost memory exceeds trim
           threshold, ask malloc_trim to reduce top.

           Unless max_fast is 0, we don't know if there are fastbins
           bordering top, so we cannot tell for sure whether threshold
           has been reached unless fastbins are consolidated.  But we
           don't want to consolidate on each free.  As a compromise,
           consolidation is performed if FASTBIN_CONSOLIDATION_THRESHOLD
           is reached.
        */
        if ((unsigned long)(size) >= FASTBIN_CONSOLIDATION_THRESHOLD) {
            if (have_fastchunks(av))
                __malloc_consolidate(av);

            if ((unsigned long)(chunksize(av->top)) >=
                    (unsigned long)(av->trim_threshold))
                __malloc_trim(av->top_pad, av);
        }
    }

    /*
       If the chunk was allocated via mmap, release via munmap().
       Note that if HAVE_MMAP is false but chunk_is_mmapped is true,
       then user must have overwritten memory. There's nothing we can
       do to catch this error unless DEBUG is set, in which case
       check_inuse_chunk (above) will have triggered error.
    */
    else {
        size_t offset = p->prev_size;
        av->n_mmaps--;
        av->mmapped_mem -= (size + offset);
        munmap((char*)p - offset, size + offset);
    }
    __MALLOC_UNLOCK;
}
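/* ---- example: the fastbin push above, in isolation ----
   Small chunks are pushed LIFO onto a singly linked list chosen by a
   pure function of the chunk size.  The index formula below follows
   the classic dlmalloc/uClibc definition for a 4-byte size_t; the
   shift differs on other word sizes, so treat it as illustrative. */
#include <assert.h>
#include <stdio.h>

typedef struct fchunk { struct fchunk *fd; } fchunk;

static unsigned fastbin_index_demo(unsigned sz) { return (sz >> 3) - 2; }

int main(void)
{
    fchunk *bins[8] = { 0 };
    fchunk a, b;

    /* push two 24-byte chunks, mirroring: p->fd = *fb; *fb = p; */
    unsigned idx = fastbin_index_demo(24);
    a.fd = bins[idx]; bins[idx] = &a;
    b.fd = bins[idx]; bins[idx] = &b;

    assert(idx == 1);
    assert(bins[idx] == &b && b.fd == &a && a.fd == 0); /* LIFO order */
    printf("24-byte chunks land in fastbin %u\n", idx);
    return 0;
}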
/* ------------------------- __malloc_consolidate -------------------------

  __malloc_consolidate is a specialized version of free() that tears down
  chunks held in fastbins.  Free itself cannot be used for this purpose
  since, among other things, it might place chunks back onto fastbins.
  So, instead, we need to use a minor variant of the same code.

  Also, because this routine needs to be called the first time through
  malloc anyway, it turns out to be the perfect place to trigger
  initialization code.
*/
void attribute_hidden __malloc_consolidate(mstate av)
{
    mfastbinptr* fb;            /* current fastbin being consolidated */
    mfastbinptr* maxfb;         /* last fastbin (for loop control) */
    mchunkptr p;                /* current chunk being consolidated */
    mchunkptr nextp;            /* next chunk to consolidate */
    mchunkptr unsorted_bin;     /* bin header */
    mchunkptr first_unsorted;   /* chunk to link to */

    /* These have same use as in free() */
    mchunkptr nextchunk;
    size_t size;
    size_t nextsize;
    size_t prevsize;
    int nextinuse;
    mchunkptr bck;
    mchunkptr fwd;

    /*
       If max_fast is 0, we know that av hasn't yet
       been initialized, in which case do so below
    */
    if (av->max_fast != 0) {
        clear_fastchunks(av);

        unsorted_bin = unsorted_chunks(av);

        /*
           Remove each chunk from fast bin and consolidate it, placing it
           then in unsorted bin. Among other reasons for doing this,
           placing in unsorted bin avoids needing to calculate actual bins
           until malloc is sure that chunks aren't immediately going to be
           reused anyway.
        */
        maxfb = &(av->fastbins[fastbin_index(av->max_fast)]);
        fb = &(av->fastbins[0]);
        do {
            if ((p = *fb) != 0) {
                *fb = 0;

                do {
                    check_inuse_chunk(p);
                    nextp = p->fd;

                    /* Slightly streamlined version of consolidation code in free() */
                    size = p->size & ~PREV_INUSE;
                    nextchunk = chunk_at_offset(p, size);
                    nextsize = chunksize(nextchunk);

                    if (!prev_inuse(p)) {
                        prevsize = p->prev_size;
                        size += prevsize;
                        p = chunk_at_offset(p, -((long) prevsize));
                        unlink(p, bck, fwd);
                    }

                    if (nextchunk != av->top) {
                        nextinuse = inuse_bit_at_offset(nextchunk, nextsize);
                        set_head(nextchunk, nextsize);

                        if (!nextinuse) {
                            size += nextsize;
                            unlink(nextchunk, bck, fwd);
                        }

                        first_unsorted = unsorted_bin->fd;
                        unsorted_bin->fd = p;
                        first_unsorted->bk = p;

                        set_head(p, size | PREV_INUSE);
                        p->bk = unsorted_bin;
                        p->fd = first_unsorted;
                        set_foot(p, size);
                    }
                    else {
                        size += nextsize;
                        set_head(p, size | PREV_INUSE);
                        av->top = p;
                    }

                } while ((p = nextp) != 0);
            }
        } while (fb++ != maxfb);
    }
    else {
        malloc_init_state(av);
        check_malloc_state();
    }
}
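/* ---- example: the unsorted-bin head insert above, in isolation ----
   The five assignments that link a consolidated chunk into the
   unsorted bin are a plain head insert into a circular doubly linked
   list whose header is its own sentinel.  A minimal sketch with
   illustrative types: */
#include <assert.h>
#include <stddef.h>

typedef struct node { struct node *fd, *bk; } node;

static void list_init(node *head) { head->fd = head->bk = head; }

static void list_push_front(node *head, node *p)
{
    node *first = head->fd;  /* mirrors: first_unsorted = unsorted_bin->fd */
    head->fd = p;
    first->bk = p;
    p->bk = head;
    p->fd = first;
}

int main(void)
{
    node bin, a, b;
    list_init(&bin);
    list_push_front(&bin, &a);
    list_push_front(&bin, &b);  /* b is now first, a second: LIFO order */
    assert(bin.fd == &b && b.fd == &a && a.fd == &bin);
    assert(bin.bk == &a && a.bk == &b && b.bk == &bin);
    return 0;
}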
/* ------------------------- __malloc_consolidate -------------------------

  __malloc_consolidate is a specialized version of free() that tears down
  chunks held in fastbins.  Free itself cannot be used for this purpose
  since, among other things, it might place chunks back onto fastbins.
  So, instead, we need to use a minor variant of the same code.

  Also, because this routine needs to be called the first time through
  malloc anyway, it turns out to be the perfect place to trigger
  initialization code.
*/
void attribute_hidden __malloc_consolidate(mstate av)
{
    mfastbinptr* fb;            /* current fastbin being consolidated */
    mfastbinptr* maxfb;         /* last fastbin (for loop control) */
    mchunkptr p;                /* current chunk being consolidated */
    mchunkptr nextp;            /* next chunk to consolidate */
    mchunkptr unsorted_bin;     /* bin header */
    mchunkptr first_unsorted;   /* chunk to link to */
    ustate unit;                /* unit state owning the current chunk */

    /* These have same use as in free() */
    mchunkptr nextchunk;
    size_t size;
    size_t nextsize;
    size_t prevsize;
    int nextinuse;
    mchunkptr bck;
    mchunkptr fwd;

    /*
       If max_fast is 0, we know that av hasn't yet
       been initialized, in which case do so below
    */
    if (av->max_fast != 0) {
        clear_fastchunks(av);

        unsorted_bin = unsorted_chunks(av);

        /*
           Remove each chunk from fast bin and consolidate it, placing it
           then in unsorted bin. Among other reasons for doing this,
           placing in unsorted bin avoids needing to calculate actual bins
           until malloc is sure that chunks aren't immediately going to be
           reused anyway.
        */
        maxfb = &(av->fastbins[fastbin_index(av->max_fast)]);
        fb = &(av->fastbins[0]);
        do {
            if ((p = *fb) != 0) {
                *fb = 0;

                do {
                    check_inuse_chunk(p);
                    nextp = p->fd;

                    /* Slightly streamlined version of consolidation code in free() */
                    size = p->size & ~PREV_INUSE;
                    nextchunk = chunk_at_offset(p, size);
                    nextsize = chunksize(nextchunk);

                    if (!prev_inuse(p)) {
                        prevsize = p->prev_size;
                        size += prevsize;
                        p = chunk_at_offset(p, -((long) prevsize));
                        unlink(p, bck, fwd);
                    }

                    unit = lookup_ustate_by_mem((void*)p);
                    if (nextchunk != unit->unit_top) {
                        nextinuse = inuse_bit_at_offset(nextchunk, nextsize);
                        set_head(nextchunk, nextsize);

                        if (!nextinuse) {
                            size += nextsize;
                            unlink(nextchunk, bck, fwd);
                        }

                        first_unsorted = unsorted_bin->fd;
                        unsorted_bin->fd = p;
                        first_unsorted->bk = p;

                        set_head(p, size | PREV_INUSE);
                        p->bk = unsorted_bin;
                        p->fd = first_unsorted;
                        set_foot(p, size);
                    }
                    else {
                        size += nextsize;
                        set_head(p, size | PREV_INUSE);
                        unit->unit_top = p;
                    }

                } while ((p = nextp) != 0);
            }
        } while (fb++ != maxfb);
    }
    else {
        if (get_abstate()->mstate_list.num == 0) {
            // initialize abheap state
            init_linked_list(&(get_abstate()->mstate_list));
            init_linked_list(&(get_abstate()->ustate_list));
            init_linked_list(&(get_abstate()->mmapped_ustate_list));
            get_abstate()->ab_top = (mchunkptr)(CHANNEL_ADDR);

            // allocate channel heap space
            mmap((void *) CHANNEL_ADDR, CHANNEL_SIZE, PROT_READ|PROT_WRITE,
                 MAP_ANONYMOUS|MAP_FIXED|MAP_SHARED, -1, 0);
            touch_mem((void *)CHANNEL_ADDR, CHANNEL_SIZE);
        }
        malloc_init_state(av);
        check_malloc_state();
    }
}
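/* ---- example: the mmap-and-touch idiom in the init branch above ----
   The initialization branch reserves an anonymous shared region with
   mmap and touches every page up front.  A self-contained sketch of
   that idiom; unlike the code above, it lets the kernel pick the
   address, since MAP_FIXED at a hard-coded address is only safe when
   that range is known to be reserved. */
#include <stdio.h>
#include <string.h>
#include <sys/mman.h>

int main(void)
{
    size_t len = 1 << 20; /* 1 MiB, an arbitrary placeholder size */
    void *p = mmap(NULL, len, PROT_READ | PROT_WRITE,
                   MAP_ANONYMOUS | MAP_SHARED, -1, 0);
    if (p == MAP_FAILED) { perror("mmap"); return 1; }
    memset(p, 0, len);    /* touch every page, as touch_mem does above */
    munmap(p, len);
    return 0;
}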
void* AllocPool::Realloc(void* inPtr, size_t inReqSize)
{
#ifdef DISABLE_MEMORY_POOLS
    return realloc(inPtr, inReqSize);
#endif

    void *outPtr;
    AllocChunkPtr prev;
    check_pool();

    bool docopy = false;

    /* realloc of null is supposed to be same as malloc */
    if (inPtr == 0) return Alloc(inReqSize);

    AllocChunkPtr oldChunk = MemToChunk(inPtr);
    AllocChunkPtr newChunk = oldChunk;
    size_t oldsize = oldChunk->Size();
    size_t newsize = oldsize;
    size_t size = RequestToSize(inReqSize);
    size_t nextsize, prevsize;
    check_inuse_chunk(oldChunk);

    if (oldsize < size) {
        /* Try expanding forward */
        AllocChunkPtr next = oldChunk->NextChunk();
        if (!next->InUse()) {
            nextsize = next->Size();
            /* Forward into next chunk */
            if (nextsize + newsize >= size) {
                UnlinkFree(next);
                newsize += nextsize;
                goto split;
            }
        } else {
            next = 0;
            nextsize = 0;
        }

        /* Try shifting backwards. */
        prev = oldChunk->PrevChunk();
        if (!prev->InUse()) {
            prevsize = prev->Size();

            /* try forward + backward first to save a later consolidation */
            if (next != 0) {
                /* into next chunk */
                if (nextsize + prevsize + newsize >= size) {
                    newsize += nextsize + prevsize;
                    UnlinkFree(next);
                    goto alloc_prev;
                }
            }

            /* backward only */
            if (prev != 0 && prevsize + newsize >= size) {
                newsize += prevsize;
                goto alloc_prev;
            }
        }

        /* Must allocate */
        outPtr = Alloc(inReqSize);
        check_pool();
        if (outPtr == 0) {
            //ipostbuf("realloc failed. size: %d\n", inReqSize);
            throw std::runtime_error("realloc failed, increase server's memory allocation (e.g. via ServerOptions)");
        }

        /* Otherwise copy, free, and exit */
        memcpy(outPtr, inPtr, oldsize - sizeof(AllocChunk));
        Free(inPtr);
        return outPtr;
    } else goto split;

alloc_prev:
    UnlinkFree(prev);
    newChunk = prev;
    docopy = true;
    // FALL THROUGH
split:
    /* split off extra room in old or expanded chunk */
    //check_pool();
    if (newsize - size >= kMinAllocSize) { /* split off remainder */
        size_t remainder_size = newsize - size;
        AllocChunkPtr remainder = newChunk->ChunkAtOffset(size);
        remainder->SetSizeInUse(remainder_size);
        newChunk->SetSizeInUse(size);
        Free(remainder->ToPtr()); /* let free() deal with it */
    } else {
        newChunk->SetSizeInUse(newsize);
    }

    outPtr = newChunk->ToPtr();
    if (docopy) {
        memmove(outPtr, inPtr, oldsize - sizeof(AllocChunk));
    }

    check_inuse_chunk(newChunk);
    check_pool();
    garbage_fill(newChunk);
    return outPtr;
}
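/* ---- example: why the alloc_prev path uses memmove, not memcpy ----
   When Realloc shifts the payload backward into the preceding free
   chunk, source and destination ranges overlap, which is undefined
   behaviour for memcpy but well-defined for memmove.  A minimal
   demonstration of an overlapping shift: */
#include <stdio.h>
#include <string.h>

int main(void)
{
    char buf[32] = "????hello";
    /* shift "hello" four bytes left, into space it partially overlaps */
    memmove(buf, buf + 4, strlen(buf + 4) + 1);
    printf("%s\n", buf);  /* prints "hello" */
    return 0;
}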
// This function is equal to mspace_free,
// replacing PREACTION with 0 and POSTACTION with nothing.
static void mspace_free_lockless(mspace msp, void* mem)
{
    if (mem != 0) {
        mchunkptr p = mem2chunk(mem);
#if FOOTERS
        mstate fm = get_mstate_for(p);
        msp = msp; /* placate people compiling -Wunused */
#else /* FOOTERS */
        mstate fm = (mstate)msp;
#endif /* FOOTERS */
        if (!ok_magic(fm)) {
            USAGE_ERROR_ACTION(fm, p);
            return;
        }
        if (!0) { // PREACTION(fm)
            check_inuse_chunk(fm, p);
            if (RTCHECK(ok_address(fm, p) && ok_inuse(p))) {
                size_t psize = chunksize(p);
                mchunkptr next = chunk_plus_offset(p, psize);
                s_allocated_memory -= psize;
                if (!pinuse(p)) {
                    size_t prevsize = p->prev_foot;
                    if (is_mmapped(p)) {
                        psize += prevsize + MMAP_FOOT_PAD;
                        if (CALL_MUNMAP((char*)p - prevsize, psize) == 0)
                            fm->footprint -= psize;
                        goto postaction;
                    }
                    else {
                        mchunkptr prev = chunk_minus_offset(p, prevsize);
                        psize += prevsize;
                        p = prev;
                        if (RTCHECK(ok_address(fm, prev))) { /* consolidate backward */
                            if (p != fm->dv) {
                                unlink_chunk(fm, p, prevsize);
                            }
                            else if ((next->head & INUSE_BITS) == INUSE_BITS) {
                                fm->dvsize = psize;
                                set_free_with_pinuse(p, psize, next);
                                goto postaction;
                            }
                        }
                        else
                            goto erroraction;
                    }
                }

                if (RTCHECK(ok_next(p, next) && ok_pinuse(next))) {
                    if (!cinuse(next)) { /* consolidate forward */
                        if (next == fm->top) {
                            size_t tsize = fm->topsize += psize;
                            fm->top = p;
                            p->head = tsize | PINUSE_BIT;
                            if (p == fm->dv) {
                                fm->dv = 0;
                                fm->dvsize = 0;
                            }
                            if (should_trim(fm, tsize))
                                sys_trim(fm, 0);
                            goto postaction;
                        }
                        else if (next == fm->dv) {
                            size_t dsize = fm->dvsize += psize;
                            fm->dv = p;
                            set_size_and_pinuse_of_free_chunk(p, dsize);
                            goto postaction;
                        }
                        else {
                            size_t nsize = chunksize(next);
                            psize += nsize;
                            unlink_chunk(fm, next, nsize);
                            set_size_and_pinuse_of_free_chunk(p, psize);
                            if (p == fm->dv) {
                                fm->dvsize = psize;
                                goto postaction;
                            }
                        }
                    }
                    else
                        set_free_with_pinuse(p, psize, next);

                    if (is_small(psize)) {
                        insert_small_chunk(fm, p, psize);
                        check_free_chunk(fm, p);
                    }
                    else {
                        tchunkptr tp = (tchunkptr)p;
                        insert_large_chunk(fm, tp, psize);
                        check_free_chunk(fm, p);
                        if (--fm->release_checks == 0)
                            release_unused_segments(fm);
                    }
                    goto postaction;
                }
            }
erroraction:
            USAGE_ERROR_ACTION(fm, p);
postaction:
            ; // POSTACTION(fm);
        }
    }
}
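/* ---- example: the locked mspace API this lockless variant mirrors ----
   mspace_free_lockless is mspace_free with the PREACTION/POSTACTION
   locking macros compiled out, for callers that already serialize
   access themselves.  A usage sketch of the standard dlmalloc mspace
   entry points; it assumes a dlmalloc build with MSPACES enabled, and
   the header name below is a placeholder that varies between projects. */
#include "dlmalloc.h" /* hypothetical header for an MSPACES-enabled build */

void demo(void)
{
    mspace ms = create_mspace(0 /* default capacity */, 0 /* no locking */);
    void *p = mspace_malloc(ms, 128);
    mspace_free(ms, p); /* the locking counterpart of the code above */
    destroy_mspace(ms);
}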