/** Account one allocation in the home's statistics.
 *
 * Only called when @a sub->sub_stats is non-NULL.  @a p is unused here;
 * @a preload is non-NULL when the block was carved from the preload arena,
 * and @a zero > 1 flags a cloned subhome.
 */
static void su_home_stats_alloc(su_block_t *sub, void *p, void *preload, size_t size, int zero)
{
  su_home_stat_t *hs = sub->sub_stats;
  size_t rsize = __ALIGN(size);

  (void)p;

  /* Track hash-table growth: count a rehash whenever the block size changed. */
  if (sub->sub_n != hs->hs_blocksize)
    hs->hs_rehash++;
  hs->hs_blocksize = sub->sub_n;

  if (zero > 1)
    hs->hs_clones++;

  if (preload) {
    /* Preloaded allocations are only counted, not sized. */
    hs->hs_allocs.hsa_preload++;
    return;
  }

  hs->hs_allocs.hsa_number++;
  hs->hs_allocs.hsa_bytes += size;
  hs->hs_allocs.hsa_rbytes += rsize;
  /* High-water mark of rounded bytes ever allocated. */
  if (hs->hs_allocs.hsa_maxrbytes < hs->hs_allocs.hsa_rbytes)
    hs->hs_allocs.hsa_maxrbytes = hs->hs_allocs.hsa_rbytes;

  hs->hs_blocks.hsb_number++;
  hs->hs_blocks.hsb_bytes += size;
  hs->hs_blocks.hsb_rbytes += rsize;
}
/** Preload a memory home.
 *
 * The function su_home_preload() preloads a memory home: it reserves a
 * contiguous arena of roughly @a n * @a isize bytes from which subsequent
 * allocations on @a home can be served without calling malloc().
 *
 * @param home  pointer to memory pool object (no-op when NULL)
 * @param n     number of items to reserve room for
 * @param isize size of one item (rounded up to ALIGNMENT)
 */
void su_home_preload(su_home_t *home, isize_t n, isize_t isize)
{
  su_block_t *sub;

  if (home == NULL)
    return;

  if (home->suh_blocks == NULL)
    su_home_init(home);

  sub = MEMLOCK(home);
  if (!sub->sub_preload) {
    size_t size;
    void *preload;

    /* NOTE(review): n * __ALIGN(isize) may wrap for huge requests — the cap
     * below bounds the result either way, but verify callers pass sane n. */
    size = n * __ALIGN(isize);
    if (size > 65535)		/* sub_prsize has only 16 bits... */
      /* Cap at the largest ALIGNMENT-multiple representable in 16 bits.
       * (The old mask `65535 & (ALIGNMENT - 1)` rounded DOWN to at most
       * ALIGNMENT - 1 bytes, effectively disabling large preloads.) */
      size = 65535 & ~(ALIGNMENT - 1);

    preload = malloc(size);
    if (preload) {
      /* On OOM simply leave the home unpreloaded instead of recording a
       * nonzero sub_prsize with a NULL arena. */
      home->suh_blocks->sub_preload = preload;
      home->suh_blocks->sub_prsize = (unsigned)size;
    }
  }
  UNLOCK(home);
}
/** Preload a memory home from stack.
 *
 * Initializes a memory home using an area allocated from stack. Poor man's
 * alloca().
 *
 * The caller-supplied @a area is carved up in place: first the su_home_t
 * header, then the su_block_t hash block (SUB_N_AUTO nodes), then the
 * remainder becomes the preload arena. Nothing is heap-allocated here.
 *
 * @param area pointer to caller-owned storage (typically on the stack)
 * @param size number of bytes available at @a area
 *
 * @return pointer to the initialized home inside @a area, or NULL if
 *         @a area is NULL or @a size cannot hold the bookkeeping structures.
 */
su_home_t *su_home_auto(void *area, isize_t size)
{
  su_home_t *home;
  su_block_t *sub;
  size_t homesize = __ALIGN(sizeof *home);
  size_t subsize = __ALIGN(offsetof(su_block_t, sub_nodes[SUB_N_AUTO]));
  size_t prepsize;
  char *p = area;

  /* Bytes consumed before the preload arena starts: header, hash block,
   * plus padding to re-align p itself if the caller's area is misaligned. */
  prepsize = homesize + subsize + (__ALIGN((intptr_t)p) - (intptr_t)p);

  if (area == NULL || size < prepsize)
    return NULL;

  /* suh_size is a signed int — clamp so the cast below cannot overflow. */
  if (size > INT_MAX)
    size = INT_MAX;

  home = memset(p, 0, homesize);
  home->suh_size = (int)size;

  sub = memset(p + homesize, 0, subsize);
  home->suh_blocks = sub;

  /* sub_prsize is 16-bit; cap the usable preload arena at 65535 bytes. */
  if (size > prepsize + 65535)
    size = prepsize + 65535;

  sub->sub_n = SUB_N_AUTO;
  sub->sub_ref = 1;
  sub->sub_preload = p + prepsize;
  sub->sub_prsize = (unsigned)(size - prepsize);
  /* hauto: home itself is not heap-allocated; auto/preauto: block and
   * preload arena live in caller storage and must never be free()d. */
  sub->sub_hauto = 1;
  sub->sub_auto = 1;
  sub->sub_preauto = 1;
  /* auto_all stays set until the first non-preloaded allocation. */
  sub->sub_auto_all = 1;

#ifdef DEBUG
  SU_DEBUG_9(("%s: start - block %p sub_used is %ld sub_n %ld used %d\n", __func__, sub, sub->sub_used, sub->sub_n, su_get_used_count(sub))) ;
#endif

  return home;
}
/** Widen a requested alignment so a size_t index word fits in front of it.
 *
 * If the requested alignment is smaller than sizeof(size_t), grow it to at
 * least that, rounded up so the result remains a multiple of @a curr_align.
 * Alignments already >= sizeof(size_t) are returned unchanged.
 */
static size_t __compute_new_alignment(size_t const curr_align)
{
  size_t const index_size = sizeof(size_t);
  size_t new_align;

  if (curr_align < index_size)
    /* curr_align + (index_size - curr_align) == index_size, then round up
     * to the next multiple of the requested alignment. */
    new_align = __ALIGN(index_size, curr_align);
  else
    new_align = curr_align;

  HEAP_POOL_DEBUG("alignment queried is %zu, new alignment with size_t is %zu\n", curr_align, new_align);
  return new_align;
}
/** Account one deallocation in the home's statistics.
 *
 * Mirror of su_home_stats_alloc(): only called when @a sub->sub_stats is
 * non-NULL.  @a p is unused; @a preload is non-NULL when the block lived in
 * the preload arena.
 */
static void su_home_stats_free(su_block_t *sub, void *p, void *preload, unsigned size)
{
  su_home_stat_t *hs = sub->sub_stats;
  size_t rsize = __ALIGN(size);

  (void)p;

  if (!preload) {
    hs->hs_frees.hsf_number++;
    hs->hs_frees.hsf_bytes += size;
    hs->hs_frees.hsf_rbytes += rsize;

    /* The block leaves the set of live allocations. */
    hs->hs_blocks.hsb_number--;
    hs->hs_blocks.hsb_bytes -= size;
    hs->hs_blocks.hsb_rbytes -= rsize;
  }
  else {
    /* Preloaded blocks are only counted; their bytes are never reclaimed. */
    hs->hs_frees.hsf_preload++;
  }
}
struct heap_pool_desc* heap_pool_create(char name[], size_t const nb, size_t const size, size_t const _align) { size_t allocsize; size_t align; size_t chunksize; size_t holesize = 0; size_t pagesize; struct heap_pool_desc *ret = NULL; int fd = 0; size_t *walker, i; uint8_t *ptr; unsigned int mapflag; HEAP_POOL_DEBUG("Starting with %d %d %d\n", nb, size, _align); if(_align && !is_power_of_two(_align)) { return ERR_PTR(-EINVAL); } align = (!_align) ? sizeof(size_t) : __compute_new_alignment(_align); pagesize = (size_t)heap_pool_getpagesize(); chunksize = size + CHUNK_EXTRA_SIZE; holesize = sizeof(size_t); if(_align > 1) { size_t next_val = chunksize + holesize; size_t diff = __ALIGN(next_val, _align) - next_val; holesize += diff; } allocsize = sizeof(*ret) + align - holesize + nb*(sizeof(size_t) + chunksize + holesize); allocsize = __ALIGN(allocsize, pagesize); HEAP_POOL_DEBUG("need %zu pages\n", allocsize / pagesize); HEAP_POOL_DEBUG("allocsize is %zu\n", allocsize); HEAP_POOL_DEBUG("holesize is %zu\n", holesize); #ifdef HP_HAVE_SHM fd = shm_open(name, O_RDWR | O_CREAT | O_EXCL, 0666); if(unlikely(fd < 0)) { return ERR_PTR(-errno); } ftruncate(fd, allocsize); mapflag = MAP_SHARED #else (void)name; mapflag = MAP_ANONYMOUS | MAP_PRIVATE; #endif ret = mmap(NULL, allocsize, PROT_READ | PROT_WRITE, mapflag, fd, 0); if(ret == MAP_FAILED) { goto mapfailed; } #ifdef HP_HAVE_SHM close(fd); #endif ret->hpd_next = NULL; ret->hpd_szel = size; ret->hpd_nrel = ((allocsize - align - sizeof(*ret)) / (sizeof(size_t) + holesize + chunksize)); ret->hpd_nralloc = 0; HEAP_POOL_DEBUG("numelem is %zu\n", ret->hpd_nrel); ret->hpd_firstfree = 0; ret->hpd_szck = holesize + ret->hpd_szel + CHUNK_EXTRA_SIZE; ret->hpd_holesize = holesize; ret->hpd_allocsize = allocsize; walker = (size_t*)ret->hpd_raw; ptr = (uint8_t*)(walker + ret->hpd_nrel); HEAP_POOL_DEBUG("ptr before allocating hole %p\n", ptr); uint8_t *ptr2 = __ALIGN_PTR(ptr + align, align); HEAP_POOL_DEBUG("ptr after allocating hole 
%p\n", ptr2 - ptr); ptr = ptr2; HEAP_POOL_DEBUG("ptr go %p to %p\n", walker + ret->hpd_nrel, ptr); ret->hpd_offfirstelem = (off_t)((char*)ptr - (char*)ret); #define INDEX_INDEX_INDEXNEXT (1) for(i = 0; i < ret->hpd_nrel; ++i, walker++, ptr+=chunksize+holesize) { *walker = (i+1); #ifdef HEAP_POOL_OVERFLOW_DBG *(ptr + size) = HEAP_POOL_MAGIC | HEAP_CHUNK_FREE; #endif *(size_t*)(ptr - sizeof(size_t)) = i; #ifdef DBG if(_align) assert((unsigned long)ptr % _align == 0); #endif } HEAP_POOL_DEBUG("-------------------------\n"); mapfailed: #ifdef HP_HAVE_SHM close(fd); #endif return ret; }
/** Allocate a memory block.
 *
 * @internal
 *
 * Precondition: locked home
 *
 * Serves the allocation from the preload arena when possible, otherwise
 * from malloc()/calloc(). Grows the home's block hash table first if it is
 * more than 2/3 full. When @a zero >= do_clone, the new block is set up as
 * a cloned subhome whose parent is @a home.
 *
 * @param home home to allocate
 * @param sub  block structure used to allocate (may be NULL; a fresh hash
 *             table is created in that case)
 * @param size requested size in bytes; must be nonzero and < 2^SIZEBITS
 * @param zero if true, zero allocated block;
 *        if > 1, allocate a subhome
 *
 * @return pointer to the allocated block, or NULL on failure (errno is set
 *         to ENOMEM when @a size is out of range).
 */
static void *sub_alloc(su_home_t *home, su_block_t *sub, size_t size, enum sub_zero zero)
{
  void *data, *preload = NULL;

  assert (size < (((size_t)1) << SIZEBITS));

#ifdef DEBUG
  SU_DEBUG_9(("sub_alloc: allocating size %ld from home: %p using block %p\n", size, home, sub)) ;
#endif

  /* sua_size is a bitfield of SIZEBITS bits — refuse sizes that cannot
   * be recorded (the assert above only fires in debug builds). */
  if (size >= ((size_t)1) << SIZEBITS)
    return (void)(errno = ENOMEM), NULL;

  if (!size)
    return NULL;

  /* Grow when load factor exceeds 2/3 (or create the initial table). */
  if (sub == NULL || 3 * sub->sub_used > 2 * sub->sub_n) {
    /* Resize the hash table */
    size_t i, n, n2;
    su_block_t *b2;

    if (sub)
      n = home->suh_blocks->sub_n, n2 = 4 * n + 3; //, used = sub->sub_used;
    else
      n = 0, n2 = SUB_N; //, used = 0;

#ifdef DEBUG
    SU_DEBUG_9(("sub_alloc: realloc block hash of size %ld\n", n2)) ;
#endif

    if (!(b2 = su_hash_alloc(n2)))
      return NULL;

    /* Re-insert every live entry into the larger table. */
    for (i = 0; i < n; i++) {
      if (sub->sub_nodes[i].sua_data)
        su_block_add(b2, sub->sub_nodes[i].sua_data)[0] = sub->sub_nodes[i];
    }

    if (sub) {
      /* Carry over the block-level state to the replacement table. */
      b2->sub_parent = sub->sub_parent;
      b2->sub_ref = sub->sub_ref;
      b2->sub_preload = sub->sub_preload;
      b2->sub_prsize = sub->sub_prsize;
      b2->sub_prused = sub->sub_prused;
      b2->sub_hauto = sub->sub_hauto;
      b2->sub_preauto = sub->sub_preauto;
      b2->sub_destructor = sub->sub_destructor;
      /* auto_all is not copied! */
      b2->sub_stats = sub->sub_stats;
    }

    home->suh_blocks = b2;

    /* A stack-resident (sub_auto) block must not be free()d. */
    if (sub && !sub->sub_auto)
      free(sub);
    sub = b2;
  }

  /* Preload is never used for clones; the arena only serves small blocks. */
  if (sub && zero < do_clone && sub->sub_preload && size <= sub->sub_prsize) {
    /* Use preloaded memory */
    size_t prused = sub->sub_prused + size + MEMCHECK_EXTRA;
    prused = __ALIGN(prused);
    if (prused <= sub->sub_prsize) {
      preload = (char *)sub->sub_preload + sub->sub_prused;
      sub->sub_prused = (unsigned)prused;
    }
#ifdef DEBUG
    SU_DEBUG_9(("sub_alloc: using %s memory\n", "preloaded")) ;
#endif
  }

  if (preload && zero) {
    data = memset(preload, 0, size);
  }
  else if (preload) {
    data = preload;
  }
  else if (zero) {
    data = calloc(1, size + MEMCHECK_EXTRA);
  }
  else {
    data = malloc(size + MEMCHECK_EXTRA);
  }

  if (data) {
    su_alloc_t *sua;

#if MEMCHECK_EXTRA
    /* Write the redzone terminator (two's-complement of size) past the
     * payload so su_free can detect overruns. */
    size_t term = 0 - size;
    memcpy((char *)data + size, &term, sizeof (term));
#endif

#ifdef DEBUG
    SU_DEBUG_9(("sub_alloc: data will be located at %p\n", data)) ;
#endif

    /* First heap allocation: the home no longer lives purely in
     * caller-provided storage. */
    if (!preload)
      sub->sub_auto_all = 0;

    if (zero >= do_clone) {
      /* Prepare cloned home */
      su_home_t *subhome = data;

      assert(preload == 0);

      subhome->suh_blocks = su_hash_alloc(SUB_N);
      if (!subhome->suh_blocks)
        return (void)safefree(data), NULL;

      subhome->suh_size = (unsigned)size;
      subhome->suh_blocks->sub_parent = home;
      subhome->suh_blocks->sub_hauto = 0;
    }

    /* OK, add the block to the hash table. */
    sua = su_block_add(sub, data);
    assert(sua);
    sua->sua_size = (unsigned)size;
    sua->sua_home = zero > 1;

    if (sub->sub_stats)
      su_home_stats_alloc(sub, data, preload, size, zero);
  }

  return data;
}
/** Reallocate a memory block.
 *
 * Allocates a memory block of @a size bytes.
 * It copies the old block contents to the new block and frees the old
 * block.
 *
 * If @a home is NULL, this function behaves exactly like realloc().
 *
 * A preloaded block is resized in place when it is the most recent arena
 * allocation (or when shrinking); otherwise its contents are migrated to a
 * fresh malloc()ed block.
 *
 * @param home pointer to memory pool object
 * @param data pointer to old memory block
 * @param size size of the memory block to be allocated
 *
 * @return
 * A pointer to the allocated memory block or
 * NULL if an error occurred.
 */
void *su_realloc(su_home_t *home, void *data, isize_t size)
{
  void *ndata;
  su_alloc_t *sua;
  su_block_t *sub;
  size_t p;
  /* Redzone terminator written past the payload when MEMCHECK_EXTRA is on. */
  size_t term = 0 - size;

  if (!home)
    return realloc(data, size);

  /* realloc(p, 0) semantics: free and return NULL. */
  if (size == 0) {
    if (data)
      su_free(home, data);
    return NULL;
  }

  sub = MEMLOCK(home);
  if (!data) {
    data = sub_alloc(home, sub, size, (enum sub_zero)0);
    UNLOCK(home);
    return data;
  }

  sua = su_block_find(sub, data);
  if (!su_alloc_check(sub, sua))
    return UNLOCK(home);

  /* Subhomes cannot be resized through su_realloc. */
  assert(!sua->sua_home);
  if (sua->sua_home)
    return UNLOCK(home);

  if (!su_is_preloaded(sub, data)) {
    /* Plain heap block: defer to realloc and rebook the hash entry. */
    ndata = realloc(data, size + MEMCHECK_EXTRA);
    if (ndata) {
      if (sub->sub_stats) {
        su_home_stats_free(sub, data, 0, sua->sua_size);
        su_home_stats_alloc(sub, data, 0, size, 1);
      }
#if MEMCHECK_EXTRA
      memcpy((char *)ndata + size, &term, sizeof (term));
#else
      (void)term;
#endif
      /* The old entry is invalidated; add the (possibly moved) block anew. */
      memset(sua, 0, sizeof *sua);
      sub->sub_used--;
      su_block_add(sub, ndata)->sua_size = (unsigned)size;
    }
    UNLOCK(home);
    return ndata;
  }

  /* p = aligned end offset of this block inside the preload arena. */
  p = (char *)data - home->suh_blocks->sub_preload;
  p += sua->sua_size + MEMCHECK_EXTRA;
  p = __ALIGN(p);

  if (p == sub->sub_prused) {
    /* Block is the last arena allocation: try to grow/shrink in place. */
    size_t p2 = (char *)data - sub->sub_preload + size + MEMCHECK_EXTRA;
    p2 = __ALIGN(p2);
    if (p2 <= sub->sub_prsize) {
      /* Extend/reduce existing preload */
      if (sub->sub_stats) {
        su_home_stats_free(sub, data, data, sua->sua_size);
        su_home_stats_alloc(sub, data, data, size, 0);
      }
      sub->sub_prused = (unsigned)p2;
      sua->sua_size = (unsigned)size;
#if MEMCHECK_EXTRA
      memcpy((char *)data + size, &term, sizeof (term));
#endif
      UNLOCK(home);
      return data;
    }
  }
  else if (size < (size_t)sua->sua_size) {
    /* Reduce existing preload */
    /* Not the last allocation — the freed tail cannot be reclaimed, only
     * the recorded size is lowered. */
    if (sub->sub_stats) {
      su_home_stats_free(sub, data, data, sua->sua_size);
      su_home_stats_alloc(sub, data, data, size, 0);
    }
#if MEMCHECK_EXTRA
    memcpy((char *)data + size, &term, sizeof (term));
#endif
    sua->sua_size = (unsigned)size;
    UNLOCK(home);
    return data;
  }

  /* Cannot resize in place: migrate the contents to a heap block. */
  ndata = malloc(size + MEMCHECK_EXTRA);
  if (ndata) {
    if (p == sub->sub_prused) {
      /* Free preload */
      /* Roll the arena cursor back to the start of the departing block. */
      sub->sub_prused = (char *)data - home->suh_blocks->sub_preload;
      if (sub->sub_stats)
        su_home_stats_free(sub, data, data, sua->sua_size);
    }
    memcpy(ndata, data, (size_t)sua->sua_size < size ? (size_t)sua->sua_size : size);
#if MEMCHECK_EXTRA
    memcpy((char *)ndata + size, &term, sizeof (term));
#endif
    if (sub->sub_stats)
      su_home_stats_alloc(sub, data, 0, size, 1);
    memset(sua, 0, sizeof *sua);
    sub->sub_used--;
    su_block_add(sub, ndata)->sua_size = (unsigned)size;
#ifdef DEBUG
    SU_DEBUG_9(("%s: block %p sub_used is %ld sub_n %ld used %d\n", __func__, sub, sub->sub_used, sub->sub_n, su_get_used_count(sub))) ;
#endif
  }
  UNLOCK(home);
  return ndata;
}