/* Create a radix tree that covers keys of the given bit width. */
rtree_t *
rtree_new(unsigned bits, rtree_alloc_t *alloc, rtree_dalloc_t *dalloc,
    pool_t *pool)
{
	rtree_t *ret;
	unsigned bits_per_level, bits_in_leaf, height, i;

	assert(bits > 0 && bits <= (sizeof(uintptr_t) << 3));

	/* Derive per-level and leaf subkey widths from the node size. */
	bits_per_level = jemalloc_ffs(pow2_ceil((RTREE_NODESIZE /
	    sizeof(void *)))) - 1;
	bits_in_leaf = jemalloc_ffs(pow2_ceil((RTREE_NODESIZE /
	    sizeof(uint8_t)))) - 1;
	/* Compute the number of levels needed to cover all key bits. */
	if (bits > bits_in_leaf) {
		height = 1 + (bits - bits_in_leaf) / bits_per_level;
		if ((height-1) * bits_per_level + bits_in_leaf != bits)
			height++;
	} else {
		height = 1;
	}
	assert((height-1) * bits_per_level + bits_in_leaf >= bits);

	ret = (rtree_t*)alloc(pool, offsetof(rtree_t, level2bits) +
	    (sizeof(unsigned) * height));
	if (ret == NULL)
		return (NULL);
	memset(ret, 0, offsetof(rtree_t, level2bits) + (sizeof(unsigned) *
	    height));
	ret->alloc = alloc;
	ret->dalloc = dalloc;
	ret->pool = pool;
	if (malloc_mutex_init(&ret->mutex)) {
		if (dalloc != NULL)
			dalloc(pool, ret);
		return (NULL);
	}
	ret->height = height;
	/*
	 * Any remainder of (bits - bits_in_leaf) modulo bits_per_level goes
	 * to the root level; interior levels get bits_per_level each.
	 */
	if (height > 1) {
		if ((height-1) * bits_per_level + bits_in_leaf > bits) {
			ret->level2bits[0] = (bits - bits_in_leaf) %
			    bits_per_level;
		} else
			ret->level2bits[0] = bits_per_level;
		for (i = 1; i < height-1; i++)
			ret->level2bits[i] = bits_per_level;
		ret->level2bits[height-1] = bits_in_leaf;
	} else
		ret->level2bits[0] = bits;

	ret->root = (void**)alloc(pool, sizeof(void *) << ret->level2bits[0]);
	if (ret->root == NULL) {
		if (dalloc != NULL)
			dalloc(pool, ret);
		return (NULL);
	}
	memset(ret->root, 0, sizeof(void *) << ret->level2bits[0]);

	return (ret);
}
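/*
 * Worked example of the geometry above (a sketch only; it assumes
 * RTREE_NODESIZE == (1U << 14) and 64-bit pointers, both of which are
 * configuration-dependent):
 *
 *   bits_per_level = ffs(16384 / 8) - 1 = ffs(2048)  - 1 = 11
 *   bits_in_leaf   = ffs(16384 / 1) - 1 = ffs(16384) - 1 = 14
 *
 * For bits == 48:
 *   height = 1 + (48 - 14) / 11 = 4; since 3*11 + 14 == 47 != 48, height
 *   is bumped to 5.
 *   level2bits[0] = (48 - 14) % 11 = 1, levels 1..3 get 11 bits each, and
 *   the leaf level gets 14: 1 + 11 + 11 + 11 + 14 == 48.
 */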
bool
base_boot(void)
{

	base_nodes = NULL;
	if (malloc_mutex_init(&base_mtx))
		return (true);

	return (false);
}
bool
ctl_boot(void)
{

	if (malloc_mutex_init(&ctl_mtx))
		return (true);

	ctl_initialized = false;

	return (false);
}
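/*
 * Usage sketch (a hypothetical caller, not from the source): these *_boot()
 * routines follow the convention of returning true on failure, so an
 * initialization path can simply chain them.
 */
static bool
boot_all(void)
{

	if (base_boot())
		return (true);
	if (ctl_boot())
		return (true);

	return (false);
}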
bool
huge_boot(pool_t *pool)
{

	/* Initialize chunks data. */
	if (malloc_mutex_init(&pool->huge_mtx))
		return (true);
	extent_tree_ad_new(&pool->huge);

	return (false);
}
bool
base_boot(void)
{

	if (malloc_mutex_init(&base_mtx, "base", WITNESS_RANK_BASE))
		return (true);
	base_extent_sn_next = 0;
	extent_tree_szsnad_new(&base_avail_szsnad);
	base_nodes = NULL;

	return (false);
}
bool
chunk_dss_boot(void)
{

	cassert(config_dss);

	if (malloc_mutex_init(&dss_mtx))
		return (true);
	dss_base = sbrk(0);
	dss_prev = dss_base;
	dss_max = dss_base;

	return (false);
}
bool
chunk_dss_boot(void)
{

	if (malloc_mutex_init(&dss_mtx))
		return (true);
	dss_base = sbrk(0);
	dss_prev = dss_base;
	dss_max = dss_base;
	extent_tree_szad_new(&dss_chunks_szad);
	extent_tree_ad_new(&dss_chunks_ad);

	return (false);
}
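/*
 * Note on the sbrk(0) calls in both chunk_dss_boot() variants (stated as an
 * assumption about standard sbrk() semantics): sbrk(0) returns the current
 * program break without moving it, so each variant boots with
 * dss_base == dss_prev == dss_max, and the three pointers diverge only once
 * chunks are later carved out of the data segment.
 */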
bool
huge_boot(void)
{

	/* Initialize chunks data. */
	if (malloc_mutex_init(&huge_mtx))
		return (true);
	extent_tree_ad_new(&huge);

	if (config_stats) {
		huge_nmalloc = 0;
		huge_ndalloc = 0;
		huge_allocated = 0;
	}

	return (false);
}
/*
 * Only the most significant bits of keys passed to rtree_{read,write}() are
 * used.
 */
bool
rtree_new(rtree_t *rtree, bool zeroed) {
#ifdef JEMALLOC_JET
	if (!zeroed) {
		memset(rtree, 0, sizeof(rtree_t)); /* Clear root. */
	}
#else
	assert(zeroed);
#endif

	if (malloc_mutex_init(&rtree->init_lock, "rtree", WITNESS_RANK_RTREE,
	    malloc_mutex_rank_exclusive)) {
		return true;
	}

	return false;
}
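/*
 * Usage sketch (a hypothetical call site; demo_rtree and demo_rtree_boot()
 * are illustrative names, not part of the source): in a normal build the
 * rtree is expected to live in zero-initialized storage, e.g. a static
 * object, so zeroed is passed as true and rtree_new() only initializes
 * init_lock. Only JEMALLOC_JET test builds may hand in unzeroed memory and
 * rely on the memset() path.
 */
static rtree_t demo_rtree;	/* Static storage is zero-initialized. */

static bool
demo_rtree_boot(void) {
	return rtree_new(&demo_rtree, true);
}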
bool
huge_boot(void)
{

	/* Initialize chunks data. */
	if (malloc_mutex_init(&huge_mtx))
		return (true);
	extent_tree_ad_new(&huge);
#ifdef JEMALLOC_STATS
	huge_nmalloc = 0;
	huge_ndalloc = 0;
	huge_allocated = 0;
#endif

	return (false);
}
bool
chunk_swap_boot(void)
{

	if (malloc_mutex_init(&swap_mtx))
		return (true);

	swap_enabled = false;
	swap_prezeroed = false; /* swap.* mallctl's depend on this. */
	swap_nfds = 0;
	swap_fds = NULL;
#ifdef JEMALLOC_STATS
	swap_avail = 0;
#endif
	swap_base = NULL;
	swap_end = NULL;
	swap_max = NULL;

	extent_tree_szad_new(&swap_chunks_szad);
	extent_tree_ad_new(&swap_chunks_ad);

	return (false);
}