TEST_END

/*
 * Exercise the error paths of mallctlbymib(): writing to a read-only node
 * must fail with EPERM, and reads/writes with a mismatched size must fail
 * with EINVAL.
 */
TEST_BEGIN(test_mallctlbymib_errors) {
	uint64_t e;
	size_t len;
	size_t mib[1];
	size_t miblen;

	miblen = sizeof(mib) / sizeof(mib[0]);
	assert_d_eq(mallctlnametomib("version", mib, &miblen), 0,
	    "Unexpected mallctlnametomib() failure");
	/* "version" is read-only, so any write attempt must be rejected. */
	assert_d_eq(mallctlbymib(mib, miblen, NULL, NULL, "0.0.0",
	    strlen("0.0.0")), EPERM, "mallctl() should return EPERM on "
	    "attempt to write read-only value");

	miblen = sizeof(mib) / sizeof(mib[0]);
	assert_d_eq(mallctlnametomib("epoch", mib, &miblen), 0,
	    "Unexpected mallctlnametomib() failure");
	/* Writes whose size differs from sizeof(uint64_t) are invalid. */
	assert_d_eq(mallctlbymib(mib, miblen, NULL, NULL, &e, sizeof(e) - 1),
	    EINVAL,
	    "mallctlbymib() should return EINVAL for input size mismatch");
	assert_d_eq(mallctlbymib(mib, miblen, NULL, NULL, &e, sizeof(e) + 1),
	    EINVAL,
	    "mallctlbymib() should return EINVAL for input size mismatch");

	/* Reads with a wrong output size are likewise invalid. */
	len = sizeof(e) - 1;
	assert_d_eq(mallctlbymib(mib, miblen, &e, &len, NULL, 0), EINVAL,
	    "mallctlbymib() should return EINVAL for output size mismatch");
	len = sizeof(e) + 1;
	assert_d_eq(mallctlbymib(mib, miblen, &e, &len, NULL, 0), EINVAL,
	    "mallctlbymib() should return EINVAL for output size mismatch");
}
static inline void threadStats(uint64*& allocated, uint64*& deallocated, size_t*& cactive, size_t& cactiveLimit) { pthread_once(&threadStatsOnce, threadStatsInit); if (!MemoryManager::s_statsEnabled) return; size_t len = sizeof(allocated); if (mallctlbymib(threadAllocatedpMib, sizeof(threadAllocatedpMib) / sizeof(size_t), &allocated, &len, NULL, 0)) { assert(false); } len = sizeof(deallocated); if (mallctlbymib(threadDeallocatedpMib, sizeof(threadDeallocatedpMib) / sizeof(size_t), &deallocated, &len, NULL, 0)) { assert(false); } len = sizeof(cactive); if (mallctlbymib(statsCactiveMib, sizeof(statsCactiveMib) / sizeof(size_t), &cactive, &len, NULL, 0)) { assert(false); } size_t headRoom = RuntimeOption::ServerMemoryHeadRoom; // Compute cactiveLimit based on s_cactiveLimitCeiling, as computed in // threadStatsInit(). if (headRoom != 0 && headRoom < MemoryManager::s_cactiveLimitCeiling) { cactiveLimit = MemoryManager::s_cactiveLimitCeiling - headRoom; } else { cactiveLimit = SIZE_MAX; } }
TEST_END

/*
 * Verify that an arena's dss precedence can be read and written through
 * "arena.<i>.dss", both for arena 0 and for index narenas_total_get().
 */
TEST_BEGIN(test_arena_i_dss) {
	const char *prec_old, *prec_new;
	size_t len = sizeof(prec_old);
	size_t mib[3];
	size_t miblen = sizeof(mib) / sizeof(size_t);

	assert_d_eq(mallctlnametomib("arena.0.dss", mib, &miblen), 0,
	    "Unexpected mallctlnametomib() error");

	/* Switch arena 0 to "disabled", capturing the previous setting. */
	prec_new = "disabled";
	assert_d_eq(mallctlbymib(mib, miblen, &prec_old, &len, &prec_new,
	    sizeof(prec_new)), 0, "Unexpected mallctl() failure");
	assert_str_ne(prec_old, "primary",
	    "Unexpected default for dss precedence");

	/* Write the captured setting back. */
	assert_d_eq(mallctlbymib(mib, miblen, &prec_new, &len, &prec_old,
	    sizeof(prec_old)), 0, "Unexpected mallctl() failure");

	/* Repeat the write/read for arena index narenas_total_get(). */
	mib[1] = narenas_total_get();
	prec_new = "disabled";
	assert_d_eq(mallctlbymib(mib, miblen, &prec_old, &len, &prec_new,
	    sizeof(prec_new)), 0, "Unexpected mallctl() failure");
	assert_str_ne(prec_old, "primary",
	    "Unexpected default for dss precedence");
}
/*
 * Return the cumulative number of bytes freed by this thread, preferring
 * jemalloc's mallctl interface and falling back to tcmalloc's
 * MallocExtension.  Returns 0 when neither allocator can report the stat.
 */
uint64 get_frees() {
#ifndef NO_JEMALLOC
  if (mallctl) {
    // Lazily resolve the MIB for the free-bytes stat on first use.
    if (!mallctl_free_mib && !mallctl_mib_init()) {
      return 0;
    }
    uint64 value;
    size_t len = sizeof(value);
    if (mallctlbymib(mallctl_free_mib, mallctl_mib_len, &value, &len,
                     NULL, 0)) {
      return 0;
    }
    return value;
  }
#endif
#ifndef NO_TCMALLOC
  if (MallocExtensionInstance) {
    size_t value;
    MallocExtensionInstance()->GetNumericProperty(
      "generic.thread_bytes_freed", &value);
    return value;
  }
#endif
  return 0;
}
TEST_END

/*
 * Reproduce the manpage example: resolve "arenas.bin.0.size" to a MIB
 * once, then read every bin's size by patching the bin index into the
 * MIB instead of re-resolving the name each time.
 */
TEST_BEGIN(test_manpage_example) {
	unsigned nbins, binind;
	size_t mib[4];
	size_t sz, miblen;

	sz = sizeof(nbins);
	assert_d_eq(mallctl("arenas.nbins", &nbins, &sz, NULL, 0), 0,
	    "Unexpected mallctl() failure");

	miblen = 4;
	assert_d_eq(mallctlnametomib("arenas.bin.0.size", mib, &miblen), 0,
	    "Unexpected mallctlnametomib() failure");
	for (binind = 0; binind < nbins; binind++) {
		size_t bin_size;

		/* MIB component 2 selects the bin index. */
		mib[2] = binind;
		sz = sizeof(bin_size);
		assert_d_eq(mallctlbymib(mib, miblen, &bin_size, &sz, NULL, 0),
		    0, "Unexpected mallctlbymib() failure");
		/* Do something with bin_size... */
	}
}
TEST_END TEST_BEGIN(test_large_run_size) { bool cache_oblivious; unsigned nlruns, i; size_t sz, run_size_prev, ceil_prev; size_t mib[4]; size_t miblen = sizeof(mib) / sizeof(size_t); /* * Iterate over all large size classes, get their run sizes, and verify * that the quantized size is the same as the run size. */ sz = sizeof(bool); assert_d_eq(mallctl("config.cache_oblivious", &cache_oblivious, &sz, NULL, 0), 0, "Unexpected mallctl failure"); sz = sizeof(unsigned); assert_d_eq(mallctl("arenas.nlruns", &nlruns, &sz, NULL, 0), 0, "Unexpected mallctl failure"); assert_d_eq(mallctlnametomib("arenas.lrun.0.size", mib, &miblen), 0, "Unexpected mallctlnametomib failure"); for (i = 0; i < nlruns; i++) { size_t lrun_size, run_size, floor, ceil; mib[2] = i; sz = sizeof(size_t); assert_d_eq(mallctlbymib(mib, miblen, &lrun_size, &sz, NULL, 0), 0, "Unexpected mallctlbymib failure"); run_size = cache_oblivious ? lrun_size + PAGE : lrun_size; floor = run_quantize_floor(run_size); ceil = run_quantize_ceil(run_size); assert_zu_eq(run_size, floor, "Large run quantization should be a no-op for precise " "size (lrun_size=%zu, run_size=%zu)", lrun_size, run_size); assert_zu_eq(run_size, ceil, "Large run quantization should be a no-op for precise " "size (lrun_size=%zu, run_size=%zu)", lrun_size, run_size); if (i > 0) { assert_zu_eq(run_size_prev, run_quantize_floor(run_size - PAGE), "Floor should be a precise size"); if (run_size_prev < ceil_prev) { assert_zu_eq(ceil_prev, run_size, "Ceiling should be a precise size " "(run_size_prev=%zu, ceil_prev=%zu, " "run_size=%zu)", run_size_prev, ceil_prev, run_size); } } run_size_prev = floor; ceil_prev = run_quantize_ceil(run_size + PAGE); } }
/*
 * Look up jemalloc's per-thread allocated/deallocated counter pointers
 * via mallctlbymib().  No-op when thread statistics are disabled.
 */
static inline void thread_stats(uint64*& allocated, uint64*& deallocated) {
  pthread_once(&mibOnce, mibInit);
  if (!MemoryManager::s_stats_enabled) return;

  size_t sz = sizeof(allocated);
  if (mallctlbymib(threadAllocatedpMib,
                   sizeof(threadAllocatedpMib) / sizeof(size_t),
                   &allocated, &sz, NULL, 0)) {
    assert(false);
  }
  sz = sizeof(deallocated);
  if (mallctlbymib(threadDeallocatedpMib,
                   sizeof(threadDeallocatedpMib) / sizeof(size_t),
                   &deallocated, &sz, NULL, 0)) {
    assert(false);
  }
}
TEST_END TEST_BEGIN(test_arena_i_initialized) { unsigned narenas, i; size_t sz; size_t mib[3]; size_t miblen = sizeof(mib) / sizeof(size_t); bool initialized; sz = sizeof(narenas); assert_d_eq(mallctl("arenas.narenas", (void *)&narenas, &sz, NULL, 0), 0, "Unexpected mallctl() failure"); assert_d_eq(mallctlnametomib("arena.0.initialized", mib, &miblen), 0, "Unexpected mallctlnametomib() failure"); for (i = 0; i < narenas; i++) { mib[1] = i; sz = sizeof(initialized); assert_d_eq(mallctlbymib(mib, miblen, &initialized, &sz, NULL, 0), 0, "Unexpected mallctl() failure"); } mib[1] = MALLCTL_ARENAS_ALL; sz = sizeof(initialized); assert_d_eq(mallctlbymib(mib, miblen, &initialized, &sz, NULL, 0), 0, "Unexpected mallctl() failure"); assert_true(initialized, "Merged arena statistics should always be initialized"); /* Equivalent to the above but using mallctl() directly. */ sz = sizeof(initialized); assert_d_eq(mallctl( "arena." STRINGIFY(MALLCTL_ARENAS_ALL) ".initialized", (void *)&initialized, &sz, NULL, 0), 0, "Unexpected mallctl() failure"); assert_true(initialized, "Merged arena statistics should always be initialized"); }
TEST_END

/*
 * Exercise the "arena.<i>.decay" trigger: by name for arena 0, then via
 * MIB for index narenas and for MALLCTL_ARENAS_ALL.
 */
TEST_BEGIN(test_arena_i_decay) {
	unsigned narenas;
	size_t len = sizeof(unsigned);
	size_t mib[3];
	size_t miblen = 3;

	assert_d_eq(mallctl("arena.0.decay", NULL, NULL, NULL, 0), 0,
	    "Unexpected mallctl() failure");

	assert_d_eq(mallctl("arenas.narenas", (void *)&narenas, &len, NULL, 0),
	    0, "Unexpected mallctl() failure");
	assert_d_eq(mallctlnametomib("arena.0.decay", mib, &miblen), 0,
	    "Unexpected mallctlnametomib() failure");

	mib[1] = narenas;
	assert_d_eq(mallctlbymib(mib, miblen, NULL, NULL, NULL, 0), 0,
	    "Unexpected mallctlbymib() failure");

	mib[1] = MALLCTL_ARENAS_ALL;
	assert_d_eq(mallctlbymib(mib, miblen, NULL, NULL, NULL, 0), 0,
	    "Unexpected mallctlbymib() failure");
}
/*
 * Read a size_t value from the mallctl node named by cmd, with MIB
 * component 2 replaced by ind (e.g. "arenas.bin.<ind>.size").
 *
 * Fix: the original assigned z = sizeof(size_t) twice; the first
 * assignment was a dead store (z is not read before being reassigned),
 * so it has been removed.
 */
static size_t get_size_impl(const char *cmd, size_t ind) {
	size_t ret;
	size_t z;
	size_t mib[4];
	size_t miblen = 4;

	assert_d_eq(mallctlnametomib(cmd, mib, &miblen), 0,
	    "Unexpected mallctlnametomib(\"%s\", ...) failure", cmd);
	mib[2] = ind;
	z = sizeof(size_t);
	assert_d_eq(mallctlbymib(mib, miblen, (void *)&ret, &z, NULL, 0), 0,
	    "Unexpected mallctlbymib([\"%s\", %zu], ...) failure", cmd, ind);

	return ret;
}
TEST_END

/*
 * Exercise the "arena.<i>.purge" trigger: by name for arena 0, then via
 * MIB with the arena index set to narenas.
 */
TEST_BEGIN(test_arena_i_purge) {
	unsigned narenas;
	size_t len = sizeof(unsigned);
	size_t mib[3];
	size_t miblen = 3;

	assert_d_eq(mallctl("arena.0.purge", NULL, NULL, NULL, 0), 0,
	    "Unexpected mallctl() failure");

	assert_d_eq(mallctl("arenas.narenas", &narenas, &len, NULL, 0), 0,
	    "Unexpected mallctl() failure");
	assert_d_eq(mallctlnametomib("arena.0.purge", mib, &miblen), 0,
	    "Unexpected mallctlnametomib() failure");

	mib[1] = narenas;
	assert_d_eq(mallctlbymib(mib, miblen, NULL, NULL, NULL, 0), 0,
	    "Unexpected mallctlbymib() failure");
}
/*
 * Return the maximum supported size class, i.e. the size of the last
 * "arenas.hchunk.<i>.size" entry.
 */
static size_t get_max_size_class(void) {
	unsigned nhchunks;
	size_t mib[4];
	size_t len, miblen, max_size_class;

	len = sizeof(unsigned);
	assert_d_eq(mallctl("arenas.nhchunks", (void *)&nhchunks, &len, NULL,
	    0), 0, "Unexpected mallctl() error");

	miblen = sizeof(mib) / sizeof(size_t);
	assert_d_eq(mallctlnametomib("arenas.hchunk.0.size", mib, &miblen), 0,
	    "Unexpected mallctlnametomib() error");
	/* The last huge-chunk class is the largest size class. */
	mib[2] = nhchunks - 1;
	len = sizeof(size_t);
	assert_d_eq(mallctlbymib(mib, miblen, (void *)&max_size_class, &len,
	    NULL, 0), 0, "Unexpected mallctlbymib() error");

	return max_size_class;
}