static extent_node_t *
huge_node_get(const void *ptr)
{
	extent_node_t *node;

	node = chunk_lookup(ptr, true);
	assert(!extent_node_achunk_get(node));

	return (node);
}
/* je_iterate calls callback for each allocation found in the memory region
 * [base, base+size). base will be rounded down to the jemalloc chunk size,
 * and base+size will be rounded up to the chunk size. If no memory managed
 * by jemalloc is found in the requested region, je_iterate returns -1 and
 * sets errno to EINVAL.
 *
 * je_iterate must be called when no allocations are in progress, either
 * when single-threaded (for example just after a fork), or between
 * jemalloc_prefork() and jemalloc_postfork_parent(). The callback must
 * not attempt to allocate with jemalloc.
 */
int
je_iterate(uintptr_t base, size_t size,
    void (*callback)(uintptr_t ptr, size_t size, void *arg), void *arg)
{
	int error = EINVAL;
	uintptr_t ptr = (uintptr_t)CHUNK_ADDR2BASE(base);
	uintptr_t end = CHUNK_CEILING(base + size);

	/* Walk the region one chunk at a time. */
	while (ptr < end) {
		extent_node_t *node;

		node = chunk_lookup((void *)ptr, false);
		if (node == NULL) {
			/* Not managed by jemalloc; skip to the next chunk. */
			ptr += chunksize;
			continue;
		}

		assert(extent_node_achunk_get(node) ||
		    (uintptr_t)extent_node_addr_get(node) == ptr);

		error = 0;
		if (extent_node_achunk_get(node)) {
			/* Chunk */
			arena_chunk_t *chunk = (arena_chunk_t *)ptr;
			ptr += chunksize;
			if (&chunk->node != node) {
				/* Empty retained chunk */
				continue;
			}
			je_iterate_chunk(chunk, callback, arg);
		} else if ((uintptr_t)extent_node_addr_get(node) == ptr) {
			/* Huge allocation */
			callback(ptr, extent_node_size_get(node), arg);
			ptr += extent_node_size_get(node);
		}
	}

	if (error) {
		set_errno(error);
		return -1;
	}

	return 0;
}
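/*
 * Illustrative usage sketch (kept in a comment; not part of this file's API):
 * one way a caller might total live allocation bytes with je_iterate right
 * after a fork(), while the process is still single-threaded. The names
 * sum_cb and je_iterate_total are hypothetical. The callback only accumulates
 * into its argument and never allocates with jemalloc, as the contract above
 * requires.
 *
 *	static void
 *	sum_cb(uintptr_t ptr, size_t size, void *arg)
 *	{
 *		size_t *total = (size_t *)arg;
 *
 *		(void)ptr;
 *		*total += size;
 *	}
 *
 *	static size_t
 *	je_iterate_total(uintptr_t base, size_t size)
 *	{
 *		size_t total = 0;
 *
 *		if (je_iterate(base, size, sum_cb, &total) != 0)
 *			return (0);
 *		return (total);
 *	}
 */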