/* * Return unused memory to the system if possible. */ static void trimHeaps() { HS_BOILERPLATE(); HeapSource *hs = gHs; size_t heapBytes = 0; for (size_t i = 0; i < hs->numHeaps; i++) { Heap *heap = &hs->heaps[i]; /* Return the wilderness chunk to the system. */ mspace_trim(heap->msp, 0); /* Return any whole free pages to the system. */ mspace_walk_free_pages(heap->msp, releasePagesInRange, &heapBytes); } /* Same for the native heap. */ dlmalloc_trim(0); size_t nativeBytes = 0; dlmalloc_walk_free_pages(releasePagesInRange, &nativeBytes); LOGD_HEAP("madvised %zd (GC) + %zd (native) = %zd total bytes", heapBytes, nativeBytes, heapBytes + nativeBytes); }
/*
 * Public trim entry point: trims the main arena's mspace while
 * holding the arena lock.  Returns the result of mspace_trim
 * (nonzero if memory was released).
 */
int public_mTRIm(size_t s)
{
    int rc;

    (void)mutex_lock(&main_arena.mutex);
    rc = mspace_trim(arena_to_mspace(&main_arena), s);
    (void)mutex_unlock(&main_arena.mutex);

    return rc;
}
// Release unused heap memory back to the system, keeping at most
// `pad` bytes of slack.  Returns true if either the segment release
// or the mspace trim actually gave anything back.
bool MemoryHeap::trim(size_t pad)
{
    const size_t segmentsFreed = release_unused_segments((mstate)mMspace);
    const int didTrim = mspace_trim(mMspace, pad);

    return (segmentsFreed > 0) || (didTrim > 0);
}
/*
 * Trim the shared space under its lock, leaving a 64-page pad so
 * that the next allocations do not immediately fault pages back in.
 */
mm_shared_space_trim(struct mm_shared_space *space)
{
    mm_common_lock(&space->lock);
    mspace_trim(space->space.opaque, MM_PAGE_SIZE * 64);
    mm_common_unlock(&space->lock);
}
/*
 * Trim the private space, leaving a 16-page pad.  No lock is taken
 * here -- presumably private spaces are single-owner; verify against
 * callers before adding concurrent use.
 */
mm_private_space_trim(struct mm_private_space *space)
{
    mspace_trim(space->space.opaque, MM_PAGE_SIZE * 16);
}