/*
 * Walk the managed heap (or the native dlmalloc heap) and stream the
 * resulting HPSG/HPSO/NHSG segment chunks to the debugger.
 */
static void walkHeap(bool merge, bool native)
{
    HeapChunkContext ctx;

    memset(&ctx, 0, sizeof(ctx));
    ctx.bufLen = HPSx_CHUNK_SIZE;
    ctx.buf = (u1 *)malloc(ctx.bufLen);
    if (ctx.buf == NULL) {
        return;
    }

    /* Pick the DDM chunk type based on which heap we're walking and on
     * the merge flag. */
    ctx.merge = merge;
    if (native) {
        ctx.type = CHUNK_TYPE("NHSG");
    } else {
        if (ctx.merge) {
            ctx.type = CHUNK_TYPE("HPSG");
        } else {
            ctx.type = CHUNK_TYPE("HPSO");
        }
    }

    ctx.p = ctx.buf;
    ctx.needHeader = true;
    if (native) {
#ifdef USE_DLMALLOC
        dlmalloc_inspect_all(heap_chunk_callback, (void *)&ctx);
#endif
    } else {
        dvmHeapSourceWalk(heap_chunk_callback, (void *)&ctx);
    }

    /* Flush whatever is left in the buffer. */
    if (ctx.p > ctx.buf) {
        flush_hpsg_chunk(&ctx);
    }

    free(ctx.buf);
}
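/*
 * Illustrative sketch, not part of the original file: walkHeap() passes the
 * same callback to dlmalloc_inspect_all() and dvmHeapSourceWalk(), so
 * heap_chunk_callback is assumed to use dlmalloc's inspect_all handler
 * signature (start, end, used_bytes, arg).  The body below is a hypothetical
 * stand-in; the real callback encodes per-allocation-unit state into ctx->buf
 * and flushes with flush_hpsg_chunk() as the buffer fills.
 */
#if 0  /* sketch only */
static void heap_chunk_callback(void *start, void *end, size_t used_bytes,
                                void *arg)
{
    HeapChunkContext *ctx = (HeapChunkContext *)arg;

    /* A real implementation would append state bytes at ctx->p, emitting a
     * chunk header first when ctx->needHeader is set, and would call
     * flush_hpsg_chunk(ctx) before ctx->p reaches ctx->buf + ctx->bufLen. */
    (void)start;
    (void)end;
    (void)used_bytes;
    (void)ctx;
}
#endif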
/*
 * Return unused memory to the system if possible.
 */
static void trimHeaps()
{
    HS_BOILERPLATE();

    HeapSource *hs = gHs;
    size_t heapBytes = 0;
    for (size_t i = 0; i < hs->numHeaps; i++) {
        Heap *heap = &hs->heaps[i];

        /* Return the wilderness chunk to the system. */
        mspace_trim(heap->msp, 0);

        /* Return any whole free pages to the system. */
        mspace_inspect_all(heap->msp, releasePagesInRange, &heapBytes);
    }

    /* Same for the native heap. */
    dlmalloc_trim(0);
    size_t nativeBytes = 0;
    dlmalloc_inspect_all(releasePagesInRange, &nativeBytes);

    LOGD_HEAP("madvised %zd (GC) + %zd (native) = %zd total bytes",
              heapBytes, nativeBytes, heapBytes + nativeBytes);
}
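/*
 * Illustrative sketch, not part of the original file: releasePagesInRange()
 * is the mspace_inspect_all()/dlmalloc_inspect_all() handler used by
 * trimHeaps() above.  A minimal version could look like the following,
 * assuming a SYSTEM_PAGE_SIZE constant plus <stdint.h> and <sys/mman.h>; it
 * madvise()s the whole pages inside each completely unused chunk and adds
 * the released length to the size_t that trimHeaps() passes as the argument.
 */
#if 0  /* sketch only */
static void releasePagesInRange(void *start, void *end, size_t used_bytes,
                                void *releasedBytes)
{
    if (used_bytes == 0) {
        /* madvise() wants page-aligned boundaries: round the start up and
         * the end down to whole pages. */
        uintptr_t begin = ((uintptr_t)start + SYSTEM_PAGE_SIZE - 1) &
                          ~(uintptr_t)(SYSTEM_PAGE_SIZE - 1);
        uintptr_t finish = (uintptr_t)end & ~(uintptr_t)(SYSTEM_PAGE_SIZE - 1);
        if (begin < finish) {
            size_t length = finish - begin;
            madvise((void *)begin, length, MADV_DONTNEED);
            *(size_t *)releasedBytes += length;
        }
    }
}
#endif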