static int
internal_function
heap_trim(heap_info *heap, size_t pad)
{
  mstate ar_ptr = heap->ar_ptr;
  unsigned long pagesz = GLRO(dl_pagesize);
  mchunkptr top_chunk = top(ar_ptr), p, bck, fwd;
  heap_info *prev_heap;
  long new_size, top_size, extra, prev_size, misalign;

  /* Can this heap go away completely? */
  while(top_chunk == chunk_at_offset(heap, sizeof(*heap))) {
    prev_heap = heap->prev;
    prev_size = prev_heap->size - (MINSIZE-2*SIZE_SZ);
    p = chunk_at_offset(prev_heap, prev_size);
    /* fencepost must be properly aligned. */
    misalign = ((long) p) & MALLOC_ALIGN_MASK;
    p = chunk_at_offset(prev_heap, prev_size - misalign);
    assert(p->size == (0|PREV_INUSE)); /* must be fencepost */
    p = prev_chunk(p);
    new_size = chunksize(p) + (MINSIZE-2*SIZE_SZ) + misalign;
    assert(new_size>0 && new_size<(long)(2*MINSIZE));
    if(!prev_inuse(p))
      new_size += p->prev_size;
    assert(new_size>0 && new_size<HEAP_MAX_SIZE);
    if(new_size + (HEAP_MAX_SIZE - prev_heap->size) < pad + MINSIZE + pagesz)
      break;
    ar_ptr->system_mem -= heap->size;
    arena_mem -= heap->size;
    delete_heap(heap);
    heap = prev_heap;
    if(!prev_inuse(p)) { /* consolidate backward */
      p = prev_chunk(p);
      unlink(p, bck, fwd);
    }
    assert(((unsigned long)((char*)p + new_size) & (pagesz-1)) == 0);
    assert( ((char*)p + new_size) == ((char*)heap + heap->size) );
    top(ar_ptr) = top_chunk = p;
    set_head(top_chunk, new_size | PREV_INUSE);
    /*check_chunk(ar_ptr, top_chunk);*/
  }
  top_size = chunksize(top_chunk);
  extra = (top_size - pad - MINSIZE - 1) & ~(pagesz - 1);
  if(extra < (long)pagesz)
    return 0;
  /* Try to shrink. */
  if(shrink_heap(heap, extra) != 0)
    return 0;
  ar_ptr->system_mem -= extra;
  arena_mem -= extra;

  /* Success. Adjust top accordingly. */
  set_head(top_chunk, (top_size - extra) | PREV_INUSE);
  /*check_chunk(ar_ptr, top_chunk);*/
  return 1;
}
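The trim decision at the bottom of this older version is worth unpacking: extra is the part of the top chunk above pad + MINSIZE + 1 bytes, rounded down to a page boundary, and the heap is shrunk only if at least one whole page can be released. Below is a minimal standalone sketch of that arithmetic; the top_size, pad, MINSIZE and page-size values are made up for illustration only.

#include <assert.h>
#include <stdio.h>

int main(void)
{
  /* Hypothetical values for illustration: a 0x6000-byte top chunk,
     no requested top pad, a 0x20-byte MINSIZE and 4 KiB pages.  */
  long top_size = 0x6000;
  long pad = 0;
  long minsize = 0x20;
  unsigned long pagesz = 0x1000;

  /* Same expression as the old heap_trim: keep at least
     pad + MINSIZE + 1 bytes in top, round the rest down to pages.  */
  long extra = (top_size - pad - minsize - 1) & ~(pagesz - 1);

  printf("extra = %#lx\n", (unsigned long) extra); /* 0x5000: five pages */
  assert(extra >= (long) pagesz); /* old code trims only if >= one page */
  return 0;
}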
static void
do_gc()
{
  DBG(("start gc"));

  clear_all_bitmaps();

  sml_rootset_enum_ptr(mark);
  sml_malloc_pop_and_mark(mark);

  /* check finalization */
  sml_check_finalizer(mark);

  sweep();
  shrink_heap(1);

  /* sweep malloc heap */
  sml_malloc_sweep();

  DBG(("gc finished."));
}
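do_gc runs a plain mark-and-sweep cycle: clear the mark bitmaps, mark from the root set and from tracked malloc blocks, check finalizers, then sweep and shrink the heap. Below is a minimal sketch of the enumerate-roots-with-a-callback pattern that sml_rootset_enum_ptr(mark) relies on; every type and name in it (obj, roots, mark_one, enum_roots) is a hypothetical stand-in, not the runtime's actual API.

#include <stddef.h>

/* Hypothetical object header and root table, for illustration only.  */
struct obj { unsigned marked; struct obj *fields[2]; };

static struct obj *roots[16];   /* stand-in for the runtime's root set */
static size_t nroots;

/* Callback applied to every root slot, in the style of mark() above:
   set the mark and recurse into the object's children.  */
static void mark_one(struct obj **slot)
{
  struct obj *o = *slot;
  if (o == NULL || o->marked)
    return;
  o->marked = 1;
  for (size_t i = 0; i < 2; i++)
    mark_one(&o->fields[i]);
}

/* Stand-in for sml_rootset_enum_ptr: hand every root slot to the callback.  */
static void enum_roots(void (*trace)(struct obj **))
{
  for (size_t i = 0; i < nroots; i++)
    trace(&roots[i]);
}

int main(void)
{
  struct obj a = { 0, { NULL, NULL } };
  struct obj b = { 0, { &a, NULL } };
  roots[nroots++] = &b;
  enum_roots(mark_one);          /* marks b, then a through b's field */
  return (a.marked && b.marked) ? 0 : 1;
}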
static int
internal_function
heap_trim (heap_info *heap, size_t pad)
{
  mstate ar_ptr = heap->ar_ptr;
  unsigned long pagesz = GLRO (dl_pagesize);
  mchunkptr top_chunk = top (ar_ptr), p, bck, fwd;
  heap_info *prev_heap;
  long new_size, top_size, top_area, extra, prev_size, misalign;

  /* Can this heap go away completely? */
  while (top_chunk == chunk_at_offset (heap, sizeof (*heap)))
    {
      prev_heap = heap->prev;
      prev_size = prev_heap->size - (MINSIZE - 2 * SIZE_SZ);
      p = chunk_at_offset (prev_heap, prev_size);
      /* fencepost must be properly aligned.  */
      misalign = ((long) p) & MALLOC_ALIGN_MASK;
      p = chunk_at_offset (prev_heap, prev_size - misalign);
      assert (p->size == (0 | PREV_INUSE)); /* must be fencepost */
      p = prev_chunk (p);
      new_size = chunksize (p) + (MINSIZE - 2 * SIZE_SZ) + misalign;
      assert (new_size > 0 && new_size < (long) (2 * MINSIZE));
      if (!prev_inuse (p))
        new_size += p->prev_size;
      assert (new_size > 0 && new_size < HEAP_MAX_SIZE);
      if (new_size + (HEAP_MAX_SIZE - prev_heap->size) < pad + MINSIZE + pagesz)
        break;
      ar_ptr->system_mem -= heap->size;
      arena_mem -= heap->size;
      LIBC_PROBE (memory_heap_free, 2, heap, heap->size);
      delete_heap (heap);
      heap = prev_heap;
      if (!prev_inuse (p)) /* consolidate backward */
        {
          p = prev_chunk (p);
          unlink (ar_ptr, p, bck, fwd);
        }
      assert (((unsigned long) ((char *) p + new_size) & (pagesz - 1)) == 0);
      assert (((char *) p + new_size) == ((char *) heap + heap->size));
      top (ar_ptr) = top_chunk = p;
      set_head (top_chunk, new_size | PREV_INUSE);
      /*check_chunk(ar_ptr, top_chunk);*/
    }

  /* Uses similar logic for per-thread arenas as the main arena with systrim
     by preserving the top pad and at least a page.  */
  top_size = chunksize (top_chunk);
  top_area = top_size - MINSIZE - 1;
  if (top_area <= pad)
    return 0;

  extra = ALIGN_DOWN(top_area - pad, pagesz);
  if ((unsigned long) extra < mp_.trim_threshold)
    return 0;

  /* Try to shrink.  */
  if (shrink_heap (heap, extra) != 0)
    return 0;

  ar_ptr->system_mem -= extra;
  arena_mem -= extra;

  /* Success. Adjust top accordingly.  */
  set_head (top_chunk, (top_size - extra) | PREV_INUSE);
  /*check_chunk(ar_ptr, top_chunk);*/
  return 1;
}
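The later version computes the same kind of surplus in two steps, top_area first and then a page-aligned extra, and it trims only when extra reaches mp_.trim_threshold (128 KiB by default, tunable via M_TRIM_THRESHOLD). Below is a minimal standalone sketch of that arithmetic; ALIGN_DOWN_ILLUSTRATION stands in for glibc's ALIGN_DOWN, and all numeric values are made up for illustration only.

#include <assert.h>
#include <stdio.h>

/* Illustrative page rounding in the spirit of glibc's ALIGN_DOWN;
   assumes size is a power of two, as a page size is.  */
#define ALIGN_DOWN_ILLUSTRATION(base, size) ((base) & ~((size) - 1))

int main(void)
{
  /* Hypothetical values, for illustration only.  */
  long top_size = 0x42000;             /* current size of the top chunk */
  long pad = 0x1000;                   /* caller-requested top pad */
  long minsize = 0x20;                 /* stand-in for MINSIZE */
  unsigned long pagesz = 0x1000;       /* 4 KiB pages */
  unsigned long trim_threshold = 128 * 1024; /* glibc default */

  /* Same shape as the newer heap_trim: compute the usable tail first ...  */
  long top_area = top_size - minsize - 1;
  assert(top_area > pad);              /* otherwise nothing can be trimmed */

  /* ... then round the surplus above the pad down to whole pages.  */
  long extra = ALIGN_DOWN_ILLUSTRATION(top_area - pad, pagesz);
  printf("extra = %#lx\n", (unsigned long) extra); /* 0x40000 = 256 KiB */

  /* Trim only when the surplus reaches the trim threshold.  */
  assert((unsigned long) extra >= trim_threshold);
  return 0;
}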