static mchunkptr
internal_function
mem2chunk_check (void *mem, unsigned char **magic_p)
{
  mchunkptr p;
  INTERNAL_SIZE_T sz, c;
  unsigned char magic;

  if (!aligned_OK (mem))
    return NULL;

  p = mem2chunk (mem);
  if (!chunk_is_mmapped (p))
    {
      /* Must be a chunk in conventional heap memory. */
      int contig = contiguous (&main_arena);
      sz = chunksize (p);
      if ((contig &&
           ((char *) p < mp_.sbrk_base ||
            ((char *) p + sz) >= (mp_.sbrk_base + main_arena.system_mem))) ||
          sz < MINSIZE || sz & MALLOC_ALIGN_MASK || !inuse (p) ||
          (!prev_inuse (p) && (p->prev_size & MALLOC_ALIGN_MASK ||
                               (contig && (char *) prev_chunk (p) < mp_.sbrk_base) ||
                               next_chunk (prev_chunk (p)) != p)))
        return NULL;

      /* Scan backwards from the last byte of the chunk for the magic
         byte, hopping over the skip counts stored in the padding.  */
      magic = MAGICBYTE (p);
      for (sz += SIZE_SZ - 1; (c = ((unsigned char *) p)[sz]) != magic; sz -= c)
        {
          if (c <= 0 || sz < (c + 2 * SIZE_SZ))
            return NULL;
        }
    }
  else
    {
      unsigned long offset, page_mask = GLRO (dl_pagesize) - 1;

      /* mmap()ed chunks have MALLOC_ALIGNMENT or higher power-of-two
         alignment relative to the beginning of a page.  Check this
         first. */
      offset = (unsigned long) mem & page_mask;
      if ((offset != MALLOC_ALIGNMENT && offset != 0 && offset != 0x10 &&
           offset != 0x20 && offset != 0x40 && offset != 0x80 &&
           offset != 0x100 && offset != 0x200 && offset != 0x400 &&
           offset != 0x800 && offset != 0x1000 && offset < 0x2000) ||
          !chunk_is_mmapped (p) || (p->size & PREV_INUSE) ||
          ((((unsigned long) p - p->prev_size) & page_mask) != 0) ||
          ((sz = chunksize (p)), ((p->prev_size + sz) & page_mask) != 0))
        return NULL;

      magic = MAGICBYTE (p);
      for (sz -= 1; (c = ((unsigned char *) p)[sz]) != magic; sz -= c)
        {
          if (c <= 0 || sz < (c + 2 * SIZE_SZ))
            return NULL;
        }
    }

  /* Invalidate the magic byte so a second free of the same pointer is
     detected as an error.  */
  ((unsigned char *) p)[sz] ^= 0xFF;
  if (magic_p)
    *magic_p = (unsigned char *) p + sz;
  return p;
}
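/* A minimal, self-contained sketch (not glibc code) of the trailer scan
   used above.  Under MALLOC_CHECK_, mem2mem_check pads the bytes between
   the end of the user region and the end of the chunk with skip counts
   (0xFF, or the remaining distance for the last hop) and places a magic
   byte at the end of the user region; the scan below walks backwards over
   those counts until it reaches the magic byte.  All names here (toy_*)
   are hypothetical, and the bounds check is simplified relative to the
   real one.  */
#if 0
#include <stddef.h>

static unsigned char *
toy_find_magic (unsigned char *buf, size_t last, unsigned char magic)
{
  unsigned char c;
  /* buf[last] is the final padding byte; hop backwards by each stored
     skip count until the magic byte is found.  */
  for (; (c = buf[last]) != magic; last -= c)
    if (c == 0 || last < c)
      return NULL;              /* corrupt padding: give up */
  return buf + last;
}
#endif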
static int
internal_function
heap_trim (heap_info *heap, size_t pad)
{
  mstate ar_ptr = heap->ar_ptr;
  unsigned long pagesz = GLRO (dl_pagesize);
  mchunkptr top_chunk = top (ar_ptr), p, bck, fwd;
  heap_info *prev_heap;
  long new_size, top_size, top_area, extra, prev_size, misalign;

  /* Can this heap go away completely? */
  while (top_chunk == chunk_at_offset (heap, sizeof (*heap)))
    {
      prev_heap = heap->prev;
      prev_size = prev_heap->size - (MINSIZE - 2 * SIZE_SZ);
      p = chunk_at_offset (prev_heap, prev_size);
      /* fencepost must be properly aligned.  */
      misalign = ((long) p) & MALLOC_ALIGN_MASK;
      p = chunk_at_offset (prev_heap, prev_size - misalign);
      assert (chunksize_nomask (p) == (0 | PREV_INUSE)); /* must be fencepost */
      p = prev_chunk (p);
      new_size = chunksize (p) + (MINSIZE - 2 * SIZE_SZ) + misalign;
      assert (new_size > 0 && new_size < (long) (2 * MINSIZE));
      if (!prev_inuse (p))
        new_size += prev_size (p);
      assert (new_size > 0 && new_size < HEAP_MAX_SIZE);
      if (new_size + (HEAP_MAX_SIZE - prev_heap->size) < pad + MINSIZE + pagesz)
        break;
      ar_ptr->system_mem -= heap->size;
      LIBC_PROBE (memory_heap_free, 2, heap, heap->size);
      delete_heap (heap);
      heap = prev_heap;
      if (!prev_inuse (p)) /* consolidate backward */
        {
          p = prev_chunk (p);
          unlink (ar_ptr, p, bck, fwd);
        }
      assert (((unsigned long) ((char *) p + new_size) & (pagesz - 1)) == 0);
      assert (((char *) p + new_size) == ((char *) heap + heap->size));
      top (ar_ptr) = top_chunk = p;
      set_head (top_chunk, new_size | PREV_INUSE);
      /*check_chunk(ar_ptr, top_chunk);*/
    }

  /* Uses similar logic for per-thread arenas as the main arena with systrim
     and _int_free by preserving the top pad and rounding down to the nearest
     page.  */
  top_size = chunksize (top_chunk);
  if ((unsigned long) (top_size) < (unsigned long) (mp_.trim_threshold))
    return 0;

  /* Keep at least MINSIZE + 1 bytes so top remains a valid chunk.  */
  top_area = top_size - MINSIZE - 1;
  if (top_area < 0 || (size_t) top_area <= pad)
    return 0;

  /* Release in pagesize units and round down to the nearest page.  */
  extra = ALIGN_DOWN (top_area - pad, pagesz);
  if (extra == 0)
    return 0;

  /* Try to shrink.  */
  if (shrink_heap (heap, extra) != 0)
    return 0;

  ar_ptr->system_mem -= extra;

  /* Success.  Adjust top accordingly.  */
  set_head (top_chunk, (top_size - extra) | PREV_INUSE);
  /*check_chunk(ar_ptr, top_chunk);*/
  return 1;
}
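/* A minimal sketch (not glibc code) of the trim computation above:
   everything past MINSIZE + 1 bytes of top plus the caller's pad is
   released, rounded down to whole pages.  For example, with a 4096-byte
   page size, MINSIZE == 32 (64-bit), top_size == 140000 and pad == 0:
   top_area is 139967 and extra is ALIGN_DOWN (139967, 4096) == 139264,
   i.e. 34 pages.  toy_trim_extra is a hypothetical name; ALIGN_DOWN
   rounds its first argument down to a multiple of the second.  */
#if 0
static long
toy_trim_extra (long top_size, size_t pad, unsigned long pagesz)
{
  long top_area = top_size - MINSIZE - 1;
  if (top_area < 0 || (size_t) top_area <= pad)
    return 0;                   /* nothing can be released */
  return ALIGN_DOWN (top_area - pad, pagesz);
}
#endif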