static mchunkptr
internal_function
mem2chunk_check (void *mem, unsigned char **magic_p)
{
  mchunkptr p;
  INTERNAL_SIZE_T sz, c;
  unsigned char magic;

  if (!aligned_OK (mem))
    return NULL;

  p = mem2chunk (mem);
  if (!chunk_is_mmapped (p))
    {
      /* Must be a chunk in conventional heap memory.  */
      int contig = contiguous (&main_arena);
      sz = chunksize (p);
      if ((contig &&
           ((char *) p < mp_.sbrk_base ||
            ((char *) p + sz) >= (mp_.sbrk_base + main_arena.system_mem))) ||
          sz < MINSIZE || sz & MALLOC_ALIGN_MASK || !inuse (p) ||
          (!prev_inuse (p) && (p->prev_size & MALLOC_ALIGN_MASK ||
                               (contig
                                && (char *) prev_chunk (p) < mp_.sbrk_base) ||
                               next_chunk (prev_chunk (p)) != p)))
        return NULL;

      magic = MAGICBYTE (p);
      for (sz += SIZE_SZ - 1; (c = ((unsigned char *) p)[sz]) != magic;
           sz -= c)
        {
          if (c <= 0 || sz < (c + 2 * SIZE_SZ))
            return NULL;
        }
    }
  else
    {
      unsigned long offset, page_mask = GLRO (dl_pagesize) - 1;

      /* mmap()ed chunks have MALLOC_ALIGNMENT or higher power-of-two
         alignment relative to the beginning of a page.  Check this
         first.  */
      offset = (unsigned long) mem & page_mask;
      if ((offset != MALLOC_ALIGNMENT && offset != 0 && offset != 0x10 &&
           offset != 0x20 && offset != 0x40 && offset != 0x80 &&
           offset != 0x100 && offset != 0x200 && offset != 0x400 &&
           offset != 0x800 && offset != 0x1000 && offset < 0x2000) ||
          !chunk_is_mmapped (p) || (p->size & PREV_INUSE) ||
          ((((unsigned long) p - p->prev_size) & page_mask) != 0) ||
          ((sz = chunksize (p)), ((p->prev_size + sz) & page_mask) != 0))
        return NULL;

      magic = MAGICBYTE (p);
      for (sz -= 1; (c = ((unsigned char *) p)[sz]) != magic; sz -= c)
        {
          if (c <= 0 || sz < (c + 2 * SIZE_SZ))
            return NULL;
        }
    }
  ((unsigned char *) p)[sz] ^= 0xFF;
  if (magic_p)
    *magic_p = (unsigned char *) p + sz;
  return p;
}
static void *
internal_function
mem2mem_check (void *ptr, size_t sz)
{
  mchunkptr p;
  unsigned char *m_ptr = ptr;
  size_t i;

  if (!ptr)
    return ptr;

  p = mem2chunk (ptr);
  for (i = chunksize (p) - (chunk_is_mmapped (p) ? 2 * SIZE_SZ + 1 :
                            SIZE_SZ + 1);
       i > sz;
       i -= 0xFF)
    {
      if (i - sz < 0x100)
        {
          m_ptr[i] = (unsigned char) (i - sz);
          break;
        }
      m_ptr[i] = 0xFF;
    }
  m_ptr[sz] = MAGICBYTE (p);
  return (void *) m_ptr;
}
/* Visualize the chunk as being partitioned into blocks of 256 bytes from the
   highest address of the chunk, downwards.  The beginning of each block tells
   us the size of the previous block, up to the actual size of the requested
   memory.  Our magic byte is right at the end of the requested size, so we
   must reach it with this iteration, otherwise we have witnessed a memory
   corruption.  */
static size_t
malloc_check_get_size (mchunkptr p)
{
  size_t size;
  unsigned char c;
  unsigned char magic = MAGICBYTE (p);

  assert (using_malloc_checking == 1);

  for (size = chunksize (p) - 1 + (chunk_is_mmapped (p) ? 0 : SIZE_SZ);
       (c = ((unsigned char *) p)[size]) != magic;
       size -= c)
    {
      if (c <= 0 || size < (c + 2 * SIZE_SZ))
        {
          malloc_printerr (check_action,
                           "malloc_check_get_size: memory corruption",
                           chunk2mem (p));
          return 0;
        }
    }

  /* chunk2mem size.  */
  return size - 2 * SIZE_SZ;
}
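
/* The following is a minimal, self-contained sketch (not part of glibc) of
   the trailer encoding written by mem2mem_check() and decoded by
   malloc_check_get_size() above.  SIZE_SZ, MAGICBYTE and the real chunk
   layout are replaced by hypothetical stand-ins (DEMO_MAGIC and a flat byte
   buffer standing in for the usable area of a chunk); it only illustrates
   the 256-byte block walk, not glibc's exact arithmetic.  It can be built
   on its own as an ordinary C program.  */

#include <assert.h>
#include <stddef.h>
#include <stdio.h>
#include <string.h>

#define DEMO_MAGIC 0xA5         /* stand-in for MAGICBYTE (p) */

/* buf[0 .. usable-1] stands for the usable area of a chunk; req bytes were
   requested.  Store the magic byte at buf[req] and, above it, a chain of
   block sizes leading back down to it (what mem2mem_check() does).  */
static void
demo_write_trailer (unsigned char *buf, size_t usable, size_t req)
{
  size_t i;

  for (i = usable - 1; i > req; i -= 0xFF)
    {
      if (i - req < 0x100)
        {
          buf[i] = (unsigned char) (i - req);   /* distance to the magic byte */
          break;
        }
      buf[i] = 0xFF;                            /* a full 255-byte block */
    }
  buf[req] = DEMO_MAGIC;
}

/* Walk downwards from the top of the usable area, following the stored block
   sizes, until the magic byte is found; its index is the original request
   (what malloc_check_get_size() does, minus the corruption report).  */
static size_t
demo_read_size (const unsigned char *buf, size_t usable)
{
  size_t i = usable - 1;
  unsigned char c;

  while ((c = buf[i]) != DEMO_MAGIC)
    {
      assert (c > 0 && i >= c);                 /* otherwise: corruption */
      i -= c;
    }
  return i;
}

int
main (void)
{
  unsigned char buf[1024];

  memset (buf, 0, sizeof buf);
  demo_write_trailer (buf, sizeof buf, 300);
  /* Prints "recovered request: 300".  */
  printf ("recovered request: %zu\n", demo_read_size (buf, sizeof buf));
  return 0;
}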