static Void_t*
pos_int_malloc(char *name, mstate av, size_t bytes)
{
    INTERNAL_SIZE_T nb;
    unsigned int idx;
    mbinptr bin;
    mchunkptr victim;
    INTERNAL_SIZE_T size;
    int victim_index;
    mchunkptr remainder;
    unsigned long remainder_size;
    unsigned int block;
    unsigned int bit;
    unsigned int map;
    mchunkptr fwd;
    mchunkptr bck;
    //const char *errstr = NULL;

    size_t pagemask = PAGESIZE - 1;

    /* Round the request up to a 16-byte-aligned chunk size. */
    checked_request2size(bytes, nb);

#if MALLOC_DEBUG == 1
    printf("before fastbin\n");
#endif

    // 1. fast bin (<= 144)
    /*
    if ((unsigned long)(nb) <= (unsigned long)(get_max_fast())) {
        idx = fastbin_index(nb);
        // printf("idx = %d\n", idx);
        mfastbinptr *fb = &fastbin(av, idx);
        victim = *fb;

        if (victim != 0) {
            if (fastbin_index(chunksize(victim)) != idx) {
                errstr = "malloc(): memory corruption (fast)";
            errout:
                malloc_printerr(check_action, errstr, chunk2mem(victim));
            }
#if CONSISTENCY == 1
            POS_WRITE_VAUE(name, (unsigned long *)fb, (unsigned long)victim->fd);
#else
            *fb = victim->fd;
#endif
            void *p = chunk2mem(victim);
            return p;
        }
    }
    */

    // 2. small bin (<= 1008)
    if (in_smallbin_range(nb)) {
#if MALLOC_DEBUG == 1
        printf("inside smallbin if\n");
#endif
        idx = smallbin_index(nb);
        bin = bin_at(av, idx);
        victim = last(bin);

        if (victim != bin && victim != NULL) {
            bck = victim->bk;
            /*
            if (bck->fd != victim) {
                errstr = "malloc(): smallbin double linked list corrupted";
                goto errout;
            }
            */
#if CONSISTENCY == 1
            set_inuse_bit_at_offset_log(name, victim, nb);
            POS_WRITE_VAUE(name, (unsigned long *)&bin->bk, (unsigned long)bck);
            POS_WRITE_VAUE(name, (unsigned long *)&bin->fd, (unsigned long)bin);
#else
            set_inuse_bit_at_offset(victim, nb);
            bin->bk = bck;
            bck->fd = bin;
#endif
            void *p = chunk2mem(victim);
            return p;
        }
    }
    else {
        idx = largebin_index(nb);
        if (have_fastchunks(av)) {
            pos_malloc_consolidate(name, av);
        }
    }

#if MALLOC_DEBUG == 1
    printf("before unsorted bin\n");
#endif

    for (;;) {
        int iters = 0;

        // 3. unsorted bin
        while ((victim = unsorted_chunks(av)->bk) != unsorted_chunks(av)) {
            bck = victim->bk;
            /*
            if (victim->size <= 2 * SIZE_SZ || victim->size > av->system_mem)
                malloc_printerr(check_action, "malloc(): memory corruption",
                                chunk2mem(victim));
            */
            size = chunksize(victim);

            /* Last-remainder shortcut: split the cached remainder chunk. */
            if (in_smallbin_range(nb) &&
                bck == unsorted_chunks(av) &&
                victim == av->last_remainder &&
                (unsigned long)(size) > (unsigned long)(nb + MINSIZE)) {

                remainder_size = size - nb;
                remainder = chunk_at_offset(victim, nb);

#if CONSISTENCY == 1
                POS_WRITE_VAUE(name, (unsigned long *)&unsorted_chunks(av)->bk, (unsigned long)remainder);
                POS_WRITE_VAUE(name, (unsigned long *)&unsorted_chunks(av)->fd, (unsigned long)remainder);
                POS_WRITE_VAUE(name, (unsigned long *)&av->last_remainder, (unsigned long)remainder);
#else
                unsorted_chunks(av)->bk = unsorted_chunks(av)->fd = remainder;
                av->last_remainder = remainder;
#endif
                remainder->bk = remainder->fd = unsorted_chunks(av);
                if (!in_smallbin_range(remainder_size)) {
                    remainder->fd_nextsize = NULL;
                    remainder->bk_nextsize = NULL;
                }

                // The remainder doesn't need logging...
                if (chunk_is_last(victim))
                    set_head(remainder, remainder_size | LAST_CHUNK | PREV_INUSE);
                else
                    set_head(remainder, remainder_size | PREV_INUSE);

                // Set the PREV_INUSE flag..
#if CONSISTENCY == 1
                if (chunk_is_first(victim)) {
                    set_head_log(name, victim, nb | FIRST_CHUNK | PREV_INUSE);
                } else {
                    set_head_log(name, victim, nb | PREV_INUSE);
                }
                set_foot_log(name, remainder, remainder_size);
#else
                if (chunk_is_first(victim))
                    set_head(victim, nb | FIRST_CHUNK | PREV_INUSE);
                else
                    set_head(victim, nb | PREV_INUSE);
                set_foot(remainder, remainder_size);
#endif

                void *p = chunk2mem(victim);
                return p;
            }
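            /*
             * Sorting step (mirrors glibc ptmalloc): a chunk that was not an
             * exact fit is unlinked from the unsorted list and filed into its
             * small or large bin.  Large bins stay size-sorted through the
             * fd_nextsize/bk_nextsize skip links, so the code below either
             * splices the chunk in as the new smallest element or walks
             * fd_nextsize to its sorted position.  Under CONSISTENCY == 1,
             * pointer stores go through POS_WRITE_VAUE, the logged-write
             * primitive this allocator uses for crash consistency.
             */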
            /* Remove the chunk from the unsorted list. */
#if CONSISTENCY == 1
            POS_WRITE_VAUE(name, (unsigned long *)&unsorted_chunks(av)->bk, (unsigned long)bck);
            POS_WRITE_VAUE(name, (unsigned long *)&bck->fd, (unsigned long)unsorted_chunks(av));
#else
            unsorted_chunks(av)->bk = bck;
            bck->fd = unsorted_chunks(av);
#endif

            /* Exact fit: take the chunk as-is. */
            if (size == nb) {
#if CONSISTENCY == 1
                set_inuse_bit_at_offset_log(name, victim, size);
#else
                set_inuse_bit_at_offset(victim, size);
#endif
                void *p = chunk2mem(victim);
                return p;
            }

            if (in_smallbin_range(size)) {
                victim_index = smallbin_index(size);
                bck = bin_at(av, victim_index);
                fwd = bck->fd;
            }
            else {
                victim_index = largebin_index(size);
                bck = bin_at(av, victim_index);
                fwd = bck->fd;

                if (fwd != bck) {
                    size |= PREV_INUSE;  // so the comparisons below can skip chunksize()
                    if ((unsigned long)(size) < (unsigned long)(bck->bk->size)) {
                        fwd = bck;
                        bck = bck->bk;

                        // The victim came from the unsorted bin, where fd_nextsize
                        // is unused, so its old value needs no log entry (the same
                        // holds for the fd_nextsize writes below).
                        victim->fd_nextsize = fwd->fd;
                        victim->bk_nextsize = fwd->fd->bk_nextsize;
#if CONSISTENCY == 1
                        POS_WRITE_VAUE(name, (unsigned long *)&fwd->fd->bk_nextsize, (unsigned long)victim);
                        POS_WRITE_VAUE(name, (unsigned long *)&victim->bk_nextsize->fd_nextsize, (unsigned long)victim);
#else
                        fwd->fd->bk_nextsize = victim->bk_nextsize->fd_nextsize = victim;
#endif
                    }
                    else {
                        while ((unsigned long)size < fwd->size) {
                            fwd = fwd->fd_nextsize;
                        }

                        if ((unsigned long)size == (unsigned long)fwd->size)
                            fwd = fwd->fd;
                        else {
                            victim->fd_nextsize = fwd;
                            victim->bk_nextsize = fwd->bk_nextsize;
#if CONSISTENCY == 1
                            POS_WRITE_VAUE(name, (unsigned long *)&fwd->bk_nextsize, (unsigned long)victim);
                            POS_WRITE_VAUE(name, (unsigned long *)&victim->bk_nextsize->fd_nextsize, (unsigned long)victim);
#else
                            fwd->bk_nextsize = victim;
                            victim->bk_nextsize->fd_nextsize = victim;
#endif
                        }
                        bck = fwd->bk;
                    }
                }
                else
                    victim->fd_nextsize = victim->bk_nextsize = victim;
            }

#if CONSISTENCY == 1
            mark_bin_log(name, av, victim_index);
            POS_WRITE_VAUE(name, (unsigned long *)&victim->bk, (unsigned long)bck);
            POS_WRITE_VAUE(name, (unsigned long *)&victim->fd, (unsigned long)fwd);
            POS_WRITE_VAUE(name, (unsigned long *)&fwd->bk, (unsigned long)victim);
            POS_WRITE_VAUE(name, (unsigned long *)&bck->fd, (unsigned long)victim);
#else
            mark_bin(av, victim_index);
            victim->bk = bck;
            victim->fd = fwd;
            fwd->bk = victim;
            bck->fd = victim;
#endif

#define MAX_ITERS 10000
            if (++iters >= MAX_ITERS)
                break;
        }

#if MALLOC_DEBUG == 1
        printf("before large bin\n");
#endif

        // 4. large bin (>= 1024)
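        /*
         * Best-fit search over the size-sorted large bin (same strategy as
         * glibc): first(bin) holds the largest chunk and bk_nextsize steps
         * from the smallest chunk back toward larger ones, so the loop below
         * stops at the smallest chunk that still satisfies nb.  When several
         * chunks share that size, taking victim->fd selects one that is not
         * on the nextsize list, sparing a rewrite of the skip links.
         */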
        if (!in_smallbin_range(nb)) {
            bin = bin_at(av, idx);
#if MALLOC_DEBUG
            printf(" bin = [%p]\n", bin);
#endif
            if ((victim = first(bin)) != bin &&
                (unsigned long)(victim->size) >= (unsigned long)(nb)) {

                victim = victim->bk_nextsize;
                while ((unsigned long)(size = chunksize(victim)) < (unsigned long)(nb))
                    victim = victim->bk_nextsize;

                //if (victim != last(bin) && victim->size == victim->fd->size)
                if (victim != last(bin) && chunksize(victim) == chunksize(victim->fd))
                    victim = victim->fd;

                remainder_size = size - nb;
#if CONSISTENCY == 1
                unlink_log(name, victim, bck, fwd);
#else
                unlink(victim, bck, fwd);
#endif

                if (remainder_size < MINSIZE) {
#if CONSISTENCY == 1
                    set_inuse_bit_at_offset_log(name, victim, size);
#else
                    set_inuse_bit_at_offset(victim, size);
#endif
                }
                else {
                    remainder = chunk_at_offset(victim, nb);
#if CONSISTENCY == 1
                    insert_to_unsorted_log(name, av, remainder, bck, fwd, remainder_size);
#else
                    insert_to_unsorted(av, remainder, bck, fwd, remainder_size);
#endif
                    // The remainder doesn't need logging...
                    if (chunk_is_last(victim))
                        set_head(remainder, remainder_size | LAST_CHUNK | PREV_INUSE);
                    else
                        set_head(remainder, remainder_size | PREV_INUSE);

                    // Set the PREV_INUSE flag..
#if CONSISTENCY == 1
                    if (chunk_is_first(victim)) {
                        set_head_log(name, victim, nb | FIRST_CHUNK | PREV_INUSE);
                    } else {
                        set_head_log(name, victim, nb | PREV_INUSE);
                    }
                    set_foot_log(name, remainder, remainder_size);
#else
                    if (chunk_is_first(victim))
                        set_head(victim, nb | FIRST_CHUNK | PREV_INUSE);
                    else
                        set_head(victim, nb | PREV_INUSE);
                    set_foot(remainder, remainder_size);
#endif
                }
                void *p = chunk2mem(victim);
                return p;
            }
        }

        // 5. scan the binmap for the next non-empty bin
        ++idx;
        bin = bin_at(av, idx);
        block = idx2block(idx);
        map = av->binmap[block];
        bit = idx2bit(idx);

#if MALLOC_DEBUG == 1
        printf("DDD\n");
#endif

        for (;;) {
            /* Skip the rest of this block if it holds no set bits. */
            if (bit > map || bit == 0) {
                do {
                    if (++block >= BINMAPSIZE) {
                        goto new_alloc;
                    }
                } while ((map = av->binmap[block]) == 0);

                bin = bin_at(av, (block << BINMAPSHIFT));
                bit = 1;
            }

            /* Advance to the bin whose bit is set. */
            while ((bit & map) == 0) {
                bin = next_bin(bin);
                bit <<= 1;
            }

#if MALLOC_DEBUG == 1
            printf("before victim\n");
#endif

            victim = last(bin);

            /* The bin is empty after all: clear its bit and move on. */
            if (victim == bin) {
#if MALLOC_DEBUG == 1
                printf("victim == bin\n");
#endif
#if CONSISTENCY == 1
                POS_WRITE_VAUE(name, (unsigned long *)&av->binmap[block], (unsigned long)(map & ~bit));
#else
                av->binmap[block] = map &= ~bit;
#endif
                bin = next_bin(bin);
                bit <<= 1;
            }
            else {
#if MALLOC_DEBUG == 1
                printf("victim != bin\n");
#endif
                size = chunksize(victim);
                remainder_size = size - nb;

#if CONSISTENCY == 1
                unlink_log(name, victim, bck, fwd);
#else
                unlink(victim, bck, fwd);
#endif
#if MALLOC_DEBUG == 1
                printf("unlink\n");
#endif

                if (remainder_size < MINSIZE) {
#if CONSISTENCY == 1
                    set_inuse_bit_at_offset_log(name, victim, size);
#else
#if MALLOC_DEBUG == 1
                    printf("D\n");
                    printf("victim : %p\n", victim);
                    printf("size: %lu\n", (unsigned long)size);
#endif
                    set_inuse_bit_at_offset(victim, size);
#endif
                }
                else {
#if MALLOC_DEBUG == 1
                    printf("remainder\n");
#endif
                    remainder = chunk_at_offset(victim, nb);

#if CONSISTENCY == 1
                    insert_to_unsorted_log(name, av, remainder, bck, fwd, remainder_size);
#else
#if MALLOC_DEBUG == 1
                    printf("av=%p\n", av);
                    printf("%p %p %p\n", remainder, bck, fwd);
                    printf("%lu\n", remainder_size);
#endif
                    insert_to_unsorted(av, remainder, bck, fwd, remainder_size);
#endif

#if MALLOC_DEBUG == 1
                    printf("%p %p %p\n", remainder, bck, fwd);
                    printf("s\n");
#endif

                    if (in_smallbin_range(nb)) {
#if CONSISTENCY == 1
                        POS_WRITE_VAUE(name, (unsigned long *)&av->last_remainder, (unsigned long)remainder);
#else
                        av->last_remainder = remainder;
#endif
                    }

#if MALLOC_DEBUG == 1
                    printf("s\n");
#endif

                    if (chunk_is_last(victim))
                        set_head(remainder, remainder_size | LAST_CHUNK | PREV_INUSE);
                    else
                        set_head(remainder, remainder_size | PREV_INUSE);

                    // Set the PREV_INUSE flag..
#if CONSISTENCY == 1
                    if (chunk_is_first(victim)) {
                        set_head_log(name, victim, nb | FIRST_CHUNK | PREV_INUSE);
                    } else {
                        set_head_log(name, victim, nb | PREV_INUSE);
                    }
#if MALLOC_DEBUG == 1
                    printf("s\n");
#endif
                    set_foot_log(name, remainder, remainder_size);
#else
                    if (chunk_is_first(victim))
                        set_head(victim, nb | FIRST_CHUNK | PREV_INUSE);
                    else
                        set_head(victim, nb | PREV_INUSE);
                    set_foot(remainder, remainder_size);
#endif
                }

#if MALLOC_DEBUG == 1
                printf("s\n");
#endif
                void *p = chunk2mem(victim);
                return p;
            }
        }

new_alloc:
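        /*
         * Fallback when no bin could serve the request: carve it out of a
         * fresh segment.  Where glibc would grow the heap with sysmalloc()
         * or mmap(), this allocator requests a new persistent segment via
         * pos_seg_alloc(), marks the leading chunk FIRST_CHUNK and the
         * trailing remainder LAST_CHUNK (segment boundaries stand in for the
         * usual contiguous top chunk), and parks the remainder in the
         * unsorted bin for later requests.
         */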
#if MALLOC_DEBUG == 1
        printf("before newallocation\n");
#endif

        // 6. new allocation
        size = (nb + MINSIZE + 2*SIZE_SZ + pagemask) & ~pagemask;
        size += DEFAULT_PAD;

        //char *mm = (char *)(SEG_ALLOC(0, size, PROT_READ|PROT_WRITE, MAP_PRIVATE));
        char *mm = (char *)pos_seg_alloc(name, size);

#if MALLOC_DEBUG == 1
        printf(" mm = %p\n", mm);
#endif

#if CONSISTENCY == 1
        pos_log_insert_malloc_free(name, (unsigned long)mm, size);
#endif

        //if (mm != MAP_FAILED) {
        if (mm != (char *)0) {
            /* Zero the segment only once the allocation is known to have
               succeeded. */
            memset(mm, 0, size);
#if CONSISTENCY == 1
            POS_WRITE_VAUE(name, (unsigned long *)&av->system_mem, (unsigned long)(av->system_mem + size));
#else
            av->system_mem += size;
#endif

            mchunkptr p;
            p = (mchunkptr)mm;

            remainder_size = size - nb - 2*SIZE_SZ;
            remainder = chunk_at_offset(p, nb);

#if CONSISTENCY == 1
            insert_to_unsorted_log(name, av, remainder, bck, fwd, remainder_size);
#else
            insert_to_unsorted(av, remainder, bck, fwd, remainder_size);
#endif
            /*
            if (in_smallbin_range(nb))
                av->last_remainder = remainder;
            */

            // Set the PREV_INUSE flag..
            set_head(p, nb | FIRST_CHUNK | PREV_INUSE);
            set_head(remainder, remainder_size | LAST_CHUNK | PREV_INUSE);
            set_foot(remainder, remainder_size);
            clear_inuse_bit_at_offset(remainder, remainder_size);

            return chunk2mem(p);
        }
        else
            return 0;
    }
}
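/*
 * Illustrative sketch (not part of this allocator, kept out of the build
 * with #if 0): how a normalized request size is routed through the steps
 * above.  The thresholds mirror the step comments in pos_int_malloc
 * (fast <= 144, small <= 1008, large >= 1024); the helper name
 * request_class() is hypothetical.
 */
#if 0
#include <stdio.h>

static const char *request_class(size_t nb)  /* nb: normalized chunk size */
{
    if (nb <= 144)
        return "fast bin (step 1, currently disabled above)";
    if (nb <= 1008)
        return "small bin (step 2)";
    return "large bin (step 4)";
}

int main(void)
{
    size_t probes[] = { 32, 144, 512, 1008, 4096 };
    size_t i;

    for (i = 0; i < sizeof(probes) / sizeof(probes[0]); i++)
        printf("%zu -> %s\n", probes[i], request_class(probes[i]));
    return 0;
}
#endif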
int
__malloc_set_state(void *msptr)
{
    struct malloc_save_state *ms = (struct malloc_save_state *)msptr;
    size_t i;
    mbinptr b;

    disallow_malloc_check = 1;
    ptmalloc_init();
    if (ms->magic != MALLOC_STATE_MAGIC)
        return -1;

    /* Must fail if the major version is too high. */
    if ((ms->version & ~0xffl) > (MALLOC_STATE_VERSION & ~0xffl))
        return -2;

    (void)mutex_lock(&main_arena.mutex);

    /* There are no fastchunks. */
    clear_fastchunks(&main_arena);
    if (ms->version >= 4)
        set_max_fast(ms->max_fast);
    else
        set_max_fast(64);  /* 64 used to be the value we always used. */
    for (i = 0; i < NFASTBINS; ++i)
        fastbin(&main_arena, i) = 0;
    for (i = 0; i < BINMAPSIZE; ++i)
        main_arena.binmap[i] = 0;
    top(&main_arena) = ms->av[2];
    main_arena.last_remainder = 0;
    for (i = 1; i < NBINS; i++) {
        b = bin_at(&main_arena, i);
        if (ms->av[2*i+2] == 0) {
            assert(ms->av[2*i+3] == 0);
            first(b) = last(b) = b;
        } else {
            if (ms->version >= 3 &&
                (i < NSMALLBINS ||
                 (largebin_index(chunksize(ms->av[2*i+2])) == i &&
                  largebin_index(chunksize(ms->av[2*i+3])) == i))) {
                first(b) = ms->av[2*i+2];
                last(b) = ms->av[2*i+3];
                /* Make sure the links to the bins within the heap are correct. */
                first(b)->bk = b;
                last(b)->fd = b;
                /* Set bit in binblocks. */
                mark_bin(&main_arena, i);
            } else {
                /* Oops, index computation from chunksize must have changed.
                   Link the whole list into unsorted_chunks. */
                first(b) = last(b) = b;
                b = unsorted_chunks(&main_arena);
                ms->av[2*i+2]->bk = b;
                ms->av[2*i+3]->fd = b->fd;
                b->fd->bk = ms->av[2*i+3];
                b->fd = ms->av[2*i+2];
            }
        }
    }
    if (ms->version < 3) {
        /* Clear fd_nextsize and bk_nextsize fields. */
        b = unsorted_chunks(&main_arena)->fd;
        while (b != unsorted_chunks(&main_arena)) {
            if (!in_smallbin_range(chunksize(b))) {
                b->fd_nextsize = NULL;
                b->bk_nextsize = NULL;
            }
            b = b->fd;
        }
    }
    mp_.sbrk_base = ms->sbrk_base;
    main_arena.system_mem = ms->sbrked_mem_bytes;
    mp_.trim_threshold = ms->trim_threshold;
    mp_.top_pad = ms->top_pad;
    mp_.n_mmaps_max = ms->n_mmaps_max;
    mp_.mmap_threshold = ms->mmap_threshold;
    check_action = ms->check_action;
    main_arena.max_system_mem = ms->max_sbrked_mem;
    mp_.n_mmaps = ms->n_mmaps;
    mp_.max_n_mmaps = ms->max_n_mmaps;
    mp_.mmapped_mem = ms->mmapped_mem;
    mp_.max_mmapped_mem = ms->max_mmapped_mem;
    /* add version-dependent code here */
    if (ms->version >= 1) {
        /* Check whether it is safe to enable malloc checking, or whether
           it is necessary to disable it. */
        if (ms->using_malloc_checking && !using_malloc_checking &&
            !disallow_malloc_check)
            __malloc_check_init();
        else if (!ms->using_malloc_checking && using_malloc_checking) {
            __malloc_hook = NULL;
            __free_hook = NULL;
            __realloc_hook = NULL;
            __memalign_hook = NULL;
            using_malloc_checking = 0;
        }
    }
    if (ms->version >= 4) {
#ifdef PER_THREAD
        mp_.arena_test = ms->arena_test;
        mp_.arena_max = ms->arena_max;
        narenas = ms->narenas;
#endif
    }
    check_malloc_state(&main_arena);

    (void)mutex_unlock(&main_arena.mutex);
    return 0;
}
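/*
 * Usage sketch (hypothetical caller, kept out of the build with #if 0):
 * __malloc_set_state() is the restore half of glibc's malloc_get_state()/
 * malloc_set_state() pair, historically used by Emacs-style "unexec"
 * dumping.  A snapshot captured in one run can be re-installed in the next:
 */
#if 0
extern void *__malloc_get_state(void);
extern int __malloc_set_state(void *msptr);

static void snapshot_and_restore(void)
{
    void *state = __malloc_get_state();  /* heap-allocated arena snapshot */

    /* ... persist `state` together with the heap image ... */

    if (__malloc_set_state(state) != 0) {
        /* -1: bad magic, -2: incompatible major version (see above) */
    }
}
#endif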