static Void_t* pos_int_malloc(char *name, mstate av, size_t bytes)
{
  INTERNAL_SIZE_T nb;
  unsigned int idx;
  mbinptr bin;
  mchunkptr victim;
  INTERNAL_SIZE_T size;
  int victim_index;

  mchunkptr remainder;
  unsigned long remainder_size;

  unsigned int block;
  unsigned int bit;
  unsigned int map;

  mchunkptr fwd;
  mchunkptr bck;

  //const char *errstr = NULL;

  size_t pagemask = PAGESIZE - 1;

  // Round the request up to a 16-byte aligned chunk size.
  checked_request2size(bytes, nb);

#if MALLOC_DEBUG == 1
  printf("before fastbin\n");
#endif

  // 1. fast bin (<=144)
  /*
  if ((unsigned long)(nb) <= (unsigned long)(get_max_fast())) {
    idx = fastbin_index(nb);
    // printf("idx = %d\n", idx);
    mfastbinptr *fb = &fastbin(av, idx);
    victim = *fb;

    if (victim != 0) {
      if (fastbin_index(chunksize(victim)) != idx) {
        errstr = "malloc(): memory corruption (fast)";
      errout:
        malloc_printerr(check_action, errstr, chunk2mem(victim));
      }
#if CONSISTENCY == 1
      POS_WRITE_VAUE(name, (unsigned long *)fb, (unsigned long)victim->fd);
#else
      *fb = victim->fd;
#endif
      void *p = chunk2mem(victim);
      return p;
    }
  }
  */

  // 2. small bin (<=1008)
  if (in_smallbin_range(nb)) {
#if MALLOC_DEBUG == 1
    printf("inside smallbin if\n");
#endif
    idx = smallbin_index(nb);
    bin = bin_at(av, idx);
    victim = last(bin);

    if (victim != bin && victim != NULL) {
      bck = victim->bk;

      /*
      if (bck->fd != victim) {
        errstr = "malloc(): smallbin double linked list corrupted";
        goto errout;
      }
      */

#if CONSISTENCY == 1
      set_inuse_bit_at_offset_log(name, victim, nb);
      POS_WRITE_VAUE(name, (unsigned long *)&bin->bk, (unsigned long)bck);
      POS_WRITE_VAUE(name, (unsigned long *)&bin->fd, (unsigned long)bin);
#else
      set_inuse_bit_at_offset(victim, nb);
      bin->bk = bck;
      bck->fd = bin;
#endif

      void *p = chunk2mem(victim);
      return p;
    }
  }
  else {
    idx = largebin_index(nb);

    if (have_fastchunks(av)) {
      pos_malloc_consolidate(name, av);
    }
  }

#if MALLOC_DEBUG == 1
  printf("before unsorted bin\n");
#endif

  for (;;) {
    int iters = 0;

    // 3. unsorted bin
    while ((victim = unsorted_chunks(av)->bk) != unsorted_chunks(av)) {
      bck = victim->bk;

      /*
      if (victim->size <= 2 * SIZE_SZ || victim->size > av->system_mem)
        malloc_printerr(check_action, "malloc(): memory corruption", chunk2mem(victim));
      */

      size = chunksize(victim);

      // Split the last remainder if it is the only chunk in the unsorted bin
      // and is large enough for the request.
      if (in_smallbin_range(nb) &&
          bck == unsorted_chunks(av) &&
          victim == av->last_remainder &&
          (unsigned long)(size) > (unsigned long)(nb + MINSIZE)) {

        remainder_size = size - nb;
        remainder = chunk_at_offset(victim, nb);

#if CONSISTENCY == 1
        POS_WRITE_VAUE(name, (unsigned long *)&unsorted_chunks(av)->bk, (unsigned long)remainder);
        POS_WRITE_VAUE(name, (unsigned long *)&unsorted_chunks(av)->fd, (unsigned long)remainder);
        POS_WRITE_VAUE(name, (unsigned long *)&av->last_remainder, (unsigned long)remainder);
#else
        unsorted_chunks(av)->bk = unsorted_chunks(av)->fd = remainder;
        av->last_remainder = remainder;
#endif

        remainder->bk = remainder->fd = unsorted_chunks(av);
        if (!in_smallbin_range(remainder_size)) {
          remainder->fd_nextsize = NULL;
          remainder->bk_nextsize = NULL;
        }

        // The remainder doesn't need logging...
        if (chunk_is_last(victim))
          set_head(remainder, remainder_size | LAST_CHUNK | PREV_INUSE);
        else
          set_head(remainder, remainder_size | PREV_INUSE);

        // set PREV_INUSE flag..
#if CONSISTENCY == 1
        if (chunk_is_first(victim)) {
          set_head_log(name, victim, nb | FIRST_CHUNK | PREV_INUSE);
        } else {
          set_head_log(name, victim, nb | PREV_INUSE);
        }
        set_foot_log(name, remainder, remainder_size);
#else
        if (chunk_is_first(victim))
          set_head(victim, nb | FIRST_CHUNK | PREV_INUSE);
        else
          set_head(victim, nb | PREV_INUSE);
        set_foot(remainder, remainder_size);
#endif

        void *p = chunk2mem(victim);
        return p;
      }

      // Remove the chunk from the unsorted bin.
#if CONSISTENCY == 1
      POS_WRITE_VAUE(name, (unsigned long *)&unsorted_chunks(av)->bk, (unsigned long)bck);
      POS_WRITE_VAUE(name, (unsigned long *)&bck->fd, (unsigned long)unsorted_chunks(av));
#else
      unsorted_chunks(av)->bk = bck;
      bck->fd = unsorted_chunks(av);
#endif

      // Exact fit: take the chunk as-is.
      if (size == nb) {
#if CONSISTENCY == 1
        set_inuse_bit_at_offset_log(name, victim, size);
#else
        set_inuse_bit_at_offset(victim, size);
#endif
        void *p = chunk2mem(victim);
        return p;
      }

      // Otherwise place the chunk into its small or large bin.
      if (in_smallbin_range(size)) {
        victim_index = smallbin_index(size);
        bck = bin_at(av, victim_index);
        fwd = bck->fd;
      }
      else {
        victim_index = largebin_index(size);
        bck = bin_at(av, victim_index);
        fwd = bck->fd;

        if (fwd != bck) {
          size |= PREV_INUSE; // In order not to use chunksize()

          if ((unsigned long)(size) < (unsigned long)(bck->bk->size)) {
            fwd = bck;
            bck = bck->bk;

            // The victim came from the unsorted bin, where fd_nextsize is not needed,
            // so we don't leave a log for it. (No log is left for fd_nextsize below either..)
            victim->fd_nextsize = fwd->fd;
            victim->bk_nextsize = fwd->fd->bk_nextsize;
#if CONSISTENCY == 1
            POS_WRITE_VAUE(name, (unsigned long *)&fwd->fd->bk_nextsize, (unsigned long)victim);
            POS_WRITE_VAUE(name, (unsigned long *)&victim->bk_nextsize->fd_nextsize, (unsigned long)victim);
#else
            fwd->fd->bk_nextsize = victim->bk_nextsize->fd_nextsize = victim;
#endif
          }
          else {
            while ((unsigned long)size < fwd->size) {
              fwd = fwd->fd_nextsize;
            }

            if ((unsigned long)size == (unsigned long)fwd->size)
              fwd = fwd->fd;
            else {
              victim->fd_nextsize = fwd;
              victim->bk_nextsize = fwd->bk_nextsize;
#if CONSISTENCY == 1
              POS_WRITE_VAUE(name, (unsigned long *)&fwd->bk_nextsize, (unsigned long)victim);
              POS_WRITE_VAUE(name, (unsigned long *)&victim->bk_nextsize->fd_nextsize, (unsigned long)victim);
#else
              fwd->bk_nextsize = victim;
              victim->bk_nextsize->fd_nextsize = victim;
#endif
            }
            bck = fwd->bk;
          }
        }
        else
          victim->fd_nextsize = victim->bk_nextsize = victim;
      }

#if CONSISTENCY == 1
      mark_bin_log(name, av, victim_index);
      POS_WRITE_VAUE(name, (unsigned long *)&victim->bk, (unsigned long)bck);
      POS_WRITE_VAUE(name, (unsigned long *)&victim->fd, (unsigned long)fwd);
      POS_WRITE_VAUE(name, (unsigned long *)&fwd->bk, (unsigned long)victim);
      POS_WRITE_VAUE(name, (unsigned long *)&bck->fd, (unsigned long)victim);
#else
      mark_bin(av, victim_index);
      victim->bk = bck;
      victim->fd = fwd;
      fwd->bk = victim;
      bck->fd = victim;
#endif

#define MAX_ITERS 10000
      if (++iters >= MAX_ITERS)
        break;
    }

#if MALLOC_DEBUG == 1
    printf("before large bin\n");
#endif

    // 4. large bin (1024<=)
    if (!in_smallbin_range(nb)) {
      bin = bin_at(av, idx);

#if MALLOC_DEBUG == 1
      printf(" bin = [%p]\n", bin);
#endif

      // Take the smallest chunk that is still large enough.
      if ((victim = first(bin)) != bin &&
          (unsigned long)(victim->size) >= (unsigned long)(nb)) {

        victim = victim->bk_nextsize;
        while (((unsigned long)(size = chunksize(victim)) < (unsigned long)(nb)))
          victim = victim->bk_nextsize;

        // Avoid removing the first entry for a size so that the skip list
        // does not have to be rerouted.
        //if (victim != last(bin) && victim->size == victim->fd->size)
        if (victim != last(bin) && chunksize(victim) == chunksize(victim->fd))
          victim = victim->fd;

        remainder_size = size - nb;

#if CONSISTENCY == 1
        unlink_log(name, victim, bck, fwd);
#else
        unlink(victim, bck, fwd);
#endif

        if (remainder_size < MINSIZE) {
#if CONSISTENCY == 1
          set_inuse_bit_at_offset_log(name, victim, size);
#else
          set_inuse_bit_at_offset(victim, size);
#endif
        }
        else {
          remainder = chunk_at_offset(victim, nb);

#if CONSISTENCY == 1
          insert_to_unsorted_log(name, av, remainder, bck, fwd, remainder_size);
#else
          insert_to_unsorted(av, remainder, bck, fwd, remainder_size);
#endif

          // The remainder doesn't need logging...
          if (chunk_is_last(victim))
            set_head(remainder, remainder_size | LAST_CHUNK | PREV_INUSE);
          else
            set_head(remainder, remainder_size | PREV_INUSE);

          // set PREV_INUSE flag..
#if CONSISTENCY == 1
          if (chunk_is_first(victim)) {
            set_head_log(name, victim, nb | FIRST_CHUNK | PREV_INUSE);
          } else {
            set_head_log(name, victim, nb | PREV_INUSE);
          }
          set_foot_log(name, remainder, remainder_size);
#else
          if (chunk_is_first(victim))
            set_head(victim, nb | FIRST_CHUNK | PREV_INUSE);
          else
            set_head(victim, nb | PREV_INUSE);
          set_foot(remainder, remainder_size);
#endif
        }

        void *p = chunk2mem(victim);
        return p;
      }
    }

    // 5. scan the binmap for the next non-empty, larger bin
    ++idx;
    bin = bin_at(av, idx);
    block = idx2block(idx);
    map = av->binmap[block];
    bit = idx2bit(idx);

#if MALLOC_DEBUG == 1
    printf("DDD\n");
#endif

    for (;;) {
      // Skip the rest of this block if there are no more set bits in it.
      if (bit > map || bit == 0) {
        do {
          if (++block >= BINMAPSIZE) {
            goto new_alloc;
          }
        } while ((map = av->binmap[block]) == 0);

        bin = bin_at(av, (block << BINMAPSHIFT));
        bit = 1;
      }

      // Advance to the bin with the set bit.
      while ((bit & map) == 0) {
        bin = next_bin(bin);
        bit <<= 1;
      }

#if MALLOC_DEBUG == 1
      printf("before victim\n");
#endif

      victim = last(bin);

      // If the bin is actually empty, clear its bit and move on.
      if (victim == bin) {
#if MALLOC_DEBUG == 1
        printf("victim == bin\n");
#endif

#if CONSISTENCY == 1
        POS_WRITE_VAUE(name, (unsigned long *)&av->binmap[block], (unsigned long)(map & ~bit));
#else
        av->binmap[block] = map &= ~bit;
#endif

        bin = next_bin(bin);
        bit <<= 1;
      }
      else {
#if MALLOC_DEBUG == 1
        printf("victim != bin\n");
#endif

        size = chunksize(victim);
        remainder_size = size - nb;

#if CONSISTENCY == 1
        unlink_log(name, victim, bck, fwd);
#else
        unlink(victim, bck, fwd);
#endif

#if MALLOC_DEBUG == 1
        printf("unlink\n");
#endif

        if (remainder_size < MINSIZE) {
#if CONSISTENCY == 1
          set_inuse_bit_at_offset_log(name, victim, size);
#else
#if MALLOC_DEBUG == 1
          printf("D\n");
          printf("victim : %p\n", victim);
          printf("size: %lu\n", (unsigned long)size);
#endif
          set_inuse_bit_at_offset(victim, size);
#endif
        }
        else {
#if MALLOC_DEBUG == 1
          printf("remainder\n");
#endif

          remainder = chunk_at_offset(victim, nb);

#if CONSISTENCY == 1
          insert_to_unsorted_log(name, av, remainder, bck, fwd, remainder_size);
#else
#if MALLOC_DEBUG == 1
          printf("av=%p\n", av);
          printf("%p %p %p\n", remainder, bck, fwd);
          printf("%lu\n", remainder_size);
#endif
          insert_to_unsorted(av, remainder, bck, fwd, remainder_size);
#endif

#if MALLOC_DEBUG == 1
          printf("%p %p %p\n", remainder, bck, fwd);
          printf("s\n");
#endif

          if (in_smallbin_range(nb)) {
#if CONSISTENCY == 1
            POS_WRITE_VAUE(name, (unsigned long *)&av->last_remainder, (unsigned long)remainder);
#else
            av->last_remainder = remainder;
#endif
          }

#if MALLOC_DEBUG == 1
          printf("s\n");
#endif

          if (chunk_is_last(victim))
            set_head(remainder, remainder_size | LAST_CHUNK | PREV_INUSE);
          else
            set_head(remainder, remainder_size | PREV_INUSE);

          // set PREV_INUSE flag..
#if CONSISTENCY == 1
          if (chunk_is_first(victim)) {
            set_head_log(name, victim, nb | FIRST_CHUNK | PREV_INUSE);
          } else {
            set_head_log(name, victim, nb | PREV_INUSE);
          }

#if MALLOC_DEBUG == 1
          printf("s\n");
#endif
          set_foot_log(name, remainder, remainder_size);
#else
          if (chunk_is_first(victim))
            set_head(victim, nb | FIRST_CHUNK | PREV_INUSE);
          else
            set_head(victim, nb | PREV_INUSE);
          set_foot(remainder, remainder_size);
#endif
        }

#if MALLOC_DEBUG == 1
        printf("s\n");
#endif

        void *p = chunk2mem(victim);
        return p;
      }
    }

new_alloc:

#if MALLOC_DEBUG == 1
    printf("before new allocation\n");
#endif

    // 6. new allocation
    size = (nb + MINSIZE + 2*SIZE_SZ + pagemask) & ~pagemask;
    size += DEFAULT_PAD;

    //char *mm = (char *)(SEG_ALLOC(0, size, PROT_READ|PROT_WRITE, MAP_PRIVATE));
    char *mm = (char *)pos_seg_alloc(name, size);
    // NOTE: the segment is zeroed before the NULL check below; this assumes
    // pos_seg_alloc() does not return NULL on failure.
    memset(mm, 0, size);

#if MALLOC_DEBUG == 1
    printf(" mm = %p\n", mm);
#endif

#if CONSISTENCY == 1
    pos_log_insert_malloc_free(name, (unsigned long)mm, size);
#endif

    //if (mm != MAP_FAILED) {
    if (mm != (char *)0) {
#if CONSISTENCY == 1
      POS_WRITE_VAUE(name, (unsigned long *)&av->system_mem, (unsigned long)(av->system_mem + size));
#else
      av->system_mem += size;
#endif

      // printf("D!\n");

      mchunkptr p;
      p = (mchunkptr)mm;

      remainder_size = size - nb - 2*SIZE_SZ;
      remainder = chunk_at_offset(p, nb);

#if CONSISTENCY == 1
      insert_to_unsorted_log(name, av, remainder, bck, fwd, remainder_size);
#else
      insert_to_unsorted(av, remainder, bck, fwd, remainder_size);
#endif

      /*
      if (in_smallbin_range(nb))
        av->last_remainder = remainder;
      */

      // set PREV_INUSE flag..
      //#if CONSISTENCY == 1
      //#elseif
      set_head(p, nb | FIRST_CHUNK | PREV_INUSE);
      set_head(remainder, remainder_size | LAST_CHUNK | PREV_INUSE);
      set_foot(remainder, remainder_size);
      clear_inuse_bit_at_offset(remainder, remainder_size);
      //#endif

      //return p;
      return chunk2mem(p);
    }
    else
      return 0;
  }
}
// This function is equal to mspace_malloc,
// replacing PREACTION with 0 and POSTACTION with nothing.
void* mspace_malloc_lockless(mspace msp, size_t bytes)
{
  mstate ms = (mstate)msp;
  if (!ok_magic(ms)) {
    USAGE_ERROR_ACTION(ms, ms);
    return 0;
  }
  if (!0) { //PREACTION(ms)) {
    void* mem;
    size_t nb;

    if (bytes <= MAX_SMALL_REQUEST) {
      bindex_t idx;
      binmap_t smallbits;
      nb = (bytes < MIN_REQUEST) ? MIN_CHUNK_SIZE : pad_request(bytes);
      idx = small_index(nb);
      smallbits = ms->smallmap >> idx;

      if ((smallbits & 0x3U) != 0) { /* Remainderless fit to a smallbin. */
        mchunkptr b, p;
        idx += ~smallbits & 1;        /* Uses next bin if idx empty */
        b = smallbin_at(ms, idx);
        p = b->fd;
        assert(chunksize(p) == small_index2size(idx));
        unlink_first_small_chunk(ms, b, p, idx);
        set_inuse_and_pinuse(ms, p, small_index2size(idx));
        mem = chunk2mem(p);
        check_malloced_chunk(ms, mem, nb);
        goto postaction;
      }
      else if (nb > ms->dvsize) {
        if (smallbits != 0) { /* Use chunk in next nonempty smallbin */
          mchunkptr b, p, r;
          size_t rsize;
          bindex_t i;
          binmap_t leftbits = (smallbits << idx) & left_bits(idx2bit(idx));
          binmap_t leastbit = least_bit(leftbits);
          compute_bit2idx(leastbit, i);
          b = smallbin_at(ms, i);
          p = b->fd;
          assert(chunksize(p) == small_index2size(i));
          unlink_first_small_chunk(ms, b, p, i);
          rsize = small_index2size(i) - nb;
          /* Fit here cannot be remainderless if 4byte sizes */
          if (SIZE_T_SIZE != 4 && rsize < MIN_CHUNK_SIZE)
            set_inuse_and_pinuse(ms, p, small_index2size(i));
          else {
            set_size_and_pinuse_of_inuse_chunk(ms, p, nb);
            r = chunk_plus_offset(p, nb);
            set_size_and_pinuse_of_free_chunk(r, rsize);
            replace_dv(ms, r, rsize);
          }
          mem = chunk2mem(p);
          check_malloced_chunk(ms, mem, nb);
          goto postaction;
        }
        else if (ms->treemap != 0 && (mem = tmalloc_small(ms, nb)) != 0) {
          check_malloced_chunk(ms, mem, nb);
          goto postaction;
        }
      }
    }