//static void
void pos_malloc_init_state(char *name, mstate av)
{
    mchunkptr first_chunk;
    unsigned long first_size;
    mchunkptr last_chunk;
    unsigned long last_size;

    mchunkptr bck;
    mchunkptr fwd;

    int i;
    mbinptr bin;

    // init mutex key
    pthread_mutex_init(&av->mutex, NULL);

    // initialize malloc_state
#if CONSISTENCY == 1
    set_init_key_log(name, av);
#else
    set_init_key(av);
#endif

    // The code below doesn't need logging.
    for (i = 1; i < NBINS; i++) {
        bin = bin_at(av, i);
        bin->fd = bin->bk = bin;
    }

    //set_max_fast(DEFAULT_MXFAST);

    clear_fastchunks(av);
    for (i = 0; i < NFASTBINS; i++) {
        av->fastbinsY[i] = 0;
    }

    // first chunk
    first_chunk = chunk_at_offset(av, sizeof(struct malloc_state));
    first_size = (PAGESIZE - sizeof(struct malloc_state) - 2*SIZE_SZ) / 2; // 956
//#if CONSISTENCY == 1
    //first_size = (128*1024-1)*4096 + 960;      // 536867776
    //first_size = request2size(first_size);     // 536867792
//#else
    first_size = 960;
//#endif
    //insert_to_unsorted(av, first_chunk, bck, fwd, first_size);

    set_head(first_chunk, first_size | FIRST_CHUNK | PREV_INUSE);
    set_foot(first_chunk, first_size);
    clear_inuse_bit_at_offset(first_chunk, first_size);

    // last chunk
    last_chunk = chunk_at_offset(first_chunk, first_size);
    //last_size = first_size;
//#if CONSISTENCY == 1
    ////last_size = (128*1024)*4096 + 944;       // 536867760
    //last_size = (256*1024)*4096 - first_size - 2*SIZE_SZ;
    //last_size = request2size(last_size);       // 536874032
//#else
    // last_size = 988;
    last_size = 1800;
//#endif
    insert_to_unsorted(av, last_chunk, bck, fwd, last_size);

    set_head(last_chunk, last_size | LAST_CHUNK | PREV_INUSE);
    set_foot(last_chunk, last_size);
    clear_inuse_bit_at_offset(last_chunk, last_size);

    av->last_remainder = 0;
    for (i = 0; i < BINMAPSIZE; i++) {
        av->binmap[i] = 0;
    }
    av->system_mem = PAGESIZE;

    av->prime_obj = NULL;
}
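/*
 * Orientation sketch (not part of the original source): rough layout of the
 * first persistent page after pos_malloc_init_state() returns, as implied by
 * the code above.  Offsets depend on sizeof(struct malloc_state); the 960/1800
 * byte sizes are the hard-coded values of the non-CONSISTENCY branch.
 *
 *   base (av) --+-- struct malloc_state   (bins, binmap, system_mem, ...)
 *               +-- first chunk           (960 bytes,  FIRST_CHUNK | PREV_INUSE,
 *               |                          marked free but not binned)
 *               +-- last chunk            (1800 bytes, LAST_CHUNK | PREV_INUSE,
 *                                          linked into the unsorted bin)
 */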
//static void
void pos_int_free(char *name, mstate av, mchunkptr p, int flag)
{
    INTERNAL_SIZE_T size;
    mfastbinptr* fb;
    mchunkptr prevchunk;
    INTERNAL_SIZE_T prevsize;
    mchunkptr nextchunk;
    INTERNAL_SIZE_T nextsize;
    int nextinuse;
    mchunkptr bck;
    mchunkptr fwd;
    //const char *errstr = NULL;

    size = chunksize(p);

    /*if ((uintptr_t) p > (uintptr_t) -size || misaligned_chunk (p)) {
        errstr = "free(): invalid pointer";
    errout:
        //malloc_printerr (check_action, errstr, chunk2mem(p));
        return;
    }*/
    /*if (size < MINSIZE) {
        errstr = "free(): invalid size";
        goto errout;
    }*/

    //check_inuse_chunk(av, p);

    // fastbin
    if (flag == 1 && (unsigned long)(size) <= (unsigned long)(get_max_fast())) {
        /*if (chunk_at_offset (p, size)->size <= 2 * SIZE_SZ
            || chunksize (chunk_at_offset (p, size)) >= av->system_mem) {
            errstr = "free(): invalid next size (fast)";
            goto errout;
        }*/

#if CONSISTENCY == 1
        set_fastchunks_log(name, av);
#else
        set_fastchunks(av);
#endif
        fb = &fastbin(av, fastbin_index(size));

        if (*fb == p) {
            //errstr = "double free or corruption (fasttop)";
            //goto errout;
            return;
        }

#if CONSISTENCY == 1
        POS_WRITE_VAUE(name, (unsigned long *)&p->fd, (unsigned long)*fb);
        POS_WRITE_VAUE(name, (unsigned long *)fb, (unsigned long)p);
#else
        p->fd = *fb;
        *fb = p;
#endif
        return;
    }

    // 1. First chunk
    if (chunk_is_first(p)) {
        nextchunk = next_chunk(p);
        nextsize = chunksize(nextchunk);

        // 1-1. (free F), free L
        if (chunk_is_last(nextchunk) && !inuse(nextchunk)) {
            //if (av < p && p < (char *)(av+PAGESIZE)){
            if ((char*)av + sizeof(struct malloc_state) == (char*)p) {
#if CONSISTENCY == 1
                insert_to_unsorted_log(name, av, p, bck, fwd, size);
                set_foot_log(name, p, size);
                clear_inuse_bit_at_offset_log(name, p, size);
#else
                insert_to_unsorted(av, p, bck, fwd, size);
                set_foot(p, size);
                clear_inuse_bit_at_offset(p, size);
#endif
                goto out;
            } else {
#if CONSISTENCY == 1
                unlink_log(name, nextchunk, bck, fwd);
                size = size + nextsize + 2*SIZE_SZ;
                pos_log_insert_malloc_free(name, (unsigned long)p, size);
                //pos_seg_free(name, (void *)p, size);  // Delayed pos_seg_free
                POS_WRITE_VAUE(name, (unsigned long *)&av->system_mem, (unsigned long)(av->system_mem - size));
#else
                unlink(nextchunk, bck, fwd);
                size = size + nextsize + 2*SIZE_SZ;
                /*if (size%PAGESIZE != 0) {
                    errstr = "free(): unmapped size is not page size";
                    goto errout;
                }*/
                //FREE((char*)p, size);
                pos_seg_free(name, (void *)p, size);
                av->system_mem -= size;
#endif
                goto out;
            }
        }
        // 1-3. (free F), free M
        else if (!inuse(nextchunk)) {
#if CONSISTENCY == 1
            unlink_log(name, nextchunk, bck, fwd);
            size += nextsize;
            insert_to_unsorted_log(name, av, p, bck, fwd, size);
            set_head_log(name, p, size | FIRST_CHUNK | PREV_INUSE);
            set_foot_log(name, p, size);
#else
            unlink(nextchunk, bck, fwd);
            size += nextsize;
            insert_to_unsorted(av, p, bck, fwd, size);
            set_head(p, size | FIRST_CHUNK | PREV_INUSE);
            set_foot(p, size);
#endif
            goto out;
        }
        // 1-2. (free F), inuse L  &  1-4. (free F), inuse M
        else {
#if CONSISTENCY == 1
            insert_to_unsorted_log(name, av, p, bck, fwd, size);
            set_foot_log(name, p, size);
            clear_inuse_bit_at_offset_log(name, p, size);
#else
            insert_to_unsorted(av, p, bck, fwd, size);
            set_foot(p, size);
            clear_inuse_bit_at_offset(p, size);
#endif
            goto out;
        }
    }

    // 2. Last chunk
    else if (chunk_is_last(p)) {
        if (!prev_inuse(p)) {
            prevchunk = prev_chunk(p);
            prevsize = chunksize(prevchunk);

            // 2-1. free F, (free L)
            if (chunk_is_first(prevchunk)) {
                //if (av < prevchunk && prevchunk < av+PAGESIZE){
                if ((char*)av + sizeof(struct malloc_state) == (char*)prevchunk) {
#if CONSISTENCY == 1
                    insert_to_unsorted_log(name, av, p, bck, fwd, size);
                    set_foot_log(name, p, size);
                    clear_inuse_bit_at_offset_log(name, p, size);
#else
                    insert_to_unsorted(av, p, bck, fwd, size);
                    set_foot(p, size);
                    clear_inuse_bit_at_offset(p, size);
#endif
                    goto out;
                } else {
#if CONSISTENCY == 1
                    unlink_log(name, prevchunk, bck, fwd);
                    size = prevsize + size + 2*SIZE_SZ;
                    //pos_seg_free(name, (void *)p, size);
                    pos_log_insert_malloc_free(name, (unsigned long)p, size);
                    POS_WRITE_VAUE(name, (unsigned long *)&av->system_mem, (unsigned long)(av->system_mem - size));
#else
                    unlink(prevchunk, bck, fwd);
                    size = prevsize + size + 2*SIZE_SZ;
                    /*if (size%PAGESIZE != 0) {
                        errstr = "free(): unmapped size is not page size";
                        goto errout;
                    }*/
                    //FREE((char*)p, size);
                    pos_seg_free(name, (void *)p, size);
                    av->system_mem -= size;
#endif
                    goto out;
                }
            }
            // 2-3. free M, (free L)
            else {
#if CONSISTENCY == 1
                unlink_log(name, prevchunk, bck, fwd);
                size += prevsize;
                p = chunk_at_offset(p, -((long) prevsize));
                insert_to_unsorted_log(name, av, p, bck, fwd, size);
                set_head_log(name, p, size | LAST_CHUNK | PREV_INUSE);
                set_foot_log(name, p, size);
                clear_inuse_bit_at_offset_log(name, p, size);
#else
                unlink(prevchunk, bck, fwd);
                size += prevsize;
                p = chunk_at_offset(p, -((long) prevsize));
                insert_to_unsorted(av, p, bck, fwd, size);
                set_head(p, size | LAST_CHUNK | PREV_INUSE);
                set_foot(p, size);
                clear_inuse_bit_at_offset(p, size);
#endif
                goto out;
            }
        }
        // 2-2. inuse F, (free L)  &  2-4. inuse M, (free L)
        else {
#if CONSISTENCY == 1
            insert_to_unsorted_log(name, av, p, bck, fwd, size);
            set_foot_log(name, p, size);
            clear_inuse_bit_at_offset_log(name, p, size);
#else
            insert_to_unsorted(av, p, bck, fwd, size);
            set_foot(p, size);
            clear_inuse_bit_at_offset(p, size);
#endif
            goto out;
        }
    }

    // 3. Middle chunk
    else {
        nextchunk = next_chunk(p);
        nextsize = chunksize(nextchunk);

        if (!prev_inuse(p)) {
            prevchunk = prev_chunk(p);
            prevsize = chunksize(prevchunk);

            // 3-1. free F, (free M), free L
            if (chunk_is_first(prevchunk) && chunk_is_last(nextchunk) && !inuse(nextchunk)) {
                //if (av < prevchunk && prevchunk < av+PAGESIZE){
                if ((char*)av + sizeof(struct malloc_state) == (char*)prevchunk) {
#if CONSISTENCY == 1
                    unlink_log(name, prevchunk, bck, fwd);
                    size += prevsize;
                    p = chunk_at_offset(p, -((long) prevsize));
                    insert_to_unsorted_log(name, av, p, bck, fwd, size);
                    set_head_log(name, p, size | FIRST_CHUNK | PREV_INUSE);
                    set_foot_log(name, p, size);
                    clear_inuse_bit_at_offset_log(name, p, size);
#else
                    unlink(prevchunk, bck, fwd);
                    size += prevsize;
                    p = chunk_at_offset(p, -((long) prevsize));
                    insert_to_unsorted(av, p, bck, fwd, size);
                    set_head(p, size | FIRST_CHUNK | PREV_INUSE);
                    set_foot(p, size);
                    clear_inuse_bit_at_offset(p, size);
#endif
                    goto out;
                } else {
#if CONSISTENCY == 1
                    unlink_log(name, prevchunk, bck, fwd);
                    unlink_log(name, nextchunk, bck, fwd);
                    p = chunk_at_offset(p, -((long) prevsize));
                    size = prevsize + size + nextsize + 2*SIZE_SZ;
                    pos_log_insert_malloc_free(name, (unsigned long)p, size);
                    //pos_seg_free(name, (void *)p, size);
                    POS_WRITE_VAUE(name, (unsigned long *)&av->system_mem, (unsigned long)(av->system_mem - size));
#else
                    unlink(prevchunk, bck, fwd);
                    unlink(nextchunk, bck, fwd);
                    p = chunk_at_offset(p, -((long) prevsize));
                    size = prevsize + size + nextsize + 2*SIZE_SZ;
                    /*if (size%PAGESIZE != 0) {
                        errstr = "free(): unmapped size is not page size";
                        goto errout;
                    }*/
                    //FREE((char*)p, size);
                    pos_seg_free(name, (void *)p, size);
                    av->system_mem -= size;
#endif
                    goto out;
                }
            }

#if CONSISTENCY == 1
            unlink_log(name, prevchunk, bck, fwd);
#else
            unlink(prevchunk, bck, fwd);
#endif
            size += prevsize;
            p = chunk_at_offset(p, -((long) prevsize));

            if (chunk_is_first(prevchunk)) {
#if CONSISTENCY == 1
                set_head_log(name, p, size | FIRST_CHUNK | PREV_INUSE);
#else
                set_head(p, size | FIRST_CHUNK | PREV_INUSE);
                //set_foot(p, size);
                //clear_inuse_bit_at_offset(p, size);
#endif
            }
        }

        nextinuse = inuse_bit_at_offset(nextchunk, nextsize);

        if (!nextinuse) {
#if CONSISTENCY == 1
            unlink_log(name, nextchunk, bck, fwd);
#else
            unlink(nextchunk, bck, fwd);
#endif
            size += nextsize;
        }

#if CONSISTENCY == 1
        insert_to_unsorted_log(name, av, p, bck, fwd, size);

        if (chunk_is_first(p)) {
            set_head_log(name, p, size | FIRST_CHUNK | PREV_INUSE);
        } else if (chunk_is_last(nextchunk) && !nextinuse) {
            set_head_log(name, p, size | LAST_CHUNK | PREV_INUSE);
        } else {
            set_head_log(name, p, size | PREV_INUSE);
        }
        set_foot_log(name, p, size);
        clear_inuse_bit_at_offset_log(name, p, size);
#else
        //else
            //clear_inuse_bit_at_offset(nextchunk, 0);

        insert_to_unsorted(av, p, bck, fwd, size);

        if (chunk_is_first(p)) {
            set_head(p, size | FIRST_CHUNK | PREV_INUSE);
        } else if (chunk_is_last(nextchunk) && !nextinuse) {
            set_head(p, size | LAST_CHUNK | PREV_INUSE);
        } else {
            set_head(p, size | PREV_INUSE);
        }
        set_foot(p, size);
        clear_inuse_bit_at_offset(p, size);

        //check_free_chunk(av, p);
#endif
    }

out:
    if ((unsigned long)(size) >= FASTBIN_CONSOLIDATION_THRESHOLD && have_fastchunks(av)) {
        pos_malloc_consolidate(name, av);
    }
}
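/*
 * Illustrative reconstruction only (not compiled): insert_to_unsorted() is
 * defined elsewhere in this allocator.  Judging from the unsorted-bin linking
 * code in free() and _int_free() later in this listing, it is expected to
 * behave roughly like the sketch below, i.e. push the chunk at the head of
 * the unsorted bin and clear the nextsize links for large chunks.  The real
 * macro may differ in detail.
 */
#if 0
#define insert_to_unsorted(av, p, bck, fwd, size)   \
do {                                                \
    (bck) = unsorted_chunks(av);                    \
    (fwd) = (bck)->fd;                              \
    (p)->fd = (fwd);                                \
    (p)->bk = (bck);                                \
    if (!in_smallbin_range(size)) {                 \
        (p)->fd_nextsize = NULL;                    \
        (p)->bk_nextsize = NULL;                    \
    }                                               \
    (bck)->fd = (p);                                \
    (fwd)->bk = (p);                                \
} while (0)
#endif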
static Void_t* pos_int_malloc(char *name, mstate av, size_t bytes)
{
    INTERNAL_SIZE_T nb;
    unsigned int idx;
    mbinptr bin;
    mchunkptr victim;
    INTERNAL_SIZE_T size;
    int victim_index;
    mchunkptr remainder;
    unsigned long remainder_size;
    unsigned int block;
    unsigned int bit;
    unsigned int map;
    mchunkptr fwd;
    mchunkptr bck;
    //const char *errstr = NULL;

    size_t pagemask = PAGESIZE - 1;

    // align the request to 16-byte units
    checked_request2size(bytes, nb);

#if MALLOC_DEBUG == 1
    printf("before fastbin\n");
#endif

    // 1. fast bin (<=144)
    /*
    if ((unsigned long)(nb) <= (unsigned long)(get_max_fast())) {
        idx = fastbin_index(nb);
        // printf("idx = %d\n", idx);
        mfastbinptr* fb = &fastbin(av, idx);
        victim = *fb;

        if (victim != 0) {
            if (fastbin_index (chunksize (victim)) != idx) {
                errstr = "malloc(): memory corruption (fast)";
            errout:
                malloc_printerr (check_action, errstr, chunk2mem (victim));
            }
#if CONSISTENCY == 1
            POS_WRITE_VAUE(name, (unsigned long *)fb, (unsigned long)victim->fd);
#else
            *fb = victim->fd;
#endif
            void *p = chunk2mem(victim);
            return p;
        }
    }
    */

    // 2. small bin (<=1008)
    if (in_smallbin_range(nb)) {
#if MALLOC_DEBUG == 1
        printf("inside smallbin if\n");
#endif
        idx = smallbin_index(nb);
        bin = bin_at(av, idx);

        victim = last(bin);
        if (victim != bin && victim != NULL) {
            bck = victim->bk;
            /*if (bck->fd != victim) {
                errstr = "malloc(): smallbin double linked list corrupted";
                goto errout;
            }*/
#if CONSISTENCY == 1
            set_inuse_bit_at_offset_log(name, victim, nb);
            POS_WRITE_VAUE(name, (unsigned long *)&bin->bk, (unsigned long)bck);
            POS_WRITE_VAUE(name, (unsigned long *)&bin->fd, (unsigned long)bin);
#else
            set_inuse_bit_at_offset(victim, nb);
            bin->bk = bck;
            bck->fd = bin;
#endif
            void *p = chunk2mem(victim);
            return p;
        }
    } else {
        idx = largebin_index(nb);
        if (have_fastchunks(av)) {
            pos_malloc_consolidate(name, av);
        }
    }

#if MALLOC_DEBUG == 1
    printf("before unsorted bin\n");
#endif

    for (;;) {
        int iters = 0;

        // 3. unsorted bin
        while ((victim = unsorted_chunks(av)->bk) != unsorted_chunks(av)) {
            bck = victim->bk;
            /*if (victim->size <= 2 * SIZE_SZ || victim->size > av->system_mem)
                malloc_printerr (check_action, "malloc(): memory corruption", chunk2mem (victim));*/
            size = chunksize(victim);

            if (in_smallbin_range(nb) &&
                bck == unsorted_chunks(av) &&
                victim == av->last_remainder &&
                (unsigned long)(size) > (unsigned long)(nb + MINSIZE)) {

                remainder_size = size - nb;
                remainder = chunk_at_offset(victim, nb);

#if CONSISTENCY == 1
                POS_WRITE_VAUE(name, (unsigned long *)&unsorted_chunks(av)->bk, (unsigned long)remainder);
                POS_WRITE_VAUE(name, (unsigned long *)&unsorted_chunks(av)->fd, (unsigned long)remainder);
                POS_WRITE_VAUE(name, (unsigned long *)&av->last_remainder, (unsigned long)remainder);
#else
                unsorted_chunks(av)->bk = unsorted_chunks(av)->fd = remainder;
                av->last_remainder = remainder;
#endif
                remainder->bk = remainder->fd = unsorted_chunks(av);
                if (!in_smallbin_range(remainder_size)) {
                    remainder->fd_nextsize = NULL;
                    remainder->bk_nextsize = NULL;
                }

                // Remainder doesn't need logging...
                if (chunk_is_last(victim))
                    set_head(remainder, remainder_size | LAST_CHUNK | PREV_INUSE);
                else
                    set_head(remainder, remainder_size | PREV_INUSE);

                // set PREV_INUSE flag..
#if CONSISTENCY == 1
                if (chunk_is_first(victim)) {
                    set_head_log(name, victim, nb | FIRST_CHUNK | PREV_INUSE);
                } else {
                    set_head_log(name, victim, nb | PREV_INUSE);
                }
                set_foot_log(name, remainder, remainder_size);
#else
                if (chunk_is_first(victim))
                    set_head(victim, nb | FIRST_CHUNK | PREV_INUSE);
                else
                    set_head(victim, nb | PREV_INUSE);

                set_foot(remainder, remainder_size);
#endif
                void *p = chunk2mem(victim);
                return p;
            }

#if CONSISTENCY == 1
            POS_WRITE_VAUE(name, (unsigned long *)&unsorted_chunks(av)->bk, (unsigned long)bck);
            POS_WRITE_VAUE(name, (unsigned long *)&bck->fd, (unsigned long)unsorted_chunks(av));
#else
            unsorted_chunks(av)->bk = bck;
            bck->fd = unsorted_chunks(av);
#endif

            if (size == nb) {
#if CONSISTENCY == 1
                set_inuse_bit_at_offset_log(name, victim, size);
#else
                set_inuse_bit_at_offset(victim, size);
#endif
                void *p = chunk2mem(victim);
                return p;
            }

            if (in_smallbin_range(size)) {
                victim_index = smallbin_index(size);
                bck = bin_at(av, victim_index);
                fwd = bck->fd;
            } else {
                victim_index = largebin_index(size);
                bck = bin_at(av, victim_index);
                fwd = bck->fd;

                if (fwd != bck) {
                    size |= PREV_INUSE;  // so that chunksize() isn't needed for comparisons

                    if ((unsigned long)(size) < (unsigned long)(bck->bk->size)) {
                        fwd = bck;
                        bck = bck->bk;

                        // The victim came from the unsorted bin, where fd_nextsize isn't
                        // used, so we don't log it (nor the fd_nextsize updates below).
                        victim->fd_nextsize = fwd->fd;
                        victim->bk_nextsize = fwd->fd->bk_nextsize;
#if CONSISTENCY == 1
                        POS_WRITE_VAUE(name, (unsigned long *)&fwd->fd->bk_nextsize, (unsigned long)victim);
                        POS_WRITE_VAUE(name, (unsigned long *)&victim->bk_nextsize->fd_nextsize, (unsigned long)victim);
#else
                        fwd->fd->bk_nextsize = victim->bk_nextsize->fd_nextsize = victim;
#endif
                    } else {
                        while ((unsigned long) size < fwd->size) {
                            fwd = fwd->fd_nextsize;
                        }

                        if ((unsigned long) size == (unsigned long) fwd->size)
                            fwd = fwd->fd;
                        else {
                            victim->fd_nextsize = fwd;
                            victim->bk_nextsize = fwd->bk_nextsize;
#if CONSISTENCY == 1
                            POS_WRITE_VAUE(name, (unsigned long *)&fwd->bk_nextsize, (unsigned long)victim);
                            POS_WRITE_VAUE(name, (unsigned long *)&victim->bk_nextsize->fd_nextsize, (unsigned long)victim);
#else
                            fwd->bk_nextsize = victim;
                            victim->bk_nextsize->fd_nextsize = victim;
#endif
                        }
                        bck = fwd->bk;
                    }
                } else
                    victim->fd_nextsize = victim->bk_nextsize = victim;
            }

#if CONSISTENCY == 1
            mark_bin_log(name, av, victim_index);
            POS_WRITE_VAUE(name, (unsigned long *)&victim->bk, (unsigned long)bck);
            POS_WRITE_VAUE(name, (unsigned long *)&victim->fd, (unsigned long)fwd);
            POS_WRITE_VAUE(name, (unsigned long *)&fwd->bk, (unsigned long)victim);
            POS_WRITE_VAUE(name, (unsigned long *)&bck->fd, (unsigned long)victim);
#else
            mark_bin(av, victim_index);
            victim->bk = bck;
            victim->fd = fwd;
            fwd->bk = victim;
            bck->fd = victim;
#endif

#define MAX_ITERS 10000
            if (++iters >= MAX_ITERS)
                break;
        }

#if MALLOC_DEBUG == 1
        printf("before large bin\n");
#endif

        // 4. large bin (1024<=)
        if (!in_smallbin_range(nb)) {
            bin = bin_at(av, idx);
#if MALLOC_DEBUG
            printf(" bin = [%p]\n", bin);
#endif
            if ((victim = first(bin)) != bin &&
                (unsigned long)(victim->size) >= (unsigned long)(nb)) {

                victim = victim->bk_nextsize;
                while (((unsigned long)(size = chunksize(victim)) < (unsigned long)(nb)))
                    victim = victim->bk_nextsize;

                //if (victim != last(bin) && victim->size == victim->fd->size)
                if (victim != last(bin) && chunksize(victim) == chunksize(victim->fd))
                    victim = victim->fd;

                remainder_size = size - nb;
#if CONSISTENCY == 1
                unlink_log(name, victim, bck, fwd);
#else
                unlink(victim, bck, fwd);
#endif

                if (remainder_size < MINSIZE) {
#if CONSISTENCY == 1
                    set_inuse_bit_at_offset_log(name, victim, size);
#else
                    set_inuse_bit_at_offset(victim, size);
#endif
                } else {
                    remainder = chunk_at_offset(victim, nb);
#if CONSISTENCY == 1
                    insert_to_unsorted_log(name, av, remainder, bck, fwd, remainder_size);
#else
                    insert_to_unsorted(av, remainder, bck, fwd, remainder_size);
#endif
                    // Remainder doesn't need logging...
                    if (chunk_is_last(victim))
                        set_head(remainder, remainder_size | LAST_CHUNK | PREV_INUSE);
                    else
                        set_head(remainder, remainder_size | PREV_INUSE);

                    // set PREV_INUSE flag..
#if CONSISTENCY == 1
                    if (chunk_is_first(victim)) {
                        set_head_log(name, victim, nb | FIRST_CHUNK | PREV_INUSE);
                    } else {
                        set_head_log(name, victim, nb | PREV_INUSE);
                    }
                    set_foot_log(name, remainder, remainder_size);
#else
                    if (chunk_is_first(victim))
                        set_head(victim, nb | FIRST_CHUNK | PREV_INUSE);
                    else
                        set_head(victim, nb | PREV_INUSE);

                    set_foot(remainder, remainder_size);
#endif
                }

                void *p = chunk2mem(victim);
                return p;
            }
        }

        // 5. scan the binmap for the next non-empty, larger bin
        ++idx;
        bin = bin_at(av, idx);
        block = idx2block(idx);
        map = av->binmap[block];
        bit = idx2bit(idx);

#if MALLOC_DEBUG == 1
        printf("DDD\n");
#endif

        for (;;) {
            if (bit > map || bit == 0) {
                do {
                    if (++block >= BINMAPSIZE) {
                        goto new_alloc;
                    }
                } while ((map = av->binmap[block]) == 0);

                bin = bin_at(av, (block << BINMAPSHIFT));
                bit = 1;
            }

            while ((bit & map) == 0) {
                bin = next_bin(bin);
                bit <<= 1;
            }

#if MALLOC_DEBUG == 1
            printf("before victim\n");
#endif

            victim = last(bin);

            if (victim == bin) {
#if MALLOC_DEBUG == 1
                printf("victim == bin\n");
#endif
#if CONSISTENCY == 1
                POS_WRITE_VAUE(name, (unsigned long *)&av->binmap[block], (unsigned long)(map & ~bit));
#else
                av->binmap[block] = map &= ~bit;
#endif
                bin = next_bin(bin);
                bit <<= 1;
            } else {
#if MALLOC_DEBUG == 1
                printf("victim != bin\n");
#endif
                size = chunksize(victim);
                remainder_size = size - nb;

#if CONSISTENCY == 1
                unlink_log(name, victim, bck, fwd);
#else
                unlink(victim, bck, fwd);
#endif

#if MALLOC_DEBUG == 1
                printf("unlink\n");
#endif

                if (remainder_size < MINSIZE) {
#if CONSISTENCY == 1
                    set_inuse_bit_at_offset_log(name, victim, size);
#else
#if MALLOC_DEBUG == 1
                    printf("D\n");
                    printf("victim : %p\n", victim);
                    printf("size: %lu\n", (unsigned long)size);
#endif
                    set_inuse_bit_at_offset(victim, size);
#endif
                } else {
#if MALLOC_DEBUG == 1
                    printf("remainder\n");
#endif
                    remainder = chunk_at_offset(victim, nb);

#if CONSISTENCY == 1
                    insert_to_unsorted_log(name, av, remainder, bck, fwd, remainder_size);
#else
#if MALLOC_DEBUG == 1
                    printf("av=%p\n", av);
                    printf("%p %p %p\n", remainder, bck, fwd);
                    printf("%lu\n", remainder_size);
#endif
                    insert_to_unsorted(av, remainder, bck, fwd, remainder_size);
#endif

#if MALLOC_DEBUG == 1
                    printf("%p %p %p\n", remainder, bck, fwd);
                    printf("s\n");
#endif

                    if (in_smallbin_range(nb)) {
#if CONSISTENCY == 1
                        POS_WRITE_VAUE(name, (unsigned long *)&av->last_remainder, (unsigned long)remainder);
#else
                        av->last_remainder = remainder;
#endif
                    }

#if MALLOC_DEBUG == 1
                    printf("s\n");
#endif

                    if (chunk_is_last(victim))
                        set_head(remainder, remainder_size | LAST_CHUNK | PREV_INUSE);
                    else
                        set_head(remainder, remainder_size | PREV_INUSE);

                    // set PREV_INUSE flag..
#if CONSISTENCY == 1
                    if (chunk_is_first(victim)) {
                        set_head_log(name, victim, nb | FIRST_CHUNK | PREV_INUSE);
                    } else {
                        set_head_log(name, victim, nb | PREV_INUSE);
                    }
#if MALLOC_DEBUG == 1
                    printf("s\n");
#endif
                    set_foot_log(name, remainder, remainder_size);
#else
                    if (chunk_is_first(victim))
                        set_head(victim, nb | FIRST_CHUNK | PREV_INUSE);
                    else
                        set_head(victim, nb | PREV_INUSE);

                    set_foot(remainder, remainder_size);
#endif
                }

#if MALLOC_DEBUG == 1
                printf("s\n");
#endif

                void *p = chunk2mem(victim);
                return p;
            }
        }

new_alloc:

#if MALLOC_DEBUG == 1
        printf("before new allocation\n");
#endif

        // 6. new allocation
        size = (nb + MINSIZE + 2*SIZE_SZ + pagemask) & ~pagemask;
        size += DEFAULT_PAD;

        //char* mm = (char*)(SEG_ALLOC(0, size, PROT_READ|PROT_WRITE, MAP_PRIVATE));
        char *mm = (char *)pos_seg_alloc(name, size);
        memset(mm, 0, size);

#if MALLOC_DEBUG == 1
        printf(" mm = %p\n", mm);
#endif

#if CONSISTENCY == 1
        pos_log_insert_malloc_free(name, (unsigned long)mm, size);
#endif

        //if (mm != MAP_FAILED) {
        if (mm != (char *)0) {
#if CONSISTENCY == 1
            POS_WRITE_VAUE(name, (unsigned long *)&av->system_mem, (unsigned long)(av->system_mem + size));
#else
            av->system_mem += size;
#endif
            // printf("D!\n");

            mchunkptr p;

            p = (mchunkptr)mm;

            remainder_size = size - nb - 2*SIZE_SZ;
            remainder = chunk_at_offset(p, nb);

#if CONSISTENCY == 1
            insert_to_unsorted_log(name, av, remainder, bck, fwd, remainder_size);
#else
            insert_to_unsorted(av, remainder, bck, fwd, remainder_size);
#endif

            /*if (in_smallbin_range(nb))
                av->last_remainder = remainder;*/

            // set PREV_INUSE flag..
//#if CONSISTENCY == 1
//#elseif
            set_head(p, nb | FIRST_CHUNK | PREV_INUSE);
            set_head(remainder, remainder_size | LAST_CHUNK | PREV_INUSE);

            set_foot(remainder, remainder_size);
            clear_inuse_bit_at_offset(remainder, remainder_size);
//#endif

            //return p;
            return chunk2mem(p);
        } else
            return 0;
    }
}
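/*
 * Worked example (not compiled) of the page rounding in the "new allocation"
 * path above, before DEFAULT_PAD is added.  The constants below are
 * assumptions for a typical 64-bit build (PAGESIZE 4096, SIZE_SZ 8,
 * MINSIZE 32); the real values come from the allocator's headers.  Built as a
 * standalone program it prints "5000 -> 8192", i.e. the padded request is
 * rounded up to the next multiple of the page size.
 */
#if 0
#include <stdio.h>
int main(void)
{
    size_t PAGESIZE = 4096, SIZE_SZ = 8, MINSIZE = 32;
    size_t pagemask = PAGESIZE - 1;
    size_t nb = 5000;  /* already-aligned request size (after checked_request2size) */
    size_t size = (nb + MINSIZE + 2*SIZE_SZ + pagemask) & ~pagemask;
    printf("%zu -> %zu\n", nb, size);
    return 0;
}
#endif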
/* ------------------------------ free ------------------------------ */
void free(void* mem)
{
    mstate av;

    mchunkptr p;            /* chunk corresponding to mem */
    size_t size;            /* its size */
    mfastbinptr* fb;        /* associated fastbin */
    mchunkptr nextchunk;    /* next contiguous chunk */
    size_t nextsize;        /* its size */
    int nextinuse;          /* true if nextchunk is used */
    size_t prevsize;        /* size of previous contiguous chunk */
    mchunkptr bck;          /* misc temp for linking */
    mchunkptr fwd;          /* misc temp for linking */

    /* free(0) has no effect */
    if (mem == NULL)
        return;

    __MALLOC_LOCK;
    av = get_malloc_state();
    p = mem2chunk(mem);
    size = chunksize(p);

    check_inuse_chunk(p);

    /*
       If eligible, place chunk on a fastbin so it can be found
       and used quickly in malloc.
     */
    if ((unsigned long)(size) <= (unsigned long)(av->max_fast)

#if TRIM_FASTBINS
        /* If TRIM_FASTBINS set, don't place chunks
           bordering top into fastbins */
        && (chunk_at_offset(p, size) != av->top)
#endif
       ) {

        set_fastchunks(av);
        fb = &(av->fastbins[fastbin_index(size)]);
        p->fd = *fb;
        *fb = p;
    }

    /*
       Consolidate other non-mmapped chunks as they arrive.
     */
    else if (!chunk_is_mmapped(p)) {
        set_anychunks(av);

        nextchunk = chunk_at_offset(p, size);
        nextsize = chunksize(nextchunk);

        /* consolidate backward */
        if (!prev_inuse(p)) {
            prevsize = p->prev_size;
            size += prevsize;
            p = chunk_at_offset(p, -((long) prevsize));
            unlink(p, bck, fwd);
        }

        if (nextchunk != av->top) {
            /* get and clear inuse bit */
            nextinuse = inuse_bit_at_offset(nextchunk, nextsize);
            set_head(nextchunk, nextsize);

            /* consolidate forward */
            if (!nextinuse) {
                unlink(nextchunk, bck, fwd);
                size += nextsize;
            }

            /*
               Place the chunk in unsorted chunk list. Chunks are
               not placed into regular bins until after they have
               been given one chance to be used in malloc.
             */
            bck = unsorted_chunks(av);
            fwd = bck->fd;
            p->bk = bck;
            p->fd = fwd;
            bck->fd = p;
            fwd->bk = p;

            set_head(p, size | PREV_INUSE);
            set_foot(p, size);

            check_free_chunk(p);
        }

        /*
           If the chunk borders the current high end of memory,
           consolidate into top
         */
        else {
            size += nextsize;
            set_head(p, size | PREV_INUSE);
            av->top = p;
            check_chunk(p);
        }

        /*
           If freeing a large space, consolidate possibly-surrounding
           chunks. Then, if the total unused topmost memory exceeds trim
           threshold, ask malloc_trim to reduce top.

           Unless max_fast is 0, we don't know if there are fastbins
           bordering top, so we cannot tell for sure whether threshold
           has been reached unless fastbins are consolidated. But we
           don't want to consolidate on each free. As a compromise,
           consolidation is performed if FASTBIN_CONSOLIDATION_THRESHOLD
           is reached.
         */
        if ((unsigned long)(size) >= FASTBIN_CONSOLIDATION_THRESHOLD) {
            if (have_fastchunks(av))
                __malloc_consolidate(av);

            if ((unsigned long)(chunksize(av->top)) >=
                (unsigned long)(av->trim_threshold))
                __malloc_trim(av->top_pad, av);
        }
    }

    /*
       If the chunk was allocated via mmap, release via munmap()
       Note that if HAVE_MMAP is false but chunk_is_mmapped is true, then
       user must have overwritten memory. There's nothing we can do to
       catch this error unless DEBUG is set, in which case
       check_inuse_chunk (above) will have triggered error.
     */
    else {
        size_t offset = p->prev_size;
        av->n_mmaps--;
        av->mmapped_mem -= (size + offset);
        munmap((char*)p - offset, size + offset);
    }
    __MALLOC_UNLOCK;
}
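/*
 * Orientation sketch (not compiled, not a redefinition): the backward merge
 * above relies on the boundary-tag layout used throughout this file.
 * set_foot(p, s) stores a free chunk's size at its far end, which lands in
 * the prev_size field of the following chunk, so prev_chunk()/the prevsize
 * arithmetic can walk backwards.  The field layout below is the standard
 * dlmalloc-style one assumed by that code; names of the nextsize links used
 * for large bins are omitted.
 */
#if 0
struct malloc_chunk {
    size_t prev_size;          /* size of previous chunk, valid only if it is free */
    size_t size;               /* this chunk's size; low bits carry PREV_INUSE etc. */
    struct malloc_chunk *fd;   /* forward link, only meaningful while free */
    struct malloc_chunk *bk;   /* backward link, only meaningful while free */
};
#endif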
/* ------------------------- __malloc_consolidate -------------------------

  __malloc_consolidate is a specialized version of free() that tears down
  chunks held in fastbins.  Free itself cannot be used for this purpose
  since, among other things, it might place chunks back onto fastbins.  So,
  instead, we need to use a minor variant of the same code.

  Also, because this routine needs to be called the first time through
  malloc anyway, it turns out to be the perfect place to trigger
  initialization code.
*/
void attribute_hidden __malloc_consolidate(mstate av)
{
    mfastbinptr*    fb;              /* current fastbin being consolidated */
    mfastbinptr*    maxfb;           /* last fastbin (for loop control) */
    mchunkptr       p;               /* current chunk being consolidated */
    mchunkptr       nextp;           /* next chunk to consolidate */
    mchunkptr       unsorted_bin;    /* bin header */
    mchunkptr       first_unsorted;  /* chunk to link to */

    /* These have same use as in free() */
    mchunkptr       nextchunk;
    size_t          size;
    size_t          nextsize;
    size_t          prevsize;
    int             nextinuse;
    mchunkptr       bck;
    mchunkptr       fwd;

    /*
       If max_fast is 0, we know that av hasn't
       yet been initialized, in which case do so below
     */
    if (av->max_fast != 0) {
        clear_fastchunks(av);

        unsorted_bin = unsorted_chunks(av);

        /*
           Remove each chunk from fast bin and consolidate it, placing it
           then in unsorted bin. Among other reasons for doing this,
           placing in unsorted bin avoids needing to calculate actual bins
           until malloc is sure that chunks aren't immediately going to be
           reused anyway.
         */
        maxfb = &(av->fastbins[fastbin_index(av->max_fast)]);
        fb = &(av->fastbins[0]);
        do {
            if ((p = *fb) != 0) {
                *fb = 0;

                do {
                    check_inuse_chunk(p);
                    nextp = p->fd;

                    /* Slightly streamlined version of consolidation code in free() */
                    size = p->size & ~PREV_INUSE;
                    nextchunk = chunk_at_offset(p, size);
                    nextsize = chunksize(nextchunk);

                    if (!prev_inuse(p)) {
                        prevsize = p->prev_size;
                        size += prevsize;
                        p = chunk_at_offset(p, -((long) prevsize));
                        unlink(p, bck, fwd);
                    }

                    if (nextchunk != av->top) {
                        nextinuse = inuse_bit_at_offset(nextchunk, nextsize);
                        set_head(nextchunk, nextsize);

                        if (!nextinuse) {
                            size += nextsize;
                            unlink(nextchunk, bck, fwd);
                        }

                        first_unsorted = unsorted_bin->fd;
                        unsorted_bin->fd = p;
                        first_unsorted->bk = p;

                        set_head(p, size | PREV_INUSE);
                        p->bk = unsorted_bin;
                        p->fd = first_unsorted;
                        set_foot(p, size);
                    }

                    else {
                        size += nextsize;
                        set_head(p, size | PREV_INUSE);
                        av->top = p;
                    }

                } while ((p = nextp) != 0);
            }
        } while (fb++ != maxfb);
    }
    else {
        malloc_init_state(av);
        check_malloc_state();
    }
}
/* ------------------------- __malloc_consolidate -------------------------

  Modified variant of the routine above: it plays the same role (tearing down
  fastbin chunks and triggering first-time initialization), but the "borders
  top" check is made against the per-unit top of the allocation unit (ustate)
  that owns the chunk, and first-time initialization also sets up the shared
  channel heap.
*/
void attribute_hidden __malloc_consolidate(mstate av)
{
    mfastbinptr*    fb;              /* current fastbin being consolidated */
    mfastbinptr*    maxfb;           /* last fastbin (for loop control) */
    mchunkptr       p;               /* current chunk being consolidated */
    mchunkptr       nextp;           /* next chunk to consolidate */
    mchunkptr       unsorted_bin;    /* bin header */
    mchunkptr       first_unsorted;  /* chunk to link to */
    ustate          unit;            /* allocation unit that owns p */

    /* These have same use as in free() */
    mchunkptr       nextchunk;
    size_t          size;
    size_t          nextsize;
    size_t          prevsize;
    int             nextinuse;
    mchunkptr       bck;
    mchunkptr       fwd;

    /*
       If max_fast is 0, we know that av hasn't
       yet been initialized, in which case do so below
     */
    if (av->max_fast != 0) {
        clear_fastchunks(av);

        unsorted_bin = unsorted_chunks(av);

        /*
           Remove each chunk from fast bin and consolidate it, placing it
           then in unsorted bin. Among other reasons for doing this,
           placing in unsorted bin avoids needing to calculate actual bins
           until malloc is sure that chunks aren't immediately going to be
           reused anyway.
         */
        maxfb = &(av->fastbins[fastbin_index(av->max_fast)]);
        fb = &(av->fastbins[0]);
        do {
            if ((p = *fb) != 0) {
                *fb = 0;

                do {
                    check_inuse_chunk(p);
                    nextp = p->fd;

                    /* Slightly streamlined version of consolidation code in free() */
                    size = p->size & ~PREV_INUSE;
                    nextchunk = chunk_at_offset(p, size);
                    nextsize = chunksize(nextchunk);

                    if (!prev_inuse(p)) {
                        prevsize = p->prev_size;
                        size += prevsize;
                        p = chunk_at_offset(p, -((long) prevsize));
                        unlink(p, bck, fwd);
                    }

                    unit = lookup_ustate_by_mem((void*)p);
                    if (nextchunk != unit->unit_top) {
                        nextinuse = inuse_bit_at_offset(nextchunk, nextsize);
                        set_head(nextchunk, nextsize);

                        if (!nextinuse) {
                            size += nextsize;
                            unlink(nextchunk, bck, fwd);
                        }

                        first_unsorted = unsorted_bin->fd;
                        unsorted_bin->fd = p;
                        first_unsorted->bk = p;

                        set_head(p, size | PREV_INUSE);
                        p->bk = unsorted_bin;
                        p->fd = first_unsorted;
                        set_foot(p, size);
                    }

                    else {
                        size += nextsize;
                        set_head(p, size | PREV_INUSE);
                        unit->unit_top = p;
                    }

                } while ((p = nextp) != 0);
            }
        } while (fb++ != maxfb);
    }
    else {
        if (get_abstate()->mstate_list.num == 0) {
            // initialize abheap state
            init_linked_list(&(get_abstate()->mstate_list));
            init_linked_list(&(get_abstate()->ustate_list));
            init_linked_list(&(get_abstate()->mmapped_ustate_list));

            get_abstate()->ab_top = (mchunkptr)(CHANNEL_ADDR);

            // allocate channel heap space
            mmap((void *) CHANNEL_ADDR, CHANNEL_SIZE, PROT_READ|PROT_WRITE,
                 MAP_ANONYMOUS|MAP_FIXED|MAP_SHARED, -1, 0);
            touch_mem((void *)CHANNEL_ADDR, CHANNEL_SIZE);
        }
        malloc_init_state(av);
        check_malloc_state();
    }
}
static void
_int_free (mstate av, mchunkptr p, int have_lock)
{
    ...
    else if (!chunk_is_mmapped(p)) {
        ...
        nextchunk = chunk_at_offset(p, size);
        ...
        nextsize = chunksize(nextchunk);
        ...
        /* consolidate backward */
        if (!prev_inuse(p)) {
            prevsize = p->prev_size;
            size += prevsize;
            p = chunk_at_offset(p, -((long) prevsize));
            unlink(p, bck, fwd);
        }

        if (nextchunk != av->top) {
            /* get and clear inuse bit */
            nextinuse = inuse_bit_at_offset(nextchunk, nextsize);

            /* consolidate forward */
            if (!nextinuse) {
                unlink(nextchunk, bck, fwd);
                size += nextsize;
            } else
                clear_inuse_bit_at_offset(nextchunk, 0);

            /*
               Place the chunk in unsorted chunk list. Chunks are
               not placed into regular bins until after they have
               been given one chance to be used in malloc.
             */
            bck = unsorted_chunks(av);
            fwd = bck->fd;
            if (__glibc_unlikely (fwd->bk != bck)) {
                errstr = "free(): corrupted unsorted chunks";
                goto errout;
            }
            p->fd = fwd;
            p->bk = bck;
            if (!in_smallbin_range(size)) {
                p->fd_nextsize = NULL;
                p->bk_nextsize = NULL;
            }
            bck->fd = p;
            fwd->bk = p;

            set_head(p, size | PREV_INUSE);
            set_foot(p, size);

            check_free_chunk(av, p);
        }

        /*
           If the chunk borders the current high end of memory,
           consolidate into top
         */
        else {
            size += nextsize;
            set_head(p, size | PREV_INUSE);
            av->top = p;
            check_chunk(av, p);
        }
        ...
    }
    ...
}