void RageSound_DSound_Software::MixerThread()
{
	if( !SetThreadPriority(GetCurrentThread(), THREAD_PRIORITY_TIME_CRITICAL) )
		if( !SetThreadPriority(GetCurrentThread(), THREAD_PRIORITY_ABOVE_NORMAL) )
			LOG->Warn(werr_ssprintf(GetLastError(), "Failed to set sound thread priority"));

	while( !shutdown_mixer_thread )
	{
		char *locked_buf;
		unsigned len;
		const int64_t play_pos = pcm->GetOutputPosition(); /* must be called before get_output_buf */

		if( !pcm->get_output_buf(&locked_buf, &len, chunksize()) )
		{
			Sleep( chunksize()*1000 / samplerate );
			continue;
		}

		this->Mix( (int16_t *) locked_buf, len/bytes_per_frame, play_pos, pcm->GetPosition() );

		pcm->release_output_buf(locked_buf, len);
	}

	/* I'm not sure why, but if we don't stop the stream now, then the thread will take
	 * 90ms (our buffer size) longer to close. */
	pcm->Stop();
}
Example #2
/* libc_hidden_proto(mallinfo) */
struct mallinfo mallinfo(void)
{
    mstate av;
    struct mallinfo mi;
    unsigned int i;
    mbinptr b;
    mchunkptr p;
    size_t avail;
    size_t fastavail;
    int nblocks;
    int nfastblocks;

    __MALLOC_LOCK;
    av = get_malloc_state();
    /* Ensure initialization */
    if (av->top == 0)  {
	__malloc_consolidate(av);
    }

    check_malloc_state();

    /* Account for top */
    avail = chunksize(av->top);
    nblocks = 1;  /* top always exists */

    /* traverse fastbins */
    nfastblocks = 0;
    fastavail = 0;

    for (i = 0; i < NFASTBINS; ++i) {
	for (p = av->fastbins[i]; p != 0; p = p->fd) {
	    ++nfastblocks;
	    fastavail += chunksize(p);
	}
    }

    avail += fastavail;

    /* traverse regular bins */
    for (i = 1; i < NBINS; ++i) {
	b = bin_at(av, i);
	for (p = last(b); p != b; p = p->bk) {
	    ++nblocks;
	    avail += chunksize(p);
	}
    }

    mi.smblks = nfastblocks;
    mi.ordblks = nblocks;
    mi.fordblks = avail;
    mi.uordblks = av->sbrked_mem - avail;
    mi.arena = av->sbrked_mem;
    mi.hblks = av->n_mmaps;
    mi.hblkhd = av->mmapped_mem;
    mi.fsmblks = fastavail;
    mi.keepcost = chunksize(av->top);
    mi.usmblks = av->max_total_mem;
    __MALLOC_UNLOCK;
    return mi;
}
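A minimal caller sketch for context, assuming the usual <malloc.h> declaration of struct mallinfo (the field meanings follow the assignments above; values are ints in the classic API):

#include <malloc.h>
#include <stdio.h>
#include <stdlib.h>

int main(void)
{
    void *p = malloc(1000);                     /* create some arena activity */
    struct mallinfo mi = mallinfo();
    printf("arena (sbrked bytes):    %d\n", mi.arena);
    printf("free bytes (fordblks):   %d\n", mi.fordblks);
    printf("in-use bytes (uordblks): %d\n", mi.uordblks);
    printf("mmapped regions (hblks): %d\n", mi.hblks);
    free(p);
    return 0;
}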
Example #3
//five consecutive chunks allocated
void  five_malloc() {
    void *p = get_heap_base();
    Busy_Header *b_1 = malloc(100);
    assert_addr_equal(b_1, p);   //b_1 is at the start of heap
    assert_equal(chunksize(b_1), request2size(100));
    Busy_Header *b_2 = malloc(200);
    Busy_Header *b1_next = find_next(b_1);
    assert_addr_equal(b1_next,b_2);     //b_2 is after b_1
    assert_equal(chunksize(b_2), request2size(200));
    Busy_Header *b_3 = malloc(300);
    Busy_Header *b2_next = find_next(b_2);
    assert_addr_equal(b2_next,b_3);     //b_3 is after b_2
    assert_equal(chunksize(b_3), request2size(300));
    Busy_Header *b_4 = malloc(400);
    Busy_Header *b3_next = find_next(b_3);
    assert_addr_equal(b3_next,b_4);     //b_4 is after b_3
    assert_equal(chunksize(b_4), request2size(400));
    Busy_Header *b_5 = malloc(500);
    Busy_Header *b4_next = find_next(b_4);
    assert_addr_equal(b4_next,b_5);     //b_5 is after b_4
    assert_equal(chunksize(b_5), request2size(500));

    Heap_Info info = verify_heap();
    assert_equal(info.busy, 5);
    assert_equal(info.free, 1);
    assert_equal(info.free_size, HEAP_SIZE - info.busy_size);
}
Example #4
static int
internal_function
heap_trim(heap_info *heap, size_t pad)
{
  mstate ar_ptr = heap->ar_ptr;
  unsigned long pagesz = GLRO(dl_pagesize);
  mchunkptr top_chunk = top(ar_ptr), p, bck, fwd;
  heap_info *prev_heap;
  long new_size, top_size, extra, prev_size, misalign;

  /* Can this heap go away completely? */
  while(top_chunk == chunk_at_offset(heap, sizeof(*heap))) {
    prev_heap = heap->prev;
    prev_size = prev_heap->size - (MINSIZE-2*SIZE_SZ);
    p = chunk_at_offset(prev_heap, prev_size);
    /* fencepost must be properly aligned.  */
    misalign = ((long) p) & MALLOC_ALIGN_MASK;
    p = chunk_at_offset(prev_heap, prev_size - misalign);
    assert(p->size == (0|PREV_INUSE)); /* must be fencepost */
    p = prev_chunk(p);
    new_size = chunksize(p) + (MINSIZE-2*SIZE_SZ) + misalign;
    assert(new_size>0 && new_size<(long)(2*MINSIZE));
    if(!prev_inuse(p))
      new_size += p->prev_size;
    assert(new_size>0 && new_size<HEAP_MAX_SIZE);
    if(new_size + (HEAP_MAX_SIZE - prev_heap->size) < pad + MINSIZE + pagesz)
      break;
    ar_ptr->system_mem -= heap->size;
    arena_mem -= heap->size;
    delete_heap(heap);
    heap = prev_heap;
    if(!prev_inuse(p)) { /* consolidate backward */
      p = prev_chunk(p);
      unlink(p, bck, fwd);
    }
    assert(((unsigned long)((char*)p + new_size) & (pagesz-1)) == 0);
    assert( ((char*)p + new_size) == ((char*)heap + heap->size) );
    top(ar_ptr) = top_chunk = p;
    set_head(top_chunk, new_size | PREV_INUSE);
    /*check_chunk(ar_ptr, top_chunk);*/
  }
  top_size = chunksize(top_chunk);
  extra = (top_size - pad - MINSIZE - 1) & ~(pagesz - 1);
  if(extra < (long)pagesz)
    return 0;
  /* Try to shrink. */
  if(shrink_heap(heap, extra) != 0)
    return 0;
  ar_ptr->system_mem -= extra;
  arena_mem -= extra;

  /* Success. Adjust top accordingly. */
  set_head(top_chunk, (top_size - extra) | PREV_INUSE);
  /*check_chunk(ar_ptr, top_chunk);*/
  return 1;
}
Example #5
static mchunkptr
internal_function
mem2chunk_check(void* mem, unsigned char **magic_p)
{
  mchunkptr p;
  INTERNAL_SIZE_T sz, c;
  unsigned char magic;

  if(!aligned_OK(mem)) return NULL;
  p = mem2chunk(mem);
  if (!chunk_is_mmapped(p)) {
    /* Must be a chunk in conventional heap memory. */
    int contig = contiguous(&main_arena);
    sz = chunksize(p);
    if((contig &&
	((char*)p<mp_.sbrk_base ||
	 ((char*)p + sz)>=(mp_.sbrk_base+main_arena.system_mem) )) ||
       sz<MINSIZE || sz&MALLOC_ALIGN_MASK || !inuse(p) ||
       ( !prev_inuse(p) && (p->prev_size&MALLOC_ALIGN_MASK ||
			    (contig && (char*)prev_chunk(p)<mp_.sbrk_base) ||
			    next_chunk(prev_chunk(p))!=p) ))
      return NULL;
    magic = MAGICBYTE(p);
    for(sz += SIZE_SZ-1; (c = ((unsigned char*)p)[sz]) != magic; sz -= c) {
      if(c<=0 || sz<(c+2*SIZE_SZ)) return NULL;
    }
  } else {
    unsigned long offset, page_mask = GLRO(dl_pagesize)-1;

    /* mmap()ed chunks have MALLOC_ALIGNMENT or higher power-of-two
       alignment relative to the beginning of a page.  Check this
       first. */
    offset = (unsigned long)mem & page_mask;
    if((offset!=MALLOC_ALIGNMENT && offset!=0 && offset!=0x10 &&
	offset!=0x20 && offset!=0x40 && offset!=0x80 && offset!=0x100 &&
	offset!=0x200 && offset!=0x400 && offset!=0x800 && offset!=0x1000 &&
	offset<0x2000) ||
       !chunk_is_mmapped(p) || (p->size & PREV_INUSE) ||
       ( (((unsigned long)p - p->prev_size) & page_mask) != 0 ) ||
       ( (sz = chunksize(p)), ((p->prev_size + sz) & page_mask) != 0 ) )
      return NULL;
    magic = MAGICBYTE(p);
    for(sz -= 1; (c = ((unsigned char*)p)[sz]) != magic; sz -= c) {
      if(c<=0 || sz<(c+2*SIZE_SZ)) return NULL;
    }
  }
  ((unsigned char*)p)[sz] ^= 0xFF;
  if (magic_p)
    *magic_p = (unsigned char *)p + sz;
  return p;
}
Example #6
void bin_malloc_free_malloc() {  // test bin malloc and free
    void *p = malloc(99); // should split heap into two chunks
    assert_addr_not_equal(p, NULL);
    Free_Header *freelist = get_heap_freelist();
    Busy_Header *heap = get_heap_base();
    assert_addr_not_equal(freelist, heap);
    // check 1st chunk
    assert_equal(p, heap);
    assert_equal(chunksize(p), request2size(99));
    // check 2nd chunk
    assert_equal(freelist->size, HEAP_SIZE-request2size(99));
    assert_addr_equal(freelist->next, NULL);

    Free_Header *freelist1 = get_bin_freelist(99);// freelist of bin should be NULL
    free(p);                                                       // free to bin
    Free_Header *freelist2 = get_bin_freelist(99);// freelist of bin should have one element p
    assert_addr_not_equal(freelist1,freelist2);
    void *p1 = malloc(99);                                        // this malloc should be from bin
    assert_addr_not_equal(p1, NULL);
    Free_Header *freelist3 = get_bin_freelist(99); // freelist of bin should be NULL
    assert_addr_equal(freelist3,NULL);

    free(p1);
    Free_Header *freelist4 = get_bin_freelist(99); // freelist should have one element p1
    assert_addr_equal(freelist2,freelist4);
}
Example #7
static void *
internal_function
mem2mem_check (void *ptr, size_t sz)
{
  mchunkptr p;
  unsigned char *m_ptr = ptr;
  size_t i;

  if (!ptr)
    return ptr;

  p = mem2chunk (ptr);
  for (i = chunksize (p) - (chunk_is_mmapped (p) ? 2 * SIZE_SZ + 1 : SIZE_SZ + 1);
       i > sz;
       i -= 0xFF)
    {
      if (i - sz < 0x100)
        {
          m_ptr[i] = (unsigned char) (i - sz);
          break;
        }
      m_ptr[i] = 0xFF;
    }
  m_ptr[sz] = MAGICBYTE (p);
  return (void *) m_ptr;
}
Example #8
RageSound_DSound_Software::RageSound_DSound_Software()
{
	shutdown_mixer_thread = false;
	pcm = NULL;

	/* If we're emulated, we're better off with the WaveOut driver; DS
	 * emulation tends to be desynced. */
	if( ds.IsEmulated() )
		RageException::ThrowNonfatal( "Driver unusable (emulated device)" );

	max_writeahead = safe_writeahead;
	if( PREFSMAN->m_iSoundWriteAhead )
		max_writeahead = PREFSMAN->m_iSoundWriteAhead;

	/* Create a DirectSound stream, but don't force it into hardware. */
	pcm = new DSoundBuf( ds, DSoundBuf::HW_DONT_CARE, channels, samplerate, 16, max_writeahead );

	/* Fill a buffer before we start playing, so we don't play whatever junk is
	 * in the buffer. */
	char *locked_buf;
	unsigned len;
	while( pcm->get_output_buf(&locked_buf, &len, chunksize()) )
	{
		memset( locked_buf, 0, len );
		pcm->release_output_buf(locked_buf, len);
	}

	StartDecodeThread();

	/* Start playing. */
	pcm->Play();

	MixingThread.SetName("Mixer thread");
	MixingThread.Create( MixerThread_start, this );
}
Example #9
/* A direct copy of dlmalloc_usable_size(),
 * which isn't compiled in when ONLY_MSPACES is set.
 * The mspace parameter isn't actually necessary,
 * but we include it to be consistent with the
 * rest of the mspace_*() functions.
 */
size_t mspace_usable_size(mspace _unused, const void* mem) {
  if (mem != 0) {
    const mchunkptr p = mem2chunk(mem);
    if (cinuse(p))
      return chunksize(p) - overhead_for(p);
  }
  return 0;
}
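A hedged usage sketch, assuming dlmalloc's mspace API as built with ONLY_MSPACES (prototypes repeated here for self-containment, following the two-argument mspace_usable_size defined above; the build's own header is authoritative):

#include <assert.h>
#include <stddef.h>

typedef void *mspace;                 /* as in dlmalloc */
mspace create_mspace(size_t capacity, int locked);
void  *mspace_malloc(mspace msp, size_t bytes);
void   mspace_free(mspace msp, void *mem);
size_t mspace_usable_size(mspace msp, const void *mem);
size_t destroy_mspace(mspace msp);

int main(void)
{
    mspace ms = create_mspace(0, 0);  /* default capacity, no locking */
    void *p = mspace_malloc(ms, 100);
    /* usable size is at least what was requested */
    assert(mspace_usable_size(ms, p) >= 100);
    mspace_free(ms, p);
    destroy_mspace(ms);
    return 0;
}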
Example #10
std::string ocstream::str() const
{
  std::string ret;
  ret.reserve(size());
  for (unsigned n = 0; n < chunkcount(); ++n)
    ret.append(chunk(n), chunksize(n));
  return ret;
}
Example #11
void malloc_then_free() {
	one_malloc();
	void *p = get_heap_base(); // should be allocated chunk
	Free_Header *freelist0 = get_freelist();
	free(p);
	Free_Header *freelist1 = get_freelist();
	// allocated chunk is freed and becomes head of new freelist
	assert_addr_equal(freelist1, p);
	assert_addr_equal(freelist1, get_heap_base());
	assert_addr_not_equal(freelist0, freelist1);
	assert_equal(chunksize(freelist1) + chunksize(freelist1->next), HEAP_SIZE);
	Heap_Info info = verify_heap();
	assert_equal(info.busy, 0);
	assert_equal(info.busy_size, 0);
	assert_equal(info.free, 1); // 1 free chunk after merging
	assert_equal(info.free_size, HEAP_SIZE);
}
Example #12
size_t extmem_get_mem_size(unsigned long pgoff)
{
    void * va = (void *)get_virt_from_mspace(pgoff << PAGE_SHIFT);
    mchunkptr p  = mem2chunk(va);
    size_t psize = chunksize(p) - TWO_SIZE_T_SIZES;

    extmem_printk("[EXT_MEM] %s size: 0x%x\n", __FUNCTION__, psize);
    return psize;
}
Example #13
size_t
public_mUSABLe(void* mem)
{
  if (mem != 0) {
    mchunkptr p = mem2chunk(mem);
    if (cinuse(p))
      return chunksize(p) - overhead_for(p);
  }
  return 0;
}
Example #14
static int
internal_function
top_check(void)
{
  mchunkptr t = top(&main_arena);
  char* brk, * new_brk;
  INTERNAL_SIZE_T front_misalign, sbrk_size;
  unsigned long pagesz = GLRO(dl_pagesize);

  if (t == initial_top(&main_arena) ||
      (!chunk_is_mmapped(t) &&
       chunksize(t)>=MINSIZE &&
       prev_inuse(t) &&
       (!contiguous(&main_arena) ||
	(char*)t + chunksize(t) == mp_.sbrk_base + main_arena.system_mem)))
    return 0;

  malloc_printerr (check_action, "malloc: top chunk is corrupt", t);

  /* Try to set up a new top chunk. */
  brk = MORECORE(0);
  front_misalign = (unsigned long)chunk2mem(brk) & MALLOC_ALIGN_MASK;
  if (front_misalign > 0)
    front_misalign = MALLOC_ALIGNMENT - front_misalign;
  sbrk_size = front_misalign + mp_.top_pad + MINSIZE;
  sbrk_size += pagesz - ((unsigned long)(brk + sbrk_size) & (pagesz - 1));
  new_brk = (char*)(MORECORE (sbrk_size));
  if (new_brk == (char*)(MORECORE_FAILURE))
    {
      __set_errno (ENOMEM);
      return -1;
    }
  /* Call the `morecore' hook if necessary.  */
  void (*hook) (void) = force_reg (__after_morecore_hook);
  if (hook)
    (*hook) ();
  main_arena.system_mem = (new_brk - mp_.sbrk_base) + sbrk_size;

  top(&main_arena) = (mchunkptr)(brk + front_misalign);
  set_head(top(&main_arena), (sbrk_size - front_misalign) | PREV_INUSE);

  return 0;
}
Example #15
/* ------------------------- __malloc_trim -------------------------
   __malloc_trim is an inverse of sorts to __malloc_alloc.  It gives memory
   back to the system (via negative arguments to sbrk) if there is unused
   memory at the `high' end of the malloc pool. It is called automatically by
   free() when top space exceeds the trim threshold. It is also called by the
   public malloc_trim routine.  It returns 1 if it actually released any
   memory, else 0.
*/
static int __malloc_trim(size_t pad, mstate av)
{
    long  top_size;        /* Amount of top-most memory */
    long  extra;           /* Amount to release */
    long  released;        /* Amount actually released */
    char* current_brk;     /* address returned by pre-check sbrk call */
    char* new_brk;         /* address returned by post-check sbrk call */
    size_t pagesz;

    pagesz = av->pagesize;
    top_size = chunksize(av->top);

    /* Release in pagesize units, keeping at least one page */
    extra = ((top_size - pad - MINSIZE + (pagesz-1)) / pagesz - 1) * pagesz;

    if (extra > 0) {

	/*
	   Only proceed if end of memory is where we last set it.
	   This avoids problems if there were foreign sbrk calls.
	   */
	current_brk = (char*)(MORECORE(0));
	if (current_brk == (char*)(av->top) + top_size) {

	    /*
	       Attempt to release memory. We ignore MORECORE return value,
	       and instead call again to find out where new end of memory is.
	       This avoids problems if first call releases less than we asked,
	       or if failure somehow altered brk value. (We could still
	       encounter problems if it altered brk in some very bad way,
	       but the only thing we can do is adjust anyway, which will cause
	       some downstream failure.)
	       */

	    MORECORE(-extra);
	    new_brk = (char*)(MORECORE(0));

	    if (new_brk != (char*)MORECORE_FAILURE) {
		released = (long)(current_brk - new_brk);

		if (released != 0) {
		    /* Success. Adjust top. */
		    av->sbrked_mem -= released;
		    set_head(av->top, (top_size - released) | PREV_INUSE);
		    check_malloc_state();
		    return 1;
		}
	    }
	}
    }
    return 0;
}
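The public entry point mentioned in the comment is malloc_trim(pad); a minimal demonstration, assuming a glibc/uClibc-style allocator where it returns 1 when memory was actually released:

#include <malloc.h>
#include <stdlib.h>

int main(void)
{
    void *p = malloc(1 << 20);   /* grow the heap */
    free(p);                     /* leave a large free chunk at the top */

    /* Keep 0 bytes of padding; returns 1 if memory went back to the OS. */
    int released = malloc_trim(0);
    return released ? 0 : 1;
}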
Example #16
/* Walk the heap, jumping by each chunk header's size field. Return an info record. */
Heap_Info get_heap_info() {
	void *heap = get_heap_base();			  // should be allocated chunk
	void *end_of_heap = heap + heap_size - 1; // last valid address of heap
	Busy_Header *p = heap;
	uint32_t busy = 0;
	uint32_t free = 0;
	uint32_t busy_size = 0;
	uint32_t free_size = 0;
	while ( (void*)p>=heap && (void*)p<=end_of_heap ) { // stay inbounds, walking heap
		// track
		if ( p->size & BUSY_BIT ) {
			busy++;
			busy_size += chunksize(p);
		}
		else {
			free++;
			free_size += chunksize(p);
		}
		p = (Busy_Header *)((char *) p + chunksize(p));
	}
	return (Heap_Info){heap_size, busy, busy_size, free, free_size};
}
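A small sanity check built on this record, in the style of the surrounding tests (assumes the same harness; one_malloc, HEAP_SIZE, and request2size are the helpers used elsewhere on this page):

void heap_info_sanity() {
	one_malloc();
	Heap_Info info = get_heap_info();
	// every heap byte is accounted for as either busy or free
	assert_equal(info.busy_size + info.free_size, HEAP_SIZE);
	assert_equal(info.busy, 1);
	assert_equal(info.busy_size, request2size(100));
}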
Example #17
/** clears the constrain tag and adjusts the allocation count */
static void clear_constrain_tag(void* m)
{
	if ( m == NULL )
		return;

	mchunkptr p = mem2chunk(m);

	if ( p->size & CONSTRAIN_TAG )
	{
		p->size &= ~CONSTRAIN_TAG;
		g_constrain_allocated_mem -= chunksize(p);
	}
}
Example #18
void two_malloc() {
    one_malloc();
    void *p0 = get_heap_base();
    Free_Header *freelist0 = get_heap_freelist();
    Busy_Header *p = malloc(200);
    assert_addr_not_equal(p, NULL);
    // check 2nd alloc chunk
    assert_equal(p, freelist0); // should return previous free chunk
    assert_equal(chunksize(p), request2size(200));
    // check remaining free chunk
    Free_Header *freelist1 = get_heap_freelist();
    assert_addr_not_equal(freelist0, freelist1);
    assert_addr_not_equal(freelist0, get_heap_base());
    assert_equal(chunksize(freelist1), HEAP_SIZE-request2size(100)-request2size(200));
    assert_equal(chunksize(p0)+chunksize(p)+chunksize(freelist1), HEAP_SIZE);
    assert_addr_equal(freelist1->next, NULL);

    Heap_Info info = verify_heap();
    assert_equal(info.busy, 2);
    assert_equal(info.busy_size, request2size(100) + request2size(200));
    assert_equal(info.free, 1);
    assert_equal(info.free_size, HEAP_SIZE - request2size(100) - request2size(200));
}
Example #19
Heap_Info get_heap_info() {
    void *heap = get_heap_base();
    void *end_of_heap = heap + DEFAULT_MAX_HEAP_SIZE - 1;
    Busy_Header *p = heap;
    uint32_t busy = 0;
    uint32_t free = 0;
    uint32_t busy_size = 0;
    uint32_t free_size = 0;
    while ( (void*)p >= heap && (void*)p <= end_of_heap ) {
        if ( p->size & BUSY_BIT ) {
            busy++;
            busy_size += chunksize(p);
        }
        else {
            free++;
            free_size += chunksize(p);
        }
        p = (Busy_Header *)((char *) p + chunksize(p));
    }
    return (Heap_Info) {
        DEFAULT_MAX_HEAP_SIZE, busy, busy_size, free, free_size
    };
}
Example #20
void malloc_large_size_then_free() {
    Free_Header *freelist = get_heap_freelist();//free list before malloc
    void *p = malloc(2048);
    assert_addr_not_equal(p, NULL);
    assert_equal(chunksize(p), request2size(2048));
    Free_Header *freelist1 = get_heap_freelist(); // free list after malloc
    Busy_Header *heap = get_heap_base();
    assert_addr_equal(p,heap);
    assert_addr_not_equal(heap,freelist1);
    free(p);
    Busy_Header *heap1 = get_heap_base();
    Free_Header *freelist2 = get_heap_freelist(); // free list after free; should return to its state before malloc
    assert_addr_equal(freelist,freelist2);
    assert_addr_equal(heap1,freelist2);
}
Example #21
void one_malloc() {
    void *p = malloc(100);
    assert_addr_not_equal(p, NULL);
    Free_Header *freelist = get_heap_freelist();
    Busy_Header *heap = get_heap_base();
    assert_addr_not_equal(freelist, heap);
    // check 1st chunk
    assert_equal(p, heap);
    assert_equal(chunksize(p), request2size(100));
    // check 2nd chunk
    assert_equal(freelist->size, HEAP_SIZE-request2size(100));
    assert_addr_equal(freelist->next, NULL);

    Heap_Info info = verify_heap();
    assert_equal(info.busy, 1);
    assert_equal(info.busy_size, request2size(100));
    assert_equal(info.free, 1);
    assert_equal(info.free_size, HEAP_SIZE - request2size(100));
}
Example #22
void RageSoundDriver::DecodeThread()
{
	SetupDecodingThread();

	while( !m_bShutdownDecodeThread )
	{
		/* Fill each playing sound, round-robin. */
		{
			int iSampleRate = GetSampleRate();
			ASSERT_M( iSampleRate > 0, ssprintf("%i", iSampleRate) );
			int iUsecs = 1000000*chunksize() / iSampleRate;
			usleep( iUsecs );
		}

		LockMut( m_Mutex );
//		LOG->Trace("begin mix");

		for( unsigned i = 0; i < ARRAYLEN(m_Sounds); ++i )
		{
			if( m_Sounds[i].m_State != Sound::PLAYING )
				continue;

			Sound *pSound = &m_Sounds[i];

			CHECKPOINT_M("Processing the sound while buffers are available.");
			while( pSound->m_Buffer.num_writable() )
			{
				int iWrote = GetDataForSound( *pSound );
				if( iWrote == RageSoundReader::WOULD_BLOCK )
					break;
				if( iWrote < 0 )
				{
					/* This sound is finishing. */
					pSound->m_State = Sound::STOPPING;
					break;
//					LOG->Trace("mixer: (#%i) eof (%p)", i, pSound->m_pSound );
				}
			}
		}
//		LOG->Trace("end mix");
	}
}
Example #23
/** tags the allocated memory and adjusts the allocation count */
static void constrain_tag_allocation(void *m)
{
	if ( m == NULL )
		return;

	mchunkptr p = mem2chunk(m);

	assert(!(p->size & CONSTRAIN_TAG));
	p->size |= CONSTRAIN_TAG;

	g_constrain_allocated_mem += chunksize(p);

#ifdef SHOW_MEM_INFO
	if ( g_show_mem_info && (g_constrain_allocated_mem > g_max_constrain_allocated_mem) )
	{
		show_mem_info(0);
		g_max_constrain_allocated_mem = g_constrain_allocated_mem;
	}
#endif

}
Example #24
/* Visualize the chunk as being partitioned into blocks of 256 bytes from the
   highest address of the chunk, downwards.  The beginning of each block tells
   us the size of the previous block, up to the actual size of the requested
   memory.  Our magic byte is right at the end of the requested size, so we
   must reach it with this iteration, otherwise we have witnessed a memory
   corruption.  */
static size_t
malloc_check_get_size(mchunkptr p)
{
  size_t size;
  unsigned char c;
  unsigned char magic = MAGICBYTE(p);

  assert(using_malloc_checking == 1);

  for (size = chunksize(p) - 1 + (chunk_is_mmapped(p) ? 0 : SIZE_SZ);
       (c = ((unsigned char*)p)[size]) != magic;
       size -= c) {
    if(c<=0 || size<(c+2*SIZE_SZ)) {
      malloc_printerr(check_action, "malloc_check_get_size: memory corruption",
		      chunk2mem(p));
      return 0;
    }
  }

  /* chunk2mem size.  */
  return size - 2*SIZE_SZ;
}
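To make the block-partitioning comment concrete, here is a self-contained sketch of the same trailer scheme on a plain byte buffer: the writer (cf. mem2mem_check above) lays a trail of size markers downward from the top of the chunk and plants a magic byte right after the requested bytes; the reader follows the trail back. The buffer size, request size, and magic value are illustrative, not taken from the allocator:

#include <assert.h>
#include <stddef.h>
#include <string.h>

#define MAGIC 0xA5   /* illustrative stand-in for MAGICBYTE(p) */

/* Writer: mark the slack above the requested size, 0xFF per 255-byte block,
   ending with the distance to the magic byte (cf. mem2mem_check).  */
static void write_trailer(unsigned char *buf, size_t chunk_bytes, size_t req)
{
    size_t i;
    for (i = chunk_bytes - 1; i > req; i -= 0xFF) {
        if (i - req < 0x100) {
            buf[i] = (unsigned char) (i - req);
            break;
        }
        buf[i] = 0xFF;
    }
    buf[req] = MAGIC;
}

/* Reader: hop back down the trail until the magic byte is reached
   (cf. the scanning loop in malloc_check_get_size above).  */
static size_t read_requested_size(const unsigned char *buf, size_t chunk_bytes)
{
    size_t i = chunk_bytes - 1;
    while (buf[i] != MAGIC)
        i -= buf[i];
    return i;    /* index of the magic byte == requested size */
}

int main(void)
{
    unsigned char buf[1024];
    memset(buf, 0, sizeof buf);
    write_trailer(buf, sizeof buf, 600);
    assert(read_requested_size(buf, sizeof buf) == 600);
    return 0;
}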
Example #25
//static void 
void
pos_int_free(char *name, mstate av, mchunkptr p, int flag)
{
	INTERNAL_SIZE_T size;
	mfastbinptr* fb;
	mchunkptr prevchunk;
	INTERNAL_SIZE_T prevsize;
	mchunkptr nextchunk;
	INTERNAL_SIZE_T nextsize;
	int nextinuse;
	mchunkptr bck;
	mchunkptr fwd;

	//const char *errstr = NULL;


	size = chunksize(p);
	/*if ((uintptr_t) p > (uintptr_t) -size || misaligned_chunk (p)) {
		errstr = "free(): invalid pointer";
errout:
		//malloc_printerr (check_action, errstr, chunk2mem(p));
		return;
	}*/
	/*if (size < MINSIZE) {
		errstr = "free(): invalid size";
		goto errout;
	}*/

	//check_inuse_chunk(av, p);


	// fastbin
	if (flag==1 && (unsigned long)(size) <= (unsigned long)(get_max_fast ())) {
		/*if (chunk_at_offset (p, size)->size <= 2 * SIZE_SZ
		   || chunksize (chunk_at_offset (p, size)) >= av->system_mem) {
			errstr = "free(): invalid next size (fast)";
			goto errout;
		}*/

#if CONSISTENCY == 1
		set_fastchunks_log(name, av);
#else
		set_fastchunks(av);
#endif
		fb = &fastbin(av, fastbin_index(size));

		if (*fb == p) {
			//errstr = "double free or corruption (fasttop)";
			//goto errout;
			return ;
		}

#if CONSISTENCY == 1
		POS_WRITE_VAUE(name, (unsigned long *)&p->fd, (unsigned long)*fb);
		POS_WRITE_VAUE(name, (unsigned long *)fb, (unsigned long)p);
#else
		p->fd = *fb;
		*fb = p;
#endif

		return ;
	}

	// 1. First chunk
	if (chunk_is_first(p)) {

		nextchunk = next_chunk(p);
		nextsize = chunksize(nextchunk);

		// 1-1. (free F), free L
		if (chunk_is_last(nextchunk) && !inuse(nextchunk)) {

			//if (av < p && p < (char *)(av+PAGESIZE)){
			if ((char*)av+sizeof(struct malloc_state) == (char*)p) {
#if CONSISTENCY == 1
				insert_to_unsorted_log(name, av, p, bck, fwd, size);

				set_foot_log(name, p, size);
				clear_inuse_bit_at_offset_log(name, p, size);
#else
				insert_to_unsorted(av, p, bck, fwd, size);

				set_foot(p, size);
				clear_inuse_bit_at_offset(p, size);
#endif

				goto out;
			}
			else {
#if CONSISTENCY == 1
				unlink_log(name, nextchunk, bck, fwd);
				size = size + nextsize + 2*SIZE_SZ;

				pos_log_insert_malloc_free(name, (unsigned long)p, size);
				//pos_seg_free(name, (void *)p, size); // Delayed pos_seg_free
				POS_WRITE_VAUE(name, (unsigned long *)&av->system_mem, (unsigned long)(av->system_mem-size));
#else
				unlink(nextchunk, bck, fwd);
				size = size + nextsize + 2*SIZE_SZ;
				/*if (size%PAGESIZE != 0) {
					errstr = "free(): unmmap size is not page size";
					goto errout;
				}*/
				//FREE((char*)p, size);
				pos_seg_free(name, (void *)p, size);
				av->system_mem -= size;
#endif

				goto out;
			}

		}

		// 1-3. (free F), free M
		else if (!inuse(nextchunk)) {
#if CONSISTENCY == 1
			unlink_log(name, nextchunk, bck, fwd);
			size += nextsize;

			insert_to_unsorted_log(name, av, p, bck, fwd, size);

			set_head_log(name, p, size | FIRST_CHUNK | PREV_INUSE);
			set_foot_log(name, p, size);
#else
			unlink(nextchunk, bck, fwd);
			size += nextsize;

			insert_to_unsorted(av, p, bck, fwd, size);

			set_head(p, size | FIRST_CHUNK | PREV_INUSE);
			set_foot(p, size);
#endif

			goto out;
		}

		// 1-2. (free F), inuse L & 1-4. (free F), inuse M
		else {
#if CONSISTENCY == 1
			insert_to_unsorted_log(name, av, p, bck, fwd, size);

			set_foot_log(name, p, size);
			clear_inuse_bit_at_offset_log(name, p, size);
#else
			insert_to_unsorted(av, p, bck, fwd, size);

			set_foot(p, size);
			clear_inuse_bit_at_offset(p, size);
#endif

			goto out;
		}

	}

	// 2. Last chunk
	else if (chunk_is_last(p)) {

		if (!prev_inuse(p)) {
			prevchunk = prev_chunk(p);
			prevsize = chunksize(prevchunk);

			// 2-1. free F, (free L)
			if (chunk_is_first(prevchunk)) {

				//if (av < prevchunk && prevchunk < av+PAGESIZE){
				if((char*)av+sizeof(struct malloc_state) == (char*)prevchunk) {
#if CONSISTENCY == 1
					insert_to_unsorted_log(name, av, p, bck, fwd, size);
  
					set_foot_log(name, p, size);
					clear_inuse_bit_at_offset_log(name, p, size);
#else
					insert_to_unsorted(av, p, bck, fwd, size);
  
					set_foot(p, size);
					clear_inuse_bit_at_offset(p, size);
#endif

					goto out;
				}
				else {
#if CONSISTENCY == 1
					unlink_log(name, prevchunk, bck, fwd);
					size = prevsize+size+2*SIZE_SZ;
					//pos_seg_free(name, (void *)p, size);
					pos_log_insert_malloc_free(name, (unsigned long)p, size);
					POS_WRITE_VAUE(name, (unsigned long *)&av->system_mem, (unsigned long)(av->system_mem-size));
#else
					unlink(prevchunk, bck, fwd);
					size = prevsize+size+2*SIZE_SZ;
					/*if (size%PAGESIZE != 0) {
						errstr = "free(): unmmap size is not page size";
						goto errout;
					}*/
					//FREE((char*)p, size);
					pos_seg_free(name, (void *)p, size);
					av->system_mem -= size;
#endif

					goto out;
				}

			}

			// 2-3. free M, (free L)
			else {
#if CONSISTENCY == 1
				unlink_log(name, prevchunk, bck, fwd);
				size += prevsize;
				p = chunk_at_offset(p, -((long) prevsize));
  
				insert_to_unsorted_log(name, av, p, bck, fwd, size);

				set_head_log(name, p, size | LAST_CHUNK | PREV_INUSE);
				set_foot_log(name, p, size);
				clear_inuse_bit_at_offset_log(name, p, size);
#else
				unlink(prevchunk, bck, fwd);
				size += prevsize;
				p = chunk_at_offset(p, -((long) prevsize));
  
				insert_to_unsorted(av, p, bck, fwd, size);

				set_head(p, size | LAST_CHUNK | PREV_INUSE);
				set_foot(p, size);
				clear_inuse_bit_at_offset(p, size);
#endif
				goto out;
			}

		}

		// 2-2. inuse F, (free L) & 2-4. inuse M, (free L)
		else {
#if CONSISTENCY == 1
			insert_to_unsorted_log(name, av, p, bck, fwd, size);
  
			set_foot_log(name, p, size);
			clear_inuse_bit_at_offset_log(name, p, size);
#else
			insert_to_unsorted(av, p, bck, fwd, size);
  
			set_foot(p, size);
			clear_inuse_bit_at_offset(p, size);
#endif

			goto out;
		}

	}

	// 3. Middle chunk
	else {

		nextchunk = next_chunk(p);
		nextsize = chunksize(nextchunk);

		if (!prev_inuse(p)) {
			prevchunk = prev_chunk(p);
			prevsize = chunksize(prevchunk);

			// 3-1. free F, (free M), free L
			if (chunk_is_first(prevchunk) && chunk_is_last(nextchunk) && !inuse(nextchunk) ) {
	
				//if (av < prevchunk && prevchunk < av+PAGESIZE){
				if((char*)av+sizeof(struct malloc_state) == (char*)prevchunk) {
#if CONSISTENCY == 1
					unlink_log(name, prevchunk, bck, fwd);
					size += prevsize;
					p = chunk_at_offset(p, -((long) prevsize));

					insert_to_unsorted_log(name, av, p, bck, fwd, size);
  
					set_head_log(name, p, size | FIRST_CHUNK | PREV_INUSE);
					set_foot_log(name, p, size);
					clear_inuse_bit_at_offset_log(name, p, size);
#else
					unlink(prevchunk, bck, fwd);
					size += prevsize;
					p = chunk_at_offset(p, -((long) prevsize));

					insert_to_unsorted(av, p, bck, fwd, size);
  
					set_head(p, size | FIRST_CHUNK | PREV_INUSE);
					set_foot(p, size);
					clear_inuse_bit_at_offset(p, size);
  #endif
  
					goto out;
				}

				else {
#if CONSISTENCY == 1
					unlink_log(name, prevchunk, bck, fwd);
					unlink_log(name, nextchunk, bck, fwd);
					p = chunk_at_offset(p, -((long) prevsize));
					size = prevsize+size+nextsize+2*SIZE_SZ;

					pos_log_insert_malloc_free(name, (unsigned long)p, size);
					//pos_seg_free(name, (void *)p, size);
					POS_WRITE_VAUE(name, (unsigned long *)&av->system_mem, (unsigned long)(av->system_mem-size));
#else
					unlink(prevchunk, bck, fwd);
					unlink(nextchunk, bck, fwd);
					p = chunk_at_offset(p, -((long) prevsize));
					size = prevsize+size+nextsize+2*SIZE_SZ;
					/*if (size%PAGESIZE != 0) {
						errstr = "free(): unmmap size is not page size";
						goto errout;
					}*/
					//FREE((char*)p, size);
					pos_seg_free(name, (void *)p, size);
					av->system_mem -= size;
  #endif
  
					goto out;
				}
			}

#if CONSISTENCY == 1
			unlink_log(name, prevchunk, bck, fwd);
#else
			unlink(prevchunk, bck, fwd);
#endif
			size += prevsize;
			p = chunk_at_offset(p, -((long) prevsize));

			if (chunk_is_first(prevchunk)) {
#if CONSISTENCY == 1
				set_head_log(name, p, size | FIRST_CHUNK | PREV_INUSE);
#else
				set_head(p, size | FIRST_CHUNK | PREV_INUSE);
				//set_foot(p, size);
				//clear_inuse_bit_at_offset(p, size);
#endif
			}

		}

		nextinuse = inuse_bit_at_offset(nextchunk, nextsize);

		if (!nextinuse) {
#if CONSISTENCY == 1
			unlink_log(name, nextchunk, bck, fwd);
#else
			unlink(nextchunk, bck, fwd);
#endif
			size += nextsize;
		}

#if CONSISTENCY == 1
		insert_to_unsorted_log(name, av, p, bck, fwd, size);

		if (chunk_is_first(p)) {
			set_head_log(name, p, size | FIRST_CHUNK | PREV_INUSE);
		} else if (chunk_is_last(nextchunk)&&!nextinuse) {
			set_head_log(name, p, size | LAST_CHUNK | PREV_INUSE);
		} else {
			set_head_log(name, p, size | PREV_INUSE);
		}
		set_foot_log(name, p, size);
		clear_inuse_bit_at_offset_log(name, p, size);
#else
		//else
		//clear_inuse_bit_at_offset(nextchunk, 0);

		insert_to_unsorted(av, p, bck, fwd, size);

		if (chunk_is_first(p)) {
			set_head(p, size | FIRST_CHUNK | PREV_INUSE);
		} else if (chunk_is_last(nextchunk)&&!nextinuse) {
			set_head(p, size | LAST_CHUNK | PREV_INUSE);
		} else {
			set_head(p, size | PREV_INUSE);
		}
		set_foot(p, size);
		clear_inuse_bit_at_offset(p, size);

		//check_free_chunk(av, p);
 #endif
	}

out: 
	if ((unsigned long)(size) >= FASTBIN_CONSOLIDATION_THRESHOLD && have_fastchunks(av)) {
		pos_malloc_consolidate(name, av);
	}
}
Example #26
/*
  ------------------------------ pos_test_function ------------------------------
*/
void
pos_print_free_chunks(char *name)
{
	struct malloc_state * av;
	mbinptr	bin;
	mchunkptr p;
	int i;


	av = (struct malloc_state *)pos_lookup_mstate(name);
	if (av == NULL || !have_init_key(av)) {
		printf("\n     Can't print free chunks.\n");
		return;
	}

	printf("\n      ******************** Free chunks of `%s` ************************\n", name);

	printf("      *  1. Fast bins(0~7)\n");
	for (i=0; i< NFASTBINS ; i++) {
		p = (mchunkptr)fastbin(av, i);
		while (p!=0) {
			printf("      *    [%3d] addr=0x%lX, size=%lu", i, (unsigned long int)p, (unsigned long int)chunksize(p));
			if (chunk_is_first(p)) printf("(F)\n");
			else if (chunk_is_last(p)) printf("(L)\n");
			else printf("(M)\n");
			p = p->fd;
		}
	}

	printf("      *  2. Unsorted(1) / Small(2~63) / Large(64~128) bins\n");
	for (i=1; i < NBINS; i++) {
		bin = bin_at(av,i);
		p = bin->fd;
		while (p != bin) {
			printf("      *    [%3d] addr=0x%lX, size=%lu", i, (unsigned long int)p, (unsigned long int)chunksize(p));
			if (chunk_is_first(p)) printf("(F)\n");
			else if (chunk_is_last(p)) printf("(L)\n");
			else printf("(M)\n");
			p = p->fd;
		}
	}

	printf("      ***********************************************************************\n");
}
Example #27
Void_t*
pos_public_rEALLOc(char *name, Void_t *oldmem, unsigned long _bytes)
{
	mstate ar_ptr;
	INTERNAL_SIZE_T nb;      /* padded request size */

	Void_t* newp;             /* chunk to return */

	size_t bytes = _bytes;

	/*if (bytes == 0 && oldmem != NULL) {
		pos_public_fREe(name, oldmem);
		return NULL;
	}*/

	/* realloc of null is supposed to be same as malloc */
	if (oldmem == 0)
		return pos_public_mALLOc(name, bytes);


	/* chunk corresponding to oldmem */
	const mchunkptr oldp = mem2chunk(oldmem);
	/* its size */
	const INTERNAL_SIZE_T oldsize = chunksize(oldp);

	/* Little security check which won't hurt performance: the
	    allocator never wraps around at the end of the address space.
	    Therefore we can exclude some size values which might appear
	    here by accident or by "design" from some intruder. */
	/*if (__builtin_expect ((uintptr_t) oldp > (uintptr_t) -oldsize, 0)
		|| __builtin_expect (misaligned_chunk (oldp), 0))
	{
		malloc_printerr (check_action, "realloc(): invalid pointer", oldmem);
		return NULL;
	}*/

	checked_request2size(bytes, nb);

	ar_ptr = (struct malloc_state *)pos_lookup_mstate(name);
	if (ar_ptr == NULL) {
		return NULL;
	}

	(void)mutex_lock(&ar_ptr->mutex);
	newp = pos_int_realloc(name, ar_ptr, oldp, oldsize, nb);
	(void)mutex_unlock(&ar_ptr->mutex);

	if (newp == NULL) {
		/* Try harder to allocate memory in other arenas.  */
		newp = pos_public_mALLOc(name, bytes);
		if (newp != NULL) {
			memcpy (newp, oldmem, oldsize - SIZE_SZ);
			
			(void)mutex_lock(&ar_ptr->mutex);
			pos_int_free(name, ar_ptr, oldp, 1);
			(void)mutex_unlock(&ar_ptr->mutex);
		}
	}

	return newp;
}
Example #28
static Void_t*
pos_int_malloc(char *name, mstate av, size_t bytes)
{
	INTERNAL_SIZE_T nb;
	unsigned int idx;
	mbinptr bin;

	mchunkptr victim;
	INTERNAL_SIZE_T size;
	int victim_index;

	mchunkptr remainder;
	unsigned long remainder_size;

	unsigned int block;
	unsigned int bit;
	unsigned int map;

	mchunkptr fwd;
	mchunkptr bck;

	//const char *errstr = NULL;

	size_t pagemask  = PAGESIZE - 1;
	
	// align the request in 16-byte units
	checked_request2size(bytes, nb);
	#if MALLOC_DEBUG == 1 
	printf("before fastbin\n") ; 
	#endif
	// 1. fast bin (<=144)
/*	if ((unsigned long)(nb) <= (unsigned long)(get_max_fast())) {
		idx = fastbin_index(nb);
	//	printf("idx = %d\n" , idx) ; 	
		mfastbinptr* fb = &fastbin(av, idx);
		victim = *fb;

		if (victim != 0) {

		if (fastbin_index (chunksize (victim)) != idx) {
			errstr = "malloc(): memory corruption (fast)";
errout:
			malloc_printerr (check_action, errstr, chunk2mem (victim));
		}

#if CONSISTENCY == 1
			POS_WRITE_VAUE(name, (unsigned long *)fb, (unsigned long)victim->fd);
#else
			*fb = victim->fd;
#endif

			void *p = chunk2mem(victim);
			return p;
		}
	}
*/
	// 2. small bin (<=1008)
	if (in_smallbin_range(nb)) {
	#if MALLOC_DEBUG == 1
	printf("inside smallbin if\n") ; 	
	#endif 	

		idx = smallbin_index(nb);
		bin = bin_at(av,idx);

		victim = last(bin);
		if ( victim != bin && victim != NULL ) {

			bck = victim->bk;

/*			if (bck->fd != victim) {
				errstr = "malloc(): smallbin double linked list corrupted";
				goto errout;
			}*/

#if CONSISTENCY == 1
			set_inuse_bit_at_offset_log(name, victim, nb);
			POS_WRITE_VAUE(name, (unsigned long *)&bin->bk, (unsigned long)bck);
			POS_WRITE_VAUE(name, (unsigned long *)&bin->fd, (unsigned long)bin);
#else
			set_inuse_bit_at_offset(victim, nb);
			bin->bk = bck;
			bck->fd = bin;
#endif

			void *p = chunk2mem(victim);
			return p;
		}
	}
	else {
		idx = largebin_index(nb);

		if (have_fastchunks(av)) {
			pos_malloc_consolidate(name, av);
		}
	}

	#if MALLOC_DEBUG==1
	printf("before unsorted bin\n") ; 	
	#endif 
	for(;;) {
		int iters = 0;
		// 3. unsorted bin
		while ((victim = unsorted_chunks(av)->bk) != unsorted_chunks(av)) {
			bck = victim->bk;
			
			/*if (victim->size <= 2 * SIZE_SZ || victim->size > av->system_mem)
				malloc_printerr (check_action, "malloc(): memory corruption", chunk2mem (victim));*/
			size = chunksize(victim);

			if (in_smallbin_range(nb) &&
			   bck == unsorted_chunks(av) &&
			   victim == av->last_remainder &&
			   (unsigned long)(size) > (unsigned long)(nb + MINSIZE)) {

				remainder_size = size - nb;
				remainder = chunk_at_offset(victim, nb);
#if CONSISTENCY == 1
				POS_WRITE_VAUE(name, (unsigned long *)&unsorted_chunks(av)->bk, (unsigned long)remainder);
				POS_WRITE_VAUE(name, (unsigned long *)&unsorted_chunks(av)->fd, (unsigned long)remainder);
				POS_WRITE_VAUE(name, (unsigned long *)&av->last_remainder, (unsigned long)remainder);
#else
				unsorted_chunks(av)->bk = unsorted_chunks(av)->fd = remainder;
				av->last_remainder = remainder;
#endif
				remainder->bk = remainder->fd = unsorted_chunks(av);
				if (!in_smallbin_range(remainder_size)) {
					remainder->fd_nextsize = NULL;
					remainder->bk_nextsize = NULL;
				}

// Remainder doesn't need logging...
				if (chunk_is_last(victim))
					set_head(remainder, remainder_size | LAST_CHUNK | PREV_INUSE);
				else
					set_head(remainder, remainder_size | PREV_INUSE);

				// set PREV_INUSE flag..
#if CONSISTENCY == 1
				if (chunk_is_first(victim)) {
					set_head_log(name, victim, nb | FIRST_CHUNK | PREV_INUSE);
				} else {
					set_head_log(name, victim, nb | PREV_INUSE);
				}
				
				set_foot_log(name, remainder, remainder_size);
#else
				if (chunk_is_first(victim))
					set_head(victim, nb | FIRST_CHUNK | PREV_INUSE);
				else
					set_head(victim, nb | PREV_INUSE);

				set_foot(remainder, remainder_size);
#endif

				void *p = chunk2mem(victim);
				return p;
			}

#if CONSISTENCY == 1
			POS_WRITE_VAUE(name, (unsigned long *)&unsorted_chunks(av)->bk, (unsigned long)bck);
			POS_WRITE_VAUE(name, (unsigned long *)&bck->fd, (unsigned long)unsorted_chunks(av));
#else
			unsorted_chunks(av)->bk = bck;
			bck->fd = unsorted_chunks(av);
#endif

			if (size == nb) {
#if CONSISTENCY == 1
				set_inuse_bit_at_offset_log(name, victim, size);
#else
				set_inuse_bit_at_offset(victim, size);
#endif

				void *p = chunk2mem(victim);
				return p;
			}
			if (in_smallbin_range(size)) {
				victim_index = smallbin_index(size);
				bck = bin_at(av, victim_index);
				fwd = bck->fd;
			}
			else {
				victim_index = largebin_index(size);
				bck = bin_at(av, victim_index);
				fwd = bck->fd;

				if (fwd != bck) {
					size |= PREV_INUSE; //In order not to use chunksize()
					if ((unsigned long)(size) < (unsigned long)(bck->bk->size)) {
						fwd = bck;
						bck = bck->bk;

// Current victim was in the unsorted bin, which doesn't use fd_nextsize, so we don't leave a log (nor for fd_nextsize below).
						victim->fd_nextsize = fwd->fd;
						victim->bk_nextsize = fwd->fd->bk_nextsize;
#if CONSISTENCY == 1
						POS_WRITE_VAUE(name, (unsigned long *)&fwd->fd->bk_nextsize, (unsigned long)victim);
						POS_WRITE_VAUE(name, (unsigned long *)&victim->bk_nextsize->fd_nextsize, (unsigned long)victim);
#else
						fwd->fd->bk_nextsize = victim->bk_nextsize->fd_nextsize = victim;
#endif
					}
					else {
						while ((unsigned long) size < fwd->size) {
							fwd = fwd->fd_nextsize;
						}

						if ((unsigned long) size == (unsigned long) fwd->size)
							fwd = fwd->fd;
						else {
							victim->fd_nextsize = fwd;
							victim->bk_nextsize = fwd->bk_nextsize;
#if CONSISTENCY == 1
							POS_WRITE_VAUE(name, (unsigned long *)&fwd->bk_nextsize, (unsigned long)victim);
							POS_WRITE_VAUE(name, (unsigned long *)&victim->bk_nextsize->fd_nextsize, (unsigned long)victim);
#else
							fwd->bk_nextsize = victim;
							victim->bk_nextsize->fd_nextsize = victim;
#endif
						}
						bck = fwd->bk;
					}
				} 
				else
					victim->fd_nextsize = victim->bk_nextsize = victim;
			}

#if CONSISTENCY == 1
			mark_bin_log(name, av, victim_index);
			POS_WRITE_VAUE(name, (unsigned long *)&victim->bk, (unsigned long)bck);
			POS_WRITE_VAUE(name, (unsigned long *)&victim->fd, (unsigned long)fwd);
			POS_WRITE_VAUE(name, (unsigned long *)&fwd->bk, (unsigned long)victim);
			POS_WRITE_VAUE(name, (unsigned long *)&bck->fd, (unsigned long)victim);
#else
			mark_bin(av, victim_index);
			victim->bk = bck;
			victim->fd = fwd;
			fwd->bk = victim;
			bck->fd = victim;
#endif
#define MAX_ITERS	10000
			if (++iters >= MAX_ITERS)
				break;
		}
		#if MALLOC_DEBUG == 1
		printf("before large bin\n") ;
		#endif 
		// 4. large bin (1024<=)
		if (!in_smallbin_range(nb)) {
			bin = bin_at(av, idx);
#if MALLOC_DEBUG
			printf(" bin = [%p]\n " ,bin ) ; 	
#endif
			if ((victim = first(bin)) != bin &&
			   (unsigned long)(victim->size) >= (unsigned long)(nb)) {

				victim = victim->bk_nextsize;
				while (((unsigned long)(size = chunksize(victim)) < (unsigned long)(nb)))
					victim = victim->bk_nextsize;

				//if (victim != last(bin) && victim->size == victim->fd->size)
				if (victim != last(bin) && chunksize(victim) == chunksize(victim->fd))
					victim = victim->fd;

				remainder_size = size - nb;
#if CONSISTENCY == 1
				unlink_log(name, victim, bck, fwd);
#else
				unlink(victim, bck, fwd);
#endif

				if (remainder_size < MINSIZE)  {
#if CONSISTENCY == 1
					set_inuse_bit_at_offset_log(name, victim, size);
#else
					set_inuse_bit_at_offset(victim, size);
#endif
				}
				else {
					remainder = chunk_at_offset(victim, nb);

#if CONSISTENCY == 1
					insert_to_unsorted_log(name, av, remainder, bck, fwd, remainder_size);
#else
					insert_to_unsorted(av, remainder, bck, fwd, remainder_size);
#endif

// Remainder doesn't need logging...
					if (chunk_is_last(victim))
						set_head(remainder, remainder_size | LAST_CHUNK | PREV_INUSE);
					else
						set_head(remainder, remainder_size | PREV_INUSE);

					// set PREV_INUSE flag..
#if CONSISTENCY == 1
					if (chunk_is_first(victim)) {
						set_head_log(name, victim, nb | FIRST_CHUNK | PREV_INUSE);
					} else {
						set_head_log(name, victim, nb | PREV_INUSE);
					}
					
					set_foot_log(name, remainder, remainder_size);
#else
					if (chunk_is_first(victim))
						set_head(victim, nb | FIRST_CHUNK | PREV_INUSE);
					else
						set_head(victim, nb | PREV_INUSE);

					set_foot(remainder, remainder_size);
#endif
				}

				void *p = chunk2mem(victim);
				return p;
			}
			
		}
		++idx;
		bin = bin_at(av,idx);
		block = idx2block(idx);
		map = av->binmap[block];
	 	bit = idx2bit(idx);
#if MALLOC_DEBUG == 1
		printf("DDD\n") ;
#endif
		for (;;) {
			if (bit > map || bit == 0) {
				do {
					if (++block >= BINMAPSIZE){
						goto new_alloc;
					}
				} while ( (map = av->binmap[block]) == 0);

				bin = bin_at(av, (block << BINMAPSHIFT));
				bit = 1;
			}
			while ((bit & map) == 0) {
				bin = next_bin(bin);
				bit <<= 1;
			}
#if MALLOC_DEBUG == 1
			printf("before victim\n") ; 
#endif 
			victim = last(bin);

			if (victim == bin) {
#if MALLOC_DEBUG == 1
				printf("victim == bin\n") ; 
#endif 
#if CONSISTENCY == 1
				POS_WRITE_VAUE(name, (unsigned long *)&av->binmap[block], (unsigned long)(map &~bit));
#else
				av->binmap[block] = map &= ~bit;
#endif
				bin = next_bin(bin);
		        	bit <<= 1;
			}
			else {
#if MALLOC_DEBUG == 1
				printf("victim != bin\n") ; 
#endif
				size = chunksize(victim);

				remainder_size = size - nb;

#if CONSISTENCY == 1
				unlink_log(name, victim, bck, fwd);
#else
				unlink(victim, bck, fwd);
#endif
#if MALLOC_DEBUG == 1
				printf("unlink\n") ; 
#endif 

				if (remainder_size < MINSIZE) {
#if CONSISTENCY == 1
					set_inuse_bit_at_offset_log(name, victim, size);
#else
#if MALLOC_DEBUG == 1
					printf("D\n") ;
					printf("victim : %p\n", victim) ; 	
					printf("size: %d\n" ,size) ; 
#endif
					set_inuse_bit_at_offset(victim, size);
#endif
				}
				else {
#if MALLOC_DEBUG == 1
					printf("remainder\n") ; 	
#endif
					remainder = chunk_at_offset(victim, nb);

#if CONSISTENCY == 1
					insert_to_unsorted_log(name, av, remainder, bck, fwd, remainder_size);
#else
#if MALLOC_DEBUG == 1
					printf("av=%p\n",av) ; 	
					printf("%p %p %p\n", remainder , bck , fwd);
					printf("%d\n" , remainder_size) ;
#endif 
					insert_to_unsorted(av, remainder, bck, fwd, remainder_size);
#endif

#if MALLOC_DEBUG == 1
					printf("%p %p %p\n", remainder , bck , fwd);

					printf("s\n") ; 
#endif
					if (in_smallbin_range(nb)) {
#if CONSISTENCY == 1
						POS_WRITE_VAUE(name, (unsigned long *)&av->last_remainder, (unsigned long)remainder);
#else
						av->last_remainder = remainder;
#endif
					}
#if MALLOC_DEBUG == 1
				printf("s\n") ; 
#endif 
					if (chunk_is_last(victim))
						set_head(remainder, remainder_size | LAST_CHUNK | PREV_INUSE);
					else
						set_head(remainder, remainder_size | PREV_INUSE);

					// set PREV_INUSE flag..
#if CONSISTENCY == 1
					if (chunk_is_first(victim)) {
						set_head_log(name, victim, nb | FIRST_CHUNK | PREV_INUSE);
					} else {
						set_head_log(name, victim, nb | PREV_INUSE);
					}
					
#if MALLOC_DEBUG == 1
				printf("s\n") ; 
#endif 
					set_foot_log(name, remainder, remainder_size);
#else
					if (chunk_is_first(victim))
						set_head(victim, nb | FIRST_CHUNK | PREV_INUSE);
					else
						set_head(victim, nb | PREV_INUSE);

					set_foot(remainder, remainder_size);
#endif
				}
#if MALLOC_DEBUG == 1 
				printf("s\n") ; 
#endif
				void *p = chunk2mem(victim);
				return p;
			}
		}
new_alloc:
		#if MALLOC_DEBUG == 1
		printf("before newallocation\n");
		#endif 
		// 6. new allocation
		size = (nb + MINSIZE +2*SIZE_SZ + pagemask) & ~pagemask;
		size += DEFAULT_PAD;
		//char* mm = (char*)(SEG_ALLOC(0, size, PROT_READ|PROT_WRITE, MAP_PRIVATE));
		char *mm = (char *)pos_seg_alloc(name, size);
		
		memset(mm , 0 , size);
		#if MALLOC_DEBUG == 1
		printf(" mm = %p\n" , mm) ; 	
		#endif 
	
#if CONSISTENCY == 1
		pos_log_insert_malloc_free(name, (unsigned long)mm, size);
#endif
		//if (mm != MAP_FAILED) {
		if (mm != (char *)0) {
#if CONSISTENCY == 1
			POS_WRITE_VAUE(name, (unsigned long *)&av->system_mem, (unsigned long)(av->system_mem+size));
#else
			av->system_mem += size;
#endif

		
//			printf("D!\n") ;
			mchunkptr p;

			p = (mchunkptr)mm;

			remainder_size = size - nb - 2*SIZE_SZ;
			remainder = chunk_at_offset(p, nb);

#if CONSISTENCY == 1
			insert_to_unsorted_log(name, av, remainder, bck, fwd, remainder_size);
#else
			insert_to_unsorted(av, remainder, bck, fwd, remainder_size);
#endif

			/*if (in_smallbin_range(nb))
				av->last_remainder = remainder;*/

			// set PREV_INUSE flag..
//#if CONSISTENCY == 1
//#elseif
			set_head(p, nb | FIRST_CHUNK | PREV_INUSE);
			set_head(remainder, remainder_size | LAST_CHUNK | PREV_INUSE);

			set_foot(remainder, remainder_size);
			clear_inuse_bit_at_offset(remainder, remainder_size);
//#endif

			//return p;
			return chunk2mem(p);
		} 
		else
			return 0;
  
	}
}
Example #29
////////////////////////////////////////
// WARNING: pos_realloc has a known error. FIX UP!
////////////////////////////////////////
Void_t*
pos_int_realloc(char *name, mstate av, mchunkptr oldp, INTERNAL_SIZE_T oldsize,
	     INTERNAL_SIZE_T nb)
{
	mchunkptr newp;				/* chunk to return */
	INTERNAL_SIZE_T newsize;		/* its size */
	Void_t* newmem;				/* corresponding user mem */

	mchunkptr next;				/* next contiguous chunk after oldp */

	mchunkptr remainder;			/* extra space at end of newp */
	unsigned long remainder_size;	/* its size */

	mchunkptr bck;				/* misc temp for linking */
	mchunkptr fwd;				/* misc temp for linking */

	unsigned long copysize;		/* bytes to copy */
	unsigned int ncopies;			/* INTERNAL_SIZE_T words to copy */
	INTERNAL_SIZE_T* s;			/* copy source */
	INTERNAL_SIZE_T* d;			/* copy destination */

	const char *errstr = NULL;


	/* oldmem size */
	/*if (oldp->size <= 2 * SIZE_SZ || oldsize >= av->system_mem) {
		errstr = "realloc(): invalid old size";
errout:
		malloc_printerr (check_action, errstr, chunk2mem(oldp));
		return NULL;
	}*/

	next = chunk_at_offset(oldp, oldsize);
	INTERNAL_SIZE_T nextsize = chunksize(next);
	/*if (next->size <= 2 * SIZE_SZ || nextsize >= av->system_mem) {
		errstr = "realloc(): invalid next size";
			goto errout;
	}*/

	// case: request is no larger than the old size
	if ((unsigned long)(oldsize) >= (unsigned long)(nb)) {
		/* already big enough; split below */
		newp = oldp;
		newsize = oldsize;
	}

	// case: request is larger than the old size
	else {

		/* Try to expand forward into next chunk;  split off remainder below */
		if (!inuse(next) &&
			(unsigned long)(newsize = oldsize + nextsize) >= (unsigned long)(nb)) {
			newp = oldp;
			unlink(next, bck, fwd);
		}

		/* allocate, copy, free */
		else {
			newmem = pos_int_malloc(name, av, nb - MALLOC_ALIGN_MASK);
			if (newmem == 0)
				return 0; /* propagate failure */

			newp = mem2chunk(newmem);
			newsize = chunksize(newp);

			/*
			  Avoid copy if newp is next chunk after oldp.
			*/
			if (newp == next) {
				newsize += oldsize;
				newp = oldp;
			}
			else {
				/*
				  Unroll copy of <= 36 bytes (72 if 8byte sizes)
				  We know that contents have an odd number of
				  INTERNAL_SIZE_T-sized words; minimally 3.
				*/

				copysize = oldsize - SIZE_SZ;
				s = (INTERNAL_SIZE_T*)(chunk2mem(oldp));
				d = (INTERNAL_SIZE_T*)(newmem);
				ncopies = copysize / sizeof(INTERNAL_SIZE_T);

				if (ncopies > 9)
					memcpy(d, s, copysize);
				else {
					*(d+0) = *(s+0);
					*(d+1) = *(s+1);
					*(d+2) = *(s+2);
					if (ncopies > 4) {
						*(d+3) = *(s+3);
						*(d+4) = *(s+4);
						if (ncopies > 6) {
							*(d+5) = *(s+5);
							*(d+6) = *(s+6);
							if (ncopies > 8) {
								*(d+7) = *(s+7);
								*(d+8) = *(s+8);
							}
						}
					}
				}

				pos_int_free(name, av, oldp, 1);

				return chunk2mem(newp);
			}
		}
	}

	/* If possible, free extra space in old or extended chunk */

	remainder_size = newsize - nb;

	if (remainder_size < MINSIZE) { /* not enough extra to split off */
		set_head_size(newp, newsize);
		set_inuse_bit_at_offset(newp, newsize);
	}
	else { /* split remainder */
		remainder = chunk_at_offset(newp, nb);

		if (chunk_is_last(newp))
			set_head(remainder, remainder_size | LAST_CHUNK | PREV_INUSE);
		else
			set_head(remainder, remainder_size | PREV_INUSE);

		// set PREV_INUSE flag..
		if (chunk_is_first(newp))
			set_head(newp, nb | FIRST_CHUNK | PREV_INUSE);
		else
			set_head(newp, nb | PREV_INUSE);
		
		//set_head_size(newp, nb);
		//set_head(remainder, remainder_size | PREV_INUSE |(av != &main_arena ? NON_MAIN_ARENA : 0));

		/* Mark remainder as inuse so free() won't complain */
		set_inuse_bit_at_offset(remainder, remainder_size);
		pos_int_free(name, av, remainder, 1);
	}

	return chunk2mem(newp);
}
Example #30
/* ------------------------------ realloc ------------------------------ */
void* ulibc_realloc(void* oldmem, size_t bytes)
{
    mstate av;

    size_t  nb;              /* padded request size */

    mchunkptr        oldp;            /* chunk corresponding to oldmem */
    size_t  oldsize;         /* its size */

    mchunkptr        newp;            /* chunk to return */
    size_t  newsize;         /* its size */
    void*          newmem;          /* corresponding user mem */

    mchunkptr        next;            /* next contiguous chunk after oldp */

    mchunkptr        remainder;       /* extra space at end of newp */
    unsigned long     remainder_size;  /* its size */

    mchunkptr        bck;             /* misc temp for linking */
    mchunkptr        fwd;             /* misc temp for linking */

    unsigned long     copysize;        /* bytes to copy */
    unsigned int     ncopies;         /* size_t words to copy */
    size_t* s;               /* copy source */
    size_t* d;               /* copy destination */

    void *retval;

    /* Check for special cases.  */
    if (! oldmem)
	return ulibc_malloc(bytes);
    if (! bytes) {
	ulibc_free (oldmem);
	return NULL;
    }

    av = get_malloc_state();
    checked_request2size(bytes, nb);

    oldp    = mem2chunk(oldmem);
    oldsize = chunksize(oldp);

    check_inuse_chunk(oldp);

    if (!chunk_is_mmapped(oldp)) {

	if ((unsigned long)(oldsize) >= (unsigned long)(nb)) {
	    /* already big enough; split below */
	    newp = oldp;
	    newsize = oldsize;
	}

	else {
	    next = chunk_at_offset(oldp, oldsize);

	    /* Try to expand forward into top */
	    if (next == av->top &&
		    (unsigned long)(newsize = oldsize + chunksize(next)) >=
		    (unsigned long)(nb + MINSIZE)) {
		set_head_size(oldp, nb);
		av->top = chunk_at_offset(oldp, nb);
		set_head(av->top, (newsize - nb) | PREV_INUSE);
		retval = chunk2mem(oldp);
		goto DONE;
	    }

	    /* Try to expand forward into next chunk;  split off remainder below */
	    else if (next != av->top &&
		    !inuse(next) &&
		    (unsigned long)(newsize = oldsize + chunksize(next)) >=
		    (unsigned long)(nb)) {
		newp = oldp;
		unlink(next, bck, fwd);
	    }

	    /* allocate, copy, free */
	    else {
		newmem = malloc(nb - MALLOC_ALIGN_MASK);
		if (newmem == 0) {
		    retval = 0; /* propagate failure */
		    goto DONE;
		}

		newp = mem2chunk(newmem);
		newsize = chunksize(newp);

		/*
		   Avoid copy if newp is next chunk after oldp.
		   */
		if (newp == next) {
		    newsize += oldsize;
		    newp = oldp;
		}
		else {
		    /*
		       Unroll copy of <= 36 bytes (72 if 8byte sizes)
		       We know that contents have an odd number of
		       size_t-sized words; minimally 3.
		       */

		    copysize = oldsize - (sizeof(size_t));
		    s = (size_t*)(oldmem);
		    d = (size_t*)(newmem);
		    ncopies = copysize / sizeof(size_t);
		    assert(ncopies >= 3);

		    if (ncopies > 9)
			memcpy(d, s, copysize);

		    else {
			*(d+0) = *(s+0);
			*(d+1) = *(s+1);
			*(d+2) = *(s+2);
			if (ncopies > 4) {
			    *(d+3) = *(s+3);
			    *(d+4) = *(s+4);
			    if (ncopies > 6) {
				*(d+5) = *(s+5);
				*(d+6) = *(s+6);
				if (ncopies > 8) {
				    *(d+7) = *(s+7);
				    *(d+8) = *(s+8);
				}
			    }
			}
		    }

		    ulibc_free(oldmem);
		    check_inuse_chunk(newp);
		    retval = chunk2mem(newp);
		    goto DONE;
		}
	    }
	}

	/* If possible, free extra space in old or extended chunk */

	assert((unsigned long)(newsize) >= (unsigned long)(nb));

	remainder_size = newsize - nb;

	if (remainder_size < MINSIZE) { /* not enough extra to split off */
	    set_head_size(newp, newsize);
	    set_inuse_bit_at_offset(newp, newsize);
	}
	else { /* split remainder */
	    remainder = chunk_at_offset(newp, nb);
	    set_head_size(newp, nb);
	    set_head(remainder, remainder_size | PREV_INUSE);
	    /* Mark remainder as inuse so free() won't complain */
	    set_inuse_bit_at_offset(remainder, remainder_size);
	    ulibc_free(chunk2mem(remainder));
	}

	check_inuse_chunk(newp);
	retval = chunk2mem(newp);
	goto DONE;
    }

    /*
       Handle mmap cases
       */

    else {
	size_t offset = oldp->prev_size;
	size_t pagemask = av->pagesize - 1;
	char *cp;
	unsigned long  sum;

	/* Note the extra (sizeof(size_t)) overhead */
	newsize = (nb + offset + (sizeof(size_t)) + pagemask) & ~pagemask;

	/* don't need to remap if still within same page */
	if (oldsize == newsize - offset) {
	    retval = oldmem;
	    goto DONE;
	}

	cp = (char*)mremap((char*)oldp - offset, oldsize + offset, newsize, 1);

	if (cp != (char*)MORECORE_FAILURE) {

	    newp = (mchunkptr)(cp + offset);
	    set_head(newp, (newsize - offset)|IS_MMAPPED);

	    assert(aligned_OK(chunk2mem(newp)));
	    assert((newp->prev_size == offset));

	    /* update statistics */
	    sum = av->mmapped_mem += newsize - oldsize;
	    if (sum > (unsigned long)(av->max_mmapped_mem))
		av->max_mmapped_mem = sum;
	    sum += av->sbrked_mem;
	    if (sum > (unsigned long)(av->max_total_mem))
		av->max_total_mem = sum;

	    retval = chunk2mem(newp);
	    goto DONE;
	}

	/* Note the extra (sizeof(size_t)) overhead. */
	if ((unsigned long)(oldsize) >= (unsigned long)(nb + (sizeof(size_t))))
	    newmem = oldmem; /* do nothing */
	else {
	    /* Must alloc, copy, free. */
	    newmem = malloc(nb - MALLOC_ALIGN_MASK);
	    if (newmem != 0) {
		memcpy(newmem, oldmem, oldsize - 2*(sizeof(size_t)));
		ulibc_free(oldmem);
	    }
	}
	retval = newmem;
    }

 DONE:
    return retval;
}