Example #1
/* libc_hidden_proto(mallinfo) */
struct mallinfo mallinfo(void)
{
    mstate av;
    struct mallinfo mi;
    unsigned int i;
    mbinptr b;
    mchunkptr p;
    size_t avail;
    size_t fastavail;
    int nblocks;
    int nfastblocks;

    __MALLOC_LOCK;
    av = get_malloc_state();
    /* Ensure initialization */
    if (av->top == 0)  {
	__malloc_consolidate(av);
    }

    check_malloc_state();

    /* Account for top */
    avail = chunksize(av->top);
    nblocks = 1;  /* top always exists */

    /* traverse fastbins */
    nfastblocks = 0;
    fastavail = 0;

    for (i = 0; i < NFASTBINS; ++i) {
	for (p = av->fastbins[i]; p != 0; p = p->fd) {
	    ++nfastblocks;
	    fastavail += chunksize(p);
	}
    }

    avail += fastavail;

    /* traverse regular bins */
    for (i = 1; i < NBINS; ++i) {
	b = bin_at(av, i);
	for (p = last(b); p != b; p = p->bk) {
	    ++nblocks;
	    avail += chunksize(p);
	}
    }

    mi.smblks = nfastblocks;
    mi.ordblks = nblocks;
    mi.fordblks = avail;
    mi.uordblks = av->sbrked_mem - avail;
    mi.arena = av->sbrked_mem;
    mi.hblks = av->n_mmaps;
    mi.hblkhd = av->mmapped_mem;
    mi.fsmblks = fastavail;
    mi.keepcost = chunksize(av->top);
    mi.usmblks = av->max_total_mem;
    __MALLOC_UNLOCK;
    return mi;
}
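The structure it fills follows the classic mallinfo layout (arena, ordblks, fordblks, and so on), so a caller can get a quick heap summary without touching allocator internals. A minimal, hypothetical caller sketch:

#include <malloc.h>
#include <stdio.h>

/* Sketch only: print a few of the fields mallinfo() fills above.
   Field meanings follow the traditional struct mallinfo layout;
   exact values depend on the allocator build. */
int main(void)
{
    struct mallinfo mi = mallinfo();

    printf("arena (sbrk'd bytes):     %d\n", mi.arena);
    printf("free chunks (ordblks):    %d\n", mi.ordblks);
    printf("fastbin chunks (smblks):  %d\n", mi.smblks);
    printf("free bytes (fordblks):    %d\n", mi.fordblks);
    printf("in-use bytes (uordblks):  %d\n", mi.uordblks);
    printf("trimmable top (keepcost): %d\n", mi.keepcost);
    return 0;
}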
Example #2
/* ------------------------- __malloc_trim -------------------------
   __malloc_trim is an inverse of sorts to __malloc_alloc.  It gives memory
   back to the system (via negative arguments to sbrk) if there is unused
   memory at the `high' end of the malloc pool. It is called automatically by
   free() when top space exceeds the trim threshold. It is also called by the
   public malloc_trim routine.  It returns 1 if it actually released any
   memory, else 0.
*/
static int __malloc_trim(size_t pad, mstate av)
{
    long  top_size;        /* Amount of top-most memory */
    long  extra;           /* Amount to release */
    long  released;        /* Amount actually released */
    char* current_brk;     /* address returned by pre-check sbrk call */
    char* new_brk;         /* address returned by post-check sbrk call */
    size_t pagesz;

    pagesz = av->pagesize;
    top_size = chunksize(av->top);

    /* Release in pagesize units, keeping at least one page */
    extra = ((top_size - pad - MINSIZE + (pagesz-1)) / pagesz - 1) * pagesz;
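    /* Worked example (assuming 4 KiB pages and a MINSIZE of 16): with
       top_size = 135168 and pad = 0,
       extra = ((135168 - 0 - 16 + 4095) / 4096 - 1) * 4096
             = (33 - 1) * 4096 = 131072,
       so 32 pages would be handed back and exactly one page kept in top. */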

    if (extra > 0) {

	/*
	   Only proceed if end of memory is where we last set it.
	   This avoids problems if there were foreign sbrk calls.
	   */
	current_brk = (char*)(MORECORE(0));
	if (current_brk == (char*)(av->top) + top_size) {

	    /*
	       Attempt to release memory. We ignore MORECORE return value,
	       and instead call again to find out where new end of memory is.
	       This avoids problems if first call releases less than we asked,
	       or if failure somehow altered brk value. (We could still
	       encounter problems if it altered brk in some very bad way,
	       but the only thing we can do is adjust anyway, which will cause
	       some downstream failure.)
	       */

	    MORECORE(-extra);
	    new_brk = (char*)(MORECORE(0));

	    if (new_brk != (char*)MORECORE_FAILURE) {
		released = (long)(current_brk - new_brk);

		if (released != 0) {
		    /* Success. Adjust top. */
		    av->sbrked_mem -= released;
		    set_head(av->top, (top_size - released) | PREV_INUSE);
		    check_malloc_state();
		    return 1;
		}
	    }
	}
    }
    return 0;
}
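The header comment mentions the public malloc_trim routine as one of the callers. A hedged sketch of what such a wrapper could look like, modelled on the locking pattern from Example #1 (the real uClibc entry point may differ in detail):

/* Sketch only: plausible public wrapper around __malloc_trim(),
   reusing __MALLOC_LOCK/__MALLOC_UNLOCK and get_malloc_state()
   exactly as mallinfo() does above. */
int malloc_trim(size_t pad)
{
    int result;
    mstate av;

    __MALLOC_LOCK;
    av = get_malloc_state();
    __malloc_consolidate(av);        /* empty fastbins so top can grow */
    result = __malloc_trim(pad, av); /* 1 if memory was actually released */
    __MALLOC_UNLOCK;
    return result;
}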
Example #3
int
__malloc_set_state(void* msptr)
{
  struct malloc_save_state* ms = (struct malloc_save_state*)msptr;
  size_t i;
  mbinptr b;

  disallow_malloc_check = 1;
  ptmalloc_init();
  if(ms->magic != MALLOC_STATE_MAGIC) return -1;
  /* Must fail if the major version is too high. */
  if((ms->version & ~0xffl) > (MALLOC_STATE_VERSION & ~0xffl)) return -2;
  (void)mutex_lock(&main_arena.mutex);
  /* There are no fastchunks.  */
  clear_fastchunks(&main_arena);
  if (ms->version >= 4)
    set_max_fast(ms->max_fast);
  else
    set_max_fast(64);	/* 64 used to be the value we always used.  */
  for (i=0; i<NFASTBINS; ++i)
    fastbin (&main_arena, i) = 0;
  for (i=0; i<BINMAPSIZE; ++i)
    main_arena.binmap[i] = 0;
  top(&main_arena) = ms->av[2];
  main_arena.last_remainder = 0;
  for(i=1; i<NBINS; i++) {
    b = bin_at(&main_arena, i);
    if(ms->av[2*i+2] == 0) {
      assert(ms->av[2*i+3] == 0);
      first(b) = last(b) = b;
    } else {
      if(ms->version >= 3 &&
	 (i<NSMALLBINS || (largebin_index(chunksize(ms->av[2*i+2]))==i &&
			   largebin_index(chunksize(ms->av[2*i+3]))==i))) {
	first(b) = ms->av[2*i+2];
	last(b) = ms->av[2*i+3];
	/* Make sure the links to the bins within the heap are correct.  */
	first(b)->bk = b;
	last(b)->fd = b;
	/* Set bit in binblocks.  */
	mark_bin(&main_arena, i);
      } else {
	/* Oops, index computation from chunksize must have changed.
	   Link the whole list into unsorted_chunks.  */
	first(b) = last(b) = b;
	b = unsorted_chunks(&main_arena);
	ms->av[2*i+2]->bk = b;
	ms->av[2*i+3]->fd = b->fd;
	b->fd->bk = ms->av[2*i+3];
	b->fd = ms->av[2*i+2];
      }
    }
  }
  if (ms->version < 3) {
    /* Clear fd_nextsize and bk_nextsize fields.  */
    b = unsorted_chunks(&main_arena)->fd;
    while (b != unsorted_chunks(&main_arena)) {
      if (!in_smallbin_range(chunksize(b))) {
	b->fd_nextsize = NULL;
	b->bk_nextsize = NULL;
      }
      b = b->fd;
    }
  }
  mp_.sbrk_base = ms->sbrk_base;
  main_arena.system_mem = ms->sbrked_mem_bytes;
  mp_.trim_threshold = ms->trim_threshold;
  mp_.top_pad = ms->top_pad;
  mp_.n_mmaps_max = ms->n_mmaps_max;
  mp_.mmap_threshold = ms->mmap_threshold;
  check_action = ms->check_action;
  main_arena.max_system_mem = ms->max_sbrked_mem;
  mp_.n_mmaps = ms->n_mmaps;
  mp_.max_n_mmaps = ms->max_n_mmaps;
  mp_.mmapped_mem = ms->mmapped_mem;
  mp_.max_mmapped_mem = ms->max_mmapped_mem;
  /* add version-dependent code here */
  if (ms->version >= 1) {
    /* Check whether it is safe to enable malloc checking, or whether
       it is necessary to disable it.  */
    if (ms->using_malloc_checking && !using_malloc_checking &&
	!disallow_malloc_check)
      __malloc_check_init ();
    else if (!ms->using_malloc_checking && using_malloc_checking) {
      __malloc_hook = NULL;
      __free_hook = NULL;
      __realloc_hook = NULL;
      __memalign_hook = NULL;
      using_malloc_checking = 0;
    }
  }
  if (ms->version >= 4) {
#ifdef PER_THREAD
    mp_.arena_test = ms->arena_test;
    mp_.arena_max = ms->arena_max;
    narenas = ms->narenas;
#endif
  }
  check_malloc_state(&main_arena);

  (void)mutex_unlock(&main_arena.mutex);
  return 0;
}
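The natural counterpart is malloc_get_state(), which produces the blob this routine consumes. A minimal usage sketch of the save/restore pair, as used by dump/undump schemes (both functions are deprecated glibc interfaces and may only exist as compat symbols in recent releases):

#include <malloc.h>
#include <stdlib.h>

/* Sketch: snapshot the allocator state, then restore it later,
   e.g. across an image dump/undump cycle.  Error handling minimal. */
static void *saved_state;

void save_heap_state(void)
{
    saved_state = malloc_get_state();    /* malloc'd snapshot, or NULL */
}

void restore_heap_state(void)
{
    if (saved_state != NULL && malloc_set_state(saved_state) != 0)
        abort();                          /* negative: bad magic or version */
}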
Example #4
/* ------------------------- __malloc_consolidate -------------------------

  __malloc_consolidate is a specialized version of free() that tears
  down chunks held in fastbins.  Free itself cannot be used for this
  purpose since, among other things, it might place chunks back onto
  fastbins.  So, instead, we need to use a minor variant of the same
  code.

  Also, because this routine needs to be called the first time through
  malloc anyway, it turns out to be the perfect place to trigger
  initialization code.
*/
void attribute_hidden __malloc_consolidate(mstate av)
{
    mfastbinptr*    fb;                 /* current fastbin being consolidated */
    mfastbinptr*    maxfb;              /* last fastbin (for loop control) */
    mchunkptr       p;                  /* current chunk being consolidated */
    mchunkptr       nextp;              /* next chunk to consolidate */
    mchunkptr       unsorted_bin;       /* bin header */
    mchunkptr       first_unsorted;     /* chunk to link to */

    /* These have same use as in free() */
    mchunkptr       nextchunk;
    size_t size;
    size_t nextsize;
    size_t prevsize;
    int             nextinuse;
    mchunkptr       bck;
    mchunkptr       fwd;

    /*
       If max_fast is 0, we know that av hasn't
       yet been initialized, in which case do so below
       */

    if (av->max_fast != 0) {
	clear_fastchunks(av);

	unsorted_bin = unsorted_chunks(av);

	/*
	   Remove each chunk from fast bin and consolidate it, placing it
	   then in unsorted bin. Among other reasons for doing this,
	   placing in unsorted bin avoids needing to calculate actual bins
	   until malloc is sure that chunks aren't immediately going to be
	   reused anyway.
	   */

	maxfb = &(av->fastbins[fastbin_index(av->max_fast)]);
	fb = &(av->fastbins[0]);
	do {
	    if ( (p = *fb) != 0) {
		*fb = 0;

		do {
		    check_inuse_chunk(p);
		    nextp = p->fd;

		    /* Slightly streamlined version of consolidation code in free() */
		    size = p->size & ~PREV_INUSE;
		    nextchunk = chunk_at_offset(p, size);
		    nextsize = chunksize(nextchunk);

		    if (!prev_inuse(p)) {
			prevsize = p->prev_size;
			size += prevsize;
			p = chunk_at_offset(p, -((long) prevsize));
			unlink(p, bck, fwd);
		    }

		    if (nextchunk != av->top) {
			nextinuse = inuse_bit_at_offset(nextchunk, nextsize);
			set_head(nextchunk, nextsize);

			if (!nextinuse) {
			    size += nextsize;
			    unlink(nextchunk, bck, fwd);
			}

			first_unsorted = unsorted_bin->fd;
			unsorted_bin->fd = p;
			first_unsorted->bk = p;

			set_head(p, size | PREV_INUSE);
			p->bk = unsorted_bin;
			p->fd = first_unsorted;
			set_foot(p, size);
		    }

		    else {
			size += nextsize;
			set_head(p, size | PREV_INUSE);
			av->top = p;
		    }

		} while ( (p = nextp) != 0);

	    }
	} while (fb++ != maxfb);
    }
    else {
	malloc_init_state(av);
	check_malloc_state();
    }
}
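Both this routine and the patched variant in the next example lean on a handful of chunk-header helpers. Simplified sketches of them, roughly as they appear in dlmalloc-derived allocators (the real definitions carry extra flag bits such as IS_MMAPPED and NON_MAIN_ARENA, plus debug hooks):

#include <stddef.h>

/* Simplified sketch of the chunk layout and header macros used above. */
struct malloc_chunk {
    size_t prev_size;            /* size of previous chunk, if it is free  */
    size_t size;                 /* size of this chunk, plus low flag bits */
    struct malloc_chunk *fd;     /* free-list links, valid only when free  */
    struct malloc_chunk *bk;
};
typedef struct malloc_chunk *mchunkptr;

#define PREV_INUSE 0x1

#define chunksize(p)              ((p)->size & ~(size_t)0x7)        /* strip flag bits        */
#define prev_inuse(p)             ((p)->size & PREV_INUSE)          /* is prev chunk in use?  */
#define chunk_at_offset(p, s)     ((mchunkptr)((char *)(p) + (s)))  /* chunk s bytes away     */
#define set_head(p, s)            ((p)->size = (s))                 /* install size + flags   */
#define set_foot(p, s)            (chunk_at_offset(p, s)->prev_size = (s))
#define inuse_bit_at_offset(p, s) (chunk_at_offset(p, s)->size & PREV_INUSE)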
Example #5
/* ------------------------- __malloc_consolidate -------------------------

  __malloc_consolidate is a specialized version of free() that tears
  down chunks held in fastbins.  Free itself cannot be used for this
  purpose since, among other things, it might place chunks back onto
  fastbins.  So, instead, we need to use a minor variant of the same
  code.

  Also, because this routine needs to be called the first time through
  malloc anyway, it turns out to be the perfect place to trigger
  initialization code.
*/
void attribute_hidden __malloc_consolidate(mstate av)
{
    mfastbinptr*    fb;                 /* current fastbin being consolidated */
    mfastbinptr*    maxfb;              /* last fastbin (for loop control) */
    mchunkptr       p;                  /* current chunk being consolidated */
    mchunkptr       nextp;              /* next chunk to consolidate */
    mchunkptr       unsorted_bin;       /* bin header */
    mchunkptr       first_unsorted;     /* chunk to link to */
    ustate	    unit;		/* unit (ustate) that owns the chunk being consolidated */

    /* These have same use as in free() */
    mchunkptr       nextchunk;
    size_t size;
    size_t nextsize;
    size_t prevsize;
    int             nextinuse;
    mchunkptr       bck;
    mchunkptr       fwd;

    /*
       If max_fast is 0, we know that av hasn't
       yet been initialized, in which case do so below
       */

    if (av->max_fast != 0) {
	clear_fastchunks(av);

	unsorted_bin = unsorted_chunks(av);

	/*
	   Remove each chunk from fast bin and consolidate it, placing it
	   then in unsorted bin. Among other reasons for doing this,
	   placing in unsorted bin avoids needing to calculate actual bins
	   until malloc is sure that chunks aren't immediately going to be
	   reused anyway.
	   */

	maxfb = &(av->fastbins[fastbin_index(av->max_fast)]);
	fb = &(av->fastbins[0]);
	do {
	    if ( (p = *fb) != 0) {
		*fb = 0;

		do {
		    check_inuse_chunk(p);
		    nextp = p->fd;

		    /* Slightly streamlined version of consolidation code in free() */
		    size = p->size & ~PREV_INUSE;
		    nextchunk = chunk_at_offset(p, size);
		    nextsize = chunksize(nextchunk);

		    if (!prev_inuse(p)) {
			prevsize = p->prev_size;
			size += prevsize;
			p = chunk_at_offset(p, -((long) prevsize));
			unlink(p, bck, fwd);
		    }
		    
		    unit = lookup_ustate_by_mem((void*)p);
		    if (nextchunk != unit->unit_top) {
			nextinuse = inuse_bit_at_offset(nextchunk, nextsize);
			set_head(nextchunk, nextsize);

			if (!nextinuse) {
			    size += nextsize;
			    unlink(nextchunk, bck, fwd);
			}

			first_unsorted = unsorted_bin->fd;
			unsorted_bin->fd = p;
			first_unsorted->bk = p;

			set_head(p, size | PREV_INUSE);
			p->bk = unsorted_bin;
			p->fd = first_unsorted;
			set_foot(p, size);
		    }

		    else {
			size += nextsize;
			set_head(p, size | PREV_INUSE);
			unit->unit_top = p;
		    }

		} while ( (p = nextp) != 0);

	    }
	} while (fb++ != maxfb);
    }
    else {
	if (get_abstate()->mstate_list.num == 0) {
	    /* initialize abheap state */
	    init_linked_list(&(get_abstate()->mstate_list));
	    init_linked_list(&(get_abstate()->ustate_list));
	    init_linked_list(&(get_abstate()->mmapped_ustate_list));
	    get_abstate()->ab_top = (mchunkptr)(CHANNEL_ADDR);
	    /* allocate channel heap space */
	    mmap((void *) CHANNEL_ADDR, CHANNEL_SIZE, PROT_READ|PROT_WRITE,
		 MAP_ANONYMOUS|MAP_FIXED|MAP_SHARED, -1, 0);
	    touch_mem((void *)CHANNEL_ADDR, CHANNEL_SIZE);
	}
	malloc_init_state(av);
	check_malloc_state();
    }
}
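The initialization branch maps a fixed shared "channel" region and then calls touch_mem() on it. That helper's implementation is not part of this excerpt; a plausible sketch, purely an assumption based on the name and the call site, is to fault every page in up front:

#include <stddef.h>

/* Hypothetical sketch of touch_mem(): write one byte per page so the
   freshly mmap()ed channel region is backed by real frames immediately
   instead of being populated lazily on first access. */
static void touch_mem(void *mem, size_t size)
{
    volatile char *p = (volatile char *)mem;
    size_t off;

    for (off = 0; off < size; off += 4096)   /* assume 4 KiB pages */
        p[off] = 0;
}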
Example #6
/* Run the allocator's internal consistency checks (check_malloc_state)
   under whatever wrapping MALLOC_WRAPPED_ACTION provides. */
void lea_malloc_check_state(void)
{
	MALLOC_WRAPPED_ACTION(check_malloc_state());
}
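MALLOC_WRAPPED_ACTION is not defined in this excerpt; one plausible expansion, offered only as an assumption, is a convenience macro that takes the allocator lock around the requested action, using the same lock/unlock pair seen in the earlier examples:

/* Assumption: a lock-around-action macro along these lines. */
#define MALLOC_WRAPPED_ACTION(action)  \
    do {                               \
        __MALLOC_LOCK;                 \
        action;                        \
        __MALLOC_UNLOCK;               \
    } while (0)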