Example #1
/*
  Initialize a malloc_state struct.

  This is called only from within __malloc_consolidate, which needs to
  be called in the same contexts anyway.  It is never called directly
  outside of __malloc_consolidate because some optimizing compilers try
  to inline it at all call points, which turns out not to be an
  optimization at all. (Inlining it in __malloc_consolidate is fine though.)
*/
static void malloc_init_state(mstate av)
{
    int     i;
    mbinptr bin;

    /* Establish circular links for normal bins */
    for (i = 1; i < NBINS; ++i) {
	bin = bin_at(av,i);
	bin->fd = bin->bk = bin;
    }

    av->top_pad        = DEFAULT_TOP_PAD;
    av->n_mmaps_max    = DEFAULT_MMAP_MAX;
    av->mmap_threshold = DEFAULT_MMAP_THRESHOLD;
    av->trim_threshold = DEFAULT_TRIM_THRESHOLD;

#if MORECORE_CONTIGUOUS
    set_contiguous(av);
#else
    set_noncontiguous(av);
#endif

    set_max_fast(av, DEFAULT_MXFAST);

    av->top            = initial_top(av);
    av->pagesize       = malloc_getpagesize;
}
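The loop near the top of malloc_init_state puts every normal bin into its canonical empty state: a list header whose fd and bk pointers point back at the header itself. With that representation, emptiness is a single pointer comparison and chunks can be spliced in and out without NULL checks. A minimal standalone sketch of the idiom, with illustrative names rather than glibc's:

#include <stdio.h>

/* Illustrative stand-in for a bin header: the sentinel of a
   circular doubly linked list.  An empty list is one whose
   links point to the sentinel itself.  */
struct node {
    struct node *fd;   /* forward link */
    struct node *bk;   /* backward link */
};

static void list_init(struct node *sentinel)
{
    sentinel->fd = sentinel->bk = sentinel;    /* circular and empty */
}

static int list_is_empty(const struct node *sentinel)
{
    return sentinel->fd == sentinel;           /* only the sentinel left */
}

int main(void)
{
    struct node bin;
    list_init(&bin);
    printf("empty: %d\n", list_is_empty(&bin));   /* prints "empty: 1" */
    return 0;
}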
Example #2
/*
  Initialize a malloc_state struct.

  This is called only from within __malloc_consolidate, which needs to
  be called in the same contexts anyway.  It is never called directly
  outside of __malloc_consolidate because some optimizing compilers try
  to inline it at all call points, which turns out not to be an
  optimization at all. (Inlining it in __malloc_consolidate is fine though.)
*/
static void malloc_init_state(mstate av)
{
    int     i;
    mbinptr bin;

    /* Establish circular links for normal bins */
    for (i = 1; i < NBINS; ++i) {
	bin = bin_at(av,i);
	bin->fd = bin->bk = bin;
    }

    av->top_pad        = DEFAULT_TOP_PAD;
    av->n_mmaps_max    = DEFAULT_MMAP_MAX;
    av->mmap_threshold = DEFAULT_MMAP_THRESHOLD;
    av->trim_threshold = DEFAULT_TRIM_THRESHOLD;

#if MORECORE_CONTIGUOUS
    set_contiguous(av);
#else
    set_noncontiguous(av);
#endif

    set_max_fast(av, DEFAULT_MXFAST);

    /* Top initialization is disabled in this variant.  */
    //av->top            = initial_top(av);

    /* Initialize this arena's ustate list.  */
    init_linked_list(&(av->ustate_list));
    av->pagesize       = malloc_getpagesize;

    /* Register the new mstate on the global mstate list.  */
    list_insert_tail(&(get_abstate()->mstate_list), (void *)av);
}
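This variant differs from Example #1 in three ways: top is no longer initialized here, each arena gets its own ustate list, and the arena is appended to a global mstate list reached through get_abstate(). The helpers init_linked_list, list_insert_tail, and get_abstate are not shown in the example, so the sketch below is only a guess at their shape, using a singly linked list with head and tail pointers:

#include <stdlib.h>

/* Hypothetical shapes for the list helpers used above.  The real
   init_linked_list / list_insert_tail / get_abstate are defined
   elsewhere, so all names and fields here are assumptions.  Note
   that inside an allocator the node storage could not come from
   malloc itself; this sketch ignores that detail.  */
struct list_node {
    struct list_node *next;
    void             *data;
};

struct linked_list {
    struct list_node *head;
    struct list_node *tail;
};

static void init_linked_list(struct linked_list *list)
{
    list->head = list->tail = NULL;            /* empty list */
}

static int list_insert_tail(struct linked_list *list, void *data)
{
    struct list_node *node = malloc(sizeof *node);
    if (node == NULL)
        return -1;                             /* out of memory */
    node->next = NULL;
    node->data = data;
    if (list->tail == NULL)
        list->head = node;                     /* first element */
    else
        list->tail->next = node;
    list->tail = node;
    return 0;
}

Under this assumed shape, the call list_insert_tail(&(get_abstate()->mstate_list), (void *)av) at the end of malloc_init_state would append the freshly initialized arena in O(1).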
Example #3
int
__malloc_set_state(void* msptr)
{
  struct malloc_save_state* ms = (struct malloc_save_state*)msptr;
  size_t i;
  mbinptr b;

  disallow_malloc_check = 1;
  ptmalloc_init();
  if(ms->magic != MALLOC_STATE_MAGIC) return -1;
  /* Must fail if the major version is too high. */
  if((ms->version & ~0xffl) > (MALLOC_STATE_VERSION & ~0xffl)) return -2;
  (void)mutex_lock(&main_arena.mutex);
  /* There are no fastchunks.  */
  clear_fastchunks(&main_arena);
  if (ms->version >= 4)
    set_max_fast(ms->max_fast);
  else
    set_max_fast(64);	/* 64 used to be the value we always used.  */
  for (i=0; i<NFASTBINS; ++i)
    fastbin (&main_arena, i) = 0;
  for (i=0; i<BINMAPSIZE; ++i)
    main_arena.binmap[i] = 0;
  top(&main_arena) = ms->av[2];
  main_arena.last_remainder = 0;
  for(i=1; i<NBINS; i++) {
    b = bin_at(&main_arena, i);
    if(ms->av[2*i+2] == 0) {
      assert(ms->av[2*i+3] == 0);
      first(b) = last(b) = b;
    } else {
      if(ms->version >= 3 &&
	 (i<NSMALLBINS || (largebin_index(chunksize(ms->av[2*i+2]))==i &&
			   largebin_index(chunksize(ms->av[2*i+3]))==i))) {
	first(b) = ms->av[2*i+2];
	last(b) = ms->av[2*i+3];
	/* Make sure the links to the bins within the heap are correct.  */
	first(b)->bk = b;
	last(b)->fd = b;
	/* Set bit in binblocks.  */
	mark_bin(&main_arena, i);
      } else {
	/* Oops, index computation from chunksize must have changed.
	   Link the whole list into unsorted_chunks.  */
	first(b) = last(b) = b;
	b = unsorted_chunks(&main_arena);
	ms->av[2*i+2]->bk = b;
	ms->av[2*i+3]->fd = b->fd;
	b->fd->bk = ms->av[2*i+3];
	b->fd = ms->av[2*i+2];
      }
    }
  }
  if (ms->version < 3) {
    /* Clear fd_nextsize and bk_nextsize fields.  */
    b = unsorted_chunks(&main_arena)->fd;
    while (b != unsorted_chunks(&main_arena)) {
      if (!in_smallbin_range(chunksize(b))) {
	b->fd_nextsize = NULL;
	b->bk_nextsize = NULL;
      }
      b = b->fd;
    }
  }
  mp_.sbrk_base = ms->sbrk_base;
  main_arena.system_mem = ms->sbrked_mem_bytes;
  mp_.trim_threshold = ms->trim_threshold;
  mp_.top_pad = ms->top_pad;
  mp_.n_mmaps_max = ms->n_mmaps_max;
  mp_.mmap_threshold = ms->mmap_threshold;
  check_action = ms->check_action;
  main_arena.max_system_mem = ms->max_sbrked_mem;
  mp_.n_mmaps = ms->n_mmaps;
  mp_.max_n_mmaps = ms->max_n_mmaps;
  mp_.mmapped_mem = ms->mmapped_mem;
  mp_.max_mmapped_mem = ms->max_mmapped_mem;
  /* add version-dependent code here */
  if (ms->version >= 1) {
    /* Check whether it is safe to enable malloc checking, or whether
       it is necessary to disable it.  */
    if (ms->using_malloc_checking && !using_malloc_checking &&
	!disallow_malloc_check)
      __malloc_check_init ();
    else if (!ms->using_malloc_checking && using_malloc_checking) {
      __malloc_hook = NULL;
      __free_hook = NULL;
      __realloc_hook = NULL;
      __memalign_hook = NULL;
      using_malloc_checking = 0;
    }
  }
  if (ms->version >= 4) {
#ifdef PER_THREAD
    mp_.arena_test = ms->arena_test;
    mp_.arena_max = ms->arena_max;
    narenas = ms->narenas;
#endif
  }
  check_malloc_state(&main_arena);

  (void)mutex_unlock(&main_arena.mutex);
  return 0;
}
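Throughout __malloc_set_state, ms->version gates what gets restored: max_fast is taken from the saved state only for version >= 4, the nextsize links are cleared only for version < 3, and the function bails out early when the saved major version is higher than its own. The & ~0xffl masks imply that the low byte of the version word is a minor version and everything above it a major version; a small sketch of that check under those assumptions (MALLOC_STATE_VERSION's actual value is not shown here, so a placeholder is used):

#include <stdio.h>

/* Sketch of the version gate used above.  The low byte of the
   version word is treated as the minor version and everything
   above it as the major version.  MALLOC_STATE_VERSION's real
   value is defined in glibc; the one below is only a placeholder
   for the demonstration.  */
#define DEMO_STATE_VERSION (2 * 0x100 + 4)   /* "major 2, minor 4" (assumed encoding) */

static int state_too_new(long saved_version)
{
    /* Reject only when the saved major version exceeds ours;
       a newer minor version is acceptable.  */
    return (saved_version & ~0xffl) > (DEMO_STATE_VERSION & ~0xffl);
}

int main(void)
{
    printf("%d\n", state_too_new(2 * 0x100 + 9));   /* same major -> 0 */
    printf("%d\n", state_too_new(3 * 0x100 + 0));   /* newer major -> 1 */
    return 0;
}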