Example #1
void			free(void *ptr)
{
  t_block_descriptor	*temp;

  if (ptr == NULL)
    return ;
  temp = (t_block_descriptor *) ptr - 1;
  if (check_chunk(temp) == -1)
    {
      my_put_error("in free(): warning: modified (chunk-) pointer\n");
      return ;
    }
  if (is_already_free(buckets[temp->p - 1], temp) == -1)
    {
      my_put_error("in free(): warning: page is already free\n");
      return ;
    }
  if (is_malloc(temp, 1) == -1)
    {
      my_put_error("in free(): warning: malloc() has never been called\n");
      return ;
    }
  ret_from_list(temp);
  put_in_freelist(temp);
  return ;
}
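
Example #1 recovers the allocator's metadata by stepping one descriptor back from the user pointer (temp = (t_block_descriptor *) ptr - 1). A minimal sketch of that header-before-payload convention, with an illustrative descriptor layout (the real t_block_descriptor fields are not visible in the snippet above, so the names below are assumptions):

#include <stddef.h>
#include <stdlib.h>

/* Hypothetical descriptor stored immediately before each payload. */
typedef struct s_toy_descriptor
{
  size_t	size;	/* payload size (assumed field) */
  int		p;	/* bucket index, cf. buckets[temp->p - 1] (assumed) */
}		t_toy_descriptor;

void	*toy_malloc(size_t size)
{
  t_toy_descriptor	*desc;

  desc = malloc(sizeof(*desc) + size);
  if (desc == NULL)
    return (NULL);
  desc->size = size;
  desc->p = 1;
  return ((void *)(desc + 1));	/* caller only ever sees the payload */
}

void	toy_free(void *ptr)
{
  if (ptr == NULL)
    return ;
  free((t_toy_descriptor *) ptr - 1);	/* step back over the header */
}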
Example #2
void check_chunks(char *msg)
{
	int i, corrupt = 0;
	if (debug) mrlog("check_chunks(%s)", msg);
	for (i = 0; i < CHUNKS_MAX; i++) {
		if (chunks[i].p) {
			corrupt |= check_chunk(i);
		}
	}
	if (corrupt) {
		mrlog("Memory corruption detected");
		mrexit("check_chunks found memory corruption", EXIT_FAILURE);
	} else {
		if (debug) mrlog("Memory checks out OK");
	}
}
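
Examples #2 and #4 come from the same debug allocator and share a fixed-size registry of live allocations. A minimal sketch of that bookkeeping, with a guard-byte check_chunk(i) that returns nonzero on corruption (the slot layout and magic value are assumptions for illustration, not the original project's code):

#include <stdio.h>
#include <stddef.h>

#define CHUNKS_MAX	1024
#define GUARD_MAGIC	0xA5

struct chunk_slot {
	void *p;	/* payload pointer, NULL when the slot is free */
	char *cl;	/* caller label used in the log messages */
	size_t size;	/* payload size; guard byte sits right after it */
};

static struct chunk_slot chunks[CHUNKS_MAX];

/* Returns nonzero if the guard byte behind the payload was clobbered. */
static int check_chunk(int i)
{
	unsigned char *guard = (unsigned char *)chunks[i].p + chunks[i].size;

	if (*guard != GUARD_MAGIC) {
		fprintf(stderr, "chunk %d (%s): guard byte overwritten\n",
			i, chunks[i].cl);
		return 1;
	}
	return 0;
}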
Example #3
static enum scanner_traversal_e
check_chunk_and_sleep(const char *chunk_path, void *data)
{
	GError *local_error = NULL;
	struct chunk_checker_data_s *cc_data;

	cc_data = data;

	if (!check_chunk(chunk_path, cc_data->volume_path, &local_error)) {
		ERROR("check_chunk(%s) : %s", chunk_path, gerror_get_message(local_error));
		g_clear_error(&local_error);
	}

	sleep(cc_data->sleep_time);

	return SCAN_CONTINUE;
}
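
Example #3 is a visitor callback: a volume scanner hands it each chunk path, and the SCAN_CONTINUE return value tells the scanner to keep traversing even when one chunk fails its check, while sleep() paces the scan. A minimal sketch of the driving loop such a scanner could use (scan_directory and the two-value enum are assumptions for illustration, not the project's real scanner API):

#include <dirent.h>
#include <stdio.h>

enum scanner_traversal_e { SCAN_CONTINUE, SCAN_ABORT };

typedef enum scanner_traversal_e (*scan_cb)(const char *path, void *data);

/* Hypothetical driver: visit every entry in a volume directory,
   stopping early only if the callback asks to. */
static void scan_directory(const char *volume, scan_cb cb, void *data)
{
	DIR *dir = opendir(volume);
	struct dirent *ent;
	char path[4096];

	if (!dir)
		return;
	while ((ent = readdir(dir)) != NULL) {
		if (ent->d_name[0] == '.')
			continue;
		snprintf(path, sizeof(path), "%s/%s", volume, ent->d_name);
		if (cb(path, data) != SCAN_CONTINUE)
			break;
	}
	closedir(dir);
}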
Example #4
static void remove_chunk(void *p, char *cl)
{
	int i;

	if (!debug_memory) return;

	for (i = 0; i < CHUNKS_MAX; i++)
		if (chunks[i].p == p) break;
	if (i == CHUNKS_MAX) {
		mrlog("Can't find chunk %p (%s)", p, cl);
		dump_chunks();
		mrlog("Continuing even though I can't find chunk %p (%s)",
			p, cl);
		return;	/* indexing chunks[i] below would run past the table */
	}
	if (check_chunk(i)) mrexit("remove_chunk: check not ok", EXIT_FAILURE);
	if (debug >= 3) {
		mrlog("Removing chunk %p (%s) from slot %d",
			p, chunks[i].cl, i);
	}
	chunks[i].p = NULL;
}
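
remove_chunk() is the teardown half of the registry sketched after Example #2; its counterpart would record the pointer at allocation time. A hypothetical add_chunk in the same style (illustrative, not the project's code; it assumes the chunks[] declarations from the earlier sketch plus the project's mrexit and debug_memory):

static void add_chunk(void *p, char *cl, size_t size)
{
	int i;

	if (!debug_memory) return;

	/* find the first free slot in the registry */
	for (i = 0; i < CHUNKS_MAX; i++)
		if (chunks[i].p == NULL) break;
	if (i == CHUNKS_MAX)
		mrexit("add_chunk: registry full", EXIT_FAILURE);
	chunks[i].p = p;
	chunks[i].cl = cl;
	chunks[i].size = size;
}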
Example #5
/* ------------------------------ free ------------------------------ */
void free(void* mem)
{
    mstate av;

    mchunkptr       p;           /* chunk corresponding to mem */
    size_t          size;        /* its size */
    mfastbinptr*    fb;          /* associated fastbin */
    mchunkptr       nextchunk;   /* next contiguous chunk */
    size_t          nextsize;    /* its size */
    int             nextinuse;   /* true if nextchunk is used */
    size_t          prevsize;    /* size of previous contiguous chunk */
    mchunkptr       bck;         /* misc temp for linking */
    mchunkptr       fwd;         /* misc temp for linking */

    /* free(0) has no effect */
    if (mem == NULL)
	return;

    __MALLOC_LOCK;
    av = get_malloc_state();
    p = mem2chunk(mem);
    size = chunksize(p);

    check_inuse_chunk(p);

    /*
       If eligible, place chunk on a fastbin so it can be found
       and used quickly in malloc.
       */

    if ((unsigned long)(size) <= (unsigned long)(av->max_fast)

#if TRIM_FASTBINS
	    /* If TRIM_FASTBINS set, don't place chunks
	       bordering top into fastbins */
	    && (chunk_at_offset(p, size) != av->top)
#endif
       ) {

	set_fastchunks(av);
	fb = &(av->fastbins[fastbin_index(size)]);
	p->fd = *fb;
	*fb = p;
    }

    /*
       Consolidate other non-mmapped chunks as they arrive.
       */

    else if (!chunk_is_mmapped(p)) {
	set_anychunks(av);

	nextchunk = chunk_at_offset(p, size);
	nextsize = chunksize(nextchunk);

	/* consolidate backward */
	if (!prev_inuse(p)) {
	    prevsize = p->prev_size;
	    size += prevsize;
	    p = chunk_at_offset(p, -((long) prevsize));
	    unlink(p, bck, fwd);
	}

	if (nextchunk != av->top) {
	    /* get and clear inuse bit */
	    nextinuse = inuse_bit_at_offset(nextchunk, nextsize);
	    set_head(nextchunk, nextsize);

	    /* consolidate forward */
	    if (!nextinuse) {
		unlink(nextchunk, bck, fwd);
		size += nextsize;
	    }

	    /*
	       Place the chunk in unsorted chunk list. Chunks are
	       not placed into regular bins until after they have
	       been given one chance to be used in malloc.
	       */

	    bck = unsorted_chunks(av);
	    fwd = bck->fd;
	    p->bk = bck;
	    p->fd = fwd;
	    bck->fd = p;
	    fwd->bk = p;

	    set_head(p, size | PREV_INUSE);
	    set_foot(p, size);

	    check_free_chunk(p);
	}

	/*
	   If the chunk borders the current high end of memory,
	   consolidate into top
	   */

	else {
	    size += nextsize;
	    set_head(p, size | PREV_INUSE);
	    av->top = p;
	    check_chunk(p);
	}

	/*
	   If freeing a large space, consolidate possibly-surrounding
	   chunks. Then, if the total unused topmost memory exceeds trim
	   threshold, ask malloc_trim to reduce top.

	   Unless max_fast is 0, we don't know if there are fastbins
	   bordering top, so we cannot tell for sure whether threshold
	   has been reached unless fastbins are consolidated.  But we
	   don't want to consolidate on each free.  As a compromise,
	   consolidation is performed if FASTBIN_CONSOLIDATION_THRESHOLD
	   is reached.
	   */

	if ((unsigned long)(size) >= FASTBIN_CONSOLIDATION_THRESHOLD) {
	    if (have_fastchunks(av))
		__malloc_consolidate(av);

	    if ((unsigned long)(chunksize(av->top)) >=
		    (unsigned long)(av->trim_threshold))
		__malloc_trim(av->top_pad, av);
	}

    }
    /*
       If the chunk was allocated via mmap, release via munmap()
       Note that if HAVE_MMAP is false but chunk_is_mmapped is
       true, then user must have overwritten memory. There's nothing
       we can do to catch this error unless DEBUG is set, in which case
       check_inuse_chunk (above) will have triggered error.
       */

    else {
	size_t offset = p->prev_size;
	av->n_mmaps--;
	av->mmapped_mem -= (size + offset);
	munmap((char*)p - offset, size + offset);
    }
    __MALLOC_UNLOCK;
}
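
The mem2chunk(mem) call near the top undoes the offset malloc applied when it handed out the pointer: the chunk header sits immediately below the user memory. In dlmalloc-derived allocators such as this one, the conversion pair is conventionally defined along these lines (SIZE_SZ is the allocator's basic word size; exact definitions vary between versions):

/* user pointer <-> chunk header: the payload starts two words
   (the prev_size and size fields) past the chunk base */
#define chunk2mem(p)   ((void *)((char *)(p) + 2 * SIZE_SZ))
#define mem2chunk(mem) ((mchunkptr)((char *)(mem) - 2 * SIZE_SZ))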
Example #6
static void
_int_free (mstate av, mchunkptr p, int have_lock)
{
  ...
  else if (!chunk_is_mmapped(p)) {
    ...
    nextchunk = chunk_at_offset(p, size);
    ...
    nextsize = chunksize(nextchunk);
    ...
    /* consolidate backward */
    if (!prev_inuse(p)) {
      prevsize = p->prev_size;
      size += prevsize;
      p = chunk_at_offset(p, -((long) prevsize));
      unlink(p, bck, fwd);
    }

    if (nextchunk != av->top) {
      /* get and clear inuse bit */
      nextinuse = inuse_bit_at_offset(nextchunk, nextsize);

      /* consolidate forward */
      if (!nextinuse) {
	unlink(nextchunk, bck, fwd);
	size += nextsize;
      } else
	clear_inuse_bit_at_offset(nextchunk, 0);

      /*
	Place the chunk in unsorted chunk list. Chunks are
	not placed into regular bins until after they have
	been given one chance to be used in malloc.
      */

      bck = unsorted_chunks(av);
      fwd = bck->fd;
      if (__glibc_unlikely (fwd->bk != bck))
	{
	  errstr = "free(): corrupted unsorted chunks";
	  goto errout;
	}
      p->fd = fwd;
      p->bk = bck;
      if (!in_smallbin_range(size))
	{
	  p->fd_nextsize = NULL;
	  p->bk_nextsize = NULL;
	}
      bck->fd = p;
      fwd->bk = p;

      set_head(p, size | PREV_INUSE);
      set_foot(p, size);

      check_free_chunk(av, p);
    }

    /*
      If the chunk borders the current high end of memory,
      consolidate into top
    */

    else {
      size += nextsize;
      set_head(p, size | PREV_INUSE);
      av->top = p;
      check_chunk(av, p);
    }
    ...
}
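
Compared with Example #5, this later glibc version adds an integrity check before linking into the unsorted bin: fwd is the first unsorted chunk, and if its bk no longer points back at the bin head bck, the list has been corrupted (the basis of unsorted-bin heap attacks). The invariant it enforces, spelled out as a plain assertion (illustrative, not glibc source):

/* the unsorted bin is circular and doubly linked, so the first
   chunk's back pointer must point at the bin head itself */
assert(unsorted_chunks(av)->fd->bk == unsorted_chunks(av));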