Example #1
/* glibc declares this as a weak symbol, so we can override it */
void *malloc(size_t size)
{
	if (_mallocmock_fail)
		return NULL;

	_mallocmock_malloc_size += size;
	return __libc_malloc(size);
}
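The mock above relies on two globals (_mallocmock_fail and _mallocmock_malloc_size) that are defined elsewhere in its test harness. A rough sketch of how a test might drive it, assuming the globals are an int flag and a size_t counter, with create_widget() made up purely for illustration:

/* Hypothetical driver for the mock above; the _mallocmock_* globals are
 * assumed to be defined in the same harness, and create_widget() stands
 * in for whatever code is actually under test. */
#include <assert.h>
#include <stddef.h>

extern int _mallocmock_fail;
extern size_t _mallocmock_malloc_size;
struct widget *create_widget(void);          /* hypothetical code under test */

static void test_out_of_memory_path(void)
{
	_mallocmock_fail = 1;                /* the next malloc() returns NULL */
	assert(create_widget() == NULL);     /* caller must handle the failure */
	_mallocmock_fail = 0;

	_mallocmock_malloc_size = 0;
	assert(create_widget() != NULL);
	assert(_mallocmock_malloc_size > 0); /* requested bytes were recorded */
}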
Example #2
void *calloc (size_t n_blocks, size_t n_block_bytes)
{
  size_t bytes;
  gpointer mem;

  bytes = n_blocks * n_block_bytes + HEADER_SPACE;
  mem = __libc_malloc (bytes);
  if (mem == NULL)   /* propagate failure rather than crashing in memset */
    return NULL;
  memset (mem, 0, bytes);
  return record_bytes (mem, n_blocks * n_block_bytes);
}
Example #3
__ptr_t
malloc(size_t sz)
{
    __ptr_t p = __libc_malloc(sz);
    if (p)
        memset(p, 0xcd, sz);

    ++live_blocks;
    return p;
}
Example #4
void *malloc(size_t size)
{
	void *ret;

	wrap_log("malloc(0x%zx)", size);
	ret = __libc_malloc(size);
	wrap_log(" = %p\n", ret);

	return ret;
}
Example #5
 // Override the GLIBC malloc to support mallocNan
 void* malloc(size_t size)
 {
     if (Foam::sigFpe::mallocNanActive_)
     {
         return Foam::sigFpe::mallocNan(size);
     }
     else
     {
         return __libc_malloc(size);
     }
 }
Example #6
void *
malloc(size_t sz)
{
    void *x;
    /* Reserve space for a bookkeeping header in front of the user block
       and a trailing "cage" of canary bytes behind it. */
    if ((x = __libc_malloc(sizeof(struct header) + sz + CAGE)) == 0)
        return 0;
    ((struct header *)x)->magic = MAGIC1 ^ sz;
    ((struct header *)x)->size = sz;
    /* Fill the cage so writes past the end of the block can be detected later. */
    memset((char *)x + sizeof(struct header) + sz, TWEETY, CAGE);
    return ((char *)x + sizeof(struct header));
}
Example #7
void* Foam::sigFpe::mallocNan(size_t size)
{
    // Call the low-level GLIBC malloc function
    void * result = __libc_malloc(size);

    // Initialize to signalling NaN
    UList<scalar> lst(reinterpret_cast<scalar*>(result), size/sizeof(scalar));
    sigFpe::fillNan(lst);

    return result;
}
Example #8
static void *
mem_get_memory(size_t align,
	       size_t *alloc_size,
	       int *ispool,
	       mem_slot_queue_t **ppslot)
{
    mem_slot_queue_t *pslot = NULL;
    size_t new_alloc_size;
    size_t sn;
    void *data;

    new_alloc_size = *alloc_size;
    if (!nkn_pool_enable || align || (*alloc_size > SIZE_MAX_SLOT))
	goto get_poolmiss;

    SIZE_TO_SLOT(*alloc_size, sn);
    new_alloc_size = SLOT_TO_SIZE(sn);
    *alloc_size = new_alloc_size;

    // Check thread local pool if available
    if ((int)mempool_key) {
	pslot = pthread_getspecific(mempool_key);
	if (pslot) {
	    *ppslot = &pslot[sn];
	    if ((data = mem_slot_get(&pslot[sn], &pslot))) {
		*ispool = MEM_SLOT_NLIB;
		return data;
	    }
	}
    }

    if (!pslot) {
	// Check global pool
	pslot = &mem_slot_q[0];
	*ppslot = &pslot[sn];
	data = mem_slot_get(&mem_slot_q[sn], &pslot);
	if (data) {
	    *ispool = MEM_SLOT_NLIB;
	    return data;
	}
    }

    // Fall thru to regular allocation with a size that can be
    // pooled during the free.

 get_poolmiss:
    if (align) {
	data =  __libc_memalign(align, new_alloc_size);
    } else {
	data = __libc_malloc(new_alloc_size);
    }
    return data;
}
Example #9
void *malloc(uint64_t size)
{
	pfunc();
#ifdef USER_KERNEL_COPY
	if( size > QCU_KMALLOC_MAX_SIZE)
#endif
		return __zcmalloc(size);
#ifdef USER_KERNEL_COPY
	else
		return __libc_malloc(size);
#endif
}
Example #10
extern "C" void* malloc(size_t size)
{
    REF;
    void* out = __libc_malloc(size);
    DEREF;
    if (out && D) {
        D->now_usable.fetchAndAddOrdered(malloc_usable_size(out));
        D->now_overhead.fetchAndAddOrdered(CHUNK_OVERHEAD);
        D->updatePeak();
    }
    return out;
}
Example #11
void *
calloc(size_t unit, size_t qty)
{
    size_t sz = unit * qty;
    void *x;
    if ((x = __libc_malloc(sizeof(struct header) + sz + CAGE)) == 0)
    	return 0;
    ((struct header *)x)->magic = MAGIC1 ^ sz;
    ((struct header *)x)->size = sz;
    memset((char *)x + sizeof(struct header), 0, sz);
    memset((char *)x + sizeof(struct header) + sz, TWEETY, CAGE);
    return ((char *)x + sizeof(struct header));
}
Example #12
void *
__malloc_get_state (void)
{
  struct malloc_save_state *ms;
  int i;
  mbinptr b;

  ms = (struct malloc_save_state *) __libc_malloc (sizeof (*ms));
  if (!ms)
    return 0;

  (void) mutex_lock (&main_arena.mutex);
  malloc_consolidate (&main_arena);
  ms->magic = MALLOC_STATE_MAGIC;
  ms->version = MALLOC_STATE_VERSION;
  ms->av[0] = 0;
  ms->av[1] = 0; /* used to be binblocks, now no longer used */
  ms->av[2] = top (&main_arena);
  ms->av[3] = 0; /* used to be undefined */
  for (i = 1; i < NBINS; i++)
    {
      b = bin_at (&main_arena, i);
      if (first (b) == b)
        ms->av[2 * i + 2] = ms->av[2 * i + 3] = 0; /* empty bin */
      else
        {
          ms->av[2 * i + 2] = first (b);
          ms->av[2 * i + 3] = last (b);
        }
    }
  ms->sbrk_base = mp_.sbrk_base;
  ms->sbrked_mem_bytes = main_arena.system_mem;
  ms->trim_threshold = mp_.trim_threshold;
  ms->top_pad = mp_.top_pad;
  ms->n_mmaps_max = mp_.n_mmaps_max;
  ms->mmap_threshold = mp_.mmap_threshold;
  ms->check_action = check_action;
  ms->max_sbrked_mem = main_arena.max_system_mem;
  ms->max_total_mem = 0;
  ms->n_mmaps = mp_.n_mmaps;
  ms->max_n_mmaps = mp_.max_n_mmaps;
  ms->mmapped_mem = mp_.mmapped_mem;
  ms->max_mmapped_mem = mp_.max_mmapped_mem;
  ms->using_malloc_checking = using_malloc_checking;
  ms->max_fast = get_max_fast ();
  ms->arena_test = mp_.arena_test;
  ms->arena_max = mp_.arena_max;
  ms->narenas = narenas;
  (void) mutex_unlock (&main_arena.mutex);
  return (void *) ms;
}
Example #13
static void
init_slot_mem(mem_slot_queue_t **base)
{
    u_int64_t  preallocate_size = 0;
    mem_slot_queue_t *pslot;
    int allcnt, pcnt;
    u_int64_t poff;
    struct free_queue *pfreeq;

    pslot = *base;
    for (allcnt = 0; allcnt < MAX_SLOTS; ++allcnt) {
	preallocate_size += pslot->size * pslot->prealloc_count;
	pslot++;
    }
    if (preallocate_size == 0) {
	base[0]->flags |= MEM_SLOT_INIT_DONE;
	return;
    }
    base[0]->prealloc_addr = __libc_malloc(preallocate_size);
    if (base[0]->prealloc_addr == NULL)
	return;
    void *temp_alloc = base[0]->prealloc_addr;
    pslot = *base;
    for (allcnt = 0; allcnt < MAX_SLOTS; ++allcnt) {
	SLOT_LOCK(pslot);
	pslot->tot_buf_in_this_slot = pslot->prealloc_count;
	for (pcnt =0, poff=0; pcnt < pslot->tot_buf_in_this_slot;
	     pcnt++, poff+=pslot->size) {
	    pfreeq = (struct free_queue *)
		((u_int64_t)temp_alloc + (u_int64_t)poff);
	    pfreeq->next = pslot->freeq;
	    pslot->freeq = pfreeq;
#ifdef MEM_DEBUG
	    AO_fetch_and_add1(&pslot->cnt);
#endif
	}
	SLOT_UNLOCK(pslot);
	pslot++;
	temp_alloc = (void *)((u_int64_t)temp_alloc + (u_int64_t)poff);
    }
    base[0]->flags |= MEM_SLOT_INIT_DONE;
}
Example #14
void* malloc(size_t size)
{
  void* ret;
  if( !chpl_mem_inited() ) {
    ret = __libc_malloc(size);
    if( DEBUG_REPLACE_MALLOC ) 
      printf("in early malloc %p = system malloc(%#x)\n", ret, (int) size);
    track_system_allocated(ret, size, __libc_malloc);
    return ret;
  }
  if( DEBUG_REPLACE_MALLOC ) 
    printf("in malloc\n");

  ret = chpl_malloc(size);

  if( DEBUG_REPLACE_MALLOC ) 
    printf("%p = chpl_malloc(%#x)\n", ret, (int) size);

  return ret;
}
Example #15
void *
fail_countdown_malloc(size_t size)
{
   if (ALLOC_FAIL_COUNTER >= 0) ALLOC_FAIL_COUNTER--;
   return ALLOC_FAIL_COUNTER < 0 ? NULL : __libc_malloc(size);
}
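Setting the counter to n lets the first n allocations succeed and makes every later one fail, so a harness typically sweeps n upward to hit each allocation site in turn. A minimal sketch, assuming ALLOC_FAIL_COUNTER is a plain int shared with the test and with run_operation() invented for illustration:

/* Hypothetical harness for the countdown allocator above. */
#include <assert.h>
#include <limits.h>

extern int ALLOC_FAIL_COUNTER;
int run_operation(void);              /* made-up code under test; 0 on success, -1 on OOM */

static void test_every_allocation_site(void)
{
   for (int n = 0; n < 1000; n++) {
      ALLOC_FAIL_COUNTER = n;         /* allocations after the n-th return NULL */
      int rc = run_operation();
      assert(rc == 0 || rc == -1);    /* must fail cleanly, never crash */
   }
   ALLOC_FAIL_COUNTER = INT_MAX;      /* a large value effectively disables fault injection */
}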
Example #16
void *
fail_prone_malloc(size_t size)
{
   return drand48() < ALLOC_ERR_PROB ? NULL : __libc_malloc(size);
}
Example #17
void *malloc(size_t size)
{
        void *__libc_malloc(size_t size);
        return __libc_malloc(size);
}
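Every example on this page leans on the same glibc property noted in Example #1: the allocator entry points can be overridden by the application while the original implementation remains reachable as __libc_malloc. A minimal sketch of how such a wrapper is commonly packaged as an LD_PRELOAD library so it can be injected without relinking the target (the file and program names below are made up, and the approach is glibc-specific):

/* mymalloc.c -- forward every allocation to glibc's internal entry point,
 * in the same spirit as Example #17. Typical build and use:
 *   gcc -shared -fPIC -o libmymalloc.so mymalloc.c
 *   LD_PRELOAD=./libmymalloc.so ./some_program
 */
#include <stddef.h>

extern void *__libc_malloc(size_t size);

void *malloc(size_t size)
{
        /* Hook point: add accounting, logging or fault injection here. */
        return __libc_malloc(size);
}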