Example 1
0
/*
 * Interposed valloc(): delegate the page-aligned allocation to the real
 * implementation, bracketing it with the START_CALL()/END_CALL()
 * bookkeeping hooks so the (pointer, size) pair gets recorded.
 */
void* valloc(size_t size)
{
	void *result;

	START_CALL();
	result = real_valloc(size);
	END_CALL(result, size);

	return result;
}
Example 2
0
//
// track_valloc() - Wrapper that performs the allocation through the real
// valloc for the given zone and records the resulting object's
// [start, start + size) range in ExternalObjects before returning it.
//
// NOTE(review): the range is registered even when real_valloc returns
// NULL, exactly as the original did — confirm ExternalObjects.insert
// tolerates a null start pointer.
//
static void *
track_valloc (malloc_zone_t * zone, size_t size) {
  // Ask the underlying allocator for the page-aligned object.
  char * start = (char *) real_valloc (zone, size);

  // Register the allocation with the external-object tracker and hand
  // the object back to the caller.
  ExternalObjects.insert (start, start + size);
  return start;
}
/*
 * valloc() replacement: return `size` bytes aligned to PAGE_SIZE by
 * delegating to the wrapped posix_memalign implementation.
 *
 * Returns the allocation on success; on failure returns NULL with errno
 * set to the error code, per valloc's contract.
 */
static void * myth_malloc_wrapper_valloc(size_t size)
{
#ifdef MYTH_WRAP_MALLOC_RUNTIME
  /* fall back to the bump allocator before wrapping completed */
  if (!g_wrap_malloc_completed) {
    void *ptr = sys_alloc_align(PAGE_SIZE, size);
    return ptr;
  }
  /* no wrap. call the real one */
  if (!g_wrap_malloc) {
    return real_valloc(size);
  }
#endif
  void * ret = 0;
  /*
   * posix_memalign reports failure through its return value, not errno.
   * The original code assigned that value to errno unconditionally,
   * which cleared errno to 0 on success — something a library routine
   * must never do (C11 7.5p3 / POSIX errno semantics).  Set errno only
   * on failure instead.
   */
  int err = myth_malloc_wrapper_posix_memalign(&ret, PAGE_SIZE, size);
  if (err != 0) {
    errno = err;
    return NULL;   /* ret is untouched/NULL on failure anyway */
  }
  return ret;
}
Example 4
0
/*
 * valloc() interposer for the log_malloc tracer: allocates MEM_OFF extra
 * bytes for an in-band log_malloc_s header, updates global usage
 * counters, optionally emits a "+ valloc ..." trace line, and returns
 * the payload pointer via MEM_PTR().
 *
 * NOTE(review): the header sits at the front of the page-aligned block,
 * so MEM_PTR(mem) is presumably mem + MEM_OFF and no longer page-aligned
 * as valloc() promises — confirm against MEM_OFF/MEM_PTR definitions.
 *
 * Fix: `memuse` was previously uninitialized and only assigned in the
 * success branch, yet always passed to snprintf below — reading an
 * uninitialized variable (UB) whenever real_valloc fails.  Initialize
 * it to 0, mirroring the existing `memruse = 0`.
 */
void *valloc(size_t size)
{
	struct log_malloc_s *mem;
	sig_atomic_t memuse = 0;	/* stays 0 if the allocation fails */
	sig_atomic_t memruse = 0;	/* stays 0 unless usable-size support is built */

	/* lazily resolve the real valloc; bail out if it cannot be found */
	if(!DL_RESOLVE_CHECK(valloc))
		return NULL;

	/* allocate payload + header; on failure fall through with mem == NULL */
	if((mem = real_valloc(size + MEM_OFF)) != NULL)
	{
		mem->size = size;
		mem->cb = ~mem->size;	/* bitwise complement: header integrity check */
		memuse = __sync_add_and_fetch(&g_ctx.mem_used, mem->size);
#ifdef HAVE_MALLOC_USABLE_SIZE
		/* also track the allocator-reported usable size */
		mem->rsize = malloc_usable_size(mem);
		memruse = __sync_add_and_fetch(&g_ctx.mem_rused, mem->rsize);
#endif
	}
#ifndef DISABLE_CALL_COUNTS
	/* per-call statistics; unrel_sum is deliberately non-atomic ("unreliable") */
	(void)__sync_fetch_and_add(&g_ctx.stat.valloc, 1);
	g_ctx.stat.unrel_sum++;
#endif

	if(!g_ctx.memlog_disabled)
	{
		int s;
		char buf[LOG_BUFSIZE];

		/* "+ valloc <requested> <payload-ptr> [<used>:<real-used>]" */
		s = snprintf(buf, sizeof(buf), "+ valloc %zu %p [%u:%u]\n",
			size, MEM_PTR(mem),
			memuse, memruse);

		log_trace(buf, s, sizeof(buf), 1);
	}
	/* on failure mem is NULL; MEM_PTR(mem) is returned as-is
	 * (presumably NULL-safe — verify the macro) */
	return MEM_PTR(mem);
}