/*
 * Carve one heap region into a singly-linked free list of fixed-size chunks.
 *
 * chunk_sz     - size of each chunk; the first sizeof(void*) bytes of a free
 *                chunk hold the "next" pointer.
 * min_alloc_sz - minimum size of the backing region to request.
 *
 * Returns the head of the list (myth_freelist_t), or NULL if the backing
 * allocation failed.  The region itself is never freed here; ownership
 * passes to the free list.
 */
static myth_freelist_t make_chunks(size_t chunk_sz, size_t min_alloc_sz) {
#if FIX_FALSE_SHARING4
  /* round the chunk size up to a 64-byte multiple (cache-line sized,
     presumably to avoid false sharing — see the macro name) */
  chunk_sz = (chunk_sz + 63) & ~(size_t)63;
#endif
  size_t alloc_sz = (chunk_sz <= min_alloc_sz ? min_alloc_sz : chunk_sz);
#if 0
  fprintf(stderr,
	  "malloc make_chunks(chunk_sz = %ld, min_alloc_sz = %ld, alloc_sz = %ld)\n",
	  chunk_sz, min_alloc_sz, alloc_sz);
#endif
#if FIX_FALSE_SHARING4
  void * region = NULL;
  /* BUG FIX: the argument was mojibake-garbled ("&reg;ion"); it must be
     &region.  Also check the result instead of using it unconditionally. */
  if (real_posix_memalign(&region, 64, alloc_sz) != 0) {
    region = NULL;
  }
#else
  void * region = real_malloc(alloc_sz);
#endif
  if (!region) {
    return NULL;		/* allocation failed: empty free list */
  }
  void * fl = NULL;		/* head of the list */
  void * tl = NULL;		/* tail of the list */
  /* use char * for the cursor: arithmetic on void * is a GNU extension */
  char * base = (char *)region;
  char * p;
  for (p = base; p + chunk_sz <= base + alloc_sz; p += chunk_sz) {
    *((void **)p) = NULL;	/* p->next = NULL */
    /* append p at the tail of the list */
    if (tl) {
      *((void **)tl) = p;	/* tl->next = p (comment was wrong before) */
    } else {
      fl = p;
    }
    tl = p;
  }
  return fl;
}
/*
 * posix_memalign interposer that over-allocates to append a red zone and a
 * trailing record of the requested size after the user region, for
 * overflow detection (the red zone is later checked for MAGIC_BYTE —
 * presumably by the matching free wrapper; not visible here).
 */
int posix_memalign(void **memptr, size_t alignment, size_t size) {
  size_t usable;
  void *p;
  int ret;
  /* Spin until the real posix_memalign has been resolved.  The first
     caller runs __init() (guarded by the `initializing` flag so the
     dlsym path does not recurse); other threads yield until it is done.
     NOTE(review): `initializing` appears to be a plain flag, not atomic —
     cross-thread safety depends on code outside this view. */
  while(real_posix_memalign == NULL){
    if(!initializing){
      initializing = 1;
      __init();
      initializing = 0;
    }
    sched_yield();
  }
  /* over-allocate: user bytes + front red-zone space + room for the size
     record (SIZEOF_F_RZ / SIZEOF_SIZE are project macros) */
  ret = real_posix_memalign(memptr, alignment, size + SIZEOF_F_RZ + SIZEOF_SIZE);
  if (ret != 0)
    return ret;
  /* actual usable size may exceed what we asked for; the red zone fills
     the whole slack computed by SIZEOF_RZ(usable, size) */
  usable = malloc_usable_size(*memptr);
  p = *memptr + size;		/* end of user region */
  memset(p, MAGIC_BYTE, SIZEOF_RZ(usable, size));
  p += SIZEOF_RZ(usable,size);	/* end of redzone */
  /* stash the originally requested size right after the red zone */
  *(size_t *)p = size;
  OFC_DUMP(*memptr, usable, size);
  return 0;
}
/*
 * posix_memalign wrapper: forwards to the real implementation and records
 * the allocation through the START_CALL/END_CALL tracing hooks.
 */
int posix_memalign(void** memptr, size_t alignment, size_t size) {
  int rv;
  START_CALL();
  rv = real_posix_memalign(memptr, alignment, size);
  /* BUG FIX: only record the allocation when the call succeeded.  On
     failure *memptr has an unspecified value (POSIX), so the previous
     unconditional END_CALL(*memptr, size) could log an indeterminate
     pointer (this addresses the old FIXME). */
  if (rv == 0 && memptr)
    END_CALL(*memptr, size);
  return rv;
}
/*
 * posix_memalign replacement used by the malloc wrapper: satisfies the
 * request from real_malloc with enough slack to align the result by hand,
 * and stores a malloc_wrapper_header just below the returned pointer so
 * the matching free wrapper can recover the original allocation.
 */
static int myth_malloc_wrapper_posix_memalign(void **memptr,size_t alignment,size_t size)
{
#ifdef MYTH_WRAP_MALLOC_RUNTIME
  /* fall back to the bump allocator before wrapping completed */
  if (!g_wrap_malloc_completed) {
    void *ptr = sys_alloc_align(alignment, size);
    if (ptr) {
      *memptr = ptr;
      return 0;
    } else {
      return ENOMEM;
    }
  }
  /* no wrap. call the real one */
  if (!g_wrap_malloc) {
    return real_posix_memalign(memptr, alignment, size);
  }
#endif
  /* POSIX allows returning NULL for a zero-byte request */
  if (size == 0) {
    *memptr = NULL;
    return 0;
  }
  malloc_wrapper_header_t ptr;
  /* minimum payload so the header/free-list machinery always fits */
  if (size<16)size=16;
  /* lazily resolve real_malloc; the protect flag makes a recursive call
     (dlsym may itself allocate) bail out with NULL instead of looping */
  if (!real_malloc){
    static int load_malloc_protect=0;
    if (load_malloc_protect==0){
      load_malloc_protect=1;
      real_malloc=dlsym(RTLD_NEXT,"malloc");
    } else {*memptr=NULL;return 0;}
    assert(real_malloc);
  }
  uintptr_t n0,n;
  /* over-allocate: payload + alignment slack + room for the header */
  n0=(uintptr_t)real_malloc(size+alignment+sizeof(malloc_wrapper_header));
  if (!n0){
    fprintf(stderr,"size=%llu\n",(unsigned long long)size);
    return ENOMEM;
  }
  //align
  /* first aligned address at or above n0 + header
     NOTE(review): assumes alignment is a power of two per POSIX; a
     non-power-of-two would still "work" via the modulo but may misalign
     relative to the caller's expectation — not validated here */
  n = n0 + sizeof(malloc_wrapper_header) + alignment - 1;
  n = n - n % alignment;
  /* header lives immediately below the aligned user pointer */
  ptr=(malloc_wrapper_header_t)n;ptr--;
  assert(n0 <= (uintptr_t)ptr);
  assert(n + size <= n0 + size+alignment+sizeof(malloc_wrapper_header));
  /* FREE_LIST_NUM marks "not from a free list"; org_ptr is what the free
     wrapper must pass to the real free() */
  ptr->s.fl_index=FREE_LIST_NUM;
  ptr->s.org_ptr=(void*)n0;
  //fprintf(stderr,"memalign A,%p,%p,%p,%d\n",(void*)n0,ptr,(void*)n,FREE_LIST_NUM);
  *memptr=(void*)n;
  return 0;
}
/**
 * Default stand-in for posix_memalign, used while the real symbol has not
 * been resolved yet.  It forces the constructor and then forwards to the
 * real posix_memalign if resolution succeeded.
 */
static int default_posix_memalign(void **memptr, size_t alignment, size_t size)
{
	/*
	 * if posix_memalign is called before the constructor, we force the
	 * constructor.
	 */
	init_malloc();

	/*
	 * BUG FIX: posix_memalign reports failure with a *positive* errno
	 * value (POSIX), not a negative one, and it never "returns NULL" —
	 * the old message and the -ENOMEM return were both wrong.
	 */
	if (real_posix_memalign == default_posix_memalign) {
		debug ("Failed to resolve 'posix_memalign', returning ENOMEM\n");
		return ENOMEM;
	}

	/* We can now use the real_posix_memalign */
	return real_posix_memalign(memptr, alignment, size);
}
int posix_memalign(void **memptr, size_t alignment, size_t size) { int ret = 0; struct log_malloc_s *mem = NULL; sig_atomic_t memuse; sig_atomic_t memruse = 0; if(!DL_RESOLVE_CHECK(posix_memalign)) return ENOMEM; if(alignment > MEM_OFF) return ENOMEM; if((ret = real_posix_memalign((void **)&mem, alignment, size + MEM_OFF)) == 0) { mem->size = size; mem->cb = ~mem->size; memuse = __sync_add_and_fetch(&g_ctx.mem_used, mem->size); #ifdef HAVE_MALLOC_USABLE_SIZE mem->rsize = malloc_usable_size(mem); memruse = __sync_add_and_fetch(&g_ctx.mem_rused, mem->rsize); #endif } #ifndef DISABLE_CALL_COUNTS (void)__sync_fetch_and_add(&g_ctx.stat.posix_memalign, 1); g_ctx.stat.unrel_sum++; #endif if(!g_ctx.memlog_disabled) { int s; char buf[LOG_BUFSIZE]; s = snprintf(buf, sizeof(buf), "+ posix_memalign %zu %p (%zu %zu : %d) [%u:%u]\n", size, MEM_PTR(mem), alignment, size, ret, memuse, memruse); log_trace(buf, s, sizeof(buf), 1); } return ret; }
int posix_memalign(void **memptr, size_t alignment, size_t size) { int rc = 0; if (!alignment || !memptr) { rc = -EINVAL; } else { struct alloc_header alloc_header; size_t allocated_size; *memptr = NULL; alloc_header.requested_size = size; allocated_size = (sizeof(alloc_header) / alignment) + ((sizeof(alloc_header) % alignment) ? 1 : 0); allocated_size *= alignment; allocated_size += alloc_header.requested_size; #ifdef CHECK_COOKIE alloc_header.cookie = ALLOC_COOKIE; alloc_header.dummy = 0; #endif alloc_header.ptr = NULL; rc = real_posix_memalign(&alloc_header.ptr, alignment, allocated_size); if (!rc && alloc_header.ptr) { struct alloc_header *store_ptr; *memptr = alloc_header.ptr + allocated_size - alloc_header.requested_size; store_ptr = (struct alloc_header *)*memptr; store_ptr--; *store_ptr = alloc_header; } } return rc; }