TEST_END

TEST_BEGIN(test_overflow)
{
	size_t largemax;
	void *p;

	largemax = get_large_size(get_nlarge()-1);
	p = mallocx(1, 0);
	assert_ptr_not_null(p, "Unexpected mallocx() failure");

	assert_ptr_null(rallocx(p, largemax+1, 0),
	    "Expected OOM for rallocx(p, size=%#zx, 0)", largemax+1);
	assert_ptr_null(rallocx(p, ZU(PTRDIFF_MAX)+1, 0),
	    "Expected OOM for rallocx(p, size=%#zx, 0)", ZU(PTRDIFF_MAX)+1);
	assert_ptr_null(rallocx(p, SIZE_T_MAX, 0),
	    "Expected OOM for rallocx(p, size=%#zx, 0)", SIZE_T_MAX);
	assert_ptr_null(rallocx(p, 1, MALLOCX_ALIGN(ZU(PTRDIFF_MAX)+1)),
	    "Expected OOM for rallocx(p, size=1, MALLOCX_ALIGN(%#zx))",
	    ZU(PTRDIFF_MAX)+1);

	dallocx(p, 0);
}
TEST_END

TEST_BEGIN(test_oom)
{
	size_t hugemax, size, alignment;

	hugemax = get_huge_size(get_nhuge()-1);

	/*
	 * It should be impossible to allocate two objects that each consume
	 * more than half the virtual address space.
	 */
	{
		void *p;

		p = mallocx(hugemax, 0);
		if (p != NULL) {
			assert_ptr_null(mallocx(hugemax, 0),
			    "Expected OOM for mallocx(size=%#zx, 0)", hugemax);
			dallocx(p, 0);
		}
	}

#if LG_SIZEOF_PTR == 3
	size = ZU(0x8000000000000000);
	alignment = ZU(0x8000000000000000);
#else
	size = ZU(0x80000000);
	alignment = ZU(0x80000000);
#endif
	assert_ptr_null(mallocx(size, MALLOCX_ALIGN(alignment)),
	    "Expected OOM for mallocx(size=%#zx, MALLOCX_ALIGN(%#zx))",
	    size, alignment);
}
TEST_END
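/*
 * get_nhuge() and get_huge_size() (and the analogous get_nlarge()/
 * get_large_size() used by test_overflow above) are referenced but not
 * defined in this excerpt.  A plausible sketch built on the public
 * mallctl*() API is shown below; the "arenas.nhchunks" and
 * "arenas.hchunk.<i>.size" names are assumptions matching the 4.x-era
 * size-class introspection, the real helpers may differ, and in the real
 * file they would precede the tests that use them.
 */
static unsigned
get_nhuge(void)
{
	unsigned nhuge;
	size_t sz = sizeof(unsigned);

	assert_d_eq(mallctl("arenas.nhchunks", &nhuge, &sz, NULL, 0), 0,
	    "Unexpected mallctl(\"arenas.nhchunks\", ...) failure");
	return (nhuge);
}

static size_t
get_huge_size(size_t ind)
{
	size_t huge_size, sz, mib[4];
	size_t miblen = sizeof(mib) / sizeof(size_t);

	/* Translate the name once, then patch in the size-class index. */
	assert_d_eq(mallctlnametomib("arenas.hchunk.0.size", mib, &miblen), 0,
	    "Unexpected mallctlnametomib() failure");
	mib[2] = ind;
	sz = sizeof(size_t);
	assert_d_eq(mallctlbymib(mib, miblen, &huge_size, &sz, NULL, 0), 0,
	    "Unexpected mallctlbymib() failure");
	return (huge_size);
}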
static void *
thd_start_reincarnated(void *arg)
{
	tsd_t *tsd = tsd_fetch();
	assert(tsd);

	void *p = malloc(1);
	assert_ptr_not_null(p, "Unexpected malloc() failure");

	/* Manually trigger reincarnation. */
	assert_ptr_not_null(tsd_arena_get(tsd),
	    "Should have tsd arena set.");
	tsd_cleanup((void *)tsd);
	assert_ptr_null(*tsd_arenap_get_unsafe(tsd),
	    "TSD arena should have been cleared.");
	assert_u_eq(tsd->state, tsd_state_purgatory,
	    "TSD state should be purgatory\n");

	free(p);
	assert_u_eq(tsd->state, tsd_state_reincarnated,
	    "TSD state should be reincarnated\n");
	p = mallocx(1, MALLOCX_TCACHE_NONE);
	assert_ptr_not_null(p, "Unexpected mallocx() failure");
	assert_ptr_null(*tsd_arenap_get_unsafe(tsd),
	    "Should not have tsd arena set after reincarnation.");

	free(p);
	tsd_cleanup((void *)tsd);
	assert_ptr_null(*tsd_arenap_get_unsafe(tsd),
	    "TSD arena should have been cleared after 2nd cleanup.");

	return NULL;
}
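/*
 * A minimal driver for the helper above, assuming the test framework's
 * thd_create()/thd_join() wrappers (not shown in this excerpt): the
 * reincarnation path is exercised from a freshly created thread so that its
 * TSD goes through the full lifecycle.
 */
TEST_BEGIN(test_tsd_reincarnated)
{
	thd_t thd;

	thd_create(&thd, thd_start_reincarnated, NULL);
	thd_join(thd, NULL);
}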
TEST_END

TEST_BEGIN(test_rtree_random)
{
	unsigned i;
	sfmt_t *sfmt;
#define NSET 16
#define SEED 42

	sfmt = init_gen_rand(SEED);
	for (i = 1; i <= (sizeof(uintptr_t) << 3); i++) {
		uintptr_t keys[NSET];
		extent_node_t node;
		unsigned j;
		rtree_t rtree;

		assert_false(rtree_new(&rtree, i, node_alloc, node_dalloc),
		    "Unexpected rtree_new() failure");

		for (j = 0; j < NSET; j++) {
			keys[j] = (uintptr_t)gen_rand64(sfmt);
			assert_false(rtree_set(&rtree, keys[j], &node),
			    "Unexpected rtree_set() failure");
			assert_ptr_eq(rtree_get(&rtree, keys[j], true), &node,
			    "rtree_get() should return previously set value");
		}
		for (j = 0; j < NSET; j++) {
			assert_ptr_eq(rtree_get(&rtree, keys[j], true), &node,
			    "rtree_get() should return previously set value");
		}

		for (j = 0; j < NSET; j++) {
			assert_false(rtree_set(&rtree, keys[j], NULL),
			    "Unexpected rtree_set() failure");
			assert_ptr_null(rtree_get(&rtree, keys[j], true),
			    "rtree_get() should return previously set value");
		}
		for (j = 0; j < NSET; j++) {
			assert_ptr_null(rtree_get(&rtree, keys[j], true),
			    "rtree_get() should return previously set value");
		}

		rtree_delete(&rtree);
	}
	fini_gen_rand(sfmt);
#undef NSET
#undef SEED
}
TEST_END
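/*
 * node_alloc()/node_dalloc() are the node allocation hooks that the rtree
 * tests pass to rtree_new(); their definitions are not part of this excerpt.
 * The sketch below assumes the rtree_node_elm_t-based callback signatures
 * and simply forwards to calloc()/free(); the real hooks may track
 * outstanding allocations or use different backing storage, and in the real
 * file they would precede the tests that use them.
 */
static rtree_node_elm_t *
node_alloc(size_t nelms)
{
	return ((rtree_node_elm_t *)calloc(nelms, sizeof(rtree_node_elm_t)));
}

static void
node_dalloc(rtree_node_elm_t *node)
{
	free(node);
}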
TEST_BEGIN(test_align)
{
	void *p, *q;
	size_t align;
#define MAX_ALIGN (ZU(1) << 29)

	align = ZU(1);
	p = mallocx(1, MALLOCX_ALIGN(align));
	assert_ptr_not_null(p, "Unexpected mallocx() error");

	for (align <<= 1; align <= MAX_ALIGN; align <<= 1) {
		q = rallocx(p, 1, MALLOCX_ALIGN(align));
		assert_ptr_not_null(q,
		    "Unexpected rallocx() error for align=%zu", align);
		assert_ptr_null(
		    (void *)((uintptr_t)q & (align-1)),
		    "%p inadequately aligned for align=%zu", q, align);
		p = q;
	}
	dallocx(p, 0);
#undef MAX_ALIGN
}
TEST_END

TEST_BEGIN(test_alignment_and_size)
{
	int r;
	size_t nsz, rsz, sz, alignment, total;
	unsigned i;
	void *ps[NITER];

	for (i = 0; i < NITER; i++)
		ps[i] = NULL;

	for (alignment = 8; alignment <= MAXALIGN; alignment <<= 1) {
		total = 0;
		for (sz = 1;
		    sz < 3 * alignment && sz < (1U << 31);
		    sz += (alignment >> (LG_SIZEOF_PTR-1)) - 1) {
			for (i = 0; i < NITER; i++) {
				nsz = 0;
				r = nallocm(&nsz, sz,
				    ALLOCM_ALIGN(alignment) | ALLOCM_ZERO);
				assert_d_eq(r, ALLOCM_SUCCESS,
				    "nallocm() error for alignment=%zu, "
				    "size=%zu (%#zx): %d",
				    alignment, sz, sz, r);
				rsz = 0;
				r = allocm(&ps[i], &rsz, sz,
				    ALLOCM_ALIGN(alignment) | ALLOCM_ZERO);
				assert_d_eq(r, ALLOCM_SUCCESS,
				    "allocm() error for alignment=%zu, "
				    "size=%zu (%#zx): %d",
				    alignment, sz, sz, r);
				assert_zu_ge(rsz, sz,
				    "Real size smaller than expected for "
				    "alignment=%zu, size=%zu", alignment, sz);
				assert_zu_eq(nsz, rsz,
				    "nallocm()/allocm() rsize mismatch for "
				    "alignment=%zu, size=%zu", alignment, sz);
				assert_ptr_null(
				    (void *)((uintptr_t)ps[i] & (alignment-1)),
				    "%p inadequately aligned for"
				    " alignment=%zu, size=%zu",
				    ps[i], alignment, sz);
				sallocm(ps[i], &rsz, 0);
				total += rsz;
				if (total >= (MAXALIGN << 1))
					break;
			}
			for (i = 0; i < NITER; i++) {
				if (ps[i] != NULL) {
					dallocm(ps[i], 0);
					ps[i] = NULL;
				}
			}
		}
	}
}
TEST_END

TEST_BEGIN(test_alignment_and_size)
{
#define MAXALIGN (((size_t)1) << 25)
#define NITER 4
	size_t nsz, rsz, sz, alignment, total;
	unsigned i;
	void *ps[NITER];

	for (i = 0; i < NITER; i++)
		ps[i] = NULL;

	for (alignment = 8; alignment <= MAXALIGN; alignment <<= 1) {
		total = 0;
		for (sz = 1;
		    sz < 3 * alignment && sz < (1U << 31);
		    sz += (alignment >> (LG_SIZEOF_PTR-1)) - 1) {
			for (i = 0; i < NITER; i++) {
				nsz = nallocx(sz, MALLOCX_ALIGN(alignment) |
				    MALLOCX_ZERO);
				assert_zu_ne(nsz, 0,
				    "nallocx() error for alignment=%zu, "
				    "size=%zu (%#zx)", alignment, sz, sz);
				ps[i] = mallocx(sz, MALLOCX_ALIGN(alignment) |
				    MALLOCX_ZERO);
				assert_ptr_not_null(ps[i],
				    "mallocx() error for alignment=%zu, "
				    "size=%zu (%#zx)", alignment, sz, sz);
				rsz = sallocx(ps[i], 0);
				assert_zu_ge(rsz, sz,
				    "Real size smaller than expected for "
				    "alignment=%zu, size=%zu", alignment, sz);
				assert_zu_eq(nsz, rsz,
				    "nallocx()/sallocx() size mismatch for "
				    "alignment=%zu, size=%zu", alignment, sz);
				assert_ptr_null(
				    (void *)((uintptr_t)ps[i] & (alignment-1)),
				    "%p inadequately aligned for"
				    " alignment=%zu, size=%zu",
				    ps[i], alignment, sz);
				total += rsz;
				if (total >= (MAXALIGN << 1))
					break;
			}
			for (i = 0; i < NITER; i++) {
				if (ps[i] != NULL) {
					dallocx(ps[i], 0);
					ps[i] = NULL;
				}
			}
		}
	}
#undef MAXALIGN
#undef NITER
}
TEST_END

TEST_BEGIN(test_oom)
{
	size_t hugemax;
	bool oom;
	void *ptrs[3];
	unsigned i;

	/*
	 * It should be impossible to allocate three objects that each consume
	 * nearly half the virtual address space.
	 */
	hugemax = get_huge_size(get_nhuge()-1);
	oom = false;
	for (i = 0; i < sizeof(ptrs) / sizeof(void *); i++) {
		ptrs[i] = mallocx(hugemax, 0);
		if (ptrs[i] == NULL)
			oom = true;
	}
	assert_true(oom,
	    "Expected OOM during series of calls to mallocx(size=%zu, 0)",
	    hugemax);
	for (i = 0; i < sizeof(ptrs) / sizeof(void *); i++) {
		if (ptrs[i] != NULL)
			dallocx(ptrs[i], 0);
	}

#if LG_SIZEOF_PTR == 3
	assert_ptr_null(mallocx(0x8000000000000000ULL,
	    MALLOCX_ALIGN(0x8000000000000000ULL)),
	    "Expected OOM for mallocx()");
	assert_ptr_null(mallocx(0x8000000000000000ULL,
	    MALLOCX_ALIGN(0x80000000)),
	    "Expected OOM for mallocx()");
#else
	assert_ptr_null(mallocx(0x80000000UL, MALLOCX_ALIGN(0x80000000UL)),
	    "Expected OOM for mallocx()");
#endif
}
TEST_END
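/*
 * test_empty_list() below operates on list_t/list_head_t types that are not
 * defined in this excerpt.  A plausible sketch using the ql_head()/ql_elm()
 * macros is shown here; the `id` payload field is purely illustrative.
 */
typedef struct list_s list_t;
typedef ql_head(list_t) list_head_t;

struct list_s {
	ql_elm(list_t) link;	/* Linkage used by the ql_*() macros. */
	char id;		/* Illustrative payload. */
};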
static void
test_empty_list(list_head_t *head)
{
	list_t *t;
	unsigned i;

	assert_ptr_null(ql_first(head), "Unexpected element for empty list");
	assert_ptr_null(ql_last(head, link),
	    "Unexpected element for empty list");

	i = 0;
	ql_foreach(t, head, link) {
		i++;
	}
	assert_u_eq(i, 0, "Unexpected element for empty list");

	i = 0;
	ql_reverse_foreach(t, head, link) {
		i++;
	}
	assert_u_eq(i, 0, "Unexpected element for empty list");
}
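/*
 * A minimal caller for the helper above, assuming ql_new() initializes an
 * empty list head: a freshly initialized head should satisfy every check in
 * test_empty_list().
 */
TEST_BEGIN(test_ql_empty)
{
	list_head_t head;

	ql_new(&head);
	test_empty_list(&head);
}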
TEST_END

TEST_BEGIN(test_rtree_bits)
{
	unsigned i, j, k;

	for (i = 1; i < (sizeof(uintptr_t) << 3); i++) {
		uintptr_t keys[] = {0, 1,
		    (((uintptr_t)1) << (sizeof(uintptr_t)*8-i)) - 1};
		extent_node_t node;
		rtree_t rtree;

		assert_false(rtree_new(&rtree, i, node_alloc, node_dalloc),
		    "Unexpected rtree_new() failure");

		for (j = 0; j < sizeof(keys)/sizeof(uintptr_t); j++) {
			assert_false(rtree_set(&rtree, keys[j], &node),
			    "Unexpected rtree_set() failure");
			for (k = 0; k < sizeof(keys)/sizeof(uintptr_t); k++) {
				assert_ptr_eq(rtree_get(&rtree, keys[k], true),
				    &node, "rtree_get() should return "
				    "previously set value and ignore "
				    "insignificant key bits; i=%u, j=%u, k=%u, "
				    "set key=%#"FMTxPTR", get key=%#"FMTxPTR,
				    i, j, k, keys[j], keys[k]);
			}
			assert_ptr_null(rtree_get(&rtree,
			    (((uintptr_t)1) << (sizeof(uintptr_t)*8-i)), false),
			    "Only leftmost rtree leaf should be set; "
			    "i=%u, j=%u", i, j);
			assert_false(rtree_set(&rtree, keys[j], NULL),
			    "Unexpected rtree_set() failure");
		}

		rtree_delete(&rtree);
	}
}
TEST_END

TEST_BEGIN(test_lg_align_and_zero)
{
	void *p, *q;
	size_t lg_align, sz;
#define MAX_LG_ALIGN 29
#define MAX_VALIDATE (ZU(1) << 22)

	lg_align = ZU(0);
	p = mallocx(1, MALLOCX_LG_ALIGN(lg_align)|MALLOCX_ZERO);
	assert_ptr_not_null(p, "Unexpected mallocx() error");

	for (lg_align++; lg_align <= MAX_LG_ALIGN; lg_align++) {
		q = rallocx(p, 1, MALLOCX_LG_ALIGN(lg_align)|MALLOCX_ZERO);
		assert_ptr_not_null(q,
		    "Unexpected rallocx() error for lg_align=%zu", lg_align);
		assert_ptr_null(
		    (void *)((uintptr_t)q & ((ZU(1) << lg_align)-1)),
		    "%p inadequately aligned for lg_align=%zu", q, lg_align);
		sz = sallocx(q, 0);
		if ((sz << 1) <= MAX_VALIDATE) {
			assert_false(validate_fill(q, 0, 0, sz),
			    "Expected zeroed memory");
		} else {
			assert_false(validate_fill(q, 0, 0, MAX_VALIDATE),
			    "Expected zeroed memory");
			assert_false(validate_fill(
			    (void *)((uintptr_t)q+sz-MAX_VALIDATE), 0, 0,
			    MAX_VALIDATE), "Expected zeroed memory");
		}
		p = q;
	}
	dallocx(p, 0);
#undef MAX_VALIDATE
#undef MAX_LG_ALIGN
}
TEST_END
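/*
 * validate_fill() is referenced above but not defined in this excerpt.  A
 * minimal sketch consistent with how it is called (returns false iff every
 * byte in [offset, offset+len) of the allocation equals c) is shown below;
 * the real helper may additionally report the first mismatching offset, and
 * in the real file it would precede the test that uses it.
 */
static bool
validate_fill(const void *p, uint8_t c, size_t offset, size_t len)
{
	const uint8_t *buf = (const uint8_t *)p;
	size_t i;

	for (i = 0; i < len; i++) {
		if (buf[offset+i] != c)
			return (true);	/* Mismatch found. */
	}
	return (false);			/* Entire range matches c. */
}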