/*
 * Insert then delete a single high index; item_kill_tree() should find an
 * empty tree and free any remaining nodes.
 */
static void leak_check(void)
{
	const unsigned long big_index = 1000000;
	RADIX_TREE(tree, GFP_KERNEL);

	item_insert(&tree, big_index);
	item_delete(&tree, big_index);
	item_kill_tree(&tree);
}
/*
 * Smoke test: an inserted index is found present and a neighbouring,
 * never-inserted index is found absent.
 */
void add_and_check(void)
{
	RADIX_TREE(tree, GFP_KERNEL);

	item_insert(&tree, 44);
	item_check_present(&tree, 44);
	item_check_absent(&tree, 43);
	item_kill_tree(&tree);
}
/*
 * Verbose variant of leak_check(): dump the global nr_allocated counter
 * around each step so a node leak can be localised by eye.
 */
static void __leak_check(void)
{
	const unsigned long big_index = 1000000;
	RADIX_TREE(tree, GFP_KERNEL);

	printf("%d: nr_allocated=%d\n", __LINE__, nr_allocated);
	item_insert(&tree, big_index);
	printf("%d: nr_allocated=%d\n", __LINE__, nr_allocated);
	item_delete(&tree, big_index);
	printf("%d: nr_allocated=%d\n", __LINE__, nr_allocated);
	item_kill_tree(&tree);
	printf("%d: nr_allocated=%d\n", __LINE__, nr_allocated);
}
/*
 * Randomly exercise the tag set/clear paths via do_thrash(), then verify
 * that node tags are consistent with leaf tags.
 */
static void thrash_tags(void)
{
	RADIX_TREE(tree, GFP_KERNEL);
	char *thrash_state;

	/*
	 * calloc() both zeroes the state and lets us detect allocation
	 * failure; the old malloc()+memset() dereferenced a potential NULL.
	 */
	thrash_state = calloc(THRASH_SIZE, 1);
	assert(thrash_state != NULL);

	do_thrash(&tree, thrash_state, 0);
	verify_tag_consistency(&tree, 0);
	item_kill_tree(&tree);
	free(thrash_state);
}
/*
 * Run the basic per-index checks for both tags over 10000 indices, then
 * verify overall tag consistency and report the allocation count around
 * teardown.
 */
void simple_checks(void)
{
	RADIX_TREE(tree, GFP_KERNEL);
	unsigned long idx;

	for (idx = 0; idx < 10000; idx++) {
		__simple_checks(&tree, idx, 0);
		__simple_checks(&tree, idx, 1);
	}
	verify_tag_consistency(&tree, 0);
	verify_tag_consistency(&tree, 1);
	printf("before item_kill_tree: %d allocated\n", nr_allocated);
	item_kill_tree(&tree);
	printf("after item_kill_tree: %d allocated\n", nr_allocated);
}
/*
 * One item at index 0 with tag 0 set: a tagged gang lookup starting at 0
 * finds exactly it, and a lookup starting at 1 finds nothing.
 *
 * NOTE(review): a second, longer single_check() definition appears later in
 * this file; both cannot coexist in one translation unit -- this chunk
 * presumably concatenates two file versions, confirm against the repo.
 */
static void single_check(void)
{
	RADIX_TREE(tree, GFP_KERNEL);
	struct item *items[BATCH];
	int found;

	item_insert(&tree, 0);
	item_tag_set(&tree, 0, 0);

	found = radix_tree_gang_lookup_tag(&tree, (void **)items, 0, BATCH, 0);
	assert(found == 1);
	found = radix_tree_gang_lookup_tag(&tree, (void **)items, 1, BATCH, 0);
	assert(found == 0);

	verify_tag_consistency(&tree, 0);
	verify_tag_consistency(&tree, 1);
	item_kill_tree(&tree);
}
/*
 * Populate indices [middle - down, middle + up), then verify: every index in
 * range is present, the indices just outside the range are absent, and gang
 * lookups / full scans over the range see everything.
 *
 * Fix: the old code overwrote the 'middle' parameter with 1 << 30, so every
 * caller exercised the same region no matter what argument it passed.
 * Honour the caller-supplied value instead.
 */
void __gang_check(unsigned long middle, long down, long up, int chunk, int hop)
{
	long idx;
	RADIX_TREE(tree, GFP_KERNEL);

	for (idx = -down; idx < up; idx++)
		item_insert(&tree, middle + idx);

	item_check_absent(&tree, middle - down - 1);
	for (idx = -down; idx < up; idx++)
		item_check_present(&tree, middle + idx);
	item_check_absent(&tree, middle + up);

	item_gang_check_present(&tree, middle - down, up + down, chunk, hop);
	item_full_scan(&tree, middle - down, down + up, chunk);
	item_kill_tree(&tree);
}
/*
 * Check that tags propagate correctly when extending a tree.
 */
static void extend_checks(void)
{
	RADIX_TREE(tree, GFP_KERNEL);

	/* Tag an entry, then grow the tree past it: the tag must survive. */
	item_insert(&tree, 43);
	assert(item_tag_get(&tree, 43, 0) == 0);
	item_tag_set(&tree, 43, 0);
	assert(item_tag_get(&tree, 43, 0) == 1);
	item_insert(&tree, 1000000);
	assert(item_tag_get(&tree, 43, 0) == 1);

	/* Shrink back down: tags on the remaining entries must survive. */
	item_insert(&tree, 0);
	item_tag_set(&tree, 0, 0);
	item_delete(&tree, 1000000);
	assert(item_tag_get(&tree, 43, 0) != 0);
	item_delete(&tree, 43);
	assert(item_tag_get(&tree, 43, 0) == 0);	/* crash */
	assert(item_tag_get(&tree, 0, 0) == 1);

	verify_tag_consistency(&tree, 0);
	item_kill_tree(&tree);
}
/*
 * Check that tags propagate correctly when contracting a tree.
 */
static void contract_checks(void)
{
	RADIX_TREE(tree, GFP_KERNEL);
	struct item *item;
	int base = 1 << RADIX_TREE_MAP_SHIFT;	/* first index past leaf 0 */

	item_insert(&tree, base);
	item_insert(&tree, base + 1);
	item_tag_set(&tree, base, 0);
	item_tag_set(&tree, base, 1);
	item_tag_set(&tree, base + 1, 0);

	/* Deleting base+1 and clearing tag 1 lets the tree contract. */
	item_delete(&tree, base + 1);
	item_tag_clear(&tree, base, 1);

	assert(radix_tree_gang_lookup_tag(&tree, (void **)&item, 0, 1, 0) == 1);
	assert(radix_tree_gang_lookup_tag(&tree, (void **)&item, 0, 1, 1) == 0);
	assert(item_tag_get(&tree, base, 0) == 1);
	assert(item_tag_get(&tree, base, 1) == 0);

	verify_tag_consistency(&tree, 0);
	item_kill_tree(&tree);
}
static void single_check(void) { struct item *items[BATCH]; RADIX_TREE(tree, GFP_KERNEL); int ret; unsigned long first = 0; item_insert(&tree, 0); item_tag_set(&tree, 0, 0); ret = radix_tree_gang_lookup_tag(&tree, (void **)items, 0, BATCH, 0); assert(ret == 1); ret = radix_tree_gang_lookup_tag(&tree, (void **)items, 1, BATCH, 0); assert(ret == 0); verify_tag_consistency(&tree, 0); verify_tag_consistency(&tree, 1); ret = tag_tagged_items(&tree, NULL, first, 10, 10, 0, 1); assert(ret == 1); ret = radix_tree_gang_lookup_tag(&tree, (void **)items, 0, BATCH, 1); assert(ret == 1); item_tag_clear(&tree, 0, 0); ret = radix_tree_gang_lookup_tag(&tree, (void **)items, 0, BATCH, 0); assert(ret == 0); item_kill_tree(&tree); }
/*
 * Verify the tree grows exactly tall enough for its largest index and
 * shrinks again as items are deleted.
 */
void dynamic_height_check(void)
{
	RADIX_TREE(tree, GFP_KERNEL);
	int i;

	tree_verify_min_height(&tree, 0);

	/* Height tracks the largest index across inserts and deletes. */
	item_insert(&tree, 42);
	tree_verify_min_height(&tree, 42);
	item_insert(&tree, 1000000);
	tree_verify_min_height(&tree, 1000000);
	assert(item_delete(&tree, 1000000));
	tree_verify_min_height(&tree, 42);
	assert(item_delete(&tree, 42));
	tree_verify_min_height(&tree, 0);

	/* Fill 0..999, checking the height after every insert... */
	for (i = 0; i < 1000; i++) {
		item_insert(&tree, i);
		tree_verify_min_height(&tree, i);
	}

	/* ...then drain from the top, checking after every delete. */
	for (i = 999; i > 0; i--) {
		assert(item_delete(&tree, i));
		tree_verify_min_height(&tree, i - 1);
	}
	assert(item_delete(&tree, 0));
	tree_verify_min_height(&tree, 0);

	item_kill_tree(&tree);
}
/*
 * Insert one multi-order entry spanning 1 << order indices and verify that
 * an insert at every covered index collides with it.
 *
 * NOTE(review): this definition appears truncated in this chunk -- the
 * function body is never closed before copy_tag_check() begins.  Confirm
 * the remainder against the original file before editing.
 */
static void __multiorder_tag_test(int index, int order)
{
	RADIX_TREE(tree, GFP_KERNEL);
	int base, err, i;

	/* our canonical entry */
	base = index & ~((1 << order) - 1);

	printf("Multiorder tag test with index %d, canonical entry %d\n",
			index, base);

	err = item_insert_order(&tree, index, order);
	assert(!err);

	/*
	 * Verify we get collisions for covered indices.  We try and fail to
	 * insert an exceptional entry so we don't leak memory via
	 * item_insert_order().
	 */
	for_each_index(i, base, order) {
		err = __radix_tree_insert(&tree, i, order,
				(void *)(0xA0 | RADIX_TREE_EXCEPTIONAL_ENTRY));
		assert(err == -EEXIST);
	}
/*
 * Randomly populate and tag a tree, copy XA_MARK_0 to XA_MARK_1 (and then
 * XA_MARK_2, in small batches) over a random [start, end] range, and verify
 * exactly the expected number of entries were re-tagged.
 *
 * The expected count is accumulated in 'count' as items are tagged; the
 * whole test depends on the exact sequence of rand() calls below.
 */
void copy_tag_check(void)
{
	RADIX_TREE(tree, GFP_KERNEL);
	unsigned long idx[ITEMS];
	unsigned long start, end, count = 0, tagged, cur, tmp;
	int i;

//	printf("generating radix tree indices...\n");
	start = rand();
	end = rand();
	/* Occasionally leave start > end to exercise the empty-range case. */
	if (start > end && (rand() % 10)) {
		cur = start;
		start = end;
		end = cur;
	}

	/* Specifically create items around the start and the end of the range
	 * with high probability to check for off by one errors */
	cur = rand();
	if (cur & 1) {
		item_insert(&tree, start);
		if (cur & 2) {
			if (start <= end)
				count++;
			item_tag_set(&tree, start, 0);
		}
	}
	if (cur & 4) {
		item_insert(&tree, start-1);
		if (cur & 8)
			item_tag_set(&tree, start-1, 0);
	}
	if (cur & 16) {
		item_insert(&tree, end);
		if (cur & 32) {
			if (start <= end)
				count++;
			item_tag_set(&tree, end, 0);
		}
	}
	if (cur & 64) {
		item_insert(&tree, end+1);
		if (cur & 128)
			item_tag_set(&tree, end+1, 0);
	}

	/* Fill in ITEMS distinct random indices; tag roughly half of them,
	 * counting the tagged ones that fall inside [start, end]. */
	for (i = 0; i < ITEMS; i++) {
		do {
			idx[i] = rand();
		} while (item_lookup(&tree, idx[i]));
		item_insert(&tree, idx[i]);
		if (rand() & 1) {
			item_tag_set(&tree, idx[i], 0);
			if (idx[i] >= start && idx[i] <= end)
				count++;
		}
/*		if (i % 1000 == 0)
			putchar('.'); */
	}

//	printf("\ncopying tags...\n");
	tagged = tag_tagged_items(&tree, start, end, ITEMS, XA_MARK_0, XA_MARK_1);

//	printf("checking copied tags\n");
	assert(tagged == count);
	check_copied_tags(&tree, start, end, idx, ITEMS, 0, 1);

	/* Copy tags in several rounds */
//	printf("\ncopying tags...\n");
	tmp = rand() % (count / 10 + 2);
	tagged = tag_tagged_items(&tree, start, end, tmp, XA_MARK_0, XA_MARK_2);
	assert(tagged == count);

//	printf("%lu %lu %lu\n", tagged, tmp, count);
//	printf("checking copied tags\n");
	check_copied_tags(&tree, start, end, idx, ITEMS, 0, 2);
	verify_tag_consistency(&tree, 0);
	verify_tag_consistency(&tree, 1);
	verify_tag_consistency(&tree, 2);
//	printf("\n");
	item_kill_tree(&tree);
}