int main(int argc, char *argv[])
{
#define BITMAP_BLOCKS 10
	struct dev *dev = &(struct dev){ .bits = 3 };
	/* This expects that buffers are never reclaimed */
	init_buffers(dev, 1 << 20, 1);

	block_t volblocks = BITMAP_BLOCKS << (dev->bits + 3);
	struct sb *sb = rapid_sb(dev);
	sb->super = INIT_DISKSB(dev->bits, volblocks);
	setup_sb(sb, &sb->super);

	test_init(argv[0]);

	struct inode *bitmap = rapid_open_inode(sb, NULL, 0);
	sb->bitmap = bitmap;

	/* Set up buffers for the bitmap */
	for (int block = 0; block < BITMAP_BLOCKS; block++) {
		struct buffer_head *buffer = blockget(bitmap->map, block);
		memset(bufdata(buffer), 0, sb->blocksize);
		set_buffer_clean(buffer);
		blockput(buffer);
	}

	/* Set fake backend mark to modify backend objects. */
	tux3_start_backend(sb);

	if (test_start("test01"))
		test01(sb, BITMAP_BLOCKS);
	test_end();

	if (test_start("test02"))
		test02(sb, BITMAP_BLOCKS);
	test_end();

	if (test_start("test03"))
		test03(sb, BITMAP_BLOCKS);
	test_end();

	if (test_start("test04"))
		test04(sb, BITMAP_BLOCKS);
	test_end();

	if (test_start("test05"))
		test05(sb, BITMAP_BLOCKS);
	test_end();

	if (test_start("test06"))
		test06(sb, BITMAP_BLOCKS);
	test_end();

	if (test_start("test07"))
		test07(sb, BITMAP_BLOCKS);
	test_end();

	if (test_start("test08"))
		test08(sb, BITMAP_BLOCKS);
	test_end();

	tux3_end_backend();

	clean_main(sb);

	return test_failures();
}
/* Test basic operations */
static void test01(struct sb *sb, struct inode *inode)
{
	/*
	 * FIXME: map_region() does not support reading segments that
	 * span multiple leaves at once.
	 */
#define CAN_HANDLE_A_LEAF 1

	/* Create in ascending order */
	if (test_start("test01.1")) {
		struct block_segment seg;
		int err, segs;

		/* Set fake backend mark to modify backend objects. */
		tux3_start_backend(sb);

		for (int i = 0, j = 0; i < 30; i++, j++) {
			segs = d_map_region(inode, 2*i, 1, &seg, 1, MAP_WRITE);
			test_assert(segs == 1);
		}
#ifdef CAN_HANDLE_A_LEAF
		for (int i = 0; i < 30; i++) {
			segs = check_map_region(inode, 2*i, 1, &seg, 1);
			test_assert(segs == 1);
		}
#else
		segs = check_map_region(inode, 0, 30*2, seg, ARRAY_SIZE(seg));
		test_assert(segs == 30*2);
#endif

		/* btree_chop and dleaf_chop test */
		int index = 31*2;
		while (index--) {
			err = btree_chop(&tux_inode(inode)->btree, index,
					 TUXKEY_LIMIT);
			test_assert(!err);
#ifdef CAN_HANDLE_A_LEAF
			for (int i = 0; i < 30; i++) {
				if (index <= i*2)
					break;
				segs = check_map_region(inode, 2*i, 1, &seg, 1);
				test_assert(segs == 1);
			}
#else
			segs = check_map_region(inode, 0, 30*2, seg,
						ARRAY_SIZE(seg));
			test_assert(segs == i*2);
#endif
		}

		/* Check that everything was truncated */
		segs = map_region(inode, 0, INT_MAX, &seg, 1, MAP_READ);
		test_assert(segs == 1);
		test_assert(seg.count == INT_MAX);
		test_assert(seg.state == BLOCK_SEG_HOLE);

		tux3_end_backend();

		test_assert(force_delta(sb) == 0);
		clean_main(sb, inode);
	}
	test_end();

	/* Create in descending order */
	if (test_start("test01.2")) {
		struct block_segment seg;
		int err, segs;

		/* Set fake backend mark to modify backend objects. */
		tux3_start_backend(sb);

		for (int i = 30; i >= 0; i--) {
			segs = d_map_region(inode, 2*i, 1, &seg, 1, MAP_WRITE);
			test_assert(segs == 1);
		}
#ifdef CAN_HANDLE_A_LEAF
		for (int i = 30; i >= 0; i--) {
			segs = check_map_region(inode, 2*i, 1, &seg, 1);
			test_assert(segs == 1);
		}
#else
		segs = check_map_region(inode, 0, 30*2, seg, ARRAY_SIZE(seg));
		test_assert(segs == i*2);
#endif

		err = btree_chop(&tux_inode(inode)->btree, 0, TUXKEY_LIMIT);
		test_assert(!err);

		/* Check that everything was truncated */
		segs = map_region(inode, 0, INT_MAX, &seg, 1, MAP_READ);
		test_assert(segs == 1);
		test_assert(seg.count == INT_MAX);
		test_assert(seg.state == BLOCK_SEG_HOLE);

		tux3_end_backend();

		test_assert(force_delta(sb) == 0);
		clean_main(sb, inode);
	}
	test_end();

	test_assert(force_delta(sb) == 0);
	clean_main(sb, inode);
}
int make_tux3(struct sb *sb)
{
	int err;

	err = clear_other_magic(sb);
	if (err)
		return err;

	change_begin_atomic(sb);

	trace("create bitmap");
	sb->bitmap = create_internal_inode(sb, TUX_BITMAP_INO, NULL);
	if (IS_ERR(sb->bitmap)) {
		err = PTR_ERR(sb->bitmap);
		goto error_change_end;
	}

	change_end_atomic(sb);

	/* Set fake backend mark to modify backend objects. */
	tux3_start_backend(sb);
	err = reserve_superblock(sb);
	tux3_end_backend();
	if (err)
		goto error;

	change_begin_atomic(sb);

#if 0
	trace("create version table");
	sb->vtable = create_internal_inode(sb, TUX_VTABLE_INO, NULL);
	if (IS_ERR(sb->vtable)) {
		err = PTR_ERR(sb->vtable);
		goto error_change_end;
	}
#endif

	trace("create atom dictionary");
	sb->atable = create_internal_inode(sb, TUX_ATABLE_INO, NULL);
	if (IS_ERR(sb->atable)) {
		err = PTR_ERR(sb->atable);
		goto error_change_end;
	}

	trace("create root directory");
	struct tux_iattr root_iattr = { .mode = S_IFDIR | 0755, };
	sb->rootdir = create_internal_inode(sb, TUX_ROOTDIR_INO, &root_iattr);
	if (IS_ERR(sb->rootdir)) {
		err = PTR_ERR(sb->rootdir);
		goto error_change_end;
	}

	change_end_atomic(sb);

	err = sync_super(sb);
	if (err)
		goto error;

	show_buffers(mapping(sb->bitmap));
	show_buffers(mapping(sb->rootdir));
	show_buffers(sb->volmap->map);

	return 0;

error_change_end:
	change_end_atomic(sb);
error:
	tux3_err(sb, "eek, %s", strerror(-err));
	iput(sb->bitmap);
	sb->bitmap = NULL;
	return err;
}

int tux3_init_mem(void)
{
	return tux3_init_hole_cache();
}

void tux3_exit_mem(void)
{
	tux3_destroy_hole_cache();
}
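/*
 * Hypothetical usage sketch (not part of the original source): how
 * make_tux3() might be driven from a userspace harness, reusing the
 * rapid_sb()/INIT_DISKSB()/setup_sb() helpers seen in main() above.
 * The geometry values are placeholders, and a real mkfs path performs
 * additional superblock/volmap setup before calling make_tux3(); this
 * only illustrates the init_mem -> setup_sb -> make_tux3 -> exit_mem
 * ordering implied by the functions in this file.
 */
#if 0	/* illustration only */
static int example_mkfs(void)
{
	struct dev *dev = &(struct dev){ .bits = 12 };	/* assumed 4k blocks */
	block_t volblocks = 1 << 10;			/* placeholder volume size */
	int err;

	err = tux3_init_mem();
	if (err)
		return err;

	init_buffers(dev, 1 << 20, 1);

	struct sb *sb = rapid_sb(dev);
	sb->super = INIT_DISKSB(dev->bits, volblocks);
	setup_sb(sb, &sb->super);

	/* Creates the bitmap, atom dictionary and root directory */
	err = make_tux3(sb);

	tux3_exit_mem();
	return err;
}
#endif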