/* Remove the directory at `path` via the remote COREFS rmdir operation.
 * Returns simple_op's result (0 on success, error code otherwise).
 * Fixes the "rmdie" typo in the log message and guards the debug print
 * with #ifdef DEBUG, matching the convention of the other wrappers. */
int do_rmdir(const char* path)
{
    int ret;
#ifdef DEBUG
    dprintf(stderr, "Requesting rmdir: %s \n", path);
#endif
    ret = simple_op(COREFS_SIMPLE_RMDIR, path, 0, 0, NULL);
    return ret;
}
/* Rename the entry `from` to `to` through the remote COREFS rename op.
 * Returns simple_op's result directly. */
int do_rename(const char* from, const char* to)
{
    dprintf(stderr, "attempting to rename %s to %s\n", from, to);
    return simple_op(COREFS_SIMPLE_RENAME, from, 0, 0, to);
}
/* Create directory `path` with permission bits `mode` on the server.
 * Returns simple_op's result directly. */
int do_mkdir(const char* path, mode_t mode)
{
    dprintf(stderr, "attempting to mkdir %s with perms %x\n", path, mode);
    return simple_op(COREFS_SIMPLE_MKDIR, path, 0, mode, NULL);
}
int do_link(const char* from, const char* to) { #ifdef DEBUG dprintf(stderr, "SYMLINK: old \'%s\' new \'%s\'\n", from, to); #endif return simple_op(COREFS_SIMPLE_LINK, from, 0, 0, to); }
int do_access(const char* path, int mode){ #ifdef DEBUG dprintf(stderr, "ACCESS: path[%s] : mode[%d]\n", path,mode); #endif int ret = simple_op(COREFS_SIMPLE_ACCESS, path, 0, mode, NULL); return ret; }
int do_release(const char *path, struct fuse_file_info *fi) { #ifdef DEBUG dprintf(stderr, "RELEASE: %s \n", path); #endif simple_op(COREFS_SIMPLE_RELEASE, path, 0, fi->flags, NULL); return 0; }
int do_open(const char *path, struct fuse_file_info *fi) { #ifdef DEBUG dprintf(stderr, "OPEN: %s \n", path); #endif int ret = simple_op(COREFS_SIMPLE_OPEN, path, 0, fi->flags, NULL); return ret; }
/* Create the filesystem node `path` with permission bits `mode`.
 * NOTE(review): `rdev` is not forwarded — simple_op has no parameter slot
 * for it, so device numbers of special files are lost; confirm this is
 * intentional before relying on mknod for device nodes. */
int do_mknod(const char *path, mode_t mode, dev_t rdev)
{
#ifdef DEBUG
    dprintf(stderr, "MKNOD: %s \n", path);
#endif
    (void)rdev;
    return simple_op(COREFS_SIMPLE_MKNOD, path, 0, mode, NULL);
}
/* Truncate `path` to `size` bytes via the remote COREFS truncate op.
 * Returns simple_op's result.
 * Fix: off_t is a signed type, so printing it with "%llu" was a
 * format-specifier mismatch (undefined behavior in the printf family);
 * cast to long long and use "%lld". */
int do_truncate(const char *path, off_t size)
{
#ifdef DEBUG
    dprintf(stderr, "TRUNCATE: %s : %lld\n", path, (long long)size);
#endif
    return simple_op(COREFS_SIMPLE_TRUNCATE, path, size, 0, NULL);
}
/* Remove the file at `path` via the remote COREFS unlink operation.
 * Returns simple_op's result directly. */
int do_unlink(const char *path)
{
#ifdef DEBUG
    dprintf(stderr, "UNLINK: %s \n", path);
#endif
    return simple_op(COREFS_SIMPLE_UNLINK, path, 0, 0, NULL);
}
int do_chmod(const char *path, mode_t mode) { #ifdef DEBUG dprintf(stderr, "CHMOD: %s mode %u\n", path, mode); #endif /* we assume that the uid and gid of the user is same at the client * and server side. The upper layer can performing its own * mapping, if necessary. */ return simple_op(COREFS_SIMPLE_CHMOD, path, 0, mode, NULL); }
int do_utime (const char * path, struct utimbuf * tbuf){ /* we use offset and mode1 variables for actime and modtime, * respectively */ if(tbuf == NULL){ struct timeval tv; struct timezone tz; gettimeofday(&tv, &tz); #ifdef DEBUG dprintf(stderr, "UTIME: %s actime %lu modtime %lu\n", path, tv.tv_sec,tv.tv_sec); #endif simple_op(COREFS_SIMPLE_UTIME, path, tv.tv_sec, tv.tv_sec, NULL); } #ifdef DEBUG dprintf(stderr, "UTIME: %s actime %lu modtime %lu\n", path, tbuf->actime, tbuf->modtime); #endif return simple_op(COREFS_SIMPLE_UTIME, path, tbuf->actime, tbuf->modtime, NULL); }
/* Create a symbolic link named `to` pointing at `from`.
 * Returns simple_op's result.
 * Consistency fix: the debug print was unconditional, unlike the other
 * wrappers in this file; it is now compiled in only under DEBUG. */
int do_symlink(const char* from, const char* to)
{
#ifdef DEBUG
    dprintf(stderr, "Requesting SYMLINK: %s : %s \n", from, to);
#endif
    return simple_op(COREFS_SIMPLE_SYMLINK, from, 0, 0, to);
}
void simpler_test (size_t n_train_sample, size_t n_test_sample, size_t n_in, TestParams params) { params.n_batch = std::min(params.n_batch, n_train_sample); std::string serialpath = params.savepath; params.hiddens = { n_in, n_in, n_in / 2 }; DBNTrainer model(n_in, params.hiddens, "dbn_simple_learner"); // generate test sample std::vector<float> train_samples = batch_generate(n_train_sample, n_in); std::vector<float> test_samples = batch_generate(n_test_sample, n_in); std::vector<float> train_out = simple_op(train_samples); std::vector<float> test_out = simple_op(test_samples); if (params.train) { // pretrain if (params.pretrain) { model.initialize(); pretrain(model, n_in, train_samples, params, "demo"); model.save(serialpath, "dbn_demo_pretrain"); } else { model.initialize(serialpath, "dbn_demo_pretrain"); } // finetune float inbatch = params.n_batch * n_in; float outbatch = inbatch / 2; nnet::placeholder<float> finetune_in(std::vector<size_t>{n_in, params.n_batch}, "finetune_in"); nnet::placeholder<float> finetune_out(std::vector<size_t>{n_in / 2, params.n_batch}, "finetune_out"); rocnnet::update_cost_t tuner = model.build_finetune_functions(finetune_in, finetune_out, params.training_lr); nnet::variable_updater<float> train_update = tuner.first; size_t n_train_batches = n_train_sample / params.n_batch; auto xit = train_samples.begin(); auto yit = train_out.begin(); for (size_t epoch = 0; epoch < params.training_epochs; epoch++) { for (size_t mb_idx = 0; mb_idx < n_train_batches; mb_idx++) { std::vector<float> xbatch(xit + mb_idx * inbatch, xit + (mb_idx + 1) * inbatch); std::vector<float> ybatch(yit + mb_idx * outbatch, yit + (mb_idx + 1) * outbatch); finetune_in = xbatch; finetune_out = ybatch; train_update(true); std::cout << "epoch " << epoch << " fine tuning index " << mb_idx << '\n'; } } model.save(serialpath, "dbn_demo"); } else { model.initialize(serialpath, "dbn_demo"); } // test nnet::placeholder<float> test_in(std::vector<size_t>{n_in}, "test_in"); 
nnet::placeholder<float> expect_out(std::vector<size_t>{n_in / 2}, "expect_out"); nnet::varptr<float> test_res = model.prop_up(nnet::varptr<float>(&test_in)); nnet::varptr<float> test_error = nnet::reduce_mean( nnet::sqrt<float>(nnet::varptr<float>(&expect_out) - test_res)); auto xit = test_samples.begin(); auto yit = test_out.begin(); float total_err = 0; for (size_t i = 0; i < n_test_sample; ++i) { std::vector<float> xbatch(xit + i * n_in, xit + (i + 1) * n_in); std::vector<float> ybatch(yit + i * n_in / 2, yit + (i + 1) * n_in / 2); test_in = xbatch; expect_out = ybatch; float test_err = nnet::expose<float>(test_error)[0]; total_err += test_err; std::cout << "test error at " << i << ": " << test_err << '\n'; } std::cout << "total error " << total_err << '\n'; }