// Read label dataset char AH5_read_lbl_dataset(hid_t file_id, const char *path, AH5_lbl_dataset_t *lbl_dataset) { H5T_class_t type_class; char rdata = AH5_FALSE; size_t length; int nb_dims; AH5_init_lbl_dataset(lbl_dataset); lbl_dataset->path = strdup(path); lbl_dataset->nb_items = 1; // in case of single value if (AH5_path_valid(file_id, path)) if (H5LTget_dataset_ndims(file_id, path, &nb_dims) >= 0) if (nb_dims <= 1) if (H5LTget_dataset_info(file_id, path, &(lbl_dataset->nb_items), &type_class, &length) >= 0) if (type_class == H5T_STRING) if(AH5_read_str_dataset(file_id, path, lbl_dataset->nb_items, length, &(lbl_dataset->items))) rdata = AH5_TRUE; if (!rdata) { AH5_print_err_dset(AH5_C_LABEL, path); lbl_dataset->nb_items = 0; } return rdata; }
/*
 * Fortran-callable wrapper around H5LTget_dataset_ndims.
 * Converts the Fortran name to a C string, queries the dataset rank and
 * stores it in *rank. Returns 0 on success, -1 on failure.
 */
int_f
nh5ltget_dataset_ndims_c(hid_t_f *loc_id, int_f *namelen, _fcd name, int_f *rank)
{
    int ret_value = -1;
    herr_t ret;
    hid_t c_loc_id;
    char *c_name = NULL;
    int c_namelen;
    int c_rank;

    /*
     * Convert FORTRAN name to C name
     */
    c_namelen = (int)*namelen;
    c_name = (char *)HD5f2cstring(name, c_namelen);
    if (c_name == NULL)
        goto done;

    /*
     * Call H5LTget_dataset_ndims function.
     */
    c_loc_id = (hid_t)*loc_id;
    ret = H5LTget_dataset_ndims(c_loc_id, c_name, &c_rank);
    if (ret < 0)
        goto done;

    *rank = (int_f)c_rank;
    ret_value = 0;

done:
    /* BUG FIX: the buffer allocated by HD5f2cstring was never released
       (neither on success nor on the early error return), leaking on every
       call. Free it on all exit paths, matching h5ltget_dataset_ndims_c. */
    if (c_name != NULL)
        HDfree(c_name);
    return ret_value;
}
void hdf5_load_nd_dataset_helper( hid_t file_id, const char* dataset_name_, int min_dim, int max_dim, Blob<Dtype>* blob) { // Verify that the dataset exists. CHECK(H5LTfind_dataset(file_id, dataset_name_)) << "Failed to find HDF5 dataset " << dataset_name_; // Verify that the number of dimensions is in the accepted range. herr_t status; int ndims; status = H5LTget_dataset_ndims(file_id, dataset_name_, &ndims); CHECK_GE(status, 0) << "Failed to get dataset ndims for " << dataset_name_; CHECK_GE(ndims, min_dim); CHECK_LE(ndims, max_dim); // Verify that the data format is what we expect: float or double. std::vector<hsize_t> dims(ndims); H5T_class_t class_; status = H5LTget_dataset_info( file_id, dataset_name_, dims.data(), &class_, NULL); CHECK_GE(status, 0) << "Failed to get dataset info for " << dataset_name_; CHECK_EQ(class_, H5T_FLOAT) << "Expected float or double data"; vector<int> blob_dims(dims.size()); for (int i = 0; i < dims.size(); ++i) { blob_dims[i] = dims[i]; } blob->Reshape(blob_dims); }
// Read dataset in externalElement and open external files char AH5_read_eet_dataset (hid_t file_id, const char *path, AH5_eet_dataset_t *eet_dataset) { H5T_class_t type_class; char rdata = AH5_FALSE; hsize_t dims[2], i; size_t length; int nb_dims; eet_dataset->path = strdup(path); if (AH5_path_valid(file_id, path)) if (H5LTget_dataset_ndims(file_id, path, &nb_dims) >= 0) if (nb_dims == 2) if (H5LTget_dataset_info(file_id, path, dims, &type_class, &length) >= 0) if (dims[0] > 0 && dims[1] == 3 && type_class == H5T_STRING) if(AH5_read_str_dataset(file_id, path, dims[0] * dims[1], length, &(eet_dataset->eed_items))) { eet_dataset->nb_eed_items = dims[0]; rdata = AH5_TRUE; eet_dataset->file_id = (hid_t *) malloc(eet_dataset->nb_eed_items * sizeof(hid_t)); for (i = 0; i < eet_dataset->nb_eed_items; i++) eet_dataset->file_id[i] = -1; } if (!rdata) { AH5_print_err_dset(AH5_C_EXTERNAL_ELEMENT, path); eet_dataset->nb_eed_items = 0; eet_dataset->file_id = NULL; eet_dataset->eed_items = NULL; } else if (!AH5_open_external_files(eet_dataset)) rdata = AH5_FALSE; return rdata; }
// Reads the metadata of HDF5 dataset `dataset_name_`, validates that its rank
// lies in [min_dim, max_dim] and that its datatype class is float or integer,
// then reshapes `blob` to the dataset's extents. Any other datatype class
// (or a failed HDF5 query) aborts the process via CHECK/LOG(FATAL).
void hdf5_load_nd_dataset_helper(
    hid_t file_id, const char* dataset_name_, int min_dim, int max_dim,
    Blob<Dtype>* blob) {
  // Verify that the dataset exists.
  // REVIEW ktran: this check doesn't work for nested dataset name
  ////CHECK(H5LTfind_dataset(file_id, dataset_name_)) << "Failed to find HDF5 dataset " << dataset_name_;
  // Verify that the number of dimensions is in the accepted range.
  herr_t status;
  int ndims;
  status = H5LTget_dataset_ndims(file_id, dataset_name_, &ndims);
  CHECK_GE(status, 0) << "Failed to get dataset ndims for " << dataset_name_;
  CHECK_GE(ndims, min_dim);
  CHECK_LE(ndims, max_dim);
  // Verify that the data format is what we expect: float or double.
  std::vector<hsize_t> dims(ndims);
  H5T_class_t class_;
  status = H5LTget_dataset_info(
      file_id, dataset_name_, dims.data(), &class_, NULL);
  CHECK_GE(status, 0) << "Failed to get dataset info for " << dataset_name_;
  // The unsupported cases have no `break`: LOG(FATAL) terminates the process,
  // so the apparent fall-through is unreachable.
  switch (class_) {
  case H5T_FLOAT:
    {
      LOG_FIRST_N(INFO, 1) << "Datatype class: H5T_FLOAT";
      break;
    }
  case H5T_INTEGER:
    {
      LOG_FIRST_N(INFO, 1) << "Datatype class: H5T_INTEGER";
      break;
    }
  case H5T_TIME:
    LOG(FATAL) << "Unsupported datatype class: H5T_TIME";
  case H5T_STRING:
    LOG(FATAL) << "Unsupported datatype class: H5T_STRING";
  case H5T_BITFIELD:
    LOG(FATAL) << "Unsupported datatype class: H5T_BITFIELD";
  case H5T_OPAQUE:
    LOG(FATAL) << "Unsupported datatype class: H5T_OPAQUE";
  case H5T_COMPOUND:
    LOG(FATAL) << "Unsupported datatype class: H5T_COMPOUND";
  case H5T_REFERENCE:
    LOG(FATAL) << "Unsupported datatype class: H5T_REFERENCE";
  case H5T_ENUM:
    LOG(FATAL) << "Unsupported datatype class: H5T_ENUM";
  case H5T_VLEN:
    LOG(FATAL) << "Unsupported datatype class: H5T_VLEN";
  case H5T_ARRAY:
    LOG(FATAL) << "Unsupported datatype class: H5T_ARRAY";
  default:
    LOG(FATAL) << "Datatype class unknown";
  }
  // Narrow the hsize_t extents into the int shape the blob API expects.
  vector<int> blob_dims(dims.size());
  for (int i = 0; i < dims.size(); ++i) {
    blob_dims[i] = dims[i];
  }
  blob->Reshape(blob_dims);
}
/* OCaml stub: returns a (dims array, datatype class, type size) tuple for the
 * dataset `dset_name` under `loc_id`. Raises on any HDF5 failure or OOM. */
value hdf5_h5lt_get_dataset_info(value loc_id_v, value dset_name_v)
{
  CAMLparam2(loc_id_v, dset_name_v);
  CAMLlocal1(info);
  hid_t loc_id = Hid_val(loc_id_v);
  const char *dset_name = String_val(dset_name_v);
  int rank;
  hsize_t *dims;
  H5T_class_t class_id;
  size_t type_size;
  herr_t err;

  raise_if_fail(H5LTget_dataset_ndims(loc_id, dset_name, &rank));
  dims = calloc(rank, sizeof(hsize_t));
  if (dims == NULL) caml_raise_out_of_memory();
  err = H5LTget_dataset_info(loc_id, dset_name, dims, &class_id, &type_size);
  if (err < 0)
  {
    free(dims);
    fail();
  }
  info = caml_alloc_tuple(3);
  Store_field(info, 0, val_hsize_t_array(rank, dims));
  Store_field(info, 1, Val_h5t_class(class_id));
  Store_field(info, 2, Val_int(type_size));
  /* BUG FIX: `dims` was freed only on the error path and leaked on every
     successful call. val_hsize_t_array presumably copies the values into an
     OCaml array (TODO confirm), so the C buffer can be released here. */
  free(dims);
  CAMLreturn(info);
}
// Reads the metadata of HDF5 dataset `dataset_name_`, validates that its rank
// lies in [min_dim, max_dim] and that its datatype class is float or integer,
// then reshapes `blob` to the dataset's extents. Any other datatype class
// (or a failed HDF5 query) aborts the process via CHECK/LOG(FATAL).
void hdf5_load_nd_dataset_helper(
    hid_t file_id, const char* dataset_name_, int min_dim, int max_dim,
    Blob<Dtype>* blob) {
  // Verify that the dataset exists.
  CHECK(H5LTfind_dataset(file_id, dataset_name_))
      << "Failed to find HDF5 dataset " << dataset_name_;
  // Verify that the number of dimensions is in the accepted range.
  herr_t status;
  int ndims;
  status = H5LTget_dataset_ndims(file_id, dataset_name_, &ndims);
  CHECK_GE(status, 0) << "Failed to get dataset ndims for " << dataset_name_;
  CHECK_GE(ndims, min_dim);
  CHECK_LE(ndims, max_dim);
  // Verify that the data format is what we expect: float or double.
  std::vector<hsize_t> dims(ndims);
  H5T_class_t class_;
  status = H5LTget_dataset_info(
      file_id, dataset_name_, dims.data(), &class_, NULL);
  CHECK_GE(status, 0) << "Failed to get dataset info for " << dataset_name_;
  // blocks around "LOG" macros are to avoid "initialization of occurrences_??
  // is skipped by case label" errors on msvc
  // (LOG(FATAL) aborts, so the unsupported cases never fall through.)
  switch (class_) {
  case H5T_FLOAT:
    {
      LOG_FIRST_N(INFO, 1) << "Datatype class: H5T_FLOAT";
      break;
    }
  case H5T_INTEGER:
    {
      LOG_FIRST_N(INFO, 1) << "Datatype class: H5T_INTEGER";
      break;
    }
  case H5T_TIME:
    {
      LOG(FATAL) << "Unsupported datatype class: H5T_TIME";
    }
  case H5T_STRING:
    {
      LOG(FATAL) << "Unsupported datatype class: H5T_STRING";
    }
  case H5T_BITFIELD:
    {
      LOG(FATAL) << "Unsupported datatype class: H5T_BITFIELD";
    }
  case H5T_OPAQUE:
    {
      LOG(FATAL) << "Unsupported datatype class: H5T_OPAQUE";
    }
  case H5T_COMPOUND:
    {
      LOG(FATAL) << "Unsupported datatype class: H5T_COMPOUND";
    }
  case H5T_REFERENCE:
    {
      LOG(FATAL) << "Unsupported datatype class: H5T_REFERENCE";
    }
  case H5T_ENUM:
    {
      LOG(FATAL) << "Unsupported datatype class: H5T_ENUM";
    }
  case H5T_VLEN:
    {
      LOG(FATAL) << "Unsupported datatype class: H5T_VLEN";
    }
  case H5T_ARRAY:
    {
      LOG(FATAL) << "Unsupported datatype class: H5T_ARRAY";
    }
  default:
    {
      LOG(FATAL) << "Datatype class unknown";
    }
  }
  // Narrow the hsize_t extents into the int shape the blob API expects.
  vector<int> blob_dims(dims.size());
  for (int i = 0; i < dims.size(); ++i) {
    blob_dims[i] = dims[i];
  }
  blob->Reshape(blob_dims);
}
/*
 * Fortran-callable wrapper around H5LTget_dataset_info.
 * Converts the Fortran name to a C string, queries the dataset's extents,
 * datatype class and element size, and writes them to the Fortran output
 * arguments. Returns 0 on success, -1 on failure.
 */
int_f
h5ltget_dataset_info_c(hid_t_f *loc_id, size_t_f *namelen, _fcd name, hsize_t_f *dims, int_f *type_class, size_t_f *type_size)
{
    int ret_value = -1;
    herr_t ret;
    hid_t c_loc_id;
    char *c_name = NULL;
    H5T_class_t c_classtype;
    size_t c_type_size;
    hsize_t c_dims[32]; /* NOTE(review): fixed 32-slot buffer filled before the
                           rank is known -- presumably 32 == H5S_MAX_RANK so it
                           can never overflow; confirm against the library. */
    int i;
    int c_rank;

    /*
     * convert FORTRAN name to C name
     */
    c_name = (char *)HD5f2cstring(name, (size_t)*namelen);
    if (c_name == NULL)
        goto done;

    /*
     * call H5LTget_dataset_ndims function.
     */
    c_loc_id = (hid_t)*loc_id;
    ret = H5LTget_dataset_info(c_loc_id, c_name, c_dims, &c_classtype, &c_type_size);
    if (ret < 0)
        goto done;

    *type_class = c_classtype;
    *type_size = (size_t_f)c_type_size;

    /*
     * transpose dimension arrays because of C-FORTRAN storage order
     */
    ret = H5LTget_dataset_ndims(c_loc_id, c_name, &c_rank);
    if (ret < 0)
        goto done;

    /* Reverse the extents: C stores row-major, Fortran column-major. */
    for (i = 0; i < c_rank ; i++)
    {
        dims[i] = (hsize_t_f) c_dims[c_rank - i - 1];
    }

    ret_value = 0;

done:
    /* Free the converted name on every exit path. */
    if(c_name!=NULL)
        HDfree(c_name);
    return ret_value;
}
//! Test write dataset. // Data extracted from http://www.hdfgroup.org/ftp/HDF5/examples/examples-by-api/hdf5-examples/1_8/C/H5T/h5ex_t_string.c char *test_write_complex_dataset() { hid_t file_id, filetype, memtype, space, dset; size_t sdim; hsize_t dims[1] = {2}; int ndims, i, j; int rank; int length; size_t type_size; float * buf; hid_t real_id_type; herr_t status; hsize_t *newdims; AH5_complex_t cplx[2]; file_id = AH5_auto_test_file(); cplx[0].re=10.; cplx[0].im=20.; cplx[1].re=10.5; cplx[1].im=20.5; mu_assert("Write complex dataset.", AH5_write_cpx_dataset(file_id,"dataset_name", 2, cplx)); // Test the written data using hdf5 API. real_id_type = create_type_id(H5T_NATIVE_FLOAT); status = H5LTget_dataset_ndims(file_id, "dataset_name", &rank); newdims = (hsize_t *) malloc(rank * sizeof(hsize_t)); status = H5LTget_dataset_info(file_id,"dataset_name" , newdims, NULL, NULL); length = newdims[0]; for (i = 1; i < rank; i++) length = length * newdims[i]; buf = (float *) malloc(2 * length * sizeof(float)); status = H5LTread_dataset(file_id, "dataset_name", real_id_type, buf); j = 0; for (i = 0; i < length; i++) { printf("Real parts : %f %f\n", cplx[i].re, buf[j]); printf("Imaginary parts : %f %f\n", cplx[i].im, buf[j+1]); mu_assert_equal("Check the real values.", cplx[i].re, buf[j]); mu_assert_equal("Check the imaginary value.", cplx[i].im, buf[j+1]); j = j + 2; } free(buf); return MU_FINISHED_WITHOUT_ERRORS; }
/*
 * Reads a dataset of interleaved (re, im) float pairs at `path` and returns a
 * newly malloc'ed array of float_complex (caller frees). Errors from the HDF5
 * calls are reported on stdout but do not abort the read (original best-effort
 * behavior preserved).
 */
float_complex *read_complex_dataset(hid_t loc_id, const char* path)
{
  float_complex *values;
  int rank;
  int i;
  int j;                 /* declared with the other locals for C89-style consistency */
  hsize_t *dims;
  int length;
  float *buf;
  hid_t real_id_type;
  herr_t status;
  /* Cleanup: removed unused local `size_t type_size;`. */

  real_id_type = create_real_type_id();
  status = H5LTget_dataset_ndims(loc_id, path, &rank);
  if (status < 0)
  {
    printf("Can't read rank \n");
  }
  dims = (hsize_t *) malloc(rank * sizeof(hsize_t));
  status = H5LTget_dataset_info(loc_id, path, dims, NULL, NULL);
  if (status < 0)
  {
    printf("Can't read dataset info\n");
  }
  /* Total element count is the product of all extents. */
  length = dims[0];
  for (i = 1; i < rank; i++)
    length = length * dims[i];
  /* Two floats per complex value: real part followed by imaginary part. */
  buf = (float *) malloc(2 * length * sizeof(float));
  status = H5LTread_dataset(loc_id, path, real_id_type, buf);
  if (status < 0)
  {
    printf("Can't read dataset\n");
  }
  values = (float_complex *) malloc(length * sizeof(float_complex));
  j = 0;
  for (i = 0; i < length; i++)
  {
    values[i].re = buf[j];
    values[i].im = buf[j + 1];
    j = j + 2;
  }
  free(buf);
  free(dims);
  return values;
}
// Read dataset in externalElement and open external files char AH5_read_eet_dataset (hid_t file_id, const char *path, AH5_eet_dataset_t *eet_dataset) { H5T_class_t type_class; char rdata = AH5_FALSE; hsize_t dims[2], i; size_t length; int nb_dims; ssize_t fpath_size; eet_dataset->path = strdup(path); fpath_size = H5Fget_name(file_id, NULL, 0); // Strange behavior of H5Fget_name: it seems to return to small length. eet_dataset->principle_file_path = malloc(fpath_size + 2); eet_dataset->principle_file_path[fpath_size + 1] = '\0'; eet_dataset->principle_file_path[fpath_size] = '\0'; H5Fget_name(file_id, eet_dataset->principle_file_path, fpath_size + 1); if (AH5_path_valid(file_id, path)) if (H5LTget_dataset_ndims(file_id, path, &nb_dims) >= 0) if (nb_dims == 2) if (H5LTget_dataset_info(file_id, path, dims, &type_class, &length) >= 0) if (dims[0] > 0 && dims[1] == 3 && type_class == H5T_STRING) if(AH5_read_str_dataset(file_id, path, dims[0] * dims[1], length, &(eet_dataset->eed_items))) { eet_dataset->nb_eed_items = dims[0]; rdata = AH5_TRUE; eet_dataset->file_id = (hid_t *) malloc((size_t) eet_dataset->nb_eed_items * sizeof(hid_t)); for (i = 0; i < eet_dataset->nb_eed_items; i++) eet_dataset->file_id[i] = -1; } if (!rdata) { AH5_print_err_dset(AH5_C_EXTERNAL_ELEMENT, path); eet_dataset->nb_eed_items = 0; eet_dataset->file_id = NULL; eet_dataset->eed_items = NULL; } else if (!AH5_open_external_files(eet_dataset)) rdata = AH5_FALSE; return rdata; }
/*
 * Fortran-callable wrapper around H5LTget_dataset_ndims.
 * Converts the Fortran name to a C string, queries the dataset rank and
 * stores it in *rank. Returns 0 on success, -1 on failure. The converted
 * name is freed on every path.
 */
int_f
h5ltget_dataset_ndims_c(hid_t_f *loc_id, size_t_f *namelen, _fcd name, int_f *rank)
{
    int ret_value = -1;
    int c_rank;
    char *c_name = (char *)HD5f2cstring(name, (size_t)*namelen);

    if (c_name != NULL) {
        if (H5LTget_dataset_ndims((hid_t)*loc_id, c_name, &c_rank) >= 0) {
            *rank = (int_f)c_rank;
            ret_value = 0;
        }
        HDfree(c_name);
    }
    return ret_value;
}
// Reads the metadata of HDF5 dataset `dataset_name_`, validates that its rank
// lies in [min_dim, max_dim] and that it holds floating-point data, then
// reshapes `blob` to up to four dimensions (missing trailing dims default
// to 1). Aborts via CHECK on any failure.
void hdf5_load_nd_dataset_helper(
    hid_t file_id, const char* dataset_name_, int min_dim, int max_dim,
    Blob<Dtype>* blob) {
  // Verify that the number of dimensions is in the accepted range.
  herr_t status;
  int ndims;
  status = H5LTget_dataset_ndims(file_id, dataset_name_, &ndims);
  // BUG FIX: status was assigned but never checked; on failure `ndims` is
  // uninitialized and the vector below gets a garbage size.
  CHECK_GE(status, 0) << "Failed to get dataset ndims for " << dataset_name_;
  CHECK_GE(ndims, min_dim);
  CHECK_LE(ndims, max_dim);
  // Verify that the data format is what we expect: float or double.
  std::vector<hsize_t> dims(ndims);
  H5T_class_t class_;
  status = H5LTget_dataset_info(
      file_id, dataset_name_, dims.data(), &class_, NULL);
  // BUG FIX: check the info query too before trusting dims/class_.
  CHECK_GE(status, 0) << "Failed to get dataset info for " << dataset_name_;
  CHECK_EQ(class_, H5T_FLOAT) << "Expected float or double data";
  blob->Reshape(
    dims[0],
    (dims.size() > 1) ? dims[1] : 1,
    (dims.size() > 2) ? dims[2] : 1,
    (dims.size() > 3) ? dims[3] : 1);
}
// Read simulation instance.
// Reads the optional/mandatory attributes, the "parameter" group attributes,
// and the "inputs"/"outputs" string datasets under `path`. Missing inputs
// make the whole read fail (AH5_FALSE); missing outputs only produce a
// warning. Returns AH5_TRUE on success.
char AH5_read_sim_instance (hid_t file_id, const char *path, AH5_sim_instance_t *sim_instance)
{
  char mandatory[][AH5_ATTR_LENGTH] = {AH5_A_MODULE, AH5_A_VERSION};
  /* char mandatory2[][AH5_ATTR_LENGTH] = {}; */
  char *path1;
  char rdata = AH5_TRUE;
  H5T_class_t type_class;
  char success1 = AH5_FALSE, success2 = AH5_FALSE;
  size_t length;
  int nb_dims;

  sim_instance->path = strdup(path);
  sim_instance->opt_attrs.instances = NULL;
  sim_instance->module = NULL;
  sim_instance->version = NULL;
  sim_instance->inputs = NULL;
  sim_instance->outputs = NULL;

  if (AH5_path_valid(file_id, path))
  {
    AH5_read_opt_attrs(file_id, path, &(sim_instance->opt_attrs), mandatory, sizeof(mandatory)/AH5_ATTR_LENGTH);
    if (!AH5_read_str_attr(file_id, path, AH5_A_MODULE, &(sim_instance->module)))
      AH5_print_err_attr(AH5_C_SIMULATION, path, AH5_A_MODULE);
    if (!AH5_read_str_attr(file_id, path, AH5_A_VERSION, &(sim_instance->version)))
      AH5_print_err_attr(AH5_C_SIMULATION, path, AH5_A_VERSION);
    // Build "<path><AH5_G_PARAMETER>" in path1; the buffer is reused (and
    // resized with realloc) for each child group path below.
    path1 = malloc((strlen(path) + strlen(AH5_G_PARAMETER) + 1) * sizeof(*path1));
    if (!path1)
    {
      // NOTE(review): the early returns below leave the strings/attrs read so
      // far owned by sim_instance -- presumably the caller frees them; confirm.
      AH5_print_err_dset(AH5_C_SIMULATION, path);
      return AH5_FALSE;
    }
    strncpy(path1, path, strlen(path) + 1);
    strncat(path1, AH5_G_PARAMETER, strlen(AH5_G_PARAMETER));
    AH5_read_opt_attrs(file_id, path1, &(sim_instance->parameter), NULL, 0);

    // inputs
    sim_instance->nb_inputs = 1;  // in case of single value
    // NOTE(review): if realloc fails, the original path1 buffer is leaked.
    path1 = realloc(path1, (strlen(path) + strlen(AH5_G_INPUTS) + 1) * sizeof(*path1));
    if (!path1)
    {
      AH5_print_err_dset(AH5_C_SIMULATION, path);
      return AH5_FALSE;
    }
    strncpy(path1, path, strlen(path) + 1);
    strncat(path1, AH5_G_INPUTS, strlen(AH5_G_INPUTS));
    // The dataset must be a 0- or 1-D string dataset; each query runs only if
    // the previous one succeeded.
    if (AH5_path_valid(file_id, path1))
      if (H5LTget_dataset_ndims(file_id, path1, &nb_dims) >= 0)
        if (nb_dims <= 1)
          if (H5LTget_dataset_info(file_id, path1, &(sim_instance->nb_inputs), &type_class, &length) >= 0)
            if (type_class == H5T_STRING)
              if(AH5_read_str_dataset(file_id, path1, sim_instance->nb_inputs, length, &(sim_instance->inputs)))
                success1 = AH5_TRUE;
    if (!success1)
    {
      // Missing/invalid inputs fail the whole read.
      AH5_print_err_dset(AH5_C_SIMULATION, path1);
      sim_instance->nb_inputs = 0;
      rdata = AH5_FALSE;
    }

    // outputs
    sim_instance->nb_outputs = 1;  // in case of single value
    path1 = realloc(path1, (strlen(path) + strlen(AH5_G_OUTPUTS) + 1) * sizeof(*path1));
    if (!path1)
    {
      AH5_print_err_dset(AH5_C_SIMULATION, path);
      return AH5_FALSE;
    }
    strncpy(path1, path, strlen(path) + 1);
    strncat(path1, AH5_G_OUTPUTS, strlen(AH5_G_OUTPUTS));
    if (AH5_path_valid(file_id, path1))
      if (H5LTget_dataset_ndims(file_id, path1, &nb_dims) >= 0)
        if (nb_dims <= 1)
          if (H5LTget_dataset_info(file_id, path1, &(sim_instance->nb_outputs), &type_class, &length) >= 0)
            if (type_class == H5T_STRING)
              if(AH5_read_str_dataset(file_id, path1, sim_instance->nb_outputs, length, &(sim_instance->outputs)))
                success2 = AH5_TRUE;
    if (!success2)
    {
      // Missing outputs only warn; rdata is left unchanged.
      AH5_print_wrn_outputs(path);
      sim_instance->nb_outputs = 0;
    }
    free(path1);
  }
  else
  {
    rdata = AH5_FALSE;
  }

  return rdata;
}
// Loads every field listed in hdf5_general_data_param from `filename` into
// hdf_blobs_. Fields tagged "int8" in the datatype list are read with the
// integer reader and widened to Dtype; all others go through
// hdf5_load_nd_dataset as float data. All blobs must agree on num().
void HDF5GeneralDataLayer<Dtype>::LoadGeneralHDF5FileData(const char* filename) {
  DLOG(INFO) << "Loading The general HDF5 file" << filename;
  hid_t file_id = H5Fopen(filename, H5F_ACC_RDONLY, H5P_DEFAULT);
  if (file_id < 0) {
    LOG(ERROR) << "Failed opening HDF5 file" << filename;
  }

  HDF5GeneralDataParameter data_param =
      this->layer_param_.hdf5_general_data_param();
  int fieldNum = data_param.field_size();
  hdf_blobs_.resize(fieldNum);

  const int MIN_DATA_DIM = 1;
  const int MAX_DATA_DIM = 4;

  for (int i = 0; i < fieldNum; ++i) {
    if (i < data_param.datatype_size() &&
        strcmp(data_param.datatype(i).data(), "int8") == 0) {
      // We take out the io functions here
      const char* dataset_name_ = data_param.field(i).data();
      hdf_blobs_[i] = shared_ptr<Blob<Dtype> >(new Blob<Dtype>());
      CHECK(H5LTfind_dataset(file_id, dataset_name_))
          << "Failed to find HDF5 dataset " << dataset_name_;
      // Verify that the number of dimensions is in the accepted range.
      herr_t status;
      int ndims;
      status = H5LTget_dataset_ndims(file_id, dataset_name_, &ndims);
      CHECK_GE(status, 0) << "Failed to get dataset ndims for " << dataset_name_;
      CHECK_GE(ndims, MIN_DATA_DIM);
      CHECK_LE(ndims, MAX_DATA_DIM);
      // Verify that the data format is what we expect: int8
      std::vector<hsize_t> dims(ndims);
      H5T_class_t class_;
      status = H5LTget_dataset_info(file_id, dataset_name_, dims.data(),
                                    &class_, NULL);
      CHECK_GE(status, 0) << "Failed to get dataset info for " << dataset_name_;
      CHECK_EQ(class_, H5T_INTEGER) << "Expected integer data";

      vector<int> blob_dims(dims.size());
      for (int j = 0; j < dims.size(); ++j) {
        blob_dims[j] = dims[j];
      }
      hdf_blobs_[i]->Reshape(blob_dims);
      std::cout<<"Trying to allocate memories!\n";
      // BUG FIX: the original allocated with `new int[...]` but released with
      // plain `delete` (undefined behavior), and leaked the buffer entirely
      // when a CHECK fired. A vector owns the storage on every path.
      std::vector<int> buffer_data(hdf_blobs_[i]->count());
      std::cout<<"Memories loaded!!!\n";
      status = H5LTread_dataset_int(file_id, dataset_name_, buffer_data.data());
      CHECK_GE(status, 0) << "Failed to read int8 dataset " << dataset_name_;
      // Widen each int sample into the blob's Dtype.
      Dtype* target_data = hdf_blobs_[i]->mutable_cpu_data();
      for (int j = 0; j < hdf_blobs_[i]->count(); j++) {
        target_data[j] = Dtype(buffer_data[j]);
      }
    } else {
      // The dataset is still the float32 datatype
      hdf_blobs_[i] = shared_ptr<Blob<Dtype> >(new Blob<Dtype>());
      hdf5_load_nd_dataset(file_id, data_param.field(i).data(), MIN_DATA_DIM,
                           MAX_DATA_DIM, hdf_blobs_[i].get());
    }
  }

  herr_t status = H5Fclose(file_id);
  CHECK_GE(status, 0) << "Failed to close HDF5 file " << filename;
  // Every field must provide the same number of samples.
  for (int i = 1; i < fieldNum; ++i) {
    CHECK_EQ(hdf_blobs_[0]->num(), hdf_blobs_[i]->num());
  }
  data_permutation_.clear();
  data_permutation_.resize(hdf_blobs_[0]->shape(0));
  for (int i = 0; i < hdf_blobs_[0]->shape(0); i++)
    data_permutation_[i] = i;
  //TODO: DATA SHUFFLE
  //LOG(INFO) << "Successully loaded " << data_blob_.num() << " rows";
}