// Read the "dims" attribute from the root group of the wrapped HDF5 file.
// Validates that the file carries a "version" attribute equal to 1 before
// reading; throws invalid_argument otherwise.  Returns a default-constructed
// Tuplef if the file has no "dims" attribute.
Tuplef Wrapper_i_hdf::get_dims() const
{
  Tuplef dims;

  H5File h5file(file_name_, H5F_ACC_RDONLY);
  Group root(h5file.openGroup("/"));
  Attr_list_hdf attrs(&root);

  // The file must declare a version, and it must be version 1.
  if (!attrs.contains_attr("version"))
    throw invalid_argument("input file does not have a version");
  int version = 0;
  if (attrs.get_value("version", version) != 1)
    throw invalid_argument("input file is not the right version");

  if (attrs.contains_attr("dims"))
    attrs.get_value("dims", dims);

  root.close();
  h5file.close();

  return dims;
}
// Read a factor (variable-length string vector) named `charName` from the
// /MetaData/Factor group of the HDF5 file at `filePath` and return it as an
// Rcpp CharacterVector.
CharacterVector ch5ReadFactor(string charName, string filePath)
{
  // Stack-allocated HDF5 handles: closed automatically even if an HDF5 call
  // throws (the original `new`ed them and never deleted them).
  H5File file(filePath, H5F_ACC_RDONLY);

  // Group Meta Group
  Group metaGroup(file.openGroup("/MetaData/Factor"));

  // Getting the data set from the file
  DataSet dataset = metaGroup.openDataSet((H5std_string)charName);

  // Getting the data space from the dataset
  DataSpace dataspace = dataset.getSpace();

  // We know that it is a char vector array so ndim = 1
  hsize_t dims[1];

  // Getting the length of strings
  dataspace.getSimpleExtentDims(dims, NULL);

  // for convenience
  int dim = dims[0];

  // String Type
  StrType vlst(0, H5T_VARIABLE);

  // std::vector instead of the original non-standard VLA `char *strRet[dim]`
  std::vector<char*> strRet(dim);
  dataset.read(strRet.data(), vlst);

  CharacterVector out(dim);
  for(int i = 0; i < dim; i++)
  {
    out[i] = strRet[i];
    // HDF5 malloc()s each variable-length string on read; release it once it
    // has been copied into the R vector (the original leaked all of them).
    free(strRet[i]);
  }
  dataset.close();
  metaGroup.close();
  file.close();
  return out;
}
//'@title This function writes an integer meta data to file //' //'@description This function is inteded for internal use //' //'@param intName the name of the meta data item to be written //'@param integer int that will be written to the meta data described by intName //'@param filePath character path to the h5 file where data will be written //'@param update int flag for whether item is new (0) or whether it will overwrite a previous item (1) //'@return int 0 // [[Rcpp::export]] int h5WriteInt(std::string intName, int integer, std::string filePath, int update) { H5File *file = new H5File(filePath, H5F_ACC_RDWR); // Colclasses dim hsize_t dim[1] = {1}; string meta = "/MetaData"; // Group Meta Group Group* metaGroup = new Group(file->openGroup(meta)); // dataspace DataSpace dataspace = DataSpace(1, dim); DataSet dataset; if(update == 1) { string slash = "/"; string groupName = meta + slash + intName; file->unlink(groupName); } dataset = metaGroup->createDataSet(intName, PredType::NATIVE_INT, dataspace); dataset.write(&integer, PredType::NATIVE_INT); dataset.close(); //nn metaGroup->close(); file->close(); return 0; }
// Extract the parameters named in `pram_list` for computation `comp_num`
// from the HDF5 file `fname` and return them bundled in an Md_store.
// Throws runtime_error if a requested field is missing or of an
// unimplemented type.
Md_store utilities::extract_prams(const std::string & fname, int comp_num,
                                  const vector<string> &pram_list)
{
  // hdf stuff
  H5File file = H5File(fname, H5F_ACC_RDONLY);
  Group group = file.openGroup("/parameters/" +
                               format_dset_name(utilities::D_XPOS, comp_num));
  Attr_list_hdf attr_list(&group);

  Md_store filter_prams;

  // `!=` is the idiomatic iterator termination test (the original used `<`,
  // which only happens to work for random-access iterators).
  for(vector<string>::const_iterator it = pram_list.begin();
      it != pram_list.end(); ++it)
  {
    string pram = *it;
    if(!attr_list.contains_attr(pram))
      throw runtime_error("the field " + pram + " is not in the file");
    utilities::V_TYPE vtype = attr_list.get_type(pram);
    int tmpi;
    unsigned int tmpui;
    float tmpf;
    switch(vtype)
    {
    case V_UINT:
      filter_prams.add_element(pram.c_str(), attr_list.get_value(pram, tmpui));
      break;
    case V_INT:
      filter_prams.add_element(pram.c_str(), attr_list.get_value(pram, tmpi));
      break;
    case V_FLOAT:
      filter_prams.add_element(pram.c_str(), attr_list.get_value(pram, tmpf));
      break;
    case V_ERROR:
    case V_COMPLEX:
    case V_STRING:
    case V_BOOL:
    case V_TIME:
    case V_GUID:
      throw runtime_error("the field " + pram + " is of type " +
                          VT2str_s(vtype) + " which is not implemented yet.");
    }
  }
  // Return the local directly so NRVO applies (the original forced an
  // extra copy with `return Md_store(filter_prams)`).
  return filter_prams;
}
int main(int argc, char ** argv) { if (argc < 3){ cout << "Usage:" << argv[0] << " <filename> <outputfilename> - display some statistics of specified synapse." << endl; return 0; } const H5std_string FILE_NAME(argv[1]); ofstream outfile; outfile.open(argv[2]); try{ H5File * file = new H5File(FILE_NAME, H5F_ACC_RDONLY); Group * netgroup = new Group(file->openGroup("network")); const DataSet * syndataset = new DataSet(netgroup->openDataSet("synapse")); synstat(*syndataset, outfile); file->close(); outfile.close(); } catch( FileIException error ) { error.printError(); return -1; } // catch failure caused by the DataSet operations catch( DataSetIException error ) { error.printError(); return -1; } // catch failure caused by the DataSpace operations catch( DataSpaceIException error ) { error.printError(); return -1; } // catch failure caused by the DataSpace operations catch( DataTypeIException error ) { error.printError(); return -1; } return 0; }
//'@title Function to read an integer item from meta data //' //'@param intName character for the name of the item to be read back //'@param filePath character for the path to the h5 file //'@return int iteger item defined by intName in the meta data // [[Rcpp::export]] int h5ReadInt(std::string intName, std::string filePath) { H5File *file = new H5File(filePath, H5F_ACC_RDONLY); // Group Meta Group Group* metaGroup = new Group(file->openGroup("/MetaData")); // Getting the data set from the file DataSet dataset = metaGroup->openDataSet((H5std_string)intName); // Getting the data space from the dataset DataSpace dataspace = dataset.getSpace(); // Returning the data int intRet; dataset.read(&intRet, PredType::NATIVE_INT); dataset.close(); //nn metaGroup->close(); file->close(); return intRet; }
//'@title This function writes a character vector to the meta data //' //'@description This function writes a character vector to the meta data and is intended for internal use. //' //'@param charName the name that will be given to the meta data character vector //'@param charVec the character vector to be written as meta data //'@param filePath the path to the h5 file where the data will be written //'@param update integer denoting whether the data item is new or whether it is an update //'(which will overwrite any previous item) //'@return int 0 // [[Rcpp::export]] int h5WriteCharVector(std::string charName, SEXP charVec, std::string filePath, int update) { H5File *file = new H5File(filePath, H5F_ACC_RDWR); int len = Rf_length(charVec); hsize_t DIM1 = len; int rank = 1; //cout << "The length is ... " << len << endl; // Create a datatype to refer to StrType vlst(0, H5T_VARIABLE); // This is the char array char** arr = convertCharArray(charVec); string meta = "/MetaData"; // Group Meta Group Group* metaGroup = new Group(file->openGroup(meta)); // The dataset and dataspace hsize_t dims[] = {DIM1}; //hsize_t maxdims[] = {H5S_UNLIMITED}; DataSet dataset; if(update == 1) { string slash = "/"; string groupName = meta + slash + charName; file->unlink(groupName); } DataSpace dataspace(rank, dims); dataset = metaGroup->createDataSet(charName, vlst, dataspace); dataset.write(arr, vlst); dataset.close(); //nn metaGroup->close(); file->close(); return 0; }
// Load particle data for every frame of the HDF5 file into the wrapper's
// per-type storage (data_i_/data_f_/data_c_), recording per-frame particle
// counts in frame_c_ and (for 2D data) per-frame z-positions in frame_zdata_.
// Returns false if the wrapper is locked, true on success; wraps any HDF5
// Exception in a runtime_error.
// @param fr_count number of frames to read; 0 means "all frames in the file".
bool Wrapper_i_hdf::priv_init(int fr_count)
{
  if(locked_)
    return false;
  try
  {
    H5File * file = new H5File( file_name_, H5F_ACC_RDONLY );
    if(two_d_data_)
    {
      // 2D stacks store the frame count as a root-group attribute.
      const string nop_str = "number-of-planes";
      Group g = file->openGroup("/");
      Attr_list_hdf atr_list(&g);
      if(!atr_list.contains_attr(nop_str))
        throw logic_error("wrapper_i_hdf: number-of-planes not found in file");
      atr_list.get_value(nop_str,frame_count_);
    }
    else
    {
      /** @todo deal with this better, don't have any data */
      frame_count_ = 1;
    }

    // If the caller asked for a specific number of frames, honor it after
    // checking it fits (starting at start_) inside the file.
    if(fr_count != 0)
    {
      if(fr_count + start_ > frame_count_)
        throw runtime_error("wrapper_i_hdf: asking for too many frames");
      frame_count_ = fr_count;
    }

    // logic to set up data maps and data storage
    // One outer vector per requested data type, one slot per frame; the
    // d_map* objects record which outer index serves which D_TYPE.
    int i_count =0;
    int f_count =0;
    int c_count =0;
    for(set<pair<D_TYPE,int> >::iterator it = data_types_.begin();
        it != data_types_.end();++it)
    {
      D_TYPE cur = (*it).first;
      switch(v_type(cur))
      {
      case V_INT:
        data_i_.push_back(vector<int*>(frame_count_));
        d_mapi_.set_lookup(cur,i_count++);
        break;
      case V_FLOAT:
        data_f_.push_back(vector<float*>(frame_count_));
        d_mapf_.set_lookup(cur,f_count++);
        break;
      case V_COMPLEX:
        data_c_.push_back(vector<complex<float>*>(frame_count_));
        d_mapc_.set_lookup(cur,c_count++);
        break;
      case V_STRING:
      case V_BOOL:
      case V_GUID:
      case V_TIME:
      case V_UINT:
      case V_ERROR:
        throw logic_error("wrapper_i_hdf: The data type should not have been "
                          + VT2str_s(v_type(cur)));
      }
    }

    frame_c_.reserve(frame_count_);
    if(two_d_data_)
      frame_zdata_.resize(frame_count_);

    // set the size of the md_store
    set_Md_store_size(frame_count_);

    // fill in data
    // assume that the frames run from 0 to frame_count_
    for(unsigned int j = 0; j<frame_count_;++j)
    {
      string frame_name = format_name(j+start_);
      Group * frame = new Group(file->openGroup(frame_name));
      Attr_list_hdf g_attr_list(frame);
      // NOTE: ownership of the new Md_store passes to set_Md_store —
      // presumably the store frees it; TODO confirm.
      set_Md_store(j,new Md_store(g_attr_list));
      if(two_d_data_)
      {
        if(!g_attr_list.contains_attr("z-position"))
          throw logic_error("wrapper_i_hdf: z-position not found");
        g_attr_list.get_value("z-position",frame_zdata_[j]);
      }
      for(set<pair<D_TYPE,int> >::iterator it = data_types_.begin();
          it != data_types_.end();++it)
      {
        // z-position comes from the frame attribute above, not a dataset.
        if(two_d_data_ && ((*it).first)==utilities::D_ZPOS)
          continue;
        // ***************
        DataSet * dset = new DataSet(frame->openDataSet(format_dset_name((*it).first,(*it).second)));
        // ***************
        DataSpace dspace = dset-> getSpace();
        dspace.selectAll();
        int part_count = dspace.getSimpleExtentNpoints();

        // if the first data set for this frame set the number of particles
        if(frame_c_.size()==j)
          frame_c_.push_back(part_count);
        // if the part_count is less than a previous dataset, set the
        // number of particles to be the smaller number.  This
        // shouldn't result in memory leaks as the bare arrays are
        // never returned
        else if(frame_c_.at(j) > part_count)
          frame_c_.at(j) = part_count;
        // if the current set has more than a previous set, keep the
        // old value. these checks are a kludge, need to deal with
        // this better at the level of writing out the data
        else if(frame_c_.at(j) < part_count)
          continue;
        // 	if(frame_c_.at(j) != part_count)
        // 	  throw runtime_error("wrapper_i_hdf: data sets different sizes");
        D_TYPE cur_type = (*it).first;
        // Allocate a bare array per (type, frame) and read the dataset
        // into it; these arrays are owned by the data_* member vectors.
        switch(v_type(cur_type))
        {
        case V_INT:
          data_i_.at(d_mapi_(cur_type)).at(j) = new int [part_count];
          dset->read(data_i_.at(d_mapi_(cur_type)).at(j),PredType::NATIVE_INT);
          break;
        case V_FLOAT:
          data_f_.at(d_mapf_(cur_type)).at(j) = new float [part_count];
          dset->read(data_f_.at(d_mapf_(cur_type)).at(j),PredType::NATIVE_FLOAT);
          break;
        case V_COMPLEX:
          throw logic_error("not implemented yet");
          break;
        case V_STRING:
        case V_BOOL:
        case V_GUID:
        case V_TIME:
        case V_UINT:
        case V_ERROR:
          throw logic_error("wrapper_i_hdf: The data type should not have been "
                            + VT2str_s(v_type(cur_type)));
        }
        // clean up hdf stuff
        dset->close();
        delete dset;
        dset = NULL;
      }
      frame->close();
      delete frame;
      frame = NULL;
    }
    file->close();
    delete file;
    file = NULL;

    // shift all of the z by the minimum to start at zero
    if(two_d_data_)
    {
      float min = frame_zdata_[0];
      for(unsigned int j = 0; j<frame_count_;++j)
        if(frame_zdata_[j]<min)
          min = frame_zdata_[j];
      for(unsigned int j = 0; j<frame_count_;++j)
        frame_zdata_[j] -= min ;
    }
  }
  catch(Exception & e)
  {
    // clean up data if it died
    e.printError();
    throw runtime_error("wrapper_i_hdf: constructor error");
  }
  // Total particle count across all frames, using the (possibly clamped)
  // per-frame counts gathered above.
  for(unsigned int j= 0; j<frame_count_;++j)
    total_part_count_ += frame_c_.at(j);

  return true;
}