Example #1
bool CompartmentReportHDF5::writeCompartments( const uint32_t gid,
                                               const uint16_ts& counts )
{
    lunchbox::ScopedWrite mutex( detail::_hdf5Lock );

    try
    {
        const size_t compCount = std::accumulate( counts.begin(),
                                                  counts.end(), 0 );
        LBASSERT( !counts.empty( ));
        LBASSERTINFO( compCount > 0, gid );
        H5::DataSet dataset = _createDataset( gid, compCount );

        const size_t sections = counts.size();
        LBASSERT( sections > 0 );
        dataset.openAttribute( 1 ).write( H5::PredType::NATIVE_INT, &sections );

//        dataset.openAttribute( 2 ).write( H5::PredType::NATIVE_INT, &somas );
//        dataset.openAttribute( 3 ).write( H5::PredType::NATIVE_INT, &axons );
//        dataset.openAttribute( 4 ).write( H5::PredType::NATIVE_INT, &basals );
//        dataset.openAttribute( 5 ).write( H5::PredType::NATIVE_INT, &apics );

        boost::scoped_array< float > mapping( new float[compCount] );
        size_t i = 0;
        for( size_t j = 0; j < counts.size(); ++j )
            for( size_t k = 0; k < counts[j]; ++k )
                mapping[i++] = j;

        dataset.write( mapping.get(), H5::PredType::NATIVE_FLOAT );
        return true;
    }
    CATCH_HDF5ERRORS
    return false;
}
Example #2
	/**
	 * @param attribute_id
	 * @return String representing the name of the attribute specified by attribute_id
	 */
	std::string HDF5FileReader::getVariableAttributeName(long attribute_id)
	{
		H5::DataSet dataset = this->variableGroup->openDataSet(this->variableGroup->getObjnameByIdx(0));
		H5::Attribute attribute = dataset.openAttribute(attribute_id);
		std::string buffer = attribute.getName();
		std::cout << "Attribute Name: '" << buffer << "'" << std::endl;
		return buffer;
	}
Example #3
OXSXDataSet
DataSetIO::LoadDataSet(const std::string& filename_){
    // Get Data Set
    H5::H5File  file(filename_, H5F_ACC_RDONLY);
    H5::DataSet dataSet = file.openDataSet("observations");
 
    // read meta information
    unsigned nObs = 0;
    H5::Attribute nameAtt  = dataSet.openAttribute("observed_quantities");
    H5::Attribute countAtt  = dataSet.openAttribute("n_observables");
    H5std_string strreadbuf("");
    nameAtt.read(nameAtt.getDataType(), strreadbuf);
    countAtt.read(countAtt.getDataType(), &nObs);

    // Read data out as 1D array
    hsize_t nData = 0;
    dataSet.getSpace().getSimpleExtentDims(&nData, NULL);
    size_t nEntries = nData/nObs;

    std::vector<double> flatData(nData, 0);
    dataSet.read(&flatData.at(0), H5::PredType::NATIVE_DOUBLE);

    assert(nData%nObs == 0); // logic error in writing file (this class!) if assert fails.

    // Assemble into an OXSX data set
    OXSXDataSet oxsxDataSet;

    // Set the variable names
    oxsxDataSet.SetObservableNames(UnpackString(strreadbuf, fDelimiter));

    // then the data
    std::vector<double> oneEventObs(nObs, 0);
    for(size_t i = 0; i < nEntries; i++){
        for(size_t j = 0; j < nObs; j++)
            oneEventObs[j] = flatData.at(i * nObs + j);
        
        oxsxDataSet.AddEntry(EventData(oneEventObs));
    }
      
    return oxsxDataSet;
}
Example #4
	/**
	 * @return
	 */
	std::vector<std::string> HDF5FileReader::getVariableAttributeNames()
	{
		std::vector<std::string> attributeNames;
		int numAttributes = this->getNumberOfVariableAttributes();
		H5::DataSet dataset = this->variableGroup->openDataSet(this->variableGroup->getObjnameByIdx(0));
		for (int i = 0; i < numAttributes; i++)
		{
			H5::Attribute attribute = dataset.openAttribute(i);
			attributeNames.push_back(attribute.getName());
		}
		return attributeNames;
	}
Example #5
bool ossim_hdf5::getDatasetAttributeValue( H5::H5File* file,
                                           const std::string& objectName,
                                           const std::string& key,
                                           std::string& value )
{
   static const char MODULE[] = "ossim_hdf5::getDatasetAttributeValue";

   bool result = false;
   
   if (  file )
   {
      try // HDF5 library throws exceptions so wrap with try{}catch...
      {
         // Open the dataset:
         H5::DataSet dataset = file->openDataSet( objectName );
         
         // Look for key:
         H5::Attribute attr = dataset.openAttribute( key );

         std::string  name = attr.getName();
         H5::DataType type = attr.getDataType();
         H5T_class_t  typeClass = attr.getTypeClass();
         
         if ( ( name == key ) && ( typeClass == H5T_STRING ) )
         {
            attr.read( type, value );
            result = true;
         }

         // Cleanup:
         attr.close();
         dataset.close();
      }
      catch( const H5::Exception& e )
      {
         ossimNotify(ossimNotifyLevel_WARN)
            << MODULE << " WARNING: Caught exception!\n"
            << e.getDetailMsg() << std::endl;
      }
      catch( ... )
      {
         ossimNotify(ossimNotifyLevel_WARN)
            << MODULE << " WARNING: Caught unknown exception!" << std::endl;
      }      
   }

   return result;
   
} // End: ossim_hdf5::getDatasetAttributeValue
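A minimal call-site sketch for the helper above, assuming a file opened read-only; the file name, dataset path, and attribute key below are placeholders, not values taken from any real product:

   // Hypothetical usage of ossim_hdf5::getDatasetAttributeValue; the file name,
   // object name and key are illustrative assumptions only.
   H5::H5File h5file( "scene.h5", H5F_ACC_RDONLY );
   std::string units;
   if ( ossim_hdf5::getDatasetAttributeValue( &h5file, "/All_Data/Radiance", "Units", units ) )
   {
      std::cout << "Units attribute: " << units << std::endl;
   }
   h5file.close();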
Example #6
File: HDF5.hpp  Project: rseal/HDF5R
      template <typename T>
      void ReadTAttrib(const int& tableNum, const std::string& name,
            T& value, const H5::DataType& dType){
         //attributes are clunky in HDF5++ implementation - this is a workaround
         //template is required to pass the proper value type
         
         //std::cout << "attribute read name = " << name << std::endl;
         std::string tNum = Num2Table(tableNum);

         //open data set and read attribute "name"
         dSet_ = file_->openDataSet(tNum);
         H5::Attribute attrib = dSet_.openAttribute(name);

         //read the value from the attribute and close
         attrib.read(dType,reinterpret_cast<void*>(&value));
         attrib.close();
      }
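A short usage sketch for the templated reader above, assuming `h5` is an instance of the class that declares ReadTAttrib and that table 0 carries an integer attribute named "nSamples"; both names are assumptions for illustration:

      // Hypothetical usage; `h5`, table 0 and "nSamples" are placeholders.
      int nSamples = 0;
      h5.ReadTAttrib( 0, "nSamples", nSamples, H5::PredType::NATIVE_INT );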
Example #7
File: HDF5.hpp  Project: rseal/HDF5R
   const std::string ReadTStrAttrib(const int& tableNum, const std::string& name){

      //attributes are clunky in HDF5++ implementation - this is a workaround
      //a fixed-size string type of STRING_ATTRIB_SIZE bytes is used for the value

      std::string tNum = Num2Table(tableNum);

      std::string value;
      value.resize( STRING_ATTRIB_SIZE );
      //std::cout << "attribute string read name = " << tNum << std::endl;

      H5::StrType strType(0, STRING_ATTRIB_SIZE );

      dSet_ = file_->openDataSet(tNum);
      H5::Attribute attrib = dSet_.openAttribute(name);

      //read the value from the attribute and close
      attrib.read(strType,value);
      attrib.close();

      return value;
   }
Example #8
	/**
	 * @param variable
	 * @param vattribute
	 * @return
	 */
	Attribute HDF5FileReader::getVariableAttribute(const std::string& variable, const std::string& vattribute)
	{


		//first, check the vAttributes map
//		std::cout<<"Checking variable attributes map\n";
		boost::unordered_map<std::string, boost::unordered_map< std::string, Attribute> >::iterator iter =
				vAttributes.find(variable);
		if (iter != vAttributes.end())
		{
			boost::unordered_map< std::string, Attribute>::iterator iter2 = vAttributes[variable].find(vattribute);
			if (iter2 != vAttributes[variable].end())
			{
				return (*iter2).second;
			}
		}
	//	std::cout<<"Attribute not loaded, opening Variables group\n";

		H5::Group group = this->current_file->openGroup("Variables");
	//	std::cout<<"Group opened. Creating memory for H5::DataSet\n";
		H5::DataSet dataset = group.openDataSet(variable);
	//	std::cout<<"creating h5attribute for variable\n";
		H5::Attribute h5attribute = dataset.openAttribute(vattribute); //changed from group.openAttribute(vattribute);
	//	std::cout<<"attribute opened, obtaining data type\n";
		H5::DataType dataType = h5attribute.getDataType();
		Attribute attribute;
	//	std::cerr<<"Retrieving Variable attribute info:"<<std::endl;
		if (dataType.getClass() == H5T_STRING)
		{
//			std::cout<<"String type variable attribute\n";
			std::string attributeValue = "NULL";
			h5attribute.read(dataType, attributeValue);

			std::string attributeName = "";
			attributeName = h5attribute.getName();


			attribute.setAttributeName(attributeName);
			//std::cout << "attributeBuffer: " << attributeBuffer << endl;
			attribute.setAttributeValue(attributeValue);
			//return attribute;
		} else if (dataType.getClass() == H5T_INTEGER)
		{
//			std::cout<<"Int type variable attribute\n";
			//int attributeValue = 0.f;
			int attributeBuffer;// = new int[1];

			h5attribute.read(dataType, &attributeBuffer);
			std::string attributeName = "";
			attributeName = h5attribute.getName();
			attribute.setAttributeName(attributeName);
			attribute.setAttributeValue(attributeBuffer);
			//return attribute;
		} else if (dataType.getClass() == H5T_FLOAT)//CDF_FLOAT
		{
//			std::cout<<"Float type variable attribute\n";
			//int attributeValue = 0.f;
			float attributeValue;// = new int[1];

			h5attribute.read(dataType, &attributeValue);
			std::string attributeName = "";
			attributeName = h5attribute.getName();
			attribute.setAttributeName(attributeName);
			attribute.setAttributeValue(attributeValue);
			//return attribute;
		}

		//cout << "added: " << i << " name: " << attribute.getAttributeName() << endl;
		//std::cout << "Attribute: " << attribute.toString() << std::endl;


		(vAttributes[variable])[vattribute] = attribute;
		return attribute;

	}
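A brief usage sketch for the accessor above; the `reader` instance and the variable/attribute names are placeholders, and the toString() call mirrors the commented-out print inside the function body:

	// Hypothetical usage; `reader`, "density" and "units" are illustrative only.
	Attribute attr = reader.getVariableAttribute("density", "units");
	std::cout << attr.toString() << std::endl;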
Example #9
H5RandomReader::H5RandomReader(const std::string fileName, const std::string groupPath) throw (InvalidFileException) {

    try {
        file.openFile(fileName, H5F_ACC_RDONLY);}
    catch ( H5::FileIException ) {
        throw InvalidFileException("Cannot acces file");}
    try {
        group = file.openGroup(groupPath);}
    catch ( H5::GroupIException ) {
        file.close();
        throw InvalidFileException("Cannot access group");}
    /*
     * extract timeline. This is also necessary to get nSteps.
     */
    try {
        timeline = group.openDataSet("timeline");
        nSteps = timeline.getSpace().getSimpleExtentNpoints();}
    catch ( H5::DataSetIException error ) {
        //error.printError();
        group.close();
        file.close();
        throw InvalidFileException("Cannot access timeline dataset");}
    if (logging::info)
        std::cerr << "Opened group \"" <<  fileName << groupPath << "\" which has " << nSteps << " steps.\n";
    /*
     * extract objects names in the xpGroup
     */

    std::vector<std::string>  names;
    H5Literate(group.getId(), H5_INDEX_NAME, H5_ITER_INC, NULL, iterInGroup, &names);
    /*
     * extract data from object in xpGroup
     * these data can be of 3 types: matrix, translate or wrench
     * each data are saved in related map
     */
    for (unsigned int i=0; i<names.size(); i++){ //TODO: skip timeline
        H5::DataSet dSet = group.openDataSet(names[i]);
        if (H5Aexists(dSet.getId(), "ArborisViewerType")) {
            H5::Attribute att = dSet.openAttribute("ArborisViewerType");
            std::string type;
            att.read(att.getDataType(), type);
            if (type == "matrix"){
                H5::DataSpace dSpace = dSet.getSpace();
                bool dimension_ok = false;
                if (dSpace.getSimpleExtentNdims()==3) {
                    hsize_t dims[3];
                    dSpace.getSimpleExtentDims (dims);
                    if (dims[0] == nSteps && dims[1] == 4 && dims[2] == 4)
                        dimension_ok = true;}
                if (dimension_ok)
                    matrices[names[i]] = dSet;
                else {
                    if (logging::warning)
                        std::cerr << "Skipping dataset \"" << names[i] << "\" which has wrong dimensions. I was expecting (" << nSteps << ",4,4).\n";
                    dSet.close();}}
            else if (type == "translate"){
                H5::DataSpace dSpace = dSet.getSpace();
                bool dimension_ok = false;
                if (dSpace.getSimpleExtentNdims()==2) {
                    hsize_t dims[2];
                    dSpace.getSimpleExtentDims (dims);
                    if (dims[0] == nSteps && dims[1] == 3)
                        dimension_ok = true;}
                if (dimension_ok)
                    translates[names[i]] = dSet;
                else {
                    if (logging::warning)
                        std::cerr << "Skipping dataset \"" << names[i] << "\" which has wrong dimensions. I was expecting (" << nSteps << ",3).\n";
                    dSet.close();}}
            else if (type == "wrench") {
                H5::DataSpace dSpace = dSet.getSpace();
                bool dimension_ok = false;
                if (dSpace.getSimpleExtentNdims()==2) {
                    hsize_t dims[2];
                    dSpace.getSimpleExtentDims (dims);
                    if (dims[0] == nSteps && dims[1] == 6)
                        dimension_ok = true;}
                if (dimension_ok)
                    wrenches[names[i]] = dSet;
                else {
                    if (logging::warning)
                        std::cerr << "Skipping dataset \"" << names[i] << "\" which as wrong dimensions. I was expecting (" << nSteps << ",6).\n";
                    dSet.close();}}
            else {
                if (logging::warning)
                    std::cerr << "Skipping dataset \"" << names[i] << "\" whose ArborisViewerType attribute as unknown value \"" << type << "\".\n";
                dSet.close();}
            att.close();
        }
        else {
            if (logging::info)
                std::cerr << "Skipping dataset \"" << names[i] << "\" which has no ArborisViewerType attribute.\n";
            dSet.close();
        }
    }
};
Example #10
void ossim_hdf5::printObject(  H5::H5File* file,
                               const std::string& objectName,
                               const std::string& prefix,
                               std::ostream& out )
{
#if 0
   std::cout << "printObject entered..."
             << "\nobjectName: " << objectName
             << "\nprefix: " << prefix
             << std::endl;
#endif
   
   H5::DataSet dataset = file->openDataSet( objectName );
   
   // Get the class of the datatype that is used by the dataset.
   H5T_class_t type_class = dataset.getTypeClass();
   out << prefix << ".class_type: "
       << ossim_hdf5::getDatatypeClassType( type_class ) << std::endl;

   const ossim_uint32 ATTRS_COUNT = dataset.getNumAttrs();
   for ( ossim_uint32 aIdx = 0; aIdx < ATTRS_COUNT; ++aIdx )
   {
      H5::Attribute attr = dataset.openAttribute( aIdx );
      ossim_hdf5::printAttribute( attr, prefix, out );
      attr.close();
   }

   // Extents:
   std::vector<ossim_uint32> extents;
   ossim_hdf5::getExtents( &dataset, extents );
   for ( ossim_uint32 i = 0; i < extents.size(); ++i )
   {
      ossimString os;
      std::string exStr = ".extent";
      exStr += os.toString(i).string();
      out << prefix << exStr << ": " << extents[i] << std::endl;
   }

   // ossimScalarType scalar = getScalarType( type_class, dataset.getId() );
   ossimScalarType scalar = ossim_hdf5::getScalarType( dataset.getId() );
   if ( scalar != OSSIM_SCALAR_UNKNOWN)
   {
      out << prefix << "." << ossimKeywordNames::SCALAR_TYPE_KW << ": "
          << ossimScalarTypeLut::instance()->getEntryString( scalar ) << std::endl;

      if ( ossim::scalarSizeInBytes( scalar ) > 1 )
      {
         ossimByteOrder byteOrder = ossim_hdf5::getByteOrder( &dataset );
         std::string byteOrderString = "little_endian";
         if ( byteOrder == OSSIM_BIG_ENDIAN )
         {
            byteOrderString = "big_endian";
         }
         out << prefix << "." <<ossimKeywordNames::BYTE_ORDER_KW << ": "
             << byteOrderString << std::endl;
      }
   }

#if 0
   // Attributes:
   int numberOfAttrs = dataset.getNumAttrs();
   cout << "numberOfAttrs: " << numberOfAttrs << endl;
   for ( ossim_int32 attrIdx = 0; attrIdx < numberOfAttrs; ++attrIdx )
   {
      H5::Attribute attribute = dataset.openAttribute( attrIdx );
      cout << "attribute.from class: " << attribute.fromClass() << endl;
   }
#endif
   dataset.close();
   
} // End: printObject
Example #11
bool TStellarData::load(const std::string& fname, const std::string& group, const std::string& dset,
			double err_floor, double default_EBV) {
	H5::H5File *file = H5Utils::openFile(fname);
	if(file == NULL) { return false; }
	
	H5::Group *gp = H5Utils::openGroup(file, group);
	if(gp == NULL) {
		delete file;
		return false;
	}
	
	H5::DataSet dataset = gp->openDataSet(dset);
	
	/*
	 *  Photometry
	 */
	
	// Datatype
	hsize_t nbands = NBANDS;
	H5::ArrayType f4arr(H5::PredType::NATIVE_FLOAT, 1, &nbands);
	H5::ArrayType u4arr(H5::PredType::NATIVE_UINT32, 1, &nbands);
	H5::CompType dtype(sizeof(TFileData));
	dtype.insertMember("obj_id", HOFFSET(TFileData, obj_id), H5::PredType::NATIVE_UINT64);
	dtype.insertMember("l", HOFFSET(TFileData, l), H5::PredType::NATIVE_DOUBLE);
	dtype.insertMember("b", HOFFSET(TFileData, b), H5::PredType::NATIVE_DOUBLE);
	dtype.insertMember("mag", HOFFSET(TFileData, mag), f4arr);
	dtype.insertMember("err", HOFFSET(TFileData, err), f4arr);
	dtype.insertMember("maglimit", HOFFSET(TFileData, maglimit), f4arr);
	dtype.insertMember("nDet", HOFFSET(TFileData, N_det), u4arr);
	dtype.insertMember("EBV", HOFFSET(TFileData, EBV), H5::PredType::NATIVE_FLOAT);
	
	// Dataspace
	hsize_t length;
	H5::DataSpace dataspace = dataset.getSpace();
	dataspace.getSimpleExtentDims(&length);
	
	// Read in dataset
	TFileData* data_buf = new TFileData[length];
	dataset.read(data_buf, dtype);
	//std::cerr << "# Read in dimensions." << std::endl;
	
	// Fix magnitude limits
	for(int n=0; n<nbands; n++) {
		float tmp;
		float maglim_replacement = 25.;
		
		// Find the 95th percentile of valid magnitude limits
		std::vector<float> maglimit;
		for(hsize_t i=0; i<length; i++) {
			tmp = data_buf[i].maglimit[n];
			
			if((tmp > 10.) && (tmp < 40.) && (!isnan(tmp))) {
				maglimit.push_back(tmp);
			}
		}
		
		//std::sort(maglimit.begin(), maglimit.end());
		if(maglimit.size() != 0) {
			maglim_replacement = percentile(maglimit, 95.);
		}
		
		// Replace missing magnitude limits with the 95th percentile magnitude limit
		for(hsize_t i=0; i<length; i++) {
			tmp = data_buf[i].maglimit[n];
			
			if(!((tmp > 10.) && (tmp < 40.)) || isnan(tmp)) {
				//std::cout << i << ", " << n << ":  " << tmp << std::endl;
				data_buf[i].maglimit[n] = maglim_replacement;
			}
		}
	}
	
	//int n_filtered = 0;
	//int n_M_dwarfs = 0;
	
	TMagnitudes mag_tmp;
	for(size_t i=0; i<length; i++) {
		mag_tmp.set(data_buf[i], err_floor);
		star.push_back(mag_tmp);
		
		//int n_informative = 0;
		
		// Remove g-band
		//mag_tmp.m[0] = 0.;
		//mag_tmp.err[0] = 1.e10;
		
		//double g_err = mag_tmp.err[0];
		//mag_tmp.err[0] = sqrt(g_err*g_err + 0.1*0.1);
		
		// Filter bright end
                // TODO: Put this into query_lsd.py
		/*for(int j=0; j<NBANDS; j++) {
			if((mag_tmp.err[j] < 1.e9) && (mag_tmp.m[j] < 14.)) {
				mag_tmp.err[j] = 1.e10;
				mag_tmp.m[j] = 0.;
			}
			
			if(mag_tmp.err[j] < 1.e9) {
				n_informative++;
			}
		}*/
		
		// Filter M dwarfs based on color cut
		//bool M_dwarf = false;
		/*bool M_dwarf = true;
		
		double A_g = 3.172;
		double A_r = 2.271;
		double A_i = 1.682;
		
		if(mag_tmp.m[0] - A_g / (A_g - A_r) * (mag_tmp.m[0] - mag_tmp.m[1] - 1.2) > 20.) {
			M_dwarf = false;
		} else if(mag_tmp.m[1] - mag_tmp.m[2] - (A_r - A_i) / (A_g - A_r) * (mag_tmp.m[0] - mag_tmp.m[1]) < 0.) {
			M_dwarf = false;
		} else {
			n_M_dwarfs++;
		}
		*/
		
		/*if(n_informative >= 4) { //&& (!M_dwarf)) {
			star.push_back(mag_tmp);
		} else {
			n_filtered++;
		}*/
	}
	
	//std::cerr << "# of stars filtered: " << n_filtered << std::endl;
	//std::cerr << "# of M dwarfs: " << n_M_dwarfs << std::endl;
	
	/*
	 *  Attributes
	 */
	
	H5::Attribute att = dataset.openAttribute("healpix_index");
	H5::DataType att_dtype = H5::PredType::NATIVE_UINT64;
	att.read(att_dtype, reinterpret_cast<void*>(&healpix_index));
	
	att = dataset.openAttribute("nested");
	att_dtype = H5::PredType::NATIVE_UCHAR;
	att.read(att_dtype, reinterpret_cast<void*>(&nested));
	
	att = dataset.openAttribute("nside");
	att_dtype = H5::PredType::NATIVE_UINT32;
	att.read(att_dtype, reinterpret_cast<void*>(&nside));
	
	att = dataset.openAttribute("l");
	att_dtype = H5::PredType::NATIVE_DOUBLE;
	att.read(att_dtype, reinterpret_cast<void*>(&l));
	
	att = dataset.openAttribute("b");
	att_dtype = H5::PredType::NATIVE_DOUBLE;
	att.read(att_dtype, reinterpret_cast<void*>(&b));
	
	att = dataset.openAttribute("EBV");
	att_dtype = H5::PredType::NATIVE_DOUBLE;
	att.read(att_dtype, reinterpret_cast<void*>(&EBV));
	
	// TEST: Force l, b to anticenter
	//l = 180.;
	//b = 0.;
	
	if((EBV <= 0.) || (EBV > default_EBV) || isnan(EBV)) { EBV = default_EBV; }
	
	delete[] data_buf;
	delete gp;
	delete file;
	
	return true;
}