Example #1
bool ossimHdfGridModel::setGridNodes( H5::H5File* h5File,
                                      const std::string& latDataSetName,
                                      const std::string& lonDataSetName,
                                      ossim_uint32 imageRows,
                                      ossim_uint32 imageCols )

{
    bool status = false;

    if ( h5File )
    {
        H5::DataSet latDataSet = h5File->openDataSet( latDataSetName );
        H5::DataSet lonDataSet = h5File->openDataSet( lonDataSetName );

        try
        {
            status = setGridNodes( &latDataSet, &lonDataSet, imageRows, imageCols );
        }
        catch ( const ossimException& e )
        {
            if ( traceDebug() )
            {
                ossimNotify(ossimNotifyLevel_WARN)
                        << "ossimHdfGridModel::setGridNodes caught exception\n"
                        << e.what() << std::endl;
            }
        }

        latDataSet.close();
        lonDataSet.close();
    }

    return status;
}
Example #2
void FeaturePointsRANSAC::loadModel(std::string modelPath)
{
	H5::H5File h5Model;

	try {
		h5Model = H5::H5File(modelPath, H5F_ACC_RDONLY);
	}
	catch (H5::Exception& e) {
		std::string msg( std::string( "Could not open HDF5 file \n" ) + e.getCDetailMsg() );
		throw msg;
	}

	// Load the Shape
	H5::Group modelReconstructive = h5Model.openGroup("/shape/ReconstructiveModel/model");
	H5::DataSet dsMean = modelReconstructive.openDataSet("./mean");
	hsize_t dims[1];
	dsMean.getSpace().getSimpleExtentDims(dims, NULL);	// dsMean.getSpace() leaks memory... maybe a hdf5 bug, maybe vlenReclaim(...) could be a fix. No idea.
	//H5::DataSpace dsp = dsMean.getSpace();
	//dsp.close();

	std::cout << "Dims: " << dims[0] << std::endl;		// TODO: I guess this whole part could be done A LOT better!
	float* testData = new float[dims[0]];
	dsMean.read(testData, H5::PredType::NATIVE_FLOAT);
	this->modelMeanShp.reserve(dims[0]);

	for (unsigned int i=0; i < dims[0]; ++i)	{
		modelMeanShp.push_back(testData[i]);
	}
	delete[] testData;
	testData = NULL;
	dsMean.close();

	// // Load the Texture
	H5::Group modelReconstructiveTex = h5Model.openGroup("/color/ReconstructiveModel/model");
	H5::DataSet dsMeanTex = modelReconstructiveTex.openDataSet("./mean");
	hsize_t dimsTex[1];
	dsMeanTex.getSpace().getSimpleExtentDims(dimsTex, NULL);
	std::cout << "Dims: " << dimsTex[0] << std::endl;		// TODO: I guess this whole part could be done A LOT better!
	float* testDataTex = new float[dimsTex[0]];
	dsMeanTex.read(testDataTex, H5::PredType::NATIVE_FLOAT);
	this->modelMeanTex.reserve(dimsTex[0]);

	for (unsigned int i=0; i < dimsTex[0]; ++i)	{
		modelMeanTex.push_back(testDataTex[i]);
	}
	delete[] testDataTex;
	testDataTex = NULL;
	dsMeanTex.close();

	h5Model.close();
}
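The commented-out lines above hint at the suspected workaround for the getSpace() leak: hold the returned DataSpace in a named variable and close it explicitly instead of calling getSimpleExtentDims() on a temporary. A minimal sketch of that pattern, reusing the mean dataset from this example (whether this actually avoids the leak is an assumption):

	H5::DataSet dsMean = modelReconstructive.openDataSet("./mean");
	H5::DataSpace dsp = dsMean.getSpace();	// hold a handle so it can be released
	hsize_t dims[1];
	dsp.getSimpleExtentDims(dims, NULL);
	dsp.close();	// release the dataspace explicitly
	dsMean.close();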
Example #3
ossimRefPtr<ossimImageGeometry> ossimH5Reader::getInternalImageGeometry()
{
   ossimRefPtr<ossimImageGeometry> geom = new ossimImageGeometry();

   if ( m_projection.valid() )
   {
      // Stored projection, currently shared by all entries.
      geom->setProjection( m_projection.get() );
   }
   else if ( isOpen() )
   {
      // Find the "Latitude" and "Longitude" datasets if present.
      std::string latName;
      std::string lonName;
      if ( getLatLonDatasetNames(  m_h5File, latName, lonName ) )
      {
         H5::DataSet latDataSet = m_h5File->openDataSet( latName );
         H5::DataSet lonDataSet = m_h5File->openDataSet( lonName );

         // Get the valid rectangle of the dataset.
         ossimIrect validRect = m_entries[m_currentEntry].getValidImageRect();

         // Try for a coarse projection first:
         ossimRefPtr<ossimProjection> proj =
            processCoarseGridProjection( latDataSet,
                                         lonDataSet,
                                         validRect );
         
         if ( proj.valid() == false )
         {
            ossimIrect rect;
            proj = ossim_hdf5::getBilinearProjection( latDataSet, lonDataSet, validRect );
         }
         
         if ( proj.valid() )
         {
            // Store it for next time:
            m_projection = proj;
            
            // Set the geometry projection
            geom->setProjection( proj.get() ); 
         }
               
         latDataSet.close();
         lonDataSet.close();
      }
   }
 
   return geom;
}
Example #4
File: utils.cpp Project: jbradt/mcopt
arma::Mat<uint16_t> readLUT(const std::string& path)
{
    H5::H5File file (path.c_str(), H5F_ACC_RDONLY);
    H5::DataSet ds = file.openDataSet("LUT");

    H5::DataSpace filespace = ds.getSpace();
    int ndims = filespace.getSimpleExtentNdims();
    assert(ndims == 2);
    hsize_t dims[2] = {1, 1};
    filespace.getSimpleExtentDims(dims);

    H5::DataSpace memspace (ndims, dims);

    arma::Mat<uint16_t> res (dims[0], dims[1]);

    ds.read(res.memptr(), H5::PredType::NATIVE_UINT16, memspace, filespace);

    filespace.close();
    memspace.close();
    ds.close();
    file.close();

    // NOTE: Armadillo stores data in column-major order, while HDF5 uses
    // row-major ordering. Above, we read the data directly from HDF5 into
    // the arma matrix, so it was implicitly transposed. The in-place transpose
    // below fixes this.
    arma::inplace_trans(res);
    return res;
}
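A possible call site for readLUT; the file name below is an assumption, and the only requirement is that the file contain a 2-D "LUT" dataset of unsigned 16-bit integers as read above:

#include <armadillo>
#include <cstdint>
#include <iostream>

int main()
{
    // "lookup.h5" is a hypothetical file name.
    arma::Mat<uint16_t> lut = readLUT("lookup.h5");

    // After arma::inplace_trans, lut(i, j) matches element [i][j] of the HDF5 dataset.
    std::cout << "LUT is " << lut.n_rows << " x " << lut.n_cols << std::endl;
    return 0;
}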
Example #5
void pyne::Material::_load_comp_protocol0(H5::H5File * db, std::string datapath, int row)
{
  H5::Group matgroup = (*db).openGroup(datapath);
  H5::DataSet nucset;

  double nucvalue;
  hsize_t matG = matgroup.getNumObjs();

  // Iterate over datasets in the group.
  for (int matg = 0; matg < matG; matg++)
  {
    std::string nuckey = matgroup.getObjnameByIdx(matg);
    nucset = matgroup.openDataSet(nuckey);
    nucvalue = h5wrap::get_array_index<double>(&nucset, row);

    if (nuckey == "Mass" || nuckey == "MASS" || nuckey == "mass")
      mass = nucvalue;
    else
      comp[pyne::nucname::zzaaam(nuckey)] = nucvalue;

    nucset.close();
  };

  // Set meta data
  name = datapath.substr(datapath.rfind("/")+1, datapath.length());
  atoms_per_mol = -1.0;
};
Example #6
bool loadStackHDF5( const char* fileName, Image4DSimple& img )
{
#ifdef USE_HDF5
    H5::Exception::dontPrint();
    H5::H5File file( fileName, H5F_ACC_RDONLY );

    for ( size_t i = 0; i < file.getObjCount(); i++ )
    {
        H5std_string name = file.getObjnameByIdx( i );
        if ( name == "Channels" )
        {
            H5::Group channels = file.openGroup( name );

            // Grab the attributes
            H5::Attribute attr = channels.openAttribute( "width" );
            H5::DataType type = attr.getDataType();
            long width, height;
            attr.read( type, &width );
            attr.close();

            attr = channels.openAttribute( "height" );
            attr.read( type, &height );
            attr.close();

            int num_channels = 0;
            // Count the number of channels
            for ( size_t obj = 0; obj < channels.getNumObjs(); obj++ )
                if ( channels.getObjTypeByIdx( obj ) == H5G_DATASET )
                    num_channels++;

            int channel_idx = 0;
            for ( size_t obj = 0; obj < channels.getNumObjs(); obj++ )
            {
                if ( channels.getObjTypeByIdx( obj ) == H5G_DATASET )
                {
                    H5std_string ds_name = channels.getObjnameByIdx( obj );
                    H5::DataSet data = channels.openDataSet( ds_name );
                    uint8_t* buffer = new uint8_t[ data.getStorageSize() ];
                    data.read( buffer, data.getDataType() );
                    QByteArray qbarray( ( const char* )buffer, data.getStorageSize() );
                    data.close();

                    if ( !loadIndexedStackFFMpeg( &qbarray, img, channel_idx++, num_channels,
                                                  width, height ) )
                    {
                        v3d_msg( "Error happened in HDF file reading. Stop. \n", false );
                        delete [] buffer; // release the channel buffer on the error path too
                        return false;
                    }

                    delete [] buffer;
                }
            }
        }
    }

#endif

    return true;
}
Example #7
bool ossim_hdf5::getDatasetAttributeValue( H5::H5File* file,
                                           const std::string& objectName,
                                           const std::string& key,
                                           std::string& value )
{
   static const char MODULE[] = "ossim_hdf5::getDatasetAttributeValue";

   bool result = false;
   
   if (  file )
   {
      try // HDF5 library throws exceptions so wrap with try{}catch...
      {
         // Open the dataset:
         H5::DataSet dataset = file->openDataSet( objectName );
         
         // Look for the key:
         H5::Attribute attr = dataset.openAttribute( key );

         std::string  name = attr.getName();
         H5::DataType type = attr.getDataType();
         H5T_class_t  typeClass = attr.getTypeClass();
         
         if ( ( name == key ) && ( typeClass == H5T_STRING ) )
         {
            attr.read( type, value );
            result = true;
         }

         // Cleanup:
         attr.close();
         dataset.close();
      }
      catch( const H5::Exception& e )
      {
         ossimNotify(ossimNotifyLevel_WARN)
            << MODULE << " WARNING: Caught exception!\n"
            << e.getDetailMsg() << std::endl;
      }
      catch( ... )
      {
         ossimNotify(ossimNotifyLevel_WARN)
            << MODULE << " WARNING: Caught unknown exception!" << std::endl;
      }      
   }

   return result;
   
} // End: ossim_hdf5::getDatasetAttributeValue
Example #8
void ossimH5Reader::addImageDatasetEntries(const std::vector<std::string>& names)
{
   if ( m_h5File && names.size() )
   {
      std::vector<std::string>::const_iterator i = names.begin();
      while ( i != names.end() )
      {
         if ( ossim_hdf5::isExcludedDataset( *i ) == false )
         {
            H5::DataSet dataset = m_h5File->openDataSet( *i );

            // Get the class of the datatype that is used by the dataset.
            H5T_class_t type_class = dataset.getTypeClass();
            
            if ( ( type_class == H5T_INTEGER ) || ( type_class == H5T_FLOAT ) )
            {
               // Get the extents:
               std::vector<ossim_uint32> extents;
               ossim_hdf5::getExtents( &dataset, extents ); 

               if ( extents.size() >= 2 )
               {
                  if ( ( extents[0] > 1 ) && ( extents[1] > 1 ) )
                  {
                     ossimH5ImageDataset hids;
                     hids.initialize( dataset, *i );
                     m_entries.push_back( hids );
                  }     
               }
            }

            dataset.close();
         }
            
         ++i;
      }
   }
   
#if 0 /* Please leave for debug. (drb) */
   std::vector<ossimH5ImageDataset>::const_iterator i = m_entries.begin();
   while ( i != m_entries.end() )
   {
      std::cout << (*i) << endl;
      ++i;
   }
#endif
      
} // End: ossimH5Reader::addImageDatasetEntries
Example #9
bool ossim_hdf5::isLoadableAsImage( H5::H5File* file, const std::string& datasetName )
{
   bool result = false;

   // std::cout << "isLoadable entered..." << std::endl;
   if ( file && datasetName.size() )
   {
      if ( isExcludedDataset( datasetName ) == false )
      {
         H5::DataSet dataset = file->openDataSet( datasetName );
         
         // Get the class of the datatype that is used by the dataset.
         H5T_class_t type_class = dataset.getTypeClass();
         // std::cout << "Class type: " << ossim_hdf5::getDatatypeClassType( type_class )
         // << std::endl;
         
         if ( ( type_class == H5T_INTEGER ) || ( type_class == H5T_FLOAT ) )
         {
            // Get dataspace of the dataset.
            // H5::DataSpace dataspace = dataset.getSpace();
            
            // Get the extents:
            std::vector<ossim_uint32> extents;
            ossim_hdf5::getExtents( &dataset, extents );
            
            if ( extents.size() >= 2 )
            {
               if ( ( extents[0] > 1 ) && ( extents[1] > 1 ) )
               {
                  // std::cout << "Accepting dataset: " << datasetName << std::endl;
                  result = true;
               }     
            }
            
         }
         
         dataset.close();
      }
   }
      
   // std::cout << "isLoadable exit status: " << (result?"true":"false") << std::endl;
   
   return result;
}
Example #10
File: utils.cpp Project: jbradt/mcopt
std::vector<double> readEloss(const std::string& path)
{
    H5::H5File file (path.c_str(), H5F_ACC_RDONLY);
    H5::DataSet ds = file.openDataSet("eloss");

    H5::DataSpace filespace = ds.getSpace();
    int ndims = filespace.getSimpleExtentNdims();
    assert(ndims == 1);
    hsize_t dim;
    filespace.getSimpleExtentDims(&dim);

    H5::DataSpace memspace (ndims, &dim);

    std::vector<double> res (dim);

    ds.read(res.data(), H5::PredType::NATIVE_DOUBLE, memspace, filespace);

    filespace.close();
    memspace.close();
    ds.close();
    file.close();
    return res;
}
Example #11
Bundle2::Bundle2(const boost::filesystem::path& fileName, bool loadGeometry):
	version_(BUNDLE_VERSION), poiFirstFrame_(0) {
	// Opening file
	H5::H5File bundleFile;
	bundleFile.openFile(fileName.string(), H5F_ACC_RDONLY);
	loadParameters(bundleFile);

	// Loading POI
	H5::Group poiGroup = bundleFile.openGroup("/POI");

	hsize_t count;
	H5::Attribute attr = poiGroup.openAttribute("count");
	attr.read(H5::PredType::NATIVE_HSIZE, &count);
	attr.close();

	for(size_t frame = 0; frame < count; ++frame) {
		cout.flush();

		const std::string frameGroupName = boost::str(boost::format("Frame %1$04d") % frame);
		H5::Group frameGroup = poiGroup.openGroup(frameGroupName);

		addPOIFrame();
		for(size_t camera = 0; camera < numCameras_; ++camera)
			poi_[poi_.size() - 1][camera].load(frameGroup, camera);

		frameGroup.close();
	}

	poiGroup.close();

	// Loading frames
	H5::Group bundleGroup = bundleFile.openGroup("/Bundle");

	H5::Group framesGroup = bundleGroup.openGroup("Frames");

	attr = framesGroup.openAttribute("count");
	attr.read(H5::PredType::NATIVE_HSIZE, &count);
	attr.close();

	for(size_t frame = 0; frame < count; ++frame) {
		Frame* f = new Frame(framesGroup, frame, numCameras_);
		frames_.push_back(f);
	}

	framesGroup.close();

	// Loading tracks
	H5::DataSet tracksDataset = bundleGroup.openDataSet("Tracks");

	hsize_t tracksDim[2];
	H5::DataSpace tracksDS = tracksDataset.getSpace();
	tracksDS.getSimpleExtentDims(tracksDim);
	tracksDS.close();

	for(size_t i = 0; i < tracksDim[0]; ++i) {
		size_t j = addTrack();
		tracks_[j]->load(tracksDataset, frames_, i);
	}

	tracksDataset.close();

	bundleGroup.close();

	if(loadGeometry && checkGeometry_(bundleFile)) loadGeometry_(bundleFile);

	bundleFile.close();
}
Example #12
void ossim_hdf5::printObject(  H5::H5File* file,
                               const std::string& objectName,
                               const std::string& prefix,
                               std::ostream& out )
{
#if 0
   std::cout << "printObject entered..."
             << "\nobjectName: " << objectName
             << "\nprefix: " << prefix
             << std::endl;
#endif
   
   H5::DataSet dataset = file->openDataSet( objectName );
   
   // Get the class of the datatype that is used by the dataset.
   H5T_class_t type_class = dataset.getTypeClass();
   out << prefix << ".class_type: "
       << ossim_hdf5::getDatatypeClassType( type_class ) << std::endl;

   const ossim_uint32 ATTRS_COUNT = dataset.getNumAttrs();
   for ( ossim_uint32 aIdx = 0; aIdx < ATTRS_COUNT; ++aIdx )
   {
      H5::Attribute attr = dataset.openAttribute( aIdx );
      ossim_hdf5::printAttribute( attr, prefix, out );
      attr.close();
   }

   // Extents:
   std::vector<ossim_uint32> extents;
   ossim_hdf5::getExtents( &dataset, extents );
   for ( ossim_uint32 i = 0; i < extents.size(); ++i )
   {
      ossimString os;
      std::string exStr = ".extent";
      exStr += os.toString(i).string();
      out << prefix << exStr << ": " << extents[i] << std::endl;
   }

   // ossimScalarType scalar = getScalarType( type_class, dataset.getId() );
   ossimScalarType scalar = ossim_hdf5::getScalarType( dataset.getId() );
   if ( scalar != OSSIM_SCALAR_UNKNOWN)
   {
      out << prefix << "." << ossimKeywordNames::SCALAR_TYPE_KW << ": "
          << ossimScalarTypeLut::instance()->getEntryString( scalar ) << std::endl;

      if ( ossim::scalarSizeInBytes( scalar ) > 1 )
      {
         ossimByteOrder byteOrder = ossim_hdf5::getByteOrder( &dataset );
         std::string byteOrderString = "little_endian";
         if ( byteOrder == OSSIM_BIG_ENDIAN )
         {
            byteOrderString = "big_endian";
         }
         out << prefix << "." <<ossimKeywordNames::BYTE_ORDER_KW << ": "
             << byteOrderString << std::endl;
      }
   }

#if 0
   // Attributes:
   int numberOfAttrs = dataset.getNumAttrs();
   cout << "numberOfAttrs: " << numberOfAttrs << endl;
   for ( ossim_int32 attrIdx = 0; attrIdx < numberOfAttrs; ++attrIdx )
   {
      H5::Attribute attribute = dataset.openAttribute( attrIdx );
      cout << "attribute.from class: " << attribute.fromClass() << endl;
   }
#endif
   dataset.close();
   
} // End: printObject
Example #13
H5RandomReader::H5RandomReader(const std::string fileName, const std::string groupPath) throw (InvalidFileException) {

    try {
        file.openFile(fileName, H5F_ACC_RDONLY);}
    catch ( H5::FileIException ) {
        throw InvalidFileException("Cannot acces file");}
    try {
        group = file.openGroup(groupPath);}
    catch ( H5::GroupIException ) {
        file.close();
        throw InvalidFileException("Cannot access group");}
    /*
     * extract timeline. This is also necessary to get the nbSteps.
     */
    try {
        timeline = group.openDataSet("timeline");
    	nSteps = timeline.getSpace().getSimpleExtentNpoints();}
    catch ( H5::DataSetIException error ) {
        //error.printError();
        group.close();
        file.close();
        throw InvalidFileException("Cannot access timeline dataset");}
    if (logging::info)
        std::cerr << "Opened group \"" <<  fileName << groupPath << "\" which has " << nSteps << " steps.\n";
    /*
     * extract objects names in the xpGroup
     */

    std::vector<std::string>  names;
    H5Literate(group.getId(), H5_INDEX_NAME, H5_ITER_INC, NULL, iterInGroup, &names);
    /*
     * extract data from object in xpGroup
     * these data can be of 3 types: matrix, translate or wrench
     * each data are saved in related map
     */
    for (unsigned int i=0; i<names.size(); i++){ //TODO: skip timeline
        H5::DataSet dSet = group.openDataSet(names[i]);
        if (H5Aexists(dSet.getId(), "ArborisViewerType")) {
            H5::Attribute att = dSet.openAttribute("ArborisViewerType");
            std::string type;
            att.read(att.getDataType(), type);
            if (type == "matrix"){
                H5::DataSpace dSpace = dSet.getSpace();
                bool dimension_ok = false;
                if (dSpace.getSimpleExtentNdims()==3) {
                    hsize_t dims[3];
                    dSpace.getSimpleExtentDims (dims);
                    if (dims[0] == nSteps && dims[1] == 4 && dims[2] == 4)
                        dimension_ok = true;}
                if (dimension_ok)
                    matrices[names[i]] = dSet;
                else {
                    if (logging::warning)
                        std::cerr << "Skipping dataset \"" << names[i] << "\" which has wrong dimensions. I was expecting (" << nSteps << ",4,4).\n";
                    dSet.close();}}
            else if (type == "translate"){
                H5::DataSpace dSpace = dSet.getSpace();
                bool dimension_ok = false;
                if (dSpace.getSimpleExtentNdims()==2) {
                    hsize_t dims[2];
                    dSpace.getSimpleExtentDims (dims);
                    if (dims[0] == nSteps && dims[1] == 3)
                        dimension_ok = true;}
                if (dimension_ok)
                    translates[names[i]] = dSet;
                else {
                    if (logging::warning)
                        std::cerr << "Skipping dataset \"" << names[i] << "\" which has wrong dimensions. I was expecting (" << nSteps << ",3).\n";
                    dSet.close();}}
            else if (type == "wrench") {
                H5::DataSpace dSpace = dSet.getSpace();
                bool dimension_ok = false;
                if (dSpace.getSimpleExtentNdims()==2) {
                    hsize_t dims[2];
                    dSpace.getSimpleExtentDims (dims);
                    if (dims[0] == nSteps && dims[1] == 6)
                        dimension_ok = true;}
                if (dimension_ok)
                    wrenches[names[i]] = dSet;
                else {
                    if (logging::warning)
                        std::cerr << "Skipping dataset \"" << names[i] << "\" which as wrong dimensions. I was expecting (" << nSteps << ",6).\n";
                    dSet.close();}}
            else {
                if (logging::warning)
                    std::cerr << "Skipping dataset \"" << names[i] << "\" whose ArborisViewerType attribute as unknown value \"" << type << "\".\n";
                dSet.close();}
            att.close();
        }
        else {
            if (logging::info)
                std::cerr << "Skipping dataset \"" << names[i] << "\" which has no ArborisViewerType attribute.\n";
            dSet.close();
        }
    }
};
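The iterInGroup callback handed to H5Literate above is not shown in this example; the following is a sketch of what it presumably does, appending each link name to the vector passed through op_data (an assumption based on how names is used afterwards):

#include <hdf5.h>
#include <string>
#include <vector>

static herr_t iterInGroup(hid_t /*loc_id*/, const char* name,
                          const H5L_info_t* /*info*/, void* op_data)
{
    // op_data is the std::vector<std::string>* given to H5Literate.
    std::vector<std::string>* names = static_cast<std::vector<std::string>*>(op_data);
    names->push_back(name);
    return 0; // 0 = continue iterating over the group's links
}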
Example #14
void Bundle2::saveGeometry(const boost::filesystem::path& fileName) const {
	H5::H5File bundleFile;
	bundleFile.openFile(fileName.string(), H5F_ACC_RDWR);

	H5::Group rootGroup = bundleFile.openGroup("/");

	// If the group "Geometry" exists, delete it!
	if(checkGeometry_(bundleFile)) {
		rootGroup.unlink("Geometry");
	}

	// Creating group Geometry
	H5::Group geometryGroup = rootGroup.createGroup("Geometry");

	// Saving poses
	const hsize_t posesChunkDim[] = { 3, 12 };
	H5::DSetCreatPropList posesPropList;
	posesPropList.setLayout(H5D_CHUNKED);
	posesPropList.setChunk(2, posesChunkDim);
	posesPropList.setDeflate(9);

	const hsize_t posesMaxDim[] = { H5S_UNLIMITED, 12 };
	const hsize_t posesCurDim[] = { frames_.size(), 12 };
	H5::DataSpace posesDS(2, posesCurDim, posesMaxDim);

	H5::DataSet posesDataSet = geometryGroup.createDataSet("Poses", H5::PredType::IEEE_F64LE, posesDS, posesPropList);

	double* posesData = (double*)malloc(frames_.size()*12*sizeof(double));
	size_t i = 0;
	for(deque<Frame*>::const_iterator it = frames_.begin(); it != frames_.end(); it++) {
		posesData[i*12] = (*it)->pose()->t().x();
		posesData[i*12 + 1] = (*it)->pose()->t().y();
		posesData[i*12 + 2] = (*it)->pose()->t().z();

		core::Matrix<double> R = (*it)->pose()->R();
		posesData[i*12 + 3] = R[0][0];
		posesData[i*12 + 4] = R[1][0];
		posesData[i*12 + 5] = R[2][0];
		posesData[i*12 + 6] = R[0][1];
		posesData[i*12 + 7] = R[1][1];
		posesData[i*12 + 8] = R[2][1];
		posesData[i*12 + 9] = R[0][2];
		posesData[i*12 + 10] = R[1][2];
		posesData[i*12 + 11] = R[2][2];

		++i;
	}

	posesDataSet.write((const void*)posesData, H5::PredType::NATIVE_DOUBLE, H5::DataSpace::ALL, H5::DataSpace::ALL);
	free((void*)posesData);
	posesDataSet.close();
	posesDS.close();

	// Saving points
	const hsize_t pointsChunkDim[] = {10, 3};
	H5::DSetCreatPropList pointsPropList;
	pointsPropList.setLayout(H5D_CHUNKED);
	pointsPropList.setChunk(2, pointsChunkDim);
	pointsPropList.setDeflate(9);

	const hsize_t pointsMaxDim[] = { H5S_UNLIMITED, 3 };
	const hsize_t pointsCurDim[] = { tracks_.size(), 3 };
	H5::DataSpace pointsDS(2, pointsCurDim, pointsMaxDim);

	H5::DataSet pointsDataSet = geometryGroup.createDataSet("Points", H5::PredType::IEEE_F64LE, pointsDS, pointsPropList);

	double* pointsData = (double*)malloc(tracks_.size()*3*sizeof(double));

	i = 0;
	for(deque<Track*>::const_iterator it = tracks_.begin(); it != tracks_.end(); it++) {
		pointsData[i*3] = (*it)->point()->coords().x();
		pointsData[i*3 + 1] = (*it)->point()->coords().y();
		pointsData[i*3 + 2] = (*it)->point()->coords().z();

		++i;
	}

	pointsDataSet.write((const void*)pointsData, H5::PredType::NATIVE_DOUBLE, H5::DataSpace::ALL, H5::DataSpace::ALL);
	free((void*)pointsData);
	pointsDataSet.close();
	pointsDS.close();

	// Saving inlier information
	const hsize_t inliersChunkDim[] = { 3 };
	H5::DSetCreatPropList inliersPropList;
	inliersPropList.setLayout(H5D_CHUNKED);
	inliersPropList.setChunk(1, inliersChunkDim);
	inliersPropList.setDeflate(9);

	const hsize_t inliersMaxDim[] = { H5S_UNLIMITED };
	const hsize_t inliersCurDim[] = { frames_.size() };
	H5::DataSpace inliersDS(1, inliersCurDim, inliersMaxDim);

	H5::VarLenType inliersType(&H5::PredType::STD_U8LE);

	H5::DataSet inliersDataSet = geometryGroup.createDataSet("Inliers", inliersType, inliersDS, inliersPropList);

	i = 0;
	for(deque<Frame*>::const_iterator it = frames_.begin(); it != frames_.end(); it++) {
		hvl_t inliersLine;

        size_t inliersLineSize = 0;
        for(size_t j = 0; j < (*it)->size(); ++j) {
            View& v = (**it)[j];
            for(unsigned int cam = 0; cam < v.numCameras(); ++cam) {
                if(v.inCamera(cam)) ++inliersLineSize;
            }
        }

		inliersLine.len = inliersLineSize;
		inliersLine.p = malloc(inliersLineSize*sizeof(unsigned char));

        size_t k = 0;
		for(size_t j = 0; j < (*it)->size(); ++j) {
            View& v = (**it)[j];
            for(unsigned int cam = 0; cam < v.numCameras(); ++cam) {
                if(v.inCamera(cam)) {
                    ((unsigned char*)(inliersLine.p))[k] = v.ray(cam).inlier()?1:0;
                    ++k;
                }
            }
        }

		const hsize_t dsOffset[] = { i };
		const hsize_t dsCount[] = { 1 };
		H5::DataSpace inliersCurDS = inliersDataSet.getSpace();
		inliersCurDS.selectHyperslab(H5S_SELECT_SET, dsCount, dsOffset);

		const hsize_t memDim[] = { 1 };
		H5::DataSpace memDS(1, memDim, memDim);

		H5::VarLenType memType(&H5::PredType::NATIVE_UCHAR);

		inliersDataSet.write((const void*)&inliersLine, memType, memDS, inliersCurDS);

		memType.close();
		memDS.close();
		inliersCurDS.close();

		free(inliersLine.p);

		++i;
	}

	inliersDataSet.close();
	inliersType.close();
	inliersDS.close();

	// Saving curves
	if(!curves_.empty()) {
		const hsize_t chunkDim[] = { 5 };
		H5::DSetCreatPropList propList;
		propList.setLayout(H5D_CHUNKED);
		propList.setChunk(1, chunkDim);
		propList.setDeflate(9);

		H5::VarLenType curveDatasetType(&H5::PredType::STD_U64LE);

		hsize_t curvesDim[] = { curves_.size() };
		hsize_t curvesMaxDim[] = { H5S_UNLIMITED };
		H5::DataSpace curvesDataspace(1, curvesDim, curvesMaxDim);

		H5::DataSet curvesDataset = geometryGroup.createDataSet("Curves", curveDatasetType, curvesDataspace, propList);

		for(size_t i = 0; i < curves_.size(); ++i) {
			hvl_t curveLine;

			curveLine.len = curves_[i].size();
			curveLine.p = malloc(curves_[i].size()*sizeof(size_t));

			for(size_t j = 0; j < curves_[i].size(); ++j) ((size_t*)(curveLine.p))[j] = curves_[i].track(j);

			const hsize_t dsOffset[] = { i };
			const hsize_t dsCount[] = { 1 };
			H5::DataSpace curDS = curvesDataset.getSpace();
			curDS.selectHyperslab(H5S_SELECT_SET, dsCount, dsOffset);

			const hsize_t memDim[] = { 1 };
			H5::DataSpace memDS(1, memDim, memDim);

			H5::VarLenType memType(&H5::PredType::NATIVE_HSIZE);

			curvesDataset.write((const void*)&curveLine, memType, memDS, curDS);

			memType.close();
			memDS.close();
			curDS.close();

			free(curveLine.p);
 		}

		curvesDataset.close();
		curvesDataspace.close();
		curveDatasetType.close();
		propList.close();
	}

	geometryGroup.close();
	rootGroup.close();
	bundleFile.close();
}
Example #15
void Bundle2::loadGeometry_(H5::H5File& file) {
	H5::Group geometryGroup = file.openGroup("/Geometry");

	// Loading poses
	H5::DataSet posesDataSet = geometryGroup.openDataSet("Poses");
	double* posesData = (double*)malloc(frames_.size()*12*sizeof(double));
	posesDataSet.read((void*)posesData, H5::PredType::NATIVE_DOUBLE, H5::DataSpace::ALL, H5::DataSpace::ALL);
	posesDataSet.close();

	size_t i = 0;
	for(deque<Frame*>::iterator it = frames_.begin(); it != frames_.end(); ++it) {
		Pose* pose = new Pose;
		pose->sett(core::RealPoint3D<double>(posesData[i*12], posesData[i*12 + 1], posesData[i*12 + 2]));

		core::Matrix<double> R(3, 3);
		R[0][0] = posesData[i*12 + 3]; R[1][0] = posesData[i*12 + 4]; R[2][0] = posesData[i*12 + 5];
		R[0][1] = posesData[i*12 + 6]; R[1][1] = posesData[i*12 + 7]; R[2][1] = posesData[i*12 + 8];
		R[0][2] = posesData[i*12 + 9]; R[1][2] = posesData[i*12 + 10]; R[2][2] = posesData[i*12 + 11];

		pose->setR(R);
		pose->calcEulerAngles();
		pose->setorientationSynchronWithAngles(true);
		pose->setderivationsSynchronWithAngles(false);

		(*it)->setpose(pose);

		++i;
	}
	free((void*)posesData);

	// Loading points
	H5::DataSet pointsDataSet = geometryGroup.openDataSet("Points");
	double* pointsData = (double*)malloc(tracks_.size()*3*sizeof(double));
	pointsDataSet.read((void*)pointsData, H5::PredType::NATIVE_DOUBLE, H5::DataSpace::ALL, H5::DataSpace::ALL);
	pointsDataSet.close();

	i = 0;
	for(deque<Track*>::iterator it = tracks_.begin(); it != tracks_.end(); it++) {
		Point* point = new Point(core::RealPoint3D<double>(pointsData[i*3], pointsData[i*3 + 1], pointsData[i*3 + 2]));
		(*it)->setpoint(point);

		++i;
	}
	free((void*)pointsData);

	// Loading inlier information
	H5::DataSet inliersDataSet = geometryGroup.openDataSet("Inliers");
	hvl_t* inliersData = (hvl_t*)malloc(frames_.size()*sizeof(hvl_t));
	H5::VarLenType memType(&H5::PredType::NATIVE_UCHAR);
	inliersDataSet.read((void*)inliersData, memType, H5::DataSpace::ALL, H5::DataSpace::ALL);
	memType.close();
	inliersDataSet.close();

	i = 0;
	for(deque<Frame*>::iterator it = frames_.begin(); it != frames_.end(); it++) {
		unsigned char* inl = (unsigned char*)(inliersData[i].p);

        size_t k = 0;
		for(size_t j = 0; j < (*it)->size(); ++j) {
            View& v = (**it)[j];
            for(unsigned int cam = 0; cam < v.numCameras(); ++cam) {
                if(v.inCamera(cam)) {
                    Ray ray;
                    if(inl[k]) ray.setinlier(true);
                    else ray.setinlier(false);

                    v.addRay(cam, ray);
                    ++k;
                }
            }
		}

		++i;
	}

	for(size_t j = 0; j < frames_.size(); ++j) free(inliersData[j].p);
	free((void*)inliersData);

	// Loading curves if they exist
	bool curvesFound = false;
	const hsize_t maxObjs = geometryGroup.getNumObjs();
	for(hsize_t obj = 0; obj < maxObjs; ++obj) {
		string objName = geometryGroup.getObjnameByIdx(obj);
		if(objName == string("Curves")) curvesFound = true;
	}

	if(curvesFound) {
		H5::DataSet curvesDataSet = geometryGroup.openDataSet("Curves");

		hsize_t curvesDim[1];
		H5::DataSpace curvesDS = curvesDataSet.getSpace();
		curvesDS.getSimpleExtentDims(curvesDim);
		curvesDS.close();

		hvl_t* curvesData = (hvl_t*)malloc(curvesDim[0]*sizeof(hvl_t));
		H5::VarLenType memType(&H5::PredType::NATIVE_HSIZE);

		curvesDataSet.read((void*)curvesData, memType, H5::DataSpace::ALL, H5::DataSpace::ALL);

		memType.close();
		curvesDataSet.close();

		for(size_t c = 0; c < curvesDim[0]; ++c) {
			const size_t cur_c = addCurve();

			for(size_t p = 0; p < curvesData[c].len; ++p) {
				curves_[cur_c].addPoint(((size_t*)(curvesData[c].p))[p]);
			}
		}

		for(size_t i = 0; i < curvesDim[0]; ++i) free(curvesData[i].p);
		free((void*)curvesData);
	}

	geometryGroup.close();
}
Example #16
File: HDF5.hpp Project: rseal/HDF5R
 void WriteTable(void* data){
    //write data and close data set
    dSet_.write(data, dType_, dSpace_);
    dSet_.close();
 }
Example #17
// Bundle management
void Bundle2::save(const boost::filesystem::path& fileName) const {
	// Creating HDF5 file
	H5::H5File bundleFile(fileName.string(), H5F_ACC_TRUNC);
	storeParameters(bundleFile);

	H5::DataSpace scalar;

	// Saving POI
	H5::Group poiGroup = bundleFile.createGroup("/POI");

	H5::Attribute attr = poiGroup.createAttribute("count", H5::PredType::STD_U64LE, scalar);
	hsize_t count = poi_.size();
	attr.write(H5::PredType::NATIVE_HSIZE, &count);
	attr.close();

	for(size_t frame = 0; frame < poi_.size(); ++frame) {
		const std::string frameGroupName = boost::str(boost::format("Frame %1$04d") % frame);
		H5::Group frameGroup = poiGroup.createGroup(frameGroupName);

		count = poi_[frame].size();
		attr = frameGroup.createAttribute("count", H5::PredType::STD_U64LE, scalar);
		attr.write(H5::PredType::NATIVE_HSIZE, &count);
		attr.close();

		for(size_t camera = 0; camera < poi_[frame].size(); ++camera)
			poi_[frame][camera].save(frameGroup, camera);

		frameGroup.close();
	}

	poiGroup.close();

	// Saving key frames
	H5::Group bundleGroup = bundleFile.createGroup("/Bundle");

	H5::Group framesGroup = bundleGroup.createGroup("Frames");

	count = frames_.size();
	attr = framesGroup.createAttribute("count", H5::PredType::STD_U64LE, scalar);
	attr.write(H5::PredType::NATIVE_HSIZE, &count);
	attr.close();

	for(deque<Frame*>::const_iterator it = frames_.begin(); it != frames_.end(); it++) {
		(*it)->save(framesGroup);
	}

	framesGroup.close();

	// Saving tracks
	const hsize_t chunkDim[] = { 2, 1 };
	H5::DSetCreatPropList propList;
	propList.setLayout(H5D_CHUNKED);
	propList.setChunk(2, chunkDim);
	propList.setDeflate(9);

	H5::VarLenType tracksDatasetType(&H5::PredType::STD_U64LE);

	hsize_t tracksDim[] = { tracks_.size(), 2 };
    hsize_t tracksMaxDim[] = { H5S_UNLIMITED, 2 };
	H5::DataSpace tracksDataspace(2, tracksDim, tracksMaxDim);

	H5::DataSet tracksDataset = bundleGroup.createDataSet("Tracks", tracksDatasetType, tracksDataspace, propList);

	for(size_t i = 0; i < tracks_.size(); ++i)
		tracks_[i]->save(tracksDataset, i);

	tracksDataset.close();
	tracksDataspace.close();
	tracksDatasetType.close();
	propList.close();

	bundleGroup.close();

	scalar.close();
	bundleFile.close();
}
Example #18
void Bundle2::initGeometryStream_() {
	// Creating group Geometry
	H5::Group geometryGroup = streamFile_->createGroup("/Geometry");

	// Saving poses
	const hsize_t posesChunkDim[] = { 3, 12 };
	H5::DSetCreatPropList posesPropList;
	posesPropList.setLayout(H5D_CHUNKED);
	posesPropList.setChunk(2, posesChunkDim);
	posesPropList.setDeflate(9);

	const hsize_t posesMaxDim[] = { H5S_UNLIMITED, 12 };
	const hsize_t posesCurDim[] = { frames_.size(), 12 };
	H5::DataSpace posesDS(2, posesCurDim, posesMaxDim);

	H5::DataSet posesDataSet = geometryGroup.createDataSet("Poses", H5::PredType::IEEE_F64LE, posesDS, posesPropList);

	double* posesData = (double*)malloc(frames_.size()*12*sizeof(double));
	size_t i = 0;
	for(deque<Frame*>::const_iterator it = frames_.begin(); it != frames_.end(); it++) {
		posesData[i*12] = (*it)->pose()->t().x();
		posesData[i*12 + 1] = (*it)->pose()->t().y();
		posesData[i*12 + 2] = (*it)->pose()->t().z();

		core::Matrix<double> R = (*it)->pose()->R();
		posesData[i*12 + 3] = R[0][0];
		posesData[i*12 + 4] = R[1][0];
		posesData[i*12 + 5] = R[2][0];
		posesData[i*12 + 6] = R[0][1];
		posesData[i*12 + 7] = R[1][1];
		posesData[i*12 + 8] = R[2][1];
		posesData[i*12 + 9] = R[0][2];
		posesData[i*12 + 10] = R[1][2];
		posesData[i*12 + 11] = R[2][2];

		++i;
	}

	posesDataSet.write((const void*)posesData, H5::PredType::NATIVE_DOUBLE, H5::DataSpace::ALL, H5::DataSpace::ALL);
	free((void*)posesData);
	posesDataSet.close();
	posesDS.close();

	// Creating points dataset
	const hsize_t pointsChunkDim[] = {10, 3};
	H5::DSetCreatPropList pointsPropList;
	pointsPropList.setLayout(H5D_CHUNKED);
	pointsPropList.setChunk(2, pointsChunkDim);
	pointsPropList.setDeflate(9);

	const hsize_t pointsMaxDim[] = { H5S_UNLIMITED, 3 };
	const hsize_t pointsCurDim[] = { 0, 3 };
	H5::DataSpace pointsDS(2, pointsCurDim, pointsMaxDim);

	H5::DataSet pointsDataSet = geometryGroup.createDataSet("Points", H5::PredType::IEEE_F64LE, pointsDS, pointsPropList);

	pointsDataSet.close();
	pointsDS.close();

	// Creating inliers dataset
	const hsize_t inliersChunkDim[] = { 3 };
	H5::DSetCreatPropList inliersPropList;
	inliersPropList.setLayout(H5D_CHUNKED);
	inliersPropList.setChunk(1, inliersChunkDim);
	inliersPropList.setDeflate(9);

	const hsize_t inliersMaxDim[] = { H5S_UNLIMITED };
	const hsize_t inliersCurDim[] = { frames_.size() };
	H5::DataSpace inliersDS(1, inliersCurDim, inliersMaxDim);

	H5::VarLenType inliersType(&H5::PredType::STD_U8LE);

	H5::DataSet inliersDataSet = geometryGroup.createDataSet("Inliers", inliersType, inliersDS, inliersPropList);

	inliersDataSet.close();
	inliersType.close();
	inliersDS.close();

	// Creating curve dataset
	const hsize_t chunkDim[] = { 5 };
	H5::DSetCreatPropList propList;
	propList.setLayout(H5D_CHUNKED);
	propList.setChunk(1, chunkDim);
	propList.setDeflate(9);

	H5::VarLenType curveDatasetType(&H5::PredType::STD_U64LE);

	hsize_t curvesDim[] = { 0 };
	hsize_t curvesMaxDim[] = { H5S_UNLIMITED };
	H5::DataSpace curvesDataspace(1, curvesDim, curvesMaxDim);

	H5::DataSet curvesDataset = geometryGroup.createDataSet("Curves", curveDatasetType, curvesDataspace, propList);

	curvesDataset.close();
	curvesDataspace.close();
	curveDatasetType.close();
	propList.close();

	geometryGroup.close();
}
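The "Points" and "Curves" datasets are created here with a current extent of zero and an unlimited maximum, so they are presumably grown as geometry is streamed in later. A sketch of how one row might be appended to the chunked "Points" dataset (the surrounding streaming logic and the placeholder coordinates are assumptions):

	H5::Group geometryGroup = streamFile_->openGroup("/Geometry");
	H5::DataSet points = geometryGroup.openDataSet("Points");

	// Current extent of the dataset.
	H5::DataSpace curSpace = points.getSpace();
	hsize_t curDims[2];
	curSpace.getSimpleExtentDims(curDims);
	curSpace.close();

	// Grow by one row along the unlimited dimension.
	const hsize_t newDims[] = { curDims[0] + 1, 3 };
	points.extend(newDims);

	// Select the newly added row in the file and write into it.
	const hsize_t offset[] = { curDims[0], 0 };
	const hsize_t count[] = { 1, 3 };
	H5::DataSpace fileSpace = points.getSpace();	// refresh after extend()
	fileSpace.selectHyperslab(H5S_SELECT_SET, count, offset);

	H5::DataSpace memSpace(2, count);
	const double point[3] = { 0.0, 0.0, 0.0 };	// placeholder coordinates
	points.write(point, H5::PredType::NATIVE_DOUBLE, memSpace, fileSpace);

	memSpace.close();
	fileSpace.close();
	points.close();
	geometryGroup.close();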
Example #19
void Bundle2::initFrameStream_() {
	H5::DataSpace scalar;

	// Creating datasets for each frame
	H5::Group bundleGroup = streamFile_->createGroup("/Bundle");
	H5::Group framesGroup = bundleGroup.createGroup("Frames");

	hsize_t count = frames_.size();
	H5::Attribute attr = framesGroup.createAttribute("count", H5::PredType::STD_U64LE, scalar);
	attr.write(H5::PredType::NATIVE_HSIZE, &count);
	attr.close();

	// Defining frame dataset property
	hsize_t chunkDim[] = { 1, 2, 10 };
	H5::DSetCreatPropList propList;
	propList.setLayout(H5D_CHUNKED);
	propList.setChunk(3, chunkDim);
	propList.setDeflate(9);

	// Defining the dataset dataspace
	hsize_t max_dim[] = { numCameras_, 2, H5S_UNLIMITED };
	hsize_t dim[] = { numCameras_, 2, 0 };
	H5::DataSpace ds(3, dim, max_dim);

	for(deque<Frame*>::const_iterator it = frames_.begin(); it != frames_.end(); it++) {
		const std::string datasetName = boost::str(boost::format("Frame %1$04d") % (*it)->number());

		// Creating dataset
		H5::DataSet frameData = framesGroup.createDataSet(datasetName, H5::PredType::IEEE_F32LE, ds, propList);

		// Writing global number
		attr = frameData.createAttribute("globalNumber", H5::PredType::STD_U64LE, scalar);
		count = (*it)->globalNumber();
		attr.write(H5::PredType::NATIVE_HSIZE, &count);
		attr.close();

		// Clean up!
		frameData.close();
	}

	// Clean up!
	ds.close();
	propList.close();

	// Creating tracks dataset
	hsize_t chunkDim2[] = { 2, 10 };	// note: declared but never used; setChunk below reuses the first two entries of chunkDim
	propList = H5::DSetCreatPropList();
	propList.setLayout(H5D_CHUNKED);
	propList.setChunk(2, chunkDim);
	propList.setDeflate(9);

	H5::VarLenType tracksDatasetType(&H5::PredType::STD_U64LE);

	hsize_t tracksDim[] = { 0, 2 };
	hsize_t tracksMaxDim[] = { H5S_UNLIMITED, 2 };
	H5::DataSpace tracksDataspace(2, tracksDim, tracksMaxDim);

	H5::DataSet tracksDataset = bundleGroup.createDataSet("Tracks", tracksDatasetType, tracksDataspace, propList);

	tracksDataset.close();
	tracksDataspace.close();
	tracksDatasetType.close();
	propList.close();

	framesGroup.close();
	bundleGroup.close();
	scalar.close();
}
Example #20
PcaModel PcaModel::loadStatismoModel(path h5file, PcaModel::ModelType modelType)
{
	logging::Logger logger = Loggers->getLogger("shapemodels");
	PcaModel model;

	// Load the shape or color model from the .h5 file
	string h5GroupType;
	if (modelType == ModelType::SHAPE) {
		h5GroupType = "shape";
	} else if (modelType == ModelType::COLOR) {
		h5GroupType = "color";
	}

	H5::H5File h5Model;

	try {
		h5Model = H5::H5File(h5file.string(), H5F_ACC_RDONLY);
	}
	catch (H5::Exception& e) {
		string errorMessage = "Could not open HDF5 file: " + string(e.getCDetailMsg());
		logger.error(errorMessage);
		throw errorMessage;
	}

	// Load either the shape or texture mean
	string h5Group = "/" + h5GroupType + "/model";
	H5::Group modelReconstructive = h5Model.openGroup(h5Group);

	// Read the mean
	H5::DataSet dsMean = modelReconstructive.openDataSet("./mean");
	hsize_t dims[2];
	dsMean.getSpace().getSimpleExtentDims(dims, NULL);	// dsMean.getSpace() leaks memory... maybe a hdf5 bug, maybe vlenReclaim(...) could be a fix. No idea.
	//H5::DataSpace dsp = dsMean.getSpace();
	//dsp.close();
	Loggers->getLogger("shapemodels").debug("Dimensions of the model mean: " + lexical_cast<string>(dims[0]));
	model.mean = Mat(1, dims[0], CV_32FC1); // Use a row-vector, because of faster memory access and I'm not sure the memory block is allocated contiguously if we have multiple rows.
	dsMean.read(model.mean.ptr<float>(0), H5::PredType::NATIVE_FLOAT);
	model.mean = model.mean.t(); // Transpose it to a col-vector
	dsMean.close();

	// Read the eigenvalues
	dsMean = modelReconstructive.openDataSet("./pcaVariance");
	dsMean.getSpace().getSimpleExtentDims(dims, NULL);
	Loggers->getLogger("shapemodels").debug("Dimensions of the pcaVariance: " + lexical_cast<string>(dims[0]));
	model.eigenvalues = Mat(1, dims[0], CV_32FC1);
	dsMean.read(model.eigenvalues.ptr<float>(0), H5::PredType::NATIVE_FLOAT);
	model.eigenvalues = model.eigenvalues.t();
	dsMean.close();

	// Read the PCA basis matrix
	dsMean = modelReconstructive.openDataSet("./pcaBasis");
	dsMean.getSpace().getSimpleExtentDims(dims, NULL);
	Loggers->getLogger("shapemodels").debug("Dimensions of the PCA basis matrix: " + lexical_cast<string>(dims[0]) + ", " + lexical_cast<string>(dims[1]));
	model.pcaBasis = Mat(dims[0], dims[1], CV_32FC1);
	dsMean.read(model.pcaBasis.ptr<float>(0), H5::PredType::NATIVE_FLOAT);
	dsMean.close();

	modelReconstructive.close(); // close the model-group

	// Read the noise variance (not implemented)
	/*dsMean = modelReconstructive.openDataSet("./noiseVariance");
	float noiseVariance = 10.0f;
	dsMean.read(&noiseVariance, H5::PredType::NATIVE_FLOAT);
	dsMean.close(); */

	// Read the triangle-list
	string representerGroupName = "/" + h5GroupType + "/representer";
	H5::Group representerGroup = h5Model.openGroup(representerGroupName);
	dsMean = representerGroup.openDataSet("./reference-mesh/triangle-list");
	dsMean.getSpace().getSimpleExtentDims(dims, NULL);
	Loggers->getLogger("shapemodels").debug("Dimensions of the triangle-list: " + lexical_cast<string>(dims[0]) + ", " + lexical_cast<string>(dims[1]));
	Mat triangles(dims[0], dims[1], CV_32SC1);
	dsMean.read(triangles.ptr<int>(0), H5::PredType::NATIVE_INT32);
	dsMean.close();
	representerGroup.close();
	model.triangleList.resize(triangles.rows);
	for (unsigned int i = 0; i < model.triangleList.size(); ++i) {
		model.triangleList[i][0] = triangles.at<int>(i, 0);
		model.triangleList[i][1] = triangles.at<int>(i, 1);
		model.triangleList[i][2] = triangles.at<int>(i, 2);
	}

	// Load the landmarks mappings:
	// load the reference-mesh
	representerGroup = h5Model.openGroup(representerGroupName);
	dsMean = representerGroup.openDataSet("./reference-mesh/vertex-coordinates");
	dsMean.getSpace().getSimpleExtentDims(dims, NULL);
	Loggers->getLogger("shapemodels").debug("Dimensions of the reference-mesh vertex-coordinates matrix: " + lexical_cast<string>(dims[0]) + ", " + lexical_cast<string>(dims[1]));
	Mat referenceMesh(dims[0], dims[1], CV_32FC1);
	dsMean.read(referenceMesh.ptr<float>(0), H5::PredType::NATIVE_FLOAT);
	dsMean.close();
	representerGroup.close();

	// convert to 3 vectors with the x, y and z coordinates for easy searching
	vector<float> refx(referenceMesh.col(0).clone());
	vector<float> refy(referenceMesh.col(1).clone());
	vector<float> refz(referenceMesh.col(2).clone());

	// load the landmarks info (mapping name <-> reference (x, y, z)-coords)
	H5::Group landmarksGroup = h5Model.openGroup("/metadata/landmarks");
	dsMean = landmarksGroup.openDataSet("./text");
	
	H5std_string outputString;
	Loggers->getLogger("shapemodels").debug("Reading landmark information from the model.");
	dsMean.read(outputString, dsMean.getStrType());
	dsMean.close();
	landmarksGroup.close();
	vector<string> landmarkLines;
	boost::split(landmarkLines, outputString, boost::is_any_of("\n"), boost::token_compress_on);
	for (const auto& l : landmarkLines) {
		if (l == "") {
			continue;
		}
		vector<string> line;
		boost::split(line, l, boost::is_any_of(" "), boost::token_compress_on);
		string name = line[0];
		int visibility = lexical_cast<int>(line[1]);
		float x = lexical_cast<float>(line[2]);
		float y = lexical_cast<float>(line[3]);
		float z = lexical_cast<float>(line[4]);
		// Find the x, y and z values in the reference
		const auto ivx = std::find(begin(refx), end(refx), x);
		const auto ivy = std::find(begin(refy), end(refy), y);
		const auto ivz = std::find(begin(refz), end(refz), z);
		// TODO Check for .end()!
		const auto vertexIdX = std::distance(begin(refx), ivx);
		const auto vertexIdY = std::distance(begin(refy), ivy);
		const auto vertexIdZ = std::distance(begin(refz), ivz);
		// assert vx=vy=vz
		// Hmm this is not perfect. If there's another vertex where 1 or 2 coords are the same, it fails.
		// We should do the search differently: Find _all_ the vertices that are equal, then take the one that has the right x, y and z.
		model.landmarkVertexMap.insert(make_pair(name, vertexIdX));
	
	}

	h5Model.close();
	return model;
}
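The comments above note that matching only the x coordinate can select the wrong vertex when another vertex happens to share that coordinate. A sketch of the search the author suggests instead — keep the vertex whose x, y and z all match — as a hypothetical helper (exact float comparison kept, as in the original; a tolerance may be needed in practice):

#include <vector>

// Hypothetical helper: index of the reference vertex matching (x, y, z), or -1 if none matches.
static int findVertexId(const std::vector<float>& refx, const std::vector<float>& refy,
                        const std::vector<float>& refz, float x, float y, float z)
{
	for (size_t i = 0; i < refx.size(); ++i) {
		if (refx[i] == x && refy[i] == y && refz[i] == z)
			return static_cast<int>(i);
	}
	return -1;
}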
Example #21
bool saveStackHDF5( const char* fileName, const My4DImage& img, Codec_Mapping* mapping )
{
    try
    {
#ifdef USE_HDF5
        H5::Exception::dontPrint();
        H5::H5File file( fileName, H5F_ACC_TRUNC );
        H5::Group* group = new H5::Group( file.createGroup( "/Channels" ) );

        Image4DProxy<My4DImage> proxy( const_cast<My4DImage*>( &img ) );

        long scaledHeight = nearestPowerOfEight( proxy.sy );
        long scaledWidth = nearestPowerOfEight( proxy.sx );

        // Initialize the upper and lower bounds
        long pad_right = ( scaledWidth - proxy.sx ) ;
        long pad_bottom = ( scaledHeight - proxy.sy );

        hsize_t dims[1] = { 1 };
        H5::DataSpace attr_ds = H5::DataSpace( 1, dims );
        H5::Attribute attr = group->createAttribute( "width", H5::PredType::STD_I64LE, attr_ds );
        attr.write( H5::PredType::NATIVE_INT, &( proxy.sx ) );
        attr = group->createAttribute( "height", H5::PredType::STD_I64LE, attr_ds );
        attr.write( H5::PredType::NATIVE_INT, &( proxy.sy ) );
        attr = group->createAttribute( "frames", H5::PredType::STD_I64LE, attr_ds );
        attr.write( H5::PredType::NATIVE_INT, &( proxy.sz ) );
        attr = group->createAttribute( "pad_right", H5::PredType::STD_I64LE, attr_ds );
        attr.write( H5::PredType::NATIVE_INT, &( pad_right ) );
        attr = group->createAttribute( "pad_bottom", H5::PredType::STD_I64LE, attr_ds );
        attr.write( H5::PredType::NATIVE_INT, &( pad_bottom ) );

        Codec_Mapping* imap = mapping;
        if ( !mapping )
        {
            imap = new Codec_Mapping();
            generate_codec_mapping( *imap, proxy.sc );
        }

        for ( int c = 0; c < proxy.sc; ++c )
        {
            double default_irange = 1.0; // assumes data range is 0-255.0
            if ( proxy.su > 1 )
            {
                default_irange = 1.0 / 16.0; // 0-4096, like our microscope images
            }
            std::vector<double> imin( proxy.sc, 0.0 );
            std::vector<double> irange2( proxy.sc, default_irange );
            // rescale if converting from 16 bit to 8 bit
            if ( proxy.su > 1 )
            {
                if ( img.p_vmin && img.p_vmax )
                    proxy.set_minmax( img.p_vmin, img.p_vmax );
                if ( proxy.has_minmax() )
                {
                    imin[c] = proxy.vmin[c];
                    irange2[c] = 255.0 / ( proxy.vmax[c] - proxy.vmin[c] );
                }
            }

            FFMpegEncoder encoder( NULL, scaledWidth, scaledHeight,
                                   ( *imap )[c].first, ( *imap )[c].second );
            // If the image needs padding, fill the expanded border regions with black
            for ( int z = 0; z < proxy.sz; ++z )
            {
                for ( int y = 0; y < scaledHeight; ++y )
                {
                    for ( int x = 0; x < scaledWidth; ++x )
                    {
                        // If inside the area with valid data
                        if ( x < proxy.sx && y < proxy.sy )
                        {
                            int ic = c;
                            double val = proxy.value_at( x, y, z, ic );
                            val = ( val - imin[ic] ) * irange2[ic]; // rescale to range 0-255
                            for ( int cc = 0; cc < 3; ++cc )
                                encoder.setPixelIntensity( x, y, cc, ( int )val );
                        }
                        else
                            for ( int cc = 0; cc < 3; ++cc )
                                encoder.setPixelIntensity( x, y, cc, 0 );
                    }
                }
                encoder.write_frame();
            }

            for ( int rem = encoder.encoded_frames(); rem < proxy.sz; rem++ )
                encoder.encode();

            encoder.close();

            hsize_t  dims[1];
            dims[0] = encoder.buffer_size();
            H5::DataSpace dataspace( 1, dims );
            std::stringstream name;
            name << "Channel_" << c;
            H5::DataSet dataset = group->createDataSet( name.str(), H5::PredType::NATIVE_UINT8, dataspace );
            dataset.write( encoder.buffer(), H5::PredType::NATIVE_UINT8 );
            dataset.close();

            std::cout << "Encoded channel is " << encoder.buffer_size() << " bytes." << std::endl;
            // Uncomment this if you want to dump the individual movies to a temp file
        }
#endif
        if ( !mapping )
            delete imap;

        file.close();

        return true;
    }
    catch ( ... ) {}

    return false;
}
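As a worked example of the rescaling above (the values are assumed for illustration): with proxy.vmin[c] = 0 and proxy.vmax[c] = 4096, irange2[c] becomes 255.0 / 4096, so a 16-bit sample of 2048 reaches the encoder as (2048 - 0) * 255.0 / 4096 = 127.5, which the int cast truncates to 127.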