void FeaturePointsRANSAC::loadModel(std::string modelPath)
{
	H5::H5File h5Model;

	try {
		h5Model = H5::H5File(modelPath, H5F_ACC_RDONLY);
	}
	catch (H5::Exception& e) {
		std::string msg( std::string( "Could not open HDF5 file \n" ) + e.getCDetailMsg() );
		throw msg;
	}

	// Load the Shape
	H5::Group modelReconstructive = h5Model.openGroup("/shape/ReconstructiveModel/model");
	H5::DataSet dsMean = modelReconstructive.openDataSet("./mean");
	hsize_t dims[1];
	dsMean.getSpace().getSimpleExtentDims(dims, NULL);	// dsMean.getSpace() leaks memory... maybe a hdf5 bug, maybe vlenReclaim(...) could be a fix. No idea.
	//H5::DataSpace dsp = dsMean.getSpace();
	//dsp.close();

	std::cout << "Dims: " << dims[0] << std::endl;		// TODO: I guess this whole part could be done A LOT better!
	float* testData = new float[dims[0]];
	dsMean.read(testData, H5::PredType::NATIVE_FLOAT);
	this->modelMeanShp.reserve(dims[0]);

	for (unsigned int i=0; i < dims[0]; ++i)	{
		modelMeanShp.push_back(testData[i]);
	}
	delete[] testData;
	testData = NULL;
	dsMean.close();

	// Load the Texture
	H5::Group modelReconstructiveTex = h5Model.openGroup("/color/ReconstructiveModel/model");
	H5::DataSet dsMeanTex = modelReconstructiveTex.openDataSet("./mean");
	hsize_t dimsTex[1];
	dsMeanTex.getSpace().getSimpleExtentDims(dimsTex, NULL);
	std::cout << "Dims: " << dimsTex[0] << std::endl;		// TODO: I guess this whole part could be done A LOT better!
	float* testDataTex = new float[dimsTex[0]];
	dsMeanTex.read(testDataTex, H5::PredType::NATIVE_FLOAT);
	this->modelMeanTex.reserve(dimsTex[0]);

	for (unsigned int i=0; i < dimsTex[0]; ++i)	{
		modelMeanTex.push_back(testDataTex[i]);
	}
	delete[] testDataTex;
	testDataTex = NULL;
	dsMeanTex.close();

	h5Model.close();
}
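The TODO comments above hint that the raw new[]/delete[] round trip is unnecessary. A minimal sketch of the same read using a std::vector<float> buffer (assuming the dsMean handle and the modelMeanShp member from the example above):

	// Sketch only: read the shape mean straight into a std::vector<float>.
	hsize_t dims[1];
	dsMean.getSpace().getSimpleExtentDims(dims, NULL);
	std::vector<float> buffer(dims[0]);
	dsMean.read(buffer.data(), H5::PredType::NATIVE_FLOAT);	// fill the vector's storage directly
	modelMeanShp = std::move(buffer);	// no manual new[]/delete[] needed
	dsMean.close();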
Example #2
arma::Mat<uint16_t> readLUT(const std::string& path)
{
    H5::H5File file (path.c_str(), H5F_ACC_RDONLY);
    H5::DataSet ds = file.openDataSet("LUT");

    H5::DataSpace filespace = ds.getSpace();
    int ndims = filespace.getSimpleExtentNdims();
    assert(ndims == 2);
    hsize_t dims[2] = {1, 1};
    filespace.getSimpleExtentDims(dims);

    H5::DataSpace memspace (ndims, dims);

    // Swapped dimensions: the row-major HDF5 buffer, viewed as column-major
    // memory, is the transpose of the stored dims[0] x dims[1] LUT.
    arma::Mat<uint16_t> res (dims[1], dims[0]);

    ds.read(res.memptr(), H5::PredType::NATIVE_UINT16, memspace, filespace);

    filespace.close();
    memspace.close();
    ds.close();
    file.close();

    // NOTE: Armadillo stores data in column-major order, while HDF5 uses
    // row-major ordering. Reading the HDF5 buffer directly into the arma
    // matrix therefore yields the transpose (hence the swapped dimensions
    // above); the in-place transpose below restores the original orientation.
    arma::inplace_trans(res);
    return res;
}
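The layout note above is easiest to verify on a small non-square matrix. A self-contained sketch (not part of the original project) that mimics the read-then-transpose step with a hand-written row-major buffer:

#include <armadillo>
#include <cassert>
#include <cstdint>

// A 2x3 dataset as HDF5 would deliver it (row-major), reinterpreted as a 3x2
// column-major Armadillo matrix and then transposed back to 2x3.
int main()
{
    const uint16_t rowMajor[6] = { 1, 2, 3,
                                   4, 5, 6 };      // element (i,j) sits at i*3 + j
    arma::Mat<uint16_t> m(rowMajor, 3, 2);         // copy the buffer as 3x2, column-major
    arma::inplace_trans(m);                        // now 2x3 and m(i,j) == rowMajor[i*3 + j]
    assert(m(0, 2) == 3 && m(1, 0) == 4);
    return 0;
}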
Example #3
    inline void
    read_values(H5::DataSet& dataset, H5::DataSpace& data_space,
        dimension const& dimx, dimension const& dimy, dimension const& dimz,
        double* values)
    {
        using namespace H5;

        // Define the hyperslab for file based data.
        hsize_t data_offset[dimension::dim] = {
            dimx.offset_, dimy.offset_, dimz.offset_
        };
        hsize_t data_count[dimension::dim] = {
            dimx.count_, dimy.count_, dimz.count_
        };
        data_space.selectHyperslab(H5S_SELECT_SET, data_count, data_offset);

        // Memory dataspace.
        DataSpace mem_space (dimension::dim, data_count);

        // Define the hyperslab for data in memory.
        hsize_t mem_offset[dimension::dim] = { 0, 0, 0 };
        mem_space.selectHyperslab(H5S_SELECT_SET, data_count, mem_offset);

        // Read data to memory.
        dataset.read(values, PredType::NATIVE_DOUBLE, mem_space, data_space);
    }
Example #4
File: HDF5.hpp Project: rseal/HDF5R
      void ReadTable(const int& tableNum, void* buf, 
            const H5::DataType& dType){

         std::string tNum = Num2Table(tableNum);
         dSet_ = file_->openDataSet(tNum);
         dSet_.read( buf, dType);
      }
Example #5
bool loadStackHDF5( const char* fileName, Image4DSimple& img )
{
#ifdef USE_HDF5
    H5::Exception::dontPrint();
    H5::H5File file( fileName, H5F_ACC_RDONLY );

    for ( size_t i = 0; i < file.getObjCount(); i++ )
    {
        H5std_string name = file.getObjnameByIdx( i );
        if ( name == "Channels" )
        {
            H5::Group channels = file.openGroup( name );

            // Grab the attributes
            H5::Attribute attr = channels.openAttribute( "width" );
            H5::DataType type = attr.getDataType();
            long width, height;
            attr.read( type, &width );
            attr.close();

            attr = channels.openAttribute( "height" );
            attr.read( type, &height );
            attr.close();

            int num_channels = 0;
            // Count the number of channels
            for ( size_t obj = 0; obj < channels.getNumObjs(); obj++ )
                if ( channels.getObjTypeByIdx( obj ) == H5G_DATASET )
                    num_channels++;

            int channel_idx = 0;
            for ( size_t obj = 0; obj < channels.getNumObjs(); obj++ )
            {
                if ( channels.getObjTypeByIdx( obj ) == H5G_DATASET )
                {
                    H5std_string ds_name = channels.getObjnameByIdx( obj );
                    H5::DataSet data = channels.openDataSet( ds_name );
                    uint8_t* buffer = new uint8_t[ data.getStorageSize() ];
                    data.read( buffer, data.getDataType() );
                    QByteArray qbarray( ( const char* )buffer, data.getStorageSize() );
                    data.close();

                    if ( !loadIndexedStackFFMpeg( &qbarray, img, channel_idx++, num_channels,
                                                  width, height ) )
                    {
                        v3d_msg( "Error happened in HDF file reading. Stop. \n", false );
                        delete [] buffer; // free the channel buffer before the early return
                        return false;
                    }

                    delete [] buffer;
                }
            }
        }
    }

#endif

    return true;
}
Example #6
	/**
	 * @brief Returns a pointer to a std::vector<float> containing the values of the selected variable
	 *
	 * This allocates a new std::vector<float> on the heap.  Make sure you
	 * delete it when you are done using it, or you will have a memory leak.
	 *
	 * @param variable
	 * @return std::vector<float> containing the values of the selected variable.
	 */
	std::vector<float>* HDF5FileReader::getVariable(const std::string& variable)
	{
		std::vector<float>* variableData = new std::vector<float>();

		if (this->doesVariableExist(variable))
		{
			//std::cout << "reading " << variable << std::endl;
			//get variable number
//			long variableNum = this->getVariableID(variable);

			//std::cout << "variableNum for " << variable << ": " << variableNum << std::endl;
			//get dim sizes

			H5::Group group = this->current_file->openGroup("Variables");
			//cout << "variable: " << variable << ": " << counts[0] << endl;
			H5::DataSet * dataset = new H5::DataSet(group.openDataSet(variable));
			H5::DataSpace dataspace = dataset->getSpace();
			int rank = dataspace.getSimpleExtentNdims(); //should be 1
			hsize_t count[1];
			hsize_t offset[1] = {0};
			dataspace.getSimpleExtentDims(count, NULL);	// count[0] = total number of values in this variable

			float * buffer = new float[count[0]];



			dataspace.selectHyperslab(H5S_SELECT_SET, count, offset);

			H5::DataSpace memspace( rank, count);
			memspace.selectHyperslab(H5S_SELECT_SET, count, offset);

			dataset->read(buffer, H5::PredType::NATIVE_FLOAT, memspace, dataspace);
			//std::cout << "after read" << std::endl;

			//add data to vector type, and delete original array
			variableData->reserve(count[0]);
			for (int i = 0; i < count[0]; i++)
			{
				variableData->push_back(buffer[i]);
			}
			//std::cout << "after adding to variableData vector" << std::endl;

			delete[] buffer;
			delete dataset;
			//std::cout << "finished reading " << variable << std::endl;
			//std::cout << "size of variable: " << variableData.size() << std::endl;
			//std::cout << "dimSizes[0]: " << dimSizes[0] << std::endl;

		}

		return variableData;
	}
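As the comment above stresses, the caller owns the returned vector. A minimal usage sketch; the reader setup and the variable name "bx" are placeholders, not taken from the original code:

	// Hypothetical caller; only the getVariable() ownership contract comes from the example above.
	HDF5FileReader reader;	// assumed: open/construct the reader however the class requires
	std::vector<float>* values = reader.getVariable("bx");	// "bx" is a made-up variable name
	if (!values->empty())
		std::cout << "first value: " << (*values)[0] << std::endl;
	delete values;	// caller must free it, or it leaks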
Example #7
int HDF5IO::loadInt(const std::string& GroupName, const std::string& Name)
{
    try{
      H5::Group FG = getGroup( GroupName );
      H5::DataSet DataSet = FG.openDataSet( Name.c_str());
      int x;
      DataSet.read(&x,H5::PredType::NATIVE_INT);
      FG.close();
      return x;
    }catch( const H5::GroupIException& not_found_error ){
      RUNTIME_ERROR("No dataset found in loadInt. ");
    }
}
Example #8
size_t HDF5IO::loadUlong(const std::string& GroupName, const std::string& Name)
{
  try{
    H5::Group FG = getGroup( GroupName );
    H5::DataSet DataSet = FG.openDataSet( Name.c_str() );
    size_t x;
    DataSet.read(&x, H5::PredType::NATIVE_ULONG);
    FG.close();   // close the group before returning; in the original order this line was unreachable
    return x;
  }catch( const H5::GroupIException& not_found_error ){
    INFO("In Group - " << GroupName << ", and Name is " << Name);
    RUNTIME_ERROR("No dataset found in loadUlong. ");
  }
}
Example #9
NDArray<T, Nd> NDArray<T,Nd>::ReadFromH5(const H5::DataSet& h5Dset) {
  H5::DataSpace dspace = h5Dset.getSpace(); 
  int ndim = dspace.getSimpleExtentNdims(); 
  if (ndim>Nd) 
      throw std::range_error("Too many dimensions in H5 dataset for NDArray");
  std::vector<hsize_t> dimSize(ndim);
  dspace.getSimpleExtentDims(dimSize.data());
  std::array<std::size_t, Nd> dimSizeArr;
  // Only ndim extents were written; pad any remaining trailing dimensions with 1.
  for (int i=0; i<Nd; ++i) dimSizeArr[i] = (i < ndim) ? dimSize[i] : 1;
  NDArray<T, Nd> arr(dimSizeArr);
  // Read in data here
  H5::DataType h5DType = GetH5DataType<T>();
  h5Dset.read(arr.mData, h5DType);
  return arr;
}
Example #10
ComplexType HDF5IO::loadComplex(const std::string& GroupName, const std::string& Name)
{
  try{
    H5::CompType ComplexDataType = this->openCompType("complex");
    H5::Group FG = getGroup( GroupName );
    H5::DataSet DataSet = FG.openDataSet(Name.c_str());
    ComplexType C;
    RealType RealImag[2];
    DataSet.read(RealImag, ComplexDataType);
    FG.close();
    return ComplexType(RealImag[0],RealImag[1]);
  }catch( const H5::GroupIException& not_found_error ){
    RUNTIME_ERROR("No dataset found in loadComplex. ");
  }
}
Example #11
void HDF5IO::loadStdVector(const std::string& GroupName, const std::string& Name,
    std::vector<RealType>& V)
{
  try{
    H5::Group FG = getGroup( GroupName );
    H5::DataSet DataSet = FG.openDataSet(Name.c_str());
    H5::DataSpace DataSpace = DataSet.getSpace();
    if(DataSpace.getSimpleExtentNdims() != 1)
      throw(H5::DataSpaceIException("HDF5IO::loadRealVector()","Unexpected multidimentional dataspace."));
    V.resize(DataSpace.getSimpleExtentNpoints());
    DataSet.read(V.data(),H5::PredType::NATIVE_DOUBLE);
    FG.close();
  } catch( const H5::Exception& err ){
    RUNTIME_ERROR("HDF5IO::loadRealStdVector");
  }
}
Example #12
void HDF5IO::loadStdVector(const std::string& GroupName, const std::string& Name,
    std::vector<ComplexType>& V)
{
  try{
    H5::CompType ComplexDataType = this->openCompType("complex");
    H5::Group FG = getGroup( GroupName );
    H5::DataSet DataSet = FG.openDataSet(Name.c_str());
    H5::DataSpace DataSpace = DataSet.getSpace();
    if(DataSpace.getSimpleExtentNdims() != 1)
      throw(H5::DataSpaceIException("HDF5IO::loadComplexVector()","Unexpected multidimentional dataspace."));
    V.resize(DataSpace.getSimpleExtentNpoints());
    DataSet.read(V.data(),ComplexDataType);
    FG.close();
  } catch( const H5::Exception& err ){
    RUNTIME_ERROR("HDF5IO::loadComplexStdVector");
  }
}
Example #13
void HDF5IO::loadVector(const std::string& GroupName, const std::string& Name,
  RealVectorType& V)
{
  H5::Group FG = getGroup( GroupName );
  H5::DataSet DataSet = FG.openDataSet(Name.c_str());
  H5::DataSpace DataSpace = DataSet.getSpace();
  if(DataSpace.getSimpleExtentNdims() != 1)
	throw(H5::DataSpaceIException("HDF5IO::loadRealVector()",
    "Unexpected multidimentional dataspace."));
  V.resize(DataSpace.getSimpleExtentNpoints());
  try{
    DataSet.read(V.data(),H5::PredType::NATIVE_DOUBLE);
  }catch( const H5::GroupIException& not_found_error ){
    RUNTIME_ERROR("No dataset found in loadRealVector. ");
  }
  FG.close();
}
Example #14
	/**
	 * @brief Returns a value in the flat array of the variable and index requested.
	 *
	 * Use this method on variables that have a type of int
	 *
	 * @param variable The variable in the file
	 * @param index The index in the variable's array in the file
	 *
	 * @return int of the value in the array.
	 */
	int HDF5FileReader::getVariableIntAtIndex(const std::string& variable, long index)
	{
//		long counts[1];
		int value = std::numeric_limits<int>::min();
		if (this->doesVariableExist(variable))
		{
			//std::cout << "reading " << variable << std::endl;
			//get variable number
//			long variableNum = this->getVariableID(variable);

			//get dim sizes

			H5::Group group = this->current_file->openGroup("Variables");
			//cout << "variable: " << variable << ": " << counts[0] << endl;
			H5::DataSet * dataset = new H5::DataSet(group.openDataSet(variable));
			H5::DataSpace dataspace = dataset->getSpace();
			int rank = dataspace.getSimpleExtentNdims(); //should be 1
			hsize_t count[1] = {1};
			hsize_t offset[1] = {0};
			//int ndims = dataspace.getSimpleExtentDims(count, NULL);
			int * buffer = new int[count[0]];	// int buffer: the read below uses NATIVE_INT



			dataspace.selectHyperslab(H5S_SELECT_SET, count, offset);

			hsize_t dim[] = {count[0]};
			H5::DataSpace memspace( rank, dim);
			memspace.selectHyperslab(H5S_SELECT_SET, dim, offset);

			dataset->read(buffer, H5::PredType::NATIVE_INT, memspace, dataspace);


			//add data to vector type, and delete original array
			value = buffer[0];
			delete[] buffer;
			delete dataset;
		}
		return value;
	}
Example #15
    bool readDataset1D(const H5::H5File &file, const std::string &name, std::vector<_Tp> &data)
    {
        H5::DataSet dataset = file.openDataSet(name);
        H5::DataSpace dataspace = dataset.getSpace();
        int rank = dataspace.getSimpleExtentNdims();
        hsize_t dims_out[1] = {0};
        if (rank == 1)
            dataspace.getSimpleExtentDims( dims_out, NULL);   // only safe to fill for a 1-D dataspace

        int _type;
        bool read = getNodeType(dataset,  _type);
        read &= (_type == StorageNode::SEQ);
        read &= (rank  == 1);

        if (!read)
            return read;
        data.resize(dims_out[0]);
        dataset.read(data.data(), dataset.getDataType());
        return true;
    }
Example #16
	/**
	 * Returns a pointer to a std::vector<float> containing the values of the selected variable
	 * in the range specified by the startIndex and count (the number of records to read) stored
	 * in the selected file.  This allocates a new std::vector<float> on the heap.  Make sure you
	 * delete it when you are done using it, or you will have a memory leak.
	 * @param variableID
	 * @param startIndex
	 * @param count
	 * @return std::vector<float> containing the values of the selected variable.
	 */
	std::vector<float>* HDF5FileReader::getVariable(long variable, long indexOffset, long count)
	{

		std::vector<float>* variableData = new std::vector<float>();



		//get dim sizes
		H5::Group group = this->current_file->openGroup("Variables");
		//cout << "variable: " << variable << ": " << counts[0] << endl;
		H5::DataSet * dataset = new H5::DataSet(group.openDataSet(group.getObjnameByIdx(variable)));
		H5::DataSpace dataspace = dataset->getSpace();
		int rank = dataspace.getSimpleExtentNdims(); //should be 1
		hsize_t length[1] = {count};
		hsize_t offset[1] = {indexOffset};
		//int ndims = dataspace.getSimpleExtentDims(count, NULL);
		float * buffer = new float[length[0]];



		dataspace.selectHyperslab(H5S_SELECT_SET, length, offset);

		hsize_t dim[] = {length[0]};
		hsize_t memOffset[1] = {0};	// the memory buffer starts at 0; only the file dataspace uses indexOffset
		H5::DataSpace memspace( rank, dim);
		memspace.selectHyperslab(H5S_SELECT_SET, dim, memOffset);

		dataset->read(buffer, H5::PredType::NATIVE_FLOAT, memspace, dataspace);


		//add data to vector type, and delete original array
		variableData->reserve(length[0]);
		for (int i = 0; i < length[0]; i++)
		{
			variableData->push_back(buffer[i]);
		}

		delete[] buffer;
		delete dataset;
		//std::cout << "finished reading " << variable << std::endl;
		//std::cout << "size of variable: " << variableData.size() << std::endl;
		//std::cout << "dimSizes[0]: " << dimSizes[0] << std::endl;
		return variableData;
	}
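The same ownership rule applies to this ranged overload. A short sketch; variable index 0 and the record range are arbitrary placeholders:

	// Hypothetical caller for the (variable, indexOffset, count) overload shown above.
	HDF5FileReader reader;	// assumed setup, as before
	std::vector<float>* chunk = reader.getVariable(0, 100, 50);	// 50 records starting at record 100
	std::cout << "read " << chunk->size() << " values" << std::endl;
	delete chunk;	// caller-owned, same as the by-name overload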
Example #17
void HDF5IO::loadMatrix(const std::string& GroupName, const std::string& Name,
    ComplexMatrixType& M)
{
  try{
    H5::CompType ComplexDataType = this->openCompType("complex");
    H5::Group FG = getGroup( GroupName );
    H5::DataSet DataSet = FG.openDataSet(Name.c_str());
    H5::DataSpace DataSpace = DataSet.getSpace();
    if(DataSpace.getSimpleExtentNdims() != 2)
	throw(H5::DataSpaceIException("HDF5IO::loadMatrix()","A dataspace must be precisely two-dimensional."));
    hsize_t Dims[2];
    DataSpace.getSimpleExtentDims(Dims);
    M.resize(Dims[0],Dims[1]);
    DataSet.read(M.data(), ComplexDataType);
    FG.close();
  } catch( const H5::Exception& err ){
    RUNTIME_ERROR("HDF5IO::loadComplexMatrix");
  }
}
Example #18
OXSXDataSet
DataSetIO::LoadDataSet(const std::string& filename_){
    // Get Data Set
    H5::H5File  file(filename_, H5F_ACC_RDONLY);
    H5::DataSet dataSet = file.openDataSet("observations");
 
    // read meta information
    unsigned nObs = 0;
    H5::Attribute nameAtt  = dataSet.openAttribute("observed_quantities");
    H5::Attribute countAtt  = dataSet.openAttribute("n_observables");
    H5std_string strreadbuf("");
    nameAtt.read(nameAtt.getDataType(), strreadbuf);
    countAtt.read(countAtt.getDataType(), &nObs);

    // Read data out as 1D array
    hsize_t nData = 0;
    dataSet.getSpace().getSimpleExtentDims(&nData, NULL);
    assert(nObs != 0 && nData % nObs == 0); // logic error in writing the file (this class!) if this fails.
    size_t nEntries = nData/nObs;

    std::vector<double> flatData(nData, 0);
    dataSet.read(&flatData.at(0), H5::PredType::NATIVE_DOUBLE);

    // Assemble into an OXSX data set
    OXSXDataSet oxsxDataSet;

    // Set the variable names
    oxsxDataSet.SetObservableNames(UnpackString(strreadbuf, fDelimiter));

    // then the data
    std::vector<double> oneEventObs(nObs, 0);
    for(size_t i = 0; i < nEntries; i++){
        for(size_t j = 0; j < nObs; j++)
            oneEventObs[j] = flatData.at(i * nObs + j);
        
        oxsxDataSet.AddEntry(EventData(oneEventObs));
    }
      
    return oxsxDataSet;
}
Example #19
std::vector<double> readlastrow( H5::DataSet& ds )
{
  H5::DataSpace origspace = ds.getSpace();
  // This reader assumes a 2-D (rows x cols) dataset. Fixed-size arrays replace the
  // original variable-length arrays, which are not standard C++ (and cannot be initialized).
  const int rank = 2;
  hsize_t dims[2];
  origspace.getSimpleExtentDims( dims, NULL);
  hsize_t nrows=dims[0];
  hsize_t ncols=dims[1];
  std::vector<double> returnvect( ncols );

  hsize_t targrowoffset = nrows-1;
  hsize_t targcoloffset = 0;
  hsize_t dimsmem[2] = {1,  ncols};
  H5::DataSpace memspace(rank, dimsmem);

  hsize_t offset[2] = { targrowoffset, targcoloffset };
  origspace.selectHyperslab( H5S_SELECT_SET, dimsmem, offset );
  ds.read( returnvect.data(), H5::PredType::NATIVE_DOUBLE, memspace, origspace );

  return returnvect;
}
Example #20
std::vector<double> readEloss(const std::string& path)
{
    H5::H5File file (path.c_str(), H5F_ACC_RDONLY);
    H5::DataSet ds = file.openDataSet("eloss");

    H5::DataSpace filespace = ds.getSpace();
    int ndims = filespace.getSimpleExtentNdims();
    assert(ndims == 1);
    hsize_t dim;
    filespace.getSimpleExtentDims(&dim);

    H5::DataSpace memspace (ndims, &dim);

    std::vector<double> res (dim);

    ds.read(res.data(), H5::PredType::NATIVE_DOUBLE, memspace, filespace);

    filespace.close();
    memspace.close();
    ds.close();
    file.close();
    return res;
}
Example #21
    inline void
    read_values(H5::DataSet& dataset, H5::DataSpace& data_space,
        hsize_t offset, hsize_t count, double* values)
    {
        using namespace H5;

        // Define hyperslab for file based data
        hsize_t data_offset[1] = { offset };
        hsize_t data_count[1] = { count };
        data_space.selectHyperslab(H5S_SELECT_SET, data_count, data_offset);

        // memory dataspace
        hsize_t mem_dims[1] = { count };
        DataSpace mem_space (1, mem_dims);

        // Define hyperslab for data in memory
        hsize_t mem_offset[1] = { 0 };
        hsize_t mem_count[1] = { count };
        mem_space.selectHyperslab(H5S_SELECT_SET, mem_count, mem_offset);

        // read data to memory
        dataset.read(values, PredType::NATIVE_DOUBLE, mem_space, data_space);
    }
Example #22
PcaModel PcaModel::loadStatismoModel(path h5file, PcaModel::ModelType modelType)
{
	logging::Logger logger = Loggers->getLogger("shapemodels");
	PcaModel model;

	// Load the shape or color model from the .h5 file
	string h5GroupType;
	if (modelType == ModelType::SHAPE) {
		h5GroupType = "shape";
	} else if (modelType == ModelType::COLOR) {
		h5GroupType = "color";
	}

	H5::H5File h5Model;

	try {
		h5Model = H5::H5File(h5file.string(), H5F_ACC_RDONLY);
	}
	catch (H5::Exception& e) {
		string errorMessage = "Could not open HDF5 file: " + string(e.getCDetailMsg());
		logger.error(errorMessage);
		throw errorMessage;
	}

	// Load either the shape or texture mean
	string h5Group = "/" + h5GroupType + "/model";
	H5::Group modelReconstructive = h5Model.openGroup(h5Group);

	// Read the mean
	H5::DataSet dsMean = modelReconstructive.openDataSet("./mean");
	hsize_t dims[2];
	dsMean.getSpace().getSimpleExtentDims(dims, NULL);	// dsMean.getSpace() leaks memory... maybe a hdf5 bug, maybe vlenReclaim(...) could be a fix. No idea.
	//H5::DataSpace dsp = dsMean.getSpace();
	//dsp.close();
	Loggers->getLogger("shapemodels").debug("Dimensions of the model mean: " + lexical_cast<string>(dims[0]));
	model.mean = Mat(1, dims[0], CV_32FC1); // Use a row vector: memory access is faster, and it's unclear whether a multi-row Mat would be allocated contiguously.
	dsMean.read(model.mean.ptr<float>(0), H5::PredType::NATIVE_FLOAT);
	model.mean = model.mean.t(); // Transpose it to a col-vector
	dsMean.close();

	// Read the eigenvalues
	dsMean = modelReconstructive.openDataSet("./pcaVariance");
	dsMean.getSpace().getSimpleExtentDims(dims, NULL);
	Loggers->getLogger("shapemodels").debug("Dimensions of the pcaVariance: " + lexical_cast<string>(dims[0]));
	model.eigenvalues = Mat(1, dims[0], CV_32FC1);
	dsMean.read(model.eigenvalues.ptr<float>(0), H5::PredType::NATIVE_FLOAT);
	model.eigenvalues = model.eigenvalues.t();
	dsMean.close();

	// Read the PCA basis matrix
	dsMean = modelReconstructive.openDataSet("./pcaBasis");
	dsMean.getSpace().getSimpleExtentDims(dims, NULL);
	Loggers->getLogger("shapemodels").debug("Dimensions of the PCA basis matrix: " + lexical_cast<string>(dims[0]) + ", " + lexical_cast<string>(dims[1]));
	model.pcaBasis = Mat(dims[0], dims[1], CV_32FC1);
	dsMean.read(model.pcaBasis.ptr<float>(0), H5::PredType::NATIVE_FLOAT);
	dsMean.close();

	modelReconstructive.close(); // close the model-group

	// Read the noise variance (not implemented)
	/*dsMean = modelReconstructive.openDataSet("./noiseVariance");
	float noiseVariance = 10.0f;
	dsMean.read(&noiseVariance, H5::PredType::NATIVE_FLOAT);
	dsMean.close(); */

	// Read the triangle-list
	string representerGroupName = "/" + h5GroupType + "/representer";
	H5::Group representerGroup = h5Model.openGroup(representerGroupName);
	dsMean = representerGroup.openDataSet("./reference-mesh/triangle-list");
	dsMean.getSpace().getSimpleExtentDims(dims, NULL);
	Loggers->getLogger("shapemodels").debug("Dimensions of the triangle-list: " + lexical_cast<string>(dims[0]) + ", " + lexical_cast<string>(dims[1]));
	Mat triangles(dims[0], dims[1], CV_32SC1);
	dsMean.read(triangles.ptr<int>(0), H5::PredType::NATIVE_INT32);
	dsMean.close();
	representerGroup.close();
	model.triangleList.resize(triangles.rows);
	for (unsigned int i = 0; i < model.triangleList.size(); ++i) {
		model.triangleList[i][0] = triangles.at<int>(i, 0);
		model.triangleList[i][1] = triangles.at<int>(i, 1);
		model.triangleList[i][2] = triangles.at<int>(i, 2);
	}

	// Load the landmarks mappings:
	// load the reference-mesh
	representerGroup = h5Model.openGroup(representerGroupName);
	dsMean = representerGroup.openDataSet("./reference-mesh/vertex-coordinates");
	dsMean.getSpace().getSimpleExtentDims(dims, NULL);
	Loggers->getLogger("shapemodels").debug("Dimensions of the reference-mesh vertex-coordinates matrix: " + lexical_cast<string>(dims[0]) + ", " + lexical_cast<string>(dims[1]));
	Mat referenceMesh(dims[0], dims[1], CV_32FC1);
	dsMean.read(referenceMesh.ptr<float>(0), H5::PredType::NATIVE_FLOAT);
	dsMean.close();
	representerGroup.close();

	// convert to 3 vectors with the x, y and z coordinates for easy searching
	vector<float> refx(referenceMesh.col(0).clone());
	vector<float> refy(referenceMesh.col(1).clone());
	vector<float> refz(referenceMesh.col(2).clone());

	// load the landmarks info (mapping name <-> reference (x, y, z)-coords)
	H5::Group landmarksGroup = h5Model.openGroup("/metadata/landmarks");
	dsMean = landmarksGroup.openDataSet("./text");
	
	H5std_string outputString;
	Loggers->getLogger("shapemodels").debug("Reading landmark information from the model.");
	dsMean.read(outputString, dsMean.getStrType());
	dsMean.close();
	landmarksGroup.close();
	vector<string> landmarkLines;
	boost::split(landmarkLines, outputString, boost::is_any_of("\n"), boost::token_compress_on);
	for (const auto& l : landmarkLines) {
		if (l == "") {
			continue;
		}
		vector<string> line;
		boost::split(line, l, boost::is_any_of(" "), boost::token_compress_on);
		string name = line[0];
		int visibility = lexical_cast<int>(line[1]);
		float x = lexical_cast<float>(line[2]);
		float y = lexical_cast<float>(line[3]);
		float z = lexical_cast<float>(line[4]);
		// Find the x, y and z values in the reference
		const auto ivx = std::find(begin(refx), end(refx), x);
		const auto ivy = std::find(begin(refy), end(refy), y);
		const auto ivz = std::find(begin(refz), end(refz), z);
		// TODO Check for .end()!
		const auto vertexIdX = std::distance(begin(refx), ivx);
		const auto vertexIdY = std::distance(begin(refy), ivy);
		const auto vertexIdZ = std::distance(begin(refz), ivz);
		// assert vx=vy=vz
		// Hmm this is not perfect. If there's another vertex where 1 or 2 coords are the same, it fails.
		// We should do the search differently: Find _all_ the vertices that are equal, then take the one that has the right x, y and z.
		model.landmarkVertexMap.insert(make_pair(name, vertexIdX));
	
	}

	h5Model.close();
	return model;
}
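The TODO near the end of the landmark loop proposes matching all three coordinates of a single vertex instead of searching each coordinate list independently. A hedged sketch of that idea, reusing refx/refy/refz and the per-landmark name, x, y, z from the loop above:

	// Sketch only: find the first vertex whose x, y and z all equal the landmark coordinates.
	int vertexId = -1;	// -1 means "no matching vertex found"
	for (size_t v = 0; v < refx.size(); ++v) {
		if (refx[v] == x && refy[v] == y && refz[v] == z) {
			vertexId = static_cast<int>(v);
			break;
		}
	}
	if (vertexId >= 0) {
		model.landmarkVertexMap.insert(make_pair(name, vertexId));
	}
	// Exact float comparison mirrors the std::find calls above; a small tolerance
	// could be substituted if the coordinates are not stored verbatim.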
Example #23
bool TStellarData::load(const std::string& fname, const std::string& group, const std::string& dset,
			double err_floor, double default_EBV) {
	H5::H5File *file = H5Utils::openFile(fname);
	if(file == NULL) { return false; }
	
	H5::Group *gp = H5Utils::openGroup(file, group);
	if(gp == NULL) {
		delete file;
		return false;
	}
	
	H5::DataSet dataset = gp->openDataSet(dset);
	
	/*
	 *  Photometry
	 */
	
	// Datatype
	hsize_t nbands = NBANDS;
	H5::ArrayType f4arr(H5::PredType::NATIVE_FLOAT, 1, &nbands);
	H5::ArrayType u4arr(H5::PredType::NATIVE_UINT32, 1, &nbands);
	H5::CompType dtype(sizeof(TFileData));
	dtype.insertMember("obj_id", HOFFSET(TFileData, obj_id), H5::PredType::NATIVE_UINT64);
	dtype.insertMember("l", HOFFSET(TFileData, l), H5::PredType::NATIVE_DOUBLE);
	dtype.insertMember("b", HOFFSET(TFileData, b), H5::PredType::NATIVE_DOUBLE);
	dtype.insertMember("mag", HOFFSET(TFileData, mag), f4arr);
	dtype.insertMember("err", HOFFSET(TFileData, err), f4arr);
	dtype.insertMember("maglimit", HOFFSET(TFileData, maglimit), f4arr);
	dtype.insertMember("nDet", HOFFSET(TFileData, N_det), u4arr);
	dtype.insertMember("EBV", HOFFSET(TFileData, EBV), H5::PredType::NATIVE_FLOAT);
	
	// Dataspace
	hsize_t length;
	H5::DataSpace dataspace = dataset.getSpace();
	dataspace.getSimpleExtentDims(&length);
	
	// Read in dataset
	TFileData* data_buf = new TFileData[length];
	dataset.read(data_buf, dtype);
	//std::cerr << "# Read in dimensions." << std::endl;
	
	// Fix magnitude limits
	for(int n=0; n<nbands; n++) {
		float tmp;
		float maglim_replacement = 25.;
		
		// Find the 95th percentile of valid magnitude limits
		std::vector<float> maglimit;
		for(hsize_t i=0; i<length; i++) {
			tmp = data_buf[i].maglimit[n];
			
			if((tmp > 10.) && (tmp < 40.) && (!isnan(tmp))) {
				maglimit.push_back(tmp);
			}
		}
		
		//std::sort(maglimit.begin(), maglimit.end());
		if(maglimit.size() != 0) {
			maglim_replacement = percentile(maglimit, 95.);
		}
		
		// Replace missing magnitude limits with the 95th percentile magnitude limit
		for(hsize_t i=0; i<length; i++) {
			tmp = data_buf[i].maglimit[n];
			
			if(!((tmp > 10.) && (tmp < 40.)) || isnan(tmp)) {
				//std::cout << i << ", " << n << ":  " << tmp << std::endl;
				data_buf[i].maglimit[n] = maglim_replacement;
			}
		}
	}
	
	//int n_filtered = 0;
	//int n_M_dwarfs = 0;
	
	TMagnitudes mag_tmp;
	for(size_t i=0; i<length; i++) {
		mag_tmp.set(data_buf[i], err_floor);
		star.push_back(mag_tmp);
		
		//int n_informative = 0;
		
		// Remove g-band
		//mag_tmp.m[0] = 0.;
		//mag_tmp.err[0] = 1.e10;
		
		//double g_err = mag_tmp.err[0];
		//mag_tmp.err[0] = sqrt(g_err*g_err + 0.1*0.1);
		
		// Filter bright end
                // TODO: Put this into query_lsd.py
		/*for(int j=0; j<NBANDS; j++) {
			if((mag_tmp.err[j] < 1.e9) && (mag_tmp.m[j] < 14.)) {
				mag_tmp.err[j] = 1.e10;
				mag_tmp.m[j] = 0.;
			}
			
			if(mag_tmp.err[j] < 1.e9) {
				n_informative++;
			}
		}*/
		
		// Filter M dwarfs based on color cut
		//bool M_dwarf = false;
		/*bool M_dwarf = true;
		
		double A_g = 3.172;
		double A_r = 2.271;
		double A_i = 1.682;
		
		if(mag_tmp.m[0] - A_g / (A_g - A_r) * (mag_tmp.m[0] - mag_tmp.m[1] - 1.2) > 20.) {
			M_dwarf = false;
		} else if(mag_tmp.m[1] - mag_tmp.m[2] - (A_r - A_i) / (A_g - A_r) * (mag_tmp.m[0] - mag_tmp.m[1]) < 0.) {
			M_dwarf = false;
		} else {
			n_M_dwarfs++;
		}
		*/
		
		/*if(n_informative >= 4) { //&& (!M_dwarf)) {
			star.push_back(mag_tmp);
		} else {
			n_filtered++;
		}*/
	}
	
	//std::cerr << "# of stars filtered: " << n_filtered << std::endl;
	//std::cerr << "# of M dwarfs: " << n_M_dwarfs << std::endl;
	
	/*
	 *  Attributes
	 */
	
	H5::Attribute att = dataset.openAttribute("healpix_index");
	H5::DataType att_dtype = H5::PredType::NATIVE_UINT64;
	att.read(att_dtype, reinterpret_cast<void*>(&healpix_index));
	
	att = dataset.openAttribute("nested");
	att_dtype = H5::PredType::NATIVE_UCHAR;
	att.read(att_dtype, reinterpret_cast<void*>(&nested));
	
	att = dataset.openAttribute("nside");
	att_dtype = H5::PredType::NATIVE_UINT32;
	att.read(att_dtype, reinterpret_cast<void*>(&nside));
	
	att = dataset.openAttribute("l");
	att_dtype = H5::PredType::NATIVE_DOUBLE;
	att.read(att_dtype, reinterpret_cast<void*>(&l));
	
	att = dataset.openAttribute("b");
	att_dtype = H5::PredType::NATIVE_DOUBLE;
	att.read(att_dtype, reinterpret_cast<void*>(&b));
	
	att = dataset.openAttribute("EBV");
	att_dtype = H5::PredType::NATIVE_DOUBLE;
	att.read(att_dtype, reinterpret_cast<void*>(&EBV));
	
	// TEST: Force l, b to anticenter
	//l = 180.;
	//b = 0.;
	
	if((EBV <= 0.) || (EBV > default_EBV) || isnan(EBV)) { EBV = default_EBV; }
	
	delete[] data_buf;
	delete gp;
	delete file;
	
	return true;
}
Example #24
bool ossim_hdf5::crossesDateline( H5::DataSet& dataset, const ossimIrect& validRect )
{
   bool result = false;

   H5::DataSpace dataspace = dataset.getSpace();
         
   // Number of dimensions of the input dataspace:
   const ossim_int32 DIM_COUNT = dataspace.getSimpleExtentNdims();
         
   if ( DIM_COUNT == 2 )
   {
      const ossim_uint32 ROWS = validRect.height();
      const ossim_uint32 COLS = validRect.width();
      
      // Get the extents. Assuming dimensions are same for lat lon dataset. 
      std::vector<hsize_t> dimsOut(DIM_COUNT);
      dataspace.getSimpleExtentDims( &dimsOut.front(), 0 );

      if ( (ROWS <= dimsOut[0]) && (COLS <= dimsOut[1]) )
      {
         std::vector<hsize_t> inputCount(DIM_COUNT);
         std::vector<hsize_t> inputOffset(DIM_COUNT);
         
         inputCount[0] = 1;    // row
         inputCount[1] = COLS; // col
         
         // Output dataspace dimensions.
         const ossim_int32 OUT_DIM_COUNT = 3;
         std::vector<hsize_t> outputCount(OUT_DIM_COUNT);
         outputCount[0] = 1; // single band
         outputCount[1] = 1; // single line
         outputCount[2] = COLS; // single sample
               
         // Output dataspace offset.
         std::vector<hsize_t> outputOffset(OUT_DIM_COUNT);
         outputOffset[0] = 0;
         outputOffset[1] = 0;
         outputOffset[2] = 0;
               
         ossimScalarType scalar = ossim_hdf5::getScalarType( &dataset );
         if ( scalar == OSSIM_FLOAT32 )
         {
            // See if we need to swap bytes:
            ossimEndian* endian = 0;
            if ( ( ossim::byteOrder() != ossim_hdf5::getByteOrder( &dataset ) ) )
            {
               endian = new ossimEndian();
            }

            // Native type:
            H5::DataType datatype = dataset.getDataType();
                  
            // Output dataspace always the same one line.
            H5::DataSpace bufferDataSpace( OUT_DIM_COUNT, &outputCount.front());
            bufferDataSpace.selectHyperslab( H5S_SELECT_SET,
                                             &outputCount.front(),
                                             &outputOffset.front() );

            //---
            // Dataset sample has NULL lines at the end so scan for valid rect.
            // Use "<= -999" for test as per NOAA as it seems the NULL value is
            // fuzzy.  e.g. -999.3.
            //---

            // Buffer to hold a line:
            std::vector<ossim_float32> lineBuffer(validRect.width());

            // Read the first line:
            inputOffset[0] = static_cast<hsize_t>(validRect.ul().y);
            inputOffset[1] = static_cast<hsize_t>(validRect.ul().x);
            dataspace.selectHyperslab( H5S_SELECT_SET,
                                       &inputCount.front(),
                                       &inputOffset.front() );
            dataset.read( &(lineBuffer.front()), datatype, bufferDataSpace, dataspace );

            if ( endian )
            {
               // If the endian pointer is initialized(not zero) swap the bytes.
               endian->swap( &(lineBuffer.front()), COLS );
            }

            // Test the first line:
            result = ossim_hdf5::crossesDateline( lineBuffer );

            if ( !result )
            {
               // Test the last line:
               inputOffset[0] = static_cast<hsize_t>(validRect.ll().y);
               inputOffset[1] = static_cast<hsize_t>(validRect.ll().x);
               dataspace.selectHyperslab( H5S_SELECT_SET,
                                          &inputCount.front(),
                                          &inputOffset.front() );
               dataset.read( &(lineBuffer.front()), datatype, bufferDataSpace, dataspace );

               result = ossim_hdf5::crossesDateline( lineBuffer );
            }

            if ( endian )
            {
               delete endian;
               endian = 0;
            }
         }
         else // Matches: if ( scalar == OSSIM_FLOAT32 ){...}
         {
            ossimNotify(ossimNotifyLevel_WARN)
               << "ossim_hdf5::crossesDateline WARNING!"
               << "\nUnhandled scalar type: "
               << ossimScalarTypeLut::instance()->getEntryString( scalar )
               << std::endl;
         }
         
      } // Matches: if ( dimsOut...
      
   } // Matches: if ( IN_DIM_COUNT == 2 )
   
   dataspace.close();
   
   return result;
   
} // End: ossim_hdf5::crossesDateline(...)
Example #25
ossimRefPtr<ossimProjection> ossim_hdf5::getBilinearProjection(
   H5::DataSet& latDataSet, H5::DataSet& lonDataSet, const ossimIrect& validRect )
{
   ossimRefPtr<ossimProjection> proj = 0;

   // Get dataspace of the dataset.
   H5::DataSpace latDataSpace = latDataSet.getSpace();
   H5::DataSpace lonDataSpace = lonDataSet.getSpace();
         
   // Number of dimensions of the input dataspace:
   const ossim_int32 DIM_COUNT = latDataSpace.getSimpleExtentNdims();
         
   if ( DIM_COUNT == 2 )
   {
      // Get the extents. Assuming dimensions are same for lat lon dataset. 
      std::vector<hsize_t> dimsOut(DIM_COUNT);
      latDataSpace.getSimpleExtentDims( &dimsOut.front(), 0 );

      if ( dimsOut[0] && dimsOut[1] )
      {
         std::vector<hsize_t> inputCount(DIM_COUNT);
         std::vector<hsize_t> inputOffset(DIM_COUNT);
               
         inputOffset[0] = 0;
         inputOffset[1] = 0;
               
         inputCount[0] = 1;
         inputCount[1] = 1;
               
         // Output dataspace dimensions.
         const ossim_int32 OUT_DIM_COUNT = 3;
         std::vector<hsize_t> outputCount(OUT_DIM_COUNT);
         outputCount[0] = 1; // single band
         outputCount[1] = 1; // single line
         outputCount[2] = 1; // single sample
               
         // Output dataspace offset.
         std::vector<hsize_t> outputOffset(OUT_DIM_COUNT);
         outputOffset[0] = 0;
         outputOffset[1] = 0;
         outputOffset[2] = 0;
               
         ossimScalarType scalar = ossim_hdf5::getScalarType( &latDataSet );
         if ( scalar == OSSIM_FLOAT32 )
         {
            // See if we need to swap bytes:
            ossimEndian* endian = 0;
            if ( ( ossim::byteOrder() != ossim_hdf5::getByteOrder( &latDataSet ) ) )
            {
               endian = new ossimEndian();
            }

            // Native type:
            H5::DataType latDataType = latDataSet.getDataType();
            H5::DataType lonDataType = lonDataSet.getDataType();
                  
            std::vector<ossimDpt> ipts;
            std::vector<ossimGpt> gpts;
            ossimGpt gpt(0.0, 0.0, 0.0); // Assuming WGS84...
            ossim_float32 latValue = 0.0;
            ossim_float32 lonValue = 0.0;
                  
            // Only grab every 256th value.:
            const ossim_int32 GRID_SIZE = 256;
                  
            // Output dataspace always the same one pixel.
            H5::DataSpace bufferDataSpace( OUT_DIM_COUNT, &outputCount.front());
            bufferDataSpace.selectHyperslab( H5S_SELECT_SET,
                                             &outputCount.front(),
                                             &outputOffset.front() );

            //---
            // Dataset sample has NULL lines at the end so scan for valid rect.
            // Use "<= -999" for test as per NOAA as it seems the NULL value is
            // fuzzy.  e.g. -999.3.
            //---
            const ossim_float32 NULL_VALUE = -999.0;

            //---
            // Get the tie points within the valid rect:
            //---
            ossimDpt ipt = validRect.ul();
            while ( ipt.y <= validRect.lr().y )
            {
               inputOffset[0] = static_cast<hsize_t>(ipt.y);
               
               // Sample loop:
               ipt.x = validRect.ul().x;
               while ( ipt.x <= validRect.lr().x )
               {
                  inputOffset[1] = static_cast<hsize_t>(ipt.x);
                  
                  latDataSpace.selectHyperslab( H5S_SELECT_SET,
                                                &inputCount.front(),
                                                &inputOffset.front() );
                  lonDataSpace.selectHyperslab( H5S_SELECT_SET,
                                                &inputCount.front(),
                                                &inputOffset.front() );
                  
                  // Read data from file into the buffer.
                  latDataSet.read( &latValue, latDataType, bufferDataSpace, latDataSpace );
                  lonDataSet.read( &lonValue, lonDataType, bufferDataSpace, lonDataSpace );
                  
                  if ( endian )
                  {
                     // If the endian pointer is initialized(not zero) swap the bytes.
                     endian->swap( latValue );
                     endian->swap( lonValue );  
                  }
                  
                  if ( ( latValue > NULL_VALUE ) && ( lonValue > NULL_VALUE ) )
                  {
                     gpt.lat = latValue;
                     gpt.lon = lonValue;
                     gpts.push_back( gpt );
                     
                     // Add the image point subtracting the image offset.
                     ossimIpt shiftedIpt = ipt - validRect.ul();
                     ipts.push_back( shiftedIpt );
                  }

                  // Go to next point:
                  if ( ipt.x < validRect.lr().x )
                  {
                     ipt.x += GRID_SIZE;
                     if ( ipt.x > validRect.lr().x )
                     {
                        ipt.x = validRect.lr().x; // Clamp to last sample.
                     }
                  }
                  else
                  {  
                     break; // At the end:
                  }
                  
               } // End sample loop.

               if ( ipt.y < validRect.lr().y )
               {
                  ipt.y += GRID_SIZE;
                  if ( ipt.y > validRect.lr().y )
                  {
                     ipt.y = validRect.lr().y; // Clamp to last line.
                  }
               }
               else
               {  
                  break; // At the end:
               }
               
            } // End line loop.
                  
            if ( ipts.size() )
            {
               // Create the projection:
               ossimRefPtr<ossimBilinearProjection> bp = new ossimBilinearProjection();
                     
               // Add the tie points:
               bp->setTiePoints( ipts, gpts );
                     
               // Assign to output projection:
               proj = bp.get();
            }

            // Cleanup:
            if ( endian )
            {
               delete endian;
               endian = 0;
            }
         }
         else // Matches: if ( scalar == OSSIM_FLOAT32 ){...}
         {
            ossimNotify(ossimNotifyLevel_WARN)
               << "ossim_hdf5::getBilinearProjection WARNING!"
               << "\nUnhandled scalar type: "
               << ossimScalarTypeLut::instance()->getEntryString( scalar )
               << std::endl;
         }
               
      } // Matches: if ( dimsOut...
            
   } // Matches: if ( IN_DIM_COUNT == 2 )
         
   latDataSpace.close();
   lonDataSpace.close();
   
   return proj;
   
} // End: ossim_hdf5::getBilinearProjection()
Example #26
bool ossim_hdf5::getValidBoundingRect( H5::DataSet& dataset,
                                       const std::string& name,
                                       ossimIrect& rect )
{
   bool result = false;
   H5::DataSpace imageDataspace = dataset.getSpace();
   const ossim_int32 IN_DIM_COUNT = imageDataspace.getSimpleExtentNdims();
         
   if ( IN_DIM_COUNT == 2 )
   {
      // Get the extents. Assuming dimensions are same for lat lon dataset. 
      std::vector<hsize_t> dimsOut(IN_DIM_COUNT);
      imageDataspace.getSimpleExtentDims( &dimsOut.front(), 0 );

      if ( dimsOut[0] && dimsOut[1] )
      {
         
         //---
         // Capture the rectangle:
         // dimsOut[0] is height, dimsOut[1] is width:
         //---
         rect = ossimIrect( 0, 0,
                            static_cast<ossim_int32>( dimsOut[1]-1 ),
                            static_cast<ossim_int32>( dimsOut[0]-1 ) );
         
         const ossim_int32 WIDTH  = rect.width();
               
         std::vector<hsize_t> inputCount(IN_DIM_COUNT);
         std::vector<hsize_t> inputOffset(IN_DIM_COUNT);
         
         inputOffset[0] = 0;
         inputOffset[1] = 0;
         
         inputCount[0] = 1;
         inputCount[1] = WIDTH;
         
         // Output dataspace dimensions.
         const ossim_int32 OUT_DIM_COUNT = 3;
         std::vector<hsize_t> outputCount(OUT_DIM_COUNT);
         outputCount[0] = 1;     // single band
         outputCount[1] = 1;     // single line
         outputCount[2] = WIDTH; // whole line
               
         // Output dataspace offset.
         std::vector<hsize_t> outputOffset(OUT_DIM_COUNT);
         outputOffset[0] = 0;
         outputOffset[1] = 0;
         outputOffset[2] = 0;
               
         ossimScalarType scalar = ossim_hdf5::getScalarType( &dataset );
         if ( scalar == OSSIM_FLOAT32 )
         {
            // See if we need to swap bytes:
            ossimEndian* endian = 0;
            if ( ( ossim::byteOrder() != ossim_hdf5::getByteOrder( &dataset ) ) )
            {
               endian = new ossimEndian();
            }

            // Native type:
            H5::DataType datatype = dataset.getDataType();
                  
            // Output dataspace always the same one line.
            H5::DataSpace bufferDataSpace( OUT_DIM_COUNT, &outputCount.front());
            bufferDataSpace.selectHyperslab( H5S_SELECT_SET,
                                             &outputCount.front(),
                                             &outputOffset.front() );

            //---
            // Dataset sample has NULL lines at the end so scan for valid rect.
            // Use "<= -999" for test as per NOAA as it seems the NULL value is
            // fuzzy.  e.g. -999.3.
            //---
            const ossim_float32 NULL_VALUE = -999.0;

            //---
            // VIIRS Radiance data has a -1.5e-9 in the first column.
            // Treat this as a null.
            //---
            const ossim_float32 NULL_VALUE2 = ( name == "/All_Data/VIIRS-DNB-SDR_All/Radiance" )
               ? -1.5e-9 : NULL_VALUE;
            const ossim_float32 TOLERANCE = 0.1e-9; // For ossim::almostEqual()

            // Hold one line:
            std::vector<ossim_float32> values( WIDTH );

            // Find the ul pixel:
            ossimIpt ulIpt = rect.ul();
            bool found = false;
                  
            // Line loop to find upper left pixel:
            while ( ulIpt.y <= rect.lr().y )
            {
               inputOffset[0] = static_cast<hsize_t>(ulIpt.y);
               imageDataspace.selectHyperslab( H5S_SELECT_SET,
                                               &inputCount.front(),
                                               &inputOffset.front() );
               
               // Read data from file into the buffer.
               dataset.read( (void*)&values.front(), datatype, bufferDataSpace, imageDataspace );
               
               if ( endian )
               {
                  // If the endian pointer is initialized(not zero) swap the bytes.
                  endian->swap( scalar, (void*)&values.front(), WIDTH );
               }
               
               // Sample loop:
               ulIpt.x = rect.ul().x;
               ossim_int32 index = 0;
               while ( ulIpt.x <= rect.lr().x )
               {
                  if ( !ossim::almostEqual(values[index], NULL_VALUE2, TOLERANCE) &&
                       ( values[index] > NULL_VALUE ) )
                  {
                     found = true; // Found valid pixel.
                     break;
                  }
                  ++ulIpt.x;
                  ++index;
                     
               } // End: sample loop
                     
               if ( found )
               {
                  break;
               }

               ++ulIpt.y;
                     
            } // End line loop to find ul pixel:

            // Find the lower right pixel:
            ossimIpt lrIpt = rect.lr();
            found = false;
                  
            // Line loop to find last pixel:
            while ( lrIpt.y >= rect.ul().y )
            {
               inputOffset[0] = static_cast<hsize_t>(lrIpt.y);
               imageDataspace.selectHyperslab( H5S_SELECT_SET,
                                               &inputCount.front(),
                                               &inputOffset.front() );
               
               // Read data from file into the buffer.
               dataset.read( (void*)&values.front(), datatype, bufferDataSpace, imageDataspace );

               if ( endian )
               {
                  // If the endian pointer is initialized(not zero) swap the bytes.
                  endian->swap( scalar, (void*)&values.front(), WIDTH );
               }
            
               // Sample loop:
               lrIpt.x = rect.lr().x;
               ossim_int32 index = WIDTH-1;
               
               while ( lrIpt.x >= rect.ul().x )
               {
                  if ( !ossim::almostEqual(values[index], NULL_VALUE2, TOLERANCE) &&
                       ( values[index] > NULL_VALUE ) )
                  {
                     found = true; // Found valid pixel.
                     break;
                  }
                  --lrIpt.x;
                  --index;
                     
               } // End: sample loop
                     
               if ( found )
               {
                  break;
               }

               --lrIpt.y;
                     
            } // End line loop to find lower right pixel.

            rect = ossimIrect( ulIpt, lrIpt );

            // Cleanup:
            if ( endian )
            {
               delete endian;
               endian = 0;
            }

            result = true;
            
         } 
         else // Matches: if ( scalar == OSSIM_FLOAT32 ){...}
         {
            ossimNotify(ossimNotifyLevel_WARN)
               << "ossim_hdf5::getBoundingRect WARNING!"
               << "\nUnhandled scalar type: "
               << ossimScalarTypeLut::instance()->getEntryString( scalar )
               << std::endl;
         }
               
      } // Matches: if ( dimsOut...
            
   } // Matches: if ( IN_DIM_COUNT == 2 )
         
   imageDataspace.close();

   return result;
   
} // End: ossim_hdf5::getBoundingRect(...)
Example #27
void Bundle2::loadGeometry_(H5::H5File& file) {
	H5::Group geometryGroup = file.openGroup("/Geometry");

	// Loading poses
	H5::DataSet posesDataSet = geometryGroup.openDataSet("Poses");
	double* posesData = (double*)malloc(frames_.size()*12*sizeof(double));
	posesDataSet.read((void*)posesData, H5::PredType::NATIVE_DOUBLE, H5::DataSpace::ALL, H5::DataSpace::ALL);
	posesDataSet.close();

	size_t i = 0;
	for(deque<Frame*>::iterator it = frames_.begin(); it != frames_.end(); ++it) {
		Pose* pose = new Pose;
		pose->sett(core::RealPoint3D<double>(posesData[i*12], posesData[i*12 + 1], posesData[i*12 + 2]));

		core::Matrix<double> R(3, 3);
		R[0][0] = posesData[i*12 + 3]; R[1][0] = posesData[i*12 + 4]; R[2][0] = posesData[i*12 + 5];
		R[0][1] = posesData[i*12 + 6]; R[1][1] = posesData[i*12 + 7]; R[2][1] = posesData[i*12 + 8];
		R[0][2] = posesData[i*12 + 9]; R[1][2] = posesData[i*12 + 10]; R[2][2] = posesData[i*12 + 11];

		pose->setR(R);
		pose->calcEulerAngles();
		pose->setorientationSynchronWithAngles(true);
		pose->setderivationsSynchronWithAngles(false);

		(*it)->setpose(pose);

		++i;
	}
	free((void*)posesData);

	// Loading points
	H5::DataSet pointsDataSet = geometryGroup.openDataSet("Points");
	double* pointsData = (double*)malloc(tracks_.size()*3*sizeof(double));
	pointsDataSet.read((void*)pointsData, H5::PredType::NATIVE_DOUBLE, H5::DataSpace::ALL, H5::DataSpace::ALL);
	pointsDataSet.close();

	i = 0;
	for(deque<Track*>::iterator it = tracks_.begin(); it != tracks_.end(); it++) {
		Point* point = new Point(core::RealPoint3D<double>(pointsData[i*3], pointsData[i*3 + 1], pointsData[i*3 + 2]));
		(*it)->setpoint(point);

		++i;
	}
	free((void*)pointsData);

	// Loading inlier information
	H5::DataSet inliersDataSet = geometryGroup.openDataSet("Inliers");
	hvl_t* inliersData = (hvl_t*)malloc(frames_.size()*sizeof(hvl_t));
	H5::VarLenType memType(&H5::PredType::NATIVE_UCHAR);
	inliersDataSet.read((void*)inliersData, memType, H5::DataSpace::ALL, H5::DataSpace::ALL);
	memType.close();
	inliersDataSet.close();

	i = 0;
	for(deque<Frame*>::iterator it = frames_.begin(); it != frames_.end(); it++) {
		unsigned char* inl = (unsigned char*)(inliersData[i].p);

        size_t k = 0;
		for(size_t j = 0; j < (*it)->size(); ++j) {
            View& v = (**it)[j];
            for(unsigned int cam = 0; cam < v.numCameras(); ++cam) {
                if(v.inCamera(cam)) {
                    Ray ray;
                    if(inl[k]) ray.setinlier(true);
                    else ray.setinlier(false);

                    v.addRay(cam, ray);
                    ++k;
                }
            }
		}

		++i;
	}

	for(size_t j = 0; j < frames_.size(); ++j) free(inliersData[j].p);
	free((void*)inliersData);

	// Loading curves if they exists
	bool curvesFound = false;
	const hsize_t maxObjs = geometryGroup.getNumObjs();
	for(hsize_t obj = 0; obj < maxObjs; ++obj) {
		string objName = geometryGroup.getObjnameByIdx(obj);
		if(objName == string("Curves")) curvesFound = true;
	}

	if(curvesFound) {
		H5::DataSet curvesDataSet = geometryGroup.openDataSet("Curves");

		hsize_t curvesDim[1];
		H5::DataSpace curvesDS = curvesDataSet.getSpace();
		curvesDS.getSimpleExtentDims(curvesDim);
		curvesDS.close();

		hvl_t* curvesData = (hvl_t*)malloc(curvesDim[0]*sizeof(hvl_t));
		H5::VarLenType memType(&H5::PredType::NATIVE_HSIZE);

		curvesDataSet.read((void*)curvesData, memType, H5::DataSpace::ALL, H5::DataSpace::ALL);

		memType.close();
		curvesDataSet.close();

		for(size_t c = 0; c < curvesDim[0]; ++c) {
			const size_t cur_c = addCurve();

			for(size_t p = 0; p < curvesData[c].len; ++p) {
				curves_[cur_c].addPoint(((size_t*)(curvesData[c].p))[p]);
			}
		}

		for(size_t i = 0; i < curvesDim[0]; ++i) free(curvesData[i].p);
		free((void*)curvesData);
	}

	geometryGroup.close();
}
Example #28
//--------------------------------------------------------------------------------------------------
/// 
//--------------------------------------------------------------------------------------------------
TEST(DISABLED_HDFTests, BasicFileRead)
{
    std::string file_path = "D:/ResInsight/SourSim/PKMUNK_NOV_TEST_SS.sourpre.00001";

    try
    {
        H5::Exception::dontPrint();                                // Turn off auto-printing of failures to handle the errors appropriately

        H5::H5File file(file_path.c_str(), H5F_ACC_RDONLY);

        {
            H5::Group        timestep = file.openGroup("Timestep_00001");
            H5::Attribute    attr     = timestep.openAttribute("timestep");

            double timestep_value = 0.0;

            H5::DataType type = attr.getDataType();
            attr.read(type, &timestep_value);

            //std::cout << "Timestep value " << timestep_value << std::endl;
            EXPECT_NEAR(timestep_value, 1.0, 1e-1);
        }


        {
            // Group size is not an attribute!

            H5::Group GridFunctions = file.openGroup("Timestep_00001/GridParts/GridPart_00000/GridFunctions");

            hsize_t   group_size = GridFunctions.getNumObjs();

            //std::cout << "GridFunctions group_size " << group_size << std::endl;
            EXPECT_EQ(size_t(20), group_size);

/*            for (hsize_t i = 0; i < group_size; i++)
            {
                // H5std_string node_name = GridFunctions.getObjnameByIdx(i);     // crashes on VS2017 due to lib/heap/runtime differences to HDF5 VS2015 lib

                std::string node_name;
                node_name.resize(1024);

                ssize_t slen = GridFunctions.getObjnameByIdx(i, &node_name[0], 1023);

                node_name.resize(slen + 1);

                std::cout << "GridFunctions sub-node name " << node_name << std::endl;
            }
*/

            std::string first_subnode(1024, '\0');

            ssize_t slen = GridFunctions.getObjnameByIdx(0, &first_subnode[0], 1023);
            first_subnode.resize(slen + 1);

            EXPECT_TRUE(first_subnode.compare(0, slen, "GridFunction_00002") == 0);
        }


        {
            H5::Group        GridFunction_00002 = file.openGroup("Timestep_00001/GridParts/GridPart_00000/GridFunctions/GridFunction_00002");
            H5::Attribute    attr = GridFunction_00002.openAttribute("limits_max");

            double limits_max = 0.0;
        
            H5::DataType type = attr.getDataType();
            attr.read(type, &limits_max);

//            std::cout << "limits_max " << limits_max << std::endl;
            EXPECT_NEAR(limits_max, 0.3970204292629652, 1e-10);
        }


        {
            H5::Group        GridFunction_00002 = file.openGroup("Timestep_00001/GridParts/GridPart_00000/GridFunctions/GridFunction_00002");
            H5::DataSet        dataset = H5::DataSet(GridFunction_00002.openDataSet("values"));

            hsize_t dims[2];
            H5::DataSpace    dataspace = dataset.getSpace();
            dataspace.getSimpleExtentDims(dims, nullptr);

            std::vector<double> values;
            values.resize(dims[0]);
            dataset.read(values.data(), H5::PredType::NATIVE_DOUBLE);

/*            for (hsize_t i = 0; i < dims[0]; i++)
            {
                std::cout << "value " << i << " " << values[i] << std::endl;

            }
*/
            EXPECT_NEAR(values[0], 0.32356910366452146, 1e-10);
            EXPECT_NEAR(values[dims[0] - 1], 0.12200070891582514, 1e-10);
        }



    }  // end of try block
    
       
    catch (const H5::FileIException& error)        // catch failure caused by the H5File operations
    {
        std::cout << error.getCDetailMsg();
    }
    
    catch (const H5::DataSetIException& error)        // catch failure caused by the DataSet operations
    {
        std::cout << error.getCDetailMsg();
    }
    
    catch (const H5::DataSpaceIException& error)    // catch failure caused by the DataSpace operations
    {
        std::cout << error.getCDetailMsg(); 
    }
    
    catch (const H5::DataTypeIException& error)        // catch failure caused by the DataType operations
    {
        std::cout << error.getCDetailMsg();
    }

}