Example #1
void addrow( H5::DataSet& ds, const std::vector<double>& rowtowrite )
{
  //Get the dataspace (it may have grown since the last write).
  H5::DataSpace origspace = ds.getSpace();

  //Get the rank; this function assumes a 2-D dataset (rank == 2).
  int rank = origspace.getSimpleExtentNdims();

  //Get the current dimensions.
  hsize_t dims[2];
  origspace.getSimpleExtentDims( dims, NULL);

  //Want to ADD a row, so need to offset at row = nrows, and col = 0;
  hsize_t offset[2] = { dims[0], 0 };
  hsize_t dims_toadd[2] = { 1, rowtowrite.size() }; //will write 1 row, ncols columns.

  //Compute "new" size (extended by 1 row).
  hsize_t size[2] = { dims[0]+dims_toadd[0], rowtowrite.size() };

  //Do the extension.
  ds.extend( size );

  //Get the new (extended) space, and select the hyperslab to write the row to.
  origspace = ds.getSpace();
  origspace.selectHyperslab( H5S_SELECT_SET, dims_toadd, offset );

  //Memory dataspace describing the single row to be written.
  H5::DataSpace toaddspace(rank, dims_toadd);

  ds.write(  rowtowrite.data(), H5::PredType::NATIVE_DOUBLE, toaddspace, origspace );

  //Closing toaddspace/origspace here has no effect.
  //The dataset can also be closed and reopened between calls with no effect.
}
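For reference, ds.extend() only works on a chunked dataset whose row dimension has an unlimited maximum. Below is a minimal sketch, under that assumption, of creating such a dataset; the dataset name "table" and the helper function are placeholders, not part of the example above.

#include <vector>
#include "H5Cpp.h"

H5::DataSet make_extendible_table( H5::H5File& file, hsize_t ncols )
{
  hsize_t dims[2]    = { 0, ncols };              // start with zero rows
  hsize_t maxdims[2] = { H5S_UNLIMITED, ncols };  // rows may grow without bound
  H5::DataSpace space( 2, dims, maxdims );

  H5::DSetCreatPropList props;
  hsize_t chunk[2] = { 1, ncols };                // extend() requires a chunked layout
  props.setChunk( 2, chunk );

  return file.createDataSet( "table", H5::PredType::NATIVE_DOUBLE, space, props );
}

// Possible usage:
//   H5::H5File file( "rows.h5", H5F_ACC_TRUNC );
//   H5::DataSet ds = make_extendible_table( file, 4 );
//   addrow( ds, std::vector<double>( 4, 0.0 ) );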
Example #2
void compare_datasets(H5::DataSet const& ds1, H5::DataSet const& ds2)
{
    std::vector<hsize_t> dims(ds1.getSpace().getSimpleExtentNdims());
    ds1.getSpace().getSimpleExtentDims(&*dims.begin());

    // compare 2 or 3-dimensional list of vectors, e.g., box edges
    BOOST_REQUIRE( dims.size() == 2 );
    BOOST_REQUIRE( dims[1] == 2 || dims[1] == 3 );
    if (dims[1] == 3) {
        std::vector<halmd::fixed_vector<double, 3> > array1, array2;
        h5xx::read_dataset(ds1, array1);
        h5xx::read_dataset(ds2, array2);
        BOOST_CHECK_EQUAL_COLLECTIONS(
            array1.begin(), array1.end()
          , array2.begin(), array2.end()
        );
    }
    else if (dims[1] == 2) {
        std::vector<halmd::fixed_vector<double, 2> > array1, array2;
        h5xx::read_dataset(ds1, array1);
        h5xx::read_dataset(ds2, array2);
        BOOST_CHECK_EQUAL_COLLECTIONS(
            array1.begin(), array1.end()
          , array2.begin(), array2.end()
        );
    }
}
void FeaturePointsRANSAC::loadModel(std::string modelPath)
{
	H5::H5File h5Model;

	try {
		h5Model = H5::H5File(modelPath, H5F_ACC_RDONLY);
	}
	catch (H5::Exception& e) {
		std::string msg( std::string( "Could not open HDF5 file \n" ) + e.getCDetailMsg() );
		throw msg;
	}

	// Load the Shape
	H5::Group modelReconstructive = h5Model.openGroup("/shape/ReconstructiveModel/model");
	H5::DataSet dsMean = modelReconstructive.openDataSet("./mean");
	hsize_t dims[1];
	dsMean.getSpace().getSimpleExtentDims(dims, NULL);	// dsMean.getSpace() leaks memory... maybe a hdf5 bug, maybe vlenReclaim(...) could be a fix. No idea.
	//H5::DataSpace dsp = dsMean.getSpace();
	//dsp.close();

	std::cout << "Dims: " << dims[0] << std::endl;		// TODO: I guess this whole part could be done A LOT better!
	float* testData = new float[dims[0]];
	dsMean.read(testData, H5::PredType::NATIVE_FLOAT);
	this->modelMeanShp.reserve(dims[0]);

	for (unsigned int i=0; i < dims[0]; ++i)	{
		modelMeanShp.push_back(testData[i]);
	}
	delete[] testData;
	testData = NULL;
	dsMean.close();

	// // Load the Texture
	H5::Group modelReconstructiveTex = h5Model.openGroup("/color/ReconstructiveModel/model");
	H5::DataSet dsMeanTex = modelReconstructiveTex.openDataSet("./mean");
	hsize_t dimsTex[1];
	dsMeanTex.getSpace().getSimpleExtentDims(dimsTex, NULL);
	std::cout << "Dims: " << dimsTex[0] << std::endl;		// TODO: I guess this whole part could be done A LOT better!
	float* testDataTex = new float[dimsTex[0]];
	dsMeanTex.read(testDataTex, H5::PredType::NATIVE_FLOAT);
	this->modelMeanTex.reserve(dimsTex[0]);

	for (unsigned int i=0; i < dimsTex[0]; ++i)	{
		modelMeanTex.push_back(testDataTex[i]);
	}
	delete[] testDataTex;
	testDataTex = NULL;
	dsMeanTex.close();

	h5Model.close();
}
H5::DataSet CompartmentReportHDF5::_openDataset( const H5::H5File& file,
                                                 const uint32_t cellID )
{
    std::stringstream cellName;
    cellName << "a" << cellID;
    const std::string datasetName = "/" + cellName.str() + "/" + _reportName +
                                    "/" + dataDatasetName;
    H5::DataSet dataset;
    H5E_BEGIN_TRY
        dataset = file.openDataSet( datasetName );
    H5E_END_TRY
    if( !dataset.getId() )
    {
        LBTHROW(
            std::runtime_error( "ReportReaderHDF5: "
                              "Dataset " + datasetName + " not found "
                              "in file: " + file.getFileName( )));
    }

    if( dataset.getSpace().getSimpleExtentNdims() != 2 )
    {
        LBTHROW(
            std::runtime_error("Compartment_Report_HDF5_File_Reader: "
                             "Error, not 2 dimensional array on " +
                             datasetName));
    }

    return dataset;
}
Example #5
arma::Mat<uint16_t> readLUT(const std::string& path)
{
    H5::H5File file (path.c_str(), H5F_ACC_RDONLY);
    H5::DataSet ds = file.openDataSet("LUT");

    H5::DataSpace filespace = ds.getSpace();
    int ndims = filespace.getSimpleExtentNdims();
    assert(ndims == 2);
    hsize_t dims[2] = {1, 1};
    filespace.getSimpleExtentDims(dims);

    H5::DataSpace memspace (ndims, dims);

    // Allocate with swapped dimensions: the raw read below drops HDF5's
    // row-major bytes into Armadillo's column-major storage, i.e. the transpose.
    arma::Mat<uint16_t> res (dims[1], dims[0]);

    ds.read(res.memptr(), H5::PredType::NATIVE_UINT16, memspace, filespace);

    filespace.close();
    memspace.close();
    ds.close();
    file.close();

    // NOTE: Armadillo stores data in column-major order, while HDF5 uses
    // row-major ordering. Above, we read the data directly from HDF5 into
    // the arma matrix, so it was implicitly transposed. The next function
    // fixes this problem.
    arma::inplace_trans(res);
    return res;
}
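A possible call site for the helper above; the path "lut.h5" is a placeholder.

#include <iostream>
#include <armadillo>

int main()
{
    arma::Mat<uint16_t> lut = readLUT("lut.h5");  // readLUT() as defined above
    // After the in-place transpose, rows/columns match the dataset layout in the file.
    std::cout << "LUT is " << lut.n_rows << " x " << lut.n_cols << std::endl;
    return 0;
}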
	/**
	 * @param variable
	 * @return
	 */
	long HDF5FileReader::getNumberOfRecords(const std::string& variable)
	{
		//std::cout << "reading " << variable << std::endl;
		//get variable number
		H5::DataSet dataset = this->variableGroup->openDataSet(variable);

		H5::DataSpace dataspace = dataset.getSpace();
		hsize_t count[1];
		dataspace.getSimpleExtentDims(count, NULL);	// fills count[0] with the number of records
		return (long)count[0];
	}
	/**
	 * @brief Returns a pointer to a std::vector<float> containing the values of the selected variable
	 *
	 * This allocates a new std::vector<float>.  Make sure you
	 * delete it when you are done using it, or you will have a memory leak.
	 *
	 * @param variable
	 * @return std::vector<float> containing the values of the selected variable.
	 */
	std::vector<float>* HDF5FileReader::getVariable(const std::string& variable)
	{
		std::vector<float>* variableData = new std::vector<float>();

		if (this->doesVariableExist(variable))
		{
			//std::cout << "reading " << variable << std::endl;
			//get variable number
//			long variableNum = this->getVariableID(variable);

			//std::cout << "variableNum for " << variable << ": " << variableNum << std::endl;
			//get dim sizes

			H5::Group group = this->current_file->openGroup("Variables");
			//cout << "variable: " << variable << ": " << counts[0] << endl;
			H5::DataSet * dataset = new H5::DataSet(group.openDataSet(variable));
			H5::DataSpace dataspace = dataset->getSpace();
			int rank = dataspace.getSimpleExtentNdims(); //should be 1
			hsize_t count[1];
			hsize_t offset[1] = {0};
			dataspace.getSimpleExtentDims(count, NULL);	// count[0] = number of records

			//std::cout << "count[0]: " << count[0] << std::endl;
			float * buffer = new float[count[0]];



			dataspace.selectHyperslab(H5S_SELECT_SET, count, offset);

			H5::DataSpace memspace( rank, count);
			memspace.selectHyperslab(H5S_SELECT_SET, count, offset);

			dataset->read(buffer, H5::PredType::NATIVE_FLOAT, memspace, dataspace);
			//std::cout << "after read" << std::endl;

			//add data to vector type, and delete original array
			variableData->reserve(count[0]);
			for (int i = 0; i < count[0]; i++)
			{
				variableData->push_back(buffer[i]);
			}
			//std::cout << "after adding to variableData vector" << std::endl;

			delete[] buffer;
			delete dataset;
			//std::cout << "finished reading " << variable << std::endl;
			//std::cout << "size of variable: " << variableData.size() << std::endl;
			//std::cout << "dimSizes[0]: " << dimSizes[0] << std::endl;

		}

		return variableData;
	}
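A possible call site, consistent with the ownership note in the comment above; the HDF5FileReader instance and the variable name "bx" are hypothetical.

	HDF5FileReader reader;
	// ... assume a file has already been opened on the reader ...
	std::vector<float>* bx = reader.getVariable("bx");
	if (!bx->empty())
	{
		// use bx->size(), (*bx)[0], ...
	}
	delete bx;	// the caller owns the returned vector
	bx = NULL;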
Example #8
void compare_datasets(H5::DataSet const& ds1, H5::DataSet const& ds2, hssize_t index1, hssize_t index2)
{
    std::vector<hsize_t> dims(ds1.getSpace().getSimpleExtentNdims());
    ds1.getSpace().getSimpleExtentDims(&*dims.begin());

    BOOST_REQUIRE( dims.size() == 2 || dims.size() == 3 );
    if (dims.size() == 2) {
        // compare scalar mass, species, …
        // assumes lossless conversion from integer to floating-point
        std::vector<double> array1, array2;
        h5xx::read_chunked_dataset(ds1, array1, index1);
        h5xx::read_chunked_dataset(ds2, array2, index2);
        BOOST_CHECK_EQUAL_COLLECTIONS(
            array1.begin(), array1.end()
          , array2.begin(), array2.end()
        );
    }
    else if (dims.size() == 3) {
        // compare 2 or 3-dimensional positions, velocities, …
        BOOST_REQUIRE( dims[2] == 2 || dims[2] == 3 );
        if (dims[2] == 3) {
            std::vector<halmd::fixed_vector<double, 3> > array1, array2;
            h5xx::read_chunked_dataset(ds1, array1, index1);
            h5xx::read_chunked_dataset(ds2, array2, index2);
            BOOST_CHECK_EQUAL_COLLECTIONS(
                array1.begin(), array1.end()
              , array2.begin(), array2.end()
            );
        }
        else if (dims[2] == 2) {
            std::vector<halmd::fixed_vector<double, 2> > array1, array2;
            h5xx::read_chunked_dataset(ds1, array1, index1);
            h5xx::read_chunked_dataset(ds2, array2, index2);
            BOOST_CHECK_EQUAL_COLLECTIONS(
                array1.begin(), array1.end()
              , array2.begin(), array2.end()
            );
        }
    }
}
Example #9
   const std::vector<hsize_t> TableDims(){

      if(flags_ != hdf5::READ) 
         throw std::runtime_error("TableDims() is only valid in READ mode");

      H5::DataSet dSet = file_->openDataSet("T00000000");
      H5::DataSpace dSpace = dSet.getSpace();

      std::vector<hsize_t> dims(dSpace.getSimpleExtentNdims());
      dSpace.getSimpleExtentDims(&dims[0]);

      return dims;
   }
NDArray<T, Nd> NDArray<T,Nd>::ReadFromH5(const H5::DataSet& h5Dset) {
  H5::DataSpace dspace = h5Dset.getSpace(); 
  int ndim = dspace.getSimpleExtentNdims(); 
  if (ndim>Nd) 
      throw std::range_error("Too many dimensions in H5 dataset for NDArray");
  hsize_t dimSize[Nd] = {};                      // fixed-size buffer; only the first ndim entries get filled
  dspace.getSimpleExtentDims(dimSize);
  std::array<std::size_t, Nd> dimSizeArr;
  for (int i=0; i<Nd; ++i) dimSizeArr[i] = (i < ndim) ? dimSize[i] : 1;  // pad unused trailing dims with 1
  NDArray<T, Nd> arr(dimSizeArr);
  // Read in data here
  H5::DataType h5DType = GetH5DataType<T>();
  h5Dset.read(arr.mData, h5DType);
  return arr;
}
void HDF5IO::loadStdVector(const std::string& GroupName, const std::string& Name,
    std::vector<RealType>& V)
{
  try{
    H5::Group FG = getGroup( GroupName );
    H5::DataSet DataSet = FG.openDataSet(Name.c_str());
    H5::DataSpace DataSpace = DataSet.getSpace();
    if(DataSpace.getSimpleExtentNdims() != 1)
      throw(H5::DataSpaceIException("HDF5IO::loadRealVector()","Unexpected multidimensional dataspace."));
    V.resize(DataSpace.getSimpleExtentNpoints());
    DataSet.read(V.data(),H5::PredType::NATIVE_DOUBLE);
    FG.close();
  } catch( const H5::Exception& err ){
    RUNTIME_ERROR("HDF5IO::loadRealStdVector");
  }
}
void HDF5IO::loadStdVector(const std::string& GroupName, const std::string& Name,
    std::vector<ComplexType>& V)
{
  try{
    H5::CompType ComplexDataType = this->openCompType("complex");
    H5::Group FG = getGroup( GroupName );
    H5::DataSet DataSet = FG.openDataSet(Name.c_str());
    H5::DataSpace DataSpace = DataSet.getSpace();
    if(DataSpace.getSimpleExtentNdims() != 1)
      throw(H5::DataSpaceIException("HDF5IO::loadComplexVector()","Unexpected multidimensional dataspace."));
    V.resize(DataSpace.getSimpleExtentNpoints());
    DataSet.read(V.data(),ComplexDataType);
    FG.close();
  } catch( const H5::Exception& err ){
    RUNTIME_ERROR("HDF5IO::loadComplexStdVector");
  }
}
void HDF5IO::loadVector(const std::string& GroupName, const std::string& Name,
  RealVectorType& V)
{
  H5::Group FG = getGroup( GroupName );
  H5::DataSet DataSet = FG.openDataSet(Name.c_str());
  H5::DataSpace DataSpace = DataSet.getSpace();
  if(DataSpace.getSimpleExtentNdims() != 1)
	throw(H5::DataSpaceIException("HDF5IO::loadRealVector()",
    "Unexpected multidimensional dataspace."));
  V.resize(DataSpace.getSimpleExtentNpoints());
  try{
    DataSet.read(V.data(),H5::PredType::NATIVE_DOUBLE);
  }catch( const H5::GroupIException& not_found_error ){
    RUNTIME_ERROR("No dataset found in loadRealVector. ");
  }
  FG.close();
}
	/**
	 * @brief Returns a value in the flat array of the variable and index requested.
	 *
	 * Use this method on variables that have a type of int
	 *
	 * @param variable The variable in the file
	 * @param index The index in the variable's array in the file
	 *
	 * @return int of the value in the array.
	 */
	int HDF5FileReader::getVariableIntAtIndex(const std::string& variable, long index)
	{
//		long counts[1];
		int value = std::numeric_limits<int>::min();
		if (this->doesVariableExist(variable))
		{
			//std::cout << "reading " << variable << std::endl;
			//get variable number
//			long variableNum = this->getVariableID(variable);

			//get dim sizes

			H5::Group group = this->current_file->openGroup("Variables");
			//cout << "variable: " << variable << ": " << counts[0] << endl;
			H5::DataSet * dataset = new H5::DataSet(group.openDataSet(variable));
			H5::DataSpace dataspace = dataset->getSpace();
			int rank = dataspace.getSimpleExtentNdims(); //should be 1
			hsize_t count[1] = {1};
			hsize_t offset[1] = {static_cast<hsize_t>(index)};	// read the single requested element
			//int ndims = dataspace.getSimpleExtentDims(count, NULL);
			int * buffer = new int[count[0]];	// the dataset is read as NATIVE_INT


			dataspace.selectHyperslab(H5S_SELECT_SET, count, offset);

			hsize_t dim[] = {count[0]};
			hsize_t memOffset[1] = {0};	// the memory selection always starts at 0
			H5::DataSpace memspace( rank, dim);
			memspace.selectHyperslab(H5S_SELECT_SET, dim, memOffset);

			dataset->read(buffer, H5::PredType::NATIVE_INT, memspace, dataspace);


			//add data to vector type, and delete original array
			value = buffer[0];
			delete[] buffer;
			delete dataset;
		}
		return value;
			//std::cout << "finished reading " << variable << std::endl;
			//std::cout << "size of variable: " << variableData.size() << std::endl;
			//std::cout << "dimSizes[0]: " << dimSizes[0] << std::endl;


	}
Example #15
/** Retrieves the dimensions of the dataset */
int hdfutil::GetDsDims (const H5::DataSet & dataset, int dims[3]) {
	H5::DataSpace dataspace = dataset.getSpace();
    bool simple = dataspace.isSimple();
    if (!simple)
        throw std::runtime_error("complex HDF5 dataspace");
	int rank = (int)dataspace.getSimpleExtentNdims();
	if (rank > 3)
        throw std::runtime_error("unsupported dimensionality");

    hsize_t h5_dims[3];
	dataspace.getSimpleExtentDims(h5_dims, NULL);
    for (int i = 0; i < rank; i++)
        dims[i] = (int)h5_dims[i];
    for (int i = rank; i < 3; i++)
        dims[i] =      -1;

    return rank;
}
Example #16
    bool readDataset1D(const H5::H5File &file, const std::string &name, std::vector<_Tp> &data)
    {
        H5::DataSet dataset = file.openDataSet(name);
        H5::DataSpace dataspace = dataset.getSpace();
        int rank = dataspace.getSimpleExtentNdims();
        hsize_t dims_out[1] = {0};
        if (rank == 1)
            dataspace.getSimpleExtentDims( dims_out, NULL );   // only safe for 1-D datasets

        int _type;
        bool read = getNodeType(dataset,  _type);
        read &= (_type == StorageNode::SEQ);
        read &= (rank  == 1);

        if (!read)
            return read;
        data.resize(dims_out[0]);
        dataset.read(data.data(), dataset.getDataType());
        return true;
    }
void HDF5IO::loadMatrix(const std::string& GroupName, const std::string& Name,
    ComplexMatrixType& M)
{
  try{
    H5::CompType ComplexDataType = this->openCompType("complex");
    H5::Group FG = getGroup( GroupName );
    H5::DataSet DataSet = FG.openDataSet(Name.c_str());
    H5::DataSpace DataSpace = DataSet.getSpace();
    if(DataSpace.getSimpleExtentNdims() != 2)
	throw(H5::DataSpaceIException("HDF5IO::loadMatrix()","A dataspace must be precisely two-dimensional."));
    hsize_t Dims[2];
    DataSpace.getSimpleExtentDims(Dims);
    M.resize(Dims[0],Dims[1]);
    DataSet.read(M.data(), ComplexDataType);
    FG.close();
  } catch( const H5::Exception& err ){
    RUNTIME_ERROR("HDF5IO::loadComplexMatrix at ");
  }
}
	/**
	 * Returns a pointer to a std::vector<float> containing the values of the selected variable
	 * in the range specified by the indexOffset and count (the number of records to read) stored
	 * in the selected file.  This allocates a new std::vector<float>.  Make sure you
	 * delete it when you are done using it, or you will have a memory leak.
	 * @param variable
	 * @param indexOffset
	 * @param count
	 * @return std::vector<float> containing the values of the selected variable.
	 */
	std::vector<float>* HDF5FileReader::getVariable(long variable, long indexOffset, long count)
	{

		std::vector<float>* variableData = new std::vector<float>();



		//get dim sizes
		H5::Group group = this->current_file->openGroup("Variables");
		//cout << "variable: " << variable << ": " << counts[0] << endl;
		H5::DataSet * dataset = new H5::DataSet(group.openDataSet(group.getObjnameByIdx(variable)));
		H5::DataSpace dataspace = dataset->getSpace();
		int rank = dataspace.getSimpleExtentNdims(); //should be 1
		hsize_t length[1] = {count};
		hsize_t offset[1] = {indexOffset};
		//int ndims = dataspace.getSimpleExtentDims(count, NULL);
		float * buffer = new float[length[0]];



		dataspace.selectHyperslab(H5S_SELECT_SET, length, offset);

		hsize_t dim[] = {length[0]};
		H5::DataSpace memspace( rank, dim);
		hsize_t memOffset[1] = {0};	// the file-space offset already accounts for indexOffset
		memspace.selectHyperslab(H5S_SELECT_SET, dim, memOffset);

		dataset->read(buffer, H5::PredType::NATIVE_FLOAT, memspace, dataspace);


		//add data to vector type, and delete original array
		variableData->reserve(length[0]);
		for (int i = 0; i < length[0]; i++)
		{
			variableData->push_back(buffer[i]);
		}

		delete[] buffer;
		delete dataset;
		//std::cout << "finished reading " << variable << std::endl;
		//std::cout << "size of variable: " << variableData.size() << std::endl;
		//std::cout << "dimSizes[0]: " << dimSizes[0] << std::endl;
		return variableData;
	}
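A possible call site for this ranged overload, again with a hypothetical HDF5FileReader instance; the variable id, offset, and count are made-up values.

	HDF5FileReader reader;
	// ... assume a file has already been opened on the reader ...
	// Read 100 records of variable #3, starting at record 200.
	std::vector<float>* chunk = reader.getVariable(3, 200, 100);
	// ... use *chunk ...
	delete chunk;	// the caller owns the returned vector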
Example #19
OXSXDataSet
DataSetIO::LoadDataSet(const std::string& filename_){
    // Get Data Set
    H5::H5File  file(filename_, H5F_ACC_RDONLY);
    H5::DataSet dataSet = file.openDataSet("observations");
 
    // read meta information
    unsigned nObs = 0;
    H5::Attribute nameAtt  = dataSet.openAttribute("observed_quantities");
    H5::Attribute countAtt  = dataSet.openAttribute("n_observables");
    H5std_string strreadbuf("");
    nameAtt.read(nameAtt.getDataType(), strreadbuf);
    countAtt.read(countAtt.getDataType(), &nObs);

    // Read data out as 1D array
    hsize_t nData = 0;
    dataSet.getSpace().getSimpleExtentDims(&nData, NULL);
    size_t nEntries = nData/nObs;

    std::vector<double> flatData(nData, 0);
    dataSet.read(&flatData.at(0), H5::PredType::NATIVE_DOUBLE);

    assert(nData%nObs == 0); // logic error in writing file (this class!) if assert fails.

    // Assemble into an OXSX data set
    OXSXDataSet oxsxDataSet;

    // Set the variable names
    oxsxDataSet.SetObservableNames(UnpackString(strreadbuf, fDelimiter));

    // then the data
    std::vector<double> oneEventObs(nObs, 0);
    for(size_t i = 0; i < nEntries; i++){
        for(size_t j = 0; j < nObs; j++)
            oneEventObs[j] = flatData.at(i * nObs + j);
        
        oxsxDataSet.AddEntry(EventData(oneEventObs));
    }
      
    return oxsxDataSet;
}
Example #20
std::vector<double> readEloss(const std::string& path)
{
    H5::H5File file (path.c_str(), H5F_ACC_RDONLY);
    H5::DataSet ds = file.openDataSet("eloss");

    H5::DataSpace filespace = ds.getSpace();
    int ndims = filespace.getSimpleExtentNdims();
    assert(ndims == 1);
    hsize_t dim;
    filespace.getSimpleExtentDims(&dim);

    H5::DataSpace memspace (ndims, &dim);

    std::vector<double> res (dim);

    ds.read(res.data(), H5::PredType::NATIVE_DOUBLE, memspace, filespace);

    filespace.close();
    memspace.close();
    ds.close();
    file.close();
    return res;
}
Example #21
std::vector<double> readlastrow( H5::DataSet& ds )
{
  H5::DataSpace origspace = ds.getSpace();
  int rank = origspace.getSimpleExtentNdims(); //assumed to be 2
  hsize_t dims[2];
  origspace.getSimpleExtentDims( dims, NULL);
  hsize_t nrows=dims[0];
  hsize_t ncols=dims[1];
  std::vector<double> returnvect( ncols );

  
  
  hsize_t targrowoffset = nrows-1;
  hsize_t targcoloffset = 0;
  hsize_t dimsmem[2] = {1,  ncols};
  H5::DataSpace memspace(rank, dimsmem);

  hsize_t offset[2] = { targrowoffset, targcoloffset };
  origspace.selectHyperslab( H5S_SELECT_SET, dimsmem, offset );
  ds.read( returnvect.data(), H5::PredType::NATIVE_DOUBLE, memspace, origspace );

  return returnvect;
}
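A round-trip sketch combining this helper with addrow() from Example #1; ds is assumed to be an extendible 2-D dataset of doubles, as described there.

#include <cassert>
#include <vector>

void append_and_check( H5::DataSet& ds )
{
  std::vector<double> row = { 1.0, 2.0, 3.0, 4.0 };  // must match the dataset's column count
  addrow( ds, row );
  std::vector<double> back = readlastrow( ds );
  assert( back == row );
}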
Example #22
void Hdf5Handler::initialize(
        const std::string& filename,
        const std::vector<Hdf5ColumnData>& columns)
{
    try
    {
        m_h5File.reset(new H5::H5File(filename, H5F_ACC_RDONLY));
    }
    catch (const H5::FileIException&)
    {
        throw hdf5_error("Could not open HDF5 file");
    }

    try
    {
        // Open each HDF5 DataSet and its corresponding DataSpace.
        for (const auto& col : columns)
        {
            const std::string dataSetName = col.name;
            const H5::PredType predType = col.predType;
            const H5::DataSet dataSet = m_h5File->openDataSet(dataSetName);
            const H5::DataSpace dataSpace = dataSet.getSpace();

            m_columnDataMap.insert(std::make_pair(
                        dataSetName,
                        ColumnData(predType, dataSet, dataSpace)));

            // Does not check whether all the columns are the same length.
            m_numPoints =
                std::max((uint64_t)getColumnNumEntries(dataSetName), m_numPoints);
        }
    }
    catch (const H5::Exception&)
    {
        throw hdf5_error("Could not initialize data set information");
    }
}
Example #23
ossimRefPtr<ossimProjection> ossim_hdf5::getBilinearProjection(
   H5::DataSet& latDataSet, H5::DataSet& lonDataSet, const ossimIrect& validRect )
{
   ossimRefPtr<ossimProjection> proj = 0;

   // Get dataspace of the dataset.
   H5::DataSpace latDataSpace = latDataSet.getSpace();
   H5::DataSpace lonDataSpace = lonDataSet.getSpace();
         
   // Number of dimensions of the input dataspace:
   const ossim_int32 DIM_COUNT = latDataSpace.getSimpleExtentNdims();
         
   if ( DIM_COUNT == 2 )
   {
      // Get the extents. Assuming dimensions are same for lat lon dataset. 
      std::vector<hsize_t> dimsOut(DIM_COUNT);
      latDataSpace.getSimpleExtentDims( &dimsOut.front(), 0 );

      if ( dimsOut[0] && dimsOut[1] )
      {
         std::vector<hsize_t> inputCount(DIM_COUNT);
         std::vector<hsize_t> inputOffset(DIM_COUNT);
               
         inputOffset[0] = 0;
         inputOffset[1] = 0;
               
         inputCount[0] = 1;
         inputCount[1] = 1;
               
         // Output dataspace dimensions.
         const ossim_int32 OUT_DIM_COUNT = 3;
         std::vector<hsize_t> outputCount(OUT_DIM_COUNT);
         outputCount[0] = 1; // single band
         outputCount[1] = 1; // single line
         outputCount[2] = 1; // single sample
               
         // Output dataspace offset.
         std::vector<hsize_t> outputOffset(OUT_DIM_COUNT);
         outputOffset[0] = 0;
         outputOffset[1] = 0;
         outputOffset[2] = 0;
               
         ossimScalarType scalar = ossim_hdf5::getScalarType( &latDataSet );
         if ( scalar == OSSIM_FLOAT32 )
         {
            // See if we need to swap bytes:
            ossimEndian* endian = 0;
            if ( ( ossim::byteOrder() != ossim_hdf5::getByteOrder( &latDataSet ) ) )
            {
               endian = new ossimEndian();
            }

            // Native type:
            H5::DataType latDataType = latDataSet.getDataType();
            H5::DataType lonDataType = lonDataSet.getDataType();
                  
            std::vector<ossimDpt> ipts;
            std::vector<ossimGpt> gpts;
            ossimGpt gpt(0.0, 0.0, 0.0); // Assuming WGS84...
            ossim_float32 latValue = 0.0;
            ossim_float32 lonValue = 0.0;
                  
            // Only grab every 256th value:
            const ossim_int32 GRID_SIZE = 256;
                  
            // Output dataspace always the same one pixel.
            H5::DataSpace bufferDataSpace( OUT_DIM_COUNT, &outputCount.front());
            bufferDataSpace.selectHyperslab( H5S_SELECT_SET,
                                             &outputCount.front(),
                                             &outputOffset.front() );

            //---
            // Dataset sample has NULL lines at the end so scan for valid rect.
            // Use "<= -999" for test as per NOAA as it seems the NULL value is
            // fuzzy.  e.g. -999.3.
            //---
            const ossim_float32 NULL_VALUE = -999.0;

            //---
            // Get the tie points within the valid rect:
            //---
            ossimDpt ipt = validRect.ul();
            while ( ipt.y <= validRect.lr().y )
            {
               inputOffset[0] = static_cast<hsize_t>(ipt.y);
               
               // Sample loop:
               ipt.x = validRect.ul().x;
               while ( ipt.x <= validRect.lr().x )
               {
                  inputOffset[1] = static_cast<hsize_t>(ipt.x);
                  
                  latDataSpace.selectHyperslab( H5S_SELECT_SET,
                                                &inputCount.front(),
                                                &inputOffset.front() );
                  lonDataSpace.selectHyperslab( H5S_SELECT_SET,
                                                &inputCount.front(),
                                                &inputOffset.front() );
                  
                  // Read data from file into the buffer.
                  latDataSet.read( &latValue, latDataType, bufferDataSpace, latDataSpace );
                  lonDataSet.read( &lonValue, lonDataType, bufferDataSpace, lonDataSpace );
                  
                  if ( endian )
                  {
                     // If the endian pointer is initialized(not zero) swap the bytes.
                     endian->swap( latValue );
                     endian->swap( lonValue );  
                  }
                  
                  if ( ( latValue > NULL_VALUE ) && ( lonValue > NULL_VALUE ) )
                  {
                     gpt.lat = latValue;
                     gpt.lon = lonValue;
                     gpts.push_back( gpt );
                     
                     // Add the image point subtracting the image offset.
                     ossimIpt shiftedIpt = ipt - validRect.ul();
                     ipts.push_back( shiftedIpt );
                  }

                  // Go to next point:
                  if ( ipt.x < validRect.lr().x )
                  {
                     ipt.x += GRID_SIZE;
                     if ( ipt.x > validRect.lr().x )
                     {
                        ipt.x = validRect.lr().x; // Clamp to last sample.
                     }
                  }
                  else
                  {  
                     break; // At the end:
                  }
                  
               } // End sample loop.

               if ( ipt.y < validRect.lr().y )
               {
                  ipt.y += GRID_SIZE;
                  if ( ipt.y > validRect.lr().y )
                  {
                     ipt.y = validRect.lr().y; // Clamp to last line.
                  }
               }
               else
               {  
                  break; // At the end:
               }
               
            } // End line loop.
                  
            if ( ipts.size() )
            {
               // Create the projection:
               ossimRefPtr<ossimBilinearProjection> bp = new ossimBilinearProjection();
                     
               // Add the tie points:
               bp->setTiePoints( ipts, gpts );
                     
               // Assign to output projection:
               proj = bp.get();
            }

            // Cleanup:
            if ( endian )
            {
               delete endian;
               endian = 0;
            }
         }
         else // Matches: if ( scalar == OSSIM_FLOAT32 ){...}
         {
            ossimNotify(ossimNotifyLevel_WARN)
               << "ossim_hdf5::getBilinearProjection WARNING!"
               << "\nUnhandled scalar type: "
               << ossimScalarTypeLut::instance()->getEntryString( scalar )
               << std::endl;
         }
               
      } // Matches: if ( dimsOut...
            
   } // Matches: if ( DIM_COUNT == 2 )
         
   latDataSpace.close();
   lonDataSpace.close();
   
   return proj;
   
} // End: ossim_hdf5::getBilinearProjection()
Example #24
void Bundle2::loadGeometry_(H5::H5File& file) {
	H5::Group geometryGroup = file.openGroup("/Geometry");

	// Loading poses
	H5::DataSet posesDataSet = geometryGroup.openDataSet("Poses");
	double* posesData = (double*)malloc(frames_.size()*12*sizeof(double));
	posesDataSet.read((void*)posesData, H5::PredType::NATIVE_DOUBLE, H5::DataSpace::ALL, H5::DataSpace::ALL);
	posesDataSet.close();

	size_t i = 0;
	for(deque<Frame*>::iterator it = frames_.begin(); it != frames_.end(); ++it) {
		Pose* pose = new Pose;
		pose->sett(core::RealPoint3D<double>(posesData[i*12], posesData[i*12 + 1], posesData[i*12 + 2]));

		core::Matrix<double> R(3, 3);
		R[0][0] = posesData[i*12 + 3]; R[1][0] = posesData[i*12 + 4]; R[2][0] = posesData[i*12 + 5];
		R[0][1] = posesData[i*12 + 6]; R[1][1] = posesData[i*12 + 7]; R[2][1] = posesData[i*12 + 8];
		R[0][2] = posesData[i*12 + 9]; R[1][2] = posesData[i*12 + 10]; R[2][2] = posesData[i*12 + 11];

		pose->setR(R);
		pose->calcEulerAngles();
		pose->setorientationSynchronWithAngles(true);
		pose->setderivationsSynchronWithAngles(false);

		(*it)->setpose(pose);

		++i;
	}
	free((void*)posesData);

	// Loading points
	H5::DataSet pointsDataSet = geometryGroup.openDataSet("Points");
	double* pointsData = (double*)malloc(tracks_.size()*3*sizeof(double));
	pointsDataSet.read((void*)pointsData, H5::PredType::NATIVE_DOUBLE, H5::DataSpace::ALL, H5::DataSpace::ALL);
	pointsDataSet.close();

	i = 0;
	for(deque<Track*>::iterator it = tracks_.begin(); it != tracks_.end(); it++) {
		Point* point = new Point(core::RealPoint3D<double>(pointsData[i*3], pointsData[i*3 + 1], pointsData[i*3 + 2]));
		(*it)->setpoint(point);

		++i;
	}
	free((void*)pointsData);

	// Loading inlier information
	H5::DataSet inliersDataSet = geometryGroup.openDataSet("Inliers");
	hvl_t* inliersData = (hvl_t*)malloc(frames_.size()*sizeof(hvl_t));
	H5::VarLenType memType(&H5::PredType::NATIVE_UCHAR);
	inliersDataSet.read((void*)inliersData, memType, H5::DataSpace::ALL, H5::DataSpace::ALL);
	memType.close();
	inliersDataSet.close();

	i = 0;
	for(deque<Frame*>::iterator it = frames_.begin(); it != frames_.end(); it++) {
		unsigned char* inl = (unsigned char*)(inliersData[i].p);

        size_t k = 0;
		for(size_t j = 0; j < (*it)->size(); ++j) {
            View& v = (**it)[j];
            for(unsigned int cam = 0; cam < v.numCameras(); ++cam) {
                if(v.inCamera(cam)) {
                    Ray ray;
                    if(inl[k]) ray.setinlier(true);
                    else ray.setinlier(false);

                    v.addRay(cam, ray);
                    ++k;
                }
            }
		}

		++i;
	}

	for(size_t j = 0; j < frames_.size(); ++j) free(inliersData[j].p);
	free((void*)inliersData);

	// Loading curves if they exist
	bool curvesFound = false;
	const hsize_t maxObjs = geometryGroup.getNumObjs();
	for(hsize_t obj = 0; obj < maxObjs; ++obj) {
		string objName = geometryGroup.getObjnameByIdx(obj);
		if(objName == string("Curves")) curvesFound = true;
	}

	if(curvesFound) {
		H5::DataSet curvesDataSet = geometryGroup.openDataSet("Curves");

		hsize_t curvesDim[1];
		H5::DataSpace curvesDS = curvesDataSet.getSpace();
		curvesDS.getSimpleExtentDims(curvesDim);
		curvesDS.close();

		hvl_t* curvesData = (hvl_t*)malloc(curvesDim[0]*sizeof(hvl_t));
		H5::VarLenType memType(&H5::PredType::NATIVE_HSIZE);

		curvesDataSet.read((void*)curvesData, memType, H5::DataSpace::ALL, H5::DataSpace::ALL);

		memType.close();
		curvesDataSet.close();

		for(size_t c = 0; c < curvesDim[0]; ++c) {
			const size_t cur_c = addCurve();

			for(size_t p = 0; p < curvesData[c].len; ++p) {
				curves_[cur_c].addPoint(((size_t*)(curvesData[c].p))[p]);
			}
		}

		for(size_t i = 0; i < curvesDim[0]; ++i) free(curvesData[i].p);
		free((void*)curvesData);
	}

	geometryGroup.close();
}
Example #25
bool TStellarData::load(const std::string& fname, const std::string& group, const std::string& dset,
			double err_floor, double default_EBV) {
	H5::H5File *file = H5Utils::openFile(fname);
	if(file == NULL) { return false; }
	
	H5::Group *gp = H5Utils::openGroup(file, group);
	if(gp == NULL) {
		delete file;
		return false;
	}
	
	H5::DataSet dataset = gp->openDataSet(dset);
	
	/*
	 *  Photometry
	 */
	
	// Datatype
	hsize_t nbands = NBANDS;
	H5::ArrayType f4arr(H5::PredType::NATIVE_FLOAT, 1, &nbands);
	H5::ArrayType u4arr(H5::PredType::NATIVE_UINT32, 1, &nbands);
	H5::CompType dtype(sizeof(TFileData));
	dtype.insertMember("obj_id", HOFFSET(TFileData, obj_id), H5::PredType::NATIVE_UINT64);
	dtype.insertMember("l", HOFFSET(TFileData, l), H5::PredType::NATIVE_DOUBLE);
	dtype.insertMember("b", HOFFSET(TFileData, b), H5::PredType::NATIVE_DOUBLE);
	dtype.insertMember("mag", HOFFSET(TFileData, mag), f4arr);
	dtype.insertMember("err", HOFFSET(TFileData, err), f4arr);
	dtype.insertMember("maglimit", HOFFSET(TFileData, maglimit), f4arr);
	dtype.insertMember("nDet", HOFFSET(TFileData, N_det), u4arr);
	dtype.insertMember("EBV", HOFFSET(TFileData, EBV), H5::PredType::NATIVE_FLOAT);
	
	// Dataspace
	hsize_t length;
	H5::DataSpace dataspace = dataset.getSpace();
	dataspace.getSimpleExtentDims(&length);
	
	// Read in dataset
	TFileData* data_buf = new TFileData[length];
	dataset.read(data_buf, dtype);
	//std::cerr << "# Read in dimensions." << std::endl;
	
	// Fix magnitude limits
	for(int n=0; n<nbands; n++) {
		float tmp;
		float maglim_replacement = 25.;
		
		// Find the 95th percentile of valid magnitude limits
		std::vector<float> maglimit;
		for(hsize_t i=0; i<length; i++) {
			tmp = data_buf[i].maglimit[n];
			
			if((tmp > 10.) && (tmp < 40.) && (!isnan(tmp))) {
				maglimit.push_back(tmp);
			}
		}
		
		//std::sort(maglimit.begin(), maglimit.end());
		if(maglimit.size() != 0) {
			maglim_replacement = percentile(maglimit, 95.);
		}
		
		// Replace missing magnitude limits with the 95th percentile magnitude limit
		for(hsize_t i=0; i<length; i++) {
			tmp = data_buf[i].maglimit[n];
			
			if(!((tmp > 10.) && (tmp < 40.)) || isnan(tmp)) {
				//std::cout << i << ", " << n << ":  " << tmp << std::endl;
				data_buf[i].maglimit[n] = maglim_replacement;
			}
		}
	}
	
	//int n_filtered = 0;
	//int n_M_dwarfs = 0;
	
	TMagnitudes mag_tmp;
	for(size_t i=0; i<length; i++) {
		mag_tmp.set(data_buf[i], err_floor);
		star.push_back(mag_tmp);
		
		//int n_informative = 0;
		
		// Remove g-band
		//mag_tmp.m[0] = 0.;
		//mag_tmp.err[0] = 1.e10;
		
		//double g_err = mag_tmp.err[0];
		//mag_tmp.err[0] = sqrt(g_err*g_err + 0.1*0.1);
		
		// Filter bright end
                // TODO: Put this into query_lsd.py
		/*for(int j=0; j<NBANDS; j++) {
			if((mag_tmp.err[j] < 1.e9) && (mag_tmp.m[j] < 14.)) {
				mag_tmp.err[j] = 1.e10;
				mag_tmp.m[j] = 0.;
			}
			
			if(mag_tmp.err[j] < 1.e9) {
				n_informative++;
			}
		}*/
		
		// Filter M dwarfs based on color cut
		//bool M_dwarf = false;
		/*bool M_dwarf = true;
		
		double A_g = 3.172;
		double A_r = 2.271;
		double A_i = 1.682;
		
		if(mag_tmp.m[0] - A_g / (A_g - A_r) * (mag_tmp.m[0] - mag_tmp.m[1] - 1.2) > 20.) {
			M_dwarf = false;
		} else if(mag_tmp.m[1] - mag_tmp.m[2] - (A_r - A_i) / (A_g - A_r) * (mag_tmp.m[0] - mag_tmp.m[1]) < 0.) {
			M_dwarf = false;
		} else {
			n_M_dwarfs++;
		}
		*/
		
		/*if(n_informative >= 4) { //&& (!M_dwarf)) {
			star.push_back(mag_tmp);
		} else {
			n_filtered++;
		}*/
	}
	
	//std::cerr << "# of stars filtered: " << n_filtered << std::endl;
	//std::cerr << "# of M dwarfs: " << n_M_dwarfs << std::endl;
	
	/*
	 *  Attributes
	 */
	
	H5::Attribute att = dataset.openAttribute("healpix_index");
	H5::DataType att_dtype = H5::PredType::NATIVE_UINT64;
	att.read(att_dtype, reinterpret_cast<void*>(&healpix_index));
	
	att = dataset.openAttribute("nested");
	att_dtype = H5::PredType::NATIVE_UCHAR;
	att.read(att_dtype, reinterpret_cast<void*>(&nested));
	
	att = dataset.openAttribute("nside");
	att_dtype = H5::PredType::NATIVE_UINT32;
	att.read(att_dtype, reinterpret_cast<void*>(&nside));
	
	att = dataset.openAttribute("l");
	att_dtype = H5::PredType::NATIVE_DOUBLE;
	att.read(att_dtype, reinterpret_cast<void*>(&l));
	
	att = dataset.openAttribute("b");
	att_dtype = H5::PredType::NATIVE_DOUBLE;
	att.read(att_dtype, reinterpret_cast<void*>(&b));
	
	att = dataset.openAttribute("EBV");
	att_dtype = H5::PredType::NATIVE_DOUBLE;
	att.read(att_dtype, reinterpret_cast<void*>(&EBV));
	
	// TEST: Force l, b to anticenter
	//l = 180.;
	//b = 0.;
	
	if((EBV <= 0.) || (EBV > default_EBV) || isnan(EBV)) { EBV = default_EBV; }
	
	delete[] data_buf;
	delete gp;
	delete file;
	
	return true;
}
H5RandomReader::H5RandomReader(const std::string fileName, const std::string groupPath) throw (InvalidFileException) {

    try {
        file.openFile(fileName, H5F_ACC_RDONLY);}
    catch ( H5::FileIException ) {
        throw InvalidFileException("Cannot access file");}
    try {
        group = file.openGroup(groupPath);}
    catch ( H5::GroupIException ) {
        file.close();
        throw InvalidFileException("Cannot access group");}
    /*
     * extract timeline. This is also necessary to get the nbSteps.
     */
    try {
        timeline = group.openDataSet("timeline");
    	nSteps = timeline.getSpace().getSimpleExtentNpoints();}
    catch ( H5::DataSetIException error ) {
        //error.printError();
        group.close();
        file.close();
        throw InvalidFileException("Cannot access timeline dataset");}
    if (logging::info)
        std::cerr << "Opened group \"" <<  fileName << groupPath << "\" which has " << nSteps << " steps.\n";
    /*
     * extract objects names in the xpGroup
     */

    std::vector<std::string>  names;
    H5Literate(group.getId(), H5_INDEX_NAME, H5_ITER_INC, NULL, iterInGroup, &names);
    /*
     * extract data from object in xpGroup
     * these data can be of 3 types: matrix, translate or wrench
     * each data are saved in related map
     */
    for (unsigned int i=0; i<names.size(); i++){ //TODO: skip timeline
        H5::DataSet dSet = group.openDataSet(names[i]);
        if (H5Aexists(dSet.getId(), "ArborisViewerType")) {
            H5::Attribute att = dSet.openAttribute("ArborisViewerType");
            std::string type;
            att.read(att.getDataType(), type);
            if (type == "matrix"){
                H5::DataSpace dSpace = dSet.getSpace();
                bool dimension_ok = false;
                if (dSpace.getSimpleExtentNdims()==3) {
                    hsize_t dims[3];
                    dSpace.getSimpleExtentDims (dims);
                    if (dims[0] == nSteps && dims[1] == 4 && dims[2] == 4)
                        dimension_ok = true;}
                if (dimension_ok)
                    matrices[names[i]] = dSet;
                else {
                    if (logging::warning)
                        std::cerr << "Skipping dataset \"" << names[i] << "\" which has wrong dimensions. I was expecting (" << nSteps << ",4,4).\n";
                    dSet.close();}}
            else if (type == "translate"){
                H5::DataSpace dSpace = dSet.getSpace();
                bool dimension_ok = false;
                if (dSpace.getSimpleExtentNdims()==2) {
                    hsize_t dims[2];
                    dSpace.getSimpleExtentDims (dims);
                    if (dims[0] == nSteps && dims[1] == 3)
                        dimension_ok = true;}
                if (dimension_ok)
                    translates[names[i]] = dSet;
                else {
                    if (logging::warning)
                        std::cerr << "Skipping dataset \"" << names[i] << "\" which has wrong dimensions. I was expecting (" << nSteps << ",3).\n";
                    dSet.close();}}
            else if (type == "wrench") {
                H5::DataSpace dSpace = dSet.getSpace();
                bool dimension_ok = false;
                if (dSpace.getSimpleExtentNdims()==2) {
                    hsize_t dims[2];
                    dSpace.getSimpleExtentDims (dims);
                    if (dims[0] == nSteps && dims[1] == 6)
                        dimension_ok = true;}
                if (dimension_ok)
                    wrenches[names[i]] = dSet;
                else {
                    if (logging::warning)
                        std::cerr << "Skipping dataset \"" << names[i] << "\" which has wrong dimensions. I was expecting (" << nSteps << ",6).\n";
                    dSet.close();}}
            else {
                if (logging::warning)
                    std::cerr << "Skipping dataset \"" << names[i] << "\" whose ArborisViewerType attribute has unknown value \"" << type << "\".\n";
                dSet.close();}
            att.close();
        }
        else {
            if (logging::info)
                std::cerr << "Skipping dataset \"" << names[i] << "\" which has no ArborisViewerType attribute.\n";
            dSet.close();
        }
    }
};
Example #27
bool ossim_hdf5::crossesDateline( H5::DataSet& dataset, const ossimIrect& validRect )
{
   bool result = false;

   H5::DataSpace dataspace = dataset.getSpace();
         
   // Number of dimensions of the input dataspace:
   const ossim_int32 DIM_COUNT = dataspace.getSimpleExtentNdims();
         
   if ( DIM_COUNT == 2 )
   {
      const ossim_uint32 ROWS = validRect.height();
      const ossim_uint32 COLS = validRect.width();
      
      // Get the extents. Assuming dimensions are same for lat lon dataset. 
      std::vector<hsize_t> dimsOut(DIM_COUNT);
      dataspace.getSimpleExtentDims( &dimsOut.front(), 0 );

      if ( (ROWS <= dimsOut[0]) && (COLS <= dimsOut[1]) )
      {
         std::vector<hsize_t> inputCount(DIM_COUNT);
         std::vector<hsize_t> inputOffset(DIM_COUNT);
         
         inputCount[0] = 1;    // row
         inputCount[1] = COLS; // col
         
         // Output dataspace dimensions.
         const ossim_int32 OUT_DIM_COUNT = 3;
         std::vector<hsize_t> outputCount(OUT_DIM_COUNT);
         outputCount[0] = 1; // single band
         outputCount[1] = 1; // single line
         outputCount[2] = COLS; // single sample
               
         // Output dataspace offset.
         std::vector<hsize_t> outputOffset(OUT_DIM_COUNT);
         outputOffset[0] = 0;
         outputOffset[1] = 0;
         outputOffset[2] = 0;
               
         ossimScalarType scalar = ossim_hdf5::getScalarType( &dataset );
         if ( scalar == OSSIM_FLOAT32 )
         {
            // See if we need to swap bytes:
            ossimEndian* endian = 0;
            if ( ( ossim::byteOrder() != ossim_hdf5::getByteOrder( &dataset ) ) )
            {
               endian = new ossimEndian();
            }

            // Native type:
            H5::DataType datatype = dataset.getDataType();
                  
            // Output dataspace always the same one line.
            H5::DataSpace bufferDataSpace( OUT_DIM_COUNT, &outputCount.front());
            bufferDataSpace.selectHyperslab( H5S_SELECT_SET,
                                             &outputCount.front(),
                                             &outputOffset.front() );

            //---
            // Dataset sample has NULL lines at the end so scan for valid rect.
            // Use "<= -999" for test as per NOAA as it seems the NULL value is
            // fuzzy.  e.g. -999.3.
            //---

            // Buffer to hold a line:
            std::vector<ossim_float32> lineBuffer(validRect.width());

            // Read the first line:
            inputOffset[0] = static_cast<hsize_t>(validRect.ul().y);
            inputOffset[1] = static_cast<hsize_t>(validRect.ul().x);
            dataspace.selectHyperslab( H5S_SELECT_SET,
                                       &inputCount.front(),
                                       &inputOffset.front() );
            dataset.read( &(lineBuffer.front()), datatype, bufferDataSpace, dataspace );

            if ( endian )
            {
               // If the endian pointer is initialized(not zero) swap the bytes.
               endian->swap( &(lineBuffer.front()), COLS );
            }

            // Test the first line:
            result = ossim_hdf5::crossesDateline( lineBuffer );

            if ( !result )
            {
               // Test the last line:
               inputOffset[0] = static_cast<hsize_t>(validRect.ll().y);
               inputOffset[1] = static_cast<hsize_t>(validRect.ll().x);
               dataspace.selectHyperslab( H5S_SELECT_SET,
                                          &inputCount.front(),
                                          &inputOffset.front() );
               dataset.read( &(lineBuffer.front()), datatype, bufferDataSpace, dataspace );

               result = ossim_hdf5::crossesDateline( lineBuffer );
            }

            if ( endian )
            {
               delete endian;
               endian = 0;
            }
         }
         else // Matches: if ( scalar == OSSIM_FLOAT32 ){...}
         {
            ossimNotify(ossimNotifyLevel_WARN)
               << "ossim_hdf5::crossesDateline WARNING!"
               << "\nUnhandled scalar type: "
               << ossimScalarTypeLut::instance()->getEntryString( scalar )
               << std::endl;
         }
         
      } // Matches: if ( dimsOut...
      
   } // Matches: if ( DIM_COUNT == 2 )
   
   dataspace.close();
   
   return result;
   
} // End: ossim_hdf5::crossesDateline(...)
Example #28
//--------------------------------------------------------------------------------------------------
/// 
//--------------------------------------------------------------------------------------------------
TEST(DISABLED_HDFTests, BasicFileRead)
{
    std::string file_path = "D:/ResInsight/SourSim/PKMUNK_NOV_TEST_SS.sourpre.00001";

    try
    {
        H5::Exception::dontPrint();                                // Turn off auto-printing of failures to handle the errors appropriately

        H5::H5File file(file_path.c_str(), H5F_ACC_RDONLY);

        {
            H5::Group        timestep = file.openGroup("Timestep_00001");
            H5::Attribute    attr     = timestep.openAttribute("timestep");

            double timestep_value = 0.0;

            H5::DataType type = attr.getDataType();
            attr.read(type, &timestep_value);

            //std::cout << "Timestep value " << timestep_value << std::endl;
            EXPECT_NEAR(timestep_value, 1.0, 1e-1);
        }


        {
            // Group size is not an attribute!

            H5::Group GridFunctions = file.openGroup("Timestep_00001/GridParts/GridPart_00000/GridFunctions");

            hsize_t   group_size = GridFunctions.getNumObjs();

            //std::cout << "GridFunctions group_size " << group_size << std::endl;
            EXPECT_EQ(size_t(20), group_size);

/*            for (hsize_t i = 0; i < group_size; i++)
            {
                // H5std_string node_name = GridFunctions.getObjnameByIdx(i);     // crashes on VS2017 due to lib/heap/runtime differences to HDF5 VS2015 lib

                std::string node_name;
                node_name.resize(1024);

                ssize_t slen = GridFunctions.getObjnameByIdx(i, &node_name[0], 1023);

                node_name.resize(slen + 1);

                std::cout << "GridFunctions sub-node name " << node_name << std::endl;
            }
*/

            std::string first_subnode(1024, '\0');

            ssize_t slen = GridFunctions.getObjnameByIdx(0, &first_subnode[0], 1023);
            first_subnode.resize(slen + 1);

            EXPECT_TRUE(first_subnode.compare(0, slen, "GridFunction_00002") == 0);
        }


        {
            H5::Group        GridFunction_00002 = file.openGroup("Timestep_00001/GridParts/GridPart_00000/GridFunctions/GridFunction_00002");
            H5::Attribute    attr = GridFunction_00002.openAttribute("limits_max");

            double limits_max = 0.0;
        
            H5::DataType type = attr.getDataType();
            attr.read(type, &limits_max);

//            std::cout << "limits_max " << limits_max << std::endl;
            EXPECT_NEAR(limits_max, 0.3970204292629652, 1e-10);
        }


        {
            H5::Group        GridFunction_00002 = file.openGroup("Timestep_00001/GridParts/GridPart_00000/GridFunctions/GridFunction_00002");
            H5::DataSet        dataset = H5::DataSet(GridFunction_00002.openDataSet("values"));

            hsize_t dims[2];
            H5::DataSpace    dataspace = dataset.getSpace();
            dataspace.getSimpleExtentDims(dims, nullptr);

            std::vector<double> values;
            values.resize(dims[0]);
            dataset.read(values.data(), H5::PredType::NATIVE_DOUBLE);

/*            for (hsize_t i = 0; i < dims[0]; i++)
            {
                std::cout << "value " << i << " " << values[i] << std::endl;

            }
*/
            EXPECT_NEAR(values[0], 0.32356910366452146, 1e-10);
            EXPECT_NEAR(values[dims[0] - 1], 0.12200070891582514, 1e-10);
        }



    }  // end of try block
    
       
    catch (H5::FileIException error)        // catch failure caused by the H5File operations
    {
        std::cout << error.getCDetailMsg();
    }
    
    catch (H5::DataSetIException error)        // catch failure caused by the DataSet operations
    {
        std::cout << error.getCDetailMsg();
    }
    
    catch (H5::DataSpaceIException error)    // catch failure caused by the DataSpace operations
    {
        std::cout << error.getCDetailMsg(); 
    }
    
    catch (H5::DataTypeIException error)        // catch failure caused by the DataSpace operations
    {
        std::cout << error.getCDetailMsg();
    }

}
Example #29
PcaModel PcaModel::loadStatismoModel(path h5file, PcaModel::ModelType modelType)
{
	logging::Logger logger = Loggers->getLogger("shapemodels");
	PcaModel model;

	// Load the shape or color model from the .h5 file
	string h5GroupType;
	if (modelType == ModelType::SHAPE) {
		h5GroupType = "shape";
	} else if (modelType == ModelType::COLOR) {
		h5GroupType = "color";
	}

	H5::H5File h5Model;

	try {
		h5Model = H5::H5File(h5file.string(), H5F_ACC_RDONLY);
	}
	catch (H5::Exception& e) {
		string errorMessage = "Could not open HDF5 file: " + string(e.getCDetailMsg());
		logger.error(errorMessage);
		throw errorMessage;
	}

	// Load either the shape or texture mean
	string h5Group = "/" + h5GroupType + "/model";
	H5::Group modelReconstructive = h5Model.openGroup(h5Group);

	// Read the mean
	H5::DataSet dsMean = modelReconstructive.openDataSet("./mean");
	hsize_t dims[2];
	dsMean.getSpace().getSimpleExtentDims(dims, NULL);	// dsMean.getSpace() leaks memory... maybe a hdf5 bug, maybe vlenReclaim(...) could be a fix. No idea.
	//H5::DataSpace dsp = dsMean.getSpace();
	//dsp.close();
	Loggers->getLogger("shapemodels").debug("Dimensions of the model mean: " + lexical_cast<string>(dims[0]));
	model.mean = Mat(1, dims[0], CV_32FC1); // Use a row-vector, because of faster memory access and I'm not sure the memory block is allocated contiguously if we have multiple rows.
	dsMean.read(model.mean.ptr<float>(0), H5::PredType::NATIVE_FLOAT);
	model.mean = model.mean.t(); // Transpose it to a col-vector
	dsMean.close();

	// Read the eigenvalues
	dsMean = modelReconstructive.openDataSet("./pcaVariance");
	dsMean.getSpace().getSimpleExtentDims(dims, NULL);
	Loggers->getLogger("shapemodels").debug("Dimensions of the pcaVariance: " + lexical_cast<string>(dims[0]));
	model.eigenvalues = Mat(1, dims[0], CV_32FC1);
	dsMean.read(model.eigenvalues.ptr<float>(0), H5::PredType::NATIVE_FLOAT);
	model.eigenvalues = model.eigenvalues.t();
	dsMean.close();

	// Read the PCA basis matrix
	dsMean = modelReconstructive.openDataSet("./pcaBasis");
	dsMean.getSpace().getSimpleExtentDims(dims, NULL);
	Loggers->getLogger("shapemodels").debug("Dimensions of the PCA basis matrix: " + lexical_cast<string>(dims[0]) + ", " + lexical_cast<string>(dims[1]));
	model.pcaBasis = Mat(dims[0], dims[1], CV_32FC1);
	dsMean.read(model.pcaBasis.ptr<float>(0), H5::PredType::NATIVE_FLOAT);
	dsMean.close();

	modelReconstructive.close(); // close the model-group

	// Read the noise variance (not implemented)
	/*dsMean = modelReconstructive.openDataSet("./noiseVariance");
	float noiseVariance = 10.0f;
	dsMean.read(&noiseVariance, H5::PredType::NATIVE_FLOAT);
	dsMean.close(); */

	// Read the triangle-list
	string representerGroupName = "/" + h5GroupType + "/representer";
	H5::Group representerGroup = h5Model.openGroup(representerGroupName);
	dsMean = representerGroup.openDataSet("./reference-mesh/triangle-list");
	dsMean.getSpace().getSimpleExtentDims(dims, NULL);
	Loggers->getLogger("shapemodels").debug("Dimensions of the triangle-list: " + lexical_cast<string>(dims[0]) + ", " + lexical_cast<string>(dims[1]));
	Mat triangles(dims[0], dims[1], CV_32SC1);
	dsMean.read(triangles.ptr<int>(0), H5::PredType::NATIVE_INT32);
	dsMean.close();
	representerGroup.close();
	model.triangleList.resize(triangles.rows);
	for (unsigned int i = 0; i < model.triangleList.size(); ++i) {
		model.triangleList[i][0] = triangles.at<int>(i, 0);
		model.triangleList[i][1] = triangles.at<int>(i, 1);
		model.triangleList[i][2] = triangles.at<int>(i, 2);
	}

	// Load the landmarks mappings:
	// load the reference-mesh
	representerGroup = h5Model.openGroup(representerGroupName);
	dsMean = representerGroup.openDataSet("./reference-mesh/vertex-coordinates");
	dsMean.getSpace().getSimpleExtentDims(dims, NULL);
	Loggers->getLogger("shapemodels").debug("Dimensions of the reference-mesh vertex-coordinates matrix: " + lexical_cast<string>(dims[0]) + ", " + lexical_cast<string>(dims[1]));
	Mat referenceMesh(dims[0], dims[1], CV_32FC1);
	dsMean.read(referenceMesh.ptr<float>(0), H5::PredType::NATIVE_FLOAT);
	dsMean.close();
	representerGroup.close();

	// convert to 3 vectors with the x, y and z coordinates for easy searching
	vector<float> refx(referenceMesh.col(0).clone());
	vector<float> refy(referenceMesh.col(1).clone());
	vector<float> refz(referenceMesh.col(2).clone());

	// load the landmarks info (mapping name <-> reference (x, y, z)-coords)
	H5::Group landmarksGroup = h5Model.openGroup("/metadata/landmarks");
	dsMean = landmarksGroup.openDataSet("./text");
	
	H5std_string outputString;
	Loggers->getLogger("shapemodels").debug("Reading landmark information from the model.");
	dsMean.read(outputString, dsMean.getStrType());
	dsMean.close();
	landmarksGroup.close();
	vector<string> landmarkLines;
	boost::split(landmarkLines, outputString, boost::is_any_of("\n"), boost::token_compress_on);
	for (const auto& l : landmarkLines) {
		if (l == "") {
			continue;
		}
		vector<string> line;
		boost::split(line, l, boost::is_any_of(" "), boost::token_compress_on);
		string name = line[0];
		int visibility = lexical_cast<int>(line[1]);
		float x = lexical_cast<float>(line[2]);
		float y = lexical_cast<float>(line[3]);
		float z = lexical_cast<float>(line[4]);
		// Find the x, y and z values in the reference
		const auto ivx = std::find(begin(refx), end(refx), x);
		const auto ivy = std::find(begin(refy), end(refy), y);
		const auto ivz = std::find(begin(refz), end(refz), z);
		// TODO Check for .end()!
		const auto vertexIdX = std::distance(begin(refx), ivx);
		const auto vertexIdY = std::distance(begin(refy), ivy);
		const auto vertexIdZ = std::distance(begin(refz), ivz);
		// assert vertexIdX == vertexIdY == vertexIdZ
		// Note: this lookup is not robust. If another vertex shares one or two of the
		// coordinates, the three searches can return different indices. It would be better
		// to find all vertices whose x matches and then keep the one whose y and z also
		// match (see the sketch after this function).
		model.landmarkVertexMap.insert(make_pair(name, vertexIdX));
	}

	h5Model.close();
	return model;
}
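The TODO above points out that searching each coordinate column separately can pick the wrong vertex when another vertex shares one or two coordinates. Below is a minimal sketch of the suggested fix, matching all three coordinates per vertex; findVertexId and the eps tolerance are illustrative and not part of the original model code.

// Sketch: return the index of the reference vertex whose x, y and z all match the
// landmark coordinates within eps, or -1 if no such vertex exists.
#include <cmath>
#include <cstddef>
#include <vector>

int findVertexId(const std::vector<float>& refx, const std::vector<float>& refy,
                 const std::vector<float>& refz, float x, float y, float z,
                 float eps = 1e-6f)
{
	for (std::size_t i = 0; i < refx.size(); ++i) {
		if (std::abs(refx[i] - x) < eps &&
		    std::abs(refy[i] - y) < eps &&
		    std::abs(refz[i] - z) < eps) {
			return static_cast<int>(i); // first vertex matching all three coordinates
		}
	}
	return -1; // not found; the caller should handle this case
}

The landmark loop above could then insert make_pair(name, findVertexId(refx, refy, refz, x, y, z)) after checking the result for -1.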
Beispiel #30
0
bool ossim_hdf5::getValidBoundingRect( H5::DataSet& dataset,
                                       const std::string& name,
                                       ossimIrect& rect )
{
   bool result = false;
   H5::DataSpace imageDataspace = dataset.getSpace();
   const ossim_int32 IN_DIM_COUNT = imageDataspace.getSimpleExtentNdims();
         
   if ( IN_DIM_COUNT == 2 )
   {
      // Get the extents. Assuming the dimensions are the same for the lat/lon dataset.
      std::vector<hsize_t> dimsOut(IN_DIM_COUNT);
      imageDataspace.getSimpleExtentDims( &dimsOut.front(), 0 );

      if ( dimsOut[0] && dimsOut[1] )
      {
         
         //---
         // Capture the rectangle:
         // dimsOut[0] is height, dimsOut[1] is width:
         //---
         rect = ossimIrect( 0, 0,
                            static_cast<ossim_int32>( dimsOut[1]-1 ),
                            static_cast<ossim_int32>( dimsOut[0]-1 ) );
         
         const ossim_int32 WIDTH  = rect.width();
               
         std::vector<hsize_t> inputCount(IN_DIM_COUNT);
         std::vector<hsize_t> inputOffset(IN_DIM_COUNT);
         
         inputOffset[0] = 0;
         inputOffset[1] = 0;
         
         inputCount[0] = 1;
         inputCount[1] = WIDTH;
         
         // Output dataspace dimensions.
         const ossim_int32 OUT_DIM_COUNT = 3;
         std::vector<hsize_t> outputCount(OUT_DIM_COUNT);
         outputCount[0] = 1;     // single band
         outputCount[1] = 1;     // single line
         outputCount[2] = WIDTH; // whole line
               
         // Output dataspace offset.
         std::vector<hsize_t> outputOffset(OUT_DIM_COUNT);
         outputOffset[0] = 0;
         outputOffset[1] = 0;
         outputOffset[2] = 0;
               
         ossimScalarType scalar = ossim_hdf5::getScalarType( &dataset );
         if ( scalar == OSSIM_FLOAT32 )
         {
            // See if we need to swap bytes:
            ossimEndian* endian = 0;
            if ( ( ossim::byteOrder() != ossim_hdf5::getByteOrder( &dataset ) ) )
            {
               endian = new ossimEndian();
            }

            // Native type:
            H5::DataType datatype = dataset.getDataType();
                  
            // Output dataspace always the same one line.
            H5::DataSpace bufferDataSpace( OUT_DIM_COUNT, &outputCount.front());
            bufferDataSpace.selectHyperslab( H5S_SELECT_SET,
                                             &outputCount.front(),
                                             &outputOffset.front() );

            //---
            // The dataset sample has NULL lines at the end, so scan for the valid rect.
            // Per NOAA, test with "<= -999" because the NULL value is fuzzy, e.g. -999.3.
            //---
            const ossim_float32 NULL_VALUE = -999.0;

            //---
            // VIIRS Radiance data has a -1.5e-9 in the first column.
            // Treat this as a null.
            //---
            const ossim_float32 NULL_VALUE2 = ( name == "/All_Data/VIIRS-DNB-SDR_All/Radiance" )
               ? -1.5e-9 : NULL_VALUE;
            const ossim_float32 TOLERANCE = 0.1e-9; // For ossim::almostEqual()

            // Hold one line:
            std::vector<ossim_float32> values( WIDTH );

            // Find the ul pixel:
            ossimIpt ulIpt = rect.ul();
            bool found = false;
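            // Strategy: scan rows top-down until a row with a valid (non-null) sample is
            // found, then scan bottom-up below; the two rows bound the valid rectangle.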
                  
            // Line loop to find upper left pixel:
            while ( ulIpt.y <= rect.lr().y )
            {
               inputOffset[0] = static_cast<hsize_t>(ulIpt.y);
               imageDataspace.selectHyperslab( H5S_SELECT_SET,
                                               &inputCount.front(),
                                               &inputOffset.front() );
               
               // Read data from file into the buffer.
               dataset.read( (void*)&values.front(), datatype, bufferDataSpace, imageDataspace );
               
               if ( endian )
               {
                  // If the endian pointer is initialized (not zero), swap the bytes.
                  endian->swap( scalar, (void*)&values.front(), WIDTH );
               }
               
               // Sample loop:
               ulIpt.x = rect.ul().x;
               ossim_int32 index = 0;
               while ( ulIpt.x <= rect.lr().x )
               {
                  if ( !ossim::almostEqual(values[index], NULL_VALUE2, TOLERANCE) &&
                       ( values[index] > NULL_VALUE ) )
                  {
                     found = true; // Found valid pixel.
                     break;
                  }
                  ++ulIpt.x;
                  ++index;
                     
               } // End: sample loop
                     
               if ( found )
               {
                  break;
               }

               ++ulIpt.y;
                     
            } // End line loop to find ul pixel:

            // Find the lower right pixel:
            ossimIpt lrIpt = rect.lr();
            found = false;
                  
            // Line loop to find last pixel:
            while ( lrIpt.y >= rect.ul().y )
            {
               inputOffset[0] = static_cast<hsize_t>(lrIpt.y);
               imageDataspace.selectHyperslab( H5S_SELECT_SET,
                                               &inputCount.front(),
                                               &inputOffset.front() );
               
               // Read data from file into the buffer.
               dataset.read( (void*)&values.front(), datatype, bufferDataSpace, imageDataspace );

               if ( endian )
               {
                  // If the endian pointer is initialized (not zero), swap the bytes.
                  endian->swap( scalar, (void*)&values.front(), WIDTH );
               }
            
               // Sample loop:
               lrIpt.x = rect.lr().x;
               ossim_int32 index = WIDTH-1;
               
               while ( lrIpt.x >= rect.ul().x )
               {
                  if ( !ossim::almostEqual(values[index], NULL_VALUE2, TOLERANCE) &&
                       ( values[index] > NULL_VALUE ) )
                  {
                     found = true; // Found valid pixel.
                     break;
                  }
                  --lrIpt.x;
                  --index;
                     
               } // End: sample loop
                     
               if ( found )
               {
                  break;
               }

               --lrIpt.y;
                     
            } // End line loop to find lower right pixel.

            rect = ossimIrect( ulIpt, lrIpt );

            // Cleanup:
            if ( endian )
            {
               delete endian;
               endian = 0;
            }

            result = true;
            
         } 
         else // Matches: if ( scalar == OSSIM_FLOAT32 ){...}
         {
            ossimNotify(ossimNotifyLevel_WARN)
               << "ossim_hdf5::getBoundingRect WARNING!"
               << "\nUnhandled scalar type: "
               << ossimScalarTypeLut::instance()->getEntryString( scalar )
               << std::endl;
         }
               
      } // Matches: if ( dimsOut...
            
   } // Matches: if ( IN_DIM_COUNT == 2 )
         
   imageDataspace.close();

   return result;
   
} // End: ossim_hdf5::getValidBoundingRect(...)
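A hedged usage sketch for the function above: the file name and dataset path are placeholders, the include paths may differ between OSSIM builds, and error handling is omitted.

#include <iostream>
#include <string>
#include <H5Cpp.h>
#include <ossim/base/ossimIrect.h>
// The ossim_hdf5 helper declarations are assumed to come from the OSSIM HDF5 plugin headers.

void printValidRect()
{
   // Placeholder file and dataset names; adjust to the actual VIIRS product.
   H5::H5File file( "SVDNB_sample.h5", H5F_ACC_RDONLY );
   const std::string name = "/All_Data/VIIRS-DNB-SDR_All/Radiance";
   H5::DataSet dataset = file.openDataSet( name );

   ossimIrect rect;
   if ( ossim_hdf5::getValidBoundingRect( dataset, name, rect ) )
   {
      std::cout << "valid rect: " << rect.width() << " x " << rect.height() << std::endl;
   }

   dataset.close();
   file.close();
}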