//! Reads the per-cell minimum bed elevation of a 2D flow area and registers
//! it on \a mesh as a "Bed Elevation" dataset with a single output at time 0.
//! NaN values coming from the HDF file are replaced by the -9999 nodata
//! marker. Returns the created element output so callers can reuse the bed
//! values (the returned pointer is owned by the dataset/mesh, not the caller).
static ElementOutput* readBedElevation(Mesh* mesh, const QString fileName, const HdfGroup& gArea, int nElems)
{
  // Do the HDF reads first: openHdfDataset() throws on a missing/invalid
  // dataset, and allocating only afterwards avoids leaking the DataSet and
  // ElementOutput on that error path (the original code allocated first).
  HdfDataset dsBed = openHdfDataset(gArea, "Cells Minimum Elevation");
  QVector<float> elev_vals = dsBed.readArray();

  DataSet* dsd = new DataSet(fileName);
  dsd->setName("Bed Elevation");
  dsd->setType(DataSet::Bed);

  ElementOutput* tos = new ElementOutput;
  tos->init(nElems, false);
  tos->time = 0;

  for (int i = 0; i < nElems; ++i)
  {
    float val = elev_vals[i];
    if (val != val)
    {
      // self-comparison is false only for NaN -> store nodata marker
      tos->values[i] = -9999;
    }
    else
    {
      tos->values[i] = val;
    }
  }

  dsd->addOutput(tos);
  dsd->updateZRange();
  mesh->addDataSet(dsd);
  return tos;
}
//! Opens the dataset called \a name inside \a hdfGroup.
//! Throws LoadStatus::Err_UnknownFormat when the dataset does not exist or
//! cannot be opened.
static HdfDataset openHdfDataset(const HdfGroup& hdfGroup, const QString& name)
{
  HdfDataset dataset = hdfGroup.dataset(name);
  if (dataset.isValid())
    return dataset;

  throw LoadStatus::Err_UnknownFormat;
}
static void readUnsteadyFaceResults(Mesh* mesh, const QString flowAreaName, const QString fileName, const HdfFile& hdfFile, int nElems) { // First read face to node mapping HdfGroup gGeom = openHdfGroup(hdfFile, "Geometry"); HdfGroup gGeom2DFlowAreas = openHdfGroup(gGeom, "2D Flow Areas"); HdfGroup gArea = openHdfGroup(gGeom2DFlowAreas, flowAreaName); HdfDataset dsFace2Cells = openHdfDataset(gArea, "Faces Cell Indexes"); QVector<hsize_t> fdims = dsFace2Cells.dims(); QVector<int> face2Cells = dsFace2Cells.readArrayInt(); //2x nFaces int nFaces = fdims[0]; HdfGroup gResults = openHdfGroup(hdfFile, "Results"); HdfGroup gUnsteady = openHdfGroup(gResults, "Unsteady"); HdfGroup gOutput = openHdfGroup(gUnsteady, "Output"); HdfGroup gOBlocks = openHdfGroup(gOutput, "Output Blocks"); HdfGroup gBaseO = openHdfGroup(gOBlocks, "Base Output"); HdfGroup gUnsteadTS = openHdfGroup(gBaseO, "Unsteady Time Series"); HdfGroup g2DFlowRes = openHdfGroup(gUnsteadTS, "2D Flow Areas"); HdfGroup gFlowAreaRes = openHdfGroup(g2DFlowRes, flowAreaName); //TODO we already have this somewhere else! HdfDataset dsTimes = openHdfDataset(gUnsteadTS, "Time"); QVector<float> times = dsTimes.readArray(); // Face center data datasets QStringList datasets; datasets.push_back("Face Shear Stress"); datasets.push_back("Face Velocity"); //this is magnitude double eps = std::numeric_limits<double>::min(); foreach(QString dsName, datasets) { DataSet* dsd = new DataSet(fileName); dsd->setName(dsName); dsd->setType(DataSet::Scalar); dsd->setIsTimeVarying(times.size()>1); HdfDataset dsVals = openHdfDataset(gFlowAreaRes, dsName); QVector<float> vals = dsVals.readArray(); for (int tidx=0; tidx<times.size(); ++tidx) { ElementOutput* tos = new ElementOutput; tos->init(nElems, false); tos->time = times[tidx]; std::fill(tos->values.begin(),tos->values.end(),-9999); for (int i = 0; i < nFaces; ++i) { int idx = tidx*nFaces + i; float val = vals[idx]; // This is value on face! 
if (val == val && fabs(val) > eps) { //not nan and not 0 for (int c = 0; c < 2; ++c) { int cell_idx = face2Cells[2*i + c]; // Take just maximum if (tos->values[cell_idx] < val ) { tos->values[cell_idx] = val; } } } } dsd->addOutput(tos); } dsd->updateZRange(); mesh->addDataSet(dsd); }
bool MDAL::DriverFlo2D::parseHDF5Datasets( const std::string &datFileName ) { //return true on error size_t nFaces = mMesh->facesCount(); std::string timedepFileName = fileNameFromDir( datFileName, "TIMDEP.HDF5" ); if ( !fileExists( timedepFileName ) ) return true; HdfFile file( timedepFileName ); if ( !file.isValid() ) return true; HdfGroup timedataGroup = file.group( "TIMDEP NETCDF OUTPUT RESULTS" ); if ( !timedataGroup.isValid() ) return true; std::vector<std::string> groupNames = timedataGroup.groups(); for ( const std::string &grpName : groupNames ) { HdfGroup grp = timedataGroup.group( grpName ); if ( !grp.isValid() ) return true; HdfAttribute groupType = grp.attribute( "Grouptype" ); if ( !groupType.isValid() ) return true; /* Min and Max arrays in TIMDEP.HDF5 files have dimensions 1xntimesteps . HdfDataset minDs = grp.dataset("Mins"); if (!minDs.isValid()) return true; HdfDataset maxDs = grp.dataset("Maxs"); if (!maxDs.isValid()) return true; */ HdfDataset timesDs = grp.dataset( "Times" ); if ( !timesDs.isValid() ) return true; size_t timesteps = timesDs.elementCount(); HdfDataset valuesDs = grp.dataset( "Values" ); if ( !valuesDs.isValid() ) return true; bool isVector = MDAL::contains( groupType.readString(), "vector", ContainsBehaviour::CaseInsensitive ); // Some sanity checks size_t expectedSize = mMesh->facesCount() * timesteps; if ( isVector ) expectedSize *= 2; if ( valuesDs.elementCount() != expectedSize ) return true; // Read data std::vector<double> times = timesDs.readArrayDouble(); std::vector<float> values = valuesDs.readArray(); // Create dataset now std::shared_ptr<DatasetGroup> ds = std::make_shared< DatasetGroup >( name(), mMesh.get(), datFileName, grpName ); ds->setIsOnVertices( false ); ds->setIsScalar( !isVector ); for ( size_t ts = 0; ts < timesteps; ++ts ) { std::shared_ptr< MemoryDataset > output = std::make_shared< MemoryDataset >( ds.get() ); output->setTime( times[ts] ); if ( isVector ) { // vector for ( size_t i = 0; i < nFaces; 
++i ) { size_t idx = 2 * ( ts * nFaces + i ); double x = getDouble( static_cast<double>( values[idx] ) ); double y = getDouble( static_cast<double>( values[idx + 1] ) ); output->values()[2 * i] = x; output->values()[2 * i + 1] = y; } } else { // scalar for ( size_t i = 0; i < nFaces; ++i ) { size_t idx = ts * nFaces + i; double val = getDouble( static_cast<double>( values[idx] ) ); output->values()[i] = val; } } addDatasetToGroup( ds, output ); } // TODO use mins & maxs arrays ds->setStatistics( MDAL::calculateStatistics( ds ) ); mMesh->datasetGroups.push_back( ds ); } return false; }