/**
 * Recursively search the object tree rooted at poH5Objects for a dataset
 * node whose name matches pszDatasetName.
 *
 * @param poH5Objects root of the (sub)tree to search.
 * @param pszDatasetName dataset name to match (compared with EQUAL).
 * @return pointer to the matching HDF5GroupObjects node, or NULL if no
 *         dataset with that name exists below poH5Objects.
 */
HDF5GroupObjects* HDF5Dataset::HDF5FindDatasetObjects(
    HDF5GroupObjects *poH5Objects, const char *pszDatasetName )
{
    // The current node may itself be the dataset we are looking for.
    if( poH5Objects->nType == H5G_DATASET
        && EQUAL(poH5Objects->pszName, pszDatasetName) )
    {
        return poH5Objects;
    }

    // Otherwise search each child subtree depth-first.
    for( int i = 0; i < poH5Objects->nbObjs; i++ )
    {
        HDF5GroupObjects *poObjectsFound =
            HDF5FindDatasetObjects(poH5Objects->poHchild + i, pszDatasetName);

        // Is this our dataset?
        if( poObjectsFound != NULL )
            return poObjectsFound;
    }

    // Dataset has not been found.
    return NULL;
}
// Locate, by exhaustive depth-first traversal, the dataset node named
// pszDatasetName underneath (or at) poH5Objects.  Returns the node on
// success and nullptr when no such dataset exists in this subtree.
HDF5GroupObjects * HDF5Dataset::HDF5FindDatasetObjects( HDF5GroupObjects *poH5Objects,
                                                        const char *pszDatasetName )
{
    // A matching dataset node ends the search immediately.
    const bool bIsTarget =
        poH5Objects->nType == H5G_DATASET &&
        EQUAL(poH5Objects->pszName, pszDatasetName);
    if( bIsTarget )
    {
#ifdef DEBUG_VERBOSE
        printf("found it! %p\n", poH5Objects); /*ok*/
#endif
        return poH5Objects;
    }

    // Not here: descend into every child in turn.
    for( unsigned int iChild = 0; iChild < poH5Objects->nbObjs; iChild++ )
    {
        HDF5GroupObjects *const poHit = HDF5FindDatasetObjects(
            poH5Objects->poHchild + iChild, pszDatasetName);
        if( poHit != nullptr )
            return poHit;
    }

    // No dataset of that name anywhere below this node.
    return nullptr;
}
// writing attribute to the file void WriteAttribute( RString& rFilename, RString& rPath, RVector<int> rIntdata, RVector<double> rDoubledata, RString rStrdata, vector<STLTypes::NameValuePair>& inAttributes, SEXP& results) { HDF5DA da(rFilename.Data(0),StringUtils::CompareNoCase(ExtractAttribute(inAttributes,NEWFILE),"true")); HDF5OutputConverter oc; string strPath(rPath.Data(0)); // read the compress level string compressStr = ExtractAttribute(inAttributes,COMPRESSLEVEL); int compressedLevel = 0; if ((compressStr.size()>0)) compressedLevel = atoi(compressStr.c_str()); // the dimension of the data to be read string strRow = ExtractAttribute(inAttributes,ROW); string strCol = ExtractAttribute(inAttributes,COL); size_t row=0; size_t col=0; if ((strRow.size()>0) && (strCol.size()>0)) { row = atoi(strRow.c_str()); col = atoi(strCol.c_str()); } string::size_type index = strPath.find_last_of('/'); if((index == string::npos )||(index==0)) throw RException("Attribute can only be directly under an existing dataset or group."); string parent = strPath.substr(0,index); string attr = strPath.substr(index+1,string::npos); // are we going to overwrite an existing attribute bool overwrite = StringUtils::CompareNoCase(ExtractAttribute(inAttributes,OVERWRITE),"true"); if(rIntdata.Size()>0) { // obtain the data from input rIntdata.Dimensions(row,col); Matrix<int> intdata; rIntdata.Extract(intdata); if(CheckDataset(da,parent)) { HDF5Dataset ds = da.OpenDatasetByFullPath(parent); if (overwrite) ds.DeleteAttribute(attr); ds.CreateAttribute(attr,intdata, compressedLevel); } else if(CheckGroup(da,parent)) { HDF5Group gp = da.OpenGroupByFullPath(parent); if (overwrite) gp.DeleteAttribute(attr); gp.CreateAttribute(attr,intdata, compressedLevel); } else throw RException("Attribute can only be directly under an existing dataset or group."); } else if(rDoubledata.Size()>0) { // obtain the data from input rDoubledata.Dimensions(row,col); Matrix<double> doubledata; 
rDoubledata.Extract(doubledata); if(CheckDataset(da,parent)) { HDF5Dataset ds = da.OpenDatasetByFullPath(parent); if (overwrite) ds.DeleteAttribute(attr); ds.CreateAttribute(attr,doubledata, compressedLevel); } else if(CheckGroup(da,parent)) { HDF5Group gp = da.OpenGroupByFullPath(parent); if (overwrite) gp.DeleteAttribute(attr); gp.CreateAttribute(attr,doubledata, compressedLevel); } else throw RException("Attribute can only be directly under an existing dataset or group."); } else if(rStrdata.Size()>0) { // obtain the data from input rStrdata.Dimensions(row,col); Matrix<string> strdata; rStrdata.Extract(strdata,row,col); if(CheckDataset(da,parent)) { HDF5Dataset ds = da.OpenDatasetByFullPath(parent); if (overwrite) ds.DeleteAttribute(attr); ds.CreateAttribute(attr,strdata, compressedLevel); } else if(CheckGroup(da,parent)) { HDF5Group gp = da.OpenGroupByFullPath(parent); if (overwrite) gp.DeleteAttribute(attr); gp.CreateAttribute(attr,strdata, compressedLevel); } else throw RException("Attribute can only be directly under an existing dataset or group."); } }
// Read data from file void ReadData( RString& rFilename, RString& rPath, vector<STLTypes::NameValuePair>& inAttributes, SEXP& results) { HDF5DA da(rFilename.Data(0),false); HDF5OutputConverter oc; string strPath(rPath.Data(0)); // check if this is a valid dataset vector<string> dasummary=da.GetDatasetNamesFullPath(); bool isDataset = (find( dasummary.begin( ), dasummary.end( ), strPath)!=dasummary.end( )); if(!isDataset) throw RException("path - "+strPath+" is not dataset."); HDF5Dataset ds = da.OpenDatasetByFullPath(strPath); string startStr = ExtractAttribute(inAttributes,STARTINDEX); string nrowsStr = ExtractAttribute(inAttributes,NROWS); size_t start=0; size_t nRows=ds.NumberOfRows(); // if only start index is supplied, we need to figure out the nrows. if ((startStr.size()>0)) { start = atoi(startStr.c_str()); nRows = nRows-start; } // nrows is supplied as well if (nrowsStr.size()>0) nRows = atoi(nrowsStr.c_str()); switch (ds.GetDataType()) { case HDF5DataType::CHAR:{ Matrix<char> data; da.ReadDataByFullPath(strPath, start, nRows, data); results=oc.ConvertIntMatrix(data); break;} case HDF5DataType::INT:{ Matrix<int> data; da.ReadDataByFullPath(strPath, start, nRows, data); results=oc.ConvertIntMatrix(data); break;} case HDF5DataType::INT64:{ Matrix<__int64> data; da.ReadDataByFullPath(strPath, start, nRows, data); results=oc.ConvertDoubleMatrix(data); break;} case HDF5DataType::FLOAT:{ Matrix<float> data; da.ReadDataByFullPath(strPath, start, nRows, data); results=oc.ConvertDoubleMatrix(data); break;} case HDF5DataType::DOUBLE:{ Matrix<double> data; da.ReadDataByFullPath(strPath, start, nRows, data); results=oc.ConvertDoubleMatrix(data); break;} case HDF5DataType::STRING:{ Matrix<string> data; da.ReadDataByFullPath(strPath, start, nRows, data); results=oc.ConvertStrMatrix(data); break;} case HDF5DataType::COMPOUND:{ HDF5CompoundType data; da.ReadDataByFullPath(strPath, start, nRows, data); results=oc.ConvertCompoundType(data); break;} default: throw 
RException("Unknown data type"); } }
/**
 * Recursively walk the object tree, creating metadata for every group and
 * registering each 2D/3D dataset of a supported GDAL type as a subdataset.
 *
 * @param poRootGroup root of the (sub)tree to process.
 * @param bSUBDATASET when TRUE, qualifying datasets are appended to
 *        papszSubDatasets as SUBDATASET_n_NAME / SUBDATASET_n_DESC pairs.
 * @return CE_None.
 */
CPLErr HDF5Dataset::HDF5ListGroupObjects( HDF5GroupObjects *poRootGroup,
                                          int bSUBDATASET )
{
    HDF5Dataset *poDS = this;

    // Visit the children first (depth-first traversal).
    for( int i = 0; i < poRootGroup->nbObjs; i++ )
    {
        poDS->HDF5ListGroupObjects(poRootGroup->poHchild + i, bSUBDATASET);
    }

    if( poRootGroup->nType == H5G_GROUP )
    {
        CreateMetadata(poRootGroup, H5G_GROUP);
    }

/* -------------------------------------------------------------------- */
/*      Create Sub dataset list                                         */
/* -------------------------------------------------------------------- */
    if( (poRootGroup->nType == H5G_DATASET) && bSUBDATASET
        && poDS->GetDataType(poRootGroup->native) == GDT_Unknown )
    {
        CPLDebug("HDF5", "Skipping unsupported %s of type %s",
                 poRootGroup->pszUnderscorePath,
                 poDS->GetDataTypeName(poRootGroup->native));
    }
    else if( (poRootGroup->nType == H5G_DATASET) && bSUBDATASET )
    {
        char szTemp[8192];
        char szDim[8192];

        // Only 2D and 3D datasets can be exposed as raster subdatasets.
        // snprintf (not sprintf/strcat) keeps writes inside the fixed-size
        // stack buffers.
        switch( poRootGroup->nRank )
        {
        case 3:
            snprintf(szDim, sizeof(szDim), "%dx%dx%d",
                     static_cast<int>(poRootGroup->paDims[0]),
                     static_cast<int>(poRootGroup->paDims[1]),
                     static_cast<int>(poRootGroup->paDims[2]));
            break;
        case 2:
            snprintf(szDim, sizeof(szDim), "%dx%d",
                     static_cast<int>(poRootGroup->paDims[0]),
                     static_cast<int>(poRootGroup->paDims[1]));
            break;
        default:
            return CE_None;
        }

        snprintf(szTemp, sizeof(szTemp), "SUBDATASET_%d_NAME",
                 ++(poDS->nSubDataCount));
        poDS->papszSubDatasets =
            CSLSetNameValue(poDS->papszSubDatasets, szTemp,
                            CPLSPrintf("HDF5:\"%s\":%s",
                                       poDS->GetDescription(),
                                       poRootGroup->pszUnderscorePath));

        snprintf(szTemp, sizeof(szTemp), "SUBDATASET_%d_DESC",
                 poDS->nSubDataCount);
        poDS->papszSubDatasets =
            CSLSetNameValue(poDS->papszSubDatasets, szTemp,
                            CPLSPrintf("[%s] %s (%s)", szDim,
                                       poRootGroup->pszUnderscorePath,
                                       poDS->GetDataTypeName(poRootGroup->native)));
    }

    return CE_None;
}
/**
 * Open an HDF5 file as a GDAL dataset.
 *
 * When the file exposes exactly one subdataset, that subdataset is opened
 * and returned directly instead of the container dataset.
 *
 * @param poOpenInfo GDAL open info (filename, requested access mode).
 * @return the opened dataset, or NULL on failure (not identified, HDF5
 *         open error, or update access requested).
 */
GDALDataset *HDF5Dataset::Open( GDALOpenInfo *poOpenInfo )
{
    if( !Identify(poOpenInfo) )
        return nullptr;

/* -------------------------------------------------------------------- */
/*      Create datasource.                                              */
/* -------------------------------------------------------------------- */
    HDF5Dataset *poDS = new HDF5Dataset();
    poDS->SetDescription(poOpenInfo->pszFilename);

/* -------------------------------------------------------------------- */
/*      Try opening the dataset.                                        */
/* -------------------------------------------------------------------- */
    poDS->hHDF5 = H5Fopen(poOpenInfo->pszFilename, H5F_ACC_RDONLY, H5P_DEFAULT);
    if( poDS->hHDF5 < 0 )
    {
        delete poDS;
        return nullptr;
    }

    poDS->hGroupID = H5Gopen(poDS->hHDF5, "/");
    if( poDS->hGroupID < 0 )
    {
        poDS->bIsHDFEOS = false;
        delete poDS;
        return nullptr;
    }

    poDS->bIsHDFEOS = true;
    // NOTE(review): the CPLErr result was historically assigned to an unused
    // local and never checked; a failure here still yields a dataset, just
    // with fewer attributes.
    poDS->ReadGlobalAttributes(true);

    poDS->SetMetadata(poDS->papszMetadata);

    // papszSubDatasets holds NAME/DESC pairs, hence the division by two.
    const int nSubDatasets = CSLCount(poDS->papszSubDatasets) / 2;
    if( nSubDatasets >= 1 )
        poDS->SetMetadata(poDS->papszSubDatasets, "SUBDATASETS");

    // Make sure we don't try to do any pam stuff with this dataset.
    poDS->nPamFlags |= GPF_NOSAVE;

/* -------------------------------------------------------------------- */
/*      If we have single subdataset only, open it immediately          */
/* -------------------------------------------------------------------- */
    if( nSubDatasets == 1 )
    {
        CPLString osDSName =
            CSLFetchNameValue(poDS->papszSubDatasets, "SUBDATASET_1_NAME");
        delete poDS;
        return static_cast<GDALDataset *>(
            GDALOpen(osDSName, poOpenInfo->eAccess));
    }

/* -------------------------------------------------------------------- */
/*      Confirm the requested access is supported.                      */
/* -------------------------------------------------------------------- */
    if( poOpenInfo->eAccess == GA_Update )
    {
        delete poDS;
        CPLError(CE_Failure, CPLE_NotSupported,
                 "The HDF5 driver does not support update access to existing"
                 " datasets.\n");
        return nullptr;
    }

    return poDS;
}
/**
 * Recursively walk the object tree, creating metadata for groups and
 * datasets and registering every 2D/3D dataset of a supported GDAL type
 * as a subdataset (SUBDATASET_n_NAME / SUBDATASET_n_DESC pairs).
 *
 * @param poRootGroup root of the (sub)tree to process.
 * @param bSUBDATASET when TRUE, qualifying datasets are appended to
 *        papszSubDatasets.
 * @return CE_None (datasets of unsupported rank are silently skipped).
 */
CPLErr HDF5Dataset::HDF5ListGroupObjects( HDF5GroupObjects *poRootGroup,
                                          int bSUBDATASET )
{
    HDF5Dataset *poDS = this;

    // Visit children first (depth-first traversal).
    if( poRootGroup->nbObjs > 0 )
        for( hsize_t i = 0; i < poRootGroup->nbObjs; i++ )
        {
            poDS->HDF5ListGroupObjects(poRootGroup->poHchild + i, bSUBDATASET);
        }

    if( poRootGroup->nType == H5G_GROUP )
    {
        CreateMetadata(poRootGroup, H5G_GROUP);
    }

    // Create Sub dataset list.
    if( poRootGroup->nType == H5G_DATASET && bSUBDATASET &&
        poDS->GetDataType(poRootGroup->native) == GDT_Unknown )
    {
        CPLDebug("HDF5", "Skipping unsupported %s of type %s",
                 poRootGroup->pszUnderscorePath,
                 poDS->GetDataTypeName(poRootGroup->native));
    }
    else if( poRootGroup->nType == H5G_DATASET && bSUBDATASET )
    {
        CreateMetadata(poRootGroup, H5G_DATASET);

        char szTemp[8192];  // TODO(schwehr): Get this off of the stack.

        // Format the dimension string; only 2D and 3D datasets can be
        // exposed as raster subdatasets.
        switch( poRootGroup->nRank )
        {
        case 2:
            snprintf(szTemp, sizeof(szTemp), "%dx%d",
                     static_cast<int>(poRootGroup->paDims[0]),
                     static_cast<int>(poRootGroup->paDims[1]));
            break;
        case 3:
            snprintf(szTemp, sizeof(szTemp), "%dx%dx%d",
                     static_cast<int>(poRootGroup->paDims[0]),
                     static_cast<int>(poRootGroup->paDims[1]),
                     static_cast<int>(poRootGroup->paDims[2]));
            break;
        default:
            return CE_None;
        }

        // Copy the dims out before szTemp is reused for the key names below.
        const std::string osDim = szTemp;

        snprintf(szTemp, sizeof(szTemp), "SUBDATASET_%d_NAME",
                 ++(poDS->nSubDataCount));
        poDS->papszSubDatasets =
            CSLSetNameValue(poDS->papszSubDatasets, szTemp,
                            CPLSPrintf("HDF5:\"%s\":%s",
                                       poDS->GetDescription(),
                                       poRootGroup->pszUnderscorePath));

        snprintf(szTemp, sizeof(szTemp), "SUBDATASET_%d_DESC",
                 poDS->nSubDataCount);
        poDS->papszSubDatasets = CSLSetNameValue(
            poDS->papszSubDatasets, szTemp,
            CPLSPrintf("[%s] %s (%s)", osDim.c_str(),
                       poRootGroup->pszUnderscorePath,
                       poDS->GetDataTypeName(poRootGroup->native)));
    }

    return CE_None;
}