unsigned char DiHdfImporter::getFileAffinity(const std::string &filename) { HdfContext hdf(filename); if (hdf.grid() == FAIL) { return CAN_NOT_LOAD; } return CAN_LOAD; }
Alembic::Abc::IArchive IFactory::getArchive( const std::string & iFileName,
                                             CoreType & oType )
{
    // Probe with the Ogawa core first. Open quietly (kQuietNoopPolicy) so a
    // failed attempt does not raise; restore the caller's policy on success.
    Alembic::AbcCoreOgawa::ReadArchive ogawaReader( m_numStreams );
    Alembic::Abc::IArchive result( ogawaReader, iFileName,
        Alembic::Abc::ErrorHandler::kQuietNoopPolicy, m_cachePtr );

    if ( result.valid() )
    {
        oType = kOgawa;
        result.getErrorHandler().setPolicy( m_policy );
        return result;
    }

#ifdef ALEMBIC_WITH_HDF5
    // HDF5 core is compiled in: retry with it.
    Alembic::AbcCoreHDF5::ReadArchive hdfReader( m_cacheHierarchy );
    result = Alembic::Abc::IArchive( hdfReader, iFileName,
        Alembic::Abc::ErrorHandler::kQuietNoopPolicy, m_cachePtr );

    if ( result.valid() )
    {
        oType = kHDF5;
        result.getErrorHandler().setPolicy( m_policy );
        return result;
    }
#else
    // HDF5 core not compiled in: still recognize HDF5 files by the 8-byte
    // superblock signature so the caller can report the file's type.
    // www.hdfgroup.org/HDF5/doc/H5.format.html#Superblock
    std::ifstream probe;
    probe.open( iFileName.c_str(), std::ios::binary );
    if ( probe.is_open() )
    {
        static const char magic[8] =
            { '\211', 'H', 'D', 'F', '\r', '\n', '\032', '\n' };

        char header[8] = { 0, 0, 0, 0, 0, 0, 0, 0 };
        probe.read( header, 8 );
        probe.close();

        bool isHdf5 = true;
        for ( int b = 0; b < 8 && isHdf5; ++b )
        {
            isHdf5 = ( header[b] == magic[b] );
        }

        if ( isHdf5 )
        {
            oType = kHDF5;
            return Alembic::Abc::IArchive();
        }
    }
#endif

    // Neither core accepted the file and it is not a recognizable HDF5 file.
    oType = kUnknown;
    return Alembic::Abc::IArchive();
}
Hdf Hdf::parentImpl() const {
  // Return a copy of this node addressing its parent.
  Hdf parent(*this);

  if (!m_name.empty()) {
    // Node has a name component: dropping it yields the parent.
    parent.m_name.clear();
    return parent;
  }

  if (m_path.empty()) {
    // No name and no path left: we are at the topmost node.
    throw HdfInvalidOperation("calling parent() on topmost node");
  }

  // No name: peel the last dot-separated component off the path.
  const size_t dot = m_path.rfind('.');
  if (dot == std::string::npos) {
    // Only one component remains; it becomes the name and the path empties.
    parent.m_name = m_path;
    parent.m_path.clear();
  } else {
    parent.m_name = m_path.substr(dot + 1);
    parent.m_path = m_path.substr(0, dot);
  }
  return parent;
}
std::vector<ImportDescriptor*> DiHdfImporter::getImportDescriptors(const std::string &filename) { mErrors.clear(); mWarnings.clear(); std::vector<ImportDescriptor*> descriptors; ImportDescriptorResource pImportDescriptor(filename, TypeConverter::toString<RasterElement>()); VERIFYRV(pImportDescriptor.get(), descriptors); descriptors.push_back(pImportDescriptor.release()); HdfContext hdf(filename); if (hdf.grid() == FAIL) { mErrors.push_back("Invalid DI HDF file."); return descriptors; } int32 frames = 0; int32 attrs = 0; GRfileinfo(hdf.grid(), &frames, &attrs); if (frames <= 0) { mErrors.push_back("Dataset does not contain and frames."); return descriptors; } if (hdf.toFrame(0) == FAIL) { mErrors.push_back("Unable to access image data."); return descriptors; } char pName[256]; int32 comps = 0; int32 data_type = 0; int32 interlace_mode = 0; int32 pDims[2] = {0,0}; EncodingType encoding; GRgetiminfo(hdf.riid(), pName, &comps, &data_type, &interlace_mode, pDims, &attrs); switch(data_type) { case DFNT_FLOAT32: encoding = FLT4BYTES; break; case DFNT_FLOAT64: encoding = FLT8BYTES; break; case DFNT_CHAR8: case DFNT_INT8: encoding = INT1SBYTE; break; case DFNT_UCHAR8: case DFNT_UINT8: encoding = INT1UBYTE; break; case DFNT_INT16: encoding = INT2SBYTES; break; case DFNT_UINT16: encoding = INT2UBYTES; break; case DFNT_INT32: encoding = INT4SBYTES; break; case DFNT_UINT32: encoding = INT4UBYTES; break; case DFNT_INT64: case DFNT_UINT64: default: mErrors.push_back("Unknown data encoding."); break; } pImportDescriptor->setDataDescriptor(RasterUtilities::generateRasterDataDescriptor( filename, NULL, pDims[1], pDims[0], frames, BSQ, encoding, IN_MEMORY)); RasterUtilities::generateAndSetFileDescriptor(pImportDescriptor->getDataDescriptor(), filename, std::string(), LITTLE_ENDIAN_ORDER); return descriptors; }
// Convert an IFEM HDF5 result file to VTF (Ceetron) or VTU (VTK) format.
// Usage and options are printed when no input file is given below.
int main (int argc, char** argv)
{
  int format = 1;               // VTF output: 0 = ASCII, 1 = binary
  int n[3] = { 2, 2, 2 };       // visualization points per knot-span per direction
  int dims = 3;                 // spatial dimension of the model (1, 2 or 3)
  int skip=1;                   // dump every <skip>'th time level
  int start=0;                  // first time level to process
  int end=-1;                   // last time level (-1 = up to the last level in file)
  bool last=false;              // only output the final time level
  char* infile = 0;
  char* vtffile = 0;
  float starttime = -1, endtime = -1;   // alternative start/end given as times

  // Command-line parsing; first two non-option arguments are input/output files.
  for (int i = 1; i < argc; i++)
    if (!strcmp(argv[i],"-format") && i < argc-1) {
      if (!strcasecmp(argv[++i],"ascii"))
        format = 0;
      else if (!strcasecmp(argv[i],"binary"))
        format = 1;
      else
        format = atoi(argv[i]);
    }
    else if (!strcmp(argv[i],"-nviz") && i < argc-1)
      n[0] = n[1] = n[2] = atoi(argv[++i]);
    else if (!strcmp(argv[i],"-1D"))
      dims = 1;
    else if (!strcmp(argv[i],"-2D"))
      dims = 2;
    else if (!strcmp(argv[i],"-last"))
      last = true;
    else if (!strcmp(argv[i],"-start") && i < argc-1)
      start = atoi(argv[++i]);
    else if (!strcmp(argv[i],"-starttime") && i < argc-1)
      starttime = atof(argv[++i]);
    else if (!strcmp(argv[i],"-end") && i < argc-1)
      end = atoi(argv[++i]);
    else if (!strcmp(argv[i],"-endtime") && i < argc-1)
      endtime = atof(argv[++i]);
    else if (!strcmp(argv[i],"-ndump") && i < argc-1)
      skip = atoi(argv[++i]);
    else if (!infile)
      infile = argv[i];
    else if (!vtffile)
      vtffile = argv[i];
    else
      std::cerr <<" ** Unknown option ignored: "<< argv[i] << std::endl;

  if (!infile) {
    std::cout <<"usage: "<< argv[0]
              <<" <inputfile> [<vtffile>|<vtufile>] [-nviz <nviz>] \n"
              << "[-ndump <ndump>] [-last] [-start <level>] [-end <level>]\n"
              << "[-starttime <time>] [-endtime <time>] [-1D|-2D]\n"
              << "[-format <0|1|ASCII|BINARY>]\n";
    return 0;
  }
  else if (!vtffile)
    vtffile = infile;   // output name defaults to the input name

  std::cout <<"\n >>> IFEM HDF5 to VT[F|U] converter <<<"
            <<"\n ==================================\n"
            <<"\nInput file: " << infile;
  std::cout <<"\nOutput file: "<< vtffile
            <<"\nNumber of visualization points: "
            << n[0] <<" "<< n[1] << " " << n[2] << std::endl;

  // Choose writer from the output file extension (VTU is a VTF subclass).
  VTF* myVtf;
  if (strstr(vtffile,".vtf"))
    myVtf = new VTF(vtffile,format);
  else
    myVtf = new VTU(vtffile,last?1:0);

  // Process XML - establish fields and collapse bases.
  // NOTE: strtok truncates infile at the first '.' in place; XMLWriter below
  // therefore receives the basename.
  PatchMap patches;
  HDF5Writer hdf(strtok(infile,"."),ProcessAdm(),true,true);
  XMLWriter xml(infile,ProcessAdm());
  xml.readInfo();

  int levels = xml.getLastTimeLevel();
  std::cout <<"Reading "<< infile <<": Time levels = "<< levels << std::endl;

  // Group field entries by basis name; special-case eigenmodes (levels come
  // from the component count) and nodal forces (their own pseudo-basis).
  const std::vector<XMLWriter::Entry>& entry = xml.getEntries();
  std::vector<XMLWriter::Entry>::const_iterator it;
  ProcessList processlist;
  for (it = entry.begin(); it != entry.end(); ++it) {
    if (!it->basis.empty() && it->type != "restart") {
      processlist[it->basis].push_back(*it);
      std::cout << it->name <<"\t"<< it->description
                <<"\tnc="<< it->components <<"\t"<< it->basis << std::endl;
    }
    if (it->type == "eigenmodes") {
      levels = it->components-1;
      processlist[it->basis].back().components = 1;
    }
    if (it->type == "nodalforces")
      processlist["nodalforces"].push_back(*it);
  }

  if (processlist.empty()) {
    std::cout << "No fields to process, bailing" << std::endl;
    exit(1);
  }

  ProcessList::const_iterator pit = processlist.begin();
  double time = 0.0;
  // setup step boundaries and initial time
  // (when -starttime/-endtime are given, translate times to level indices
  // using the timestep of the first field)
  if (starttime > 0)
    start = (int)(floor(starttime/pit->second.begin()->timestep));
  if (endtime > 0)
    end = int(endtime/pit->second.begin()->timestep+0.5f);
  if (end == -1)
    end = levels;
  time = last ? end*pit->second.begin()->timestep
              : start*pit->second.begin()->timestep;

  bool ok = true;
  int block = 0;          // running geometry/field block counter
  VTFFieldBlocks fieldBlocks;
  int k = 1;              // output state (step) counter, 1-based
  for (int i = last?end:start; i <= end && ok; i += skip) {
    if (levels > 0) {
      if (processlist.begin()->second.begin()->timestep > 0) {
        // Transient case: read the actual time of this level from the file.
        hdf.readDouble(i,"timeinfo","SIMbase-1",time);
        std::cout <<"Time level "<< i;
        std::cout << " (t=" << time << ")";
      }
      else
        std::cout << "Step " << i+1;
      std::cout << std::endl;
    }

    VTFList vlist, slist;     // vector/scalar field blocks for this state
    bool geomWritten=false;
    // Re-tessellate the geometry on the first pass, or whenever this level
    // carries its own geometry. NOTE(review): isLR is defined elsewhere in
    // this file — presumably a flag for (adaptive) LR-spline input; confirm.
    if ((isLR && hdf.hasGeometries(i)) || patches.empty()) {
      patches = setupPatchMap(processlist, hdf.hasGeometries(i)?i:0,
                              hdf, dims, n, *myVtf, block, k);
      geomWritten = true;
    }

    // Write every field of every basis for this time level.
    for (pit = processlist.begin(); pit != processlist.end(); ++pit) {
      for (it = pit->second.begin(); it != pit->second.end() && ok; ++it) {
        if (it->once && k > 1)
          continue;   // one-shot fields are only written for the first state
        if (pit->first != "nodalforces" && patches[pit->first].Patch.empty()) {
          if (k == 1)
            std::cerr << "Ignoring \"" << it->name
                      << "\", basis not loaded" << std::endl;
          continue;
        }
        std::cout <<"Reading \""<< it->name <<"\""<< std::endl;

        if (pit->first == "nodalforces") {
          // Nodal forces are stored as flat (point,vector) sextets.
          Vector vec;
          hdf.readVector(i, it->name, -1, vec);
          std::vector<Vec3Pair> pts(vec.size()/6);
          for (size_t j=0;j<vec.size()/6;++j) {
            for (int l=0;l<3;++l) {
              pts[j].first[l]  = vec[6*j+l];
              pts[j].second[l] = vec[6*j+l+3];
            }
          }
          int geoBlck=-1;
          ok = myVtf->writeVectors(pts,geoBlck,++block,it->name.c_str(),k);
          continue;
        }

        // Regular fields: one stored vector per patch.
        for( int j=0;j<pit->second[0].patches;++j) {
          Vector vec;
          ok = hdf.readVector(it->once?0:i,it->name,j+1,vec);
          if (it->name.find('+') != std::string::npos) {
            /* Temporary hack to split a vector into scalar fields.
               The big assumption here is that the individual scalar names
               are separated by '+'-characters in the vector field name */
            Matrix tmp(it->components,vec.size()/it->components);
            tmp.fill(vec.ptr());
            size_t pos = 0;
            size_t fp = it->name.find('+');
            std::string prefix;
            if (fp != std::string::npos) {
              // An optional "<prefix> " before the first '+' is kept on
              // every generated scalar name.
              size_t fs = it->name.find(' ');
              if (fs < fp) {
                prefix = it->name.substr(0,fs+1);
                pos = fs+1;
              }
            }
            // One scalar field per matrix row / per '+'-separated name.
            for (size_t r = 1; r <= tmp.rows() && pos < it->name.size(); r++) {
              size_t end = it->name.find('+',pos);
              ok &= writeFieldPatch(tmp.getRow(r),1,
                                    *patches[pit->first].Patch[j],
                                    patches[pit->first].FakeModel[j],
                                    patches[pit->first].StartPart+j, block,
                                    prefix+it->name.substr(pos,end-pos),
                                    vlist, slist, *myVtf,
                                    it->description, it->type);
              pos = end+1;
            }
          } else {
            if (it->type == "knotspan") {
              // Per-element (knot-span) data.
              ok &= writeElmPatch(vec,*patches[pit->first].Patch[j],
                                  myVtf->getBlock(j+1),
                                  patches[pit->first].StartPart+j,block,
                                  it->description, it->name, slist, *myVtf);
            } else if (it->type == "eigenmodes") {
              ok &= writeFieldPatch(vec,it->components,
                                    *patches[pit->first].Patch[j],
                                    patches[pit->first].FakeModel[j],
                                    patches[pit->first].StartPart+j,
                                    block,it->name,vlist,slist,*myVtf,
                                    it->description, it->type);
            } else {
              // Plain nodal field.
              ok &= writeFieldPatch(vec,it->components,
                                    *patches[pit->first].Patch[j],
                                    patches[pit->first].FakeModel[j],
                                    patches[pit->first].StartPart+j,
                                    block,it->name,vlist,slist,*myVtf,
                                    it->description, it->type);
            }
          }
        }
      }
    }

    if (geomWritten)
      myVtf->writeGeometryBlocks(k);
    writeFieldBlocks(vlist,slist,*myVtf,k,fieldBlocks);

    if (!ok)
      return 3;   // a read/write failed above

    // Register the state (step) with an appropriate label.
    bool res;
    if (processlist.begin()->second.begin()->type == "eigenmodes") {
      double val;
      bool freq=false;
      // Older files store "eigenval"; fall back to "eigenfrequency".
      if (!hdf.readDouble(i, "1", "eigenval", val)) {
        freq = true;
        hdf.readDouble(i, "1", "eigenfrequency", val);
      }
      res=myVtf->writeState(k++, freq?"Frequency %g" : "Eigenvalue %g", val, 1);
    }
    else if (processlist.begin()->second.begin()->timestep > 0)
      res=myVtf->writeState(k++,"Time %g",time,0);
    else {
      double foo = k;
      res=myVtf->writeState(k++,"Step %g", foo, 0);
    }
    if (!res) {
      std::cerr << "Error writing state" << std::endl;
      return 4;
    }

    // Advance the nominal time by the skipped interval.
    pit = processlist.begin();
    time += pit->second.begin()->timestep*skip;
  }

  hdf.closeFile(levels,true);
  delete myVtf;
  return 0;
}