bool Foam::passiveParticleStreamReconstructor::decompose
(
    const parUnallocatedFvFieldReconstructor& reconstructor,
    const unallocatedFvMesh& baseMesh,
    const IOobject& baseIO,
    const unallocatedFvMesh& thisMesh,
    const IOobject& thisIO,
    const bool,
    Ostream& os
) const
{
    Pout<< "*** LAGRANGIAN Decomposing " << baseIO.objectPath() << endl;

    Pout<< "*** LAGRANGIAN Decomposed " << baseIO.objectPath() << endl;

    return os.good();
}
template<class Type>
bool Foam::volFieldStreamReconstructor<Type>::decompose
(
    const parUnallocatedFvFieldReconstructor& reconstructor,
    const unallocatedFvMesh& baseMesh,
    const IOobject& baseIO,
    const unallocatedFvMesh& thisMesh,
    const IOobject& thisIO,
    const bool,
    Ostream& os
) const
{
    typedef GeometricField<Type, unallocatedFvPatchField, unallocatedVolMesh>
        GeoField;

    // Read base field
    Info<< "Reading " << baseIO.objectPath() << endl;
    const GeoField baseFld(baseIO, baseMesh);

    // Decompose
    tmp<GeoField> tfld(reconstructor.decomposeFvVolumeField(baseFld));

    // Stream
    Pout<< incrIndent;
    os << tfld();
    Pout<< decrIndent;

    return os.good();
}
Foam::fileName Foam::fileOperations::autoParallelFileOperation::filePath
(
    const bool checkGlobal,
    const IOobject& io,
    const word& typeName
) const
{
    if (debug)
    {
        Pout<< indent
            << "autoParallelFileOperation::filePath :"
            << " objectPath:" << io.objectPath()
            << " checkGlobal:" << checkGlobal << endl;
    }

    // Try uncollated searching
    fileName objPath = uncollatedFileOperation::filePath
    (
        checkGlobal,
        io,
        typeName
    );

    // If not found and parallel check parent
    if (objPath.empty() && io.time().processorCase()) // && checkGlobal)
    {
        fileName parentObjectPath =
            io.rootPath()/io.time().globalCaseName()
           /io.instance()/io.db().dbDir()/io.local()/io.name();

        if (isFile(parentObjectPath))
        {
            objPath = parentObjectPath;
        }
    }

    if (debug)
    {
        Pout<< indent
            << "autoParallelFileOperation::filePath :"
            << " Returning from file searching:" << endl
            << " objectPath:" << io.objectPath() << endl
            << " filePath :" << objPath << endl << endl;
    }

    return objPath;
}
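// A minimal sketch, not part of the sources above: how the parent-case
// fallback used in autoParallelFileOperation::filePath is assembled. A
// processor case lives in <root>/<case>/processor<N>, so rebuilding the path
// with globalCaseName() instead of the processor case name points at the same
// object in the undecomposed parent case. The helper name is hypothetical.
static Foam::fileName parentObjectPath(const Foam::IOobject& io)
{
    return
        io.rootPath()/io.time().globalCaseName()
       /io.instance()/io.db().dbDir()/io.local()/io.name();
}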
Foam::fileName Foam::fileOperations::masterFileOperation::filePath
(
    const bool checkGlobal,
    const IOobject& io
) const
{
    Pout<< "Starting filePath for:" << io.objectPath() << endl;

    fileName objPath;
    pathType searchType = fileOperation::NOTFOUND;
    word newInstancePath;

    if (Pstream::master())
    {
        objPath = filePath(checkGlobal, io, searchType, newInstancePath);
    }

    label masterType(searchType);
    Pstream::scatter(masterType);
    searchType = pathType(masterType);

    if
    (
        searchType == fileOperation::FINDINSTANCE
     || searchType == fileOperation::PROCESSORSFINDINSTANCE
    )
    {
        // Note: PROCESSORSFINDINSTANCE should never appear since our filePath
        // does not know about it
        Pstream::scatter(newInstancePath);
    }

    if (!Pstream::master())
    {
        objPath = objectPath(io, searchType, newInstancePath);
    }

    Pout<< "Returning from file searching:" << endl
        << " objectPath:" << io.objectPath() << endl
        << " filePath :" << objPath << endl << endl;

    return objPath;
}
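// Hedged sketch of the master-searches-then-broadcasts pattern used above;
// the helper function and its "somePath" argument are illustrative
// assumptions. Only the master queries the file system, and the outcome is
// scattered so that every processor takes the same branch afterwards.
static bool masterHasFile(const Foam::fileName& somePath)
{
    Foam::label found = 0;

    if (Foam::Pstream::master())
    {
        // File-system access on the master only
        found = Foam::isFile(somePath) ? 1 : 0;
    }

    // Broadcast the result to all slave processors
    Foam::Pstream::scatter(found);

    return found == 1;
}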
void createDummyFvMeshFiles(const polyMesh& mesh, const word& regionName)
{
    // Create dummy system/fv*
    {
        IOobject io
        (
            "fvSchemes",
            mesh.time().system(),
            regionName,
            mesh,
            IOobject::NO_READ,
            IOobject::NO_WRITE,
            false
        );

        Info<< "Testing:" << io.objectPath() << endl;

        if (!io.headerOk())
        {
            Info<< "Writing dummy " << regionName/io.name() << endl;
            dictionary dummyDict;
            dictionary divDict;
            dummyDict.add("divSchemes", divDict);
            dictionary gradDict;
            dummyDict.add("gradSchemes", gradDict);
            dictionary laplDict;
            dummyDict.add("laplacianSchemes", laplDict);

            IOdictionary(io, dummyDict).regIOobject::write();
        }
    }
    {
        IOobject io
        (
            "fvSolution",
            mesh.time().system(),
            regionName,
            mesh,
            IOobject::NO_READ,
            IOobject::NO_WRITE,
            false
        );

        if (!io.headerOk())
        {
            Info<< "Writing dummy " << regionName/io.name() << endl;
            dictionary dummyDict;

            IOdictionary(io, dummyDict).regIOobject::write();
        }
    }
}
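// Hypothetical call site, not taken from the sources above: ensure the dummy
// fvSchemes/fvSolution exist for every region of interest before fvMesh
// objects are constructed on a case that only carries a bare polyMesh.
void createDummyFvMeshFilesForRegions
(
    const polyMesh& mesh,
    const wordList& regionNames
)
{
    forAll(regionNames, regioni)
    {
        createDummyFvMeshFiles(mesh, regionNames[regioni]);
    }
}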
bool Foam::fileOperations::masterFileOperation::readHeader
(
    const bool checkGlobal,
    IOobject& io
) const
{
    bool ok = false;

    Pout<< "Starting readHeader for:" << io.objectPath() << endl;

    if (Pstream::master())
    {
        fileName objPath;
        pathType searchType = fileOperation::NOTFOUND;
        word newInstancePath;
        fileName fName(filePath(checkGlobal, io, searchType, newInstancePath));

        Pout<< "readHeader actual file:" << fName << endl;

        if (!fName.empty() && Foam::isFile(fName))
        {
            IFstream is(fName);

            if (is.good() && io.readHeader(is))
            {
                ok = true;
            }
        }
    }

    Pstream::scatter(ok);
    Pstream::scatter(io.headerClassName());
    Pstream::scatter(io.note());

    Pout<< "Done readHeader ok:" << ok << endl;

    return ok;
}
bool Foam::fileOperations::autoParallelFileOperation::read
(
    regIOobject& io,
    const bool masterOnly,
    const IOstream::streamFormat format,
    const word& type
) const
{
    bool ok = true;

    if (Pstream::parRun())
    {
        if (debug)
        {
            Pout<< indent
                << "autoParallelFileOperation::read :"
                << " Searching for handler for type:" << type
                << " global:" << io.globalObject()
                << " masterOnly:" << masterOnly
                << " of object: " << io.objectPath() << endl;
        }

        autoPtr<streamReconstructor> typeReconstructor
        (
            streamReconstructor::New(type)
        );

        if (typeReconstructor.valid())
        {
            // Set flag for e.g. codeStream
            const bool oldGlobal = io.globalObject();
            io.globalObject() = masterOnly;

            // If codeStream originates from dictionary which is
            // not IOdictionary we have a problem so use global
            //const bool oldFlag = regIOobject::masterOnlyReading;
            //regIOobject::masterOnlyReading = masterOnly;

            // Find file, check in parent directory
            fileName objPath = filePath(oldGlobal, io, type);

            // Check if the file comes from the parent path
            fileName parentObjectPath =
                io.rootPath()/io.time().globalCaseName()
               /io.instance()/io.db().dbDir()/io.local()/io.name();

            if (debug)
            {
                Pout<< indent
                    << "io.objectPath   :" << io.objectPath() << nl
                    << indent
                    << "filePath        :" << objPath << nl
                    << indent
                    << "parentObjectPath:" << parentObjectPath << endl;
            }

            if (io.objectPath() != objPath && objPath == parentObjectPath)
            {
                const Time& runTime = io.time();

                // Install basic file handler
                storeFileHandler defaultOp(basicFileHandler_);

                Pout<< incrIndent;

                // Read local mesh
                const unallocatedFvMesh& procMesh = mesh(runTime);

                // Read undecomposed mesh. Read procAddressing files
                // (from runTime).
                const unallocatedFvMesh& baseUMesh = baseMesh(runTime);

                // Mapping engine from mesh to baseMesh
                const parUnallocatedFvFieldReconstructor& mapper =
                    reconstructor(runTime);

                IOobject baseIO
                (
                    io.name(),
                    io.instance(),
                    io.local(),
                    baseUMesh.thisDb(),
                    IOobject::MUST_READ,
                    IOobject::NO_WRITE,
                    false
                );

                OStringStream os(IOstream::BINARY);

                if (debug)
                {
                    Pout<< "autoParallelFileOperation::read :"
                        << " decompose and writing:" << baseIO.objectPath()
                        << endl;
                }

                ok = typeReconstructor().decompose
                (
                    mapper,
                    baseUMesh,
                    baseIO,
                    procMesh,
                    io,
                    false,      // no face flips. Tbd.
                    os
                );

                Pout<< decrIndent;

                if (ok)
                {
                    IStringStream is(os.str(), IOstream::BINARY);

                    // Read field from stream
                    ok = io.readData(is);
                    io.close();

                    if (debug)
                    {
                        const word oldName(io.name());
                        const fileName oldPath(io.objectPath());

                        io.rename(oldName + '_' + typeName_());
                        io.write();

                        Pout<< indent
                            << "autoParallelFileOperation::read :"
                            << " successfully decomposed " << oldPath
                            << " into " << io.objectPath() << endl;

                        io.rename(oldName);
                    }
                }
                else
                {
                    if (debug)
                    {
                        Pout<< indent
                            << "autoParallelFileOperation::read :"
                            << " ** failed decomposing " << io.objectPath()
                            << endl;
                    }
                    return false;
                }
            }
            else
            {
                ok = io.readData(io.readStream(type));
                io.close();
            }

            // Restore flags
            io.globalObject() = oldGlobal;
            //regIOobject::masterOnlyReading = oldFlag;
        }
        else
        {
            ok = io.readData(io.readStream(type));
            io.close();
        }
    }
    else
    {
        ok = uncollatedFileOperation::read
        (
            io,
            masterOnly,
            format,
            type
        );
    }

    return ok;
}
bool Foam::passiveParticleStreamReconstructor::reconstruct
(
    const IOobject& io,
    const bool,
    Ostream& os
) const
{
    // io.db()                   = Cloud<passiveParticle>
    // io.db().parent()          = polyMesh
    // io.db().parent().parent() = Time

    // Retrieve from polyMesh
    const uFieldReconstructor& reconstructor =
        uFieldReconstructor::New(io.db().parent());

    const PtrList<unallocatedFvMesh>& procMeshes = reconstructor.procMeshes();

    Info<< "Reconstructing " << io.objectPath() << endl;

    // Read field on proc meshes
    PtrList<cloud> procClouds(procMeshes.size());
    PtrList<unallocatedIOPosition> procFields(procMeshes.size());
    forAll(procFields, proci)
    {
        const unallocatedFvMesh& procMesh = procMeshes[proci];

        Pout<< incrIndent;

        // Construct empty cloud
        procClouds.set
        (
            proci,
            new cloud
            (
                procMesh.thisDb(),
                "kinematicCloud"
            )
        );

        procFields.set
        (
            proci,
            new unallocatedIOPosition
            (
                IOobject
                (
                    io.name(),
                    io.instance(),
                    io.local(),
                    procClouds[proci],
                    IOobject::MUST_READ,    //IOobject::READ_IF_PRESENT,
                    IOobject::NO_WRITE
                )
            )
        );

        Pout<< decrIndent;
    }

    unallocatedIOPosition particles
    (
        IOobject
        (
            io.name(),
            io.instance(),
            io.local(),
            io.db(),
            IOobject::NO_READ,
            IOobject::NO_WRITE,
            false
        )
    );

    const faceList* facesPtr = nullptr;
    if (isA<polyMesh>(io.db().parent()))
    {
        facesPtr = &dynamic_cast<const polyMesh&>(io.db().parent()).faces();
    }

    forAll(procFields, proci)
    {
        const unallocatedIOPosition& procCloud = procFields[proci];

        const labelList& cellMap = reconstructor.cellProcAddressing()[proci];
        const labelList& faceMap = reconstructor.faceProcAddressing()[proci];

        forAllConstIter(IDLList<basicParticle>, procCloud, iter)
        {
            const basicParticle& p = iter();

            const label mappedCell = cellMap[p.cell()];

            const label mapi = faceMap[p.tetFace()];

            label mappedTetFace = -1;
            label tetPti = p.tetPt();
            if (mapi == 0)
            {
                FatalErrorInFunction << "problem" << exit(FatalError);
            }
            else if (mapi > 0)
            {
                mappedTetFace = mapi - 1;
            }
            else
            {
                mappedTetFace = -mapi - 1;

                if (facesPtr)
                {
                    // Flipping face
                    const face& f = (*facesPtr)[mappedTetFace];
                    tetPti = f.size() - 1 - tetPti;
                }
            }

            particles.append
            (
                new basicParticle
                (
                    p,
                    mappedCell,
                    mappedTetFace,
                    tetPti
                )
            );
        }
    }

    particles.writeData(os);

    return os.good();
}
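// Minimal sketch, mirroring the mapping loop above, of the signed
// faceProcAddressing convention: entries are +/-(facei + 1), zero is invalid,
// a positive value keeps the face orientation, and a negative value marks a
// flipped face (whose tet point index is reversed above). The helper name is
// an assumption, not part of the sources.
static Foam::label decodeFaceMap(const Foam::label mapi, bool& flipped)
{
    // mapi == 0 would be an error; the caller is expected to have checked it
    flipped = (mapi < 0);
    return flipped ? (-mapi - 1) : (mapi - 1);
}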
int main(int argc, char *argv[])
{
    argList::noParallel();
    argList::addBoolOption
    (
        "blockTopology",
        "write block edges and centres as .obj files"
    );
    argList::addOption
    (
        "dict",
        "file",
        "specify alternative dictionary for the blockMesh description"
    );

    #include "addRegionOption.H"
    #include "setRootCase.H"
    #include "createTime.H"

    const word dictName("blockMeshDict");

    word regionName;
    word regionPath;

    // Check if the region is specified otherwise mesh the default region
    if (args.optionReadIfPresent("region", regionName, polyMesh::defaultRegion))
    {
        Info<< nl << "Generating mesh for region " << regionName << endl;
        regionPath = regionName;
    }

    // Search for the appropriate blockMesh dictionary....
    fileName dictPath;

    // Check if the dictionary is specified on the command-line
    if (args.optionFound("dict"))
    {
        dictPath = args["dict"];

        dictPath =
        (
            isDir(dictPath)
          ? dictPath/dictName
          : dictPath
        );
    }
    // Check if dictionary is present in the constant directory
    else if
    (
        exists
        (
            runTime.path()/runTime.constant()
           /regionPath/polyMesh::meshSubDir/dictName
        )
    )
    {
        dictPath =
            runTime.constant()/regionPath/polyMesh::meshSubDir/dictName;
    }
    // Otherwise assume the dictionary is present in the system directory
    else
    {
        dictPath = runTime.system()/regionPath/dictName;
    }

    IOobject meshDictIO
    (
        dictPath,
        runTime,
        IOobject::MUST_READ,
        IOobject::NO_WRITE,
        false
    );

    if (!meshDictIO.headerOk())
    {
        FatalErrorInFunction
            << meshDictIO.objectPath()
            << nl
            << exit(FatalError);
    }

    Info<< "Creating block mesh from\n    "
        << meshDictIO.objectPath() << endl;

    blockMesh::verbose(true);

    IOdictionary meshDict(meshDictIO);
    blockMesh blocks(meshDict, regionName);

    if (args.optionFound("blockTopology"))
    {
        // Write mesh as edges.
        {
            fileName objMeshFile("blockTopology.obj");

            OFstream str(runTime.path()/objMeshFile);

            Info<< nl << "Dumping block structure as Lightwave obj format"
                << " to " << objMeshFile << endl;

            blocks.writeTopology(str);
        }
template<class Type, class PatchType>
tmp<GeometricField<Type, fvPatchField, volMesh> > autoCreateWallFunctionField
(
    const word& fieldName,
    const fvMesh& mesh,
    const objectRegistry& obj
)
{
    IOobject nutHeader
    (
        "nut",
        mesh.time().timeName(),
        obj,
        IOobject::MUST_READ
    );

    typedef GeometricField<Type, fvPatchField, volMesh> fieldType;

    if (nutHeader.headerOk())
    {
        return tmp<fieldType>
        (
            new fieldType
            (
                IOobject
                (
                    fieldName,
                    mesh.time().timeName(),
                    obj,
                    IOobject::MUST_READ,
                    IOobject::NO_WRITE,
                    false
                ),
                mesh
            )
        );
    }
    else
    {
        Info<< "--> Upgrading " << fieldName
            << " to employ run-time selectable wall functions" << endl;

        // Read existing field
        IOobject ioObj
        (
            fieldName,
            mesh.time().timeName(),
            obj,
            IOobject::MUST_READ,
            IOobject::NO_WRITE,
            false
        );

        tmp<fieldType> fieldOrig
        (
            new fieldType
            (
                ioObj,
                mesh
            )
        );

        // rename file
        Info<< "    Backup original " << fieldName << " to "
            << fieldName << ".old" << endl;
        mvBak(ioObj.objectPath(), "old");

        PtrList<fvPatchField<Type> > newPatchFields(mesh.boundary().size());

        forAll(newPatchFields, patchI)
        {
            if (mesh.boundary()[patchI].isWall())
            {
                newPatchFields.set
                (
                    patchI,
                    new PatchType
                    (
                        mesh.boundary()[patchI],
                        fieldOrig().dimensionedInternalField()
                    )
                );
                newPatchFields[patchI] == fieldOrig().boundaryField()[patchI];
            }
            else
            {
                newPatchFields.set
                (
                    patchI,
                    fieldOrig().boundaryField()[patchI].clone()
                );
            }
        }

        tmp<fieldType> fieldNew
        (
            new fieldType
            (
                IOobject
                (
                    fieldName,
                    mesh.time().timeName(),
                    obj,
                    IOobject::NO_READ,
                    IOobject::NO_WRITE,
                    false
                ),
                mesh,
                fieldOrig().dimensions(),
                fieldOrig().internalField(),
                newPatchFields
            )
        );

        Info<< "    Writing updated " << fieldName << endl;
        fieldNew().write();

        return fieldNew;
    }
}
int main(int argc, char *argv[])
{
    argList::addNote
    (
        "redistribute a triSurface"
    );
    argList::validArgs.append("triSurfaceMesh");
    argList::validArgs.append("distributionType");
    argList::addBoolOption
    (
        "keepNonMapped",
        "preserve surface outside of mesh bounds"
    );

    #include "setRootCase.H"
    #include "createTime.H"
    runTime.functionObjects().off();

    const fileName surfFileName = args[1];
    const word distType = args[2];

    Info<< "Reading surface from " << surfFileName << nl << nl
        << "Using distribution method "
        << distributedTriSurfaceMesh::distributionTypeNames_[distType]
        << " " << distType << nl << endl;

    const bool keepNonMapped = args.options().found("keepNonMapped");

    if (keepNonMapped)
    {
        Info<< "Preserving surface outside of mesh bounds." << nl << endl;
    }
    else
    {
        Info<< "Removing surface outside of mesh bounds." << nl << endl;
    }

    if (!Pstream::parRun())
    {
        FatalErrorIn(args.executable())
            << "Please run this program on the decomposed case."
            << " It will read surface " << surfFileName
            << " and decompose it such that it overlaps the mesh bounding box."
            << exit(FatalError);
    }

    #include "createPolyMesh.H"

    Random rndGen(653213);

    // Determine mesh bounding boxes:
    List<List<treeBoundBox> > meshBb(Pstream::nProcs());
    {
        meshBb[Pstream::myProcNo()] = List<treeBoundBox>
        (
            1,
            treeBoundBox
            (
                boundBox(mesh.points(), false)
            ).extend(rndGen, 1E-3)
        );
        Pstream::gatherList(meshBb);
        Pstream::scatterList(meshBb);
    }

    IOobject io
    (
        surfFileName,           // name
        //runTime.findInstance("triSurface", surfFileName),  // instance
        runTime.constant(),     // instance
        "triSurface",           // local
        runTime,                // registry
        IOobject::MUST_READ,
        IOobject::NO_WRITE
    );

    const fileName actualPath(io.filePath());
    fileName localPath(actualPath);
    localPath.replace(runTime.rootPath() + '/', "");

    if (actualPath == io.objectPath())
    {
        Info<< "Loading local (decomposed) surface " << localPath << nl << endl;
    }
    else
    {
        Info<< "Loading undecomposed surface " << localPath << nl << endl;
    }

    // Create dummy dictionary for bounding boxes if does not exist.
    if (!isFile(actualPath + "Dict"))
    {
        dictionary dict;
        dict.add("bounds", meshBb[Pstream::myProcNo()]);
        dict.add("distributionType", distType);
        dict.add("mergeDistance", SMALL);

        IOdictionary ioDict
        (
            IOobject
            (
                io.name() + "Dict",
                io.instance(),
                io.local(),
                io.db(),
                IOobject::NO_READ,
                IOobject::NO_WRITE,
                false
            ),
            dict
        );

        Info<< "Writing dummy bounds dictionary to " << ioDict.name()
            << nl << endl;

        ioDict.regIOobject::writeObject
        (
            IOstream::ASCII,
            IOstream::currentVersion,
            ioDict.time().writeCompression()
        );
    }

    // Load surface
    distributedTriSurfaceMesh surfMesh(io);
    Info<< "Loaded surface" << nl << endl;

    // Generate a test field
    {
        const triSurface& s = static_cast<const triSurface&>(surfMesh);

        autoPtr<triSurfaceVectorField> fcPtr
        (
            new triSurfaceVectorField
            (
                IOobject
                (
                    surfMesh.searchableSurface::name(),     // name
                    surfMesh.searchableSurface::instance(), // instance
                    surfMesh.searchableSurface::local(),    // local
                    surfMesh,
                    IOobject::NO_READ,
                    IOobject::AUTO_WRITE
                ),
                surfMesh,
                dimLength
            )
        );
        triSurfaceVectorField& fc = fcPtr();

        forAll(fc, triI)
        {
            fc[triI] = s[triI].centre(s.points());
        }

        // Steal pointer and store object on surfMesh
        fcPtr.ptr()->store();
    }
int main(int argc, char *argv[])
{
#   include "setRootCase.H"
#   include "createTime.H"

    Info << "\nReading g" << endl;
    uniformDimensionedVectorField g
    (
        IOobject
        (
            "g",
            runTime.constant(),
            runTime,
            IOobject::MUST_READ,
            IOobject::NO_WRITE
        )
    );

    Info << "\nReading waveProperties\n" << endl;

    IOdictionary waveProperties
    (
        IOobject
        (
            "waveProperties.input",
            runTime.constant(),
            runTime,
            IOobject::MUST_READ,
            IOobject::NO_WRITE
        )
    );

    // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //

    IOobject wOut
    (
        "waveProperties",
        runTime.constant(),
        runTime,
        IOobject::NO_READ,
        IOobject::NO_WRITE
    );

    // Write waveProperties with the above computed changes
    OFstream os
    (
        wOut.objectPath(),
#if EXTBRANCH==1
        ios_base::out|ios_base::trunc,
#elif OFPLUSBRANCH==1
        // Nothing to be put here
#else
    #if OFVERSION<170
        ios_base::out|ios_base::trunc,
    #endif
#endif
        IOstream::ASCII,
        IOstream::currentVersion,
        IOstream::UNCOMPRESSED
    );

    // Write the OF banner
    wOut.writeBanner( os );

    // Write the file information. Class name is not correct when
    // using wOut.writeHeader( os ); hence manual entries
    os << "FoamFile" << nl;
    os << token::BEGIN_BLOCK << incrIndent << nl;
    os << indent << "version" << tab << IOstream::currentVersion
       << token::END_STATEMENT << nl;
    os << indent << "format" << tab << "ascii;" << nl;
    os << indent << "class" << tab << "dictionary;" << nl;
    os << indent << "object" << tab << "waveProperties;" << nl;
    os << decrIndent << indent << token::END_BLOCK << nl;

    // Write the divider
    wOut.writeDivider( os );
    os << nl;

    /* Loop over all subdicts in waveProperties. For each of them compute the
       wave parameters relevant for that particular wave theory. */
    wordList toc = waveProperties.toc();

    forAll (toc, item)
    {
        // If a sub-dictionary, then compute parameters and write the subdict
        if (waveProperties.isDict(toc[item]))
        {
            dictionary& sd = waveProperties.subDict(toc[item]);

            autoPtr<setWaveProperties> props
            (
                setWaveProperties::New(runTime, sd, true)
            );

            props->set( os );
        }
        else
        {
            label Nspaces = 20;

            // Read the entry and write to the dummy output file
            ITstream read = waveProperties.lookup(toc[item]);

            os << toc[item] << token::SPACE;

            for (int i=toc[item].size(); i<Nspaces-1; i++)
            {
                os << token::SPACE;
            }

            forAll (read, ri)
            {
                if (ri < read.size() - 1)
                {
                    os << read[ri] << token::SPACE;
                }
                else
                {
                    os << read[ri];
                }
            }

            os << token::END_STATEMENT << nl << endl;

            // Additional level of check, such that the code does not crash at
            // runTime:
            if (toc[item] == "seaLevel")
            {
                // Read the magnitude of the sea level
                scalar sL = readScalar(waveProperties.lookup("seaLevel"));

                // If the Switch seaLevelAsReference is not found _and_ the
                // magnitude of the sea level differs from 0 (zero), stop the
                // evaluation of the wave parameters
                if
                (
                    !waveProperties.found("seaLevelAsReference")
                 && SMALL < Foam::mag(sL)
                )
                {
                    // This merely looks up the string, it will not be found
                    // and the user is forced to correct waveProperties.input,
                    // before any execution is possible.
                    waveProperties.lookup("seaLevelAsReference");
                }
            }
        }
    }

    // Write end divider
    wOut.writeEndDivider(os);

    // End
    Info<< "\nEnd\n" << endl;

    return 0;
}
template<class Type>
bool Foam::volFieldStreamReconstructor<Type>::reconstruct
(
    const IOobject& io,
    const bool,
    Ostream& os
) const
{
    typedef GeometricField<Type, unallocatedFvPatchField, unallocatedVolMesh>
        GeoField;

    // Retrieve from polyMesh
    const uFieldReconstructor& reconstructor =
        uFieldReconstructor::New(io.db());

    const PtrList<unallocatedFvMesh>& procMeshes = reconstructor.procMeshes();

    Info<< "Reconstructing " << io.objectPath() << endl;

    // Read field on proc meshes
    PtrList<GeoField> procFields(procMeshes.size());
    forAll(procFields, proci)
    {
        const unallocatedFvMesh& procMesh = procMeshes[proci];

        Pout<< incrIndent;

        procFields.set
        (
            proci,
            new GeoField
            (
                IOobject
                (
                    io.name(),
                    io.instance(),
                    io.local(),
                    procMesh.thisDb(),
                    IOobject::MUST_READ,
                    IOobject::NO_WRITE,
                    false
                ),
                procMesh
            )
        );

        Pout<< decrIndent;
    }

    // Fix filtering of empty nonuniform entries
    reconstructor.reconstructor().fixGenericNonuniform
    <
        GeoField,
        unallocatedGenericFvPatchField<Type>
    >(procFields);

    // Map local field onto baseMesh
    const unallocatedFvMesh& baseMesh = reconstructor.baseMesh();

    tmp<GeoField> tfld
    (
        reconstructor.reconstructor().reconstructFvVolumeField
        (
            IOobject
            (
                io.name(),
                io.instance(),
                io.local(),
                baseMesh.thisDb(),
                IOobject::NO_READ,
                IOobject::AUTO_WRITE,
                false
            ),
            procFields
        )
    );

    Pout<< incrIndent;
    os << tfld();
    Pout<< decrIndent;

    return os.good();
}
// Read mesh if available. Otherwise create empty mesh with same non-proc
// patches as proc0 mesh. Requires all processors to have all patches
// (and in same order).
autoPtr<fvMesh> createMesh
(
    const Time& runTime,
    const word& regionName,
    const fileName& instDir,
    const bool haveMesh
)
{
    Pout<< "Create mesh for time = " << runTime.timeName() << nl << endl;

    IOobject io
    (
        regionName,
        instDir,
        runTime,
        IOobject::MUST_READ
    );

    if (!haveMesh)
    {
        // Create dummy mesh. Only used on procs that don't have mesh.
        fvMesh dummyMesh
        (
            io,
            xferCopy(pointField()),
            xferCopy(faceList()),
            xferCopy(labelList()),
            xferCopy(labelList()),
            false
        );
        Pout<< "Writing dummy mesh to " << dummyMesh.polyMesh::objectPath()
            << endl;
        dummyMesh.write();
    }

    Pout<< "Reading mesh from " << io.objectPath() << endl;
    autoPtr<fvMesh> meshPtr(new fvMesh(io));
    fvMesh& mesh = meshPtr();


    // Determine patches.
    if (Pstream::master())
    {
        // Send patches
        for
        (
            int slave=Pstream::firstSlave();
            slave<=Pstream::lastSlave();
            slave++
        )
        {
            OPstream toSlave(Pstream::blocking, slave);
            toSlave << mesh.boundaryMesh();
        }
    }
    else
    {
        // Receive patches
        IPstream fromMaster(Pstream::blocking, Pstream::masterNo());
        PtrList<entry> patchEntries(fromMaster);

        if (haveMesh)
        {
            // Check master names against mine

            const polyBoundaryMesh& patches = mesh.boundaryMesh();

            forAll(patchEntries, patchI)
            {
                const entry& e = patchEntries[patchI];
                const word type(e.dict().lookup("type"));
                const word& name = e.keyword();

                if (type == processorPolyPatch::typeName)
                {
                    break;
                }

                if (patchI >= patches.size())
                {
                    FatalErrorIn
                    (
                        "createMesh(const Time&, const fileName&, const bool)"
                    )   << "Non-processor patches not synchronised."
                        << endl
                        << "Processor " << Pstream::myProcNo()
                        << " has only " << patches.size()
                        << " patches, master has " << patchI
                        << exit(FatalError);
                }

                if
                (
                    type != patches[patchI].type()
                 || name != patches[patchI].name()
                )
                {
                    FatalErrorIn
                    (
                        "createMesh(const Time&, const fileName&, const bool)"
                    )   << "Non-processor patches not synchronised."
                        << endl
                        << "Master patch " << patchI
                        << " name:" << name
                        << " type:" << type << endl
                        << "Processor " << Pstream::myProcNo()
                        << " patch " << patchI
                        << " has name:" << patches[patchI].name()
                        << " type:" << patches[patchI].type()
                        << exit(FatalError);
                }
            }
        }
        else
        {
            // Add patch
            List<polyPatch*> patches(patchEntries.size());
            label nPatches = 0;

            forAll(patchEntries, patchI)
            {
                const entry& e = patchEntries[patchI];
                const word type(e.dict().lookup("type"));
                const word& name = e.keyword();

                if (type == processorPolyPatch::typeName)
                {
                    break;
                }

                Pout<< "Adding patch:" << nPatches
                    << " name:" << name << " type:" << type << endl;

                dictionary patchDict(e.dict());
                patchDict.remove("nFaces");
                patchDict.add("nFaces", 0);
                patchDict.remove("startFace");
                patchDict.add("startFace", 0);

                patches[patchI] = polyPatch::New
                (
                    name,
                    patchDict,
                    nPatches++,
                    mesh.boundaryMesh()
                ).ptr();
            }
            patches.setSize(nPatches);
            mesh.addFvPatches(patches, false);  // no parallel comms

            //// Write empty mesh now we have correct patches
            //meshPtr().write();
        }
    }
// Construct from IOobject
Foam::surfacePatchIOList::surfacePatchIOList
(
    const IOobject& io
)
:
    surfacePatchList(),
    regIOobject(io)
{
    Foam::string functionName =
        "surfacePatchIOList::surfacePatchIOList"
        "(const IOobject& io)";

    if
    (
        readOpt() == IOobject::MUST_READ
     || readOpt() == IOobject::MUST_READ_IF_MODIFIED
    )
    {
        if (readOpt() == IOobject::MUST_READ_IF_MODIFIED)
        {
            WarningInFunction
                << "Specified IOobject::MUST_READ_IF_MODIFIED but class"
                << " does not support automatic rereading." << endl;
        }

        surfacePatchList& patches = *this;

        // read polyPatchList
        Istream& is = readStream(typeName);

        PtrList<entry> patchEntries(is);
        patches.setSize(patchEntries.size());

        label facei = 0;
        forAll(patches, patchi)
        {
            const dictionary& dict = patchEntries[patchi].dict();

            label patchSize = readLabel(dict.lookup("nFaces"));
            label startFacei = readLabel(dict.lookup("startFace"));

            patches[patchi] =
                surfacePatch
                (
                    word(dict.lookup("geometricType")),
                    patchEntries[patchi].keyword(),
                    patchSize,
                    startFacei,
                    patchi
                );

            if (startFacei != facei)
            {
                FatalErrorInFunction
                    << "Patches are not ordered. Start of patch " << patchi
                    << " does not correspond to sum of preceding patches."
                    << endl
                    << "while reading " << io.objectPath()
                    << exit(FatalError);
            }

            facei += patchSize;
        }

        // Check state of IOstream
        is.check(functionName.c_str());

        close();
    }
}
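// Minimal sketch (assumed helper, not from the sources above) of the ordering
// invariant the reader enforces: each patch must start exactly where the
// preceding patches end.
static bool patchesAreOrdered(const Foam::surfacePatchList& patches)
{
    Foam::label facei = 0;
    forAll(patches, patchi)
    {
        if (patches[patchi].start() != facei)
        {
            return false;
        }
        facei += patches[patchi].size();
    }
    return true;
}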
Foam::surfZoneIOList::surfZoneIOList
(
    const IOobject& io
)
:
    surfZoneList(),
    regIOobject(io)
{
    Foam::string functionName =
        "surfZoneIOList::surfZoneIOList"
        "(const IOobject& io)";

    if
    (
        readOpt() == IOobject::MUST_READ
     || readOpt() == IOobject::MUST_READ_IF_MODIFIED
    )
    {
        surfZoneList& zones = *this;

        Istream& is = readStream(typeName);

        PtrList<entry> dictEntries(is);
        zones.setSize(dictEntries.size());

        label faceI = 0;
        forAll(zones, zoneI)
        {
            const dictionary& dict = dictEntries[zoneI].dict();

            label zoneSize = readLabel(dict.lookup("nFaces"));
            label startFaceI = readLabel(dict.lookup("startFace"));

            zones[zoneI] = surfZone
            (
                dictEntries[zoneI].keyword(),
                zoneSize,
                startFaceI,
                zoneI
            );

            word geoType;
            if (dict.readIfPresent("geometricType", geoType))
            {
                zones[zoneI].geometricType() = geoType;
            }

            if (startFaceI != faceI)
            {
                FatalErrorIn(functionName)
                    << "surfZones are not ordered. Start of zone " << zoneI
                    << " does not correspond to sum of preceding zones." << nl
                    << "while reading " << io.objectPath() << endl
                    << exit(FatalError);
            }

            faceI += zoneSize;
        }

        // Check state of IOstream
        is.check(functionName.c_str());

        close();
    }
int main(int argc, char *argv[])
{
    argList::noParallel();
    argList::addBoolOption
    (
        "blockTopology",
        "write block edges and centres as .obj files"
    );
    argList::addBoolOption
    (
        "writeStep",
        "write mesh at different smoothing step"
    );
    argList::addOption
    (
        "dict",
        "file",
        "specify alternative dictionary for the blockMesh description"
    );

#   include "addRegionOption.H"
#   include "setRootCase.H"
#   include "createTime.H"

    const word dictName("blockMeshDict");

    word regionName;
    fileName polyMeshDir;

    if (args.optionReadIfPresent("region", regionName, polyMesh::defaultRegion))
    {
        // constant/<region>/polyMesh/blockMeshDict
        polyMeshDir = regionName/polyMesh::meshSubDir;

        Info<< nl << "Generating mesh for region " << regionName << endl;
    }
    else
    {
        // constant/polyMesh/blockMeshDict
        polyMeshDir = polyMesh::meshSubDir;
    }

    IOobject meshDictIO
    (
        dictName,
        runTime.constant(),
        polyMeshDir,
        runTime,
        IOobject::MUST_READ,
        IOobject::NO_WRITE,
        false
    );

    if (args.optionFound("dict"))
    {
        const fileName dictPath = args["dict"];

        meshDictIO = IOobject
        (
            (
                isDir(dictPath)
              ? dictPath/dictName
              : dictPath
            ),
            runTime,
            IOobject::MUST_READ,
            IOobject::NO_WRITE,
            false
        );
    }

    if (!meshDictIO.headerOk())
    {
        FatalErrorIn(args.executable())
            << "Cannot open mesh description file\n    "
            << meshDictIO.objectPath()
            << nl
            << exit(FatalError);
    }

    Info<< "Creating block mesh from\n    "
        << meshDictIO.objectPath() << endl;

    blockMesh::verbose(false);

    IOdictionary meshDict(meshDictIO);
    blockMesh blocks(meshDict, regionName);

    if (args.optionFound("blockTopology"))
    {
        // Write mesh as edges.
        {
            fileName objMeshFile("blockTopology.obj");

            OFstream str(runTime.path()/objMeshFile);

            Info<< nl << "Dumping block structure as Lightwave obj format"
                << " to " << objMeshFile << endl;

            blocks.writeTopology(str);
        }