int main(int argc, char *argv[])
{
    argList::addNote
    (
        "redistribute a triSurface"
    );
    argList::validArgs.append("triSurfaceMesh");
    argList::validArgs.append("distributionType");
    argList::addBoolOption
    (
        "keepNonMapped",
        "preserve surface outside of mesh bounds"
    );

    #include "setRootCase.H"
    #include "createTime.H"
    runTime.functionObjects().off();

    const fileName surfFileName = args[1];
    const word distType = args[2];

    Info<< "Reading surface from " << surfFileName << nl << nl
        << "Using distribution method "
        << distributedTriSurfaceMesh::distributionTypeNames_[distType]
        << " " << distType << nl << endl;

    const bool keepNonMapped = args.options().found("keepNonMapped");

    if (keepNonMapped)
    {
        Info<< "Preserving surface outside of mesh bounds." << nl << endl;
    }
    else
    {
        Info<< "Removing surface outside of mesh bounds." << nl << endl;
    }

    if (!Pstream::parRun())
    {
        FatalErrorIn(args.executable())
            << "Please run this program on the decomposed case."
            << " It will read surface " << surfFileName
            << " and decompose it such that it overlaps the mesh bounding box."
            << exit(FatalError);
    }

    #include "createPolyMesh.H"

    Random rndGen(653213);

    // Determine mesh bounding boxes:
    List<List<treeBoundBox> > meshBb(Pstream::nProcs());
    {
        meshBb[Pstream::myProcNo()] = List<treeBoundBox>
        (
            1,
            treeBoundBox
            (
                boundBox(mesh.points(), false)
            ).extend(rndGen, 1E-3)
        );
        Pstream::gatherList(meshBb);
        Pstream::scatterList(meshBb);
    }

    IOobject io
    (
        surfFileName,         // name
        //runTime.findInstance("triSurface", surfFileName),  // instance
        runTime.constant(),   // instance
        "triSurface",         // local
        runTime,              // registry
        IOobject::MUST_READ,
        IOobject::NO_WRITE
    );

    const fileName actualPath(io.filePath());

    fileName localPath(actualPath);
    localPath.replace(runTime.rootPath() + '/', "");

    if (actualPath == io.objectPath())
    {
        Info<< "Loading local (decomposed) surface " << localPath
            << nl << endl;
    }
    else
    {
        Info<< "Loading undecomposed surface " << localPath << nl << endl;
    }

    // Create dummy dictionary for bounding boxes if it does not exist.
    if (!isFile(actualPath + "Dict"))
    {
        dictionary dict;
        dict.add("bounds", meshBb[Pstream::myProcNo()]);
        dict.add("distributionType", distType);
        dict.add("mergeDistance", SMALL);

        IOdictionary ioDict
        (
            IOobject
            (
                io.name() + "Dict",
                io.instance(),
                io.local(),
                io.db(),
                IOobject::NO_READ,
                IOobject::NO_WRITE,
                false
            ),
            dict
        );

        Info<< "Writing dummy bounds dictionary to " << ioDict.name()
            << nl << endl;

        ioDict.regIOobject::writeObject
        (
            IOstream::ASCII,
            IOstream::currentVersion,
            ioDict.time().writeCompression()
        );
    }

    // Load surface
    distributedTriSurfaceMesh surfMesh(io);
    Info<< "Loaded surface" << nl << endl;

    // Generate a test field
    {
        const triSurface& s = static_cast<const triSurface&>(surfMesh);

        autoPtr<triSurfaceVectorField> fcPtr
        (
            new triSurfaceVectorField
            (
                IOobject
                (
                    surfMesh.searchableSurface::name(),      // name
                    surfMesh.searchableSurface::instance(),  // instance
                    surfMesh.searchableSurface::local(),     // local
                    surfMesh,
                    IOobject::NO_READ,
                    IOobject::AUTO_WRITE
                ),
                surfMesh,
                dimLength
            )
        );
        triSurfaceVectorField& fc = fcPtr();

        forAll(fc, triI)
        {
            fc[triI] = s[triI].centre(s.points());
        }

        // Steal pointer and store object on surfMesh
        fcPtr.ptr()->store();
    }
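// For illustration only: a sketch of the dummy bounds dictionary that the
// block above writes next to the surface file (as <surface>Dict). The
// bounding-box coordinates and the "independent" distribution type are
// invented values, not output captured from a real run; mergeDistance is
// written as SMALL:
//
//     bounds           (((-0.001 -0.001 -0.001) (1.001 1.001 1.001)));
//     distributionType independent;
//     mergeDistance    1e-15;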
void Foam::SmootherBoundary::analyseDict(dictionary& snapDict)
{
    _featureAngle = readScalar(snapDict.lookup("featureAngle"));
    _minEdgeForFeature = readLabel(snapDict.lookup("minEdgeForFeature"));
    _minFeatureEdgeLength = readScalar(snapDict.lookup("minFeatureEdgeLength"));
    _writeFeatures = readBool(snapDict.lookup("writeFeatures"));

    Info<< "  snapControls:" << nl
        << "    - Feature angle           : " << _featureAngle << nl
        << "    - Min edges for features  : " << _minEdgeForFeature << nl
        << "    - Min feature edge length : " << _minFeatureEdgeLength << nl;

    const polyBoundaryMesh& bM = _polyMesh->boundaryMesh();
    const label NbPolyPatchs = bM.size();

    _triSurfList.resize(NbPolyPatchs, 0);
    _triSurfSearchList.resize(NbPolyPatchs, 0);
    _surfFeatList.resize(NbPolyPatchs, 0);
    _extEdgMeshList.resize(NbPolyPatchs, 0);
    _bndUseIntEdges.resize(NbPolyPatchs, true);
    _bndIsSnaped.resize(NbPolyPatchs, true);
    _bndLayers.resize(NbPolyPatchs);

    if (snapDict.found("boundaries"))
    {
        Info<< "    - Boundary specifications : " << nl;

        const dictionary& bndDict = snapDict.subDict("boundaries");
        wordList bndDefined = bndDict.toc();

        forAll(bndDefined, patchI)
        {
            Info<< "      - " << bndDefined[patchI] << nl;

            const dictionary& patchDic = bndDict.subDict(bndDefined[patchI]);

            if (patchDic.found("triSurface"))
            {
                word file = patchDic.lookup("triSurface");

                bool exist = false;
                for
                (
                    label patchJ = 0;
                    patchJ < NbPolyPatchs && !exist;
                    ++patchJ
                )
                {
                    exist = bM[patchJ].name() == bndDefined[patchI];

                    if (exist)
                    {
                        Info<< "        - Snapping surface : " << file << nl;

                        _bndIsSnaped[patchJ] = false;

                        IOobject surfFile
                        (
                            file,
                            _polyMesh->time().constant(),
                            "triSurface",
                            _polyMesh->time(),
                            IOobject::MUST_READ,
                            IOobject::NO_WRITE
                        );

                        triSurface* bnd = new triSurface(surfFile.filePath());
                        addTriFace(patchJ, bnd);

                        if (patchDic.found("internalFeatureEdges"))
                        {
                            _bndUseIntEdges[patchJ] = readBool
                            (
                                patchDic.lookup("internalFeatureEdges")
                            );

                            Info<< "        - Use internal edges : "
                                << _bndUseIntEdges[patchJ] << nl;
                        }
                        else
                        {
                            _bndUseIntEdges[patchJ] = false;
                        }

                        if (patchDic.found("boundaryLayer"))
                        {
                            _bndLayers[patchJ] = SmootherBoundaryLayer
                            (
                                patchDic.subDict("boundaryLayer")
                            );
                        }
                    }
                }

                if (!exist)
                {
                    WarningIn("Foam::MeshSmoother::analyseDict()")
                        << "Patch " << bndDefined[patchI]
                        << " defined in smootherDict does not exist in "
                        << "polyMesh. Existing patches in polyMesh are: "
                        << bM.names() << nl;
                }
            }
        }
    }
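// A hedged sketch of the snapControls dictionary that analyseDict() above
// parses. The patch name "wall", the surface file name, and all numeric
// values are invented for illustration; only the keywords come from the
// lookups in the code:
//
//     snapControls
//     {
//         featureAngle         45;
//         minEdgeForFeature    3;
//         minFeatureEdgeLength 1e-6;
//         writeFeatures        true;
//
//         boundaries
//         {
//             wall                             // must match a polyMesh patch
//             {
//                 triSurface           wall.stl;
//                 internalFeatureEdges false;  // optional; false when omitted
//                 //boundaryLayer { ... }      // optional sub-dictionary
//             }
//         }
//     }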
// Read boundary file without reading mesh
void rewriteBoundary
(
    const bool isTestRun,
    const IOobject& io,
    const fileName& regionPrefix,
    HashTable<word>& thisNames,
    HashTable<word>& nbrNames
)
{
    Info<< "Reading boundary from " << io.filePath() << endl;

    // Read PtrList of dictionary.
    const word oldTypeName = IOPtrList<entry>::typeName;
    const_cast<word&>(IOPtrList<entry>::typeName) = word::null;
    IOPtrList<entry> patches(io);
    const_cast<word&>(IOPtrList<entry>::typeName) = oldTypeName;
    // Fake type back to what was in field
    const_cast<word&>(patches.type()) = patches.headerClassName();

    // Replace any 'cyclic'
    label nOldCyclics = 0;
    forAll(patches, patchI)
    {
        const dictionary& patchDict = patches[patchI].dict();

        if (word(patchDict["type"]) == cyclicPolyPatch::typeName)
        {
            if (!patchDict.found("neighbourPatch"))
            {
                Info<< "Patch " << patches[patchI].keyword()
                    << " does not have 'neighbourPatch' entry; assuming it"
                    << " is of the old type." << endl;
                nOldCyclics++;
            }
        }
    }

    Info<< "Detected " << nOldCyclics << " old cyclics." << nl << endl;

    // Save old patches.
    PtrList<entry> oldPatches(patches);

    // Extend
    label nOldPatches = patches.size();
    patches.setSize(nOldPatches + nOldCyclics);

    // Create reordering map
    labelList oldToNew(patches.size());

    // Add new entries
    label addedPatchI = nOldPatches;
    label newPatchI = 0;
    forAll(oldPatches, patchI)
    {
        const dictionary& patchDict = oldPatches[patchI].dict();

        if (word(patchDict["type"]) == cyclicPolyPatch::typeName)
        {
            const word& name = oldPatches[patchI].keyword();

            if (patchDict.found("neighbourPatch"))
            {
                patches.set(patchI, oldPatches.set(patchI, NULL));
                oldToNew[patchI] = newPatchI++;

                // Check if patches come from automatic conversion
                word oldName;

                string::size_type i = name.rfind("_half0");
                if (i != string::npos)
                {
                    oldName = name.substr(0, i);
                    thisNames.insert(oldName, name);
                    Info<< "Detected converted cyclic patch " << name
                        << " ; assuming it originates from " << oldName
                        << endl;
                }
                else
                {
                    i = name.rfind("_half1");
                    if (i != string::npos)
                    {
                        oldName = name.substr(0, i);
                        nbrNames.insert(oldName, name);
                        Info<< "Detected converted cyclic patch " << name
                            << " ; assuming it originates from " << oldName
                            << endl;
                    }
                }
            }
            else
            {
                label nFaces = readLabel(patchDict["nFaces"]);
                label startFace = readLabel(patchDict["startFace"]);

                Info<< "Detected old style " << word(patchDict["type"])
                    << " patch " << name << " with" << nl
                    << "    nFaces    : " << nFaces << nl
                    << "    startFace : " << startFace << endl;

                word thisName = name + "_half0";
                word nbrName = name + "_half1";

                thisNames.insert(name, thisName);
                nbrNames.insert(name, nbrName);

                // Save current dictionary
                const dictionary patchDict(patches[patchI].dict());

                // Change entry on this side
                patches.set(patchI, oldPatches.set(patchI, NULL));
                oldToNew[patchI] = newPatchI++;
                dictionary& thisPatchDict = patches[patchI].dict();
                thisPatchDict.add("neighbourPatch", nbrName);
                thisPatchDict.set("nFaces", nFaces/2);
                patches[patchI].keyword() = thisName;

                // Add entry on other side
                patches.set
                (
                    addedPatchI,
                    new dictionaryEntry
                    (
                        nbrName,
                        dictionary::null,
                        patchDict
                    )
                );
                oldToNew[addedPatchI] = newPatchI++;
                dictionary& nbrPatchDict = patches[addedPatchI].dict();
                nbrPatchDict.set("neighbourPatch", thisName);
                nbrPatchDict.set("nFaces", nFaces/2);
                nbrPatchDict.set("startFace", startFace + nFaces/2);
                patches[addedPatchI].keyword() = nbrName;

                Info<< "Replaced with patches" << nl
                    << patches[patchI].keyword() << " with" << nl
                    << "    nFaces    : "
                    << readLabel(thisPatchDict.lookup("nFaces")) << nl
                    << "    startFace : "
                    << readLabel(thisPatchDict.lookup("startFace")) << nl
                    << patches[addedPatchI].keyword() << " with" << nl
                    << "    nFaces    : "
                    << readLabel(nbrPatchDict.lookup("nFaces")) << nl
                    << "    startFace : "
                    << readLabel(nbrPatchDict.lookup("startFace")) << nl
                    << endl;

                addedPatchI++;
            }
        }
        else
        {
            patches.set(patchI, oldPatches.set(patchI, NULL));
            oldToNew[patchI] = newPatchI++;
        }
    }

    patches.reorder(oldToNew);

    if (returnReduce(nOldCyclics, sumOp<label>()) > 0)
    {
        if (isTestRun)
        {
            //Info<< "-test option: no changes made" << nl << endl;
        }
        else
        {
            if (mvBak(patches.objectPath(), "old"))
            {
                Info<< "Backup to " << (patches.objectPath() + ".old") << nl;
            }

            Info<< "Write to " << patches.objectPath() << nl << endl;
            patches.write();
        }
    }
    else
    {
        Info<< "No changes made to boundary file." << nl << endl;
    }
}
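// Worked example of the rewrite above (patch name and face counts are
// invented): an old-style cyclic entry in the boundary file
//
//     sides
//     {
//         type            cyclic;
//         nFaces          100;
//         startFace       3000;
//     }
//
// is split into two half patches, each owning half of the faces:
//
//     sides_half0
//     {
//         type            cyclic;
//         neighbourPatch  sides_half1;
//         nFaces          50;
//         startFace       3000;
//     }
//     sides_half1
//     {
//         type            cyclic;
//         neighbourPatch  sides_half0;
//         nFaces          50;
//         startFace       3050;
//     }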
forAll(featDicts, featI)
{
    const dictionary& dict = featDicts[featI];

    fileName featFileName(dict.lookup("file"));

    {
        IOobject featObj
        (
            featFileName,           // name
            io.time().constant(),   // instance
            "triSurface",           // local
            io.time(),              // registry
            IOobject::MUST_READ,
            IOobject::NO_WRITE,
            false
        );

        autoPtr<edgeMesh> eMeshPtr = edgeMesh::New(featObj.filePath());

        set
        (
            featI,
            new featureEdgeMesh
            (
                featObj,
                eMeshPtr->points(),
                eMeshPtr->edges()
            )
        );
    }

    const featureEdgeMesh& eMesh = operator[](featI);
    //eMesh.mergePoints(meshRefiner_.mergeDistance());

    if (dict.found("levels"))
    {
        List<Tuple2<scalar, label> > distLevels(dict["levels"]);

        if (distLevels.size() < 1)
        {
            FatalErrorIn
            (
                "refinementFeatures::read"
                "(const objectRegistry&"
                ", const PtrList<dictionary>&)"
            )   << " : levels should be at least size 1" << endl
                << "levels : " << dict["levels"]
                << exit(FatalError);
        }

        distances_[featI].setSize(distLevels.size());
        levels_[featI].setSize(distLevels.size());

        forAll(distLevels, j)
        {
            distances_[featI][j] = distLevels[j].first();
            levels_[featI][j] = distLevels[j].second();

            // Check that distances increase (and levels do not)
            if (j > 0)
            {
                if
                (
                    (distances_[featI][j] <= distances_[featI][j-1])
                 || (levels_[featI][j] > levels_[featI][j-1])
                )
                {
                    FatalErrorIn
                    (
                        "refinementFeatures::read"
                        "(const objectRegistry&"
                        ", const PtrList<dictionary>&)"
                    )   << " : Refinement should be specified in order"
                        << " of increasing distance"
                        << " (and decreasing refinement level)." << endl
                        << "Distance:" << distances_[featI][j]
                        << " refinementLevel:" << levels_[featI][j]
                        << exit(FatalError);
                }
            }
        }
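// A sketch of one feature dictionary accepted by the reader above; the file
// name and the (distance level) pairs are illustrative. Distances must be
// strictly increasing and levels non-increasing, e.g. refine to level 3
// within 0.01 m of the feature, level 2 within 0.05 m, level 1 within 0.1 m:
//
//     {
//         file    "body.eMesh";
//         levels  ((0.01 3) (0.05 2) (0.1 1));
//     }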