Foam::flameletModelA::flameletModelA
(
    volScalarField& rho,
    volVectorField& U,
    volScalarField& Su,
    volScalarField& Sigma,
    volScalarField& b,
    psiuReactionThermo& thermo,
    compressible::turbulenceModel& turbulence,
    IOdictionary& mdData
)
:
    flameletModel(rho),
    dictionary_(mdData.subDict("flameletModelACoeffs")),
    alphaSigma_(readScalar(dictionary_.lookup("alphaSigma"))),
    GammaK_(readScalar(dictionary_.lookup("GammaK"))),
    betaSigma_(readScalar(dictionary_.lookup("betaSigma"))),
    rho_(rho),
    U_(U),
    Su_(Su),
    Sigma_(Sigma),
    b_(b),
    thermo_(thermo),
    turbulence_(turbulence)
{}
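The three model constants are read with lookup, so they must all be present in the flameletModelACoeffs sub-dictionary of the dictionary passed in as mdData. A minimal sketch of such an entry; the numerical values below are placeholders, not values prescribed by the model:

flameletModelACoeffs
{
    alphaSigma      2.1;    // placeholder value
    GammaK          1.0;    // placeholder value
    betaSigma       1.0;    // placeholder value
}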
void Foam::moleculeCloud::buildConstProps()
{
    Info<< nl << "Reading moleculeProperties dictionary." << endl;

    const List<word>& idList(pot_.idList());

    constPropList_.setSize(idList.size());

    const List<word>& siteIdList(pot_.siteIdList());

    IOdictionary moleculePropertiesDict
    (
        IOobject
        (
            "moleculeProperties",
            mesh_.time().constant(),
            mesh_,
            IOobject::MUST_READ,
            IOobject::NO_WRITE,
            false
        )
    );

    forAll(idList, i)
    {
        const word& id(idList[i]);

        const dictionary& molDict(moleculePropertiesDict.subDict(id));

        List<word> siteIdNames = molDict.lookup("siteIds");

        List<label> siteIds(siteIdNames.size());

        forAll(siteIdNames, sI)
        {
            const word& siteId = siteIdNames[sI];

            siteIds[sI] = findIndex(siteIdList, siteId);

            if (siteIds[sI] == -1)
            {
                FatalErrorIn("moleculeCloud.C") << nl
                    << siteId << " site not found."
                    << nl << abort(FatalError);
            }
        }

        molecule::constantProperties& constProp = constPropList_[i];

        constProp = molecule::constantProperties(molDict);

        constProp.siteIds() = siteIds;
    }
}
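Each molecule id known to the potential must therefore have a matching sub-dictionary in constant/moleculeProperties whose siteIds are names from the potential's site-id list; molecule::constantProperties reads its remaining entries from the same sub-dictionary. A hedged sketch of one entry, with an illustrative molecule and site names:

water
{
    siteIds         ( O H1 H2 M );
    // ... further entries required by molecule::constantProperties
}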
int main(int argc, char *argv[])
{
#   include "setRootCase.H"
#   include "createTime.H"
#   include "createMesh.H"

    // Reading faMeshDefinition dictionary
    IOdictionary faMeshDefinition
    (
        IOobject
        (
            "faMeshDefinition",
            runTime.constant(),
            "faMesh",
            mesh,
            IOobject::MUST_READ,
            IOobject::NO_WRITE
        )
    );

    wordList polyMeshPatches
    (
        faMeshDefinition.lookup("polyMeshPatches")
    );

    dictionary bndDict = faMeshDefinition.subDict("boundary");

    wordList faPatchNames = bndDict.toc();

    List<faPatchData> faPatches(faPatchNames.size() + 1);

    forAll(faPatchNames, patchI)
    {
        dictionary curPatchDict = bndDict.subDict(faPatchNames[patchI]);

        faPatches[patchI].name_ = faPatchNames[patchI];

        faPatches[patchI].type_ = word(curPatchDict.lookup("type"));

        faPatches[patchI].ownPolyPatchID_ =
            mesh.boundaryMesh().findPatchID
            (
                word(curPatchDict.lookup("ownerPolyPatch"))
            );

        faPatches[patchI].ngbPolyPatchID_ =
            mesh.boundaryMesh().findPatchID
            (
                word(curPatchDict.lookup("neighbourPolyPatch"))
            );
    }
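From the lookups above, constant/faMesh/faMeshDefinition must supply the polyMeshPatches list and a boundary sub-dictionary in which each finite-area patch names its type and its owner/neighbour polyMesh patches. A minimal sketch; patch names are placeholders:

polyMeshPatches  ( freeSurface );

boundary
{
    inletEdge
    {
        type                patch;
        ownerPolyPatch      freeSurface;
        neighbourPolyPatch  inlet;
    }
}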
Foam::CantPopeBray::CantPopeBray
(
    volScalarField& rho,
    volVectorField& U,
    volScalarField& Su,
    volScalarField& Sigma,
    volScalarField& b,
    psiuReactionThermo& thermo,
    compressible::turbulenceModel& turbulence,
    IOdictionary& mdData
)
:
    flameletModel(rho),
    dictionary_(mdData.subDict("CantPopeBrayCoeffs")),
    alphaSigma_(dictionary_.lookupOrDefault("alphaSigma", 0.28)),
    betaSigma_(dictionary_.lookupOrDefault("betaSigma", 1.0)),
    aCoeff_(dictionary_.lookupOrDefault("aCoeff", 10.0)),
    rho_(rho),
    U_(U),
    Su_(Su),
    Sigma_(Sigma),
    b_(b),
    thermo_(thermo),
    turbulence_(turbulence)
{}
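Unlike flameletModelA, these coefficients are read with lookupOrDefault, so individual entries may be omitted from the CantPopeBrayCoeffs sub-dictionary and the defaults embedded in the constructor then apply. An explicit entry equivalent to those defaults:

CantPopeBrayCoeffs
{
    alphaSigma      0.28;   // default used when the entry is absent
    betaSigma       1.0;
    aCoeff          10.0;
}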
template<class ChemistryModel>
Foam::autoPtr<ChemistryModel> Foam::basicChemistryModel::New
(
    const fvMesh& mesh
)
{
    IOdictionary chemistryDict
    (
        IOobject
        (
            "chemistryProperties",
            mesh.time().constant(),
            mesh,
            IOobject::MUST_READ,
            IOobject::NO_WRITE,
            false
        )
    );

    word chemistryTypeName;

    if (chemistryDict.isDict("chemistryType"))
    {
        const dictionary& chemistryTypeDict
        (
            chemistryDict.subDict("chemistryType")
        );

        Info<< "Selecting chemistry type " << chemistryTypeDict << endl;

        const int nCmpt = 8;
        const char* cmptNames[nCmpt] =
        {
            "chemistrySolver",
            "chemistryModel",
            "chemistryThermo",
            "transport",
            "thermo",
            "equationOfState",
            "specie",
            "energy"
        };

        IOdictionary thermoDict
        (
            IOobject
            (
                "thermophysicalProperties",
                mesh.time().constant(),
                mesh,
                IOobject::MUST_READ_IF_MODIFIED,
                IOobject::NO_WRITE,
                false
            )
        );

        word thermoTypeName;

        if (thermoDict.isDict("thermoType"))
        {
            const dictionary& thermoTypeDict(thermoDict.subDict("thermoType"));

            thermoTypeName =
                word(thermoTypeDict.lookup("transport")) + '<'
              + word(thermoTypeDict.lookup("thermo")) + '<'
              + word(thermoTypeDict.lookup("equationOfState")) + '<'
              + word(thermoTypeDict.lookup("specie")) + ">>,"
              + word(thermoTypeDict.lookup("energy")) + ">";
        }
        else
        {
            FatalIOErrorIn
            (
                (ChemistryModel::typeName + "::New(const mesh&)").c_str(),
                thermoDict
            )   << "thermoType is in the old format and must be upgraded"
                << exit(FatalIOError);
        }

        Switch isTDAC(chemistryTypeDict.lookupOrDefault("TDAC", false));

        // Construct the name of the chemistry type from the components
        if (isTDAC)
        {
            chemistryTypeName =
                word(chemistryTypeDict.lookup("chemistrySolver")) + '<'
              + "TDACChemistryModel<"
              + word(chemistryTypeDict.lookup("chemistryThermo")) + ','
              + thermoTypeName + ">>";
        }
        else
        {
            chemistryTypeName =
                word(chemistryTypeDict.lookup("chemistrySolver")) + '<'
              + "chemistryModel<"
              + word(chemistryTypeDict.lookup("chemistryThermo")) + ','
              + thermoTypeName + ">>";
        }

        typename ChemistryModel::fvMeshConstructorTable::iterator cstrIter =
            ChemistryModel::fvMeshConstructorTablePtr_->find(chemistryTypeName);

        if (cstrIter == ChemistryModel::fvMeshConstructorTablePtr_->end())
        {
            FatalErrorIn(ChemistryModel::typeName + "::New(const mesh&)")
                << "Unknown " << ChemistryModel::typeName << " type " << nl
                << "chemistryType" << chemistryTypeDict << nl << nl
                << "Valid " << ChemistryModel::typeName << " types are:"
                << nl << nl;

            // Get the list of all the suitable chemistry packages available
            wordList validChemistryTypeNames
            (
                ChemistryModel::fvMeshConstructorTablePtr_->sortedToc()
            );

            // Build a table of the thermo packages constituent parts
            // Note: row-0 contains the names of constituent parts
            List<wordList> validChemistryTypeNameCmpts
            (
                validChemistryTypeNames.size() + 1
            );

            validChemistryTypeNameCmpts[0].setSize(nCmpt);
            forAll(validChemistryTypeNameCmpts[0], j)
            {
                validChemistryTypeNameCmpts[0][j] = cmptNames[j];
            }

            // Split the thermo package names into their constituent parts
            forAll(validChemistryTypeNames, i)
            {
                validChemistryTypeNameCmpts[i+1] = basicThermo::splitThermoName
                (
                    validChemistryTypeNames[i],
                    nCmpt
                );
            }

            // Print the table of available packages
            // in terms of their constituent parts
            printTable(validChemistryTypeNameCmpts, FatalError);

            FatalError<< exit(FatalError);
        }
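The selector therefore expects a chemistryType sub-dictionary in constant/chemistryProperties naming the solver and thermo components, plus an optional TDAC switch; the thermo part of the name comes from the thermoType sub-dictionary of thermophysicalProperties. A sketch of the chemistryProperties side, with illustrative component names:

chemistryType
{
    chemistrySolver     ode;
    chemistryThermo     psi;
    TDAC                off;    // optional; defaults to off
}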
template<class CompType, class ThermoType>
Foam::autoPtr<Foam::chemistryTabulationMethod<CompType, ThermoType>>
Foam::chemistryTabulationMethod<CompType, ThermoType>::New
(
    const IOdictionary& dict,
    TDACChemistryModel<CompType, ThermoType>& chemistry
)
{
    IOdictionary thermoDict
    (
        IOobject
        (
            "thermophysicalProperties",
            dict.db().time().constant(),
            dict.db(),
            IOobject::MUST_READ_IF_MODIFIED,
            IOobject::NO_WRITE,
            false
        )
    );

    word thermoTypeName;

    if (thermoDict.isDict("thermoType"))
    {
        const dictionary& thermoTypeDict(thermoDict.subDict("thermoType"));

        thermoTypeName =
            word(thermoTypeDict.lookup("transport")) + '<'
          + word(thermoTypeDict.lookup("thermo")) + '<'
          + word(thermoTypeDict.lookup("equationOfState")) + '<'
          + word(thermoTypeDict.lookup("specie")) + ">>,"
          + word(thermoTypeDict.lookup("energy")) + ">";
    }
    else
    {
        FatalIOErrorInFunction(thermoDict)
            << "thermoType is in the old format and must be upgraded"
            << exit(FatalIOError);
    }

    dictionary tabdict(dict.subDict("tabulation"));

    word chemistryTabulationMethodName =
        word(tabdict.lookup("method")) + '<'
      + word(dict.subDict("chemistryType").lookup("chemistryThermo")) + ','
      + thermoTypeName + '>';

    typename dictionaryConstructorTable::iterator cstrIter =
        dictionaryConstructorTablePtr_->find(chemistryTabulationMethodName);

    if (cstrIter == dictionaryConstructorTablePtr_->end())
    {
        FatalErrorInFunction
            << "Unknown chemistryTabulationMethodType type "
            << chemistryTabulationMethodName << endl << endl
            << "Valid chemistryTabulationMethodType types are :" << endl
            << dictionaryConstructorTablePtr_->toc()
            << exit(FatalError);
    }

    return autoPtr<chemistryTabulationMethod<CompType, ThermoType>>
    (
        cstrIter()(dict, chemistry)
    );
}
int main(int argc, char *argv[])
{
#   include "setRootCase.H"
#   include "createTime.H"

    // Get times list
    instantList Times = runTime.times();

    const label startTime = 1;
    const label endTime = Times.size();
    const label nSnapshots = Times.size() - 1;

    Info<< "Number of snapshots: " << nSnapshots << endl;

    // Create a list of snapshots
    PtrList<volScalarField> fields(nSnapshots);

    runTime.setTime(Times[startTime], startTime);

#   include "createMesh.H"

    IOdictionary PODsolverDict
    (
        IOobject
        (
            "PODsolverDict",
            runTime.system(),
            mesh,
            IOobject::MUST_READ,
            IOobject::NO_WRITE
        )
    );

    scalar accuracy = readScalar
    (
        PODsolverDict.subDict("scalarTransportCoeffs").lookup("accuracy")
    );

    Info<< "Seeking accuracy: " << accuracy << endl;

    word fieldName
    (
        PODsolverDict.subDict("scalarTransportCoeffs").lookup("field")
    );

    label snapI = 0;

    labelList timeIndices(nSnapshots);

    for (label i = startTime; i < endTime; i++)
    {
        runTime.setTime(Times[i], i);

        Info<< "Time = " << runTime.timeName() << endl;

        mesh.readUpdate();

        Info<< " Reading " << fieldName << endl;

        fields.set
        (
            snapI,
            new volScalarField
            (
                IOobject
                (
                    fieldName,
                    runTime.timeName(),
                    mesh,
                    IOobject::MUST_READ
                ),
                mesh
            )
        );

        // Rename the field
        fields[snapI].rename(fieldName + name(i));

        timeIndices[snapI] = i;
        snapI++;

        Info<< endl;
    }

    timeIndices.setSize(snapI);

    // Build the POD ortho-normal base to the requested accuracy
    Info<< "Reading \n" << endl;
    scalarPODOrthoNormalBase eb(fields, accuracy);

    const scalarRectangularMatrix& coeffs = eb.interpolationCoeffs();

    // Check all snapshots
    forAll(fields, fieldI)
    {
        runTime.setTime(Times[timeIndices[fieldI]], timeIndices[fieldI]);

        volScalarField pReconstruct
        (
            IOobject
            (
                fieldName + "PODreconstruct",
                runTime.timeName(),
                mesh,
                IOobject::NO_READ
            ),
            mesh,
            dimensionedScalar("zero", fields[fieldI].dimensions(), 0)
        );

        for (label baseI = 0; baseI < eb.baseSize(); baseI++)
        {
            pReconstruct += coeffs[fieldI][baseI]*eb.orthoField(baseI);
        }

        scalar sumFieldError = Foam::sqrt
        (
            sumSqr
            (
                pReconstruct.internalField()
              - fields[fieldI].internalField()
            )
        );

        scalar measure =
            Foam::sqrt(sumSqr(fields[fieldI].internalField())) + SMALL;

        scalar sumFieldRelError = sumFieldError/measure;

        Info<< "Field error: absolute = " << sumFieldError
            << " relative = " << sumFieldRelError
            << " measure = " << measure << endl;

        pReconstruct.write();
    }
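The dictionary read here lives in system/PODsolverDict and must contain a scalarTransportCoeffs sub-dictionary giving the snapshot field name and the target accuracy for the ortho-normal base. A minimal sketch; the field name and value are illustrative:

scalarTransportCoeffs
{
    field       T;
    accuracy    0.99;
}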
int main(int argc, char *argv[])
{
#   include "addTimeOptions.H"
#   include "setRootCase.H"
#   include "createTime.H"
#   include "createMesh.H"

    IOdictionary stlDefs
    (
        IOobject
        (
            "stlDefinitions",
            runTime.constant(),
            "triSurface",
            mesh,
            IOobject::MUST_READ,
            IOobject::NO_WRITE
        )
    );

    wordList toc = stlDefs.toc();

    forAll(toc, item)
    {
        if (stlDefs.isDict(toc[item]))
        {
            Info<< "\nCreating the STL surface for " << toc[item] << endl;

            pointField pp
            (
                stlDefs.subDict(toc[item]).lookup("points")
            );

            faceList faces(stlDefs.subDict(toc[item]).lookup("faces"));

            triFaceList tfl(0);
            label count(0);

            if
            (
                stlDefs.subDict(toc[item])
               .lookupOrDefault<Switch>("extrude", false)
            )
            {
                if (faces.size() <= 1)
                {
                    extrudeFacesAndPoints
                    (
                        stlDefs.subDict(toc[item]),
                        faces,
                        pp
                    );
                }
                else
                {
                    Info<< "\nWARNING: Using extrude, but"
                        << " multiple faces are defined\n" << endl;
                }
            }

            forAll(faces, facei)
            {
                faceTriangulation triangulation(pp, faces[facei], true);

                tfl.setSize(count + triangulation.size());

                forAll(triangulation, triI)
                {
                    tfl[count++] = triangulation[triI];
                }
            }

            triSurface ts(tfl, pp);

            Info<< "Writing the STL surface for " << toc[item] << endl;

            ts.write("constant/triSurface/" + toc[item] + ".stl");
        }
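Every sub-dictionary of constant/triSurface/stlDefinitions becomes one STL file; it must provide points and faces, and may set the optional extrude switch, which defaults to false. A hedged sketch of one entry, with a placeholder name and geometry:

inletPlate
{
    points  ( (0 0 0) (1 0 0) (1 1 0) (0 1 0) );
    faces   ( (0 1 2 3) );
    extrude false;      // optional
}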
int main(int argc, char *argv[])
{
    argList::validOptions.insert("overwrite", "");
#   include "setRootCase.H"
#   include "createTime.H"
    runTime.functionObjects().off();
#   include "createMesh.H"

    Info<< "Read mesh in = "
        << runTime.cpuTimeIncrement() << " s" << endl;

    const bool overwrite = args.optionFound("overwrite");

    // Check patches and faceZones are synchronised
    mesh.boundaryMesh().checkParallelSync(true);
    meshRefinement::checkCoupledFaceZones(mesh);

    // Read decomposePar dictionary
    IOdictionary decomposeDict
    (
        IOobject
        (
            "decomposeParDict",
            runTime.system(),
            mesh,
            IOobject::MUST_READ,
            IOobject::NO_WRITE
        )
    );

    // Read meshing dictionary
    IOdictionary meshDict
    (
        IOobject
        (
            "snappyHexMeshDict",
            runTime.system(),
            mesh,
            IOobject::MUST_READ,
            IOobject::NO_WRITE
        )
    );

    // all surface geometry
    const dictionary& geometryDict = meshDict.subDict("geometry");

    // refinement parameters
    const dictionary& refineDict = meshDict.subDict("castellatedMeshControls");

    // mesh motion and mesh quality parameters
    const dictionary& motionDict = meshDict.subDict("meshQualityControls");

    // snap-to-surface parameters
    const dictionary& snapDict = meshDict.subDict("snapControls");

    // layer addition parameters
    const dictionary& layerDict = meshDict.subDict("addLayersControls");

    const scalar mergeDist = getMergeDistance
    (
        mesh,
        readScalar(meshDict.lookup("mergeTolerance"))
    );

    // Debug
    // ~~~~~

    const label debug(readLabel(meshDict.lookup("debug")));
    if (debug > 0)
    {
        meshRefinement::debug = debug;
        autoRefineDriver::debug = debug;
        autoSnapDriver::debug = debug;
        autoLayerDriver::debug = debug;
    }

    // Read geometry
    // ~~~~~~~~~~~~~

    searchableSurfaces allGeometry
    (
        IOobject
        (
            "abc",                          // dummy name
            mesh.time().constant(),         // instance
            //mesh.time().findInstance("triSurface", word::null),// instance
            "triSurface",                   // local
            mesh.time(),                    // registry
            IOobject::MUST_READ,
            IOobject::NO_WRITE
        ),
        geometryDict
    );

    // Read refinement surfaces
    // ~~~~~~~~~~~~~~~~~~~~~~~~

    Info<< "Reading refinement surfaces." << endl;
    refinementSurfaces surfaces
    (
        allGeometry,
        refineDict.subDict("refinementSurfaces")
    );
    Info<< "Read refinement surfaces in = "
        << mesh.time().cpuTimeIncrement() << " s" << nl << endl;

    // Read refinement shells
    // ~~~~~~~~~~~~~~~~~~~~~~

    Info<< "Reading refinement shells." << endl;
    shellSurfaces shells
    (
        allGeometry,
        refineDict.subDict("refinementRegions")
    );
    Info<< "Read refinement shells in = "
        << mesh.time().cpuTimeIncrement() << " s" << nl << endl;

    Info<< "Setting refinement level of surface to be consistent"
        << " with shells." << endl;
    surfaces.setMinLevelFields(shells);
    Info<< "Checked shell refinement in = "
        << mesh.time().cpuTimeIncrement() << " s" << nl << endl;

    // Refinement engine
    // ~~~~~~~~~~~~~~~~~

    Info<< nl
        << "Determining initial surface intersections" << nl
        << "-----------------------------------------" << nl
        << endl;

    // Main refinement engine
    meshRefinement meshRefiner
    (
        mesh,
        mergeDist,      // tolerance used in sorting coordinates
        overwrite,      // overwrite mesh files?
        surfaces,       // for surface intersection refinement
        shells          // for volume (inside/outside) refinement
    );
    Info<< "Calculated surface intersections in = "
        << mesh.time().cpuTimeIncrement() << " s" << nl << endl;

    // Some stats
    meshRefiner.printMeshInfo(debug, "Initial mesh");

    meshRefiner.write
    (
        debug & meshRefinement::OBJINTERSECTIONS,
        mesh.time().path()/meshRefiner.timeName()
    );

    // Add all the surface regions as patches
    // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    labelList globalToPatch;
    {
        Info<< nl
            << "Adding patches for surface regions" << nl
            << "----------------------------------" << nl
            << endl;

        // From global region number to mesh patch.
        globalToPatch.setSize(surfaces.nRegions(), -1);

        Info<< "Patch\tRegion" << nl
            << "-----\t------"
            << endl;

        const labelList& surfaceGeometry = surfaces.surfaces();

        forAll(surfaceGeometry, surfI)
        {
            label geomI = surfaceGeometry[surfI];

            const wordList& regNames = allGeometry.regionNames()[geomI];

            Info<< surfaces.names()[surfI] << ':' << nl << nl;

            forAll(regNames, i)
            {
                label patchI = meshRefiner.addMeshedPatch
                (
                    regNames[i],
                    wallPolyPatch::typeName
                );

                Info<< patchI << '\t' << regNames[i] << nl;

                globalToPatch[surfaces.globalRegion(surfI, i)] = patchI;
            }

            Info<< nl;
        }
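The top-level structure of the system/snappyHexMeshDict read by this utility follows directly from the subDict and lookup calls above; a minimal skeleton, with the contents of each sub-dictionary omitted:

geometry                 { /* searchable surfaces */ }
castellatedMeshControls  { refinementSurfaces {} refinementRegions {} /* ... */ }
snapControls             { /* ... */ }
addLayersControls        { /* ... */ }
meshQualityControls      { /* ... */ }

mergeTolerance  1e-6;
debug           0;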
void getCellTable(const fvMesh& mesh)
{
    cellTableMap_.clear();
    cellTableId_.setSize(mesh.nCells(), 1);

    IOdictionary cellTableDict
    (
        IOobject
        (
            "cellTable",
            "constant",
            mesh,
            IOobject::READ_IF_PRESENT,
            IOobject::NO_WRITE,
            false
        )
    );

    volScalarField volField
    (
        IOobject
        (
            "cellTableId",
            mesh.time().timeName(),
            mesh,
            IOobject::READ_IF_PRESENT,
            IOobject::NO_WRITE,
            false
        ),
        mesh,
        dimensionedScalar("cellTableId", dimless, 1.0)
    );

    // get cellTableId information from the volScalarField if possible
    if (volField.headerOk())
    {
        const scalarField& field = volField.internalField();

        forAll(field, cellI)
        {
            cellTableId_[cellI] = static_cast<int>(field[cellI]);
        }

        if (cellTableDict.headerOk())
        {
            // convert dictionary to map
            wordList toc = cellTableDict.toc();

            forAll(toc, i)
            {
                word keyword = toc[i];
                if (!cellTableDict.isDict(keyword)) continue;

                const dictionary& dict = cellTableDict.subDict(keyword);

                if (dict.found("Id") && dict.found("MaterialType"))
                {
                    label Id;
                    dict["Id"] >> Id;
                    dict["MaterialType"] >> keyword;

                    if (keyword == "fluid")
                    {
                        cellTableMap_.insert(Id, 1);
                    }
                    else if (keyword == "solid")
                    {
                        cellTableMap_.insert(Id, 2);
                    }
                }
            }
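Each sub-dictionary of the optional constant/cellTable file pairs an Id with a MaterialType of either fluid or solid; entries with any other MaterialType are ignored by the code above. A sketch, with illustrative region names:

fluidRegion
{
    Id              1;
    MaterialType    fluid;
}

heatSink
{
    Id              2;
    MaterialType    solid;
}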
Foam::coherentFlameModel2b::coherentFlameModel2b
(
    volScalarField& rho,
    volVectorField& U,
    volScalarField& Su,
    volScalarField& Sigma,
    volScalarField& b,
    psiuReactionThermo& thermo,
    compressible::turbulenceModel& turbulence,
    IOdictionary& mdData
)
:
    flameletModel(rho),
    dictionary_(mdData.subDict("coherentFlameModel2bCoeffs")),
    alphaSigma_(dictionary_.lookupOrDefault("alphaSigma", 2.1)),
    betaSigma_(dictionary_.lookupOrDefault("betaSigma", 1.0)),
    CSigma_(dictionary_.lookupOrDefault("CSigma", 0.5)),
    Clt_(dictionary_.lookupOrDefault("Coeff_lt", 1.0)),
    quenchingCoeff_(dictionary_.lookupOrDefault("quenchingCoeff", 1.0)),
    fittedGammaK_(dictionary_.lookupOrDefault("fittedGammaK", false)),
    rho_(rho),
    U_(U),
    Su_(Su),
    Sigma_(Sigma),
    b_(b),
    thermo_(thermo),
    turbulence_(turbulence)
{}
int main(int argc, char *argv[])
{
#   include "setRootCase.H"
#   include "createTime.H"

    Info<< "\nReading g" << endl;
    uniformDimensionedVectorField g
    (
        IOobject
        (
            "g",
            runTime.constant(),
            runTime,
            IOobject::MUST_READ,
            IOobject::NO_WRITE
        )
    );

    Info<< "\nReading waveProperties\n" << endl;

    IOdictionary waveProperties
    (
        IOobject
        (
            "waveProperties.input",
            runTime.constant(),
            runTime,
            IOobject::MUST_READ,
            IOobject::NO_WRITE
        )
    );

    // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //

    IOobject wOut
    (
        "waveProperties",
        runTime.constant(),
        runTime,
        IOobject::NO_READ,
        IOobject::NO_WRITE
    );

    // Write waveProperties with the above computed changes
    OFstream os
    (
        wOut.objectPath(),
#if EXTBRANCH==1
        ios_base::out|ios_base::trunc,
#elif OFPLUSBRANCH==1
        // Nothing to be put here
#else
    #if OFVERSION<170
        ios_base::out|ios_base::trunc,
    #endif
#endif
        IOstream::ASCII,
        IOstream::currentVersion,
        IOstream::UNCOMPRESSED
    );

    // Write the OF banner
    wOut.writeBanner(os);

    // Write the file information. Class name is not correct when
    // using wOut.writeHeader(os); hence manual entries
    os << "FoamFile" << nl;
    os << token::BEGIN_BLOCK << incrIndent << nl;
    os << indent << "version" << tab << IOstream::currentVersion
       << token::END_STATEMENT << nl;
    os << indent << "format" << tab << "ascii;" << nl;
    os << indent << "class" << tab << "dictionary;" << nl;
    os << indent << "object" << tab << "waveProperties;" << nl;
    os << decrIndent << indent << token::END_BLOCK << nl;

    // Write the divider
    wOut.writeDivider(os);
    os << nl;

    /*
        Loop over all subdicts in waveProperties. For each of them compute
        the wave parameters relevant for that particular wave theory.
    */
    wordList toc = waveProperties.toc();

    forAll(toc, item)
    {
        // If a sub-dictionary, then compute parameters and write the subdict
        if (waveProperties.isDict(toc[item]))
        {
            dictionary& sd = waveProperties.subDict(toc[item]);

            autoPtr<setWaveProperties> props
            (
                setWaveProperties::New(runTime, sd, true)
            );

            props->set(os);
        }
        else
        {
            label Nspaces = 20;

            // Read the entry and write to the dummy output file
            ITstream read = waveProperties.lookup(toc[item]);

            os << toc[item] << token::SPACE;

            for (int i = toc[item].size(); i < Nspaces - 1; i++)
            {
                os << token::SPACE;
            }

            forAll(read, ri)
            {
                if (ri < read.size() - 1)
                {
                    os << read[ri] << token::SPACE;
                }
                else
                {
                    os << read[ri];
                }
            }
            os << token::END_STATEMENT << nl << endl;

            // Additional level of checking, so that the code does not crash
            // at run time:
            if (toc[item] == "seaLevel")
            {
                // Read the magnitude of the sea level
                scalar sL = readScalar(waveProperties.lookup("seaLevel"));

                // If the Switch seaLevelAsReference is not found _and_ the
                // magnitude of the sea level differs from 0 (zero), stop the
                // evaluation of the wave parameters
                if
                (
                    !waveProperties.found("seaLevelAsReference")
                 && SMALL < Foam::mag(sL)
                )
                {
                    // This merely looks up the string; it will not be found,
                    // and the user is forced to correct waveProperties.input
                    // before any execution is possible.
                    waveProperties.lookup("seaLevelAsReference");
                }
            }
        }
    }

    // Write end divider
    wOut.writeEndDivider(os);

    // End
    Info<< "\nEnd\n" << endl;

    return 0;
}
polyIdPairs::polyIdPairs
(
    const polyMesh& mesh,
    const potential& pot
)
:
    coeffVals_(),
    coeffNames_(),
    coeffNumIds_(),
    nIds_(0),
    coeffSize_(0),
    coeffType_("")
{
    IOdictionary potentialDict
    (
        IOobject
        (
            "potentialDict",
            mesh.time().system(),
            mesh,
            IOobject::MUST_READ,
            IOobject::NO_WRITE
        )
    );

    // Obtain information about pairs from the pair subdict of potentialDict
    const dictionary& pairDict(potentialDict.subDict("pair"));

    List<word> pairs(pairDict.toc());   // generate list of pairs
    nIds_ = pot.siteIdList().size();    // obtain size of siteIdList

    label coeffsize = 0;

    /**
     * Traverse the list of pairs, excluding electrostatic, and check for
     * interactions in order to find the total number of coefficients, which
     * gives the size of the dynamic arrays to be formed.
     * Loop only until the first existing set of coefficients is found; after
     * that the loop is broken and no further traversal is done.
     */
    for (int i = 0; i < pairs.size(); i++)
    {
        if (pairs[i] != "electrostatic")
        {
            word pp = pairDict.subDict(pairs[i]).lookup("pairPotential");

            if (pp != "noInteraction")
            {
                List<word> coeff
                (
                    pairDict.subDict(pairs[i]).subDict(pp + "Coeffs").toc()
                );

                // get the number of coeffs available
                coeffsize = coeff.size();

                // set the size of coeffNames, then fill it with the
                // coefficient names
                coeffNames_.setSize(coeffsize);
                coeffVals_.setSize(coeffsize);  // set the size of variables
                coeffNumIds_.setSize(coeffsize);
                coeffSize_ = coeffsize;
                coeffType_ = pp;

                for (int k = 0; k < coeffsize; ++k)
                {
                    coeffNames_[k] = coeff[k];
                    coeffNumIds_[k] = k;
                }

                break;  // break once the first existing coeffs are found
            }
        }
    }

    int c = 0;

    for (; c < coeffsize; ++c)
    {
        coeffVals_[c].setSize(nIds_);
    }

    for (c = 0; c < coeffsize; ++c)
    {
        for (int b = 0; b < nIds_; b++)
        {
            coeffVals_[c][b].setSize(nIds_);
        }
    }

    // make the coeffs zero
    for (c = 0; c < coeffsize; ++c)
    {
        for (int i = 0; i < nIds_; ++i)
        {
            for (int j = 0; j < nIds_; ++j)
            {
                coeffVals_[c][i][j] = 0;
            }
        }
    }

    /*
     * Loop over the potential site ids to form pairs of each site id with
     * every other: two loops, a and b, run over the site id list.
     *
     * For each pair created, check for a corresponding entry inside
     * potentialDict; if found, obtain the pairPotential value for that pair.
     *
     * If the obtained pair potential is not "noInteraction", the
     * pairPotential value is used to form the name of the "*Coeffs"
     * sub-dictionary, e.g. 'lennardJonesCoeffs' or 'morseCoeffs', for that
     * particular pair.
     *
     * To read each value for the N species inside the coeffs, the coeffsize
     * value obtained at the beginning (the number of coefficients) is used,
     * looping over the coefficient names stored in coeffNames_.
     */
    for (int a = 0; a < nIds_; a++)
    {
        word idA = pot.siteIdList()[a];

        for (int b = 0; b < nIds_; b++)
        {
            word idB = pot.siteIdList()[b];

            word pname = idA + "-" + idB;

            if (pairDict.found(pname))
            {
                word pp = pairDict.subDict(pname).lookup("pairPotential");

                if (pp != "noInteraction")
                {
                    const dictionary& coeffs =
                        pairDict.subDict(pname).subDict(pp + "Coeffs");

                    for (c = 0; c < coeffsize; ++c)
                    {
                        scalar temp = readScalar(coeffs.lookup(coeffNames_[c]));
                        coeffVals_[c][a][b] = temp;
                        coeffVals_[c][b][a] = temp;
                    }
                }
            }
        }
    }
}
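The pair sub-dictionary of system/potentialDict therefore holds one entry per site-id pair, named "idA-idB", giving a pairPotential and a matching <pairPotential>Coeffs sub-dictionary of scalar coefficients. A hedged sketch, in which the site ids, the coefficient names and the values are illustrative only:

pair
{
    Ar-Ar
    {
        pairPotential       lennardJones;

        lennardJonesCoeffs
        {
            sigma           3.4e-10;    // illustrative coefficient
            epsilon         1.65e-21;   // illustrative coefficient
        }
    }
}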
int main(int argc, char *argv[])
{
    argList::noParallel();
    argList::addBoolOption("rewrite");
    argList::addBoolOption("show");

    argList args(argc, argv);
    Time runTime(args.rootPath(), args.caseName());

    const word dictName("fvSolution");

    bool optRewrite = args.optionFound("rewrite");
    bool optShow = args.optionFound("show");

    IOdictionary solutionDict
    (
        IOobject
        (
            dictName,
            "system",
            runTime,
            IOobject::MUST_READ_IF_MODIFIED,
            IOobject::NO_WRITE,
            false
        )
    );

    if (!solutionDict.found("solvers"))
    {
        Info<< "no solvers entry found in: " << dictName << endl;
        return 2;
    }

    if (optRewrite && solutionDict.instance() != "system")
    {
        Info<< "instance is not 'system' "
            "- disabling rewrite for this file" << nl;
        optRewrite = false;
    }

    dictionary& solverDict = solutionDict.subDict("solvers");
    wordList names = solverDict.toc();
    wordList oldNames = names;

    bool changed = false;

    for (label orig = 0; orig < names.size() - 1; ++orig)
    {
        // skip patterns or entries that have already been done
        if (names[orig].empty() || wordRe::isPattern(names[orig]))
        {
            continue;
        }

        const dictionary& dict1 = solverDict.subDict(names[orig]);

        for (label check = orig + 1; check < names.size(); ++check)
        {
            // skip patterns or entries that have already been done
            if (names[check].empty() || wordRe::isPattern(names[check]))
            {
                continue;
            }

            const dictionary& dict2 = solverDict.subDict(names[check]);

            // check for identical content
            if (checkDictionaryContent(dict1, dict2))
            {
                names[orig] += "|" + names[check];
                names[check].clear();
                changed = true;
            }
        }
    }

    if (changed)
    {
        forAll(names, nameI)
        {
            if (names[nameI].empty())
            {
                solverDict.remove(oldNames[nameI]);
                Info<< " #remove " << oldNames[nameI];
            }
            else
            {
                Info<< " " << oldNames[nameI];

                if (names[nameI] != oldNames[nameI])
                {
                    // make "(abc|def)" pattern
                    keyType renamed("(" + names[nameI] + ")", true);

                    solverDict.changeKeyword(oldNames[nameI], renamed);

                    Info<< " -> " << renamed;
                }
            }
            Info<< endl;
        }

        if (optRewrite)
        {
            mvBak(solutionDict.objectPath(), "orig");
            Info<< "Backup to .orig" << nl
                << "Writing " << solutionDict.objectPath() << nl << endl;
            solutionDict.regIOobject::write();
        }
        else if (optShow)
        {
            IOobject::writeDivider(Info);
            solutionDict.dictionary::write(Info, false);
        }
        else
        {
            Info<< "\nFile not rewritten" << endl;
        }
    }
    else
    {
template<class BasicTurbulenceModel>
Foam::autoPtr<Foam::laminarModel<BasicTurbulenceModel>>
Foam::laminarModel<BasicTurbulenceModel>::New
(
    const alphaField& alpha,
    const rhoField& rho,
    const volVectorField& U,
    const surfaceScalarField& alphaRhoPhi,
    const surfaceScalarField& phi,
    const transportModel& transport,
    const word& propertiesName
)
{
    IOdictionary modelDict
    (
        IOobject
        (
            IOobject::groupName(propertiesName, U.group()),
            U.time().constant(),
            U.db(),
            IOobject::MUST_READ_IF_MODIFIED,
            IOobject::NO_WRITE,
            false
        )
    );

    if (modelDict.found("laminar"))
    {
        // get model name, but do not register the dictionary
        // otherwise it is registered in the database twice
        const word modelType
        (
            modelDict.subDict("laminar").lookup("laminarModel")
        );

        Info<< "Selecting laminar stress model " << modelType << endl;

        typename dictionaryConstructorTable::iterator cstrIter =
            dictionaryConstructorTablePtr_->find(modelType);

        if (cstrIter == dictionaryConstructorTablePtr_->end())
        {
            FatalErrorInFunction
                << "Unknown laminarModel type "
                << modelType << nl << nl
                << "Valid laminarModel types:" << endl
                << dictionaryConstructorTablePtr_->sortedToc()
                << exit(FatalError);
        }

        return autoPtr<laminarModel>
        (
            cstrIter()
            (
                alpha, rho, U, alphaRhoPhi, phi, transport, propertiesName
            )
        );
    }
    else
    {
        Info<< "Selecting laminar stress model "
            << laminarModels::Stokes<BasicTurbulenceModel>::typeName << endl;

        return autoPtr<laminarModel>
        (
            new laminarModels::Stokes<BasicTurbulenceModel>
            (
                alpha,
                rho,
                U,
                alphaRhoPhi,
                phi,
                transport,
                propertiesName
            )
        );
    }
}
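When the properties dictionary named by propertiesName (typically constant/turbulenceProperties) contains a laminar sub-dictionary, its laminarModel entry selects the model; if the sub-dictionary is absent, the Stokes model is constructed by default. A sketch of the explicit selection, assuming the Stokes type name registered by the library:

laminar
{
    laminarModel    Stokes;
}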
void rewriteField
(
    const bool isTestRun,
    const Time& runTime,
    const word& fieldName,
    const HashTable<word>& thisNames,
    const HashTable<word>& nbrNames
)
{
    // Read dictionary. (disable class type checking so we can load
    // field)
    Info<< "Loading field " << fieldName << endl;
    const word oldTypeName = IOdictionary::typeName;
    const_cast<word&>(IOdictionary::typeName) = word::null;

    IOdictionary fieldDict
    (
        IOobject
        (
            fieldName,
            runTime.timeName(),
            runTime,
            IOobject::MUST_READ_IF_MODIFIED,
            IOobject::NO_WRITE,
            false
        )
    );
    const_cast<word&>(IOdictionary::typeName) = oldTypeName;
    // Fake type back to what was in field
    const_cast<word&>(fieldDict.type()) = fieldDict.headerClassName();

    dictionary& boundaryField = fieldDict.subDict("boundaryField");

    label nChanged = 0;

    forAllConstIter(HashTable<word>, thisNames, iter)
    {
        const word& patchName = iter.key();
        const word& newName = iter();

        Info<< "Looking for entry for patch " << patchName << endl;

        // Find old patch name either direct or through wildcards
        // Find new patch name direct only
        if
        (
            boundaryField.found(patchName)
        && !boundaryField.found(newName, false, false)
        )
        {
            Info<< " Changing entry " << patchName << " to " << newName
                << endl;

            dictionary& patchDict = boundaryField.subDict(patchName);

            if (patchDict.found("value"))
            {
                // Remove any value field since wrong size.
                patchDict.remove("value");
            }

            boundaryField.changeKeyword(patchName, newName);
            boundaryField.add
            (
                nbrNames[patchName],
                patchDict
            );
            Info<< " Adding entry " << nbrNames[patchName] << endl;

            nChanged++;
        }
    }

    // Info<< "New boundaryField:" << boundaryField << endl;

    if (returnReduce(nChanged, sumOp<label>()) > 0)
    {
        if (isTestRun)
        {
            // Info<< "-test option: no changes made" << endl;
        }
        else
        {
            if (mvBak(fieldDict.objectPath(), "old"))
            {
                Info<< "Backup to " << (fieldDict.objectPath() + ".old") << nl;
            }

            Info<< "Write to " << fieldDict.objectPath() << endl;
            fieldDict.regIOobject::write();
        }
    }
    else
    {
        Info<< "No changes made to field " << fieldName << endl;
    }
    Info<< endl;
}
int main(int argc, char *argv[])
{
    Foam::argList::addBoolOption
    (
        "checkGeometry",
        "check all surface geometry for quality"
    );

    Foam::argList::addBoolOption
    (
        "conformationOnly",
        "conform to the initial points without any point motion"
    );

    #include "setRootCase.H"
    #include "createTime.H"

    runTime.functionObjects().off();

    const bool checkGeometry = args.optionFound("checkGeometry");
    const bool conformationOnly = args.optionFound("conformationOnly");

    IOdictionary foamyHexMeshDict
    (
        IOobject
        (
            args.executable() + "Dict",
            runTime.system(),
            runTime,
            IOobject::MUST_READ_IF_MODIFIED,
            IOobject::NO_WRITE
        )
    );

    if (checkGeometry)
    {
        const searchableSurfaces allGeometry
        (
            IOobject
            (
                "cvSearchableSurfaces",
                runTime.constant(),
                "triSurface",
                runTime,
                IOobject::MUST_READ,
                IOobject::NO_WRITE
            ),
            foamyHexMeshDict.subDict("geometry"),
            foamyHexMeshDict.lookupOrDefault("singleRegionName", true)
        );

        // Write some stats
        allGeometry.writeStats(List<wordList>(0), Info);

        // Check topology
        allGeometry.checkTopology(true);

        // Check geometry
        allGeometry.checkGeometry
        (
            100.0,      // max size ratio
            1e-9,       // intersection tolerance
            autoPtr<writer<scalar>>(new vtkSetWriter<scalar>()),
            0.01,       // min triangle quality
            true
        );

        return 0;
    }

    conformalVoronoiMesh::debug = true;

    Info<< "Create mesh for time = " << runTime.timeName() << nl << endl;

    conformalVoronoiMesh mesh(runTime, foamyHexMeshDict);

    if (conformationOnly)
    {
        mesh.initialiseForConformation();

        runTime++;

        mesh.writeMesh(runTime.timeName());
    }
    else
    {
        mesh.initialiseForMotion();

        while (runTime.run())
        {
            runTime++;

            Info<< nl << "Time = " << runTime.timeName() << endl;

            mesh.move();

            Info<< nl << "ExecutionTime = " << runTime.elapsedCpuTime() << " s"
                << " ClockTime = " << runTime.elapsedClockTime() << " s"
                << nl << endl;
        }
    }

    Info<< nl << "End" << nl << endl;

    return 0;
}
//! Construct the multiphase::transport properties class.
//!
//! \param[in] multiphaseTransportDictionary Dictionary stored in constant/.
//! \param[in] mesh The mesh.
Foam::multiphase::transport::transport
(
    const IOdictionary& multiphaseTransportDictionary,
    const fvMesh& mesh
)
:
    mesh_(mesh),
    multiphaseTransportDictionary_(multiphaseTransportDictionary),
    continuousPhaseSubDict_
    (
        multiphaseTransportDictionary.subDict("continuousPhase")
    ),
    dispersedPhaseSubDict_
    (
        multiphaseTransportDictionary.subDict("dispersedPhase")
    ),
    transportCoeffsSubDict_
    (
        multiphaseTransportDictionary.subDict("transportCoefficients")
    ),
    dispersedPhases_
    (
        readLabel(multiphaseTransportDictionary_.lookup("dispersedPhases"))
    ),
    Cl_(transportCoeffsSubDict_.lookup("Cl")),
    Cvm_(transportCoeffsSubDict_.lookup("Cvm")),
    rhoc_(continuousPhaseSubDict_.lookup("rho")),
    muc_(continuousPhaseSubDict_.lookup("mu")),
    nuc_(muc_/rhoc_),
    rhocField_
    (
        IOobject
        (
            "rhoc",
            mesh_.time().timeName(),
            mesh_
        ),
        mesh_,
        dimensionedScalar(continuousPhaseSubDict_.lookup("rho"))
    ),
    mucField_
    (
        IOobject
        (
            "muc",
            mesh_.time().timeName(),
            mesh_
        ),
        mesh_,
        dimensionedScalar(continuousPhaseSubDict_.lookup("mu"))
    ),
    nucField_
    (
        IOobject
        (
            "nuc",
            mesh_.time().timeName(),
            mesh_
        ),
        mucField_/rhocField_
    )
{
    rhod_.setSize(dispersedPhases_);
    mud_.setSize(dispersedPhases_);
    nud_.setSize(dispersedPhases_);
    sigmad_.setSize(dispersedPhases_);

    rhodField_.setSize(dispersedPhases_);
    mudField_.setSize(dispersedPhases_);
    nudField_.setSize(dispersedPhases_);

    for (int i = 0; i < dispersedPhases_; ++i)
    {
        rhod_.set
        (
            i,
            new dimensionedScalar
            (
                dispersedPhaseSubDict_.lookup("rho" + Foam::name(i+1))
            )
        );
        mud_.set
        (
            i,
            new dimensionedScalar
            (
                dispersedPhaseSubDict_.lookup("mu" + Foam::name(i+1))
            )
        );
        nud_.set(i, new dimensionedScalar(mud_[i]/rhod_[i]));
        sigmad_.set
        (
            i,
            new dimensionedScalar
            (
                dispersedPhaseSubDict_.lookup("sigma" + Foam::name(i+1))
            )
        );

        rhodField_.set
        (
            i,
            new volScalarField
            (
                IOobject
                (
                    "rhodField" + Foam::name(i+1),
                    mesh_.time().timeName(),
                    mesh_,
                    IOobject::NO_READ,
                    IOobject::NO_WRITE
                ),
                mesh,
                dimensionedScalar
                (
                    dispersedPhaseSubDict_.lookup("rho" + Foam::name(i+1))
                )
            )
        );
        mudField_.set
        (
            i,
            new volScalarField
            (
                IOobject
                (
                    "mudField" + Foam::name(i+1),
                    mesh_.time().timeName(),
                    mesh_,
                    IOobject::NO_READ,
                    IOobject::NO_WRITE
                ),
                mesh,
                dimensionedScalar
                (
                    dispersedPhaseSubDict_.lookup("mu" + Foam::name(i+1))
                )
            )
        );
        nudField_.set
        (
            i,
            new volScalarField
            (
                IOobject
                (
                    "nudField" + Foam::name(i+1),
                    mesh_.time().timeName(),
                    mesh_,
                    IOobject::NO_READ,
                    IOobject::NO_WRITE
                ),
                mudField_[i]/rhodField_[i]
            )
        );
    }
}
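Putting the lookups together, the dictionary passed in (stored in constant/, as the Doxygen comment notes) needs entries along these lines for, say, two dispersed phases. The dimensioned-scalar form and the numerical values are placeholders, since the member types are not shown here:

dispersedPhases         2;

continuousPhase
{
    rho     rho [1 -3 0 0 0 0 0] 1000;
    mu      mu  [1 -1 -1 0 0 0 0] 1e-3;
}

dispersedPhase
{
    rho1    rho1 [1 -3 0 0 0 0 0] 1.2;
    mu1     mu1  [1 -1 -1 0 0 0 0] 1.8e-5;
    sigma1  sigma1 [1 0 -2 0 0 0 0] 0.07;
    // rho2, mu2, sigma2 for the second dispersed phase, and so on
}

transportCoefficients
{
    Cl      0.5;
    Cvm     0.5;
}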