void readFields
(
    const vtkMesh& vMesh,
    const typename GeoField::Mesh& mesh,
    const IOobjectList& objects,
    const HashSet<word>& selectedFields,
    PtrList<GeoField>& fields
)
{
    // Search list of objects for fields of type GeoField
    IOobjectList fieldObjects(objects.lookupClass(GeoField::typeName));

    // Construct the fields
    label nFields = fields.size();
    fields.setSize(nFields + fieldObjects.size());

    for
    (
        IOobjectList::iterator iter = fieldObjects.begin();
        iter != fieldObjects.end();
        ++iter
    )
    {
        if (selectedFields.empty() || selectedFields.found(iter()->name()))
        {
            fields.set
            (
                nFields,
                vMesh.interpolate
                (
                    GeoField(*iter(), mesh)
                )
            );
            nFields++;
        }
    }

    fields.setSize(nFields);
}

void Foam::readFields
(
    const Mesh& mesh,
    const IOobjectList& objects,
    PtrList<GeoField>& fields
)
{
    // Search list of objects for fields of type GeoField
    IOobjectList fieldObjects(objects.lookupClass(GeoField::typeName));

    // Remove the cellDist field
    IOobjectList::iterator cellDistIter = fieldObjects.find("cellDist");
    if (cellDistIter != fieldObjects.end())
    {
        fieldObjects.erase(cellDistIter);
    }

    // Construct the fields
    fields.setSize(fieldObjects.size());

    label fieldi = 0;
    for
    (
        IOobjectList::iterator iter = fieldObjects.begin();
        iter != fieldObjects.end();
        ++iter
    )
    {
        fields.set
        (
            fieldi++,
            new GeoField(*iter(), mesh)
        );
    }
}

void Foam::preservePatchTypes
(
    const objectRegistry& obr,
    const word& meshInstance,
    const fileName& meshDir,
    const wordList& patchNames,
    PtrList<dictionary>& patchDicts,
    const word& defaultFacesName,
    word& defaultFacesType
)
{
    patchDicts.setSize(patchNames.size());

    dictionary patchDictionary;

    // Read boundary file as single dictionary
    {
        IOobject patchEntriesHeader
        (
            "boundary",
            meshInstance,
            meshDir,
            obr,
            IOobject::MUST_READ,
            IOobject::NO_WRITE,
            false
        );

        if (patchEntriesHeader.typeHeaderOk<polyBoundaryMesh>(true))
        {
            // Create a list of entries from the boundary file.
            polyBoundaryMeshEntries patchEntries(patchEntriesHeader);

            forAll(patchEntries, patchi)
            {
                patchDictionary.add(patchEntries[patchi]);
            }
        }

void coupledInfo<MeshType>::setField
(
    const wordList& fieldNames,
    const dictionary& fieldDicts,
    const label internalSize,
    PtrList<GeomField>& fields
) const
{
    typedef typename GeomField::InternalField InternalField;
    typedef typename GeomField::PatchFieldType PatchFieldType;
    typedef typename GeomField::GeometricBoundaryField GeomBdyFieldType;
    typedef typename GeomField::DimensionedInternalField DimInternalField;

    // Size up the pointer list
    fields.setSize(fieldNames.size());

    // Define patch type names, assumed to be
    // common for volume and surface fields
    word emptyType(emptyPolyPatch::typeName);
    word processorType(processorPolyPatch::typeName);

    forAll(fieldNames, i)
    {
        // Create and map the patch field values
        label nPatches = subMesh().boundary().size();

        // Create field parts
        PtrList<PatchFieldType> patchFields(nPatches);

        // Read dimensions
        dimensionSet dimSet
        (
            fieldDicts.subDict(fieldNames[i]).lookup("dimensions")
        );

        // Read the internal field
        InternalField internalField
        (
            "internalField",
            fieldDicts.subDict(fieldNames[i]),
            internalSize
        );

        // Create dummy types for initial field creation
        forAll(patchFields, patchI)
        {
            if (patchI == (nPatches - 1))
            {
                // Artificially set last patch
                patchFields.set
                (
                    patchI,
                    PatchFieldType::New
                    (
                        emptyType,
                        subMesh().boundary()[patchI],
                        DimInternalField::null()
                    )
                );
            }
            else
            {
                patchFields.set
                (
                    patchI,
                    PatchFieldType::New
                    (
                        PatchFieldType::calculatedType(),
                        subMesh().boundary()[patchI],
                        DimInternalField::null()
                    )
                );
            }
        }

        // Create field with dummy patches
        fields.set
        (
            i,
            new GeomField
            (
                IOobject
                (
                    fieldNames[i],
                    subMesh().time().timeName(),
                    subMesh(),
                    IOobject::NO_READ,
                    IOobject::NO_WRITE,
                    false
                ),
                subMesh(),
                dimSet,
                internalField,
                patchFields
            )
        );

        // Set correct references for patch internal fields,
        // and fetch values from the supplied geometric field dictionaries
        GeomBdyFieldType& bf = fields[i].boundaryField();

        forAll(bf, patchI)
        {
            if (patchI == (nPatches - 1))
            {
                // Artificially set last patch
                bf.set
                (
                    patchI,
                    PatchFieldType::New
                    (
                        emptyType,
                        subMesh().boundary()[patchI],
                        fields[i].dimensionedInternalField()
                    )
                );
            }
            else
            if (isA<processorPolyPatch>(subMesh().boundary()[patchI].patch()))
            {
                bf.set
                (
                    patchI,
                    PatchFieldType::New
                    (
                        processorType,
                        subMesh().boundary()[patchI],
                        fields[i].dimensionedInternalField()
                    )
                );
            }
            else
            {
                bf.set
                (
                    patchI,
                    PatchFieldType::New
                    (
                        subMesh().boundary()[patchI],
                        fields[i].dimensionedInternalField(),
                        fieldDicts.subDict
                        (
                            fieldNames[i]
                        ).subDict("boundaryField").subDict
                        (
                            subMesh().boundary()[patchI].name()
                        )
                    )
                );
            }
        }
    }

void ReadAndMapFields
(
    const fvMesh& mesh,
    const IOobjectList& objects,
    const fvMesh& tetDualMesh,
    const labelList& map,
    const typename MappedGeoField::value_type& nullValue,
    PtrList<MappedGeoField>& tetFields
)
{
    typedef typename MappedGeoField::value_type Type;

    // Search list of objects for wanted type
    IOobjectList fieldObjects(objects.lookupClass(ReadGeoField::typeName));

    tetFields.setSize(fieldObjects.size());

    label i = 0;
    forAllConstIter(IOobjectList, fieldObjects, iter)
    {
        Info<< "Converting " << ReadGeoField::typeName << ' ' << iter.key()
            << endl;

        ReadGeoField readField(*iter(), mesh);

        tetFields.set
        (
            i,
            new MappedGeoField
            (
                IOobject
                (
                    readField.name(),
                    readField.instance(),
                    readField.local(),
                    tetDualMesh,
                    IOobject::NO_READ,
                    IOobject::AUTO_WRITE,
                    readField.registerObject()
                ),
                pointMesh::New(tetDualMesh),
                dimensioned<Type>
                (
                    "zero",
                    readField.dimensions(),
                    pTraits<Type>::zero
                )
            )
        );

        Field<Type>& fld = tetFields[i].internalField();

        // Map from read field. Set unmapped entries to nullValue.
        fld.setSize(map.size(), nullValue);
        forAll(map, pointI)
        {
            label index = map[pointI];

            if (index > 0)
            {
                label cellI = index-1;
                fld[pointI] = readField[cellI];
            }
            else if (index < 0)
            {
                label faceI = -index-1;
                label bFaceI = faceI - mesh.nInternalFaces();
                if (bFaceI >= 0)
                {
                    label patchI = mesh.boundaryMesh().patchID()[bFaceI];
                    label localFaceI = mesh.boundaryMesh()[patchI].whichFace
                    (
                        faceI
                    );
                    fld[pointI] = readField.boundaryField()[patchI][localFaceI];
                }
                //else
                //{
                //    FatalErrorIn("ReadAndMapFields(..)")
                //        << "Face " << faceI << " from index " << index
                //        << " is not a boundary face." << abort(FatalError);
                //}
            }
            //else
            //{
            //    WarningIn("ReadAndMapFields(..)")
            //        << "Point " << pointI << " at "
            //        << tetDualMesh.points()[pointI]
            //        << " has no dual correspondence." << endl;
            //}
        }

void Foam::readFields::loadField
(
    const word& fieldName,
    PtrList<GeometricField<Type, fvPatchField, volMesh> >& vflds,
    PtrList<GeometricField<Type, fvsPatchField, surfaceMesh> >& sflds
) const
{
    typedef GeometricField<Type, fvPatchField, volMesh> vfType;
    typedef GeometricField<Type, fvsPatchField, surfaceMesh> sfType;

    if (obr_.foundObject<vfType>(fieldName))
    {
        if (debug)
        {
            Info<< "readFields : Field " << fieldName
                << " already in database" << endl;
        }
    }
    else if (obr_.foundObject<sfType>(fieldName))
    {
        if (debug)
        {
            Info<< "readFields : Field " << fieldName
                << " already in database" << endl;
        }
    }
    else
    {
        const fvMesh& mesh = refCast<const fvMesh>(obr_);

        IOobject fieldHeader
        (
            fieldName,
            mesh.time().timeName(),
            mesh,
            IOobject::MUST_READ,
            IOobject::NO_WRITE
        );

        if
        (
            fieldHeader.headerOk()
         && fieldHeader.headerClassName() == vfType::typeName
        )
        {
            // store field locally
            Info<< " Reading " << fieldName << endl;
            label sz = vflds.size();
            vflds.setSize(sz+1);
            vflds.set(sz, new vfType(fieldHeader, mesh));
        }
        else if
        (
            fieldHeader.headerOk()
         && fieldHeader.headerClassName() == sfType::typeName
        )
        {
            // store field locally
            Info<< " Reading " << fieldName << endl;
            label sz = sflds.size();
            sflds.setSize(sz+1);
            sflds.set(sz, new sfType(fieldHeader, mesh));
        }
    }
}

void Foam::nearWallFields::createFields
(
    PtrList<GeometricField<Type, fvPatchField, volMesh> >& sflds
) const
{
    typedef GeometricField<Type, fvPatchField, volMesh> vfType;

    HashTable<const vfType*> flds(obr_.lookupClass<vfType>());

    forAllConstIter(typename HashTable<const vfType*>, flds, iter)
    {
        const vfType& fld = *iter();

        if (fieldMap_.found(fld.name()))
        {
            const word& sampleFldName = fieldMap_[fld.name()];

            if (obr_.found(sampleFldName))
            {
                Info<< " a field " << sampleFldName
                    << " already exists on the mesh." << endl;
            }
            else
            {
                label sz = sflds.size();
                sflds.setSize(sz+1);

                IOobject io(fld);
                io.readOpt() = IOobject::NO_READ;
                io.rename(sampleFldName);

                sflds.set(sz, new vfType(io, fld));
                vfType& sampleFld = sflds[sz];

                // Reset the bcs to be directMapped
                forAllConstIter(labelHashSet, patchSet_, iter)
                {
                    label patchI = iter.key();

                    sampleFld.boundaryField().set
                    (
                        patchI,
                        new selfContainedDirectMappedFixedValueFvPatchField
                        <Type>
                        (
                            sampleFld.mesh().boundary()[patchI],
                            sampleFld.dimensionedInternalField(),
                            sampleFld.mesh().name(),
                            directMappedPatchBase::NEARESTCELL,
                            word::null,                 // samplePatch
                            -distance_,
                            sampleFld.name(),           // fieldName
                            false,                      // setAverage
                            pTraits<Type>::zero,        // average
                            interpolationCellPoint<Type>::typeName
                        )
                    );
                }

                Info<< " created " << sampleFld.name() << " to sample "
                    << fld.name() << endl;
            }
        }
    }

Foam::searchableSurfaceControl::searchableSurfaceControl
(
    const Time& runTime,
    const word& name,
    const dictionary& controlFunctionDict,
    const conformationSurfaces& geometryToConformTo,
    const scalar& defaultCellSize
)
:
    cellSizeAndAlignmentControl
    (
        runTime,
        name,
        controlFunctionDict,
        geometryToConformTo,
        defaultCellSize
    ),
    surfaceName_(controlFunctionDict.lookupOrDefault<word>("surface", name)),
    searchableSurface_(geometryToConformTo.geometry()[surfaceName_]),
    geometryToConformTo_(geometryToConformTo),
    cellSizeFunctions_(1),
    regionToCellSizeFunctions_(searchableSurface_.regions().size(), -1),
    maxPriority_(-1)
{
    Info<< indent << "Master settings:" << endl;
    Info<< incrIndent;

    cellSizeFunctions_.set
    (
        0,
        cellSizeFunction::New
        (
            controlFunctionDict,
            searchableSurface_,
            defaultCellSize_,
            labelList()
        )
    );

    Info<< decrIndent;

    PtrList<cellSizeFunction> regionCellSizeFunctions;
    DynamicList<label> defaultCellSizeRegions;

    label nRegionCellSizeFunctions = 0;

    // Loop over regions - if any entry is not specified they should
    // inherit values from the parent surface.
    if (controlFunctionDict.found("regions"))
    {
        const dictionary& regionsDict = controlFunctionDict.subDict("regions");
        const wordList& regionNames = searchableSurface_.regions();

        label nRegions = regionsDict.size();

        regionCellSizeFunctions.setSize(nRegions);
        defaultCellSizeRegions.setCapacity(nRegions);

        forAll(regionNames, regionI)
        {
            const word& regionName = regionNames[regionI];

            label regionID = geometryToConformTo_.geometry().findSurfaceRegionID
            (
                this->name(),
                regionName
            );

            if (regionsDict.found(regionName))
            {
                // Get the dictionary for region
                const dictionary& regionDict = regionsDict.subDict(regionName);

                Info<< indent << "Region " << regionName
                    << " (ID = " << regionID << ")" << " settings:" << endl;
                Info<< incrIndent;

                regionCellSizeFunctions.set
                (
                    nRegionCellSizeFunctions,
                    cellSizeFunction::New
                    (
                        regionDict,
                        searchableSurface_,
                        defaultCellSize_,
                        labelList(1, regionID)
                    )
                );
                Info<< decrIndent;

                regionToCellSizeFunctions_[regionID] = nRegionCellSizeFunctions;

                nRegionCellSizeFunctions++;
            }
            else
            {
                // Add to default list
                defaultCellSizeRegions.append(regionID);
            }
        }
    }

    if (defaultCellSizeRegions.empty() && !regionCellSizeFunctions.empty())
    {
        cellSizeFunctions_.transfer(regionCellSizeFunctions);
    }
    else if (nRegionCellSizeFunctions > 0)
    {
        regionCellSizeFunctions.setSize(nRegionCellSizeFunctions + 1);

        regionCellSizeFunctions.set
        (
            nRegionCellSizeFunctions,
            cellSizeFunction::New
            (
                controlFunctionDict,
                searchableSurface_,
                defaultCellSize_,
                labelList()
            )
        );

        const wordList& regionNames = searchableSurface_.regions();

        forAll(regionNames, regionI)
        {
            if (regionToCellSizeFunctions_[regionI] == -1)
            {
                regionToCellSizeFunctions_[regionI] = nRegionCellSizeFunctions;
            }
        }

        cellSizeFunctions_.transfer(regionCellSizeFunctions);
    }

void topoMapper::storeGradients
(
    GradientTable& gradTable,
    PtrList<gradType>& gradList
) const
{
    // Define a few typedefs for convenience
    typedef GeometricField<Type, fvPatchField, volMesh> volType;
    typedef const GeometricField<Type, fvPatchField, volMesh> constVolType;

    typedef HashTable<constVolType*> volTypeTable;

    // Fetch all fields from registry
    volTypeTable fields(mesh_.objectRegistry::lookupClass<volType>());

    // Track field count
    label nFields = 0;

    // Store old-times before gradient computation
    for
    (
        typename volTypeTable::iterator fIter = fields.begin();
        fIter != fields.end();
        ++fIter
    )
    {
        fIter()->storeOldTimes();
        nFields++;
    }

    // Size up the list
    gradList.setSize(nFields);

    label fieldIndex = 0;

    for
    (
        typename volTypeTable::const_iterator fIter = fields.begin();
        fIter != fields.end();
        ++fIter
    )
    {
        const volType& field = *fIter();

        // Compute the gradient.
        // If the fvSolution dictionary contains an entry,
        // use that, otherwise, default to leastSquares
        word gradName("grad(" + field.name() + ')');

        // Register field under a name that's unique
        word registerName("remapGradient(" + field.name() + ')');

        // Make a new entry
        if (mesh_.schemesDict().subDict("gradSchemes").found(gradName))
        {
            gradList.set
            (
                fieldIndex,
                new gradType
                (
                    IOobject
                    (
                        registerName,
                        mesh_.time().timeName(),
                        mesh_,
                        IOobject::NO_READ,
                        IOobject::NO_WRITE,
                        true
                    ),
                    fvc::grad(field, gradName)()
                )
            );
        }
        else
        {
            gradList.set
            (
                fieldIndex,
                new gradType
                (
                    IOobject
                    (
                        registerName,
                        mesh_.time().timeName(),
                        mesh_,
                        IOobject::NO_READ,
                        IOobject::NO_WRITE,
                        true
                    ),
                    fv::leastSquaresGrad<Type>(mesh_).grad(field)()
                )
            );
        }

        // Add a map entry
        gradTable.insert
        (
            field.name(),
            GradientMap(registerName, fieldIndex++)
        );
    }
}

void Foam::GAMGSolver::initVcycle
(
    PtrList<scalargpuField>& coarseCorrFields,
    PtrList<scalargpuField>& coarseSources,
    PtrList<lduMatrix::smoother>& smoothers,
    scalargpuField& scratch1,
    scalargpuField& scratch2
) const
{
    label maxSize = matrix_.diag().size();

    coarseCorrFields.setSize(matrixLevels_.size());
    coarseSources.setSize(matrixLevels_.size());
    smoothers.setSize(matrixLevels_.size() + 1);

    // Create the smoother for the finest level
    smoothers.set
    (
        0,
        lduMatrix::smoother::New
        (
            fieldName_,
            matrix_,
            interfaceBouCoeffs_,
            interfaceIntCoeffs_,
            interfaces_,
            controlDict_
        )
    );

    forAll(matrixLevels_, leveli)
    {
        if (agglomeration_.nCells(leveli) >= 0)
        {
            label nCoarseCells = agglomeration_.nCells(leveli);

            coarseSources.set(leveli, GAMGSolverCache::source(leveli, nCoarseCells));
            //coarseSources.set(leveli, new scalargpuField(nCoarseCells));
        }

        if (matrixLevels_.set(leveli))
        {
            const lduMatrix& mat = matrixLevels_[leveli];

            label nCoarseCells = mat.diag().size();

            maxSize = max(maxSize, nCoarseCells);

            //coarseCorrFields.set(leveli, new scalargpuField(nCoarseCells));
            coarseCorrFields.set(leveli, GAMGSolverCache::corr(leveli, nCoarseCells));

            smoothers.set
            (
                leveli + 1,
                lduMatrix::smoother::New
                (
                    fieldName_,
                    matrixLevels_[leveli],
                    interfaceLevelsBouCoeffs_[leveli],
                    interfaceLevelsIntCoeffs_[leveli],
                    interfaceLevels_[leveli],
                    controlDict_
                )
            );
        }
    }

    if (maxSize > matrix_.diag().size())
    {
        // Allocate some scratch storage
        scratch1.setSize(maxSize);
        scratch2.setSize(maxSize);
    }
}

void Foam::equationReader::removePowExponents
(
    const label index,
    tokenList& tl,
    PtrList<equationOperation>& map,
    labelList& opLvl,
    labelList& pl
) const
{
    // Remove pow(a,b) exponent part 'b' from an equation and create a sub-
    // equation.

    label tokenI(0);

    while (tokenI < map.size())
    {
        if (map[tokenI].operation() == equationOperation::otpow)
        {
            // Found a 'pow('. Look for ','; fail on ')', or end of list
            // pl checks ensure the ',' or ')' relate to the 'pow(', and not
            // another function / parenthesis
            const label powFoundAt(tokenI);
            const label pLvl(pl[tokenI]);

            while ((opLvl[tokenI] != 5) || (pl[tokenI] != pLvl))
            {
                if
                (
                    ((opLvl[tokenI] == -4) && (pl[tokenI] == pLvl))
                 || (tokenI == (map.size() - 1))
                )
                {
                    OStringStream description;
                    description << "pow() function takes two arguments.";
                    fatalParseError
                    (
                        index,
                        tl,
                        powFoundAt,
                        tokenI,
                        "equationReader::removePowExponents",
                        description
                    );
                }
                tokenI++;
            }

            // Found 'pow( ... ,' look for ')', fail on list end
            const label commaFoundAt(tokenI);
            while ((opLvl[tokenI] != -4) || (pl[tokenI] != pLvl))
            {
                if (tokenI == (map.size() - 1))
                {
                    OStringStream description;
                    description << "Can't find closing parenthesis for "
                        << "pow() function.";
                    fatalParseError
                    (
                        index,
                        tl,
                        powFoundAt,
                        tokenI,
                        "equationReader::removePowExponents",
                        description
                    );
                }
                tokenI++;
            }

            const label closeFoundAt(tokenI);

            // Ignore if the exponent is only 1 token
            if ((closeFoundAt - commaFoundAt) > 2)
            {
                // Now create sub-equation
                OStringStream subEqnStream;
                for
                (
                    label subTokenI(commaFoundAt + 1);
                    subTokenI < closeFoundAt;
                    subTokenI++
                )
                {
                    if
                    (
                        tl[subTokenI].isPunctuation()
                     && (tl[subTokenI].pToken() == token::COLON)
                    )
                    {
                        subEqnStream << "^";
                    }
                    else
                    {
                        subEqnStream << tl[subTokenI];
                    }
                }
                string subEqnRawText(subEqnStream.str());

                const equation& eqn(operator[](index));
                equation subEqn
                (
                    eqn.name() + "_powExponent_" + name(powFoundAt),
                    subEqnRawText,
                    eqn.overrideDimensions(),
                    eqn.changeDimensions()
                );

                bool eqnCreated(false);
                for (label eqnI(0); eqnI < size(); eqnI++)
                {
                    const equation& eqnTest(operator[](eqnI));
                    if (eqnTest.name() == subEqn.name())
                    {
                        clearEquation(eqnI);
                        eqnTest.setRawText(subEqn.rawText());
                        eqnTest.setOverrideDimensions
                        (
                            subEqn.overrideDimensions()
                        );
                        eqnTest.setChangeDimensions
                        (
                            subEqn.changeDimensions()
                        );
                        eqnCreated = true;
                    }
                }

                if (!eqnCreated)
                {
                    createEquation(subEqn);
                }

                // Change commaFoundAt + 1 entry to reflect new subEquation
                // reference
                tl[commaFoundAt + 1] = token(subEqn.name());
                map.set
                (
                    commaFoundAt + 1,
                    new equationOperation(findSource(subEqn.name()))
                );
                opLvl[commaFoundAt + 1] = 0;
                pl[commaFoundAt + 1] = pl[commaFoundAt];

                // Remove the subEquation from tl, map, opLvl and pl
                label tokensRemoved(closeFoundAt - (commaFoundAt + 2));
                label newSize(map.size() - tokensRemoved);
                for
                (
                    label subTokenI(commaFoundAt + 2);
                    subTokenI < newSize;
                    subTokenI++
                )
                {
                    tl[subTokenI] = tl[subTokenI + tokensRemoved];
                    map[subTokenI] = map[subTokenI + tokensRemoved];
                    opLvl[subTokenI] = opLvl[subTokenI + tokensRemoved];
                    pl[subTokenI] = pl[subTokenI + tokensRemoved];
                }
                tl.setSize(newSize);
                map.setSize(newSize);
                opLvl.setSize(newSize);
                pl.setSize(newSize);
            }
        }
        tokenI++;
    }
}
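

// A minimal sketch, not taken from any of the sources above, distilling the
// PtrList idiom those functions share: oversize the list with setSize(),
// hand each new element to the list with set(index, new ...) so the list
// owns the pointer, then truncate to the number of slots actually filled.
// The function name appendSelectedFields is hypothetical; GeoField,
// IOobjectList and HashSet<word> mirror the snippets above.
template<class GeoField>
void appendSelectedFields
(
    const typename GeoField::Mesh& mesh,
    const IOobjectList& objects,
    const HashSet<word>& selectedFields,
    PtrList<GeoField>& fields
)
{
    IOobjectList fieldObjects(objects.lookupClass(GeoField::typeName));

    // Oversize: worst case is that every object is selected
    label nFields = fields.size();
    fields.setSize(nFields + fieldObjects.size());

    forAllConstIter(IOobjectList, fieldObjects, iter)
    {
        if (selectedFields.empty() || selectedFields.found(iter()->name()))
        {
            // The PtrList takes ownership of the newly constructed field
            fields.set(nFields++, new GeoField(*iter(), mesh));
        }
    }

    // Truncate to the entries actually set
    fields.setSize(nFields);
}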