GenericPointPatchField<PatchField, Mesh, PointPatch, MatrixType, Type>::
GenericPointPatchField
(
    const PointPatch& p,
    const DimensionedField<Type, Mesh>& iF,
    const dictionary& dict
)
:
    CalculatedPointPatchField<PatchField, Mesh, PointPatch, MatrixType, Type>
        (p, iF),
    actualTypeName_(dict.lookup("type")),
    dict_(dict)
{
    for
    (
        dictionary::const_iterator iter = dict_.begin();
        iter != dict_.end();
        ++iter
    )
    {
        if (iter().keyword() != "type")
        {
            if
            (
                iter().isStream()
             && iter().stream().size()
            )
            {
                ITstream& is = iter().stream();

                // Read first token
                token firstToken(is);

                if
                (
                    firstToken.isWord()
                 && firstToken.wordToken() == "nonuniform"
                )
                {
                    token fieldToken(is);

                    if (!fieldToken.isCompound())
                    {
                        if
                        (
                            fieldToken.isLabel()
                         && fieldToken.labelToken() == 0
                        )
                        {
                            scalarFields_.insert
                            (
                                iter().keyword(),
                                new scalarField(0)
                            );
                        }
                        else
                        {
                            FatalIOErrorIn
                            (
                                "GenericPointPatchField<Type>::"
                                "GenericPointPatchField"
                                "(const pointPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n    token following 'nonuniform' "
                                   "is not a compound"
                                << "\n    on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }
                    }
                    else if
                    (
                        fieldToken.compoundToken().type()
                     == token::Compound<List<scalar> >::typeName
                    )
                    {
                        scalarField* fPtr = new scalarField;
                        fPtr->transfer
                        (
                            dynamicCast<token::Compound<List<scalar> > >
                            (fieldToken.transferCompoundToken())
                        );

                        if (fPtr->size() != this->size())
                        {
                            FatalIOErrorIn
                            (
                                "GenericPointPatchField<Type>::"
                                "GenericPointPatchField"
                                "(const pointPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n    size of field " << iter().keyword()
                                << " (" << fPtr->size() << ')'
                                << " is not the same size as the patch ("
                                << this->size() << ')'
                                << "\n    on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }

                        scalarFields_.insert(iter().keyword(), fPtr);
                    }
                    else if
                    (
                        fieldToken.compoundToken().type()
                     == token::Compound<List<vector> >::typeName
                    )
                    {
                        vectorField* fPtr = new vectorField;
                        fPtr->transfer
                        (
                            dynamicCast<token::Compound<List<vector> > >
                            (fieldToken.transferCompoundToken())
                        );

                        if (fPtr->size() != this->size())
                        {
                            FatalIOErrorIn
                            (
                                "GenericPointPatchField<Type>::"
                                "GenericPointPatchField"
                                "(const pointPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n    size of field " << iter().keyword()
                                << " (" << fPtr->size() << ')'
                                << " is not the same size as the patch ("
                                << this->size() << ')'
                                << "\n    on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }

                        vectorFields_.insert(iter().keyword(), fPtr);
                    }
                    else if
                    (
                        fieldToken.compoundToken().type()
                     == token::Compound<List<sphericalTensor> >::typeName
                    )
                    {
                        sphericalTensorField* fPtr = new sphericalTensorField;
                        fPtr->transfer
                        (
                            dynamicCast
                            <
                                token::Compound<List<sphericalTensor> >
                            >
                            (fieldToken.transferCompoundToken())
                        );

                        if (fPtr->size() != this->size())
                        {
                            FatalIOErrorIn
                            (
                                "GenericPointPatchField<Type>::"
                                "GenericPointPatchField"
                                "(const pointPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n    size of field " << iter().keyword()
                                << " (" << fPtr->size() << ')'
                                << " is not the same size as the patch ("
                                << this->size() << ')'
                                << "\n    on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }

                        sphericalTensorFields_.insert(iter().keyword(), fPtr);
                    }
                    else if
                    (
                        fieldToken.compoundToken().type()
                     == token::Compound<List<symmTensor> >::typeName
                    )
                    {
                        symmTensorField* fPtr = new symmTensorField;
                        fPtr->transfer
                        (
                            dynamicCast
                            <
                                token::Compound<List<symmTensor> >
                            >
                            (fieldToken.transferCompoundToken())
                        );

                        if (fPtr->size() != this->size())
                        {
                            FatalIOErrorIn
                            (
                                "GenericPointPatchField<Type>::"
                                "GenericPointPatchField"
                                "(const pointPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n    size of field " << iter().keyword()
                                << " (" << fPtr->size() << ')'
                                << " is not the same size as the patch ("
                                << this->size() << ')'
                                << "\n    on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }

                        symmTensorFields_.insert(iter().keyword(), fPtr);
                    }
                    else if
                    (
                        fieldToken.compoundToken().type()
                     == token::Compound<List<tensor> >::typeName
                    )
                    {
                        tensorField* fPtr = new tensorField;
                        fPtr->transfer
                        (
                            dynamicCast<token::Compound<List<tensor> > >
                            (fieldToken.transferCompoundToken())
                        );

                        if (fPtr->size() != this->size())
                        {
                            FatalIOErrorIn
                            (
                                "GenericPointPatchField<Type>::"
                                "GenericPointPatchField"
                                "(const pointPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n    size of field " << iter().keyword()
                                << " (" << fPtr->size() << ')'
                                << " is not the same size as the patch ("
                                << this->size() << ')'
                                << "\n    on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }

                        tensorFields_.insert(iter().keyword(), fPtr);
                    }
                    else if
                    (
                        fieldToken.compoundToken().type()
                     == token::Compound<List<symmTensor4thOrder> >::typeName
                    )
                    {
                        symmTensor4thOrderField* fPtr =
                            new symmTensor4thOrderField;
                        fPtr->transfer
                        (
                            dynamicCast
                            <
                                token::Compound<List<symmTensor4thOrder> >
                            >
                            (fieldToken.transferCompoundToken())
                        );

                        if (fPtr->size() != this->size())
                        {
                            FatalIOErrorIn
                            (
                                "GenericPointPatchField<Type>::"
                                "GenericPointPatchField"
                                "(const pointPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n    size of field " << iter().keyword()
                                << " (" << fPtr->size() << ')'
                                << " is not the same size as the patch ("
                                << this->size() << ')'
                                << "\n    on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }

                        symmTensor4thOrderFields_.insert
                        (
                            iter().keyword(),
                            fPtr
                        );
                    }
                    else if
                    (
                        fieldToken.compoundToken().type()
                     == token::Compound<List<diagTensor> >::typeName
                    )
                    {
                        diagTensorField* fPtr = new diagTensorField;
                        fPtr->transfer
                        (
                            dynamicCast
                            <
                                token::Compound<List<diagTensor> >
                            >
                            (fieldToken.transferCompoundToken())
                        );

                        if (fPtr->size() != this->size())
                        {
                            FatalIOErrorIn
                            (
                                "GenericPointPatchField<Type>::"
                                "GenericPointPatchField"
                                "(const pointPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n    size of field " << iter().keyword()
                                << " (" << fPtr->size() << ')'
                                << " is not the same size as the patch ("
                                << this->size() << ')'
                                << "\n    on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }

                        diagTensorFields_.insert(iter().keyword(), fPtr);
                    }
                    else
                    {
                        FatalIOErrorIn
                        (
                            "GenericPointPatchField<Type>::"
                            "GenericPointPatchField"
                            "(const pointPatch&, const Field<Type>&, "
                            "const dictionary&)",
                            dict
                        )   << "\n    compound " << fieldToken.compoundToken()
                            << " not supported"
                            << "\n    on patch " << this->patch().name()
                            << " of field "
                            << this->dimensionedInternalField().name()
                            << " in file "
                            << this->dimensionedInternalField().objectPath()
                            << exit(FatalIOError);
                    }
                }
            }
        }
    }
}
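// Illustrative sketch only (the patch name "topWall", the type
// "myUnknownPointBC" and the keyword "refGradient" are hypothetical, not
// taken from the original sources): the kind of pointPatchField dictionary
// entry the constructor above caches.
//
//     topWall
//     {
//         type        myUnknownPointBC;                       // -> actualTypeName_
//         refGradient nonuniform List<scalar> 3(0.1 0.2 0.3); // -> scalarFields_
//     }
//
// Only 'nonuniform' compound entries (plus the degenerate 'nonuniform 0',
// stored as an empty scalarField) are copied into the typed field tables
// here; every other entry simply remains in the stored dict_.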
Foam::genericFvPatchField<Type>::genericFvPatchField
(
    const fvPatch& p,
    const DimensionedField<Type, volMesh>& iF,
    const dictionary& dict
)
:
    calculatedFvPatchField<Type>(p, iF, dict, false),
    actualTypeName_(dict.lookup("type")),
    dict_(dict)
{
    if (!dict.found("value"))
    {
        FatalIOErrorIn
        (
            "genericFvPatchField<Type>::genericFvPatchField"
            "(const fvPatch&, const Field<Type>&, const dictionary&)",
            dict
        )   << "\n    Cannot find 'value' entry"
            << " on patch " << this->patch().name()
            << " of field " << this->dimensionedInternalField().name()
            << " in file " << this->dimensionedInternalField().objectPath()
            << nl
            << "    which is required to set the"
               " values of the generic patch field." << nl
            << "    (Actual type " << actualTypeName_ << ")" << nl
            << "\n    Please add the 'value' entry to the write function "
               "of the user-defined boundary-condition\n"
               "    or link the boundary-condition into libfoamUtil.so"
            << exit(FatalIOError);
    }

    for
    (
        dictionary::const_iterator iter = dict_.begin();
        iter != dict_.end();
        ++iter
    )
    {
        if (iter().keyword() != "type" && iter().keyword() != "value")
        {
            if
            (
                iter().isStream()
             && iter().stream().size()
            )
            {
                ITstream& is = iter().stream();

                // Read first token
                token firstToken(is);

                if
                (
                    firstToken.isWord()
                 && firstToken.wordToken() == "nonuniform"
                )
                {
                    token fieldToken(is);

                    if (!fieldToken.isCompound())
                    {
                        if
                        (
                            fieldToken.isLabel()
                         && fieldToken.labelToken() == 0
                        )
                        {
                            scalarFields_.insert
                            (
                                iter().keyword(),
                                new scalarField(0)
                            );
                        }
                        else
                        {
                            FatalIOErrorIn
                            (
                                "genericFvPatchField<Type>::genericFvPatchField"
                                "(const fvPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n    token following 'nonuniform' "
                                   "is not a compound"
                                << "\n    on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }
                    }
                    else if
                    (
                        fieldToken.compoundToken().type()
                     == token::Compound<List<scalar> >::typeName
                    )
                    {
                        scalarField* fPtr = new scalarField;
                        fPtr->transfer
                        (
                            dynamicCast<token::Compound<List<scalar> > >
                            (fieldToken.transferCompoundToken())
                        );

                        if (fPtr->size() != this->size())
                        {
                            FatalIOErrorIn
                            (
                                "genericFvPatchField<Type>::genericFvPatchField"
                                "(const fvPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n    size of field " << iter().keyword()
                                << " (" << fPtr->size() << ')'
                                << " is not the same size as the patch ("
                                << this->size() << ')'
                                << "\n    on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }

                        scalarFields_.insert(iter().keyword(), fPtr);
                    }
                    else if
                    (
                        fieldToken.compoundToken().type()
                     == token::Compound<List<vector> >::typeName
                    )
                    {
                        vectorField* fPtr = new vectorField;
                        fPtr->transfer
                        (
                            dynamicCast<token::Compound<List<vector> > >
                            (fieldToken.transferCompoundToken())
                        );

                        if (fPtr->size() != this->size())
                        {
                            FatalIOErrorIn
                            (
                                "genericFvPatchField<Type>::genericFvPatchField"
                                "(const fvPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n    size of field " << iter().keyword()
                                << " (" << fPtr->size() << ')'
                                << " is not the same size as the patch ("
                                << this->size() << ')'
                                << "\n    on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }

                        vectorFields_.insert(iter().keyword(), fPtr);
                    }
                    else if
                    (
                        fieldToken.compoundToken().type()
                     == token::Compound<List<sphericalTensor> >::typeName
                    )
                    {
                        sphericalTensorField* fPtr = new sphericalTensorField;
                        fPtr->transfer
                        (
                            dynamicCast
                            <
                                token::Compound<List<sphericalTensor> >
                            >
                            (fieldToken.transferCompoundToken())
                        );

                        if (fPtr->size() != this->size())
                        {
                            FatalIOErrorIn
                            (
                                "genericFvPatchField<Type>::genericFvPatchField"
                                "(const fvPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n    size of field " << iter().keyword()
                                << " (" << fPtr->size() << ')'
                                << " is not the same size as the patch ("
                                << this->size() << ')'
                                << "\n    on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }

                        sphericalTensorFields_.insert(iter().keyword(), fPtr);
                    }
                    else if
                    (
                        fieldToken.compoundToken().type()
                     == token::Compound<List<symmTensor> >::typeName
                    )
                    {
                        symmTensorField* fPtr = new symmTensorField;
                        fPtr->transfer
                        (
                            dynamicCast
                            <
                                token::Compound<List<symmTensor> >
                            >
                            (fieldToken.transferCompoundToken())
                        );

                        if (fPtr->size() != this->size())
                        {
                            FatalIOErrorIn
                            (
                                "genericFvPatchField<Type>::genericFvPatchField"
                                "(const fvPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n    size of field " << iter().keyword()
                                << " (" << fPtr->size() << ')'
                                << " is not the same size as the patch ("
                                << this->size() << ')'
                                << "\n    on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }

                        symmTensorFields_.insert(iter().keyword(), fPtr);
                    }
                    else if
                    (
                        fieldToken.compoundToken().type()
                     == token::Compound<List<tensor> >::typeName
                    )
                    {
                        tensorField* fPtr = new tensorField;
                        fPtr->transfer
                        (
                            dynamicCast<token::Compound<List<tensor> > >
                            (fieldToken.transferCompoundToken())
                        );

                        if (fPtr->size() != this->size())
                        {
                            FatalIOErrorIn
                            (
                                "genericFvPatchField<Type>::genericFvPatchField"
                                "(const fvPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n    size of field " << iter().keyword()
                                << " (" << fPtr->size() << ')'
                                << " is not the same size as the patch ("
                                << this->size() << ')'
                                << "\n    on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }

                        tensorFields_.insert(iter().keyword(), fPtr);
                    }
                    else
                    {
                        FatalIOErrorIn
                        (
                            "genericFvPatchField<Type>::genericFvPatchField"
                            "(const fvPatch&, const Field<Type>&, "
                            "const dictionary&)",
                            dict
                        )   << "\n    compound " << fieldToken.compoundToken()
                            << " not supported"
                            << "\n    on patch " << this->patch().name()
                            << " of field "
                            << this->dimensionedInternalField().name()
                            << " in file "
                            << this->dimensionedInternalField().objectPath()
                            << exit(FatalIOError);
                    }
                }
                else if
                (
                    firstToken.isWord()
                 && firstToken.wordToken() == "uniform"
                )
                {
                    token fieldToken(is);

                    if (!fieldToken.isPunctuation())
                    {
                        scalarFields_.insert
                        (
                            iter().keyword(),
                            new scalarField
                            (
                                this->size(),
                                fieldToken.number()
                            )
                        );
                    }
                    else
                    {
                        // Read as scalarList.
                        is.putBack(fieldToken);

                        scalarList l(is);

                        if (l.size() == vector::nComponents)
                        {
                            vector vs(l[0], l[1], l[2]);

                            vectorFields_.insert
                            (
                                iter().keyword(),
                                new vectorField(this->size(), vs)
                            );
                        }
                        else if (l.size() == sphericalTensor::nComponents)
                        {
                            sphericalTensor vs(l[0]);

                            sphericalTensorFields_.insert
                            (
                                iter().keyword(),
                                new sphericalTensorField(this->size(), vs)
                            );
                        }
                        else if (l.size() == symmTensor::nComponents)
                        {
                            symmTensor vs(l[0], l[1], l[2], l[3], l[4], l[5]);

                            symmTensorFields_.insert
                            (
                                iter().keyword(),
                                new symmTensorField(this->size(), vs)
                            );
                        }
                        else if (l.size() == tensor::nComponents)
                        {
                            tensor vs
                            (
                                l[0], l[1], l[2],
                                l[3], l[4], l[5],
                                l[6], l[7], l[8]
                            );

                            tensorFields_.insert
                            (
                                iter().keyword(),
                                new tensorField(this->size(), vs)
                            );
                        }
                        else
                        {
                            FatalIOErrorIn
                            (
                                "genericFvPatchField<Type>::genericFvPatchField"
                                "(const fvPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n    unrecognised native type " << l
                                << "\n    on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }
                    }
                }
            }
        }
    }
}
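// Illustrative sketch only (the patch name "inlet", the type "someUnknownBC"
// and the keywords "refValue"/"gamma" are hypothetical placeholders): a
// boundary entry this constructor accepts. Note that a 'value' entry is
// mandatory for the generic fvPatchField.
//
//     inlet
//     {
//         type      someUnknownBC;                        // -> actualTypeName_
//         value     uniform (1 0 0);                      // required entry
//         refValue  uniform (1 0 0);                      // 3 components -> vectorFields_
//         gamma     nonuniform List<scalar> 4(1 2 3 4);   // -> scalarFields_
//     }
//
// Uniform bracketed lists are dispatched on component count (3 -> vector,
// 1 -> sphericalTensor, 6 -> symmTensor, 9 -> tensor); a bare number is
// stored as a uniform scalarField of patch size.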
forAllConstIter(dictionary, dict, iter)
{
    if (iter().keyword() != "type" && iter().keyword() != "value")
    {
        if
        (
            iter().isStream()
         && iter().stream().size()
        )
        {
            ITstream& is = iter().stream();

            // Read first token
            token firstToken(is);

            if
            (
                firstToken.isWord()
             && firstToken.wordToken() == "nonuniform"
            )
            {
                token fieldToken(is);

                if (!fieldToken.isCompound())
                {
                    if
                    (
                        fieldToken.isLabel()
                     && fieldToken.labelToken() == 0
                    )
                    {
                        // Ignore nonuniform 0 entry
                    }
                    else
                    {
                        FatalIOErrorInFunction(dict)
                            << "\n    token following 'nonuniform' "
                               "is not a compound"
                            << exit(FatalIOError);
                    }
                }
                else if
                (
                    fieldToken.compoundToken().type()
                 == token::Compound<List<scalar>>::typeName
                )
                {
                    scalarField* fPtr = new scalarField;
                    fPtr->transfer
                    (
                        dynamicCast<token::Compound<List<scalar>>>
                        (fieldToken.transferCompoundToken(is))
                    );

                    if (fPtr->size() != patchSize_)
                    {
                        FatalIOErrorInFunction(dict)
                            << "\n    size of field " << iter().keyword()
                            << " (" << fPtr->size() << ')'
                            << " is not the same size as the patch ("
                            << patchSize_ << ')'
                            << "\n    on patch " << patchName_
                            << exit(FatalIOError);
                    }

                    scalarFields.insert(iter().keyword(), fPtr);
                }
                else if
                (
                    fieldToken.compoundToken().type()
                 == token::Compound<List<vector>>::typeName
                )
                {
                    vectorField* fPtr = new vectorField;
                    fPtr->transfer
                    (
                        dynamicCast<token::Compound<List<vector>>>
                        (fieldToken.transferCompoundToken(is))
                    );

                    if (fPtr->size() != patchSize_)
                    {
                        FatalIOErrorInFunction(dict)
                            << "\n    size of field " << iter().keyword()
                            << " (" << fPtr->size() << ')'
                            << " is not the same size as the patch ("
                            << patchSize_ << ')'
                            << "\n    on patch " << patchName_
                            << exit(FatalIOError);
                    }

                    vectorFields.insert(iter().keyword(), fPtr);
                }
                else if
                (
                    fieldToken.compoundToken().type()
                 == token::Compound<List<sphericalTensor>>::typeName
                )
                {
                    sphericalTensorField* fPtr = new sphericalTensorField;
                    fPtr->transfer
                    (
                        dynamicCast
                        <
                            token::Compound<List<sphericalTensor>>
                        >
                        (fieldToken.transferCompoundToken(is))
                    );

                    if (fPtr->size() != patchSize_)
                    {
                        FatalIOErrorInFunction(dict)
                            << "\n    size of field " << iter().keyword()
                            << " (" << fPtr->size() << ')'
                            << " is not the same size as the patch ("
                            << patchSize_ << ')'
                            << "\n    on patch " << patchName_
                            << exit(FatalIOError);
                    }

                    sphericalTensorFields.insert(iter().keyword(), fPtr);
                }
                else if
                (
                    fieldToken.compoundToken().type()
                 == token::Compound<List<symmTensor>>::typeName
                )
                {
                    symmTensorField* fPtr = new symmTensorField;
                    fPtr->transfer
                    (
                        dynamicCast
                        <
                            token::Compound<List<symmTensor>>
                        >
                        (fieldToken.transferCompoundToken(is))
                    );

                    if (fPtr->size() != patchSize_)
                    {
                        FatalIOErrorInFunction(dict)
                            << "\n    size of field " << iter().keyword()
                            << " (" << fPtr->size() << ')'
                            << " is not the same size as the patch ("
                            << patchSize_ << ')'
                            << "\n    on patch " << patchName_
                            << exit(FatalIOError);
                    }

                    symmTensorFields.insert(iter().keyword(), fPtr);
                }
                else if
                (
                    fieldToken.compoundToken().type()
                 == token::Compound<List<tensor>>::typeName
                )
                {
                    tensorField* fPtr = new tensorField;
                    fPtr->transfer
                    (
                        dynamicCast<token::Compound<List<tensor>>>
                        (fieldToken.transferCompoundToken(is))
                    );

                    if (fPtr->size() != patchSize_)
                    {
                        FatalIOErrorInFunction(dict)
                            << "\n    size of field " << iter().keyword()
                            << " (" << fPtr->size() << ')'
                            << " is not the same size as the patch ("
                            << patchSize_ << ')'
                            << "\n    on patch " << patchName_
                            << exit(FatalIOError);
                    }

                    tensorFields.insert(iter().keyword(), fPtr);
                }
                else
                {
                    FatalIOErrorInFunction(dict)
                        << "\n    compound " << fieldToken.compoundToken()
                        << " not supported"
                        << "\n    on patch " << patchName_
                        << exit(FatalIOError);
                }
            }
            else if
            (
                firstToken.isWord()
             && firstToken.wordToken() == "uniform"
            )
            {
                token fieldToken(is);

                if (!fieldToken.isPunctuation())
                {
                    scalarFields.insert
                    (
                        iter().keyword(),
                        new scalarField(patchSize_, fieldToken.number())
                    );
                }
                else
                {
                    // Read as scalarList.
                    is.putBack(fieldToken);

                    scalarList l(is);

                    if (l.size() == vector::nComponents)
                    {
                        vector vs(l[0], l[1], l[2]);

                        vectorFields.insert
                        (
                            iter().keyword(),
                            new vectorField(patchSize_, vs)
                        );
                    }
                    else if (l.size() == sphericalTensor::nComponents)
                    {
                        sphericalTensor vs(l[0]);

                        sphericalTensorFields.insert
                        (
                            iter().keyword(),
                            new sphericalTensorField(patchSize_, vs)
                        );
                    }
                    else if (l.size() == symmTensor::nComponents)
                    {
                        symmTensor vs(l[0], l[1], l[2], l[3], l[4], l[5]);

                        symmTensorFields.insert
                        (
                            iter().keyword(),
                            new symmTensorField(patchSize_, vs)
                        );
                    }
                    else if (l.size() == tensor::nComponents)
                    {
                        tensor vs
                        (
                            l[0], l[1], l[2],
                            l[3], l[4], l[5],
                            l[6], l[7], l[8]
                        );

                        tensorFields.insert
                        (
                            iter().keyword(),
                            new tensorField(patchSize_, vs)
                        );
                    }
                    else
                    {
                        FatalIOErrorInFunction(dict)
                            << "\n    unrecognised native type " << l
                            << "\n    on patch " << patchName_
                            << exit(FatalIOError);
                    }
                }
            }
        }
    }
}
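// Illustrative sketch only (the keyword "flux" is hypothetical): with
// patchSize_ == 4, an entry such as
//
//     flux  nonuniform List<vector> 4((0 0 1) (0 0 1) (0 0 1) (0 0 1));
//
// lands in vectorFields, whereas a list whose length differs from patchSize_
// triggers the FatalIOErrorInFunction size check above. The 'nonuniform 0'
// shorthand (an empty field) is accepted and deliberately ignored.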