Foam::autoPtr<Foam::entry> Foam::entry::New(Istream& is)
{
    is.fatalCheck("entry::New(Istream&)");

    keyType keyword;

    // No valid keyword could be read: return an empty (invalid) pointer
    if (!getKeyword(keyword, is))
    {
        return autoPtr<entry>(NULL);
    }

    // A keyword was read, so an entry follows.  Peek at the next token
    // (without consuming it) to decide what kind of entry this is.
    token nextToken(is);
    is.putBack(nextToken);

    if (nextToken == token::BEGIN_BLOCK)
    {
        // '{' : the entry is a nested sub-dictionary
        return autoPtr<entry>
        (
            new dictionaryEntry(keyword, dictionary::null, is)
        );
    }

    // Anything else: a primitive (token stream) entry
    return autoPtr<entry>
    (
        new primitiveEntry(keyword, is)
    );
}
void Foam::functionEntries::ifeqEntry::skipUntil
(
    DynamicList<filePos>& stack,
    const dictionary& parentDict,
    const word& endWord,
    Istream& is
)
{
    // Consume tokens until endWord is seen at the current nesting level.
    // Nested #if/#ifeq blocks are skipped recursively to their #endif.
    while (!is.eof())
    {
        token t;
        readToken(t, is);

        if (!t.isWord())
        {
            continue;
        }

        const word& w = t.wordToken();

        if (w == "#if" || w == "#ifeq")
        {
            // Entering a nested conditional: record its position,
            // skip its whole body, then pop the record again
            stack.append(filePos(is.name(), is.lineNumber()));
            skipUntil(stack, parentDict, "#endif", is);
            stack.remove();
        }
        else if (w == endWord)
        {
            return;
        }
    }

    // Reached end-of-stream without finding the terminator
    FatalIOErrorInFunction(parentDict)
        << "Did not find matching " << endWord
        << exit(FatalIOError);
}
// from Istream blockDescriptor::blockDescriptor ( const pointField& blockMeshPoints, const curvedEdgeList& edges, Istream& is ) : blockMeshPoints_(blockMeshPoints), blockShape_(is), curvedEdges_(edges), edgePoints_(12), edgeWeights_(12), n_(), expand_(12), zoneName_() { // Look at first token token t(is); is.putBack(t); // Optional zone name if (t.isWord()) { zoneName_ = t.wordToken(); // Consume zoneName token is >> t; // New look-ahead is >> t; is.putBack(t); }
void Foam::primitiveEntry::readEntry(const dictionary& dict, Istream& is) { label keywordLineNumber = is.lineNumber(); tokenIndex() = 0; if (read(dict, is)) { setSize(tokenIndex()); tokenIndex() = 0; } else { std::ostringstream os; os << "ill defined primitiveEntry starting at keyword '" << keyword() << '\'' << " on line " << keywordLineNumber << " and ending at line " << is.lineNumber(); SafeFatalIOErrorIn ( "primitiveEntry::readEntry(const dictionary&, Istream&)", is, os.str() ); } }
Foam::WetParcel<ParcelType>::WetParcel
(
    const polyMesh& mesh,
    Istream& is,
    bool readFields
)
:
    ParcelType(mesh, is, readFields),
    Vliq_(0.0)
{
    if (readFields)
    {
        if (is.format() != IOstream::ASCII)
        {
            // Binary: read the raw bytes of the liquid volume
            is.read
            (
                reinterpret_cast<char*>(&Vliq_),
                sizeof(Vliq_)
            );
        }
        else
        {
            // ASCII: parse the liquid volume as a scalar token
            Vliq_ = readScalar(is);
        }
    }

    // Check state of Istream
    is.check
    (
        "WetParcel<ParcelType>::WetParcel"
        "(const polyMesh&, Istream&, bool)"
    );
}
bool Foam::functionEntries::codeStream::execute ( const dictionary& parentDict, primitiveEntry& entry, Istream& is ) { Info<< "Using #codeStream at line " << is.lineNumber() << " in file " << parentDict.name() << endl; dynamicCode::checkSecurity ( "functionEntries::codeStream::execute(..)", parentDict ); // get code dictionary // must reference parent for stringOps::expand to work nicely dictionary codeDict("#codeStream", parentDict, is); streamingFunctionType function = getFunction(parentDict, codeDict); // use function to write stream OStringStream os(is.format()); (*function)(os, parentDict); // get the entry from this stream IStringStream resultStream(os.str()); entry.read(parentDict, resultStream); return true; }
// Construct from Istream: reads the thermo-parcel part first, then the
// mass-fraction list Y
Foam::reactingParcelInjectionData::reactingParcelInjectionData(Istream& is)
:
    thermoParcelInjectionData(is)
{
    // Stream must still be valid before each read
    is.check("reading Y's");
    is >> Y_;

    is.check("reactingParcelInjectionData(Istream& is)");
}
// Construct from Istream: reads the kinematic-parcel part first, then the
// temperature T and specific heat cp
Foam::thermoParcelInjectionData::thermoParcelInjectionData(Istream& is)
:
    kinematicParcelInjectionData(is)
{
    // Stream must still be valid before each read
    is.check("reading T");
    is >> T_;

    is.check("reading cp");
    is >> cp_;

    is.check("thermoParcelInjectionData(Istream& is)");
}
// Read and process dictionary entries until a conditional directive ends
// the active branch.  doIf indicates whether this branch was selected;
// when true, an #else/#elif terminates it and the rest is skipped.
bool Foam::functionEntries::ifeqEntry::evaluate
(
    const bool doIf,
    DynamicList<filePos>& stack,
    dictionary& parentDict,
    Istream& is
)
{
    while (!is.eof())
    {
        token t;
        readToken(t, is);

        if (t.isWord() && t.wordToken() == "#ifeq")
        {
            // Recurse to evaluate
            execute(stack, parentDict, is);
        }
        else if (t.isWord() && t.wordToken() == "#if")
        {
            // Recurse to evaluate
            ifEntry::execute(stack, parentDict, is);
        }
        else if
        (
            doIf
         && t.isWord()
         && (t.wordToken() == "#else" || t.wordToken() == "#elif")
        )
        {
            // Active branch ends here.
            // Now skip until #endif
            skipUntil(stack, parentDict, "#endif", is);
            stack.remove();
            break;
        }
        else if (t.isWord() && t.wordToken() == "#endif")
        {
            stack.remove();
            break;
        }
        else
        {
            // Ordinary content: push the token back and read it as a
            // normal dictionary entry into parentDict
            is.putBack(t);
            bool ok = entry::New(parentDict, is);
            if (!ok)
            {
                return false;
            }
        }
    }
    return true;
}
bool Foam::entry::getKeyword(keyType& keyword, Istream& is)
{
    token keywordToken;

    // Skip over any spurious ';' tokens; give up on read failure or EOF
    do
    {
        if
        (
            is.read(keywordToken).bad()
         || is.eof()
         || !keywordToken.good()
        )
        {
            return false;
        }
    }
    while (keywordToken == token::END_STATEMENT);

    // A word token is a plain keyword
    if (keywordToken.isWord())
    {
        keyword = keywordToken.wordToken();
        return true;
    }

    // A string token is also accepted, which enables wildcard keywords
    if (keywordToken.isString())
    {
        keyword = keywordToken.stringToken();
        return true;
    }

    // End of the enclosing dictionary or of the file: no keyword
    if (keywordToken == token::END_BLOCK || is.eof())
    {
        return false;
    }

    // Any other token is invalid here: warn and report failure
    cerr<< "--> FOAM Warning : " << std::endl
        << " From function " << "entry::getKeyword(keyType&, Istream&)"
        << std::endl
        << " in file " << __FILE__ << " at line " << __LINE__ << std::endl
        << " Reading " << is.name().c_str() << std::endl
        << " found " << keywordToken << std::endl
        << " expected either " << token::END_BLOCK << " or EOF"
        << std::endl;

    return false;
}
void Foam::dimensioned<Type>::initialize(Istream& is)
{
    // Peek at the first token without consuming it
    token nextToken(is);
    is.putBack(nextToken);

    // Check if the original format is used in which the name is provided
    // and reset the name to that read
    if (nextToken.isWord())
    {
        is >> name_;

        // Peek again at the token following the name
        is >> nextToken;
        is.putBack(nextToken);
    }
// Construct from keyword and Istream.  The underlying token stream is
// named "<streamName>.<keyword>" for error reporting.
Foam::primitiveEntry::primitiveEntry(const keyType& key, Istream& is)
:
    entry(key),
    ITstream
    (
        is.name() + '.' + key,
        tokenList(10),      // initial token storage; trimmed after reading
        is.format(),
        is.version()
    )
{
    // Read this entry's tokens from the stream (no enclosing dictionary)
    readEntry(dictionary::null, is);
}
// Read table contents from Istream, constructing each entry via inewt.
// Expected format starts with the element count followed by a
// '(' key entry ... ')' list.
void Foam::HashPtrTable<T, Key, Hash>::read(Istream& is, const INew& inewt)
{
    is.fatalCheck("HashPtrTable<T, Key, Hash>::read(Istream&, const INew&)");

    token firstToken(is);

    is.fatalCheck
    (
        "HashPtrTable<T, Key, Hash>::read(Istream&, const INew&) : "
        "reading first token"
    );

    if (firstToken.isLabel())
    {
        // Number of entries to read
        label s = firstToken.labelToken();

        // Read beginning of contents
        char delimiter = is.readBeginList("HashPtrTable<T, Key, Hash>");

        if (s)
        {
            // Size the table generously up front to limit rehashing
            if (2*s > this->tableSize_)
            {
                this->resize(2*s);
            }

            if (delimiter == token::BEGIN_LIST)
            {
                for (label i=0; i<s; i++)
                {
                    Key key;
                    is >> key;
                    // inewt constructs the entry from the stream; the
                    // table takes ownership of the released raw pointer
                    this->insert(key, inewt(key, is).ptr());

                    is.fatalCheck
                    (
                        "HashPtrTable<T, Key, Hash>::"
                        "read(Istream&, const INew&) : reading entry"
                    );
                }
            }
            else
            {
                FatalIOErrorIn
                (
                    "HashPtrTable<T, Key, Hash>::read(Istream&, const INew&)",
                    is
                )   << "incorrect first token, '(', found "
                    << firstToken.info()
                    << exit(FatalIOError);
            }
        }
// Construct from Istream: reads the base specie data followed by the
// (constant) density rho
Foam::incompressible::incompressible(Istream& is)
:
    specie(is),
    rho_(readScalar(is))
{
    is.check("incompressible::incompressible(Istream& is)");
}
// Read one record of n bytes from fin and add it to 'table' under
// transaction tran.  Corrupted records are logged and skipped so a load
// can proceed past bad data.
static void load_data_record(Istream& fin, const gcstring& table, int tran, int n)
{
    try
    {
        // Grow the shared load buffer (module-level) if the record
        // does not fit; growth is geometric to amortize reallocation
        if (n > loadbuf_size)
        {
            loadbuf_size = max(n, 2 * loadbuf_size);
            mem_release(loadbuf);
            loadbuf = (char*) mem_committed(loadbuf_size);
            verify(loadbuf);
        }
        fin.read(loadbuf, n);
        Record rec(loadbuf);
        // Sanity check: the record's own size must match what was read
        if (rec.cursize() != n)
            except_err(table << ": rec size " << rec.cursize() <<
                " not what was read " << n);
        if (table == "views")
            // views take a separate insertion path -- TODO confirm why
            theDB()->add_any_record(tran, table, rec);
        else
            theDB()->add_record(tran, table, rec);
    }
    catch (const Except& e)
    {
        // Best-effort load: record the problem, flag it, and continue
        errlog("load: skipping corrupted record in: ", table.str(), e.str());
        alert("skipping corrupted record in: " << table << ": " << e);
        alerts = true;
    }
}
unsigned char Foam::SHA1Digest::readHexDigit(Istream& is)
{
    // Offset such that 'A' (or 'a') maps to 10
    static const int alphaOffset = toupper('A') - 10;

    // Offset such that '0' maps to 0
    static const int zeroOffset = int('0');

    // Read characters, silently discarding '_' separators
    char c = 0;
    do
    {
        is.read(c);
    }
    while (c == '_');

    if (!isxdigit(c))
    {
        FatalIOErrorIn("SHA1Digest::readHexDigit(Istream&)", is)
            << "Illegal hex digit: '" << c << "'"
            << exit(FatalIOError);
    }

    // Map '0'-'9' directly, and 'a'-'f'/'A'-'F' via the alpha offset
    return isdigit(c) ? int(c) - zeroOffset : toupper(c) - alphaOffset;
}
// Construct from Istream: reads the base particle, then (optionally)
// the kinematic fields in ASCII or binary depending on stream format
Foam::KinematicParcel<ParcelType>::KinematicParcel
(
    const Cloud<ParcelType>& cloud,
    Istream& is,
    bool readFields
)
:
    Particle<ParcelType>(cloud, is, readFields),
    typeId_(0),
    nParticle_(0.0),
    d_(0.0),
    U_(vector::zero),
    rho_(0.0),
    tTurb_(0.0),
    UTurb_(vector::zero),
    rhoc_(0.0),
    Uc_(vector::zero),
    muc_(0.0)
{
    if (readFields)
    {
        if (is.format() == IOstream::ASCII)
        {
            // ASCII: each field is read as an individual token
            typeId_ = readLabel(is);
            nParticle_ = readScalar(is);
            d_ = readScalar(is);
            is >> U_;
            rho_ = readScalar(is);
            tTurb_ = readScalar(is);
            is >> UTurb_;
        }
        else
        {
// Construct from Istream: the whole ignition site is specified as a
// dictionary whose entries are then looked up individually
Foam::ignitionSite::ignitionSite
(
    Istream& is,
    const engineTime& edb,
    const fvMesh& mesh
)
:
    db_(edb),
    mesh_(mesh),
    ignitionSiteDict_(is),
    location_(ignitionSiteDict_.lookup("location")),
    diameter_(readScalar(ignitionSiteDict_.lookup("diameter"))),
    // "start" and "duration" are converted from crank-angle degrees to
    // (user) time via the engine database
    time_
    (
        db_.userTimeToTime
        (
            edb.degToTime(readScalar(ignitionSiteDict_.lookup("start")))
        )
    ),
    duration_
    (
        db_.userTimeToTime
        (
            edb.degToTime(readScalar(ignitionSiteDict_.lookup("duration")))
        )
    ),
    strength_(readScalar(ignitionSiteDict_.lookup("strength"))),
    timeIndex_(db_.timeIndex())
{
    // Check state of Istream
    is.check("ignitionSite::ignitionSite(Istream&)");

    // Cache the set of mesh cells covered by this ignition site
    findIgnitionCells(mesh_);
}
// Construct from Istream: reads the thermo-parcel part, then (optionally)
// the initial mass and the mass fractions, sized by the cloud composition
Foam::ReactingParcel<ParcelType>::ReactingParcel
(
    const Cloud<ParcelType>& cloud,
    Istream& is,
    bool readFields
)
:
    ThermoParcel<ParcelType>(cloud, is, readFields),
    mass0_(0.0),
    Y_(0),
    pc_(0.0)
{
    if (readFields)
    {
        // The number of mass fractions is dictated by the cloud's
        // composition model; throws std::bad_cast if the cloud is not
        // a ReactingCloud
        const ReactingCloud<ParcelType>& cR =
            dynamic_cast<const ReactingCloud<ParcelType>&>(cloud);

        const label nMixture = cR.composition().phaseTypes().size();
        Y_.setSize(nMixture);

        if (is.format() == IOstream::ASCII)
        {
            is >> mass0_ >> Y_;
        }
        else
        {
// Construct from Istream: reads the reacting-parcel part, then the gas,
// liquid and solid mass-fraction lists
Foam::reactingMultiphaseParcelInjectionData::
reactingMultiphaseParcelInjectionData(Istream& is)
:
    reactingParcelInjectionData(is)
{
    // Stream must still be valid before each read
    is.check("reading YGas's");
    is >> YGas_;

    is.check("reading YLiquid's");
    is >> YLiquid_;

    is.check("reading YSolid's");
    is >> YSolid_;

    is.check("reactingMultiphaseParcelInjectionData(Istream& is)");
}
bool Foam::functionEntries::includeEntry::execute
(
    dictionary& parentDict,
    Istream& is
)
{
    // Raw name as written in the dictionary, and its resolved form
    const fileName rawFName(is);
    const fileName fName
    (
        includeFileName(is.name().path(), rawFName, parentDict)
    );

    IFstream ifs(fName);

    if (!ifs)
    {
        // Could not open the include file: report resolved or raw name
        FatalIOErrorInFunction
        (
            is
        )   << "Cannot open include file "
            << (ifs.name().size() ? ifs.name() : rawFName)
            << " while reading dictionary " << parentDict.name()
            << exit(FatalIOError);

        return false;
    }

    // Optionally echo the name of each included file
    if (Foam::functionEntries::includeEntry::log)
    {
        Info<< fName << endl;
    }

    // Merge the contents of the include file into the parent dictionary
    parentDict.read(ifs);
    return true;
}
bool Foam::fileFormats::OFSsurfaceFormat<Face>::read
(
    Istream& is,
    MeshedSurface<Face>& surf
)
{
    // Start from an empty surface
    surf.clear();

    if (!is.good())
    {
        FatalErrorIn
        (
            "fileFormats::OFSsurfaceFormat::read"
            "(Istream&, MeshedSurface<Face>&)"
        )   << "read error "
            << exit(FatalError);
    }

    // Read the raw surface components ...
    pointField points;
    List<Face> faces;
    List<surfZone> zones;
    read(is, points, faces, zones);

    // ... and transfer them into the surface without copying
    surf.reset
    (
        xferMove(points),
        xferMove(faces),
        xferMove(zones)
    );

    return true;
}
// Construct from Istream: the phase is transmitted as a named dictionary
// whose keyword is the phase type and whose entries map specie name to
// mass fraction
Foam::phaseProperties::phaseProperties(Istream& is)
:
    phase_(UNKNOWN),
    stateLabel_("(unknown)"),
    names_(0),
    Y_(0),
    globalIds_(0),
    globalCarrierIds_(0)
{
    is.check("Foam::phaseProperties::phaseProperties(Istream& is)");

    dictionaryEntry phaseInfo(dictionary::null, is);

    // Decode the phase type from the dictionary keyword
    phase_ = phaseTypeNames_[phaseInfo.keyword()];
    stateLabel_ = phaseToStateLabel(phase_);

    if (phaseInfo.size() > 0)
    {
        label nComponents = phaseInfo.size();
        // Pre-size all per-component lists; ids default to -1 (unset)
        names_.setSize(nComponents, "unknownSpecie");
        Y_.setSize(nComponents, 0.0);
        globalIds_.setSize(nComponents, -1);
        globalCarrierIds_.setSize(nComponents, -1);

        label cmptI = 0;
        forAllConstIter(IDLList<entry>, phaseInfo, iter)
        {
            // Each entry: specie name -> mass fraction
            names_[cmptI] = iter().keyword();
            Y_[cmptI] = readScalar(phaseInfo.lookup(names_[cmptI]));
            cmptI++;
        }
// Construct from Istream: equation-of-state data, then the constant heat
// capacity Cv and the heat of formation Hf
Foam::eConstThermo<equationOfState>::eConstThermo(Istream& is)
:
    equationOfState(is),
    Cv_(readScalar(is)),    // heat capacity at constant volume
    Hf_(readScalar(is))     // heat of formation
{
    is.check("eConstThermo::eConstThermo(Istream& is)");
}
void Foam::functionEntries::ifeqEntry::readToken(token& t, Istream& is)
{
    // Read tokens directly rather than via entry::getKeyword so that
    // directive words such as #else and #endif are not consumed.
    // Spurious ';' tokens are skipped; stops on failure or end-of-file.
    while (true)
    {
        if
        (
            is.read(t).bad()
         || is.eof()
         || !t.good()
        )
        {
            return;
        }

        if (t != token::END_STATEMENT)
        {
            return;
        }
    }
}
// Construct from Istream: name, number of moles, molecular weight
Foam::specie::specie(Istream& is)
:
    name_(is),
    nMoles_(readScalar(is)),
    molWeight_(readScalar(is))
{
    is.check("specie::specie(Istream& is)");
}
Foam::string Foam::functionEntries::negEntry::negateVariable
(
    const dictionary& parentDict,
    Istream& is
)
{
    // Read variable name as a word including the '$'
    const word varWord(is);

    if (varWord[0] != '$')
    {
        FatalIOErrorInFunction
        (
            parentDict
        )   << "Expected variable name beginning with a '$' but found '"
            << varWord << "'" << exit(FatalIOError);

        return string::null;
    }

    // Strip the leading '$' from the variable name
    const string varName = varWord(1, varWord.size()-1);

    // Lookup the variable name in the parent dictionary....
    const entry* ePtr = parentDict.lookupScopedEntryPtr(varName, true, false);

    if (!ePtr || !ePtr->isStream())
    {
        FatalIOErrorInFunction
        (
            parentDict
        )   << "Illegal dictionary variable name " << varName << endl
            << "Valid dictionary entries are " << parentDict.toc()
            << exit(FatalIOError);

        return string::null;
    }

    const token variable(ePtr->stream());

    // Render the variable's token as a string
    OStringStream buf(is.format());
    buf << variable;
    const string str(buf.str());

    // Toggle the sign: strip an existing leading '-', else prepend one
    if (str[0] == '-')
    {
        return str(1, str.size() - 1);
    }

    return '-' + str;
}
// Construct from Istream: the injector is specified as a dictionary from
// which the concrete injectorType is selected
injector::injector(const Time& t, Istream& is)
:
    injectorDict_(is),
    properties_(injectorType::New(t, injectorDict_))
{
    // Check state of Istream
    is.check("Istream& operator>>(Istream&, injector&)");
}
// Dispatch a '#function' dictionary directive to the handler registered
// in the run-time selection table under functionName
bool Foam::functionEntry::execute
(
    const word& functionName,
    const dictionary& parentDict,
    primitiveEntry& entry,
    Istream& is
)
{
    is.fatalCheck
    (
        "functionEntry::execute"
        "(const word&, const dictionary&, primitiveEntry&, Istream&)"
    );

    // The selection table may not be constructed yet (static
    // initialization order); warn on cerr but keep reading
    if (!executeprimitiveEntryIstreamMemberFunctionTablePtr_)
    {
        cerr<<"functionEntry::execute"
            << "(const word&, const dictionary&, primitiveEntry&, Istream&)"
            << " not yet initialized, function = "
            << functionName.c_str() << std::endl;

        // return true to keep reading anyhow
        return true;
    }

    // Look up the handler for this function entry
    executeprimitiveEntryIstreamMemberFunctionTable::iterator mfIter =
        executeprimitiveEntryIstreamMemberFunctionTablePtr_->find(functionName);

    if (mfIter == executeprimitiveEntryIstreamMemberFunctionTablePtr_->end())
    {
        FatalErrorIn
        (
            "functionEntry::execute"
            "(const word&, const dictionary&, primitiveEntry&, Istream&)"
        )   << "Unknown functionEntry '" << functionName
            << "' in " << is.name() << " near line " << is.lineNumber()
            << endl << endl
            << "Valid functionEntries are :" << endl
            << executeprimitiveEntryIstreamMemberFunctionTablePtr_->toc()
            << exit(FatalError);
    }

    // Dispatch to the selected handler
    return mfIter()(parentDict, entry, is);
}
// Selector: construct the convection scheme named on schemeData from the
// multivariate run-time selection table
tmp<convectionScheme<Type> > convectionScheme<Type>::New
(
    const fvMesh& mesh,
    const typename multivariateSurfaceInterpolationScheme<Type>::
        fieldTable& fields,
    const surfaceScalarField& faceFlux,
    Istream& schemeData
)
{
    if (fv::debug)
    {
        Info<< "convectionScheme<Type>::New"
               "(const fvMesh&, "
               "const typename multivariateSurfaceInterpolationScheme<Type>"
               "::fieldTable&, const surfaceScalarField&, Istream&) : "
               "constructing convectionScheme<Type>"
            << endl;
    }

    // A scheme name must be present on the stream
    if (schemeData.eof())
    {
        FatalIOErrorIn
        (
            "convectionScheme<Type>::New"
            "(const fvMesh&, "
            "const typename multivariateSurfaceInterpolationScheme<Type>"
            "::fieldTable&, const surfaceScalarField&, Istream&)",
            schemeData
        )   << "Convection scheme not specified" << endl << endl
            << "Valid convection schemes are :" << endl
            << MultivariateConstructorTablePtr_->sortedToc()
            << exit(FatalIOError);
    }

    word schemeName(schemeData);

    // Look the scheme name up in the multivariate constructor table
    typename MultivariateConstructorTable::iterator cstrIter =
        MultivariateConstructorTablePtr_->find(schemeName);

    if (cstrIter == MultivariateConstructorTablePtr_->end())
    {
        FatalIOErrorIn
        (
            "convectionScheme<Type>::New"
            "(const fvMesh&, "
            "const typename multivariateSurfaceInterpolationScheme<Type>"
            "::fieldTable&, const surfaceScalarField&, Istream&)",
            schemeData
        )   << "unknown convection scheme " << schemeName << endl << endl
            << "Valid convection schemes are :" << endl
            << MultivariateConstructorTablePtr_->sortedToc()
            << exit(FatalIOError);
    }

    // Invoke the selected constructor, passing the remaining stream for
    // any scheme-specific coefficients
    return cstrIter()(mesh, fields, faceFlux, schemeData);
}