Code example #1
File: wscmd.cpp  Project: justindvs/pyroscaphe
void processCmd(mg_connection* conn, char* cmd, size_t cmdLen)
{
   // Null-terminate the command and add END_OF_BUF_CHAR to the end of
   // the cmd buffer.  This makes it easier to parse the command
   // token-by-token.
   cmd[cmdLen] = '\0';
   cmd[cmdLen+1] = END_OF_BUF_CHAR;

   char* cmdName = firstToken(cmd);

   if (strcmp("ping", cmdName) == 0) {
      char response[] = "OK";
      mg_websocket_write(conn, 1, response, sizeof(response)-1);
   }
   else if (strcmp("keydownup", cmdName) == 0) {
      char* keyName = nextToken(cmd);
      keypress::keyDownUp(keyName);
   }
   else if (strcmp("keyseq", cmdName) == 0) {
      char* keyName = nextToken(cmd);
      keypress::keySequence(keyName);
   }
   else if (strcmp("keydown", cmdName) == 0) {
      char* keyName = nextToken(cmd);
      keypress::keyDown(keyName);    
   }
   else if (strcmp("keyup", cmdName) == 0) {
      char* keyName = nextToken(cmd);
      keypress::keyUp(keyName);
   }
   else {
      printf("Unhandled cmd \"%s\"\n", cmdName);
   }
}
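
The firstToken()/nextToken() helpers that processCmd relies on are not part of this excerpt. As a rough sketch of what the code above appears to assume — a space-delimited command terminated by '\0' plus an END_OF_BUF_CHAR sentinel, walked by a cursor shared between the two helpers — one hypothetical implementation (not the actual pyroscaphe code) would be:

static const char END_OF_BUF_CHAR = '\x04';   // placeholder value; the project defines its own
static char* s_pos = nullptr;                 // parse cursor shared by the two helpers

char* nextToken(char* /*cmd*/)
{
   if (s_pos == nullptr || *s_pos == END_OF_BUF_CHAR) {
      return nullptr;                          // no more tokens in the buffer
   }
   char* tok = s_pos;
   while (*s_pos != '\0' && *s_pos != ' ') {
      ++s_pos;                                 // scan to the end of this token
   }
   if (*s_pos == ' ') {
      *s_pos++ = '\0';                         // terminate the token, step past the space
   } else {
      ++s_pos;                                 // step past '\0' onto END_OF_BUF_CHAR
   }
   return tok;
}

char* firstToken(char* cmd)
{
   s_pos = cmd;                                // rewind the cursor to the start of the buffer
   return nextToken(cmd);
}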
Code example #2
File: Tokenizer.cpp  Project: AlexSenpai/A.I.
std::string Tokenizer::lastToken(void) {
	std::string theLastToken;
	while(firstToken() != "") {
		theLastToken = thisToken;
	}
	return theLastToken;
}
Code example #3
File: Tokenizer.cpp  Project: AlexSenpai/A.I.
int Tokenizer::countTokens(void) {

	resetPosition();
	int tokenNumber = 0;
	for(; firstToken() != ""; tokenNumber++ )
		;
	resetPosition(); 
	return tokenNumber;
}
Code example #4
void Foam::IOPosition<ParticleType>::readData
(
    Cloud<ParticleType>& c,
    bool checkClass
)
{
    Istream& is = readStream(checkClass ? typeName : "");

    token firstToken(is);

    if (firstToken.isLabel())
    {
        label s = firstToken.labelToken();

        // Read beginning of contents
        is.readBeginList("Cloud<ParticleType>");

        for (label i=0; i<s; i++)
        {
            // Do not read any fields, position only
            c.append(new ParticleType(c, is, false));
        }

        // Read end of contents
        is.readEndList("Cloud<ParticleType>");
    }
    else if (firstToken.isPunctuation())
    {
        if (firstToken.pToken() != token::BEGIN_LIST)
        {
            FatalIOErrorIn
            (
                "void IOPosition<ParticleType>::readData"
                "(Cloud<ParticleType>&, bool)",
                is
            )   << "incorrect first token, '(', found "
                << firstToken.info()
                << exit(FatalIOError);
        }

        token lastToken(is);
        while
        (
           !(
                lastToken.isPunctuation()
             && lastToken.pToken() == token::END_LIST
            )
        )
        {
            is.putBack(lastToken);
            // Do not read any fields, position only
            c.append(new ParticleType(c, is, false));
            is >> lastToken;
        }
    }
    else
    {
Code example #5
File: Tokenizer.cpp  Project: AlexSenpai/A.I.
void Tokenizer::tokenize( vstring &v ) {
	if(buffer.length() == 0) {
		return;
	}
	resetPosition();
	for( ; firstToken() != ""; v.push_back(thisToken))
		;
	resetPosition();
}
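
The Tokenizer methods in examples #2, #3 and #5 all rely on the same implied interface: firstToken() scans forward through an internal buffer, stores the current token in thisToken, and returns "" once the input is exhausted, while resetPosition() rewinds the scan. A minimal self-contained sketch of such a class is shown below; it is an illustration built from that implied behaviour, not the actual AlexSenpai/A.I. implementation.

#include <iostream>
#include <sstream>
#include <string>
#include <vector>

typedef std::vector<std::string> vstring;       // as used by Tokenizer::tokenize above

class Tokenizer {
public:
    explicit Tokenizer(const std::string& text) : buffer(text), position(0) {}

    // Return the next whitespace-delimited token and remember it in thisToken;
    // return "" once the buffer is exhausted.
    std::string firstToken() {
        std::istringstream in(buffer.substr(position));
        std::string tok;
        if (!(in >> tok)) {
            thisToken = "";
            return "";
        }
        position = buffer.find(tok, position) + tok.length();   // advance past this token
        thisToken = tok;
        return tok;
    }

    void resetPosition() { position = 0; }

    std::string thisToken;

private:
    std::string buffer;
    std::size_t position;
};

int main() {
    Tokenizer t("alpha beta gamma");
    vstring tokens;
    for (std::string tok = t.firstToken(); tok != ""; tok = t.firstToken()) {
        tokens.push_back(tok);                   // same loop shape as tokenize()/countTokens()
    }
    std::cout << tokens.size() << " tokens, last = " << tokens.back() << std::endl;
    return 0;
}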
Code example #6
void DomainMatchFilterRuleMap::
doPut(FilterRule const& rule)
{
    auto* const pattern =
                dynamic_cast<DomainMatchPattern const*>(&rule.pattern());
    assert(pattern);

    auto const token = firstToken(pattern->domainPattern());
    m_rules.insert(token, &rule);
}
Code example #7
void PrefixMatchFilterRuleMap::
doPut(FilterRule const& rule)
{
    auto* const pattern =
                dynamic_cast<BasicMatchPattern const*>(&rule.pattern());
    assert(pattern);
    assert(pattern->isBeginMatch());

    auto const token = firstToken(pattern->pattern());
    m_rules.insert(token, &rule);
}
Code example #8
Foam::autoPtr<Foam::blockVertex> Foam::blockVertex::New
(
    const dictionary& dict,
    const label index,
    const searchableSurfaces& geometry,
    Istream& is
)
{
    if (debug)
    {
        InfoInFunction << "Constructing blockVertex" << endl;
    }

    token firstToken(is);

    if (firstToken.isPunctuation() && firstToken.pToken() == token::BEGIN_LIST)
    {
        // Putback the opening bracket
        is.putBack(firstToken);

        return autoPtr<blockVertex>
        (
            new blockVertices::pointVertex(dict, index, geometry, is)
        );
    }
    else if (firstToken.isWord())
    {
        const word faceType(firstToken.wordToken());

        IstreamConstructorTable::iterator cstrIter =
            IstreamConstructorTablePtr_->find(faceType);

        if (cstrIter == IstreamConstructorTablePtr_->end())
        {
            FatalErrorInFunction
                << "Unknown blockVertex type "
                << faceType << nl << nl
                << "Valid blockVertex types are" << endl
                << IstreamConstructorTablePtr_->sortedToc()
                << abort(FatalError);
        }

        return autoPtr<blockVertex>(cstrIter()(dict, index, geometry, is));
    }
    else
    {
        FatalIOErrorInFunction(is)
            << "incorrect first token, expected <word> or '(', found "
            << firstToken.info()
            << exit(FatalIOError);

        return autoPtr<blockVertex>(nullptr);
    }
}
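
blockVertex::New above follows a common OpenFOAM idiom: read the first token, then either put it back and hand the stream to a parser selected by that token's type (punctuation vs. word), or fail with a descriptive error. A simplified, library-free illustration of the same peek-and-dispatch pattern over std::istream — not the OpenFOAM API, and with a made-up classifyEntry() helper — looks like this:

#include <iostream>
#include <sstream>
#include <string>

// Decide how an entry should be parsed by peeking at its first token, then
// restore the stream position so the chosen parser sees the whole entry.
std::string classifyEntry(std::istream& is)
{
    std::streampos start = is.tellg();
    std::string first;
    is >> first;
    is.seekg(start);                             // analogous to Istream::putBack(firstToken)

    if (!first.empty() && first[0] == '(') {
        return "point-list entry";               // handled like blockVertices::pointVertex
    }
    return "named entry: " + first;              // handled like the run-time selection table
}

int main()
{
    std::istringstream a("(0 0 1)");
    std::istringstream b("projected (0 0 1) sphere");
    std::cout << classifyEntry(a) << '\n';       // point-list entry
    std::cout << classifyEntry(b) << '\n';       // named entry: projected
    return 0;
}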
Code example #9
void Foam::HashPtrTable<T, Key, Hash>::read(Istream& is, const INew& inewt)
{
    is.fatalCheck("HashPtrTable<T, Key, Hash>::read(Istream&, const INew&)");

    token firstToken(is);

    is.fatalCheck
    (
        "HashPtrTable<T, Key, Hash>::read(Istream&, const INew&) : "
        "reading first token"
    );

    if (firstToken.isLabel())
    {
        label s = firstToken.labelToken();

        // Read beginning of contents
        char delimiter = is.readBeginList("HashPtrTable<T, Key, Hash>");

        if (s)
        {
            if (2*s > this->tableSize_)
            {
                this->resize(2*s);
            }

            if (delimiter == token::BEGIN_LIST)
            {
                for (label i=0; i<s; i++)
                {
                    Key key;
                    is >> key;
                    this->insert(key, inewt(key, is).ptr());

                    is.fatalCheck
                    (
                        "HashPtrTable<T, Key, Hash>::"
                        "read(Istream&, const INew&) : reading entry"
                    );
                }
            }
            else
            {
                FatalIOErrorIn
                (
                    "HashPtrTable<T, Key, Hash>::read(Istream&, const INew&)",
                    is
                )   << "incorrect first token, '(', found " << firstToken.info()
                    << exit(FatalIOError);
            }
        }
Code example #10
Field<Type>::Field
(
    const word& keyword,
    const dictionary& dict,
    const label s
)
{
    if (s)
    {
        ITstream& is = dict.lookup(keyword);

        // Read first token
        token firstToken(is);

        if (firstToken.isWord())
        {
            if (firstToken.wordToken() == "uniform")
            {
                this->setSize(s);
                operator=(pTraits<Type>(is));
            }
            else if (firstToken.wordToken() == "nonuniform")
            {
                is >> static_cast<List<Type>&>(*this);
                if (this->size() != s)
                {
                    FatalIOErrorIn
                    (
                        "Field<Type>::Field"
                        "(const word& keyword, "
                        "const dictionary& dict, const label s)",
                        dict
                    )   << "size " << this->size()
                        << " is not equal to the given value of " << s
                        << exit(FatalIOError);
                }
            }
            else
            {
                FatalIOErrorIn
                (
                    "Field<Type>::Field"
                    "(const word& keyword, "
                    "const dictionary& dict, const label s)",
                    dict
                )   << "expected keyword 'uniform' or 'nonuniform', found "
                    << firstToken.wordToken()
                    << exit(FatalIOError);
            }
        }
Code example #11
File: xmipp_strings.cpp  Project: I2PC/scipion
// Get word ================================================================
char *firstWord(char *str)
{
    char *token;

    // Get token
    if (str != NULL)
        token = firstToken(str);
    else
        token = nextToken();

    // Check that there is something
    if (token == NULL)
        REPORT_ERROR(ERR_VALUE_EMPTY, "Empty token");

    return token;
}
Code example #12
File: Constant.C  Project: Kiiree/CONSELFcae-dev
Foam::Constant<Type>::Constant(const word& entryName, const dictionary& dict)
:
    DataEntry<Type>(entryName),
    value_(pTraits<Type>::zero),
    dimensions_(dimless)
{
    Istream& is(dict.lookup(entryName));
    word entryType(is);
    token firstToken(is);
    if (firstToken.isWord())
    {
        token nextToken(is);
        if (nextToken == token::BEGIN_SQR)
        {
            is.putBack(nextToken);
            is >> dimensions_;
            is >> value_;
        }
Code example #13
File: polynomial.C  Project: Kiiree/RapidCFD-dev
Foam::polynomial::polynomial(const word& entryName, const dictionary& dict)
:
    scalarDataEntry(entryName),
    coeffs_(),
    preCoeffs_(coeffs_.size()),
    expCoeffs_(coeffs_.size()),
    canIntegrate_(true),
    dimensions_(dimless)
{
    Istream& is(dict.lookup(entryName));
    word entryType(is);

    token firstToken(is);
    is.putBack(firstToken);
    if (firstToken == token::BEGIN_SQR)
    {
        is  >> this->dimensions_;
    }
Code example #14
File: sdputil.cpp  Project: cdaffara/symbiandump-os2
// -----------------------------------------------------------------------------
// SdpUtil::IsTokenCharWithOptionalSlash
// Checks if all the possible two elements divided by slash are valid tokens
// -----------------------------------------------------------------------------
//
TBool SdpUtil::IsTokenCharWithOptionalSlash(const TDesC8& aValue)
	{
	TInt lineEndPosition = aValue.Locate('/');

	if ( lineEndPosition != KErrNotFound )
		{
		TPtrC8 firstToken( aValue.Left( lineEndPosition ) );
		if( !IsTokenChar( firstToken ) ||
		    !IsTokenChar( aValue.Mid( lineEndPosition + 1 ) ) )
			{
			return EFalse;
			}
		}
	else
		{
		return IsTokenChar( aValue );
		}
	return ETrue;
	}
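
Stripped of the Symbian descriptor types, the rule above is simply: the value is valid when it is one run of token characters, or two such runs separated by a single '/'. A plain-C++ restatement (illustration only; isTokenChar stands in for SdpUtil::IsTokenChar) would be:

#include <string>

bool isTokenCharWithOptionalSlash(const std::string& value,
                                  bool (*isTokenChar)(const std::string&))
{
    const std::string::size_type slash = value.find('/');
    if (slash == std::string::npos) {
        return isTokenChar(value);                   // no slash: a single token
    }
    return isTokenChar(value.substr(0, slash))       // part before the slash
        && isTokenChar(value.substr(slash + 1));     // part after the slash
}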
Code example #15
Foam::genericFvPatchField<Type>::genericFvPatchField
(
    const fvPatch& p,
    const DimensionedField<Type, volMesh>& iF,
    const dictionary& dict
)
:
    calculatedFvPatchField<Type>(p, iF, dict, false),
    actualTypeName_(dict.lookup("type")),
    dict_(dict)
{
    if (!dict.found("value"))
    {
        FatalIOErrorIn
        (
            "genericFvPatchField<Type>::genericFvPatchField"
            "(const fvPatch&, const Field<Type>&, const dictionary&)",
            dict
        )   << "\n    Cannot find 'value' entry"
            << " on patch " << this->patch().name()
            << " of field " << this->dimensionedInternalField().name()
            << " in file " << this->dimensionedInternalField().objectPath()
            << nl
            << "    which is required to set the"
               " values of the generic patch field." << nl
            << "    (Actual type " << actualTypeName_ << ")" << nl
            << "\n    Please add the 'value' entry to the write function "
               "of the user-defined boundary-condition\n"
               "    or link the boundary-condition into libfoamUtil.so"
            << exit(FatalIOError);
    }

    for
    (
        dictionary::const_iterator iter = dict_.begin();
        iter != dict_.end();
        ++iter
    )
    {
        if (iter().keyword() != "type" && iter().keyword() != "value")
        {
            if
            (
                iter().isStream()
             && iter().stream().size()
            )
            {
                ITstream& is = iter().stream();

                // Read first token
                token firstToken(is);

                if
                (
                    firstToken.isWord()
                 && firstToken.wordToken() == "nonuniform"
                )
                {
                    token fieldToken(is);

                    if (!fieldToken.isCompound())
                    {
                        if
                        (
                            fieldToken.isLabel()
                         && fieldToken.labelToken() == 0
                        )
                        {
                            scalarFields_.insert
                            (
                                iter().keyword(),
                                new scalarField(0)
                            );
                        }
                        else
                        {
                            FatalIOErrorIn
                            (
                                "genericFvPatchField<Type>::genericFvPatchField"
                                "(const fvPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n    token following 'nonuniform' "
                                  "is not a compound"
                                << "\n    on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                            << exit(FatalIOError);
                        }
                    }
                    else if
                    (
                        fieldToken.compoundToken().type()
                     == token::Compound<List<scalar> >::typeName
                    )
                    {
                        scalarField* fPtr = new scalarField;
                        fPtr->transfer
                        (
                            dynamicCast<token::Compound<List<scalar> > >
                            (
                                fieldToken.transferCompoundToken()
                            )
                        );

                        if (fPtr->size() != this->size())
                        {
                            FatalIOErrorIn
                            (
                                "genericFvPatchField<Type>::genericFvPatchField"
                                "(const fvPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n    size of field " << iter().keyword()
                                << " (" << fPtr->size() << ')'
                                << " is not the same size as the patch ("
                                << this->size() << ')'
                                << "\n    on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }

                        scalarFields_.insert(iter().keyword(), fPtr);
                    }
                    else if
                    (
                        fieldToken.compoundToken().type()
                     == token::Compound<List<vector> >::typeName
                    )
                    {
                        vectorField* fPtr = new vectorField;
                        fPtr->transfer
                        (
                            dynamicCast<token::Compound<List<vector> > >
                            (
                                fieldToken.transferCompoundToken()
                            )
                        );

                        if (fPtr->size() != this->size())
                        {
                            FatalIOErrorIn
                            (
                                "genericFvPatchField<Type>::genericFvPatchField"
                                "(const fvPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n    size of field " << iter().keyword()
                                << " (" << fPtr->size() << ')'
                                << " is not the same size as the patch ("
                                << this->size() << ')'
                                << "\n    on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }

                        vectorFields_.insert(iter().keyword(), fPtr);
                    }
                    else if
                    (
                        fieldToken.compoundToken().type()
                     == token::Compound<List<sphericalTensor> >::typeName
                    )
                    {
                        sphericalTensorField* fPtr = new sphericalTensorField;
                        fPtr->transfer
                        (
                            dynamicCast
                            <
                                token::Compound<List<sphericalTensor> >
                            >
                            (
                                fieldToken.transferCompoundToken()
                            )
                        );

                        if (fPtr->size() != this->size())
                        {
                            FatalIOErrorIn
                            (
                                "genericFvPatchField<Type>::genericFvPatchField"
                                "(const fvPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n    size of field " << iter().keyword()
                                << " (" << fPtr->size() << ')'
                                << " is not the same size as the patch ("
                                << this->size() << ')'
                                << "\n    on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }

                        sphericalTensorFields_.insert(iter().keyword(), fPtr);
                    }
                    else if
                    (
                        fieldToken.compoundToken().type()
                     == token::Compound<List<symmTensor> >::typeName
                    )
                    {
                        symmTensorField* fPtr = new symmTensorField;
                        fPtr->transfer
                        (
                            dynamicCast
                            <
                                token::Compound<List<symmTensor> >
                            >
                            (
                                fieldToken.transferCompoundToken()
                            )
                        );

                        if (fPtr->size() != this->size())
                        {
                            FatalIOErrorIn
                            (
                                "genericFvPatchField<Type>::genericFvPatchField"
                                "(const fvPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n    size of field " << iter().keyword()
                                << " (" << fPtr->size() << ')'
                                << " is not the same size as the patch ("
                                << this->size() << ')'
                                << "\n    on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }

                        symmTensorFields_.insert(iter().keyword(), fPtr);
                    }
                    else if
                    (
                        fieldToken.compoundToken().type()
                     == token::Compound<List<tensor> >::typeName
                    )
                    {
                        tensorField* fPtr = new tensorField;
                        fPtr->transfer
                        (
                            dynamicCast<token::Compound<List<tensor> > >
                            (
                                fieldToken.transferCompoundToken()
                            )
                        );

                        if (fPtr->size() != this->size())
                        {
                            FatalIOErrorIn
                            (
                                "genericFvPatchField<Type>::genericFvPatchField"
                                "(const fvPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n    size of field " << iter().keyword()
                                << " (" << fPtr->size() << ')'
                                << " is not the same size as the patch ("
                                << this->size() << ')'
                                << "\n    on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }

                        tensorFields_.insert(iter().keyword(), fPtr);
                    }
                    else
                    {
                        FatalIOErrorIn
                        (
                            "genericFvPatchField<Type>::genericFvPatchField"
                            "(const fvPatch&, const Field<Type>&, "
                            "const dictionary&)",
                            dict
                        )   << "\n    compound " << fieldToken.compoundToken()
                            << " not supported"
                            << "\n    on patch " << this->patch().name()
                            << " of field "
                            << this->dimensionedInternalField().name()
                            << " in file "
                            << this->dimensionedInternalField().objectPath()
                            << exit(FatalIOError);
                    }
                }
                else if
                (
                    firstToken.isWord()
                 && firstToken.wordToken() == "uniform"
                )
                {
                    token fieldToken(is);

                    if (!fieldToken.isPunctuation())
                    {
                        scalarFields_.insert
                        (
                            iter().keyword(),
                            new scalarField
                            (
                                this->size(),
                                fieldToken.number()
                            )
                        );
                    }
                    else
                    {
                        // Read as scalarList.
                        is.putBack(fieldToken);

                        scalarList l(is);

                        if (l.size() == vector::nComponents)
                        {
                            vector vs(l[0], l[1], l[2]);

                            vectorFields_.insert
                            (
                                iter().keyword(),
                                new vectorField(this->size(), vs)
                            );
                        }
                        else if (l.size() == sphericalTensor::nComponents)
                        {
                            sphericalTensor vs(l[0]);

                            sphericalTensorFields_.insert
                            (
                                iter().keyword(),
                                new sphericalTensorField(this->size(), vs)
                            );
                        }
                        else if (l.size() == symmTensor::nComponents)
                        {
                            symmTensor vs(l[0], l[1], l[2], l[3], l[4], l[5]);

                            symmTensorFields_.insert
                            (
                                iter().keyword(),
                                new symmTensorField(this->size(), vs)
                            );
                        }
                        else if (l.size() == tensor::nComponents)
                        {
                            tensor vs
                            (
                                l[0], l[1], l[2],
                                l[3], l[4], l[5],
                                l[6], l[7], l[8]
                            );

                            tensorFields_.insert
                            (
                                iter().keyword(),
                                new tensorField(this->size(), vs)
                            );
                        }
                        else
                        {
                            FatalIOErrorIn
                            (
                                "genericFvPatchField<Type>::genericFvPatchField"
                                "(const fvPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n    unrecognised native type " << l
                                << "\n    on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }
                    }
                }
            }
        }
    }
}
Code example #16
File: BFFParser.cpp  Project: ClxS/fastbuild
// PerformVariableSubstitutions
//------------------------------------------------------------------------------
/*static*/ bool BFFParser::PerformVariableSubstitutions( const BFFIterator & startIter,
											  const BFFIterator & endIter,
											  AString & value )
{
	AStackString< 4096 > output;

	BFFIterator src( startIter );
	BFFIterator end( endIter );

	while ( src < end )
	{
		switch ( *src )
		{
			case '^':
			{
				src++; // skip escape char
				if ( src < end )
				{
					output += *src; // append escaped character
				}
				break;
			}
			case '$':
			{
				BFFIterator firstToken( src );
				src++; // skip opening $

				// find matching $
				BFFIterator startName( src );
				const char * endName = nullptr;
				while ( src < end )
				{
					if ( *src == '$' )
					{
						endName = src.GetCurrent();
						break;
					}
					src++;
				}
				if ( ( endName == nullptr ) ||
					 ( ( endName - startName.GetCurrent() ) < 1 ) )
				{
					Error::Error_1028_MissingVariableSubstitutionEnd( firstToken );
					return false; 
				}
				AStackString< MAX_VARIABLE_NAME_LENGTH > varName( startName.GetCurrent(), endName );
				const BFFVariable * var = BFFStackFrame::GetVarAny( varName );
				if ( var == nullptr )
				{
					Error::Error_1009_UnknownVariable( startName, nullptr );
					return false; 
				}
				if ( var->IsString() == false )
				{
					Error::Error_1029_VariableForSubstitutionIsNotAString( startName, varName, var->GetType() );
					return false; 
				}
				output += var->GetString();
				break;
			}
			default:
			{
				output += *src;
				break;
			}
		}
		src++;
	}

	value = output;
	return true;
}
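
The loop above implements two rules: '^' escapes the next character, and '$Name$' is replaced by the value of a string variable, with errors for an unterminated or empty name and for unknown or non-string variables. A self-contained sketch of the same substitution logic in plain C++ — using std::string and std::map instead of the FASTBuild AStackString/BFFStackFrame types, with a hypothetical substituteVariables() name — could be:

#include <iostream>
#include <map>
#include <string>

bool substituteVariables(const std::string& in,
                         const std::map<std::string, std::string>& vars,
                         std::string& out)
{
    out.clear();
    for (std::size_t i = 0; i < in.size(); ++i)
    {
        if (in[i] == '^')                            // escape: emit the next char verbatim
        {
            if (++i < in.size()) { out += in[i]; }
        }
        else if (in[i] == '$')                       // $Name$ -> variable value
        {
            const std::size_t end = in.find('$', i + 1);
            if (end == std::string::npos || end == i + 1)
            {
                return false;                        // missing or empty substitution name
            }
            std::map<std::string, std::string>::const_iterator it =
                vars.find(in.substr(i + 1, end - i - 1));
            if (it == vars.end()) { return false; }  // unknown variable
            out += it->second;
            i = end;                                 // continue after the closing '$'
        }
        else
        {
            out += in[i];
        }
    }
    return true;
}

int main()
{
    std::map<std::string, std::string> vars;
    vars["Compiler"] = "clang++";
    std::string result;
    if (substituteVariables("cmd = $Compiler$ ^$flags", vars, result))
    {
        std::cout << result << '\n';                 // prints: cmd = clang++ $flags
    }
    return 0;
}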
Code example #17
GenericPointPatchField<PatchField, Mesh, PointPatch, MatrixType, Type>::
GenericPointPatchField
(
    const PointPatch& p,
    const DimensionedField<Type, Mesh>& iF,
    const dictionary& dict
)
:
    CalculatedPointPatchField<PatchField, Mesh, PointPatch, MatrixType, Type>
        (p, iF),
    actualTypeName_(dict.lookup("type")),
    dict_(dict)
{
    for
    (
        dictionary::const_iterator iter = dict_.begin();
        iter != dict_.end();
        ++iter
    )
    {
        if (iter().keyword() != "type")
        {
            if
            (
                iter().isStream()
             && iter().stream().size()
            )
            {
                ITstream& is = iter().stream();

                // Read first token
                token firstToken(is);

                if
                (
                    firstToken.isWord()
                 && firstToken.wordToken() == "nonuniform"
                )
                {
                    token fieldToken(is);

                    if (!fieldToken.isCompound())
                    {
                        if
                        (
                            fieldToken.isLabel()
                         && fieldToken.labelToken() == 0
                        )
                        {
                            scalarFields_.insert
                            (
                                iter().keyword(),
                                new scalarField(0)
                            );
                        }
                        else
                        {
                            FatalIOErrorIn
                            (
                                "GenericPointPatchField<Type>::"
                                "GenericPointPatchField"
                                "(const pointPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n    token following 'nonuniform' "
                                  "is not a compound"
                                << "\n    on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                            << exit(FatalIOError);
                        }
                    }
                    else if
                    (
                        fieldToken.compoundToken().type()
                     == token::Compound<List<scalar> >::typeName
                    )
                    {
                        scalarField* fPtr = new scalarField;
                        fPtr->transfer
                        (
                            dynamicCast<token::Compound<List<scalar> > >
                            (
                                fieldToken.transferCompoundToken()
                            )
                        );

                        if (fPtr->size() != this->size())
                        {
                            FatalIOErrorIn
                            (
                                "GenericPointPatchField<Type>::"
                                "GenericPointPatchField"
                                "(const pointPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n    size of field " << iter().keyword()
                                << " (" << fPtr->size() << ')'
                                << " is not the same size as the patch ("
                                << this->size() << ')'
                                << "\n    on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }

                        scalarFields_.insert(iter().keyword(), fPtr);
                    }
                    else if
                    (
                        fieldToken.compoundToken().type()
                     == token::Compound<List<vector> >::typeName
                    )
                    {
                        vectorField* fPtr = new vectorField;
                        fPtr->transfer
                        (
                            dynamicCast<token::Compound<List<vector> > >
                            (
                                fieldToken.transferCompoundToken()
                            )
                        );

                        if (fPtr->size() != this->size())
                        {
                            FatalIOErrorIn
                            (
                                "GenericPointPatchField<Type>::"
                                "GenericPointPatchField"
                                "(const pointPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n    size of field " << iter().keyword()
                                << " (" << fPtr->size() << ')'
                                << " is not the same size as the patch ("
                                << this->size() << ')'
                                << "\n    on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }

                        vectorFields_.insert(iter().keyword(), fPtr);
                    }
                    else if
                    (
                        fieldToken.compoundToken().type()
                     == token::Compound<List<sphericalTensor> >::typeName
                    )
                    {
                        sphericalTensorField* fPtr = new sphericalTensorField;
                        fPtr->transfer
                        (
                            dynamicCast
                            <
                                token::Compound<List<sphericalTensor> >
                            >
                            (
                                fieldToken.transferCompoundToken()
                            )
                        );

                        if (fPtr->size() != this->size())
                        {
                            FatalIOErrorIn
                            (
                                "GenericPointPatchField<Type>::"
                                "GenericPointPatchField"
                                "(const pointPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n    size of field " << iter().keyword()
                                << " (" << fPtr->size() << ')'
                                << " is not the same size as the patch ("
                                << this->size() << ')'
                                << "\n    on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }

                        sphericalTensorFields_.insert(iter().keyword(), fPtr);
                    }
                    else if
                    (
                        fieldToken.compoundToken().type()
                     == token::Compound<List<symmTensor> >::typeName
                    )
                    {
                        symmTensorField* fPtr = new symmTensorField;
                        fPtr->transfer
                        (
                            dynamicCast
                            <
                                token::Compound<List<symmTensor> >
                            >
                            (
                                fieldToken.transferCompoundToken()
                            )
                        );

                        if (fPtr->size() != this->size())
                        {
                            FatalIOErrorIn
                            (
                                "GenericPointPatchField<Type>::"
                                "GenericPointPatchField"
                                "(const pointPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n    size of field " << iter().keyword()
                                << " (" << fPtr->size() << ')'
                                << " is not the same size as the patch ("
                                << this->size() << ')'
                                << "\n    on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }

                        symmTensorFields_.insert(iter().keyword(), fPtr);
                    }
                    else if
                    (
                        fieldToken.compoundToken().type()
                     == token::Compound<List<tensor> >::typeName
                    )
                    {
                        tensorField* fPtr = new tensorField;
                        fPtr->transfer
                        (
                            dynamicCast<token::Compound<List<tensor> > >
                            (
                                fieldToken.transferCompoundToken()
                            )
                        );

                        if (fPtr->size() != this->size())
                        {
                            FatalIOErrorIn
                            (
                                "GenericPointPatchField<Type>::"
                                "GenericPointPatchField"
                                "(const pointPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n    size of field " << iter().keyword()
                                << " (" << fPtr->size() << ')'
                                << " is not the same size as the patch ("
                                << this->size() << ')'
                                << "\n    on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }

                        tensorFields_.insert(iter().keyword(), fPtr);
                    }
                    else if
                    (
                        fieldToken.compoundToken().type()
                     == token::Compound<List<symmTensor4thOrder> >::typeName
                    )
                    {
                        symmTensor4thOrderField* fPtr = new symmTensor4thOrderField;
                        fPtr->transfer
                        (
                            dynamicCast
                            <
                                token::Compound<List<symmTensor4thOrder> >
                            >
                            (
                                fieldToken.transferCompoundToken()
                            )
                        );

                        if (fPtr->size() != this->size())
                        {
                            FatalIOErrorIn
                            (
                                "GenericPointPatchField<Type>::"
                                "GenericPointPatchField"
                                "(const pointPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n    size of field " << iter().keyword()
                                << " (" << fPtr->size() << ')'
                                << " is not the same size as the patch ("
                                << this->size() << ')'
                                << "\n    on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }

                        symmTensor4thOrderFields_.insert(iter().keyword(), fPtr);
                    }
                    else if
                    (
                        fieldToken.compoundToken().type()
                     == token::Compound<List<diagTensor> >::typeName
                    )
                    {
                        diagTensorField* fPtr = new diagTensorField;
                        fPtr->transfer
                        (
                            dynamicCast
                            <
                                token::Compound<List<diagTensor> >
                            >
                            (
                                fieldToken.transferCompoundToken()
                            )
                        );

                        if (fPtr->size() != this->size())
                        {
                            FatalIOErrorIn
                            (
                                "GenericPointPatchField<Type>::"
                                "GenericPointPatchField"
                                "(const pointPatch&, const Field<Type>&, "
                                "const dictionary&)",
                                dict
                            )   << "\n    size of field " << iter().keyword()
                                << " (" << fPtr->size() << ')'
                                << " is not the same size as the patch ("
                                << this->size() << ')'
                                << "\n    on patch " << this->patch().name()
                                << " of field "
                                << this->dimensionedInternalField().name()
                                << " in file "
                                << this->dimensionedInternalField().objectPath()
                                << exit(FatalIOError);
                        }

                        diagTensorFields_.insert(iter().keyword(), fPtr);
                    }
                    else
                    {
                        FatalIOErrorIn
                        (
                            "GenericPointPatchField<Type>::"
                            "GenericPointPatchField"
                            "(const pointPatch&, const Field<Type>&, "
                            "const dictionary&)",
                            dict
                        )   << "\n    compound " << fieldToken.compoundToken()
                            << " not supported"
                            << "\n    on patch " << this->patch().name()
                            << " of field "
                            << this->dimensionedInternalField().name()
                            << " in file "
                            << this->dimensionedInternalField().objectPath()
                            << exit(FatalIOError);
                    }
                }
            }
        }
    }
}
Code example #18
File: symmetries.cpp  Project: dtegunov/vlion
// Read Symmetry file ======================================================
// crystal symmetry matices from http://cci.lbl.gov/asu_gallery/
int SymList::read_sym_file(FileName fn_sym)
{
    int i, j;
    FILE *fpoii;
    char line[80];
    char *auxstr;
    DOUBLE ang_incr, rot_ang;
    int  fold;
    Matrix2D<DOUBLE> L(4, 4), R(4, 4);
    Matrix1D<DOUBLE> axis(3);
    int pgGroup = 0, pgOrder = 0;
    std::vector<std::string> fileContent;

    //check if reserved word

    // Open file ---------------------------------------------------------
    if ((fpoii = fopen(fn_sym.c_str(), "r")) == NULL)
    {
        //check if reserved word and return group and order
        if (isSymmetryGroup(fn_sym, pgGroup, pgOrder))
        {
        	fill_symmetry_class(fn_sym, pgGroup, pgOrder, fileContent);
        }
        else
            REPORT_ERROR((std::string)"SymList::read_sym_file:Can't open file: "
                     + " or do not recognize symmetry group" + fn_sym);
    }
    else
    {
        while (fgets(line, 79, fpoii) != NULL)
        {
            if (line[0] == ';' || line[0] == '#' || line[0] == '\0')
            	continue;
			fileContent.push_back(line);
        }
        fclose(fpoii);
    }

    // Count the number of symmetries ------------------------------------
    true_symNo = 0;
    // count number of axis and mirror planes. It will help to identify
    // the crystallographic symmetry

    int no_axis, no_mirror_planes, no_inversion_points;
    no_axis = no_mirror_planes = no_inversion_points = 0;

    for (int n=0; n<fileContent.size(); n++)
    {
    	strcpy(line,fileContent[n].c_str());
        auxstr = firstToken(line);
        if (auxstr == NULL)
        {
            std::cout << line;
            std::cout << "Wrong line in symmetry file, the line is skipped\n";
            continue;
        }
        if (strcmp(auxstr, "rot_axis") == 0)
        {
            auxstr = nextToken();
            fold = textToInteger(auxstr);
            true_symNo += (fold - 1);
            no_axis++;
        }
        else if (strcmp(auxstr, "mirror_plane") == 0)
        {
            true_symNo++;
            no_mirror_planes++;
        }
        else if (strcmp(auxstr, "inversion") == 0)
        {
            true_symNo += 1;
            no_inversion_points = 1;
        }
    }
    // Ask for memory
    __L.resize(4*true_symNo, 4);
    __R.resize(4*true_symNo, 4);
    __chain_length.resize(true_symNo);
    __chain_length.initConstant(1);

    // Read symmetry parameters
    i = 0;
    for (int n=0; n<fileContent.size(); n++)
    {
        strcpy(line,fileContent[n].c_str());
        auxstr = firstToken(line);
        // Rotational axis ---------------------------------------------------
        if (strcmp(auxstr, "rot_axis") == 0)
        {
            auxstr = nextToken();
            fold = textToInteger(auxstr);
            auxstr = nextToken();
            XX(axis) = textToDOUBLE(auxstr);
            auxstr = nextToken();
            YY(axis) = textToDOUBLE(auxstr);
            auxstr = nextToken();
            ZZ(axis) = textToDOUBLE(auxstr);
            ang_incr = 360. / fold;
            L.initIdentity();
            for (j = 1, rot_ang = ang_incr; j < fold; j++, rot_ang += ang_incr)
            {
                rotation3DMatrix(rot_ang, axis, R);
                R.setSmallValuesToZero();
                set_matrices(i++, L, R.transpose());
            }
            __sym_elements++;
            // inversion ------------------------------------------------------
        }
        else if (strcmp(auxstr, "inversion") == 0)
        {
            L.initIdentity();
            L(2, 2) = -1;
            R.initIdentity();
            R(0, 0) = -1.;
            R(1, 1) = -1.;
            R(2, 2) = -1.;
            set_matrices(i++, L, R);
            __sym_elements++;
            // mirror plane -------------------------------------------------------------
        }
        else if (strcmp(auxstr, "mirror_plane") == 0)
        {
            auxstr = nextToken();
            XX(axis) = textToFloat(auxstr);
            auxstr = nextToken();
            YY(axis) = textToFloat(auxstr);
            auxstr = nextToken();
            ZZ(axis) = textToFloat(auxstr);
            L.initIdentity();
            L(2, 2) = -1;
            Matrix2D<DOUBLE> A;
            alignWithZ(axis,A);
            A = A.transpose();
            R = A * L * A.inv();
            L.initIdentity();
            set_matrices(i++, L, R);
            __sym_elements++;
        }
    }

    compute_subgroup();

    return pgGroup;
}
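
For reference, the keywords this reader handles are rot_axis (a fold count followed by an axis vector), mirror_plane (a plane normal) and inversion, with lines starting with ';' or '#' skipped as comments. An illustrative input accepted by this parser — a made-up example, not a file from the XMIPP/RELION distributions — could therefore look like:

; two-fold rotation about Z plus a mirror plane (illustration only)
rot_axis      2  0 0 1
mirror_plane     0 0 1
inversion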
Code example #19
void Foam::LPtrList<LListBase, T>::read(Istream& is, const INew& iNew)
{
    is.fatalCheck
    (
        "LPtrList<LListBase, T>::read(Istream&, const INew&)"
    );

    token firstToken(is);

    is.fatalCheck
    (
        "LPtrList<LListBase, T>::read(Istream&, const INew&) : "
        "reading first token"
    );

    if (firstToken.isLabel())
    {
        label s = firstToken.labelToken();

        // Read beginning of contents
        char delimiter = is.readBeginList("LPtrList<LListBase, T>");

        if (s)
        {
            if (delimiter == token::BEGIN_LIST)
            {
                for (label i=0; i<s; ++i)
                {
                    this->append(iNew(is).ptr());

                    is.fatalCheck
                    (
                        "LPtrList<LListBase, T>::read(Istream&, const INew&) : "
                        "reading entry"
                    );
                }
            }
            else
            {
                T* tPtr = iNew(is).ptr();
                this->append(tPtr);

                is.fatalCheck
                (
                    "LPtrList<LListBase, T>::read(Istream&, const INew&) : "
                    "reading entry"
                );

                for (label i=1; i<s; ++i)
                {
                    this->append(tPtr->clone().ptr());
                }
            }
        }

        // Read end of contents
        is.readEndList("LPtrList<LListBase, T>");
    }
    else if (firstToken.isPunctuation())
    {
        if (firstToken.pToken() != token::BEGIN_LIST)
        {
            FatalIOErrorIn
            (
                "LPtrList<LListBase, T>::read(Istream&, const INew&)",
                is
            )   << "incorrect first token, '(', found " << firstToken.info()
                << exit(FatalIOError);
        }

        token lastToken(is);
        is.fatalCheck("LPtrList<LListBase, T>::read(Istream&, const INew&)");

        while
        (
           !(
                lastToken.isPunctuation()
             && lastToken.pToken() == token::END_LIST
            )
        )
        {
            is.putBack(lastToken);
            this->append(iNew(is).ptr());

            is >> lastToken;
            is.fatalCheck
            (
                "LPtrList<LListBase, T>::read(Istream&, const INew&)"
            );
        }
    }
    else
    {
Code example #20
File: Tokenizer.cpp  Project: AlexSenpai/A.I.
void Tokenizer::setPosition( int pos ) {
	int thisPos = 0;
	while( thisPos <= pos && firstToken() != "" ) {
		thisPos++;
	}
}
Code example #21
File: Tokenizer.cpp  Project: AlexSenpai/A.I.
std::string Tokenizer::nextToken(void) {
	if(firstToken() != "") {
		return firstToken();
	}
	return thisToken;
}
Code example #22
bool Foam::IOobject::readHeader(Istream& is)
{
    if (IOobject::debug)
    {
        Info<< "IOobject::readHeader(Istream&) : reading header for file "
            << is.name() << endl;
    }

    // Check Istream not already bad
    if (!is.good())
    {
        if (rOpt_ == MUST_READ || rOpt_ == MUST_READ_IF_MODIFIED)
        {
            FatalIOErrorIn("IOobject::readHeader(Istream&)", is)
                << " stream not open for reading essential object from file "
                << is.name()
                << exit(FatalIOError);
        }

        if (IOobject::debug)
        {
            SeriousIOErrorIn("IOobject::readHeader(Istream&)", is)
                << " stream not open for reading from file "
                << is.name() << endl;
        }

        return false;
    }

    token firstToken(is);

    if
    (
        is.good()
     && firstToken.isWord()
     && firstToken.wordToken() == "FoamFile"
    )
    {
        dictionary headerDict(is);

        is.version(headerDict.lookup("version"));
        is.format(headerDict.lookup("format"));
        headerClassName_ = word(headerDict.lookup("class"));

        const word headerObject(headerDict.lookup("object"));
        if (IOobject::debug && headerObject != name())
        {
            IOWarningIn("IOobject::readHeader(Istream&)", is)
                << " object renamed from "
                << name() << " to " << headerObject
                << " for file " << is.name() << endl;
        }

        // The note entry is optional
        headerDict.readIfPresent("note", note_);
    }
    else
    {
        SeriousIOErrorIn("IOobject::readHeader(Istream&)", is)
            << "First token could not be read or is not the keyword 'FoamFile'"
            << nl << nl << "Check header is of the form:" << nl << endl;

        writeHeader(Info);

        return false;
    }

    // Check stream is still OK
    if (is.good())
    {
        objState_ = GOOD;
    }
    else
    {
        if (rOpt_ == MUST_READ || rOpt_ == MUST_READ_IF_MODIFIED)
        {
            FatalIOErrorIn("IOobject::readHeader(Istream&)", is)
                << " stream failure while reading header"
                << " on line " << is.lineNumber()
                << " of file " << is.name()
                << " for essential object" << name()
                << exit(FatalIOError);
        }

        if (IOobject::debug)
        {
            Info<< "IOobject::readHeader(Istream&) :"
                << " stream failure while reading header"
                << " on line " << is.lineNumber()
                << " of file " << is.name() << endl;
        }

        objState_ = BAD;

        return false;
    }

    if (IOobject::debug)
    {
        Info<< " .... read" << endl;
    }

    return true;
}
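
readHeader expects the stream to begin with the word FoamFile followed by a dictionary providing at least the version, format, class and object entries (note is optional). A typical header of that shape, shown here from general OpenFOAM usage rather than from this source file, is:

FoamFile
{
    version     2.0;
    format      ascii;
    class       volScalarField;
    object      p;
}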
Code example #23
    forAllConstIter(dictionary, dict, iter)
    {
        if (iter().keyword() != "type" && iter().keyword() != "value")
        {
            if
            (
                iter().isStream()
             && iter().stream().size()
            )
            {
                ITstream& is = iter().stream();

                // Read first token
                token firstToken(is);

                if
                (
                    firstToken.isWord()
                 && firstToken.wordToken() == "nonuniform"
                )
                {
                    token fieldToken(is);

                    if (!fieldToken.isCompound())
                    {
                        if
                        (
                            fieldToken.isLabel()
                         && fieldToken.labelToken() == 0
                        )
                        {
                            // Ignore nonuniform 0 entry
                        }
                        else
                        {
                            FatalIOErrorInFunction
                            (
                                dict
                            )   << "\n    token following 'nonuniform' "
                                  "is not a compound"
                            << exit(FatalIOError);
                        }
                    }
                    else if
                    (
                        fieldToken.compoundToken().type()
                     == token::Compound<List<scalar>>::typeName
                    )
                    {
                        scalarField* fPtr = new scalarField;
                        fPtr->transfer
                        (
                            dynamicCast<token::Compound<List<scalar>>>
                            (
                                fieldToken.transferCompoundToken(is)
                            )
                        );

                        if (fPtr->size() != patchSize_)
                        {
                            FatalIOErrorInFunction
                            (
                                dict
                            )   << "\n    size of field " << iter().keyword()
                                << " (" << fPtr->size() << ')'
                                << " is not the same size as the patch ("
                                << patchSize_ << ')'
                                << "\n    on patch " << patchName_
                                << exit(FatalIOError);
                        }

                        scalarFields.insert(iter().keyword(), fPtr);
                    }
                    else if
                    (
                        fieldToken.compoundToken().type()
                     == token::Compound<List<vector>>::typeName
                    )
                    {
                        vectorField* fPtr = new vectorField;
                        fPtr->transfer
                        (
                            dynamicCast<token::Compound<List<vector>>>
                            (
                                fieldToken.transferCompoundToken(is)
                            )
                        );

                        if (fPtr->size() != patchSize_)
                        {
                            FatalIOErrorInFunction
                            (
                                dict
                            )   << "\n    size of field " << iter().keyword()
                                << " (" << fPtr->size() << ')'
                                << " is not the same size as the patch ("
                                << patchSize_ << ')'
                                << "\n    on patch " << patchName_
                                << exit(FatalIOError);
                        }

                        vectorFields.insert(iter().keyword(), fPtr);
                    }
                    else if
                    (
                        fieldToken.compoundToken().type()
                     == token::Compound<List<sphericalTensor>>::typeName
                    )
                    {
                        sphericalTensorField* fPtr = new sphericalTensorField;
                        fPtr->transfer
                        (
                            dynamicCast
                            <
                                token::Compound<List<sphericalTensor>>
                            >
                            (
                                fieldToken.transferCompoundToken(is)
                            )
                        );

                        if (fPtr->size() != patchSize_)
                        {
                            FatalIOErrorInFunction
                            (
                                dict
                            )   << "\n    size of field " << iter().keyword()
                                << " (" << fPtr->size() << ')'
                                << " is not the same size as the patch ("
                                << patchSize_ << ')'
                                << "\n    on patch " << patchName_
                                << exit(FatalIOError);
                        }

                        sphericalTensorFields.insert(iter().keyword(), fPtr);
                    }
                    else if
                    (
                        fieldToken.compoundToken().type()
                     == token::Compound<List<symmTensor>>::typeName
                    )
                    {
                        symmTensorField* fPtr = new symmTensorField;
                        fPtr->transfer
                        (
                            dynamicCast
                            <
                                token::Compound<List<symmTensor>>
                            >
                            (
                                fieldToken.transferCompoundToken(is)
                            )
                        );

                        if (fPtr->size() != patchSize_)
                        {
                            FatalIOErrorInFunction
                            (
                                dict
                            )   << "\n    size of field " << iter().keyword()
                                << " (" << fPtr->size() << ')'
                                << " is not the same size as the patch ("
                                << patchSize_ << ')'
                                << "\n    on patch " << patchName_
                                << exit(FatalIOError);
                        }

                        symmTensorFields.insert(iter().keyword(), fPtr);
                    }
                    else if
                    (
                        fieldToken.compoundToken().type()
                     == token::Compound<List<tensor>>::typeName
                    )
                    {
                        tensorField* fPtr = new tensorField;
                        fPtr->transfer
                        (
                            dynamicCast<token::Compound<List<tensor>>>
                            (
                                fieldToken.transferCompoundToken(is)
                            )
                        );

                        if (fPtr->size() != patchSize_)
                        {
                            FatalIOErrorInFunction
                            (
                                dict
                            )   << "\n    size of field " << iter().keyword()
                                << " (" << fPtr->size() << ')'
                                << " is not the same size as the patch ("
                                << patchSize_ << ')'
                                << "\n    on patch " << patchName_
                                << exit(FatalIOError);
                        }

                        tensorFields.insert(iter().keyword(), fPtr);
                    }
                    else
                    {
                        FatalIOErrorInFunction
                        (
                            dict
                        )   << "\n    compound " << fieldToken.compoundToken()
                            << " not supported"
                            << "\n    on patch " << patchName_
                            << exit(FatalIOError);
                    }
                }
                else if
                (
                    firstToken.isWord()
                 && firstToken.wordToken() == "uniform"
                )
                {
                    // A 'uniform' entry carries either a single value or a
                    // bracketed component list, expanded to patchSize_
                    token fieldToken(is);

                    if (!fieldToken.isPunctuation())
                    {
                        scalarFields.insert
                        (
                            iter().keyword(),
                            new scalarField
                            (
                                patchSize_,
                                fieldToken.number()
                            )
                        );
                    }
                    else
                    {
                        // Read as scalarList.
                        is.putBack(fieldToken);

                        scalarList l(is);

                        if (l.size() == vector::nComponents)
                        {
                            vector vs(l[0], l[1], l[2]);

                            vectorFields.insert
                            (
                                iter().keyword(),
                                new vectorField(patchSize_, vs)
                            );
                        }
                        else if (l.size() == sphericalTensor::nComponents)
                        {
                            sphericalTensor vs(l[0]);

                            sphericalTensorFields.insert
                            (
                                iter().keyword(),
                                new sphericalTensorField(patchSize_, vs)
                            );
                        }
                        else if (l.size() == symmTensor::nComponents)
                        {
                            symmTensor vs(l[0], l[1], l[2], l[3], l[4], l[5]);

                            symmTensorFields.insert
                            (
                                iter().keyword(),
                                new symmTensorField(patchSize_, vs)
                            );
                        }
                        else if (l.size() == tensor::nComponents)
                        {
                            tensor vs
                            (
                                l[0], l[1], l[2],
                                l[3], l[4], l[5],
                                l[6], l[7], l[8]
                            );

                            tensorFields.insert
                            (
                                iter().keyword(),
                                new tensorField(patchSize_, vs)
                            );
                        }
                        else
                        {
                            FatalIOErrorInFunction
                            (
                                dict
                            )   << "\n    unrecognised native type " << l
                                << "\n    on patch " << patchName_
                                << exit(FatalIOError);
                        }
                    }
                }
            }
        }
    }
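The loop above skips the "type" and "value" keywords and classifies every other entry by its first token: "uniform" entries become constant fields of patchSize_ elements (a bare number gives a scalarField, a bracketed component list gives the matching vector/tensor field), while "nonuniform" entries must carry a compound List<Type> token whose size equals patchSize_ (a bare label 0 after "nonuniform" is silently ignored). The sketch below is illustrative only: the keyword names and the boundary type are invented, and the dictionary is built in memory purely to show the entry forms the loop accepts.

// Hedged sketch of the entry shapes dispatched by the loop above.
// None of these keyword names come from the listed source.
#include "dictionary.H"
#include "IStringStream.H"

using namespace Foam;

dictionary examplePatchEntries()
{
    return dictionary
    (
        IStringStream
        (
            "type    somePatchFieldType;\n"                  // skipped ("type")
            "T       uniform 300;\n"                         // -> scalarFields
            "U       uniform (1 0 0);\n"                     // -> vectorFields
            "flux    nonuniform List<scalar> 4(1 2 3 4);\n"  // -> scalarFields, size checked against patchSize_
        )()
    );
}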