Example 1
bool BrushTokenImporter::importTokens (Tokeniser& tokeniser)
{
	while (1) {
		// check for end of brush
		const std::string token = tokeniser.getToken();
		if (token == "}")
			break;

		// Not the closing brace: push the token back so the face importer reads it
		tokeniser.ungetToken();

		// Create a new face attached to this brush
		m_brush.push_back(FaceSmartPointer(new Face(&m_brush)));

		Face& face = *m_brush.back();

		UFOFaceTokenImporter importer(face);
		if (!importer.importTokens(tokeniser))
			return false;
		face.planeChanged();
	}

	// All faces read: propagate plane and shader changes
	m_brush.planeChanged();
	m_brush.shaderChanged();

	return true;
}
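
All three examples share the same lookahead idiom: read one token, inspect it, and call ungetToken() when it turns out to belong to the next parser. Below is a minimal, self-contained sketch of that idiom; the StringTokeniser class is an illustrative assumption, not the Tokeniser interface used above, but the getToken()/ungetToken() usage mirrors the brush loop.

#include <iostream>
#include <sstream>
#include <string>
#include <vector>

// Hypothetical minimal tokeniser: whitespace-separated tokens, one token of pushback.
// Illustrative stand-in only, not the radiant Tokeniser interface.
class StringTokeniser {
public:
	explicit StringTokeniser(const std::string& text) : _stream(text), _havePushedBack(false) {}

	std::string getToken() {
		if (_havePushedBack) {
			_havePushedBack = false;
			return _lastToken;
		}
		_lastToken.clear();
		_stream >> _lastToken; // leaves "" at end of input
		return _lastToken;
	}

	// Make the next getToken() return the most recently read token again.
	void ungetToken() { _havePushedBack = true; }

private:
	std::istringstream _stream;
	std::string _lastToken;
	bool _havePushedBack;
};

int main() {
	// Each "face" is a single token here; a real brush face spans many tokens.
	StringTokeniser tokeniser("face1 face2 face3 }");
	std::vector<std::string> faces;

	for (;;) {
		const std::string token = tokeniser.getToken();
		if (token == "}" || token.empty())
			break; // end of brush (or unexpected end of input)

		// Not the closing brace: push it back so the "face importer" reads it in full.
		tokeniser.ungetToken();
		faces.push_back(tokeniser.getToken());
	}

	for (const std::string& face : faces)
		std::cout << face << '\n';
	return 0;
}

Run as written, the sketch prints face1, face2 and face3, each token read once by the inner "importer" after being pushed back by the outer loop.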
Example 2
void LicenseParser::parseLicensesFile (Tokeniser& tokeniser, const std::string& filename)
{
	// Skip every token on the first line of the file
	for (;;) {
		std::string token = tokeniser.getToken();
		if (token.empty())
			break;
		if (tokeniser.getLine() > 1) {
			// First token of a later line: push it back for the main loop below
			tokeniser.ungetToken();
			break;
		}
	}
	std::size_t lastLine = 1;
	for (;;) {
		std::string token = tokeniser.getToken();
		if (token.empty())
			break;

		// Record at most one texture path per line: strip the leading "base/" (5 chars) and the file extension
		if (string::contains(token, "base/textures/") && lastLine != tokeniser.getLine()) {
			_licensesMap[os::stripExtension(token.substr(5))] = true;
			lastLine = tokeniser.getLine();
		}
	}
}
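
The interesting step in the second loop is the path normalisation: token.substr(5) drops the leading "base/" prefix (five characters) and os::stripExtension drops the file extension, so a token such as "base/textures/tex_common/caulk.jpg" is stored under the key "textures/tex_common/caulk". A small sketch of that transformation using only the standard library (stripExtension here is a simplified stand-in for os::stripExtension, which may handle edge cases differently):

#include <iostream>
#include <string>

// Illustrative stand-in for os::stripExtension: drop everything from the last '.'.
static std::string stripExtension(const std::string& path) {
	const std::string::size_type dot = path.rfind('.');
	return dot == std::string::npos ? path : path.substr(0, dot);
}

int main() {
	const std::string token = "base/textures/tex_common/caulk.jpg";

	// substr(5) removes the leading "base/" (5 characters), mirroring the parser above.
	const std::string key = stripExtension(token.substr(5));

	std::cout << key << '\n'; // prints: textures/tex_common/caulk
	return 0;
}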
Example 3
bool EntityClassDoom3_parseModel( Tokeniser& tokeniser ){
	// A model block has the form: model <name> { inherit | remove | mesh | skin | offset | channel | anim ... }
	const char* name;
	PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseString( tokeniser, name ) );

	// Look up or create the named model entry
	Model& model = g_models[name];

	PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseToken( tokeniser, "{" ) );
	tokeniser.nextLine();

	for (;; )
	{
		const char* parameter;
		PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseString( tokeniser, parameter ) );
		if ( string_equal( parameter, "}" ) ) {
			tokeniser.nextLine();
			break;
		}
		else if ( string_equal( parameter, "inherit" ) ) {
			PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseString( tokeniser, model.m_parent ) );
			tokeniser.nextLine();
		}
		else if ( string_equal( parameter, "remove" ) ) {
			// Consume and ignore the name being removed
			//const char* remove =
			PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseToken( tokeniser ) );
			tokeniser.nextLine();
		}
		else if ( string_equal( parameter, "mesh" ) ) {
			PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseString( tokeniser, model.m_mesh ) );
			tokeniser.nextLine();
		}
		else if ( string_equal( parameter, "skin" ) ) {
			PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseString( tokeniser, model.m_skin ) );
			tokeniser.nextLine();
		}
		else if ( string_equal( parameter, "offset" ) ) {
			// Parse and discard the "( x y z )" offset vector
			PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseToken( tokeniser, "(" ) );
			PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseToken( tokeniser ) );
			PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseToken( tokeniser ) );
			PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseToken( tokeniser ) );
			PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseToken( tokeniser, ")" ) );
			tokeniser.nextLine();
		}
		else if ( string_equal( parameter, "channel" ) ) {
			// Skip the channel name and its joint list up to the closing ")"
			//const char* channelName =
			PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseToken( tokeniser ) );
			PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseToken( tokeniser, "(" ) );
			for (;; )
			{
				const char* end;
				PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseString( tokeniser, end ) );
				if ( string_equal( end, ")" ) ) {
					tokeniser.nextLine();
					break;
				}
			}
		}
		else if ( string_equal( parameter, "anim" ) ) {
			// anim <name> <file> [, <file> ...]; only the first file is stored in the map
			CopiedString animName;
			PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseString( tokeniser, animName ) );
			const char* animFile;
			PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseString( tokeniser, animFile ) );
			model.m_anims.insert( Model::Anims::value_type( animName, animFile ) );

			const char* token;
			PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseString( tokeniser, token ) );

			// Consume any additional comma-separated anim files without storing them
			while ( string_equal( token, "," ) )
			{
				PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseString( tokeniser, animFile ) );
				PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseString( tokeniser, token ) );
			}

			// An optional "{ ... }" block of frame commands follows; skip its contents
			if ( string_equal( token, "{" ) ) {
				for (;; )
				{
					const char* end;
					PARSE_RETURN_FALSE_IF_FAIL( EntityClassDoom3_parseString( tokeniser, end ) );
					if ( string_equal( end, "}" ) ) {
						tokeniser.nextLine();
						break;
					}
					tokeniser.nextLine();
				}
			}
			else
			{
				tokeniser.ungetToken();
			}
		}
		else
		{
			globalErrorStream() << "unknown model parameter: " << makeQuoted( parameter ) << "\n";
			return false;
		}
		tokeniser.nextLine();
	}
	return true;
}
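
The least obvious control flow in the third example is the "anim" branch: after the anim name and the first file it keeps reading "," <file> pairs while the lookahead token is a comma, then either skips an optional "{ ... }" block of frame commands or pushes the lookahead back. The sketch below isolates that comma-separated list idiom with std::istringstream as a crude stand-in tokeniser (the tokens in the input string are pre-separated by spaces so operator>> can split them); unlike the parser above, which keeps only the first file, the sketch collects every file to make the loop shape visible.

#include <iostream>
#include <sstream>
#include <string>
#include <vector>

int main() {
	// Tokens are pre-separated by spaces so operator>> acts as a crude tokeniser.
	std::istringstream tokeniser("walk anims/walk.md5anim , anims/walk2.md5anim , anims/walk3.md5anim next");

	std::string animName, animFile;
	tokeniser >> animName >> animFile;

	std::vector<std::string> files;
	files.push_back(animFile);

	// Keep reading "," <file> pairs while the lookahead token is a comma.
	std::string token;
	while (tokeniser >> token && token == ",") {
		tokeniser >> animFile;
		files.push_back(animFile);
	}
	// Here `token` holds the first token after the list ("next" in this input);
	// the real parser would ungetToken() it, or treat "{" as the start of a block to skip.

	std::cout << animName << ": ";
	for (const std::string& file : files)
		std::cout << file << ' ';
	std::cout << '\n';
	return 0;
}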