Example #1
// Parse a single particle def
void ParticlesManager::parseParticleDef(parser::DefTokeniser& tok) {

	// Standard DEF, starts with "particle <name> {"
	tok.assertNextToken("particle");
	std::string name = tok.nextToken();
	tok.assertNextToken("{");
	
	ParticleDef pdef(name);

	// Any global keywords will come first, after which we get a series of 
	// brace-delimited stages.
	std::string token = tok.nextToken();
	while (token != "}") {
		if (token == "depthHack") {
			tok.skipTokens(1); // we don't care about depthHack
		}
		else if (token == "{") {
			
			// Parse stage
			ParticleStage stage(parseParticleStage(tok));
			
			// Append to the ParticleDef
			pdef.appendStage(stage);
		}
		
		// Get next token
		token = tok.nextToken();
	}
	
	// Add the ParticleDef to the map
	_particleDefs.insert(ParticleDefMap::value_type(name, pdef));
}
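For reference, a hypothetical .prt snippet of the shape this parser consumes (name and values are invented for illustration; the stage keywords shown are the ones handled by parseParticleStage in Example #9):

/*
// Hypothetical particle def (illustration only)
particle fire_small {
	depthHack 0.1
	{
		count 20
		color 1 0.8 0.5 1
	}
}
*/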
Example #2
void ParticleDef::parseFromTokens(parser::DefTokeniser& tok)
{
    // Clear out the particle def (except the name) before parsing
    clear();

    // Any global keywords will come first, after which we get a series of
    // brace-delimited stages.
    std::string token = tok.nextToken();

    while (token != "}")
    {
        if (token == "depthHack")
        {
            setDepthHack(string::convert<float>(tok.nextToken()));
        }
        else if (token == "{")
        {
            // Construct/Parse the stage from the tokens
            StageDefPtr stage = std::make_shared<StageDef>(std::ref(tok));

            // Append to the ParticleDef
            appendStage(stage);
        }

        // Get next token
        token = tok.nextToken();
    }

    _changedSignal.emit();
}
Example #3
void PatchParser::parseMatrix(parser::DefTokeniser& tok, IPatch& patch) const
{
	tok.assertNextToken("(");

	// For each column (width)
	for (std::size_t c = 0; c < patch.getWidth(); c++)
	{
		tok.assertNextToken("(");

		// For each row (height)
		for (std::size_t r=0; r < patch.getHeight(); r++) 
		{
			tok.assertNextToken("(");

			// Parse vertex coordinates
			patch.ctrlAt(r, c).vertex[0] = strToDouble(tok.nextToken());
			patch.ctrlAt(r, c).vertex[1] = strToDouble(tok.nextToken());
			patch.ctrlAt(r, c).vertex[2] = strToDouble(tok.nextToken());

			// Parse texture coordinates
			patch.ctrlAt(r, c).texcoord[0] = strToDouble(tok.nextToken());
			patch.ctrlAt(r, c).texcoord[1] = strToDouble(tok.nextToken());

			tok.assertNextToken(")");
		}

		tok.assertNextToken(")");
	}

	tok.assertNextToken(")");
}
Example #4
void XData::jumpOutOfBrackets(parser::DefTokeniser& tok, int currentDepth) const
{
	while ( tok.hasMoreTokens() && currentDepth > 0)
	{
		std::string token = tok.nextToken();
		if (token == "{")
			currentDepth += 1;
		else if (token == "}")
			currentDepth -= 1;
	}
}
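The same brace-depth bookkeeping can be exercised in isolation. Below is a minimal standalone sketch (assumption: std::istringstream stands in for parser::DefTokeniser, so braces must be whitespace-separated here), not the actual XData implementation:

#include <iostream>
#include <sstream>
#include <string>

// Skip tokens until the braces opened so far are balanced again
static void skipBlock(std::istringstream& tokens, int depth)
{
	std::string token;

	while (depth > 0 && tokens >> token)
	{
		if (token == "{")
			++depth;
		else if (token == "}")
			--depth;
	}
}

int main()
{
	std::istringstream tokens("skipped { nested } also-skipped } kept");

	skipBlock(tokens, 1); // we are already one level inside a block

	std::string next;
	tokens >> next;
	std::cout << next << std::endl; // prints "kept"
	return 0;
}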
Example #5
// Parse a single particle def
void ParticlesManager::parseParticleDef(parser::DefTokeniser& tok, const std::string& filename)
{
	// Standard DEF, starts with "particle <name> {"
	std::string declName = tok.nextToken();

	// Check for a valid particle declaration; some .prt files contain material definitions as well
	if (declName != "particle")
	{
		// No particle, skip name plus whole block
		tok.skipTokens(1);
		tok.assertNextToken("{");

		for (std::size_t level = 1; level > 0;)
		{
			std::string token = tok.nextToken();

			if (token == "}")
			{
				level--;
			}
			else if (token == "{")
			{
				level++;
			}
		}

		return;
	}

	// Valid particle declaration, proceed to parse the name
	std::string name = tok.nextToken();
	tok.assertNextToken("{");

	ParticleDefPtr pdef = findOrInsertParticleDef(name);

	pdef->setFilename(filename);

	// Let the particle construct itself from the token stream
	pdef->parseFromTokens(tok);
}
Example #6
/*
// Example Primitive
{
patchDef2
{
"textures/darkmod/stone/brick/rough_big_blocks03"
( 5 3 0 0 0 )
(
( ( 64 -88 108 0 0 ) ( 64 -88 184 0 -1.484375 ) ( 64 -88 184 0 -1.484375 ) )
( ( 64 -88 184 1.484375 0 ) ( 64 -88 184 1.484375 -1.484375 ) ( 64 -88 184 1.484375 -1.484375 ) )
( ( 112 -88 184 2.421875 0 ) ( 112 -88 184 2.421875 -1.484375 ) ( 112 -88 184 2.421875 -1.484375 ) )
( ( 160 -88 184 3.359375 0 ) ( 160 -88 184 3.359375 -1.484375 ) ( 160 -88 184 3.359375 -1.484375 ) )
( ( 160 -88 108 4.84375 0 ) ( 160 -88 184 4.84375 -1.484375 ) ( 160 -88 184 4.84375 -1.484375 ) )
)
}
}
*/
scene::INodePtr PatchDef2Parser::parse(parser::DefTokeniser& tok) const
{
	scene::INodePtr node = GlobalPatchCreator(DEF2).createPatch();

	IPatchNodePtr patchNode = boost::dynamic_pointer_cast<IPatchNode>(node);
	assert(patchNode != NULL);

	IPatch& patch = patchNode->getPatch();

	tok.assertNextToken("{");

	// Parse shader
	patch.setShader(tok.nextToken());

	// Parse parameters
	tok.assertNextToken("(");

	// parse matrix dimensions
	std::size_t cols = string::convert<std::size_t>(tok.nextToken());
	std::size_t rows = string::convert<std::size_t>(tok.nextToken());

	patch.setDims(cols, rows);

	// ignore contents/flags values
	tok.skipTokens(3);

	tok.assertNextToken(")");

	// Parse Patch Matrix
	parseMatrix(tok, patch);

	// Parse Footer
	tok.assertNextToken("}");
	tok.assertNextToken("}");

	patch.controlPointsChanged();

	return node;
}
Example #7
void ParticleParameter::parseFromTokens(parser::DefTokeniser& tok)
{
	std::string val = tok.nextToken();

	try
	{
		setFrom(boost::lexical_cast<float>(val));
	}
	catch (boost::bad_lexical_cast&)
	{
		rError() << "[particles] Bad lower value, token is '" <<
			val << "'" << std::endl;
	}

	if (tok.peek() == "to")
	{
		// Upper value is there, parse it
		tok.skipTokens(1); // skip the "to"

		val = tok.nextToken();

		try
		{
			// Convert the upper value to a float
			setTo(boost::lexical_cast<float>(val));
		}
		catch (boost::bad_lexical_cast&)
		{
			rError() << "[particles] Bad upper value, token is '" <<
				val << "'" << std::endl;
		}
	}
	else
	{
		setTo(getFrom());
	}
}
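The "<lower> [to <upper>]" grammar handled above can be sketched on its own. A minimal standalone version follows (assumptions: a plain std::deque of tokens instead of parser::DefTokeniser, a hypothetical Range struct instead of ParticleParameter, and no error reporting):

#include <deque>
#include <iostream>
#include <string>

struct Range { float from = 0.0f; float to = 0.0f; };

static Range parseRange(std::deque<std::string>& tokens)
{
	Range range;

	// The lower value is always present
	range.from = std::stof(tokens.front()); tokens.pop_front();
	range.to = range.from; // single value by default

	// Optional "to <upper>" part
	if (!tokens.empty() && tokens.front() == "to")
	{
		tokens.pop_front(); // skip the "to" keyword
		range.to = std::stof(tokens.front()); tokens.pop_front();
	}

	return range;
}

int main()
{
	std::deque<std::string> tokens { "1.5", "to", "3.0" };

	Range r = parseRange(tokens);
	std::cout << r.from << " .. " << r.to << std::endl; // prints 1.5 .. 3
	return 0;
}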
Example #8
// Returns a GUI expression, which can be a number, a string or a formula ("gui::objVisible" == 1).
std::string GuiWindowDef::getExpression(parser::DefTokeniser& tokeniser)
{
	std::string returnValue = tokeniser.nextToken();

	if (returnValue == "(")
	{
		// Assemble tokens until the matching closing parenthesis is found
		std::size_t depth = 1;

		while (depth > 0 && tokeniser.hasMoreTokens())
		{
			std::string token = tokeniser.nextToken();

			if (token == "(") depth++;
			else if (token == ")") depth--;

			returnValue += token;
		}
	}

	//  Strip quotes
	boost::algorithm::trim_if(returnValue, boost::algorithm::is_any_of("\""));

	return returnValue;
}
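A standalone sketch of the same expression-assembly loop, including the nested-parenthesis handling, is shown below (assumptions: std::istringstream stands in for the tokeniser, so parentheses must be whitespace-separated, and the Boost quote-trimming step is left out):

#include <iostream>
#include <sstream>
#include <string>

static std::string readExpression(std::istringstream& tokens)
{
	std::string result;
	tokens >> result;

	if (result == "(")
	{
		// Assemble tokens until the matching closing parenthesis is found
		int depth = 1;
		std::string token;

		while (depth > 0 && tokens >> token)
		{
			if (token == "(")
				++depth;
			else if (token == ")")
				--depth;

			result += token;
		}
	}

	return result;
}

int main()
{
	std::istringstream tokens("( \"gui::objVisible\" == ( 1 ) )");

	std::cout << readExpression(tokens) << std::endl; // ("gui::objVisible"==(1))
	return 0;
}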
Example #9
// Parse an individual particle stage
ParticleStage ParticlesManager::parseParticleStage(parser::DefTokeniser& tok) {
	
	ParticleStage stage;
	
	// Read values. These are not a simple list of keyvalue pairs, but some
	// values may consist of more than one token.
	std::string token = tok.nextToken();
	while (token != "}") {
		
		if (token == "count") {
			// Read the value first so it can be reported if the conversion fails
			std::string countValue = tok.nextToken();

			try {
				stage.setCount(boost::lexical_cast<int>(countValue));
			}
			catch (boost::bad_lexical_cast&) {
				std::cerr << "[particles] Bad count value '" << countValue 
						  << "'" << std::endl;
			}
		}
		else if (token == "color") {
			
			// Read 4 values and assemble as a vector4
			Vector4 col;
			col.x() = boost::lexical_cast<float>(tok.nextToken());
			col.y() = boost::lexical_cast<float>(tok.nextToken());
			col.z() = boost::lexical_cast<float>(tok.nextToken());
			col.w() = boost::lexical_cast<float>(tok.nextToken());
			
			// Set the stage colour
			stage.setColour(col);
		}
		
		token = tok.nextToken();
	}
	
	return stage;
}
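The count handling above follows a "read the token, try to convert, report the exact offending token" pattern. A minimal standalone sketch of that pattern using only the standard library (parseIntOr is a hypothetical helper, not part of ParticlesManager):

#include <cstddef>
#include <iostream>
#include <stdexcept>
#include <string>

// Convert a token to int, report the offending token and fall back to a
// default on failure (standard-library alternative to boost::lexical_cast)
static int parseIntOr(const std::string& token, int fallback)
{
	try
	{
		std::size_t consumed = 0;
		int value = std::stoi(token, &consumed);

		if (consumed != token.size())
			throw std::invalid_argument(token); // trailing garbage

		return value;
	}
	catch (const std::exception&)
	{
		std::cerr << "[particles] Bad count value '" << token << "'" << std::endl;
		return fallback;
	}
}

int main()
{
	std::cout << parseIntOr("20", 0) << std::endl;  // prints 20
	std::cout << parseIntOr("20x", 0) << std::endl; // warns, then prints 0
	return 0;
}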
Example #10
void Doom3MapReader::parseMapVersion(parser::DefTokeniser& tok)
{
	// Parse the map version
    float version = 0;

    try
	{
        tok.assertNextToken("Version");
        version = boost::lexical_cast<float>(tok.nextToken());
    }
    catch (parser::ParseException& e)
	{
		// failed => quit
		rError()
            << "[mapdoom3] Unable to parse map version: "
            << e.what() << std::endl;

		throw FailureException(_("Unable to parse map version (parse exception)."));
    }
    catch (boost::bad_lexical_cast& e)
	{
        rError()
            << "[mapdoom3] Unable to parse map version: "
            << e.what() << std::endl;

		throw FailureException(_("Could not recognise map version number format."));
    }

	float requiredVersion = MAP_VERSION_D3;

    // Check we have the correct version for this module
    if (version != requiredVersion)
	{
		std::string errMsg = (boost::format(_("Incorrect map version: required %f, found %f")) % requiredVersion % version).str();

        rError() << errMsg << std::endl;

		throw FailureException(errMsg);
    }

	// success
}
Example #11
void Quake3MapReader::parsePrimitive(parser::DefTokeniser& tok, const scene::INodePtr& parentEntity)
{
    _primitiveCount++;

	std::string primitiveKeyword = tok.nextToken();

	// Get a parser for this keyword
	PrimitiveParsers::const_iterator p = _primitiveParsers.find(primitiveKeyword);

	if (p == _primitiveParsers.end())
	{
		throw FailureException("Unknown primitive type: " + primitiveKeyword);
	}

	const PrimitiveParserPtr& parser = p->second;

	// Try to parse the primitive, throwing exception if failed
	try
	{
		scene::INodePtr primitive = parser->parse(tok);

		if (!primitive)
		{
			std::string text = (boost::format(_("Primitive #%d: parse error")) % _primitiveCount).str();
			throw FailureException(text);
		}

		// Now add the primitive as a child of the entity
		_importFilter.addPrimitiveToEntity(primitive, parentEntity); 
	}
	catch (parser::ParseException& e)
	{
		// Translate ParseExceptions to FailureExceptions
		std::string text = (boost::format(_("Primitive #%d: parse exception %s")) % _primitiveCount % e.what()).str();
		throw FailureException(text);
	}
}
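parsePrimitive dispatches on the primitive keyword through a map of registered parsers. A minimal standalone sketch of that lookup pattern follows (assumptions: hypothetical ParsedPrimitive and ParserRegistry types stand in for scene::INodePtr and _primitiveParsers):

#include <functional>
#include <iostream>
#include <map>
#include <stdexcept>
#include <string>

struct ParsedPrimitive { std::string keyword; };

using PrimitiveParserFunc = std::function<ParsedPrimitive()>;
using ParserRegistry = std::map<std::string, PrimitiveParserFunc>;

static ParsedPrimitive dispatchPrimitive(const ParserRegistry& parsers, const std::string& keyword)
{
	// Look up the parser registered for this keyword
	ParserRegistry::const_iterator p = parsers.find(keyword);

	if (p == parsers.end())
	{
		throw std::runtime_error("Unknown primitive type: " + keyword);
	}

	return p->second();
}

int main()
{
	ParserRegistry parsers;
	parsers["brushDef3"] = [] { return ParsedPrimitive{ "brushDef3" }; };
	parsers["patchDef2"] = [] { return ParsedPrimitive{ "patchDef2" }; };

	std::cout << dispatchPrimitive(parsers, "patchDef2").keyword << std::endl;
	return 0;
}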
Example #12
scene::INodePtr BrushDef3Parser::parse(parser::DefTokeniser& tok) const
{
	// Create a new brush
	scene::INodePtr node = GlobalBrushCreator().createBrush();

	// Cast the node, this must succeed
	IBrushNodePtr brushNode = boost::dynamic_pointer_cast<IBrushNode>(node);
	assert(brushNode != NULL);

	IBrush& brush = brushNode->getIBrush();

	tok.assertNextToken("{");

	// Parse face tokens until a closing brace is encountered
	while (1)
	{
		std::string token = tok.nextToken();

		// Token should be either a "(" (start of face) or "}" (end of brush)
		if (token == "}")
		{
			break; // end of brush
		}
		else if (token == "(") // FACE
		{
			// Construct a plane and parse its values
			Plane3 plane;

			plane.normal().x() = string::to_float(tok.nextToken());
			plane.normal().y() = string::to_float(tok.nextToken());
			plane.normal().z() = string::to_float(tok.nextToken());
			plane.dist() = -string::to_float(tok.nextToken()); // negate d

			tok.assertNextToken(")");

			// Parse TexDef
			Matrix4 texdef;
			tok.assertNextToken("(");

			tok.assertNextToken("(");
			texdef.xx() = string::to_float(tok.nextToken());
			texdef.yx() = string::to_float(tok.nextToken());
			texdef.tx() = string::to_float(tok.nextToken());
			tok.assertNextToken(")");

			tok.assertNextToken("(");
			texdef.xy() = string::to_float(tok.nextToken());
			texdef.yy() = string::to_float(tok.nextToken());
			texdef.ty() = string::to_float(tok.nextToken());
			tok.assertNextToken(")");

			tok.assertNextToken(")");

			// Parse Shader
			std::string shader = tok.nextToken();

			// Parse Flags (usually each brush has all faces detail or all faces structural)
			IBrush::DetailFlag flag = static_cast<IBrush::DetailFlag>(
				string::convert<std::size_t>(tok.nextToken(), IBrush::Structural));
			brush.setDetailFlag(flag);

			// Ignore the other two flags
			tok.skipTokens(2);

			// Finally, add the new face to the brush
			/*IFace& face = */brush.addFace(plane, texdef, shader);
		}
		else {
			std::string text = (boost::format(_("BrushDef3Parser: invalid token '%s'")) % token).str();
			throw parser::ParseException(text);
		}
	}

	// Final outer "}"
	tok.assertNextToken("}");

	return node;
}
Example #13
void Doom3EntityClass::parseFromTokens(parser::DefTokeniser& tokeniser)
{
	// Clear this structure first, we might be "refreshing" ourselves from tokens
	clear();

	// Required open brace (the name has already been parsed by the EClassManager)
    tokeniser.assertNextToken("{");

    // Loop over all of the keys in this entitydef
    while (true) {
        const std::string key = tokeniser.nextToken();
        
        if (key == "}") {
        	break; // end of def
        }

        const std::string value = tokeniser.nextToken();
    
        // Otherwise, switch on the key name
        
        if (key == "model") {
        	setModelPath(os::standardPath(value));
        }
        else if (key == "editor_color") {
            setColour(value);
        }
        else if (key == "editor_light") {
            if (value == "1") {
                setIsLight(true);
            }
        }
        else if (key == "spawnclass") {
            if (value == "idLight") {
                setIsLight(true);
            }
        }
		else if (boost::algorithm::istarts_with(key, "editor_"))
		{
			// "editor_yyy" represents an attribute that may be set on this
        	// entity. Construct a value-less EntityClassAttribute to add to
        	// the class, so that it will show in the entity inspector.

			// Locate the space in "editor_bool myVariable", starting after "editor_"
			std::size_t spacePos = key.find(' ', 7);

			// Only proceed if we have a space (some keys like "editor_displayFolder" don't have spaces)
			if (spacePos != std::string::npos) {
				// The part beyond the space is the name of the attribute
				std::string attName = key.substr(spacePos + 1);
				
				// Get the type by trimming the string left and right
				std::string type = key.substr(7, key.length() - attName.length() - 8);

				// Ignore editor_setKeyValue
				if (!attName.empty() && type != "setKeyValue") {
					// Transform the type into a better format
					if (type == "var" || type == "string") {
						type = "text";
					}

        			addAttribute(EntityClassAttribute(type, attName, "", value));
        		}
			}
		}
        
		// Following key-specific processing, add the keyvalue to the eclass
		EntityClassAttribute attribute("text", key, value, "");

		if (getAttribute(key).type.empty()) {
			// Type is empty, attribute does not exist, add it.
			addAttribute(attribute);
		}
		else if (getAttribute(key).value.empty() ) {
			// Attribute type is set, but value is empty, set the value.
			getAttribute(key).value = value;
		}
		else {
			// Both type and value are not empty, emit a warning
			globalWarningStream() << "[eclassmgr] attribute " << key 
				<< " already set on entityclass " << _name << std::endl;
		}
    } // while true

	// Notify the observers
	for (Observers::const_iterator i = _observers.begin(); i != _observers.end(); ++i)
	{
		(*i)->OnEClassReload();
	}
}
Example #14
void Quake3MapReader::parseEntity(parser::DefTokeniser& tok)
{
    // Map of keyvalues for this entity
    EntityKeyValues keyValues;

    // The actual entity. This is initially null, and will be created when
    // primitives start or the end of the entity is reached
    scene::INodePtr entity;

	// Start parsing, first token must be an open brace
	tok.assertNextToken("{");

	std::string token = tok.nextToken();

	// Reset the primitive counter, we're starting a new entity
	_primitiveCount = 0;

	while (true)
	{
	    // Token must be either a key, a "{" to indicate the start of a
	    // primitive, or a "}" to indicate the end of the entity

	    if (token == "{") // PRIMITIVE
		{ 
			// Create the entity right now, if not yet done
			if (entity == NULL)
			{
				entity = createEntity(keyValues);
			}

			// Parse the primitive block, and pass the parent entity
			parsePrimitive(tok, entity);
	    }
	    else if (token == "}") // END OF ENTITY
		{
            // Create the entity if necessary and return it
	        if (entity == NULL)
			{
	            entity = createEntity(keyValues);
	        }

			break;
	    }
	    else // KEY
		{ 
	        std::string value = tok.nextToken();

	        // Sanity check (invalid number of tokens will get us out of sync)
	        if (value == "{" || value == "}")
			{
				std::string text = (boost::format(_("Parsed invalid value '%s' for key '%s'")) % value % token).str();
	            throw FailureException(text);
	        }

	        // Otherwise add the keyvalue pair to our map
	        keyValues.insert(EntityKeyValues::value_type(token, value));
	    }

	    // Get the next token
	    token = tok.nextToken();
	}

	// Insert the entity
	_importFilter.addEntity(entity);
}
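For reference, a hypothetical entity block of the shape parseEntity consumes (keys and values are invented for illustration; the nested block is handed to parsePrimitive):

/*
// Hypothetical map entity (illustration only)
{
"classname" "func_static"
"name" "func_static_1"
{
brushDef3
{
// face definitions as in Example #12
}
}
}
*/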
Example #15
void Doom3EntityClass::parseFromTokens(parser::DefTokeniser& tokeniser)
{
    // Clear this structure first, we might be "refreshing" ourselves from tokens
    clear();

    // Required open brace (the name has already been parsed by the EClassManager)
    tokeniser.assertNextToken("{");

    // Loop over all of the keys in this entitydef
    std::string key;
    while ((key = tokeniser.nextToken()) != "}")
    {
        const std::string value = tokeniser.nextToken();

        // Handle some keys specially
        if (key == "model")
        {
            setModelPath(os::standardPath(value));
        }
        else if (key == "editor_color")
        {
            setColour(string::convert<Vector3>(value));
        }
        else if (key == "editor_light")
        {
            setIsLight(value == "1");
        }
        else if (key == "spawnclass")
        {
            setIsLight(value == "idLight");
        }
        else if (boost::algorithm::istarts_with(key, "editor_"))
        {
            parseEditorSpawnarg(key, value);
        }

        // Try parsing this key/value with the Attachments manager
        _attachments->parseDefAttachKeys(key, value);

        // Add the EntityClassAttribute for this key/val
        if (getAttribute(key).getType().empty())
        {
            // Following key-specific processing, add the keyvalue to the eclass
            EntityClassAttribute attribute("text", key, value, "");

            // Type is empty, attribute does not exist, add it.
            addAttribute(attribute);
        }
        else if (getAttribute(key).getValue().empty())
        {
            // Attribute type is set, but value is empty, set the value.
            getAttribute(key).setValue(value);
        }
        else
        {
            // Both type and value are not empty, emit a warning
            rWarning() << "[eclassmgr] attribute " << key
                << " already set on entityclass " << _name << std::endl;
        }
    } // while true

    _attachments->validateAttachments();

    // Notify the observers
    _changedSignal.emit();
}
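For reference, a hypothetical .def block of the shape this method consumes (the enclosing "entityDef <name>" has already been read by the EClassManager, as noted above; keys and values are invented for illustration):

/*
// Hypothetical entityDef body (illustration only)
{
	"spawnclass"        "idLight"
	"editor_color"      "0 1 0"
	"editor_light"      "1"
	"editor_var health" "Hit points for this entity"
}
*/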
Example #16
/*
// Example Primitive
{
brushDef
{
( -1216 -464 232 ) ( -1088 -464 232 ) ( -1088 -80 120 ) ( ( 0.031250 0 14 ) ( -0.000009 0.031250 4.471550 ) ) common/caulk 134217728 4 0
( -1088 -464 248 ) ( -1216 -464 248 ) ( -1216 -80 136 ) ( ( 0 -0.031373 -0.147059 ) ( 0.007812 0 0.049020 ) ) common/caulk 134217728 0 0
( -1088 -560 120 ) ( -1088 -560 136 ) ( -1088 -80 136 ) ( ( 0.031250 0 16.500000 ) ( 0 0.031250 0.250000 ) ) common/caulk 134217728 4 0
( -1088 -80 136 ) ( -1216 -80 136 ) ( -1216 -80 8 ) ( ( 0.031250 0 2 ) ( 0 0.031250 0.250000 ) ) common/caulk 134217728 4 0
( -1216 -400 136 ) ( -1216 -400 120 ) ( -1216 -80 120 ) ( ( 0.031250 0 -16.500000 ) ( 0 0.031250 0.250000 ) ) common/caulk 134217728 4 0
( -1088 -464 232 ) ( -1216 -464 232 ) ( -1216 -464 248 ) ( ( 0.031250 0 -2 ) ( 0 0.031250 0.250000 ) ) common/caulk 134217728 4 0
}
}
*/
scene::INodePtr BrushDefParser::parse(parser::DefTokeniser& tok) const
{
	// Create a new brush
	scene::INodePtr node = GlobalBrushCreator().createBrush();

	// Cast the node, this must succeed
	IBrushNodePtr brushNode = boost::dynamic_pointer_cast<IBrushNode>(node);
	assert(brushNode != NULL);

	IBrush& brush = brushNode->getIBrush();

	tok.assertNextToken("{");

	// Parse face tokens until a closing brace is encountered
	while (1)
	{
		std::string token = tok.nextToken();

		// Token should be either a "(" (start of face) or "}" (end of brush)
		if (token == "}")
		{
			break; // end of brush
		}
		else if (token == "(") // FACE
		{
			// Parse three 3D points to construct a plane
			Vector3 p1(string::to_float(tok.nextToken()), string::to_float(tok.nextToken()), string::to_float(tok.nextToken()));
			tok.assertNextToken(")");
			tok.assertNextToken("(");

			Vector3 p2(string::to_float(tok.nextToken()), string::to_float(tok.nextToken()), string::to_float(tok.nextToken()));
			tok.assertNextToken(")");
			tok.assertNextToken("(");

			Vector3 p3(string::to_float(tok.nextToken()), string::to_float(tok.nextToken()), string::to_float(tok.nextToken()));
			tok.assertNextToken(")");

			// Construct the plane from the three points
			Plane3 plane(p1, p2, p3);

			// Parse TexDef
			Matrix4 texdef;
			tok.assertNextToken("(");

			tok.assertNextToken("(");
			texdef.xx() = string::to_float(tok.nextToken());
			texdef.yx() = string::to_float(tok.nextToken());
			texdef.tx() = string::to_float(tok.nextToken());
			tok.assertNextToken(")");

			tok.assertNextToken("(");
			texdef.xy() = string::to_float(tok.nextToken());
			texdef.yy() = string::to_float(tok.nextToken());
			texdef.ty() = string::to_float(tok.nextToken());
			tok.assertNextToken(")");

			tok.assertNextToken(")");

			// Parse Shader, brushDef has an implicit "textures/" not written to the map
			std::string shader = "textures/" + tok.nextToken();

			// Parse Contents Flags (and ignore them)
			tok.skipTokens(3);

			// Finally, add the new face to the brush
			/*IFace& face = */brush.addFace(plane, texdef, shader);
		}
		else
		{
			std::string text = (boost::format(_("BrushDefParser: invalid token '%s'")) % token).str();
			throw parser::ParseException(text);
		}
	}

	// Final outer "}"
	tok.assertNextToken("}");

	return node;
}
Example #17
void GuiWindowDef::constructFromTokens(parser::DefTokeniser& tokeniser)
{
	// The windowDef keyword has already been parsed, so expect a name plus an opening brace here
	name = tokeniser.nextToken();

	tokeniser.assertNextToken("{");

	while (tokeniser.hasMoreTokens())
	{
		std::string token = tokeniser.nextToken();
		boost::algorithm::to_lower(token);

		if (token == "rect")
		{
			rect = parseVector4(tokeniser);
		}
		else if (token == "visible")
		{
			visible = parseBool(tokeniser);
		}
		else if (token == "notime")
		{
			notime = parseBool(tokeniser);
		}
		else if (token == "forecolor")
		{
			forecolor = parseVector4(tokeniser);
		}
		else if (token == "backcolor")
		{
			backcolor = parseVector4(tokeniser);
		}
		else if (token == "bordercolor")
		{
			bordercolor = parseVector4(tokeniser);
		}
		else if (token == "matcolor")
		{
			matcolor = parseVector4(tokeniser);
		}
		else if (token == "rotate")
		{
			rotate = parseFloat(tokeniser);
		}
		else if (token == "text")
		{
			setText(parseString(tokeniser));
		}
		else if (token == "font")
		{
			font = parseString(tokeniser);

			// Cut off the "fonts/" part
			boost::algorithm::replace_first(font, "fonts/", "");
		}
		else if (token == "textscale")
		{
			textscale = parseFloat(tokeniser);
		}
		else if (token == "textalign")
		{
			textalign = parseInt(tokeniser);
		}
		else if (token == "textalignx")
		{
			textalignx = parseFloat(tokeniser);
		}
		else if (token == "textaligny")
		{
			textaligny = parseFloat(tokeniser);
		}
		else if (token == "forceaspectwidth")
		{
			forceaspectwidth = parseFloat(tokeniser);
		}
		else if (token == "forceaspectheight")
		{
			forceaspectheight = parseFloat(tokeniser);
		}
		else if (token == "background")
		{
			background = parseString(tokeniser);
		}
		else if (token == "noevents")
		{
			noevents = parseBool(tokeniser);
		}
		else if (token == "nocursor")
		{
			nocursor = parseBool(tokeniser);
		}
		else if (token == "noclip")
		{
			noclip = parseBool(tokeniser);
		}
		else if (token == "nowrap")
		{
			nowrap = parseBool(tokeniser);
		}
		else if (token == "modal")
		{
			noevents = parseBool(tokeniser);
		}
		else if (token == "menugui")
		{
			menugui = parseBool(tokeniser);
		}
		else if (token == "windowdef")
		{
			// Child windowdef
			GuiWindowDefPtr window(new GuiWindowDef(_owner));
			window->constructFromTokens(tokeniser);

			addWindow(window);
		}
		else if (token == "ontime")
		{
			std::string timeStr = tokeniser.nextToken();

			// Check the time for validity
			std::size_t time = string::convert<std::size_t>(
                timeStr, std::numeric_limits<std::size_t>::max()
            );

			if (time == std::numeric_limits<std::size_t>::max())
			{
				rWarning() << "Invalid time encountered in onTime event in "
					<< name << ": " << timeStr << std::endl;
			}

			// Allocate a new GuiScript
			GuiScriptPtr script(new GuiScript(*this));

			script->constructFromTokens(tokeniser);

			_timedEvents.insert(TimedEventMap::value_type(time, script));
		}
		else if (token == "onnamedevent")
		{
			std::string eventName = tokeniser.nextToken();

			// Parse the script
			GuiScriptPtr script(new GuiScript(*this));
			script->constructFromTokens(tokeniser);

			// TODO: Save event
		}
		else if (token == "onevent")
		{
			GuiScriptPtr script(new GuiScript(*this));
			script->constructFromTokens(tokeniser);

			// TODO
		}
		else if (token == "onesc")
		{
			GuiScriptPtr script(new GuiScript(*this));
			script->constructFromTokens(tokeniser);

			// TODO
		}
		else if (token == "onmouseenter" || token == "onmouseexit")
		{
			GuiScriptPtr script(new GuiScript(*this));
			script->constructFromTokens(tokeniser);

			// TODO
		}
		else if (token == "onaction")
		{
			GuiScriptPtr script(new GuiScript(*this));
			script->constructFromTokens(tokeniser);

			// TODO
		}
		else if (token == "float" || token == "definefloat")
		{
			// TODO: Add variable
			std::string variableName = tokeniser.nextToken();
		}
		else if (token == "definevec4")
		{
			// TODO: Add variable
			std::string variableName = tokeniser.nextToken();
			parseVector4(tokeniser);
		}
		else if (token == "}")
		{
			break;
		}
		else
		{
			rWarning() << "Unknown token encountered in GUI: " << token << std::endl;
		}
	}
}
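For reference, a hypothetical windowDef snippet matching the keywords handled above (values are invented; the exact delimiter handling for rect and the other vec4 properties depends on parseVector4, which is not shown here):

/*
// Hypothetical windowDef (illustration only)
windowDef ExampleWindow
{
	rect       0, 0, 640, 480
	visible    1
	backcolor  0, 0, 0, 1
	text       "Hello"

	onTime 0
	{
		// GuiScript body, parsed by GuiScript::constructFromTokens
	}
}
*/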
Example #18
void MD5Surface::parseFromTokens(parser::DefTokeniser& tok)
{
	// Start of datablock
	tok.assertNextToken("mesh");
	tok.assertNextToken("{");

	// Get the reference to the mesh definition
	MD5Mesh& mesh = *_mesh;

	// Get the shader name
	tok.assertNextToken("shader");
	setDefaultMaterial(tok.nextToken());

	// ----- VERTICES ------

	// Read the vertex count
	tok.assertNextToken("numverts");
	std::size_t numVerts = string::convert<std::size_t>(tok.nextToken());

	// Initialise the vertex vector
	MD5Verts& verts = mesh.vertices;
	verts.resize(numVerts);

	// Populate each vertex struct with parsed values
	for (MD5Verts::iterator vt = verts.begin(); vt != verts.end(); ++vt) {

		tok.assertNextToken("vert");

		// Index of vert
		vt->index = string::convert<std::size_t>(tok.nextToken());

		// U and V texcoords
		tok.assertNextToken("(");
		vt->u = string::convert<float>(tok.nextToken());
		vt->v = string::convert<float>(tok.nextToken());
		tok.assertNextToken(")");

		// Weight index and count
		vt->weight_index = string::convert<std::size_t>(tok.nextToken());
		vt->weight_count = string::convert<std::size_t>(tok.nextToken());

	} // for each vertex

	// ------  TRIANGLES ------

	// Read the number of triangles
	tok.assertNextToken("numtris");
	std::size_t numTris = string::convert<std::size_t>(tok.nextToken());

	// Initialise the triangle vector
	MD5Tris& tris = mesh.triangles;
	tris.resize(numTris);

	// Read each triangle
	for(MD5Tris::iterator tr = tris.begin(); tr != tris.end(); ++tr) {

		tok.assertNextToken("tri");

		// Triangle index, followed by the indexes of its 3 vertices
		tr->index = string::convert<std::size_t>(tok.nextToken());
		tr->a = 	string::convert<std::size_t>(tok.nextToken());
		tr->b = 	string::convert<std::size_t>(tok.nextToken());
		tr->c = 	string::convert<std::size_t>(tok.nextToken());

	} // for each triangle

	// -----  WEIGHTS ------

	// Read the number of weights
	tok.assertNextToken("numweights");
	std::size_t numWeights = string::convert<std::size_t>(tok.nextToken());

	// Initialise weights vector
	MD5Weights& weights = mesh.weights;
	weights.resize(numWeights);

	// Populate with weight data
	for(MD5Weights::iterator w = weights.begin(); w != weights.end(); ++w) {

		tok.assertNextToken("weight");

		// Index and joint
		w->index = string::convert<std::size_t>(tok.nextToken());
		w->joint = string::convert<std::size_t>(tok.nextToken());

		// Strength and relative position
		w->t = string::convert<float>(tok.nextToken());
		w->v = MD5Model::parseVector3(tok);

	} // for each weight

	// ----- END OF MESH DECL -----

	tok.assertNextToken("}");
}
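For reference, a hypothetical mesh block of the shape this parser consumes (the material path, indices and coordinates are invented for illustration):

/*
// Hypothetical md5mesh mesh block (illustration only)
mesh {
	shader "models/characters/example/body"

	numverts 3
	vert 0 ( 0.0 0.0 ) 0 1
	vert 1 ( 1.0 0.0 ) 1 1
	vert 2 ( 0.0 1.0 ) 2 1

	numtris 1
	tri 0 0 1 2

	numweights 3
	weight 0 0 1.0 ( 0.0 0.0 0.0 )
	weight 1 0 1.0 ( 8.0 0.0 0.0 )
	weight 2 0 1.0 ( 0.0 8.0 0.0 )
}
*/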