Example #1
0
//Respond to a completed incoming message: match the receive buffer against
//the known greetings, print the corresponding reply, then reset the receive
//state so the next message starts fresh.
void debug_ParseIncoming(void)
{
    //Nothing to do until a full message has arrived
    if (!g_receivedMessage) {
        return;
    }

    if (IsStringMatch(g_rxBuffer, "hello")) {
        debug_Print("Hello:)");
    } else if (IsStringMatch(g_rxBuffer, ":P")) {
        debug_Print(":D");
    } else {
        //Unrecognized message
        debug_Print(":(");
    }

    //Message consumed; clear the counters for the next receive
    g_bytesReceived = 0;
    g_receivedMessage = 0;
}
//Parses the body of a struct declaration -- the tokens between the open and
//close brace -- recording each member's type token, name token, pointer depth,
//and any meta tags attached to it.  Afterwards it emits a forward declaration
//of the struct to outFile_H and, when the struct itself carries meta data,
//runs the meta-driven code generators.
//
//NOTE(review): members are written into definition.memberDefinitions with no
//bounds check on memberCount -- assumes the array is sized for any struct.
StructDefinition ParseStructDefinition( Tokenizer* tokenizer ) {
	StructDefinition definition = { };

	//A meta tag seen within the last few tokens is taken to apply to this struct
	bool hasMetaData = tokenizer->tokensSinceLastMeta <= 3;
	MetaTagDataStorage* allMetaData = tokenizer->metaTagData;
	//NOTE(review): indexes listCount - 1 before hasMetaData is checked; if
	//listCount can be 0 this points before the array -- confirm invariant
	MetaTagList* metaData = &allMetaData->lists[ allMetaData->listCount - 1 ];
	//The struct's name token immediately precedes the open brace that got us here
	definition.typeNameToken = &tokenizer->tokens[ tokenizer->tokenCount - 2 ];

	if( hasMetaData ) {
		definition.metaData = metaData;
	} else {
		definition.metaData = NULL;
	}

	//A plain member declaration is: <type identifier> <name identifier> ;
	int stdMemberPattern [3] = { 
		TokenType::Identifier, 
		TokenType::Identifier, 
		TokenType::SemiColon 
	};
	PatternTrackingState stdMemberChecking = { stdMemberPattern, 3, 0 };
	int ptrLevel = 0;

	//Meta tags parsed mid-body apply to the next completed member
	MetaTagList* metaTags = NULL;
	while( tokenizer->at[0] ) {
		Token* token = ReadNextToken( tokenizer );
		if( token->tokenType == TokenType::CloseBrace ) break;

		if( UpdatePatternTracker( &tokenizer->metaTagData->metaTagPatternTracker, token ) ) {
			metaTags = ParseMetaTagValues( tokenizer );
			ptrLevel = 0;
		}

		//The idea here, is skip any indicaters of pointers, just check for the
		//identifier - identifier - semicolon pattern but retain how deep
		//the pointer reference level is
		//(i.e. char vs char* vs char**  is 0 vs 1 vs 2, etc. )
		if( token->tokenType == TokenType::Star ) {
			++ptrLevel;
			//Skip updating the memberDef tracker
			continue;
		}

		//the 'Static' keyword should reset pattern tracking logic, since
		//members definitions only start with that, so for now we ignore such
		//modifiers
		if( token->tokenType == TokenType::kwStatic ) {
			stdMemberChecking.trackingIndex = 0;
			ptrLevel = 0;
			continue;
		}

		if( UpdatePatternTracker( &stdMemberChecking, token ) ) {
			//Walk back past the name, semicolon, and any '*' tokens to the type
			int memberTypeTokenIndex = tokenizer->tokenCount - ptrLevel - 3;
			int memberNameTokenIndex = tokenizer->tokenCount - 2;

			definition.memberDefinitions[ definition.memberCount ] = { 
				&tokenizer->tokens[ memberTypeTokenIndex ],
				&tokenizer->tokens[ memberNameTokenIndex ],
				metaTags,
				ptrLevel
			};
			++definition.memberCount;
			//Tags are consumed by the member they precede
			metaTags = NULL;
		}

		if( token->tokenType == TokenType::SemiColon ) {
			ptrLevel = 0;
		}
	}

	//Forward declare struct type
	writeConstStringToFile( "\nstruct ", outFile_H );
	writeTokenDataToFile( definition.typeNameToken, outFile_H );
	writeConstStringToFile( ";", outFile_H );

	//Check for meta programming code generation flags
	if( hasMetaData ) {
		//Output introspection data, assume its functionality is needed
		GenerateIntrospectionCode( &definition );

		//String literals are const; binding them to plain char* is ill-formed
		//in C++11 and later
		const char* editableMetaFlag = "Editable";
		const char* serializableMetaFlag = "Serializable";

		char* prevMetaString = metaData->tagTokens[0]->tokenStart;

		if( IsStringMatch( prevMetaString, editableMetaFlag ) ) {
			GenerateImguiEditor( &definition );
		} else if( IsStringMatch( prevMetaString, serializableMetaFlag ) ) {
			//GenerateSerializationCode( &definition );
		}
	}

	return definition;
}
//Lexes the next token from tokenizer->at, appending it to tokenizer->tokens
//and advancing the read cursor past it.  Single-character punctuation maps
//directly to a token type; '#' directives, keywords, string literals,
//identifiers and numbers are handled in the default branch.
//Returns a pointer to the token just written (owned by the tokenizer).
//NOTE(review): keyword/directive matches assume IsStringMatch does a
//prefix comparison against the stream -- confirm against its definition.
Token* ReadNextToken( Tokenizer* tokenizer ) {
	EatAllWhiteSpace( tokenizer );

	Token* newToken = &tokenizer->tokens[ tokenizer->tokenCount ];

	//BUGFIX: was '<=', which allowed writing one past the end of the
	//tokens array when tokenCount == MAX_TOKENS
	assert( tokenizer->tokenCount < MAX_TOKENS );
	newToken->tokenStart = tokenizer->at;
	newToken->tokenLength = 1;
	newToken->nextToken = NULL;
	switch( tokenizer->at[0] ) {
		case '{' : newToken->tokenType = TokenType::OpenBrace; ++tokenizer->at; break;
		case '}' : newToken->tokenType = TokenType::CloseBrace; ++tokenizer->at; break;
		case '[' : newToken->tokenType = TokenType::OpenBracket; ++tokenizer->at; break;
		case ']' : newToken->tokenType = TokenType::CloseBracket; ++tokenizer->at; break;
		case '(' : newToken->tokenType = TokenType::OpenParen; ++tokenizer->at; break;
		case ')' : newToken->tokenType = TokenType::CloseParen; ++tokenizer->at; break;
		case ';' : newToken->tokenType = TokenType::SemiColon; ++tokenizer->at; break;
		case ',' : newToken->tokenType = TokenType::Comma; ++tokenizer->at; break;
		case '*' : newToken->tokenType = TokenType::Star; ++tokenizer->at; break;
		case '&' : newToken->tokenType = TokenType::Ampersand; ++tokenizer->at; break;
		case '\0' : newToken->tokenType = TokenType::EndOfStream; break;
		default: { 
			if( tokenizer->at[0] == '\"' ) {
				++tokenizer->at;
				//BUGFIX: the escape check previously ran AFTER advancing, so a
				//backslash as the first character of the literal was missed
				//(\" terminated the string early) and 'at += 2' could step
				//past the terminating NUL on a malformed literal.  Check the
				//current character instead, and never skip past end of stream.
				while( tokenizer->at[0] && tokenizer->at[0] != '\"' ) {
					if( tokenizer->at[0] == '\\' && tokenizer->at[1] ) {
						tokenizer->at += 2; //skip the backslash and escaped char
					} else {
						++tokenizer->at;
					}
				}
				if( tokenizer->at[0] == '\"' ) {
					++tokenizer->at; //Chew through the last "
				}
				newToken->tokenType = TokenType::StringLiteral;
			} else if ( tokenizer->at[0] == '#' ) {
				const char* defineString = "define";
				const char* includeString = "include";
				++tokenizer->at;

				if( IsStringMatch( tokenizer->at, defineString ) ) {
					newToken->tokenType = TokenType::poundDefine;
					tokenizer->at += 6;
				} else if( IsStringMatch( tokenizer->at, includeString ) ) {
					newToken->tokenType = TokenType::poundInclude;
					tokenizer->at += 7;
				} else {
					newToken->tokenType = TokenType::UndefinedToken;
					++tokenizer->at;
				}

			} else {
				const char* structString = "struct";
				const char* metatagString = "meta";
				const char* staticString = "static";

				//Compare to struct or meta, else identitfer or numeric
				if( IsStringMatch( tokenizer->at, structString ) ) {
					newToken->tokenType = TokenType::kwStruct;
					tokenizer->at += 6;
				} else if( IsStringMatch( tokenizer->at, metatagString ) ) {
					newToken->tokenType = TokenType::MetaTag;
					tokenizer->at += 4;
				} else if( IsStringMatch( tokenizer->at, staticString ) ) {
					//BUGFIX: was '==' (a no-op comparison), so 'static'
					//tokens never actually received the kwStatic type
					newToken->tokenType = TokenType::kwStatic;
					tokenizer->at += 6;
				} else {
					if( IsIdentifierValidCharacter( tokenizer->at[0] ) ) {
						while( tokenizer->at[0] && ( IsIdentifierValidCharacter( tokenizer->at[0] ) || IsNumeric( tokenizer->at[0] ) ) )  {
							++tokenizer->at;
						}
						newToken->tokenType = TokenType::Identifier;
					} else if( IsNumeric( tokenizer->at[0] ) ) {
						while( IsNumeric( tokenizer->at[0] ) ) {
							++tokenizer->at;
						}
						newToken->tokenType = TokenType::Number;
					} else {
						newToken->tokenType = TokenType::UndefinedToken;
						++tokenizer->at;
					}
				}
			}

			newToken->tokenLength = tokenizer->at - newToken->tokenStart;

			//Edge case solution, StringLiterals were spitting out tokens like:
			// \"String\", if you wanted to compare their values, then you had to
			// make const string with escaped quotes
			if( newToken->tokenType == TokenType::StringLiteral ) {
				++newToken->tokenStart;
				newToken->tokenLength -= 2;
			}

			break;
		}
	}
	++tokenizer->tokensSinceLastMeta;
	++tokenizer->tokenCount;
	return newToken;
}