Code example #1
File: test4.cpp Project: Kaoswerk/newton-dynamics
const test4::dGotoEntry* dAssemblerParcer::FindGoto (const dGotoEntry* const gotoList, int count, dToken token) const
{
	int i0 = 0;
	int i1 = count - 1;
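	// binary-search narrowing: halve the [i0, i1] range until only a few entries remain, then scan them linearly below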
	while ((i1 - i0) >= 4) {
		int i = (i1 + i0 + 1)>>1;

		const dGotoEntry& action = gotoList[i];
		if (token <= dToken(action.m_token)) {
			i1 = i;
		} else {
			i0 = i;
		}
	}

	for (int i = i0; i <= i1; i ++) {
		const dGotoEntry& action = gotoList[i];
		if (token == dToken(action.m_token)) {
			return &action;
		}
	}

	_ASSERT (0);
	return NULL;
}
Code example #2
File: test6.cpp Project: Kaoswerk/newton-dynamics
const test6::dActionEntry* test6::FindAction (const dActionEntry* const actionList, int count, dToken token) const
{
	int i0 = 0;
	int i1 = count - 1;
	while ((i1 - i0) >= 4) {
		int i = (i1 + i0 + 1)>>1;

		const dActionEntry& action = actionList[i];
		dToken actionToken (dToken(action.m_token));
		if (token <= actionToken) {
			i1 = i;
		} else {
			i0 = i;
		}
	}

	for (int i = i0; i <= i1; i ++) {
		const dActionEntry& action = actionList[i];
		dToken actionToken (dToken(action.m_token));
		if (token == actionToken) {
			return &action;
		}
	}


	return NULL;
}
Code example #3
File: test0.cpp Project: Hurleyworks/newton-dynamics
test0::dGotoEntry dAssemblerParcer::FindGoto (const int* const gotoList, int count, dToken token) const
{
	int i0 = 0;
	int i1 = count - 1;
	while ((i1 - i0) >= 4) {
		int i = (i1 + i0 + 1)>>1;

		dGotoEntry action (gotoList[i]);
		if (token <= dToken(action.m_token)) {
			i1 = i;
		} else {
			i0 = i;
		}
	}

	for (int i = i0; i <= i1; i ++) {
		dGotoEntry action (gotoList[i]);
		if (token == dToken(action.m_token)) {
			return action;
		}
	}

	return dGotoEntry(unsigned (-1));
}
Code example #4
File: test0.cpp Project: Hurleyworks/newton-dynamics
test0::dActionEntry test0::FindAction (const int* const actionList, int count, dToken token) const
{
	int i0 = 0;
	int i1 = count - 1;
	while ((i1 - i0) >= 4) {
		int i = (i1 + i0 + 1)>>1;

		dActionEntry action (actionList[i]);
		if (token <= dToken(action.m_token)) {
			i1 = i;
		} else {
			i0 = i;
		}
	}

	for (int i = i0; i <= i1; i ++) {
		dActionEntry action (actionList[i]);
		if (token == dToken(action.m_token)) {
			return action;
		}
	}

	return dActionEntry(unsigned (-1));
}
Code example #5
File: test6.cpp Project: Kaoswerk/newton-dynamics
	dStackPair()
		:m_state(0), m_scannerLine(0), m_scannerIndex(0), m_token(dToken (0)), m_value()
	{
	}
Code example #6
File: test6.cpp Project: Kaoswerk/newton-dynamics
bool test6::Parse(xxxx& scanner)
{
	static short gotoCount[] = {
			2, 1, 0, 0, 0};
	static short gotoStart[] = {
			0, 2, 3, 3, 3};
	static dGotoEntry gotoTable[] = {
			dGotoEntry (257, 2), dGotoEntry (258, 1), dGotoEntry (259, 3)};

	dList<dStackPair> stack;
	const int lastToken = 257;
	
	stack.Append ();
	m_grammarError = false;
	dToken token = dToken (scanner.NextToken());
	for (bool terminate = false; !terminate;) {

		const dActionEntry* const action = GetNextAction (stack, token, scanner);
		if (!action) {
			terminate = true;
			fprintf (stderr, "unrecoverable parser error\n");
			DTRACE (("unrecoverable parser error\n"));
		} else {
			switch (action->m_stateType) 
			{
				case dSHIFT: 
				{
					dStackPair& entry = stack.Append()->GetInfo();
					entry.m_state = action->m_nextState;
					entry.m_scannerLine = scanner.GetLineNumber();
					entry.m_scannerIndex = scanner.GetIndex();
					entry.m_value = dUserVariable (token, scanner.GetTokenString(), entry.m_scannerLine, entry.m_scannerIndex);
					token = dToken (scanner.NextToken());
					entry.m_token = token;
					if (token == -1) {
						token = ACCEPTING_TOKEN;
					}

					break;
				}

				case dREDUCE: 
				{
					dStackPair parameter[MAX_USER_PARAM];
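					// pop the rule's right-hand-side symbols off the stack, then use the exposed state's goto table to find the next state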

					int reduceCount = action->m_ruleSymbols;
					_ASSERTE (reduceCount < sizeof (parameter) / sizeof (parameter[0]));

					for (int i = 0; i < reduceCount; i ++) {
						parameter[reduceCount - i - 1] = stack.GetLast()->GetInfo();
						stack.Remove (stack.GetLast());
					}

					const dStackPair& stackTop = stack.GetLast()->GetInfo();
					int start = gotoStart[stackTop.m_state];
					int count = gotoCount[stackTop.m_state];
					const dGotoEntry* const table = &gotoTable[start];
					const dGotoEntry* const gotoEntry = FindGoto (table, count, dToken (action->m_nextState + lastToken));

					dStackPair& entry = stack.Append()->GetInfo();
					entry.m_state = gotoEntry->m_nextState;
					entry.m_scannerLine = scanner.GetLineNumber();
					entry.m_scannerIndex = scanner.GetIndex();
					entry.m_token = dToken (gotoEntry->m_token);
					
					switch (action->m_ruleIndex) 
					{
						//do user semantic Actions
						case 3:// rule0 : 
							{/*xxx1*/}
							break;
						case 1:// NewtonHeader : 
							{/*xxx0*/}
							break;
						case 4:// rule1 : 
							{/*xxx2*/}
							break;

						default:;
					}

					break;

				}
		
				case dACCEPT: // 2 = accept
				{
					// program parsed successfully, exit with successful code
					terminate = true;
					break;
				}
				
				default:  
				{
					_ASSERTE (0);
					// syntax error parsing program
					//if (!ErrorHandler ("error")) {
					//}
					terminate = true;
					m_grammarError = true;
					break;
				}
			}
		}
	}
	return !m_grammarError;
}
Code example #7
File: test6.cpp Project: Kaoswerk/newton-dynamics
const test6::dActionEntry* test6::GetNextAction (dList<dStackPair>& stack, dToken token, xxxx& scanner) const
{
	static short actionsCount[] = {
			2, 1, 1, 1, 1};
	static short actionsStart[] = {
			0, 2, 3, 4, 5};
	static dActionEntry actionTable[] = {
			dActionEntry (254, 0, 1, 0, 0, 1), dActionEntry (256, 0, 1, 1, 0, 3), dActionEntry (256, 0, 1, 2, 0, 4), dActionEntry (254, 0, 2, 0, 0, 0), 
			dActionEntry (256, 0, 0, 4, 0, 0), dActionEntry (254, 0, 1, 0, 3, 2)};

	bool errorMode = false;
	const dStackPair& stackTop = stack.GetLast()->GetInfo();
	int state = stackTop.m_state;
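	// each state owns a contiguous slice of actionTable, addressed by actionsStart/actionsCount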
	int start = actionsStart[state];
	int count = actionsCount[state];

	const dActionEntry* const table = &actionTable[start];
	const dActionEntry* action = FindAction (table, count, token);
	while (!action && (stack.GetCount() > 1)) {
		errorMode = true; 

		// we found a syntax error; go into error-recovery mode and look for the token marked by a ". error" rule
		stack.Remove (stack.GetLast());

		const dStackPair& stackTop = stack.GetLast()->GetInfo();
		int state = stackTop.m_state;
		int start = actionsStart[state];
		int count = actionsCount[state];
		const dActionEntry* const table = &actionTable[start];
		action = FindAction (table, count, ERROR_TOKEN);
		if (action && !action->m_errorRule) {
			action = NULL;
		}
	}

	if (errorMode && action) {
		dStackPair& stackTop = stack.GetLast()->GetInfo();
		stackTop.m_token = ERROR_TOKEN;

		int state = action->m_nextState;
		int start = actionsStart[state];
		int count = actionsCount[state];
		const dActionEntry* const table = &actionTable[start];

		// find the next viable token to continue parsing
		while (!FindAction (table, count, token)) {
			token = dToken (scanner.NextToken());
			if (token == -1) {
				// reached the end of the file; cannot recover from this error
				return NULL;
			}
		}
		action = FindAction (table, count, token);
		
		dStackPair& entry = stack.Append()->GetInfo();
		entry.m_state = state;
		entry.m_scannerLine = stackTop.m_scannerLine;
		entry.m_scannerIndex = stackTop.m_scannerIndex;
		entry.m_value = dUserVariable (ERROR_TOKEN, "error", entry.m_scannerLine, entry.m_scannerIndex);
		entry.m_token = token;
	}

	return action;
}
Code example #8
dParserCompiler::dToken dParserCompiler::ScanGrammarRule(
	dParserLexical& lexical, 
	dProductionRule& rules, 
	dTree<dTokenInfo, dCRCTYPE>& symbolList, 
	int& ruleNumber,
	int& tokenEnumeration,
	const dOperatorsPrecedence& operatorPrecedence)
{
	dRuleInfo* currentRule = &rules.GetLast()->GetInfo();
	dToken token = dToken(lexical.NextToken());
	do {
		
		dList<dTokenStringPair> ruleTokens;
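		// collect the symbols of this sentence until a ';' or '|' terminates it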
		for (token = dToken(lexical.NextToken()); !((token == SIMICOLOM) || (token == OR)); token = dToken(lexical.NextToken())) {
			dAssert (token != -1);

			dTokenStringPair& pair = ruleTokens.Append()->GetInfo();
			pair.m_token = token;
			pair.m_info = lexical.GetTokenString();
		}
		
		dList<dTokenStringPair>::dListNode* lastNode = ruleTokens.GetLast();
		if (lastNode) {
			if (lastNode->GetInfo().m_token != SEMANTIC_ACTION) {
				lastNode = NULL;
			} else {
				currentRule->m_semanticActionCode = lastNode->GetInfo().m_info;
			}
		}
		for (dList<dTokenStringPair>::dListNode* node = ruleTokens.GetFirst(); node != lastNode; node = node->GetNext()) {
			dTokenStringPair& pair = node->GetInfo();

			if (pair.m_token == LITERAL) {
				dSymbol& symbol = currentRule->Append()->GetInfo();
				symbol.m_token = pair.m_token;
				symbol.m_name = pair.m_info;
				symbol.m_nameCRC = dCRC64 (symbol.m_name.GetStr());

				dTree<dTokenInfo, dCRCTYPE>::dTreeNode* symbolNode = symbolList.Find(symbol.m_nameCRC);
				if (!symbolNode) {
					symbolNode = symbolList.Insert(dTokenInfo (tokenEnumeration, NONTERMINAL, symbol.m_name), symbol.m_nameCRC);
					tokenEnumeration ++;
				}
				symbol.m_type = symbolNode->GetInfo().m_type;

			} else if (pair.m_token < 256) {
				dAssert (pair.m_info.Size() == 1);
				dSymbol& symbol = currentRule->Append()->GetInfo();
				symbol.m_name = pair.m_info;
				symbol.m_nameCRC = dCRC64 (symbol.m_name.GetStr());

				symbol.m_type = TERMINAL;
				symbol.m_token = LITERAL;
				symbolList.Insert(dTokenInfo (pair.m_token, TERMINAL, symbol.m_name), symbol.m_nameCRC);

			} else if (pair.m_token == PREC) {
				node = node->GetNext();
				for (dRuleInfo::dListNode* ruleNode = currentRule->GetLast(); ruleNode; ruleNode = ruleNode->GetPrev()) {
					dSymbol& symbol = ruleNode->GetInfo();
					if (operatorPrecedence.FindAssociation (symbol.m_nameCRC)) {
						dTokenStringPair& pair = node->GetInfo();		
						symbol.m_operatorPrecendeceOverright = pair.m_info;
						break;
					}
				}
//			} else if (pair.m_token != SEMANTIC_ACTION) {
//				// no user action allowed in the middle of a sentence
//				_ASSERTE (pair.m_token == SEMANTIC_ACTION);
//			} else {
//				_ASSERTE (0);
			}
		}

		if (token == OR) {
			// this rule has multiple alternative sentences; add a new rule with the same nonterminal name
			dRuleInfo& rule = rules.Append()->GetInfo();
			rule.m_ruleNumber = ruleNumber;
			ruleNumber ++;
			rule.m_ruleId = currentRule->m_ruleId;
			rule.m_token = currentRule->m_token;
			rule.m_type = NONTERMINAL;
			//rule.m_name += currentRule->m_name;
			rule.m_name = currentRule->m_name;
			rule.m_nameCRC = currentRule->m_nameCRC;
			currentRule = &rule;
		}

	} while (token != SIMICOLOM);

	return token;
}
Code example #9
void dParserCompiler::ScanGrammarFile(
	const dString& inputRules, 
	dProductionRule& ruleList, 
	dTree<dTokenInfo, dCRCTYPE>& symbolList, 
	dOperatorsPrecedence& operatorPrecedence,
	dString& userCodeBlock,
	dString& userVariableClass,
	dString& endUserCode,
	int& lastTokenEnum)
{
	dString startSymbol ("");
	int tokenEnumeration = 256;
	int operatorPrecedencePriority = 0;

	dParserLexical lexical (inputRules.GetStr());
	LoadTemplateFile("dParserUserVariableTemplate_cpp.txt", userVariableClass);

	// scan the definition segment
	for (dToken token = dToken(lexical.NextToken()); token != GRAMMAR_SEGMENT; ) {
		switch (int (token)) 
		{
			case START:
			{
				token = dToken(lexical.NextToken());
				startSymbol = lexical.GetTokenString();
				token = dToken(lexical.NextToken());
				break;
			}

			case TOKEN:
			{
				for (token = dToken(lexical.NextToken()); token == LITERAL; token = dToken(lexical.NextToken())) {
					const char* const name = lexical.GetTokenString();
					symbolList.Insert(dTokenInfo (tokenEnumeration, TERMINAL, name), dCRC64 (name));
					tokenEnumeration ++;
				}
				break;
			}

			case LEFT:
			case RIGHT:
			{
				dOperatorsAssociation& association = operatorPrecedence.Append()->GetInfo();
				association.m_prioprity = operatorPrecedencePriority;
				operatorPrecedencePriority ++;
				switch (int (token))
				{
					case LEFT:
						association.m_associativity = dOperatorsAssociation::m_left;
						break;
					case RIGHT:
						association.m_associativity = dOperatorsAssociation::m_right;
						break;
				}

				for (token = dToken(lexical.NextToken()); (token == LITERAL) || ((token < 256) && !isalnum (token)); token = dToken(lexical.NextToken())) {
					association.Append(dCRC64 (lexical.GetTokenString()));
				}
				break;
			}


			case UNION:
			{
				token = dToken(lexical.NextToken());
				dAssert (token == SEMANTIC_ACTION);
				userVariableClass = lexical.GetTokenString() + 1;
				userVariableClass.Replace(userVariableClass.Size() - 1, 1, "");
				token = dToken(lexical.NextToken());
				break;
			}


			case CODE_BLOCK:
			{
				userCodeBlock += lexical.GetTokenString();
				token = dToken(lexical.NextToken());
				break;
			}

			case EXPECT:
			{
				token = dToken(lexical.NextToken());
				dAssert (token == INTEGER);
				m_shiftReduceExpectedWarnings = atoi (lexical.GetTokenString());
				token = dToken(lexical.NextToken());
				break;
			}

			default:
			{
				dAssert (0);
				token = dToken(lexical.NextToken());
			}
		}
	}


	int ruleNumber = 1;
	lastTokenEnum = tokenEnumeration;

	// scan the production rules segment
	dToken token1 = dToken(lexical.NextToken());
	for (; (token1 != GRAMMAR_SEGMENT) && (token1 != -1); token1 = dToken(lexical.NextToken())) {
		//dTrace (("%s\n", lexical.GetTokenString()));
		switch (int (token1)) 
		{
			case LITERAL:
			{
				// add the first Rule;
				dRuleInfo& rule = ruleList.Append()->GetInfo();
				rule.m_token = token1;
				rule.m_type = NONTERMINAL;
				rule.m_name = lexical.GetTokenString();
				rule.m_nameCRC = dCRC64 (lexical.GetTokenString());

				dTree<dTokenInfo, dCRCTYPE>::dTreeNode* nonTerminalIdNode = symbolList.Find(rule.m_nameCRC);
				if (!nonTerminalIdNode) {
					nonTerminalIdNode = symbolList.Insert(dTokenInfo (tokenEnumeration, NONTERMINAL, rule.m_name), rule.m_nameCRC);
					tokenEnumeration ++;
				}
				rule.m_ruleId = nonTerminalIdNode->GetInfo().m_tokenId;
				rule.m_ruleNumber = ruleNumber;
				ruleNumber ++;

				token1 = ScanGrammarRule(lexical, ruleList, symbolList, ruleNumber, tokenEnumeration, operatorPrecedence); 
				break;
			}
			default:
				dAssert (0);
		}
	}

	dProductionRule::dListNode* firtRuleNode = ruleList.GetFirst();	
	if (startSymbol != "") {
		firtRuleNode = ruleList.Find (dCRC64 (startSymbol.GetStr()));	
	}

	//Expand the Grammar Rule by adding an empty start Rule;
	const dRuleInfo& firstRule = firtRuleNode->GetInfo();

	dRuleInfo& rule = ruleList.Addtop()->GetInfo();
	rule.m_ruleNumber = 0;
	rule.m_ruleId = tokenEnumeration;
	rule.m_token = firstRule.m_token;
	rule.m_type = NONTERMINAL;
	rule.m_name = firstRule.m_name + dString("__");
	rule.m_nameCRC = dCRC64 (rule.m_name.GetStr());
	symbolList.Insert(dTokenInfo (tokenEnumeration, rule.m_type, rule.m_name), rule.m_nameCRC);
	tokenEnumeration ++;
	
	dSymbol& symbol = rule.Append()->GetInfo();
	symbol.m_token = firstRule.m_token;
	symbol.m_type = firstRule.m_type;
	symbol.m_name = firstRule.m_name;
	symbol.m_nameCRC = firstRule.m_nameCRC;

	// scan the trailing user code
	if (token1 == GRAMMAR_SEGMENT) {
		endUserCode = lexical.GetNextBuffer();
		//endUserCode += "\n";
	}
}
Code example #10
dScriptCompiler::dUserVariable dScriptCompiler::NewExpresionNodeAssigment (const dUserVariable& leftVariable, const dUserVariable& assigmentOperator, const dUserVariable& expression)
{
	dUserVariable leftVariableCopy;

	dDAGExpressionNodeVariable* const leftNode = (dDAGExpressionNodeVariable*) leftVariable.m_node;
	dAssert (leftNode->IsType(dDAGExpressionNodeVariable::GetRttiType()));


	dUserVariable tmpOperator;
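	// map each compound-assignment token onto its underlying binary operator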
	switch (assigmentOperator.m_token) 
	{
		case _ASS_ADD:
		{
			tmpOperator.m_token = dToken ('+');
			break;
		}
		case _ASS_SUB:
		{
			tmpOperator.m_token = dToken ('-');
			break;
		}
		case _ASS_MUL: 
		{
			tmpOperator.m_token = dToken ('*');
			break;
		}
		case _ASS_DIV: 
		{
			tmpOperator.m_token = dToken ('/');
			break;
		}
		case _ASS_MOD:
		{
			tmpOperator.m_token = dToken ('%');
			break;
		}
		
		case _ASS_SHL:
		{
			tmpOperator.m_token = dToken ('<<');
			break;
		}

		case _ASS_SHR:
		{
			tmpOperator.m_token = dToken ('>>');
			break;
		}

		case _ASS_AND:
		{
			tmpOperator.m_token = dToken ('&');
			break;
		}

		case _ASS_XOR:
		{
			tmpOperator.m_token = dToken ('^');
			break;
		}

		case _ASS_OR:
		{
			tmpOperator.m_token = dToken ('|');
			break;
		}

		default:
			dAssert (0);
	}

	dUserVariable expressionA;
	expressionA.m_node = leftNode->Clone (m_allNodes);
	return NewExpresionNodeAssigment (leftVariable, NewExpressionNodeBinaryOperator (expressionA, tmpOperator, expression));
}
Code example #11
File: test4.cpp Project: Kaoswerk/newton-dynamics
	dStackPair()
		:m_state(0), m_token(dToken (0)), m_value()
	{
	}
Code example #12
File: test4.cpp Project: Kaoswerk/newton-dynamics
		dUserVariable () 
			:m_token (dToken (0)), m_data("")
		{
		}
Code example #13
File: test4.cpp Project: Kaoswerk/newton-dynamics
bool test4::Parce(lextest1& scanner)
{
	dList<dStackPair> stack;
	static short actionsCount[] = {2, 1, 2, 1, 1, 2, 2, 2, 1, 2};
	static short actionsStart[] = {0, 2, 0, 3, 4, 5, 7, 0, 9, 5};
	static dActionEntry actionTable[] = {
					dActionEntry (259, 0, 1, 0, 0), dActionEntry (256, 0, 2, 0, 0), 
					dActionEntry (0, 1, 0, 1, 3), 
					dActionEntry (0, 2, 0, 0, 0), 
					dActionEntry (257, 0, 5, 0, 0), 
					dActionEntry (259, 0, 6, 0, 0), dActionEntry (256, 0, 7, 0, 0), 
					dActionEntry (0, 1, 0, 1, 3), dActionEntry (258, 1, 0, 1, 3), 
					dActionEntry (257, 0, 9, 0, 0), 
			};

	static short gotoCount[] = {1, 0, 1, 0, 0, 1, 0, 1, 0, 1};
	static short gotoStart[] = {0, 1, 1, 2, 2, 2, 3, 3, 4, 4};
	static dGotoEntry gotoTable[] = {
					dGotoEntry (261, 3), dGotoEntry (261, 4), dGotoEntry (261, 4), dGotoEntry (261, 8), 
					dGotoEntry (261, 8)};

	const int lastToken = 261;

	stack.Append ();
	dToken token = dToken (scanner.NextToken());
	for (;;) {
		const dStackPair& stackTop = stack.GetLast()->GetInfo();
		int start = actionsStart[stackTop.m_state];
		int count = actionsCount[stackTop.m_state];
		const dActionEntry* const action (FindAction (&actionTable[start], count, token));
		_ASSERTE (action);

		switch (action->m_stateType) 
		{
			case dSHIFT: 
			{
				dStackPair& entry = stack.Append()->GetInfo();
				entry.m_token = dToken (action->m_token);
				entry.m_state = action->m_nextState;
				entry.m_value = dStackPair::dUserVariable (entry.m_token, scanner.GetTokenString());
				token = dToken (scanner.NextToken());
				if (token == -1) {
					token = dToken (0);
				}

				break;
			}

			case dREDUCE: 
			{
				dStackPair parameter[MAX_USER_PARAM];

				int reduceCount = action->m_ruleSymbols;
				_ASSERTE (reduceCount < sizeof (parameter) / sizeof (parameter[0]));

				for (int i = 0; i < reduceCount; i ++) {
					parameter[reduceCount - i - 1] = stack.GetLast()->GetInfo();
					stack.Remove (stack.GetLast());
				}

				const dStackPair& stackTop = stack.GetLast()->GetInfo();
				int start = gotoStart[stackTop.m_state];
				int count = gotoCount[stackTop.m_state];
				const dGotoEntry* const gotoEntry = FindGoto (&gotoTable[start], count, dToken (action->m_nextState + lastToken));

				dStackPair& entry = stack.Append()->GetInfo();
				entry.m_state = gotoEntry->m_nextState;
				entry.m_token = dToken (gotoEntry->m_token);
				
				switch (action->m_ruleIndex) 
				{
					//do user semantic Actions
					case 3:// rule stmt : a 
						{}
						break;

					default:;
				}

				break;

			}
	
			case dACCEPT: // 2 = accept
			{
				// program parsed successfully, exit with successful code
				return true;
			}
			
			default:  
			{
				_ASSERTE (0);
				// syntax error parsing program
				//if (!ErrorHandler ("error")) {
				//}
				break;
			}
		}
	}
	return false;
}
Code example #14
File: test0.cpp Project: Hurleyworks/newton-dynamics
		dUserVariable ()
			:string(), m_token (dToken(0))
		{
		}
Code example #15
File: test0.cpp Project: Hurleyworks/newton-dynamics
int test0::Parce(lextest1& scanner)
{
	dList<dStackPair> stack;
	static int actionsCount[] = {2, 2, 5, 2, 5, 2, 2, 5, 5};
	static int actionsStart[] = {0, 2, 4, 9, 11, 16, 18, 20, 25};
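	// action entries are packed into single ints and decoded by the dActionEntry constructor used in FindAction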
	static int actionTable[] = {0xc0a0, 0x10400, 0x2, 0x140ac, 0x4000001, 0x40000a1, 0x40000a5, 0x40000ad, 0x4000401, 0xc0a0, 0x10400, 0x4004001, 0x40040a1, 0x40040a5, 0x40040ad, 0x4004401, 0xc0a0, 0x10400, 0x1c0a4, 0x140ac, 0xc004001, 0xc0040a1, 0xc0040a5, 0xc0040ad, 0xc004401, 0xc000001, 0xc0000a1, 0xc0000a5, 0xc0000ad, 0xc000401};

	static int gotoCount[] = {2, 0, 0, 2, 0, 1, 0, 0, 0};
	static int gotoStart[] = {0, 2, 2, 2, 4, 4, 5, 5, 5};
	static int gotoTable[] = {0x10101, 0x20102, 0x60101, 0x20102, 0x80102};

	const int lastToken = 257;

	stack.Append ();
	for (dToken token = dToken (scanner.NextToken()); token != -1; ) {
		const dStackPair& stackTop = stack.GetLast()->GetInfo();
		int start = actionsStart[stackTop.m_state];
		int count = actionsCount[stackTop.m_state];
		dActionEntry action (FindAction (&actionTable[start], count, token));

		switch (action.m_stateType) 
		{
			case 0: // 0 = shift
			{
				dStackPair& entry = stack.Append()->GetInfo();
				entry.m_token = dToken (action.m_token);
				entry.m_state = action.m_nextState;
				entry.m_value = dStackPair::dUserVariable (entry.m_token, scanner.GetTokenString());
				token = dToken (scanner.NextToken());
				break;
			}

			case 1: // 1 = reduce
			{
				dStackPair parameter[MAX_USER_PARAM];

				int reduceCount = action.m_reduceCount;
				_ASSERTE (reduceCount < sizeof (parameter) / sizeof (parameter[0]));

				for (int i = 0; i < reduceCount; i ++) {
					parameter[i] = stack.GetLast()->GetInfo();
					stack.Remove (stack.GetLast());
				}

				const dStackPair& stackTop = stack.GetLast()->GetInfo();
				int start = gotoStart[stackTop.m_state];
				int count = gotoCount[stackTop.m_state];
				dGotoEntry gotoEntry (FindGoto (&gotoTable[start], count, dToken (action.m_nextState + lastToken)));

				dStackPair& entry = stack.Append()->GetInfo();
				entry.m_state = gotoEntry.m_nextState;
				entry.m_token = dToken (gotoEntry.m_token);
				
				switch (action.m_nextState) 
				{
					//do user semantic Action
					//$(semanticActionsCode);
					case 0:
					{
						break;
					}
					default:;
				}

				break;

			}
	
			case 2: // 2 = accept
			{
				_ASSERTE (0);
			}
			
			default:  // syntax grammar error
			{
				_ASSERTE (0);
				// error
				//if (!ErrorHandler ("error")) {
				//}
			}
		}
	}

	return 1;
}