void dDeterministicFiniteAutonata::EmptyTransitionClosure (const dTree<dAutomataState*,dAutomataState*>& set, dTree<dAutomataState*,dAutomataState*>& closureStates) const
{
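	// computes the closure of the input state set over empty transitions: every state reachable
	// from the set by following only transitions whose symbol is 0, using an explicit stack
	// instead of recursion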

	int stack = 0;
	dAutomataState* stackPool[2048];

	dTree<dAutomataState*,dAutomataState*>::Iterator iter (set);
	for (iter.Begin(); iter; iter ++) {
		dAutomataState* const state = iter.GetNode()->GetInfo();
		stackPool[stack] = state;
		stack ++;
		dAssert (stack  < sizeof (stackPool) / sizeof (stackPool[0]));
		closureStates.Insert(state, state);
	}

	while(stack) {
		stack --;
		dAutomataState* const state = stackPool[stack];
		for (dList<dAutomataState::dTransition>::dListNode* node = state->m_transtions.GetFirst(); node; node = node->GetNext()) {
			dAutomataState::dTransition& transition = node->GetInfo();
			dAutomataState::dCharacter ch (transition.GetCharater());
			if (ch.m_symbol == 0) {
				dAutomataState* const targetState = transition.GetState();
				if(!closureStates.Find(targetState)) {
					closureStates.Insert(targetState, targetState);
					stackPool[stack] = targetState;
					stack ++;
					dAssert (stack  < sizeof (stackPool) / sizeof (stackPool[0]));
				}
			}
		}
	}
}
void dParserCompiler::First (
	dCRCTYPE symbol, 
	dTree<int, dCRCTYPE>& symbolListMark, 
	const dTree<dTokenInfo, dCRCTYPE>& symbolList, 
	const dTree<dList<void*>, dCRCTYPE>& ruleMap,
	dTree<int, dCRCTYPE>& firstSetOut) const
{
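	// computes FIRST(symbol): a terminal yields itself; a nonterminal that derives the empty
	// string yields the empty marker (0); otherwise, for each production of the symbol, the
	// FIRST set of its leading non-nullable symbol is added. symbolListMark guards against
	// infinite recursion on recursive rules.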
	if (symbolListMark.Find(symbol)) {
		return;
	}
	symbolListMark.Insert(0, symbol);

	dTree<dTokenInfo, dCRCTYPE>::dTreeNode* const node = symbolList.Find(symbol);
	dAssert (node);
	if (node->GetInfo().m_type == TERMINAL) {
		firstSetOut.Insert(0, symbol);
	} else if (DoesSymbolDeriveEmpty (symbol, ruleMap)) {
		firstSetOut.Insert(0, 0);
	} else {

		dTree<dList<void*>, dCRCTYPE>::dTreeNode* const ruleNodes = ruleMap.Find(symbol);
		if (ruleNodes) {
			const dList<void*>& matchingRulesList = ruleNodes->GetInfo();
			for (dList<void*>::dListNode* node = matchingRulesList.GetFirst(); node; node = node->GetNext()) {
				dProductionRule::dListNode* const ruleInfoNode = (dProductionRule::dListNode*) node->GetInfo();
				const dRuleInfo& info = ruleInfoNode->GetInfo();

				bool allDeriveEmpty = true;
				for (dRuleInfo::dListNode* sentenceSymbolNode = info.GetFirst(); sentenceSymbolNode; sentenceSymbolNode = sentenceSymbolNode->GetNext()) {
					const dSymbol& sentenceSymbol = sentenceSymbolNode->GetInfo();
					if (!DoesSymbolDeriveEmpty (sentenceSymbol.m_nameCRC, ruleMap)) {
						allDeriveEmpty = false;
						dTree<int, dCRCTYPE> newFirstSetOut;
						First (sentenceSymbol.m_nameCRC, symbolListMark, symbolList, ruleMap, newFirstSetOut);
						dTree<int, dCRCTYPE>::Iterator iter (newFirstSetOut);
						for (iter.Begin(); iter; iter ++) {
							dCRCTYPE symbol = iter.GetKey();
							dAssert (symbol != 0);
							dAssert (symbolList.Find(symbol)->GetInfo().m_type == TERMINAL);
							firstSetOut.Insert(0, symbol);
						}
						break;
					}
				}
				if (allDeriveEmpty) {
					dTrace (("possible bug: not sure whether the closure should include the accepting state here, needs more debugging\n"));
//					firstSetOut.Insert(0, 0);
				}
			}
		}
	}
}
void dParserCompiler::First (
	const dList<dCRCTYPE>& symbolSet, 
	const dTree<dTokenInfo, dCRCTYPE>& symbolList, 
	const dTree<dList<void*>, dCRCTYPE>& ruleMap,
	dTree<int, dCRCTYPE>& firstSetOut) const
{
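	// computes FIRST of a symbol string: the FIRST set of each symbol is added in order for as
	// long as the prefix scanned so far can derive the empty string; if every symbol in the
	// string can derive empty, the empty marker (0) is added as well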
	if (symbolSet.GetCount() > 1) {

		dList<dCRCTYPE>::dListNode* node = symbolSet.GetFirst();
		bool deriveEmpty = true;
		while ((deriveEmpty) && node) {
			dCRCTYPE symbol = node->GetInfo();
			node = node->GetNext();

			dTree<int, dCRCTYPE> tmpFirst;
			dTree<int, dCRCTYPE> symbolListMark; 
			First (symbol, symbolListMark, symbolList, ruleMap, tmpFirst);
			dTree<int, dCRCTYPE>::Iterator iter (tmpFirst);
			deriveEmpty = false;  
			for (iter.Begin(); iter; iter ++) {
				dCRCTYPE symbol = iter.GetKey();
				if (symbol == 0) {
					deriveEmpty = true;  
				} else {
					firstSetOut.Insert(0, symbol);
				}
			}
		}
		if (deriveEmpty) {
			firstSetOut.Insert(0, 0);
		}

	} else  {
		dCRCTYPE symbol = symbolSet.GetFirst()->GetInfo();
		dTree<int, dCRCTYPE> symbolListMark; 
		First (symbol, symbolListMark, symbolList, ruleMap, firstSetOut);
	}
}
void dDeterministicFiniteAutonata::MoveSymbol (int symbol, const dAutomataState* const state, dTree<dAutomataState*,dAutomataState*>& output) const
{
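	// collects into output the target states reachable on 'symbol' from any state in this
	// state's NFA null-state set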
	for (dList<dAutomataState*>::dListNode* stateNode = state->m_myNFANullStates.GetFirst(); stateNode; stateNode = stateNode->GetNext()) {
		const dAutomataState* const nfaState = stateNode->GetInfo();
		for (dList<dAutomataState::dTransition>::dListNode* transitionNode = nfaState->m_transtions.GetFirst(); transitionNode; transitionNode = transitionNode->GetNext()) {
			dAutomataState::dTransition& transition = transitionNode->GetInfo();
			dAutomataState::dCharacter ch (transition.GetCharater());
			if (ch.m_symbol == symbol) {
				dAutomataState* const target = transition.GetState();
				output.Insert(target, target);
			}
		}
	}
}
void dDataFlowGraph::FindNodesInPathway(dCIL::dListNode* const source, dCIL::dListNode* const destination, dTree<int, dCIL::dListNode*>& pathOut) const
{
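	// breadth first walk of the data flow graph starting at source: every statement reached is
	// recorded in pathOut, and successors equal to destination are not expanded, so the walk
	// does not continue past the destination statement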
	m_mark ++;
	dList<dDataFlowPoint*> queue; 
	queue.Append(&m_dataFlowGraph.Find(source)->GetInfo());
	dAssert (queue.GetFirst()->GetInfo()->m_statement == source);

	while (queue.GetCount()) {
		dDataFlowPoint* const rootNode = queue.GetFirst()->GetInfo();
		queue.Remove(queue.GetFirst());
		if (rootNode->m_mark != m_mark) {
			rootNode->m_mark = m_mark;
			pathOut.Insert(rootNode->m_statement);

			for (dList<dDataFlowPoint*>::dListNode* successorsNode = rootNode->m_successors.GetFirst(); successorsNode; successorsNode = successorsNode->GetNext()) {
				dDataFlowPoint* const successor = successorsNode->GetInfo();
				if (successor->m_statement != destination) {
					queue.Append(successor);
				}
			}
		}
	}
//	pathOut.Remove(source);
}
void dBasicBlocksGraph::GetStatementsWorklist(dTree <int, dCIL::dListNode*>& list) const
{
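	// collects every CIL statement in the block range [m_begin, m_end) into the worklist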
	for (dCIL::dListNode* node = m_begin; node != m_end; node = node->GetNext()) {
		list.Insert(0, node);
	}
}
// generates the canonical item sets for an LR(1) grammar
void dParserCompiler::CanonicalItemSets (
	dTree<dState*, dCRCTYPE>& stateMap, 
	const dProductionRule& ruleList, 
	const dTree<dTokenInfo, dCRCTYPE>& symbolList, 
	const dOperatorsPrecedence& operatorPrecedence,
	FILE* const debugFile)
{
	dList<dItem> itemSet;
	dList<dState*> stateList;

	// start by building an item set with only the first rule
	dItem& item = itemSet.Append()->GetInfo();
	item.m_indexMarker = 0;
	item.m_lookAheadSymbolCRC = dCRC64 (DACCEPT_SYMBOL);
	item.m_lookAheadSymbolName = DACCEPT_SYMBOL;
	item.m_ruleNode = ruleList.GetFirst();

	// build a map from each nonterminal name CRC to the list of its production rules
	dTree<dList<void*>, dCRCTYPE> ruleMap;
	for (dProductionRule::dListNode* ruleNode = ruleList.GetFirst(); ruleNode; ruleNode = ruleNode->GetNext()) {
		dRuleInfo& info = ruleNode->GetInfo();

		dTree<dList<void*>, dCRCTYPE>::dTreeNode* node = ruleMap.Find(info.m_nameCRC);
		if (!node) {
			node = ruleMap.Insert(info.m_nameCRC);
		}
		dList<void*>& entry = node->GetInfo();
		entry.Append(ruleNode);
	}


	// find the closure of the initial item set, which contains only the first rule
	dState* const state = Closure (itemSet, symbolList, ruleMap);
	operatorPrecedence.SaveLastOperationSymbol (state);

	stateMap.Insert(state, state->GetKey());
	stateList.Append(state);

	state->Trace(debugFile);

	// now for each state found 
	int stateNumber = 1;
	for (dList<dState*>::dListNode* node = stateList.GetFirst(); node; node = node->GetNext()) {
		dState* const state = node->GetInfo();

		dTree<dTokenInfo, dCRCTYPE>::Iterator iter (symbolList);
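		// try a Goto on every grammar symbol: a non-empty result becomes a transition, and an
		// item set not seen before becomes a new state appended to the work list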
		for (iter.Begin(); iter; iter ++) {

			dCRCTYPE symbol = iter.GetKey();
			dState* const newState = Goto (state, symbol, symbolList, ruleMap);

			if (newState->GetCount()) {
				const dTokenInfo& tokenInfo = iter.GetNode()->GetInfo();
				dTransition& transition = state->m_transitions.Append()->GetInfo();
				transition.m_symbol = symbol;
				transition.m_name = tokenInfo.m_name; 
				transition.m_type = tokenInfo.m_type;
				dAssert (transition.m_symbol == dCRC64(transition.m_name.GetStr()));
				transition.m_targetState = newState;

				dTree<dState*, dCRCTYPE>::dTreeNode* const targetStateNode = stateMap.Find(newState->GetKey());
				if (!targetStateNode) {
					newState->m_number = stateNumber;

					stateNumber ++;
					stateMap.Insert(newState, newState->GetKey());
					newState->Trace(debugFile);
					stateList.Append(newState);

					operatorPrecedence.SaveLastOperationSymbol (newState);

				} else {
					transition.m_targetState = targetStateNode->GetInfo();
					delete newState;
				}
			} else {
				delete newState;
			}
		}

		dTrace (("state#:%d   items: %d   transitions: %d\n", state->m_number, state->GetCount(), state->m_transitions.GetCount()));
	}
}
dParserCompiler::dToken dParserCompiler::ScanGrammarRule(
	dParserLexical& lexical, 
	dProductionRule& rules, 
	dTree<dTokenInfo, dCRCTYPE>& symbolList, 
	int& ruleNumber,
	int& tokenEnumeration,
	const dOperatorsPrecedence& operatorPrecedence)
{
	dRuleInfo* currentRule = &rules.GetLast()->GetInfo();
	dToken token = dToken(lexical.NextToken());
	do {
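		// scan one sentence (alternative) of the rule, collecting tokens until a ';' or '|' is found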
		
		dList<dTokenStringPair> ruleTokens;
		for (token = dToken(lexical.NextToken()); !((token == SIMICOLOM) || (token == OR)); token = dToken(lexical.NextToken())) {
			dAssert (token != -1);

			dTokenStringPair& pair = ruleTokens.Append()->GetInfo();
			pair.m_token = token;
			pair.m_info = lexical.GetTokenString();
		}
		
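		// a trailing semantic action is stored as the rule's action code and excluded from the symbol list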
		dList<dTokenStringPair>::dListNode* lastNode = ruleTokens.GetLast();
		if (lastNode) {
			if (lastNode->GetInfo().m_token != SEMANTIC_ACTION) {
				lastNode = NULL;
			} else {
				currentRule->m_semanticActionCode = lastNode->GetInfo().m_info;
			}
		}
		for (dList<dTokenStringPair>::dListNode* node = ruleTokens.GetFirst(); node != lastNode; node = node->GetNext()) {
			dTokenStringPair& pair = node->GetInfo();

			if (pair.m_token == LITERAL) {
				dSymbol& symbol = currentRule->Append()->GetInfo();
				symbol.m_token = pair.m_token;
				symbol.m_name = pair.m_info;
				symbol.m_nameCRC = dCRC64 (symbol.m_name.GetStr());

				dTree<dTokenInfo, dCRCTYPE>::dTreeNode* symbolNode = symbolList.Find(symbol.m_nameCRC);
				if (!symbolNode) {
					symbolNode = symbolList.Insert(dTokenInfo (tokenEnumeration, NONTERMINAL, symbol.m_name), symbol.m_nameCRC);
					tokenEnumeration ++;
				}
				symbol.m_type = symbolNode->GetInfo().m_type;

			} else if (pair.m_token < 256) {
				dAssert (pair.m_info.Size() == 1);
				dSymbol& symbol = currentRule->Append()->GetInfo();
				symbol.m_name = pair.m_info;
				symbol.m_nameCRC = dCRC64 (symbol.m_name.GetStr());

				symbol.m_type = TERMINAL;
				symbol.m_token = LITERAL;
				symbolList.Insert(dTokenInfo (pair.m_token, TERMINAL, symbol.m_name), symbol.m_nameCRC);

			} else if (pair.m_token == PREC) {
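				// %prec: the next token names an operator; walk the rule backwards and override the
				// precedence of the last symbol that has an operator association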
				node = node->GetNext();
				for (dRuleInfo::dListNode* ruleNode = currentRule->GetLast(); ruleNode; ruleNode = ruleNode->GetPrev()) {
					dSymbol& symbol = ruleNode->GetInfo();
					if (operatorPrecedence.FindAssociation (symbol.m_nameCRC)) {
						dTokenStringPair& pair = node->GetInfo();		
						symbol.m_operatorPrecendeceOverright = pair.m_info;
						break;
					}
				}
//			} else if (pair.m_token != SEMANTIC_ACTION) {
//				// no user action allowed in the middle of a sentence
//				_ASSERTE (pair.m_token == SEMANTIC_ACTION);
//			} else {
//				_ASSERTE (0);
			}
		}

		if (token == OR) {
			// this rule has multiple alternative sentences; add a new rule with the same nonterminal name
			dRuleInfo& rule = rules.Append()->GetInfo();
			rule.m_ruleNumber = ruleNumber;
			ruleNumber ++;
			rule.m_ruleId = currentRule->m_ruleId;
			rule.m_token = currentRule->m_token;
			rule.m_type = NONTERMINAL;
			//rule.m_name += currentRule->m_name;
			rule.m_name = currentRule->m_name;
			rule.m_nameCRC = currentRule->m_nameCRC;
			currentRule = &rule;
		}

	} while (token != SIMICOLOM);

	return token;
}
void dParserCompiler::ScanGrammarFile(
	const dString& inputRules, 
	dProductionRule& ruleList, 
	dTree<dTokenInfo, dCRCTYPE>& symbolList, 
	dOperatorsPrecedence& operatorPrecedence,
	dString& userCodeBlock,
	dString& userVariableClass,
	dString& endUserCode,
	int& lastTokenEnum)
{
	dString startSymbol ("");
	int tokenEnumeration = 256;
	int operatorPrecedencePriority = 0;

	dParserLexical lexical (inputRules.GetStr());
	LoadTemplateFile("dParserUserVariableTemplate_cpp.txt", userVariableClass);

	// scan the definition segment
	for (dToken token = dToken(lexical.NextToken()); token != GRAMMAR_SEGMENT; ) {
		switch (int (token)) 
		{
			case START:
			{
				token = dToken(lexical.NextToken());
				startSymbol = lexical.GetTokenString();
				token = dToken(lexical.NextToken());
				break;
			}

			case TOKEN:
			{
				for (token = dToken(lexical.NextToken()); token == LITERAL; token = dToken(lexical.NextToken())) {
					const char* const name = lexical.GetTokenString();
					symbolList.Insert(dTokenInfo (tokenEnumeration, TERMINAL, name), dCRC64 (name));
					tokenEnumeration ++;
				}
				break;
			}

			case LEFT:
			case RIGHT:
			{
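				// %left / %right: start a new associativity group (groups declared later get a larger
				// priority value) and collect the operator tokens that belong to it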
				dOperatorsAssociation& association = operatorPrecedence.Append()->GetInfo();
				association.m_prioprity = operatorPrecedencePriority;
				operatorPrecedencePriority ++;
				switch (int (token))
				{
					case LEFT:
						association.m_associativity = dOperatorsAssociation::m_left;
						break;
					case RIGHT:
						association.m_associativity = dOperatorsAssociation::m_right;
						break;
				}

				for (token = dToken(lexical.NextToken()); (token == LITERAL) || ((token < 256) && !isalnum (token)); token = dToken(lexical.NextToken())) {
					association.Append(dCRC64 (lexical.GetTokenString()));
				}
				break;
			}


			case UNION:
			{
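				// %union: the following semantic action block supplies the body of the user
				// variable class; the enclosing braces are stripped here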
				token = dToken(lexical.NextToken());
				dAssert (token == SEMANTIC_ACTION);
				userVariableClass = lexical.GetTokenString() + 1;
				userVariableClass.Replace(userVariableClass.Size() - 1, 1, "");
				token = dToken(lexical.NextToken());
				break;
			}


			case CODE_BLOCK:
			{
				userCodeBlock += lexical.GetTokenString();
				token = dToken(lexical.NextToken());
				break;
			}

			case EXPECT:
			{
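				// %expect: the number of shift/reduce conflicts the grammar is expected to produce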
				token = dToken(lexical.NextToken());
				dAssert (token == INTEGER);
				m_shiftReduceExpectedWarnings = atoi (lexical.GetTokenString());
				token = dToken(lexical.NextToken());
				break;
			}

			default:
			{
				dAssert (0);
				token = dToken(lexical.NextToken());
			}
		}
	}


	int ruleNumber = 1;
	lastTokenEnum = tokenEnumeration;

	// scan the production rules segment
	dToken token1 = dToken(lexical.NextToken());
	for (; (token1 != GRAMMAR_SEGMENT) && (token1 != -1); token1 = dToken(lexical.NextToken())) {
		//dTrace (("%s\n", lexical.GetTokenString()));
		switch (int (token1)) 
		{
			case LITERAL:
			{
				// a literal here names a nonterminal; append a new rule headed by it
				dRuleInfo& rule = ruleList.Append()->GetInfo();
				rule.m_token = token1;
				rule.m_type = NONTERMINAL;
				rule.m_name = lexical.GetTokenString();
				rule.m_nameCRC = dCRC64 (lexical.GetTokenString());

				dTree<dTokenInfo, dCRCTYPE>::dTreeNode* nonTerminalIdNode = symbolList.Find(rule.m_nameCRC);
				if (!nonTerminalIdNode) {
					nonTerminalIdNode = symbolList.Insert(dTokenInfo (tokenEnumeration, NONTERMINAL, rule.m_name), rule.m_nameCRC);
					tokenEnumeration ++;
				}
				rule.m_ruleId = nonTerminalIdNode->GetInfo().m_tokenId;
				rule.m_ruleNumber = ruleNumber;
				ruleNumber ++;

				token1 = ScanGrammarRule(lexical, ruleList, symbolList, ruleNumber, tokenEnumeration, operatorPrecedence); 
				break;
			}
			default:
				dAssert (0);
		}
	}

	dProductionRule::dListNode* firstRuleNode = ruleList.GetFirst();
	if (startSymbol != "") {
		firstRuleNode = ruleList.Find (dCRC64 (startSymbol.GetStr()));
	}

	// augment the grammar with a new start rule (startName__ -> startName)
	const dRuleInfo& firstRule = firstRuleNode->GetInfo();

	dRuleInfo& rule = ruleList.Addtop()->GetInfo();
	rule.m_ruleNumber = 0;
	rule.m_ruleId = tokenEnumeration;
	rule.m_token = firstRule.m_token;
	rule.m_type = NONTERMINAL;
	rule.m_name = firstRule.m_name + dString("__");
	rule.m_nameCRC = dCRC64 (rule.m_name.GetStr());
	symbolList.Insert(dTokenInfo (tokenEnumeration, rule.m_type, rule.m_name), rule.m_nameCRC);
	tokenEnumeration ++;
	
	dSymbol& symbol = rule.Append()->GetInfo();
	symbol.m_token = firstRule.m_token;
	symbol.m_type = firstRule.m_type;
	symbol.m_name = firstRule.m_name;
	symbol.m_nameCRC = firstRule.m_nameCRC;

	// scan the user code that follows the closing grammar segment marker
	if (token1 == GRAMMAR_SEGMENT) {
		endUserCode = lexical.GetNextBuffer();
		//endUserCode += "\n";
	}
}