Code Example #1
File: Globals.cpp  Project: lishunan246/GKC
bool _InitGlobals() throw()
{
	CallResult cr;

	//mutex
	cr = GET_SA_GLOBAL_VARIABLE(g_mutex).Init();
	if( cr.IsFailed() )
		return false;

	//crt
	IMemoryManager* pMgr = get_crt_mem_mgr();

	//spb
	GET_SA_GLOBAL_VARIABLE(g_spb_pool).SetMemoryManager(pMgr);

	//sab
	GET_SA_GLOBAL_VARIABLE(g_sab_pool).SetMemoryManager(pMgr);

	//scb
	GET_SA_GLOBAL_VARIABLE(g_scb_pool).SetMemoryManager(pMgr);

	return true;
}
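A minimal, hedged usage sketch for the initializer above (the StartModule function name and the "Globals.h" header are hypothetical, not part of the GKC listing): a caller only needs to check the boolean result, since the failure details stay inside _InitGlobals().

// Hypothetical caller; only _InitGlobals() comes from the listing above.
#include "Globals.h"  // assumed header that declares _InitGlobals()

bool StartModule() throw()
{
	// Abort startup if the global mutex or any of the pools fails to initialize.
	if( !_InitGlobals() )
		return false;
	//... rest of module startup ...
	return true;
}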
Code Example #2
CallResult _Generate_Lexer_Tables(const ShareCom<ITextStream>& sp, TokenTable& tokenTable, FsaTable& fsaTable)
{
	CallResult cr;

	//lexer
	_LdfLexerParser lexer;
	cr = lexer.Initialize();  //may throw
	if( cr.IsFailed() )
		return cr;
	//actions
	{
		ShareCom<_ILexerAction> spMacroToken;
		cr = _Create_MacroTokenAction(spMacroToken);
		if( cr.IsFailed() )
			return cr;
		lexer.SetAction(DECLARE_TEMP_CONST_STRING(ConstStringA, "TK_MACRO"), spMacroToken);  //may throw
		lexer.SetAction(DECLARE_TEMP_CONST_STRING(ConstStringA, "TK_TOKEN"), spMacroToken);  //may throw
	} //end block
	//stream
	lexer.SetStream(sp);

	//grammar
	_LdfGrammarParser grammar;
	grammar.SetLexerParser(lexer.GetLexerParser());
	//nonterminal
	TokenTable nt_table;
	uint uID = 101;
	nt_table.InsertToken(DECLARE_TEMP_CONST_STRING(ConstStringA, "lex_def"), uID ++);  //may throw
	nt_table.InsertToken(DECLARE_TEMP_CONST_STRING(ConstStringA, "rule_block"), uID ++);  //may throw
	nt_table.InsertToken(DECLARE_TEMP_CONST_STRING(ConstStringA, "id"), uID ++);  //may throw
	nt_table.Finish();  //may throw
	grammar.SetNonterminalTable(RefPtr<TokenTable>(nt_table));
	//reduction action table for lex file
	TokenTable ra_table;
	uID = 1;
	ra_table.InsertToken(DECLARE_TEMP_CONST_STRING(ConstStringA, "do_ref"), uID ++);  //may throw
	ra_table.InsertToken(DECLARE_TEMP_CONST_STRING(ConstStringA, "do_rule_block_id"), uID ++);  //may throw
	ra_table.InsertToken(DECLARE_TEMP_CONST_STRING(ConstStringA, "do_rule_id"), uID ++);  //may throw
	ra_table.InsertToken(DECLARE_TEMP_CONST_STRING(ConstStringA, "do_id_token"), uID ++);  //may throw
	ra_table.InsertToken(DECLARE_TEMP_CONST_STRING(ConstStringA, "do_id_macro"), uID ++);  //may throw
	ra_table.Finish();  //may throw
	grammar.SetReductionActionTable(RefPtr<TokenTable>(ra_table));
	//pda table
	grammar.SetPdaTable(LdfLexPdaTraits::GetTable());
	//factory
	{
		ShareCom<IComFactory> spCF;
		_BasicSymbolDataFactory_Create(spCF, cr);
		if( cr.IsFailed() )
			return cr;
		grammar.SetFactory(DECLARE_TEMP_CONST_STRING(ConstStringA, "TK_SEP"), spCF);  //may throw
		grammar.SetFactory(DECLARE_TEMP_CONST_STRING(ConstStringA, "TK_TOKEN"), spCF);  //may throw
		grammar.SetFactory(DECLARE_TEMP_CONST_STRING(ConstStringA, "TK_MACRO"), spCF);  //may throw
		grammar.SetFactory(DECLARE_TEMP_CONST_STRING(ConstStringA, "lex_def"), spCF);  //may throw
		grammar.SetFactory(DECLARE_TEMP_CONST_STRING(ConstStringA, "rule_block"), spCF);  //may throw
		grammar.SetFactory(DECLARE_TEMP_CONST_STRING(ConstStringA, "id"), spCF);  //may throw
	} //end block
	//data
	_Lex_Data lex_data(tokenTable);
	lex_data.Init();  //may throw
	//actions
	{
		ShareCom<_IGrammarAction> spAction;
		//DoIdToken
		cr = _Create_DoIdTokenMacroAction(spAction);
		if( cr.IsFailed() )
			return cr;
		ShareCom<_I_IdTokenMacroAction_Utility> spUtility;
		_COMPONENT_INSTANCE_INTERFACE(_IGrammarAction, _I_IdTokenMacroAction_Utility, spAction, spUtility, cr);
		assert( cr.IsSucceeded() );
		spUtility.Deref().SetOutput(lex_data.GetTokenTable(), lex_data.GetTokenRegex(), lex_data.GetTokenId());
		grammar.SetAction(DECLARE_TEMP_CONST_STRING(ConstStringA, "do_id_token"), spAction);  //may throw
		//DoIdMacro
		cr = _Create_DoIdTokenMacroAction(spAction);
		if( cr.IsFailed() )
			return cr;
		_COMPONENT_INSTANCE_INTERFACE(_IGrammarAction, _I_IdTokenMacroAction_Utility, spAction, spUtility, cr);
		assert( cr.IsSucceeded() );
		spUtility.Deref().SetOutput(lex_data.GetMacroTable(), lex_data.GetMacroRegex(), lex_data.GetMacroId());
		grammar.SetAction(DECLARE_TEMP_CONST_STRING(ConstStringA, "do_id_macro"), spAction);  //may throw
	} //end block

	//execute
	cr = grammar.Execute();  //may throw
	if( cr.IsFailed() )
		return cr;

	lex_data.Finish();  //may throw
	lex_data.ExpandTokenMacros();  //may throw

	//check overflow
	{
#ifdef DEBUG
		//tt is referenced only by the assert, so keep the whole check under DEBUG
		TokenTable& tt = lex_data.GetTokenTable().Deref();
		assert( tt.GetMinID() == CPL_TK_FIRST
				&& tt.GetMaxID() < (uint)(Limits<int>::Max - 1) );  //with last NULL item
#endif
	} //end block

	//AST
	_Regex_AST rast;
	rast.Init();  //may throw
	cr = _Regex_Generate_AST(lex_data.GetTokenRegex(), rast);  //may throw
	if( cr.IsFailed() )
		return cr;

	//Generate
	_Regex_Generate_Tables(rast, fsaTable);  //may throw

	return cr;
}
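A hedged sketch of how this table generator might be driven end to end. Only the _Generate_Lexer_Tables signature is taken from the listing; the _OpenLexTextStream helper is a hypothetical stand-in for however the project obtains a ShareCom<ITextStream>, and default-constructible TokenTable/FsaTable is an assumption.

// Hypothetical driver; error propagation follows the CallResult pattern used above.
CallResult _Build_Lexer_Tables_From_File(TokenTable& tokenTable, FsaTable& fsaTable)
{
	CallResult cr;

	//stream (hypothetical helper; replace with the project's real stream factory)
	ShareCom<ITextStream> spText;
	cr = _OpenLexTextStream(spText);
	if( cr.IsFailed() )
		return cr;

	//generate the token table and FSA table from the .lex definition
	cr = _Generate_Lexer_Tables(spText, tokenTable, fsaTable);  //may throw
	return cr;
}

Per the listing, on success tokenTable holds the token definitions collected from the .lex file and fsaTable holds the state tables generated from the regex AST.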