lem::Path lem::sqlite_get_absolute_filepath( const lem::UFString & connection_string )
{
    lem::Path sqlite_file;

    if( connection_string.eq_begi(L"path=") || connection_string.eq_begi(L"file=") )
    {
        lem::UFString db_path, db_file, basedir;

        // The connection string is a ';'-separated list of key=value parameters.
        lem::Collect<UFString> params;
        lem::parse( connection_string, params, L";" );

        for( lem::Container::size_type i=0; i<params.size(); ++i )
        {
            const lem::UFString & param = params[i];
            if( param.eq_begi(L"path=") )
                db_path = param.c_str()+5; // skip the "path=" prefix
            else if( param.eq_begi(L"file=") )
                db_file = param.c_str()+5; // skip the "file=" prefix
        }

        if( !db_path.empty() )
        {
            // An explicit path= parameter wins.
            sqlite_file = lem::Path(db_path);
        }
        else
        {
            // No path= given: resolve file= against the base directory.
            // basedir is never assigned here, so the result is effectively
            // the file= value as a relative path.
            sqlite_file = lem::Path(basedir);
            sqlite_file.ConcateLeaf(db_file);
        }
    }
    else
    {
        // The whole connection string is treated as a plain file path.
        sqlite_file = lem::Path(connection_string);
    }

    return sqlite_file;
}
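// Illustrative sketch (not part of the original sources): how the three
// connection-string forms above are resolved. The paths are hypothetical.
//
//   // Explicit path= wins over file=:
//   lem::Path p1 = lem::sqlite_get_absolute_filepath( lem::UFString(L"path=/data/dictionary.db") );
//
//   // file= alone is concatenated onto the empty base directory,
//   // i.e. it stays a relative path:
//   lem::Path p2 = lem::sqlite_get_absolute_filepath( lem::UFString(L"file=dictionary.db") );
//
//   // Anything not starting with path= or file= is taken verbatim:
//   lem::Path p3 = lem::sqlite_get_absolute_filepath( lem::UFString(L"/data/dictionary.db") );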
static bool IsHtmlClosed( const lem::UFString &tag )
{
    // Lazily initialize the list of void (self-closing) HTML tags.
    if( tags1.empty() )
    {
        const wchar_t* stags[] = { L"br", L"hr", L"link", L"meta", L"img", L"input", NULL };
        int i = 0;
        while( stags[i] != NULL )
            tags1.push_back( lem::UFString(stags[i++]) );
    }

    for( lem::Container::size_type i=0; i<tags1.size(); ++i )
    {
        const lem::UFString &t = tags1[i];

        // Match either the bare tag name or the name followed by attributes
        // (i.e. a space right after the name).
        if( tag.eq_begi(t) && ( tag.length()==t.length() || tag[ t.length() ]==L' ' ) )
            return true;
    }

    return false;
}
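// Illustrative sketch (not part of the original sources): IsHtmlClosed accepts
// a void tag both as a bare name and with attributes after the name.
//
//   IsHtmlClosed( lem::UFString(L"br") );                   // true
//   IsHtmlClosed( lem::UFString(L"img src=\"logo.png\"") ); // true: "img" + space
//   IsHtmlClosed( lem::UFString(L"input") );                // true
//   IsHtmlClosed( lem::UFString(L"div") );                  // false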
bool SyntaxShell::TryCommand( const lem::UFString &_str )
{
    LEM_CHECKIT_Z( !_str.empty() );

    if( _str==L"#help" || _str==L"?" )
    {
        ShowHelp();
        return true;
    }

    if( _str.front()!=L'#' )
        return false;

    if( _str.eq_beg( L"# " ) )
        return true; // a comment line

    if( _str.eq_beg( L"#timeout" ) )
    {
        // toks[0] is the command itself, toks[1] its numeric argument.
        lem::MCollect<UCString> toks;
        lem::parse( _str, toks, false );
        MaxTimeout = lem::to_int( toks[1] );
        return true;
    }

    if( _str.eq_beg( L"#maxalt" ) )
    {
        lem::MCollect<UCString> toks;
        lem::parse( _str, toks, false );
        MaxAlt = lem::to_int( toks[1] );
        lem::mout->printf( "MaxAlt=%d\n", MaxAlt );
        return true;
    }

    if( _str.eq_beg( L"#maxskiptoken" ) )
    {
        lem::MCollect<UCString> toks;
        lem::parse( _str, toks, false );
        MaxSkipToken = lem::to_int( toks[1] );
        lem::mout->printf( "MaxSkipToken=%d\n", MaxSkipToken );

        if( MaxSkipToken>0 )
            CompleteAnalysisOnly = false;

        if( MaxAlt==0 || MaxAlt==lem::int_max )
        {
            lem::mout->printf( "Attention: it is highly recommended to use %vfE#maxalt%vn NNN in order to limit the search tree depth\n" );
        }

        return true;
    }

    if( _str.eq_beg( L"#sem" ) )
    {
        lem::MCollect<UCString> toks;
        lem::parse( _str, toks, false );
        FindFacts = lem::to_bool( toks[1] );
        return true;
    }

    if( _str.eqi( L"#info" ) )
    {
        ShowDictionaryInfo();
        return true;
    }

    if( _str.eqi( L"#disconnect" ) )
    {
        sol_id.Delete();
        lem::mout->printf( "Dictionary database is disconnected.\n" );
        return true;
    }

    if( _str.eqi( L"#connect" ) )
    {
        LoadDictionary();
        return true;
    }

    if( _str.eq_begi( L"#tag" ) )
    {
        if( _str==L"#tag-" )
        {
            // Reset the previously installed tag filter.
            tags_ptr.Delete();
            tags.clear();
            return true;
        }

        // Skip the "#tag" prefix and split the rest as name=value.
        lem::Collect<lem::UFString> toks;
        lem::parse( UFString(_str.c_str()+4), toks, L"=" );

        UCString tag_name, tag_value;
        if( toks.size()>0 )
            tag_name = toks[0].c_str();
        if( toks.size()>1 )
            tag_value = toks[1].c_str();

        tag_name.trim();
        tag_value.trim();

        const int itag = sol_id->GetSynGram().Get_Net().FindTag(tag_name);
        if( itag==UNKNOWN )
        {
            lem::mout->printf( "Tag [%vfE%us%vn] not found\n", tag_name.c_str() );
            return true;
        }

        const ThesaurusTag &tt = sol_id->GetSynGram().Get_Net().GetTagDefs()[itag];
        if( tt.CountValues()>0 )
        {
            int ivalue = tt[tag_value];
            if( ivalue==UNKNOWN )
            {
                lem::mout->printf( "Tag value [%vfE%us%vn] not found\n", tag_value.c_str() );
                return true;
            }
        }

        tags_ptr = new TF_TagOrNullFilter( *sol_id, tag_name, tag_value );
        return true;
    }

    if( _str.eq_begi( L"#param" ) )
    {
        if( _str==L"#param-" )
        {
            // Clear the parameter list.
            params.clear();
            return true;
        }

        // Skip the "#param " prefix and split the rest as name=value.
        lem::Collect<lem::UFString> toks;
        lem::parse( UFString(_str.c_str()+7), toks, L"=" );

        UCString param_name, param_value;
        if( toks.size()>0 )
            param_name = toks[0].c_str();
        if( toks.size()>1 )
            param_value = toks[1].c_str();

        param_name.trim();
        param_value.trim();

        params.push_back( std::make_pair( param_name, param_value ) );
        return true;
    }

    // Strip the leading '#' and dispatch on the bare command name.
    lem::UFString str = lem::right( _str, _str.length()-1 );

    lem::zbool ret;

    if( str==L"debug" )
    {
        SetDebug(true);
        ret = true;
    }
    else if( str==L"nodebug" )
    {
        SetDebug(false);
        ret = true;
    }
    else if( str==L"traceon" )
    {
        SetDebug(true);
        traceon = true;
        if( debugger.NotNull() ) // guard added for symmetry with the traceoff branch
            debugger->Trace(true);
        ret = true;
    }
    else if( str==L"traceoff" )
    {
        traceon = false;
        if( debugger.NotNull() )
            debugger->Trace(false); // was Trace(true), which left tracing enabled
        ret = true;
    }
    else if( str==L"fuzzyon" )
    {
        allow_fuzzy = true;
        mout->printf( "Fuzzy projection is now %vfAON%vn\n" );
        ret = true;
    }
    else if( str==L"fuzzyoff" )
    {
        allow_fuzzy = false;
        mout->printf( "Fuzzy projection is now %vfDOFF%vn\n" );
        ret = true;
    }
    else if( str==L"disable_filters" ) // wide literals, consistent with the other comparisons
    {
        EnableFilters = false;
        ret = true;
    }
    else if( str==L"enable_filters" )
    {
        EnableFilters = true;
        ret = true;
    }
    else if( str==L"schedule1" )
    {
        CompleteAnalysisOnly = true;
        UseTopDownThenSparse = true;
        mout->printf( "Workflow=%vfATOP-DOWN, TOP-DOWN INCOMPLETE%vn\n" );
        ret = true;
    }
    else if( str==L"topdown" )
    {
        UseTopDownThenSparse = false;
        CompleteAnalysisOnly = true;
        mout->printf( "%vfAtop-down%vn analyzer is activated\n" );
        ret = true;
    }
    else if( str==L"allow_incomplete" )
    {
        CompleteAnalysisOnly = false;
        mout->printf( "Incomplete analysis is %vfAALLOWED%vn\n" );
        ret = true;
    }
    else if( str==L"disallow_incomplete" )
    {
        CompleteAnalysisOnly = true;
        mout->printf( "Incomplete analysis is %vfDDISALLOWED%vn\n" );
        ret = true;
    }
    else if( str==L"allow_reco" )
    {
        UseReconstructor = true;
        mout->printf( "Token reconstructor is %vfAALLOWED%vn\n" );
        ret = true;
    }
    else if( str==L"disallow_reco" )
    {
        UseReconstructor = false;
        mout->printf( "Token reconstructor is %vfDDISALLOWED%vn\n" );
        ret = true;
    }
    else if( str==L"allow_model" )
    {
        if( sol_id->GetLexAuto().GetModel().GetSequenceLabeler().IsAvailable() ||
            sol_id->GetLexAuto().GetModel().GetClassifier().IsAvailable() )
        {
            ApplyModel = true;
            mout->printf( "Morphology model is enabled\n" );
        }
        else
        {
            mout->printf( "Morphology model is not available\n" );
        }
        ret = true;
    }
    else if( str==L"disallow_model" )
    {
        ApplyModel = false;
        mout->printf( "Morphology model is disabled\n" );
        ret = true;
    }
    else if( str==L"show" )
    {
        if( current_analysis.NotNull() )
        {
            const Res_Pack &pack = current_analysis->GetPack();
            mout->printf( "\nResult pack contains %vfE%d%vn variators:\n", pack.vars().size() );

            if( run_mode==MorphologyMode )
            {
                for( lem::Container::size_type i=0; i<pack.vars().size(); i++ )
                {
                    const Variator * var = pack.vars()[i];
                    for( lem::Container::size_type k=0; k<var->size(); ++k )
                    {
                        const Tree_Node & root = var->get(k);
                        mout->printf( "%d: ", CastSizeToInt(k) );
                        root.Print( *lem::mout, sol_id->GetSynGram(), -1, true );
                        mout->eol();
                    }
                    mout->eol();
                    mout->eol();
                }
            }
            else
            {
                for( lem::Container::size_type i=0; i<pack.vars().size(); i++ )
                {
                    pack.vars()[i]->PrintV( *mout, sol_id->GetSynGram(), true );
                    mout->eol();
                    mout->eol();
                }
            }
        }
        ret = true;
    }
    else if( str==L"tree" )
    {
        if( current_analysis.NotNull() )
        {
            // The unused local Res_Pack reference has been dropped.
            Solarix::print_syntax_tree( current_analysis->GetString(), current_analysis->GetPack(),
                                        *sol_id, *lem::mout, false, true );
        }
        ret = true;
    }
    else if( str.eq_beg( L"recog" ) )
    {
        if( current_analysis.NotNull() )
        {
            lem::mout->eol();
            current_analysis->GetLexer().PrintRecognitions( *lem::mout );
        }
        return true;
    }
    else if( str==L"tokenize" )
    {
        SetMode(TokenizerMode);
        ret = true;
    }
    else if( str==L"lemmatize" )
    {
        SetMode(LemmatizerMode);
        ret = true;
    }
    else if( str==L"speak" )
    {
        SetMode(SpeakerMode);
        ret = true;
    }
    else if( str==L"syntax" )
    {
        SetMode(SyntaxMode);
        ret = true;
    }
    else if( str==L"morphology" )
    {
        SetMode(MorphologyMode);
        ret = true;
    }
    else if( str==L"debugger" )
    {
        if( debugger.NotNull() )
            debugger->ManageBreakpoints();
        ret = true;
    }
    else
    {
        lem::mout->printf( "Invalid command %vfC%us%vn\n", str.c_str() );
        ret = true;
    }

    return ret;
}
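// Illustrative driver sketch (not part of the original sources): TryCommand is
// meant to be probed first in a read-eval loop. The names read_input_line and
// shell below are hypothetical.
//
//   lem::UFString line = read_input_line();
//   if( !line.empty() && shell.TryCommand(line) )
//   {
//       // The line was a shell directive such as #maxalt or #tag and has
//       // already been handled; do not feed it to the analyzer.
//   }
//   else
//   {
//       // Ordinary input: analyze it in the currently selected run mode.
//   }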