/////////////////////////////// // Tokenizer-Utils void TTokenizerUtil::Sentencize(const PSIn& SIn, TStrV& Sentences, const bool& SplitNewLineP) { TChA SentenceBuf; int c; while (!SIn->Eof()) { c = SIn->GetCh(); switch (c) { case '\r': case '\n': { if (!SplitNewLineP) { SentenceBuf += ' '; break; } } case '"' : case '.' : case '!' : case ':' : case ';' : case '?' : case '\t': { if (SentenceBuf.Len() > 2) { Sentences.Add(SentenceBuf); SentenceBuf.Clr(); } break; } default: SentenceBuf += c; break; } } if (SentenceBuf.Len() > 0) { Sentences.Add(SentenceBuf); } }
// Returns the first character of the HTTP input stream.
// On an already-drained stream it returns 0 once; a second read at
// end-of-file raises an unexpected-EOF exception.
char THttpLx::GetFirstCh(){
  if (!SIn->Eof()){
    Ch = SIn->GetCh();
    return Ch;
  }
  // stream exhausted: tolerate the first EOF hit, fail on the next
  if (AtEof){
    throw THttpEx(heUnexpectedEof);
  }
  AtEof = true;
  return 0;
}
// Writes the entire contents of SIn to the random-access file while
// computing the data's checksum, returned through Cs.
void TFRnd::PutSIn(const PSIn& SIn, TCs& Cs){
  const int BufLen = SIn->Len();
  char* Buf = new char[BufLen];
  // read everything into a temporary buffer, checksum it, then write it out
  SIn->GetBf(Buf, BufLen);
  Cs = TCs::GetCsFromBf(Buf, BufLen);
  PutBf(Buf, BufLen);
  delete[] Buf;
}
// Advances to the next character of the HTTP input.
// The previous character is first appended to SfMem (the memory of consumed
// input). Pushed-back (eof-flag, char) pairs on the EofChPrS stack take
// precedence over the underlying stream. At end-of-stream a NullCh is
// returned once; a second read past EOF throws heUnexpectedEof.
char THttpLx::GetCh(){
  if (EofChPrS.Empty()){
    if (SIn->Eof()){
      // real end of stream: tolerate one EOF read, fail on the next
      if (AtEof){throw THttpEx(heUnexpectedEof);}
      AtEof=true;
      SfMem+=Ch;
      Ch=TCh::NullCh;
      return Ch;
    } else {
      SfMem+=Ch;
      Ch=SIn->GetCh();
      return Ch;
    }
  } else {
    // serve a previously pushed-back (eof-flag, character) pair
    SfMem+=Ch;
    AtEof=EofChPrS.Top().Val1;
    Ch=EofChPrS.Top().Val2;
    EofChPrS.Pop();
    return Ch;
  }
}
// Loads the next compact document from the stream into CpDoc.
// Returns false (and sets CpDoc to NULL) once the stream is exhausted.
bool TCpDoc::FNextCpd(const PSIn& SIn, PCpDoc& CpDoc){
  if (!SIn->Eof()){
    CpDoc = TCpDoc::Load(*SIn);
    return true;
  }
  CpDoc = NULL;
  return false;
}
/////////////////////////////// // Tokenizer-Html-Unicode void TTokenizerHtmlUnicode::GetTokens(const PSIn& SIn, TStrV& TokenV) const { TStr LineStr; TStrV WordStrV; while (SIn->GetNextLn(LineStr)) { TStr SimpleText = TUStr(LineStr).GetStarterLowerCaseStr(); TTokenizerHtml::GetTokens(TStrIn::New(SimpleText), TokenV); } }
// Loads the word-normalization table from a lexicon stream.
// Expected line format: "NormalizedWord" : "Inflected1" "Inflected2" ...
// Every inflected word is mapped to its normalized form in WordStrToNrH;
// a duplicate inflected word is reported on stdout and the newer mapping
// overwrites the older one.
void TNmObjBs::LoadNrWordBs(const PSIn& SIn){
  if (SIn.Empty()){return;}
  TILx Lx(SIn, TFSet(iloCmtAlw, iloRetEoln, iloExcept));
  // traverse lines
  Lx.GetSym(syQStr, syEoln, syEof);
  while (Lx.Sym!=syEof){
    if (Lx.Sym==syQStr){
      // get normalized word
      TStr NrWordStr=Lx.Str;
      // get inflected words: quoted strings follow the colon until eoln
      Lx.GetSym(syColon);
      Lx.GetSym(syQStr, syEoln);
      while (Lx.Sym!=syEoln){
        // get inflected word
        TStr WordStr=Lx.Str;
        // report if the inflected word was already normalized earlier
        if (WordStrToNrH.IsKey(WordStr)){
          printf("Word already normalized (%s)", WordStr.CStr());}
        // add inflected word and corresponding normalized word
        WordStrToNrH.AddDat(WordStr, NrWordStr);
        //printf("'%s' ->'%s'\n", WordStr.CStr(), NrWordStr.CStr());
        Lx.GetSym(syQStr, syEoln);
      }
      Lx.GetSym(syQStr, syEoln, syEof);
    } else if (Lx.Sym==syEoln){
      // empty line
      Lx.GetSym(syQStr, syEoln, syEof);
    } else {
      Fail;
    }
  }
}
// Decodes a WBMP variable-length unsigned integer: big-endian groups of
// 7 value bits per byte, with the high bit set on every byte except the last.
uint TWbmp::GetMultiByteInt(const PSIn& SIn){
  uint Val=0;
  TB8Set ByteBits;
  while (true) {
    ByteBits = uchar(SIn->GetCh());
    Val = Val*128 + ByteBits.GetInt(0, 6); // append the low 7 bits
    if (!ByteBits.In(7)) { break; }        // high bit clear => last byte
  }
  return Val;
}
// Sends the full contents of SIn over the socket identified by SockId.
// The data is copied into a freshly malloc'ed buffer owned by the write
// request; presumably the OnWrite callback releases the request and buffer
// on completion — confirm against OnWrite. If the write cannot be queued,
// both allocations are freed here and an exception is thrown.
void TSockSys::Send(const uint64& SockId, const PSIn& SIn) {
  // make sure it's a valid socket
  IAssert(IsSock(SockId));
  uv_tcp_t* SockHnd = SockIdToHndH.GetDat(SockId);
  // create write request
  uv_write_req_t* WriteHnd = (uv_write_req_t*)malloc(sizeof(uv_write_req_t));
  // copy the data in the buffer
  WriteHnd->Buffer.len = SIn->Len(); //TODO: handle cases when SIn doesn't have known Len()
  WriteHnd->Buffer.base = (char*)malloc(WriteHnd->Buffer.len);
  SIn->GetBf(WriteHnd->Buffer.base, WriteHnd->Buffer.len);
  // execute the request
  int ResCd = uv_write((uv_write_t*)WriteHnd, (uv_stream_t*)SockHnd,
    &WriteHnd->Buffer, 1, OnWrite);
  // check for errors
  if (ResCd != 0) {
    // cleanup first: the request never reached libuv, so we own the memory
    free(WriteHnd->Buffer.base);
    free(WriteHnd);
    // and throw exception
    throw TExcept::New("SockSys.Send: Error sending data: " + SockSys.GetLastErr());
  }
}
// Loads a WBMP (Wireless Bitmap) image from the stream.
// Layout read here: TypeField (must be 0), FixHeaderField (bit 7 set means
// an extension field follows, which is read and discarded), Width, Height,
// then the bitmap rows packed 8 pixels per byte, most significant bit first;
// each row restarts on a fresh byte.
PWbmp TWbmp::LoadWbmp(const PSIn& SIn){
  // read header
  uint TypeField=GetMultiByteInt(SIn);
  if (TypeField!=0){TExcept::Throw("Invalid WBMP TypeField.");}
  TB8Set FixHeaderField=uchar(SIn->GetCh());
  if (FixHeaderField.In(7)){
    GetMultiByteInt(SIn);} // ExtFields: present but ignored
  int Width=GetMultiByteInt(SIn);
  int Height=GetMultiByteInt(SIn);
  // create wbmp
  PWbmp Wbmp=TWbmp::New(Width, Height);
  // read & fill bitmap
  for (int Y=0; Y<Height; Y++){
    int X=0; TB8Set BSet;
    while (X<Width){
      // fetch the next packed byte at every 8th pixel
      if (X%8==0){BSet=uchar(SIn->GetCh());}
      // bit 7 holds the leftmost pixel of the byte
      Wbmp->PutPxVal(X, Y, BSet.In(7-X%8));
      X++;
    }
  }
  return Wbmp;
}
void TTokenizerUtil::Paragraphize(const PSIn& SIn, TStrV& Paragraphs) { TChA ParagraphBuf; int c; bool wasSpace = false; while (!SIn->Eof()) { c = SIn->GetCh(); // two consecutive spaces signal a new paragraph if (c == ' ' || c == '\t' || c == '\n') { if (wasSpace) { Paragraphs.Add(ParagraphBuf); ParagraphBuf.Clr(); continue; } wasSpace = true; } else { wasSpace = false; } ParagraphBuf += c; } if (ParagraphBuf.Len() > 0) { Paragraphs.Add(ParagraphBuf); } }
// Loads named-object type definitions.
// Each line holds one or more quoted phrases (aliases of the same named
// object; the first is the canonical form) mixed with bare identifiers that
// name attributes. Every phrase on the line receives the union of the line's
// attributes; unless 'ignore' is present, 'defined' is added. All later
// phrases are registered as aliases of the first phrase.
// NOTE(review): the empty-line branch requests only syQStr/syEoln/syEof
// while the initial GetSym also allows syIdStr — with iloExcept a line
// starting with a bare identifier right after an empty line would throw;
// confirm whether such input is legal.
void TNmObjBs::LoadNmObjTypeBs(const PSIn& SIn){
  if (SIn.Empty()){return;}
  TILx Lx(SIn, TFSet(iloCmtAlw, iloRetEoln, iloExcept));
  // traverse lines
  Lx.GetSym(syQStr, syIdStr, syEoln, syEof);
  while (Lx.Sym!=syEof){
    if ((Lx.Sym==syQStr)||(Lx.Sym==syIdStr)){
      TVec<TStrV> NmObjWordStrVV;
      TB32Set NmObjAttrSet;
      // consume every phrase/attribute token on the line
      while ((Lx.Sym==syQStr)||(Lx.Sym==syIdStr)){
        if (Lx.Sym==syQStr){
          // named-object word-string: split phrase into its words
          TStr WordStrVStr=Lx.Str;
          TStrV WordStrV;
          WordStrVStr.SplitOnWs(WordStrV);
          NmObjWordStrVV.Add(WordStrV);
        } else if (Lx.Sym==syIdStr){
          // named-object attribute
          TNmObjAttr NmObjAttr=TNmObjBs::GetNmObjTypeFromStr(Lx.Str);
          NmObjAttrSet.Incl(NmObjAttr);
        } else {
          Fail;
        }
        Lx.GetSym(syQStr, syIdStr, syEoln, syEof);
      }
      // assign 'defined' attribute if 'not ignore'
      if (!NmObjAttrSet.In(noaIgnore)){
        NmObjAttrSet.Incl(noaDefined);}
      // assign attribute-sets to word-vectors
      for (int NmObjN=0; NmObjN<NmObjWordStrVV.Len(); NmObjN++){
        WordStrVToNmObjAttrSetH.AddDat(NmObjWordStrVV[NmObjN])|=NmObjAttrSet;
      }
      // assign aliases: later phrases map to the first (canonical) phrase
      {for (int NmObjN=1; NmObjN<NmObjWordStrVV.Len(); NmObjN++){
        NmObjWordStrVToNrH.AddDat(NmObjWordStrVV[NmObjN], NmObjWordStrVV[0]);
      }}
      // the eoln/eof symbol was already fetched by the inner loop above
    } else if (Lx.Sym==syEoln){
      // empty line
      Lx.GetSym(syQStr, syEoln, syEof);
    } else {
      Fail;
    }
  }
}
/////////////////////////////// // Tokenizer-Simple void TTokenizerSimple::GetTokens(const PSIn& SIn, TStrV& TokenV) const { TStr LineStr; TStrV WordStrV; while (SIn->GetNextLn(LineStr)) { WordStrV.Clr(false); LineStr.SplitOnAllAnyCh(" .,!?\n\r()+=-{}[]%$#@\\/", WordStrV, true); for (int WordStrN = 0; WordStrN < WordStrV.Len(); WordStrN++) { const TStr& WordStr = WordStrV[WordStrN]; const TStr UcStr = WordStr.GetUc(); if (SwSet.Empty() || (!SwSet->IsIn(UcStr))) { TStr TokenStr = ToUcP ? UcStr : WordStr; if (!Stemmer.Empty()) { TokenStr = Stemmer->GetStem(TokenStr); } TokenV.Add(TokenStr); } } } }
void TNmObjBs::LoadCustSwSet(const PSIn& SIn){ if (SIn.Empty()){return;} TILx Lx(SIn, TFSet(iloCmtAlw, iloRetEoln, iloExcept)); // traverse lines Lx.GetSym(syLn, syEof); while (Lx.Sym!=syEof){ // get stop-phrase string TStr WordStrVStr=Lx.Str; // split phrase to words TStrV WordStrV; WordStrVStr.SplitOnWs(WordStrV); if (!WordStrV.Empty()){ // define phrase as stop-word WordStrVToNmObjAttrSetH.AddDat(WordStrV).Incl(noaIgnore); } // get next symbol Lx.GetSym(syLn, syEof); } }
bool TReplaySrv::ReplayStream(const PSIn& SIn, const PNotify& ErrorNotify) { while (!SIn->Eof()) { try { THttpReqSerInfo ReqInfo(*SIn); PHttpRq HttpRq = ReqInfo.GetHttpRq(); ReplayHttpRq(HttpRq); } catch (PExcept E) { ErrorNotify->OnNotifyFmt(ntErr, "TReplaySrv::ReplayStream. Exception while loading next request: %s", E->GetMsgStr().CStr()); } catch (...) { ErrorNotify->OnNotifyFmt(ntErr, "TReplaySrv::ReplayStream. General exception while loading next request."); } } return true; }
// Builds a bag-of-words document base from a compact-documents (Cpd) stream.
// An n-gram base is built first when MxNGramLen/MnNGramFq request more than
// plain unigrams. MxDocs==-1 means "no document limit".
// Fix: removed the local 'TIntH DocWIdToFqH(100)' that was never used.
// NOTE(review): the limit check 'Docs>=MxDocs' fires after Docs++ but before
// the document is added, so at most MxDocs-1 documents end up in the base —
// confirm whether this off-by-one is intended before changing it.
PBowDocBs TBowFl::LoadCpdTxt(
 const PSIn& CpdSIn, const int& MxDocs,
 const TStr& SwSetTypeNm, const TStr& StemmerTypeNm,
 const int& MxNGramLen, const int& MnNGramFq){
  // prepare stop-words
  PSwSet SwSet=TSwSet::GetSwSet(SwSetTypeNm);
  // prepare stemmer
  PStemmer Stemmer=TStemmer::GetStemmer(StemmerTypeNm);
  // create ngrams
  PNGramBs NGramBs;
  if (!((MxNGramLen==1)&&(MnNGramFq==1))){
    NGramBs=TNGramBs::GetNGramBsFromCpd(
     CpdSIn, MxDocs, MxNGramLen, MnNGramFq, SwSet, Stemmer);
  }
  // create document-base
  PBowDocBs BowDocBs=TBowDocBs::New(SwSet, Stemmer, NGramBs);
  // traverse documents
  printf("Load Bag-Of-Words from Compact-Documents ...\n");
  // rewind: the n-gram pass above may have consumed the stream
  CpdSIn->Reset();
  PSIn CpDocSIn=TCpDoc::FFirstCpd(CpdSIn);
  PCpDoc CpDoc; int Docs=0;
  while (TCpDoc::FNextCpd(CpDocSIn, CpDoc)){
    Docs++;
    if (Docs%100==0){printf(" %d\r", Docs);}
    if ((MxDocs!=-1)&&(Docs>=MxDocs)){break;}
    // get document-name
    TStr DocNm=CpDoc->GetDocNm();
    // get document-categories
    TStrV CatNmV;
    for (int CatN=0; CatN<CpDoc->GetCats(); CatN++){
      CatNmV.Add(CpDoc->GetCatNm(CatN));
    }
    // get document-contents
    TStr DocStr=CpDoc->GetTxtStr();
    TStr DateStr=CpDoc->GetDateStr();
    // add document to bow
    int DId=BowDocBs->AddHtmlDoc(DocNm, CatNmV, DocStr, false);
    BowDocBs->PutDateStr(DId, DateStr);
  }
  printf(" %d\nDone.\n", Docs);
  // return results
  BowDocBs->AssertOk();
  return BowDocBs;
}
// Stores the blob read from SIn into the blob base and returns its location.
// A free slot of suitable size is reused when available; otherwise the blob
// is appended at the end of the file. The on-disk record layout written here:
// begin-tag | MxBfL | state | BfL | data | zero padding | checksum | end-tag.
// NOTE(review): when no free slot exists and FLen>MxSegLen, nothing is
// written and an empty TBlobPt is returned — confirm callers handle that.
TBlobPt TGBlobBs::PutBlob(const PSIn& SIn){
  EAssert((Access==faCreate)||(Access==faUpdate)||(Access==faRestore));
  int BfL=SIn->Len();
  // pick the allocation bucket (capacity and free-list index) for this size
  int MxBfL; int FFreeBlobPtN;
  GetAllocInfo(BfL, BlockLenV, MxBfL, FFreeBlobPtN);
  TBlobPt BlobPt; TCs Cs;
  if (FFreeBlobPtV[FFreeBlobPtN].Empty()){
    // no free slot of this size: append a new record at the end of the file
    int FLen=FBlobBs->GetFLen();
    if (FLen<=MxSegLen){
      EAssert(FLen<=MxBlobFLen);
      BlobPt=TBlobPt(FLen);
      FBlobBs->SetFPos(BlobPt.GetAddr());
      PutBlobTag(FBlobBs, btBegin);
      FBlobBs->PutInt(MxBfL);
      PutBlobState(FBlobBs, bsActive);
      FBlobBs->PutInt(BfL);
      FBlobBs->PutSIn(SIn, Cs);
      FBlobBs->PutCh(TCh::NullCh, MxBfL-BfL); // pad to the bucket capacity
      FBlobBs->PutCs(Cs);
      PutBlobTag(FBlobBs, btEnd);
    }
  } else {
    // reuse the head of the free list for this bucket
    BlobPt=FFreeBlobPtV[FFreeBlobPtN];
    FBlobBs->SetFPos(BlobPt.GetAddr());
    AssertBlobTag(FBlobBs, btBegin);
    int MxBfL=FBlobBs->GetInt(); // record capacity; shadows the bucket MxBfL above
    int FPos=FBlobBs->GetFPos();
    AssertBlobState(FBlobBs, bsFree);
    // the free record stores the address of the next free slot: unlink it
    FFreeBlobPtV[FFreeBlobPtN]=TBlobPt::LoadAddr(FBlobBs);
    FBlobBs->SetFPos(FPos);
    PutBlobState(FBlobBs, bsActive);
    FBlobBs->PutInt(BfL);
    FBlobBs->PutSIn(SIn, Cs);
    FBlobBs->PutCh(TCh::NullCh, MxBfL-BfL); // pad to the record capacity
    FBlobBs->PutCs(Cs);
    AssertBlobTag(FBlobBs, btEnd);
  }
  FBlobBs->Flush();
  return BlobPt;
}
// Overwrites the blob at BlobPt with the contents of SIn.
// If the new data does not fit into the record's capacity (MxBfL), the old
// record is freed and the blob is stored via PutBlob(SIn) at a new location;
// the (possibly different) blob pointer is returned.
TBlobPt TGBlobBs::PutBlob(const TBlobPt& BlobPt, const PSIn& SIn){
  EAssert((Access==faCreate)||(Access==faUpdate)||(Access==faRestore));
  int BfL=SIn->Len();
  FBlobBs->SetFPos(BlobPt.GetAddr());
  AssertBlobTag(FBlobBs, btBegin);
  int MxBfL=FBlobBs->GetInt();
  AssertBlobState(FBlobBs, bsActive);
  if (BfL>MxBfL){
    // too large for the existing slot: free it and relocate
    DelBlob(BlobPt);
    return PutBlob(SIn);
  } else {
    // fits in place: rewrite length, data, padding, and checksum
    TCs Cs;
    FBlobBs->PutInt(BfL);
    FBlobBs->PutSIn(SIn, Cs);
    FBlobBs->PutCh(TCh::NullCh, MxBfL-BfL); // pad to the slot capacity
    FBlobBs->PutCs(Cs);
    PutBlobTag(FBlobBs, btEnd);
    FBlobBs->Flush();
    return BlobPt;
  }
}
// Loads a translation corpus from up to three parallel text files:
// original sentences, machine translations, and reference translations.
// Any of the file names may be empty, in which case that part is skipped.
// Reading stops as soon as the shortest provided file is exhausted.
// Fix: if all three file names were empty, the forever-loop below had no
// break condition and spun indefinitely; an empty corpus is now returned.
PTransCorpus TTransCorpus::LoadTxt(const TStr& InOrgFNm, const TStr& InTransFNm,
        const TStr& InRefTransFNm) {

    // open files (empty name => no stream)
    PSIn OrgSIn = !InOrgFNm.Empty() ? TFIn::New(InOrgFNm) : PSIn();
    PSIn TransSIn = !InTransFNm.Empty() ? TFIn::New(InTransFNm) : PSIn();
    PSIn RefTransSIn = !InRefTransFNm.Empty() ? TFIn::New(InRefTransFNm) : PSIn();
    // check which are given
    const bool IsOrgP = !OrgSIn.Empty();
    const bool IsTransP = !TransSIn.Empty();
    const bool IsRefTransP = !RefTransSIn.Empty();
    // print warnings
    if (!IsOrgP) { printf("No original sentences!\n"); }
    if (!IsTransP) { printf("No machine translation sentences!\n"); }
    if (!IsRefTransP) { printf("No reference translation sentences!\n"); }
    // traverse the files and add sentences
    PTransCorpus TransCorpus = TTransCorpus::New();
    // guard: with no input at all the loop below would never terminate
    if (!IsOrgP && !IsTransP && !IsRefTransP) { return TransCorpus; }
    TLnRet OrgLnRet(OrgSIn), TransLnRet(TransSIn), RefTransLnRet(RefTransSIn);
    TStr OrgLnStr, TransLnStr, RefTransLnStr;
    int LnN = 1;
    forever {
        // try to read next line, otherwise break
        if (IsOrgP && !OrgLnRet.NextLn(OrgLnStr)) { break; }
        if (IsTransP && !TransLnRet.NextLn(TransLnStr)) { break; }
        if (IsRefTransP && !RefTransLnRet.NextLn(RefTransLnStr)) { break; }
        // print progress
        if (LnN % 100 == 0) { printf(" %7d Sentences\r", LnN); }
        // add sentence and translation(s) to the corpus
        if (!IsOrgP) {
            TransCorpus->AddSentenceNoOrg(LnN, TransLnStr, RefTransLnStr);
        } else if (!IsTransP) {
            TransCorpus->AddSentenceNoTrans(LnN, OrgLnStr, RefTransLnStr);
        } else {
            IAssert(IsRefTransP);
            TransCorpus->AddSentence(LnN, OrgLnStr, TransLnStr, RefTransLnStr);
        }
        // next sentence :-)
        LnN++;
    }
    printf("\n");
    // finish
    return TransCorpus;
}
// Number of characters still available to the lexer: pushed-back
// characters on the stack plus whatever remains in the underlying stream.
int Len(){return EofChPrS.Len()+SIn->Len();}
// Consumes the remainder of the input into SfMem.
// First drains any pushed-back characters through GetCh() (while the stream
// itself still has data), appends the current character, then bulk-loads
// everything left in the stream.
// NOTE(review): the drain loop stops as soon as the stream hits EOF even if
// pushed-back characters remain — confirm that is the intended behavior.
void THttpLx::GetRest(){
  // drain pushed-back characters via the normal GetCh() path
  while ((!SIn->Eof())&&(!EofChPrS.Empty())){GetCh();}
  if (!SIn->Eof()){SfMem+=Ch;}
  // bulk-append the untouched remainder of the stream
  TMem RestMem;
  TMem::LoadMem(SIn, RestMem);
  SfMem+=RestMem;
}
PBowDocBs TFtrGenBs::LoadCsv(TStr& FNm, const int& ClassId, const TIntV& IgnoreIdV, const int& TrainLen) { // feature generators PFtrGenBs FtrGenBs = TFtrGenBs::New(); // CSV parsing stuff PSIn SIn = TFIn::New(FNm); char SsCh = ' '; TStrV FldValV; // read the headers and initialise the feature generators TSs::LoadTxtFldV(ssfCommaSep, SIn, SsCh, FldValV, false); for (int FldValN = 0; FldValN < FldValV.Len(); FldValN++) { const TStr& FldVal = FldValV[FldValN]; if (FldValN == ClassId) { if (FldVal == "NOM") { FtrGenBs->PutClsFtrGen(TFtrGenNominal::New()); } else if (FldVal == "MULTI-NOM") { FtrGenBs->PutClsFtrGen(TFtrGenMultiNom::New()); } else { TExcept::Throw("Wrong class type '" + FldVal + "', should be NOM or MULTI-NOM!"); } } else if (!IgnoreIdV.IsIn(FldValN)) { if (FldVal == TFtrGenNumeric::GetType()) { FtrGenBs->AddFtrGen(TFtrGenNumeric::New()); } else if (FldVal == TFtrGenNominal::GetType()) { FtrGenBs->AddFtrGen(TFtrGenNominal::New()); } else if (FldVal == TFtrGenToken::GetType()) { FtrGenBs->AddFtrGen(TFtrGenToken::New( TSwSet::New(swstNone), TStemmer::New(stmtNone))); } else if (FldVal == TFtrGenSparseNumeric::GetType()) { FtrGenBs->AddFtrGen(TFtrGenSparseNumeric::New()); } else if (FldVal == TFtrGenMultiNom::GetType()) { FtrGenBs->AddFtrGen(TFtrGenMultiNom::New()); } else { TExcept::Throw("Wrong type '" + FldVal + "'!"); } } } const int Flds = FldValV.Len(); // read the lines and feed them to the feature generators int Recs = 0; while (!SIn->Eof()) { if (Recs == TrainLen) { break; } Recs++; printf("%7d\r", Recs); TSs::LoadTxtFldV(ssfCommaSep, SIn, SsCh, FldValV, false); // make sure line still has the same number of fields as the header EAssertR(FldValV.Len() == Flds, TStr::Fmt("Wrong number of fields in line %d! 
Found %d and expected %d!", Recs + 1, FldValV.Len(), Flds)); // go over lines try { TStrV FtrValV; for (int FldValN = 0; FldValN < FldValV.Len(); FldValN++) { const TStr& FldVal = FldValV[FldValN]; if (FldValN == ClassId) { FtrGenBs->UpdateCls(FldVal); } else if (!IgnoreIdV.IsIn(FldValN)) { FtrValV.Add(FldVal); } } FtrGenBs->Update(FtrValV); } catch (PExcept Ex) { TExcept::Throw(TStr::Fmt("Error in line %d: '%s'!", Recs+1, Ex->GetMsgStr().CStr())); } } // read the file again and feed it to the training set PBowDocBs BowDocBs = FtrGenBs->MakeBowDocBs(); // we read and ignore the headers since we parsed them already SIn = TFIn::New(FNm); SsCh = ' '; TSs::LoadTxtFldV(ssfCommaSep, SIn, SsCh, FldValV, false); // read the lines and feed them to the training set Recs = 0; while (!SIn->Eof()){ Recs++; printf("%7d\r", Recs); TSs::LoadTxtFldV(ssfCommaSep, SIn, SsCh, FldValV, false); // make sure line still has the same number of fields as the header EAssertR(FldValV.Len() == Flds, TStr::Fmt("Wrong number of fields in line %s! Found %d and expected %d!", Recs + 1, FldValV.Len(), Flds)); // go over lines and construct the sparse vector TStrV FtrValV; TStr ClsFtrVal; try { for (int FldValN = 0; FldValN < FldValV.Len(); FldValN++) { const TStr& FldVal = FldValV[FldValN]; if (FldValN == ClassId) { ClsFtrVal = FldVal; } else if (!IgnoreIdV.IsIn(FldValN)) { FtrValV.Add(FldVal); } } } catch (PExcept Ex) { TExcept::Throw(TStr::Fmt("Error in line %d: '%s'!", Recs+1, Ex->GetMsgStr().CStr())); } // add the feature vector to trainsets FtrGenBs->AddBowDoc(BowDocBs, TStr::Fmt("Line-%d", Recs), FtrValV, ClsFtrVal); } // prepare training and testing doc ids TIntV AllDIdV; BowDocBs->GetAllDIdV(AllDIdV); IAssert(AllDIdV.IsSorted()); TIntV TrainDIdV = AllDIdV; TrainDIdV.Trunc(TrainLen); BowDocBs->PutTrainDIdV(TrainDIdV); TIntV TestDIdV = AllDIdV; TestDIdV.Minus(TrainDIdV); BowDocBs->PutTestDIdV(TestDIdV); return BowDocBs; }
// Executes the application-server function for the given request fields and
// sends the HTTP response over the request's socket.
// Success: the result is serialized per the function's output type (XML,
// JSON, or a raw stream) into a 200 response. A known exception (PExcept)
// produces a 500 response carrying its message and location; any other
// exception produces a generic 500 error body.
void TSAppSrvFun::Exec(const TStrKdV& FldNmValPrV, const PSAppSrvRqEnv& RqEnv) {
    const PNotify& Notify = RqEnv->GetWebSrv()->GetNotify();
    PHttpResp HttpResp;
    try {
        // log the call
        if (NotifyOnRequest)
            Notify->OnStatus(TStr::Fmt("RequestStart %s", FunNm.CStr()));
        TTmStopWatch StopWatch(true);
        // execute the actual function, according to the type
        PSIn BodySIn; TStr ContTypeVal;
        if (GetFunOutType() == saotXml) {
            PXmlDoc ResXmlDoc = ExecXml(FldNmValPrV, RqEnv);
            TStr ResXmlStr; ResXmlDoc->SaveStr(ResXmlStr);
            BodySIn = TMIn::New(XmlHdStr + ResXmlStr);
            ContTypeVal = THttp::TextXmlFldVal;
        } else if (GetFunOutType() == saotJSon) {
            TStr ResStr = ExecJSon(FldNmValPrV, RqEnv);
            BodySIn = TMIn::New(ResStr);
            ContTypeVal = THttp::AppJSonFldVal;
        } else {
            // raw stream output; ExecSIn also fills in the content type
            BodySIn = ExecSIn(FldNmValPrV, RqEnv, ContTypeVal);
        }
        if (ReportResponseSize)
            Notify->OnStatusFmt("Response size: %.1f KB",
                BodySIn->Len() / (double) TInt::Kilo);
        // log finish of the call
        if (NotifyOnRequest)
            Notify->OnStatus(TStr::Fmt("RequestFinish %s [request took %d ms]",
                FunNm.CStr(), StopWatch.GetMSecInt()));
        // prepare response
        HttpResp = THttpResp::New(THttp::OkStatusCd, ContTypeVal, false, BodySIn);
    } catch (PExcept Except) {
        // known internal error: report message and location in the chosen format
        Notify->OnStatusFmt("Exception: %s", Except->GetMsgStr().CStr());
        Notify->OnStatusFmt("Location: %s", Except->GetLocStr().CStr());
        TStr ResStr, ContTypeVal = THttp::TextPlainFldVal;
        if (GetFunOutType() == saotXml) {
            PXmlTok TopTok = TXmlTok::New("error");
            TopTok->AddSubTok(TXmlTok::New("message", Except->GetMsgStr()));
            TopTok->AddSubTok(TXmlTok::New("location", Except->GetLocStr()));
            PXmlDoc ErrorXmlDoc = TXmlDoc::New(TopTok);
            ResStr = XmlHdStr + ErrorXmlDoc->SaveStr();
            ContTypeVal = THttp::TextXmlFldVal;
        } else if (GetFunOutType() == saotJSon) {
            PJsonVal ResVal = TJsonVal::NewObj();
            ResVal->AddToObj("message", Except->GetMsgStr());
            ResVal->AddToObj("location", Except->GetLocStr());
            ResStr = TJsonVal::NewObj("error", ResVal)->SaveStr();
            ContTypeVal = THttp::AppJSonFldVal;
        }
        // prepare response
        HttpResp = THttpResp::New(THttp::InternalErrStatusCd, ContTypeVal,
            false, TMIn::New(ResStr));
    } catch (...) {
        // unknown internal error: generic error body in the chosen format
        TStr ResStr, ContTypeVal = THttp::TextPlainFldVal;
        if (GetFunOutType() == saotXml) {
            PXmlDoc ErrorXmlDoc = TXmlDoc::New(TXmlTok::New("error"));
            ResStr = XmlHdStr + ErrorXmlDoc->SaveStr();
            ContTypeVal = THttp::TextXmlFldVal;
        } else if (GetFunOutType() == saotJSon) {
            ResStr = TJsonVal::NewObj("error", "Unknown")->SaveStr();
            ContTypeVal = THttp::AppJSonFldVal;
        }
        // prepare response
        HttpResp = THttpResp::New(THttp::InternalErrStatusCd, ContTypeVal,
            false, TMIn::New(ResStr));
    }
    if (LogRqToFile)
        LogReqRes(FldNmValPrV, HttpResp);
    // send response
    RqEnv->GetWebSrv()->SendHttpResp(RqEnv->GetSockId(), HttpResp);
}