// Renders `t` into `buf` as base-`base` digits (most significant first) and
// returns the number of characters written.  Throws if `len` is zero or the
// buffer is too small for the whole number.
static inline size_t Format(T t, char* buf, size_t len) {
    STATIC_ASSERT(1 < base && base < 17);

    if (!len) {
        ythrow yexception() << "zero length";
    }

    // Emit digits least-significant first.
    char* out = buf;

    do {
        *out++ = IntToChar[(ui32)SafeMod<base>(t)];
        SafeShift<base>(t);
    } while (t && --len);

    if (t) {
        ythrow yexception() << "not enough room in buffer";
    }

    const size_t written = out - buf;

    // Reverse the digits in place into most-significant-first order.
    for (--out; buf < out; ++buf, --out) {
        const char c = *buf;
        *buf = *out;
        *out = c;
    }

    return written;
}
inline void ReadDict() { if (Blob_.Size() < sizeof(ui64)) { ythrow yexception() << "too small blob"; } const char* end = (const char*)Blob_.End(); const char* ptr = end - sizeof(ui64); ui64 dictlen = 0; memcpy(&dictlen, ptr, sizeof(ui64)); dictlen = LittleToHost(dictlen); if (dictlen > Blob_.Size() - sizeof(ui64)) { ythrow yexception() << "bad blob"; } const char* beg = ptr - dictlen; TMemoryInput mi(beg, dictlen); TZLibDecompress d((IZeroCopyInput*)&mi); const ui32 count = Load<ui32>(&d); for (size_t i = 0; i < count; ++i) { TArchiveRecordDescriptorRef descr(new TArchiveRecordDescriptor(&d)); Recs_.push_back(descr); Dict_[descr->Name()] = descr; } }
// Returns the full path of the currently running executable using the
// platform-specific mechanism available on each supported OS.
static Stroka GetExecPathImpl() {
#if defined(_solaris_)
    return execname();
#elif defined(_darwin_)
    TTempBuf execNameBuf;
    // First attempt with the default buffer; on failure the kernel reports
    // the required size through bufsize, so retry once with a bigger buffer.
    for (size_t i = 0; i < 2; ++i) {
        uint32_t bufsize = (uint32_t)execNameBuf.Size();
        int r = _NSGetExecutablePath(execNameBuf.Data(), &bufsize);
        if (r == 0) {
            return execNameBuf.Data();
        } else if (r == -1) {
            // Buffer too small: bufsize now holds the needed length.
            execNameBuf = TTempBuf(bufsize);
        }
    }
    ythrow yexception() << "GetExecPathImpl() failed";
#elif defined(_win_)
    TTempBuf execNameBuf;
    // Grow the buffer until GetModuleFileName fits: a return value equal to
    // the buffer size signals truncation.
    for (;;) {
        DWORD r = GetModuleFileName(NULL, execNameBuf.Data(), execNameBuf.Size());
        if (r == execNameBuf.Size()) {
            execNameBuf = TTempBuf(execNameBuf.Size() * 2);
        } else if (r == 0) {
            ythrow yexception() << "GetExecPathImpl() failed: " << LastSystemErrorText();
        } else {
            return execNameBuf.Data();
        }
    }
#elif defined(_linux_)
    return ReadLink("/proc/" + ToString(getpid()) + "/exe");
    // TODO(yoda): check if the filename ends with " (deleted)"
#elif defined(_freebsd_)
    // Try progressively weaker heuristics: the sysctl-provided path, argv[0],
    // the "_" environment variable, then guesses based on $PWD and the cwd.
    Stroka execPath = FreeBSDGetExecPath();
    if (GoodPath(execPath)) {
        return execPath;
    }
    if (FreeBSDGuessExecPath(FreeBSDGetArgv0(), execPath)) {
        return execPath;
    }
    if (FreeBSDGuessExecPath(getenv("_"), execPath)) {
        return execPath;
    }
    if (FreeBSDGuessExecBasePath(getenv("PWD"), execPath)) {
        return execPath;
    }
    if (FreeBSDGuessExecBasePath(GetCwd(), execPath)) {
        return execPath;
    }
    ythrow yexception() << "can not resolve exec path";
#else
# error dont know how to implement GetExecPath on this platform
#endif
}
// Initializes the pthread mutex attribute object and configures it for
// recursive locking; throws on any pthread error.
inline T() {
    int result = pthread_mutexattr_init(&Attr);

    if (result != 0) {
        ythrow yexception() << "mutexattr init failed(" << LastSystemErrorText(result) << ")";
    }

    result = pthread_mutexattr_settype(&Attr, PTHREAD_MUTEX_RECURSIVE);

    if (result != 0) {
        ythrow yexception() << "mutexattr set type failed(" << LastSystemErrorText(result) << ")";
    }
}
// Splits an HTTP request line of the form "METHOD URI PROTO" into its three
// components; throws on a malformed line.
TParsedHttpRequest::TParsedHttpRequest(const TStringBuf& str) {
    TStringBuf rest;

    if (!StripLeft(str).SplitImpl(' ', Method, rest)) {
        ythrow yexception() << "bad request(" << ToString(str).Quote() << ")";
    }

    if (!StripLeft(rest).SplitImpl(' ', Request, Proto)) {
        ythrow yexception() << "bad request(" << ToString(str).Quote() << ")";
    }

    Proto = StripLeft(Proto);
}
inline TMappedBlobBase(const TMemoryMap& map, ui64 offset, size_t len) : Map_(map) { if (!Map_.IsOpen()) { ythrow yexception() << "memory map not open"; } Map_.Map(offset, len); if (len && !Map_.Ptr()) { // Ptr is 0 for blob of size 0 ythrow yexception() << "can not map(" << offset << ", " << len << ")"; } }
TImpl(const Stroka& style, const Stroka& base = "") { InitError(); TxmlDocHolder sheetDoc(xmlParseMemory(~style, +style)); if (!!base) { xmlNodeSetBase(sheetDoc->children, (xmlChar*)base.c_str()); } if (!sheetDoc) ythrow yexception() << "cannot parse xml of xslt: " << ErrorMessage; Stylesheet.Reset(xsltParseStylesheetDoc(sheetDoc.Get())); if (!Stylesheet) ythrow yexception() << "cannot parse xslt: " << ErrorMessage; // sheetDoc - ownership transferred to Stylesheet sheetDoc.Release(); }
inline TMapResult Map(i64 offset, size_t size, const char *dbg_name) { assert(File_.IsOpen()); if (offset > Length_) { ythrow yexception() << dbg_name << ": can't map something at offset " << offset << "in a file with length " << Length_; } if (offset + (i64)size > Length_) { ythrow yexception() << dbg_name << ": can't map " << (unsigned long)size << " bytes at offset " << offset << ": " << LastSystemErrorText(); } TMapResult result; i64 base = DownToGranularity(offset); result.Head = (i32)(offset - base); size += result.Head; #if defined (_win_) result.Ptr = MapViewOfFile(Mapping_, (Mode_ & oAccessMask) == oRdOnly ? FILE_MAP_READ : FILE_MAP_WRITE, HI_32(base), LO_32(base), size); #else # if defined(_unix_mmap_64_) if (Mode_ & oNotGreedy) { # endif result.Ptr = mmap((caddr_t)NULL, size, (Mode_ & oAccessMask) == oRdOnly ? PROT_READ : PROT_READ | PROT_WRITE, MAP_SHARED | MAP_NOCORE, File_.GetHandle(), base); if (result.Ptr == (char *)(-1)) { result.Ptr = 0; } # if defined(_unix_mmap_64_) } else { result.Ptr = PtrStart_ ? static_cast<caddr_t>(PtrStart_) + base : NULL; } # endif #endif if (result.Ptr != 0 || size == 0) { // allow map of size 0 result.Size = size; } else { ythrow yexception() << dbg_name << ": can't map " << (unsigned long)size << " bytes at offset " << offset << ": " << LastSystemErrorText(); } if (Mode_ & oPrecharge) { int p = PrechargeImpl(File_, result.Ptr, result.Size, 0, result.Size); UNUSED(p); } return result; }
// Registers `fn` as an XSLT extension function `uri`:`name`; throws with the
// last libxslt error message on failure.
void SetFunction(const Stroka& name, const Stroka& uri, TxmlXPathFunction fn) {
    InitError();

    const int rc = xsltRegisterExtModuleFunction(BAD_CAST ~name, BAD_CAST ~uri, (xmlXPathFunction)fn);

    if (rc < 0) {
        ythrow yexception() << "cannot register xsl function " << uri << ':' << name << ": " << ErrorMessage;
    }
}
// Fills `attrs` with the URL, title, and source derived from the file at
// index m_iCurPath under the search directory.
void CDocListRetrieverFromDisc::FillDocInfo(SDocumentAttribtes& attrs) {
    Stroka strFilePath = m_SmartFileFind.GetFoundFilePath(m_iCurPath);
    Stroka strURL;
    // The URL is the path relative to the search dir; when the found path IS
    // the search dir itself, fall back to its last path component.
    if (strFilePath == m_strSearchDir) {
        TStringBuf left, right;
        PathHelper::Split(strFilePath, left, right);
        strURL = ToString(right);
    } else
        strURL = strFilePath.substr(m_strSearchDir.size());
    if (strURL.empty())
        ythrow yexception() << "Can't build url for file \"" << strFilePath << "\" with searchdir \"" << m_strSearchDir << "\".";
    TransformBackSlash(strURL);
    attrs.m_strUrl = strURL;
    Stroka strTime;
    // In "file" mode the document time comes from the file's last-write
    // time; otherwise (or when empty) the retriever's start time is used.
    if (stroka(m_strLTM) == "file") {
        CTime lw_time = m_SmartFileFind.GetFoundFileInfo(m_iCurPath).m_LastWriteTime;
        strTime = lw_time.Format("%d.%m.%Y %H:%M:%S");
    }
    if (strTime.empty())
        strTime = m_strStartTime;
    attrs.m_strTitle = CharToWide(strTime);
    attrs.m_strSource = strURL;
    // NOTE(review): this overwrites the time-based title assigned just
    // above, so the title ends up equal to the source URL — presumably
    // intentional, but confirm before relying on it.
    attrs.m_strTitle = CharToWide(attrs.m_strSource); // ??? rewriting
}
// Runs the text-processing pipeline over `src` (truncated to MAX_SRC_SIZE
// characters): stores the source, dispatches to the HTML or plain-text
// handler by document type, and optionally analyzes sentences.
// Always returns true; throws if bAnalyze is set without parserOptions.
bool CDocBase::ProceedInternal(const TWtringBuf& src, const SDocumentAttribtes& docAttrs, const CParserOptions* parserOptions, bool bAnalyze) {
    // if caller forgot this...
    FreeData();
    // this really allocate memory only on first call
    // lazily create m_pText
    CreateTextClass(parserOptions);
    GetText()->PutAttributes(docAttrs);
    GetText()->SetParserOptions(parserOptions);
    // Cap the amount of source text copied; longer documents are truncated.
    const size_t MAX_SRC_SIZE = 2*1024*1024;
    m_Source = Wtroka(~src, (src.size() < MAX_SRC_SIZE) ? src.size() : MAX_SRC_SIZE);
    switch (m_dtyp) {
        case DocumentHtml:
            proceedHtml();
            break;
        default:
            proceedText();
    }
    if (bAnalyze) {
        if( parserOptions )
            m_pText->analyzeSentences();
        else
            ythrow yexception() << "ParserOptions are not initialized.";
    }
    return true;
}
// Maps a feature id stored in a saved model map onto the id used by the
// current features manager: ctrs are migrated (and registered if new), float
// and categorical features are resolved through the data-provider id.
// Throws if the id is not present in any of the map's sections.
ui32 NCatboostCuda::UpdateFeatureId(TBinarizedFeaturesManager& featuresManager,
                                    const TModelFeaturesMap& map,
                                    const ui32 featureId) {
    if (map.Ctrs.has(featureId)) {
        const auto& info = map.Ctrs.at(featureId);
        TCtr migrated = MigrateCtr(featuresManager, map, info.Ctr);

        if (!featuresManager.IsKnown(migrated)) {
            // Unknown ctr: register it with the borders saved in the map.
            return featuresManager.AddCtr(migrated, TVector<float>(info.Borders));
        }

        const ui32 remappedId = featuresManager.GetId(migrated);
        CB_ENSURE(info.Borders == featuresManager.GetBorders(remappedId),
                  " tensor : " << migrated.FeatureTensor << " (ctr type " << migrated.Configuration.Type << "). Error: progress borders should be consistent: " << remappedId << " / " << featureId << " " << Print(info.Borders) << " vs " << Print(featuresManager.GetBorders(remappedId)));
        return remappedId;
    }

    if (map.FloatFeatures.has(featureId)) {
        const auto& floatInfo = map.FloatFeatures.at(featureId);
        const ui32 managerId = featuresManager.GetFeatureManagerIdForFloatFeature(floatInfo.DataProviderId);
        CB_ENSURE(floatInfo.Borders == featuresManager.GetBorders(managerId),
                  "Error: progress borders should be consistent");
        return managerId;
    }

    if (map.CatFeaturesMap.has(featureId)) {
        return featuresManager.GetFeatureManagerIdForCatFeature(map.CatFeaturesMap.at(featureId));
    }

    ythrow yexception() << "Error: can't remap featureId #" << featureId;
}
// Options with parameters that can be specified several times const TVector<const char*> &Opt2::MArg(char opt, const char *help) { Opt2Param &p = GetInternal(opt, nullptr, help, false); p.MultipleUse = true; if (!p.HasArg) ythrow yexception() << "Opt2::Arg called for '" << opt << "' which is an option without argument"; return p.ActualValue; }
// Reads `length` bytes of `file` starting at `offset` into a new blob.
// Throws when `length` does not fit into size_t on this platform.
static inline TBlob ConstructFromFileContent(const TFile& file, ui64 offset, ui64 length) {
    if (length <= Max<size_t>()) {
        return ReadFromFile<TCounter>(file, offset, static_cast<size_t>(length));
    }

    ythrow yexception() << "can not read whole file(length = " << length << ")";
}
// Initializes the platform mutex object: a critical section on Windows, a
// recursive pthread mutex elsewhere.
inline TImpl() {
#if defined (_win_)
    InitializeCriticalSection(&Obj);
#else
    // Local RAII wrapper around the mutex attribute so it is destroyed even
    // when pthread_mutex_init below fails and we throw.
    struct T {
        pthread_mutexattr_t Attr;
        inline T() {
            int result;
            result = pthread_mutexattr_init(&Attr);
            if (result != 0) {
                ythrow yexception() << "mutexattr init failed(" << LastSystemErrorText(result) << ")";
            }
            // Recursive type: the owning thread may re-lock without deadlock.
            result = pthread_mutexattr_settype(&Attr, PTHREAD_MUTEX_RECURSIVE);
            if (result != 0) {
                ythrow yexception() << "mutexattr set type failed(" << LastSystemErrorText(result) << ")";
            }
        }
        inline ~T() throw () {
            int result = pthread_mutexattr_destroy(&Attr);
            VERIFY(result == 0, "mutexattr destroy(%s)", LastSystemErrorText(result));
        }
    } pma;
    int result = pthread_mutex_init(&Obj, &pma.Attr);
    if (result != 0) {
        ythrow yexception() << "mutex init failed(" << LastSystemErrorText(result) << ")";
    }
#endif
}
// Restores the serialized kw-type index: reads the list of type names and
// rebuilds mLoadIndex by resolving each one against the registered
// descriptors.  An empty name maps to a null descriptor; an unknown name
// means the grammar files are stale and raises an exception.
void TKWTypePool::Load(TInputStream* buffer) {
    yvector<Stroka> names;
    ::Load(buffer, names);

    static TAtomic lock;
    TGuard<TAtomic> guard(lock);

    // rebuild mLoadIndex
    mLoadIndex.clear();

    for (size_t i = 0; i < names.size(); ++i) {
        const Stroka& name = names[i];

        if (name.empty()) {
            static const TDescriptor* null_descriptor = NULL;
            mLoadIndex.push_back(null_descriptor);
            continue;
        }

        // all loaded names should be among registred descriptors
        yhash<Stroka, const TDescriptor*>::const_iterator found = mRegisteredTypes.find(name);

        if (found == mRegisteredTypes.end())
            ythrow yexception() << "Unknown kw-type " << name << " is found. "
                                << "You should re-compile your grammar files.";

        mLoadIndex.push_back(found->second);
    }
}
// Looks up a kw-type descriptor by name; throws when it was not registered.
const TKWTypePool::TDescriptor* TKWTypePool::RequireMessageType(const Stroka& name) const {
    const TDescriptor* descriptor = FindMessageTypeByName(name);

    if (descriptor != NULL)
        return descriptor;

    ythrow yexception() << "KW-type \"" << name << "\" is not registered.";
}
// Builds a "Quote" fact (subject as fio, quote text, subject/object flag),
// wraps it in a CFactsWS and hands ownership to the sentence's multiword
// creator.  Returns false when the subject or quote value cannot be built.
bool CQuotesFinder::AddQuoteDBFact(SWordHomonymNum Subj, CSentenceRusProcessor* pSent, const Wtroka& QuoteStr, SLeadInfo LeadInfo, bool bSubject) {
    CFactFields factFields("Quote");
    factFields.m_LeadInfo = LeadInfo;

    if (!CreateQuoteSubjectAsFio(Subj, pSent, factFields))
        return false;

    if (!CreateQuoteValue(QuoteStr, factFields))
        return false;

    CBoolWS bWs(bSubject);
    bWs.SetPair(0,0);
    factFields.AddValue(CFactFields::IsSubject, bWs);

    // BUGFIX: validate the source word sequence BEFORE allocating factWS;
    // previously the throw below leaked the freshly new-ed CFactsWS.
    if (!pSent->m_Words[Subj].GetSourceWordSequence())
        ythrow yexception() << "CQuotesFinder::AddQuoteDBFact: no pSent->m_Words[Subj].GetSourceWordSequence() ";

    CFactsWS* factWS = new CFactsWS();
    factWS->SetPair(*pSent->m_Words[Subj].GetSourceWordSequence());
    factWS->PutArticle(m_MainArticle);
    factWS->AddFact(factFields);
    factWS->SetMainWord(Subj);

    // Ownership of factWS passes to the multiword creator.
    pSent->GetMultiWordCreator().TakeMultiWord(factWS);

    return true;
}
// Forwards the write to the underlying implementation; writing to an
// already-finished stream is an error.
void TBufferedOutput::DoWrite(const void* data, size_t len) {
    if (!Impl_.Get()) {
        ythrow yexception() << "can not write to finished stream";
    }

    Impl_->Write(data, len);
}
// Resolves a charset by name, throwing instead of returning CODES_UNKNOWN.
ECharset CCommonParm::ParseEncoding(const Stroka& encodingName) const {
    const ECharset enc = CharsetByName(encodingName.c_str());

    if (enc != CODES_UNKNOWN)
        return enc;

    ythrow yexception() << "Unknown encoding: \"" << encodingName << "\"";
}
// Returns a blob sharing this blob's storage, restricted to [begin, end).
// Throws when the range is inverted or extends past the end of the blob.
TBlob TBlob::SubBlob(size_t begin, size_t end) const {
    const bool rangeOk = begin <= end && end <= Length();

    if (!rangeOk) {
        ythrow yexception() << "incorrect subblob (" << begin << ", " << end << ", outer length = " << Length() << ")";
    }

    return TBlob(Begin() + begin, end - begin, S_.Base);
}
// Writes the single character '1' or '0' into buf and reports one byte used;
// throws when the buffer has zero length.
size_t ToStringImpl<bool>(bool t, char* buf, size_t len) {
    if (len == 0) {
        ythrow yexception() << "zero length";
    }

    buf[0] = t ? '1' : '0';

    return 1;
}
// Parses the footer of a chunked-data blob.  Two on-disk layouts exist: the
// legacy one, where the trailing ui32 is a non-zero chunk count preceded by
// that many ui32 offsets, and the current one, where the trailing ui32 is 0
// and the footer is made of ui64 fields (version, count, offsets, lengths)
// read backwards from the end of the blob.
TChunkedDataReader::TChunkedDataReader(const TBlob& blob)
    : Version(0)
    , Size(0)
{
    const char* cdata = blob.AsCharPtr();
    const size_t size = blob.Size();
    if (size < 4)
        ythrow yexception() << "empty file with chunks";
    // The last 4 bytes discriminate the format: non-zero => legacy count.
    ui32 last = ((ui32*)(cdata + size))[-1];
    if (last != 0) { // old version file
        ui32* aux = (ui32*)(cdata + size);
        ui32 count = last;
        // Payload size excludes the footer (count offsets + the count word).
        Size = size - (count + 1) * sizeof(ui32);
        aux -= (count + 1);
        ReadAux<ui32>(cdata, aux, count, &Offsets);
        return;
    }
    // New format: 64-bit footer fields, addressed backwards from the end.
    ui64* aux = (ui64*)(cdata + size);
    Version = aux[-2];
    VERIFY(Version > 0, "Invalid version");
    ui64 count = aux[-3];
    // Step back over the three trailing fields plus the offsets table.
    aux -= (count + 3);
    ReadAux<ui64>(cdata, aux, count, &Offsets);
    // The per-chunk lengths table precedes the offsets table.
    aux -= count;
    Lengths.resize(count);
    for (size_t i = 0; i < count; ++i) {
        Lengths[i] = IntegerCast<size_t>(aux[i]);
    }
}
// Passes the data through to the compressor implementation; writing after
// the stream has been finished is an error.
void TLzmaCompress::DoWrite(const void* buf, size_t len) {
    if (Impl_) {
        Impl_->Write(buf, len);
    } else {
        ythrow yexception() << "can not write to finished lzma stream";
    }
}
// Adds a named entry, read fully from `src`, to the archive being written;
// the archive must not have been closed yet.
void TArchiveWriter::Add(const Stroka& key, TInputStream* src) {
    if (Impl_.Get()) {
        Impl_->Add(key, src);
    } else {
        ythrow yexception() << "archive already closed";
    }
}
// Creates a builtin file record; a non-empty base name is required and is
// registered as the first alias.
TFile::TFile(const Stroka& name)
    : GeneratedDescriptor(NULL)
{
    if (!name)
        ythrow yexception() << "NBuiltin::TFile's base name cannot be empty";

    AddAlias(name);
}
// Returns the name of the n-th record; throws on an out-of-range index.
inline Stroka KeyByIndex(size_t n) const {
    if (n >= Count()) {
        ythrow yexception() << "incorrect index";
    }

    return Recs_[n]->Name();
}
void CSitFactInterpretation::FillFactField(const fact_field_reference_t& fact_field, const SWordHomonymNum& value, yvector<CFactFields>& newFacts) { if (fact_field.m_bHasValue && ((fact_field.m_Field_type == TextField) || (fact_field.m_Field_type == BoolField))) { FillConstFieldValue(fact_field, newFacts, value); return; } CWordSequence* pWS = m_Words[value].GetSourceWordSequence(); if (pWS == NULL) ythrow yexception() << "Bad wordsequence in \"CSitFactInterpretation::FillFactField\""; CFactsWS* factWS = dynamic_cast<CFactsWS*>(pWS); if (factWS != NULL) FillFactFieldFromFactWS(fact_field, factWS , newFacts); else switch (fact_field.m_Field_type) { case FioField: FillFioFactField(fact_field, pWS, newFacts, value); break; case TextField: FillTextFactField(fact_field, pWS, newFacts); break; case DateField: FillDateFactField(fact_field, pWS, newFacts); break; default: break; } }
// Resolves a symbol from the loaded library; the library must be loaded.
void* TDynamicLibrary::Sym(const char* name) {
    if (!IsLoaded()) {
        ythrow yexception() << "library not loaded";
    }

    return Impl_->Sym(name);
}
void CParserOptions::InitFromProtobuf(const Stroka& name) { TTextMinerConfig c; if (!NProtoConf::LoadFromFile(name, c)) ythrow yexception() << "Cannot read the config from \"" << name << "\"."; InitFromConfigObject(c); }