void TMultinomial::AddFtr(const TStrV& StrV, const TFltV& FltV, TIntFltKdV& SpV) const {
    // make sure we either do not have explicit values, or their dimension matches the string keys
    EAssertR(FltV.Empty() || (StrV.Len() == FltV.Len()),
        "TMultinomial::AddFtr: string and double values not aligned");
    // generate internal feature vector
    SpV.Gen(StrV.Len(), 0);
    for (int StrN = 0; StrN < StrV.Len(); StrN++) {
        const int FtrId = FtrGen.GetFtr(StrV[StrN]);
        // only use features we've seen during updates
        if (FtrId != -1) {
            const double Val = FltV.Empty() ? 1.0 : FltV[StrN].Val;
            if (Val > 1e-16) { SpV.Add(TIntFltKd(FtrId, Val)); }
        }
    }
    SpV.Sort();
    // merge elements with the same id
    int GoodSpN = 0;
    for (int SpN = 1; SpN < SpV.Len(); SpN++) {
        if (SpV[GoodSpN].Key == SpV[SpN].Key) {
            // repetition of the previous id, sum the counts
            SpV[GoodSpN].Dat += SpV[SpN].Dat;
        } else {
            // advance the pointer to the next good position
            GoodSpN++;
            // and move the new value down to the good position
            SpV[GoodSpN] = SpV[SpN];
        }
    }
    // truncate the vector to the merged elements
    SpV.Trunc(GoodSpN + 1);
    // replace values with 1 if a binary vector is required
    if (IsBinary()) {
        for (TIntFltKd& Sp : SpV) { Sp.Dat = 1.0; }
    }
    // final normalization, if needed
    if (IsNormalize()) { TLinAlg::Normalize(SpV); }
}
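// Usage sketch (illustrative only, not part of the library): feeds repeated keys
// with explicit weights through AddFtr; the "Multinomial" instance is assumed to
// have already seen "a" and "b" during earlier updates, so FtrGen knows their ids.
void DemoMultinomialAddFtr(const TMultinomial& Multinomial) {
    TStrV StrV; StrV.Add("a"); StrV.Add("b"); StrV.Add("a");
    TFltV FltV; FltV.Add(1.0); FltV.Add(2.0); FltV.Add(3.0);
    TIntFltKdV SpV; Multinomial.AddFtr(StrV, FltV, SpV);
    // the two entries for "a" are merged, so its component carries 1.0 + 3.0 = 4.0
    // (before any binary replacement or normalization is applied)
}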
void TJsonVal::GetArrNumSpV(TIntFltKdV& NumSpV) const {
    EAssert(IsArr());
    for (int ElN = 0; ElN < GetArrVals(); ElN++) {
        PJsonVal ArrVal = GetArrVal(ElN);
        // each element must be an [index, value] pair
        EAssert(ArrVal->IsArr());
        EAssert(ArrVal->GetArrVals() == 2);
        int Idx = ArrVal->GetArrVal(0)->GetInt();
        double Val = ArrVal->GetArrVal(1)->GetNum();
        NumSpV.Add(TIntFltKd(Idx, Val));
    }
    // keep the sparse vector sorted by index
    NumSpV.Sort();
}
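// Usage sketch (illustrative only): parses a JSON array of [index, value] pairs
// into a sorted sparse vector, assuming TJsonVal::GetValFromStr as the JSON
// parser entry point.
void DemoGetArrNumSpV() {
    PJsonVal Val = TJsonVal::GetValFromStr("[[3, 0.5], [1, 2.0]]");
    TIntFltKdV NumSpV; Val->GetArrNumSpV(NumSpV);
    // NumSpV is now sorted by index: (1, 2.0), (3, 0.5)
}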
void TStrFeatureSpace::FromAddStr(const TStr& Serialized, TIntFltKdV& Vec, char Sep) {
    TStrV Toks; Serialized.SplitOnAllCh(Sep, Toks, true);
    Vec.Gen(Toks.Len());
    for (int i = 0; i < Toks.Len(); i++) {
        // each token has the form "key:value"
        TStr Key, Value; Toks[i].SplitOnCh(Key, ':', Value);
        // unknown keys are added to the feature space
        int FeatureId = GetAddId(Key);
        double FeatureWgt;
        if (Value.IsFlt(FeatureWgt)) {
            Vec[i].Key = FeatureId;
            Vec[i].Dat = FeatureWgt;
        } else {
            EFailR((Value + TStr(" is not a valid floating point number.")).CStr());
        }
    }
    Vec.Sort();
}
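// Usage sketch (illustrative only; the space-separated "key:value" format and
// the "FtrSpace" instance are assumptions): FromAddStr extends the feature space
// with unseen keys, so it suits loading training data.
void DemoFromAddStr(TStrFeatureSpace& FtrSpace) {
    TIntFltKdV Vec;
    FtrSpace.FromAddStr("word1:1.5 word2:0.5", Vec, ' ');
    // both keys now have ids in FtrSpace, and Vec holds their weights sorted by id
}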
void TStrFeatureSpace::FromStr(const TStr& Serialized, TIntFltKdV& Vec, char Sep) const {
    TStrV Toks; Serialized.SplitOnAllCh(Sep, Toks, true);
    Vec.Gen(Toks.Len(), 0);
    for (int i = 0; i < Toks.Len(); i++) {
        // each token has the form "key:value"
        TStr Key, Value; Toks[i].SplitOnCh(Key, ':', Value);
        // silently skip keys that are not part of the feature space
        TStrFSSize FeatureId;
        if (GetIfExistsId(Key, FeatureId)) {
            double FeatureWgt;
            if (Value.IsFlt(FeatureWgt)) {
                TIntFltKd& Kv = Vec[Vec.Add()];
                Kv.Key = FeatureId;
                Kv.Dat = FeatureWgt;
            } else {
                EFailR((Value + TStr(" is not a valid floating point number.")).CStr());
            }
        }
    }
    Vec.Sort();
}
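// Usage sketch (illustrative only): FromStr, by contrast, leaves the feature
// space untouched and drops tokens with unknown keys, which is useful when
// parsing test data against a frozen feature space.
void DemoFromStr(const TStrFeatureSpace& FtrSpace) {
    TIntFltKdV Vec;
    FtrSpace.FromStr("word1:1.5 unseen:9.9", Vec, ' ');
    // if "unseen" was never added, Vec contains only the entry for "word1"
}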
void TBagOfWords::AddFtr(const TStrV& TokenStrV, TIntFltKdV& SpV) const {
    // aggregate token counts
    TIntH TermFqH;
    TStrV NgramStrV;
    GenerateNgrams(TokenStrV, NgramStrV);
    for (int TokenStrN = 0; TokenStrN < NgramStrV.Len(); TokenStrN++) {
        const TStr& TokenStr = NgramStrV[TokenStrN];
        // get token ID
        const int TokenId = IsHashing() ?
            (TokenStr.GetHashTrick() % HashDim) : // hashing
            TokenSet.GetKeyId(TokenStr);          // vocabulary
        // add if known token
        if (TokenId != -1) {
            TermFqH.AddDat(TokenId)++;
        }
    }
    // make a sparse vector out of it
    SpV.Gen(TermFqH.Len(), 0);
    int KeyId = TermFqH.FFirstKeyId();
    while (TermFqH.FNextKeyId(KeyId)) {
        const int TermId = TermFqH.GetKey(KeyId);
        double TermVal = 1.0;
        if (IsTf()) { TermVal *= double(TermFqH[KeyId]); }
        if (IsIdf()) {
            if (ForgetP) {
                const double DocFq = double(DocFqV[TermId]) + OldDocFqV[TermId];
                if (DocFq > 0.1) { TermVal *= log((double(Docs) + OldDocs) / DocFq); }
            } else {
                TermVal *= log(double(Docs) / double(DocFqV[TermId]));
            }
        }
        SpV.Add(TIntFltKd(TermId, TermVal));
    }
    SpV.Sort();
    // normalize the vector if so required
    if (IsNormalize()) { TLinAlg::Normalize(SpV); }
}
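// Usage sketch (illustrative only): assumes a TBagOfWords instance "BagOfWords"
// that has been updated on a document stream, so Docs and DocFqV are populated;
// the token vector here is hypothetical.
void DemoBagOfWordsAddFtr(const TBagOfWords& BagOfWords) {
    TStrV TokenStrV;
    TokenStrV.Add("machine"); TokenStrV.Add("learning"); TokenStrV.Add("machine");
    TIntFltKdV SpV; BagOfWords.AddFtr(TokenStrV, SpV);
    // with TF and IDF enabled and no forgetting or normalization, "machine"
    // gets weight 2 * log(Docs / DocFq("machine"))
}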