// on a segment
void featureMapping(MixtureServer & ms, FeatureServer & fs, Seg * seg, Config &config) {
  unsigned long begin=seg->begin()+fs.getFirstFeatureIndexOfASource(seg->sourceName()); // Idx of the first frame of the current file in the feature server
  fs.seekFeature(begin);
  Feature f;
  for (unsigned long idxFrame=0;idxFrame<seg->length();idxFrame++){ // for all the features of the segment
    fs.readFeature(f,0);
    featureMapping(ms,f,config);
    fs.writeFeature(f);
  }
}
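// Illustrative sketch (not part of the original source): applying the per-segment
// featureMapping above to every segment of a selected cluster. It only reuses the
// rewind()/getSeg() iteration pattern and the featureMapping(ms,fs,seg,config)
// overload defined in this file; the cluster is assumed to be already initialized.
void featureMappingOnCluster(MixtureServer & ms, FeatureServer & fs, SegCluster & selectedSegments, Config & config)
{
  Seg *seg;                                     // current selected segment
  selectedSegments.rewind();                    // restart the segment iterator
  while((seg=selectedSegments.getSeg())!=NULL)  // for each segment of the cluster
    featureMapping(ms,fs,seg,config);           // map the features of this segment in place
}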
RealVector <double> TopGauss::get(MixtureGD & UBM,FeatureServer &fs,Config & config){
  RealVector <double> llkv;
  for (unsigned long i=0;i<fs.getSourceCount();i++) {
    String filename=fs.getNameOfASource(i);
    if (verbose) cout << "(TopGauss::get on FeatureServer) File ["<<filename<<"] ... ";
    this->read(filename,config); // this should be removed!! but when passing a feature server things get worse
    llkv.addValue(get(UBM,fs,filename,config));
    if (verbose) cout <<_nbgcnt/_nt <<" Gaussians per frame, LLK="<<llkv[i]<<endl;
  }
  return llkv;
}
// ----------------------------------------------------------------------------------------------------------
// Feature warping given a source distribution (an array of histograms, one per coefficient) and a target distribution,
// for a segment and cluster (the segment is the minimum time unit to perform this)
void computeWarp(Histo *histoT,Histo &destH,FeatureServer & fs,unsigned long begin, unsigned long length,Config &config) {
  unsigned long vectsize=fs.getVectSize(); // Get the vect size (number of coeff)
  Feature f;
  fs.seekFeature(begin);
  for (unsigned long idxFrame=0;idxFrame<length;idxFrame++){ // for all the features of the segment
    fs.readFeature(f,0); // Get the feature
    for (unsigned int i = 0; i < vectsize; i++){ // For each coeff
      f[i]=warping(f[i],histoT[i],destH); // Apply the warping function
    }
    fs.writeFeature(f);
  }
}
// ----------------------------------------------------------------------------------------------------------
// Feature mean subtraction and covariance reduction for a segment and cluster
// (the segment is considered to be the minimum time unit to perform this).
void computeZeroOne(const DoubleVector &featureMean,const DoubleVector &featureStd,FeatureServer & fs,unsigned long begin, unsigned long length,Config &config) {
  unsigned long vectsize=fs.getVectSize(); // Get the vect size (number of coeff)
  Feature f;
  fs.seekFeature(begin);
  for (unsigned long idxFrame=0;idxFrame<length;idxFrame++){ // for all the features of the segment
    fs.readFeature(f,0); // Get the feature
    for (unsigned int i = 0; i < vectsize; i++) { // For each coeff
      f[i]=(f[i]-featureMean[i])/featureStd[i]; // Apply the 0 mean 1 cov normalisation
    }
    fs.writeFeature(f);
  }
}
// Can use this function to get the likelihood with a TopGauss selection
double TopGauss::get(MixtureGD & UBM,FeatureServer &fs,String & featureFilename,Config & config){
  StatServer ss(config);
  String labelSelectedFrames =config.getParam("labelSelectedFrames");
  unsigned long begin=fs.getFirstFeatureIndexOfASource(featureFilename);
  fs.seekFeature(begin);
  SegServer segmentsServer;
  LabelServer labelServer;
  initializeClusters(featureFilename,segmentsServer,labelServer,config);
  // __android_log_print(ANDROID_LOG_DEBUG, "TopGauss::get", " Feature file %s \n", featureFilename.c_str());
  verifyClusterFile(segmentsServer,fs,config);
  unsigned long codeSelectedFrame=labelServer.getLabelIndexByString(labelSelectedFrames);
  SegCluster& selectedSegments=segmentsServer.getCluster(codeSelectedFrame);
  MixtureGDStat &acc=ss.createAndStoreMixtureStat(UBM);
  Seg *seg; // current selected segment
  selectedSegments.rewind();
  unsigned long t=0; // frame counter
  acc.resetLLK();
  unsigned long idxBegin=0;
  while((seg=selectedSegments.getSeg())!=NULL){
    unsigned long begin=seg->begin()+fs.getFirstFeatureIndexOfASource(seg->sourceName());
    fs.seekFeature(begin);
    Feature f;
    idxBegin=this->frameToIdx(t);
    for (unsigned long idxFrame=0;idxFrame<seg->length();idxFrame++){
      fs.readFeature(f);
      //unsigned long idx=this->frameToIdx(t);
      unsigned long nbg=_nbg[t];
      ULongVector index;
      double sumNonSelectedWeights=_snsw[t];
      double sumNonSelectedLLK=_snsl[t];
      for (unsigned long i=0;i<nbg;i++) {
        index.addValue(_idx[idxBegin+i]);
      }
      char c[100];
      sprintf(c,"%d",(int)index.size());
      config.setParam("topDistribsCount",c); // this should be high enough
      if (t==0) { // to remove in ALIZE, this is to init the LK vector
        acc.computeAndAccumulateLLK(f,1.0,DETERMINE_TOP_DISTRIBS);
        acc.resetLLK();
      }
      ss.setTopDistribIndexVector(index, sumNonSelectedWeights, sumNonSelectedLLK);
      acc.computeAndAccumulateLLK(f,1.0,USE_TOP_DISTRIBS);
      idxBegin+=nbg;
      t++;
    }
  }
  //ss.deleteMixtureStat(acc);
  if (t!=_nt || idxBegin !=_nbgcnt)
    cout << "W: t("<<t<<") != _nt("<<_nt<<") "<<"W: idxBegin("<<idxBegin<<") != _nbgcnt("<<_nbgcnt<<")"<<endl;
  return acc.getMeanLLK();
}
// on a part of a file
void outputFeatureFile(Config &config, FeatureServer &fs, unsigned long begin,unsigned long length, FeatureFileWriter &w)
{
  Feature f;
  fs.seekFeature(begin);
  for (unsigned long idxFrame=0;idxFrame<length;idxFrame++){
    outputFeatureFile(config, fs, f, w);
  }
}
void FactorAnalysisStat::computeAndAccumulateGeneralFAStats(SegCluster &selectedSegments,FeatureServer &fs,Config & config){
  if (verbose) cout <<"(FactorAnalysisStat) Compute General FA Stats (Complete)" << endl;
  double *N_h, *N, *S_X_h, *S_X, *ff;
  _matN_h.setAllValues(0.0);
  _matN.setAllValues(0.0);
  _matS_X_h.setAllValues(0.0);
  _matS_X.setAllValues(0.0);
  N_h=_matN_h.getArray();
  N=_matN.getArray();
  S_X_h=_matS_X_h.getArray();
  S_X=_matS_X.getArray();
  MixtureGD & UBM=_ms.getMixtureGD((unsigned long) 1);
  MixtureGDStat &acc=_ss.createAndStoreMixtureStat(UBM);

  // Compute Occupations and Statistics
  acc.resetOcc();
  Seg *seg;
  selectedSegments.rewind();
  String currentSource="";
  unsigned long loc=0;
  unsigned long sent=0;
  while((seg=selectedSegments.getSeg())!=NULL){
    unsigned long begin=seg->begin()+fs.getFirstFeatureIndexOfASource(seg->sourceName()); // Idx of the first frame of the current file in the feature server
    if (currentSource!=seg->sourceName()) {
      currentSource=seg->sourceName();
      loc=_ndxTable.locNb(currentSource);
      sent=_ndxTable.sessionNb(currentSource);
      if (verbose) cout << "Processing speaker["<<currentSource<<"]"<< endl;
    }
    fs.seekFeature(begin);
    Feature f;
    if (!_topGauss) {
      for (unsigned long idxFrame=0;idxFrame<seg->length();idxFrame++){
        fs.readFeature(f);
        acc.computeAndAccumulateOcc(f);
        RealVector <double> aPost=acc.getOccVect();
        ff=f.getDataVector();
        for(unsigned long k=0;k<_mixsize;k++) {
          N_h[sent*_mixsize+k]+=aPost[k];
          N[loc*_mixsize+k]   +=aPost[k];
          for (unsigned long i=0;i<_vsize;i++) {
            S_X_h[sent*_supervsize+(k*_vsize+i)]+=aPost[k]*ff[i];
            S_X[loc*_supervsize+(k*_vsize+i)]   +=aPost[k]*ff[i];
          }
        }
      }
    }
    else throw Exception("ComputeGeneralStats TopGauss not done at this level",__FILE__,__LINE__);
  }
};
/// Normalize features with a smooth mixture transformation: o'_t = o_t - sum_c P(c|o_t) (U x)_c
void FactorAnalysisStat::normalizeFeatures(SegCluster &selectedSegments,FeatureServer &fs,Config & config){
  if (verbose) cout << "(FactorAnalysisStat) Normalize Features" << endl;
  MixtureGD & clientMixture=_ms.getMixtureGD(1); // copy of the UBM mixture
  unsigned long nt=0;
  RealVector <double> m_xh_1;
  m_xh_1.setSize(_supervsize);
  double *_m_xh_1=m_xh_1.getArray();
  Seg *seg; // current selected segment
  selectedSegments.rewind();
  String currentSource="";
  while((seg=selectedSegments.getSeg())!=NULL){
    unsigned long begin=seg->begin()+fs.getFirstFeatureIndexOfASource(seg->sourceName());
    if (currentSource!=seg->sourceName()) {
      currentSource=seg->sourceName();
      this->getUX(m_xh_1,currentSource);
      this->getSpeakerModel(clientMixture,currentSource);
      if (verbose) cout << "Processing speaker["<<currentSource<<"]"<< endl;
    }
    fs.seekFeature(begin);
    Feature f;
    if (!_topGauss) {
      for (unsigned long idxFrame=0;idxFrame<seg->length();idxFrame++){
        fs.readFeature(f,0);
        double *ff=f.getDataVector();
        double sum=0.0;
        RealVector <double> P;
        P.setSize(_mixsize);
        double *Prob=P.getArray();
        for(unsigned long k=0;k<_mixsize;k++) { // posterior P(c|o_t) under the speaker model
          Prob[k]=clientMixture.weight(k)*clientMixture.getDistrib(k).computeLK(f);
          sum+=Prob[k];
        }
        for(unsigned long k=0;k<_mixsize;k++) Prob[k]/=sum;
        for(unsigned long k=0;k<_mixsize;k++) { // subtract the session offset weighted by the posterior
          for (unsigned long i=0;i<_vsize;i++)
            ff[i]-= Prob[k]*_m_xh_1[k*_vsize+i];
        }
        fs.writeFeature(f);
        nt++;
      }
    }
    else {
      throw Exception("no topgauss yet",__FILE__,__LINE__);
    }
  }
};
void cms(String & featureFileName,FeatureServer &fs,Config &config) {
  unsigned long begin=fs.getFirstFeatureIndexOfASource(featureFileName);
  fs.seekFeature(begin);
  SegServer segmentsServer;
  LabelServer labelServer;
  initializeClusters(featureFileName,segmentsServer,labelServer,config);
  verifyClusterFile(segmentsServer,fs,config);
  unsigned long codeSelectedFrame=labelServer.getLabelIndexByString(config.getParam("labelSelectedFrames"));
  SegCluster& selectedSegments=segmentsServer.getCluster(codeSelectedFrame);
  selectedSegments.rewind();
  RealVector <double> mean,cov;
  FrameAccGD frameAccu;
  frameAccu.reset();
  accumulateStatFrame(frameAccu,fs, selectedSegments, config);
  mean = frameAccu.getMeanVect(); // Get the mean vector
  cov  = frameAccu.getStdVect();  // Get the std vector
  computeZeroOne(mean,cov,fs, selectedSegments, config);
}
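// Illustrative sketch (not part of the original source): running the cms() helper above on
// every file attached to a FeatureServer. Only getSourceCount()/getNameOfASource(), already
// used in TopGauss::get above, are assumed here.
void cmsOnAllSources(FeatureServer &fs, Config &config)
{
  for (unsigned long i=0;i<fs.getSourceCount();i++) {  // for each feature file of the server
    String filename=fs.getNameOfASource(i);
    if (verbose) cout << "(cms) Normalizing ["<<filename<<"]"<<endl;
    cms(filename,fs,config);                           // 0 mean / 1 cov on the selected frames of this file
  }
}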
/// Compute the log-likelihood of the Factor Analysis model
double FactorAnalysisStat::getLLK(SegCluster &selectedSegments,MixtureGD &model,FeatureServer&fs,Config & config){
  if (verbose) cout << "(FactorAnalysisStat) Compute Likelihood" << endl;
  double llk=0.0;
  MixtureGDStat &acc=_ss.createAndStoreMixtureStat(model);
  Seg *seg;
  selectedSegments.rewind();
  while((seg=selectedSegments.getSeg())!=NULL){
    unsigned long begin=seg->begin()+fs.getFirstFeatureIndexOfASource(seg->sourceName());
    fs.seekFeature(begin);
    Feature f;
    for (unsigned long idxFrame=0;idxFrame<seg->length();idxFrame++){
      fs.readFeature(f);
      acc.computeAndAccumulateLLK(f,1.0,TOP_DISTRIBS_NO_ACTION);
    }
  }
  llk=acc.getMeanLLK();
  _ss.deleteMixtureStat(acc);
  return llk;
};
double computeEnergyThreshold(FeatureServer & fs,double pSelect,unsigned long nbBins=100)
{
  Histo histo(nbBins); // Create a histogram accumulator with nbBins bins
  Feature f;
  fs.reset(); // reset the reader at the beginning of the input stream
  for (unsigned long ind=0;fs.readFeature(f); ind++) // feature loop
    histo.accumulateValue(f[0]); // Accumulate the energy in the histogram accumulator
  histo.computeHisto(); // Compute the histogram
  long i=nbBins-1;
  real_t count=0;
  while((i>=0) && (count<=pSelect)){ // Find the bin corresponding to the wanted percentage of data
    count+=histo.count(i)*(histo.higherBound(i)-histo.lowerBound(i));
    i--;
  }
  double threshold;
  if (i>=0) threshold=histo.higherBound(i); // Set the threshold to the higherBound of the next bin
  else threshold=histo.lowerBound(0);       // if 100% of the data should be selected
  if (verbose) cout << "Percentage wanted["<<(int) (pSelect*100.0) <<"] Energy threshold["<<threshold<<"]"<<endl;
  return threshold;
}
// Build the segments with the energized frames
unsigned long selectFrames(FeatureServer &fs,SegServer & segServer,double threshold,SegCluster &selectedSeg,SegCluster &outputSeg,String labelOutput,String fileName)
{
  unsigned long countFrames=0;
  fs.reset(); // feature server reset
  unsigned long ind=0;
  unsigned long begin=0;
  bool in=false;
  Seg *seg; // current selected segment
  selectedSeg.rewind(); // reset the reader at the beginning of the input stream
  while((seg=selectedSeg.getSeg())!=NULL){ // For each input segment
    for (unsigned long idx=seg->begin();idx<seg->begin()+seg->length();idx++){ // for each frame
      Feature f;
      fs.seekFeature(idx);
      fs.readFeature(f);
      if (f[0]>threshold){ // the frame is selected
        countFrames++;
        if (in==false){ // Beginning of a new segment
          in=true;
          begin=ind;
        }
      }
      else if (in){ // End of a segment
        in=false;
        Seg & segFake=segServer.createSeg(begin,ind-begin,0, // Create a segment - Take care: length=end-begin+1 but ind=end+1 here!
                                          labelOutput,fileName);
        outputSeg.add(segFake); // Add the segment
      }
      ind++; // Increment the frame index
    } // end of one input segment
    if (in){ // deal with the last energized segment inside the current input segment
      in=false;
      Seg & segFake=segServer.createSeg(begin,ind-begin+1,0, // Create a segment - Take care: length=end-begin+1 and ind=end in this case!
                                        labelOutput,fileName);
      outputSeg.add(segFake); // Add the segment
    }
  } // end feature loop
  return countFrames;
}
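// Illustrative sketch (not part of the original source): chaining the two helpers above for
// energy-based frame selection. The caller is assumed to provide an input cluster (candidate
// frames) and an empty output cluster; the proportion of frames to keep is read here from a
// hypothetical "alpha" config parameter.
unsigned long energySelection(FeatureServer &fs, SegServer &segServer,
                              SegCluster &inputSeg, SegCluster &outputSeg,
                              String labelOutput, String fileName, Config &config)
{
  double alpha=config.getParam("alpha").toDouble();        // proportion of frames to select
  double threshold=computeEnergyThreshold(fs,alpha);       // energy value keeping roughly alpha of the data
  unsigned long kept=selectFrames(fs,segServer,threshold,  // build the output segments above the threshold
                                  inputSeg,outputSeg,labelOutput,fileName);
  if (verbose) cout << "Selected ["<<kept<<"] frames above ["<<threshold<<"]"<<endl;
  return kept;
}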
// Main init function
double TopGauss::compute(MixtureGD & UBM,FeatureServer &fs,String & featureFilename,Config & config){
  StatServer ss(config);
  MixtureGDStat &acc=ss.createAndStoreMixtureStat(UBM);
  unsigned long _mixsize=UBM.getDistribCount();
  String labelSelectedFrames =config.getParam("labelSelectedFrames");
  unsigned long begin=fs.getFirstFeatureIndexOfASource(featureFilename);
  fs.seekFeature(begin);
  SegServer segmentsServer;
  LabelServer labelServer;
  initializeClusters(featureFilename,segmentsServer,labelServer,config);
  // __android_log_print(ANDROID_LOG_DEBUG, "TopGauss::compute", " Feature file %s \n", featureFilename.c_str());
  verifyClusterFile(segmentsServer,fs,config);
  unsigned long codeSelectedFrame=labelServer.getLabelIndexByString(labelSelectedFrames);
  SegCluster& selectedSegments=segmentsServer.getCluster(codeSelectedFrame);
  acc.resetLLK();
  double topD=config.getParam("topGauss").toDouble();
  if (verbose) {
    if (topD<1.0) cout << "LLK %="<< topD << "% ";
    else cout << "Top-"<<topD<<" ";
  }

  // Class values
  _nt=totalFrame(selectedSegments);
  _nbg.setSize(_nt);
  _idx.setSize(0); _snsw.setSize(0); _snsl.setSize(0);
  _nbg.setAllValues(0);
  _idx.setAllValues(0); _snsw.setAllValues(0.0); _snsl.setAllValues(0.0);
  _nbgcnt=0;

  Seg *seg; // current selected segment
  selectedSegments.rewind();
  unsigned long t=0; // frame counter
  while((seg=selectedSegments.getSeg())!=NULL){
    unsigned long begin=seg->begin()+fs.getFirstFeatureIndexOfASource(seg->sourceName());
    fs.seekFeature(begin);
    Feature f;
    for (unsigned long idxFrame=0;idxFrame<seg->length();idxFrame++){
      fs.readFeature(f);
      double llk=acc.computeAndAccumulateLLK(f,1.0,DETERMINE_TOP_DISTRIBS);
      const LKVector &topV=ss.getTopDistribIndexVector();
      double lk_tot=exp(llk);
      double val=0.0;
      if (topD<1.0) { // keep the smallest set of Gaussians covering topD of the total likelihood
        for(unsigned long j=0;j<_mixsize;j++){
          if (val > topD*lk_tot) break;
          val+=(topV[j].lk);
          _nbg[t]++;
        }
      }
      else _nbg[t]=(unsigned long)topD; // keep a fixed number of top Gaussians
      _nbgcnt+=_nbg[t];
      double snsw=1.0;     // sum of the non-selected weights
      double snsl=lk_tot;  // sum of the non-selected likelihoods
      for(unsigned long j=0;j<_nbg[t];j++) {
        _idx.addValue(topV[j].idx);
        snsw -=UBM.weight(topV[j].idx);
        snsl -=topV[j].lk;
      }
      _snsw.addValue(snsw);
      if (snsl < EPS_LK) _snsl.addValue(EPS_LK);
      else _snsl.addValue(snsl);
      t++;
    }
  }
  if (t!=_nt) cout << "W: t("<<t<<") != _nt("<<_nt<<")"<<endl;
  return acc.getMeanLLK();
}
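// Illustrative sketch (not part of the original source): a typical use of the TopGauss class as
// defined in this file. compute() selects and stores the top Gaussians per frame for a file;
// get() is then assumed to reuse that in-memory selection on the same object to return an
// approximate likelihood for the same file.
double topGaussLLK(MixtureGD &UBM, FeatureServer &fs, String &featureFilename, Config &config)
{
  TopGauss tg;
  double fullLLK=tg.compute(UBM,fs,featureFilename,config);  // full pass: pick the top Gaussians per frame
  double topLLK =tg.get(UBM,fs,featureFilename,config);      // fast pass: re-score with the stored selection
  if (verbose) cout << "full LLK["<<fullLLK<<"] top LLK["<<topLLK<<"]"<<endl;
  return topLLK;
}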
// on a segment
void outputFeatureFile(Config &config, FeatureServer &fs, Seg * seg, FeatureFileWriter &w)
{
  unsigned long begin=seg->begin()+fs.getFirstFeatureIndexOfASource(seg->sourceName()); // Idx of the first frame of the current file in the feature server
  if (verbose) cout <<"(GeneralTools) Writing ["<<seg->sourceName()<<"]"<<" begin:"<<begin<<" length:"<<seg->length()<<endl;
  outputFeatureFile(config,fs,begin,seg->length(),w);
}
// Feature writing in an output stream w - can be used to write multiple segments from multiple files into one file
void outputFeatureFile(Config &config, FeatureServer &fs, Feature & f, FeatureFileWriter &w)
{
  fs.readFeature(f);
  w.writeFeature(f);
}
void computeZeroOne(const DoubleVector &featureMean,const DoubleVector &featureStd, FeatureServer & fs, Seg* seg, Config & config){
  unsigned long begin=seg->begin()+fs.getFirstFeatureIndexOfASource(seg->sourceName()); // Idx of the first frame of the current file in the feature server
  computeZeroOne(featureMean,featureStd,fs,begin,seg->length(),config); // Normalize the features to fit 0 mean, 1 cov
}
// on a segment
void computeZeroOne(FrameAccGD & frameAccu, FeatureServer & fs, Seg* seg, Config & config){
  const DoubleVector & featureMean = frameAccu.getMeanVect(); // Get the mean vector
  const DoubleVector & featureStd  = frameAccu.getStdVect();  // Get the std vector (sqrt(cov))
  unsigned long begin=seg->begin()+fs.getFirstFeatureIndexOfASource(seg->sourceName()); // Idx of the first frame of the current file in the feature server
  computeZeroOne(featureMean,featureStd,fs,begin,seg->length(),config); // Normalize the features to fit 0 mean, 1 cov
}
// on a segment
void computeWarp(Histo *histoT,Histo &destH, FeatureServer & fs, Seg* seg,Config & config){
  unsigned long begin=seg->begin()+fs.getFirstFeatureIndexOfASource(seg->sourceName()); // Idx of the first frame of the current file in the feature server
  computeWarp(histoT,destH,fs,begin,seg->length(),config);
}
// On a complete feature stream
void globalMeanCov (FeatureServer &fs,FrameAcc & globalFrameAcc,Config &config)
{
  globalFrameAcc.reset();
  accumulateStatFrame(globalFrameAcc,fs,0,fs.getFeatureCount(),config);
}
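// Illustrative sketch (not part of the original source): global 0 mean / 1 cov normalization of
// a complete feature stream, combining globalMeanCov() above with the range version of
// computeZeroOne() defined earlier in this file. A FrameAccGD accumulator is assumed, as in cms().
void globalZeroOne(FeatureServer &fs, Config &config)
{
  FrameAccGD globalFrameAcc;
  globalMeanCov(fs,globalFrameAcc,config);                   // accumulate mean/std over the whole stream
  const DoubleVector & mean = globalFrameAcc.getMeanVect();  // global mean vector
  const DoubleVector & std  = globalFrameAcc.getStdVect();   // global std vector
  computeZeroOne(mean,std,fs,0,fs.getFeatureCount(),config); // normalize every feature in place
}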