// Adopts a Direct3D texture and snapshots its current attributes.
// The org_* members record the values reported by size()/format()/maxlevel()
// at set-up time, presumably so they can be compared or restored later
// (TODO confirm against the rest of the class).
// NOTE: ptr_ must be assigned first — the accessors below read through it.
void Texture::setUp(dx::IDirect3DTexturePtr tex) { ptr_ = tex; org_width_ = size().x; org_height_ = size().y; org_format_ = format(); org_miplevel_ = maxlevel(); }
//creates the ngramtable on demand from the sublm tables int mixture::get(ngram& ng,int n,int lev) { if (usefulltable) { return ngramtable::get(ng,n,lev); } //free current tree resetngramtable(); //get 1-word prefix from ng ngram ug(dict,1); *ug.wordp(1)=*ng.wordp(ng.size); //local ngram to upload entries ngram locng(dict,maxlevel()); //allocate subtrees from sublm for (int i=0; i<numslm; i++) { ngram subug(sublm[i]->dict,1); subug.trans(ug); if (sublm[i]->get(subug,1,1)) { ngram subng(sublm[i]->dict,maxlevel()); *subng.wordp(maxlevel())=*subug.wordp(1); sublm[i]->scan(subug.link,subug.info,1,subng,INIT,maxlevel()); while(sublm[i]->scan(subug.link,subug.info,1,subng,CONT,maxlevel())) { locng.trans(subng); put(locng); } } } return ngramtable::get(ng,n,lev); }
// Computes the modified shift-beta discounted probability components for
// an ngram: fstar (discounted relative frequency) and lambda (back-off
// mass), optionally removing cv cross-validation counts from the
// statistics. Returns 1 on success; exits the process if a unigram is
// missing from the table.
//
// Parameters:
//   ng_    - ngram to score (translated into this model's dictionary)
//   size   - ngram order being scored
//   fstar  - out: discounted probability numerator term
//   lambda - out: back-off weight mass for this history
//   cv     - counts to subtract for leave-one-out cross-validation
int mshiftbeta::discount(ngram ng_,int size,double& fstar,double& lambda, int cv)
{
  ngram ng(dict);
  ng.trans(ng_);

  //cout << "size :" << size << " " << ng <<"\n";

  if (size > 1) {
    ngram history=ng;

    // Singleton pruning is applied only on real (uncorrected) counts!!
    // The history must exist, survive the cv subtraction, and (for
    // orders >= 3) exceed the pruning threshold after correction.
    if (ng.ckhisto(size) && get(history,size,size-1) && (history.freq > cv) &&
        ((size < 3) || ((history.freq-cv) > prunethresh ))) {

      // No history pruning with corrected counts!
      // suc[k] = number of successor types seen (k+1) times
      // (suc[2] aggregates all types seen 3+ times).
      int suc[3];
      suc[0]=succ1(history.link);
      suc[1]=succ2(history.link);
      suc[2]=history.succ-suc[0]-suc[1];

      // The ngram itself must exist and not be pruned away as a
      // singleton (regular or top-level singleton pruning).
      if (get(ng,size,size) &&
          (!prunesingletons() || mfreq(ng,size)>1 || size<3) &&
          (!prunetopsingletons() || mfreq(ng,size)>1 || size<maxlevel())) {

        ng.freq=mfreq(ng,size);
        // Clamp cv so the corrected count never goes negative.
        cv=(cv>ng.freq)?ng.freq:cv;

        if (ng.freq>cv) {
          // Discount beta is indexed by the (corrected) count class:
          // classes 1, 2, and 3+ each get their own beta per order.
          double b=(ng.freq-cv>=3?beta[2][size]:beta[ng.freq-cv-1][size]);

          fstar=(double)((double)(ng.freq - cv) - b)/(double)(history.freq-cv);

          lambda=(beta[0][size] * suc[0] + beta[1][size] * suc[1] + beta[2][size] * suc[2]) /
                 (double)(history.freq-cv);

          // Pruned singletons return their reserved mass to lambda.
          if ((size>=3 && prunesingletons()) ||
              (size==maxlevel() && prunetopsingletons())) //correction
            lambda+=(double)(suc[0] * (1-beta[0][size])) / (double)(history.freq-cv);

        } else { // ng.freq==cv: the event disappears under cross-validation

          // Temporarily remove this ngram from the successor statistics
          // before computing lambda, then restore them afterwards.
          ng.freq>=3?suc[2]--:suc[ng.freq-1]--; //update successor stat

          fstar=0.0;
          lambda=(beta[0][size] * suc[0] + beta[1][size] * suc[1] + beta[2][size] * suc[2]) /
                 (double)(history.freq-cv);

          if ((size>=3 && prunesingletons()) ||
              (size==maxlevel() && prunetopsingletons())) //correction
            lambda+=(double)(suc[0] * (1-beta[0][size])) / (double)(history.freq-cv);

          ng.freq>=3?suc[2]++:suc[ng.freq-1]++; //resume successor stat

        }
      } else {
        // Ngram absent or pruned: all probability mass goes to back-off.
        fstar=0.0;
        lambda=(beta[0][size] * suc[0] + beta[1][size] * suc[1] + beta[2][size] * suc[2]) /
               (double)(history.freq-cv);

        if ((size>=3 && prunesingletons()) ||
            (size==maxlevel() && prunetopsingletons())) //correction
          lambda+=(double)(suc[0] * (1-beta[0][size])) / (double)(history.freq-cv);
      }

      //cerr << "ngram :" << ng << "\n";

      if (*ng.wordp(1)==dict->oovcode()) {
        // OOV word: fold its discounted mass into the back-off weight.
        lambda+=fstar;
        fstar=0.0;
      } else {
        // Add the (undiscounted-minus-beta) mass of the OOV variant of
        // this history to lambda, unless that variant is pruned.
        *ng.wordp(1)=dict->oovcode();
        if (get(ng,size,size)) {
          ng.freq=mfreq(ng,size);
          if ((!prunesingletons() || mfreq(ng,size)>1 || size<3) &&
              (!prunetopsingletons() || mfreq(ng,size)>1 || size<maxlevel())) {
            double b=(ng.freq>=3?beta[2][size]:beta[ng.freq-1][size]);
            lambda+=(double)(ng.freq - b)/(double)(history.freq-cv);
          }
        }
      }
    } else {
      // History unseen (or pruned): defer entirely to the lower order.
      fstar=0;
      lambda=1;
    }
  } else {
    // Unigram case, no cross-validation.
    lambda=0.0;
    // Normalize by the back-off total frequency except at the top order.
    int unigrtotfreq=(size<lmsize()?btotfreq():totfreq());

    if (get(ng,size,size))
      fstar=(double) mfreq(ng,size)/(double)unigrtotfreq;
    else {
      // A missing unigram is unrecoverable: report and abort.
      cerr << "Missing probability for word: " << dict->decode(*ng.wordp(1)) << "\n";
      exit(1);
    }
  }
  return 1;
}