// compute the log likelihood. double CLinearMapping::logLikelihood() const { double L=0.0; for(unsigned int i=0; i<getNumData(); i++) { outActive.copyRowRow(0, b, 0); outActive.gemvRowRow(0, W, *pX, i, 1.0, 1.0, "t"); L+=outActive.dist2Row(0, *py, i); } L=L/variance; L+=(double)getNumData()*(ndlutil::LOGTWOPI + log(variance)); L*=-0.5; //L+=priorLogProb(); return L; }
// Print a summary of the linear mapping model.
// Fix: the ostream parameter `os` was previously ignored and all output
// went to cout, so callers could not redirect the summary; write to `os`.
void CLinearMapping::display(ostream& os) const
{
  os << "Linear Mapping:" << endl;
  os << "Optimiser: " << getDefaultOptimiserStr() << endl;
  os << "Data Set Size: " << getNumData() << endl;
  os << "Log likelihood: " << logLikelihood() << endl;
}
void hListBox::setScrollBarPosition(int start_item) { int numItems = getNumWidgets(); int numData = getNumData(); if(numData == numItems) return; // nothing to scroll scrollBar->setPosition(start_item); }
// Print a summary of the multi-layer perceptron model.
// Fix: the ostream parameter `os` was previously ignored and all output
// went to cout, so callers could not redirect the summary; write to `os`.
void CMlpMapping::display(ostream& os) const
{
  os << "Multi-Layer Perceptron Model:" << endl;
  os << "Optimiser: " << getDefaultOptimiserStr() << endl;
  os << "Data Set Size: " << getNumData() << endl;
  os << "Number hidden: " << hiddenDim << endl;
  os << "Log likelihood: " << logLikelihood() << endl;
}
// compute the log likelihood. double CMlpMapping::logLikelihood() const { double L=0.0; for(unsigned int i=0; i<getNumData(); i++) { hiddenActive.deepCopy(b1); hiddenActive.gemvRowRow(0, W1, *pX, i, 1.0, 1.0, "t"); hiddenActive.tanh(); outActive.copyRowRow(0, b2, 0); outActive.gemvRowRow(0, W2, hiddenActive, 0, 1.0, 1.0, "t"); L+=outActive.dist2Row(0, *py, i); } L=L/variance; L+=(double)getNumData()*(ndlutil::LOGTWOPI + log(variance)); L*=-0.5; //L+=priorLogProb(); return L; }
// Serialise the mapping: named header fields, then the parameter vector.
// NOTE: field order matters — readers expect this exact sequence.
void CLinearMapping::writeParamsToStream(ostream& out) const
{
  writeToStream(out, "baseType", getBaseType());
  writeToStream(out, "type", getType());
  writeToStream(out, "numData", getNumData());
  writeToStream(out, "outputDim", getOutputDim());
  writeToStream(out, "inputDim", getInputDim());
  writeToStream(out, "numParams", getOptNumParams());
  // Gather the current optimisable parameters into a row vector and
  // stream them after the header.
  CMatrix params(1, getOptNumParams());
  getOptParams(params);
  params.toStream(out);
}
// compute the gradients wrt parameters and latent variables. double CLinearMapping::logLikelihoodGradient(CMatrix& g) const { double L=0.0; g.zeros(); CMatrix gtemp(1, getOptNumParams()); for(unsigned int j=0; j<getOutputDim(); j++) { for(unsigned int i=0; i<getNumData(); i++) { double diff = outGradParams(gtemp, *pX, i, j) - py->getVal(i, j); L+= diff*diff; gtemp.scale(-diff/variance); g.add(gtemp); } } L=L/variance; L+=(double)getNumData()*(ndlutil::LOGTWOPI + log(variance)); L*=-0.5; //L+=priorLogProb(); return L; }