/*************************************************************************
Internal cross-validation subroutine.

Performs K-fold cross-validation of network N on dataset XY:
the dataset is split into FOLDSCOUNT folds; for each fold a copy of the
network is trained on the remaining folds (with either Levenberg-Marquardt
or L-BFGS, depending on LMAlgorithm) and evaluated on the held-out fold.

INPUT PARAMETERS:
    N           -   network geometry template (copied, not modified)
    XY          -   dataset; row layout depends on network type:
                    softmax net: NIn inputs + 1 class label,
                    regression net: NIn inputs + NOut targets
    NPoints     -   dataset size
    Decay       -   weight decay passed to the trainer
    Restarts    -   number of restarts passed to the trainer
    FoldsCount  -   number of folds, must be in [2, NPoints]
    LMAlgorithm -   true = MLPTrainLM, false = MLPTrainLBFGS
    WStep,MaxIts-   L-BFGS stopping criteria (ignored for LM)

OUTPUT PARAMETERS:
    Info        -   completion code: 1 on success, -1 on bad parameters,
                    negative trainer codes are passed through
    Rep         -   accumulated iteration counters over all folds
    CVRep       -   cross-validation error estimates
*************************************************************************/
static void mlpkfoldcvgeneral(const multilayerperceptron& n,
     const ap::real_2d_array& xy,
     int npoints,
     double decay,
     int restarts,
     int foldscount,
     bool lmalgorithm,
     double wstep,
     int maxits,
     int& info,
     mlpreport& rep,
     mlpcvreport& cvrep)
{
    int i;
    int fold;
    int j;
    int k;
    multilayerperceptron network;
    int nin;
    int nout;
    int rowlen;
    int wcount;
    int nclasses;
    int tssize;
    int cvssize;
    ap::real_2d_array cvset;
    ap::real_2d_array testset;
    ap::integer_1d_array folds;
    int relcnt;
    mlpreport internalrep;
    ap::real_1d_array x;
    ap::real_1d_array y;

    //
    // Read network geometry, test parameters
    //
    mlpproperties(n, nin, nout, wcount);
    if( mlpissoftmax(n) )
    {
        // classification: NClasses>0 signals stratified splitting,
        // row = NIn inputs + 1 class label
        nclasses = nout;
        rowlen = nin+1;
    }
    else
    {
        // regression: NClasses<0 signals plain splitting,
        // row = NIn inputs + NOut targets
        nclasses = -nout;
        rowlen = nin+nout;
    }
    if( npoints<=0||foldscount<2||foldscount>npoints )
    {
        info = -1;
        return;
    }
    mlpcopy(n, network);

    //
    // K-fold out cross-validation.
    // First, estimate generalization error
    //
    testset.setbounds(0, npoints-1, 0, rowlen-1);
    cvset.setbounds(0, npoints-1, 0, rowlen-1);
    x.setbounds(0, nin-1);
    y.setbounds(0, nout-1);
    mlpkfoldsplit(xy, npoints, nclasses, foldscount, false, folds);
    cvrep.relclserror = 0;
    cvrep.avgce = 0;
    cvrep.rmserror = 0;
    cvrep.avgerror = 0;
    cvrep.avgrelerror = 0;
    rep.ngrad = 0;
    rep.nhess = 0;
    rep.ncholesky = 0;
    relcnt = 0;
    for(fold = 0; fold <= foldscount-1; fold++)
    {
        //
        // Separate set: rows of the current fold go to the test set,
        // all other rows form the training set
        //
        tssize = 0;
        cvssize = 0;
        for(i = 0; i <= npoints-1; i++)
        {
            if( folds(i)==fold )
            {
                ap::vmove(&testset(tssize, 0), &xy(i, 0), ap::vlen(0,rowlen-1));
                tssize = tssize+1;
            }
            else
            {
                ap::vmove(&cvset(cvssize, 0), &xy(i, 0), ap::vlen(0,rowlen-1));
                cvssize = cvssize+1;
            }
        }

        //
        // Train on CV training set
        //
        if( lmalgorithm )
        {
            mlptrainlm(network, cvset, cvssize, decay, restarts, info, internalrep);
        }
        else
        {
            mlptrainlbfgs(network, cvset, cvssize, decay, restarts, wstep, maxits, info, internalrep);
        }
        if( info<0 )
        {
            // trainer failed: zero out the estimates and pass Info through
            cvrep.relclserror = 0;
            cvrep.avgce = 0;
            cvrep.rmserror = 0;
            cvrep.avgerror = 0;
            cvrep.avgrelerror = 0;
            return;
        }
        rep.ngrad = rep.ngrad+internalrep.ngrad;
        rep.nhess = rep.nhess+internalrep.nhess;
        rep.ncholesky = rep.ncholesky+internalrep.ncholesky;

        //
        // Estimate error using CV test set
        //
        if( mlpissoftmax(network) )
        {
            //
            // classification-only code
            //
            cvrep.relclserror = cvrep.relclserror+mlpclserror(network, testset, tssize);
            cvrep.avgce = cvrep.avgce+mlperrorn(network, testset, tssize);
        }
        for(i = 0; i <= tssize-1; i++)
        {
            ap::vmove(&x(0), &testset(i, 0), ap::vlen(0,nin-1));
            mlpprocess(network, x, y);
            if( mlpissoftmax(network) )
            {
                //
                // Classification-specific code: compare posterior vector
                // against the 0/1 indicator of the true class K
                //
                k = ap::round(testset(i,nin));
                for(j = 0; j <= nout-1; j++)
                {
                    if( j==k )
                    {
                        cvrep.rmserror = cvrep.rmserror+ap::sqr(y(j)-1);
                        cvrep.avgerror = cvrep.avgerror+fabs(y(j)-1);
                        cvrep.avgrelerror = cvrep.avgrelerror+fabs(y(j)-1);
                        relcnt = relcnt+1;
                    }
                    else
                    {
                        cvrep.rmserror = cvrep.rmserror+ap::sqr(y(j));
                        cvrep.avgerror = cvrep.avgerror+fabs(y(j));
                    }
                }
            }
            else
            {
                //
                // Regression-specific code: relative error is accumulated
                // only for nonzero targets (division by target value)
                //
                for(j = 0; j <= nout-1; j++)
                {
                    cvrep.rmserror = cvrep.rmserror+ap::sqr(y(j)-testset(i,nin+j));
                    cvrep.avgerror = cvrep.avgerror+fabs(y(j)-testset(i,nin+j));
                    if( ap::fp_neq(testset(i,nin+j),0) )
                    {
                        cvrep.avgrelerror = cvrep.avgrelerror+fabs((y(j)-testset(i,nin+j))/testset(i,nin+j));
                        relcnt = relcnt+1;
                    }
                }
            }
        }
    }
    if( mlpissoftmax(network) )
    {
        cvrep.relclserror = cvrep.relclserror/npoints;
        cvrep.avgce = cvrep.avgce/(log(double(2))*npoints);
    }
    cvrep.rmserror = sqrt(cvrep.rmserror/(npoints*nout));
    cvrep.avgerror = cvrep.avgerror/(npoints*nout);
    if( relcnt>0 )
    {
        cvrep.avgrelerror = cvrep.avgrelerror/relcnt;
    }
    else
    {
        // regression dataset with all-zero targets: no relative-error
        // samples were accumulated; avoid integer division by zero
        cvrep.avgrelerror = 0;
    }
    info = 1;
}
// Fold the whole cylinder: side surface first, then top cap, then bottom cap.
void cylinder::fold()
{
    folds();    // fold the side surface
    foldt();    // fold the top cap
    foldb();    // fold the bottom cap
}