/// Return (a copy of) the RBM's current parameter vector.
/// Simply delegates to currentParameters().
Eigen::VectorXd RBM::getParameters()
{
    Eigen::VectorXd params = currentParameters();
    return params;
}
/// Serialize the network to `stream`: the architecture description first,
/// then the literal tag "parameters " followed by the current parameter values.
void Net::save(std::ostream& stream)
{
    stream << architecture.str();
    stream << "parameters ";
    stream << currentParameters();
}
/// Perform one linear-method optimization step of the trial wavefunction
/// parameters. Fills the overlap/Hamiltonian vectors from the DMC engine,
/// computes a proposed parameter direction, and applies it according to
/// MinMethod ("rescale", "overlap", "average", or a line-minimization
/// fallback such as "quartic"). Returns true while the report counter is
/// positive (i.e. more optimization cycles remain).
bool QMCSHLinearOptimize::run()
{
    start();
    //size of matrix
    numParams = optTarget->NumParams();
    N = numParams + 1;  // +1 row/column for the normalization component
    // initialize our parameters
    vector<RealType> currentOvlp(N,0);
    vector<RealType> currentHOvlp(N,0);
    vector<RealType> currentParameters(numParams,0);
    RealType E_avg(0);
    vector<RealType> bestParameters(currentParameters);
    optdir.resize(numParams,0);
    optparm.resize(numParams,0);
    // snapshot the current variational parameters as the starting point
    for (int i=0; i<numParams; i++)
        optparm[i] = currentParameters[i] = optTarget->Params(i);
    bool acceptedOneMove(false);
    // NOTE(review): tooManyTries/failedTries are not used in this visible
    // body — possibly leftovers from a retry loop; confirm before removing.
    int tooManyTries(20);
    int failedTries(0);
    RealType lastCost(0);
    RealType startCost(0);
    // reference cost before any parameter move
    startCost = lastCost = optTarget->Cost(false);
    app_log()<<"Starting cost: "<<startCost<<endl;
    Matrix<RealType> OM;
    OM.resize(N,N);
    // overlap vector, H-overlap vector, average energy, and overlap matrix
    // come from the DMC engine
    dmcEngine->fillVectors(currentOvlp,currentHOvlp,E_avg,OM);
    // proposed search direction = overlap vector
    for (int i=0; i<numParams; i++)
        optdir[i]=currentOvlp[i];
    // dP[0] stays 1 (normalization slot); dP[1..numParams] carry the direction
    std::vector<RealType> dP(N,1);
    for (int i=0; i<numParams; i++)
        dP[i+1]=optdir[i];
    Lambda = getNonLinearRescale(dP,OM);
    app_log()<<"rescaling factor :"<<Lambda<<endl;
    // largest-magnitude component of the direction, used for step-size guards
    RealType bigOptVec(std::abs(optdir[0]));
    for (int i=1; i<numParams; i++)
        bigOptVec =std::max(std::abs(optdir[i]),bigOptVec);
    //         app_log()<<"currentOvlp"<<endl;
    //         for (int i=0; i<numParams; i++)
    //           app_log()<<optdir[i]<<" ";
    //         app_log()<<endl;
    //
    //         app_log()<<"optparam"<<endl;
    //         for (int i=0; i<numParams; i++)
    //           app_log()<<optparm[i]<<" ";
    //         app_log()<<endl;
    if (MinMethod=="rescale")
    {
        // take the rescaled step directly, unless the largest parameter
        // change would exceed bigChange
        if (bigOptVec*std::abs(Lambda)>bigChange)
            app_log()<<"  Failed Step. Largest LM parameter change:"<<bigOptVec*std::abs(Lambda)<<endl;
        else
        {
            for (int i=0; i<numParams; i++)
                bestParameters[i] = optTarget->Params(i) = currentParameters[i] + Lambda*optdir[i];
            acceptedOneMove = true;
        }
    }
    else if (MinMethod=="overlap")
    {
        // orthogonalize/scale the direction against the overlap matrix,
        // then step without the Lambda rescale
        orthoScale(optdir,OM);
        if (bigOptVec>bigChange)
            app_log()<<"  Failed Step. Largest LM parameter change:"<<bigOptVec<<endl;
        else
        {
            for (int i=0; i<numParams; i++)
                bestParameters[i] = optTarget->Params(i) = currentParameters[i] + optdir[i];
            acceptedOneMove = true;
        }
    }
    else if (MinMethod=="average")
    {
        // unconditionally step along the H-overlap vector
        for (int i=0; i<numParams; i++)
            bestParameters[i] = optTarget->Params(i) = currentParameters[i] + currentHOvlp[i];
        acceptedOneMove = true;
    }
    else
    {
        // line-minimization branch: bracket and minimize the cost along optdir
        TOL = param_tol/bigOptVec;
        AbsFuncTol=true;
        largeQuarticStep=bigChange/bigOptVec;
        quadstep = stepsize*Lambda; //bigOptVec;
        //                  initial guess for line min bracketing
        LambdaMax = quadstep;
        myTimers[3]->start();
        if (MinMethod=="quartic")
        {
            int npts(7);
            lineoptimization3(npts,startCost);
        }
        else
            lineoptimization2();
        myTimers[3]->stop();
        RealType biggestParameterChange = bigOptVec*std::abs(Lambda);
        if (biggestParameterChange>bigChange)
        {
            // step too large: restore the starting parameters
            app_log()<<"  Failed Step. Largest LM parameter change:"<<biggestParameterChange<<endl;
            for (int i=0; i<numParams; i++)
                optTarget->Params(i) = bestParameters[i] = currentParameters[i];
        }
        else
        {
            // apply the line-minimized step and re-evaluate the cost
            for (int i=0; i<numParams; i++)
                optTarget->Params(i) = optparm[i] + Lambda * optdir[i];
            lastCost = optTarget->Cost(false);
            app_log()<<"  Costs: "<<startCost<<" "<<lastCost<<endl;
            app_log()<<"  Optimal rescaling factor :"<<Lambda<<endl;
            if (lastCost<startCost)
                acceptedOneMove = true;
            else
            {
                // cost went up: roll back to the starting parameters
                for (int i=0; i<numParams; i++)
                    optTarget->Params(i) = currentParameters[i];
                app_log()<<"  Failed Step. Cost increase "<<endl;
            }
        }
    }
    //     if (acceptedOneMove)
    //         for (int i=0; i<numParams; i++) optTarget->Params(i) = bestParameters[i];
    //     else
    //         for (int i=0; i<numParams; i++) optTarget->Params(i) = currentParameters[i];
    //     if (W.getActiveWalkers()>NumOfVMCWalkers)
    //     {
    //         W.destroyWalkers(W.getActiveWalkers()-NumOfVMCWalkers);
    //         app_log() << "  QMCLinearOptimize::generateSamples removed walkers." << endl;
    //         app_log() << "  Number of Walkers per node " << W.getActiveWalkers() << endl;
    //     }
    finish();
    return (optTarget->getReportCounter() > 0);
}