template <class Sampler, class Probability, class Temperature, class Reannealing>
EndCriteria::Type HybridSimulatedAnnealing<Sampler, Probability, Temperature, Reannealing>::minimize(
                                            Problem &P, const EndCriteria &endCriteria) {
    EndCriteria::Type ecType = EndCriteria::None;
    P.reset();
    reannealing_.setProblem(P);
    Array x = P.currentValue();
    Size n = x.size();
    Size k = 1;
    Size kStationary = 1;
    Size kReAnneal = 1;
    Size kReset = 1;
    Size maxK = endCriteria.maxIterations();
    Size maxKStationary = endCriteria.maxStationaryStateIterations();
    bool temperatureBreached = false;
    Array currentTemperature(n, startTemperature_);
    Array annealStep(n, 1.0);
    Array bestPoint(x);
    Array currentPoint(x);
    Array startingPoint(x);
    Array newPoint(x);
    Real bestValue = P.value(bestPoint);
    Real currentValue = bestValue;
    Real startingValue = bestValue; //to reset to starting point if desired

    while (k <= maxK && kStationary <= maxKStationary && !temperatureBreached) {
        //Draw a new sample point
        sampler_(newPoint, currentPoint, currentTemperature);

        //Evaluate new point
        Real newValue = P.value(newPoint);

        //Determine if new point is accepted
        if (probability_(currentValue, newValue, currentTemperature)) {
            if (optimizeScheme_ == EveryNewPoint) {
                P.setCurrentValue(newPoint);
                P.setFunctionValue(newValue);
                localOptimizer_->minimize(P, endCriteria);
                newPoint = P.currentValue();
                newValue = P.functionValue();
            }
            currentPoint = newPoint;
            currentValue = newValue;
        }

        //Check if we have a new best point
        if (newValue < bestValue) {
            if (optimizeScheme_ == EveryBestPoint) {
                P.setCurrentValue(newPoint);
                P.setFunctionValue(newValue);
                localOptimizer_->minimize(P, endCriteria);
                newPoint = P.currentValue();
                newValue = P.functionValue();
            }
            kStationary = 0;
            bestValue = newValue;
            bestPoint = newPoint;
        }

        //Increase steps
        k++;
        kStationary++;
        for (Size i = 0; i < annealStep.size(); i++)
            annealStep[i]++;

        //Reanneal if necessary
        if (kReAnneal == reAnnealSteps_) {
            kReAnneal = 0;
            reannealing_(annealStep, currentPoint, currentValue, currentTemperature);
        }
        kReAnneal++;

        //Reset if necessary
        if (kReset == resetSteps_) {
            kReset = 0;
            switch (resetScheme_) {
              case NoResetScheme:
                break;
              case ResetToBestPoint:
                //reset to the best point found so far
                currentPoint = bestPoint;
                currentValue = bestValue;
                break;
              case ResetToOrigin:
                //reset to the starting point
                currentPoint = startingPoint;
                currentValue = startingValue;
                break;
            }
        }
        kReset++;

        //Update the current temperature according to current step
        temperature_(currentTemperature, currentTemperature, annealStep);

        //Check if temperature condition is breached, i.e. every component
        //of the temperature has fallen below the end temperature
        temperatureBreached = true;
        for (Size i = 0; i < n; i++)
            temperatureBreached = temperatureBreached && currentTemperature[i] < endTemperature_;
    }

    //Change end criteria type if appropriate
    if (k > maxK)
        ecType = EndCriteria::MaxIterations;
    else if (kStationary > maxKStationary)
        ecType = EndCriteria::StationaryPoint;

    //Set result to best point
    P.setCurrentValue(bestPoint);
    P.setFunctionValue(bestValue);
    return ecType;
}
EndCriteria::Type Simplex::minimize(Problem& P, const EndCriteria& endCriteria) {
    // set up of the problem
    //Real ftol = endCriteria.functionEpsilon();    // end criteria on f(x) (see Numerical Recipes in C++, p.410)
    Real xtol = endCriteria.rootEpsilon();          // end criteria on x (see GSL v. 1.9, http://www.gnu.org/software/gsl/)
    Size maxStationaryStateIterations_ = endCriteria.maxStationaryStateIterations();
    EndCriteria::Type ecType = EndCriteria::None;
    P.reset();
    Array x_ = P.currentValue();
    Integer iterationNumber_ = 0;

    // Initialize vertices of the simplex
    bool end = false;
    Size n = x_.size(), i;
    vertices_ = std::vector<Array>(n+1, x_);
    for (i=0; i<n; i++) {
        Array direction(n, 0.0);
        direction[i] = 1.0;
        P.constraint().update(vertices_[i+1], direction, lambda_);
    }
    // Initialize function values at the vertices of the simplex
    values_ = Array(n+1, 0.0);
    for (i=0; i<=n; i++)
        values_[i] = P.value(vertices_[i]);

    // Loop looking for minimum
    do {
        sum_ = Array(n, 0.0);
        Size i;
        for (i=0; i<=n; i++)
            sum_ += vertices_[i];

        // Determine the best (iLowest), worst (iHighest)
        // and 2nd worst (iNextHighest) vertices
        Size iLowest = 0;
        Size iHighest, iNextHighest;
        if (values_[0] < values_[1]) {
            iHighest = 1;
            iNextHighest = 0;
        } else {
            iHighest = 0;
            iNextHighest = 1;
        }
        for (i=1; i<=n; i++) {
            if (values_[i] > values_[iHighest]) {
                iNextHighest = iHighest;
                iHighest = i;
            } else {
                if ((values_[i] > values_[iNextHighest]) && i != iHighest)
                    iNextHighest = i;
            }
            if (values_[i] < values_[iLowest])
                iLowest = i;
        }

        // Now compute accuracy, update iteration number and check end criteria
        //// Numerical Recipes exit strategy on fx (see NR in C++, p.410)
        //Real low = values_[iLowest];
        //Real high = values_[iHighest];
        //Real rtol = 2.0*std::fabs(high - low)/
        //    (std::fabs(high) + std::fabs(low) + QL_EPSILON);
        //++iterationNumber_;
        //if (rtol < ftol ||
        //    endCriteria.checkMaxIterations(iterationNumber_, ecType)) {
        // GSL exit strategy on x (see GSL v. 1.9, http://www.gnu.org/software/gsl/)
        Real simplexSize = computeSimplexSize(vertices_);
        ++iterationNumber_;
        if (simplexSize < xtol ||
            endCriteria.checkMaxIterations(iterationNumber_, ecType)) {
            endCriteria.checkStationaryPoint(0.0, 0.0,
                maxStationaryStateIterations_, ecType); // PC: this is probably not meant like this? Use a separate counter?
            endCriteria.checkMaxIterations(iterationNumber_, ecType);
            x_ = vertices_[iLowest];
            Real low = values_[iLowest];
            P.setFunctionValue(low);
            P.setCurrentValue(x_);
            return ecType;
        }

        // If end criteria is not met, continue
        Real factor = -1.0;
        Real vTry = extrapolate(P, iHighest, factor);
        if ((vTry <= values_[iLowest]) && (factor == -1.0)) {
            factor = 2.0;
            extrapolate(P, iHighest, factor);
        } else if (std::fabs(factor) > QL_EPSILON) {
            if (vTry >= values_[iNextHighest]) {
                Real vSave = values_[iHighest];
                factor = 0.5;
                vTry = extrapolate(P, iHighest, factor);
                if (vTry >= vSave && std::fabs(factor) > QL_EPSILON) {
                    // Shrink the whole simplex towards the best vertex
                    for (Size i=0; i<=n; i++) {
                        if (i != iLowest) {
                            #if defined(QL_ARRAY_EXPRESSIONS)
                            vertices_[i] = 0.5*(vertices_[i] + vertices_[iLowest]);
                            #else
                            vertices_[i] += vertices_[iLowest];
                            vertices_[i] *= 0.5;
                            #endif
                            values_[i] = P.value(vertices_[i]);
                        }
                    }
                }
            }
        }
        // If we can't extrapolate given the constraints, exit
        if (std::fabs(factor) <= QL_EPSILON) {
            x_ = vertices_[iLowest];
            Real low = values_[iLowest];
            P.setFunctionValue(low);
            P.setCurrentValue(x_);
            return EndCriteria::StationaryFunctionValue;
        }
    } while (end == false);
    QL_FAIL("optimization failed: unexpected behaviour");
}
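// --------------------------------------------------------------------------
// Usage sketch (not part of the library source): every minimizer in this
// section follows the same driver protocol -- wrap the objective in a
// Problem together with a Constraint and a starting Array, construct an
// OptimizationMethod, and call minimize() with an EndCriteria instance.
// The "SphereCost" class below is a hypothetical cost function introduced
// only for illustration; the QuantLib types used (CostFunction,
// NoConstraint, Problem, EndCriteria, Simplex) are the library's own.
// --------------------------------------------------------------------------
#include <ql/math/array.hpp>
#include <ql/math/optimization/costfunction.hpp>
#include <ql/math/optimization/constraint.hpp>
#include <ql/math/optimization/problem.hpp>
#include <ql/math/optimization/endcriteria.hpp>
#include <ql/math/optimization/simplex.hpp>

namespace {

    using namespace QuantLib;

    // Hypothetical cost function: f(x) = (x0 - 1)^2 + (x1 - 2)^2
    class SphereCost : public CostFunction {
      public:
        Real value(const Array& x) const override {
            Array r = values(x);
            return DotProduct(r, r);
        }
        Array values(const Array& x) const override {
            Array r(2);
            r[0] = x[0] - 1.0;
            r[1] = x[1] - 2.0;
            return r;
        }
    };

    void simplexUsageSketch() {
        SphereCost cost;
        NoConstraint constraint;              // Simplex does not require bounds
        Array initial(2, 0.0);                // starting point
        Problem problem(cost, constraint, initial);

        // maxIterations, maxStationaryStateIterations,
        // rootEpsilon, functionEpsilon, gradientNormEpsilon
        EndCriteria endCriteria(1000, 100, 1e-8, 1e-8, 1e-8);

        Simplex simplex(0.1);                 // lambda_: initial simplex edge length
        EndCriteria::Type ec = simplex.minimize(problem, endCriteria);

        Array minimum = problem.currentValue();   // expected near (1, 2)
        Real minValue = problem.functionValue();
        (void)ec; (void)minimum; (void)minValue;
    }

}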
EndCriteria::Type LineSearchBasedMethod::minimize(Problem& P, const EndCriteria& endCriteria) {
    // Initializations
    Real ftol = endCriteria.functionEpsilon();
    Size maxStationaryStateIterations_ = endCriteria.maxStationaryStateIterations();
    EndCriteria::Type ecType = EndCriteria::None;   // reset end criteria
    P.reset();                                      // reset problem
    Array x_ = P.currentValue();                    // store the starting point
    Size iterationNumber_ = 0;
    // dimension line search
    lineSearch_->searchDirection() = Array(x_.size());
    bool done = false;

    // function and squared norm of gradient values
    Real fnew, fold, gold2;
    Real fdiff;
    // classical initial value for line-search step
    Real t = 1.0;
    // Set gradient g at the size of the optimization problem
    // search direction
    Size sz = lineSearch_->searchDirection().size();
    Array prevGradient(sz), d(sz), sddiff(sz), direction(sz);
    // Initialize cost function, gradient prevGradient and search direction
    P.setFunctionValue(P.valueAndGradient(prevGradient, x_));
    P.setGradientNormValue(DotProduct(prevGradient, prevGradient));
    lineSearch_->searchDirection() = -prevGradient;

    bool first_time = true;
    // Loop over iterations
    do {
        // Linesearch
        if (!first_time)
            prevGradient = lineSearch_->lastGradient();
        t = (*lineSearch_)(P, ecType, endCriteria, t);
        // don't throw: it can fail just because maxIterations exceeded
        //QL_REQUIRE(lineSearch_->succeed(), "line-search failed!");
        if (lineSearch_->succeed()) {
            // Updates

            // New point
            x_ = lineSearch_->lastX();
            // New function value
            fold = P.functionValue();
            P.setFunctionValue(lineSearch_->lastFunctionValue());
            // New gradient and search direction vectors

            // orthogonalization coef
            gold2 = P.gradientNormValue();
            P.setGradientNormValue(lineSearch_->lastGradientNorm2());
            // conjugate gradient search direction
            direction = getUpdatedDirection(P, gold2, prevGradient);
            sddiff = direction - lineSearch_->searchDirection();
            lineSearch_->searchDirection() = direction;

            // Now compute accuracy and check end criteria
            // Numerical Recipes exit strategy on fx (see NR in C++, p.423)
            fnew = P.functionValue();
            fdiff = 2.0*std::fabs(fnew-fold) /
                    (std::fabs(fnew) + std::fabs(fold) + QL_EPSILON);
            if (fdiff < ftol ||
                endCriteria.checkMaxIterations(iterationNumber_, ecType)) {
                endCriteria.checkStationaryFunctionValue(0.0, 0.0,
                    maxStationaryStateIterations_, ecType);
                endCriteria.checkMaxIterations(iterationNumber_, ecType);
                return ecType;
            }
            P.setCurrentValue(x_);      // update problem current value
            ++iterationNumber_;         // Increase iteration number
            first_time = false;
        } else {
            done = true;
        }
    } while (!done);
    P.setCurrentValue(x_);
    return ecType;
}
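// --------------------------------------------------------------------------
// Usage sketch (not part of the library source): LineSearchBasedMethod drives
// the problem through P.valueAndGradient(), so gradient-based minimizers such
// as ConjugateGradient or SteepestDescent benefit from an analytic gradient.
// CostFunction falls back to a finite-difference gradient when none is
// supplied; the hypothetical "QuadraticWithGradient" class below overrides
// gradient() explicitly for illustration.
// --------------------------------------------------------------------------
#include <ql/math/array.hpp>
#include <ql/math/optimization/costfunction.hpp>
#include <ql/math/optimization/constraint.hpp>
#include <ql/math/optimization/problem.hpp>
#include <ql/math/optimization/endcriteria.hpp>
#include <ql/math/optimization/conjugategradient.hpp>

namespace {

    using namespace QuantLib;

    // Hypothetical cost function f(x) = x0^2 + 4*x1^2 with analytic gradient
    class QuadraticWithGradient : public CostFunction {
      public:
        Real value(const Array& x) const override {
            return x[0]*x[0] + 4.0*x[1]*x[1];
        }
        Array values(const Array& x) const override {
            Array r(2);
            r[0] = x[0];
            r[1] = 2.0*x[1];
            return r;
        }
        void gradient(Array& grad, const Array& x) const override {
            grad[0] = 2.0*x[0];   // df/dx0
            grad[1] = 8.0*x[1];   // df/dx1
        }
    };

    void conjugateGradientUsageSketch() {
        QuadraticWithGradient cost;
        NoConstraint constraint;
        Array initial(2, 1.0);
        Problem problem(cost, constraint, initial);
        EndCriteria endCriteria(1000, 100, 1e-8, 1e-8, 1e-8);

        ConjugateGradient cg;     // a default line search is constructed internally
        EndCriteria::Type ec = cg.minimize(problem, endCriteria);
        (void)ec;                 // minimum expected near the origin
    }

}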
EndCriteria::Type DifferentialEvolution::minimize(Problem& P, const EndCriteria& endCriteria) {
    EndCriteria::Type ecType = EndCriteria::MaxIterations;
    QL_REQUIRE(P.currentValue().size() == nParam_,
               "Number of parameters mismatch between problem and DE optimizer");
    P.reset();
    init();

    // Evaluate the initial population and locate its best member
    Real bestCost = QL_MAX_REAL;
    Size bestPop = 0;
    for (Size p = 0; p < nPop_; ++p) {
        Array tmp(currGen_[p].pop_);
        try {
            currGen_[p].cost_ = P.costFunction().value(tmp);
        } catch (Error&) {
            currGen_[p].cost_ = QL_MAX_REAL;
        }
        if (currGen_[p].cost_ < bestCost) {
            bestPop = p;
            bestCost = currGen_[p].cost_;
        }
    }

    Size lastChange = 0;
    Size lastParamChange = 0;
    for (Size i=0; i<endCriteria.maxIterations(); ++i) {
        Size newBestPop = bestPop;
        Real newBestCost = bestCost;

        for (Size p=0; p<nPop_; ++p) {
            // Find 3 different populations randomly
            Size r1;
            do {
                r1 = static_cast<Size>(uniformRng_.nextInt32() % nPop_);
            } while (r1 == p || r1 == bestPop);

            Size r2;
            do {
                r2 = static_cast<Size>(uniformRng_.nextInt32() % nPop_);
            } while (r2 == p || r2 == bestPop || r2 == r1);

            Size r3;
            do {
                r3 = static_cast<Size>(uniformRng_.nextInt32() % nPop_);
            } while (r3 == p || r3 == bestPop || r3 == r1 || r3 == r2);

            // Start the candidate from the current member
            for (Size j=0; j<nParam_; ++j) {
                nextGen_[p].pop_[j] = currGen_[p].pop_[j];
            }

            // Mutate and cross over a random run of parameters, clipped to the bounds
            Size j = static_cast<Size>(uniformRng_.nextInt32() % nParam_);
            Size L = 0;
            do {
                const double tmp =
                      currGen_[      p].pop_[j] * a0_
                    + currGen_[     r1].pop_[j] * a1_
                    + currGen_[     r2].pop_[j] * a2_
                    + currGen_[     r3].pop_[j] * a3_
                    + currGen_[bestPop].pop_[j] * aBest_;
                nextGen_[p].pop_[j] = std::min(maxParams_[j], std::max(minParams_[j], tmp));
                j = (j+1) % nParam_;
                ++L;
            } while ((uniformRng_.nextReal() < CR_) && (L < nParam_));

            // Evaluate the new population
            Array tmp(nextGen_[p].pop_);
            try {
                nextGen_[p].cost_ = P.costFunction().value(tmp);
            } catch (Error&) {
                nextGen_[p].cost_ = QL_MAX_REAL;
            }

            // Not better, discard it and keep the old one.
            if (nextGen_[p].cost_ >= currGen_[p].cost_) {
                nextGen_[p] = currGen_[p];
            }
            // Better, keep it.
            else {
                // New best?
                if (nextGen_[p].cost_ < newBestCost) {
                    newBestPop = p;
                    newBestCost = nextGen_[p].cost_;
                }
            }
        }

        // Track the last iterations at which the best cost or the best point moved
        if (std::abs(newBestCost - bestCost) > endCriteria.functionEpsilon()) {
            lastChange = i;
        }
        const Array absDiff = Abs(nextGen_[newBestPop].pop_ - currGen_[bestPop].pop_);
        if (*std::max_element(absDiff.begin(), absDiff.end()) > endCriteria.rootEpsilon()) {
            lastParamChange = i;
        }

        bestPop = newBestPop;
        bestCost = newBestCost;
        currGen_ = nextGen_;

        if (i - lastChange > endCriteria.maxStationaryStateIterations()) {
            ecType = EndCriteria::StationaryFunctionValue;
            break;
        }
        if (i - lastParamChange > endCriteria.maxStationaryStateIterations()) {
            ecType = EndCriteria::StationaryPoint;
            break;
        }

        if (adaptive_) adaptParameters();
    }

    const Array res(currGen_[bestPop].pop_);
    P.setCurrentValue(res);
    P.setFunctionValue(bestCost);
    return ecType;
}
EndCriteria::Type FireflyAlgorithm::minimize(Problem &P, const EndCriteria &endCriteria) {
    QL_REQUIRE(!P.constraint().empty(), "Firefly Algorithm is a constrained optimizer");
    EndCriteria::Type ecType = EndCriteria::None;
    P.reset();
    Size iteration = 0;
    Size iterationStat = 0;
    Size maxIteration = endCriteria.maxIterations();
    Size maxIStationary = endCriteria.maxStationaryStateIterations();

    startState(P, endCriteria);
    bool isFA = Mfa_ > 0;

    //Variables for DE
    Array z(N_, 0.0);
    Size indexBest, indexR1, indexR2;

    //Set best value & position
    Real bestValue = values_[0].first;
    Size bestPosition = 0;
    for (Size i = 1; i < M_; i++) {
        if (values_[i].first < bestValue) {
            bestPosition = i;
            bestValue = values_[i].first;
        }
    }
    Array bestX = x_[bestPosition];

    //Run optimization
    do {
        iteration++;
        iterationStat++;
        //Check if stopping criteria is met
        if (iteration > maxIteration || iterationStat > maxIStationary)
            break;

        //Divide into two subpopulations
        //First sort values
        std::sort(values_.begin(), values_.end());

        //Differential evolution
        if (Mfa_ < M_) {
            indexBest = values_[0].second;
            for (Size i = Mfa_; i < M_; i++) {
                if (!isFA) {
                    //Pure DE requires random index
                    indexBest = drawIndex_();
                }
                //Look up the best point per iteration; a reference bound once
                //outside the loop cannot be re-seated to a new index
                const Array& xBest = x_[indexBest];
                do {
                    indexR1 = drawIndex_();
                } while (indexR1 == indexBest);
                do {
                    indexR2 = drawIndex_();
                } while (indexR2 == indexBest || indexR2 == indexR1);

                Size index = values_[i].second;
                Array& x = x_[index];
                Array& xR1 = x_[indexR1];
                Array& xR2 = x_[indexR2];
                for (Size j = 0; j < N_; j++) {
                    if (rng_.nextReal() <= crossover_) {
                        //Change x[j] according to crossover
                        z[j] = xBest[j] + mutation_*(xR1[j] - xR2[j]);
                    } else {
                        z[j] = x[j];
                    }
                }
                Real val = P.value(z);
                if (val < values_[index].first) {
                    //Accept new point
                    x = z;
                    values_[index].first = val;
                    //mark best
                    if (val < bestValue) {
                        bestValue = val;
                        bestX = x;
                        iterationStat = 0;
                    }
                }
            }
        }

        //Firefly algorithm
        if (isFA) {
            //According to the intensity, determine best global position
            intensity_->findBrightest();
            //Prepare random walk
            randomWalk_->walk();

            //Loop over particles
            for (Size i = 0; i < Mfa_; i++) {
                Size index = values_[i].second;
                Array& x = x_[index];
                Array& xI = xI_[index];
                Array& xRW = xRW_[index];

                //Loop over dimensions
                for (Size j = 0; j < N_; j++) {
                    //Update position
                    x[j] += xI[j] + xRW[j];
                    //Enforce bounds on positions
                    if (x[j] < lX_[j]) {
                        x[j] = lX_[j];
                    } else if (x[j] > uX_[j]) {
                        x[j] = uX_[j];
                    }
                }
                //Evaluate x & mark best
                values_[index].first = P.value(x);
                if (values_[index].first < bestValue) {
                    bestValue = values_[index].first;
                    bestX = x;
                    iterationStat = 0;
                }
            }
        }
    } while (true);

    if (iteration > maxIteration)
        ecType = EndCriteria::MaxIterations;
    else
        ecType = EndCriteria::StationaryPoint;

    //Set result to best point
    P.setCurrentValue(bestX);
    P.setFunctionValue(bestValue);
    return ecType;
}