Example #1
void RpropMinus::step(ObjectiveFunctionType const& objectiveFunction) {
	for (size_t i = 0; i < m_parameterSize; i++)
	{
		double p = m_best.point(i);
		// Rprop- adaptation: grow the step size while the partial derivative
		// keeps its sign, shrink it after a sign change
		if (m_derivative(i) * m_oldDerivative(i) > 0)
		{
			m_delta(i) = std::min(m_maxDelta, m_increaseFactor * m_delta(i));
		}
		else if (m_derivative(i) * m_oldDerivative(i) < 0)
		{
			m_delta(i) = std::max(m_minDelta, m_decreaseFactor * m_delta(i));
		}
		m_best.point(i) -= m_delta(i) * boost::math::sign(m_derivative(i));
		if (!objectiveFunction.isFeasible(m_best.point))
		{
			// revert the coordinate, shrink its step size, and reset the
			// stored derivative so the next adaptation starts from scratch
			m_best.point(i) = p;
			m_delta(i) *= m_decreaseFactor;
			m_oldDerivative(i) = 0.0;
		}
		else
		{
			m_oldDerivative(i) = m_derivative(i);
		}
	}
	//evaluate the new point
	m_best.value = objectiveFunction.evalDerivative(m_best.point,m_derivative);
}
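The update direction comes from boost::math::sign (from <boost/math/special_functions/sign.hpp>), which returns -1, 0, or +1, so a zero derivative leaves a coordinate untouched. A minimal standalone check of that dependency, not part of the library:

#include <boost/math/special_functions/sign.hpp>
#include <iostream>

int main() {
	std::cout << boost::math::sign(-3.5) << ' '   // -1
	          << boost::math::sign(0.0)  << ' '   // 0
	          << boost::math::sign(2.0)  << '\n'; // 1
}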
Example #2
void RpropMinus::init(
	ObjectiveFunctionType & objectiveFunction, 
	SearchPointType const& startingPoint, 
	double initDelta
) {
	checkFeatures(objectiveFunction);
	objectiveFunction.init();
	
	m_parameterSize = startingPoint.size();
	m_delta.resize(m_parameterSize);
	m_oldDerivative.resize(m_parameterSize);

	std::fill(m_delta.begin(),m_delta.end(),initDelta);
	m_oldDerivative.clear(); // uBLAS-style clear(): zeroes all elements, does not resize to empty
	m_best.point = startingPoint;
	//evaluate initial point
	m_best.value = objectiveFunction.evalDerivative(m_best.point,m_derivative);
}
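A hypothetical driver combining the two methods above (the objective function f, the starting point start, the initial delta of 0.01, and the iteration budget are all assumptions for illustration; only the signatures shown in Examples #1 and #2 are used):

// sketch, not part of the library
RpropMinus optimizer;
optimizer.init(f, start, 0.01);   // f: ObjectiveFunctionType, start: SearchPointType (assumed)
for (size_t t = 0; t < 1000; ++t) {
	optimizer.step(f);            // one coordinate-wise Rprop- update per call
}
// after the loop, m_best holds the best point/value pair maintained by the snippets above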
Example #3
void AbstractLineSearchOptimizer::init(ObjectiveFunctionType& objectiveFunction, SearchPointType const& startingPoint) {
	checkFeatures(objectiveFunction);
	objectiveFunction.init();

	m_linesearch.init(objectiveFunction);
	m_dimension = startingPoint.size();
	
	m_best.point = startingPoint;
	m_best.value = objectiveFunction.evalDerivative(m_best.point,m_derivative);

	// Get space for the different vectors we store.
	m_lastPoint.resize(m_dimension);
	m_searchDirection = -m_derivative;
	
	// a fixed step length of 1.0 might be far too large, so scale it
	// by the inverse L1 norm of the gradient instead
	m_initialStepLength = 0.0;
	for (size_t i = 0; i < m_derivative.size(); ++i)
		m_initialStepLength += std::abs(m_derivative(i));
	m_initialStepLength = std::min(1.0, 1.0 / m_initialStepLength);
	
	initModel();
}
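In effect the initial step length is min(1, 1/||g||_1), the inverse L1 norm of the gradient capped at 1.0 (an all-zero gradient yields 1.0, since 1/0 is +inf). A standalone sketch of the same heuristic; the function name and the plain std::vector input are illustrative:

#include <algorithm>
#include <cmath>
#include <vector>

// min(1, 1 / sum_i |g_i|): take small initial steps when the gradient is steep
double initialStepLength(std::vector<double> const& g) {
	double l1 = 0.0;
	for (double gi : g) l1 += std::abs(gi);
	return std::min(1.0, 1.0 / l1);
}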
Example #4
void NoisyRprop::step(const ObjectiveFunctionType& objectiveFunction)
{
	size_t pc = m_best.point.size();
	ObjectiveFunctionType::FirstOrderDerivative dedw;

	// compute noisy error function value and derivative
	m_best.value = objectiveFunction.evalDerivative(m_best.point,dedw);

	// loop over all coordinates
	m_gradient += dedw;
	for (size_t p=0; p<pc; p++)
	{
		m_current[p]++;
		if (m_current[p] == m_numberOfAverages[p])
		{
			// basic Rprop algorithm
			double value = m_best.point(p);
			if (m_gradient(p) < 0.0)
			{
				// negative gradient: step right and record the move
				m_best.point(p) += m_delta(p);
				m_position[p]++;
				m_toTheRight[p]++;
				m_rightsum(p) -= m_gradient(p);
			}
			else if (m_gradient(p) > 0.0)
			{
				// positive gradient: step left and record the move
				m_best.point(p) -= m_delta(p);
				m_position[p]--;
				m_leftsum(p) += m_gradient(p);
			}

			// collect leftmost and rightmost of 100 position samples
			if (m_iteration[p] == m_sampleIteration[p])
			{
				if (m_position[p] > m_rightmost[p]) m_rightmost[p] = m_position[p];
				if (m_position[p] < m_leftmost[p]) m_leftmost[p] = m_position[p];
				m_sample[p]++;
				m_sampleIteration[p] = m_sample[p] * m_episodeLength[p] / 100;
				if (m_sampleIteration[p] <= m_iteration[p]) m_sampleIteration[p] = m_iteration[p] + 1;
			}

			if (!objectiveFunction.isFeasible(m_best.point))
			{
				// reject the move if it leaves the feasible region
				m_best.point(p) = value;
			}

			// strategy adaptation
			m_iteration[p]++;
			if (m_iteration[p] == m_episodeLength[p])
			{
				// Compute the normalization of the toTheRight statistics
				// and compare it to its expected normal approximation
				unsigned int N = m_episodeLength[p];
				unsigned int newN = N;
				double normalized = std::fabs((m_toTheRight[p] - 0.5 * N) / std::sqrt((double)N));
				if (normalized > 1.64)				// 90% quantile
				{
					// increase the step size
					m_delta(p) *= 2.0;
					newN /= 2;
				}
				else
				{
					if (normalized < 0.25)			// 20% quantile
					{
						// decrease the step size
						m_delta(p) /= 2.0;
						newN *= 2;
					}

					// Compute the normalization of the spread statistics
					// and compare it to its expected distribution

					// compute approximate quantiles of the spread statistics
					int spread = m_rightmost[p] - m_leftmost[p];
					normalized = spread / std::sqrt((double)N);

					if (normalized < 0.95)			// 10% quantile
					{
						// decrease the number of iterations
						newN /= 2;
					}
					else if (normalized > 1.77)		// 75% quantile
					{
						// increase the number of iterations
						newN *= 2;
					}

					// Compute the normalization of the asymmetry statistics
					// and compare it to its expected distribution
					normalized = std::fabs(m_rightsum(p) / m_toTheRight[p] - m_leftsum(p) / (N - m_toTheRight[p]))
							/ (m_rightsum(p) + m_leftsum(p)) * N * std::sqrt((double)N);

					if (normalized < 0.29)			// 15% quantile
					{
						// decrease averaging
						if (m_numberOfAverages[p] > 1)
						{
							m_numberOfAverages[p] /= 2;
							newN *= 2;
						}
					}
					else if (normalized > 2.5)		// 90% quantile
					{
						// increase averaging
						m_numberOfAverages[p] *= 2;
						newN /= 2;
					}
				}
				if (newN < m_minEpisodeLength) newN = m_minEpisodeLength;
				m_episodeLength[p] = newN;

				// reset episode
				m_iteration[p] = 0;
				m_sample[p] = 0;
				m_sampleIteration[p] = m_minEpisodeLength / 100;
				m_position[p] = 0;
				m_leftmost[p] = 0;
				m_rightmost[p] = 0;
				m_toTheRight[p] = 0;
				m_leftsum(p) = 0.0;
				m_rightsum(p) = 0.0;
			}

			m_current[p] = 0;
			m_gradient(p) = 0.0;
		}
	}
}
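The episode-end adaptation treats m_toTheRight as the number of right-moves out of N steps of a random walk; its deviation from N/2, divided by sqrt(N), is compared against the fixed quantile thresholds shown above. A standalone sketch of that first test; the values of N, toTheRight, and delta are made up for illustration:

#include <cmath>
#include <iostream>

int main() {
	unsigned int N = 200;          // episode length
	unsigned int toTheRight = 120; // right-moves observed in the episode
	double delta = 0.1;            // current step size

	// same statistic as in NoisyRprop::step above
	double normalized = std::fabs((toTheRight - 0.5 * N) / std::sqrt((double)N));
	if (normalized > 1.64)          // 90% quantile: increase the step size
		delta *= 2.0;
	else if (normalized < 0.25)     // 20% quantile: decrease the step size
		delta /= 2.0;
	std::cout << "normalized = " << normalized << ", delta = " << delta << "\n";
}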