Example #1
void Backpropagation::trainOnlineCV(Mlp& network, 
	MATRIX& trainingInputs, 
	VECTOR& trainingTargets,
	MATRIX& testInputs,
	VECTOR& testTargets)
{
	// Output buffers, refreshed each epoch so the set-wide MSE can be computed
	VECTOR trainingOutputs(trainingTargets.size(), 0.0);
	VECTOR testOutputs(testTargets.size(), 0.0);
	
	// error, tolerance, iteration, maxIterations, testCount, maxTestCount,
	// epoch, testError and minTestError are Backpropagation members,
	// assumed initialised before this call. Train until the training error
	// falls below tolerance, the test error has failed to improve for
	// maxTestCount consecutive epochs, or the iteration cap is hit.
	while(error > tolerance && testCount < maxTestCount && iteration < maxIterations)
	{
		// Walk outputs and targets in lockstep with the inputs
		VECTOR::iterator output = trainingOutputs.begin();
		VECTOR::iterator target = trainingTargets.begin();
		for(MATRIX::iterator input = trainingInputs.begin(); 
			input != trainingInputs.end(); 
			++input, ++target, ++output)
		{
			// Forward pass, then the signed error for this sample
			*output = network(*input);
			double err = *output - *target;
			
			// Online update: gradients are computed and applied per sample
			getWeightUpdates(network, *input, err);
			applyWeightUpdates(network);
			
			++iteration;
			
			// Global cap on weight updates (also tested in the while
			// condition so the outer loop terminates)
			if(iteration >= maxIterations)
				break;
		}
		
		++epoch;
		
		// Training-set MSE for the epoch just completed
		error = mse(trainingTargets, trainingOutputs);
		
		// Early-stopping using test (cross-validation) error
		testOutputs = network(testInputs);
		testError = mse(testTargets, testOutputs);
		if(testError < minTestError)
		{
			// New best test error: record it, snapshot the weights,
			// and reset the early-stopping patience counter
			minTestError = testError;
			W = network.W;
			V = network.V;
			biasW = network.biasW;
			biasV = network.biasV;
			testCount = 0;
		}
		else
		{
			++testCount;
		}
	}
	
	// Roll back to the snapshot that achieved the lowest test error
	network.W = W;
	network.V = V;
	network.biasW = biasW;
	network.biasV = biasV;
	testError = minTestError;
}
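
Both examples call an mse helper that this excerpt does not include. A minimal sketch of what it presumably computes, assuming VECTOR is a typedef for std::vector<double> (both the typedef and the definition below are assumptions, not taken from the source):

#include <cassert>
#include <cstddef>
#include <cstdio>
#include <vector>

typedef std::vector<double> VECTOR;	// assumed; the real typedef is not shown

// Mean squared error between targets and the corresponding network outputs
double mse(const VECTOR& targets, const VECTOR& outputs)
{
	assert(targets.size() == outputs.size());
	double sum = 0.0;
	for(std::size_t i = 0; i < targets.size(); ++i)
	{
		double diff = outputs[i] - targets[i];
		sum += diff * diff;
	}
	return sum / static_cast<double>(targets.size());
}

int main()
{
	VECTOR targets = { 1.0, 0.0 };
	VECTOR outputs = { 0.9, 0.2 };
	std::printf("%f\n", mse(targets, outputs));	// prints 0.025000
	return 0;
}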
Example #2
void Backpropagation::trainOnline(Mlp& network, MATRIX& inputs, VECTOR& targets)
{
	VECTOR outputs(targets.size(), 0.0);
	// iteration, maxIterations and epoch are Backpropagation members;
	// the iteration cap is only checked between epochs, so each pass
	// over the data runs to completion.
	while(iteration < maxIterations)
	{
		VECTOR::iterator output = outputs.begin();
		VECTOR::iterator target = targets.begin();
		for(MATRIX::iterator input = inputs.begin(); 
			input != inputs.end(); 
			++input, ++target, ++output)
		{
			// Forward pass, then the signed error for this sample
			*output = network(*input);
			double err = *output - *target;
			
			// Online update: gradients are computed and applied per sample
			getWeightUpdates(network, *input, err);
			applyWeightUpdates(network);
			
			++iteration;
		}
		++epoch;
	}
}
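
The bodies of getWeightUpdates and applyWeightUpdates are not shown. For a single linear output unit, the online step they perform between them reduces to the classic delta rule, w -= eta * err * x with err = output - target (the same sign convention as err above). Below is a self-contained sketch of that rule on a toy regression problem; the single-neuron model, the learning rate eta, and the data are illustrative assumptions, since the Mlp here also has a hidden layer whose update is not shown:

#include <cstddef>
#include <cstdio>
#include <vector>

int main()
{
	// Toy data drawn from y = 2x + 1
	std::vector<double> inputs  = { 0.0, 1.0, 2.0, 3.0 };
	std::vector<double> targets = { 1.0, 3.0, 5.0, 7.0 };

	double w = 0.0, bias = 0.0;
	const double eta = 0.05;	// learning rate; value is an assumption

	for(int epoch = 0; epoch < 2000; ++epoch)
	{
		for(std::size_t i = 0; i < inputs.size(); ++i)
		{
			double output = w * inputs[i] + bias;	// forward pass
			double err = output - targets[i];	// signed error, as above
			w    -= eta * err * inputs[i];		// delta rule
			bias -= eta * err;			// bias as a weight on a constant 1 input
		}
	}
	std::printf("w = %f, bias = %f\n", w, bias);	// converges toward w = 2, bias = 1
	return 0;
}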