Example #1
void
MPTreeMgr::writeAllNet( ostream & out ) const
{
	int i , n ;
	out << "Print all nets:\n";
	for ( i = 0 , n = _allNet.size() ; i < n ; ++i ) {
		out << " >  i = " << i << endl;
		printNet( out , _allNet[i] );
	}
}
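
Example #1 is a member function shown without its class. For context, here is a minimal sketch of a declaration it could compile against; the Net type, the printNet helper, and the _allNet container are all assumptions, since none of them is defined in the snippet:

#include <iostream>
#include <vector>
using namespace std;

class Net; // hypothetical: the snippet never shows the element type

class MPTreeMgr {
public:
	void writeAllNet( ostream & out ) const;           // the function above
private:
	void printNet( ostream & out , Net * net ) const; // assumed helper signature
	vector< Net * > _allNet;                           // assumed container type
};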
Example #2
void updateDeltas(NeuralNetwork* net, double* target) {
	unsigned int layer, size;
	// Bail out early on null arguments.
	if(!net || !target) {
		return;
	}
	// Start at the output layer.
	layer = net->layers - 1;
	size = net->layerSizes[layer];
	//delta_l = (activation_l - target)*sigma'(preActivation_l)
	subtract(target, net->activations[layer], net->biasDelta[layer - 1], size);
	applyOnEach(net->preActivations[layer], net->scratchPaper, net->activationFunctionDerivative, size);
	hadamardProduct(net->biasDelta[layer - 1], net->scratchPaper, net->biasDelta[layer - 1], size);
	//dC/db_l = delta_l
#define DEBUG 1
#if DEBUG
	puts("########################################");
	printf("layer %d, size %d\n", layer, size);
	puts("TARGET");
	printVector(stdout,target,size);
	puts("PREACTIVATIONS");
	printVector(stdout,net->preActivations[layer],size);
	puts("sigma'(preActivations)");
	printVector(stdout,net->scratchPaper,size);
	puts("bias delta");
	printVector(stdout,net->biasDelta[layer - 1],size);
	printf("error: %f\n", magnitude(net->biasDelta[layer - 1], size));
	puts("CURRENT STATE");
	printNet(stdout, net, 1);
#endif
	// Work backwards for each layer. A while loop (rather than the
	// do-while sketched earlier) avoids unsigned underflow of layer
	// when the net has no hidden layers.
	while(layer > 1) {
		layer--;
		size = net->layerSizes[layer];
		//delta_l = (W_l+1^T * delta_l+1) hprod sigma'(preActivation_l)
		applyOnEach(net->preActivations[layer], net->scratchPaper, net->activationFunctionDerivative, size);
		matrixTransposeVectorProduct(net->weights[layer], net->biasDelta[layer],
			net->biasDelta[layer - 1], size, net->layerSizes[layer + 1]);
		hadamardProduct(net->biasDelta[layer - 1], net->scratchPaper, net->biasDelta[layer - 1], size);
	}
	//dC/db_l = delta_l for every layer; the weight gradient
	//dC/dw_l = delta_l * activation_(l-1)^T is not computed here
	//(a sketch follows the function).
}
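
The closing comments note that only the bias gradients (dC/db_l = delta_l) are produced; the weight gradient dC/dw_l = delta_l * activation_(l-1)^T is still missing. Below is a minimal sketch of that step, reusing the fields assumed above and adding a hypothetical weightDelta array (row-major, one flat matrix per weight layer) that does not appear in the original. It assumes updateDeltas has already filled every biasDelta entry:

void updateWeightDeltas(NeuralNetwork* net) {
	unsigned int l, i, j;
	if(!net) {
		return;
	}
	// weights[l] maps layer l to layer l+1, and biasDelta[l] holds
	// delta_(l+1), following the indexing used in updateDeltas.
	for(l = 0; l + 1 < net->layers; l++) {
		unsigned int rows = net->layerSizes[l + 1]; // length of delta_(l+1)
		unsigned int cols = net->layerSizes[l];     // length of activation_l
		for(i = 0; i < rows; i++) {
			for(j = 0; j < cols; j++) {
				// dC/dw_ij = delta_i * activation_j (outer product);
				// weightDelta is hypothetical.
				net->weightDelta[l][i * cols + j] =
					net->biasDelta[l][i] * net->activations[l][j];
			}
		}
	}
}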