Example #1
void NNLayer::Calculate()
{
	ASSERT( m_pPrevLayer != NULL );
	
	VectorNeurons::iterator nit;
	VectorConnections::iterator cit;
	
	double dSum;
	
	for ( nit = m_Neurons.begin(); nit != m_Neurons.end(); nit++ )
	{
		NNNeuron& n = *(*nit);  // to ease the notation
		
		cit = n.m_Connections.begin();
		
		ASSERT( (*cit).WeightIndex < m_Weights.size() );
		
		// The first connection carries the bias: its weight is added
		// directly and its neuron index is ignored
		dSum = m_Weights[ (*cit).WeightIndex ]->value;
		
		// Accumulate the weighted outputs of the previous layer's neurons
		for ( cit++; cit != n.m_Connections.end(); cit++ )
		{
			ASSERT( (*cit).WeightIndex < m_Weights.size() );
			ASSERT( (*cit).NeuronIndex < m_pPrevLayer->m_Neurons.size() );
			
			dSum += ( m_Weights[ (*cit).WeightIndex ]->value ) * 
				( m_pPrevLayer->m_Neurons[ (*cit).NeuronIndex ]->output );
		}
		
		// Squash the weighted sum through the activation function
		n.output = SIGMOID( dSum );
	}
}
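Both snippets call a SIGMOID routine that neither example defines. Below is a minimal sketch assuming the standard logistic function; the original projects may define SIGMOID differently (a scaled tanh such as 1.7159 * tanh(x * 2.0 / 3.0) is also common in code of this vintage):

#include <cmath>

// Hypothetical definition of the SIGMOID activation used above.
// The logistic function squashes any real net input into (0, 1).
inline double SIGMOID(double x)
{
	return 1.0 / (1.0 + std::exp(-x));
}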
Example #2
// Calculates the activation of each neuron for a particular input `_sample`
void Network::getActivation(const std::vector<double> &_sample) {
	// The input layer's activations are simply the input sample
	for (int i = 0; i < numInputs; i++) {
		activation[0][i] = _sample[i];
	}

	// Propagate through the hidden layer (l = 1) and the output layer (l = 2)
	for (int l = 1; l < 3; l++) {
		for (unsigned int i = 0; i < input[l].size(); i++) {
			// Weight 0 is the bias, applied with a fixed input of -1
			input[l][i] = -1 * weights[l-1][i][0];
			// The remaining weights are offset by one to skip the bias slot
			for (unsigned int j = 0; j < input[l-1].size(); j++) {
				input[l][i] += weights[l-1][i][j+1] * activation[l-1][j];
			}
			// Squash the net input through the activation function
			activation[l][i] = SIGMOID(input[l][i]);
		}
	}
}
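To make example #2's indexing convention concrete, here is a small, self-contained sketch of the same per-neuron computation with hypothetical sizes and weights; the weight layout (bias in slot 0, applied with a fixed input of -1, input weights offset by one) mirrors the loop above, and the logistic SIGMOID is the assumption sketched earlier:

#include <cmath>
#include <cstddef>
#include <iostream>
#include <vector>

int main()
{
	// Activations of the previous layer (hypothetical values)
	std::vector<double> prev = { 0.5, -0.25, 1.0 };

	// Two neurons, each with 1 bias weight + 3 input weights
	std::vector<std::vector<double>> weights = {
		{  0.1, 0.4, -0.6,  0.2 },
		{ -0.3, 0.7,  0.1, -0.5 }
	};

	for (std::size_t i = 0; i < weights.size(); i++) {
		double net = -1 * weights[i][0];            // bias slot, threshold convention
		for (std::size_t j = 0; j < prev.size(); j++) {
			net += weights[i][j + 1] * prev[j];     // skip the bias slot
		}
		double out = 1.0 / (1.0 + std::exp(-net)); // logistic SIGMOID (assumed)
		std::cout << "neuron " << i << ": activation = " << out << "\n";
	}
	return 0;
}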