Example #1
Individual::Individual() : _fitness(0)
{
    Layer hidden;
    for (uint64_t i = 0; i < 2; ++i) {
        hidden._v.push_back(Neuron(5));
    }
    _ann.push_back(hidden);

    Layer last;
    for (uint64_t i = 0; i < 2; ++i) {
        last._v.push_back(Neuron(2));
    }
    _ann.push_back(last);
}
Example #2
void Network::createMultiLayer(int M, int N, double weight, double parameter)
{
    // clear previous network
    _synapses.clear();
    _neurons.clear();
    _ptrInputNeurons.clear();
    _ptrOutputNeurons.clear();

    // reserve memory up front for all neurons and synapses;
    // otherwise reallocation would invalidate the stored pointers
    _neurons.reserve(M*M);
    _synapses.reserve(M*M*M);


    // add input neurons
    for (int i=0; i<M; i++)
    {
        _neurons.push_back(Neuron());
        _neurons[i].init(parameter);
        _ptrInputNeurons.push_back(&_neurons[i]);
    }

    // add the remaining neurons and their synapses,
    // layer by layer
    for (int i=M,k=0; i>N; i--)
    {
        // for all neurons of the sub-layer
        for (int j=0; j<i-1; j++)
        {
            // add a neuron
            _neurons.push_back(Neuron());
            _neurons.back().init(parameter);

            // add synapses from neurons of the main layer to the new neuron
            for (int l=0; l<i; l++)
            {
                _synapses.push_back(Synapse());
                _synapses.back().init(&_neurons[k+l], &_neurons.back(), weight);
            }
        }
        k += i;
    }

    // get output neurons
    for (int i=0; i<N; i++)
    {
        _ptrOutputNeurons.push_back(&_neurons[_neurons.size()-N+i]);
    }
}
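The reserve() calls above are load-bearing: _synapses stores raw pointers into _neurons, so any reallocation of _neurons during construction would leave those pointers dangling. A minimal standalone sketch of the hazard and the fix (illustrative only, not from this project):

#include <iostream>
#include <vector>

int main()
{
    std::vector<int> v;
    v.reserve(100);           // fix: capacity up front, as createMultiLayer does
    v.push_back(1);
    int* p = &v[0];           // safe only because no reallocation can occur now
    for (int i = 0; i < 99; ++i)
        v.push_back(i);       // without reserve(), any push could move the buffer
    std::cout << *p << '\n';  // still valid: prints 1
    return 0;
}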
Example #3
/**
 * Improved constructor (weights supplied)
 * @param vWeights vector of weights
 * @param alpha Learning coefficient for step n
 * @param beta Learning coefficient for step n-1
 */
Layer::Layer(vector<vector<double> > vWeights, double const alpha, double const beta) {
    for (unsigned int i = 0; i < vWeights.size(); i++) {
        neurons.push_back(Neuron(vWeights[i], alpha, beta));
    }
    Layer::alpha = alpha;
    Layer::beta = beta;
}
Example #4
Net::Net(const std::vector<int>& topology) {
	
	// Create 'topology.size' layers
	size_t numLayers = topology.size();
	
	// For each new layer
	for (size_t layerNum = 0; layerNum < numLayers; ++layerNum) {
		
		// Add a new layer
		layers.push_back(Layer());
		
		// Get the size of the next layer, used to create neurons that have numOutputs connections
		size_t numOutputs = layerNum == topology.size() - 1 ? 0 : topology[layerNum + 1];


		// We have a new layer, now fill it with neurons, and
		// add a bias neuron in each layer.
		for (size_t neuronNum = 0; neuronNum <= topology[layerNum]; ++neuronNum) {
			// Add a neuron that knows its output count and its index in the layer
			layers.back().push_back(Neuron(numOutputs, neuronNum));
		}

		// Force the bias node's output to 1.0 (it was the last neuron pushed in this layer):
		layers.back().back().setOutputVal(1.0);
	}
}
Example #5
NeuronLayer::NeuronLayer(int numNeurons, int numLinksPerNeuron) : m_numNeurons(numNeurons)
{
	for(int i=0; i<numNeurons; ++i)
	{
		m_neurons.push_back(Neuron(numLinksPerNeuron));
	}
}
Example #6
// Post Conditions
// - A vector of neurons has been created for each layer of the neural net
// - Each neuron is connected to all neurons in the following layer, so its output feeds the next layer's inputs, except for the last layer, which has no output
// - Each neuron in each layer knows how many outputs it has and its position within its own layer
// - Last neuron created for each layer is initialized to 1.0 since those are bias neurons
Net::Net(unsigned const topology[], unsigned const topology_length)
{
  unsigned total_layers = topology_length;
  accumulated_error_ = 0;

  for (unsigned current_layer = 0; current_layer < total_layers; ++current_layer)
  {
    // initialize a new vector of neurons for each layer
    layers_.emplace_back();

    unsigned current_layer_outputs;

    // if: the loop is on the final layer there are no outputs
    // else: current_layer_outputs is equal to the number of neurons in the next layer
    if (current_layer == topology_length - 1)
    {
      current_layer_outputs = 0;
    }
    else
    {
      current_layer_outputs = topology[current_layer + 1];
    }

    // for each neuron in the current layer, tell it how many outputs it needs and its position within its layer
    for (unsigned current_neuron = 0; current_neuron <= topology[current_layer]; ++current_neuron)
    {
      layers_.back().push_back(Neuron(current_layer_outputs, current_neuron));
    }

    // force the bias neuron (the last neuron created for each layer) to have an output initially set to 1.0
    layers_.back().back().Set_neuron_output_value_(1.0);
  }
}
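A hedged usage sketch for this constructor, assuming each topology[] entry is the neuron count for that layer (on top of which the <= loop above adds one bias neuron per layer):

// hypothetical call site: 3 inputs, 5 hidden neurons, 2 outputs
unsigned const topology[] = { 3, 5, 2 };
Net net(topology, 3);  // 3 = topology_length, i.e. number of layers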
Example #7
/**
 * Standard constructor (weights determined randomly)
 * @param height Height of the layer, i.e. the number of neurons in the layer
 * @param previousLayerHeight Height of the previous layer (used to set the number of inputs of this layer's neurons)
 * @param alpha Learning coefficient for step n
 * @param beta Learning coefficient for step n-1
 */
Layer::Layer(unsigned int const height, unsigned int const previousLayerHeight, double const alpha, double const beta) {
    for (unsigned int i = 0; i < height; i++) {
        neurons.push_back(Neuron(previousLayerHeight, alpha, beta));
    }
    Layer::alpha = alpha;
    Layer::beta = beta;
}
Example #8
NeuralLayer::NeuralLayer(int numNeuronsInCurrentLayer, int numInPreviousLayer)
{
    //std::cout <<"Creating neural layer!\n";
    for (int i=0; i < numNeuronsInCurrentLayer; i++)
    {
        this->neurons.push_back(Neuron(numInPreviousLayer));
    }
}
Example #9
		neuronLayer(int neuronNum, int inputNum)
		{
			neurons = neuronNum;
			for(int i = 0; i<neuronNum; i++)
			{
				neuronsInLayer.push_back(Neuron(inputNum));
			}
		}
Example #10
int NNetwork::addNode()
{
    // get new node id
    int nid = mNet.size();
    // push new node on back of vector
    mNet.push_back(Neuron(nid));
    return nid;
}
Example #11
// Initialize the layer's neurons
void Layer::initNeurons() {	
	neurons.clear();
	
	for (unsigned int i = 0; i < neuronsNumber; i++)
		neurons.push_back(Neuron(inputsNumber));
	
	outputs.clear();
	outputs.resize(neuronsNumber);
}
Example #12
int main2(){
    //layer1
    vector<Neuron> layer1;
    int layer1count = 3;
    //layer2
    vector<Neuron> layer2;
    int layer2count = 4;
    //input vector
    vector<int> input;
    int timesteps = 5;

    //connections
    vector<Connection> connections;

    //add neurons
    for(int i = 0;i<layer1count;i++){
        layer1.push_back(Neuron());
    }

    for(int i = 0 ;i<layer2count;i++){
        layer2.push_back(Neuron());
    }

    //add connections
    for(int x = 0;x<layer1count;x++){
        for(int y = 0;y<layer2count;y++){
            connections.push_back(Connection(layer1[x],layer2[y]));
            layer1[x].addAxonForwardConnection(layer2[y]);
        }
    }

    Connection& test = connections[0];
    layer1[0].axonForwardConnections[0]->outputLevel = 100;
    test.addSpike();
    for(int i =0;i<1000;i++){
        cout<<"TIME: "<<i<<":"<<endl;
        if(i%5==0) test.addSpike();
        test.step();
    }
    std::cout<<"END";

	return 1;
}
Example #13
File: ai.cpp Project: sUtop/Kate
NeuralLayer::NeuralLayer(int dem_,     int nextdem_,   int layer_,Random *rnd, bool is_last_):
                         dem(dem_),nextdem(nextdem_),layer(layer_),is_last(is_last_){
#ifdef LOG
    std::cout<<"Layer "<<layer<<" dem ="<<dem<<" nextdem ="<<nextdem<<" is_last = "<<is_last<<" \n";
#endif
    if(is_last) nextdem = 0;
    for(int i=0;i<dem;++i){
        Layer.push_back(Neuron(nextdem,layer,i,rnd)); // we pass on the dimensionality of the next layer
    };
} // NeuralLayer::NeuralLayer
Example #14
NeuronLayer::NeuronLayer( int p_numNeurons, int p_numInputsPerNeuron )
{
    numNeurons = p_numNeurons;
    
    for( int i = 0; i < p_numNeurons; i++ )
    {
        // place neurons in vecNeurons vector
        vecNeurons.push_back( Neuron( p_numInputsPerNeuron ) );
    }
}
Example #15
void Net::setNetwork(std::vector<unsigned> &topology)
{
    //std::cout<<"Made a Network!"<<std::endl;
    numLayers = topology.size();
    for (unsigned int i = 0; i < numLayers; ++i)
    {
        layers.push_back(Layer());
        //std::cout<<"Made Layer:"<<i+1<<std::endl;
        for (unsigned int j = 0; j <= topology[i]; ++j)
        {
            if (i + 1 != topology.size())
                layers.back().push_back(Neuron(topology[i + 1], j));
            else
                layers.back().push_back(Neuron(0, j));
        }
    }
    layers.back().back().setOutputVal(0.1);
    //for(unsigned layerNum=0; layerNum<layers.size()-1 ;layerNum++)
    //layers[layerNum].back().setOutputVal(1.0);
}
Example #16
NeuronLayer::NeuronLayer(int _numNeurons, int inputsPerNeuron){
	
	numNeurons = _numNeurons;
	
	neurons.reserve(numNeurons);
	
	for (int i = 0; i < numNeurons; i++) {
		
		neurons.push_back(Neuron(inputsPerNeuron));
	}
}
Example #17
NNet::NNet(unsigned int nInputs, unsigned int nHidden, unsigned int hiddenNeurons, unsigned int nOutputs)
{
    //Initialise the input layer
    DLOG(INFO) << "Creating input layer.";
    for (unsigned int i = 0; i < nInputs; i++)
    {
        //One input
        this->inputs.push_back(Neuron(1));
    }

    //Initialise the hidden layers
    DLOG(INFO) << "Creating hidden layer.";
    for (unsigned int i = 0; i < nHidden; i++)
    {
        DLOG(INFO) << "Creating hidden layer " << i << ".";
        vector<Neuron> layer;
        for (unsigned int j = 0; j < hiddenNeurons; j++)
        {
            if (i == 0)
            {
                //First hidden layer has only enough inputs to connect to the input layer
                layer.push_back(Neuron(nInputs));
            }
            else
            {
                //Same amount of inputs as previous outputs
                layer.push_back(Neuron(hiddenNeurons));
            }
        }
        this->hiddens.push_back(layer);
    }

    //Initialize the output layer
    DLOG(INFO) << "Creating output layer.";
    for (unsigned int i = 0; i < nOutputs; i++)
    {
        //One input per hidden-layer neuron
        this->outputs.push_back(Neuron(hiddenNeurons));
    }
}
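A hypothetical call site, assuming the parameters are (number of inputs, number of hidden layers, neurons per hidden layer, number of outputs), as the loops above suggest:

// 2 input neurons, 1 hidden layer of 3 neurons, 1 output neuron
NNet net(2, 1, 3, 1);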
Example #18
   SOM::SOM(unsigned int dataDim, const std::vector<unsigned int> &dims,
            const std::vector<Range<float> > &prototypeBounds, float epsilon, float sigma){
     // {{{ open
 
     ICLASSERT_THROW(dataDim>0,ICLException("SOM data dimension must be > 0"));
     ICLASSERT_THROW(dims.size()>0,ICLException("SOM grid dimension must be > 0"));
     m_uiDataDim = dataDim;
     m_uiSomDim = dims.size();
     m_vecDimensions = dims;
     m_vecPrototypeBounds = prototypeBounds;
     m_fEpsilon = epsilon;
     m_fSigma = sigma;
 
     // compute the total neuron count
     unsigned int dim = dims[0];
     for(unsigned int i=1;i<m_uiSomDim;++i){
       dim *= dims[i];
     }
     ICLASSERT_THROW(dim > 0,ICLException("Product of SOM dimensions must be > 0"));
     
     m_vecNeurons.resize(dim);
 
     // calculate offsets for each dimension in the planar neurons array
     m_vecDimOffsets.resize(m_uiSomDim);
     m_vecDimOffsets[0] = 1;
 
     for(unsigned int i=1;i<m_uiSomDim;i++){
       m_vecDimOffsets[i] = i>1 ? m_vecDimOffsets[i-1]*dims[i-1] : dims[i-1];
     }
     
     // Create the neurons
     for(unsigned int i=0;i<dim;++i){
       float *gridpos = new float[m_uiSomDim];
       float *prototype = new float[m_uiDataDim];
       
       // calculate the corresponding grid location ( TODO check check check! )
       int iRest = i;
       for(int d=m_uiSomDim-1;d>=0;--d){
         gridpos[d] = iRest ? iRest/m_vecDimOffsets[d] : 0;
         iRest -= gridpos[d]*m_vecDimOffsets[d];
       }
       ICLASSERT_THROW(iRest == 0,ICLException("Somethings going wrong here! [code 1240/B.l]") );
       
       // create some randomly initialized prototypes (using the given ranges for each dimension)
       for(unsigned int d=0;d<m_uiDataDim;++d){
         prototype[d] = random((double)m_vecPrototypeBounds[d].minVal,(double)m_vecPrototypeBounds[d].maxVal);
       }
       
       // set up new neuron
       m_vecNeurons[i] = Neuron(gridpos,prototype,m_uiSomDim,m_uiDataDim);
     }
   }
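The offset table above implements array strides: m_vecDimOffsets[0] = 1 and each later entry multiplies in the previous dimension, so a flat neuron index decomposes into grid coordinates by repeated integer division. A standalone sketch of the same arithmetic (an assumed-equivalent illustration, not taken from the library):

#include <iostream>
#include <vector>

int main()
{
    std::vector<unsigned> dims = { 4, 3, 2 };  // example 3-D grid
    std::vector<unsigned> off(dims.size(), 1);
    for (size_t i = 1; i < dims.size(); ++i)
        off[i] = off[i - 1] * dims[i - 1];     // strides: 1, 4, 12

    unsigned flat = 17;                        // some flat neuron index
    std::vector<unsigned> grid(dims.size());
    unsigned rest = flat;
    for (int d = static_cast<int>(dims.size()) - 1; d >= 0; --d) {
        grid[d] = rest / off[d];
        rest   -= grid[d] * off[d];
    }
    // 17 = 1*12 + 1*4 + 1*1, so grid == (1, 1, 1)
    std::cout << grid[0] << ',' << grid[1] << ',' << grid[2] << '\n';
    return 0;
}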
Example #19
void GenSOM::init(size_t nneurons, size_t nbands, bool randomize)
{
	// create vector contents
	neurons.assign(nneurons, Neuron(nbands));

	if (randomize) {
		// initialize randomly. Note that initialization range does not matter.
		cv::RNG rng(config.seed);

		for (size_t i = 0; i < neurons.size(); ++i) {
			neurons[i].randomize(rng, 0., 1.);
		}
	}
}
Example #20
Net::Net(const std::vector<unsigned int> &_topology)
{
	uint numLayers = _topology.size();

	for (uint layerNum = 0; layerNum < numLayers; ++layerNum)
	{
		// creating a layer
		m_layers.push_back(Layer());

		for (uint neuronNum = 0; neuronNum <= _topology[layerNum]; ++neuronNum)
		{
			m_layers.back().push_back(Neuron());
			std::cout << "New Neuron created." << std::endl;
		}
	}
}
Example #21
Neural::Net::Net(const std::vector<unsigned> &topology)
{
	free();

	for (unsigned layerNum = 0; layerNum < topology.size(); ++layerNum)
	{
		layers.push_back(Layer());
		// numOutputs of layer[i] is the numInputs of layer[i+1]
		// numOutputs of last layer is 0
		unsigned numOutputs = layerNum == topology.size() - 1 ? 0 : topology[layerNum + 1];

		// We have made a new Layer, now fill it with neurons, and add a bias neuron to the layer
		for (unsigned neuronNum = 0; neuronNum <= topology[layerNum]; ++neuronNum)
		{
			layers.back().push_back(Neuron(numOutputs, neuronNum));
		}

		// Force the bias node's output value to 1.0. It's the last neuron created above
		layers.back().back().setOutputValue(1.0);
	}
}
Example #22
Net::Net(const std::vector<unsigned>& topology)
{
	std::cout << "Net creating... [" << topology.size() << "]" << std::endl;

	unsigned layersCount = topology.size();
	for (unsigned layerNr = 0; layerNr < layersCount; ++layerNr) {

		std::cout << "  Layer creating... [" << layerNr << "]" << std::endl;

		layers.push_back(Layer{});
		unsigned outputsCount = layerNr == topology.size() - 1 ? 0 : topology[layerNr + 1];


		for (unsigned neuronNr = 0; neuronNr <= topology[layerNr]; ++neuronNr) {
			layers.back().push_back(Neuron(outputsCount, neuronNr));
		}

		// For the bias we always set the value to 1.0
		layers.back().back().setOutputValue(1.0);
	}

}
Example #23
Net::Net(const std::vector<unsigned> topology){
    
    unsigned numLayers = topology.size();
    
    std::cout << "numLayer: " << numLayers << std::endl;
        
    for(unsigned layerNum = 0; layerNum < numLayers; ++layerNum)
    {
        m_layers.push_back(Layer());
        
        // the last layer always has 0 outputs
        int numOutputs = (layerNum == topology.size() - 1) ? 0 : topology[layerNum +1];
        
        std::cout << "numOutputs: " << numOutputs << std::endl;
           
        // new layer, fill it with neurons
        for(int neuronNum = 0; neuronNum <= topology[layerNum]; neuronNum++) {
            m_layers.back().push_back(Neuron(numOutputs, neuronNum));
            std::cout << "Made a Neuron: " << neuronNum << std::endl;
        }      
    }
    
    m_layers.back().back().setOutputVal(1.0); // for the bias
}
Example #24
// constructor sets all constant parameters (bias, gains, timesteps, weights) and creates all neuron objects.
CTRNN_h::CTRNN_h(vector<float> gains, vector<float> bias, vector<float> timesteps, vector<float> weights){
	// add sensor neurons with externalInput values.
	for (int i = 0; i < NOF_SENSOR_NEURONS; i++){
		sensorNeurons.push_back(Neuron(SENSOR));
	}

	// add hidden neurons and links with weights from sensor neurons to hidden neurons.
	int weightsIndex = 0;
	int timestepsIndex = 0;
	int biasIndex = 0;
	int gainsIndex = 0;

	vector<float>::iterator start = weights.begin();
	vector<float>::iterator end = weights.begin() + NOF_SENSOR_NEURONS;
	vector<float> tempVector;

	for (int i = 0; i < NOF_SENSOR_NEURONS; i++){
		tempVector.insert(tempVector.begin(), start, end);
		
		hiddenNeurons[i] = Neuron(OTHER, bias[biasIndex++], timesteps[timestepsIndex++], gains[gainsIndex++], sensorNeurons, tempVector);
		
		tempVector.clear();
		start = end;
		end += NOF_SENSOR_NEURONS;
		weightsIndex += NOF_SENSOR_NEURONS;
	}

	// add links + weights between hidden neurons.
	for (int i = 0; i < NOF_HIDDEN_NEURONS; i++) {
		// self link!
		hiddenNeurons[i].addInputNeuron(hiddenNeurons[i], weights[weightsIndex++]);
		for (int j = i + 1; j < NOF_HIDDEN_NEURONS; j++) {
			hiddenNeurons[i].addInputNeuron(hiddenNeurons[j], weights[weightsIndex++]);
			hiddenNeurons[j].addInputNeuron(hiddenNeurons[i], weights[weightsIndex++]);
		}
	}

	// add motor neurons with links w/weights from hidden neurons.
	start = weights.begin() + weightsIndex;
	end = start + NOF_HIDDEN_NEURONS;

	for (int i = 0; i < NOF_MOTOR_NEURONS; i++) {
		tempVector.insert(tempVector.begin(), start, end);
		
		motorNeurons[i] = Neuron(OTHER, bias[biasIndex++], timesteps[timestepsIndex++], gains[gainsIndex++], hiddenNeurons, tempVector);

		tempVector.clear();
		start = end;
		end +=NOF_HIDDEN_NEURONS;
		weightsIndex += NOF_HIDDEN_NEURONS;

	}

	// add links + weights between motor neurons.
	for (int i = 0; i < NOF_MOTOR_NEURONS; i++) {
		// self link
		motorNeurons[i].addInputNeuron(motorNeurons[i], weights[weightsIndex++]);
		for (int j = i + 1; j < NOF_MOTOR_NEURONS; j++) {
			motorNeurons[i].addInputNeuron(motorNeurons[j], weights[weightsIndex++]);
			motorNeurons[j].addInputNeuron(motorNeurons[i], weights[weightsIndex++]);
		}
	}
}
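Reading the index arithmetic above, the flat weights vector is consumed in four blocks: sensor-to-hidden, hidden-to-hidden (one self link per neuron plus both directions of each pair), hidden-to-motor, and motor-to-motor. A hedged sanity check of the required length, using stand-in counts and assuming the hidden-neuron count equals the sensor-neuron count as the first hidden loop implies:

#include <cassert>

int main()
{
    const int S = 4;  // stand-in for NOF_SENSOR_NEURONS
    const int H = 4;  // stand-in for NOF_HIDDEN_NEURONS (first loop assumes H == S)
    const int M = 2;  // stand-in for NOF_MOTOR_NEURONS

    int expected = S * S   // one block of S sensor weights per hidden neuron
                 + H * H   // H self links + H*(H-1) directed pair links
                 + M * H   // one block of H hidden weights per motor neuron
                 + M * M;  // M self links + M*(M-1) directed pair links
    assert(expected == 44);  // the weights vector must supply this many entries
    return 0;
}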
Example #25
int main(int argc, char *argv[])
{
	QCoreApplication a(argc, argv);

	Neuron perceptron = Neuron(26);

	QList<NeuronInputs> trainingInputs = QList<NeuronInputs>();
	QList<bool> trainingOutputs = QList<bool>();

	NeuronInputs t1 = NeuronInputs(26);
	double t1_t[26] = { 0, 0, 0, 0, 0,
						0, 1, 1, 0, 0,
						0, 0, 1, 0, 0,
						0, 0, 1, 0, 0,
						0, 0, 1, 0, 0, 1 };
	for (int i = 0; i < 26; i++) t1[i] = t1_t[i];
	trainingInputs.push_back(t1);
	trainingOutputs.push_back(true);

	NeuronInputs t2 = NeuronInputs(26);
	double t2_t[26] = { 0, 0, 1, 1, 0,
						0, 0, 0, 1, 0,
						0, 0, 0, 1, 0,
						0, 0, 0, 0, 0,
						0, 0, 0, 0, 0, 1 };
	for (int i = 0; i < 26; i++) t2[i] = t2_t[i];
	trainingInputs.push_back(t2);
	trainingOutputs.push_back(true);

	NeuronInputs t3 = NeuronInputs(26);
	double t3_t[26] = { 0, 0, 0, 0, 0,
						1, 1, 0, 0, 0,
						0, 1, 0, 0, 0,
						0, 1, 0, 0, 0,
						0, 1, 0, 0, 0, 1 };
	for (int i = 0; i < 26; i++) t3[i] = t3_t[i];
	trainingInputs.push_back(t3);
	trainingOutputs.push_back(true);

	NeuronInputs f1 = NeuronInputs(26);
	double f1_t[26] = { 0, 0, 0, 0, 0,
						0, 1, 1, 1, 0,
						0, 1, 0, 1, 0,
						0, 1, 1, 1, 0,
						0, 0, 0, 0, 0, 1 };
	for (int i = 0; i < 26; i++) f1[i] = f1_t[i];
	trainingInputs.push_back(f1);
	trainingOutputs.push_back(false);

	NeuronInputs f2 = NeuronInputs(26);
	double f2_t[26] = { 0, 0, 0, 0, 0,
						0, 0, 0, 0, 0,
						1, 1, 1, 0, 0,
						1, 0, 1, 0, 0,
						1, 1, 1, 0, 0, 1 };
	for (int i = 0; i < 26; i++) f2[i] = f2_t[i];
	trainingInputs.push_back(f2);
	trainingOutputs.push_back(false);

	int n0 = perceptron.learnErrorThreshold(PerceptronLearn(0.1), trainingInputs, trainingOutputs, 0.0);

	qDebug() << "PERFORMING TESTS";
	perceptron.setInputs(t1);
	qDebug() << perceptron.calculateOutput();
	perceptron.setInputs(t2);
	qDebug() << perceptron.calculateOutput();
	perceptron.setInputs(t3);
	qDebug() << perceptron.calculateOutput();
	perceptron.setInputs(f1);
	qDebug() << perceptron.calculateOutput();
	perceptron.setInputs(f2);
	qDebug() << perceptron.calculateOutput();

	NeuronWeights weightsDebug = perceptron.getWeights();
	qDebug() << "Threshold:";
	qDebug() << weightsDebug.first;
	qDebug() << "Weights:";
	for (double weight : weightsDebug.second)
	{
		qDebug() << QString::number(weight);
	}
	qDebug() << "N0";
	qDebug() << QString::number(n0);

	return a.exec();
}
Example #26
	perceptron::perceptron(int num_hidden, float part,Test data_training, float rate, bool shift, bool norm){

		learn_rate = 0.5;

		num_att = data_training[0].first.size();
		num_class = data_training[0].second.size();

        int c = 0;
        float top = data_training.size()*part;
	    if(shift){
	        for(c=0; c < top; c++){
	            training_data.push_back(data_training[c]);
	        }
	        for(; c < data_training.size(); c++){
	            testing_data.push_back(data_training[c]);
	        }
	    }else{
	    	training_data = data_training;
			//input
	    }
	    cout << training_data.size();
		for (int i = 0; i < num_att; ++i)
		{
			input.push_back(Neuron());
		}
		//add a Bias
		Neuron in_bias;
		in_bias.in_value = 1;
		in_bias.is_bias = 1;
		input.push_back(in_bias);
			//hidden
		for (int i = 0; i < num_hidden; ++i)
		{
			z.push_back(Neuron());
		}
		//if there is no hidden layer, don't add a bias neuron
		

		threshold = 2;
        for(int i = 0; i < data_training[0].second.size();i++){
        	y.push_back(Neuron());
        }

		//create the links
		if(z.size()==0){//if it is a single perceptron
			//output
			for (std::vector<Neuron>::iterator i = input.begin(); i != input.end(); ++i)
			{
				for (std::vector<Neuron>::iterator j = y.begin(); j != y.end(); ++j)
				{
					Link l;
					l.from = &i[0];
					l.to = &j[0];
					l_y.push_back(l);
				}
			}
		}else{ //multilayer
			for (std::vector<Neuron>::iterator i = input.begin(); i != input.end(); ++i)
			{
				for (std::vector<Neuron>::iterator j = z.begin(); j != z.end(); ++j)
				{
					Link l;
					l.from = &i[0];
					l.to = &j[0];
					l_z.push_back(l);

				}
			}

			Neuron n;
			n.in_value = 1;
			n.out_value = 0;
			n.is_bias = 1;
			z.push_back(n);

			for (std::vector<Neuron>::iterator a = z.begin(); a != z.end(); ++a)
			{
				for (std::vector<Neuron>::iterator b = y.begin(); b != y.end(); ++b)
				{
					Link l;
					l.from = &a[0];
					l.to = &b[0];
					l_y.push_back(l);
				}
			}
		}


		//Normalization (per-attribute mean and sample standard deviation)
		normalizar = norm;
		if(normalizar) {
			for(int indice = 0; indice < num_att; indice++) {
				promedio.push_back(0);
				for(Test::iterator it = testing_data.begin(); 	it != testing_data.end(); ++it) {
					promedio[indice] += it->first[indice];
				}
				promedio[indice] /= testing_data.size();
				float diffCuadradas = 0;
				for(Test::iterator it = testing_data.begin(); 	it != testing_data.end(); ++it) {
					diffCuadradas += (it->first[indice]-promedio[indice]) * (it->first[indice]-promedio[indice]);
				}
				diffCuadradas /= testing_data.size() - 1;
				desvioEstandar.push_back (sqrt(diffCuadradas) );
			}
		}
	}
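The normalization block computes, per attribute, the mean and the sample standard deviation (n - 1 divisor), presumably for z-scoring inputs later (note the loops iterate over testing_data). A minimal standalone sketch of the same statistics (illustrative values, not project data):

#include <cmath>
#include <iostream>
#include <vector>

int main()
{
    std::vector<float> xs = { 1.f, 2.f, 3.f, 4.f };
    float mean = 0;
    for (float x : xs) mean += x;
    mean /= xs.size();

    float sqDiffs = 0;                    // mirrors diffCuadradas above
    for (float x : xs) sqDiffs += (x - mean) * (x - mean);
    float stddev = std::sqrt(sqDiffs / (xs.size() - 1));

    std::cout << mean << ' ' << stddev << '\n';  // 2.5 and about 1.29
    return 0;
}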
Example #27
Layer::Layer(int count,int inserts) {
    for (int i=0; i < count; ++i)
        neurons.push_back(Neuron(inserts));
}
Example #28
NeuronLayer::NeuronLayer(int newNumNeurons, int inPerNeuron) : numNeurons(newNumNeurons){
	for (int i = 0; i < newNumNeurons; i++){
		vecNeurons.push_back(Neuron(inPerNeuron));
	}
}