Example #1
// Assumes the NeuralNetwork class declaration is available from its own header.
#include <iostream>   // cerr, endl
#include <random>     // mt19937, bernoulli_distribution, random_device
#include <stdexcept>  // invalid_argument
using namespace std;

// Crossover constructor: builds a child network by picking each neuron
// at random from one of the two parents, optionally mutating the result.
NeuralNetwork::NeuralNetwork(const NeuralNetwork& father, const NeuralNetwork& mother, bool addRandomChange)
{
    // check that the parent networks have matching dimensions;
    // abort the crossover instead of indexing out of range below
    if(father.getInputNum() != mother.getInputNum()
            || father.getLayers().size() != mother.getLayers().size())
    {
        cerr << "error, mother and father NN of different sizes" << endl;
        cerr << "error in numbers of layers" << endl;
        cerr << father.getLayers().size() << " " << mother.getLayers().size() << endl;
        throw invalid_argument("parent networks have different layer counts");
    }
    for(unsigned int i = 0; i < father.getLayers().size(); i++)
    {
        if(father.getLayers()[i].getNeurons().size() != mother.getLayers()[i].getNeurons().size())
        {
            cerr << "error, mother and father NN of different sizes" << endl;
            cerr << "error in layer " << i << endl;
            cerr << father.getLayers()[i].getNeurons().size() << " " << mother.getLayers()[i].getNeurons().size() << endl;
            throw invalid_argument("parent networks have different neuron counts in a layer");
        }
    }
    m_inputNum = father.getInputNum();
    m_outputNum = father.getOutputNum();
    m_hiddenLayerNum = father.getHiddenLayerNum();

    vector<Neuron> childNeurons;
    // one RNG shared across calls; each draw is a fair coin flip
    static mt19937 generator(random_device{}());
    bernoulli_distribution distribution(0.5);

    for(unsigned i = 0; i < father.m_layers.size(); i++)
    {
        const auto& fatherLayer = father.m_layers[i];
        const auto& motherLayer = mother.m_layers[i];
        const auto& fatherNeurons = fatherLayer.getNeurons();
        const auto& motherNeurons = motherLayer.getNeurons();
        for(unsigned int j = 0; j < fatherNeurons.size(); j++)
        {
            // 1/2 chance to take the mother's neuron, else the father's
            if(distribution(generator))
            {
                childNeurons.push_back(motherNeurons[j]);
            }
            else
            {
                childNeurons.push_back(fatherNeurons[j]);
            }
        }
        // assumes NeuronLayer is constructible from vector<Neuron>
        m_layers.push_back(childNeurons);
        childNeurons.clear();
    }
    if(addRandomChange)
    {
        // optional mutation step on the freshly built child
        randomiseWeight();
    }
}
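
A minimal usage sketch for the crossover constructor above. The three-argument NeuralNetwork constructor and its parameters are assumptions about the surrounding codebase, not part of the example:

// Hypothetical driver code; NeuralNetwork(inputNum, outputNum, hiddenLayerNum)
// is an assumed constructor.
NeuralNetwork father(4, 2, 1); // 4 inputs, 2 outputs, 1 hidden layer
NeuralNetwork mother(4, 2, 1); // must match the father's dimensions
NeuralNetwork child(father, mother, true); // true => randomise some weights

Since the constructor throws invalid_argument on mismatched parents, a breeding loop would typically wrap this call in a try/catch.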
Example #2
int SaveManager::SaveNetwork(const NeuralNetwork& nn, QXmlStreamWriter & writer)
{
    writer.writeStartElement("NeuralNetwork");

    // gather some information about the neural network
    const std::vector<NeuronLayer> layers = nn.getLayers();
    const unsigned int workingLayersNum = layers.size(); // number of working layers (every layer after the input, including the output)
    const unsigned int inputNum = nn.getInputNum();      // number of inputs

    writer.writeStartElement("NeuronLayer");
    writer.writeAttribute("id",QString::number(0)); // 0 means "input layer"
    writer.writeTextElement("inputsNum", QString::number(inputNum));
    writer.writeEndElement();

    //for each layer
    for (unsigned int layer=1; layer<=workingLayersNum; layer++)
    {
        const std::vector<Neuron> neurons = layers.at(layer-1).getNeurons();
        unsigned int neuronsNum = neurons.size();
        writer.writeStartElement("NeuronLayer");
        writer.writeAttribute("id", QString::number(layer));

        //for each neuron
        for (unsigned int neuron=0; neuron<neuronsNum; neuron++)
        {
            writer.writeStartElement("Neuron");
            writer.writeAttribute("id",QString::number(neuron));
            const std::vector<double> weights = neurons.at(neuron).getWeights();
            if(!weights.empty())
            {
                // space-separated list of weights, without a trailing space
                QString tempS = QString::number(weights.at(0));
                //for each remaining weight
                for(unsigned int w = 1; w < weights.size(); w++)
                {
                    tempS += " " + QString::number(weights.at(w));
                }
                writer.writeTextElement("weights", tempS);
            }
            writer.writeEndElement();
        }
        writer.writeEndElement();
    }
    writer.writeEndElement(); // NeuralNetwork
    return 0;
}
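
A usage sketch for SaveNetwork, writing the XML to a file. The file name "network.xml", the default-constructed SaveManager, and the pre-existing NeuralNetwork nn are assumptions for illustration:

#include <QFile>
#include <QXmlStreamWriter>

// Hypothetical driver; the file name and SaveManager's default constructor
// are assumptions, not part of the original example.
QFile file("network.xml");
if(file.open(QIODevice::WriteOnly | QIODevice::Text))
{
    QXmlStreamWriter writer(&file);
    writer.setAutoFormatting(true); // indent the XML for readability
    writer.writeStartDocument();
    SaveManager saver;
    saver.SaveNetwork(nn, writer);  // nn: an existing NeuralNetwork
    writer.writeEndDocument();
    file.close();
}

With auto-formatting on, the output nests one <NeuronLayer> element per layer (id 0 being the input pseudo-layer), each holding <Neuron> elements whose <weights> text is the space-separated list SaveNetwork emits.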