Example #1
void MultilayerPerceptron::initMLP(int ninputs, int noutputs, const vector<int> &hiddenLayerSizes, const TransferFunctionType &tf)
{
	tres = new MLPTrainingResult();
	nInputs = ninputs;
	setAlfa(1);
	setLayerSizes(hiddenLayerSizes);
	setInputSize(ninputs);
	setOutputSize(noutputs);
	setTransferFunctionType(tf);
	//	setOutputType(Continuous);
	// start from a random weight configuration and report completion through Qt's signal/slot mechanism
	randomizeWeights();
	connect(this, SIGNAL(finished()), SLOT(finished()));
}
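initMLP() finishes by calling randomizeWeights(), whose body is not included in this example. Below is a minimal standalone sketch of the usual idea, assuming the weights of one layer live in a plain row-by-column matrix; the helper name and container are illustrative and are not part of the MultilayerPerceptron API.

#include <random>
#include <vector>

// Illustrative helper: build a rows x cols weight matrix filled with small
// uniform random values, the typical job of a randomizeWeights() routine.
std::vector<std::vector<double>> makeRandomWeights(int rows, int cols)
{
    std::mt19937 gen{std::random_device{}()};
    std::uniform_real_distribution<double> dist(-0.5, 0.5);

    std::vector<std::vector<double>> weights(rows, std::vector<double>(cols));
    for (auto &row : weights)
        for (auto &w : row)
            w = dist(gen);
    return weights;
}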
Example #2
OneLayerPerceptron::OneLayerPerceptron(const float pEta, const int pInputPerceptrons, const int pHiddenPerceptrons, const int pOutputPerceptrons)
    : m_eta{pEta}, m_hidPerceptrons{pHiddenPerceptrons}, m_inpPerceptrons{pInputPerceptrons},
      m_outPerceptrons{pOutputPerceptrons}, m_hidWeights{new float*[m_inpPerceptrons + 1]},
      m_outWeights{new float*[m_hidPerceptrons + 1]}
{
    // +1 due to constant coefficient, i.e. bias
    for (int i = 0; i < m_inpPerceptrons + 1; i++)
    {
        m_hidWeights[i] = new float[m_hidPerceptrons];
    }

    for (int i = 0; i < m_hidPerceptrons + 1; i++)
    {
        m_outWeights[i] = new float[m_outPerceptrons];
    }
    randomizeWeights();
}
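The constructor above allocates the raw weight arrays and then calls randomizeWeights(), whose definition is not part of this example. A plausible sketch follows, assuming the method simply fills both arrays with small uniform random values using the member names from the constructor; the body is an assumption, not the class's actual implementation, and needs <random>.

#include <random>

// Hypothetical body: the real implementation is not shown in this example.
void OneLayerPerceptron::randomizeWeights()
{
    std::mt19937 gen{std::random_device{}()};
    std::uniform_real_distribution<float> dist(-0.5f, 0.5f);

    // Input-to-hidden weights: (inputs + bias) x hidden.
    for (int i = 0; i < m_inpPerceptrons + 1; i++)
        for (int j = 0; j < m_hidPerceptrons; j++)
            m_hidWeights[i][j] = dist(gen);

    // Hidden-to-output weights: (hidden + bias) x outputs.
    for (int i = 0; i < m_hidPerceptrons + 1; i++)
        for (int j = 0; j < m_outPerceptrons; j++)
            m_outWeights[i][j] = dist(gen);
}

Note that the raw new[] allocations in the constructor also call for a matching destructor (or a switch to std::vector) to avoid leaks, which this example does not show.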