Example #1
    void read( const FileNode& fn )
    {
        clear();

        // Restore the layer topology first; setLayerSizes() allocates the weight matrices.
        vector<int> _layer_sizes;
        readVectorOrMat(fn["layer_sizes"], _layer_sizes);
        setLayerSizes( _layer_sizes );

        int i, l_count = layer_count();
        read_params(fn);

        size_t esz = weights[0].elemSize();

        // Input/output scaling vectors are stored as raw double buffers.
        FileNode w = fn["input_scale"];
        w.readRaw("d", weights[0].ptr(), weights[0].total()*esz);

        w = fn["output_scale"];
        w.readRaw("d", weights[l_count].ptr(), weights[l_count].total()*esz);

        w = fn["inv_output_scale"];
        w.readRaw("d", weights[l_count+1].ptr(), weights[l_count+1].total()*esz);

        // One weight matrix per layer transition, read back in order.
        FileNodeIterator w_it = fn["weights"].begin();

        for( i = 1; i < l_count; i++, ++w_it )
            (*w_it).readRaw("d", weights[i].ptr(), weights[i].total()*esz);
        trained = true;
    }
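
This read() method restores a serialized network from a FileNode and is normally not called directly. A minimal sketch of loading a previously saved model through the public OpenCV API could look like the following; the file name "mlp.yml" is an assumption for illustration.

#include <opencv2/ml.hpp>

using namespace cv;
using namespace cv::ml;

int main()
{
    // Load a previously trained and saved MLP ("mlp.yml" is an assumed file name).
    Ptr<ANN_MLP> mlp = Algorithm::load<ANN_MLP>("mlp.yml");

    // Predict on a single sample whose width matches the first layer size.
    Mat sample = Mat::zeros(1, mlp->getLayerSizes().at<int>(0), CV_32F);
    Mat response;
    mlp->predict(sample, response);
    return 0;
}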
Example #2
 ANN_MLPImpl()
 {
     clear();
     // Default configuration: symmetric sigmoid activation, empty topology, RPROP training.
     setActivationFunction( SIGMOID_SYM, 0, 0 );
     setLayerSizes(Mat());
     setTrainMethod(ANN_MLP::RPROP, 0.1, FLT_EPSILON);
 }
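
The constructor above only establishes defaults (symmetric sigmoid, empty layer list, RPROP); the layer sizes still have to be set before training. A minimal training sketch against the public cv::ml::ANN_MLP API might look like this, where the XOR data, the 2-4-1 topology, and the BACKPROP parameters are arbitrary choices for illustration.

#include <opencv2/ml.hpp>

using namespace cv;
using namespace cv::ml;

int main()
{
    // Made-up XOR training set: 2 inputs, 1 output, 4 samples.
    float samplesData[]   = { 0,0,  0,1,  1,0,  1,1 };
    float responsesData[] = { 0, 1, 1, 0 };
    Mat samples(4, 2, CV_32F, samplesData);
    Mat responses(4, 1, CV_32F, responsesData);

    Ptr<ANN_MLP> mlp = ANN_MLP::create();
    Mat layerSizes = (Mat_<int>(1, 3) << 2, 4, 1);   // input, hidden, output layer sizes
    mlp->setLayerSizes(layerSizes);
    mlp->setActivationFunction(ANN_MLP::SIGMOID_SYM, 0, 0);
    mlp->setTrainMethod(ANN_MLP::BACKPROP, 0.1, 0.1);

    mlp->train(TrainData::create(samples, ROW_SAMPLE, responses));
    return 0;
}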
Example #3
void MultilayerPerceptron::initMLP(int ninputs, int noutputs, const vector<int> &hiddenLayerSizes, const TransferFunctionType &tf)
{
	tres = new MLPTrainingResult();
	nInputs = ninputs;
	setAlfa(1);
	// Define the topology: hidden layer sizes plus input/output dimensions.
	setLayerSizes(hiddenLayerSizes);
	setInputSize(ninputs);
	setOutputSize(noutputs);
	setTransferFunctionType(tf);
	//	setOutputType(Continuous);
	randomizeWeights();
	// Qt signal/slot connection: react when training finishes.
	connect(this, SIGNAL(finished()), SLOT(finished()));
}
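
initMLP() reads like a shared initializer for this custom Qt-based MultilayerPerceptron class, presumably invoked from its constructors. The class's real constructor signatures are not shown here, so the following delegation sketch is purely hypothetical.

// Hypothetical constructor delegating to initMLP(); the actual class may differ.
MultilayerPerceptron::MultilayerPerceptron(int ninputs, int noutputs,
                                           const vector<int> &hiddenLayerSizes,
                                           const TransferFunctionType &tf)
{
	initMLP(ninputs, noutputs, hiddenLayerSizes, tf);
}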