Пример #1
0
 // Default constructor: resets the network, then installs the defaults —
 // symmetric sigmoid activation (0, 0 presumably select the implementation's
 // default slope parameters — confirm in setActivationFunction), an empty
 // layer topology, and RPROP training (dw0 = 0.1, epsilon = FLT_EPSILON).
 ANN_MLPImpl()
 {
     clear();
     setActivationFunction( SIGMOID_SYM, 0, 0 );
     setLayerSizes(Mat());
     setTrainMethod(ANN_MLP::RPROP, 0.1, FLT_EPSILON);
 }
Пример #2
0
// Rebuilds this neuron from a serialized "neuron" element.
// Returns false when the element is not a neuron, or when any required
// child ("id", "type", an activation function for hidden/output neurons,
// "value" for bias neurons) is missing or unusable.
bool ViNeuron::importData(ViElement element)
{
	if(element.name() != "neuron") return false;

	bool ok = true;

	// Identifier.
	ViElement idElement = element.child("id");
	if(idElement.isNull()) ok = false;
	else setId(idElement.toString());

	// Neuron type.
	ViElement typeElement = element.child("type");
	if(typeElement.isNull()) ok = false;
	else setType(stringToType(typeElement.toString()));

	if(type() == ViNeuron::HiddenNeuron || type() == ViNeuron::OutputNeuron)
	{
		// NOTE(review): this re-checks the "id" child rather than an
		// activation-function child — looks like a copy-paste slip; confirm
		// against the export format before changing it.
		if(idElement.isNull())
		{
			ok = false;
			setActivationFunction(NULL);
		}
		else
		{
			setActivationFunction(ViActivationFunctionManager::create(element));
		}

		// Fall back to the default activation function if creation failed;
		// otherwise let the function import its own parameters.
		if(mActivationFunction == NULL)
		{
			setActivationFunction(ViActivationFunctionManager::createDefault());
			LOG("Unable to create the required activation function from the import data. The default activation function (" + mActivationFunction->name() + ") will be used.", QtCriticalMsg);
		}
		else if(!mActivationFunction->importData(element))
		{
			ok = false;
		}
	}
	else if(type() == ViNeuron::BiasNeuron)
	{
		// Bias neurons carry a fixed value instead of an activation function.
		ViElement valueElement = element.child("value");
		if(valueElement.isNull())
		{
			ok = false;
			setActivationFunction(NULL);
		}
		else
		{
			setValue(valueElement.toReal());
		}
	}

	return ok;
}
Пример #3
0
    void read_params( const FileNode& fn )
    {
        String activ_func_name = (String)fn["activation_function"];
        if( !activ_func_name.empty() )
        {
            activ_func = activ_func_name == "SIGMOID_SYM" ? SIGMOID_SYM :
                         activ_func_name == "IDENTITY" ? IDENTITY :
                         activ_func_name == "GAUSSIAN" ? GAUSSIAN : -1;
            CV_Assert( activ_func >= 0 );
        }
        else
            activ_func = (int)fn["activation_function_id"];

        f_param1 = (double)fn["f_param1"];
        f_param2 = (double)fn["f_param2"];

        setActivationFunction( activ_func, f_param1, f_param2 );

        min_val = (double)fn["min_val"];
        max_val = (double)fn["max_val"];
        min_val1 = (double)fn["min_val1"];
        max_val1 = (double)fn["max_val1"];

        FileNode tpn = fn["training_params"];
        params = AnnParams();

        if( !tpn.empty() )
        {
            String tmethod_name = (String)tpn["train_method"];

            if( tmethod_name == "BACKPROP" )
            {
                params.trainMethod = ANN_MLP::BACKPROP;
                params.bpDWScale = (double)tpn["dw_scale"];
                params.bpMomentScale = (double)tpn["moment_scale"];
            }
            else if( tmethod_name == "RPROP" )
            {
                params.trainMethod = ANN_MLP::RPROP;
                params.rpDW0 = (double)tpn["dw0"];
                params.rpDWPlus = (double)tpn["dw_plus"];
                params.rpDWMinus = (double)tpn["dw_minus"];
                params.rpDWMin = (double)tpn["dw_min"];
                params.rpDWMax = (double)tpn["dw_max"];
            }
            else
                CV_Error(CV_StsParseError, "Unknown training method (should be BACKPROP or RPROP)");

            FileNode tcn = tpn["term_criteria"];
            if( !tcn.empty() )
            {
                FileNode tcn_e = tcn["epsilon"];
                FileNode tcn_i = tcn["iterations"];
                params.termCrit.type = 0;
                if( !tcn_e.empty() )
                {
                    params.termCrit.type |= TermCriteria::EPS;
                    params.termCrit.epsilon = (double)tcn_e;
                }
                if( !tcn_i.empty() )
                {
                    params.termCrit.type |= TermCriteria::COUNT;
                    params.termCrit.maxCount = (int)tcn_i;
                }
            }
        }
    }
Пример #4
0
//----------------------------------------------------------------------------------------------------------------------  
// Configures neuron i as an input neuron: an input update function
// (presumably forwarding the externally supplied input — confirm kUF_Input
// semantics), identity activation so the value passes through unchanged,
// and a zero bias so no offset is added.
void CTRNN::setInputNeuron(int i)
{
  setUpdateFunction(i, kUF_Input);
  setActivationFunction(i, kAF_Identity);
  setBias(i, 0.0);
}