Example #1
File: vifann.cpp  Project: EQ4/Visore
bool ViFann::setActivation(const QList<Activation> &activations)
{
	if(mNetwork == NULL) return false;
	if(fann_get_num_layers(mNetwork) - 1 != activations.size()) return false; // -1 because of the input layer
	int layers = fann_get_num_layers(mNetwork);
	for(mI = 1; mI < layers; ++mI)
	{
		if(!setActivation(activations[mI - 1], mI)) return false;
	}
	return true;
}
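
For reference, a minimal self-contained sketch of how fann_get_num_layers counts layers; the 2-3-1 topology below is arbitrary and only illustrates that the input layer is included in the count:

#include <stdio.h>
#include <fann.h>

int main(void)
{
	/* A 2-input, 3-hidden, 1-output network has 3 layers in total,
	 * so fann_get_num_layers() returns 3 (the input layer counts). */
	struct fann *ann = fann_create_standard(3, 2, 3, 1);
	printf("layers: %u\n", fann_get_num_layers(ann));
	fann_destroy(ann);
	return 0;
}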
Example #2
/***
 * @method rspamd_fann:get_layers()
 * Returns an array of the neuron counts for each layer
 * @return {table/number} table with the number of neurons in each layer
 */
static gint
lua_fann_get_layers (lua_State *L)
{
#ifndef WITH_FANN
	return 0;
#else
	struct fann *f = rspamd_lua_check_fann (L, 1);
	guint nlayers, i, *layers;

	if (f != NULL) {
		nlayers = fann_get_num_layers (f);
		layers = g_new (guint, nlayers);
		fann_get_layer_array (f, layers);
		lua_createtable (L, nlayers, 0);

		for (i = 0; i < nlayers; i ++) {
			lua_pushnumber (L, layers[i]);
			lua_rawseti (L, -2, i + 1);
		}

		g_free (layers);
	}
	else {
		lua_pushnil (L);
	}

	return 1;
#endif
}
Example #3
NeuralNet *nn_fann_Fann2NeuralNet(struct fann *ANN) {
    unsigned int ANN_NLayers = fann_get_num_layers(ANN),
                 *BiasArray = (unsigned int *) pgm_malloc(sizeof(unsigned int) * ANN_NLayers),
                 *ANN_NNeurons = (unsigned int *) pgm_malloc(sizeof(unsigned int) * ANN_NLayers);
    double *ANN_Weights;
    NeuralNet *NN;

    fann_get_bias_array(ANN,BiasArray);

    fann_get_layer_array(ANN,ANN_NNeurons);

    ANN_Weights = nn_fann_parse_get_weights(ANN,ANN_NLayers,ANN_NNeurons);
    NN = nn_NeuralNetCreate(
             ANN_NLayers,
             // In FANN, every neuron has its own activation function. NeuralNet only uses the activation function of the first neuron,
             // but FANN's first layer has no activation function, so we take it from the next layer
             (ANN->first_layer+1)->first_neuron->activation_function,
             ANN->BiperbolicLambda,ANN->BiperbolicT1,ANN->BiperbolicT2,
             ANN_NNeurons,
             ANN->input_min,ANN->input_max,ANN->output_min,ANN->output_max,
             // In FANN, every neuron has its own activation steepness. NeuralNet only uses the steepness of the first neuron,
             // but FANN's first layer has no steepness, so we take it from the next layer
             (ANN->first_layer+1)->first_neuron->activation_steepness,
             // FANN has one bias per network layer; NeuralNet only uses the bias of the first layer
             BiasArray[0],
             ANN_Weights
         );
    NN->MSE = fann_get_MSE(ANN);
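    /* Note: BiasArray and ANN_NNeurons are never released in this snippet;
       the matching free for pgm_malloc is outside the code shown here. */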

    return NN;
}
Example #4
File: vifann.cpp  Project: EQ4/Visore
bool ViFann::loadFromFile(const QString &path)
{
	QFile file(path);
	if(!file.exists()) return false;

	clear();
	mNetwork = fann_create_from_file(path.toLatin1().data());
	if(mNetwork == NULL) return false;

	fann_nettype_enum type = fann_get_network_type(mNetwork);
	//TODO: get type

	int layerCount = fann_get_num_layers(mNetwork);
	unsigned int layers[layerCount];
	fann_get_layer_array(mNetwork, layers);
	for(int i = 0; i < layerCount; ++i)
	{
		mNeurons.append(layers[i]);
	}
	mInputCount = mNeurons.first();
	mOutputCount = mNeurons.last();

	if(mInput != NULL) delete [] mInput;
	if(mOutput != NULL) delete [] mOutput;
	mInput = new float[mInputCount];
	mOutput = new float[mOutputCount];

	mConnectionRate = fann_get_connection_rate(mNetwork);

	//TODO: get weights

	return true;
}
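
The stack array unsigned int layers[layerCount] above is a C99 variable-length array, which GCC accepts in C++ but the standard does not. A portable sketch of the same read-back in plain C (copy_layer_sizes is an illustrative name, not part of FANN):

#include <stdlib.h>
#include <fann.h>

/* Copy the per-layer neuron counts out of a network.
 * The caller owns the returned buffer and must free() it. */
static unsigned int *copy_layer_sizes(struct fann *ann, unsigned int *count)
{
	unsigned int n = fann_get_num_layers(ann);
	unsigned int *sizes = malloc(n * sizeof *sizes);
	if (sizes == NULL) return NULL;
	fann_get_layer_array(ann, sizes); /* one entry per layer, input layer first */
	*count = n;
	return sizes;
}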
Example #5
File: vifann.cpp  Project: EQ4/Visore
bool ViFann::setActivation(const Activation &activation)
{
	if(mNetwork == NULL) return false;
	int layers = fann_get_num_layers(mNetwork);
	for(mI = 1; mI < layers; ++mI)
	{
		if(!setActivation(activation, mI)) return false;
	}
	return true;
}
Example #6
void fann_save_matrices(struct fann *network, char *fname){
	unsigned int layers;
	unsigned int layer[100];
	unsigned int bias[100];
	unsigned int total_weights;
	unsigned int neuron_inputs;
	unsigned int writes_counter;
	DATA_TYPE weight;
	struct fann_connection *connections;
	FILE *array;
	char array_name[255];
	int i, j;
	writes_counter = 0;
	layers = fann_get_num_layers(network);
	fann_get_layer_array(network, layer);
	fann_get_bias_array(network, bias);
	total_weights = fann_get_total_connections(network);
	printf("Total weights: %i\n", total_weights);
	connections = (struct 	fann_connection *)
				malloc(total_weights * sizeof(struct 	fann_connection));
	fann_get_connection_array(network, connections);
	for(i = 1; i < layers; i++){
		sprintf(array_name, "%s_W%i.net", fname, i);
		array = fopen(array_name, "wb");
		for (j = 0; j < layer[i]*(layer[i-1] + 1); j++){
			weight = connections[writes_counter].weight;
#ifdef DEBUG
			mexPrintf("Number:\t%i\n", writes_counter);
			mexPrintf("Weight:\t%e\n", connections[writes_counter].weight);
			mexPrintf("From:\t%i\n", connections[writes_counter].from_neuron);
			mexPrintf("To:\t%i\n", connections[writes_counter].to_neuron);
			mexEvalString("drawnow;");
#endif
			fwrite(&weight, sizeof(DATA_TYPE) , 1, array);
			writes_counter++;
		}
		fclose(array);
	}
	free(connections);
	return;
}
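
A matching reader for the files written above might look like the sketch below. It assumes DATA_TYPE is float and that the caller already knows n = layer[i] * (layer[i-1] + 1) from the network topology, since the writer stores raw weights with no header; load_weight_matrix is a hypothetical helper, not part of the original code:

#include <stdio.h>
#include <stdlib.h>

/* Load the layer-i weight matrix written by fann_save_matrices().
 * n must equal layer[i] * (layer[i-1] + 1), as used by the writer. */
static float *load_weight_matrix(const char *fname, int i, size_t n)
{
	char path[255];
	FILE *array;
	float *w = malloc(n * sizeof *w);
	if (w == NULL) return NULL;
	snprintf(path, sizeof path, "%s_W%i.net", fname, i);
	array = fopen(path, "rb");
	if (array == NULL || fread(w, sizeof *w, n, array) != n) {
		if (array != NULL) fclose(array);
		free(w);
		return NULL;
	}
	fclose(array);
	return w;
}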
Example #7
void rebuild_functions(int neur)
{
    int sygm_functions[]={FANN_SIGMOID_SYMMETRIC_STEPWISE,FANN_SIGMOID_SYMMETRIC};
    int sym_functions[]={FANN_LINEAR,
                         FANN_GAUSSIAN_SYMMETRIC,FANN_COS_SYMMETRIC,FANN_SIN_SYMMETRIC,
                         FANN_LINEAR_PIECE_SYMMETRIC,FANN_ELLIOT_SYMMETRIC,
                         FANN_SIGMOID_SYMMETRIC_STEPWISE,FANN_SIGMOID_SYMMETRIC
                        };

    int functions[]={FANN_ELLIOT,FANN_LINEAR,FANN_GAUSSIAN,FANN_COS,FANN_SIN,
                     FANN_SIGMOID_STEPWISE,FANN_LINEAR_PIECE,FANN_SIGMOID,FANN_GAUSSIAN_STEPWISE,

                     FANN_LINEAR,
                     FANN_GAUSSIAN_SYMMETRIC,FANN_COS_SYMMETRIC,FANN_SIN_SYMMETRIC,
                     FANN_LINEAR_PIECE_SYMMETRIC,FANN_ELLIOT_SYMMETRIC,
                     FANN_SIGMOID_SYMMETRIC_STEPWISE
                    };

    int mid_functions[]={FANN_SIGMOID_STEPWISE,FANN_ELLIOT,FANN_LINEAR_PIECE,
                         FANN_GAUSSIAN_STEPWISE,FANN_GAUSSIAN,FANN_COS,FANN_SIN,FANN_SIGMOID
                        };

    int in_functions[]={FANN_SIGMOID_SYMMETRIC,FANN_SIGMOID_SYMMETRIC_STEPWISE,FANN_GAUSSIAN_SYMMETRIC};
    int out_functions[]={FANN_GAUSSIAN_SYMMETRIC,FANN_SIGMOID_SYMMETRIC,FANN_SIGMOID_SYMMETRIC_STEPWISE};

    int l=1,a=0;

    numn=fann_get_num_layers(ann);
    // printf("\r\n[ act funcs: ");
    for (l=1;l<2;l++)
    {
        int sta;

        if (l==1)
            sta=neur;
        else
            sta=2;
        for (a=0;a<sta;a++)
        {
            int nfunc;
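            /* Note: all three branches below currently pick from mid_functions;
               the in_functions/out_functions tables above are left unused. */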
            if (l==1)
                nfunc=mid_functions[rand()%((sizeof(mid_functions)/sizeof(int)))];
            else if (l==numn-1)
                nfunc=mid_functions[rand()%((sizeof(mid_functions)/sizeof(int)))];
            else
                nfunc=mid_functions[rand()%((sizeof(mid_functions)/sizeof(int)))];
            //	printf("mid_functions %d",nfunc);
            //   if (nfunc==1||nfunc==2)
            //     nfunc=FANN_LINEAR_PIECE_SYMMETRIC;

            double stp;

            stp=rand()  % 100;

            //if(l==1)
            //	nfunc=FANN_SIGMOID_STEPWISE;
            stp=0.1+(stp*0.01);
            if (l==numn-1||l==1)
                stp=1.0f;
            // else
            //  stp=1.0f;
            fann_set_activation_steepness_layer(ann, stp, l);

            char chars[]={'q','w','e','r','t','y','u','i','o','z','x','c','v','b','n','a','s','d','f','g','h'};

            printf("%c",chars[nfunc]);
            //	printf("\r\nset %d %d",l,a);
            fann_set_activation_function(ann,nfunc,l,a);
            //   printf("\r\n #%-02d %s <%-4.02f>", l, FANN_ACTIVATIONFUNC_NAMES[fann_get_activation_function(ann, l, 0)],
            //          fann_get_activation_steepness(ann, l, 0));

        }
    }
    // printf("]\r\n");








}
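
Where the per-neuron loop above is not needed, FANN also provides whole-layer setters; a short sketch (set_hidden_activation is an illustrative helper, not the author's code):

#include <fann.h>

/* Apply one activation function and steepness to every hidden layer.
 * Layer 0 is the input layer and takes no activation function. */
static void set_hidden_activation(struct fann *ann,
                                  enum fann_activationfunc_enum fn,
                                  fann_type steepness)
{
	unsigned int n = fann_get_num_layers(ann);
	unsigned int l;
	for (l = 1; l + 1 < n; l++) {
		fann_set_activation_function_layer(ann, fn, l);
		fann_set_activation_steepness_layer(ann, steepness, l);
	}
}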