NeuralNet *nn_fann_Fann2NeuralNet(struct fann *ANN ) { unsigned int ANN_NLayers = fann_get_num_layers(ANN), *BiasArray = (unsigned int* ) pgm_malloc (sizeof(unsigned int)*ANN_NLayers), *ANN_NNeurons = (unsigned int* ) pgm_malloc (sizeof(unsigned int)*ANN_NLayers); double *ANN_Weights; NeuralNet *NN; fann_get_bias_array(ANN,BiasArray); fann_get_layer_array(ANN,ANN_NNeurons); ANN_Weights = nn_fann_parse_get_weights(ANN,ANN_NLayers,ANN_NNeurons); NN = nn_NeuralNetCreate( ANN_NLayers, // Na FANN, cada Neuronio tem uma função de ativação. A neuralnet usa somente a função de ativação do primeiro neuronio, // porem a primeira camada da FANN nao tem função de ativação, logo pegamos da camada seguinte (ANN->first_layer+1)->first_neuron->activation_function, ANN->BiperbolicLambda,ANN->BiperbolicT1,ANN->BiperbolicT2, ANN_NNeurons, ANN->input_min,ANN->input_max,ANN->output_min,ANN->output_max, // Na FANN, cada Neuronio tem um steepness de ativação. A neuralnet usa somente o steepness de ativação do primeiro neuronio, // porem a primeira camada da FANN nao tem stepness, logo pegamos da camada seguinte (ANN->first_layer+1)->first_neuron->activation_steepness, // A FANN tem um bias para cada camada da rede, na NeuralNet eh usado somente o bias da primeira camada BiasArray[0], ANN_Weights ); NN->MSE = fann_get_MSE(ANN); return NN; }
/***
 * @method rspamd_fann:get_layers()
 * Returns array of neurons count for each layer
 * @return {table/number} table with number of neurons in each layer
 */
static gint
lua_fann_get_layers (lua_State *L)
{
#ifndef WITH_FANN
	return 0;
#else
	struct fann *ann = rspamd_lua_check_fann (L, 1);
	guint n, idx, *counts;

	/* Guard clause: push nil for an invalid handle */
	if (ann == NULL) {
		lua_pushnil (L);
		return 1;
	}

	n = fann_get_num_layers (ann);
	counts = g_new (guint, n);
	fann_get_layer_array (ann, counts);

	/* Build a 1-based Lua array of per-layer neuron counts */
	lua_createtable (L, n, 0);
	for (idx = 0; idx < n; idx++) {
		lua_pushnumber (L, counts[idx]);
		lua_rawseti (L, -2, idx + 1);
	}
	g_free (counts);

	return 1;
#endif
}
bool ViFann::loadFromFile(const QString &path) { QFile file(path); if(!file.exists()) return false; clear(); mNetwork = fann_create_from_file(path.toLatin1().data()); if(mNetwork == NULL) return false; fann_nettype_enum type = fann_get_network_type(mNetwork); //TODO: get type int layerCount = fann_get_num_layers(mNetwork); unsigned int layers[layerCount]; fann_get_layer_array(mNetwork, layers); for(int i = 0; i < layerCount; ++i) { mNeurons.append(layers[i]); } mInputCount = mNeurons.first(); mOutputCount = mNeurons.last(); if(mInput == NULL) delete [] mInput; if(mOutput == NULL) delete [] mOutput; mInput = new float[mInputCount]; mOutput = new float[mOutputCount]; mConnectionRate = fann_get_connection_rate(mNetwork); //TODO: get weights return true; }
void fann_save_matrices(struct fann *network, char *fname){ unsigned int layers; unsigned int layer[100]; unsigned int bias[100]; unsigned int total_weights; unsigned int neuron_inputs; unsigned int writes_counter; DATA_TYPE weight; struct fann_connection *connections; FILE *array; char array_name[255]; int i, j; writes_counter = 0; layers = fann_get_num_layers(network); fann_get_layer_array(network, layer); fann_get_bias_array(network, bias); total_weights = fann_get_total_connections(network); printf("Total weights: %i\n", total_weights); connections = (struct fann_connection *) malloc(total_weights * sizeof(struct fann_connection)); fann_get_connection_array(network, connections); for(i = 1; i < layers; i++){ sprintf(array_name, "%s_W%i.net", fname, i); array = fopen(array_name, "wb"); for (j = 0; j < layer[i]*(layer[i-1] + 1); j++){ weight = connections[writes_counter].weight; #ifdef DEBUG mexPrintf("Number:\t%i\n", writes_counter); mexPrintf("Weight:\t%e\n", connections[writes_counter].weight); mexPrintf("From:\t%i\n", connections[writes_counter].from_neuron); mexPrintf("To:\t%i\n", connections[writes_counter].to_neuron); mexEvalString("drawnow;"); #endif fwrite(&weight, sizeof(DATA_TYPE) , 1, array); writes_counter++; } fclose(array); } return; }