/* Sanity check: a single neuron can be initialised and freed
   without crashing or leaking */
static void test_backprop_neuron_init()
{
    bp_neuron neuron;
    const int inputs = 10;
    unsigned int seed = 123;

    printf("test_backprop_neuron_init...");

    bp_neuron_init(&neuron, inputs, &seed);
    bp_neuron_free(&neuron);

    printf("Ok\n");
}
/* Round-trip check: save a neuron to a temporary file, load it into a
   second neuron and verify that the two compare as equal */
static void test_backprop_neuron_save_load()
{
    bp_neuron n1, n2;
    int no_of_inputs = 10;
    unsigned int random_seed = 123;
    char filename[256];
    FILE * fp;

    printf("test_backprop_neuron_save_load...");

    /* create two neurons with identical parameters */
    bp_neuron_init(&n1, no_of_inputs, &random_seed);
    bp_neuron_init(&n2, no_of_inputs, &random_seed);

    /* bounded formatting: plain sprintf could overflow filename[]
       if DEEPLEARN_TEMP_DIRECTORY expands to a long path */
    snprintf(filename, sizeof(filename),
             "%stemp_deep.dat", DEEPLEARN_TEMP_DIRECTORY);

    /* save the first neuron */
    fp = fopen(filename, "wb");
    assert(fp != 0);
    bp_neuron_save(fp, &n1);
    fclose(fp);

    /* load into the second neuron */
    fp = fopen(filename, "rb");
    assert(fp != 0);
    bp_neuron_load(fp, &n2);
    fclose(fp);

    /* the loaded neuron must match the saved one */
    assert(bp_neuron_compare(&n1, &n2) == 1);

    /* free memory */
    bp_neuron_free(&n1);
    bp_neuron_free(&n2);

    printf("Ok\n");
}
/* Copy check: duplicating one neuron into another must produce
   a neuron that compares as identical */
static void test_backprop_neuron_copy()
{
    bp_neuron src, dest;
    const int inputs = 10;
    unsigned int seed = 123;
    int comparison;

    printf("test_backprop_neuron_copy...");

    bp_neuron_init(&src, inputs, &seed);
    bp_neuron_init(&dest, inputs, &seed);

    bp_neuron_copy(&src, &dest);

    comparison = bp_neuron_compare(&src, &dest);
    if (comparison != 1) {
        /* report the actual comparison result before asserting */
        printf("\nretval %d\n", comparison);
    }
    assert(comparison == 1);

    bp_neuron_free(&src);
    bp_neuron_free(&dest);

    printf("Ok\n");
}
/**
 * @brief Initialise a backprop neural net
 * @param net Backprop neural net object
 * @param no_of_inputs The number of input units
 * @param no_of_hiddens The number of units in each hidden layer
 * @param hidden_layers The number of hidden layers
 * @param no_of_outputs The number of output units
 * @param random_seed The random number generator seed
 */
void bp_init(bp * net,
             int no_of_inputs,
             int no_of_hiddens,
             int hidden_layers,
             int no_of_outputs,
             unsigned int * random_seed)
{
    int i, j, l;
    bp_neuron * n;

    /* default training parameters and error state */
    net->learningRate = 0.2f;
    net->noise = 0.0f;
    net->random_seed = *random_seed;
    net->BPerror = DEEPLEARN_UNKNOWN_ERROR;
    net->BPerrorAverage = DEEPLEARN_UNKNOWN_ERROR;
    net->BPerrorTotal = DEEPLEARN_UNKNOWN_ERROR;
    net->itterations = 0;
    net->DropoutPercent = 20;

    /* allocate the per-layer neuron pointer arrays */
    net->NoOfInputs = no_of_inputs;
    net->inputs = (bp_neuron**)malloc(no_of_inputs*sizeof(bp_neuron*));

    net->NoOfHiddens = no_of_hiddens;
    net->HiddenLayers = hidden_layers;
    net->hiddens = (bp_neuron***)malloc(hidden_layers*sizeof(bp_neuron**));
    for (l = 0; l < hidden_layers; l++) {
        net->hiddens[l] =
            (bp_neuron**)malloc(no_of_hiddens*sizeof(bp_neuron*));
    }

    net->NoOfOutputs = no_of_outputs;
    net->outputs = (bp_neuron**)malloc(no_of_outputs*sizeof(bp_neuron*));

    /* create inputs: each input neuron has a single connection slot */
    for (i = 0; i < net->NoOfInputs; i++) {
        /* was sizeof(struct bp_n); use the bp_neuron typedef for
           consistency with every other neuron allocation below */
        net->inputs[i] = (bp_neuron*)malloc(sizeof(bp_neuron));
        bp_neuron_init(net->inputs[i], 1, random_seed);
    }

    /* create hiddens: the first layer is fully connected to the
       inputs, each subsequent layer to the previous hidden layer */
    for (l = 0; l < hidden_layers; l++) {
        for (i = 0; i < net->NoOfHiddens; i++) {
            net->hiddens[l][i] = (bp_neuron*)malloc(sizeof(bp_neuron));
            n = net->hiddens[l][i];
            if (l == 0) {
                bp_neuron_init(n, no_of_inputs, random_seed);
                /* connect to input layer */
                for (j = 0; j < net->NoOfInputs; j++) {
                    bp_neuron_add_connection(n, j, net->inputs[j]);
                }
            }
            else {
                bp_neuron_init(n, no_of_hiddens, random_seed);
                /* connect to previous hidden layer */
                for (j = 0; j < net->NoOfHiddens; j++) {
                    bp_neuron_add_connection(n, j, net->hiddens[l-1][j]);
                }
            }
        }
    }

    /* create outputs, fully connected to the last hidden layer */
    for (i = 0; i < net->NoOfOutputs; i++) {
        net->outputs[i] = (bp_neuron*)malloc(sizeof(bp_neuron));
        n = net->outputs[i];
        bp_neuron_init(n, no_of_hiddens, random_seed);
        for (j = 0; j < net->NoOfHiddens; j++) {
            bp_neuron_add_connection(n, j,
                                     net->hiddens[hidden_layers-1][j]);
        }
    }
}