/* Allocate memory when a fully recurrent neuron is created.
 *
 * Sets up activation defaults, wires the neuron's function pointers, and
 * allocates the weight and dot-product buffers.  The neuron sees the layer
 * inputs plus the outputs of every neuron in the layer (the recurrent part),
 * hence num_inputs = layer->num_inputs + layer->num_neurons.
 *
 * Returns 0 on success, 1 on allocation failure.
 *
 * Fixes vs. previous revision:
 *  - removed the unused local `multiplier` in the FIXEDFANN branch (dead
 *    variable, compiler warning);
 *  - on failure of the `sums` allocation, the already-allocated `weights`
 *    buffer is now freed (it previously leaked on that error path).
 */
FANN_EXTERNAL int FANN_API fann_neuron_constructor_fully_recurrent(struct fann *ann, struct fann_layer *layer,
		struct fann_neuron *neuron, struct fann_neuron_descr * descr)
{
	unsigned int i;

#ifdef FIXEDFANN
	neuron->activation_steepness = ann->fixed_params->multiplier / 2;
#else
	neuron->activation_steepness = 0.5;
#endif

	/* Each neuron outputs only to one portion of the output array (if any) */
	neuron->num_outputs = 1;
	neuron->activation_function = FANN_SIGMOID_STEPWISE;
	neuron->inputs = layer->inputs;
	/* recurrent: layer inputs plus feedback from every neuron in the layer */
	neuron->num_inputs = layer->num_inputs + layer->num_neurons;

	/* wire up the per-neuron function pointers */
	neuron->destructor = fann_neuron_destructor_fully_recurrent;
	neuron->run = fann_neuron_run_fully_recurrent;
	neuron->compute_error = fann_fully_recurrent_neuron_compute_MSE;
	neuron->train_initialize = fann_neuron_train_initialize_fully_recurrent;

	/* set the error array to null (lazy allocation) */
	neuron->train_errors = NULL;

	/* allocate the weights -- connected to all neurons and all inputs */
	neuron->num_weights = neuron->num_outputs * (layer->num_inputs + layer->num_neurons);
	if ( (neuron->weights = (fann_type*) malloc(neuron->num_weights * sizeof(fann_type))) == NULL)
	{
		return 1;
	}

	/* randomly initialize the weights */
	for (i = 0; i < neuron->num_weights; i++)
	{
		neuron->weights[i] = (fann_type) fann_random_weight();
	}

	/* allocate space for the dot product results */
	if ( (neuron->sums = (fann_type*) malloc(neuron->num_outputs * sizeof(fann_type))) == NULL)
	{
		/* don't leak the weights allocated above */
		free(neuron->weights);
		neuron->weights = NULL;
		return 1;
	}
	return 0;
}
/* Allocates room inside the neuron for the connections.
 * Creates a sparsely connected neuron: only a fraction (connection_rate) of
 * the possible input->output connections is enabled, recorded in a 0/1 mask.
 *
 * Connections per output are chosen by a "dice throw" simulation (see the
 * comment before the selection loop below).
 *
 * Returns 0 on success, 1 on allocation failure.
 *
 * NOTE(review): on the later allocation-failure paths the earlier allocations
 * (weights, sums, private_data, mask) are returned un-freed here — presumably
 * the caller invokes the neuron destructor on failure; verify against caller.
 */
FANN_EXTERNAL int FANN_API fann_sparse_neuron_constructor(struct fann *ann, struct fann_layer *layer,
		struct fann_neuron *neuron, struct fann_neuron_descr * descr)
{
	unsigned int i, j;
	unsigned int min_connections, max_connections, num_connections;
	unsigned int connections_per_output;
	/* the descriptor's private data carries the requested connection rate */
	float connection_rate = * ((float* )descr->private_data);
	struct fann_sparse_neuron_private_data* private_data;
	struct fann_neuron_private_data_connected_any_any* generic_private_data;
	fann_type *mask, *weights;
	struct dice *dices;

#ifdef FIXEDFANN
	fann_type multiplier = ann->fixed_params->multiplier;
	neuron->activation_steepness = ann->fixed_params->multiplier / 2;
#else
	neuron->activation_steepness = 0.5;
#endif

	/* clamp the rate to at most fully connected */
	connection_rate = connection_rate > 1.0f ? 1.0f : connection_rate;

	neuron->activation_function = FANN_SIGMOID_STEPWISE;
	neuron->num_outputs=descr->num_outputs;
	neuron->inputs=layer->inputs;
	neuron->num_inputs=layer->num_inputs;

	/* set the error array to null (lazy allocation) */
	neuron->train_errors=NULL;

	/* this is the number of actually allocated weights (some are unused) */
	neuron->num_weights=neuron->num_outputs*neuron->num_inputs;

	/* allocate the weights even for unused connections (calloc zeroes them) */
	if ( (weights = neuron->weights = (fann_type*) calloc(neuron->num_weights, sizeof(fann_type))) == NULL)
		return 1;

	/* allocate space for the dot products results */
	if ( (neuron->sums = (fann_type*) malloc(neuron->num_outputs*sizeof(fann_type))) == NULL)
		return 1;

	/* allocate private data */
	if ( (private_data = neuron->private_data = (struct fann_sparse_neuron_private_data*)
			malloc(sizeof(struct fann_sparse_neuron_private_data))) == NULL)
		return 1;

	/* private data stores the connection mask, allocate it (zeroed: all off) */
	if ( (mask = private_data->mask = (fann_type*) calloc(neuron->num_weights, sizeof(fann_type))) == NULL)
		return 1;

	if ( (generic_private_data = private_data->generic = (struct
			fann_neuron_private_data_connected_any_any*) malloc
			(sizeof(struct fann_neuron_private_data_connected_any_any))) == NULL)
		return 1;
	generic_private_data->prev_steps=NULL;
	generic_private_data->prev_weights_deltas=NULL;

	/* allocate a set of dices to select rows */
	if ( (dices = (struct dice*) malloc(neuron->num_inputs*sizeof(struct dice))) == NULL)
		return 1;
	for (i=0; i<neuron->num_inputs; i++)
	{
		dices[i].idx=i;
		dices[i].value=0;
	}

	/* every input and every output must get at least one connection;
	 * round the requested rate to the nearest whole connection count */
	min_connections = fann_max(neuron->num_inputs, neuron->num_outputs);
	max_connections = neuron->num_inputs * neuron->num_outputs;
	num_connections = fann_max(min_connections,
			(unsigned int) (0.5 + (connection_rate * max_connections)));
	connections_per_output = num_connections / neuron->num_outputs;

	/* Dice throw simulation: a float value is assigned to each input.
	 * The decimal component is chosen randomly between 0 and 0.4 ("dice throw").
	 * The integer component equals the number of output neurons already
	 * connected to this input.
	 * For each output neuron each input gets a new "dice throw", then the
	 * inputs are sorted in ascending order according to the value.
	 * The first entries in the array had fewer output neurons attached and
	 * better luck in the "dice throw"; those are selected and their value is
	 * incremented.
	 */
	for (i=0; i<neuron->num_outputs; i++)
	{
		/* throw one dice per input: keep the integer (usage count) part,
		 * replace the fractional (luck) part */
		for (j=0; j<neuron->num_inputs; j++)
			dices[j].value= ((int)dices[j].value) + fann_rand(0, 0.4);
		/* sort: smaller (usage count + dice value) wins */
		qsort((void*) dices, neuron->num_inputs, sizeof(struct dice), dice_sorter);
		/* assign connections for this output to the winner inputs.
		 * NOTE(review): `weights` advances by num_inputs per output below, but
		 * `mask` is indexed without that offset, so only the first num_inputs
		 * mask entries are ever set — looks like a bug unless the run function
		 * only consults the first mask row; verify against
		 * fann_sparse_neuron_run. */
		for (j=0; j<connections_per_output; j++)
		{
			dices[j].value+=1;
			mask[dices[j].idx] = (fann_type) 1.0f;
			weights[dices[j].idx] = (fann_type) fann_random_weight();
		}
		/* move to the weight row of the next output */
		weights += neuron->num_inputs;
	}
	free(dices);

	/* set the function pointers */
	neuron->destructor = fann_sparse_neuron_destructor;
	neuron->run = fann_sparse_neuron_run;
	neuron->backpropagate = fann_sparse_neuron_backprop;
	neuron->update_weights = fann_sparse_neuron_update;
	neuron->compute_error = fann_sparse_neuron_compute_MSE;
	return 0;
}
/* Create a network from a layer-size array, connecting only a fraction
 * (connection_rate) of the possible connections between adjacent layers.
 * A rate >= 1 produces a fully connected network.
 * Returns the new network, or NULL on allocation failure.
 */
FANN_EXTERNAL struct fann *FANN_API fann_create_sparse_array(float connection_rate,
		unsigned int num_layers, const unsigned int *layers)
{
	struct fann_layer *layer_it, *last_layer, *prev_layer;
	struct fann *ann;
	struct fann_neuron *neuron_it, *last_neuron, *random_neuron, *bias_neuron;
#ifdef DEBUG
	unsigned int prev_layer_size;
#endif
	unsigned int num_neurons_in, num_neurons_out, i, j;
	unsigned int min_connections, max_connections, num_connections;
	unsigned int connections_per_neuron, allocated_connections;
	unsigned int random_number, found_connection, tmp_con;
#ifdef FIXEDFANN
	/* NOTE(review): decimal_point/multiplier are read but not otherwise used
	 * in this function — possibly kept for debugging; confirm before removal */
	unsigned int decimal_point;
	unsigned int multiplier;
#endif
	if(connection_rate > 1)
	{
		connection_rate = 1;
	}
	/* seed random */
#ifndef FANN_NO_SEED
	fann_seed_rand();
#endif
	/* allocate the general structure */
	ann = fann_allocate_structure(num_layers);
	if(ann == NULL)
	{
		fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
		return NULL;
	}
	ann->connection_rate = connection_rate;
#ifdef FIXEDFANN
	decimal_point = ann->decimal_point;
	multiplier = ann->multiplier;
	fann_update_stepwise(ann);
#endif
	/* determine how many neurons there should be in each layer */
	i = 0;
	for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++)
	{
		/* we do not allocate room here, but we make sure that
		 * last_neuron - first_neuron is the number of neurons
		 * (first_neuron is NULL, so last_neuron is used purely as a count) */
		layer_it->first_neuron = NULL;
		layer_it->last_neuron = layer_it->first_neuron + layers[i++] + 1;	/* +1 for bias */
		ann->total_neurons += layer_it->last_neuron - layer_it->first_neuron;
	}
	/* last_neuron - first_neuron includes the bias, hence the -1 */
	ann->num_output = (ann->last_layer - 1)->last_neuron - (ann->last_layer - 1)->first_neuron - 1;
	ann->num_input = ann->first_layer->last_neuron - ann->first_layer->first_neuron - 1;
	/* allocate room for the actual neurons */
	fann_allocate_neurons(ann);
	if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
	{
		fann_destroy(ann);
		return NULL;
	}
#ifdef DEBUG
	printf("creating network with connection rate %f\n", connection_rate);
	printf("input\n");
	printf(" layer : %d neurons, 1 bias\n",
		ann->first_layer->last_neuron - ann->first_layer->first_neuron - 1);
#endif
	num_neurons_in = ann->num_input;
	for(layer_it = ann->first_layer + 1; layer_it != ann->last_layer; layer_it++)
	{
		num_neurons_out = layer_it->last_neuron - layer_it->first_neuron - 1;
		/* if all neurons in each layer should be connected to at least one neuron
		 * in the previous layer, and one neuron in the next layer,
		 * and the bias node should be connected to all neurons in the next layer,
		 * then this is the minimum amount of connections */
		min_connections = fann_max(num_neurons_in, num_neurons_out) + num_neurons_out;
		max_connections = num_neurons_in * num_neurons_out;	/* not calculating bias */
		num_connections = fann_max(min_connections,
			(unsigned int) (0.5 + (connection_rate * max_connections)) + num_neurons_out);
		connections_per_neuron = num_connections / num_neurons_out;
		allocated_connections = 0;
		/* Now split out the connections on the different neurons;
		 * the remainder of the division is spread one extra connection at a time */
		for(i = 0; i != num_neurons_out; i++)
		{
			layer_it->first_neuron[i].first_con = ann->total_connections + allocated_connections;
			allocated_connections += connections_per_neuron;
			layer_it->first_neuron[i].last_con = ann->total_connections + allocated_connections;
			layer_it->first_neuron[i].activation_function = FANN_SIGMOID_STEPWISE;
#ifdef FIXEDFANN
			layer_it->first_neuron[i].activation_steepness = ann->multiplier / 2;
#else
			layer_it->first_neuron[i].activation_steepness = 0.5;
#endif
			if(allocated_connections < (num_connections * (i + 1)) / num_neurons_out)
			{
				layer_it->first_neuron[i].last_con++;
				allocated_connections++;
			}
		}
		/* bias neuron also gets stuff (an empty connection range) */
		layer_it->first_neuron[i].first_con = ann->total_connections + allocated_connections;
		layer_it->first_neuron[i].last_con = ann->total_connections + allocated_connections;
		ann->total_connections += num_connections;
		/* used in the next run of the loop */
		num_neurons_in = num_neurons_out;
	}
	fann_allocate_connections(ann);
	if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
	{
		fann_destroy(ann);
		return NULL;
	}
	if(connection_rate >= 1)
	{
		/* fully connected: wire every neuron to every neuron of the previous
		 * layer, with the last connection of each neuron reserved for the bias */
#ifdef DEBUG
		prev_layer_size = ann->num_input + 1;
#endif
		prev_layer = ann->first_layer;
		last_layer = ann->last_layer;
		for(layer_it = ann->first_layer + 1; layer_it != last_layer; layer_it++)
		{
			last_neuron = layer_it->last_neuron - 1;
			for(neuron_it = layer_it->first_neuron; neuron_it != last_neuron; neuron_it++)
			{
				tmp_con = neuron_it->last_con - 1;
				for(i = neuron_it->first_con; i != tmp_con; i++)
				{
					ann->weights[i] = (fann_type) fann_random_weight();
					/* these connections are still initialized for fully connected networks, to allow
					 * operations to work, that are not optimized for fully connected networks. */
					ann->connections[i] = prev_layer->first_neuron + (i - neuron_it->first_con);
				}
				/* bias weight */
				ann->weights[tmp_con] = (fann_type) fann_random_bias_weight();
				ann->connections[tmp_con] = prev_layer->first_neuron + (tmp_con - neuron_it->first_con);
			}
#ifdef DEBUG
			prev_layer_size = layer_it->last_neuron - layer_it->first_neuron;
#endif
			prev_layer = layer_it;
#ifdef DEBUG
			printf(" layer : %d neurons, 1 bias\n", prev_layer_size - 1);
#endif
		}
	}
	else
	{
		/* make connections for a network, that are not fully connected */
		/* generally, what we do is first to connect all the input
		 * neurons to a output neuron, respecting the number of
		 * available input neurons for each output neuron. Then
		 * we go through all the output neurons, and connect the
		 * rest of the connections to input neurons, that they are
		 * not already connected to.
		 */
		/* All the connections are cleared by calloc, because we want to
		 * be able to see which connections are already connected */
		for(layer_it = ann->first_layer + 1; layer_it != ann->last_layer; layer_it++)
		{
			num_neurons_out = layer_it->last_neuron - layer_it->first_neuron - 1;
			num_neurons_in = (layer_it - 1)->last_neuron - (layer_it - 1)->first_neuron - 1;
			/* first connect the bias neuron */
			bias_neuron = (layer_it - 1)->last_neuron - 1;
			last_neuron = layer_it->last_neuron - 1;
			for(neuron_it = layer_it->first_neuron; neuron_it != last_neuron; neuron_it++)
			{
				ann->connections[neuron_it->first_con] = bias_neuron;
				ann->weights[neuron_it->first_con] = (fann_type) fann_random_bias_weight();
			}
			/* then connect all neurons in the input layer */
			last_neuron = (layer_it - 1)->last_neuron - 1;
			for(neuron_it = (layer_it - 1)->first_neuron; neuron_it != last_neuron; neuron_it++)
			{
				/* random neuron in the output layer that has space
				 * for more connections */
				do
				{
					random_number = (int) (0.5 + fann_rand(0, num_neurons_out - 1));
					random_neuron = layer_it->first_neuron + random_number;
					/* checks the last space in the connections array for room */
				}
				while(ann->connections[random_neuron->last_con - 1]);
				/* find an empty space in the connection array and connect */
				for(i = random_neuron->first_con; i < random_neuron->last_con; i++)
				{
					if(ann->connections[i] == NULL)
					{
						ann->connections[i] = neuron_it;
						ann->weights[i] = (fann_type) fann_random_weight();
						break;
					}
				}
			}
			/* then connect the rest of the unconnected neurons */
			last_neuron = layer_it->last_neuron - 1;
			for(neuron_it = layer_it->first_neuron; neuron_it != last_neuron; neuron_it++)
			{
				/* find empty space in the connection array and connect */
				for(i = neuron_it->first_con; i < neuron_it->last_con; i++)
				{
					/* continue if already connected */
					if(ann->connections[i] != NULL)
						continue;
					do
					{
						found_connection = 0;
						random_number = (int) (0.5 + fann_rand(0, num_neurons_in - 1));
						random_neuron = (layer_it - 1)->first_neuron + random_number;
						/* check to see if this connection is already there */
						for(j = neuron_it->first_con; j < i; j++)
						{
							if(random_neuron == ann->connections[j])
							{
								found_connection = 1;
								break;
							}
						}
					}
					while(found_connection);
					/* we have found a neuron that is not already
					 * connected to us, connect it */
					ann->connections[i] = random_neuron;
					ann->weights[i] = (fann_type) fann_random_weight();
				}
			}
#ifdef DEBUG
			printf(" layer : %d neurons, 1 bias\n", num_neurons_out);
#endif
		}
		/* TODO it would be nice to have the randomly created
		 * connections sorted for smoother memory access. */
	}
#ifdef DEBUG
	printf("output\n");
#endif
	return ann;
}
/* Create a standard fully connected network from a layer-size array.
 * Every neuron is connected to all neurons (plus the bias) of the previous
 * layer.  Returns the new network, or NULL on allocation failure.
 *
 * Fix vs. previous revision: the first #ifdef DEBUG block printed
 * `connection_rate`, a variable that does not exist in this function (copied
 * from the sparse variant) and broke the build when DEBUG was defined; the
 * neuron-count printf argument is also cast to int to match %d.
 */
FANN_EXTERNAL struct fann *FANN_API fann_create_standard_array(unsigned int num_layers,
		const unsigned int *layers)
{
	struct fann_layer *layer_it, *last_layer, *prev_layer;
	struct fann *ann;
	struct fann_neuron *neuron_it, *last_neuron;
#ifdef DEBUG
	unsigned int prev_layer_size;
#endif
	unsigned int num_neurons_in, num_neurons_out, i;
	unsigned int min_connections, max_connections, num_connections;
	unsigned int connections_per_neuron, allocated_connections;
	unsigned int tmp_con;

	/* seed random */
#ifndef FANN_NO_SEED
	fann_seed_rand();
#endif

	/* allocate the general structure */
	ann = fann_allocate_structure(num_layers);
	if(ann == NULL)
	{
		fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
		return NULL;
	}

	/* determine how many neurons there should be in each layer */
	i = 0;
	for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++)
	{
		/* we do not allocate room here, but we make sure that
		 * last_neuron - first_neuron is the number of neurons
		 * (first_neuron is NULL, so last_neuron is purely a count) */
		layer_it->first_neuron = NULL;
		layer_it->last_neuron = layer_it->first_neuron + layers[i++] + 1;	/* +1 for bias */
		ann->total_neurons += (unsigned int)(layer_it->last_neuron - layer_it->first_neuron);
	}

	/* the counts include the bias neuron, hence the -1 */
	ann->num_output = (unsigned int)((ann->last_layer - 1)->last_neuron -
			(ann->last_layer - 1)->first_neuron - 1);
	ann->num_input = (unsigned int)(ann->first_layer->last_neuron -
			ann->first_layer->first_neuron - 1);

	/* allocate room for the actual neurons */
	fann_allocate_neurons(ann);
	if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
	{
		fann_destroy(ann);
		return NULL;
	}
#ifdef DEBUG
	/* fixed: this function has no connection_rate variable (it is always
	 * fully connected), so do not try to print it */
	printf("creating fully connected network.\n");
	printf("input\n");
	printf(" layer : %d neurons, 1 bias\n",
		(int)(ann->first_layer->last_neuron - ann->first_layer->first_neuron - 1));
#endif
	num_neurons_in = ann->num_input;
	for(layer_it = ann->first_layer + 1; layer_it != ann->last_layer; layer_it++)
	{
		layer_it->activation_function = FANN_SIGMOID_SYMMETRIC;
		layer_it->activation_steepness = 0.5;
		num_neurons_out = (unsigned int)(layer_it->last_neuron - layer_it->first_neuron - 1);
		/* if all neurons in each layer should be connected to at least one neuron
		 * in the previous layer, and one neuron in the next layer,
		 * and the bias node should be connected to all neurons in the next layer,
		 * then this is the minimum amount of connections */
		min_connections = fann_max(num_neurons_in, num_neurons_out) + num_neurons_out;
		max_connections = num_neurons_in * num_neurons_out;	/* not calculating bias */
		num_connections = fann_max(min_connections, max_connections + num_neurons_out);
		connections_per_neuron = num_connections / num_neurons_out;
		allocated_connections = 0;
		/* Now split out the connections on the different neurons;
		 * the remainder is spread one extra connection at a time */
		for(i = 0; i != num_neurons_out; i++)
		{
			layer_it->first_neuron[i].first_con = ann->total_connections + allocated_connections;
			allocated_connections += connections_per_neuron;
			layer_it->first_neuron[i].last_con = ann->total_connections + allocated_connections;
			if(allocated_connections < (num_connections * (i + 1)) / num_neurons_out)
			{
				layer_it->first_neuron[i].last_con++;
				allocated_connections++;
			}
		}
		/* bias neuron also gets stuff (an empty connection range) */
		layer_it->first_neuron[i].first_con = ann->total_connections + allocated_connections;
		layer_it->first_neuron[i].last_con = ann->total_connections + allocated_connections;
		ann->total_connections += num_connections;
		/* used in the next run of the loop */
		num_neurons_in = num_neurons_out;
	}
	fann_allocate_connections(ann);
	if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
	{
		fann_destroy(ann);
		return NULL;
	}
#ifdef DEBUG
	prev_layer_size = ann->num_input + 1;
#endif
	prev_layer = ann->first_layer;
	last_layer = ann->last_layer;
	for(layer_it = ann->first_layer + 1; layer_it != last_layer; layer_it++)
	{
		last_neuron = layer_it->last_neuron - 1;
		for(neuron_it = layer_it->first_neuron; neuron_it != last_neuron; neuron_it++)
		{
			tmp_con = neuron_it->last_con - 1;
			for(i = neuron_it->first_con; i != tmp_con; i++)
			{
				ann->weights[i] = (fann_type) fann_random_weight();
				/* these connections are still initialized for fully connected networks, to allow
				 * operations to work, that are not optimized for fully connected networks. */
				ann->connections[i] = prev_layer->first_neuron + (i - neuron_it->first_con);
			}
			/* bias weight */
			ann->weights[tmp_con] = (fann_type) fann_random_bias_weight();
			ann->connections[tmp_con] = prev_layer->first_neuron + (tmp_con - neuron_it->first_con);
		}
#ifdef DEBUG
		prev_layer_size = layer_it->last_neuron - layer_it->first_neuron;
#endif
		prev_layer = layer_it;
#ifdef DEBUG
		printf(" layer : %d neurons, 1 bias\n", prev_layer_size - 1);
#endif
	}
#ifdef DEBUG
	printf("output\n");
#endif
	return ann;
}
/* Create a fully shortcut-connected network from a layer-size array:
 * every neuron is connected to all neurons of ALL earlier layers (not just
 * the previous one).  Only the input layer has a bias neuron.
 * Returns the new network, or NULL on allocation failure.
 */
FANN_EXTERNAL struct fann *FANN_API fann_create_shortcut_array(unsigned int num_layers,
		const unsigned int *layers)
{
	struct fann_layer *layer_it, *layer_it2, *last_layer;
	struct fann *ann;
	struct fann_neuron *neuron_it, *neuron_it2 = 0;
	unsigned int i;
	unsigned int num_neurons_in, num_neurons_out;
#ifdef FIXEDFANN
	/* NOTE(review): decimal_point/multiplier are read but not otherwise used
	 * in this function — possibly kept for debugging; confirm before removal */
	unsigned int decimal_point;
	unsigned int multiplier;
#endif
	/* seed random */
#ifndef FANN_NO_SEED
	fann_seed_rand();
#endif
	/* allocate the general structure */
	ann = fann_allocate_structure(num_layers);
	if(ann == NULL)
	{
		fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
		return NULL;
	}
	ann->connection_rate = 1;
	ann->network_type = FANN_NETTYPE_SHORTCUT;
#ifdef FIXEDFANN
	decimal_point = ann->decimal_point;
	multiplier = ann->multiplier;
	fann_update_stepwise(ann);
#endif
	/* determine how many neurons there should be in each layer */
	i = 0;
	for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++)
	{
		/* we do not allocate room here, but we make sure that
		 * last_neuron - first_neuron is the number of neurons
		 * (first_neuron is NULL, so last_neuron is purely a count) */
		layer_it->first_neuron = NULL;
		layer_it->last_neuron = layer_it->first_neuron + layers[i++];
		if(layer_it == ann->first_layer)
		{
			/* there is a bias neuron in the first layer */
			layer_it->last_neuron++;
		}
		ann->total_neurons += layer_it->last_neuron - layer_it->first_neuron;
	}
	/* no bias in the output layer of a shortcut network, so no -1 here */
	ann->num_output = (ann->last_layer - 1)->last_neuron - (ann->last_layer - 1)->first_neuron;
	ann->num_input = ann->first_layer->last_neuron - ann->first_layer->first_neuron - 1;
	/* allocate room for the actual neurons */
	fann_allocate_neurons(ann);
	if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
	{
		fann_destroy(ann);
		return NULL;
	}
#ifdef DEBUG
	printf("creating fully shortcut connected network.\n");
	printf("input\n");
	printf(" layer : %d neurons, 1 bias\n",
		ann->first_layer->last_neuron - ann->first_layer->first_neuron - 1);
#endif
	num_neurons_in = ann->num_input;
	last_layer = ann->last_layer;
	for(layer_it = ann->first_layer + 1; layer_it != last_layer; layer_it++)
	{
		num_neurons_out = layer_it->last_neuron - layer_it->first_neuron;
		/* Now split out the connections on the different neurons:
		 * each neuron connects to every neuron seen so far plus the bias */
		for(i = 0; i != num_neurons_out; i++)
		{
			layer_it->first_neuron[i].first_con = ann->total_connections;
			ann->total_connections += num_neurons_in + 1;
			layer_it->first_neuron[i].last_con = ann->total_connections;
			layer_it->first_neuron[i].activation_function = FANN_SIGMOID_STEPWISE;
#ifdef FIXEDFANN
			layer_it->first_neuron[i].activation_steepness = ann->multiplier / 2;
#else
			layer_it->first_neuron[i].activation_steepness = 0.5;
#endif
		}
#ifdef DEBUG
		printf(" layer : %d neurons, 0 bias\n", num_neurons_out);
#endif
		/* used in the next run of the loop: shortcut networks accumulate */
		num_neurons_in += num_neurons_out;
	}
	fann_allocate_connections(ann);
	if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
	{
		fann_destroy(ann);
		return NULL;
	}
	/* Connections are created from all neurons to all neurons in later layers */
	num_neurons_in = ann->num_input + 1;
	for(layer_it = ann->first_layer + 1; layer_it != last_layer; layer_it++)
	{
		for(neuron_it = layer_it->first_neuron; neuron_it != layer_it->last_neuron; neuron_it++)
		{
			i = neuron_it->first_con;
			/* walk every earlier layer and wire each of its neurons in */
			for(layer_it2 = ann->first_layer; layer_it2 != layer_it; layer_it2++)
			{
				for(neuron_it2 = layer_it2->first_neuron; neuron_it2 != layer_it2->last_neuron;
					neuron_it2++)
				{
					ann->weights[i] = (fann_type) fann_random_weight();
					ann->connections[i] = neuron_it2;
					i++;
				}
			}
		}
		/* NOTE(review): this final update of num_neurons_in is never read
		 * after the loop — dead store, kept for symmetry with the count loop */
		num_neurons_in += layer_it->last_neuron - layer_it->first_neuron;
	}
#ifdef DEBUG
	printf("output\n");
#endif
	return ann;
}