FANN_EXTERNAL struct fann *FANN_API fann_create_sparse_array(float connection_rate,
                                                             unsigned int num_layers,
                                                             const unsigned int *layers)
{
    struct fann_layer *layer_it, *last_layer, *prev_layer;
    struct fann *ann;
    struct fann_neuron *neuron_it, *last_neuron, *random_neuron, *bias_neuron;
#ifdef DEBUG
    unsigned int prev_layer_size;
#endif
    unsigned int num_neurons_in, num_neurons_out, i, j;
    unsigned int min_connections, max_connections, num_connections;
    unsigned int connections_per_neuron, allocated_connections;
    unsigned int random_number, found_connection, tmp_con;

#ifdef FIXEDFANN
    unsigned int decimal_point;
    unsigned int multiplier;
#endif
    if(connection_rate > 1)
    {
        connection_rate = 1;
    }

    /* seed random */
#ifndef FANN_NO_SEED
    fann_seed_rand();
#endif

    /* allocate the general structure */
    ann = fann_allocate_structure(num_layers);
    if(ann == NULL)
    {
        fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
        return NULL;
    }

    ann->connection_rate = connection_rate;
#ifdef FIXEDFANN
    decimal_point = ann->decimal_point;
    multiplier = ann->multiplier;
    fann_update_stepwise(ann);
#endif

    /* determine how many neurons there should be in each layer */
    i = 0;
    for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++)
    {
        /* we do not allocate room here, but we make sure that
         * last_neuron - first_neuron is the number of neurons */
        layer_it->first_neuron = NULL;
        layer_it->last_neuron = layer_it->first_neuron + layers[i++] + 1;   /* +1 for bias */
        ann->total_neurons += layer_it->last_neuron - layer_it->first_neuron;
    }

    ann->num_output = (ann->last_layer - 1)->last_neuron - (ann->last_layer - 1)->first_neuron - 1;
    ann->num_input = ann->first_layer->last_neuron - ann->first_layer->first_neuron - 1;

    /* allocate room for the actual neurons */
    fann_allocate_neurons(ann);
    if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
    {
        fann_destroy(ann);
        return NULL;
    }

#ifdef DEBUG
    printf("creating network with connection rate %f\n", connection_rate);
    printf("input\n");
    printf(" layer : %d neurons, 1 bias\n",
           (int)(ann->first_layer->last_neuron - ann->first_layer->first_neuron - 1));
#endif

    num_neurons_in = ann->num_input;
    for(layer_it = ann->first_layer + 1; layer_it != ann->last_layer; layer_it++)
    {
        num_neurons_out = layer_it->last_neuron - layer_it->first_neuron - 1;

        /* If every neuron in each layer should be connected to at least one
         * neuron in the previous layer and one neuron in the next layer, and
         * the bias node should be connected to all neurons in the next layer,
         * then this is the minimum number of connections. */
        min_connections = fann_max(num_neurons_in, num_neurons_out) + num_neurons_out;
        max_connections = num_neurons_in * num_neurons_out;     /* not calculating bias */
        num_connections = fann_max(min_connections,
                                   (unsigned int) (0.5 + (connection_rate * max_connections)) +
                                   num_neurons_out);

        connections_per_neuron = num_connections / num_neurons_out;
        allocated_connections = 0;
        /* Now split out the connections on the different neurons */
        for(i = 0; i != num_neurons_out; i++)
        {
            layer_it->first_neuron[i].first_con = ann->total_connections + allocated_connections;
            allocated_connections += connections_per_neuron;
            layer_it->first_neuron[i].last_con = ann->total_connections + allocated_connections;

            layer_it->first_neuron[i].activation_function = FANN_SIGMOID_STEPWISE;
#ifdef FIXEDFANN
            layer_it->first_neuron[i].activation_steepness = ann->multiplier / 2;
#else
            layer_it->first_neuron[i].activation_steepness = 0.5;
#endif

            if(allocated_connections < (num_connections * (i + 1)) / num_neurons_out)
            {
                layer_it->first_neuron[i].last_con++;
                allocated_connections++;
            }
        }

        /* the bias neuron also gets its connection range, although it is empty */
        layer_it->first_neuron[i].first_con = ann->total_connections + allocated_connections;
        layer_it->first_neuron[i].last_con = ann->total_connections + allocated_connections;

        ann->total_connections += num_connections;

        /* used in the next run of the loop */
        num_neurons_in = num_neurons_out;
    }

    fann_allocate_connections(ann);
    if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
    {
        fann_destroy(ann);
        return NULL;
    }

    if(connection_rate >= 1)
    {
#ifdef DEBUG
        prev_layer_size = ann->num_input + 1;
#endif
        prev_layer = ann->first_layer;
        last_layer = ann->last_layer;
        for(layer_it = ann->first_layer + 1; layer_it != last_layer; layer_it++)
        {
            last_neuron = layer_it->last_neuron - 1;
            for(neuron_it = layer_it->first_neuron; neuron_it != last_neuron; neuron_it++)
            {
                tmp_con = neuron_it->last_con - 1;
                for(i = neuron_it->first_con; i != tmp_con; i++)
                {
                    ann->weights[i] = (fann_type) fann_random_weight();
                    /* the connections array is still initialized for fully
                     * connected networks, so that operations which are not
                     * optimized for the fully connected case still work. */
                    ann->connections[i] = prev_layer->first_neuron + (i - neuron_it->first_con);
                }

                /* bias weight */
                ann->weights[tmp_con] = (fann_type) fann_random_bias_weight();
                ann->connections[tmp_con] = prev_layer->first_neuron + (tmp_con - neuron_it->first_con);
            }
#ifdef DEBUG
            prev_layer_size = layer_it->last_neuron - layer_it->first_neuron;
#endif
            prev_layer = layer_it;
#ifdef DEBUG
            printf(" layer : %d neurons, 1 bias\n", prev_layer_size - 1);
#endif
        }
    }
    else
    {
        /* make connections for a network that is not fully connected */

        /* Generally, what we do is first connect every input neuron to some
         * output neuron, respecting the room available in each output neuron.
         * Then we go through all the output neurons and fill their remaining
         * connection slots with input neurons they are not already
         * connected to. */

        /* All the connections are cleared by calloc, because we want to
         * be able to see which connections are already made. */
        for(layer_it = ann->first_layer + 1; layer_it != ann->last_layer; layer_it++)
        {
            num_neurons_out = layer_it->last_neuron - layer_it->first_neuron - 1;
            num_neurons_in = (layer_it - 1)->last_neuron - (layer_it - 1)->first_neuron - 1;

            /* first connect the bias neuron */
            bias_neuron = (layer_it - 1)->last_neuron - 1;
            last_neuron = layer_it->last_neuron - 1;
            for(neuron_it = layer_it->first_neuron; neuron_it != last_neuron; neuron_it++)
            {
                ann->connections[neuron_it->first_con] = bias_neuron;
                ann->weights[neuron_it->first_con] = (fann_type) fann_random_bias_weight();
            }

            /* then connect all neurons in the previous (input) layer */
            last_neuron = (layer_it - 1)->last_neuron - 1;
            for(neuron_it = (layer_it - 1)->first_neuron; neuron_it != last_neuron; neuron_it++)
            {
                /* pick a random neuron in the output layer that has space
                 * for more connections */
                do
                {
                    random_number = (int) (0.5 + fann_rand(0, num_neurons_out - 1));
                    random_neuron = layer_it->first_neuron + random_number;
                    /* checks the last space in the connections array for room */
                }
                while(ann->connections[random_neuron->last_con - 1]);

                /* find an empty space in the connection array and connect */
                for(i = random_neuron->first_con; i < random_neuron->last_con; i++)
                {
                    if(ann->connections[i] == NULL)
                    {
                        ann->connections[i] = neuron_it;
                        ann->weights[i] = (fann_type) fann_random_weight();
                        break;
                    }
                }
            }

            /* then connect the rest of the unconnected neurons */
            last_neuron = layer_it->last_neuron - 1;
            for(neuron_it = layer_it->first_neuron; neuron_it != last_neuron; neuron_it++)
            {
                /* find empty space in the connection array and connect */
                for(i = neuron_it->first_con; i < neuron_it->last_con; i++)
                {
                    /* continue if already connected */
                    if(ann->connections[i] != NULL)
                        continue;

                    do
                    {
                        found_connection = 0;
                        random_number = (int) (0.5 + fann_rand(0, num_neurons_in - 1));
                        random_neuron = (layer_it - 1)->first_neuron + random_number;

                        /* check to see if this connection is already there */
                        for(j = neuron_it->first_con; j < i; j++)
                        {
                            if(random_neuron == ann->connections[j])
                            {
                                found_connection = 1;
                                break;
                            }
                        }
                    }
                    while(found_connection);

                    /* we have found a neuron that is not already
                     * connected to us, connect it */
                    ann->connections[i] = random_neuron;
                    ann->weights[i] = (fann_type) fann_random_weight();
                }
            }

#ifdef DEBUG
            printf(" layer : %d neurons, 1 bias\n", num_neurons_out);
#endif
        }

        /* TODO it would be nice to have the randomly created
         * connections sorted for smoother memory access. */
    }

#ifdef DEBUG
    printf("output\n");
#endif

    return ann;
}
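
/* Example usage (a minimal sketch, not part of the library source): a 2-4-1
 * network with roughly half of the possible connections. The layer sizes and
 * the 0.5 rate are arbitrary; fann_print_connections() and fann_destroy()
 * are the public FANN calls for inspecting and freeing the result, and
 * unconnected slots show up as spaces in the printed connection matrix.
 *
 *     unsigned int layers[3] = { 2, 4, 1 };
 *     struct fann *ann = fann_create_sparse_array(0.5f, 3, layers);
 *
 *     if(ann != NULL)
 *     {
 *         fann_print_connections(ann);
 *         fann_destroy(ann);
 *     }
 *
 * Note that connection_rate is clamped to 1 above and num_connections never
 * drops below min_connections, so even at very low rates every neuron keeps
 * its bias connection and at least one connection to the previous layer.
 */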
FANN_EXTERNAL struct fann *FANN_API fann_create_standard_array(unsigned int num_layers,
                                                               const unsigned int *layers)
{
    struct fann_layer *layer_it, *last_layer, *prev_layer;
    struct fann *ann;
    struct fann_neuron *neuron_it, *last_neuron;
#ifdef DEBUG
    unsigned int prev_layer_size;
#endif
    unsigned int num_neurons_in, num_neurons_out, i;
    unsigned int min_connections, max_connections, num_connections;
    unsigned int connections_per_neuron, allocated_connections;
    unsigned int tmp_con;

    /* seed random */
#ifndef FANN_NO_SEED
    fann_seed_rand();
#endif

    /* allocate the general structure */
    ann = fann_allocate_structure(num_layers);
    if(ann == NULL)
    {
        fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
        return NULL;
    }

    /* determine how many neurons there should be in each layer */
    i = 0;
    for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++)
    {
        /* we do not allocate room here, but we make sure that
         * last_neuron - first_neuron is the number of neurons */
        layer_it->first_neuron = NULL;
        layer_it->last_neuron = layer_it->first_neuron + layers[i++] + 1;   /* +1 for bias */
        ann->total_neurons += (unsigned int)(layer_it->last_neuron - layer_it->first_neuron);
    }

    ann->num_output =
        (unsigned int)((ann->last_layer - 1)->last_neuron - (ann->last_layer - 1)->first_neuron - 1);
    ann->num_input =
        (unsigned int)(ann->first_layer->last_neuron - ann->first_layer->first_neuron - 1);

    /* allocate room for the actual neurons */
    fann_allocate_neurons(ann);
    if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
    {
        fann_destroy(ann);
        return NULL;
    }

#ifdef DEBUG
    printf("creating fully connected network.\n");
    printf("input\n");
    printf(" layer : %d neurons, 1 bias\n",
           (int)(ann->first_layer->last_neuron - ann->first_layer->first_neuron - 1));
#endif

    num_neurons_in = ann->num_input;
    for(layer_it = ann->first_layer + 1; layer_it != ann->last_layer; layer_it++)
    {
        layer_it->activation_function = FANN_SIGMOID_SYMMETRIC;
        layer_it->activation_steepness = 0.5;

        num_neurons_out = (unsigned int)(layer_it->last_neuron - layer_it->first_neuron - 1);

        /* If every neuron in each layer should be connected to at least one
         * neuron in the previous layer and one neuron in the next layer, and
         * the bias node should be connected to all neurons in the next layer,
         * then this is the minimum number of connections. */
        min_connections = fann_max(num_neurons_in, num_neurons_out) + num_neurons_out;
        max_connections = num_neurons_in * num_neurons_out;     /* not calculating bias */
        num_connections = fann_max(min_connections, max_connections + num_neurons_out);

        connections_per_neuron = num_connections / num_neurons_out;
        allocated_connections = 0;
        /* Now split out the connections on the different neurons */
        for(i = 0; i != num_neurons_out; i++)
        {
            layer_it->first_neuron[i].first_con = ann->total_connections + allocated_connections;
            allocated_connections += connections_per_neuron;
            layer_it->first_neuron[i].last_con = ann->total_connections + allocated_connections;

            if(allocated_connections < (num_connections * (i + 1)) / num_neurons_out)
            {
                layer_it->first_neuron[i].last_con++;
                allocated_connections++;
            }
        }

        /* the bias neuron also gets its connection range, although it is empty */
        layer_it->first_neuron[i].first_con = ann->total_connections + allocated_connections;
        layer_it->first_neuron[i].last_con = ann->total_connections + allocated_connections;

        ann->total_connections += num_connections;

        /* used in the next run of the loop */
        num_neurons_in = num_neurons_out;
    }

    fann_allocate_connections(ann);
    if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
    {
        fann_destroy(ann);
        return NULL;
    }

#ifdef DEBUG
    prev_layer_size = ann->num_input + 1;
#endif
    prev_layer = ann->first_layer;
    last_layer = ann->last_layer;
    for(layer_it = ann->first_layer + 1; layer_it != last_layer; layer_it++)
    {
        last_neuron = layer_it->last_neuron - 1;
        for(neuron_it = layer_it->first_neuron; neuron_it != last_neuron; neuron_it++)
        {
            tmp_con = neuron_it->last_con - 1;
            for(i = neuron_it->first_con; i != tmp_con; i++)
            {
                ann->weights[i] = (fann_type) fann_random_weight();
                /* the connections array is still initialized for fully
                 * connected networks, so that operations which are not
                 * optimized for the fully connected case still work. */
                ann->connections[i] = prev_layer->first_neuron + (i - neuron_it->first_con);
            }

            /* bias weight */
            ann->weights[tmp_con] = (fann_type) fann_random_bias_weight();
            ann->connections[tmp_con] = prev_layer->first_neuron + (tmp_con - neuron_it->first_con);
        }
#ifdef DEBUG
        prev_layer_size = (unsigned int)(layer_it->last_neuron - layer_it->first_neuron);
#endif
        prev_layer = layer_it;
#ifdef DEBUG
        printf(" layer : %d neurons, 1 bias\n", prev_layer_size - 1);
#endif
    }

#ifdef DEBUG
    printf("output\n");
#endif

    return ann;
}
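
/* Example usage (a minimal sketch, not part of the library source): a fully
 * connected 2-3-1 network run on one input vector. fann_run() is the public
 * FANN entry point for executing a network; the weights are still random at
 * this point, so the output is only meaningful after training.
 *
 *     unsigned int layers[3] = { 2, 3, 1 };
 *     fann_type input[2] = { -1, 1 };
 *     fann_type *output;
 *
 *     struct fann *ann = fann_create_standard_array(3, layers);
 *     if(ann != NULL)
 *     {
 *         output = fann_run(ann, input);
 *         fann_destroy(ann);
 *     }
 */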
FANN_EXTERNAL struct fann *FANN_API fann_create_shortcut_array(unsigned int num_layers,
                                                               const unsigned int *layers)
{
    struct fann_layer *layer_it, *layer_it2, *last_layer;
    struct fann *ann;
    struct fann_neuron *neuron_it, *neuron_it2 = 0;
    unsigned int i;
    unsigned int num_neurons_in, num_neurons_out;

#ifdef FIXEDFANN
    unsigned int decimal_point;
    unsigned int multiplier;
#endif
    /* seed random */
#ifndef FANN_NO_SEED
    fann_seed_rand();
#endif

    /* allocate the general structure */
    ann = fann_allocate_structure(num_layers);
    if(ann == NULL)
    {
        fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
        return NULL;
    }

    ann->connection_rate = 1;
    ann->network_type = FANN_NETTYPE_SHORTCUT;
#ifdef FIXEDFANN
    decimal_point = ann->decimal_point;
    multiplier = ann->multiplier;
    fann_update_stepwise(ann);
#endif

    /* determine how many neurons there should be in each layer */
    i = 0;
    for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++)
    {
        /* we do not allocate room here, but we make sure that
         * last_neuron - first_neuron is the number of neurons */
        layer_it->first_neuron = NULL;
        layer_it->last_neuron = layer_it->first_neuron + layers[i++];
        if(layer_it == ann->first_layer)
        {
            /* there is a bias neuron in the first layer */
            layer_it->last_neuron++;
        }
        ann->total_neurons += layer_it->last_neuron - layer_it->first_neuron;
    }

    ann->num_output = (ann->last_layer - 1)->last_neuron - (ann->last_layer - 1)->first_neuron;
    ann->num_input = ann->first_layer->last_neuron - ann->first_layer->first_neuron - 1;

    /* allocate room for the actual neurons */
    fann_allocate_neurons(ann);
    if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
    {
        fann_destroy(ann);
        return NULL;
    }

#ifdef DEBUG
    printf("creating fully shortcut connected network.\n");
    printf("input\n");
    printf(" layer : %d neurons, 1 bias\n",
           (int)(ann->first_layer->last_neuron - ann->first_layer->first_neuron - 1));
#endif

    num_neurons_in = ann->num_input;
    last_layer = ann->last_layer;
    for(layer_it = ann->first_layer + 1; layer_it != last_layer; layer_it++)
    {
        num_neurons_out = layer_it->last_neuron - layer_it->first_neuron;

        /* Now split out the connections on the different neurons */
        for(i = 0; i != num_neurons_out; i++)
        {
            layer_it->first_neuron[i].first_con = ann->total_connections;
            ann->total_connections += num_neurons_in + 1;
            layer_it->first_neuron[i].last_con = ann->total_connections;

            layer_it->first_neuron[i].activation_function = FANN_SIGMOID_STEPWISE;
#ifdef FIXEDFANN
            layer_it->first_neuron[i].activation_steepness = ann->multiplier / 2;
#else
            layer_it->first_neuron[i].activation_steepness = 0.5;
#endif
        }

#ifdef DEBUG
        printf(" layer : %d neurons, 0 bias\n", num_neurons_out);
#endif
        /* used in the next run of the loop */
        num_neurons_in += num_neurons_out;
    }

    fann_allocate_connections(ann);
    if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
    {
        fann_destroy(ann);
        return NULL;
    }

    /* Connections are created from all neurons to all neurons in later layers */
    num_neurons_in = ann->num_input + 1;
    for(layer_it = ann->first_layer + 1; layer_it != last_layer; layer_it++)
    {
        for(neuron_it = layer_it->first_neuron; neuron_it != layer_it->last_neuron; neuron_it++)
        {
            i = neuron_it->first_con;
            for(layer_it2 = ann->first_layer; layer_it2 != layer_it; layer_it2++)
            {
                for(neuron_it2 = layer_it2->first_neuron; neuron_it2 != layer_it2->last_neuron;
                    neuron_it2++)
                {
                    ann->weights[i] = (fann_type) fann_random_weight();
                    ann->connections[i] = neuron_it2;
                    i++;
                }
            }
        }
        num_neurons_in += layer_it->last_neuron - layer_it->first_neuron;
    }

#ifdef DEBUG
    printf("output\n");
#endif

    return ann;
}
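
/* Example usage (a minimal sketch, not part of the library source): a 2-2-1
 * shortcut network. Because every neuron also connects to all neurons in all
 * earlier layers, each hidden neuron gets 2 inputs + 1 bias = 3 connections
 * and the output neuron gets 2 inputs + 1 bias + 2 hidden = 5, so
 * fann_get_total_connections() should report 11 here.
 *
 *     unsigned int layers[3] = { 2, 2, 1 };
 *     struct fann *ann = fann_create_shortcut_array(3, layers);
 *
 *     if(ann != NULL)
 *     {
 *         printf("total connections: %u\n", fann_get_total_connections(ann));
 *         fann_destroy(ann);
 *     }
 */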