Example no. 1
/* INTERNAL FUNCTION
   Allocate room for the connections.
 */
void fann_allocate_connections(struct fann *ann)
{
	ann->weights = (fann_type *) fann_calloc(ann->total_connections, sizeof(fann_type));
	if(ann->weights == NULL)
	{
		fann_error((struct fann_error *) ann, FANN_E_CANT_ALLOCATE_MEM);
		return;
	}

	/* TODO make special cases for all places where the connections
	 * array is used, so that it is not needed for fully connected networks.
	 */
	ann->connections =
		(struct fann_neuron **) fann_calloc(ann->total_connections,
									   sizeof(struct fann_neuron *));
	if(ann->connections == NULL)
	{
		fann_error((struct fann_error *) ann, FANN_E_CANT_ALLOCATE_MEM);
		return;
	}

	if(ann->prev_weights_deltas == NULL)
	{
		ann->prev_weights_deltas =
			(fann_type *) fann_calloc(ann->total_connections, sizeof(fann_type));
		if(ann->prev_weights_deltas == NULL)
		{
			fann_error((struct fann_error *) ann, FANN_E_CANT_ALLOCATE_MEM);
			return;
		}		
	}
}
Example no. 2
/* INTERNAL FUNCTION
   Allocates room for the neurons.
 */
void fann_allocate_neurons(struct fann *ann)
{
	struct fann_layer *layer_it;
	struct fann_neuron *neurons;
	unsigned int num_neurons_so_far = 0;
	unsigned int num_neurons = 0;

	/* all the neurons are allocated in one long array (calloc clears mem) */
	neurons = (struct fann_neuron *) calloc(ann->total_neurons, sizeof(struct fann_neuron));
	ann->total_neurons_allocated = ann->total_neurons;

	if(neurons == NULL)
	{
		fann_error((struct fann_error *) ann, FANN_E_CANT_ALLOCATE_MEM);
		return;
	}

	for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++)
	{
		num_neurons = layer_it->last_neuron - layer_it->first_neuron;
		layer_it->first_neuron = neurons + num_neurons_so_far;
		layer_it->last_neuron = layer_it->first_neuron + num_neurons;
		num_neurons_so_far += num_neurons;
	}

	ann->output = (fann_type *) calloc(num_neurons, sizeof(fann_type));
	if(ann->output == NULL)
	{
		fann_error((struct fann_error *) ann, FANN_E_CANT_ALLOCATE_MEM);
		return;
	}
}
Example no. 3
/*
 * Creates an empty set of training data
 */
FANN_EXTERNAL struct fann_train_data * FANN_API fann_create_train(unsigned int num_data, unsigned int num_input, unsigned int num_output)
{
	fann_type *data_input, *data_output;
	unsigned int i;
	struct fann_train_data *data =
		(struct fann_train_data *) malloc(sizeof(struct fann_train_data));

	if(data == NULL)
	{
		fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
		return NULL;
	}
	
	fann_init_error_data((struct fann_error *) data);

	data->num_data = num_data;
	data->num_input = num_input;
	data->num_output = num_output;
	data->input = (fann_type **) calloc(num_data, sizeof(fann_type *));
	if(data->input == NULL)
	{
		fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
		fann_destroy_train(data);
		return NULL;
	}

	data->output = (fann_type **) calloc(num_data, sizeof(fann_type *));
	if(data->output == NULL)
	{
		fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
		fann_destroy_train(data);
		return NULL;
	}

	data_input = (fann_type *) calloc(num_input * num_data, sizeof(fann_type));
	if(data_input == NULL)
	{
		fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
		fann_destroy_train(data);
		return NULL;
	}

	data_output = (fann_type *) calloc(num_output * num_data, sizeof(fann_type));
	if(data_output == NULL)
	{
		fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
		fann_destroy_train(data);
		return NULL;
	}

	for(i = 0; i != num_data; i++)
	{
		data->input[i] = data_input;
		data_input += num_input;
		data->output[i] = data_output;
		data_output += num_output;
	}
	return data;
}
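A minimal usage sketch for fann_create_train, assuming the standard FANN public header fann.h; the XOR truth table is used purely as illustrative data.

#include "fann.h"

int main(void)
{
	unsigned int i;
	/* 4 samples, 2 inputs, 1 output: the XOR truth table */
	struct fann_train_data *data = fann_create_train(4, 2, 1);

	if(data == NULL)
		return 1;

	for(i = 0; i < 4; i++)
	{
		data->input[i][0] = (fann_type) (i & 1);
		data->input[i][1] = (fann_type) (i >> 1);
		data->output[i][0] = (fann_type) ((i & 1) ^ (i >> 1));
	}

	/* ... use the data for training ... */

	fann_destroy_train(data);
	return 0;
}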
Example no. 4
/* INTERNAL FUNCTION
   Allocates room for the neurons.
 */
void fann_allocate_neurons(struct fann *ann)
{
	struct fann_layer *layer_it;
	struct fann_neuron *neurons;
	fann_type *sumPtr = NULL;
	fann_type *valuePtr = NULL;
	unsigned int num_neurons_so_far = 0;
	unsigned int num_neurons = 0;
	unsigned int i;

	/* all the neurons are allocated in one long array (fann_calloc clears mem) */
	neurons = (struct fann_neuron *) fann_calloc(ann->total_neurons, sizeof(struct fann_neuron));

	if(neurons == NULL)
	{
		fann_error((struct fann_error *) ann, FANN_E_CANT_ALLOCATE_MEM);
		return;
	}

	// extra --- AK
	sumPtr = (fann_type *)fann_calloc(ann->total_neurons, sizeof(fann_type));
	valuePtr = (fann_type *)fann_calloc(ann->total_neurons, sizeof(fann_type));
	if(sumPtr == NULL || valuePtr == NULL)
	{
		fann_error((struct fann_error *) ann, FANN_E_CANT_ALLOCATE_MEM);
		return;
	}

	for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++)
	{
		num_neurons = (unsigned int)(layer_it->last_neuron - layer_it->first_neuron);
		layer_it->first_neuron = neurons + num_neurons_so_far;
		layer_it->last_neuron = layer_it->first_neuron + num_neurons;

		//AK
		layer_it->sum = sumPtr + num_neurons_so_far;
		layer_it->value = valuePtr + num_neurons_so_far;

		for(i = 0; i < num_neurons; i++)
		{
			layer_it->first_neuron[i].sumPtr = layer_it->sum + i;
			layer_it->first_neuron[i].valuePtr = layer_it->value + i;
			//layer_it->first_neuron[i].activation_functionPtr = &layer_it->activation_function;
			//layer_it->first_neuron[i].activation_steepnessPtr = &layer_it->activation_steepness;
		}

		num_neurons_so_far += num_neurons;
	}

	ann->output = (fann_type *) fann_calloc(num_neurons, sizeof(fann_type));
	if(ann->output == NULL)
	{
		fann_error((struct fann_error *) ann, FANN_E_CANT_ALLOCATE_MEM);
		return;
	}

	ann->train_errors = (fann_type *) fann_calloc(ann->total_neurons, sizeof(fann_type));
	if(ann->train_errors == NULL)
	{
		fann_error((struct fann_error *) ann, FANN_E_CANT_ALLOCATE_MEM);
		return;
	}
}
Example no. 5
int fann_reallocate_connections(struct fann *ann, unsigned int total_connections)
{
	/* The connections are allocated here, but the pointers inside are
	 * only moved at the end of the cascade training session.
	 */

#ifdef CASCADE_DEBUG
	printf("realloc from %d to %d\n", ann->total_connections_allocated, total_connections);
#endif
	ann->connections =
		(struct fann_neuron **) realloc(ann->connections,
										total_connections * sizeof(struct fann_neuron *));
	if(ann->connections == NULL)
	{
		fann_error((struct fann_error *) ann, FANN_E_CANT_ALLOCATE_MEM);
		return -1;
	}

	ann->weights = (fann_type *) realloc(ann->weights, total_connections * sizeof(fann_type));
	if(ann->weights == NULL)
	{
		fann_error((struct fann_error *) ann, FANN_E_CANT_ALLOCATE_MEM);
		return -1;
	}

	fann_set_train_slopes(ann, 
		(fann_type *) realloc(fann_get_train_slopes(ann), total_connections * sizeof(fann_type)));
	if(fann_get_train_slopes(ann) == NULL)
	{
		fann_error((struct fann_error *) ann, FANN_E_CANT_ALLOCATE_MEM);
		return -1;
	}

	fann_set_prev_steps(ann,
		(fann_type *) realloc(fann_get_prev_steps(ann), 
							  total_connections * sizeof(fann_type)));
	if(fann_get_prev_steps(ann) == NULL)
	{
		fann_error((struct fann_error *) ann, FANN_E_CANT_ALLOCATE_MEM);
		return -1;
	}

	fann_set_prev_train_slopes(ann,
		(fann_type *) realloc(fann_get_prev_train_slopes(ann), 
							  total_connections * sizeof(fann_type)));
	if(fann_get_prev_train_slopes(ann) == NULL)
	{
		fann_error((struct fann_error *) ann, FANN_E_CANT_ALLOCATE_MEM);
		return -1;
	}

	fann_set_total_connections_allocated(ann, total_connections);

	return 0;
}
Example no. 6
int fann_reallocate_neurons(struct fann *ann, unsigned int total_neurons)
{
	struct fann_layer *layer_it;
	struct fann_neuron *neurons;
	unsigned int num_neurons = 0;
	unsigned int num_neurons_so_far = 0;

	neurons =
		(struct fann_neuron *) realloc(ann->first_layer->first_neuron,
									   total_neurons * sizeof(struct fann_neuron));
	fann_set_total_neurons_allocated(ann, total_neurons);

	if(neurons == NULL)
	{
		fann_error((struct fann_error *) ann, FANN_E_CANT_ALLOCATE_MEM);
		return -1;
	}

	/* Also allocate room for more train_errors */
	fann_set_train_errors(ann,
		(fann_type *) realloc(fann_get_train_errors(ann), 
							  total_neurons * sizeof(fann_type)));
	if(fann_get_train_errors(ann) == NULL)
	{
		fann_error((struct fann_error *) ann, FANN_E_CANT_ALLOCATE_MEM);
		return -1;
	}

	if(neurons != ann->first_layer->first_neuron)
	{
		/* Then the memory has moved, also move the pointers */

#ifdef CASCADE_DEBUG_FULL
		printf("Moving neuron pointers\n");
#endif

		/* Move pointers from layers to neurons */
		for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++)
		{
			num_neurons = layer_it->last_neuron - layer_it->first_neuron;
			layer_it->first_neuron = neurons + num_neurons_so_far;
			layer_it->last_neuron = layer_it->first_neuron + num_neurons;
			num_neurons_so_far += num_neurons;
		}
	}

	return 0;
}
Example no. 7
FANN_EXTERNAL void FANN_API fann_set_cascade_activation_steepnesses(struct fann *ann,
														   fann_type *
														   cascade_activation_steepnesses,
														   unsigned int 
														   cascade_activation_steepnesses_count)
{
	if(fann_get_cascade_activation_steepnesses_count(ann) != cascade_activation_steepnesses_count)
	{
		fann_set_cascade_activation_steepnesses_count(ann, cascade_activation_steepnesses_count);
		
		/* reallocate mem */
		ann->cascade_params->cascade_activation_steepnesses = 
			(fann_type *)realloc(
				ann->cascade_params->cascade_activation_steepnesses, 
				fann_get_cascade_activation_steepnesses_count(ann) * sizeof(fann_type));
		if(ann->cascade_params->cascade_activation_steepnesses == NULL)
		{
			fann_error((struct fann_error*)ann, FANN_E_CANT_ALLOCATE_MEM);
			return;
		}
	}
	
	memmove(
		ann->cascade_params->cascade_activation_steepnesses, 
		cascade_activation_steepnesses, 
		fann_get_cascade_activation_steepnesses_count(ann) * sizeof(fann_type));
}
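A hedged usage sketch for the setter above: the caller keeps ownership of the array, since its contents are copied into the network with memmove. The network creation call and the steepness values are just examples.

/* a typical shortcut network used for cascade training: 2 inputs, 1 output */
struct fann *ann = fann_create_shortcut(2, 2, 1);
fann_type steepnesses[4] = { (fann_type) 0.25, (fann_type) 0.5,
							 (fann_type) 0.75, (fann_type) 1.0 };

fann_set_cascade_activation_steepnesses(ann, steepnesses, 4);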
Example no. 8
FANN_EXTERNAL struct fann *FANN_API MAKE_NAME(create_standard)(unsigned int num_layers, ...)
{
    struct fann *ann;
    va_list layer_sizes;
    int i;
    unsigned int *layers = (unsigned int *) calloc(num_layers, sizeof(unsigned int));

    if(layers == NULL)
    {
        fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
        return NULL;
    }

    va_start(layer_sizes, num_layers);
    for(i = 0; i < (int) num_layers; i++)
    {
        layers[i] = va_arg(layer_sizes, unsigned int);
    }
    va_end(layer_sizes);

    ann = MAKE_NAME(create_standard_array)(num_layers, layers);

    free(layers);

    return ann;
}
Example no. 9
//--------------------------------------------------------------------------------------------------------
//Evaluate the ann on an array of samples
void evaluateNetwork(struct fann *ann, 
		     const double *input, 
		     double* output,
		     const unsigned int numData)
{
  int i,j;
  unsigned int numInputs  = ann->num_input;
  unsigned int numOutputs = ann->num_output;
  fann_type * out = NULL;
  fann_type * in = (fann_type *)MALLOC(numInputs * sizeof(fann_type));
  if (in == NULL)
    {
      fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
      return;
    }
  
  for(i=0;i<(int)numData;++i)
    {
      for(j=0;j<(int)numInputs;j++) 
	{
	  in[j] = input[(j*numData)+i];
	}
      
      out = fann_run(ann,in);
      
      for(j=0;j<(int)numOutputs;j++) 
	{
	  output[(j*numData)+i] = out[j];
	}
    }
  
  FREE(in);
}
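A hedged usage sketch for the helper above. Note the column-major layout it expects: input[j*numData + i] is input j of sample i, and the outputs are written back in the same layout. The network layout (2 inputs, 1 output) and the sample values are illustrative.

struct fann *ann = fann_create_standard(3, 2, 3, 1);	/* 2 inputs, 3 hidden, 1 output */
double input[2 * 4] = { 0, 1, 0, 1,		/* input 0 of samples 0..3 */
						0, 0, 1, 1 };	/* input 1 of samples 0..3 */
double output[1 * 4] = { 0 };

evaluateNetwork(ann, input, output, 4);	/* output[i] now holds the response for sample i */
fann_destroy(ann);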
Example no. 10
void fann_update_candidate_weights(struct fann *ann, unsigned int num_data)
{
	struct fann_neuron *first_cand = (ann->last_layer - 1)->last_neuron + 1;	/* there is an empty neuron between the actual neurons and the candidate neuron */
	struct fann_neuron *last_cand = first_cand + fann_get_cascade_num_candidates(ann) - 1;

	switch (ann->training_algorithm)
	{
		case FANN_TRAIN_RPROP:
			fann_update_weights_irpropm(ann, first_cand->first_con,
										last_cand->last_con + ann->num_output);
			break;
		case FANN_TRAIN_SARPROP:
			/* TODO: increase epoch? */
			fann_update_weights_sarprop(ann, ann->sarprop_epoch, first_cand->first_con,
										last_cand->last_con + ann->num_output);
			break;
		case FANN_TRAIN_QUICKPROP:
			fann_update_weights_quickprop(ann, num_data, first_cand->first_con,
										  last_cand->last_con + ann->num_output);
			break;
		case FANN_TRAIN_BATCH:
		case FANN_TRAIN_INCREMENTAL:
			fann_error((struct fann_error *) ann, FANN_E_CANT_USE_TRAIN_ALG);
			break;
	}
}
Example no. 11
/* add a layer at the position pointed to by *layer */
struct fann_layer *fann_add_layer(struct fann *ann, struct fann_layer *layer)
{
	int layer_pos = (int)(layer - ann->first_layer);
	int num_layers = (int)(ann->last_layer - ann->first_layer + 1);
	int i;

	/* allocate the layer */
	struct fann_layer *layers =
		(struct fann_layer *) realloc(ann->first_layer, num_layers * sizeof(struct fann_layer));
	if(layers == NULL)
	{
		fann_error((struct fann_error *) ann, FANN_E_CANT_ALLOCATE_MEM);
		return NULL;
	}

	/* copy layers so that the free space is at the right location */
	for(i = num_layers - 1; i >= layer_pos; i--)
	{
		layers[i] = layers[i - 1];
	}

	/* the newly allocated layer is empty */
	layers[layer_pos].first_neuron = layers[layer_pos + 1].first_neuron;
	layers[layer_pos].last_neuron = layers[layer_pos + 1].first_neuron;

	/* Set the ann pointers correctly */
	ann->first_layer = layers;
	ann->last_layer = layers + num_layers;

#ifdef CASCADE_DEBUG_FULL
	printf("add layer at pos %d\n", layer_pos);
#endif

	return layers + layer_pos;
}
Example no. 12
FANN_EXTERNAL struct fann *FANN_API fann_create_sparse(float connection_rate, 
													   unsigned int num_layers, ...)
{
	struct fann *ann;
	va_list layer_sizes;
	int i;
	unsigned int *layers = (unsigned int *) calloc(num_layers, sizeof(unsigned int));

	if(layers == NULL)
	{
		fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
		return NULL;
	}

	va_start(layer_sizes, num_layers);
	for(i = 0; i < (int) num_layers; i++)
	{
		layers[i] = va_arg(layer_sizes, unsigned int);
	}
	va_end(layer_sizes);

	ann = fann_create_sparse_array(connection_rate, num_layers, layers);

	free(layers);

	return ann;
}
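A hedged usage sketch of the variadic sparse constructor above; the connection rate and layer sizes are arbitrary example values.

/* 3 layers (2-8-1) with only 50% of the possible connections created */
struct fann *ann = fann_create_sparse(0.5f, 3, 2, 8, 1);

if(ann != NULL)
{
	/* ... train and run as with a fully connected network ... */
	fann_destroy(ann);
}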
Example no. 13
/* Allocate memory for training for a layer */
FANN_EXTERNAL void FANN_API fann_layer_train_initialize_fully_recurrent(struct fann *ann, struct fann_layer *layer)
{
	fann_type *free_train_errors;
	struct fann_neuron *neuron_it, *last_neuron;

	last_neuron = layer->last_neuron;

	/* reset the layer train errors array */
	if(layer->train_errors == NULL)
	{
		if( (layer->train_errors = (fann_type *) calloc(layer->num_outputs, sizeof(fann_type))) == NULL )
		{
			fann_error((struct fann_error *) ann, FANN_E_CANT_ALLOCATE_MEM);
			return;
		}
		/* assign to MIMO neurons a piece of layer train_errors array */
		free_train_errors = layer->train_errors;
		for (neuron_it = layer->first_neuron; neuron_it != last_neuron; neuron_it++)
		{
			neuron_it->train_errors = free_train_errors;
			free_train_errors += neuron_it->num_outputs;
		}	
	}
	else
	{
		/* clear the error variables */
		memset(layer->train_errors, 0, (layer->num_outputs) * sizeof(fann_type));
	}

	for (neuron_it = layer->first_neuron; neuron_it != last_neuron; neuron_it++)
	{
		neuron_it->train_initialize(ann, layer, neuron_it);
	}
}
Example no. 14
FANN_EXTERNAL void FANN_API fann_set_cascade_activation_functions(struct fann *ann,
														 enum fann_activationfunc_enum *
														 cascade_activation_functions,
														 unsigned int 
														 cascade_activation_functions_count)
{
	if(fann_get_cascade_activation_functions_count(ann) != cascade_activation_functions_count)
	{
		fann_set_cascade_activation_functions_count(ann, cascade_activation_functions_count);
		
		/* reallocate mem */
		ann->cascade_params->cascade_activation_functions = 
			(enum fann_activationfunc_enum *)realloc(ann->cascade_params->cascade_activation_functions, 
			fann_get_cascade_activation_functions_count(ann) * sizeof(enum fann_activationfunc_enum));
		if(fann_get_cascade_activation_functions(ann) == NULL)
		{
			fann_error((struct fann_error*)ann, FANN_E_CANT_ALLOCATE_MEM);
			return;
		}
	}
	
	memmove(
		ann->cascade_params->cascade_activation_functions, 
		cascade_activation_functions, 
		fann_get_cascade_activation_functions_count(ann) * sizeof(enum fann_activationfunc_enum));
}
Example no. 15
/*
 * INTERNAL FUNCTION Reads training data from a file descriptor. 
 */
struct fann_train_data *fann_read_train_from_fd(FILE * file, const char *filename)
{
	unsigned int num_input, num_output, num_data, i, j;
	unsigned int line = 1;
	struct fann_train_data *data;

	if(fscanf(file, "%u %u %u\n", &num_data, &num_input, &num_output) != 3)
	{
		fann_error(NULL, FANN_E_CANT_READ_TD, filename, line);
		return NULL;
	}
	line++;

	data = fann_create_train(num_data, num_input, num_output);
	if(data == NULL)
	{
		return NULL;
	}

	for(i = 0; i != num_data; i++)
	{
		for(j = 0; j != num_input; j++)
		{
			if(!fann_scanvalue(file, FANNSCANF, &data->input[i][j]))
			{
				fann_error(NULL, FANN_E_CANT_READ_TD, filename, line);
				fann_destroy_train(data);
				return NULL;
			}
		}
		line++;

		for(j = 0; j != num_output; j++)
		{
			if(!fann_scanvalue(file, FANNSCANF, &data->output[i][j]))
			{
				fann_error(NULL, FANN_E_CANT_READ_TD, filename, line);
				fann_destroy_train(data);
				return NULL;
			}
		}
		line++;
	}
	return data;
}
Example no. 16
/* INTERNAL FUNCTION
   Compute the error at the network output
   (usually after forward propagation of a certain input vector, fann_run);
   the error is a sum of squares for all the output units.
   Also increments a counter, because the MSE is an average of such errors.

   After this, train_errors in the output layer will be set to:
   neuron_value_derived * (desired_output - neuron_value)
 */
void fann_compute_MSE(struct fann *ann, fann_type * desired_output)
{
	fann_type neuron_value, neuron_diff, *error_it = 0, *error_begin = 0;
	struct fann_neuron *last_layer_begin = (ann->last_layer - 1)->first_neuron;
	const struct fann_neuron *last_layer_end = last_layer_begin + ann->num_output;
	const struct fann_neuron *first_neuron = ann->first_layer->first_neuron;

	/* if no room allocated for the error variables, allocate it now */
	if(ann->train_errors == NULL)
	{
		ann->train_errors = (fann_type *) calloc(ann->total_neurons, sizeof(fann_type));
		if(ann->train_errors == NULL)
		{
			fann_error((struct fann_error *) ann, FANN_E_CANT_ALLOCATE_MEM);
			return;
		}
	}
	else
	{
		/* clear the error variables */
		memset(ann->train_errors, 0, (ann->total_neurons) * sizeof(fann_type));
	}
	error_begin = ann->train_errors;

#ifdef DEBUGTRAIN
	printf("\ncalculate errors\n");
#endif
	/* calculate the error and place it in the output layer */
	error_it = error_begin + (last_layer_begin - first_neuron);

	for(; last_layer_begin != last_layer_end; last_layer_begin++)
	{
		neuron_value = last_layer_begin->value;
		neuron_diff = *desired_output - neuron_value;

		neuron_diff = fann_update_MSE(ann, last_layer_begin, neuron_diff);

		if(ann->train_error_function)
		{						/* TODO make switch when more functions */
			if(neuron_diff < -.9999999)
				neuron_diff = -17.0;
			else if(neuron_diff > .9999999)
				neuron_diff = 17.0;
			else
				neuron_diff = (fann_type) log((1.0 + neuron_diff) / (1.0 - neuron_diff));
		}

		*error_it = fann_activation_derived(last_layer_begin->activation_function,
											last_layer_begin->activation_steepness, neuron_value,
											last_layer_begin->sum) * neuron_diff;

		desired_output++;
		error_it++;

		ann->num_MSE++;
	}
}
Example no. 17
int fann_check_input_output_sizes(struct fann *ann, struct fann_train_data *data)
{
	if(ann->num_input != data->num_input)
    {
    	fann_error((struct fann_error *) ann, FANN_E_INPUT_NO_MATCH,
        	ann->num_input, data->num_input);
        return -1;
    }
        
	if(ann->num_output != data->num_output)
	{
		fann_error((struct fann_error *) ann, FANN_E_OUTPUT_NO_MATCH,
					ann->num_output, data->num_output);
		return -1;
	}
	
	return 0;
}
Example no. 18
FANN_EXTERNAL struct fann_neuron* FANN_API fann_get_neuron_layer(struct fann *ann, struct fann_layer* layer, int neuron)
{
	if(neuron >= (layer->last_neuron - layer->first_neuron))
	{
		fann_error((struct fann_error *) ann, FANN_E_INDEX_OUT_OF_BOUND, neuron);
		return NULL;
	}

	return layer->first_neuron + neuron;
}
Example no. 19
FANN_EXTERNAL struct fann_layer* FANN_API fann_get_layer(struct fann *ann, int layer)
{
	if(layer <= 0 || layer >= (ann->last_layer - ann->first_layer))
	{
		fann_error((struct fann_error *) ann, FANN_E_INDEX_OUT_OF_BOUND, layer);
		return NULL;
	}

	return ann->first_layer + layer;
}
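A hedged sketch combining the two accessors above; ann is assumed to be an existing network. Since index 0 (the input layer) is rejected by fann_get_layer, iteration over the remaining layers starts at 1.

struct fann_layer *layer = fann_get_layer(ann, 1);	/* first hidden layer */

if(layer != NULL)
{
	struct fann_neuron *neuron = fann_get_neuron_layer(ann, layer, 0);
	if(neuron != NULL)
	{
		/* ... inspect the first neuron of that layer ... */
	}
}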
Example no. 20
/*
 * Reads training data from a file. 
 */
FANN_EXTERNAL struct fann_train_data *FANN_API fann_read_train_from_file(const char *configuration_file)
{
	struct fann_train_data *data;
	FILE *file = fopen(configuration_file, "r");

	if(!file)
	{
		fann_error(NULL, FANN_E_CANT_OPEN_CONFIG_R, configuration_file);
		return NULL;
	}

	data = fann_read_train_from_fd(file, configuration_file);
	fclose(file);
	return data;
}
Example no. 21
/* INTERNAL FUNCTION
   Save the train data structure.
 */
int fann_save_train_internal(struct fann_train_data *data, const char *filename)
{
    int retval = 0;
    FILE *file = fopen(filename, "w");

    if(!file)
    {
        fann_error((struct fann_error *) data, FANN_E_CANT_OPEN_TD_W, filename);
        return -1;
    }
    retval = fann_save_train_internal_fd(data, file, filename);
    fclose(file);

    return retval;
}
Example no. 22
/*
 * Scale input and output data based on previously calculated parameters.
 */
FANN_EXTERNAL void FANN_API fann_scale_train( struct fann *ann, struct fann_train_data *data )
{
    unsigned cur_sample;
    if(ann->scale_mean_in == NULL)
    {
        fann_error( (struct fann_error *) ann, FANN_E_SCALE_NOT_PRESENT );
        return;
    }
    /* Check that we have good training data. */
    /* No need for if( !params || !ann ) */
    if(    data->num_input != ann->num_input
            || data->num_output != ann->num_output
      )
    {
        fann_error( (struct fann_error *) ann, FANN_E_TRAIN_DATA_MISMATCH );
        return;
    }

    for( cur_sample = 0; cur_sample < data->num_data; cur_sample++ )
    {
        fann_scale_input( ann, data->input[ cur_sample ] );
        fann_scale_output( ann, data->output[ cur_sample ] );
    }
}
Example no. 23
/*
 * Descale input and output data based on previously calculated parameters.
 */
FANN_EXTERNAL void FANN_API fann_descale_train( struct fann *ann, struct fann_train_data *data )
{
	unsigned cur_sample;
	if(ann->scale_mean_in == NULL)
	{
		fann_error( (struct fann_error *) ann, FANN_E_SCALE_NOT_PRESENT );
		return;
	}
	/* Check that we have good training data. */
	if(fann_check_input_output_sizes(ann, data) == -1)
		return;

	for( cur_sample = 0; cur_sample < data->num_data; cur_sample++ )
	{
		fann_descale_input( ann, data->input[ cur_sample ] );
		fann_descale_output( ann, data->output[ cur_sample ] );
	}
}
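A hedged sketch of the workflow around fann_scale_train/fann_descale_train: compute the scaling parameters once, scale the data in place before training, and descale it afterwards if the original ranges are needed again. Here ann and data are assumed to already exist with matching sizes, and fann_set_scaling_params is assumed to be available in this build and to return 0 on success.

if(fann_set_scaling_params(ann, data, -1.0f, 1.0f, -1.0f, 1.0f) == 0)
{
	fann_scale_train(ann, data);			/* scale input and output in place */
	fann_train_on_data(ann, data, 1000, 100, 0.001f);
	fann_descale_train(ann, data);			/* restore the original ranges */
}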
Example no. 24
/* INTERNAL FUNCTION
  Calculates the derivative of a value, given an activation function
   and a steepness
*/
fann_type fann_activation_derived(unsigned int activation_function,
								  fann_type steepness, fann_type value, fann_type sum)
{
	switch (activation_function)
	{
		case FANN_LINEAR:
		case FANN_LINEAR_PIECE:
		case FANN_LINEAR_PIECE_SYMMETRIC:
			return (fann_type) fann_linear_derive(steepness, value);
		case FANN_SIGMOID:
		case FANN_SIGMOID_STEPWISE:
			value = fann_clip(value, 0.01f, 0.99f);
			return (fann_type) fann_sigmoid_derive(steepness, value);
		case FANN_SIGMOID_SYMMETRIC:
		case FANN_SIGMOID_SYMMETRIC_STEPWISE:
			value = fann_clip(value, -0.98f, 0.98f);
			return (fann_type) fann_sigmoid_symmetric_derive(steepness, value);
		case FANN_GAUSSIAN:
			/* value = fann_clip(value, 0.01f, 0.99f); */
			return (fann_type) fann_gaussian_derive(steepness, value, sum);
		case FANN_GAUSSIAN_SYMMETRIC:
			/* value = fann_clip(value, -0.98f, 0.98f); */
			return (fann_type) fann_gaussian_symmetric_derive(steepness, value, sum);
		case FANN_ELLIOT:
			value = fann_clip(value, 0.01f, 0.99f);
			return (fann_type) fann_elliot_derive(steepness, value, sum);
		case FANN_ELLIOT_SYMMETRIC:
			value = fann_clip(value, -0.98f, 0.98f);
			return (fann_type) fann_elliot_symmetric_derive(steepness, value, sum);
		case FANN_SIN_SYMMETRIC:
			return (fann_type) fann_sin_symmetric_derive(steepness, sum);
		case FANN_COS_SYMMETRIC:
			return (fann_type) fann_cos_symmetric_derive(steepness, sum);
		case FANN_SIN:
			return (fann_type) fann_sin_derive(steepness, sum);
		case FANN_COS:
			return (fann_type) fann_cos_derive(steepness, sum);
		case FANN_THRESHOLD:
			fann_error(NULL, FANN_E_CANT_TRAIN_ACTIVATION);
			break;
		case FANN_MAXPOOLING:
			return (fann_type) 1;
	}
	return 0;
}
Example no. 25
/*
 * Scale data in the output vector before feeding it to the ann, based on previously calculated parameters.
 */
FANN_EXTERNAL void FANN_API fann_scale_output( struct fann *ann, fann_type *output_vector )
{
	unsigned cur_neuron;
	if(ann->scale_mean_in == NULL)
	{
		fann_error( (struct fann_error *) ann, FANN_E_SCALE_NOT_PRESENT );
		return;
	}

	for( cur_neuron = 0; cur_neuron < ann->num_output; cur_neuron++ )
		output_vector[ cur_neuron ] =
			(
				( output_vector[ cur_neuron ] - ann->scale_mean_out[ cur_neuron ] )
				/ ann->scale_deviation_out[ cur_neuron ]
				- ( (fann_type)-1.0 ) /* This is old_min */
			)
			* ann->scale_factor_out[ cur_neuron ]
			+ ann->scale_new_min_out[ cur_neuron ];
}
Example no. 26
void fann_sparse_neuron_standard_backprop_update(struct fann *ann, struct fann_neuron *neuron)
{
	unsigned int o, j;
	fann_type *weights, *deltas, *mask;
	const unsigned int num_outputs = neuron->num_outputs;
	const unsigned int num_inputs = neuron->num_inputs;
	float learning_rate = ann->backprop_params->learning_rate;
#ifdef FIXEDFANN 
	unsigned int decimal_point = ann->fixed_params->decimal_point;
#endif

	if (neuron->num_backprop_done==0)
	{
		fann_error(NULL, FANN_E_CANT_USE_TRAIN_ALG);
		return;
	}

	learning_rate=learning_rate/neuron->num_backprop_done;

	/* some assignments to speed up things */
	weights = neuron->weights;
	deltas = neuron->weights_deltas;
	mask = ((struct fann_sparse_neuron_private_data*)neuron->private_data)->mask;

	for (o = 0; o < num_outputs; o++)
	{
		for (j = 0; j < num_inputs; j++)
		{
			/* adjust the weight */
			weights[j] += deltas[j] * mask[j] * learning_rate; /* FIXME add the learning momentum here */
			deltas[j]=0;
		}
		weights += num_inputs;
		deltas += num_inputs;
		mask += num_inputs;
	}
	neuron->num_backprop_done=0;
}
Example no. 27
/* Allocate memory for training a neuron */
FANN_EXTERNAL void FANN_API fann_neuron_train_initialize_fully_recurrent(
	struct fann *ann, 
	struct fann_layer *layer, 
	struct fann_neuron *neuron)
{
	neuron->num_backprop_done=0;

	/* allocate the weights_deltas */
	if(neuron->weights_deltas == NULL)
	{
		if ((neuron->weights_deltas = 
				(fann_type*) calloc(neuron->num_weights, sizeof(fann_type))) == NULL)
		{
			fann_error((struct fann_error *) ann, FANN_E_CANT_ALLOCATE_MEM);
			return;
		}
	}
	else
	{
		/* clear the error variables */
		memset(neuron->weights_deltas, 0,  neuron->num_weights*sizeof(fann_type));
	}
}
Example no. 28
FANN_EXTERNAL int FANN_API fann_set_output_scaling_params(
	struct fann *ann,
	const struct fann_train_data *data,
	float new_output_min,
	float new_output_max)
{
	unsigned cur_neuron, cur_sample;

	/* Check that we have good training data. */
	/* No need for if( !params || !ann ) */
	if(data->num_input != ann->num_input
	   || data->num_output != ann->num_output)
	{
		fann_error( (struct fann_error *) ann, FANN_E_TRAIN_DATA_MISMATCH );
		return -1;
	}

	if(ann->scale_mean_out == NULL)
		fann_allocate_scale(ann);
	
	if(ann->scale_mean_out == NULL)
		return -1;
		
	if( !data->num_data )
	{
		SCALE_RESET( scale_mean,		out,	0.0 )
		SCALE_RESET( scale_deviation,	out,	1.0 )
		SCALE_RESET( scale_new_min,		out,	-1.0 )
		SCALE_RESET( scale_factor,		out,	1.0 )
	}
	else
	{
		SCALE_SET_PARAM( out );
	}

	return 0;
}
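A hedged sketch of how the scaling parameters set above are typically consumed when running the trained network on new samples, using the standard FANN scaling API (fann_scale_input/fann_descale_output). input_vector is assumed to be a fann_type array holding one raw, unscaled input sample.

fann_type *calc_out;

fann_scale_input(ann, input_vector);	/* bring the raw input into the trained range */
calc_out = fann_run(ann, input_vector);
fann_descale_output(ann, calc_out);		/* map the outputs back to the original range */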
Example no. 29
float fann_train_outputs_epoch(struct fann *ann, struct fann_train_data *data)
{
	unsigned int i;
	
	fann_reset_MSE(ann);

	for(i = 0; i < data->num_data; i++)
	{
		fann_run(ann, data->input[i]);
		fann_compute_MSE(ann, data->output[i]);
		fann_update_slopes_batch(ann, ann->last_layer - 1, ann->last_layer - 1);
	}

	switch (ann->training_algorithm)
	{
		case FANN_TRAIN_RPROP:
			fann_update_weights_irpropm(ann, (ann->last_layer - 1)->first_neuron->first_con,
										ann->total_connections);
			break;
		case FANN_TRAIN_SARPROP:
			fann_update_weights_sarprop(ann, ann->sarprop_epoch, (ann->last_layer - 1)->first_neuron->first_con,
										ann->total_connections);
			++(ann->sarprop_epoch);
			break;
		case FANN_TRAIN_QUICKPROP:
			fann_update_weights_quickprop(ann, data->num_data,
										  (ann->last_layer - 1)->first_neuron->first_con,
										  ann->total_connections);
			break;
		case FANN_TRAIN_BATCH:
		case FANN_TRAIN_INCREMENTAL:
			fann_error((struct fann_error *) ann, FANN_E_CANT_USE_TRAIN_ALG);
	}

	return fann_get_MSE(ann);
}
Example no. 30
FANN_EXTERNAL struct fann_train_data *FANN_API fann_subset_train_data(struct fann_train_data
																		 *data, unsigned int pos,
																		 unsigned int length)
{
	unsigned int i;
	fann_type *data_input, *data_output;
	struct fann_train_data *dest =
		(struct fann_train_data *) malloc(sizeof(struct fann_train_data));

	if(dest == NULL)
	{
		fann_error((struct fann_error*)data, FANN_E_CANT_ALLOCATE_MEM);
		return NULL;
	}
	
	if(pos > data->num_data || pos+length > data->num_data)
	{
		fann_error((struct fann_error*)data, FANN_E_TRAIN_DATA_SUBSET, pos, length, data->num_data);
		free(dest);
		return NULL;
	}

	fann_init_error_data((struct fann_error *) dest);
	dest->error_log = data->error_log;

	dest->num_data = length;
	dest->num_input = data->num_input;
	dest->num_output = data->num_output;
	dest->input = (fann_type **) calloc(dest->num_data, sizeof(fann_type *));
	if(dest->input == NULL)
	{
		fann_error((struct fann_error*)data, FANN_E_CANT_ALLOCATE_MEM);
		fann_destroy_train(dest);
		return NULL;
	}

	dest->output = (fann_type **) calloc(dest->num_data, sizeof(fann_type *));
	if(dest->output == NULL)
	{
		fann_error((struct fann_error*)data, FANN_E_CANT_ALLOCATE_MEM);
		fann_destroy_train(dest);
		return NULL;
	}

	data_input = (fann_type *) calloc(dest->num_input * dest->num_data, sizeof(fann_type));
	if(data_input == NULL)
	{
		fann_error((struct fann_error*)data, FANN_E_CANT_ALLOCATE_MEM);
		fann_destroy_train(dest);
		return NULL;
	}
	memcpy(data_input, data->input[pos], dest->num_input * dest->num_data * sizeof(fann_type));

	data_output = (fann_type *) calloc(dest->num_output * dest->num_data, sizeof(fann_type));
	if(data_output == NULL)
	{
		fann_error((struct fann_error*)data, FANN_E_CANT_ALLOCATE_MEM);
		fann_destroy_train(dest);
		return NULL;
	}
	memcpy(data_output, data->output[pos], dest->num_output * dest->num_data * sizeof(fann_type));

	for(i = 0; i != dest->num_data; i++)
	{
		dest->input[i] = data_input;
		data_input += dest->num_input;
		dest->output[i] = data_output;
		data_output += dest->num_output;
	}
	return dest;
}
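A hedged sketch of a common use of fann_subset_train_data: splitting one existing data set (data is assumed to already exist) into a training part and a validation part. The 80/20 split is arbitrary.

unsigned int num_train = (data->num_data * 4) / 5;
struct fann_train_data *train_set = fann_subset_train_data(data, 0, num_train);
struct fann_train_data *valid_set = fann_subset_train_data(data, num_train,
															data->num_data - num_train);

/* ... train on train_set, monitor the MSE on valid_set ... */

fann_destroy_train(train_set);
fann_destroy_train(valid_set);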