Example #1
/// Call this function after you have added all the layers.
/// It adds an extra output layer.
int ANN_Init(ANN * ann)
{
	// Add output layer
	LISTITEM *item = LastListItem(ann->c);
	Layer *l = NULL;
#ifdef ANN_DBUG
	message("Initialising");
#endif
	if (item) {
		Layer *p = (Layer *) item->obj;
		l = ANN_AddLayer(ann, p->n_outputs, ann->n_outputs, p->y);
	} else {
		l = ANN_AddLayer(ann, ann->n_inputs, ann->n_outputs,
				 ann->x);
	}
	if (l == NULL) {
		Serror("Could not create final layer\n");
		DeleteANN(ann);
		return -1;
	}
	ann->y = l->y;
	l->f = &linear;
	l->f_d = &linear_d;
	//  ann->t = l->t;
	return 0;
}
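A minimal construction sketch for the call order implied by the comment above: add layers first, then call ANN_Init() last so it can append the linear output layer. NewANN() is a hypothetical constructor, not shown in these examples; ANN_AddRBFHiddenLayer() is the layer adder from Example #5.

ANN *BuildNetwork(int n_inputs, int n_outputs)
{
	ANN *ann = NewANN(n_inputs, n_outputs);	/* hypothetical constructor */
	if (ann == NULL) {
		return NULL;
	}
	ANN_AddRBFHiddenLayer(ann, 16);	/* one hidden layer with 16 nodes */
	if (ANN_Init(ann) < 0) {
		/* ANN_Init() already calls DeleteANN() on failure */
		return NULL;
	}
	return ann;
}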
Example #2
/// Save the ANN to a C file handle.
int SaveANN(ANN* ann, FILE* f)
{
	if (f==NULL) {
		return -1;
	}
	
	StringBuffer* rtag = NewStringBuffer (256);

	WriteToken("VSOUND_ANN", f);
	fwrite(&ann->n_inputs, sizeof(int), 1, f);
	fwrite(&ann->n_outputs, sizeof(int), 1, f);
	WriteToken("Layer Data", f);
	int n_layers = 0;
	LISTITEM* list_item = FirstListItem(ann->c);
	while (list_item) {
		n_layers++;
		list_item = NextListItem (ann->c);
	}
	fwrite(&n_layers, sizeof(int), 1, f);
	list_item = FirstListItem(ann->c);
	for (int i=0; i<n_layers-1; i++) {
		Layer* l = (Layer*) list_item->obj;

		int layer_type = 0;
		WriteToken("TYPE", f);
		fwrite(&layer_type, sizeof(int), 1, f);

		int nhu = l->n_outputs;
		WriteToken("UNITS", f);
		fwrite(&nhu, sizeof(int), 1, f);
		list_item = NextListItem (ann->c);
	}
	WriteToken("Output Type", f);
	{
		int layer_type = 0;
		LISTITEM *c;
		c = LastListItem(ann->c);
		if (c) {
			Layer *l = (Layer *) c->obj;
			if (l->f==&linear) {
				layer_type = 0;
			} else {
				layer_type = 1;
			}
		}
		fwrite(&layer_type, sizeof(int), 1, f);
	}
	list_item = FirstListItem(ann->c); 
	while(list_item) {
		Layer* l = (Layer*) list_item->obj;
		WriteToken("Connections", f);
		int size = (l->n_inputs + 1 /*bias*/) * l->n_outputs;
		fwrite(l->c, sizeof(Connection), size, f);	/* element size first, then count */
		list_item = NextListItem(ann->c);
	}
	WriteToken("END", f);

	FreeStringBuffer (&rtag);
	return 0;
}
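A minimal usage sketch for SaveANN(), using only standard C I/O on top of the function above; the wrapper name and path argument are illustrative.

#include <stdio.h>

int SaveANNToPath(ANN* ann, const char* path)
{
	FILE* f = fopen(path, "wb");	/* binary mode: the data is written with fwrite() */
	if (f == NULL) {
		return -1;
	}
	int result = SaveANN(ann, f);
	fclose(f);
	return result;
}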
Example #3
//==========================================================
// ANN_SetOutputsToTanH()
//----------------------------------------------------------
/// Set outputs to hyperbolic tangent.
void ANN_SetOutputsToTanH(ANN * ann)
{
	LISTITEM *c;
	c = LastListItem(ann->c);
	if (c) {
		Layer *l = (Layer *) c->obj;
		l->f = &htan;
		l->f_d = &htan_d;
	} else {
		Serror("Could not set outputs to TanH\n");
	}
}
Example #4
//==========================================================
// ANN_SetOutputsToLinear()
//----------------------------------------------------------
/// Set outputs to linear.
void ANN_SetOutputsToLinear(ANN * ann)
{
	LISTITEM *c;
	c = LastListItem(ann->c);
	if (c) {
		Layer *l = (Layer *) c->obj;
		l->f = &linear;
		l->f_d = &linear_d;
	} else {
		Serror("Could not set outputs to linear\n");
	}
}
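ANN_Init() installs a linear output layer (Example #1); these two setters switch its nonlinearity afterwards. A short sketch, assuming ann has already been built and initialised:

	/* Targets bounded in (-1, 1): use tanh outputs instead of linear */
	ANN_SetOutputsToTanH(ann);
	/* Unbounded regression targets: switch back to linear outputs */
	ANN_SetOutputsToLinear(ann);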
Example #5
//==========================================================
// ANN_AddRBFHiddenLayer()
//----------------------------------------------------------
/// Add an RBF hidden layer with n_nodes nodes.
int ANN_AddRBFHiddenLayer(ANN * ann, int n_nodes)
{
#ifdef ANN_DBUG
	message("Adding Hidden layer with %d nodes", n_nodes);
#endif
	LISTITEM *item = LastListItem(ann->c);
	if (item) {
		Layer *p = (Layer *) item->obj;
		ANN_AddRBFLayer(ann, p->n_outputs, n_nodes, p->y);
	} else {
		ANN_AddRBFLayer(ann, ann->n_inputs, n_nodes, ann->x);
	}
	return 0;
}
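Because each call wires the new layer's inputs to the previous layer's outputs (p->y), hidden layers can be stacked simply by calling the function repeatedly; a sketch:

	ANN_AddRBFHiddenLayer(ann, 32);	/* first hidden layer, reads ann->x */
	ANN_AddRBFHiddenLayer(ann, 16);	/* second hidden layer, reads the first layer's 32 outputs */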
Example #6
/// The argument delta is the derivative of the cost function with
/// respect to the neural network outputs. In this case you must call
/// ANN_Input() yourself. As an example, to use MSE cost with
/// ANN_Delta_Train(), you should call ANN_Input() with your data,
/// call ANN_GetOutput() to inspect the outputs and then call
/// ANN_Delta_Train() with a real vector delta equal to the target
/// vector minus the ANN output vector. The argument TD is useful if
/// you want to separate the calculation of the derivative of the
/// output with respect to the parameters from the derivative of the
/// cost function with respect to the output. This is done, for
/// example, in reinforcement learning with eligibility traces, where
/// the cost function depends upon previous outputs.
real ANN_Delta_Train(ANN * ann, real* delta, real TD)
{
	LISTITEM *p = LastListItem(ann->c);
	Layer *l = (Layer *) p->obj;
	real sum = 0.0f;
	int j;
	//ANN_Input(ann, x);
	for (j = 0; j < ann->n_outputs; j++) {
		real f = l->f_d(ann->y[j]);
		real e = delta[j];
		ann->error[j] = e;
		ann->d[j] = e * f;
		sum += e * e;
	}

	l->backward(p, ann->d, ann->eligibility_traces, TD);

	return sum;
}
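A sketch of the MSE recipe described in the comment above. ANN_Input() and ANN_GetOutput() are named there, but their exact signatures are not shown in these examples, so the forms below are assumptions; TD = 1.0f is a guess for plain supervised training without eligibility traces.

#include <stdlib.h>

real TrainOnMSEExample(ANN *ann, real *x, real *target)
{
	real *delta = (real *) malloc(ann->n_outputs * sizeof(real));
	if (delta == NULL) {
		return 0.0f;
	}
	ANN_Input(ann, x);		/* forward pass; signature assumed */
	real *y = ANN_GetOutput(ann);	/* assumed to return ann->y */
	for (int j = 0; j < ann->n_outputs; j++) {
		delta[j] = target[j] - y[j];	/* target minus output, as the comment prescribes */
	}
	real sum = ANN_Delta_Train(ann, delta, 1.0f);
	free(delta);
	return sum;	/* sum of squared errors, as computed above */
}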