Esempio n. 1
0
class_methods_end


/* Initialize the network topology of `me`.
 * Layer sizes go into my nUnitsInLayer[0 .. my nLayers]:
 * slot 0 holds the input count, slot my nLayers the output count,
 * with up to two hidden layers in between (a hidden layer is
 * omitted when its requested node count is < 1).
 * Returns 1 on success, 0 on invalid arguments or allocation failure. */
int FFNet_init (FFNet me, long numberOfInputs, long nodesInLayer1, long nodesInLayer2, 
	long numberOfOutputs, int outputsAreLinear)
{
	long nLayers = 3, slot = 0;

	if (numberOfInputs < 1 || numberOfOutputs < 1) {
		return 0;
	}
	if (nodesInLayer1 < 1) {
		nLayers--;
	}
	if (nodesInLayer2 < 1) {
		nLayers--;
	}
	my nLayers = nLayers;
	my nUnitsInLayer = NUMlvector (0, nLayers);
	if (my nUnitsInLayer == NULL) {
		return 0;
	}
	/* Fill bottom-up: inputs first, then any hidden layers, outputs last. */
	my nUnitsInLayer[slot++] = numberOfInputs;
	if (nodesInLayer1 > 0) {
		my nUnitsInLayer[slot++] = nodesInLayer1;
	}
	if (nodesInLayer2 > 0) {
		my nUnitsInLayer[slot++] = nodesInLayer2;
	}
	my nUnitsInLayer[slot] = numberOfOutputs;
	Melder_assert (slot == nLayers);   /* every slot was filled exactly once */
	my outputsAreLinear = outputsAreLinear;

	if (! bookkeeping (me)) {
		return 0;
	}

	/* Default configuration: mean-squared-error cost, sigmoid units,
	   small random initial weights. */
	FFNet_setCostFunction (me, FFNet_COST_MSE);
	FFNet_setNonLinearity (me, FFNet_NONLIN_SIGMOID);
	FFNet_reset (me, 0.1);

	return 1;
}
Esempio n. 2
0
/* Initialize the network topology of `me`.
 * Layer sizes are stored in my nUnitsInLayer[0 .. my nLayers]:
 * slot 0 is the input count, slot my nLayers the output count,
 * with up to two hidden layers in between (a hidden layer whose
 * requested node count is < 1 is simply omitted).
 * Throws MelderError on invalid arguments. */
void FFNet_init (FFNet me, long numberOfInputs, long nodesInLayer1, long nodesInLayer2, long numberOfOutputs, int outputsAreLinear) {
	if (numberOfInputs < 1) {
		Melder_throw (U"Number of inputs must be a natural number.");
	}
	if (numberOfOutputs < 1) {
		Melder_throw (U"Number of outputs must be a natural number.");
	}
	long nLayers = 3;
	if (nodesInLayer1 < 1) {
		nLayers--;
	}
	if (nodesInLayer2 < 1) {
		nLayers--;
	}
	my nLayers = nLayers;
	my nUnitsInLayer = NUMvector<long> (0, nLayers);

	// Fill bottom-up: inputs first, then any hidden layers, outputs last.
	long slot = 0;
	my nUnitsInLayer[slot++] = numberOfInputs;
	if (nodesInLayer1 > 0) {
		my nUnitsInLayer[slot++] = nodesInLayer1;
	}
	if (nodesInLayer2 > 0) {
		my nUnitsInLayer[slot++] = nodesInLayer2;
	}
	my nUnitsInLayer[slot] = numberOfOutputs;
	Melder_assert (slot == nLayers);   // every slot was filled exactly once
	my outputsAreLinear = outputsAreLinear;

	bookkeeping (me);

	// Default configuration: mean-squared-error cost, sigmoid units,
	// small random initial weights.
	FFNet_setCostFunction (me, FFNet_COST_MSE);
	FFNet_setNonLinearity (me, FFNet_NONLIN_SIGMOID);
	FFNet_reset (me, 0.1);
}
/* Total cost of the network's responses to all patterns in `p`,
 * measured against the target activations in `a` with the given
 * cost function. Returns NUMundefined on failure (e.g. dimension
 * mismatch between network, pattern and activation).
 */
double FFNet_Pattern_Activation_getCosts_total (FFNet me, Pattern p, Activation a, int costFunctionType) {
	try {
		_FFNet_Pattern_Activation_checkDimensions (me, p, a);
		FFNet_setCostFunction (me, costFunctionType);

		double cost = 0.0;
		for (long i = 1; i <= p -> ny; i++) {
			FFNet_propagate (me, p -> z[i], NULL);
			cost += FFNet_computeError (me, a -> z[i]);
		}
		return cost;
	} catch (MelderError) {
		// NUMundefined is this function's documented failure value;
		// clear the queued melder error so it does not surface spuriously
		// at the caller's next interaction.
		Melder_clearError ();
		return NUMundefined;
	}
}
/* Train the network `me` on the (pattern, target-activation) pairs for at
 * most `maxNumOfEpochs` epochs, or until the minimizer converges within
 * `tolerance`. `parameters` configures the minimizer; `reset` restarts the
 * minimizer from the currently selected weights. The pattern/activation
 * matrices are linked into `me` only for the duration of the call and are
 * unlinked again on every exit path. Throws MelderError on failure.
 */
static void _FFNet_Pattern_Activation_learn (FFNet me, Pattern pattern,
        Activation activation, long maxNumOfEpochs, double tolerance,
        Any parameters, int costFunctionType, int reset) {
	try {
		_FFNet_Pattern_Activation_checkDimensions (me, pattern, activation);
		Minimizer_setParameters (my minimizer, parameters);

		// Link the things to be learned

		my nPatterns = pattern -> ny;
		my inputPattern = pattern -> z;
		my targetActivation = activation -> z;
		FFNet_setCostFunction (me, costFunctionType);

		if (reset) {
			// Restart the minimizer from the currently selected weights.
			autoNUMvector<double> wbuf (1, my dimension);
			long k = 1;
			for (long i = 1; i <= my nWeights; i++) {
				if (my wSelected[i]) {
					wbuf[k++] = my w[i];
				}
			}
			Minimizer_reset (my minimizer, wbuf.peek());
		}

		Minimizer_minimize (my minimizer, maxNumOfEpochs, tolerance, 1);

		// Unlink

		my nPatterns = 0;
		my inputPattern = NULL;
		my targetActivation = NULL;
	} catch (MelderError) {
		// Unlink on the failure path too, then propagate: this function
		// returns void, so silently swallowing the error would leave the
		// caller believing that training succeeded.
		my nPatterns = 0;
		my inputPattern = NULL;
		my targetActivation = NULL;
		throw;
	}
}