/**
	Method   : void MultipleBackPropagation::GenerateCCode(OutputFile & f, VariablesData & trainVariables, BOOL inputLayerIsConnectedWithOutputLayer, BOOL spaceInputLayerIsConnectedWithOutputLayer)
	Purpose  : Write the C code corresponding to the feed-forward network
	           into a given file.
*/
void MultipleBackPropagation::GenerateCCode(OutputFile & f, VariablesData & trainVariables, BOOL inputLayerIsConnectedWithOutputLayer, BOOL spaceInputLayerIsConnectedWithOutputLayer) {
	CString s;

	CString MBPVersion;
	MBPVersion.LoadString(IDS_VERSION);

	f.WriteLine("/**");
	f.WriteLine(_TEXT(" Generated by ") + MBPVersion);
	f.WriteLine(" Multiple Back-Propagation can be freely obtained at http://dit.ipg.pt/MBP");
	f.WriteLine("*/\n");

	f.WriteLine("#include <math.h>");
	
	f.WriteLine("/**");
	s.Format(_TEXT(" inputs  - should be an array of %d element(s), containing the network input(s)."), inputs);

	bool hasMissingValues = false;
	for(int i = 0; i < inputs; i++) {
		if (trainVariables.HasMissingValues(i)) {
			s += " Inputs with NaN value are considered missing values.";
			hasMissingValues = true;
			break;
		}
	}

	f.WriteLine(s);
		
	s.Format(_TEXT(" outputs - should be an array of %d element(s), that will contain the network output(s)."), outputs);
	f.WriteLine(s);
	s = " Note : The array inputs will also be changed.";
	if (!hasMissingValues) s += " Its values will be rescaled between -1 and 1.";
	s += "\n*/";
	f.WriteLine(s);

	s = f.GetFileName();
	int p = s.ReverseFind('.'); // strip the extension so the file name can serve as the function name
	if (p != -1) s = s.Left(p);
	f.WriteLine(_TEXT("void ") +  s + _TEXT("(double * inputs, double * outputs) {"));

	f.WriteString("\tdouble mainWeights[] = {");
	SaveWeights(f, ", ");
	f.WriteLine("};");
	f.WriteLine("\tdouble * mw = mainWeights;");
	if (hasMissingValues) f.WriteLine("\tdouble b;");

	if (!spaceNetwork.IsNull()) {
		f.WriteString("\tdouble spaceWeights[] = {");
		spaceNetwork->SaveWeights(f, ", ");
		f.WriteLine("};");
		f.WriteLine("\tdouble * sw = spaceWeights;");
		s.Format(_TEXT("\tdouble mk[%d];"), spaceNetwork->Outputs());
		f.WriteLine(s);
		f.WriteLine("\tdouble *m = mk;");
		
		if (hasMissingValues) {
			s.Format(L"\tdouble spaceInputs[%d];", inputs);
			f.WriteLine(s);
		}
	}

	int numberLayers = layers.Lenght();

	for (int l = 1; l < numberLayers - 1; l++) {
		s.Format(L"\tdouble hiddenLayer%doutputs[%d];", l, layers.Element(l)->neurons.Lenght());
		f.WriteLine(s);
	}

	int numberSpaceLayers = (spaceNetwork.IsNull()) ? 0 : spaceNetwork->layers.Lenght();

	for (int l = 1; l < numberSpaceLayers - 1; l++) {
		s.Format(_TEXT("\tdouble spaceHiddenLayer%doutputs[%d];"), l, spaceNetwork->layers.Element(l)->neurons.Lenght());
		f.WriteLine(s);
	}

	f.WriteLine("\tint c;");

	f.WriteString("\n");

	// input variables will be rescaled between -1 and 1
	if (trainVariables.Number() == inputs + outputs) {
		for (int i = 0; i < inputs; i++) { 
			double min = trainVariables.Minimum(i);
			double max = trainVariables.Maximum(i);

			if (trainVariables.HasMissingValues(i)) {
				s.Format(L"\tif(inputs[%d] == inputs[%d]) { /* compiler must have support for NaN numbers */\n\t", i, i);
				f.WriteString(s);
			}

			if (min != max) {
				s.Format(L"\tinputs[%d] = -1.0 + (inputs[%d] - %1.15f) / %1.15f;", i, i, min, (max - min)/2);				
			} else {
				s.Format(L"\tinputs[%d] = inputs[%d] / %1.15f; /* WARNING: During the training this variable remain always constant */", i, i, max);
			}
			f.WriteLine(s);

			if (hasMissingValues && !spaceNetwork.IsNull()) {
				if (trainVariables.HasMissingValues(i)) f.WriteString("\t");
				s.Format(L"\tspaceInputs[%d] = inputs[%d];", i, i);
				f.WriteLine(s);
			}

			if (trainVariables.HasMissingValues(i)) {
				if (!spaceNetwork.IsNull()) {
					f.WriteLine("\t\tb = *sw++;");
					s.Format(L"\t\tspaceInputs[%d] = tanh(b + spaceInputs[%d] * *sw++);", i, i);
					f.WriteLine(s);
				}

				f.WriteLine("\t\tb = *mw++;");
				s.Format(L"\t\tinputs[%d] = tanh(b + inputs[%d] * *mw++);", i, i);
				f.WriteLine(s);
				f.WriteLine("\t} else {");
				s.Format(L"\t\tspaceInputs[%d] = inputs[%d] = 0.0;", i, i);
				f.WriteLine(s);
				f.WriteLine("\t}");
			}
		}
	}
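	// Sketch of the emitted rescaling code, assuming a space network is
	// present, input 0 ranges over [0, 10] (hence the divisor (max - min) / 2 = 5)
	// and input 1 has missing values (all constants are placeholders):
	//
	//     inputs[0] = -1.0 + (inputs[0] - 0.000000000000000) / 5.000000000000000;
	//     spaceInputs[0] = inputs[0];
	//     if(inputs[1] == inputs[1]) { /* false only for NaN */
	//         inputs[1] = -1.0 + (inputs[1] - 0.000000000000000) / 5.000000000000000;
	//         spaceInputs[1] = inputs[1];
	//         b = *sw++;
	//         spaceInputs[1] = tanh(b + spaceInputs[1] * *sw++);
	//         b = *mw++;
	//         inputs[1] = tanh(b + inputs[1] * *mw++);
	//     } else {
	//         spaceInputs[1] = inputs[1] = 0.0;
	//     }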

	// space network
	for (int l = 1; l < numberSpaceLayers; l++) {
		List<Neuron> * neurons = &(spaceNetwork->layers.Element(l)->neurons);

		int nn = 0;
		for(NeuronWithInputConnections * n = dynamic_cast<NeuronWithInputConnections *>(neurons->First()); n != NULL; n = dynamic_cast<NeuronWithInputConnections *>(neurons->Next())) {
			CString aux;

			if (l == numberSpaceLayers -1) {
				aux.Format(_TEXT("mk[%d]"), nn);
			} else {
				aux.Format(_TEXT("spaceHiddenLayer%doutputs[%d]"), l, nn);
			}

			f.WriteString("\t");
			f.WriteString(aux);
			f.WriteLine(" = *sw++;");

			int numberInputsFromLastLayer = n->inputs.Lenght() - 1; // exclude the bias weight
			if (spaceInputLayerIsConnectedWithOutputLayer  && l == numberSpaceLayers -1) numberInputsFromLastLayer -= inputs;

			s.Format(_TEXT("\tfor(c = 0; c < %d; c++) "), numberInputsFromLastLayer);
			f.WriteString(s);
			f.WriteString(aux);
			f.WriteString(" += *sw++ * ");

			if (l == 1) {
				s = (hasMissingValues) ? "spaceI" : "i";
				s+= "nputs[c];";
			} else {
				s.Format(_TEXT("spaceHiddenLayer%doutputs[c];"), l-1);
			}
			f.WriteLine(s);

			if (spaceInputLayerIsConnectedWithOutputLayer  && l == numberSpaceLayers -1) {
				s.Format(_TEXT("\tfor(c = 0; c < %d; c++) "), inputs);
				f.WriteString(s);
				f.WriteLine(aux + L" += *sw++ * " + ((hasMissingValues) ? "spaceI" : "i") + "nputs[c];");
			}

			WriteActivationFunctionCCode(f, n, CT2CA(aux));

			nn++;
		}
	}
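	// Sketch of the code emitted for a single space-network hidden neuron,
	// assuming 2 inputs and a tanh activation (sizes are placeholders):
	//
	//     spaceHiddenLayer1outputs[0] = *sw++;  /* bias */
	//     for(c = 0; c < 2; c++) spaceHiddenLayer1outputs[0] += *sw++ * inputs[c];
	//     spaceHiddenLayer1outputs[0] = tanh(spaceHiddenLayer1outputs[0]);
	//
	// Neurons in the last space layer accumulate into mk[] instead; those
	// values later scale the main-network neurons with selective activation.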

	// main network

	for (int l = 1; l < numberLayers; l++) {
		List<Neuron> * neurons = &(layers.Element(l)->neurons);

		int nn = 0;
		for(NeuronWithInputConnections * n = dynamic_cast<NeuronWithInputConnections *>(neurons->First()); n != NULL; n = dynamic_cast<NeuronWithInputConnections *>(neurons->Next())) {
			CString aux;

			if (l == numberLayers -1) {
				aux.Format(_TEXT("outputs[%d]"), nn);
			} else {
				aux.Format(_TEXT("hiddenLayer%doutputs[%d]"), l, nn);
			}

			f.WriteString("\t");
			f.WriteString(aux);
			f.WriteLine(" = *mw++;");
			
			int numberInputsFromLastLayer = n->inputs.Lenght() - 1; // exclude the bias weight
			if (inputLayerIsConnectedWithOutputLayer && l == numberLayers -1) numberInputsFromLastLayer -= inputs;

			s.Format(_TEXT("\tfor(c = 0; c < %d; c++) "), numberInputsFromLastLayer);
			f.WriteString(s);
			f.WriteString(aux);
			f.WriteString(" += *mw++ * ");

			if (l == 1) {
				s = "inputs[c];";
			} else {
				s.Format(_TEXT("hiddenLayer%doutputs[c];"), l-1);
			}

			f.WriteLine(s);

			if (inputLayerIsConnectedWithOutputLayer && l == numberLayers -1) {
				s.Format(_TEXT("\tfor(c = 0; c < %d; c++) "), inputs);
				f.WriteString(s);
				f.WriteLine(aux + _TEXT(" += *mw++ * inputs[c];"));
			}

			WriteActivationFunctionCCode(f, n, CT2CA(aux));

			if (!spaceNetwork.IsNull() && nn < neuronsWithSelectiveActivation[l]) {
				f.WriteString("\t");
				f.WriteString(aux);
				f.WriteLine(" *= *m++;");
			}

			nn++;
		}
	}
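	// Sketch of the code emitted for one main-network output neuron with a
	// sigmoid activation and selective activation, assuming 4 hidden neurons
	// (sizes are placeholders):
	//
	//     outputs[0] = *mw++;  /* bias */
	//     for(c = 0; c < 4; c++) outputs[0] += *mw++ * hiddenLayer1outputs[c];
	//     outputs[0] = 1.0 / (1.0 + exp(-outputs[0]));
	//     outputs[0] *= *m++;  /* importance computed by the space network */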

	// Rescale the outputs
	if (trainVariables.Number() == inputs + outputs) {
		for (int o = 0; o < outputs; o++) {
			int outVar = o + inputs;

			double min = trainVariables.Minimum(outVar);
			double max = trainVariables.Maximum(outVar);
			double nmin = trainVariables.newMinimum[outVar];

			s.Format(_TEXT("\toutputs[%d] = %1.15f + (outputs[%d] - %f) * %1.15f;"), o, min, o, nmin, (max - min) / (1.0 - nmin));
			f.WriteLine(s);
		}
	}
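	// The emitted line inverts the output scaling used during training,
	// mapping a network output from [newMinimum, 1] back to the variable's
	// original [min, max] range. Sketch with placeholder values min = 0,
	// max = 10 and newMinimum = 0:
	//
	//     outputs[0] = 0.000000000000000 + (outputs[0] - 0.000000000000000) * 10.000000000000000;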

	f.WriteLine("}");
}
/**
	Method   : void WriteActivationFunctionCCode(OutputFile & f, NeuronWithInputConnections * n, const char * outputNeuronVariable)
	Purpose  : Write C code for the activation function of a given neuron.
	Version  : 1.0.1
*/
void MultipleBackPropagation::WriteActivationFunctionCCode(OutputFile & f, NeuronWithInputConnections * n, const char * outputNeuronVariable) {
	CString s;

	ActivationFunction * a = (ActivationFunction *) (n->function);

	if (a->Alpha() == 1 && a->id == Linear) return;

	f.WriteString("\t");
	f.WriteString(outputNeuronVariable);

	switch (a->id) {
		case Sigmoid :
			f.WriteString(" = 1.0 / (1.0 + exp(");
			if (a->Alpha() == 1.0) {
				f.WriteString("-");
			} else {
				s.Format(_TEXT("%1.15f"), -a->Alpha());
				f.WriteString(s);
				f.WriteString(" * ");
			}
			f.WriteString(outputNeuronVariable);
			f.WriteLine("));");
			break;
		case Tanh :
			f.WriteString(" = tanh(");
			if (a->Alpha() != 1.0) {
				s.Format(_TEXT("%1.15f"), a->Alpha());
				f.WriteString(s);
				f.WriteString(" * ");
			}
			f.WriteString(outputNeuronVariable);
			f.WriteLine(");");
			break;
		case Gaussian :
			f.WriteString(" = exp(-(");
			f.WriteString(outputNeuronVariable);
			f.WriteString(" * ");
			f.WriteString(outputNeuronVariable);
			f.WriteString(")");
			if (a->Alpha() != 1.0) {
				f.WriteString(" / ");
				s.Format(_TEXT("%1.15f"), a->Alpha());
				f.WriteString(s);
			}
			f.WriteLine(");");
			break;
		default : // linear	
			if (a->Alpha() != 1.0) {
				s.Format(_TEXT(" *= %1.15f"), a->Alpha());
				f.WriteString(s);
				f.WriteLine(";");
			}
	}
}
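/*
	Illustrative only: for a neuron variable "outputs[0]" this method emits,
	per activation function (alpha = 1 unless noted):

		outputs[0] = 1.0 / (1.0 + exp(-outputs[0]));     sigmoid
		outputs[0] = tanh(outputs[0]);                   tanh
		outputs[0] = exp(-(outputs[0] * outputs[0]));    gaussian
		outputs[0] *= 0.500000000000000;                 linear, alpha = 0.5

	A linear neuron with alpha = 1 emits no code at all (early return above).
*/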