Example #1
	/**
	 * 
	 * ~~~~~ Back Propagation ~~~~~
	 * 
	 * Train the network by 'back propagation'
	 * see http://en.wikipedia.org/wiki/Backpropagation
	 */
	std::vector<double> _backPropogation ( std::vector<double> input, std::vector<double> expected, Network& net )
	{
		// get the result of feeding the input into the network
		std::vector<double> output = net.feedForward(input);

		ActFunction actf = net.activate();
		double rate = net.rate();

		// ~~~~~ loop backwards over the layers ~~~~~
		// 
		// as we descend through the layers, the difference between the output and the expected 
		// result is propagated through each layer, while the change in weights (deltas) is 
		// computed for each layer.
		for(auto layer = net.rbegin(); layer != net.rend(); ++layer)
		{

			// input and output for each layer
			std::vector<double> layer_input = (*layer)->getInput();
			std::vector<double> layer_output = (*layer)->getOutput();

			// iterate over the neurons in the layer, tracking each neuron's output
			auto out = layer_output.begin();
			for (auto neuron = (*layer)->begin(); neuron != (*layer)->end(); ++neuron, ++out)
			{

				// the output layer is visited first and can be compared directly with the 
				// expected answer; for the remaining layers, 'expected' holds the output 
				// recalculated by the previously visited layer (see the end of the loop)
				auto ex = expected.begin();
				auto in = layer_input.begin();
				for (auto weight = (*neuron).begin(); weight != (*neuron).end(); ++weight, ++in, ++ex )
				{
					// calculate the deltas of the weights
					double delta = rate * ((*ex) - (*in)) * actf.dydx((*out)) * (*in);
					(*weight) -= delta; 

				}
			}

			// propagate the expected value down the chain by 
			// recalculating the layer's output with the new weights
			expected = (*layer)->feedForward( layer_input );
		}

		return expected;
	}
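
The update in the innermost loop above is a variant of the delta rule, though note it takes the error term against the layer input rather than the neuron output. For comparison, below is a minimal, self-contained sketch of the textbook rule for a single sigmoid neuron; `sigmoid` and `sigmoid_dydx` here are stand-ins for the example's ActFunction and are not part of the code above.

	#include <cmath>
	#include <cstddef>
	#include <cstdio>
	#include <vector>

	// stand-ins for the example's ActFunction: a sigmoid and its derivative,
	// expressed in terms of the neuron's output just as actf.dydx(out) is above
	static double sigmoid(double x)      { return 1.0 / (1.0 + std::exp(-x)); }
	static double sigmoid_dydx(double y) { return y * (1.0 - y); }

	int main()
	{
		// one neuron with two weights, trained toward a single target value
		std::vector<double> weights = { 0.5, -0.3 };
		std::vector<double> input   = { 1.0,  0.8 };
		const double expected = 0.2;
		const double rate = 0.5;

		for (int step = 0; step < 1000; ++step)
		{
			// forward pass
			double sum = 0.0;
			for (std::size_t i = 0; i < weights.size(); ++i)
				sum += weights[i] * input[i];
			double out = sigmoid(sum);

			// delta rule: weight += rate * error * f'(out) * input,
			// with the error taken as (expected - output)
			for (std::size_t i = 0; i < weights.size(); ++i)
				weights[i] += rate * (expected - out) * sigmoid_dydx(out) * input[i];
		}

		// forward pass with the trained weights
		double sum = 0.0;
		for (std::size_t i = 0; i < weights.size(); ++i)
			sum += weights[i] * input[i];
		std::printf("trained output: %f (target %f)\n", sigmoid(sum), expected);
		return 0;
	}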
Example #2
bool xor_evaluate(Organism *org) {
  Network *net;
  double out[4]; //The four outputs
  double this_out; //The current output
  int count;
  double errorsum;

  bool success;  //Check for successful activation
  int numnodes;  /* Used to figure out how many nodes
		    should be visited during activation */

  int net_depth; //The max depth of the network to be activated
  int relax; //Activates until relaxation

  //The four possible input combinations to xor
  //The first number is for biasing
  double in[4][3]={{1.0,0.0,0.0},
		   {1.0,0.0,1.0},
		   {1.0,1.0,0.0},
		   {1.0,1.0,1.0}};
  
  net=org->net;
  numnodes=((org->gnome)->nodes).size();

  net_depth=net->max_depth();

  //TEST CODE: REMOVE
  //cout<<"ACTIVATING: "<<org->gnome<<endl;
  //cout<<"DEPTH: "<<net_depth<<endl;

  //Load and activate the network on each input
  for(count=0;count<=3;count++) {
    net->load_sensors(in[count]);

    //Relax net and get output
    success=net->activate();

    //use depth to ensure relaxation
    for (relax=0;relax<=net_depth;relax++) {
      success=net->activate();
      this_out=(*(net->outputs.begin()))->activation;
    }

    out[count]=(*(net->outputs.begin()))->activation;

    net->flush();

  }
  
  if (success) {
    errorsum=(fabs(out[0])+fabs(1.0-out[1])+fabs(1.0-out[2])+fabs(out[3]));
    org->fitness=pow((4.0-errorsum),2);
    org->error=errorsum;
  }
  else {
    //The network is flawed (shouldn't happen)
    errorsum=999.0;
    org->fitness=0.001;
  }

  #ifndef NO_SCREEN_OUT
  cout<<"Org "<<(org->gnome)->genome_id<<"                                     error: "<<errorsum<<"  ["<<out[0]<<" "<<out[1]<<" "<<out[2]<<" "<<out[3]<<"]"<<endl;
  cout<<"Org "<<(org->gnome)->genome_id<<"                                     fitness: "<<org->fitness<<endl;
  #endif

  //  if (errorsum<0.05) { 
  //if (errorsum<0.2) {
  if ((out[0]<0.5)&&(out[1]>=0.5)&&(out[2]>=0.5)&&(out[3]<0.5)) {
    org->winner=true;
    return true;
  }
  else {
    org->winner=false;
    return false;
  }

}
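
The fitness shaping above is quadratic in the total absolute error: a network that relaxes to the exact XOR outputs (an errorsum of 0) scores 16, while the winner flag is decided by thresholding the raw outputs at 0.5 rather than by fitness. Below is a small self-contained check of that arithmetic; the sample output vectors are made up for illustration, not produced by a real network.

  #include <cmath>
  #include <cstdio>

  // recompute the fitness used in xor_evaluate for a given set of outputs,
  // in the case order {0,0}, {0,1}, {1,0}, {1,1}
  static double xor_fitness(const double out[4]) {
    double errorsum = std::fabs(out[0]) + std::fabs(1.0 - out[1]) +
                      std::fabs(1.0 - out[2]) + std::fabs(out[3]);
    return std::pow(4.0 - errorsum, 2);
  }

  int main() {
    const double perfect[4]  = {0.0, 1.0, 1.0, 0.0}; // ideal XOR responses
    const double nearmiss[4] = {0.1, 0.9, 0.8, 0.2}; // still passes the 0.5 winner test
    std::printf("perfect:   %f\n", xor_fitness(perfect));  // (4 - 0.0)^2 = 16.0
    std::printf("near miss: %f\n", xor_fitness(nearmiss)); // (4 - 0.6)^2 = 11.56
    return 0;
  }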