Example #1
File: SplitImpl.hpp Project: hfp/libxsmm
 virtual void backPropagate(vector<TensorBuf*>& deloutp, TensorBuf* delinp)
 {
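   // Dispatch the backward pass to the selected engine; only the XSMM path is implemented here.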
   switch(engine)
   {
     case XSMM:
       backPropagate(deloutp, delinp, 0);
       break;
   }
 }
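Example #2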
float
SentimentTraining::computeDerivatives(int startIndex, int endIndex) {
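    // Clear the accumulated derivatives, then run a forward and a backward pass over trees in [startIndex, endIndex).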
    CudaInterface::fillParamMem(mModel.derivatives_d, 0);

    for (int i = startIndex; i < endIndex; i++) {
        forwardPropagate(mTrainTrees[i]);
    }

    for (int i = startIndex; i < endIndex; i++) {
        backPropagate(mTrainTrees[i]);
    }

    return 1.0;
}
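Example #3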
static void backPropagate_test( void ) {
    float hiddenWeights[2][3] = {{-1, 1, 0}, {1, -1, 0}};
    float outputWeights[2][3] = {{0.5, 0.25, 0}, {0.25, -0.5, 0}};
    Node inputNodes[2];
    Node hiddenNodes[2] = {{hiddenWeights[0]}, {hiddenWeights[1]}};
    Node outputNode[2] = {{outputWeights[0]}, {outputWeights[1]}};
    Node desired[2] = {{NULL, -0.5}, {NULL, -0.5}};
    Layer inputLayer = {inputNodes, 2};
    Layer hiddenLayer = {hiddenNodes, 2};
    Layer outputLayer = {outputNode, 2};
    Layer desiredLayer = {desired, 2};

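    // Fixed-weight 2-2-2 network: feed one sample forward, backpropagate against the desired outputs, then check the error terms.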
    inputLayer.nodes[0].output = 0.5; inputLayer.nodes[1].output = -0.5;
    forwardPropagate( &inputLayer, &hiddenLayer, &outputLayer );
    backPropagate( &hiddenLayer, &outputLayer, &desiredLayer );

    assert( fabs( -0.3118693 - outputLayer.nodes[0].error ) < 0.001 && "First output node error is not as expected" );
    assert( fabs( 0.0162368 - outputLayer.nodes[1].error ) < 0.001 && "Second output node error is not as expected" );
    assert( fabs( -0.1518755 - hiddenLayer.nodes[0].error ) < 0.001 && "First hidden node error is not as expected" );
    assert( fabs( -0.0860857 - hiddenLayer.nodes[1].error ) < 0.001 && "Second hidden node error is not as expected" );
}
Example #4
void test_and_function()
{
    int neurals[3];
	long step=0, epochs, iSample=0;
	BPNETWORK *nw = 0;
	double input[4][2] = {{0,0},{0,1},{1,0},{1,1}};
	double target[4][1] = {{0},{0},{0},{1}};
	double e;
	double *biases;
	double threshold = 0.00001;

    nw = (BPNETWORK *)malloc(sizeof(BPNETWORK));
	if (nw == 0)
		return; /* avoid dereferencing a failed allocation */
	epochs = 200000;
	nw->learningRate = 0.15;
	nw->alpha = 0.1;

    biases = (double*)malloc(3 * sizeof(double));
	biases[0] = 0.89;
	biases[1] = -0.69;
	biases[2] = 0.55;

	printf("-------------Network configuration-------------\n");
	printf("Epochs: %ld\n", epochs);
	printf("LearningRate: %lf\n", nw->learningRate);
	printf("Alpha(momentum): %lf \n", nw->alpha);
	printf("Threshold: %lf \n", threshold);
	printf("-----------------------------------------------\n");

	if(nw != 0)
	{
		neurals[0] = 3;
		neurals[1] = 3;
		neurals[2] = 1;
		initNetwork(biases, 3, neurals, nw);

		step = 0;
		do{
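			// Cycle through the four training samples until the error drops below the threshold or the epoch budget is exhausted.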
			inputData(input[iSample], nw);
            e = backPropagate(target[iSample], nw );
			step = step + 1;
			iSample = step % 4;

		}while( (step<epochs) && e>threshold );
		//printf("Mean square error: %lf (minimum error: %lf) (threshold: %lf) after %ld epoch\n", e, minE, threshold, count);
		printf("Training completed with error/threshold %lf/%lf after %ld epochs \n\n", e, threshold, step);

		//Test the network
		printf("This network demonstrates an AND gate: \n");
		inputData(input[0], nw);
		feedForward(nw);
		printf("Output(0,0): %lf\n", nw->layer[nw->nLayer-1].p[0].x);

		inputData(input[1], nw);
		feedForward(nw);
		printf("Output(0,1): %lf\n", nw->layer[nw->nLayer-1].p[0].x);

		inputData(input[2], nw);
		feedForward(nw);
		printf("Output(1,0): %lf\n", nw->layer[nw->nLayer-1].p[0].x);

		inputData(input[3], nw);
		feedForward(nw);
		printf("Output(1,1): %lf\n", nw->layer[nw->nLayer-1].p[0].x);

		release(nw);
		free(nw);
		free(biases);
	}
}
Example #5
File: backprop.c Project: Alerion/ai
int main()
{
  double err;
  int i, j, sample=0, iterations=0;
  int sum = 0;

  out = fopen("stats.txt", "w");

  /* Seed the random number generator */
  srand( time(NULL) );

  assignRandomWeights();

  while (1) {
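    /* Training loop: present the samples round-robin, run a forward pass,
       log the squared error, and backpropagate; stops after ~100000 iterations. */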

    if (++sample == MAX_SAMPLES) sample = 0;
    
    inputs[0] = samples[sample].health;
    inputs[1] = samples[sample].knife;
    inputs[2] = samples[sample].gun;
    inputs[3] = samples[sample].enemy;

    target[0] = samples[sample].out[0];
    target[1] = samples[sample].out[1];
    target[2] = samples[sample].out[2];
    target[3] = samples[sample].out[3];

    feedForward(0);

    /* need to iterate through all ... */
    
    err = 0.0;
    for (i = 0 ; i < OUTPUT_NEURONS ; i++) {
      err += sqr( (samples[sample].out[i] - actual[i]) );
    }
    err = 0.5 * err;

    fprintf(out, "%g\n", err);
    printf("mse = %g\n", err);

    if (iterations++ > 100000) break;

    backPropagate();

  }
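  /* Print the learned input-to-hidden (wih) and hidden-to-output (who) weights. */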
  printf("wih\n");
  for (j = 0; j < HIDDEN_NEURONS; j++){
    for (i = 0; i < INPUT_NEURONS+1; i++){
       printf("%lf, ", wih[i][j]);
    }
    printf("\n");
  }
  printf("who\n");
  for (j = 0; j < OUTPUT_NEURONS; j++){
    for (i = 0; i < HIDDEN_NEURONS+1; i++){
       printf("%lf, ", who[i][j]);
    }
    printf("\n");
  }

  /* Test the network */
  for (i = 0 ; i < MAX_SAMPLES ; i++) {

    inputs[0] = samples[i].health;
    inputs[1] = samples[i].knife;
    inputs[2] = samples[i].gun;
    inputs[3] = samples[i].enemy;

    target[0] = samples[i].out[0];
    target[1] = samples[i].out[1];
    target[2] = samples[i].out[2];
    target[3] = samples[i].out[3];

    feedForward(0);

    if (action(actual) != action(target)) {

      printf("%2.1g:%2.1g:%2.1g:%2.1g %s (%s)\n", 
           inputs[0], inputs[1], inputs[2], inputs[3], 
           strings[action(actual)], strings[action(target)]);

    } else {
      sum++;
    }

  }

  printf("Network is %g%% correct\n", 
          ((float)sum / (float)MAX_SAMPLES) * 100.0);

  /* Run some tests */

  /*  Health            Knife            Gun              Enemy */
  inputs[0] = 2.0; inputs[1] = 1.0; inputs[2] = 1.0; inputs[3] = 1.0;
  feedForward(0);
  printf("2111 Action %s\n", strings[action(actual)]);

  inputs[0] = 1.0; inputs[1] = 1.0; inputs[2] = 1.0; inputs[3] = 2.0;
  feedForward(0);
  printf("1112 Action %s\n", strings[action(actual)]);

  inputs[0] = 0.0; inputs[1] = 0.0; inputs[2] = 0.0; inputs[3] = 0.0;
  feedForward(0);
  printf("0000 Action %s\n", strings[action(actual)]);

  inputs[0] = 0.0; inputs[1] = 1.0; inputs[2] = 1.0; inputs[3] = 1.0;
  feedForward(0);
  printf("0111 Action %s\n", strings[action(actual)]);

  inputs[0] = 2.0; inputs[1] = 0.0; inputs[2] = 1.0; inputs[3] = 3.0;
  feedForward(0);
  printf("2013 Action %s\n", strings[action(actual)]);

  inputs[0] = 2.0; inputs[1] = 1.0; inputs[2] = 0.0; inputs[3] = 3.0;
  feedForward(0);
  printf("2103 Action %s\n", strings[action(actual)]);

  inputs[0] = 0.0; inputs[1] = 1.0; inputs[2] = 0.0; inputs[3] = 3.0;
  feedForward(0);
  printf("0103 Action %s\n", strings[action(actual)]);

  fclose(out);

  return 0;
}