Example #1
void Backpropagation::trainOnlineCV(Mlp& network, 
	MATRIX& trainingInputs, 
	VECTOR& trainingTargets,
	MATRIX& testInputs,
	VECTOR& testTargets)
{
	VECTOR trainingOutputs(trainingTargets.size(), 0.0);
	VECTOR testOutputs(testTargets.size(), 0.0);
	
	// error, tolerance, testCount, maxTestCount, iteration, epoch, minTestError,
	// W, V, biasW and biasV are presumably Backpropagation data members (not shown here).
	while(error > tolerance && testCount < maxTestCount)
	{
		VECTOR::iterator output = trainingOutputs.begin();
		VECTOR::iterator target = trainingTargets.begin();
		for(MATRIX::iterator input = trainingInputs.begin(); 
			input != trainingInputs.end(); 
			++input, ++target, ++output)
		{
			*output = network(*input);
			double err = *output - *target;
			
			getWeightUpdates(network, *input, err);
			
			applyWeightUpdates(network);
			
			++iteration;
			
			if(iteration >= maxIterations)
				break;
		}
		
		++epoch;
		
		error = mse(trainingTargets, trainingOutputs);
		
		// Early-stopping using test (cross-validation) error
		testOutputs = network(testInputs);
		testError = mse(testTargets, testOutputs);
		if(testError < minTestError)
		{
			// Preserve test error and network weights
			minTestError = testError;
			W = network.W;			
			V = network.V;
			biasW = network.biasW;
			biasV = network.biasV;
			testCount = 0;
		}
		else
		{
			++testCount;
		}
	}
	
	// Restore the weights that produced the lowest test error
	network.W = W;
	network.V = V;
	network.biasW = biasW;
	network.biasV = biasV;
	testError = minTestError;
}
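The loop above relies on an mse(targets, outputs) helper that is not part of this result. A minimal sketch, assuming VECTOR is a typedef for std::vector<double> (an assumption; the snippet does not show the typedef):

#include <vector>
#include <cstddef>

typedef std::vector<double> VECTOR;   // assumed typedef, matching the usage above

// Mean of the squared differences between targets and outputs.
double mse(const VECTOR& targets, const VECTOR& outputs)
{
	double sum = 0.0;
	for(std::size_t i = 0; i < targets.size(); ++i)
	{
		double err = outputs[i] - targets[i];
		sum += err * err;
	}
	return sum / targets.size();
}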
Example #2
int main() {

    ImportarDados();                  // import the data
    InicializarVariaveisGradiente();  // initialise the gradient-descent variables
    //NormalizarDados();              // normalise the data (disabled)
    printf("\nInitial cost: %f", mse());
    GradienteDescendente();           // run gradient descent
    printf("\nFinal cost: %f", mse());
    printf("\nTheta zero: %f and theta 1: %f", theta[0], theta[1]);

    return 0;
}
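This example only shows the calls; the parameterless mse() presumably reads global data set up by ImportarDados(). A minimal sketch under that assumption, with hypothetical globals x, y, theta and m, and a linear hypothesis theta[0] + theta[1]*x (all names and sizes are assumptions, not from the snippet):

/* Hypothetical globals; names and sizes are assumptions, not from the snippet. */
double x[1000], y[1000];   /* data set loaded by ImportarDados() */
double theta[2];           /* model parameters */
int m;                     /* number of samples */

/* Mean squared cost of the hypothesis h(x) = theta[0] + theta[1]*x. */
double mse(void)
{
    double sum = 0.0;
    for(int i = 0; i < m; i++)
    {
        double err = theta[0] + theta[1] * x[i] - y[i];
        sum += err * err;
    }
    return sum / m;
}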
Example #3
void YuvAnalyse::psnr(YuvFrame& a, YuvFrame& b, float* psnrY, float* psnrU, float* psnrV){

	float mseY, mseU, mseV;

	mse(a, b, &mseY, &mseU, &mseV);

	*psnrY = 10.0 * log10f((255.0*255.0)/mseY);
	*psnrU = 10.0 * log10f((255.0*255.0)/mseU);
	*psnrV = 10.0 * log10f((255.0*255.0)/mseV);
}
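If the two frames are identical, the corresponding MSE is zero and the division yields infinity; Example #5 below caps the result in that case. A guarded variant of the Y computation might look like this (the 1000.0 cap mirrors Example #5 and is otherwise arbitrary):

// Cap the PSNR when the planes are identical (zero MSE), as Example #5 does for SNR.
*psnrY = (mseY == 0.0f) ? 1000.0f : 10.0f * log10f((255.0f * 255.0f) / mseY);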
Example #4
// detect which mouse button is pressed
void dtcbttn ()
{
  /* function 1 returns 0 until a key is pressed */
  //while (_bios_keybrd(_KEYBRD_READY) == 0)  //while (!kbhit () )
    {
     int x,y;
     static int mov_flag=0;
     float x1,y1;
     static int xx,yy;
     static int buffer[2];

     in.x.ax = 3;              /* INT 33h function 3: get mouse position and button status */
     int86 (0x33,&in,&out);    /* returns button state in BX, column in CX, row in DX */
       //clrscr();
       //gotoxy(40,30);
	 x = out.x.cx;
	 y = out.x.dx;
	 //printf ("\nLeft || X - %d  Y - %d", x, y);
	 /* map the 0-639 x 0-399 mouse range onto 80x50 text cells */
	 x1=x/639.0*80;
	 y1=y/399.0*50;
	 if (x1>(int)x1) x=(int)x1+1;
	 else x=x1;
	 if (y1>(int)y1) y=(int)y1+1;
	 else y=y1;

	/*if (mov_flag==1 && ((xx!=x) ||(yy!=y)))
	{puttext(xx,yy,xx,yy,buffer); printf("%d",buffer[0]);}
	mov_flag=1;
	 xx=x;
	 yy=y;
	 gettext(xx,yy,xx,yy,buffer);
	*/
     if (out.x.bx == 1)        /* left button pressed */
       {
	 //gotoxy(2,2);
	 //cprintf ("\nLeft || X - %f  Y - %f",x1,y1 );
	 //cprintf ("\nLeft || X - %d  Y - %d", x, y);
	 //gotoxy(x,y);
	 //cprintf("||");
	 mse_flag=1;
	 mse(x,y);
       }
       else if (out.x.bx == 2)
       {
       //printf ("\nRight");
       delay (100);}
       else if (out.x.bx == 3)
       {//printf ("\nMiddle");
       delay (100);}
       else
       mse_flag=0;
	  // Without the delay the loop polls so fast that hundreds of characters would be printed
    }
       delay(100);
}
Example #5
Void snr (const CVideoObjectPlane& vop1, const CVideoObjectPlane& vop2, Double dsnr [3])
{
	Double msError [3];

	mse (vop1, vop2, msError);
	for (UInt i = 0; i < 3; i++) {
		if (msError [i] == 0.0)
			dsnr [i] = 1000.0;
		else 
			dsnr [i] = (log10 (255 * 255 / msError [i]) * 10.0);
	}
}
Example #6
// Try to train the net to learn the given function. It returns the number
// of adjustments needed to learn the function, or FAILURE if it was never
// learned. The "stat" flag suppresses the progress output when it is nonzero.
// while training.
int PDP::trial(Function* target, int stat, double rate)
{
   if(target->get_num_inputs() != num_inputs) {
      cerr << "number of inputs for network and function do not match" << endl;
      exit(1);
   }

   int freq = 10;    // How often do we want results printed?

   double weights[MAX_WEIGHTS];
   int x;
   for(x = 0; x < num_weights; x++)
      weights[x] = (rand() % 10000) / 5000.0;  // From 0 to 2

   int n;
   for(n = 0; n < MAX_ADJUSTMENTS && !success(target, weights); n++) {
      if(!stat && (n % freq == 0))
         cout << "Adjustments: " << n
              << " Error: " << mse(target, weights) << endl;

      gradient(target, weights, rate);
   }

   if(!stat) {
      if(n < MAX_ADJUSTMENTS) {
         cout << "Weights:";
         for(x = 0; x < num_weights; x++)
            cout << " " << weights[x];
         cout << endl;
      } else
         cout << "Function not learned in " << MAX_ADJUSTMENTS
              << " adjustments" << endl;
   }

   if(n < MAX_ADJUSTMENTS)
      return n;
   else
      return FAILURE;
}
Example #7
inline double
psnr(unsigned char *p1, unsigned char *p2, size_t length, size_t maxval)
{	
	return 10*log10(maxval*maxval/mse(p1,p2,length));
}
Example #8
inline double
rmse(unsigned char *p1, unsigned char *p2, size_t length)
{
	return sqrt(mse(p1,p2,length));
}
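Examples #7 and #8 both call an mse() over raw byte buffers that is not included in this result. A minimal sketch with the same parameter list (accumulating in double is an assumption):

#include <cstddef>

// Mean squared error between two byte buffers of equal length.
inline double
mse(unsigned char *p1, unsigned char *p2, size_t length)
{
	double sum = 0.0;
	for (size_t i = 0; i < length; ++i) {
		double d = double(p1[i]) - double(p2[i]);
		sum += d * d;
	}
	return sum / length;
}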