Example no. 1
0
void SupervisedAlgorithmTest::linearRegressionWithRegularFeatures()
{
  // simple sine curve estimation
  // training samples
  multimap<double, double> X;
  for (int i = 0; i < 100; i++)
  {
    double x = -M_PI_2 + 2 * M_PI * random->nextReal(); // @@>> input noise?
    double y = sin(2 * x); // @@>> output noise?
    X.insert(make_pair(x, y));
  }

  // train
  int nbInputs = 1;
  PVector<double> phi(nbInputs + 1);
  phi.setEntry(phi.dimension() - 1, 1.0);
  Adaline<double> adaline(phi.dimension(), 0.00001);
  IDBD<double> idbd(phi.dimension(), 0.001); // This value looks good
  Autostep<double> autostep(phi.dimension());
  int traininCounter = 0;
  ofstream outFileError("visualization/linearRegressionWithRegularFeaturesTrainError.data");
  while (++traininCounter < 100)
  {
    for (multimap<double, double>::const_iterator iter = X.begin(); iter != X.end(); ++iter)
    {
      phi.setEntry(0, iter->first);
      adaline.learn(&phi, iter->second);
      idbd.learn(&phi, iter->second);
      autostep.learn(&phi, iter->second);
    }

    // Calculate the error
    double mse[3] = { 0 };
    for (multimap<double, double>::const_iterator iterMse = X.begin(); iterMse != X.end();
        ++iterMse)
    {
      phi.setEntry(0, iterMse->first);
      mse[0] += pow(iterMse->second - adaline.predict(&phi), 2) / X.size();
      mse[1] += pow(iterMse->second - idbd.predict(&phi), 2) / X.size();
      mse[2] += pow(iterMse->second - autostep.predict(&phi), 2) / X.size();
    }
    if (outFileError.is_open())
      outFileError << mse[0] << " " << mse[1] << " " << mse[2] << endl;
  }
  outFileError.close();

  // output
  ofstream outFilePrediction("visualization/linearRegressionWithRegularFeaturesPrediction.data");
  for (multimap<double, double>::const_iterator iter = X.begin(); iter != X.end(); ++iter)
  {
    phi.setEntry(0, iter->first);
    if (outFilePrediction.is_open())
      outFilePrediction << iter->first << " " << iter->second << " " << adaline.predict(&phi) << " "
          << idbd.predict(&phi) << " " << autostep.predict(&phi) << endl;
  }
  outFilePrediction.close();
}
Example no. 2
0
void AdalineTest::testAdalineOnTracking()
{
  {
    random->reseed(0);
    NoisyInputSumEvaluation noisyInputSumEvaluation;
    Adaline<double> adaline(noisyInputSumEvaluation.nbInputs, 0.0);
    double error = noisyInputSumEvaluation.evaluateLearner(&adaline);
    std::cout << error << std::endl;
    Assert::assertObjectEquals(noisyInputSumEvaluation.nbNonZeroWeights, error, 0.2);
  }
  {
    random->reseed(0);
    NoisyInputSumEvaluation noisyInputSumEvaluation;
    Adaline<double> adaline(noisyInputSumEvaluation.nbInputs, 0.03);
    double error = noisyInputSumEvaluation.evaluateLearner(&adaline);
    std::cout << error << std::endl;
    Assert::assertObjectEquals(3.4, error, 0.1);
  }
}
Example no. 3
0
void AdalineTest::testAdaline()
{
  random->reseed(0);
  PVector<double> targetWeights(2);
  targetWeights[0] = 1.0;
  targetWeights[1] = 2.0;
  Adaline<double> adaline(2, 0.05);
  learnTarget(&targetWeights, &adaline);
  std::cout << adaline.weights()->getEntry(0) << " " << adaline.weights()->getEntry(1) << std::endl;
  Assert::assertObjectEquals(1.0, adaline.weights()->getEntry(0), 1e-2);
  Assert::assertObjectEquals(2.0, adaline.weights()->getEntry(1), 1e-2);
}
Example no. 4
0
int main(int argc, char *argv[]) {

	// NOTE(review): `poids` (weights), `entree` (input) and `desiree` (desired
	// output) are not declared in this snippet — presumably file-scope globals
	// defined elsewhere in the file; confirm before reuse.

	 printf("Les nouveaux poids : \n");
	// Apply the adaline update to `poids` from `entree`/`desiree`; the exact
	// semantics (and whether it prints the weights itself) live in the
	// `adaline` definition elsewhere.
	adaline(poids,entree,desiree) ;

	return 0;
}
Example no. 5
0
void SupervisedAlgorithmTest::linearRegressionWithTileFeatures()
{
  // Estimates y = sin(2x) with tile-coded features fed to three online linear
  // learners (Adaline, IDBD, Autostep); writes per-epoch MSE and final
  // predictions to files under visualization/.
  // simple sine curve estimation
  // training samples
  multimap<double, double> X;
  for (int i = 0; i < 100; i++)
  {
    // Uses the member pointer `random`; a distinct local `Random` object with
    // the same name is declared further down.
    double x = -M_PI_2 + 2 * M_PI * random->nextReal(); // @@>> input noise?
    double y = sin(2 * x); // @@>> output noise?
    X.insert(make_pair(x, y));
  }

  // train
  // Tile-coding configuration: 1 input, 8x grid, 16 tilings, 512-entry memory.
  int nbInputs = 1;
  double gridResolution = 8;
  int memorySize = 512;
  int nbTilings = 16;
  // Local RNG for the hashing; shadows the member `random` from here on.
  Random<double> random;
  UNH<double> hashing(&random, memorySize);
  // NOTE(review): inputs above are drawn from [-pi/2, 3*pi/2], but this range
  // only covers [-pi/2, pi/2], so toUnit() is applied to out-of-range values
  // for many samples — confirm Range::toUnit clamps or that this is intended.
  Range<double> inputRange(-M_PI_2, M_PI_2);
  TileCoderHashing<double> coder(&hashing, nbInputs, gridResolution, nbTilings, true);
  PVector<double> x(nbInputs);
  // Adaline step-size is scaled by the coder's vector norm.
  Adaline<double> adaline(coder.dimension(), 0.1 / coder.vectorNorm());
  IDBD<double> idbd(coder.dimension(), 0.001); // This value looks good
  Autostep<double> autostep(coder.dimension());
  int traininCounter = 0;
  ofstream outFileError("visualization/linearRegressionWithTileFeaturesTrainError.dat");
  while (++traininCounter < 100)
  {
    // One epoch: online updates on every sample through the tile coder.
    for (multimap<double, double>::const_iterator iter = X.begin(); iter != X.end(); ++iter)
    {
      x[0] = inputRange.toUnit(iter->first); // normalized and unit generalized
      const Vector<double>* phi = coder.project(&x);
      adaline.learn(phi, iter->second);
      idbd.learn(phi, iter->second);
      autostep.learn(phi, iter->second);
    }

    // Calculate the error
    // Mean squared error of each learner over the whole training set.
    double mse[3] = { 0 };
    for (multimap<double, double>::const_iterator iterMse = X.begin(); iterMse != X.end();
        ++iterMse)
    {
      x[0] = inputRange.toUnit(iterMse->first);
      const Vector<double>* phi = coder.project(&x);
      mse[0] += pow(iterMse->second - adaline.predict(phi), 2) / X.size();
      mse[1] += pow(iterMse->second - idbd.predict(phi), 2) / X.size();
      mse[2] += pow(iterMse->second - autostep.predict(phi), 2) / X.size();
    }
    if (outFileError.is_open())
      outFileError << mse[0] << " " << mse[1] << " " << mse[2] << endl;
  }
  outFileError.close();

  // output
  // Per-sample targets and predictions for plotting.
  ofstream outFilePrediction("visualization/linearRegressionWithTileFeaturesPrediction.dat");
  for (multimap<double, double>::const_iterator iter = X.begin(); iter != X.end(); ++iter)
  {
    x[0] = inputRange.toUnit(iter->first);
    const Vector<double>* phi = coder.project(&x);
    if (outFilePrediction.is_open())
      outFilePrediction << iter->first << " " << iter->second << " " << adaline.predict(phi) << " "
          << idbd.predict(phi) << " " << autostep.predict(phi) << endl;
  }
  outFilePrediction.close();
}
int main(int argc, char **argv)
{
  /* Train an adaline regressor on a file of m examples in dimension d,
   * report the mean squared training error, and save the learned weights.
   * Returns 0 on success; allocation failures exit with status 1. */
  LR_PARAM   input_params;
  long int   i, j, m, d;
  double    *w, *Y, **X, *h, Erreur;
  /* Kept only for the optional classification report commented out below. */
  double     Precision, Rappel, F, PosPred, PosEffect, PosEffPred;
  char input_filename[200], params_filename[200];

  srand(time(NULL));
  /* Reading the parameters line (defined in utilitaire.c). */
  lire_commande(&input_params, input_filename, params_filename, argc, argv);
  /* Scan of the training file: example count m and dimension d.
   * Definition in utilitaire.c. */
  FileScan(input_filename, &m, &d);
  printf("Training set containing %ld examples in dimension %ld\n", m, d);

  /* 1-based storage: Y[1..m] targets; X[1..m] row pointers into one
   * contiguous (m*d)-sized payload block. */
  Y  = (double *)  malloc((m + 1) * sizeof(double));
  X  = (double **) malloc((m + 1) * sizeof(double *));
  if (!X || !Y) {
    printf("Memory allocation problem (1)\n");
    exit(1); /* non-zero status: allocation failure is an error */
  }
  X[1] = (double *) malloc((size_t) ((m * d + 1) * sizeof(double)));
  if (!X[1]) {
    printf("Memory allocation problem (2)\n");
    exit(1);
  }
  for (i = 2; i <= m; i++)
    X[i] = X[i - 1] + d;

  w = (double *) malloc((d + 1) * sizeof(double));
  if (!w) {
    printf("Memory allocation problem (3)\n");
    exit(1);
  }

  /* Loading of the data matrix; procedure defined in utilitaire.c. */
  ChrgMatrix(input_filename, m, d, X, Y);

  /* Adaline algorithm: learns w (bias in w[0]) with step eta over T rounds. */
  adaline(X, Y, w, m, d, input_params.eta, input_params.T);

  /* Predictions on the training set. */
  h = (double *) malloc((m + 1) * sizeof(double));
  if (!h) {
    printf("Memory allocation problem (4)\n");
    exit(1);
  }
  for (i = 1; i <= m; i++)
    /*@$\rhd h_t(\mathbf{x}_i)\leftarrow w^{(t)}_0+\left\langle \boldsymbol w^{(t)},\mathbf{x}_i\right\rangle$@*/
    for (h[i] = w[0], j = 1; j <= d; j++)
      h[i] += (w[j] * X[i][j]);

  /* Mean squared training error. */
  for (i = 1, Erreur = 0.0; i <= m; ++i) {
    Erreur += (Y[i] - h[i]) * (Y[i] - h[i]);
  }
  Erreur /= (double) m;
  printf("Erreur=%lf\n", Erreur);

  /* Optional binary-classification report (precision/recall/F1), disabled:
  for(i=1,PosPred=PosEffect=PosEffPred=Erreur=0.0; i<=m; i++){
     if(Y[i]*h[i]<=0.0)
         Erreur+=1.0;
     if(Y[i]==1.0){
         PosEffect++;
         if(h[i]>0.0)
            PosEffPred++;
     }
     if(h[i]>0.0)
         PosPred++;
  }

  Erreur/=(double)m;
  Precision=PosEffPred/PosPred;
  Rappel=PosEffPred/PosEffect;
  F=2.0*Precision*Rappel/(Precision+Rappel);

  printf("Precision:%lf Recall:%lf F1-measure:%lf Error=%lf\n",Precision,Rappel,F,Erreur);
  */

  save_params(params_filename, w, d);

  /* Release everything that was allocated (the original leaked all but h). */
  free(h);
  free(w);
  free(X[1]);
  free(X);
  free(Y);

  return 0; /* was `return 1`: a non-zero status signals failure to the shell */
}