Ejemplo n.º 1
0
Predictor getTestPredictorLibSvm(){

  // Build a test Predictor whose regressor is libsvm-based (SvmRegressor),
  // using the shared test fixture for structure and training data.
  SampleStructure sampleStructure = getTestSampleStructure();
  Array<Sample> trainingData = getTrainingSampleData();

  // Factory lambdas handed to the Predictor, one creator of each kind.
  // NOTE(review): the new[] arrays are not freed here -- presumably Predictor
  // (or MlCreators) takes ownership; confirm against their definitions.
  MlCreators creators;

  creators.rc = new RegressorCreator[1];
  creators.rc[0] = [](SampleStructure* st, Array<Sample> training, unsigned index){

    // libsvm parameter block; values mirror the frac.libsvr defaults
    // (EPSILON_SVR rather than C_SVC, LINEAR rather than the stock RBF).
    svm_parameter params;
    params.svm_type = EPSILON_SVR;
    params.kernel_type = LINEAR;
    params.degree = 3;
    params.gamma = 0;       // 0 == "use 1/num_features" inside libsvm
    params.coef0 = 0;
    params.nu = 0.5;
    params.cache_size = 100;
    params.C = 1;
    params.eps = 1e-3;
    params.p = 0.0;         // changed from the stock libsvm default of 0.1
    params.shrinking = 1;
    params.probability = 0;
    params.nr_weight = 0;
    params.weight_label = NULL;
    params.weight = NULL;
    params.timeout = 86400; // one day

    // NOTE(review): params is stack-local; this assumes SvmRegressor copies
    // the struct rather than retaining the pointer -- confirm.
    Regressor* regressor = new SvmRegressor(*st, index, &params);
    regressor->train(training);
    return regressor;
  };

  creators.cc = new ClassifierCreator[1];
  creators.cc[0] = [](SampleStructure* st, Array<Sample> training, unsigned index){
    Classifier* classifier = new WafflesDecisionTreeClassifier(*st, index, true);
    classifier->train(training);
    return classifier;
  };

  creators.bcc = new BinaryClassifierCreator[1];
  creators.bcc[0] = [](SampleStructure* st, Array<Sample> training, unsigned index){
    BinaryClassifier* binary = new ConstantBinaryClassifier(false);
    binary->train(training);
    return binary;
  };

  //TODO build + use takeBest.

  Predictor p = Predictor(sampleStructure, trainingData, creators);

  return p;
}
Ejemplo n.º 2
0
/* Configure every problem-specific callback from the run Parameters, then
 * execute the time-integration predictor selected by setStabilizationForm.
 * Always returns 0. The setter call order is preserved as-is; later setters
 * may depend on state installed by earlier ones. */
int Process(ParametersType *Parameters, MatrixDataType *MatrixData, FemStructsType *FemStructs, 
	    FemFunctionsType *FemFunctions, FemOtherFunctionsType *FemOtherFunctions)
{
	/* Filled in by setStabilizationForm below. */
	int (*predictor)(ParametersType *, MatrixDataType *, FemStructsType *, FemFunctionsType *, FemOtherFunctionsType *);

	setProblem(Parameters, FemFunctions);
	setMatrixVectorProductType(Parameters, FemFunctions);
	setSolver(Parameters,FemOtherFunctions);
	setPreconditioner(Parameters, FemFunctions);
	setScaling(Parameters, FemFunctions);
	setStabilizationForm(Parameters, FemFunctions, FemOtherFunctions, &predictor);
	setDimensionlessness(Parameters, FemFunctions);
	set_BC_no_penetrability(Parameters, FemFunctions);
	setStopCriteria(Parameters, FemFunctions);

	/* Run the configured predictor. Its return value is ignored, as in the
	 * original code -- NOTE(review): consider checking it. */
	predictor(Parameters, MatrixData, FemStructs, FemFunctions, FemOtherFunctions);

	return 0;
}
Ejemplo n.º 3
0
Predictor getTestPredictorWaffles(){

  // Build a test Predictor backed entirely by Waffles decision trees
  // (plus a constant binary classifier), over the shared test fixture.
  SampleStructure sampleStructure = getTestSampleStructure();
  Array<Sample> trainingData = getTrainingSampleData();

  // Factory lambdas handed to the Predictor, one creator of each kind.
  // NOTE(review): the new[] arrays are not freed here -- presumably Predictor
  // (or MlCreators) takes ownership; confirm against their definitions.
  MlCreators creators;

  creators.rc = new RegressorCreator[1];
  creators.rc[0] = [](SampleStructure* st, Array<Sample> training, unsigned index){
    Regressor* regressor = new WafflesDecisionTreeRegressor(*st, index);
    regressor->train(training);
    return regressor;
  };

  creators.cc = new ClassifierCreator[1];
  creators.cc[0] = [](SampleStructure* st, Array<Sample> training, unsigned index){
    // The trailing `true` enables the classifier's randomized mode.
    // TODO Why does this random give identical results to the default?
    Classifier* classifier = new WafflesDecisionTreeClassifier(*st, index, true);
    classifier->train(training);
    return classifier;
  };

  creators.bcc = new BinaryClassifierCreator[1];
  creators.bcc[0] = [](SampleStructure* st, Array<Sample> training, unsigned index){
    BinaryClassifier* binary = new ConstantBinaryClassifier(false);
    binary->train(training);
    return binary;
  };

  //TODO build + use takeBest.

  Predictor p = Predictor(sampleStructure, trainingData, creators);

  return p;
}
Ejemplo n.º 4
0
/* Advance the ADER-DG solution by one small time step:
 *   1. predict mid-step values wpred on each interior element,
 *   2. assemble the DG residual dtw (face fluxes + volume terms),
 *   3. divide by the (diagonal) mass-matrix weights,
 *   4. explicit Euler update wnext = wnow + dt*dtw, copied back into wnow.
 * Elements 1.._NBELEMS_IN are interior; 0 and _NBELEMS_IN+1 act as boundary
 * (ghost) cells whose touching points are set from the exact solution. */
void ADERTimeStep(ADERDG* adg)
{
  double dt = adg->dt_small;

  // first, predict the values of w at an intermediate time step
  for(int ie = 1;ie <= _NBELEMS_IN; ie++) {
    Predictor(adg, ie, dt / 2);
  }

  // init the derivative to zero
  for(int ie = 1; ie<= _NBELEMS_IN; ie++) {
    for(int i = 0; i < _NGLOPS; i++) {
      for(int iv = 0; iv < _M; iv++) {
	adg->dtw[ie][i][iv]=0;
      }
    }
  }

  // impose the exact values on boundary left and right cells
  // (evaluated at the half step, consistent with the predictor above)
  double x = adg->face[0];
  double t = adg->tnow + dt / 2 ;
  ExactSol(x, t, adg->wpred[0][_D]); // _D = last point of left cell 

  x = adg->face[_NBFACES-1];

  // 0 = first point of right cell
  ExactSol(x, t, adg->wpred[_NBELEMS_IN + 1][0]); 

  // compute the face flux terms
  // loop on the faces: face i separates element i (left) from i+1 (right);
  // the flux leaves the left element (-=) and enters the right one (+=)
  for(int i = 0; i < _NBFACES; i++){
    double *wL, *wR;
    double flux[_M];
    int ie = i;
    wL = adg->wpred[ie][_D]; // _D = last point of left cell 
    wR = adg->wpred[ie+1][0];  // 0 = first point of right cell
    NumFlux(wL, wR, flux);
    for (int k = 0; k < _M; k++){
      adg->dtw[ie][_D][k] -=  flux[k];  
      adg->dtw[ie+1][0][k] +=  flux[k];  
    }
  }

  // compute the volume terms
  for(int ie = 1; ie<= _NBELEMS_IN; ie++){
    // h = physical size of element ie
    double h = adg->face[ie] - adg->face[ie-1];
    // loop on the glops i 
    for(int i = 0; i < _NGLOPS; i++){
      
      // integration weight
      double omega = wglop(_D, i) * h;
      
      // flux at glop i
      // NOTE(review): both arguments are the same state, so by consistency of
      // the numerical flux this evaluates the physical flux at the glop --
      // presumably intentional; confirm against NumFlux's contract.
      double flux[_M];
      NumFlux(adg->wpred[ie][i], adg->wpred[ie][i], flux);
      
      // loop on the basis functions j
      for(int j = 0; j < _D+1; j++){
	// derivative of basis function j at glop i
	double dd = dlag(_D, j, i) / h;
	for (int k = 0; k < _M; k++){
	  adg->dtw[ie][j][k] += omega * dd * flux[k];
	}
      }
    }
  }    
   
  // divide by the mass matrix
  // (diagonal for a nodal basis at the quadrature points: entry = wglop*h)
  for(int ie = 1; ie<= _NBELEMS_IN; ie++){
    double h = adg->face[ie] - adg->face[ie-1];
    for(int i = 0; i < _NGLOPS; i++){
      double omega = wglop(_D, i) * h;
      for (int k = 0; k < _M; k++){
	adg->dtw[ie][i][k] /= omega;
      }
    }
    
  }
  
  // update wnext and 
  // copy wnext into wnow for the next time step
  // (explicit Euler step with the full dt; the half step above was only for
  // the predictor values)
  for(int ie = 1; ie<= _NBELEMS_IN; ie++){
    for(int i = 0; i < _NGLOPS; i++){
      for(int iv = 0; iv < _M; iv++){
	adg->wnext[ie][i][iv] =
	  adg->wnow[ie][i][iv] + dt * adg->dtw[ie][i][iv];
	adg->wnow[ie][i][iv] = adg->wnext[ie][i][iv];
      }
    }
  }
 
}