Ejemplo n.º 1
0
void DataSetVOC::importPaulData(CStr &_inDir, CStr &vocDir)
{
	// Import Paul's positive-sample list ("Possitive.txt": name l x y w h per line):
	// resize each image so its longest side is 500px, copy it into the VOC
	// JPEGImages folder, and write a VOC-style YAML annotation with the rescaled
	// bounding box. Finally split the imported names 50/50 into TrainVal/Test.
	string outImgDir = vocDir + "JPEGImages/", annoDir = vocDir + "Annotations/";
	CmFile::MkDir(outImgDir);
	CmFile::MkDir(annoDir);
	CmFile::MkDir(vocDir + "ImageSets/Main/");
	vecS namesNE;
	FILE *f = fopen(_S(_inDir + "Possitive.txt"), "r");
	CV_Assert(f != NULL);
	char fName[1000];
	int l, x, y, w, h;
	// %999s: field width limit prevents a buffer overrun on over-long names.
	while (fscanf(f, "%999s %d %d %d %d %d\n", fName, &l, &x, &y, &w, &h) == 6){
		string nameNE = CmFile::GetNameNE(fName);
		Mat img = imread(_inDir + fName), imgN;
		if (img.empty()){ // skip unreadable images: max(cols, rows) == 0 would divide by zero below
			printf("Can't load image %s\n", _S(_inDir + fName));
			continue;
		}
		namesNE.push_back(nameNE); // only record names whose image actually loaded
		double ratio = 500.0 / max(img.cols, img.rows); // longest side -> 500px
		resize(img, imgN, Size(), ratio, ratio);
		imwrite(outImgDir + nameNE + ".jpg", imgN);

		// Box coordinates are scaled by the same ratio; VOC convention is
		// 1-based mins, and maxes are clamped to the resized image extent.
		FileStorage fs(annoDir + nameNE + ".yml", FileStorage::WRITE);
		fs<<"annotation"<<"{"<<"object"<<"{"<<"bndbox"<<"{";
		fs<<"xmin"<<format("'%d'", 1 + cvRound(x*ratio))<<"ymin"<<format("'%d'", 1 + cvRound(y*ratio));
		fs<<"xmax"<<format("'%d'", min(imgN.cols, cvRound((x+w)*ratio)))<<"ymax"<<format("'%d'", min(imgN.rows, cvRound((y+h)*ratio)));
		fs<<"}"<<"name"<<"Salient"<<"}"<<"}";
	}
	fclose(f);

	// Shuffle, then use the first half for training and the rest for testing.
	int imgNum = (int)namesNE.size();
	random_shuffle(namesNE.begin(), namesNE.end());
	vecS trainSet(namesNE.begin(), namesNE.begin() + imgNum/2);
	vecS testSet(namesNE.begin() + imgNum/2, namesNE.end());
	CmFile::writeStrList(vocDir + "ImageSets/Main/TrainVal.txt", trainSet);
	CmFile::writeStrList(vocDir + "ImageSets/Main/Test.txt", testSet);
}
Ejemplo n.º 2
0
bool DataSetVOC::importSaliencyBench(CStr &salDir, CStr &vocDir)
{
	// Convert a saliency benchmark (Src/*.jpg images with matching *.png masks)
	// into VOC layout: copy each image, derive one bounding box from its mask,
	// write the YAML annotation, then emit 50/50 TrainVal/Test splits and the
	// single-class name list. Always returns true.
	string inDir;
	string outImgDir = vocDir + "JPEGImages/";
	string annoDir = vocDir + "Annotations/";
	CmFile::MkDir(outImgDir);
	CmFile::MkDir(annoDir);
	CmFile::MkDir(vocDir + "ImageSets/Main/");

	vecS namesNE;
	int imgNum = CmFile::GetNamesNE(salDir + "Src/*.jpg", namesNE, inDir);
	random_shuffle(namesNE.begin(), namesNE.end());
	for (int idx = 0; idx < imgNum; idx++){
		const string &name = namesNE[idx];
		CmFile::Copy(inDir + name + ".jpg", outImgDir + name + ".jpg");
		// Bounding box of the salient region, taken from the binary mask.
		Mat mask1u = CmFile::LoadMask(inDir + name + ".png");
		Vec4i bb = getMaskRange(mask1u);
		FileStorage fs(annoDir + name + ".yml", FileStorage::WRITE);
		fs<<"annotation"<<"{"<<"object"<<"{"<<"bndbox"<<"{";
		fs<<"xmin"<<format("'%d'", bb[0])<<"ymin"<<format("'%d'", bb[1])<<"xmax"<<format("'%d'", bb[2])<<"ymax"<<format("'%d'", bb[3]);
		fs<<"}"<<"name"<<"Salient"<<"}"<<"}";
	}

	// First half trains, second half tests (names were shuffled above).
	vecS trainSet(namesNE.begin(), namesNE.begin() + imgNum/2);
	vecS testSet(namesNE.begin() + imgNum/2, namesNE.end());
	CmFile::writeStrList(vocDir + "ImageSets/Main/TrainVal.txt", trainSet);
	CmFile::writeStrList(vocDir + "ImageSets/Main/Test.txt", testSet);

	vecS classNames(1, "Salient");
	CmFile::writeStrList(vocDir + "ImageSets/Main/Class.txt", classNames);
	return true;
}
Ejemplo n.º 3
0
int main(int argc, char* argv[]) {
    // Train a (tree-augmented) naive Bayes classifier on train-set-file,
    // then print the learned net and per-instance test-set predictions with
    // their posterior probabilities.
    if (argc < 4) {
        cout << "usage: ./bayes train-set-file test-set-file mode:n|t [size-of-train-set] [debug-output:f|t]" << endl;
    } else {
        string trainSetFile = argv[1];
        string testSetFile = argv[2];
        bool treeAugmented = argv[3][0] == 't';             // mode flag: 't' selects TAN
        int sizeOfTrainSet = argc >= 5 ? atoi(argv[4]) : 0; // 0 = use the full train set
        bool debugOutput = argc >= 6 && argv[5][0] == 't';

        shared_ptr<Dataset> dataset(Dataset::loadDataset(trainSetFile, testSetFile));
        const DatasetMetadata* metadata = dataset->getMetadata();

        vector<Instance*> trainSet(dataset->getTrainSet().begin(), dataset->getTrainSet().end());
        // Optionally subsample the training set: shuffle with a time-based seed,
        // then truncate. Cast avoids a signed/unsigned comparison; the
        // sizeOfTrainSet > 0 check makes the cast safe.
        if (sizeOfTrainSet > 0 && (size_t)sizeOfTrainSet < trainSet.size()) {
            unsigned int seed = (unsigned int)chrono::system_clock::now().time_since_epoch().count();
            shuffle(trainSet.begin(), trainSet.end(), default_random_engine(seed));
            trainSet.resize(sizeOfTrainSet);
        }

        BayesNet bayesNet(metadata, trainSet, treeAugmented);

        if (debugOutput) {
            cout << bayesNet.getMutualInfoTable() << endl;
            cout << bayesNet.getMaximalSpanningTree() << endl;
            cout << bayesNet.getProbabilityTables() << endl;
        }

        cout << bayesNet.getBayesNet() << endl;

        const vector<Instance*>& testSet = dataset->getTestSet();
        int correctCount = 0;
        cout << "<Predictions for Test-set Instances>" << endl;
        cout << "Predicted" << DELIMITER << "Actual" << DELIMITER << "Probability" << endl;
        cout.setf(ios::fixed, ios::floatfield);
        cout.precision(PRECISION);
        for (size_t i = 0; i < testSet.size(); ++i) { // size_t: no signed/unsigned mismatch
            Instance* inst = testSet[i];
            double prob = 0.0;
            string predicted = bayesNet.predict(inst, &prob);
            string actual = inst->toString(metadata, true);

            if (predicted == actual)
                correctCount++;

            cout << predicted << DELIMITER << actual << DELIMITER << prob << endl;
        }
        cout << correctCount << " out of " << testSet.size() << " test instances were correctly classified" << endl;
    }
    return 0;
}
Ejemplo n.º 4
0
float NN_File::train(TiXmlElement * pRoot)
{
    // Train on every <set> child element of pRoot and return the mean
    // per-set training error (0 if pRoot has no <set> children).
    TiXmlElement * pSet = pRoot->FirstChildElement("set");

    float error = 0.0f;
    int setCount = 0;
    for(; pSet != NULL; setCount++)
    {
        float e = trainSet(pSet);
        error += e;
        //std::cout << "\t\t E : " << e << std::endl;
        pSet = pSet->NextSiblingElement("set");
    }
    // Guard: with no <set> children the original error/i divided by zero (NaN).
    return setCount > 0 ? error / setCount : 0.0f;
}
Ejemplo n.º 5
0
int main() {
  // Train a fractional sparse ConvNet on the Kaggle plankton dataset,
  // optionally resuming from saved weights, then train indefinitely while
  // checkpointing and validating every epoch.
  // NOTE(review): epoch, batchSize, cudaDevice, TRAINBATCH and TESTBATCH are
  // defined elsewhere in this translation unit/project — confirm their values.
  std::string baseName = "weights/plankton"; // checkpoint file prefix

  OpenCVLabeledDataSet trainSet("Data/kagglePlankton/classList",
                                "Data/kagglePlankton/train", "*.jpg",
                                TRAINBATCH, 255, true, 0);
  trainSet.summary();
  std::cout << "\n ** Use the private test set as as extra source of "
               "training data ! ** \n\n";
  OpenCVLabeledDataSet cheekyExtraTrainSet("Data/kagglePlankton/classList",
                                           "Data/kagglePlankton/testPrivate",
                                           "*.jpg", TRAINBATCH, 255, true, 0);
  cheekyExtraTrainSet.summary();
  std::cout << "\n ** Use the public test set for validation ** \n\n";
  OpenCVLabeledDataSet valSet("Data/kagglePlankton/classList",
                              "Data/kagglePlankton/testPublic", "*.jpg",
                              TESTBATCH, 255, true, 0);
  valSet.summary();

  FractionalSparseConvNet cnn(trainSet.nFeatures, trainSet.nClasses,
                              cudaDevice);

  // Resume from a checkpoint, then benchmark on the validation set first.
  if (epoch > 0) {
    cnn.loadWeights(baseName, epoch);
    cnn.processDatasetRepeatTest(valSet, batchSize / 2, 12);
  }
  // Intentionally endless training loop: weights are saved every epoch, so
  // the process is expected to be stopped externally when results plateau.
  for (epoch++;; epoch++) {
    std::cout << "epoch: " << epoch << std::endl;
    float lr = 0.003 * exp(-0.1 * epoch); // exponentially decaying learning rate
    // Ten passes per epoch, alternating the two training sources.
    for (int i = 0; i < 10; ++i) {
      cnn.processDataset(trainSet, batchSize, lr, 0.999);
      cnn.processDataset(cheekyExtraTrainSet, batchSize, lr, 0.999);
    }
    cnn.saveWeights(baseName, epoch);
    cnn.processDatasetRepeatTest(valSet, batchSize, 6);
  }

  // For unlabelled data
  // OpenCVUnlabeledDataSet
  // testSet("Data/kagglePlankton/classList"," ... ","*.jpg",255,true,0);
  // testSet.summary();
  // cnn.processDatasetRepeatTest(testSet, batchSize/2,
  // 24,"plankton.predictions",testSet.header);
}
Ejemplo n.º 6
0
int main(int argc, const char * argv[]) {
    // Train a decision tree (stopping threshold m), print it, then report
    // per-instance predictions and accuracy on the test set.
    if (argc < 4) {
        cout << "usage: ./dt-learn train-set-file test-set-file m [percentage-of-train-set]" << endl;
    } else {
        string trainSetFile = argv[1];
        string testSetFile = argv[2];
        int stopThreshold = atoi(argv[3]);
        // argc >= 5 (not == 5) so the optional argument is honored whenever
        // present, matching the companion bayes driver in this file.
        int percentageOfTrainSet = argc >= 5 ? atoi(argv[4]) : 100;

        shared_ptr<Dataset> dataset(Dataset::loadDataset(trainSetFile, testSetFile));
        const DatasetMetadata* metadata = dataset->getMetadata();

        vector<Instance*> trainSet(dataset->getTrainSet().begin(), dataset->getTrainSet().end());
        // Optionally keep only a random percentage of the training instances.
        // The > 0 guard rejects nonsensical input that would otherwise resize
        // the vector to a non-positive size.
        if (percentageOfTrainSet > 0 && percentageOfTrainSet < 100) {
            unsigned int seed = (unsigned int)chrono::system_clock::now().time_since_epoch().count();
            shuffle(trainSet.begin(), trainSet.end(), default_random_engine(seed));
            int newSize = (int)trainSet.size() * percentageOfTrainSet / 100;
            trainSet.resize(newSize);
        }

        DecisionTree tree(metadata, trainSet, stopThreshold);
        cout << tree.toString();

        const vector<Instance*>& testSet = dataset->getTestSet();
        int correctCount = 0;
        cout << "<Predictions for the Test Set Instances>" << endl;
        for (size_t i = 0; i < testSet.size(); ++i) { // size_t: no signed/unsigned mismatch
            Instance* inst = testSet[i];
            string predicted = tree.predict(inst);
            string actual = inst->toString(metadata, true);
            if (predicted == actual)
                correctCount++;
            cout << setfill(' ') << setw(3) << (i + 1) << ": ";
            cout << "Actual: " << actual << "  Predicted: " << predicted << endl;
        }
        cout << "Number of correctly classified: " << correctCount << "  Total number of test instances: " << testSet.size() << endl;
    }
    return 0;
}
Ejemplo n.º 7
0
int main(int argc, char** argv)
{
#ifdef PARALLEL_CORES
  omp_set_num_threads(PARALLEL_CORES);
#endif

  // Optional first argument selects the data directory (default: cwd).
  std::string directory = argc > 1 ? std::string(argv[1]) : "./";

  // 28x28 inputs, 10000 samples; autoencoder: inputs are their own targets.
  IDXLoader loader(28, 28, 10000, 1, directory);
  OpenANN::DirectStorageDataSet trainSet(&loader.trainingInput,
                                         &loader.trainingInput);

  const int hiddenUnits = 196;
  OpenANN::SparseAutoEncoder sae(loader.D, hiddenUnits, 3.0, 0.1, 3e-3,
                                 OpenANN::LOGISTIC);
  sae.trainingSet(trainSet);

  // Optimize with L-BFGS for at most 400 iterations.
  OpenANN::LBFGS optimizer(20);
  OpenANN::StoppingCriteria stop;
  stop.maximalIterations = 400;
  optimizer.setOptimizable(sae);
  optimizer.setStopCriteria(stop);
  optimizer.optimize();

  // Evaluate on the test inputs (again used as their own targets).
  OpenANN::MulticlassEvaluator evaluator(1, OpenANN::Logger::FILE);
  OpenANN::DirectStorageDataSet testSet(&loader.testInput, &loader.testInput,
                                        &evaluator);
  sae.validationSet(testSet);

  // Show the learned filters in a 5x7 grid in an 800x600 window.
  QApplication app(argc, argv);
  SparseAutoEncoderVisualization visual(sae, trainSet, hiddenUnits, 5, 7, 800, 600);
  visual.show();
  visual.resize(800, 600);
  return app.exec();
}
Ejemplo n.º 8
0
int main(int argc, char **argv) {
   // Inverse-optimal-control training driver: loads a grid map and evidence
   // trajectories, builds position/speed features, seeds a hand-tuned weight
   // vector, and runs trajOptimizerplus over the training split.
   OptionParser opts;

   string mapFile, evidFile;//interactFile,ignoreFile;

   int factor; // down-scaling factor applied to the map/evidence

   opts.addOption(new StringOption("map", 
            "--map <filename>                 : map file",
            "../input/grid.bmp", mapFile, false));

   opts.addOption(new StringOption("evidence", 
            "--evidence <filename>            : evidence file",
            "", evidFile, true));
   opts.addOption(new IntOption("factor", 
            "--factor <int>                   : scaling factor",
            1, factor, true));


   opts.parse(argc,argv);

   cout << "Loading Map File"<<endl;
   BMPFile bmpFile(mapFile); 
   Grid grid(bmpFile, black);
//   cout << "xdim: "<<grid.dims().first<<" yDim: "<<grid.dims().second<<endl;
   cout << "Loading Evidence"<<endl;
   //Evidence trainSet(evidFile, grid, factor);
   /* used when need to train two seperate models
   Evidence evid_int(interactFile, grid, factor);
   Evidence evid_ig(ignoreFile, grid, factor);
   Evidence train_int(grid),test_int(grid),train_ig(grid), test_ig(grid);
   evid_int.split(train_int, test_int, 0.05);
   evid_ig.split(train_ig, test_ig, 0.05);
   */
   // Split the evidence trajectories; 0.05 is the held-out test fraction
   // (presumably — verify against Evidence::split's signature).
   Evidence evid(evidFile,grid,factor);
   Evidence trainSet(grid),testSet(grid);
   evid.split(trainSet,testSet,0.05);
   cout<<"Optimize over "<<trainSet.size()<<" examples"<<endl;
#if 0 
   // Debug dump of every evidence trajectory with its timestamps.
   for (int i=0; i < evid.size(); i++) {
      cout << "Evid "<<i<<endl;
      vector<pair<int, int> > traj = evid.at(i);
      vector<double> timestamps = evid.getTimes(i);

      cout << timestamps.size()<<"  "<<traj.size()<<endl;

      for (int j=0; j < traj.size(); j++) {
         cout << timestamps.at(j)<<"  "<<traj.at(j).first
            << "  "<<traj.at(j).second<<endl;
      } 
   }
#endif
//   testSet.write("testTraj.data");

   cout << "Generating Feature Set"<<endl;

   // Position features: one constant, one obstacle, four obstacle blurs.
   vector<PosFeature> features;

   cout << "   Constant Feature"<<endl;

   ConstantFeature constFeat(grid);
   features.push_back(constFeat);

   cout << "   Obstacle Feature"<<endl;

   ObstacleFeature obsFeat(grid);
   features.push_back(obsFeat);
	

   // Obstacle-blur features at radii 5, 10, 15, 20.
   for (int i=1; i < 5; i++) {
      cout << "   Blur Feature "<<i<<endl;
      ObstacleBlurFeature blurFeat(grid, 5*i);
      features.push_back(blurFeat);
   }

   /*
   cout << "    Robot Feature"<<endl;
   RobotGlobalFeature robglobal(grid,snackbot,factor);
   features.push_back(robglobal);
   //  robot local blurres features
   for (int i=1; i < 5; i++) {
      cout << "  RobotBlur Feature "<<i<<endl;
      RobotLocalBlurFeature robblur(grid,snackbot,5*i,factor);
      features.push_back(robblur);
   }
	
   */
 
   /* 
   cout << "   Creating feature array"<<endl;
   FeatureArray featArray2(features);

   cout << "   Creating lower resolution feature array"<<endl;
   FeatureArray featArray(featArray2, factor);
   */

   // Two-entry speed table: bin 0 -> 0.0, bin 1 -> 0.75.
   cout << " Speed Feature"<<endl;
   vector<double> speedTable(2,0.0);
   speedTable.at(1) = 0.75;
   //speedTable.at(2) = 1.1;
   DisVecSeqFeature speedfeat(speedTable);


   /* Robset training weights: 
	* -3.83 -8.35991 -2.6512 -5.43475 -3.15203 -3.29758
	*  0.596987 0.439284
	* 0.589445 -0.82448
	* Non-robot-ending trainng weights:
	* -4.57257  -6.2 -0.3537 -2.7385 -0.9357 -0.2797
	* -0.495205 -0.2863
	* -1.2225 0.43993
	*/
   // 10 weights: 6 position features, 2 robot features, 2 velocity features.
   // NOTE(review): weights.at(0) = -25 does not match the -3.83 listed in the
   // comment above — confirm this override is intentional.
   vector<double> weights(6+2+2, -0.0);
   weights.at(0) = -25;	
   weights.at(1) = -8.36;
   weights.at(2) = -2.65;
   weights.at(3) = -5.43;
   weights.at(4) = -3.17;
   weights.at(5) = -3.34;
   
   weights.at(6) = 0.5; // robot feature
   weights.at(7) = 0.3; // robot feature
  
   weights.at(8) = -0.29;  // velocity feature
   weights.at(9) = -1.11; // velocity feature

   //weights.push_back(1.5);//the last parameter is for velocity feature
   Parameters params(weights);

   // 8-connected grid inference engine feeding the trajectory optimizer.
   DisSeqOrderInferEngine engine(8,InferenceEngine::GRID8);

   trajOptimizerplus optimizer(grid,trainSet,features,speedfeat,engine);

   optimizer.optimize(params,0.005,1000,1.0,OPT_EXP);

   return 0;

}