Example #1
0
// Measures localization bias/accuracy of a tracker on N synthetic bead images.
// Images are generated from the radial LUT at random positions inside
// centerpos +/- range/2, optionally corrupted with Poisson noise, localized by
// the tracker, and compared against the known true positions. The averaged
// Fisher information matrix yields the Cramer-Rao lower bound stored in crlb.
static SpeedAccResult AccBiasTest(ImageData& lut, QueuedTracker *trk, int N, vector3f centerpos, vector3f range, const char *name, int MaxPixelValue, int extraFlags=0)
{
	typedef QueuedTracker TrkType;
	std::vector<vector3f> estimated, expected;

	const int imageCount = N;//std::max(1,N/20);
	std::vector<ImageData> frames(imageCount);
	const float R=5;

	int locFlags = LT_LocalizeZ|LT_NormalizeProfile|extraFlags;
	if (trk->cfg.qi_iterations>0)
		locFlags |= LT_QI;

	trk->SetLocalizationMode((LocMode_t)locFlags);
	Matrix3X3 fisherSum;
	for (int k=0; k<imageCount; k++) {
		frames[k] = ImageData::alloc(trk->cfg.width, trk->cfg.height);
		// Random offset in [-0.5, 0.5) per axis, scaled by the requested range.
		vector3f jitter(rand_uniform<float>()-0.5f, rand_uniform<float>()-0.5f, rand_uniform<float>()-0.5f);
		vector3f pos = centerpos + range*jitter*1;
		GenerateImageFromLUT(&frames[k], &lut, trk->cfg.zlut_minradius, trk->cfg.zlut_maxradius, vector3f( pos.x,pos.y, pos.z));

		// Accumulate the Fisher information at this position (noise-free image).
		SampleFisherMatrix fisherSampler(MaxPixelValue);
		fisherSum += fisherSampler.Compute(pos, vector3f(1,1,1)*0.001f, lut, trk->cfg.width,trk->cfg.height, trk->cfg.zlut_minradius,trk->cfg.zlut_maxradius);

		frames[k].normalize();
		if (MaxPixelValue> 0)
			ApplyPoissonNoise(frames[k], MaxPixelValue);
		//if(k==0) WriteJPEGFile(name, frames[k]);

		LocalizationJob job(k, 0, 0, 0);
		trk->ScheduleLocalization((uchar*)frames[k%imageCount].data, sizeof(float)*trk->cfg.width, QTrkFloat, &job);
		expected.push_back(pos);
	}
	WaitForFinish(trk, N);

	// Results can arrive out of order; place each one by its job frame index.
	estimated.resize(trk->GetResultCount());
	for (uint j=0; j<estimated.size(); j++) {
		LocalizationResult lr;
		trk->FetchResults(&lr,1);
		estimated[lr.job.frame] = lr.pos;
	}

	for (int k=0; k<imageCount; k++)
		frames[k].free();

	SpeedAccResult sar;
	sar.Compute(estimated, [&](int index) { return expected[index]; });

	// Average Fisher matrix over all samples; CRLB = sqrt(diag(F^-1)).
	fisherSum *= 1.0f/imageCount;
	sar.crlb = sqrt(fisherSum.Inverse().diag());
	return sar;
}
Example #2
0
// Runs BING objectness proposals and RGB-D saliency re-ranking over every image
// of the selected dataset, then evaluates precision/recall of both rankings
// against the ground-truth windows and dumps the PR points to text files.
// dbname: DB_NYU2_RGBD or DB_BERKELEY3D. Returns silently on any load failure.
void ObjectTester::TestObjectRanking(const DatasetName& dbname)
{
	// load dataset
	FileInfos img_fns;
	FileInfos dmap_fns;
	map<string, vector<ImgWin>> rawgtwins;

	NYUDepth2DataMan nyudata;
	Berkeley3DDataManager berkeleydata;

	if(dbname == DB_NYU2_RGBD)
	{
		nyudata.GetImageList(img_fns);
		nyudata.GetDepthmapList(dmap_fns);
		if(img_fns.size() != dmap_fns.size())
			return;
		nyudata.LoadGTWins(img_fns, rawgtwins);
	}
	if(dbname == DB_BERKELEY3D)
	{
		berkeleydata.GetImageList(img_fns);
		berkeleydata.GetDepthmapList(dmap_fns);
		if(img_fns.size() != dmap_fns.size())
			return;
		berkeleydata.LoadGTWins(img_fns, rawgtwins);
	}

	GenericObjectDetector detector;
	if( !detector.InitBingObjectness() )
		return;

	SalientRegionDetector saldet;
	SalientRGBDRegionDetector salrgbd;
	DepthSaliency depth_sal;
	vector<vector<ImgWin>> objdetwins(img_fns.size()), saldetwins(img_fns.size());
	vector<vector<ImgWin>> gtwins(img_fns.size());

	// OpenMP requires a signed loop index; take a signed copy of the size once.
	// NOTE(review): detector/salrgbd are shared across threads and mutated per
	// iteration (salrgbd.Init) — presumably tolerated in practice, but confirm
	// these classes are thread-safe or make them firstprivate.
	const int num_fns = (int)img_fns.size();
#pragma omp parallel for
	for (int i=0; i<num_fns; i++)
	{
		// read image
		Mat curimg = imread(img_fns[i].filepath);
		if(curimg.empty())
			continue;

		// read depth
		Mat curdmap;
		if(dbname == DB_NYU2_RGBD)
			if( !nyudata.LoadDepthData(dmap_fns[i].filepath, curdmap) )
				continue;
		if(dbname == DB_BERKELEY3D)
			if( !berkeleydata.LoadDepthData(dmap_fns[i].filepath, curdmap) )
				continue;

		// Look up ground truth with find(): operator[] default-inserts missing
		// keys, which mutates the shared map and is a data race inside the
		// parallel region. find() is a read-only, thread-safe lookup.
		map<string, vector<ImgWin>>::const_iterator gtit = rawgtwins.find(img_fns[i].filename);
		const vector<ImgWin> curgtwins =
			(gtit != rawgtwins.end()) ? gtit->second : vector<ImgWin>();

//#define VERBOSE
#ifdef VERBOSE
		// show gt
		visualsearch::ImgVisualizer::DrawImgWins("gt", curimg, curgtwins);
		visualsearch::ImgVisualizer::DrawFloatImg("dmap", curdmap, Mat());
#endif

		// normalize to image
		//normalize(curdmap, curdmap, 0, 255, NORM_MINMAX);
		//curdmap.convertTo(curdmap, CV_8U);
		//cvtColor(curdmap, curdmap, CV_GRAY2BGR);
		//imshow("depthimg", curdmap);
		//waitKey(10);

		// resize
		Size newsz;
		//ToolFactory::compute_downsample_ratio(Size(curimg.cols, curimg.rows), 300, newsz);
		//resize(curimg, curimg, newsz);

		double start_t = getTickCount();
		// get objectness windows
		vector<ImgWin> objboxes;
		detector.GetObjectsFromBing(curimg, objboxes, 1000);
		//visualsearch::ImgVisualizer::DrawImgWins("objectness", curimg, objboxes);
		cout<<"objectness: "<<(double)(getTickCount()-start_t) / getTickFrequency()<<endl;

		start_t = getTickCount();
		// rank proposals by combined color+depth saliency
		normalize(curdmap, curdmap, 0, 255, NORM_MINMAX);
		vector<ImgWin> salboxes = objboxes;
		int saltype = SAL_COLOR | SAL_DEPTH;
		salrgbd.Init(saltype, curimg, curdmap);
		salrgbd.RankWins(salboxes);
		//depth_sal.RankWins(curdmap, salboxes);
		cout<<"Depth ranking: "<<(double)(getTickCount()-start_t) / getTickFrequency()<<endl;

#ifdef VERBOSE
		// Show the top-ranked windows (bounded by how many boxes we actually have;
		// 'k' deliberately does not shadow the outer loop index 'i').
		const int ndisp = std::min((int)salboxes.size(), 50);
		vector<Mat> dispwins(ndisp);
		for (int k=0; k<ndisp; k++)
		{
			dispwins[k] = curimg(salboxes[k]);
		}
		Mat dispimg;
		visualsearch::ImgVisualizer::DrawImgCollection("objectness", dispwins, ndisp, 15, dispimg);
		imshow("objectness", dispimg);
		visualsearch::ImgVisualizer::DrawImgWins("saldet", curimg, salboxes);
		waitKey(0);
#endif
		/*saldet.g_para.segThresholdK = 200;
		saldet.Init(curdmap);
		saldet.RankWins(salboxes);*/
		//visualsearch::ImgVisualizer::DrawImgWins("sal", curimg, salboxes);
		//waitKey(0);

		// add to collection (each thread writes a distinct slot i: no race)
		objdetwins[i] = objboxes;
		saldetwins[i] = salboxes;
		gtwins[i] = curgtwins;

		cout<<"Finish detection on "<<i<<"/"<<num_fns<<endl;
	}

	// evaluation: PR at several top-K cutoffs for both rankings
	WindowEvaluator eval;
	vector<Point2f> objprvals, salprvals;
	const int topnum[] = {1, 5, 10, 50, 100, 200, 500, 800, 1000};
	const int numtop = (int)(sizeof(topnum)/sizeof(topnum[0]));
	for(int i=0; i<numtop; i++)
	{
		Point2f curpr = eval.ComputePR(objdetwins, gtwins, topnum[i]);
		objprvals.push_back(curpr);
		curpr = eval.ComputePR(saldetwins, gtwins, topnum[i]);
		salprvals.push_back(curpr);
	}

	// save to file (one "precision recall" pair per line)
	ofstream out1("nyu_objpr.txt");
	for (size_t i=0; i<objprvals.size(); i++) out1<<objprvals[i].x<<" "<<objprvals[i].y<<endl;
	ofstream out2("nyu_rgbdpr.txt");
	for (size_t i=0; i<salprvals.size(); i++) out2<<salprvals[i].x<<" "<<salprvals[i].y<<endl;

	cout<<"Finish evaluation"<<endl;

}
Example #3
0
// HDR pipeline driver.
// Usage: prog [-alignment] output.hdr input1 input2 ...
// Reads the input LDR images and their exposure times (from EXIF, falling back
// to asking the user), optionally aligns the stack, recovers the camera
// response curve, builds the radiance map and writes it out as an HDR file.
// Returns 0 on success, 1 on input errors.
int main(int argc, char* argv[])
{
  bool bAlignment = false;
  int arg_base = 1;

  // Consume leading "-xxx" flags; stop at the first non-flag argument.
  for (;arg_base < argc; ++arg_base) {
    const char *a = argv[arg_base];

    if (a[0] != '-')
      break;

    if (std::strcmp(a+1, "alignment") == 0) {
      bAlignment = true;
    } else {
      // Previously unknown flags were silently ignored; at least warn.
      std::fprintf(stderr, "warning: unknown option '%s' ignored\n", a);
    }
  }

  // Validate argument count before touching the positional slots.
  int numInputs = argc - arg_base - 1;
  if (numInputs < 1)
    usage(argc, argv);

  const char * outputFilePath = argv[arg_base];
  const char * const * inputFilePaths = argv + arg_base + 1;

  // Read Images
  for (int i = 0; i < numInputs; ++i) {
    float shutter;
    if (!readExif(inputFilePaths[i], shutter)) {
      // No EXIF exposure data: ask interactively. A shutter time must be a
      // positive number; bail out on garbage instead of using an
      // uninitialized value (the original ignored scanf's return).
      std::printf("%s : shutter ?\n > ", inputFilePaths[i]);
      if (std::scanf("%f", &shutter) != 1 || shutter <= 0.0f) {
        std::fprintf(stderr, "invalid shutter time for %s\n", inputFilePaths[i]);
        return 1;
      }
    }

    cv::Mat img = cv::imread(inputFilePaths[i], cv::IMREAD_COLOR);
    if (img.empty()) {
      // An unreadable image would silently poison the whole pipeline.
      std::fprintf(stderr, "cannot read image: %s\n", inputFilePaths[i]);
      return 1;
    }

    sourceImages.push_back(img);
    exposureTimes.push_back(shutter);
    indirectIndex.push_back(i);
  }

  // Sort by exposure time (indirectly, via the index vector).
  auto cmp = [](int a, int b){ return exposureTimes[a] < exposureTimes[b]; };
  std::sort(indirectIndex.begin(), indirectIndex.end(), cmp);
  if (0) {  // debug: preview the sorted inputs
    char buf[128];
    for (int i = 0; i < numInputs; ++i) {
      std::snprintf(buf, sizeof(buf), "input-%d", i);
      cv::imshow(buf, getImage(i));
    }
    cv::waitKey(0);
  }

  // Perform Alignment
  if (bAlignment) {
    std::vector<cv::Mat> imgs(numInputs);
    for (int i = 0; i < numInputs; ++i) {
      imgs[i] = getImage(i);
    }

    std::vector<cv::Point> offsets;
    alignment(imgs, offsets);

    // Crop every image to the common overlapping area.
    cv::Size minsize = getImage(0).size();

    for (int i = 0; i < numInputs; ++i) {
      std::printf("offset[%d] = %d %d\n", i, offsets[i].x, offsets[i].y);

      imgs[i] = cv::Mat(imgs[i], offsetRect(offsets[i], i));

      cv::Size sz = imgs[i].size();

      minsize.width = std::min(minsize.width, sz.width);
      minsize.height = std::min(minsize.height, sz.height);
    }

    std::printf("minsize = %d %d\n", minsize.width, minsize.height);

    for (int i = 0; i < numInputs; ++i) {
      cv::Rect rect(cv::Point(0,0), minsize);
      sourceImages[indirectIndex[i]] = imgs[i](rect).clone();
    }
  }

  // Recover Response Curve
  cv::Mat rc = recoverResponseCurve();
  if (0) {  // debug: dump the recovered response curve
    std::printf("response curve:\n");
    for (int i = 0; i < 256; ++i) {
      std::printf("%d, %f\n", i, rc.at<float>(i, 0));
    }
  }

  // Generate Radiance Map
  cv::Mat hdr = generateRadianceMap(rc);

  // Write Output
  writeHDR(outputFilePath, hdr);

  if (1) {  // preview the result
    showHDR(hdr);
    cv::waitKey(0);
  }

  return 0;
}
Example #4
0
// Builds a panorama from two or more input images using cv::Stitcher.
// argv[1] = output file, argv[2..] = input images (at least two required —
// stitching needs overlapping pairs, and the code below reads imgs[1]).
// Returns 0 on success, -1 on bad arguments, unreadable inputs or stitch failure.
int main(int argc, char* argv[])
{
    int num_imgs=0;
    cv::Stitcher stitcher = cv::Stitcher::createDefault();

    std::cout << "\n[PANORAMA]: creation of a panorama from a two or more images." << std::endl;

    // Require an output name plus at least TWO inputs. The previous check
    // (argc < 3) admitted a single input, and imgs[1] below would then be
    // an out-of-bounds access.
    if (argc < 4) {
        std::cout << "\tUsage: " << argv[0] << " output.jpg input1.jpg input2.jpg... " << std::endl;
        return -1;
    }

    num_imgs = argc - 2;
    std::string output = argv[1];
    std::vector<cv::Mat> imgs(num_imgs);

    std::cout << "[I] Input: " << num_imgs << " images." << std::endl;

    //try to open the images:
    for(int i=0; i<num_imgs; i++)
    {
        std::cout << "[I] Opening image " << argv[2+i] << std::endl;
        try
        {
            imgs[i]=cv::imread(argv[2+i]);
            if( !imgs[i].data )
                throw "Could not read image";
            std::cout << "[I] Image is " << imgs[i].cols << "x" << imgs[i].rows << std::endl;
        }
        // A thrown string literal has type const char*; the old catch(char*)
        // never matched it, so the throw went straight to std::terminate.
        catch( const char * str )
        {
            std::cout << "Exception raised: " << str  << argv[2+i] << std::endl;
            // An empty Mat would make the stitcher fail later; stop here.
            return -1;
        }
    }

    //find features

    cv::Ptr<cv::detail::FeaturesFinder> features_finder(new cv::detail::OrbFeaturesFinder(cv::Size(3,1), 1500, 1.2f, 8));
    stitcher.setFeaturesFinder(features_finder);

    cv::Mat pano;

    std::cout << "[I] Stitching..." << std::endl;

    std::cout << imgs[0].size().area() << std::endl;
    std::cout << imgs[1].size().area() << std::endl;

    cv::Stitcher::Status status = stitcher.stitch(imgs, pano);
    if (status != cv::Stitcher::OK)
    {
        std::cout << "Can't stitch images, error code = " << status << std::endl;
        return -1;
    }

    std::cout << "[I] Writing pano in " << output << std::endl;
    cv::imwrite(output,pano);

    //match features

    //select images and matched subset to build pano

    //estimate camera parameters

    //refine camera parameters

    //paralleled:
    ///Wave correction
    ///Final pano scale estimation


    return 0;
}
// Test driver: loads a WVM face-detection cascade, runs it over a fixed list of
// pre-cropped patch images (cp_0001.png .. cp_0015.png) and prints the
// "RegressorSVR" output for each patch. CRT leak checking is enabled so any
// remaining allocations are dumped when the process exits.
int _tmain(int argc, _TCHAR* argv[])
{
	_CrtSetDbgFlag ( _CRTDBG_ALLOC_MEM_DF | _CRTDBG_LEAK_CHECK_DF ); // dump leaks at return
	//_CrtSetBreakAlloc(17534);

	char* fn_detFrontal = "D:\\CloudStation\\libFD_patrik2011\\config\\fdetection\\fd_config_ffd_fd.mat";
	char* fn_regrSVR = "D:\\CloudStation\\libFD_patrik2011\\config\\fdetection\\fd_config_ffd_ra.mat";
	char* fn_regrWVR = "D:\\CloudStation\\libFD_patrik2011\\config\\fdetection\\fd_config_ffd_la.mat";
	
	FdImage *myimg = new FdImage();
	//myimg->load("D:\\CloudStation\\libFD_patrik2011\\data\\firstrun\\ws_220.tiff");
	//myimg->load("D:\\CloudStation\\libFD_patrik2011\\horse.jpg");
	//myimg->load("D:\\CloudStation\\libFD_patrik2011\\ws_220.jpg");
	//myimg->load("D:\\CloudStation\\libFD_patrik2011\\ws_115.jpg");
	
	myimg->load("D:\\CloudStation\\libFD_patrik2011\\ws_220.tif");
	//myimg->load("D:\\CloudStation\\libFD_patrik2011\\ws_71.tif");
	//myimg->load("D:\\CloudStation\\libFD_patrik2011\\color_feret_00194_940128_hr.png");


	CascadeERT *ert = new CascadeERT();
	ert->wvm->load(fn_detFrontal);
	//ert->svm->load(fn_detFrontal);
	//ert->wvr->load(fn_regrWVR);
	//ert->svr->load(fn_regrSVR);

	
	std::vector<FdImage> imgs(4864);
	std::vector<FdPatch> fps(4864);
	// Stack buffer for the filename. The original used `new char[500]` paired
	// with a plain `delete` (not `delete[]`) — undefined behavior; a fixed
	// local buffer removes the allocation entirely.
	char fn[500];

	std::vector<FdPatch*> ml_patches;
	for(int i=1; i<=15; i++) {
		sprintf(fn, "D:\\CloudStation\\libFD_patrik2011\\cp\\cp_%04d.png", i);
		imgs[i-1].load(fn);
		fps[i-1].w = imgs[i-1].w;
		fps[i-1].h = imgs[i-1].h;
		fps[i-1].data = imgs[i-1].data;	// Has a bug because this pointer can't be deleted twice
		ml_patches.push_back(&fps[i-1]);


	}

	ert->wvm->detect_on_patchvec(ml_patches);
	for(int i=1; i<=15; i++) {
		//Logger->drawPatchYawAngleColor(imgs[i-1].data_matbgr, *ml_patches[i-1]);
		//sprintf(fn, "D:\\CloudStation\\libFD_patrik2011\\cp_out\\cp_%04d_w.png", i);
		//cv::imwrite(fn, imgs[i-1].data_matbgr);
		std::cout << ml_patches[i-1]->fout["RegressorSVR"] << std::endl;
	}
	//candidates_wvm_center = wvm->detect_on_patchvec(candidates_wvr_center);

	//ert->init_for_image(myimg);
	//ert->detect_on_image(myimg);

	delete ert;
	delete myimg;
	return 0;
}