Exemplo n.º 1
0
int mainStaticMatch()
{
  IplImage *img1, *img2;
  img1 = cvLoadImage("imgs/img1.jpg");
  img2 = cvLoadImage("imgs/img2.jpg");

  IpVec ipts1, ipts2;
  surfDetDes(img1,ipts1,false,4,4,2,0.0001f);
  surfDetDes(img2,ipts2,false,4,4,2,0.0001f);

  IpPairVec matches;
  getMatches(ipts1,ipts2,matches);

  for (unsigned int i = 0; i < matches.size(); ++i)
  {
    drawPoint(img1,matches[i].first);
    drawPoint(img2,matches[i].second);
  
    const int & w = img1->width;
    cvLine(img1,cvPoint(matches[i].first.x,matches[i].first.y),cvPoint(matches[i].second.x+w,matches[i].second.y), cvScalar(255,255,255),1);
    cvLine(img2,cvPoint(matches[i].first.x-w,matches[i].first.y),cvPoint(matches[i].second.x,matches[i].second.y), cvScalar(255,255,255),1);
  }

  std::cout<< "Matches: " << matches.size();

  cvNamedWindow("1", CV_WINDOW_AUTOSIZE );
  cvNamedWindow("2", CV_WINDOW_AUTOSIZE );
  cvShowImage("1", img1);
  cvShowImage("2",img2);
  cvWaitKey(0);

  return 0;
}
Exemplo n.º 2
0
// Compute upright SURF descriptors for both images and fill *matches with
// the point correspondences between them.
void ofxSurfStaticMatch(ofxSurfImage * src, ofxSurfImage * dst, IpPairVec * matches) {
    const float blobThreshold = 0.0006f;  // detector response threshold
    surfDetDes(src->getCvImage(), src->ipts, false, 4, 4, 2, blobThreshold);
    surfDetDes(dst->getCvImage(), dst->ipts, false, 4, 4, 2, blobThreshold);
    getMatches(src->ipts, dst->ipts, *matches);
}
Exemplo n.º 3
0
// Run SURF detection/description on this image with parameters tuned for
// the requested mode (static image, live video, or video matching).
void ofxSurfImage::calcSurf(int tp){
    if (tp == OFX_SURF_STATIC) {
        // Rotation-invariant, low threshold: thorough detection for stills.
        surfDetDes(getCvImage(), ipts, true, 3, 4, 2, 0.0004f);
    } else if (tp == OFX_SURF_VIDEO) {
        // Higher threshold keeps the point count low enough for real time.
        surfDetDes(getCvImage(), ipts, true, 3, 4, 2, 0.004f);
    } else if (tp == OFX_SURF_VIDEOMATCH) {
        // Upright descriptors with a very low threshold for dense matching.
        surfDetDes(getCvImage(), ipts, false, 4, 4, 2, 0.0001f);
    }
}
Exemplo n.º 4
0
int mainImage()
{
	int i,ipts_size = 0;
	float tclock;
	clock_t start, end;
	Ipoint IpArray[N];

	IplImage *img = cvLoadImage("imgs/lena.bmp",1);

	// Detect and describe interest points in the image
	start = clock();
	ipts_size = surfDetDes(img, IpArray, 0, 5, 4, 2, 0.0004f, N);
	end = clock();

	tclock = (float)(end - start) / CLOCKS_PER_SEC;

	printf("OpenSURF found: %d interest points \n",ipts_size);
	printf("OpenSURF took: %f seconds \n",tclock);



	// Draw the detected points
	drawIpoints(img, IpArray,0, ipts_size);

	// Display the result
	showImage(img);
	
	free(IpArray);
	return 0;
}
Exemplo n.º 5
0
int mainImage(void)
{
  // Declare Ipoints and other stuff
  IpVec ipts;
  // Make image as a Mat; convert to IplImage for OpenSURF library actions
  cv::Mat mimg=cv::imread("OpenSURF/imgs/sf.jpg", CV_LOAD_IMAGE_COLOR);
  IplImage iimg=mimg;
  IplImage* img=&iimg;

  // Detect and describe interest points in the image
  clock_t start = clock();
  surfDetDes(img, ipts, false, 5, 4, 2, 0.0004f); 
  clock_t end = clock();

  std::cout<< "OpenSURF found: " << ipts.size() << " interest points" << std::endl;
  std::cout<< "OpenSURF took: " << float(end - start) / CLOCKS_PER_SEC  << " seconds" << std::endl;

  // Draw the detected points
  drawIpoints(img, ipts);
  
  // Display the result
  showImage(img);

  return 0;
}
Exemplo n.º 6
0
int mainKmeans(void)
{
  IplImage *img = cvLoadImage("../imgs/img1.jpg");
  IpVec ipts;
  Kmeans km;
  
  // Get Ipoints
  surfDetDes(img,ipts,true,3,4,2,0.0006f);

  for (int repeat = 0; repeat < 10; ++repeat)
  {

    IplImage *img = cvLoadImage("../imgs/img1.jpg");
    km.Run(&ipts, 5, true);
    drawPoints(img, km.clusters);

    for (unsigned int i = 0; i < ipts.size(); ++i)
    {
      cvLine(img, cvPoint(ipts[i].x,ipts[i].y), cvPoint(km.clusters[ipts[i].clusterIndex].x ,km.clusters[ipts[i].clusterIndex].y),cvScalar(255,255,255));
    }

    showImage(img);
  }

  return 0;
}
Exemplo n.º 7
0
// Detect rotation-invariant SURF points in two caller-supplied images,
// match them, report timing (Windows high-resolution counter), and
// visualise the matches. Caller retains ownership of img1/img2.
int mainStaticMatch( IplImage *img1, IplImage *img2)
{
  IpVec ipts1, ipts2;

  LARGE_INTEGER llPerfCount = {0};
  QueryPerformanceCounter(&llPerfCount);
  __int64 beginPerfCount = llPerfCount.QuadPart;

  // Rotation-invariant SURF (upright=true flag here enables orientation).
  surfDetDes(img1,ipts1,true,4,4,2,0.0001f);
  surfDetDes(img2,ipts2,true,4,4,2,0.0001f);

  IpPairVec matches;
  getMatches(ipts1,ipts2,matches);

  QueryPerformanceCounter(&llPerfCount);
  __int64 endPerfCount = llPerfCount.QuadPart;
  LARGE_INTEGER liPerfFreq={0};
  QueryPerformanceFrequency(&liPerfFreq);
  // BUG FIX: the original string literal contained mojibake ("ºÁÃë") from a
  // broken character encoding; replaced with plain ASCII.
  std::cout << __FUNCTION__ << " excute time: " 
	  <<  float(endPerfCount - beginPerfCount) * 1000 / liPerfFreq.QuadPart  << " millisecond(ms)" << std::endl;

  for (unsigned int i = 0; i < matches.size(); ++i)
  {
    drawPoint(img1,matches[i].first);
    drawPoint(img2,matches[i].second);

    // Offset line endpoints by the image width so lines point across the
    // side-by-side display.
    const int & w = img1->width;
    cvLine(img1,cvPoint(matches[i].first.x,matches[i].first.y),cvPoint(matches[i].second.x+w,matches[i].second.y), cvScalar(255,255,255),1);
    cvLine(img2,cvPoint(matches[i].first.x-w,matches[i].first.y),cvPoint(matches[i].second.x,matches[i].second.y), cvScalar(255,255,255),1);
  }

  std::cout<< "Matches: " << matches.size() << std::endl;

  cvNamedWindow("1", CV_WINDOW_AUTOSIZE );
  cvNamedWindow("2", CV_WINDOW_AUTOSIZE );
  cvShowImage("1", img1);
  cvShowImage("2",img2);
  cvWaitKey(0);

  return 0;
}
Exemplo n.º 8
0
// Detect upright SURF points in two images loaded via the C++ API, match
// them, and visualise the matches side-by-side.
int mainStaticMatch()
{
  // Make images as Mats; convert to IplImage for OpenSURF library actions
  cv::Mat mimg1, mimg2;
  mimg1=cv::imread("OpenSURF/imgs/img1.jpg", CV_LOAD_IMAGE_COLOR);
  mimg2=cv::imread("OpenSURF/imgs/img2.jpg", CV_LOAD_IMAGE_COLOR);
  // imread returns an empty Mat on failure; SURF on an empty image crashes.
  if (mimg1.empty() || mimg2.empty())
  {
    std::cerr << "Failed to load OpenSURF/imgs/img1.jpg or img2.jpg" << std::endl;
    return -1;
  }

  // Header-only views; the Mats own the pixel data.
  IplImage iimg1, iimg2;
  iimg1=mimg1;
  iimg2=mimg2;

  IplImage *img1, *img2;
  img1 = &iimg1;
  img2 = &iimg2;

  IpVec ipts1, ipts2;
  surfDetDes(img1,ipts1,false,4,4,2,0.0001f);
  surfDetDes(img2,ipts2,false,4,4,2,0.0001f);

  IpPairVec matches;
  getMatches(ipts1,ipts2,matches);

  for (unsigned int i = 0; i < matches.size(); ++i)
  {
    drawPoint(img1,matches[i].first);
    drawPoint(img2,matches[i].second);

    // Offset by the image width so lines point across the side-by-side view.
    const int & w = img1->width;
    cvLine(img1,cvPoint(matches[i].first.x,matches[i].first.y),cvPoint(matches[i].second.x+w,matches[i].second.y), cvScalar(255,255,255),1);
    cvLine(img2,cvPoint(matches[i].first.x-w,matches[i].first.y),cvPoint(matches[i].second.x,matches[i].second.y), cvScalar(255,255,255),1);
  }

  std::cout<< "Matches: " << matches.size() << std::endl;

  cvNamedWindow("1", CV_WINDOW_AUTOSIZE );
  cvNamedWindow("2", CV_WINDOW_AUTOSIZE );
  cvShowImage("1", img1);
  cvShowImage("2",img2);
  cvWaitKey(0);

  return 0;
}
Exemplo n.º 9
0
// Track frame-to-frame motion: match the current frame's SURF points
// against the previous frame's and record, per match, the source/target
// positions and the displacement speed.
void ofxSurfMotion::update() {
    oldIpts = cam->ipts;
    motion.clear();
    surfDetDes(cam->getCvImage(),cam->ipts,true,3,4,2,0.0004f);
    getMatches(cam->ipts,oldIpts,matches);
    for(uint i=0; i<matches.size(); i++) {
        Motion m;
        m.src.set(matches[i].first.x,matches[i].first.y);
        m.dst.set(matches[i].second.x,matches[i].second.y);
        float dx = matches[i].first.dx;
        float dy = matches[i].first.dy;
        // BUG FIX: original computed sqrtf(dx*dx + dy + dy); the speed is
        // the Euclidean norm of the displacement, sqrt(dx^2 + dy^2).
        m.speed = sqrtf(dx*dx + dy*dy);
        motion.push_back(m);
    }
}
Exemplo n.º 10
0
// Live-camera demo: detect SURF points each frame, match against the
// previous frame, and draw points whose apparent speed is in (5, 30) px.
int mainMotionPoints(void)
{
  // Initialise capture device
  CvCapture* capture = cvCaptureFromCAM( CV_CAP_ANY );
  if(!capture) error("No Capture");

  // Create a window 
  cvNamedWindow("OpenSURF", CV_WINDOW_AUTOSIZE );

  // Declare Ipoints and other stuff
  IpVec ipts, old_ipts, motion;
  IpPairVec matches;
  IplImage *img;

  // Main capture loop
  while( 1 ) 
  {
    // Grab frame from the capture source. cvQueryFrame returns NULL when
    // the stream ends or the device fails; bail out instead of crashing.
    img = cvQueryFrame(capture);
    if (!img) break;

    // Detect and describe interest points in the image
    old_ipts = ipts;
    surfDetDes(img, ipts, true, 3, 4, 2, 0.0004f);

    // Fill match vector
    getMatches(ipts,old_ipts,matches);
    for (unsigned int i = 0; i < matches.size(); ++i) 
    {
      const float & dx = matches[i].first.dx;
      const float & dy = matches[i].first.dy;
      float speed = sqrt(dx*dx+dy*dy);
      // Only draw points in a plausible motion band: ignores jitter (<5)
      // and mismatches (>30).
      if (speed > 5 && speed < 30) 
        drawIpoint(img, matches[i].first, 3);
    }
        
    // Display the result
    cvShowImage("OpenSURF", img);

    // If ESC key pressed exit loop
    if( (cvWaitKey(10) & 255) == 27 ) break;
  }

  // Release the capture device (frames returned by cvQueryFrame are owned
  // by the capture and must NOT be released individually).
  cvReleaseCapture( &capture );
  cvDestroyWindow( "OpenSURF" );
  return 0;
}
Exemplo n.º 11
0
// Live-camera demo: detect and draw upright SURF points on each frame
// along with an FPS counter, until ESC is pressed or the stream ends.
int mainVideo(void)
{
  // Initialise capture device
  CvCapture* capture = cvCaptureFromCAM( CV_CAP_ANY );
  if(!capture) error("No Capture");

  // Create a window 
  cvNamedWindow("OpenSURF", CV_WINDOW_AUTOSIZE );

  // Declare Ipoints and other stuff
  IpVec ipts;
  IplImage *img=NULL;

  // Main capture loop
  while( 1 ) 
  {
    // Grab frame from the capture source. cvQueryFrame returns NULL when
    // the stream ends or the device fails; bail out instead of crashing.
    img = cvQueryFrame(capture);
    if (!img) break;

    // Extract surf points (upright, higher threshold for real-time rate).
    surfDetDes(img, ipts, false, 4, 4, 2, 0.004f);    

    // Draw the detected points
    drawIpoints(img, ipts);

    // Draw the FPS figure
    drawFPS(img);

    // Display the result
    cvShowImage("OpenSURF", img);

    // If ESC key pressed exit loop
    if( (cvWaitKey(10) & 255) == 27 ) break;
  }

  // Frames returned by cvQueryFrame belong to the capture; only the
  // capture itself needs releasing.
  cvReleaseCapture( &capture );
  cvDestroyWindow( "OpenSURF" );
  return 0;
}
Exemplo n.º 12
0
// Detect rotation-invariant SURF points in a caller-supplied image, report
// the count and elapsed time, then draw and display the result. The caller
// retains ownership of img.
int mainImage(IplImage *img)
{
  IpVec keypoints;

  // Time the detection + description pass.
  const clock_t t0 = clock();
  surfDetDes(img, keypoints, true, 5, 4, 2, 0.01f);
  const clock_t t1 = clock();

  std::cout<< "OpenSURF found: " << keypoints.size() << " interest points" << std::endl;
  std::cout<< "OpenSURF took: " << float(t1 - t0) / CLOCKS_PER_SEC  << " seconds" << std::endl;

  // Annotate and show the image.
  drawIpoints(img, keypoints);
  showImage(img);

  return 0;
}
Exemplo n.º 13
0
void init(unsigned char *img,double *refhist,IpVec *refpts,double updt[][9],int w,int h)
{
    double obj[8]={0};
    unsigned char pix[60*80*3]={0,0};
    obj[0]=w/2;obj[1]=h/2;obj[2]=80;obj[3]=60;obj[4]=obj[5]=1;obj[6]=obj[7]=0;
    int x=obj[0],y=obj[1],wt=obj[2],ht=obj[3];
    int p=0,l=0;
    IpVec pt;
    IplImage *im;
    im=cvCreateImage(cvSize(w,h),IPL_DEPTH_8U,3);
    memcpy(im->imageData,img,im->imageSize);
    surfDetDes(im,pt,false,5,4,2,0.00004f);
    p=0;
    for(int k=0;k<pt.size();k++)
    {
        if((pt.at(k).x>=(w/2-wt/2)) && (pt.at(k).x<=(w/2+wt/2)) &&
        (pt.at(k).y>=(h/2-ht/2)) && (pt.at(k).y<=(h/2+ht/2)) )
        {
            Ipoint tmp;
            pt.at(k).x-=(w/2-wt/2);

            pt.at(k).y-=(h/2-ht/2);
            (*refpts).push_back(pt.at(k));
        }
    }
    for(int i=0;i<N;i++)
    {
        updt[i][0]=obj[0]+(rand()%60-30);
        updt[i][1]=obj[1]+(rand()%60-30);
        updt[i][2]=obj[2];
        updt[i][3]=obj[3];
        updt[i][4]=obj[4];
        updt[i][5]=obj[5];
        updt[i][6]=obj[6];
        updt[i][7]=obj[7];
        updt[i][8]=(double)1/N;
        img[3*(int)(w*updt[i][1]+updt[i][0])]=0;img[3*(int)(w*updt[i][1]+updt[i
        ][0])+1]=255;img[3*(int)(w*updt[i][1]+updt[i][0])+1]=0;
    }
}
Exemplo n.º 14
0
int mainImage(void)
{
    // Declare Ipoints and other stuff
    IpVec ipts;
    IplImage *img=cvLoadImage("Images/img1.jpg");

    // Detect and describe interest points in the image
    {
        surfDetDes(img, ipts, false, 3, 4, 2, 0.0004f);
    }

    std::cout<< "OpenSURF found: " << ipts.size() << " interest points" << std::endl;
    //std::cout<< "OpenSURF took: min/avg/max/stddev " << time_min << "/" << time_avg << "/" << time_max << "/" << stddev
    //		<< std::endl;

    // Draw the detected points
    drawIpoints(img, ipts);

    // Display the result
    //showImage(img);
    cvSaveImage("result.jpg",img);

    return 0;
}
Exemplo n.º 15
0
// Worker-thread body: runs SURF detection/matching off the main thread.
// Two modes, selected by flags set from the main thread:
//   bSurfStaticImage  - match two static images (src vs trg);
//   bSurfMotionImage  - match src against a continuously-updated trg frame.
// Pattern throughout: swap in the *Temp pointers under the lock, run the
// (slow) SURF work on the temps WITHOUT holding the lock, then publish the
// results (ipts/matches) back under the lock.
// NOTE(review): srcIptsTemp/trgIptsTemp are read outside the lock; this
// presumably relies on the main thread only touching the non-Temp members —
// confirm against the rest of the class.
void ofxOpenSurf :: threadedFunction() 
{
    while( isThreadRunning() )
    {
        if( bSurfStaticImage )
        {
            // Only re-run detection when the source image pointer changed.
            if( srcImage != srcImageTemp )
            {
                lock();
                srcImage = srcImageTemp;
                unlock();
                
                surfDetDes
                ( 
                    srcImage->getCvImage(),     // image to find Ipoints in
                    srcIptsTemp,                // reference to vector of Ipoints
                    false,                      // run in rotation invariant mode?
                    4,                          // number of octaves to calculate
                    4,                          // number of intervals per octave
                    2,                          // initial sampling step
                    0.0006                      // blob response threshold
                 );
            }
            
            // Same change-detection for the target image.
            if( trgImage != trgImageTemp )
            {
                lock();
                trgImage = trgImageTemp;
                unlock();
                
                surfDetDes
                ( 
                    trgImage->getCvImage(),     // image to find Ipoints in
                    trgIptsTemp,                // reference to vector of Ipoints
                    false,                      // run in rotation invariant mode?
                    4,                          // number of octaves to calculate
                    4,                          // number of intervals per octave
                    2,                          // initial sampling step
                    0.0006                      // blob response threshold
                );
            }
            
            getMatches( srcIptsTemp, trgIptsTemp, matchesTemp );
            
            // Publish results and clear the request flag atomically.
            lock();
            srcIpts = srcIptsTemp;
            trgIpts = trgIptsTemp;
            matches = matchesTemp;
            bSurfStaticImage = false;
            unlock();

        }
        
        if( bSurfMotionImage )
        {
            // Source (reference) image: only redetect when it changed.
            if( srcImage != srcImageTemp )
            {
                lock();
                srcImage = srcImageTemp;
                unlock();
                
                surfDetDes
                ( 
                    srcImage->getCvImage(),     // image to find Ipoints in
                    srcIptsTemp,                // reference to vector of Ipoints
                    false,                      // run in rotation invariant mode?
                    4,                          // number of octaves to calculate
                    4,                          // number of intervals per octave
                    2,                          // initial sampling step
                    0.0004                      // blob response threshold
                );
            }
            
            // Target is a live frame: always swap in and redetect
            // (note the higher threshold, 0.001, for real-time throughput).
            lock();
            trgImage = trgImageTemp;
            unlock();
            
            surfDetDes
            ( 
                trgImage->getCvImage(),     // image to find Ipoints in
                trgIptsTemp,                // reference to vector of Ipoints
                false,                      // run in rotation invariant mode?
                4,                          // number of octaves to calculate
                4,                          // number of intervals per octave
                2,                          // initial sampling step
                0.001                       // blob response threshold
            );
            
            getMatches( srcIptsTemp, trgIptsTemp, matchesTemp );
            
            // Publish results and clear the request flag atomically.
            lock();
            srcIpts = srcIptsTemp;
            trgIpts = trgIptsTemp;
            matches = matchesTemp;
            bSurfMotionImage = false;
            unlock();
        }
    }
}
Exemplo n.º 16
0
// Compare symmetric SURF matching on the same image pair twice: once with
// global-orientation matching enabled and once with it disabled, drawing
// match lines (brightness originally meant to encode match strength, but
// the colour is overwritten with solid white below).
int mainStaticMatchStrengths()
{
  bool matchGlobalOrientations = true;

  // Make images as Mats; convert to IplImage for OpenSURF library actions
  cv::Mat mimg1, mimg2;
  mimg1=cv::imread("OpenSURF/imgs/img1.jpg", CV_LOAD_IMAGE_COLOR);
  mimg2=cv::imread("OpenSURF/imgs/img2.jpg", CV_LOAD_IMAGE_COLOR);

  // Header-only views; the Mats own the pixel data.
  IplImage iimg1, iimg2;
  iimg1=mimg1;
  iimg2=mimg2;

  IplImage *img1, *img2;
  img1 = &iimg1;
  img2 = &iimg2;

  IpVec ipts1, ipts2;
  surfDetDes(img1,ipts1,false,4,4,2,0.0001f,matchGlobalOrientations);
  surfDetDes(img2,ipts2,false,4,4,2,0.0001f,matchGlobalOrientations);

  // MatchVec pairs each point-pair with a match-distance score (.second).
  MatchVec matches;
  getMatchesSymmetric(ipts1,ipts2,matches);

  IpVec mpts1, mpts2;

  const int & w = img1->width;

  for (unsigned int i = 0; i < matches.size(); ++i)
  {
    // Map match score to a grey level... then immediately override with
    // white (the strength visualisation is effectively disabled).
    float strengthOverThreshold = 1 - matches[i].second; // /MATCH_THRESHOLD;
    strengthOverThreshold*=255;
    CvScalar clr = cvScalar(strengthOverThreshold,strengthOverThreshold,strengthOverThreshold);
    clr = cvScalar(255,255,255);
    
    //drawPoint(img1,matches[i].first.first,clr);
    //drawPoint(img2,matches[i].first.second,clr),
    mpts1.push_back(matches[i].first.first);
    mpts2.push_back(matches[i].first.second);
  
    // Offset by the image width so lines point across a side-by-side view.
    cvLine(img1,cvPoint(matches[i].first.first.x,matches[i].first.first.y),cvPoint(matches[i].first.second.x+w,matches[i].first.second.y), clr,1);
    cvLine(img2,cvPoint(matches[i].first.first.x-w,matches[i].first.first.y),cvPoint(matches[i].first.second.x,matches[i].first.second.y), clr,1);
  }

  drawIpoints(img1,mpts1);
  drawIpoints(img2,mpts2);

  std::cout<< "Matches: " << matches.size() << std::endl;

  cvNamedWindow("1", CV_WINDOW_AUTOSIZE );
  cvNamedWindow("2", CV_WINDOW_AUTOSIZE );
  cvShowImage("1", img1);
  cvShowImage("2",img2);
  cvWaitKey(0);

  // Second pass: identical pipeline with the orientation flag inverted.
  // NOW DO IT AGAIN!
  cv::Mat mimg3, mimg4;
  mimg3=cv::imread("OpenSURF/imgs/img1.jpg", CV_LOAD_IMAGE_COLOR);
  mimg4=cv::imread("OpenSURF/imgs/img2.jpg", CV_LOAD_IMAGE_COLOR);

  IplImage iimg3, iimg4;
  iimg3=mimg3;
  iimg4=mimg4;

  IplImage *img3, *img4;
  img3 = &iimg3;
  img4 = &iimg4;

  IpVec ipts3, ipts4;
  surfDetDes(img3,ipts3,false,4,4,2,0.0001f,!matchGlobalOrientations);
  surfDetDes(img4,ipts4,false,4,4,2,0.0001f,!matchGlobalOrientations);

  matches.clear();
  getMatchesSymmetric(ipts3,ipts4,matches);

  IpVec mpts3, mpts4;

  for (unsigned int i = 0; i < matches.size(); ++i)
  {
    // Same overwritten strength-to-grey mapping as above.
    float strengthOverThreshold = 1 - matches[i].second; // /MATCH_THRESHOLD;
    strengthOverThreshold*=255;
    CvScalar clr = cvScalar(strengthOverThreshold,strengthOverThreshold,strengthOverThreshold);
    clr = cvScalar(255,255,255);
    
    //drawPoint(img1,matches[i].first.first,clr);
    //drawPoint(img2,matches[i].first.second,clr),
    mpts3.push_back(matches[i].first.first);
    mpts4.push_back(matches[i].first.second);
  
    // NOTE(review): w still refers to img1's width; correct here only
    // because img3 is the same file as img1 — confirm if inputs change.
    cvLine(img3,cvPoint(matches[i].first.first.x,matches[i].first.first.y),cvPoint(matches[i].first.second.x+w,matches[i].first.second.y), clr,1);
    cvLine(img4,cvPoint(matches[i].first.first.x-w,matches[i].first.first.y),cvPoint(matches[i].first.second.x,matches[i].first.second.y), clr,1);
  }

  drawIpoints(img3,mpts3);
  drawIpoints(img4,mpts4);

  std::cout<< "Matches: " << matches.size() << std::endl;

  cvNamedWindow("3", CV_WINDOW_AUTOSIZE );
  cvNamedWindow("4", CV_WINDOW_AUTOSIZE );
  cvShowImage("3", img3);
  cvShowImage("4",img4);
  cvWaitKey(0);


  return 0;
}
Exemplo n.º 17
0
int mainMatch(void)
{
  // Initialise capture device
  CvCapture* capture = cvCaptureFromCAM( CV_CAP_ANY );
  if(!capture) error("No Capture");

  // Declare Ipoints and other stuff
  IpPairVec matches;
  IpVec ipts, ref_ipts;
  
  // This is the reference object we wish to find in video frame
  // Replace the line below with IplImage *img = cvLoadImage("imgs/object.jpg"); 
  // where object.jpg is the planar object to be located in the video
  IplImage *img = cvLoadImage("../imgs/object.jpg"); 
  if (img == NULL) error("Need to load reference image in order to run matching procedure");
  CvPoint src_corners[4] = {{0,0}, {img->width,0}, {img->width, img->height}, {0, img->height}};
  CvPoint dst_corners[4];

  // Extract reference object Ipoints
  surfDetDes(img, ref_ipts, false, 3, 4, 3, 0.004f);
  drawIpoints(img, ref_ipts);
  showImage(img);

  // Create a window 
  cvNamedWindow("OpenSURF", CV_WINDOW_AUTOSIZE );

  // Main capture loop
  while( true ) 
  {
    // Grab frame from the capture source
    img = cvQueryFrame(capture);
     
    // Detect and describe interest points in the frame
    surfDetDes(img, ipts, false, 3, 4, 3, 0.004f);

    // Fill match vector
    getMatches(ipts,ref_ipts,matches);
    
    // This call finds where the object corners should be in the frame
    if (translateCorners(matches, src_corners, dst_corners))
    {
      // Draw box around object
      for(int i = 0; i < 4; i++ )
      {
        CvPoint r1 = dst_corners[i%4];
        CvPoint r2 = dst_corners[(i+1)%4];
        cvLine( img, cvPoint(r1.x, r1.y),
          cvPoint(r2.x, r2.y), cvScalar(255,255,255), 3 );
      }

      for (unsigned int i = 0; i < matches.size(); ++i)
        drawIpoint(img, matches[i].first);
    }

    // Draw the FPS figure
    drawFPS(img);

    // Display the result
    cvShowImage("OpenSURF", img);

    // If ESC key pressed exit loop
    if( (cvWaitKey(10) & 255) == 27 ) break;
  }

  // Release the capture device
  cvReleaseCapture( &capture );
  cvDestroyWindow( "OpenSURF" );
  return 0;
}
Exemplo n.º 18
0
// Extract SURF descriptors (64-dim) for every image of every class across
// the training, validation, and test splits, copying each image's features
// into the corresponding feature object. Returns false (no-op) if features
// were already extracted (numFeatures > 0), true on completion.
// Pipeline per image: load -> grayscale -> resize to 75x150 -> SURF.
// NOTE(review): numFeatures is accumulated for the training split only —
// confirm whether validation/test features should also be counted.
bool BagOfFeatures::extractSURFFeatures(bool invariant,
                                        int octaves,
                                        int intervals,
                                        int step,
                                        float thresh)
{
    // Guard against double extraction.
    if(numFeatures > 0)
        return false;

	int i, j;
    int train, valid, test, label;

    char fileName[256];

	IpVec temp;
    IplImage* dataImage = NULL;

    // SURF descriptors are 64 floats.
    descrSize = 64;

	for(i = 0; i < numClasses; i++)
	{
	    // Get the distribution of data
        data[i].getDataInfo(train, valid, test, label);
	    // Extrain the features of the training set
        // For each training image
        for(j = 0; j < train; j++)
        {
            strcpy(fileName, data[i].getDataList(j));
            cout << "Loading training image: " << fileName << endl;
            dataImage = cvLoadImage(fileName);
            IplImage *dataGray = cvCreateImage(cvSize(dataImage->width, dataImage->height), 8, 1);
            // Convert to grayscale
            cvCvtColor(dataImage, dataGray, CV_BGR2GRAY);

            //Resize the images
            IplImage *resized = preProcessImages(dataGray, 75, 150);

            // Detect the SURF features
            surfDetDes(resized, temp, invariant, octaves, intervals, step, thresh);

            cout << "OpenSURF found: " << temp.size() << " interest points" << endl;
            // Keep track of the feature count
            numFeatures += temp.size();

            /*
            drawIpoints(resized, temp, 3);

            IplImage* display = cvCreateImage(cvSize(resized->width*4, resized->height*4), resized->depth, resized->nChannels);
            cvResize(resized, display, CV_INTER_CUBIC);
            cvShowImage("Extracted SURF", display);
            cvWaitKey(150);
            cvReleaseImage(&display);
            */
            // Copy the SURF feature into the feature object
            copySURFPts(trainObject[i].featureSet[j], temp, descrSize);

            cvReleaseImage(&dataImage);
            cvReleaseImage(&dataGray);
            cvReleaseImage(&resized);
        }

        // Extrain the features of the validation set
        // For each validation image
        // (validation images follow the training images in the data list)
        for(j = 0; j < valid; j++)
        {
            strcpy(fileName, data[i].getDataList(j+train));
            cout << "Loading validation image: " << fileName << endl;
            dataImage = cvLoadImage(fileName);
            IplImage *dataGray = cvCreateImage(cvSize(dataImage->width, dataImage->height), 8, 1);
            // Convert to grayscale
            cvCvtColor(dataImage, dataGray, CV_BGR2GRAY);

            //Resize the images
            IplImage *resized = preProcessImages(dataGray, 75, 150);

            // Detect the SURF features
            surfDetDes(resized, temp, invariant, octaves, intervals, step, thresh);

            cout << "OpenSURF found: " << temp.size() << " interest points" << endl;

            /*
            drawIpoints(resized, temp, 3);

            IplImage* display = cvCreateImage(cvSize(resized->width*4, resized->height*4), resized->depth, resized->nChannels);
            cvResize(resized, display, CV_INTER_CUBIC);
            cvShowImage("Extracted SURF", display);
            cvWaitKey(150);
            cvReleaseImage(&display);
            */

            // Copy the SURF feature into the feature object
            copySURFPts(validObject[i].featureSet[j], temp, descrSize);

            cvReleaseImage(&dataImage);
            cvReleaseImage(&dataGray);
            cvReleaseImage(&resized);

        }

        // Extrain the features of the test set
        // For each test image
        // (test images follow training + validation in the data list)
        for(j = 0; j < test; j++)
        {
            strcpy(fileName, data[i].getDataList(j+train+valid));
            cout << "Loading test image: " << fileName << endl;
            dataImage = cvLoadImage(fileName);
            IplImage *dataGray = cvCreateImage(cvSize(dataImage->width, dataImage->height), 8, 1);
            // Convert to grayscale
            cvCvtColor(dataImage, dataGray, CV_BGR2GRAY);

            //Resize the images
            IplImage *resized = preProcessImages(dataGray, 75, 150);

            // Detect the SURF features
            surfDetDes(resized, temp, invariant, octaves, intervals, step, thresh);

            cout << "OpenSURF found: " << temp.size() << " interest points" << endl;

            /*
            drawIpoints(resized, temp, 3);

            IplImage* display = cvCreateImage(cvSize(resized->width*4, resized->height*4), resized->depth, resized->nChannels);
            cvResize(resized, display, CV_INTER_CUBIC);
            cvShowImage("Extracted SURF", display);
            cvWaitKey(150);
            cvReleaseImage(&display);
            */
            // Copy the SURF feature into the feature object
            copySURFPts(testObject[i].featureSet[j], temp, descrSize);

            cvReleaseImage(&dataImage);
            cvReleaseImage(&dataGray);
            cvReleaseImage(&resized);
        }
	}

    return true;
}
Exemplo n.º 19
0
void msrmtupdate(unsigned char *img,double updt[][9],IpVec *refpts,double
*refhist,int w,int h)
{
    long double tot=0,dist2=0;
    double hist[257 * 10]={0,0,0,0,0,0,0,0,0,0,0,0,0,0,0};
    static double ptr[4],p;
    int key[10][2];
    IpVec pts,tpts;
    IplImage *cl,*tmp2;
    cl=cvCreateImage(cvSize(w,h),IPL_DEPTH_8U,3);
    memcpy(cl->imageData,(img),cl->imageSize);
    tmp2=cvCloneImage(cl);
    surfDetDes(cl,tpts ,false, 5, 4, 2, 0.00004f);
    IpPairVec matches;
    IpVec ipts, ref_ipts;
    CvPoint src_corners[4] = {cvPoint(0,0), cvPoint(80,0), cvPoint(80, 60),
    cvPoint(0, 60)};
    CvPoint dst_corners[4];
    getMatches(tpts,*refpts,matches);
    int tt=0;
    tt=translateCorners(matches, src_corners, dst_corners);
    if (translateCorners(matches, src_corners, dst_corners))
     {
         // Draw box around object
         for(int i = 0; i < 4; i++ )

         {
             CvPoint r1 = dst_corners[i%4];
             CvPoint r2 = dst_corners[(i+1)%4];
             cvLine( cl, cvPoint(r1.x, r1.y), cvPoint(r2.x, r2.y), cvScalar(255,255,255),3 );
         }
         for (unsigned int i = 0; i < matches.size(); ++i)
         drawIpoint(cl, matches[i].first);
     }
    CvPoint cpt;
    cpt.x=((dst_corners[0].x)+(dst_corners[2].x))/2;
    cpt.y=((dst_corners[0].y)+(dst_corners[2].y))/2;
    p++;
    if(tt)
    {
        if((abs(ptr[2]-abs(dst_corners[0].x-dst_corners[1].x))>=30 ||
        abs(ptr[3]-abs(dst_corners[0].y-dst_corners[3].y))>=30 ||
        !isrect(dst_corners)) && p>3 )
        {
             tt=0;
        }
        else
        {
            cvCvtColor(tmp2,cl ,CV_RGB2HSV);
            ptr[0]=cpt.x;ptr[1]=cpt.y;ptr[2]=abs(dst_corners[0].xst_corners[1].x);ptr[3]=abs(dst_corners[0].y-dst_corners[3].y);
            crhist((unsigned char *)cl->imageData,hist,w,h,ptr);
            dist2=.1*(double)exp(-2*pow(comphist(hist,refhist),2));
        }
    }
    for(int i=0;i<N;i++)
    {
    if(tt && dist2>.05 )
    {
        updt[i][0]=cpt.x;
        updt[i][1]=cpt.y;
        updt[i][2]=ptr[2];
        updt[i][3]=ptr[3];
        updt[i][4]=1;
        updt[i][5]=1;
        updt[i][8]=1;
        tot++;
    }
    else
     {
        double pt[4];
        for(int k=0;k<4;k++)
        {
            pt[k]=updt[i][k];
        }
        cvCvtColor(tmp2,cl, CV_RGB2HSV);
        crhist((unsigned char *)cl->imageData,hist,w,h,pt);
        dist2=.1*(double)exp(-100*pow(comphist(hist,refhist),2));
        updt[i][8]=dist2;
        tot+=updt[i][8];
    }
    }
    for(int i=0;i<N;i++)
      updt[i][8]/=(double)tot;

}
Exemplo n.º 20
0
int mainStaticMatch()
{

    time_t start,end1,end2,end3,end4,end5;
    start = clock();

    IplImage *img1, *img2;
    img1 = cvLoadImage("../data/1.JPG");
    img2 = cvLoadImage("../data/2.JPG");


    end1 = clock();

    IpVec ipts1, ipts2;
    surfDetDes(img1,ipts1,false,4,4,2,0.0008f);
    surfDetDes(img2,ipts2,false,4,4,2,0.0008f);

    std::cout << "im1" << std::endl;
    std::cout << "Size:" << ipts1.size() << std::endl;

    std::cout << "im2" << std::endl;
    std::cout << "Size:" << ipts2.size() << std::endl;
    end2 = clock();

    IpPairVec matches;
    getMatches(ipts1,ipts2,matches);

    end3 = clock();

    for (unsigned int i = 0; i < matches.size(); ++i)
    {
        drawPoint(img1,matches[i].first);
        drawPoint(img2,matches[i].second);

        const int & w = img1->width;
        cvLine(img1,cvPoint(matches[i].first.x,matches[i].first.y),cvPoint(matches[i].second.x+w,matches[i].second.y), cvScalar(255,255,255),1);
        cvLine(img2,cvPoint(matches[i].first.x-w,matches[i].first.y),cvPoint(matches[i].second.x,matches[i].second.y), cvScalar(255,255,255),1);
    }

    std::cout << "Matches: " << matches.size() << std::endl;
    /*
      cvNamedWindow("1", CV_WINDOW_AUTOSIZE );
      cvNamedWindow("2", CV_WINDOW_AUTOSIZE );
      cvShowImage("1", img1);
      cvShowImage("2", img2);
      cvWaitKey(0);
    */
    end4 = clock();

//  cvSaveImage("result_gpu1.jpg",img1);
//	cvSaveImage("result_gpu2.jpg",img2);

    // Stitch two images
    IplImage *img = cvCreateImage(cvSize(img1->width + img2->width,
                                         img1->height),img1->depth,img1->nChannels);
    cvSetImageROI( img, cvRect( 0, 0, img1->width, img1->height ) );
    cvCopy(img1, img);
    cvSetImageROI( img, cvRect(img1->width,0, img2->width, img2->height) );
    cvCopy(img2, img);
    cvResetImageROI(img);
    cvSaveImage("result_gpu.jpg",img);

    end5 = clock();
    double dif1 = (double)(end1 - start) / CLOCKS_PER_SEC;
    double dif2 = (double)(end2 - end1) / CLOCKS_PER_SEC;
    double dif3 = (double)(end3 - end2) / CLOCKS_PER_SEC;
    double dif4 = (double)(end4 - end3) / CLOCKS_PER_SEC;
    double dif5 = (double)(end5 - end4) / CLOCKS_PER_SEC;
    double total = (double)(end5 - start) / CLOCKS_PER_SEC;
    std::cout.setf(std::ios::fixed,std::ios::floatfield);
    std::cout.precision(5);
    std::cout << "Time(load):" << dif1 << std::endl;
    std::cout << "Time(descriptor):" << dif2 << std::endl;
    std::cout << "Time(match):" << dif3 << std::endl;
    std::cout << "Time(plot):" << dif4 << std::endl;
    std::cout << "Time(save):" << dif5 << std::endl;
    std::cout << "Time(Total):" << total << std::endl;
    return 0;
}
Exemplo n.º 21
0
// Detect upright SURF points in the camera frame and fill *matches with
// correspondences against the marker image's already-computed points.
void ofxSurfVideoMatch(ofxSurfImage * cam, ofxSurfImage * mrk, IpPairVec * matches) {
    const float blobThreshold = 0.001f;  // higher threshold for video rate
    surfDetDes(cam->getCvImage(), cam->ipts, false, 4, 4, 2, blobThreshold);
    getMatches(cam->ipts, mrk->ipts, *matches);
}