Code Example #1
File: compat_video.cpp  Project: BRAINSia/opencv
CV_IMPL int
cvCamShift( const void* imgProb, CvRect windowIn,
            CvTermCriteria criteria,
            CvConnectedComp* comp,
            CvBox2D* box )
{
    cv::Mat img = cv::cvarrToMat(imgProb);                     // view the C array as a cv::Mat (no copy)
    cv::Rect window = windowIn;
    cv::RotatedRect rr = cv::CamShift(img, window, criteria);  // window is updated in place

    if( comp )
    {
        // report the converged window and the probability mass inside the
        // fitted box's bounding rectangle, clipped to the image
        comp->rect = cvRect(window);
        cv::Rect roi = rr.boundingRect() & cv::Rect(0, 0, img.cols, img.rows);
        comp->area = cvRound(cv::sum(img(roi))[0]);
    }

    if( box )
        *box = cvBox2D(rr);

    // succeed only if CamShift produced a box with non-zero area
    return rr.size.width*rr.size.height > 0.f ? 1 : -1;
}
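
For orientation, here is a minimal usage sketch of this compatibility wrapper; it is not part of the original file. The trackOnce helper, the precomputed back-projection input and the termination-criteria values are illustrative assumptions, and the include paths assume OpenCV 3.x, where the legacy cvCamShift declaration lives in opencv2/video/tracking_c.h.

#include <opencv2/core.hpp>
#include <opencv2/core/core_c.h>
#include <opencv2/imgproc/types_c.h>    // CvConnectedComp
#include <opencv2/video/tracking_c.h>   // legacy cvCamShift declaration (OpenCV 3.x)

// Hypothetical caller: run one CamShift step on a precomputed, continuous,
// single-channel back-projection map and advance the search window.
static int trackOnce(const cv::Mat& backproj, CvRect& searchWindow)
{
    // Wrap the cv::Mat header in a CvMat without copying (assumes backproj is continuous).
    CvMat prob = cvMat(backproj.rows, backproj.cols, backproj.type(), backproj.data);

    CvConnectedComp comp;
    CvBox2D box;
    CvTermCriteria crit = cvTermCriteria(CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1.0);

    int found = cvCamShift(&prob, searchWindow, crit, &comp, &box);
    searchWindow = comp.rect;   // converged window, reused for the next frame
    return found;               // 1 if the fitted box has non-zero area, -1 otherwise
}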
Code Example #2
File: cvparticlefilter.cpp  Project: liangfu/dnn
void CvParticleFilter::initialize(CvRect roi)
{
  // initialize particle filter
  CvParticle * init_particle = cvCreateParticle( N_p/*num_states*/, 1 );
  float lin = m_imsize.width/160.f;  // scale factor relative to a 160px-wide frame
  float bound = 5.f*lin;             // per-side margin, in pixels
  fprintf(stderr, "roi: %d,%d,%d,%d\n", roi.x,roi.y,roi.width,roi.height);
  // reject ROIs that touch or exceed the image border
  if ( (roi.x<1) || (roi.y<1) ||
       (roi.width >m_imsize.width-1) ||
       (roi.height>m_imsize.height-1) )
  {
    fprintf(stderr, "WARNING: initializing at image boundary, rejected!\n");
    return;
  }
  // inset the ROI by the margin on every side before seeding the filter
  CvRect initROI = cvRect(roi.x+bound,
                          roi.y+bound,
                          roi.width -bound*2.f,
                          roi.height-bound*2.f);
  CvBox2D box = cvBox2DFromRect( initROI ); // convert to a center-based CvBox2D

  cvParticleStateSet( init_particle, 0, box );
  cvParticleInit( m_particle, init_particle );   // populate the filter from this single initial state
  cvReleaseParticle( &init_particle );

  {
    // per-state spread passed to cvParticleStateConfig: the x/y/width/height
    // terms scale with image width, the rotation term does not
    float dx=1.8f,dy=1.8f,dw=.01f,dh=.01f,dtheta=.05f;
    CvBox2D box = cvBox2D(dx*lin, dy*lin, dw*lin, dh*lin, dtheta);
    cvParticleStateConfig( m_particle, m_imsize, box);
  }

  {
    // CvMat * imgYpatch  =
    //     cvCreateMat(initROI.height, initROI.width, CV_8U);
    // CvMat * imgUpatch  =
    //     cvCreateMat(initROI.height, initROI.width, CV_8U);
    // CvMat * imgVpatch  =
    //     cvCreateMat(initROI.height, initROI.width, CV_8U);
    // cvGetSubRect(m_imgY, imgYpatch, initROI);
    // cvGetSubRect(m_imgU, imgUpatch, initROI);
    // cvGetSubRect(m_imgV, imgVpatch, initROI);
    // // CV_SHOW(imgYpatch);
    // m_observer.update(imgYpatch, imgUpatch, imgVpatch);

    // int pnr=(roi.height/2)*2,pnc=(roi.width/2)*2;
    // CvMat * imgYpatch = cvCreateMat(pnr, pnc, CV_8U);
    // CvMat * imgUpatch = cvCreateMat(pnr, pnc, CV_8U);
    // CvMat * imgVpatch = cvCreateMat(pnr, pnc, CV_8U);
    // CvMat * mask = cvCreateMat(pnr, pnc, CV_8U);
    // float warp_p_data[3]={1,roi.x,roi.y};
    // CvMat warp_p = cvMat(3,1,CV_32F,warp_p_data);
    // icvWarp(m_imgY, imgYpatch, &warp_p);
    // icvWarp(m_imgU, imgUpatch, &warp_p);
    // icvWarp(m_imgV, imgVpatch, &warp_p);

    // CvMat * subdx = cvCreateMat(pnr, pnc, CV_8U);
    // CvMat * subdy = cvCreateMat(pnr, pnc, CV_8U);
    // icvWarp(m_tracker.dx, subdx, &warp_p);
    // icvWarp(m_tracker.dy, subdy, &warp_p);

    // m_observer.update(imgYpatch, imgUpatch, imgVpatch, mask);
    m_observer.initialize(roi); // initialize the observer directly from the ROI (patch-based variants above left commented out)

    // cvReleaseMat(&subdx);
    // cvReleaseMat(&subdy);

    // cvReleaseMat(&imgYpatch);
    // cvReleaseMat(&imgUpatch);
    // cvReleaseMat(&imgVpatch);
    // cvReleaseMat(&mask);
  }
    
  m_initialized = 1;
}
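
To make the margin arithmetic above easier to follow in isolation, here is a small standalone sketch; it is not part of the project, and the shrinkInitROI name is purely illustrative. The per-side margin grows with the image width relative to a 160-pixel baseline, and the ROI is inset by that margin before being converted to the initial particle state.

#include <opencv2/core/types_c.h>
#include <cstdio>

// Illustrative reimplementation of the margin step in CvParticleFilter::initialize().
static CvRect shrinkInitROI(CvRect roi, CvSize imsize)
{
    float lin   = imsize.width / 160.f;  // scale factor relative to a 160px-wide frame
    float bound = 5.f * lin;             // per-side margin in pixels

    // Truncate, as the original does implicitly when passing floats to cvRect.
    return cvRect((int)(roi.x + bound),
                  (int)(roi.y + bound),
                  (int)(roi.width  - bound * 2.f),
                  (int)(roi.height - bound * 2.f));
}

int main()
{
    // For a 320x240 frame the margin is 10 px per side: (40,40,80x60) -> (50,50,60x40).
    CvRect r = shrinkInitROI(cvRect(40, 40, 80, 60), cvSize(320, 240));
    std::printf("init ROI: %d,%d %dx%d\n", r.x, r.y, r.width, r.height);
    return 0;
}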