bool AdaptiveHistogramCamshift::ComputeCamshift(const IplImage* hue, const IplImage* mask)
{
  // Compute the backprojection, restricted to the caller-supplied mask
  cvCalcBackProject(&hue, m_imgBackproject, m_hist);
  cvAnd(m_imgBackproject, mask, m_imgBackproject, 0);

  // Initialize velocity estimation: remember the current track position/area
  m_trackPosTwoFramesBack = cvPoint(static_cast<int>(m_trackBox.center.x),
                                    static_cast<int>(m_trackBox.center.y));
  m_trackAreaTwoFramesBack = m_trackBox.size.width * m_trackBox.size.height;

  // DEBUG track window area
  //printf("track wnd area: %f\n", m_trackBox.size.width * m_trackBox.size.height);

  // Compute camshift this frame
  CvConnectedComp trackComp;
  assert((m_trackWindow.height > 0) && (m_trackWindow.width > 0));
  CvBox2D trackBox;
  const int camShiftRes = cvCamShift(m_imgBackproject,
                                     m_trackWindow,
                                     cvTermCriteria(CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1),
                                     &trackComp,
                                     &trackBox);
  if (camShiftRes >= 0)
  {
    m_trackBox = trackBox;
    m_trackCompRect = trackComp.rect;
    return true;
  }
  else
  {
    return false;
  }
}
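For context, ComputeCamshift assumes the caller has already split out a hue plane and built a saturation/value mask. A minimal sketch of that preparation, following the same pattern as the later examples in this listing (frame, smin, vmin, vmax, and the tracker instance are assumptions, not part of the original class):

// Hedged sketch: prepare the hue and mask inputs that ComputeCamshift expects.
IplImage* hsv  = cvCreateImage(cvGetSize(frame), 8, 3);
IplImage* hue  = cvCreateImage(cvGetSize(frame), 8, 1);
IplImage* mask = cvCreateImage(cvGetSize(frame), 8, 1);

cvCvtColor(frame, hsv, CV_BGR2HSV);                       // BGR -> HSV
cvInRangeS(hsv, cvScalar(0, smin, MIN(vmin, vmax), 0),    // drop dark and
           cvScalar(180, 256, MAX(vmin, vmax), 0), mask); // desaturated pixels
cvSplit(hsv, hue, 0, 0, 0);                               // isolate the hue plane

tracker.ComputeCamshift(hue, mask);                       // hypothetical instance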
Example #2
CvRect combi_track(IplImage * pImg, KalmanFilter &kfilter)
{
	CvRect predrect = kfilter.predictionReport(prevHandRect);
	//if((predrect.x<0)||(predrect.y<0)||((predrect.x+predrect.width)>pImg->width)||((predrect.y+predrect.height)>pImg->height))
	//	return NULL;
	CvConnectedComp components;

	// Create a new hue image
	updateHueImage(pImg);

	// Create a probability image based on the hand histogram
	cvCalcBackProject( &pHueImg, pProbImg, pHist );
	cvAnd( pProbImg, pMask, pProbImg, 0 );
	//cvSetImageROI(pProbImg,predrect);

	// Use CamShift to find the center of the new hand probability,
	// but only if the predicted window lies fully inside the image
	if (!((predrect.x < 0) || (predrect.y < 0) ||
	      ((predrect.x + predrect.width) > pImg->width) ||
	      ((predrect.y + predrect.height) > pImg->height)))
	{
		cvCamShift( pProbImg, predrect,
		            cvTermCriteria( CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1 ),
		            &components, handBox );
		// Update hand location and angle
		prevHandRect = components.rect;
	}
	else
	{
		//cvCamShift( pProbImg, prevHandRect, cvTermCriteria( CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1 ),&components, handBox );
		prevHandRect.x = -1; // flag that the prediction was out of bounds
	}

	return prevHandRect;
}
Example #3
void CamShift::DoExternal(IplImage* image)
{
  ASSERT(m_pProbProv);

  ASSERT(m_comp.rect.width != 0 && m_comp.rect.height != 0);
  
  CvRect rect = m_comp.rect;
  if( rect.x < 0 )
      rect.x = 0;
  if( rect.x + rect.width > m_img_width )
      rect.width = m_img_width - rect.x;
  if( rect.y < 0 )
      rect.y = 0;
  if( rect.y + rect.height > m_img_height )
      rect.height = m_img_height - rect.y;

  CRect roi;
  roi.left = m_comp.rect.x-m_comp.rect.width/2;
  roi.right = roi.left+m_comp.rect.width*2;
  roi.top = m_comp.rect.y-m_comp.rect.height/2;
  roi.bottom = roi.top+m_comp.rect.height*2;

  m_pProbProv->CreateMap(image, m_prob_map, roi);

  cvCamShift(m_prob_map, rect,
	     cvTermCriteria(CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1),
	     &m_comp, &m_box );

  if( m_comp.rect.width == 0 || m_comp.rect.height == 0 )
      m_comp.rect = rect; // do not allow the tracker to lose the object
}
Example #4
CvBox2D CamShiftPatch::getTrackBox(CvScalar maskRange, CvHistogram *hist)
{
	IplImage* backproject = cvCreateImage(cvGetSize(originImage), 8, 1); // backprojection buffer, single channel
	IplImage* hue = cvCreateImage(cvGetSize(originImage), 8, 1);
	IplImage *mask = getInRangeMask(maskRange, hue);

	cvCalcBackProject(&hue, backproject, hist); // back-project the histogram over the hue plane
	cvAnd(backproject, mask, backproject, 0);   // AND the backprojection with the mask, result back into backproject

	CvConnectedComp track_comp;
	CvBox2D track_box; // oriented box returned by tracking (includes angle)

	CvRect zero;
	zero.x = 0; zero.y = 0; zero.width = 320; zero.height = 240; // assumes a 320x240 frame
	track_window = zero;

	for (int i = 0; i < 10; i++)
	{
		cvCamShift(
			backproject,    // color probability distribution image
			track_window,   // initial search window
			cvTermCriteria(CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1), // criteria for stopping the search
			&track_comp,    // receives the result: the new search window position and area
			&track_box      // minimal oriented rectangle containing the tracked object
			);
		track_window = track_comp.rect;
	}

	cvReleaseImage(&backproject);
	cvReleaseImage(&hue);
	cvReleaseImage(&mask);

	return track_box;
}
Example #5
CamShift::Box CamShift::Track(const ImgBgr& img)
{
  cvCopy( ImgIplImage(img), image, 0 );
  cvCvtColor( image, hsv, CV_BGR2HSV );
  cvFlip( hsv, hsv, 0 );
  int _vmin = vmin, _vmax = vmax;

  cvInRangeS( hsv, cvScalar(0,smin,MIN(_vmin,_vmax),0),
              cvScalar(180,256,MAX(_vmin,_vmax),0), mask );
  cvSplit( hsv, hue, 0, 0, 0 );
  cvCalcBackProject( &hue, backproject, hist );
  //cvSaveImage("backproject.bmp", backproject);
  cvAnd( backproject, mask, backproject, 0 );
  //cvSaveImage("backproject.bmp", backproject);
  cvCamShift( backproject, track_window,
    cvTermCriteria( CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1 ),
    &track_comp, &track_box );
  track_window = track_comp.rect;

  Box result;
  result.angle= track_box.angle;
  result.center.x= static_cast<LONG>( track_box.center.x );
  result.center.y= static_cast<LONG>( img.Height()-track_box.center.y-1 );
  result.size.cy = static_cast<LONG>( track_box.size.width );
  result.size.cx = static_cast<LONG>( track_box.size.height );
  return result;
}
Example #6
void WebCamData::trackFace() {
  CvConnectedComp comps;
  updateHugeImage(d->data);

  cvCalcBackProject(&d->hueImage, d->prob, d->histogram);
  cvAnd(d->prob, d->mask, d->prob, 0);
  CvBox2D box;
  cvCamShift(d->prob, d->faceRect,
             cvTermCriteria(CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1), &comps,
             &box);

  d->faceRect = comps.rect;

  int radius = cvRound((d->faceRect.width + d->faceRect.height) * 0.25);
  CvPoint center;
  center.x = cvRound(d->faceRect.x + d->faceRect.width * 0.5);
  center.y = cvRound(d->faceRect.y + d->faceRect.height * 0.5);
  /*
      qDebug() << Q_FUNC_INFO
      << comps.rect.x
      << comps.rect.y
      << comps.rect.width
      << comps.rect.height
      << box.angle
      << center.x
      << center.y
      << radius;
   */
  d->dataMap.clear();
  d->dataMap["z"] = QVariant(radius);
  d->dataMap["x"] = QVariant(center.x);
  d->dataMap["y"] = QVariant(center.y);
  d->dataMap["angle"] = QVariant(box.angle);
  Q_EMIT dataReady();
}
Example #7
// Input: next: the next frame; obj: the offline tracking object
int camshift(const IplImage* next, TrackObject* obj)
{
  IplImage* img = cvCloneImage(next);
  IplImage* hsv_next = cvCreateImage(cvGetSize(next), IPL_DEPTH_8U, 3); // frame size may change: no global/static buffers
  IplImage* h_next_8 = cvCreateImage(cvGetSize(next), IPL_DEPTH_8U, 1);
  IplImage* h_next   = cvCreateImage(cvGetSize(next), IPL_DEPTH_32F, 1);
  IplImage* img_bp   = cvCreateImage(cvGetSize(next), IPL_DEPTH_32F, 1);
  CvConnectedComp track_comp;

  //CvRect search_window	= CALC_RECT(CALC_RECT_CENTER_X(eye), CALC_RECT_CENTER_Y(eye), WINDOW_W, WINDOW_H);

  //Conversion
  cvCvtColor(next, hsv_next, CV_BGR2HSV);
  cvSplit(hsv_next, h_next_8, 0, 0, 0);
  cvConvertScale(h_next_8, h_next, 1, 0);

  cvCalcBackProject(&h_next, img_bp, obj->hist);

  int iteration;
  iteration = cvCamShift(img_bp, obj->track_window, 
	cvTermCriteria( CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, MAX_ITER, EPSILON), 
	&track_comp, &obj->track_box);
  obj->track_window = track_comp.rect;

  cvReleaseImage(&img_bp);
  cvReleaseImage(&h_next);
  cvReleaseImage(&h_next_8);
  cvReleaseImage(&hsv_next);
  cvReleaseImage(&img);

  return(iteration);
}
Example #8
RotatedRect CamShift( const Mat& probImage, Rect& window,
                      TermCriteria criteria )
{
    CvConnectedComp comp;
    CvBox2D box;
    CvMat _probImage = probImage;
    cvCamShift(&_probImage, window, (CvTermCriteria)criteria, &comp, &box);
    window = comp.rect;
    return RotatedRect(Point2f(box.center), Size2f(box.size), box.angle);
}
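A hedged usage sketch of this wrapper from the C++ side: build a hue backprojection with calcBackProject and hand it to CamShift. The histogram hist and the initial window are assumed to come from an earlier user selection; trackOnce is an illustrative name, not part of the library.

#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/video/tracking.hpp>
using namespace cv;

// Hedged sketch: one CamShift step per frame with the C++ API.
RotatedRect trackOnce(const Mat& bgrFrame, const Mat& hist, Rect& window)
{
    Mat hsv, backproj;
    cvtColor(bgrFrame, hsv, CV_BGR2HSV);
    int channels[] = { 0 };                 // back-project the hue channel only
    float hrange[] = { 0, 180 };
    const float* ranges[] = { hrange };
    calcBackProject(&hsv, 1, channels, hist, backproj, ranges);
    // window is updated in place with the converged search window
    return CamShift(backproj, window,
                    TermCriteria(TermCriteria::EPS | TermCriteria::COUNT, 10, 1));
}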
Example #9
void CamShiftPlugin::ProcessStatic
( int i, ImagePlus *img, ImagePlus *oimg, int *hsizes, CvTermCriteria criteria,
  IplImage** &planes, CvHistogram* &hist, IplImage* &backproject,
  CvRect &orect, CvPoint &ocenter, CvRect &searchwin,
  CvMat* &rotation, CvMat* &shift, bool oready ){
	if (hist && hist->mat.dim[0].size!=hsizes[0])
		cvReleaseHist(&hist);
	if( !hist )
        hist = cvCreateHist( 3, hsizes, CV_HIST_ARRAY, NULL, 0);
    if( !backproject )
		backproject = cvCreateImage( cvGetSize(img->orig), IPL_DEPTH_8U, 1 );
	if( !planes ){
	    planes = (IplImage**) malloc(3 * sizeof(IplImage*));
        for (int p=0; p<3; p++)
			planes[p] = cvCreateImage( cvGetSize(img->orig), 8, 1 );
	}
	if (!rotation)
		rotation = cvCreateMat(2,3,CV_32FC1);
	if (!shift)
		shift = cvCreateMat(2,1,CV_32FC1);

	if (!oready){
		orect = cvBoundingRect(oimg->contourArray[i],1);
		cvCvtPixToPlane( oimg->orig, planes[0], planes[1], planes[2], 0 );
        for (int p=0; p<3; p++)
            cvSetImageROI(planes[p],orect);
        cvCalcHist( planes, hist, 0, NULL );
		cvNormalizeHist(hist, 255);
        for (int p=0; p<3; p++)
            cvResetImageROI(planes[p]);
		searchwin = orect; //cvRect(0,0,img->orig->width, img->orig->height);
		ocenter = cvPoint(orect.x+orect.width/2, orect.y+orect.height/2);
	}
	//The following checks shouldn't be needed.
	RestrictRect(searchwin, cvRect(0,0,backproject->width,backproject->height));

	cvCvtPixToPlane( img->orig, planes[0], planes[1], planes[2], 0 );
    cvCalcBackProject( planes, backproject, hist );
	CvBox2D track_box;
	CvConnectedComp track_comp;
    cvCamShift( backproject, searchwin,
                criteria,
                &track_comp, &track_box );
	searchwin = track_comp.rect;
	cvmSet(shift,0,0,track_box.center.x - ocenter.x);
	cvmSet(shift,1,0,track_box.center.y - ocenter.y);
//	shift->data.fl[0] = track_box.center.x - ocenter.x;
//	shift->data.fl[1] = track_box.center.y - ocenter.y;
	cv2DRotationMatrix(track_box.center, track_box.angle, 1.0, rotation);
	cvTransform(oimg->contourArray[i],img->contourArray[i],rotation,shift);
//	CvMat *ofm = FeatPointsToMat(oimg->feats[i]);
//	Cvmat *fm  = FeatPointsToMat(img->feats[i]);
//	cvTransform(ofm,img->contourArray[i],rotation,shift);
	TransformFeatPoints(oimg->feats[i], img->feats[i], rotation, shift);
}
Example #10
void HandDetect::skinDetect()
{
	setImage();
	cvFlip(image, image, 1);

	hsv = cvCreateImage(cvGetSize(image), 8, 3);
	msk = cvCreateImage(cvGetSize(image), 8, 1);
	hue = cvCreateImage(cvGetSize(image), 8, 1);
	
	backproject1 = cvCreateImage(cvGetSize(image), 8, 1);
	backproject2 = cvCreateImage(cvGetSize(image), 8, 1);

	cvCvtColor(image, hsv, CV_RGB2HSV);
	cvInRangeS(hsv, cvScalar(0, smin, MIN(vmin, vmax), 0), cvScalar(180, 256, MAX(vmin, vmax), 0), msk);
	cvSplit(hsv, hue, 0, 0, 0);
		
	cvCalcBackProject(&hue, backproject1, hist1);
	cvCalcBackProject(&hue, backproject2, hist2);
		
	cvThreshold(backproject1, backproject1, 50, 255, CV_THRESH_BINARY | CV_THRESH_OTSU);
	cvThreshold(backproject2, backproject2, 50, 255, CV_THRESH_BINARY | CV_THRESH_OTSU);

	cvOr(backproject1, backproject2, backproject, 0);

	cvErode(backproject, backproject, 0, 1);
	cvDilate(backproject, backproject, 0, 1);
	cvAnd(backproject, msk, backproject, 0);


	if(track_box.center.x!=-1&&track_box.center.y!=-1)
		preCen=cvPoint(handCen.x, handCen.y);
	else
		preCen=cvPoint(0,0);

	cvCamShift(backproject, track_window, cvTermCriteria(CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1), &track_comp, &track_box);
	if(track_comp.rect.height>0&&track_comp.rect.width>0)
		track_window = track_comp.rect;
	else
	{
		track_box.center.x=-1;
		track_box.center.y=-1;
	}
	

	cvReleaseImage(&hsv);
	cvReleaseImage(&msk);
	cvReleaseImage(&hue);
	cvReleaseImage(&backproject1);
	cvReleaseImage(&backproject2);
}
Example #11
int track(camshift * cs, IplImage * img, CvBox2D * fBox)
{
  CvConnectedComp components;

  updateHueImage(cs, img);

  cvCalcBackProject( &cs->hueImg, cs->probImg, cs->hist );
  cvAnd( cs->probImg, cs->mask, cs->probImg, 0 );

  CvSize size = cvGetSize(cs->probImg);
//  printf("%d %d %d %d\n", cs->prevFaceRect.x, cs->prevFaceRect.y, cs->prevFaceRect.width, cs->prevFaceRect.height);
  if (cs->prevFaceRect.x <= 0) {
    return 0;
  }
  if (cs->prevFaceRect.x > size.width) {
    return 0;
  }
  if (cs->prevFaceRect.y <= 0) {
    return 0;
  }
  if (cs->prevFaceRect.y > size.height) {
    return 0;
  }

  if (cs->prevFaceRect.x + cs->prevFaceRect.width > size.width) {
    return 0;
  }
  if (cs->prevFaceRect.y + cs->prevFaceRect.height > size.height) {
    return 0;
  }
  if (cs->prevFaceRect.width <= 0) {
    return 0;
  }
  if (cs->prevFaceRect.height <= 0) {
    return 0;
  }


  cvCamShift( cs->probImg, cs->prevFaceRect,
      cvTermCriteria( CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1 ),
      &components, &cs->faceBox );

  cs->prevFaceRect = components.rect;
  cs->faceBox.angle = -cs->faceBox.angle;

  *fBox = cs->faceBox;
  return 1;
}
Example #12
//////////////////////////////////
// track()
//
CvBox2D track(camshift * cs, IplImage * pImg)
{
	CvConnectedComp components;

	// Create a new hue image
	updateHueImage(cs, pImg);

	// Create a probability image based on the face histogram
	cvCalcBackProject( &cs->pHueImg, cs->pProbImg, cs->pHist );
    cvAnd( cs->pProbImg, cs->pMask, cs->pProbImg, 0 );

	// Use CamShift to find the center of the new face probability
    CvSize size = cvGetSize(cs->pProbImg);
    if (cs->prevFaceRect.x < 0) {
      cs->prevFaceRect.x = 0;
    }
    if (cs->prevFaceRect.x >= size.width) {
      cs->prevFaceRect.x = size.width - 1;
    }
    if (cs->prevFaceRect.y < 0) {
      cs->prevFaceRect.y = 0;
    }
    if (cs->prevFaceRect.y >= size.height) {
      cs->prevFaceRect.y = size.height - 1;
    }

    if (cs->prevFaceRect.x + cs->prevFaceRect.width > size.width) {
      cs->prevFaceRect.width = size.width - cs->prevFaceRect.x;
    }
    if (cs->prevFaceRect.y + cs->prevFaceRect.height > size.height) {
      cs->prevFaceRect.height = size.height - cs->prevFaceRect.y;
    }


    cvCamShift( cs->pProbImg, cs->prevFaceRect,
                cvTermCriteria( CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1 ),
                &components, &cs->faceBox );

	// Update face location and angle
    cs->prevFaceRect = components.rect;
	cs->faceBox.angle = -cs->faceBox.angle;

	return cs->faceBox;
}
Example #13
/* Given an image and tracked object, return box position. */
CvBox2D FaceBl0r::camshift_track_face (IplImage* image, TrackedObj* obj) {
  CvConnectedComp components;

  //create a new hue image
  update_hue_image(image, obj);

  //create a probability image based on the face histogram
  cvCalcBackProject(&obj->hue, obj->prob, obj->hist);
  cvAnd(obj->prob, obj->mask, obj->prob, 0);

  //use CamShift to find the center of the new face probability
  cvCamShift(obj->prob, obj->prev_rect,
             cvTermCriteria(CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1),
             &components, &obj->curr_box);

  //update face location and angle
  obj->prev_rect = components.rect;
  obj->curr_box.angle = -obj->curr_box.angle;

  return obj->curr_box;
}
Example #14
//////////////////////////////////
// track()
//
CvRect camshift_track(IplImage * pImg)
{
	CvConnectedComp components;

	// Create a new hue image
	updateHueImage(pImg);

	// Create a probability image based on the hand histogram
	cvCalcBackProject( &pHueImg, pProbImg, pHist );
    cvAnd( pProbImg, pMask, pProbImg, 0 );
	//cvSetImageROI(pProbImg,predrect);
	
	// Use CamShift to find the center of the new hand probability
    cvCamShift( pProbImg, prevHandRect2, cvTermCriteria( CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1 ),&components, handBox );

	// Update hand location and angle
    prevHandRect2 = components.rect;
	//if(!pImg->origin)	
	//	handBox->angle = -handBox->angle;
	//cvResetImageROI(pProbImg);
	return prevHandRect2;
}
Example #15
void BoatDetecting::startTrackObject(){
	cvInRangeS(hsv, cvScalar(0, smin, MIN(vmin, vmax), 0), cvScalar(180, 256, MAX(vmin, vmax), 0), mask);
	// 10,256,30

	cvSplit(hsv, hue, 0, 0, 0);
	if (!isTrackingInitialized){ // if the tracking window has not been initialized
		float max_val = 0.f;		
		cvSetImageROI(hue, selection);
		cvSetImageROI(mask, selection);		
		cvCalcHist(&hue, hist, 0, mask);
		cvGetMinMaxHistValue(hist, 0, &max_val, 0, 0);
		cvConvertScale(hist->bins, hist->bins, max_val ? 255. / max_val : 0., 0);
		cvResetImageROI(hue);
		cvResetImageROI(mask);
		trackWindow = selection;
		isTrackingInitialized = true;

	}

	cvCalcBackProject(&hue, backproject, hist);
	//cvShowImage("Hue Channel",backproject);
	
	cvAnd(backproject, mask, backproject, 0);
	
	//if (trackWindow.x + trackWindow.width/2< allfWidth &&trackWindow.y + trackWindow.height/2< allfHeight &&trackWindow.x>0)
	if (trackWindow.x + trackWindow.width < allfWidth && trackWindow.y + trackWindow.height < allfHeight && trackWindow.x > 0)
		cvCamShift(backproject, trackWindow, cvTermCriteria(CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 20, 1), &trackComp, 0); // once initialized, trackWindow itself drives the tracking and is updated every frame
	

	//if (trackComp.rect.width<90 && trackComp.rect.y<200){
	//	trackWindow = trackComp.rect;
	//}
	//if (trackComp.rect.y>200)
	//{
	//	trackWindow = trackComp.rect;
	//}
	trackWindow = trackComp.rect;
	
}
Example #16
//////////////////////////////////
// track()
//
CvBox2D track(IplImage * pImg)
{
	CvConnectedComp components;

	// Create a new hue image
	updateHueImage(pImg);

	// Create a probability image based on the face histogram
	cvCalcBackProject( &pHueImg, pProbImg, pHist );
    cvAnd( pProbImg, pMask, pProbImg, 0 );

	// Use CamShift to find the center of the new face probability
    cvCamShift( pProbImg, prevFaceRect,
                cvTermCriteria( CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1 ),
                &components, &faceBox );

	// Update face location and angle
    prevFaceRect = components.rect;
	faceBox.angle = -faceBox.angle;

	return faceBox;
}
Example #17
CvConnectedComp* CAMShift::track(const Image* image, const Mask* probabilisticMap)
{
    if (!m_trackingHist)
        return NULL;
        
    //Obtain the probability image from the histogram's back-projection
    Mask* probimage = m_trackingHist->calcBackProjection(image);
    if (probabilisticMap)
        (*probimage) = (*probabilisticMap);

    if ((m_lastPostition.width < 0) || (m_lastPostition.height < 0)
        || (m_lastPostition.x > image->width()) || (m_lastPostition.y > image->height()))
        return &m_components;

    //cvTermCriteria with CV_TERMCRIT_ITER specifies the number of iterations allowed for finding the center of mass
    //cvTermCriteria with CV_TERMCRIT_EPS specifies the maximum error of the result
    cvCamShift(probimage->cvImage(), m_lastPostition, cvTermCriteria(CV_TERMCRIT_EPS, 256, .001l), &m_components, m_box);
        
    //Update lastPosition
    m_lastPostition = m_components.rect;
        
    return &m_components;
}
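The criteria comments above generalize: cvTermCriteria can stop on an iteration budget, on convergence (epsilon), or on whichever comes first. An illustrative comparison (the values are arbitrary):

// Stop after at most 10 mean-shift steps, or sooner once the window moves < 1 pixel:
CvTermCriteria both = cvTermCriteria(CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 10, 1);
// Convergence-only, as in CAMShift::track above (without CV_TERMCRIT_ITER the
// max-iteration field may be replaced by a library default):
CvTermCriteria eps  = cvTermCriteria(CV_TERMCRIT_EPS, 256, 0.001);
// A fixed number of steps, regardless of convergence:
CvTermCriteria iter = cvTermCriteria(CV_TERMCRIT_ITER, 10, 0);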
Example #18
void CamShift::Track(IplImage *frame, CvRect &selection, bool calc_hist)
{

	cvCvtColor( frame, _hsv, CV_BGR2HSV );

	cvInRangeS( _hsv, cvScalar(0,_smin,MIN(_vmin,_vmax),0),
		cvScalar(180,256,MAX(_vmin,_vmax),0), _mask );
	cvSplit( _hsv, _hue, 0, 0, 0 );

	if(calc_hist)
	{
		float max_val = 0.f;
		cvSetImageROI( _hue, selection );
		cvSetImageROI( _mask, selection );
		cvCalcHist( &_hue, _hist, 0, _mask );
		cvGetMinMaxHistValue( _hist, 0, &max_val, 0, 0 );
		cvConvertScale( _hist->bins, _hist->bins, max_val ? 255. / max_val : 0., 0 );
		cvResetImageROI( _hue );
		cvResetImageROI( _mask );
		_track_window = selection; 
	}

	cvCalcBackProject( &_hue, _backproject, _hist );
	cvAnd( _backproject, _mask, _backproject, 0 );
	cvCamShift( _backproject, _track_window,
		cvTermCriteria( CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1 ),
		&_track_comp, &_track_box );
	_track_window = _track_comp.rect;

	if( frame->origin )
		_track_box.angle = -_track_box.angle;

	selection = cvRect(_track_box.center.x-_track_box.size.width/2, _track_box.center.y-_track_box.size.height/2,
		selection.width, selection.height);
}
Example #19
CvBox2D *MultiCamshiftUI::track(IplImage **images, bool show_ui, bool show_backprojections) {
  for (int camera=0; camera<n_cameras; camera++) {
    /* Calculate the backprojection, in the original (YUV) colorspace */
    cvCvtPixToPlane(images[camera], planes[0], planes[1], planes[2], 0);
    cvCalcBackProject(planes, backprojection, histograms[camera]);

    if (show_ui && show_backprojections) {
      /* Make a YUV version of the output, for display */
      gray_to_yuv(backprojection, yuv_backprojections[camera]);
    }

    if (search_windows[camera].width > 0 && search_windows[camera].height > 0) {
      /* Use the CAMSHIFT algorithm to search for the object of interest */
      CvConnectedComp comp;
      cvCamShift(backprojection, search_windows[camera],
		 cvTermCriteria(CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 20, 1),
		 &comp, &results[camera]);
      search_windows[camera] = comp.rect;

      if (results[camera].size.width > 0 &&
	  results[camera].size.height > 0) {
	/* We found an interesting object, draw it if applicable */
	if (show_ui)
	  drawResults(camera, images[camera]);
      }
      else {
	/* We lost tracking, expand the search window */
	search_windows[camera].x = 0;
	search_windows[camera].y = 0;
	search_windows[camera].width = image_size.width;
	search_windows[camera].height = image_size.height;
      }
    }
  }

  if (sample_from_sample_square) {
    cvSetImageROI(images[sample_square_camera], sample_square);
    cvSetImageROI(planes[0], sample_square);
    cvSetImageROI(planes[1], sample_square);
    cvSetImageROI(planes[2], sample_square);

    cvCvtPixToPlane(images[sample_square_camera], planes[0], planes[1], planes[2], 0);
    cvCalcHist(planes, histograms[sample_square_camera], 1);

    cvResetImageROI(images[sample_square_camera]);
    cvResetImageROI(planes[0]);
    cvResetImageROI(planes[1]);
    cvResetImageROI(planes[2]);

    /* Also set the windowIn to the sampling rectangle, to point CAMSHIFT at
     * what we're interested in.
     */
    search_windows[sample_square_camera] = sample_square;
  }

  if (show_ui) {
    /* Tile cameras horizontally, with original image on
     * top and backprojection on bottom.
     */
    IplImage* view_grid[n_cameras * 2];
    int num_views = 0;
    for (int i=0; i<n_cameras; i++)
      view_grid[num_views++] = images[i];
    if (show_backprojections) {
      for (int i=0; i<n_cameras; i++)
	view_grid[num_views++] = yuv_backprojections[i];
    }

    if (draw_sample_square) {
      cvRectangle(images[sample_square_camera], cvPoint(sample_square.x-1, sample_square.y-1),
		  cvPoint(sample_square.x + sample_square.width + 1, sample_square.y + sample_square.height + 1),
		  CV_RGB(128,128,255), 1);
    }

    cv_sdl_show_yuv_tiles(view_grid, num_views, n_cameras);
  }

  return results;
}
Example #20
//--------------------------------------------------------------------------------
int ContourFinder::findContours( ofxCvGrayscaleImage&  input,
									  int minArea,
									  int maxArea,
									  int nConsidered,
									  bool bFindHoles,
                                      bool bUseApproximation) {
	reset();

	// OpenCV will clobber the image it detects contours on, so we copy it
	// into a scratch image before detecting contours. That copy is allocated
	// if necessary (necessary = (a) not allocated or (b) wrong size),
	// so be careful if you pass different sized images to "findContours":
	// there is a performance penalty, but we think there is no memory leak
	// to worry about. Better to create multiple ContourFinder objects for
	// different sizes, i.e. if you are finding contours in a 640x480 image
	// but also a 320x240 image, make two ContourFinder objects rather than
	// reuse one, because you will be penalized less.

	if( inputCopy.width == 0 ) {
		inputCopy.allocate( input.width, input.height );
		inputCopy = input;
	} else {
		if( inputCopy.width == input.width && inputCopy.height == input.height ) {
			inputCopy = input;
		} else {
			// we are allocated, but to the wrong size --
			// this has been checked for memory leaks, but a warning:
			// be careful if you call this function with a lot of different
			// sized "input" images! It does an allocation every time
			// a new size is passed in....
			//inputCopy.clear();
			inputCopy.allocate( input.width, input.height );
			inputCopy = input;
		}
	}

	CvSeq* contour_list = NULL;

	contour_storage = cvCreateMemStorage( 1000 );
	storage	= cvCreateMemStorage( 1000 );

	CvContourRetrievalMode  retrieve_mode
        = (bFindHoles) ? CV_RETR_LIST : CV_RETR_EXTERNAL;
    teste = inputCopy.getCvImage();

	cvFindContours( teste, contour_storage, &contour_list,
                    sizeof(CvContour), retrieve_mode, bUseApproximation ? CV_CHAIN_APPROX_SIMPLE : CV_CHAIN_APPROX_NONE );
	CvSeq* contour_ptr = contour_list;

	nCvSeqsFound = 0;

	// put the contours from the linked list, into an array for sorting
	while( (contour_ptr != NULL) ) {
		float area = fabs( cvContourArea(contour_ptr, CV_WHOLE_SEQ) );
		if( (area > minArea) && (area < maxArea) ) {
                if (nCvSeqsFound < TOUCH_MAX_CONTOUR_LENGTH){
				cvSeqBlobs[nCvSeqsFound] = contour_ptr;	 // copy the pointer
                nCvSeqsFound++;
				}
		}
		contour_ptr = contour_ptr->h_next;
	}

	// sort the pointers based on size
	if( nCvSeqsFound > 0 ) {
		qsort( cvSeqBlobs, nCvSeqsFound, sizeof(CvSeq*), qsort_carea_compare);
	}

	// now we have nCvSeqsFound contours, sorted by size in the array
	// cvSeqBlobs; let's get the data out and into our own structures
	for( int i = 0; i < MIN(nConsidered, nCvSeqsFound); i++ ) {
		blobs.push_back( Blob() );
		float area = cvContourArea( cvSeqBlobs[i], CV_WHOLE_SEQ );

		cvMoments( cvSeqBlobs[i], myMoments );

		// this is if using non-angle bounding box
		CvRect rect	= cvBoundingRect( cvSeqBlobs[i], 0 );
		blobs[i].boundingRect.x      = rect.x;
		blobs[i].boundingRect.y      = rect.y;
		blobs[i].boundingRect.width  = rect.width;
		blobs[i].boundingRect.height = rect.height;

        cvCamShift(teste, rect, cvTermCriteria( CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1 ), &track_comp, &track_box);

		// this is for using angle bounding box
		CvBox2D32f box;
		box = cvMinAreaRect2( cvSeqBlobs[i] );

		blobs[i].angleBoundingRect.x	  = box.center.x;
		blobs[i].angleBoundingRect.y	  = box.center.y;
		blobs[i].angleBoundingRect.width  = box.size.height;
		blobs[i].angleBoundingRect.height = box.size.width;
		blobs[i].angle = box.angle;

		// assign other parameters
		blobs[i].area                = fabs(area);
		blobs[i].hole                = area < 0 ? true : false;
		blobs[i].length 			 = cvArcLength(cvSeqBlobs[i]);
		blobs[i].centroid.x			 = (int) (myMoments->m10 / myMoments->m00);
		blobs[i].centroid.y 		 = (int) (myMoments->m01 / myMoments->m00);
		blobs[i].lastCentroid.x 	 = (int) 0;
		blobs[i].lastCentroid.y 	 = (int) 0;

		// get the points for the blob:
		CvPoint           pt;
		CvSeqReader       reader;
		cvStartReadSeq( cvSeqBlobs[i], &reader, 0 );

    	for( int j=0; j < min(TOUCH_MAX_CONTOUR_LENGTH, cvSeqBlobs[i]->total); j++ ) {
			CV_READ_SEQ_ELEM( pt, reader );
            blobs[i].pts.push_back( ofPoint((float)pt.x, (float)pt.y) );
		}
		blobs[i].nPts = blobs[i].pts.size();

	}

    nBlobs = blobs.size();

	// Free the storage memory.
	// Warning: do this inside this function, otherwise there is a strange memory leak
	if( contour_storage != NULL ) { cvReleaseMemStorage(&contour_storage); }
	if( storage != NULL ) { cvReleaseMemStorage(&storage); }

	return nBlobs;
}
Example #21
t_jit_err cv_jit_shift_matrix_calc(t_cv_jit_shift *x, void *inputs, void *outputs)
{
	t_jit_err			err=JIT_ERR_NONE;
	long				in_savelock = 0;
	t_jit_matrix_info	in_minfo;
	void				*in_matrix;
	CvMat				source;
	CvRect				rectangle;
	CvBox2D				box;
	CvConnectedComp		component;
	CvPoint2D32f		vertices[4];
	float				w,h,c,s;
	
	//Get pointer to matrix
	in_matrix 	= jit_object_method(inputs,_jit_sym_getindex,0);

	if (x&&in_matrix) 
	{
		//Lock the matrix
		in_savelock = (long) jit_object_method(in_matrix,_jit_sym_lock,1);
		
		//Make sure input is of proper format
		jit_object_method(in_matrix,_jit_sym_getinfo,&in_minfo);

		if(in_minfo.dimcount != 2)
		{
			err = JIT_ERR_MISMATCH_DIM;
			goto out;
		}
		if(in_minfo.planecount != 1)
		{
			err = JIT_ERR_MISMATCH_PLANE;
			goto out;
		}
		if(in_minfo.type != _jit_sym_char)
		{
			err = JIT_ERR_MISMATCH_TYPE;
			goto out;
		}

		//Don't process if image is too small
		if((in_minfo.dim[0] < 2)||(in_minfo.dim[1] < 2))
			goto out;
			
		//Calculate start rectangle:
		rectangle = cvRect(x->rect[0],x->rect[1],x->rect[2]-x->rect[0],x->rect[3]-x->rect[1]);
		CLIP_ASSIGN(rectangle.x,0,in_minfo.dim[0]-1);
		CLIP_ASSIGN(rectangle.y,0,in_minfo.dim[1]-1);
		CLIP_ASSIGN(rectangle.width,1,in_minfo.dim[0]-rectangle.x);
		CLIP_ASSIGN(rectangle.height,1,in_minfo.dim[1]-rectangle.y);

		//Convert Jitter matrix to OpenCV matrix
		cvJitter2CvMat(in_matrix, &source);
		
		//Calculate camshift
		if(x->mode == 1) //Use camshift
			cvCamShift(&source, rectangle, cvTermCriteria(CV_TERMCRIT_ITER|CV_TERMCRIT_EPS,(int)x->maxiters,x->epsilon), &component, &box );
		else {
			cvMeanShift(&source, rectangle, cvTermCriteria(CV_TERMCRIT_ITER|CV_TERMCRIT_EPS,(int)x->maxiters,x->epsilon), &component);
			box.angle = 90.f;
			box.size = cvSize2D32f(component.rect.width, component.rect.height);
			box.center = cvPoint2D32f((float)component.rect.x + (float)component.rect.width * 0.5f,(float)component.rect.y + (float)component.rect.height * 0.5f);
		}
		
		//Prepare output
		//
		jit_atom_setlong(&x->box[0],component.rect.x);
		jit_atom_setlong(&x->box[1],component.rect.y);
		jit_atom_setlong(&x->box[2],component.rect.x + component.rect.width);
		jit_atom_setlong(&x->box[3],component.rect.y + component.rect.height);
		
		x->rect[0]=component.rect.x;
		x->rect[1]=component.rect.y;
		x->rect[2]=component.rect.x + component.rect.width;
		x->rect[3]=component.rect.y + component.rect.height;
		
		//cvBoxPoints(box,vertices);
		w = box.size.width * 0.5;
		h = box.size.height * 0.5;
		c = cos((box.angle - 90.f) * -0.01745329252); // degrees to radians
		s = sin((box.angle - 90.f) * -0.01745329252);
		
		vertices[0].x = box.center.x - s*h - c*w;
		vertices[0].y = box.center.y - c*h + s*w;
		vertices[1].x = box.center.x - s*h + c*w;
		vertices[1].y = box.center.y - c*h - s*w;
		vertices[2].x = box.center.x + s*h + c*w;
		vertices[2].y = box.center.y + c*h - s*w;
		vertices[3].x = box.center.x + s*h - c*w;
		vertices[3].y = box.center.y + c*h + s*w;
		
		jit_atom_setlong(&x->frame[0],(long)vertices[0].x);
		jit_atom_setlong(&x->frame[1],(long)vertices[0].y);
		jit_atom_setlong(&x->frame[2],(long)vertices[1].x);
		jit_atom_setlong(&x->frame[3],(long)vertices[1].y);
		jit_atom_setlong(&x->frame[4],(long)vertices[2].x);
		jit_atom_setlong(&x->frame[5],(long)vertices[2].y);
		jit_atom_setlong(&x->frame[6],(long)vertices[3].x);
		jit_atom_setlong(&x->frame[7],(long)vertices[3].y);
		
		x->mass = (float)(component.area / 256.);
		}
	
out:
	jit_object_method(in_matrix,gensym("lock"),in_savelock);
	return err;
}
Example #22
//=========================================
CvRect camKalTrack(IplImage* frame, camshift_kalman_tracker& camKalTrk) {
//=========================================
	if (!frame) {
		printf("Input frame empty!\n");
		return camKalTrk.trackWindow; // nothing to do without a frame
	}

	cvCopy(frame, camKalTrk.image, 0);
	cvCvtColor(camKalTrk.image, camKalTrk.hsv, CV_BGR2HSV); // BGR to HSV

	if (camKalTrk.trackObject) {
		int _vmin = vmin, _vmax = vmax;
		cvInRangeS(camKalTrk.hsv, cvScalar(0, smin, MIN(_vmin,_vmax), 0), cvScalar(180, 256, MAX(_vmin,_vmax), 0), camKalTrk.mask); // MASK
		cvSplit(camKalTrk.hsv, camKalTrk.hue, 0, 0, 0); //  HUE
		if (camKalTrk.trackObject < 0) {
			float max_val = 0.f;
			boundaryCheck(camKalTrk.originBox, frame->width, frame->height);
			cvSetImageROI(camKalTrk.hue, camKalTrk.originBox); // for ROI
			cvSetImageROI(camKalTrk.mask, camKalTrk.originBox); // for camKalTrk.mask
			cvCalcHist(&camKalTrk.hue, camKalTrk.hist, 0, camKalTrk.mask); //
			cvGetMinMaxHistValue(camKalTrk.hist, 0, &max_val, 0, 0);
			cvConvertScale(camKalTrk.hist->bins, camKalTrk.hist->bins, max_val ? 255. / max_val : 0., 0); //  bin  [0,255]
			cvResetImageROI(camKalTrk.hue); // remove ROI
			cvResetImageROI(camKalTrk.mask);
			camKalTrk.trackWindow = camKalTrk.originBox;
			camKalTrk.trackObject = 1;
			camKalTrk.lastpoint = camKalTrk.predictpoint = cvPoint(camKalTrk.trackWindow.x + camKalTrk.trackWindow.width / 2,
					camKalTrk.trackWindow.y + camKalTrk.trackWindow.height / 2);
			getCurrState(camKalTrk.kalman, camKalTrk.lastpoint, camKalTrk.predictpoint); // seed the filter with the current state
		}
		// state is (x, y, vx, vy)
		camKalTrk.prediction = cvKalmanPredict(camKalTrk.kalman, 0); // prediction = kalman->state_pre

		camKalTrk.predictpoint = cvPoint(cvRound(camKalTrk.prediction->data.fl[0]), cvRound(camKalTrk.prediction->data.fl[1]));

		camKalTrk.trackWindow = cvRect(camKalTrk.predictpoint.x - camKalTrk.trackWindow.width / 2, camKalTrk.predictpoint.y
				- camKalTrk.trackWindow.height / 2, camKalTrk.trackWindow.width, camKalTrk.trackWindow.height);

		camKalTrk.trackWindow = checkRectBoundary(cvRect(0, 0, frame->width, frame->height), camKalTrk.trackWindow);

		camKalTrk.searchWindow = cvRect(camKalTrk.trackWindow.x - region, camKalTrk.trackWindow.y - region, camKalTrk.trackWindow.width + 2
				* region, camKalTrk.trackWindow.height + 2 * region);

		camKalTrk.searchWindow = checkRectBoundary(cvRect(0, 0, frame->width, frame->height), camKalTrk.searchWindow);

		cvSetImageROI(camKalTrk.hue, camKalTrk.searchWindow);
		cvSetImageROI(camKalTrk.mask, camKalTrk.searchWindow);
		cvSetImageROI(camKalTrk.backproject, camKalTrk.searchWindow);

		cvCalcBackProject( &camKalTrk.hue, camKalTrk.backproject, camKalTrk.hist ); // back project

		cvAnd(camKalTrk.backproject, camKalTrk.mask, camKalTrk.backproject, 0);

		camKalTrk.trackWindow = cvRect(region, region, camKalTrk.trackWindow.width, camKalTrk.trackWindow.height);

		if (camKalTrk.trackWindow.height > 5 && camKalTrk.trackWindow.width > 5) {
			// calling CAMSHIFT
			cvCamShift(camKalTrk.backproject, camKalTrk.trackWindow, cvTermCriteria(CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1),
					&camKalTrk.trackComp, &camKalTrk.trackBox);

			/*cvMeanShift( camKalTrk.backproject, camKalTrk.trackWindow,
			 cvTermCriteria( CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1 ),
			 &camKalTrk.trackComp);*/
		}
		else {
			camKalTrk.trackComp.rect.x = 0;
			camKalTrk.trackComp.rect.y = 0;
			camKalTrk.trackComp.rect.width = 0;
			camKalTrk.trackComp.rect.height = 0;
		}

		cvResetImageROI(camKalTrk.hue);
		cvResetImageROI(camKalTrk.mask);
		cvResetImageROI(camKalTrk.backproject);
		camKalTrk.trackWindow = camKalTrk.trackComp.rect;
		camKalTrk.trackWindow = cvRect(camKalTrk.trackWindow.x + camKalTrk.searchWindow.x, camKalTrk.trackWindow.y
				+ camKalTrk.searchWindow.y, camKalTrk.trackWindow.width, camKalTrk.trackWindow.height);

		camKalTrk.measurepoint = cvPoint(camKalTrk.trackWindow.x + camKalTrk.trackWindow.width / 2, camKalTrk.trackWindow.y
				+ camKalTrk.trackWindow.height / 2);
		camKalTrk.realposition->data.fl[0] = camKalTrk.measurepoint.x;
		camKalTrk.realposition->data.fl[1] = camKalTrk.measurepoint.y;
		camKalTrk.realposition->data.fl[2] = camKalTrk.measurepoint.x - camKalTrk.lastpoint.x;
		camKalTrk.realposition->data.fl[3] = camKalTrk.measurepoint.y - camKalTrk.lastpoint.y;
		camKalTrk.lastpoint = camKalTrk.measurepoint;//keep the current real position

		//measurement x,y
		cvMatMulAdd( camKalTrk.kalman->measurement_matrix/*2x4*/, camKalTrk.realposition/*4x1*/,/*measurementstate*/0, camKalTrk.measurement );
		cvKalmanCorrect(camKalTrk.kalman, camKalTrk.measurement);

		cvRectangle(frame, cvPoint(camKalTrk.trackWindow.x, camKalTrk.trackWindow.y), cvPoint(camKalTrk.trackWindow.x
				+ camKalTrk.trackWindow.width, camKalTrk.trackWindow.y + camKalTrk.trackWindow.height), CV_RGB(255,128,0), 4, 8, 0);
	}
	// set new selection if it exists
	if (camKalTrk.selectObject && camKalTrk.selection.width > 0 && camKalTrk.selection.height > 0) {
		cvSetImageROI(camKalTrk.image, camKalTrk.selection);
		cvXorS(camKalTrk.image, cvScalarAll(255), camKalTrk.image, 0);
		cvResetImageROI(camKalTrk.image);
	}

	return camKalTrk.trackWindow;
}
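The Kalman half of this tracker is visible only through its calls (cvKalmanPredict, cvKalmanCorrect). A minimal, hedged setup for the constant-velocity (x, y, vx, vy) state and (x, y) measurement those calls imply; the noise covariances are illustrative guesses, and <string.h> is needed for memcpy:

// Hedged sketch: constant-velocity Kalman filter matching the usage above.
CvKalman* kalman = cvCreateKalman(4, 2, 0);   // 4 state params, 2 measured
const float A[16] = { 1, 0, 1, 0,             // x  += vx
                      0, 1, 0, 1,             // y  += vy
                      0, 0, 1, 0,             // vx unchanged
                      0, 0, 0, 1 };           // vy unchanged
memcpy(kalman->transition_matrix->data.fl, A, sizeof(A));
cvSetIdentity(kalman->measurement_matrix,    cvRealScalar(1));    // we observe (x, y)
cvSetIdentity(kalman->process_noise_cov,     cvRealScalar(1e-5)); // illustrative
cvSetIdentity(kalman->measurement_noise_cov, cvRealScalar(1e-1)); // illustrative
cvSetIdentity(kalman->error_cov_post,        cvRealScalar(1));

// Per frame: predict, measure with CamShift, then correct:
//   const CvMat* pred = cvKalmanPredict(kalman, 0);
//   ... run cvCamShift and fill a 2x1 measurement CvMat with the box center ...
//   cvKalmanCorrect(kalman, measurement);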
Example #23
CvBox2D CamShiftIris::track( IplImage* image, CvRect selection, bool isIris){
	CamShiftIris camshift;
	select_object1=1;
	track_object1=-1;
	origin1=cvPoint(0,0);

///////////////////////////////

	int i, bin_w, c;
	//frame = cvQueryFrame( capture );

//
//	frame=cvCloneImage(image);
//        if( !frame )
//            return 0;
	if( image ){
		/* allocate all the buffers */
//		image = cvCreateImage( cvGetSize(frame), 8, 3 );
//		image->origin = frame->origin;
		hsv1 = cvCreateImage( cvGetSize(image), 8, 3 );
		h = cvCreateImage( cvGetSize(image), 8, 1 );
		s = cvCreateImage( cvGetSize(image), 8, 1 );
		v = cvCreateImage( cvGetSize(image), 8, 1);
		hue1 = cvCreateImage( cvGetSize(image), 8, 1 );
		mask1 = cvCreateImage( cvGetSize(image), 8, 1 );
		backproject1 = cvCreateImage( cvGetSize(image), 8, 1 );
		hist1= cvCreateHist( 1, &hdims1, CV_HIST_ARRAY, &hranges1, 1 );
		histimg1 = cvCreateImage( cvSize(320,200), 8, 3 );
		cvZero( histimg1 );
	}
	cvCvtColor( image, hsv1, CV_BGR2HSV );
	///////////////////Equalize v in hsv///////////
	cvSplit( hsv1, h, s, v, 0 );
	cvEqualizeHist(v,v);
	cvMerge(h,s,v,0,hsv1);
	///////////////////Equalize v in hsv///////////

	if( track_object1 !=0 ){
		int _vmin1 = vmin1, _vmax1 = vmax1;

		cvInRangeS( hsv1, cvScalar(0,smin1,MIN(_vmin1,_vmax1),0),
					cvScalar(180,256,MAX(_vmin1,_vmax1),0), mask1 );
		cvSplit( hsv1, hue1, 0, 0, 0 );

		if( track_object1 < 0 ){
			float max_val = 0.f;
			cvSetImageROI( hue1, selection );
			cvSetImageROI( mask1, selection );
			cvCalcHist( &hue1, hist1, 0, mask1 );
			cvGetMinMaxHistValue( hist1, 0, &max_val, 0, 0 );
			cvConvertScale( hist1->bins, hist1->bins, max_val ? 255. / max_val : 0., 0 );
			cvResetImageROI( hue1 );
			cvResetImageROI( mask1 );
			track_window1 = selection;
			track_object1 = 1;

			cvZero( histimg1 );
			bin_w = histimg1->width / hdims1;
			for( i = 0; i < hdims1; i++ )
			{
				int val = cvRound( cvGetReal1D(hist1->bins,i)*histimg1->height/255 );
				CvScalar color = camshift.hsvrgb(i*180.f/hdims1);
				cvRectangle( histimg1, cvPoint(i*bin_w,histimg1->height),
							 cvPoint((i+1)*bin_w,histimg1->height - val),
							 color, -1, 8, 0 );
			}
		}
		cvCalcBackProject( &hue1, backproject1, hist1);
		cvAnd( backproject1, mask1, backproject1, 0 );
		try{
		cvCamShift( backproject1, track_window1,
					cvTermCriteria( CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1 ),
					&track_comp1, &track_box1 );
		}catch(...){
			cvReleaseImage(&hsv1);
			cvReleaseImage(&h);
			cvReleaseImage(&s);
			cvReleaseImage(&v);
			cvReleaseImage(&hue1);
			cvReleaseImage(&mask1);
			cvReleaseImage(&backproject1);
			cvReleaseHist(&hist1);
			cvReleaseImage(&histimg1);
			return track_box1; // bail out: the buffers above are already released
		}
		track_window1 = track_comp1.rect;
		if( backproject1_mode )
			cvCvtColor( backproject1, image, CV_GRAY2BGR );
		if( !image->origin )
			track_box1.angle = -track_box1.angle;
		if(isIris)
			cvEllipseBox( image, track_box1, CV_RGB(255,0,0), 3, CV_AA, 0 );
	}
	cvShowImage( "CamShift Tracking", image );
	//cvShowImage( "Histogram", histimg1 );

//	c = cvWaitKey(10);
//	if( (char) c == 27 )
//		cout<<"esc pressed";
//		//return; //break;
//	switch( (char) c ){
//	case 'b':
//		backproject1_mode ^= 1;
//		break;
//	case 'c':
//		track_object1 = 0;
//		cvZero( histimg1 );
//		break;
//	case 'h':
//		show_hist1^= 1;
//		if( !show_hist1)
//			cvDestroyWindow( "Histogram" );
//		else
//			cvNamedWindow( "Histogram", 1 );
//		break;
//	default:
//		;
//	}

	//cvReleaseImage(&image);
	cvReleaseImage(&hsv1);
	cvReleaseImage(&h);
	cvReleaseImage(&s);
	cvReleaseImage(&v);
	cvReleaseImage(&hue1);
	cvReleaseImage(&mask1);
	cvReleaseImage(&backproject1);
	cvReleaseHist(&hist1);
	cvReleaseImage(&histimg1);

    return track_box1;
}
Example #24
static int foaCamShiftC1R( void* prm )
{
    /* Some variables */
    long       lParam  = (long)prm;
    int        Flvr = (lParam >> 4) & 0xf;
    int        depth = (Flvr == ATS_8U ? IPL_DEPTH_8U : 
                        Flvr == ATS_8S ? IPL_DEPTH_8S : IPL_DEPTH_32F);
    int        Type = lParam & 0xf;
    int        Errors = 0;

    CvTermCriteria criteria;
    CvRect     Window;
    CvSize     roi;

    IplImage*  src;

    float      alpha = 0;
    int        i;
    int        x, y;

    float      destOrientation = 0;
    float      destLen = 0;
    float      destWidth = 0;
    float      destArea = 0;
    int        destIters = 0;

    static int  read_param = 0;

    /* Initialize global parameters */
    if( !read_param )
    {
        read_param = 1;
        trsiRead( &height, "512", "source array length" );
        trsiRead( &width, "512", "source array width" );
        trsiRead( &Length, "68", "oval length" );
        trsiRead( &Width, "15", "oval width" );
        trsiRead( &iter, "10", "iterations" );
        trsiRead( &steps, "10", "steps" );
        trssRead( &epsilon, "1", "epsilon" );
    }

    /* Initialization */
    Window.x = width / 4;
    Window.y = height / 4;
    Window.width = width / 2;
    Window.height = height / 2;

    roi.width = width;
    roi.height = height;

    criteria.type = Type;
    criteria.epsilon = epsilon;
    criteria.maxIter = iter;

    /* Allocating source arrays; */
    src = cvCreateImage(roi, depth, 1);
    assert(src);

    for( alpha = -Pi / 2; alpha < Pi / 2; alpha += Pi / steps )
    {
        x = (int)(width  / 2 + width / 8 * cos(alpha));
        y = (int)(height / 2 + height / 8 * sin(alpha));

        switch( Flvr )
        {
        case ATS_8U:
            atsbInitEllipse( (uchar*)src->imageData,
                             roi.width,
                             roi.height,
                             src->widthStep,
                             x,
                             y,
                             Length,
                             Width,
                             alpha,
                             10 );
            break;
        case ATS_8S:
            atsbInitEllipse( (uchar*)src->imageData,
                             roi.width,
                             roi.height,
                             src->widthStep,
                             x,
                             y,
                             Length,
                             Width,
                             alpha,
                             10 );
            break;
        case ATS_32F:
            atsfInitEllipse( (float*)src->imageData,
                             roi.width,
                             roi.height,
                             src->widthStep,
                             x,
                             y,
                             Length,
                             Width,
                             alpha,
                             10 );
            break;
        } /* switch( Flvr ) */

        putchar('.');

        for( i = 0; i < steps; i++ )
        {
            CvConnectedComp comp;
            CvBox2D box;
            destIters = cvCamShift( src, Window, criteria, &comp, &box );
            Window = comp.rect;
            destArea = (float) comp.area;
            destOrientation = box.angle;
            destLen = box.size.height;
            destWidth = box.size.width;
        }
        
        /* Checking results */
        /* Checking orientation */
        if( fabs( alpha - destOrientation ) > 0.01 &&
            fabs( alpha + Pi - destOrientation ) > 0.01 )
        {
            Errors++;
            trsWrite( ATS_LST,
                      "orientation: act: %f,  exp: %f\n",
                      destOrientation,
                      alpha );
        }
        /* Checking length */
        if( fabs( destLen - Length * 2 ) > epsilon )
        {
            Errors++;
            trsWrite( ATS_LST,
                      "length: act: %f,  exp: %d\n",
                      destLen,
                      Length );
        }
        /* Checking width */
        if( fabs( destWidth - Width * 2 ) > epsilon )
        {
            Errors++;
            trsWrite( ATS_LST,
                      "width: act: %f,  exp: %d\n",
                      destWidth,
                      Width );
        }
    }

    cvReleaseImage(&src);

    return Errors == 0 ? TRS_OK : trsResult( TRS_FAIL, "Fixed %d errors", Errors );

} /* foaCamShiftC1R */
Example #25
int main222( int argc,   char** argv )
{
    CvCapture* capture = 0;

    if( argc == 1 || (argc == 2 && strlen(argv[1]) == 1 && isdigit(argv[1][0])))
        capture = cvCaptureFromCAM( argc == 2 ? argv[1][0] - '0' : 0 );
    else if( argc == 2 )
        capture = cvCaptureFromAVI( argv[1] );

    if( !capture )
    {
        fprintf(stderr,"Could not initialize capturing...\n");
        return -1;
    }

    printf( "Hot keys: \n"
        "\tESC - quit the program\n"
        "\tc - stop the tracking\n"
        "\tb - switch to/from backprojection view\n"
        "\th - show/hide object histogram\n"
        "To initialize tracking, select the object with mouse\n" );

    cvNamedWindow( "Histogram", 1 );
    cvNamedWindow( "CamShiftDemo", 1 );
    cvSetMouseCallback( "CamShiftDemo", on_mouse, 0 );
    cvCreateTrackbar( "Vmin", "CamShiftDemo", &vmin, 256, 0 );
    cvCreateTrackbar( "Vmax", "CamShiftDemo", &vmax, 256, 0 );
    cvCreateTrackbar( "Smin", "CamShiftDemo", &smin, 256, 0 );

    for(;;)
    {
        IplImage* frame = 0;
        int i, bin_w, c;

        frame = cvQueryFrame( capture );
        if( !frame )
            break;

        if( !image )
        {
            /* allocate all the buffers */
            image = cvCreateImage( cvGetSize(frame), 8, 3 );
            image->origin = frame->origin;
            hsv = cvCreateImage( cvGetSize(frame), 8, 3 );
            hue = cvCreateImage( cvGetSize(frame), 8, 1 );
            mask = cvCreateImage( cvGetSize(frame), 8, 1 );
            backproject = cvCreateImage( cvGetSize(frame), 8, 1 );
            hist = cvCreateHist( 1, &hdims, CV_HIST_ARRAY, &hranges, 1 );
            histimg = cvCreateImage( cvSize(320,200), 8, 3 );
            cvZero( histimg );
        }

        cvCopy( frame, image, 0 );
        cvCvtColor( image, hsv, CV_BGR2HSV );

        if( track_object )
        {
            int _vmin = vmin, _vmax = vmax;

            cvInRangeS( hsv, cvScalar(0,smin,MIN(_vmin,_vmax),0),
                        cvScalar(180,256,MAX(_vmin,_vmax),0), mask );
            cvSplit( hsv, hue, 0, 0, 0 );

            if( track_object < 0 )
            {
                float max_val = 0.f;
                cvSetImageROI( hue, selection );
                cvSetImageROI( mask, selection );
                cvCalcHist( &hue, hist, 0, mask );
                cvGetMinMaxHistValue( hist, 0, &max_val, 0, 0 );
                cvConvertScale( hist->bins, hist->bins, max_val ? 255. / max_val : 0., 0 );
                cvResetImageROI( hue );
                cvResetImageROI( mask );
                track_window = selection;
                track_object = 1;

                cvZero( histimg );
                bin_w = histimg->width / hdims;
                for( i = 0; i < hdims; i++ )
                {
                    int val = cvRound( cvGetReal1D(hist->bins,i)*histimg->height/255 );
                    CvScalar color = hsv2rgb(i*180.f/hdims);
                    cvRectangle( histimg, cvPoint(i*bin_w,histimg->height),
                                 cvPoint((i+1)*bin_w,histimg->height - val),
                                 color, -1, 8, 0 );
                }
            }

            cvCalcBackProject( &hue, backproject, hist );
            cvAnd( backproject, mask, backproject, 0 );
            cvCamShift( backproject, track_window,
                        cvTermCriteria( CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1 ),
                        &track_comp, &track_box );
            track_window = track_comp.rect;

            if( backproject_mode )
                cvCvtColor( backproject, image, CV_GRAY2BGR );
            if( !image->origin )
                track_box.angle = -track_box.angle;
            cvEllipseBox( image, track_box, CV_RGB(255,0,0), 3, CV_AA, 0 );
        }

        if( select_object && selection.width > 0 && selection.height > 0 )
        {
            cvSetImageROI( image, selection );
            cvXorS( image, cvScalarAll(255), image, 0 );
            cvResetImageROI( image );
        }

        cvShowImage( "CamShiftDemo", image );
        cvShowImage( "Histogram", histimg );

        c = cvWaitKey(10);
        if( (char) c == 27 )
            break;
        switch( (char) c )
        {
        case 'b':
            backproject_mode ^= 1;
            break;
        case 'c':
            track_object = 0;
            cvZero( histimg );
            break;
        case 'h':
            show_hist ^= 1;
            if( !show_hist )
                cvDestroyWindow( "Histogram" );
            else
                cvNamedWindow( "Histogram", 1 );
            break;
        default:
            ;
        }
    }

    cvReleaseCapture( &capture );
    cvDestroyWindow("CamShiftDemo");

    return 0;
}
Example #26
int track( IplImage* frame, int flag,int Cx,int Cy,int R )
{

    {

        int i, bin_w, c;

        LOGE("#######################Check1############################");

        if( !image )
        {
            /* allocate all the buffers */
            image = cvCreateImage( cvGetSize(frame), 8, 3 );
            image->origin = frame->origin;
            hsv = cvCreateImage( cvGetSize(frame), 8, 3 );
            hue = cvCreateImage( cvGetSize(frame), 8, 1 );
            mask = cvCreateImage( cvGetSize(frame), 8, 1 );
            backproject = cvCreateImage( cvGetSize(frame), 8, 1 );
            hist = cvCreateHist( 1, &hdims, CV_HIST_ARRAY, &hranges, 1 );
            histimg = cvCreateImage( cvSize(320,200), 8, 3 );
            cvZero( histimg );
            LOGE("######################Check2###########################");
        }

        cvCopy( frame, image, 0 );
        cvCvtColor( image, hsv, CV_BGR2HSV );


        {
            int _vmin = vmin, _vmax = vmax;

            cvInRangeS( hsv, cvScalar(0,smin,MIN(_vmin,_vmax),0),
                        cvScalar(180,256,MAX(_vmin,_vmax),0), mask );
            cvSplit( hsv, hue, 0, 0, 0 );
            LOGE("###########################Check3######################");
            if(flag==0)
            {
            	LOGE("###############Initialized#############################");
				selection.x=Cx-R;
				selection.y=Cy-R;
				selection.height=2*R;
				selection.width=2*R;
                float max_val = 0.f;
                cvSetImageROI( hue, selection );
                cvSetImageROI( mask, selection );
                cvCalcHist( &hue, hist, 0, mask );
                cvGetMinMaxHistValue( hist, 0, &max_val, 0, 0 );
                cvConvertScale( hist->bins, hist->bins, max_val ? 255. / max_val : 0., 0 );
                cvResetImageROI( hue );
                cvResetImageROI( mask );
                track_window = selection;
                track_object = 1;

                cvZero( histimg );
                bin_w = histimg->width / hdims;
                for( i = 0; i < hdims; i++ )
                {
                    int val = cvRound( cvGetReal1D(hist->bins,i)*histimg->height/255 );
                    CvScalar color = hsv2rgb(i*180.f/hdims);
                    cvRectangle( histimg, cvPoint(i*bin_w,histimg->height),
                                 cvPoint((i+1)*bin_w,histimg->height - val),
                                 color, -1, 8, 0 );
                }
                LOGE("##############Check4#########################");
            }
            LOGE("##############Check5#########################");
            cvCalcBackProject( &hue, backproject, hist );
            cvAnd( backproject, mask, backproject, 0 );
            cvCamShift( backproject, track_window,
                        cvTermCriteria( CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1 ),
                        &track_comp, &track_box );
            track_window = track_comp.rect;
            char buffer[50];
            sprintf(buffer,"vals= %d %d and %d",track_window.x,track_window.y,track_window.width);
            LOGE(buffer);
            if( backproject_mode )
                cvCvtColor( backproject, image, CV_GRAY2BGR );
            if( image->origin )
                track_box.angle = -track_box.angle;
            cvEllipseBox( image, track_box, CV_RGB(255,0,0), 3, CV_AA, 0 );
        }

        if( select_object && selection.width > 0 && selection.height > 0 )
        {
            cvSetImageROI( image, selection );
            cvXorS( image, cvScalarAll(255), image, 0 );
            cvResetImageROI( image );
        }

        LOGE("!!!!!!!!!!!!!!!!!!Done Tracking!!!!!!!!!!!!!!!!!!!!!!!!!!!!");


    }



    return 0;
}