IplImage* BouyObject::HistogramMask(const IplImage * imgIn) const
{
    // Build a single-channel mask by back-projecting both the near and far
    // buoy histograms over the H/S planes of the input, merging the two
    // results, and smoothing.  Caller owns the returned image.
    const CvSize frameSize = cvGetSize(imgIn);

    IplImage * mergedMask = cvCreateImage(frameSize, IPL_DEPTH_8U, 1);
    IplImage * farMask    = cvCreateImage(frameSize, IPL_DEPTH_8U, 1);

    // Convert to HSV and pull out the individual planes.
    IplImage * hsvImg = cvCreateImage(frameSize, 8, 3);
    cvCvtColor(imgIn, hsvImg, CV_BGR2HSV);
    IplImage * huePlane = cvCreateImage(frameSize, 8, 1);
    IplImage * satPlane = cvCreateImage(frameSize, 8, 1);
    IplImage * valPlane = cvCreateImage(frameSize, 8, 1);
    IplImage * hsPlanes[] = { huePlane, satPlane };
    cvCvtPixToPlane(hsvImg, huePlane, satPlane, valPlane, 0);

    // One back-projection per histogram, then merge near+far into one mask.
    cvCalcBackProject(hsPlanes, mergedMask, mNearHist);
    cvCalcBackProject(hsPlanes, farMask, mFarHist);
    VisionUtils::CombineMasks(mergedMask, farMask, mergedMask);

    cvSmooth(mergedMask, mergedMask, 2);  // 2 == CV_GAUSSIAN

    cvReleaseImage(&huePlane);
    cvReleaseImage(&satPlane);
    cvReleaseImage(&valPlane);
    cvReleaseImage(&hsvImg);
    cvReleaseImage(&farMask);
    return mergedMask;
}
void HandDetect::skinDetect() { setImage(); cvFlip(image, image, 1); hsv = cvCreateImage(cvGetSize(image), 8, 3); msk = cvCreateImage(cvGetSize(image), 8, 1); hue = cvCreateImage(cvGetSize(image), 8, 1); backproject1 = cvCreateImage(cvGetSize(image), 8, 1); backproject2 = cvCreateImage(cvGetSize(image), 8, 1); cvCvtColor(image, hsv, CV_RGB2HSV); cvInRangeS(hsv, cvScalar(0, smin, MIN(vmin, vmax), 0), cvScalar(180, 256, MAX(vmin, vmax), 0), msk); cvSplit(hsv, hue, 0, 0, 0); cvCalcBackProject(&hue, backproject1, hist1); cvCalcBackProject(&hue, backproject2, hist2); cvThreshold(backproject1, backproject1, 50, 255, CV_THRESH_BINARY | CV_THRESH_OTSU); cvThreshold(backproject2, backproject2, 50, 255, CV_THRESH_BINARY | CV_THRESH_OTSU); cvOr(backproject1, backproject2, backproject, 0); cvErode(backproject, backproject, 0, 1); cvDilate(backproject, backproject, 0, 1); cvAnd(backproject, msk, backproject, 0); if(track_box.center.x!=-1&&track_box.center.y!=-1) preCen=cvPoint(handCen.x, handCen.y); else preCen=cvPoint(0,0); cvCamShift(backproject, track_window, cvTermCriteria(CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1), &track_comp, &track_box); if(track_comp.rect.height>0&&track_comp.rect.width>0) track_window = track_comp.rect; else { track_box.center.x=-1; track_box.center.y=-1; } cvReleaseImage(&hsv); cvReleaseImage(&msk); cvReleaseImage(&hue); cvReleaseImage(&backproject1); cvReleaseImage(&backproject2); }
bool AdaptiveHistogramCamshift::ComputeCamshift(const IplImage* hue, const IplImage* mask) { // Compute backproject cvCalcBackProject(&hue, m_imgBackproject, m_hist); cvAnd(m_imgBackproject, mask, m_imgBackproject, 0); // Init velocity m_trackPosTwoFramesBack = cvPoint(static_cast<int>(m_trackBox.center.x), static_cast<int>(m_trackBox.center.y)); m_trackAreaTwoFramesBack = m_trackBox.size.width * m_trackBox.size.height; // DEBUG track window area //printf("track wnd area: %f\n", m_trackBox.size.width * m_trackBox.size.height); // Compute camshift this frame CvConnectedComp trackComp; assert((m_trackWindow.height > 0) && (m_trackWindow.width > 0)); CvBox2D trackBox; const int camShiftRes = cvCamShift(m_imgBackproject, m_trackWindow, cvTermCriteria(CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1), &trackComp, &trackBox); if (camShiftRes >= 0) { m_trackBox = trackBox; m_trackCompRect = trackComp.rect; return true; } else { return false; } }
CvBox2D CamShiftPatch::getTrackBox(CvScalar maskRange, CvHistogram *hist)
{
    // Back-project `hist` over the hue plane of originImage, gate it with the
    // in-range mask, and iterate CamShift 10 times from a full-frame window.
    // Returns the final oriented bounding box.
    IplImage* backproject = cvCreateImage(cvGetSize(originImage), 8, 1); // back-projection space, single channel
    IplImage* hue = cvCreateImage(cvGetSize(originImage), 8, 1);
    IplImage* mask = getInRangeMask(maskRange, hue);

    cvCalcBackProject(&hue, backproject, hist);   // hue probability map
    cvAnd(backproject, mask, backproject, 0);     // keep only in-range pixels

    CvConnectedComp track_comp;
    CvBox2D track_box;  // oriented box returned by the tracker

    // Reset the search window to the whole frame.  This was hard-coded to
    // 320x240, which silently broke tracking for any other capture size;
    // use the actual image dimensions instead (identical at 320x240).
    track_window = cvRect(0, 0, originImage->width, originImage->height);

    for (int i = 0; i < 10; i++)
    {
        cvCamShift(
            backproject,    // color probability distribution image
            track_window,   // initial search window
            cvTermCriteria(CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1), // stop criterion
            &track_comp,    // resulting component (new window position/area)
            &track_box      // minimal oriented rectangle around the object
        );
        track_window = track_comp.rect;  // next iteration starts from the result
    }

    cvReleaseImage(&backproject);
    cvReleaseImage(&hue);
    cvReleaseImage(&mask);
    return track_box;
}
void WebCamData::trackFace() { CvConnectedComp comps; updateHugeImage(d->data); cvCalcBackProject(&d->hueImage, d->prob, d->histogram); cvAnd(d->prob, d->mask, d->prob, 0); CvBox2D box; cvCamShift(d->prob, d->faceRect, cvTermCriteria(CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1), &comps, &box); d->faceRect = comps.rect; int radius = cvRound((d->faceRect.width + d->faceRect.height) * 0.25); CvPoint center; center.x = cvRound(d->faceRect.x + d->faceRect.width * 0.5); center.y = cvRound(d->faceRect.y + d->faceRect.height * 0.5); /* qDebug() << Q_FUNC_INFO << comps.rect.x << comps.rect.y << comps.rect.width << comps.rect.height << box.angle << center.x << center.y << radius; */ d->dataMap.clear(); d->dataMap["z"] = QVariant(radius); d->dataMap["x"] = QVariant(center.x); d->dataMap["y"] = QVariant(center.y); d->dataMap["angle"] = QVariant(box.angle); Q_EMIT dataReady(); }
CamShift::Box CamShift::Track(const ImgBgr& img)
{
  // Track the target in a new frame and return its box in the caller's
  // (bottom-up) coordinate system.
  cvCopy(ImgIplImage(img), image, 0);
  cvCvtColor(image, hsv, CV_BGR2HSV);
  cvFlip(hsv, hsv, 0);  // process the frame upside-down; y is mapped back below

  // Gate by saturation/value before back-projecting the hue histogram.
  const int loV = MIN(vmin, vmax);
  const int hiV = MAX(vmin, vmax);
  cvInRangeS(hsv, cvScalar(0, smin, loV, 0), cvScalar(180, 256, hiV, 0), mask);
  cvSplit(hsv, hue, 0, 0, 0);

  cvCalcBackProject(&hue, backproject, hist);
  cvAnd(backproject, mask, backproject, 0);

  cvCamShift(backproject, track_window,
             cvTermCriteria(CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1),
             &track_comp, &track_box);
  track_window = track_comp.rect;

  Box result;
  result.angle = track_box.angle;
  result.center.x = static_cast<LONG>(track_box.center.x);
  // Undo the vertical flip applied above.
  result.center.y = static_cast<LONG>(img.Height() - track_box.center.y - 1);
  // NOTE(review): width feeds cy and height feeds cx here -- looks swapped,
  // but callers may rely on it; confirm before changing.
  result.size.cy = static_cast<LONG>(track_box.size.width);
  result.size.cx = static_cast<LONG>(track_box.size.height);
  return result;
}
int camshift(const IplImage* next, TrackObject* obj) //Input: next : Next Frame, obj : offline tracking object { IplImage* img = cvCloneImage(next); IplImage* hsv_next = cvCreateImage(cvGetSize(next), IPL_DEPTH_8U, 3); //Size changes. No global or static IplImage* h_next_8 = cvCreateImage(cvGetSize(next), IPL_DEPTH_8U, 1); //Size changes. No global or static IplImage* h_next = cvCreateImage(cvGetSize(next), IPL_DEPTH_32F, 1); IplImage* img_bp = cvCreateImage(cvGetSize(next), IPL_DEPTH_32F, 1); CvConnectedComp track_comp; //CvRect search_window = CALC_RECT(CALC_RECT_CENTER_X(eye), CALC_RECT_CENTER_Y(eye), WINDOW_W, WINDOW_H); //Conversion cvCvtColor(next, hsv_next, CV_BGR2HSV); cvSplit(hsv_next, h_next_8, 0, 0, 0); cvConvertScale(h_next_8, h_next, 1, 0); cvCalcBackProject(&h_next, img_bp, obj->hist); int iteration; iteration = cvCamShift(img_bp, obj->track_window, cvTermCriteria( CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, MAX_ITER, EPSILON), &track_comp, &obj->track_box); obj->track_window = track_comp.rect; cvReleaseImage(&img_bp); cvReleaseImage(&h_next); cvReleaseImage(&h_next_8); cvReleaseImage(&hsv_next); cvReleaseImage(&img); return(iteration); }
CvRect combi_track(IplImage * pImg, KalmanFilter &kfilter)
{
    // Kalman-predicted CamShift step: run CamShift from the Kalman
    // prediction when it lies inside the frame; otherwise flag the track
    // as lost by setting prevHandRect.x to -1.
    CvRect predrect = kfilter.predictionReport(prevHandRect);
    CvConnectedComp components;

    // Probability image from the hand histogram, gated by the mask.
    updateHueImage(pImg);
    cvCalcBackProject(&pHueImg, pProbImg, pHist);
    cvAnd(pProbImg, pMask, pProbImg, 0);

    // De Morgan of the original negated check: the prediction must sit
    // entirely within the image bounds.
    const bool predictionInsideFrame =
        predrect.x >= 0 && predrect.y >= 0 &&
        (predrect.x + predrect.width) <= pImg->width &&
        (predrect.y + predrect.height) <= pImg->height;

    if (predictionInsideFrame)
    {
        cvCamShift(pProbImg, predrect,
                   cvTermCriteria(CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1),
                   &components, handBox);
        // Adopt CamShift's refined window.
        prevHandRect = components.rect;
    }
    else
    {
        prevHandRect.x = -1;  // sentinel: prediction left the frame
    }
    return prevHandRect;
}
Mask* Histogram::calcBackProjection(const Image* rgbImage) const
{
    // Back-project this histogram over the H/S planes of a BGR image.
    // Returns NULL when there is no histogram or no usable input; otherwise
    // returns a new single-channel image (caller takes ownership).
    if (!m_histogram || !rgbImage || !rgbImage->cvImage())
        return NULL;

    // BGR -> HSV, then split into separate planes.
    IplImage* hsvImg = cvCreateImage(rgbImage->size(), 8, 3);
    cvCvtColor(rgbImage->cvImage(), hsvImg, CV_BGR2HSV);
    IplImage* huePlane = cvCreateImage(rgbImage->size(), 8, 1);
    IplImage* satPlane = cvCreateImage(rgbImage->size(), 8, 1);
    IplImage* valPlane = cvCreateImage(rgbImage->size(), 8, 1);
    IplImage* hsPlanes[] = {huePlane, satPlane};
    cvCvtPixToPlane(hsvImg, huePlane, satPlane, valPlane, NULL);

    IplImage* backProject = cvCreateImage(rgbImage->size(), 8, 1);
    cvCalcBackProject(hsPlanes, backProject, m_histogram);

    cvReleaseImage(&hsvImg);
    cvReleaseImage(&huePlane);
    cvReleaseImage(&satPlane);
    cvReleaseImage(&valPlane);

    // backProject's ownership passes to the returned wrapper.
    return new Image(backProject);
}
void ImgProducer::calcSKINPROB()
{
    // Compute the skin-probability image by back-projecting the skin-model
    // histogram onto the current frame, and mark its cache slot valid.
    /* Split the source frame into its three planes */
    cvSplit(imgSRC,img[idP3],img[idP2],img[idP1],NULL);
    /* Segmentation by histogram back-projection */
    // NOTE(review): `images` is presumably an array aliasing the plane
    // buffers filled by cvSplit above -- verify where it is assembled.
    cvCalcBackProject(images,img[idSKINPROB],skinModel.hist);
    imgOK[idSKINPROB] = 1;  // skin-probability slot is now up to date
}
// One CamShift tracking step for contour `i`, transforming the old image's
// contour/features into the new image.  All tracker state (histogram,
// planes, back-projection, search window, transform matrices) is owned by
// the caller and threaded through by reference; buffers are lazily
// (re)allocated here on first use or when the histogram size changes.
void CamShiftPlugin::ProcessStatic ( int i, ImagePlus *img, ImagePlus *oimg, int *hsizes, CvTermCriteria criteria,
IplImage** &planes, CvHistogram* &hist, IplImage* &backproject, CvRect &orect, CvPoint &ocenter, CvRect &searchwin, CvMat* &rotation, CvMat* &shift, bool oready){
	// Rebuild the histogram when the requested bin count changed.
	// NOTE(review): only dim 0 is compared against hsizes; dims 1/2 are
	// assumed to change together -- confirm.
	if (hist && hist->mat.dim[0].size!=hsizes[0])
		cvReleaseHist(&hist);
	if( !hist )
		hist = cvCreateHist( 3, hsizes, CV_HIST_ARRAY, NULL, 0);
	if( !backproject )
		backproject = cvCreateImage( cvGetSize(img->orig), IPL_DEPTH_8U, 1 );
	if( !planes ){
		planes = (IplImage**) malloc(3 * sizeof(IplImage*));
		for (int p=0; p<3; p++)
			planes[p] = cvCreateImage( cvGetSize(img->orig), 8, 1 );
	}
	if (!rotation)
		rotation = cvCreateMat(2,3,CV_32FC1);
	if (!shift)
		shift = cvCreateMat(2,1,CV_32FC1);
	if (!oready){
		// First pass for this object: build its 3-plane color histogram from
		// the ROI of the old image bounded by its contour.
		orect = cvBoundingRect(oimg->contourArray[i],1);
		cvCvtPixToPlane( oimg->orig, planes[0], planes[1], planes[2], 0 );
		for (int p=0; p<3; p++)
			cvSetImageROI(planes[p],orect);
		cvCalcHist( planes, hist, 0, NULL );
		cvNormalizeHist(hist, 255);
		for (int p=0; p<3; p++)
			cvResetImageROI(planes[p]);
		searchwin = orect; //cvRect(0,0,img->orig->width, img->orig->height);
		ocenter = cvPoint(orect.x+orect.width/2, orect.y+orect.height/2);
	}
	//The following checks shouldn't be needed.
	RestrictRect(searchwin, cvRect(0,0,backproject->width,backproject->height));
	// Back-project the object histogram over the new frame and run CamShift
	// from the previous search window.
	cvCvtPixToPlane( img->orig, planes[0], planes[1], planes[2], 0 );
	cvCalcBackProject( planes, backproject, hist );
	CvBox2D track_box;
	CvConnectedComp track_comp;
	cvCamShift( backproject, searchwin, criteria, &track_comp, &track_box );
	searchwin = track_comp.rect;
	// Translation = displacement of the tracked box center from the old center.
	cvmSet(shift,0,0,track_box.center.x - ocenter.x);
	cvmSet(shift,1,0,track_box.center.y - ocenter.y);
	// shift->data.fl[0] = track_box.center.x - ocenter.x;
	// shift->data.fl[1] = track_box.center.y - ocenter.y;
	// Rotation about the new center by the tracked box angle, then apply
	// rotation+shift to carry the old contour onto the new frame.
	cv2DRotationMatrix(track_box.center, track_box.angle, 1.0, rotation);
	cvTransform(oimg->contourArray[i],img->contourArray[i],rotation,shift);
	// CvMat *ofm = FeatPointsToMat(oimg->feats[i]);
	// Cvmat *fm = FeatPointsToMat(img->feats[i]);
	// cvTransform(ofm,img->contourArray[i],rotation,shift);
	TransformFeatPoints(oimg->feats[i], img->feats[i], rotation, shift);
}
int track(camshift * cs, IplImage * img, CvBox2D * fBox)
{
    // One CamShift step; returns 0 (and leaves state untouched) when the
    // previous face rect is degenerate or outside the frame, else 1 with
    // *fBox set to the new face box.
    CvConnectedComp comp;

    updateHueImage(cs, img);
    cvCalcBackProject(&cs->hueImg, cs->probImg, cs->hist);
    cvAnd(cs->probImg, cs->mask, cs->probImg, 0);

    // Validate the previous window before handing it to cvCamShift
    // (all checks are side-effect-free, so they collapse into one guard).
    CvSize size = cvGetSize(cs->probImg);
    if (cs->prevFaceRect.x <= 0 ||
        cs->prevFaceRect.x > size.width ||
        cs->prevFaceRect.y <= 0 ||
        cs->prevFaceRect.y > size.height ||
        cs->prevFaceRect.x + cs->prevFaceRect.width > size.width ||
        cs->prevFaceRect.y + cs->prevFaceRect.height > size.height ||
        cs->prevFaceRect.width <= 0 ||
        cs->prevFaceRect.height <= 0)
    {
        return 0;
    }

    cvCamShift(cs->probImg, cs->prevFaceRect,
               cvTermCriteria(CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1),
               &comp, &cs->faceBox);
    cs->prevFaceRect = comp.rect;
    cs->faceBox.angle = -cs->faceBox.angle;
    *fBox = cs->faceBox;
    return 1;
}
////////////////////////////////// // track() // CvBox2D track(camshift * cs, IplImage * pImg) { CvConnectedComp components; // Create a new hue image updateHueImage(cs, pImg); // Create a probability image based on the face histogram cvCalcBackProject( &cs->pHueImg, cs->pProbImg, cs->pHist ); cvAnd( cs->pProbImg, cs->pMask, cs->pProbImg, 0 ); // Use CamShift to find the center of the new face probability CvSize size = cvGetSize(cs->pProbImg); if (cs->prevFaceRect.x < 0) { cs->prevFaceRect.x = 0; } if (cs->prevFaceRect.x >= size.width) { cs->prevFaceRect.x = size.width - 1; } if (cs->prevFaceRect.y < 0) { cs->prevFaceRect.y = 0; } if (cs->prevFaceRect.y >= size.height) { cs->prevFaceRect.y = size.height - 1; } if (cs->prevFaceRect.x + cs->prevFaceRect.width > size.width) { cs->prevFaceRect.width = size.width - cs->prevFaceRect.x; } if (cs->prevFaceRect.y + cs->prevFaceRect.height > size.height) { cs->prevFaceRect.height = size.height - cs->prevFaceRect.y; } cvCamShift( cs->pProbImg, cs->prevFaceRect, cvTermCriteria( CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1 ), &components, &cs->faceBox ); // Update face location and angle cs->prevFaceRect = components.rect; cs->faceBox.angle = -cs->faceBox.angle; return cs->faceBox; }
/* One tracking step: given the current frame and the tracked object,
 * back-project its histogram, run CamShift from the previous rect, and
 * return the new face box (angle negated for display). */
CvBox2D FaceBl0r::camshift_track_face (IplImage* image, TrackedObj* obj)
{
  CvConnectedComp comp;

  // Refresh the hue plane for this frame.
  update_hue_image(image, obj);

  // Probability map from the stored face histogram, gated by the mask.
  cvCalcBackProject(&obj->hue, obj->prob, obj->hist);
  cvAnd(obj->prob, obj->mask, obj->prob, 0);

  // Search from where the face was last seen.
  cvCamShift(obj->prob, obj->prev_rect,
             cvTermCriteria(CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1),
             &comp, &obj->curr_box);

  // Remember the refined window for the next call.
  obj->prev_rect = comp.rect;
  obj->curr_box.angle = -obj->curr_box.angle;

  return obj->curr_box;
}
////////////////////////////////// // track() // CvRect camshift_track(IplImage * pImg) { CvConnectedComp components; // Create a new hue image updateHueImage(pImg); // Create a probability image based on the hand histogram cvCalcBackProject( &pHueImg, pProbImg, pHist ); cvAnd( pProbImg, pMask, pProbImg, 0 ); //cvSetImageROI(pProbImg,predrect); // Use CamShift to find the center of the new hand probability cvCamShift( pProbImg, prevHandRect2, cvTermCriteria( CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1 ),&components, handBox ); // Update hand location and angle prevHandRect2 = components.rect; //if(!pImg->origin) // handBox->angle = -handBox->angle; //cvResetImageROI(pProbImg); return prevHandRect2; }
void BoatDetecting::startTrackObject(){ cvInRangeS(hsv, cvScalar(0, smin, MIN(vmin, vmax), 0), cvScalar(180, 256, MAX(vmin, vmax), 0), mask); // 10,256,30 cvSplit(hsv, hue, 0, 0, 0); if (!isTrackingInitialized){ // 如果跟踪窗口未初始化 float max_val = 0.f; cvSetImageROI(hue, selection); cvSetImageROI(mask, selection); cvCalcHist(&hue, hist, 0, mask); cvGetMinMaxHistValue(hist, 0, &max_val, 0, 0); cvConvertScale(hist->bins, hist->bins, max_val ? 255. / max_val : 0., 0); cvResetImageROI(hue); cvResetImageROI(mask); trackWindow = selection; isTrackingInitialized = true; } cvCalcBackProject(&hue, backproject, hist); //cvShowImage("Hue Channel",backproject); cvAnd(backproject, mask, backproject, 0); //if (trackWindow.x + trackWindow.width/2< allfWidth &&trackWindow.y + trackWindow.height/2< allfHeight &&trackWindow.x>0) if (trackWindow.x + trackWindow.width< allfWidth &&trackWindow.y + trackWindow.height< allfHeight &&trackWindow.x>0) cvCamShift(backproject, trackWindow, cvTermCriteria(CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 20, 1), &trackComp, 0);//初始化跟踪窗口以后直接用trackWindow做跟踪,每帧都会更新 //if (trackComp.rect.width<90 && trackComp.rect.y<200){ // trackWindow = trackComp.rect; //} //if (trackComp.rect.y>200) //{ // trackWindow = trackComp.rect; //} trackWindow = trackComp.rect; }
////////////////////////////////// // track() // CvBox2D track(IplImage * pImg) { CvConnectedComp components; // Create a new hue image updateHueImage(pImg); // Create a probability image based on the face histogram cvCalcBackProject( &pHueImg, pProbImg, pHist ); cvAnd( pProbImg, pMask, pProbImg, 0 ); // Use CamShift to find the center of the new face probability cvCamShift( pProbImg, prevFaceRect, cvTermCriteria( CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1 ), &components, &faceBox ); // Update face location and angle prevFaceRect = components.rect; faceBox.angle = -faceBox.angle; return faceBox; }
void CamShift::Track(IplImage *frame, CvRect &selection, bool calc_hist)
{
    // One tracking step.  When calc_hist is true, (re)build the hue
    // histogram from `selection` first; afterwards `selection` is moved to
    // the newly tracked box position (size preserved).
    // FIX: removed unused locals `int i, bin_w, c;`.
    cvCvtColor( frame, _hsv, CV_BGR2HSV );
    cvInRangeS( _hsv, cvScalar(0,_smin,MIN(_vmin,_vmax),0),
                cvScalar(180,256,MAX(_vmin,_vmax),0), _mask );
    cvSplit( _hsv, _hue, 0, 0, 0 );

    if(calc_hist)
    {
        // Build the model histogram from the selection ROI and scale its
        // bins to [0, 255].
        float max_val = 0.f;
        cvSetImageROI( _hue, selection );
        cvSetImageROI( _mask, selection );
        cvCalcHist( &_hue, _hist, 0, _mask );
        cvGetMinMaxHistValue( _hist, 0, &max_val, 0, 0 );
        cvConvertScale( _hist->bins, _hist->bins, max_val ? 255. / max_val : 0., 0 );
        cvResetImageROI( _hue );
        cvResetImageROI( _mask );
        _track_window = selection;
    }

    cvCalcBackProject( &_hue, _backproject, _hist );
    cvAnd( _backproject, _mask, _backproject, 0 );
    cvCamShift( _backproject, _track_window,
                cvTermCriteria( CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1 ),
                &_track_comp, &_track_box );
    _track_window = _track_comp.rect;

    if( frame->origin )
        _track_box.angle = -_track_box.angle;

    // Re-center the caller's selection on the tracked box, keeping its size.
    selection = cvRect(_track_box.center.x-_track_box.size.width/2,
                       _track_box.center.y-_track_box.size.height/2,
                       selection.width, selection.height);
}
CvBox2D *MultiCamshiftUI::track(IplImage **images, bool show_ui, bool show_backprojections)
{
  // Run CAMSHIFT on every camera, optionally resample the model histogram
  // from the sample square, and optionally render the UI.  Returns the
  // per-camera result boxes.
  for (int camera=0; camera<n_cameras; camera++) {
    /* Calculate the backprojection, in the original (YUV) colorspace */
    cvCvtPixToPlane(images[camera], planes[0], planes[1], planes[2], 0);
    cvCalcBackProject(planes, backprojection, histograms[camera]);

    if (show_ui && show_backprojections) {
      /* Make a YUV version of the output, for display */
      gray_to_yuv(backprojection, yuv_backprojections[camera]);
    }

    if (search_windows[camera].width > 0 && search_windows[camera].height > 0) {
      /* Use the CAMSHIFT algorithm to search for the object of interest */
      CvConnectedComp comp;
      cvCamShift(backprojection, search_windows[camera],
                 cvTermCriteria(CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 20, 1),
                 &comp, &results[camera]);
      search_windows[camera] = comp.rect;

      if (results[camera].size.width > 0 && results[camera].size.height > 0) {
        /* We found an interesting object, draw it if applicable */
        if (show_ui)
          drawResults(camera, images[camera]);
      }
      else {
        /* We lost tracking, expand the search window */
        search_windows[camera].x = 0;
        search_windows[camera].y = 0;
        search_windows[camera].width = image_size.width;
        search_windows[camera].height = image_size.height;
      }
    }
  }

  if (sample_from_sample_square) {
    /* Resample the model histogram from the sample square's pixels */
    cvSetImageROI(images[sample_square_camera], sample_square);
    cvSetImageROI(planes[0], sample_square);
    cvSetImageROI(planes[1], sample_square);
    cvSetImageROI(planes[2], sample_square);
    cvCvtPixToPlane(images[sample_square_camera], planes[0], planes[1], planes[2], 0);
    cvCalcHist(planes, histograms[sample_square_camera], 1);
    cvResetImageROI(images[sample_square_camera]);
    cvResetImageROI(planes[0]);
    cvResetImageROI(planes[1]);
    cvResetImageROI(planes[2]);

    /* Also set the windowIn to the sampling rectangle, to point CAMSHIFT at
     * what we're interested in. */
    search_windows[sample_square_camera] = sample_square;
  }

  if (show_ui) {
    /* Tile cameras horizontally, with original image on
     * top and backprojection on bottom. */
    IplImage* view_grid[n_cameras * 2];
    int num_views = 0;
    for (int i=0; i<n_cameras; i++)
      view_grid[num_views++] = images[i];
    if (show_backprojections) {
      for (int i=0; i<n_cameras; i++)
        view_grid[num_views++] = yuv_backprojections[i];
    }
    if (draw_sample_square) {
      /* FIX: the bottom-right corner's y-coordinate used sample_square.width;
       * use height so non-square sample regions are outlined correctly
       * (identical result when the region really is square). */
      cvRectangle(images[sample_square_camera],
                  cvPoint(sample_square.x-1, sample_square.y-1),
                  cvPoint(sample_square.x + sample_square.width + 1,
                          sample_square.y + sample_square.height + 1),
                  CV_RGB(128,128,255), 1);
    }
    cv_sdl_show_yuv_tiles(view_grid, num_views, n_cameras);
  }
  return results;
}
int cam() //calling main
{
    // Skin-color mouse controller: subtract a static background from the
    // camera frame, back-project a skin histogram, locate the hand contour,
    // and drive the X11 pointer from its centroid (dwell-to-click).
    int hdims = 16;    // number of hue histogram bins
    printf("I am main");
    CvCapture* capture = cvCreateCameraCapture(1); //determining usb camera
    CvHistogram *hist = 0;
    CvMemStorage* g_storage = NULL;
    Display *display=construct_display();
    int x,y, tmpx=0, tmpy=0, chk=0;    // pointer position, smoothing, dwell counter
    IplImage* image=0;
    IplImage* lastimage1=0;
    IplImage* lastimage=0;
    IplImage* diffimage;
    IplImage* bitimage;
    IplImage* src=0,*hsv=0,*hue=0,*backproject=0;
    IplImage* hsv1=0,*hue1=0,*histimg=0,*frame=0,*edge=0;
    float* hranges;
    cvNamedWindow( "CA", CV_WINDOW_AUTOSIZE ); //display window 3
    //Calculation of Histogram//
    cvReleaseImage(&src);
    src= cvLoadImage("images/skin.jpg"); //taking patch
    while(1)
    {
        frame = cvQueryFrame( capture ); //taking frame by frame for image prcessing
        int j=0;
        float avgx=0;
        float avgy=0;
        if( !frame ) break;
        //#########################Background Substraction#########################//
        if(!image)
        {
            // First frame: allocate the grayscale working buffers.
            image=cvCreateImage(cvSize(frame->width,frame->height),frame->depth,1);
            bitimage=cvCreateImage(cvSize(frame->width,frame->height),frame->depth,1);
            diffimage=cvCreateImage(cvSize(frame->width,frame->height),frame->depth,1);
            lastimage=cvCreateImage(cvSize(frame->width,frame->height),frame->depth,1);
        }
        cvCvtColor(frame,image,CV_BGR2GRAY);
        if(!lastimage1)
        {
            lastimage1=cvLoadImage("images/img.jpg");  // static background reference
        }
        cvCvtColor(lastimage1,lastimage,CV_BGR2GRAY);
        cvAbsDiff(image,lastimage,diffimage);
        cvThreshold(diffimage,bitimage,65,225,CV_THRESH_BINARY);
        cvInRangeS(bitimage,cvScalar(0),cvScalar(30),bitimage);
        cvSet(frame,cvScalar(0,0,0),bitimage);  // black out unchanged (background) pixels
        // Skin histogram from the reference patch (rebuilt every frame).
        cvReleaseImage(&hsv);
        hsv= cvCreateImage( cvGetSize(src), 8, 3 );
        cvReleaseImage(&hue);
        hue= cvCreateImage( cvGetSize(src), 8, 1);
        cvCvtColor(src,hsv,CV_BGR2HSV);
        cvSplit(hsv,hue,0,0,0);
        float hranges_arr[] = {0,180};
        hranges = hranges_arr;
        // NOTE(review): a new histogram is created every frame and never
        // released -- per-frame memory leak; hoist out of the loop or
        // cvReleaseHist at the end of each iteration.
        hist = cvCreateHist( 1, &hdims, CV_HIST_ARRAY, &hranges, 1 );
        cvCalcHist(&hue, hist, 0, 0 );
        cvThreshHist( hist, 100 );
        //#############################Display histogram##############################//
        cvReleaseImage(&histimg);
        histimg = cvCreateImage( cvSize(320,200), 8, 3 );
        cvZero( histimg );
        int bin_w = histimg->width / hdims;
        //#### Calculating the Probablity of Finding the skin with in-built method ###//
        if(0)  // NOTE(review): dead code; free() on IplImage* would be wrong anyway
        {
            free (backproject);
            free (hsv1);
            free (hue1);
        }
        cvReleaseImage(&backproject);
        backproject= cvCreateImage( cvGetSize(frame), 8, 1 );
        cvReleaseImage(&hsv1);
        hsv1 = cvCreateImage( cvGetSize(frame), 8, 3);
        cvReleaseImage(&hue1);
        hue1 = cvCreateImage( cvGetSize(frame), 8, 1);
        cvCvtColor(frame,hsv1,CV_BGR2HSV);
        cvSplit(hsv1,hue1,0,0,0);
        cvCalcBackProject( &hue1, backproject, hist );
        cvSmooth(backproject,backproject,CV_GAUSSIAN);
        cvSmooth(backproject,backproject,CV_MEDIAN);
        if( g_storage == NULL ) g_storage = cvCreateMemStorage(0);
        else cvClearMemStorage( g_storage );
        CvSeq* contours=0;
        CvSeq* result =0;
        cvFindContours(backproject, g_storage, &contours );
        if(contours)
        {
            result=cvApproxPoly(contours, sizeof(CvContour), g_storage, CV_POLY_APPROX_DP, 7, 1);
        }
        cvZero( backproject);
        // Accumulate the centroid of all sufficiently large contours.
        for( ; result != 0; result = result->h_next )
        {
            double area = cvContourArea( result );
            cvDrawContours( backproject,result, CV_RGB(255,255, 255), CV_RGB(255,0, 255) , -1,CV_FILLED, 8 );
            for( int i=1; i<=result-> total; i++ )
            {
                if(i>=1 and abs(area)>300)
                {
                    CvPoint* p2 = CV_GET_SEQ_ELEM( CvPoint, result, i );
                    if(1)
                    {
                        avgx=avgx+p2->x;
                        avgy=avgy+p2->y;
                        j=j+1;
                        cvCircle(backproject,cvPoint(p2->x,p2->y ),10, cvScalar(255,255,255));
                    }
                }
            }
        }
        // NOTE(review): when no contour point is accepted, j stays 0 and the
        // divisions below divide by zero.
        cvCircle( backproject, cvPoint(avgx/j, avgy/j ), 40, cvScalar(255,255,255) );
        x = ( avgx/j );
        y = ( avgy/j );
        // Map 640x480 camera coordinates to the (larger) screen, with offset.
        x=( (x*1240)/640 )-20;
        y=( (y*840)/480 )-20;
        // Only move the pointer on significant motion; otherwise count dwell.
        if ( (abs(tmpx-x)>6 or abs(tmpy-y)>6 ) and j )
        {
            tmpx = x;
            tmpy = y;
            chk=0;
        }
        else chk++;
        mouse_move1( tmpx, tmpy, display );
        if ( chk==10 )  // pointer held still for 10 frames -> double click
        {
            mouse_click( 5, 2, display );
            mouse_click( 5, 3, display );
        }
        cvSaveImage( "final.jpg", frame );
        cvSaveImage( "final1.jpg", backproject );
        cvShowImage( "CA", backproject );
        char c = cvWaitKey(33);
        if( c == 27 ) break; //function break and destroying windows if press <escape> key
    }
    // NOTE(review): int function falls off the end without a return value.
    cvReleaseCapture( &capture );
    cvDestroyWindow( "CA" );
}
// One CamShift tracking step (Android/JNI variant with LOGE tracing).
// flag==0 (re)initializes the histogram from a (2R x 2R) box centered on
// (Cx, Cy); subsequent calls track from the previous window.  Always
// returns 0.  All tracker state lives in file-scope globals.
int track( IplImage* frame, int flag,int Cx,int Cy,int R )
{
    {
        int i, bin_w, c;   // NOTE(review): `c` is unused
        LOGE("#######################Check1############################");
        if( !image )
        {
            /* allocate all the buffers on first call */
            image = cvCreateImage( cvGetSize(frame), 8, 3 );
            image->origin = frame->origin;
            hsv = cvCreateImage( cvGetSize(frame), 8, 3 );
            hue = cvCreateImage( cvGetSize(frame), 8, 1 );
            mask = cvCreateImage( cvGetSize(frame), 8, 1 );
            backproject = cvCreateImage( cvGetSize(frame), 8, 1 );
            hist = cvCreateHist( 1, &hdims, CV_HIST_ARRAY, &hranges, 1 );
            histimg = cvCreateImage( cvSize(320,200), 8, 3 );
            cvZero( histimg );
            LOGE("######################Check2###########################");
        }
        cvCopy( frame, image, 0 );
        cvCvtColor( image, hsv, CV_BGR2HSV );
        {
            // Saturation/value gate, then isolate the hue plane.
            int _vmin = vmin, _vmax = vmax;
            cvInRangeS( hsv, cvScalar(0,smin,MIN(_vmin,_vmax),0),
                        cvScalar(180,256,MAX(_vmin,_vmax),0), mask );
            cvSplit( hsv, hue, 0, 0, 0 );
            LOGE("###########################Check3######################");
            if(flag==0)
            {
                LOGE("###############Initialized#############################");
                // Build the model histogram from a square of radius R around (Cx, Cy).
                selection.x=Cx-R;
                selection.y=Cy-R;
                selection.height=2*R;
                selection.width=2*R;
                float max_val = 0.f;
                cvSetImageROI( hue, selection );
                cvSetImageROI( mask, selection );
                cvCalcHist( &hue, hist, 0, mask );
                cvGetMinMaxHistValue( hist, 0, &max_val, 0, 0 );
                cvConvertScale( hist->bins, hist->bins, max_val ? 255. / max_val : 0., 0 );
                cvResetImageROI( hue );
                cvResetImageROI( mask );
                track_window = selection;
                track_object = 1;
                // Render the histogram bars for the debug view.
                cvZero( histimg );
                bin_w = histimg->width / hdims;
                for( i = 0; i < hdims; i++ )
                {
                    int val = cvRound( cvGetReal1D(hist->bins,i)*histimg->height/255 );
                    CvScalar color = hsv2rgb(i*180.f/hdims);
                    cvRectangle( histimg, cvPoint(i*bin_w,histimg->height),
                                 cvPoint((i+1)*bin_w,histimg->height - val),
                                 color, -1, 8, 0 );
                }
                LOGE("##############Check4#########################");
            }
            LOGE("##############Check5#########################");
            // Track: back-project, gate by mask, and run one CamShift step.
            cvCalcBackProject( &hue, backproject, hist );
            cvAnd( backproject, mask, backproject, 0 );
            cvCamShift( backproject, track_window,
                        cvTermCriteria( CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1 ),
                        &track_comp, &track_box );
            track_window = track_comp.rect;
            char buffer[50];
            sprintf(buffer,"vals= %d %d and %d",track_window.x,track_window.y,track_window.width);
            LOGE(buffer);
            if( backproject_mode )
                cvCvtColor( backproject, image, CV_GRAY2BGR );
            if( image->origin )
                track_box.angle = -track_box.angle;
            cvEllipseBox( image, track_box, CV_RGB(255,0,0), 3, CV_AA, 0 );
        }
        // Invert the selection rectangle while the user is still selecting.
        if( select_object && selection.width > 0 && selection.height > 0 )
        {
            cvSetImageROI( image, selection );
            cvXorS( image, cvScalarAll(255), image, 0 );
            cvResetImageROI( image );
        }
        LOGE("!!!!!!!!!!!!!!!!!!Done Tracking!!!!!!!!!!!!!!!!!!!!!!!!!!!!");
    }
    return 0;
}
// A Simple Camera Capture Framework
// Grabs frames from camera 0 and, per frame: shows the grayscale histogram
// and its back-projection, applies a trackbar-driven brightness/contrast
// LUT, then extracts and colors blobs within a size range.  ESC quits.
int main()
{
    CvCapture* capture = cvCaptureFromCAM( 0 );
    if( !capture ) {
        fprintf( stderr, "ERROR: capture is NULL \n" );
        return -1;
    }
#ifdef HALF_SIZE_CAPTURE
    cvSetCaptureProperty(capture, CV_CAP_PROP_FRAME_WIDTH, 352/2);
    cvSetCaptureProperty(capture, CV_CAP_PROP_FRAME_HEIGHT, 288/2);
#endif
    // Create a window in which the captured images will be presented
    cvNamedWindow( "Source Image Window", CV_WINDOW_AUTOSIZE );
    cvNamedWindow( "Back Projected Image", CV_WINDOW_AUTOSIZE );
    cvNamedWindow( "Brightness and Contrast Window", CV_WINDOW_AUTOSIZE );
    cvNamedWindow( "Blob Output Window", CV_WINDOW_AUTOSIZE );
    cvNamedWindow( "Histogram Window", 0);
    cvNamedWindow( "Rainbow Window", CV_WINDOW_AUTOSIZE );
    // Capture one frame to get image attributes:
    source_frame = cvQueryFrame( capture );
    if( !source_frame ) {
        fprintf( stderr, "ERROR: frame is null...\n" );
        return -1;
    }
    // Trackbars write straight into the globals they are bound to.
    cvCreateTrackbar("histogram\nnormalization", "Back Projected Image", &normalization_sum, 6000, NULL);
    cvCreateTrackbar("brightness", "Brightness and Contrast Window", &_brightness, 200, NULL);
    cvCreateTrackbar("contrast", "Brightness and Contrast Window", &_contrast, 200, NULL);
    cvCreateTrackbar("threshold", "Blob Output Window", &blob_extraction_threshold, 255, NULL);
    cvCreateTrackbar("min blob size", "Blob Output Window", &min_blob_size, 2000, NULL);
    cvCreateTrackbar("max blob size", "Blob Output Window", &max_blob_size, source_frame->width*source_frame->height/4, NULL);
    inputImage = cvCreateImage(cvGetSize(source_frame), IPL_DEPTH_8U, 1);
    histAdjustedImage = cvCreateImage(cvGetSize(source_frame), IPL_DEPTH_8U, 1);
    outputImage = cvCreateImage(cvGetSize(source_frame), IPL_DEPTH_8U, 3 );
    hist_image = cvCreateImage(cvSize(320,200), 8, 1);
    rainbowImage = cvCreateImage(cvGetSize(source_frame), IPL_DEPTH_8U, 3 );
    // object that will contain blobs of inputImage
    CBlobResult blobs;
    CBlob my_enumerated_blob;
    cvInitFont(&font, CV_FONT_HERSHEY_SIMPLEX|CV_FONT_ITALIC, hScale, vScale, 0, lineWidth);
    // Some brightness/contrast stuff:
    bright_cont_image = cvCloneImage(inputImage);
    lut_mat = cvCreateMatHeader( 1, 256, CV_8UC1 );
    cvSetData( lut_mat, lut, 0 );
    while( 1 )
    {
        // Get one frame
        source_frame = cvQueryFrame( capture );
        if( !source_frame ) {
            fprintf( stderr, "ERROR: frame is null...\n" );
            getchar();
            break;
        }
        cvShowImage( "Source Image Window", source_frame );
        // Do not release the frame!
        cvCvtColor(source_frame, inputImage, CV_RGB2GRAY);
        // Histogram Stuff!
        // NOTE(review): a new histogram is allocated every frame and never
        // released -- per-frame memory leak.
        my_hist = cvCreateHist(1, hist_size_array, CV_HIST_ARRAY, ranges, 1);
        cvCalcHist( &inputImage, my_hist, 0, NULL );
        cvNormalizeHist(my_hist, normalization_sum);
        // NOTE: First argument MUST have an ampersand, or a segmentation fault will result
        cvCalcBackProject(&inputImage, histAdjustedImage, my_hist);
        // Histogram Picture
        int bin_w;
        float max_value = 0;
        cvGetMinMaxHistValue( my_hist, 0, &max_value, 0, 0 );
        cvScale( my_hist->bins, my_hist->bins, ((double)hist_image->height)/max_value, 0 );
        cvSet( hist_image, cvScalarAll(255), 0 );
        bin_w = cvRound((double)hist_image->width/hist_size);
        for(int i = 0; i < hist_size; i++ )
            cvRectangle( hist_image, cvPoint(i*bin_w, hist_image->height),
                         cvPoint((i+1)*bin_w, hist_image->height - cvRound(cvGetReal1D(my_hist->bins,i))),
                         cvScalarAll(0), -1, 8, 0 );
        cvShowImage( "Histogram Window", hist_image );
        cvShowImage("Back Projected Image", histAdjustedImage);
        // Brightness/contrast loop stuff:
        int brightness = _brightness - 100;
        int contrast = _contrast - 100;
        /*
         * The algorithm is by Werner D. Streidt
         * (http://visca.com/ffactory/archives/5-99/msg00021.html)
         */
        if( contrast > 0 )
        {
            double delta = 127.*contrast/100;
            double a = 255./(255. - delta*2);
            double b = a*(brightness - delta);
            for(int i = 0; i < 256; i++ )
            {
                int v = cvRound(a*i + b);
                if( v < 0 ) v = 0;
                if( v > 255 ) v = 255;
                lut[i] = (uchar)v;
            }
        }
        else
        {
            double delta = -128.*contrast/100;
            double a = (256.-delta*2)/255.;
            double b = a*brightness + delta;
            for(int i = 0; i < 256; i++ )
            {
                int v = cvRound(a*i + b);
                if( v < 0 ) v = 0;
                if( v > 255 ) v = 255;
                lut[i] = (uchar)v;
            }
        }
        cvLUT( inputImage, bright_cont_image, lut_mat );
        cvShowImage( "Brightness and Contrast Window", bright_cont_image);
        // ---------------
        // Blob Manipulation Code begins here:
        // Extract the blobs using a threshold of 100 in the image
        blobs = CBlobResult( bright_cont_image, NULL, blob_extraction_threshold, true );
        // discard the blobs with less area than 5000 pixels
        // ( the criteria to filter can be any class derived from COperadorBlob )
        blobs.Filter( blobs, B_INCLUDE, CBlobGetArea(), B_GREATER_OR_EQUAL, min_blob_size);
        blobs.Filter( blobs, B_EXCLUDE, CBlobGetArea(), B_GREATER, max_blob_size);
        // build an output image equal to the input but with 3 channels (to draw the coloured blobs)
        cvMerge( bright_cont_image, bright_cont_image, bright_cont_image, NULL, outputImage );
        // plot the selected blobs in a output image
        for (int i=0; i < blobs.GetNumBlobs(); i++)
        {
            blobs.GetNthBlob( CBlobGetArea(), i, my_enumerated_blob );
            // Color 5/6 of the color wheel (300 degrees)
            my_enumerated_blob.FillBlob( outputImage, cv_hsv2rgb((float)i/blobs.GetNumBlobs() * 300, 1, 1));
        }
        // END Blob Manipulation Code
        // ---------------
        sprintf(str, "Count: %d", blobs.GetNumBlobs());
        cvPutText(outputImage, str, cvPoint(50, 25), &font, cvScalar(255,0,255));
        cvShowImage("Blob Output Window", outputImage);
        /*
        // Rainbow manipulation:
        for (int i=0; i < CV_CAP_PROP_FRAME_WIDTH; i++)
        {
            for (int j=0; j < CV_CAP_PROP_FRAME_HEIGHT; j++)
            {
                // This line is not figure out yet...
                // pixel_color_set = ((uchar*)(rainbowImage->imageData + rainbowImage->widthStep * j))[i * 3]
                ((uchar*)(rainbowImage->imageData + rainbowImage->widthStep * j))[i * 3] = 30;
                ((uchar*)(rainbowImage->imageData + rainbowImage->widthStep * j))[i * 3 + 1] = 30;
                ((uchar*)(rainbowImage->imageData + rainbowImage->widthStep * j))[i * 3 + 2] = 30;
            }
        }
        cvShowImage("Rainbow Window", rainbowImage);
        */
        //If ESC key pressed, Key=0x10001B under OpenCV 0.9.7(linux version),
        //remove higher bits using AND operator
        if( (cvWaitKey(10) & 255) == 27 ) break;
    }
    cvReleaseImage(&inputImage);
    cvReleaseImage(&histAdjustedImage);
    cvReleaseImage(&hist_image);
    cvReleaseImage(&bright_cont_image);
    cvReleaseImage(&outputImage);
    cvReleaseImage(&rainbowImage);
    // Release the capture device housekeeping
    cvReleaseCapture( &capture );
    cvDestroyAllWindows();
    return 0;
}
int main(int argc, char* argv[]) { // Set up images IplImage* img = cvLoadImage("airplane.jpg"); IplImage* back_img = cvCreateImage( cvGetSize( img ), IPL_DEPTH_8U, 1 ); // Compute HSV image and separate into colors IplImage* hsv = cvCreateImage( cvGetSize(img), IPL_DEPTH_8U, 3 ); cvCvtColor( img, hsv, CV_BGR2HSV ); IplImage* h_plane = cvCreateImage( cvGetSize( img ), 8, 1 ); IplImage* s_plane = cvCreateImage( cvGetSize( img ), 8, 1 ); IplImage* v_plane = cvCreateImage( cvGetSize( img ), 8, 1 ); IplImage* planes[] = { h_plane, s_plane }; cvCvtPixToPlane( hsv, h_plane, s_plane, v_plane, 0 ); // Build and fill the histogram int h_bins = 30, s_bins = 32; CvHistogram* hist; { int hist_size[] = { h_bins, s_bins }; float h_ranges[] = { 0, 180 }; float s_ranges[] = { 0, 255 }; float* ranges[] = { h_ranges, s_ranges }; hist = cvCreateHist( 2, hist_size, CV_HIST_ARRAY, ranges, 1 ); } cvCalcHist( planes, hist, 0, 0 ); // Compute histogram cvNormalizeHist( hist, 20*255 ); // Normalize it cvCalcBackProject( planes, back_img, hist );// Calculate back projection cvNormalizeHist( hist, 1.0 ); // Normalize it // Create an image to visualize the histogram int scale = 10; IplImage* hist_img = cvCreateImage( cvSize( h_bins * scale, s_bins * scale ), 8, 3 ); cvZero ( hist_img ); // populate the visualization float max_value = 0; cvGetMinMaxHistValue( hist, 0, &max_value, 0, 0 ); for( int h = 0; h < h_bins; h++ ){ for( int s = 0; s < s_bins; s++ ){ float bin_val = cvQueryHistValue_2D( hist, h, s ); int intensity = cvRound( bin_val * 255 / max_value ); cvRectangle( hist_img, cvPoint( h*scale, s*scale ), cvPoint( (h+1)*scale - 1, (s+1)*scale - 1 ), CV_RGB( intensity, intensity, intensity ), CV_FILLED ); } } // Show original cvNamedWindow( "Source", 1) ; cvShowImage( "Source", img ); // Show back projection cvNamedWindow( "Back Projection", 1) ; cvShowImage( "Back Projection", back_img ); // Show histogram equalized cvNamedWindow( "H-S Histogram", 1) ; cvShowImage( "H-S Histogram", 
hist_img ); cvWaitKey(0); cvReleaseImage( &img ); cvReleaseImage( &back_img ); cvReleaseImage( &hist_img ); return 0; }
//========================================= CvRect camKalTrack(IplImage* frame, camshift_kalman_tracker& camKalTrk) { //========================================= if (!frame) printf("Input frame empty!\n"); cvCopy(frame, camKalTrk.image, 0); cvCvtColor(camKalTrk.image, camKalTrk.hsv, CV_BGR2HSV); // BGR to HSV if (camKalTrk.trackObject) { int _vmin = vmin, _vmax = vmax; cvInRangeS(camKalTrk.hsv, cvScalar(0, smin, MIN(_vmin,_vmax), 0), cvScalar(180, 256, MAX(_vmin,_vmax), 0), camKalTrk.mask); // MASK cvSplit(camKalTrk.hsv, camKalTrk.hue, 0, 0, 0); // HUE if (camKalTrk.trackObject < 0) { float max_val = 0.f; boundaryCheck(camKalTrk.originBox, frame->width, frame->height); cvSetImageROI(camKalTrk.hue, camKalTrk.originBox); // for ROI cvSetImageROI(camKalTrk.mask, camKalTrk.originBox); // for camKalTrk.mask cvCalcHist(&camKalTrk.hue, camKalTrk.hist, 0, camKalTrk.mask); // cvGetMinMaxHistValue(camKalTrk.hist, 0, &max_val, 0, 0); cvConvertScale(camKalTrk.hist->bins, camKalTrk.hist->bins, max_val ? 255. 
/ max_val : 0., 0); // bin [0,255] cvResetImageROI(camKalTrk.hue); // remove ROI cvResetImageROI(camKalTrk.mask); camKalTrk.trackWindow = camKalTrk.originBox; camKalTrk.trackObject = 1; camKalTrk.lastpoint = camKalTrk.predictpoint = cvPoint(camKalTrk.trackWindow.x + camKalTrk.trackWindow.width / 2, camKalTrk.trackWindow.y + camKalTrk.trackWindow.height / 2); getCurrState(camKalTrk.kalman, camKalTrk.lastpoint, camKalTrk.predictpoint);//input curent state } //(x,y,vx,vy), camKalTrk.prediction = cvKalmanPredict(camKalTrk.kalman, 0);//predicton=kalman->state_post camKalTrk.predictpoint = cvPoint(cvRound(camKalTrk.prediction->data.fl[0]), cvRound(camKalTrk.prediction->data.fl[1])); camKalTrk.trackWindow = cvRect(camKalTrk.predictpoint.x - camKalTrk.trackWindow.width / 2, camKalTrk.predictpoint.y - camKalTrk.trackWindow.height / 2, camKalTrk.trackWindow.width, camKalTrk.trackWindow.height); camKalTrk.trackWindow = checkRectBoundary(cvRect(0, 0, frame->width, frame->height), camKalTrk.trackWindow); camKalTrk.searchWindow = cvRect(camKalTrk.trackWindow.x - region, camKalTrk.trackWindow.y - region, camKalTrk.trackWindow.width + 2 * region, camKalTrk.trackWindow.height + 2 * region); camKalTrk.searchWindow = checkRectBoundary(cvRect(0, 0, frame->width, frame->height), camKalTrk.searchWindow); cvSetImageROI(camKalTrk.hue, camKalTrk.searchWindow); cvSetImageROI(camKalTrk.mask, camKalTrk.searchWindow); cvSetImageROI(camKalTrk.backproject, camKalTrk.searchWindow); cvCalcBackProject( &camKalTrk.hue, camKalTrk.backproject, camKalTrk.hist ); // back project cvAnd(camKalTrk.backproject, camKalTrk.mask, camKalTrk.backproject, 0); camKalTrk.trackWindow = cvRect(region, region, camKalTrk.trackWindow.width, camKalTrk.trackWindow.height); if (camKalTrk.trackWindow.height > 5 && camKalTrk.trackWindow.width > 5) { // calling CAMSHIFT cvCamShift(camKalTrk.backproject, camKalTrk.trackWindow, cvTermCriteria(CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1), &camKalTrk.trackComp, 
&camKalTrk.trackBox); /*cvMeanShift( camKalTrk.backproject, camKalTrk.trackWindow, cvTermCriteria( CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1 ), &camKalTrk.trackComp);*/ } else { camKalTrk.trackComp.rect.x = 0; camKalTrk.trackComp.rect.y = 0; camKalTrk.trackComp.rect.width = 0; camKalTrk.trackComp.rect.height = 0; } cvResetImageROI(camKalTrk.hue); cvResetImageROI(camKalTrk.mask); cvResetImageROI(camKalTrk.backproject); camKalTrk.trackWindow = camKalTrk.trackComp.rect; camKalTrk.trackWindow = cvRect(camKalTrk.trackWindow.x + camKalTrk.searchWindow.x, camKalTrk.trackWindow.y + camKalTrk.searchWindow.y, camKalTrk.trackWindow.width, camKalTrk.trackWindow.height); camKalTrk.measurepoint = cvPoint(camKalTrk.trackWindow.x + camKalTrk.trackWindow.width / 2, camKalTrk.trackWindow.y + camKalTrk.trackWindow.height / 2); camKalTrk.realposition->data.fl[0] = camKalTrk.measurepoint.x; camKalTrk.realposition->data.fl[1] = camKalTrk.measurepoint.y; camKalTrk.realposition->data.fl[2] = camKalTrk.measurepoint.x - camKalTrk.lastpoint.x; camKalTrk.realposition->data.fl[3] = camKalTrk.measurepoint.y - camKalTrk.lastpoint.y; camKalTrk.lastpoint = camKalTrk.measurepoint;//keep the current real position //measurement x,y cvMatMulAdd( camKalTrk.kalman->measurement_matrix/*2x4*/, camKalTrk.realposition/*4x1*/,/*measurementstate*/0, camKalTrk.measurement ); cvKalmanCorrect(camKalTrk.kalman, camKalTrk.measurement); cvRectangle(frame, cvPoint(camKalTrk.trackWindow.x, camKalTrk.trackWindow.y), cvPoint(camKalTrk.trackWindow.x + camKalTrk.trackWindow.width, camKalTrk.trackWindow.y + camKalTrk.trackWindow.height), CV_RGB(255,128,0), 4, 8, 0); } // set new selection if it exists if (camKalTrk.selectObject && camKalTrk.selection.width > 0 && camKalTrk.selection.height > 0) { cvSetImageROI(camKalTrk.image, camKalTrk.selection); cvXorS(camKalTrk.image, cvScalarAll(255), camKalTrk.image, 0); cvResetImageROI(camKalTrk.image); } return camKalTrk.trackWindow; }
void detectSkinColorArea(IplImage* srcImage_hsv, IplImage** skinColorAreaImage, CvHistogram* hist, CvSeq** convers, double* v_min, double* v_max) { CvMemStorage* storage = cvCreateMemStorage(0); CvSize size = cvGetSize(srcImage_hsv); IplImage* dstImage = cvCreateImage(size, IPL_DEPTH_8U, 1); cvZero(dstImage); IplImage* backProjectImage = cvCreateImage(size, IPL_DEPTH_8U, 1); IplImage* maskImage = cvCreateImage(size, IPL_DEPTH_8U, 1); { IplImage* h_plane = cvCreateImage(size, IPL_DEPTH_8U,1); IplImage* s_plane = cvCreateImage(size, IPL_DEPTH_8U,1); IplImage* v_plane = cvCreateImage(size, IPL_DEPTH_8U,1); IplImage* planes[] = {h_plane, s_plane}; cvCvtPixToPlane(srcImage_hsv, h_plane, s_plane, v_plane, NULL); cvCalcBackProject(planes, backProjectImage, hist); cvThreshold(v_plane, maskImage, *v_min, *v_max, CV_THRESH_BINARY); cvAnd(backProjectImage, maskImage, backProjectImage); cvReleaseImage(&h_plane); cvReleaseImage(&s_plane); cvReleaseImage(&v_plane); } CvSeq* contours = NULL; { cvThreshold(backProjectImage, dstImage, 10,255, CV_THRESH_BINARY); // cvThreshold(imgBackproj, dst_image, 40,255, CV_THRESH_BINARY); cvErode(dstImage, dstImage, NULL, 1); cvDilate(dstImage, dstImage, NULL, 1); cvFindContours(dstImage, storage, &contours); CvSeq* hand_ptr = NULL; double maxArea = -1; for (CvSeq* c= contours; c != NULL; c = c->h_next){ double area = abs(cvContourArea(c, CV_WHOLE_SEQ)); if (maxArea < area) { maxArea = area; hand_ptr = c; } } cvZero(dstImage); if (hand_ptr == NULL) { *skinColorAreaImage = cvCreateImage(cvSize(1, 1), IPL_DEPTH_8U, 1); } else { hand_ptr->h_next = NULL; *convers = hand_ptr; cvDrawContours(dstImage, hand_ptr, cvScalarAll(255), cvScalarAll(0),100); CvRect rect= cvBoundingRect(hand_ptr,0); cvSetImageROI(dstImage, rect); *skinColorAreaImage = cvCreateImage(cvSize(rect.width, rect.height), IPL_DEPTH_8U, 1); cvCopy(dstImage, *skinColorAreaImage); cvResetImageROI(dstImage); } } cvReleaseImage(&backProjectImage); cvReleaseImage(&maskImage); 
cvReleaseImage(&dstImage); cvReleaseMemStorage(&storage); }
// Run one CamShift tracking pass on `image` using `selection` as the initial
// region; optionally draws the fitted ellipse when isIris is true. All working
// buffers (hsv1, hue1, mask1, ...) are (re)allocated per call and released on
// every exit path. Returns the fitted CamShift box.
CvBox2D CamShiftIris::track( IplImage* image, CvRect selection, bool isIris){
    CamShiftIris camshift;
    select_object1=1;
    track_object1=-1;
    origin1=cvPoint(0,0);
    int i, bin_w;
    if( image ){
        /* allocate all the buffers */
        hsv1 = cvCreateImage( cvGetSize(image), 8, 3 );
        h = cvCreateImage( cvGetSize(image), 8, 1 );
        s = cvCreateImage( cvGetSize(image), 8, 1 );
        v = cvCreateImage( cvGetSize(image), 8, 1);
        hue1 = cvCreateImage( cvGetSize(image), 8, 1 );
        mask1 = cvCreateImage( cvGetSize(image), 8, 1 );
        backproject1 = cvCreateImage( cvGetSize(image), 8, 1 );
        hist1 = cvCreateHist( 1, &hdims1, CV_HIST_ARRAY, &hranges1, 1 );
        histimg1 = cvCreateImage( cvSize(320,200), 8, 3 );
        cvZero( histimg1 );
    }
    cvCvtColor( image, hsv1, CV_BGR2HSV );
    /////////////////// Equalize V in HSV ///////////
    cvSplit( hsv1, h, s, v, 0 );
    cvEqualizeHist(v,v);
    cvMerge(h,s,v,0,hsv1);
    ///////////////////////////////////////////////
    if( track_object1 !=0 ){
        int _vmin1 = vmin1, _vmax1 = vmax1;
        // Mask out pixels outside the S/V limits.
        cvInRangeS( hsv1, cvScalar(0,smin1,MIN(_vmin1,_vmax1),0),
                    cvScalar(180,256,MAX(_vmin1,_vmax1),0), mask1 );
        cvSplit( hsv1, hue1, 0, 0, 0 );
        if( track_object1 < 0 ){
            // Build the hue histogram from the selection and draw it.
            float max_val = 0.f;
            cvSetImageROI( hue1, selection );
            cvSetImageROI( mask1, selection );
            cvCalcHist( &hue1, hist1, 0, mask1 );
            cvGetMinMaxHistValue( hist1, 0, &max_val, 0, 0 );
            cvConvertScale( hist1->bins, hist1->bins, max_val ? 255. / max_val : 0., 0 );
            cvResetImageROI( hue1 );
            cvResetImageROI( mask1 );
            track_window1 = selection;
            track_object1 = 1;
            cvZero( histimg1 );
            bin_w = histimg1->width / hdims1;
            for( i = 0; i < hdims1; i++ ) {
                int val = cvRound( cvGetReal1D(hist1->bins,i)*histimg1->height/255 );
                CvScalar color = camshift.hsvrgb(i*180.f/hdims1);
                cvRectangle( histimg1, cvPoint(i*bin_w,histimg1->height),
                             cvPoint((i+1)*bin_w,histimg1->height - val),
                             color, -1, 8, 0 );
            }
        }
        cvCalcBackProject( &hue1, backproject1, hist1);
        cvAnd( backproject1, mask1, backproject1, 0 );
        try{
            cvCamShift( backproject1, track_window1,
                        cvTermCriteria( CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1 ),
                        &track_comp1, &track_box1 );
        }catch(...){
            // BUG FIX: the original released the buffers here and then FELL
            // THROUGH, continuing to use the released (NULLed) images below.
            // Release and return the last known box instead.
            cvReleaseImage(&hsv1);
            cvReleaseImage(&h);
            cvReleaseImage(&s);
            cvReleaseImage(&v);
            cvReleaseImage(&hue1);
            cvReleaseImage(&mask1);
            cvReleaseImage(&backproject1);
            cvReleaseHist(&hist1);
            cvReleaseImage(&histimg1);
            return track_box1;
        }
        track_window1 = track_comp1.rect;
        if( backproject1_mode )
            cvCvtColor( backproject1, image, CV_GRAY2BGR );
        if( !image->origin )
            track_box1.angle = -track_box1.angle;
        if(isIris)
            cvEllipseBox( image, track_box1, CV_RGB(255,0,0), 3, CV_AA, 0 );
    }
    cvShowImage( "CamShift Tracking", image );
    //cvShowImage( "Histogram", histimg1 );
    cvReleaseImage(&hsv1);
    cvReleaseImage(&h);
    cvReleaseImage(&s);
    cvReleaseImage(&v);
    cvReleaseImage(&hue1);
    cvReleaseImage(&mask1);
    cvReleaseImage(&backproject1);
    cvReleaseHist(&hist1);
    cvReleaseImage(&histimg1);
    return track_box1;
}
static int aGestureRecognition(void) { IplImage *image, *imagew, *image_rez, *mask_rez, *image_hsv, *img_p[2],*img_v, *init_mask_ver = 0, *final_mask_ver = 0; CvPoint3D32f *pp, p; CvPoint pt; CvSize2D32f fsize; CvPoint3D32f center, cf; IplImage *image_mask, *image_maskw; CvSize size; CvHistogram *hist, *hist_mask; int width, height; int k_points, k_indexs; int warpFlag, interpolate; int hdim[2] = {20, 20}; double coeffs[3][3], rect[2][2], rez = 0, eps_rez = 2.5, rez_h; float *thresh[2]; float hv[3]; float reps, aeps, ww; float line[6], in[3][3], h[3][3]; float cx, cy, fx, fy; static char num[4]; char *name_image; char *name_range_image; char *name_verify_data; char *name_init_mask_very; char *name_final_mask_very; CvSeq *numbers; CvSeq *points; CvSeq *indexs; CvMemStorage *storage; CvRect hand_roi, hand_roi_trans; int i,j, lsize, block_size = 1000, flag; int code; FILE *filin, *fil_ver; /* read tests params */ code = TRS_OK; /* define input information */ strcpy (num, "001"); lsize = strlen(data_path)+12; name_verify_data = (char*)trsmAlloc(lsize); name_range_image = (char*)trsmAlloc(lsize); name_image = (char*)trsmAlloc(lsize); name_init_mask_very = (char*)trsmAlloc(lsize); name_final_mask_very = (char*)trsmAlloc(lsize); /* define input range_image file path */ strcpy(name_range_image, data_path); strcat(name_range_image, "rpts"); strcat(name_range_image, num); strcat(name_range_image, ".txt"); /* define input image file path */ strcpy(name_image, data_path); strcat(name_image, "real"); strcat(name_image, num); strcat(name_image, ".bmp"); /* define verify data file path */ strcpy(name_verify_data, data_path); strcat(name_verify_data, "very"); strcat(name_verify_data, num); strcat(name_verify_data, ".txt"); /* define verify init mask file path */ strcpy(name_init_mask_very, data_path); strcat(name_init_mask_very, "imas"); strcat(name_init_mask_very, num); strcat(name_init_mask_very, ".bmp"); /* define verify final mask file path */ strcpy(name_final_mask_very, 
data_path); strcat(name_final_mask_very, "fmas"); strcat(name_final_mask_very, num); strcat(name_final_mask_very, ".bmp"); filin = fopen(name_range_image,"r"); fil_ver = fopen(name_verify_data,"r"); fscanf( filin, "\n%d %d\n", &width, &height); printf("width=%d height=%d reading testing data...", width,height); OPENCV_CALL( storage = cvCreateMemStorage ( block_size ) ); OPENCV_CALL( points = cvCreateSeq( CV_SEQ_POINT3D_SET, sizeof(CvSeq), sizeof(CvPoint3D32f), storage ) ); OPENCV_CALL (indexs = cvCreateSeq( CV_SEQ_POINT_SET, sizeof(CvSeq), sizeof(CvPoint), storage ) ); pp = 0; /* read input image from file */ image = atsCreateImageFromFile( name_image ); if(image == NULL) {code = TRS_FAIL; goto m_exit;} /* read input 3D points from input file */ for (i = 0; i < height; i++) { for (j = 0; j < width; j++) { fscanf( filin, "%f %f %f\n", &p.x, &p.y, &p.z); if(/*p.x != 0 || p.y != 0 ||*/ p.z != 0) { OPENCV_CALL(cvSeqPush(points, &p)); pt.x = j; pt.y = i; OPENCV_CALL(cvSeqPush(indexs, &pt)); } } } k_points = points->total; k_indexs = indexs->total; /* convert sequence to array */ pp = (CvPoint3D32f*)trsmAlloc(k_points * sizeof(CvPoint3D32f)); OPENCV_CALL(cvCvtSeqToArray(points, pp )); /* find 3D-line */ reps = (float)0.1; aeps = (float)0.1; ww = (float)0.08; OPENCV_CALL( cvFitLine3D(pp, k_points, CV_DIST_WELSCH, &ww, reps, aeps, line )); /* find hand location */ flag = -1; fsize.width = fsize.height = (float)0.22; // (hand size in m) numbers = NULL; OPENCV_CALL( cvFindHandRegion (pp, k_points, indexs,line, fsize, flag,¢er,storage, &numbers)); /* read verify data */ fscanf( fil_ver, "%f %f %f\n", &cf.x, &cf.y, &cf.z); rez+= cvSqrt((center.x - cf.x)*(center.x - cf.x)+(center.y - cf.y)*(center.y - cf.y)+ (center.z - cf.z)*(center.z - cf.z))/3.; /* create hand mask */ size.height = height; size.width = width; OPENCV_CALL( image_mask = cvCreateImage(size, IPL_DEPTH_8U, 1) ); OPENCV_CALL( cvCreateHandMask(numbers, image_mask, &hand_roi) ); /* read verify initial image mask */ 
init_mask_ver = atsCreateImageFromFile( name_init_mask_very ); if(init_mask_ver == NULL) {code = TRS_FAIL; goto m_exit;} rez+= iplNorm(init_mask_ver, image_mask, IPL_L2) / (width*height+0.); /* calculate homographic transformation matrix */ cx = (float)(width / 2.); cy = (float)(height / 2.); fx = fy = (float)571.2048; /* define intrinsic camera parameters */ in[0][1] = in[1][0] = in[2][0] = in[2][1] = 0; in[0][0] = fx; in[0][2] = cx; in[1][1] = fy; in[1][2] = cy; in[2][2] = 1; OPENCV_CALL( cvCalcImageHomography(line, ¢er, in, h) ); rez_h = 0; for(i=0;i<3;i++) { fscanf( fil_ver, "%f %f %f\n", &hv[0], &hv[1], &hv[2]); for(j=0;j<3;j++) { rez_h+=(hv[j] - h[i][j])*(hv[j] - h[i][j]); } } rez+=sqrt(rez_h)/9.; /* image unwarping */ size.width = image->width; size.height = image->height; OPENCV_CALL( imagew = cvCreateImage(size, IPL_DEPTH_8U,3) ); OPENCV_CALL( image_maskw = cvCreateImage(size, IPL_DEPTH_8U,1) ); iplSet(image_maskw, 0); cvSetImageROI(image, hand_roi); cvSetImageROI(image_mask, hand_roi); /* convert homographic transformation matrix from float to double */ for(i=0;i<3;i++) for(j=0;j<3;j++) coeffs[i][j] = (double)h[i][j]; /* get bounding rectangle for image ROI */ iplGetPerspectiveBound(image, coeffs, rect); width = (int)(rect[1][0] - rect[0][0]); height = (int)(rect[1][1] - rect[0][1]); hand_roi_trans.x = (int)rect[0][0];hand_roi_trans.y = (int)rect[0][1]; hand_roi_trans.width = width; hand_roi_trans.height = height; cvMaxRect(&hand_roi, &hand_roi_trans, &hand_roi); iplSetROI((IplROI*)image->roi, 0, hand_roi.x, hand_roi.y, hand_roi.width,hand_roi.height); iplSetROI((IplROI*)image_mask->roi, 0, hand_roi.x, hand_roi.y, hand_roi.width,hand_roi.height); warpFlag = IPL_WARP_R_TO_Q; /* interpolate = IPL_INTER_CUBIC; */ /* interpolate = IPL_INTER_NN; */ interpolate = IPL_INTER_LINEAR; iplWarpPerspective(image, imagew, coeffs, warpFlag, interpolate); iplWarpPerspective(image_mask, image_maskw, coeffs, warpFlag, IPL_INTER_NN); /* set new image and mask ROI after 
transformation */ iplSetROI((IplROI*)imagew->roi,0, (int)rect[0][0], (int)rect[0][1],(int)width,(int)height); iplSetROI((IplROI*)image_maskw->roi,0, (int)rect[0][0], (int)rect[0][1],(int)width,(int)height); /* copy image ROI to new image and resize */ size.width = width; size.height = height; image_rez = cvCreateImage(size, IPL_DEPTH_8U,3); mask_rez = cvCreateImage(size, IPL_DEPTH_8U,1); iplCopy(imagew,image_rez); iplCopy(image_maskw,mask_rez); /* convert rezult image from RGB to HSV */ image_hsv = iplCreateImageHeader(3, 0, IPL_DEPTH_8U, "HSV", "HSV", IPL_DATA_ORDER_PIXEL, IPL_ORIGIN_TL,IPL_ALIGN_DWORD, image_rez->width, image_rez->height, NULL, NULL, NULL, NULL); iplAllocateImage(image_hsv, 0, 0 ); strcpy(image_rez->colorModel, "RGB"); strcpy(image_rez->channelSeq, "RGB"); image_rez->roi = NULL; iplRGB2HSV(image_rez, image_hsv); /* convert to three images planes */ img_p[0] = cvCreateImage(size, IPL_DEPTH_8U,1); img_p[1] = cvCreateImage(size, IPL_DEPTH_8U,1); img_v = cvCreateImage(size, IPL_DEPTH_8U,1); cvCvtPixToPlane(image_hsv, img_p[0], img_p[1], img_v, NULL); /* calculate histograms */ hist = cvCreateHist ( 2, hdim, CV_HIST_ARRAY); hist_mask = cvCreateHist ( 2, hdim, CV_HIST_ARRAY); /* install histogram threshold */ thresh[0] = (float*) trsmAlloc(2*sizeof(float)); thresh[1] = (float*) trsmAlloc(2*sizeof(float)); thresh[0][0] = thresh[1][0] = -0.5; thresh[0][1] = thresh[1][1] = 255.5; cvSetHistThresh( hist, thresh, 1); cvSetHistThresh( hist_mask, thresh, 1); cvCalcHist(img_p, hist, 0); cvCalcHistMask(img_p, mask_rez, hist_mask, 0); cvCalcProbDensity(hist, hist_mask, hist_mask); cvCalcBackProject( img_p, mask_rez, hist_mask ); /* read verify final image mask */ final_mask_ver = atsCreateImageFromFile( name_final_mask_very ); if(final_mask_ver == NULL) {code = TRS_FAIL; goto m_exit;} rez+= iplNorm(final_mask_ver, mask_rez, IPL_L2) / (width*height+0.); trsWrite( ATS_CON | ATS_SUM, "\n gesture recognition \n"); trsWrite( ATS_CON | ATS_SUM, "result testing error = 
%f \n",rez); if(rez > eps_rez) code = TRS_FAIL; else code = TRS_OK; m_exit: cvReleaseImage(&image_mask); cvReleaseImage(&mask_rez); cvReleaseImage(&image_rez); atsReleaseImage(final_mask_ver); atsReleaseImage(init_mask_ver); cvReleaseImage(&imagew); cvReleaseImage(&image_maskw); cvReleaseImage(&img_p[0]); cvReleaseImage(&img_p[1]); cvReleaseImage(&img_v); cvReleaseHist( &hist); cvReleaseHist( &hist_mask); cvReleaseMemStorage ( &storage ); trsFree(pp); trsFree(name_final_mask_very); trsFree(name_init_mask_very); trsFree(name_image); trsFree(name_range_image); trsFree(name_verify_data); fclose(filin); fclose(fil_ver); /* _getch(); */ return code; }
int main222( int argc, char** argv ) { CvCapture* capture = 0; if( argc == 1 || (argc == 2 && strlen(argv[1]) == 1 && isdigit(argv[1][0]))) capture = cvCaptureFromCAM( argc == 2 ? argv[1][0] - '0' : 0 ); else if( argc == 2 ) capture = cvCaptureFromAVI( argv[1] ); if( !capture ) { fprintf(stderr,"Could not initialize capturing...\n"); return -1; } printf( "Hot keys: \n" "\tESC - quit the program\n" "\tc - stop the tracking\n" "\tb - switch to/from backprojection view\n" "\th - show/hide object histogram\n" "To initialize tracking, select the object with mouse\n" ); cvNamedWindow( "Histogram", 1 ); cvNamedWindow( "CamShiftDemo", 1 ); cvSetMouseCallback( "CamShiftDemo", on_mouse, 0 ); cvCreateTrackbar( "Vmin", "CamShiftDemo", &vmin, 256, 0 ); cvCreateTrackbar( "Vmax", "CamShiftDemo", &vmax, 256, 0 ); cvCreateTrackbar( "Smin", "CamShiftDemo", &smin, 256, 0 ); for(;;) { IplImage* frame = 0; int i, bin_w, c; if( !frame ) break; if( !image ) { /* allocate all the buffers */ image = cvCreateImage( cvGetSize(frame), 8, 3 ); image->origin = frame->origin; hsv = cvCreateImage( cvGetSize(frame), 8, 3 ); hue = cvCreateImage( cvGetSize(frame), 8, 1 ); mask = cvCreateImage( cvGetSize(frame), 8, 1 ); backproject = cvCreateImage( cvGetSize(frame), 8, 1 ); hist = cvCreateHist( 1, &hdims, CV_HIST_ARRAY, &hranges, 1 ); histimg = cvCreateImage( cvSize(320,200), 8, 3 ); cvZero( histimg ); } cvCopy( frame, image, 0 ); cvCvtColor( image, hsv, CV_BGR2HSV ); if( track_object ) { int _vmin = vmin, _vmax = vmax; cvInRangeS( hsv, cvScalar(0,smin,MIN(_vmin,_vmax),0), cvScalar(180,256,MAX(_vmin,_vmax),0), mask ); cvSplit( hsv, hue, 0, 0, 0 ); if( track_object < 0 ) { float max_val = 0.f; cvSetImageROI( hue, selection ); cvSetImageROI( mask, selection ); cvCalcHist( &hue, hist, 0, mask ); cvGetMinMaxHistValue( hist, 0, &max_val, 0, 0 ); cvConvertScale( hist->bins, hist->bins, max_val ? 255. 
/ max_val : 0., 0 ); cvResetImageROI( hue ); cvResetImageROI( mask ); track_window = selection; track_object = 1; cvZero( histimg ); bin_w = histimg->width / hdims; for( i = 0; i < hdims; i++ ) { int val = cvRound( cvGetReal1D(hist->bins,i)*histimg->height/255 ); CvScalar color = hsv2rgb(i*180.f/hdims); cvRectangle( histimg, cvPoint(i*bin_w,histimg->height), cvPoint((i+1)*bin_w,histimg->height - val), color, -1, 8, 0 ); } } cvCalcBackProject( &hue, backproject, hist ); cvAnd( backproject, mask, backproject, 0 ); cvCamShift( backproject, track_window, cvTermCriteria( CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1 ), &track_comp, &track_box ); track_window = track_comp.rect; if( backproject_mode ) cvCvtColor( backproject, image, CV_GRAY2BGR ); if( !image->origin ) track_box.angle = -track_box.angle; cvEllipseBox( image, track_box, CV_RGB(255,0,0), 3, CV_AA, 0 ); } if( select_object && selection.width > 0 && selection.height > 0 ) { cvSetImageROI( image, selection ); cvXorS( image, cvScalarAll(255), image, 0 ); cvResetImageROI( image ); } cvShowImage( "CamShiftDemo", image ); cvShowImage( "Histogram", histimg ); c = cvWaitKey(10); if( (char) c == 27 ) break; switch( (char) c ) { case 'b': backproject_mode ^= 1; break; case 'c': track_object = 0; cvZero( histimg ); break; case 'h': show_hist ^= 1; if( !show_hist ) cvDestroyWindow( "Histogram" ); else cvNamedWindow( "Histogram", 1 ); break; default: ; } } cvReleaseCapture( &capture ); cvDestroyWindow("CamShiftDemo"); return 0; }
void CV_CalcBackProjectTest::run_func(void) { cvCalcBackProject( images, images[CV_MAX_DIM+1], hist[0] ); }