Example #1
IplImage* GetThresholdedImage(IplImage* img, int color) {
    // Convert the image into an HSV image
    IplImage* imgHSV = cvCreateImage(cvGetSize(img), 8, 3);
    cvCvtColor(img, imgHSV, CV_BGR2HSV);

    IplImage* imgThreshed = cvCreateImage(cvGetSize(img), 8, 1);
    if (color == 1)
        //    cvInRangeS(imgHSV, cvScalar(10, 100, 100), cvScalar(20, 255, 255), imgThreshed);- orange paper
        //    cvInRangeS(imgHSV, cvScalar(155, 100, 100), cvScalar(165, 255, 255), imgThreshed); - pink one
        //    cvInRangeS(imgHSV, cvScalar(80, 100, 100), cvScalar(90, 255, 255), imgThreshed); - green robot
        //    cvInRangeS(imgHSV, cvScalar(40, 100, 100), cvScalar(50, 255, 255), imgThreshed);  - green paper
        //    cvInRangeS(imgHSV, cvScalar(100, 100, 100), cvScalar(110, 255, 255), imgThreshed); - blue tape


        cvInRangeS(imgHSV, cvScalar(40, 100, 100), cvScalar(50, 255, 255),
                imgThreshed);

    if (color == 2)
        cvInRangeS(imgHSV, cvScalar(100, 100, 100), cvScalar(110, 255, 255),
                imgThreshed);

    if (color == 3)

        cvInRangeS(imgHSV, cvScalar(155, 100, 100), cvScalar(165, 255, 255),
                imgThreshed);

    if (color == 4)
        cvInRangeS(imgHSV, cvScalar(10, 100, 100), cvScalar(20, 255, 255),
                imgThreshed);

    cvReleaseImage(&imgHSV);

    return imgThreshed;
}
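A minimal usage sketch for the helper above, assuming the OpenCV 1.x C API and a webcam; the window name, color code, and capture setup are illustrative rather than part of the original project. The point to note is that the caller owns and must release the image GetThresholdedImage() returns.

#include <opencv/cv.h>
#include <opencv/highgui.h>

int main() {
    CvCapture* capture = cvCaptureFromCAM(0);
    if (!capture) return -1;
    cvNamedWindow("mask", CV_WINDOW_AUTOSIZE);

    while (true) {
        IplImage* frame = cvQueryFrame(capture);         // owned by the capture; do not release
        if (!frame) break;

        IplImage* mask = GetThresholdedImage(frame, 2);  // e.g. 2 selects the blue range above
        cvShowImage("mask", mask);
        cvReleaseImage(&mask);                           // a new image is allocated on every call

        if (cvWaitKey(10) == 27) break;                  // ESC quits
    }
    cvReleaseCapture(&capture);
    cvDestroyAllWindows();
    return 0;
}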
Example #2
//Extract the color region, then binarize it
void ColorTracking::color_config(IplImage* image, std::string config){
	
	//HSV bounds for the region to extract
	CvScalar hsv_min, hsv_max, hsv_min2, hsv_max2;
	if(image != NULL)
	{
		IplImage* m_tem1_img = cvCreateImage(cvGetSize(image), IPL_DEPTH_8U, 1);//temporary region mask
		IplImage* m_tem2_img = cvCreateImage(cvGetSize(image), IPL_DEPTH_8U, 1);//temporary region mask
	
		//narrow down to the required color range
		if(config == "Red")
		{
			//red - captured as two hue ranges
			hsv_min = cvScalar(0, 85, 100, 0);
			hsv_max = cvScalar(10, 255, 255, 0);

			hsv_min2 = cvScalar(170, 85, 100, 0);
			hsv_max2 = cvScalar(220, 255, 255, 0);

		}
		else if(config =="Green")
		{
			//green
			hsv_min = cvScalar(55, 80, 100, 0);
			hsv_max = cvScalar(75, 255, 255, 0);
		}
		else if(config == "Blue")
		{
			//blue
			hsv_min = cvScalar(100, 100, 100, 0);
			hsv_max = cvScalar(130, 255, 200, 0);
		}
		else if(config =="Yellow")
		{
			//yellow
			hsv_min = cvScalar(20, 100, 100, 0);
			hsv_max = cvScalar(35, 255, 255, 0);
		}
		
		if(config == "Red")
		{
			//red uses both ranges
			cvInRangeS(image, hsv_min, hsv_max, m_tem1_img);
			cvInRangeS(image, hsv_min2, hsv_max2, m_tem2_img);

			//merge the two masks
			cvOr(m_tem1_img, m_tem2_img , m_gray_img);
			
		}	
		else
		{
			//for any other color a single range is enough
			cvInRangeS(image, hsv_min, hsv_max, m_gray_img);
		}

		cvReleaseImage(&m_tem1_img);
		cvReleaseImage(&m_tem2_img);
	}
}
Example #3
/**
 * \brief	Takes frame and applies image processing techniques to filter out non-laser line points. Updates images used for runtime display.
 */
int filterFrame() {
	args[0] = frame;
	cvCvtColor(frame, frameHSV, CV_BGR2HSV);	//convert RGB values of frame to HSV and place in frameHSV
	cvSplit(frameHSV, hue, saturation, value, NULL);	//split frameHSV into constituent components and place appropriately; we are done with frameHSV
	args[1] = hue;
	args[2] = value;
	cvCopy(saturation, saturation2);	//make an additional copy of saturation for display
	//args[8] = saturation2;
	//cvShowImage("saturation", saturation2);
	cvSmooth(frame, frameHSV, CV_BLUR, 20, 20 );   //smooth frame and store in frameHSV
	//cvShowImage("Smoothed frame", frameHSV);
	cvSplit(frame, blue, green, red, NULL);	//split frame into its RGB components
	cvSplit(frameHSV, blue2, green2, red2, NULL);	//split the smoothed version into its RGB components
	cvMin(blue, green, min_bg);	//take the min of blue and green and store in min_bg
	args[3] = min_bg;
	//cvShowImage("minimum of blue and green", min_bg);
	cvSub(red, min_bg, red_last);	//take red less the min of the blue and green
	//cvShowImage("red_last = red - min_bg", red_last);
	cvThreshold(red_last, red_last, thresholdValue, 255, CV_THRESH_BINARY_INV);	//threshold the red_last
	//cvShowImage("threshold of red_last", red_last);
	args[4] = red_last;
	cvSub(red, red2, deltaRed);
	//cvShowImage("deltaRed = Original red - smooth red", deltaRed);
	cvThreshold(deltaRed, deltaRed, thresholdValue, 255, CV_THRESH_BINARY);
	//cvShowImage("threshold(deltaRed)", deltaRed);
	cvCopy(deltaRed, alpha);
	cvInRangeS(saturation, cvScalar(0), cvScalar(25), saturation);
	//cvShowImage("Low saturation in original frame", saturation);
	cvInRangeS(hue, cvScalar(49), cvScalar(125), beta);
	//cvShowImage("Mixed hue in original frame", beta);
	cvOr(beta, saturation, beta);
	//cvShowImage("beta = Low saturation OR mixed hue", beta);
	cvOr(beta, red_last, beta);
	//cvShowImage("beta = beta OR red_last", beta);
	//args[5] = alpha;
	args[5] = beta;

	IplConvKernel *mask = cvCreateStructuringElementEx(5, 5, 2, 2, CV_SHAPE_ELLIPSE, NULL);

	cvDilate(saturation2,dialated, mask, 20);
	//cvShowImage("dilate original saturation", dialated);
	args[6] = dialated;
	cvThreshold(dialated, dialated, 100, 255, CV_THRESH_BINARY);
	cvErode(dialated,eroded, mask, 30);

	args[7] = eroded;
	cvSub(alpha, beta, orig_filter);
	args[8] = orig_filter;
	cvAnd(orig_filter, eroded, zeta);
	args[9] = zeta;
	cvReleaseStructuringElement(&mask);	//release the kernel created above so it is not leaked on every frame
	return 0;
}
Example #4
int filterByHSV(IplImage *src, CvScalar minHSV, CvScalar maxHSV, IplImage *dst) {
	IplImage *tmp3d = cvCloneImage(src);
	cvSmooth(tmp3d, tmp3d, CV_GAUSSIAN, 13, 0, 0, 0);

	cvCvtColor(tmp3d, tmp3d, CV_BGR2HSV);
	IplImage *tmp1dH_mask = cvCreateImage(cvGetSize(src), IPL_DEPTH_8U, 1);
	IplImage *tmp1dS_mask = cvCreateImage(cvGetSize(src), IPL_DEPTH_8U, 1);
	IplImage *tmp1dV_mask = cvCreateImage(cvGetSize(src), IPL_DEPTH_8U, 1);
	cvSplit(tmp3d, tmp1dH_mask, tmp1dS_mask, tmp1dV_mask, NULL);

	//printf("\rmin: %03d,%03d,%03d", (int)minHSV.val[0], (int)minHSV.val[1], (int)minHSV.val[2]);
	//printf("\tmax: %03d,%03d,%03d", (int)maxHSV.val[0], (int)maxHSV.val[1], (int)maxHSV.val[2]);

	if (minHSV.val[0] < maxHSV.val[0]) {
		cvInRangeS(tmp1dH_mask, cvScalar(minHSV.val[0], 0, 0), cvScalar(maxHSV.val[0], 0, 0), tmp1dH_mask);
	} else {
		IplImage *tmp1d = cvCloneImage(tmp1dH_mask);
		cvInRangeS(tmp1dH_mask, cvScalar(0, 0, 0), cvScalar(maxHSV.val[0], 0, 0), tmp1d);
		cvInRangeS(tmp1dH_mask, cvScalar(minHSV.val[0], 0, 0), cvScalar(255, 0, 0), tmp1dH_mask);
		cvOr(tmp1d, tmp1dH_mask, tmp1dH_mask, NULL);
		cvReleaseImage(&tmp1d);
	}

	cvInRangeS(tmp1dS_mask, cvScalar(minHSV.val[1], 0, 0), cvScalar(maxHSV.val[1], 0, 0), tmp1dS_mask);
	cvInRangeS(tmp1dV_mask, cvScalar(minHSV.val[2], 0, 0), cvScalar(maxHSV.val[2], 0, 0), tmp1dV_mask);

	IplImage *tmp1d_mask = cvCreateImage(cvGetSize(src), IPL_DEPTH_8U, 1);
	cvSet(tmp1d_mask, cvScalarAll(255), NULL);
	cvAnd(tmp1d_mask, tmp1dH_mask, tmp1d_mask, NULL);
	cvAnd(tmp1d_mask, tmp1dS_mask, tmp1d_mask, NULL);
	cvAnd(tmp1d_mask, tmp1dV_mask, tmp1d_mask, NULL);

	cvReleaseImage(&tmp1dH_mask);
	cvReleaseImage(&tmp1dS_mask);
	cvReleaseImage(&tmp1dV_mask);

	cvClose(tmp1d_mask, tmp1d_mask, NULL, 2);

#define CONTROLS_WIDTHA  640/2
#define CONTROLS_HEIGHTA 480/2
#if 1
	cvNamedWindow(CONTROL_WINDOW  "4", 0);
	cvResizeWindow(CONTROL_WINDOW "4", CONTROLS_WIDTHA, CONTROLS_HEIGHTA);
	cvShowImage(CONTROL_WINDOW    "4", tmp1d_mask);
#endif

	cvCopy2(src, dst, tmp1d_mask);

	cvReleaseImage(&tmp1d_mask);

	return 0;
}
Example #5
IplImage* getThresholdImage(IplImage* img)
{
  IplImage* imgHSV = cvCreateImage(cvGetSize(img), 8, 3);
	cvCvtColor(img, imgHSV, CV_BGR2HSV);  	//Convert image to HSV
	
	IplImage* thresholded = cvCreateImage(cvGetSize(img), 8, 1);
	
	if(minC.val[0] == 0 && maxC.val[0] == 0){cvInRangeS(imgHSV, hsv_min, hsv_max, thresholded);}
	else{cvInRangeS(imgHSV, minC, maxC, thresholded);}

	cvReleaseImage(&imgHSV);
	return thresholded;
}
Example #6
//--------------------------------------------------------------
void ofApp::update(){
	ofBackground(0, 0, 0);

	
    bool bNewFrame = false;

	#ifdef _USE_LIVE_VIDEO
       vidGrabber.update();
	   bNewFrame = vidGrabber.isFrameNew();
    #else
        vidPlayer.update();
        bNewFrame = vidPlayer.isFrameNew();
	#endif

	if (bNewFrame){

		#ifdef _USE_LIVE_VIDEO
            cvColorImage.setFromPixels(vidGrabber.getPixels());
	    #else
            cvColorImage.setFromPixels(vidPlayer.getPixels());
        #endif

        hsvImage = cvColorImage;
        hsvImage.convertRgbToHsv();
		threshedImage.setFromColorImage(hsvImage);

		cvInRangeS(hsvImage.getCvImage(), cvScalar(targetHue-5, 70,70/*targetSat - 70, targetVal-30*/), cvScalar(targetHue + 5, 255, 255), threshedImage.getCvImage());
		//threshedImage.draw(2*IMG_X_OFFSET, 0);


		// find contours which are between the size of 20 pixels and 1/3 the w*h pixels.
		// also, find holes is set to true so we will get interior contours as well....
		contourFinder.findContours(threshedImage, 10, (width * height)/3, 10, true);	// find holes
	}
}
Example #7
int convRGB(IplImage* srcRGB, IplImage* dstRGB, CvSize sizIm)
{
	// create the single-channel images
	srcR = cvCreateImage( sizIm, IPL_DEPTH_8U, 1 );
	srcG = cvCreateImage( sizIm, IPL_DEPTH_8U, 1 );
	srcB = cvCreateImage( sizIm, IPL_DEPTH_8U, 1 );

	srcRR = cvCreateImage( sizIm, IPL_DEPTH_8U, 1 );
	
	// split into channels
	cvSplit(srcRGB, srcB, srcG, srcR, 0);
		
	// select the value range for the channel
	cvInRangeS(srcR, cvScalar(aRmin), cvScalar(aRmax), srcRR);
	
	cvCopy( srcRR, dstRGB );

	// display the image in a window
	cvShowImage("RGBVideo", srcRR);
	
	// release resources
	cvReleaseImage( &srcR );
	cvReleaseImage( &srcG );
	cvReleaseImage( &srcB );
	cvReleaseImage( &srcRR );
	
	return 0;
}
Example #8
IplImage* Panoramic::GetHsvFeature(IplImage* src,int H,int S,int V,int Scale ,int Scale_1 ,int Scale_2)
{
	IplImage *colorImg	  = cvCreateImage(cvGetSize(src),8,3);
    IplImage *hsvImg	  = cvCreateImage(cvGetSize(src),8,3);
	cvCopy(src,colorImg);
	IplImage *Plane_1	  = cvCreateImage( cvGetSize(colorImg), 8, 1);//H plane
    IplImage *Plane_2	  = cvCreateImage( cvGetSize(colorImg), 8, 1);//S plane
	IplImage *Plane_3	  = cvCreateImage( cvGetSize(colorImg), 8, 1);//V plane
	IplImage *dst	      = cvCreateImage( cvGetSize(src),8,1);
	cvCvtColor(colorImg,hsvImg,CV_BGR2HSV);
	cvCvtPixToPlane( hsvImg, Plane_1, Plane_2, Plane_3, 0 );

	cvEqualizeHist(Plane_2,Plane_2);//s_plane
	cvEqualizeHist(Plane_3,Plane_3);//v_plane
	cvMerge(Plane_1,Plane_2,Plane_3,0,hsvImg);
	cvInRangeS(hsvImg, cvScalar(H,S, V), cvScalar(5*Scale+H,5*Scale_1+S,5*Scale_2+V), dst);//cvScalar(0,40, 40), cvScalar(60, 170, 255)
	cvErode(dst,dst,0,2);

	/*cvNamedWindow("HSV_ROI",0);
	cvShowImage ("HSV_ROI",dst);*/

	cvReleaseImage(&colorImg);
	cvReleaseImage(&hsvImg);
	cvReleaseImage(&Plane_1);
	cvReleaseImage(&Plane_2);
	cvReleaseImage(&Plane_3);

	return dst;
}
Example #9
void the_project::project_binary()
{

	get_binary = cvCreateImage(image_size, IPL_DEPTH_8U, 1);

	int blue=100;
	int green=10;
	int red=10;
	cvCreateTrackbar("blue","win2",&blue,0xff);
	cvCreateTrackbar("green","win2",&green,0xff);
	cvCreateTrackbar("red","win2",&red,0xff);

	cvNamedWindow("win3");
	cout << "Press Space to continue...\n";
	while(1){
		char a = cvWaitKey(10);
		if(a==' ')
			break;
		cvInRangeS(get_change,cvScalarAll(0),CV_RGB(red,green,blue),get_binary);
		cvShowImage("win3",get_binary);
	}
	//cvWaitKey();
	

	get_path = cvCreateImage(image_size,8,1);
	cvCopyImage(get_binary,get_path);
}
Example #10
// calibration function to be run at the beginning only
vector<double> calibrate(){
	
	cvSmooth(frame, imageFiltree, CV_BLUR,seuilFiltre,seuilFiltre,0.0,0.0);
	cvCvtColor(imageFiltree, imageHSV,CV_BGR2HSV);
	cvInRangeS(imageHSV,cvScalar(hmin, smin, vmin, 0.0),cvScalar(hmax, smax, vmax, 0.0),imageBinaire);
	cvErode(imageBinaire, imageErodee, NULL, nbErosions);
	cvDilate(imageErodee, imageDilatee, NULL, nbDilatations);
	
	imageObjectRGB = multBinColor(imageDilatee, frame);
	imageObjectHSV = multBinColor(imageDilatee, imageHSV);
	
	vector<vector<CvPoint3D32f> > vecDistinctPoints = findPoint();
	
	// find the centroid of the object and trace it
	vector<CvPoint> centroid = centroiding(vecDistinctPoints);
	sort(centroid);
	
	vector<double> tanAlphaT = vector<double>(centroid.size(),0);
	double p;
	
	for (int i=0; i<centroid.size(); i++){
		p = abs(centroid[i].x - (frame->width / 2));
		tanAlphaT[i] = atan(d/D-p*ratioPixelSizeF);
	}
	return tanAlphaT;
}
Example #11
int convRGB(IplImage* srcRGB, IplImage* dstRGB, CvSize sizIm)
{
	// create the single-channel images
	srcR = cvCreateImage( sizIm, IPL_DEPTH_8U, 1 );
	srcG = cvCreateImage( sizIm, IPL_DEPTH_8U, 1 );
	srcB = cvCreateImage( sizIm, IPL_DEPTH_8U, 1 );

	srcRR = cvCreateImage( sizIm, IPL_DEPTH_8U, 1 );
	
	// split into channels
	cvSplit(srcRGB, srcB, srcG, srcR, 0);
		
	// select the value range for the channel
	cvInRangeS(srcR, cvScalar(aRmin), cvScalar(aRmax), srcRR);
	
	cvCopy( srcRR, dstRGB );

	// display the image in a window
	cvShowImage("RGBVideo", srcRR);
	
	// release resources
	cvReleaseImage( &srcR );
	cvReleaseImage( &srcG );
	cvReleaseImage( &srcB );
	cvReleaseImage( &srcRR );
	
	return 0;
}
Example #12
void filter_and_threshold(struct ctx *ctx)
{

	/* Soften image */
	cvSmooth(ctx->image, ctx->temp_image3, CV_GAUSSIAN, 11, 11, 0, 0);
	/* Remove some impulsive noise */
	cvSmooth(ctx->temp_image3, ctx->temp_image3, CV_MEDIAN, 11, 11, 0, 0);
	cvCvtColor(ctx->temp_image3, ctx->temp_image3, CV_BGR2HSV);

	/*
	 * Apply threshold on HSV values to detect skin color
	 */
/*	cvInRangeS(ctx->temp_image3,
		   cvScalar(0, 55, 90, 255), // cvScalar( (b), (g), (r), 0 )
		   cvScalar(28, 175, 230, 255),
		   ctx->thr_image);
*/
	cvInRangeS(ctx->temp_image3,
		   cvScalar(100, 200, 200, 0), // cvScalar( (h), (s), (v), 0 ) - bounds in HSV space
		   cvScalar(200, 220, 255, 0),
		   ctx->thr_image);


	/* Apply morphological opening */
	cvMorphologyEx(ctx->thr_image, ctx->thr_image, NULL, ctx->kernel,
		       CV_MOP_OPEN, 1);  // 1 iteration of opening
	cvSmooth(ctx->thr_image, ctx->thr_image, CV_GAUSSIAN, 3, 3, 0, 0);
}
Example #13
IplImage* CamShiftPatch::getInRangeMask(CvScalar maskRange, IplImage* &hue)
{
	IplImage *mask = 0;
	mask = cvCreateImage(cvGetSize(originImage), 8, 1);//mask buffer, single channel

	IplImage *hsv = 0;
	hue = cvCreateImage(cvGetSize(originImage), 8, 1); //1 channel
	hsv = cvCreateImage(cvGetSize(originImage), 8, 3);
	cvCvtColor(originImage, hsv, CV_BGR2HSV); //convert color space from BGR to HSV


	//cvShowImage("hsv", hsv);
	cvInRangeS(//cvInRangeS checks whether each array element lies between two bounds
		hsv, //source array
		cvScalar(0, maskRange.val[2], MIN(maskRange.val[0], maskRange.val[1]), 0),  //inclusive lower bound
		cvScalar(180, 256, MAX(maskRange.val[0], maskRange.val[1]), 0), //exclusive upper bound
		mask);
	//the result is a binary mask
	cvSplit(hsv, hue, 0, 0, 0); //extract only the hue plane
	
	//cvShowImage("mask", mask);

	IplImage* returnImg = nullptr;
	returnImg = cvCloneImage(mask);

	cvReleaseImage(&mask);
	cvReleaseImage(&hsv);

	return returnImg;
}
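A minimal sketch of how the mask and hue plane returned above typically feed the rest of the CamShift pipeline (compare Example #15); the histogram, track window, and function name here are assumptions for illustration, not part of the original class.

CvBox2D trackWithMask(CamShiftPatch& patch, CvScalar maskRange,
                      CvHistogram* hist, CvRect& trackWindow)
{
	IplImage* hue = 0;
	IplImage* mask = patch.getInRangeMask(maskRange, hue);

	IplImage* backproject = cvCreateImage(cvGetSize(hue), 8, 1);
	cvCalcBackProject(&hue, backproject, hist);  // back-project the hue histogram
	cvAnd(backproject, mask, backproject, 0);    // drop low-saturation / low-value pixels

	CvConnectedComp trackComp;
	CvBox2D trackBox;
	cvCamShift(backproject, trackWindow,
	           cvTermCriteria(CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1),
	           &trackComp, &trackBox);
	trackWindow = trackComp.rect;

	cvReleaseImage(&backproject);
	cvReleaseImage(&mask);
	cvReleaseImage(&hue);
	return trackBox;
}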
Example #14
IplImage* threshImage(IplImage *imgOrig, CvScalar lower, CvScalar upper, int n)
{
	IplImage* imgHSV = cvCreateImage(cvGetSize(imgOrig), 8, 3);   //size, depth, channels
	cvCvtColor(imgOrig, imgHSV, CV_BGR2HSV);   //check!

	IplImage* imgThresh = cvCreateImage(cvGetSize(imgOrig), 8, 1);
	cvInRangeS(imgHSV, lower, upper, imgThresh);

	
	CvMoments *moments = (CvMoments*)malloc(sizeof(CvMoments));
	cvMoments (imgThresh, moments, 1);

	double moment10 = cvGetSpatialMoment(moments, 1, 0);
	double moment01 = cvGetSpatialMoment(moments, 0, 1);
	double area = cvGetSpatialMoment(moments, 0, 0);

	static int posX = 0;
	static int posY = 0;


	posX = moment10/area;
	posY = moment01/area;

	int curX = posX * XRATIO;
	int curY = posY * YRATIO;

	SetCursorPos(1366-curX, curY);

	free(moments);	//moments was allocated with malloc, so release it with free
	cvReleaseImage(&imgHSV);

	return imgThresh;
}
Example #15
CamShift::Box CamShift::Track(const ImgBgr& img)
{
	cvCopy(ImgIplImage(img), image, 0 );
  cvCvtColor( image, hsv, CV_BGR2HSV );
  cvFlip(hsv,hsv,0);
  int _vmin = vmin, _vmax = vmax;
  
  cvInRangeS( hsv, cvScalar(0,smin,MIN(_vmin,_vmax),0),
  cvScalar(180,256,MAX(_vmin,_vmax),0), mask );
  cvSplit( hsv, hue, 0, 0, 0 );
  cvCalcBackProject( &hue, backproject, hist );
  //cvSaveImage("backproject.bmp", backproject);
  cvAnd( backproject, mask, backproject, 0 );
  //cvSaveImage("backproject.bmp", backproject);
  cvCamShift( backproject, track_window,
    cvTermCriteria( CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 10, 1 ),
    &track_comp, &track_box );
  track_window = track_comp.rect;

  Box result;
  result.angle= track_box.angle;
  result.center.x= static_cast<LONG>( track_box.center.x );
  result.center.y= static_cast<LONG>( img.Height()-track_box.center.y-1 );
  result.size.cy = static_cast<LONG>( track_box.size.width );
  result.size.cx = static_cast<LONG>( track_box.size.height );
  return result;
}
Example #16
//Filters the given image by the specified HSV color range; returns a black-and-white (binary) image
IplImage* filterByColorHSV(IplImage *img, CvScalar min, CvScalar max){

	cvNamedWindow("filtro");

	int dWidth = cvGetSize(img).width;
	int dHeight = cvGetSize(img).height;
	IplImage *hsvframe=cvCreateImage(cvSize(dWidth,dHeight),8,3);//Image in HSV color space
	IplImage *threshy=cvCreateImage(cvSize(dWidth,dHeight),8,1); //Threshold image of defined color
	
	//smooth the original image using a median filter
	cvSmooth(img, img, CV_MEDIAN,7,7);  //----------------> the smoothing kernel is the noise-removal method; it is worth checking which one works best
										  //				  for our purpose. The options are listed here: http://docs.opencv.org/modules/imgproc/doc/filtering.html
	//Changing the color space from BGR to HSV
	cvCvtColor(img,hsvframe,CV_BGR2HSV);
	//Thresholding the frame for the color given
	cvInRangeS(hsvframe,min, max,threshy);
	//smooth the thresholded image using Median kernel
    cvSmooth(threshy,threshy,CV_MEDIAN,7,7);
	
	cvShowImage("filtro",threshy);
	
	cvReleaseImage(&hsvframe);
	return threshy;
}
Example #17
int main(int argc, char** argv)
{
//Image variables
    char* imageName = argv[1];
    IplImage* img=cvLoadImage(imageName);
    IplImage* rimg=cvCreateImage(cvSize(w,h),8,3);
    IplImage* hsvimg=cvCreateImage(cvSize(w,h),8,3);
    IplImage* thresh=cvCreateImage(cvSize(w,h),8,1);
//Windows
    cvNamedWindow("Original Image",CV_WINDOW_AUTOSIZE);
    cvNamedWindow("Thresholded Image",CV_WINDOW_AUTOSIZE);
    cvNamedWindow("cnt",CV_WINDOW_AUTOSIZE);

//Variables for trackbar
    int h1=0;
    int s1=0;
    int v1=6;
    int h2=100;
    int s2=255;
    int v2=255;
//Creating the trackbars
    cvCreateTrackbar("H1","cnt",&h1,255,0);
    cvCreateTrackbar("H2","cnt",&h2,255,0);
    cvCreateTrackbar("S1","cnt",&s1,255,0);
    cvCreateTrackbar("S2","cnt",&s2,255,0);
    cvCreateTrackbar("V1","cnt",&v1,255,0);
    cvCreateTrackbar("V2","cnt",&v2,255,0);

//Resizing the image
    cvResize(img,rimg,CV_INTER_LINEAR);
//Changing into HSV plane
    cvCvtColor(rimg,hsvimg,CV_BGR2HSV);
    while(1)
    {
//Thresholding the image
        cvInRangeS(hsvimg,cvScalar(h1,s1,v1),cvScalar(h2,s2,v2),thresh);
//Showing the images
        cvShowImage("Original Image",rimg);
        cvShowImage("Thresholded Image",thresh);
//Escape Sequence
        char c=cvWaitKey(33);
        if(c==27)
            break;
    }
//Showing the image
    cvShowImage("Original Image",rimg);
    cvShowImage("Thresholded Image",thresh);
//Waiting for user to press any key
    cvWaitKey(0);
    cvSaveImage("backSub.png",thresh);


//Cleanup
    cvReleaseImage(&img);
    cvReleaseImage(&thresh);
    cvReleaseImage(&rimg);
    cvReleaseImage(&hsvimg);
    cvDestroyAllWindows();

}
Example #18
void WebCamData::updateHugeImage(const IplImage* img)
{
    cvCvtColor(img, d->hsvImage, CV_BGR2HSV);
    cvInRangeS(d->hsvImage, cvScalar(0, 55, MIN(65, 256), 0),
               cvScalar(180, 256, MAX(65, 255), 0), d->mask);
    cvSplit(d->hsvImage, d->hueImage, 0, 0, 0);
}
Example #19
void tracker_bitmap_color::run()
{
  vsx_bitmap *bmp = in_bitmap->get_addr();

  //Check if there is any new image to process
  if(!(bmp && bmp->valid && bmp->timestamp && bmp->timestamp != m_previousTimestamp)){
#ifdef VSXU_DEBUG
    printf("Skipping frame after %d \n",m_previousTimestamp);
#endif
    return;
  }

  m_previousTimestamp = bmp->timestamp;  
  initialize_buffers(bmp->size_x, bmp->size_y);

  //Grab the input image
  m_img[FILTER_NONE]->imageData = (char*)bmp->data;

  //1)filter the image to the HSV color space
  cvCvtColor(m_img[FILTER_NONE],m_img[FILTER_HSV],CV_RGB2HSV);

  //2)Threshold the image based on the supplied range of colors
  cvInRangeS( m_img[FILTER_HSV],
              cvScalar( (int)(in_color1->get(0)*255), (int)(in_color1->get(1)*255), (int)(in_color1->get(2)*255) ),
              cvScalar( (int)(in_color2->get(0)*255), (int)(in_color2->get(1)*255), (int)(in_color2->get(2)*255) ),
              m_img[FILTER_HSV_THRESHOLD] );

  //3)Now the math to find the centroid of the "thresholded image"
  //3.1)Get the moments
  cvMoments(m_img[FILTER_HSV_THRESHOLD],m_moments,1);
  double moment10 = cvGetSpatialMoment(m_moments,1,0);
  double moment01 = cvGetSpatialMoment(m_moments,0,1);
  double area = cvGetCentralMoment(m_moments,0,0);

  //3.2)Calculate the positions
  double posX =  moment10/area;
  double posY = moment01/area;

  //3.3) Normalize the positions
  posX = posX/bmp->size_x;
  posY = posY/bmp->size_y;

  //Finally set the result
#ifdef VSXU_DEBUG
  printf("Position: (%f,%f)\n",posX,posY);
#endif
  out_centroid->set(posX,0);
  out_centroid->set(posY,1);

  //Calculate the debug output only if requested
  if(m_compute_debug_out){
    m_compute_debug_out = false;
    cvCvtColor(m_img[FILTER_HSV_THRESHOLD],m_img[FILTER_HSV_THRESHOLD_RGB], CV_GRAY2RGB);

    m_debug = *bmp;
    m_debug.data = m_img[FILTER_HSV_THRESHOLD_RGB]->imageData;
    out_debug->set_p(m_debug);
  }
}
Example #20
IplImage* HsvToBinaryConverter(IplImage* HSVImage)
{
	IplImage* BinaryImage = cvCreateImage(cvGetSize(HSVImage), IPL_DEPTH_8U, 1);
	cvInRangeS(HSVImage, cvScalar(HueLowervalue, SaturationLowervalue, ValueLowervalue), cvScalar(HueUppervalue, SaturationUppervalue, ValueUppervalue), BinaryImage);

	return BinaryImage;
}
Example #21
//This function threshold the HSV image and create a binary image
IplImage* GetThresholdedImage(IplImage* imgHSV){
 
 IplImage* imgThresh=cvCreateImage(cvGetSize(imgHSV),IPL_DEPTH_8U, 1);
 cvInRangeS(imgHSV, cvScalar(lowerH,lowerS,lowerV), cvScalar(upperH,upperS,upperV), imgThresh); 
 
 return imgThresh;

}
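lowerH/lowerS/lowerV and upperH/upperS/upperV are globals set elsewhere in the original project; a minimal sketch of one common way to drive them, with HighGUI trackbars (the control-window name and the initial values are assumptions):

int lowerH = 0, lowerS = 0, lowerV = 0;
int upperH = 180, upperS = 256, upperV = 256;

void setupTrackbars() {
    cvNamedWindow("Ball", CV_WINDOW_AUTOSIZE);        // hypothetical control window
    cvCreateTrackbar("LowerH", "Ball", &lowerH, 180, NULL);
    cvCreateTrackbar("UpperH", "Ball", &upperH, 180, NULL);
    cvCreateTrackbar("LowerS", "Ball", &lowerS, 256, NULL);
    cvCreateTrackbar("UpperS", "Ball", &upperS, 256, NULL);
    cvCreateTrackbar("LowerV", "Ball", &lowerV, 256, NULL);
    cvCreateTrackbar("UpperV", "Ball", &upperV, 256, NULL);
}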
Example #22
  IplImage* GetThresholdedImage(IplImage* imgHSV3, int H, int S, int V, int H1, int S1, int V1){  
     
    IplImage* imgTemp=cvCreateImage(cvGetSize(imgHSV3),IPL_DEPTH_8U, 1);

    cvInRangeS(imgHSV3, cvScalar(H,S,V), cvScalar(H1,S1,V1), imgTemp); 

    return imgTemp;
  }
Example #23
int frRGBStrip::convRGB( map<range, CvScalar>* rRGB, bool a )
{
    // create the single-channel images
    IplImage* srcR = cvCreateImage( cvGetSize( GeoStrip->stripFrame ), IPL_DEPTH_8U, 1 );
    IplImage* srcG = cvCreateImage( cvGetSize( GeoStrip->stripFrame ), IPL_DEPTH_8U, 1 );
    IplImage* srcB = cvCreateImage( cvGetSize( GeoStrip->stripFrame ), IPL_DEPTH_8U, 1 );

    IplImage* srcRR = cvCreateImage( cvGetSize( GeoStrip->stripFrame ), IPL_DEPTH_8U, 1 );
    IplImage* srcGR = cvCreateImage( cvGetSize( GeoStrip->stripFrame ), IPL_DEPTH_8U, 1 );
    IplImage* srcBR = cvCreateImage( cvGetSize( GeoStrip->stripFrame ), IPL_DEPTH_8U, 1 );

    // split into channels
    cvSplit( GeoStrip->stripFrame, srcB, srcG, srcR, 0);

    // select the value range for each channel
    cvInRangeS(srcR, cvScalar(rRGB->at(minR).val[2]), cvScalar(rRGB->at(maxR).val[2]), srcRR);
    //cvInRangeS(srcR, cvScalar(230), cvScalar(256), srcRR);
    cvInRangeS(srcG, cvScalar(rRGB->at(minR).val[1]), cvScalar(rRGB->at(maxR).val[1]), srcGR);
    cvInRangeS(srcB, cvScalar(rRGB->at(minR).val[0]), cvScalar(rRGB->at(maxR).val[0]), srcBR);

    // "склеиваем" каналы
    cvAnd( srcRR, srcGR,  framRGB );
    //cvAnd(  framRGB, srcBR,  framRGB );
    //cvCopy( srcRR,  framRGB );

    // display the images in windows
#ifdef __DEBUG_RGB__
    cvShowImage("RGBVideo",  framRGB);
    cvShowImage("srcR", srcR);
    cvShowImage("srcG", srcG);
    cvShowImage("srcB", srcB);
    cvShowImage("srcRR", srcRR);
    cvShowImage("srcGR", srcGR);
    cvShowImage("srcBR", srcBR);
#endif

    // release resources
    cvReleaseImage( &srcR );
    cvReleaseImage( &srcG );
    cvReleaseImage( &srcB );
    cvReleaseImage( &srcRR );
    cvReleaseImage( &srcGR );
    cvReleaseImage( &srcBR );

    return 0;
}
Example #24
void callback(int i)
{
	float time;
	clock_t t1, t2;
	
	// Start timer
	t1 = clock();
	
	// Filtering, HSV to Binary Image, Erosions and Dilations
	cvSmooth(frame, imageFiltree, CV_BLUR,seuilFiltre,seuilFiltre,0.0,0.0);
	cvCvtColor(imageFiltree, imageHSV,CV_BGR2HSV);
	cvInRangeS(imageHSV,cvScalar(hmin, smin, vmin, 0.0),cvScalar(hmax, smax, vmax, 0.0),imageBinaire);
	cvErode(imageBinaire, imageErodee, NULL, nbErosions);
	cvDilate(imageErodee, imageDilatee, NULL, nbDilatations);
	
	//imageDilateeFiltree =  lowPassFilter(imageDilatee); FILTER
	
	// multiplication between the original image in RGB and HSV and the binary image
	imageObjectRGB = multBinColor(imageDilatee, frame);
	imageObjectHSV = multBinColor(imageDilatee, imageHSV);
	
	// find the points and separate them (rows correspond to each point and the columns to the pixels belonging to the points)
	vector<vector<CvPoint3D32f> > vecDistinctPoints = findPoint();
	
	// find the centroid of the point and trace it
	vector<CvPoint> centroid = centroiding(vecDistinctPoints);
	// sort the centroids
	centroid = sort(centroid);
	
	// compute the distance with and without lens distortion
	vector<double> distance = findDistance(imageObjectHSV, centroid, tanAlphaT);
	
	// Contours
	/*cvFindContours( imageDilatee, storage, &contours, sizeof(CvContour),
	 CV_RETR_LIST, CV_CHAIN_APPROX_NONE, cvPoint(0,0) );*/
	
	/*cvDrawContours( frame, contours,
	 CV_RGB(255,255,0), CV_RGB(0,255,0),
	 1, 2, 8, cvPoint(0,0));*/
	
	
	cvNamedWindow(myWindow, CV_WINDOW_AUTOSIZE);
	cvNamedWindow(myWindowObjectHSV, CV_WINDOW_AUTOSIZE);
	cvNamedWindow(myWindowObjectRGB, CV_WINDOW_AUTOSIZE);
	cvShowImage(myWindow, frame);
	cvShowImage(myWindowObjectHSV, imageObjectHSV);
	cvShowImage(myWindowObjectRGB, imageObjectRGB);
	//cvSaveImage("NoisyGridCentroiding.png", imageObjectRGB,0);
	
	// End timer
	t2 = clock();
	
	// Compute execution time
	time = (float)(t2 - t1) / CLOCKS_PER_SEC;
	
	cout << "execution time = " << time << " s" << endl;
	
}
Example #25
HandDetect::HandDetect()
{
	numColorBins = 256;
	max_val = 0.f;
	hand1 = cvLoadImage("hand.png");
	hand2 = cvLoadImage("hand2.png");
	hist1 = cvCreateHist(1, &numColorBins, CV_HIST_ARRAY);
	hist2 = cvCreateHist(1, &numColorBins, CV_HIST_ARRAY);
	
	rad=0;
	vmin=10, vmax=256, smin=30;

	capture = cvCaptureFromCAM(0);
	setImage();
	backproject = cvCreateImage(cvGetSize(image), 8, 1);
	gray = cvCreateImage(cvGetSize(image), 8, 1);
	track_window = cvRect((int)image->width/2, (int)image->height/2, 1, 1);
	track_box.center.x=-1;
	track_box.center.y=-1;

	hsvHand1 = cvCreateImage(cvGetSize(hand1), 8, 3);
	mskHand1 = cvCreateImage(cvGetSize(hand1), 8, 1);
	hueHand1 = cvCreateImage(cvGetSize(hand1), 8, 1);

	hsvHand2 = cvCreateImage(cvGetSize(hand2), 8, 3);
	mskHand2 = cvCreateImage(cvGetSize(hand2), 8, 1);
	hueHand2 = cvCreateImage(cvGetSize(hand2), 8, 1);	

	cvCvtColor(hand1, hsvHand1, CV_RGB2HSV);
	cvInRangeS(hsvHand1, cvScalar(0, smin, MIN(vmin, vmax), 0), cvScalar(180, 256, MAX(vmin, vmax), 0), mskHand1);
	cvSplit(hsvHand1, hueHand1, 0, 0, 0);

	cvCalcHist(&hueHand1, hist1, 0, mskHand1);
	cvGetMinMaxHistValue(hist1, 0, &max_val, 0, 0);
	cvConvertScale(hist1->bins, hist1->bins, max_val ? 255. / max_val : 0., 0);


	cvCvtColor(hand2, hsvHand2, CV_RGB2HSV);
	cvInRangeS(hsvHand2, cvScalar(0, smin, MIN(vmin, vmax), 0), cvScalar(180, 256, MAX(vmin, vmax), 0), mskHand2);
	cvSplit(hsvHand2, hueHand2, 0, 0, 0);

	cvCalcHist(&hueHand2, hist2, 0, mskHand2);
	cvGetMinMaxHistValue(hist2, 0, &max_val, 0, 0);
	cvConvertScale(hist2->bins, hist2->bins, max_val ? 255. / max_val : 0., 0);
}
Example #26
int main(int argc, char** argv)
{
	IplImage* frame = cvLoadImage(argv[1],CV_LOAD_IMAGE_UNCHANGED);
	
	CvSize size = cvSize(frame->width,frame->height);
	IplImage* hsv_frame = cvCreateImage(size, IPL_DEPTH_8U, 3);
	IplImage* thresholded = cvCreateImage(size, IPL_DEPTH_8U,1);
	IplImage* thresholded2 = cvCreateImage(size, IPL_DEPTH_8U, 1);

	CvScalar hsv_min = cvScalar(0, 80, 220, 0);
	CvScalar hsv_max = cvScalar(50, 140, 256, 0);
	CvScalar hsv_min2 = cvScalar(170, 80, 220, 0);
	CvScalar hsv_max2 = cvScalar(256, 140, 256, 0);
 
	cvNamedWindow("Original", CV_WINDOW_AUTOSIZE);
	cvNamedWindow("HSV", CV_WINDOW_AUTOSIZE);
	int p[3];
	p[0] = CV_IMWRITE_JPEG_QUALITY;
	p[1] = 95;
	p[2] = 0;

	cvCvtColor(frame, hsv_frame, CV_BGR2HSV);
	// to handle color wrap-around, two halves are detected and combined
	cvInRangeS(hsv_frame, hsv_min, hsv_max, thresholded);
	cvInRangeS(hsv_frame, hsv_min2, hsv_max2, thresholded2);
	cvOr(thresholded, thresholded2, thresholded, 0);

	//cvSaveImage("thresholded.jpg",thresholded,p);

	// hough detector works better with some smoothing of the image
	cvSmooth( thresholded, thresholded, CV_GAUSSIAN, 9, 9, 0, 0);

	//cvSaveImage("frame.jpg", frame, p);

	cvShowImage("Original",thresholded);
	cvShowImage("HSV", hsv_frame);

	cvWaitKey(0);
	
	cvDestroyAllWindows();
	cvReleaseImage(&frame);
        return 0;
}
Example #27
void updateHueImage(camshift * cs, const IplImage * img)
{
  cvCvtColor( img, cs->HSVImg, CV_BGR2HSV );

  cvInRangeS( cs->HSVImg, cvScalar(0, cs->smin, MIN(cs->vmin,cs->vmax), 0),
      cvScalar(180, 256, MAX(cs->vmin,cs->vmax) ,0), cs->mask );

  cvSplit( cs->HSVImg, cs->hueImg, 0, 0, 0 );
}
Example #28
/*
 * Transform the image into a two colored image, one color for the color we want to track, another color for the others colors
 * From this image, we get two datas : the number of pixel detected, and the center of gravity of these pixel
 */
CvPoint binarisation(IplImage* image, int *nbPixels) {
 
	int x, y;
	CvScalar pixel;
	IplImage *hsv, *mask;
	IplConvKernel *kernel;
	int sommeX = 0, sommeY = 0;
	*nbPixels = 0;
 
	// Create the mask & initialize it to white (no color detected)
	mask = cvCreateImage(cvGetSize(image), image->depth, 1);
 
	// Create the hsv image
	hsv = cvCloneImage(image);
	cvCvtColor(image, hsv, CV_BGR2HSV);
 
	cvShowImage("GeckoGeek Color Rectification", hsv);
	// We create the mask
	cvInRangeS(hsv, cvScalar(h - tolerance -1, s - tolerance, 0), cvScalar(h + tolerance -1, s + tolerance, 255), mask);
 
	// Create kernels for the morphological operation
	kernel = cvCreateStructuringElementEx(5, 5, 2, 2, CV_SHAPE_ELLIPSE);
 
	// Morphological opening (inverse because we have white pixels on black background)
	cvDilate(mask, mask, kernel, 1);
	cvErode(mask, mask, kernel, 1);  
 
	// We go through the mask to look for the tracked object and get its gravity center
	for(x = 0; x < mask->width; x++) {
		for(y = 0; y < mask->height; y++) { 
 
			// If it's a tracked pixel, add it to the center-of-gravity calculation
			if(((uchar *)(mask->imageData + y*mask->widthStep))[x] == 255) {
				sommeX += x;
				sommeY += y;
				(*nbPixels)++;
			}
		}
	}
 
	// Show the result of the mask image
	cvShowImage("GeckoGeek Mask", mask);
 
	// We release the memory of kernels
	cvReleaseStructuringElement(&kernel);
 
	// We release the memory of the mask
	cvReleaseImage(&mask);
	// We release the memory of the hsv image
	cvReleaseImage(&hsv);
 
	// If there is no pixel, we return a center outside the image, else we return the center of gravity
	if(*nbPixels > 0)
		return cvPoint((int)(sommeX / (*nbPixels)), (int)(sommeY / (*nbPixels)));
	else
		return cvPoint(-1, -1);
}
Example #29
IplImage* Calibrator::GetThresholdedImage(IplImage *img) {
    IplImage* imgHSV = cvCreateImage(cvGetSize(img), 8, 3);
    cvCvtColor(img, imgHSV, CV_BGR2HSV);

    IplImage* imgThreshed = cvCreateImage(cvGetSize(img), 8, 1);
    cvInRangeS(imgHSV, cvScalar((Color-5), Sl, Vl), cvScalar((Color+5), Sh, Vh), imgThreshed);

    cvReleaseImage(&imgHSV);
    return imgThreshed;
}
Example #30
/* Given an IplImage, convert it to HSV, and select only colors within the proper range. */
IplImage* MarkerCapture::apply_threshold(IplImage* img, MarkerColorRange color_range){
    // convert to HSV
    // IplImage* imgHSV = cvCreateImage(cvGetSize(img), 8, 3);
    // cvCvtColor(img, imgHSV, CV_BGR2HSV);
    
    IplImage* imgThresh = cvCreateImage(cvGetSize(img), 8, 1);
    cvInRangeS(img, color_range.from, color_range.to, imgThresh); // match the color range.
    
    return imgThresh;
}