Exemplo n.º 1
0
// Grow the backing matrix so it can hold at least maxrows x maxcols
// elements, preserving (optionally) the current contents.
// Returns 1 on success, 0 on failure (no backing matrix / bad args / OOM).
int grow_mat::reserve(int maxrows, int maxcols)
{
	if (_mat==0 || maxrows<0 || maxcols<0)
		return 0;

	// Reallocate only when the requested size exceeds the current backing
	// matrix in either dimension.
	if (maxrows > _mat->rows || maxcols > _mat->cols) 
	{
		// Allocate the new, larger backing matrix.
		CvMat * nm = cvCreateMat(maxrows, maxcols, _mat->type);
		if (nm==0)
			return 0;
		if (_zero_flag) cvSetZero(nm);
		if (this->rows && this->cols && _copy_flag)
		{
			// If the current (visible) matrix is non-empty and copying is
			// requested, copy this matrix's data into the top-left corner of nm.
			CvMat sub;
			cvGetSubRect(nm, &sub, cvRect(0,0,this->cols, this->rows));
			cvCopy(this, &sub);
		}
		cvReleaseMat(&_mat);
		_mat = nm;
	}
	return 1;
}
Exemplo n.º 2
0
// (Re)initialize as an r x c matrix of `type`, backed by a matrix of up to
// maxrows x maxcols. A maxrows/maxcols of 0 means "r/c * _expand_factor";
// type 0 defaults to CV_64FC1. Returns 1 on success, 0 on failure.
int grow_mat::init(int r, int c, int type, int maxrows, int maxcols)
{
	int no_max = 0;
	if (maxrows==0) {maxrows=r*_expand_factor;no_max=1;}
	if (maxcols==0) {maxcols=c*_expand_factor;no_max=1;}
	if (type==0) type=CV_64FC1;
	if (r<=0 || c<=0 || maxrows<0 || maxcols<0 || r>maxrows || c>maxcols)
		return 0;

	// Allow re-initialization, for consistency with mini_solver's set_solver.
	if (_mat)
	{
		// If _mat already exists with the same element type, the caller just
		// wants to change the visible size.
		int mat_type = CV_MAT_TYPE(_mat->type);
		if (CV_MAT_TYPE(type)==mat_type)
		{
			return resize(r,c);
		}
	}

	if(_mat) cvReleaseMat(&_mat);
	if (no_max)
		_mat = cvCreateMat(r, c, type);
	else
		_mat = cvCreateMat(maxrows, maxcols, type);
	if (_mat==0) return 0;
	if (_zero_flag) cvSetZero(_mat);
	// Expose only the requested r x c view of the backing matrix via *this.
	cvGetSubRect(_mat, this, cvRect(0,0,c,r));
	return 1;
}
Exemplo n.º 3
0
void ofxOpticalFlowFarneback::reset() {
	// Blank the flow field and every cached frame.
	cvSetZero(flow);
	greyImgPrv.set(0);
	greyImgSml.set(0);
	greyImgLrg.set(0);
	colrImgSml.set(0);
	colrImgLrg.set(0);
}
void ofxCvOpticalFlowLK::allocate(int _w, int _h){
	width		= _w;
	height		= _h; 
	invWidth	= 1.0f/width;
	invHeight	= 1.0f/height;
	
	if(vel_x) cvReleaseImage(&vel_x);		// MEMO
	if(vel_y) cvReleaseImage(&vel_y);		// MEMO
	
	vel_x = cvCreateImage( cvSize( width, height ), IPL_DEPTH_32F, 1  );
	vel_y = cvCreateImage( cvSize( width, height ), IPL_DEPTH_32F, 1  );
	
    cvSetZero(vel_x);
    cvSetZero(vel_y);
	
	resetROI();
}
Exemplo n.º 5
0
// Render one 12x10-pixel colored box per track-list entry for every live
// track, then display the result in the "TrackBar" window.
// ShowTrack: 3-channel 8-bit canvas image. TrackT/NumOfTrack: track array
// and its element count.
void ShowTrackResult(IplImage *ShowTrack, MyTrackStruct TrackT[],int NumOfTrack)
{
	cvSetZero(ShowTrack);

	bool clipped = false;	// true once a box would fall off the right edge

	for (int i = 0; i < NumOfTrack && !clipped; i++)
	{
		if (TrackT[i].TrackLife == 0)
		{
			continue;
		}

		int LeftTop_y = 12 * i;
		int RightButton_y = LeftTop_y + 10;

		if (RightButton_y >= ShowTrack->height)	// keep writes inside the image
			break;

		for (int j = 0; j < (TrackT[i].NumOFTrackList + 1); j++)
		{
			int LeftTop_x = 12 * j;
			int RightButton_x = 12 * (j + 1) - 2;

			if (RightButton_x >= ShowTrack->width)	// keep writes inside the image
			{
				clipped = true;	// replaces the old goto out of the nested loops
				break;
			}

			// Channel to light up: first box uses channel 2, second channel 0,
			// all later boxes channel 1.
			int color;
			if (j == 0)
				color = 2;
			else if (j == 1)
				color = 0;
			else
				color = 1;

			// Fill the rectangle. BUGFIX: rows are addressed via widthStep
			// (the true byte stride) instead of width*3, which is wrong
			// whenever IplImage rows carry alignment padding.
			for (int h = LeftTop_y; h < RightButton_y + 1; h++)
			{
				for (int w = LeftTop_x; w < RightButton_x + 1; w++)
				{
					ShowTrack->imageData[h * ShowTrack->widthStep + w * 3 + color] = (char)255;
				}
			}
		}
	}

	cvShowImage("TrackBar", ShowTrack);
}
//table: the lookup table; Rows/Cols: the table's row/column counts; usecols: number of columns actually used (must not exceed Cols)
//pframe has (usecols+1)/2 images: pframe[0] is the primary component, pframe[1]...pframe[(usecols+1)/2] are the auxiliary components
//Detects pigs according to the lookup table
// Classify each pixel using the lookup table described in the comment above:
// a pixel is marked 255 in `mask` when its primary (gray) value matches a
// table row and every auxiliary component lies strictly inside that row's
// (low, high) range pair.
void cvLookUpTable(int *table,const int Rows,const int Cols,const int usecols,IplImage** pframe, IplImage* mask){
	assert(Cols>=usecols);
	// Number of auxiliary component images; each contributes one
	// (low, high) range pair per table row.
	const int IMGNUM=(usecols-1)/2;
	IplImage **imgs=new IplImage*[IMGNUM];
	IplImage *grayImg=pframe[0];	// pframe[0] is the primary (gray) component
	IplImage *img=grayImg;
	for (int i=0; i<IMGNUM; ++i)
	{
		imgs[i] =pframe[i+1];	// pframe[1..IMGNUM] are the auxiliary components
	}
	cvSetZero(mask);
	int *param=new int[usecols-1];		// range bounds of the matched table row
	int *componPiex=new int[IMGNUM];	// auxiliary pixel values at (i, j)
	for (int i=0; i<img->height; ++i){
		for (int j=0; j<img->width; ++j){
			uchar grayPiex=((unsigned char *)grayImg->imageData+ i* grayImg->widthStep)[j];
			// Linear scan for the table row whose first column matches the
			// primary pixel value.
			for (int m=0; m<Rows; ++m)
			{
				if (grayPiex==table[m*Cols+0])
				{
					for (int n=0; n<usecols-1; ++n)
					{
						param[n]=table[m*Cols+n+1];
					}
					for (int imgnumIndex=0; imgnumIndex<IMGNUM; ++imgnumIndex){
						componPiex[imgnumIndex]=((unsigned char *)imgs[imgnumIndex]->imageData+ i* imgs[imgnumIndex]->widthStep)[j];
					}
					// Object pixel only if every auxiliary component falls
					// strictly inside its (low, high) range.
					bool isObjectPiex=true;
					for (int n=0; n<IMGNUM; ++n)
					{
						if (!(componPiex[n]>param[n*2] && componPiex[n] <param[n*2+1])){
							isObjectPiex=false;
							break;
						}
					}
					if (isObjectPiex==true)
					{
						((unsigned char *)mask->imageData+ i* mask->widthStep)[j]=255;
					}
					break;//stop scanning table rows: for (int m=0; m<Rows; ++m)
				}				
			}
		}
	}
	if (imgs){
		delete[] imgs;
		imgs=NULL;
	}
	if (componPiex){
		delete[] componPiex;
		componPiex=NULL;
	}
	if(param) {
		delete []param;
		param=NULL;
	}
}
Exemplo n.º 7
0
bool processFrame(xn::DepthGenerator* dpg,xn::UserGenerator* ug,XnUserID userID)
{
	// Lazily create the working images on first use; they are reused afterwards.
	if (!dImage)
	{
		dImage    = cvCreateImage(dSize, IPL_DEPTH_16U, 1);
		uImage    = cvCreateImage(dSize, IPL_DEPTH_8U, 1);
		showImage = cvCreateImage(dSize, IPL_DEPTH_8U, 1);
	}

	// Start each frame from blank depth/user maps.
	cvSetZero(dImage);
	cvSetZero(uImage);

	// Convert Xn matrices to OpenCV matrices for easier calculation.
	if (!convertMetaDataToIpl(dpg, ug, userID))
		return false;

	optimizeDepthMap();
	getSphereSizes(dpg, ug, userID);
	measureBody(dpg, ug, userID);
	estimateParameters();
	return true;
}
IplImage *TBackground::CreateTestImg()
{
  // Produce a zero-filled 256x256 8-bit 3-channel image
  // (NULL if allocation failed).
  IplImage *pImg = cvCreateImage(cvSize(256, 256), IPL_DEPTH_8U, 3);
  if(pImg == NULL)
    return NULL;
  cvSetZero(pImg);
  return pImg;
}
////Shadow-detection algorithm from "Improving shadow suppression in moving object detection with HSV color information"
//TS and TH are thresholds for the S and H components, determined experimentally. Because a shadow pixel's V value is
//usually smaller than the corresponding non-shadow V value, Beta should be < 1; Alpha depends on scene lighting — the stronger the light, the smaller Alpha should be.
// HSV-based shadow detection: a pixel (optionally restricted by `mask`) is
// marked 255 in dstImg when Alpha < Iv/Bv < Beta and the H/S differences
// from the background are within TH/TS. srcImg/bgImg must be 3-channel,
// mask (if given) and dstImg 1-channel, all the same size.
// Returns false on any argument mismatch.
bool ShadowDetect(const IplImage *srcImg,const IplImage *bgImg,const IplImage *mask,
	const double Alpha, const double Beta, const double TS, const double TH,IplImage *dstImg)
{
	if (!srcImg || !bgImg || !dstImg || (mask && mask==dstImg)
		|| srcImg->width != bgImg->width ||  (mask && srcImg->width !=mask->width) || srcImg->width != dstImg->width
		|| srcImg->height!=bgImg->height || (mask && srcImg->height!=mask->height) || srcImg->height!=dstImg->height
		|| srcImg->nChannels!=3 || bgImg->nChannels!=3 || (mask && mask->nChannels!=1) || dstImg->nChannels!=1)
	{
		return false;
	}
	cvSetZero(dstImg);
	// Split both frame and background into H/S/V planes.
	IplImage *channelsImg=cvCreateImage(cvGetSize(srcImg),IPL_DEPTH_8U,3);
	IplImage *IhImg=cvCreateImage(cvGetSize(srcImg),IPL_DEPTH_8U,1);
	IplImage *IsImg=cvCreateImage(cvGetSize(srcImg),IPL_DEPTH_8U,1);
	IplImage *IvImg=cvCreateImage(cvGetSize(srcImg),IPL_DEPTH_8U,1);
	IplImage *BhImg=cvCreateImage(cvGetSize(srcImg),IPL_DEPTH_8U,1);
	IplImage *BsImg=cvCreateImage(cvGetSize(srcImg),IPL_DEPTH_8U,1);
	IplImage *BvImg=cvCreateImage(cvGetSize(srcImg),IPL_DEPTH_8U,1);
	cvCvtColor(srcImg,channelsImg,CV_RGB2HSV);
	cvSplit(channelsImg,IhImg,IsImg,IvImg,NULL);
	cvCvtColor(bgImg,channelsImg,CV_RGB2HSV);
	cvSplit(channelsImg,BhImg,BsImg,BvImg,NULL);
	uchar Ih,Is,Iv,Bh,Bs,Bv;
	for (int i=0; i<srcImg->height; ++i){
		for (int j=0; j<srcImg->width; ++j){
			Ih=((uchar*)IhImg->imageData + i *IhImg->widthStep)[j];
			Is=((uchar*)IsImg->imageData + i *IsImg->widthStep)[j];
			Iv=((uchar*)IvImg->imageData + i *IvImg->widthStep)[j];
			Bh=((uchar*)BhImg->imageData + i *BhImg->widthStep)[j];
			Bs=((uchar*)BsImg->imageData + i *BsImg->widthStep)[j];
			Bv=((uchar*)BvImg->imageData + i *BvImg->widthStep)[j];
			if (mask){
				// With a mask: only test pixels the mask marks as foreground.
				uchar maskPiex=((uchar*)mask->imageData + i *mask->widthStep)[j];
				if (maskPiex){
					if (Iv*1./Bv>Alpha && Iv*1./Bv<Beta
						&& abs(Is-Bs)<=TS && abs(Ih-Bh)<=TH){
							((uchar*)dstImg->imageData + i *dstImg->widthStep)[j]=255;
					}
				}
			}else{
				// No mask: test every pixel.
				if (Iv*1./Bv>Alpha && Iv*1./Bv<Beta
					&& abs(Is-Bs)<=TS && abs(Ih-Bh)<=TH){
						((uchar*)dstImg->imageData + i *dstImg->widthStep)[j]=255;
				}
			}
		}
	}
	cvReleaseImage(&channelsImg);
	cvReleaseImage(&IhImg);
	cvReleaseImage(&IsImg);
	cvReleaseImage(&IvImg);
	cvReleaseImage(&BhImg);
	cvReleaseImage(&BsImg);
	cvReleaseImage(&BvImg);
	return true;
}
Exemplo n.º 10
0
// Rotate img by `angle` degrees about the image centre, filling uncovered
// corners with white. The canvas is enlarged for +/-45 and +/-90 degree
// rotations so no content is clipped. The caller owns the returned image
// and must cvReleaseImage it.
IplImage *rotateImage(const IplImage *img, int angle) 
{
    IplImage *newImg;
    int newWidth, newHeight;
    int rectX, rectY;

    // Pick a canvas big enough to hold the rotated content.
    if (angle == -45 || angle == 45) {
        newWidth = (int) ((img->width + img->height) / sqrt(2.0));
        newHeight = (int) ((img->width + img->height) / sqrt(2.0));
    } else if (angle == -90 || angle == 90) {
        if (img->width > img->height) {
            newWidth = img->width;
            newHeight = img->width;
        } else {
            newWidth = img->height;
            newHeight = img->height;
        }
    } else {
        newWidth = img->width;
        newHeight = img->height;
    }

    newImg = cvCreateImage(cvSize(newWidth, newHeight), img->depth,
                           img->nChannels);
    cvSetZero(newImg);

    // Centre the source inside the (possibly enlarged) canvas.
    rectX = (int) ((newWidth - img->width) / 2);
    rectY = (int) ((newHeight - img->height) / 2);
    CvRect rect = cvRect(rectX, rectY, img->width, img->height);
    cvSetImageROI(newImg, rect);
    cvResize(img, newImg, CV_INTER_LINEAR);
    cvResetImageROI(newImg);

    IplImage *rotatedImg = cvCreateImage(cvGetSize(newImg), IPL_DEPTH_8U,
                                         img -> nChannels);

    // Rotate about the canvas centre.
    CvPoint2D32f center;
    center.x = (float) (newWidth / 2);
    center.y = (float) (newHeight / 2);
    CvMat *mapMatrix = cvCreateMat(2, 3, CV_32FC1);
    cv2DRotationMatrix(center, angle, 1.0, mapMatrix);
    cvWarpAffine(newImg, rotatedImg, mapMatrix, CV_INTER_LINEAR | CV_WARP_FILL_OUTLIERS, cvScalarAll(255.f));

    // BUGFIX: these temporaries were leaked on every call.
    cvReleaseMat(&mapMatrix);
    cvReleaseImage(&newImg);

    return rotatedImg;
}
Exemplo n.º 11
0
IplImage *TBackgroundVuMeter::CreateTestImg()
{
  // No bins configured -> nothing to draw.
  if(m_nBinCount <= 0)
    return NULL;

  // One column per bin, 100 rows, 3 channels, cleared to zero.
  IplImage *pImg = cvCreateImage(cvSize(m_nBinCount, 100), IPL_DEPTH_8U, 3);
  if(pImg != NULL)
    cvSetZero(pImg);

  return pImg;
}
Exemplo n.º 12
0
CvMat* AbstractCamera::computeKMatrix(double focalLength, float pX, float pY) {
	// Build the 3x3 camera intrinsic matrix
	//   [ f  0  pX ]
	//   [ 0  f  pY ]
	//   [ 0  0   1 ]
	// The caller owns the returned matrix.
	CvMat* intrinsics = cvCreateMat(3, 3, CV_32FC1);
	cvSetZero(intrinsics);
	cvmSet(intrinsics, 0, 0, focalLength);	// fx
	cvmSet(intrinsics, 0, 2, pX);			// principal point x
	cvmSet(intrinsics, 1, 1, focalLength);	// fy
	cvmSet(intrinsics, 1, 2, pY);			// principal point y
	cvmSet(intrinsics, 2, 2, 1.0f);
	return intrinsics;
}
Exemplo n.º 13
0
/**
- FUNCTION: Mean
- FUNCTIONALITY: Get blob mean color in input image
- PARAMETERS:
	- image: image from which gray values are extracted
- RESULT:
	- 
- RESTRICTIONS:
	- 
- AUTHOR: rborras
- CREATION DATE: 2008/05/06
- MODIFICATION: Date. Author. Description.
*/
// Compute the blob's mean gray value (and standard deviation, stored in
// m_stdDevGray) over `image`, masked to the blob's contour. Returns the
// mean; returns 0 for an empty bounding box or an invalid image.
double CBlob::Mean( IplImage *image )
{
	// it is calculated?
/*	if( m_meanGray != -1 )
	{
		return m_meanGray;
	}
*/	
	// Create a mask with same size as blob bounding box
	IplImage *mask;
	CvScalar mean, std;
	CvPoint offset;

	GetBoundingBox();
	
	if (m_boundingBox.height == 0 ||m_boundingBox.width == 0 || !CV_IS_IMAGE( image ))
	{
		m_meanGray = 0;
		return m_meanGray;
	}

	// apply ROI and mask to input image to compute mean gray and standard deviation
	mask = cvCreateImage( cvSize(m_boundingBox.width, m_boundingBox.height), IPL_DEPTH_8U, 1);
	cvSetZero(mask);

	// Shift contour coordinates into the mask's local frame.
	offset.x = -m_boundingBox.x;
	offset.y = -m_boundingBox.y;

	// draw external contour filled white on mask
	cvDrawContours( mask, m_externalContour.GetContourPoints(), CV_RGB(255,255,255), CV_RGB(255,255,255),0, CV_FILLED, 8,
					offset );

	// draw internal contours (holes) filled black, excluding them from the mask
	t_contourList::iterator it = m_internalContours.begin();
	while(it != m_internalContours.end() )
	{
		cvDrawContours( mask, (*it).GetContourPoints(), CV_RGB(0,0,0), CV_RGB(0,0,0),0, CV_FILLED, 8,
					offset );
		it++;
	}

	cvSetImageROI( image, m_boundingBox );
	cvAvgSdv( image, &mean, &std, mask );
	
	m_meanGray = mean.val[0];
	m_stdDevGray = std.val[0];

	cvReleaseImage( &mask );
	cvResetImageROI( image );

	return m_meanGray;
}
Exemplo n.º 14
0
// Rotate src about the point `center` by `angle` degrees and scale it by
// `factor`, writing the result into dst.
void RotateImage(IplImage *src, IplImage *dst, CvPoint center, float angle,
		float factor) {
	// 2x3 affine matrix for cvGetQuadrangleSubPix:
	//   [ a  b | cx ]          a = factor * cos(-angle)
	//   [-b  a | cy ]   with   b = factor * sin(-angle)
	float m[6];
	CvMat mat = cvMat(2, 3, CV_32FC1, m);
	m[0] = (float) (factor * cos(-angle * CV_PI / 180.));
	m[1] = (float) (factor * sin(-angle * CV_PI / 180.));
	m[2] = center.x;
	m[3] = -m[1];
	m[4] = m[0];
	m[5] = center.y;
	cvSetZero(dst);
	cvGetQuadrangleSubPix(src, dst, &mat);
}
Exemplo n.º 15
0
KalmanSensorCore::KalmanSensorCore(int _n, int _m) {
	// n = state dimension, m = measurement dimension.
	n = _n;
	m = _m;
	// Allocate every matrix used by the sensor model and clear it to zero.
	z = cvCreateMat(m, 1, CV_64FC1);
	cvSetZero(z);
	H = cvCreateMat(m, n, CV_64FC1);
	cvSetZero(H);
	H_trans = cvCreateMat(n, m, CV_64FC1);
	cvSetZero(H_trans);
	K = cvCreateMat(n, m, CV_64FC1);
	cvSetZero(K);
	z_pred = cvCreateMat(m, 1, CV_64FC1);
	cvSetZero(z_pred);
	z_residual = cvCreateMat(m, 1, CV_64FC1);
	cvSetZero(z_residual);
	x_gain = cvCreateMat(n, 1, CV_64FC1);
	cvSetZero(x_gain);
}
void initialize(struct timeval *past)
{
	char scorechar[10];
	CvMat *rot_mat = cvCreateMat(2, 3, CV_32FC1);
	
	gettimeofday(past, NULL);
	
	// goal of computer handle
	gpc = cpc;
	com_change_goal = 0;
	
	// ball positon
	bpc = cvPoint2D32f(boundw+r0-winx/7,boundh/2);
	
	// ball velocity
	bv.x = 0;// 2004;
	bv.y = 0;//1000;
	
	// explosion effect
	explosr = 0;
	
	// score texture (computer)
	cvSetZero(scoretext1);
	cv2DRotationMatrix(cvPoint2D32f(rotatbox/2,rotatbox/2), -90.0, 1.0, rot_mat);
	if(score[0]>99) score[0] = 99;
	sprintf(scorechar, "%2d", score[0]);
	cvPutText(scoretext1, scorechar, cvPoint(rotatbox/3,rotatbox/2), &fontline,   blue);
	cvPutText(scoretext1, scorechar, cvPoint(rotatbox/3,rotatbox/2), &fontlight, white);
	cvWarpAffine(scoretext1, scoretext1, rot_mat);
	
	// score texture (user)
	cvSetZero(scoretext2);
	if(score[1]>99) score[1] = 99;
	sprintf(scorechar, "%d", score[1]);
	cvPutText(scoretext2, scorechar, cvPoint(rotatbox/3,rotatbox/2), &fontline,   blue);
	cvPutText(scoretext2, scorechar, cvPoint(rotatbox/3,rotatbox/2), &fontlight, white);
	cvWarpAffine(scoretext2, scoretext2, rot_mat);
}
Exemplo n.º 17
0
// Thread entry: draw one 24x20-pixel colored box per track-list entry for
// every live track into the global ThisShowTrack image and display it.
// pParm carries the track count as an integer.
UINT ShowTrackResult(LPVOID pParm)
{
	int _NumOfTrack = (int)(pParm);

	cvSetZero(ThisShowTrack);

	bool clipped = false;	// true once a box would fall off the right edge

	for (int i = 0; i < _NumOfTrack && !clipped; i++)
	{
		if (TrackT[i].TrackLife == 0)
		{
			continue;
		}

		int LeftTop_y = 24 * i;
		int RightButton_y = LeftTop_y + 20;

		// BUGFIX: bounds check added (the sibling drawer has it) to prevent
		// out-of-bounds writes when there are more tracks than rows.
		if (RightButton_y >= ThisShowTrack->height)
			break;

		for (int j = 0; j < (TrackT[i].NumOFTrackList + 1); j++)
		{
			int LeftTop_x = 24 * j;
			int RightButton_x = 24 * (j + 1) - 4;

			// BUGFIX: clip at the right edge instead of writing past the row.
			if (RightButton_x >= ThisShowTrack->width)
			{
				clipped = true;
				break;
			}

			// Channel to light up: first box uses channel 2, second channel 0,
			// all later boxes channel 1.
			int color;
			if (j == 0)
				color = 2;
			else if (j == 1)
				color = 0;
			else
				color = 1;

			// BUGFIX: address rows via widthStep instead of a hard-coded
			// 640-pixel stride, so the code works for any image width/padding.
			for (int h = LeftTop_y; h < RightButton_y + 1; h++)
			{
				for (int w = LeftTop_x; w < RightButton_x + 1; w++)
				{
					ThisShowTrack->imageData[h * ThisShowTrack->widthStep + w * 3 + color] = (char)0xff;
				}
			}
		}
	}

	cvShowImage("ThisShowTrack", ThisShowTrack);
	cvWaitKey(1);

	return 0;
}
Exemplo n.º 18
0
void ofCvOpticalFlowBM::allocate(int _w, int _h) {
    // Remember the capture resolution.
    captureWidth  = _w;
    captureHeight = _h;

    // Block matching runs on a fixed 320x240 working resolution.
    cw = 320;
    ch = 240;

    block_size = 10;
    shift_size = 1;

    // One flow cell per block (round up for partial blocks at the edges).
    cols = int(ceil(double(cw) / block_size));
    rows = int(ceil(double(ch) / block_size));

    // Per-cell velocity matrices, cleared to zero.
    vel_x = cvCreateMat(rows, cols, CV_32FC1);
    vel_y = cvCreateMat(rows, cols, CV_32FC1);
    cvSetZero(vel_x);
    cvSetZero(vel_y);

    block     = cvSize(block_size, block_size);
    shift     = cvSize(shift_size, shift_size);
    max_range = cvSize(10, 10);

}
// Paint a Gaussian-shaped color blob (color chosen by iterationObjectSal
// from a fixed palette) into `salimg`, centered on the current object
// location (objLocX, objLocY).
void objectCompare::detectionBlob(int iterationObjectSal) {

		CvPoint newwinnerpos;
	    // Fixed BGR palette indexed by iterationObjectSal.
	    static CvScalar colors[] =
	    {
	        {{0,0,255}},
	        {{0,128,255}},
	        {{0,255,255}},
	        {{0,255,0}},
	        {{255,128,0}},
	        {{255,255,0}},
	        {{255,0,0}},
	        {{255,0,255}},
	        {{255,255,255}}
	    };

		newwinnerpos.x = objLocX;
		newwinnerpos.y = objLocY;

			cvSetZero(salimg);
			// Gaussian falloff: window half-size 75 px, variance 25*25.
			int m_windowSize = 75,m_size=25*25;
			for(int x = -m_windowSize; x <= m_windowSize; ++x)
			{
				for(int y = -m_windowSize; y <= m_windowSize; ++y)
				{
					int posx = newwinnerpos.x + x;
					int posy = newwinnerpos.y + y;
					// NOTE(review): the clip bounds 320x240 are hard-coded —
					// confirm they match salimg's actual dimensions.
					if(posx >= 0 && posy >= 0 && posx < 320 && posy < 240)
					{
						//((float*)(mp_cvSalImage->ipl->imageData))[(m_newWinnerData.winnerPos.x+x)+m_width*(m_newWinnerData.winnerPos.y+y)] = exp(-0.5/m_size*(x*x+y*y));
						double val = exp(-0.5/m_size*(x*x+y*y));

						/*switch(iterationObjectSal)
						{
						case 0: b=cvRound(val*128);
						break;
						case 1: g=cvRound(val*128);
						break;
						case 2: r=cvRound(val*128);
						break;
						}*/
						// Scale the palette color by the Gaussian weight.
						(salimg->imageData + posy*salimg->widthStep)[posx*salimg->nChannels+0]=cvRound(colors[iterationObjectSal].val[0]*val);
						(salimg->imageData + posy*salimg->widthStep)[posx*salimg->nChannels+1]=cvRound(colors[iterationObjectSal].val[1]*val);//cvRound(val*128);
						(salimg->imageData + posy*salimg->widthStep)[posx*salimg->nChannels+2]=cvRound(colors[iterationObjectSal].val[2]*val);//cvRound(val*128);
					}
				}
			}
}
Exemplo n.º 20
0
//---------------------------
// Compute the homography mapping the quad src[4] onto dst[4] and store it
// in the member matrix `translate` (via the member point buffers
// cvsrc/cvdst).
void coordWarping::calculateMatrix(ofxPoint2f src[4], ofxPoint2f dst[4]){

	cvSetZero(translate);
	for (int i = 0; i < 4; i++){
		cvsrc[i].x = src[i].x;
		cvsrc[i].y = src[i].y;
		cvdst[i].x = dst[i].x;
		cvdst[i].y = dst[i].y;
	}
	
	cvWarpPerspectiveQMatrix(cvsrc, cvdst, translate);  // calculate homography
	// (removed two unused locals that aliased the matrix data)
}
Exemplo n.º 21
0
// Rasterize the sparse flow-vector list into dense component images:
// m_vx/m_vy store the displacement biased by +128, m_val marks valid
// pixels with 255, and m_vx_vy_val packs all three into one 3-channel image.
void CensusFlowBase::vector_array_2_vector_3image3x() {
    int x1,x2,y1,y2;
    cvSet(m_vx_image,cvScalar(0)); cvSet(m_vy_image,cvScalar(0)); cvSetZero(m_val_image); 
    // NOTE(review): m_vx_vy_val_image is written below but never cleared
    // here — confirm it is reset elsewhere each frame.
    for(int kk=0;kk<zaeler_flow_vector_array;kk++) {
        // (x1,y1) -> (x2,y2): vector endpoints; results are stored at the
        // destination pixel (x2,y2).
        x1 =   flow_vector_array[kk].x1; 
        y1 =   flow_vector_array[kk].y1;
        x2 =   flow_vector_array[kk].x2;
        y2 =   flow_vector_array[kk].y2;
        m_vx_image->imageData[y2*m_vx_image->widthStep+x2] = ((uchar) (x2-x1)+128);
        m_vy_image->imageData[y2*m_vy_image->widthStep+x2] = ((uchar) (y2-y1)+128);
        m_val_image->imageData[y2*m_val_image->widthStep+x2] = ((uchar) 255);
        m_vx_vy_val_image->imageData[y2*m_vx_vy_val_image->widthStep+x2*3] = ((uchar) ((x2-x1)+128));
        m_vx_vy_val_image->imageData[y2*m_vx_vy_val_image->widthStep+x2*3+1]=((uchar) ((y2-y1)+128));
        m_vx_vy_val_image->imageData[y2*m_vx_vy_val_image->widthStep+x2*3+2]=((uchar) 255);
    }
}
//-------------------------------------------------------------------------------------
void ofCvColorImage::warpIntoMe(ofCvColorImage &mom, ofPoint2f src[4], ofPoint2f dst[4]){
	// Warp mom's pixels into this image using the homography that maps
	// the quad src[4] onto dst[4].
	CvPoint2D32f from[4];
	CvPoint2D32f to[4];
	for (int i = 0; i < 4; i++){
		from[i].x = src[i].x;
		from[i].y = src[i].y;
		to[i].x   = dst[i].x;
		to[i].y   = dst[i].y;
	}

	CvMat *homography = cvCreateMat(3,3,CV_32FC1);
	cvSetZero(homography);
	cvWarpPerspectiveQMatrix(from, to, homography);  // calculate homography
	cvWarpPerspective( mom.getCvImage(), cvImage, homography);
	cvReleaseMat(&homography);
}
Exemplo n.º 23
0
bool HandRegion::InitHandMask( IplImage * srcImage )
{
    // Nothing to do if the mask buffer already exists.
    if ( _pImageHandMask )
        return true;

    // Allocate a single-channel 8-bit mask matching the source frame.
    _pImageHandMask = cvCreateImage( cvGetSize( srcImage ), 8, 1 );
    if ( _pImageHandMask == 0 )
        return false;

    _pImageHandMask->origin = srcImage->origin;
    cvSetZero( _pImageHandMask );
    return true;
}
Exemplo n.º 24
0
// Draw a blue circle on a blank 100x100 matrix and display it until a key
// is pressed.
int main()
{
    CvMat* srcMat=cvCreateMat(100,100,CV_8UC3);
    cvSetZero(srcMat);  // start from an all-black canvas
  
    int radius=40;
    CvScalar cirColor=cvScalar(255,0,0);
    cvCircle(srcMat,cvPoint(50,50),radius,cirColor,1,8,0);

    cvNamedWindow("Ex302");
    // BUGFIX: cvShowImage takes a CvArr* and accepts a CvMat* directly;
    // casting it to IplImage* was incorrect (different struct layouts).
    cvShowImage("Ex302",srcMat);

    cvWaitKey(0);
    cvReleaseMat(&srcMat);
    cvDestroyWindow("Ex302");
    return 0;
}
//Set Texture Mapping Parameters
void Init(void){
	// White background for the GL scene.
	glClearColor(1.0f, 1.0f, 1.0f, 0.0f);

	glShadeModel(GL_SMOOTH);

	// 800x600 8-bit 3-channel image, cleared to black, used as texture source.
	Image = cvCreateImage(cvSize(800, 600), IPL_DEPTH_8U, 3);
	cvSetZero(Image);

	cvNamedWindow("Image", CV_WINDOW_AUTOSIZE);
}
Exemplo n.º 26
0
// Reproject the right image into the left view using the disparity map:
// each destination pixel pulls the right-image pixel shifted left by
// disparity/PixelDiff, consuming each source pixel at most once.
// *dst receives a newly allocated copy; the caller owns it.
void CVP::runcvt(IplImage** dst)
{
	std::chrono::system_clock::time_point start = std::chrono::system_clock::now();
	CvSize size = cvGetSize(imgR);
	IplImage* copyImgR = cvCloneImage(imgR);
	//IplImage* cvtR2L = cvCloneImage(imgL);
	IplImage* cvtR2L = cvCreateImage(size, IPL_DEPTH_8U, 3);
	cvSetZero(cvtR2L);


//	std::chrono::system_clock::time_point start = std::chrono::system_clock::now();
//#pragma omp parallel for schedule(dynamic)
	for (int i = 0; i < size.height; i++)
	{
		// Row pointers: disparity, output, and the consumable right image.
		uchar* pDispG = (uchar*)(disparity->imageData + i*disparity->widthStep);
		uchar* prtCvtR2L = (uchar*)(cvtR2L->imageData + i*cvtR2L->widthStep);
		uchar* prtCopyImgR = (uchar*)(copyImgR->imageData + i*copyImgR->widthStep);
		// Scan right-to-left so nearer (higher-disparity) pixels win.
		for (int j = size.width-1; j >=0; j--)
		{
			int tempDisp = pDispG[j];
			int tempVal = j;
			tempVal -= tempDisp / PixelDiff;
			// Copy only if the shifted source column is valid and has not
			// already been consumed (its blue channel is zeroed on use).
			if (tempVal >= 0 && prtCopyImgR[3 * tempVal] != 0)
			{
				prtCvtR2L[3 * j] = prtCopyImgR[3 * tempVal];
				prtCvtR2L[3 * j + 1] = prtCopyImgR[3 * tempVal + 1];
				prtCvtR2L[3 * j + 2] = prtCopyImgR[3 * tempVal + 2];

				prtCopyImgR[3 * tempVal] = 0;
				//prtCopyImgR[3 * tempVal + 1] = 0;
				//prtCopyImgR[3 * tempVal + 2] = 0;
			}
		}
	}

	// Report elapsed time (message text is intentionally left as-is).
	std::chrono::duration<double> sec = std::chrono::system_clock::now() - start;
	std::cout << "시차변환 걸린 시간(초) : " << sec.count() << " seconds" << std::endl;

	*dst = cvCloneImage(cvtR2L);

	cvReleaseImage(&copyImgR);
	cvReleaseImage(&cvtR2L);
	return;

} 
Exemplo n.º 27
0
  void diagnostics_space::Diagnostics::plotPath(vector<Triplet> my_path) {
    cvSetZero(path_image);

    // Draw a randomly colored segment between each pair of consecutive
    // waypoints; y is flipped because image rows grow downward.
    for(int i = 0; i < (int) my_path.size() - 1; i++) {
      const int ax = (int) my_path[i].x;
      const int ay = MAP_MAX - (int) my_path[i].y - 1;
      const int bx = (int) my_path[i + 1].x;
      const int by = MAP_MAX - (int) my_path[i + 1].y - 1;

      srand(i + time(0));	// reseed per segment for a fresh color

      cvLine(path_image, cvPoint(ax, ay), cvPoint(bx, by),
             CV_RGB(rand() % 255, rand() % 255, rand() % 255), 2, CV_AA, 0);
    }

    cvShowImage("[DIAG] Path", path_image);
    cvWaitKey(1);
  }
Exemplo n.º 28
0
//The Gaussian background model does not return a background image, so build one automatically with a running average
bool GetAvgBackgroudImg(const IplImage *pFrame, IplImage *bgImg,double alpha)
{  
	static bool isFirst=true;
	static IplImage *pAccImg = cvCreateImage(cvSize(pFrame->width, pFrame->height),  IPL_DEPTH_32F,3);
	if(isFirst){
		cvSetZero(pAccImg);
		isFirst=false;
	}
	if (!pFrame || !bgImg || !pAccImg ||
		pAccImg->width != pFrame->width || pAccImg->width != bgImg->width ||
		pAccImg->height != pFrame->height || pAccImg->height != bgImg->height|| 
		pFrame->nChannels!=3 || pFrame->nChannels!=3){
		return false;
	}
	cvRunningAvg(pFrame,pAccImg,alpha);
	cvConvertScale(pAccImg,bgImg,1.,0.);
	return true;
}
// Fire/no-fire decision based on texture: build a 45-degree gray-level
// co-occurrence matrix (GLCM) of the frame and threshold its local
// homogeneity. Sets result value 1 when stable < stable_max, else 0.
void GlcmJNode::judge(const Result* result) {
    Result& myResult = this->result;
    myResult.setImage(result->getImage());

    // Convert to a 256-level grayscale image.
    cvCvtColor(result->getImage(), grayImage, CV_BGR2GRAY);

    // Build the gray-level co-occurrence matrix.
    cvSetZero(grayMat);
    for(int i = 1; i < height; ++i) for(int j = 0; j + 1 < width; ++j) { //(i, j)<-->(i - 1, j + 1), 45 degrees
        int a = cvGetReal2D(grayImage, i, j);
        int b = cvGetReal2D(grayImage, i - 1, j + 1);
        cvmSet(grayMat, a, b, cvmGet(grayMat, a, b) + 1);
        //cvmSet(grayMat, b, a, cvmGet(grayMat, b, a) + 1);
    }

    // Normalize the co-occurrence matrix.
    for(size_t i = 0; i < GRAY_LEVEL; ++i) for(size_t j = 0; j < GRAY_LEVEL; ++j)
        cvmSet(grayMat, i, j, cvmGet(grayMat, i, j) / R);

    // Energy (currently unused)
    //double energy = 0;
    //for(size_t i = 0; i < GRAY_LEVEL; ++i) for(size_t j = 0; j < GRAY_LEVEL; ++j)
    //    energy += cvmGet(grayMat, i, j) * cvmGet(grayMat, i, j);

    // Entropy (currently unused)
    //double entropy = 0;
    //for(size_t i = 0; i < GRAY_LEVEL; ++i) for(size_t j = 0; j < GRAY_LEVEL; ++j)
    //    if(fabs(cvmGet(grayMat, i, j)) > 0.000001)
    //        entropy += -cvmGet(grayMat, i, j) * log(cvmGet(grayMat, i, j));

    // Local homogeneity (inverse difference moment).
    double stable = 0;
    for(size_t i = 0; i < GRAY_LEVEL; ++i) for(size_t j = 0; j < GRAY_LEVEL; ++j)
        stable += cvmGet(grayMat, i, j) / (1 + (i - j) * (i - j));
    if(stable < stable_max) myResult.setValue(1); else myResult.setValue(0);

    if(show_info) {
        CvFont font = cvFont(1.5, 1);
        char buf[256]; sprintf(buf, "Local calm:%f %s", stable, stable < stable_max ? "!FIRE!" : "|---|");
        cvPutText(grayImage, buf, cvPoint(0, height - 8), &font, cvScalarAll(255));
        cvShowImage(name.c_str(), grayImage);
    }
}
Exemplo n.º 30
0
  int regionTracker::initialize()
  {
    // input: intensity image, 8bit, 1channel, grayscale
    //        depth image, 16bit, 1channel, grayscale
    // output: human region, IplImage, 8bit, 1channel, binary image
    //         ('initializeFlag' is set to 1 if initialize is successfully processed)
    // return: 0 on success, -1 when no face is found or the region is invalid
    //
    // take normal intensity image and generate silhouette binary image
    // silhouette is human region in the input image

    CvPoint center;                               // coordinate of center of detected face
    int radius;                                   // radius of detected face
    CvScalar tmp;

    // clear result image
    cvSetZero(result);

    // detect face and get human region
    if(fd->faceDetect(intensity, &center, &radius) > 0)
      {
	// get human region around the detected face
	human->getRegion(depth, center.x, center.y, result);

	// calculate centroid and area of human region
	if(calcCentroidAndArea() == -1)
	  {
	    initializeFlag = 0;
	    return -1;
	  }

	// get depth value at the face center
	tmp = cvGet2D(depth, center.y, center.x);
	centroidDepth = (int)tmp.val[0];

	// set flag
	initializeFlag = 1;

	return 0;
      }

    return -1;
  }