void getDepthData(GLubyte* dest) {
	float* fdest = (float*) dest;
	long* depth2rgb = (long*) depthToRgbMap;
    NUI_IMAGE_FRAME imageFrame;
    NUI_LOCKED_RECT LockedRect;
    if (sensor->NuiImageStreamGetNextFrame(depthStream, 0, &imageFrame) < 0) return;
    INuiFrameTexture* texture = imageFrame.pFrameTexture;
    texture->LockRect(0, &LockedRect, NULL, 0);
    if (LockedRect.Pitch != 0) {
        const USHORT* curr = (const USHORT*) LockedRect.pBits;
        for (int j = 0; j < height; ++j) {
			for (int i = 0; i < width; ++i) {
				// Get depth of pixel in millimeters
				USHORT depth = NuiDepthPixelToDepth(*curr++);
				// Store coordinates of the point corresponding to this pixel
				Vector4 pos = NuiTransformDepthImageToSkeleton(i, j, depth<<3, NUI_IMAGE_RESOLUTION_640x480);
				*fdest++ = pos.x/pos.w;
				*fdest++ = pos.y/pos.w;
				*fdest++ = pos.z/pos.w;
				// Store the index into the color array corresponding to this pixel
				NuiImageGetColorPixelCoordinatesFromDepthPixelAtResolution(
					NUI_IMAGE_RESOLUTION_640x480, NUI_IMAGE_RESOLUTION_640x480, NULL,
					i, j, depth<<3, depth2rgb, depth2rgb+1);
				depth2rgb += 2;
			}
		}
    }
    texture->UnlockRect(0);
    sensor->NuiImageStreamReleaseFrame(depthStream, &imageFrame);
}
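A note on the depth << 3 above: the Kinect SDK v1 depth stream packs the player index into the low three bits of each pixel, NuiDepthPixelToDepth strips it, and the skeleton-space transforms expect the packed layout again. A minimal standalone sketch of the convention, assuming only the standard NuiApi.h header:

#include <Windows.h>
#include <NuiApi.h>

void depthPackingDemo(USHORT rawPixel) {
	// Raw stream pixel: depth in mm in the high 13 bits, player index in the low 3
	USHORT depthMm = NuiDepthPixelToDepth(rawPixel);       // rawPixel >> 3
	USHORT player  = NuiDepthPixelToPlayerIndex(rawPixel); // rawPixel & 7
	// NuiTransformDepthImageToSkeleton takes the *packed* value, so shift back
	Vector4 p = NuiTransformDepthImageToSkeleton(320, 240, (USHORT)(depthMm << 3), NUI_IMAGE_RESOLUTION_640x480);
	(void)p; (void)player;
}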
bool MultiCursorAppCpp::getDepthImageV1()
{
	// Initialize a matrix for each data channel
	userAreaMat = Mat::zeros(CAMERA_HEIGHT, CAMERA_WIDTH, CV_8UC3);
	point3fMatrix = Mat::zeros(CAMERA_HEIGHT, CAMERA_WIDTH, CV_32FC3);
	heightMatrix = Mat::zeros(CAMERA_HEIGHT, CAMERA_WIDTH, CV_16U);


	// Get the frame data of the depth camera
	NUI_IMAGE_FRAME depthFrame = { 0 };
	if (kinect->NuiImageStreamGetNextFrame(depthStreamHandle, 0, &depthFrame) < 0) {
		return false;
	}

	// Get the actual depth data
	NUI_LOCKED_RECT depthData = { 0 };
	depthFrame.pFrameTexture->LockRect(0, &depthData, 0, 0);

	USHORT* depth = (USHORT*)depthData.pBits;
	for (int i = 0; i < (depthData.size / sizeof(USHORT)); ++i) {
		USHORT distance = ::NuiDepthPixelToDepth(depth[i]);

		LONG depthX = i % CAMERA_WIDTH;
		LONG depthY = i / CAMERA_WIDTH;

		int index = ((depthY * CAMERA_WIDTH) + depthX) * 3;
		UCHAR* dataDepth = &userAreaMat.data[index];

		// Set the height from the floor (the sensor is assumed to face straight down from KINECT_HEIGHT)
		USHORT heightFromFloor = (0 < distance && distance < KINECT_HEIGHT) ? (USHORT)(KINECT_HEIGHT - distance) : 0;
		*heightMatrix.ptr<USHORT>(depthY, depthX) = heightFromFloor;

		// Define the user area
		if (USER_HEIGHT_THRESHOLD <= heightFromFloor && heightFromFloor <= HEAD_HEIGHT_MAX) {
			dataDepth[0] = 255;
			dataDepth[1] = 255;
			dataDepth[2] = 255;
		}
		else {
			dataDepth[0] = 0;
			dataDepth[1] = 0;
			dataDepth[2] = 0;
		}

		// Set the 3D point data (point cloud)
		Vector4 realPoint = NuiTransformDepthImageToSkeleton(depthX, depthY, distance << 3, KINECT_RESOLUTION);
		point3fMatrix.ptr<float>(depthY, depthX)[0] = realPoint.x;
		point3fMatrix.ptr<float>(depthY, depthX)[1] = realPoint.y;
		point3fMatrix.ptr<float>(depthY, depthX)[2] = realPoint.z;
	}

	// Release each data
	if (S_OK != kinect->NuiImageStreamReleaseFrame(depthStreamHandle, &depthFrame)) {
		cout << "Error: NuiImageStreamReleaseFrame()" << endl;
		exit(1);
	}

	return true;
}
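A minimal sketch of the configuration this example assumes (the names come from the snippet; the values are hypothetical and must match the actual installation, a sensor mounted at KINECT_HEIGHT mm facing straight down):

#include <Windows.h>
#include <NuiApi.h>

static const int CAMERA_WIDTH  = 640;   // depth stream width (hypothetical)
static const int CAMERA_HEIGHT = 480;   // depth stream height (hypothetical)
static const NUI_IMAGE_RESOLUTION KINECT_RESOLUTION = NUI_IMAGE_RESOLUTION_640x480;

static const USHORT KINECT_HEIGHT         = 2500;   // sensor-to-floor distance in mm (hypothetical)
static const USHORT USER_HEIGHT_THRESHOLD = 1000;   // minimum height from floor to count as a user (hypothetical)
static const USHORT HEAD_HEIGHT_MAX       = 2200;   // reject implausibly tall points (hypothetical)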
Example #3
/*
 * @brief Kinect::getPointCloud(Mat& Mat_image). Builds a colored point cloud from the depth stream and writes a grayscale depth image into Mat_image. (c57)
 * @param cv::Mat& Mat_image
 * @return pcl::PointCloud<pcl::PointXYZRGB>::Ptr points
 */
pcl::PointCloud<pcl::PointXYZRGB>::Ptr Kinect::getPointCloud(Mat& Mat_image)
{
	try{
		pcl::PointCloud<pcl::PointXYZRGB>::Ptr points(new pcl::PointCloud<pcl::PointXYZRGB>()); // storage for the point cloud (c57)
		points->width = width;
		points->height = height;

		Mat_image = Mat(height, width, CV_8UC1, Scalar(0)); // prepare the depth image

		// Get the next frame from the depth camera
		NUI_IMAGE_FRAME depthFrame = { 0 };
		ERROR_CHECK(kinect->NuiImageStreamGetNextFrame(depthStreamHandle, 0, &depthFrame));

		// Lock the depth data
		NUI_LOCKED_RECT depthData = { 0 };
		depthFrame.pFrameTexture->LockRect(0, &depthData, 0, 0);

		USHORT* depth = (USHORT*)depthData.pBits;
		for (int i = 0; i < (depthData.size / sizeof(USHORT)); ++i){
			USHORT distance = ::NuiDepthPixelToDepth(depth[i]);
			LONG depthX = i % width;
			LONG depthY = i / width;
			LONG colorX = depthX;
			LONG colorY = depthY;

			// Convert depth-camera coordinates to RGB-camera coordinates
			// (the API expects the packed depth value, hence distance << 3)
			kinect->NuiImageGetColorPixelCoordinatesFromDepthPixelAtResolution(CAMERA_RESOLUTION, CAMERA_RESOLUTION, 0, depthX, depthY, distance << 3, &colorX, &colorY);

			// Clamp the mapped coordinates, which can fall outside the image
			colorX = min(max(colorX, 0L), (LONG)(width - 1));
			colorY = min(max(colorY, 0L), (LONG)(height - 1));

			// Build the grayscale depth image
			Mat_image.at<UCHAR>(colorY, colorX) = (UCHAR)(distance / 8192.0 * 255.0);

			// Point cloud (the transform also expects the packed depth value)
			Vector4 real = NuiTransformDepthImageToSkeleton(depthX, depthY, distance << 3, CAMERA_RESOLUTION);
			pcl::PointXYZRGB point;
			point.x = real.x;
			point.y = real.y;
			point.z = real.z; // PCL uses a right-handed coordinate system
			
			// Texture (color) from the RGB image
			Vec4b color = image.at<Vec4b>(colorY, colorX);
			point.r = color[2];
			point.g = color[1];
			point.b = color[0];

			points->push_back(point);
		}
		cloud = points;

		// Release the frame data (c58)
		ERROR_CHECK(kinect->NuiImageStreamReleaseFrame(depthStreamHandle, &depthFrame));
	}
	catch (exception& ex){
		cout << ex.what() << endl;
	}
	return cloud;
}
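For reference, a cloud returned by getPointCloud can be written straight to disk with PCL's PCD writer. Note that push_back resets the cloud's width/height to size x 1, so the organized dimensions set at the top of the method do not survive. A minimal usage sketch (the file name is hypothetical):

#include <pcl/io/pcd_io.h>
#include <pcl/point_types.h>

void saveCloud(const pcl::PointCloud<pcl::PointXYZRGB>::Ptr& cloud) {
	if (cloud && !cloud->empty()) {
		// Binary PCD is compact and loads back with pcl::io::loadPCDFile
		pcl::io::savePCDFileBinary("kinect_cloud.pcd", *cloud);
	}
}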
Example #4
	virtual void convertProjectiveToWorld( const uint32_t deviceID, const uint32_t coordinateCount, const _2RealVector3f* inProjective, _2RealVector3f* outWorld )
	{
		checkDeviceRunning(deviceID,  "_2RealImplOpenNI::convertProjectiveToWorld()" );

		//fetching and writing data to array
		Vector4 out;
		for( uint32_t i=0; i < coordinateCount; ++i )
		{
			out = NuiTransformDepthImageToSkeleton( (LONG)inProjective[i].x, (LONG)inProjective[i].y, (USHORT)inProjective[i].z );
			outWorld[i].x = out.x;
			outWorld[i].y = out.y;
			outWorld[i].z = out.z;
		}
	}
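The three-argument overload used above assumes a 320x240 depth image, and the z value must be the packed depth (millimeters shifted left by three bits). For coordinates taken from a 640x480 stream, pass the resolution explicitly; a sketch of the equivalent call, with hypothetical inputs:

#include <Windows.h>
#include <NuiApi.h>

// x, y: 640x480 depth-image pixel; depthMm: depth in millimeters (hypothetical inputs)
Vector4 projectiveToWorld640(LONG x, LONG y, USHORT depthMm) {
	return NuiTransformDepthImageToSkeleton(x, y, (USHORT)(depthMm << 3), NUI_IMAGE_RESOLUTION_640x480);
}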
Example #5
void makeCloudMap(void)
{
	GLfloat win_tex_w = (float)win_width/tex_width;
	GLfloat win_tex_h = (float)win_height/tex_height;
	long cx=0,cy=0;

	for( int y = 0; y < 240; ++y ){
		for( int x = 0; x < 320; ++x){
			Vector4 tmp = NuiTransformDepthImageToSkeleton(x, y, depth[y][x]);
			NuiImageGetColorPixelCoordinatesFromDepthPixel(NUI_IMAGE_RESOLUTION_320x240, NULL, x, y, depth[y][x], &cx, &cy);
			CloudMap[y][x][0] = (short)(tmp.z*1000);
			TexMap[y][x][0] = (short)(cx*win_tex_w);
			CloudMap[y][x][1] = -(short)(tmp.x*1000);
			TexMap[y][x][1] = (short)(cy*win_tex_h);
			CloudMap[y][x][2] = (short)(tmp.y*1000);
			TexMap[y][x][2] = (short)(tmp.z*1000);
			indices[y][x] = y*win_width+x;
		}
	}
}
Example #6
/*!
* @brief Kinect::getLocalPosition. Converts an averaged depth-image coordinate into skeleton space, in millimeters. (c10)
* @param Point3ius averageCoordinate
* @return Vector4 rp (position in mm)
*/
Vector4 Kinect::getLocalPosition(Point3ius averageCoordinate)
{
	Vector4 rp; // holds the converted point (c38)
	Vector4 worldCoordinate; // world (skeleton-space) coordinate (c38)

	// Initialize the variables (c33)
	rp.x = 0.0;
	rp.y = 0.0;
	rp.z = 0.0;
	worldCoordinate.x = 0.0;
	worldCoordinate.y = 0.0;
	worldCoordinate.z = 0.0;

	worldCoordinate = NuiTransformDepthImageToSkeleton((long)averageCoordinate.x, (long)averageCoordinate.y, (USHORT)(averageCoordinate.z << 3), NUI_IMAGE_RESOLUTION_640x480); // the transform expects the packed depth (mm << 3)
	rp.x = (float)(worldCoordinate.x *1000.0f);
	rp.y = (float)(worldCoordinate.y *1000.0f);
	rp.z = (float)(worldCoordinate.z *1000.0f);

	return rp;
}
Example #7
bool Nui_GotDepthAlert( )
{
	NUI_IMAGE_FRAME imageFrame;
	bool processedFrame = true;

	HRESULT hr = m_pNuiSensor->NuiImageStreamGetNextFrame(
		m_pDepthStreamHandle,
		0,
		&imageFrame );

	if ( FAILED( hr ) )
	{
		return false;
	}

	INuiFrameTexture * pTexture = imageFrame.pFrameTexture;
	NUI_LOCKED_RECT LockedRect;
	pTexture->LockRect( 0, &LockedRect, NULL, 0 );
	if ( 0 != LockedRect.Pitch )
	{
		memcpy(m_DepthData, LockedRect.pBits, LockedRect.size);

		// draw the bits to the bitmap
		//BYTE * rgbrun = m_depthRGBX;
		const USHORT * pBufferRun = (const USHORT *)LockedRect.pBits;

		// end pixel is start + width*height - 1
		const USHORT * pBufferEnd = pBufferRun + (g_DepthWidth * g_DepthHeight);

		//assert( frameWidth * frameHeight * g_BytesPerPixel <= ARRAYSIZE(m_depthRGBX) );
		int DepthIndex = 0;
		g_iPointsCount = 0;
		while ( pBufferRun < pBufferEnd )
		{
			USHORT depth     = *pBufferRun;
			//USHORT realDepth = NuiDepthPixelToDepth(depth);
			USHORT player    = NuiDepthPixelToPlayerIndex(depth);

			// transform 13-bit depth information into an 8-bit intensity appropriate
			// for display (we disregard information in most significant bit)
			//BYTE intensity = static_cast<BYTE>(~(realDepth >> 4));

			LONG DepthX = DepthIndex % g_DepthWidth;
			LONG DepthY = DepthIndex / g_DepthWidth;
			Vector4 point;
			point = NuiTransformDepthImageToSkeleton(DepthX, DepthY, depth, g_DepthImgResolution);
			
			if (g_bElimateBackground)
			{
				// Show the player only
				if (player != 0)
				{				
					g_PointsData[DepthIndex] = point;
				}
				else
				{
					g_PointsData[DepthIndex].x = 0.0f;
					g_PointsData[DepthIndex].y = 0.0f;
					g_PointsData[DepthIndex].z = 0.0f;
					g_PointsData[DepthIndex].w = 0.0f;
				}
			}
			else
			{
				// Show both the background and the player
				g_PointsData[DepthIndex] = point;
			}

			DepthIndex++;

			++pBufferRun;
		}

		//m_pDrawDepth->Draw( m_depthRGBX, frameWidth * frameHeight * g_BytesPerPixel );
	}
	else
	{
		processedFrame = false;
		//OutputDebugString( L"Buffer length of received texture is bogus\r\n" );
	}

	pTexture->UnlockRect(0);

	m_pNuiSensor->NuiImageStreamReleaseFrame( m_pDepthStreamHandle, &imageFrame );

	return processedFrame;
}
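The player index consumed above is only populated when the stream actually carries it: the sensor must be initialized with the player-index and skeleton flags and the stream opened as NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX; otherwise NuiDepthPixelToPlayerIndex always returns 0. A minimal initialization sketch (sensor and handle names are hypothetical):

#include <Windows.h>
#include <NuiApi.h>

HRESULT openDepthWithPlayerIndex(INuiSensor* sensor, HANDLE* phStream) {
	// Both flags are required for per-pixel player indices
	HRESULT hr = sensor->NuiInitialize(NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX | NUI_INITIALIZE_FLAG_USES_SKELETON);
	if (FAILED(hr)) return hr;
	return sensor->NuiImageStreamOpen(NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX,
		NUI_IMAGE_RESOLUTION_320x240, 0, 2, NULL, phStream);
}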
Example #8
void measureBody(NUI_Controller* mNui)
{
	//Head Width Measurement
	NUI_Vector4 head,neck,lShoulder,rShoulder,lFoot,rFoot,lHip,rHip,lElbow,rElbow,lHand,rHand,torso ;
	head=mNui->m_Points[NUI_SKELETON_POSITION_HEAD];
	LONG projHeadX,projHeadY;
	USHORT depth;
	NuiTransformSkeletonToDepthImage( head, &projHeadX, &projHeadY, &depth ,mNui->m_DepthResolution);

	UCHAR* headPtr=(UCHAR*)( uImage->imageData+projHeadY*uImage->widthStep+projHeadX);
	USHORT* headDepthPtr=(USHORT*) (dImage->imageData+projHeadY*dImage->widthStep+projHeadX*2);
	UCHAR* iPtr=headPtr;
	LONG leftX=projHeadX;
	LONG rightX=projHeadX;
	LONG leftStep=0;
	LONG rightStep=0;
	while(*(--iPtr)>0 && 0<leftX)
	{
		leftX--;	//Extend the line horizontally until it reaches the borders of the head.
		leftStep++;
	}
	iPtr=headPtr;
	while(*(++iPtr)>0 && rightX<(m_Width-1))
	{
		rightX++;
		rightStep++;
	}

	NUI_Vector4 leftHead=NuiTransformDepthImageToSkeleton(leftX,projHeadY,*(headDepthPtr-leftStep),mNui->m_DepthResolution);
	NUI_Vector4 rightHead=NuiTransformDepthImageToSkeleton(rightX,projHeadY,*(headDepthPtr+rightStep),mNui->m_DepthResolution);

	bodyMeasurements[HEAD_WIDTH]=abs(rightHead.x-leftHead.x);

	//Head Height Measurement
	
	neck=mNui->m_Points[NUI_SKELETON_POSITION_SHOULDER_CENTER];
	bodyMeasurements[HEAD_HEIGHT]=abs(head.y-neck.y);

	//Body Height Measurement
	
	lFoot=mNui->m_Points[NUI_SKELETON_POSITION_FOOT_LEFT];
	rFoot=mNui->m_Points[NUI_SKELETON_POSITION_FOOT_RIGHT];
	float lowPointY=(lFoot.y+rFoot.y)/2;
	
	iPtr=headPtr-uImage->widthStep;	//Initialize the pointer 1 pixel above, since decrement will take place after comparison.
	LONG topY=projHeadY;
	int topStep=0;
	while(*iPtr>0 && (topY>0))
	{
		iPtr-=uImage->widthStep;
		topY--;
		topStep++;
	}
	NUI_Vector4 topPoint=NuiTransformDepthImageToSkeleton(projHeadX,topY,*(headDepthPtr - topStep*dImage->widthStep/2),mNui->m_DepthResolution);
	bodyMeasurements[BODY_HEIGHT]=abs(topPoint.y-lowPointY);

	//Hip Height Measurement

	lHip=mNui->m_Points[NUI_SKELETON_POSITION_HIP_LEFT];
	rHip=mNui->m_Points[NUI_SKELETON_POSITION_HIP_RIGHT];
	bodyMeasurements[HIP_HEIGHT]=(abs(lHip.y-lFoot.y)+abs(rHip.y-rFoot.y))/2;

	//Elbow-Fingertip Measurement
	lElbow=mNui->m_Points[NUI_SKELETON_POSITION_ELBOW_LEFT];
	rElbow=mNui->m_Points[NUI_SKELETON_POSITION_ELBOW_RIGHT];
	lHand=mNui->m_Points[NUI_SKELETON_POSITION_HAND_LEFT];
	rHand=mNui->m_Points[NUI_SKELETON_POSITION_HAND_RIGHT];


	LONG lHandUpX,rHandUpX,lHandDownX,rHandDownX,lHandUpY,lHandDownY,rHandUpY,rHandDownY;
	NuiTransformSkeletonToDepthImage( lHand, &lHandUpX, &lHandUpY, &depth ,mNui->m_DepthResolution);
	NuiTransformSkeletonToDepthImage( rHand, &rHandUpX, &rHandUpY, &depth ,mNui->m_DepthResolution);
	NuiTransformSkeletonToDepthImage( lElbow, &lHandDownX, &lHandDownY, &depth ,mNui->m_DepthResolution);
	NuiTransformSkeletonToDepthImage( rElbow, &rHandDownX, &rHandDownY, &depth ,mNui->m_DepthResolution);

	UCHAR* lHandPtr=(UCHAR*)( uImage->imageData+(int)(lHandUpY-1)*uImage->widthStep+(int)lHandUpX);//Initialize the pointer 1 pixel above, since decrement will take place after comparison.
	while(*lHandPtr>0  && (lHandUpY>0))
	{
		lHandPtr-=uImage->widthStep;
		lHandUpY--;	
	}	//Extend the line vertically until it reaches the borders of the arm.


	UCHAR* lElbowPtr=(UCHAR*) (uImage->imageData+(int)(lHandDownY+1)*uImage->widthStep+(int)lHandDownX);//Initialize the pointer 1 pixel below, since increment will take place after comparison.
	while(*lElbowPtr>0  && (lHandDownY<(m_Height-1)))
	{
		lElbowPtr+=uImage->widthStep;
		lHandDownY++;	
	}	//Extend the line vertically until it reaches the borders of the arm.

	UCHAR* rHandPtr=(UCHAR*) (uImage->imageData+(int)(rHandUpY-1)*uImage->widthStep+(int)rHandUpX);//Initialize the pointer 1 pixel above, since decrement will take place after comparison.
	while(*rHandPtr>0  && (rHandUpY>0))
	{
		rHandPtr-=uImage->widthStep;
		rHandUpY--;	
	}	//Extend the line vertically until it reaches the borders of the arm.

	UCHAR* rElbowPtr=(UCHAR*)(uImage->imageData+(int)(rHandDownY+1)*uImage->widthStep+(int)rHandDownX);//Initialize the pointer 1 pixel below, since increment will take place after comparison.
	while(*rElbowPtr>0  && (rHandDownY<(m_Height-1)))
	{
		rElbowPtr+=uImage->widthStep;
		rHandDownY++;	
	}	//Extend the line vertically until it reaches the borders of the arm.

	USHORT* depthPtr=(USHORT*) (dImage->imageData);
	NUI_Vector4 leftArmDown=NuiTransformDepthImageToSkeleton(lHandDownX,lHandDownY,*(depthPtr+lHandDownY*dImage->widthStep/2+lHandDownX),mNui->m_DepthResolution);
	NUI_Vector4 leftArmUp=NuiTransformDepthImageToSkeleton(lHandUpX,lHandUpY,*(depthPtr+lHandUpY*dImage->widthStep/2+lHandUpX),mNui->m_DepthResolution);
	NUI_Vector4 rightArmDown=NuiTransformDepthImageToSkeleton(rHandDownX,rHandDownY,*(depthPtr+rHandDownY*dImage->widthStep/2+rHandDownX),mNui->m_DepthResolution);
	NUI_Vector4 rightArmUp=NuiTransformDepthImageToSkeleton(rHandUpX,rHandUpY,*(depthPtr+rHandUpY*dImage->widthStep/2+rHandUpX),mNui->m_DepthResolution);
	bodyMeasurements[ELBOW_FINGERTIP]=(abs(rightArmUp.y-rightArmDown.y)+abs(leftArmUp.y-leftArmDown.y))/2;
	//Wrist to Fingertip Measurement
	bodyMeasurements[WRIST_FINGERTIP]=(abs(leftArmUp.y-mNui->m_Points[NUI_SKELETON_POSITION_WRIST_LEFT].y)+abs(rightArmUp.y-mNui->m_Points[NUI_SKELETON_POSITION_WRIST_RIGHT].y))/2;

	//Shoulder Width Measurement
	lShoulder=mNui->m_Points[NUI_SKELETON_POSITION_SHOULDER_LEFT];
	rShoulder=mNui->m_Points[NUI_SKELETON_POSITION_SHOULDER_RIGHT];
	bodyMeasurements[SHOULDER_WIDTH]=abs(rShoulder.x-lShoulder.x);

	//Hip Width Measurement
	LONG lHipX,lHipY,rHipX,rHipY;
	NuiTransformSkeletonToDepthImage(lHip,&lHipX,&lHipY,&depth,mNui->m_DepthResolution);
	NuiTransformSkeletonToDepthImage(rHip,&rHipX,&rHipY,&depth,mNui->m_DepthResolution);

	UCHAR* lEndPtr=(UCHAR*)(uImage->imageData+(int)lHipY*uImage->widthStep+(int)lHipX);
	leftX=lHipX;
	leftStep=0;
	while(*(--lEndPtr)>0 && leftX>0)
	{
		leftX--;	//Extend the line horizontally until it reaches the borders of the hip.
		leftStep++;
	}
	NUI_Vector4 leftHipEnd=NuiTransformDepthImageToSkeleton(leftX,lHipY,*((USHORT*)(dImage->imageData+(int)lHipY*dImage->widthStep+(int)leftX*2)),mNui->m_DepthResolution);

	UCHAR* rEndPtr=(UCHAR*) (uImage->imageData+(int)rHipY*uImage->widthStep+(int)rHipX);
	rightX=rHipX;
	rightStep=0;
	while(*(++rEndPtr)>0 && rightX<(m_Width-1))
	{
		rightX++;	//Extend the line horizontally until it reaches the borders of the hip.
		rightStep++;
	}
	NUI_Vector4 rightHipEnd=NuiTransformDepthImageToSkeleton(rightX,rHipY,*((USHORT*)(dImage->imageData+(int)rHipY*dImage->widthStep+(int)rightX*2)),mNui->m_DepthResolution);
	bodyMeasurements[HIP_WIDTH]=abs(rightHipEnd.x-leftHipEnd.x);

	//Torso Height Measurement

	torso=mNui->m_Points[NUI_SKELETON_POSITION_SPINE];
	bodyMeasurements[TORSO_HEIGHT]=abs(torso.y-lowPointY);


}
Example #9
void getSphereSizes(NUI_Controller* mNui)
{
	int x_init=0;
	int y_init=0;
	int step=0;
	try 
	{
		for (int i=0;i<16;i++)
		{
			NUI_SKELETON_POSITION_INDEX sJoint=nuiIDs[i];
			bool alreadyProcessed=false;
			for (int j=0;j<i;j++)				//If this joint was processed before, reuse its radius and skip it
			{
				if (nuiIDs[j]==sJoint)
				{
					sphereRadii[i]=sphereRadii[j];
					alreadyProcessed=true;
					break;
				}
			}
			if (alreadyProcessed)
				continue;

			NUI_Vector4 realPosition=mNui->m_Points[sJoint];
			Vector2 endOfJoint;
			LONG x,y;
			USHORT depth;				
			NuiTransformSkeletonToDepthImage( realPosition, &x, &y, &depth ,mNui->m_DepthResolution);
			x_init=x;
			y_init=y;
			step=0;
			int radius=0;
			//cvShowImage(windowName.c_str(),uImage);
			//cvSetMouseCallback(windowName.c_str(), mouseEvent, 0);
			//cvWaitKey();
		
			endOfJoint.x=x;
			endOfJoint.y=y;

			UCHAR* iPtr=(UCHAR*)(uImage->imageData+y_init*uImage->widthStep+x_init);
			USHORT* dPtr=(USHORT*)(dImage->imageData+y_init*dImage->widthStep+x_init*2);//Multiply x_init by 2, since dImage is 16-bit (2 bytes per pixel)
			while( step<m_Width)					//Slowly enlarge the joint sphere until it reaches the end of a bone in one direction.
			{			
				if (x_init-step>-1)
				{
					UCHAR tValue=*(iPtr-step);
					if( tValue!=0 )
					{
						endOfJoint.x-=(step-1);
						NUI_Vector4 trueEnd=NuiTransformDepthImageToSkeleton(endOfJoint.x,endOfJoint.y,*(dPtr-(step-1)),mNui->m_DepthResolution);
						radius=abs(realPosition.x-trueEnd.x);
						break;
					}
				}
				if (x_init+step<m_Width)
				{
					UCHAR tValue=*(iPtr+step);
					if( tValue!=0)
					{

						endOfJoint.x+=(step-1);
						NUI_Vector4 trueEnd=NuiTransformDepthImageToSkeleton(endOfJoint.x,endOfJoint.y,*(dPtr+(step-1)),mNui->m_DepthResolution);
						radius=abs(realPosition.x-trueEnd.x);
						break;
					}
				}
				if (step<m_Height)
				{
					if (y_init-step>-1)
					{
						UCHAR tValue=*(iPtr-step*uImage->widthStep);
						if(  tValue!=0 )
						{
							endOfJoint.y-=(step-1);
							NUI_Vector4 trueEnd=NuiTransformDepthImageToSkeleton(endOfJoint.x,endOfJoint.y,*(dPtr-(step-1)*dImage->widthStep/2),mNui->m_DepthResolution);
							radius=abs(realPosition.y-trueEnd.y);
							break;
						}
					}
					if (y_init+step<m_Height)
					{
						UCHAR tValue=*(iPtr+step*uImage->widthStep);
						if( tValue!=0)
						{
							endOfJoint.y+=(step-1);
							NUI_Vector4 trueEnd=NuiTransformDepthImageToSkeleton(endOfJoint.x,endOfJoint.y,*(dPtr+(step-1)*dImage->widthStep/2),mNui->m_DepthResolution);
							radius=abs(realPosition.y-trueEnd.y);
							break;
						}
					}
				}
				step++;
			}
			sphereRadii[i]=radius;
		}
	}
	catch( Ogre::Exception& e ) {
			MessageBox( NULL, e.getFullDescription().c_str(), "An exception has occurred!", MB_OK | MB_ICONERROR | MB_TASKMODAL);

	}
	catch(cv::Exception& e) {
		MessageBox( NULL, e.err.c_str(), "An exception has occurred!", MB_OK | MB_ICONERROR | MB_TASKMODAL);

	}
}
Example #10
int main(void)
{
	//Set the error callback
	glfwSetErrorCallback(error_callback);

	//Initialize GLFW
	if (!glfwInit())
	{
		exit(EXIT_FAILURE);
	}

	//Set the GLFW window creation hints - these are optional
	//glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3); //Request a specific OpenGL version
	//glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3); //Request a specific OpenGL version
	//glfwWindowHint(GLFW_SAMPLES, 4); //Request 4x antialiasing
	//glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);


	//Create a window and create its OpenGL context
	window = glfwCreateWindow(960, 720, "Test Window", NULL, NULL);

	//If the window couldn't be created
	if (!window)
	{
		fprintf(stderr, "Failed to open GLFW window.\n");
		glfwTerminate();
		exit(EXIT_FAILURE);
	}

	//This function makes the context of the specified window current on the calling thread. 
	glfwMakeContextCurrent(window);

	//Sets the key callback
	glfwSetKeyCallback(window, key_callback);

	//Initialize GLEW
	GLenum err = glewInit();

	//If GLEW hasn't initialized
	if (err != GLEW_OK)
	{
		fprintf(stderr, "Error: %s\n", glewGetErrorString(err));
		return -1;
	}

	//Set a background color
	glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
	glfwSetCursorPos(window, 960 / 2, 720 / 2);

	GLuint VertexArrayID;
	glGenVertexArrays(1, &VertexArrayID);
	glBindVertexArray(VertexArrayID);

	// Create and compile our GLSL program from the shaders
	GLuint red = LoadShaders("SimpleTransform.vertexshader", "SingleColorRed.fragmentshader");
	GLuint grid = LoadShaders("SimpleTransform.vertexshader", "SingleColorGrid.fragmentshader");
	glBindFragDataLocation(red, 0, "red");
	glBindFragDataLocation(grid, 1, "grid");
	// Get a handle for our "MVP" uniform
	GLuint MatrixID = glGetUniformLocation(red, "MVP");

	// Projection matrix : 45° Field of View, 4:3 ratio, display range : 0.1 unit <-> 100 units
	glm::mat4 Projection = glm::perspective(45.0f, 4.0f / 3.0f, 0.1f, 1000.0f);
	// Or, for an ortho camera :
	//glm::mat4 Projection = glm::ortho(-10.0f,10.0f,-10.0f,10.0f,0.0f,100.0f); // In world coordinates

	// Camera matrix
	glm::mat4 View = glm::lookAt(
		glm::vec3(4, 3, 3), // Camera is at (4,3,3), in World Space
		glm::vec3(0, 0, 0), // and looks at the origin
		glm::vec3(0, 1, 0)  // Head is up (set to 0,-1,0 to look upside-down)
		);


	static const GLfloat g_vertex_buffer_data[] = {
		-1.0f, -1.0f, 0.0f,
		1.0f, -1.0f, 0.0f,
		0.0f, 1.0f, 0.0f,
	};

	static const GLushort g_element_buffer_data[] = { 0, 1, 2 };

	GLuint vertexbuffer;
	glGenBuffers(1, &vertexbuffer);
	glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
	glBufferData(GL_ARRAY_BUFFER, sizeof(g_vertex_buffer_data), g_vertex_buffer_data, GL_STATIC_DRAW);

	static const GLfloat g_triangle_buffer_data[] = {
		-1.0f, -1.0f, -1.0f,
		1.0f, -1.0f, -1.0f,
		0.0f, 1.0f, -1.0f,
	};

	GLuint triangle;
	glGenBuffers(1, &triangle);
	glBindBuffer(GL_ARRAY_BUFFER, triangle);
	glBufferData(GL_ARRAY_BUFFER, sizeof(g_triangle_buffer_data), g_triangle_buffer_data, GL_STATIC_DRAW);

	// Enable depth test
	glEnable(GL_DEPTH_TEST);
	// Accept fragment if it closer to the camera than the former one
	glDepthFunc(GL_LESS);
	glEnable(GL_CULL_FACE);
	glEnable(GL_LIGHTING);
	glShadeModel(GL_SMOOTH);//OPENGL INSTANTIATION
	HRESULT hr;
	NUI_IMAGE_FRAME depthFrame;
	HANDLE hDepth;
	INuiSensor* pNuiSensor = NULL;
	int iSensorCount = 0;
	hr = NuiGetSensorCount(&iSensorCount);

	if (FAILED(hr))
		return hr;

	for (int i = 0; i < iSensorCount; i++)
	{
		INuiSensor* tempSensor;
		hr = NuiCreateSensorByIndex(i, &tempSensor);

		if (FAILED(hr))
			continue;

		hr = tempSensor->NuiStatus();
		if (S_OK == hr)
		{
			pNuiSensor = tempSensor;
			break;
		}

		tempSensor->Release();
	}

	if (NULL == pNuiSensor)
	{
		fprintf(stderr, "No ready Kinect sensor found.\n");
		return -1;
	}

	for (int i = 0; i < 2048; i++) {
		depthLookUp[i] = rawDepthToMeters(i);
	}

	rotation = getRotationMatrix(theta, psi, fi);

	pNuiSensor->NuiInitialize(NUI_INITIALIZE_FLAG_USES_DEPTH);
	pNuiSensor->NuiImageStreamOpen(
		NUI_IMAGE_TYPE_DEPTH,
		NUI_IMAGE_RESOLUTION_320x240,
		0,
		2,
		NULL,
		&hDepth);//KINECT INSTANTIATION

	cout << "Starting Main Loop";

	static double lastTime = glfwGetTime();
	//Main Loop
	do
	{
		double currentTime = glfwGetTime();
		float deltaTime = float(currentTime - lastTime);
		//Clear color buffer
		glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

		glUseProgram(grid);
		modelMatrix(MatrixID);


		hr = pNuiSensor->NuiImageStreamGetNextFrame(hDepth, 0, &depthFrame);
		if (!FAILED(hr))
		{

			INuiFrameTexture* pTexture;
			NUI_LOCKED_RECT LockedRect;

			hr = pNuiSensor->NuiImageFrameGetDepthImagePixelFrameTexture(
				hDepth, &depthFrame, false, &pTexture);

			if (FAILED(hr))
			{
				pNuiSensor->NuiImageStreamReleaseFrame(hDepth, &depthFrame);
				continue;
			}

			pTexture->LockRect(0, &LockedRect, NULL, 0);//Kinect Image Grab
			int skipX = 1;
			int skipY = 1;
			float scalar = 4.0f;

			if (LockedRect.Pitch != 0)
			{
				for (int x = 0; x < width; x += skipX)
				{
					for (int y = 0; y < height; y += skipY)
					{
						const NUI_DEPTH_IMAGE_PIXEL * pBufferRun = reinterpret_cast<const NUI_DEPTH_IMAGE_PIXEL *>(LockedRect.pBits) + x + y * width;
						
						//float depth = (float)(pBufferRun->depth);
						//glm::vec3 location = realWorld(depth, height - y, x, 500.0f, 1000.0f);
						//createCube(0.006f, location);
						Vector4 locationDepth = NuiTransformDepthImageToSkeleton(x, y, (USHORT)(pBufferRun->depth << 3));
						glm::vec3 locationDepthxyz = glm::vec3(locationDepth.x * scalar, locationDepth.y * scalar, locationDepth.z * scalar);
						createCube(0.009f, locationDepthxyz);
					}
				}
			}

			pTexture->UnlockRect(0);
			pTexture->Release();

			pNuiSensor->NuiImageStreamReleaseFrame(hDepth, &depthFrame);
		}

		createGrid();

		//Test drawings
		/*
		glUseProgram(red);
		modelMatrix(MatrixID);
		//createCube(0.05f, glm::vec3(1.0f,1.0f,1.0f));
		// 1rst attribute buffer : vertices
		glEnableVertexAttribArray(0);
		//createObject(vertexbuffer, GL_TRIANGLES, 3);
		//createObject(triangle, GL_TRIANGLES, 3);
		glDisableVertexAttribArray(0);
		*/

		//Swap buffers
		glfwSwapBuffers(window);
		//Get and organize events, like keyboard and mouse input, window resizing, etc...
		glfwPollEvents();

		std::string title = "Title | FPS " + std::to_string(1.0f/deltaTime);
		const char* pszConstString = title.c_str();
		glfwSetWindowTitle(window, pszConstString);

		lastTime = currentTime;
	} //Check if the ESC key had been pressed or if the window had been closed
	while (!glfwWindowShouldClose(window));


	//Close OpenGL window and terminate GLFW
	glfwDestroyWindow(window);
	//Finalize and clean up GLFW
	glfwTerminate();

	exit(EXIT_SUCCESS);
}
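The rawDepthToMeters helper that fills depthLookUp above is not shown. A common reconstruction for 11-bit Kinect v1 raw readings is Stéphane Magnenat's approximation; treat the coefficients as an assumption to be calibrated per device:

// Hypothetical reconstruction of the missing helper: maps an 11-bit raw
// depth reading to meters, returning 0 for saturated/invalid readings.
float rawDepthToMeters(int rawDepth) {
	if (rawDepth < 2047) {
		return (float)(1.0 / ((double)rawDepth * -0.0030711016 + 3.3309495161));
	}
	return 0.0f;
}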
Example #11
/*!
 * @brief Kinect::getAverageCoordinate(Mat& image). Averages the extracted depth-image coordinates over one frame. (c31)
 * @param cv::Mat& image
 * @return Point3ius avgCoordinate
 */
Point3ius Kinect::getAverageCoordinate(Mat& image) //(c31)
{
	Vector4 sumCoordinate; // accumulates the coordinate sums for one frame (c31)
	Point3ius avgCoordinate; //!< local variable holding the average coordinate (c38)

	// Initialize the variables (c31)
	actualExtractedNum = 0;

	sumCoordinate.x = 0;
	sumCoordinate.y = 0;
	sumCoordinate.z = 0;

	avgCoordinate.x = 0;
	avgCoordinate.y = 0;
	avgCoordinate.z = 0;

	// Get the next frame from the depth camera
	NUI_IMAGE_FRAME depthFrame = { 0 };
	ERROR_CHECK(kinect->NuiImageStreamGetNextFrame(depthStreamHandle, INFINITE, &depthFrame));

	// Lock the depth data
	NUI_LOCKED_RECT depthData = { 0 };
	depthFrame.pFrameTexture->LockRect(0, &depthData, 0, 0);

	USHORT* depth = (USHORT*)depthData.pBits; // raw depth buffer

	//cout << actualExtractedNum << endl;
	if (extractedNum > 0){ // If at least one coordinate was extracted
		for (int i = 0; i < extractedNum; i++){ // Sum the coordinates over all extracted points (c37)
			USHORT distance = ::NuiDepthPixelToDepth(depth[extractedPointOneDim[i]]); // distance is in mm
			if (distance != 0){ // If the distance is nonzero, accumulate it and count the sample (c31)
				sumCoordinate.x += extCoordinate[i].x; // accumulate x (c37)
				sumCoordinate.y += extCoordinate[i].y; // accumulate y (c37)
				sumCoordinate.z += distance; // accumulate z (distance) (c37)

				//cout << distance << endl; // (debug)

				Vector4 worldCoordinate = NuiTransformDepthImageToSkeleton((long)extCoordinate[i].x, (long)extCoordinate[i].y, (USHORT)(distance << 3), NUI_IMAGE_RESOLUTION_640x480); // the transform expects the packed depth (mm << 3)
				XYZCoordinate[actualExtractedNum].x = worldCoordinate.x * 1000.0f;
				XYZCoordinate[actualExtractedNum].y = worldCoordinate.y * 1000.0f;
				XYZCoordinate[actualExtractedNum].z = worldCoordinate.z * 1000.0f;

				//cout << XYZCoordinate[actualExtractedNum].z << endl; // (debug)
				actualExtractedNum++; // count the samples actually accumulated (c37)
			}
		}
		if (actualExtractedNum > 0){ // guard: every extracted pixel may have had distance 0
			avgCoordinate.x = (int)(sumCoordinate.x / actualExtractedNum);
			avgCoordinate.y = (int)(sumCoordinate.y / actualExtractedNum);
			avgCoordinate.z = (USHORT)(sumCoordinate.z / actualExtractedNum); //(c11)

			avgFlag = true;
		}
		else
		{
			avgFlag = false;
		}
	}
	else
	{
		avgFlag = false; // skip the average-coordinate computation
	}

	ERROR_CHECK(kinect->NuiImageStreamReleaseFrame(depthStreamHandle, &depthFrame));

	return (avgCoordinate);
}