Example no. 1
0
// Maps every depth sample of the _x-by-_y grid into virtual-world space,
// stores the result in _mesh, and re-uploads the vertex data to the GPU.
//
// points  — depth image samples in row-major order, one per mesh vertex;
//           rows are stored top-down, so they are flipped vertically here.
void TriangleMesh::UpdateDepth( StereoKinectHeadTracking* tracking, vector<NUI_DEPTH_IMAGE_POINT>& points )
{
	INuiCoordinateMapper* coordMapper = nullptr;
	tracking->GetSensor()->NuiGetCoordinateMapper(&coordMapper);

	for ( unsigned int i = 0; i < _x; i++ )
	{
		for ( unsigned int j = 0; j < _y; j++ )
		{
			// Flip the row index: the depth image is top-down, the mesh bottom-up.
			const unsigned int src = (_y - j - 1) * _x + i;
			Vector4 p;

			// Depth pixel -> skeleton (sensor/real-world) space, in meters.
			coordMapper->MapDepthPointToSkeletonPoint(NUI_IMAGE_RESOLUTION_640x480, &points[src], &p);

			glm::vec3 rwPoint = glm::vec3(p.x, p.y, p.z);
			glm::vec3 vwPoint = tracking->SensorToVirtualWorldCoordinates(rwPoint);
			vwPoint += glm::vec3(-0.027f, 0.03f, 0.013f); // empirically observed calibration offset

			_mesh[ i*_y + j ].coord[0] = vwPoint.x;
			_mesh[ i*_y + j ].coord[1] = vwPoint.y;
			_mesh[ i*_y + j ].coord[2] = vwPoint.z;
		}
	}

	// FIX: NuiGetCoordinateMapper hands out a referenced interface; it was
	// previously leaked on every call.
	coordMapper->Release();

	UpdateVBO();
}
Example no. 2
0
int _tmain(int argc, _TCHAR* argv[])
{
	cv::setUseOptimized( true );

	INuiSensor* pSensor;
	HRESULT hResult = S_OK;
	hResult = NuiCreateSensorByIndex( 0, &pSensor );
	if( FAILED( hResult ) ){
		std::cerr << "Error : NuiCreateSensorByIndex" << std::endl;
		return -1;
	}

	hResult = pSensor->NuiInitialize( NUI_INITIALIZE_FLAG_USES_COLOR | NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX | NUI_INITIALIZE_FLAG_USES_SKELETON );
	if( FAILED( hResult ) ){
		std::cerr << "Error : NuiInitialize" << std::endl;
		return -1;
	}

	HANDLE hColorEvent = INVALID_HANDLE_VALUE;
	HANDLE hColorHandle = INVALID_HANDLE_VALUE;
	hColorEvent = CreateEvent( nullptr, true, false, nullptr );
	hResult = pSensor->NuiImageStreamOpen( NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480, 0, 2, hColorEvent, &hColorHandle );
	if( FAILED( hResult ) ){
		std::cerr << "Error : NuiImageStreamOpen( COLOR )" << std::endl;
		return -1;
	}

	HANDLE hDepthPlayerEvent = INVALID_HANDLE_VALUE;
	HANDLE hDepthPlayerHandle = INVALID_HANDLE_VALUE;
	hDepthPlayerEvent = CreateEvent( nullptr, true, false, nullptr );
	hResult = pSensor->NuiImageStreamOpen( NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX, NUI_IMAGE_RESOLUTION_640x480, 0, 2, hDepthPlayerEvent, &hDepthPlayerHandle );
	if( FAILED( hResult ) ){
		std::cerr << "Error : NuiImageStreamOpen( DEPTH&PLAYER )" << std::endl;
		return -1;
	}

	HANDLE hSkeletonEvent = INVALID_HANDLE_VALUE;
	hSkeletonEvent = CreateEvent( nullptr, true, false, nullptr );
	hResult = pSensor->NuiSkeletonTrackingEnable( hSkeletonEvent, 0 );
	if( FAILED( hResult ) ){
		std::cerr << "Error : NuiSkeletonTrackingEnable" << std::endl;
		return -1;
	}

	unsigned long refWidth = 0;
	unsigned long refHeight = 0;
	NuiImageResolutionToSize( NUI_IMAGE_RESOLUTION_640x480, refWidth, refHeight );
	int width = static_cast<int>( refWidth );
	int height = static_cast<int>( refHeight );

	INuiCoordinateMapper* pCordinateMapper;
	hResult = pSensor->NuiGetCoordinateMapper( &pCordinateMapper );
	if( FAILED( hResult ) ){
		std::cerr << "Error : NuiGetCoordinateMapper" << std::endl;
		return -1;
	}
	std::vector<NUI_COLOR_IMAGE_POINT> pColorPoint( width * height );

	HANDLE hEvents[3] = { hColorEvent, hDepthPlayerEvent, hSkeletonEvent };

	cv::Vec3b color[7];
	color[0] = cv::Vec3b(   0,   0,   0 );
	color[1] = cv::Vec3b( 255,   0,   0 );
	color[2] = cv::Vec3b(   0, 255,   0 );
	color[3] = cv::Vec3b(   0,   0, 255 );
	color[4] = cv::Vec3b( 255, 255,   0 );
	color[5] = cv::Vec3b( 255,   0, 255 );
	color[6] = cv::Vec3b(   0, 255, 255 );

	cv::namedWindow( "Color" );
	cv::namedWindow( "Depth" );
	cv::namedWindow( "Player" );
	cv::namedWindow( "Skeleton" );

	while( 1 ){
		ResetEvent( hColorEvent );
		ResetEvent( hDepthPlayerEvent );
		ResetEvent( hSkeletonEvent );
		WaitForMultipleObjects( ARRAYSIZE( hEvents ), hEvents, true, INFINITE );

		NUI_IMAGE_FRAME colorImageFrame = { 0 };
		hResult = pSensor->NuiImageStreamGetNextFrame( hColorHandle, 0, &colorImageFrame );
		if( FAILED( hResult ) ){
			std::cerr << "Error : NuiImageStreamGetNextFrame( COLOR )" << std::endl;
			return -1;
		}

		INuiFrameTexture* pColorFrameTexture = colorImageFrame.pFrameTexture;
		NUI_LOCKED_RECT colorLockedRect;
		pColorFrameTexture->LockRect( 0, &colorLockedRect, nullptr, 0 );

		NUI_IMAGE_FRAME depthPlayerImageFrame = { 0 };
		hResult = pSensor->NuiImageStreamGetNextFrame( hDepthPlayerHandle, 0, &depthPlayerImageFrame );
		if( FAILED( hResult ) ){
			std::cerr << "Error : NuiImageStreamGetNextFrame( DEPTH&PLAYER )" << std::endl;
			return -1;
		}

		BOOL nearMode = false;
		INuiFrameTexture* pDepthPlayerFrameTexture = nullptr;
		pSensor->NuiImageFrameGetDepthImagePixelFrameTexture( hDepthPlayerHandle, &depthPlayerImageFrame, &nearMode, &pDepthPlayerFrameTexture );
		NUI_LOCKED_RECT depthPlayerLockedRect;
		pDepthPlayerFrameTexture->LockRect( 0, &depthPlayerLockedRect, nullptr, 0 );

		NUI_SKELETON_FRAME skeletonFrame = { 0 };
		hResult = pSensor->NuiSkeletonGetNextFrame( 0, &skeletonFrame );
		if( FAILED( hResult ) ){
			std::cout << "Error : NuiSkeletonGetNextFrame" << std::endl;
			return -1;
		}

		/*
		NUI_TRANSFORM_SMOOTH_PARAMETERS smoothParameter;
		smoothParameter.fSmoothing = 0.5; 
		smoothParameter.fCorrection = 0.5; 
		smoothParameter.fPrediction = 0.0f; 
		smoothParameter.fJitterRadius = 0.05f; 
		smoothParameter.fMaxDeviationRadius = 0.04f;

		hResult = NuiTransformSmooth( &skeletonFrame, &smoothParameter );
		*/

		cv::Mat colorMat( height, width, CV_8UC4, reinterpret_cast<unsigned char*>( colorLockedRect.pBits ) );

		cv::Mat bufferMat = cv::Mat::zeros( height, width, CV_16UC1 );
		cv::Mat playerMat = cv::Mat::zeros( height, width, CV_8UC3 );
		NUI_DEPTH_IMAGE_PIXEL* pDepthPlayerPixel = reinterpret_cast<NUI_DEPTH_IMAGE_PIXEL*>( depthPlayerLockedRect.pBits );
		pCordinateMapper->MapDepthFrameToColorFrame( NUI_IMAGE_RESOLUTION_640x480, width * height, pDepthPlayerPixel, NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480, width * height, &pColorPoint[0] );
		for( int y = 0; y < height; y++ ){
			for( int x = 0; x < width; x++ ){
				unsigned int index = y * width + x;
				bufferMat.at<unsigned short>( pColorPoint[index].y, pColorPoint[index].x ) = pDepthPlayerPixel[index].depth;
				playerMat.at<cv::Vec3b>( pColorPoint[index].y, pColorPoint[index].x ) = color[pDepthPlayerPixel[index].playerIndex];
			}
		}
		cv::Mat depthMat( height, width, CV_8UC1 );
		bufferMat.convertTo( depthMat, CV_8U, -255.0f / 10000.0f, 255.0f );

		cv::Mat skeletonMat = cv::Mat::zeros( height, width, CV_8UC3 );
		NUI_COLOR_IMAGE_POINT colorPoint;
		for( int count = 0; count < NUI_SKELETON_COUNT; count++ ){
			NUI_SKELETON_DATA skeletonData = skeletonFrame.SkeletonData[count];
			if( skeletonData.eTrackingState == NUI_SKELETON_TRACKED ){
				for( int position = 0; position < NUI_SKELETON_POSITION_COUNT; position++ ){
					pCordinateMapper->MapSkeletonPointToColorPoint( &skeletonData.SkeletonPositions[position], NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480, &colorPoint );
					if( ( colorPoint.x >= 0 ) && ( colorPoint.x < width ) && ( colorPoint.y >= 0 ) && ( colorPoint.y < height ) ){
						cv::circle( skeletonMat, cv::Point( colorPoint.x, colorPoint.y ), 10, static_cast<cv::Scalar>( color[count + 1] ), -1, CV_AA );
					}
				}

				std::stringstream ss;
				ss << skeletonData.SkeletonPositions[NUI_SKELETON_POSITION_HIP_CENTER].z;
				pCordinateMapper->MapSkeletonPointToColorPoint( &skeletonData.SkeletonPositions[NUI_SKELETON_POSITION_HEAD], NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480, &colorPoint );
				if( ( colorPoint.x >= 0 ) && ( colorPoint.x < width ) && ( colorPoint.y >= 0 ) && ( colorPoint.y < height ) ){
					cv::putText( skeletonMat, ss.str(), cv::Point( colorPoint.x - 50, colorPoint.y - 20 ), cv::FONT_HERSHEY_SIMPLEX, 1.5f, static_cast<cv::Scalar>( color[count + 1] ) );
				}
			}
			else if( skeletonData.eTrackingState == NUI_SKELETON_POSITION_ONLY ){
				pCordinateMapper->MapSkeletonPointToColorPoint( &skeletonData.SkeletonPositions[NUI_SKELETON_POSITION_HIP_CENTER], NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480, &colorPoint );
				if( ( colorPoint.x >= 0 ) && ( colorPoint.x < width ) && ( colorPoint.y >= 0 ) && ( colorPoint.y < height ) ){
						cv::circle( skeletonMat, cv::Point( colorPoint.x, colorPoint.y ), 10, static_cast<cv::Scalar>( color[count + 1] ), -1, CV_AA );
				}
			}
		}

		cv::imshow( "Color", colorMat );
		cv::imshow( "Depth", depthMat );
		cv::imshow( "Player", playerMat );
		cv::imshow( "Skeleton", skeletonMat );

		pColorFrameTexture->UnlockRect( 0 );
		pDepthPlayerFrameTexture->UnlockRect( 0 );
		pSensor->NuiImageStreamReleaseFrame( hColorHandle, &colorImageFrame );
		pSensor->NuiImageStreamReleaseFrame( hDepthPlayerHandle, &depthPlayerImageFrame );

		if( cv::waitKey( 30 ) == VK_ESCAPE ){
			break;
		}
	}

	pSensor->NuiShutdown();
	pSensor->NuiSkeletonTrackingDisable();
	pCordinateMapper->Release();
	CloseHandle( hColorEvent );
	CloseHandle( hDepthPlayerEvent );
	CloseHandle( hSkeletonEvent );

	cv::destroyAllWindows();

	return 0;
}
Example no. 3
0
void KinectDevice::Nui_GotDepthAlert()
{
    //changed old const NUI_IMAGE_FRAME * pImageFrame = NULL;
	//NUI_IMAGE_FRAME pImageFrame;
	//for full depth not const
	//const NUI_IMAGE_FRAME * pImageFrame = NULL;
	NUI_IMAGE_FRAME pImageFrame;
		 
	HRESULT hr = m_nuiInstance->NuiImageStreamGetNextFrame(m_pDepthStreamHandle, 0, &pImageFrame);
	//get full depth
	//NUI_DEPTH_IMAGE_PIXEL *Iout;
	BOOL bNearMode = false;
	INuiFrameTexture * pTexture = NULL;
	hr = m_nuiInstance->NuiImageFrameGetDepthImagePixelFrameTexture(m_pDepthStreamHandle, &pImageFrame, &bNearMode, &pTexture);
	if( FAILED( hr ) )
    {
        return;
    }

	//no need to check, if( pImageFrame->eResolution == NUI_IMAGE_RESOLUTION_320x240 ) etc. Already set this flag to this resolution earlier on.
	int width = 640;
    int height= 480;

    //old SDK:
	//NuiImageBuffer * pTexture = pImageFrame->pFrameTexture;
    //KINECT_LOCKED_RECT LockedRect;
	
	//removed for full depth:
	//INuiFrameTexture *  pTexture = pImageFrame->pFrameTexture;

	NUI_LOCKED_RECT LockedRect;
   
	//full depth
    pTexture->LockRect( 0, &LockedRect, NULL, 0 );
    //instead of 
	//BYTE* pBuffer = 0;
    
	 if( LockedRect.Pitch != 0 )
	 {
		NUI_DEPTH_IMAGE_PIXEL * pBuffer =  (NUI_DEPTH_IMAGE_PIXEL *) LockedRect.pBits;
		INuiCoordinateMapper* pMapper;
		m_nuiInstance->NuiGetCoordinateMapper(&pMapper);
		//check if int maxval < 307200 (640*480)
		int j = 0;
	
		//USHORT* pBuffer =  (USHORT*) LockedRect.pBits;
		//pBuffer = (BYTE*) LockedRect.pBits;
 
		CvMatData img;
		img = CvMatData::create(width, height, CV_16U, 1);
		//If your application included NUI_INITIALIZE_FLAG_USES_DEPTH in the dwFlags argument to NuiInitialize, depth data is returned as a 16-bit value in which the low-order 12 bits (bits 0–11) contain the depth value in millimeters.
		//might be remark of the old nui changed from beta to 1.0
		
		//full high depth
		if(pImageFrame.eImageType == NUI_IMAGE_TYPE_DEPTH )
		{
			// todo should be faster with memcpy something with 8-bit description

			//full depth is also 16 bit but in a struct [depth & playerindex]
			//img = CvMatData::create(width, height, CV_16U, 1);
		
			// draw the bits to the bitmap needs to be casted to unsigned shorts as sdk gives a signed short by default
			//USHORT* pBufferRun = (USHORT*) pBuffer;
			cv::Mat& mat = img;
			bool realcoordtemp= getRealWorldCoord(); 
			int cxl = getCutXL();
			int cxr = getCutXR();
			int cyu = getCutYU();
			int cyd = getCutYD();
			int cz = getCutZ();
			//check if values are properly transmitted qDebug() << "x " <<cx << "y " << cy << "z "<<  cz; 

			//int realx;
			//int realy;
			int realz; //? is this helping needed for the bithshift by one to meters
			//Vector4 realPoints;
			//Vector4 realPointsKinect;
			Vector4 realPointsKinect2;
			NUI_DEPTH_IMAGE_POINT test;
			//floating point /single-value notation is unnecesary
			
			//old sdk: 
			//faq gives sthe answer >>3 to get depth, actually upto 12 correct bits and max of 12 bits but given in 13bits and  in mm so I don't alter this here?
			//Too near: 0x0000
			//Too far: 0x7ff8
			//Unknown: 0xfff8
			//unsigned short maxValue = 0;
			//unsigned short minValue = 15000; //should be 2^13 8192, unshifted it can become 31800 (<< 3 = 3975) thus bitshift once to create a better viewable image.
			////maxvalue will then be around 62000
			//http://www.i-programmer.info/ebooks/practical-windows-kinect-in-c/3802-using-the-kinect-depth-sensor.html?start=1
				
			//defined: ushort 0 - 65535
			//int max = 32767, uint=65535, schar=127, uchar= 255, although std http://msdn.microsoft.com/en-us/library/s086ab1z(v=vs.71).aspx
        
			////to check real space dependencies
			//float minx = 15000.0f;
			//float maxx = 0.0f;
			//float miny = 15000.0f;
			//float maxy = 0.0f;
			//float minz = 15000.0f;
			//float maxz = 0.0f;

			// todo should be faster with memcpy
			if (realcoordtemp)
			{
				//for always gettig the closest point it is wise to start with x,y 0 and loop the two ways, it is probably faster to draw over than it is to check whether the point has been drawn and is closer.
				for( int y = height/2 ; y < height ; y++ )
				{
					for( int x = width/2 ; x < width ; x++ )	
					{
						//I(robby) did the following for new sdk 
						//the number of bits has been changed (and probably significant/unsignificant bits changed?) showing 15bit max , accroding to doc still the lower 12 bits are used, but faq says higher 13-bits and typo of doc
						//anyway to convert to m use >> 4 in rest of program, which throws away the lower values which should not exist due to minimum range. 
						//optimalisation could be to use an 8 bit image (representing values 2^8-2^16) and if transformation to meters has to be done
						//to show it we use more clearly in viewer we use << 1.
						
						//TODO DANGEROUS functions one of the two results in crash, switched x and y value.
						//TODO  calculate values by own function, the realpoint values are probably influenced by the floor cut-off plane which will be inconsistent for the kinects
						//realPoints = NuiTransformDepthImageToSkeleton(y,x,pBuffer[j].depth);
						//realz = pBuffer[j].depth>>1;
						//assume it is actual depth in mm
						//j = y*width+x
						//realz = pBuffer[j].depth>>1;
						realz = pBuffer[y*width+x].depth; 
						//based on emperical results with cutz value a cz would cut image after 2m 
						if ((realz > 0) && !(((realz) > cz) && (cz > 0)) )
						{
							//KINECT realpoint transformation just give it up!
							//maybe we should transorm by 3 instead of two so assuming depth is allready in mm
							//using the non bitshifted function for the other method: NOT >>3, NOT >>2, not <<3, 
							/*realPointsKinect = NuiTransformDepthImageToSkeleton(x,y,pBuffer[j].depth<<2);
							realPointsKinect.x = realPointsKinect.x *1000;
							realPointsKinect.y = realPointsKinect.y *1000;*/

							//http://msdn.microsoft.com/en-us/library/nuisensor.inuicoordinatemapper.mapdepthpointtoskeletonpoint.aspx
							//this actually works and gives same results as own functions but with y flipped
							
							test.x = x;
							test.y = y;
							test.depth = pBuffer[y*width+x].depth; //commentout the bitshift >>1
							// INuiCoordinateMapper* pMapper;
							// m_nuiInstance->NuiGetCoordinateMapper(&pMapper);
							pMapper->MapDepthPointToSkeletonPoint(NUI_IMAGE_RESOLUTION_640x480, &test, &realPointsKinect2);

							//if (pBuffer[j].depth == 0)
							if (pBuffer[y*width+x].depth == 0)
							{ 
								//debug: check if if-statement is behaving as it should
								qDebug() << "WTF @ " << x << " " << y;
							}

							// to check if we can loop from center outward
							//if (j != y*width+x) qDebug() << "j is " << j << "x is" << x << "y is" << y;
							//j max is 307199
							
							//optimize the cutoff!
							//TODO if realx is beyond point dont calc y, remove check from frontal, if y beyond cy dont do frontal.
							//realPoints = TransformationToRealworldEucledianPoints(x,y,realz); //apperantly bitshift by one (smaller) to get mm
							//realx = TransformationToRealworldEucledianPointX(x,realz);
							//realy = TransformationToRealworldEucledianPointY(y,realz);
							
							//debug the values
							/*if (x>600 && x < 620 && y< 330 && y>310 )
							{
								qDebug() << "check x" << realPoints.x << "kinectx " << realPointsKinect.x << "mapped" << realPointsKinect2.x *1000 << "int x" << realx; 
								qDebug() << "check y" << realPoints.y << "kinecty " << realPointsKinect.y << "mapped" << realPointsKinect2.y *1000 << "int y" << realy << "z "<< realz; 
							}*/
							
							FrontalImage(mat, realPointsKinect2, pBuffer[y*width+x].depth, cxl, cxr, cyu, cyd);
						}
					
						/*if (realPoints.x < minx)
						{
							minx = realPoints.x;
						}
						if (realPoints.x > maxx)
						{
							maxx = realPoints.x;
						}
						if (realPoints.y > maxy)
						{
							maxy = realPoints.y;
						}
						if (realPoints.y < miny)
						{
							miny = realPoints.y;
						}
						if (realPoints.z < minz && realPoints.z > 0)
						{
							minz = realPoints.z;
						}
						if (realPoints.z > maxz)
						{
							maxz = realPoints.z;
						}*/
						
						//j++;

						//pre-full depth:
						//realPoints = NuiTransformDepthImageToSkeleton(y,x,(*pBufferRun));
						//FrontalImage(mat, realPoints, *pBufferRun); 
						//pBuffer++;

						//mat.at<USHORT>(y,x)  = Nui_ShortToIntensity(*pBufferRun);
						//pBufferRun++;
					}
					for( int x = width/2 ; x > 0; x-- )	
					{
						//neater to make a function of it as it has to be done 4 times
						realz = pBuffer[y*width+x].depth;
						if ((realz > 0) && !(((realz) > cz) && (cz > 0)) )
						{
							test.x = x;
							test.y = y;
							test.depth = pBuffer[y*width+x].depth;
							pMapper->MapDepthPointToSkeletonPoint(NUI_IMAGE_RESOLUTION_640x480, &test, &realPointsKinect2);
							FrontalImage(mat, realPointsKinect2, pBuffer[y*width+x].depth, cxl, cxr, cyu, cyd);
						}
						//j++;
					}
				}
				for( int y = height/2 ; y>0 ; y-- )
				{

					for( int x = width/2 ; x<width; x++ )	
					{
						//neater to make a function of it as it has to be done 4 times
						realz = pBuffer[y*width+x].depth;
						if ((realz > 0) && !(((realz) > cz) && (cz > 0)) )
						{
							test.x = x;
							test.y = y;
							test.depth = pBuffer[y*width+x].depth;
							pMapper->MapDepthPointToSkeletonPoint(NUI_IMAGE_RESOLUTION_640x480, &test, &realPointsKinect2);
							FrontalImage(mat, realPointsKinect2, pBuffer[y*width+x].depth, cxl, cxr, cyu, cyd);
						}
						//j++;
					}

					for( int x = width/2 ; x > 0; x-- )	
					{
						//neater to make a function of it as it has to be done 4 times
						realz = pBuffer[y*width+x].depth;
						if ((realz > 0) && !(((realz) > cz) && (cz > 0)) )
						{
							test.x = x;
							test.y = y;
							test.depth = pBuffer[y*width+x].depth;
							pMapper->MapDepthPointToSkeletonPoint(NUI_IMAGE_RESOLUTION_640x480, &test, &realPointsKinect2);
							FrontalImage(mat, realPointsKinect2, pBuffer[y*width+x].depth, cxl, cxr, cyu, cyd);
						}
						//j++;
					}
				}
			} //end if realcoord
			else
			{
				for( int y = 0 ; y < height ; y++ )
				{
					for( int x = 0 ; x < width ; x++ )	
					{
						//bitshift: << bigger , >>smaller
						//full depth
						//USHORT* pBufferRun = (USHORT*) pBuffer;
						
						//mat.at<USHORT>(y,x) = (*pBufferRun) << 1;
						//TODO check if the 16-bit depth isn't actually in meters, then probably only first 13 bits are used and one should bitshift by 5 to make it viewable
						//proabaly/it seems stull bitshift by 3 for viewing by << 1 for meters
						mat.at<USHORT>(y,x) = pBuffer[j].depth << 3;
						j++;
						//pBufferRun++;
						//pBuffer++;
						//mat.at<USHORT>(y,x) = (*pBufferRun) << 1;
					}
				}
			}
			
			/*if (realcoordtemp)
			{
				qDebug() << tr("minx is %1 maxx %2 min y %3 max y %4 min z %5 max z%6").arg(minx).arg(maxx).arg(miny).arg(maxy).arg(minz).arg(maxz);
			}*/

			//if not bishifted:  "maxvalue is 31800 ...0"  qdebug 2^15=32768 what happend to the remaining 968? 2^10=1024

			// the last 8 pixels are black.
			// From http://groups.google.com/group/openkinect/browse_thread/thread/6539281cf451ae9e
			// Turns out the scaled down raw IR image that we can stream from the
			// Kinect is 640x488, so it loses 8 pixels in both the X and Y dimensions.
			// We just don't see the lost Y because the image is truncated there, while
			// the missing X pixels are padded.

			// The actual raw IR acquisition image is likely 1280x976 (from a 1280x1024
			// sensor, windowing off the extra Y pixels), and from that it derives a
			// 632x480 depth map at 1:2 ratio and using 16 extra source pixels in X and Y. 
		} //end of if statement: pImageFrame.eImageType == NUI_IMAGE_TYPE_DEPTH 
		//If you included NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX in the dwFlags argument to NuiInitialize and did not use NuiImageFrameGetDepthImagePixelFrameTexture to get all depth data, then, depth data is returned as a 16-bit value that contains the following information:
		//The low-order three bits (bits 0–2) contain the skeleton (player) ID.
		//The high-order bits (bits 3–15) contain the depth value in millimeters. A depth data value of zero indicates that no depth data is available at that position because all of the objects were either too close to the camera or too far away from it.
		//this is different for the beta sdk
		else if( pImageFrame.eImageType == NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX )
		{
			qDebug() << "entered IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX "; //this is exceptional not intended to happen in the current version
			img = CvMatData::create(width, height, CV_8U, 3);

			// draw the bits to the bitmap
			USHORT* pBufferRun = (USHORT*) pBuffer;
			cv::Mat& mat = img;

			for( int y = 0 ; y < height ; y++ )
			{
				for( int x = 0 ; x < width ; x++ )
				{
					RGBQUAD quad = Nui_ShortToQuad_DepthAndPlayerIndex( *pBufferRun );

					pBufferRun++;
					mat.at<cv::Vec3b>(y,x)[0] = quad.rgbBlue;
					mat.at<cv::Vec3b>(y,x)[1] = quad.rgbGreen;
					mat.at<cv::Vec3b>(y,x)[2] = quad.rgbRed;
				}
			}
		}
		emit newDepthFrame( m_id, img );
	} //end of if LockedRect.Pitch != 0
	else 
	{
		OutputDebugString( L"Buffer length of received texture is bogus\r\n" );
		return;
	}
	//changed back to old SDK way:
	m_nuiInstance->NuiImageStreamReleaseFrame( m_pDepthStreamHandle, &pImageFrame);
	//NuiImageStreamReleaseFrame( m_pDepthStreamHandle, pImageFrame );
}