Example #1
void KinectGrabber::Kinect_GotDepthAlert( ) {
	const NUI_IMAGE_FRAME * pImageFrame = NULL;

    HRESULT hr = NuiImageStreamGetNextFrame(
        m_pDepthStreamHandle,
        0,
        &pImageFrame );

    if( FAILED( hr ) )
    {
    	printf("Unable to get the frame after recieving alert for depth frame \n");
		return;
    }

    NuiImageBuffer * pTexture = pImageFrame->pFrameTexture;
    KINECT_LOCKED_RECT LockedRect;
    pTexture->LockRect( 0, &LockedRect, NULL, 0 );
    if( LockedRect.Pitch != 0 )
    {
        BYTE * pBuffer = (BYTE*) LockedRect.pBits;

        // draw the bits to the bitmap
        RGBQUAD * rgbrun = m_rgbDepth;
		USHORT * depthrun = m_depthBuffer;
		USHORT * playerrun = m_playerBuffer;
        USHORT * pBufferRun = (USHORT*) pBuffer;
        for( int y = 0 ; y < DEPTH_HEIGHT ; y++ )
        {
            for( int x = 0 ; x < DEPTH_WIDTH ; x++ )
            {
				// set the color (just for visualization)
                RGBQUAD quad = Kinect_DepthToRGB( *pBufferRun );
                *rgbrun = quad;
                rgbrun++;
				
				//USHORT RealDepth = (*pBufferRun & 0xfff8) >> 3;
				USHORT RealDepth = (*pBufferRun & 0x0fff);			
				//*depthrun = RealDepth;
				//depthrun++;
				// flip the depth image horizontally to match the video image;
				// the mirrored pixel for column x of row y is at index (y+1)*DEPTH_WIDTH - 1 - x
				m_depthBuffer[((y+1)*DEPTH_WIDTH) - 1 - x] = RealDepth;

				USHORT Player = *pBufferRun  & 7;
				*playerrun = Player;
				playerrun++;

				//inc buffer pointer
				pBufferRun++;
            }
        }
		
    }
    else
    {
        printf( "Buffer length of received texture is bogus\r\n" );
    }

    // balance the LockRect call above before handing the frame back
    pTexture->UnlockRect( 0 );

    NuiImageStreamReleaseFrame( m_pDepthStreamHandle, pImageFrame );

}
Example #2
	//--
	void
	Dialog::eventFrameColor()
	{
		const NUI_IMAGE_FRAME* image_frame = NULL;

		HRESULT hr = ::NuiImageStreamGetNextFrame(m_color_stream_handle, 0, &image_frame);
		
		if(FAILED(hr))
		{
			return;	
		}

		NuiImageBuffer* texture = image_frame->pFrameTexture;
		KINECT_LOCKED_RECT locked_rect;
		texture->LockRect(0, &locked_rect, NULL, 0);

		if(locked_rect.Pitch != 0)
    	{
    		m_display->drawColor((const unsigned char*) locked_rect.pBits);
    	}

    	texture->UnlockRect(0);

		::NuiImageStreamReleaseFrame(m_color_stream_handle, image_frame);
	}
Example #3
void KinectGrabber::Kinect_GotVideoAlert( )
{
    const NUI_IMAGE_FRAME * pImageFrame = NULL;

    HRESULT hr = NuiImageStreamGetNextFrame(
        m_pVideoStreamHandle,
        0,
        &pImageFrame );
    if( FAILED( hr ) )
    {
		printf("Unable to get the frame after recieving alert for video frame \n");
        return;
    }

    NuiImageBuffer * pTexture = pImageFrame->pFrameTexture;
    KINECT_LOCKED_RECT LockedRect;
    pTexture->LockRect( 0, &LockedRect, NULL, 0 );
    if( LockedRect.Pitch != 0 )
    {
        memcpy(m_rgbBuffer, LockedRect.pBits, sizeof(BYTE) * VIDEO_HEIGHT * VIDEO_WIDTH * 4);
		//m_rgbBuffer = (BYTE*) LockedRect.pBits;
		Kinect_FormatRGBForOutput();	
		Kinect_makeRGBFromRGBA();
		//2560 bytes per line = 640 * 4 (4 bytes per pixel)
	}
	else
	{
        printf("Buffer length of received texture is bogus\n");
    }
	// balance the LockRect call above before handing the frame back
	pTexture->UnlockRect( 0 );
	NuiImageStreamReleaseFrame( m_pVideoStreamHandle, pImageFrame );
}
Example #4
	//--
	void
	Dialog::eventFrameDepth()
	{
		const NUI_IMAGE_FRAME* image_frame = NULL;

		HRESULT hr = ::NuiImageStreamGetNextFrame(m_depth_stream_handle, 0, &image_frame);
		
		if(FAILED(hr))
		{
			return;	
		}

		NuiImageBuffer* texture = image_frame->pFrameTexture;
		KINECT_LOCKED_RECT locked_rect;
		texture->LockRect(0, &locked_rect, NULL, 0);

		if(locked_rect.Pitch != 0)
    	{
    		// [rad] Do nothing.
    	}

    	texture->UnlockRect(0);

		::NuiImageStreamReleaseFrame(m_depth_stream_handle, image_frame);
	}
Example #5
void CSkeletalViewerApp::Nui_GotVideoAlert( )
{
	
	if (GetTickCount64() > m_videoDelay && m_videoDelay != 0) {
		m_videoDelay = 0;
	} else {
		return;
	}
	//m_FramesTotal++;
    const NUI_IMAGE_FRAME * pImageFrame = NULL;

    HRESULT hr = NuiImageStreamGetNextFrame(
        m_pVideoStreamHandle,
        0,
        &pImageFrame );
    if( FAILED( hr ) )
    {
        return;
    }

    NuiImageBuffer * pTexture = pImageFrame->pFrameTexture;
    KINECT_LOCKED_RECT LockedRect;
    pTexture->LockRect( 0, &LockedRect, NULL, 0 );
    if( LockedRect.Pitch != 0 )
    {
        BYTE * pBuffer = (BYTE*) LockedRect.pBits;

        // copy the 640x480 32-bit color frame into the video cache
        UINT * pBufferRun = (UINT*) pBuffer;
        UINT * pVideoRun = m_videoCache;
        for( int y = 0 ; y < 480 ; y++ )
        {
            for( int x = 0 ; x < 640 ; x++ )
            {
                *pVideoRun = *pBufferRun;
                pVideoRun++;
                pBufferRun++;
            }
        }
    }
    else
    {
        OutputDebugString( L"Buffer length of received texture is bogus\r\n" );
    }
	
    NuiImageStreamReleaseFrame( m_pVideoStreamHandle, pImageFrame );
}
Example #6
void KinectDevice::Nui_GotVideoAlert()
{
    const NUI_IMAGE_FRAME * pImageFrame = NULL;

    HRESULT hr = NuiImageStreamGetNextFrame( m_pVideoStreamHandle, 0, &pImageFrame );
    if( FAILED( hr ) )
    {
        return;
    }

    NuiImageBuffer* pTexture = pImageFrame->pFrameTexture;
    KINECT_LOCKED_RECT LockedRect;
    pTexture->LockRect( 0, &LockedRect, NULL, 0 );
    if( LockedRect.Pitch != 0 )
    {
        BYTE * pBuffer = (BYTE*) LockedRect.pBits;
        
        // draw the bits to the bitmap
        RGBQUAD* pBufferRun = (RGBQUAD*) pBuffer;
        plv::CvMatData img = plv::CvMatData::create( 640, 480, CV_8UC3 );
        cv::Mat& mat = img;

        for( int y = 0 ; y < 480 ; y++ )
        {
            for( int x = 0 ; x < 640 ; x++ )
            {
                RGBQUAD quad = *pBufferRun;
                pBufferRun++;
                mat.at<cv::Vec3b>(y,x)[0] = quad.rgbBlue;
                mat.at<cv::Vec3b>(y,x)[1] = quad.rgbGreen;
                mat.at<cv::Vec3b>(y,x)[2] = quad.rgbRed;
            }
        }

        emit newVideoFrame( m_id, img );
    }
    else
    {
        OutputDebugString( L"Buffer length of received texture is bogus\r\n" );
    }

    NuiImageStreamReleaseFrame( m_pVideoStreamHandle, pImageFrame );
}
Example #7
HRESULT RTCKinect::WriteColorImage(void)
{
	static const long TIMEOUT_IN_MILLI = 100;
	const NUI_IMAGE_FRAME * pImageFrame = NULL;
    HRESULT hr = NuiImageStreamGetNextFrame(m_pVideoStreamHandle, TIMEOUT_IN_MILLI, &pImageFrame );
    if( FAILED( hr ) ) {
		std::cout << "NuiImageStreamGetNextFrame failed." << std::endl;
		return hr;
    }

    NuiImageBuffer * pTexture = pImageFrame->pFrameTexture;
    KINECT_LOCKED_RECT LockedRect;
    pTexture->LockRect( 0, &LockedRect, NULL, 0 );
    if( LockedRect.Pitch != 0 )
    {
        BYTE * pBuffer = (BYTE*) LockedRect.pBits;
		for(int h = 0;h < m_camera_height;h++) {
			for(int w = 0;w < m_camera_width;w++) {
				BYTE* pixel = pBuffer + (h * m_camera_width * 4) + w * 4;
				BYTE b = pixel[0];
				BYTE g = pixel[1];
				BYTE r = pixel[2];
				int offset = h*m_camera_width*3+w*3;
				m_image.pixels[offset + 0] = b;
				m_image.pixels[offset + 1] = g;
				m_image.pixels[offset + 2] = r;
			}
			m_imageOut.write();
		}

    }
    else {
		std::cout << "Buffer length of received texture is bogus\r\n" << std::endl;
    }

    NuiImageStreamReleaseFrame( m_pVideoStreamHandle, pImageFrame );

	return S_OK;
}
Example #8
/**
 * Writing Depth Image to the Outport
 *
 * In this release the depth map is converted to the gray scale image.
 * But in the future, the depth map should have its own data type, 
 * because the acquired data includes not only depth data [mm]
 * but also the tracking user id!
 *
 */
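Before the component code itself, here is a minimal, self-contained sketch (not part of RTCKinect; the struct and helper names are illustrative only) of how one packed 16-bit sample splits into those two pieces, assuming the beta SDK's depth-and-player-index layout used in the loop below: bits 0-2 carry the player index and bits 3-15 carry the depth in millimetres.

#include <windows.h>   // BYTE / USHORT typedefs

struct DepthSample
{
    USHORT depthMillimeters;  // 13-bit depth value
    USHORT playerIndex;       // 0 = no player, 1-7 = tracked user id
    BYTE   displayIntensity;  // 8-bit value for a grayscale preview
};

// Unpack one 16-bit sample from a depth-and-player-index stream.
inline DepthSample UnpackDepthSample(USHORT packed)
{
    DepthSample s;
    s.depthMillimeters = (packed & 0xfff8) >> 3;  // bits 3-15: depth in mm
    s.playerIndex      =  packed & 0x0007;        // bits 0-2: player index
    // Same display mapping as the loop in WriteDepthImage below
    // (nearer objects map to brighter values).
    s.displayIntensity = 255 - (BYTE)(256 * s.depthMillimeters / 0x0fff);
    return s;
}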
HRESULT RTCKinect::WriteDepthImage(void)
{
	static const long TIMEOUT_IN_MILLI = 100;
	const NUI_IMAGE_FRAME * pImageFrame = NULL;
    HRESULT hr = NuiImageStreamGetNextFrame(m_pDepthStreamHandle, TIMEOUT_IN_MILLI, &pImageFrame );
    if( FAILED( hr ) ) {
		std::cout << "NuiImageStreamGetNextFrame failed." << std::endl;
		return hr;
    }

    NuiImageBuffer * pTexture = pImageFrame->pFrameTexture;
    KINECT_LOCKED_RECT LockedRect;
    pTexture->LockRect( 0, &LockedRect, NULL, 0 );
    if( LockedRect.Pitch != 0 )
    {
        BYTE * pBuffer = (BYTE*) LockedRect.pBits;
		for(int h = 0;h < m_depth_height;h++) {
			for(int w = 0;w < m_depth_width;w++) {
				USHORT* pixel = (USHORT*)(pBuffer + (h * m_depth_width * sizeof(USHORT)) + w * sizeof(USHORT));

				USHORT RealDepth = (*pixel & 0xfff8) >> 3;
				USHORT Player = *pixel & 7;

				// transform 13-bit depth information into an 8-bit intensity appropriate
				// for display (we disregard information in most significant bit)
				BYTE depth = 255 - (BYTE)(256*RealDepth/0x0fff);

				unsigned char r, g, b;
				r=g=b = depth/2;
				int offset = h*m_depth_width*3+w*3;
				m_depth.pixels[offset + 0] = b;
				m_depth.pixels[offset + 1] = g;
				m_depth.pixels[offset + 2] = r;
			}
			m_depthOut.write();
		}

    }
    else {
		std::cout << "Buffer length of received texture is bogus." << std::endl;
    }

    NuiImageStreamReleaseFrame( m_pDepthStreamHandle, pImageFrame );

	return S_OK;
}
Example #9
void KinectDevice::Nui_GotDepthAlert()
{
    const NUI_IMAGE_FRAME * pImageFrame = NULL;

    HRESULT hr = NuiImageStreamGetNextFrame(m_pDepthStreamHandle, 0, &pImageFrame);
    if( FAILED( hr ) )
    {
        return;
    }

    int width;
    int height;
    if( pImageFrame->eResolution == NUI_IMAGE_RESOLUTION_320x240 )
    {
        width = 320;
        height = 240;
    }
    else
    {
        width = 640;
        height = 480;
    }

    NuiImageBuffer * pTexture = pImageFrame->pFrameTexture;
    KINECT_LOCKED_RECT LockedRect;
    pTexture->LockRect( 0, &LockedRect, NULL, 0 );
    BYTE* pBuffer = 0;
    if( LockedRect.Pitch != 0 )
    {
        pBuffer = (BYTE*) LockedRect.pBits;
    }
    else
    {
        OutputDebugString( L"Buffer length of received texture is bogus\r\n" );
        // no pixel data: release the frame and bail out rather than reading a NULL buffer below
        NuiImageStreamReleaseFrame( m_pDepthStreamHandle, pImageFrame );
        return;
    }


    CvMatData img;
    if( pImageFrame->eImageType == NUI_IMAGE_TYPE_DEPTH )
    {
        //img = CvMatData::create(width, height, CV_8U, 1);

        //// draw the bits to the bitmap
        //USHORT* pBufferRun = (USHORT*) pBuffer;
        //cv::Mat& mat = img;

        //// todo should be faster with memcpy
        //for( int y = 0 ; y < height ; y++ )
        //{
        //    for( int x = 0 ; x < width ; x++ )
        //    {
        //        // from 12-bit to 16-bit
        //        USHORT RealDepth = *pBufferRun;

        //        // transform 13-bit depth information into an 8-bit intensity appropriate
        //        // for display (we disregard information in most significant bit)
        //        BYTE l = 255 - (BYTE)(256*RealDepth/0x0fff);
        //        mat.at<BYTE>(y,x) = l;
        //        pBufferRun++;
        //    }
        //}

        img = CvMatData::create(width, height, CV_16U, 1);

        // draw the bits to the bitmap
        USHORT* pBufferRun = (USHORT*) pBuffer;
        cv::Mat& mat = img;

        // todo should be faster with memcpy
        for( int y = 0 ; y < height ; y++ )
        {
            for( int x = 0 ; x < width ; x++ )
            {
                // multiply by 2^4 so we see something in the viewer
                // this is a temporary hack until we can adjust viewer to see
                // 12 bit values
                mat.at<USHORT>(y,x) = (*pBufferRun) << 4;
                pBufferRun++;
            }
        }

        // for some reason (bug?) the image is flipped in 640x480 depth mode
        // flip the image back around y-axis
        // cv::flip( mat, mat, 1 );

        // also the last 8 pixels are black.
        // From http://groups.google.com/group/openkinect/browse_thread/thread/6539281cf451ae9e
        // Turns out the scaled down raw IR image that we can stream from the
        // Kinect is 640x488, so it loses 8 pixels in both the X and Y dimensions.
        // We just don't see the lost Y because the image is truncated there, while
        // the missing X pixels are padded.

        // The actual raw IR acquisition image is likely 1280x976 (from a 1280x1024
        // sensor, windowing off the extra Y pixels), and from that it derives a
        // 632x480 depth map at 1:2 ratio and using 16 extra source pixels in X and Y. 
    }
    else if( pImageFrame->eImageType == NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX )
    {
        img = CvMatData::create(width, height, CV_8U, 3);

        // draw the bits to the bitmap
        USHORT* pBufferRun = (USHORT*) pBuffer;
        cv::Mat& mat = img;

        for( int y = 0 ; y < height ; y++ )
        {
            for( int x = 0 ; x < width ; x++ )
            {
                RGBQUAD quad = Nui_ShortToQuad_DepthAndPlayerIndex( *pBufferRun );

                pBufferRun++;
                mat.at<cv::Vec3b>(y,x)[0] = quad.rgbBlue;
                mat.at<cv::Vec3b>(y,x)[1] = quad.rgbGreen;
                mat.at<cv::Vec3b>(y,x)[2] = quad.rgbRed;
            }
        }
    }
    emit newDepthFrame( m_id, img );
    NuiImageStreamReleaseFrame( m_pDepthStreamHandle, pImageFrame );
}
Example #10
void CSkeletalViewerApp::Nui_GotDepthAlert( )
{
    const NUI_IMAGE_FRAME * pImageFrame = NULL;

    HRESULT hr = NuiImageStreamGetNextFrame(
        m_pDepthStreamHandle,
        0,
        &pImageFrame );

    if( FAILED( hr ) )
    {
        return;
    }

    NuiImageBuffer * pTexture = pImageFrame->pFrameTexture;
    KINECT_LOCKED_RECT LockedRect;
    pTexture->LockRect( 0, &LockedRect, NULL, 0 );
    if( LockedRect.Pitch != 0 )
    {
        BYTE * pBuffer = (BYTE*) LockedRect.pBits;

		USHORT * pPlayerRun = m_playerMap;
        for( int y = 0 ; y < 480 ; y++ )
        {
            for( int x = 0 ; x < 640 ; x++ )
            {
				*pPlayerRun = 0;
				pPlayerRun++;
			}
		}
		
        // draw the bits to the bitmap
        RGBQUAD * rgbrun = m_rgbWk;
        USHORT * pBufferRun = (USHORT*) pBuffer;
		USHORT player, depth;
		long colorX = 0, colorY = 0;
        for( int y = 0 ; y < 240 ; y++ )
        {
            for( int x = 0 ; x < 320 ; x++ )
            {
				depth = *pBufferRun & 0xfff8;

				if (FrameCount == 0)
				{
					initialdepth[x + y*320] = depth;
				}
				//TODO if first frame, save depth values as background

				player = *pBufferRun & 7;

				
				NuiImageGetColorPixelCoordinatesFromDepthPixel(
					NUI_IMAGE_RESOLUTION_640x480,
					0, x, y, depth, &colorX, &colorY);

				USHORT writeover = 0;

				//HACK player id based:
				writeover = player;
				//HACK background subtraction
				if (depth < initialdepth[x + y*320] - 10)
				{
					//HACK writeover = 10;
				}

				m_playerMap[colorY * 640 + colorX] = writeover;
				m_playerMap[colorY * 640 + colorX + 1] = writeover;
				m_playerMap[(colorY + 1)* 640 + colorX] = writeover;
				m_playerMap[(colorY + 1)* 640 + colorX + 1] = writeover;

                RGBQUAD quad = Nui_ShortToQuad_Depth( *pBufferRun );
                pBufferRun++;
                *rgbrun = quad;
                rgbrun++;
            }
        }

		UINT bgColor = 0x00ffeedd;
		UINT * pVideoRun = m_videoCache;
		pPlayerRun = m_playerMap;
		for( int y = 0 ; y < 480 ; y++ )
        {
            for( int x = 0 ; x < 640; x++ )
            {	
				if (*pPlayerRun == 0) 
				{
					*pVideoRun = bgColor;
				}
				pVideoRun++;
				pPlayerRun++;
            }
        }

		m_DrawVideo.DrawFrame( (BYTE*) m_videoCache );
        m_DrawDepth.DrawFrame( (BYTE*) m_rgbWk );

		ULONGLONG diff = (ULONGLONG) ((1.0 / m_FramesTotal - 1.0 / 20) * 1000);
		//diff = 0;
		m_videoDelay = GetTickCount64() + diff;
    }
    else
    {
        OutputDebugString( L"Buffer length of received texture is bogus\r\n" );
    }
	FrameCount++;
    NuiImageStreamReleaseFrame( m_pDepthStreamHandle, pImageFrame );
}