KinectSensor::KinectSensor()
{
    // Get the resolution as DWORDs, but store as unsigned ints to avoid casts later.
    DWORD width = 0;
    DWORD height = 0;

    NuiImageResolutionToSize(cDepthResolution, width, height);
    unsigned int depthWidth  = static_cast<unsigned int>(width);
    unsigned int depthHeight = static_cast<unsigned int>(height);

    NuiImageResolutionToSize(cColorResolution, width, height);
    unsigned int colorWidth  = static_cast<unsigned int>(width);
    unsigned int colorHeight = static_cast<unsigned int>(height);

    DepthSensor::init(depthWidth, depthHeight, colorWidth, colorHeight);

    m_colorToDepthDivisor = colorWidth / depthWidth;

    m_bDepthReceived = false;
    m_bColorReceived = false;

    m_hNextDepthFrameEvent = INVALID_HANDLE_VALUE;
    m_pDepthStreamHandle   = INVALID_HANDLE_VALUE;
    m_hNextColorFrameEvent = INVALID_HANDLE_VALUE;
    m_pColorStreamHandle   = INVALID_HANDLE_VALUE;

    // Two LONGs (x, y) per depth pixel for the depth-to-color coordinate map.
    m_colorCoordinates = new LONG[depthWidth * depthHeight * 2];

    m_bDepthImageIsUpdated        = false;
    m_bDepthImageCameraIsUpdated  = false;
    m_bNormalImageCameraIsUpdated = false;

    // The 320x240 skeleton-to-depth multiplier is doubled for 640x480;
    // the principal point is taken as the image center (320, 240).
    initializeIntrinsics(2.0f * NUI_CAMERA_SKELETON_TO_DEPTH_IMAGE_MULTIPLIER_320x240,
                         2.0f * NUI_CAMERA_SKELETON_TO_DEPTH_IMAGE_MULTIPLIER_320x240,
                         320.0f, 240.0f);
}
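// A minimal matching destructor sketch for the constructor above, assuming
// KinectSensor owns the coordinate buffer it allocates. The real class may
// also need to close stream handles it opens later; this is not confirmed
// by the source.
KinectSensor::~KinectSensor()
{
    delete[] m_colorCoordinates;
    m_colorCoordinates = NULL;

    // The constructor only initializes the handles to INVALID_HANDLE_VALUE,
    // so close them only if they were actually created elsewhere.
    if (m_hNextDepthFrameEvent != INVALID_HANDLE_VALUE) CloseHandle(m_hNextDepthFrameEvent);
    if (m_hNextColorFrameEvent != INVALID_HANDLE_VALUE) CloseHandle(m_hNextColorFrameEvent);
}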
/// <summary>
/// Constructor
/// </summary>
CBackgroundRemovalBasics::CBackgroundRemovalBasics(std::vector<std::string> args) :
    m_hNextDepthFrameEvent(INVALID_HANDLE_VALUE),
    m_hNextColorFrameEvent(INVALID_HANDLE_VALUE),
    m_hNextSkeletonFrameEvent(INVALID_HANDLE_VALUE),
    m_hNextBackgroundRemovedFrameEvent(INVALID_HANDLE_VALUE),
    m_pDepthStreamHandle(INVALID_HANDLE_VALUE),
    m_pColorStreamHandle(INVALID_HANDLE_VALUE),
    m_bNearMode(false),
    m_pNuiSensor(NULL),
    m_pSensorChooser(NULL),
    m_pSensorChooserUI(NULL),
    m_pBackgroundRemovalStream(NULL),
    m_trackedSkeleton(NUI_SKELETON_INVALID_TRACKING_ID)
{
    // Get the resolution as DWORDs, but store as LONGs to avoid casts later.
    DWORD width = 0;
    DWORD height = 0;

    NuiImageResolutionToSize(cDepthResolution, width, height);
    m_depthWidth  = static_cast<LONG>(width);
    m_depthHeight = static_cast<LONG>(height);

    NuiImageResolutionToSize(cColorResolution, width, height);
    m_colorWidth  = static_cast<LONG>(width);
    m_colorHeight = static_cast<LONG>(height);

    // Create heap storage for output pixel data in RGBX format.
    m_outputRGBX     = new BYTE[m_colorWidth * m_colorHeight * cBytesPerPixel];
    m_backgroundRGBX = new BYTE[m_colorWidth * m_colorHeight * cBytesPerPixel];

    // Create events that will be signaled when depth, color, skeleton, and
    // background-removed (segmentation) frames are available.
    m_hNextDepthFrameEvent             = CreateEvent(NULL, TRUE, FALSE, NULL);
    m_hNextColorFrameEvent             = CreateEvent(NULL, TRUE, FALSE, NULL);
    m_hNextSkeletonFrameEvent          = CreateEvent(NULL, TRUE, FALSE, NULL);
    m_hNextBackgroundRemovedFrameEvent = CreateEvent(NULL, TRUE, FALSE, NULL);

    nManager = new NetworkManager(args);
}
// To locate the VTK DLLs when debugging, add to Properties -> Debugging -> Environment:
// PATH=%PATH%;D:\VTK_bin\bin\Debug
DepthSensor::DepthSensor() :
    mNuiSensor(NULL),
    mNextDepthFrameEvent(INVALID_HANDLE_VALUE),
    mDepthStreamHandle(INVALID_HANDLE_VALUE),
    mDrawDepth(NULL),
    mdepthImageResolution(NUI_IMAGE_RESOLUTION_640x480),
    m_pVolume(NULL),
    cDepthImagePixels(0),
    m_pDepthImagePixelBuffer(NULL),
    m_pDepthFloatImage(NULL),
    m_pPointCloud(NULL),
    m_pShadedSurface(NULL),
    m_bMirrorDepthFrame(false),
    m_bTranslateResetPoseByMinDepthThreshold(true),
    m_cLostFrameCounter(0),
    m_bTrackingFailed(false),
    m_bAutoResetReconstructionWhenLost(false),
    m_bAutoResetReconstructionOnTimeout(true),
    m_fStartTime(0),
    m_isInit(false),
    m_saveMeshFormat(Stl),
    filename()
{
    // Get the depth frame size from the NUI_IMAGE_RESOLUTION enum.
    DWORD WIDTH = 0, HEIGHT = 0;
    NuiImageResolutionToSize(mdepthImageResolution, WIDTH, HEIGHT);
    cDepthWidth  = WIDTH;
    cDepthHeight = HEIGHT;
    cDepthImagePixels = cDepthWidth * cDepthHeight;

    // Create heap storage for depth pixel data in RGBX format.
    m_depthRGBX = new BYTE[cDepthWidth * cDepthHeight * cBytesPerPixel];

    // Define a cubic Kinect Fusion reconstruction volume, with the Kinect at
    // the center of the front face and the volume directly in front of it.
    reconstructionParams.voxelsPerMeter = 256; // 1000mm / 256vpm = ~3.9mm/voxel
    reconstructionParams.voxelCountX = 512;    // 512 / 256vpm = 2m wide reconstruction
    reconstructionParams.voxelCountY = 384;    // Memory = 512*384*512 * 4 bytes per voxel
    reconstructionParams.voxelCountZ = 512;    // requires roughly 512MB of GPU memory

    // These parameters are for optionally clipping the input depth image.
    m_fMinDepthThreshold = NUI_FUSION_DEFAULT_MINIMUM_DEPTH; // min depth in meters
    m_fMaxDepthThreshold = NUI_FUSION_DEFAULT_MAXIMUM_DEPTH; // max depth in meters

    // Temporal averaging parameter for depth integration into the reconstruction.
    m_cMaxIntegrationWeight = NUI_FUSION_DEFAULT_INTEGRATION_WEIGHT; // reasonable for static scenes

    SetIdentityMatrix(m_worldToCameraTransform);
    SetIdentityMatrix(m_defaultWorldToVolumeTransform);

    m_cLastDepthFrameTimeStamp.QuadPart = 0;

    // Initialize synchronization objects.
    InitializeCriticalSection(&m_lockVolume);
}
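// Hedged teardown sketch pairing with the constructor above: free the RGBX
// buffer, release the Fusion volume, and delete the critical section that
// InitializeCriticalSection created. Exact member ownership is an assumption
// not confirmed by the source.
DepthSensor::~DepthSensor()
{
    delete[] m_depthRGBX;
    m_depthRGBX = NULL;

    if (m_pVolume != NULL)
    {
        m_pVolume->Release(); // INuiFusionReconstruction is a COM interface
        m_pVolume = NULL;
    }

    if (mNextDepthFrameEvent != INVALID_HANDLE_VALUE)
        CloseHandle(mNextDepthFrameEvent);

    DeleteCriticalSection(&m_lockVolume); // pairs with InitializeCriticalSection
}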
KinectCapture::KinectCapture() :
    m_depthImagePixelBuffer(nullptr),
    m_colorImagePixelBuffer(nullptr),
    m_alignedColorImagePixelBuffer(nullptr),
    m_NuiSensor(nullptr),
    m_colorImageResolution(NUI_IMAGE_RESOLUTION_640x480),
    m_depthImageResolution(NUI_IMAGE_RESOLUTION_640x480)
{
    DWORD width, height;

    NuiImageResolutionToSize(m_depthImageResolution, width, height);
    m_DepthHeight = height;
    m_DepthWidth  = width;
    m_DepthImagePixels = m_DepthHeight * m_DepthWidth;

    NuiImageResolutionToSize(m_colorImageResolution, width, height);
    m_ColorHeight = height;
    m_ColorWidth  = width;
    m_ColorImagePixels = m_ColorHeight * m_ColorWidth;

    // 4 bytes per pixel (BGRX) for the color buffers; 16-bit depth samples.
    m_colorImagePixelBuffer        = new BYTE[m_ColorHeight * m_ColorWidth * 4];
    m_alignedColorImagePixelBuffer = new BYTE[m_DepthHeight * m_DepthWidth * 4];
    m_depthImagePixelBuffer        = new USHORT[m_DepthHeight * m_DepthWidth];
}
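// A hedged sketch of how m_alignedColorImagePixelBuffer is typically filled:
// map each depth pixel to its color-frame coordinate, then copy that color
// sample into the depth-aligned buffer. The helper name and the caller-supplied
// coordinate array are assumptions; the coordinates would come from
// INuiSensor::NuiImageGetColorPixelCoordinateFrameFromDepthPixelFrameAtResolution.
void KinectCapture::AlignColorToDepth(const LONG* colorCoordinates) // hypothetical helper
{
    // colorCoordinates holds (x, y) pairs, one pair per depth pixel.
    for (int i = 0; i < m_DepthImagePixels; ++i)
    {
        LONG cx = colorCoordinates[2 * i];
        LONG cy = colorCoordinates[2 * i + 1];
        BYTE* dst = m_alignedColorImagePixelBuffer + i * 4;
        if (cx >= 0 && cx < (LONG)m_ColorWidth && cy >= 0 && cy < (LONG)m_ColorHeight)
            memcpy(dst, m_colorImagePixelBuffer + (cy * m_ColorWidth + cx) * 4, 4);
        else
            memset(dst, 0, 4); // no valid color sample for this depth pixel
    }
}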
bool UKinect::pollSkeleton() {
  if (NULL != sensor) {
    // Attempt to get the next skeleton frame (non-blocking, timeout 0).
    hr = sensor->NuiSkeletonGetNextFrame(0, &skeletonFrame);
    if (FAILED(hr)) {
      cerr << "[UKinect] WARNING: Skeleton poll failed." << endl;
      return false;
    }

    // Smooth out the skeleton data according to the selected filter level.
    if (skeletonFilter.as<int>() == 0) {
      // No smoothing.
      //sensor->NuiTransformSmooth(&skeletonFrame, NULL);
    } else if (skeletonFilter.as<int>() == 1) {
      // NULL selects the SDK defaults: {0.5f, 0.5f, 0.5f, 0.05f, 0.04f}.
      sensor->NuiTransformSmooth(&skeletonFrame, NULL);
    } else if (skeletonFilter.as<int>() == 2) {
      const NUI_TRANSFORM_SMOOTH_PARAMETERS SomewhatLatentParams = { 0.5f, 0.1f, 0.5f, 0.1f, 0.1f };
      sensor->NuiTransformSmooth(&skeletonFrame, &SomewhatLatentParams);
    } else {
      const NUI_TRANSFORM_SMOOTH_PARAMETERS VerySmoothParams = { 0.7f, 0.3f, 1.0f, 1.0f, 1.0f };
      sensor->NuiTransformSmooth(&skeletonFrame, &VerySmoothParams);
    }

    // Process the skeleton frame if the interaction function is enabled.
    if (interaction) {
      Vector4 v;
      sensor->NuiAccelerometerGetCurrentReading(&v);
      hr = interactionStream->ProcessSkeleton(NUI_SKELETON_COUNT, skeletonFrame.SkeletonData, &v, skeletonFrame.liTimeStamp);
      if (FAILED(hr)) {
        cerr << "[UKinect] ERROR: Process skeleton failed (for interaction purposes)." << endl;
        return false;
      }
    }

    vector<int> skelIDs;                                   // these are used in face tracking
    vector<int> skelTrackedIDs = UpdateTrackedSkeletons(); // use this to collect the tracked IDs

    for (int i = 0; i < NUI_SKELETON_COUNT; ++i) {
      NUI_SKELETON_TRACKING_STATE trackingState = skeletonFrame.SkeletonData[i].eTrackingState;
      if (NUI_SKELETON_POSITION_ONLY == trackingState)
        skelIDs.push_back((int)skeletonFrame.SkeletonData[i].dwTrackingID);
    }

    // Save the vectors to UVars.
    skeletonIDs = skelIDs;
    skeletonTrackedIDs = skelTrackedIDs;

    if (skeletonVisualization) {
      DWORD t_width, t_height;
      NuiImageResolutionToSize((NUI_IMAGE_RESOLUTION)colorResolution.as<int>(), t_width, t_height);
      if ((color) && (skeletonVisualizationOnColor.as<int>()))
        skeletonCVMat = colorCVMat.clone(); // use the color image as background if the color function is enabled
      else
        skeletonCVMat = Mat(Size(static_cast<int>(t_width), static_cast<int>(t_height)), CV_8UC3, CV_RGB(0, 0, 0));

      for (int i = 0; i < NUI_SKELETON_COUNT; ++i) {
        NUI_SKELETON_TRACKING_STATE trackingState = skeletonFrame.SkeletonData[i].eTrackingState;
        if (NUI_SKELETON_TRACKED == trackingState) {
          // We're tracking the skeleton; draw it.
          drawSkeleton(skeletonFrame.SkeletonData[i]);
          drawPosition(skeletonFrame.SkeletonData[i]);
        } else if (NUI_SKELETON_POSITION_ONLY == trackingState) {
          // We've only received the center point of the skeleton; draw that.
          drawPosition(skeletonFrame.SkeletonData[i]);
        }
        drawOutOfFrame(skeletonFrame.SkeletonData[i]);
      }

      // Save the CV image to a UImage.
      skeletonBin.image.width  = skeletonCVMat.cols;
      skeletonBin.image.height = skeletonCVMat.rows;
      skeletonBin.image.size   = skeletonCVMat.cols * skeletonCVMat.rows * 3;
      skeletonBin.image.data   = skeletonCVMat.data;
      skeletonImage = skeletonBin;
    }
    return true;
  }
  cerr << "[UKinect] ERROR: Skeleton poll failed: sensor not available." << endl;
  return false;
}
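// UpdateTrackedSkeletons is called above but not shown; this is a hedged
// sketch of what such a helper typically does with the module's
// skeletonFrame: collect the IDs of fully tracked skeletons and return them.
// A variant could also pass the chosen IDs to NuiSkeletonSetTrackedSkeletons
// to pin tracking to them; that is an assumption, not confirmed by the source.
vector<int> UKinect::UpdateTrackedSkeletons() {
  vector<int> trackedIDs;
  for (int i = 0; i < NUI_SKELETON_COUNT; ++i) {
    if (skeletonFrame.SkeletonData[i].eTrackingState == NUI_SKELETON_TRACKED)
      trackedIDs.push_back((int)skeletonFrame.SkeletonData[i].dwTrackingID);
  }
  return trackedIDs;
}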
HRESULT KinectSensor::Init(NUI_IMAGE_TYPE depthType, NUI_IMAGE_RESOLUTION depthRes, BOOL bNearMode, BOOL bFallbackToDefault, NUI_IMAGE_TYPE colorType, NUI_IMAGE_RESOLUTION colorRes, BOOL bSeatedSkeletonMode)
{
    HRESULT hr = E_UNEXPECTED;

    Release(); // deal with double initializations

    // NUI_IMAGE_TYPE_COLOR_RAW_YUV is not supported for now.
    if ((colorType != NUI_IMAGE_TYPE_COLOR && colorType != NUI_IMAGE_TYPE_COLOR_YUV) ||
        (depthType != NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX && depthType != NUI_IMAGE_TYPE_DEPTH))
    {
        return E_INVALIDARG;
    }

    m_VideoBuffer = FTCreateImage();
    if (!m_VideoBuffer)
    {
        return E_OUTOFMEMORY;
    }

    DWORD width = 0;
    DWORD height = 0;
    NuiImageResolutionToSize(colorRes, width, height);

    hr = m_VideoBuffer->Allocate(width, height, FTIMAGEFORMAT_UINT8_B8G8R8X8);
    if (FAILED(hr))
    {
        return hr;
    }

    m_DepthBuffer = FTCreateImage();
    if (!m_DepthBuffer)
    {
        return E_OUTOFMEMORY;
    }

    NuiImageResolutionToSize(depthRes, width, height);

    hr = m_DepthBuffer->Allocate(width, height, FTIMAGEFORMAT_UINT16_D13P3);
    if (FAILED(hr))
    {
        return hr;
    }

    m_FramesTotal = 0;
    m_SkeletonTotal = 0;

    for (int i = 0; i < NUI_SKELETON_COUNT; ++i)
    {
        m_HeadPoint[i] = m_NeckPoint[i] = FT_VECTOR3D(0, 0, 0);
        m_SkeletonTracked[i] = false;
    }

    m_hNextDepthFrameEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
    m_hNextVideoFrameEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
    m_hNextSkeletonEvent   = CreateEvent(NULL, TRUE, FALSE, NULL);

    DWORD dwNuiInitDepthFlag = (depthType == NUI_IMAGE_TYPE_DEPTH) ? NUI_INITIALIZE_FLAG_USES_DEPTH : NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX;

    hr = NuiInitialize(dwNuiInitDepthFlag | NUI_INITIALIZE_FLAG_USES_SKELETON | NUI_INITIALIZE_FLAG_USES_COLOR);
    if (FAILED(hr))
    {
        return hr;
    }
    m_bNuiInitialized = true;

    DWORD dwSkeletonFlags = NUI_SKELETON_TRACKING_FLAG_ENABLE_IN_NEAR_RANGE;
    if (bSeatedSkeletonMode)
    {
        dwSkeletonFlags |= NUI_SKELETON_TRACKING_FLAG_ENABLE_SEATED_SUPPORT;
    }
    hr = NuiSkeletonTrackingEnable(m_hNextSkeletonEvent, dwSkeletonFlags);
    if (FAILED(hr))
    {
        return hr;
    }

    hr = NuiImageStreamOpen(colorType, colorRes, 0, 2, m_hNextVideoFrameEvent, &m_pVideoStreamHandle);
    if (FAILED(hr))
    {
        return hr;
    }

    hr = NuiImageStreamOpen(depthType, depthRes, (bNearMode) ? NUI_IMAGE_STREAM_FLAG_ENABLE_NEAR_MODE : 0, 2, m_hNextDepthFrameEvent, &m_pDepthStreamHandle);
    if (FAILED(hr))
    {
        // Near mode may be unsupported on some hardware; optionally retry without it.
        if (bNearMode && bFallbackToDefault)
        {
            hr = NuiImageStreamOpen(depthType, depthRes, 0, 2, m_hNextDepthFrameEvent, &m_pDepthStreamHandle);
        }
        if (FAILED(hr))
        {
            return hr;
        }
    }

    // Start the NUI processing thread.
    m_hEvNuiProcessStop = CreateEvent(NULL, TRUE, FALSE, NULL);
    m_hThNuiProcess = CreateThread(NULL, 0, ProcessThread, this, 0, NULL);

    return hr;
}
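// A minimal sketch of the processing thread started above: wait on the stop
// event plus the three frame events and dispatch to per-stream handlers.
// The handler names GotDepthAlert/GotVideoAlert/GotSkeletonAlert are
// assumptions; the real class may name or structure them differently.
DWORD WINAPI KinectSensor::ProcessThread(LPVOID param)
{
    KinectSensor* self = static_cast<KinectSensor*>(param);
    HANDLE events[] = { self->m_hEvNuiProcessStop,
                        self->m_hNextDepthFrameEvent,
                        self->m_hNextVideoFrameEvent,
                        self->m_hNextSkeletonEvent };
    for (;;)
    {
        DWORD idx = WaitForMultipleObjects(ARRAYSIZE(events), events, FALSE, INFINITE);
        if (idx == WAIT_OBJECT_0) break; // stop event signaled
        switch (idx)
        {
        case WAIT_OBJECT_0 + 1: self->GotDepthAlert();    break;
        case WAIT_OBJECT_0 + 2: self->GotVideoAlert();    break;
        case WAIT_OBJECT_0 + 3: self->GotSkeletonAlert(); break;
        }
    }
    return 0;
}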
HRESULT Nui_Init()
{
    HRESULT hr;

    if (!m_pNuiSensor)
    {
        hr = NuiCreateSensorByIndex(0, &m_pNuiSensor);
        if (FAILED(hr))
        {
            return hr;
        }
        SysFreeString(m_instanceId);
        m_instanceId = m_pNuiSensor->NuiDeviceConnectionId();
    }

    m_hNextDepthFrameEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
    m_hNextColorFrameEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
    m_hNextSkeletonEvent   = CreateEvent(NULL, TRUE, FALSE, NULL);

    DWORD nuiFlags = NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX | NUI_INITIALIZE_FLAG_USES_SKELETON | NUI_INITIALIZE_FLAG_USES_COLOR;
    hr = m_pNuiSensor->NuiInitialize(nuiFlags);
    if (E_NUI_SKELETAL_ENGINE_BUSY == hr)
    {
        // Another process owns the skeletal engine; fall back to depth and color only.
        nuiFlags = NUI_INITIALIZE_FLAG_USES_DEPTH | NUI_INITIALIZE_FLAG_USES_COLOR;
        hr = m_pNuiSensor->NuiInitialize(nuiFlags);
    }

    if (HasSkeletalEngine(m_pNuiSensor))
    {
        //m_SkeletonTrackingFlags = NUI_SKELETON_TRACKING_FLAG_TITLE_SETS_TRACKED_SKELETONS;
        hr = m_pNuiSensor->NuiSkeletonTrackingEnable(m_hNextSkeletonEvent, 0 /*m_SkeletonTrackingFlags*/);
        if (FAILED(hr))
        {
            return hr;
        }
    }

    g_ColorImgResolution = NUI_IMAGE_RESOLUTION_640x480;
    hr = m_pNuiSensor->NuiImageStreamOpen(NUI_IMAGE_TYPE_COLOR, g_ColorImgResolution, 0, 2, m_hNextColorFrameEvent, &m_pVideoStreamHandle);
    if (FAILED(hr))
    {
        return hr;
    }

    //g_DepthImgResolution = NUI_IMAGE_RESOLUTION_320x240;
    g_DepthImgResolution = NUI_IMAGE_RESOLUTION_640x480;
    hr = m_pNuiSensor->NuiImageStreamOpen(
        HasSkeletalEngine(m_pNuiSensor) ? NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX : NUI_IMAGE_TYPE_DEPTH,
        g_DepthImgResolution, m_DepthStreamFlags, 2, m_hNextDepthFrameEvent, &m_pDepthStreamHandle);
    if (FAILED(hr))
    {
        return hr;
    }

    // Allocate the point-cloud and raw depth buffers.
    NuiImageResolutionToSize(g_DepthImgResolution, g_DepthWidth, g_DepthHeight);
    g_PointsData = new Vector4[g_DepthWidth * g_DepthHeight];
    m_DepthData  = new USHORT[g_DepthWidth * g_DepthHeight];

    // Allocate the color image buffer and the depth-to-color coordinate map.
    NuiImageResolutionToSize(g_ColorImgResolution, g_ColorWidth, g_ColorHeight);
    g_ColorsData = new BYTE[g_ColorWidth * g_ColorHeight * 4];
    g_ColorCoordinates = new LONG[g_DepthWidth * g_DepthHeight * 2];
    g_colorToDepthDivisor = g_ColorWidth / g_DepthWidth;

    // Start the NUI processing thread.
    m_hEvNuiProcessStop = CreateEvent(NULL, FALSE, FALSE, NULL);
    m_hThNuiProcess = CreateThread(NULL, 0, Nui_ProcessThread, NULL, 0, NULL);

    g_TrackingUserID = 0;

    return hr;
}
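// Hedged sketch of the matching teardown for Nui_Init: stop the processing
// thread first, then shut the sensor down and free what Nui_Init allocated.
// The function name and exact ordering are assumptions modeled on the
// allocations above.
void Nui_UnInit()
{
    if (m_hEvNuiProcessStop != NULL)
    {
        SetEvent(m_hEvNuiProcessStop); // ask Nui_ProcessThread to exit
        if (m_hThNuiProcess != NULL)
        {
            WaitForSingleObject(m_hThNuiProcess, INFINITE);
            CloseHandle(m_hThNuiProcess);
        }
        CloseHandle(m_hEvNuiProcessStop);
    }

    if (m_pNuiSensor != NULL)
    {
        m_pNuiSensor->NuiShutdown();
        m_pNuiSensor->Release();
        m_pNuiSensor = NULL;
    }

    CloseHandle(m_hNextDepthFrameEvent);
    CloseHandle(m_hNextColorFrameEvent);
    CloseHandle(m_hNextSkeletonEvent);

    delete[] g_PointsData;       g_PointsData = NULL;
    delete[] m_DepthData;        m_DepthData = NULL;
    delete[] g_ColorsData;       g_ColorsData = NULL;
    delete[] g_ColorCoordinates; g_ColorCoordinates = NULL;
}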
/// <summary>
/// Constructor
/// </summary>
CKinectFusion::CKinectFusion() :
    m_pVolume(nullptr),
    m_pNuiSensor(nullptr),
    m_imageResolution(NUI_IMAGE_RESOLUTION_640x480),
    m_cImageSize(0),
    m_hNextDepthFrameEvent(INVALID_HANDLE_VALUE),
    m_pDepthStreamHandle(INVALID_HANDLE_VALUE),
    m_bNearMode(true),
    m_bMirrorDepthFrame(false),
    m_bTranslateResetPoseByMinDepthThreshold(true),
    m_bAutoResetReconstructionWhenLost(false),
    m_cLostFrameCounter(0),
    m_bTrackingFailed(false),
    m_cFrameCounter(0),
    m_pDepthImagePixelBuffer(nullptr),
    m_pDepthFloatImage(nullptr),
    m_pPointCloud(nullptr),
    m_pShadedSurface(nullptr),
    m_pImageData(nullptr),
    m_bInitializeError(false)
{
    // Get the depth frame size from the NUI_IMAGE_RESOLUTION enum.
    // You can use NUI_IMAGE_RESOLUTION_640x480 or NUI_IMAGE_RESOLUTION_320x240 in this sample.
    // Smaller resolutions are faster in per-frame computations but show less detail in reconstructions.
    DWORD width = 0, height = 0;
    NuiImageResolutionToSize(m_imageResolution, width, height);
    m_cDepthWidth  = width;
    m_cDepthHeight = height;
    m_cImageSize   = m_cDepthWidth * m_cDepthHeight;

    // Create heap storage for depth pixel data in RGBX format.
    m_pDepthRGBX = new BYTE[m_cImageSize * cBytesPerPixel];

    // Define a cubic Kinect Fusion reconstruction volume, with the Kinect at
    // the center of the front face and the volume directly in front of it.
    m_reconstructionParams.voxelsPerMeter = 256; // 1000mm / 256vpm = ~3.9mm/voxel
    m_reconstructionParams.voxelCountX = 512;    // 512 / 256vpm = 2m wide reconstruction
    m_reconstructionParams.voxelCountY = 384;    // Memory = 512*384*512 * 4 bytes per voxel
    m_reconstructionParams.voxelCountZ = 512;    // This will require a GPU with at least 512MB

    // These parameters specify the maximum translation and rotation that can occur between frames
    // before we consider camera tracking to be lost. They describe a typical maximum speed of
    // motion for a hand-held Kinect camera, assuming you are running at 30Hz.
    m_fMaxTranslationDelta = 0.3f; // 0.15 - 0.3m per frame typical
    m_fMaxRotationDelta = 20.0f;   // 10 - 20 degrees per frame typical

    // These parameters are for optionally clipping the input depth image.
    m_fMinDepthThreshold = NUI_FUSION_DEFAULT_MINIMUM_DEPTH; // min depth in meters
    m_fMaxDepthThreshold = NUI_FUSION_DEFAULT_MAXIMUM_DEPTH; // max depth in meters

    // Temporal averaging parameter for depth integration into the reconstruction.
    m_cMaxIntegrationWeight = NUI_FUSION_DEFAULT_INTEGRATION_WEIGHT; // reasonable for static scenes

    // This parameter sets whether GPU or CPU processing is used. Note that the CPU
    // will likely be too slow for real-time processing.
    m_processorType = NUI_FUSION_RECONSTRUCTION_PROCESSOR_TYPE_AMP;

    // If GPU processing is selected, this zero-based index chooses the device to use for
    // processing. Setting -1 selects the most suitable device automatically (the
    // DirectX11-compatible device with the largest memory), which is useful in systems with
    // multiple GPUs when only one reconstruction volume is required. The automatic choice
    // does not load-balance across multiple GPUs, so select GPU indices manually when
    // multiple reconstruction volumes are required, each on a separate device.
    m_deviceIndex = -1;

    SetIdentityMatrix(m_cameraTransform);
    SetIdentityMatrix(m_worldToCameraTransform);
    SetIdentityMatrix(m_defaultWorldToVolumeTransform);

    m_cLastFrameTimeStamp.QuadPart = 0;
}
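// A minimal sketch of how the parameters above are typically consumed:
// NuiFusionCreateReconstruction builds the volume from the reconstruction
// parameters, processor type, and device index. The helper name CreateVolume
// is an assumption; in the SDK samples this logic lives in an initialization
// routine, and error handling is trimmed here.
HRESULT CKinectFusion::CreateVolume() // hypothetical helper name
{
    HRESULT hr = NuiFusionCreateReconstruction(
        &m_reconstructionParams,
        m_processorType,           // NUI_FUSION_RECONSTRUCTION_PROCESSOR_TYPE_AMP
        m_deviceIndex,             // -1 = auto-select the best DirectX11 device
        &m_worldToCameraTransform, // initial camera pose (identity here)
        &m_pVolume);
    if (FAILED(hr))
    {
        return hr;
    }
    // Remember the default world-to-volume transform so the volume can be
    // re-centered later (e.g. when translating by the min depth threshold).
    return m_pVolume->GetCurrentWorldToVolumeTransform(&m_defaultWorldToVolumeTransform);
}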
/// <summary>
/// Constructor
/// </summary>
KinectEasyGrabber::KinectEasyGrabber() :
    m_pD2DFactory(NULL),
    m_pDrawKinectEasyGrabber(NULL),
    m_hNextDepthFrameEvent(INVALID_HANDLE_VALUE),
    m_hNextColorFrameEvent(INVALID_HANDLE_VALUE),
    m_pDepthStreamHandle(INVALID_HANDLE_VALUE),
    m_pColorStreamHandle(INVALID_HANDLE_VALUE),
    m_bNearMode(false),
    m_pNuiSensor(NULL)
{
    // Get the resolution as DWORDs, but store as LONGs to avoid casts later.
    DWORD width = 0;
    DWORD height = 0;

    NuiImageResolutionToSize(cDepthResolution, width, height);
    m_depthWidth  = static_cast<LONG>(width);
    m_depthHeight = static_cast<LONG>(height);

    NuiImageResolutionToSize(cColorResolution, width, height);
    m_colorWidth  = static_cast<LONG>(width);
    m_colorHeight = static_cast<LONG>(height);

    m_colorToDepthDivisor = m_colorWidth / m_depthWidth;

    m_depthTimeStamp.QuadPart = 0;
    m_colorTimeStamp.QuadPart = 0;

    // Create heap storage for depth, mapping, and color pixel data.
    m_depthD16         = new USHORT[m_depthWidth * m_depthHeight];
    m_colorCoordinates = new LONG[m_depthWidth * m_depthHeight * 2];
    m_colorRGBX        = new BYTE[m_colorWidth * m_colorHeight * cBytesPerPixel];

    //m_backgroundRGBX = NULL;
    m_outputRGBX = NULL;
    m_backgroundRGBX = new BYTE[m_colorWidth * m_colorHeight * cBytesPerPixel];
    //m_outputRGBX = new BYTE[m_colorWidth*m_colorHeight*cBytesPerPixel];
    //m_outputDepthD16 = new USHORT[m_depthWidth*m_depthHeight];
    //m_outputPlayerUC8 = new unsigned char[m_depthWidth*m_depthHeight];

    m_frameBasename = new char[256];
    m_frameIndex = 0;
    m_totalFrames = TOTAL_FRAMES;
    m_dumped = false;

    // Per-frame recording arrays: one slot per captured frame.
    m_outputArrayDepthD16         = new USHORT*[m_totalFrames];
    m_outputArrayColorCoordinates = new LONG*[m_totalFrames];
    m_outputArrayRGBX             = new BYTE*[m_totalFrames];
    m_depthArrayTimeStamp         = new LARGE_INTEGER[m_totalFrames];
    m_colorArrayTimeStamp         = new LARGE_INTEGER[m_totalFrames];

    // Heavy memory allocations: full-size buffers for every recorded frame.
    for (int i = 0; i < m_totalFrames; i++)
        m_outputArrayDepthD16[i] = new USHORT[m_depthWidth * m_depthHeight];
    for (int i = 0; i < m_totalFrames; i++)
        m_outputArrayColorCoordinates[i] = new LONG[m_depthWidth * m_depthHeight * 2];
    for (int i = 0; i < m_totalFrames; i++)
        m_outputArrayRGBX[i] = new BYTE[m_colorWidth * m_colorHeight * cBytesPerPixel];
}
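// Hedged sketch of the destructor implied by the allocations above; each
// new[] in the constructor gets a matching delete[]. The real class likely
// also releases its sensor and D2D resources, which is not shown here.
KinectEasyGrabber::~KinectEasyGrabber()
{
    for (int i = 0; i < m_totalFrames; ++i)
    {
        delete[] m_outputArrayDepthD16[i];
        delete[] m_outputArrayColorCoordinates[i];
        delete[] m_outputArrayRGBX[i];
    }
    delete[] m_outputArrayDepthD16;
    delete[] m_outputArrayColorCoordinates;
    delete[] m_outputArrayRGBX;
    delete[] m_depthArrayTimeStamp;
    delete[] m_colorArrayTimeStamp;

    delete[] m_depthD16;
    delete[] m_colorCoordinates;
    delete[] m_colorRGBX;
    delete[] m_backgroundRGBX;
    delete[] m_frameBasename;
}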
int _tmain(int argc, _TCHAR* argv[])
{
    cv::setUseOptimized(true);

    INuiSensor* pSensor;
    HRESULT hResult = S_OK;
    hResult = NuiCreateSensorByIndex(0, &pSensor);
    if (FAILED(hResult)) {
        std::cerr << "Error : NuiCreateSensorByIndex" << std::endl;
        return -1;
    }

    hResult = pSensor->NuiInitialize(NUI_INITIALIZE_FLAG_USES_COLOR | NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX | NUI_INITIALIZE_FLAG_USES_SKELETON);
    if (FAILED(hResult)) {
        std::cerr << "Error : NuiInitialize" << std::endl;
        return -1;
    }

    HANDLE hColorEvent = INVALID_HANDLE_VALUE;
    HANDLE hColorHandle = INVALID_HANDLE_VALUE;
    hColorEvent = CreateEvent(nullptr, true, false, nullptr);
    hResult = pSensor->NuiImageStreamOpen(NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480, 0, 2, hColorEvent, &hColorHandle);
    if (FAILED(hResult)) {
        std::cerr << "Error : NuiImageStreamOpen( COLOR )" << std::endl;
        return -1;
    }

    HANDLE hDepthPlayerEvent = INVALID_HANDLE_VALUE;
    HANDLE hDepthPlayerHandle = INVALID_HANDLE_VALUE;
    hDepthPlayerEvent = CreateEvent(nullptr, true, false, nullptr);
    hResult = pSensor->NuiImageStreamOpen(NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX, NUI_IMAGE_RESOLUTION_640x480, 0, 2, hDepthPlayerEvent, &hDepthPlayerHandle);
    if (FAILED(hResult)) {
        std::cerr << "Error : NuiImageStreamOpen( DEPTH&PLAYER )" << std::endl;
        return -1;
    }

    HANDLE hSkeletonEvent = INVALID_HANDLE_VALUE;
    hSkeletonEvent = CreateEvent(nullptr, true, false, nullptr);
    hResult = pSensor->NuiSkeletonTrackingEnable(hSkeletonEvent, 0);
    if (FAILED(hResult)) {
        std::cerr << "Error : NuiSkeletonTrackingEnable" << std::endl;
        return -1;
    }

    unsigned long refWidth = 0;
    unsigned long refHeight = 0;
    NuiImageResolutionToSize(NUI_IMAGE_RESOLUTION_640x480, refWidth, refHeight);
    int width = static_cast<int>(refWidth);
    int height = static_cast<int>(refHeight);

    INuiCoordinateMapper* pCordinateMapper;
    hResult = pSensor->NuiGetCoordinateMapper(&pCordinateMapper);
    if (FAILED(hResult)) {
        std::cerr << "Error : NuiGetCoordinateMapper" << std::endl;
        return -1;
    }
    std::vector<NUI_COLOR_IMAGE_POINT> pColorPoint(width * height);

    HANDLE hEvents[3] = { hColorEvent, hDepthPlayerEvent, hSkeletonEvent };

    // Player-index palette: index 0 (no player) is black, players 1..6 get distinct colors.
    cv::Vec3b color[7];
    color[0] = cv::Vec3b(0, 0, 0);
    color[1] = cv::Vec3b(255, 0, 0);
    color[2] = cv::Vec3b(0, 255, 0);
    color[3] = cv::Vec3b(0, 0, 255);
    color[4] = cv::Vec3b(255, 255, 0);
    color[5] = cv::Vec3b(255, 0, 255);
    color[6] = cv::Vec3b(0, 255, 255);

    cv::namedWindow("Color");
    cv::namedWindow("Depth");
    cv::namedWindow("Player");
    cv::namedWindow("Skeleton");

    while (1) {
        // Wait until all three streams have a new frame ready.
        ResetEvent(hColorEvent);
        ResetEvent(hDepthPlayerEvent);
        ResetEvent(hSkeletonEvent);
        WaitForMultipleObjects(ARRAYSIZE(hEvents), hEvents, true, INFINITE);

        NUI_IMAGE_FRAME colorImageFrame = { 0 };
        hResult = pSensor->NuiImageStreamGetNextFrame(hColorHandle, 0, &colorImageFrame);
        if (FAILED(hResult)) {
            std::cerr << "Error : NuiImageStreamGetNextFrame( COLOR )" << std::endl;
            return -1;
        }
        INuiFrameTexture* pColorFrameTexture = colorImageFrame.pFrameTexture;
        NUI_LOCKED_RECT colorLockedRect;
        pColorFrameTexture->LockRect(0, &colorLockedRect, nullptr, 0);

        NUI_IMAGE_FRAME depthPlayerImageFrame = { 0 };
        hResult = pSensor->NuiImageStreamGetNextFrame(hDepthPlayerHandle, 0, &depthPlayerImageFrame);
        if (FAILED(hResult)) {
            std::cerr << "Error : NuiImageStreamGetNextFrame( DEPTH&PLAYER )" << std::endl;
            return -1;
        }
        BOOL nearMode = false;
        INuiFrameTexture* pDepthPlayerFrameTexture = nullptr;
        pSensor->NuiImageFrameGetDepthImagePixelFrameTexture(hDepthPlayerHandle, &depthPlayerImageFrame, &nearMode, &pDepthPlayerFrameTexture);
        NUI_LOCKED_RECT depthPlayerLockedRect;
        pDepthPlayerFrameTexture->LockRect(0, &depthPlayerLockedRect, nullptr, 0);

        NUI_SKELETON_FRAME skeletonFrame = { 0 };
        hResult = pSensor->NuiSkeletonGetNextFrame(0, &skeletonFrame);
        if (FAILED(hResult)) {
            std::cerr << "Error : NuiSkeletonGetNextFrame" << std::endl;
            return -1;
        }
        /*
        NUI_TRANSFORM_SMOOTH_PARAMETERS smoothParameter;
        smoothParameter.fSmoothing = 0.5;
        smoothParameter.fCorrection = 0.5;
        smoothParameter.fPrediction = 0.0f;
        smoothParameter.fJitterRadius = 0.05f;
        smoothParameter.fMaxDeviationRadius = 0.04f;
        hResult = NuiTransformSmooth( &skeletonFrame, &smoothParameter );
        */

        cv::Mat colorMat(height, width, CV_8UC4, reinterpret_cast<unsigned char*>(colorLockedRect.pBits));

        // Re-project depth and player index into color space.
        cv::Mat bufferMat = cv::Mat::zeros(height, width, CV_16UC1);
        cv::Mat playerMat = cv::Mat::zeros(height, width, CV_8UC3);
        NUI_DEPTH_IMAGE_PIXEL* pDepthPlayerPixel = reinterpret_cast<NUI_DEPTH_IMAGE_PIXEL*>(depthPlayerLockedRect.pBits);
        pCordinateMapper->MapDepthFrameToColorFrame(NUI_IMAGE_RESOLUTION_640x480, width * height, pDepthPlayerPixel, NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480, width * height, &pColorPoint[0]);
        for (int y = 0; y < height; y++) {
            for (int x = 0; x < width; x++) {
                unsigned int index = y * width + x;
                // Mapped coordinates can fall outside the color frame; skip
                // those instead of writing out of bounds.
                if (pColorPoint[index].x >= 0 && pColorPoint[index].x < width && pColorPoint[index].y >= 0 && pColorPoint[index].y < height) {
                    bufferMat.at<unsigned short>(pColorPoint[index].y, pColorPoint[index].x) = pDepthPlayerPixel[index].depth;
                    playerMat.at<cv::Vec3b>(pColorPoint[index].y, pColorPoint[index].x) = color[pDepthPlayerPixel[index].playerIndex];
                }
            }
        }
        // Scale 0-10000mm depth into a displayable 8-bit image (near = bright).
        cv::Mat depthMat(height, width, CV_8UC1);
        bufferMat.convertTo(depthMat, CV_8U, -255.0f / 10000.0f, 255.0f);

        cv::Mat skeletonMat = cv::Mat::zeros(height, width, CV_8UC3);
        NUI_COLOR_IMAGE_POINT colorPoint;
        for (int count = 0; count < NUI_SKELETON_COUNT; count++) {
            NUI_SKELETON_DATA skeletonData = skeletonFrame.SkeletonData[count];
            if (skeletonData.eTrackingState == NUI_SKELETON_TRACKED) {
                for (int position = 0; position < NUI_SKELETON_POSITION_COUNT; position++) {
                    pCordinateMapper->MapSkeletonPointToColorPoint(&skeletonData.SkeletonPositions[position], NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480, &colorPoint);
                    if ((colorPoint.x >= 0) && (colorPoint.x < width) && (colorPoint.y >= 0) && (colorPoint.y < height)) {
                        cv::circle(skeletonMat, cv::Point(colorPoint.x, colorPoint.y), 10, static_cast<cv::Scalar>(color[count + 1]), -1, CV_AA);
                    }
                }

                // Label the head with the hip-center depth (meters).
                std::stringstream ss;
                ss << skeletonData.SkeletonPositions[NUI_SKELETON_POSITION_HIP_CENTER].z;
                pCordinateMapper->MapSkeletonPointToColorPoint(&skeletonData.SkeletonPositions[NUI_SKELETON_POSITION_HEAD], NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480, &colorPoint);
                if ((colorPoint.x >= 0) && (colorPoint.x < width) && (colorPoint.y >= 0) && (colorPoint.y < height)) {
                    cv::putText(skeletonMat, ss.str(), cv::Point(colorPoint.x - 50, colorPoint.y - 20), cv::FONT_HERSHEY_SIMPLEX, 1.5f, static_cast<cv::Scalar>(color[count + 1]));
                }
            }
            else if (skeletonData.eTrackingState == NUI_SKELETON_POSITION_ONLY) {
                pCordinateMapper->MapSkeletonPointToColorPoint(&skeletonData.SkeletonPositions[NUI_SKELETON_POSITION_HIP_CENTER], NUI_IMAGE_TYPE_COLOR, NUI_IMAGE_RESOLUTION_640x480, &colorPoint);
                if ((colorPoint.x >= 0) && (colorPoint.x < width) && (colorPoint.y >= 0) && (colorPoint.y < height)) {
                    cv::circle(skeletonMat, cv::Point(colorPoint.x, colorPoint.y), 10, static_cast<cv::Scalar>(color[count + 1]), -1, CV_AA);
                }
            }
        }

        cv::imshow("Color", colorMat);
        cv::imshow("Depth", depthMat);
        cv::imshow("Player", playerMat);
        cv::imshow("Skeleton", skeletonMat);

        pColorFrameTexture->UnlockRect(0);
        pDepthPlayerFrameTexture->UnlockRect(0);
        pSensor->NuiImageStreamReleaseFrame(hColorHandle, &colorImageFrame);
        pSensor->NuiImageStreamReleaseFrame(hDepthPlayerHandle, &depthPlayerImageFrame);

        if (cv::waitKey(30) == VK_ESCAPE) {
            break;
        }
    }

    // Disable tracking before shutting the sensor down, then release resources.
    pSensor->NuiSkeletonTrackingDisable();
    pSensor->NuiShutdown();
    pCordinateMapper->Release();
    CloseHandle(hColorEvent);
    CloseHandle(hDepthPlayerEvent);
    CloseHandle(hSkeletonEvent);

    cv::destroyAllWindows();

    return 0;
}
bool UKinect::pollInteraction() {
  NUI_USER_INFO user;
  NUI_INTERACTION_FRAME Interaction_Frame;
  hr = interactionStream->GetNextFrame(0, &Interaction_Frame);
  if (FAILED(hr)) {
    cerr << hex << hr << endl;
    cerr << "[UKinect] WARNING: Interaction poll failed." << endl;
    return false;
  }

  // Find the first user with a valid tracking ID.
  for (int i = 0; i < NUI_SKELETON_COUNT; ++i) {
    user = Interaction_Frame.UserInfos[i];
    if (user.SkeletonTrackingId != 0) break;
  }

  NUI_HANDPOINTER_INFO handLeft  = user.HandPointerInfos[0];
  NUI_HANDPOINTER_INFO handRight = user.HandPointerInfos[1];
  NUI_HANDPOINTER_STATE stateLeft  = (NUI_HANDPOINTER_STATE)handLeft.State;
  NUI_HANDPOINTER_STATE stateRight = (NUI_HANDPOINTER_STATE)handRight.State;

  interID = user.SkeletonTrackingId;

  // Map each state flag to the matching UVar (the original crossed the
  // ACTIVE/INTERACTIVE/PRESSED flags; they are paired by name here).
  interLeftTracked      = (bool)(stateLeft & NUI_HANDPOINTER_STATE_TRACKED);
  interLeftActive       = (bool)(stateLeft & NUI_HANDPOINTER_STATE_ACTIVE);
  interLeftInteractive  = (bool)(stateLeft & NUI_HANDPOINTER_STATE_INTERACTIVE);
  interLeftPressed      = (bool)(stateLeft & NUI_HANDPOINTER_STATE_PRESSED);
  interRightTracked     = (bool)(stateRight & NUI_HANDPOINTER_STATE_TRACKED);
  interRightActive      = (bool)(stateRight & NUI_HANDPOINTER_STATE_ACTIVE);
  interRightInteractive = (bool)(stateRight & NUI_HANDPOINTER_STATE_INTERACTIVE);
  interRightPressed     = (bool)(stateRight & NUI_HANDPOINTER_STATE_PRESSED);

  if (stateLeft != NUI_HANDPOINTER_STATE_NOT_TRACKED) {
    interLeftX = handLeft.X;
    interLeftY = -handLeft.Y;
    interLeftRawX = handLeft.RawX;
    interLeftRawY = -handLeft.RawY;
    interLeftRawZ = handLeft.RawZ;
    interLeftPress = handLeft.PressExtent;
    if (handLeft.HandEventType > 0) interLeftEvent = handLeft.HandEventType;
  }

  if (stateRight != NUI_HANDPOINTER_STATE_NOT_TRACKED) { // was stateLeft; the right hand is checked here
    interRightX = handRight.X;
    interRightY = -handRight.Y;
    interRightRawX = handRight.RawX;
    interRightRawY = -handRight.RawY;
    interRightRawZ = handRight.RawZ;
    interRightPress = handRight.PressExtent;
    if (handRight.HandEventType > 0) interRightEvent = handRight.HandEventType;
  }

  if (interVisualization) {
    DWORD t_width, t_height;
    NuiImageResolutionToSize((NUI_IMAGE_RESOLUTION)colorResolution.as<int>(), t_width, t_height);
    if ((color) && (interVisualizationOnColor.as<int>()))
      interCVMat = colorCVMat.clone(); // use the color image as background if the color function is enabled
    else
      interCVMat = Mat(Size(static_cast<int>(t_width), static_cast<int>(t_height)), CV_8UC3, CV_RGB(0, 0, 0));

    if (stateRight != NUI_HANDPOINTER_STATE_NOT_TRACKED) { // was stateLeft
      drawHand(user.SkeletonTrackingId, 11, interRightEvent.as<int>(), interRightPressed.as<bool>(), interRightInteractive.as<bool>(), interRightPress.as<double>());
    }
    if (stateLeft != NUI_HANDPOINTER_STATE_NOT_TRACKED) {
      drawHand(user.SkeletonTrackingId, 7, interLeftEvent.as<int>(), interLeftPressed.as<bool>(), interLeftInteractive.as<bool>(), interLeftPress.as<double>());
    }

    interBin.image.width  = interCVMat.cols;
    interBin.image.height = interCVMat.rows;
    interBin.image.size   = interCVMat.cols * interCVMat.rows * 3; // was skeletonCVMat.rows
    interBin.image.data   = interCVMat.data;
    interImage = interBin;
  }
  return true;
}
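// Hedged setup sketch for the interactionStream polled above. The Kinect
// Interactions library creates the stream from the sensor plus a client that
// implements INuiInteractionClient; the client instance and helper names are
// assumptions. The stream must also be fed depth frames (ProcessDepth) and
// skeleton frames (ProcessSkeleton, done in pollSkeleton above) elsewhere.
HRESULT UKinect::initInteraction() { // hypothetical helper
  HRESULT res = NuiCreateInteractionStream(sensor, &interactionClient, &interactionStream);
  if (FAILED(res)) {
    cerr << "[UKinect] ERROR: NuiCreateInteractionStream failed." << endl;
    return res;
  }
  return interactionStream->Enable(NULL); // no ready event; frames are polled
}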