void getKinectData(GLubyte* dest) {
    IColorFrame* frame = NULL;
    if (SUCCEEDED(reader->AcquireLatestFrame(&frame))) {
        frame->CopyConvertedFrameDataToArray(width * height * 4, data, ColorImageFormat_Bgra);
    }
    if (frame) frame->Release();
}
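The example above assumes a sensor and a color frame reader that were opened elsewhere. A minimal sketch of that setup follows; the names (sensor, reader, width, height) are assumptions chosen to match the example, not taken from its original project.

// Minimal setup sketch, assuming <Kinect.h> and early-return error handling.
// Variable names mirror the getKinectData() example above.
#include <Kinect.h>

IKinectSensor* sensor = nullptr;
IColorFrameReader* reader = nullptr;
int width = 0, height = 0;

bool initKinect() {
    if (FAILED(GetDefaultKinectSensor(&sensor)) || !sensor) return false;
    if (FAILED(sensor->Open())) return false;

    IColorFrameSource* source = nullptr;
    if (FAILED(sensor->get_ColorFrameSource(&source))) return false;

    IFrameDescription* desc = nullptr;
    if (SUCCEEDED(source->get_FrameDescription(&desc))) {
        desc->get_Width(&width);    // 1920 for the Kinect v2 color stream
        desc->get_Height(&height);  // 1080
        desc->Release();
    }

    HRESULT hr = source->OpenReader(&reader);
    source->Release();
    return SUCCEEDED(hr);
}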
void idle() {
    // Read color data
    IColorFrame* pCFrame = nullptr;
    if (pColorFrameReader->AcquireLatestFrame(&pCFrame) == S_OK) {
        pCFrame->CopyConvertedFrameDataToArray(uColorBufferSize, pColorBuffer, ColorImageFormat_Rgba);
        pCFrame->Release();
        pCFrame = nullptr;
    }

    // Read depth data
    IDepthFrame* pDFrame = nullptr;
    if (pDepthFrameReader->AcquireLatestFrame(&pDFrame) == S_OK) {
        pDFrame->CopyFrameDataToArray(uDepthPointNum, pDepthBuffer);
        pDFrame->Release();
        pDFrame = nullptr;

        // Map each color pixel to camera space
        pCoordinateMapper->MapColorFrameToCameraSpace(uDepthPointNum, pDepthBuffer, uColorPointNum, pCSPoints);
    }
}
bool KinectInterface::getFrameData(IMultiSourceFrame* frame, cv::Mat& intensity_mat, cv::Mat& depth_mat, cv::Mat& pos_mat) {
    // Obtain depth frame
    IDepthFrame* depthframe = nullptr;
    if (FAILED(depthFrameReader->AcquireLatestFrame(&depthframe))) return false;
    if (!depthframe) return false;

    // Get data from frame
    unsigned int sz;
    unsigned short* buf;
    if (FAILED(depthframe->AccessUnderlyingBuffer(&sz, &buf))) return false;

    // Get depth -> xyz mapping
    if (FAILED(mapper->MapDepthFrameToCameraSpace(width * height, buf, width * height, depth2xyz))) return false;

    // Get depth -> rgb image mapping
    if (FAILED(mapper->MapDepthFrameToColorSpace(width * height, buf, width * height, depth2rgb))) return false;

    // Save depth
    if (FAILED(depthframe->CopyFrameDataToArray(height * width, depth_data))) {
        depthframe->Release();
        return false;
    }
    depthframe->Release();

    // Obtain RGB frame
    IColorFrame* colorframe = nullptr;
    if (FAILED(colorFrameReader->AcquireLatestFrame(&colorframe))) return false;
    if (!colorframe) return false;

    // Get data from frame
    if (FAILED(colorframe->CopyConvertedFrameDataToArray(colorwidth * colorheight * 4, rgbimage, ColorImageFormat_Rgba))) return false;

    cv::Mat tmp_depth = cv::Mat::zeros(colorheight, colorwidth, CV_16UC1);
    cv::Mat tmp_pos = cv::Mat::zeros(colorheight, colorwidth, CV_32FC3);
    cv::Mat depth_org(height, width, CV_16UC1, depth_data);
    cv::Mat tmp_rgb(colorheight, colorwidth, CV_8UC4, rgbimage);

    // Write color array for vertices
    for (int i = 0; i < width * height; i++) {
        ColorSpacePoint p = depth2rgb[i];
        int iY = (int)(p.Y + 0.5);
        int iX = (int)(p.X + 0.5);
        if (iX >= 0 && iY >= 0 && iX < colorwidth && iY < colorheight) {
            // Check if color pixel coordinates are in bounds
            tmp_depth.at<unsigned short>(iY, iX) = depth_data[i];
            //tmp_pos.at<float>(iY, iX, 0) = depth2xyz[i].X;
            //tmp_pos.at<float>(iY, iX, 1) = depth2xyz[i].Y;
            //tmp_pos.at<float>(iY, iX, 2) = depth2xyz[i].Z;
        }
    }
    if (colorframe) colorframe->Release();

    cv::resize(tmp_rgb(cv::Rect(240, 0, 1440, 1080)), intensity_mat, cv::Size(640, 480));
    cv::resize(tmp_depth(cv::Rect(240, 0, 1440, 1080)), depth_mat, cv::Size(640, 480));
    cv::resize(tmp_pos(cv::Rect(240, 0, 1440, 1080)), pos_mat, cv::Size(640, 480));
    cv::cvtColor(intensity_mat, intensity_mat, CV_RGBA2GRAY);
    return true;
}
// Retrieve the color data
GLuint KinectV2::getColor() const {
    // Bind the color texture
    glBindTexture(GL_TEXTURE_2D, colorTexture);

    // If the next color frame has arrived
    IColorFrame* colorFrame;
    if (colorReader->AcquireLatestFrame(&colorFrame) == S_OK) {
        // Fetch the color data, converted to BGRA format
        colorFrame->CopyConvertedFrameDataToArray(colorCount * 4, static_cast<BYTE*>(color), ColorImageFormat::ColorImageFormat_Bgra);

        // Release the color frame
        colorFrame->Release();

        // Upload the color data to the texture
        glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, colorWidth, colorHeight, GL_BGRA, GL_UNSIGNED_BYTE, color);
    }
    return colorTexture;
}
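getColor() updates the texture with glTexSubImage2D, so the texture storage must already exist. A hedged sketch of that one-time allocation follows; colorTexture, colorWidth and colorHeight come from the example above, while the internal format and filter choices are assumptions.

// One-time texture allocation getColor() appears to rely on (assumed, not from the source).
glGenTextures(1, &colorTexture);
glBindTexture(GL_TEXTURE_2D, colorTexture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, colorWidth, colorHeight, 0,
             GL_BGRA, GL_UNSIGNED_BYTE, nullptr);   // allocate storage only, no pixel data yet
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);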
void MKinect::getColorData(IMultiSourceFrame* frame, QImage& dest) {
    IColorFrame* colorframe = nullptr;
    IColorFrameReference* frameref = NULL;
    frame->get_ColorFrameReference(&frameref);
    if (frameref) {
        frameref->AcquireFrame(&colorframe);
        frameref->Release();
    }
    if (!colorframe) return;

    // Process color frame data...
    colorframe->CopyConvertedFrameDataToArray(KinectColorWidth * KinectColorHeight * 4, data, ColorImageFormat_Bgra);
    QImage colorImage(data, KinectColorWidth, KinectColorHeight, QImage::Format_RGB32);
    //QImage depthImage(depthData.planes[0], width2, height2, QImage::Format_RGB32);
    dest = colorImage;
    //QDir dir("../tests/k2/last_test");
    //if (!dir.exists()) {
    //    dir.mkpath(".");
    //    colorImage.save("../tests/k2/last_test/image_" + QString::number(_actual_frame) + ".png", 0);
    //}
    //else {
    //    colorImage.save("../tests/k2/last_test/image_" + QString::number(_actual_frame) + ".png", 0);
    //}
    if (colorframe) colorframe->Release();
}
void capture_point() // Capture the human body
{
    int count_number = 0;
#ifdef HUMANCOLORIMAGE
    // Read color data
    IColorFrame* pCFrame = nullptr;
    if (pColorFrameReader->AcquireLatestFrame(&pCFrame) == S_OK) {
        // Grab the Kinect frame with OpenCV
        cv::Mat mImg(iColorHeight, iColorWidth, CV_8UC4);
        cv::Mat gray_image(iColorHeight, iColorWidth, CV_8UC1);
        cv::namedWindow("Color Image", 0);  // Create a window to display the color image

        if (pCFrame->CopyConvertedFrameDataToArray(uColorBufferSize, mImg.data, ColorImageFormat_Bgra) == S_OK) {
            if ((int)pColorBuffer[0] != 0) {
                raw_color = mImg.clone();
                cv::cvtColor(mImg, gray_image, CV_BGRA2GRAY);  // Convert color to grayscale
                cv::imshow("Color Image", raw_color);          // Show the color image
# ifdef BACKGROUND
                cv::imwrite("background_color.bmp", mImg);     // Save the background color image
# endif
                cout << "Press any key on the image window to continue!" << endl;
                cv::waitKey();
            }
        }
# ifdef CAPTURE
        absdiff();
        watershed();
        human_mask();
# endif
        cv::destroyWindow("Color Image");
        pCFrame->Release();
        pCFrame = nullptr;
    }
#endif

    for (int y = 0; y < iColorHeight; ++y) {
        for (int x = 0; x < iColorWidth; ++x) {
            int idx = x + y * iColorWidth;
            CameraSpacePoint& rPt = pCSPoints[idx];
            if (rPt.Z <= 0) rPt.X = rPt.Y = rPt.Z = 0;
            if (rPt.Z < 2.35 && rPt.Z != 0 && rPt.Z > 1.1 && rPt.X < 0.69 && rPt.X > -0.69 && rPt.Y > -0.88) {
                //raw_point << rPt.X * 1000 << " " << rPt.Z * 1000 << " " << rPt.Y * 1000 << endl;
                if (rPt.Y > -0.74 && idx % 75 == 0) {
                    //for_icp << rPt.X * 1000 << " " << rPt.Z * 1000 << " " << rPt.Y * 1000 << endl;
                }
            }
            count_number++;
        }
    }
    //for_icp.close();
    //raw_point.close();
    //if (count_number > 0)
    //    system("pause");
}
int main(int argc, char** argv)
{
    // 1a. Get default Sensor
    std::cout << "Try to get default sensor" << std::endl;
    IKinectSensor* pSensor = nullptr;
    if (GetDefaultKinectSensor(&pSensor) != S_OK) {
        cerr << "Get Sensor failed" << std::endl;
        return -1;
    }

    // 1b. Open sensor
    std::cout << "Try to open sensor" << std::endl;
    if (pSensor->Open() != S_OK) {
        cerr << "Can't open sensor" << std::endl;
        return -1;
    }

    // 2. Color related code
    IColorFrameReader* pColorFrameReader = nullptr;
    cv::Mat mColorImg;
    UINT uBufferSize = 0;
    {
        // 2a. Get color frame source
        std::cout << "Try to get color source" << std::endl;
        IColorFrameSource* pFrameSource = nullptr;
        if (pSensor->get_ColorFrameSource(&pFrameSource) != S_OK) {
            cerr << "Can't get color frame source" << std::endl;
            return -1;
        }

        // 2b. Get frame description
        std::cout << "get color frame description" << std::endl;
        int iWidth = 0;
        int iHeight = 0;
        IFrameDescription* pFrameDescription = nullptr;
        if (pFrameSource->get_FrameDescription(&pFrameDescription) == S_OK) {
            pFrameDescription->get_Width(&iWidth);
            pFrameDescription->get_Height(&iHeight);
        }
        pFrameDescription->Release();
        pFrameDescription = nullptr;

        // 2c. Get frame reader
        std::cout << "Try to get color frame reader" << std::endl;
        if (pFrameSource->OpenReader(&pColorFrameReader) != S_OK) {
            cerr << "Can't get color frame reader" << std::endl;
            return -1;
        }

        // 2d. Release frame source
        std::cout << "Release frame source" << std::endl;
        pFrameSource->Release();
        pFrameSource = nullptr;

        // Prepare OpenCV data
        mColorImg = cv::Mat(iHeight, iWidth, CV_8UC4);
        uBufferSize = iHeight * iWidth * 4 * sizeof(BYTE);
    }

    // 3. Body related code
    IBodyFrameReader* pBodyFrameReader = nullptr;
    IBody** aBodyData = nullptr;
    INT32 iBodyCount = 0;
    {
        // 3a. Get frame source
        std::cout << "Try to get body source" << std::endl;
        IBodyFrameSource* pFrameSource = nullptr;
        if (pSensor->get_BodyFrameSource(&pFrameSource) != S_OK) {
            cerr << "Can't get body frame source" << std::endl;
            return -1;
        }

        // 3b. Get the number of bodies
        if (pFrameSource->get_BodyCount(&iBodyCount) != S_OK) {
            cerr << "Can't get body count" << std::endl;
            return -1;
        }
        std::cout << " > Can trace " << iBodyCount << " bodies" << std::endl;
        aBodyData = new IBody*[iBodyCount];
        for (int i = 0; i < iBodyCount; ++i)
            aBodyData[i] = nullptr;

        // 3c. Get frame reader
        std::cout << "Try to get body frame reader" << std::endl;
        if (pFrameSource->OpenReader(&pBodyFrameReader) != S_OK) {
            cerr << "Can't get body frame reader" << std::endl;
            return -1;
        }

        // 3d. Release frame source
        std::cout << "Release frame source" << std::endl;
        pFrameSource->Release();
        pFrameSource = nullptr;
    }

    // 4. Get CoordinateMapper
    ICoordinateMapper* pCoordinateMapper = nullptr;
    if (pSensor->get_CoordinateMapper(&pCoordinateMapper) != S_OK) {
        std::cout << "Can't get coordinate mapper" << std::endl;
        return -1;
    }

    // Enter main loop
    cv::namedWindow("Body Image");

    // Debug: output the velocity of joints
    ofstream current_average_velocityTXT("current_average_velocity.txt");
    ofstream average_velocityTXT("average_velocity.txt");
    int frame_count = 0;
    int frame_count_for_standby = 0;
    float positionX0[25] = { 0 };
    float positionX1[25] = { 0 };
    float positionY0[25] = { 0 };
    float positionY1[25] = { 0 };
    float positionZ0[25] = { 0 };
    float positionZ1[25] = { 0 };
    float velocityX[25] = { 0 };
    float velocityY[25] = { 0 };
    float velocityZ[25] = { 0 };
    float current_velocity[25] = { 0 };
    float velocityee[8] = { 0 };
    float current_total_velocity = 0;
    float current_average_velocity = 0;
    float total_velocity = 0;
    float average_velocity = 0;

    while (true) {
        // 4a. Get last frame
        IColorFrame* pColorFrame = nullptr;
        if (pColorFrameReader->AcquireLatestFrame(&pColorFrame) == S_OK) {
            // 4c. Copy to OpenCV image
            if (pColorFrame->CopyConvertedFrameDataToArray(uBufferSize, mColorImg.data, ColorImageFormat_Bgra) != S_OK) {
                cerr << "Data copy error" << endl;
            }
            // 4e. Release frame
            pColorFrame->Release();
        }
        cv::Mat mImg = mColorImg.clone();

        // 4b. Get body data
        IBodyFrame* pBodyFrame = nullptr;
        if (pBodyFrameReader->AcquireLatestFrame(&pBodyFrame) == S_OK) {
            if (pBodyFrame->GetAndRefreshBodyData(iBodyCount, aBodyData) == S_OK) {
                // 4c. For each body
                for (int i = 0; i < iBodyCount; ++i) {
                    IBody* pBody = aBodyData[i];

                    // Check if the body is tracked
                    BOOLEAN bTracked = false;
                    if ((pBody->get_IsTracked(&bTracked) == S_OK) && bTracked) {
                        // Get joint positions
                        Joint aJoints[JointType::JointType_Count];
                        if (pBody->GetJoints(JointType::JointType_Count, aJoints) == S_OK) {
                            DrawLine(mImg, aJoints[JointType_SpineBase], aJoints[JointType_SpineMid], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_SpineMid], aJoints[JointType_SpineShoulder], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_SpineShoulder], aJoints[JointType_Neck], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_Neck], aJoints[JointType_Head], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_SpineShoulder], aJoints[JointType_ShoulderLeft], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_ShoulderLeft], aJoints[JointType_ElbowLeft], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_ElbowLeft], aJoints[JointType_WristLeft], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_WristLeft], aJoints[JointType_HandLeft], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_HandLeft], aJoints[JointType_HandTipLeft], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_HandLeft], aJoints[JointType_ThumbLeft], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_SpineShoulder], aJoints[JointType_ShoulderRight], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_ShoulderRight], aJoints[JointType_ElbowRight], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_ElbowRight], aJoints[JointType_WristRight], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_WristRight], aJoints[JointType_HandRight], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_HandRight], aJoints[JointType_HandTipRight], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_HandRight], aJoints[JointType_ThumbRight], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_SpineBase], aJoints[JointType_HipLeft], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_HipLeft], aJoints[JointType_KneeLeft], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_KneeLeft], aJoints[JointType_AnkleLeft], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_AnkleLeft], aJoints[JointType_FootLeft], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_SpineBase], aJoints[JointType_HipRight], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_HipRight], aJoints[JointType_KneeRight], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_KneeRight], aJoints[JointType_AnkleRight], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_AnkleRight], aJoints[JointType_FootRight], pCoordinateMapper);
                        }

                        // Debug: print out the frame number
                        std::cout << "frame " << ++frame_count << std::endl;

                        // Shift the velocity history (oldest last) and sum it
                        for (int j = 7; j >= 1; --j) {
                            velocityee[j] = velocityee[j - 1];
                            total_velocity += velocityee[j];
                        }
                        average_velocity = total_velocity / 8.0;

                        // Determine whether the person is still
                        if (average_velocity <= 0.0015) {
                            if (frame_count_for_standby == 0) {
                                PlaySound(TEXT("Alarm02.wav"), NULL, SND_FILENAME);
                                std::cout << "Start capturing points!" << std::endl;
                            }
                            // Count the frames whose velocity is below the threshold
                            frame_count_for_standby++;
                            if (frame_count_for_standby >= 5) {
                                frame_count_for_standby = 0;
                            }
                        }

                        // Debug: output the average velocity
                        average_velocityTXT << frame_count << " " << average_velocity << std::endl;
                        total_velocity = 0;

                        // Update the average velocity
                        int available_joints = 0;
                        for (int i = 0; i < 25; i++) {
                            // X
                            positionX1[i] = positionX0[i];
                            positionX0[i] = aJoints[i].Position.X;
                            velocityX[i] = (positionX1[i] - positionX0[i]) * (positionX1[i] - positionX0[i]);
                            // Y
                            positionY1[i] = positionY0[i];
                            positionY0[i] = aJoints[i].Position.Y;
                            velocityY[i] = (positionY1[i] - positionY0[i]) * (positionY1[i] - positionY0[i]);
                            // Z
                            positionZ1[i] = positionZ0[i];
                            positionZ0[i] = aJoints[i].Position.Z;
                            velocityZ[i] = (positionZ1[i] - positionZ0[i]) * (positionZ1[i] - positionZ0[i]);

                            current_velocity[i] = sqrtf(velocityX[i] + velocityY[i] + velocityZ[i]);
                            // Exclude outlier velocities
                            if (current_velocity[i] < 0.01) {
                                current_total_velocity += current_velocity[i];
                                available_joints++;
                            }
                        }
                        // If no joint is available, keep the velocity of the last frame
                        if (available_joints != 0) {
                            current_average_velocity = current_total_velocity / available_joints;
                        }
                        velocityee[0] = current_average_velocity;

                        // Debug: output the current average velocity
                        current_average_velocityTXT << frame_count << " " << current_average_velocity << std::endl;
                        current_total_velocity = 0;
                    }
                }
            }
            else {
                cerr << "Can't read body data" << endl;
            }
            // 4e. Release frame
            pBodyFrame->Release();
        }

        // Show image
        cv::imshow("Body Image", mImg);

        // Check keyboard input
        if (cv::waitKey(30) == VK_ESCAPE) {
            break;
        }
    }

    // Delete body data array
    delete[] aBodyData;

    // Release body frame reader
    std::cout << "Release body frame reader" << std::endl;
    pBodyFrameReader->Release();
    pBodyFrameReader = nullptr;

    // Release color frame reader
    std::cout << "Release color frame reader" << std::endl;
    pColorFrameReader->Release();
    pColorFrameReader = nullptr;

    // 1c. Close sensor
    std::cout << "close sensor" << std::endl;
    pSensor->Close();

    // 1d. Release sensor
    std::cout << "Release sensor" << std::endl;
    pSensor->Release();
    pSensor = nullptr;

    return 0;
}
void Device::update()
{
    if ( mSensor != 0 ) {
        mSensor->get_Status( &mStatus );
    }
    if ( mFrameReader == 0 ) {
        return;
    }

    IAudioBeamFrame* audioFrame = 0;
    IBodyFrame* bodyFrame = 0;
    IBodyIndexFrame* bodyIndexFrame = 0;
    IColorFrame* colorFrame = 0;
    IDepthFrame* depthFrame = 0;
    IMultiSourceFrame* frame = 0;
    IInfraredFrame* infraredFrame = 0;
    ILongExposureInfraredFrame* infraredLongExposureFrame = 0;

    HRESULT hr = mFrameReader->AcquireLatestFrame( &frame );
    // TODO audio
    if ( SUCCEEDED( hr ) ) {
        console() << "SUCCEEDED " << getElapsedFrames() << endl;
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isBodyEnabled() ) {
        IBodyFrameReference* frameRef = 0;
        hr = frame->get_BodyFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) { hr = frameRef->AcquireFrame( &bodyFrame ); }
        if ( frameRef != 0 ) { frameRef->Release(); frameRef = 0; }
    }
    if ( SUCCEEDED( hr ) && mDeviceOptions.isBodyIndexEnabled() ) {
        IBodyIndexFrameReference* frameRef = 0;
        hr = frame->get_BodyIndexFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) { hr = frameRef->AcquireFrame( &bodyIndexFrame ); }
        if ( frameRef != 0 ) { frameRef->Release(); frameRef = 0; }
    }
    if ( SUCCEEDED( hr ) && mDeviceOptions.isColorEnabled() ) {
        IColorFrameReference* frameRef = 0;
        hr = frame->get_ColorFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) { hr = frameRef->AcquireFrame( &colorFrame ); }
        if ( frameRef != 0 ) { frameRef->Release(); frameRef = 0; }
    }
    if ( SUCCEEDED( hr ) && mDeviceOptions.isDepthEnabled() ) {
        IDepthFrameReference* frameRef = 0;
        hr = frame->get_DepthFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) { hr = frameRef->AcquireFrame( &depthFrame ); }
        if ( frameRef != 0 ) { frameRef->Release(); frameRef = 0; }
    }
    if ( SUCCEEDED( hr ) && mDeviceOptions.isInfraredEnabled() ) {
        IInfraredFrameReference* frameRef = 0;
        hr = frame->get_InfraredFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) { hr = frameRef->AcquireFrame( &infraredFrame ); }
        if ( frameRef != 0 ) { frameRef->Release(); frameRef = 0; }
    }
    if ( SUCCEEDED( hr ) && mDeviceOptions.isInfraredLongExposureEnabled() ) {
        ILongExposureInfraredFrameReference* frameRef = 0;
        hr = frame->get_LongExposureInfraredFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) { hr = frameRef->AcquireFrame( &infraredLongExposureFrame ); }
        if ( frameRef != 0 ) { frameRef->Release(); frameRef = 0; }
    }

    if ( SUCCEEDED( hr ) ) {
        long long time = 0L;

        // TODO audio

        IFrameDescription* bodyFrameDescription = 0;
        int32_t bodyWidth = 0;
        int32_t bodyHeight = 0;
        uint32_t bodyBufferSize = 0;
        uint8_t* bodyBuffer = 0;

        IFrameDescription* bodyIndexFrameDescription = 0;
        int32_t bodyIndexWidth = 0;
        int32_t bodyIndexHeight = 0;
        uint32_t bodyIndexBufferSize = 0;
        uint8_t* bodyIndexBuffer = 0;

        IFrameDescription* colorFrameDescription = 0;
        int32_t colorWidth = 0;
        int32_t colorHeight = 0;
        ColorImageFormat imageFormat = ColorImageFormat_None;
        uint32_t colorBufferSize = 0;
        uint8_t* colorBuffer = 0;

        IFrameDescription* depthFrameDescription = 0;
        int32_t depthWidth = 0;
        int32_t depthHeight = 0;
        uint16_t depthMinReliableDistance = 0;
        uint16_t depthMaxReliableDistance = 0;
        uint32_t depthBufferSize = 0;
        uint16_t* depthBuffer = 0;

        IFrameDescription* infraredFrameDescription = 0;
        int32_t infraredWidth = 0;
        int32_t infraredHeight = 0;
        uint32_t infraredBufferSize = 0;
        uint16_t* infraredBuffer = 0;

        IFrameDescription* infraredLongExposureFrameDescription = 0;
        int32_t infraredLongExposureWidth = 0;
        int32_t infraredLongExposureHeight = 0;
        uint32_t infraredLongExposureBufferSize = 0;
        uint16_t* infraredLongExposureBuffer = 0;

        // depthFrame may be null when the depth stream is disabled
        if ( depthFrame != 0 ) {
            hr = depthFrame->get_RelativeTime( &time );
        }

        // TODO audio
        if ( mDeviceOptions.isAudioEnabled() ) {
        }

        // TODO body
        if ( mDeviceOptions.isBodyEnabled() ) {
        }

        if ( mDeviceOptions.isBodyIndexEnabled() ) {
            if ( SUCCEEDED( hr ) ) { hr = bodyIndexFrame->get_FrameDescription( &bodyIndexFrameDescription ); }
            if ( SUCCEEDED( hr ) ) { hr = bodyIndexFrameDescription->get_Width( &bodyIndexWidth ); }
            if ( SUCCEEDED( hr ) ) { hr = bodyIndexFrameDescription->get_Height( &bodyIndexHeight ); }
            if ( SUCCEEDED( hr ) ) {
                //hr = bodyIndexFrame->AccessUnderlyingBuffer( &bodyIndexBufferSize, &bodyIndexBuffer );
            }
        }

        if ( mDeviceOptions.isColorEnabled() ) {
            if ( SUCCEEDED( hr ) ) { hr = colorFrame->get_FrameDescription( &colorFrameDescription ); }
            if ( SUCCEEDED( hr ) ) { hr = colorFrameDescription->get_Width( &colorWidth ); }
            if ( SUCCEEDED( hr ) ) { hr = colorFrameDescription->get_Height( &colorHeight ); }
            if ( SUCCEEDED( hr ) ) { hr = colorFrame->get_RawColorImageFormat( &imageFormat ); }
            if ( SUCCEEDED( hr ) ) {
                bool isAllocated = false;
                SurfaceChannelOrder channelOrder = SurfaceChannelOrder::BGRA;
                if ( imageFormat == ColorImageFormat_Bgra ) {
                    hr = colorFrame->AccessRawUnderlyingBuffer( &colorBufferSize, reinterpret_cast<uint8_t**>( &colorBuffer ) );
                    channelOrder = SurfaceChannelOrder::BGRA;
                } else if ( imageFormat == ColorImageFormat_Rgba ) {
                    hr = colorFrame->AccessRawUnderlyingBuffer( &colorBufferSize, reinterpret_cast<uint8_t**>( &colorBuffer ) );
                    channelOrder = SurfaceChannelOrder::RGBA;
                } else {
                    isAllocated = true;
                    colorBufferSize = colorWidth * colorHeight * sizeof( uint8_t ) * 4;
                    colorBuffer = new uint8_t[ colorBufferSize ];
                    hr = colorFrame->CopyConvertedFrameDataToArray( colorBufferSize, reinterpret_cast<uint8_t*>( colorBuffer ), ColorImageFormat_Rgba );
                    channelOrder = SurfaceChannelOrder::RGBA;
                }
                if ( SUCCEEDED( hr ) ) {
                    colorFrame->get_RelativeTime( &time );
                    Surface8u colorSurface = Surface8u( colorBuffer, colorWidth, colorHeight, colorWidth * sizeof( uint8_t ) * 4, channelOrder );
                    mFrame.mSurfaceColor = Surface8u( colorWidth, colorHeight, false, channelOrder );
                    mFrame.mSurfaceColor.copyFrom( colorSurface, colorSurface.getBounds() );
                    console() << "Color\n\twidth: " << colorWidth << "\n\theight: " << colorHeight
                        << "\n\tbuffer size: " << colorBufferSize << "\n\ttime: " << time << endl;
                }
                if ( isAllocated && colorBuffer != 0 ) {
                    delete[] colorBuffer;
                    colorBuffer = 0;
                }
            }
        }

        if ( mDeviceOptions.isDepthEnabled() ) {
            if ( SUCCEEDED( hr ) ) { hr = depthFrame->get_FrameDescription( &depthFrameDescription ); }
            if ( SUCCEEDED( hr ) ) { hr = depthFrameDescription->get_Width( &depthWidth ); }
            if ( SUCCEEDED( hr ) ) { hr = depthFrameDescription->get_Height( &depthHeight ); }
            if ( SUCCEEDED( hr ) ) { hr = depthFrame->get_DepthMinReliableDistance( &depthMinReliableDistance ); }
            if ( SUCCEEDED( hr ) ) { hr = depthFrame->get_DepthMaxReliableDistance( &depthMaxReliableDistance ); }
            if ( SUCCEEDED( hr ) ) { hr = depthFrame->AccessUnderlyingBuffer( &depthBufferSize, &depthBuffer ); }
            if ( SUCCEEDED( hr ) ) {
                Channel16u depthChannel = Channel16u( depthWidth, depthHeight, depthWidth * sizeof( uint16_t ), 1, depthBuffer );
                mFrame.mChannelDepth = Channel16u( depthWidth, depthHeight );
                mFrame.mChannelDepth.copyFrom( depthChannel, depthChannel.getBounds() );
                console() << "Depth\n\twidth: " << depthWidth << "\n\theight: " << depthHeight << endl;
            }
        }

        if ( mDeviceOptions.isInfraredEnabled() ) {
            if ( SUCCEEDED( hr ) ) { hr = infraredFrame->get_FrameDescription( &infraredFrameDescription ); }
            if ( SUCCEEDED( hr ) ) { hr = infraredFrameDescription->get_Width( &infraredWidth ); }
            if ( SUCCEEDED( hr ) ) { hr = infraredFrameDescription->get_Height( &infraredHeight ); }
            if ( SUCCEEDED( hr ) ) { hr = infraredFrame->AccessUnderlyingBuffer( &infraredBufferSize, &infraredBuffer ); }
            if ( SUCCEEDED( hr ) ) {
                Channel16u infraredChannel = Channel16u( infraredWidth, infraredHeight, infraredWidth * sizeof( uint16_t ), 1, infraredBuffer );
                mFrame.mChannelInfrared = Channel16u( infraredWidth, infraredHeight );
                mFrame.mChannelInfrared.copyFrom( infraredChannel, infraredChannel.getBounds() );
                console() << "Infrared\n\twidth: " << infraredWidth << "\n\theight: " << infraredHeight << endl;
            }
        }

        if ( mDeviceOptions.isInfraredLongExposureEnabled() ) {
            if ( SUCCEEDED( hr ) ) { hr = infraredLongExposureFrame->get_FrameDescription( &infraredLongExposureFrameDescription ); }
            if ( SUCCEEDED( hr ) ) { hr = infraredLongExposureFrameDescription->get_Width( &infraredLongExposureWidth ); }
            if ( SUCCEEDED( hr ) ) { hr = infraredLongExposureFrameDescription->get_Height( &infraredLongExposureHeight ); }
            if ( SUCCEEDED( hr ) ) { hr = infraredLongExposureFrame->AccessUnderlyingBuffer( &infraredLongExposureBufferSize, &infraredLongExposureBuffer ); }
            if ( SUCCEEDED( hr ) ) {
                Channel16u infraredLongExposureChannel = Channel16u( infraredLongExposureWidth, infraredLongExposureHeight, infraredLongExposureWidth * sizeof( uint16_t ), 1, infraredLongExposureBuffer );
                mFrame.mChannelInfraredLongExposure = Channel16u( infraredLongExposureWidth, infraredLongExposureHeight );
                mFrame.mChannelInfraredLongExposure.copyFrom( infraredLongExposureChannel, infraredLongExposureChannel.getBounds() );
                int64_t irLongExpTime = 0;
                hr = infraredLongExposureFrame->get_RelativeTime( &irLongExpTime );
                console() << "Infrared Long Exposure\n\twidth: " << infraredLongExposureWidth << "\n\theight: " << infraredLongExposureHeight;
                if ( SUCCEEDED( hr ) ) {
                    console() << "\n\ttimestamp: " << irLongExpTime;
                }
                console() << endl;
            }
        }

        if ( SUCCEEDED( hr ) ) {
            // TODO build Kinect2::Frame from buffers, data
            mFrame.mTimeStamp = time;
        }

        if ( bodyFrameDescription != 0 ) { bodyFrameDescription->Release(); bodyFrameDescription = 0; }
        if ( bodyIndexFrameDescription != 0 ) { bodyIndexFrameDescription->Release(); bodyIndexFrameDescription = 0; }
        if ( colorFrameDescription != 0 ) { colorFrameDescription->Release(); colorFrameDescription = 0; }
        if ( depthFrameDescription != 0 ) { depthFrameDescription->Release(); depthFrameDescription = 0; }
        if ( infraredFrameDescription != 0 ) { infraredFrameDescription->Release(); infraredFrameDescription = 0; }
        if ( infraredLongExposureFrameDescription != 0 ) { infraredLongExposureFrameDescription->Release(); infraredLongExposureFrameDescription = 0; }
    }

    if ( audioFrame != 0 ) { audioFrame->Release(); audioFrame = 0; }
    if ( bodyFrame != 0 ) { bodyFrame->Release(); bodyFrame = 0; }
    if ( bodyIndexFrame != 0 ) { bodyIndexFrame->Release(); bodyIndexFrame = 0; }
    if ( colorFrame != 0 ) { colorFrame->Release(); colorFrame = 0; }
    if ( depthFrame != 0 ) { depthFrame->Release(); depthFrame = 0; }
    if ( frame != 0 ) { frame->Release(); frame = 0; }
    if ( infraredFrame != 0 ) { infraredFrame->Release(); infraredFrame = 0; }
    if ( infraredLongExposureFrame != 0 ) { infraredLongExposureFrame->Release(); infraredLongExposureFrame = 0; }
}
void* Kinect2StreamImpl::populateFrameBuffer(int& buffWidth, int& buffHeight)
{
    buffWidth = 0;
    buffHeight = 0;

    if (m_sensorType == ONI_SENSOR_COLOR) {
        if (m_pFrameReader.color && m_pFrameBuffer.color) {
            buffWidth = 1920;
            buffHeight = 1080;

            IColorFrame* frame = NULL;
            HRESULT hr = m_pFrameReader.color->AcquireLatestFrame(&frame);
            if (SUCCEEDED(hr)) {
                ColorImageFormat imageFormat = ColorImageFormat_None;
                hr = frame->get_RawColorImageFormat(&imageFormat);
                if (SUCCEEDED(hr)) {
                    if (imageFormat == ColorImageFormat_Bgra) {
                        RGBQUAD* data;
                        UINT bufferSize;
                        frame->AccessRawUnderlyingBuffer(&bufferSize, reinterpret_cast<BYTE**>(&data));
                        memcpy(m_pFrameBuffer.color, data, 1920*1080*sizeof(RGBQUAD));
                    }
                    else {
                        frame->CopyConvertedFrameDataToArray(1920*1080*sizeof(RGBQUAD), reinterpret_cast<BYTE*>(m_pFrameBuffer.color), ColorImageFormat_Bgra);
                    }
                }
            }
            if (frame) {
                frame->Release();
            }
            return reinterpret_cast<void*>(m_pFrameBuffer.color);
        }
    }
    else if (m_sensorType == ONI_SENSOR_DEPTH) {
        if (m_pFrameReader.depth && m_pFrameBuffer.depth) {
            buffWidth = 512;
            buffHeight = 424;

            IDepthFrame* frame = NULL;
            HRESULT hr = m_pFrameReader.depth->AcquireLatestFrame(&frame);
            if (SUCCEEDED(hr)) {
                UINT16* data;
                UINT bufferSize;
                frame->AccessUnderlyingBuffer(&bufferSize, &data);
                memcpy(m_pFrameBuffer.depth, data, 512*424*sizeof(UINT16));
            }
            if (frame) {
                frame->Release();
            }
            return reinterpret_cast<void*>(m_pFrameBuffer.depth);
        }
    }
    else { // ONI_SENSOR_IR
        if (m_pFrameReader.infrared && m_pFrameBuffer.infrared) {
            buffWidth = 512;
            buffHeight = 424;

            IInfraredFrame* frame = NULL;
            HRESULT hr = m_pFrameReader.infrared->AcquireLatestFrame(&frame);
            if (SUCCEEDED(hr)) {
                UINT16* data;
                UINT bufferSize;
                frame->AccessUnderlyingBuffer(&bufferSize, &data);
                memcpy(m_pFrameBuffer.infrared, data, 512*424*sizeof(UINT16));
            }
            if (frame) {
                frame->Release();
            }
            return reinterpret_cast<void*>(m_pFrameBuffer.infrared);
        }
    }

    return NULL;
}
void KinectDevice::listen() {
    if (_listening) throw std::exception("Already listening for new frames");

    _listening = true;
    while (_listening) {
        int idx = WaitForSingleObject((HANDLE) _frameEvent, 100);
        switch (idx) {
        case WAIT_TIMEOUT:
            std::cout << ".";
            continue;
        case WAIT_OBJECT_0: {
            IMultiSourceFrameArrivedEventArgs* frameArgs = nullptr;
            IMultiSourceFrameReference* frameRef = nullptr;
            HRESULT hr = _reader->GetMultiSourceFrameArrivedEventData(_frameEvent, &frameArgs);
            if (hr == S_OK) {
                hr = frameArgs->get_FrameReference(&frameRef);
                frameArgs->Release();
            }
            if (hr == S_OK) {
                //if (_lastFrame) _lastFrame->Release();
                hr = frameRef->AcquireFrame(&_lastFrame);
                frameRef->Release();
            }
            if (hr == S_OK) {
                // Store frame data
                IDepthFrameReference* depthRef = nullptr;
                IColorFrameReference* colorRef = nullptr;
                IInfraredFrameReference* irRef = nullptr;
                ILongExposureInfraredFrameReference* hdirRef = nullptr;
                IBodyIndexFrameReference* indexRef = nullptr;

                IDepthFrame* depth = nullptr;
                IColorFrame* color = nullptr;
                IInfraredFrame* ir = nullptr;
                ILongExposureInfraredFrame* hdir = nullptr;
                IBodyIndexFrame* index = nullptr;

                UINT size;
                uint16_t* buff;
                BYTE* cbuff;

                frameLock.lock();
                if (_streams & Streams::DEPTH_STREAM) {
                    _lastFrame->get_DepthFrameReference(&depthRef);
                    depthRef->AcquireFrame(&depth);
                    if (depth) {
                        depthSwap();
                        depth->AccessUnderlyingBuffer(&size, &buff);
                        memcpy(depthData.get(), buff, size * sizeof(uint16_t));
                        depth->Release();
                    }
                    depthRef->Release();
                }
                if (_streams & Streams::COLOR_STREAM) {
                    _lastFrame->get_ColorFrameReference(&colorRef);
                    colorRef->AcquireFrame(&color);
                    if (color) {
                        //color->AccessUnderlyingBuffer(&size, &buff);
                        //memcpy(_colorData.get(), buff, size);
                        color->Release();
                    }
                    colorRef->Release();
                }
                if (_streams & Streams::IR_STREAM) {
                    _lastFrame->get_InfraredFrameReference(&irRef);
                    irRef->AcquireFrame(&ir);
                    if (ir) {
                        ir->AccessUnderlyingBuffer(&size, &buff);
                        memcpy(irData.get(), buff, size * sizeof(uint16_t));
                        ir->Release();
                    }
                    irRef->Release();
                }
                if (_streams & Streams::HDIR_STREAM) {
                    _lastFrame->get_LongExposureInfraredFrameReference(&hdirRef);
                    hdirRef->AcquireFrame(&hdir);
                    if (hdir) {
                        hdir->AccessUnderlyingBuffer(&size, &buff);
                        memcpy(hdirData.get(), buff, size * sizeof(uint16_t));
                        hdir->Release();
                    }
                    hdirRef->Release();
                }
                if (_streams & Streams::INDEX_STREAM) {
                    _lastFrame->get_BodyIndexFrameReference(&indexRef);
                    indexRef->AcquireFrame(&index);
                    if (index) {
                        index->AccessUnderlyingBuffer(&size, &cbuff);
                        memcpy(indexData.get(), cbuff, size);
                        index->Release();
                    }
                    indexRef->Release();
                }
                frameLock.unlock();
                _lastFrame->Release();
            }
            break;
        }
        }
    }
}
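The listen() loop above waits on a frame-arrived event and reads from an IMultiSourceFrameReader that must have been created and subscribed beforehand. Here is a hedged sketch of that setup; the member names (_sensor, _reader, _frameEvent, _streams) mirror the example, and the mapping from the Streams flags to FrameSourceTypes values is an assumption rather than something taken from its source.

// Sketch of the assumed setup: open a multi-source reader for the requested
// streams and subscribe to the frame-arrived event that listen() waits on.
bool KinectDevice::open(unsigned streams) {
    _streams = streams;
    if (FAILED(GetDefaultKinectSensor(&_sensor)) || FAILED(_sensor->Open()))
        return false;

    DWORD types = 0;
    if (streams & Streams::DEPTH_STREAM) types |= FrameSourceTypes_Depth;
    if (streams & Streams::COLOR_STREAM) types |= FrameSourceTypes_Color;
    if (streams & Streams::IR_STREAM)    types |= FrameSourceTypes_Infrared;
    if (streams & Streams::HDIR_STREAM)  types |= FrameSourceTypes_LongExposureInfrared;
    if (streams & Streams::INDEX_STREAM) types |= FrameSourceTypes_BodyIndex;

    if (FAILED(_sensor->OpenMultiSourceFrameReader(types, &_reader)))
        return false;

    // SubscribeMultiSourceFrameArrived yields the WAITABLE_HANDLE that
    // listen() passes to WaitForSingleObject().
    return SUCCEEDED(_reader->SubscribeMultiSourceFrameArrived(&_frameEvent));
}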
int main(int argc, char** argv)
{
    int first_time = 0;
    Size screen_size(1440, 900);           // size of the displayed (resized) image
    Scalar text_color = Scalar(0, 255, 0);
    Scalar text_color2 = Scalar(0, 255, 255);
    Scalar text_color3 = Scalar(0, 0, 255);

    inhaler_coach coach;
    coach.control = 0;
    thread mThread(test_func, &coach);

    // 1a. Get Kinect Sensor
    cout << "Try to get default sensor" << endl;
    IKinectSensor* pSensor = nullptr;
    if (GetDefaultKinectSensor(&pSensor) != S_OK) {
        cerr << "Get Sensor failed" << endl;
        return -1;
    }

    // 1b. Open sensor
    cout << "Try to open sensor" << endl;
    if (pSensor->Open() != S_OK) {
        cerr << "Can't open sensor" << endl;
        return -1;
    }

    // 2. Color related code
    IColorFrameReader* pColorFrameReader = nullptr;
    cv::Mat mColorImg;
    UINT uBufferSize = 0;
    UINT uColorPointNum = 0;
    int iWidth = 0;
    int iHeight = 0;
    {
        // 2a. Get color frame source
        cout << "Try to get color source" << endl;
        IColorFrameSource* pFrameSource = nullptr;
        if (pSensor->get_ColorFrameSource(&pFrameSource) != S_OK) {
            cerr << "Can't get color frame source" << endl;
            return -1;
        }

        // 2b. Get frame description
        cout << "get color frame description" << endl;
        IFrameDescription* pFrameDescription = nullptr;
        if (pFrameSource->get_FrameDescription(&pFrameDescription) == S_OK) {
            pFrameDescription->get_Width(&iWidth);
            pFrameDescription->get_Height(&iHeight);
        }
        pFrameDescription->Release();
        pFrameDescription = nullptr;

        // 2c. Get frame reader
        cout << "Try to get color frame reader" << endl;
        if (pFrameSource->OpenReader(&pColorFrameReader) != S_OK) {
            cerr << "Can't get color frame reader" << endl;
            return -1;
        }

        // 2d. Release frame source
        cout << "Release frame source" << endl;
        pFrameSource->Release();
        pFrameSource = nullptr;

        // Prepare OpenCV data
        mColorImg = cv::Mat(iHeight, iWidth, CV_8UC4);
        uBufferSize = iHeight * iWidth * 4 * sizeof(BYTE);
        uColorPointNum = iHeight * iWidth;
    }

    // 3. Depth related code
    IDepthFrameReader* pDepthFrameReader = nullptr;
    UINT uDepthPointNum = 0;
    int iDepthWidth = 0, iDepthHeight = 0;
    cout << "Try to get depth source" << endl;
    {
        // Get frame source
        IDepthFrameSource* pFrameSource = nullptr;
        if (pSensor->get_DepthFrameSource(&pFrameSource) != S_OK) {
            cerr << "Can't get depth frame source" << endl;
            return -1;
        }

        // Get frame description
        cout << "get depth frame description" << endl;
        IFrameDescription* pFrameDescription = nullptr;
        if (pFrameSource->get_FrameDescription(&pFrameDescription) == S_OK) {
            pFrameDescription->get_Width(&iDepthWidth);
            pFrameDescription->get_Height(&iDepthHeight);
            uDepthPointNum = iDepthWidth * iDepthHeight;
        }
        pFrameDescription->Release();
        pFrameDescription = nullptr;

        // Get frame reader
        cout << "Try to get depth frame reader" << endl;
        if (pFrameSource->OpenReader(&pDepthFrameReader) != S_OK) {
            cerr << "Can't get depth frame reader" << endl;
            return -1;
        }

        // Release frame source
        cout << "Release frame source" << endl;
        pFrameSource->Release();
        pFrameSource = nullptr;
    }

    // 4. Body related code
    IBodyFrameReader* pBodyFrameReader = nullptr;
    IBody** aBodyData = nullptr;
    INT32 iBodyCount = 0;
    {
        // 4a. Get frame source
        cout << "Try to get body source" << endl;
        IBodyFrameSource* pFrameSource = nullptr;
        if (pSensor->get_BodyFrameSource(&pFrameSource) != S_OK) {
            cerr << "Can't get body frame source" << endl;
            return -1;
        }

        // 4b. Get the number of bodies
        if (pFrameSource->get_BodyCount(&iBodyCount) != S_OK) {
            cerr << "Can't get body count" << endl;
            return -1;
        }
        cout << " > Can trace " << iBodyCount << " bodies" << endl;
        aBodyData = new IBody*[iBodyCount];
        for (int i = 0; i < iBodyCount; ++i)
            aBodyData[i] = nullptr;

        // 4c. Get frame reader
        cout << "Try to get body frame reader" << endl;
        if (pFrameSource->OpenReader(&pBodyFrameReader) != S_OK) {
            cerr << "Can't get body frame reader" << endl;
            return -1;
        }

        // 4d. Release frame source
        cout << "Release frame source" << endl;
        pFrameSource->Release();
        pFrameSource = nullptr;
    }

    // 5. Body index related code
    IBodyIndexFrameReader* pBIFrameReader = nullptr;
    cout << "Try to get body index source" << endl;
    {
        // Get frame source
        IBodyIndexFrameSource* pFrameSource = nullptr;
        if (pSensor->get_BodyIndexFrameSource(&pFrameSource) != S_OK) {
            cerr << "Can't get body index frame source" << endl;
            return -1;
        }

        // Get frame reader
        cout << "Try to get body index frame reader" << endl;
        if (pFrameSource->OpenReader(&pBIFrameReader) != S_OK) {
            cerr << "Can't get body index frame reader" << endl;
            return -1;
        }

        // Release frame source
        cout << "Release frame source" << endl;
        pFrameSource->Release();
        pFrameSource = nullptr;
    }

    // 6. Background image
    cv::Mat imgBG(iHeight, iWidth, CV_8UC3);
    imgBG.setTo(0);

    // 7. Get CoordinateMapper
    ICoordinateMapper* pCoordinateMapper = nullptr;
    if (pSensor->get_CoordinateMapper(&pCoordinateMapper) != S_OK) {
        cout << "Can't get coordinate mapper" << endl;
        return -1;
    }

    // Enter main loop
    UINT16* pDepthPoints = new UINT16[uDepthPointNum];
    BYTE* pBodyIndex = new BYTE[uDepthPointNum];
    DepthSpacePoint* pPointArray = new DepthSpacePoint[uColorPointNum];
    cv::namedWindow("Inhaler Coach");

    while (true) {
        // Get the latest color frame
        IColorFrame* pColorFrame = nullptr;
        if (pColorFrameReader->AcquireLatestFrame(&pColorFrame) == S_OK) {
            pColorFrame->CopyConvertedFrameDataToArray(uBufferSize, mColorImg.data, ColorImageFormat_Bgra);
            pColorFrame->Release();
            pColorFrame = nullptr;
        }
        cv::Mat mImg = mColorImg.clone();

        // Read depth frame
        IDepthFrame* pDepthFrame = nullptr;
        if (pDepthFrameReader->AcquireLatestFrame(&pDepthFrame) == S_OK) {
            pDepthFrame->CopyFrameDataToArray(uDepthPointNum, pDepthPoints);
            pDepthFrame->Release();
            pDepthFrame = nullptr;
        }

        // Read body index frame
        IBodyIndexFrame* pBIFrame = nullptr;
        if (pBIFrameReader->AcquireLatestFrame(&pBIFrame) == S_OK) {
            pBIFrame->CopyFrameDataToArray(uDepthPointNum, pBodyIndex);
            pBIFrame->Release();
            pBIFrame = nullptr;
        }

#ifdef COACH_DEBUG
        cv::Mat imgTarget = imgBG.clone();
        // Map color to depth
        if (pCoordinateMapper->MapColorFrameToDepthSpace(uDepthPointNum, pDepthPoints, uColorPointNum, pPointArray) == S_OK) {
            for (int y = 0; y < imgTarget.rows; ++y) {
                for (int x = 0; x < imgTarget.cols; ++x) {
                    // (x, y) in the color frame maps to rPoint in the depth frame
                    const DepthSpacePoint& rPoint = pPointArray[y * imgTarget.cols + x];

                    // Check if rPoint is in range
                    if (rPoint.X >= 0 && rPoint.X < iDepthWidth && rPoint.Y >= 0 && rPoint.Y < iDepthHeight) {
                        // Fill with the color-frame pixel if this pixel belongs to a user
                        int iIdx = (int)rPoint.X + iDepthWidth * (int)rPoint.Y;
                        if (pBodyIndex[iIdx] < 6) {
                            cv::Vec4b& rPixel = mImg.at<cv::Vec4b>(y, x);
                            imgTarget.at<cv::Vec3b>(y, x) = cv::Vec3b(rPixel[0], rPixel[1], rPixel[2]);
                        }
                    }
                }
            }
        }
#else
        cv::Mat imgTarget = mImg.clone();
#endif

        // Get body data
        IBodyFrame* pBodyFrame = nullptr;
        if (pBodyFrameReader->AcquireLatestFrame(&pBodyFrame) == S_OK) {
            if (pBodyFrame->GetAndRefreshBodyData(iBodyCount, aBodyData) == S_OK) {
                // For each body
                for (int i = 0; i < iBodyCount; ++i) {
                    IBody* pBody = aBodyData[i];

                    // Check if the body is tracked
                    BOOLEAN bTracked = false;
                    if ((pBody->get_IsTracked(&bTracked) == S_OK) && bTracked) {
                        // Get joint positions
                        Joint aJoints[JointType::JointType_Count];
                        if (pBody->GetJoints(JointType::JointType_Count, aJoints) == S_OK) {
                            if (coach.state == 0) {
                                coach.state = 1;
                                if (first_time == 0) {
                                    first_time = 1;
                                    PlaySound(TEXT("welcome.wav"), NULL, SND_FILENAME);
                                }
                            }
#ifdef COACH_DEBUG
                            DrawLine(imgTarget, aJoints[JointType_SpineBase], aJoints[JointType_SpineMid], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_SpineMid], aJoints[JointType_SpineShoulder], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_SpineShoulder], aJoints[JointType_Neck], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_Neck], aJoints[JointType_Head], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_SpineShoulder], aJoints[JointType_ShoulderLeft], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_ShoulderLeft], aJoints[JointType_ElbowLeft], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_ElbowLeft], aJoints[JointType_WristLeft], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_WristLeft], aJoints[JointType_HandLeft], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_HandLeft], aJoints[JointType_HandTipLeft], pCoordinateMapper);
                            //DrawLine(imgTarget, aJoints[JointType_HandLeft], aJoints[JointType_ThumbLeft], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_SpineShoulder], aJoints[JointType_ShoulderRight], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_ShoulderRight], aJoints[JointType_ElbowRight], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_ElbowRight], aJoints[JointType_WristRight], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_WristRight], aJoints[JointType_HandRight], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_HandRight], aJoints[JointType_HandTipRight], pCoordinateMapper);
                            //DrawLine(imgTarget, aJoints[JointType_HandRight], aJoints[JointType_ThumbRight], pCoordinateMapper);
#endif
                            ColorSpacePoint q;
                            ColorSpacePoint head;
                            //ColorSpacePoint w;
                            pCoordinateMapper->MapCameraPointToColorSpace(aJoints[JointType_Head].Position, &head);

                            // Check shaking
                            coach.shaking_detection(aJoints, pCoordinateMapper);
                            q = coach.position_checking(aJoints, pCoordinateMapper);
#ifdef COACH_DEBUG
                            circle(imgTarget, cv::Point(q.X, q.Y), 10, Scalar(0, 255, 255), 10, 8, 0);
                            //circle(imgTarget, cv::Point(q.X, q.Y), 10, Scalar(0, 255, 255), 10, 8, 0);
                            rectangle(imgTarget, Point(head.X - 50, head.Y - 40), Point(head.X + 50, head.Y + 90), Scalar(0, 255, 255), 1, 8, 0);
                            //circle(imgTarget, cv::Point(w.X, w.Y), 10, Scalar(255, 0, 255), 10, 8, 0);
#endif
                            coach.state_change_rule();
                        }
                    }
                }
            }
            else {
                cerr << "Can't read body data" << endl;
            }
            // Release frame
            pBodyFrame->Release();
        }

        switch (coach.state) {
        case 0:
            putText(imgTarget, "CMU Inhaler Coaching System", Point(120, 120), FONT_HERSHEY_DUPLEX, 2, text_color);
            break;
        case 1:
            putText(imgTarget, "Please shake the inhaler", Point(20, 120), FONT_HERSHEY_DUPLEX, 2, text_color2);
            break;
        case 2:
            putText(imgTarget, "Shaking detected", Point(20, 120), FONT_HERSHEY_DUPLEX, 2, text_color2);
            break;
        case 3:
            putText(imgTarget, "Please put the inhaler in front of your mouth", Point(20, 120), FONT_HERSHEY_DUPLEX, 2, text_color2);
            break;
        case 4:
            putText(imgTarget, "Position check OK", Point(20, 120), FONT_HERSHEY_DUPLEX, 2, text_color2);
            break;
        case 5:
            putText(imgTarget, "You forgot to shake the inhaler first!!!", Point(20, 120), FONT_HERSHEY_DUPLEX, 2, text_color3);
            break;
        }

        // Show image
        Mat dst;
        resize(imgTarget, dst, screen_size);
        imshow("Inhaler Coach", dst);

        // Check keyboard input
        if (cv::waitKey(30) == VK_ESCAPE) {
            break;
        }
    }
    mThread.join();

    // Delete body data array
    delete[] aBodyData;

    // Release body frame reader
    cout << "Release body frame reader" << endl;
    pBodyFrameReader->Release();
    pBodyFrameReader = nullptr;

    // Release color frame reader
    cout << "Release color frame reader" << endl;
    pColorFrameReader->Release();
    pColorFrameReader = nullptr;

    // Close sensor
    cout << "close sensor" << endl;
    pSensor->Close();

    // Release sensor
    cout << "Release sensor" << endl;
    pSensor->Release();
    pSensor = nullptr;

    return 0;
}
void Device::update()
{
    if ( mFrameReader == 0 ) {
        return;
    }

    IAudioBeamFrame* audioFrame = 0;
    IBodyFrame* bodyFrame = 0;
    IBodyIndexFrame* bodyIndexFrame = 0;
    IColorFrame* colorFrame = 0;
    IDepthFrame* depthFrame = 0;
    IMultiSourceFrame* frame = 0;
    IInfraredFrame* infraredFrame = 0;
    ILongExposureInfraredFrame* infraredLongExposureFrame = 0;

    HRESULT hr = mFrameReader->AcquireLatestFrame( &frame );

    if ( SUCCEEDED( hr ) && mDeviceOptions.isAudioEnabled() ) {
        // TODO audio
    }
    if ( SUCCEEDED( hr ) && mDeviceOptions.isBodyEnabled() ) {
        IBodyFrameReference* frameRef = 0;
        hr = frame->get_BodyFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) { hr = frameRef->AcquireFrame( &bodyFrame ); }
        if ( frameRef != 0 ) { frameRef->Release(); frameRef = 0; }
    }
    if ( SUCCEEDED( hr ) && mDeviceOptions.isBodyIndexEnabled() ) {
        IBodyIndexFrameReference* frameRef = 0;
        hr = frame->get_BodyIndexFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) { hr = frameRef->AcquireFrame( &bodyIndexFrame ); }
        if ( frameRef != 0 ) { frameRef->Release(); frameRef = 0; }
    }
    if ( SUCCEEDED( hr ) && mDeviceOptions.isColorEnabled() ) {
        IColorFrameReference* frameRef = 0;
        hr = frame->get_ColorFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) { hr = frameRef->AcquireFrame( &colorFrame ); }
        if ( frameRef != 0 ) { frameRef->Release(); frameRef = 0; }
    }
    if ( SUCCEEDED( hr ) && mDeviceOptions.isDepthEnabled() ) {
        IDepthFrameReference* frameRef = 0;
        hr = frame->get_DepthFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) { hr = frameRef->AcquireFrame( &depthFrame ); }
        if ( frameRef != 0 ) { frameRef->Release(); frameRef = 0; }
    }
    if ( SUCCEEDED( hr ) && mDeviceOptions.isInfraredEnabled() ) {
        IInfraredFrameReference* frameRef = 0;
        hr = frame->get_InfraredFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) { hr = frameRef->AcquireFrame( &infraredFrame ); }
        if ( frameRef != 0 ) { frameRef->Release(); frameRef = 0; }
    }
    if ( SUCCEEDED( hr ) && mDeviceOptions.isInfraredLongExposureEnabled() ) {
        ILongExposureInfraredFrameReference* frameRef = 0;
        hr = frame->get_LongExposureInfraredFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) { hr = frameRef->AcquireFrame( &infraredLongExposureFrame ); }
        if ( frameRef != 0 ) { frameRef->Release(); frameRef = 0; }
    }

    if ( SUCCEEDED( hr ) ) {
        long long timeStamp = 0L;

        // TODO audio

        std::vector<Body> bodies;
        int64_t bodyTime = 0L;
        IBody* kinectBodies[ BODY_COUNT ] = { 0 };
        Vec4f floorClipPlane = Vec4f::zero();

        Channel8u bodyIndexChannel;
        IFrameDescription* bodyIndexFrameDescription = 0;
        int32_t bodyIndexWidth = 0;
        int32_t bodyIndexHeight = 0;
        uint32_t bodyIndexBufferSize = 0;
        uint8_t* bodyIndexBuffer = 0;
        int64_t bodyIndexTime = 0L;

        Surface8u colorSurface;
        IFrameDescription* colorFrameDescription = 0;
        int32_t colorWidth = 0;
        int32_t colorHeight = 0;
        ColorImageFormat colorImageFormat = ColorImageFormat_None;
        uint32_t colorBufferSize = 0;
        uint8_t* colorBuffer = 0;

        Channel16u depthChannel;
        IFrameDescription* depthFrameDescription = 0;
        int32_t depthWidth = 0;
        int32_t depthHeight = 0;
        uint16_t depthMinReliableDistance = 0;
        uint16_t depthMaxReliableDistance = 0;
        uint32_t depthBufferSize = 0;
        uint16_t* depthBuffer = 0;

        Channel16u infraredChannel;
        IFrameDescription* infraredFrameDescription = 0;
        int32_t infraredWidth = 0;
        int32_t infraredHeight = 0;
        uint32_t infraredBufferSize = 0;
        uint16_t* infraredBuffer = 0;

        Channel16u infraredLongExposureChannel;
        IFrameDescription* infraredLongExposureFrameDescription = 0;
        int32_t infraredLongExposureWidth = 0;
        int32_t infraredLongExposureHeight = 0;
        uint32_t infraredLongExposureBufferSize = 0;
        uint16_t* infraredLongExposureBuffer = 0;

        // depthFrame may be null when the depth stream is disabled
        if ( depthFrame != 0 ) {
            hr = depthFrame->get_RelativeTime( &timeStamp );
        }

        // TODO audio
        if ( mDeviceOptions.isAudioEnabled() ) {
        }

        if ( mDeviceOptions.isBodyEnabled() ) {
            if ( SUCCEEDED( hr ) ) { hr = bodyFrame->get_RelativeTime( &bodyTime ); }
            if ( SUCCEEDED( hr ) ) { hr = bodyFrame->GetAndRefreshBodyData( BODY_COUNT, kinectBodies ); }
            if ( SUCCEEDED( hr ) ) {
                Vector4 v;
                hr = bodyFrame->get_FloorClipPlane( &v );
                floorClipPlane = toVec4f( v );
            }
            if ( SUCCEEDED( hr ) ) {
                for ( uint8_t i = 0; i < BODY_COUNT; ++i ) {
                    IBody* kinectBody = kinectBodies[ i ];
                    if ( kinectBody != 0 ) {
                        uint8_t isTracked = false;
                        hr = kinectBody->get_IsTracked( &isTracked );
                        if ( SUCCEEDED( hr ) && isTracked ) {
                            Joint joints[ JointType_Count ];
                            kinectBody->GetJoints( JointType_Count, joints );

                            JointOrientation jointOrientations[ JointType_Count ];
                            kinectBody->GetJointOrientations( JointType_Count, jointOrientations );

                            uint64_t id = 0;
                            kinectBody->get_TrackingId( &id );

                            std::map<JointType, Body::Joint> jointMap;
                            for ( int32_t j = 0; j < JointType_Count; ++j ) {
                                Body::Joint joint( toVec3f( joints[ j ].Position ), toQuatf( jointOrientations[ j ].Orientation ), joints[ j ].TrackingState );
                                jointMap.insert( pair<JointType, Body::Joint>( static_cast<JointType>( j ), joint ) );
                            }
                            Body body( id, i, jointMap );
                            bodies.push_back( body );
                        }
                    }
                }
            }
        }

        if ( mDeviceOptions.isBodyIndexEnabled() ) {
            if ( SUCCEEDED( hr ) ) { hr = bodyIndexFrame->get_RelativeTime( &bodyIndexTime ); }
            if ( SUCCEEDED( hr ) ) { hr = bodyIndexFrame->get_FrameDescription( &bodyIndexFrameDescription ); }
            if ( SUCCEEDED( hr ) ) { hr = bodyIndexFrameDescription->get_Width( &bodyIndexWidth ); }
            if ( SUCCEEDED( hr ) ) { hr = bodyIndexFrameDescription->get_Height( &bodyIndexHeight ); }
            if ( SUCCEEDED( hr ) ) { hr = bodyIndexFrame->AccessUnderlyingBuffer( &bodyIndexBufferSize, &bodyIndexBuffer ); }
            if ( SUCCEEDED( hr ) ) {
                bodyIndexChannel = Channel8u( bodyIndexWidth, bodyIndexHeight );
                memcpy( bodyIndexChannel.getData(), bodyIndexBuffer, bodyIndexWidth * bodyIndexHeight * sizeof( uint8_t ) );
            }
        }

        if ( mDeviceOptions.isColorEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = colorFrame->get_FrameDescription( &colorFrameDescription );
                if ( SUCCEEDED( hr ) ) {
                    float vFov = 0.0f;
                    float hFov = 0.0f;
                    float dFov = 0.0f;
                    colorFrameDescription->get_VerticalFieldOfView( &vFov );
                    colorFrameDescription->get_HorizontalFieldOfView( &hFov );
                    colorFrameDescription->get_DiagonalFieldOfView( &dFov );
                }
            }
            if ( SUCCEEDED( hr ) ) { hr = colorFrameDescription->get_Width( &colorWidth ); }
            if ( SUCCEEDED( hr ) ) { hr = colorFrameDescription->get_Height( &colorHeight ); }
            if ( SUCCEEDED( hr ) ) { hr = colorFrame->get_RawColorImageFormat( &colorImageFormat ); }
            if ( SUCCEEDED( hr ) ) {
                colorBufferSize = colorWidth * colorHeight * sizeof( uint8_t ) * 4;
                colorBuffer = new uint8_t[ colorBufferSize ];
                hr = colorFrame->CopyConvertedFrameDataToArray( colorBufferSize, reinterpret_cast<uint8_t*>( colorBuffer ), ColorImageFormat_Rgba );
                if ( SUCCEEDED( hr ) ) {
                    colorSurface = Surface8u( colorWidth, colorHeight, false, SurfaceChannelOrder::RGBA );
                    memcpy( colorSurface.getData(), colorBuffer, colorWidth * colorHeight * sizeof( uint8_t ) * 4 );
                }
                delete [] colorBuffer;
                colorBuffer = 0;
            }
        }

        if ( mDeviceOptions.isDepthEnabled() ) {
            if ( SUCCEEDED( hr ) ) { hr = depthFrame->get_FrameDescription( &depthFrameDescription ); }
            if ( SUCCEEDED( hr ) ) { hr = depthFrameDescription->get_Width( &depthWidth ); }
            if ( SUCCEEDED( hr ) ) { hr = depthFrameDescription->get_Height( &depthHeight ); }
            if ( SUCCEEDED( hr ) ) { hr = depthFrame->get_DepthMinReliableDistance( &depthMinReliableDistance ); }
            if ( SUCCEEDED( hr ) ) { hr = depthFrame->get_DepthMaxReliableDistance( &depthMaxReliableDistance ); }
            if ( SUCCEEDED( hr ) ) { hr = depthFrame->AccessUnderlyingBuffer( &depthBufferSize, &depthBuffer ); }
            if ( SUCCEEDED( hr ) ) {
                depthChannel = Channel16u( depthWidth, depthHeight );
                memcpy( depthChannel.getData(), depthBuffer, depthWidth * depthHeight * sizeof( uint16_t ) );
            }
        }

        if ( mDeviceOptions.isInfraredEnabled() ) {
            if ( SUCCEEDED( hr ) ) { hr = infraredFrame->get_FrameDescription( &infraredFrameDescription ); }
            if ( SUCCEEDED( hr ) ) { hr = infraredFrameDescription->get_Width( &infraredWidth ); }
            if ( SUCCEEDED( hr ) ) { hr = infraredFrameDescription->get_Height( &infraredHeight ); }
            if ( SUCCEEDED( hr ) ) { hr = infraredFrame->AccessUnderlyingBuffer( &infraredBufferSize, &infraredBuffer ); }
            if ( SUCCEEDED( hr ) ) {
                infraredChannel = Channel16u( infraredWidth, infraredHeight );
                memcpy( infraredChannel.getData(), infraredBuffer, infraredWidth * infraredHeight * sizeof( uint16_t ) );
            }
        }

        if ( mDeviceOptions.isInfraredLongExposureEnabled() ) {
            if ( SUCCEEDED( hr ) ) { hr = infraredLongExposureFrame->get_FrameDescription( &infraredLongExposureFrameDescription ); }
            if ( SUCCEEDED( hr ) ) { hr = infraredLongExposureFrameDescription->get_Width( &infraredLongExposureWidth ); }
            if ( SUCCEEDED( hr ) ) { hr = infraredLongExposureFrameDescription->get_Height( &infraredLongExposureHeight ); }
            if ( SUCCEEDED( hr ) ) { hr = infraredLongExposureFrame->AccessUnderlyingBuffer( &infraredLongExposureBufferSize, &infraredLongExposureBuffer ); }
            if ( SUCCEEDED( hr ) ) {
                infraredLongExposureChannel = Channel16u( infraredLongExposureWidth, infraredLongExposureHeight );
                memcpy( infraredLongExposureChannel.getData(), infraredLongExposureBuffer, infraredLongExposureWidth * infraredLongExposureHeight * sizeof( uint16_t ) );
            }
        }

        if ( SUCCEEDED( hr ) ) {
            mFrame.mBodies = bodies;
            mFrame.mChannelBodyIndex = bodyIndexChannel;
            mFrame.mChannelDepth = depthChannel;
            mFrame.mChannelInfrared = infraredChannel;
            mFrame.mChannelInfraredLongExposure = infraredLongExposureChannel;
            mFrame.mDeviceId = mDeviceOptions.getDeviceId();
            mFrame.mSurfaceColor = colorSurface;
            mFrame.mTimeStamp = timeStamp;
            mFrame.mFloorClipPlane = floorClipPlane;
        }

        if ( bodyIndexFrameDescription != 0 ) { bodyIndexFrameDescription->Release(); bodyIndexFrameDescription = 0; }
        if ( colorFrameDescription != 0 ) { colorFrameDescription->Release(); colorFrameDescription = 0; }
        if ( depthFrameDescription != 0 ) { depthFrameDescription->Release(); depthFrameDescription = 0; }
        if ( infraredFrameDescription != 0 ) { infraredFrameDescription->Release(); infraredFrameDescription = 0; }
        if ( infraredLongExposureFrameDescription != 0 ) { infraredLongExposureFrameDescription->Release(); infraredLongExposureFrameDescription = 0; }
    }

    if ( audioFrame != 0 ) { audioFrame->Release(); audioFrame = 0; }
    if ( bodyFrame != 0 ) { bodyFrame->Release(); bodyFrame = 0; }
    if ( bodyIndexFrame != 0 ) { bodyIndexFrame->Release(); bodyIndexFrame = 0; }
    if ( colorFrame != 0 ) { colorFrame->Release(); colorFrame = 0; }
    if ( depthFrame != 0 ) { depthFrame->Release(); depthFrame = 0; }
    if ( frame != 0 ) { frame->Release(); frame = 0; }
    if ( infraredFrame != 0 ) { infraredFrame->Release(); infraredFrame = 0; }
    if ( infraredLongExposureFrame != 0 ) { infraredLongExposureFrame->Release(); infraredLongExposureFrame = 0; }
}