// Acquires the depth frame referenced by a multi-source frame and copies it
// into the pDepth member buffer, lazily allocating that buffer on first use
// from the frame's reported dimensions.
// Fix vs. original: the HRESULT of get_DepthFrameReference was ignored, so a
// failed call left pDepthFrameReference NULL and the subsequent
// AcquireFrame dereferenced it; get_FrameDescription was likewise unchecked.
void KinectCapture::GetDepthFrame(IMultiSourceFrame* pMultiFrame)
{
    IDepthFrameReference* pDepthFrameReference = NULL;
    IDepthFrame* pDepthFrame = NULL;

    HRESULT hr = pMultiFrame->get_DepthFrameReference(&pDepthFrameReference);
    if (SUCCEEDED(hr))
    {
        hr = pDepthFrameReference->AcquireFrame(&pDepthFrame);
    }
    if (SUCCEEDED(hr))
    {
        // First frame: query dimensions and allocate the destination buffer.
        if (pDepth == NULL)
        {
            IFrameDescription* pFrameDescription = NULL;
            hr = pDepthFrame->get_FrameDescription(&pFrameDescription);
            if (SUCCEEDED(hr))
            {
                pFrameDescription->get_Width(&nDepthFrameWidth);
                pFrameDescription->get_Height(&nDepthFrameHeight);
                pDepth = new UINT16[nDepthFrameHeight * nDepthFrameWidth];
            }
            SafeRelease(pFrameDescription);
        }
        if (pDepth != NULL)
        {
            UINT nBufferSize = nDepthFrameHeight * nDepthFrameWidth;
            hr = pDepthFrame->CopyFrameDataToArray(nBufferSize, pDepth);
        }
    }
    SafeRelease(pDepthFrame);
    SafeRelease(pDepthFrameReference);
}
void idle() { // Read color data IColorFrame* pCFrame = nullptr; if (pColorFrameReader->AcquireLatestFrame(&pCFrame) == S_OK) { pCFrame->CopyConvertedFrameDataToArray(uColorBufferSize, pColorBuffer, ColorImageFormat_Rgba); pCFrame->Release(); pCFrame = nullptr; } // Read depth data IDepthFrame* pDFrame = nullptr; if (pDepthFrameReader->AcquireLatestFrame(&pDFrame) == S_OK) { pDFrame->CopyFrameDataToArray(uDepthPointNum, pDepthBuffer); pDFrame->Release(); pDFrame = nullptr; // map to camera space pCoordinateMapper->MapColorFrameToCameraSpace(uDepthPointNum, pDepthBuffer, uColorPointNum, pCSPoints); } }
bool KinectInterface::getFrameData(IMultiSourceFrame* frame, cv::Mat& intensity_mat, cv::Mat& depth_mat, cv::Mat& pos_mat) { //Obtain depth frame IDepthFrame* depthframe = nullptr; if (FAILED(depthFrameReader->AcquireLatestFrame(&depthframe))) return false; if (!depthframe) return false; // Get data from frame unsigned int sz; unsigned short* buf; if (FAILED(depthframe->AccessUnderlyingBuffer(&sz, &buf))) return false; //get depth -> xyz mapping if (FAILED(mapper->MapDepthFrameToCameraSpace(width*height, buf, width*height, depth2xyz))) return false; //get depth -> rgb image mapping if (FAILED(mapper->MapDepthFrameToColorSpace(width*height, buf, width*height, depth2rgb))) return false; //save depth if (FAILED(depthframe->CopyFrameDataToArray(height * width, depth_data))); if (depthframe) depthframe->Release(); //Obtain RGB frame IColorFrame* colorframe; if (FAILED(colorFrameReader->AcquireLatestFrame(&colorframe))) return false; if (!colorframe) return false; // Get data from frame if (FAILED(colorframe->CopyConvertedFrameDataToArray(colorwidth*colorheight * 4, rgbimage, ColorImageFormat_Rgba))) return false; cv::Mat tmp_depth = cv::Mat::zeros(colorheight, colorwidth, CV_16UC1); cv::Mat tmp_pos = cv::Mat::zeros(colorheight, colorwidth, CV_32FC3); cv::Mat depth_org(height, width, CV_16UC1, depth_data); cv::Mat tmp_rgb(colorheight, colorwidth, CV_8UC4, rgbimage); // Write color array for vertices for (int i = 0; i < width*height; i++) { ColorSpacePoint p = depth2rgb[i]; int iY = (int)(p.Y + 0.5); int iX = (int)(p.X + 0.5); if (iX >= 0 && iY >= 0 && iX < colorwidth && iY < colorheight) { // Check if color pixel coordinates are in bounds tmp_depth.at<unsigned short>(iY, iX) = depth_data[i]; //tmp_pos.at<float>(iY, iX, 0) = depth2xyz[i].X; //tmp_pos.at<float>(iY, iX, 1) = depth2xyz[i].Y; //tmp_pos.at<float>(iY, iX, 2) = depth2xyz[i].Z; } } if (colorframe) colorframe->Release(); cv::resize(tmp_rgb(cv::Rect(240, 0, 1440, 1080)), intensity_mat, cv::Size(640, 480)); 
cv::resize(tmp_depth(cv::Rect(240, 0, 1440, 1080)), depth_mat, cv::Size(640, 480)); cv::resize(tmp_pos(cv::Rect(240, 0, 1440, 1080)), pos_mat, cv::Size(640, 480)); cv::cvtColor(intensity_mat, intensity_mat, CV_RGBA2GRAY); return true; }
bool ms_kinect2::acquire_depth_frame(const _OPENNUI byte* dst) { bool result = false; IDepthFrame* pDepthFrame = NULL; static unsigned int bufferSize = 512 * 424; HRESULT hResult = S_OK; hResult = pDepthReader->AcquireLatestFrame(&pDepthFrame); if (SUCCEEDED(hResult)) { hResult = pDepthFrame->CopyFrameDataToArray(bufferSize, (UINT16*)dst); if (SUCCEEDED(hResult)) result = true; } SafeRelease(pDepthFrame); return result; }
// Grabber worker thread: until quit is set, acquires the latest color and
// depth frames under the mutex, then fires whichever point-cloud signals
// have subscribers.
// NOTE(review): the signal callbacks read colorBuffer/depthBuffer AFTER the
// lock is released — confirm no other thread mutates those buffers
// concurrently. If an acquire fails, the previous frame's data is re-sent.
void pcl::Kinect2Grabber::threadFunction()
{
    while (!quit){
        boost::unique_lock<boost::mutex> lock(mutex);
        // Acquire Latest Color Frame
        IColorFrame* colorFrame = nullptr;
        result = colorReader->AcquireLatestFrame(&colorFrame);
        if (SUCCEEDED(result)){
            // Retrieved Color Data (converted to BGRA, one RGBQUAD per pixel)
            result = colorFrame->CopyConvertedFrameDataToArray(colorBuffer.size() * sizeof(RGBQUAD), reinterpret_cast<BYTE*>(&colorBuffer[0]), ColorImageFormat::ColorImageFormat_Bgra);
            if (FAILED(result)){
                // NOTE(review): std::exception(const char*) is an MSVC
                // extension; std::runtime_error would be portable.
                throw std::exception("Exception : IColorFrame::CopyConvertedFrameDataToArray()");
            }
        }
        SafeRelease(colorFrame);
        // Acquire Latest Depth Frame
        IDepthFrame* depthFrame = nullptr;
        result = depthReader->AcquireLatestFrame(&depthFrame);
        if (SUCCEEDED(result)){
            // Retrieved Depth Data (raw UINT16 samples)
            result = depthFrame->CopyFrameDataToArray(depthBuffer.size(), &depthBuffer[0]);
            if (FAILED(result)){
                throw std::exception("Exception : IDepthFrame::CopyFrameDataToArray()");
            }
        }
        SafeRelease(depthFrame);
        lock.unlock();
        // Dispatch to subscribers; conversion helpers build the clouds.
        if (signal_PointXYZ->num_slots() > 0) {
            signal_PointXYZ->operator()(convertDepthToPointXYZ(&depthBuffer[0]));
        }
        if (signal_PointXYZRGB->num_slots() > 0) {
            signal_PointXYZRGB->operator()(convertRGBDepthToPointXYZRGB(&colorBuffer[0], &depthBuffer[0]));
        }
        if (signal_PointXYZI->num_slots() > 0) {
            signal_PointXYZI->operator()(convertRGBDepthToPointXYZI(&colorBuffer[0], &depthBuffer[0]));
        }
    }
}
void KinectHDFaceGrabber::update() { if (!m_pColorFrameReader || !m_pBodyFrameReader){ return; } IColorFrame* pColorFrame = nullptr; HRESULT hr = m_pColorFrameReader->AcquireLatestFrame(&pColorFrame); IDepthFrame* depthFrame = nullptr; if (SUCCEEDED(hr)){ hr = m_pDepthFrameReader->AcquireLatestFrame(&depthFrame); } if (SUCCEEDED(hr)){ ColorImageFormat imageFormat = ColorImageFormat_None; if (SUCCEEDED(hr)){ hr = pColorFrame->get_RawColorImageFormat(&imageFormat); } if (SUCCEEDED(hr)){ UINT nBufferSize = m_colorWidth * m_colorHeight * sizeof(RGBQUAD); hr = pColorFrame->CopyConvertedFrameDataToArray(nBufferSize, reinterpret_cast<BYTE*>(m_colorBuffer.data()), ColorImageFormat_Bgra); } if (SUCCEEDED(hr)){ hr = depthFrame->CopyFrameDataToArray(m_depthBuffer.size(), &m_depthBuffer[0]); } if (SUCCEEDED(hr)){ renderColorFrameAndProcessFaces(); } } SafeRelease(depthFrame); SafeRelease(pColorFrame); }
// Minimal Kinect v2 depth viewer: open the default sensor, read depth frames
// in a loop, scale the 16-bit depth to 8-bit, and display it with OpenCV
// until ESC is pressed. Resources are released in reverse acquisition order
// via the nested else blocks.
int main(int argc, char** argv)
{
    // 1a. Get default Sensor
    cout << "Try to get default sensor" << endl;
    IKinectSensor* pSensor = nullptr;
    if (GetDefaultKinectSensor(&pSensor) != S_OK)
    {
        cerr << "Get Sensor failed" << endl;
    }
    else
    {
        // 1b. Open sensor
        cout << "Try to open sensor" << endl;
        if (pSensor->Open() != S_OK)
        {
            cerr << "Can't open sensor" << endl;
        }
        else
        {
            // 2a. Get frame source
            cout << "Try to get source" << endl;
            IDepthFrameSource* pFrameSource = nullptr;
            if (pSensor->get_DepthFrameSource(&pFrameSource) != S_OK)
            {
                cerr << "Can't get frame source" << endl;
            }
            else
            {
                // 2b. Get frame description (depth image dimensions)
                int iWidth = 0;
                int iHeight = 0;
                IFrameDescription* pFrameDescription = nullptr;
                if (pFrameSource->get_FrameDescription(&pFrameDescription) == S_OK)
                {
                    pFrameDescription->get_Width(&iWidth);
                    pFrameDescription->get_Height(&iHeight);
                    pFrameDescription->Release();
                    pFrameDescription = nullptr;
                }
                // 2c. get depth-only metadata: reliable range in millimeters
                UINT16 uDepthMin = 0, uDepthMax = 0;
                pFrameSource->get_DepthMinReliableDistance(&uDepthMin);
                pFrameSource->get_DepthMaxReliableDistance(&uDepthMax);
                cout << "Reliable Distance: " << uDepthMin << " - " << uDepthMax << endl;
                // Prepare OpenCV buffers: raw 16-bit depth and 8-bit display image
                cv::Mat mDepthImg(iHeight, iWidth, CV_16UC1);
                cv::Mat mImg8bit(iHeight, iWidth, CV_8UC1);
                cv::namedWindow( "Depth Map" );
                // 3a. get frame reader
                cout << "Try to get frame reader" << endl;
                IDepthFrameReader* pFrameReader = nullptr;
                if (pFrameSource->OpenReader(&pFrameReader) != S_OK)
                {
                    cerr << "Can't get frame reader" << endl;
                }
                else
                {
                    // Enter main loop
                    cout << "Enter main loop" << endl;
                    while (true)
                    {
                        // 4a. Get last frame (fails harmlessly when no new frame yet)
                        IDepthFrame* pFrame = nullptr;
                        if (pFrameReader->AcquireLatestFrame(&pFrame) == S_OK)
                        {
                            // 4c. copy the depth map into the cv::Mat's storage
                            if (pFrame->CopyFrameDataToArray(iWidth * iHeight, reinterpret_cast<UINT16*>(mDepthImg.data)) == S_OK)
                            {
                                // 4d. convert from 16bit to 8bit, scaling so that
                                // uDepthMax maps to 255 for display
                                mDepthImg.convertTo(mImg8bit, CV_8U, 255.0f / uDepthMax);
                                cv::imshow("Depth Map", mImg8bit);
                            }
                            else
                            {
                                cerr << "Data copy error" << endl;
                            }
                            // 4e. release frame
                            pFrame->Release();
                        }
                        // 4f. check keyboard input; ESC quits
                        if (cv::waitKey(30) == VK_ESCAPE){
                            break;
                        }
                    }
                    // 3b. release frame reader
                    cout << "Release frame reader" << endl;
                    pFrameReader->Release();
                    pFrameReader = nullptr;
                }
                // 2d. release Frame source
                cout << "Release frame source" << endl;
                pFrameSource->Release();
                pFrameSource = nullptr;
            }
            // 1c. Close Sensor
            cout << "close sensor" << endl;
            pSensor->Close();
        }
        // 1d. Release Sensor
        cout << "Release sensor" << endl;
        pSensor->Release();
        pSensor = nullptr;
    }
    return 0;
}
/// Main processing function: per tick, resets the per-body state, then reads
/// three streams in sequence — body index (background mask), skeleton, and
/// depth — drawing each into OpenCV images and updating position/distance.
/// NOTE(review): hr is chained across the three sections, so if the body
/// index acquire fails, the skeleton and depth sections are skipped too —
/// confirm this is intended rather than treating the streams independently.
void CBodyBasics::Update()
{
    clear = true;
    // Reset all tracked-body slots to "no data".
    for ( int i = 0; i < BODY_COUNT; i ++ )
    {
        bodyXY[i][0] = bodyXY[i][1] = -1;
        position[i][0] = position[i][1] = -1;
        angle[i] = -1;
        distance = -1;
    }
    // Clear skeletonImg at the start of every update.
    skeletonImg.setTo(0);
    // If the Kinect connection was lost, do nothing this tick.
    if (!m_pBodyFrameReader)
    {
        return;
    }
    IBodyFrame* pBodyFrame = NULL;          // skeleton data
    IDepthFrame* pDepthFrame = NULL;        // depth data
    IBodyIndexFrame* pBodyIndexFrame = NULL;// body-index (background) mask
    // Tracks success/failure of each step.
    HRESULT hr = S_OK;
    //--------------------- acquire and display the body-index mask ---------------------
    if (SUCCEEDED(hr)){
        hr = m_pBodyIndexFrameReader->AcquireLatestFrame(&pBodyIndexFrame);
    }
    if (SUCCEEDED(hr)){
        // Body-index frame is 8-bit per pixel: body pixels dark, background light.
        BYTE *bodyIndexArray = new BYTE[cDepthHeight * cDepthWidth];
        pBodyIndexFrame->CopyFrameDataToArray(cDepthHeight * cDepthWidth, bodyIndexArray);
        // Paint the mask into the (3-channel) skeleton Mat, replicating the
        // value into B, G and R.
        uchar* skeletonData = (uchar*)skeletonImg.data;
        for (int j = 0; j < cDepthHeight * cDepthWidth; ++j){
            *skeletonData = bodyIndexArray[j]; ++skeletonData;
            *skeletonData = bodyIndexArray[j]; ++skeletonData;
            *skeletonData = bodyIndexArray[j]; ++skeletonData;
        }
        delete[] bodyIndexArray;
    }
    // Must release, or no new frames will be delivered.
    SafeRelease(pBodyIndexFrame);
    //--------------------- acquire and draw the skeleton ---------------------
    if (SUCCEEDED(hr)){
        hr = m_pBodyFrameReader->AcquireLatestFrame(&pBodyFrame);
    }
    if (SUCCEEDED(hr))
    {
        // One IBody per trackable person; the sensor tracks up to six.
        IBody* ppBodies[BODY_COUNT] = { 0 };
        if (SUCCEEDED(hr))
        {
            // Copy the tracking data for every body slot.
            hr = pBodyFrame->GetAndRefreshBodyData(_countof(ppBodies), ppBodies);
        }
        if (SUCCEEDED(hr))
        {
            // Extract and draw the skeleton for each body.
            ProcessBody(BODY_COUNT, ppBodies);
        }
        for (int i = 0; i < _countof(ppBodies); ++i)
        {
            SafeRelease(ppBodies[i]);
        }
    }
    // Must release, or no new frames will be delivered.
    SafeRelease(pBodyFrame);
    //--------------------- acquire and display the depth data ---------------------
    if (SUCCEEDED(hr)){
        hr = m_pDepthFrameReader->AcquireLatestFrame(&pDepthFrame);
    }
    if (SUCCEEDED(hr)){
        // Depth samples are 16-bit unsigned ints (millimeters).
        UINT16 *depthArray = new UINT16[cDepthHeight * cDepthWidth];
        pDepthFrame->CopyFrameDataToArray(cDepthHeight * cDepthWidth, depthArray);
        // Paint the depth into the Mat. NOTE(review): this truncates each
        // 16-bit sample to its low byte, so the display wraps every 256 mm.
        uchar* depthData = (uchar*)depthImg.data;
        for (int j = 0; j < cDepthHeight * cDepthWidth; ++j){
            *depthData = depthArray[j]; ++depthData;
        }
        // Distance at the center pixel of the depth image.
        distance = depthArray[cDepthHeight*cDepthWidth/2 + cDepthWidth/2];
        // Convert each tracked body's depth-pixel position to meters.
        for ( int j = 0; j < BODY_COUNT; j ++ )
        {
            // Skip slots never filled in this tick (both coords still -1).
            if ( -1 == (bodyXY[j][0] | bodyXY[j][1]) )
            {
                continue;
            }
            double r = depthArray[cDepthWidth*bodyXY[j][1] + bodyXY[j][0]];
            position[j][0] = r * cos(angle[j]) / 1000.0;
            position[j][1] = r * sin(angle[j]) / 1000.0;
        }
        delete[] depthArray;
    }
    // Must release, or no new frames will be delivered.
    SafeRelease(pDepthFrame);
    imshow("depthImg", depthImg);
    cv::waitKey(5);
}
// Reads one depth frame, either from the supplied multi-source frame or, if
// multiFrame is null, directly from this stream's depth frame reader. On
// success the frame's metadata and pixel data are stored in m_Frame and
// forwarded to setPixels(). Returns true iff a frame was fully read.
// Fix vs. original: get_DepthMinReliableDistance was queried twice; the
// duplicate call has been removed.
bool DepthStream::readFrame(IMultiSourceFrame *multiFrame)
{
    bool readed = false;
    if (!m_StreamHandle.depthFrameReader)
    {
        ofLogWarning("ofxKinect2::DepthStream") << "Stream is not open.";
        return readed;
    }
    Stream::readFrame(multiFrame);
    IDepthFrame *depthFrame = nullptr;
    HRESULT hr = E_FAIL;
    if (!multiFrame)
    {
        hr = m_StreamHandle.depthFrameReader->AcquireLatestFrame(&depthFrame);
    }
    else
    {
        IDepthFrameReference *depthFrameReference = nullptr;
        hr = multiFrame->get_DepthFrameReference(&depthFrameReference);
        if (SUCCEEDED(hr))
        {
            hr = depthFrameReference->AcquireFrame(&depthFrame);
        }
        safeRelease(depthFrameReference);
    }
    if (SUCCEEDED(hr))
    {
        IFrameDescription *depthFrameDescription = nullptr;
        hr = depthFrame->get_RelativeTime((INT64 *)&m_Frame.timestamp);
        // Frame geometry and fields of view.
        if (SUCCEEDED(hr)) { hr = depthFrame->get_FrameDescription(&depthFrameDescription); }
        if (SUCCEEDED(hr)) { hr = depthFrameDescription->get_Width(&m_Frame.width); }
        if (SUCCEEDED(hr)) { hr = depthFrameDescription->get_Height(&m_Frame.height); }
        if (SUCCEEDED(hr)) { hr = depthFrameDescription->get_HorizontalFieldOfView(&m_Frame.horizontalFieldOfView); }
        if (SUCCEEDED(hr)) { hr = depthFrameDescription->get_VerticalFieldOfView(&m_Frame.verticalFieldOfView); }
        if (SUCCEEDED(hr)) { hr = depthFrameDescription->get_DiagonalFieldOfView(&m_Frame.diagonalFieldOfView); }
        // Reliable depth range.
        if (SUCCEEDED(hr)) { hr = depthFrame->get_DepthMinReliableDistance((USHORT *)&m_NearValue); }
        if (SUCCEEDED(hr)) { hr = depthFrame->get_DepthMaxReliableDistance((USHORT *)&m_FarValue); }
        if (SUCCEEDED(hr))
        {
            // Allocate the pixel buffer once, on the first frame.
            if (m_Frame.dataSize == 0)
            {
                m_Frame.dataSize = m_Frame.width * m_Frame.height;
                m_Frame.data = new UINT16[m_Frame.width * m_Frame.height];
            }
            hr = depthFrame->CopyFrameDataToArray(m_Frame.width * m_Frame.height, reinterpret_cast<UINT16 *>(m_Frame.data));
        }
        if (SUCCEEDED(hr))
        {
            readed = true;
            setPixels(m_Frame);
        }
        safeRelease(depthFrameDescription);
    }
    safeRelease(depthFrame);
    return readed;
}
// Fetches the latest multi-source frame and produces three outputs:
//  - color:       BGRA color image (the SDK's raw buffer when it is already
//                 BGRA, otherwise converted into m_pColorRGBX)
//  - depth:       grayscale visualization written into m_pDepthRGBX
//  - depthBuffer: raw UINT16 depth values copied into m_pDepthRawBuffer
// Returns S_OK on success, a failing HRESULT/E_FAIL otherwise.
// Fixes vs. original: pColorFrameDescription was never released (a per-call
// COM leak), pMultiSourceFrame was leaked on the failure path, and the loop
// variable `depth` shadowed the out-parameter of the same name.
HRESULT KinectHandler::GetColorAndDepth(RGBQUAD* &color, RGBQUAD* &depth, UINT16*& depthBuffer)
{
    if (!m_pMultiFrameReader)
    {
        cout << "No frame reader!" << endl;
        return E_FAIL;
    }

    IColorFrame* pColorFrame = NULL;
    IDepthFrame* pDepthFrame = NULL;
    IMultiSourceFrame* pMultiSourceFrame = NULL;

    HRESULT hr = m_pMultiFrameReader->AcquireLatestFrame(&pMultiSourceFrame);
    if (SUCCEEDED(hr))
    {
        IColorFrameReference* pColorFrameReference = NULL;
        hr = pMultiSourceFrame->get_ColorFrameReference(&pColorFrameReference);
        if (SUCCEEDED(hr))
        {
            hr = pColorFrameReference->AcquireFrame(&pColorFrame);
        }
        IDepthFrameReference* pDepthFrameReference = NULL;
        hr = pMultiSourceFrame->get_DepthFrameReference(&pDepthFrameReference);
        if (SUCCEEDED(hr))
        {
            hr = pDepthFrameReference->AcquireFrame(&pDepthFrame);
        }
        SafeRelease(pColorFrameReference);
        SafeRelease(pDepthFrameReference);
    }

    // Both frames must be present; a color failure above is caught here even
    // though hr was overwritten by the depth calls.
    if (SUCCEEDED(hr) && pColorFrame != NULL && pDepthFrame != NULL)
    {
        INT64 nTime = 0;
        // ----- Color -----
        IFrameDescription* pColorFrameDescription = NULL;
        int nColorWidth = 0;
        int nColorHeight = 0;
        ColorImageFormat imageFormat = ColorImageFormat_None;
        UINT nColorBufferSize = 0;
        RGBQUAD *pColorBuffer = NULL;
        hr = pColorFrame->get_RelativeTime(&nTime);
        if (SUCCEEDED(hr)) { hr = pColorFrame->get_FrameDescription(&pColorFrameDescription); }
        if (SUCCEEDED(hr)) { hr = pColorFrameDescription->get_Width(&nColorWidth); }
        if (SUCCEEDED(hr)) { hr = pColorFrameDescription->get_Height(&nColorHeight); }
        if (SUCCEEDED(hr)) { hr = pColorFrame->get_RawColorImageFormat(&imageFormat); }
        if (SUCCEEDED(hr))
        {
            if (imageFormat == ColorImageFormat_Bgra)
            {
                // Already BGRA: borrow the SDK's buffer (valid until the
                // frame is released).
                hr = pColorFrame->AccessRawUnderlyingBuffer(&nColorBufferSize, reinterpret_cast<BYTE**>(&pColorBuffer));
            }
            else if (m_pColorRGBX)
            {
                // Convert into our own BGRA buffer.
                pColorBuffer = m_pColorRGBX;
                nColorBufferSize = cColorWidth * cColorHeight * sizeof(RGBQUAD);
                hr = pColorFrame->CopyConvertedFrameDataToArray(nColorBufferSize, reinterpret_cast<BYTE*>(pColorBuffer), ColorImageFormat_Bgra);
            }
            else
            {
                cout << "FAILED" << endl;
                hr = E_FAIL;
            }
        }
        if (SUCCEEDED(hr))
        {
            color = pColorBuffer;
        }
        SafeRelease(pColorFrameDescription); // was leaked in the original

        ///===========================================////
        // ----- Depth -----
        nTime = 0;
        IFrameDescription* pDepthFrameDescription = NULL;
        int nDepthWidth = 0;
        int nDepthHeight = 0;
        USHORT nDepthMinReliableDistance = 0;
        USHORT nDepthMaxDistance = 0;
        UINT nDepthBufferSize = 0;
        UINT16 *pDepthBuffer = NULL;
        hr = pDepthFrame->get_RelativeTime(&nTime);
        if (SUCCEEDED(hr)) { hr = pDepthFrame->get_FrameDescription(&pDepthFrameDescription); }
        if (SUCCEEDED(hr)) { hr = pDepthFrameDescription->get_Width(&nDepthWidth); }
        if (SUCCEEDED(hr)) { hr = pDepthFrameDescription->get_Height(&nDepthHeight); }
        if (SUCCEEDED(hr)) { hr = pDepthFrame->get_DepthMinReliableDistance(&nDepthMinReliableDistance); }
        if (SUCCEEDED(hr))
        {
            // In order to see the full range of depth (including the less
            // reliable far field depth) we are setting nDepthMaxDistance to
            // the extreme potential depth threshold.
            nDepthMaxDistance = USHRT_MAX;
            // Note: If you wish to filter by reliable depth distance,
            // uncomment the following line.
            //// hr = pDepthFrame->get_DepthMaxReliableDistance(&nDepthMaxDistance);
        }
        if (SUCCEEDED(hr))
        {
            hr = pDepthFrame->AccessUnderlyingBuffer(&nDepthBufferSize, &pDepthBuffer);
        }
        if (SUCCEEDED(hr))
        {
            // Build a grayscale visualization in m_pDepthRGBX. The formula
            // is kept exactly as in the original: values in the reliable
            // range map (inverted) to 0..255, others to 0 (black).
            const UINT16* pBufferEnd = pDepthBuffer + (nDepthWidth * nDepthHeight);
            RGBQUAD* auxiliar = m_pDepthRGBX;
            int counter = 0;
            while (pDepthBuffer < pBufferEnd)
            {
                USHORT depthValue = *pDepthBuffer;
                BYTE intensity = static_cast<BYTE>((depthValue >= nDepthMinReliableDistance) && (depthValue <= nDepthMaxDistance) ? ((depthValue - nDepthMinReliableDistance) * (0 - 255) / (nDepthMaxDistance / 50 - nDepthMinReliableDistance) + 255) : 0);
                auxiliar->rgbBlue = intensity;
                auxiliar->rgbGreen = intensity;
                auxiliar->rgbRed = intensity;
                auxiliar->rgbReserved = (BYTE)255;
                counter++;
                ++auxiliar;
                ++pDepthBuffer;
            }
            depth = m_pDepthRGBX;
        }
        if (m_pDepthRawBuffer)
        {
            // Raw millimeter values for callers that need them.
            hr = pDepthFrame->CopyFrameDataToArray((cDepthWidth * cDepthHeight), m_pDepthRawBuffer);
            if (SUCCEEDED(hr))
                depthBuffer = m_pDepthRawBuffer;
        }
        SafeRelease(pDepthFrameDescription);
    }
    else
    {
        cout << "Acquire last frame FAILED " << endl;
        SafeRelease(pColorFrame);
        SafeRelease(pDepthFrame);
        SafeRelease(pMultiSourceFrame); // was leaked on this path
        return E_FAIL;
    }
    SafeRelease(pColorFrame);
    SafeRelease(pDepthFrame);
    SafeRelease(pMultiSourceFrame);
    return hr;
}
// Inhaler coaching application: opens the Kinect, sets up color/depth/body/
// body-index readers plus the coordinate mapper, then loops — compositing a
// user-silhouette view (in COACH_DEBUG builds), tracking the skeleton to
// detect inhaler shaking and mouth positioning, and overlaying coaching
// messages — until ESC is pressed. A worker thread (test_func) runs the
// coach's control logic.
int main(int argc, char** argv)
{
    int first_time = 0;
    Size screen_size(1440, 900);//the dst image size,e.g.100x100
    Scalar text_color = Scalar(0, 255, 0);
    Scalar text_color2 = Scalar(0, 255, 255);
    Scalar text_color3 = Scalar(0, 0, 255);
    inhaler_coach coach;
    coach.control = 0;
    // Worker thread driving the coach state machine.
    thread mThread(test_func, &coach);
    // 1a. Get Kinect Sensor
    cout << "Try to get default sensor" << endl;
    IKinectSensor* pSensor = nullptr;
    if (GetDefaultKinectSensor(&pSensor) != S_OK)
    {
        cerr << "Get Sensor failed" << endl;
        return -1;
    }
    // 1b. Open sensor
    cout << "Try to open sensor" << endl;
    if (pSensor->Open() != S_OK)
    {
        cerr << "Can't open sensor" << endl;
        return -1;
    }
    // 2. Color Related code
    IColorFrameReader* pColorFrameReader = nullptr;
    cv::Mat mColorImg;
    UINT uBufferSize = 0;
    UINT uColorPointNum = 0;
    int iWidth = 0;
    int iHeight = 0;
    {
        // 2a. Get color frame source
        cout << "Try to get color source" << endl;
        IColorFrameSource* pFrameSource = nullptr;
        if (pSensor->get_ColorFrameSource(&pFrameSource) != S_OK)
        {
            cerr << "Can't get color frame source" << endl;
            return -1;
        }
        // 2b. Get frame description (color image dimensions)
        cout << "get color frame description" << endl;
        IFrameDescription* pFrameDescription = nullptr;
        if (pFrameSource->get_FrameDescription(&pFrameDescription) == S_OK)
        {
            pFrameDescription->get_Width(&iWidth);
            pFrameDescription->get_Height(&iHeight);
        }
        // NOTE(review): Release() is called even if get_FrameDescription
        // failed, i.e. on a null pointer — confirm this cannot happen here.
        pFrameDescription->Release();
        pFrameDescription = nullptr;
        // 2c. get frame reader
        cout << "Try to get color frame reader" << endl;
        if (pFrameSource->OpenReader(&pColorFrameReader) != S_OK)
        {
            cerr << "Can't get color frame reader" << endl;
            return -1;
        }
        // 2d. release Frame source
        cout << "Release frame source" << endl;
        pFrameSource->Release();
        pFrameSource = nullptr;
        // Prepare OpenCV data: BGRA image plus buffer/pixel counts
        mColorImg = cv::Mat(iHeight, iWidth, CV_8UC4);
        uBufferSize = iHeight * iWidth * 4 * sizeof(BYTE);
        uColorPointNum = iHeight * iWidth;
    }
    // 3. Depth related code
    IDepthFrameReader* pDepthFrameReader = nullptr;
    UINT uDepthPointNum = 0;
    int iDepthWidth = 0, iDepthHeight = 0;
    cout << "Try to get depth source" << endl;
    {
        // Get frame source
        IDepthFrameSource* pFrameSource = nullptr;
        if (pSensor->get_DepthFrameSource(&pFrameSource) != S_OK)
        {
            cerr << "Can't get depth frame source" << endl;
            return -1;
        }
        // Get frame description (depth image dimensions)
        cout << "get depth frame description" << endl;
        IFrameDescription* pFrameDescription = nullptr;
        if (pFrameSource->get_FrameDescription(&pFrameDescription) == S_OK)
        {
            pFrameDescription->get_Width(&iDepthWidth);
            pFrameDescription->get_Height(&iDepthHeight);
            uDepthPointNum = iDepthWidth * iDepthHeight;
        }
        // NOTE(review): same unconditional Release() pattern as above.
        pFrameDescription->Release();
        pFrameDescription = nullptr;
        // get frame reader
        cout << "Try to get depth frame reader" << endl;
        if (pFrameSource->OpenReader(&pDepthFrameReader) != S_OK)
        {
            cerr << "Can't get depth frame reader" << endl;
            return -1;
        }
        // release Frame source
        cout << "Release frame source" << endl;
        pFrameSource->Release();
        pFrameSource = nullptr;
    }
    // 4. Body related code
    IBodyFrameReader* pBodyFrameReader = nullptr;
    IBody** aBodyData = nullptr;
    INT32 iBodyCount = 0;
    {
        // 3a. Get frame source
        cout << "Try to get body source" << endl;
        IBodyFrameSource* pFrameSource = nullptr;
        if (pSensor->get_BodyFrameSource(&pFrameSource) != S_OK)
        {
            cerr << "Can't get body frame source" << endl;
            return -1;
        }
        // 3b. Get the number of trackable bodies
        if (pFrameSource->get_BodyCount(&iBodyCount) != S_OK)
        {
            cerr << "Can't get body count" << endl;
            return -1;
        }
        cout << " > Can trace " << iBodyCount << " bodies" << endl;
        aBodyData = new IBody*[iBodyCount];
        for (int i = 0; i < iBodyCount; ++i)
            aBodyData[i] = nullptr;
        // 3c. get frame reader
        cout << "Try to get body frame reader" << endl;
        if (pFrameSource->OpenReader(&pBodyFrameReader) != S_OK)
        {
            cerr << "Can't get body frame reader" << endl;
            return -1;
        }
        // 3d. release Frame source
        cout << "Release frame source" << endl;
        pFrameSource->Release();
        pFrameSource = nullptr;
    }
    // 4. Body Index related code
    IBodyIndexFrameReader* pBIFrameReader = nullptr;
    cout << "Try to get body index source" << endl;
    {
        // Get frame source
        IBodyIndexFrameSource* pFrameSource = nullptr;
        if (pSensor->get_BodyIndexFrameSource(&pFrameSource) != S_OK)
        {
            cerr << "Can't get body index frame source" << endl;
            return -1;
        }
        // get frame reader
        cout << "Try to get body index frame reader" << endl;
        // NOTE(review): error message says "depth" but this is the body
        // index reader — copy/paste slip in the runtime string.
        if (pFrameSource->OpenReader(&pBIFrameReader) != S_OK)
        {
            cerr << "Can't get depth frame reader" << endl;
            return -1;
        }
        // release Frame source
        cout << "Release frame source" << endl;
        pFrameSource->Release();
        pFrameSource = nullptr;
    }
    // 5. background canvas for the silhouette composite
    cv::Mat imgBG(iHeight, iWidth, CV_8UC3);
    imgBG.setTo(0);
    // 4. get CoordinateMapper
    ICoordinateMapper* pCoordinateMapper = nullptr;
    if (pSensor->get_CoordinateMapper(&pCoordinateMapper) != S_OK)
    {
        cout << "Can't get coordinate mapper" << endl;
        return -1;
    }
    // Enter main loop. NOTE(review): these buffers (and the readers above)
    // are never freed before return — acceptable at process exit, but worth
    // confirming.
    UINT16* pDepthPoints = new UINT16[uDepthPointNum];
    BYTE* pBodyIndex = new BYTE[uDepthPointNum];
    DepthSpacePoint* pPointArray = new DepthSpacePoint[uColorPointNum];
    // NOTE(review): window "Inhaler Coach" is created but imshow() below
    // targets "Coach" — the named window stays empty.
    cv::namedWindow("Inhaler Coach");
    while (true)
    {
        // 4a. Get last color frame into mColorImg
        IColorFrame* pColorFrame = nullptr;
        if (pColorFrameReader->AcquireLatestFrame(&pColorFrame) == S_OK)
        {
            pColorFrame->CopyConvertedFrameDataToArray(uBufferSize, mColorImg.data, ColorImageFormat_Bgra);
            pColorFrame->Release();
            pColorFrame = nullptr;
        }
        cv::Mat mImg = mColorImg.clone();
        // 8b. read depth frame
        IDepthFrame* pDepthFrame = nullptr;
        if (pDepthFrameReader->AcquireLatestFrame(&pDepthFrame) == S_OK)
        {
            pDepthFrame->CopyFrameDataToArray(uDepthPointNum, pDepthPoints);
            pDepthFrame->Release();
            pDepthFrame = nullptr;
        }
        // 8c. read body index frame
        IBodyIndexFrame* pBIFrame = nullptr;
        if (pBIFrameReader->AcquireLatestFrame(&pBIFrame) == S_OK)
        {
            pBIFrame->CopyFrameDataToArray(uDepthPointNum, pBodyIndex);
            pBIFrame->Release();
            pBIFrame = nullptr;
        }
#ifdef COACH_DEBUG
        cv::Mat imgTarget = imgBG.clone();
        // 9b. map color pixels into depth space, then copy only pixels that
        // belong to a tracked user (body index < 6) onto the background.
        if (pCoordinateMapper->MapColorFrameToDepthSpace(uDepthPointNum, pDepthPoints, uColorPointNum, pPointArray) == S_OK)
        {
            for (int y = 0; y < imgTarget.rows; ++y)
            {
                for (int x = 0; x < imgTarget.cols; ++x)
                {
                    // ( x, y ) in color frame = rPoint in depth frame
                    const DepthSpacePoint& rPoint = pPointArray[y * imgTarget.cols + x];
                    // check if rPoint is in range
                    if (rPoint.X >= 0 && rPoint.X < iDepthWidth && rPoint.Y >= 0 && rPoint.Y < iDepthHeight)
                    {
                        // fill color from color frame if this pixel is user
                        int iIdx = (int)rPoint.X + iDepthWidth * (int)rPoint.Y;
                        if (pBodyIndex[iIdx] < 6)
                        {
                            cv::Vec4b& rPixel = mImg.at<cv::Vec4b>(y, x);
                            imgTarget.at<cv::Vec3b>(y, x) = cv::Vec3b(rPixel[0], rPixel[1], rPixel[2]);
                        }
                    }
                }
            }
        }
#else
        cv::Mat imgTarget = mImg.clone();
#endif
        // 4b. Get body data
        IBodyFrame* pBodyFrame = nullptr;
        if (pBodyFrameReader->AcquireLatestFrame(&pBodyFrame) == S_OK)
        {
            // 4b. get Body data
            if (pBodyFrame->GetAndRefreshBodyData(iBodyCount, aBodyData) == S_OK)
            {
                // 4c. for each body
                for (int i = 0; i < iBodyCount; ++i)
                {
                    IBody* pBody = aBodyData[i];
                    // check if is tracked
                    BOOLEAN bTracked = false;
                    if ((pBody->get_IsTracked(&bTracked) == S_OK) && bTracked)
                    {
                        // get joint positions for the whole skeleton
                        Joint aJoints[JointType::JointType_Count];
                        if (pBody->GetJoints(JointType::JointType_Count, aJoints) == S_OK)
                        {
                            // First detection: greet the user once.
                            if (coach.state == 0){
                                coach.state = 1;
                                if (first_time == 0){
                                    first_time = 1;
                                    PlaySound(TEXT("welcome.wav"), NULL, SND_FILENAME);
                                }
                            }
#ifdef COACH_DEBUG
                            // Draw the skeleton: spine, head, then both arms.
                            DrawLine(imgTarget, aJoints[JointType_SpineBase], aJoints[JointType_SpineMid], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_SpineMid], aJoints[JointType_SpineShoulder], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_SpineShoulder], aJoints[JointType_Neck], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_Neck], aJoints[JointType_Head], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_SpineShoulder], aJoints[JointType_ShoulderLeft], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_ShoulderLeft], aJoints[JointType_ElbowLeft], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_ElbowLeft], aJoints[JointType_WristLeft], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_WristLeft], aJoints[JointType_HandLeft], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_HandLeft], aJoints[JointType_HandTipLeft], pCoordinateMapper);
                            //DrawLine(imgTarget, aJoints[JointType_HandLeft], aJoints[JointType_ThumbLeft], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_SpineShoulder], aJoints[JointType_ShoulderRight], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_ShoulderRight], aJoints[JointType_ElbowRight], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_ElbowRight], aJoints[JointType_WristRight], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_WristRight], aJoints[JointType_HandRight], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_HandRight], aJoints[JointType_HandTipRight], pCoordinateMapper);
                            //DrawLine(imgTarget, aJoints[JointType_HandRight], aJoints[JointType_ThumbRight], pCoordinateMapper);
#endif
                            ColorSpacePoint q;
                            ColorSpacePoint head;
                            //ColorSpacePoint w;
                            // Project the head joint into color space for the overlay box.
                            pCoordinateMapper->MapCameraPointToColorSpace(aJoints[JointType_Head].Position, &head);
                            // check shaking
                            coach.shaking_detection(aJoints, pCoordinateMapper);
                            q = coach.position_checking(aJoints, pCoordinateMapper);
#ifdef COACH_DEBUG
                            circle(imgTarget, cv::Point(q.X, q.Y), 10, Scalar(0, 255, 255), 10, 8, 0);
                            //circle(imgTarget, cv::Point(q.X, q.Y), 10, Scalar(0, 255, 255), 10, 8, 0);
                            rectangle(imgTarget, Point(head.X - 50, head.Y - 40), Point(head.X + 50, head.Y + 90), Scalar(0, 255, 255), 1, 8, 0);
                            //circle(imgTarget, cv::Point(w.X, w.Y), 10, Scalar(255, 0, 255), 10, 8, 0);
#endif
                            // Advance the coaching state machine.
                            coach.state_change_rule();
                        }
                    }
                }
            }
            else
            {
                cerr << "Can't read body data" << endl;
            }
            // 4e. release frame
            pBodyFrame->Release();
        }
        // Overlay the coaching message for the current state.
        switch (coach.state){
        case 0: putText(imgTarget, "CMU Inhaler Coaching System", Point(120, 120), FONT_HERSHEY_DUPLEX, 2, text_color); break;
        case 1: putText(imgTarget, "Please shake the inhaler", Point(20, 120), FONT_HERSHEY_DUPLEX, 2, text_color2); break;
        case 2: putText(imgTarget, "Shaking detected", Point(20, 120), FONT_HERSHEY_DUPLEX, 2, text_color2); break;
        case 3: putText(imgTarget, "Please put the inhaler in front of your mouth", Point(20, 120), FONT_HERSHEY_DUPLEX, 2, text_color2); break;
        case 4: putText(imgTarget, "Position check OK", Point(20, 120), FONT_HERSHEY_DUPLEX, 2, text_color2); break;
        case 5: putText(imgTarget, "You forget to shake the inhaler first!!!", Point(20, 120), FONT_HERSHEY_DUPLEX, 2, text_color3); break;
        }
        // show image, scaled to the display resolution
        Mat dst;
        resize(imgTarget, dst, screen_size);
        imshow("Coach", dst);
        // 4c. check keyboard input; ESC quits
        if (cv::waitKey(30) == VK_ESCAPE){
            break;
        }
    }
    mThread.join();
    // 3. delete body data array
    delete[] aBodyData;
    // 3. release frame reader
    cout << "Release body frame reader" << endl;
    pBodyFrameReader->Release();
    pBodyFrameReader = nullptr;
    // 2. release color frame reader
    cout << "Release color frame reader" << endl;
    pColorFrameReader->Release();
    pColorFrameReader = nullptr;
    // 1c. Close Sensor
    cout << "close sensor" << endl;
    pSensor->Close();
    // 1d. Release Sensor
    cout << "Release sensor" << endl;
    pSensor->Release();
    pSensor = nullptr;
    return 0;
}
// Captures one multi-source Kinect frame: spins until color, depth and
// body-index frames are all available, copies the depth into depthBuffer and
// the color (converted to BGRA if necessary) into colorBuffer, then maps the
// depth frame into color and camera space and fills the normalized RGB
// colorMap used by the renderer. Frame COM objects are released at the end.
void capture()
{
    IMultiSourceFrame *multiFrame = NULL;
    IColorFrame *colorFrame = NULL;
    IColorFrameReference *colorFrameReference = NULL;
    UINT colorBufferSize = 0;
    RGBQUAD *colorBuffer = NULL;
    IDepthFrame *depthFrame = NULL;
    IDepthFrameReference *depthFrameReference = NULL;
    UINT bufferSize = 0;
    // UINT16 *depthBuffer = NULL;
    IBodyIndexFrame *bodyIndexFrame = NULL;
    IBodyIndexFrameReference *bodyIndexFrameReference = NULL;
    UINT bodyIndexBufferSize = 0;
    static int lastTime = 0;
    static int currentTime = 0;
    HRESULT hr = -1;
    // Loop until the frame reader yields a complete set of frames (each
    // failure path releases what was acquired and retries).
    while(1)
    {
        // NOTE(review): this compares the absolute tick count to 33, not the
        // elapsed time since lastTime — after boot it is always true, so no
        // real ~30 fps throttling happens. Probably meant
        // (currentTime - lastTime) > 33.
        if((currentTime = GetTickCount()) > 33)
        {
            hr = multiFrameReader->AcquireLatestFrame(&multiFrame);
            lastTime = currentTime;
        }
        else continue;
        if(FAILED(hr))
        {
            //fprintf(stderr, "AcquireLatestFrame(&multiFrame)\n");
            Sleep(1);
            continue;
        }
        hr = multiFrame->get_ColorFrameReference(&colorFrameReference);
        if(FAILED(hr))
        {
            Sleep(1);
            fprintf(stderr, "ColorFrameReference(&colorFrameReference)\n");
            SafeRelease(multiFrame);
            continue;
        }
        hr = colorFrameReference->AcquireFrame(&colorFrame);
        if(FAILED(hr))
        {
            Sleep(1);
            fprintf(stderr, "AcquireFrame(&colorFrame)\n");
            SafeRelease(colorFrameReference);
            SafeRelease(multiFrame);
            continue;
        }
        hr = multiFrame->get_DepthFrameReference(&depthFrameReference);
        if (FAILED(hr))
        {
            Sleep(1);
            fprintf(stderr, "DepthFrameReference(&depthFrameReference)\n");
            SafeRelease(colorFrame);
            SafeRelease(colorFrameReference);
            SafeRelease(multiFrame);
            continue;
        }
        hr = depthFrameReference->AcquireFrame(&depthFrame);
        if (FAILED(hr))
        {
            Sleep(1);
            fprintf(stderr, "AcquireFrame(&depthFrame)\n");
            SafeRelease(depthFrameReference);
            SafeRelease(colorFrame);
            SafeRelease(colorFrameReference);
            SafeRelease(multiFrame);
            continue;
        }
        // Copy dPixels depth samples into the global depthBuffer.
        // hr = depthFrame->AccessUnderlyingBuffer(&bufferSize, &depthBuffer);
        hr = depthFrame->CopyFrameDataToArray( dPixels, &depthBuffer[0] );
        if (FAILED(hr))
        {
            Sleep(1);
            fprintf(stderr, "AccessUnderlyingBuffer(&bufferSize, &depthBuffer\n");
            SafeRelease(depthFrame);
            SafeRelease(depthFrameReference);
            SafeRelease(colorFrame);
            SafeRelease(colorFrameReference);
            SafeRelease(multiFrame);
            continue;
        }
        hr = multiFrame->get_BodyIndexFrameReference(&bodyIndexFrameReference);
        if (FAILED(hr))
        {
            Sleep(1);
            fprintf(stderr, "BodyIndexReference(&colorFrameReference)\n");
            // NOTE(review): freeing the global depthBuffer here (and below)
            // looks wrong — the loop `continue`s and the next iteration
            // writes into the freed buffer. Confirm ownership of depthBuffer.
            free(depthBuffer);
            SafeRelease(depthFrame);
            SafeRelease(depthFrameReference);
            SafeRelease(colorFrame);
            SafeRelease(colorFrameReference);
            SafeRelease(multiFrame);
            continue;
        }
        hr = bodyIndexFrameReference->AcquireFrame(&bodyIndexFrame);
        if (FAILED(hr))
        {
            Sleep(1);
            fprintf(stderr, "AcquireFrame(&bodyIndexFrame)\n");
            SafeRelease(bodyIndexFrameReference);
            free(depthBuffer);
            SafeRelease(depthFrame);
            SafeRelease(depthFrameReference);
            SafeRelease(colorFrame);
            SafeRelease(colorFrameReference);
            SafeRelease(multiFrame);
            continue;
        }
        // Borrow the body-index buffer directly from the frame.
        hr = bodyIndexFrame->AccessUnderlyingBuffer(&bodyIndexBufferSize, &bodyIndexBuffer);
        if(FAILED(hr))
        {
            Sleep(1);
            fprintf(stderr, "bodyIndexFrame->AccessUnderlyingBuffer(&bodyIndexBufferSize, &bodyIndexBuffer)");
            SafeRelease(bodyIndexFrame);
            SafeRelease(bodyIndexFrameReference);
            free(depthBuffer);
            SafeRelease(depthFrame);
            SafeRelease(depthFrameReference);
            SafeRelease(colorFrame);
            SafeRelease(colorFrameReference);
            SafeRelease(multiFrame);
            continue;
        }
        // All frames acquired: drop the references, keep the frames.
        SafeRelease(colorFrameReference);
        SafeRelease(bodyIndexFrameReference);
        SafeRelease(depthFrameReference);
        break;
    }
    // Depth values are already stored in the buffer above.
    // ERROR_CHECK(depthFrame->AccessUnderlyingBuffer(&bufferSize, &depthBuffer));
    // First call only: read the color frame's description and allocate the
    // colorRGBX conversion buffer.
    if(colorRGBX == NULL)
    {
        IFrameDescription *colorFrameDescription = NULL;
        ERROR_CHECK2(colorFrame->get_FrameDescription(&colorFrameDescription), "FrameDescription");
        ERROR_CHECK2(colorFrameDescription->get_Width(&colorWidth), "get_Width");
        ERROR_CHECK2(colorFrameDescription->get_Height(&colorHeight), "get_Height");
        colorRGBX = new RGBQUAD[colorWidth * colorHeight];
        glutReshapeWindow(width, height);
        ERROR_CHECK2(colorFrame->get_RawColorImageFormat(&imageFormat), "get_RawColorImageFormat");
        SafeRelease(colorFrameDescription);
    }
    // Copy the color image into colorBuffer: borrow the raw buffer when it
    // is already BGRA, otherwise convert into colorRGBX.
    if(imageFormat == ColorImageFormat_Bgra)
    {
        ERROR_CHECK2(colorFrame->AccessRawUnderlyingBuffer(&colorBufferSize, reinterpret_cast<BYTE**>(&colorBuffer)), "AccessRawUnderlyingBuffer");
    }else if(colorRGBX)
    {
        colorBuffer = colorRGBX;
        colorBufferSize = colorWidth * colorHeight * sizeof(RGBQUAD);
        ERROR_CHECK2(colorFrame->CopyConvertedFrameDataToArray(colorBufferSize, reinterpret_cast<BYTE*>(colorBuffer), ColorImageFormat_Bgra), "CopyConvertedFrameDataToArray");
    }else
    {
        //Error
    }
    // One-time initialization of colorMap and the mapping arrays.
    if(colorMap == NULL)
    {
        colorMap = new float[colorWidth * colorHeight][3];
    }
    if (colorCoordinates == NULL)
    {
        colorCoordinates = new ColorSpacePoint[width * height];
    }
    if (cameraSpacePoints == NULL)
    {
        cameraSpacePoints = new CameraSpacePoint[width * height];
    }
    if (cameraSpacePoints_ave == NULL)
    {
        cameraSpacePoints_ave = new CameraSpacePoint[width * height];
    }
    // Map every depth pixel to color-space and camera-space coordinates.
    ERROR_CHECK2(coordinateMapper->MapDepthFrameToColorSpace( width * height, (UINT16*)depthBuffer, width * height, colorCoordinates), "MapDepthFrameToColorSpace");
    ERROR_CHECK2(coordinateMapper->MapDepthFrameToCameraSpace( width * height, (UINT16*)depthBuffer, width * height, cameraSpacePoints),"MapDepthFrameToCameraSpace");
    // Copy colorBuffer data into colorMap as normalized RGB floats.
    for(int i = 0; i < height; i++){
        for(int j = 0; j < width; j++){
            int index = i * width + j;
            ColorSpacePoint colorPoint = colorCoordinates[index];
            int colorX = (int)(floor(colorPoint.X + 0.5));
            int colorY = (int)(floor(colorPoint.Y + 0.5));
            if(colorX >= 0 && colorX < colorWidth && colorY >= 0 && colorY < colorHeight)
            {
                int colorIndex = colorX + colorY * colorWidth;
                // Destination index is not mirrored (mirror mode undone here).
                float* colorp = colorMap[index];
                // colorBuffer is BGRA, so swap to RGB while normalizing.
                UCHAR* data = (UCHAR*)(colorBuffer + colorIndex);
                colorp[0] = (float)data[2] / 255.0f;
                colorp[1] = (float)data[1] / 255.0f;
                colorp[2] = (float)data[0] / 255.0f;
            }else
            {
                // No valid color for this depth pixel: paint it black.
                float* colorp = colorMap[index];
                colorp[0] = 0;
                colorp[1] = 0;
                colorp[2] = 0;
            }
        }
    }
    // Release the frame resources.
    SafeRelease(colorFrame);
    SafeRelease(multiFrame);
    SafeRelease(bodyIndexFrame);
    SafeRelease(depthFrame);
}