/// <summary>
/// Main processing function
/// </summary>
void CBodyBasics::Update()
{
    if (!m_pBodyFrameReader)
    {
        return;
    }

    IBodyFrame* pBodyFrame = NULL;
    HRESULT hr = m_pBodyFrameReader->AcquireLatestFrame(&pBodyFrame);

    if (SUCCEEDED(hr))
    {
        INT64 nTime = 0;
        hr = pBodyFrame->get_RelativeTime(&nTime);

        IBody* ppBodies[BODY_COUNT] = {0};

        if (SUCCEEDED(hr))
        {
            hr = pBodyFrame->GetAndRefreshBodyData(_countof(ppBodies), ppBodies);
        }

        if (SUCCEEDED(hr))
        {
            ProcessBody(nTime, BODY_COUNT, ppBodies);
        }

        for (int i = 0; i < _countof(ppBodies); ++i)
        {
            SafeRelease(ppBodies[i]);
        }
    }
    SafeRelease(pBodyFrame);

    // Update the color frame
    if (IsDrawColorBase)
    {
        UpdateColorBase();
    }
}
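Most of these snippets call SafeRelease() without showing it. It is the standard COM release helper from the Kinect for Windows SDK samples; for reference:

// Release a COM interface pointer and null it out, tolerating NULL input.
template<class Interface>
inline void SafeRelease(Interface*& pInterfaceToRelease)
{
    if (pInterfaceToRelease != NULL)
    {
        pInterfaceToRelease->Release();
        pInterfaceToRelease = NULL;
    }
}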
void Step()
{
    HRESULT hr;
    IBodyFrame* frame;
    hr = g_kinect.bodyReader_->AcquireLatestFrame( &frame );
    if( hr == E_PENDING ) {
        // No new frame is ready yet.
        return;
    }
    Assert( hr );

    IBody* bodies[ BODY_COUNT ] = {};
    hr = frame->GetAndRefreshBodyData( ARRAYSIZE( bodies ), bodies );
    Assert( hr );

    // test
    g_d3d.jointRot_[ 0 ] = 0;
    g_d3d.jointRot_[ 1 ] = 0;
    g_d3d.jointRot_[ 2 ] = 0;

    for( int bi = 0; bi < ARRAYSIZE( bodies ); ++bi ) {
        BOOLEAN isTracked;
        hr = bodies[ bi ]->get_IsTracked( &isTracked );
        Assert( hr );
        if( isTracked ) {
            JointOrientation jointOrients[ JointType_Count ];
            hr = bodies[ bi ]->GetJointOrientations( ARRAYSIZE( jointOrients ), jointOrients );
            Assert( hr );
            g_d3d.jointRot_[ 0 ] = jointOrients[ JointType_SpineBase ].Orientation.x;
            g_d3d.jointRot_[ 1 ] = jointOrients[ JointType_SpineBase ].Orientation.y;
            g_d3d.jointRot_[ 2 ] = jointOrients[ JointType_SpineBase ].Orientation.z;
            break;
        }
    }

    // This release loop was commented out in the original, which leaks one
    // reference per body every frame; the refreshed bodies must be released.
    for( auto& body : bodies ) {
        if( body ) {
            body->Release();
        }
    }
    frame->Release();
}
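Assert() is not defined in this snippet; presumably it validates an HRESULT. A hypothetical definition consistent with how it is called above:

#include <cassert>

// Hypothetical HRESULT check matching the Assert( hr ) calls in Step().
#define Assert( hr ) assert( SUCCEEDED( hr ) )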
void KinectCapture::GetBodyFrame(IMultiSourceFrame* pMultiFrame)
{
    IBodyFrameReference* pBodyFrameReference = NULL;
    IBodyFrame* pBodyFrame = NULL;
    pMultiFrame->get_BodyFrameReference(&pBodyFrameReference);
    HRESULT hr = pBodyFrameReference->AcquireFrame(&pBodyFrame);

    if (SUCCEEDED(hr))
    {
        IBody* bodies[BODY_COUNT] = { NULL };
        hr = pBodyFrame->GetAndRefreshBodyData(BODY_COUNT, bodies);

        if (SUCCEEDED(hr))
        {
            vBodies = std::vector<Body>(BODY_COUNT);
            for (int i = 0; i < BODY_COUNT; i++)
            {
                if (bodies[i])
                {
                    Joint joints[JointType_Count];
                    BOOLEAN isTracked;

                    bodies[i]->get_IsTracked(&isTracked);
                    bodies[i]->GetJoints(JointType_Count, joints);
                    vBodies[i].vJoints.assign(joints, joints + JointType_Count);
                    vBodies[i].bTracked = isTracked;

                    vBodies[i].vJointsInColorSpace.resize(JointType_Count);
                    for (int j = 0; j < JointType_Count; j++)
                    {
                        ColorSpacePoint tempPoint;
                        pCoordinateMapper->MapCameraPointToColorSpace(joints[j].Position, &tempPoint);
                        vBodies[i].vJointsInColorSpace[j].X = tempPoint.X;
                        vBodies[i].vJointsInColorSpace[j].Y = tempPoint.Y;
                    }
                }
            }
        }

        // Release the per-body interfaces; the original leaked them.
        for (int i = 0; i < BODY_COUNT; i++)
        {
            SafeRelease(bodies[i]);
        }
    }

    SafeRelease(pBodyFrame);
    SafeRelease(pBodyFrameReference);
}
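The Body container is not shown in this snippet; a minimal sketch consistent with the fields it populates (the names are assumed from usage, the real project may differ):

// Hypothetical value type matching the fields GetBodyFrame() fills in.
struct Body
{
    BOOLEAN bTracked = FALSE;
    std::vector<Joint>  vJoints;             // camera-space joints
    std::vector<PointF> vJointsInColorSpace; // joints projected into the color image
};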
void KinectV2Module::updateKinect()
{
#if JUCE_WINDOWS
    if (!m_pBodyFrameReader)
    {
        return;
    }

    IBodyFrame* pBodyFrame = NULL;
    HRESULT hr = m_pBodyFrameReader->AcquireLatestFrame(&pBodyFrame);

    if (SUCCEEDED(hr))
    {
        INT64 nTime = 0;
        hr = pBodyFrame->get_RelativeTime(&nTime);

        IBody* ppBodies[BODY_COUNT] = { 0 };

        if (SUCCEEDED(hr))
        {
            hr = pBodyFrame->GetAndRefreshBodyData(_countof(ppBodies), ppBodies);
        }

        if (SUCCEEDED(hr))
        {
            processBody(BODY_COUNT, ppBodies);
        }

        for (int i = 0; i < _countof(ppBodies); ++i)
        {
            SafeRelease(ppBodies[i]);
        }
    }
    SafeRelease(pBodyFrame);
#endif
}
void MSKinectService::pollBody()
{
    if ( bodyFrameReader == NULL )
    {
        return;
    }

    IBodyFrame* pBodyFrame = NULL;
    HRESULT hr = bodyFrameReader->AcquireLatestFrame(&pBodyFrame);

    if (SUCCEEDED(hr))
    {
        INT64 nTime = 0;
        hr = pBodyFrame->get_RelativeTime(&nTime);

        IBody* ppBodies[BODY_COUNT] = {0};

        if (SUCCEEDED(hr))
        {
            hr = pBodyFrame->GetAndRefreshBodyData(_countof(ppBodies), ppBodies);
        }

        if (SUCCEEDED(hr))
        {
            ProcessBody(nTime, BODY_COUNT, ppBodies);
        }

        for (int i = 0; i < _countof(ppBodies); ++i)
        {
            SafeRelease(ppBodies[i]);
        }
    }
    SafeRelease(pBodyFrame);
}
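All of the snippets so far poll AcquireLatestFrame() on a timer or a tight loop. The SDK also supports event-driven delivery through SubscribeFrameArrived(). A minimal sketch, assuming an already-opened IBodyFrameReader* pReader and a quit flag maintained elsewhere (error handling trimmed):

// Event-driven alternative to polling: subscribe, wait, then acquire.
WAITABLE_HANDLE hBodyEvent = 0;
if (SUCCEEDED(pReader->SubscribeFrameArrived(&hBodyEvent)))
{
    bool quit = false; // assumed to be set by the application's shutdown path
    while (!quit)
    {
        // Block until the runtime signals a new body frame (100 ms timeout).
        if (WaitForSingleObject(reinterpret_cast<HANDLE>(hBodyEvent), 100) != WAIT_OBJECT_0)
        {
            continue; // timed out; loop around and re-check the quit flag
        }

        IBodyFrameArrivedEventArgs* pArgs = NULL;
        if (SUCCEEDED(pReader->GetFrameArrivedEventData(hBodyEvent, &pArgs)))
        {
            IBodyFrameReference* pRef = NULL;
            if (SUCCEEDED(pArgs->get_FrameReference(&pRef)))
            {
                IBodyFrame* pFrame = NULL;
                if (SUCCEEDED(pRef->AcquireFrame(&pFrame)))
                {
                    // ... GetAndRefreshBodyData() exactly as in the polling snippets ...
                    pFrame->Release();
                }
                pRef->Release();
            }
            pArgs->Release();
        }
    }
    pReader->UnsubscribeFrameArrived(hBodyEvent);
}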
/// Main processing function
void CBodyBasics::Update()
{
    clear = true;
    for (int i = 0; i < BODY_COUNT; i++)
    {
        bodyXY[i][0] = bodyXY[i][1] = -1;
        position[i][0] = position[i][1] = -1;
        angle[i] = -1;
        distance = -1;
    }

    // Clear skeletonImg before each update
    skeletonImg.setTo(0);

    // If the Kinect has been lost, do nothing
    if (!m_pBodyFrameReader)
    {
        return;
    }

    IBodyFrame* pBodyFrame = NULL;           // skeleton data
    IDepthFrame* pDepthFrame = NULL;         // depth data
    IBodyIndexFrame* pBodyIndexFrame = NULL; // body index (binary silhouette) data

    // Tracks whether each step succeeded
    HRESULT hr = S_OK;

    //---------------- Acquire and display the body index frame ----------------
    if (SUCCEEDED(hr))
    {
        hr = m_pBodyIndexFrameReader->AcquireLatestFrame(&pBodyIndexFrame); // get the body index frame
    }
    if (SUCCEEDED(hr))
    {
        // The body index frame is 8-bit uchar data: pixels covered by a
        // person are dark, the background is light.
        BYTE* bodyIndexArray = new BYTE[cDepthHeight * cDepthWidth];
        pBodyIndexFrame->CopyFrameDataToArray(cDepthHeight * cDepthWidth, bodyIndexArray);

        // Copy the body index data into the Mat, replicated across 3 channels
        uchar* skeletonData = (uchar*)skeletonImg.data;
        for (int j = 0; j < cDepthHeight * cDepthWidth; ++j)
        {
            *skeletonData = bodyIndexArray[j]; ++skeletonData;
            *skeletonData = bodyIndexArray[j]; ++skeletonData;
            *skeletonData = bodyIndexArray[j]; ++skeletonData;
        }
        delete[] bodyIndexArray;
    }
    SafeRelease(pBodyIndexFrame); // must be released, or no new frames can be acquired

    //---------------- Acquire and draw the skeleton ----------------
    if (SUCCEEDED(hr))
    {
        hr = m_pBodyFrameReader->AcquireLatestFrame(&pBodyFrame); // get the body frame
    }
    if (SUCCEEDED(hr))
    {
        // Each IBody tracks one person; up to six people can be tracked.
        IBody* ppBodies[BODY_COUNT] = { 0 };

        if (SUCCEEDED(hr))
        {
            // Store the data for each person the Kinect tracks into its own IBody
            hr = pBodyFrame->GetAndRefreshBodyData(_countof(ppBodies), ppBodies);
        }
        if (SUCCEEDED(hr))
        {
            // For each IBody, look up its skeleton and draw it
            ProcessBody(BODY_COUNT, ppBodies);
        }
        for (int i = 0; i < _countof(ppBodies); ++i)
        {
            SafeRelease(ppBodies[i]); // release them all
        }
    }
    SafeRelease(pBodyFrame); // must be released, or no new frames can be acquired

    //---------------- Acquire and display the depth data ----------------
    if (SUCCEEDED(hr))
    {
        hr = m_pDepthFrameReader->AcquireLatestFrame(&pDepthFrame); // get the depth frame
    }
    if (SUCCEEDED(hr))
    {
        UINT16* depthArray = new UINT16[cDepthHeight * cDepthWidth]; // depth data is 16-bit unsigned int
        pDepthFrame->CopyFrameDataToArray(cDepthHeight * cDepthWidth, depthArray);

        // Copy the depth data into the Mat (truncates each sample to its low byte)
        uchar* depthData = (uchar*)depthImg.data;
        for (int j = 0; j < cDepthHeight * cDepthWidth; ++j)
        {
            *depthData = depthArray[j];
            ++depthData;
        }

        distance = depthArray[cDepthHeight * cDepthWidth / 2 + cDepthWidth / 2];
        for (int j = 0; j < BODY_COUNT; j++)
        {
            if (-1 == (bodyXY[j][0] | bodyXY[j][1]))
            {
                continue;
            }
            double r = depthArray[cDepthWidth * bodyXY[j][1] + bodyXY[j][0]];
            position[j][0] = r * cos(angle[j]) / 1000.0;
            position[j][1] = r * sin(angle[j]) / 1000.0;
        }
        delete[] depthArray;
    }
    SafeRelease(pDepthFrame); // must be released, or no new frames can be acquired

    imshow("depthImg", depthImg);
    cv::waitKey(5);
}
int main(int argc, char** argv)
{
    // 1a. Get default sensor
    std::cout << "Try to get default sensor" << std::endl;
    IKinectSensor* pSensor = nullptr;
    if (GetDefaultKinectSensor(&pSensor) != S_OK)
    {
        std::cerr << "Get Sensor failed" << std::endl;
        return -1;
    }

    // 1b. Open sensor
    std::cout << "Try to open sensor" << std::endl;
    if (pSensor->Open() != S_OK)
    {
        std::cerr << "Can't open sensor" << std::endl;
        return -1;
    }

    // 2. Color related code
    IColorFrameReader* pColorFrameReader = nullptr;
    cv::Mat mColorImg;
    UINT uBufferSize = 0;
    {
        // 2a. Get color frame source
        std::cout << "Try to get color source" << std::endl;
        IColorFrameSource* pFrameSource = nullptr;
        if (pSensor->get_ColorFrameSource(&pFrameSource) != S_OK)
        {
            std::cerr << "Can't get color frame source" << std::endl;
            return -1;
        }

        // 2b. Get frame description
        std::cout << "get color frame description" << std::endl;
        int iWidth = 0;
        int iHeight = 0;
        IFrameDescription* pFrameDescription = nullptr;
        if (pFrameSource->get_FrameDescription(&pFrameDescription) == S_OK)
        {
            pFrameDescription->get_Width(&iWidth);
            pFrameDescription->get_Height(&iHeight);
        }
        pFrameDescription->Release();
        pFrameDescription = nullptr;

        // 2c. Get frame reader
        std::cout << "Try to get color frame reader" << std::endl;
        if (pFrameSource->OpenReader(&pColorFrameReader) != S_OK)
        {
            std::cerr << "Can't get color frame reader" << std::endl;
            return -1;
        }

        // 2d. Release frame source
        std::cout << "Release frame source" << std::endl;
        pFrameSource->Release();
        pFrameSource = nullptr;

        // Prepare OpenCV data
        mColorImg = cv::Mat(iHeight, iWidth, CV_8UC4);
        uBufferSize = iHeight * iWidth * 4 * sizeof(BYTE);
    }

    // 3. Body related code
    IBodyFrameReader* pBodyFrameReader = nullptr;
    IBody** aBodyData = nullptr;
    INT32 iBodyCount = 0;
    {
        // 3a. Get frame source
        std::cout << "Try to get body source" << std::endl;
        IBodyFrameSource* pFrameSource = nullptr;
        if (pSensor->get_BodyFrameSource(&pFrameSource) != S_OK)
        {
            std::cerr << "Can't get body frame source" << std::endl;
            return -1;
        }

        // 3b. Get the number of bodies
        if (pFrameSource->get_BodyCount(&iBodyCount) != S_OK)
        {
            std::cerr << "Can't get body count" << std::endl;
            return -1;
        }
        std::cout << " > Can trace " << iBodyCount << " bodies" << std::endl;
        aBodyData = new IBody*[iBodyCount];
        for (int i = 0; i < iBodyCount; ++i)
            aBodyData[i] = nullptr;

        // 3c. Get frame reader
        std::cout << "Try to get body frame reader" << std::endl;
        if (pFrameSource->OpenReader(&pBodyFrameReader) != S_OK)
        {
            std::cerr << "Can't get body frame reader" << std::endl;
            return -1;
        }

        // 3d. Release frame source
        std::cout << "Release frame source" << std::endl;
        pFrameSource->Release();
        pFrameSource = nullptr;
    }

    // 4. Get CoordinateMapper
    ICoordinateMapper* pCoordinateMapper = nullptr;
    if (pSensor->get_CoordinateMapper(&pCoordinateMapper) != S_OK)
    {
        std::cout << "Can't get coordinate mapper" << std::endl;
        return -1;
    }

    // Enter main loop
    cv::namedWindow("Body Image");

    // Debug: output the velocity of the joints
    ofstream current_average_velocityTXT("current_average_velocity.txt");
    ofstream average_velocityTXT("average_velocity.txt");

    int frame_count = 0;
    int frame_count_for_standby = 0;
    float positionX0[25] = { 0 };
    float positionX1[25] = { 0 };
    float positionY0[25] = { 0 };
    float positionY1[25] = { 0 };
    float positionZ0[25] = { 0 };
    float positionZ1[25] = { 0 };
    float velocityX[25] = { 0 };
    float velocityY[25] = { 0 };
    float velocityZ[25] = { 0 };
    float current_velocity[25] = { 0 };
    float velocityee[8] = { 0 };
    float current_total_velocity = 0;
    float current_average_velocity = 0;
    float total_velocity = 0;
    float average_velocity = 0;

    while (true)
    {
        // Get the latest color frame
        IColorFrame* pColorFrame = nullptr;
        if (pColorFrameReader->AcquireLatestFrame(&pColorFrame) == S_OK)
        {
            // Copy to the OpenCV image
            if (pColorFrame->CopyConvertedFrameDataToArray(uBufferSize, mColorImg.data, ColorImageFormat_Bgra) != S_OK)
            {
                std::cerr << "Data copy error" << std::endl;
            }
            // Release frame
            pColorFrame->Release();
        }
        cv::Mat mImg = mColorImg.clone();

        // Get body data
        IBodyFrame* pBodyFrame = nullptr;
        if (pBodyFrameReader->AcquireLatestFrame(&pBodyFrame) == S_OK)
        {
            if (pBodyFrame->GetAndRefreshBodyData(iBodyCount, aBodyData) == S_OK)
            {
                // For each body
                for (int i = 0; i < iBodyCount; ++i)
                {
                    IBody* pBody = aBodyData[i];

                    // Check if it is tracked
                    BOOLEAN bTracked = false;
                    if ((pBody->get_IsTracked(&bTracked) == S_OK) && bTracked)
                    {
                        // Get joint positions and draw the skeleton
                        Joint aJoints[JointType::JointType_Count];
                        if (pBody->GetJoints(JointType::JointType_Count, aJoints) == S_OK)
                        {
                            DrawLine(mImg, aJoints[JointType_SpineBase], aJoints[JointType_SpineMid], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_SpineMid], aJoints[JointType_SpineShoulder], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_SpineShoulder], aJoints[JointType_Neck], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_Neck], aJoints[JointType_Head], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_SpineShoulder], aJoints[JointType_ShoulderLeft], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_ShoulderLeft], aJoints[JointType_ElbowLeft], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_ElbowLeft], aJoints[JointType_WristLeft], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_WristLeft], aJoints[JointType_HandLeft], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_HandLeft], aJoints[JointType_HandTipLeft], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_HandLeft], aJoints[JointType_ThumbLeft], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_SpineShoulder], aJoints[JointType_ShoulderRight], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_ShoulderRight], aJoints[JointType_ElbowRight], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_ElbowRight], aJoints[JointType_WristRight], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_WristRight], aJoints[JointType_HandRight], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_HandRight], aJoints[JointType_HandTipRight], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_HandRight], aJoints[JointType_ThumbRight], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_SpineBase], aJoints[JointType_HipLeft], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_HipLeft], aJoints[JointType_KneeLeft], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_KneeLeft], aJoints[JointType_AnkleLeft], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_AnkleLeft], aJoints[JointType_FootLeft], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_SpineBase], aJoints[JointType_HipRight], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_HipRight], aJoints[JointType_KneeRight], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_KneeRight], aJoints[JointType_AnkleRight], pCoordinateMapper);
                            DrawLine(mImg, aJoints[JointType_AnkleRight], aJoints[JointType_FootRight], pCoordinateMapper);
                        }

                        // Debug: print the frame number
                        std::cout << "frame " << ++frame_count << std::endl;

                        // Shift the 8-sample velocity history (newest sample lives
                        // in velocityee[0]). The shift must run from the back of
                        // the array; the original forward loop smeared
                        // velocityee[0] over every slot.
                        for (int j = 7; j >= 1; j--)
                        {
                            velocityee[j] = velocityee[j - 1];
                            total_velocity += velocityee[j];
                        }
                        average_velocity = total_velocity / 8.0;

                        // Determine whether the person is still
                        if (average_velocity <= 0.0015)
                        {
                            if (frame_count_for_standby == 0)
                            {
                                PlaySound(TEXT("Alarm02.wav"), NULL, SND_FILENAME);
                                std::cout << "Start capturing points!" << std::endl;
                            }
                            // Count the frames whose velocity is below the threshold
                            frame_count_for_standby++;
                            if (frame_count_for_standby >= 5)
                            {
                                frame_count_for_standby = 0;
                            }
                        }

                        // Debug: output the average velocity
                        average_velocityTXT << frame_count << " " << average_velocity << std::endl;
                        total_velocity = 0;

                        // Update the per-joint velocities
                        int available_joints = 0;
                        for (int j = 0; j < 25; j++)
                        {
                            // X
                            positionX1[j] = positionX0[j];
                            positionX0[j] = aJoints[j].Position.X;
                            velocityX[j] = (positionX1[j] - positionX0[j]) * (positionX1[j] - positionX0[j]);
                            // Y
                            positionY1[j] = positionY0[j];
                            positionY0[j] = aJoints[j].Position.Y;
                            velocityY[j] = (positionY1[j] - positionY0[j]) * (positionY1[j] - positionY0[j]);
                            // Z
                            positionZ1[j] = positionZ0[j];
                            positionZ0[j] = aJoints[j].Position.Z;
                            velocityZ[j] = (positionZ1[j] - positionZ0[j]) * (positionZ1[j] - positionZ0[j]);

                            current_velocity[j] = sqrtf(velocityX[j] + velocityY[j] + velocityZ[j]);

                            // Exclude discontinuous (outlier) velocities
                            if (current_velocity[j] < 0.01)
                            {
                                current_total_velocity += current_velocity[j];
                                available_joints++;
                            }
                        }
                        // If no joint is available, keep the velocity of the last frame
                        if (available_joints != 0)
                        {
                            current_average_velocity = current_total_velocity / available_joints;
                        }
                        velocityee[0] = current_average_velocity;

                        // Debug: output the current average velocity
                        current_average_velocityTXT << frame_count << " " << current_average_velocity << std::endl;
                        current_total_velocity = 0;
                    }
                }
            }
            else
            {
                std::cerr << "Can't read body data" << std::endl;
            }
            // Release frame
            pBodyFrame->Release();
        }

        // Show image
        cv::imshow("Body Image", mImg);

        // Check keyboard input
        if (cv::waitKey(30) == VK_ESCAPE)
        {
            break;
        }
    }

    // Delete body data array
    delete[] aBodyData;

    // Release body frame reader
    std::cout << "Release body frame reader" << std::endl;
    pBodyFrameReader->Release();
    pBodyFrameReader = nullptr;

    // Release color frame reader
    std::cout << "Release color frame reader" << std::endl;
    pColorFrameReader->Release();
    pColorFrameReader = nullptr;

    // 1c. Close sensor
    std::cout << "close sensor" << std::endl;
    pSensor->Close();

    // 1d. Release sensor
    std::cout << "Release sensor" << std::endl;
    pSensor->Release();
    pSensor = nullptr;

    return 0;
}
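DrawLine() is not part of the listing. A plausible sketch, assuming it maps two camera-space joints into color space and connects them with cv::line (the color, thickness, and tracking-state check are assumptions):

// Hypothetical helper: project two joints into color space and connect them.
void DrawLine(cv::Mat& img, const Joint& j1, const Joint& j2, ICoordinateMapper* pMapper)
{
    // Skip joints the sensor is not tracking at all.
    if (j1.TrackingState == TrackingState_NotTracked || j2.TrackingState == TrackingState_NotTracked)
        return;

    ColorSpacePoint p1, p2;
    pMapper->MapCameraPointToColorSpace(j1.Position, &p1);
    pMapper->MapCameraPointToColorSpace(j2.Position, &p2);

    cv::line(img,
             cv::Point((int)p1.X, (int)p1.Y),
             cv::Point((int)p2.X, (int)p2.Y),
             cv::Scalar(0, 0, 255), 3);
}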
void Device::update()
{
    if ( mSensor != 0 ) {
        mSensor->get_Status( &mStatus );
    }
    if ( mFrameReader == 0 ) {
        return;
    }

    IAudioBeamFrame* audioFrame = 0;
    IBodyFrame* bodyFrame = 0;
    IBodyIndexFrame* bodyIndexFrame = 0;
    IColorFrame* colorFrame = 0;
    IDepthFrame* depthFrame = 0;
    IMultiSourceFrame* frame = 0;
    IInfraredFrame* infraredFrame = 0;
    ILongExposureInfraredFrame* infraredLongExposureFrame = 0;

    HRESULT hr = mFrameReader->AcquireLatestFrame( &frame );
    // TODO audio

    if ( SUCCEEDED( hr ) ) {
        console() << "SUCCEEDED " << getElapsedFrames() << endl; // debug log
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isBodyEnabled() ) {
        IBodyFrameReference* frameRef = 0;
        hr = frame->get_BodyFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &bodyFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isBodyIndexEnabled() ) {
        IBodyIndexFrameReference* frameRef = 0;
        hr = frame->get_BodyIndexFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &bodyIndexFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isColorEnabled() ) {
        IColorFrameReference* frameRef = 0;
        hr = frame->get_ColorFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &colorFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isDepthEnabled() ) {
        IDepthFrameReference* frameRef = 0;
        hr = frame->get_DepthFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &depthFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isInfraredEnabled() ) {
        IInfraredFrameReference* frameRef = 0;
        hr = frame->get_InfraredFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &infraredFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isInfraredLongExposureEnabled() ) {
        ILongExposureInfraredFrameReference* frameRef = 0;
        hr = frame->get_LongExposureInfraredFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &infraredLongExposureFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) ) {
        long long time = 0L;

        // TODO audio

        IFrameDescription* bodyFrameDescription = 0;
        int32_t bodyWidth = 0;
        int32_t bodyHeight = 0;
        uint32_t bodyBufferSize = 0;
        uint8_t* bodyBuffer = 0;

        IFrameDescription* bodyIndexFrameDescription = 0;
        int32_t bodyIndexWidth = 0;
        int32_t bodyIndexHeight = 0;
        uint32_t bodyIndexBufferSize = 0;
        uint8_t* bodyIndexBuffer = 0;

        IFrameDescription* colorFrameDescription = 0;
        int32_t colorWidth = 0;
        int32_t colorHeight = 0;
        ColorImageFormat imageFormat = ColorImageFormat_None;
        uint32_t colorBufferSize = 0;
        uint8_t* colorBuffer = 0;

        IFrameDescription* depthFrameDescription = 0;
        int32_t depthWidth = 0;
        int32_t depthHeight = 0;
        uint16_t depthMinReliableDistance = 0;
        uint16_t depthMaxReliableDistance = 0;
        uint32_t depthBufferSize = 0;
        uint16_t* depthBuffer = 0;

        IFrameDescription* infraredFrameDescription = 0;
        int32_t infraredWidth = 0;
        int32_t infraredHeight = 0;
        uint32_t infraredBufferSize = 0;
        uint16_t* infraredBuffer = 0;

        IFrameDescription* infraredLongExposureFrameDescription = 0;
        int32_t infraredLongExposureWidth = 0;
        int32_t infraredLongExposureHeight = 0;
        uint32_t infraredLongExposureBufferSize = 0;
        uint16_t* infraredLongExposureBuffer = 0;

        // Guard the time stamp read: the original called this unconditionally,
        // which dereferences a null pointer when the depth stream is disabled.
        if ( depthFrame != 0 ) {
            hr = depthFrame->get_RelativeTime( &time );
        }

        // TODO audio
        if ( mDeviceOptions.isAudioEnabled() ) {
        }

        // TODO body
        if ( mDeviceOptions.isBodyEnabled() ) {
        }

        if ( mDeviceOptions.isBodyIndexEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = bodyIndexFrame->get_FrameDescription( &bodyIndexFrameDescription );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = bodyIndexFrameDescription->get_Width( &bodyIndexWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = bodyIndexFrameDescription->get_Height( &bodyIndexHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                //hr = bodyIndexFrame->AccessUnderlyingBuffer( &bodyIndexBufferSize, &bodyIndexBuffer );
            }
        }

        if ( mDeviceOptions.isColorEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = colorFrame->get_FrameDescription( &colorFrameDescription );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = colorFrameDescription->get_Width( &colorWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = colorFrameDescription->get_Height( &colorHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = colorFrame->get_RawColorImageFormat( &imageFormat );
            }
            if ( SUCCEEDED( hr ) ) {
                bool isAllocated = false;
                SurfaceChannelOrder channelOrder = SurfaceChannelOrder::BGRA;
                if ( imageFormat == ColorImageFormat_Bgra ) {
                    hr = colorFrame->AccessRawUnderlyingBuffer( &colorBufferSize, reinterpret_cast<uint8_t**>( &colorBuffer ) );
                    channelOrder = SurfaceChannelOrder::BGRA;
                } else if ( imageFormat == ColorImageFormat_Rgba ) {
                    hr = colorFrame->AccessRawUnderlyingBuffer( &colorBufferSize, reinterpret_cast<uint8_t**>( &colorBuffer ) );
                    channelOrder = SurfaceChannelOrder::RGBA;
                } else {
                    // Any other raw format: convert into a temporary RGBA buffer.
                    isAllocated = true;
                    colorBufferSize = colorWidth * colorHeight * sizeof( uint8_t ) * 4;
                    colorBuffer = new uint8_t[ colorBufferSize ];
                    hr = colorFrame->CopyConvertedFrameDataToArray( colorBufferSize, reinterpret_cast<uint8_t*>( colorBuffer ), ColorImageFormat_Rgba );
                    channelOrder = SurfaceChannelOrder::RGBA;
                }

                if ( SUCCEEDED( hr ) ) {
                    colorFrame->get_RelativeTime( &time );
                    Surface8u colorSurface = Surface8u( colorBuffer, colorWidth, colorHeight, colorWidth * sizeof( uint8_t ) * 4, channelOrder );
                    mFrame.mSurfaceColor = Surface8u( colorWidth, colorHeight, false, channelOrder );
                    mFrame.mSurfaceColor.copyFrom( colorSurface, colorSurface.getBounds() );

                    console() << "Color\n\twidth: " << colorWidth
                        << "\n\theight: " << colorHeight
                        << "\n\tbuffer size: " << colorBufferSize
                        << "\n\ttime: " << time << endl;
                }

                if ( isAllocated && colorBuffer != 0 ) {
                    delete[] colorBuffer;
                    colorBuffer = 0;
                }
            }
        }

        if ( mDeviceOptions.isDepthEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrame->get_FrameDescription( &depthFrameDescription );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrameDescription->get_Width( &depthWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrameDescription->get_Height( &depthHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrame->get_DepthMinReliableDistance( &depthMinReliableDistance );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrame->get_DepthMaxReliableDistance( &depthMaxReliableDistance );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrame->AccessUnderlyingBuffer( &depthBufferSize, &depthBuffer );
            }
            if ( SUCCEEDED( hr ) ) {
                Channel16u depthChannel = Channel16u( depthWidth, depthHeight, depthWidth * sizeof( uint16_t ), 1, depthBuffer );
                mFrame.mChannelDepth = Channel16u( depthWidth, depthHeight );
                mFrame.mChannelDepth.copyFrom( depthChannel, depthChannel.getBounds() );

                console() << "Depth\n\twidth: " << depthWidth << "\n\theight: " << depthHeight << endl;
            }
        }

        if ( mDeviceOptions.isInfraredEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = infraredFrame->get_FrameDescription( &infraredFrameDescription );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredFrameDescription->get_Width( &infraredWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredFrameDescription->get_Height( &infraredHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredFrame->AccessUnderlyingBuffer( &infraredBufferSize, &infraredBuffer );
            }
            if ( SUCCEEDED( hr ) ) {
                Channel16u infraredChannel = Channel16u( infraredWidth, infraredHeight, infraredWidth * sizeof( uint16_t ), 1, infraredBuffer );
                mFrame.mChannelInfrared = Channel16u( infraredWidth, infraredHeight );
                mFrame.mChannelInfrared.copyFrom( infraredChannel, infraredChannel.getBounds() );

                console() << "Infrared\n\twidth: " << infraredWidth << "\n\theight: " << infraredHeight << endl;
            }
        }

        if ( mDeviceOptions.isInfraredLongExposureEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = infraredLongExposureFrame->get_FrameDescription( &infraredLongExposureFrameDescription );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredLongExposureFrameDescription->get_Width( &infraredLongExposureWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredLongExposureFrameDescription->get_Height( &infraredLongExposureHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredLongExposureFrame->AccessUnderlyingBuffer( &infraredLongExposureBufferSize, &infraredLongExposureBuffer );
            }
            if ( SUCCEEDED( hr ) ) {
                Channel16u infraredLongExposureChannel = Channel16u( infraredLongExposureWidth, infraredLongExposureHeight, infraredLongExposureWidth * sizeof( uint16_t ), 1, infraredLongExposureBuffer );
                mFrame.mChannelInfraredLongExposure = Channel16u( infraredLongExposureWidth, infraredLongExposureHeight );
                mFrame.mChannelInfraredLongExposure.copyFrom( infraredLongExposureChannel, infraredLongExposureChannel.getBounds() );

                int64_t irLongExpTime = 0;
                hr = infraredLongExposureFrame->get_RelativeTime( &irLongExpTime );

                console() << "Infrared Long Exposure\n\twidth: " << infraredLongExposureWidth
                    << "\n\theight: " << infraredLongExposureHeight;
                if ( SUCCEEDED( hr ) ) {
                    console() << "\n\ttimestamp: " << irLongExpTime;
                }
                console() << endl;
            }
        }

        if ( SUCCEEDED( hr ) ) {
            // TODO build Kinect2::Frame from buffers, data
            mFrame.mTimeStamp = time;
        }

        if ( bodyFrameDescription != 0 ) {
            bodyFrameDescription->Release();
            bodyFrameDescription = 0;
        }
        if ( bodyIndexFrameDescription != 0 ) {
            bodyIndexFrameDescription->Release();
            bodyIndexFrameDescription = 0;
        }
        if ( colorFrameDescription != 0 ) {
            colorFrameDescription->Release();
            colorFrameDescription = 0;
        }
        if ( depthFrameDescription != 0 ) {
            depthFrameDescription->Release();
            depthFrameDescription = 0;
        }
        if ( infraredFrameDescription != 0 ) {
            infraredFrameDescription->Release();
            infraredFrameDescription = 0;
        }
        if ( infraredLongExposureFrameDescription != 0 ) {
            infraredLongExposureFrameDescription->Release();
            infraredLongExposureFrameDescription = 0;
        }
    }

    if ( audioFrame != 0 ) {
        audioFrame->Release();
        audioFrame = 0;
    }
    if ( bodyFrame != 0 ) {
        bodyFrame->Release();
        bodyFrame = 0;
    }
    if ( bodyIndexFrame != 0 ) {
        bodyIndexFrame->Release();
        bodyIndexFrame = 0;
    }
    if ( colorFrame != 0 ) {
        colorFrame->Release();
        colorFrame = 0;
    }
    if ( depthFrame != 0 ) {
        depthFrame->Release();
        depthFrame = 0;
    }
    if ( frame != 0 ) {
        frame->Release();
        frame = 0;
    }
    if ( infraredFrame != 0 ) {
        infraredFrame->Release();
        infraredFrame = 0;
    }
    if ( infraredLongExposureFrame != 0 ) {
        infraredLongExposureFrame->Release();
        infraredLongExposureFrame = 0;
    }
}
int main()
{
    printf("Hello, welcome to the Kinect world!\n");

    IKinectSensor* bb;                          // pointer for the sensor
    HRESULT hr = GetDefaultKinectSensor(&bb);   // get the default sensor
    if ( FAILED(hr) )
    {
        printf("No Kinect connected to your PC!\n");
        goto endstop;
    }

    BOOLEAN bIsOpen = 0;
    bb->get_IsOpen(&bIsOpen);                   // check whether it is already open
    printf("bIsOpen: %d\n", bIsOpen);

    if ( !bIsOpen )                             // not open yet, try to open it
    {
        hr = bb->Open();
        if ( FAILED(hr) )
        {
            printf("Kinect Open Failed!\n");
            goto endstop;
        }
        printf("Kinect opened! But it needs some time to start working!\n");
        // Be sure to wait a while here, otherwise the checks below all
        // report wrong results
        printf("Wait For 3000 ms...\n");
        Sleep(3000);
    }

    bIsOpen = 0;
    bb->get_IsOpen(&bIsOpen);                   // is it open now?
    printf("bIsOpen: %d\n", bIsOpen);

    BOOLEAN bAvaliable = 0;
    bb->get_IsAvailable(&bAvaliable);           // is it available?
    printf("bAvaliable: %d\n", bAvaliable);

    DWORD dwCapability = 0;
    bb->get_KinectCapabilities(&dwCapability);  // get the capability flags
    printf("dwCapability: %lu\n", dwCapability);

    WCHAR bbuid[256] = { 0 };
    bb->get_UniqueKinectId(256, bbuid);         // get the unique ID
    printf("UID: %ls\n", bbuid);                // wide string, so %ls rather than %s

    // Audio data acquisition

    // Get body data
    IBodyFrameSource* bodys = nullptr;
    bb->get_BodyFrameSource(&bodys);            // body data source
    INT32 nBodyNum = 0;
    bodys->get_BodyCount(&nBodyNum);            // body count; not very useful, it is always 6
    printf("Body Num: %d\n", nBodyNum);
    IBodyFrameReader* bodyr = nullptr;
    bodys->OpenReader(&bodyr);                  // prepare to read body data

    while (true)
    {
        IBodyFrame* bodyf = nullptr;
        bodyr->AcquireLatestFrame(&bodyf);      // acquire the latest frame
        if ( !bodyf )
        {
            Sleep(100);
            printf(".");
            continue;
        }

        IBody* ppBodies[BODY_COUNT] = { 0 };
        bodyf->GetAndRefreshBodyData(BODY_COUNT, ppBodies); // refresh the body data for everyone

        for (int i = 0; i < BODY_COUNT; ++i)
        {
            IBody* pBody = ppBodies[i];         // poll each person's data
            if (pBody)
            {
                BOOLEAN bTracked = false;
                hr = pBody->get_IsTracked(&bTracked); // is this body tracked, i.e. is the person there?
                if (bTracked)
                {
                    Joint joints[JointType_Count];
                    HandState leftHandState = HandState_Unknown;
                    HandState rightHandState = HandState_Unknown;
                    pBody->get_HandLeftState(&leftHandState);   // get the left hand state
                    pBody->get_HandRightState(&rightHandState); // get the right hand state
                    hr = pBody->GetJoints(_countof(joints), joints); // get the skeleton, 25 joints

                    // Simply print some of the data (the original printed
                    // Position.X under a "Z" label; they now agree)
                    printf("Person %d : Joints[0].Z %.2f\n", i, joints[0].Position.Z);
                }
            }
        }

        for (int i = 0; i < BODY_COUNT; ++i)
        {
            if (ppBodies[i])
            {
                ppBodies[i]->Release();
            }
        }
        bodyf->Release();
    }

endclose:
    bb->Close();
endstop:
    system("pause");
    return 0;
}
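The loop above fetches the hand states but never uses them. A small hypothetical helper for turning the SDK's HandState enum into text, should you want to extend the printf:

// Readable names for the SDK's HandState enum values.
const char* HandStateName(HandState state)
{
    switch (state)
    {
    case HandState_Open:       return "open";
    case HandState_Closed:     return "closed";
    case HandState_Lasso:      return "lasso";
    case HandState_NotTracked: return "not tracked";
    default:                   return "unknown";
    }
}

// e.g. printf("Person %d : left hand %s\n", i, HandStateName(leftHandState));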
/// <summary>
/// Main processing function
/// </summary>
void CCoordinateMappingBasics::Update()
{
    if (!m_pMultiSourceFrameReader)
    {
        return;
    }

    IMultiSourceFrame* pMultiSourceFrame = NULL;
    IDepthFrame* pDepthFrame = NULL;
    IColorFrame* pColorFrame = NULL;
    IBodyIndexFrame* pBodyIndexFrame = NULL;
    IBodyFrame* pBodyFrame = NULL;

    HRESULT hr = m_pMultiSourceFrameReader->AcquireLatestFrame(&pMultiSourceFrame);

    if (SUCCEEDED(hr))
    {
        IDepthFrameReference* pDepthFrameReference = NULL;
        hr = pMultiSourceFrame->get_DepthFrameReference(&pDepthFrameReference);
        if (SUCCEEDED(hr))
        {
            hr = pDepthFrameReference->AcquireFrame(&pDepthFrame);
        }
        SafeRelease(pDepthFrameReference);
    }

    if (SUCCEEDED(hr))
    {
        IColorFrameReference* pColorFrameReference = NULL;
        hr = pMultiSourceFrame->get_ColorFrameReference(&pColorFrameReference);
        if (SUCCEEDED(hr))
        {
            hr = pColorFrameReference->AcquireFrame(&pColorFrame);
        }
        SafeRelease(pColorFrameReference);
    }

    if (SUCCEEDED(hr))
    {
        IBodyIndexFrameReference* pBodyIndexFrameReference = NULL;
        hr = pMultiSourceFrame->get_BodyIndexFrameReference(&pBodyIndexFrameReference);
        if (SUCCEEDED(hr))
        {
            hr = pBodyIndexFrameReference->AcquireFrame(&pBodyIndexFrame);
        }
        SafeRelease(pBodyIndexFrameReference);
    }

    if (SUCCEEDED(hr))
    {
        IBodyFrameReference* pBodyFrameReference = NULL;
        hr = pMultiSourceFrame->get_BodyFrameReference(&pBodyFrameReference);
        if (SUCCEEDED(hr))
        {
            hr = pBodyFrameReference->AcquireFrame(&pBodyFrame);
        }
        SafeRelease(pBodyFrameReference);
    }

    if (SUCCEEDED(hr))
    {
        // Depth
        INT64 nDepthTime = 0;
        IFrameDescription* pDepthFrameDescription = NULL;
        int nDepthWidth = 0;
        int nDepthHeight = 0;
        UINT nDepthBufferSize = 0;
        UINT16 *pDepthBuffer = NULL;

        // Color
        IFrameDescription* pColorFrameDescription = NULL;
        int nColorWidth = 0;
        int nColorHeight = 0;
        ColorImageFormat imageFormat = ColorImageFormat_None;
        UINT nColorBufferSize = 0;
        RGBQUAD *pColorBuffer = NULL;

        // BodyIndex
        IFrameDescription* pBodyIndexFrameDescription = NULL;
        int nBodyIndexWidth = 0;
        int nBodyIndexHeight = 0;
        UINT nBodyIndexBufferSize = 0;
        BYTE *pBodyIndexBuffer = NULL;

        // Body
        IBody* ppBodies[BODY_COUNT] = { 0 };

        // Get depth frame data
        hr = pDepthFrame->get_RelativeTime(&nDepthTime);

        if (SUCCEEDED(hr))
        {
            hr = pDepthFrame->get_FrameDescription(&pDepthFrameDescription);
        }
        if (SUCCEEDED(hr))
        {
            hr = pDepthFrameDescription->get_Width(&nDepthWidth);
        }
        if (SUCCEEDED(hr))
        {
            hr = pDepthFrameDescription->get_Height(&nDepthHeight);
        }
        if (SUCCEEDED(hr))
        {
            hr = pDepthFrame->AccessUnderlyingBuffer(&nDepthBufferSize, &pDepthBuffer);
        }

        // Get color frame data
        if (SUCCEEDED(hr))
        {
            hr = pColorFrame->get_FrameDescription(&pColorFrameDescription);
        }
        if (SUCCEEDED(hr))
        {
            hr = pColorFrameDescription->get_Width(&nColorWidth);
        }
        if (SUCCEEDED(hr))
        {
            hr = pColorFrameDescription->get_Height(&nColorHeight);
        }
        if (SUCCEEDED(hr))
        {
            hr = pColorFrame->get_RawColorImageFormat(&imageFormat);
        }
        if (SUCCEEDED(hr))
        {
            if (imageFormat == ColorImageFormat_Bgra)
            {
                hr = pColorFrame->AccessRawUnderlyingBuffer(&nColorBufferSize, reinterpret_cast<BYTE**>(&pColorBuffer));
            }
            else if (m_pColorRGBX)
            {
                pColorBuffer = m_pColorRGBX;
                nColorBufferSize = cColorWidth * cColorHeight * sizeof(RGBQUAD);
                hr = pColorFrame->CopyConvertedFrameDataToArray(nColorBufferSize, reinterpret_cast<BYTE*>(pColorBuffer), ColorImageFormat_Bgra);
            }
            else
            {
                hr = E_FAIL;
            }
        }

        // Get body index frame data
        if (SUCCEEDED(hr))
        {
            hr = pBodyIndexFrame->get_FrameDescription(&pBodyIndexFrameDescription);
        }
        if (SUCCEEDED(hr))
        {
            hr = pBodyIndexFrameDescription->get_Width(&nBodyIndexWidth);
        }
        if (SUCCEEDED(hr))
        {
            hr = pBodyIndexFrameDescription->get_Height(&nBodyIndexHeight);
        }
        if (SUCCEEDED(hr))
        {
            hr = pBodyIndexFrame->AccessUnderlyingBuffer(&nBodyIndexBufferSize, &pBodyIndexBuffer);
        }

        // Get body frame data
        if (SUCCEEDED(hr))
        {
            hr = pBodyFrame->GetAndRefreshBodyData(_countof(ppBodies), ppBodies);
        }

        if (SUCCEEDED(hr))
        {
            ProcessFrame(nDepthTime, pDepthBuffer, nDepthWidth, nDepthHeight,
                pColorBuffer, nColorWidth, nColorHeight,
                pBodyIndexBuffer, nBodyIndexWidth, nBodyIndexHeight,
                BODY_COUNT, ppBodies);
        }

        SafeRelease(pDepthFrameDescription);
        SafeRelease(pColorFrameDescription);
        SafeRelease(pBodyIndexFrameDescription);

        for (int i = 0; i < _countof(ppBodies); ++i)
        {
            SafeRelease(ppBodies[i]);
        }
    }

    SafeRelease(pDepthFrame);
    SafeRelease(pColorFrame);
    SafeRelease(pBodyIndexFrame);
    SafeRelease(pBodyFrame);
    SafeRelease(pMultiSourceFrame);
}
bool BodyStream::readFrame(IMultiSourceFrame *multiFrame)
{
    bool didRead = false;
    if (!m_StreamHandle.bodyFrameReader)
    {
        ofLogWarning("ofxKinect2::BodyStream") << "Stream is not open.";
        return didRead;
    }

    IBodyFrame *bodyFrame = nullptr;
    HRESULT hr = E_FAIL;
    if (!multiFrame)
    {
        hr = m_StreamHandle.bodyFrameReader->AcquireLatestFrame(&bodyFrame);
    }
    else
    {
        IBodyFrameReference *bodyFrameReference = nullptr;
        hr = multiFrame->get_BodyFrameReference(&bodyFrameReference);
        if (SUCCEEDED(hr))
        {
            hr = bodyFrameReference->AcquireFrame(&bodyFrame);
        }
        safeRelease(bodyFrameReference);
    }

    if (SUCCEEDED(hr))
    {
        hr = bodyFrame->get_RelativeTime((INT64 *)&m_Frame.timestamp);

        IBody *ppBodies[BODY_COUNT] = {0};
        if (SUCCEEDED(hr))
        {
            hr = bodyFrame->GetAndRefreshBodyData(_countof(ppBodies), ppBodies);
        }

        if (lock())
        {
            if (SUCCEEDED(hr))
            {
                didRead = true;

                // Clear the body list
                for (size_t b = 0; b < m_Bodies.size(); b++)
                {
                    delete m_Bodies[b];
                }
                m_Bodies.clear();

                for (int i = 0; i < _countof(ppBodies); ++i)
                {
                    BOOLEAN isTracked = false;
                    if (ppBodies[i])
                    {
                        ppBodies[i]->get_IsTracked(&isTracked);
                        if (isTracked)
                        {
                            UINT64 id = -1;
                            ppBodies[i]->get_TrackingId(&id);

                            // Add the tracked body to the list
                            Body *body = new Body();
                            body->setup(*m_Device, ppBodies[i]);
                            body->update();
                            m_Bodies.push_back(body);
                            // TODO: Use the body tracking id to re-use the Body objects
                            // TODO: Clarify the relationship between Body and IBody.
                        }
                    }
                }
            }
            unlock();

            // Sort the bodies from left to right along the X axis;
            // player one is the left-most body.
            auto ascSort = [](Body *bodyOne, Body *bodyTwo) {
                return bodyOne->getJoint(JointType_SpineMid).Position.X
                     < bodyTwo->getJoint(JointType_SpineMid).Position.X;
            };
            std::sort(m_Bodies.begin(), m_Bodies.end(), ascSort);
        }

        for (int i = 0; i < _countof(ppBodies); ++i)
        {
            safeRelease(ppBodies[i]);
        }
    }
    safeRelease(bodyFrame);

    return didRead;
}
int main(int argc, char** argv)
{
    int first_time = 0;
    Size screen_size(1440, 900);            // target size for the displayed image
    Scalar text_color  = Scalar(0, 255, 0);
    Scalar text_color2 = Scalar(0, 255, 255);
    Scalar text_color3 = Scalar(0, 0, 255);

    inhaler_coach coach;
    coach.control = 0;
    thread mThread(test_func, &coach);

    // 1a. Get Kinect sensor
    cout << "Try to get default sensor" << endl;
    IKinectSensor* pSensor = nullptr;
    if (GetDefaultKinectSensor(&pSensor) != S_OK)
    {
        cerr << "Get Sensor failed" << endl;
        return -1;
    }

    // 1b. Open sensor
    cout << "Try to open sensor" << endl;
    if (pSensor->Open() != S_OK)
    {
        cerr << "Can't open sensor" << endl;
        return -1;
    }

    // 2. Color related code
    IColorFrameReader* pColorFrameReader = nullptr;
    cv::Mat mColorImg;
    UINT uBufferSize = 0;
    UINT uColorPointNum = 0;
    int iWidth = 0;
    int iHeight = 0;
    {
        // 2a. Get color frame source
        cout << "Try to get color source" << endl;
        IColorFrameSource* pFrameSource = nullptr;
        if (pSensor->get_ColorFrameSource(&pFrameSource) != S_OK)
        {
            cerr << "Can't get color frame source" << endl;
            return -1;
        }

        // 2b. Get frame description
        cout << "get color frame description" << endl;
        IFrameDescription* pFrameDescription = nullptr;
        if (pFrameSource->get_FrameDescription(&pFrameDescription) == S_OK)
        {
            pFrameDescription->get_Width(&iWidth);
            pFrameDescription->get_Height(&iHeight);
        }
        pFrameDescription->Release();
        pFrameDescription = nullptr;

        // 2c. Get frame reader
        cout << "Try to get color frame reader" << endl;
        if (pFrameSource->OpenReader(&pColorFrameReader) != S_OK)
        {
            cerr << "Can't get color frame reader" << endl;
            return -1;
        }

        // 2d. Release frame source
        cout << "Release frame source" << endl;
        pFrameSource->Release();
        pFrameSource = nullptr;

        // Prepare OpenCV data
        mColorImg = cv::Mat(iHeight, iWidth, CV_8UC4);
        uBufferSize = iHeight * iWidth * 4 * sizeof(BYTE);
        uColorPointNum = iHeight * iWidth;
    }

    // 3. Depth related code
    IDepthFrameReader* pDepthFrameReader = nullptr;
    UINT uDepthPointNum = 0;
    int iDepthWidth = 0, iDepthHeight = 0;
    cout << "Try to get depth source" << endl;
    {
        // Get frame source
        IDepthFrameSource* pFrameSource = nullptr;
        if (pSensor->get_DepthFrameSource(&pFrameSource) != S_OK)
        {
            cerr << "Can't get depth frame source" << endl;
            return -1;
        }

        // Get frame description
        cout << "get depth frame description" << endl;
        IFrameDescription* pFrameDescription = nullptr;
        if (pFrameSource->get_FrameDescription(&pFrameDescription) == S_OK)
        {
            pFrameDescription->get_Width(&iDepthWidth);
            pFrameDescription->get_Height(&iDepthHeight);
            uDepthPointNum = iDepthWidth * iDepthHeight;
        }
        pFrameDescription->Release();
        pFrameDescription = nullptr;

        // Get frame reader
        cout << "Try to get depth frame reader" << endl;
        if (pFrameSource->OpenReader(&pDepthFrameReader) != S_OK)
        {
            cerr << "Can't get depth frame reader" << endl;
            return -1;
        }

        // Release frame source
        cout << "Release frame source" << endl;
        pFrameSource->Release();
        pFrameSource = nullptr;
    }

    // 4. Body related code
    IBodyFrameReader* pBodyFrameReader = nullptr;
    IBody** aBodyData = nullptr;
    INT32 iBodyCount = 0;
    {
        // 4a. Get frame source
        cout << "Try to get body source" << endl;
        IBodyFrameSource* pFrameSource = nullptr;
        if (pSensor->get_BodyFrameSource(&pFrameSource) != S_OK)
        {
            cerr << "Can't get body frame source" << endl;
            return -1;
        }

        // 4b. Get the number of bodies
        if (pFrameSource->get_BodyCount(&iBodyCount) != S_OK)
        {
            cerr << "Can't get body count" << endl;
            return -1;
        }
        cout << " > Can trace " << iBodyCount << " bodies" << endl;
        aBodyData = new IBody*[iBodyCount];
        for (int i = 0; i < iBodyCount; ++i)
            aBodyData[i] = nullptr;

        // 4c. Get frame reader
        cout << "Try to get body frame reader" << endl;
        if (pFrameSource->OpenReader(&pBodyFrameReader) != S_OK)
        {
            cerr << "Can't get body frame reader" << endl;
            return -1;
        }

        // 4d. Release frame source
        cout << "Release frame source" << endl;
        pFrameSource->Release();
        pFrameSource = nullptr;
    }

    // 5. Body index related code
    IBodyIndexFrameReader* pBIFrameReader = nullptr;
    cout << "Try to get body index source" << endl;
    {
        // Get frame source
        IBodyIndexFrameSource* pFrameSource = nullptr;
        if (pSensor->get_BodyIndexFrameSource(&pFrameSource) != S_OK)
        {
            cerr << "Can't get body index frame source" << endl;
            return -1;
        }

        // Get frame reader
        cout << "Try to get body index frame reader" << endl;
        if (pFrameSource->OpenReader(&pBIFrameReader) != S_OK)
        {
            cerr << "Can't get body index frame reader" << endl;
            return -1;
        }

        // Release frame source
        cout << "Release frame source" << endl;
        pFrameSource->Release();
        pFrameSource = nullptr;
    }

    // 6. Background image
    cv::Mat imgBG(iHeight, iWidth, CV_8UC3);
    imgBG.setTo(0);

    // 7. Get CoordinateMapper
    ICoordinateMapper* pCoordinateMapper = nullptr;
    if (pSensor->get_CoordinateMapper(&pCoordinateMapper) != S_OK)
    {
        cout << "Can't get coordinate mapper" << endl;
        return -1;
    }

    // Enter main loop
    UINT16* pDepthPoints = new UINT16[uDepthPointNum];
    BYTE* pBodyIndex = new BYTE[uDepthPointNum];
    DepthSpacePoint* pPointArray = new DepthSpacePoint[uColorPointNum];
    cv::namedWindow("Inhaler Coach");
    while (true)
    {
        // Read color frame
        IColorFrame* pColorFrame = nullptr;
        if (pColorFrameReader->AcquireLatestFrame(&pColorFrame) == S_OK)
        {
            pColorFrame->CopyConvertedFrameDataToArray(uBufferSize, mColorImg.data, ColorImageFormat_Bgra);
            pColorFrame->Release();
            pColorFrame = nullptr;
        }
        cv::Mat mImg = mColorImg.clone();

        // Read depth frame
        IDepthFrame* pDepthFrame = nullptr;
        if (pDepthFrameReader->AcquireLatestFrame(&pDepthFrame) == S_OK)
        {
            pDepthFrame->CopyFrameDataToArray(uDepthPointNum, pDepthPoints);
            pDepthFrame->Release();
            pDepthFrame = nullptr;
        }

        // Read body index frame
        IBodyIndexFrame* pBIFrame = nullptr;
        if (pBIFrameReader->AcquireLatestFrame(&pBIFrame) == S_OK)
        {
            pBIFrame->CopyFrameDataToArray(uDepthPointNum, pBodyIndex);
            pBIFrame->Release();
            pBIFrame = nullptr;
        }

#ifdef COACH_DEBUG
        cv::Mat imgTarget = imgBG.clone();
        // Map color to depth
        if (pCoordinateMapper->MapColorFrameToDepthSpace(uDepthPointNum, pDepthPoints, uColorPointNum, pPointArray) == S_OK)
        {
            for (int y = 0; y < imgTarget.rows; ++y)
            {
                for (int x = 0; x < imgTarget.cols; ++x)
                {
                    // (x, y) in the color frame corresponds to rPoint in the depth frame
                    const DepthSpacePoint& rPoint = pPointArray[y * imgTarget.cols + x];

                    // Check whether rPoint is in range
                    if (rPoint.X >= 0 && rPoint.X < iDepthWidth && rPoint.Y >= 0 && rPoint.Y < iDepthHeight)
                    {
                        // Fill from the color frame if this pixel belongs to a user
                        // (body index 0-5 means a tracked body, 255 means background)
                        int iIdx = (int)rPoint.X + iDepthWidth * (int)rPoint.Y;
                        if (pBodyIndex[iIdx] < 6)
                        {
                            cv::Vec4b& rPixel = mImg.at<cv::Vec4b>(y, x);
                            imgTarget.at<cv::Vec3b>(y, x) = cv::Vec3b(rPixel[0], rPixel[1], rPixel[2]);
                        }
                    }
                }
            }
        }
#else
        cv::Mat imgTarget = mImg.clone();
#endif

        // Get body data
        IBodyFrame* pBodyFrame = nullptr;
        if (pBodyFrameReader->AcquireLatestFrame(&pBodyFrame) == S_OK)
        {
            if (pBodyFrame->GetAndRefreshBodyData(iBodyCount, aBodyData) == S_OK)
            {
                // For each body
                for (int i = 0; i < iBodyCount; ++i)
                {
                    IBody* pBody = aBodyData[i];

                    // Check if it is tracked
                    BOOLEAN bTracked = false;
                    if ((pBody->get_IsTracked(&bTracked) == S_OK) && bTracked)
                    {
                        // Get joint positions
                        Joint aJoints[JointType::JointType_Count];
                        if (pBody->GetJoints(JointType::JointType_Count, aJoints) == S_OK)
                        {
                            if (coach.state == 0)
                            {
                                coach.state = 1;
                                if (first_time == 0)
                                {
                                    first_time = 1;
                                    PlaySound(TEXT("welcome.wav"), NULL, SND_FILENAME);
                                }
                            }
#ifdef COACH_DEBUG
                            DrawLine(imgTarget, aJoints[JointType_SpineBase], aJoints[JointType_SpineMid], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_SpineMid], aJoints[JointType_SpineShoulder], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_SpineShoulder], aJoints[JointType_Neck], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_Neck], aJoints[JointType_Head], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_SpineShoulder], aJoints[JointType_ShoulderLeft], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_ShoulderLeft], aJoints[JointType_ElbowLeft], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_ElbowLeft], aJoints[JointType_WristLeft], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_WristLeft], aJoints[JointType_HandLeft], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_HandLeft], aJoints[JointType_HandTipLeft], pCoordinateMapper);
                            //DrawLine(imgTarget, aJoints[JointType_HandLeft], aJoints[JointType_ThumbLeft], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_SpineShoulder], aJoints[JointType_ShoulderRight], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_ShoulderRight], aJoints[JointType_ElbowRight], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_ElbowRight], aJoints[JointType_WristRight], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_WristRight], aJoints[JointType_HandRight], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_HandRight], aJoints[JointType_HandTipRight], pCoordinateMapper);
                            //DrawLine(imgTarget, aJoints[JointType_HandRight], aJoints[JointType_ThumbRight], pCoordinateMapper);
#endif
                            ColorSpacePoint q;
                            ColorSpacePoint head;
                            //ColorSpacePoint w;
                            pCoordinateMapper->MapCameraPointToColorSpace(aJoints[JointType_Head].Position, &head);

                            // Check shaking
                            coach.shaking_detection(aJoints, pCoordinateMapper);
                            q = coach.position_checking(aJoints, pCoordinateMapper);
#ifdef COACH_DEBUG
                            circle(imgTarget, cv::Point(q.X, q.Y), 10, Scalar(0, 255, 255), 10, 8, 0);
                            //circle(imgTarget, cv::Point(q.X, q.Y), 10, Scalar(0, 255, 255), 10, 8, 0);
                            rectangle(imgTarget, Point(head.X - 50, head.Y - 40), Point(head.X + 50, head.Y + 90), Scalar(0, 255, 255), 1, 8, 0);
                            //circle(imgTarget, cv::Point(w.X, w.Y), 10, Scalar(255, 0, 255), 10, 8, 0);
#endif
                            coach.state_change_rule();
                        }
                    }
                }
            }
            else
            {
                cerr << "Can't read body data" << endl;
            }
            // Release frame
            pBodyFrame->Release();
        }

        switch (coach.state)
        {
        case 0:
            putText(imgTarget, "CMU Inhaler Coaching System", Point(120, 120), FONT_HERSHEY_DUPLEX, 2, text_color);
            break;
        case 1:
            putText(imgTarget, "Please shake the inhaler", Point(20, 120), FONT_HERSHEY_DUPLEX, 2, text_color2);
            break;
        case 2:
            putText(imgTarget, "Shaking detected", Point(20, 120), FONT_HERSHEY_DUPLEX, 2, text_color2);
            break;
        case 3:
            putText(imgTarget, "Please put the inhaler in front of your mouth", Point(20, 120), FONT_HERSHEY_DUPLEX, 2, text_color2);
            break;
        case 4:
            putText(imgTarget, "Position check OK", Point(20, 120), FONT_HERSHEY_DUPLEX, 2, text_color2);
            break;
        case 5:
            putText(imgTarget, "You forgot to shake the inhaler first!!!", Point(20, 120), FONT_HERSHEY_DUPLEX, 2, text_color3);
            break;
        }

        // Show image
        Mat dst;
        resize(imgTarget, dst, screen_size);
        imshow("Coach", dst);

        // Check keyboard input
        if (cv::waitKey(30) == VK_ESCAPE)
        {
            break;
        }
    }
    mThread.join();

    // Delete body data array and scratch buffers (the originals were leaked)
    delete[] aBodyData;
    delete[] pDepthPoints;
    delete[] pBodyIndex;
    delete[] pPointArray;

    // Release frame readers
    cout << "Release body frame reader" << endl;
    pBodyFrameReader->Release();
    pBodyFrameReader = nullptr;

    cout << "Release color frame reader" << endl;
    pColorFrameReader->Release();
    pColorFrameReader = nullptr;

    // Close sensor
    cout << "close sensor" << endl;
    pSensor->Close();

    // Release sensor
    cout << "Release sensor" << endl;
    pSensor->Release();
    pSensor = nullptr;
    return 0;
}
void Device::update()
{
    if ( mFrameReader == 0 ) {
        return;
    }

    IAudioBeamFrame* audioFrame = 0;
    IBodyFrame* bodyFrame = 0;
    IBodyIndexFrame* bodyIndexFrame = 0;
    IColorFrame* colorFrame = 0;
    IDepthFrame* depthFrame = 0;
    IMultiSourceFrame* frame = 0;
    IInfraredFrame* infraredFrame = 0;
    ILongExposureInfraredFrame* infraredLongExposureFrame = 0;

    HRESULT hr = mFrameReader->AcquireLatestFrame( &frame );

    if ( SUCCEEDED( hr ) && mDeviceOptions.isAudioEnabled() ) {
        // TODO audio
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isBodyEnabled() ) {
        IBodyFrameReference* frameRef = 0;
        hr = frame->get_BodyFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &bodyFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isBodyIndexEnabled() ) {
        IBodyIndexFrameReference* frameRef = 0;
        hr = frame->get_BodyIndexFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &bodyIndexFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isColorEnabled() ) {
        IColorFrameReference* frameRef = 0;
        hr = frame->get_ColorFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &colorFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isDepthEnabled() ) {
        IDepthFrameReference* frameRef = 0;
        hr = frame->get_DepthFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &depthFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isInfraredEnabled() ) {
        IInfraredFrameReference* frameRef = 0;
        hr = frame->get_InfraredFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &infraredFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isInfraredLongExposureEnabled() ) {
        ILongExposureInfraredFrameReference* frameRef = 0;
        hr = frame->get_LongExposureInfraredFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &infraredLongExposureFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) ) {
        long long timeStamp = 0L;

        // TODO audio

        std::vector<Body> bodies;
        int64_t bodyTime = 0L;
        IBody* kinectBodies[ BODY_COUNT ] = { 0 };
        Vec4f floorClipPlane = Vec4f::zero();

        Channel8u bodyIndexChannel;
        IFrameDescription* bodyIndexFrameDescription = 0;
        int32_t bodyIndexWidth = 0;
        int32_t bodyIndexHeight = 0;
        uint32_t bodyIndexBufferSize = 0;
        uint8_t* bodyIndexBuffer = 0;
        int64_t bodyIndexTime = 0L;

        Surface8u colorSurface;
        IFrameDescription* colorFrameDescription = 0;
        int32_t colorWidth = 0;
        int32_t colorHeight = 0;
        ColorImageFormat colorImageFormat = ColorImageFormat_None;
        uint32_t colorBufferSize = 0;
        uint8_t* colorBuffer = 0;

        Channel16u depthChannel;
        IFrameDescription* depthFrameDescription = 0;
        int32_t depthWidth = 0;
        int32_t depthHeight = 0;
        uint16_t depthMinReliableDistance = 0;
        uint16_t depthMaxReliableDistance = 0;
        uint32_t depthBufferSize = 0;
        uint16_t* depthBuffer = 0;

        Channel16u infraredChannel;
        IFrameDescription* infraredFrameDescription = 0;
        int32_t infraredWidth = 0;
        int32_t infraredHeight = 0;
        uint32_t infraredBufferSize = 0;
        uint16_t* infraredBuffer = 0;

        Channel16u infraredLongExposureChannel;
        IFrameDescription* infraredLongExposureFrameDescription = 0;
        int32_t infraredLongExposureWidth = 0;
        int32_t infraredLongExposureHeight = 0;
        uint32_t infraredLongExposureBufferSize = 0;
        uint16_t* infraredLongExposureBuffer = 0;

        // Guard the time stamp read: the original dereferenced depthFrame
        // unconditionally, which crashes when the depth stream is disabled.
        if ( depthFrame != 0 ) {
            hr = depthFrame->get_RelativeTime( &timeStamp );
        }

        // TODO audio
        if ( mDeviceOptions.isAudioEnabled() ) {
        }

        if ( mDeviceOptions.isBodyEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = bodyFrame->get_RelativeTime( &bodyTime );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = bodyFrame->GetAndRefreshBodyData( BODY_COUNT, kinectBodies );
            }
            if ( SUCCEEDED( hr ) ) {
                Vector4 v;
                hr = bodyFrame->get_FloorClipPlane( &v );
                floorClipPlane = toVec4f( v );
            }
            if ( SUCCEEDED( hr ) ) {
                for ( uint8_t i = 0; i < BODY_COUNT; ++i ) {
                    IBody* kinectBody = kinectBodies[ i ];
                    if ( kinectBody != 0 ) {
                        uint8_t isTracked = false;
                        hr = kinectBody->get_IsTracked( &isTracked );
                        if ( SUCCEEDED( hr ) && isTracked ) {
                            Joint joints[ JointType_Count ];
                            kinectBody->GetJoints( JointType_Count, joints );

                            JointOrientation jointOrientations[ JointType_Count ];
                            kinectBody->GetJointOrientations( JointType_Count, jointOrientations );

                            uint64_t id = 0;
                            kinectBody->get_TrackingId( &id );

                            std::map<JointType, Body::Joint> jointMap;
                            for ( int32_t j = 0; j < JointType_Count; ++j ) {
                                Body::Joint joint(
                                    toVec3f( joints[ j ].Position ),
                                    toQuatf( jointOrientations[ j ].Orientation ),
                                    joints[ j ].TrackingState );
                                jointMap.insert( pair<JointType, Body::Joint>( static_cast<JointType>( j ), joint ) );
                            }
                            Body body( id, i, jointMap );
                            bodies.push_back( body );
                        }
                    }
                }
            }
        }

        if ( mDeviceOptions.isBodyIndexEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = bodyIndexFrame->get_RelativeTime( &bodyIndexTime );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = bodyIndexFrame->get_FrameDescription( &bodyIndexFrameDescription );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = bodyIndexFrameDescription->get_Width( &bodyIndexWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = bodyIndexFrameDescription->get_Height( &bodyIndexHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = bodyIndexFrame->AccessUnderlyingBuffer( &bodyIndexBufferSize, &bodyIndexBuffer );
            }
            if ( SUCCEEDED( hr ) ) {
                bodyIndexChannel = Channel8u( bodyIndexWidth, bodyIndexHeight );
                memcpy( bodyIndexChannel.getData(), bodyIndexBuffer, bodyIndexWidth * bodyIndexHeight * sizeof( uint8_t ) );
            }
        }

        if ( mDeviceOptions.isColorEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = colorFrame->get_FrameDescription( &colorFrameDescription );
                if ( SUCCEEDED( hr ) ) {
                    // Field-of-view values are fetched but currently unused.
                    float vFov = 0.0f;
                    float hFov = 0.0f;
                    float dFov = 0.0f;
                    colorFrameDescription->get_VerticalFieldOfView( &vFov );
                    colorFrameDescription->get_HorizontalFieldOfView( &hFov );
                    colorFrameDescription->get_DiagonalFieldOfView( &dFov );
                }
            }
            if ( SUCCEEDED( hr ) ) {
                hr = colorFrameDescription->get_Width( &colorWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = colorFrameDescription->get_Height( &colorHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = colorFrame->get_RawColorImageFormat( &colorImageFormat );
            }
            if ( SUCCEEDED( hr ) ) {
                colorBufferSize = colorWidth * colorHeight * sizeof( uint8_t ) * 4;
                colorBuffer = new uint8_t[ colorBufferSize ];
                hr = colorFrame->CopyConvertedFrameDataToArray( colorBufferSize, reinterpret_cast<uint8_t*>( colorBuffer ), ColorImageFormat_Rgba );
                if ( SUCCEEDED( hr ) ) {
                    colorSurface = Surface8u( colorWidth, colorHeight, false, SurfaceChannelOrder::RGBA );
                    memcpy( colorSurface.getData(), colorBuffer, colorWidth * colorHeight * sizeof( uint8_t ) * 4 );
                }
                delete [] colorBuffer;
                colorBuffer = 0;
            }
        }

        if ( mDeviceOptions.isDepthEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrame->get_FrameDescription( &depthFrameDescription );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrameDescription->get_Width( &depthWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrameDescription->get_Height( &depthHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrame->get_DepthMinReliableDistance( &depthMinReliableDistance );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrame->get_DepthMaxReliableDistance( &depthMaxReliableDistance );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrame->AccessUnderlyingBuffer( &depthBufferSize, &depthBuffer );
            }
            if ( SUCCEEDED( hr ) ) {
                depthChannel = Channel16u( depthWidth, depthHeight );
                memcpy( depthChannel.getData(), depthBuffer, depthWidth * depthHeight * sizeof( uint16_t ) );
            }
        }

        if ( mDeviceOptions.isInfraredEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = infraredFrame->get_FrameDescription( &infraredFrameDescription );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredFrameDescription->get_Width( &infraredWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredFrameDescription->get_Height( &infraredHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredFrame->AccessUnderlyingBuffer( &infraredBufferSize, &infraredBuffer );
            }
            if ( SUCCEEDED( hr ) ) {
                infraredChannel = Channel16u( infraredWidth, infraredHeight );
                memcpy( infraredChannel.getData(), infraredBuffer, infraredWidth * infraredHeight * sizeof( uint16_t ) );
            }
        }

        if ( mDeviceOptions.isInfraredLongExposureEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = infraredLongExposureFrame->get_FrameDescription( &infraredLongExposureFrameDescription );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredLongExposureFrameDescription->get_Width( &infraredLongExposureWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredLongExposureFrameDescription->get_Height( &infraredLongExposureHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredLongExposureFrame->AccessUnderlyingBuffer( &infraredLongExposureBufferSize, &infraredLongExposureBuffer );
            }
            if ( SUCCEEDED( hr ) ) {
                infraredLongExposureChannel = Channel16u( infraredLongExposureWidth, infraredLongExposureHeight );
                memcpy( infraredLongExposureChannel.getData(), infraredLongExposureBuffer, infraredLongExposureWidth * infraredLongExposureHeight * sizeof( uint16_t ) );
            }
        }

        if ( SUCCEEDED( hr ) ) {
            mFrame.mBodies = bodies;
            mFrame.mChannelBodyIndex = bodyIndexChannel;
            mFrame.mChannelDepth = depthChannel;
            mFrame.mChannelInfrared = infraredChannel;
            mFrame.mChannelInfraredLongExposure = infraredLongExposureChannel;
            mFrame.mDeviceId = mDeviceOptions.getDeviceId();
            mFrame.mSurfaceColor = colorSurface;
            mFrame.mTimeStamp = timeStamp;
            mFrame.mFloorClipPlane = floorClipPlane;
        }

        if ( bodyIndexFrameDescription != 0 ) {
            bodyIndexFrameDescription->Release();
            bodyIndexFrameDescription = 0;
        }
        if ( colorFrameDescription != 0 ) {
            colorFrameDescription->Release();
            colorFrameDescription = 0;
        }
        if ( depthFrameDescription != 0 ) {
            depthFrameDescription->Release();
            depthFrameDescription = 0;
        }
        if ( infraredFrameDescription != 0 ) {
            infraredFrameDescription->Release();
            infraredFrameDescription = 0;
        }
        if ( infraredLongExposureFrameDescription != 0 ) {
            infraredLongExposureFrameDescription->Release();
            infraredLongExposureFrameDescription = 0;
        }
    }

    if ( audioFrame != 0 ) {
        audioFrame->Release();
        audioFrame = 0;
    }
    if ( bodyFrame != 0 ) {
        bodyFrame->Release();
        bodyFrame = 0;
    }
    if ( bodyIndexFrame != 0 ) {
        bodyIndexFrame->Release();
        bodyIndexFrame = 0;
    }
    if ( colorFrame != 0 ) {
        colorFrame->Release();
        colorFrame = 0;
    }
    if ( depthFrame != 0 ) {
        depthFrame->Release();
        depthFrame = 0;
    }
    if ( frame != 0 ) {
        frame->Release();
        frame = 0;
    }
    if ( infraredFrame != 0 ) {
        infraredFrame->Release();
        infraredFrame = 0;
    }
    if ( infraredLongExposureFrame != 0 ) {
        infraredLongExposureFrame->Release();
        infraredLongExposureFrame = 0;
    }
}
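This version also reads get_FloorClipPlane(). If the plane follows the usual Kinect convention (an assumption here: (x, y, z) is the floor's unit normal and w the sensor height in meters), two useful calibration values fall out directly:

// Assumed convention: floorClipPlane = (nx, ny, nz, sensorHeight).
float sensorHeight = mFrame.mFloorClipPlane.w;                                      // meters above the floor
float tiltRadians  = atan2f( mFrame.mFloorClipPlane.z, mFrame.mFloorClipPlane.y ); // sensor pitch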
// Thread 1: get the body for every frame
DWORD WINAPI DtoG(LPVOID pParam)
{
	// Initialize the SMPL template
	SMPL bodyTemplate = SMPL(MALE);
	cout << "SMPL::initial finished!" << endl;

	// Initialize the sensor
	IKinectSensor* pSensor;
	HRESULT hResult = S_OK;
	hResult = GetDefaultKinectSensor(&pSensor);
	if (FAILED(hResult)) {
		// check this result before overwriting it with the one from Open()
		std::cerr << "Error : GetDefaultKinectSensor()" << std::endl;
		return 1;
	}
	hResult = pSensor->Open();
	if (FAILED(hResult)) {
		std::cerr << "Error : IKinectSensor::Open()" << std::endl;
		return 1;
	}

	IBodyFrameSource* pBodySource;
	hResult = pSensor->get_BodyFrameSource(&pBodySource);
	if (FAILED(hResult)) {
		std::cerr << "Error : IKinectSensor::get_BodyFrameSource()" << std::endl;
		return 1;
	}

	IBodyFrameReader* pBodyReader;
	hResult = pBodySource->OpenReader(&pBodyReader);
	if (FAILED(hResult)) {
		std::cerr << "Error : IBodyFrameSource::OpenReader()" << std::endl;
		return 1;
	}

	//mat pp = zeros(24, 3);
	//mat result = bodyTemplate.gen_pose_model(pp, TRUE);
	//bodyTemplate.write_to_obj(result, "MALE.obj");

	// Holt double exponential smoothing filter
	Sample::FilterDoubleExponential filter[BODY_COUNT];

	// Option: set the smoothing parameters
	for (int count = 0; count < BODY_COUNT; count++) {
		float smoothing = 0.5f;           // [0..1], lower values are closer to the raw data
		float correction = 0.5f;          // [0..1], lower values correct more slowly towards the raw data
		float prediction = 0.5f;          // [0..n], the number of frames to predict into the future
		float jitterRadius = 0.05f;       // the radius in meters used for jitter reduction
		float maxDeviationRadius = 0.04f; // the maximum radius in meters that filtered positions may deviate from the raw data
		filter[count].Init(smoothing, correction, prediction, jitterRadius, maxDeviationRadius);
	}

	// The label number of the first body detected by the Kinect
	int BODY_LABEL = -1;

	StopWatch time;
	time.start();
	int counter = 1;
	bool tag = TRUE;
	bool first = TRUE;
	while (counter) {
		Vec4s vertex;
		Vec3s normal;
		//Obj new_body = BodyQueue.front();
		mat trans_joint;
		//bool judge = detectJoint(hResult, pBodyReader, joint);
		IBodyFrame* pBodyFrame = nullptr;
		hResult = pBodyReader->AcquireLatestFrame(&pBodyFrame);
		if (SUCCEEDED(hResult)) {
			IBody* pBody[BODY_COUNT] = { 0 };
			hResult = pBodyFrame->GetAndRefreshBodyData(BODY_COUNT, pBody);
			if (SUCCEEDED(hResult)) {
				for (int count = 0; count < BODY_COUNT; count++) {
					BOOLEAN bTracked = false;
					hResult = pBody[count]->get_IsTracked(&bTracked);
					// Lock onto the first tracked body
					if (bTracked && SUCCEEDED(hResult) && BODY_LABEL == -1)
						BODY_LABEL = count;
					if (SUCCEEDED(hResult) && bTracked && count == BODY_LABEL) {
						//counter--;
						Joint joint[JointType::JointType_Count];
						hResult = pBody[count]->GetJoints(JointType::JointType_Count, joint);

						// Filtered joints (the filtered values are read here but not used below)
						filter[count].Update(joint);
						const DirectX::XMVECTOR* vec = filter[count].GetFilteredJoints();
						for (int type = 0; type < JointType::JointType_Count; type++) {
							if (joint[type].TrackingState != TrackingState::TrackingState_NotTracked) {
								float x = 0.0f, y = 0.0f, z = 0.0f;
								DirectX::XMVectorGetXPtr(&x, vec[type]);
								DirectX::XMVectorGetYPtr(&y, vec[type]);
								DirectX::XMVectorGetZPtr(&z, vec[type]);
							}
						}

						// Get the joints for genBody from the Kinect joints
						trans_joint = JointTransform(joint);

						// Transition from T-pose to the first frame
						if (first == TRUE) {
							mat pose = bodyTemplate.J_to_pose(trans_joint);
							float coefficient = 0.04f / max(max(pose));
							cout << coefficient << endl;
							mat transition = zeros(24, 3);
							int num = 0;
							while (max(max(abs(transition))) < max(max(abs(pose)))) {
								//transition.print("t:");
								genFirstBody(transition, vertex, normal, bodyTemplate);
								transition += pose * coefficient;
								VQueue.push(vertex);
								NQueue.push(normal);
								num++;
							}
							cout << num << endl;
							first = FALSE;
						}

						// Smooth over a sliding window of five joint sets
						mat sum = zeros(24, 3);
						if (smoothList.size() < 5)
							smoothList.push_back(trans_joint);
						else {
							for (iter = smoothList.begin(); iter != smoothList.end(); ++iter) {
								sum += (*iter);
							}
							sum = sum / 5;
							smoothList.pop_front();
							smoothList.push_back(trans_joint);

							genBodyVector(sum, vertex, normal, bodyTemplate);
							cout << "A new pose has been detected!" << endl;
							// Keep every other result to halve the output rate
							if (tag == TRUE) {
								VQueue.push(vertex);
								NQueue.push(normal);
								tag = FALSE;
								cout << "num:" << VQueue.size() << endl;
							}
							else
								tag = TRUE;
							time.stop();
							cout << "cost:" << time.elapsed_ms() << endl;
							time.restart();
						}
						//return TRUE;
					}
				}
			}
			for (int count = 0; count < BODY_COUNT; count++) {
				SafeRelease(pBody[count]);
			}
		}
		SafeRelease(pBodyFrame);
		//if (judge)
		//{
		//	genBody(joint, new_body);
		//	cout << "A new pose has been detected!" << endl;
		//}
		//else continue;
		//new_body.scale_translate(0.30, 0, 1.0, 0);
		//new_body.unified();
		//BodyQueue.push(new_body);
	}

	SafeRelease(pBodySource);
	SafeRelease(pBodyReader);
	if (pSensor) {
		pSensor->Close();
	}
	SafeRelease(pSensor);
	return 0;
}
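// The loop above calls AcquireLatestFrame() as fast as it can; the SDK returns
// E_PENDING from that call when no new frame is ready yet, which is not a real
// error. A minimal sketch of a polling helper that separates the two cases
// (TryAcquireBody is a hypothetical name, not part of the SDK):
static bool TryAcquireBody(IBodyFrameReader* pReader, IBody* (&pBody)[BODY_COUNT])
{
	IBodyFrame* pFrame = nullptr;
	HRESULT hr = pReader->AcquireLatestFrame(&pFrame);
	if (hr == E_PENDING) {
		return false; // no new frame yet; try again on the next tick
	}
	if (FAILED(hr)) {
		return false; // a real error; a caller could log hr here
	}
	hr = pFrame->GetAndRefreshBodyData(BODY_COUNT, pBody);
	pFrame->Release();
	return SUCCEEDED(hr);
}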
bool ms_kinect2::acquire_body_frame(const _OPENNUI byte* dst)
{
	unsigned char* dest = const_cast<unsigned char*>(dst);
	bool result = false;
	IBodyFrame* pBodyFrame = nullptr;
	HRESULT hr = pBodyReader->AcquireLatestFrame(&pBodyFrame);
	if (FAILED(hr)) {
		if (pBodyFrame != NULL) {
			pBodyFrame->Release();
			pBodyFrame = NULL;
		}
		return false;
	}

	IBody* ppBodies[BODY_COUNT] = { 0 };
	hr = pBodyFrame->GetAndRefreshBodyData(_countof(ppBodies), ppBodies);
	if (SUCCEEDED(hr)) {
		// Serialized layout: int32 body count at offset 0, then for each tracked
		// body: int32 tracking id, one "tracked" byte, int32 joint count, the
		// joint array, and two int16 hand states.
		int index = 0;
		__int32 count = 0;
		BOOLEAN isTracked = FALSE;
		memcpy(dest, &count, sizeof(__int32));
		for (__int32 i = 0; i < _countof(ppBodies); ++i) {
			if (ppBodies[i] != NULL && SUCCEEDED(ppBodies[i]->get_IsTracked(&isTracked)) && isTracked == TRUE) {
				Joint pJoints[JointType_Count];
				hr = ppBodies[i]->GetJoints(_countof(pJoints), pJoints);
				if (SUCCEEDED(hr)) {
					result = true;
					count++;
					// Keep the running count at the front of the buffer up to date
					memcpy(dest, &count, sizeof(__int32));

					UINT64 trackedId = _UI64_MAX;
					hr = ppBodies[i]->get_TrackingId(&trackedId);
					__int32 trackingId = static_cast<__int32>(trackedId);
					memcpy(dest + 4 + index, &trackingId, sizeof(__int32));
					index += sizeof(__int32);

					*(dest + 4 + index) = true; // "tracked" flag
					index++;

					__int32 jointCount = JointType_Count;
					memcpy(dest + 4 + index, &jointCount, sizeof(__int32));
					index += sizeof(__int32);

					for (int j = 0; j < _countof(pJoints); ++j) {
						_OPENNUI body::joint joint;
						joint.type = (_OPENNUI body::joint_type)pJoints[j].JointType;
						joint.state = (_OPENNUI body::tracking_state)pJoints[j].TrackingState;
						joint.position.x = pJoints[j].Position.X;
						joint.position.y = pJoints[j].Position.Y;
						joint.position.z = pJoints[j].Position.Z;
						memcpy(dest + 4 + index, &joint, sizeof(_OPENNUI body::joint));
						index += sizeof(_OPENNUI body::joint);
					}

					HandState leftHand = HandState_Unknown;
					HandState rightHand = HandState_Unknown;
					ppBodies[i]->get_HandLeftState(&leftHand);
					ppBodies[i]->get_HandRightState(&rightHand);
					__int16 leftHandStatus = leftHand;
					memcpy(dest + 4 + index, &leftHandStatus, sizeof(__int16));
					index += sizeof(__int16);
					__int16 rightHandStatus = rightHand;
					memcpy(dest + 4 + index, &rightHandStatus, sizeof(__int16));
					index += sizeof(__int16);
				}
			}
		}
	}
	for (int i = 0; i < _countof(ppBodies); ++i) {
		SafeRelease(ppBodies[i]);
	}
	SafeRelease(pBodyFrame);
	return result;
}
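// A hedged sketch of the matching reader for the layout serialized above.
// parse_body_frame is a hypothetical helper, not part of OpenNUI; it assumes
// the same _OPENNUI body::joint type from the project headers, plus <vector>
// and <cstring>:
static void parse_body_frame(const unsigned char* src)
{
	__int32 count = 0;
	memcpy(&count, src, sizeof(__int32)); // int32 body count at offset 0
	int index = 0;
	for (__int32 b = 0; b < count; ++b) {
		__int32 trackingId = 0;
		memcpy(&trackingId, src + 4 + index, sizeof(__int32));
		index += sizeof(__int32);

		bool tracked = (*(src + 4 + index) != 0); // one "tracked" byte
		index++;

		__int32 jointCount = 0;
		memcpy(&jointCount, src + 4 + index, sizeof(__int32));
		index += sizeof(__int32);

		std::vector<_OPENNUI body::joint> joints(jointCount);
		memcpy(joints.data(), src + 4 + index, jointCount * sizeof(_OPENNUI body::joint));
		index += jointCount * (int)sizeof(_OPENNUI body::joint);

		__int16 leftHand = 0, rightHand = 0; // two int16 hand states
		memcpy(&leftHand, src + 4 + index, sizeof(__int16));
		index += sizeof(__int16);
		memcpy(&rightHand, src + 4 + index, sizeof(__int16));
		index += sizeof(__int16);

		// ... hand the parsed body to the caller here
		(void)tracked; (void)leftHand; (void)rightHand;
	}
}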
void MyKinect2::Update()
{
	// Retrieve the 2D color image
	if (!m_pColorFrameReader) {
		return;
	}

	IColorFrame* pColorFrame = NULL;
	HRESULT hr = m_pColorFrameReader->AcquireLatestFrame(&pColorFrame);
	if (SUCCEEDED(hr)) {
		INT64 nTime = 0;
		IFrameDescription* pFrameDescription = NULL;
		int nWidth = 0;
		int nHeight = 0;
		ColorImageFormat imageFormat = ColorImageFormat_None;
		UINT nBufferSize = 0;

		hr = pColorFrame->get_RelativeTime(&nTime);
		if (SUCCEEDED(hr)) {
			hr = pColorFrame->get_FrameDescription(&pFrameDescription);
		}
		if (SUCCEEDED(hr)) {
			hr = pFrameDescription->get_Width(&nWidth);
		}
		if (SUCCEEDED(hr)) {
			hr = pFrameDescription->get_Height(&nHeight);
		}
		if (SUCCEEDED(hr)) {
			hr = pColorFrame->get_RawColorImageFormat(&imageFormat);
		}
		if (SUCCEEDED(hr) && (nWidth == cColorWidth) && (nHeight == cColorHeight)) {
			if (imageFormat == ColorImageFormat_Bgra) {
				hr = pColorFrame->AccessRawUnderlyingBuffer(&nBufferSize, reinterpret_cast<BYTE**>(&webcam.data));
			}
			else if (m_pColorRGBX) {
				nBufferSize = cColorWidth * cColorHeight * sizeof(RGBQUAD);
				hr = pColorFrame->CopyConvertedFrameDataToArray(nBufferSize, reinterpret_cast<BYTE*>(webcam.data), ColorImageFormat_Bgra);
			}
			else {
				hr = E_FAIL;
			}
		}
		SafeRelease(pFrameDescription);
	}

	// Retrieve the skeleton
	if (!m_pBodyFrameReader) {
		SafeRelease(pColorFrame); // do not leak the color frame on this early return
		return;
	}

	IBodyFrame* pBodyFrame = NULL;
	hr = m_pBodyFrameReader->AcquireLatestFrame(&pBodyFrame);
	if (SUCCEEDED(hr)) {
		INT64 nTime = 0;
		hr = pBodyFrame->get_RelativeTime(&nTime);
		IBody* ppBodies[BODY_COUNT] = { 0 };
		if (SUCCEEDED(hr)) {
			hr = pBodyFrame->GetAndRefreshBodyData(_countof(ppBodies), ppBodies);
		}
		if (SUCCEEDED(hr)) {
			// BODY_COUNT is a #define from Kinect.h equal to 6; it may be worth
			// passing 1 later to avoid problems when retrieving joint positions
			ProcessBody(BODY_COUNT, ppBodies);
		}
		for (int i = 0; i < _countof(ppBodies); ++i) {
			SafeRelease(ppBodies[i]);
		}
	}
	SafeRelease(pBodyFrame);
	SafeRelease(pColorFrame);
}
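// The two independent readers above can hand back frames captured at slightly
// different times. When color and body data need to stay in sync, the SDK also
// offers a multi-source reader. A minimal sketch, assuming an already opened
// IKinectSensor* pSensor as in the console samples in this collection:
IMultiSourceFrameReader* pMultiReader = nullptr;
HRESULT hrMulti = pSensor->OpenMultiSourceFrameReader(
	FrameSourceTypes_Color | FrameSourceTypes_Body, &pMultiReader);
if (SUCCEEDED(hrMulti)) {
	IMultiSourceFrame* pMultiFrame = nullptr;
	if (SUCCEEDED(pMultiReader->AcquireLatestFrame(&pMultiFrame))) {
		// Both references come from the same multi-source frame
		IColorFrameReference* pColorRef = nullptr;
		IBodyFrameReference* pBodyRef = nullptr;
		pMultiFrame->get_ColorFrameReference(&pColorRef);
		pMultiFrame->get_BodyFrameReference(&pBodyRef);
		// ... call AcquireFrame() on each reference, process, then release
		SafeRelease(pBodyRef);
		SafeRelease(pColorRef);
		SafeRelease(pMultiFrame);
	}
	SafeRelease(pMultiReader);
}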
void testApp::update()
{
	kinect.update();

	if (!m_pBodyFrameReader) {
		return;
	}

	IBodyFrame* pBodyFrame = NULL;
	HRESULT hr = m_pBodyFrameReader->AcquireLatestFrame(&pBodyFrame);
	if (SUCCEEDED(hr)) {
		INT64 nTime = 0;
		hr = pBodyFrame->get_RelativeTime(&nTime);
		IBody* ppBodies[BODY_COUNT] = { 0 };
		if (SUCCEEDED(hr)) {
			hr = pBodyFrame->GetAndRefreshBodyData(_countof(ppBodies), ppBodies);
		}
		if (SUCCEEDED(hr)) {
			ProcessBody(nTime, BODY_COUNT, ppBodies);
		}
		for (int i = 0; i < _countof(ppBodies); ++i) {
			SafeRelease(ppBodies[i]);
		}
	}
	SafeRelease(pBodyFrame);

	for (int y = 0; y < vectorField.getHeight(); y++) {
		for (int x = 0; x < vectorField.getWidth(); x++) {
			int index = vectorField.getPixelIndex(x, y);
			float angle = ofNoise(x / (float)vectorField.getWidth() * 4.0, y / (float)vectorField.getHeight() * 4.0, ofGetElapsedTimef() * 0.05) * TWO_PI * 2.0;
			ofVec2f dir(cos(angle), sin(angle));
			dir.normalize().scale(ofNoise(x / (float)vectorField.getWidth() * 4.0, y / (float)vectorField.getHeight() * 4.0, ofGetElapsedTimef() * 0.05 + 10.0));
			vectorField.setColor(x, y, ofColor_<float>(dir.x, dir.y, 0));
		}
	}

	for (int i = 0; i < BODY_COUNT; i++) {
		updateParticleSystem(&particleSystems[i], lastChestPositions[i], lastHandPositionLeft[i], lastHandPositionRight[i], leftHandStates[i], rightHandStates[i]);
	}

	// Check whether a body has left the scene for a long period of time
	for (int i = 0; i < BODY_COUNT; i++) {
		if (lastKnownChestPosition[i] == lastChestPositions[i]) {
			bodyFreezeIterationToRemoveCount[i]++;
		}
		else {
			bodyFreezeIterationToRemoveCount[i] = 0;
		}
		lastKnownChestPosition[i] = lastChestPositions[i];
		if (bodyFreezeIterationToRemoveCount[i] >= MAX_NUM_OF_ITERATIONS_TO_REMOVE_A_BODY) {
			bodyFreezeIterationToRemoveCount[i] = 0;
			lastChestPositions[i].x = lastChestPositions[i].y = 0;
		}
	}
}
bool MKinect::track()
{
	HRESULT hResult = S_OK;

	// Body Frame
	IBodyFrame* pBodyFrame = nullptr;
	hResult = pBodyReader->AcquireLatestFrame(&pBodyFrame);
	if (SUCCEEDED(hResult)) {
		IBody* pBody[BODY_COUNT] = { 0 };
		hResult = pBodyFrame->GetAndRefreshBodyData(BODY_COUNT, pBody);
		if (SUCCEEDED(hResult)) {
			for (int count = 0; count < BODY_COUNT; count++) {
				BOOLEAN bTracked = false;
				hResult = pBody[count]->get_IsTracked(&bTracked);
				if (SUCCEEDED(hResult) && bTracked) {
					/*// Joint
					Joint joint[JointType::JointType_Count];
					hResult = pBody[count]->GetJoints(JointType::JointType_Count, joint);
					if (SUCCEEDED(hResult)) {
						for (int type = 0; type < JointType::JointType_Count; type++) {
							ColorSpacePoint colorSpacePoint = { 0 };
							pCoordinateMapper->MapCameraPointToColorSpace(joint[type].Position, &colorSpacePoint);
							int x = static_cast<int>(colorSpacePoint.X);
							int y = static_cast<int>(colorSpacePoint.Y);
							if ((x >= 0) && (x < width) && (y >= 0) && (y < height)) {
								cv::circle(bufferMat, cv::Point(x, y), 5, static_cast<cv::Scalar>(color[count]), -1, CV_AA);
							}
						}
					}*/

					// Set the TrackingId so the face source can detect this face
					UINT64 trackingId = _UI64_MAX;
					hResult = pBody[count]->get_TrackingId(&trackingId);
					if (SUCCEEDED(hResult)) {
						pFaceSource[count]->put_TrackingId(trackingId);
					}
				}
			}
		}
		for (int count = 0; count < BODY_COUNT; count++) {
			if (pBody[count]) pBody[count]->Release();
		}
	}
	if (pBodyFrame) pBodyFrame->Release();

	// Face Frame
	for (int count = 0; count < BODY_COUNT; count++) {
		IFaceFrame* pFaceFrame = nullptr;
		hResult = pFaceReader[count]->AcquireLatestFrame(&pFaceFrame);
		if (SUCCEEDED(hResult) && pFaceFrame != nullptr) {
			BOOLEAN bFaceTracked = false;
			hResult = pFaceFrame->get_IsTrackingIdValid(&bFaceTracked);
			if (SUCCEEDED(hResult) && bFaceTracked) {
				IFaceFrameResult* pFaceResult = nullptr;
				hResult = pFaceFrame->get_FaceFrameResult(&pFaceResult);
				if (SUCCEEDED(hResult) && pFaceResult != nullptr) {
					std::vector<std::string> result;

					// Face Rotation
					Vector4 faceRotation;
					hResult = pFaceResult->get_FaceRotationQuaternion(&faceRotation);
					if (SUCCEEDED(hResult)) {
						int pitch, yaw, roll;
						ExtractFaceRotationInDegrees(&faceRotation, &pitch, &yaw, &roll);
						_yaw = yaw;
						_pitch = pitch;
						_roll = roll;
					}
				}
				if (pFaceResult) pFaceResult->Release();
			}
		}
		if (pFaceFrame) pFaceFrame->Release();
	}
	return true;
}
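// ExtractFaceRotationInDegrees() is not defined in this snippet. The official
// FaceBasics sample converts the face rotation quaternion to Euler angles
// roughly like this; a sketch following that sample's convention:
#define _USE_MATH_DEFINES
#include <cmath>

static void ExtractFaceRotationInDegrees(const Vector4* pQuaternion, int* pPitch, int* pYaw, int* pRoll)
{
	double x = pQuaternion->x;
	double y = pQuaternion->y;
	double z = pQuaternion->z;
	double w = pQuaternion->w;

	// Convert the face rotation quaternion to Euler angles in degrees
	*pPitch = static_cast<int>(std::atan2(2 * (y * z + w * x), w * w - x * x - y * y + z * z) / M_PI * 180.0);
	*pYaw   = static_cast<int>(std::asin(2 * (w * y - x * z)) / M_PI * 180.0);
	*pRoll  = static_cast<int>(std::atan2(2 * (x * y + w * z), w * w + x * x - y * y - z * z) / M_PI * 180.0);
}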
int _tmain( int argc, _TCHAR* argv[] )
{
	cv::setUseOptimized( true );

	// Sensor
	IKinectSensor* pSensor;
	HRESULT hResult = S_OK;
	hResult = GetDefaultKinectSensor( &pSensor );
	if( FAILED( hResult ) ){
		std::cerr << "Error : GetDefaultKinectSensor" << std::endl;
		return -1;
	}

	hResult = pSensor->Open();
	if( FAILED( hResult ) ){
		std::cerr << "Error : IKinectSensor::Open()" << std::endl;
		return -1;
	}

	// Source
	IColorFrameSource* pColorSource;
	hResult = pSensor->get_ColorFrameSource( &pColorSource );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IKinectSensor::get_ColorFrameSource()" << std::endl;
		return -1;
	}

	IBodyFrameSource* pBodySource;
	hResult = pSensor->get_BodyFrameSource( &pBodySource );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IKinectSensor::get_BodyFrameSource()" << std::endl;
		return -1;
	}

	// Reader
	IColorFrameReader* pColorReader;
	hResult = pColorSource->OpenReader( &pColorReader );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IColorFrameSource::OpenReader()" << std::endl;
		return -1;
	}

	IBodyFrameReader* pBodyReader;
	hResult = pBodySource->OpenReader( &pBodyReader );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IBodyFrameSource::OpenReader()" << std::endl;
		return -1;
	}

	// Description
	IFrameDescription* pDescription;
	hResult = pColorSource->get_FrameDescription( &pDescription );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IColorFrameSource::get_FrameDescription()" << std::endl;
		return -1;
	}

	int width = 0;
	int height = 0;
	pDescription->get_Width( &width ); // 1920
	pDescription->get_Height( &height ); // 1080
	unsigned int bufferSize = width * height * 4 * sizeof( unsigned char );

	cv::Mat bufferMat( height, width, CV_8UC4 );
	cv::Mat bodyMat( height / 2, width / 2, CV_8UC4 );
	cv::namedWindow( "Body" );

	// Color Table
	cv::Vec3b color[BODY_COUNT];
	color[0] = cv::Vec3b( 255, 0, 0 );
	color[1] = cv::Vec3b( 0, 255, 0 );
	color[2] = cv::Vec3b( 0, 0, 255 );
	color[3] = cv::Vec3b( 255, 255, 0 );
	color[4] = cv::Vec3b( 255, 0, 255 );
	color[5] = cv::Vec3b( 0, 255, 255 );

	// Coordinate Mapper
	ICoordinateMapper* pCoordinateMapper;
	hResult = pSensor->get_CoordinateMapper( &pCoordinateMapper );
	if( FAILED( hResult ) ){
		std::cerr << "Error : IKinectSensor::get_CoordinateMapper()" << std::endl;
		return -1;
	}

	while( 1 ){
		// Frame
		IColorFrame* pColorFrame = nullptr;
		hResult = pColorReader->AcquireLatestFrame( &pColorFrame );
		if( SUCCEEDED( hResult ) ){
			hResult = pColorFrame->CopyConvertedFrameDataToArray( bufferSize, reinterpret_cast<BYTE*>( bufferMat.data ), ColorImageFormat::ColorImageFormat_Bgra );
			if( SUCCEEDED( hResult ) ){
				cv::resize( bufferMat, bodyMat, cv::Size(), 0.5, 0.5 );
			}
		}

		IBodyFrame* pBodyFrame = nullptr;
		hResult = pBodyReader->AcquireLatestFrame( &pBodyFrame );
		if( SUCCEEDED( hResult ) ){
			IBody* pBody[BODY_COUNT] = { 0 };
			hResult = pBodyFrame->GetAndRefreshBodyData( BODY_COUNT, pBody );
			if( SUCCEEDED( hResult ) ){
				for( int count = 0; count < BODY_COUNT; count++ ){
					BOOLEAN bTracked = false;
					hResult = pBody[count]->get_IsTracked( &bTracked );
					if( SUCCEEDED( hResult ) && bTracked ){
						Joint joint[JointType::JointType_Count];
						hResult = pBody[count]->GetJoints( JointType::JointType_Count, joint );
						if( SUCCEEDED( hResult ) ){
							// Left Hand State
							HandState leftHandState = HandState::HandState_Unknown;
							hResult = pBody[count]->get_HandLeftState( &leftHandState );
							if( SUCCEEDED( hResult ) ){
								ColorSpacePoint colorSpacePoint = { 0 };
								hResult = pCoordinateMapper->MapCameraPointToColorSpace( joint[JointType::JointType_HandLeft].Position, &colorSpacePoint );
								if( SUCCEEDED( hResult ) ){
									int x = static_cast<int>( colorSpacePoint.X );
									int y = static_cast<int>( colorSpacePoint.Y );
									if( ( x >= 0 ) && ( x < width ) && ( y >= 0 ) && ( y < height ) ){
										if( leftHandState == HandState::HandState_Open ){
											cv::circle( bufferMat, cv::Point( x, y ), 75, cv::Scalar( 0, 128, 0 ), 5, CV_AA );
										}
										else if( leftHandState == HandState::HandState_Closed ){
											cv::circle( bufferMat, cv::Point( x, y ), 75, cv::Scalar( 0, 0, 128 ), 5, CV_AA );
										}
										else if( leftHandState == HandState::HandState_Lasso ){
											cv::circle( bufferMat, cv::Point( x, y ), 75, cv::Scalar( 128, 128, 0 ), 5, CV_AA );
										}
									}
								}
							}

							// Right Hand State
							HandState rightHandState = HandState::HandState_Unknown;
							hResult = pBody[count]->get_HandRightState( &rightHandState );
							if( SUCCEEDED( hResult ) ){
								ColorSpacePoint colorSpacePoint = { 0 };
								hResult = pCoordinateMapper->MapCameraPointToColorSpace( joint[JointType::JointType_HandRight].Position, &colorSpacePoint );
								if( SUCCEEDED( hResult ) ){
									int x = static_cast<int>( colorSpacePoint.X );
									int y = static_cast<int>( colorSpacePoint.Y );
									if( ( x >= 0 ) && ( x < width ) && ( y >= 0 ) && ( y < height ) ){
										if( rightHandState == HandState::HandState_Open ){
											cv::circle( bufferMat, cv::Point( x, y ), 75, cv::Scalar( 0, 128, 0 ), 5, CV_AA );
										}
										else if( rightHandState == HandState::HandState_Closed ){
											cv::circle( bufferMat, cv::Point( x, y ), 75, cv::Scalar( 0, 0, 128 ), 5, CV_AA );
										}
										else if( rightHandState == HandState::HandState_Lasso ){
											cv::circle( bufferMat, cv::Point( x, y ), 75, cv::Scalar( 128, 128, 0 ), 5, CV_AA );
										}
									}
								}
							}

							// Joint
							for( int type = 0; type < JointType::JointType_Count; type++ ){
								ColorSpacePoint colorSpacePoint = { 0 };
								pCoordinateMapper->MapCameraPointToColorSpace( joint[type].Position, &colorSpacePoint );
								int x = static_cast<int>( colorSpacePoint.X );
								int y = static_cast<int>( colorSpacePoint.Y );
								if( ( x >= 0 ) && ( x < width ) && ( y >= 0 ) && ( y < height ) ){
									cv::circle( bufferMat, cv::Point( x, y ), 5, static_cast<cv::Scalar>( color[count] ), -1, CV_AA );
								}
							}
						}

						/*// Activity
						UINT capacity = 0;
						DetectionResult detectionResults = DetectionResult::DetectionResult_Unknown;
						hResult = pBody[count]->GetActivityDetectionResults( capacity, &detectionResults );
						if( SUCCEEDED( hResult ) ){
							if( detectionResults == DetectionResult::DetectionResult_Yes ){
								switch( capacity ){
									case Activity::Activity_EyeLeftClosed:
										std::cout << "Activity_EyeLeftClosed" << std::endl;
										break;
									case Activity::Activity_EyeRightClosed:
										std::cout << "Activity_EyeRightClosed" << std::endl;
										break;
									case Activity::Activity_MouthOpen:
										std::cout << "Activity_MouthOpen" << std::endl;
										break;
									case Activity::Activity_MouthMoved:
										std::cout << "Activity_MouthMoved" << std::endl;
										break;
									case Activity::Activity_LookingAway:
										std::cout << "Activity_LookingAway" << std::endl;
										break;
									default:
										break;
								}
							}
						}
						else{
							std::cerr << "Error : IBody::GetActivityDetectionResults()" << std::endl;
						}*/

						/*// Appearance
						capacity = 0;
						detectionResults = DetectionResult::DetectionResult_Unknown;
						hResult = pBody[count]->GetAppearanceDetectionResults( capacity, &detectionResults );
						if( SUCCEEDED( hResult ) ){
							if( detectionResults == DetectionResult::DetectionResult_Yes ){
								switch( capacity ){
									case Appearance::Appearance_WearingGlasses:
										std::cout << "Appearance_WearingGlasses" << std::endl;
										break;
									default:
										break;
								}
							}
						}
						else{
							std::cerr << "Error : IBody::GetAppearanceDetectionResults()" << std::endl;
						}*/

						/*// Expression
						capacity = 0;
						detectionResults = DetectionResult::DetectionResult_Unknown;
						hResult = pBody[count]->GetExpressionDetectionResults( capacity, &detectionResults );
						if( SUCCEEDED( hResult ) ){
							if( detectionResults == DetectionResult::DetectionResult_Yes ){
								switch( capacity ){
									case Expression::Expression_Happy:
										std::cout << "Expression_Happy" << std::endl;
										break;
									case Expression::Expression_Neutral:
										std::cout << "Expression_Neutral" << std::endl;
										break;
									default:
										break;
								}
							}
						}
						else{
							std::cerr << "Error : IBody::GetExpressionDetectionResults()" << std::endl;
						}*/

						// Lean
						PointF amount;
						hResult = pBody[count]->get_Lean( &amount );
						if( SUCCEEDED( hResult ) ){
							std::cout << "amount : " << amount.X << ", " << amount.Y << std::endl;
						}
					}
				}
				cv::resize( bufferMat, bodyMat, cv::Size(), 0.5, 0.5 );
			}
			for( int count = 0; count < BODY_COUNT; count++ ){
				SafeRelease( pBody[count] );
			}
		}

		SafeRelease( pColorFrame );
		SafeRelease( pBodyFrame );

		cv::imshow( "Body", bodyMat );

		if( cv::waitKey( 10 ) == VK_ESCAPE ){
			break;
		}
	}

	SafeRelease( pColorSource );
	SafeRelease( pBodySource );
	SafeRelease( pColorReader );
	SafeRelease( pBodyReader );
	SafeRelease( pDescription );
	SafeRelease( pCoordinateMapper );
	if( pSensor ){
		pSensor->Close();
	}
	SafeRelease( pSensor );
	cv::destroyAllWindows();
	return 0;
}
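// The sample above draws joints as filled circles; drawing bones only needs a
// line between two mapped joints. A minimal sketch (DrawBone is a hypothetical
// helper; pCoordinateMapper, bufferMat, width and height as in the loop above):
void DrawBone( cv::Mat& bufferMat, ICoordinateMapper* pCoordinateMapper,
               const Joint* joint, JointType a, JointType b, int width, int height )
{
	if( joint[a].TrackingState == TrackingState::TrackingState_NotTracked ||
	    joint[b].TrackingState == TrackingState::TrackingState_NotTracked ){
		return;
	}
	ColorSpacePoint pa = { 0 }, pb = { 0 };
	pCoordinateMapper->MapCameraPointToColorSpace( joint[a].Position, &pa );
	pCoordinateMapper->MapCameraPointToColorSpace( joint[b].Position, &pb );
	int ax = static_cast<int>( pa.X ), ay = static_cast<int>( pa.Y );
	int bx = static_cast<int>( pb.X ), by = static_cast<int>( pb.Y );
	if( ( ax >= 0 ) && ( ax < width ) && ( ay >= 0 ) && ( ay < height ) &&
	    ( bx >= 0 ) && ( bx < width ) && ( by >= 0 ) && ( by < height ) ){
		cv::line( bufferMat, cv::Point( ax, ay ), cv::Point( bx, by ), cv::Scalar( 255, 255, 255 ), 3, CV_AA );
	}
}
// e.g. DrawBone( bufferMat, pCoordinateMapper, joint, JointType_ShoulderLeft, JointType_ElbowLeft, width, height );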
/// <summary>
/// Main processing function
/// </summary>
// This function is called once per update loop
void CColorBasics::Update()
{
	HRESULT hr = S_OK;

	// (1) Color frame (used only to draw the background)
	if (m_pColorFrameReader)
	{
		IColorFrame* pColorFrame = NULL;
		hr = m_pColorFrameReader->AcquireLatestFrame(&pColorFrame);
		if (SUCCEEDED(hr))
		{
			INT64 nTime = 0;
			IFrameDescription* pFrameDescription = NULL;
			int nWidth = 0;
			int nHeight = 0;
			ColorImageFormat imageFormat = ColorImageFormat_None;
			UINT nBufferSize = 0;
			RGBQUAD* pBuffer = NULL;

			hr = pColorFrame->get_RelativeTime(&nTime);
			if (SUCCEEDED(hr)) {
				hr = pColorFrame->get_FrameDescription(&pFrameDescription);
			}
			if (SUCCEEDED(hr)) {
				hr = pFrameDescription->get_Width(&nWidth);
			}
			if (SUCCEEDED(hr)) {
				hr = pFrameDescription->get_Height(&nHeight);
			}
			if (SUCCEEDED(hr)) {
				hr = pColorFrame->get_RawColorImageFormat(&imageFormat);
			}
			if (SUCCEEDED(hr)) {
				if (imageFormat == ColorImageFormat_Bgra) {
					hr = pColorFrame->AccessRawUnderlyingBuffer(&nBufferSize, reinterpret_cast<BYTE**>(&pBuffer));
				}
				else if (m_pColorRGBX) {
					pBuffer = m_pColorRGBX;
					nBufferSize = cColorWidth * cColorHeight * sizeof(RGBQUAD);
					hr = pColorFrame->CopyConvertedFrameDataToArray(nBufferSize, reinterpret_cast<BYTE*>(pBuffer), ColorImageFormat_Bgra);
				}
				else {
					hr = E_FAIL;
				}
			}
			if (SUCCEEDED(hr)) {
				ProcessColor(nTime, pBuffer, nWidth, nHeight);
			}
			SafeRelease(pFrameDescription);
		}
		SafeRelease(pColorFrame);
	}

	// (2) Body processing from here on
	TIMESPAN nBodyTime = 0;
	if (m_pBodyFrameReader)
	{
		IBodyFrame* pBodyFrame = NULL;
		hr = m_pBodyFrameReader->AcquireLatestFrame(&pBodyFrame);
		if (SUCCEEDED(hr))
		{
			hr = pBodyFrame->get_RelativeTime(&nBodyTime);

			// UI drawing (buttons etc.) goes here;
			// choose what to draw based on the game status
			if (!m_pGame) {
				m_pGame = new CSemaphoreGame(m_pDrawColor, m_pCoordinateMapper);
			}
			if (m_pGame) {
				m_pGame->Display(nBodyTime);
			}

			IBody* ppBodies[BODY_COUNT] = { 0 };
			if (SUCCEEDED(hr)) {
				hr = pBodyFrame->GetAndRefreshBodyData(_countof(ppBodies), ppBodies);
			}
			if (SUCCEEDED(hr)) {
				// UI handling could also live inside ProcessBody();
				// the body data drives the UI and the status changes
				if (m_pGame) {
					m_pGame->Play(nBodyTime, BODY_COUNT, ppBodies);
				}
				ProcessBody(nBodyTime, BODY_COUNT, ppBodies);
			}
			for (int i = 0; i < _countof(ppBodies); ++i) {
				SafeRelease(ppBodies[i]);
			}
		}
		SafeRelease(pBodyFrame);
	}
	return;
}
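// ProcessBody() is left undefined in these snippets, though every sample calls
// it with the same shape of arguments. A minimal sketch under that assumption
// (drawing and game logic omitted):
void CColorBasics::ProcessBody(INT64 nTime, int nBodyCount, IBody** ppBodies)
{
	for (int i = 0; i < nBodyCount; ++i) {
		IBody* pBody = ppBodies[i];
		if (!pBody) {
			continue;
		}
		BOOLEAN bTracked = FALSE;
		HRESULT hr = pBody->get_IsTracked(&bTracked);
		if (FAILED(hr) || !bTracked) {
			continue; // skip slots with no tracked body
		}
		Joint joints[JointType_Count];
		hr = pBody->GetJoints(_countof(joints), joints);
		if (SUCCEEDED(hr)) {
			// e.g. map joints[JointType_HandRight].Position through the
			// coordinate mapper and drive the UI / drawing from the result
		}
	}
}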