//---------- void Body::init(IKinectSensor * sensor) { this->reader = NULL; try { IBodyFrameSource * source = NULL; if (FAILED(sensor->get_BodyFrameSource(&source))) { throw(Exception("Failed to initialise BodyFrame source")); } if (FAILED(source->OpenReader(&this->reader))) { throw(Exception("Failed to initialise BodyFrame reader")); } SafeRelease(source); if (FAILED(sensor->get_CoordinateMapper(&this->coordinateMapper))) { throw(Exception("Failed to acquire coordinate mapper")); } bodies.resize(BODY_COUNT); } catch (std::exception & e) { SafeRelease(this->reader); throw (e); } }
// Acquire the default Kinect v2 sensor, its coordinate mapper, and open
// body + color frame readers. Returns E_FAIL (after logging) when any
// step fails, otherwise the final HRESULT.
HRESULT MyKinect2::InitializeDefaultSensor()
{
    // Without a sensor there is nothing to initialize.
    HRESULT hr = GetDefaultKinectSensor(&m_pKinectSensor);
    if (FAILED(hr))
    {
        return hr;
    }

    if (m_pKinectSensor)
    {
        // Each acquisition runs only while every previous step succeeded.
        IColorFrameSource* colorSource = NULL;
        IBodyFrameSource* bodySource = NULL;

        hr = m_pKinectSensor->Open();
        if (SUCCEEDED(hr))
        {
            hr = m_pKinectSensor->get_CoordinateMapper(&m_pCoordinateMapper);
        }
        if (SUCCEEDED(hr))
        {
            hr = m_pKinectSensor->get_BodyFrameSource(&bodySource);
        }
        if (SUCCEEDED(hr))
        {
            hr = bodySource->OpenReader(&m_pBodyFrameReader);
        }
        if (SUCCEEDED(hr))
        {
            hr = m_pKinectSensor->get_ColorFrameSource(&colorSource);
        }
        if (SUCCEEDED(hr))
        {
            hr = colorSource->OpenReader(&m_pColorFrameReader);
        }

        // The readers hold their own references; drop ours.
        SafeRelease(bodySource);
        SafeRelease(colorSource);
    }

    if (!m_pKinectSensor || FAILED(hr))
    {
        qDebug("Kinect2 initialization failed 1.");
        return E_FAIL;
    }
    return hr;
}
/// <summary> /// Initializes the default Kinect sensor /// </summary> /// <returns>indicates success or failure</returns> HRESULT CColorBasics::InitializeDefaultSensor() { HRESULT hr; hr = GetDefaultKinectSensor(&m_pKinectSensor); if (FAILED(hr)) { return hr; } if (m_pKinectSensor) { // Initialize the Kinect and get the color reader IColorFrameSource* pColorFrameSource = NULL; IBodyFrameSource* pBodyFrameSource = NULL; hr = m_pKinectSensor->Open(); if (SUCCEEDED(hr)) { hr = m_pKinectSensor->get_ColorFrameSource(&pColorFrameSource); } if (SUCCEEDED(hr)) { hr = pColorFrameSource->OpenReader(&m_pColorFrameReader); } // Body if (SUCCEEDED(hr)) { hr = m_pKinectSensor->get_CoordinateMapper(&m_pCoordinateMapper); } if (SUCCEEDED(hr)) { hr = m_pKinectSensor->get_BodyFrameSource(&pBodyFrameSource); } if (SUCCEEDED(hr)) { hr = pBodyFrameSource->OpenReader(&m_pBodyFrameReader); } SafeRelease(pBodyFrameSource); SafeRelease(pColorFrameSource); } if (!m_pKinectSensor || FAILED(hr)) { SetStatusMessage(L"No ready Kinect found!", 10000, true); return E_FAIL; } return hr; }
// Open the default Kinect sensor and set up the coordinate mapper and the
// body-frame reader used for breathing analysis. Returns E_FAIL when any
// step fails.
HRESULT BreathingClass::InitializeDefaultSensor()
{
    HRESULT hr = GetDefaultKinectSensor(&m_pKinectSensor);
    if (FAILED(hr))
    {
        return hr;
    }

    if (m_pKinectSensor)
    {
        IBodyFrameSource* bodySource = NULL;

        // Sequential SUCCEEDED() chain: any failure short-circuits the rest.
        hr = m_pKinectSensor->Open();
        if (SUCCEEDED(hr))
        {
            hr = m_pKinectSensor->get_CoordinateMapper(&m_pCoordinateMapper);
        }
        if (SUCCEEDED(hr))
        {
            hr = m_pKinectSensor->get_BodyFrameSource(&bodySource);
        }
        if (SUCCEEDED(hr))
        {
            hr = bodySource->OpenReader(&m_pBodyFrameReader);
        }

        // The reader retains the source; release our reference.
        SafeRelease(bodySource);
    }

    if (!m_pKinectSensor || FAILED(hr))
    {
        return E_FAIL;
    }
    return hr;
}
// Initialize the Kinect v2 sensor for this module (Windows only).
// Returns true when the sensor, coordinate mapper and body reader are all
// ready; false otherwise (and always false on non-Windows builds).
bool KinectV2Module::initKinect()
{
#if JUCE_WINDOWS
    HRESULT hr = GetDefaultKinectSensor(&m_pKinectSensor);
    if (FAILED(hr))
    {
        LOG("Kinect init failed");
        return false;
    }

    if (m_pKinectSensor)
    {
        // Open the device, then fetch mapper + body reader in sequence.
        IBodyFrameSource* bodySource = NULL;

        hr = m_pKinectSensor->Open();
        if (SUCCEEDED(hr)) hr = m_pKinectSensor->get_CoordinateMapper(&m_pCoordinateMapper);
        if (SUCCEEDED(hr)) hr = m_pKinectSensor->get_BodyFrameSource(&bodySource);
        if (SUCCEEDED(hr)) hr = bodySource->OpenReader(&m_pBodyFrameReader);

        // Reader holds its own reference to the source.
        SafeRelease(bodySource);
    }

    if (!m_pKinectSensor || FAILED(hr))
    {
        LOG("No ready Kinect found");
        return false;
    }

    LOG("Kinect is initialized");
    return true;
#else
    return false;
#endif
}
bool BodyStream::open() { if (!m_Device->isOpen()) { ofLogWarning("ofxKinect2::BodyStream") << "No ready Kinect2 found."; return false; } IBodyFrameSource *bodyFrameSource = nullptr; HRESULT hr = m_Device->get().kinect2->get_BodyFrameSource(&bodyFrameSource); if (SUCCEEDED(hr)) { hr = bodyFrameSource->OpenReader(&m_StreamHandle.bodyFrameReader); } safeRelease(bodyFrameSource); if (FAILED(hr)) { ofLogWarning("ofxKinect2::BodyStream") << "Can't open stream."; return false; } return Stream::open(); }
// Set up the default Kinect sensor, coordinate mapper and body reader.
// Returns true on success; always false when built without HAVE_KINECT.
bool KinectPlugin::initializeDefaultSensor()
{
#ifdef HAVE_KINECT
    HRESULT hr = GetDefaultKinectSensor(&_kinectSensor);
    if (FAILED(hr))
    {
        return false;
    }

    if (_kinectSensor)
    {
        // Chain of acquisitions; each runs only if the previous succeeded.
        IBodyFrameSource* bodySource = NULL;

        hr = _kinectSensor->Open();
        if (SUCCEEDED(hr))
        {
            hr = _kinectSensor->get_CoordinateMapper(&_coordinateMapper);
        }
        if (SUCCEEDED(hr))
        {
            hr = _kinectSensor->get_BodyFrameSource(&bodySource);
        }
        if (SUCCEEDED(hr))
        {
            hr = bodySource->OpenReader(&_bodyFrameReader);
        }

        // The reader retains the source.
        SafeRelease(bodySource);
    }

    if (!_kinectSensor || FAILED(hr))
    {
        return false;
    }
    return true;
#else
    return false;
#endif
}
// Demo entry point: reads Kinect v2 color + body frames in a loop, draws
// hand-state circles and joint dots onto the color image with OpenCV, and
// shows a half-size preview until ESC is pressed.
int _tmain( int argc, _TCHAR* argv[] ) {
    cv::setUseOptimized( true );

    // --- Sensor: acquire and open the default Kinect v2 device ---
    IKinectSensor* pSensor;
    HRESULT hResult = S_OK;
    hResult = GetDefaultKinectSensor( &pSensor );
    if( FAILED( hResult ) ){
        std::cerr << "Error : GetDefaultKinectSensor" << std::endl;
        return -1;
    }
    hResult = pSensor->Open( );
    if( FAILED( hResult ) ){
        std::cerr << "Error : IKinectSensor::Open()" << std::endl;
        return -1;
    }
    // NOTE(review): every early `return -1` below leaks the COM objects
    // acquired so far (nothing is Close()d/Release()d on these paths).

    // --- Frame sources ---
    IColorFrameSource* pColorSource;
    hResult = pSensor->get_ColorFrameSource( &pColorSource );
    if( FAILED( hResult ) ){
        std::cerr << "Error : IKinectSensor::get_ColorFrameSource()" << std::endl;
        return -1;
    }
    IBodyFrameSource* pBodySource;
    hResult = pSensor->get_BodyFrameSource( &pBodySource );
    if( FAILED( hResult ) ){
        std::cerr << "Error : IKinectSensor::get_BodyFrameSource()" << std::endl;
        return -1;
    }

    // --- Frame readers ---
    IColorFrameReader* pColorReader;
    hResult = pColorSource->OpenReader( &pColorReader );
    if( FAILED( hResult ) ){
        std::cerr << "Error : IColorFrameSource::OpenReader()" << std::endl;
        return -1;
    }
    IBodyFrameReader* pBodyReader;
    hResult = pBodySource->OpenReader( &pBodyReader );
    if( FAILED( hResult ) ){
        std::cerr << "Error : IBodyFrameSource::OpenReader()" << std::endl;
        return -1;
    }

    // --- Color frame geometry (used to size the OpenCV buffers) ---
    IFrameDescription* pDescription;
    hResult = pColorSource->get_FrameDescription( &pDescription );
    if( FAILED( hResult ) ){
        std::cerr << "Error : IColorFrameSource::get_FrameDescription()" << std::endl;
        return -1;
    }
    int width = 0;
    int height = 0;
    pDescription->get_Width( &width ); // 1920
    pDescription->get_Height( &height ); // 1080
    // BGRA: 4 bytes per pixel.
    unsigned int bufferSize = width * height * 4 * sizeof( unsigned char );

    // Full-resolution working image and half-resolution display image.
    cv::Mat bufferMat( height, width, CV_8UC4 );
    cv::Mat bodyMat( height / 2, width / 2, CV_8UC4 );
    cv::namedWindow( "Body" );

    // Color Table — one distinct color per trackable body slot.
    cv::Vec3b color[BODY_COUNT];
    color[0] = cv::Vec3b( 255, 0, 0 );
    color[1] = cv::Vec3b( 0, 255, 0 );
    color[2] = cv::Vec3b( 0, 0, 255 );
    color[3] = cv::Vec3b( 255, 255, 0 );
    color[4] = cv::Vec3b( 255, 0, 255 );
    color[5] = cv::Vec3b( 0, 255, 255 );

    // Coordinate Mapper — converts camera-space joints to color pixels.
    ICoordinateMapper* pCoordinateMapper;
    hResult = pSensor->get_CoordinateMapper( &pCoordinateMapper );
    if( FAILED( hResult ) ){
        std::cerr << "Error : IKinectSensor::get_CoordinateMapper()" << std::endl;
        return -1;
    }

    while( 1 ){
        // --- Color frame: copy into bufferMat as BGRA, keep a small copy ---
        IColorFrame* pColorFrame = nullptr;
        hResult = pColorReader->AcquireLatestFrame( &pColorFrame );
        if( SUCCEEDED( hResult ) ){
            hResult = pColorFrame->CopyConvertedFrameDataToArray( bufferSize, reinterpret_cast<BYTE*>( bufferMat.data ), ColorImageFormat::ColorImageFormat_Bgra );
            if( SUCCEEDED( hResult ) ){
                cv::resize( bufferMat, bodyMat, cv::Size(), 0.5, 0.5 );
            }
        }
        //SafeRelease( pColorFrame );

        // --- Body frame: overlay hand states and joints on bufferMat ---
        IBodyFrame* pBodyFrame = nullptr;
        hResult = pBodyReader->AcquireLatestFrame( &pBodyFrame );
        if( SUCCEEDED( hResult ) ){
            IBody* pBody[BODY_COUNT] = { 0 };
            hResult = pBodyFrame->GetAndRefreshBodyData( BODY_COUNT, pBody );
            if( SUCCEEDED( hResult ) ){
                for( int count = 0; count < BODY_COUNT; count++ ){
                    BOOLEAN bTracked = false;
                    hResult = pBody[count]->get_IsTracked( &bTracked );
                    if( SUCCEEDED( hResult ) && bTracked ){
                        Joint joint[JointType::JointType_Count];
                        hResult = pBody[ count ]->GetJoints( JointType::JointType_Count, joint );
                        if( SUCCEEDED( hResult ) ){
                            // Left hand: draw a colored ring at the hand's
                            // color-space position (green=open, red=closed,
                            // teal=lasso).
                            HandState leftHandState = HandState::HandState_Unknown;
                            hResult = pBody[count]->get_HandLeftState( &leftHandState );
                            if( SUCCEEDED( hResult ) ){
                                ColorSpacePoint colorSpacePoint = { 0 };
                                hResult = pCoordinateMapper->MapCameraPointToColorSpace( joint[JointType::JointType_HandLeft].Position, &colorSpacePoint );
                                if( SUCCEEDED( hResult ) ){
                                    int x = static_cast<int>( colorSpacePoint.X );
                                    int y = static_cast<int>( colorSpacePoint.Y );
                                    if( ( x >= 0 ) && ( x < width ) && ( y >= 0 ) && ( y < height ) ){
                                        if( leftHandState == HandState::HandState_Open ){
                                            cv::circle( bufferMat, cv::Point( x, y ), 75, cv::Scalar( 0, 128, 0 ), 5, CV_AA );
                                        }
                                        else if( leftHandState == HandState::HandState_Closed ){
                                            cv::circle( bufferMat, cv::Point( x, y ), 75, cv::Scalar( 0, 0, 128 ), 5, CV_AA );
                                        }
                                        else if( leftHandState == HandState::HandState_Lasso ){
                                            cv::circle( bufferMat, cv::Point( x, y ), 75, cv::Scalar( 128, 128, 0 ), 5, CV_AA );
                                        }
                                    }
                                }
                            }

                            // Right hand: same overlay as the left hand.
                            HandState rightHandState = HandState::HandState_Unknown;
                            hResult = pBody[count]->get_HandRightState( &rightHandState );
                            if( SUCCEEDED( hResult ) ){
                                ColorSpacePoint colorSpacePoint = { 0 };
                                hResult = pCoordinateMapper->MapCameraPointToColorSpace( joint[JointType::JointType_HandRight].Position, &colorSpacePoint );
                                if( SUCCEEDED( hResult ) ){
                                    int x = static_cast<int>( colorSpacePoint.X );
                                    int y = static_cast<int>( colorSpacePoint.Y );
                                    if( ( x >= 0 ) && ( x < width ) && ( y >= 0 ) && ( y < height ) ){
                                        if( rightHandState == HandState::HandState_Open ){
                                            cv::circle( bufferMat, cv::Point( x, y ), 75, cv::Scalar( 0, 128, 0 ), 5, CV_AA );
                                        }
                                        else if( rightHandState == HandState::HandState_Closed ){
                                            cv::circle( bufferMat, cv::Point( x, y ), 75, cv::Scalar( 0, 0, 128 ), 5, CV_AA );
                                        }
                                        else if( rightHandState == HandState::HandState_Lasso ){
                                            cv::circle( bufferMat, cv::Point( x, y ), 75, cv::Scalar( 128, 128, 0 ), 5, CV_AA );
                                        }
                                    }
                                }
                            }

                            // All joints: filled dot in this body's color.
                            for( int type = 0; type < JointType::JointType_Count; type++ ){
                                ColorSpacePoint colorSpacePoint = { 0 };
                                pCoordinateMapper->MapCameraPointToColorSpace( joint[type].Position, &colorSpacePoint );
                                int x = static_cast<int>( colorSpacePoint.X );
                                int y = static_cast<int>( colorSpacePoint.Y );
                                if( ( x >= 0 ) && ( x < width ) && ( y >= 0 ) && ( y < height ) ){
                                    cv::circle( bufferMat, cv::Point( x, y ), 5, static_cast< cv::Scalar >( color[count] ), -1, CV_AA );
                                }
                            }
                        }

                        /*// Activity
                        UINT capacity = 0;
                        DetectionResult detectionResults = DetectionResult::DetectionResult_Unknown;
                        hResult = pBody[count]->GetActivityDetectionResults( capacity, &detectionResults );
                        if( SUCCEEDED( hResult ) ){
                            if( detectionResults == DetectionResult::DetectionResult_Yes ){
                                switch( capacity ){
                                    case Activity::Activity_EyeLeftClosed: std::cout << "Activity_EyeLeftClosed" << std::endl; break;
                                    case Activity::Activity_EyeRightClosed: std::cout << "Activity_EyeRightClosed" << std::endl; break;
                                    case Activity::Activity_MouthOpen: std::cout << "Activity_MouthOpen" << std::endl; break;
                                    case Activity::Activity_MouthMoved: std::cout << "Activity_MouthMoved" << std::endl; break;
                                    case Activity::Activity_LookingAway: std::cout << "Activity_LookingAway" << std::endl; break;
                                    default: break;
                                }
                            }
                        }
                        else{
                            std::cerr << "Error : IBody::GetActivityDetectionResults()" << std::endl;
                        }*/

                        /*// Appearance
                        capacity = 0;
                        detectionResults = DetectionResult::DetectionResult_Unknown;
                        hResult = pBody[count]->GetAppearanceDetectionResults( capacity, &detectionResults );
                        if( SUCCEEDED( hResult ) ){
                            if( detectionResults == DetectionResult::DetectionResult_Yes ){
                                switch( capacity ){
                                    case Appearance::Appearance_WearingGlasses: std::cout << "Appearance_WearingGlasses" << std::endl; break;
                                    default: break;
                                }
                            }
                        }
                        else{
                            std::cerr << "Error : IBody::GetAppearanceDetectionResults()" << std::endl;
                        }*/

                        /*// Expression
                        capacity = 0;
                        detectionResults = DetectionResult::DetectionResult_Unknown;
                        hResult = pBody[count]->GetExpressionDetectionResults( capacity, &detectionResults );
                        if( SUCCEEDED( hResult ) ){
                            if( detectionResults == DetectionResult::DetectionResult_Yes ){
                                switch( capacity ){
                                    case Expression::Expression_Happy: std::cout << "Expression_Happy" << std::endl; break;
                                    case Expression::Expression_Neutral: std::cout << "Expression_Neutral" << std::endl; break;
                                    default: break;
                                }
                            }
                        }
                        else{
                            std::cerr << "Error : IBody::GetExpressionDetectionResults()" << std::endl;
                        }*/

                        // Lean: body tilt in the -1..1 range on X and Y.
                        PointF amount;
                        hResult = pBody[count]->get_Lean( &amount );
                        if( SUCCEEDED( hResult ) ){
                            std::cout << "amount : " << amount.X << ", " << amount.Y << std::endl;
                        }
                    }
                }
                // Refresh the preview with the freshly annotated image.
                cv::resize( bufferMat, bodyMat, cv::Size(), 0.5, 0.5 );
            }
            for( int count = 0; count < BODY_COUNT; count++ ){
                SafeRelease( pBody[count] );
            }
        }
        //SafeRelease( pBodyFrame );

        // Release per-iteration frames (both are safe to release when null).
        SafeRelease( pColorFrame );
        SafeRelease( pBodyFrame );

        cv::imshow( "Body", bodyMat );

        // ESC exits the capture loop.
        if( cv::waitKey( 10 ) == VK_ESCAPE ){
            break;
        }
    }

    // --- Orderly teardown of every COM object acquired above ---
    SafeRelease( pColorSource );
    SafeRelease( pBodySource );
    SafeRelease( pColorReader );
    SafeRelease( pBodyReader );
    SafeRelease( pDescription );
    SafeRelease( pCoordinateMapper );
    if( pSensor ){
        pSensor->Close();
    }
    SafeRelease( pSensor );
    cv::destroyAllWindows();

    return 0;
}
/// <summary> /// Initializes the default Kinect sensor /// </summary> /// <returns>S_OK on success else the failure code</returns> HRESULT CFaceBasics::InitializeDefaultSensor() { HRESULT hr; hr = GetDefaultKinectSensor(&m_pKinectSensor); if (FAILED(hr)) { return hr; } if (m_pKinectSensor) { // Initialize Kinect and get color, body and face readers IColorFrameSource* pColorFrameSource = nullptr; IBodyFrameSource* pBodyFrameSource = nullptr; hr = m_pKinectSensor->Open(); if (SUCCEEDED(hr)) { hr = m_pKinectSensor->get_CoordinateMapper(&m_pCoordinateMapper); } if (SUCCEEDED(hr)) { hr = m_pKinectSensor->get_ColorFrameSource(&pColorFrameSource); } if (SUCCEEDED(hr)) { hr = pColorFrameSource->OpenReader(&m_pColorFrameReader); } if (SUCCEEDED(hr)) { hr = m_pKinectSensor->get_BodyFrameSource(&pBodyFrameSource); } if (SUCCEEDED(hr)) { hr = pBodyFrameSource->OpenReader(&m_pBodyFrameReader); } if (SUCCEEDED(hr)) { // create a face frame source + reader to track each body in the fov for (int i = 0; i < BODY_COUNT; i++) { if (SUCCEEDED(hr)) { // create the face frame source by specifying the required face frame features hr = CreateFaceFrameSource(m_pKinectSensor, 0, c_FaceFrameFeatures, &m_pFaceFrameSources[i]); } if (SUCCEEDED(hr)) { // open the corresponding reader hr = m_pFaceFrameSources[i]->OpenReader(&m_pFaceFrameReaders[i]); } if (SUCCEEDED(hr)) { // create the face frame source by specifying the required face frame features hr = CreateHighDefinitionFaceFrameSource(m_pKinectSensor, &m_phdFaceFrameSources[i]); } if (SUCCEEDED(hr)) { // open the corresponding reader hr = m_phdFaceFrameSources[i]->OpenReader(&m_phdFaceFrameReaders[i]); } if (SUCCEEDED(hr)) { // create the face frame alignment hr = CreateFaceAlignment(&m_phdFaceAlignments[i]); } } } SafeRelease(pColorFrameSource); SafeRelease(pBodyFrameSource); } if (!m_pKinectSensor || FAILED(hr)) { SetStatusMessage(L"No ready Kinect found!", 10000, true); return E_FAIL; } return hr; }
//--------------------------------------------------------------
// One-time app setup: initializes the Kinect sources and a body-frame
// reader, then configures all particle emitters, the vector field and
// rendering state for the openFrameworks app.
void testApp::setup() {
    HRESULT hr;

    shader.load("shaders/bodyIndex");

    // Open the Kinect and enable every stream this app consumes.
    kinect.open();
    kinect.initDepthSource();
    kinect.initColorSource();
    kinect.initInfraredSource();
    kinect.initBodySource();
    kinect.initBodyIndexSource();

    // Initialize the Kinect and get coordinate mapper and the body reader.
    IBodyFrameSource* pBodyFrameSource = NULL;
    hr = kinect.getSensor()->get_CoordinateMapper(&m_pCoordinateMapper);
    ofLogNotice("Initialized Kinect");
    if (SUCCEEDED(hr)) {
        hr = kinect.getSensor()->get_BodyFrameSource(&pBodyFrameSource);
    }
    if (SUCCEEDED(hr)) {
        hr = pBodyFrameSource->OpenReader(&m_pBodyFrameReader);
    }
    // The reader holds its own reference to the source.
    SafeRelease(pBodyFrameSource);

    // All four edge emitters start enabled.
    isLeftEmitterEnabled = isRightEmitterEnabled = isTopEmitterEnabled = isBottomEmitterEnabled = true;

    ofSetFrameRate(60.0);
    ofBackground(0, 0, 0);

    // Hand emitters: short-lived bluish particles that follow the hands.
    leftHandEmitter.velSpread = ofVec3f(25.0, 25.0);
    leftHandEmitter.life = 3.0;
    leftHandEmitter.lifeSpread = 1;
    leftHandEmitter.numPars = 10;
    leftHandEmitter.color = ofColor(200, 200, 255);
    leftHandEmitter.colorSpread = ofColor(20, 20, 0);
    leftHandEmitter.size = 22;

    rightHandEmitter.velSpread = ofVec3f(25.0, 25.0);
    rightHandEmitter.life = 3.0;
    rightHandEmitter.lifeSpread = 1;
    rightHandEmitter.numPars = 10;
    rightHandEmitter.color = ofColor(200, 200, 255);
    rightHandEmitter.colorSpread = ofColor(20, 20, 0);
    rightHandEmitter.size = 22;

    // Edge emitters: the left one is configured fully, the others are
    // copies with position/velocity/color overridden.
    leftEmitter.setPosition(ofVec3f(0, ofGetHeight() / 3));
    leftEmitter.setVelocity(ofVec3f(150.0, 0.0));
    leftEmitter.posSpread = ofVec3f(10, 10.0);
    leftEmitter.velSpread = ofVec3f(10.0, 10);
    leftEmitter.life = 15;
    leftEmitter.lifeSpread = 5.0;
    leftEmitter.numPars = 2;
    leftEmitter.color = ofColor(200, 100, 100);
    leftEmitter.colorSpread = ofColor(50, 50, 50);
    leftEmitter.size = 22;

    rightEmitter = leftEmitter;
    rightEmitter.setPosition(ofVec3f(ofGetWidth() - 1, ofGetHeight() * 2 / 3));
    rightEmitter.setVelocity(ofVec3f(-150.0, 0.0));
    rightEmitter.color = ofColor(100, 100, 200);
    rightEmitter.colorSpread = ofColor(50, 50, 50);

    topEmitter = leftEmitter;
    topEmitter.setPosition(ofVec3f(ofGetWidth() * 2 / 3, 0));
    topEmitter.setVelocity(ofVec3f(0.0, 150.0));
    topEmitter.color = ofColor(100, 200, 100);
    topEmitter.colorSpread = ofColor(50, 50, 50);

    botEmitter = leftEmitter;
    botEmitter.setPosition(ofVec3f(ofGetWidth() / 3, ofGetHeight() - 1));
    botEmitter.setVelocity(ofVec3f(0.0, -150.0));
    botEmitter.color = ofColor(200, 200, 0);
    botEmitter.colorSpread = ofColor(50, 50, 0);

    secondPersonEmitter = botEmitter;
    secondPersonEmitter.setPosition(ofVec3f(ofGetWidth() / 3, ofGetHeight() - 1));
    secondPersonEmitter.setVelocity(ofVec3f(0.0, -150.0));
    // NOTE(review): green component 270 exceeds the usual 0-255 ofColor
    // range — presumably 200 or 255 was intended; confirm.
    secondPersonEmitter.color = ofColor(80, 270, 121);
    secondPersonEmitter.colorSpread = ofColor(50, 50, 0);

    vectorField.allocate(128, 128, 3);

    // Particle textures.
    ofLoadImage(pTex, "p.png");
    ofLoadImage(p1Tex, "p1.png");
    ofLoadImage(p2Tex, "p2.png");

    // Simulation tuning constants.
    rotAcc = 4500;
    gravAcc = 13500;
    drag = 0.5;
    fieldMult = 40.0;
    displayMode = 0;

    ofEnableBlendMode(OF_BLENDMODE_ADD);
}
// Create per-body HD face tracking objects: a face frame source/reader,
// an HD face source/reader, a model builder, an alignment object and a
// face model for each of the BODY_COUNT slots, plus the body-frame reader
// they all depend on. Returns the first failing HRESULT.
HRESULT KinectHDFaceGrabber::initHDFaceReader()
{
    IBodyFrameSource* pBodyFrameSource = nullptr;
    UINT32 vertices = 0;
    HRESULT hr = GetFaceModelVertexCount(&vertices);

    if (SUCCEEDED(hr)){
        hr = m_pKinectSensor->get_BodyFrameSource(&pBodyFrameSource);
    }

    // One deformation buffer per body slot; CreateFaceModel reads from
    // deformations[i].data(). (A duplicate local vector that shadowed this
    // one inside the loop has been removed.)
    std::vector<std::vector<float>> deformations(BODY_COUNT, std::vector<float>(FaceShapeDeformations::FaceShapeDeformations_Count));

    if (SUCCEEDED(hr)){
        // create a face frame source + reader to track each body in the fov
        for (int i = 0; i < BODY_COUNT; i++){
            if (SUCCEEDED(hr)){
                // create the face frame source by specifying the required face frame features
                hr = CreateFaceFrameSource(m_pKinectSensor, 0, c_FaceFrameFeatures, &m_pFaceFrameSources[i]);
            }
            if (SUCCEEDED(hr)){
                // open the corresponding reader
                hr = m_pFaceFrameSources[i]->OpenReader(&m_pFaceFrameReaders[i]);
            }
            if (SUCCEEDED(hr)){
                hr = CreateHighDefinitionFaceFrameSource(m_pKinectSensor, &m_pHDFaceSource[i]);
            }
            if (SUCCEEDED(hr)){
                // Only configure the source once it actually exists.
                // (Previously this ran even when the creation above failed,
                // dereferencing a null m_pHDFaceSource[i].)
                m_pHDFaceSource[i]->put_TrackingQuality(FaceAlignmentQuality_High);
                hr = m_pHDFaceSource[i]->OpenReader(&m_pHDFaceReader[i]);
            }
            if (SUCCEEDED(hr)){
                hr = m_pHDFaceSource[i]->OpenModelBuilder(FaceModelBuilderAttributes::FaceModelBuilderAttributes_None, &m_pFaceModelBuilder[i]);
            }
            if (SUCCEEDED(hr)){
                hr = m_pFaceModelBuilder[i]->BeginFaceDataCollection();
            }
            if (SUCCEEDED(hr)){
                hr = CreateFaceAlignment(&m_pFaceAlignment[i]);
            }
            if (SUCCEEDED(hr)){
                // Pre-size the output buffers to one entry per model vertex.
                m_HDFaceDetectedPointsCamSpace[i].resize(vertices);
                m_HDFaceDetectedPointsColorSpace[i].resize(vertices);
            }

            // Create Face Model
            hr = CreateFaceModel(1.0f, FaceShapeDeformations::FaceShapeDeformations_Count, deformations[i].data(), &m_pFaceModel[i]);
            if (FAILED(hr)){
                std::cerr << "Error : CreateFaceModel()" << std::endl;
                // Release the source before bailing out (was leaked here).
                SafeRelease(pBodyFrameSource);
                return hr;
            }
        }

        if (SUCCEEDED(hr)){
            hr = pBodyFrameSource->OpenReader(&m_pBodyFrameReader);
        }
        SafeRelease(pBodyFrameSource);
    }

    return hr;
}
/// Initializes the default Kinect sensor HRESULT CBodyBasics::InitializeDefaultSensor() { //用于判断每次读取操作的成功与否 HRESULT hr; //搜索kinect hr = GetDefaultKinectSensor(&m_pKinectSensor); if (FAILED(hr)){ return hr; } //找到kinect设备 if (m_pKinectSensor) { // Initialize the Kinect and get coordinate mapper and the body reader IBodyFrameSource* pBodyFrameSource = NULL;//读取骨架 IDepthFrameSource* pDepthFrameSource = NULL;//读取深度信息 IBodyIndexFrameSource* pBodyIndexFrameSource = NULL;//读取背景二值图 //打开kinect hr = m_pKinectSensor->Open(); //coordinatemapper if (SUCCEEDED(hr)) { hr = m_pKinectSensor->get_CoordinateMapper(&m_pCoordinateMapper); } //bodyframe if (SUCCEEDED(hr)) { hr = m_pKinectSensor->get_BodyFrameSource(&pBodyFrameSource); } if (SUCCEEDED(hr)) { hr = pBodyFrameSource->OpenReader(&m_pBodyFrameReader); } //depth frame if (SUCCEEDED(hr)){ hr = m_pKinectSensor->get_DepthFrameSource(&pDepthFrameSource); } if (SUCCEEDED(hr)){ hr = pDepthFrameSource->OpenReader(&m_pDepthFrameReader); } //body index frame if (SUCCEEDED(hr)){ hr = m_pKinectSensor->get_BodyIndexFrameSource(&pBodyIndexFrameSource); } if (SUCCEEDED(hr)){ hr = pBodyIndexFrameSource->OpenReader(&m_pBodyIndexFrameReader); } SafeRelease(pBodyFrameSource); SafeRelease(pDepthFrameSource); SafeRelease(pBodyIndexFrameSource); } if (!m_pKinectSensor || FAILED(hr)) { std::cout << "Kinect initialization failed!" << std::endl; return E_FAIL; } //skeletonImg,用于画骨架、背景二值图的MAT skeletonImg.create(cDepthHeight, cDepthWidth, CV_8UC3); skeletonImg.setTo(0); //depthImg,用于画深度信息的MAT depthImg.create(cDepthHeight, cDepthWidth, CV_8UC1); depthImg.setTo(0); return hr; }
int main(int argc, char** argv) { // 1a. Get default Sensor std::cout << "Try to get default sensor" << std::endl; IKinectSensor* pSensor = nullptr; if (GetDefaultKinectSensor(&pSensor) != S_OK) { cerr << "Get Sensor failed" << std::endl; return -1; } // 1b. Open sensor std::cout << "Try to open sensor" << std::endl; if (pSensor->Open() != S_OK) { cerr << "Can't open sensor" << std::endl; return -1; } // 2. Color Related code IColorFrameReader* pColorFrameReader = nullptr; cv::Mat mColorImg; UINT uBufferSize = 0; { // 2a. Get color frame source std::cout << "Try to get color source" << std::endl; IColorFrameSource* pFrameSource = nullptr; if (pSensor->get_ColorFrameSource(&pFrameSource) != S_OK) { cerr << "Can't get color frame source" << std::endl; return -1; } // 2b. Get frame description std::cout << "get color frame description" << std::endl; int iWidth = 0; int iHeight = 0; IFrameDescription* pFrameDescription = nullptr; if (pFrameSource->get_FrameDescription(&pFrameDescription) == S_OK) { pFrameDescription->get_Width(&iWidth); pFrameDescription->get_Height(&iHeight); } pFrameDescription->Release(); pFrameDescription = nullptr; // 2c. get frame reader std::cout << "Try to get color frame reader" << std::endl; if (pFrameSource->OpenReader(&pColorFrameReader) != S_OK) { cerr << "Can't get color frame reader" << std::endl; return -1; } // 2d. release Frame source std::cout << "Release frame source" << std::endl; pFrameSource->Release(); pFrameSource = nullptr; // Prepare OpenCV data mColorImg = cv::Mat(iHeight, iWidth, CV_8UC4); uBufferSize = iHeight * iWidth * 4 * sizeof(BYTE); } // 3. Body related code IBodyFrameReader* pBodyFrameReader = nullptr; IBody** aBodyData = nullptr; INT32 iBodyCount = 0; { // 3a. Get frame source std::cout << "Try to get body source" << std::endl; IBodyFrameSource* pFrameSource = nullptr; if (pSensor->get_BodyFrameSource(&pFrameSource) != S_OK) { cerr << "Can't get body frame source" << std::endl; return -1; } // 3b. 
Get the number of body if (pFrameSource->get_BodyCount(&iBodyCount) != S_OK) { cerr << "Can't get body count" << std::endl; return -1; } std::cout << " > Can trace " << iBodyCount << " bodies" << std::endl; aBodyData = new IBody*[iBodyCount]; for (int i = 0; i < iBodyCount; ++i) aBodyData[i] = nullptr; // 3c. get frame reader std::cout << "Try to get body frame reader" << std::endl; if (pFrameSource->OpenReader(&pBodyFrameReader) != S_OK) { cerr << "Can't get body frame reader" << std::endl; return -1; } // 3d. release Frame source std::cout << "Release frame source" << std::endl; pFrameSource->Release(); pFrameSource = nullptr; } // 4. get CoordinateMapper ICoordinateMapper* pCoordinateMapper = nullptr; if (pSensor->get_CoordinateMapper(&pCoordinateMapper) != S_OK) { std::cout << "Can't get coordinate mapper" << std::endl; return -1; } // Enter main loop cv::namedWindow("Body Image"); // Debug:output the velocity of joints ofstream current_average_velocityTXT("current_average_velocity.txt"); ofstream average_velocityTXT("average_velocity.txt"); int frame_count = 0; int frame_count_for_standby = 0; float positionX0[25] = {0}; float positionX1[25] = {0}; float positionY0[25] = { 0 }; float positionY1[25] = { 0 }; float positionZ0[25] = { 0 }; float positionZ1[25] = { 0 }; float velocityX[25] = { 0 }; float velocityY[25] = { 0 }; float velocityZ[25] = { 0 }; float current_velocity[25] = { 0 }; float velocityee[8] = { 0 }; float current_total_velocity = 0; float current_average_velocity = 0; float total_velocity = 0; float average_velocity = 0; while (true) { // 4a. Get last frame IColorFrame* pColorFrame = nullptr; if (pColorFrameReader->AcquireLatestFrame(&pColorFrame) == S_OK) { // 4c. Copy to OpenCV image if (pColorFrame->CopyConvertedFrameDataToArray(uBufferSize, mColorImg.data, ColorImageFormat_Bgra) != S_OK) { cerr << "Data copy error" << endl; } // 4e. release frame pColorFrame->Release(); } cv::Mat mImg = mColorImg.clone(); // 4b. 
Get body data IBodyFrame* pBodyFrame = nullptr; if (pBodyFrameReader->AcquireLatestFrame(&pBodyFrame) == S_OK) { // 4b. get Body data if (pBodyFrame->GetAndRefreshBodyData(iBodyCount, aBodyData) == S_OK) { // 4c. for each body for (int i = 0; i < iBodyCount; ++i) { IBody* pBody = aBodyData[i]; // check if is tracked BOOLEAN bTracked = false; if ((pBody->get_IsTracked(&bTracked) == S_OK) && bTracked) { // get joint position Joint aJoints[JointType::JointType_Count]; if (pBody->GetJoints(JointType::JointType_Count, aJoints) == S_OK) { DrawLine(mImg, aJoints[JointType_SpineBase], aJoints[JointType_SpineMid], pCoordinateMapper); DrawLine(mImg, aJoints[JointType_SpineMid], aJoints[JointType_SpineShoulder], pCoordinateMapper); DrawLine(mImg, aJoints[JointType_SpineShoulder], aJoints[JointType_Neck], pCoordinateMapper); DrawLine(mImg, aJoints[JointType_Neck], aJoints[JointType_Head], pCoordinateMapper); DrawLine(mImg, aJoints[JointType_SpineShoulder], aJoints[JointType_ShoulderLeft], pCoordinateMapper); DrawLine(mImg, aJoints[JointType_ShoulderLeft], aJoints[JointType_ElbowLeft], pCoordinateMapper); DrawLine(mImg, aJoints[JointType_ElbowLeft], aJoints[JointType_WristLeft], pCoordinateMapper); DrawLine(mImg, aJoints[JointType_WristLeft], aJoints[JointType_HandLeft], pCoordinateMapper); DrawLine(mImg, aJoints[JointType_HandLeft], aJoints[JointType_HandTipLeft], pCoordinateMapper); DrawLine(mImg, aJoints[JointType_HandLeft], aJoints[JointType_ThumbLeft], pCoordinateMapper); DrawLine(mImg, aJoints[JointType_SpineShoulder], aJoints[JointType_ShoulderRight], pCoordinateMapper); DrawLine(mImg, aJoints[JointType_ShoulderRight], aJoints[JointType_ElbowRight], pCoordinateMapper); DrawLine(mImg, aJoints[JointType_ElbowRight], aJoints[JointType_WristRight], pCoordinateMapper); DrawLine(mImg, aJoints[JointType_WristRight], aJoints[JointType_HandRight], pCoordinateMapper); DrawLine(mImg, aJoints[JointType_HandRight], aJoints[JointType_HandTipRight], pCoordinateMapper); DrawLine(mImg, 
aJoints[JointType_HandRight], aJoints[JointType_ThumbRight], pCoordinateMapper); DrawLine(mImg, aJoints[JointType_SpineBase], aJoints[JointType_HipLeft], pCoordinateMapper); DrawLine(mImg, aJoints[JointType_HipLeft], aJoints[JointType_KneeLeft], pCoordinateMapper); DrawLine(mImg, aJoints[JointType_KneeLeft], aJoints[JointType_AnkleLeft], pCoordinateMapper); DrawLine(mImg, aJoints[JointType_AnkleLeft], aJoints[JointType_FootLeft], pCoordinateMapper); DrawLine(mImg, aJoints[JointType_SpineBase], aJoints[JointType_HipRight], pCoordinateMapper); DrawLine(mImg, aJoints[JointType_HipRight], aJoints[JointType_KneeRight], pCoordinateMapper); DrawLine(mImg, aJoints[JointType_KneeRight], aJoints[JointType_AnkleRight], pCoordinateMapper); DrawLine(mImg, aJoints[JointType_AnkleRight], aJoints[JointType_FootRight], pCoordinateMapper); } // Debug:print out the number of frame std::cout << "frame " << ++frame_count << std::endl; for (int j = 1; j < 8; j++) { velocityee[j] = velocityee[j-1]; total_velocity += velocityee[j]; } average_velocity = total_velocity / 8.0; if (average_velocity <= 0.0015) { // determine if the person is still if (frame_count_for_standby == 0) { PlaySound(TEXT("Alarm02.wav"), NULL, SND_FILENAME); std::cout << "Start capturing points!" 
<< std::endl; } // count the number of frame whose velocity is below the threshold frame_count_for_standby++; if (frame_count_for_standby >= 5) { frame_count_for_standby = 0; } } // Debug:output the average velocity average_velocityTXT << frame_count << " " << average_velocity << std::endl; total_velocity = 0; // Update the average velocity int available_joints = 0; for (int i = 0; i < 25; i++) { // X positionX1[i] = positionX0[i]; positionX0[i] = aJoints[i].Position.X; velocityX[i] = (positionX1[i] - positionX0[i]) * (positionX1[i] - positionX0[i]); // Y positionY1[i] = positionY0[i]; positionY0[i] = aJoints[i].Position.Y; velocityY[i] = (positionY1[i] - positionY0[i]) * (positionY1[i] - positionY0[i]); // Z positionZ1[i] = positionZ0[i]; positionZ0[i] = aJoints[i].Position.Z; velocityZ[i] = (positionZ1[i] - positionZ0[i]) * (positionZ1[i] - positionZ0[i]); current_velocity[i] = sqrtf(velocityX[i] + velocityY[i] + velocityZ[i]); // exclude the discrete velocity if (current_velocity[i] < 0.01) { current_total_velocity += current_velocity[i]; available_joints++; } } // If no joint is available, save the velocity of last frame if (available_joints != 0) { current_average_velocity = current_total_velocity / available_joints; } velocityee[0] = current_average_velocity; // Debug:output the current average velocity current_average_velocityTXT << frame_count << " " << current_average_velocity << std::endl; current_total_velocity = 0; } } } else { cerr << "Can't read body data" << endl; } // 4e. release frame pBodyFrame->Release(); } // show image cv::imshow("Body Image",mImg); // 4c. check keyboard input if (cv::waitKey(30) == VK_ESCAPE) { break; } } // 3. delete body data array delete[] aBodyData; // 3. release frame reader std::cout << "Release body frame reader" << std::endl; pBodyFrameReader->Release(); pBodyFrameReader = nullptr; // 2. 
release color frame reader std::cout << "Release color frame reader" << std::endl; pColorFrameReader->Release(); pColorFrameReader = nullptr; // 1c. Close Sensor std::cout << "close sensor" << std::endl; pSensor->Close(); // 1d. Release Sensor std::cout << "Release sensor" << std::endl; pSensor->Release(); pSensor = nullptr; return 0; }
int main() { printf("Hello, Wellcome to kinect world!\n"); IKinectSensor* bb; //申请一个Sensor指针 HRESULT hr = GetDefaultKinectSensor(&bb); // 获取一个默认的Sensor if ( FAILED(hr) ) { printf("No Kinect connect to your pc!\n"); goto endstop; } BOOLEAN bIsOpen = 0; bb->get_IsOpen(&bIsOpen); // 查看下是否已经打开 printf("bIsOpen: %d\n", bIsOpen); if ( !bIsOpen ) // 没打开,则尝试打开 { hr = bb->Open(); if ( FAILED(hr) ) { printf("Kinect Open Failed!\n"); goto endstop; } printf("Kinect opened! But it need sometime to work!\n"); // 这里一定要多等会,否则下面的判断都是错误的 printf("Wait For 3000 ms...\n"); Sleep(3000); } bIsOpen = 0; bb->get_IsOpen(&bIsOpen); // 是否已经打开 printf("bIsOpen: %d\n", bIsOpen); BOOLEAN bAvaliable = 0; bb->get_IsAvailable(&bAvaliable); // 是否可用 printf("bAvaliable: %d\n", bAvaliable); DWORD dwCapability = 0; bb->get_KinectCapabilities(&dwCapability); // 获取容量 printf("dwCapability: %d\n", dwCapability); WCHAR bbuid[256] = { 0 }; bb->get_UniqueKinectId(256, bbuid); // 获取唯一ID printf("UID: %s\n",bbuid); // 音频数据获取 // 获取身体数据 IBodyFrameSource* bodys = nullptr; bb->get_BodyFrameSource(&bodys); // Body 数据源 INT32 nBodyNum = 0; bodys->get_BodyCount(&nBodyNum); // 获取body 个数,没用,一直是6 printf("Body Num: %d\n", nBodyNum); IBodyFrameReader* bodyr = nullptr; bodys->OpenReader(&bodyr); // 准备读取body数据 while (true) { IBodyFrame* bodyf = nullptr; bodyr->AcquireLatestFrame(&bodyf); // 获取最近的一帧数据 if ( !bodyf ) { Sleep(100); printf("."); continue; } IBody* ppBodies[BODY_COUNT] = { 0 }; bodyf->GetAndRefreshBodyData(BODY_COUNT, ppBodies); // 更新所有人身体数据 for (int i = 0; i < BODY_COUNT; ++i) { IBody* pBody = ppBodies[i]; // 轮询每个人的信息 if (pBody) { BOOLEAN bTracked = false; hr = pBody->get_IsTracked(&bTracked); // 检测是否被跟踪,即是否有这个人 if (bTracked) { Joint joints[JointType_Count]; HandState leftHandState = HandState_Unknown; HandState rightHandState = HandState_Unknown; pBody->get_HandLeftState(&leftHandState); // 获取左手的状态 pBody->get_HandRightState(&rightHandState); // 获取右手的状态 hr = pBody->GetJoints(_countof(joints), joints); // 获取身体的骨骼信息,25点 
printf("Person %d : Joints[0].Z %.2f\n", i, joints[0].Position.X); //简单的输出他的信息 } } } for (int i = 0; i < BODY_COUNT; ++i) { ppBodies[i]->Release(); } bodyf->Release(); } endclose: bb->Close(); endstop: system("pause"); return 0; }
/// Acquire and open the default Kinect sensor, then create readers for the
/// body, color, depth, and body-index streams plus the coordinate mapper.
/// Each step runs only while the accumulated HRESULT is still a success,
/// so the first failure propagates to the final check.
/// Returns S_OK on success, E_FAIL when no usable sensor is found.
HRESULT InitializeDefaultSensor()
{
    HRESULT hr;

    hr = GetDefaultKinectSensor(&m_pKinectSensor);
    if (FAILED(hr))
    {
        return hr;
    }

    if (m_pKinectSensor)
    {
        // Open the sensor, then wire up the coordinate mapper and the
        // body-frame reader.
        IBodyFrameSource* bodySource = NULL;
        hr = m_pKinectSensor->Open();
        if (SUCCEEDED(hr))
        {
            hr = m_pKinectSensor->get_CoordinateMapper(&m_pCoordinateMapper);
        }
        if (SUCCEEDED(hr))
        {
            hr = m_pKinectSensor->get_BodyFrameSource(&bodySource);
        }
        if (SUCCEEDED(hr))
        {
            hr = bodySource->OpenReader(&m_pBodyFrameReader);
        }
        SafeRelease(bodySource);   // reader holds its own reference

        // Color stream reader.
        IColorFrameSource* colorSource = NULL;
        if (SUCCEEDED(hr))
        {
            hr = m_pKinectSensor->get_ColorFrameSource(&colorSource);
        }
        if (SUCCEEDED(hr))
        {
            hr = colorSource->OpenReader(&m_pColorFrameReader);
        }
        SafeRelease(colorSource);

        // Depth stream reader.
        IDepthFrameSource* depthSource = NULL;
        if (SUCCEEDED(hr))
        {
            hr = m_pKinectSensor->get_DepthFrameSource(&depthSource);
        }
        if (SUCCEEDED(hr))
        {
            hr = depthSource->OpenReader(&m_pDepthFrameReader);
        }
        SafeRelease(depthSource);

        // Body-index stream reader.
        IBodyIndexFrameSource* indexSource = NULL;
        if (SUCCEEDED(hr))
        {
            hr = m_pKinectSensor->get_BodyIndexFrameSource(&indexSource);
        }
        if (SUCCEEDED(hr))
        {
            hr = indexSource->OpenReader(&m_pBodyIndexFrameReader);
        }
        SafeRelease(indexSource);
    }

    if (!m_pKinectSensor || FAILED(hr))
    {
        std::cout << "no ready Kinect found!";
        return E_FAIL;
    }

    return hr;
}
// Thread 1: per-frame body capture. Reads Kinect body frames, filters the
// joints, and pushes generated SMPL vertex/normal frames onto the shared
// VQueue/NQueue — presumably consumed by a renderer thread (TODO confirm).
// NOTE(review): the early error returns below leak pSensor (and, for the
// later ones, pBodySource) and leave the sensor open.
// NOTE(review): returning -1 through the unsigned DWORD return type yields
// 0xFFFFFFFF as the thread exit code.
DWORD WINAPI DtoG(LPVOID pParam)
{
    // Initialise the SMPL body template (male model).
    SMPL bodyTemplate = SMPL(MALE);
    cout << "SMPL::initial finished!" << endl;

    // Initialise the Kinect sensor and a body-frame reader.
    IKinectSensor *pSensor;
    HRESULT hResult = S_OK;
    hResult = GetDefaultKinectSensor(&pSensor);
    // NOTE(review): the GetDefaultKinectSensor result is overwritten
    // unchecked by the Open() call below.
    hResult = pSensor->Open();
    if (FAILED(hResult)) {
        std::cerr << "Error : IKinectSensor::Open()" << std::endl;
        return -1;
    }
    IBodyFrameSource *pBodySource;
    hResult = pSensor->get_BodyFrameSource(&pBodySource);
    if (FAILED(hResult)) {
        std::cerr << "Error : IKinectSensor::get_BodyFrameSource()" << std::endl;
        return -1;
    }
    IBodyFrameReader *pBodyReader;
    hResult = pBodySource->OpenReader(&pBodyReader);
    if (FAILED(hResult)) {
        std::cerr << "Error : IBodyFrameSource::OpenReader()" << std::endl;
        return -1;
    }
    //mat pp = zeros(24, 3);
    //mat result=bodyTemplate.gen_pose_model(pp, TRUE);
    //bodyTemplate.write_to_obj(result, "MALE.obj");

    // Holt Double Exponential Smoothing Filter, one per trackable body slot.
    Sample::FilterDoubleExponential filter[BODY_COUNT];
    // Option : Setting Smoothing Parameter
    for (int count = 0; count < BODY_COUNT; count++) {
        float smoothing = 0.5f;           // [0..1], lower values closer to raw data
        float correction = 0.5f;          // [0..1], lower values slower to correct towards the raw data
        float prediction = 0.5f;          // [0..n], the number of frames to predict into the future
        float jitterRadius = 0.05f;       // The radius in meters for jitter reduction
        float maxDeviationRadius = 0.04f; // The maximum radius in meters that filtered positions are allowed to deviate from raw data
        filter[count].Init(smoothing, correction, prediction, jitterRadius, maxDeviationRadius);
    }

    // The label number of the first body detected by Kinect; -1 = none yet.
    // Once latched, only that body slot is processed.
    int BODY_LABEL = -1;
    StopWatch time;
    time.start();
    int counter = 1;    // loop guard — never decremented, so the loop runs until the thread is killed
    bool tag = TRUE;    // push every other smoothed frame (halves the output rate)
    bool first = TRUE;  // true until the T-pose -> first-detected-pose transition has been emitted
    while (counter) {
        Vec4s vertex;
        Vec3s normal;
        //Obj new_body = BodyQueue.front();
        mat trans_joint;
        //bool judge = detectJoint(hResult, pBodyReader, joint);
        IBodyFrame *pBodyFrame = nullptr;
        hResult = pBodyReader->AcquireLatestFrame(&pBodyFrame);
        if (SUCCEEDED(hResult)) {
            IBody *pBody[BODY_COUNT] = { 0 };
            hResult = pBodyFrame->GetAndRefreshBodyData(BODY_COUNT, pBody);
            if (SUCCEEDED(hResult)) {
                for (int count = 0; count < BODY_COUNT; count++) {
                    BOOLEAN bTracked = false;
                    hResult = pBody[count]->get_IsTracked(&bTracked);
                    // Latch onto the first tracked body.
                    if (bTracked && SUCCEEDED(hResult) && BODY_LABEL == -1)
                        BODY_LABEL = count;
                    if (SUCCEEDED(hResult) && bTracked && count == BODY_LABEL) {
                        //counter--;
                        Joint joint[JointType::JointType_Count];
                        hResult = pBody[count]->GetJoints(JointType::JointType_Count, joint); //joint
                        //////////////////////// Filtered Joint //////////////////////////////////
                        filter[count].Update(joint);
                        const DirectX::XMVECTOR *vec = filter[count].GetFilteredJoints();
                        // NOTE(review): the x/y/z extracted below are never used —
                        // the filtered positions are read and then discarded.
                        for (int type = 0; type < JointType::JointType_Count; type++) {
                            if (joint[type].TrackingState != TrackingState::TrackingState_NotTracked) {
                                float x = 0.0f, y = 0.0f, z = 0.0f;
                                DirectX::XMVectorGetXPtr(&x, vec[type]);
                                DirectX::XMVectorGetYPtr(&y, vec[type]);
                                DirectX::XMVectorGetZPtr(&z, vec[type]);
                            }
                        }
                        ////////////////////////////////////////////////////////////////////////
                        // Convert Kinect joints into the joint matrix genBody expects.
                        trans_joint = JointTransform(joint);
                        //////////////// Transition from T-pose to first frame ////////////////
                        if (first == TRUE) {
                            mat pose = bodyTemplate.J_to_pose(trans_joint);
                            // Step size so each interpolation step advances ~0.04
                            // on the largest pose component.
                            float coefficient = 0.04f / max(max(pose));
                            cout << coefficient << endl;
                            mat transition = zeros(24, 3);
                            int num = 0;
                            // Walk from the zero (T) pose towards the detected pose,
                            // emitting one interpolated body mesh per step.
                            while (max(max(abs(transition))) < max(max(abs(pose)))) {
                                //transition.print("t:");
                                genFirstBody(transition, vertex, normal, bodyTemplate);
                                transition += pose*coefficient;
                                VQueue.push(vertex);
                                NQueue.push(normal);
                                num++;
                            }
                            cout << num << endl;
                            first = FALSE;
                        }
                        ///////////////////////////// Smooth by List /////////////////////////////
                        // Sliding-window average over the last 5 joint matrices.
                        mat sum = zeros(24, 3);
                        if (smoothList.size() < 5)
                            smoothList.push_back(trans_joint); // still filling the window
                        else {
                            // 'iter' is declared outside this function — TODO confirm
                            // it is not shared with another thread.
                            for (iter = smoothList.begin(); iter != smoothList.end(); ++iter) {
                                sum += (*iter);
                            }
                            sum = sum / 5; // window size is pinned at 5 by the branch above
                            smoothList.pop_front();
                            smoothList.push_back(trans_joint);
                            // Generate a smoothed body mesh from the averaged joints.
                            genBodyVector(sum, vertex, normal, bodyTemplate);
                            cout << "A new pose has been detected!" << endl;
                            // Push only every second smoothed frame.
                            if (tag == TRUE) {
                                VQueue.push(vertex);
                                NQueue.push(normal);
                                tag = FALSE;
                                cout << "num:" << VQueue.size() << endl;
                            }
                            else tag = TRUE;
                            time.stop();
                            cout << "cost:" << time.elapsed_ms() << endl;
                            time.restart();
                        }
                        //return TRUE;
                    }
                }
            }
            for (int count = 0; count < BODY_COUNT; count++) {
                SafeRelease(pBody[count]);
            }
        }
        SafeRelease(pBodyFrame);
        //if (judge)
        //{
        //	genBody(joint, new_body);
        //	cout << "A new pose has been detected!" << endl;
        //}
        //else continue;
        //new_body.scale_translate(0.30, 0, 1.0, 0);
        //new_body.unified();
        //BodyQueue.push(new_body);
    }
    // Unreachable while counter stays 1; kept for a future exit condition.
    SafeRelease(pBodySource);
    SafeRelease(pBodyReader);;
    if (pSensor) {
        pSensor->Close();
    }
    SafeRelease(pSensor);
    return 0;
}
// Inhaler-coaching demo: captures Kinect v2 color/depth/body/body-index
// streams, runs shaking/position checks on the tracked skeleton via
// 'inhaler_coach', overlays coaching prompts with OpenCV, and displays
// fullscreen-ish (1440x900) until ESC is pressed.
// NOTE(review): pDepthFrameReader, pBIFrameReader and pCoordinateMapper are
// never released, and pDepthPoints/pBodyIndex/pPointArray are never deleted;
// the OS reclaims them at exit, but add cleanup if this becomes a component.
// NOTE(review): every early 'return -1' below runs while mThread is still
// joinable, which makes ~thread() call std::terminate — confirm intended.
int main(int argc, char** argv)
{
    int first_time = 0;
    Size screen_size(1440, 900); //the dst image size,e.g.100x100
    Scalar text_color = Scalar(0, 255, 0);    // green: idle banner
    Scalar text_color2 = Scalar(0, 255, 255); // yellow: instructions
    Scalar text_color3 = Scalar(0, 0, 255);   // red: error prompt
    inhaler_coach coach;
    coach.control = 0;
    // Worker thread runs alongside the capture loop, sharing 'coach'.
    thread mThread(test_func, &coach);

    // 1a. Get Kinect Sensor
    cout << "Try to get default sensor" << endl;
    IKinectSensor* pSensor = nullptr;
    if (GetDefaultKinectSensor(&pSensor) != S_OK) {
        cerr << "Get Sensor failed" << endl;
        return -1;
    }
    // 1b. Open sensor
    cout << "Try to open sensor" << endl;
    if (pSensor->Open() != S_OK) {
        cerr << "Can't open sensor" << endl;
        return -1;
    }

    // 2. Color Related code
    IColorFrameReader* pColorFrameReader = nullptr;
    cv::Mat mColorImg;
    UINT uBufferSize = 0;
    UINT uColorPointNum = 0;
    int iWidth = 0;
    int iHeight = 0;
    {
        // 2a. Get color frame source
        cout << "Try to get color source" << endl;
        IColorFrameSource* pFrameSource = nullptr;
        if (pSensor->get_ColorFrameSource(&pFrameSource) != S_OK) {
            cerr << "Can't get color frame source" << endl;
            return -1;
        }
        // 2b. Get frame description (color image dimensions)
        cout << "get color frame description" << endl;
        IFrameDescription* pFrameDescription = nullptr;
        if (pFrameSource->get_FrameDescription(&pFrameDescription) == S_OK) {
            pFrameDescription->get_Width(&iWidth);
            pFrameDescription->get_Height(&iHeight);
        }
        // NOTE(review): Release is called even if get_FrameDescription failed
        // and pFrameDescription is still null.
        pFrameDescription->Release();
        pFrameDescription = nullptr;
        // 2c. get frame reader
        cout << "Try to get color frame reader" << endl;
        if (pFrameSource->OpenReader(&pColorFrameReader) != S_OK) {
            cerr << "Can't get color frame reader" << endl;
            return -1;
        }
        // 2d. release Frame source (reader keeps its own reference)
        cout << "Release frame source" << endl;
        pFrameSource->Release();
        pFrameSource = nullptr;
        // Prepare OpenCV data: BGRA buffer matching the color stream.
        mColorImg = cv::Mat(iHeight, iWidth, CV_8UC4);
        uBufferSize = iHeight * iWidth * 4 * sizeof(BYTE);
        uColorPointNum = iHeight * iWidth;
    }

    // 3. Depth related code
    IDepthFrameReader* pDepthFrameReader = nullptr;
    UINT uDepthPointNum = 0;
    int iDepthWidth = 0, iDepthHeight = 0;
    cout << "Try to get depth source" << endl;
    {
        // Get frame source
        IDepthFrameSource* pFrameSource = nullptr;
        if (pSensor->get_DepthFrameSource(&pFrameSource) != S_OK) {
            cerr << "Can't get depth frame source" << endl;
            return -1;
        }
        // Get frame description (depth image dimensions)
        cout << "get depth frame description" << endl;
        IFrameDescription* pFrameDescription = nullptr;
        if (pFrameSource->get_FrameDescription(&pFrameDescription) == S_OK) {
            pFrameDescription->get_Width(&iDepthWidth);
            pFrameDescription->get_Height(&iDepthHeight);
            uDepthPointNum = iDepthWidth * iDepthHeight;
        }
        pFrameDescription->Release();
        pFrameDescription = nullptr;
        // get frame reader
        cout << "Try to get depth frame reader" << endl;
        if (pFrameSource->OpenReader(&pDepthFrameReader) != S_OK) {
            cerr << "Can't get depth frame reader" << endl;
            return -1;
        }
        // release Frame source
        cout << "Release frame source" << endl;
        pFrameSource->Release();
        pFrameSource = nullptr;
    }

    // 4. Body related code
    IBodyFrameReader* pBodyFrameReader = nullptr;
    IBody** aBodyData = nullptr;
    INT32 iBodyCount = 0;
    {
        // 3a. Get frame source
        cout << "Try to get body source" << endl;
        IBodyFrameSource* pFrameSource = nullptr;
        if (pSensor->get_BodyFrameSource(&pFrameSource) != S_OK) {
            cerr << "Can't get body frame source" << endl;
            return -1;
        }
        // 3b. Get the number of body slots the sensor can track
        if (pFrameSource->get_BodyCount(&iBodyCount) != S_OK) {
            cerr << "Can't get body count" << endl;
            return -1;
        }
        cout << " > Can trace " << iBodyCount << " bodies" << endl;
        aBodyData = new IBody*[iBodyCount];
        for (int i = 0; i < iBodyCount; ++i)
            aBodyData[i] = nullptr;
        // 3c. get frame reader
        cout << "Try to get body frame reader" << endl;
        if (pFrameSource->OpenReader(&pBodyFrameReader) != S_OK) {
            cerr << "Can't get body frame reader" << endl;
            return -1;
        }
        // 3d. release Frame source
        cout << "Release frame source" << endl;
        pFrameSource->Release();
        pFrameSource = nullptr;
    }

    // 4. Body Index related code (per-pixel player labels in depth space)
    IBodyIndexFrameReader* pBIFrameReader = nullptr;
    cout << "Try to get body index source" << endl;
    {
        // Get frame source
        IBodyIndexFrameSource* pFrameSource = nullptr;
        if (pSensor->get_BodyIndexFrameSource(&pFrameSource) != S_OK) {
            cerr << "Can't get body index frame source" << endl;
            return -1;
        }
        // get frame reader
        cout << "Try to get body index frame reader" << endl;
        if (pFrameSource->OpenReader(&pBIFrameReader) != S_OK) {
            // NOTE(review): copy-pasted message — this is the body *index*
            // reader failing, not the depth reader.
            cerr << "Can't get depth frame reader" << endl;
            return -1;
        }
        // release Frame source
        cout << "Release frame source" << endl;
        pFrameSource->Release();
        pFrameSource = nullptr;
    }

    // 5. background canvas for the (debug) user-silhouette composite
    cv::Mat imgBG(iHeight, iWidth, CV_8UC3);
    imgBG.setTo(0);

    // 4. get CoordinateMapper (depth <-> color space conversions)
    ICoordinateMapper* pCoordinateMapper = nullptr;
    if (pSensor->get_CoordinateMapper(&pCoordinateMapper) != S_OK) {
        cout << "Can't get coordinate mapper" << endl;
        return -1;
    }

    // Enter main loop — scratch buffers reused every frame.
    UINT16* pDepthPoints = new UINT16[uDepthPointNum];
    BYTE* pBodyIndex = new BYTE[uDepthPointNum];
    DepthSpacePoint* pPointArray = new DepthSpacePoint[uColorPointNum];
    cv::namedWindow("Inhaler Coach");
    while (true)
    {
        // 4a. Get last color frame; on failure the previous image is reused.
        IColorFrame* pColorFrame = nullptr;
        if (pColorFrameReader->AcquireLatestFrame(&pColorFrame) == S_OK) {
            pColorFrame->CopyConvertedFrameDataToArray(uBufferSize, mColorImg.data, ColorImageFormat_Bgra);
            pColorFrame->Release();
            pColorFrame = nullptr;
        }
        cv::Mat mImg = mColorImg.clone();

        // 8b. read depth frame
        IDepthFrame* pDepthFrame = nullptr;
        if (pDepthFrameReader->AcquireLatestFrame(&pDepthFrame) == S_OK) {
            pDepthFrame->CopyFrameDataToArray(uDepthPointNum, pDepthPoints);
            pDepthFrame->Release();
            pDepthFrame = nullptr;
        }

        // 8c. read body index frame
        IBodyIndexFrame* pBIFrame = nullptr;
        if (pBIFrameReader->AcquireLatestFrame(&pBIFrame) == S_OK) {
            pBIFrame->CopyFrameDataToArray(uDepthPointNum, pBodyIndex);
            pBIFrame->Release();
            pBIFrame = nullptr;
        }

#ifdef COACH_DEBUG
        // Debug build: composite only user pixels onto a black background.
        cv::Mat imgTarget = imgBG.clone();
        // 9b. map color to depth
        if (pCoordinateMapper->MapColorFrameToDepthSpace(uDepthPointNum, pDepthPoints, uColorPointNum, pPointArray) == S_OK)
        {
            for (int y = 0; y < imgTarget.rows; ++y)
            {
                for (int x = 0; x < imgTarget.cols; ++x)
                {
                    // ( x, y ) in color frame = rPoint in depth frame
                    const DepthSpacePoint& rPoint = pPointArray[y * imgTarget.cols + x];
                    // check if rPoint is in range
                    if (rPoint.X >= 0 && rPoint.X < iDepthWidth && rPoint.Y >= 0 && rPoint.Y < iDepthHeight)
                    {
                        // fill color from color frame if this pixel is user
                        // (body index < 6 means one of the tracked players)
                        int iIdx = (int)rPoint.X + iDepthWidth * (int)rPoint.Y;
                        if (pBodyIndex[iIdx] < 6)
                        {
                            cv::Vec4b& rPixel = mImg.at<cv::Vec4b>(y, x);
                            imgTarget.at<cv::Vec3b>(y, x) = cv::Vec3b(rPixel[0], rPixel[1], rPixel[2]);
                        }
                    }
                }
            }
        }
#else
        // Release build: draw over the raw color frame.
        cv::Mat imgTarget = mImg.clone();
#endif

        // 4b. Get body data
        IBodyFrame* pBodyFrame = nullptr;
        if (pBodyFrameReader->AcquireLatestFrame(&pBodyFrame) == S_OK)
        {
            // 4b. get Body data
            if (pBodyFrame->GetAndRefreshBodyData(iBodyCount, aBodyData) == S_OK)
            {
                // 4c. for each body
                for (int i = 0; i < iBodyCount; ++i)
                {
                    IBody* pBody = aBodyData[i];
                    // check if is tracked
                    BOOLEAN bTracked = false;
                    if ((pBody->get_IsTracked(&bTracked) == S_OK) && bTracked)
                    {
                        // get joint position
                        Joint aJoints[JointType::JointType_Count];
                        if (pBody->GetJoints(JointType::JointType_Count, aJoints) == S_OK)
                        {
                            // First tracked body kicks the coach out of idle
                            // and plays the welcome prompt once.
                            if (coach.state == 0){
                                coach.state = 1;
                                if (first_time == 0){
                                    first_time = 1;
                                    PlaySound(TEXT("welcome.wav"), NULL, SND_FILENAME);
                                }
                            }
#ifdef COACH_DEBUG
                            // Skeleton overlay: spine/head, then left and right arms.
                            DrawLine(imgTarget, aJoints[JointType_SpineBase], aJoints[JointType_SpineMid], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_SpineMid], aJoints[JointType_SpineShoulder], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_SpineShoulder], aJoints[JointType_Neck], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_Neck], aJoints[JointType_Head], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_SpineShoulder], aJoints[JointType_ShoulderLeft], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_ShoulderLeft], aJoints[JointType_ElbowLeft], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_ElbowLeft], aJoints[JointType_WristLeft], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_WristLeft], aJoints[JointType_HandLeft], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_HandLeft], aJoints[JointType_HandTipLeft], pCoordinateMapper);
                            //DrawLine(imgTarget, aJoints[JointType_HandLeft], aJoints[JointType_ThumbLeft], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_SpineShoulder], aJoints[JointType_ShoulderRight], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_ShoulderRight], aJoints[JointType_ElbowRight], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_ElbowRight], aJoints[JointType_WristRight], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_WristRight], aJoints[JointType_HandRight], pCoordinateMapper);
                            DrawLine(imgTarget, aJoints[JointType_HandRight], aJoints[JointType_HandTipRight], pCoordinateMapper);
                            //DrawLine(imgTarget, aJoints[JointType_HandRight], aJoints[JointType_ThumbRight], pCoordinateMapper);
#endif
                            ColorSpacePoint q;
                            ColorSpacePoint head;
                            //ColorSpacePoint w;
                            pCoordinateMapper->MapCameraPointToColorSpace(aJoints[JointType_Head].Position, &head);
                            // check shaking, then inhaler-vs-mouth position
                            coach.shaking_detection(aJoints, pCoordinateMapper);
                            q = coach.position_checking(aJoints, pCoordinateMapper);
#ifdef COACH_DEBUG
                            circle(imgTarget, cv::Point(q.X, q.Y), 10, Scalar(0, 255, 255), 10, 8, 0);
                            //circle(imgTarget, cv::Point(q.X, q.Y), 10, Scalar(0, 255, 255), 10, 8, 0);
                            rectangle(imgTarget, Point(head.X - 50, head.Y - 40), Point(head.X + 50, head.Y + 90), Scalar(0, 255, 255), 1, 8, 0);
                            //circle(imgTarget, cv::Point(w.X, w.Y), 10, Scalar(255, 0, 255), 10, 8, 0);
#endif
                            // Advance the coaching state machine.
                            coach.state_change_rule();
                        }
                    }
                }
            }
            else
            {
                cerr << "Can't read body data" << endl;
            }
            // 4e. release frame
            pBodyFrame->Release();
        }

        // Overlay the prompt matching the current coaching state.
        switch (coach.state){
        case 0:
            putText(imgTarget, "CMU Inhaler Coaching System", Point(120, 120), FONT_HERSHEY_DUPLEX, 2, text_color);
            break;
        case 1:
            putText(imgTarget, "Please shake the inhaler", Point(20, 120), FONT_HERSHEY_DUPLEX, 2, text_color2);
            break;
        case 2:
            putText(imgTarget, "Shaking detected", Point(20, 120), FONT_HERSHEY_DUPLEX, 2, text_color2);
            break;
        case 3:
            putText(imgTarget, "Please put the inhaler in front of your mouth", Point(20, 120), FONT_HERSHEY_DUPLEX, 2, text_color2);
            break;
        case 4:
            putText(imgTarget, "Position check OK", Point(20, 120), FONT_HERSHEY_DUPLEX, 2, text_color2);
            break;
        case 5:
            putText(imgTarget, "You forget to shake the inhaler first!!!", Point(20, 120), FONT_HERSHEY_DUPLEX, 2, text_color3);
            break;
        }

        // show image (scaled to the display size)
        Mat dst;
        resize(imgTarget, dst, screen_size);
        imshow("Coach", dst);
        // 4c. check keyboard input — ESC leaves the capture loop
        if (cv::waitKey(30) == VK_ESCAPE){
            break;
        }
    }
    mThread.join();

    // 3. delete body data array
    delete[] aBodyData;
    // 3. release frame reader
    cout << "Release body frame reader" << endl;
    pBodyFrameReader->Release();
    pBodyFrameReader = nullptr;
    // 2. release color frame reader
    cout << "Release color frame reader" << endl;
    pColorFrameReader->Release();
    pColorFrameReader = nullptr;
    // 1c. Close Sensor
    cout << "close sensor" << endl;
    pSensor->Close();
    // 1d. Release Sensor
    cout << "Release sensor" << endl;
    pSensor->Release();
    pSensor = nullptr;
    return 0;
}