Example #1
void Kin2::buildHDFaceModels(int &collectionStatus, int &captureStatus)
{
    collectionStatus = -1;
    captureStatus = -1;
    
	if (!(m_flags & k2::HD_FACE))
	{
        mexPrintf("ERROR: NO HD-FACE FUNCTIONALITY SELECTED!\n");
        return;
    }
    
	HRESULT hr;

	// iterate through each HD face reader
	for (int iFace = 0; iFace < BODY_COUNT; ++iFace)
	{
		// retrieve the latest face frame from this reader
		IHighDefinitionFaceFrame *pHDFaceFrame = nullptr;

		hr = m_pHDFaceFrameReaders[iFace]->AcquireLatestFrame(&pHDFaceFrame);

		BOOLEAN bFaceTracked = false;
		if (SUCCEEDED(hr) && nullptr != pHDFaceFrame)
		{
			// check if a valid face is tracked in this face frame
			hr = pHDFaceFrame->get_IsTrackingIdValid(&bFaceTracked);
		}

		// If face tracked, try to align it
		if (SUCCEEDED(hr) && bFaceTracked)
		{
			IFaceModel *pFaceModel = nullptr;

			hr = pHDFaceFrame->GetAndRefreshFaceAlignmentResult(m_pFaceAlignment[iFace]);

			// If face aligned, continue building the model
			if (SUCCEEDED(hr) && m_pFaceAlignment[iFace] != nullptr)
			{
				// If face model not ready
				if (!m_faceModelReady[iFace])
				{
					FaceModelBuilderCollectionStatus collection;
					hr = m_pFaceModelBuilder[iFace]->get_CollectionStatus(&collection);
                    collectionStatus = (int)collection;

					// If model completed
					if (collection == FaceModelBuilderCollectionStatus::FaceModelBuilderCollectionStatus_Complete)
					{
						mexPrintf("Face Model Completed!\n");						

						IFaceModelData* pFaceModelData = nullptr;
						hr = m_pFaceModelBuilder[iFace]->GetFaceData(&pFaceModelData);

						// Produce the model
						if (SUCCEEDED(hr) && pFaceModelData != nullptr)
						{
                            mexPrintf("Producing model...\n");
							hr = pFaceModelData->ProduceFaceModel(&m_pFaceModel[iFace]);
                            mexPrintf("Model Ready!\n");

							// Set the model ready flag
							if (SUCCEEDED(hr) && m_pFaceModel[iFace] != nullptr)
							{
								m_faceModelReady[iFace] = true;
							}
						}
						SafeRelease(pFaceModelData);

						// Get the shape units (SUs), i.e., the deformations with respect to the base face model
                        /*
						if (SUCCEEDED(hr))
						{
							float deformations[FaceShapeDeformations_Count];
							hr = m_pFaceModel[iFace]->GetFaceShapeDeformations(FaceShapeDeformations_Count, deformations);										
						}
                        */
					}
					// if model not completed yet
					else
					{
						// Display Collection Status
                        /*
						if (collection >= FaceModelBuilderCollectionStatus::FaceModelBuilderCollectionStatus_TiltedUpViewsNeeded)
						{
							mexPrintf("Need : Tilted Up Views\n");							
						}


						else if (collection >= FaceModelBuilderCollectionStatus::FaceModelBuilderCollectionStatus_RightViewsNeeded)
						{
							mexPrintf("Need : Right Views\n");							
						}

						else if (collection >= FaceModelBuilderCollectionStatus::FaceModelBuilderCollectionStatus_LeftViewsNeeded)
						{
							mexPrintf("Need : Left Views\n");							
						}

						else if (collection >= FaceModelBuilderCollectionStatus::FaceModelBuilderCollectionStatus_FrontViewFramesNeeded)
						{
							mexPrintf("Need : Front ViewFrames\n");							
						}
                        */ 

						// Display Capture Status
						FaceModelBuilderCaptureStatus capture;
						hr = m_pFaceModelBuilder[iFace]->get_CaptureStatus(&capture);

                        captureStatus = (int)capture;
                        
                        /*
						switch (capture)
						{
						case FaceModelBuilderCaptureStatus::FaceModelBuilderCaptureStatus_OtherViewsNeeded:
							std::cout << "Other views needed" << std::endl;
							break;
						case FaceModelBuilderCaptureStatus::FaceModelBuilderCaptureStatus_FaceTooFar:
							std::cout << "Face Too Far from Camera" << std::endl;
							break;
						case FaceModelBuilderCaptureStatus::FaceModelBuilderCaptureStatus_FaceTooNear:
							std::cout << "Face Too Near to Camera" << std::endl;
							break;
						case FaceModelBuilderCaptureStatus_MovingTooFast:
							std::cout << "Moving Too Fast" << std::endl;
							break;
						case FaceModelBuilderCaptureStatus::FaceModelBuilderCaptureStatus_LostFaceTrack:
							std::cout << "Lost Face Track" << std::endl;
							break;
						case FaceModelBuilderCaptureStatus::FaceModelBuilderCaptureStatus_SystemError:
							std::cout << "ERROR: System Error" << std::endl;
							break;

						default:
							break;
						}
                         */
					} // collection not complete
				} // If face model not ready
			} // If face aligned
		} // If face tracked
		else
		{
			// face tracking is not valid - attempt to fix the issue
			// a valid body is required to perform this step
			if (m_bHaveBodyData)
			{
				// check if the corresponding body is tracked 
				// if this is true then update the face frame source to track this body
				IBody* pBody = m_ppBodies[iFace];
				if (pBody != nullptr)
				{
					BOOLEAN bTracked = false;
					hr = pBody->get_IsTracked(&bTracked);

					UINT64 bodyTId;
					if (SUCCEEDED(hr) && bTracked)
					{
						// get the tracking ID of this body
						hr = pBody->get_TrackingId(&bodyTId);
						if (SUCCEEDED(hr))
						{
							// update the face frame source with the tracking ID
							m_pHDFaceFrameSources[iFace]->put_TrackingId(bodyTId);
						}
					}
				} // if (pBody != nullptr)
			} // if (m_bHaveBodyData)
		} // if face tracked
		// done with this frame
		SafeRelease(pHDFaceFrame);
	} // for each face reader

} // end buildHDFaceModels
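All of these examples call SafeRelease to drop COM references, but none of them defines it. A sketch of the customary Kinect SDK sample helper (the real project may define it slightly differently):

// Release a COM interface pointer and null it out, tolerating null input.
template<class Interface>
inline void SafeRelease(Interface*& pInterfaceToRelease)
{
    if (pInterfaceToRelease != nullptr)
    {
        pInterfaceToRelease->Release();
        pInterfaceToRelease = nullptr;
    }
}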
Example #2
void Kin2::getHDFaces(bool withVertices, std::vector<k2::HDFaceData>& facesData)
{
    if (!(m_flags & k2::HD_FACE))
	{
        mexPrintf("ERROR: NO HD-FACE FUNCTIONALITY SELECTED!\n");
        return;
    }
        
	HRESULT hr;
	facesData.clear();

	// iterate through each HD face reader
	for (int iFace = 0; iFace < BODY_COUNT; ++iFace)
	{
		// retrieve the latest face frame from this reader
		IHighDefinitionFaceFrame *pHDFaceFrame = nullptr;
		
		hr = m_pHDFaceFrameReaders[iFace]->AcquireLatestFrame(&pHDFaceFrame);

		BOOLEAN bFaceTracked = false;
		if (SUCCEEDED(hr) && nullptr != pHDFaceFrame)
		{
			// check if a valid face is tracked in this face frame
			hr = pHDFaceFrame->get_IsTrackingIdValid(&bFaceTracked);
		}

		// If the face is tracked, save its data in the facesData structure array
		if (bFaceTracked)
		{		
            float animationUnits[FaceShapeAnimations_Count] = {0};
			UINT32 vertexCount = 0;
            
            // Here we save the HD face data
			k2::HDFaceData faceData;
            
			hr = pHDFaceFrame->GetAndRefreshFaceAlignmentResult(m_pFaceAlignment[iFace]);

            if (SUCCEEDED(hr) && m_pFaceAlignment[iFace] != nullptr)
			{	
                // Get the Animation units
                hr = m_pFaceAlignment[iFace]->GetAnimationUnits(FaceShapeAnimations_Count, animationUnits);

                if (SUCCEEDED(hr))
                {
                    for (int vi = 0; vi < FaceShapeAnimations_Count; vi++)
                        faceData.animationUnits[vi] = animationUnits[vi];
                }

                // If HD face model vertices are requested
				if (withVertices)
				{
                    hr = GetFaceModelVertexCount(&vertexCount);
                    //mexPrintf("Number of Vertices: %d", vertexCount);

					// If there is no model ready, issue a warning message (just once)
					if (!m_faceModelReady[iFace] && !m_faceModelWarning[iFace])
					{
						mexPrintf("WARNING: No personal model has been created. An average face model will be used\n");
						m_faceModelWarning[iFace] = true;
					}
                    
                    CameraSpacePoint *vertices = new CameraSpacePoint[vertexCount];

					// Get the vertices (HD points)
					if (SUCCEEDED(hr))
						hr = m_pFaceModel[iFace]->CalculateVerticesForAlignment(m_pFaceAlignment[iFace], vertexCount, vertices);

					if (SUCCEEDED(hr))
                    {
						faceData.faceModel.resize(vertexCount);

						for (UINT32 vi = 0; vi < vertexCount; vi++)
							faceData.faceModel[vi] = vertices[vi];
					}

					if (vertices)
					{
						delete[] vertices;
						vertices = NULL;
					}
                } // if withVertices	
				
                // Get the facebox
                if (SUCCEEDED(hr))
                    hr = m_pFaceAlignment[iFace]->get_FaceBoundingBox(&faceData.faceBox);

                // Get the face rotation
                if (SUCCEEDED(hr))
                    hr = m_pFaceAlignment[iFace]->get_FaceOrientation(&faceData.faceRotation);

                // Get the head pivot
                if (SUCCEEDED(hr))
                {
                    hr = m_pFaceAlignment[iFace]->get_HeadPivotPoint(&faceData.headPivot);
                }

                // Save the HD face data in the member variable m_HDfacesData
                facesData.push_back(faceData);			
            }  // if face alignment	
        } // If face tracked
		else
		{
			// face tracking is not valid - attempt to fix the issue
			// a valid body is required to perform this step
			if (m_bHaveBodyData)
			{
				// check if the corresponding body is tracked 
				// if this is true then update the face frame source to track this body
				IBody* pBody = m_ppBodies[iFace];
				if (pBody != nullptr)
				{
					BOOLEAN bTracked = false;
					hr = pBody->get_IsTracked(&bTracked);

					UINT64 bodyTId;
					if (SUCCEEDED(hr) && bTracked)
					{
						// get the tracking ID of this body
						hr = pBody->get_TrackingId(&bodyTId);
						if (SUCCEEDED(hr))
						{
							// update the face frame source with the tracking ID
							m_pHDFaceFrameSources[iFace]->put_TrackingId(bodyTId);
						}
					}
				} // if (pBody != nullptr)
			} // if (m_bHaveBodyData)
		} // if face tracked

		SafeRelease(pHDFaceFrame);		
	} // for each face reader
} // end getHDFaces function
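A minimal caller sketch for getHDFaces (hedged: it assumes a constructed Kin2 object named kinect with the k2::HD_FACE flag set; the HDFaceData field names are the ones populated above):

std::vector<k2::HDFaceData> hdFaces;
kinect.getHDFaces(true, hdFaces); // true: also fetch the HD model vertices

for (const k2::HDFaceData& face : hdFaces)
{
    // animationUnits holds FaceShapeAnimations_Count expression weights
    mexPrintf("Jaw-open AU: %f\n", face.animationUnits[FaceShapeAnimations_JawOpen]);

    // faceModel holds the HD mesh vertices in camera space when requested
    if (!face.faceModel.empty())
        mexPrintf("First vertex: (%f, %f, %f)\n",
            face.faceModel[0].X, face.faceModel[0].Y, face.faceModel[0].Z);
}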
Example #3
void Kin2::getFaces(std::vector<k2::FaceData>& facesData)
{
    if (!(m_flags & k2::FACE))
    {
        mexPrintf("ERROR: NO FACE FUNCTIONALITY SELECTED!\n");
        return;
    }
        
	HRESULT hr;
	facesData.clear();

	// iterate through each face reader
	for (int iFace = 0; iFace < BODY_COUNT; ++iFace)
	{
		// retrieve the latest face frame from this reader
		IFaceFrame* pFaceFrame = nullptr;
		hr = m_pFaceFrameReaders[iFace]->AcquireLatestFrame(&pFaceFrame);

		BOOLEAN bFaceTracked = false;
		if (SUCCEEDED(hr) && nullptr != pFaceFrame)
		{
			// check if a valid face is tracked in this face frame
			hr = pFaceFrame->get_IsTrackingIdValid(&bFaceTracked);
		}

		if (SUCCEEDED(hr))
		{
			// If the face is tracked, save its data in the facesData structure array
			if (bFaceTracked)
			{
				IFaceFrameResult* pFaceFrameResult = nullptr;
				hr = pFaceFrame->get_FaceFrameResult(&pFaceFrameResult);

				k2::FaceData faceData;

				// need to verify if pFaceFrameResult contains data before trying to access it
				if (SUCCEEDED(hr) && pFaceFrameResult != nullptr)
				{
					hr = pFaceFrameResult->get_FaceBoundingBoxInColorSpace(&faceData.faceBox);

					if (SUCCEEDED(hr))
					{
						hr = pFaceFrameResult->GetFacePointsInColorSpace(FacePointType::FacePointType_Count, faceData.facePoints);
					}

					if (SUCCEEDED(hr))
					{
						hr = pFaceFrameResult->get_FaceRotationQuaternion(&faceData.faceRotation);
					}

					if (SUCCEEDED(hr))
					{
						hr = pFaceFrameResult->GetFaceProperties(FaceProperty::FaceProperty_Count, faceData.faceProperties);
					}

					facesData.push_back(faceData);
				}

				SafeRelease(pFaceFrameResult);
			}
			else
			{
				// face tracking is not valid - attempt to fix the issue
				// a valid body is required to perform this step
				if (m_bHaveBodyData)
				{
					// check if the corresponding body is tracked 
					// if this is true then update the face frame source to track this body
					IBody* pBody = m_ppBodies[iFace];
					if (pBody != nullptr)
					{
						BOOLEAN bTracked = false;
						hr = pBody->get_IsTracked(&bTracked);

						UINT64 bodyTId;
						if (SUCCEEDED(hr) && bTracked)
						{
							// get the tracking ID of this body
							hr = pBody->get_TrackingId(&bodyTId);
							if (SUCCEEDED(hr))
							{
								// update the face frame source with the tracking ID
								m_pFaceFrameSources[iFace]->put_TrackingId(bodyTId);
							}
						}
					}
				}
			}
		}

		SafeRelease(pFaceFrame);
	}
}
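For completeness, a matching caller sketch for getFaces (same assumptions: a Kin2 instance named kinect, here with the k2::FACE flag; FaceData fields as filled in above):

std::vector<k2::FaceData> faces;
kinect.getFaces(faces);

for (const k2::FaceData& face : faces)
{
    // the bounding box is reported in color-space pixels
    mexPrintf("Face box: L=%d T=%d R=%d B=%d\n",
        face.faceBox.Left, face.faceBox.Top, face.faceBox.Right, face.faceBox.Bottom);

    // faceProperties holds one DetectionResult per FaceProperty
    if (face.faceProperties[FaceProperty_Happy] == DetectionResult_Yes)
        mexPrintf("This face looks happy\n");
}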
Example #4
/// <summary>
/// Handle new body data
/// </summary>
/// <param name="nTime">timestamp of frame</param>
/// <param name="nBodyCount">body data count</param>
/// <param name="ppBodies">body data in frame</param>
void testApp::ProcessBody(INT64 nTime, int nBodyCount, IBody** ppBodies)
{

	HRESULT hr;
	int trackedBodies = 0;
	if (m_pCoordinateMapper)
	{
		//IBody* pBodyToTrack = NULL;
		//IBody* pBody2ToTrack = NULL;
		//UINT64 trackingId;
		//for (int i = 0; i < nBodyCount; ++i)
		//{
		//	IBody* pBody = ppBodies[i];
		//	if (lastBodyTrackingId == NULL || lastBody2TrackingId == NULL)
		//	{
		//		//Init a new body tracking
		//		if (pBody) {
		//			BOOLEAN bTracked = false;
		//			hr = pBody->get_IsTracked(&bTracked);

		//			if (SUCCEEDED(hr) && bTracked) {
		//				ofLogNotice("Body is tracked");
		//				if(lastBodyTrackingId == NULL)
		//					hr = pBody->get_TrackingId(&lastBodyTrackingId);
		//				else
		//					hr = pBody->get_TrackingId(&lastBody2TrackingId);

		//				if (SUCCEEDED(hr)) {
		//					ofLogNotice("Found body to track");
		//					pBodyToTrack = pBody;
		//				}
		//				break;
		//			}
		//		}
		//	}
		//	else {
		//		//Some body is already tracked
		//		if (pBody) {
		//			BOOLEAN bTracked = false;
		//			hr = pBody->get_IsTracked(&bTracked);

		//			if (SUCCEEDED(hr) && bTracked) {
		//				pBody->get_TrackingId(&trackingId);
		//				if (trackingId == lastBodyTrackingId) {
		//					pBodyToTrack = pBody;
		//				}
		//			}
		//		}
		//	}
		//}

		//if (pBodyToTrack == NULL && lastBodyTrackingId != NULL) {
		//	ofLogNotice("Lost body. Allowing new body to step in.");
		//	lastBodyTrackingId = NULL; //Allow new body to step in
		//}
		for (int i = 0; i < nBodyCount; ++i) {
			IBody* pBodyToTrack = ppBodies[i];
			if (pBodyToTrack)
			{
				BOOLEAN bTracked = false;
				hr = pBodyToTrack->get_IsTracked(&bTracked);

				if (SUCCEEDED(hr) && bTracked)
				{
					Joint joints[JointType_Count];
					ofVec2f jointPoints[JointType_Count];
					leftHandStates[i] = HandState_Unknown;
					rightHandStates[i] = HandState_Unknown;

					pBodyToTrack->get_HandLeftState(&leftHandStates[i]);
					pBodyToTrack->get_HandRightState(&rightHandStates[i]);

					hr = pBodyToTrack->GetJoints(_countof(joints), joints);
					if (SUCCEEDED(hr))
					{
						for (int j = 0; j < _countof(joints); ++j)
						{
							jointPoints[j] = BodyToScreen(joints[j].Position, 1024, 768);
						}

						lastChestPositions[trackedBodies] = jointPoints[JointType_Neck];
						lastHandPositionLeft[trackedBodies] = jointPoints[JointType_HandLeft];
						lastHandPositionRight[trackedBodies] = jointPoints[JointType_HandRight];
						pBodyToTrack->get_TrackingId(&lastBodyTrackingIds[trackedBodies]);
						trackedBodies++;
						
						//DrawBody(joints, jointPoints);
					}
				}
			}
		}
		

		//hr = m_pRenderTarget->EndDraw();

		// Device lost, need to recreate the render target
		// We'll dispose it now and retry drawing
		if (D2DERR_RECREATE_TARGET == hr)
		{
			hr = S_OK;
			//DiscardDirect2DResources();
		}
	}

	if (!m_nStartTime)
	{
		m_nStartTime = nTime;
	}

	double fps = 0.0;

	LARGE_INTEGER qpcNow = { 0 };
	if (m_fFreq)
	{
		if (QueryPerformanceCounter(&qpcNow))
		{
			if (m_nLastCounter)
			{
				m_nFramesSinceUpdate++;
				fps = m_fFreq * m_nFramesSinceUpdate / double(qpcNow.QuadPart - m_nLastCounter);
			}
		}
	}


}
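ProcessBody calls a BodyToScreen helper that is not shown. A sketch of the usual sample pattern, assuming it projects through the coordinate mapper into the 512x424 depth frame and scales to the window:

ofVec2f testApp::BodyToScreen(const CameraSpacePoint& bodyPoint, int width, int height)
{
    // project the 3-D camera-space point into depth space
    DepthSpacePoint depthPoint = { 0 };
    m_pCoordinateMapper->MapCameraPointToDepthSpace(bodyPoint, &depthPoint);

    // scale from the 512x424 depth frame to the requested window size
    float screenPointX = depthPoint.X * width / 512.0f;
    float screenPointY = depthPoint.Y * height / 424.0f;
    return ofVec2f(screenPointX, screenPointY);
}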
Example #5
void Device::update()
{
    if ( mFrameReader == 0 ) {
        return;
    }

    IAudioBeamFrame* audioFrame								= 0;
    IBodyFrame* bodyFrame									= 0;
    IBodyIndexFrame* bodyIndexFrame							= 0;
    IColorFrame* colorFrame									= 0;
    IDepthFrame* depthFrame									= 0;
    IMultiSourceFrame* frame								= 0;
    IInfraredFrame* infraredFrame							= 0;
    ILongExposureInfraredFrame* infraredLongExposureFrame	= 0;

    HRESULT hr = mFrameReader->AcquireLatestFrame( &frame );

    if ( SUCCEEDED( hr ) && mDeviceOptions.isAudioEnabled() ) {
        // TODO audio
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isBodyEnabled() ) {
        IBodyFrameReference* frameRef = 0;
        hr = frame->get_BodyFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &bodyFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isBodyIndexEnabled() ) {
        IBodyIndexFrameReference* frameRef = 0;
        hr = frame->get_BodyIndexFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &bodyIndexFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isColorEnabled() ) {
        IColorFrameReference* frameRef = 0;
        hr = frame->get_ColorFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &colorFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isDepthEnabled() ) {
        IDepthFrameReference* frameRef = 0;
        hr = frame->get_DepthFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &depthFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isInfraredEnabled() ) {
        IInfraredFrameReference* frameRef = 0;
        hr = frame->get_InfraredFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &infraredFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) && mDeviceOptions.isInfraredLongExposureEnabled() ) {
        ILongExposureInfraredFrameReference* frameRef = 0;
        hr = frame->get_LongExposureInfraredFrameReference( &frameRef );
        if ( SUCCEEDED( hr ) ) {
            hr = frameRef->AcquireFrame( &infraredLongExposureFrame );
        }
        if ( frameRef != 0 ) {
            frameRef->Release();
            frameRef = 0;
        }
    }

    if ( SUCCEEDED( hr ) ) {
        long long timeStamp										= 0L;

        // TODO audio

        std::vector<Body> bodies;
        int64_t bodyTime										= 0L;
        IBody* kinectBodies[ BODY_COUNT ]						= { 0 };
        Vec4f floorClipPlane									= Vec4f::zero();

        Channel8u bodyIndexChannel;
        IFrameDescription* bodyIndexFrameDescription			= 0;
        int32_t bodyIndexWidth									= 0;
        int32_t bodyIndexHeight									= 0;
        uint32_t bodyIndexBufferSize							= 0;
        uint8_t* bodyIndexBuffer								= 0;
        int64_t bodyIndexTime									= 0L;

        Surface8u colorSurface;
        IFrameDescription* colorFrameDescription				= 0;
        int32_t colorWidth										= 0;
        int32_t colorHeight										= 0;
        ColorImageFormat colorImageFormat						= ColorImageFormat_None;
        uint32_t colorBufferSize								= 0;
        uint8_t* colorBuffer									= 0;

        Channel16u depthChannel;
        IFrameDescription* depthFrameDescription				= 0;
        int32_t depthWidth										= 0;
        int32_t depthHeight										= 0;
        uint16_t depthMinReliableDistance						= 0;
        uint16_t depthMaxReliableDistance						= 0;
        uint32_t depthBufferSize								= 0;
        uint16_t* depthBuffer									= 0;

        Channel16u infraredChannel;
        IFrameDescription* infraredFrameDescription				= 0;
        int32_t infraredWidth									= 0;
        int32_t infraredHeight									= 0;
        uint32_t infraredBufferSize								= 0;
        uint16_t* infraredBuffer								= 0;

        Channel16u infraredLongExposureChannel;
        IFrameDescription* infraredLongExposureFrameDescription	= 0;
        int32_t infraredLongExposureWidth						= 0;
        int32_t infraredLongExposureHeight						= 0;
        uint32_t infraredLongExposureBufferSize					= 0;
        uint16_t* infraredLongExposureBuffer					= 0;

        // the depth frame drives the master timestamp; guard against a disabled depth stream
        if ( depthFrame != 0 ) {
            hr = depthFrame->get_RelativeTime( &timeStamp );
        }

        // TODO audio
        if ( mDeviceOptions.isAudioEnabled() ) {

        }

        if ( mDeviceOptions.isBodyEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = bodyFrame->get_RelativeTime( &bodyTime );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = bodyFrame->GetAndRefreshBodyData( BODY_COUNT, kinectBodies );
            }
            if ( SUCCEEDED( hr ) ) {
                Vector4 v;
                hr = bodyFrame->get_FloorClipPlane( &v );
                floorClipPlane = toVec4f( v );
            }
            if ( SUCCEEDED( hr ) ) {
                for ( uint8_t i = 0; i < BODY_COUNT; ++i ) {
                    IBody* kinectBody = kinectBodies[ i ];
                    if ( kinectBody != 0 ) {
                        uint8_t isTracked	= false;
                        hr					= kinectBody->get_IsTracked( &isTracked );
                        if ( SUCCEEDED( hr ) && isTracked ) {
                            Joint joints[ JointType_Count ];
                            kinectBody->GetJoints( JointType_Count, joints );

                            JointOrientation jointOrientations[ JointType_Count ];
                            kinectBody->GetJointOrientations( JointType_Count, jointOrientations );

                            uint64_t id = 0;
                            kinectBody->get_TrackingId( &id );

                            std::map<JointType, Body::Joint> jointMap;
                            for ( int32_t j = 0; j < JointType_Count; ++j ) {
                                Body::Joint joint(
                                    toVec3f( joints[ j ].Position ),
                                    toQuatf( jointOrientations[ j ].Orientation ),
                                    joints[ j ].TrackingState
                                );
                                jointMap.insert( pair<JointType, Body::Joint>( static_cast<JointType>( j ), joint ) );
                            }
                            Body body( id, i, jointMap );
                            bodies.push_back( body );
                        }
                    }
                }
            }
        }

        if ( mDeviceOptions.isBodyIndexEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = bodyIndexFrame->get_RelativeTime( &bodyIndexTime );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = bodyIndexFrame->get_FrameDescription( &bodyIndexFrameDescription );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = bodyIndexFrameDescription->get_Width( &bodyIndexWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = bodyIndexFrameDescription->get_Height( &bodyIndexHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = bodyIndexFrame->AccessUnderlyingBuffer( &bodyIndexBufferSize, &bodyIndexBuffer );
            }
            if ( SUCCEEDED( hr ) ) {
                bodyIndexChannel = Channel8u( bodyIndexWidth, bodyIndexHeight );
                memcpy( bodyIndexChannel.getData(), bodyIndexBuffer, bodyIndexWidth * bodyIndexHeight * sizeof( uint8_t ) );
            }
        }

        if ( mDeviceOptions.isColorEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = colorFrame->get_FrameDescription( &colorFrameDescription );
                if ( SUCCEEDED( hr ) ) {
                    float vFov = 0.0f;
                    float hFov = 0.0f;
                    float dFov = 0.0f;
                    colorFrameDescription->get_VerticalFieldOfView( &vFov );
                    colorFrameDescription->get_HorizontalFieldOfView( &hFov );
                    colorFrameDescription->get_DiagonalFieldOfView( &dFov );
                }
            }
            if ( SUCCEEDED( hr ) ) {
                hr = colorFrameDescription->get_Width( &colorWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = colorFrameDescription->get_Height( &colorHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = colorFrame->get_RawColorImageFormat( &colorImageFormat );
            }
            if ( SUCCEEDED( hr ) ) {
                colorBufferSize = colorWidth * colorHeight * sizeof( uint8_t ) * 4;
                colorBuffer		= new uint8_t[ colorBufferSize ];
                hr = colorFrame->CopyConvertedFrameDataToArray( colorBufferSize, reinterpret_cast<uint8_t*>( colorBuffer ), ColorImageFormat_Rgba );

                if ( SUCCEEDED( hr ) ) {
                    colorSurface = Surface8u( colorWidth, colorHeight, false, SurfaceChannelOrder::RGBA );
                    memcpy( colorSurface.getData(), colorBuffer, colorWidth * colorHeight * sizeof( uint8_t ) * 4 );
                }

                delete [] colorBuffer;
                colorBuffer = 0;
            }
        }

        if ( mDeviceOptions.isDepthEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrame->get_FrameDescription( &depthFrameDescription );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrameDescription->get_Width( &depthWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrameDescription->get_Height( &depthHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrame->get_DepthMinReliableDistance( &depthMinReliableDistance );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrame->get_DepthMaxReliableDistance( &depthMaxReliableDistance );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = depthFrame->AccessUnderlyingBuffer( &depthBufferSize, &depthBuffer );
            }
            if ( SUCCEEDED( hr ) ) {
                depthChannel = Channel16u( depthWidth, depthHeight );
                memcpy( depthChannel.getData(), depthBuffer, depthWidth * depthHeight * sizeof( uint16_t ) );
            }
        }

        if ( mDeviceOptions.isInfraredEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = infraredFrame->get_FrameDescription( &infraredFrameDescription );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredFrameDescription->get_Width( &infraredWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredFrameDescription->get_Height( &infraredHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredFrame->AccessUnderlyingBuffer( &infraredBufferSize, &infraredBuffer );
            }
            if ( SUCCEEDED( hr ) ) {
                infraredChannel = Channel16u( infraredWidth, infraredHeight );
                memcpy( infraredChannel.getData(), infraredBuffer,  infraredWidth * infraredHeight * sizeof( uint16_t ) );
            }
        }

        if ( mDeviceOptions.isInfraredLongExposureEnabled() ) {
            if ( SUCCEEDED( hr ) ) {
                hr = infraredLongExposureFrame->get_FrameDescription( &infraredLongExposureFrameDescription );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredLongExposureFrameDescription->get_Width( &infraredLongExposureWidth );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredLongExposureFrameDescription->get_Height( &infraredLongExposureHeight );
            }
            if ( SUCCEEDED( hr ) ) {
                hr = infraredLongExposureFrame->AccessUnderlyingBuffer( &infraredLongExposureBufferSize, &infraredLongExposureBuffer );
            }
            if ( SUCCEEDED( hr ) ) {
                infraredLongExposureChannel = Channel16u( infraredLongExposureWidth, infraredLongExposureHeight );
                memcpy( infraredLongExposureChannel.getData(), infraredLongExposureBuffer, infraredLongExposureWidth * infraredLongExposureHeight * sizeof( uint16_t ) );
            }
        }

        if ( SUCCEEDED( hr ) ) {
            mFrame.mBodies						= bodies;
            mFrame.mChannelBodyIndex			= bodyIndexChannel;
            mFrame.mChannelDepth				= depthChannel;
            mFrame.mChannelInfrared				= infraredChannel;
            mFrame.mChannelInfraredLongExposure	= infraredLongExposureChannel;
            mFrame.mDeviceId					= mDeviceOptions.getDeviceId();
            mFrame.mSurfaceColor				= colorSurface;
            mFrame.mTimeStamp					= timeStamp;
            mFrame.mFloorClipPlane				= floorClipPlane;
        }

        if ( bodyIndexFrameDescription != 0 ) {
            bodyIndexFrameDescription->Release();
            bodyIndexFrameDescription = 0;
        }
        if ( colorFrameDescription != 0 ) {
            colorFrameDescription->Release();
            colorFrameDescription = 0;
        }
        if ( depthFrameDescription != 0 ) {
            depthFrameDescription->Release();
            depthFrameDescription = 0;
        }
        if ( infraredFrameDescription != 0 ) {
            infraredFrameDescription->Release();
            infraredFrameDescription = 0;
        }
        if ( infraredLongExposureFrameDescription != 0 ) {
            infraredLongExposureFrameDescription->Release();
            infraredLongExposureFrameDescription = 0;
        }
    }

    if ( audioFrame != 0 ) {
        audioFrame->Release();
        audioFrame = 0;
    }
    if ( bodyFrame != 0 ) {
        bodyFrame->Release();
        bodyFrame = 0;
    }
    if ( bodyIndexFrame != 0 ) {
        bodyIndexFrame->Release();
        bodyIndexFrame = 0;
    }
    if ( colorFrame != 0 ) {
        colorFrame->Release();
        colorFrame = 0;
    }
    if ( depthFrame != 0 ) {
        depthFrame->Release();
        depthFrame = 0;
    }
    if ( frame != 0 ) {
        frame->Release();
        frame = 0;
    }
    if ( infraredFrame != 0 ) {
        infraredFrame->Release();
        infraredFrame = 0;
    }
    if ( infraredLongExposureFrame != 0 ) {
        infraredLongExposureFrame->Release();
        infraredLongExposureFrame = 0;
    }
}
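Device::update leans on small conversion helpers (toVec3f, toVec4f, toQuatf) that are not shown here; minimal sketches, assuming Cinder math types and the Kinect SDK's lowercase Vector4 members:

// adapt Kinect SDK value types to Cinder math types
Vec3f toVec3f(const CameraSpacePoint& p) { return Vec3f(p.X, p.Y, p.Z); }
Vec4f toVec4f(const Vector4& v)          { return Vec4f(v.x, v.y, v.z, v.w); }
Quatf toQuatf(const Vector4& q)          { return Quatf(q.w, q.x, q.y, q.z); } // Quatf takes w first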
Example #6
/// <summary>
/// Processes new face frames
/// </summary>
void CFaceBasics::ProcessFaces()
{
    HRESULT hr;
    IBody* ppBodies[BODY_COUNT] = {0};
    bool bHaveBodyData = SUCCEEDED( UpdateBodyData(ppBodies) );

	UINT32 vertexCount = 0;
	hr = GetFaceModelVertexCount(&vertexCount);

	UINT colorSpaceCount = vertexCount;
	ColorSpacePoint * pFaceColors = new ColorSpacePoint[colorSpaceCount];

    // iterate through each face reader
    for (int iFace = 0; iFace < BODY_COUNT; ++iFace)
    {
        // retrieve the latest face frame from this reader
        IFaceFrame* pFaceFrame = nullptr;
        hr = m_pFaceFrameReaders[iFace]->AcquireLatestFrame(&pFaceFrame);
		
        BOOLEAN bFaceTracked = false;
        if (SUCCEEDED(hr) && nullptr != pFaceFrame)
        {
            // check if a valid face is tracked in this face frame
            hr = pFaceFrame->get_IsTrackingIdValid(&bFaceTracked);
        }
		
        if (SUCCEEDED(hr))
        {
            if (bFaceTracked)
            {
                IFaceFrameResult* pFaceFrameResult = nullptr;
                RectI faceBox = {0};
                PointF facePoints[FacePointType::FacePointType_Count];
                Vector4 faceRotation;
                DetectionResult faceProperties[FaceProperty::FaceProperty_Count];
                D2D1_POINT_2F faceTextLayout;

                hr = pFaceFrame->get_FaceFrameResult(&pFaceFrameResult);

                // need to verify if pFaceFrameResult contains data before trying to access it
                if (SUCCEEDED(hr) && pFaceFrameResult != nullptr)
                {
                    hr = pFaceFrameResult->get_FaceBoundingBoxInColorSpace(&faceBox);

                    if (SUCCEEDED(hr))
                    {										
                        hr = pFaceFrameResult->GetFacePointsInColorSpace(FacePointType::FacePointType_Count, facePoints);
                    }

                    if (SUCCEEDED(hr))
                    {
                        hr = pFaceFrameResult->get_FaceRotationQuaternion(&faceRotation);
                    }

                    if (SUCCEEDED(hr))
                    {
                        hr = pFaceFrameResult->GetFaceProperties(FaceProperty::FaceProperty_Count, faceProperties);
                    }

                    if (SUCCEEDED(hr))
                    {
                        hr = GetFaceTextPositionInColorSpace(ppBodies[iFace], &faceTextLayout);
                    }

                    if (SUCCEEDED(hr))
                    {
						//let's see if we can get hd face frame here
						// retrieve the latest face frame from this reader
						IHighDefinitionFaceFrame* phdFaceFrame = nullptr;
						hr = m_phdFaceFrameReaders[iFace]->AcquireLatestFrame(&phdFaceFrame);
						if (SUCCEEDED(hr) && nullptr != phdFaceFrame)
						{
							//we have a hd face frame so get the vertices							
							hr = phdFaceFrame->GetAndRefreshFaceAlignmentResult(m_phdFaceAlignments[iFace]);

							IFaceModel * pFaceModel = nullptr;
							if (SUCCEEDED(hr))
							{
								//we have updated the faceAlignment results
								hr = phdFaceFrame->get_FaceModel(&pFaceModel);
								if (SUCCEEDED(hr) && nullptr != pFaceModel)
								{	
									CameraSpacePoint* pFacePoints = new CameraSpacePoint[vertexCount];
									hr = pFaceModel->CalculateVerticesForAlignment(m_phdFaceAlignments[iFace], vertexCount, pFacePoints);

									// now convert the camera-space points to color-space points
									if (SUCCEEDED(hr))
									{
										hr = m_pCoordinateMapper->MapCameraPointsToColorSpace(vertexCount, pFacePoints, colorSpaceCount, pFaceColors);
									}

									if (FAILED(hr))
									{
										delete[] pFaceColors;
										pFaceColors = nullptr;
									}
									delete[] pFacePoints;
								}
								SafeRelease(pFaceModel);
							}

							SafeRelease(phdFaceFrame);
						}
						
						

						if (nullptr != pFaceColors)
						{
							
							m_pDrawDataStreams->DrawFaceFrameResults(iFace, &faceBox, facePoints, &faceRotation, faceProperties, &faceTextLayout, pFaceColors);
						}
						else
						{
							// draw face frame results
							m_pDrawDataStreams->DrawFaceFrameResults(iFace, &faceBox, facePoints, &faceRotation, faceProperties, &faceTextLayout);
						}
                    }							
                }

                SafeRelease(pFaceFrameResult);				

            }
            else 
            {	
                // face tracking is not valid - attempt to fix the issue
                // a valid body is required to perform this step
                if (bHaveBodyData)
                {
                    // check if the corresponding body is tracked 
                    // if this is true then update the face frame source to track this body
                    IBody* pBody = ppBodies[iFace];
                    if (pBody != nullptr)
                    {
                        BOOLEAN bTracked = false;
                        hr = pBody->get_IsTracked(&bTracked);

                        UINT64 bodyTId;
                        if (SUCCEEDED(hr) && bTracked)
                        {
                            // get the tracking ID of this body
                            hr = pBody->get_TrackingId(&bodyTId);
                            if (SUCCEEDED(hr))
                            {
                                // update the face frame source with the tracking ID
                                m_pFaceFrameSources[iFace]->put_TrackingId(bodyTId);
								m_phdFaceFrameSources[iFace]->put_TrackingId(bodyTId);
                            }
                        }
                    }
                }
            }
        }			

        SafeRelease(pFaceFrame);
    }

	delete[] pFaceColors;
    if (bHaveBodyData)
    {
        for (int i = 0; i < _countof(ppBodies); ++i)
        {
            SafeRelease(ppBodies[i]);
        }
    }
}
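The vertex-mapping step above juggles two raw buffers by hand. A hedged alternative sketch (the function name is illustrative, not part of the sample's API) that keeps the same Kinect calls but lets std::vector own the memory:

HRESULT MapFaceVerticesToColor(IFaceModel* pFaceModel,
                               IFaceAlignment* pAlignment,
                               ICoordinateMapper* pMapper,
                               UINT32 vertexCount,
                               std::vector<ColorSpacePoint>& faceColors)
{
    // vectors free their storage automatically, so no delete[] bookkeeping
    std::vector<CameraSpacePoint> facePoints(vertexCount);
    HRESULT hr = pFaceModel->CalculateVerticesForAlignment(pAlignment, vertexCount, facePoints.data());
    if (SUCCEEDED(hr))
    {
        faceColors.resize(vertexCount);
        hr = pMapper->MapCameraPointsToColorSpace(vertexCount, facePoints.data(),
                                                  vertexCount, faceColors.data());
    }
    return hr;
}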
Example #7
//----------
void Body::update() {
    CHECK_OPEN

    IBodyFrame * frame = NULL;
    IFrameDescription * frameDescription = NULL;
    try {
        //acquire frame
        if (FAILED(this->reader->AcquireLatestFrame(&frame))) {
            return; // no new frame available yet; this is the common case
        }
        INT64 nTime = 0;
        if (FAILED(frame->get_RelativeTime(&nTime))) {
            throw Exception("Failed to get relative time");
        }

        if (FAILED(frame->get_FloorClipPlane(&floorClipPlane))) {
            throw(Exception("Failed to get floor clip plane"));
        }

        IBody* ppBodies[BODY_COUNT] = {0};
        if (FAILED(frame->GetAndRefreshBodyData(_countof(ppBodies), ppBodies))) {
            throw Exception("Failed to refresh body data");
        }

        for (int i = 0; i < BODY_COUNT; ++i) {
            auto & body = bodies[i];
            body.clear();

            IBody* pBody = ppBodies[i];
            if (pBody)
            {
                BOOLEAN bTracked = false;
                if (FAILED(pBody->get_IsTracked(&bTracked))) {
                    throw Exception("Failed to get tracking status");
                }
                body.tracked = bTracked;
                body.bodyId = i;

                if (bTracked)
                {
                    // retrieve tracking id

                    UINT64 trackingId = -1;

                    if (FAILED(pBody->get_TrackingId(&trackingId))) {
                        throw Exception("Failed to get tracking id");
                    }

                    body.trackingId = trackingId;

                    // retrieve joint position & orientation

                    _Joint joints[JointType_Count];
                    _JointOrientation jointsOrient[JointType_Count];

                    if (FAILED(pBody->GetJoints(JointType_Count, joints))) {
                        throw Exception("Failed to get joints");
                    }
                    if (FAILED(pBody->GetJointOrientations(JointType_Count, jointsOrient))) {
                        throw Exception("Failed to get joints orientation");
                    }

                    for (int j = 0; j < JointType_Count; ++j) {
                        body.joints[joints[j].JointType] = Data::Joint(joints[j], jointsOrient[j]);
                    }

                    // retrieve hand states

                    HandState leftHandState = HandState_Unknown;
                    HandState rightHandState = HandState_Unknown;

                    if (FAILED(pBody->get_HandLeftState(&leftHandState))) {
                        throw Exception("Failed to get left hand state");
                    }
                    if (FAILED(pBody->get_HandRightState(&rightHandState))) {
                        throw Exception("Failed to get right hand state");
                    }

                    body.leftHandState = leftHandState;
                    body.rightHandState = rightHandState;
                }
            }
        }

        for (int i = 0; i < _countof(ppBodies); ++i)
        {
            SafeRelease(ppBodies[i]);
        }
    }
    catch (std::exception & e) {
        OFXKINECTFORWINDOWS2_ERROR << e.what();
    }
    SafeRelease(frameDescription);
    SafeRelease(frame);
}
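Body::update throws an Exception type that is not defined in the snippet. Since the handler catches std::exception and calls what(), a minimal compatible sketch (hypothetical; the real ofxKinectForWindows2 class may differ) is:

#include <stdexcept>
#include <string>

// must derive from std::exception so the catch block above receives it
class Exception : public std::runtime_error {
public:
    explicit Exception(const std::string& message) : std::runtime_error(message) {}
};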
Example #8
/// <summary>
/// Handle new body data
/// </summary>
/// <param name="nTime">timestamp of frame</param>
/// <param name="nBodyCount">body data count</param>
/// <param name="ppBodies">body data in frame</param>
void CColorBasics::ProcessBody(INT64 nTime, int nBodyCount, IBody** ppBodies)
{
	if (m_hWnd)
	{
		HRESULT hr = S_OK;

		D2D1_POINT_2F start;
		start.x = 1500.0;
		start.y = 800.0;

		D2D1_POINT_2F quit;
		quit.x = 300.0;
		quit.y = 800.0;

		//int width = 0;
		//int height = 0;
		if (SUCCEEDED(hr) && m_pCoordinateMapper)
		{
			// commented out because ProcessColor(), which runs first, already does this
			//hr = m_pDrawColor->BeginDraw();

			DetectionResult nEngaged[6] = { DetectionResult_Unknown };
			PointF lean;

			//RECT rct;
			//GetClientRect(GetDlgItem(m_hWnd, IDC_VIDEOVIEW), &rct);
			//width = rct.right;
			//height = rct.bottom;

			UINT64 nTrackBody = 10;

			for (int i = 0; i < nBodyCount; ++i)
			{
				IBody* pBody = ppBodies[i];
				if (pBody)
				{
					// Get the body index assuming a two-player flag-semaphore match.
					// Properly, the two contestants should be fixed before the game starts.
					//
					// Make sure the body is actually being tracked.
					BOOLEAN bTracked = false;
					hr = pBody->get_IsTracked(&bTracked);

					// get_Engaged() seems usable; it presumably detects a person entering the field of view.
					hr = pBody->get_Engaged(&nEngaged[i]);
					pBody->get_Lean(&lean);
					// the following does not appear to work yet
					//hr = pBody->GetAppearanceDetectionResults((UINT)i, &nEngaged[i]);

					if (SUCCEEDED(hr) && bTracked)
					{
						// Note: when tracking is invalid, the returned index is 0, so be careful how this is used!!
						UINT64 nBodyIndex = 0;
						hr = pBody->get_TrackingId(&nBodyIndex);

						Joint joints[JointType_Count];
						D2D1_POINT_2F jointPoints[JointType_Count];
						HandState leftHandState = HandState_Unknown;
						HandState rightHandState = HandState_Unknown;

						pBody->get_HandLeftState(&leftHandState);
						pBody->get_HandRightState(&rightHandState);

						hr = pBody->GetJoints(_countof(joints), joints);
						if (SUCCEEDED(hr))
						{
							// convert to screen coordinates
							for (int j = 0; j < _countof(joints); ++j)
							{
								jointPoints[j] = BodyToScreen(joints[j].Position);
							}
							// draw a circle on the head here and display the body number
							m_pDrawColor->DrawHead(jointPoints[JointType_Head], i, nEngaged[i], lean);

							// Runs when a hand tip enters a given region, like a button.
							// Currently this fires for anyone recognized; properly it should
							// apply only to the first person recognized.
							float xy[2] = { 0.0 };

							if (!m_bSemaphore)
							{
								if (m_pSemaphore[0])
								{
									delete m_pSemaphore[0];
									m_pSemaphore[0] = NULL;
								}
								if (m_pSemaphore[1])
								{
									delete m_pSemaphore[1];
									m_pSemaphore[1] = NULL;
								}
								m_nButton = 1;
								xy[0] = jointPoints[JointType_HandTipRight].x - start.x;
								xy[1] = jointPoints[JointType_HandTipRight].y - start.y;
								if (sqrt( xy[0]*xy[0] + xy[1]*xy[1] ) < 100.0 )
								{
									if (nTrackBody == 10 || nTrackBody == nBodyIndex)
									{
										m_nButton = 0;
										nTrackBody = nBodyIndex;
									}
								}
							}
							else
							{
								// start flag semaphore
								// semaphore judgment
								if (m_pSemaphore[0] == NULL)
								{
									m_pSemaphore[0] = new Semaphore( &nBodyIndex );
								}
								else
								{
									if (m_pSemaphore[1] == NULL && !m_pSemaphore[0]->ItsMe(&nBodyIndex))
									{
										m_pSemaphore[1] = new Semaphore(&nBodyIndex);
									}
								}

								// counting
								// capture data for the basic pose
								// main semaphore processing
								// Semaphore judgment should not need frames at the image rate;
								// fetch Body frames on a timer and run the judgment on those.
								if (m_pSemaphore[0])
								{
									m_pSemaphore[0]->SetSignalType(&nBodyIndex, jointPoints, m_pDrawColor);
								}
								if (m_pSemaphore[1])
								{
									m_pSemaphore[1]->SetSignalType(&nBodyIndex, jointPoints, m_pDrawColor);
								}
								//m_pSemaphore[0]->Practice(nTime, jointPoints, m_pDrawColor);

								// quit-button handling
								m_nButton = 2;
								// display for the basic pose
								xy[0] = jointPoints[JointType_HandTipLeft].x - quit.x;
								xy[1] = jointPoints[JointType_HandTipLeft].y - quit.y;
								if (sqrt( xy[0]*xy[0] + xy[1]*xy[1] ) < 100.0 )
								{
									if (nTrackBody == 10 || nTrackBody == nBodyIndex)
									{
										m_nButton = 0;
										nTrackBody = nBodyIndex;
									}
								}
							}
							m_pDrawColor->DrawBody(joints, jointPoints);
							//m_pDrawColor->DrawHand(leftHandState, jointPoints[JointType_HandLeft]);
							//m_pDrawColor->DrawHand(rightHandState, jointPoints[JointType_HandRight]);

							Detect(pBody);
							//break;
						}
					}
				}
			}
			if (!m_bSemaphore)
			{
				// this button handler sends a message to the window
				m_pDrawColor->DrawButton(start, m_nButton);
			}
			else
			{
				m_pDrawColor->DrawButton(quit, m_nButton);
			}
			// display the challenge for the two-player match mode
			if (Question(nTime))
			{
				m_pDrawColor->DrawButton(quit, 0);
			}

			m_pDrawColor->EndDraw();
		}
	}
}
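The two button checks in ProcessBody repeat the same distance test against a 100-pixel radius. A small helper sketch (hypothetical, not part of the original) that makes the hit-test explicit and avoids the sqrt:

// true when the hand-tip point lies inside the circular button region
static bool IsHandOnButton(const D2D1_POINT_2F& handTip, const D2D1_POINT_2F& button, float radius = 100.0f)
{
    const float dx = handTip.x - button.x;
    const float dy = handTip.y - button.y;
    return (dx * dx + dy * dy) < radius * radius;
}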