// Example #1
// Collects the latest HD-face tracking results for every tracked body.
// For each of the BODY_COUNT HD-face readers, acquires the newest frame,
// refreshes the face alignment, and appends one k2::HDFaceData entry
// (animation units, face box, rotation, head pivot, and — when
// withVertices is true — the HD face model vertices) to facesData.
// Faces that are not currently tracked have their frame source re-seeded
// with the corresponding body's tracking ID so they can be picked up on
// the next call.
//
// Parameters:
//   withVertices - when true, also computes the HD model vertices
//                  (CalculateVerticesForAlignment) for each tracked face.
//   facesData    - output; cleared on entry, then filled with one entry
//                  per successfully-aligned face.
void Kin2::getHDFaces(bool withVertices, std::vector<k2::HDFaceData>& facesData)
{
    if (!(m_flags & k2::HD_FACE))
	{
        mexPrintf("ERROR: NO HD-FACE FUNCTIONALITY SELECTED!\n");
        return;
    }

	HRESULT hr;
	facesData.clear();

	// iterate through each HD face reader
	for (int iFace = 0; iFace < BODY_COUNT; ++iFace)
	{
		// retrieve the latest face frame from this reader
		IHighDefinitionFaceFrame *pHDFaceFrame = nullptr;

		hr = m_pHDFaceFrameReaders[iFace]->AcquireLatestFrame(&pHDFaceFrame);

		BOOLEAN bFaceTracked = false;
		if (SUCCEEDED(hr) && nullptr != pHDFaceFrame)
		{
			// check if a valid face is tracked in this face frame
			hr = pHDFaceFrame->get_IsTrackingIdValid(&bFaceTracked);
		}

		// If face tracked, save its data on the facesData structure array
		if (bFaceTracked)
		{
            float animationUnits[FaceShapeAnimations_Count] = {0};

            // Here we save the HD face data
			k2::HDFaceData faceData;

			hr = pHDFaceFrame->GetAndRefreshFaceAlignmentResult(m_pFaceAlignment[iFace]);

            if (SUCCEEDED(hr) && m_pFaceAlignment[iFace] != nullptr)
			{
                // Get the Animation units
                hr = m_pFaceAlignment[iFace]->GetAnimationUnits(FaceShapeAnimations_Count, animationUnits);

                if (SUCCEEDED(hr))
                {
                    for (int vi = 0; vi < FaceShapeAnimations_Count; vi++)
                        faceData.animationUnits[vi] = animationUnits[vi];
                }

                // If HD face model vertices are requested
				if (withVertices)
				{
					// BUG FIX: vertexCount was previously left uninitialized and
					// then used to size a raw allocation even when
					// GetFaceModelVertexCount failed. Initialize it and only
					// allocate/calculate on success.
					UINT32 vertexCount = 0;
                    hr = GetFaceModelVertexCount(&vertexCount);

					// If there is no model ready, issue a warning message (just once)
					if (!m_faceModelReady[iFace] && !m_faceModelWarning[iFace])
					{
						mexPrintf("WARNING: No personal model has been created. An average face model will be used\n");
						m_faceModelWarning[iFace] = true;
					}

					if (SUCCEEDED(hr) && vertexCount > 0)
					{
						// RAII buffer replaces raw new[]/delete[]: no leak on
						// any early exit path.
						std::vector<CameraSpacePoint> vertices(vertexCount);

						// Get the vertices (HD points)
						hr = m_pFaceModel[iFace]->CalculateVerticesForAlignment(m_pFaceAlignment[iFace], vertexCount, vertices.data());

						if (SUCCEEDED(hr))
						{
							faceData.faceModel.assign(vertices.begin(), vertices.end());
						}
					}
                } // if withVertices

                // Get the facebox
                if (SUCCEEDED(hr))
                    hr = m_pFaceAlignment[iFace]->get_FaceBoundingBox(&faceData.faceBox);

                // Get the face rotation
                if (SUCCEEDED(hr))
                    hr = m_pFaceAlignment[iFace]->get_FaceOrientation(&faceData.faceRotation);

                // Get the head pivot
                if (SUCCEEDED(hr))
                {
                    hr = m_pFaceAlignment[iFace]->get_HeadPivotPoint(&faceData.headPivot);
                }

                // Save the HD face data in the output array
                facesData.push_back(faceData);
            }  // if face alignment
        } // If face tracked
		else
		{
			// face tracking is not valid - attempt to fix the issue
			// a valid body is required to perform this step
			if (m_bHaveBodyData)
			{
				// check if the corresponding body is tracked
				// if this is true then update the face frame source to track this body
				IBody* pBody = m_ppBodies[iFace];
				if (pBody != nullptr)
				{
					BOOLEAN bTracked = false;
					hr = pBody->get_IsTracked(&bTracked);

					UINT64 bodyTId;
					if (SUCCEEDED(hr) && bTracked)
					{
						// get the tracking ID of this body
						hr = pBody->get_TrackingId(&bodyTId);
						if (SUCCEEDED(hr))
						{
							// update the face frame source with the tracking ID
							m_pHDFaceFrameSources[iFace]->put_TrackingId(bodyTId);
						}
					}
				} // if (pBody != nullptr)
			} // if (m_bHaveBodyData)
		} // if face tracked

		SafeRelease(pHDFaceFrame);
	} // for each face reader
} // end getHDFaces function
// Initializes one face frame source/reader pair and one HD-face
// source/reader/model-builder/alignment/model per possible body, plus the
// shared body frame reader.
//
// Returns: S_OK on success, or the first failing HRESULT. On failure the
// body frame source is still released (previously it leaked on the
// CreateFaceModel early-return path).
HRESULT KinectHDFaceGrabber::initHDFaceReader()
{
	IBodyFrameSource* pBodyFrameSource = nullptr;
	UINT32 vertices = 0;
	HRESULT hr = GetFaceModelVertexCount(&vertices);

	if (SUCCEEDED(hr)){
		hr = m_pKinectSensor->get_BodyFrameSource(&pBodyFrameSource);
	}

	// One deformation buffer per body, all zero-initialized (average face).
	// NOTE: a redundant shadowing copy of this vector was previously
	// re-allocated on every loop iteration; it has been removed.
	std::vector<std::vector<float>> deformations(BODY_COUNT, std::vector<float>(FaceShapeDeformations::FaceShapeDeformations_Count));

	if (SUCCEEDED(hr)){
		// create a face frame source + reader to track each body in the fov
		for (int i = 0; i < BODY_COUNT; i++){
			if (SUCCEEDED(hr)){
				// create the face frame source by specifying the required face frame features
				hr = CreateFaceFrameSource(m_pKinectSensor, 0, c_FaceFrameFeatures, &m_pFaceFrameSources[i]);
			}

			if (SUCCEEDED(hr)){
				// open the corresponding reader
				hr = m_pFaceFrameSources[i]->OpenReader(&m_pFaceFrameReaders[i]);
			}

			if (SUCCEEDED(hr)){
				hr = CreateHighDefinitionFaceFrameSource(m_pKinectSensor, &m_pHDFaceSource[i]);
				m_pHDFaceSource[i]->put_TrackingQuality(FaceAlignmentQuality_High);
			}

			if (SUCCEEDED(hr)){
				hr = m_pHDFaceSource[i]->OpenReader(&m_pHDFaceReader[i]);
			}

			if (SUCCEEDED(hr)){
				hr = m_pHDFaceSource[i]->OpenModelBuilder(FaceModelBuilderAttributes::FaceModelBuilderAttributes_None, &m_pFaceModelBuilder[i]);
			}

			if (SUCCEEDED(hr)){
				hr = m_pFaceModelBuilder[i]->BeginFaceDataCollection();
			}

			if (SUCCEEDED(hr)){
				hr = CreateFaceAlignment(&m_pFaceAlignment[i]);
			}
			if (SUCCEEDED(hr)){
				m_HDFaceDetectedPointsCamSpace[i].resize(vertices);
				m_HDFaceDetectedPointsColorSpace[i].resize(vertices);
			}

			// Create Face Model.
			// BUG FIX: this call was previously unguarded, so a failure in any
			// earlier step was silently overwritten by a successful
			// CreateFaceModel result.
			if (SUCCEEDED(hr)){
				hr = CreateFaceModel(1.0f, FaceShapeDeformations::FaceShapeDeformations_Count, deformations[i].data(), &m_pFaceModel[i]);
				if (FAILED(hr)){
					std::cerr << "Error : CreateFaceModel()" << std::endl;
					// BUG FIX: release the body frame source before the early
					// return (previously leaked).
					SafeRelease(pBodyFrameSource);
					return hr;
				}
			}
		}

		if (SUCCEEDED(hr)){
			hr = pBodyFrameSource->OpenReader(&m_pBodyFrameReader);
		}
	}

	SafeRelease(pBodyFrameSource);
	return hr;
}
// Per-frame HD-face processing: refreshes the tracked bodies, associates
// each HD-face source with its body, refreshes face alignment for tracked
// faces, advances face-model building, forwards results to the output
// stream updater, and draws the detected color-space points.
// Returns early (without starting a face collection) when no body data is
// available.
void KinectHDFaceGrabber::processFaces()
{
	// update the tracked bodies
	HRESULT hr;
	IBody* ppBodies[BODY_COUNT] = {0};
	bool bHaveBodyData = SUCCEEDED( updateBodyData(ppBodies) );
	if (!bHaveBodyData)
	{
		// BUG FIX: release any bodies that were partially acquired before
		// bailing out (SafeRelease is a no-op on null entries).
		for (int i = 0; i < _countof(ppBodies); ++i)
		{
			SafeRelease(ppBodies[i]);
		}
		return;
	}

	// indicate the start of data providing
	m_outputStreamUpdater->startFaceCollection(m_colorBuffer.data(), m_depthBuffer.data());
	bool updatedOneFace = false;

	// iterate through each face reader
	for (int iFace = 0; iFace < BODY_COUNT; ++iFace)
	{
		// associate the faces with the bodies
		updateHDFaceTrackingID(m_pHDFaceSource[iFace], ppBodies[iFace]);

		IHighDefinitionFaceFrame* pHDFaceFrame = nullptr;
		hr = m_pHDFaceReader[iFace]->AcquireLatestFrame(&pHDFaceFrame);

		BOOLEAN bFaceTracked = false;
		if (SUCCEEDED(hr) && pHDFaceFrame != nullptr){
			hr = pHDFaceFrame->get_IsFaceTracked(&bFaceTracked);
		}
		// update face alignment
		if (SUCCEEDED(hr) && bFaceTracked){
			hr = pHDFaceFrame->GetAndRefreshFaceAlignmentResult(m_pFaceAlignment[iFace]);
		}

		// BUG FIX: the acquired frame was never released, leaking one COM
		// reference per face per call. Release it before any continue.
		SafeRelease(pHDFaceFrame);

		if (FAILED(hr) || m_pFaceAlignment[iFace] == nullptr){
			continue;
		}

		// have we finished building our model?
		if (m_pFaceModelBuilder[iFace] != nullptr){
			updateFaceModelStatusOfFaceModelBuilder(&m_pFaceModelBuilder[iFace], m_pFaceModel[iFace]);
		}

		if (m_outputStreamUpdater){
			hr = m_outputStreamUpdater->updateOutputStreams(m_pFaceModel[iFace], m_pFaceAlignment[iFace],
				std::min(m_HDFaceDetectedPointsCamSpace[iFace].size(), m_HDFaceDetectedPointsColorSpace[iFace].size()),
				m_HDFaceDetectedPointsCamSpace[iFace].data(), m_HDFaceDetectedPointsColorSpace[iFace].data());
			updatedOneFace = true;
		}

		if (SUCCEEDED(hr)){
			m_pDrawDataStreams->drawPoints(m_HDFaceDetectedPointsColorSpace[iFace]);
		}
	}

	if (updatedOneFace){
		m_outputStreamUpdater->stopFaceCollection();
	}

	// bHaveBodyData is guaranteed true here (early return above), so the
	// previous redundant guard was removed.
	for (int i = 0; i < _countof(ppBodies); ++i)
	{
		SafeRelease(ppBodies[i]);
	}
}
// Example #4
/// <summary>
/// Processes new face frames
/// </summary>
void CFaceBasics::ProcessFaces()
{
    HRESULT hr;
    IBody* ppBodies[BODY_COUNT] = {0};
    bool bHaveBodyData = SUCCEEDED( UpdateBodyData(ppBodies) );

	UINT32 vertexCount = 0;
	hr = GetFaceModelVertexCount(&vertexCount);

	UINT colorSpaceCount = vertexCount;
	ColorSpacePoint * pFaceColors = new ColorSpacePoint[colorSpaceCount];

    // iterate through each face reader
    for (int iFace = 0; iFace < BODY_COUNT; ++iFace)
    {
        // retrieve the latest face frame from this reader
        IFaceFrame* pFaceFrame = nullptr;
        hr = m_pFaceFrameReaders[iFace]->AcquireLatestFrame(&pFaceFrame);
		
        BOOLEAN bFaceTracked = false;
        if (SUCCEEDED(hr) && nullptr != pFaceFrame)
        {
            // check if a valid face is tracked in this face frame
            hr = pFaceFrame->get_IsTrackingIdValid(&bFaceTracked);
        }
		
        if (SUCCEEDED(hr))
        {
            if (bFaceTracked)
            {
                IFaceFrameResult* pFaceFrameResult = nullptr;
                RectI faceBox = {0};
                PointF facePoints[FacePointType::FacePointType_Count];
                Vector4 faceRotation;
                DetectionResult faceProperties[FaceProperty::FaceProperty_Count];
                D2D1_POINT_2F faceTextLayout;

                hr = pFaceFrame->get_FaceFrameResult(&pFaceFrameResult);

                // need to verify if pFaceFrameResult contains data before trying to access it
                if (SUCCEEDED(hr) && pFaceFrameResult != nullptr)
                {
                    hr = pFaceFrameResult->get_FaceBoundingBoxInColorSpace(&faceBox);

                    if (SUCCEEDED(hr))
                    {										
                        hr = pFaceFrameResult->GetFacePointsInColorSpace(FacePointType::FacePointType_Count, facePoints);
                    }

                    if (SUCCEEDED(hr))
                    {
                        hr = pFaceFrameResult->get_FaceRotationQuaternion(&faceRotation);
                    }

                    if (SUCCEEDED(hr))
                    {
                        hr = pFaceFrameResult->GetFaceProperties(FaceProperty::FaceProperty_Count, faceProperties);
                    }

                    if (SUCCEEDED(hr))
                    {
                        hr = GetFaceTextPositionInColorSpace(ppBodies[iFace], &faceTextLayout);
                    }

                    if (SUCCEEDED(hr))
                    {
						//let's see if we can get hd face frame here
						// retrieve the latest face frame from this reader
						IHighDefinitionFaceFrame* phdFaceFrame = nullptr;
						hr = m_phdFaceFrameReaders[iFace]->AcquireLatestFrame(&phdFaceFrame);
						if (SUCCEEDED(hr) && nullptr != phdFaceFrame)
						{
							//we have a hd face frame so get the vertices							
							hr = phdFaceFrame->GetAndRefreshFaceAlignmentResult(m_phdFaceAlignments[iFace]);

							IFaceModel * pFaceModel = nullptr;
							if (SUCCEEDED(hr))
							{
								//we have updated the faceAlignment results
								hr = phdFaceFrame->get_FaceModel(&pFaceModel);
								if (SUCCEEDED(hr) && nullptr != pFaceModel)
								{	
									 CameraSpacePoint * pFacePoints = new CameraSpacePoint[vertexCount];
									hr = pFaceModel->CalculateVerticesForAlignment(m_phdFaceAlignments[iFace], vertexCount, pFacePoints);

									const CameraSpacePoint * pConstFacePoints = pFacePoints;								//now convert cameraspace points to colorspacepoints
									hr = m_pCoordinateMapper->MapCameraPointsToColorSpace(vertexCount, pConstFacePoints, colorSpaceCount, pFaceColors);
									
									if (FAILED(hr))
									{
										pFaceColors = nullptr;
									}
									delete pFacePoints;
								}
								SafeRelease(pFaceModel);
							}

							SafeRelease(phdFaceFrame);
						}
						
						

						if (nullptr != pFaceColors)
						{
							
							m_pDrawDataStreams->DrawFaceFrameResults(iFace, &faceBox, facePoints, &faceRotation, faceProperties, &faceTextLayout, pFaceColors);
						}
						else
						{
							// draw face frame results
							m_pDrawDataStreams->DrawFaceFrameResults(iFace, &faceBox, facePoints, &faceRotation, faceProperties, &faceTextLayout);
						}
                    }							
                }

                SafeRelease(pFaceFrameResult);				

            }
            else 
            {	
                // face tracking is not valid - attempt to fix the issue
                // a valid body is required to perform this step
                if (bHaveBodyData)
                {
                    // check if the corresponding body is tracked 
                    // if this is true then update the face frame source to track this body
                    IBody* pBody = ppBodies[iFace];
                    if (pBody != nullptr)
                    {
                        BOOLEAN bTracked = false;
                        hr = pBody->get_IsTracked(&bTracked);

                        UINT64 bodyTId;
                        if (SUCCEEDED(hr) && bTracked)
                        {
                            // get the tracking ID of this body
                            hr = pBody->get_TrackingId(&bodyTId);
                            if (SUCCEEDED(hr))
                            {
                                // update the face frame source with the tracking ID
                                m_pFaceFrameSources[iFace]->put_TrackingId(bodyTId);
								m_phdFaceFrameSources[iFace]->put_TrackingId(bodyTId);
                            }
                        }
                    }
                }
            }
        }			

        SafeRelease(pFaceFrame);
    }

	delete pFaceColors;
    if (bHaveBodyData)
    {
        for (int i = 0; i < _countof(ppBodies); ++i)
        {
            SafeRelease(ppBodies[i]);
        }
    }
}