bool SkeletonTrackerHelper::SubmitFaceTrackingResult(IFTResult* pResult)
{
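    // Only evaluate results that the face tracker reports as successful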
    if (pResult != NULL && SUCCEEDED(pResult->GetStatus()))
    {
        FLOAT* pSU = NULL;
        UINT numSU;
        BOOL suConverged;
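        // Shape unit (SU) coefficients the tracker has fitted for this face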
        m_pFaceTracker->GetShapeUnits(NULL, &pSU, &numSU, &suConverged);
        POINT viewOffset = {0, 0};
        FT_CAMERA_CONFIG cameraConfig;
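        // Use the sensor's actual video configuration when available, otherwise fall back to default VGA parameters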
        if (m_KinectSensorPresent)
        {
            m_KinectSensor.GetVideoConfiguration(&cameraConfig);
        }
        else
        {
            cameraConfig.Width = 640;
            cameraConfig.Height = 480;
            cameraConfig.FocalLength = 500.0f;
        }
        IFTModel* ftModel;
        HRESULT hr = m_pFaceTracker->GetFaceModel(&ftModel);
        if (SUCCEEDED(hr))
        {
            // Allocate a stack-based buffer for the evaluated model vertices
            UINT vertexCount = ftModel->GetVertexCount();
            FT_VECTOR3D* points3D = reinterpret_cast<FT_VECTOR3D*>(_malloca(sizeof(FT_VECTOR3D) * vertexCount));

            // Animation unit (AU) coefficients of the current result
            FLOAT* pAU = NULL;
            UINT numAU;
            pResult->GetAUCoefficients(&pAU, &numAU);
            //pApp->m_eggavatar.SetCandideAU(pAU, numAU);

            // Head pose of the current result (the returned scale is not applied below)
            FLOAT scale;
            FLOAT rotationXYZ[3];
            FLOAT translationXYZ[3];
            pResult->Get3DPose(&scale, rotationXYZ, translationXYZ);

            // Evaluate the fitted 3D face model at unit scale in head pose coordinates
            hr = ftModel->Get3DShape(pSU, ftModel->GetSUCount(), pAU, numAU, 1.0f, rotationXYZ, translationXYZ, points3D, vertexCount);

            if (SUCCEEDED(hr))
            {
                // Approximate each eye center as the midpoint of its upper and lower eye model points
                m_leftEyePosition.x = (points3D[LEFT_UP_EYE_INDEX].x + points3D[LEFT_DOWN_EYE_INDEX].x) * 0.5f;
                m_leftEyePosition.y = (points3D[LEFT_UP_EYE_INDEX].y + points3D[LEFT_DOWN_EYE_INDEX].y) * 0.5f;
                m_leftEyePosition.z = (points3D[LEFT_UP_EYE_INDEX].z + points3D[LEFT_DOWN_EYE_INDEX].z) * 0.5f;

                m_rightEyePosition.x = (points3D[RIGHT_UP_EYE_INDEX].x + points3D[RIGHT_DOWN_EYE_INDEX].x) * 0.5f;
                m_rightEyePosition.y = (points3D[RIGHT_UP_EYE_INDEX].y + points3D[RIGHT_DOWN_EYE_INDEX].y) * 0.5f;
                m_rightEyePosition.z = (points3D[RIGHT_UP_EYE_INDEX].z + points3D[RIGHT_DOWN_EYE_INDEX].z) * 0.5f;
            }

            ftModel->Release();
            _freea(points3D);
        }

        if (m_CallBack && SUCCEEDED(hr))
        {
            (*m_CallBack)(m_CallBackParam);
        }
    }
    return true;
}
Example #2
void FubiKinectSDKSensor::update()
{
	HRESULT hrFT = S_OK;
	// Get new stream data
	if (m_videoBuffer && m_depthBuffer && m_imageDataNew)
	{
		m_imageDataNew = false;
		FT_SENSOR_DATA sensorData(m_videoBuffer, m_depthBuffer, m_zoomFactor, &m_viewOffset);
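		// Select which skeleton-tracked users occupy the limited face tracking slots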
        selectUsersToTrack(KINECT_SDK_MAX_NUM_FACES_TRACKED, m_userContext);
        for (UINT i=0; i<KINECT_SDK_MAX_NUM_FACES_TRACKED; i++)
        {
			if(m_userContext[i].m_CountUntilFailure > 0) // user has been selected
			{
				m_headTracked[m_userContext[i].m_SkeletonId] = false;
				m_faceTracked[m_userContext[i].m_SkeletonId] = false;
				m_face2DTracked[m_userContext[i].m_SkeletonId] = false;

			    FT_VECTOR3D hint[2];
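				// Seed the face tracker with the shoulder center and head joint positions from skeleton tracking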
				hint[0] =  m_skelPos[m_userContext[i].m_SkeletonId][NUI_SKELETON_POSITION_SHOULDER_CENTER];
				hint[1] =  m_skelPos[m_userContext[i].m_SkeletonId][NUI_SKELETON_POSITION_HEAD];

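				// Reuse the previous face state if the last frame succeeded; StartTracking runs the more expensive detection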
				if (m_userContext[i].m_LastTrackSucceeded)
				{
					hrFT = m_userContext[i].m_pFaceTracker->ContinueTracking(&sensorData, hint, m_userContext[i].m_pFTResult);
				}
				else
				{
					hrFT = m_userContext[i].m_pFaceTracker->StartTracking(&sensorData, NULL, hint, m_userContext[i].m_pFTResult);
				}
				m_userContext[i].m_LastTrackSucceeded = SUCCEEDED(hrFT) && SUCCEEDED(m_userContext[i].m_pFTResult->GetStatus());
				if (m_userContext[i].m_LastTrackSucceeded)
				{
					// Store head orientation
					FLOAT scale;
					hrFT = m_userContext[i].m_pFTResult->Get3DPose(&scale, m_headOrient[m_userContext[i].m_SkeletonId], m_headPos[m_userContext[i].m_SkeletonId]);
					if (SUCCEEDED(hrFT))
					{
						m_headTracked[m_userContext[i].m_SkeletonId]  = true;

						IFTModel* ftModel;
						HRESULT hr = m_userContext[i].m_pFaceTracker->GetFaceModel(&ftModel);
						if (SUCCEEDED(hr))
						{
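							// Animation unit (AU) and shape unit (SU) coefficients needed to evaluate the fitted 3D face model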
							FLOAT* pAUCoeffs;
							UINT pAUCount;
							m_userContext[i].m_pFTResult->GetAUCoefficients(&pAUCoeffs, &pAUCount);
					
							FLOAT* pSU = NULL;
							UINT numSU;
							BOOL suConverged;
							m_userContext[i].m_pFaceTracker->GetShapeUnits(NULL, &pSU, &numSU, &suConverged);

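							// Copy the tracked 2D face points; the count check guards the fixed-size per-user buffer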
							FT_VECTOR2D* pPts2D = NULL;
							UINT pts2DCount = 0;
							// Guard against a failed call before using the returned pointer
							if (SUCCEEDED(m_userContext[i].m_pFTResult->Get2DShapePoints(&pPts2D, &pts2DCount)) && pts2DCount <= 121)
							{
								m_face2DTracked[m_userContext[i].m_SkeletonId] = true;
								for(UINT j = 0; j < pts2DCount; j++)
								{
									m_face2DPos[m_userContext[i].m_SkeletonId][j] = pPts2D[j];
								}
							}
							else
							{
								static double lastWarning = -99;
								if (Fubi::currentTime() - lastWarning > 10)
								{
									Fubi_logErr("Error in face tracking - face point count does not match!\n");
									lastWarning = Fubi::currentTime();
								}
							}

			
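							// Evaluate the 3D face mesh using the tracked head pose; the vertex count check again guards the fixed-size per-user buffer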
							UINT vertexCount = ftModel->GetVertexCount();
							if (vertexCount <= 121)
							{
								if (SUCCEEDED(ftModel->Get3DShape(pSU, numSU, pAUCoeffs, pAUCount, scale, m_headOrient[m_userContext[i].m_SkeletonId], m_headPos[m_userContext[i].m_SkeletonId], m_facePos[m_userContext[i].m_SkeletonId], vertexCount)))
								{
									m_faceTracked[m_userContext[i].m_SkeletonId] = true;
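									// Store the triangle indices as well so the face mesh topology can be reconstructed later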
									FT_TRIANGLE* pTriangles;
									UINT triangleCount;
									if (SUCCEEDED(ftModel->GetTriangles(&pTriangles, &triangleCount)))
									{
										for (UINT j = 0; j < triangleCount; ++j)
										{
											m_faceTriangleIndices[m_userContext[i].m_SkeletonId][j] = pTriangles[j];
										}
									}
								}
							}
							else
							{
								static double lastWarning = -99;
								if (Fubi::currentTime() - lastWarning > 10)
								{
									Fubi_logErr("Error in face tracking - vertex count does not match!\n");
									lastWarning = Fubi::currentTime();
								}
							}
						}
					}
				}
				else
				{
					m_userContext[i].m_pFTResult->Reset();
				}
			}
        }
	}	
}
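
Both excerpts assume that an IFTFaceTracker and its IFTResult (m_pFaceTracker, m_pFTResult) have already been created and initialized elsewhere in the class. As a rough orientation only, a minimal setup sketch with the Face Tracking SDK (FaceTrackLib) could look like the following; the helper name and the camera parameters are placeholders, and in practice the focal lengths should be queried from the sensor (cf. GetVideoConfiguration above).

#include <FaceTrackLib.h>

// Hypothetical helper: creates and initializes a face tracker plus a reusable result object.
HRESULT CreateFaceTracking(IFTFaceTracker** ppTracker, IFTResult** ppResult)
{
    *ppTracker = FTCreateFaceTracker();
    if (*ppTracker == NULL)
        return E_OUTOFMEMORY;

    // Placeholder camera configurations (VGA color, QVGA depth); real values
    // should come from the connected sensor.
    FT_CAMERA_CONFIG videoConfig = {640, 480, 500.0f};
    FT_CAMERA_CONFIG depthConfig = {320, 240, 250.0f};

    HRESULT hr = (*ppTracker)->Initialize(&videoConfig, &depthConfig, NULL, NULL);
    if (FAILED(hr))
        return hr;

    // The result object is reused by StartTracking/ContinueTracking every frame.
    return (*ppTracker)->CreateFTResult(ppResult);
}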