bool SkeletonTrackerHelper::SubmitFaceTrackingResult(IFTResult* pResult)
{
    // Processes one face-tracking result: fits the 3D face model, caches the
    // mid-eye positions (m_leftEyePosition / m_rightEyePosition), and fires
    // the registered callback on success.
    // Always returns true (legacy contract kept for existing callers).
    if (pResult != NULL && SUCCEEDED(pResult->GetStatus()))
    {
        // Shape units: per-person face geometry accumulated by the tracker.
        FLOAT* pSU = NULL;
        UINT numSU;
        BOOL suConverged;
        m_pFaceTracker->GetShapeUnits(NULL, &pSU, &numSU, &suConverged);

        IFTModel* ftModel;
        HRESULT hr = m_pFaceTracker->GetFaceModel(&ftModel);
        if (SUCCEEDED(hr))
        {
            UINT vertexCount = ftModel->GetVertexCount();
            FT_VECTOR3D* points3D = reinterpret_cast<FT_VECTOR3D*>(_malloca(sizeof(FT_VECTOR3D) * vertexCount));
            if (points3D != NULL)
            {
                // Animation units: per-frame expression coefficients.
                FLOAT* pAU = NULL;
                UINT numAU;
                pResult->GetAUCoefficients(&pAU, &numAU);

                FLOAT scale;
                FLOAT rotationXYZ[3];
                FLOAT translationXYZ[3];
                pResult->Get3DPose(&scale, rotationXYZ, translationXYZ);

                hr = ftModel->Get3DShape(pSU, ftModel->GetSUCount(), pAU, numAU, 1.0f,
                                         rotationXYZ, translationXYZ, points3D, vertexCount);
                // Only consume the vertex buffer when Get3DShape succeeded;
                // the original code read points3D unconditionally.
                if (SUCCEEDED(hr))
                {
                    // Eye center = midpoint of the upper- and lower-lid vertices.
                    m_leftEyePosition.x = (points3D[LEFT_UP_EYE_INDEX].x + points3D[LEFT_DOWN_EYE_INDEX].x) * 0.5f;
                    m_leftEyePosition.y = (points3D[LEFT_UP_EYE_INDEX].y + points3D[LEFT_DOWN_EYE_INDEX].y) * 0.5f;
                    m_leftEyePosition.z = (points3D[LEFT_UP_EYE_INDEX].z + points3D[LEFT_DOWN_EYE_INDEX].z) * 0.5f;

                    m_rightEyePosition.x = (points3D[RIGHT_UP_EYE_INDEX].x + points3D[RIGHT_DOWN_EYE_INDEX].x) * 0.5f;
                    m_rightEyePosition.y = (points3D[RIGHT_UP_EYE_INDEX].y + points3D[RIGHT_DOWN_EYE_INDEX].y) * 0.5f;
                    m_rightEyePosition.z = (points3D[RIGHT_UP_EYE_INDEX].z + points3D[RIGHT_DOWN_EYE_INDEX].z) * 0.5f;
                }
                _freea(points3D);
            }
            else
            {
                // _malloca may return NULL on allocation failure; suppress the callback.
                hr = E_OUTOFMEMORY;
            }
            ftModel->Release();
        }

        if (m_CallBack && SUCCEEDED(hr))
        {
            (*m_CallBack)(m_CallBackParam);
        }
    }
    return true;
}
// Exemplo n.º 2
// 0
BOOL FTHelper2::SubmitFraceTrackingResult(IFTResult* pResult, UINT userId)
{
    // Notifies the registered callback for a valid tracking result and, when
    // mask drawing is enabled, overlays the fitted face model for this user
    // onto the color image. Always returns TRUE.
    if (pResult == NULL || FAILED(pResult->GetStatus()))
    {
        return TRUE;
    }

    if (m_CallBack)
    {
        (*m_CallBack)(m_CallBackParam, userId);
    }

    if (!m_DrawMask)
    {
        return TRUE;
    }

    // Shape units describe this user's individual face geometry.
    FLOAT* shapeUnits = NULL;
    UINT shapeUnitCount;
    BOOL shapeUnitsConverged;
    m_UserContext[userId].m_pFaceTracker->GetShapeUnits(NULL, &shapeUnits, &shapeUnitCount, &shapeUnitsConverged);

    POINT offset = {0, 0};
    FT_CAMERA_CONFIG videoConfig;
    if (m_KinectSensorPresent)
    {
        m_KinectSensor.GetVideoConfiguration(&videoConfig);
    }
    else
    {
        // No sensor attached: fall back to default VGA camera geometry.
        videoConfig.Width = 640;
        videoConfig.Height = 480;
        videoConfig.FocalLength = 500.0f;
    }

    IFTModel* faceModel;
    HRESULT hr = m_UserContext[userId].m_pFaceTracker->GetFaceModel(&faceModel);
    if (SUCCEEDED(hr))
    {
        // Cycle through the six preset colors so each user gets a distinct mask.
        DWORD maskColor = s_ColorCode[userId % 6];
        hr = VisualizeFaceModel(m_colorImage, faceModel, &videoConfig, shapeUnits, 1.0, offset, pResult, maskColor);
        faceModel->Release();
    }

    return TRUE;
}
bool UKinect::pollFaceUser(int user) {
  if (user != 0 && user != 1) return false;

  FT_SENSOR_DATA sensorData;
  sensorData.pVideoFrame = colorBuffer;
  sensorData.pDepthFrame = depthBuffer;
  sensorData.ZoomFactor = 1.0f;       // Not used must be 1.0
  sensorData.ViewOffset.x = 0; // Not used must be (0,0)
  sensorData.ViewOffset.y = 0; // Not used must be (0,0)

  IFTFaceTracker* _pFaceTracker = pFaceTracker[user];	//				// An instance of a face tracker
  IFTResult*  _pFTResult = pFTResult[user];							// Face tracking result interface
  IFTResult*  _pFTResult_copy = pFTResult_copy[user];						// Copy of Face tracking result interface
  bool _isFaceTracked = isFaceTracked[user];
  //                        red          yellow
  int color = (user == 0) ? 0x00FF0000 : 0x00FFFF00;

  int trackedID = skeletonTrackedIDs.as< vector<int>>()[user];

  FT_VECTOR3D headHint[2];
  if (trackedID != 0) {
    vector<double> shoulders = skeletonJointPosition(trackedID, NUI_SKELETON_POSITION_SHOULDER_CENTER);
    if (shoulders.size() == 0) return false;
    headHint[0] = FT_VECTOR3D(shoulders[0], shoulders[1], shoulders[2]);

    vector<double> head = skeletonJointPosition(trackedID, NUI_SKELETON_POSITION_HEAD);
    if (head.size() == 0) return false;
    headHint[1] = FT_VECTOR3D(head[0], head[1], head[2]);
  } else return false;


  // Check if we are already tracking a face
  if (!_isFaceTracked) {
    // Initiate face tracking.
    // This call is more expensive and searches over the input RGB frame for a face.
    // However if hint != null id limits only to head region
    hr = _pFaceTracker->StartTracking(&sensorData, NULL, headHint, _pFTResult);
  } else {
    // Continue tracking. It uses a previously known face position.
    // This call is less expensive than StartTracking()
    hr = _pFaceTracker->ContinueTracking(&sensorData, headHint, _pFTResult);
  }

  // exit on fail
  if (FAILED(hr) || FAILED(_pFTResult->GetStatus())) {
    _pFTResult->Reset();
    return false;
  }

  _pFTResult->CopyTo(_pFTResult_copy);

  if (faceVisualization) {
    FLOAT* pSU = NULL;
    UINT numSU;
    BOOL suConverged;
    hr = _pFaceTracker->GetShapeUnits(NULL, &pSU, &numSU, &suConverged);
    if (FAILED(hr)) {
      cerr << "[UKinect] ERROR: Can not get SU units." << endl;
      throw;
    }

    POINT viewOffset = { 0, 0 };
    FT_CAMERA_CONFIG colorConfig;
    getColorConfiguration(&colorConfig);

    IFTModel* ftModel;
    HRESULT hr = _pFaceTracker->GetFaceModel(&ftModel);
    if (FAILED(hr)) {
      cerr << "[UKinect] ERROR: Can not get Face Model." << endl;
      throw;
    }

    Mat tmp;
    if (!faceVisualizationOnColor.as<int>()) {
      tmp = Mat(Size(static_cast<int>(colorBuffer->GetWidth()), static_cast<int>(colorBuffer->GetHeight())), CV_8UC4, CV_RGB(0, 0, 0));
      memcpy(colorBuffer->GetBuffer(), tmp.data, min(colorBuffer->GetBufferSize(), UINT(colorBuffer->GetBufferSize())));
    }
    if (faceVisualizationMode.as<int>())
      hr = VisualizeFacetracker(colorBuffer, _pFTResult, color);
    else
      hr = VisualizeFaceModel(colorBuffer, ftModel, &colorConfig, pSU, 1.0, viewOffset, _pFTResult, color);

    if (FAILED(hr)) {
      cerr << "[UKinect] ERROR: Cannot visualize Face Model." << endl;
      throw;
    }
    tmp = Mat(Size(static_cast<int>(colorBuffer->GetWidth()), static_cast<int>(colorBuffer->GetHeight())), CV_8UC4, colorBuffer->GetBuffer());
    cvtColor(tmp, faceCVMat, CV_BGRA2RGB); // <- alloc new memory
    // Save CV image to UImage
    faceBin.image.data = faceCVMat.data;
    faceImage = faceBin;

    ftModel->Release();
  }

  return true;

}
// Exemplo n.º 4
// 0
// Runs one face-tracking pass over the most recent video/depth frame: selects
// up to KINECT_SDK_MAX_NUM_FACES_TRACKED users, starts or continues face
// tracking for each, and stores head pose, 2D face points, the 3D face shape
// and its triangle indices for every successfully tracked user.
void FubiKinectSDKSensor::update()
{
	HRESULT hrFT = S_OK;
	// Get new stream data
	if (m_videoBuffer && m_depthBuffer && m_imageDataNew)
	{
		// Consume the "new frame" flag so this frame is processed only once.
		m_imageDataNew = false;
		FT_SENSOR_DATA sensorData(m_videoBuffer, m_depthBuffer, m_zoomFactor, &m_viewOffset);
        selectUsersToTrack(KINECT_SDK_MAX_NUM_FACES_TRACKED, m_userContext);
        for (UINT i=0; i<KINECT_SDK_MAX_NUM_FACES_TRACKED; i++)
        {
			if(m_userContext[i].m_CountUntilFailure > 0) // user has been selected
			{
				// Reset per-frame tracking flags; re-set below on success.
				m_headTracked[m_userContext[i].m_SkeletonId] = false;
				m_faceTracked[m_userContext[i].m_SkeletonId] = false;
				m_face2DTracked[m_userContext[i].m_SkeletonId] = false;

				// Seed the tracker with the skeleton's shoulder-center and
				// head positions to narrow the face search region.
			    FT_VECTOR3D hint[2];
				hint[0] =  m_skelPos[m_userContext[i].m_SkeletonId][NUI_SKELETON_POSITION_SHOULDER_CENTER];
				hint[1] =  m_skelPos[m_userContext[i].m_SkeletonId][NUI_SKELETON_POSITION_HEAD];

				if (m_userContext[i].m_LastTrackSucceeded)
				{
					// Cheap: reuse the previously known face position.
					hrFT = m_userContext[i].m_pFaceTracker->ContinueTracking(&sensorData, hint, m_userContext[i].m_pFTResult);
				}
				else
				{
					// Expensive: search the frame (limited by the hint) for a face.
					hrFT = m_userContext[i].m_pFaceTracker->StartTracking(&sensorData, NULL, hint, m_userContext[i].m_pFTResult);
				}
				m_userContext[i].m_LastTrackSucceeded = SUCCEEDED(hrFT) && SUCCEEDED(m_userContext[i].m_pFTResult->GetStatus());
				if (m_userContext[i].m_LastTrackSucceeded)
				{
					// Store head orientation
					// NOTE(review): 'static' looks unnecessary here — scale is
					// overwritten by Get3DPose before every use. TODO confirm.
					static FLOAT scale;
					hrFT = m_userContext[i].m_pFTResult->Get3DPose(&scale, m_headOrient[m_userContext[i].m_SkeletonId], m_headPos[m_userContext[i].m_SkeletonId]);
					if (SUCCEEDED(hrFT))
					{
						m_headTracked[m_userContext[i].m_SkeletonId]  = true;

						IFTModel* ftModel;
						HRESULT hr = m_userContext[i].m_pFaceTracker->GetFaceModel(&ftModel);
						if (SUCCEEDED(hr))
						{
							// Animation units: per-frame expression coefficients.
							FLOAT* pAUCOeffs;
							UINT pAUCOunt;
							m_userContext[i].m_pFTResult->GetAUCoefficients(&pAUCOeffs, &pAUCOunt);
					
							// Shape units: per-person face geometry.
							FLOAT* pSU = NULL;
							UINT numSU;
							BOOL suConverged;
							m_userContext[i].m_pFaceTracker->GetShapeUnits(NULL, &pSU, &numSU, &suConverged);

							// Copy the projected 2D face points, guarding the
							// fixed-size destination array.
							// NOTE(review): 121 presumably matches the capacity
							// of m_face2DPos/m_facePos — verify the declaration.
							FT_VECTOR2D* pPts2D;
							UINT pts2DCount;
							m_userContext[i].m_pFTResult->Get2DShapePoints(&pPts2D, &pts2DCount);
							if (pts2DCount <= 121)
							{
								m_face2DTracked[m_userContext[i].m_SkeletonId] = true;
								for(UINT j = 0; j < pts2DCount; j++)
								{
									m_face2DPos[m_userContext[i].m_SkeletonId][j] = pPts2D[j];
								}
							}
							else
							{
								// Rate-limit the error log to once every 10 seconds.
								static double lastWarning = -99;
								if (Fubi::currentTime() - lastWarning > 10)
								{
									Fubi_logErr("Error in face tracking - face point count does not match!\n");
									lastWarning = Fubi::currentTime();
								}
							}

			
							// Fit and store the full 3D face shape plus its
							// triangle mesh indices.
							UINT vertexCount = ftModel->GetVertexCount();
							if (vertexCount <= 121)
							{
								if (SUCCEEDED(ftModel->Get3DShape(pSU, numSU, pAUCOeffs, pAUCOunt, scale, m_headOrient[m_userContext[i].m_SkeletonId], m_headPos[m_userContext[i].m_SkeletonId], m_facePos[m_userContext[i].m_SkeletonId], vertexCount)))
								{
									m_faceTracked[m_userContext[i].m_SkeletonId] = true;
									FT_TRIANGLE* pTriangles;
									UINT triangleCount;
									if (SUCCEEDED(ftModel->GetTriangles(&pTriangles, &triangleCount)))
									{
										for (UINT j = 0; j < triangleCount; ++j)
										{
											m_faceTriangleIndices[m_userContext[i].m_SkeletonId][j] = pTriangles[j];
										}
									}
								}
							}
							else
							{
								// Rate-limit the error log to once every 10 seconds.
								static double lastWarning = -99;
								if (Fubi::currentTime() - lastWarning > 10)
								{
									Fubi_logErr("Error in face tracking - vertex count does not match!\n");
									lastWarning = Fubi::currentTime();
								}
							}
							// NOTE(review): ftModel is not Released here —
							// possible COM reference leak; confirm ownership
							// semantics of GetFaceModel.
						}
					}
				}
				else
				{
					// Tracking failed: reset so the next frame does a full StartTracking.
					m_userContext[i].m_pFTResult->Reset();
				}
			}
        }
	}	
}