// Face tracking (for internal use)
    HRESULT FaceTracker::FaceTracking( const ColorImageFrame& colorFrame, const DepthImageFrame& depthFrame, const NUI_SKELETON_DATA* skeleton )
    {
        // Update the color and depth frame buffers
        memcpy( &colorCameraFrameBuffer[0], colorFrame.GetPixelData(), colorFrame.GetPixelDataLength() );
        memcpy( &depthCameraFrameBuffer[0], depthFrame.GetPixelData(), depthFrame.GetPixelDataLength() );

        // Set the skeleton coordinates (pass 0 when no skeleton is supplied)
        FT_VECTOR3D* headPointsPtr = 0;
        FT_VECTOR3D headPoints[2] = { FT_VECTOR3D( 0, 0, 0 ), FT_VECTOR3D( 0, 0, 0 ) };
        if ( skeleton != 0 ) {
            if ( skeleton->eTrackingState == NUI_SKELETON_TRACKING_STATE::NUI_SKELETON_TRACKED ) {
                headPoints[0] = JointToVector3D( skeleton, NUI_SKELETON_POSITION_SHOULDER_CENTER );
                headPoints[1] = JointToVector3D( skeleton, NUI_SKELETON_POSITION_HEAD );
                headPointsPtr = headPoints;
            }
        }

        // The call differs depending on whether a face is already being tracked
        if ( !IsFaceTracked() ) {
            // Start face tracking
            return pFaceTracker->StartTracking( &sensorData, 0, headPointsPtr, pFaceTrackerResult.get() );
        }
        else {
            // Continue face tracking
            return pFaceTracker->ContinueTracking( &sensorData, headPointsPtr, pFaceTrackerResult.get() );
        }
    }
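The JointToVector3D() helper called above is not part of this listing. A minimal sketch of what it presumably does, assuming it simply repackages the joint's Vector4 position (camera space, in meters) as the FT_VECTOR3D hint type:

FT_VECTOR3D JointToVector3D( const NUI_SKELETON_DATA* skeleton, NUI_SKELETON_POSITION_INDEX joint )
{
    // Hypothetical implementation: drop the w component of the joint position.
    const Vector4& p = skeleton->SkeletonPositions[joint];
    return FT_VECTOR3D( p.x, p.y, p.z );
}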
Example #2
void KinectSensor::GotSkeletonAlert()
{
    NUI_SKELETON_FRAME SkeletonFrame = {0};

    HRESULT hr = NuiSkeletonGetNextFrame(0, &SkeletonFrame);
    if(FAILED(hr))
    {
        return;
    }

    for( int i = 0 ; i < NUI_SKELETON_COUNT ; i++ )
    {
        if( SkeletonFrame.SkeletonData[i].eTrackingState == NUI_SKELETON_TRACKED &&
            NUI_SKELETON_POSITION_TRACKED == SkeletonFrame.SkeletonData[i].eSkeletonPositionTrackingState[NUI_SKELETON_POSITION_HEAD] &&
            NUI_SKELETON_POSITION_TRACKED == SkeletonFrame.SkeletonData[i].eSkeletonPositionTrackingState[NUI_SKELETON_POSITION_SHOULDER_CENTER])
        {
            m_SkeletonTracked[i] = true;
            m_HeadPoint[i].x = SkeletonFrame.SkeletonData[i].SkeletonPositions[NUI_SKELETON_POSITION_HEAD].x;
            m_HeadPoint[i].y = SkeletonFrame.SkeletonData[i].SkeletonPositions[NUI_SKELETON_POSITION_HEAD].y;
            m_HeadPoint[i].z = SkeletonFrame.SkeletonData[i].SkeletonPositions[NUI_SKELETON_POSITION_HEAD].z;
            m_NeckPoint[i].x = SkeletonFrame.SkeletonData[i].SkeletonPositions[NUI_SKELETON_POSITION_SHOULDER_CENTER].x;
            m_NeckPoint[i].y = SkeletonFrame.SkeletonData[i].SkeletonPositions[NUI_SKELETON_POSITION_SHOULDER_CENTER].y;
            m_NeckPoint[i].z = SkeletonFrame.SkeletonData[i].SkeletonPositions[NUI_SKELETON_POSITION_SHOULDER_CENTER].z;
        }
        else
        {
            m_HeadPoint[i] = m_NeckPoint[i] = FT_VECTOR3D(0, 0, 0);
            m_SkeletonTracked[i] = false;
        }
    }
}
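GotSkeletonAlert() only caches per-skeleton head and neck points; the face tracker still needs one pair picked out as its hint. A sketch of how a caller might select it (the closest-skeleton policy and the GetClosestHint name are assumptions, not part of the original code):

bool KinectSensor::GetClosestHint(FT_VECTOR3D hint[2])
{
    int best = -1;
    for (int i = 0; i < NUI_SKELETON_COUNT; ++i)
    {
        // Smaller z means closer to the sensor.
        if (m_SkeletonTracked[i] && (best < 0 || m_HeadPoint[i].z < m_HeadPoint[best].z))
        {
            best = i;
        }
    }
    if (best < 0)
    {
        return false; // nothing tracked this frame
    }
    hint[0] = m_NeckPoint[best]; // hint[0] = neck, hint[1] = head, the same
    hint[1] = m_HeadPoint[best]; // order used for head hints in these examples
    return true;
}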
Example #3
void KinectSensor::GotSkeletonAlert()
{
    NUI_SKELETON_FRAME SkeletonFrame = {0};

    HRESULT hr = NuiSkeletonGetNextFrame(0, &SkeletonFrame);
    if(FAILED(hr))
    {
        return;
    }

    int skeletonCount = 0; // counts tracked skeletons this frame (not used further in this example)

    for( int i = 0 ; i < NUI_SKELETON_COUNT ; i++ )
    {
        if (SkeletonFrame.SkeletonData[i].eTrackingState == NUI_SKELETON_TRACKED)
            skeletonCount++;

        if( SkeletonFrame.SkeletonData[i].eTrackingState == NUI_SKELETON_TRACKED &&
            NUI_SKELETON_POSITION_TRACKED == SkeletonFrame.SkeletonData[i].eSkeletonPositionTrackingState[NUI_SKELETON_POSITION_HEAD] &&
            NUI_SKELETON_POSITION_TRACKED == SkeletonFrame.SkeletonData[i].eSkeletonPositionTrackingState[NUI_SKELETON_POSITION_SHOULDER_CENTER])
        {
            m_SkeletonTracked[i] = true;
            m_HeadPoint[i].x = SkeletonFrame.SkeletonData[i].SkeletonPositions[NUI_SKELETON_POSITION_HEAD].x;
            m_HeadPoint[i].y = SkeletonFrame.SkeletonData[i].SkeletonPositions[NUI_SKELETON_POSITION_HEAD].y;
            m_HeadPoint[i].z = SkeletonFrame.SkeletonData[i].SkeletonPositions[NUI_SKELETON_POSITION_HEAD].z;
            m_NeckPoint[i].x = SkeletonFrame.SkeletonData[i].SkeletonPositions[NUI_SKELETON_POSITION_SHOULDER_CENTER].x;
            m_NeckPoint[i].y = SkeletonFrame.SkeletonData[i].SkeletonPositions[NUI_SKELETON_POSITION_SHOULDER_CENTER].y;
            m_NeckPoint[i].z = SkeletonFrame.SkeletonData[i].SkeletonPositions[NUI_SKELETON_POSITION_SHOULDER_CENTER].z;
        }
        else
        {
            m_HeadPoint[i] = m_NeckPoint[i] = FT_VECTOR3D(0, 0, 0);
            m_SkeletonTracked[i] = false;
        }
    }

    m_Skeletons = cv::Mat::zeros(NUI_SKELETON_COUNT, 21, CV_32FC4); // one row per skeleton; each element is (x, y, z, tracking state)

    for (int i = 0; i < NUI_SKELETON_COUNT; i++)
    {
        if (SkeletonFrame.SkeletonData[i].eTrackingState != NUI_SKELETON_TRACKED)
            continue;

        for (int j = 0; j < 20; j++){
            cv::Vec4f point;
            point[0] = SkeletonFrame.SkeletonData[i].SkeletonPositions[j].x;
            point[1] = SkeletonFrame.SkeletonData[i].SkeletonPositions[j].y;
            point[2] = SkeletonFrame.SkeletonData[i].SkeletonPositions[j].z;
            point[3] = SkeletonFrame.SkeletonData[i].eSkeletonPositionTrackingState[j]; // 0 = not tracked, 1 = inferred, 2 = tracked
            m_Skeletons.at<cv::Vec4f>(i, j) = point;
        }
    }

}
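Reading a joint back out of m_Skeletons is then a matter of indexing the CV_32FC4 matrix by (skeleton row, joint column). A small sketch, assuming the layout written above (the GetHeadFromMat name is hypothetical):

bool KinectSensor::GetHeadFromMat(int i, cv::Vec4f& head) const
{
    // The fourth channel carries the per-joint tracking state (0/1/2) stored above.
    head = m_Skeletons.at<cv::Vec4f>(i, NUI_SKELETON_POSITION_HEAD);
    return head[3] == NUI_SKELETON_POSITION_TRACKED;
}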
Example #4
bool UKinect::pollFaceUser(int user) {
  if (user != 0 && user != 1) return false;

  FT_SENSOR_DATA sensorData;
  sensorData.pVideoFrame = colorBuffer;
  sensorData.pDepthFrame = depthBuffer;
  sensorData.ZoomFactor = 1.0f;  // Not used; must be 1.0
  sensorData.ViewOffset.x = 0;   // Not used; must be (0,0)
  sensorData.ViewOffset.y = 0;

  IFTFaceTracker* _pFaceTracker = pFaceTracker[user];        // An instance of a face tracker
  IFTResult* _pFTResult = pFTResult[user];                   // Face tracking result interface
  IFTResult* _pFTResult_copy = pFTResult_copy[user];         // Copy of the face tracking result interface
  bool _isFaceTracked = isFaceTracked[user];
  int color = (user == 0) ? 0x00FF0000 : 0x00FFFF00; // user 0: red, user 1: yellow

  int trackedID = skeletonTrackedIDs.as<vector<int>>()[user];

  FT_VECTOR3D headHint[2];
  if (trackedID != 0) {
    vector<double> shoulders = skeletonJointPosition(trackedID, NUI_SKELETON_POSITION_SHOULDER_CENTER);
    if (shoulders.size() == 0) return false;
    headHint[0] = FT_VECTOR3D(shoulders[0], shoulders[1], shoulders[2]);

    vector<double> head = skeletonJointPosition(trackedID, NUI_SKELETON_POSITION_HEAD);
    if (head.size() == 0) return false;
    headHint[1] = FT_VECTOR3D(head[0], head[1], head[2]);
  } else return false;

  // Check if we are already tracking a face
  if (!_isFaceTracked) {
    // Initiate face tracking.
    // This call is more expensive, as it searches the whole input RGB frame for a face.
    // However, if the hint is not NULL, the search is limited to the head region.
    hr = _pFaceTracker->StartTracking(&sensorData, NULL, headHint, _pFTResult);
  } else {
    // Continue tracking. It uses a previously known face position.
    // This call is less expensive than StartTracking()
    hr = _pFaceTracker->ContinueTracking(&sensorData, headHint, _pFTResult);
  }

  // Exit on failure
  if (FAILED(hr) || FAILED(_pFTResult->GetStatus())) {
    _pFTResult->Reset();
    return false;
  }

  _pFTResult->CopyTo(_pFTResult_copy);

  if (faceVisualization) {
    FLOAT* pSU = NULL;
    UINT numSU;
    BOOL suConverged;
    hr = _pFaceTracker->GetShapeUnits(NULL, &pSU, &numSU, &suConverged);
    if (FAILED(hr)) {
      cerr << "[UKinect] ERROR: Can not get SU units." << endl;
      throw;
    }

    POINT viewOffset = { 0, 0 };
    FT_CAMERA_CONFIG colorConfig;
    getColorConfiguration(&colorConfig);

    IFTModel* ftModel;
    hr = _pFaceTracker->GetFaceModel(&ftModel); // reuse hr; redeclaring it here would shadow the one set above
    if (FAILED(hr)) {
      cerr << "[UKinect] ERROR: Cannot get Face Model." << endl;
      throw;
    }

    Mat tmp;
    if (!faceVisualizationOnColor.as<int>()) {
      tmp = Mat(Size(static_cast<int>(colorBuffer->GetWidth()), static_cast<int>(colorBuffer->GetHeight())), CV_8UC4, CV_RGB(0, 0, 0));
      memcpy(colorBuffer->GetBuffer(), tmp.data, min(colorBuffer->GetBufferSize(), UINT(tmp.total() * tmp.elemSize())));
    }
    if (faceVisualizationMode.as<int>())
      hr = VisualizeFacetracker(colorBuffer, _pFTResult, color);
    else
      hr = VisualizeFaceModel(colorBuffer, ftModel, &colorConfig, pSU, 1.0, viewOffset, _pFTResult, color);

    if (FAILED(hr)) {
      cerr << "[UKinect] ERROR: Cannot visualize Face Model." << endl;
      throw;
    }
    tmp = Mat(Size(static_cast<int>(colorBuffer->GetWidth()), static_cast<int>(colorBuffer->GetHeight())), CV_8UC4, colorBuffer->GetBuffer());
    cvtColor(tmp, faceCVMat, CV_BGRA2RGB); // allocates new memory for faceCVMat
    // Save CV image to UImage
    faceBin.image.data = faceCVMat.data;
    faceImage = faceBin;

    ftModel->Release();
  }

  return true;
}
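Once pollFaceUser() has succeeded, the copied result can be queried without racing the tracker. A minimal consumer sketch using the standard IFTResult accessors (the readFacePose name is a placeholder):

bool UKinect::readFacePose(IFTResult* result, RECT& faceRect, FLOAT rotationXYZ[3])
{
    if (FAILED(result->GetStatus())) return false;             // no valid face in this result
    if (FAILED(result->GetFaceRect(&faceRect))) return false;  // rectangle in color-image pixels
    FLOAT scale;
    FLOAT translationXYZ[3];
    // Rotation comes back as pitch/yaw/roll in degrees, translation in meters.
    return SUCCEEDED(result->Get3DPose(&scale, rotationXYZ, translationXYZ));
}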
Example #5
HRESULT KinectSensor::Init(NUI_IMAGE_TYPE depthType, NUI_IMAGE_RESOLUTION depthRes, BOOL bNearMode, BOOL bFallbackToDefault, NUI_IMAGE_TYPE colorType, NUI_IMAGE_RESOLUTION colorRes, BOOL bSeatedSkeletonMode)
{
    HRESULT hr = E_UNEXPECTED;

    Release(); // Deal with double initializations.

    // NUI_IMAGE_TYPE_COLOR_RAW_YUV is not supported for now
    if((colorType != NUI_IMAGE_TYPE_COLOR && colorType != NUI_IMAGE_TYPE_COLOR_YUV)
        || (depthType != NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX && depthType != NUI_IMAGE_TYPE_DEPTH))
    {
        return E_INVALIDARG;
    }

    m_VideoBuffer = FTCreateImage();
    if (!m_VideoBuffer)
    {
        return E_OUTOFMEMORY;
    }

    DWORD width = 0;
    DWORD height = 0;

    NuiImageResolutionToSize(colorRes, width, height);

    hr = m_VideoBuffer->Allocate(width, height, FTIMAGEFORMAT_UINT8_B8G8R8X8);
    if (FAILED(hr))
    {
        return hr;
    }

    m_DepthBuffer = FTCreateImage();
    if (!m_DepthBuffer)
    {
        return E_OUTOFMEMORY;
    }

    NuiImageResolutionToSize(depthRes, width, height);

    hr = m_DepthBuffer->Allocate(width, height, FTIMAGEFORMAT_UINT16_D13P3);
    if (FAILED(hr))
    {
        return hr;
    }
    
    m_FramesTotal = 0;
    m_SkeletonTotal = 0;

    for (int i = 0; i < NUI_SKELETON_COUNT; ++i)
    {
        m_HeadPoint[i] = m_NeckPoint[i] = FT_VECTOR3D(0, 0, 0);
        m_SkeletonTracked[i] = false;
    }

    m_hNextDepthFrameEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
    m_hNextVideoFrameEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
    m_hNextSkeletonEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
    
    DWORD dwNuiInitDepthFlag = (depthType == NUI_IMAGE_TYPE_DEPTH)? NUI_INITIALIZE_FLAG_USES_DEPTH : NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX;

    hr = NuiInitialize(dwNuiInitDepthFlag | NUI_INITIALIZE_FLAG_USES_SKELETON | NUI_INITIALIZE_FLAG_USES_COLOR);
    if (FAILED(hr))
    {
        return hr;
    }
    m_bNuiInitialized = true;

    DWORD dwSkeletonFlags = NUI_SKELETON_TRACKING_FLAG_ENABLE_IN_NEAR_RANGE;
    if (bSeatedSkeletonMode)
    {
        dwSkeletonFlags |= NUI_SKELETON_TRACKING_FLAG_ENABLE_SEATED_SUPPORT;
    }
    hr = NuiSkeletonTrackingEnable( m_hNextSkeletonEvent, dwSkeletonFlags );
    if (FAILED(hr))
    {
        return hr;
    }

    hr = NuiImageStreamOpen(
        colorType,
        colorRes,
        0,
        2,
        m_hNextVideoFrameEvent,
        &m_pVideoStreamHandle );
    if (FAILED(hr))
    {
        return hr;
    }

    hr = NuiImageStreamOpen(
        depthType,
        depthRes,
        (bNearMode)? NUI_IMAGE_STREAM_FLAG_ENABLE_NEAR_MODE : 0,
        2,
        m_hNextDepthFrameEvent,
        &m_pDepthStreamHandle );
    if (FAILED(hr))
    {
        if(bNearMode && bFallbackToDefault)
        {
            hr = NuiImageStreamOpen(
                depthType,
                depthRes,
                0,
                2,
                m_hNextDepthFrameEvent,
                &m_pDepthStreamHandle );
        }

        if(FAILED(hr))
        {
            return hr;
        }
    }

    // Start the NUI processing thread
    m_hEvNuiProcessStop = CreateEvent(NULL, TRUE, FALSE, NULL);
    m_hThNuiProcess = CreateThread(NULL, 0, ProcessThread, this, 0, NULL);

    return hr;
}
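Init() creates several events and starts a worker thread, so the Release() it calls at the top (to deal with double initialization) has to undo all of that. A sketch of a matching teardown, assuming the member handles shown above:

void KinectSensor::Release()
{
    if (m_hEvNuiProcessStop != NULL)
    {
        // Ask ProcessThread to exit, then wait for it before closing handles.
        SetEvent(m_hEvNuiProcessStop);
        if (m_hThNuiProcess != NULL)
        {
            WaitForSingleObject(m_hThNuiProcess, INFINITE);
            CloseHandle(m_hThNuiProcess);
            m_hThNuiProcess = NULL;
        }
        CloseHandle(m_hEvNuiProcessStop);
        m_hEvNuiProcessStop = NULL;
    }

    if (m_bNuiInitialized)
    {
        NuiShutdown();
        m_bNuiInitialized = false;
    }

    if (m_hNextSkeletonEvent)   { CloseHandle(m_hNextSkeletonEvent);   m_hNextSkeletonEvent = NULL; }
    if (m_hNextDepthFrameEvent) { CloseHandle(m_hNextDepthFrameEvent); m_hNextDepthFrameEvent = NULL; }
    if (m_hNextVideoFrameEvent) { CloseHandle(m_hNextVideoFrameEvent); m_hNextVideoFrameEvent = NULL; }

    // The FTCreateImage() buffers are COM-style objects released via Release().
    if (m_VideoBuffer) { m_VideoBuffer->Release(); m_VideoBuffer = NULL; }
    if (m_DepthBuffer) { m_DepthBuffer->Release(); m_DepthBuffer = NULL; }
}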