// Draw only the overlays the user has enabled in the UI, and only when the
// corresponding data is available for this face.
void FaceTrackingRenderer::DrawGraphics(PXCFaceData* faceOutput)
{
    assert(faceOutput != NULL);
    if (!m_bitmap) return;

    const int numFaces = faceOutput->QueryNumberOfDetectedFaces();
    for (int i = 0; i < numFaces; ++i)
    {
        PXCFaceData::Face* trackedFace = faceOutput->QueryFaceByIndex(i);
        assert(trackedFace != NULL);

        if (FaceTrackingUtilities::IsModuleSelected(m_window, IDC_LOCATION) && trackedFace->QueryDetection() != NULL)
            DrawLocation(trackedFace);

        if (FaceTrackingUtilities::IsModuleSelected(m_window, IDC_LANDMARK) && trackedFace->QueryLandmarks() != NULL)
            DrawLandmark(trackedFace);

        if (FaceTrackingUtilities::IsModuleSelected(m_window, IDC_POSE) && trackedFace->QueryPose() != NULL)
            DrawPose(trackedFace, i);

        if (FaceTrackingUtilities::IsModuleSelected(m_window, IDC_EXPRESSIONS) && trackedFace->QueryExpressions() != NULL)
            DrawExpressions(trackedFace, i);

        if (FaceTrackingUtilities::IsModuleSelected(m_window, IDC_RECOGNITION) && trackedFace->QueryRecognition() != NULL)
            DrawRecognition(trackedFace, i);
    }
}
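DrawGraphics itself only dispatches to per-feature helpers; the helpers are not shown here. As a rough illustration of what one of them might do, the sketch below outlines a hypothetical DrawLocation that reads the face's detection bounding box through QueryDetection()->QueryBoundingRect() and outlines it on the renderer's bitmap with GDI. The drawing details (device-context setup, pen color) are assumptions for the sketch, not the sample's actual implementation.

// Hypothetical sketch of a per-feature helper, assuming m_bitmap is an HBITMAP:
// reads the detection bounding box and outlines it on the preview bitmap.
void FaceTrackingRenderer::DrawLocation(PXCFaceData::Face* trackedFace)
{
    PXCFaceData::DetectionData* detection = trackedFace->QueryDetection();
    if (detection == NULL) return;

    PXCRectI32 rect = {};
    if (!detection->QueryBoundingRect(&rect)) return;

    // Assumed GDI drawing against the renderer's off-screen bitmap.
    HDC dc = CreateCompatibleDC(NULL);
    HGDIOBJ oldBitmap = SelectObject(dc, m_bitmap);
    HGDIOBJ oldPen = SelectObject(dc, GetStockObject(DC_PEN));
    HGDIOBJ oldBrush = SelectObject(dc, GetStockObject(NULL_BRUSH));
    SetDCPenColor(dc, RGB(255, 0, 0));

    Rectangle(dc, rect.x, rect.y, rect.x + rect.w, rect.y + rect.h);

    SelectObject(dc, oldBrush);
    SelectObject(dc, oldPen);
    SelectObject(dc, oldBitmap);
    DeleteDC(dc);
}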
// Camera Processing Thread
// Initialize the RealSense SenseManager and run the camera processing loop:
// Step 1: Acquire a new camera frame
// Step 2: Load shared settings
// Step 3: Perform Core SDK and middleware processing and store the results
//         in the background RealSenseDataFrame
// Step 4: Swap the background and mid RealSenseDataFrames
void RealSenseImpl::CameraThread()
{
    uint64 currentFrame = 0;

    fgFrame->number = 0;
    midFrame->number = 0;
    bgFrame->number = 0;

    pxcStatus status = senseManager->Init();
    RS_LOG_STATUS(status, "SenseManager Initialized")
    assert(status == PXC_STATUS_NO_ERROR);

    if (bFaceEnabled) {
        faceData = pFace->CreateOutput();
    }

    while (bCameraThreadRunning == true) {
        // Acquire a new camera frame (blocking until all enabled streams are ready)
        status = senseManager->AcquireFrame(true);
        assert(status == PXC_STATUS_NO_ERROR);

        bgFrame->number = ++currentFrame;

        // Perform Core SDK and middleware processing and store the results
        // in the background RealSenseDataFrame
        if (bCameraStreamingEnabled) {
            PXCCapture::Sample* sample = senseManager->QuerySample();
            CopyColorImageToBuffer(sample->color, bgFrame->colorImage, colorResolution.width, colorResolution.height);
            CopyDepthImageToBuffer(sample->depth, bgFrame->depthImage, depthResolution.width, depthResolution.height);
        }

        if (bScan3DEnabled) {
            if (bScanStarted) {
                PXC3DScan::Configuration config = p3DScan->QueryConfiguration();
                config.startScan = true;
                p3DScan->SetConfiguration(config);
                bScanStarted = false;
            }

            if (bScanStopped) {
                PXC3DScan::Configuration config = p3DScan->QueryConfiguration();
                config.startScan = false;
                p3DScan->SetConfiguration(config);
                bScanStopped = false;
            }

            PXCImage* scanImage = p3DScan->AcquirePreviewImage();
            if (scanImage) {
                UpdateScan3DImageSize(scanImage->QueryInfo());
                CopyColorImageToBuffer(scanImage, bgFrame->scanImage, scan3DResolution.width, scan3DResolution.height);
                scanImage->Release();
            }

            if (bReconstructEnabled) {
                status = p3DScan->Reconstruct(scan3DFileFormat, scan3DFilename.GetCharArray().GetData());
                bReconstructEnabled = false;
                bScanCompleted = true;
            }
        }

        if (bFaceEnabled) {
            faceData->Update();
            bgFrame->headCount = faceData->QueryNumberOfDetectedFaces();
            if (bgFrame->headCount > 0) {
                PXCFaceData::Face* face = faceData->QueryFaceByIndex(0);
                PXCFaceData::PoseData* poseData = face->QueryPose();
                if (poseData) {
                    PXCFaceData::HeadPosition headPosition = {};
                    poseData->QueryHeadPosition(&headPosition);
                    bgFrame->headPosition = FVector(headPosition.headCenter.x, headPosition.headCenter.y, headPosition.headCenter.z);

                    PXCFaceData::PoseEulerAngles headRotation = {};
                    poseData->QueryPoseAngles(&headRotation);
                    bgFrame->headRotation = FRotator(headRotation.pitch, headRotation.yaw, headRotation.roll);
                }
            }
        }

        senseManager->ReleaseFrame();

        // Swap the background and mid RealSenseDataFrames
        std::unique_lock<std::mutex> lockIntermediate(midFrameMutex);
        bgFrame.swap(midFrame);
    }
}
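The camera thread only ever writes to bgFrame and publishes its work by swapping it with midFrame under midFrameMutex at the end of each loop iteration. The game thread completes the triple buffering with the mirror-image operation: under the same mutex it promotes midFrame to fgFrame, which gameplay code then reads without contention. The sketch below shows one way that consumer side could look; the function name and the frame-number check are assumptions for illustration, not the plugin's verbatim code.

// Hypothetical game-thread side of the triple buffer (assumed name SwapFrames):
// promotes the most recently published mid frame to the foreground frame that
// gameplay code reads, using the same mutex the camera thread locks above.
void RealSenseImpl::SwapFrames()
{
    std::unique_lock<std::mutex> lock(midFrameMutex);
    // Only swap if the camera thread has published a newer frame since the last swap.
    if (fgFrame->number < midFrame->number) {
        fgFrame.swap(midFrame);
    }
}

With this split, the camera thread never touches fgFrame and the game thread never touches bgFrame, so the only shared state is midFrame, guarded by a single short-lived lock on each side.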