// Renders overlays for every face in the supplied module output.
// No-op when there is no bitmap to draw onto. Landmarks are drawn only for
// faces that carry landmark data; pose is drawn for every tracked face.
void Graphics::DrawGraphics(PXCFaceData* faceOutput)
{
	assert(faceOutput != NULL);
	if (!m_bitmap)
		return;

	const int faceCount = faceOutput->QueryNumberOfDetectedFaces();
	for (int faceIndex = 0; faceIndex < faceCount; ++faceIndex)
	{
		PXCFaceData::Face* face = faceOutput->QueryFaceByIndex(faceIndex);
		assert(face != NULL);

		// Landmark overlay only when the module produced landmarks for this face.
		if (face->QueryLandmarks() != NULL)
			DrawLandmark(face);

		DrawPose(face);
	}
}
// Draws the overlays selected in the UI (landmarks, pose) for each detected
// face. Skips all work when no bitmap is available.
void FaceTrackingRenderer3D::DrawGraphics(PXCFaceData* faceOutput)
{
	assert(faceOutput != NULL);
	if (!m_bitmap)
		return;

	const int detectedFaces = faceOutput->QueryNumberOfDetectedFaces();
	for (int idx = 0; idx < detectedFaces; ++idx)
	{
		PXCFaceData::Face* face = faceOutput->QueryFaceByIndex(idx);
		assert(face != NULL);

		// Landmarks require both the UI checkbox and actual landmark data.
		const bool landmarksSelected = FaceTrackingUtilities::IsModuleSelected(m_window, IDC_LANDMARK);
		if (landmarksSelected && face->QueryLandmarks() != NULL)
			DrawLandmark(face);

		// Pose overlay is gated on the UI selection alone.
		if (FaceTrackingUtilities::IsModuleSelected(m_window, IDC_POSE))
			DrawPose(face);
	}
}
// Draws every overlay the user selected in the dialog — location rectangle,
// landmarks, pose, expressions, recognition — for each detected face.
// Each overlay additionally requires the corresponding per-face data to exist.
void FaceTrackingRenderer::DrawGraphics(PXCFaceData* faceOutput)
{
	assert(faceOutput != NULL);
	if (!m_bitmap)
		return;

	const int faceTotal = faceOutput->QueryNumberOfDetectedFaces();
	for (int n = 0; n < faceTotal; ++n)
	{
		PXCFaceData::Face* face = faceOutput->QueryFaceByIndex(n);
		assert(face != NULL);

		if (FaceTrackingUtilities::IsModuleSelected(m_window, IDC_LOCATION) && face->QueryDetection() != NULL)
			DrawLocation(face);

		if (FaceTrackingUtilities::IsModuleSelected(m_window, IDC_LANDMARK) && face->QueryLandmarks() != NULL)
			DrawLandmark(face);

		// The remaining overlays also need the face index for on-screen layout.
		if (FaceTrackingUtilities::IsModuleSelected(m_window, IDC_POSE) && face->QueryPose() != NULL)
			DrawPose(face, n);

		if (FaceTrackingUtilities::IsModuleSelected(m_window, IDC_EXPRESSIONS) && face->QueryExpressions() != NULL)
			DrawExpressions(face, n);

		if (FaceTrackingUtilities::IsModuleSelected(m_window, IDC_RECOGNITION) && face->QueryRecognition() != NULL)
			DrawRecognition(face, n);
	}
}
// Camera Processing Thread
// Initialize the RealSense SenseManager and initiate camera processing loop:
//    Step 1: Acquire new camera frame
//    Step 2: Load shared settings
//    Step 3: Perform Core SDK and middleware processing and store results
//            in background RealSenseDataFrame
//    Step 4: Swap the background and mid RealSenseDataFrames
//
// Runs until bCameraThreadRunning is cleared (presumably by another thread —
// confirm against the owner of that flag). All per-frame results are written
// to bgFrame; the only cross-thread hand-off visible here is the bgFrame/
// midFrame swap guarded by midFrameMutex at the bottom of the loop.
void RealSenseImpl::CameraThread()
{
	uint64 currentFrame = 0;
	fgFrame->number = 0;
	midFrame->number = 0;
	bgFrame->number = 0;

	pxcStatus status = senseManager->Init();
	RS_LOG_STATUS(status, "SenseManager Initialized")
	// NOTE(review): assert vanishes in release builds, so a failed Init()
	// would fall through into the acquisition loop — confirm that is intended.
	assert(status == PXC_STATUS_NO_ERROR);

	if (bFaceEnabled) {
		// Face output buffer created once here and refreshed each frame below.
		faceData = pFace->CreateOutput();
	}

	while (bCameraThreadRunning == true) {
		// Acquires new camera frame (blocking wait for all enabled streams)
		status = senseManager->AcquireFrame(true);
		assert(status == PXC_STATUS_NO_ERROR);
		bgFrame->number = ++currentFrame;

		// Performs Core SDK and middleware processing and store results
		// in background RealSenseDataFrame
		if (bCameraStreamingEnabled) {
			PXCCapture::Sample* sample = senseManager->QuerySample();
			CopyColorImageToBuffer(sample->color, bgFrame->colorImage, colorResolution.width, colorResolution.height);
			CopyDepthImageToBuffer(sample->depth, bgFrame->depthImage, depthResolution.width, depthResolution.height);
		}

		if (bScan3DEnabled) {
			// Start/stop requests are one-shot flags: applied to the scan
			// configuration here, then cleared.
			if (bScanStarted) {
				PXC3DScan::Configuration config = p3DScan->QueryConfiguration();
				config.startScan = true;
				p3DScan->SetConfiguration(config);
				bScanStarted = false;
			}
			if (bScanStopped) {
				PXC3DScan::Configuration config = p3DScan->QueryConfiguration();
				config.startScan = false;
				p3DScan->SetConfiguration(config);
				bScanStopped = false;
			}

			PXCImage* scanImage = p3DScan->AcquirePreviewImage();
			if (scanImage) {
				// Preview dimensions may change mid-scan; resize buffers first.
				UpdateScan3DImageSize(scanImage->QueryInfo());
				CopyColorImageToBuffer(scanImage, bgFrame->scanImage, scan3DResolution.width, scan3DResolution.height);
				scanImage->Release();
			}

			if (bReconstructEnabled) {
				// Reconstructs the scanned mesh to the requested file; blocks
				// this thread until finished.
				status = p3DScan->Reconstruct(scan3DFileFormat, scan3DFilename.GetCharArray().GetData());
				bReconstructEnabled = false;
				bScanCompleted = true;
			}
		}

		if (bFaceEnabled) {
			faceData->Update();
			bgFrame->headCount = faceData->QueryNumberOfDetectedFaces();
			// Only the first detected face contributes head position/rotation.
			if (bgFrame->headCount > 0) {
				PXCFaceData::Face* face = faceData->QueryFaceByIndex(0);
				PXCFaceData::PoseData* poseData = face->QueryPose();
				if (poseData) {
					PXCFaceData::HeadPosition headPosition = {};
					poseData->QueryHeadPosition(&headPosition);
					bgFrame->headPosition = FVector(headPosition.headCenter.x, headPosition.headCenter.y, headPosition.headCenter.z);
					PXCFaceData::PoseEulerAngles headRotation = {};
					poseData->QueryPoseAngles(&headRotation);
					bgFrame->headRotation = FRotator(headRotation.pitch, headRotation.yaw, headRotation.roll);
				}
			}
		}

		senseManager->ReleaseFrame();

		// Swaps background and mid RealSenseDataFrames. The lock is released
		// at the end of the loop body, so the swap is the critical section.
		std::unique_lock<std::mutex> lockIntermediate(midFrameMutex);
		bgFrame.swap(midFrame);
	}
}
void Emotions::update() { int numFaces = 0; if (mSenseMgr->AcquireFrame(true) >= PXC_STATUS_NO_ERROR) { // Emotion Data PXCEmotion *emotionDet = mSenseMgr->QueryEmotion(); PXCEmotion::EmotionData arrData[10]; fdata->Update(); int numFaces = fdata->QueryNumberOfDetectedFaces(); for (int i = 0; i < numFaces; ++i) { // FaceData PXCFaceData::Face *face = fdata->QueryFaceByIndex(i); PXCFaceData::ExpressionsData *edata = face->QueryExpressions(); emotionDet->QueryAllEmotionData(i, &arrData[0]); //Face Detection and Location if (arrData->rectangle.x > -1 && arrData->rectangle.y > -1) { //cout << arrData->rectangle.x << ", " << arrData->rectangle.y << endl; iSeeYou = true; } else iSeeYou = false; # pragma region Expression Logic if (iSeeYou) { PXCFaceData::ExpressionsData::FaceExpressionResult smileScore; edata->QueryExpression(PXCFaceData::ExpressionsData::EXPRESSION_SMILE, &smileScore); PXCFaceData::ExpressionsData::FaceExpressionResult raiseLeftBrow; edata->QueryExpression(PXCFaceData::ExpressionsData::EXPRESSION_BROW_RAISER_LEFT, &raiseLeftBrow); PXCFaceData::ExpressionsData::FaceExpressionResult raiseRightBrow; edata->QueryExpression(PXCFaceData::ExpressionsData::EXPRESSION_BROW_RAISER_RIGHT, &raiseRightBrow); PXCFaceData::ExpressionsData::FaceExpressionResult eyeClosedLeft; edata->QueryExpression(PXCFaceData::ExpressionsData::EXPRESSION_EYES_CLOSED_LEFT, &eyeClosedLeft); PXCFaceData::ExpressionsData::FaceExpressionResult eyeClosedRight; edata->QueryExpression(PXCFaceData::ExpressionsData::EXPRESSION_EYES_CLOSED_RIGHT, &eyeClosedRight); PXCFaceData::ExpressionsData::FaceExpressionResult kiss; edata->QueryExpression(PXCFaceData::ExpressionsData::EXPRESSION_KISS, &kiss); PXCFaceData::ExpressionsData::FaceExpressionResult openMouth; edata->QueryExpression(PXCFaceData::ExpressionsData::EXPRESSION_MOUTH_OPEN, &openMouth); PXCFaceData::ExpressionsData::FaceExpressionResult tongueOut; edata->QueryExpression(PXCFaceData::ExpressionsData::EXPRESSION_TONGUE_OUT, &tongueOut); if 
(smileScore.intensity > 80) cout << "smile back!" << endl; if (raiseLeftBrow.intensity > 80 && raiseRightBrow.intensity > 80) cout << "eyebrows up" << endl; if (raiseLeftBrow.intensity > 80 && raiseRightBrow.intensity < 80) cout << "eyebrow raised" << endl; if (raiseLeftBrow.intensity < 80 && raiseRightBrow.intensity > 80) cout << "eyebrow raised" << endl; if (eyeClosedLeft.intensity > 80 && eyeClosedRight.intensity > 80) cout << "eyes Closed" << endl; //else // eyes open if (kiss.intensity > 80) cout << "kissy face!" << endl; if (openMouth.intensity > 80) cout << "say Ahhhhh" << endl; if (tongueOut.intensity > 80) cout << "Stick Tongue Out" << endl; } //PXCFaceData::ExpressionsData::FaceExpressionResult score; //edata->QueryExpression(PXCFaceData::ExpressionsData::EXPRESSION_KISS, &score); //cout << score.intensity << endl; # pragma endregion //Expression Logic #pragma region Emotion Logic /*bool emotionPresent = false; int epidx = -1; pxcI32 maxScoreE = -3; pxcF32 maxscoreI = 0; for (int i = 0; i < 7; i++) { if (arrData[i].evidence < maxScoreE) continue; if (arrData[i].intensity < maxscoreI) continue; maxScoreE = arrData[i].evidence; maxscoreI = arrData[i].intensity; epidx = i; std::this_thread::sleep_for(std::chrono::milliseconds(50)); } if (maxScoreE > -1) { std::string foundEmo = ""; switch (arrData[epidx].eid) { case PXCEmotion::EMOTION_PRIMARY_ANGER: foundEmo = "Anger"; break; case PXCEmotion::EMOTION_PRIMARY_CONTEMPT: foundEmo = "Contempt"; break; case PXCEmotion::EMOTION_PRIMARY_DISGUST: foundEmo = "Disgust"; break; case PXCEmotion::EMOTION_PRIMARY_FEAR: foundEmo = "Fear"; break; case PXCEmotion::EMOTION_PRIMARY_JOY: foundEmo = "Joy"; break; case PXCEmotion::EMOTION_PRIMARY_SADNESS: foundEmo = "Sadness"; break; case PXCEmotion::EMOTION_PRIMARY_SURPRISE: foundEmo = "Surprise"; break; case PXCEmotion::EMOTION_SENTIMENT_POSITIVE: foundEmo = "Positive"; break; case PXCEmotion::EMOTION_SENTIMENT_NEGATIVE: foundEmo = "Negative"; break; case 
PXCEmotion::EMOTION_SENTIMENT_NEUTRAL: foundEmo = "Neutral"; break; } cout << "outstanding emotion = " << foundEmo << endl; } if (maxscoreI > 0.4) emotionPresent = true; if (emotionPresent) { //int spidx = -1; maxScoreE = -3; maxscoreI = 0; for (int i = 0; i < 7; i++) { if (arrData[i].evidence < maxScoreE) continue; if (arrData[i].intensity < maxscoreI) continue; maxScoreE = arrData[i].evidence; maxscoreI = arrData[i].intensity; //spidx = i; } }*/ #pragma endregion //Emotion Logic } numFaces = emotionDet->QueryNumFaces(); const PXCCapture::Sample *sample = mSenseMgr->QueryEmotionSample(); mSenseMgr->ReleaseFrame(); } }
// Runs the gaze-tracking session on the processing thread: creates and
// configures a SenseManager with the face module and the gaze algorithm,
// loads or performs gaze calibration as needed, then streams frames —
// smoothing the gaze point and publishing each frame to the renderer —
// until stopped or playback finishes. Progress and completion are reported
// to dialogWindow via Utilities::SetStatus and PostMessage commands.
void Processor::Process(HWND dialogWindow)
{
	// set startup mode
	PXCSenseManager* senseManager = session->CreateSenseManager();
	if (senseManager == NULL)
	{
		Utilities::SetStatus(dialogWindow, L"Failed to create an SDK SenseManager", statusPart);
		return;
	}

	/* Set Mode & Source */
	PXCCaptureManager* captureManager = senseManager->QueryCaptureManager();
	pxcStatus status = PXC_STATUS_NO_ERROR;
	if (Utilities::GetPlaybackState(dialogWindow))
	{
		// Playback mode: stream frames from a pre-recorded .rssdk file.
		status = captureManager->SetFileName(m_rssdkFilename, false);
		senseManager->QueryCaptureManager()->SetRealtime(true);
	}
	if (status < PXC_STATUS_NO_ERROR)
	{
		Utilities::SetStatus(dialogWindow, L"Failed to Set Record/Playback File", statusPart);
		return;
	}

	/* Set Module */
	senseManager->EnableFace();

	/* Initialize */
	Utilities::SetStatus(dialogWindow, L"Init Started", statusPart);
	PXCFaceModule* faceModule = senseManager->QueryFace();
	if (faceModule == NULL)
	{
		assert(faceModule);
		return;
	}
	PXCFaceConfiguration* config = faceModule->CreateActiveConfiguration();
	if (config == NULL)
	{
		assert(config);
		return;
	}

	// Enable Gaze Algo
	config->QueryGaze()->isEnabled = true;

	// set dominant eye (0 appears to mean "not set"; stored value is offset
	// by one from the SDK enum — confirm against the UI that writes it)
	if (dominant_eye)
	{
		PXCFaceData::GazeCalibData::DominantEye eye = (PXCFaceData::GazeCalibData::DominantEye)(dominant_eye - 1);
		config->QueryGaze()->SetDominantEye(eye);
	}

	// set tracking mode
	config->SetTrackingMode(PXCFaceConfiguration::TrackingModeType::FACE_MODE_COLOR_PLUS_DEPTH);
	config->ApplyChanges();

	// Load Calibration File
	bool need_calibration = true;
	if (isLoadCalibFile)
	{
		FILE* my_file;
		errno_t err;
		err = _wfopen_s(&my_file, m_CalibFilename, L"rb");
		if (!err && my_file)
		{
			if (calibBuffer == NULL)
			{
				calibBuffersize = config->QueryGaze()->QueryCalibDataSize();
				calibBuffer = new unsigned char[calibBuffersize];
			}
			// NOTE(review): size/count arguments look transposed versus the
			// usual fread(ptr, size, count, file) idiom; the product is the
			// same (calibBuffersize * 1 bytes) so behavior is unaffected.
			fread(calibBuffer, calibBuffersize, sizeof(pxcBYTE), my_file);
			fclose(my_file);
			pxcStatus st = config->QueryGaze()->LoadCalibration(calibBuffer, calibBuffersize);
			if (st != PXC_STATUS_NO_ERROR)
			{
				// get save file name
				calib_status = LOAD_CALIBRATION_ERROR;
				need_calibration = false;
				PostMessage(dialogWindow, WM_COMMAND, ID_CALIB_DONE, 0);
				return;
			}
		}
		isLoadCalibFile = false;
		need_calibration = false;
		PostMessage(dialogWindow, WM_COMMAND, ID_CALIB_LOADED, 0);
	}
	else if (calibBuffer)
	{
		// load existing calib stored in memory
		config->QueryGaze()->LoadCalibration(calibBuffer, calibBuffersize);
		need_calibration = false;
	}

	// init sense manager; on failure retry once with stream-profile
	// filtering removed
	if (senseManager->Init() < PXC_STATUS_NO_ERROR)
	{
		captureManager->FilterByStreamProfiles(NULL);
		if (senseManager->Init() < PXC_STATUS_NO_ERROR)
		{
			Utilities::SetStatus(dialogWindow, L"Init Failed", statusPart);
			PostMessage(dialogWindow, WM_COMMAND, ID_STOP, 0);
			return;
		}
	}
	PXCCapture::DeviceInfo info;
	senseManager->QueryCaptureManager()->QueryDevice()->QueryDeviceInfo(&info);
	CheckForDepthStream(senseManager, dialogWindow);

	AlertHandler alertHandler(dialogWindow);
	config->detection.isEnabled = true;
	config->landmarks.isEnabled = true;
	config->pose.isEnabled = true;
	config->EnableAllAlerts();
	config->SubscribeAlert(&alertHandler);
	config->ApplyChanges();
	//}

	Utilities::SetStatus(dialogWindow, L"Streaming", statusPart);
	m_output = faceModule->CreateOutput();
	int failed_counter = 0;
	bool isNotFirstFrame = false;
	bool isFinishedPlaying = false;
	bool activeapp = true; // NOTE(review): never read afterwards — dead local?
	ResetEvent(renderer->GetRenderingFinishedSignal());
	renderer->SetSenseManager(senseManager);
	renderer->SetNumberOfLandmarks(config->landmarks.numLandmarks);
	renderer->SetCallback(renderer->SignalProcessor);

	// Creating PXCSmoother instance
	PXCSmoother* smoother = NULL;
	senseManager->QuerySession()->CreateImpl<PXCSmoother>(&smoother);
	// Creating 2D smoother with quadratic algorithm with smooth value
	// NOTE(review): neither smoother nor smoother2D is Released before this
	// function returns — confirm whether the session owns them or this leaks.
	PXCSmoother::Smoother2D* smoother2D = smoother->Create2DQuadratic(1.0f);

	// acquisition loop
	if (!isStopped)
	{
		while (true)
		{
			if (isPaused)
			{
				// allow the application to pause for user input
				Sleep(200);
				continue;
			}
			// A failed acquire is treated as end-of-playback, handled below.
			if (senseManager->AcquireFrame(true) < PXC_STATUS_NO_ERROR)
			{
				isFinishedPlaying = true;
			}
			if (isNotFirstFrame)
			{
				// Do not overwrite data the renderer is still drawing.
				WaitForSingleObject(renderer->GetRenderingFinishedSignal(), INFINITE);
			}
			else
			{
				// enable back window
				if (need_calibration) EnableBackWindow();
			}
			if (isFinishedPlaying || isStopped)
			{
				if (isStopped) senseManager->ReleaseFrame();
				if (isFinishedPlaying) PostMessage(dialogWindow, WM_COMMAND, ID_STOP, 0);
				break;
			}
			m_output->Update();
			// NOTE(review): stamp is queried but never used afterwards.
			pxcI64 stamp = m_output->QueryFrameTimestamp();
			PXCCapture::Sample* sample = senseManager->QueryFaceSample();
			isNotFirstFrame = true;
			if (sample != NULL)
			{
				// Serialize against the renderer thread via the global mutex.
				DWORD dwWaitResult;
				dwWaitResult = WaitForSingleObject(g_hMutex, INFINITE);
				if (dwWaitResult == WAIT_OBJECT_0)
				{
					// check calibration state
					if (need_calibration)
					{
						// CALIBRATION FLOW
						if (m_output->QueryNumberOfDetectedFaces())
						{
							PXCFaceData::Face* trackedFace = m_output->QueryFaceByIndex(0);
							PXCFaceData::GazeCalibData* gazeData = trackedFace->QueryGazeCalibration();
							if (gazeData)
							{
								// gaze enabled check calibration
								PXCFaceData::GazeCalibData::CalibrationState state = trackedFace->QueryGazeCalibration()->QueryCalibrationState();
								if (state == PXCFaceData::GazeCalibData::CALIBRATION_NEW_POINT)
								{
									// present new point for calibration
									PXCPointI32 new_point = trackedFace->QueryGazeCalibration()->QueryCalibPoint();
									// set the cursor to that point
									eye_point_x = new_point.x;
									eye_point_y = new_point.y;
									SetCursorPos(OUT_OF_SCREEN, OUT_OF_SCREEN);
								}
								else if (state == PXCFaceData::GazeCalibData::CALIBRATION_DONE)
								{
									// store calib data in a file
									calibBuffersize = trackedFace->QueryGazeCalibration()->QueryCalibDataSize();
									if (calibBuffer == NULL) calibBuffer = new unsigned char[calibBuffersize];
									calib_status = trackedFace->QueryGazeCalibration()->QueryCalibData(calibBuffer);
									dominant_eye = trackedFace->QueryGazeCalibration()->QueryCalibDominantEye();
									// get save file name
									PostMessage(dialogWindow, WM_COMMAND, ID_CALIB_DONE, 0);
									need_calibration = false;
								}
								else if (state == PXCFaceData::GazeCalibData::CALIBRATION_IDLE)
								{
									// set the cursor beyond the screen
									eye_point_x = OUT_OF_SCREEN;
									eye_point_y = OUT_OF_SCREEN;
									SetCursorPos(OUT_OF_SCREEN, OUT_OF_SCREEN);
								}
							}
							else
							{
								// gaze not enabled stop processing
								need_calibration = false;
								PostMessage(dialogWindow, WM_COMMAND, ID_STOP, 0);
							}
						}
						else
						{
							failed_counter++;
							// wait 20 frames , if no detection happens go to failed mode
							if (failed_counter > NO_DETECTION_FOR_LONG)
							{
								// NOTE(review): 3 is a magic status — presumably a
								// "calibration failed" code; confirm against the
								// calib_status consumers.
								calib_status = 3; // failed
								need_calibration = false;
								PostMessage(dialogWindow, WM_COMMAND, ID_CALIB_DONE, 0);
							}
						}
					}
					else
					{
						// GAZE PROCESSING AFTER CALIBRATION IS DONE
						if (m_output->QueryNumberOfDetectedFaces())
						{
							PXCFaceData::Face* trackedFace = m_output->QueryFaceByIndex(0);
							// get gaze point
							if (trackedFace != NULL)
							{
								if (trackedFace->QueryGaze())
								{
									PXCFaceData::GazePoint gaze_point = trackedFace->QueryGaze()->QueryGazePoint();
									PXCPointF32 new_point;
									new_point.x = (pxcF32)gaze_point.screenPoint.x;
									new_point.y = (pxcF32)gaze_point.screenPoint.y;
									// Smoothing
									PXCPointF32 smoothed2DPoint = smoother2D->SmoothValue(new_point);
									pxcF64 horizontal_angle = trackedFace->QueryGaze()->QueryGazeHorizontalAngle();
									pxcF64 vertical_angle = trackedFace->QueryGaze()->QueryGazeVerticalAngle();
									eye_horizontal_angle = (float)horizontal_angle;
									eye_vertical_angle = (float)vertical_angle;
									eye_point_x = (int)smoothed2DPoint.x;
									eye_point_y = (int)smoothed2DPoint.y;
								}
							}
						}
					}
					// render output
					renderer->DrawBitmap(sample);
					renderer->SetOutput(m_output);
					renderer->SignalRenderer();
					if (!ReleaseMutex(g_hMutex))
					{
						throw std::exception("Failed to release mutex");
						return;
					}
				}
			}
			senseManager->ReleaseFrame();
		}
		m_output->Release();
		Utilities::SetStatus(dialogWindow, L"Stopped", statusPart);
	}
	config->Release();
	senseManager->Close();
	senseManager->Release();
}