void ITA_ForcesApp::updateRS() { if (mIsRunning) { if (mRS->AcquireFrame(false,0) >= PXC_STATUS_NO_ERROR) { mHandData->Update(); getRSCursorPos(); getRSGestureState(); auto sample = mRS->QuerySample(); if (sample != nullptr) { auto rgb = sample->color; if (rgb != nullptr) { PXCImage::ImageData rgbData; if (rgb->AcquireAccess(PXCImage::ACCESS_READ, PXCImage::PIXEL_FORMAT_RGB32, &rgbData) >= PXC_STATUS_NO_ERROR) { auto srf = Surface8u::create(rgbData.planes[0], RGB_SIZE.x, RGB_SIZE.y, rgbData.pitches[0], SurfaceChannelOrder::BGRA); auto chan = Channel8u::create(*srf); mTexRgb = gl::Texture2d::create(*chan); rgb->ReleaseAccess(&rgbData); } } } mRS->ReleaseFrame(); } } }
// Producer side of a single-slot handoff between this background task and a
// consumer thread: the previously published results are held until the
// consumer flags them as read, then one new frame is acquired and the
// hand/face module outputs are refreshed and republished.
void URealSenseTask::OnTaskTick() {
    UBackgroundTask::OnTaskTick();
    // No camera collector available (not created yet, or already torn down).
    if (collector == NULL || !collector->IsValid()){
        return;
    }
    PXCSenseManager* pp = collector->psm;
    if (consumerHasRead){
        //Resume next frame processing
        // NOTE(review): the frame was also released at the bottom of the
        // previous tick, so this may release with no frame held — confirm
        // the SDK treats that as a no-op.
        pp->ReleaseFrame();
    }
    else{
        // Consumer still owns the last results; yield and retry next tick.
        FPlatformProcess::Sleep(0.001f);
        return;
    }
    //Grab Frame
    pxcStatus sts = pp->AcquireFrame(true); //,1 for a 1ms timeout. This should all be happening off-frame anyway
    consumerHasRead = false;
    // Refresh module outputs and run the producer hooks that publish them.
    // NOTE(review): these run before 'sts' is checked, so hand/face data is
    // updated even when the acquire failed — confirm that is intended.
    collector->handData->Update();
    HandProducerTick();
    collector->faceData->Update();
    FaceProducerTick();
    //Check Frame status
    // NOTE(review): on failure we return without releasing the frame or
    // sleeping; the release is deferred to the consumerHasRead branch above.
    if (sts < PXC_STATUS_NO_ERROR)
        return;
    pp->ReleaseFrame(); //sleep after...
    FPlatformProcess::Sleep(0.001f); //1ms sleep..
}
// Acquisition thread entry point. Waits for a WM_USER request, configures the
// RealSense pipeline for the requested stream (color or depth, per-device),
// then pumps frames into the imaqkit engine until acquisition stops.
//
// FIXES vs. the original:
//  - std::auto_ptr (deprecated, removed in C++17) -> std::unique_ptr;
//  - staging buffers were new[]'d every iteration and leaked on every early
//    'break'; they are now allocated once and freed on all exit paths;
//  - error breaks left the driver critical section entered and the acquired
//    frame unreleased;
//  - psm was never null-checked and the PXCSession was never released.
DWORD WINAPI RealSenseAdaptor::acquireThread(void* param) {
    RealSenseAdaptor* adaptor = reinterpret_cast<RealSenseAdaptor*>(param);

    // Set the thread priority.
    SetThreadPriority(GetCurrentThread(), THREAD_PRIORITY_TIME_CRITICAL);

    MSG msg;
    while (GetMessage(&msg, NULL, 0, 0) > 0) {
        switch (msg.message) {
        case WM_USER: {
            // Check if a frame needs to be acquired; guard the driver for the
            // duration of this acquisition request.
            std::unique_ptr<imaqkit::IAutoCriticalSection> driverSection(imaqkit::createAutoCriticalSection(adaptor->_driverGuard, false));

            // Get frame type & dimensions.
            imaqkit::frametypes::FRAMETYPE frameType = adaptor->getFrameType();
            int imWidth = adaptor->getMaxWidth();
            int imHeight = adaptor->getMaxHeight();
            int imBand = adaptor->getNumberOfBands();
            int camera_id = adaptor->getDeviceID();

            PXCSession *session = PXCSession_Create();
            if (!session) return 0;
            PXCSenseManager *psm = session->CreateSenseManager();
            if (!psm) { session->Release(); return 0; }  // FIX: was unchecked

            // Select the device and enable the stream(s) to capture.
            if (imBand == 3) {
                if (camera_id == 1) {
                    PXCCapture::DeviceInfo dinfo = {};
                    pxcCHAR *myname = L"Intel(R) RealSense(TM) 3D Camera";
                    psm->QueryCaptureManager()->FilterByDeviceInfo(myname, dinfo.did, dinfo.didx);
                    psm->EnableStream(PXCCapture::STREAM_TYPE_COLOR, imWidth, imHeight);
                }
                else {
                    PXCCapture::DeviceInfo dinfo = {};
                    pxcCHAR *myname = L"Intel(R) RealSense(TM) 3D Camera R200";
                    psm->QueryCaptureManager()->FilterByDeviceInfo(myname, dinfo.did, dinfo.didx);
                    psm->EnableStream(PXCCapture::STREAM_TYPE_COLOR, imWidth, imHeight, 30);
                }
            }
            else {
                if (camera_id == 2) {
                    PXCCapture::DeviceInfo dinfo = {};
                    pxcCHAR *myname = L"Intel(R) RealSense(TM) 3D Camera";
                    psm->QueryCaptureManager()->FilterByDeviceInfo(myname, dinfo.did, dinfo.didx);
                    psm->EnableStream(PXCCapture::STREAM_TYPE_DEPTH, imWidth, imHeight);
                }
                else {
                    PXCCapture::DeviceInfo dinfo = {};
                    pxcCHAR *myname = L"Intel(R) RealSense(TM) 3D Camera R200";
                    psm->QueryCaptureManager()->FilterByDeviceInfo(myname, dinfo.did, dinfo.didx);
                    psm->EnableStream(PXCCapture::STREAM_TYPE_DEPTH, imWidth, imHeight, 30);
                    psm->EnableStream(PXCCapture::STREAM_TYPE_COLOR, 640, 480, 30);
                }
            }

            UtilRender *renderColor = new UtilRender(L"COLOR STREAM");
            UtilRender *renderDepth = new UtilRender(L"DEPTH STREAM");
            psm->Init();

            // FIX: allocate the staging buffers once, outside the frame loop.
            unsigned char *imBuffer1 = new unsigned char[imWidth * imHeight * imBand];
            unsigned char *imBuffer2 = new unsigned char[imWidth * imHeight * 3];

            PXCImage *colorIm, *depthIm;
            while (adaptor->isAcquisitionNotComplete() && adaptor->isAcquisitionActive()) {
                driverSection->enter();
                if (psm->AcquireFrame(true) < PXC_STATUS_NO_ERROR) {
                    driverSection->leave();  // FIX: don't hold the section on error
                    break;
                }
                // retrieve all available image samples
                PXCCapture::Sample *sample = psm->QuerySample();
                if (!sample) {
                    psm->ReleaseFrame();     // FIX: release the acquired frame
                    driverSection->leave();
                    break;
                }
                if (imBand == 3) {
                    colorIm = sample->color;
                    if (!colorIm) { psm->ReleaseFrame(); driverSection->leave(); break; }
                    PXCImage::ImageData cdata;
                    pxcStatus sts = colorIm->AcquireAccess(PXCImage::ACCESS_READ, PXCImage::PIXEL_FORMAT_RGB32, &cdata);
                    // BGRA source -> interleaved RGB destination (channel order
                    // reversed, alpha byte dropped).
                    for (int ix = 0; ix < imHeight; ix++) {
                        for (int jx = 0; jx < imWidth; jx++) {
                            for (int c = 0; c < imBand; c++) {
                                imBuffer1[(ix * imWidth + jx) * imBand + imBand - 1 - c] = *(pxcBYTE*)(cdata.planes[0] + ix * imWidth * 4 + 4 * jx + c);
                            }
                        }
                    }
                    colorIm->ReleaseAccess(&cdata);
                    renderColor->RenderFrame(colorIm);
                }
                else {
                    depthIm = sample->depth;
                    if (!depthIm) { psm->ReleaseFrame(); driverSection->leave(); break; }
                    PXCImage::ImageData ddata;
                    pxcStatus sts = depthIm->AcquireAccess(PXCImage::ACCESS_READ, PXCImage::PIXEL_FORMAT_DEPTH, &ddata);
                    // NOTE(review): 16-bit depth is truncated into an 8-bit
                    // buffer, exactly as the original did — confirm downstream
                    // expects that (a commented-out 16-bit buffer suggests it
                    // may once have been full precision).
                    for (int id = 0; id < imHeight; id++) {
                        for (int jd = 0; jd < imWidth; jd++) {
                            imBuffer2[id * imWidth + jd] = *(pxcU16*)(ddata.planes[0] + id * imWidth * 2 + jd * 2);
                        }
                    }
                    depthIm->ReleaseAccess(&ddata);
                    renderDepth->RenderFrame(depthIm);
                }
                if (adaptor->isSendFrame()) {
                    // Create a frame object.
                    imaqkit::IAdaptorFrame* frame = adaptor->getEngine()->makeFrame(frameType, imWidth, imHeight);
                    // Copy data from buffer into frame object.
                    if (imBand == 3) {
                        frame->setImage(imBuffer1, imWidth, imHeight, 0,  // X Offset from origin
                                        0);                               // Y Offset from origin
                    }
                    else {
                        frame->setImage(imBuffer2, imWidth, imHeight, 0, 0);
                    }
                    // Set image's timestamp.
                    frame->setTime(imaqkit::getCurrentTime());
                    // Send frame object to engine.
                    adaptor->getEngine()->receiveFrame(frame);
                } // if isSendFrame()
                psm->ReleaseFrame();
                // Increment the frame count.
                adaptor->incrementFrameCount();
                driverSection->leave();
            } // while(isAcquisitionNotComplete()

            // Cleanup — reached on normal completion and on every error break.
            delete[] imBuffer1;
            delete[] imBuffer2;
            delete renderColor;
            delete renderDepth;
            psm->Release();
            session->Release();  // FIX: the session itself was never released
        }
        break;
        } //switch-case WM_USER
    }//while message is not WM_QUIT
    return 0;
}
/******************************************************************************* Main entry point. Here we demonstrate, after some initial housekeeping, how to initialize the camera, start streaming, grab samples, and process them. *******************************************************************************/ int main(int argc, char *argv[]) { if (!ProcessCmdArgs(argc, argv)) { return 1; } // Check / create file directories if (xdmPath != nullptr) { if (_mkdir(xdmPath) != 0 && errno != EEXIST){ fprintf(stderr, "Error: Invalid XDM path. Error %d\n", errno); fprintf(stderr, "Terminate? [Y/n]\n"); char choice = _getch(); if (choice != 'n' && choice != 'N') { return ERROR_FILE; } xdmPath = nullptr; } else { // Remove any trailing '\' in the path since we'll add it later. if (xdmPath[strlen(xdmPath) - 1] == '\\') xdmPath[strlen(xdmPath) - 1] = '0'; } } if (bufPath != nullptr) { if (_mkdir(bufPath) != 0 && errno != EEXIST){ fprintf(stderr, "Error: Invalid Buffer path. Error %d\n", errno); fprintf(stderr, "Terminate? [Y/n]\n"); char choice = _getch(); if (choice != 'n' && choice != 'N') { return ERROR_FILE; } bufPath = nullptr; } else { // Remove any trailing '\' in the path since we'll add it later. if (bufPath[strlen(bufPath) - 1] == '\\') bufPath[strlen(bufPath) - 1] = '0'; } } // Start timer for total test execution unsigned long long programStart = GetTickCount64(); // Initialize camera and streams if (verbose) fprintf_s(stdout, "Initializing camera...\n"); // The Sense Manager is the root object for interacting with the camera. PXCSenseManager *senseManager = nullptr; senseManager = PXCSenseManager::CreateInstance(); if (!senseManager) { fprintf_s(stderr, "Unable to create the PXCSenseManager\n"); return ERROR_CAMERA; } // When enabling the streams (color and depth), the parameters must match // the capabilities of the camera. For example, 60fps for color will fail // on the DS4 / R200. 
// Here we're hard-coding the resolution and frame rate senseManager->EnableStream(PXCCapture::STREAM_TYPE_COLOR, 320, 240, 30); senseManager->EnableStream(PXCCapture::STREAM_TYPE_DEPTH, 320, 240, 30); // Initialize the PXCSenseManager pxcStatus camStatus; camStatus = senseManager->Init(); if (camStatus != PXC_STATUS_NO_ERROR) { fprintf_s(stderr, "Unable to initizlize PXCSenseManager\n"); senseManager->Release(); return ERROR_CAMERA; } PXCImage *colorImage; PXCImage *depthImage; PXCPhoto *xdmPhoto = senseManager->QuerySession()->CreatePhoto(); // These two objects come from the Intel RealSense SDK helper for rendering // camera data. Any rendering technique could be used or omitted if no // visual feedback is desired. UtilRender *renderColor = new UtilRender(L"COLOR STREAM"); UtilRender *renderDepth = new UtilRender(L"DEPTH STREAM"); // Start test if (verbose) fprintf_s(stdout, "Running...\n"); // This section may be wrapped in additional code to automate // repetitive tests. Closure provided just for convenience. { // Beginning of single test block unsigned long totalFrames = 0; unsigned long long streamStart = GetTickCount64(); for (unsigned int i = 0; i < framesPerTest; i++) { // Passing 'true' to AcquireFrame blocks until all streams are // ready (depth and color). Passing 'false' will result in // frames unaligned in time. 
camStatus = senseManager->AcquireFrame(true); if (camStatus < PXC_STATUS_NO_ERROR) { fprintf_s(stderr, "Error acquiring frame: %f\n", camStatus); break; } // Retrieve all available image samples PXCCapture::Sample *sample = senseManager->QuerySample(); if (sample == NULL) { fprintf_s(stderr, "Sample unavailable\n"); } else { totalFrames++; colorImage = sample->color; depthImage = sample->depth; // Render the frames (not necessary) if (!renderColor->RenderFrame(colorImage)) break; if (!renderDepth->RenderFrame(depthImage)) break; // Save data if requested wchar_t filePath[MAXPATH]; char fileName[18]; TimeStampString(fileName); if (xdmPath != nullptr) { // Save XDM image swprintf_s(filePath, MAXPATH, L"%S\\%S%S", xdmPath, fileName, xdmExt); xdmPhoto->ImportFromPreviewSample(sample); pxcStatus saveStatus = xdmPhoto->SaveXDM(filePath); if (saveStatus != PXC_STATUS_NO_ERROR) fprintf_s(stderr, "Error: SaveXDM\n"); } if (bufPath != NULL) { // Save depth buffer swprintf_s(filePath, MAXPATH, L"%S\\%S%S", bufPath, fileName, depBufExt); // The WriteDepthBuf function has a lot of detail about // accessing image data and should be examined. if (!WriteDepthBuf(depthImage, filePath)) { fprintf(stderr, "Error: WriteDepthBuf\n"); } } } // Unlock the current frame to fetch the next one senseManager->ReleaseFrame(); } float frameRate = (float)totalFrames / ((float)(GetTickCount64() - streamStart) / 1000); if (verbose) fprintf_s(stdout, "Frame Rate: %.2f\n", frameRate); } // End of single test block // Wrap up if(verbose) fprintf_s(stdout, "Finished in %llu seconds.\n", (GetTickCount64() - programStart) / 1000); delete renderColor; delete renderDepth; xdmPhoto->Release(); if(senseManager != nullptr) senseManager->Release(); return 0; }
// Jitter matrix_calc: grabs one depth frame from the RealSense pipeline and
// copies it (truncated to 8 bits) into the first output matrix.
//
// FIXES vs. the original:
//  - the 'goto out' error paths called ReleaseAccess() on an ImageData that
//    was never acquired (UB) and unlocked a matrix that may be null with an
//    uninitialized savelock; both are now guarded by flags;
//  - QuerySample()/sample->depth were dereferenced without a null check;
//  - the "aquire error" path forgot to set 'err' and returned JIT_ERR_NONE.
t_jit_err jit_realsense_grab_matrix_calc(t_jit_realsense_grab *x, void *inputs, void *outputs) {
    t_jit_err err = JIT_ERR_NONE;
    long out_savelock = 0;
    t_jit_matrix_info out_minfo;
    char *out_bp;
    long i, xx, yy, dimcount, planecount, dim[JIT_MATRIX_MAX_DIMCOUNT];
    void *out_matrix;
    bool matrixLocked = false;    // FIX: track what actually needs undoing at 'out:'
    bool accessAcquired = false;

    PXCSenseManager *sm = x->sm;

    // note: if there is no frame available, should probably send out previous frame (to do)
    if (sm->AcquireFrame(true) < PXC_STATUS_NO_ERROR) {
        object_error((t_object *)x, "camera not available");
        return JIT_ERR_GENERIC;
    }

    // retrieve the sample
    PXCCapture::Sample *sample = sm->QuerySample();
    if (!sample || !sample->depth) {  // FIX: was dereferenced unchecked
        object_error((t_object *)x, "camera not available");
        sm->ReleaseFrame();
        return JIT_ERR_GENERIC;
    }
    PXCImage::ImageInfo info = sample->depth->QueryInfo();
    PXCImage::ImageData data;

    // get zeroith outlet
    out_matrix = jit_object_method(outputs, _jit_sym_getindex, 0);
    // post("w %d h %d format %s", info.width, info.height, PXCImage::PixelFormatToString(info.format));

    if (x && out_matrix) {
        // lock output
        out_savelock = (long)jit_object_method(out_matrix, _jit_sym_lock, 1);
        matrixLocked = true;

        // fill out matrix info structs for input and output
        jit_object_method(out_matrix, _jit_sym_getinfo, &out_minfo);
        // get pointer to matrix
        jit_object_method(out_matrix, _jit_sym_getdata, &out_bp);
        if (!out_bp){ err = JIT_ERR_INVALID_OUTPUT; goto out; }

        if (!(sample->depth->AcquireAccess(PXCImage::ACCESS_READ, PXCImage::PIXEL_FORMAT_DEPTH, &data) >= PXC_STATUS_NO_ERROR)) {
            object_error((t_object *)x, "aquire error");
            err = JIT_ERR_GENERIC;  // FIX: previously returned JIT_ERR_NONE
            goto out;
        }
        accessAcquired = true;

        // get dimensions/planecount
        dimcount = out_minfo.dimcount;
        planecount = out_minfo.planecount;
        if (dimcount != 2){ err = JIT_ERR_MISMATCH_DIM; goto out; }

        for (i = 0; i < dimcount; i++) {
            dim[i] = out_minfo.dim[i];
        }

        {
            short *dpixels = (short *)data.planes[0];
            int dpitch = data.pitches[0] / sizeof(short);
            // Copy the depth map into the char matrix; 16-bit depth is
            // truncated to 8 bits, exactly as before.
            // NOTE(review): rows are written contiguously via out_bp++ — this
            // assumes the matrix has no row padding (dimstride == dim[0]).
            for (yy = 0; yy < dim[1]; yy++) {
                for (xx = 0; xx < dim[0]; xx++) {
                    *out_bp++ = (char)dpixels[yy * dpitch + xx];
                }
            }
        }
    }
    else {
        err = JIT_ERR_INVALID_PTR;
    }

out:
    if (accessAcquired) sample->depth->ReleaseAccess(&data);  // FIX: only if acquired
    sm->ReleaseFrame();
    if (matrixLocked) jit_object_method(out_matrix, _jit_sym_lock, out_savelock);  // FIX: only if locked
    return err;
}
int main(char* args,char* argsc) { pxcStatus status = PXC_STATUS_ALLOC_FAILED; PXCSenseManager *session = PXCSenseManager::CreateInstance(); if(!session) { printf("Instance create failed\n"); return 10; } status = session->EnableHand(nullptr); if(status = PXC_STATUS_NO_ERROR) { printf("Hand data unavailable\n"); return 20; } session->Init(); if(status = PXC_STATUS_NO_ERROR) { printf("init failed\n"); return 30; } PXCHandModule* handTracker = session->QueryHand(); if(status = PXC_STATUS_NO_ERROR) { printf("no hand tracking support\n"); return 40; } PXCHandConfiguration* handConfig = handTracker->CreateActiveConfiguration(); handConfig->EnableAllGestures(); handConfig->ApplyChanges(); PXCHandData* handData = handTracker->CreateOutput(); bool running = true; status = session->EnableStream(PXCCapture::StreamType::STREAM_TYPE_DEPTH,1920,1080,30.0); if (status = PXC_STATUS_NO_ERROR) { printf("Unknown error when enabling stream."); return 50; } while(running) { //printf("Acquire frame. "); status = session->AcquireFrame(true); //printf("Got frame.\n"); if(status >= PXC_STATUS_NO_ERROR) { printf("He's dead Jim.\n"); return 50; } handData->Update(); //printf("Got %i gestures for %i hands.\n", handData->QueryFiredGesturesNumber(),handData->QueryNumberOfHands()); for(int i=0; i<handData->QueryFiredGesturesNumber(); i++) { PXCHandData::GestureData gestureData; handData->QueryFiredGestureData(i,gestureData); wchar_t* name = (wchar_t*)gestureData.name; std::wstring nameStr(name); std::wcout << nameStr << std::endl; //printf("%s - len %d\n",nameStr.c_str(), nameStr.size()); if(nameStr == L"v_sign") { running = false; } } PXCCapture::Sample* capture = session->QuerySample(); PXCImage* depthImage = capture->depth; //std::this_thread::sleep_for(std::chrono::milliseconds(10)); session->ReleaseFrame(); } session->Release(); return 0; }
void Emotions::update() { int numFaces = 0; if (mSenseMgr->AcquireFrame(true) >= PXC_STATUS_NO_ERROR) { // Emotion Data PXCEmotion *emotionDet = mSenseMgr->QueryEmotion(); PXCEmotion::EmotionData arrData[10]; fdata->Update(); int numFaces = fdata->QueryNumberOfDetectedFaces(); for (int i = 0; i < numFaces; ++i) { // FaceData PXCFaceData::Face *face = fdata->QueryFaceByIndex(i); PXCFaceData::ExpressionsData *edata = face->QueryExpressions(); emotionDet->QueryAllEmotionData(i, &arrData[0]); //Face Detection and Location if (arrData->rectangle.x > -1 && arrData->rectangle.y > -1) { //cout << arrData->rectangle.x << ", " << arrData->rectangle.y << endl; iSeeYou = true; } else iSeeYou = false; # pragma region Expression Logic if (iSeeYou) { PXCFaceData::ExpressionsData::FaceExpressionResult smileScore; edata->QueryExpression(PXCFaceData::ExpressionsData::EXPRESSION_SMILE, &smileScore); PXCFaceData::ExpressionsData::FaceExpressionResult raiseLeftBrow; edata->QueryExpression(PXCFaceData::ExpressionsData::EXPRESSION_BROW_RAISER_LEFT, &raiseLeftBrow); PXCFaceData::ExpressionsData::FaceExpressionResult raiseRightBrow; edata->QueryExpression(PXCFaceData::ExpressionsData::EXPRESSION_BROW_RAISER_RIGHT, &raiseRightBrow); PXCFaceData::ExpressionsData::FaceExpressionResult eyeClosedLeft; edata->QueryExpression(PXCFaceData::ExpressionsData::EXPRESSION_EYES_CLOSED_LEFT, &eyeClosedLeft); PXCFaceData::ExpressionsData::FaceExpressionResult eyeClosedRight; edata->QueryExpression(PXCFaceData::ExpressionsData::EXPRESSION_EYES_CLOSED_RIGHT, &eyeClosedRight); PXCFaceData::ExpressionsData::FaceExpressionResult kiss; edata->QueryExpression(PXCFaceData::ExpressionsData::EXPRESSION_KISS, &kiss); PXCFaceData::ExpressionsData::FaceExpressionResult openMouth; edata->QueryExpression(PXCFaceData::ExpressionsData::EXPRESSION_MOUTH_OPEN, &openMouth); PXCFaceData::ExpressionsData::FaceExpressionResult tongueOut; edata->QueryExpression(PXCFaceData::ExpressionsData::EXPRESSION_TONGUE_OUT, &tongueOut); if 
(smileScore.intensity > 80) cout << "smile back!" << endl; if (raiseLeftBrow.intensity > 80 && raiseRightBrow.intensity > 80) cout << "eyebrows up" << endl; if (raiseLeftBrow.intensity > 80 && raiseRightBrow.intensity < 80) cout << "eyebrow raised" << endl; if (raiseLeftBrow.intensity < 80 && raiseRightBrow.intensity > 80) cout << "eyebrow raised" << endl; if (eyeClosedLeft.intensity > 80 && eyeClosedRight.intensity > 80) cout << "eyes Closed" << endl; //else // eyes open if (kiss.intensity > 80) cout << "kissy face!" << endl; if (openMouth.intensity > 80) cout << "say Ahhhhh" << endl; if (tongueOut.intensity > 80) cout << "Stick Tongue Out" << endl; } //PXCFaceData::ExpressionsData::FaceExpressionResult score; //edata->QueryExpression(PXCFaceData::ExpressionsData::EXPRESSION_KISS, &score); //cout << score.intensity << endl; # pragma endregion //Expression Logic #pragma region Emotion Logic /*bool emotionPresent = false; int epidx = -1; pxcI32 maxScoreE = -3; pxcF32 maxscoreI = 0; for (int i = 0; i < 7; i++) { if (arrData[i].evidence < maxScoreE) continue; if (arrData[i].intensity < maxscoreI) continue; maxScoreE = arrData[i].evidence; maxscoreI = arrData[i].intensity; epidx = i; std::this_thread::sleep_for(std::chrono::milliseconds(50)); } if (maxScoreE > -1) { std::string foundEmo = ""; switch (arrData[epidx].eid) { case PXCEmotion::EMOTION_PRIMARY_ANGER: foundEmo = "Anger"; break; case PXCEmotion::EMOTION_PRIMARY_CONTEMPT: foundEmo = "Contempt"; break; case PXCEmotion::EMOTION_PRIMARY_DISGUST: foundEmo = "Disgust"; break; case PXCEmotion::EMOTION_PRIMARY_FEAR: foundEmo = "Fear"; break; case PXCEmotion::EMOTION_PRIMARY_JOY: foundEmo = "Joy"; break; case PXCEmotion::EMOTION_PRIMARY_SADNESS: foundEmo = "Sadness"; break; case PXCEmotion::EMOTION_PRIMARY_SURPRISE: foundEmo = "Surprise"; break; case PXCEmotion::EMOTION_SENTIMENT_POSITIVE: foundEmo = "Positive"; break; case PXCEmotion::EMOTION_SENTIMENT_NEGATIVE: foundEmo = "Negative"; break; case 
PXCEmotion::EMOTION_SENTIMENT_NEUTRAL: foundEmo = "Neutral"; break; } cout << "outstanding emotion = " << foundEmo << endl; } if (maxscoreI > 0.4) emotionPresent = true; if (emotionPresent) { //int spidx = -1; maxScoreE = -3; maxscoreI = 0; for (int i = 0; i < 7; i++) { if (arrData[i].evidence < maxScoreE) continue; if (arrData[i].intensity < maxscoreI) continue; maxScoreE = arrData[i].evidence; maxscoreI = arrData[i].intensity; //spidx = i; } }*/ #pragma endregion //Emotion Logic } numFaces = emotionDet->QueryNumFaces(); const PXCCapture::Sample *sample = mSenseMgr->QueryEmotionSample(); mSenseMgr->ReleaseFrame(); } }
// Full face/gaze processing loop for the dialog: configures the SenseManager
// (live camera or rssdk playback), enables the face module with gaze tracking,
// optionally loads a stored gaze calibration, then runs the acquisition loop —
// first driving the on-screen calibration flow, and once calibrated, smoothing
// and publishing the gaze point (eye_point_x/y, angles) for the renderer.
// Progress and state changes are reported to the dialog via Utilities::SetStatus
// and WM_COMMAND PostMessage notifications.
void Processor::Process(HWND dialogWindow) {
    // set startup mode
    PXCSenseManager* senseManager = session->CreateSenseManager();
    if (senseManager == NULL) {
        Utilities::SetStatus(dialogWindow, L"Failed to create an SDK SenseManager", statusPart);
        return;
    }
    /* Set Mode & Source */
    PXCCaptureManager* captureManager = senseManager->QueryCaptureManager();
    pxcStatus status = PXC_STATUS_NO_ERROR;
    if (Utilities::GetPlaybackState(dialogWindow)) {
        // Playback mode: stream from a recorded .rssdk file at real-time pace.
        status = captureManager->SetFileName(m_rssdkFilename, false);
        senseManager->QueryCaptureManager()->SetRealtime(true);
    }
    if (status < PXC_STATUS_NO_ERROR) {
        Utilities::SetStatus(dialogWindow, L"Failed to Set Record/Playback File", statusPart);
        return;
    }
    /* Set Module */
    senseManager->EnableFace();
    /* Initialize */
    Utilities::SetStatus(dialogWindow, L"Init Started", statusPart);
    PXCFaceModule* faceModule = senseManager->QueryFace();
    if (faceModule == NULL) {
        assert(faceModule);
        return;
    }
    PXCFaceConfiguration* config = faceModule->CreateActiveConfiguration();
    if (config == NULL) {
        assert(config);
        return;
    }
    // Enable Gaze Algo
    config->QueryGaze()->isEnabled = true;
    // set dominant eye (dominant_eye is 1-based; 0 means "not set")
    if (dominant_eye) {
        PXCFaceData::GazeCalibData::DominantEye eye = (PXCFaceData::GazeCalibData::DominantEye)(dominant_eye - 1);
        config->QueryGaze()->SetDominantEye(eye);
    }
    // set tracking mode
    config->SetTrackingMode(PXCFaceConfiguration::TrackingModeType::FACE_MODE_COLOR_PLUS_DEPTH);
    config->ApplyChanges();
    // Load Calibration File — when a stored calibration exists (file or
    // in-memory buffer), the interactive calibration flow below is skipped.
    bool need_calibration = true;
    if (isLoadCalibFile) {
        FILE* my_file;
        errno_t err;
        err = _wfopen_s(&my_file, m_CalibFilename, L"rb");
        if (!err && my_file) {
            if (calibBuffer == NULL) {
                calibBuffersize = config->QueryGaze()->QueryCalibDataSize();
                calibBuffer = new unsigned char[calibBuffersize];
            }
            // NOTE(review): fread's size/count arguments look swapped
            // (count=calibBuffersize, size=sizeof(pxcBYTE)); total bytes read
            // is the same, but confirm the intended order.
            fread(calibBuffer, calibBuffersize, sizeof(pxcBYTE), my_file);
            fclose(my_file);
            pxcStatus st = config->QueryGaze()->LoadCalibration(calibBuffer, calibBuffersize);
            if (st != PXC_STATUS_NO_ERROR) {
                // get save file name
                calib_status = LOAD_CALIBRATION_ERROR;
                need_calibration = false;
                PostMessage(dialogWindow, WM_COMMAND, ID_CALIB_DONE, 0);
                return;
            }
        }
        isLoadCalibFile = false;
        need_calibration = false;
        PostMessage(dialogWindow, WM_COMMAND, ID_CALIB_LOADED, 0);
    }
    else if (calibBuffer) {
        // load existing calib stored in memory
        config->QueryGaze()->LoadCalibration(calibBuffer, calibBuffersize);
        need_calibration = false;
    }
    // init sense manager; on failure retry once with the stream-profile
    // filter cleared (lets the SDK pick any workable profile).
    if (senseManager->Init() < PXC_STATUS_NO_ERROR) {
        captureManager->FilterByStreamProfiles(NULL);
        if (senseManager->Init() < PXC_STATUS_NO_ERROR) {
            Utilities::SetStatus(dialogWindow, L"Init Failed", statusPart);
            PostMessage(dialogWindow, WM_COMMAND, ID_STOP, 0);
            return;
        }
    }
    PXCCapture::DeviceInfo info;
    senseManager->QueryCaptureManager()->QueryDevice()->QueryDeviceInfo(&info);
    CheckForDepthStream(senseManager, dialogWindow);
    AlertHandler alertHandler(dialogWindow);
    // Enable detection/landmarks/pose and alert delivery now that the
    // pipeline is up, then re-apply the configuration.
    config->detection.isEnabled = true;
    config->landmarks.isEnabled = true;
    config->pose.isEnabled = true;
    config->EnableAllAlerts();
    config->SubscribeAlert(&alertHandler);
    config->ApplyChanges();
    Utilities::SetStatus(dialogWindow, L"Streaming", statusPart);
    m_output = faceModule->CreateOutput();
    int failed_counter = 0;
    bool isNotFirstFrame = false;
    bool isFinishedPlaying = false;
    bool activeapp = true;  // NOTE(review): never read — candidate for removal
    ResetEvent(renderer->GetRenderingFinishedSignal());
    renderer->SetSenseManager(senseManager);
    renderer->SetNumberOfLandmarks(config->landmarks.numLandmarks);
    renderer->SetCallback(renderer->SignalProcessor);
    // Creating PXCSmoother instance
    PXCSmoother* smoother = NULL;
    senseManager->QuerySession()->CreateImpl<PXCSmoother>(&smoother);
    // Creating 2D smoother with quadratic algorithm with smooth value
    // NOTE(review): smoother/smoother2D are never Released — possible leak.
    PXCSmoother::Smoother2D* smoother2D = smoother->Create2DQuadratic(1.0f);
    // acquisition loop
    if (!isStopped) {
        while (true) {
            if (isPaused) {
                // allow the application to pause for user input
                Sleep(200);
                continue;
            }
            // A failed acquire is how playback signals end-of-file.
            if (senseManager->AcquireFrame(true) < PXC_STATUS_NO_ERROR) {
                isFinishedPlaying = true;
            }
            if (isNotFirstFrame) {
                // Wait for the renderer to finish with the previous frame
                // before overwriting m_output.
                WaitForSingleObject(renderer->GetRenderingFinishedSignal(), INFINITE);
            }
            else {
                // enable back window
                if (need_calibration) EnableBackWindow();
            }
            if (isFinishedPlaying || isStopped) {
                // NOTE(review): only the isStopped path releases the frame;
                // confirm a failed AcquireFrame leaves nothing to release.
                if (isStopped) senseManager->ReleaseFrame();
                if (isFinishedPlaying) PostMessage(dialogWindow, WM_COMMAND, ID_STOP, 0);
                break;
            }
            m_output->Update();
            pxcI64 stamp = m_output->QueryFrameTimestamp();  // NOTE(review): unused
            PXCCapture::Sample* sample = senseManager->QueryFaceSample();
            isNotFirstFrame = true;
            if (sample != NULL) {
                // Serialize access to the gaze globals shared with the UI thread.
                DWORD dwWaitResult;
                dwWaitResult = WaitForSingleObject(g_hMutex, INFINITE);
                if (dwWaitResult == WAIT_OBJECT_0) {
                    // check calibration state
                    if (need_calibration) {
                        // CALIBRATION FLOW
                        if (m_output->QueryNumberOfDetectedFaces()) {
                            PXCFaceData::Face* trackedFace = m_output->QueryFaceByIndex(0);
                            PXCFaceData::GazeCalibData* gazeData = trackedFace->QueryGazeCalibration();
                            if (gazeData) {
                                // gaze enabled check calibration
                                PXCFaceData::GazeCalibData::CalibrationState state = trackedFace->QueryGazeCalibration()->QueryCalibrationState();
                                if (state == PXCFaceData::GazeCalibData::CALIBRATION_NEW_POINT) {
                                    // present new point for calibration
                                    PXCPointI32 new_point = trackedFace->QueryGazeCalibration()->QueryCalibPoint();
                                    // set the cursor to that point
                                    eye_point_x = new_point.x;
                                    eye_point_y = new_point.y;
                                    SetCursorPos(OUT_OF_SCREEN, OUT_OF_SCREEN);
                                }
                                else if (state == PXCFaceData::GazeCalibData::CALIBRATION_DONE) {
                                    // store calib data in a file
                                    calibBuffersize = trackedFace->QueryGazeCalibration()->QueryCalibDataSize();
                                    if (calibBuffer == NULL) calibBuffer = new unsigned char[calibBuffersize];
                                    calib_status = trackedFace->QueryGazeCalibration()->QueryCalibData(calibBuffer);
                                    dominant_eye = trackedFace->QueryGazeCalibration()->QueryCalibDominantEye();
                                    // get save file name
                                    PostMessage(dialogWindow, WM_COMMAND, ID_CALIB_DONE, 0);
                                    need_calibration = false;
                                }
                                else if (state == PXCFaceData::GazeCalibData::CALIBRATION_IDLE) {
                                    // set the cursor beyond the screen
                                    eye_point_x = OUT_OF_SCREEN;
                                    eye_point_y = OUT_OF_SCREEN;
                                    SetCursorPos(OUT_OF_SCREEN, OUT_OF_SCREEN);
                                }
                            }
                            else {
                                // gaze not enabled stop processing
                                need_calibration = false;
                                PostMessage(dialogWindow, WM_COMMAND, ID_STOP, 0);
                            }
                        }
                        else {
                            failed_counter++;
                            // wait 20 frames , if no detection happens go to failed mode
                            if (failed_counter > NO_DETECTION_FOR_LONG) {
                                calib_status = 3; // failed
                                need_calibration = false;
                                PostMessage(dialogWindow, WM_COMMAND, ID_CALIB_DONE, 0);
                            }
                        }
                    }
                    else {
                        // GAZE PROCESSING AFTER CALIBRATION IS DONE
                        if (m_output->QueryNumberOfDetectedFaces()) {
                            PXCFaceData::Face* trackedFace = m_output->QueryFaceByIndex(0);
                            // get gaze point
                            if (trackedFace != NULL) {
                                if (trackedFace->QueryGaze()) {
                                    PXCFaceData::GazePoint gaze_point = trackedFace->QueryGaze()->QueryGazePoint();
                                    PXCPointF32 new_point;
                                    new_point.x = (pxcF32)gaze_point.screenPoint.x;
                                    new_point.y = (pxcF32)gaze_point.screenPoint.y;
                                    // Smoothing
                                    PXCPointF32 smoothed2DPoint = smoother2D->SmoothValue(new_point);
                                    pxcF64 horizontal_angle = trackedFace->QueryGaze()->QueryGazeHorizontalAngle();
                                    pxcF64 vertical_angle = trackedFace->QueryGaze()->QueryGazeVerticalAngle();
                                    // Publish the smoothed gaze state for the UI.
                                    eye_horizontal_angle = (float)horizontal_angle;
                                    eye_vertical_angle = (float)vertical_angle;
                                    eye_point_x = (int)smoothed2DPoint.x;
                                    eye_point_y = (int)smoothed2DPoint.y;
                                }
                            }
                        }
                    }
                    // render output
                    renderer->DrawBitmap(sample);
                    renderer->SetOutput(m_output);
                    renderer->SignalRenderer();
                    if (!ReleaseMutex(g_hMutex)) {
                        // NOTE(review): std::exception(const char*) is an MSVC
                        // extension; the 'return' after it is unreachable.
                        throw std::exception("Failed to release mutex");
                        return;
                    }
                }
            }
            senseManager->ReleaseFrame();
        }
        m_output->Release();
        Utilities::SetStatus(dialogWindow, L"Stopped", statusPart);
    }
    config->Release();
    senseManager->Close();
    senseManager->Release();
}