void RegressionWorker::thread() { try { while (!should_stop()) { _hypsqueue.waitAccept(); _inqueue.peek()->waitready(); GazeHypsPtr ghyps = _inqueue.pop(); ghyps->setready(1); tpool.add_task_by_value( [ghyps, this](void) {runTasks(ghyps);} ); _hypsqueue.push(ghyps); } } catch(QueueInterruptedException) {} _hypsqueue.interrupt(); tpool.wait_for_all_tasks(); }
/// Renders one processed frame in the GUI: the (optionally mirrored) camera
/// image, the eye crops of the first face hypothesis, and a status-bar line
/// with fps, latency, frame counter and optional stream id.
void GazerGui::displayGazehyps(GazeHypsPtr gazehyps) {
    // Show the camera frame, flipped horizontally when mirroring is enabled.
    if (_mirror) {
        cv::Mat flipped;
        cv::flip(gazehyps->frame, flipped, 1);
        ui->frameView->setImage(flipped);
    } else {
        ui->frameView->setImage(gazehyps->frame);
    }
    // When at least one face hypothesis exists, display its eye regions.
    if (gazehyps->size() > 0) {
        auto& firstHyp = gazehyps->hyps(0);
        ui->eyeView->setImage(firstHyp.pupils.faceRegion());
        ui->normEyeView->setImage(firstHyp.eyePatch);
    }
    // Build the status-bar text: fps | latency | frame [| id].
    QString statusText;
    QTextStream stream(&statusText);
    stream.setRealNumberPrecision(0);
    stream.setRealNumberNotation(QTextStream::FixedNotation);
    stream << "fps: " << gazehyps->fps
           << " | latency: " << gazehyps->latency
           << " ms | frame: " << gazehyps->frameCounter;
    if (!gazehyps->id.empty()) {
        stream << " | " << QString::fromStdString(gazehyps->id);
    }
    ui->statusbar->showMessage(statusText);
}
/// Appends one tab-separated line of per-frame estimates to the open dump
/// stream: frame counter, id, label, eyelid value, horizontal and vertical
/// gaze estimates, and the mutual-gaze flag. Missing estimates are written
/// as NaN (mutual gaze defaults to false). No-op when the stream is closed.
void WorkerThread::dumpEst(ofstream& fout, GazeHypsPtr gazehyps) {
    // Guard clause: estimates file was never opened, nothing to write.
    if (!fout.is_open()) return;
    double lidValue = std::nan("not set");
    double horizGaze = std::nan("not set");
    double vertGaze = std::nan("not set");
    bool mutualGaze = false;
    // Only the first (primary) hypothesis is dumped.
    if (gazehyps->size()) {
        GazeHyp& primary = gazehyps->hyps(0);
        lidValue = primary.eyeLidClassification.get_value_or(lidValue);
        horizGaze = primary.horizontalGazeEstimation.get_value_or(horizGaze);
        vertGaze = primary.verticalGazeEstimation.get_value_or(vertGaze);
        mutualGaze = primary.isMutualGaze.get_value_or(false);
    }
    fout << gazehyps->frameCounter << "\t" << gazehyps->id << "\t"
         << gazehyps->label << "\t" << lidValue << "\t" << horizGaze << "\t"
         << vertGaze << "\t" << mutualGaze << endl;
}
/// Runs feature extraction and all regression/classification stages for every
/// face hypothesis in the frame, parallelizing the independent per-hypothesis
/// steps on the thread pool. Marks the item ready when everything completed.
void RegressionWorker::runTasks(GazeHypsPtr gazehyps) {
    for (auto& ghyp : *gazehyps) {
        // The next three tasks are scheduled concurrently.
        // NOTE: capturing gazehyps and ghyp by reference is only safe because
        // wait_for_all_tasks() below completes inside this loop iteration,
        // before either reference can go out of scope or advance.
        tpool.add_task_by_value( [&gazehyps, &ghyp](void) {ghyp.pupils = PupilFinder(gazehyps->frame, ghyp.faceParts);} );
        tpool.add_task_by_value( [&ghyp, this](void) {featureExtractor.extractLidFeatures(ghyp);} );
        tpool.add_task_by_value( [&ghyp, this](void) {featureExtractor.extractEyeHogFeatures(ghyp);} );
        // Barrier: the extractions below consume the results produced above.
        tpool.wait_for_all_tasks();
        featureExtractor.extractFaceFeatures(ghyp);
        featureExtractor.extractHorizGazeFeatures(ghyp);
        featureExtractor.extractVertGazeFeatures(ghyp);
        // Run all learners on the extracted features (presumably scheduled
        // concurrently by concurrentClassify — confirm in its definition).
        concurrentClassify(lidlearner, ghyp);
        concurrentClassify(gazelearner, ghyp);
        concurrentClassify(rellearner, ghyp);
        concurrentClassify(relativeGazeLearner, ghyp);
        concurrentClassify(vglearner, ghyp);
    }
    // Drain any classification tasks still in flight before publishing.
    tpool.wait_for_all_tasks();
    // Pairs with the setready(1) issued in RegressionWorker::thread();
    // signals consumers that this frame finished processing.
    gazehyps->setready(-1);
}
void WorkerThread::process() { MutualGazeLearner glearner(trainingParameters); RelativeGazeLearner rglearner(trainingParameters); EyeLidLearner eoclearner(trainingParameters); RelativeEyeLidLearner rellearner(trainingParameters); VerticalGazeLearner vglearner(trainingParameters); tryLoadModel(glearner, classifyGaze); tryLoadModel(eoclearner, classifyLid); tryLoadModel(rglearner, estimateGaze); tryLoadModel(rellearner, estimateLid); tryLoadModel(vglearner, estimateVerticalGaze); emit statusmsg("Setting up detector threads..."); std::unique_ptr<ImageProvider> imgProvider(getImageProvider()); FaceDetectionWorker faceworker(std::move(imgProvider), threadcount); ShapeDetectionWorker shapeworker(faceworker.hypsqueue(), modelfile, max(1, threadcount/2)); RegressionWorker regressionWorker(shapeworker.hypsqueue(), eoclearner, glearner, rglearner, rellearner, vglearner, max(1, threadcount)); emit statusmsg("Detector threads started"); #ifdef ENABLE_YARP_SUPPORT unique_ptr<YarpSender> yarpSender; if (inputType == "port") { yarpSender.reset(new YarpSender(inputParam)); } #endif ofstream ppmout; if (!streamppm.empty()) { ppmout.open(streamppm); } ofstream estimateout; if (!dumpEstimates.empty()) { estimateout.open(dumpEstimates); if (estimateout.is_open()) { writeEstHeader(estimateout); } else { cerr << "Warning: could not open " << dumpEstimates << endl; } } RlsSmoother horizGazeSmoother; RlsSmoother vertGazeSmoother; RlsSmoother lidSmoother(5, 0.95, 0.09); emit statusmsg("Entering processing loop..."); cerr << "Processing frames..." 
<< endl; TemporalStats temporalStats; while(!shouldStop) { GazeHypsPtr gazehyps; try { gazehyps = regressionWorker.hypsqueue().peek(); gazehyps->waitready(); } catch(QueueInterruptedException) { break; } cv::Mat frame = gazehyps->frame; for (auto& ghyp : *gazehyps) { if (smoothingEnabled) { horizGazeSmoother.smoothValue(ghyp.horizontalGazeEstimation); vertGazeSmoother.smoothValue(ghyp.verticalGazeEstimation); lidSmoother.smoothValue(ghyp.eyeLidClassification); } interpretHyp(ghyp); auto& pupils = ghyp.pupils; auto& faceparts = ghyp.faceParts; faceparts.draw(frame); pupils.draw(frame); glearner.visualize(ghyp); eoclearner.visualize(ghyp); rellearner.visualize(ghyp); vglearner.visualize(ghyp, verticalGazeTolerance); rglearner.visualize(ghyp, horizGazeTolerance); if (!trainLid.empty()) eoclearner.accumulate(ghyp); if (!trainGaze.empty()) glearner.accumulate(ghyp); if (!trainGazeEstimator.empty()) rglearner.accumulate(ghyp); if (!trainLidEstimator.empty()) rellearner.accumulate(ghyp); if (!trainVerticalGazeEstimator.empty()) vglearner.accumulate(ghyp); } temporalStats(gazehyps); dumpPpm(ppmout, frame); dumpEst(estimateout, gazehyps); if (showstats) temporalStats.printStats(gazehyps); #ifdef ENABLE_YARP_SUPPORT if (yarpSender) yarpSender->sendGazeHypotheses(gazehyps); #endif emit imageProcessed(gazehyps); QCoreApplication::processEvents(); if (limitFps > 0) { usleep(1e6/limitFps); } regressionWorker.hypsqueue().pop(); } regressionWorker.hypsqueue().interrupt(); regressionWorker.wait(); cerr << "Frames processed..." << endl; if (glearner.sampleCount() > 0) { glearner.train(trainGaze); } if (eoclearner.sampleCount() > 0) { eoclearner.train(trainLid); } if (vglearner.sampleCount() > 0) { vglearner.train(trainVerticalGazeEstimator); } if (rglearner.sampleCount() > 0) { rglearner.train(trainGazeEstimator); } if (rellearner.sampleCount() > 0) { rellearner.train(trainLidEstimator); } emit finished(); cerr << "Primary worker thread finished processing" << endl; }