int FFmpegTagger::IORead(void *opaque, uint8_t *buf, int buf_size) { URLContext* pUrlContext = (URLContext*)opaque; std::istream* pInputStream = (std::istream*)pUrlContext->priv_data; if (!pInputStream) { LOGNS(Omm::AvStream, avstream, error, "IORead failed, std::istream not set"); return -1; } pInputStream->read((char*)buf, buf_size); Poco::Timestamp::TimeDiff time = _timestamp.elapsed(); _timestamp.update(); Poco::Timestamp::TimeDiff startTime = _startTimestamp.elapsed(); if (time <= 0) time = 1; if (startTime <= 0) startTime = 1; /* guard the rate calculations below against division by zero */ int bytes = pInputStream->gcount(); _totalBytes += bytes; if (!pInputStream->good()) { LOGNS(Omm::AvStream, avstream, error, "IORead failed to read from std::istream"); return -1; } LOGNS(Omm::AvStream, avstream, trace, "IORead() bytes read: " + Poco::NumberFormatter::format(bytes) + " in " + Poco::NumberFormatter::format(time/1000.0, 3) + " msec (" + Poco::NumberFormatter::format(bytes*1000/time) + " kB/s), total: " + Poco::NumberFormatter::format(_totalBytes/1000) + " kB in " + Poco::NumberFormatter::format(startTime/(Poco::Timestamp::TimeDiff)1000000) + " sec (" + Poco::NumberFormatter::format((Poco::Timestamp::TimeDiff)(_totalBytes*1000)/startTime) + " kB/s)"); return bytes; }
bool Debounce(void) { if (fpsTimeLast.isElapsed(debounceTimeout.totalMicroseconds())) { fpsTimeLast.update(); return true; } else { return false; } }
bool Debounce(int dtime) { const Poco::Timespan TimeOut(0, 1000*dtime); if (fpsTimeLast.isElapsed(TimeOut.totalMicroseconds())) { fpsTimeLast.update(); return true; } else { return false; } }
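A minimal sketch of how these Debounce() helpers might be held and driven; the surrounding struct and the 200 ms default are illustrative assumptions, only fpsTimeLast and debounceTimeout come from the snippets above.

#include <Poco/Timestamp.h>
#include <Poco/Timespan.h>

struct DebounceState {
    Poco::Timestamp fpsTimeLast;                    // time of the last accepted event
    Poco::Timespan debounceTimeout{0, 200 * 1000};  // (seconds, microseconds) = 200 ms window

    bool Debounce() {
        if (fpsTimeLast.isElapsed(debounceTimeout.totalMicroseconds())) {
            fpsTimeLast.update();   // enough time has passed: accept and restart the window
            return true;
        }
        return false;               // still inside the debounce window: drop the event
    }
};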
void Timer::run() { Poco::Timestamp now; long interval(0); do { long sleep(0); do { now.update(); sleep = static_cast<long>((_nextInvocation - now)/1000); if (sleep < 0) { if (interval == 0) { sleep = 0; break; } _nextInvocation += static_cast<Timestamp::TimeVal>(interval)*1000; ++_skipped; } } while (sleep < 0); if (_wakeUp.tryWait(sleep)) { Poco::FastMutex::ScopedLock lock(_mutex); _nextInvocation.update(); interval = _periodicInterval; } else { try { _pCallback->invoke(*this); } catch (Poco::Exception& exc) { Poco::ErrorHandler::handle(exc); } catch (std::exception& exc) { Poco::ErrorHandler::handle(exc); } catch (...) { Poco::ErrorHandler::handle(); } interval = _periodicInterval; } _nextInvocation += static_cast<Timestamp::TimeVal>(interval)*1000; _skipped = 0; } while (interval > 0); _done.set(); }
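This run() loop appears to be the worker behind Poco::Timer; a minimal usage sketch (the handler class and interval values are illustrative, the Poco calls are the library's documented API):

#include <Poco/Timer.h>
#include <Poco/Thread.h>
#include <iostream>

class TickHandler {
public:
    void onTimer(Poco::Timer& timer) {
        std::cout << "tick, skipped callbacks: " << timer.skipped() << std::endl;
    }
};

int main() {
    TickHandler handler;
    Poco::Timer timer(500, 250);  // first callback after 500 ms, then every 250 ms
    timer.start(Poco::TimerCallback<TickHandler>(handler, &TickHandler::onTimer));
    Poco::Thread::sleep(2000);    // let the timer fire a few times
    timer.stop();                 // waits for a pending callback to finish
    return 0;
}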
int main(int argc, char**argv) { dsosg::DSOSG *dsosg; Poco::Timestamp time; bool done; osg::ArgumentParser arguments(&argc, argv); arguments.getApplicationUsage()->setApplicationName(arguments.getApplicationName()); arguments.getApplicationUsage()->setDescription("Manual display/camera calibration utility"); arguments.getApplicationUsage()->addCommandLineOption("--config <filename>","Display server config JSON file"); arguments.getApplicationUsage()->addCommandLineOption("--display-mode <name>","Display mode"); osg::ApplicationUsage::Type help = arguments.readHelpType(); if (help != osg::ApplicationUsage::NO_HELP) { arguments.getApplicationUsage()->write(std::cout); exit(0); } std::string config_filename = "~/flyvr-devel/flyvr/flyvr/config/config.json"; while(arguments.read("--config", config_filename)); std::string display_mode = "vr_display"; while(arguments.read("--display-mode", display_mode)); std::string flyvr_basepath = "/home/john/Programming/flyvr.git/flyvr/"; float observer_radius = 0.01; bool two_pass = false; bool show_geom_coords = false; osg::Vec3 observer_position(0,0,0); osg::Quat observer_orientation(0,0,0,1); dsosg = new dsosg::DSOSG( flyvr_basepath, display_mode, observer_radius, config_filename, two_pass, show_geom_coords); dsosg->setup_viewer("display_server","{}"); dsosg->set_stimulus_plugin("Stimulus3DDemo"); done = false; while (!done) { double now; time.update(); now = time.epochMicroseconds() / 1e6; /* microseconds to seconds (double keeps sub-second precision) */ dsosg->update(now, observer_position, observer_orientation); dsosg->frame(); done = dsosg->done(); } return 0; }
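For reference, a tiny sketch of the intended time conversion in the render loop above, assuming the stimulus update wants wall-clock seconds as a double:

#include <Poco/Timestamp.h>
#include <iostream>

int main() {
    Poco::Timestamp time;
    time.update();                                        // take the current wall-clock time
    double nowSeconds = time.epochMicroseconds() / 1e6;   // microseconds -> seconds since the Unix epoch
    std::cout << nowSeconds << std::endl;
    return 0;
}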
ByteIOContext* FFmpegTagger::initIo(std::istream& istr, bool isSeekable, unsigned char* pIoBuffer) { static char streamName[] = "std::istream"; // FIXME: deallocate URLContext after stream has finished. URLContext* pUrlContext = new URLContext; pUrlContext->is_streamed = 1; // pUrlContext->priv_data = 0; pUrlContext->priv_data = &istr; pUrlContext->filename = streamName; pUrlContext->prot = new URLProtocol; pUrlContext->prot->name = "OMM avio wrapper for std::iostream"; pUrlContext->prot->next = 0; pUrlContext->prot->url_open = 0; // pUrlContext->prot->url_open = (int (*)(URLContext *, const char *, int))IOOpen; pUrlContext->prot->url_read = (int (*) (URLContext *, unsigned char *, int))FFmpegTagger::IORead; pUrlContext->prot->url_write = 0; if (isSeekable) { pUrlContext->prot->url_seek = (int64_t (*) (URLContext *, int64_t, int))FFmpegTagger::IOSeek; } else { pUrlContext->prot->url_seek = 0; } pUrlContext->prot->url_close = 0; // pUrlContext->prot->url_close = (int (*)(URLContext *))IOClose; LOG(ffmpeg, trace, "ffmpeg::av_alloc_put_byte() ..."); ByteIOContext* pIoContext; if (isSeekable) { pIoContext = av_alloc_put_byte(pIoBuffer, _IoBufferSize, 0, pUrlContext, FFmpegTagger::IORead, 0, FFmpegTagger::IOSeek); } else { pIoContext = av_alloc_put_byte(pIoBuffer, _IoBufferSize, 0, pUrlContext, FFmpegTagger::IORead, 0, 0); pIoContext->is_streamed = 1; } pIoContext->max_packet_size = _IoBufferSize; _timestamp.update(); _startTimestamp.update(); return pIoContext; }
void XplDevice::run ( void ) { if ( !m_bInitialised ) { assert ( 0 ); return; } while ( !m_bExitThread ) { // Deal with heartbeats int64_t currentTime; Poco::Timestamp tst; tst.update(); currentTime = tst.epochMicroseconds(); //GetSystemTimeAsFileTime( (FILETIME*)&currentTime ); if ( m_nextHeartbeat <= currentTime ) { poco_debug( devLog, "Sending heartbeat" ); // It is time to send a heartbeat if ( m_bConfigRequired ) { // Send a config heartbeat, then calculate the time of the next one m_pComms->SendConfigHeartbeat ( m_completeId, m_heartbeatInterval, m_version ); } else { // Send a heartbeat, then calculate the time of the next one m_pComms->SendHeartbeat ( m_completeId, m_heartbeatInterval, m_version ); } SetNextHeartbeatTime(); } // Calculate the time (in milliseconds) until the next heartbeat int32 heartbeatTimeout = ( int32 ) ( ( m_nextHeartbeat - currentTime ) / 1000 ); // Divide by 1000 to convert microseconds to milliseconds. poco_debug ( devLog, "Sleeping " + NumberFormatter::format ( heartbeatTimeout/1000 ) + " seconds till next hbeat" ); m_hRxInterrupt->tryWait ( heartbeatTimeout ); //Thread::sleep(); poco_debug ( devLog, "Woken up for hbeat or interrupt" ); } // cout << "exiting dev thread (ret)\n"; return; }
void XplDevice::SetNextHeartbeatTime() { // Set the new heartbeat time int64_t currentTime; Poco::Timestamp tst; tst.update(); currentTime = tst.epochMicroseconds(); //GetSystemTimeAsFileTime( (FILETIME*)&currentTime ); // If we're waiting for a hub, we have to send at more // rapid intervals - every 3 seconds for the first two // minutes, then once every 30 seconds after that. if ( m_bWaitingForHub ) { if ( m_rapidHeartbeatCounter ) { // This counter starts at 40 for 2 minutes of // heartbeats at 3 second intervals. --m_rapidHeartbeatCounter; m_nextHeartbeat = currentTime + ( ( int64_t ) c_rapidHeartbeatFastInterval * 1000000l ); } else { // slow interval (30 seconds), in microseconds m_nextHeartbeat = currentTime + ( ( int64_t ) c_rapidHeartbeatSlowInterval * 1000000l ); } } else { // It is time to send a heartbeat if ( m_bConfigRequired ) { // one minute, in microseconds m_nextHeartbeat = currentTime + 60*1000000l; } else { // 60000000 is one minute in the microseconds // that Poco::Timestamp measures the system time in. m_nextHeartbeat = currentTime + ( ( int64_t ) m_heartbeatInterval * 60*1000000l ); } } }
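A condensed sketch of the deadline arithmetic used by run() and SetNextHeartbeatTime() above, assuming the heartbeat interval is given in minutes as the comments suggest; the function name and the Poco::Event parameter are illustrative:

#include <Poco/Timestamp.h>
#include <Poco/Event.h>

// Schedule the next heartbeat 'intervalMinutes' from now and sleep until then,
// keeping all arithmetic in Poco::Timestamp's microsecond resolution.
void waitForNextHeartbeat(Poco::Event& rxInterrupt, int intervalMinutes) {
    Poco::Int64 nowUs  = Poco::Timestamp().epochMicroseconds();
    Poco::Int64 nextUs = nowUs + static_cast<Poco::Int64>(intervalMinutes) * 60 * 1000000;
    long timeoutMs = static_cast<long>((nextUs - nowUs) / 1000);  // microseconds -> milliseconds
    rxInterrupt.tryWait(timeoutMs);  // returns early if the event is signalled
}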
void run() { Poco::Timestamp lastScan; ItemInfoMap entries; scan(entries); while (!_stopped) { struct timespec timeout; timeout.tv_sec = 0; timeout.tv_nsec = 200000000; unsigned eventFilter = NOTE_WRITE; struct kevent event; struct kevent eventData; EV_SET(&event, _dirFD, EVFILT_VNODE, EV_ADD | EV_CLEAR, eventFilter, 0, 0); int nEvents = kevent(_queueFD, &event, 1, &eventData, 1, &timeout); if (nEvents < 0 || eventData.flags == EV_ERROR) { try { FileImpl::handleLastErrorImpl(owner().directory().path()); } catch (Poco::Exception& exc) { owner().scanError(&owner(), exc); } } else if (nEvents > 0 || ((owner().eventMask() & DirectoryWatcher::DW_ITEM_MODIFIED) && lastScan.isElapsed(owner().scanInterval()*1000000))) { ItemInfoMap newEntries; scan(newEntries); compare(entries, newEntries); std::swap(entries, newEntries); lastScan.update(); } } }
void RealSenseSensorImpl::faceDetect(bool depthMask) { Poco::Int64 maskTime = 0; Poco::Int64 detectTime = 0; Poco::Timestamp now; ImageGrayscale mask(grayscalePreview.clone()); if (depthMask) { PXCCapture::Sample *sample = senseManager->QuerySample(); mask = RealSenseProcessor::depthMask(sample->depth, sample->color, projection, grayscalePreview); //cv::imshow("Preview mask", mask); } maskTime = now.elapsed(); now.update(); cv::resize(mask, resizedPreview, cv::Size(320, 240), 0, 0, CV_INTER_NN); roiPreview = resizedPreview(detectRoi); tracker->detect(roiPreview); detectTime = now.elapsed(); if (depthMask && tracker->isLastDetect()) { cv::Rect faceRoi = toRealSizeRoi(tracker->getLastRegion()); mask = RealSenseProcessor::fillHoles(faceRoi, mask, grayscalePreview, 0.0); //cv::imshow("grayscalePreview mask 2", mask); } //std::cout << std::dec << "Mask time: " << maskTime << "; Face detect time: " << detectTime << std::endl; /* if (Face::Settings::instance().debug()) { if (tracker->isLastDetect()) { cv::rectangle(colorPreview, toRealSizeRoi(tracker->getLastRegion()), 255); } //cv::imshow("detection", roiPreview); }*/ }
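The maskTime/detectTime pattern above is section timing with one reused Poco::Timestamp; a stripped-down, self-contained version (the two stage functions are stand-ins for the real work):

#include <Poco/Timestamp.h>
#include <Poco/Thread.h>
#include <iostream>

static void doMaskStep()   { Poco::Thread::sleep(5); }   // placeholder for the depth-mask stage
static void doDetectStep() { Poco::Thread::sleep(10); }  // placeholder for the face-detect stage

int main() {
    Poco::Timestamp now;                                   // starts at "now"
    doMaskStep();
    Poco::Timestamp::TimeDiff maskTime = now.elapsed();    // microseconds since construction
    now.update();                                          // restart the clock for the next stage
    doDetectStep();
    Poco::Timestamp::TimeDiff detectTime = now.elapsed();
    std::cout << "mask: " << maskTime << " us, detect: " << detectTime << " us" << std::endl;
    return 0;
}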
int main (int argc, char** argv) { const bool USE_OPENCV_DISPLAY = false; // Use OpenCV or CImg for display? pI::BaseApplication app; pI::Runtime runtime (app.GetRuntime()); LoadPlugins (app); // Creates and initializes all the necessary plugins. const int DeviceIndex = 0; const bool ShowRGB = true; // Freenect/GetVideoAndDepth returns both the RGB image and the depth map captured by a Kinect. boost::shared_ptr<pI::pIn> getImageAndDepth = app.SpawnAndInitialize (runtime, "Freenect/GetVideoAndDepth", DeviceIndex, ShowRGB ? 0 : 2, // Video format: FREENECT_VIDEO_RGB or FREENECT_VIDEO_IR_8BIT 0 // Depth format: FREENECT_DEPTH_11BIT ); pI::Arguments getImageAndDepth_in = getImageAndDepth->GetInputSignature(), getImageAndDepth_out = getImageAndDepth->GetOutputSignature(); // Freenect/SetTiltAndLED sets the tilt angle and LED status of a Kinect. boost::shared_ptr<pI::pIn> setTiltAndLED = app.SpawnAndInitialize (runtime, "Freenect/SetTiltAndLED", DeviceIndex); pI::Arguments setTiltAndLED_in = setTiltAndLED->GetInputSignature(), setTiltAndLED_out = setTiltAndLED->GetOutputSignature(); // Color/ApplyRGBPalette applies an arbitrary RGB palette onto a matrix boost::shared_ptr<pI::pIn> applyRGB (runtime.SpawnPlugin ("pIn/Color/ApplyRGBPalette")); pI::Arguments applyRGB_params = applyRGB->GetParameterSignature(); pI::RGBPalette rgb (applyRGB_params[0]); rgb.SetCount (2048); std::map<size_t, uint32_t> freenect_glview_colors; // Coloring as used in Freenect's glview example freenect_glview_colors[0] = (0xFFFFFF); // white freenect_glview_colors[256] = (0xFF0000); // red freenect_glview_colors[512] = (0xFFFF00); // yellow freenect_glview_colors[768] = (0x00FF00); // green freenect_glview_colors[1024] = (0x00FFFF); // cyan freenect_glview_colors[1280] = (0x0000FF); // blue freenect_glview_colors[1536] = (0x000000); // black std::map<size_t, uint32_t> terrain; // Alternative coloring; names taken from http://en.wikipedia.org/wiki/List_of_colors terrain[0] = (0xFFFFFF); // White terrain[256] = (0x964B00); // Brown (traditional) terrain[512] = (0xFBEC5D); // Maize terrain[768] = (0x66FF00); // Bright green terrain[1024] = (0x014421); // Forest green (traditional) terrain[1025] = (0x87CEFA); // Light sky blue terrain[1280] = (0x00008B); // Dark blue terrain[2047] = (0x000000); // Black terrain[2048] = (0x696969); // Dim gray ColorRamp ramp (terrain); for (int i = 0; i < 2048; i++) { float v = powf ( (i / 2048.), 3); uint16_t gamma = 9216 * v; uint32_t c = ramp.GetColor (gamma); rgb.SetR (i, red (c)); rgb.SetG (i, green (c)); rgb.SetB (i, blue (c)); } applyRGB->Initialize (applyRGB_params); pI::Arguments applyRGB_in = applyRGB->GetInputSignature(), applyRGB_out = applyRGB->GetOutputSignature(); // OpenCV/Draw/Rectangle draws the inner rectangle. boost::shared_ptr<pI::pIn> drawRectImage = app.SpawnAndInitialize (runtime, "OpenCV/Draw/Rectangle"); pI::Arguments drawRectImage_in = drawRectImage->GetInputSignature(), drawRectImage_out = drawRectImage->GetOutputSignature(); boost::shared_ptr<pI::pIn> drawRectDepth = app.SpawnAndInitialize (runtime, "OpenCV/Draw/Rectangle"); pI::Arguments drawRectDepth_in = drawRectDepth->GetInputSignature(), drawRectDepth_out = drawRectDepth->GetOutputSignature(); // OpenCV/Draw/PutText draws a text string into an image. 
boost::shared_ptr<pI::pIn> puttext = app.SpawnAndInitialize (runtime, "OpenCV/Draw/PutText", 0, // font type: CV_FONT_HERSHEY_SIMPLEX pI_FALSE, // render font italic: no 0.75, 0.75, // horizontal and vertical scale 0.0, // shear: no shear 1, // thickness of the text strokes: 1 2 // type of the line: CV_AA ); pI::Arguments puttext_in = puttext->GetInputSignature(), puttext_out = puttext->GetOutputSignature(); // OpenCV/IO/SaveImage saves images to disk. boost::shared_ptr<pI::pIn> saveImage (runtime.SpawnPlugin ("OpenCV/IO/SaveImage")); saveImage->Initialize (saveImage->GetParameterSignature()); pI::Arguments saveImage_in = saveImage->GetInputSignature(), saveImage_out = saveImage->GetOutputSignature(); // OpenCV/IO/ShowImage or CImg/Display displays the image and the depth map. boost::shared_ptr<pI::pIn> displayImage, displayDepth; if (USE_OPENCV_DISPLAY) { displayImage = app.SpawnAndInitialize (runtime, "OpenCV/IO/ShowImage", "RGB image window", false, 640, 480, 0, 0); displayDepth = app.SpawnAndInitialize (runtime, "OpenCV/IO/ShowImage", "Depth map window", false, 640, 480, 650, 0); } else { displayImage = app.SpawnAndInitialize (runtime, "CImg/Display", pI_TRUE); displayDepth = app.SpawnAndInitialize (runtime, "CImg/Display", pI_TRUE); } pI::Arguments displayImage_in = displayImage->GetInputSignature(), displayImage_out = displayImage->GetOutputSignature(); pI::Arguments displayDepth_in = displayDepth->GetInputSignature(), displayDepth_out = displayDepth->GetOutputSignature(); // Now that all plugins are initialized, connect them. drawRectImage_in[0] = getImageAndDepth_out[0]; pI::IntValue (drawRectImage_in[1]).SetData (310); pI::IntValue (drawRectImage_in[2]).SetData (230); pI::IntValue (drawRectImage_in[3]).SetData (330); pI::IntValue (drawRectImage_in[4]).SetData (250); pI::IntValue (drawRectImage_in[5]).SetData (255); pI::IntValue (drawRectImage_in[6]).SetData (255); pI::IntValue (drawRectImage_in[7]).SetData (255); pI::IntValue (drawRectImage_in[8]).SetData (1); pI::StringSelection (drawRectImage_in[9]).SetIndex (0); pI::IntValue (drawRectImage_in[10]).SetData (0); displayImage_in[0] = drawRectImage_in[0]; pI::StringValue fileName (saveImage_in[0]); saveImage_in[1] = getImageAndDepth_out[0]; applyRGB_in[0] = getImageAndDepth_out[1]; drawRectDepth_in[0] = applyRGB_out[0]; pI::IntValue (drawRectDepth_in[1]).SetData (310); pI::IntValue (drawRectDepth_in[2]).SetData (230); pI::IntValue (drawRectDepth_in[3]).SetData (330); pI::IntValue (drawRectDepth_in[4]).SetData (250); pI::IntValue (drawRectDepth_in[5]).SetData (255); pI::IntValue (drawRectDepth_in[6]).SetData (255); pI::IntValue (drawRectDepth_in[7]).SetData (255); pI::IntValue (drawRectDepth_in[8]).SetData (1); pI::StringSelection (drawRectDepth_in[9]).SetIndex (0); pI::IntValue (drawRectDepth_in[10]).SetData (0); displayDepth_in[0] = drawRectDepth_in[0]; pI::StringValue puttext_text (puttext_in[1]); pI::IntValue puttext_x (puttext_in[2]); pI::IntValue puttext_y (puttext_in[3]); pI::IntValue puttext_R (puttext_in[4]); pI::IntValue puttext_G (puttext_in[5]); pI::IntValue puttext_B (puttext_in[6]); pI::BoolValue puttext_do_draw (puttext_in[7]); puttext_in[0] = drawRectDepth_in[0]; puttext_R.SetData (255); puttext_G.SetData (255); puttext_B.SetData (255); puttext_do_draw.SetData (pI_TRUE); puttext_x.SetData (285); puttext_y.SetData (272); // Executes getImageAndDepth and setTiltAndLED a first time to initialize Kinect. 
getImageAndDepth->Execute (getImageAndDepth_in, getImageAndDepth_out); pI::IntValue newTilt (setTiltAndLED_in[0]); pI::IntValue newLED (setTiltAndLED_in[1]); newTilt.SetData (0); // Tilt angle 0 degree newLED.SetData (1); // LED status Green setTiltAndLED->Execute (setTiltAndLED_in, setTiltAndLED_out); Poco::Timestamp now; bool doLoop = true; bool showDistance = false; try { while (doLoop) { while (!kbhit()) { // Get Depth and RGB data, ... getImageAndDepth->Execute (getImageAndDepth_in, getImageAndDepth_out); // ... apply depth to color image conversion, ... applyRGB->Execute (applyRGB_in, applyRGB_out); // ... draw a white rectangle into the center of both image and map, ... if (showDistance) { drawRectImage->Execute (drawRectImage_in, drawRectImage_out); drawRectDepth->Execute (drawRectDepth_in, drawRectDepth_out); } // ... and perhaps the result of distance measurement, ... double avgDistance = 0.; if (showDistance) { pI::ShortMatrix depth (getImageAndDepth_out[1]); double sumDistance = 0.; int sumCount = 0; for (int j = 230; j <= 250; ++j) { for (int i = 310; i <= 330; ++i) { int dep = depth.GetData (j, i); if (dep < 2047) { sumDistance += RawDepthToMeters (dep); ++sumCount; } } } if (sumCount > 0) { avgDistance = sumDistance / (double) sumCount; } char str[32]; sprintf (str, "%.2f m", avgDistance); puttext_text.SetData (str); puttext->Execute (puttext_in, puttext_out); } // ... and display both the image and the depth map. displayImage->Execute (displayImage_in, displayImage_out); displayDepth->Execute (displayDepth_in, displayDepth_out); std::cout << "Tilt " << (int) newTilt.GetData(); std::cout << "; " << (1000000. / now.elapsed()) << " frames/sec \r"; now.update(); } int ch = getch(); switch (ch) { case 'q': case 'Q': case 27 /* ESC */: // Terminates newTilt.SetData (0); // Tilt angle 0 degree newLED.SetData (0); // LED status off setTiltAndLED->Execute (setTiltAndLED_in, setTiltAndLED_out); doLoop = false; break; case 'm': case 'M': // Measurement mode showDistance = !showDistance; break; case 's': case 'S': { // Takes snapshot std::string dateTime = Poco::DateTimeFormatter::format (Poco::LocalDateTime(), "%Y-%m-%d_%H-%M-%S"); std::string name = std::string ("KinectImage_") + dateTime + std::string (".png"); fileName.SetData (const_cast<char*> (name.c_str())); saveImage->Execute (saveImage_in); std::cout << "Saved " << name << std::endl; // HACK Experimental: saves PCL point cloud pI::ShortMatrix depth (getImageAndDepth_out[1]); int width = depth.GetCols(); int height = depth.GetRows(); pcl::PointCloud<pcl::PointXYZ> cloud; for (int j = 0; j < height; ++j) { for (int i = 0; i < width; ++i) { int dep = depth.GetData (j, i); if (dep < 2048) { cloud.push_back (DepthToWorld (i, j, dep)); } } } name = std::string ("PointCloud_") + dateTime + std::string (".pcd"); pcl::io::savePCDFileASCII (name, cloud); std::cout << "Saved " << name << std::endl; } break; case '+': case 'u': case 'U': //std::cout << std::endl << "Tilt: " << newTilt.GetData(); newTilt.SetData (newTilt.GetData() + 1); //std::cout << " -> " << newTilt.GetData() << std::endl; setTiltAndLED->Execute (setTiltAndLED_in, setTiltAndLED_out); break; case '-': case 'd': case 'D': //std::cout << std::endl << "Tilt: " << newTilt.GetData(); newTilt.SetData (newTilt.GetData() - 1); //std::cout << " -> " << newTilt.GetData() << std::endl; setTiltAndLED->Execute (setTiltAndLED_in, setTiltAndLED_out); break; default: break; } } } catch (pI::exception::ExecutionException& e) { std::cerr << e.what() << std::endl; } }
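The frames/sec figure printed above comes from the elapsed microseconds between loop iterations; a minimal self-contained version of that FPS calculation:

#include <Poco/Timestamp.h>
#include <Poco/Thread.h>
#include <iostream>

int main() {
    Poco::Timestamp frameClock;
    for (int frame = 0; frame < 5; ++frame) {
        Poco::Thread::sleep(33);                        // stand-in for per-frame work (~30 fps)
        double fps = 1000000.0 / frameClock.elapsed();  // elapsed() is in microseconds
        frameClock.update();                            // reset for the next iteration
        std::cout << "frame " << frame << ": " << fps << " fps" << std::endl;
    }
    return 0;
}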
void UnGhost() { unghost = true; unghost_request_time.update(); }
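UnGhost() only records the request time; presumably that timestamp is checked elsewhere. A hypothetical counterpart (the function name and timeout parameter are invented for illustration) might look like:

#include <Poco/Timestamp.h>

// Hypothetical: report whether the recorded unghost request is older than timeoutMs.
bool UnGhostExpired(const Poco::Timestamp& unghost_request_time, long timeoutMs) {
    return unghost_request_time.isElapsed(static_cast<Poco::Timestamp::TimeDiff>(timeoutMs) * 1000);
}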
void RealSenseSensorImpl::singleCapture(bool calcUvMap, bool calcLandmarks, bool releaseFrame) { Poco::Int64 acquireTime = 0; Poco::Int64 getBuffersTime = 0; Poco::Timestamp now; sts = senseManager->AcquireFrame(true); if (sts != PXC_STATUS_NO_ERROR) { senseManager->ReleaseFrame(); std::string msg("Can't acquire frame from camera (" + std::to_string(sts) + ")"); std::cerr << msg << std::endl; throw FACELIB_EXCEPTION(msg); } PXCCapture::Sample *sample = senseManager->QuerySample(); acquireTime = now.elapsed(); now.update(); getBuffers(sample->depth, sample->color, calcUvMap, calcLandmarks); getBuffersTime = now.elapsed(); try { brightnessToSet = 0; Poco::Int64 detectFaceRoiTime = 0; Poco::Int64 meanCalcTime = 0; Poco::Int64 queryColorBrightnessTime = 0; Poco::Int64 queryColorBrightnessInfoTime = 0; Poco::Int64 setColorBrightnessTime = 0; now.update(); cv::Rect croi = realsenseDepthFaceTracker->detectFaceRoi(depthMatrix, sample->depth, RealSenseDepthFaceTracker::Domain::Color, false /*debug*/); detectFaceRoiTime = now.elapsed(); // brightness control if ((settings.brightnessControl == static_cast<int>(RealSenseSensor::BrightnessControlType::HeadRoiBased) && croi.area() && (state == RealSenseSensor::STATE_POSITIONING || state == RealSenseSensor::STATE_IDLE))) { now.update(); cv::Mat mask = cv::Mat::zeros(grayscalePreview.size(), CV_8U); cv::rectangle(mask, croi, cv::Scalar(255), CV_FILLED); int mean = cv::mean(grayscalePreview, mask)[0]; meanCalcTime = now.elapsed(); // if (Face::Settings::instance().debug()) { // cv::Mat maskDrawing = grayscalePreview.clone(); // cv::rectangle(maskDrawing, croi, cv::Scalar(255)); // cv::imshow("Depth roi for gain projected to color", maskDrawing); // } now.update(); pxcI32 brightness = device->QueryColorBrightness(); queryColorBrightnessTime = now.elapsed(); diff = targetColorMean - mean; auto newValue = [=](const PXCCapture::Device::PropertyInfo& info, pxcI32 val, pxcI32 newVal) -> pxcI32 { pxcI32 newV = std::max(info.range.min, std::min(static_cast<pxcF32>(newVal), info.range.max)); if (Face::Settings::instance().debug()) { std::stringstream sstr; sstr << "Val: " << val << "; adjusted val: " << newV << "; mean: " << mean << "/" << targetColorMean; shadowText(grayscalePreview, sstr.str(), cv::Point(150, 30), 0.5); } return newV; }; int _diff = diff == 0 ? 0 : diff / abs(diff); pxcI32 newBrightness = newValue(device->QueryColorBrightnessInfo(), brightness, brightness + _diff); if (abs(diff) > 15) { brightnessToSet = newBrightness; } } } catch (std::exception& e) { std::cout << " --- Brightness control exception: " << e.what() << std::endl; } //std::cout << std::dec << "Acquire time: " << acquireTime << "; getBuffers time: " << getBuffersTime << std::endl; if (releaseFrame) { senseManager->ReleaseFrame(); } }
void RealSenseSensorImpl::doPositioning() { Poco::Int64 singleCaptureTime = 0; Poco::Int64 faceDetectTime = 0; Poco::Int64 pose1Time = 0; Poco::Int64 pose2Time = 0; Poco::Int64 frameReleaseTime = 0; Poco::Int64 frameTime = 0; static Poco::Timestamp frameTimer; RealSenseSensor::PositioningOutput prevPosOutput = positioningOutput; positioningOutput = RealSenseSensor::POS_NONE; Poco::Timestamp now; singleCapture(false, false, false); singleCaptureTime = now.elapsed(); now.update(); faceDetect(settings.hasPosFeature(RealSenseSensor::PositioningFeature::DepthMask)); faceDetectTime = now.elapsed(); frameTime = frameTimer.elapsed(); frameTimer.update(); calcMinZ(); if (tracker->getConsecutiveNonDetects() >= settings.consecutiveNonDetectsToStopPositioning || (tracker->isLastDetect() && nearestZpoint > settings.maximalPositioningDistance)) { setState(RealSenseSensor::STATE_IDLE); senseManager->ReleaseFrame(); return; } if (tracker->getConsecutiveNonDetects() > 10) { positioningOutput = RealSenseSensor::POS_NONE; senseManager->ReleaseFrame(); return; } if (!tracker->isLastDetect()) { senseManager->ReleaseFrame(); return; } cv::Rect reg = toRealSizeRoi(tracker->getLastRegion()); double distance = calcDistance(reg); now.update(); FacePoseEstimator::Pose pose = FacePoseEstimator::Pose::UnknownPose(); if (settings.hasPosFeature(RealSenseSensor::PositioningFeature::PoseEstimation) && tracker->isLastDetect()) { landmarks = lmDetector.getLandmarks(grayscalePreview, toRealSizeRoi4LMDetector(tracker->getLastRegion()), false); std::map<std::string, std::vector<cv::Point2d> > selectedLMPoints = lmDetector.lmGroupPoints(landmarks); pose1Time = now.elapsed(); now.update(); pose = facePoseEstimator->facePose(selectedLMPoints, realsenseProjection); pose2Time = now.elapsed(); if (Face::Settings::instance().debug() && pose != FacePoseEstimator::Pose::UnknownPose()) { cv::Scalar red(0, 0, 255); cv::Scalar green(0, 255, 0); cv::Scalar white(255, 255, 255); cv::Scalar black(0, 0, 0); { positioningFrameStats.update(); std::stringstream sstr; sstr.precision(1); sstr.setf(std::ios::fixed, std::ios::floatfield); sstr << "Pitch: " << pose.pitch - pose.relativePitch << "/" << facePoseEstimator->getSettings().pitchZeroAngle << ", Yaw: " << pose.yaw - pose.relativeYaw << "/" << facePoseEstimator->getSettings().yawZeroAngle << ", FPS: " << positioningFrameStats.fps(); sstr.flush(); shadowText(grayscalePreview, sstr.str(), cv::Point(150, 15), 0.5); { std::stringstream sstrDebug; sstrDebug << std::dec << "Capture time: " << singleCaptureTime / 1000 << "; FaceDetect time: " << faceDetectTime / 1000; shadowText(grayscalePreview, sstrDebug.str(), cv::Point(150, 45), 0.5); } { std::stringstream sstrDebug; sstrDebug << std::dec << "pose time: " << pose1Time / 1000 << "/" << pose2Time / 1000 << "; frame time: " << frameTime / 1000; shadowText(grayscalePreview, sstrDebug.str(), cv::Point(150, 60), 0.5); } auto mirror = [](const cv::Mat& img, const cv::Point& p) -> cv::Point { return cv::Point(img.cols - p.x, p.y); }; cv::line(grayscalePreview, mirror(grayscalePreview, pose.imageAxisX.from), mirror(grayscalePreview, pose.imageAxisX.to), black); cv::line(grayscalePreview, mirror(grayscalePreview, pose.imageAxisY.from), mirror(grayscalePreview, pose.imageAxisY.to), black); } cv::rectangle(grayscalePreview, toRealSizeRoi4LMDetector(tracker->getLastRegion()), 255); for (const auto& lm : landmarks) { cv::circle(grayscalePreview, lm, 2, black); } } landmarks.clear(); } // add hysteresis for distance double targetMaxDistance = 
settings.maximalDistance; if (prevPosOutput == RealSenseSensor::POS_MOVE_CLOSER) { targetMaxDistance *= 0.96; } double targetMinDistance = settings.minimalDistance; if (prevPosOutput == RealSenseSensor::POS_MOVE_FAR) { targetMinDistance *= 1.04; } if (distance > targetMaxDistance) { positioningOutput = RealSenseSensor::POS_MOVE_CLOSER; } else if (distance < targetMinDistance) { positioningOutput = RealSenseSensor::POS_MOVE_FAR; } else { FacePoseEstimator::AlignInstruction instruction = facePoseEstimator->giveFaceAlignInstruction(pose); convert(instruction, positioningOutput); } if (tracker->positionUnsteady() || positioningOutput != RealSenseSensor::POS_DONTMOVE) { tracker->init(); } if (positioningOutput == RealSenseSensor::POS_DONTMOVE && tracker->getConsecutiveDetects() >= settings.consecutiveDetectsToStartCapturing) { setState(RealSenseSensor::STATE_CAPTURING); } senseManager->ReleaseFrame(); // std::cout << std::dec << "SingleCapture time: " << singleCaptureTime << "; FaceDetect time: " << faceDetectTime; // std::cout << std::dec << "; pose time: " << pose1Time << "/" << pose2Time << "; frame time: " << frameTime << std::endl; }
int main(int argc, char** argv) { int maxManSpeed = 30000; try { TBS::initLogs("cvclient", 8); std::cout << "HAL Client Starts" << std::endl; Mat image(200, 200, CV_8UC3); Mat imageRadar(50, 200, CV_8UC3); cv::line(image, cv::Point(0, 100), cv::Point(200, 100), cv::Scalar(255, 0, 0), 1); cv::line(image, cv::Point(100, 0), cv::Point(100, 200), cv::Scalar(255, 0, 0), 1); namedWindow("movement", 0); setMouseCallback("movement", onMouse, 0); namedWindow("bioradar", 0); setMouseCallback("bioradar", onMouse2, 0); TBS::Services::JsonClientParams bp("localhost", HAL::API::Communication::BioRadarPort, TBS::Services::JsonClientParams::JsonHttp); bioRadarClient = new HAL::API::BioRadar::Json::Client(bp); TBS::Services::JsonClientParams cp("localhost", HAL::API::Communication::CameraPort, TBS::Services::JsonClientParams::JsonHttp); cameraClient = new HAL::API::Camera::Json::Client(cp); TBS::Services::JsonClientParams mc("localhost", HAL::API::Communication::ManipulatorPort, TBS::Services::JsonClientParams::JsonHttp); manipulatorClient = new HAL::API::Manipulator::Json::Client(mc); client = new HAL::API::MovementClient(); client->Movement().StatusChanged += Poco::delegate(&updateSpeed); for (;;) { cv::Mat cpy = image.clone(); drawSpeed(cpy); imshow("movement", cpy); imshow("bioradar", cpy); int c = waitKey(100); if ((c & 255) == 27) { cout << "Exiting ...\n"; break; } try { static int speed = 0; static Poco::Timestamp tst; static int lastInRow = 0; if ((c & 255) == 'q') { lastInRow++; speed = 50; bioRadarClient->BioRadar().GoRelBase(speed); } else if ((c & 255) == 'a') { speed = -50; lastInRow++; bioRadarClient->BioRadar().GoRelBase(speed); } else { speed = 0; if (lastInRow > 1) { bioRadarClient->BioRadar().GoRelBase(speed); } lastInRow = 0; } //std::cout << "tst " << tst.elapsed() / 1000 << "ms - speed: " << speed << std::endl; tst.update(); } catch (Poco::Exception & e) { } try { static int speed = 0; static Poco::Timestamp tst; static int lastInRow = 0; if ((c & 255) == 'w') { lastInRow++; speed = 20; bioRadarClient->BioRadar().GoRelAntenna(speed); } else if ((c & 255) == 's') { speed = -20; lastInRow++; bioRadarClient->BioRadar().GoRelAntenna(speed); } else { speed = 0; if (lastInRow > 1) { bioRadarClient->BioRadar().GoRelAntenna(speed); } lastInRow = 0; } //std::cout << "tst " << tst.elapsed() / 1000 << "ms - speed: " << speed << std::endl; tst.update(); } catch (Poco::Exception & e) { } try { if ((c & 255) == 'e') { bioRadarClient->BioRadar().Enable(); } else if ((c & 255) == 'd') { bioRadarClient->BioRadar().Disable(); } } catch (Poco::Exception & e) { } try { if ((c & 255) == 'r') { HAL::API::BioRadar::MotorInfo base; HAL::API::BioRadar::MotorInfo antena; std::vector<HAL::API::BioRadar::TouchInfo> touchInfo; bioRadarClient->BioRadar().GetStatus(base, antena, touchInfo); std::cout << "Antena touch min: " << (antena.touchMin ? 1 : 0) << std::endl; std::cout << "Antena touch max: " << (antena.touchMax ? 1 : 0) << std::endl; std::cout << "Antena position: " << (antena.position) << std::endl; std::cout << "Antena error: " << (antena.positionError ? 
1 : 0) << std::endl; } } catch (Poco::Exception & e) { } try { if ((c & 255) == '{') { cameraClient->Camera().Enable(); } else if ((c & 255) == '}') { cameraClient->Camera().Disable(); } } catch (Poco::Exception & e) { } try { static int speed = 0; static int lastInRow = 0; if ((c & 255) == '+') { lastInRow++; speed = 100; cameraClient->Camera().GoRelEngine(speed); } else if ((c & 255) == '-') { speed = -100; lastInRow++; cameraClient->Camera().GoRelEngine(speed); } else { speed = 0; if (lastInRow > 1) { cameraClient->Camera().GoRelEngine(0); } lastInRow = 0; } } catch (Poco::Exception & e) { } try { static int speed = 0; static int lastInRow = 0; if ((c & 255) == '4') { lastInRow++; speed = 100; cameraClient->Camera().GoRelServo1(speed); } else if ((c & 255) == '6') { speed = -100; lastInRow++; cameraClient->Camera().GoRelServo1(speed); } else { speed = 0; if (lastInRow > 1) { cameraClient->Camera().GoRelServo1(0); } lastInRow = 0; } } catch (Poco::Exception & e) { } try { static int speed = 0; static int lastInRow = 0; if ((c & 255) == '8') { lastInRow++; speed = 100; cameraClient->Camera().GoRelServo2(speed); } else if ((c & 255) == '2') { speed = -100; lastInRow++; cameraClient->Camera().GoRelServo2(speed); } else { speed = 0; if (lastInRow > 1) { cameraClient->Camera().GoRelServo2(0); } lastInRow = 0; } } catch (Poco::Exception & e) { } try { if ((c & 255) == 'x') { manipulatorClient->Manipulator().Enable(); } else if ((c & 255) == 'c') { manipulatorClient->Manipulator().Disable(); } } catch (Poco::Exception & e) { } try { static int speed = 0; static int lastInRow = 0; if ((c & 255) == 'v') { lastInRow++; speed = 50; manipulatorClient->Manipulator().StartRotation(speed); } else if ((c & 255) == 'b') { speed = -50; lastInRow++; manipulatorClient->Manipulator().StartRotation(speed); } else if ((c & 255) == 'n') { lastInRow++; speed = 50; manipulatorClient->Manipulator().StartRotationTo(speed, 1450); } else if ((c & 255) == 'm') { speed = -50; lastInRow++; manipulatorClient->Manipulator().StartRotationTo(speed, 1450); } else { speed = 0; if (lastInRow > 1) { manipulatorClient->Manipulator().StopRotation(); } lastInRow = 0; } } catch (Poco::Exception & e) { } /* try { static int speed = 0; static int lastInRow = 0; if ((c & 255) == 'n') { lastInRow++; speed = 50; manipulatorClient->Manipulator().StartHolder(speed); } else if ((c & 255) == 'm') { speed = -50; lastInRow++; manipulatorClient->Manipulator().StartHolder(speed); } else { speed = 0; if (lastInRow > 1) { manipulatorClient->Manipulator().StopHolder(); } lastInRow = 0; } } catch (Poco::Exception & e) { } */ try { static int speed = 0; static int lastInRow = 0; if ((c & 255) == 'j') { lastInRow++; speed = maxManSpeed; manipulatorClient->Manipulator().StartJoint1(speed); } else if ((c & 255) == 'k') { speed = -maxManSpeed; lastInRow++; manipulatorClient->Manipulator().StartJoint1(speed); } else if ((c & 255) == 'g') { lastInRow++; speed = maxManSpeed; manipulatorClient->Manipulator().StartJoint1To(speed, 3900); } else if ((c & 255) == 'h') { speed = -maxManSpeed; lastInRow++; manipulatorClient->Manipulator().StartJoint1To(speed, 3900); } else { speed = 0; if (lastInRow > 1) { manipulatorClient->Manipulator().StopJoint1(); } lastInRow = 0; } } catch (Poco::Exception & e) { } try { static int speed = 0; static int lastInRow = 0; if ((c & 255) == 'u') { lastInRow++; speed = maxManSpeed; manipulatorClient->Manipulator().StartJoint2(speed); } else if ((c & 255) == 'i') { speed = -maxManSpeed; lastInRow++; 
manipulatorClient->Manipulator().StartJoint2(speed); } else if ((c & 255) == 'o') { lastInRow++; speed = maxManSpeed; manipulatorClient->Manipulator().StartJoint2To(speed, 1100); } else if ((c & 255) == 'p') { speed = -maxManSpeed; lastInRow++; manipulatorClient->Manipulator().StartJoint2To(speed, 1100); } else { speed = 0; if (lastInRow > 1) { manipulatorClient->Manipulator().StopJoint2(); } lastInRow = 0; } } catch (Poco::Exception & e) { } try { if ((c & 255) == ',') { manipulatorClient->Manipulator().LightOn(); } else if ((c & 255) == '.') { manipulatorClient->Manipulator().LightOff(); } } catch (Poco::Exception & e) { } } client->Movement().StatusChanged -= Poco::delegate(&updateSpeed); } catch (Poco::Exception & e) { std::cerr << "Exception: " << e.displayText() << std::endl; } catch (std::exception & e) { std::cerr << "Exception: " << e.what() << std::endl; } return 0; }