// Releases the owned OpenVX delay object back to the framework.
// NOTE(review): shown here without its enclosing class/template header --
// presumably the destructor of an RAII Delay<T> wrapper whose m_handle is a
// vx_delay; confirm against the full file before relying on this reading.
~Delay<Image>() { vxReleaseDelay(&m_handle); }
int main(int argc, char* argv[]) { try { nvxio::Application &app = nvxio::Application::get(); // // Parse command line arguments // std::string sourceUri = app.findSampleFilePath("cars.mp4"); std::string configFile = app.findSampleFilePath("feature_tracker_demo_config.ini"); app.setDescription("This demo demonstrates Feature Tracker algorithm"); app.addOption('s', "source", "Source URI", nvxio::OptionHandler::string(&sourceUri)); app.addOption('c', "config", "Config file path", nvxio::OptionHandler::string(&configFile)); #if defined USE_OPENCV || defined USE_GSTREAMER std::string maskFile; app.addOption('m', "mask", "Optional mask", nvxio::OptionHandler::string(&maskFile)); #endif app.init(argc, argv); // // Create OpenVX context // nvxio::ContextGuard context; // // Reads and checks input parameters // nvx::FeatureTracker::HarrisPyrLKParams params; std::string error; if (!read(configFile, params, error)) { std::cout<<error; return nvxio::Application::APP_EXIT_CODE_INVALID_VALUE; } // // Create a Frame Source // std::unique_ptr<nvxio::FrameSource> source( nvxio::createDefaultFrameSource(context, sourceUri)); if (!source || !source->open()) { std::cerr << "Can't open source URI " << sourceUri << std::endl; return nvxio::Application::APP_EXIT_CODE_NO_RESOURCE; } if (source->getSourceType() == nvxio::FrameSource::SINGLE_IMAGE_SOURCE) { std::cerr << "Can't work on a single image." 
<< std::endl; return nvxio::Application::APP_EXIT_CODE_INVALID_FORMAT; } nvxio::FrameSource::Parameters sourceParams = source->getConfiguration(); // // Create a Render // std::unique_ptr<nvxio::Render> renderer(nvxio::createDefaultRender( context, "Feature Tracker Demo", sourceParams.frameWidth, sourceParams.frameHeight)); if (!renderer) { std::cerr << "Can't create a renderer" << std::endl; return nvxio::Application::APP_EXIT_CODE_NO_RENDER; } EventData eventData; renderer->setOnKeyboardEventCallback(eventCallback, &eventData); // // Messages generated by the OpenVX framework will be processed by nvxio::stdoutLogCallback // vxRegisterLogCallback(context, &nvxio::stdoutLogCallback, vx_false_e); // // Create OpenVX Image to hold frames from video source // vx_image frameExemplar = vxCreateImage(context, sourceParams.frameWidth, sourceParams.frameHeight, sourceParams.format); NVXIO_CHECK_REFERENCE(frameExemplar); vx_delay frame_delay = vxCreateDelay(context, (vx_reference)frameExemplar, 2); NVXIO_CHECK_REFERENCE(frame_delay); vxReleaseImage(&frameExemplar); vx_image prevFrame = (vx_image)vxGetReferenceFromDelay(frame_delay, -1); vx_image frame = (vx_image)vxGetReferenceFromDelay(frame_delay, 0); // // Load mask image if needed // vx_image mask = NULL; #if defined USE_OPENCV || defined USE_GSTREAMER if (!maskFile.empty()) { mask = nvxio::loadImageFromFile(context, maskFile, VX_DF_IMAGE_U8); vx_uint32 mask_width = 0, mask_height = 0; NVXIO_SAFE_CALL( vxQueryImage(mask, VX_IMAGE_ATTRIBUTE_WIDTH, &mask_width, sizeof(mask_width)) ); NVXIO_SAFE_CALL( vxQueryImage(mask, VX_IMAGE_ATTRIBUTE_HEIGHT, &mask_height, sizeof(mask_height)) ); if (mask_width != sourceParams.frameWidth || mask_height != sourceParams.frameHeight) { std::cerr << "The mask must have the same size as the input source." 
<< std::endl; return nvxio::Application::APP_EXIT_CODE_INVALID_DIMENSIONS; } } #endif // // Create FeatureTracker instance // std::unique_ptr<nvx::FeatureTracker> tracker(nvx::FeatureTracker::createHarrisPyrLK(context, params)); nvxio::FrameSource::FrameStatus frameStatus; do { frameStatus = source->fetch(frame); } while (frameStatus == nvxio::FrameSource::TIMEOUT); if (frameStatus == nvxio::FrameSource::CLOSED) { std::cerr << "Source has no frames" << std::endl; return nvxio::Application::APP_EXIT_CODE_NO_FRAMESOURCE; } tracker->init(frame, mask); vxAgeDelay(frame_delay); // // Run processing loop // nvx::Timer totalTimer; totalTimer.tic(); double proc_ms = 0; while (!eventData.shouldStop) { if (!eventData.pause) { frameStatus = source->fetch(frame); if (frameStatus == nvxio::FrameSource::TIMEOUT) { continue; } if (frameStatus == nvxio::FrameSource::CLOSED) { if (!source->open()) { std::cerr << "Failed to reopen the source" << std::endl; break; } continue; } // // Process // nvx::Timer procTimer; procTimer.tic(); tracker->track(frame, mask); proc_ms = procTimer.toc(); // // Print performance results // tracker->printPerfs(); } // // show the previous frame // renderer->putImage(prevFrame); // // Draw arrows & state // drawArrows(renderer.get(), tracker->getPrevFeatures(), tracker->getCurrFeatures()); double total_ms = totalTimer.toc(); std::cout << "Display Time : " << total_ms << " ms" << std::endl << std::endl; // // Add a delay to limit frame rate // app.sleepToLimitFPS(total_ms); total_ms = totalTimer.toc(); totalTimer.tic(); displayState(renderer.get(), sourceParams, proc_ms, total_ms); if (!renderer->flush()) { eventData.shouldStop = true; } if (!eventData.pause) { vxAgeDelay(frame_delay); } } // // Release all objects // vxReleaseImage(&mask); vxReleaseDelay(&frame_delay); } catch (const std::exception& e) { std::cerr << "Error: " << e.what() << std::endl; return nvxio::Application::APP_EXIT_CODE_ERROR; } return nvxio::Application::APP_EXIT_CODE_SUCCESS; }
// Releases the owned OpenVX delay object back to the framework.
// NOTE(review): shown here without its enclosing class/template header --
// presumably the destructor of an RAII Delay<T> wrapper whose m_handle is a
// vx_delay; confirm against the full file before relying on this reading.
~Delay<Buffer>() { vxReleaseDelay(&m_handle); }