int main(int argc, char** argv) { devMan = new DeviceManager(); unsigned int i = devMan->GetNODevicesConnected(); if (i < 1) { std::cout << "No devices found." << std::endl; return -1; } printf("Number of devices connected: %d.\n", i); //VTK Pipeline vtkSmartPointer<vtkRenderer> ren = vtkSmartPointer<vtkRenderer>::New(); //vtkSmartPointer<vtkRenderer> dataren = vtkSmartPointer<vtkRenderer>::New(); renwin = vtkRenderWindow::New(); /*datawin= vtkRenderWindow::New();*/ vtkSmartPointer<vtkRenderWindowInteractor> iren = vtkSmartPointer<vtkRenderWindowInteractor>::New(); vtkSmartPointer<vtkRenderWindowInteractor> data_iren = vtkSmartPointer<vtkRenderWindowInteractor>::New(); if (KINECT_SET_STEREO_ON) { renwin->SetStereoCapableWindow(1); renwin->StereoRenderOn(); /*datawin->SetStereoCapableWindow(1); datawin->StereoRenderOn();*/ } renwin->AddRenderer(ren); renwin->SetInteractor(iren); renwin->SetSize(1280,800); iren->Initialize(); /*datawin->AddRenderer(dataren); datawin->SetInteractor(data_iren); datawin->SetSize(1280,800); data_iren->Initialize();*/ //Add timer callback vtkSmartPointer<UpdateData> updateCallback = vtkSmartPointer<UpdateData>::New(); iren->AddObserver(vtkCommand::TimerEvent, updateCallback); iren->CreateRepeatingTimer(TIMER_LENGTH); initializeLights(); devMan->GetCameraDataForAllDevices(cameraDataVector); updatePolyData(); if (USE_TNG) initializeTNG(); if (USE_TRACKER) initializeTracker(); ren->GetActiveCamera()->SetPosition(0,0,3.34); ren->GetActiveCamera()->Roll(180.0); ren->GetActiveCamera()->Azimuth(180.0); ren->GetActiveCamera()->Modified(); // //ren->ResetCamera(); //printf("rencamdis %f",dataren->GetActiveCamera()->GetDistance()); //printf("renviewangle %f",dataren->GetActiveCamera()->GetViewAngle()); // //dataren->ResetCamera(); //printf("camdis %f",dataren->GetActiveCamera()->GetDistance()); //printf("viewangle %f",dataren->GetActiveCamera()->GetViewAngle()); // Continue interacting iren->Start(); //MSG msg; // while (1) { // PeekMessage(&msg, 
NULL, 0, 0, PM_REMOVE); // TranslateMessage(&msg); // DispatchMessage(&msg); // // } delete devMan; return 0; }
// Constructs the viewer node: waits for the tracker to publish its
// configuration on the parameter server, loads the object model, subscribes
// (synchronously) to the camera and tracker topics, and blocks until a first
// image is available so the display can be sized.
//
// Parameters:
//   nh / privateNh - public and private node handles of this node.
//   exiting        - shared flag polled so the blocking wait loops can be
//                    aborted by the caller (e.g. on shutdown).
//   queueSize      - depth used for every subscriber and the synchronizer.
//
// Throws std::runtime_error when the model cannot be fetched from the
// parameter server or when no valid image is ever received.
TrackerViewer::TrackerViewer(ros::NodeHandle& nh,
                             ros::NodeHandle& privateNh,
                             volatile bool& exiting,
                             unsigned queueSize)
  : exiting_ (exiting),
    queueSize_(queueSize),
    nodeHandle_(nh),
    nodeHandlePrivate_(privateNh),
    imageTransport_(nodeHandle_),
    rectifiedImageTopic_(),
    cameraInfoTopic_(),
    checkInputs_(nodeHandle_, ros::this_node::getName()),
    tracker_(),
    cameraParameters_(),
    image_(),
    info_(),
    cMo_(boost::none),
    sites_(),
    imageSubscriber_(),
    cameraInfoSubscriber_(),
    trackingResultSubscriber_(),
    movingEdgeSitesSubscriber_(),
    kltPointsSubscriber_(),
    synchronizer_(syncPolicy_t(queueSize_)),
    timer_(),
    countAll_(0u),
    countImages_(0u),
    countCameraInfo_(0u),
    countTrackingResult_(0u),
    countMovingEdgeSites_(0u),
    countKltPoints_(0u)
{
  // Compute topic and services names.
  // Poll (at 1 Hz) until the tracker advertises which camera it uses.
  std::string cameraPrefix;
  ros::Rate rate (1);
  while (cameraPrefix.empty ())
    {
      if (!nodeHandle_.getParam ("camera_prefix", cameraPrefix))
        {
          ROS_WARN ("the camera_prefix parameter does not exist.\n"
                    "This may mean that:\n"
                    "- the tracker is not launched,\n"
                    "- the tracker and viewer are not running in the same namespace."
                    );
        }
      else if (cameraPrefix.empty ())
        {
          ROS_INFO ("tracker is not yet initialized, waiting...\n"
                    "You may want to launch the client to initialize the tracker.");
        }
      // Bail out early if the process is shutting down.
      if (this->exiting())
        return;
      rate.sleep ();
    }

  // Derive the image/camera-info topics from the announced prefix.
  rectifiedImageTopic_ =
    ros::names::resolve(cameraPrefix + "/image_rect");
  cameraInfoTopic_ =
    ros::names::resolve(cameraPrefix + "/camera_info");

  // Wait (at the same 1 Hz rate) until the tracker uploads the model
  // description, then write it to a local file for the tracker library.
  boost::filesystem::ofstream modelStream;
  std::string path;

  while (!nodeHandle_.hasParam(visp_tracker::model_description_param))
    {
      if (!nodeHandle_.hasParam(visp_tracker::model_description_param))
        {
          ROS_WARN ("the model_description parameter does not exist.\n"
                    "This may mean that:\n"
                    "- the tracker is not launched or not initialized,\n"
                    "- the tracker and viewer are not running in the same namespace."
                    );
        }
      if (this->exiting())
        return;
      rate.sleep ();
    }

  if (!makeModelFile(modelStream, path))
    throw std::runtime_error
      ("failed to load the model from the parameter server");

  ROS_INFO_STREAM("Model loaded from the parameter server.");
  vrmlPath_ = path;

  initializeTracker();
  if (this->exiting())
    return;
  checkInputs();
  if (this->exiting())
    return;

  // Subscribe to camera and tracker synchronously.
  // All five streams are joined by the synchronizer so each displayed frame
  // carries a matching pose, moving-edge sites and KLT points.
  imageSubscriber_.subscribe
    (imageTransport_, rectifiedImageTopic_, queueSize_);
  cameraInfoSubscriber_.subscribe
    (nodeHandle_, cameraInfoTopic_, queueSize_);
  trackingResultSubscriber_.subscribe
    (nodeHandle_, visp_tracker::object_position_covariance_topic, queueSize_);
  movingEdgeSitesSubscriber_.subscribe
    (nodeHandle_, visp_tracker::moving_edge_sites_topic, queueSize_);
  kltPointsSubscriber_.subscribe
    (nodeHandle_, visp_tracker::klt_points_topic, queueSize_);

  synchronizer_.connectInput
    (imageSubscriber_, cameraInfoSubscriber_,
     trackingResultSubscriber_,
     movingEdgeSitesSubscriber_, kltPointsSubscriber_);
  synchronizer_.registerCallback(boost::bind(&TrackerViewer::callback,
                                             this, _1, _2, _3, _4, _5));

  // Check for synchronization every 30s.
  // Per-stream counters let timerCallback detect a stalled/unsynchronized
  // input (e.g. one topic never publishing).
  synchronizer_.registerCallback(boost::bind(increment, &countAll_));
  imageSubscriber_.registerCallback(boost::bind(increment, &countImages_));
  cameraInfoSubscriber_.registerCallback
    (boost::bind(increment, &countCameraInfo_));
  trackingResultSubscriber_.registerCallback
    (boost::bind(increment, &countTrackingResult_));
  movingEdgeSitesSubscriber_.registerCallback
    (boost::bind(increment, &countMovingEdgeSites_));
  kltPointsSubscriber_.registerCallback
    (boost::bind(increment, &countKltPoints_));
  timer_ = nodeHandle_.createWallTimer
    (ros::WallDuration(30.),
     boost::bind(&TrackerViewer::timerCallback, this));

  // Wait for image.
  waitForImage();
  if (this->exiting())
    return;
  if (!image_.getWidth() || !image_.getHeight())
    throw std::runtime_error("failed to retrieve image");

  // Load camera parameters.
  initializeVpCameraFromCameraInfo(cameraParameters_, info_);
  tracker_.setCameraParameters(cameraParameters_);
}
int utManagerThread::run_with_templateTracker_SFM() { int kalState = -1; switch (stateFlag) { case 0: yDebug("Looking for motion...\n"); timeNow = yarp::os::Time::now(); oldMcutPoss.clear(); stateFlag++; deleteGuiTarget(); break; case 1: // state #01: check the motionCUT and see if there's something interesting. // if so, initialize the tracker, and step up. if (processMotion()) { yDebug("Initializing tracker...\n"); timeNow = yarp::os::Time::now(); initializeTracker(); stateFlag++; } break; case 2: // state #02: read data from the Tracker and use the SFM to retrieve a 3D point. // with that, initialize the kalman filter, and then step up. readFromTracker(); if (getPointFromStereo()) { yDebug("Initializing Kalman filter...\n"); kalThrd -> setKalmanState(KALMAN_INIT); kalThrd -> kalmanInit(SFMPos); stateFlag++; } break; case 3: // state #03: keep reading data from the Tracker and retrieving the 3D point from the SFM // With this info, keep feeding the kalman filter until it thinks that the object is still // tracked. If not, go back from the initial state. printMessage(2,"Reading from tracker and SFM...\n"); readFromTracker(); if (getPointFromStereo()) { kalThrd -> setKalmanInput(SFMPos); } kalThrd -> getKalmanState(kalState); sendGuiTarget(); if (kalState == KALMAN_STOPPED) { yDebug("For some reasons, the kalman filters stopped. Going back to initial state.\n"); stateFlag = 0; } break; default: yError(" utManagerThread should never be here!!!\nState: %d\n",stateFlag); Time::delay(1); break; } return kalState; }