int main(int argc, char** argv)
{
  ros::init(argc, argv, "lat_lon_tf_echo");
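  // Two frame ids are required on the command line: the fixed frame and the target frame.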
  if (argc < 3)
  {
    ROS_FATAL("No frame ids specified");
    printf("Usage: lat_lon_tf_echo <fixed_frame_id> <target_frame_id>\n");
    ros::shutdown();
    return 1;
  }
  std::string fixed_frame(argv[1]);
  std::string frame_id(argv[2]);
  LatLonTFEchoNode node(ros::NodeHandle(), frame_id, fixed_frame);
  ros::spin();

  return 0;
}
void Driver::setup()
{
  double hz(DEFAULT_RATE);
  int32_t device_id(0);
  std::string frame_id("camera");
  std::string file_path("");

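  // Overwrite the defaults with values from the private parameter server, if set.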
  private_node_.getParam("device_id", device_id);
  private_node_.getParam("frame_id", frame_id);
  private_node_.getParam("rate", hz);

  int32_t image_width(640);
  int32_t image_height(480);

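  // Create the capture wrapper that publishes frames on the "image_raw" topic.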
  camera_.reset(new Capture(camera_node_,
                            "image_raw",
                            PUBLISHER_BUFFER_SIZE,
                            frame_id));
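  // Prefer playback from a file when one is configured; otherwise open the camera device.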
  if (private_node_.getParam("file", file_path) &&
      file_path != "")
  {
    camera_->openFile(file_path);
  }
  else
  {
    camera_->open(device_id);
  }
  if (private_node_.getParam("image_width", image_width))
  {
    if (!camera_->setWidth(image_width))
    {
      ROS_WARN("fail to set image_width");
    }
  }
  if (private_node_.getParam("image_height", image_height))
  {
    if (!camera_->setHeight(image_height))
    {
      ROS_WARN("fail to set image_height");
    }
  }

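  // Forward OpenCV capture properties from identically named ROS parameters.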
  camera_->setPropertyFromParam(CV_CAP_PROP_POS_MSEC, "cv_cap_prop_pos_msec");
  camera_->setPropertyFromParam(CV_CAP_PROP_POS_AVI_RATIO, "cv_cap_prop_pos_avi_ratio");
  camera_->setPropertyFromParam(CV_CAP_PROP_FRAME_WIDTH, "cv_cap_prop_frame_width");
  camera_->setPropertyFromParam(CV_CAP_PROP_FRAME_HEIGHT, "cv_cap_prop_frame_height");
  camera_->setPropertyFromParam(CV_CAP_PROP_FPS, "cv_cap_prop_fps");
  camera_->setPropertyFromParam(CV_CAP_PROP_FOURCC, "cv_cap_prop_fourcc");
  camera_->setPropertyFromParam(CV_CAP_PROP_FRAME_COUNT, "cv_cap_prop_frame_count");
  camera_->setPropertyFromParam(CV_CAP_PROP_FORMAT, "cv_cap_prop_format");
  camera_->setPropertyFromParam(CV_CAP_PROP_MODE, "cv_cap_prop_mode");
  camera_->setPropertyFromParam(CV_CAP_PROP_BRIGHTNESS, "cv_cap_prop_brightness");
  camera_->setPropertyFromParam(CV_CAP_PROP_CONTRAST, "cv_cap_prop_contrast");
  camera_->setPropertyFromParam(CV_CAP_PROP_SATURATION, "cv_cap_prop_saturation");
  camera_->setPropertyFromParam(CV_CAP_PROP_HUE, "cv_cap_prop_hue");
  camera_->setPropertyFromParam(CV_CAP_PROP_GAIN, "cv_cap_prop_gain");
  camera_->setPropertyFromParam(CV_CAP_PROP_EXPOSURE, "cv_cap_prop_exposure");
  camera_->setPropertyFromParam(CV_CAP_PROP_CONVERT_RGB, "cv_cap_prop_convert_rgb");

  camera_->setPropertyFromParam(CV_CAP_PROP_RECTIFICATION, "cv_cap_prop_rectification");
  camera_->setPropertyFromParam(CV_CAP_PROP_ISO_SPEED, "cv_cap_prop_iso_speed");
#ifdef CV_CAP_PROP_WHITE_BALANCE_U
  camera_->setPropertyFromParam(CV_CAP_PROP_WHITE_BALANCE_U, "cv_cap_prop_white_balance_u");
#endif  // CV_CAP_PROP_WHITE_BALANCE_U
#ifdef CV_CAP_PROP_WHITE_BALANCE_V
  camera_->setPropertyFromParam(CV_CAP_PROP_WHITE_BALANCE_V, "cv_cap_prop_white_balance_v");
#endif  // CV_CAP_PROP_WHITE_BALANCE_V
#ifdef CV_CAP_PROP_BUFFERSIZE
  camera_->setPropertyFromParam(CV_CAP_PROP_BUFFERSIZE, "cv_cap_prop_buffersize");
#endif  // CV_CAP_PROP_BUFFERSIZE

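  // Loop rate used to throttle the driver's main publishing cycle.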
  rate_.reset(new ros::Rate(hz));
}
int main(int argc, char** argv)
{
    ros::init(argc, argv, "gaussian_tracker");
    ros::NodeHandle nh("~");

    /* ------------------------------ */
    /* - Parameters                 - */
    /* ------------------------------ */
    // tracker's main parameter container
    dbot::GaussianTrackerBuilder::Parameters params;

    // camera data
    dbot::CameraData::Resolution resolution;
    std::string camera_info_topic;
    std::string depth_image_topic;
    int downsampling_factor;

    // object data
    std::string object_package;
    std::string object_directory;
    std::vector<std::string> object_meshes;

    // parameter shorthand prefix
    std::string pre = "gaussian_filter/";

    /* ------------------------------ */
    /* - Read out parameters        - */
    /* ------------------------------ */
    // get object parameters
    /// \todo nh.getParam does not check whether the parameter exists in the
    /// config file. This is dangerous; we should use ri::read instead.
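    // A checked read might look like the following minimal sketch
    // (hypothetical helper; the actual ri::read signature may differ):
    //
    //   template <typename T>
    //   T read_required(const ros::NodeHandle& nh, const std::string& key)
    //   {
    //       T value;
    //       if (!nh.getParam(key, value))
    //           throw std::runtime_error("missing parameter: " + key);
    //       return value;
    //   }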

    nh.getParam("object/meshes", object_meshes);
    nh.getParam("object/package", object_package);
    nh.getParam("object/directory", object_directory);

    params.ori.package_path(ros::package::getPath(object_package));
    params.ori.directory(object_directory);
    params.ori.meshes(object_meshes);

    // get filter parameters
    nh.getParam(pre + "unscented_transform/alpha", params.ut_alpha);
    nh.getParam(pre + "moving_average_update_rate",
                params.moving_average_update_rate);
    nh.getParam(pre + "center_object_frame", params.center_object_frame);

    nh.getParam(pre + "observation/tail_weight",
                params.observation.tail_weight);
    nh.getParam(pre + "observation/bg_depth", params.observation.bg_depth);
    nh.getParam(pre + "observation/fg_noise_std",
                params.observation.fg_noise_std);
    nh.getParam(pre + "observation/bg_noise_std",
                params.observation.bg_noise_std);
    nh.getParam(pre + "observation/uniform_tail_max",
                params.observation.uniform_tail_max);
    nh.getParam(pre + "observation/uniform_tail_min",
                params.observation.uniform_tail_min);

    // state transition parameters
    nh.getParam(pre + "object_transition/linear_sigma_x",
                params.object_transition.linear_sigma_x);
    nh.getParam(pre + "object_transition/linear_sigma_y",
                params.object_transition.linear_sigma_y);
    nh.getParam(pre + "object_transition/linear_sigma_z",
                params.object_transition.linear_sigma_z);

    nh.getParam(pre + "object_transition/angular_sigma_x",
                params.object_transition.angular_sigma_x);
    nh.getParam(pre + "object_transition/angular_sigma_y",
                params.object_transition.angular_sigma_y);
    nh.getParam(pre + "object_transition/angular_sigma_z",
                params.object_transition.angular_sigma_z);

    nh.getParam(pre + "object_transition/velocity_factor",
                params.object_transition.velocity_factor);
    params.object_transition.part_count = object_meshes.size();

    // camera parameters
    nh.getParam("camera_info_topic", camera_info_topic);
    nh.getParam("depth_image_topic", depth_image_topic);
    nh.getParam("downsampling_factor", downsampling_factor);
    nh.getParam("resolution/width", resolution.width);
    nh.getParam("resolution/height", resolution.height);

    /* ------------------------------ */
    /* - Setup camera data          - */
    /* ------------------------------ */
    // setup camera data
    auto camera_data_provider = std::shared_ptr<dbot::CameraDataProvider>(
        new dbot::RosCameraDataProvider(nh,
                                        camera_info_topic,
                                        depth_image_topic,
                                        resolution,
                                        downsampling_factor,
                                        2.0));
    auto camera_data = std::make_shared<dbot::CameraData>(camera_data_provider);

    // finally, set number of pixels
    params.observation.sensors = camera_data->pixels();

    /* ------------------------------ */
    /* - Initialize interactively   - */
    /* ------------------------------ */
    opi::InteractiveMarkerInitializer object_initializer(
        camera_data->frame_id(),
        params.ori.package(),
        params.ori.directory(),
        params.ori.meshes(),
        {},
        true);
    if (!object_initializer.wait_for_object_poses())
    {
        ROS_INFO("Setting object poses was interrupted.");
        return 0;
    }

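    // Stack the interactively set poses into a single tracker state vector.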
    auto initial_ros_poses = object_initializer.poses();
    std::vector<Tracker::State> initial_poses;
    initial_poses.push_back(Tracker::State(params.ori.count_meshes()));
    int i = 0;
    for (auto& ros_pose : initial_ros_poses)
    {
        initial_poses[0].component(i++) = ri::to_pose_velocity_vector(ros_pose);
    }

    /* ------------------------------ */
    /* - Create the tracker         - */
    /* ------------------------------ */
    auto tracker_builder =
        dbot::GaussianTrackerBuilder(params, camera_data);

    auto tracker = tracker_builder.build();
    tracker->initialize(initial_poses);

    /* ------------------------------ */
    /* - Tracker publisher          - */
    /* ------------------------------ */
    int object_color[3];
    nh.getParam(pre + "object_color/R", object_color[0]);
    nh.getParam(pre + "object_color/G", object_color[1]);
    nh.getParam(pre + "object_color/B", object_color[2]);
    auto tracker_publisher = dbot::ObjectStatePublisher(
        params.ori, object_color[0], object_color[1], object_color[2]);

    /* ------------------------------ */
    /* - Create and run tracker     - */
    /* - node                       - */
    /* ------------------------------ */
    dbot::ObjectTrackerRos<dbot::GaussianTracker>
        ros_object_tracker(tracker, camera_data, params.ori.count_meshes());

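    // Each incoming depth image becomes an observation update for the tracker.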
    ros::Subscriber subscriber =
        nh.subscribe(depth_image_topic,
                     1,
                     &dbot::ObjectTrackerRos<
                         dbot::GaussianTracker>::update_obsrv,
                     &ros_object_tracker);

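    // Step the filter whenever a new observation is available and publish the estimate.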
    while (ros::ok())
    {
        if (ros_object_tracker.run_once())
        {
            tracker_publisher.publish(
                ros_object_tracker.current_state_messages());
        }
        ros::spinOnce();
    }

    return 0;
}
int main(int argc, char **argv){
  ros::init(argc, argv, "pplTracker");
  ros::NodeHandle nh;

  std::string configFilename = ros::package::getPath("people_tracker_denbyk") + "/init/openni_tracker.xml";
  genericUserCalibrationFileName = ros::package::getPath("people_tracker_denbyk") + "/init/GenericUserCalibration.bin";
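  // Pre-recorded user calibration, loaded so skeleton tracking can start without a calibration pose.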

  // OpenNI return status.
  XnStatus nRetVal;

  while (nh.ok())
  {
    // Initialize the OpenNI context from the XML configuration file.
    nRetVal = g_Context.InitFromXmlFile(configFilename.c_str());

    // If initialization failed, retry in a few seconds.
    if(nRetVal != XN_STATUS_OK)
    {
      ROS_INFO("InitFromXml failed: %s Retrying in 3 seconds...", xnGetStatusString(nRetVal));
      ros::Duration(3).sleep();

    }
    else
    {
      break;
    }
  }

  // Look for an existing depth generator node.
  nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_DEPTH, g_DepthGenerator);

  if(nRetVal != XN_STATUS_OK)
  {
    ROS_ERROR("Find depth generator failed: %s", xnGetStatusString(nRetVal));
  }

  // Look for an existing user generator node and store it in g_UserGenerator.
  nRetVal = g_Context.FindExistingNode(XN_NODE_TYPE_USER, g_UserGenerator);

  if (nRetVal != XN_STATUS_OK)
  {
    // Create the user generator on g_Context; if this fails, NITE is probably missing.
    nRetVal = g_UserGenerator.Create(g_Context);

    if (nRetVal != XN_STATUS_OK)
    {
      ROS_ERROR("NITE is likely missing: Please install NITE >= 1.5.2.21. Check the readme for download information. Error Info: User generator failed: %s", xnGetStatusString(nRetVal));
      return nRetVal;
    }
  }
  // Verify that the created user generator supports the skeleton capability.
  if (!g_UserGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON))
  {
    ROS_INFO("Supplied user generator doesn't support skeleton");
    return EXIT_FAILURE;
  }

  // Set the skeleton profile, i.e. which joints to track and which not;
  // here the upper profile covers the torso and head.
  g_UserGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_UPPER);

  // Register callbacks for users entering, exiting, and re-entering the scene.
  XnCallbackHandle hUserCallbacks;
  g_UserGenerator.RegisterUserCallbacks(User_NewUser, User_LostUser, NULL, hUserCallbacks);
  g_UserGenerator.RegisterToUserExit(User_OutOfScene, NULL, hUserCallbacks);
  g_UserGenerator.RegisterToUserReEnter(User_BackIntoScene, NULL, hUserCallbacks);

  // Start data generation on all nodes in the context.
  nRetVal = g_Context.StartGeneratingAll();

  if(nRetVal != XN_STATUS_OK)
  {
    ROS_ERROR("StartGenerating failed: %s", xnGetStatusString(nRetVal));
  }

  // Retrieve the camera_frame_id parameter from the ROS parameter server
  // and store it in frame_id, keeping the default if it is not set.
  std::string frame_id("camera_depth_frame");
  nh.getParam("camera_frame_id", frame_id);


  ROS_INFO("init ok");
  // Main loop.
  while(nh.ok())
  {
    // Block until new data is available and update all generator nodes.
    g_Context.WaitAndUpdateAll();
    // Publish the skeleton transforms relative to frame_id.
    publishTransforms(frame_id);
    // Sleep to throttle the loop to roughly 30 Hz.
    ros::Rate(30).sleep();
  }

  // Release OpenNI resources and exit.
  g_Context.Shutdown();
  return EXIT_SUCCESS;
}
int main(int argc, char** argv)
{
	ros::init(argc, argv, "follow_me");

	ros::NodeHandle nh;
	std::string user_to_track;

	std::string frame_id("camera_depth_frame");
	nh.getParam("camera_frame_id", frame_id);

	int skeleton_to_track = 0;
	ros::param::set("skeleton_to_track", skeleton_to_track);

	ROS_INFO("Waiting for user identity.");

    while(!ros::param::get("user_to_track", user_to_track) && nh.ok())
    {
    	ros::Duration(1).sleep();
    }

    tf::TransformListener listener;

    ros::Publisher pub = nh.advertise<geometry_msgs::Twist>("/kobra/cmd_vel", 1);

    while(nh.ok())
    {
    	ROS_INFO("Looking for %s's face.", user_to_track.c_str());

    	std::string skeleton_to_track_frame;

		while(nh.ok())							//Search continuously for a skeleton head very close to the designated user face.
		{
			std::vector<tf::StampedTransform> transforms;

			lookForEveryHeadTransform(listener, transforms, user_to_track);

			if(findClosestHeadToFace(transforms, skeleton_to_track_frame))
			{
				skeleton_to_track = changeFrameAndReturnIndex(skeleton_to_track_frame);
				ros::param::set("skeleton_to_track", skeleton_to_track);
				break;
			}

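			// No skeleton associated yet: publish zero velocity so the robot stays still.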
			geometry_msgs::Twist twist;
			twist.linear.x = twist.angular.z = 0;
			pub.publish(twist);

			ros::Rate(30).sleep();
		}

		ROS_INFO("User %s and skeleton %s associated. Start tracking.", user_to_track.c_str(), skeleton_to_track_frame.c_str());

		while(ros::param::get("skeleton_to_track", skeleton_to_track) && nh.ok())
		{
			if(skeleton_to_track == 0)
			{
				ROS_INFO("User %s and skeleton %s association lost. Stop tracking.", user_to_track.c_str(), skeleton_to_track_frame.c_str());
				break;
			}

			tf::StampedTransform transform;

			if(lookForSpecificBodyTransform(listener, frame_id, skeleton_to_track_frame, transform))
			{
				double distance = std::sqrt(std::pow(transform.getOrigin().getX(), 2) + std::pow(transform.getOrigin().getY(), 2) + std::pow(transform.getOrigin().getZ(), 2));

				// TODO: given the distance, derive the robot's velocity as a percentage of its maximum.

				geometry_msgs::Twist twist;

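				// Simple dead-band controller: keep the user roughly 1.35-1.65 m away.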
				if(distance < 1.35)
				{
					// Back away
					twist.linear.x = -0.4;
				}
				else if(distance > 1.65)
				{
					// Move closer
					twist.linear.x = 0.4;
				}

				if(transform.getOrigin().getY() > 0.2)
				{
					// Turn left
					twist.angular.z = 0.5;
				}
				else if(transform.getOrigin().getY() < -0.2)
				{
					// Turn right
					twist.angular.z = -0.5;
				}

				pub.publish(twist);
			}

			ros::Rate(30).sleep();
		}
    }

    ros::shutdown();

    return EXIT_SUCCESS;
}
int main(int argc, char** argv)
{
    ros::init(argc, argv, "particle_tracker");
    ros::NodeHandle nh("~");

    /* ---------------------------------------------------------------------- */
    /* Rao-Blackwellized Coordinate Particle Filter Object Tracker            */
    /*                                                                        */
    /* Ingredients:                                                           */
    /*   - ObjectTrackerRos                                                   */
    /*     - Tracker                                                          */
    /*       - Rbc Particle Filter Algorithm                                  */
    /*         - Object state transition model                                */
    /*         - Observation model                                            */
    /*       - Object model                                                   */
    /*       - Camera data                                                    */
    /*     - Tracker publisher to advertise the estimated state               */
    /*                                                                        */
    /*  Construction of the tracker will utilize a few builders and factories.*/
    /*  For that, we need the following builders/factories:                   */
    /*    - Object state transition model builder                             */
    /*    - Observation model builder to build GPU or CPU based models        */
    /*    - Filter builder                                                    */
    /* ---------------------------------------------------------------------- */

    // parameter shorthand prefix
    std::string pre = "particle_filter/";


    /* ------------------------------ */
    /* - Create the object model    - */
    /* ------------------------------ */
    // get object parameters
    std::string object_package;
    std::string object_directory;
    std::vector<std::string> object_meshes;

    /// \todo nh.getParam does not check whether the parameter exists in the
    /// config file. This is dangerous; we should use ri::read instead.

    nh.getParam("object/meshes", object_meshes);
    nh.getParam("object/package", object_package);
    nh.getParam("object/directory", object_directory);

    // Use the ORI to load the object model using the
    // SimpleWavefrontObjectLoader.
    dbot::ObjectResourceIdentifier ori;
    ori.package_path(ros::package::getPath(object_package));
    ori.directory(object_directory);
    ori.meshes(object_meshes);

    auto object_model_loader = std::shared_ptr<dbot::ObjectModelLoader>(
        new dbot::SimpleWavefrontObjectModelLoader(ori));

    // Load the model using the simple wavefront loader and center the frames
    // of all object part meshes.
    bool center_object_frame;
    nh.getParam(pre + "center_object_frame", center_object_frame);
    auto object_model = std::make_shared<dbot::ObjectModel>(
        object_model_loader, center_object_frame);

    /* ------------------------------ */
    /* - Setup camera data          - */
    /* ------------------------------ */
    int downsampling_factor;
    std::string camera_info_topic;
    std::string depth_image_topic;
    dbot::CameraData::Resolution resolution;
    nh.getParam("camera_info_topic", camera_info_topic);
    nh.getParam("depth_image_topic", depth_image_topic);
    nh.getParam("downsampling_factor", downsampling_factor);
    nh.getParam("resolution/width", resolution.width);
    nh.getParam("resolution/height", resolution.height);

    auto camera_data_provider = std::shared_ptr<dbot::CameraDataProvider>(
        new dbot::RosCameraDataProvider(nh,
                                        camera_info_topic,
                                        depth_image_topic,
                                        resolution,
                                        downsampling_factor,
                                        60.0));
    // Create camera data from the RosCameraDataProvider which takes the data
    // from a ros camera topic
    auto camera_data = std::make_shared<dbot::CameraData>(camera_data_provider);

    /* ------------------------------ */
    /* - Few types we will be using - */
    /* ------------------------------ */
    typedef osr::FreeFloatingRigidBodiesState<> State;
    typedef dbot::ParticleTracker Tracker;
    typedef dbot::ParticleTrackerBuilder<Tracker> TrackerBuilder;
    typedef TrackerBuilder::TransitionBuilder TransitionBuilder;
    typedef TrackerBuilder::SensorBuilder SensorBuilder;


    /* ------------------------------ */
    /* - State transition function  - */
    /* ------------------------------ */
    // We will use a linear state transition model built by the object
    // transition model builder. The linear model will generate a random walk.
    dbot::ObjectTransitionBuilder<State>::Parameters params_state;
    // state transition parameters
    nh.getParam(pre + "object_transition/linear_sigma_x",
                params_state.linear_sigma_x);
    nh.getParam(pre + "object_transition/linear_sigma_y",
                params_state.linear_sigma_y);
    nh.getParam(pre + "object_transition/linear_sigma_z",
                params_state.linear_sigma_z);

    nh.getParam(pre + "object_transition/angular_sigma_x",
                params_state.angular_sigma_x);
    nh.getParam(pre + "object_transition/angular_sigma_y",
                params_state.angular_sigma_y);
    nh.getParam(pre + "object_transition/angular_sigma_z",
                params_state.angular_sigma_z);

    nh.getParam(pre + "object_transition/velocity_factor",
                params_state.velocity_factor);
    params_state.part_count = object_meshes.size();

    auto state_trans_builder = std::shared_ptr<TransitionBuilder>(
        new dbot::ObjectTransitionBuilder<State>(params_state));

    /* ------------------------------ */
    /* - Observation model          - */
    /* ------------------------------ */
    dbot::RbSensorBuilder<State>::Parameters params_obsrv;
    nh.getParam(pre + "use_gpu", params_obsrv.use_gpu);

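    // The GPU and CPU back ends are configured with separate sample counts.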
    if (params_obsrv.use_gpu)
    {
        nh.getParam(pre + "gpu/sample_count", params_obsrv.sample_count);
    }
    else
    {
        nh.getParam(pre + "cpu/sample_count", params_obsrv.sample_count);
    }

    nh.getParam(pre + "observation/occlusion/p_occluded_visible",
                params_obsrv.occlusion.p_occluded_visible);
    nh.getParam(pre + "observation/occlusion/p_occluded_occluded",
                params_obsrv.occlusion.p_occluded_occluded);
    nh.getParam(pre + "observation/occlusion/initial_occlusion_prob",
                params_obsrv.occlusion.initial_occlusion_prob);

    nh.getParam(pre + "observation/kinect/tail_weight",
                params_obsrv.kinect.tail_weight);
    nh.getParam(pre + "observation/kinect/model_sigma",
                params_obsrv.kinect.model_sigma);
    nh.getParam(pre + "observation/kinect/sigma_factor",
                params_obsrv.kinect.sigma_factor);
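    // Assume depth frames arrive at 30 Hz.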
    params_obsrv.delta_time = 1. / 30.;

    // gpu only parameters
    nh.getParam(pre + "gpu/use_custom_shaders",
                params_obsrv.use_custom_shaders);
    nh.getParam(pre + "gpu/vertex_shader_file",
                params_obsrv.vertex_shader_file);
    nh.getParam(pre + "gpu/fragment_shader_file",
                params_obsrv.fragment_shader_file);
    nh.getParam(pre + "gpu/geometry_shader_file",
                params_obsrv.geometry_shader_file);

    auto sensor_builder =
        std::shared_ptr<SensorBuilder>(new dbot::RbSensorBuilder<State>(
            object_model, camera_data, params_obsrv));

    /* ------------------------------ */
    /* - Create Filter & Tracker    - */
    /* ------------------------------ */
    TrackerBuilder::Parameters params_tracker;
    params_tracker.evaluation_count = params_obsrv.sample_count;
    nh.getParam(pre + "moving_average_update_rate",
                params_tracker.moving_average_update_rate);
    nh.getParam(pre + "max_kl_divergence", params_tracker.max_kl_divergence);
    nh.getParam(pre + "center_object_frame",
                params_tracker.center_object_frame);

    auto tracker_builder = dbot::ParticleTrackerBuilder<Tracker>(
        state_trans_builder, sensor_builder, object_model, params_tracker);
    auto tracker = tracker_builder.build();

    dbot::ObjectTrackerRos<Tracker> ros_object_tracker(
        tracker, camera_data, ori.count_meshes());

    /* ------------------------------ */
    /* - Initialize interactively   - */
    /* ------------------------------ */
    opi::InteractiveMarkerInitializer object_initializer(
        camera_data->frame_id(),
        ori.package(),
        ori.directory(),
        ori.meshes(),
        {},
        true);
    if (!object_initializer.wait_for_object_poses())
    {
        ROS_INFO("Setting object poses was interrupted.");
        return 0;
    }

    auto initial_ros_poses = object_initializer.poses();
    std::vector<Tracker::State> initial_poses;
    initial_poses.push_back(Tracker::State(ori.count_meshes()));
    int i = 0;
    for (auto& ros_pose : initial_ros_poses)
    {
        initial_poses[0].component(i++) = ri::to_pose_velocity_vector(ros_pose);
    }

    tracker->initialize(initial_poses);

    /* ------------------------------ */
    /* - Tracker publisher          - */
    /* ------------------------------ */
    int object_color[3];
    nh.getParam(pre + "object_color/R", object_color[0]);
    nh.getParam(pre + "object_color/G", object_color[1]);
    nh.getParam(pre + "object_color/B", object_color[2]);
    auto tracker_publisher = dbot::ObjectStatePublisher(
        ori, object_color[0], object_color[1], object_color[2]);

    /* ------------------------------ */
    /* - Run the tracker            - */
    /* ------------------------------ */
    ros::Subscriber subscriber =
        nh.subscribe(depth_image_topic,
                     1,
                     &dbot::ObjectTrackerRos<Tracker>::update_obsrv,
                     &ros_object_tracker);
    (void)subscriber;

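    // Service ROS callbacks on two background threads while the main loop runs the filter.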
    ros::AsyncSpinner spinner(2);
    spinner.start();

    while (ros::ok())
    {
        if (ros_object_tracker.run_once())
        {
            tracker_publisher.publish(
                ros_object_tracker.current_state_messages());
        }
    }

    return 0;
}