Example #1
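A note on the pattern shared by all examples in this collection: ROS_WARN_THROTTLE(period, format, ...) emits a WARN-level log message at most once every `period` seconds, so a failure condition hit inside a fast loop or high-rate callback does not flood the log. A minimal, self-contained sketch, assuming a standard roscpp setup; the node name and message text are illustrative:

#include <ros/ros.h>

int main(int argc, char** argv)
{
    ros::init(argc, argv, "throttle_demo"); // node name is illustrative
    ros::NodeHandle nh;

    ros::Rate r(100); // spin at 100 Hz
    while (ros::ok())
    {
        // Printed at most once every 5 seconds, however fast the loop runs
        ROS_WARN_THROTTLE(5.0, "Still waiting for sensor data...");
        r.sleep();
    }
    return 0;
}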
bool Camera::InitCameraDevice(bool firstTime)
{
	if (!firstTime)
	{
		delete cap;
		cap = new VideoCapture(devNumber); // open the camera
	}
	if (!cap->isOpened())  // check if we succeeded
	{
		ROS_WARN_THROTTLE(10, "Cant Open Camera Device!");
		return false;
	}

	if (cap->set(CV_CAP_PROP_FRAME_WIDTH, params.camera.width->get())
			| cap->set(CV_CAP_PROP_FRAME_HEIGHT, params.camera.height->get()))
	{
		ROS_WARN_THROTTLE(10, "Cant set image size values!");
	}


	if (-1 == system(paramStr))
	{
		ROS_WARN_THROTTLE(10, "Cant find or set v4l2-ctrl values!");
	}


	return true;
}
void visual_slam::OpenNIListener::retrieveTransformations(std_msgs::Header depth_header, FrameData* new_frame)
{
  std::string base_frame, odom_frame, gt_frame;
  _node.getParam("base_frame_name", base_frame);
  _node.getParam("odom_frame_name", odom_frame);
  _node.getParam("ground_truth_frame_name",gt_frame);

  std::string depth_frame_id = depth_header.frame_id;
  ros::Time depth_time = depth_header.stamp;
  tf::StampedTransform base2points;

  try{
    tflistener_->waitForTransform(base_frame, depth_frame_id, depth_time, ros::Duration(0.005));
    tflistener_->lookupTransform(base_frame, depth_frame_id, depth_time, base2points);
    base2points.stamp_ = depth_time;
  }
  catch (tf::TransformException ex){
    base2points.setRotation(tf::createQuaternionFromRPY(-1.57,0,-1.57));
    base2points.setOrigin(tf::Point(0,-0.04,0));
    base2points.stamp_ = depth_time;
    base2points.frame_id_ = base_frame;
    base2points.child_frame_id_ = depth_frame_id;
  }
  new_frame->setBase2PointsTransform(base2points);

  if(!gt_frame.empty()){
    tf::StampedTransform ground_truth_transform;
    try{
      tflistener_->waitForTransform(gt_frame, "/openni_camera", depth_time, ros::Duration(0.005));
      tflistener_->lookupTransform(gt_frame, "/openni_camera", depth_time, ground_truth_transform);
      ground_truth_transform.stamp_ = depth_time;
      tf::StampedTransform b2p;
      b2p.setRotation(tf::createQuaternionFromRPY(-1.57,0,-1.57));
      b2p.setOrigin(tf::Point(0,-0.04,0));
      ground_truth_transform *= b2p;
    }
    catch (tf::TransformException ex){
      ROS_WARN_THROTTLE(5, "%s - Using Identity for Ground Truth (This message is throttled to 1 per 5 seconds)",ex.what());
      ground_truth_transform = tf::StampedTransform(tf::Transform::getIdentity(), depth_time, "missing_ground_truth", "/openni_camera");
    }
    //printTransform("Ground Truth", ground_truth_transform);
    new_frame->setGroundTruthTransform(ground_truth_transform);
  }
  if(!odom_frame.empty()){
    tf::StampedTransform current_odom_transform;
    try{
      tflistener_->waitForTransform(depth_frame_id, odom_frame, depth_time, ros::Duration(0.005));
      tflistener_->lookupTransform( depth_frame_id, odom_frame, depth_time, current_odom_transform);
    }
    catch (tf::TransformException ex){
      ROS_WARN_THROTTLE(5, "%s - No odometry available (This message is throttled to 1 per 5 seconds)",ex.what());
      current_odom_transform = tf::StampedTransform(tf::Transform::getIdentity(), depth_time, "missing_odometry", depth_frame_id);
      current_odom_transform.stamp_ = depth_time;
    }
    //printTransform("Odometry", current_odom_transform);
    new_frame->setOdomTransform(current_odom_transform);
  }
}
void AlgorithmManager::stateCallback(edo_core_msgs::JointStateArrayConstPtr msg)
{
	if (msg->joints.size() != joints_number_) {
		ROS_WARN_THROTTLE(10, "Unacceptable state, impossible to initialize ORL...");
		return;
	}
	//ROS_WARN("dati %d, %d, %d, %d", strk[0][0], strk[1][0], strk[0][1], strk[1][1]);
	current_state_.unit_type = ORL_POSITION_LINK_DEGREE;
	for(size_t i = 0; i < msg->joints.size(); i++)
	{
		if (msg->joints[i].position <= (float)strk_[0][i]) {
			if (fabs(msg->joints[i].position - (float)strk_[0][i]) <= state_saturation_threshold_) {
				current_state_.value[i] = (float)strk_[0][i];
				ROS_WARN_THROTTLE(2, "joint %d: position saturated", i+1);
			}
			else
			{
				ROS_ERROR("Error, strk superato");
				//algorithm_mode_ = BLOCKED;//INITIALIZED;
				current_state_.value[i] = (float)strk_[0][i];
				hold_position_.value[i] = current_state_.value[i];
				//memcpy(&hold_position_, &current_state_, sizeof(ORL_joint_value));
			}
		}
		else if (msg->joints[i].position >= (float)strk_[1][i]) {
			if (fabs(msg->joints[i].position - (float)strk_[1][i]) <= state_saturation_threshold_) {
				current_state_.value[i] = (float)strk_[1][i];
				ROS_WARN_THROTTLE(2, "joint %d: position saturated", i+1);
			}
			else
			{
				ROS_ERROR("Error, strk superato");
				//algorithm_mode_ = BLOCKED;//INITIALIZED;
				current_state_.value[i] = (float)strk_[1][i];
				hold_position_.value[i] = current_state_.value[i];
				//memcpy(&hold_position_, &current_state_, sizeof(ORL_joint_value));
			}
		}
		else
		{
			current_state_.value[i] = msg->joints[i].position;
		}
	}
	if(algorithm_mode_ == UNINITIALIZED)
		initialize(msg);

	//on every state update, compute the corresponding Cartesian pose
	cartesian_pose_pub_.publish(computeCartesianPose(&current_state_));

}
/**
 * Set a support coefficient. The coefficients are used as weights in the mixing
 * calculations (see SingleSupportModel::setCoefficient()).
 *
 * @param link The tip link used as base of the support model
 *   (e.g. right_foot_link)
 * @param coeff Support coefficient (positive or zero)
 **/
void RobotModel::setSupportCoefficient(const boost::shared_ptr<const urdf::Link>& link, double coeff)
{
	boost::shared_ptr<SingleSupportModel> model;
	for(size_t i = 0; i < m_models.size(); ++i)
	{
		if(m_models[i]->link() == link)
		{
			model = m_models[i];
			break;
		}
	}

	if(!model)
	{
		ROS_ERROR("setSupportCoefficient called with non-tip link '%s'", link->name.c_str());
		assert(0);
		return; // don't dereference a null model in release builds
	}

	model->setCoefficient(coeff);

	// We need to normalize the coefficients to a sum of 1.
	double total = 0;
	for(size_t i = 0; i < m_models.size(); ++i)
		total += m_models[i]->coefficient();

	for(size_t i = 0; i < m_models.size(); ++i)
		m_models[i]->normalize(total);

	// Publish plots of the support coefficients
	ros::Time now = ros::Time::now();

	if(total == 0)
		ROS_WARN_THROTTLE(1.0, "setSupportCoefficient: %s with %f => total 0", link->name.c_str(), coeff);
}
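To make the normalization step above concrete: with coefficients {2, 1, 1}, the total is 4 and the normalized weights are {0.5, 0.25, 0.25}. A minimal sketch using plain doubles in place of the SingleSupportModel class (which is assumed, not shown):

#include <cstdio>
#include <vector>

int main()
{
    std::vector<double> coeffs = {2.0, 1.0, 1.0}; // illustrative support weights

    double total = 0;
    for (double c : coeffs)
        total += c;

    // Normalize to a sum of 1, guarding against an all-zero total
    if (total > 0)
        for (double& c : coeffs)
            c /= total;

    for (double c : coeffs)
        std::printf("%.2f ", c); // prints: 0.50 0.25 0.25
    return 0;
}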
 /** This callback is triggered when an obstacle was detected.
   *
   * The current action (if any) will be cancelled. */
 void obstaclesCallback(const amr_msgs::ObstacleConstPtr& obstacle)
 {
   ROS_WARN_THROTTLE(1, "An obstacle was detected. Will stop the robot and cancel the current action.");
   if (move_to_server_->isActive())
     move_to_server_->setAborted(amr_msgs::MoveToResult(), "Aborted. An obstacle was detected.");
   publishZeroVelocity();
 }
Example #6
void
Tracker::find_object_in_scene()
{
    storage->readSceneProcessed(scene);
    if (scene->points.size() > model->points.size()/3)
    {
        pcl::CentroidPoint<PX> tc;
        for (size_t i=0; i<scene->points.size(); ++i)
            tc.add(scene->points[i]);
        PX target_centroid, mc_transformed;
        mc_transformed = pcl::transformPoint(model_centroid, Eigen::Affine3f(*transform));
        tc.get(target_centroid);
        Eigen::Matrix4f Tcen, guess;
        Tcen << 1, 0, 0,  (target_centroid.x - mc_transformed.x),
             0, 1, 0,  (target_centroid.y - mc_transformed.y),
             0, 0, 1,  (target_centroid.z - mc_transformed.z),
             0, 0, 0,  1;
        guess = Tcen*(*transform);
        *transform = guess;
        this->started = true;
        this->lost_it = false;
        this->error_count = 0;
        this->centroid_counter = 0;
        ROS_INFO("[Tracker::%s]\tFound something that could be the object, trying to track that",__func__);
        return;
    }
    else
    {
        ROS_WARN_THROTTLE(30, "[Tracker::%s]\tNothing is found on scene yet...",__func__);
        return;
    }
}
    /**
     * \brief create a branch marker between two nodes and store it into marker array
     */
    virtual void createBranchMarker(const Chart &child, const Chart &parent)
    {
        if (!markers){
            ROS_WARN_THROTTLE(30,"[ExplorerBase::%s]\tNo marker array to update is provided, visualization is disabled.",__func__);
            return;
        }
        geometry_msgs::Point e;
        geometry_msgs::Point s;

        visualization_msgs::Marker branch;
        branch.header.frame_id = mark_frame;
        branch.header.stamp = ros::Time();
        branch.lifetime = ros::Duration(0.5);
        branch.ns = "Atlas Branches";
        //We need a unique branch id, but branches don't carry one.
        //Let's use the Cantor pairing function: 0.5(a+b)(a+b+1)+b
        branch.id = 0.5*(child.getId() + parent.getId())*(child.getId()+parent.getId()+1) + parent.getId();
        branch.type = visualization_msgs::Marker::LINE_STRIP;
        branch.action = visualization_msgs::Marker::ADD;
        s.x = child.getCenter()[0];
        s.y = child.getCenter()[1];
        s.z = child.getCenter()[2];
        branch.points.push_back(s);
        e.x = parent.getCenter()[0];
        e.y = parent.getCenter()[1];
        e.z = parent.getCenter()[2];
        branch.points.push_back(e);
        branch.scale.x = 0.005;
        branch.color.a = 1.0;
        branch.color.r = 0.0;
        branch.color.g = 0.0;
        branch.color.b = 0.9;
        std::lock_guard<std::mutex> guard(*mtx_ptr);
        markers->markers.push_back(branch);
    }
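The id computed above uses the Cantor pairing function named in the comment, which maps a pair of non-negative integers to a unique integer. A standalone sketch of the formula (the helper name is mine, not from the source); note the pairing is order-sensitive, so (child, parent) and (parent, child) yield different ids:

#include <cassert>
#include <cstdint>

// Cantor pairing: (a + b) * (a + b + 1) / 2 + b.
// (a + b) * (a + b + 1) is always even, so integer division is exact.
std::int64_t cantor_pair(std::int64_t a, std::int64_t b)
{
    return (a + b) * (a + b + 1) / 2 + b;
}

int main()
{
    assert(cantor_pair(0, 0) == 0);
    assert(cantor_pair(1, 2) == 8); // (3 * 4) / 2 + 2
    assert(cantor_pair(2, 1) == 7); // order matters
    return 0;
}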
 void open_usb() {
     ROS_INFO("Connecting to %s...", port.c_str());
     ros::Time start = ros::Time::now();
     double notify_every = 10.0;
     double check_every = 0.25;
     std::string last_msg;
     while (ros::ok()) {
         try {
             ser->Open(port.c_str());
             ROS_INFO("Connected to %s", port.c_str());
             break;
         } catch (USBSerial::Exception &e) {
             last_msg = e.what();
         }
         ros::Duration(check_every).sleep();
         double dur = (ros::Time::now() - start).toSec();
         if (dur > notify_every) {
             ROS_WARN_THROTTLE(notify_every,
                               "Haven't connected to %s in %.2f seconds."
                                       "  Last error=\n%s",
                               port.c_str(), dur, last_msg.c_str());
         }
         publishTf();
     }
 }
// copied from channelcontroller
double CalibrateAction::getDistanceAtPose(const tf::Pose & pose, bool* in_bounds) const
{
    // determine current dist
    int pose_x, pose_y;
    costmap_.worldToMapNoBounds(pose.getOrigin().x(), pose.getOrigin().y(),
            pose_x, pose_y);
    //ROS_INFO("pose_x: %i, pose_y: %i", pose_x, pose_y);
    if(pose_x < 0 || pose_y < 0 ||
            pose_x >= (int)voronoi_.getSizeX() || pose_y >= (int)voronoi_.getSizeY()) {
        if(in_bounds == NULL) {
            // only warn if in_bounds isn't checked externally
            ROS_WARN_THROTTLE(1.0, "%s: Pose out of voronoi bounds (%.2f, %.2f) = (%d, %d)", __func__,
                    pose.getOrigin().x(), pose.getOrigin().y(), pose_x, pose_y);
        } else {
            *in_bounds = false;
        }
        return 0.0;
    }
    if(in_bounds)  {
        *in_bounds = true;
    }
    float dist = voronoi_.getDistance(pose_x, pose_y);
    dist *= costmap_.getResolution();
    return dist;
}
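The last two lines of getDistanceAtPose convert the voronoi distance from grid cells to meters by multiplying with the costmap resolution. A hedged worked example (both numbers are illustrative):

#include <cstdio>

int main()
{
    float dist_cells = 12.0f;  // distance from the voronoi grid, in cells
    double resolution = 0.05;  // costmap resolution in meters per cell
    double dist_m = dist_cells * resolution;
    std::printf("%.2f m\n", dist_m); // prints: 0.60 m
    return 0;
}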
// verify input: does the vector meet the min/max jump step?
bool validPoint(DataContainer * container, std::vector<double>& v_new, std::vector<double>& v_before)
{
  ROS_WARN("new avg\n" );
  fflush(stdout);
  // no validation
  if (container->getInt(13) == 0) {
    return true;
  }
  // otherwise calculate distances:
  // minimum distance to previous steering point, jump range = [min,max]
  double min = 10;
  // maximum ...
  double max = 500;
  // distance between new and last point
  double distance =
      sqrt((v_new[3] - v_before[3]) * (v_new[3] - v_before[3]) + (v_new[4] - v_before[4]) * (v_new[4] - v_before[4]) +
           (v_new[5] - v_before[5]) * (v_new[5] - v_before[5]));

  if (distance < min)
  {
    // endpoint arrived, no micro-adjustments below min
    ROS_DEBUG_THROTTLE(1, NNAME ": below jump range     %f < %f [mm]", distance, min);
    return false;
  }
  else if (distance > max)
  {
    // distance exceeds max, thus is out of jump range
    ROS_WARN_THROTTLE(1, NNAME ": exceeding jump range %f > %f [mm]", distance, max);
    return false;
  }
  // otherwise all ok
  return true;
}
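To make the jump-range gate concrete, here is a minimal sketch with the same [min, max] = [10, 500] mm bounds; the two points are illustrative:

#include <cmath>
#include <cstdio>

int main()
{
    // distance between two steering points (x, y, z) in mm
    double a[3] = {0, 0, 0}, b[3] = {3, 4, 0}; // illustrative
    double d = std::sqrt((b[0]-a[0])*(b[0]-a[0]) +
                         (b[1]-a[1])*(b[1]-a[1]) +
                         (b[2]-a[2])*(b[2]-a[2])); // = 5 mm
    double min = 10, max = 500;
    if (d < min)
        std::printf("rejected: %.1f mm is below the jump range\n", d);
    else if (d > max)
        std::printf("rejected: %.1f mm exceeds the jump range\n", d);
    else
        std::printf("accepted: %.1f mm\n", d);
    return 0;
}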
 /**
  * \brief highlight solution path
  */
 virtual void highlightSolution(const std::vector<std::size_t> &path)
 {
     if (!markers){
         ROS_WARN_THROTTLE(30,"[ExplorerBase::%s]\tNo marker array to update is provided, visualization is disabled.",__func__);
         return;
     }
     //ids of branches to color
     std::vector<std::size_t> cons;
     cons.resize(path.size() -1);
     for (std::size_t i=0; i<cons.size(); ++i)
         cons.at(i) = 0.5*(path.at(i) + path.at(i+1))*(path.at(i) + path.at(i+1) + 1) + path.at(i+1);
     std::lock_guard<std::mutex> guard(*mtx_ptr);
     for (std::size_t i =0; i< markers->markers.size(); ++i)
     {
         if (markers->markers.at(i).ns.compare("Atlas Nodes")==0)
             for (const auto& id: path)
                 if (markers->markers.at(i).id == id){
                     markers->markers.at(i).color.a = 0.75;
                     markers->markers.at(i).color.r = 0.0;
                     markers->markers.at(i).color.b = 0.05;
                     markers->markers.at(i).color.g = 0.95;
                 }
         if (markers->markers.at(i).ns.compare("Atlas Branches")==0)
             for (const auto& id: cons)
                 if (markers->markers.at(i).id == id){
                     markers->markers.at(i).color.a = 1.0;
                     markers->markers.at(i).color.r = 1.0;
                     markers->markers.at(i).color.b = 0.0;
                     markers->markers.at(i).color.g = 0.0;
                 }
     }
 }
int
main(int argc, char **argv)
{
	ros::init(argc, argv, "rcll_fawkes_sim");

	ros::NodeHandle n;

	std::string  cfg_fawkes_host_;
	int          cfg_fawkes_port_;
	std::shared_ptr<fawkes::BlackBoard> blackboard_;
	std::shared_ptr<RcllFawkesSimNode> node;

	GET_PRIV_PARAM(fawkes_host);
	GET_PRIV_PARAM(fawkes_port);

	while (ros::ok()) {
		if (!blackboard_) {
			try {
				blackboard_ =
					std::make_shared<fawkes::RemoteBlackBoard>(cfg_fawkes_host_.c_str(), cfg_fawkes_port_);
				node = std::make_shared<RcllFawkesSimNode>(n, blackboard_);
				node->init();
				ROS_INFO("%s: Blackboard connected and initialized", ros::this_node::getName().c_str());
			} catch (fawkes::Exception &e) {
				ROS_WARN_THROTTLE(10, "%s: Initialization failed, retrying", ros::this_node::getName().c_str());
				if (node) {
					node->finalize();
					node.reset();
				}
				blackboard_.reset();
			}
		} else if (! blackboard_->is_alive()) {
			ROS_WARN_THROTTLE(30, "%s: blackboard connection lost, retrying", ros::this_node::getName().c_str());
			if (blackboard_->try_aliveness_restore()) {
				ROS_INFO("%s: Blackboard re-connected", ros::this_node::getName().c_str());
			}
		}
		
		ros::getGlobalCallbackQueue()->callAvailable(ros::WallDuration(0.1));
	}

	if (node) node->finalize();

	return 0;
}
Example #13
void estop(const std_msgs::Bool & input)
{
    if(!input.data) {
        stop_robot();
        ROS_WARN_THROTTLE(5.0, "Stopping Robot due to EStop pressed (via msg).");
    }

    lastDataTime = ros::Time::now();
}
    void timercb(const ros::TimerEvent& e)
	{
	    static int num_delays = 0;
	    ROS_DEBUG("coordinator timercb triggered");
	    if (gen_flag)
	    {
		// Generate the robot ordering vector
		gen_flag = generate_order();
		return;
	    }

	    // get operating condition
	    if (ros::param::has("/operating_condition"))
		ros::param::get("/operating_condition", operating_condition);
	    else
	    {
		operating_condition = 4;
		ros::param::set("/operating_condition", operating_condition);
	    }
	    
	    // check to see if we are in run state
	    if(operating_condition == 1 || operating_condition == 2)
	    {
		if(calibrated_flag)
		{
		    if (num_delays < NUM_FRAME_DELAYS)
		    {
			num_delays++;
			return;
		    }
		    else if (num_delays < NUM_EKF_INITS+NUM_FRAME_DELAYS)
			process_robots(1);
		    else
			process_robots(operating_condition);
		    num_delays++;
		}
		return;
	    }
	    
	    // are we in idle or stop condition?
	    else if(operating_condition == 0 || operating_condition == 3)
		ROS_DEBUG_THROTTLE(1,"Coordinator node is idle due to operating condition");

	    // are we in emergency stop condition?
	    else if(operating_condition == 4)
		ROS_WARN_THROTTLE(1,"Emergency Stop Requested");

	    // otherwise something terrible has happened
	    else
		ROS_ERROR("Invalid value for operating_condition");

	    calibrated_flag = false;
	    calibrate_count = 0;
	    num_delays = 0;
	    return;
	}
Example #15
void kobuki_core(const kobuki_msgs::SensorState & sensors)
{
    bool estop_pressed = (sensors.digital_input & kobuki_msgs::SensorState::DIGITAL_INPUT3)
        == kobuki_msgs::SensorState::DIGITAL_INPUT3;
    if(estop_pressed) {
        stop_robot();
        ROS_WARN_THROTTLE(5.0, "Stopping Robot due to EStop pressed.");
    }

    lastDataTime = ros::Time::now();
}
bool FrobitInterface::all_ok(void)
{
	active = true;
	if ((ros::Time::now() - messages.cmd_vel_left.header.stamp).toSec() > parameters.timeout)
	{
		ROS_WARN_THROTTLE(1, "Time for left cmd_vel is out of date");
		active = false;
	}

	if ((ros::Time::now() - messages.cmd_vel_right.header.stamp).toSec() > parameters.timeout)
	{
		ROS_WARN_THROTTLE(1, "Time for right cmd_vel is out of date");
		active = false;
	}

	if (ros::Time::now() > last_deadman_received + ros::Duration(0.2))
	{
		deadman = false;
	}
	return active && deadman;
}
Example #17
double Camera::TakeCapture()
{
	if (params.camera.devNumber->get() != devNumber)
	{
		devNumber = params.camera.devNumber->get();
		sprintf(devStr, "/dev/video%d", devNumber);
		sprintf(paramStr,
				"v4l2ctrl -d /dev/video%d -l /nimbro/share/launch/config/vision/logitechConfig.txt",
				devNumber);
		sprintf(paramDefStr,"v4l2ctrl -d /dev/video%d -l /nimbro/share/launch/config/vision/logitechConfig_default.txt ",devNumber);
		usleep(1000000);
		InitCameraDevice(false);
	}

	int camFd = open(devStr, O_RDONLY);
	if (camFd != -1)
	{
		close(camFd);
	}
	else
	{
		usleep(1000000);
		InitCameraDevice(false);
		return -1;
	}

	*cap >> rawImage;
	if (rawImage.empty())
	{
		ROS_WARN_THROTTLE(10, "Failed to get capture!");
		return -1;
	}
	if (params.camera.flipHor->get() && params.camera.flipVer->get())
	{
		flip(rawImage, rawImage, -1);
	}
	else
	{
		if (params.camera.flipVer->get())
		{
			flip(rawImage, rawImage, 0);
		}
		else if (params.camera.flipHor->get())
		{
			flip(rawImage, rawImage, 1);
		}
	}
	takenImg_pub.publish(rawImage,MatPublisher::bgr);
	return 1;
}
void RTKRobotArm::setJoints(const Eigen::VectorXd& joints) {
	if(joints.size() != numdof) {
		ROS_WARN_THROTTLE(0.1, "Size of joints not same as DOF");
		return;
	}
	// Update RTK robot
	for(int i=0; i<numdof; ++i) {
			full_robot_state[mapping[i]] = joints[i];
	}
	mSensorsGroup.SetJointPositions(full_robot_state);
	mSensorsGroup.WriteSensors();
	mRobot->UpdateLinks();
	mKinematicChain.Update();
}
void joy_cb(const sensor_msgs::Joy::ConstPtr& joy) {
  int naxes = joy->axes.size();
  if (naxes <= min_naxes_exp) {
    ROS_WARN_THROTTLE(1, "Got a joy message with %i axes while expecting at least %i",
                      naxes, min_naxes_exp);
    return;
  }
  geometry_msgs::Twist vel;
  if (0 <= axis_linear)
    vel.linear.x = ((joy->axes[axis_linear]-offset_linear) * scale_linear);
  if (0 <= axis_angular)
    vel.angular.z = ((joy->axes[axis_angular]-offset_angular) * scale_angular);
  cmd_vel_pub.publish(vel);
} // end joy_cb();
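The velocity mapping in joy_cb is linear in the axis value: vel = (axis - offset) * scale. A hedged worked example with illustrative parameter values:

#include <cstdio>

int main()
{
    double axis_value = 0.8; // raw joystick axis reading in [-1, 1]
    double offset = 0.0;     // axis value that maps to zero velocity
    double scale = 0.5;      // velocity at full deflection, in m/s
    double vel = (axis_value - offset) * scale;
    std::printf("commanded velocity: %.2f m/s\n", vel); // 0.40 m/s
    return 0;
}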
Example #20
int main (int argc, char **argv)
{
    ros::init(argc, argv, "estop_safety_controller");
    ros::NodeHandle nh;

    ros::NodeHandle nhPriv("~");

    bool auto_estop = true;
    nhPriv.param("auto_estop", auto_estop, auto_estop);
    bool use_kobuki_din3_estop = false;
    nhPriv.param("use_kobuki_din3_estop", use_kobuki_din3_estop, use_kobuki_din3_estop);

    ros::Subscriber sub = nh.subscribe ("estop", 1, estop);
    ros::Subscriber subKobuki;
    if(use_kobuki_din3_estop)
        subKobuki = nh.subscribe("mobile_base/sensors/core", 1, kobuki_core);
    pub = nh.advertise<geometry_msgs::Twist>("cmd_vel_mux/input/estop", 1);

    ROS_INFO("estop_safety_controller: auto_estop: %s use_kobuki_din3_estop: %s",
            auto_estop ? "enabled" : "disabled", use_kobuki_din3_estop ? "enabled" : "disabled");

    ros::Rate r(10);
    while (ros::ok()) {
        ros::Duration timeSinceLastData = ros::Time::now() - lastDataTime;
        if(timeSinceLastData > ros::Duration(1.0)) {
            if(auto_estop) {
                ROS_WARN_THROTTLE(1.0, "No EStop data available - Stopping robot!");
                stop_robot();
            } else {
                ROS_WARN_THROTTLE(5.0, "No EStop data available!");
            }
        }

        ros::spinOnce();
        r.sleep();
    }
}
    bool generate_order(void)
	{
	    // This function looks at the starting position of each
	    // robot and determines the order in which they will be
	    // seen by the Kinect: from the highest x-value (in
	    // /oriented_optimization_frame) to the lowest.
	    std::vector<double> pos;
	    double tmp;
	    ref_ord.clear();
	    for (int j=0; j<nr; j++)
	    {
		std::stringstream ss;
		ss << "/robot_" << j+1 << "/robot_x0";
		if (ros::param::has(ss.str()))
		    ros::param::get(ss.str(), tmp);
		else {
		    ROS_WARN_THROTTLE(1, "Cannot determine ordering!");
		    return true;
		}

		pos.push_back(tmp);		    
		ref_ord.push_back(j+1);
	    }

	    // now we can sort the stuff
	    int keyi=0;
	    double key=0;
	    int j=0;
	    
	    for (unsigned int i=1; i<pos.size(); ++i) 
	    {
		key= pos[i];
		keyi = ref_ord[i];
		j = i-1;
		while((j >= 0) && (pos[j] < key))
		{
		    pos[j+1] = pos[j];
		    ref_ord[j+1] = ref_ord[j];
		    j -= 1;
		}
		pos[j+1]=key;
		ref_ord[j+1]=keyi;
	    }

	    return false;
	}
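The insertion sort above orders robots by descending x-position while keeping their indices in ref_ord. A minimal sketch of the same ordering using std::sort over index/value pairs (names are mine, not from the source):

#include <algorithm>
#include <cstdio>
#include <utility>
#include <vector>

int main()
{
    // robot index (1-based) paired with its x-position
    std::vector<std::pair<int, double>> robots = {
        {1, 0.3}, {2, 1.7}, {3, -0.5}};

    // sort by descending x, mirroring the insertion sort above
    std::sort(robots.begin(), robots.end(),
              [](const std::pair<int, double>& a,
                 const std::pair<int, double>& b) { return a.second > b.second; });

    for (const auto& r : robots)
        std::printf("robot_%d (x=%.1f)\n", r.first, r.second); // order: 2, 1, 3
    return 0;
}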
void QualisysDriver::handleFrame() {
  // Number of rigid bodies
  int body_count = prt_packet->Get6DOFEulerBodyCount();
  // Assign each subject with a thread
  map<string, boost::shared_ptr<boost::thread> > subject_threads;

  for (int i = 0; i< body_count; ++i) {
    string subject_name(
        port_protocol.Get6DOFBodyName(i));

    // Process the subject if required
    if (model_set.empty() || model_set.count(subject_name)) {
      // Create a new subject if it does not exist
      if (subjects.find(subject_name) == subjects.end()) {
        subjects[subject_name] = Subject::SubjectPtr(
            new Subject(&nh, subject_name, fixed_frame_id));
        subjects[subject_name]->setParameters(
            process_noise, measurement_noise, frame_rate);
      }
      // Handle the subject in a different thread
      subject_threads[subject_name] =
        boost::shared_ptr<boost::thread>(
          new boost::thread(&QualisysDriver::handleSubject, this, i));
      //handleSubject(i);
    }
  }

  // Wait for all the threads to stop
  for (auto it = subject_threads.begin();
      it != subject_threads.end(); ++it) {
    it->second->join();
  }

  // Send out warnings
  for (auto it = subjects.begin();
      it != subjects.end(); ++it) {
    Subject::Status status = it->second->getStatus();
    if (status == Subject::LOST)
      ROS_WARN_THROTTLE(1, "Lose track of subject %s", (it->first).c_str());
    else if (status == Subject::INITIALIZING)
      ROS_WARN("Initialize subject %s", (it->first).c_str());
  }

  return;
}
  void RealtimeClock::loop()
  {
    ros::Rate r(750);
    while (running_)
    {
      // get lock
      lock();

      // store system time
      system_time_ = ros::Time::now();
      
      // warning, using non-locked 'lock_misses_', but it's just for debugging
      if (lock_misses_ > 100)
	ROS_WARN_THROTTLE(1.0, "Time estimator has trouble transferring data between non-RT and RT");

      // release lock
      mutex_.unlock();
      r.sleep();
    }
  }
Example #24
void
Tracker::spinOnce()
{
    if (started){
        auto begin_time=std::chrono::high_resolution_clock::now();
        track();
        update_markers();
        publish_markers();
        broadcast_tf();
        auto end_time=std::chrono::high_resolution_clock::now();
        auto elapsed_time=std::chrono::duration_cast<std::chrono::milliseconds>(end_time - begin_time).count();
        ROS_INFO_THROTTLE(10,"[Tracker::%s]\tStep time: %ld ms.",__func__,elapsed_time);
    }
    else if (lost_it){
        find_object_in_scene();
    }
    else{
        if(!storage->readObjNames(est_names))
            ROS_WARN_THROTTLE(30,"[Tracker::%s]\tLooks like no Pose Estimation has been performed, perform one in order to start using the object tracker.",__func__);
    }
}
 /**
  * \brief create a marker from a chart and stores it into markers array
  */
 virtual void createNodeMarker(const Chart &c)
 {
     if (!markers){
         ROS_WARN_THROTTLE(30,"[ExplorerBase::%s]\tNo marker array to update is provided, visualization is disabled.",__func__);
         return;
     }
     visualization_msgs::Marker disc;
     disc.header.frame_id = mark_frame;
     disc.header.stamp = ros::Time();
     disc.lifetime = ros::Duration(0.5);
     disc.ns = "Atlas Nodes";
     disc.id = c.getId();
     disc.type = visualization_msgs::Marker::CYLINDER;
     disc.action = visualization_msgs::Marker::ADD;
     disc.scale.x = c.getRadius()*2;
     disc.scale.y = c.getRadius()*2;
     disc.scale.z = 0.001;
     disc.color.a = 0.75;
     disc.color.r = 0.3;
     disc.color.b = 0.35;
     disc.color.g = 0.5;
     Eigen::Matrix3d rot;
     rot.col(0) =  c.getTanBasisOne();
     rot.col(1) =  c.getTanBasisTwo();
     rot.col(2) =  c.getNormal();
     Eigen::Quaterniond q(rot);
     q.normalize();
     disc.pose.orientation.x = q.x();
     disc.pose.orientation.y = q.y();
     disc.pose.orientation.z = q.z();
     disc.pose.orientation.w = q.w();
     disc.pose.position.x = c.getCenter()[0];
     disc.pose.position.y = c.getCenter()[1];
     disc.pose.position.z = c.getCenter()[2];
     std::lock_guard<std::mutex> guard(*mtx_ptr);
     markers->markers.push_back(disc);
 }
Example #26
bool
PersonTracker::poseToGlobalFrame(PoseStamped pose_msg, PoseStamped& transformed)
{
	std::string global_frame = "/map";

	pose_msg.header.stamp = ros::Time::now() - ros::Duration(0.10);
	
	//ROS_INFO_THROTTLE(2,"[person_tracker] Trying to transform at time %.2f", pose_msg.header.stamp.toSec());

	/*if( tf_.canTransform(global_frame, pose_msg.header.frame_id, pose_msg.header.stamp ) ){
		ROS_ERROR_THROTTLE(2,"[person_tracker] canTransform returned false");
	}*/

	try {
		tf_.transformPose(global_frame, pose_msg, transformed);
	}
	catch(tf::TransformException& ex) {
		ROS_WARN_THROTTLE(2,"[person_tracker] Failed to transform goal pose from \"%s\" to \"%s\" frame: %s",
		pose_msg.header.frame_id.c_str(), global_frame.c_str(), ex.what());
		return false;
	}

	return true;
}
  void BallPickerLayer::onInitialize()
  {
    ros::NodeHandle nh("~/" + name_), g_nh;

    nh.param("robot_base_frame", robot_base_frame_, std::string("base_link"));
    nh.param("robot_global_frame", global_frame_, std::string("/map"));

    ros::NodeHandle prefix_nh;
    std::string tf_prefix = tf::getPrefixParam(prefix_nh);

    global_frame_ = tf::resolve(tf_prefix, global_frame_);
    robot_base_frame_ = tf::resolve(tf_prefix, robot_base_frame_);

    confirm_update_client = nh.serviceClient<std_srvs::Empty>("/confirm_updated_costmap");    

    while (!tfl.waitForTransform(global_frame_, robot_base_frame_, ros::Time::now(), ros::Duration(1.0)))
    {
      ROS_WARN_THROTTLE(1.0, "Transofrm for looking up robot pose not available.");
      ros::spinOnce();
    }


    obstacle_buffer.clear();
    obstacles_sub = g_nh.subscribe("costmap_custom_obstacles", 1, &BallPickerLayer::obstaclesIncomeCallback, this);

    clear_srv = nh.advertiseService("ball_picker_clear_costmaps", &BallPickerLayer::clearObstacles, this);

    clear_flag = false;
    current_ = true;
    default_value_ = NO_INFORMATION;
    matchSize();

    dsrv_ = new dynamic_reconfigure::Server<costmap_2d::GenericPluginConfig>(nh);
    dynamic_reconfigure::Server<costmap_2d::GenericPluginConfig>::CallbackType cb = boost::bind(&BallPickerLayer::reconfigureCB, this, _1, _2);
    dsrv_->setCallback(cb);
  }
void FrobitInterface::on_timer(const ros::TimerEvent& e)
{

  active = true;
  if ((ros::Time::now() - messages.cmd_vel_left.header.stamp).toSec() > parameters.timeout)
  {
    ROS_WARN_THROTTLE(1, "Time for left cmd_vel is out of date");
    active = false;
  }

  if ((ros::Time::now() - messages.cmd_vel_right.header.stamp).toSec() > parameters.timeout)
  {
    ROS_WARN_THROTTLE(1, "Time for right cmd_vel is out of date");
    active = false;
  }

  if (ros::Time::now() > last_deadman_received + ros::Duration(0.2))
  {
    deadman = false;
  }

  if (active && deadman)
  {
    if (parameters.castor_front)
    {
      left_vel = messages.cmd_vel_left.twist.linear.x;
      right_vel = messages.cmd_vel_right.twist.linear.x;
    }
    else
    {
      left_vel = messages.cmd_vel_right.twist.linear.x * -1;
      right_vel = messages.cmd_vel_left.twist.linear.x * -1;
    }

    // limit to max robot velocity (safety measure only)
    //correct for higher wheel velocities due to the diff drive
    double corr_max_velocity = parameters.max_velocity + fabs(fabs(left_vel) - fabs(right_vel)) / 2;

    if (left_vel > corr_max_velocity)
      left_vel = corr_max_velocity;
    else if (left_vel < -corr_max_velocity)
      left_vel = -corr_max_velocity;

    if (right_vel > corr_max_velocity)
      right_vel = corr_max_velocity;
    else if (right_vel < -corr_max_velocity)
      right_vel = -corr_max_velocity; 

    // Convert from [m/s] to [ticks/s]
    left_vel *= meters_to_ticks;
    right_vel *= meters_to_ticks;

  }
  else
  {
    left_vel = 0;
    right_vel = 0;
  }

  //build message
  messages.motor_command.header.stamp = ros::Time::now();
  messages.motor_command.type = "PFBCT";
  messages.motor_command.data.clear();
  messages.motor_command.data.push_back(boost::lexical_cast<std::string>((int)left_vel));
  messages.motor_command.data.push_back(boost::lexical_cast<std::string>((int)right_vel));

  //publish message
  publishers.nmea.publish(messages.motor_command);
}
Example #29
  void imageCallback(
      const sensor_msgs::ImageConstPtr& l_image_msg,
      const sensor_msgs::ImageConstPtr& r_image_msg,
      const sensor_msgs::CameraInfoConstPtr& l_info_msg,
      const sensor_msgs::CameraInfoConstPtr& r_info_msg)
  {
 
    bool first_run = false;
    // create odometer if not exists
    if (!visual_odometer_)
    {
      first_run = true;
#if(DEBUG)
      cv_bridge::CvImageConstPtr l_cv_ptr, r_cv_ptr;
      l_cv_ptr = cv_bridge::toCvShare(l_image_msg, sensor_msgs::image_encodings::MONO8);
      r_cv_ptr = cv_bridge::toCvShare(r_image_msg, sensor_msgs::image_encodings::MONO8);
      last_l_image_ = l_cv_ptr->image;
      last_r_image_ = r_cv_ptr->image;
#endif
      initOdometer(l_info_msg, r_info_msg);
    }

    // convert images if necessary
    uint8_t *l_image_data, *r_image_data;
    int l_step, r_step;
    cv_bridge::CvImageConstPtr l_cv_ptr, r_cv_ptr;
    l_cv_ptr = cv_bridge::toCvShare(l_image_msg, sensor_msgs::image_encodings::MONO8);
    l_image_data = l_cv_ptr->image.data;
    l_step = l_cv_ptr->image.step[0];
    r_cv_ptr = cv_bridge::toCvShare(r_image_msg, sensor_msgs::image_encodings::MONO8);
    r_image_data = r_cv_ptr->image.data;
    r_step = r_cv_ptr->image.step[0];

    ROS_ASSERT(l_step == r_step);
    ROS_ASSERT(l_image_msg->width == r_image_msg->width);
    ROS_ASSERT(l_image_msg->height == r_image_msg->height);

    int32_t dims[] = {l_image_msg->width, l_image_msg->height, l_step};
    // on first run or when odometer got lost, only feed the odometer with 
    // images without retrieving data
    if (first_run || got_lost_)
    {
      visual_odometer_->process(l_image_data, r_image_data, dims);
      got_lost_ = false;
    }
    else
    {
      bool success = visual_odometer_->process(
          l_image_data, r_image_data, dims, last_motion_small_);
      if (success)
      {
        Matrix motion = Matrix::inv(visual_odometer_->getMotion());
        ROS_DEBUG("Found %i matches with %i inliers.", 
                  visual_odometer_->getNumberOfMatches(),
                  visual_odometer_->getNumberOfInliers());
        ROS_DEBUG_STREAM("libviso2 returned the following motion:\n" << motion);
        Matrix camera_motion;
        // if image was replaced due to small motion we have to subtract the 
        // last motion to get the increment
        if (last_motion_small_)
        {
          camera_motion = Matrix::inv(reference_motion_) * motion;
        }
        else
        {
          // image was not replaced, report full motion from odometer
          camera_motion = motion;
        }
        reference_motion_ = motion; // store last motion as reference
        std::cout << camera_motion << "\n" << std::endl;
#if (USE_MOVEMENT_CONSTRAIN)
        double deltaRoll = atan2(camera_motion.val[2][1], camera_motion.val[2][2]);
        double deltaPitch = asin(-camera_motion.val[2][0]);
        double deltaYaw = atan2(camera_motion.val[1][0], camera_motion.val[0][0]);
        double tanRoll = camera_motion.val[2][1] / camera_motion.val[2][2];
        double tanPitch = tan(deltaPitch);
        printf("deltaroll is %lf\n", deltaRoll);
        printf("deltaPitch is %lf\n", deltaPitch);
        printf("deltaYaw is %lf\n", deltaYaw);
        double deltaX = camera_motion.val[0][3];
        double deltaY = camera_motion.val[1][3];
        double deltaZ = camera_motion.val[2][3];
        printf("dx %lf, dy %lf, dz %lf, tanRoll %lf tanPitch %lf\n",deltaX, deltaY, deltaZ, tanRoll, tanPitch);
        if(deltaY > 0 && deltaY > tanRoll * deltaZ)
        {
           camera_motion.val[1][3] = tanRoll * deltaZ;
          //printf("dy %lf deltaY, dynew %lf\n", deltaY,camera_motion.val[2][3]);
        }
        else if(deltaY < 0 && deltaY < -tanRoll * deltaZ)
        {
           camera_motion.val[1][3] = -tanRoll * deltaZ;
          //printf("dy %lf deltaY, dynew %lf\n", deltaY,camera_motion.val[2][3]);
        }

        /*if(deltaX > 0 && deltaX > tanPitch * deltaZ)
        {
           camera_motion.val[0][3] = tanPitch * deltaZ;
          printf("dx %lf, dxnew %lf\n", deltaX,camera_motion.val[1][3]);
        }
        else if(deltaX < 0 && deltaX < -tanPitch * deltaZ)
        {
           camera_motion.val[0][3] = -tanPitch * deltaZ;
          printf("dx %lf, dxnew %lf\n", deltaX,camera_motion.val[1][3]);
        }*/
        /*
        if(deltaPitch > 0)
        {
          deltaPitch = deltaPitch+fabs(deltaRoll)+fabs(deltaYaw);
        }
        else
        {
          deltaPitch = deltaPitch-fabs(deltaRoll)-fabs(deltaYaw);
        }*/
        deltaPitch = deltaPitch+deltaYaw;
#endif
        // calculate current feature flow
        std::vector<Matcher::p_match> matches = visual_odometer_->getMatches();
        std::vector<int> inlier_indices = visual_odometer_->getInlierIndices();
#if(DEBUG)
        cv::Mat img1 = l_cv_ptr->image;
        cv::Mat img2 = r_cv_ptr->image;
        cv::Size size(last_l_image_.cols, last_l_image_.rows+last_r_image_.rows);
        cv::Mat outImg(size,CV_MAKETYPE(img1.depth(), 3));
        cv::Mat outImg1 = outImg( cv::Rect(0, 0, last_l_image_.cols, last_l_image_.rows) );
        cv::Mat outImg2 = outImg( cv::Rect(0, last_l_image_.rows, img1.cols, img1.rows) );
        
        if( last_l_image_.type() == CV_8U )
          cvtColor( last_l_image_, outImg1, CV_GRAY2BGR );
        else
          last_l_image_.copyTo( outImg1 );
        
        if( img1.type() == CV_8U )
          cvtColor( img1, outImg2, CV_GRAY2BGR );
        else
          img1.copyTo( outImg2 );
        for (size_t i = 0; i < matches.size(); ++i)
        {
          cv::Point pt1(matches[i].u1p,matches[i].v1p);
          cv::Point pt2(matches[i].u1c,matches[i].v1c + last_l_image_.rows);
          if(pt1.y > 239)
            cv::line(outImg, pt1, pt2, cv::Scalar(255,0,0));
          //else
            //cv::line(outImg, pt1, pt2, cv::Scalar(0,255,0));
        }
        cv::imshow("matching image", outImg);
        cv::waitKey(10);
        



        last_l_image_ = img1;
        last_r_image_ = img2;
#endif
        double feature_flow = computeFeatureFlow(matches);
        last_motion_small_ = (feature_flow < motion_threshold_);
        ROS_DEBUG_STREAM("Feature flow is " << feature_flow 
            << ", marking last motion as " 
            << (last_motion_small_ ? "small." : "normal."));

        btMatrix3x3 rot_mat(
          cos(deltaPitch), 0, sin(deltaPitch),
          0,               1,           0, 
          -sin(deltaPitch), 0, cos(deltaPitch));
        /*btMatrix3x3 rot_mat(
          camera_motion.val[0][0], camera_motion.val[0][1], camera_motion.val[0][2],
          camera_motion.val[1][0], camera_motion.val[1][1], camera_motion.val[1][2],
          camera_motion.val[2][0], camera_motion.val[2][1], camera_motion.val[2][2]);*/
        btVector3 t(camera_motion.val[0][3], camera_motion.val[1][3], camera_motion.val[2][3]);
   //rotation
   /*double delta_yaw = 0;
   double delta_pitch = 0;
   double delta_roll = 0;
   delta_yaw = delta_yaw*M_PI/180.0;
   delta_pitch = delta_pitch*M_PI/180.0;
   delta_roll = delta_roll*M_PI/180.0;
   //btMatrix3x3 initialTrans;
    Eigen::Matrix4f initialTrans = Eigen::Matrix4f::Identity();
   initialTrans(0,0) = cos(delta_pitch)*cos(delta_yaw); 
   initialTrans(0,1) = -cos(delta_roll)*sin(delta_yaw) + sin(delta_roll)*sin(delta_pitch)*cos(delta_yaw);
   initialTrans(0,2) = sin(delta_roll)*sin(delta_yaw) + cos(delta_roll)*sin(delta_pitch)*cos(delta_yaw);
   initialTrans(1,0) = cos(delta_pitch)*sin(delta_yaw);
   initialTrans(1,1) = cos(delta_roll)*cos(delta_yaw) + sin(delta_roll)*sin(delta_pitch)*sin(delta_yaw);
   initialTrans(1,2) = -sin(delta_roll)*cos(delta_yaw) + cos(delta_roll)*sin(delta_pitch)*sin(delta_yaw);
   initialTrans(2,0) = -sin(delta_pitch);
   initialTrans(2,1) = sin(delta_roll)*cos(delta_pitch);
   initialTrans(2,2) = cos(delta_roll)*cos(delta_pitch);
        btMatrix3x3 rot_mat(
          initialTrans(0,0), initialTrans(0,1), initialTrans(0,2),
          initialTrans(1,0), initialTrans(1,1), initialTrans(1,2),
          initialTrans(2,0), initialTrans(2,1), initialTrans(2,2));
        btVector3 t(0.0, 0.00, 0.01);*/
        tf::Transform delta_transform(rot_mat, t);

        setPoseCovariance(STANDARD_POSE_COVARIANCE);
        setTwistCovariance(STANDARD_TWIST_COVARIANCE);

        integrateAndPublish(delta_transform, l_image_msg->header.stamp);

        if (point_cloud_pub_.getNumSubscribers() > 0)
        {
          computeAndPublishPointCloud(l_info_msg, l_image_msg, r_info_msg, matches, inlier_indices);
        }
      }
      else
      {
        setPoseCovariance(BAD_COVARIANCE);
        setTwistCovariance(BAD_COVARIANCE);
        tf::Transform delta_transform;
        delta_transform.setIdentity();
        integrateAndPublish(delta_transform, l_image_msg->header.stamp);

        ROS_DEBUG("Call to VisualOdometryStereo::process() failed.");
        ROS_WARN_THROTTLE(1.0, "Visual Odometer got lost!");
        got_lost_ = true;
      }
    }
  }
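The btMatrix3x3 built in the success branch above is a pure pitch rotation R_y(deltaPitch): rows (cos t, 0, sin t), (0, 1, 0), (-sin t, 0, cos t). A standalone sanity-check sketch with an illustrative angle:

#include <cmath>
#include <cstdio>

int main()
{
    double t = 0.1; // pitch increment in radians (illustrative)
    double R[3][3] = {{ std::cos(t), 0, std::sin(t)},
                      { 0,           1, 0          },
                      {-std::sin(t), 0, std::cos(t)}};
    // rotate the unit x-axis: expect (cos t, 0, -sin t)
    double v[3] = {1, 0, 0};
    double out[3] = {0, 0, 0};
    for (int i = 0; i < 3; ++i)
        for (int j = 0; j < 3; ++j)
            out[i] += R[i][j] * v[j];
    std::printf("%.3f %.3f %.3f\n", out[0], out[1], out[2]); // 0.995 0.000 -0.100
    return 0;
}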
Example #30
void MapperNode::TagsCb(const apriltag_ros::ApriltagsConstPtr& tags_c_msg) {
  // Do nothing if no detection, this prevents checking in the following steps
  if (tags_c_msg->apriltags.empty()) {
    ROS_WARN_THROTTLE(1, "No tags detected.");
    return;
  }
  // Do nothing if camera info not received
  if (!model_.initialized()) {
    ROS_WARN_THROTTLE(1, "No camera info received");
    return;
  }
  // Do nothing if there are no good tags close to the center of the image
  std::vector<Apriltag> tags_c_good;
  if (!GetGoodTags(tags_c_msg->apriltags, &tags_c_good)) {
    ROS_WARN_THROTTLE(1, "No good tags detected.");
    return;
  }
  // Initialize map by adding the first tag that is not on the edge of the image
  if (!map_.init()) {
    map_.AddFirstTag(tags_c_good.front());
    ROS_INFO("AprilMap initialized.");
  }
  // Do nothing if no pose can be estimated
  geometry_msgs::Pose pose;
  if (!map_.EstimatePose(tags_c_msg->apriltags, model_.fullIntrinsicMatrix(),
                         model_.distortionCoeffs(), &pose)) {
    ROS_WARN_THROTTLE(1, "No 2D-3D correspondence.");
    return;
  }
  // Now that the initial pose is calculated, we can do some mapping
  mapper_.AddPose(pose);
  mapper_.AddFactors(tags_c_good);
  if (mapper_.init()) {
    // This will only add new landmarks
    mapper_.AddLandmarks(tags_c_good);
    mapper_.Optimize();
    // Get latest estimates from mapper and put into map
    mapper_.Update(&map_, &pose);
    // Prepare for next iteration
    mapper_.Clear();
  } else {
    // This will add first landmark at origin and fix scale for first pose and
    // first landmark
    mapper_.Initialize(map_.first_tag());
  }

  // Publish camera to world transform
  std_msgs::Header header;
  header.stamp = tags_c_msg->header.stamp;
  header.frame_id = frame_id_;

  geometry_msgs::Vector3 translation;
  translation.x = pose.position.x;
  translation.y = pose.position.y;
  translation.z = pose.position.z;

  geometry_msgs::TransformStamped transform_stamped;
  transform_stamped.header = header;
  transform_stamped.child_frame_id = tags_c_msg->header.frame_id;
  transform_stamped.transform.translation = translation;
  transform_stamped.transform.rotation = pose.orientation;

  tf_broadcaster_.sendTransform(transform_stamped);

  geometry_msgs::PoseStamped pose_cam;
  pose_cam.header.stamp = tags_c_msg->header.stamp;
  pose_cam.header.frame_id = "0";

  pose_cam.pose.position = pose.position;
  pose_cam.pose.orientation = pose.orientation;

  pose_pub_.publish(pose_cam);

  // Publish visualisation markers
  tag_viz_.PublishApriltagsMarker(map_.tags_w(), frame_id_,
                                  tags_c_msg->header.stamp);
}