Mesh* createMeshFromAsset(const aiScene* scene, const Eigen::Vector3d &scale, const std::string &resource_name)
{
  if (!scene->HasMeshes())
  {
    ROS_WARN_STREAM("Assimp reports scene in " << resource_name << " has no meshes");
    return NULL;
  }
  EigenSTL::vector_Vector3d vertices;
  std::vector<unsigned int> triangles;
  extractMeshData(scene, scene->mRootNode, aiMatrix4x4(), scale, vertices, triangles);
  if (vertices.empty())
  {
    ROS_WARN_STREAM("There are no vertices in the scene " << resource_name);
    return NULL;
  }
  if (triangles.empty())
  {
    ROS_WARN_STREAM("There are no triangles in the scene " << resource_name);
    return NULL;
  }
  
  return createMeshFromVertices(vertices, triangles);
}
collision_space::EnvironmentModel::AllowedCollisionMatrix 
planning_environment::convertFromACMMsgToACM(const arm_navigation_msgs::AllowedCollisionMatrix& matrix)
{
  std::map<std::string, unsigned int> indices;
  std::vector<std::vector<bool> > vecs;
  unsigned int ns = matrix.link_names.size();
  if(matrix.entries.size() != ns) {
    ROS_WARN_STREAM("Matrix messed up");
  }
  vecs.resize(ns);
  for(unsigned int i = 0; i < std::min(ns, (unsigned int) matrix.entries.size()); i++) {
    indices[matrix.link_names[i]] = i;
    if(matrix.entries[i].enabled.size() != ns) {
      ROS_WARN_STREAM("Matrix messed up");
    }
    vecs[i].resize(ns);
    for(unsigned int j = 0; j < std::min(ns, (unsigned int) matrix.entries[i].enabled.size()); j++) {
      vecs[i][j] = matrix.entries[i].enabled[j];
    }
  }
  collision_space::EnvironmentModel::AllowedCollisionMatrix acm(vecs,indices);
  return acm;
}
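// A minimal usage sketch for the conversion above (link names are hypothetical):
// build a 2x2 arm_navigation_msgs::AllowedCollisionMatrix in which the two links
// are allowed to collide with each other, then convert it.
collision_space::EnvironmentModel::AllowedCollisionMatrix makeExampleAcm()
{
  arm_navigation_msgs::AllowedCollisionMatrix msg;
  msg.link_names.push_back("link_a");
  msg.link_names.push_back("link_b");
  msg.entries.resize(2);
  for (unsigned int i = 0; i < msg.entries.size(); i++) {
    msg.entries[i].enabled.resize(2, false);
  }
  msg.entries[0].enabled[1] = true;  // link_a may touch link_b
  msg.entries[1].enabled[0] = true;  // keep the matrix symmetric
  return planning_environment::convertFromACMMsgToACM(msg);
}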
void CSrf10Controller::timerCallback(const ros::TimerEvent& e)
{
    ros::Time now=ros::Time::now();
    std::map<uint8_t,unsigned short> updatedDistances;
    int code=device_p_->getDistanceSensors(updatedDistances);
    if (code<0)
        ROS_ERROR("Unable to get srf10 sensor distances from the base control board");
    else
    {
        std::map< uint8_t,CDistanceSensor * >::iterator it;
        for (it=srf10Sensors_.begin();it!=srf10Sensors_.end();it++)
        {
            if (updatedDistances.count((*it).first)>0)
            {
                ROS_DEBUG_STREAM("Obtained distance " << updatedDistances[(*it).first] << " for srf10 sensor " << (*it).second->getName() << " from the base control board");

//do we publish al lvalues or only when we see something ? (0.0 values means no obstacle)
                bool publish_if_obstacle;

                nh.getParam("controllers/"+getName()+"/sensors/front/"+(*it).second->getName()+"/publish_if_obstacle", publish_if_obstacle);
                if( publish_if_obstacle){
                    if( updatedDistances[(*it).first]>0)
                       (*it).second->publish((float)updatedDistances[(*it).first],now);
}
                else{
                     (*it).second->publish((float)updatedDistances[(*it).first],now);
}
            }
            else
                ROS_WARN_STREAM("Could not obtain distance of srf10 sensor " << (int)((*it).first) << " from base control board");
        }
    }
    std::vector<unsigned int> adcReads;
    code=device_p_->getAdcReads(adcSensorsAddresses_,adcReads);
    if (code<0)
        ROS_ERROR("Unable to get adc sensor reads from the base control board");
    else
    {
        if(adcReads.size()!=adcSensorsAddresses_.size())
            ROS_ERROR("The asked addreses and the returned reads for the adc sensors do not match");
        else
        {
            for (size_t i=0;i<adcSensorsAddresses_.size();i++)
            {
                ROS_DEBUG_STREAM("Obtained distance " << adcReads[i] << " for adc sensor " << adcSensors_[adcSensorsAddresses_[i]]->getName() << " from the base control board");
                adcSensors_[adcSensorsAddresses_[i]]->publish(adcReads[i],now);
            }
        }
    }
}
void planning_models::KinematicState::JointStateGroup::getKinematicStateValues(std::map<std::string,double>& joint_state_values) const {
  joint_state_values.clear();
  for(unsigned int i = 0; i < joint_state_vector_.size(); i++) {
    unsigned int dim = joint_state_vector_[i]->getDimension();  
    if(dim != 0) {
      for(unsigned int j = 0; j < joint_state_vector_[i]->getJointStateValues().size(); j++) {
        joint_state_values[joint_state_vector_[i]->getJointStateNameOrder()[j]] = joint_state_vector_[i]->getJointStateValues()[j]; 
      }
    }
  }
  if(joint_state_values.size() != dimension_) {
    ROS_WARN_STREAM("Some problems with group map dimension values " << joint_state_values.size() << " dimension is " << dimension_);
  }
}
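// A small sketch of a caller (the JointStateGroup pointer is an assumption for
// illustration): the returned map is keyed by joint state value name, so the
// values can be logged or copied into another state directly.
void logGroupValues(const planning_models::KinematicState::JointStateGroup* group_state)
{
  std::map<std::string, double> joint_values;
  group_state->getKinematicStateValues(joint_values);
  for (std::map<std::string, double>::const_iterator it = joint_values.begin();
       it != joint_values.end(); ++it)
    ROS_DEBUG_STREAM(it->first << " = " << it->second);
}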
void LagrangeAllocator::setWeights(const Eigen::MatrixXd &W_new)
{
  bool isCorrectDimensions = ( W_new.rows() == r && W_new.cols() == r );
  if (!isCorrectDimensions)
  {
    ROS_WARN_STREAM("Attempt to set weight matrix in LagrangeAllocator with wrong dimensions " << W_new.rows() << "*" << W_new.cols() << ".");
    return;
  }

  W = W_new;

  // New weights require recomputing the generalized inverse of the thrust config matrix
  computeGeneralizedInverse();
}
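// A hedged sketch of a caller: build an r-by-r weight matrix (r is assumed to be 6
// here, one weight per controlled degree of freedom) and hand it to an existing
// allocator; matrices with the wrong dimensions are rejected by the check above.
void configureAllocatorWeights(LagrangeAllocator &allocator)
{
  Eigen::MatrixXd W = Eigen::MatrixXd::Identity(6, 6);
  W(5, 5) = 10.0;  // e.g. make the last degree of freedom more expensive to actuate
  allocator.setWeights(W);
}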
void planning_models::KinematicState::getKinematicStateValues(std::vector<double>& joint_state_values) const {
  joint_state_values.clear();
  for(unsigned int i = 0; i < joint_state_vector_.size(); i++) {
    unsigned int dim = joint_state_vector_[i]->getDimension();  
    if(dim != 0) {
      for(unsigned int j = 0; j < joint_state_vector_[i]->getJointStateValues().size(); j++) {
        joint_state_values.push_back(joint_state_vector_[i]->getJointStateValues()[j]);
      }
    }
  }
  if(joint_state_values.size() != dimension_) {
    ROS_WARN_STREAM("Some problems with state vector dimension values " << joint_state_values.size() << " dimension is " << dimension_);
  }
}
void MotionPlanningFrame::pathConstraintsIndexChanged(int index)
{
  if (move_group_)
  {
    if (index > 0)
    {
      std::string c = ui_->path_constraints_combo_box->itemText(index).toStdString();
      if (!move_group_->setPathConstraints(c))
        ROS_WARN_STREAM("Unable to set the path constraints: " << c);
    }
    else
      move_group_->clearPathConstraints();
  }
}
Example #8
double leatherman::getColladaFileScale(std::string resource)
{
  static std::map<std::string, float> rescale_cache;

  // Try to read unit to meter conversion ratio from mesh. Only valid in Collada XML formats. 
  TiXmlDocument xmlDoc;
  float unit_scale(1.0);
  resource_retriever::Retriever retriever;
  resource_retriever::MemoryResource res;
  try
  {
    res = retriever.get(resource);
  }
  catch (resource_retriever::Exception& e)
  {
    ROS_ERROR("%s", e.what());
    return unit_scale;
  }

  if (res.size == 0)
  {
    return unit_scale;
  }


  // Use the resource retriever to get the data.
  const char * data = reinterpret_cast<const char * > (res.data.get());
  xmlDoc.Parse(data);

  // Find the appropriate element if it exists
  if(!xmlDoc.Error())
  {
    TiXmlElement * colladaXml = xmlDoc.FirstChildElement("COLLADA");
    if(colladaXml)
    {
      TiXmlElement *assetXml = colladaXml->FirstChildElement("asset");
      if(assetXml)
      {
        TiXmlElement *unitXml = assetXml->FirstChildElement("unit");
        if (unitXml && unitXml->Attribute("meter"))
        {
          // Failing to convert leaves unit_scale as the default.
          if(unitXml->QueryFloatAttribute("meter", &unit_scale) != 0)
            ROS_WARN_STREAM("getMeshUnitRescale::Failed to convert unit element meter attribute to determine scaling. unit element: " << *unitXml);
        }
      }
    }
  }
  return unit_scale;
}
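// A minimal usage sketch (the resource path is hypothetical). The returned factor is
// the value of the COLLADA <unit meter="..."/> attribute, or 1.0 if the file is not
// COLLADA XML or the attribute is missing, so it can be applied to vertex coordinates
// unconditionally.
double unit_to_meters = leatherman::getColladaFileScale("package://my_robot/meshes/gripper.dae");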
Example #9
void Channel::cmdCallback(const roboteq_msgs::Command& command)
{
  // Reset command timeout.
  timeout_timer_.stop();
  timeout_timer_.start();
 ROS_WARN_STREAM("Got command " << command.mode << " with " << command.setpoint);
  // Update mode of motor driver. We send this on each command for redundancy against a
  // lost message, and the MBS script keeps track of changes and updates the control
  // constants accordingly.
  controller_->command << "VAR" << channel_num_ << static_cast<int>(command.mode) << controller_->send;

  if (command.mode == roboteq_msgs::Command::MODE_VELOCITY)
  {
    // Get a -1000 .. 1000 command as a proportion of the maximum RPM.
    int roboteq_velocity = to_rpm(command.setpoint) / max_rpm_ * 1000.0;
    ROS_WARN_STREAM("Commanding " << roboteq_velocity << " velocity to motor driver.");

    // Write mode and command to the motor driver.
    controller_->command << "G" << channel_num_ << roboteq_velocity << controller_->send;
  }
  else if (command.mode == roboteq_msgs::Command::MODE_POSITION)
  {
    // Convert the commanded position in rads to encoder ticks.
    int roboteq_position = to_encoder_ticks(command.setpoint);
    ROS_DEBUG_STREAM("Commanding " << roboteq_position << " position to motor driver.");

    // Write command to the motor driver.
    controller_->command << "P" << channel_num_ << roboteq_position << controller_->send;
  }
  else
  {
    ROS_WARN_STREAM("Command received with unknown mode number, dropping.");
  }

  controller_->flush();
  last_mode_ = command.mode;
}
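// A hedged sketch of a publisher feeding the callback above: the message carries one
// of the MODE_* constants and a setpoint in SI units (rad/s for velocity, rad for
// position). The publisher handle `cmd_pub` is an assumption for illustration.
roboteq_msgs::Command cmd;
cmd.mode = roboteq_msgs::Command::MODE_VELOCITY;
cmd.setpoint = 2.0;  // rad/s
cmd_pub.publish(cmd);  // cmd_pub: a ros::Publisher on this channel's command topic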
Example #10
bool VectorVisualization::visualize(const grid_map::GridMap& map)
{
  if (!isActive()) return true;

  for (const auto& type : types_) {
    if (!map.exists(type)) {
      ROS_WARN_STREAM(
          "VectorVisualization::visualize: No grid map layer with name '" << type << "' found.");
      return false;
    }
  }

  // Set marker info.
  marker_.header.frame_id = map.getFrameId();
  marker_.header.stamp.fromNSec(map.getTimestamp());

  // Clear points.
  marker_.points.clear();
  marker_.colors.clear();

  for (grid_map::GridMapIterator iterator(map); !iterator.isPastEnd(); ++iterator)
  {
    if (!map.isValid(*iterator, positionLayer_) || !map.isValid(*iterator, types_)) continue;
    geometry_msgs::Vector3 vector;
    vector.x = map.at(types_[0], *iterator); // FIXME(cfo): segfaults when types is not set
    vector.y = map.at(types_[1], *iterator);
    vector.z = map.at(types_[2], *iterator);

    Eigen::Vector3d position;
    map.getPosition3(positionLayer_, *iterator, position);
    geometry_msgs::Point startPoint;
    startPoint.x = position.x();
    startPoint.y = position.y();
    startPoint.z = position.z();
    marker_.points.push_back(startPoint);

    geometry_msgs::Point endPoint;
    endPoint.x = startPoint.x + scale_ * vector.x;
    endPoint.y = startPoint.y + scale_ * vector.y;
    endPoint.z = startPoint.z + scale_ * vector.z;
    marker_.points.push_back(endPoint);

    marker_.colors.push_back(color_); // Each vertex needs a color.
    marker_.colors.push_back(color_);
  }

  publisher_.publish(marker_);
  return true;
}
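// A hedged sketch of a grid map the visualization above could consume: three layers
// holding the vector components (their names must match the configured types_) plus
// the layer used as the 3D position. All layer names here are assumptions.
grid_map::GridMap map({"normal_x", "normal_y", "normal_z", "elevation"});
map.setFrameId("map");
map.setGeometry(grid_map::Length(2.0, 2.0), 0.05);
map["elevation"].setConstant(0.0);
map["normal_x"].setConstant(0.0);
map["normal_y"].setConstant(0.0);
map["normal_z"].setConstant(1.0);  // every vector points straight up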
bool constraint_samplers::JointConstraintSampler::setup(const std::vector<kinematic_constraints::JointConstraint> &jc)
{
  if (!jmg_)
  {
    ROS_FATAL("NULL group specified for constraint sampler");
    return false;
  }
  
  // find and keep the constraints that operate on the group we sample
  // also keep bounds for joints as convenient
  const std::map<std::string, unsigned int> &vim = jmg_->getJointVariablesIndexMap();
  std::set<const planning_models::KinematicModel::JointModel*> bounded;
  for (std::size_t i = 0 ; i < jc.size() ; ++i)
  {
    if (!jc[i].enabled())
      continue;
    
    const planning_models::KinematicModel::JointModel *jm = jc[i].getJointModel();
    if (!jmg_->hasJointModel(jm->getName()))
      continue;
    
    std::pair<double, double> bounds;
    jm->getVariableBounds(jc[i].getJointVariableName(), bounds);

    bounds.first = std::max(bounds.first, jc[i].getDesiredJointPosition() - jc[i].getJointToleranceBelow());
    bounds.second = std::min(bounds.second, jc[i].getDesiredJointPosition() + jc[i].getJointToleranceAbove());
    if (bounds.first > bounds.second)
    {
      std::stringstream cs; jc[i].print(cs);
      ROS_WARN_STREAM("The constraints for joint '" << jm->getName() << "' are such that there are no possible values for this joint. Ignoring constraint: \n" << cs.str());
      continue;
    }
    if (jm->getVariableCount() == 1)
      bounded.insert(jm);
    bounds_.push_back(bounds);
    index_.push_back(vim.find(jc[i].getJointVariableName())->second);
  }
  
  // get a separate list of joints that are not bounded; we will sample these randomly
  const std::vector<const planning_models::KinematicModel::JointModel*> &joints = jmg_->getJointModels();
  for (std::size_t i = 0 ; i < joints.size() ; ++i)
    if (bounded.find(joints[i]) == bounded.end())
    {
      unbounded_.push_back(joints[i]);
      uindex_.push_back(vim.find(joints[i]->getName())->second);
    } 
  values_.resize(jmg_->getVariableCount());
  return true;
}
Example #12
	/** Open the camera device.
	 *
	 * @param newconfig configuration parameters
	 * @return true, if successful
	 *
	 * if successful:
	 *   state_ is Driver::OPENED
	 *   camera_name_ set to camera_name string
	 */
	bool openCamera(Config &newconfig)
	{
		bool success = true;

		try
		{
			ROS_INFO("opening uvc_cam at %dx%d, %f fps", newconfig.width, newconfig.height, newconfig.frame_rate);
			uvc_cam::Cam::mode_t mode = uvc_cam::Cam::MODE_RGB;
			switch (newconfig.format_mode) {
				case 1:
					mode = uvc_cam::Cam::MODE_RGB;
					break;
				case 2:
					mode = uvc_cam::Cam::MODE_YUYV;
					break;
				case 3:
					mode = uvc_cam::Cam::MODE_MJPG;
					break;
				default:
					mode = uvc_cam::Cam::MODE_RGB;
					break;
			}
			cam_ = new uvc_cam::Cam(newconfig.device.c_str(), mode, newconfig.width, newconfig.height, newconfig.frame_rate);
			// FIXME: this condition compares camera_name_ with itself, so it can
			// never be true and the camera name is never actually (re)set here.
			if (camera_name_ != camera_name_)
			{
				camera_name_ = camera_name_;
				if (!cinfo_.setCameraName(camera_name_))
				{
					// GUID is 16 hex digits, which should be valid.
					// If not, use it for log messages anyway.
					ROS_WARN_STREAM("[" << camera_name_ << "] name not valid"
							<< " for camera_info_manager");
				}
			}
			//			ROS_INFO_STREAM("[" << camera_name_
			//					<< "] opened: " << newconfig.video_mode << ", "
			//					<< newconfig.frame_rate << " fps, "
			//					<< newconfig.iso_speed << " Mb/s");
			state_ = Driver::OPENED;
			calibration_matches_ = true;

		//	cam_->display_formats_supported();

		}
		catch (uvc_cam::Exception& e)
		{
			ROS_FATAL_STREAM("[" << camera_name_
					<< "] exception opening device: " << e.what());
			success = false;
		}

		return success;
	}
Example #13
/**
 * @brief If not already at the maximum, increment the angular velocity.
 */
void KeyOp::incrementAngularVelocity()
{
  if (power_status_)
  {
    if (cmd_->angular.z <= angular_vel_max_)
    {
      cmd_->angular.z += angular_vel_step_;
    }
    ROS_INFO_STREAM("KeyOp: angular velocity incremented [" << cmd_->linear.x << "|" << cmd_->angular.z << "]");
  }
  else
  {
    ROS_WARN_STREAM("KeyOp: motors are not yet powered up.");
  }
}
Example #14
  void PredictionModel::new_measurement(double x, double y, double z)
  {
    // x != x is true only for NaN, so reject any measurement that contains NaN.
    if( (x != x) || (y != y) || (z != z) )
    {
      ROS_WARN_STREAM("We received NaN, ignoring the new measurement: " << x << " " << y << " " << z );
      return;
    }

    measurement_(1) = x;
    measurement_(2) = y;
    measurement_(3) = z;

    //we have received a new measurement
    meas_used_ = false;
  }
Example #15
/**
 * @brief If not already at the minimum, decrement the angular velocity.
 */
void KeyOpCore::decrementAngularVelocity()
{
  if (power_status)
  {
    if (cmd->angular.z >= -angular_vel_max)
    {
      cmd->angular.z -= angular_vel_step;
    }
    ROS_INFO_STREAM("KeyOp: angular velocity decremented [" << cmd->linear.x << "|" << cmd->angular.z << "]");
  }
  else
  {
    ROS_WARN_STREAM("KeyOp: motors are not yet powered up.");
  }
}
/**
 * Checks that we have ended inside the goal constraints.
 */
bool JointTrajectoryActionController::goalReached()
{
  for (size_t i = 0; i < joints_.size(); i++)
  {
    double error = current_trajectory_->back().splines[i].target_position - katana_->getMotorAngles()[i];
    if (goal_constraints_[i] > 0 && fabs(error) > goal_constraints_[i])
    {
      ROS_WARN_STREAM("Joint " << i << " did not reach its goal. target position: "
          << current_trajectory_->back().splines[i].target_position << " actual position: "
          << katana_->getMotorAngles()[i] << std::endl);
      return false;
    }
  }
  return true;
}
Example #17
Mesh* createMeshFromResource(const std::string& resource, const Eigen::Vector3d &scale)
{
  resource_retriever::Retriever retriever;
  resource_retriever::MemoryResource res;
  try
  {
    res = retriever.get(resource);
  } 
  catch (resource_retriever::Exception& e)
  {
    ROS_ERROR("%s", e.what());
    return NULL;
  }

  if (res.size == 0)
  {
    ROS_WARN("Retrieved empty mesh for resource '%s'", resource.c_str());
    return NULL;
  }
  
  // Create an instance of the Importer class
  Assimp::Importer importer;
  
  // try to get a file extension
  std::string hint;
  std::size_t pos = resource.find_last_of(".");
  if (pos != std::string::npos)
  {
    hint = resource.substr(pos + 1);
    std::transform(hint.begin(), hint.end(), hint.begin(), ::tolower);
    if (hint.find("stl") != std::string::npos)
      hint = "stl";
  }
  
  // And have it read the given file with some postprocessing
  const aiScene* scene = importer.ReadFileFromMemory(reinterpret_cast<void*>(res.data.get()), res.size,
                                                     aiProcess_Triangulate            |
                                                     aiProcess_JoinIdenticalVertices  |
                                                     aiProcess_SortByPType            |
                                                     aiProcess_OptimizeGraph          |
                                                     aiProcess_OptimizeMeshes, hint.c_str());
  if (!scene)
  {
    ROS_WARN_STREAM("Assimp reports no scene in " << resource);
    return NULL;
  }
  return createMeshFromAsset(scene, scale, resource);
}
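// A minimal sketch (the mesh URI is hypothetical) of loading a mesh at unit scale;
// the caller owns the returned pointer, matching the allocation in the code above.
Eigen::Vector3d scale(1.0, 1.0, 1.0);
Mesh* mesh = createMeshFromResource("package://my_robot/meshes/base_link.stl", scale);
if (mesh)
{
  // ... use the mesh ...
  delete mesh;
}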
std::vector<geometry_msgs::PoseStamped> 
CartesianTrajectoryController::GetWayPoints(geometry_msgs::PoseStamped p1, geometry_msgs::PoseStamped p2)
{
	ROS_ASSERT( p1.header.frame_id == p2.header.frame_id );
	ROS_INFO( "Calculating SubWaypoint Positions" );

	ROS_DEBUG_STREAM( "P1: " << p1 );

	std::vector<geometry_msgs::PoseStamped> way_points;

	float distance = sqrt( pow( (p1.pose.position.x - p2.pose.position.x) ,2) +
                    		pow( (p1.pose.position.y - p2.pose.position.y) ,2) +
                    		pow( (p1.pose.position.z - p2.pose.position.z) ,2) );

	//int number_of_way_points = ceil(distance/m_way_point_resolution);
	int number_of_way_points = 0;
	//double interval = 1 / number_of_way_points;
	double interval = 0.07;
	ROS_WARN_STREAM( "Calculated Interval" << " [" << 	interval << "]" );

	double dx = (p2.pose.position.x - p1.pose.position.x);
	double dy = (p2.pose.position.y - p1.pose.position.y);
	double dz = (p2.pose.position.z - p1.pose.position.z);

	ROS_DEBUG_STREAM( "Calculated Slope" << " [" << dx << "," << dy << "," << dz << "]" );

	way_points.push_back(p1);
	for (int i=0; i<number_of_way_points; i++)
	{
		geometry_msgs::PoseStamped next_point = way_points.at(i);

		next_point.pose.position.x += dx*interval;
		next_point.pose.position.y += dy*interval;
		next_point.pose.position.z += dz*interval;

		way_points.push_back(next_point);
	}
	way_points.push_back(p2);

	for( int x = 0; x < (int)way_points.size(); x ++ )
	{
		ROS_DEBUG_STREAM( "SubWayPoint #" << x << " [" << way_points[x].pose.position.x << "," <<
														 way_points[x].pose.position.y << "," <<
														 way_points[x].pose.position.z << "]" );
	}

	return way_points;
}
void planning_environment::PlanningMonitor::getAllFixedFrameTransforms(std::vector<geometry_msgs::TransformStamped>& transform_vec) const
{
  transform_vec.clear();

  std::vector<std::string> all_frame_names;
  tf_->getFrameStrings(all_frame_names);
  //TODO - doesn't cope with tf namespaces
  //takes out leading slashes
  for(unsigned int i = 0; i < all_frame_names.size(); i++) {
    if(!all_frame_names[i].empty() && all_frame_names[i][0] == '/') {
      all_frame_names[i].erase(all_frame_names[i].begin());
    }
  }
  //the idea here is that we need to figure out how to take poses from other frames
  //and transform them into the fixed frame. So we want to get the transform
  //that goes from the frame to the identity of the world frame and take the inverse
  for(unsigned int i = 0; i < all_frame_names.size(); i++) {
    if(all_frame_names[i] != cm_->getWorldFrameId() && 
       !cm_->getKinematicModel()->hasLinkModel(all_frame_names[i])) {
      ROS_DEBUG_STREAM("Adding fixed frame transform for frame " << all_frame_names[i]);
      geometry_msgs::PoseStamped ident_pose;
      ident_pose.header.frame_id = cm_->getWorldFrameId();
      ident_pose.pose.orientation.w = 1.0;
      std::string err_string;
      if (tf_->getLatestCommonTime(cm_->getWorldFrameId(), all_frame_names[i], ident_pose.header.stamp, &err_string) != tf::NO_ERROR) {
        ROS_WARN_STREAM("No transform whatsoever available between " << all_frame_names[i] << " and fixed frame" << cm_->getWorldFrameId());
        continue;
      }
      geometry_msgs::PoseStamped trans_pose;
      try {
        tf_->transformPose(all_frame_names[i], ident_pose, trans_pose);
      } catch(tf::TransformException& ex) {
        //just not going to cache this one
	//ROS_WARN_STREAM("Unable to transform object from frame " << all_frame_names[i] << " to fixed frame " 
        //                 << cm_->getWorldFrameId() << " tf error is " << ex.what());
        continue;
      }
      geometry_msgs::TransformStamped f;
      f.header = ident_pose.header;
      f.child_frame_id = all_frame_names[i];
      f.transform.translation.x = trans_pose.pose.position.x;
      f.transform.translation.y = trans_pose.pose.position.y;
      f.transform.translation.z = trans_pose.pose.position.z;
      f.transform.rotation = trans_pose.pose.orientation;
      transform_vec.push_back(f);
    }
  }
}
bool planning_models::KinematicState::areJointsWithinBounds(const std::vector<std::string>& joints) const
{
  for(std::vector<std::string>::const_iterator it = joints.begin();
      it != joints.end();
      it++) {
    const JointState* joint_state = getJointState(*it);
    if(joint_state == NULL) {
      ROS_WARN_STREAM("No joint with name " << *it);
      return false;
    }
    if(!joint_state->areJointStateValuesWithinBounds()) {
      return false;
    }
  }   
  return true;
}
bool endGame(pacman_msgs::EndGame::Request &req, pacman_msgs::EndGame::Response &res, 
        ros::ServiceClient *start_game_client, DeterministicGameState **game_state)
{
    // count number of games
    static int game_count = 0;
    game_count++;

    if (req.win)
        ROS_INFO_STREAM("Won game " << game_count);
    else
        ROS_WARN_STREAM("Lost game " << game_count);

    if (game_count < NUMBER_OF_GAMES)
    {
        pacman_msgs::StartGame start_game;

        if (game_count < NUMBER_OF_TRAININGS)
            start_game.request.show_gui = false;
        else {
            start_game.request.show_gui = true;
            is_training = false;
        }

        if (start_game_client->call(start_game))
        {
            if (start_game.response.started)
            {
                // new game started
                delete *game_state;
                *game_state = new DeterministicGameState();
                res.game_restarted = true;

                ROS_INFO("New game started");
                return true;
            }
            else
                ROS_ERROR("Failed to start game (check if game already started)");
        }
        else // if problem => print error
            ROS_ERROR("Failed to call service StartGame");
    }
    else
        ROS_INFO_STREAM("Game finished, type ctrl+c to exit");

    // game not restarted
    res.game_restarted = false;

    return true;
}
void StompTrajectory::overwriteTrajectory(const trajectory_msgs::JointTrajectory& traj)
{
  std::vector<int> ind;
  for(unsigned int j = 0; j < traj.joint_names.size(); j++) {
    int kdl_number = robot_model_->urdfNameToKdlNumber(traj.joint_names[j]);
    if(kdl_number == 0) {
      ROS_WARN_STREAM("Can't find kdl index for joint " << traj.joint_names[j]);
    }
    ind.push_back(kdl_number);
  }

  for(unsigned int i = 1; i <= traj.points.size(); i++) {
    for(unsigned int j = 0; j < traj.joint_names.size(); j++) {
      trajectory_(i,ind[j]) = traj.points[i-1].positions[j];
    }
  }
}
Example #23
MapMakerServerBase::MapMakerServerBase(Map& map, TaylorCameraMap& cameras, BundleAdjusterBase& bundleAdjuster)
  : MapMakerBase(map, true)  // This will be skipped since inheritance is virtual!
  , mmCameraModels(cameras)
  , mBundleAdjuster(bundleAdjuster)
{
  // This needs to be remapped in the launch file
  mResetSystemClient = mNodeHandle.serviceClient<mcptam::Reset>("reset", true);

  ROS_INFO_STREAM("MapMakerServerBase: Reset system client is connecting to: " << mResetSystemClient.getService());

  while (!mResetSystemClient.waitForExistence(ros::Duration(5)) && ros::ok())
  {
    ROS_WARN_STREAM("MapMakerServerBase: Waiting for system reset service to be advertised...");
  }

  Reset();
}
Example #24
  /** Read camera data.
   *
   * @param image points to camera Image message
   * @return true if successful, with image filled in
   */
  bool NaoqiCameraDriver::read(sensor_msgs::ImagePtr &image)
  {
    bool success = true;
    try
      {
        // Read data from the Camera
        ROS_DEBUG_STREAM("[" << camera_name_ << "] reading data");

        //TODO: support local image access. This first requires
        // writing a NAOqi 'local' module.
        // cf: http://www.aldebaran-robotics.com/documentation/dev/cpp/tutos/create_module.html#how-to-create-a-local-module
        ALValue al_image = camera_proxy_->getImageRemote(camera_name_);

        if (config_.use_ros_time)
            image->header.stamp = ros::Time::now();
        else { 
            // use NAOqi timestamp
            image->header.stamp = ros::Time(((double) al_image[4] / 1000000.0) + (double) al_image[5]);
        }

        image->width = (int) al_image[0];
        image->height = (int) al_image[1];
        image->step = image->width * (int) al_image[2];
        image->encoding = sensor_msgs::image_encodings::RGB8;

        int image_size = image->height * image->step;
        image->data.resize(image_size);

        memcpy(&(image->data)[0], 
                al_image[6].GetBinary(),
                image_size);

        camera_proxy_->releaseImage(camera_name_);

        success = true;
        ROS_DEBUG_STREAM("[" << camera_name_ << "] read returned");
      }
    catch (naoqicamera_driver::Exception& e)
      {
        ROS_WARN_STREAM("[" << camera_name_
                        << "] Exception reading data: " << e.what());
        success = false;
      }
    return success;
  }
kinematic_constraints::ConstraintEvaluationResult kinematic_constraints::OrientationConstraint::decide(const planning_models::KinematicState &state, bool verbose) const
{
  if (!link_model_) 
    return ConstraintEvaluationResult(true, 0.0);

  const planning_models::KinematicState::LinkState *link_state = state.getLinkState(link_model_->getName());
  
  if (!link_state)
  {
    ROS_WARN_STREAM("No link in state with name '" << link_model_->getName() << "'");
    return ConstraintEvaluationResult(false, 0.0);
  }
  
  Eigen::Vector3d xyz;
  if (mobile_frame_)
  {
    Eigen::Matrix3d tmp;
    tf_->transformRotationMatrix(state, desired_rotation_frame_id_, desired_rotation_matrix_, tmp);
    Eigen::Affine3d diff(tmp.inverse() * link_state->getGlobalLinkTransform().rotation());
    xyz = diff.rotation().eulerAngles(0, 1, 2); // XYZ
  }
  else
  {
    Eigen::Affine3d diff(desired_rotation_matrix_inv_ * link_state->getGlobalLinkTransform().rotation());
    xyz = diff.rotation().eulerAngles(0, 1, 2); // 0,1,2 corresponds to ZXZ, the convention used in sampling constraints
  }
  
  xyz(0) = std::min(fabs(xyz(0)), boost::math::constants::pi<double>() - fabs(xyz(0)));
  xyz(1) = std::min(fabs(xyz(1)), boost::math::constants::pi<double>() - fabs(xyz(1)));
  xyz(2) = std::min(fabs(xyz(2)), boost::math::constants::pi<double>() - fabs(xyz(2)));
  bool result = xyz(2) < absolute_z_axis_tolerance_ && xyz(1) < absolute_y_axis_tolerance_ && xyz(0) < absolute_x_axis_tolerance_;
  
  if (verbose)
  {
    Eigen::Quaterniond q_act(link_state->getGlobalLinkTransform().rotation());
    Eigen::Quaterniond q_des(desired_rotation_matrix_);
    ROS_INFO("Orientation constraint %s for link '%s'. Quaternion desired: %f %f %f %f, quaternion actual: %f %f %f %f, error: x=%f, y=%f, z=%f, tolerance: x=%f, y=%f, z=%f",
             result ? "satisfied" : "violated", link_model_->getName().c_str(),
             q_des.x(), q_des.y(), q_des.z(), q_des.w(),
             q_act.x(), q_act.y(), q_act.z(), q_act.w(), xyz(0), xyz(1), xyz(2),
             absolute_x_axis_tolerance_, absolute_y_axis_tolerance_, absolute_z_axis_tolerance_);
  }
  
  return ConstraintEvaluationResult(result, constraint_weight_ * (xyz(0) + xyz(1) + xyz(2)));
}
Example #26
bool PlaceObject::MoveToPose(const geometry_msgs::PoseStamped& target_pose, double eef_step, bool check_collision)
{

  //  move_group_->setJointValueTarget(target_pose);
  target_pose_publisher_.publish(target_pose);
  geometry_msgs::PoseStamped current_pose = move_group_->getCurrentPose();
  std::vector<geometry_msgs::Pose> waypoints;
  waypoints.push_back(current_pose.pose);
  waypoints.push_back(target_pose.pose);

  robot_state::RobotState rs = *move_group_->getCurrentState();
  move_group_->setStartState(rs);

  // Plan trajectory.
  moveit_msgs::RobotTrajectory trajectory;
  moveit_msgs::MoveItErrorCodes error_code;
  double path_fraction = move_group_->computeCartesianPath(waypoints, eef_step, 0.0, trajectory, check_collision, &error_code);
  if(path_fraction < 1.0) {
    ROS_WARN_STREAM("PlacesObject: Could not calculate Cartesian Path.");
    return false;
  }

  robot_trajectory::RobotTrajectory robot_trajectory(rs.getRobotModel(), move_group_->getName());
  robot_trajectory.setRobotTrajectoryMsg(rs, trajectory);

  trajectory_processing::IterativeParabolicTimeParameterization iptp;
  iptp.computeTimeStamps(robot_trajectory, 1.0);
  robot_trajectory.getRobotTrajectoryMsg(trajectory);

  moveit::planning_interface::MoveGroup::Plan my_plan;
  moveit_msgs::RobotState robot_state_msg;
  robot_state::robotStateToRobotStateMsg(*move_group_->getCurrentState(), robot_state_msg);

  my_plan.trajectory_ = trajectory;
  my_plan.start_state_ = robot_state_msg;

  // removing points with velocity zero
  my_plan.trajectory_.joint_trajectory.points.erase(
      std::remove_if(my_plan.trajectory_.joint_trajectory.points.begin() + 1, my_plan.trajectory_.joint_trajectory.points.end(), [](const trajectory_msgs::JointTrajectoryPoint & p)
      { return p.time_from_start.toSec() == 0;}),
      my_plan.trajectory_.joint_trajectory.points.end());

  move_group_->execute(my_plan);
  return true;
}
Example #27
MarkersConfig::MarkersConfig(std::string path, float default_val)throw(cv::Exception):
    msize(__number_of_possible_markers,default_val),
    mposition(__number_of_possible_markers,cv::Point3f(0,0,0))
{
    std::ifstream file(path.c_str());
    if(!file) throw  cv::Exception(9100, "Could not open file:"+path,"::constructor::",__FILE__,__LINE__);

    std::list<int> readIDs;
    char line[1024];
    while(!file.eof()){
        file.getline(line,1024);
        int id;
        float size,m_x,m_y,m_z;
        switch (sscanf(line,"%d = %f (%f,%f,%f)",&id,&size,&m_x,&m_y,&m_z)){
        case 2:
            if(this->msize.at(id) != default_val){
                ROS_WARN_STREAM("Warning: ID Marker " << id << " is " << size << " instead of the previous " << this->msize.at(id));
            }

            this->msize.at(id)=size;
            readIDs.push_back(id);

            //debug:
            ROS_INFO_STREAM(id << " = " << size );
            break;
        case 5:
            if(this->msize.at(id) != default_val){
                std::cout << "Warning: ID Marker " << id << " is " << size << " instead of the previous " << this->msize.at(id) << std::endl;
                std::cout << "Probably you want to review the file configuration: " << std::endl;
            }
            this->msize.at(id)=size;
            this->mposition.at(id)= cv::Point3f(m_x,m_y,m_z);
            readIDs.push_back(id);
            //debug:

            ROS_INFO_STREAM(id << " = " << size
                             << " (" << m_x
                             << ',' << m_y
                             << ',' << m_z << ')');
            break;

        }

    }
}
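// The parser above accepts one marker per line in either of two forms, derived from
// the sscanf pattern "%d = %f (%f,%f,%f)": "<id> = <size>" or
// "<id> = <size> (<x>,<y>,<z>)", e.g.
//   7 = 0.045
//   12 = 0.08 (0.10,0.25,0.00)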
Example #28
 void rosNamed(const std::vector<std::string> &msgs) {
   if (msgs.size()==0) return;
   if (msgs.size()==1) { ROS_INFO_STREAM("Kobuki : " << msgs[0]); }
   if (msgs.size()==2) {
     if      (msgs[0] == "debug") { ROS_DEBUG_STREAM("Kobuki : " << msgs[1]); }
     else if (msgs[0] == "info" ) { ROS_INFO_STREAM ("Kobuki : " << msgs[1]); }
     else if (msgs[0] == "warn" ) { ROS_WARN_STREAM ("Kobuki : " << msgs[1]); }
     else if (msgs[0] == "error") { ROS_ERROR_STREAM("Kobuki : " << msgs[1]); }
     else if (msgs[0] == "fatal") { ROS_FATAL_STREAM("Kobuki : " << msgs[1]); }
   }
   if (msgs.size()==3) {
     if      (msgs[0] == "debug") { ROS_DEBUG_STREAM_NAMED(msgs[1], "Kobuki : " << msgs[2]); }
     else if (msgs[0] == "info" ) { ROS_INFO_STREAM_NAMED (msgs[1], "Kobuki : " << msgs[2]); }
     else if (msgs[0] == "warn" ) { ROS_WARN_STREAM_NAMED (msgs[1], "Kobuki : " << msgs[2]); }
     else if (msgs[0] == "error") { ROS_ERROR_STREAM_NAMED(msgs[1], "Kobuki : " << msgs[2]); }
     else if (msgs[0] == "fatal") { ROS_FATAL_STREAM_NAMED(msgs[1], "Kobuki : " << msgs[2]); }
   }
 }
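// A small usage sketch: the first element selects the severity, an optional middle
// element names the logger, and the last element is the message text. The logger
// name "battery" is only an example.
std::vector<std::string> msg;
msg.push_back("warn");
msg.push_back("battery");
msg.push_back("voltage is getting low");
rosNamed(msg);  // forwards to ROS_WARN_STREAM_NAMED("battery", "Kobuki : voltage is getting low")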
Example #29
 /** Read camera data.
  *
  * @return true if successful
  */
 bool read()
 {
   bool success = true;
   try
     {
       // Read data from the Camera
       ROS_DEBUG_STREAM("[" << camera_name_ << "] reading data");
       dev_->readData(image_);
       ROS_DEBUG_STREAM("[" << camera_name_ << "] read returned");
     }
   catch (camera1394v2::Exception& e)
     {
       ROS_WARN_STREAM("[" << camera_name_
                       << "] Exception reading data: " << e.what());
       success = false;
     }
   return success;
 }
Example #30
 /** Read camera data.
  *
  * @param image points to camera Image message
  * @return true if successful, with image filled in
  */
 bool Camera1394Driver::read(sensor_msgs::ImagePtr &image)
 {
   bool success = true;
   try
     {
       // Read data from the Camera
       ROS_DEBUG_STREAM("[" << camera_name_ << "] reading data");
       success = dev_->readData(*image);
       ROS_DEBUG_STREAM("[" << camera_name_ << "] read returned");
     }
   catch (camera1394::Exception& e)
     {
       ROS_WARN_STREAM("[" << camera_name_
                       << "] Exception reading data: " << e.what());
       success = false;
     }
   return success;
 }