void OPTCalibrationNode::spin()
{
  ros::Rate rate(5.0);

  while (ros::ok())
  {
    ros::spinOnce();
    calibration_->nextAcquisition();

    int count = 0;

    try
    {
#pragma omp parallel for
      for (size_t i = 0; i < pinhole_vec_.size(); ++i)
      {
        const PinholeRGBDevice::Ptr & device = pinhole_vec_[i];
        if (device->hasNewMessages())
        {
          device->convertLastMessages();
          PinholeRGBDevice::Data::Ptr data = device->lastData();
          OPTCalibration::CheckerboardView::Ptr cb_view;
          ROS_DEBUG_STREAM("[" << device->frameId() << "] analysing image generated at: " << device->lastMessages().image_msg->header.stamp);
#pragma omp critical
          if (calibration_->analyzeData(device->sensor(), data->image, cb_view))
          {
            calibration_->addData(device->sensor(), cb_view);
            images_acquired_map_[device->frameId()]++;
            ROS_INFO_STREAM("[" << device->frameId() << "] checkerboard detected");
            ++count;
          }
        }
      }
      for (size_t i = 0; i < kinect_vec_.size(); ++i)
      {
        const KinectDevice::Ptr & device = kinect_vec_[i];
        if (device->hasNewMessages())
        {
          device->convertLastMessages();
          KinectDevice::Data::Ptr data = device->lastData();
          OPTCalibration::CheckerboardView::Ptr color_cb_view;
          OPTCalibration::CheckerboardView::Ptr depth_cb_view;
          ROS_DEBUG_STREAM("[" << device->colorFrameId() << "] analysing image generated at: " << device->lastMessages().image_msg->header.stamp);
          ROS_DEBUG_STREAM("[" << device->depthFrameId() << "] analysing cloud generated at: " << device->lastMessages().cloud_msg->header.stamp);
          if (calibration_->analyzeData(device->colorSensor(), device->depthSensor(),
                                        data->image, data->cloud,
                                        color_cb_view, depth_cb_view))
          {
            calibration_->addData(device->colorSensor(), color_cb_view);
            calibration_->addData(device->depthSensor(), depth_cb_view);
            images_acquired_map_[device->colorFrameId()]++;
            ROS_INFO_STREAM("[" << device->colorFrameId() << "] checkerboard detected");
            ++count;
          }
        }
      }
      for (size_t i = 0; i < swiss_ranger_vec_.size(); ++i)
      {
        const SwissRangerDevice::Ptr & device = swiss_ranger_vec_[i];
        if (device->hasNewMessages())
        {
          device->convertLastMessages();
          SwissRangerDevice::Data::Ptr data = device->lastData();
          OPTCalibration::CheckerboardView::Ptr color_cb_view;
          OPTCalibration::CheckerboardView::Ptr depth_cb_view;
          ROS_DEBUG_STREAM("[" << device->frameId() << "] analysing image generated at: " << device->lastMessages().intensity_msg->header.stamp);
          ROS_DEBUG_STREAM("[" << device->frameId() << "] analysing cloud generated at: " << device->lastMessages().cloud_msg->header.stamp);
          if (calibration_->analyzeData(device->intensitySensor(), device->depthSensor(),
                                        data->intensity_image, data->cloud,
                                        color_cb_view, depth_cb_view))
          {
            calibration_->addData(device->intensitySensor(), color_cb_view);
            calibration_->addData(device->depthSensor(), depth_cb_view);
            images_acquired_map_[device->frameId()]++;
            ROS_INFO_STREAM("[" << device->frameId() << "] checkerboard detected");
            ++count;
          }
        }
      }
    }
    catch (cv_bridge::Exception & ex)
    {
      ROS_ERROR_STREAM("cv_bridge exception: " << ex.what());
      return;
    }
    catch (std::runtime_error & ex)
    {
      ROS_ERROR_STREAM("exception: " << ex.what());
      return;
    }

    status_msg_.header.stamp = ros::Time::now();
    status_msg_.header.seq++;
    for (size_t i = 0; i < status_msg_.sensor_ids.size(); ++i)
    {
      status_msg_.images_acquired[i] = images_acquired_map_[status_msg_.sensor_ids[i]];
    }
    status_pub_.publish(status_msg_);

    calibration_->perform();
    calibration_->publish();

    if (count > 0)
      ROS_INFO("-----------------------------------------------");

    rate.sleep();
  }
}
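// A minimal sketch of a main() that would drive the node above (hypothetical:
// only spin() appears in this example, so the constructor shown here is an
// assumption):
int main(int argc, char** argv)
{
  ros::init(argc, argv, "opt_calibration");
  ros::NodeHandle node_handle("~");
  OPTCalibrationNode calib_node(node_handle);  // assumed constructor
  calib_node.spin();                           // runs the acquisition loop above
  return 0;
}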
  /** Dynamic reconfigure callback
   *
   *  Called immediately when callback first defined. Called again
   *  when dynamic reconfigure starts or changes a parameter value.
   *
   *  @param newconfig new Config values
   *  @param level bit-wise OR of reconfiguration levels for all
   *               changed parameters (0xffffffff on initial call)
   **/
  void Camera1394Driver::reconfig(Config &newconfig, uint32_t level)
  {
    // Do not run concurrently with poll().  Tell it to stop running,
    // and wait on the lock until it does.
    reconfiguring_ = true;
    boost::mutex::scoped_lock lock(mutex_);
    ROS_DEBUG("dynamic reconfigure level 0x%x", level);

    // resolve frame ID using tf_prefix parameter
    if (newconfig.frame_id == "")
      newconfig.frame_id = "camera";
    std::string tf_prefix = tf::getPrefixParam(priv_nh_);
    ROS_DEBUG_STREAM("tf_prefix: " << tf_prefix);
    newconfig.frame_id = tf::resolve(tf_prefix, newconfig.frame_id);

    if (state_ != Driver::CLOSED && (level & Levels::RECONFIGURE_CLOSE))
      {
        // must close the device before updating these parameters
        closeCamera();                  // state_ --> CLOSED
      }

    if (state_ == Driver::CLOSED)
      {
        // open with new values
        openCamera(newconfig);
      }

    if (config_.camera_info_url != newconfig.camera_info_url)
      {
        if (!validateConfig(newconfig))
          {
            // new URL not valid, use the old one
            newconfig.camera_info_url = config_.camera_info_url;
          }
      }

    if (state_ != Driver::CLOSED)       // openCamera() succeeded?
      {
        // configure IIDC features
        if (level & Levels::RECONFIGURE_CLOSE)
          {
            // initialize all features for newly opened device
            if (false == dev_->features_->initialize(&newconfig))
              {
                ROS_ERROR_STREAM("[" << camera_name_
                                 << "] feature initialization failure");
                closeCamera();          // can't continue
              }
          }
        else
          {
            // update any features that changed
            // TODO replace this with a dev_->reconfigure(&newconfig);
            dev_->features_->reconfigure(&newconfig);
          }
      }

    config_ = newconfig;                // save new parameters
    reconfiguring_ = false;             // let poll() run again

    ROS_DEBUG_STREAM("[" << camera_name_
                     << "] reconfigured: frame_id " << newconfig.frame_id
                     << ", camera_info_url " << newconfig.camera_info_url);
  }
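  // A sketch of how this callback is typically bound to a dynamic_reconfigure
  // server (standard dynamic_reconfigure API; Config is assumed to alias the
  // generated camera1394 config type, and `driver` to be an instance of this
  // class kept alive elsewhere):
  void setupReconfigureServer(Camera1394Driver &driver, ros::NodeHandle &priv_nh)
  {
    static dynamic_reconfigure::Server<Config> srv(priv_nh);
    srv.setCallback(boost::bind(&Camera1394Driver::reconfig, &driver, _1, _2));
  }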
	/** Dynamic reconfigure callback
	 *
	 *  Called immediately when callback first defined. Called again
	 *  when dynamic reconfigure starts or changes a parameter value.
	 *
	 *  @param newconfig new Config values
	 *  @param level bit-wise OR of reconfiguration levels for all
	 *               changed parameters (0xffffffff on initial call)
	 **/
	void reconfig(Config &newconfig, uint32_t level)
	{
		ROS_DEBUG("dynamic reconfigure level 0x%x", level);

		// resolve frame ID using tf_prefix parameter
		if (newconfig.frame_id == "")
			newconfig.frame_id = "camera";
		std::string tf_prefix = tf::getPrefixParam(privNH_);
		ROS_DEBUG_STREAM("tf_prefix: " << tf_prefix);
		newconfig.frame_id = tf::resolve(tf_prefix, newconfig.frame_id);

		if (state_ != Driver::CLOSED && (level & Levels::RECONFIGURE_CLOSE))
		{
			// must close the device before updating these parameters
			closeCamera();                  // state_ --> CLOSED
		}

		if (state_ == Driver::CLOSED)
		{
			// open with new values
			if (openCamera(newconfig))
			{
				// update camera name string
				newconfig.camera_name = camera_name_;
			}
		}
		
		
		/// NOTE: this is a quick hack to set exposure on our cameras; it does not work for all cameras.
		if (config_.exposure != newconfig.exposure) {
			try {
				cam_->set_control(0x9a0901, newconfig.exposure);
			} catch (uvc_cam::Exception& e) {
				ROS_ERROR_STREAM("Problem setting exposure. Exception was " << e.what());
			}
		}
		if (config_.absolute_exposure != newconfig.absolute_exposure) {
			try {
				cam_->set_control(0x9a0902, newconfig.absolute_exposure);
			} catch (uvc_cam::Exception& e) {
				ROS_ERROR_STREAM("Problem setting absolute exposure. Exception was " << e.what());
			}
		}
		if (config_.sharpness != newconfig.sharpness) {
			try {
				cam_->set_control(0x98091b, newconfig.sharpness);
			} catch (uvc_cam::Exception& e) {
				ROS_ERROR_STREAM("Problem setting sharpness. Exception was " << e.what());
			}
		}
		if (config_.power_line_frequency != newconfig.power_line_frequency) {
			try {
				cam_->set_control(0x980918, newconfig.power_line_frequency);
			} catch (uvc_cam::Exception& e) {
				ROS_ERROR_STREAM("Problem setting powerline frequency. Exception was " << e.what());
			}
		}
		if (config_.white_balance_temperature != newconfig.white_balance_temperature) {
			try {
				cam_->set_control(0x98090c, newconfig.white_balance_temperature);
			} catch (uvc_cam::Exception& e) {
				ROS_ERROR_STREAM("Problem setting white balance temperature. Exception was " << e.what());
			}
		}
		if (config_.gain != newconfig.gain) {
			try {
				cam_->set_control(0x980913, newconfig.gain);
			} catch (uvc_cam::Exception& e) {
				ROS_ERROR_STREAM("Problem setting gain. Exception was " << e.what());
			}
		}
		if (config_.saturation != newconfig.saturation) {
			try {
				cam_->set_control(0x980902, newconfig.saturation);
			} catch (uvc_cam::Exception& e) {
				ROS_ERROR_STREAM("Problem setting saturation. Exception was " << e.what());
			}
		}
		if (config_.contrast != newconfig.contrast) {
			try {
				cam_->set_control(0x980901, newconfig.contrast);
			} catch (uvc_cam::Exception& e) {
				ROS_ERROR_STREAM("Problem setting contrast. Exception was " << e.what());
			}
		}
		if (config_.brightness != newconfig.brightness) {
			try {
				cam_->set_control(0x980900, newconfig.brightness);
			} catch (uvc_cam::Exception& e) {
				ROS_ERROR_STREAM("Problem setting brightness. Exception was " << e.what());
			}
		}

		if (config_.camera_info_url != newconfig.camera_info_url)
		{
			// set the new URL and load CameraInfo (if any) from it
			if (cinfo_.validateURL(newconfig.camera_info_url))
			{
				cinfo_.loadCameraInfo(newconfig.camera_info_url);
			}
			else
			{
				// new URL not valid, use the old one
				newconfig.camera_info_url = config_.camera_info_url;
			}
		}

		//	    if (state_ != Driver::CLOSED)       // openCamera() succeeded?
		//	      {
		//	        // configure IIDC features
		//	        if (level & Levels::RECONFIGURE_CLOSE)
		//	          {
		//	            // initialize all features for newly opened device
		//	            if (false == dev_->features_->initialize(&newconfig))
		//	              {
		//	                ROS_ERROR_STREAM("[" << camera_name_
		//	                                 << "] feature initialization failure");
		//	                closeCamera();          // can't continue
		//	              }
		//	          }
		//	        else
		//	          {
		//	            // update any features that changed
		//	            dev_->features_->reconfigure(&newconfig);
		//	          }
		//	      }

		config_ = newconfig;                // save new parameters

		ROS_DEBUG_STREAM("[" << camera_name_
				<< "] reconfigured: frame_id " << newconfig.frame_id
				<< ", camera_info_url " << newconfig.camera_info_url);
	}
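	// The nine nearly identical blocks above could share one helper; a sketch
	// (the helper is hypothetical, not part of the original driver):
	void trySetControl(uint32_t control_id, int value, const char *name)
	{
		try {
			cam_->set_control(control_id, value);
		} catch (uvc_cam::Exception& e) {
			ROS_ERROR_STREAM("Problem setting " << name << ". Exception was " << e.what());
		}
	}
	// e.g.: if (config_.exposure != newconfig.exposure)
	//           trySetControl(0x9a0901, newconfig.exposure, "exposure");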
void GraphManager::visualizeFeatureFlow3D(const Node& earlier_node,
        const Node& newer_node,
        const std::vector<cv::DMatch>& all_matches,
        const std::vector<cv::DMatch>& inlier_matches,
        unsigned int marker_id,
        bool draw_outlier) const {
    std::clock_t starttime=std::clock();
    if (ransac_marker_pub_.getNumSubscribers() > 0) { //don't visualize, if nobody's looking

        visualization_msgs::Marker marker_lines;

        marker_lines.header.frame_id = "/openni_rgb_optical_frame";
        marker_lines.ns = "ransac_markers";
        marker_lines.header.stamp = ros::Time::now();
        marker_lines.action = visualization_msgs::Marker::ADD;
        marker_lines.pose.orientation.w = 1.0;
        marker_lines.id = marker_id;
        marker_lines.type = visualization_msgs::Marker::LINE_LIST;
        marker_lines.scale.x = 0.002;

        std_msgs::ColorRGBA color_red;    // outlier, newer endpoint
        color_red.r = 1.0;
        color_red.a = 1.0;
        std_msgs::ColorRGBA color_green;  // inlier, newer endpoint
        color_green.g = 1.0;
        color_green.a = 1.0;
        std_msgs::ColorRGBA color_yellow; // inlier, earlier endpoint (currently unused below)
        color_yellow.r = 1.0;
        color_yellow.g = 1.0;
        color_yellow.a = 1.0;
        std_msgs::ColorRGBA color_blue;   // earlier endpoint, for both outliers and inliers
        color_blue.b = 1.0;
        color_blue.a = 1.0;

        marker_lines.color = color_green; //just to set the alpha channel to non-zero

        AISNavigation::PoseGraph3D::Vertex* earlier_v; //used to get the transform
        AISNavigation::PoseGraph3D::Vertex* newer_v; //used to get the transform
        AISNavigation::PoseGraph3D::VertexIDMap v_idmap = optimizer_->vertices();
        // end of initialization
        ROS_DEBUG("Matches Visualization start");

        // write all inital matches to the line_list
        marker_lines.points.clear();//necessary?

        if (draw_outlier)
        {
            for (unsigned int i=0; i<all_matches.size(); i++) {
                int newer_id = all_matches.at(i).queryIdx; //feature id in newer node
                int earlier_id = all_matches.at(i).trainIdx; //feature id in earlier node

                earlier_v = static_cast<AISNavigation::PoseGraph3D::Vertex*>(v_idmap[earlier_node.id_]);
                newer_v = static_cast<AISNavigation::PoseGraph3D::Vertex*>(v_idmap[newer_node.id_]);

                //Outliers are red (newer) to blue (older)
                marker_lines.colors.push_back(color_red);
                marker_lines.colors.push_back(color_blue);

                marker_lines.points.push_back(
                    pointInWorldFrame(newer_node.feature_locations_3d_[newer_id],
                                      newer_v->transformation));
                marker_lines.points.push_back(
                    pointInWorldFrame(earlier_node.feature_locations_3d_[earlier_id],
                                      earlier_v->transformation));
            }
        }


        for (unsigned int i=0; i<inlier_matches.size(); i++) {
            int newer_id = inlier_matches.at(i).queryIdx; //feature id in newer node
            int earlier_id = inlier_matches.at(i).trainIdx; //feature id in earlier node

            earlier_v = static_cast<AISNavigation::PoseGraph3D::Vertex*>(v_idmap[earlier_node.id_]);
            newer_v = static_cast<AISNavigation::PoseGraph3D::Vertex*>(v_idmap[newer_node.id_]);


            //inliers are green (newer) to blue (older)
            marker_lines.colors.push_back(color_green);
            marker_lines.colors.push_back(color_blue);

            marker_lines.points.push_back(
                pointInWorldFrame(newer_node.feature_locations_3d_[newer_id],
                                  newer_v->transformation));
            marker_lines.points.push_back(
                pointInWorldFrame(earlier_node.feature_locations_3d_[earlier_id],
                                  earlier_v->transformation));
        }



        ransac_marker_pub_.publish(marker_lines);
        ROS_DEBUG_STREAM("Published  " << marker_lines.points.size()/2 << " lines");
    }
    ROS_INFO_STREAM_COND_NAMED(( (std::clock()-starttime) / (double)CLOCKS_PER_SEC) > 0.01, "timings", "function runtime: "<< ( std::clock() - starttime ) / (double)CLOCKS_PER_SEC  <<"sec");
}
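// For reference, a minimal sketch of the pointInWorldFrame() helper used
// above: it maps a feature position from the node's frame into the world
// frame via the vertex pose. The types here (Eigen::Vector4f feature, 4x4
// homogeneous pose matrix) are assumptions; the actual helper may use the
// optimizer's own transform type.
geometry_msgs::Point pointInWorldFrame(const Eigen::Vector4f& feature,
                                       const Eigen::Matrix4f& world_from_node)
{
    Eigen::Vector4f p = world_from_node * feature;  // homogeneous transform
    geometry_msgs::Point out;
    out.x = p(0);
    out.y = p(1);
    out.z = p(2);
    return out;
}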
void JointPositionController::update(const ros::Time &time, const ros::Duration &period) {
    ROS_DEBUG_STREAM("updating");
    
    // compute velocities and accelerations by hand just to be sure
    for (int i = 0; i < n_joints_; i++) {
        double current_position = joints_[i].getPosition();
        current_velocities_[i] = (current_position - previous_positions_[i]) / period.toSec();
        current_accelerations_[i] = (current_velocities_[i] - previous_velocities_[i]) / period.toSec();
        previous_positions_[i] = current_position;
        previous_velocities_[i] = current_velocities_[i];
    }
    
    if (new_reference_) {
        // Read the latest commanded trajectory message
        commanded_trajectory_ = *trajectory_command_buffer_.readFromRT();
        new_reference_ = false;
        
        for (int i = 0; i < n_joints_; i ++) {
            if (std::abs(commanded_trajectory_.positions[i] - last_commanded_trajectory_.positions[i]) > command_update_tolerance_) {
                must_recompute_trajectory_ = true;
                reached_reference_ = false;
                last_commanded_trajectory_ = commanded_trajectory_;
            }
        }
    }

    // Initialize RML result
    int rml_result = 0;
    
    ROS_DEBUG_STREAM("checking for having to recompute");


    // Compute RML traj after the start time and if there are still points in the queue
    if (must_recompute_trajectory_) {
        // Compute the trajectory
        ROS_WARN_STREAM("RML Recomputing trajectory...");

        // Update RML input parameters
        for (int i = 0; i < n_joints_; i++) {
            
            rml_in_->CurrentPositionVector->VecData[i] = joints_[i].getPosition();
            rml_in_->CurrentVelocityVector->VecData[i] = current_velocities_[i];
            rml_in_->CurrentAccelerationVector->VecData[i] = current_accelerations_[i];

            rml_in_->TargetPositionVector->VecData[i] = commanded_trajectory_.positions[i];
            rml_in_->TargetVelocityVector->VecData[i] = commanded_trajectory_.velocities[i];

            rml_in_->SelectionVector->VecData[i] = true;
        }
        
        
        ROS_DEBUG_STREAM("Current position: " << std::endl << *(rml_in_->CurrentPositionVector));
        ROS_DEBUG_STREAM("Target position: " << std::endl << *(rml_in_->TargetPositionVector));

        // Store the traj start time
        traj_start_time_ = time;

        // Set desired execution time for this trajectory (definitely > 0)
        rml_in_->SetMinimumSynchronizationTime(minimum_synchronization_time_);

//         ROS_DEBUG_STREAM("RML IN: time: " << rml_in_->GetMinimumSynchronizationTime());

        // Specify behavior after reaching point
        rml_flags_.BehaviorAfterFinalStateOfMotionIsReached = recompute_trajectory_ ? RMLPositionFlags::RECOMPUTE_TRAJECTORY : RMLPositionFlags::KEEP_TARGET_VELOCITY;
        rml_flags_.SynchronizationBehavior = RMLPositionFlags::ONLY_TIME_SYNCHRONIZATION;
        rml_flags_.KeepCurrentVelocityInCaseOfFallbackStrategy = true;

        // Compute trajectory
        rml_result = rml_->RMLPosition(*rml_in_.get(),
                                       rml_out_.get(),
                                       rml_flags_);

        // Disable recompute flag
        must_recompute_trajectory_ = false;
    } 
    
    // Sample the already computed trajectory
    rml_result = rml_->RMLPositionAtAGivenSampleTime(
                        (time - traj_start_time_ + period).toSec(),
                        rml_out_.get());

    // Determine if any of the joint tolerances have been violated
    for (int i = 0; i < n_joints_; i++) {
        double tracking_error = std::abs(rml_out_->NewPositionVector->VecData[i] - joints_[i].getPosition());

        if (tracking_error > position_tolerances_[i]) {
            must_recompute_trajectory_ = true;
            ROS_WARN_STREAM("Tracking for joint " << i << " outside of tolerance! (" << tracking_error 
                << " > " << position_tolerances_[i] << ")");
        }
    }

    // Compute command
    for (int i = 0; i < n_joints_; i++) {
        commanded_positions_[i] = rml_out_->NewPositionVector->VecData[i];
    }

    // React to the RML result code (on error, fall back to the measured position)
    switch (rml_result) {
    case ReflexxesAPI::RML_WORKING:
        // S'all good.
        break;

    case ReflexxesAPI::RML_FINAL_STATE_REACHED:
        ROS_DEBUG_STREAM("final state reached");
        must_recompute_trajectory_ = recompute_trajectory_;
        reached_reference_ = true;
        break;

    default:
        if (loop_count_ % decimation_ == 0) {
            ROS_ERROR("Reflexxes error code: %d. Setting position commands to measured position.", rml_result);
        }

        for (int i = 0; i < n_joints_; i++)
            commanded_positions_[i] = joints_[i].getPosition();
        
        break;
    };

    // Set the lower-level commands
    if (!reached_reference_) {
        ROS_DEBUG_STREAM("setting command");
        for (int i = 0; i < n_joints_; i++) {
            joints_[i].setCommand(commanded_positions_[i]);
        }
    }

    // Publish state
    if (loop_count_ % decimation_ == 0) {
        /*
         *      boost::scoped_ptr<realtime_tools::RealtimePublisher<controllers_msgs::JointControllerState> >
         *        &state_pub = controller_state_publisher_;
         *
         *      for(int i=0; i<n_joints_; i++) {
         *        if(state_pub && state_pub->trylock()) {
         *          state_pub->msg_.header.stamp = time;
         *          state_pub->msg_.set_point = pos_target;
         *          state_pub->msg_.process_value = pos_actual;
         *          state_pub->msg_.process_value_dot = vel_actual;
         *          state_pub->msg_.error = pos_error;
         *          state_pub->msg_.time_step = period.toSec();
         *          state_pub->msg_.command = commanded_effort;
         *
         *          double dummy;
         *          pids_[i]->getGains(
         *              state_pub->msg_.p,
         *              state_pub->msg_.i,
         *              state_pub->msg_.d,
         *              state_pub->msg_.i_clamp,
         *              dummy);
         *          state_pub->unlockAndPublish();
        }
        }
        */
    }

    // Increment the loop count
    loop_count_++;
}
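// A sketch of how the Reflexxes objects used above (rml_, rml_in_, rml_out_)
// might be created during controller init (assumed: the Reflexxes Type II
// API with a fixed control period; the limit members are placeholders):
void JointPositionController::initRML(double period_sec)  // hypothetical helper
{
    rml_.reset(new ReflexxesAPI(n_joints_, period_sec));
    rml_in_.reset(new RMLPositionInputParameters(n_joints_));
    rml_out_.reset(new RMLPositionOutputParameters(n_joints_));
    for (int i = 0; i < n_joints_; i++) {
        rml_in_->MaxVelocityVector->VecData[i] = max_velocities_[i];          // assumed limit members
        rml_in_->MaxAccelerationVector->VecData[i] = max_accelerations_[i];
        rml_in_->MaxJerkVector->VecData[i] = max_jerks_[i];
    }
}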
  /** Open the camera device.
   *
   * @param newconfig configuration parameters
   * @return true, if successful
   *
   * @post diagnostics frequency parameters set
   *
   * if successful:
   *   state_ is Driver::OPENED
   *   camera_name_ set to GUID string
   *   GUID configuration parameter updated
   */
  bool NaoqiCameraDriver::openCamera(Config &newconfig)
  {
    bool success = false;

    try
    {
        camera_proxy_ = boost::shared_ptr<ALVideoDeviceProxy>(new ALVideoDeviceProxy(m_broker));

        if (camera_proxy_->getGenericProxy()->isLocal())
            ROS_INFO("nao_camera runs directly on the robot.");
        else
            ROS_WARN("nao_camera runs remotely.");
        // build a unique name for the camera that will be used to
        // store calibration information.
        stringstream canonical_name;
        canonical_name << "nao_camera" 
                       << "_src" << newconfig.source 
                       << "_res" << newconfig.resolution;

        camera_name_ = camera_proxy_->subscribeCamera(
                                    camera_name_, 
                                    newconfig.source,
                                    newconfig.resolution,
                                    kRGBColorSpace,
                                    newconfig.frame_rate);

        if (!cinfo_->setCameraName(canonical_name.str()))
        {
            // GUID is 16 hex digits, which should be valid.
            // If not, use it for log messages anyway.
            ROS_WARN_STREAM("[" << camera_name_
                            << "] name not valid"
                            << " for camera_info_manger");
        }

        ROS_INFO_STREAM("[" << camera_name_ << "] opened: resolution "
                        << newconfig.resolution << " @"
                        << newconfig.frame_rate << " fps");
        state_ = Driver::OPENED;
        calibration_matches_ = true;
        newconfig.guid = camera_name_; // update configuration parameter
        retries_ = 0;
        success = true;
    }
    catch (const ALError& e)
    {
        state_ = Driver::CLOSED;    // since the open() failed
        if (retries_++ > 0)
          ROS_DEBUG_STREAM("[" << camera_name_
                           << "] exception opening device (retrying): "
                           << e.what());
        else
          ROS_ERROR_STREAM("[" << camera_name_
                           << "] device open failed: " << e.what());
    }

    // update diagnostics parameters
    diagnostics_.setHardwareID(camera_name_);
    double delta = newconfig.frame_rate * 0.1; // allow 10% error margin
    topic_diagnostics_min_freq_ = newconfig.frame_rate - delta;
    topic_diagnostics_max_freq_ = newconfig.frame_rate + delta;

    return success;
  }
/***********************************************************************
 *  Method: RobotController::Init
 *  Params: ros::NodeHandle *nh, int robotID, std::string robotName, int storage_cap, int storage_used, bool type
 * Returns: void
 * Effects: Initialize the controller with parameters
 ***********************************************************************/
void RobotController::Init(ros::NodeHandle *nh, int robotID, std::string robotName, int storage_cap, int storage_used, RobotState::Type type)
{
    m_nh = nh;
    m_listener.reset( new tf::TransformListener(*nh) );

    if (m_nh->getParam("controller/robot_id", robotID))
        ROS_INFO_STREAM("Set robot ID: " << robotID);
    else
        ROS_ERROR_STREAM("Did not read robot ID: default = " << robotID);
    m_status.SetID(robotID);

    if (m_nh->getParam("controller/robot_name", robotName))
        ROS_INFO_STREAM("Read robot name: " << robotName);
    else
        ROS_ERROR_STREAM("Did not read robot name: default = " << robotName);
    m_status.SetName(robotName);

    if(m_nh->getParam("controller/storage_capacity", storage_cap))
        ROS_INFO_STREAM("Read storage capacity: " << storage_cap);
    else
        ROS_ERROR_STREAM("Did not read storage capacity: default = " << storage_cap);
    m_status.SetStorageCapacity(storage_cap);

    if(m_nh->getParam("controller/storage_used", storage_used))
        ROS_INFO_STREAM("Read storage used: " << storage_used);
    else
        ROS_ERROR_STREAM("Did not read storage used: default = " << storage_used);
    m_status.SetStorageUsed(storage_used);

    std::string robotType;
    if (m_nh->getParam("controller/type", robotType))
        ROS_INFO_STREAM("Read robot type: " << robotType);
    else
        ROS_ERROR_STREAM("Did not read type: default = " << robotType);

    if (robotType.compare("collector") == 0)
    {
        ROS_INFO_STREAM("Type: Collector");
        type = RobotState::COLLECTOR_BOT;
    }
    else
    {
        ROS_INFO_STREAM("Type: Binbot");
        type = RobotState::BIN_BOT;
    }

    m_status.SetType(type);

    std::string tf_prefix;
    if (m_nh->getParam("controller/tf_prefix", tf_prefix))
        ROS_INFO_STREAM("Read tf prefix: " << tf_prefix);
    else
        ROS_ERROR_STREAM("Did not read tf_prefix: default = " << tf_prefix);

    base_frame = std::string("base_link");
    if (m_nh->getParam("controller/base_frame", base_frame))
        ROS_INFO_STREAM("Read base frame: " << base_frame);
    else
        ROS_ERROR_STREAM("Did not read base_frame: default = " << base_frame);

    base_frame = tf_prefix + "/"+base_frame;

    if (robotID < 0)
    {
        ROS_ERROR_STREAM("ERROR: Received invalid robot id: " << robotID);
        return;
    }
    if (storage_cap < 0)
    {
        ROS_ERROR_STREAM("ERROR: invalid storage capacity: " << storage_cap);
        return;
    }
    if (storage_used > storage_cap)
    {
        ROS_ERROR_STREAM("ERROR: storage used > storage capacity: used=" << storage_used <<  ", cap=" << storage_cap);
        return;
    }
    // Todo: Only start if fake trash is not used
    m_tagProcessor.reset( new AprilTagProcessor() );
    m_tagProcessor->Init(nh, robotID);

    action_client_ptr.reset( new MoveBaseClient("move_base", true) );
    // Wait for the action server to come up
    while(ros::ok() && !action_client_ptr->waitForServer(ros::Duration(2.0))){
        ROS_INFO_THROTTLE(3.0,"Waiting for the move_base action server to come up");
    }


    SetupCallbacks();

    ROS_DEBUG_STREAM("Robot has setup the movebase client");

    Transition(RobotState::WAITING);
    m_timeEnteringState = ros::Time::now();

    ROS_INFO_STREAM("Finished initializing");
}
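// Example call site for RobotController::Init (a sketch; the literals are
// placeholder defaults that the parameter server is expected to override):
void initController(ros::NodeHandle &nh, RobotController &controller)
{
    controller.Init(&nh, /*robotID=*/0, /*robotName=*/"robot0",
                    /*storage_cap=*/10, /*storage_used=*/0,
                    RobotState::COLLECTOR_BOT);
}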
// ******************************************************************************************
// Save package using default path
// ******************************************************************************************
bool ConfigurationFilesWidget::generatePackage()
{
  // Get path name
  std::string new_package_path = stack_path_->getPath();

  // Check that a valid stack package name has been given
  if( new_package_path.empty() )
  {
    QMessageBox::warning( this, "Error Generating", "No package path provided. Please choose a directory location to generate the MoveIt configuration files." );
    return false;
  }

  // Check setup assist deps
  if( !checkDependencies() )
    return false; // canceled

  // Check that all groups have components
  if( !noGroupsEmpty() )
    return false; // not ready

  // Trim whitespace from user input
  boost::trim( new_package_path );

  // Get the package name ---------------------------------------------------------------------------------
  new_package_name_ = getPackageName( new_package_path );

  const std::string setup_assistant_file = config_data_->appendPaths( new_package_path, ".setup_assistant" );

  // Make sure old package is correct package type and verify over write
  if( fs::is_directory( new_package_path ) && !fs::is_empty( new_package_path ) )
  {

    // Check if the old package is a setup assistant package. If it is not, quit
    if( ! fs::is_regular_file( setup_assistant_file ) )
    {
      QMessageBox::warning( this, "Incorrect Folder/Package",
                            QString("The chosen package location already exists but was not previously created using this MoveIt Setup Assistant. If this is a mistake, replace the missing file: ")
                            .append( setup_assistant_file.c_str() ) );
      return false;
    }

    // Confirm overwrite
    if( QMessageBox::question( this, "Confirm Package Update",
                               QString("Are you sure you want to overwrite this existing package with updated configurations?<br /><i>")
                               .append( new_package_path.c_str() )
                               .append( "</i>" ),
                               QMessageBox::Ok | QMessageBox::Cancel)
        == QMessageBox::Cancel )
    {
      return false; // abort
    }

  }
  else // this is a new package (but maybe the folder already exists)
  {
    // Create new directory ------------------------------------------------------------------
    try
    {
      fs::create_directory( new_package_path );
      if( !fs::is_directory( new_package_path ) )
        throw std::runtime_error( "failed to create directory" );
    }
    catch( ... )
    {
      QMessageBox::critical( this, "Error Generating Files",
                             QString("Unable to create directory ").append( new_package_path.c_str() ) );
      return false;
    }
  }

  // Begin to create files and folders ----------------------------------------------------------------------
  std::string absolute_path;

  for (std::size_t i = 0; i < gen_files_.size(); ++i)
  {
    GenerateFile* file = &gen_files_[i];

    // Check if we should skip this file
    if( !file->generate_ )
    {
      continue;
    }

    // Create the absolute path
    absolute_path = config_data_->appendPaths( new_package_path, file->rel_path_ );
    ROS_DEBUG_STREAM("Creating file " << absolute_path );

    // Run the generate function
    if( !file->gen_func_(absolute_path) )
    {
      // Error occurred
      QMessageBox::critical( this, "Error Generating File",
                             QString("Failed to generate folder or file: '")
                             .append( file->rel_path_.c_str() ).append("' at location:\n").append( absolute_path.c_str() ));
      return false;
    }
    updateProgress(); // Increment and update GUI
  }

  return true;
}
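// A plausible shape for the entries iterated above (a sketch: field names are
// taken from their usage in generatePackage(), everything else is assumed):
struct GenerateFile
{
  bool generate_;                                        // skip the file when false
  std::string rel_path_;                                 // path relative to the package root
  boost::function<bool (const std::string&)> gen_func_;  // writes the file, returns success
};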
bool planning_environment::removeCompletedTrajectory(const boost::shared_ptr<urdf::Model> &model,
        const trajectory_msgs::JointTrajectory &trajectory_in,
        const sensor_msgs::JointState& current_state,
        trajectory_msgs::JointTrajectory &trajectory_out,
        bool zero_vel_acc)
{

    trajectory_out = trajectory_in;
    trajectory_out.points.clear();

    if(trajectory_in.points.empty())
    {
        ROS_WARN("No points in input trajectory");
        return true;
    }

    int current_position_index = 0;
    //Get closest state in given trajectory
    current_position_index = closestStateOnTrajectory(model,
                             trajectory_in,
                             current_state,
                             current_position_index,
                             trajectory_in.points.size() - 1);
    if (current_position_index < 0)
    {
        ROS_ERROR("Unable to identify current state in trajectory");
        return false;
    } else {
        ROS_DEBUG_STREAM("Closest state is " << current_position_index << " of " << trajectory_in.points.size());
    }

    // Start one ahead of returned closest state index to make sure first trajectory point is not behind current state
    for(unsigned int i = current_position_index+1; i < trajectory_in.points.size(); ++i)
    {
        trajectory_out.points.push_back(trajectory_in.points[i]);
    }

    if(trajectory_out.points.empty())
    {
        ROS_DEBUG("No points in output trajectory");
        return false;
    }

    ros::Duration first_time = trajectory_out.points[0].time_from_start;

    if(first_time < ros::Duration(.1)) {
        first_time = ros::Duration(0.0);
    } else {
        first_time -= ros::Duration(.1);
    }

    for(unsigned int i=0; i < trajectory_out.points.size(); ++i)
    {
        if(trajectory_out.points[i].time_from_start > first_time) {
            trajectory_out.points[i].time_from_start -= first_time;
        } else {
            ROS_INFO_STREAM("Not enough time in time from start for trajectory point " << i);
        }
    }

    if(zero_vel_acc) {
        for(unsigned int i=0; i < trajectory_out.joint_names.size(); ++i) {
            for(unsigned int j=0; j < trajectory_out.points.size(); ++j) {
                trajectory_out.points[j].velocities[i] = 0;
                trajectory_out.points[j].accelerations[i] = 0;
            }
        }
    }
    return true;
}
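// Example use of removeCompletedTrajectory() (a sketch; the model and the two
// input messages are assumed to be populated elsewhere):
void trimExample(const boost::shared_ptr<urdf::Model> &urdf_model,
                 const trajectory_msgs::JointTrajectory &full_trajectory,
                 const sensor_msgs::JointState &current_state)
{
    trajectory_msgs::JointTrajectory trimmed;
    if (planning_environment::removeCompletedTrajectory(urdf_model, full_trajectory,
                                                        current_state, trimmed,
                                                        true /* zero vel/acc on all points */))
    {
        // `trimmed` now starts just ahead of the robot's current state, with
        // time_from_start rebased to begin roughly 0.1 s from now.
    }
}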
/** Get feature values.
 *
 *  @pre feature_set_ initialized for this camera
 *
 *  @param finfo pointer to information for this feature
 *  @param value [out] pointer where parameter value stored
 *  @param value2 [out] optional pointer for second parameter value
 *               for white balance.  Otherwise NULL.
 */
void Features::getValues(dc1394feature_info_t *finfo,
                           double *value, double *value2)
{
  dc1394feature_t feature = finfo->id;
  dc1394error_t rc;

  if (!finfo->readout_capable)
    {
      ROS_INFO_STREAM("feature " << featureName(feature)
                      << " value not available from device");
      return;
    }

  if (feature == DC1394_FEATURE_WHITE_BALANCE)
    {
      // handle White Balance separately, it has two components
      if (finfo->absolute_capable && finfo->abs_control)
        {
          // supports reading and setting float value
          // @todo get absolute White Balance values
          rc = DC1394_FUNCTION_NOT_SUPPORTED;
        }
      else
        {
          // get integer White Balance values
          uint32_t bu_val;
          uint32_t rv_val;
          rc = dc1394_feature_whitebalance_get_value(camera_, &bu_val, &rv_val);
          if (DC1394_SUCCESS == rc)
            {
              // convert to double
              *value = bu_val;
              *value2 = rv_val;
            }
        }
      if (DC1394_SUCCESS == rc)
        {
          ROS_DEBUG_STREAM("feature " << featureName(feature)
                           << " Blue/U: " << *value
                           << " Red/V: " << *value2);
        }
      else
        {
          ROS_WARN_STREAM("failed to get values for feature "
                          << featureName(feature));
        }
    }
  else
    {
      // other features only have one component
      if (finfo->absolute_capable && finfo->abs_control)
        {
          // supports reading and setting float value
          float fval;
          rc = dc1394_feature_get_absolute_value(camera_, feature, &fval);
          if (DC1394_SUCCESS == rc)
            {
              *value = fval;                // convert to double
            }
        }
      else // no float representation
        {
          uint32_t ival;
          rc = dc1394_feature_get_value(camera_, feature, &ival);
          if (DC1394_SUCCESS == rc)
            {
              *value = ival;                // convert to double
            }
        }
      if (DC1394_SUCCESS == rc)
        {
          ROS_DEBUG_STREAM("feature " << featureName(feature)
                           << " has value " << *value);
        }
      else
        {
          ROS_WARN_STREAM("failed to get value of feature "
                          << featureName(feature));
        }
    }
}
void waypoints_follower::run()
{
    
    const double MAX_TWIST_LINEAR = 1.0;
    const double MAX_TWIST_ANGULAR = 0.5;
    const double TURNING_RADIUS=0.4;

        if (!active) return;
//         ROS_INFO("active");
        
        if (!localized)
        {
            ROS_WARN("waiting for localization");
            return;
        }
        if (reached(next_target) || start_new_target) //Change of target!
        {
            start_new_target = false;
            if (targets.size()==0)
            {
                deactivate(deactivate_reason::NO_MORE_TARGETS);
                return;
            }
            
            std::unique_lock<std::mutex> targets_lock(targets_mtx);
            
            next_target = targets.front();
            targets.pop_front();
            geometry_msgs::Pose2D current_pose;
            current_pose.x=x;
            current_pose.y=y;            
            
            xtarget = next_target.x;
            ytarget = next_target.y;
            ROS_INFO_STREAM("starting new target "<<xtarget<< " " <<ytarget<<" "<<int(next_target.z)%1000);
            
            straight=true;
            turning=false;
        }
        /* More complex implementation, for the future, it chooses speed and angular radius
        if (distance(next_target)<TURNING_RADIUS && !turning && targets.size()>0 && !start_new_target) //Will not use circle if this is the last target or if we are starting now
        {
            //TODO start turning along a circle
            //Find next heading, circle radius, circle length, circle angle
            double current_heading=theta;
            double heading=atan2(ytarget-targets.front().y,xtarget-targets.front().x);
            double current_speed=twist.linear.x;
            double next_speed=distance(next_target,targets.front())/(targets.front().z-next_target.z);
            double delta_heading=heading-current_heading;
            bool rotate_left;
            if((0<delta_heading && delta_heading<M_PI) || (-2*M_PI<delta_heading && delta_heading<-M_PI ))
                rotate_left=true;
            else rotate_left=false;
            bool ok=false;
            double desired_speed=(current_speed+next_speed)/2.0;
            double radius=max_turning_radius/10.0;
            double alpha=2*radius/wheel_distance;
            while(!ok)
            {
                double right_wheel_speed=-desired_speed*(1/alpha+1);
                if (right_wheel_speed>max_speed) //Too fast, slow down
                {
                    right_wheel_speed=max_speed;
                }
                double left_wheel_speed=2*desired_speed-right_wheel_speed;
                if (left_wheel_speed>max_speed) //Also too fast, change speed and turning radius
                {
                    desired_speed=0.9*desired_speed;
                    radius=radius+max_turning_radius*0.1;
                }
                else
                {
                    ok=true;
                }
            }
            turning = true;
            twist.linear.x=desired_speed;
            twist.angular.z=(right_wheel_speed-left_wheel_speed)/2.0;
        }
        */
        if (distance(next_target)<TURNING_RADIUS && distance(next_target)>reached_threshold && !turning && targets.size()>0 && !start_new_target) //Will not use circle if this is the last target or if we are starting now
        {
            turning=true;
            straight=false;
            desired_heading=atan2(targets.front().y-ytarget,targets.front().x-xtarget);
            double current_speed=twist.linear.x;
            double next_speed=distance(next_target,targets.front())/(targets.front().z-next_target.z);
            desired_speed=(current_speed+next_speed)/2.0;
            if(desired_speed>MAX_TWIST_LINEAR) desired_speed=MAX_TWIST_LINEAR;
            twist.linear.x=desired_speed;
            ROS_INFO_STREAM("starting turning from near "<<xtarget<< " " <<ytarget<<" "<<int(next_target.z)%1000<< "to next target");
        }
        double length = distance(next_target);
        double theta_err;
        
        if (turning)
        {
            //either keep publishing same rotation and speed or feedback on the circle information
            twist.linear.x=desired_speed*(1+ki1*(TURNING_RADIUS-length));
            twist.angular.z=-ki2*sin(desired_heading-theta);
            theta_err=desired_heading-theta;
            if (fabs(desired_heading-theta)<0.2 )//|| distance())
            {
                //We kind of reached the desired heading, we should switch to the next target and stop turning
                
                std::unique_lock<std::mutex> targets_lock(targets_mtx);
                
                next_target = targets.front();
                targets.pop_front();
                geometry_msgs::Pose2D current_pose;
                current_pose.x=x;
                current_pose.y=y;            
                
                xtarget = next_target.x;
                ytarget = next_target.y;
                straight=true;
                ROS_INFO_STREAM("starting new target "<<xtarget<< " " <<ytarget<<" "<<int(next_target.z)%1000);
                
                ROS_INFO("starting straight");
                
                turning=false;
            }
        }
        if (straight)
        {
            double delta = next_target.z - ros::Time::now().toSec();
            if (delta <= 0)
            {
                twist.linear.x = 0;
                ROS_WARN_STREAM("Target time is in the past, stopping");
                deactivate(deactivate_reason::GLOBAL_TARGET_NOT_REACHED);
            }
            else
            {
                twist.linear.x=length/delta*1.1;
                theta_err=atan2(ytarget-y,xtarget-x)-theta;
                if (fabs(fabs(theta_err)-M_PI)<0.1) theta_err=theta_err+0.1;
                twist.angular.z=-kp2*sin(theta_err);
            }
        }
        twist.linear.x = std::min(twist.linear.x,MAX_TWIST_LINEAR);
        comand_pub.publish(twist);
        ROS_DEBUG_STREAM("controller run xt: "<<xtarget<<" yt: "<<ytarget<<" x: "<<x<<" y: "<<y<<" t "<<next_target.z);
        ROS_DEBUG_STREAM("controller run theta:"<<theta<<" error "<<theta_err<<" v: "<<twist.linear.x<<" w: "<<twist.angular.z);
        
        ros::spinOnce();
}
/** Configure a feature for the currently open device.
 *
 *  @pre feature_set_ initialized
 *
 *  @param feature desired feature number
 *  @param control [in,out] pointer to control parameter (may change)
 *  @param value [in,out] pointer to requested parameter value (may
 *               change depending on device restrictions)
 *  @param value2 [in,out] optional pointer to second parameter value
 *               for white balance (may change depending on device
 *               restrictions).  No second value if NULL pointer.
 *
 *  The parameter values are represented as double despite the fact
 *  that most features on most cameras only allow unsigned 12-bit
 *  values.  The exception is the rare feature that supports
 *  "absolute" values in 32-bit IEEE float format.  Double can
 *  represent all possible option values accurately.
 */
void Features::configure(dc1394feature_t feature, int *control,
                         double *value, double *value2)
{
  // device-relevant information for this feature
  dc1394feature_info_t *finfo =
    &feature_set_.feature[feature - DC1394_FEATURE_MIN];

  if (!finfo->available)                // feature not available?
    {
      *control = camera1394::Camera1394_None;
      return;
    }

  switch (*control)
    {
    case camera1394::Camera1394_Off:
      setOff(finfo);
      break;

    case camera1394::Camera1394_Query:
      getValues(finfo, value, value2);
      break;

    case camera1394::Camera1394_Auto:
      if (!setMode(finfo, DC1394_FEATURE_MODE_AUTO))
        {
          setOff(finfo);
        }
      break;

    case camera1394::Camera1394_Manual:
      if (!setMode(finfo, DC1394_FEATURE_MODE_MANUAL))
        {
          setOff(finfo);
          break;
        }

      // TODO: break this into some internal methods
      if (finfo->absolute_capable && finfo->abs_control)
        {
          // supports reading and setting float value
          float fmin, fmax;
          if (DC1394_SUCCESS ==
              dc1394_feature_get_absolute_boundaries(camera_, feature,
                                                     &fmin, &fmax))
            {
              // clamp *value between minimum and maximum
              if (*value < fmin)
                *value = (double) fmin;
              else if (*value > fmax)
                *value = (double) fmax;
            }
          else
            {
              ROS_WARN_STREAM("failed to get feature "
                              << featureName(feature) << " boundaries ");
            }

          // @todo handle absolute White Balance values
          float fval = *value;
          if (DC1394_SUCCESS !=
              dc1394_feature_set_absolute_value(camera_, feature, fval))
            {
              ROS_WARN_STREAM("failed to set feature "
                              << featureName(feature) << " to " << fval);
            }
        }
      else // no float representation
        {
          // round requested value to nearest integer
          *value = rint(*value);

          // clamp *value between minimum and maximum
          if (*value < finfo->min)
            *value = (double) finfo->min;
          else if (*value > finfo->max)
            *value = (double) finfo->max;

          dc1394error_t rc;
          uint32_t ival = (uint32_t) *value;

          // handle White Balance, which has two components
          if (feature == DC1394_FEATURE_WHITE_BALANCE)
            {
              *value2 = rint(*value2);

              // clamp *value2 between same minimum and maximum
              if (*value2 < finfo->min)
                *value2 = (double) finfo->min;
              else if (*value2 > finfo->max)
                *value2 = (double) finfo->max;

              uint32_t ival2 = (uint32_t) *value2;
              rc = dc1394_feature_whitebalance_set_value(camera_, ival, ival2);
            }
          else
            {
              // other features only have one component
              rc = dc1394_feature_set_value(camera_, feature, ival);
            }
          if (rc != DC1394_SUCCESS)
            {
              ROS_WARN_STREAM("failed to set feature "
                              << featureName(feature) << " to " << ival);
            }
        }
      break;

    case camera1394::Camera1394_OnePush:
      // Try to set OnePush mode
      setMode(finfo, DC1394_FEATURE_MODE_ONE_PUSH_AUTO);

      // Now turn the control off, so camera does not continue adjusting
      setOff(finfo);
      break;

    case camera1394::Camera1394_None:
      // Invalid user input, because this feature actually does exist.
      ROS_INFO_STREAM("feature " << featureName(feature)
                      << " exists, cannot set to None");
      break;

    default:
      ROS_WARN_STREAM("unknown state (" << *control
                      <<") for feature " << featureName(feature));
    }

  // return actual state reported by the device
  *control = getState(finfo);
  ROS_DEBUG_STREAM("feature " << featureName(feature)
                   << " now in state " << *control);
}
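// Example use of Features::configure() for a single-component feature (a
// sketch; `features` is an initialized Features instance for an open camera):
void setBrightnessExample(Features &features)
{
  int control = camera1394::Camera1394_Manual;
  double value = 100.0;   // may be rounded and clamped to device limits
  features.configure(DC1394_FEATURE_BRIGHTNESS, &control, &value, NULL);
  // on return, `control` holds the state actually reported by the device
}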
////////////////////////////////////////////////////////////////////////////////
// Update the controller
void GazeboTruck::Update()
{
  boost::mutex::scoped_lock scoped_lock(lock);

  if ( terminated_ )
    {
      return;
    }

  // std::cerr << link_->GetWorldPose() << std::endl;
  common::Time current_time = world_->GetSimTime();
  double delta_time = (current_time-last_time_).Double();
  double theta = acos(CIRCLE_RADIUS/(CIRCLE_DISTANCE/2));
  double x, y, yaw;
  double l1 = CIRCLE_DISTANCE*sin(theta);
  double l2 = 2*CIRCLE_RADIUS*(M_PI-theta);
  double all_l = l1 + l2 + l1 + l2;

  // static const float VELOCITY = 4.16667;  //  4.16667 m/sec = 15 km/h, 2.77778 m/sec = 10 km/h, 1.38889 = 5 km/h
  if ( current_time.Double() < 6*60 )
    {
      traversed_ += 4.16667 * delta_time;
    }
  else if ( current_time.Double() < 12*60 )
    {
      traversed_ += 2.77778 * delta_time;
    }
  else if ( current_time.Double() < 20*60 )
    {
      traversed_ += 1.38889 * delta_time;
    }
  else
    {
      ROS_FATAL("Time's up, Your challenge was over");
      terminated_ = true;
    }
  double l = fmod(traversed_, all_l);
  ROS_DEBUG_STREAM("time: " << current_time.Double() << ", traversed: " << traversed_);

  if ( l < l1  )
    {
      x =  (l - l1/2)*sin(theta);
      y = -(l - l1/2)*cos(theta);
      yaw = theta-M_PI/2;
    }
  else if ( l < l1 + l2 )
    {
      l = l - l1;
      x = -CIRCLE_RADIUS * cos(l/l2*(2*M_PI-theta*2)+theta) + CIRCLE_DISTANCE/2;
      y = -CIRCLE_RADIUS * sin(l/l2*(2*M_PI-theta*2)+theta);
      yaw = (l/l2*(2*M_PI-theta*2)+theta)-M_PI/2;
    }
  else if ( l < l1 + l2 + l1 )
    {
      l = l - (l1 + l2);
      x = -(l - l1/2)*sin(theta);
      y = -(l - l1/2)*cos(theta);
      yaw =-M_PI/2-theta;
    }
  else
    {
      l = l - (l1 + l2 + l1);
      x =  CIRCLE_RADIUS * cos(l/l2*(2*M_PI-theta*2)+theta) - CIRCLE_DISTANCE/2;
      y = -CIRCLE_RADIUS * sin(l/l2*(2*M_PI-theta*2)+theta);
      yaw = -(l/l2*(2*M_PI-theta*2)+theta)-M_PI/2;
    }
  model_->SetLinkWorldPose(math::Pose(x, y, 0.595, 0, 0, yaw), link_);
  last_time_ = world_->GetSimTime();

  // check score
  // void Entity::GetNearestEntityBelow(double &_distBelow,  std::string &_entityName)

  gazebo::physics::RayShapePtr rayShape = boost::dynamic_pointer_cast<gazebo::physics::RayShape>(
    world_->GetPhysicsEngine()->CreateShape("ray", gazebo::physics::CollisionPtr()));

  double distAbove;
  std::string entityName;
  math::Box box = model_->GetLink("heliport")->GetCollisionBoundingBox();
  math::Vector3 start = model_->GetLink("heliport")->GetWorldPose().pos;
  math::Vector3 end = start;
  start.z = box.max.z + 0.00001;
  end.z += 1000;
  rayShape->SetPoints(start, end);
  rayShape->GetIntersection(distAbove, entityName);
  distAbove -= 0.00001;

  // publish remain time
  std::stringstream ss;
  std_msgs::String msg_time;
  ss << 20*60 - current_time.Double();
  msg_time.data = "remain time:" + ss.str();
  pub_time_.publish(msg_time);
  if ( entityName != "" && distAbove < 1.0 )
    {
      std_msgs::String msg_score;
      msg_score.data = "Mission Completed";
      ROS_INFO_STREAM("Remaining time is " << ss.str() << "[sec], Score is " << msg_score.data);
      pub_score_.publish(msg_score);
      terminated_ = true;
    }
}
// General outline: the model can be written:
//    M(o)o_dotdot + Q(o,o_dot)o_dot + G(o) = tau
// When we linearize to the first order around position O, we get the approximation: o = O + e and
//    M(O)e_dotdot + Q(O,O_dot)e_dot + G(O) + grad(G)(O)e = tau
// Or:
//    (e)_dot     = e_dot
//    (e_dot)_dot = -M^-1 Q e_dot - M^-1 grad(G)(O) e + M^-1 (tau - G(O))
// We first compute the matrices M,Q,G and then linearize the gravity term.
bool SerialChainModel::getLinearization(const StateVector & x, StateMatrix & A, InputMatrix & B, InputVector & c)
{
//   ROS_DEBUG("1");
  assert(inputs_>0);
  const int n=inputs_;
  KDL::JntArray kdl_q(n);
  KDL::JntArray kdl_q_dot(n);

//   ROS_DEBUG("2");
  //Fills the data
  for(int i=0; i<n; ++i)
  {
    kdl_q(i) = x(i);
    kdl_q_dot(i) = x(i+n);
  }
  
//   ROS_DEBUG("3");
  //Compute M
  Eigen::MatrixXd M(n,n);
  NEWMAT::Matrix mass(n,n);
  chain_->computeMassMatrix(kdl_q,kdl_torque_,mass);
  for(int i=0;i<n;i++)
    for(int j=0;j<n;j++)
      M(i,j)=mass(i+1,j+1);
//   ROS_DEBUG("4");
  //Compute Q
  Eigen::MatrixXd Q(n,n);
  Q.setZero();
  NEWMAT::Matrix christoffel(n*n,n);
  chain_->computeChristoffelSymbols(kdl_q,kdl_torque_,christoffel);
//   ROS_DEBUG("5-");
  for(int i=0;i<n;i++)
    for(int j=0;j<n;j++)
      for(int k=0;k<n;k++)
        Q(i,j)+=christoffel(i*n+j+1,k+1)*kdl_q_dot(k); // contract the k index with the joint velocities
//   ROS_DEBUG("5");
  //Compute G
  Eigen::VectorXd G(n,1);
  chain_->computeGravityTerms(kdl_q,kdl_torque_);
  for(int i=0;i<n;i++)
    G(i)=kdl_torque_[i][2];
//   ROS_DEBUG("6");
  //Computing the gradient of G
  Eigen::MatrixXd gG(n,n);
  const double epsi=0.01;
  for(int i=0;i<n;i++)
  {
    KDL::JntArray ja = kdl_q;
    ja(i)=ja(i)+epsi;
    chain_->computeGravityTerms(ja,kdl_torque_);
    for(int j=0;j<n;j++)
      gG(i,j)=(kdl_torque_[j][2]-G(j))/epsi;
  }
//   ROS_DEBUG("7");
  // Final assembling
  Eigen::MatrixXd Minvminus=-M.inverse(); //Computing M's inverse once
  ROS_DEBUG_STREAM(A.rows());
  assert(A.rows()==n*2);
  assert(A.cols()==n*2);
  A.block(0,0,n,n).setZero();
  A.block(0,n,n,n)=Eigen::MatrixXd::Identity(n,n);
  A.block(n,0,n,n)=Minvminus*gG;
  A.block(n,n,n,n)=Minvminus*Q;
  
  assert(B.rows()==n*2);
  assert(B.cols()==n);
  B.block(0,0,n,n).setZero();
  B.block(n,0,n,n)=Minvminus;
//   ROS_DEBUG("8");
  c=-G;
  return true;
}
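// Written out, the linearization assembled above is, with x = (e, e_dot):
//
//   \dot{x} = \begin{pmatrix} 0 & I \\ -M^{-1}\,\nabla G(O) & -M^{-1}\,Q(O,\dot{O}) \end{pmatrix} x
//             + \begin{pmatrix} 0 \\ M^{-1} \end{pmatrix} \tau + c , \qquad c = -G(O)
//
// The returned c = -G matches this if the caller feeds the affine term through
// the input matrix, i.e. B(\tau + c) = M^{-1}(\tau - G(O)); that reading is
// consistent with the header comment, though the caller's convention is not
// shown in this example.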
/**
 * The callback function that is executed every time an image is received. It runs the main logic of the program.
 *
 * \param image_msg the ROS message containing the image to be processed
 */
void MPENode::imageCallback(const sensor_msgs::Image::ConstPtr& image_msg)
{

  // Check whether already received the camera calibration data
  if (!have_camera_info_)
  {
    ROS_WARN("No camera info yet...");
    return;
  }

  // Import the image from ROS message to OpenCV mat
  cv_bridge::CvImagePtr cv_ptr;
  try
  {
    cv_ptr = cv_bridge::toCvCopy(image_msg, sensor_msgs::image_encodings::MONO8);
  }
  catch (cv_bridge::Exception& e)
  {
    ROS_ERROR("cv_bridge exception: %s", e.what());
    return;
  }
  cv::Mat image = cv_ptr->image;

  // Get time at which the image was taken. This time is used to stamp the estimated pose and also calculate the position of where to search for the makers in the image
  double time_to_predict = image_msg->header.stamp.toSec();

  const bool found_body_pose = trackable_object_.estimateBodyPose(image, time_to_predict);
  if (found_body_pose) // Only output the pose, if the pose was updated (i.e. a valid pose was found).
  {
    //Eigen::Matrix4d transform = trackable_object.getPredictedPose();
    Matrix6d cov = trackable_object_.getPoseCovariance();
    Eigen::Matrix4d transform = trackable_object_.getPredictedPose();

    ROS_DEBUG_STREAM("The transform: \n" << transform);
    ROS_DEBUG_STREAM("The covariance: \n" << cov);

    // Convert transform to PoseWithCovarianceStamped message
    predicted_pose_.header.stamp = image_msg->header.stamp;
    predicted_pose_.pose.pose.position.x = transform(0, 3);
    predicted_pose_.pose.pose.position.y = transform(1, 3);
    predicted_pose_.pose.pose.position.z = transform(2, 3);
    Eigen::Quaterniond orientation = Eigen::Quaterniond(transform.block<3, 3>(0, 0));
    predicted_pose_.pose.pose.orientation.x = orientation.x();
    predicted_pose_.pose.pose.orientation.y = orientation.y();
    predicted_pose_.pose.pose.orientation.z = orientation.z();
    predicted_pose_.pose.pose.orientation.w = orientation.w();

    // Add covariance to PoseWithCovarianceStamped message
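    // The message covariance is a flat row-major 6x6 array: elems[row*6 + col].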
    for (unsigned i = 0; i < 6; ++i)
    {
      for (unsigned j = 0; j < 6; ++j)
      {
        predicted_pose_.pose.covariance.elems[j + 6 * i] = cov(i, j);
      }
    }

    // Publish the pose
    pose_pub_.publish(predicted_pose_);
  }
  else
  { // If pose was not updated
    ROS_WARN("Unable to resolve a pose.");
  }

  // publish visualization image
  if (image_pub_.getNumSubscribers() > 0)
  {
    cv::Mat visualized_image = image.clone();
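    // GRAY2RGB replicates the single channel, so channel order is moot here;
    // the BGR8 encoding declared below is therefore consistent.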
    cv::cvtColor(visualized_image, visualized_image, CV_GRAY2RGB);
    if (found_body_pose)
    {
      trackable_object_.augmentImage(visualized_image);
    }

    // Publish image for visualization
    cv_bridge::CvImage visualized_image_msg;
    visualized_image_msg.header = image_msg->header;
    visualized_image_msg.encoding = sensor_msgs::image_encodings::BGR8;
    visualized_image_msg.image = visualized_image;

    image_pub_.publish(visualized_image_msg.toImageMsg());
  }
}
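A minimal wiring sketch (hypothetical topic and member names; the original subscription code is not shown here):

// In the MPENode constructor, the callback would typically be registered as:
// image_sub_ = nh.subscribe("camera/image_raw", 1, &MPENode::imageCallback, this);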
Example No. 16
void ChompOptimizer::optimize()
{
  ros::WallTime start_time = ros::WallTime::now();
  double averageCostVelocity = 0.0;
  int currentCostIter = 0;
  int costWindow = 10;
  std::vector<double> costs(costWindow, 0.0);
  double minimaThreshold = 0.05;
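  // The cost window and average cost velocity feed the local-minimum detection
  // below; that logic is currently commented out, so these stay at their
  // initial values in this version.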
  bool should_break_out = false;

  // if(parameters_->getAnimatePath())
  // {
  //   animatePath();
  // }

  // iterate
  for(iteration_ = 0; iteration_ < parameters_->getMaxIterations(); iteration_++)
  {
    ros::WallTime for_time = ros::WallTime::now();
    performForwardKinematics();
    //ROS_INFO_STREAM("Forward kinematics took " << (ros::WallTime::now()-for_time));
    double cCost = getCollisionCost();
    double sCost = getSmoothnessCost();
    double cost = cCost + sCost;

    //ROS_INFO_STREAM("Collision cost " << cCost << " smoothness cost " << sCost);

    // if(parameters_->getAddRandomness() && currentCostIter != -1)
    // {
    //   costs[currentCostIter] = cCost;
    //   currentCostIter++;

    //   if(currentCostIter >= costWindow)
    //   {
    //     for(int i = 1; i < costWindow; i++)
    //     {
    //       averageCostVelocity += (costs.at(i) - costs.at(i - 1));
    //     }

    //     averageCostVelocity /= (double)(costWindow);
    //     currentCostIter = -1;
    //   }
    // }
    if(iteration_ == 0)
    {
      best_group_trajectory_ = group_trajectory_.getTrajectory();
      best_group_trajectory_cost_ = cost;
    }
    else
    {
      if(cost < best_group_trajectory_cost_)
      {
        best_group_trajectory_ = group_trajectory_.getTrajectory();
        best_group_trajectory_cost_ = cost;
        last_improvement_iteration_ = iteration_;
      }
    }
    calculateSmoothnessIncrements();
    ros::WallTime coll_time = ros::WallTime::now();
    calculateCollisionIncrements();
    ROS_DEBUG_STREAM("Collision increments took " << (ros::WallTime::now()-coll_time));
    calculateTotalIncrements();

    // if(!parameters_->getUseHamiltonianMonteCarlo())
    // {
    //   // non-stochastic version:
    addIncrementsToTrajectory();
    // }
    // else
    // {
    //   // hamiltonian monte carlo updates:
    //   getRandomMomentum();
    //   updateMomentum();
    //   updatePositionFromMomentum();
    //   stochasticity_factor_ *= parameters_->getHmcAnnealingFactor();
    // }
    handleJointLimits();
    updateFullTrajectory();

    if(iteration_ % 10 == 0)
    {
      if(isCurrentTrajectoryMeshToMeshCollisionFree()) {
        num_collision_free_iterations_ = 0;
        ROS_INFO("Chomp Got mesh to mesh safety at iter %d. Breaking out early.", iteration_);
        is_collision_free_ = true;
        iteration_++;
        should_break_out = true;
      }

      // ROS_DEBUG("Trajectory cost: %f (s=%f, c=%f)", getTrajectoryCost(), getSmoothnessCost(), getCollisionCost());
      // CollisionProximitySpace::TrajectorySafety safety = checkCurrentIterValidity();
      // if(safety == CollisionProximitySpace::MeshToMeshSafe)
      // {
      //   num_collision_free_iterations_ = 0;
      //   ROS_INFO("Chomp Got mesh to mesh safety at iter %d. Breaking out early.", iteration_);
      //   is_collision_free_ = true;
      //   iteration_++;
      //   should_break_out = true;
      // } else if(safety == CollisionProximitySpace::InCollisionSafe) {
      //   num_collision_free_iterations_ = parameters_->getMaxIterationsAfterCollisionFree();
      //   ROS_INFO("Chomp Got in collision safety at iter %d. Breaking out soon.", iteration_);
      //   is_collision_free_ = true;
      //   iteration_++;
      //   should_break_out = true;
      // }
      // else
      // {
      //   is_collision_free_ = false;
      // }
    }

    if(!parameters_->getFilterMode())
    {
      if(cCost < parameters_->getCollisionThreshold())
      {
        num_collision_free_iterations_ = parameters_->getMaxIterationsAfterCollisionFree();
        is_collision_free_ = true;
        iteration_++;
        should_break_out = true;
      } else {
        //ROS_INFO_STREAM("cCost " << cCost << " over threshold " << parameters_->getCollisionThreshold());
      }
    }

    if((ros::WallTime::now() - start_time).toSec() > parameters_->getPlanningTimeLimit() && !parameters_->getAnimatePath() && !parameters_->getAnimateEndeffector())
    {
      ROS_WARN("Breaking out early due to time limit constraints.");
      break;
    }

    // if(fabs(averageCostVelocity) < minimaThreshold && currentCostIter == -1 && !is_collision_free_ && parameters_->getAddRandomness())
    // {
    //   ROS_INFO("Detected local minima. Attempting to break out!");
    //   int iter = 0;
    //   bool success = false;
    //   while(iter < 20 && !success)
    //   {
    //     performForwardKinematics();
    //     double original_cost = getTrajectoryCost();
    //     group_trajectory_backup_ = group_trajectory_.getTrajectory();
    //     perturbTrajectory();
    //     handleJointLimits();
    //     updateFullTrajectory();
    //     performForwardKinematics();
    //     double new_cost = getTrajectoryCost();
    //     iter ++;
    //     if(new_cost < original_cost)
    //     {
    //       ROS_INFO("Got out of minimum in %d iters!", iter);
    //       averageCostVelocity = 0.0;
    //       currentCostIter = 0;
    //       success = true;
    //     }
    //     else
    //     {
    //       group_trajectory_.getTrajectory() = group_trajectory_backup_;
    //       updateFullTrajectory();
    //       currentCostIter = 0;
    //       averageCostVelocity = 0.0;
    //       success = false;
    //     }

    //   }

    //   if(!success)
    //   {
    //     ROS_INFO("Failed to exit minimum!");
    //   }
    //}
    // Note: with the local-minima block above commented out, this "else"
    // binds to the time-limit check, and currentCostIter is never -1 here.
    else if(currentCostIter == -1)
    {
      currentCostIter = 0;
      averageCostVelocity = 0.0;
    }

    // if(parameters_->getAnimateEndeffector())
    // {
    //   animateEndeffector();
    // }

    // if(parameters_->getAnimatePath() && iteration_ % 25 == 0)
    // {
    //   animatePath();
    // }

    if(should_break_out)
    {
      collision_free_iteration_++;
      if(num_collision_free_iterations_ == 0) {
        break;
      } else if(collision_free_iteration_ > num_collision_free_iterations_)
      {
        // CollisionProximitySpace::TrajectorySafety safety = checkCurrentIterValidity();
        // if(safety != CollisionProximitySpace::MeshToMeshSafe &&
        //    safety != CollisionProximitySpace::InCollisionSafe) {
        //   ROS_WARN_STREAM("Apparently regressed");
        // }
        break;
      }
    }
  }

  if(is_collision_free_)
  {
    ROS_INFO("Chomp path is collision free");
  }
  else
  {
    ROS_ERROR("Chomp path is not collision free!");
  }

  // if(parameters_->getAnimatePath())
  // {
  //   animatePath();
  // }

  group_trajectory_.getTrajectory() = best_group_trajectory_;
  updateFullTrajectory();

  // if(parameters_->getAnimatePath())
  //   animatePath();

  ROS_INFO("Terminated after %d iterations, using path from iteration %d", iteration_, last_improvement_iteration_);
  ROS_INFO("Optimization core finished in %f sec", (ros::WallTime::now() - start_time).toSec() );
  ROS_INFO_STREAM("Time per iteration " << (ros::WallTime::now() - start_time).toSec()/(iteration_*1.0));
}
Example No. 17
void ImageProcessing::rawImageCallback(const sensor_msgs::Image::ConstPtr& image){
  ros::Rate loop_rate(200);
  double gray_level_precision;
  double size_precision;
  bool pause_at_each_frame = false; // Wait for user input each time a new frame is received.

  ros::param::getCached(visp_camera_calibration::gray_level_precision_param,gray_level_precision);
  ros::param::getCached(visp_camera_calibration::size_precision_param,size_precision);
  ros::param::getCached(visp_camera_calibration::pause_at_each_frame_param,pause_at_each_frame);


  vpPose pose;
  vpCalibration calib;
  visp_camera_calibration::CalibPointArray calib_all_points;

  img_ = visp_bridge::toVispImage(*image);
  
  init();

  vpDisplay::display(img_);
  vpDisplay::flush(img_);

  if(!pause_at_each_frame){
    vpDisplay::displayRectangle(img_,0,0,img_.getWidth(),15,vpColor::black,true);
    vpDisplay::displayCharString(img_,10,10,"Click on the window to select the current image",vpColor::red);
    vpDisplay::flush(img_);
    if(pause_image_){
      pause_image_= false;
    }
    else{
      return;
    }
  }

  pose.clearPoint();
  calib.clearPoint();
  vpImagePoint ip;

  // Let the user click each calibration keypoint
  for(unsigned int i=0;i<selected_points_.size();i++){
    try{
      vpDot2 d;
      d.setGrayLevelPrecision(gray_level_precision);
      d.setSizePrecision(size_precision);
      
			ROS_INFO("Click on point %d",i+1);
      vpDisplay::displayRectangle(img_,0,0,img_.getWidth(),15,vpColor::black,true);
      vpDisplay::displayCharString(img_,10,10,boost::str(boost::format("click on point %1%") % (i+1)).c_str(),vpColor::red);
      vpDisplay::flush(img_);  
      while(ros::ok() && !vpDisplay::getClick(img_,ip,false));
      
      d.initTracking(img_, ip);

      ip.set_ij(d.getCog().get_i(),d.getCog().get_j());
      double x=0,y=0;
      vpPixelMeterConversion::convertPoint(cam_, ip, x, y);
      selected_points_[i].set_x(x);
      selected_points_[i].set_y(y);
      pose.addPoint(selected_points_[i]);
      calib.addPoint(selected_points_[i].get_oX(),selected_points_[i].get_oY(),selected_points_[i].get_oZ(), ip);
      
      vpDisplay::displayCross(img_, d.getCog(), 10, vpColor::red);
      vpDisplay::flush(img_);
    }catch(vpTrackingException& e){
      ROS_ERROR("Failed to init point");
    }
  }


  vpHomogeneousMatrix cMo;
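  // Linear (Lagrange) initialization followed by virtual visual-servoing refinement.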
  pose.computePose(vpPose::LAGRANGE, cMo) ;
  pose.computePose(vpPose::VIRTUAL_VS, cMo) ;
  vpHomogeneousMatrix cMoTmp = cMo;

  vpCameraParameters camTmp = cam_;
  //compute local calibration to match the calibration grid with the image
  try{
    calib.computeCalibration(vpCalibration::CALIB_VIRTUAL_VS,cMoTmp,camTmp,false);
    ROS_DEBUG_STREAM("cMo="<<std::endl <<cMoTmp<<std::endl);
    ROS_DEBUG_STREAM("cam="<<std::endl <<camTmp<<std::endl);

    //project all points and track their corresponding image location
    for (std::vector<vpPoint>::iterator model_point_iter= model_points_.begin();
          model_point_iter!= model_points_.end() ;
          model_point_iter++){
      //project each model point into image according to current calibration
      vpColVector _cP, _p ;
      
      model_point_iter->changeFrame(cMoTmp,_cP) ;
      model_point_iter->projection(_cP,_p) ;
      vpMeterPixelConversion::convertPoint(camTmp,_p[0],_p[1], ip);
      if (10 < ip.get_u() && ip.get_u() < img_.getWidth()-10 &&
          10 < ip.get_v() && ip.get_v() < img_.getHeight()-10) {
        try {
          //track the projected point, look for match
          vpDot2 md;
          md.setGrayLevelPrecision(gray_level_precision);
          md.setSizePrecision(size_precision);

          md.initTracking(img_, ip);
          if(!ros::ok())
            return;

          vpRect bbox = md.getBBox();
          vpImagePoint cog = md.getCog();
          if(bbox.getLeft()<5 || bbox.getRight()>(double)img_.getWidth()-5 ||
              bbox.getTop()<5 || bbox.getBottom()>(double)img_.getHeight()-5||
              vpMath::abs(ip.get_u() - cog.get_u()) > 10 ||
              vpMath::abs(ip.get_v() - cog.get_v()) > 10){
            ROS_DEBUG("tracking failed[suspicious point location].");
          }else{
            //point matches
            double x=0, y=0;
            vpPixelMeterConversion::convertPoint(camTmp, cog, x, y)  ;
            model_point_iter->set_x(x) ;
            model_point_iter->set_y(y) ;

            md.display(img_,vpColor::yellow, 2);
            visp_camera_calibration::CalibPoint cp;
            cp.i = cog.get_i();
            cp.j = cog.get_j();
            cp.X = model_point_iter->get_oX();
            cp.Y = model_point_iter->get_oY();
            cp.Z = model_point_iter->get_oZ();

            calib_all_points.points.push_back(cp);

            model_point_iter->display(img_,cMoTmp,camTmp) ;
            loop_rate.sleep(); //To avoid refresh problems
            vpDisplay::flush(img_);
          }
        } catch(...){
          ROS_DEBUG("tracking failed.");
        }
      } else {
        ROS_DEBUG("bad projection.");
      }
    }

    ROS_INFO("Left click on the interface window to continue, right click to restart");
    vpDisplay::displayRectangle(img_,0,0,img_.getWidth(),15,vpColor::black,true);
    vpDisplay::displayCharString(img_,10,10,"Left click on the interface window to continue, right click to restart",vpColor::red);
    vpDisplay::flush(img_);
		
    vpMouseButton::vpMouseButtonType btn;
    while(ros::ok() && !vpDisplay::getClick(img_,ip,btn, false));
		
    if(btn==vpMouseButton::button1)
      point_correspondence_publisher_.publish(calib_all_points);
    else{
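      // Right click: re-enter the callback to restart point selection on the same frame.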
      rawImageCallback(image);
      return;
    }
  }catch(...){
    ROS_ERROR("calibration failed.");
  }
}
Example No. 18
  geometry_msgs::PoseWithCovarianceStamped PredictionModel::update( double twist_x, double twist_y, double twist_z )
  {
    // Predict the model's motion from the current linear twist; if no new
    // measurement was received, only system noise disperses the model.
    // The velocity is expressed in m.s-1, so it is divided by the refresh
    // frequency to obtain a per-update displacement.
    MatrixWrapper::ColumnVector vel(3); vel = 0;
    vel(1) = twist_x / refresh_frequency_;
    vel(2) = twist_y / refresh_frequency_;
    vel(3) = twist_z / refresh_frequency_;

    //we have a new measurement waiting to be processed
    if( !meas_used_ )
    {
      ROS_DEBUG_STREAM("New measurement = " << measurement_ << " / vel: " <<  vel);

      kalman_filter_->Update( system_model_.get(), vel,
                              measurement_model_.get(),
                              measurement_);
      meas_used_ = true;
    }
    else
    {
      //no measurement only dispersion
      ROS_DEBUG_STREAM("Dispersion model, velocities: " << vel);
      kalman_filter_->Update(system_model_.get(), vel);
    }

    //get the result
    posterior_ = kalman_filter_->PostGet();
    ROS_DEBUG_STREAM("Object is probably at: " << posterior_->ExpectedValueGet() << " \n"
                     << "  -> Covariance of: " << posterior_->CovarianceGet());

    results_.pose.pose.position.x = posterior_->ExpectedValueGet()(1);
    results_.pose.pose.position.y = posterior_->ExpectedValueGet()(2);
    results_.pose.pose.position.z = posterior_->ExpectedValueGet()(3);

    double x, y, z;
    x = posterior_->ExpectedValueGet()(1);
    y = posterior_->ExpectedValueGet()(2);
    z = posterior_->ExpectedValueGet()(3);
    if( (x != x) || (y != y) || (z != z) ) // x != x is true only for NaN
    {
      ROS_WARN_STREAM("There was a problem, we predicted some NaN: " << x << " " << y << " " << z <<" / resetting filter." );
      reset_model_();
      timer_ = nh_.createTimer(ros::Duration(0.1), &PredictionModel::reset, this, true);
      //hack to mark it as failed
      results_.header.seq = 0;
      return results_;
    }
    else
    {
      //hack to mark it as success
      results_.header.seq = 1;
    }

    //TODO compute orientation as well?
    results_.pose.pose.orientation.x = 0.0;
    results_.pose.pose.orientation.y = 0.0;
    results_.pose.pose.orientation.z = 0.0;
    results_.pose.pose.orientation.w = 1.0;

    //The covariance from the pose is a 6x6 matrix
    // but we've only got a 3x3 matrix as we're estimating the position only
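    // The flat array is row-major (index = row*6 + col), so the translational
    // 3x3 block occupies indices 0-2, 6-8 and 12-14, as written below.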
    results_.pose.covariance[0] = posterior_->CovarianceGet()(1,1);
    results_.pose.covariance[1] = posterior_->CovarianceGet()(1,2);
    results_.pose.covariance[2] = posterior_->CovarianceGet()(1,3);
    results_.pose.covariance[6] = posterior_->CovarianceGet()(2,1);
    results_.pose.covariance[7] = posterior_->CovarianceGet()(2,2);
    results_.pose.covariance[8] = posterior_->CovarianceGet()(2,3);
    results_.pose.covariance[12] = posterior_->CovarianceGet()(3,1);
    results_.pose.covariance[13] = posterior_->CovarianceGet()(3,2);
    results_.pose.covariance[14] = posterior_->CovarianceGet()(3,3);

    return results_;
  }
Example No. 19
  /** Dynamic reconfigure callback
   *
   *  Called immediately when the callback is first registered, and again
   *  whenever dynamic reconfigure starts or changes a parameter value.
   *
   *  @param newconfig new Config values
   *  @param level bit-wise OR of reconfiguration levels for all
   *               changed parameters (0xffffffff on initial call)
   **/
  void NaoqiCameraDriver::reconfig(Config &newconfig, uint32_t level)
  {
    // Do not run concurrently with poll().
    boost::mutex::scoped_lock scopedLock(reconfiguration_mutex_);
    ROS_DEBUG("dynamic reconfigure level 0x%x", level);

    // resolve frame ID using tf_prefix parameter
    if (newconfig.source == kTopCamera)
      frame_id_ = "CameraTop_frame";
    else if (newconfig.source == kBottomCamera)
      frame_id_ = "CameraBottom_frame";
    else {
        ROS_ERROR("Unknown video source! (neither top nor bottom camera).");
        frame_id_ = "camera";
    }

    string tf_prefix = tf::getPrefixParam(priv_nh_);
    ROS_DEBUG_STREAM("tf_prefix: " << tf_prefix);
    frame_id_ = tf::resolve(tf_prefix, frame_id_);

    if (state_ != Driver::CLOSED && (level & Levels::RECONFIGURE_CLOSE))
    {
        // must close the device before updating these parameters
        closeCamera();                  // state_ --> CLOSED
    }

    if (config_.camera_info_url != newconfig.camera_info_url)
      {
        // set the new URL and load CameraInfo (if any) from it
        if (cinfo_->validateURL(newconfig.camera_info_url))
          {
            cinfo_->loadCameraInfo(newconfig.camera_info_url);
          }
        else
          {
            // new URL not valid, use the old one
            newconfig.camera_info_url = config_.camera_info_url;
          }
      }

    if (state_ != Driver::CLOSED)       // openCamera() succeeded?
    {

        if (config_.auto_exposition != newconfig.auto_exposition)
            camera_proxy_->setParam(kCameraAutoExpositionID, newconfig.auto_exposition);

        if (config_.auto_exposure_algo != newconfig.auto_exposure_algo)
            camera_proxy_->setParam(kCameraExposureAlgorithmID, newconfig.auto_exposure_algo);

        if (config_.exposure != newconfig.exposure) {
            newconfig.auto_exposition = 0;
            camera_proxy_->setParam(kCameraAutoExpositionID, 0);
            camera_proxy_->setParam(kCameraExposureID, newconfig.exposure);
        }

        if (config_.gain != newconfig.gain) {
            newconfig.auto_exposition = 0;
            camera_proxy_->setParam(kCameraAutoExpositionID, 0);
            camera_proxy_->setParam(kCameraGainID, newconfig.gain);
        }

        if (config_.brightness != newconfig.brightness) {
            newconfig.auto_exposition = 1;
            camera_proxy_->setParam(kCameraAutoExpositionID, 1);
            camera_proxy_->setParam(kCameraBrightnessID, newconfig.brightness);
        }

        if (config_.contrast != newconfig.contrast)
            camera_proxy_->setParam(kCameraContrastID, newconfig.contrast);

        if (config_.saturation != newconfig.saturation)
            camera_proxy_->setParam(kCameraSaturationID, newconfig.saturation);

        if (config_.hue != newconfig.hue)
            camera_proxy_->setParam(kCameraHueID, newconfig.hue);

        if (config_.sharpness != newconfig.sharpness)
            camera_proxy_->setParam(kCameraSharpnessID, newconfig.sharpness);

        if (config_.auto_white_balance != newconfig.auto_white_balance)
            camera_proxy_->setParam(kCameraAutoWhiteBalanceID, newconfig.auto_white_balance);

        if (config_.white_balance != newconfig.white_balance) {
            newconfig.auto_white_balance = 0;
            camera_proxy_->setParam(kCameraAutoWhiteBalanceID, 0);
            camera_proxy_->setParam(kCameraWhiteBalanceID, newconfig.white_balance);
        }
    }

    config_ = newconfig;                // save new parameters
    real_frame_rate_ = ros::Rate(newconfig.frame_rate);

    ROS_DEBUG_STREAM("[" << camera_name_
                     << "] reconfigured: frame_id " << frame_id_
                     << ", camera_info_url " << newconfig.camera_info_url);
  }
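A typical registration for this callback (a hedged sketch; the actual server setup is not shown here and the variable names are assumptions):

// dynamic_reconfigure::Server<Config> reconfig_server(priv_nh_);
// reconfig_server.setCallback(boost::bind(&NaoqiCameraDriver::reconfig, this, _1, _2));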
void planning_environment::KinematicModelStateMonitor::jointStateCallback(const sensor_msgs::JointStateConstPtr &joint_state)
{

  planning_models::KinematicState state(kmodel_);

  current_joint_values_lock_.lock();

  state.setKinematicState(current_joint_state_map_);

  static bool first_time = true;

  unsigned int n = joint_state->name.size();
  if (joint_state->name.size() != joint_state->position.size())
  {
    ROS_ERROR("Planning environment received invalid joint state");
    current_joint_values_lock_.unlock();
    return;
  }
  
  std::map<std::string, double> joint_state_map;
  for (unsigned int i = 0 ; i < n ; ++i)
  {
    joint_state_map[joint_state->name[i]] = joint_state->position[i];
  }

  std::vector<planning_models::KinematicState::JointState*>& joint_state_vector = state.getJointStateVector();
  
  for(std::vector<planning_models::KinematicState::JointState*>::iterator it = joint_state_vector.begin();
      it != joint_state_vector.end();
      it++) {
    bool tfSets = false;
    bool joint_state_sets = false;
    //see if we need to update any transforms
    std::string parent_frame_id = (*it)->getParentFrameId();
    std::string child_frame_id = (*it)->getChildFrameId();
    if(!parent_frame_id.empty() && !child_frame_id.empty()) {
      std::string err;
      ros::Time tm;
      tf::StampedTransform transf;
      bool ok = false;
      if (tf_->getLatestCommonTime(parent_frame_id, child_frame_id, tm, &err) == tf::NO_ERROR) {
        ok = true;
        try
        {
          tf_->lookupTransform(parent_frame_id, child_frame_id, tm, transf);
        }
        catch(tf::TransformException& ex)
        {
          ROS_ERROR("Unable to lookup transform from %s to %s.  Exception: %s", parent_frame_id.c_str(), child_frame_id.c_str(), ex.what());
          ok = false;
        }
      } else {
        ROS_DEBUG("Unable to lookup transform from %s to %s: no common time.", parent_frame_id.c_str(), child_frame_id.c_str());
        ok = false;
      }
      if(ok) {
        tfSets = (*it)->setJointStateValues(transf);
        if(tfSets) {
          const std::vector<std::string>& joint_state_names = (*it)->getJointStateNameOrder();
          for(std::vector<std::string>::const_iterator name_it = joint_state_names.begin();
              name_it != joint_state_names.end();
              name_it++) {
            last_joint_update_[*name_it] = tm;
          }
        }
        // tf::Transform transf = getKinematicModel()->getRoot()->variable_transform;
        // ROS_INFO_STREAM("transform is to " << transf.getRotation().x() << " " 
        //                 << transf.getRotation().y() << " z " << transf.getRotation().z()
        //                 << " w " << transf.getRotation().w());
        have_pose_ = tfSets;
        last_pose_update_ = tm;
      }
    }
    //now we update from joint state
    joint_state_sets = (*it)->setJointStateValues(joint_state_map);
    if(joint_state_sets) {
      const std::vector<std::string>& joint_state_names = (*it)->getJointStateNameOrder();
      for(std::vector<std::string>::const_iterator name_it = joint_state_names.begin();
          name_it != joint_state_names.end();
          name_it++) {
        last_joint_update_[*name_it] = joint_state->header.stamp;
      }
    }
  }
  
  state.updateKinematicLinks();
  state.getKinematicStateValues(current_joint_state_map_);

  if(allJointsUpdated()) {
    have_joint_state_ = true;
    last_joint_state_update_ = joint_state->header.stamp;
    
    if(!joint_state_map_cache_.empty()) {
      if(joint_state->header.stamp-joint_state_map_cache_.back().first > ros::Duration(joint_state_cache_allowed_difference_)) {
        ROS_DEBUG_STREAM("Introducing joint state cache sparsity time of " << (joint_state->header.stamp-joint_state_map_cache_.back().first).toSec());
      }
    }

    joint_state_map_cache_.push_back(std::pair<ros::Time, std::map<std::string, double> >(joint_state->header.stamp,
                                                                                          current_joint_state_map_));
  } 

  if(have_joint_state_) {
    while(1) {
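      // Prune cache entries older than joint_state_cache_time_ seconds.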
      if(joint_state_map_cache_.empty()) {
        ROS_WARN("Empty joint state map cache");
        break;
      }
      if((ros::Time::now()-joint_state_map_cache_.front().first) > ros::Duration(joint_state_cache_time_)) {
        joint_state_map_cache_.pop_front();
      } else {
        break;
      }
    }
  }
    
  first_time = false;

  if(!allJointsUpdated(ros::Duration(1.0))) {
    if(!printed_out_of_date_) {
      ROS_WARN_STREAM("Got joint state update but did not update some joints for more than 1 second.  Turn on DEBUG for more info");
      printed_out_of_date_ = true;
    }
  } else {
    printed_out_of_date_ = false;
  }
  if(on_state_update_callback_ != NULL) {
    on_state_update_callback_(joint_state);
  }
  current_joint_values_lock_.unlock();
}
Example No. 21
  void DensoArmNode::go_to_arm_pose(const denso_msgs::MoveArmPoseGoalConstPtr& goal)
  {
    int success = denso_msgs::MoveArmPoseResult::SUCCESS;

    //rate at which we'll send the demands to the arm
    ros::Rate move_arm_rate( goal->rate );

    //for checking the time out
    ros::Duration time_left( goal->time_out );
    ros::Time start_time = ros::Time::now();

    while( node_.ok() )
    {
      if( move_arm_pose_server_->isPreemptRequested() || !ros::ok() )
      {
        ROS_INFO("Move Denso arm to pose preempted.");
        move_arm_pose_server_->setPreempted();
        success = denso_msgs::MoveArmPoseResult::PREEMPTED;
        break;
      }

      //Compute pose from goal
      geometry_msgs::Pose pose_tmp( goal->goal );
      Pose pose_goal = geometry_pose_to_denso_pose( pose_tmp );

      ROS_DEBUG_STREAM(" RPY : " << pose_goal.roll << " / " << pose_goal.pitch << " / " << pose_goal.yaw);

      bool locked = false;
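      // Poll for the mutex for up to ~10 s (1000 attempts x 10 ms) instead of blocking indefinitely.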
      for( unsigned int i=0; i < 1000; ++i)
      {
        if( denso_mutex.try_lock() )
        {
          locked = true;
          break;
        }
        usleep(10000);
      }

      if( locked )
      {
        if( !simulated )
        {
          denso_arm_->set_speed( goal->speed );

          if( denso_arm_->send_cartesian_position( pose_goal ) )
          {
            denso_mutex.unlock();
            break; //We reached the target -> SUCCESS
          }
        }
        else
        {
          usleep( 500000 );
          denso_mutex.unlock();
          break;
        }
      }
      else
      {
        ROS_WARN("Couldn't send the target to the arm.");
      }

      //publish feedback
      time_left = ros::Duration( goal->time_out ) - (ros::Time::now() - start_time); // remaining time, measured from the start
      move_arm_pose_feedback_.time_left = time_left;
      move_arm_pose_server_->publishFeedback( move_arm_pose_feedback_ );

      //check if timedout
      if( time_left.toSec() <= 0.0 )
      {
        success = denso_msgs::MoveArmPoseResult::TIMED_OUT;
        break;
      }

      //loop at the rate the user asked for
      move_arm_rate.sleep();
    }

    if( success == denso_msgs::MoveArmPoseResult::SUCCESS)
    {
      move_arm_pose_result_.val = denso_msgs::MoveArmPoseResult::SUCCESS;
      move_arm_pose_server_->setSucceeded( move_arm_pose_result_ );
    }
    else
    {
      //failed
      move_arm_pose_result_.val = success;
      move_arm_pose_server_->setAborted( move_arm_pose_result_ );
    }
  }
Example No. 22
void RosReconfigure_callback(Config &config, uint32_t level)
{
    int             changedAcquire;
    int             changedAcquisitionFrameRate;
    int             changedExposureAuto;
    int             changedGainAuto;
    int             changedExposureTimeAbs;
    int             changedGain;
    int             changedAcquisitionMode;
    int             changedTriggerMode;
    int             changedTriggerSource;
    int             changedSoftwarerate;
    int             changedFrameid;
    int             changedFocusPos;
    int             changedMtu;
    
    
    std::string tf_prefix = tf::getPrefixParam(*global.phNode);
    ROS_DEBUG_STREAM("tf_prefix: " << tf_prefix);
    
    if (config.frame_id == "")
        config.frame_id = "camera";

    
    // Find what the user changed.
    changedAcquire    			= (global.config.Acquire != config.Acquire);
    changedAcquisitionFrameRate = (global.config.AcquisitionFrameRate != config.AcquisitionFrameRate);
    changedExposureAuto 		= (global.config.ExposureAuto != config.ExposureAuto);
    changedExposureTimeAbs  	= (global.config.ExposureTimeAbs != config.ExposureTimeAbs);
    changedGainAuto     		= (global.config.GainAuto != config.GainAuto);
    changedGain         		= (global.config.Gain != config.Gain);
    changedAcquisitionMode 		= (global.config.AcquisitionMode != config.AcquisitionMode);
    changedTriggerMode  		= (global.config.TriggerMode != config.TriggerMode);
    changedTriggerSource		= (global.config.TriggerSource != config.TriggerSource);
    changedSoftwarerate  		= (global.config.softwaretriggerrate != config.softwaretriggerrate);
    changedFrameid      		= (global.config.frame_id != config.frame_id);
    changedFocusPos     		= (global.config.FocusPos != config.FocusPos);
    changedMtu          		= (global.config.mtu != config.mtu);


    // Limit params to legal values.
    config.AcquisitionFrameRate = CLIP(config.AcquisitionFrameRate, global.configMin.AcquisitionFrameRate, 	global.configMax.AcquisitionFrameRate);
    config.ExposureTimeAbs   	= CLIP(config.ExposureTimeAbs,  	global.configMin.ExposureTimeAbs,  		global.configMax.ExposureTimeAbs);
    config.Gain          		= CLIP(config.Gain,         		global.configMin.Gain,         			global.configMax.Gain);
    config.FocusPos       		= CLIP(config.FocusPos,      		global.configMin.FocusPos,      		global.configMax.FocusPos);
    config.frame_id   			= tf::resolve(tf_prefix, config.frame_id);


    // Adjust other controls dependent on what the user changed.
    if (changedExposureTimeAbs || changedGainAuto || ((changedAcquisitionFrameRate || changedGain || changedFrameid
					|| changedAcquisitionMode || changedTriggerSource || changedSoftwarerate) && config.ExposureAuto=="Once"))
    	config.ExposureAuto = "Off";

    if (changedGain || changedExposureAuto || ((changedAcquisitionFrameRate || changedExposureTimeAbs || changedFrameid
					|| changedAcquisitionMode || changedTriggerSource || changedSoftwarerate) && config.GainAuto=="Once"))
    	config.GainAuto = "Off";

    if (changedAcquisitionFrameRate)
    	config.TriggerMode = "Off";
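    // Rationale: manually setting exposure or gain disables the corresponding
    // auto mode, "Once" auto modes are cleared when other capture parameters
    // change, and a new frame rate turns triggering off.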


    // Find what changed for any reason.
    changedAcquire    			= (global.config.Acquire != config.Acquire);
    changedAcquisitionFrameRate = (global.config.AcquisitionFrameRate != config.AcquisitionFrameRate);
    changedExposureAuto 		= (global.config.ExposureAuto != config.ExposureAuto);
    changedExposureTimeAbs     	= (global.config.ExposureTimeAbs != config.ExposureTimeAbs);
    changedGainAuto     		= (global.config.GainAuto != config.GainAuto);
    changedGain            		= (global.config.Gain != config.Gain);
    changedAcquisitionMode 		= (global.config.AcquisitionMode != config.AcquisitionMode);
    changedTriggerMode  		= (global.config.TriggerMode != config.TriggerMode);
    changedTriggerSource		= (global.config.TriggerSource != config.TriggerSource);
    changedSoftwarerate  		= (global.config.softwaretriggerrate != config.softwaretriggerrate);
    changedFrameid      		= (global.config.frame_id != config.frame_id);
    changedFocusPos     		= (global.config.FocusPos != config.FocusPos);
    changedMtu          		= (global.config.mtu != config.mtu);

    
    // Set params into the camera.
    if (changedExposureTimeAbs)
    {
    	if (global.isImplementedExposureTimeAbs)
		{
			ROS_INFO ("Set ExposureTimeAbs = %f", config.ExposureTimeAbs);
			arv_device_set_float_feature_value (global.pDevice, "ExposureTimeAbs", config.ExposureTimeAbs);
		}
    	else
    		ROS_INFO ("Camera does not support ExposureTimeAbs.");
    }

    if (changedGain)
    {
    	if (global.isImplementedGain)
		{
			ROS_INFO ("Set gain = %f", config.Gain);
			//arv_device_set_integer_feature_value (global.pDevice, "GainRaw", config.GainRaw);
			arv_camera_set_gain (global.pCamera, config.Gain);
		}
    	else
    		ROS_INFO ("Camera does not support Gain or GainRaw.");
    }

    if (changedExposureAuto)
    {
    	if (global.isImplementedExposureAuto && global.isImplementedExposureTimeAbs)
		{
			ROS_INFO ("Set ExposureAuto = %s", config.ExposureAuto.c_str());
			arv_device_set_string_feature_value (global.pDevice, "ExposureAuto", config.ExposureAuto.c_str());
			if (config.ExposureAuto=="Once")
			{
				ros::Duration(2.0).sleep();
				config.ExposureTimeAbs = arv_device_get_float_feature_value (global.pDevice, "ExposureTimeAbs");
				ROS_INFO ("Get ExposureTimeAbs = %f", config.ExposureTimeAbs);
				config.ExposureAuto = "Off";
			}
		}
    	else
    		ROS_INFO ("Camera does not support ExposureAuto.");
    }
    if (changedGainAuto)
    {
    	if (global.isImplementedGainAuto && global.isImplementedGain)
		{
			ROS_INFO ("Set GainAuto = %s", config.GainAuto.c_str());
			arv_device_set_string_feature_value (global.pDevice, "GainAuto", config.GainAuto.c_str());
			if (config.GainAuto=="Once")
			{
				ros::Duration(2.0).sleep();
				//config.GainRaw = arv_device_get_integer_feature_value (global.pDevice, "GainRaw");
				config.Gain = arv_camera_get_gain (global.pCamera);
				ROS_INFO ("Get Gain = %f", config.Gain);
				config.GainAuto = "Off";
			}
		}
    	else
    		ROS_INFO ("Camera does not support GainAuto.");
    }

    if (changedAcquisitionFrameRate)
    {
    	if (global.isImplementedAcquisitionFrameRate)
		{
			ROS_INFO ("Set %s = %f", global.keyAcquisitionFrameRate, config.AcquisitionFrameRate);
			arv_device_set_float_feature_value (global.pDevice, global.keyAcquisitionFrameRate, config.AcquisitionFrameRate);
		}
    	else
    		ROS_INFO ("Camera does not support AcquisitionFrameRate.");
    }

    if (changedTriggerMode)
    {
    	if (global.isImplementedTriggerMode)
		{
			ROS_INFO ("Set TriggerMode = %s", config.TriggerMode.c_str());
			arv_device_set_string_feature_value (global.pDevice, "TriggerMode", config.TriggerMode.c_str());
		}
    	else
    		ROS_INFO ("Camera does not support TriggerMode.");
    }

    if (changedTriggerSource)
    {
    	if (global.isImplementedTriggerSource)
		{
			ROS_INFO ("Set TriggerSource = %s", config.TriggerSource.c_str());
			arv_device_set_string_feature_value (global.pDevice, "TriggerSource", config.TriggerSource.c_str());
		}
    	else
    		ROS_INFO ("Camera does not support TriggerSource.");
    }

    if ((changedTriggerMode || changedTriggerSource || changedSoftwarerate) && config.TriggerMode=="On" && config.TriggerSource=="Software")
    {
    	if (global.isImplementedAcquisitionFrameRate)
		{
			// The software rate is limited by the camera's internal framerate.  Bump up the camera's internal framerate if necessary.
			config.AcquisitionFrameRate = global.configMax.AcquisitionFrameRate;
			ROS_INFO ("Set %s = %f", global.keyAcquisitionFrameRate, config.AcquisitionFrameRate);
			arv_device_set_float_feature_value (global.pDevice, global.keyAcquisitionFrameRate, config.AcquisitionFrameRate);
		}
    }

    if (changedTriggerSource || changedSoftwarerate)
    {
    	// Recreate the software trigger callback.
		if (global.idSoftwareTriggerTimer)
		{
			g_source_remove(global.idSoftwareTriggerTimer);
			global.idSoftwareTriggerTimer = 0;
		}
    	if (!strcmp(config.TriggerSource.c_str(),"Software"))
    	{
        	ROS_INFO ("Set softwaretriggerrate = %f", 1000.0/ceil(1000.0 / config.softwaretriggerrate));

    		// Turn on software timer callback.
    		global.idSoftwareTriggerTimer = g_timeout_add ((guint)ceil(1000.0 / config.softwaretriggerrate), SoftwareTrigger_callback, global.pCamera);
    	}
    }
    if (changedFocusPos)
    {
    	if (global.isImplementedFocusPos)
		{
			ROS_INFO ("Set FocusPos = %d", config.FocusPos);
			arv_device_set_integer_feature_value(global.pDevice, "FocusPos", config.FocusPos);
			ros::Duration(1.0).sleep();
			config.FocusPos = arv_device_get_integer_feature_value(global.pDevice, "FocusPos");
			ROS_INFO ("Get FocusPos = %d", config.FocusPos);
		}
    	else
    		ROS_INFO ("Camera does not support FocusPos.");
    }
    if (changedMtu)
    {
    	if (global.isImplementedMtu)
		{
			ROS_INFO ("Set mtu = %d", config.mtu);
			arv_device_set_integer_feature_value(global.pDevice, "GevSCPSPacketSize", config.mtu);
			ros::Duration(1.0).sleep();
			config.mtu = arv_device_get_integer_feature_value(global.pDevice, "GevSCPSPacketSize");
			ROS_INFO ("Get mtu = %d", config.mtu);
		}
    	else
    		ROS_INFO ("Camera does not support mtu (i.e. GevSCPSPacketSize).");
    }

    if (changedAcquisitionMode)
    {
    	if (global.isImplementedAcquisitionMode)
		{
			ROS_INFO ("Set AcquisitionMode = %s", config.AcquisitionMode.c_str());
			arv_device_set_string_feature_value (global.pDevice, "AcquisitionMode", config.AcquisitionMode.c_str());

			ROS_INFO("AcquisitionStop");
			arv_device_execute_command (global.pDevice, "AcquisitionStop");
			ROS_INFO("AcquisitionStart");
			arv_device_execute_command (global.pDevice, "AcquisitionStart");
		}
    	else
    		ROS_INFO ("Camera does not support AcquisitionMode.");
    }

    if (changedAcquire)
    {
    	if (config.Acquire)
    	{
    		ROS_INFO("AcquisitionStart");
			arv_device_execute_command (global.pDevice, "AcquisitionStart");
    	}
    	else
    	{
    		ROS_INFO("AcquisitionStop");
			arv_device_execute_command (global.pDevice, "AcquisitionStop");
    	}
    }



    global.config = config;

} // RosReconfigure_callback()
Example No. 23
void printTransform(const char* name, const tf::Transform t) {
    ROS_DEBUG_STREAM(name << ": Translation " << t.getOrigin().x() << " " << t.getOrigin().y() << " " << t.getOrigin().z());
    ROS_DEBUG_STREAM(name << ": Rotation " << t.getRotation().getX() << " " << t.getRotation().getY() << " " << t.getRotation().getZ() << " " << t.getRotation().getW());
}
Example No. 24
/**
 * @brief Initialises the node.
 */
bool KeyOpCore::init()
{
  ros::NodeHandle nh("~");

  name = nh.getUnresolvedNamespace();

  /*********************
   ** Parameters
   **********************/
  nh.getParam("linear_vel_step", linear_vel_step);
  nh.getParam("linear_vel_max", linear_vel_max);
  nh.getParam("angular_vel_step", angular_vel_step);
  nh.getParam("angular_vel_max", angular_vel_max);
  nh.getParam("wait_for_connection", wait_for_connection_);

  ROS_INFO_STREAM("KeyOpCore : using linear  vel step [" << linear_vel_step << "].");
  ROS_INFO_STREAM("KeyOpCore : using linear  vel max  [" << linear_vel_max << "].");
  ROS_INFO_STREAM("KeyOpCore : using angular vel step [" << angular_vel_step << "].");
  ROS_INFO_STREAM("KeyOpCore : using angular vel max  [" << angular_vel_max << "].");

  /*********************
   ** Subscribers
   **********************/
  keyinput_subscriber = nh.subscribe("teleop", 1, &KeyOpCore::remoteKeyInputReceived, this);

  /*********************
   ** Publishers
   **********************/
  velocity_publisher_ = nh.advertise<geometry_msgs::Twist>("cmd_vel", 1);
  motor_power_publisher_ = nh.advertise<kobuki_msgs::MotorPower>("motor_power", 1);

  /*********************
   ** Velocities
   **********************/
  cmd->linear.x = 0.0;
  cmd->linear.y = 0.0;
  cmd->linear.z = 0.0;
  cmd->angular.x = 0.0;
  cmd->angular.y = 0.0;
  cmd->angular.z = 0.0;

  /*********************
   ** Wait for connection
   **********************/
  if (!wait_for_connection_)
  {
    return true;
  }
  ecl::MilliSleep millisleep;
  int count = 0;
  bool connected = false;
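  // Poll for a motor_power subscriber for up to ~3 s (six 500 ms retries).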
  while (!connected)
  {
    if (motor_power_publisher_.getNumSubscribers() > 0)
    {
      connected = true;
      break;
    }
    if (count == 6)
    {
      connected = false;
      break;
    }
    else
    {
      ROS_WARN_STREAM("KeyOp: could not connect, trying again after 500ms...");
      try
      {
        millisleep(500);
      }
      catch (ecl::StandardException& e)
      {
        ROS_ERROR_STREAM("Waiting has been interrupted.");
        ROS_DEBUG_STREAM(e.what());
        return false;
      }
      ++count;
    }
  }
  if (!connected)
  {
    ROS_ERROR("KeyOp: could not connect.");
    ROS_ERROR("KeyOp: check remappings for enable/disable topics).");
  }
  else
  {
    kobuki_msgs::MotorPower power_cmd;
    power_cmd.state = kobuki_msgs::MotorPower::ON;
    motor_power_publisher_.publish(power_cmd);
    ROS_INFO("KeyOp: connected.");
    power_status = true;
  }

  // start keyboard input thread
  thread.start(&KeyOpCore::keyboardInputLoop, *this);
  return true;
}
// This example opens the serial port, continuously reads IMU quaternion frames and republishes them as tf transforms. (The request-sending code below is commented out.)
int main(int argc, char** argv)
{
    ros::init(argc, argv, "imu_tf_publisher");
    ros::NodeHandle n;

    cereal::CerealPort device;
    char reply[REPLY_SIZE];
    char* tempData;
    int read_len = 0;
    int data_counter = 0;
    double imu_x = 0.0, imu_y = 0.0, imu_z = 0.0, imu_w = 0.0;
    const char* head_ID = "4-2";
    const char* right_hand_ID = "6-1";
    const char* left_hand_ID = "6-3";

    // Change the next line according to your port name and baud rate
    try{ device.open("/dev/ttyUSB0", 115200); }
    catch(cereal::Exception& e)
    {
        ROS_FATAL("Failed to open the serial port!!!");
        ROS_BREAK();
    }
    ROS_INFO("The serial port is opened.");

    ros::Rate r(500);

    tf::TransformBroadcaster br;
    tf::TransformListener ls;
    std::string transformString1;
    std::string transformString2;
    tf::StampedTransform stamped_transform;

    while(ros::ok())
    {
        // Optionally send a request over the serial port (disabled here)
        //device.write("3");

        // Get the reply, the last value is the timeout in ms
        try{ read_len = device.read(reply, REPLY_SIZE, TIMEOUT); }
        catch(cereal::TimeoutException& e)
        {
            ROS_ERROR("%s", e.what()); // pass the message as an argument, not as a format string
            device.open("/dev/ttyUSB0", 115200);
        }
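        // Expected frame format: "<sensor ID>,<x>,<y>,<z>,<w>", quaternion components scaled by 1e4.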
        if(read_len > 0)
        {
            tempData = strtok(reply, ","); // first token: the sensor ID (left at the start of reply)
            data_counter = 0;
            if((tempData = strtok(NULL, ","))) // assignment intended: advance to the next token
            {
                data_counter++;
                imu_x = (double)atoi(tempData)/10000.0;
            }
            if((tempData = strtok(NULL, ",")))
            {
                data_counter++;
                imu_y = (double)atoi(tempData)/10000.0;
            }
            if((tempData = strtok(NULL, ",")))
            {
                data_counter++;
                imu_z = (double)atoi(tempData)/10000.0;
            }
            if((tempData = strtok(NULL, ",")))
            {
                data_counter++;
                imu_w = (double)atoi(tempData)/10000.0;
            }

            if(data_counter == 4 && strncmp(reply, head_ID, 3)==0)
            {
                transformString1 = "neck";
                transformString2 = "head";
                try
                {
                    ls.waitForTransform(transformString1,transformString2,ros::Time(0),ros::Duration(1.0));
                    ls.lookupTransform(transformString1,transformString2,ros::Time(0),stamped_transform);
                }
                catch(tf::TransformException const &ex)
                {
                    ROS_DEBUG_STREAM(ex.what());
                    ROS_WARN_STREAM("Couldn't get neck to head transform!");
                }

                stamped_transform.setRotation(tf::Quaternion(imu_x,imu_y,imu_z,imu_w));
                br.sendTransform(tf::StampedTransform(stamped_transform, ros::Time::now(),stamped_transform.frame_id_,"imu_head"));
                ROS_INFO("Got this reply for head:   x : %f ,y : %f ,z : %f ,w : %f", imu_x, imu_y, imu_z, imu_w);
            }
            else if(data_counter == 4 && strncmp(reply, right_hand_ID, 3)==0)
            {
                transformString1 = "right_elbow";
                transformString2 = "right_hand";
                try
                {
                    ls.waitForTransform(transformString1,transformString2,ros::Time(0),ros::Duration(1.0));
                    ls.lookupTransform(transformString1,transformString2,ros::Time(0),stamped_transform);
                }
                catch(tf::TransformException const &ex)
                {
                    ROS_DEBUG_STREAM(ex.what());
                    ROS_WARN_STREAM("Couldn't get right elbow to right hand transform!");
                }

                stamped_transform.setRotation(tf::Quaternion(imu_x,imu_y,imu_z,imu_w));
                br.sendTransform(tf::StampedTransform(stamped_transform, ros::Time::now(),stamped_transform.frame_id_,"imu_right_hand"));
                ROS_INFO("Got this reply for right hand: x : %f ,y : %f ,z : %f ,w : %f", imu_x, imu_y, imu_z, imu_w);
            }
            else if(data_counter == 4 && strncmp(reply, left_hand_ID, 3)==0)
            {
                transformString1 = "left_elbow";
                transformString2 = "left_hand";
                try
                {
                    ls.waitForTransform(transformString1,transformString2,ros::Time(0),ros::Duration(1.0));
                    ls.lookupTransform(transformString1,transformString2,ros::Time(0),stamped_transform);
                }
                catch(tf::TransformException const &ex)
                {
                    ROS_DEBUG_STREAM(ex.what());
                    ROS_WARN_STREAM("Couldn't get left elbow to left hand transform!");
                }

                //transform.setOrigin(tf::Vector3(0.0,0.0,0.0));
                stamped_transform.setRotation(tf::Quaternion(imu_x,imu_y,imu_z,imu_w));
                br.sendTransform(tf::StampedTransform(stamped_transform, ros::Time::now(),stamped_transform.frame_id_,"imu_left_hand"));
                ROS_INFO("Got this reply for left hand:  x : %f ,y : %f ,z : %f ,w : %f", imu_x, imu_y, imu_z, imu_w);
            }

            read_len = 0;
        }

        ros::spinOnce();
        r.sleep();
    }   
}
/* ------------------------ JointModelGroup ------------------------ */
planning_models::KinematicModel::JointModelGroup::JointModelGroup(const std::string& group_name,
                                                                  const std::vector<const JointModel*> &group_joints,
                                                                  const std::vector<const JointModel*> &fixed_group_joints,
                                                                  const KinematicModel* parent_model) :
  name_(group_name)
{
  ROS_DEBUG_STREAM("Group " << group_name);

  joint_model_vector_ = group_joints;
  fixed_joint_model_vector_ = fixed_group_joints;
  joint_model_name_vector_.resize(group_joints.size());

  ROS_DEBUG_STREAM("Joints:");    
  for (unsigned int i = 0 ; i < group_joints.size() ; ++i)
  {
    ROS_DEBUG_STREAM(group_joints[i]->getName());
    joint_model_name_vector_[i] = group_joints[i]->getName();
    joint_model_map_[group_joints[i]->getName()] = group_joints[i];
  }

  ROS_DEBUG_STREAM("Fixed joints:");    
  for (unsigned int i = 0 ; i < fixed_joint_model_vector_.size() ; ++i)
  {
    ROS_DEBUG_STREAM(fixed_joint_model_vector_[i]->getName());
  }

  std::vector<const JointModel*> all_joints = group_joints;
  all_joints.insert(all_joints.end(), fixed_group_joints.begin(), fixed_group_joints.end());

  //Find the joints in this group that root distinct subtrees:
  //a joint is a root iff none of its ancestors is also a joint in the group
  std::map<std::string, bool> is_root;
  for (unsigned int i = 0 ; i < all_joints.size() ; ++i)
  {
    bool found = false;
    const JointModel *joint = all_joints[i];
    //if we find that an ancestor is also in the group, then the joint is not a root
    while (joint->getParentLinkModel() != NULL)
    {
      joint = joint->getParentLinkModel()->getParentJointModel();
      if (hasJointModel(joint->name_))
      {
        found = true;
      }
    }
	
    if(!found) {
      joint_roots_.push_back(all_joints[i]);
      is_root[all_joints[i]->getName()] = true;
    } else {
      is_root[all_joints[i]->getName()] = false;
    }
  }

  //now we need to make another pass for group links
  std::set<const LinkModel*> group_links_set;
  for(unsigned int i = 0; i < all_joints.size(); i++) {
    const JointModel *joint = all_joints[i];
    //push children in directly
    group_links_set.insert(joint->getChildLinkModel());
    while (joint->getParentLinkModel() != NULL)
    {
      if(is_root.find(joint->getName()) != is_root.end() &&
         is_root[joint->getName()]) {
        break;
      }
      group_links_set.insert(joint->getParentLinkModel());
      joint = joint->getParentLinkModel()->getParentJointModel();
    }
  }

  ROS_DEBUG("Group links:");
  for(std::set<const LinkModel*>::iterator it = group_links_set.begin();
      it != group_links_set.end();
      it++) {
    group_link_model_vector_.push_back(*it);
    ROS_DEBUG_STREAM((*it)->getName());
  }
  //these subtrees are distinct, so we can stack their updated links on top of each other
  for (unsigned int i = 0 ; i < joint_roots_.size() ; ++i)
  {
    std::vector<const LinkModel*> links;
    parent_model->getChildLinkModels(joint_roots_[i], links);
    updated_link_model_vector_.insert(updated_link_model_vector_.end(), links.begin(), links.end());
  }
}
Example No. 27
  // Processes the point cloud with OpenCV using the PCL cluster indices
  void processClusters( const std::vector<pcl::PointIndices> cluster_indices,
                        //                        const sensor_msgs::PointCloud2ConstPtr& pointcloud_msg,
                        const pcl::PointCloud<pcl::PointXYZRGB>::ConstPtr  cloud_transformed,
                        const pcl::PointCloud<pcl::PointXYZRGB>::ConstPtr& cloud_filtered,
                        const pcl::PointCloud<pcl::PointXYZRGB>&  cloud )
  {

    // -------------------------------------------------------------------------------------------------------
    // Convert image
    ROS_INFO_STREAM_NAMED("perception","Converting image to OpenCV format");

    try
    {
      sensor_msgs::ImagePtr image_msg(new sensor_msgs::Image);
      //      pcl::toROSMsg (*pointcloud_msg, *image_msg);
      pcl::toROSMsg (*cloud_transformed, *image_msg);
      cv_bridge::CvImagePtr input_bridge = cv_bridge::toCvCopy(image_msg, "rgb8");
      full_input_image = input_bridge->image;
    }
    catch (cv_bridge::Exception& ex)
    {
      ROS_ERROR_STREAM_NAMED("perception","[calibrate] Failed to convert image");
      return;
    }

    // -------------------------------------------------------------------------------------------------------
    // Process Image

    // Convert image to gray (the bridge produced RGB channel order above, so use RGB2GRAY)
    cv::cvtColor( full_input_image, full_input_image_gray, CV_RGB2GRAY );
    //cv::adaptiveThreshold( full_input_image, full_input_image_gray, 255, CV_ADAPTIVE_THRESH_MEAN_C, CV_THRESH_BINARY,5,10);

    // Blur image - reduce noise with a 3x3 kernel
    cv::blur( full_input_image_gray, full_input_image_gray, cv::Size(3,3) );

    ROS_INFO_STREAM_NAMED("perception","Finished coverting");

    // -------------------------------------------------------------------------------------------------------
    // Check OpenCV and PCL image height for errors
    int image_width = cloud.width;
    int image_height = cloud.height;
    ROS_DEBUG_STREAM( "PCL Image height " << image_height << " -- width " << image_width << "\n");
    int image_width_cv = full_input_image.size.p[1];
    int image_height_cv = full_input_image.size.p[0];
    ROS_DEBUG_STREAM( "OpenCV Image height " << image_height_cv << " -- width " << image_width_cv << "\n");

    if( image_width != image_width_cv || image_height != image_height_cv )
    {
      ROS_ERROR_STREAM_NAMED("perception","PCL and OpenCV image heights/widths do not match!");
      return;
    }

    // -------------------------------------------------------------------------------------------------------
    // GUI Stuff

    // First window
    const char* opencv_window = "Source";
    /*
      cv::namedWindow( opencv_window, CV_WINDOW_AUTOSIZE );
      cv::imshow( opencv_window, full_input_image_gray );
    */


    //    while(true)  // use this when we want to tweak the image
    {

      output_image = full_input_image.clone();

      // -------------------------------------------------------------------------------------------------------
      // Start processing clusters
      ROS_INFO_STREAM_NAMED("perception","Finding min/max in x/y axis");

      int top_image_overlay_x = 0; // tracks where to copyTo the mini images

      // for each cluster, see if it is a block
      for(size_t c = 0; c < cluster_indices.size(); ++c)
      {
        ROS_INFO_STREAM_NAMED("perception","\n\n");
        ROS_INFO_STREAM_NAMED("perception","On cluster " << c);

        // find the outer dimensions of the cluster
        double xmin = 0; double xmax = 0;
        double ymin = 0; double ymax = 0;

        // also remember each min & max's corresponding other coordinate (not needed for z)
        double xminy = 0; double xmaxy = 0;
        double yminx = 0; double ymaxx = 0;

        // also remember their corresponding indices
        int xmini = 0; int xmaxi = 0;
        int ymini = 0; int ymaxi = 0;

        // loop through and find all min/max of x/y
        for(size_t i = 0; i < cluster_indices[c].indices.size(); i++)
        {
          int j = cluster_indices[c].indices[i];

          // Get RGB from point cloud
          pcl::PointXYZRGB p = cloud_transformed->points[j];

          double x = p.x;
          double y = p.y;

          if(i == 0) // initial values
          {
            xmin = xmax = x;
            ymin = ymax = y;
            xminy = xmaxy = y;
            yminx = ymaxx = x;
            xmini = xmaxi = ymini = ymaxi = j; // record the index corresponding to the min/max
          }
          else
          {
            if( x < xmin )
            {
              xmin = x;
              xminy = y;
              xmini = j;
            }
            if( x > xmax )
            {
              xmax = x;
              xmaxy = y;
              xmaxi = j;
            }
            if( y < ymin )
            {
              ymin = y;
              yminx = x;
              ymini = j;
            }
            if( y > ymax )
            {
              ymax = y;
              ymaxx = x;
              ymaxi = j;
            }
          }
        }

        ROS_DEBUG_STREAM_NAMED("perception","Cluster size - xmin: " << xmin << " xmax: " << xmax << " ymin: " << ymin << " ymax: " << ymax);
        ROS_DEBUG_STREAM_NAMED("perception","Cluster size - xmini: " << xmini << " xmaxi: " << xmaxi << " ymini: " << ymini << " ymaxi: " << ymaxi);

        // ---------------------------------------------------------------------------------------------
        // Check if these dimensions make sense for the block size specified
        double xside = xmax-xmin;
        double yside = ymax-ymin;

        const double tol = 0.01; // 1 cm error tolerance
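        // Geometry note: a square of side s spans s when axis-aligned and
        // s*sqrt(2) when rotated 45 degrees, so any in-plane rotation of the
        // block falls between these bounds.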

        // To be part of a block, xside and yside must each lie between
        // block_size and block_size*sqrt(2) (within tolerance tol)
        if(xside > block_size-tol && xside < block_size*sqrt(2)+tol &&
           yside > block_size-tol && yside < block_size*sqrt(2)+tol )
        {

          // -------------------------------------------------------------------------------------------------------
          // Get the four farthest corners of the block - use OpenCV only on the region identified by PCL

          // Get the pixel coordinates of the xmax and ymax indices
          int px_xmax = 0; int py_xmax = 0;
          int px_ymax = 0; int py_ymax = 0;
          getXYCoordinates( xmaxi, image_height, image_width, px_xmax, py_xmax);
          getXYCoordinates( ymaxi, image_height, image_width, px_ymax, py_ymax);

          // Get the pixel coordinates of the xmin and ymin indices
          int px_xmin = 0; int py_xmin = 0;
          int px_ymin = 0; int py_ymin = 0;
          getXYCoordinates( xmini, image_height, image_width, px_xmin, py_xmin);
          getXYCoordinates( ymini, image_height, image_width, px_ymin, py_ymin);

          ROS_DEBUG_STREAM_NAMED("perception","px_xmin " << px_xmin << " px_xmax: " << px_xmax << " py_ymin: " << py_ymin << " py_ymax: " << py_ymax );

          // -------------------------------------------------------------------------------------------------------
          // Find the pixel-space bounding box of the four extreme points (camera frame)

          // Create an array of all the x value options
          const int x_values_a[] = {px_xmax, px_ymax, px_xmin, px_ymin};
          const int y_values_a[] = {py_xmax, py_ymax, py_xmin, py_ymin};
          // Turn it into a vector
          std::vector<int> x_values (x_values_a, x_values_a + sizeof(x_values_a) / sizeof(x_values_a[0]));
          std::vector<int> y_values (y_values_a, y_values_a + sizeof(y_values_a) / sizeof(y_values_a[0]));
          // Find the min
          int x1 = *std::min_element(x_values.begin(), x_values.end());
          int y1 = *std::min_element(y_values.begin(), y_values.end());
          // Find the max
          int x2 = *std::max_element(x_values.begin(), x_values.end());
          int y2 = *std::max_element(y_values.begin(), y_values.end());

          ROS_DEBUG_STREAM_NAMED("perception","x1: " << x1 << " y1: " << y1 << " x2: " << x2 << " y2: " << y2);

          // -------------------------------------------------------------------------------------------------------
          // Expand the ROI by a fudge factor, if possible
          const int FUDGE_FACTOR = 5; // pixels
          if( x1 > FUDGE_FACTOR)
            x1 -= FUDGE_FACTOR;
          if( y1 > FUDGE_FACTOR )
            y1 -= FUDGE_FACTOR;
          if( x2 < image_width - FUDGE_FACTOR )
            x2 += FUDGE_FACTOR;
          if( y2 < image_height - FUDGE_FACTOR )
            y2 += FUDGE_FACTOR;

          ROS_DEBUG_STREAM_NAMED("perception","After Fudge Factor - x1: " << x1 << " y1: " << y1 << " x2: " << x2 << " y2: " << y2);

          // -------------------------------------------------------------------------------------------------------
          // Create ROI parameters
          //        (x1,y1)----------------------
          //       |                            |
          //       |            ROI             |
          //       |                            |
          //       |_____________________(x2,y2)|

          // Create Region of Interest
          int roi_width = x2 - x1;
          int roi_height = y2 - y1;
          cv::Rect region_of_interest = cv::Rect( x1, y1, roi_width, roi_height );
          ROS_DEBUG_STREAM_NAMED("perception","ROI: x " << x1 << " -- y " << y1 << " -- height " << roi_height << " -- width " << roi_width );

          // -------------------------------------------------------------------------------------------------------
          // Find parameters of the block in pixel coordinates
          int block_center_x = x1 + 0.5*roi_width;
          int block_center_y = y1 + 0.5*roi_height;
          int block_center_z = block_size / 2; // TODO: make this better
          const cv::Point block_center = cv::Point( block_center_x, block_center_y );

          // -------------------------------------------------------------------------------------------------------
          // Create a sub image of just the block
          cv::Point a1 = cv::Point(x1, y1);
          cv::Point a2 = cv::Point(x2, y2);
          cv::rectangle( output_image, a1, a2, cv::Scalar(0, 255, 255), 1, 8);

          // Crop image (doesn't actually copy the data)
          cropped_image = full_input_image_gray(region_of_interest);

          // -------------------------------------------------------------------------------------------------------
          // Detect edges using canny
          ROS_INFO_STREAM_NAMED("perception","Detecting edges using canny");

          // Find edges
          cv::Mat canny_output;
          cv::Canny( cropped_image, canny_output, canny_threshold, canny_threshold*2, 3 );
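          // (The low:high threshold ratio of 1:2 is within the 1:2 to 1:3
          // range commonly recommended for Canny hysteresis thresholding.)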

          // Get mini window stats
          const int mini_width = canny_output.cols;
          const int mini_height = canny_output.rows;
          const cv::Size mini_size = canny_output.size();
          const cv::Point mini_center = cv::Point( mini_width/2, mini_height/2 );

          // Find contours
          vector<vector<cv::Point> > contours;
          vector<cv::Vec4i> hierarchy;
          cv::findContours( canny_output, contours, hierarchy, CV_RETR_TREE, CV_CHAIN_APPROX_SIMPLE, cv::Point(0, 0) );
          ROS_INFO_STREAM_NAMED("perception","Found " << contours.size() << " contours");

          // No contours means no usable edges were detected - skip this cluster
          if( contours.empty() )
          {
            ROS_WARN_STREAM_NAMED("perception","No contours found for this cluster, skipping");
            continue;
          }

          // Draw contours
          cv::Mat drawing = cv::Mat::zeros( mini_size, CV_8UC3 );
          ROS_INFO_STREAM_NAMED("perception","Drawing contours");

          // Find the largest contour for getting the angle
          double max_contour_length = 0;
          int max_contour_length_i = 0; // index of the longest contour
          for( size_t i = 0; i< contours.size(); i++ )
          {
            double contour_length = cv::arcLength( contours[i], false );
            if( contour_length > max_contour_length )
            {
              max_contour_length = contour_length;
              max_contour_length_i = i;
            }
            //ROS_DEBUG_STREAM_NAMED("perception","Contour length = " << contour_length << " of index " << max_contour_length_i);


            cv::Scalar color = cv::Scalar( (30 + i*10) % 255, (30 + i*10) % 255, (30 + i*10) % 255);
            cv::drawContours( drawing, contours, (int)i, color, 1, 8, hierarchy, 0, cv::Point() );
            //drawContours( image, contours, contourIdx, color, thickness, lineType, hierarchy, maxLevel, offset )

          }

          // -------------------------------------------------------------------------------------------------------
          // Copy largest contour to main image
          cv::Scalar color = cv::Scalar( 0, 255, 0 );
          cv::drawContours( output_image, contours, (int)max_contour_length_i, color, 1, 8, hierarchy, 0, a1 );
          //drawContours( image, contours, contourIdx, color, thickness, lineType, hierarchy, maxLevel, offset )

          // -------------------------------------------------------------------------------------------------------
          // Copy largest contour to seperate image
          cv::Mat hough_input = cv::Mat::zeros( mini_size, CV_8UC1 );
          cv::Mat hough_input_color;
          cv::Scalar hough_color = cv::Scalar( 200 );
          cv::drawContours( hough_input, contours, (int)max_contour_length_i, hough_color, 1, 8, hierarchy, 0 );
          cv::cvtColor(hough_input, hough_input_color, CV_GRAY2BGR);

          // -------------------------------------------------------------------------------------------------------
          // Hough Transform
          cv::Mat hough_drawing = cv::Mat::zeros( mini_size, CV_8UC3 );
          std::vector<cv::Vec4i> lines;


          ROS_DEBUG_STREAM_NAMED("perception","hough_rho " << hough_rho << " hough_theta " << hough_theta <<
                                 " theta_converted " << (1/hough_theta)*CV_PI/180 << " hough_threshold " <<
                                 hough_threshold << " hough_minLineLength " << hough_minLineLength <<
                                 " hough_maxLineGap " << hough_maxLineGap );

          cv::HoughLinesP(hough_input, lines, hough_rho, (1/hough_theta)*CV_PI/180, hough_threshold, hough_minLineLength, hough_maxLineGap);
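          // Note: hough_theta is treated as a divisor of one degree, i.e. the
          // angular resolution passed to HoughLinesP is (1/hough_theta) degrees,
          // converted to radians by *CV_PI/180.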

          ROS_WARN_STREAM_NAMED("perception","Found " << lines.size() << " lines");

          std::vector<double> line_angles;

          // Copy detected lines to the drawing image
          for( size_t i = 0; i < lines.size(); i++ )
          {
            cv::Vec4i line = lines[i];
            cv::line( hough_drawing, cv::Point(line[0], line[1]), cv::Point(line[2], line[3]),
                      cv::Scalar(255,255,255), 1, CV_AA);

            // Error check: skip degenerate zero-length lines
            if(line[3] - line[1] == 0 && line[2] - line[0] == 0)
            {
              ROS_ERROR_STREAM_NAMED("perception","Line endpoints are identical, unable to calculate angle. TODO: handle better?");
              continue;
            }

            // Find angle
            double line_angle = atan2(line[3] - line[1], line[2] - line[0]); //in radian, degrees: * 180.0 / CV_PI;
            // Reverse angle direction if negative
            if( line_angle < 0 )
            {
              line_angle += CV_PI;
            }
            line_angles.push_back(line_angle);
            ROS_DEBUG_STREAM_NAMED("perception","Hough Line angle: " << line_angle * 180.0 / CV_PI;);
          }

          double block_angle = 0; // the overall result of the block's angle

          // Everything is based on the first angle
          if( line_angles.size() == 0 ) // make sure we have at least 1 angle
          {
            ROS_ERROR_STREAM_NAMED("perception","No lines were found for this cluster, unable to calculate block angle");
          }
          else
          {
            calculateBlockAngle( line_angles, block_angle );
          }
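          // calculateBlockAngle is not shown in this snippet; presumably it
          // fuses the detected line angles into a single block orientation,
          // e.g. by clustering angles modulo 90 degrees, since the edges of a
          // square block form two perpendicular families.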

          // -------------------------------------------------------------------------------------------------------
          // Draw chosen angle
          ROS_INFO_STREAM_NAMED("perception","Using block angle " << block_angle*180.0/CV_PI);

          // Draw chosen angle on mini image
          cv::Mat angle_drawing = cv::Mat::zeros( mini_size, CV_8UC3 );
          int line_length = 0.5*double(mini_width); // have the line go halfway across the mini image
          int new_x = mini_center.x + line_length*cos( block_angle );
          int new_y = mini_center.y + line_length*sin( block_angle );
          ROS_INFO_STREAM("Origin (" << mini_center.x << "," << mini_center.y << ") New (" << new_x << "," << new_y <<
                          ") length " << line_length << " angle " << block_angle <<
                          " mini width " << mini_width << " mini height " << mini_height);
          cv::Point angle_point = cv::Point(new_x, new_y);
          cv::line( angle_drawing, mini_center, angle_point, cv::Scalar(255,255,255), 1, CV_AA);

          // Draw chosen angle on contours image
          cv::line( hough_drawing, mini_center, angle_point, cv::Scalar(255,0, 255), 1, CV_AA);

          // Draw chosen angle on main image
          line_length = 0.75 * double(mini_width); // have the line go 3/4 of the mini width across the box
          new_x = block_center_x + line_length*cos( block_angle );
          new_y = block_center_y + line_length*sin( block_angle );
          ROS_INFO_STREAM_NAMED("perception",block_center_x << ", " << block_center_y << ", " << new_x << ", " << new_y);
          angle_point = cv::Point(new_x, new_y);
          cv::line( output_image, block_center, angle_point, cv::Scalar(255,0,255), 2, CV_AA);


          // -------------------------------------------------------------------------------------------------------
          // Get world coordinates

          // Find the block's center point
          double world_x1 = xmin+(xside)/2.0;
          double world_y1 = ymin+(yside)/2.0;
          double world_z1 = table_height + block_size / 2;

          // Convert pixel coordinates back to world coordinates
          // (assumes (new_x, new_y) falls inside the organized cloud; at() throws otherwise)
          double world_x2 = cloud_transformed->at(new_x, new_y).x;
          double world_y2 = cloud_transformed->at(new_x, new_y).y;
          double world_z2 = world_z1; // is this even necessary?

          // Get angle from the two world coordinates (fabs, not abs, to avoid integer truncation)
          double world_theta = fabs( atan2(world_y2 - world_y1, world_x2 - world_x1) );

          // Attempt to make all angles point in same direction
          makeAnglesUniform( world_theta );
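          // makeAnglesUniform is also not shown; the name suggests it
          // normalizes world_theta into a canonical range so that equivalent
          // block orientations (which repeat every 90 degrees) compare equal.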

          // -------------------------------------------------------------------------------------------------------
          // GUI Stuff

          // Copy the cluster image to the main image in the top left corner
          if( top_image_overlay_x + mini_width < image_width )
          {
            const int common_height = 42;
            cv::Rect small_roi_row0 = cv::Rect(top_image_overlay_x, common_height*0, mini_width, mini_height);
            cv::Rect small_roi_row1 = cv::Rect(top_image_overlay_x, common_height*1, mini_width, mini_height);
            cv::Rect small_roi_row2 = cv::Rect(top_image_overlay_x, common_height*2, mini_width, mini_height);
            cv::Rect small_roi_row3 = cv::Rect(top_image_overlay_x, common_height*3, mini_width, mini_height);

            drawing.copyTo(              output_image(small_roi_row0) );
            hough_input_color.copyTo(    output_image(small_roi_row1) );
            hough_drawing.copyTo(        output_image(small_roi_row2) );
            angle_drawing.copyTo(        output_image(small_roi_row3) );

            top_image_overlay_x += mini_width;
          }

          // figure out the position and the orientation of the block
          //double angle = atan(block_size/((xside+yside)/2));
          //double angle = atan( (xmaxy - xminy) / (xmax - xmin ) );
          // Then add it to our set
          //addBlock( xmin+(xside)/2.0, ymin+(yside)/2.0, zmax - block_size/2.0, angle);
          //ROS_INFO_STREAM_NAMED("perception","FOUND -> xside: " << xside << " yside: " << yside << " angle: " << block_angle);


          addBlock( world_x1, world_y1, world_z1, world_theta );
          //addBlock( block_center_x, block_center_y, block_center_z, block_angle);
        }
        else
        {
          // Cluster dimensions do not match the expected block size - reject this cluster
          ROS_DEBUG_STREAM_NAMED("perception","REJECT -> xside: " << xside << " yside: " << yside);
        }
      } // end for each cluster

void DeterministicGameState::predictGhostMove(int ghost_index)
{
    ROS_DEBUG_STREAM("Predict ghost " << ghost_index);

    // TODO: add things here
}
Example #29
bool getStaticTransform(string calib_filename, geometry_msgs::TransformStamped *ros_msgTf, std_msgs::Header *header, string camera_name)
{
    ifstream calib_file(calib_filename.c_str());
    if (!calib_file.is_open())
        return false;

    ROS_DEBUG_STREAM("Reading transformation" << (camera_name.empty()? string(""): string(" for camera ")+camera_name) << " from " << calib_filename);

    typedef boost::tokenizer<boost::char_separator<char> > tokenizer;
    boost::char_separator<char> sep{" "};
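
    // The file is expected to hold whitespace-separated key/value lines in the
    // style of KITTI calibration text files, e.g. (illustrative values only):
    //   T_cam00: 0.0 0.0 0.0
    //   R_cam00: 1 0 0 0 1 0 0 0 1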

    string line = "";
    int index = 0;
    tokenizer::iterator token_iterator;

    tf2::Transform t;

    while (getline(calib_file,line))
    {
        // Skip empty lines: dereferencing tok.begin() on one would be invalid
        if (line.empty())
            continue;

        // Parse string phase 1, tokenize it using Boost.
        tokenizer tok(line,sep);

        // Move the iterator to the beginning of the token list and check for the T/R matrices.
        token_iterator=tok.begin();
        if (*token_iterator == string("T")+(camera_name.empty()?string(""):string("_")+camera_name)+string(":")) // Translation vector
        {
            index=0; //should be 3 at the end
            ROS_DEBUG_STREAM("T" << (camera_name.empty()?string(""):string("_")+camera_name));
            double T[3];
            for (token_iterator++; token_iterator != tok.end(); token_iterator++)
            {
                // std::cout << *token_iterator << '\n';
                T[index++]=boost::lexical_cast<double>(*token_iterator);
            }
            t.setOrigin(tf2::Vector3(T[0],T[1],T[2]));
        }

        // EXPERIMENTAL: use with unrectified images

        //        token_iterator=tok.begin();
        //        if (strcmp((*token_iterator).c_str(),((string)(string("D_")+camera_name+string(":"))).c_str())==0) //Distortion Coefficients
        //        {
        //            index=0; //should be 5 at the end
        //            ROS_DEBUG_STREAM("D_" << camera_name);
        //            for (token_iterator++; token_iterator != tok.end(); token_iterator++)
        //            {
        ////                std::cout << *token_iterator << '\n';
        //                D[index++]=boost::lexical_cast<double>(*token_iterator);
        //            }
        //        }

        token_iterator=tok.begin();
        if (*token_iterator == string("R")+(camera_name.empty()?string(""):string("_")+camera_name)+string(":")) // Rotation matrix
        {
            index=0; //should be 9 at the end
            ROS_DEBUG_STREAM("R" << (camera_name.empty()?string(""):string("_")+camera_name));
            double R[9];
            for (token_iterator++; token_iterator != tok.end(); token_iterator++)
            {
                // std::cout << *token_iterator << '\n';
                R[index++]=boost::lexical_cast<double>(*token_iterator);
            }
            tf2::Matrix3x3 mat(R[0],R[1],R[2],
                               R[3],R[4],R[5],
                               R[6],R[7],R[8]);
            tf2::Quaternion quat;
            mat.getRotation(quat);
            t.setRotation(quat);
        }

        // token_iterator=tok.begin();
        // if (strcmp((*token_iterator).c_str(),((string)(string("P_rect_")+camera_name+string(":"))).c_str())==0) //Projection Matrix Rectified
        // {
        //     index=0; //should be 12 at the end
        //     ROS_DEBUG_STREAM("P_rect_" << camera_name);
        //     for (token_iterator++; token_iterator != tok.end(); token_iterator++)
        //     {
        //         //std::cout << *token_iterator << '\n';
        //         P[index++]=boost::lexical_cast<double>(*token_iterator);
        //     }
        // }

    }

    // fill up tf message
    ros_msgTf->header.frame_id = header->frame_id;
    ros_msgTf->header.stamp = header->stamp;

    // Invert transform to match the tf tree structure: oxts -> velodyne -> cam00 -> cam{01,02,03}
    tf2::convert(t.inverse(),ros_msgTf->transform);

    ROS_DEBUG_STREAM("... ok");
    return true;
}
Example #30
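    // A MoveIt planning-request adapter in the style of FixStartStateCollision:
    // if the start state is in collision, sample joint values near it ("jiggle")
    // until a collision-free state is found, plan from that state, and prepend
    // the original start state to the resulting trajectory.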
    virtual bool adaptAndPlan(const PlannerFn &planner,
                              const planning_scene::PlanningSceneConstPtr& planning_scene,
                              const planning_interface::MotionPlanRequest &req,
                              planning_interface::MotionPlanResponse &res,
                              std::vector<std::size_t> &added_path_index) const
    {
        ROS_DEBUG("Running '%s'", getDescription().c_str());

        // get the specified start state
        robot_state::RobotState start_state = planning_scene->getCurrentState();
        robot_state::robotStateMsgToRobotState(planning_scene->getTransforms(), req.start_state, start_state);

        collision_detection::CollisionRequest creq;
        creq.group_name = req.group_name;
        collision_detection::CollisionResult cres;
        planning_scene->checkCollision(creq, cres, start_state);
        if (cres.collision)
        {
            // Rerun in verbose mode
            collision_detection::CollisionRequest vcreq = creq;
            collision_detection::CollisionResult vcres;
            vcreq.verbose = true;
            planning_scene->checkCollision(vcreq, vcres, start_state);

            if (creq.group_name.empty())
                ROS_INFO("Start state appears to be in collision");
            else
                ROS_INFO_STREAM("Start state appears to be in collision with respect to group " << creq.group_name);

            robot_state::RobotStatePtr prefix_state(new robot_state::RobotState(start_state));
            random_numbers::RandomNumberGenerator &rng = prefix_state->getRandomNumberGenerator();

            const std::vector<const robot_model::JointModel*> &jmodels =
                planning_scene->getRobotModel()->hasJointModelGroup(req.group_name) ?
                planning_scene->getRobotModel()->getJointModelGroup(req.group_name)->getJointModels() :
                planning_scene->getRobotModel()->getJointModels();

            bool found = false;
            for (int c = 0 ; !found && c < sampling_attempts_ ; ++c)
            {
                for (std::size_t i = 0 ; !found && i < jmodels.size() ; ++i)
                {
                    std::vector<double> sampled_variable_values(jmodels[i]->getVariableCount());
                    const double *original_values = prefix_state->getJointPositions(jmodels[i]);
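                    // Sample new values for this joint within a distance of
                    // (maximum joint extent * jiggle_fraction_) from the original values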
                    jmodels[i]->getVariableRandomPositionsNearBy(rng, &sampled_variable_values[0], original_values, jmodels[i]->getMaximumExtent() * jiggle_fraction_);
                    start_state.setJointPositions(jmodels[i], sampled_variable_values);
                    collision_detection::CollisionResult cres;
                    planning_scene->checkCollision(creq, cres, start_state);
                    if (!cres.collision)
                    {
                        found = true;
                        ROS_INFO("Found a valid state near the start state at distance %lf after %d attempts", prefix_state->distance(start_state), c);
                    }
                }
            }

            if (found)
            {
                planning_interface::MotionPlanRequest req2 = req;
                robot_state::robotStateToRobotStateMsg(start_state, req2.start_state);
                bool solved = planner(planning_scene, req2, res);
                if (solved && !res.trajectory_->empty())
                {
                    // heuristically decide a duration offset for the trajectory (induced by the additional point added as a prefix to the computed trajectory)
                    res.trajectory_->setWayPointDurationFromPrevious(0, std::min(max_dt_offset_, res.trajectory_->getAverageSegmentDuration()));
                    res.trajectory_->addPrefixWayPoint(prefix_state, 0.0);
                    // we add a prefix point, so we need to bump any previously added index positions
                    for (std::size_t i = 0 ; i < added_path_index.size() ; ++i)
                        added_path_index[i]++;
                    added_path_index.push_back(0);
                }
                return solved;
            }
            else
            {
                ROS_WARN("Unable to find a valid state nearby the start state (using jiggle fraction of %lf and %u sampling attempts). Passing the original planning request to the planner.",
                         jiggle_fraction_, sampling_attempts_);
                return planner(planning_scene, req, res);
            }
        }
        else
        {
            if (creq.group_name.empty())
                ROS_DEBUG("Start state is valid");
            else
                ROS_DEBUG_STREAM("Start state is valid with respect to group " << creq.group_name);
            return planner(planning_scene, req, res);
        }
    }