int main(int argc, char** argv) {
    ros::init(argc, argv, "merry_motionplanner_test"); // name this node 
    ros::NodeHandle nh; //standard ros node handle

    MerryMotionplanner merry_motionplanner(&nh);
    CwruPclUtils cwru_pcl_utils(&nh);


    while (!merry_motionplanner.got_kinect_cloud()) {
        ROS_INFO("did not receive pointcloud");
        ros::spinOnce();
        ros::Duration(1.0).sleep();
    }
    ROS_INFO("got a pointcloud");
    tf::StampedTransform tf_sensor_frame_to_torso_frame; //use this to transform sensor frame to torso frame
    tf::TransformListener tf_listener; //start a transform listener

    //let's warm up the tf_listener, to make sure it gets all the transforms it needs to avoid crashing:
    bool tferr = true;
    ROS_INFO("waiting for tf between kinect_pc_frame and torso...");
    while (tferr) {
        tferr = false;
        try {

            //The direction of the transform returned will be from the target_frame to the source_frame. 
            //Which if applied to data, will transform data in the source_frame into the target_frame. See tf/CoordinateFrameConventions#Transform_Direction
            tf_listener.lookupTransform("torso", "kinect_pc_frame", ros::Time(0), tf_sensor_frame_to_torso_frame);
        } catch (tf::TransformException &exception) {
            ROS_ERROR("%s", exception.what());
            tferr = true;
            ros::Duration(0.5).sleep(); // sleep for half a second
            ros::spinOnce();
        }
    }
    ROS_INFO("tf is good"); //tf-listener found a complete chain from sensor to world; ready to roll
    //convert the tf to an Eigen::Affine:
    Eigen::Affine3f A_sensor_wrt_torso;
    Eigen::Affine3d Affine_des_gripper;
    Eigen::Vector3d origin_des;
    geometry_msgs::PoseStamped rt_tool_pose;
    
    A_sensor_wrt_torso = merry_motionplanner.transformTFToEigen(tf_sensor_frame_to_torso_frame);
    Eigen::Vector3f plane_normal, major_axis, centroid;
    Eigen::Matrix3d Rmat;
    int rtn_val;    
    double plane_dist;
    
    //send a command to plan a joint-space move to pre-defined pose:
    rtn_val = merry_motionplanner.plan_move_to_pre_pose();
    
    //send command to execute planned motion
    rtn_val = merry_motionplanner.rt_arm_execute_planned_path();
    
    while (ros::ok()) {
        if (cwru_pcl_utils.got_selected_points()) {
            ROS_INFO("transforming selected points");
            cwru_pcl_utils.transform_selected_points_cloud(A_sensor_wrt_torso);

            //fit a plane to these selected points:
            cwru_pcl_utils.fit_xformed_selected_pts_to_plane(plane_normal, plane_dist);
            ROS_INFO_STREAM(" normal: " << plane_normal.transpose() << "; dist = " << plane_dist);
            major_axis= cwru_pcl_utils.get_major_axis();
            centroid = cwru_pcl_utils.get_centroid();
            cwru_pcl_utils.reset_got_selected_points();   // reset the selected-points trigger

            //input: centroid, plane_normal, major_axis
            //output: Rmat, origin_des
            origin_des = merry_motionplanner.compute_origin_des(centroid);
            Rmat = merry_motionplanner.compute_orientation(plane_normal, major_axis);

            //construct a goal affine pose:
            Affine_des_gripper = merry_motionplanner.construct_affine_pose(Rmat, origin_des);

            //convert des pose from Eigen::Affine to geometry_msgs::PoseStamped
            rt_tool_pose.pose = merry_motionplanner.transformEigenAffine3dToPose(Affine_des_gripper);
            //could fill out the header as well...

            // send move plan request:
            rtn_val = merry_motionplanner.rt_arm_plan_path_current_to_goal_pose(rt_tool_pose);
            if (rtn_val == cwru_action::cwru_baxter_cart_moveResult::SUCCESS)  { 
                //send command to execute planned motion
                rtn_val = merry_motionplanner.rt_arm_execute_planned_path();
            }
            else {
                ROS_WARN("Cartesian path to desired pose not achievable");
            }
        }
        ros::Duration(0.5).sleep(); // sleep for half a second
        ros::spinOnce();
    }

    return 0;
}
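// A minimal sketch (not the class's actual implementation) of the tf-to-Eigen
// conversion performed by merry_motionplanner.transformTFToEigen() above,
// assuming the tf_conversions package is available.
#include <tf_conversions/tf_eigen.h>

static Eigen::Affine3f stampedTransformToAffine3f(const tf::StampedTransform &tf_in) {
    Eigen::Affine3d affine_d;
    tf::transformTFToEigen(tf_in, affine_d); // tf_conversions helper
    return affine_d.cast<float>();           // sensor point clouds are float-based
}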
int main(int argc, char** argv) {

    // Initialize ROS
    ros::init(argc,argv,"lrf_server_node");

    // Create the ROS node
    ros::NodeHandle node("~");

    if(!node.hasParam("laser_topic"))
        ROS_WARN("Usage : rosrun robot_controller robot_controller_node _laser_topic:=<laser_topic>");
    node.param<std::string>("laser_topic",laser_topic,"/scan");

    if(!node.hasParam("socket_port"))
        ROS_WARN("Usage : rosrun robot_controller robot_controller_node _socket_port:=<socket_port>");
    node.param<int>("socket_port",socket_port,25650);

    // Subscribe to the laser scan topic (sensor_msgs/LaserScan)
    ros::Subscriber sub_lrf = node.subscribe(laser_topic,100,callback_laser);

    socklen_t clilen;
    char buffer[256];
    struct sockaddr_in serv_addr, cli_addr;
    int n;

    if (argc < 2) errormsg_print(SOCKET_ERROR_NOPROVIDE_PORT);
    sockfd = socket(AF_INET, SOCK_STREAM, 0);

    // Check whether socket creation failed
    if(sockfd < 0) {
        fprintf(stdout, "ERROR opening socket\n");
    } else {
        fprintf(stdout, "SUCCESSFUL opening socket:%d\n",sockfd);
    }

    bzero((char*) &serv_addr, sizeof(serv_addr));


    serv_addr.sin_family = AF_INET;
    serv_addr.sin_addr.s_addr = INADDR_ANY;
    serv_addr.sin_port = htons(socket_port);

    // Set SO_REUSEADDR so the port can be rebound quickly after a restart
    int sock_opt = 1;
    if (setsockopt(sockfd,SOL_SOCKET,SO_REUSEADDR,(void*)&sock_opt, sizeof(sock_opt)) == -1) {
        ROS_ERROR("ERROR ON SETUP SOCKET CONFIG\n");
        return -1;
    } else {
        ROS_INFO("SUCCESSFUL ON SETUP SOCKET CONFIG\n");
    }

    ROS_INFO("Binding socket at port:%d\n",socket_port);
    if(bind(sockfd, (struct sockaddr*) &serv_addr, sizeof(serv_addr)) < 0)
        errormsg_print(SOCKET_ERROR_EVENT_ONBINDING);
    else
        ROS_INFO("SUCCESSFUL on binding");


    listen(sockfd, 5);
    clilen = sizeof(cli_addr);

    RECONNECT:

    newsockfd = accept(sockfd,(struct sockaddr*)&cli_addr,&clilen);

    if(newsockfd < 0) errormsg_print(SOCKET_ERROR_EVENT_ONACCEPT);


    bzero(buffer,256);

    n = read(newsockfd, buffer,255);


    if( n < 0) errormsg_print(SOCKET_ERROR_EVENT_ONREAD);
    printf("Here is the message: %s \n", buffer);




    // Enter the message loop: spin to service subscriber callbacks while ROS is running.
    ros::Rate r(10);
    while(ros::ok()) {
        if(is_rst_connect) {
            is_rst_connect = false;
            goto RECONNECT;
        }
        r.sleep();
        ros::spinOnce();
    }

    close(newsockfd);
    close(sockfd);
}
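// Hedged sketch (not part of the original node) of what callback_laser might do:
// serialize the scan ranges and push them to the accepted client socket. The wire
// format is an assumption; newsockfd and is_rst_connect are the only names taken
// from the code above. Requires <unistd.h>, <cstdint> and <sensor_msgs/LaserScan.h>.
static void callback_laser_sketch(const sensor_msgs::LaserScan::ConstPtr &scan) {
    if (newsockfd < 0) return; // no client connected yet

    // Send the number of ranges followed by the raw float ranges.
    uint32_t count = scan->ranges.size();
    if (write(newsockfd, &count, sizeof(count)) < 0 ||
        write(newsockfd, scan->ranges.data(), count * sizeof(float)) < 0) {
        ROS_WARN("Client write failed, flagging reconnect");
        is_rst_connect = true; // the main loop jumps back to accept()
    }
}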
void
vtree_color_user::compute_features( const std::string& cloud_filename,
                                    FeatureVector &feature_vector )
{
    typedef pcl::PointXYZRGB nx_PointT;
    typedef pcl::Normal nx_Normal;
    typedef pcl::PointCloud<nx_PointT> nx_PointCloud;
    typedef pcl::PointCloud<nx_Normal> nx_PointCloud_normal;
    typedef pcl::PointCloud<int> nx_PointCloud_int;

    typedef pcl::UniformSampling<nx_PointT> nx_Sampler;
    typedef pcl::search::KdTree<nx_PointT> nx_SearchMethod;
    typedef pcl::NormalEstimation<nx_PointT, nx_Normal> nx_NormalEst;

#if FEATURE == 1
    typedef pcl::PFHRGBSignature250 nx_FeatureType;
    typedef pcl::PFHRGBEstimation<nx_PointT, nx_Normal, nx_FeatureType> nx_FeatureEst;
#elif FEATURE == 2
    typedef pcl::PPFRGBSignature nx_FeatureType;
    typedef pcl::PPFRGBEstimation<nx_PointT, nx_Normal, nx_FeatureType> nx_FeatureEst;
#else
#error A valid feature definition is required!
#endif
    typedef pcl::PointCloud<nx_FeatureType> nx_PointCloud_feature;

    // load the file
    nx_PointCloud::Ptr cld_ptr(new nx_PointCloud);
    pcl::io::loadPCDFile<nx_PointT> ( cloud_filename, *cld_ptr);

    ROS_INFO("[vtree_color_user] Starting keypoint extraction...");
    clock_t tic = clock();
    nx_PointCloud::Ptr keypoints( new nx_PointCloud);
    nx_PointCloud_int::Ptr keypoint_idx(new nx_PointCloud_int);
    nx_Sampler uniform_sampling;
    uniform_sampling.setInputCloud ( cld_ptr );
    uniform_sampling.setRadiusSearch ( keypoint_radius_ );
    uniform_sampling.compute( *keypoint_idx );

    pcl::copyPointCloud ( *cld_ptr, keypoint_idx->points, *keypoints);

    ROS_INFO("[vtree_color_user] No of Keypoints found %d", static_cast<int>(keypoint_idx->size()) );
    ROS_INFO("[vtree_color_user] Keypoint extraction took %f msec.", static_cast<double>((clock()-tic)*1000)/CLOCKS_PER_SEC );

    if( keypoints->empty() )
    {
        ROS_WARN("[vtree_color_user] No keypoints were found...");
        return;
    }

    // Compute normals for the input cloud
    ROS_INFO("[vtree_color_user] Starting normal extraction...");
    tic = clock();
    nx_PointCloud_normal::Ptr normals (new nx_PointCloud_normal);
    nx_SearchMethod::Ptr search_method_xyz (new nx_SearchMethod);
    nx_NormalEst norm_est;
    norm_est.setInputCloud ( cld_ptr );
    norm_est.setSearchMethod ( search_method_xyz );
    norm_est.setRadiusSearch ( normal_radius_ );
    norm_est.compute ( *normals );
    ROS_INFO("[vtree_color_user] Normal extraction took %f msec.", static_cast<double>((clock()-tic)*1000)/CLOCKS_PER_SEC );

    // Get features at the computed keypoints
    ROS_INFO("[vtree_color_user] Starting feature computation...");
    tic = clock();
    nx_PointCloud_feature::Ptr features(new nx_PointCloud_feature);
    nx_FeatureEst feat_est;
    feat_est.setInputCloud ( keypoints );
    feat_est.setSearchSurface ( cld_ptr );
    feat_est.setInputNormals ( normals );

    search_method_xyz.reset(new nx_SearchMethod);
    feat_est.setSearchMethod ( search_method_xyz );
    feat_est.setRadiusSearch ( feature_radius_ );
    feat_est.compute ( *features );
    ROS_INFO("[vtree_color_user] No of Features found %d", static_cast<int>(features->size()) );
    ROS_INFO("[vtree_color_user] Feature computation took %f msec.", static_cast<double>((clock()-tic)*1000)/CLOCKS_PER_SEC );

    // Rectify the histogram values to ensure they are in [0,100] and create a document
    for( nx_PointCloud_feature::iterator iter = features->begin();
         iter != features->end(); ++iter)
    {
        rectify_histogram( iter->histogram );
        feature_vector.push_back( FeatureHist( iter->histogram ) );
    }
}
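// A minimal sketch of the histogram rectification referenced above, assuming it
// simply clamps each bin into the [0,100] range described by the comment; the real
// vtree_color_user::rectify_histogram may also renormalize. Requires <cmath>,
// <algorithm> and <cstddef>.
template <std::size_t N>
static void rectify_histogram_sketch(float (&histogram)[N])
{
    for (std::size_t i = 0; i < N; ++i)
    {
        if (!std::isfinite(histogram[i]))
            histogram[i] = 0.0f;  // guard against NaN bins occasionally produced by PCL
        histogram[i] = std::min(100.0f, std::max(0.0f, histogram[i]));
    }
}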
    bool ITRCartesianImpedanceController::init(hardware_interface::PositionCartesianInterface *robot, ros::NodeHandle &n)
    {
        ROS_INFO("THIS CONTROLLER USES THE TCP INFORMATION SET ON THE FRI SIDE... SO BE SURE YOU KNOW WHAT YOU ARE DOING!");

        if (!ros::param::search(n.getNamespace(),"robot_name", robot_namespace_))
        {
            ROS_WARN_STREAM("ITRCartesianImpedanceController: No robot name found on parameter server ("<<n.getNamespace()<<"/robot_name), using the namespace...");
            robot_namespace_ = n.getNamespace();
            //return false;
        }
        if (!n.getParam("robot_name", robot_namespace_))
        {
            ROS_WARN_STREAM("ITRCartesianImpedanceController: Could not read robot name from parameter server ("<<n.getNamespace()<<"/robot_name), using the namespace...");
            robot_namespace_ = n.getNamespace();
            //return false;
        }

        // stuff to read KDL chain in order to get the transform between base_link and robot
        if( !(KinematicChainControllerBase<hardware_interface::PositionCartesianInterface >::init(robot, n)) )
        {
            ROS_ERROR("Couldn't execute init of the KinematikChainController to get the KDL chain.");
            return false;
        }
        KDL::Chain mount_chain;
        //cout << "reading segment 0 name:" << endl;
        kdl_tree_.getChain(kdl_tree_.getRootSegment()->first, this->robot_namespace_ + "_base_link", mount_chain);
        //cout << "KDL segment 0 name: " << mount_chain.getSegment(0).getName() << endl;
        base_link2robot_ = mount_chain.getSegment(0).getFrameToTip();
        //cout << "base_link2robot transform: " << endl << base_link2robot_ << endl;

        joint_names_.push_back( robot_namespace_ + std::string("_0_joint") );
        joint_names_.push_back( robot_namespace_ + std::string("_1_joint") );
        joint_names_.push_back( robot_namespace_ + std::string("_2_joint") );
        joint_names_.push_back( robot_namespace_ + std::string("_3_joint") );
        joint_names_.push_back( robot_namespace_ + std::string("_4_joint") );
        joint_names_.push_back( robot_namespace_ + std::string("_5_joint") );
        joint_names_.push_back( robot_namespace_ + std::string("_6_joint") );
        cart_12_names_.push_back( robot_namespace_ + std::string("_rot_xx") );
        cart_12_names_.push_back( robot_namespace_ + std::string("_rot_yx") );
        cart_12_names_.push_back( robot_namespace_ + std::string("_rot_zx") );
        cart_12_names_.push_back( robot_namespace_ + std::string("_pos_x") );
        cart_12_names_.push_back( robot_namespace_ + std::string("_rot_xy") );
        cart_12_names_.push_back( robot_namespace_ + std::string("_rot_yy") );
        cart_12_names_.push_back( robot_namespace_ + std::string("_rot_zy") );
        cart_12_names_.push_back( robot_namespace_ + std::string("_pos_y") );
        cart_12_names_.push_back( robot_namespace_ + std::string("_rot_xz") );
        cart_12_names_.push_back( robot_namespace_ + std::string("_rot_yz") );
        cart_12_names_.push_back( robot_namespace_ + std::string("_rot_zz") );
        cart_12_names_.push_back( robot_namespace_ + std::string("_pos_z") );
        cart_6_names_.push_back( robot_namespace_ + std::string("_X") );
        cart_6_names_.push_back( robot_namespace_ + std::string("_Y") );
        cart_6_names_.push_back( robot_namespace_ + std::string("_Z") );
        cart_6_names_.push_back( robot_namespace_ + std::string("_A") );
        cart_6_names_.push_back( robot_namespace_ + std::string("_B") );
        cart_6_names_.push_back( robot_namespace_ + std::string("_C") );

        // now get all handles, 12 for cart pos, 6 for stiff, 6 for damp, 6 for wrench, 6 for joints
        for(int c = 0; c < 30; ++c)
        {
            if(c < 12)
                cart_handles_.push_back(robot->getHandle(cart_12_names_.at(c)));
            if(c > 11 && c < 18)
                cart_handles_.push_back(robot->getHandle(cart_6_names_.at(c-12) + std::string("_stiffness")));
            if(c > 17 && c < 24)
                cart_handles_.push_back(robot->getHandle(cart_6_names_.at(c-18) + std::string("_damping")));
            if(c > 23 && c < 30)
                cart_handles_.push_back(robot->getHandle(cart_6_names_.at(c-24) + std::string("_wrench")));
        }
        for(int j = 0; j < 6; ++j)
        {
            joint_handles_.push_back(robot->getHandle(joint_names_.at(j)));
        }

        sub_pose_ = n.subscribe(n.resolveName("pose"), 1, &ITRCartesianImpedanceController::pose, this);
        sub_pose_world_ = n.subscribe(n.resolveName("pose_base_link"), 1, &ITRCartesianImpedanceController::pose_base_link, this);

        sub_gains_ = n.subscribe(n.resolveName("gains"), 1, &ITRCartesianImpedanceController::gains, this);
        sub_addFT_ = n.subscribe(n.resolveName("wrench"), 1, &ITRCartesianImpedanceController::additionalFT, this);

        srv_command_ = n.advertiseService("set_command", &ITRCartesianImpedanceController::command_cb, this);
        //sub_ft_measures_ = n.subscribe(n.resolveName("ft_measures"), 1, &ITRCartesianImpedanceController::updateFT, this);
        //pub_goal_ = n.advertise<geometry_msgs::PoseStamped>(n.resolveName("goal"),0);

        pub_msr_pos_ = n.advertise<geometry_msgs::PoseStamped>(n.resolveName("measured_cartesian_pose"),0);

        // Initial position
        KDL::Rotation cur_R(cart_handles_.at(0).getPosition(),
                            cart_handles_.at(1).getPosition(),
                            cart_handles_.at(2).getPosition(),
                            cart_handles_.at(4).getPosition(),
                            cart_handles_.at(5).getPosition(),
                            cart_handles_.at(6).getPosition(),
                            cart_handles_.at(8).getPosition(),
                            cart_handles_.at(9).getPosition(),
                            cart_handles_.at(10).getPosition());
        KDL::Vector cur_p(cart_handles_.at(3).getPosition(),
                            cart_handles_.at(7).getPosition(),
                            cart_handles_.at(11).getPosition());
        KDL::Frame cur_T( cur_R, cur_p );
        x_ref_ = cur_T;
        x_des_ = cur_T;
        x_set_ = cur_T;

        // Initial Cartesian stiffness
        KDL::Stiffness k( 800.0, 800.0, 800.0, 50.0, 50.0, 50.0 );
        k_des_ = k;

        // Initial force/torque measure
        KDL::Wrench w(KDL::Vector(0.0, 0.0, 0.0), KDL::Vector(0.0, 0.0, 0.0));
        f_des_ = w;

        // Initial Cartesian damping
        KDL::Stiffness d(0.7, 0.7, 0.7, 0.7, 0.7, 0.7);
        d_des_ = d;

        for (int i = 0; i < 3; ++i){
            if ( !nh_.getParam("position_stiffness_gains", k_des_[i] ) ){
                ROS_WARN("Position stiffness gain not set in yaml file, Using %f", k_des_[i]);
            }
        }
        for (int i = 3; i < 6; ++i){
            if ( !nh_.getParam("orientation_stiffness_gains", k_des_[i] ) ){
                ROS_WARN("Orientation stiffness gain not set in yaml file, Using %f", k_des_[i]);
            }
        }
        for (int i = 0; i < 6; ++i){
            if ( !nh_.getParam("damping_gains", d_des_[i]) ){
                ROS_WARN("Damping gain not set in yaml file, Using %f", d_des_[i]);
            }
        }

        max_trans_speed_ = 0.1; // m/s
        max_rot_speed_ = 0.5;   // rad/s

        // get params for speed limit
        if ( !nh_.getParam("max_trans_speed", max_trans_speed_) ){
            ROS_WARN("Max translation speed not set in yaml file, Using %f", max_trans_speed_);
        }
        if ( !nh_.getParam("max_rot_speed", max_rot_speed_) ){
            ROS_WARN("Max rotation speed not set in yaml file, Using %f", max_rot_speed_);
        }

        std::vector<double> cur_T_FRI;

        fromKDLtoFRI(x_des_, cur_T_FRI);

        // set initial commands already here:
        for(int c = 0; c < 30; ++c)
        {
            if(c < 12)
                cart_handles_.at(c).setCommand(cur_T_FRI.at(c));
            if(c > 11 && c < 18)
                cart_handles_.at(c).setCommand(k_des_[c-12]);
            if(c > 17 && c < 24)
                cart_handles_.at(c).setCommand(d_des_[c-18]);
            if(c > 23 && c < 30)
                cart_handles_.at(c).setCommand(f_des_[c-24]);
        }

        cmd_flag_ = false;

        return true;
    }
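    // Hedged sketch of the frame serialization implied by the cart_12_names_ ordering
    // above (rot_xx, rot_yx, rot_zx, pos_x, ...): each row of [R | p] is written in
    // turn. The controller's real fromKDLtoFRI member may use a different layout.
    static void fromKDLtoFRI_sketch(const KDL::Frame &frame, std::vector<double> &out)
    {
        out.resize(12);
        for (int i = 0; i < 3; ++i)
        {
            out[4*i + 0] = frame.M(i, 0);
            out[4*i + 1] = frame.M(i, 1);
            out[4*i + 2] = frame.M(i, 2);
            out[4*i + 3] = frame.p(i);
        }
    }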
    bool SiftNode::detect(posedetection_msgs::Feature0D& features, const sensor_msgs::Image& imagemsg,
                          const sensor_msgs::Image::ConstPtr& mask_ptr)
    {
        boost::mutex::scoped_lock lock(_mutex);
        Image imagesift = NULL;
        cv::Rect region;
        try {
            cv::Mat image;
            cv_bridge::CvImagePtr framefloat;

            if (!(framefloat = cv_bridge::toCvCopy(imagemsg, "mono8")) )
                return false;
            
            if(imagesift != NULL && (imagesift->cols!=imagemsg.width || imagesift->rows!=imagemsg.height)) {
                ROS_INFO("clear sift resources");
                DestroyAllImages();
                imagesift = NULL;
            }
            
            image = framefloat->image;

            if (mask_ptr) {
                cv::Mat mask = cv_bridge::toCvShare(mask_ptr, mask_ptr->encoding)->image;
                region = jsk_perception::boundingRectOfMaskImage(mask);
                image = image(region);
            }
            else {
                region = cv::Rect(0, 0, imagemsg.width, imagemsg.height);
            }
            
            if(imagesift == NULL)
                imagesift = CreateImage(imagemsg.height,imagemsg.width);

            for(int i = 0; i < imagemsg.height; ++i) {
                uint8_t* psrc = (uint8_t*)image.data+image.step*i;
                float* pdst = imagesift->pixels+i*imagesift->stride;
                for(int j = 0; j < imagemsg.width; ++j)
                    pdst[j] = (float)psrc[j]*(1.0f/255.0f);
                //memcpy(imagesift->pixels+i*imagesift->stride,framefloat->imageData+framefloat->widthStep*i,imagemsg.width*sizeof(float));
            }
        }
        catch (const cv_bridge::Exception& error) {
            ROS_WARN("bad frame");
            return false;
        }

        // compute SIFT
        ros::Time siftbasetime = ros::Time::now();
        Keypoint keypts = GetKeypoints(imagesift);
        // write the keys to the output
        int numkeys = 0;
        Keypoint key = keypts;
        while(key) {
            numkeys++;
            key = key->next;
        }

        // publish
        features.header = imagemsg.header;
        features.positions.resize(numkeys*2);
        features.scales.resize(numkeys);
        features.orientations.resize(numkeys);
        features.confidences.resize(numkeys);
        features.descriptors.resize(numkeys*128);
        features.descriptor_dim = 128;
        features.type = "libsiftfast";

        int index = 0;
        key = keypts;
        while(key) {

            for(int j = 0; j < 128; ++j)
                features.descriptors[128*index+j] = key->descrip[j];

            features.positions[2*index+0] = key->col + region.x;
            features.positions[2*index+1] = key->row + region.y;
            features.scales[index] = key->scale;
            features.orientations[index] = key->ori;
            features.confidences[index] = 1.0; // SIFT has no confidence?

            key = key->next;
            ++index;
        }

        FreeKeypoints(keypts);
        DestroyAllImages();

        ROS_INFO("imagesift: image: %d(size=%lu), num: %d, sift time: %.3fs, total: %.3fs", imagemsg.header.seq,
                 imagemsg.data.size(),  numkeys,
                 (float)(ros::Time::now()-siftbasetime).toSec(), (float)(ros::Time::now()-lasttime).toSec());
        lasttime = ros::Time::now();
        return true;
    }
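    // Side note (an equivalence assumption, not from the original source): the per-pixel
    // scaling loop above maps mono8 values into [0,1] floats for libsiftfast. With a
    // contiguous destination buffer the same conversion could be written as
    //
    //     cv::Mat image_float;
    //     image.convertTo(image_float, CV_32F, 1.0 / 255.0);
    //
    // The explicit loop is kept because the SIFT image uses its own stride-based layout
    // (imagesift->pixels / imagesift->stride).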
int main(int argc, char** argv) 
{
    char   		*pszGuid = NULL;
    char    	 szGuid[512];
    int			 nInterfaces = 0;
    int			 nDevices = 0;
    int 		 i = 0;
	const char	*pkeyAcquisitionFrameRate[2] = {"AcquisitionFrameRate", "AcquisitionFrameRateAbs"};
    ArvGcNode	*pGcNode;
	GError		*error=NULL;

    global.bCancel = FALSE;
    global.config = global.config.__getDefault__();
    global.idSoftwareTriggerTimer = 0;

    ros::init(argc, argv, "camera");
    global.phNode = new ros::NodeHandle();


    // Service callback for firing NUCs.
	// Needed since we cannot open another connection to cameras while streaming.
	ros::NodeHandle nh;
	ros::ServiceServer NUCservice = nh.advertiseService("FireNUC", NUCService_callback);

    //g_type_init ();

    // Print out some useful info.
    ROS_INFO ("Attached cameras:");
    arv_update_device_list();
    nInterfaces = arv_get_n_interfaces();
    ROS_INFO ("# Interfaces: %d", nInterfaces);

    nDevices = arv_get_n_devices();
    ROS_INFO ("# Devices: %d", nDevices);
    for (i=0; i<nDevices; i++)
    	ROS_INFO ("Device%d: %s", i, arv_get_device_id(i));
    
    if (nDevices>0)
    {
		// Get the camera guid from either the command-line or as a parameter.
    	if (argc==2)
    	{
    		strcpy(szGuid, argv[1]);
    		pszGuid = szGuid;
    	}
    	else
    	{
    		if (global.phNode->hasParam(ros::this_node::getName()+"/guid"))
    		{
    			std::string		stGuid;
    			
    			global.phNode->getParam(ros::this_node::getName()+"/guid", stGuid);
    			strcpy (szGuid, stGuid.c_str());
        		pszGuid = szGuid;
    		}
    		else
    			pszGuid = NULL;
    	}
    	
    	// Open the camera, and set it up.
    	ROS_INFO("Opening: %s", pszGuid ? pszGuid : "(any)");
		while (TRUE)
		{
			global.pCamera = arv_camera_new(pszGuid);
			if (global.pCamera)
				break;
			else
			{
				ROS_WARN ("Could not open camera %s.  Retrying...", pszGuid);
				ros::Duration(1.0).sleep();
			    ros::spinOnce();
			}
		}

		global.pDevice = arv_camera_get_device(global.pCamera);
		ROS_INFO("Opened: %s-%s", arv_device_get_string_feature_value (global.pDevice, "DeviceVendorName"), arv_device_get_string_feature_value (global.pDevice, "DeviceID"));


		// See if some basic camera features exist.
		pGcNode = arv_device_get_feature (global.pDevice, "AcquisitionMode");
		global.isImplementedAcquisitionMode = ARV_GC_FEATURE_NODE (pGcNode) ? arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error) : FALSE;

		pGcNode = arv_device_get_feature (global.pDevice, "GainRaw");
		global.isImplementedGain = ARV_GC_FEATURE_NODE (pGcNode) ? arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error) : FALSE;
		pGcNode = arv_device_get_feature (global.pDevice, "Gain");
		global.isImplementedGain |= ARV_GC_FEATURE_NODE (pGcNode) ? arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error) : FALSE;

		pGcNode = arv_device_get_feature (global.pDevice, "ExposureTimeAbs");
		global.isImplementedExposureTimeAbs = ARV_GC_FEATURE_NODE (pGcNode) ? arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error) : FALSE;

		pGcNode = arv_device_get_feature (global.pDevice, "ExposureAuto");
		global.isImplementedExposureAuto = ARV_GC_FEATURE_NODE (pGcNode) ? arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error) : FALSE;

		pGcNode = arv_device_get_feature (global.pDevice, "GainAuto");
		global.isImplementedGainAuto = ARV_GC_FEATURE_NODE (pGcNode) ? arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error) : FALSE;

		pGcNode = arv_device_get_feature (global.pDevice, "TriggerSelector");
		global.isImplementedTriggerSelector = ARV_GC_FEATURE_NODE (pGcNode) ? arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error) : FALSE;

		pGcNode = arv_device_get_feature (global.pDevice, "TriggerSource");
		global.isImplementedTriggerSource = ARV_GC_FEATURE_NODE (pGcNode) ? arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error) : FALSE;

		pGcNode = arv_device_get_feature (global.pDevice, "TriggerMode");
		global.isImplementedTriggerMode = ARV_GC_FEATURE_NODE (pGcNode) ? arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error) : FALSE;

		pGcNode = arv_device_get_feature (global.pDevice, "FocusPos");
		global.isImplementedFocusPos = ARV_GC_FEATURE_NODE (pGcNode) ? arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error) : FALSE;

		pGcNode = arv_device_get_feature (global.pDevice, "GevSCPSPacketSize");
		global.isImplementedMtu = ARV_GC_FEATURE_NODE (pGcNode) ? arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error) : FALSE;

		pGcNode = arv_device_get_feature (global.pDevice, "AcquisitionFrameRateEnable");
		global.isImplementedAcquisitionFrameRateEnable = ARV_GC_FEATURE_NODE (pGcNode) ? arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error) : FALSE;

		// Find the key name for framerate.
		global.keyAcquisitionFrameRate = NULL;
		for (i=0; i<2; i++)
		{
			pGcNode = arv_device_get_feature (global.pDevice, pkeyAcquisitionFrameRate[i]);
			global.isImplementedAcquisitionFrameRate = pGcNode ? arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error) : FALSE;
			if (global.isImplementedAcquisitionFrameRate)
			{
				global.keyAcquisitionFrameRate = pkeyAcquisitionFrameRate[i];
				break;
			}
		}


		// Get parameter bounds.
		arv_camera_get_exposure_time_bounds	(global.pCamera, &global.configMin.ExposureTimeAbs, &global.configMax.ExposureTimeAbs);
		arv_camera_get_gain_bounds			(global.pCamera, &global.configMin.Gain, &global.configMax.Gain);
		arv_camera_get_sensor_size			(global.pCamera, &global.widthSensor, &global.heightSensor);
		arv_camera_get_width_bounds			(global.pCamera, &global.widthRoiMin, &global.widthRoiMax);
		arv_camera_get_height_bounds		(global.pCamera, &global.heightRoiMin, &global.heightRoiMax);

		if (global.isImplementedFocusPos)
		{
			gint64 focusMin64, focusMax64;
			arv_device_get_integer_feature_bounds (global.pDevice, "FocusPos", &focusMin64, &focusMax64);
			global.configMin.FocusPos = focusMin64;
			global.configMax.FocusPos = focusMax64;
		}
		else
		{
			global.configMin.FocusPos = 0;
			global.configMax.FocusPos = 0;
		}

		global.configMin.AcquisitionFrameRate =    0.0;
		global.configMax.AcquisitionFrameRate = 1000.0;


		// Initial camera settings.
		if (global.isImplementedExposureTimeAbs)
			arv_device_set_float_feature_value(global.pDevice, "ExposureTimeAbs", global.config.ExposureTimeAbs);
		if (global.isImplementedGain)
			arv_camera_set_gain(global.pCamera, global.config.Gain);
			//arv_device_set_integer_feature_value(global.pDevice, "GainRaw", global.config.GainRaw);
		if (global.isImplementedAcquisitionFrameRateEnable)
			arv_device_set_integer_feature_value(global.pDevice, "AcquisitionFrameRateEnable", 1);
		if (global.isImplementedAcquisitionFrameRate)
			arv_device_set_float_feature_value(global.pDevice, global.keyAcquisitionFrameRate, global.config.AcquisitionFrameRate);


		// Set up the triggering.
		if (global.isImplementedTriggerMode)
		{
			if (global.isImplementedTriggerSelector && global.isImplementedTriggerMode)
			{
				arv_device_set_string_feature_value(global.pDevice, "TriggerSelector", "AcquisitionStart");
				arv_device_set_string_feature_value(global.pDevice, "TriggerMode", "Off");
				arv_device_set_string_feature_value(global.pDevice, "TriggerSelector", "FrameStart");
				arv_device_set_string_feature_value(global.pDevice, "TriggerMode", "Off");
			}
		}


		WriteCameraFeaturesFromRosparam ();


#ifdef TUNING			
		ros::Publisher pubInt64 = global.phNode->advertise<std_msgs::Int64>(ros::this_node::getName()+"/dt", 100);
		global.ppubInt64 = &pubInt64;
#endif
    	
    // Grab the calibration file URL from the param server, if it exists.
    std::string calibrationURL = ""; // Default looks in .ros/camera_info
		if (!(ros::param::get(std::string("calibrationURL").append(arv_device_get_string_feature_value (global.pDevice, "DeviceID")), calibrationURL)))
		{
			ROS_ERROR("ERROR: Could not read calibrationURL from parameter server");
		}

		// Start the camerainfo manager.
		global.pCameraInfoManager = new camera_info_manager::CameraInfoManager(ros::NodeHandle(ros::this_node::getName()), arv_device_get_string_feature_value (global.pDevice, "DeviceID"), calibrationURL);

		// Start the dynamic_reconfigure server.
		dynamic_reconfigure::Server<Config> 				reconfigureServer;
		dynamic_reconfigure::Server<Config>::CallbackType 	reconfigureCallback;

		reconfigureCallback = boost::bind(&RosReconfigure_callback, _1, _2);
		reconfigureServer.setCallback(reconfigureCallback);
		ros::Duration(2.0).sleep();


		// Get parameter current values.
		global.xRoi=0; global.yRoi=0; global.widthRoi=0; global.heightRoi=0;
		arv_camera_get_region (global.pCamera, &global.xRoi, &global.yRoi, &global.widthRoi, &global.heightRoi);
		global.config.ExposureTimeAbs 	= global.isImplementedExposureTimeAbs ? arv_device_get_float_feature_value (global.pDevice, "ExposureTimeAbs") : 0;
		global.config.Gain      		= global.isImplementedGain ? arv_camera_get_gain (global.pCamera) : 0.0;
		global.pszPixelformat   		= g_string_ascii_down(g_string_new(arv_device_get_string_feature_value(global.pDevice, "PixelFormat")))->str;
		global.nBytesPixel      		= ARV_PIXEL_FORMAT_BYTE_PER_PIXEL(arv_device_get_integer_feature_value(global.pDevice, "PixelFormat"));
		global.config.FocusPos  		= global.isImplementedFocusPos ? arv_device_get_integer_feature_value (global.pDevice, "FocusPos") : 0;
		
		
		// Print information.
		ROS_INFO ("    Using Camera Configuration:");
		ROS_INFO ("    ---------------------------");
		ROS_INFO ("    Vendor name          = %s", arv_device_get_string_feature_value (global.pDevice, "DeviceVendorName"));
		ROS_INFO ("    Model name           = %s", arv_device_get_string_feature_value (global.pDevice, "DeviceModelName"));
		ROS_INFO ("    Device id            = %s", arv_device_get_string_feature_value (global.pDevice, "DeviceID"));
		ROS_INFO ("    Sensor width         = %d", global.widthSensor);
		ROS_INFO ("    Sensor height        = %d", global.heightSensor);
		ROS_INFO ("    ROI x,y,w,h          = %d, %d, %d, %d", global.xRoi, global.yRoi, global.widthRoi, global.heightRoi);
		ROS_INFO ("    Pixel format         = %s", global.pszPixelformat);
		ROS_INFO ("    BytesPerPixel        = %d", global.nBytesPixel);
		ROS_INFO ("    Acquisition Mode     = %s", global.isImplementedAcquisitionMode ? arv_device_get_string_feature_value (global.pDevice, "AcquisitionMode") : "(not implemented in camera)");
		ROS_INFO ("    Trigger Mode         = %s", global.isImplementedTriggerMode ? arv_device_get_string_feature_value (global.pDevice, "TriggerMode") : "(not implemented in camera)");
		ROS_INFO ("    Trigger Source       = %s", global.isImplementedTriggerSource ? arv_device_get_string_feature_value(global.pDevice, "TriggerSource") : "(not implemented in camera)");
		ROS_INFO ("    Can set FrameRate:     %s", global.isImplementedAcquisitionFrameRate ? "True" : "False");
		if (global.isImplementedAcquisitionFrameRate)
		{
			global.config.AcquisitionFrameRate = arv_device_get_float_feature_value (global.pDevice, global.keyAcquisitionFrameRate);
			ROS_INFO ("    AcquisitionFrameRate = %g hz", global.config.AcquisitionFrameRate);
		}

		ROS_INFO ("    Can set Exposure:      %s", global.isImplementedExposureTimeAbs ? "True" : "False");
		if (global.isImplementedExposureTimeAbs)
		{
			ROS_INFO ("    Can set ExposureAuto:  %s", global.isImplementedExposureAuto ? "True" : "False");
			ROS_INFO ("    Exposure             = %g us in range [%g,%g]", global.config.ExposureTimeAbs, global.configMin.ExposureTimeAbs, global.configMax.ExposureTimeAbs);
		}

		ROS_INFO ("    Can set Gain:          %s", global.isImplementedGain ? "True" : "False");
		if (global.isImplementedGain)
		{
			ROS_INFO ("    Can set GainAuto:      %s", global.isImplementedGainAuto ? "True" : "False");
			ROS_INFO ("    Gain                 = %f %% in range [%f,%f]", global.config.Gain, global.configMin.Gain, global.configMax.Gain);
		}

		ROS_INFO ("    Can set FocusPos:      %s", global.isImplementedFocusPos ? "True" : "False");

		if (global.isImplementedMtu)
			ROS_INFO ("    Network mtu          = %lu", arv_device_get_integer_feature_value(global.pDevice, "GevSCPSPacketSize"));

		ROS_INFO ("    ---------------------------");


//		// Print the tree of camera features, with their values.
//		ROS_INFO ("    ----------------------------------------------------------------------------------");
//		NODEEX		 nodeex;
//		ArvGc	*pGenicam=0;
//		pGenicam = arv_device_get_genicam(global.pDevice);
//
//		nodeex.szName = "Root";
//		nodeex.pNode = (ArvDomNode	*)arv_gc_get_node(pGenicam, nodeex.szName);
//		nodeex.pNodeSibling = NULL;
//		PrintDOMTree(pGenicam, nodeex, 0);
//		ROS_INFO ("    ----------------------------------------------------------------------------------");
			

		ArvGvStream *pStream = NULL;
		while (TRUE)
		{
			pStream = CreateStream();
			if (pStream)
				break;
			else
			{
				ROS_WARN("Could not create image stream for %s.  Retrying...", pszGuid);
				ros::Duration(1.0).sleep();
			    ros::spinOnce();
			}
		}


		ApplicationData applicationdata;
		applicationdata.nBuffers=0;
		applicationdata.main_loop = 0;

		// Set up image_raw.
		image_transport::ImageTransport		*pTransport = new image_transport::ImageTransport(*global.phNode);
		global.publisher = pTransport->advertiseCamera(ros::this_node::getName()+"/image_raw", 1);

		// Connect signals with callbacks.
		g_signal_connect (pStream,        "new-buffer",   G_CALLBACK (NewBuffer_callback),   &applicationdata);
		g_signal_connect (global.pDevice, "control-lost", G_CALLBACK (ControlLost_callback), NULL);
		g_timeout_add_seconds (1, PeriodicTask_callback, &applicationdata);
		arv_stream_set_emit_signals ((ArvStream *)pStream, TRUE);


		void (*pSigintHandlerOld)(int);
		pSigintHandlerOld = signal (SIGINT, set_cancel);

		arv_device_execute_command (global.pDevice, "AcquisitionStart");

		applicationdata.main_loop = g_main_loop_new (NULL, FALSE);
		g_main_loop_run (applicationdata.main_loop);

		if (global.idSoftwareTriggerTimer)
		{
			g_source_remove(global.idSoftwareTriggerTimer);
			global.idSoftwareTriggerTimer = 0;
		}

		signal (SIGINT, pSigintHandlerOld);

		g_main_loop_unref (applicationdata.main_loop);

		guint64 n_completed_buffers;
		guint64 n_failures;
		guint64 n_underruns;
		guint64 n_resent;
		guint64 n_missing;
		arv_stream_get_statistics ((ArvStream *)pStream, &n_completed_buffers, &n_failures, &n_underruns);
		ROS_INFO ("Completed buffers = %Lu", (unsigned long long) n_completed_buffers);
		ROS_INFO ("Failures          = %Lu", (unsigned long long) n_failures);
		ROS_INFO ("Underruns         = %Lu", (unsigned long long) n_underruns);
		arv_gv_stream_get_statistics (pStream, &n_resent, &n_missing);
		ROS_INFO ("Resent buffers    = %Lu", (unsigned long long) n_resent);
		ROS_INFO ("Missing           = %Lu", (unsigned long long) n_missing);

		arv_device_execute_command (global.pDevice, "AcquisitionStop");

		g_object_unref (pStream);

    }
    else
    	ROS_ERROR ("No cameras detected.");
    
    delete global.phNode;
    
    return 0;
} // main()
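// Hedged sketch (not the node's actual implementation) of a dynamic_reconfigure
// callback compatible with the server set up above; it pushes a changed exposure
// value back to the camera. Only the Config type, global.pDevice and the
// isImplementedExposureTimeAbs flag are taken from the code above.
static void RosReconfigure_callback_sketch(Config &config, uint32_t level)
{
    (void)level;  // unused in this sketch
    if (global.isImplementedExposureTimeAbs)
        arv_device_set_float_feature_value(global.pDevice, "ExposureTimeAbs",
                                           config.ExposureTimeAbs);
    global.config = config;  // remember the latest requested settings
}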
CSrf10Controller::CSrf10Controller(std::string name, CQboduinoDriver *device_p, ros::NodeHandle& nh) :
    CController(name,device_p,nh)
{
    std::string topic;
    nh.param("controllers/"+name+"/topic", topic, std::string("srf10_state"));
    nh.param("controllers/"+name+"/rate", rate_, 15.0);
    if(nh.hasParam("controllers/"+name+"/sensors/front"))
    {
      std::map< std::string, XmlRpc::XmlRpcValue >::iterator it;
      std::map< std::string, XmlRpc::XmlRpcValue > value;
      XmlRpc::MyXmlRpcValue sensors;
      nh.getParam("controllers/"+name+"/sensors/front", sensors);
      ROS_ASSERT(sensors.getType() == XmlRpc::XmlRpcValue::TypeStruct);
      value=sensors;
      for(it=value.begin();it!=value.end();it++)
      {
        ROS_ASSERT((*it).second.getType() == XmlRpc::XmlRpcValue::TypeStruct);
        if(!nh.hasParam("controllers/"+name+"/sensors/front/"+(*it).first+"/address"))
        {
            ROS_WARN_STREAM("You need to set the address atribute for the sensor " << (*it).first);
            continue;
        }
        if(!nh.hasParam("controllers/"+name+"/sensors/front/"+(*it).first+"/type"))
        {
            ROS_WARN_STREAM("You need to set the type of the sensor " << (*it).first);
            continue;
        }
        int address;
        std::string type;
        std::string frame_id;
        std::string sensor_topic;
        double max_alert_distance;
        double min_alert_distance;
        nh.getParam("controllers/"+name+"/sensors/front/"+(*it).first+"/address", address);
        nh.getParam("controllers/"+name+"/sensors/front/"+(*it).first+"/type", type);
        nh.param("controllers/"+name+"/sensors/front/"+(*it).first+"/frame_id", frame_id, std::string(""));
        nh.param("controllers/"+name+"/sensors/front/"+(*it).first+"/topic", sensor_topic, topic+"/"+(*it).first);
        nh.param("controllers/"+name+"/sensors/front/"+(*it).first+"/min_alert_distance", min_alert_distance, -1.0);
        nh.param("controllers/"+name+"/sensors/front/"+(*it).first+"/max_alert_distance", max_alert_distance, -1.0);
        //printf("%f\n",alert_distance);
        if (type.compare("srf10")==0)
        {
            srf10Sensors_[(uint8_t)address]=new CDistanceSensor((*it).first, (uint8_t)address, sensor_topic, nh, type, frame_id, min_alert_distance, max_alert_distance);
            srf10SensorsUpdateGroup_[(uint8_t)address]=1;
        }
        else if (type.compare("gp2d12")==0 || type.compare("gp2d120")==0 || type.compare("GP2Y0A21YK")==0)
        {
            adcSensors_[(uint8_t)address]=new CDistanceSensor((*it).first, (uint8_t)address, sensor_topic, nh, type, frame_id, min_alert_distance, max_alert_distance);
            adcSensorsAddresses_.push_back((uint8_t)address);
        }
        ROS_INFO_STREAM("Sensor " << (*it).first << " of type " << type << " inicializado");
      }
    }
    if(nh.hasParam("controllers/"+name+"/sensors/back"))
    {
      std::map< std::string, XmlRpc::XmlRpcValue >::iterator it;
      std::map< std::string, XmlRpc::XmlRpcValue > value;
      XmlRpc::MyXmlRpcValue sensors;
      nh.getParam("controllers/"+name+"/sensors/back", sensors);
      ROS_ASSERT(sensors.getType() == XmlRpc::XmlRpcValue::TypeStruct);
      value=sensors;
      for(it=value.begin();it!=value.end();it++)
      {
        ROS_ASSERT((*it).second.getType() == XmlRpc::XmlRpcValue::TypeStruct);
        if(!nh.hasParam("controllers/"+name+"/sensors/back/"+(*it).first+"/address"))
        {
            ROS_WARN_STREAM("You need to set the address atribute for the sensor " << (*it).first);
            continue;
        }
        if(!nh.hasParam("controllers/"+name+"/sensors/back/"+(*it).first+"/type"))
        {
            ROS_WARN_STREAM("You need to set the type of the sensor " << (*it).first);
            continue;
        }
        int address;
        std::string type;
        std::string frame_id;
        std::string sensor_topic;
        double min_alert_distance;
        double max_alert_distance;
        nh.getParam("controllers/"+name+"/sensors/back/"+(*it).first+"/address", address);
        nh.getParam("controllers/"+name+"/sensors/back/"+(*it).first+"/type", type);
        nh.param("controllers/"+name+"/sensors/back/"+(*it).first+"/frame_id", frame_id, std::string(""));
        nh.param("controllers/"+name+"/sensors/back/"+(*it).first+"/topic", sensor_topic, topic+"/"+(*it).first);
        nh.param("controllers/"+name+"/sensors/back/"+(*it).first+"/min_alert_distance", min_alert_distance, -1.0);
        nh.param("controllers/"+name+"/sensors/back/"+(*it).first+"/max_alert_distance", max_alert_distance, -1.0);
        //printf("%f\n",alert_distance);
        if (type.compare("srf10")==0)
        {
            srf10Sensors_[(uint8_t)address]=new CDistanceSensor((*it).first, (uint8_t)address, sensor_topic, nh, type, frame_id, min_alert_distance, max_alert_distance);
            srf10SensorsUpdateGroup_[(uint8_t)address]=1;
        }
        else if (type.compare("gp2d12")==0 || type.compare("gp2d120")==0  || type.compare("GP2Y0A21YK")==0)
        {
            adcSensors_[(uint8_t)address]=new CDistanceSensor((*it).first, (uint8_t)address, sensor_topic, nh, type, frame_id, min_alert_distance, max_alert_distance);
            adcSensorsAddresses_.push_back((uint8_t)address);
        }
        ROS_INFO_STREAM("Sensor " << (*it).first << " of type " << type << " inicializado");
      }
    }
    if(nh.hasParam("controllers/"+name+"/sensors/floor"))
    {
      std::map< std::string, XmlRpc::XmlRpcValue >::iterator it;
      std::map< std::string, XmlRpc::XmlRpcValue > value;
      XmlRpc::MyXmlRpcValue sensors;
      nh.getParam("controllers/"+name+"/sensors/floor", sensors);
      ROS_ASSERT(sensors.getType() == XmlRpc::XmlRpcValue::TypeStruct);
      value=sensors;
      for(it=value.begin();it!=value.end();it++)
      {
        ROS_ASSERT((*it).second.getType() == XmlRpc::XmlRpcValue::TypeStruct);
        if(!nh.hasParam("controllers/"+name+"/sensors/floor/"+(*it).first+"/address"))
        {
            ROS_WARN_STREAM("You need to set the address atribute for the sensor " << (*it).first);
            continue;
        }
        if(!nh.hasParam("controllers/"+name+"/sensors/floor/"+(*it).first+"/type"))
        {
            ROS_WARN_STREAM("You need to set the type of the sensor " << (*it).first);
            continue;
        }
        int address;
        std::string type;
        std::string frame_id;
        std::string sensor_topic;
        double min_alert_distance;
        double max_alert_distance;
        nh.getParam("controllers/"+name+"/sensors/floor/"+(*it).first+"/address", address);
        nh.getParam("controllers/"+name+"/sensors/floor/"+(*it).first+"/type", type);
        nh.param("controllers/"+name+"/sensors/floor/"+(*it).first+"/frame_id", frame_id, std::string(""));
        nh.param("controllers/"+name+"/sensors/floor/"+(*it).first+"/topic", sensor_topic, topic+"/"+(*it).first);
        nh.param("controllers/"+name+"/sensors/floor/"+(*it).first+"/min_alert_distance", min_alert_distance, -1.0);
        nh.param("controllers/"+name+"/sensors/floor/"+(*it).first+"/max_alert_distance", max_alert_distance, -1.0);
        //printf("%f\n",alert_distance);
        if (type.compare("srf10")==0)
        {
            ROS_WARN("srf10 sensors can only be declared at positions front or back");
            continue;
        }
        else if (type.compare("gp2d12")==0 || type.compare("gp2d120")==0  || type.compare("GP2Y0A21YK")==0)
        {
            adcSensors_[(uint8_t)address]=new CDistanceSensor((*it).first, (uint8_t)address, sensor_topic, nh, type, frame_id, min_alert_distance, max_alert_distance);
            adcSensorsAddresses_.push_back((uint8_t)address);
        }
        ROS_INFO_STREAM("Sensor " << (*it).first << " of type " << type << " inicializado");
      }
    }
    // Configure the sensors in the QBoard1
    int code=device_p_->setAutoupdateSensors(srf10SensorsUpdateGroup_);
    if (code<0)
        ROS_WARN("Unable to activate all srf10 sensors at the base control board");
    else
        ROS_INFO("All srf10 sensors updated correctly at the base control board");
    timer_=nh.createTimer(ros::Duration(1/rate_),&CSrf10Controller::timerCallback,this);
}
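// For reference, an illustrative sketch of the parameter layout this constructor
// expects (the sensor name and values below are examples, not from the source):
//
//   controllers/<name>/topic: srf10_state
//   controllers/<name>/rate: 15.0
//   controllers/<name>/sensors/front/left_sonar:
//     address: 112                  # required
//     type: srf10                   # required: srf10, gp2d12, gp2d120 or GP2Y0A21YK
//     frame_id: left_sonar_link
//     topic: srf10_state/left_sonar
//     min_alert_distance: -1.0
//     max_alert_distance: -1.0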
void OrientGoal::runBehavior(){
  if(!initialized_){
    ROS_ERROR("This object must be initialized before runBehavior is called");
    return;
  }

  if(global_costmap_ == NULL || local_costmap_ == NULL){
    ROS_ERROR("The costmaps passed to the ClearCostmapRecovery object cannot be NULL. Doing nothing.");
    return;
  }

  ros::Rate r(frequency_);
  ros::NodeHandle n;
  ros::Publisher vel_pub = n.advertise<geometry_msgs::Twist>("cmd_vel", 10);

  tf::Stamped<tf::Pose> global_pose;
  global_costmap_->getRobotPose(global_pose);
  double yaw_req = angles::normalize_angle(tf::getYaw(*goal_orientation_));
  double yaw_now = angles::normalize_angle(tf::getYaw(global_pose.getRotation()));
  ROS_INFO("The required orientation is %f but the orientation of the robot is %f\n",yaw_req,yaw_now);
  double tol = fabs(yaw_req - yaw_now);
  ROS_INFO("Tolerance with in limits. Allowed tolerance is 0.1 and the difference is %f\n",tol);
  if (tol <= 0.1)
  {
	  ROS_INFO("No Goal Correction is required\n");
	  return;
  }
  ROS_INFO("Goal Correction is required\n");

  double current_angle = -1.0 * M_PI;

  while(n.ok()){
    global_costmap_->getRobotPose(global_pose);

    current_angle = angles::normalize_angle(tf::getYaw(global_pose.getRotation()));
    double dist_left = angles::shortest_angular_distance(current_angle,yaw_req);
    ROS_INFO("Current angle = %f Required Angle = %f", current_angle,yaw_req);
    ROS_INFO("Shortest angle = %f",dist_left);

    //update the costmap copy that the world model holds
   // local_costmap_->getCostmapCopy(costmap_);
    global_costmap_->getCostmapCopy(costmap_);

    //check if that velocity is legal by forward simulating
    double sim_angle = 0.0;
    while(sim_angle < dist_left){
      std::vector<geometry_msgs::Point> oriented_footprint;
      double theta = tf::getYaw(global_pose.getRotation())+sim_angle;
      geometry_msgs::Point position;
      position.x = global_pose.getOrigin().x();
      position.y = global_pose.getOrigin().y();

      global_costmap_->getOrientedFootprint(position.x, position.y, theta, oriented_footprint);
     double footprint_cost = 1.0;
      //double footprint_cost = world_model_->footprintCost(position, oriented_footprint, local_costmap_->getInscribedRadius(), local_costmap_->getCircumscribedRadius());
      if(footprint_cost < 0.0){
        ROS_WARN("Rotation towards goal cannot take place because there is a potential collision. Cost: %.2f", footprint_cost);
        return;
      }

      sim_angle += sim_granularity_;
    }

    if (fabs(dist_left) < 0.1)
    	return;
    

    double vel = sqrt(2 * acc_lim_th_ * fabs(dist_left));

    if (dist_left <= 0.0) vel = -vel;
    vel = std::min(std::max(vel, -0.5),0.5);
    ROS_INFO("The required orientation is %f but the orientation of the robot is %f\n",yaw_req,current_angle);
    ROS_INFO("Velocity:: %f",vel);

    geometry_msgs::Twist cmd_vel;
    cmd_vel.linear.x = 0.0;
    cmd_vel.linear.y = 0.0;
    cmd_vel.angular.z = vel;

    vel_pub.publish(cmd_vel);

    r.sleep();
  }
}
bool TimedElasticBand::initTEBtoGoal(BidirIter path_start, BidirIter path_end, Fun fun_position, double max_vel_x, double max_vel_theta,
		      boost::optional<double> max_acc_x, boost::optional<double> max_acc_theta,
		      boost::optional<double> start_orientation, boost::optional<double> goal_orientation, int min_samples) 
{
    Eigen::Vector2d start_position = fun_position( *path_start );
    Eigen::Vector2d goal_position = fun_position( *boost::prior(path_end) );
    
    double start_orient, goal_orient;
    if (start_orientation)
    {
      start_orient = *start_orientation;
    }
    else
    {
      Eigen::Vector2d start2goal =  goal_position - start_position;
      start_orient = atan2(start2goal[1],start2goal[0]);
    }
    double timestep = 1; // TODO: time
    
    
    if (goal_orientation)
    {
      goal_orient = *goal_orientation;
    }
    else
    {
      goal_orient = start_orient;
    }
    
    if (!isInit())
    {	
      addPose(start_position, start_orient, true); // add starting point and mark it as fixed for optimization		

      // we insert middle points now (increase start by 1 and decrease goal by 1)
      std::advance(path_start,1);
      std::advance(path_end,-1);
      unsigned int idx=0;
      for (; path_start != path_end; ++path_start) // insert middle-points
      {
	//Eigen::Vector2d point_to_goal = path.back()-*it;
	//double dir_to_goal = atan2(point_to_goal[1],point_to_goal[0]); // direction to goal
	// Alternative: Direction from last path
	Eigen::Vector2d curr_point = fun_position(*path_start);
	Eigen::Vector2d diff_last = curr_point - Pose(idx).position(); // we do not use boost::prior(*path_start) for those cases,
								       // where fun_position() does not return a reference or is expensive.
	double diff_norm = diff_last.norm();
	
	double timestep_vel = diff_norm/max_vel_x; // constant velocity
	double timestep_acc;
	if (max_acc_x)
	{
		timestep_acc = sqrt(2*diff_norm/(*max_acc_x)); // constant acceleration
		if (timestep_vel < timestep_acc && max_acc_x) timestep = timestep_acc;
		else timestep = timestep_vel;
	}
	else timestep = timestep_vel;
	
	if (timestep<0) timestep=0.2; // TODO: this is an assumption
	
	addPoseAndTimeDiff(curr_point, atan2(diff_last[1],diff_last[0]) ,timestep);
	
	Eigen::Vector2d diff_next = fun_position(*boost::next(path_start))-curr_point; // TODO maybe store the boost::next for the following iteration
	double ang_diff = std::abs( g2o::normalize_theta( atan2(diff_next[1],diff_next[0])
						         -atan2(diff_last[1],diff_last[0]) ) );
	
	timestep_vel = ang_diff/max_vel_theta; // constant velocity
	if (max_acc_theta)
	{
		timestep_acc = sqrt(2*ang_diff/(*max_acc_theta)); // constant acceleration
		if (timestep_vel < timestep_acc) timestep = timestep_acc;
		else timestep = timestep_vel;
	}
	else timestep = timestep_vel;
	
	if (timestep<0) timestep=0.2; // TODO: this is an assumption
	
	addPoseAndTimeDiff(curr_point, atan2(diff_last[1],diff_last[0]) ,timestep);
	
	++idx;
      }
      Eigen::Vector2d diff = goal_position-Pose(idx).position();
      double diff_norm = diff.norm();
      double timestep_vel = diff_norm/max_vel_x; // constant velocity
      if (max_acc_x)
      {
	double timestep_acc = sqrt(2*diff_norm/(*max_acc_x)); // constant acceleration
	if (timestep_vel < timestep_acc) timestep = timestep_acc;
	else timestep = timestep_vel;
      }
      else timestep = timestep_vel;

      
      PoseSE2 goal(goal_position, goal_orient);
      
      // if number of samples is not larger than min_samples, insert manually
      if ( (int)sizePoses() < min_samples-1 )
      {
        ROS_DEBUG("initTEBtoGoal(): number of generated samples is less than specified by min_samples. Forcing the insertion of more samples...");
        while ((int)sizePoses() < min_samples-1) // subtract goal point that will be added later
        {
          // simple strategy: interpolate between the current pose and the goal
          addPoseAndTimeDiff( PoseSE2::average(BackPose(), goal), timestep ); // let the optimizer correct the timestep (TODO: better initialization)
        }
      }
      
      // now add goal
      addPoseAndTimeDiff(goal, timestep); // add goal point
      setPoseVertexFixed(sizePoses()-1,true); // GoalConf is a fixed constraint during optimization
    }
    else // size!=0
    {
      ROS_WARN("Cannot init TEB between given configuration and goal, because TEB vectors are not empty or TEB is already initialized (call this function before adding states yourself)!");
      ROS_WARN("Number of TEB configurations: %d, Number of TEB timediffs: %d",(unsigned int) sizePoses(),(unsigned int) sizeTimeDiffs());
      return false;
    }
    return true;
}  
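// A compact restatement of the per-segment timestep heuristic used above (the helper
// name is illustrative, not part of the original class): take the larger of the
// constant-velocity and constant-acceleration estimates, with the same 0.2 s fallback.
// Requires <cmath>, <algorithm> and boost/optional.hpp.
static double estimate_timestep(double dist, double max_vel,
                                const boost::optional<double>& max_acc)
{
    double dt = dist / max_vel;                                  // constant velocity
    if (max_acc)
        dt = std::max(dt, std::sqrt(2.0 * dist / (*max_acc)));   // constant acceleration
    return (dt < 0) ? 0.2 : dt;                                  // fallback used by the code above
}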
int main(int argc, char *argv[])
{
  ros::init(argc, argv, "scanner2d");
  scanner2d::Scanner2d scanner;
  std::string port_name;
  int scan_rate;
  int sample_rejection;
  int samples_per_scan;
  int min_angle;
  int max_angle;

  ROS_DEBUG("Welcome to scanner2d Node!");

  signal(SIGINT, sig_handler);
  
  ros::NodeHandle private_node_handle_("~");
  private_node_handle_.param("port_name", port_name, std::string("/dev/ttyACM0"));
  private_node_handle_.param("scan_rate", scan_rate, int(3));
  private_node_handle_.param("samples_per_scan", samples_per_scan, int(333));
  private_node_handle_.param("sample_rejection", sample_rejection, int(0));
  private_node_handle_.param("min_angle", min_angle, int(0));
  private_node_handle_.param("max_angle", max_angle, int(360));

  if (scan_rate*samples_per_scan > 1000) {
    ROS_WARN("The scan_rate * samples_per_scan exceeds the max sample rate (1000) of the sensor!");
    ROS_WARN("  you should either alter the settings from the command line invocation,");
    ROS_WARN("  or reconfigure the parameter server directly.");
  }

  scanner.open(port_name);

  switch(scan_rate)
  {
  case 1:
    scanner.setScanPeriod(1000);
    break;
  case 2:
    scanner.setScanPeriod(500);
    break;
  case 3:
    scanner.setScanPeriod(333);
    break;
  case 4:
    scanner.setScanPeriod(250);
    break;
  case 5:
    scanner.setScanPeriod(200);
    break;
  default:
    ROS_WARN("Invalid scan_rate!");
  }
  
  scanner.setSampleRejectionMode((bool)sample_rejection);
  scanner.setSamplesPerScan(samples_per_scan);
  scanner.setMinMaxAngle(min_angle,  max_angle);

  ros::Rate loop_rate(10);

  while (ros::ok()) 
  {
    scanner2d::Scanner2dStatus_t status;
    scanner.getStatus(&status);
    if (status.flags & 0x80) 
    {
      ROS_DEBUG("Got quit from scanner");
      ROS_DEBUG("  Status flags: 0x%x", status.flags);
      break;
    }

    if (got_ctrl_c) {
      ROS_WARN("Got Ctrl-C");
      scanner.stop();
      ros::Duration(0.5).sleep();
      scanner.close();
      break;
    }

    ros::spinOnce();
    loop_rate.sleep();
  }

  ROS_WARN("Exiting.");

  exit(0);
}
void planning_environment::KinematicModelStateMonitor::jointStateCallback(const sensor_msgs::JointStateConstPtr &joint_state)
{
  //bool change = !have_joint_state_;

  planning_models::KinematicState state(kmodel_);

  current_joint_values_lock_.lock();

  state.setKinematicState(current_joint_state_map_);

  static bool first_time = true;

  unsigned int n = joint_state->name.size();
  if (joint_state->name.size() != joint_state->position.size())
  {
    ROS_ERROR("Planning environment received invalid joint state");
    current_joint_values_lock_.unlock();
    return;
  }
  
  std::map<std::string, double> joint_state_map;
  for (unsigned int i = 0 ; i < n ; ++i)
  {
    joint_state_map[joint_state->name[i]] = joint_state->position[i];
  }

  std::vector<planning_models::KinematicState::JointState*>& joint_state_vector = state.getJointStateVector();
  
  for(std::vector<planning_models::KinematicState::JointState*>::iterator it = joint_state_vector.begin();
      it != joint_state_vector.end();
      it++) {
    bool tfSets = false;
    bool joint_state_sets = false;
    //see if we need to update any transforms
    std::string parent_frame_id = (*it)->getParentFrameId();
    std::string child_frame_id = (*it)->getChildFrameId();
    if(!parent_frame_id.empty() && !child_frame_id.empty()) {
      std::string err;
      ros::Time tm;
      tf::StampedTransform transf;
      bool ok = false;
      if (tf_->getLatestCommonTime(parent_frame_id, child_frame_id, tm, &err) == tf::NO_ERROR) {
        ok = true;
        try
        {
          tf_->lookupTransform(parent_frame_id, child_frame_id, tm, transf);
        }
        catch(tf::TransformException& ex)
        {
          ROS_ERROR("Unable to lookup transform from %s to %s.  Exception: %s", parent_frame_id.c_str(), child_frame_id.c_str(), ex.what());
          ok = false;
        }
      } else {
        ROS_DEBUG("Unable to lookup transform from %s to %s: no common time.", parent_frame_id.c_str(), child_frame_id.c_str());
        ok = false;
      }
      if(ok) {
        tfSets = (*it)->setJointStateValues(transf);
        if(tfSets) {
          const std::vector<std::string>& joint_state_names = (*it)->getJointStateNameOrder();
          for(std::vector<std::string>::const_iterator it = joint_state_names.begin();
              it != joint_state_names.end();
              it++) {
            last_joint_update_[*it] = tm;
          }
        }
        // tf::Transform transf = getKinematicModel()->getRoot()->variable_transform;
        // ROS_INFO_STREAM("transform is to " << transf.getRotation().x() << " " 
        //                 << transf.getRotation().y() << " z " << transf.getRotation().z()
        //                 << " w " << transf.getRotation().w());
        have_pose_ = tfSets;
        last_pose_update_ = tm;
      }
    }
    //now we update from joint state
    joint_state_sets = (*it)->setJointStateValues(joint_state_map);
    if(joint_state_sets) {
      const std::vector<std::string>& joint_state_names = (*it)->getJointStateNameOrder();
      for(std::vector<std::string>::const_iterator it = joint_state_names.begin();
          it != joint_state_names.end();
          it++) {
        last_joint_update_[*it] = joint_state->header.stamp;
      }
    }
  }
  
  state.updateKinematicLinks();
  state.getKinematicStateValues(current_joint_state_map_);

  if(allJointsUpdated()) {
    have_joint_state_ = true;
    last_joint_state_update_ = joint_state->header.stamp;
    
    if(!joint_state_map_cache_.empty()) {
      if(joint_state->header.stamp-joint_state_map_cache_.back().first > ros::Duration(joint_state_cache_allowed_difference_)) {
        ROS_DEBUG_STREAM("Introducing joint state cache sparsity time of " << (joint_state->header.stamp-joint_state_map_cache_.back().first).toSec());
      }
    }

    joint_state_map_cache_.push_back(std::pair<ros::Time, std::map<std::string, double> >(joint_state->header.stamp,
                                                                                          current_joint_state_map_));
  } 

  if(have_joint_state_) {
    while(1) {
      if(joint_state_map_cache_.empty()) {
        ROS_WARN("Empty joint state map cache");
        break;
      }
      if((ros::Time::now()-joint_state_map_cache_.front().first) > ros::Duration(joint_state_cache_time_)) {
        joint_state_map_cache_.pop_front();
      } else {
        break;
      }
    }
  }
    
  first_time = false;

  if(!allJointsUpdated(ros::Duration(1.0))) {
    if(!printed_out_of_date_) {
      ROS_WARN_STREAM("Got joint state update but did not update some joints for more than 1 second.  Turn on DEBUG for more info");
      printed_out_of_date_ = true;
    }
  } else {
    printed_out_of_date_ = false;
  }
  if(on_state_update_callback_ != NULL) {
    on_state_update_callback_(joint_state);
  }
  current_joint_values_lock_.unlock();
}
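The cache handling at the end of this callback is a time-windowed deque: a (stamp, joint map) pair is appended whenever all joints are up to date, and entries older than joint_state_cache_time_ are dropped from the front. A stripped-down sketch of that pruning step, assuming only the standard library and ros::Time:

#include <deque>
#include <map>
#include <string>
#include <ros/time.h>

// Sketch: prune a time-stamped cache so it only spans the last window_sec seconds.
typedef std::pair<ros::Time, std::map<std::string, double> > StampedJointMap;

void pruneJointStateCache(std::deque<StampedJointMap> &cache, double window_sec)
{
  while (!cache.empty() &&
         (ros::Time::now() - cache.front().first) > ros::Duration(window_sec)) {
    cache.pop_front();  // oldest entries leave first
  }
}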
Example #12
    //  Callback to register with tf::MessageFilter to be called when transforms are available
    void msgCallback(const boost::shared_ptr<const movingobstaclesrhc::Coil>& coil_ptr)
    {
        tf::StampedTransform transformb, transform_auxiliar;
        geometry_msgs::PointStamped point_out, point_coil;
        double tfx, tfy;
        std::string t_frame;
        if ((coil_ptr->header.frame_id=="metal_detector/right_coil")||(coil_ptr->header.frame_id=="right_coil"))
            t_frame = "right_coil";
        else if((coil_ptr->header.frame_id=="metal_detector/left_coil")||(coil_ptr->header.frame_id=="left_coil"))
            t_frame = "left_coil";
        else
            t_frame = "middle_coil";

        try{
            tf_.lookupTransform(target_frame_, t_frame.c_str(), ros::Time(0), transformb);
            point_out.point.x = transformb.getOrigin().x();
            point_out.point.y = transformb.getOrigin().y();
            point_out.point.z = transformb.getOrigin().z();
        }catch (const tf::TransformException& ex){
            ROS_ERROR("%s",ex.what());
        }
        try{
            tf_.lookupTransform(target_frame_, "/base_link", ros::Time(0), transform_auxiliar);
        }catch (const tf::TransformException& ex){
            ROS_ERROR("%s",ex.what());
        }
        try{
            tf_.lookupTransform("/base_link", t_frame.c_str(), ros::Time(0), transformb);
        }catch (const tf::TransformException& ex){
            ROS_ERROR("%s",ex.what());
        }
        tf::Transform transform_coil = transform_auxiliar*transformb;
        point_coil.point.x = transform_coil.getOrigin().x();
        point_coil.point.y = transform_coil.getOrigin().y();
        point_coil.point.z = transform_coil.getOrigin().z();

        try
        {
            tfx = point_coil.point.x;
            tfy = point_coil.point.y;
            printf("transform coil: x [%f] y [%f]\n", tfx, tfy);
//		if (coil_ptr->header.frame_id=="left_coil" && coil_ptr->channel[0]<-157800){
//			CCD->coilpeak=true;
//			CCD->tocka.x=tfx;
//			CCD->tocka.y=tfy;
//		}
//		if (coil_ptr->header.frame_id=="right_coil" && coil_ptr->channel[0]<-214800){
//			CCD->coilpeak=true;
//			CCD->tockaR.x=tfx;
//			CCD->tockaR.y=tfy;
//		}
            CCD->setCoverageOnCoil( tfx, tfy, 200.); //100 mm around point will be set


        }
        catch (tf::TransformException &ex)
        {
            ROS_WARN("Failure %s\n", ex.what());
        }
    }
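The coil position is computed twice here: once by looking up target_frame -> coil directly, and once by composing target_frame -> base_link with base_link -> coil; multiplying the two stamped transforms chains the frames. A short sketch of just that composition, assuming a tf::TransformListener named listener and the same frame names as above:

// Sketch: compose target_frame -> base_link with base_link -> coil
// to obtain the coil position expressed in target_frame.
tf::StampedTransform target_to_base, base_to_coil;
try {
    listener.lookupTransform(target_frame_, "/base_link", ros::Time(0), target_to_base);
    listener.lookupTransform("/base_link", t_frame, ros::Time(0), base_to_coil);
    tf::Transform target_to_coil = target_to_base * base_to_coil;
    ROS_INFO("coil in %s: x=%f y=%f", target_frame_.c_str(),
             target_to_coil.getOrigin().x(), target_to_coil.getOrigin().y());
} catch (const tf::TransformException &ex) {
    ROS_ERROR("%s", ex.what());
}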
Example #13
void ChompOptimizer::optimize()
{
  ros::WallTime start_time = ros::WallTime::now();
  double averageCostVelocity = 0.0;
  int currentCostIter = 0;
  int costWindow = 10;
  std::vector<double>costs(costWindow, 0.0);
  double minimaThreshold = 0.05;
  bool should_break_out = false;

  // if(parameters_->getAnimatePath())
  // {
  //   animatePath();
  // }

  // iterate
  for(iteration_ = 0; iteration_ < parameters_->getMaxIterations(); iteration_++)
  {
    ros::WallTime for_time = ros::WallTime::now();
    performForwardKinematics();
    //ROS_INFO_STREAM("Forward kinematics took " << (ros::WallTime::now()-for_time));
    double cCost = getCollisionCost();
    double sCost = getSmoothnessCost();
    double cost = cCost + sCost;

    //ROS_INFO_STREAM("Collision cost " << cCost << " smoothness cost " << sCost);

    // if(parameters_->getAddRandomness() && currentCostIter != -1)
    // {
    //   costs[currentCostIter] = cCost;
    //   currentCostIter++;

    //   if(currentCostIter >= costWindow)
    //   {
    //     for(int i = 1; i < costWindow; i++)
    //     {
    //       averageCostVelocity += (costs.at(i) - costs.at(i - 1));
    //     }

    //     averageCostVelocity /= (double)(costWindow);
    //     currentCostIter = -1;
    //   }
    // }
    if(iteration_ == 0)
    {
      best_group_trajectory_ = group_trajectory_.getTrajectory();
      best_group_trajectory_cost_ = cost;
    }
    else
    {
      if(cost < best_group_trajectory_cost_)
      {
        best_group_trajectory_ = group_trajectory_.getTrajectory();
        best_group_trajectory_cost_ = cost;
        last_improvement_iteration_ = iteration_;
      }
    }
    calculateSmoothnessIncrements();
    ros::WallTime coll_time = ros::WallTime::now();
    calculateCollisionIncrements();
    ROS_DEBUG_STREAM("Collision increments took " << (ros::WallTime::now()-coll_time));
    calculateTotalIncrements();

    // if(!parameters_->getUseHamiltonianMonteCarlo())
    // {
    //   // non-stochastic version:
    addIncrementsToTrajectory();
    // }
    // else
    // {
    //   // hamiltonian monte carlo updates:
    //   getRandomMomentum();
    //   updateMomentum();
    //   updatePositionFromMomentum();
    //   stochasticity_factor_ *= parameters_->getHmcAnnealingFactor();
    // }
    handleJointLimits();
    updateFullTrajectory();

    if(iteration_ % 10 == 0)
    {
      if(isCurrentTrajectoryMeshToMeshCollisionFree()) {
        num_collision_free_iterations_ = 0;
        ROS_INFO("Chomp Got mesh to mesh safety at iter %d. Breaking out early.", iteration_);
        is_collision_free_ = true;
        iteration_++;
        should_break_out = true;
      }
      // } else if(safety == CollisionProximitySpace::InCollisionSafe) {

      // ROS_DEBUG("Trajectory cost: %f (s=%f, c=%f)", getTrajectoryCost(), getSmoothnessCost(), getCollisionCost());
      // CollisionProximitySpace::TrajectorySafety safety = checkCurrentIterValidity();
      // if(safety == CollisionProximitySpace::MeshToMeshSafe)
      // {
      //   num_collision_free_iterations_ = 0;
      //   ROS_INFO("Chomp Got mesh to mesh safety at iter %d. Breaking out early.", iteration_);
      //   is_collision_free_ = true;
      //   iteration_++;
      //   should_break_out = true;
      // } else if(safety == CollisionProximitySpace::InCollisionSafe) {
      //   num_collision_free_iterations_ = parameters_->getMaxIterationsAfterCollisionFree();
      //   ROS_INFO("Chomp Got in collision safety at iter %d. Breaking out soon.", iteration_);
      //   is_collision_free_ = true;
      //   iteration_++;
      //   should_break_out = true;
      // }
      // else
      // {
      //   is_collision_free_ = false;
      // }
    }

    if(!parameters_->getFilterMode())
    {
      if(cCost < parameters_->getCollisionThreshold())
      {
        num_collision_free_iterations_ = parameters_->getMaxIterationsAfterCollisionFree();
        is_collision_free_ = true;
        iteration_++;
        should_break_out = true;
      } else {
        //ROS_INFO_STREAM("cCost " << cCost << " over threshold " << parameters_->getCollisionThreshold());
      }
    }

    if((ros::WallTime::now() - start_time).toSec() > parameters_->getPlanningTimeLimit() && !parameters_->getAnimatePath() && !parameters_->getAnimateEndeffector())
    {
      ROS_WARN("Breaking out early due to time limit constraints.");
      break;
    }

    // if(fabs(averageCostVelocity) < minimaThreshold && currentCostIter == -1 && !is_collision_free_ && parameters_->getAddRandomness())
    // {
    //   ROS_INFO("Detected local minima. Attempting to break out!");
    //   int iter = 0;
    //   bool success = false;
    //   while(iter < 20 && !success)
    //   {
    //     performForwardKinematics();
    //     double original_cost = getTrajectoryCost();
    //     group_trajectory_backup_ = group_trajectory_.getTrajectory();
    //     perturbTrajectory();
    //     handleJointLimits();
    //     updateFullTrajectory();
    //     performForwardKinematics();
    //     double new_cost = getTrajectoryCost();
    //     iter ++;
    //     if(new_cost < original_cost)
    //     {
    //       ROS_INFO("Got out of minimum in %d iters!", iter);
    //       averageCostVelocity = 0.0;
    //       currentCostIter = 0;
    //       success = true;
    //     }
    //     else
    //     {
    //       group_trajectory_.getTrajectory() = group_trajectory_backup_;
    //       updateFullTrajectory();
    //       currentCostIter = 0;
    //       averageCostVelocity = 0.0;
    //       success = false;
    //     }

    //   }

    //   if(!success)
    //   {
    //     ROS_INFO("Failed to exit minimum!");
    //   }
    //}
    else if(currentCostIter == -1)
    {
      currentCostIter = 0;
      averageCostVelocity = 0.0;
    }

    // if(parameters_->getAnimateEndeffector())
    // {
    //   animateEndeffector();
    // }

    // if(parameters_->getAnimatePath() && iteration_ % 25 == 0)
    // {
    //   animatePath();
    // }

    if(should_break_out)
    {
      collision_free_iteration_++;
      if(num_collision_free_iterations_ == 0) {
        break;
      } else if(collision_free_iteration_ > num_collision_free_iterations_)
      {
        // CollisionProximitySpace::TrajectorySafety safety = checkCurrentIterValidity();
        // if(safety != CollisionProximitySpace::MeshToMeshSafe &&
        //    safety != CollisionProximitySpace::InCollisionSafe) {
        //   ROS_WARN_STREAM("Apparently regressed");
        // }
        break;
      }
    }
  }

  if(is_collision_free_)
  {
    ROS_INFO("Chomp path is collision free");
  }
  else
  {
    ROS_ERROR("Chomp path is not collision free!");
  }

  // if(parameters_->getAnimatePath())
  // {
  //   animatePath();
  // }

  group_trajectory_.getTrajectory() = best_group_trajectory_;
  updateFullTrajectory();

  // if(parameters_->getAnimatePath())
  //   animatePath();

  ROS_INFO("Terminated after %d iterations, using path from iteration %d", iteration_, last_improvement_iteration_);
  ROS_INFO("Optimization core finished in %f sec", (ros::WallTime::now() - start_time).toSec() );
  ROS_INFO_STREAM("Time per iteration " << (ros::WallTime::now() - start_time).toSec()/(iteration_*1.0));
}
Example #14
//main ros code
int main(int argc, char **argv)
{
    //setup topic names
    std::string twist_topic;
    
    //setup node and image transport
    ros::init(argc, argv, "hardware_interface");

    ros::NodeHandle node;


    //setup dynamic reconfigure gui
    dynamic_reconfigure::Server<hardware_interface::hardware_interface_paramsConfig> srv;
    dynamic_reconfigure::Server<hardware_interface::hardware_interface_paramsConfig>::CallbackType f;
    f = boost::bind(&setparamsCallback, _1, _2);
    srv.setCallback(f);
    
    
    //Initialize serial port
    motor_port.serial_open(params.serial_port.c_str(), 19200);
    
    
    //create broadcaster
    tf::TransformBroadcaster odom_broadcaster;
    
    //initialize odometry
    g_odom_x = 0.0;
    g_odom_y = 0.0;
    g_odom_th = 0.0;
    
    g_vx = 0.0;
    g_vy = 0.0;
    g_v_theta = 0.0;
    
    //initialize variables for max accel
    for(int i = 0; i < 3; i++)
    {
        last_wheel_speed_[i] = 0;
    }
    
    vel_last_time_ = ros::Time::now();
    
    //get twist topic name
    twist_topic = node.resolveName("twist");

    //check to see if the user has remapped the twist topic
    if (twist_topic == "/twist") 
    {
        ROS_WARN("hardware_interface: twist has not been remapped! Typical command-line usage:\n"
                 "\t$ ./hardware_interface twist:=<image topic> [transport]");
    }
    
    
    
    //create twist subscription
    twist_sub = node.subscribe( twist_topic , 1, motionCallback  );
    
    //advertise publishers
    sensor_pub = node.advertise<hardware_interface::BaseData>("hardware_interface/sensor_data", 5);
    
    odom_pub = node.advertise<nav_msgs::Odometry>("/odom", 1000);
    
    
    //collect motor base sensor data at this rate
    ros::Rate loop_rate(SENSOR_RATE);
    
    while (ros::ok())
    {
        //check callbacks
        ros::spinOnce();
        
        //handle timeout
        if( watchdog_timer_ < ros::Time::now() && motors_enabled_)
        {
            //have not received a new message, so stop the robot
            ROS_WARN("Watchdog timed out!");
            //killMotors();
            motors_enabled_ = false;
        }
        
        //collect and publish sensor data
        publishSensorData();
        
        //update the odometry message
        updateOdomMsg(g_vx, g_vy, g_v_theta);
        
        //send the transform
        odom_broadcaster.sendTransform(odom_trans);
        //publish odom message
        odom_pub.publish(odom_msg);
        
        //wait for next spin
        loop_rate.sleep();
    }
    
    return 0;
}
Example #15
static void NewBuffer_callback (ArvStream *pStream, ApplicationData *pApplicationdata)
{
	static uint64_t  cm = 0L;	// Camera time prev
	uint64_t  		 cn = 0L;	// Camera time now

#ifdef TUNING			
	static uint64_t  rm = 0L;	// ROS time prev
#endif
	uint64_t  		 rn = 0L;	// ROS time now

	static uint64_t	 tm = 0L;	// Calculated image time prev
	uint64_t		 tn = 0L;	// Calculated image time now
		
	static int64_t   em = 0L;	// Error prev.
	int64_t  		 en = 0L;	// Error now between calculated image time and ROS time.
	int64_t  		 de = 0L;	// derivative.
	int64_t  		 ie = 0L;	// integral.
	int64_t			 u = 0L;	// Output of controller.
	
	int64_t			 kp1 = 0L;		// Fractional gains in integer form.
	int64_t			 kp2 = 1024L;
	int64_t			 kd1 = 0L;
	int64_t			 kd2 = 1024L;
	int64_t			 ki1 = -1L;		// A gentle pull toward zero.
	int64_t			 ki2 = 1024L;

	static uint32_t	 iFrame = 0;	// Frame counter.
    
	ArvBuffer		*pBuffer;

	
#ifdef TUNING			
	std_msgs::Int64  msgInt64;
	int 			 kp = 0;
	int 			 kd = 0;
	int 			 ki = 0;
    
	if (global.phNode->hasParam(ros::this_node::getName()+"/kp"))
	{
		global.phNode->getParam(ros::this_node::getName()+"/kp", kp);
		kp1 = kp;
	}
	
	if (global.phNode->hasParam(ros::this_node::getName()+"/kd"))
	{
		global.phNode->getParam(ros::this_node::getName()+"/kd", kd);
		kd1 = kd;
	}
	
	if (global.phNode->hasParam(ros::this_node::getName()+"/ki"))
	{
		global.phNode->getParam(ros::this_node::getName()+"/ki", ki);
		ki1 = ki;
	}
#endif
	
    pBuffer = arv_stream_try_pop_buffer (pStream);
    if (pBuffer != NULL) 
    {
        if (arv_buffer_get_status(pBuffer) == ARV_BUFFER_STATUS_SUCCESS) 
        {
			sensor_msgs::Image msg;
			pApplicationdata->nBuffers++;
			size_t currSize = arv_buffer_get_image_width(pBuffer) * arv_buffer_get_image_height(pBuffer) * global.nBytesPixel;
			std::vector<uint8_t> this_data(currSize);
			memcpy(&this_data[0], arv_buffer_get_data(pBuffer, &currSize), currSize);


			// Camera/ROS Timestamp coordination.
			cn				= (uint64_t)arv_buffer_get_timestamp(pBuffer);				// Camera now
			rn	 			= ros::Time::now().toNSec();					// ROS now
			
			if (iFrame < 10)
			{
				cm = cn;
				tm  = rn;
			}
			
			// Control the error between the computed image timestamp and the ROS timestamp.
			en = (int64_t)tm + (int64_t)cn - (int64_t)cm - (int64_t)rn; // i.e. tn-rn, but calced from prior values.
			de = en-em;
			ie += en;
			u = kp1*(en/kp2) + ki1*(ie/ki2) + kd1*(de/kd2);  // kp<0, ki<0, kd>0
			
			// Compute the new timestamp.
			tn = (uint64_t)((int64_t)tm + (int64_t)cn-(int64_t)cm + u);

#ifdef TUNING			
			ROS_WARN("en=%16ld, ie=%16ld, de=%16ld, u=%16ld + %16ld + %16ld = %16ld", en, ie, de, kp1*(en/kp2), ki1*(ie/ki2), kd1*(de/kd2), u);
			ROS_WARN("cn=%16lu, rn=%16lu, cn-cm=%8ld, rn-rm=%8ld, tn-tm=%8ld, tn-rn=%ld", cn, rn, cn-cm, rn-rm, (int64_t)tn-(int64_t)tm, tn-rn);
			msgInt64.data = tn-rn; //cn-cm+tn-tm; //
			global.ppubInt64->publish(msgInt64);
			rm = rn;
#endif
			
			// Save prior values.
			cm = cn;
			tm = tn;
			em = en;
			
			// Construct the image message.
			msg.header.stamp.fromNSec(tn);
			msg.header.seq = arv_buffer_get_frame_id(pBuffer);
			msg.header.frame_id = global.config.frame_id;
			msg.width = global.widthRoi;
			msg.height = global.heightRoi;
			msg.encoding = global.pszPixelformat;
			msg.step = msg.width * global.nBytesPixel;
			msg.data = this_data;

			// get current CameraInfo data
			global.camerainfo = global.pCameraInfoManager->getCameraInfo();
			global.camerainfo.header.stamp = msg.header.stamp;
			global.camerainfo.header.seq = msg.header.seq;
			global.camerainfo.header.frame_id = msg.header.frame_id;
			global.camerainfo.width = global.widthRoi;
			global.camerainfo.height = global.heightRoi;

			global.publisher.publish(msg, global.camerainfo);
				
        }
        else
        	ROS_WARN ("Frame error: %s", szBufferStatusFromInt[arv_buffer_get_status(pBuffer)]);
	 
	 			arv_stream_push_buffer (pStream, pBuffer);
        iFrame++;
    }
} // NewBuffer_callback()
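The timestamp handling in this callback is a small discrete PID controller on the error between the camera-derived image time and ROS time, with gains expressed as integer fractions (kp1/kp2, ki1/ki2, kd1/kd2) so everything stays in 64-bit integer arithmetic. A compact sketch of the same update law, kept in nanoseconds; the struct and its name are illustrative only:

// Sketch of the PID update used above for timestamp correction (all values in ns).
struct StampController {
  int64_t ie;  // accumulated (integral) error
  int64_t em;  // previous error
  StampController() : ie(0), em(0) {}

  // Gains as in the callback: kp = 0/1024, kd = 0/1024, ki = -1/1024 (a gentle pull toward zero).
  int64_t update(int64_t en) {
    const int64_t kp1 = 0, kp2 = 1024, kd1 = 0, kd2 = 1024, ki1 = -1, ki2 = 1024;
    const int64_t de = en - em;   // derivative of the error
    ie += en;                     // integral of the error
    em = en;
    return kp1*(en/kp2) + ki1*(ie/ki2) + kd1*(de/kd2);
  }
};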
Example #16
  // callback function for imu data
  void OdomEstimationNode::imuCallback(const ImuConstPtr& imu)
  {
    imu_callback_counter_++;

    assert(imu_used_);

    // receive data 
    imu_stamp_ = imu->header.stamp;
    tf::Quaternion orientation;
    quaternionMsgToTF(imu->orientation, orientation);
    imu_meas_ = tf::Transform(orientation, tf::Vector3(0,0,0));
    for (unsigned int i=0; i<3; i++)
      for (unsigned int j=0; j<3; j++)
        imu_covariance_(i+1, j+1) = imu->orientation_covariance[3*i+j];

    // Transforms imu data to base_footprint frame
    if (!robot_state_.waitForTransform("base_footprint", imu->header.frame_id, imu_stamp_, ros::Duration(0.5))){
      // warn when imu was already activated, not when imu is not active yet
      if (imu_active_)
        ROS_ERROR("Could not transform imu message from %s to base_footprint", imu->header.frame_id.c_str());
      else if (my_filter_.isInitialized())
        ROS_WARN("Could not transform imu message from %s to base_footprint. Imu will not be activated yet.", imu->header.frame_id.c_str());
      else 
        ROS_DEBUG("Could not transform imu message from %s to base_footprint. Imu will not be activated yet.", imu->header.frame_id.c_str());
      return;
    }
    StampedTransform base_imu_offset;
    robot_state_.lookupTransform("base_footprint", imu->header.frame_id, imu_stamp_, base_imu_offset);
    imu_meas_ = imu_meas_ * base_imu_offset;

    imu_time_  = Time::now();

    // manually set covariance until the imu sends covariance
    if (imu_covariance_(1,1) == 0.0){
      SymmetricMatrix measNoiseImu_Cov(3);  measNoiseImu_Cov = 0;
      measNoiseImu_Cov(1,1) = pow(0.00017,2);  // = 0.01 degrees / sec
      measNoiseImu_Cov(2,2) = pow(0.00017,2);  // = 0.01 degrees / sec
      measNoiseImu_Cov(3,3) = pow(0.00017,2);  // = 0.01 degrees / sec
      imu_covariance_ = measNoiseImu_Cov;
    }

    my_filter_.addMeasurement(StampedTransform(imu_meas_.inverse(), imu_stamp_, "base_footprint", "imu"), imu_covariance_);
    
    // activate imu
    if (!imu_active_) {
      if (!imu_initializing_){
        imu_initializing_ = true;
        imu_init_stamp_ = imu_stamp_;
        ROS_INFO("Initializing Imu sensor");
      }
      if ( filter_stamp_ >= imu_init_stamp_){
        imu_active_ = true;
        imu_initializing_ = false;
        ROS_INFO("Imu sensor activated");
      }
      else ROS_DEBUG("Waiting to activate IMU, because IMU measurements are still %f sec in the future.",
                     (imu_init_stamp_ - filter_stamp_).toSec());
    }
    
    if (debug_){
      // write to file
      double tmp, yaw;
      imu_meas_.getBasis().getEulerYPR(yaw, tmp, tmp); 
      imu_file_ << yaw << endl;
    }
  };
Example #17
// WriteCameraFeaturesFromRosparam()
// Read ROS parameters from this node's namespace, and see if each parameter has a similarly named & typed feature in the camera.  Then set the
// camera feature to that value.  For example, if the parameter camnode/Gain is set to 123.0, then we'll write 123.0 to the Gain feature
// in the camera.
//
// Note that the datatype of the parameter *must* match the datatype of the camera feature, and this can be determined by
// looking at the camera's XML file.  Camera enums are string parameters, camera bools are false/true parameters (not 0/1),
// integers are integers, doubles are doubles, etc.
//
void WriteCameraFeaturesFromRosparam(void)
{
	XmlRpc::XmlRpcValue	 			 xmlrpcParams;
	XmlRpc::XmlRpcValue::iterator	 iter;
    ArvGcNode						*pGcNode;
	GError							*error=NULL;


	global.phNode->getParam (ros::this_node::getName(), xmlrpcParams);


	if (xmlrpcParams.getType() == XmlRpc::XmlRpcValue::TypeStruct)
	{
		for (iter=xmlrpcParams.begin(); iter!=xmlrpcParams.end(); iter++)
		{
			std::string		key = iter->first;

			pGcNode = arv_device_get_feature (global.pDevice, key.c_str());
			if (pGcNode && arv_gc_feature_node_is_implemented (ARV_GC_FEATURE_NODE (pGcNode), &error))
			{
//				unsigned long	typeValue = arv_gc_feature_node_get_value_type((ArvGcFeatureNode *)pGcNode);
//				ROS_INFO("%s cameratype=%lu, rosparamtype=%d", key.c_str(), typeValue, static_cast<int>(iter->second.getType()));

				// We'd like to check the value types too, but typeValue is often given as G_TYPE_INVALID, so ignore it.
				switch (iter->second.getType())
				{
					case XmlRpc::XmlRpcValue::TypeBoolean://if ((iter->second.getType()==XmlRpc::XmlRpcValue::TypeBoolean))// && (typeValue==G_TYPE_INT64))
					{
						int			value = (bool)iter->second;
						arv_device_set_integer_feature_value(global.pDevice, key.c_str(), value);
						ROS_INFO("Read parameter (bool) %s: %d", key.c_str(), value);
					}
					break;

					case XmlRpc::XmlRpcValue::TypeInt: //if ((iter->second.getType()==XmlRpc::XmlRpcValue::TypeInt))// && (typeValue==G_TYPE_INT64))
					{
						int			value = (int)iter->second;
						arv_device_set_integer_feature_value(global.pDevice, key.c_str(), value);
						ROS_INFO("Read parameter (int) %s: %d", key.c_str(), value);
					}
					break;

					case XmlRpc::XmlRpcValue::TypeDouble: //if ((iter->second.getType()==XmlRpc::XmlRpcValue::TypeDouble))// && (typeValue==G_TYPE_DOUBLE))
					{
						double		value = (double)iter->second;
						arv_device_set_float_feature_value(global.pDevice, key.c_str(), value);
						ROS_INFO("Read parameter (float) %s: %f", key.c_str(), value);
					}
					break;

					case XmlRpc::XmlRpcValue::TypeString: //if ((iter->second.getType()==XmlRpc::XmlRpcValue::TypeString))// && (typeValue==G_TYPE_STRING))
					{
						std::string	value = (std::string)iter->second;
						arv_device_set_string_feature_value(global.pDevice, key.c_str(), value.c_str());
						ROS_INFO("Read parameter (string) %s: %s", key.c_str(), value.c_str());
					}
					break;

					case XmlRpc::XmlRpcValue::TypeInvalid:
					case XmlRpc::XmlRpcValue::TypeDateTime:
					case XmlRpc::XmlRpcValue::TypeBase64:
					case XmlRpc::XmlRpcValue::TypeArray:
					case XmlRpc::XmlRpcValue::TypeStruct:
					default:
						ROS_WARN("Unhandled rosparam type in WriteCameraFeaturesFromRosparam()");
				}
			}
		}
	}
} // WriteCameraFeaturesFromRosparam()
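A hypothetical example of seeding parameters that this function will then forward to the camera; Gain and TriggerMode are placeholders for whatever features the camera's XML actually exposes, and the types follow the rule in the comment above (doubles for float features, strings for enums):

// Hypothetical: set parameters in this node's namespace so that
// WriteCameraFeaturesFromRosparam() writes them to the camera on its next call.
ros::NodeHandle pnh(ros::this_node::getName());
pnh.setParam("Gain", 123.0);                      // double parameter -> float camera feature
pnh.setParam("TriggerMode", std::string("Off"));  // string parameter -> enum camera feature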
Example #18
  // filter loop
  void OdomEstimationNode::spin(const ros::TimerEvent& e)
  {
    ROS_DEBUG("Spin function at time %f", ros::Time::now().toSec());

    // check for timing problems
    if ( (odom_initializing_ || odom_active_) && (imu_initializing_ || imu_active_) ){
      double diff = fabs( Duration(odom_stamp_ - imu_stamp_).toSec() );
      if (diff > 1.0) ROS_ERROR("Timestamps of odometry and imu are %f seconds apart.", diff);
    }
    
    // initial value for filter stamp; keep this stamp when no sensors are active
    filter_stamp_ = Time::now();
    
    // check which sensors are still active
    if ((odom_active_ || odom_initializing_) && 
        (Time::now() - odom_time_).toSec() > timeout_){
      odom_active_ = false; odom_initializing_ = false;
      ROS_INFO("Odom sensor not active any more");
    }
    if ((imu_active_ || imu_initializing_) && 
        (Time::now() - imu_time_).toSec() > timeout_){
      imu_active_ = false;  imu_initializing_ = false;
      ROS_INFO("Imu sensor not active any more");
    }
    if ((vo_active_ || vo_initializing_) && 
        (Time::now() - vo_time_).toSec() > timeout_){
      vo_active_ = false;  vo_initializing_ = false;
      ROS_INFO("VO sensor not active any more");
    }
    
    // only update filter when one of the sensors is active
    if (odom_active_ || imu_active_ || vo_active_){
      
      // update filter at time where all sensor measurements are available
      if (odom_active_)  filter_stamp_ = min(filter_stamp_, odom_stamp_);
      if (imu_active_)   filter_stamp_ = min(filter_stamp_, imu_stamp_);
      if (vo_active_)    filter_stamp_ = min(filter_stamp_, vo_stamp_);
      
      // update filter
      if ( my_filter_.isInitialized() )  {
        bool diagnostics = true;
        if (my_filter_.update(odom_active_, imu_active_, vo_active_,  filter_stamp_, diagnostics)){
          
          // output most recent estimate and relative covariance
          my_filter_.getEstimate(output_);
          pose_pub_.publish(output_);
          ekf_sent_counter_++;
          
          // broadcast most recent estimate to TransformArray
          StampedTransform tmp;
          my_filter_.getEstimate(ros::Time(), tmp);
          if(!vo_active_)
            tmp.getOrigin().setZ(0.0);
          odom_broadcaster_.sendTransform(StampedTransform(tmp, tmp.stamp_, output_frame_, "base_footprint"));
          
          if (debug_){
            // write to file
            ColumnVector estimate; 
            my_filter_.getEstimate(estimate);
            for (unsigned int i=1; i<=6; i++)
              corr_file_ << estimate(i) << " ";
            corr_file_ << endl;
          }
        }
        if (self_diagnose_ && !diagnostics)
          ROS_WARN("Robot pose ekf diagnostics discovered a potential problem");
      }
      // initialize filter with the odometry frame
      if ( odom_active_ && !my_filter_.isInitialized()){
        my_filter_.initialize(odom_meas_, odom_stamp_);
        ROS_INFO("Kalman filter initialized with odom measurement");
      }
      else if ( vo_active_ && !my_filter_.isInitialized()){
        my_filter_.initialize(vo_meas_, vo_stamp_);
        ROS_INFO("Kalman filter initialized with vo measurement");
      }
    }
  };
int main(int argc, char** argv) {
    ros::init(argc, argv, "coordinator"); // name this node 
    ros::NodeHandle nh; //standard ros node handle    
    
    ros::Subscriber alexa_code = nh.subscribe("/Alexa_codes", 1, alexaCB);
    
    actionlib::SimpleActionClient<object_finder_gamma::objectFinderAction> object_finder_ac("objectFinderActionServer", true);
    actionlib::SimpleActionClient<object_grabber_gamma::object_grabberAction> object_grabber_ac("objectGrabberActionServer", true);
    actionlib::SimpleActionClient<navigator_gamma::navigatorAction> navigator_ac("navigatorActionServer", true);

    // attempt to connect to the object_finder server:
    ROS_INFO("waiting for server: ");
    bool server_exists = false;
    while ((!server_exists)&&(ros::ok())) {
        server_exists = object_finder_ac.waitForServer(ros::Duration(0.5)); // 
        ros::spinOnce();
        ros::Duration(0.5).sleep();
        ROS_INFO("retrying...");
    }
    ROS_INFO("connected to object_finder action server"); // if here, then we are connected to the server;

    //connect to the object_grabber server
    server_exists=false;
    while ((!server_exists)&&(ros::ok())) {
        server_exists = object_grabber_ac.waitForServer(ros::Duration(0.5)); // 
        ros::spinOnce();
        ros::Duration(0.5).sleep();
        ROS_INFO("retrying...");
    }
    ROS_INFO("connected to object_grabber action server"); // if here, then we are connected to the server; 
    
    // attempt to connect to the navigator server:
    ROS_INFO("waiting for server: ");
    server_exists = false;
    while ((!server_exists)&&(ros::ok())) {
        server_exists = navigator_ac.waitForServer(ros::Duration(0.5)); // 
        ros::spinOnce();
        ros::Duration(0.5).sleep();
        ROS_INFO("retrying...");
    }
    ROS_INFO("connected to navigator action server"); // if here, then we are connected to the server; 

    //specifications for what we are seeking:
    object_finder_gamma::objectFinderGoal object_finder_goal;   
    object_grabber_gamma::object_grabberGoal object_grabber_goal;
    navigator_gamma::navigatorGoal navigation_goal;
    
    bool finished_before_timeout;
    
    //ALL SET UP; WAITING FOR TRIGGER
    //wait for the Alexa trigger:
    ROS_INFO("waiting for Alexa code: rostopic pub Alexa_codes std_msgs/UInt32 100");
    while(ros::ok()) {
    	if (!g_get_coke_trigger) {
        	ros::Duration(0.5).sleep();
        	ros::spinOnce();
    	} else {
        	g_get_coke_trigger=false; // reset the trigger

		    //  IF HERE, START THE FETCH BEHAVIOR!!
		    
		    // use navigator to move to the table
		    ROS_INFO("sending navigation goal: TABLE");
		    navigation_goal.location_code = navigator_gamma::navigatorGoal::TABLE; //send robot to TABLE
		    navigator_ac.sendGoal(navigation_goal,&navigatorDoneCb); // we could also name additional callback functions here, if desired
		    finished_before_timeout = navigator_ac.waitForResult(ros::Duration(30.0));
		    //bool finished_before_timeout = action_client.waitForResult(); // wait forever...
		    if (!finished_before_timeout) {
		        ROS_WARN("giving up waiting on result ");
		        return 1;
		    }
		    //SHOULD do error checking here before proceeding...
		    if (g_navigator_rtn_code != navigator_gamma::navigatorResult::DESIRED_POSE_ACHIEVED) {
		        ROS_WARN("COULD NOT REACH TABLE; QUITTING");
		        return 1;
		    }
		    


		    // if here, we have reached the table, now use object_finder to find the coke can
		    object_finder_goal.object_id=object_finder_gamma::objectFinderGoal::COKE_CAN_UPRIGHT; //specify object of interest
		    object_finder_goal.known_surface_ht = true; //we'll say we know the table height
		    object_finder_goal.surface_ht = 0.05;  // and specify the height, relative to torso; //TODO: TUNE THIS
		    
		    //try to find the object:
		    object_finder_ac.sendGoal(object_finder_goal,&objectFinderDoneCb); 
		    //decide how long we will wait
		    finished_before_timeout = object_finder_ac.waitForResult(ros::Duration(5.0));
		    //bool finished_before_timeout = action_client.waitForResult(); // wait forever...
		    if (!finished_before_timeout) {
		        ROS_WARN("giving up waiting on result ");
		        return 1; // halt with failure
		    }
		    //SHOULD examine the return code before proceeding...
		    if (g_found_object_code != object_finder_gamma::objectFinderResult::OBJECT_FOUND) {
		        ROS_WARN("could not find object; quitting!");
		        return 1;
		    }



		    // if here, then presumably have a valid pose for object of interest
		    // use object_grabber to pick up the coke can
		    object_grabber_goal.object_code = object_grabber_gamma::object_grabberGoal::COKE_CAN; //specify the object to be grabbed 
		    object_grabber_goal.object_frame = g_perceived_object_pose;
		    ROS_INFO("sending goal to grab object: ");
		    object_grabber_ac.sendGoal(object_grabber_goal,&objectGrabberDoneCb); 
		    //decide how long to wait...
		    finished_before_timeout = object_grabber_ac.waitForResult(ros::Duration(40.0));
		    if (!finished_before_timeout) {
		        ROS_WARN("giving up waiting on result; quitting ");
		        return 1;
		    }
		    if (g_object_grabber_return_code!= object_grabber_gamma::object_grabberResult::OBJECT_ACQUIRED) {
		        ROS_WARN("failed to grab object; giving up!");
		        return 1;
		    }



		    // if here, belief is that we are holding the Coke
		    // use navigator to return home
		    ROS_INFO("sending navigation goal: HOME");
		    navigation_goal.location_code = navigator_gamma::navigatorGoal::HOME; //send robot to HOME
		    navigator_ac.sendGoal(navigation_goal,&navigatorDoneCb);
		    finished_before_timeout = navigator_ac.waitForResult(ros::Duration(30.0));
		    //bool finished_before_timeout = action_client.waitForResult(); // wait forever...
		    if (!finished_before_timeout) {
		        ROS_WARN("giving up waiting on result ");
		        return 1;
		    }
		    //SHOULD do error checking here before proceeding...
		    if (g_navigator_rtn_code!= navigator_gamma::navigatorResult::DESIRED_POSE_ACHIEVED) {
		        ROS_WARN("COULD NOT REACH TABLE; QUITTING");
		        return 1;
		    }
		    ROS_INFO("Done fetching! Run me again?");    
        }
    }
    return 0;
}
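The connect-and-retry loop above is written out three times, once per action server. A small sketch of a helper that captures the pattern; it is not part of the original node and is templated so it works for any of the SimpleActionClient instances used here:

// Sketch: block until an actionlib server is available, retrying every half second.
template <class ActionClient>
void waitForActionServer(ActionClient &ac, const std::string &name) {
    ROS_INFO("waiting for server: %s", name.c_str());
    while (ros::ok() && !ac.waitForServer(ros::Duration(0.5))) {
        ros::spinOnce();
        ros::Duration(0.5).sleep();
        ROS_INFO("retrying...");
    }
    ROS_INFO("connected to %s action server", name.c_str());
}

With it, each of the three blocks above collapses to a single call such as waitForActionServer(object_finder_ac, "objectFinderActionServer");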
void MarginalizationInfo::marginalize()
{
    int pos = 0;
    for (auto &it : parameter_block_idx)
    {
        it.second = pos;
        pos += localSize(parameter_block_size[it.first]);
    }

    m = pos;

    for (const auto &it : parameter_block_size)
    {
        if (parameter_block_idx.find(it.first) == parameter_block_idx.end())
        {
            parameter_block_idx[it.first] = pos;
            pos += localSize(it.second);
        }
    }

    n = pos - m;

    //ROS_DEBUG("marginalization, pos: %d, m: %d, n: %d, size: %d", pos, m, n, (int)parameter_block_idx.size());

    TicToc t_summing;
    Eigen::MatrixXd A(pos, pos);
    Eigen::VectorXd b(pos);
    A.setZero();
    b.setZero();
    /*
    for (auto it : factors)
    {
        for (int i = 0; i < static_cast<int>(it->parameter_blocks.size()); i++)
        {
            int idx_i = parameter_block_idx[reinterpret_cast<long>(it->parameter_blocks[i])];
            int size_i = localSize(parameter_block_size[reinterpret_cast<long>(it->parameter_blocks[i])]);
            Eigen::MatrixXd jacobian_i = it->jacobians[i].leftCols(size_i);
            for (int j = i; j < static_cast<int>(it->parameter_blocks.size()); j++)
            {
                int idx_j = parameter_block_idx[reinterpret_cast<long>(it->parameter_blocks[j])];
                int size_j = localSize(parameter_block_size[reinterpret_cast<long>(it->parameter_blocks[j])]);
                Eigen::MatrixXd jacobian_j = it->jacobians[j].leftCols(size_j);
                if (i == j)
                    A.block(idx_i, idx_j, size_i, size_j) += jacobian_i.transpose() * jacobian_j;
                else
                {
                    A.block(idx_i, idx_j, size_i, size_j) += jacobian_i.transpose() * jacobian_j;
                    A.block(idx_j, idx_i, size_j, size_i) = A.block(idx_i, idx_j, size_i, size_j).transpose();
                }
            }
            b.segment(idx_i, size_i) += jacobian_i.transpose() * it->residuals;
        }
    }
    ROS_INFO("summing up costs %f ms", t_summing.toc());
    */
    //multi thread


    TicToc t_thread_summing;
    pthread_t tids[NUM_THREADS];
    ThreadsStruct threadsstruct[NUM_THREADS];
    int i = 0;
    for (auto it : factors)
    {
        threadsstruct[i].sub_factors.push_back(it);
        i++;
        i = i % NUM_THREADS;
    }
    for (int i = 0; i < NUM_THREADS; i++)
    {
        TicToc zero_matrix;
        threadsstruct[i].A = Eigen::MatrixXd::Zero(pos,pos);
        threadsstruct[i].b = Eigen::VectorXd::Zero(pos);
        threadsstruct[i].parameter_block_size = parameter_block_size;
        threadsstruct[i].parameter_block_idx = parameter_block_idx;
        int ret = pthread_create( &tids[i], NULL, ThreadsConstructA ,(void*)&(threadsstruct[i]));
        if (ret != 0)
        {
            ROS_WARN("pthread_create error");
            ROS_BREAK();
        }
    }
    for( int i = NUM_THREADS - 1; i >= 0; i--)  
    {
        pthread_join( tids[i], NULL ); 
        A += threadsstruct[i].A;
        b += threadsstruct[i].b;
    }
    //ROS_DEBUG("thread summing up costs %f ms", t_thread_summing.toc());
    //ROS_INFO("A diff %f , b diff %f ", (A - tmp_A).sum(), (b - tmp_b).sum());


    //TODO
    Eigen::MatrixXd Amm = 0.5 * (A.block(0, 0, m, m) + A.block(0, 0, m, m).transpose());
    Eigen::SelfAdjointEigenSolver<Eigen::MatrixXd> saes(Amm);

    //ROS_ASSERT_MSG(saes.eigenvalues().minCoeff() >= -1e-4, "min eigenvalue %f", saes.eigenvalues().minCoeff());

    Eigen::MatrixXd Amm_inv = saes.eigenvectors() * Eigen::VectorXd((saes.eigenvalues().array() > eps).select(saes.eigenvalues().array().inverse(), 0)).asDiagonal() * saes.eigenvectors().transpose();
    //printf("error1: %f\n", (Amm * Amm_inv - Eigen::MatrixXd::Identity(m, m)).sum());

    Eigen::VectorXd bmm = b.segment(0, m);
    Eigen::MatrixXd Amr = A.block(0, m, m, n);
    Eigen::MatrixXd Arm = A.block(m, 0, n, m);
    Eigen::MatrixXd Arr = A.block(m, m, n, n);
    Eigen::VectorXd brr = b.segment(m, n);
    A = Arr - Arm * Amm_inv * Amr;
    b = brr - Arm * Amm_inv * bmm;

    Eigen::SelfAdjointEigenSolver<Eigen::MatrixXd> saes2(A);
    Eigen::VectorXd S = Eigen::VectorXd((saes2.eigenvalues().array() > eps).select(saes2.eigenvalues().array(), 0));
    Eigen::VectorXd S_inv = Eigen::VectorXd((saes2.eigenvalues().array() > eps).select(saes2.eigenvalues().array().inverse(), 0));

    Eigen::VectorXd S_sqrt = S.cwiseSqrt();
    Eigen::VectorXd S_inv_sqrt = S_inv.cwiseSqrt();

    linearized_jacobians = S_sqrt.asDiagonal() * saes2.eigenvectors().transpose();
    linearized_residuals = S_inv_sqrt.asDiagonal() * saes2.eigenvectors().transpose() * b;
    //std::cout << A << std::endl
    //          << std::endl;
    //std::cout << linearized_jacobians << std::endl;
    //printf("error2: %f %f\n", (linearized_jacobians.transpose() * linearized_jacobians - A).sum(),
    //      (linearized_jacobians.transpose() * linearized_residuals - b).sum());
}
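In effect, marginalize() forms the Schur complement of the block being dropped. With the information matrix A and vector b partitioned into the marginalized part of size m (blocks Amm, Amr, Arm, bmm above) and the remaining part of size n (blocks Arr, brr), the reduced system kept by the code is

$$A^{*} = A_{rr} - A_{rm} A_{mm}^{-1} A_{mr}, \qquad b^{*} = b_{rr} - A_{rm} A_{mm}^{-1} b_{mm},$$

and the prior is converted back into a least-squares factor through the eigendecomposition $A^{*} = V S V^{T}$ (eigenvalues below eps treated as zero):

$$J = S^{1/2} V^{T}, \qquad r = S^{-1/2} V^{T} b^{*},$$

which is exactly what linearized_jacobians and linearized_residuals store, so that $J^{T} J \approx A^{*}$ and $J^{T} r \approx b^{*}$.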
/**
 *
 * @brief Initialize the camera and initial parameter values. Returns 1 if properly initialized.
 *
 * @param [in] argc
 * @param [in] argv
 * @param [in] nh
 *
 */
int initialize(int argc, char *argv[],ros::NodeHandle nh){
	/*
	 * Initial setup for parameters
	 */
	integrationTime = 1500;
	modulationFrequency = 30000000;
	frameRate = 40;
	bilateralFilter = false;
	
	flip_x = flip_y = 1;
	
	AmplitudeFilterOn = false;
	AmplitudeThreshold = 0;

	for( int i = 1; i < argc; i++) {
		// read integration time
		if( std::string(argv[i]) == "-it" ) {
			if( sscanf(argv[++i], "%d", &integrationTime) != 1 
				|| integrationTime < 100 || integrationTime > 2700 ) {
				ROS_WARN("*invalid integration time");
				return help();
			}
		}
		// read modulation frequency
		else if( std::string(argv[i]) == "-mf" ) {
			if( sscanf(argv[++i], "%d", &modulationFrequency) != 1 
				|| modulationFrequency < 5000000 || modulationFrequency > 30000000 ) {
				ROS_WARN("*invalid modulation frequency");
				return help();
			}
		}
		if( std::string(argv[i]) == "-fr" ) {
			if( sscanf(argv[++i], "%d", &frameRate) != 1 
				|| frameRate < 1 || frameRate > 40 ) {
				ROS_WARN("*invalid frame rate");
				return help();
			}
		} else if( std::string(argv[i]) == "-bf" ) {
			bilateralFilter = true;
		} else if( std::string(argv[i]) == "-flip_x" ) {
			flip_x = -1;
		} else if( std::string(argv[i]) == "-flip_y" ) {
			flip_y = -1;
		}
		// additional parameters
		else if( std::string(argv[i]) == "-af" ) {
			AmplitudeFilterOn = true;
		}
		else if( std::string(argv[i]) == "-at" ) {
			if( sscanf(argv[++i], "%f", &AmplitudeThreshold) != 1 
				|| AmplitudeThreshold < 0 || AmplitudeThreshold > 2500 ) {
				ROS_WARN("*invalid amplitude threshold");
				return help();
			}	
		}
		// print help
		else if( std::string(argv[i]) == "--help" ) {
			ROS_WARN_STREAM("arguments: " << argc << " which: " << argv[i]);		
			return help();
		}
		else if( argv[i][0] == '-' ) {
			ROS_WARN_STREAM("invalid option " << argv[i]);
			return help();
		}
	} 	

	/*
	 * Camera Initialization
	 */
	std::stringstream sourcePluginLocation, procPluginLocation;
	sourcePluginLocation.clear();
	procPluginLocation.clear();
	sourcePluginLocation << PMD_PLUGIN_DIR << "digicam";
	procPluginLocation << PMD_PLUGIN_DIR << "digicamproc";

	// If the camera is not connected at all, we will get a segmentation fault.
	res = pmdOpen (&hnd, sourcePluginLocation.str().c_str(), SOURCE_PARAM, procPluginLocation.str().c_str(), PROC_PARAM);
	if (res != PMD_OK)
	{
		pmdGetLastError (0, err, 128);
		ROS_ERROR_STREAM("Could not connect: " << err);
		return 0;
	}

	char result[128];
	result[0] = 0;
	res = pmdSourceCommand(hnd, result, sizeof(result), "IsCalibrationDataLoaded");
	if (res != PMD_OK)
	{
		pmdGetLastError (0, err, 128);
		ROS_ERROR_STREAM("Could not execute source command: " << err);
		pmdClose (hnd);
		return 0;
	}
	if (std::string(result) == "YES")
		ROS_INFO("Calibration file loaded.");
	else
		ROS_INFO("No calibration file found");

	res = pmdUpdate (hnd);
	if (res != PMD_OK)
	{
		pmdGetLastError (hnd, err, 128);
		ROS_ERROR_STREAM("Could not transfer data: " << err);
		pmdClose (hnd);
		return 0;
	}

	PMDDataDescription dd;

	res = pmdGetSourceDataDescription (hnd, &dd);
	if (res != PMD_OK)
	{
		pmdGetLastError (hnd, err, 128);
		ROS_ERROR_STREAM("Could not get data description: " << err);
		pmdClose (hnd);
		return 0;
	}

	if (dd.subHeaderType != PMD_IMAGE_DATA)
	{
		ROS_ERROR_STREAM("Source data is not an image!\n");
		pmdClose (hnd);
		return 0;
	}
	noOfRows = dd.img.numRows;
	noOfColumns = dd.img.numColumns;

	/*
	 * ROS Node Initialization
	 */
	pub_non_filtered = nh.advertise<PointCloud> ("depth_non_filtered", 1);
	pub_filtered = nh.advertise<PointCloud> ("depth_filtered", 1);
	dataPublished=true;
	return 1;
}
Example #22
void AlgorithmManager::updateControl()
{
	static bool debug1 = true;
	static bool debug2 = true;
	//bool ok_push;
	std_msgs::Int8 temp_algo_state;
	temp_algo_state.data = algorithm_mode_;
	algorithm_state_pub_.publish(temp_algo_state);
	edo_core_msgs::MovementCommand msg;
	
	
	//drain the queue of received move messages
	while (!msglist_.empty())
	{
		ROS_WARN("popping move message");
		msg=msglist_.back();
		msglist_.pop_back();
		
		/*ok_push = false;
		
		if(old_msg.movement_type == msg.movement_type && old_msg.size == msg.size)
		{
			for(int i =0; i < msg.size; i++)
			{
				if(fabs(old_msg.data[i] - msg.data[i]) > 0.001)
				{
					ok_push = true;
					break;
				}
			}
		}
		else
		{
			ok_push = true;
		}*/

		switch (msg.movement_type) 
		{
			case MOVE_MESSAGE_TYPE::MOVE_TRJNT_J: case MOVE_MESSAGE_TYPE::MOVE_CARLIN_J:
			case MOVE_MESSAGE_TYPE::MOVE_TRJNT_C: case MOVE_MESSAGE_TYPE::MOVE_CARLIN_C:
			case MOVE_MESSAGE_TYPE::MOVE_CARCIR_C: case MOVE_MESSAGE_TYPE::MOVE_CARCIR_J:
				old_msg = msg;
			break;
		}
		/*
		if (ok_push==true)
		{*/
		   
			switch (msg.movement_type) 
			{
				case MOVE_MESSAGE_TYPE::MOVE_TRJNT_J: case MOVE_MESSAGE_TYPE::MOVE_CARLIN_J:
					if(!moveTrjntFn(&msg))
					{
						ROS_ERROR("Move Jnt not executed...");
						feedbackFn(MESSAGE_FEEDBACK::ERROR, State::FAILED_MOVE);
					}
				break;

				case MOVE_MESSAGE_TYPE::MOVE_TRJNT_C: case MOVE_MESSAGE_TYPE::MOVE_CARLIN_C:
					if(!moveCarlinFn(&msg))
					{
						ROS_ERROR("Move Lin not executed...");
						feedbackFn(MESSAGE_FEEDBACK::ERROR, State::FAILED_MOVE);
					}
				break;

				case MOVE_MESSAGE_TYPE::MOVE_CARCIR_C: case MOVE_MESSAGE_TYPE::MOVE_CARCIR_J:
					if(!moveCarcirFn(&msg))
					{
						ROS_ERROR("Move Cir not executed...");
						feedbackFn(MESSAGE_FEEDBACK::ERROR, State::FAILED_MOVE);
					}
				break;

				case MOVE_MESSAGE_TYPE::MOVE_PAUSE:
					if(!movePauseFn(&msg))
					{
						ROS_ERROR("Pause not executed...");
						feedbackFn(MESSAGE_FEEDBACK::ERROR, State::GENERIC_ERROR);
					}
				break;

				case MOVE_MESSAGE_TYPE::MOVE_RESUME:
					if(!moveResumeFn(&msg))
					{
						ROS_ERROR("Resume not executed...");
						feedbackFn(MESSAGE_FEEDBACK::ERROR, State::GENERIC_ERROR);
					}
				break;

				case MOVE_MESSAGE_TYPE::MOVE_CANCEL:
					if(!moveCancelFn(&msg))
					{
						ROS_ERROR("Cancel not executed...");
						feedbackFn(MESSAGE_FEEDBACK::ERROR, State::GENERIC_ERROR);
					}
				break;
				
				//IL: messages of an unexpected type should not reach this level... but just in case...
				default:
					ROS_WARN("Command not implemented, please use only accepted command type");
					feedbackFn(MESSAGE_FEEDBACK::ERROR, State::MOVETYPE_UNDEF);
					return;
				break;
			}
		/*}
		else
		{
			feedbackFn(MESSAGE_FEEDBACK::COMMAND_EXECUTED, 20); // duplicate command received: discard it and send feedback to unblock the state machine
		}
		*/
 
		
	}
	//drain the queue of received jog messages
	while (!msglistJog_.empty())
	{
		msg=msglistJog_.back();
		msglistJog_.pop_back();
  
		switch (msg.movement_type) 
		{	
			case JOG_MESSAGE_TYPE::JOG_TRJNT:
				if(!jogTrjntFn(&msg))
				{
					ROS_ERROR("Move JNTtrj not executed...");
					feedbackFn(MESSAGE_FEEDBACK::ERROR, State::GENERIC_ERROR);
				}
			break;

			case JOG_MESSAGE_TYPE::JOG_CARLIN:
				if(!jogCarlinFn(&msg))
				{
					ROS_ERROR("Move CARTtrj not executed...");
					feedbackFn(MESSAGE_FEEDBACK::ERROR, State::GENERIC_ERROR);
				}
			break;

			case JOG_MESSAGE_TYPE::JOG_STOP:
				if(!jogStopFn(&msg))
				{
					ROS_ERROR("Move JOG stop not executed...");
					feedbackFn(MESSAGE_FEEDBACK::ERROR, State::GENERIC_ERROR);
				}
			break;
			
			//IL: messages of an unexpected type should not reach this level... but just in case...
			default:
				ROS_WARN("Command not implemented, please use only accepted command type");
				feedbackFn(MESSAGE_FEEDBACK::ERROR, State::MOVETYPE_UNDEF);
				return;
			break;
		}
	}
	
	if(algorithm_mode_ == MOVING)
	{
		if(debug1)
		{
			ROS_INFO("ALGO MODULE STARTED SENDING ORL CONTROL");
			debug1 = false;
			debug2 = true;
		}
		setControl();
	}
	else if(algorithm_mode_ == INITIALIZED)
	{
		if(debug2)
		{
			ROS_INFO("ALGO MODULE STARTED SENDING KEEP POSITION MESSAGES");
			debug2 = false;
			debug1 = true;
		}
		keepPosition();
	}
	else if (algorithm_mode_ == WAITING) 
	{
		keepPosition();
		ros::Duration d = ros::Time::now() - start_wait_time_;
		if(d.toSec() >= (double)delay_){
			waiting_ = false;
			algorithm_mode_ = FINISHED;
			delay_ = 255;
		}
	}
	else if (algorithm_mode_ == BLOCKED) 
	{
		keepPosition();
		waiting_ = false;
		algorithm_mode_ = INITIALIZED;
	}
	else if (algorithm_mode_ == PAUSE) 
	{
		keepPosition();
	}

	else if (algorithm_mode_ == FINISHED) 
	{
		if (jog_state_)
		{
			noCartPose = true;
			algorithm_mode_ = RECOVERY;
            ORL_cancel_motion(LOCAL_VERBOSITY, ORL_CNTRL01, ORL_ARM1);
			ORL_terminate_controller(ORL_VERBOSE, ORL_CNTRL01);
			ROS_WARN("TERMINATE ORL...");
			ROS_WARN("Trying to re-initialize ORL...");
			if (!initializeORL())
			{
				ROS_ERROR("Impossible to Inizialize ORL");
			}
			noCartPose = false;
			hold_position_.unit_type = ORL_POSITION_LINK_DEGREE; //TODO: this works around the problem, but who clears the type in the case of a Cartesian jog?
			ORL_set_position(NULL, &hold_position_, LOCAL_VERBOSITY, ORL_CNTRL01, ORL_ARM1);
			jog_state_ = false;
		}

		keepPosition();
		algorithm_mode_ = INITIALIZED;
	}
}
Example #23
//#######################
//#### main programm ####
int main(int argc, char** argv)
{
    // initialize ROS, specify name of node
    ros::init(argc, argv, "sick_s300");
    
    NodeClass nodeClass;
	ScannerSickS300 SickS300;

	//char *pcPort = new char();
//	const char pcPort[] = "/dev/ttyUSB1"; //TODO replace with parameter port
//	const char pcPort[] = nodeClass.port;
//	int iBaudRate = 500000;
	int iBaudRate = nodeClass.baud;
	bool bOpenScan = false, bRecScan = false;
	bool firstTry = true;
	std::vector<double> vdDistM, vdAngRAD, vdIntensAU;
 
 	while (!bOpenScan)
 	{
 		ROS_INFO("Opening scanner...");
		bOpenScan = SickS300.open(nodeClass.port.c_str(), iBaudRate);
		
		// print an error only on the first failed attempt; retry once per second
	 	if(!bOpenScan)
		{
			if(firstTry)
			{
				ROS_ERROR("...scanner not available on port %s. Will retry every second.",nodeClass.port.c_str());
				firstTry = false;
			}
			sleep(1);
		}
	}
	ROS_INFO("...scanner opened successfully on port %s",nodeClass.port.c_str());

	// main loop
	ros::Rate loop_rate(10); // Hz
    while(nodeClass.nodeHandle.ok())
    {
		// read scan
		ROS_DEBUG("Reading scanner...");
		bRecScan = SickS300.getScan(vdDistM, vdAngRAD, vdIntensAU);
		ROS_DEBUG("...scanner read successfully");
    	// publish LaserScan
        if(bRecScan)
        {
		    ROS_DEBUG("...publishing LaserScan message");
            nodeClass.publishLaserScan(vdDistM, vdAngRAD, vdIntensAU);
        }
        else
        {
		    ROS_WARN("...no Scan available");
        }

        // sleep and waiting for messages, callbacks    
        ros::spinOnce();
        loop_rate.sleep();
    }
    return 0;
}
Example #24
/**
 * This function handles the movement message received from state machine node
 * - a movement command can not be executed until a valid jnt_state has been
 *   received from the recovery node
 */
void AlgorithmManager::moveCallback(edo_core_msgs::MovementCommandConstPtr msg)
{

	if(algorithm_mode_!=UNINITIALIZED)
	{
		feedbackFn(MESSAGE_FEEDBACK::COMMAND_RECEIVED, 0);
		move_cb_state_ = true;
		ROS_INFO("MOVE_MESSAGE_TYPE = %d command received...   sono nello stato %d", msg->movement_type, algorithm_mode_);
		//handle the delay set by the previous command:
		//if it is 255 (no_delay) skip the wait, otherwise wait
		if (pause_state_ && msg->movement_type != MOVE_MESSAGE_TYPE::MOVE_RESUME && msg->movement_type != MOVE_MESSAGE_TYPE::MOVE_CANCEL && msg->movement_type != MOVE_MESSAGE_TYPE::MOVE_PAUSE)
		{
			ROS_ERROR("Edo is in pause state! Please resume or cancel previous movement...");
			return;
		}

		ros::Rate loop_rate(controller_frequency_);
		while(algorithm_mode_ == WAITING || waiting_ )
		{
			if (!ros::ok())
			{
				return;
			}
			loop_rate.sleep();
		}

		//read the delay here
		if(msg->movement_type != MOVE_MESSAGE_TYPE::MOVE_PAUSE && msg->movement_type != MOVE_MESSAGE_TYPE::MOVE_RESUME && msg->movement_type != MOVE_MESSAGE_TYPE::MOVE_CANCEL)
		{
			if(msg->movement_attributes.size() > 0)
			{
				delay_ = msg->movement_attributes[0];
				if (msg->movement_attributes[0] != 255 && msg->movement_attributes[0] != 0) 
				{
					waiting_ = true;
				}
			}
			else
			{
				//ROS_WARN("Delay parameter is not specified. no_delay is set as default");
				delay_ = 255;
			}
		}
//queue the received messages: the higher-priority ones (PAUSE, RESUME, CANCEL) are inserted at the end that gets popped first, movement commands at the opposite end
//the queue is drained by the loop that generates the targets
			
		switch (msg->movement_type) 
		{
			case MOVE_MESSAGE_TYPE::MOVE_TRJNT_J:  case MOVE_MESSAGE_TYPE::MOVE_CARLIN_J:
			case MOVE_MESSAGE_TYPE::MOVE_TRJNT_C:  case MOVE_MESSAGE_TYPE::MOVE_CARLIN_C:
			case MOVE_MESSAGE_TYPE::MOVE_CARCIR_C: case MOVE_MESSAGE_TYPE::MOVE_CARCIR_J:
				msglist_.push_front(*msg);
			break;

			case MOVE_MESSAGE_TYPE::MOVE_PAUSE:
			case MOVE_MESSAGE_TYPE::MOVE_RESUME:
			case MOVE_MESSAGE_TYPE::MOVE_CANCEL:
				msglist_.push_back(*msg);
			break;

			default:
				ROS_WARN("Command not implemented, please use only accepted command type");
				feedbackFn(MESSAGE_FEEDBACK::ERROR, State::MOVETYPE_UNDEF);
				return;
			break;
		}
	
		move_cb_state_ = false;

	}
	else{
		ROS_INFO("Request for movement arrive while module was uninitialized");
		feedbackFn(MESSAGE_FEEDBACK::ERROR, State::ORL_UNINITIALIZED);
	}

}
Example #25
int main(int argc, char **argv)
{
    ros::init(argc, argv, "rtimulib_node");
    ROS_INFO("Imu driver is running");
    ros::NodeHandle n;
    ros::Publisher imu_pub = n.advertise<sensor_msgs::Imu>("imu", 1);

    std::string path_calib_file;
    n.getParam("/rtimulib_node/calibration_file_path", path_calib_file);
    std::string frame_id;
    n.getParam("/rtimulib_node/frame_id", frame_id);
    double update_rate;
    if(!n.getParam("/rtimulib_node/update_rate", update_rate))
    {
        ROS_WARN("No update_rate provided - default: 20 Hz");
        update_rate = 20;
    }

    // Load the RTIMULib.ini config file
    RTIMUSettings *settings = new RTIMUSettings(path_calib_file.c_str(), "RTIMULib"); 

    RTIMU *imu = RTIMU::createIMU(settings);

    if ((imu == NULL) || (imu->IMUType() == RTIMU_TYPE_NULL))
    {
        ROS_ERROR("No Imu found");
        return -1;
    }

    // Initialise the imu object
    imu->IMUInit();

    // Set the Fusion coefficient
    imu->setSlerpPower(0.02);
    // Enable the sensors
    imu->setGyroEnable(true);
    imu->setAccelEnable(true);
    imu->setCompassEnable(true);

    ros::Rate loop_rate(update_rate);
    while (ros::ok())
    {
        sensor_msgs::Imu imu_msg;

        if (imu->IMURead())
        {
            RTIMU_DATA imu_data = imu->getIMUData();
            imu_msg.header.stamp = ros::Time::now();
            imu_msg.header.frame_id = frame_id;
            imu_msg.orientation.x = imu_data.fusionQPose.x(); 
            imu_msg.orientation.y = imu_data.fusionQPose.y(); 
            imu_msg.orientation.z = imu_data.fusionQPose.z(); 
            imu_msg.orientation.w = imu_data.fusionQPose.scalar(); 
            imu_msg.angular_velocity.x = imu_data.gyro.x();
            imu_msg.angular_velocity.y = imu_data.gyro.y();
            imu_msg.angular_velocity.z = imu_data.gyro.z();
            imu_msg.linear_acceleration.x = imu_data.accel.x();
            imu_msg.linear_acceleration.y = imu_data.accel.y();
            imu_msg.linear_acceleration.z = imu_data.accel.z();

            imu_pub.publish(imu_msg);
        }
        ros::spinOnce();
        loop_rate.sleep();  // honor the requested update_rate
    }
    return 0;
}
Example #26
bool AlgorithmManager::moveCarlinFn(edo_core_msgs::MovementCommand * msg)
{

	memset(&target_cart_, 0, sizeof(ORL_cartesian_position));
	target_cart_.unit_type = ORL_CART_POSITION;
	ROS_INFO("Movement type is %d", msg->movement_type);
	if (msg->size != 6) {
		ROS_ERROR("Invalid Cartesian Command, the command requires 6 data value (x, y, z, a, e, r)");
		return false;
	}
	target_cart_.x = msg->data[0];
	target_cart_.y = msg->data[1];
	target_cart_.z = msg->data[2];
	target_cart_.a = msg->data[3];
	target_cart_.e = msg->data[4];
	target_cart_.r = msg->data[5];

	if (msg->movement_attributes.size() > 0) {
		for (size_t i = 0; i < (msg->movement_attributes.size()-1); i++) {
			target_cart_.config_flags[i] = msg->movement_attributes[i+1];
		}
		target_cart_.config_flags[msg->movement_attributes.size()-1] = '\0';
	}

	ROS_INFO("Received Cartesian cmd. x: %f, y: %f, z: %f, a: %f, e: %f, r: %f", target_cart_.x, target_cart_.y, target_cart_.z, target_cart_.a, target_cart_.e, target_cart_.r);
	ROS_INFO("tags: %s", target_cart_.config_flags);

	std::vector<int> move_parameters;
	move_parameters.resize(3);

	int status = 0;
	if (delay_ == 255) {
		move_parameters[0] = ORL_FLY;
		move_parameters[1] = ORL_ADVANCE;
	}else{
		move_parameters[0] = ORL_NO_FLY;
		move_parameters[1] = ORL_WAIT;
	}

	if(first_time_)
	{
		ROS_WARN("set position");
		status = ORL_set_position(NULL, &current_state_, LOCAL_VERBOSITY, ORL_CNTRL01, ORL_ARM1);
		first_time_ = false;
	}
	move_parameters[2] = ORL_FLY_NORMAL;

	if(!manageORLStatus(status))
	{
		return false;
	}
	else{
		status = setORLMovement(move_parameters, msg->movement_type, msg->ovr);
		if(!manageORLStatus(status))
		{
			return false;
		}

		/*
		hold_position_.unit_type = ORL_POSITION_LINK_DEGREE;
		status = ORL_get_next_interpolation_step(&hold_position_, LOCAL_VERBOSITY, ORL_CNTRL01, ORL_ARM1);
		if(!manageORLStatus(status))
		{
			return false;
		}*/
	}
	algorithm_mode_ = MOVING;
	return true;

}
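
// A hedged sketch (not taken from edo_core) of how a caller could fill the
// Cartesian command that moveCarlinFn() expects: movement_type set to one of
// the MOVE_CAR* values handled by the callback, size == 6 and data holding
// x, y, z, a, e, r. The helper name, the numeric values and the assumption
// that data is a dynamic array (std::vector) are illustrative only.
static edo_core_msgs::MovementCommand makeCartesianLinearCommandSketch()
{
	edo_core_msgs::MovementCommand cmd;
	cmd.movement_type = MOVE_MESSAGE_TYPE::MOVE_CARLIN_C; // Cartesian linear move
	cmd.size = 6;
	cmd.data.resize(6);
	cmd.data[0] = 400.0; // x
	cmd.data[1] = 0.0;   // y
	cmd.data[2] = 300.0; // z
	cmd.data[3] = 0.0;   // a
	cmd.data[4] = 180.0; // e
	cmd.data[5] = 0.0;   // r
	cmd.ovr = 50;        // illustrative speed override
	// cmd.movement_attributes[1..n], if provided, end up in target_cart_.config_flags
	return cmd;
}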
Example No. 27
void SpeedForCurvatureParameters::loadFromConfig(const mcm::KeyValueArray &config)
{
  std::unordered_map<std::string, std::string> config_map;
  for (size_t i = 0; i < config.items.size(); ++i) {
    config_map[config.items[i].key] = config.items[i].value;
  }

  if (config_map.count("curvature_filter_size")) {
    curvature_filter_size_ = boost::lexical_cast<double>(config_map.at("curvature_filter_size"));
    ROS_INFO("Setting curvature_filter_size to %lf", curvature_filter_size_);
    config_map.erase("curvature_filter_size");
  }

  if (config_map.count("lateral_acceleration_mode")) {
    use_speed_from_accel_constant_ =
      boost::lexical_cast<int>(config_map.at("lateral_acceleration_mode"));
    ROS_INFO("Setting lateral acceleration mode to %s",
             use_speed_from_accel_constant_ ? "true" : "false");
    config_map.erase("lateral_acceleration_mode");
  }

  if (config_map.count("max_lateral_acceleration")) {
    max_lateral_accel_mss_ = boost::lexical_cast<double>(config_map.at("max_lateral_acceleration"));
    ROS_INFO("Setting max_lateral_acceleration to %lf", max_lateral_accel_mss_);
    config_map.erase("max_lateral_acceleration");
  }

  if (config_map.count("curvature_vs_speed/interpolation_type")) {
    std::string interp_type = config_map.at("curvature_vs_speed/interpolation_type");
    if (interp_type == "zero_order_hold") {
      speed_curve_.setInterpolationType(smu::Interpolation1D::ZERO_ORDER_HOLD);
      ROS_INFO("Setting interpolation type to %s", interp_type.c_str());
    } else if (interp_type == "linear") {
      speed_curve_.setInterpolationType(smu::Interpolation1D::LINEAR);
      ROS_INFO("Setting interpolation type to %s", interp_type.c_str());
    } else {
      ROS_ERROR("Ignoring invalid interpolation type '%s'.", interp_type.c_str());
    }
    config_map.erase("curvature_vs_speed/interpolation_type");
  }

  // We read in the curve points by determining if point N exists in the
  // config set, starting with N = 0 and going until the first pair of points
  // fails.  We don't set the points in the curve because we want to make sure
  // that a new curve is actually specified before deleting the old curve.
  std::vector<double> x;
  std::vector<double> y;
  for (size_t i = 0; true; i++) {
    char base_key[1024];
    snprintf(base_key, sizeof(base_key), "curvature_vs_speed/values/%zu", i);
    const std::string x_key = std::string(base_key) + "/0";
    const std::string y_key = std::string(base_key) + "/1";

    if (config_map.count(x_key) && config_map.count(y_key)) {
      x.push_back(boost::lexical_cast<double>(config_map.at(x_key)));
      config_map.erase(x_key);
      y.push_back(boost::lexical_cast<double>(config_map.at(y_key)));
      config_map.erase(y_key);
    } else {
      break;
    }
  }

  // If a curve was provided, setup speed_curve using the new parameters.
  // Note that from the way x,y are constructed, they are guaranteed to have
  // the same length.
  if (x.size()) {
    ROS_INFO("Setting curvature_vs_speed curve: ");
    speed_curve_.clear();
    for (size_t i = 0; i < x.size(); i++) {
      speed_curve_.appendPoint(x[i], y[i]);
      ROS_INFO("  %zu -- %lf, %lf", i, x[i], y[i]);
    }
  }

  // Print warnings to help find ignored parameters
  for (auto const &it : config_map) {
    ROS_WARN("Ignoring unknown configuration value '%s'", it.first.c_str());
  }
}
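
// A hedged sketch of a key/value set that loadFromConfig() above would accept.
// The element type of config.items (assumed here to be mcm::KeyValue with string
// key/value fields, based on how it is accessed above) and all numeric values
// are illustrative assumptions.
static mcm::KeyValueArray makeExampleCurvatureConfigSketch()
{
  mcm::KeyValueArray config;
  auto add = [&config](const std::string &key, const std::string &value) {
    mcm::KeyValue item;  // assumed element type of config.items
    item.key = key;
    item.value = value;
    config.items.push_back(item);
  };
  add("curvature_filter_size", "5.0");
  add("lateral_acceleration_mode", "1");
  add("max_lateral_acceleration", "3.0");
  add("curvature_vs_speed/interpolation_type", "linear");
  add("curvature_vs_speed/values/0/0", "0.0");   // curvature
  add("curvature_vs_speed/values/0/1", "15.0");  // speed
  add("curvature_vs_speed/values/1/0", "0.1");
  add("curvature_vs_speed/values/1/1", "5.0");
  return config;
}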
Example No. 28
std::string ScanPlantState::goToScanWaypoint(XmlRpc::XmlRpcValue waypoint)
{
    // Create the action client for arm movement. True causes the client to spin its own thread.
    actionlib::SimpleActionClient<robot_control::MoveArmAction> action_client_right("robot_control_right_arm", true);

    ROS_INFO("Waiting for action server for moving rigth Baxter arm to start.");
    action_client_right.waitForServer(); //will wait for infinite time
    ROS_INFO("Action server for moving rigth Baxter arm is started.");

    robot_control::MoveArmGoal goal_message;
    goal_message.goal_pose.header.stamp    = ros::Time::now();
    goal_message.goal_pose.header.frame_id = "base";
    goal_message.goal_pose.pose.position.x = waypoint[0];
    goal_message.goal_pose.pose.position.y = waypoint[1];
    goal_message.goal_pose.pose.position.z = waypoint[2];
    goal_message.goal_pose.pose.orientation.x = waypoint[3];
    goal_message.goal_pose.pose.orientation.y = waypoint[4];
    goal_message.goal_pose.pose.orientation.z = waypoint[5];
    goal_message.goal_pose.pose.orientation.w = waypoint[6];

    ROS_INFO("Sending goal for scan waypoint.");
    action_client_right.sendGoal(goal_message);

    // Loop while waiting for the goal to execute.
    // During this loop, the scanning of the crop is performed.
    actionlib::SimpleClientGoalState state = actionlib::SimpleClientGoalState::ACTIVE;
    while(state == actionlib::SimpleClientGoalState::PENDING || state == actionlib::SimpleClientGoalState::ACTIVE)
    {
        state = action_client_right.getState();
        detectFruit();
    }

    std::string result;
    if(state == actionlib::SimpleClientGoalState::SUCCEEDED)
    {
        if (action_client_right.getResult()->goal_reached)
        {
            ROS_INFO("Arm reached position.");
            result = "goal reached";
        }
        else
        {
            ROS_WARN("Could not move arm to position.");
            result = "goal not reached";
        }
    }
    else
    {
        if(state == actionlib::SimpleClientGoalState::PREEMPTED)
        {
            ROS_WARN("Could not move arm to position. Action preempted.");
            result = "goal not reached";
        }
        else
        {
            ROS_INFO("Goal not reached. Unhandled action state: %s",state.toString().c_str());
            result = "error";
        }
    }
    return result;
}
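
// A hedged sketch of how goToScanWaypoint() might be driven: each waypoint is
// expected to be a 7-element array [x, y, z, qx, qy, qz, qw] expressed in the
// "base" frame. The member function itself, the parameter name "scan_waypoints"
// and the surrounding loop are assumptions for illustration only (a declaration
// in the class would be required for this to compile).
void ScanPlantState::runScanWaypointsSketch()
{
    ros::NodeHandle nh;
    XmlRpc::XmlRpcValue waypoints;
    if (nh.getParam("scan_waypoints", waypoints) &&
        waypoints.getType() == XmlRpc::XmlRpcValue::TypeArray)
    {
        for (int i = 0; i < waypoints.size(); ++i)
        {
            std::string result = goToScanWaypoint(waypoints[i]);
            ROS_INFO("Waypoint %d finished with result: %s", i, result.c_str());
        }
    }
}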
Example No. 29
void
vtree_color_user::compute_features( const pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud_matrix_map,
                                    vt::Document &full_doc )
{
    typedef pcl::PointXYZRGB nx_PointT;
    typedef pcl::Normal nx_Normal;
    typedef pcl::PointCloud<nx_PointT> nx_PointCloud;
    typedef pcl::PointCloud<nx_Normal> nx_PointCloud_normal;
    typedef pcl::PointCloud<int> nx_PointCloud_int;

    typedef pcl::UniformSampling<nx_PointT> nx_Sampler;
    typedef pcl::search::KdTree<nx_PointT> nx_SearchMethod;
    typedef pcl::NormalEstimation<nx_PointT, nx_Normal> nx_NormalEst;

#if FEATURE == 1
    typedef pcl::PFHRGBSignature250 nx_FeatureType;
    typedef pcl::PFHRGBEstimation<nx_PointT, nx_Normal, nx_FeatureType> nx_FeatureEst;
#elif FEATURE == 2
    typedef pcl::PPFRGBSignature nx_FeatureType;
    typedef pcl::PPFRGBEstimation<nx_PointT, nx_Normal, nx_FeatureType> nx_FeatureEst;
#else
#error A valid feature definition is required!
#endif
    typedef pcl::PointCloud<nx_FeatureType> nx_PointCloud_feature;

    // the input already arrives as a pcl cloud; just alias the shared pointer locally
    nx_PointCloud::Ptr cld_ptr(cloud_matrix_map);
//    ROS_INFO("Computing features for current eigen matrix");
//    for( int pt = 0; pt < cloud_matrix_map.cols(); ++pt)
//    {
//        nx_PointT current_point(cloud_matrix_map(0,pt), //Problem might be here
//                                cloud_matrix_map(1,pt),
//                                cloud_matrix_map(2,pt));
//        cld_ptr->push_back( nx_PointT( cloud_matrix_map(0,pt), //Problem might be here
//                                       cloud_matrix_map(1,pt),
//                                       cloud_matrix_map(2,pt) ) );
//    }

    ROS_INFO("[vtree_color_user] Starting keypoint extraction...");
    clock_t tic = clock();
    nx_PointCloud::Ptr keypoints( new nx_PointCloud);
    nx_PointCloud_int::Ptr keypoint_idx(new nx_PointCloud_int);
    nx_Sampler uniform_sampling;
    uniform_sampling.setInputCloud ( cld_ptr );
    uniform_sampling.setRadiusSearch ( keypoint_radius_ );
    uniform_sampling.compute( *keypoint_idx );

    pcl::copyPointCloud ( *cld_ptr, keypoint_idx->points, *keypoints);

    ROS_INFO("[vtree_color_user] No of Keypoints found %d", static_cast<int>(keypoint_idx->size()) );
    ROS_INFO("[vtree_color_user] Keypoint extraction took %f msec.", static_cast<double>((clock()-tic)*1000)/CLOCKS_PER_SEC );

    if( keypoints->empty() )
    {
        ROS_WARN("[vtree_color_user] No keypoints were found...");
        return;
    }

    // Compute normals for the input cloud
    ROS_INFO("[vtree_color_user] Starting normal extraction...");
    tic = clock();
    nx_PointCloud_normal::Ptr normals (new nx_PointCloud_normal);
    nx_SearchMethod::Ptr search_method_xyz (new nx_SearchMethod);
    nx_NormalEst norm_est;
    norm_est.setInputCloud ( cld_ptr );
    norm_est.setSearchMethod ( search_method_xyz );
    norm_est.setRadiusSearch ( normal_radius_ );
    norm_est.compute ( *normals );
    ROS_INFO("[vtree_color_user] Normal extraction took %f msec.", static_cast<double>((clock()-tic)*1000)/CLOCKS_PER_SEC );

    // Get features at the computed keypoints
    ROS_INFO("[vtree_color_user] Starting feature computation...");
    tic = clock();
    nx_PointCloud_feature::Ptr features(new nx_PointCloud_feature);
    nx_FeatureEst feat_est;
    feat_est.setInputCloud ( keypoints );
    feat_est.setSearchSurface ( cld_ptr );
    feat_est.setInputNormals ( normals );

    search_method_xyz.reset(new nx_SearchMethod);
    feat_est.setSearchMethod ( search_method_xyz );
    feat_est.setRadiusSearch ( feature_radius_ );
    feat_est.compute ( *features );
    ROS_INFO("[vtree_color_user] No of Features found %d", static_cast<int>(features->size()) );
    ROS_INFO("[vtree_color_user] Feature computation took %f msec.", static_cast<double>((clock()-tic)*1000)/CLOCKS_PER_SEC );


    // Rectify the histogram values to ensure they are in [0,100] and create a document
    for( nx_PointCloud_feature::iterator iter = features->begin();
         iter != features->end(); ++iter)
    {
        rectify_histogram( iter->histogram );
        full_doc.push_back( tree.quantize( FeatureHist( iter->histogram ) ) );
    }
}
void OccupancyMapMonitor::initialize()
{
  /* load params from param server */
  if (map_resolution_ <= std::numeric_limits<double>::epsilon())
    if (!nh_.getParam("octomap_resolution", map_resolution_))
    {
      map_resolution_ = 0.1;
      ROS_WARN("Resolution not specified for Octomap. Assuming resolution = %g instead", map_resolution_);
    }
  ROS_DEBUG("Using resolution = %lf m for building octomap", map_resolution_);
  
  if (map_frame_.empty())
    if (!nh_.getParam("octomap_frame", map_frame_))
      if (tf_)
        ROS_WARN("No target frame specified for Octomap. No transforms will be applied to received data.");
  
  if (!tf_ && !map_frame_.empty())
    ROS_WARN("Target frame specified but no TF instance specified. No transforms will be applied to received data.");
  
  tree_.reset(new OccMapTree(map_resolution_));
  tree_const_ = tree_;
  
  XmlRpc::XmlRpcValue sensor_list;
  if (nh_.getParam("sensors", sensor_list))
  {
    try
    {      
      if (sensor_list.getType() == XmlRpc::XmlRpcValue::TypeArray)
        for (int32_t i = 0; i < sensor_list.size(); ++i)
        {
          if (sensor_list[i].getType() != XmlRpc::XmlRpcValue::TypeStruct)
          {
            ROS_ERROR("Params for octomap updater %d not a struct; ignoring.", i);
            continue;
          }
          
          if (!sensor_list[i].hasMember ("sensor_plugin"))
          {
            ROS_ERROR("No sensor plugin specified for octomap updater %d; ignoring.", i);
            continue;
          }
          
          std::string sensor_plugin = std::string(sensor_list[i]["sensor_plugin"]);
          if (sensor_plugin.empty() || sensor_plugin[0] == '~')
          {
            ROS_INFO("Skipping octomap updater plugin '%s'", sensor_plugin.c_str());
            continue;
          }
          
          if (!updater_plugin_loader_)
          {
            try
            {
              updater_plugin_loader_.reset(new pluginlib::ClassLoader<OccupancyMapUpdater>("moveit_ros_perception", "occupancy_map_monitor::OccupancyMapUpdater"));
            }
            catch(pluginlib::PluginlibException& ex)
            {
              ROS_FATAL_STREAM("Exception while creating octomap updater plugin loader " << ex.what());
            }
          }
          
          OccupancyMapUpdaterPtr up;
          try
          {
            up.reset(updater_plugin_loader_->createUnmanagedInstance(sensor_plugin));
            up->setMonitor(this);
          }
          catch(pluginlib::PluginlibException& ex)
          {
            ROS_ERROR_STREAM("Exception while loading octomap updater '" << sensor_plugin << "': " << ex.what() << std::endl);
          }
          if (up)
          {
            /* pass the params struct directly in to the updater */
            if (!up->setParams(sensor_list[i]))
            {
              ROS_ERROR("Failed to configure updater of type %s", up->getType().c_str());
              continue;
            }
            
            if (!up->initialize())
            {
              ROS_ERROR("Unable to initialize map updater of type %s (plugin %s)", up->getType().c_str(), sensor_plugin.c_str());
              continue;
            }
            
            addUpdater(up);
          }
        }
      else
        ROS_ERROR("List of sensors must be an array!");
    }
    catch (XmlRpc::XmlRpcException &ex)
    {
      ROS_ERROR("XmlRpc Exception: %s", ex.getMessage().c_str());
    }
  }
  
  /* advertise services for saving and loading octomaps to/from disk */
  save_map_srv_ = nh_.advertiseService("save_map", &OccupancyMapMonitor::saveMapCallback, this);
  load_map_srv_ = nh_.advertiseService("load_map", &OccupancyMapMonitor::loadMapCallback, this);
}
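
// A hedged sketch of the "sensors" parameter structure that initialize() above
// parses: an array of structs, each with at least a "sensor_plugin" entry.
// In practice this usually comes from a YAML file; the plugin name, the extra
// keys and the assumption that the node handle resolves to the namespace the
// monitor reads from are all illustrative only.
static void setExampleSensorsParamSketch(ros::NodeHandle &nh)
{
  XmlRpc::XmlRpcValue sensor_list;
  sensor_list[0]["sensor_plugin"]     = std::string("occupancy_map_monitor/PointCloudOctomapUpdater");
  sensor_list[0]["point_cloud_topic"] = std::string("/camera/depth/points");  // assumed updater-specific key
  sensor_list[0]["max_range"]         = 5.0;                                  // assumed updater-specific key
  nh.setParam("sensors", sensor_list);
}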