void init_poses() {
    ROS_INFO("getting transforms from camera to PSMs");
    //tf::TransformListener tfListener;
    tf::StampedTransform tfResult_one, tfResult_two;
    g_tfListener_ptr = new tf::TransformListener;
    bool tferr = true;
    int ntries = 0;
    ROS_INFO("waiting for tf between base and camera...");
    while (tferr) {
        if (ntries > 5) break; //give up and accept default after this many tries
        tferr = false;
        try {
            //The direction of the transform returned will be from the target_frame to the source_frame.
            //Which if applied to data, will transform data in the source_frame into the target_frame. See tf/CoordinateFrameConventions#Transform_Direction
            g_tfListener_ptr->lookupTransform("left_camera_optical_frame", "one_psm_base_link", ros::Time(0), tfResult_one);
            g_tfListener_ptr->lookupTransform("left_camera_optical_frame", "two_psm_base_link", ros::Time(0), tfResult_two);
        } catch (tf::TransformException &exception) {
            ROS_WARN("%s", exception.what());
            tferr = true;
            ros::Duration(0.5).sleep(); // sleep for half a second
            ros::spinOnce();
            ntries++;
        }
    }
    //default transform: need to match this up to camera calibration!
    if (tferr) {
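        //hard-coded fallback: PSM1 base ~155 mm to the camera's left, PSM2 base
        //~145 mm to its right; R below is a 180-deg rotation about the optical
        //y-axis, i.e. the base frames are flipped w/rt the optical frame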
        g_affine_lcamera_to_psm_one.translation() << -0.155, -0.03265, 0.0;
        Eigen::Vector3d nvec, tvec, bvec;
        nvec << -1, 0, 0;
        tvec << 0, 1, 0;
        bvec << 0, 0, -1;
        Eigen::Matrix3d R;
        R.col(0) = nvec;
        R.col(1) = tvec;
        R.col(2) = bvec;
        g_affine_lcamera_to_psm_one.linear() = R;
        g_affine_lcamera_to_psm_two.linear() = R;
        g_affine_lcamera_to_psm_two.translation() << 0.145, -0.03265, 0.0;
        ROS_WARN("using default transform");
    } else {

        ROS_INFO("tf is good");

        //g_affine_lcamera_to_psm_one is the pose of the psm1 base frame w/rt the left camera optical frame
        //(the lookups above already target left_camera_optical_frame)
        g_affine_lcamera_to_psm_one = transformTFToEigen(tfResult_one);
        g_affine_lcamera_to_psm_two = transformTFToEigen(tfResult_two);
    }
    ROS_INFO("transform from left camera to psm one:");
    cout << g_affine_lcamera_to_psm_one.linear() << endl;
    cout << g_affine_lcamera_to_psm_one.translation().transpose() << endl;
    ROS_INFO("transform from left camera to psm two:");
    cout << g_affine_lcamera_to_psm_two.linear() << endl;
    cout << g_affine_lcamera_to_psm_two.translation().transpose() << endl;

    //now get initial poses:
    ROS_INFO("waiting for tf between camera and gripper tool tips...");
    tferr = true; //must reset: a successful first loop leaves tferr false, which would skip this loop
    ntries = 0;   //and leave the base-link transforms in tfResult_one/two
    while (tferr) {
        if (ntries > 5) break; //give up and accept default after this many tries
        tferr = false;
        try {
            //The direction of the transform returned will be from the target_frame to the source_frame.
            //Which if applied to data, will transform data in the source_frame into the target_frame. See tf/CoordinateFrameConventions#Transform_Direction
            g_tfListener_ptr->lookupTransform("left_camera_optical_frame", "one_tool_tip_link", ros::Time(0), tfResult_one);
            g_tfListener_ptr->lookupTransform("left_camera_optical_frame", "two_tool_tip_link", ros::Time(0), tfResult_two);
        } catch (tf::TransformException &exception) {
            ROS_WARN("%s", exception.what());
            tferr = true;
            ros::Duration(0.5).sleep(); // sleep for half a second
            ros::spinOnce();
            ntries++;
        }
    }
    //default start pose, if can't get tf:
    if (tferr) {
        g_psm1_start_pose.translation() << -0.02, 0, 0.04;
        g_psm2_start_pose.translation() << 0.02, 0, 0.04;
        Eigen::Vector3d nvec, tvec, bvec;
        nvec << -1, 0, 0;
        tvec << 0, 1, 0;
        bvec << 0, 0, -1;
        Eigen::Matrix3d R;
        R.col(0) = nvec;
        R.col(1) = tvec;
        R.col(2) = bvec;
        g_psm1_start_pose.linear() = R;
        g_psm2_start_pose.linear() = R;
        ROS_WARN("using default start poses");
    } else {

        ROS_INFO("tf is good");

        //tfResult_one/two now hold the tool-tip poses w/rt the left camera optical frame
        g_psm1_start_pose = transformTFToEigen(tfResult_one);
        g_psm2_start_pose = transformTFToEigen(tfResult_two);
    }
    ROS_INFO("psm1 gripper start pose:");
    cout << g_psm1_start_pose.linear() << endl;
    cout << g_psm1_start_pose.translation().transpose() << endl;
    ROS_INFO("psm2 gripper start pose:");
    cout << g_psm2_start_pose.linear() << endl;
    cout << g_psm2_start_pose.translation().transpose() << endl;

    //default entry/exit points, just to be safe:
    g_O_entry_point(0) = 0.0;
    g_O_entry_point(1) = 0.0;
    g_O_entry_point(2) = 0.12;
    g_O_exit_point = g_O_entry_point;
    g_O_exit_point(0) += 0.02;


}
Example #2
void poseTFToEigen(const tf::Pose &t, Eigen::Affine3d &e)
{
    //tf::Pose is a typedef of tf::Transform, so the transform overload applies directly
    transformTFToEigen(t, e);
}
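
//These examples call transformTFToEigen() in two forms: the two-argument
//overload wrapped by poseTFToEigen() above, and a one-argument form that
//returns the result by value. Neither body is shown on this page; below is a
//minimal sketch of the one-argument form, assuming it performs the standard
//tf-to-Eigen conversion:
Eigen::Affine3d transformTFToEigen(const tf::Transform &t) {
    Eigen::Affine3d e;
    for (int i = 0; i < 3; i++) {
        e.matrix()(i, 3) = t.getOrigin()[i]; //copy the translation
        for (int j = 0; j < 3; j++) {
            e.matrix()(i, j) = t.getBasis()[i][j]; //copy the 3x3 rotation
        }
    }
    //fill in the bottom row of the 4x4 homogeneous transform
    e.matrix()(3, 0) = e.matrix()(3, 1) = e.matrix()(3, 2) = 0.0;
    e.matrix()(3, 3) = 1.0;
    return e;
}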
int main(int argc, char** argv) {
    // ROS set-ups:
    ros::init(argc, argv, "needle_planner_test_main"); //node name

    ros::NodeHandle nh; // create a node handle; need to pass this to the class constructor
    Eigen::Vector3d entrance_pt, exit_pt, tissue_normal;
    tissue_normal << 0, 0, -1; //antiparallel to optical axis
    entrance_pt << -0.1, 0.05, 0.1; //100mm below the camera, offset slightly to stay clear of joint limits
    exit_pt << -0.09, 0.05, 0.1; // exit pt is shifted along camera-frame +x axis relative to entrance pt
    vector <Eigen::Affine3d> gripper_affines_wrt_camera; //put answers here 
    vector <Eigen::Affine3d> gripper2_affines_wrt_camera;
    
    vector <Eigen::Affine3d> vetted_gripper_affines_wrt_camera;

    ROS_INFO("instantiating  forward solver and an ik_solver");
    Davinci_fwd_solver davinci_fwd_solver; //instantiate a forward-kinematics solver    
    Davinci_IK_solver ik_solver;
    
    ofstream outfile;
    outfile.open("best_poses.dat");

    ROS_INFO("main: instantiating an object of type NeedlePlanner");
    NeedlePlanner needlePlanner;


    //compute the tissue frame in camera coords, based on point-cloud selections:
    needlePlanner.compute_tissue_frame_wrt_camera(entrance_pt, exit_pt, tissue_normal);
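    //(compute_tissue_frame_wrt_camera() is not shown here; judging from how
    // nvec_tissue is used below, it presumably builds a frame with x pointing
    // from entrance_pt toward exit_pt and z along tissue_normal)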
    //optional: manually set the needle grasp pose, else accept default from constructor
    //needlePlanner.set_affine_needle_frame_wrt_gripper_frame(affine);
    //optional: set the initial needle frame w/rt tissue frame (or accept default in constructor)
    //needlePlanner.set_affine_needle_frame_wrt_tissue(Eigen::Affine3d affine)
    // given grasp transform, tissue transform and initial needle pose w/rt tissue,
    // compute needle-drive path as rotation about needle-z axis:

    ROS_INFO("getting transforms from camera to PSMs");
    tf::TransformListener tfListener;
    tf::StampedTransform tfResult_one, tfResult_two;
    Eigen::Affine3d affine_lcamera_to_psm_one, affine_lcamera_to_psm_two, affine_gripper_wrt_base;
    bool tferr = true;
    int ntries = 0;
    ROS_INFO("waiting for tf between base and right_hand...");
    while (tferr) {
        if (ntries > 5) break; //give up and accept default after this many tries
        tferr = false;
        try {
            //The direction of the transform returned will be from the target_frame to the source_frame. 
            //Which if applied to data, will transform data in the source_frame into the target_frame. See tf/CoordinateFrameConventions#Transform_Direction
            tfListener.lookupTransform("left_camera_optical_frame", "one_psm_base_link", ros::Time(0), tfResult_one);
            tfListener.lookupTransform("left_camera_optical_frame", "two_psm_base_link", ros::Time(0), tfResult_two);
        } catch (tf::TransformException &exception) {
            ROS_WARN("%s", exception.what());
            tferr = true;
            ros::Duration(0.5).sleep(); // sleep for half a second
            ros::spinOnce();
            ntries++;
        }
    }
    //default transform: need to match this up to camera calibration!
    if (tferr) {
        affine_lcamera_to_psm_one.translation() << -0.155, -0.03265, 0.0;
        Eigen::Vector3d nvec, tvec, bvec;
        nvec << -1, 0, 0;
        tvec << 0, 1, 0;
        bvec << 0, 0, -1;
        Eigen::Matrix3d R;
        R.col(0) = nvec;
        R.col(1) = tvec;
        R.col(2) = bvec;
        affine_lcamera_to_psm_one.linear() = R;
        affine_lcamera_to_psm_two.linear() = R;
        affine_lcamera_to_psm_two.translation() << 0.145, -0.03265, 0.0;
        ROS_WARN("using default transform");
    } else {

        ROS_INFO("tf is good");

        //affine_lcamera_to_psm_one is the pose of the psm1 base frame w/rt the left camera optical frame
        //(the lookups above already target left_camera_optical_frame)
        affine_lcamera_to_psm_one = transformTFToEigen(tfResult_one);
        affine_lcamera_to_psm_two = transformTFToEigen(tfResult_two);
    }
    ROS_INFO("transform from left camera to psm one:");
    cout << affine_lcamera_to_psm_one.linear() << endl;
    cout << affine_lcamera_to_psm_one.translation().transpose() << endl;
    ROS_INFO("transform from left camera to psm two:");
    cout << affine_lcamera_to_psm_two.linear() << endl;
    cout << affine_lcamera_to_psm_two.translation().transpose() << endl;

    //try grabbing needle w/ bvec_needle = -bvec_gripper
    //needlePlanner.set_grab_needle_plus_minus_z(GRASP_W_NEEDLE_NEGATIVE_GRIPPER_Z);
    //needlePlanner.compute_grasp_transform();

    double ang_to_exit, phi_x, phi_y, tilt;
    Eigen::Vector3d nvec_tissue;
    vector <int> valid_ik_samps;
    double best_phi_x = 0.0;
    double best_phi_y = 0.0;
    double best_tilt = 0.0;
    int best_drive_score = 0;
    int drive_score = 0;
    int best_run = 0;

    Eigen::VectorXi ik_ok_array(40); //added by DLC 5-26-2016; also in test_main, test_main_v2
    int ik_score=0;
    
    //while (ros::ok()) 
    {
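        //brute-force grid search over the needle-grasp angles phi_x, phi_y
        //(0..2pi in 0.1 rad steps) and the needle-axis tilt (-1.2..1.2 rad),
        //scoring each candidate by the longest run of consecutive IK-feasible
        //gripper poses along the drive path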
        cout << "entrance point at: " << entrance_pt.transpose() << endl;
        cout << "enter angle to exit point (0-2pi): ";
        cin >> ang_to_exit;

        //cout << "enter needle-grasp phi_x: ";

        //cin >> phi_x;
        for (phi_x = 0.0; phi_x < 6.28; phi_x += 0.1) {
            for (phi_y = 0.0; phi_y < 6.28; phi_y += 0.1) {
                ROS_INFO("phi_x, phi_y = %f, %f",phi_x,phi_y);
                for (tilt = -1.2; tilt < 1.21; tilt += 0.1) {
                    drive_score = 0;

                    //cout << "enter needle-grasp phi_y: ";
                    //cin >> phi_y;
                    needlePlanner.compute_grasp_transform(phi_x, phi_y);
                    //cout<< "enter tilt of needle z-axis w/rt tissue: ";
                    //cin>> tilt;
                    needlePlanner.set_psi_needle_axis_tilt_wrt_tissue(tilt);

                    //for (ang_to_exit = 0; ang_to_exit < 6.28; ang_to_exit += 1.0) 
                    {
                        //cout << "angle to exit pt on tissue surface: " << ang_to_exit << endl;
                        //cin>>ang_to_exit;
                        nvec_tissue << cos(ang_to_exit), sin(ang_to_exit), 0.0;
                        exit_pt = entrance_pt + nvec_tissue;
                        needlePlanner.compute_tissue_frame_wrt_camera(entrance_pt, exit_pt, tissue_normal);
                        gripper_affines_wrt_camera.clear();
                        gripper2_affines_wrt_camera.clear();

                        vetted_gripper_affines_wrt_camera.clear();

                        needlePlanner.compute_needle_drive_gripper_affines(gripper_affines_wrt_camera, gripper2_affines_wrt_camera, ik_ok_array, ik_score);
                        int nposes = gripper_affines_wrt_camera.size();
                        //ROS_INFO("computed %d gripper poses w/rt camera", nposes);
                        Eigen::Affine3d affine_pose, affine_gripper_wrt_base_frame, affine_gripper_wrt_base_fk;
                        Eigen::Vector3d origin_err;
                        Eigen::Matrix3d R_err;
                        Vectorq7x1 q_vec1;
                        q_vec1.resize(7);
                        valid_ik_samps.clear();
                        for (int i = 0; i < nposes; i++) {
                            if (test_debug) ROS_INFO("pose %d", i);
                            affine_pose = gripper_affines_wrt_camera[i];
                            if (test_debug) cout << affine_pose.linear() << endl;
                            if (test_debug) cout << "origin: " << affine_pose.translation().transpose() << endl;
                            affine_gripper_wrt_base_frame = affine_lcamera_to_psm_one.inverse() * affine_pose;
                            if (test_debug) ROS_INFO("pose %d w/rt PSM1 base:", i);
                            if (test_debug) cout << affine_gripper_wrt_base_frame.linear() << endl;
                            if (test_debug) cout << "origin: " << affine_gripper_wrt_base_frame.translation().transpose() << endl;
                            if (ik_solver.ik_solve(affine_gripper_wrt_base_frame)) { //convert desired pose into equiv joint displacements
                                valid_ik_samps.push_back(1);
                                drive_score++;
                                vetted_gripper_affines_wrt_camera.push_back(affine_pose); //accept this one
                                q_vec1 = ik_solver.get_soln();
                                q_vec1(6) = 0.0;
                                if (test_debug) cout << "qvec1: " << q_vec1.transpose() << endl;
                                if (test_debug) ROS_INFO("FK of IK soln: ");
                                affine_gripper_wrt_base_fk = davinci_fwd_solver.fwd_kin_solve(q_vec1);
                                if (test_debug) cout << "affine linear (R): " << endl;
                                if (test_debug) cout << affine_gripper_wrt_base_fk.linear() << endl;
                                if (test_debug) cout << "origin: ";
                                if (test_debug) cout << affine_gripper_wrt_base_fk.translation().transpose() << endl;
                                origin_err = affine_gripper_wrt_base_frame.translation() - affine_gripper_wrt_base_fk.translation();
                                R_err = affine_gripper_wrt_base_frame.linear() - affine_gripper_wrt_base_fk.linear();
                                if (test_debug) ROS_WARN("error test: %f", origin_err.norm());
                                if (test_debug) cout << "IK/FK origin err: " << origin_err.transpose() << endl;
                                if (test_debug) cout << "R err: " << endl;
                                if (test_debug) cout << R_err << endl;
                            } else {
                                if (test_debug) ROS_WARN("GRIPPER 1: NO VALID IK SOLN!!");
                                valid_ik_samps.push_back(0);
                            }
                        } //end loop through needle-drive poses
                        drive_score = runlength_ones(valid_ik_samps);
                        ROS_INFO("drive_score: %d",drive_score);
                    } //end loop ang_to_exit
                    if (drive_score > 0 && drive_score == best_drive_score) { //report ties, but skip trivial zero scores
                        best_drive_score = drive_score;
                        best_phi_x = phi_x;
                        best_phi_y = phi_y;
                        best_tilt = tilt;
                        cout << "valid samples status:" << endl;
                        for (int i = 0; i < valid_ik_samps.size(); i++) {
                            cout << valid_ik_samps[i] << "  ";
                        }
                        cout << endl;
                        ROS_INFO("phi_x, phi_y, tilt = %f, %f, %f",phi_x,phi_y,tilt);                        
                        ROS_WARN("tied best strategy; score = %d; enter 1: ",best_drive_score);

                        int ans;
                        //cin>>ans;
                        //needlePlanner.write_needle_drive_affines_to_file(vetted_gripper_affines_wrt_camera);
                    }                               
                    if (drive_score > best_drive_score) {
                        best_drive_score = drive_score;
                        best_phi_x = phi_x;
                        best_phi_y = phi_y;
                        best_tilt = tilt;
                        cout << "valid samples status:" << endl;
                        for (int i = 0; i < valid_ik_samps.size(); i++) {
                            cout << valid_ik_samps[i] << "  ";
                        }
                        cout << endl;
                        ROS_INFO("phi_x, phi_y, tilt = %f, %f, %f",phi_x,phi_y,tilt);                        
                        ROS_WARN("new best strategy; score = %d; enter 1: ",best_drive_score);

                        int ans;
                        //cin>>ans;
                        needlePlanner.write_needle_drive_affines_to_file(vetted_gripper_affines_wrt_camera);
                    }
                    if (drive_score == 21) { //21 presumably = a fully feasible drive; save these solutions to file
                        outfile << phi_x << ", " << phi_y << ", " << tilt << endl;
                    }
         
                }


            }
        }
    }
    //needlePlanner.write_needle_drive_affines_to_file(vetted_gripper_affines_wrt_camera);

    ROS_INFO("best drive score: %d", best_drive_score);
    ROS_INFO("best phi_x, phi_y, tilt = %f, %f, %f", best_phi_x, best_phi_y, best_tilt);
    outfile.close();
    return 0;

}
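
//runlength_ones(), used above to score a candidate drive, is not shown on this
//page. A plausible implementation, assuming it returns the length of the
//longest unbroken run of 1's (i.e. the most consecutive IK-feasible poses):
int runlength_ones(const vector<int> &valid_ik_samps) {
    int longest = 0;
    int run = 0;
    for (size_t i = 0; i < valid_ik_samps.size(); i++) {
        if (valid_ik_samps[i] == 1) {
            run++;
            if (run > longest) longest = run;
        } else {
            run = 0; //run broken by an infeasible pose
        }
    }
    return longest;
}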
Example #4
int do_inits() {
    Eigen::Matrix3d R;
    Eigen::Vector3d nvec, tvec, bvec, tip_pos;
    bvec << 0, 0, 1; //default for testing: gripper points "down"
    nvec << 1, 0, 0;
    tvec = bvec.cross(nvec);
    R.col(0) = nvec;
    R.col(1) = tvec;
    R.col(2) = bvec;
    g_des_gripper_affine1.linear() = R;
    tip_pos << -0.15, -0.03, 0.07;
    g_des_gripper_affine1.translation() = tip_pos; //will change this, but start w/ something legal

    //hard-coded camera-to-base transform, useful for simple testing/debugging
    g_affine_lcamera_to_psm_one.translation() << -0.155, -0.03265, 0.0;
    nvec << -1, 0, 0;
    tvec << 0, 1, 0;
    bvec << 0, 0, -1;
    //Eigen::Matrix3d R;
    R.col(0) = nvec;
    R.col(1) = tvec;
    R.col(2) = bvec;
    g_affine_lcamera_to_psm_one.linear() = R;

    g_q_vec1_start.resize(7);
    g_q_vec1_goal.resize(7);
    g_q_vec2_start.resize(7);
    g_q_vec2_goal.resize(7);

    g_des_gripper1_wrt_base = g_affine_lcamera_to_psm_one.inverse() * g_des_gripper_affine1;
    g_ik_solver.ik_solve(g_des_gripper1_wrt_base); // compute IK:
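    //note: the return value of ik_solve() is not checked here; this assumes the
    //hard-coded start pose above is reachable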
    g_q_vec1_goal = g_ik_solver.get_soln();
    g_q_vec1_start = g_q_vec1_goal;
    g_q_vec2_start << 0, 0, 0, 0, 0, 0, 0; //just put gripper 2 at home position
    g_q_vec2_goal << 0, 0, 0, 0, 0, 0, 0;
    //repackage q's into a trajectory;
    //populate a goal message for action server; 
    // initialize with 2 poses that are identical
    g_trajectory_point1.positions.clear();
    g_trajectory_point2.positions.clear();
    //resize these (14 slots = 7 joints per PSM x 2 PSMs):
    for (int i = 0; i < 14; i++) {
        g_trajectory_point1.positions.push_back(0.0); 
        g_trajectory_point2.positions.push_back(0.0); 
    }
        
    for (int i = 0; i < 7; i++) { 
        g_trajectory_point1.positions[i] = g_q_vec1_start[i];
        g_trajectory_point1.positions[i + 7] = g_q_vec2_start[i];
        //should fix up jaw-opening values...do this later
    }

    g_trajectory_point1.time_from_start = ros::Duration(0.0);
    g_trajectory_point2.time_from_start = ros::Duration(2.0);


    g_des_trajectory.points.clear(); // can clear components, but not entire trajectory_msgs
    g_des_trajectory.joint_names.clear();
    //    des_trajectory.joint_names.push_back("right_s0"); //yada-yada should fill in names
    g_des_trajectory.header.stamp = ros::Time::now();

    g_des_trajectory.points.push_back(g_trajectory_point1); // first point of the trajectory 
    g_des_trajectory.points.push_back(g_trajectory_point2);

    // copy traj to goal:
    g_goal.trajectory = g_des_trajectory;
    g_got_new_pose = true; //send robot to start pose
    
     ROS_INFO("getting transforms from camera to PSMs");
    tf::TransformListener tfListener;
    tf::StampedTransform tfResult_one, tfResult_two;
    bool tferr = true;
    int ntries = 0;
    ROS_INFO("waiting for tf between base and right_hand...");
    while (tferr) {
        if (ntries > 5) break; //give up and accept default after this many tries
        tferr = false;
        try {
            //The direction of the transform returned will be from the target_frame to the source_frame. 
            //Which if applied to data, will transform data in the source_frame into the target_frame. See tf/CoordinateFrameConventions#Transform_Direction
            tfListener.lookupTransform("left_camera_optical_frame", "one_psm_base_link", ros::Time(0), tfResult_one);
            //tfListener.lookupTransform("left_camera_optical_frame", "two_psm_base_link", ros::Time(0), tfResult_two);
        } catch (tf::TransformException &exception) {
            ROS_WARN("%s", exception.what());
            tferr = true;
            ros::Duration(0.5).sleep(); // sleep for half a second
            ros::spinOnce();
            ntries++;
        }
    }
    //default transform: need to match this up to camera calibration!
    if (tferr) {
        g_affine_lcamera_to_psm_one.translation() << -0.155, -0.03265, 0.0;
        Eigen::Vector3d nvec, tvec, bvec;
        nvec << -1, 0, 0;
        tvec << 0, 1, 0;
        bvec << 0, 0, -1;
        Eigen::Matrix3d R;
        R.col(0) = nvec;
        R.col(1) = tvec;
        R.col(2) = bvec;
        g_affine_lcamera_to_psm_one.linear() = R;
        //(the psm_two lookup is commented out above, so no default is set for it here)
        ROS_WARN("using default transform");
    } else {

        ROS_INFO("tf is good");

        //g_affine_lcamera_to_psm_one is the pose of the psm1 base frame w/rt the left camera optical frame
        //(the lookup above already targets left_camera_optical_frame)
        g_affine_lcamera_to_psm_one = transformTFToEigen(tfResult_one);
        //affine_lcamera_to_psm_two = transformTFToEigen(tfResult_two);
    }
    ROS_INFO("transform from left camera to psm one:");
    cout << g_affine_lcamera_to_psm_one.linear() << endl;
    cout << g_affine_lcamera_to_psm_one.translation().transpose() << endl;
    //ROS_INFO("transform from left camera to psm two:");
    //cout << affine_lcamera_to_psm_two.linear() << endl;
    //cout << affine_lcamera_to_psm_two.translation().transpose() << endl;    
    
    
    ROS_INFO("done w/ inits");

    return 0;
    
}
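
//do_inits() leaves a two-point trajectory in g_goal but does not send it. A
//hedged sketch of how a caller might dispatch it, assuming g_goal is (or is
//shaped like) a control_msgs::FollowJointTrajectoryGoal and that a compatible
//action server is running; the actual action type behind these globals is not
//shown on this page, and the server name below is hypothetical:
#include <actionlib/client/simple_action_client.h>
#include <control_msgs/FollowJointTrajectoryAction.h>

void send_trajectory_goal() {
    static actionlib::SimpleActionClient<control_msgs::FollowJointTrajectoryAction>
            ac("davinci_traj_action", true); //hypothetical server name; spin in own thread
    ac.waitForServer(); //block until the action server is up
    ac.sendGoal(g_goal);
    //the second trajectory point is timestamped at 2.0 s; wait a bit longer than that
    ac.waitForResult(ros::Duration(4.0));
}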