Code Example #1
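A minimal test of the needle-planner library: it prompts for the center (x, y, z) and radius of a circular test path w/rt the camera frame, computes gripper poses along that circle with simple_test_gripper_motion(), and writes them to gripper_poses_in_camera_coords.csp for playback via playfile_reader.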
//example use of needle-planner library
int main(int argc, char** argv) 
{
    // ROS set-ups:
    ros::init(argc, argv, "needle_planner_test_main"); //node name

    ros::NodeHandle nh; // create a node handle; need to pass this to the class constructor
        
    double x,y,z,r;
    cout<<"creating test circular motion; enter coords of center of circle, w/rt camera."<<endl;
    cout<<"enter x (e.g. -0.1):";
    cin>>x;
    cout<<"enter y (e.g. 0.0):";
    cin>>y;
    cout<<"enter z (e.g. 0.1):";
    cin>>z;
    cout<<"enter radius of circular path (e.g. 0.01):";
    cin>>r;
    //Eigen::Vector3d entrance_pt,exit_pt,tissue_normal;
    //tissue_normal<<0,0,-1; //antiparallel to optical axis
    //entrance_pt<<0.0,0.1,0.1; //100mm under camera; slightly forward, to avoid jnt lims should be OK
    //exit_pt<<0.01,0.1,0.1; // exit pt is shifted along camera-frame +x axis relative to entrance pt
    vector <Eigen::Affine3d> gripper_affines_wrt_camera;  //put answers here  

    ROS_INFO("main: instantiating an object of type NeedlePlanner");
    NeedlePlanner needlePlanner;  
    
    //compute the path:
    //    void simple_test_gripper_motion(double x, double y, double z, double r,vector <Eigen::Affine3d> &gripper_affines_wrt_camera);
    needlePlanner.simple_test_gripper_motion(x,y,z,r,gripper_affines_wrt_camera);
    //write these to file--will be called "gripper_poses_in_camera_coords.csp"
    needlePlanner.write_needle_drive_affines_to_file(gripper_affines_wrt_camera);
    ROS_INFO("try executing this path with: ");
    ROS_INFO("rosrun playfile_reader playfile_cameraspace gripper_poses_in_camera_coords.csp");
    return 0;
} 
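The include block is not shown in this excerpt. Roughly the following headers are needed to compile this first test program; the needle-planner include path is an assumption and should be adjusted to wherever NeedlePlanner is actually declared in your workspace:

// Assumed include block (sketch only):
#include <ros/ros.h>
#include <Eigen/Eigen>
#include <iostream>
#include <vector>
//#include <needle_planner/needle_planner.h>  // assumed path to the NeedlePlanner declaration
using namespace std;

The later examples additionally use <fstream>, <tf/transform_listener.h>, the geometry_msgs and std_msgs message headers, and the Davinci forward/IK kinematics headers, whose names are likewise not shown here.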
Code Example #2
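This variant sets the camera-to-PSM transforms from globals filled in by init_poses(), prompts for the needle-center coordinates and kvec_yaw, generates the needle-drive poses with simple_horiz_kvec_motion(), prints each pose, and writes them to file.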
int main(int argc, char** argv) 
{
    // ROS set-ups:
    ros::init(argc, argv, "needle_planner_test_main"); //node name

    ros::NodeHandle nh; // create a node handle; need to pass this to the class constructor
    
    init_poses();
    ROS_INFO("main: instantiating an object of type NeedlePlanner");
    NeedlePlanner needlePlanner;  
    needlePlanner.set_affine_lcamera_to_psm_one(g_affine_lcamera_to_psm_one);
    needlePlanner.set_affine_lcamera_to_psm_two(g_affine_lcamera_to_psm_two);     
    
    Eigen::Vector3d O_needle;
    O_needle<< 0.0,0.0,0.092; //we can hard-code O_needle(2) (z-value) for known tissue height
    double r_needle = 0.012; //0.0254/2.0;
    double kvec_yaw = 0.0;
    cout<<"enter kvec_yaw: (e.g. 0-2pi): ";
    cin>>kvec_yaw;
    vector <Eigen::Affine3d> gripper_affines_wrt_camera;  //put answers here  
    double needle_x,needle_y,needle_z;
    cout<<"enter needle center x coord (e.g. 0): ";
    cin>>needle_x;
    cout<<"enter needle center y coord (e.g. 0): ";
    cin>>needle_y;  
    cout<<"enter needle center z coord (e.g. 0.112): ";
    cin>>needle_z;      
    O_needle(0) = needle_x;
    O_needle(1) = needle_y;
    O_needle(2) = needle_z;
    
  
    //compute the tissue frame in camera coords, based on point-cloud selections:
    
    needlePlanner.simple_horiz_kvec_motion(O_needle, r_needle, kvec_yaw, gripper_affines_wrt_camera);
    int nposes = gripper_affines_wrt_camera.size();
    ROS_INFO("computed %d gripper poses w/rt camera",nposes);
    Eigen::Affine3d affine_pose;
    for (int i=0;i<nposes;i++) {
        ROS_INFO("pose %d",i);
        cout<<gripper_affines_wrt_camera[i].linear()<<endl;
        cout<<"origin: "<<gripper_affines_wrt_camera[i].translation().transpose()<<endl;
    }
    needlePlanner.write_needle_drive_affines_to_file(gripper_affines_wrt_camera);
    
    return 0;
} 
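init_poses() and the globals g_affine_lcamera_to_psm_one and g_affine_lcamera_to_psm_two are defined outside this excerpt. A minimal sketch, assuming it merely installs the same default camera-to-PSM transforms that Code Example #4 falls back to when tf is unavailable:

// Sketch only (assumed): globals plus an init_poses() that installs the default
// left-camera-to-PSM transforms used as the tf fallback in Code Example #4.
Eigen::Affine3d g_affine_lcamera_to_psm_one, g_affine_lcamera_to_psm_two;

void init_poses() {
    Eigen::Matrix3d R;
    Eigen::Vector3d nvec, tvec, bvec;
    nvec << -1, 0, 0;
    tvec << 0, 1, 0;
    bvec << 0, 0, -1;
    R.col(0) = nvec;
    R.col(1) = tvec;
    R.col(2) = bvec;
    g_affine_lcamera_to_psm_one = Eigen::Affine3d::Identity();
    g_affine_lcamera_to_psm_one.linear() = R;
    g_affine_lcamera_to_psm_one.translation() << -0.155, -0.03265, 0.0;
    g_affine_lcamera_to_psm_two = Eigen::Affine3d::Identity();
    g_affine_lcamera_to_psm_two.linear() = R;
    g_affine_lcamera_to_psm_two.translation() << 0.145, -0.03265, 0.0;
}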
Code Example #3
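An interactive node: entry and exit points arrive on ROS topics, and whenever a new pair is available the needle center and kvec_yaw are derived from them, the drive poses are recomputed with simple_horiz_kvec_motion(), printed, and written to file.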
int main(int argc, char** argv) {
    // ROS set-ups:
    ros::init(argc, argv, "needle_planner_test_main"); //node name

    ros::NodeHandle nh; // create a node handle; need to pass this to the class constructor

    ros::Subscriber pt_subscriber = nh.subscribe("/entrance_and_exit_pts", 1, inPointsCallback);
    ros::Subscriber entry_pt_subscriber = nh.subscribe("/thePoint", 1, entryPointCallback);

    ros::Subscriber exit_marker_subscriber = nh.subscribe("/final_point_marker",1,exitMarkerCallback);
//  ros::Publisher finalPointPub = nh.advertise<visualization_msgs::Marker>("/final_point_marker", 0);

    init_poses();

    ROS_INFO("main: instantiating an object of type NeedlePlanner");
    NeedlePlanner needlePlanner;
    needlePlanner.set_affine_lcamera_to_psm_one(g_affine_lcamera_to_psm_one);
    needlePlanner.set_affine_lcamera_to_psm_two(g_affine_lcamera_to_psm_two);

    Eigen::Vector3d O_needle;
    double r_needle = DEFAULT_NEEDLE_RADIUS; //0.012; //0.0254/2.0;
    Eigen::Vector3d in_to_out_vec;
    double kvec_yaw;



    //cout<<"enter kvec_yaw: (e.g. 0-2pi): ";
    //cin>>kvec_yaw;
    /*
    double needle_x,needle_y,needle_z;
    cout<<"enter needle center x coord (e.g. 0): ";
    cin>>needle_x;
    cout<<"enter needle center y coord (e.g. 0): ";
    cin>>needle_y;
    cout<<"enter needle center z coord (e.g. 0.112): ";
    cin>>needle_z;
    O_needle(0) = needle_x;
    O_needle(1) = needle_y;
    O_needle(2) = needle_z;
     */
    Eigen::Affine3d affine_pose;
    vector <Eigen::Affine3d> gripper_affines_wrt_camera;  //put answers here

    ROS_INFO("entering loop...");
    while (ros::ok()) {
        if (g_got_new_points) {
            g_got_new_points = false;
            //compute O_needle from entry and exit points:
            O_needle = 0.5 * (g_O_entry_point + g_O_exit_point);
            O_needle(2) -= DEFAULT_NEEDLE_AXIS_HT;


            in_to_out_vec = g_O_exit_point - g_O_entry_point;
            //vector from entry to exit is 90-deg away from needle z-axis, so add pi/2
            kvec_yaw = atan2(in_to_out_vec(1), in_to_out_vec(0))+M_PI/2.0;
            ROS_INFO("using kvec_yaw = %f",kvec_yaw);

            //compute the tissue frame in camera coords, based on point-cloud selections:

            needlePlanner.simple_horiz_kvec_motion(O_needle, r_needle, kvec_yaw, gripper_affines_wrt_camera);
            int nposes = gripper_affines_wrt_camera.size();
            ROS_INFO("computed %d gripper poses w/rt camera", nposes);

            for (int i = 0; i < nposes; i++) {
                ROS_INFO("pose %d", i);
                cout << gripper_affines_wrt_camera[i].linear() << endl;
                cout << "origin: " << gripper_affines_wrt_camera[i].translation().transpose() << endl;
            }
            needlePlanner.write_needle_drive_affines_to_file(gripper_affines_wrt_camera);
        }

        ros::spinOnce();
        ros::Duration(0.01).sleep();
    }
    return 0;
}
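The globals (g_got_new_points, g_O_entry_point, g_O_exit_point) and the callbacks registered above are defined outside this excerpt. A rough sketch of one plausible shape for them, with the message type on /thePoint assumed to be geometry_msgs::Point:

// Sketch only (assumed): shared state set by the subscriber callbacks.
Eigen::Vector3d g_O_entry_point, g_O_exit_point;
bool g_got_new_points = false;

// Assumed message type; the real /thePoint publisher may differ.
void entryPointCallback(const geometry_msgs::Point& pt) {
    g_O_entry_point << pt.x, pt.y, pt.z;  // entry point selected in the camera frame
}

// g_got_new_points would be set true once both entry and exit points are available,
// e.g. inside inPointsCallback / exitMarkerCallback (not sketched here).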
Code Example #4
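A grasp-strategy search: for a user-chosen entrance point and exit direction, it sweeps the grasp angles phi_x and phi_y and the needle-axis tilt, runs IK on every candidate needle-drive pose, scores each strategy by the longest run of consecutively reachable poses, writes the poses of each new best strategy to file, and logs parameter sets that reach a score of 21 to best_poses.dat.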
int main(int argc, char** argv) {
    // ROS set-ups:
    ros::init(argc, argv, "needle_planner_test_main"); //node name

    ros::NodeHandle nh; // create a node handle; need to pass this to the class constructor
    Eigen::Vector3d entrance_pt, exit_pt, tissue_normal;
    tissue_normal << 0, 0, -1; //antiparallel to optical axis
    entrance_pt << -0.1, 0.05, 0.1; //100mm under camera; slightly forward, to avoid jnt lims should be OK
    exit_pt << -0.09, 0.05, 0.1; // exit pt is shifted along camera-frame +x axis relative to entrance pt
    vector <Eigen::Affine3d> gripper_affines_wrt_camera; //put answers here 
    vector <Eigen::Affine3d> gripper2_affines_wrt_camera;
    
    vector <Eigen::Affine3d> vetted_gripper_affines_wrt_camera;

    ROS_INFO("instantiating  forward solver and an ik_solver");
    Davinci_fwd_solver davinci_fwd_solver; //instantiate a forward-kinematics solver    
    Davinci_IK_solver ik_solver;
    
    ofstream outfile;    
    outfile.open ("best_poses.dat");

    ROS_INFO("main: instantiating an object of type NeedlePlanner");
    NeedlePlanner needlePlanner;


    //compute the tissue frame in camera coords, based on point-cloud selections:
    needlePlanner.compute_tissue_frame_wrt_camera(entrance_pt, exit_pt, tissue_normal);
    //optional: manually set the needle grasp pose, else accept default from constructor
    //needlePlanner.set_affine_needle_frame_wrt_gripper_frame(affine);
    //optional: set the initial needle frame w/rt tissue frame (or accept default in constructor)
    //needlePlanner.set_affine_needle_frame_wrt_tissue(Eigen::Affine3d affine)
    // given grasp transform, tissue transform and initial needle pose w/rt tissue,
    // compute needle-drive path as rotation about needle-z axis:

    ROS_INFO("getting transforms from camera to PSMs");
    tf::TransformListener tfListener;
    tf::StampedTransform tfResult_one, tfResult_two;
    Eigen::Affine3d affine_lcamera_to_psm_one, affine_lcamera_to_psm_two, affine_gripper_wrt_base;
    bool tferr = true;
    int ntries = 0;
    ROS_INFO("waiting for tf between base and right_hand...");
    while (tferr) {
        if (ntries > 5) break; //give up and accept default after this many tries
        tferr = false;
        try {
            //The direction of the transform returned will be from the target_frame to the source_frame. 
            //Which if applied to data, will transform data in the source_frame into the target_frame. See tf/CoordinateFrameConventions#Transform_Direction
            tfListener.lookupTransform("left_camera_optical_frame", "one_psm_base_link", ros::Time(0), tfResult_one);
            tfListener.lookupTransform("left_camera_optical_frame", "two_psm_base_link", ros::Time(0), tfResult_two);
        } catch (tf::TransformException &exception) {
            ROS_WARN("%s", exception.what());
            tferr = true;
            ros::Duration(0.5).sleep(); // sleep for half a second
            ros::spinOnce();
            ntries++;
        }
    }
    //default transform: need to match this up to camera calibration!
    if (tferr) {
        affine_lcamera_to_psm_one.translation() << -0.155, -0.03265, 0.0;
        Eigen::Vector3d nvec, tvec, bvec;
        nvec << -1, 0, 0;
        tvec << 0, 1, 0;
        bvec << 0, 0, -1;
        Eigen::Matrix3d R;
        R.col(0) = nvec;
        R.col(1) = tvec;
        R.col(2) = bvec;
        affine_lcamera_to_psm_one.linear() = R;
        affine_lcamera_to_psm_two.linear() = R;
        affine_lcamera_to_psm_two.translation() << 0.145, -0.03265, 0.0;
        ROS_WARN("using default transform");
    } else {

        ROS_INFO("tf is good");

        //affine_lcamera_to_psm_one is the position/orientation of psm1 base frame w/rt left camera link frame
        // need to extend this to camera optical frame
        affine_lcamera_to_psm_one = transformTFToEigen(tfResult_one);
        affine_lcamera_to_psm_two = transformTFToEigen(tfResult_two);
    }
    ROS_INFO("transform from left camera to psm one:");
    cout << affine_lcamera_to_psm_one.linear() << endl;
    cout << affine_lcamera_to_psm_one.translation().transpose() << endl;
    ROS_INFO("transform from left camera to psm two:");
    cout << affine_lcamera_to_psm_two.linear() << endl;
    cout << affine_lcamera_to_psm_two.translation().transpose() << endl;

    //try grabbing needle w/ bvec_needle = -bvec_gripper
    //needlePlanner.set_grab_needle_plus_minus_z(GRASP_W_NEEDLE_NEGATIVE_GRIPPER_Z);
    //needlePlanner.compute_grasp_transform();

    double ang_to_exit, phi_x, phi_y, tilt;
    Eigen::Vector3d nvec_tissue;
    vector <int> valid_ik_samps;
    double best_phi_x = 0.0;
    double best_phi_y = 0.0;
    double best_tilt = 0.0;
    int best_drive_score = 0;
    int drive_score = 0;
    int best_run = 0;

    Eigen::VectorXi ik_ok_array(40); //Added by DLC 5-26-2016; also in test_main, test_main_v2
    int ik_score=0;
    
    //while (ros::ok()) 
    {
        cout << "entrance point at: " << entrance_pt.transpose() << endl;
        cout << "enter angle to exit point (0-2pi): ";
        cin >> ang_to_exit;

        //cout << "enter needle-grasp phi_x: ";

        //cin >> phi_x;
        for (phi_x = 0.0; phi_x < 6.28; phi_x += 0.1) {
            for (phi_y = 0.0; phi_y < 6.28; phi_y += 0.1) {
                ROS_INFO("phi_x, phi_y = %f, %f",phi_x,phi_y);
                for (tilt = -1.2; tilt < 1.21; tilt += 0.1) {
                    drive_score = 0;

                    //cout << "enter needle-grasp phi_y: ";
                    //cin >> phi_y;
                    needlePlanner.compute_grasp_transform(phi_x, phi_y);
                    //cout<< "enter tilt of needle z-axis w/rt tissue: ";
                    //cin>> tilt;
                    needlePlanner.set_psi_needle_axis_tilt_wrt_tissue(tilt);

                    //for (ang_to_exit = 0; ang_to_exit < 6.28; ang_to_exit += 1.0) 
                    {
                        //cout << "angle to exit pt on tissue surface: " << ang_to_exit << endl;
                        //cin>>ang_to_exit;
                        nvec_tissue << cos(ang_to_exit), sin(ang_to_exit), 0.0;
                        exit_pt = entrance_pt + nvec_tissue;
                        needlePlanner.compute_tissue_frame_wrt_camera(entrance_pt, exit_pt, tissue_normal);
                        gripper_affines_wrt_camera.clear();
                        gripper2_affines_wrt_camera.clear();

                        vetted_gripper_affines_wrt_camera.clear();

                        needlePlanner.compute_needle_drive_gripper_affines(gripper_affines_wrt_camera, gripper2_affines_wrt_camera, ik_ok_array, ik_score);
                        int nposes = gripper_affines_wrt_camera.size();
                        //ROS_INFO("computed %d gripper poses w/rt camera", nposes);
                        Eigen::Affine3d affine_pose, affine_gripper_wrt_base_frame, affine_gripper_wrt_base_fk;
                        Eigen::Vector3d origin_err;
                        Eigen::Matrix3d R_err;
                        Vectorq7x1 q_vec1;
                        q_vec1.resize(7);
                        valid_ik_samps.clear();
                        for (int i = 0; i < nposes; i++) {
                            if (test_debug) ROS_INFO("pose %d", i);
                            affine_pose = gripper_affines_wrt_camera[i];
                            if (test_debug) cout << affine_pose.linear() << endl;
                            if (test_debug) cout << "origin: " << affine_pose.translation().transpose() << endl;
                            affine_gripper_wrt_base_frame = affine_lcamera_to_psm_one.inverse() * affine_pose;
                            if (test_debug) ROS_INFO("pose %d w/rt PSM1 base:", i);
                            if (test_debug) cout << affine_gripper_wrt_base_frame.linear() << endl;
                            if (test_debug) cout << "origin: " << affine_gripper_wrt_base_frame.translation().transpose() << endl;
                            if (ik_solver.ik_solve(affine_gripper_wrt_base_frame)) { //convert desired pose into equiv joint displacements
                                valid_ik_samps.push_back(1);
                                drive_score++;
                                vetted_gripper_affines_wrt_camera.push_back(affine_pose); //accept this one
                                q_vec1 = ik_solver.get_soln();
                                q_vec1(6) = 0.0;
                                if (test_debug) cout << "qvec1: " << q_vec1.transpose() << endl;
                                if (test_debug) ROS_INFO("FK of IK soln: ");
                                affine_gripper_wrt_base_fk = davinci_fwd_solver.fwd_kin_solve(q_vec1);
                                if (test_debug) cout << "affine linear (R): " << endl;
                                if (test_debug) cout << affine_gripper_wrt_base_fk.linear() << endl;
                                if (test_debug) cout << "origin: ";
                                if (test_debug) cout << affine_gripper_wrt_base_fk.translation().transpose() << endl;
                                origin_err = affine_gripper_wrt_base_frame.translation() - affine_gripper_wrt_base_fk.translation();
                                R_err = affine_gripper_wrt_base_frame.linear() - affine_gripper_wrt_base_fk.linear();
                                if (test_debug) ROS_WARN("error test: %f", origin_err.norm());
                                if (test_debug) cout << "IK/FK origin err: " << origin_err.transpose() << endl;
                                if (test_debug) cout << "R err: " << endl;
                                if (test_debug) cout << R_err << endl;
                            } else {
                                if (test_debug) ROS_WARN("GRIPPER 1: NO VALID IK SOLN!!");
                                valid_ik_samps.push_back(0);
                            }
                        } //end loop through needle-drive poses
                        drive_score = runlength_ones(valid_ik_samps);
                        ROS_INFO("drive_score: %d",drive_score);
                    } //end loop ang_to_exit
                    if (drive_score == best_drive_score) {
                        best_drive_score = drive_score;
                        best_phi_x = phi_x;
                        best_phi_y = phi_y;
                        best_tilt = tilt;
                        cout << "valid samples status:" << endl;
                        for (int i = 0; i < valid_ik_samps.size(); i++) {
                            cout << valid_ik_samps[i] << "  ";
                        }
                        cout << endl;
                        ROS_INFO("phi_x, phi_y, tilt = %f, %f, %f",phi_x,phi_y,tilt);                        
                        ROS_WARN("tied best strategy; score = %d; enter 1: ",best_drive_score);

                        int ans;
                        //cin>>ans;
                        //needlePlanner.write_needle_drive_affines_to_file(vetted_gripper_affines_wrt_camera);
                    }                               
                    if (drive_score > best_drive_score) {
                        best_drive_score = drive_score;
                        best_phi_x = phi_x;
                        best_phi_y = phi_y;
                        best_tilt = tilt;
                        cout << "valid samples status:" << endl;
                        for (int i = 0; i < valid_ik_samps.size(); i++) {
                            cout << valid_ik_samps[i] << "  ";
                        }
                        cout << endl;
                        ROS_INFO("phi_x, phi_y, tilt = %f, %f, %f",phi_x,phi_y,tilt);                        
                        ROS_WARN("new best strategy; score = %d; enter 1: ",best_drive_score);

                        int ans;
                        //cin>>ans;
                        needlePlanner.write_needle_drive_affines_to_file(vetted_gripper_affines_wrt_camera);
                    }
                    if (drive_score==21) { // save best solns to file
                        outfile<<phi_x<<", "<<phi_y<<", "<<tilt<<endl;
                    }
         
                }


            }
        }
    }
    //needlePlanner.write_needle_drive_affines_to_file(vetted_gripper_affines_wrt_camera);

    ROS_INFO("best drive score: %d", best_drive_score);
    ROS_INFO("best phi_x, phi_y, tilt = %f, %f, %f", best_phi_x, best_phi_y, best_tilt);
    outfile.close();
    return 0;

}
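runlength_ones() is not shown in this listing; from its use above, drive_score is the longest run of consecutive IK-reachable poses. A minimal sketch consistent with that usage:

// Sketch only: longest run of consecutive 1s in a vector of 0/1 validity flags.
#include <vector>
int runlength_ones(const std::vector<int>& valid_flags) {
    int best = 0, run = 0;
    for (size_t i = 0; i < valid_flags.size(); i++) {
        run = (valid_flags[i] == 1) ? run + 1 : 0;  // extend or reset the current run
        if (run > best) best = run;
    }
    return best;
}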
Code Example #5
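An exit-point search node: for each entry point received on /thePoint, it sweeps kvec_yaw around a full circle, plans a needle drive toward each candidate exit point, appends the scored entry/exit coordinates to entry_exit_points_coords.txt, and publishes the acceptable exit points and their scores for rviz display. A knot-tying motion sequence is included but commented out.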
int main(int argc, char** argv) {
    // ROS set-ups:
    ros::init(argc, argv, "needle_planner_test_main"); //node name
    ros::NodeHandle nh; // create a node handle; need to pass this to the class constructor
    ros::Publisher exit_pt_publisher = nh.advertise<geometry_msgs::Point>("exit_points", 1);
    //ros::Publisher exit_pt_score_publisher = nh.advertise<geometry_msgs::Polygon>("exit_points_score", 1);
    ros::Publisher exit_pt_score_publisher = nh.advertise<std_msgs::Int32MultiArray>("exit_points_score", 1);
    ros::Publisher exit_pt_array_publisher = nh.advertise<geometry_msgs::Polygon>("exit_point_array", 1);    
    ros::Subscriber thePoint = nh.subscribe("/thePoint", 1, inPointCallback);

    Eigen::Vector3d O_needle;
    Eigen::Vector3d O_entrance_pt;
    Eigen::Vector3d O_exit_pt;
    double x_entrance, y_entrance;
    geometry_msgs::Polygon polygon_msg;
    
    // Added by DLC
    //geometry_msgs::Polygon exit_pts_score;
    std_msgs::Int32MultiArray exit_pts_score;
    ofstream outfile;
  
 
    /*
    cout<<"using tissue z = "<<z_tissue<<endl; 

    O_needle(2) = z_tissue - needle_height_above_tissue; // closer to camera, so smaller z
    
    cout<<"enter entrance pt x (e.g. -0.02): ";
    cin>>x_entrance;
    O_entrance_pt(0) = x_entrance;
    cout<<"enter entrance pt y (e.g. 0.01): ";
    cin>>y_entrance;
    O_entrance_pt(1) = y_entrance;
    O_entrance_pt(2) = z_tissue;
    cout<<"O_entrance_pt = "<<O_entrance_pt.transpose()<<endl;
   
    //O_needle<< -0.02,0.004,0.1578; //we can hard-code O_needle(2) (z-value) for known tissue height
     */

    vector <Eigen::Affine3d> gripper_affines_wrt_camera; //put answers here
    vector <Eigen::Affine3d> gripper2_affines_wrt_camera;
    
    Eigen::Vector3d gripper1_motion;
    Eigen::Vector3d gripper2_motion;
  
   
    Eigen::VectorXi ik_ok_array(40); //Added by DLC 5-26-2016; also in test_main, test_main_v2
    int ik_score=0;

    vector <geometry_msgs::Point> exit_points;
    geometry_msgs::Point exitPoint;
    geometry_msgs::Point32 p32;
   
    // Added by DLC
    //geometry_msgs::Point32 pscore;

    double needle_x, needle_y, needle_x2, needle_y2;

    Eigen::Vector3d v_entrance_to_exit, v_entrance_to_exit0, tissue_normal;
    
    tissue_normal << 0, 0, -1;
    
    v_entrance_to_exit0 << 0, -1, 0; // anti-parallel to camera Y when kvec_yaw is zero
  

    ROS_INFO("main: instantiating an object of type NeedlePlanner");
    NeedlePlanner needlePlanner;
    init_poses();
     // set camera-to-base transforms:
    needlePlanner.set_affine_lcamera_to_psm_one(g_affine_lcamera_to_psm_one);
    needlePlanner.set_affine_lcamera_to_psm_two(g_affine_lcamera_to_psm_two);   
    
    while (ros::ok()) {

       
        if (g_got_new_entry_point) {
            g_got_new_entry_point = false;

            // For needle driving: set needle_x to the desired grasp angle, then sweep kvec_yaw
            // to evaluate candidate exit points around the entry point.
            for (needle_x = 0; needle_x < 0.1; needle_x += 0.7854) {

                ROS_WARN("file opened");
                outfile.open("entry_exit_points_coords.txt", std::ofstream::out | std::ofstream::app);

                polygon_msg.points.clear();
                exit_pts_score.data.clear();

                double kvec_yaw = 0.0; // rotation of needle z-axis w/rt camera x-axis

                O_entrance_pt = g_O_entry_point;

                //compute the tissue frame in camera coords, based on point-cloud selections;
                //for a quick test sweep kvec_yaw only up to 0.1, normally up to 6.28:
                for (kvec_yaw = 0; kvec_yaw < 6.28; kvec_yaw += 0.1) {
                    v_entrance_to_exit = needlePlanner.Rotz(kvec_yaw) * v_entrance_to_exit0; //rotate the needle axis about camera z-axis
                    O_exit_pt = O_entrance_pt + d_to_exit_pt * v_entrance_to_exit;
                    O_needle = 0.5 * (O_exit_pt + O_entrance_pt);
                    O_needle(2) -= needle_height_above_tissue;
                    cout << "O_entrance_pt = " << O_entrance_pt.transpose() << endl;
                    cout << "O_needle = " << O_needle.transpose() << endl;
                    cout << "O_exit_pt = " << O_exit_pt.transpose() << endl;

                    gripper_affines_wrt_camera.clear();
                    gripper2_affines_wrt_camera.clear();

                    // needle_x = 0;
                    needle_y = 0;

                    ik_score = 0;

                    needlePlanner.compute_grasp_transform(needle_x, needle_y);

                    needle_x2 = 3.14;
                    needle_y2 = 0;

                    needlePlanner.compute_grasp2_transform(needle_x2, needle_y2);

                    needlePlanner.compute_tissue_frame_wrt_camera(O_entrance_pt, O_exit_pt, tissue_normal);

                    // needlePlanner.compute_needle_drive_gripper_affines computes the circular needle-drive path
                    needlePlanner.compute_needle_drive_gripper_affines(gripper_affines_wrt_camera, gripper2_affines_wrt_camera, ik_ok_array, ik_score);

                    // Dr. Newman's needlePlanner.simple_horiz_kvec_motion handles the special case needle_x = 0, needle_y = 0
                    // Added by DLC 5-26-2016; also in test_main, test_main_v3
                    // needlePlanner.simple_horiz_kvec_motion(O_needle, r_needle, kvec_yaw, gripper_affines_wrt_camera, ik_ok_array, ik_score);

                    int nposes = gripper_affines_wrt_camera.size();
                    cout << "ik_ok_point:: " << ik_ok_array.transpose() << endl;
                    cout << "score:: " << ik_score << endl;
                    ROS_WARN("at kvec_yaw = %f, computed %d needle-drive gripper poses, score %d ", kvec_yaw, nposes, ik_score);

                    if ((nposes >= g_npts_good) && (ik_score > 0)) {
                        exitPoint.x = O_exit_pt(0);
                        exitPoint.y = O_exit_pt(1);
                        exitPoint.z = O_exit_pt(2);
                        p32.x = O_exit_pt(0);
                        p32.y = O_exit_pt(1);
                        p32.z = O_exit_pt(2);

                        outfile << ik_score << ", " << O_entrance_pt(0) << ", " << O_entrance_pt(1) << ", " << O_exit_pt(0) << ", " << O_exit_pt(1) << endl;

                        exit_pt_publisher.publish(exitPoint);
                        polygon_msg.points.push_back(p32);
                        exit_pts_score.data.push_back(ik_score);
                        exit_points.push_back(exitPoint); //not used...
                    }
                    // From here on: knot-tying gripper motions w/rt tissue (left commented out)
                    /*
                    ik_score = 741.0;
                    exit_pts_score.data.push_back(ik_score); 
                    exit_pt_score_publisher.publish(exit_pts_score);
                  
                    exitPoint.x = O_exit_pt(0);
                    exitPoint.y = O_exit_pt(1);
                    exitPoint.z = O_exit_pt(2);
                    p32.x = O_exit_pt(0);
                    p32.y = O_exit_pt(1);
                    p32.z = O_exit_pt(2);
                   
                    polygon_msg.points.push_back(p32);
                    exit_points.push_back(exitPoint);
                   
                    exit_pt_publisher.publish(exitPoint);
                    exit_pt_array_publisher.publish(polygon_msg);
                    
                    gripper1_motion<<0.0,0.0,0.008;
                    gripper2_motion<<-0.005,0.004,0.008;
                    
                    double dt=2.0;
                       
                    needlePlanner.compute_knottying_gripper_frame_wrt_camera(gripper1_motion, gripper2_motion, dt);
                                                        
                    for(int kstep = 0; kstep < 10; kstep +=1){
                       
                       gripper1_motion(0) +=0.005;
                       gripper1_motion(1) = 0.0;
                       gripper1_motion(2) = 0.008;
                       dt += 0.2;
                       
                       needlePlanner.compute_knottying_gripper_frame_wrt_camera(gripper1_motion, gripper2_motion, dt);
                       
                                           
                    }  
                     
                     for(int kstep = 0; kstep < 9; kstep +=1){
                       
                       gripper2_motion(0) +=0.005;
                       gripper2_motion(1)  = -0.006;
                       gripper2_motion(2)  =  0.008;
                       dt += 0.2;
                       
                       needlePlanner.compute_knottying_gripper_frame_wrt_camera(gripper1_motion, gripper2_motion, dt);
                       
                                           
                    }  
                
                   for(int kstep = 0; kstep < 3; kstep +=1){
                       
                       gripper1_motion(2) += 0.004;
                       dt += 0.2;
                       
                       needlePlanner.compute_knottying_gripper_frame_wrt_camera(gripper1_motion, gripper2_motion, dt);
                       
                                           
                    }  

                   for(int kstep = 0; kstep < 5; kstep +=1){
                       
                       gripper1_motion(0) -= 0.004;
                                         
                       dt += 0.2;
                       
                       needlePlanner.compute_knottying_gripper_frame_wrt_camera(gripper1_motion, gripper2_motion, dt);
                       
                                           
                    }  
                   
                   for(int kstep = 0; kstep < 3; kstep +=1){
                       
                       gripper1_motion(2) -= 0.004;
                       dt += 0.2;
                       
                       needlePlanner.compute_knottying_gripper_frame_wrt_camera(gripper1_motion, gripper2_motion, dt);
                       
                                           
                    }  


                   for(int kstep = 0; kstep < 3; kstep +=1){
                       
                       gripper2_motion(2) += 0.004;
                       dt += 0.2;
                       
                       needlePlanner.compute_knottying_gripper_frame_wrt_camera(gripper1_motion, gripper2_motion, dt);
                       
                                           
                    }  

                    
                  for(int kstep = 0; kstep < 5; kstep +=1){
                       
                       gripper2_motion(0) -= 0.004;
                       dt += 0.2;
                       
                       needlePlanner.compute_knottying_gripper_frame_wrt_camera(gripper1_motion, gripper2_motion, dt);
   
                  }  
                  
                 for(int kstep = 0; kstep < 3; kstep +=1){
                       
                       gripper2_motion(2) -= 0.004;
                       dt += 0.2;
                       
                       needlePlanner.compute_knottying_gripper_frame_wrt_camera(gripper1_motion, gripper2_motion, dt);
                       
                 }  

                 for(int kstep = 0; kstep < 3; kstep +=1){
                       
                       gripper1_motion(2) += 0.004;
                       dt += 0.2;
                       
                       needlePlanner.compute_knottying_gripper_frame_wrt_camera(gripper1_motion, gripper2_motion, dt);
                
                                           
                 }  

                 for(int kstep = 0; kstep < 5; kstep +=1){
                       
                       gripper1_motion(0) -= 0.004;
                                         
                       dt += 0.2;
                       
                       needlePlanner.compute_knottying_gripper_frame_wrt_camera(gripper1_motion, gripper2_motion, dt);
                       
                                           
                 }  
                   
                 for(int kstep = 0; kstep < 3; kstep +=1){
                       
                       gripper1_motion(2) -= 0.004;
                       dt += 0.2;
                       
                       needlePlanner.compute_knottying_gripper_frame_wrt_camera(gripper1_motion, gripper2_motion, dt);
                       
                                           
                 }  

                for(int kstep = 0; kstep < 3; kstep +=1){
                       
                       gripper2_motion(2) += 0.004;
                       dt += 0.2;
                       
                       needlePlanner.compute_knottying_gripper_frame_wrt_camera(gripper1_motion, gripper2_motion, dt);
                       
                                           
                }  

                    
               for(int kstep = 0; kstep < 5; kstep +=1){
                       
                       gripper2_motion(0) -= 0.004;
                       dt += 0.2;
                       
                       needlePlanner.compute_knottying_gripper_frame_wrt_camera(gripper1_motion, gripper2_motion, dt);
   
               }  
                  
               for(int kstep = 0; kstep < 3; kstep +=1){
                       
                       gripper2_motion(2) -= 0.004;
                       dt += 0.2;
                       
                       needlePlanner.compute_knottying_gripper_frame_wrt_camera(gripper1_motion, gripper2_motion, dt);
                       
              }  

             */
              
                } //end kvec_yaw loop

                if (polygon_msg.points.size() > 0) {
                    exit_pt_score_publisher.publish(exit_pts_score);
                    exit_pt_array_publisher.publish(polygon_msg); // for rviz display
                    cout << "size of exit points: " << polygon_msg.points.size() << endl;
                }

                outfile.close();
                ROS_WARN("file closed");

            } //end needle_x loop
        } //end if g_got_new_entry_point

        ros::spinOnce();
        ros::Duration(0.01).sleep();
    } //end while ros::ok()
    /*
    Eigen::Affine3d affine_pose;
    for (int i=0;i<nposes;i++) {
        ROS_INFO("pose %d",i);
        cout<<gripper_affines_wrt_camera[i].linear()<<endl;
        cout<<"origin: "<<gripper_affines_wrt_camera[i].translation().transpose()<<endl;
    }
    needlePlanner.write_needle_drive_affines_to_file(gripper_affines_wrt_camera);
     */
    return 0;
}
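The file-scope symbols this example relies on (d_to_exit_pt, needle_height_above_tissue, g_npts_good, g_got_new_entry_point, g_O_entry_point, inPointCallback) are also outside this excerpt. The declarations below are a sketch; the numeric values are placeholders only, not the planner's actual defaults:

// Sketch only: assumed file-scope declarations for Code Example #5.
// Numeric values are illustrative placeholders, not the real defaults.
double d_to_exit_pt = 0.02;                 // entrance-to-exit separation (m)
double needle_height_above_tissue = 0.005;  // needle-center height above the tissue plane (m)
int g_npts_good = 20;                       // minimum acceptable number of drive poses
bool g_got_new_entry_point = false;
Eigen::Vector3d g_O_entry_point;

// Assumed message type on /thePoint; the real publisher may differ.
void inPointCallback(const geometry_msgs::Point& pt) {
    g_O_entry_point << pt.x, pt.y, pt.z;  // entry point selected in the camera frame
    g_got_new_entry_point = true;
}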