Example #1
// Returns the Mahalanobis distance (not squared) of pt from the Gaussian defined by mean and cov
float Reco::mahalanobis_distance (Eigen::Matrix3f cov, Eigen::Vector3f mean, pcl::PointXYZ pt)
{
	Eigen::Vector3f diff (pt.x - mean[0], pt.y - mean[1], pt.z - mean[2]);
	//return diff.dot(cov.inverse() * diff);      // squared Mahalanobis distance
	return sqrt(diff.dot(cov.inverse() * diff));  // Mahalanobis distance (square root applied)
}
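
A minimal call-site sketch for the function above (the surrounding Reco object and the covariance, mean and query point below are illustrative assumptions, not taken from the original code):

// Hypothetical usage: distance of a query point from a Gaussian (mean, cov),
// measured in standard deviations; reco is an assumed Reco instance.
Eigen::Matrix3f cov = Eigen::Matrix3f::Identity() * 0.01f;  // isotropic, sigma = 0.1
Eigen::Vector3f mean(1.0f, 2.0f, 3.0f);
pcl::PointXYZ query(1.1f, 2.0f, 3.0f);
float d = reco.mahalanobis_distance(cov, mean, query);      // ~1.0: the point lies 0.1 (= 1 sigma) from the mean
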
void fi::VPDetectionWrapper::validateVanishingPoint(const std::vector<std::vector< Eigen::Vector2f> > &computed_vp_hypothesis, const Eigen::Matrix3f &cam_calib, Eigen::Vector3f &final_robust_vp_x, Eigen::Vector3f &final_robust_vp_y)
{
	Eigen::Matrix3f inv_cam_calib = cam_calib.inverse(); 

	//transform the VPs into rays through the camera centre; see Hartley & Zisserman (Z+H), Ch. 8 on single view geometry
	unsigned int num_vps = computed_vp_hypothesis.size();
	std::vector< Eigen::Vector3f> computed_vp_hypothesis_x;
	std::vector< Eigen::Vector3f> computed_vp_hypothesis_y;
	std::vector< Eigen::Vector3f> computed_vp_hypothesis_z;
	for (unsigned int i = 0; i < num_vps; i++)
	{
		std::vector<Eigen::Vector2f> a_vp = computed_vp_hypothesis.at(i);

		Eigen::Vector2f a_x = a_vp.at(0);
		Eigen::Vector3f x_h, n_x;
		x_h(0) = a_x(0);
		x_h(1) = a_x(1);
		x_h(2) = 1;
		n_x = inv_cam_calib * x_h;
		n_x = n_x.normalized();
		computed_vp_hypothesis_x.push_back(n_x);

		Eigen::Vector2f a_y = a_vp.at(1);
		Eigen::Vector3f y_h, n_y;
		y_h(0) = a_y(0);
		y_h(1) = a_y(1);
		y_h(2) = 1;
		n_y = inv_cam_calib * y_h;
		n_y = n_y.normalized();
		computed_vp_hypothesis_y.push_back(n_y);

		Eigen::Vector2f a_z = a_vp.at(2);
		Eigen::Vector3f z_h, n_z;
		z_h(0) = a_z(0);
		z_h(1) = a_z(1);
		z_h(2) = 1;
		n_z = inv_cam_calib * z_h;
		n_z = n_z.normalized();
		computed_vp_hypothesis_z.push_back(n_z);
	}

	std::vector<Eigen::Vector3f> in_liers_x;
	std::vector<Eigen::Vector3f> in_liers_y;
	std::vector<Eigen::Vector3f> in_liers_z;
	bool found_inliers_x = getRansacInliers(computed_vp_hypothesis_x, in_liers_x);
	bool found_inliers_y = getRansacInliers(computed_vp_hypothesis_y, in_liers_y);
	bool found_inliers_z = getRansacInliers(computed_vp_hypothesis_z, in_liers_z);

	Eigen::VectorXf optimized_vp_x;
	Eigen::VectorXf optimized_vp_y;
	Eigen::VectorXf optimized_vp_z;
	leastQuaresVPFitting(in_liers_x, optimized_vp_x);
	leastQuaresVPFitting(in_liers_y, optimized_vp_y);
	leastQuaresVPFitting(in_liers_z, optimized_vp_z);
	std::cout<<"Vanishing Points Validated"<<std::endl;

	//check the angles between the fitted directions to verify that they are truly orthogonal
	Eigen::Vector3f vp_x (optimized_vp_x[3], optimized_vp_x[4], optimized_vp_x[5]);
	Eigen::Vector3f vp_y (optimized_vp_y[3], optimized_vp_y[4], optimized_vp_y[5]);
	Eigen::Vector3f vp_z (optimized_vp_z[3], optimized_vp_z[4], optimized_vp_z[5]);

	Eigen::Vector3f vp_x_centroid (optimized_vp_x[0], optimized_vp_x[1], optimized_vp_x[2]);
	Eigen::Vector3f vp_y_centroid (optimized_vp_y[0], optimized_vp_y[1], optimized_vp_y[2]);
	Eigen::Vector3f vp_z_centroid (optimized_vp_z[0], optimized_vp_z[1], optimized_vp_z[2]);

	float angle_value_radiens_cxy = angleBetweenVectors(vp_x_centroid, vp_y_centroid);
	float angle_value_degrees_cxy = pcl::rad2deg(angle_value_radiens_cxy);
	float angle_value_radiens_cxz = angleBetweenVectors(vp_x_centroid, vp_z_centroid);
	float angle_value_degrees_cxz = pcl::rad2deg(angle_value_radiens_cxz);
	float angle_value_radiens_cyz = angleBetweenVectors(vp_y_centroid, vp_z_centroid);
	float angle_value_degrees_cyz = pcl::rad2deg(angle_value_radiens_cyz);

	float angle_value_radiens_xy = angleBetweenVectors(vp_x, vp_y);
	float angle_value_degrees_xy = pcl::rad2deg(angle_value_radiens_xy);
	float angle_value_radiens_xz = angleBetweenVectors(vp_x, vp_z);
	float angle_value_degrees_xz = pcl::rad2deg(angle_value_radiens_xz);
	float angle_value_radiens_yz = angleBetweenVectors(vp_y, vp_z);
	float angle_value_degrees_yz = pcl::rad2deg(angle_value_radiens_yz);

	//keep only the fitted direction vectors (the last three components of each model)
	final_robust_vp_x = optimized_vp_x.tail<3> ();
	final_robust_vp_y = optimized_vp_y.tail<3> ();

	//final_robust_vp_x = vp_x_centroid;
	//final_robust_vp_y = vp_y_centroid;
}
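
getRansacInliers, leastQuaresVPFitting and angleBetweenVectors are helpers of fi::VPDetectionWrapper that are not part of this excerpt. As a sketch of what the angle checks above rely on, angleBetweenVectors can be read as the usual arc-cosine of the normalized dot product (an assumption about its behaviour, not the original implementation):

// Assumed behaviour of angleBetweenVectors: angle in radians between two 3D directions
// (needs <cmath> and <algorithm>).
float angleBetweenVectors(const Eigen::Vector3f &a, const Eigen::Vector3f &b)
{
	float c = a.dot(b) / (a.norm() * b.norm());
	c = std::max(-1.0f, std::min(1.0f, c));  // clamp to guard acos against rounding errors
	return std::acos(c);
}
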
Example #3
std::pair<float, float> worldBallPosFromImgCoords(AL::ALMotionProxy motionProxy,
                                                  std::pair<int, int> ballPosCam,
                                                  int imgWidth, int imgHeight,
                                                  int camera)
{
	std::string cameraName = "CameraTop";
	if (camera == 1)
	{
		cameraName = "CameraBottom";
	}

	// Image coordinates of ball
	int u = ballPosCam.first;
	int v = ballPosCam.second;

	// Angles of observation of the ball
	float phi = ((float)u-((float)imgWidth)/2)/(float)imgWidth * img_WFOV;
	float theta = ((float)v-((float)imgHeight)/2)/(float)imgHeight * img_HFOV;

	// Select the right coordinates for the NAO system!
	// x outward from camera, y to the left and z vertically upwards

	// Coefficients for line-equation going from NAO camera to the ball
	float b_c = -sin(phi);
	float c_c = -sin(theta);
	float a_c = sqrt((cos(phi)*cos(phi)+cos(theta)*cos(theta))/2);

	int space = 2; //FRAME_ROBOT
	bool useSensorValues = true;
	std::vector<float> transVec =
		motionProxy.getTransform(cameraName, space, useSensorValues); // Transform camera -> FRAME_ROBOT
	std::vector<float> cameraPos =
		motionProxy.getPosition(cameraName, space, useSensorValues); // Camera position in FRAME_ROBOT


	// std::cout << "Position of bottom camera: " << std::endl;
	// std::cout << cameraPos.at(0) << " " << cameraPos.at(1) << " " << cameraPos.at(2) << std::endl;
	// std::cout << cameraPos.at(3) << " " << cameraPos.at(4) << " " << cameraPos.at(5) << std::endl;


	// Put the camera transform into an Eigen matrix for easy multiplication
	Eigen::Matrix4f trans;
	trans <<
		transVec[0] , transVec[1] , transVec[2] , transVec[3] ,
		transVec[4] , transVec[5] , transVec[6] , transVec[7] ,
		transVec[8] , transVec[9] , transVec[10], transVec[11],
		transVec[12], transVec[13], transVec[14], transVec[15];

	Eigen::Vector4f vec(a_c, b_c, c_c, 1);

	// Transform the line equation from NAO camera coordinate system into FRAME_ROBOT
	Eigen::Vector4f transformedLine = trans*vec;
	// std::cout << "trans*vec = " << transformedLine << std::endl;

	// Solve line-plane intersection with plane at z (floor)
	// Solution from Wikipedia line-plane intersection article
	float z = 0.00;

	Eigen::Matrix3f lineMat;
	lineMat <<
		cameraPos.at(0)-transformedLine[0], 1.0-0.0, 0.0-0.0,
		cameraPos.at(1)-transformedLine[1], 0.0-0.0, 1.0-0.0,
		cameraPos.at(2)-transformedLine[2], z  -  z, z  -  z;

	Eigen::Vector3f lineVec;
	lineVec << cameraPos.at(0)-0.0, cameraPos.at(1)-0.0, cameraPos.at(2)-z;
	Eigen::Vector3f txy = lineMat.inverse()*lineVec;
	std::cout << "Ball is at (x, y): (" << txy[1] << ", " << txy[2] << ")" << std::endl;
	std::pair<float, float> return_value(txy[1], txy[2]);
	return return_value; //Return ball position (x, y)
}
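
The lineMat / lineVec construction above is the matrix form of the line-plane intersection from the Wikipedia article: the line runs from the camera position to the transformed direction point, and the floor plane is spanned by two unit edges. A self-contained restatement of that solve with illustrative names (it mirrors the computation above but is not part of the original file):

// Line through la and lb; plane containing p0 and spanned by (p1 - p0) and (p2 - p0).
// Solve [la-lb | p1-p0 | p2-p0] * (t, u, v)^T = la - p0; (u, v) are the in-plane
// coordinates of the hit point, which equals la + t * (lb - la).
Eigen::Vector3f intersectLinePlane(const Eigen::Vector3f &la, const Eigen::Vector3f &lb,
                                   const Eigen::Vector3f &p0, const Eigen::Vector3f &p1,
                                   const Eigen::Vector3f &p2)
{
	Eigen::Matrix3f M;
	M.col(0) = la - lb;
	M.col(1) = p1 - p0;
	M.col(2) = p2 - p0;
	Eigen::Vector3f tuv = M.inverse() * (la - p0);
	return la + tuv[0] * (lb - la);  // 3D intersection point
}

In worldBallPosFromImgCoords the plane points are p0 = (0, 0, z), p1 = (1, 0, z) and p2 = (0, 1, z), so the solved u and v (txy[1], txy[2]) are directly the (x, y) floor coordinates of the ball.
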
Example #4
int main(int argc, char * argv[])
{
    int width = 640;
    int height = 480;

    Resolution::getInstance(width, height);

    Intrinsics::getInstance(528, 528, 320, 240);

    cv::Mat intrinsicMatrix = cv::Mat(3,3,CV_64F);

    intrinsicMatrix.at<double>(0,0) = Intrinsics::getInstance().fx();
    intrinsicMatrix.at<double>(1,1) = Intrinsics::getInstance().fy();

    intrinsicMatrix.at<double>(0,2) = Intrinsics::getInstance().cx();
    intrinsicMatrix.at<double>(1,2) = Intrinsics::getInstance().cy();

    intrinsicMatrix.at<double>(0,1) =0;
    intrinsicMatrix.at<double>(1,0) =0;

    intrinsicMatrix.at<double>(2,0) =0;
    intrinsicMatrix.at<double>(2,1) =0;
    intrinsicMatrix.at<double>(2,2) =1;

    Bytef * decompressionBuffer = new Bytef[Resolution::getInstance().numPixels() * 2];
    IplImage * deCompImage = 0;

    std::string logFile="/home/lili/Kinect_Logs/2015-11-05.00.klg";
   // assert(pcl::console::parse_argument(argc, argv, "-l", logFile) > 0 && "Please provide a log file");

    RawLogReader logReader(decompressionBuffer,
                           deCompImage,
                           logFile,
                           true);


    cv::Mat1b tmp(height, width);
    cv::Mat3b depthImg(height, width);

    PlaceRecognition placeRecognition(&intrinsicMatrix);

    iSAMInterface iSAM;

    //Keyframes
    KeyframeMap map(true);
    Eigen::Vector3f lastPlaceRecognitionTrans = Eigen::Vector3f::Zero();
    Eigen::Matrix3f lastPlaceRecognitionRot = Eigen::Matrix3f::Identity();
    int64_t lastTime = 0;

    OdometryProvider * odom = 0;


    //int frame_index = 0;

   // uint64_t timestamp;

    /*if(true)
    {
        odom = new FOVISOdometry;
        if(logReader.hasMore())
        {
            logReader.getNext();

            Eigen::Matrix3f Rcurr = Eigen::Matrix3f::Identity();
            Eigen::Vector3f tcurr = Eigen::Vector3f::Zero();

            odom->getIncrementalTransformation(tcurr,
                                               Rcurr,
                                               logReader.timestamp,
                                               (unsigned char *)logReader.deCompImage->imageData,
                                               (unsigned short *)&decompressionBuffer[0]);
        }

    }*/
    //else
   // {
        odom = new DVOdometry;

        if(logReader.hasMore())
        {
            logReader.getNext();

            DVOdometry * dvo = static_cast<DVOdometry *>(odom);

            dvo->firstRun((unsigned char *)logReader.deCompImage->imageData,
                          (unsigned short *)&decompressionBuffer[0]);
        }
    //}

    ofstream fout1("camera_pose_DVOMarch28.txt");
    ofstream fout2("camera_pose_KeyframeMotionMetric0.1March28.txt");
    ofstream fout3("loop_closure_transformationMarch28.txt");
    ofstream fout4("camera_pose_after_optimizationMarch28.txt");
    ofstream fout5("camera_pose_after_optimizationMarch28DVOCov.txt");
    ofstream fout6("camera_pose_after_optimizationMarch28DVOLoopTransCov.txt");

    /*
    pcl::visualization::PCLVisualizer cloudViewer;

    cloudViewer.setBackgroundColor(1, 1, 1);
    cloudViewer.initCameraParameters();
    cloudViewer.addCoordinateSystem(0.1, 0, 0, 0);
    */
    //pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGB> color(cloud->makeShared());
    //cloudViewer.addPointCloud<pcl::PointXYZRGB>(cloud->makeShared(), color, "Cloud Viewer");


    int loopClosureCount=0;

    while(logReader.hasMore())
    {
        logReader.getNext();

        cv::Mat3b rgbImg(height, width, (cv::Vec<unsigned char, 3> *)logReader.deCompImage->imageData);

        cv::Mat1w depth(height, width, (unsigned short *)&decompressionBuffer[0]);

        cv::normalize(depth, tmp, 0, 255, cv::NORM_MINMAX, 0);

        cv::cvtColor(tmp, depthImg, CV_GRAY2RGB);

        cv::imshow("RGB", rgbImg);

        cv::imshow("Depth", depthImg);

        char key = cv::waitKey(1);

        if(key == 'q')
        {
            break;
        }
        else if(key == ' ')
        {
            key = cv::waitKey(0);
        }
        if(key == 'q')
        {
            break;
        }

        Eigen::Matrix3f Rcurr = Eigen::Matrix3f::Identity();
        Eigen::Vector3f tcurr = Eigen::Vector3f::Zero();



//        #1
        odom->getIncrementalTransformation(tcurr,
                                          Rcurr,
                                          logReader.timestamp,
                                          (unsigned char *)logReader.deCompImage->imageData,
                                          (unsigned short *)&decompressionBuffer[0]);


       fout1<<tcurr[0]<<" "<<tcurr[1]<<" "<<tcurr[2]<<" "<<Rcurr(0,0)<<" "<<Rcurr(0,1)<<" "<<Rcurr(0,2)<<" "<<Rcurr(1,0)<<" "<<Rcurr(1,1)<<" "<<Rcurr(1,2)<<" "<<Rcurr(2,0)<<" "<<Rcurr(2,1)<<" "<<Rcurr(2,2)<<endl;

        Eigen::Matrix3f Rdelta = Rcurr.inverse() * lastPlaceRecognitionRot;
        Eigen::Vector3f tdelta = tcurr - lastPlaceRecognitionTrans;

        //Eigen::MatrixXd covariance = odom->getCovariance();
         //Eigen::MatrixXd covariance=Eigen::Matrix<double, 6, 6>::Identity()* 1e-3;

        if((Projection::rodrigues2(Rdelta).norm() + tdelta.norm())  >= 0.1)
        {
            Eigen::MatrixXd covariance = odom->getCovariance();
            iSAM.addCameraCameraConstraint(lastTime,
                                           logReader.timestamp,
                                           lastPlaceRecognitionRot,
                                           lastPlaceRecognitionTrans,
                                           Rcurr,
                                           tcurr);
                                           //covariance);

            printCovariance(fout5,  covariance);

            lastTime = logReader.timestamp;

            lastPlaceRecognitionRot = Rcurr;
            lastPlaceRecognitionTrans = tcurr;

            cout<<"before add keyframe"<<endl;

//            #2
            map.addKeyframe((unsigned char *)logReader.deCompImage->imageData,
                            (unsigned short *)&decompressionBuffer[0],
                            Rcurr,
                            tcurr,
                            logReader.timestamp);

           fout2<<tcurr[0]<<" "<<tcurr[1]<<" "<<tcurr[2]<<" "<<Rcurr(0,0)<<" "<<Rcurr(0,1)<<" "<<Rcurr(0,2)<<" "<<Rcurr(1,0)<<" "<<Rcurr(1,1)<<" "<<Rcurr(1,2)<<" "<<Rcurr(2,0)<<" "<<Rcurr(2,1)<<" "<<Rcurr(2,2)<<endl;

           /*
            //Save keyframe
           {
            cv::Mat3b rgbImgKeyframe(height, width, (cv::Vec<unsigned char, 3> *)logReader.deCompImage->imageData);

            cv::Mat1w depthImgKeyframe(height, width, (unsigned short *)&decompressionBuffer[0]);

            //save keyframe depth
            char fileName[1024] = {NULL};
            sprintf(fileName, "keyframe_depth_%06d.png", frame_index);
            cv::imwrite(fileName, depthImgKeyframe);

            //save keyframe rgb

            sprintf(fileName, "keyframe_rgb_%06d.png", frame_index);
            cv::imwrite(fileName, rgbImgKeyframe);
            frame_index ++;

           }
        */

            int64_t matchTime;
            Eigen::Matrix4d transformation;
           // Eigen::MatrixXd cov(6,6);
            //isam::Covariance(0.001 * Eigen::Matrix<double, 6, 6>::Identity()))
            Eigen::MatrixXd cov=0.001 * Eigen::Matrix<double, 6, 6>::Identity();


            cout<<"map.addKeyframe is OK"<<endl;

//            #3
            if(placeRecognition.detectLoop((unsigned char *)logReader.deCompImage->imageData,
                                           (unsigned short *)&decompressionBuffer[0],
                                           logReader.timestamp,
                                           matchTime,
                                           transformation,
                                           cov,
                                           loopClosureCount))
            {

                //printCovariance(fout6,  cov);
               cout<<"logReader.timestamp "<<logReader.timestamp<<endl;
               cout<<"matchTime "<<matchTime<<endl;

               /*
               transformation << -0.2913457145219732, 0.228056050293173, -0.9290361201559172, 2.799184934345601,
                                0.6790194052589797, 0.7333821627861707, -0.03291277242681545, 1.310438143604587,
                                0.673832562222562, -0.6404225489719699, -0.3685222338703895, 6.988973505496276,
                                0, 0, 0, 0.999999999999998;
                */
                /*
              transformation << 0.9998996846969838, 0.003948215234314986, -0.01360265192291004, 0.05847011404293689,
                              -0.004032877285312574, 0.9999726343121815, -0.006202138950136233, 0.04528938486109094,
                                0.01357779229749574, 0.006256374606648019, 0.9998882444218992, 0.02203456132723125,
                                0, 0, 0, 1;
                */
              iSAM.addLoopConstraint(logReader.timestamp, matchTime, transformation);//, cov);
              fout3<<transformation(0,0)<<" "<<transformation(0,1)<<" "<<transformation(0,2)<<" "<<transformation(0,3)<<" "<<transformation(1,0)<<" "<<transformation(1,1)<<" "<<transformation(1,2)<<" "<<transformation(1,3)<<" "<<transformation(2,0)<<" "<<transformation(2,1)<<" "<<transformation(2,2)<<" "<<transformation(2,3)<<" "<<transformation(3,0)<<" "<<transformation(3,1)<<" "<<transformation(3,2)<<" "<<transformation(3,3)<<endl;
              loopClosureCount++;


            }

        }

        if(loopClosureCount>=1)
        {
            break;
        }
    }
    /*
    for(int i=0; i<loopClosureCount;i++)
    {

     iSAM.addLoopConstraint(placeRecognition.loopClosureConstraints.at(i)->time1,
                            placeRecognition.loopClosureConstraints.at(i)->time2,
                            placeRecognition.loopClosureConstraints.at(i)->constraint);

    }*/

    std::vector<std::pair<uint64_t, Eigen::Matrix4f> > posesBefore;
    iSAM.getCameraPoses(posesBefore);

    cout<<"It works good before optimization"<<endl;

//    #4
    double residual =iSAM.optimise();

    cout<<"It works good after optimize and before map.applyPoses"<<endl;

   // map.applyPoses(isam);
    //cout<<"It works good before *cloud=map.getMap and after map.applyPoses(isam)"<<endl;

    /*
    pcl::PointCloud<pcl::PointXYZRGB> *cloud = map.getMap();


     // Write it back to disk under a different name.
	// Another possibility would be "savePCDFileBinary()".
	cout<<"before storing the point cloud map"<<endl;
	pcl::io::savePCDFileASCII ("outputCloudMap03DVODensity005.pcd", *cloud);

    cout << "Saved data points to outputMap.pcd." << std::endl;


    cout<<"copy data into octomap..."<<endl;

    octomap::ColorOcTree tree( 0.05 );

    for (size_t i=0; i<(*cloud).points.size(); i++)
    {
        // insert the cloud points into the octomap
        tree.updateNode( octomap::point3d((*cloud).points[i].x, (*cloud).points[i].y, (*cloud).points[i].z), true );
    }

    for (size_t i=0; i<(*cloud).points.size(); i++)
    {
        tree.integrateNodeColor( (*cloud).points[i].x, (*cloud).points[i].y, (*cloud).points[i].z, (*cloud).points[i].r, (*cloud).points[i].g, (*cloud).points[i].b);
    }

    tree.updateInnerOccupancy();
    tree.write("OctomapColorLab03DVODensity005.ot");
    cout<<"please see the done."<<endl;
   */

    //pcl::visualization::PCLVisualizer cloudViewer;

   // cloudViewer.setBackgroundColor(1, 1, 1);
    //cloudViewer.initCameraParameters();
   // cloudViewer.addCoordinateSystem(0.1, 0, 0, 0);

    //pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGB> color(cloud->makeShared());
    //cloudViewer.addPointCloud<pcl::PointXYZRGB>(cloud->makeShared(), color, "Cloud Viewer");

    std::vector<std::pair<uint64_t, Eigen::Matrix4f> > newPoseGraph;

    iSAM.getCameraPoses(newPoseGraph);


    /*
    for(unsigned int i = 0; i < newPoseGraph.size(); i++)
    {
       // file << std::setprecision(6) << std::fixed << (double)newPoseGraph.at(i).first / 1000000.0 << " ";

        Eigen::Vector3f trans = newPoseGraph.at(i).second.topRightCorner(3, 1);
        Eigen::Matrix3f rot = newPoseGraph.at(i).second.topLeftCorner(3, 3);

        fout4 << trans(0) << " " << trans(1) << " " << trans(2) << " ";

        Eigen::Quaternionf currentCameraRotation(rot);

        //file << currentCameraRotation.x() << " " << currentCameraRotation.y() << " " << currentCameraRotation.z() << " " << currentCameraRotation.w() << "\n";
    }*/



    for(std::vector<std::pair<uint64_t, Eigen::Matrix4f> >::iterator ite=newPoseGraph.begin(); ite!=newPoseGraph.end(); ite++)
    {
        Eigen::Matrix3f Roptimized;
        Roptimized<<ite->second(0,0), ite->second(0,1), ite->second(0,2),
                    ite->second(1,0), ite->second(1,1), ite->second(1,2),
                    ite->second(2,0), ite->second(2,1), ite->second(2,2);

         Eigen::Quaternionf quatOptimized(Roptimized);

         fout4<<ite->second(0,3)<<" "<<ite->second(1,3)<<" "<<ite->second(2,3)<<" "<<quatOptimized.w()<<" "<<quatOptimized.x()<<" "<<quatOptimized.y()<<" "<<quatOptimized.z()<<endl;

    }

    cout<<"The number of optimized poses"<<newPoseGraph.size()<<endl;


   // drawPoses(poses, cloudViewer, 1.0, 0, 0);
    //drawPoses(posesBefore, cloudViewer, 0, 0, 1.0);







    //cloudViewer.spin();

    delete [] decompressionBuffer;

    return 0;
}
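
printCovariance is called above but not defined in this excerpt; a plausible minimal version, assumed here only so the example is self-contained (not the original implementation), streams the 6x6 covariance as one whitespace-separated line:

// Assumed helper: write every entry of the covariance matrix on a single line.
void printCovariance(std::ofstream &out, const Eigen::MatrixXd &cov)
{
    for (int r = 0; r < cov.rows(); ++r)
        for (int c = 0; c < cov.cols(); ++c)
            out << cov(r, c) << " ";
    out << std::endl;
}
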
void
motion_controller::running(void)
{
    cycle_start = ros::Time::now();

//  receive a path; once received, block the path receiver until the mission is completed
    if (m_path && !block_path_receiver)
    {
        nav_msgs::Path temp_path = *m_path;
        std::vector<geometry_msgs::PoseStamped> temp_path_vector;
        extracted_path.clear();
        for( std::vector<geometry_msgs::PoseStamped>::iterator itr = temp_path.poses.begin() ; itr != temp_path.poses.end(); ++itr)
        {
            temp_path_vector.push_back(*itr);
        }

        if(!temp_path_vector.empty())
        {
             ROS_INFO("Path Received!");
        }
        p_count++;

        while(!temp_path_vector.empty())
        {
            extracted_path.push_back(temp_path_vector.back());
            temp_path_vector.pop_back();
        }
        block_path_receiver = true;
    }

//  switch status according to the command
    switch (m_move)
    {
        // move forward 
        case FORWARD:
        {
            // v is linear speed and gain is angular velocity
            v =  sqrt(dist)*v_max/(1 + gamma*k*k)*10000/PI;

            gain = 0.3265*v*k;
            if (fabs(gain) > 600)
            {
                gain = boost::math::sign(gain) * 600;
            }

            if(fabs(v)>2000)
            {
                v = boost::math::sign(v)*2000;
            }
            // set motor rotation velocity
            locomotor->set_vel(floor(v)+floor(gain),-floor(v)+floor(gain));

            // waypoint reached once the position error and heading error are small enough
            if (distance2d(m_state, m_cmd) < 0.09 && indicator(m_state[2],m_cmd[2],0.5))
            {
                m_move = IDLE;
            }
            break;
        }

        case BACKWARD:
        {
            v =  sqrt(dist) * 0.75/(1+0.2*k_back*k_back) *10000/3.1415926;

            gain = 0.3265*v*k_back;

            if (fabs(gain) > 600)
            {
                gain = boost::math::sign(gain) * 600;
            }

            if(fabs(v)>2000)
            {
                v = boost::math::sign(v)*2000;
            }
            
            locomotor->set_vel(-floor(v)+floor(gain),floor(v)+floor(gain));
            if (distance2d(m_state, m_cmd) < 0.09 && indicator(m_state[2],m_cmd[2],0.5))
            {
                m_move = IDLE;
            }
            break;
        }

        case LIFTFORK:
        {
            // TODO: Lift height should be determined, fork stroke should be determined
            printf("Lift and fork!\n");

            locomotor->set_vel(0, 0);
            sleep(3);
//            locomotor->shut_down();
            lift_motor->power_on();
            if ((fork_count%2) == 1)
//          changed -243720 to -223720 to make the lift/fork stop slightly lower
                lift_motor->set_pos(-223720);
            else
                lift_motor->set_pos(-293720);

            sleep(10);

           printf("Start Test\n");

            // pose correction: assumes the tag is mounted vertically and the camera has no pitch relative to the vehicle
            

            if ((fork_count%2) == 1)
            {
                printf("Start Correction!\n");
                int count_detect = 0;
                while(ros::ok())
                {
                    if(abs_pose1)
                    {
                        Eigen::Quaternion<float> quat;
                        quat.w() = abs_pose1->pose.pose.orientation.w;
                        quat.x() = abs_pose1->pose.pose.orientation.x;
                        quat.y() = abs_pose1->pose.pose.orientation.y;
                        quat.z() = abs_pose1->pose.pose.orientation.z;

                        Eigen::Matrix3f Rotation;

                        Rotation = qToRotation(quat);

                        Eigen::Matrix3f FixTF;
                        FixTF << 1, 0,  0,
                                 0, 0, -1,
                                 0, 1,  0;

                        Rotation = FixTF * Rotation.inverse();

                        float yaw_error;
                    
                        yaw_error = getYawFromMat(Rotation);
                    
                        gain = -3265*(k_angle*yaw_error)/3.1415926;
                        if(fabs(gain)>150)
                        {
                            gain = boost::math::sign(gain) * 150;
                        }
                        locomotor->set_vel(floor(gain), floor(gain));
                        if (fabs(yaw_error*180/3.1415926) < 0.5)
                        {
                            locomotor->set_vel(0,0);
                            printf("Yaw %f corrected!\n", yaw_error*180/3.1415926);
                            break;
                        }
                    }
                    else
                    {
                        locomotor->set_vel(0, 0);
                        usleep(10000);
                        count_detect++;
                    }
                    ros::spinOnce();

                    if (count_detect>1000)
                    {
                        ROS_WARN("No Tag detected when stoped");
                        ros::shutdown();
                        exit(1);
                    }
                }
 
                count_detect = 0;
                while(ros::ok())
                {
                    if(abs_pose1)
                    {

                        Eigen::Quaternion<float> quat;
                        quat.w() = abs_pose1->pose.pose.orientation.w;
                        quat.x() = abs_pose1->pose.pose.orientation.x;
                        quat.y() = abs_pose1->pose.pose.orientation.y;
                        quat.z() = abs_pose1->pose.pose.orientation.z;

                        Eigen::Matrix3f Rotation;
                        Eigen::Vector3f translation;
                        translation << abs_pose1->pose.pose.position.x,
                                       abs_pose1->pose.pose.position.y,
                                       abs_pose1->pose.pose.position.z;

                        Rotation = qToRotation(quat);

                        float x_error;
                    
                        x_error = translation[0];
                    
                        v = 0.25*(x_error-0.003)*10000/3.1415926;
//                        printf("x is %f\n", x_error);
//                        printf("v is %f\n\n", floor(v));
                        if(fabs(v)>150)
                        {
                            v = boost::math::sign(v) * 150;
                        }
                        locomotor->set_vel(floor(v), -floor(v));
                        if (fabs(x_error-0.003) < 0.003)
                        {
                            locomotor->set_vel(0, 0);
                            printf("x %f corrected!\n", (x_error-0.006));
                            break;
                        }
                    }
                    else
                    {
                        locomotor->set_vel(0, 0);
                        usleep(10000);
                        count_detect++;
                    }
                    ros::spinOnce();

                    if (count_detect>1000)
                    {
                        ROS_WARN("No Tag detected when stoped");
                        ros::shutdown();
                        exit(1);
                    }
                }
            }

            if ((fork_count%2) == 0)
            {
                int count_detect = 0;

                while(ros::ok())
                {
                    if (abs_pose)
                    {
                        Eigen::Quaternion<float> quat;
                        quat.w() = abs_pose->pose.pose.orientation.w;
                        quat.x() = abs_pose->pose.pose.orientation.x;
                        quat.y() = abs_pose->pose.pose.orientation.y;
                        quat.z() = abs_pose->pose.pose.orientation.z;

                        Eigen::Matrix3f Rotation;

                        Rotation = qToRotation(quat);

                        Eigen::Matrix3f fixTF;
                        fixTF << 1, 0,  0,
                                 0, -1, 0,
                                 0, 0, -1;

                        Rotation = Rotation.inverse()*fixTF;

                        m_state[2] = getYawFromMat(Rotation);
                        gain = 3265*(k_angle*angle(m_cmd[2],m_state[2]))/PI;
                        if (fabs(gain) > 100)
                        {
                            gain = boost::math::sign(gain) * 100;
                        }
                        
                        if (fabs(gain) >100)
                        {
                            ros::shutdown();
                            exit(1);
                        }

                        locomotor->set_vel(floor(gain),floor(gain));
                        if (indicator(m_cmd[2], m_state[2], 0.008))
                        {
                            locomotor->set_vel(0, 0);
                            printf("Corrected!\n");
                            break;
                        }
                    }
                    else
                    {
                        locomotor->set_vel(0, 0);
                        usleep(10000);
                        count_detect++;
                    }
                    ros::spinOnce();

                    if (count_detect>1000)
                    {
                        locomotor->set_vel(0, 0);
                        ROS_WARN("No Tag detected when stoped");
                        //ros::shutdown();
                        //exit(1);
                    }
                }
            }

            // if we carry out hand hold barcode test, after correction, stop
//               ros::shutdown();
//               exit(1);

            locomotor->shut_down();
            usleep(500000);  // sleep() takes whole seconds, so sleep(0.5) would truncate to 0

// comment following two lines can set make telefork not strech out
            telefork_motor->set_pos( boost::math::sign(lift_param)*(-386000) );
            sleep(15);

            if ((fork_count%2) == 1)
                 lift_motor->set_pos(-293720);
            else
                 lift_motor->set_pos(-223720);
            sleep(3);

            telefork_motor->set_pos(0);
            sleep(15);            

//            ros::shutdown();
//            exit(1);
            lift_motor->shut_down();
            locomotor->power_on();
            usleep(500000);  // sleep() takes whole seconds, so sleep(0.5) would truncate to 0
            m_move = IDLE;

            break;
        }

        case TURN:
        {
            v = cos(alpha) * dist* k_dist *10000/PI;
            if(fabs(v)>300)
            {
                v = boost::math::sign(v)*300;
            }

            gain = 3265*(k_angle*angle(m_cmd[2],m_state[2]))/PI;
            if (fabs(gain) > 400)
            {
                gain = boost::math::sign(gain) * 400;
            }
            locomotor->set_vel(floor(v)+floor(gain),-floor(v)+floor(gain));

            if (indicator(m_state[2],m_cmd[2],0.01))
            {
                m_move = IDLE;
            }
            break;
        }
  
        case IDLE:
        {
           locomotor->set_vel(0,0);
           if (extracted_path.size()!=0)
           {
               geometry_msgs::PoseStamped temp_pose = extracted_path.back();
               float yaw_ = 2*atan2(temp_pose.pose.orientation.z,temp_pose.pose.orientation.w);
               m_cmd << temp_pose.pose.position.x * m_resolution,
                        temp_pose.pose.position.y * m_resolution,
                        angle(yaw_,0);

               printf("Next Commanded Pose is (%f, %f, %f)\n", m_cmd[0], m_cmd[1], m_cmd[2]);
               if ( (fabs(m_cmd[0] - m_state[0])>0.5) && (fabs(m_cmd[1] - m_state[1])>0.5) )
               {
                   printf("Invalid commanded position received!\n");
                   locomotor->shut_down();
                   ros::shutdown();
                   exit(1);
               }
               if ( (fabs(m_cmd[0] - m_state[0])>0.5) || (fabs(m_cmd[1] - m_state[1])>0.5) )
               {
                   if (fabs(angle(m_cmd[2],m_state[2]))>0.5)
                   {
                       printf("Invalid commanded pose orientation received!\n");
                       locomotor->shut_down();
                       ros::shutdown();
                       exit(1);
                   }
                   if (fabs(m_cmd[0] - m_state[0])>0.5)
                   {
                       if (cos(m_state[2]) *  (m_cmd[0] - m_state[0]) > 0)
                           m_move = FORWARD;
                       else
                           m_move = BACKWARD;
                   }
                   else
                   {
                       if (sin(m_state[2]) *  (m_cmd[1] - m_state[1]) > 0)
                           m_move = FORWARD;
                       else
                           m_move = BACKWARD;
                   }
                   if (m_move == FORWARD)
                       printf("Move Forward!\n");
                   else
                       printf("Move Backward!\n");
               }
               else if (fabs(m_cmd[2] - m_state[2])>0.5)
               {
                   m_move = TURN;
                   printf("Turn Around!\n");
               }
               else if (temp_pose.pose.position.z!=0)
               {
                   m_move = LIFTFORK;
                   fork_count++;
                   lift_param = temp_pose.pose.position.z;
               }
               else
                    m_move = IDLE;
               extracted_path.pop_back();
           }
           else
           {
                if (p_count == 2)
                {
                    lift_motor->power_on();
                    lift_motor->set_pos(0);
                    sleep(12);
                    lift_motor->shut_down();
                    usleep(500000);  // sleep() takes whole seconds, so sleep(0.5) would truncate to 0
                }

               block_path_receiver = false;
               geometry_msgs::PoseStamped pose_msg;
         
               pose_msg.header.frame_id = "world";
               pose_msg.header.stamp = ros::Time::now();
               pose_msg.pose.position.x = m_state[0];
               pose_msg.pose.position.y = m_state[1];
               if (block_path_receiver)
                  pose_msg.pose.position.z = 1;
               else
                  pose_msg.pose.position.z = 0;

               pose_msg.pose.orientation.w = cos(m_state[2]/2);
               pose_msg.pose.orientation.x = 0;
               pose_msg.pose.orientation.y = 0;
               pose_msg.pose.orientation.z = sin(m_state[2]/2);
               m_posePub.publish(pose_msg);
               printf("IDLE!\n");
               sleep(1);
               m_move = IDLE;
           }

           break;
        }
    }
        
    vel.first = (vel.first + locomotor->get_vel().first)/2;
    vel.second = (vel.second + locomotor->get_vel().second)/2;

    cycle_period = (ros::Time::now() - cycle_start).toSec();

    // dead-reckoning: propagate the pose estimate from the averaged wheel
    // velocities (differential drive) over this control cycle
    m_state[0] = m_state[0] + (-vel.second+vel.first)/2 * cos(m_state[2]) * 0.018849555 * cycle_period/60;
    m_state[1] = m_state[1] + (-vel.second+vel.first)/2 * sin(m_state[2]) * 0.018849555 * cycle_period/60;
    m_state[2] = m_state[2] + (vel.first + vel.second)/0.653 * 0.018849555 * cycle_period/60;


    if (abs_pose)
    {
        int id;
        id = abs_pose->id - 3;

        Eigen::Quaternion<float> quat;
        quat.w() = abs_pose->pose.pose.orientation.w;
        quat.x() = abs_pose->pose.pose.orientation.x;
        quat.y() = abs_pose->pose.pose.orientation.y;
        quat.z() = abs_pose->pose.pose.orientation.z;

        Eigen::Matrix3f Rotation;
        Eigen::Vector3f translation;
        translation << abs_pose->pose.pose.position.x,
                       abs_pose->pose.pose.position.y,
                       abs_pose->pose.pose.position.z;

        Rotation = qToRotation(quat);

        translation = -Rotation.inverse()*translation;

        Eigen::Matrix3f fixTF;
        fixTF << 1, 0,  0,
                 0, -1, 0,
                 0, 0, -1;

        Rotation = Rotation.inverse()*fixTF;

        m_state[0] = translation[0] + m_resolution * (id%10);
        m_state[1] = translation[1] + m_resolution * floor(id/10.0);
        m_state[2] = getYawFromMat(Rotation) + 0.04;
    }

    geometry_msgs::PoseStamped pose_msg;
         
    pose_msg.header.frame_id = "world";
    pose_msg.header.stamp = ros::Time::now();
    pose_msg.pose.position.x = m_state[0];
    pose_msg.pose.position.y = m_state[1];
    if (block_path_receiver)
        pose_msg.pose.position.z = 1;
    else
        pose_msg.pose.position.z = 0;

    pose_msg.pose.orientation.w = cos(m_state[2]/2);
    pose_msg.pose.orientation.x = 0;
    pose_msg.pose.orientation.y = 0;
    pose_msg.pose.orientation.z = sin(m_state[2]/2);

    m_posePub.publish(pose_msg);

    delta_y = m_cmd[1]-m_state[1];
    if(fabs(m_cmd[1]-m_state[1]) > 1.6)
    {
        delta_y = boost::math::sign(m_cmd[1]-m_state[1]) * 1.6;
    }

    delta_x = m_cmd[0]-m_state[0];
    if(fabs(m_cmd[0]-m_state[0]) > 1.6)
    {
        delta_x = boost::math::sign(m_cmd[0]-m_state[0]) * 1.6;
    }

    beta = angle(m_cmd[2], atan2(delta_y, delta_x));

    alpha = angle(m_state[2], atan2(delta_y, delta_x));

    beta1 = angle(m_cmd[2]+PI, atan2(delta_y, delta_x));

    alpha1 = angle(m_state[2]+3.1415926, atan2(delta_y, delta_x));

    dist = sqrt(delta_x*delta_x + delta_y* delta_y);

    k = -1/dist * (k2*(alpha-atan(-k1*beta)) + sin(alpha)*(1 + k1/(1+(k1*beta) * (k1*beta))));
    k_back = -1/dist * (k2*(alpha1-atan2(-k1*beta1,1)) + sin(alpha1)*(1 + k1/(1+(k1*beta1) * (k1*beta1))));
}
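
angle(), indicator() and distance2d() are used throughout running() but are not part of this excerpt; in the original they are presumably members of motion_controller. A rough sketch of the behaviour the controller appears to rely on (assumptions, not the original implementations): angle() returns the signed heading difference wrapped into (-PI, PI], and indicator() checks whether that difference is within a tolerance.

// Assumed helper: signed difference a - b, wrapped into (-PI, PI] (PI as used above, needs <cmath>).
float angle(float a, float b)
{
    float d = fmod(a - b, 2.0f * PI);
    if (d > PI)   d -= 2.0f * PI;
    if (d <= -PI) d += 2.0f * PI;
    return d;
}

// Assumed helper: true when the wrapped heading error is below tol (radians).
bool indicator(float a, float b, float tol)
{
    return fabs(angle(a, b)) < tol;
}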