Example #1
/*
Runs the kinematics model on the state vector and returns a vector with the
derivative of each component (except for the accelerations, which must be
calculated directly using a dynamics model).
Contents are as follows:
    - Rate of change in position (3-vector, m/s, NED frame)
    - Rate of change in linear velocity (3-vector, m/s^2, NED frame)
    - Rate of change in attitude (quaternion (x, y, z, w), 1/s, body frame)
    - Rate of change in angular velocity (3-vector, rad/s^2, body frame)
*/
const StateVectorDerivative State::model(ControlVector c, DynamicsModel *d) {
    StateVectorDerivative output;

    AccelerationVector a = d->evaluate(*this, c);

    /* Calculate change in position. */
    output.segment<3>(0) << velocity();

    /* Calculate change in velocity. */
    Quaternionr attitude_q = Quaternionr(attitude());
    output.segment<3>(3) = attitude_q.conjugate() * a.segment<3>(0);

    /* Calculate change in attitude. */
    Eigen::Matrix<real_t, 4, 1> omega_q;
    omega_q << angular_velocity(), 0;

    attitude_q = Quaternionr(omega_q).conjugate() * attitude_q;
    output.segment<4>(6) << attitude_q.vec(), attitude_q.w();
    output.segment<4>(6) *= 0.5;

    /* Calculate change in angular velocity (just angular acceleration). */
    output.segment<3>(10) = a.segment<3>(3);

    return output;
}
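
A derivative vector laid out as in the comment above is normally fed to a numerical integrator. The following is a minimal sketch (not part of the original class) of a single forward-Euler step followed by quaternion renormalization; the plain 13-element Eigen vector and the name integrate_step stand in for the original StateVector types, which are not shown here.

#include <Eigen/Dense>
#include <Eigen/Geometry>

// Hypothetical 13-element state laid out as documented above: position (0-2),
// velocity (3-5), attitude quaternion x,y,z,w (6-9), angular velocity (10-12).
using StateVec = Eigen::Matrix<double, 13, 1>;

StateVec integrate_step(const StateVec &state, const StateVec &derivative, double dt)
{
    // Forward-Euler step on every component.
    StateVec next = state + derivative * dt;

    // Euler integration does not preserve the unit norm of the attitude
    // quaternion, so re-normalize the (x, y, z, w) block afterwards.
    Eigen::Quaterniond q(next(9), next(6), next(7), next(8));  // ctor takes (w, x, y, z)
    q.normalize();
    next.segment<4>(6) << q.x(), q.y(), q.z(), q.w();
    return next;
}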
Example #2
bool device_gyro::calibrate()
{
    static const uint_t n = 512;
    std::vector<vec3d> meas(n);

    // measure
    for (uint_t i = 0; i < n; ++ i)
    {
        meas[i] = angular_velocity();
        sysclock.delay(1);
    }

    // mean value
    vec3d mean = vec3d();   // value-initialize so the accumulator starts at zero

    for (uint_t i = 0; i < n; ++ i)
        mean = mean + meas[i];

    mean = mean / n;

    // standard deviation
    float dev = 0;

    for (uint_t i = 0; i < n; ++ i)
        dev += (meas[i] - mean).square();

    dev = sqrt(dev / n);

    if (dev > 0.05)
        return false;

    w0 = w0 + mean;
    return true;
}
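
The same reject-or-accept logic as the function above, written as a self-contained sketch: average a batch of samples, measure their spread, and only fold the average into the bias estimate when the sensor appears stationary. Eigen's Vector3d stands in for vec3d, and read_gyro() / bias are hypothetical stand-ins for angular_velocity() / w0.

#include <Eigen/Dense>
#include <cmath>
#include <vector>

// Hypothetical sensor read; stands in for angular_velocity() above.
Eigen::Vector3d read_gyro();

// Average n gyro samples and fold the result into the running bias estimate,
// but only if the spread of the samples suggests the sensor was stationary.
bool calibrate_bias(Eigen::Vector3d &bias, std::size_t n = 512, double max_dev = 0.05)
{
    std::vector<Eigen::Vector3d> samples(n);
    for (std::size_t i = 0; i < n; ++i)
        samples[i] = read_gyro();

    Eigen::Vector3d mean = Eigen::Vector3d::Zero();
    for (const Eigen::Vector3d &s : samples)
        mean += s;
    mean /= static_cast<double>(n);

    double accum = 0.0;                       // sum of squared deviations
    for (const Eigen::Vector3d &s : samples)
        accum += (s - mean).squaredNorm();
    const double dev = std::sqrt(accum / n);

    if (dev > max_dev)                        // too noisy: sensor was probably moving
        return false;

    bias += mean;                             // accept: the mean reading is the bias offset
    return true;
}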
void LiveSLAMWrapper::BALoop()
{
    ros::Rate BARate(2000) ;
    list<ImageMeasurement>::iterator iterImage ;
    std::list<visensor_node::visensor_imu>::iterator iterIMU ;
    cv::Mat image0 ;
    cv::Mat image1 ;
    cv::Mat gradientMapForDebug(height, width, CV_8UC3) ;
    sensor_msgs::Image msg;
    double t ;

    while ( nh.ok() )
    {
        monoOdometry->frameInfoList_mtx.lock();
        int ttt = (monoOdometry->frameInfoListTail - monoOdometry->frameInfoListHead);
        if ( ttt < 0 ){
            ttt += frameInfoListSize ;
        }
        //printf("[BA thread] sz=%d\n", ttt ) ;
        if ( ttt < 1 ){
            monoOdometry->frameInfoList_mtx.unlock();
            BARate.sleep() ;
            continue ;
        }
        for ( int sz = 0 ; ; )
        {
            monoOdometry->frameInfoListHead++ ;
            if ( monoOdometry->frameInfoListHead >= frameInfoListSize ){
                monoOdometry->frameInfoListHead -= frameInfoListSize ;
            }
            sz = monoOdometry->frameInfoListTail - monoOdometry->frameInfoListHead ;
            if ( sz == 0 ){
                break ;
            }
            if ( monoOdometry->frameInfoList[monoOdometry->frameInfoListHead].keyFrameFlag ){
                break ;
            }
        }
        ros::Time imageTimeStamp = monoOdometry->frameInfoList[monoOdometry->frameInfoListHead].t ;
        monoOdometry->frameInfoList_mtx.unlock();

        //Pop out the image list
        image1_queue_mtx.lock();
        iterImage = image1Buf.begin() ;
        while ( iterImage->t < imageTimeStamp ){
            iterImage = image1Buf.erase( iterImage ) ;
        }
        image1 = iterImage->image.clone();
        image1_queue_mtx.unlock();

        image0_queue_mtx.lock();
        iterImage = image0Buf.begin() ;
        while ( iterImage->t < imageTimeStamp ){
            iterImage = image0Buf.erase( iterImage ) ;
        }
        image0 = iterImage->image.clone();
        image0_queue_mtx.unlock();

        imu_queue_mtx.lock();
        iterIMU = imuQueue.begin() ;
        Vector3d linear_acceleration;
        Vector3d angular_velocity;

        //std::cout << "imageTime=" << imageTimeStamp << std::endl;
        while ( iterIMU->header.stamp < imageTimeStamp )
        {
            linear_acceleration(0) = iterIMU->linear_acceleration.x;
            linear_acceleration(1) = iterIMU->linear_acceleration.y;
            linear_acceleration(2) = iterIMU->linear_acceleration.z;
            angular_velocity(0) = iterIMU->angular_velocity.x;
            angular_velocity(1) = iterIMU->angular_velocity.y;
            angular_velocity(2) = iterIMU->angular_velocity.z;

//            to_pub_info.x = angular_velocity(0)*180/PI ;
//            to_pub_info.y = angular_velocity(1)*180/PI ;
//            to_pub_info.z = angular_velocity(2)*180/PI ;
//            monoOdometry->pub_angular_velocity.publish( to_pub_info ) ;

//            outFile << to_pub_info.x << " "
//                    << to_pub_info.y << " "
//                    << to_pub_info.z << "\n";


            double pre_t = iterIMU->header.stamp.toSec();
            iterIMU = imuQueue.erase(iterIMU);
            double next_t = iterIMU->header.stamp.toSec();
            double dt = next_t - pre_t ;

//            std::cout << linear_acceleration.transpose() << std::endl ;
//            std::cout << angular_velocity.transpose() << std::endl ;
            monoOdometry->processIMU( dt, linear_acceleration, angular_velocity );
        }
        imu_queue_mtx.unlock();

        //propagate the last frame info to the current frame
        Frame* lastFrame = monoOdometry->slidingWindow[monoOdometry->tail].get();
        float dt = lastFrame->timeIntegral;

        Vector3d T_bk1_2_b0 = lastFrame->T_bk_2_b0 - 0.5 * gravity_b0 * dt *dt
                + lastFrame->R_bk_2_b0*(lastFrame->v_bk * dt  + lastFrame->alpha_c_k);
        Vector3d v_bk1 = lastFrame->R_k1_k.transpose() *
                (lastFrame->v_bk - lastFrame->R_bk_2_b0.transpose() * gravity_b0 * dt
                 + lastFrame->beta_c_k);
        Matrix3d R_bk1_2_b0 = lastFrame->R_bk_2_b0 * lastFrame->R_k1_k;

        monoOdometry->insertFrame(imageSeqNumber, image0, imageTimeStamp, R_bk1_2_b0, T_bk1_2_b0, v_bk1);
        Frame* currentFrame = monoOdometry->slidingWindow[monoOdometry->tail].get();
        Frame* keyFrame = monoOdometry->currentKeyFrame.get();
        if ( monoOdometry->frameInfoList[monoOdometry->frameInfoListHead].keyFrameFlag )
        {
            //prepare key frame
            cv::Mat disparity, depth ;
            monoOdometry->bm_(image0, image1, disparity, CV_32F);
            calculateDepthImage(disparity, depth, 0.11, fx );
            currentFrame->setDepthFromGroundTruth( (float*)depth.data ) ;

#ifdef PRINT_DEBUG_INFO
            //pub debugMap
            cv::cvtColor(image0, gradientMapForDebug, CV_GRAY2BGR);
            monoOdometry->generateDubugMap(currentFrame, gradientMapForDebug);
            msg.header.stamp = imageTimeStamp;
            sensor_msgs::fillImage(msg, sensor_msgs::image_encodings::BGR8, height,
                                   width, width*3, gradientMapForDebug.data );
            monoOdometry->pub_gradientMapForDebug.publish(msg) ;
#endif
            int preKeyFrameID = monoOdometry->currentKeyFrame->id() ;

            //set key frame
            monoOdometry->currentKeyFrame = monoOdometry->slidingWindow[monoOdometry->tail] ;
            monoOdometry->currentKeyFrame->keyFrameFlag = true ;
            monoOdometry->currentKeyFrame->cameraLinkList.clear() ;

            //reset the initial guess
            monoOdometry->RefToFrame = Sophus::SE3() ;

            //update tracking reference
            monoOdometry->updateTrackingReference();

#ifdef PUB_POINT_CLOUD
            pubPointCloud(valid_num, imageTimeStamp, R_vi_2_odometry);
#endif
            //unlock dense tracking
            monoOdometry->tracking_mtx.lock();
            monoOdometry->lock_densetracking = false;
            monoOdometry->tracking_mtx.unlock();

            if ( (imageTimeStamp - lastLoopClorsureTime).toSec() > 0.18 )
            {
                //add possible loop closure link
                t = (double)cvGetTickCount()  ;
                monoOdometry->setReprojectionListRelateToLastestKeyFrame( monoOdometry->head, preKeyFrameID,
                                                                          monoOdometry->slidingWindow[monoOdometry->tail].get(),
                                                                          R_i_2_c, T_i_2_c ) ;
                ROS_WARN("loop closure link cost time: %f", ((double)cvGetTickCount() - t) / (cvGetTickFrequency() * 1000) );
                t = (double)cvGetTickCount()  ;
                lastLoopClorsureTime = imageTimeStamp ;
            }
        }

        if ( monoOdometry->frameInfoList[monoOdometry->frameInfoListHead].trust )
        {
//            cout << "insert camera link" << endl ;
//            cout << monoOdometry->frameInfoList[monoOdometry->frameInfoListHead].T_k_2_c << endl ;

            monoOdometry->insertCameraLink(keyFrame, currentFrame,
                          monoOdometry->frameInfoList[monoOdometry->frameInfoListHead].R_k_2_c,
                          monoOdometry->frameInfoList[monoOdometry->frameInfoListHead].T_k_2_c,
                          monoOdometry->frameInfoList[monoOdometry->frameInfoListHead].lastestATA );
        }

        int control_flag = 0 ;
        Vector3d preBAt = currentFrame->T_bk_2_b0 ;


//        cout << "[-BA]current Position: " << currentFrame->T_bk_2_b0.transpose() << endl;
//        cout << "[-BA]current Velocity: " << currentFrame->v_bk.transpose() << endl;

        //BA
        t = (double)cvGetTickCount()  ;
        monoOdometry->BA();
        printf("BA cost time: %f\n", ((double)cvGetTickCount() - t) / (cvGetTickFrequency() * 1000) );
        t = (double)cvGetTickCount()  ;

        //cout << "[BA-]current Position: " << currentFrame->T_bk_2_b0.transpose() << endl;
        //cout << "[BA-]current Velocity: " << currentFrame->v_bk.transpose() << endl;

        if ( (currentFrame->T_bk_2_b0 - preBAt ).norm() > 0.1 ){
            control_flag = 1 ; //loop_closure or other sudden position change case
        }
        if ( monoOdometry->frameInfoList[monoOdometry->frameInfoListHead].trust == false ){
            control_flag = 2 ; //only IMU link, dense tracking fails
        }

#ifdef PUB_GRAPH
        pubCameraLink();
#endif

        //marginalization
        monoOdometry->twoWayMarginalize();
        monoOdometry->setNewMarginalzationFlag();

//        R_vi_2_odometry.setIdentity() ;
//        if ( onUAV )
//        {
//            pubOdometry(monoOdometry->slidingWindow[monoOdometry->tail]->T_bk_2_b0,
//                    monoOdometry->slidingWindow[monoOdometry->tail]->v_bk,
//                    monoOdometry->slidingWindow[monoOdometry->tail]->R_bk_2_b0,
//                    monoOdometry->pub_odometry, monoOdometry->pub_pose,
//                    control_flag, R_vi_2_odometry,
//                    monoOdometry->frameInfoList[monoOdometry->frameInfoListHead].keyFrameFlag, imageTimeStamp );
//        }
//        else
//        {
//            pubOdometry(monoOdometry->slidingWindow[monoOdometry->tail]->T_bk_2_b0,
//                    monoOdometry->slidingWindow[monoOdometry->tail]->v_bk,
//                    monoOdometry->slidingWindow[monoOdometry->tail]->R_bk_2_b0,
//                    monoOdometry->pub_odometry, monoOdometry->pub_pose,
//                    control_flag, Eigen::Matrix3d::Identity(),
//                    monoOdometry->frameInfoList[monoOdometry->frameInfoListHead].keyFrameFlag, imageTimeStamp );
//        }

#ifdef PRINT_DEBUG_INFO
        int colorFlag = 0 ;
        colorFlag = monoOdometry->frameInfoList[monoOdometry->frameInfoListHead].keyFrameFlag ;
        if (  monoOdometry->frameInfoList[monoOdometry->frameInfoListHead].trust == false ){
            colorFlag = 2 ;
        }
        if ( onUAV ){
            pubPath(monoOdometry->slidingWindow[monoOdometry->tail]->T_bk_2_b0,
                    colorFlag,
                    monoOdometry->path_line, monoOdometry->pub_path, R_vi_2_odometry);
        }
        else{
            pubPath(monoOdometry->slidingWindow[monoOdometry->tail]->T_bk_2_b0,
                    colorFlag,
                    monoOdometry->path_line, monoOdometry->pub_path, Eigen::Matrix3d::Identity() );
        }
#endif

#ifdef  PUB_TF
        static tf::TransformBroadcaster br;
        tf::Transform transform;
        Vector3d t_translation = R_vi_2_odometry * monoOdometry->slidingWindow[monoOdometry->tail]->T_bk_2_b0 ;
        Quaterniond t_q(monoOdometry->slidingWindow[monoOdometry->tail]->R_bk_2_b0) ;
        transform.setOrigin(tf::Vector3(t_translation(0),
                                t_translation(1),
                                t_translation(2)) );
        tf::Quaternion q;
        Quaterniond tt_q(R_vi_2_odometry * monoOdometry->slidingWindow[monoOdometry->tail]->R_bk_2_b0 * R_vi_2_odometry.transpose());
        q.setW(tt_q.w());
        q.setX(tt_q.x());
        q.setY(tt_q.y());
        q.setZ(tt_q.z());
        transform.setRotation(q);
        br.sendTransform(tf::StampedTransform(transform, ros::Time::now(), "world", "densebody"));
#endif
        logCameraPose() ;

//        int preIndex = monoOdometry->tail - 1 ;
//        if ( preIndex < 0 ){
//            preIndex += slidingWindowSize ;
//        }
//        Vector3d tt_dist = (monoOdometry->slidingWindow[monoOdometry->tail]->T_bk_2_b0 -
//                monoOdometry->slidingWindow[preIndex]->T_bk_2_b0) ;
//        Matrix3d tt_rotate = monoOdometry->slidingWindow[monoOdometry->tail]->R_bk_2_b0.transpose() *
//                monoOdometry->slidingWindow[preIndex]->R_bk_2_b0 ;
//        Quaterniond tt_q(tt_rotate) ;

//        to_pub_info.x = monoOdometry->slidingWindow[monoOdometry->tail]->v_bk(0) ;
//        to_pub_info.y = monoOdometry->slidingWindow[monoOdometry->tail]->v_bk(1) ;
//        to_pub_info.z = monoOdometry->slidingWindow[monoOdometry->tail]->v_bk(2) ;
//        monoOdometry->pub_linear_velocity.publish(to_pub_info) ;


    }
}
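
The frameInfoList bookkeeping at the top of the loop treats the list as a fixed-size ring buffer. Below is a minimal sketch of the two index operations it relies on; the helper names are illustrative, since the original inlines this arithmetic.

// Pending entries between head and tail of a fixed-size ring buffer,
// mirroring the "ttt < 0" wraparound correction above.
int ring_distance(int head, int tail, int ring_size)
{
    int d = tail - head;
    if (d < 0)
        d += ring_size;
    return d;
}

// Advance an index by one slot, wrapping at the end of the buffer,
// mirroring the frameInfoListHead increment above.
int ring_advance(int index, int ring_size)
{
    ++index;
    if (index >= ring_size)
        index -= ring_size;
    return index;
}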
BodyDescription::BodyDescription(const PhysicsContext &pc, const std::string &resource_id, const XMLResourceDocument &resources)
: impl(new BodyDescription_Impl(pc.impl->get_owner()))
{
	/* example resource entry with all parameters:
	
	<body2d name="TestBody" type="static">
        <position x="0" y="0"/>
        <rotation angle="180"/>
        <velocity x="0" y="0" angular="0"/>
        <damping linear="0" angular="0"/>
		<parameters awake="true" can_sleep="true" bullet="false" active="true" />
    </body2d>

	*/
	XMLResourceNode resource = resources.get_resource(resource_id);
	if (resource.get_type() != "body2d" && resource.get_type() != "body2d_description")
		throw Exception(string_format("Resource '%1' is not of type 'body2d' or 'body2d_description'", resource_id));

	DomNode cur_node = resource.get_element().get_first_child();

	//Body type
	std::string body_type = resource.get_element().get_attribute("type","static");
	if(body_type == "kinematic") set_type(body_kinematic);
	else if(body_type == "dynamic") set_type(body_dynamic);
	else set_type(body_static);

	while(!cur_node.is_null())
	{
		if (!cur_node.is_element())
		{
			cur_node = cur_node.get_next_sibling();
			continue;
		}
		
		DomElement cur_element = cur_node.to_element();
		std::string tag_name = cur_element.get_tag_name();

		//<position x="0" y="0"/>
		if(tag_name == "position")
		{
			float pos_x = 0.0f;
			float pos_y = 0.0f;

			if(cur_element.has_attribute("x"))
			{
				pos_x = StringHelp::text_to_float(cur_element.get_attribute("x"));
			}

			if(cur_element.has_attribute("y"))
			{
				pos_y = StringHelp::text_to_float(cur_element.get_attribute("y"));
			}

			set_position(pos_x, pos_y);
		}

		//<rotation angle="180"/>
		else if(tag_name == "rotation")
		{
			Angle angle(0.0f, angle_degrees);

			if(cur_element.has_attribute("angle"))
			{
				angle = Angle(StringHelp::text_to_float(cur_element.get_attribute("angle")), angle_degrees);
			}

			set_angle(angle);
		}

		//<velocity x="0" y="0" angular="0"/>
		else if(tag_name == "velocity")
		{
			Vec2f velocity(0.0f, 0.0f);
			Angle angular_velocity(0.0f, angle_degrees);

			if(cur_element.has_attribute("x"))
			{
				velocity.x = StringHelp::text_to_float(cur_element.get_attribute("x"));
			}

			if(cur_element.has_attribute("y"))
			{
				velocity.y = StringHelp::text_to_float(cur_element.get_attribute("y"));
			}

			if(cur_element.has_attribute("angular"))
			{
				angular_velocity = Angle(StringHelp::text_to_float(cur_element.get_attribute("angular")), angle_degrees);
			}

			set_linear_velocity(velocity);
			set_angular_velocity(angular_velocity);
		}

		//<damping linear="0" angular="0"/>
		else if(tag_name == "damping")
		{
			float linear = 0.0f;
			float angular = 0.0f;

			if(cur_element.has_attribute("linear"))
			{
				linear = StringHelp::text_to_float(cur_element.get_attribute("linear"));
			}

			if(cur_element.has_attribute("angular"))
			{
				angular = StringHelp::text_to_float(cur_element.get_attribute("angular"));
			}

			set_linear_damping(linear);
			set_angular_damping(angular);
		}

		//<parameters awake="true" can_sleep="true" bullet="false" active="true" />
		else if(tag_name == "parameters")
		{
			bool value;
		
			if(cur_element.has_attribute("awake"))
			{
				value = true;
				value = StringHelp::text_to_bool(cur_element.get_attribute("awake"));
				set_awake(value);
			}
			if(cur_element.has_attribute("active"))
			{
				value = true;
				value = StringHelp::text_to_bool(cur_element.get_attribute("active"));
				set_active(value);
			}
			if(cur_element.has_attribute("bullet"))
			{
				value = false;
				value = StringHelp::text_to_bool(cur_element.get_attribute("bullet"));
				set_as_bullet(value);
			}
			if(cur_element.has_attribute("can_sleep"))
			{
				value = true;
				value = StringHelp::text_to_bool(cur_element.get_attribute("can_sleep"));
				allow_sleep(value);
			}
		}

		cur_node = cur_node.get_next_sibling();
	}
}
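
The per-attribute pattern above (has_attribute, then text_to_float, with an implicit default) can be collected into a small helper. This sketch reuses only the DomElement and StringHelp calls already present in the constructor; the helper name attribute_or is illustrative and not part of ClanLib.

// Read a float attribute if present, otherwise return the supplied default.
static float attribute_or(DomElement element, const std::string &name, float fallback)
{
	if (element.has_attribute(name))
		return StringHelp::text_to_float(element.get_attribute(name));
	return fallback;
}

// The <damping> branch above could then shrink to:
//     set_linear_damping(attribute_or(cur_element, "linear", 0.0f));
//     set_angular_damping(attribute_or(cur_element, "angular", 0.0f));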
void LiveSLAMWrapper::BALoop()
{
    ros::Rate BARate(2000) ;
    list<ImageMeasurement>::iterator iterImage ;
    std::list<visensor_node::visensor_imu>::iterator iterIMU ;
    cv::Mat image0 ;
    cv::Mat image1 ;
    cv::Mat gradientMapForDebug(height, width, CV_8UC3) ;
    sensor_msgs::Image msg;
    double t ;

    while ( nh.ok() )
    {
        monoOdometry->frameInfoList_mtx.lock();
        int ttt = (monoOdometry->frameInfoListTail - monoOdometry->frameInfoListHead);
        if ( ttt < 0 ){
            ttt += frameInfoListSize ;
        }
        //printf("[BA thread] sz=%d\n", ttt ) ;
        if ( ttt < 1 ){
            monoOdometry->frameInfoList_mtx.unlock();
            BARate.sleep() ;
            continue ;
        }
        for ( int sz = 0 ; ; )
        {
            monoOdometry->frameInfoListHead++ ;
            if ( monoOdometry->frameInfoListHead >= frameInfoListSize ){
                monoOdometry->frameInfoListHead -= frameInfoListSize ;
            }
            sz = monoOdometry->frameInfoListTail - monoOdometry->frameInfoListHead ;
            if ( sz == 0 ){
                break ;
            }
            if ( monoOdometry->frameInfoList[monoOdometry->frameInfoListHead].keyFrameFlag ){
                break ;
            }
        }
        ros::Time imageTimeStamp = monoOdometry->frameInfoList[monoOdometry->frameInfoListHead].t ;
        monoOdometry->frameInfoList_mtx.unlock();

        //Pop out the image list
        image1_queue_mtx.lock();
        iterImage = image1Buf.begin() ;
        while ( iterImage->t < imageTimeStamp ){
            iterImage = image1Buf.erase( iterImage ) ;
        }
        image1 = iterImage->image.clone();
        image1_queue_mtx.unlock();

        image0_queue_mtx.lock();
        iterImage = image0Buf.begin() ;
        while ( iterImage->t < imageTimeStamp ){
            iterImage = image0Buf.erase( iterImage ) ;
        }
        image0 = iterImage->image.clone();
        image0_queue_mtx.unlock();

        imu_queue_mtx.lock();
        iterIMU = imuQueue.begin() ;
        Vector3d linear_acceleration;
        Vector3d angular_velocity;

        //std::cout << "imageTime=" << imageTimeStamp << std::endl;
        while ( iterIMU->header.stamp < imageTimeStamp )
        {
            linear_acceleration(0) = iterIMU->linear_acceleration.x;
            linear_acceleration(1) = iterIMU->linear_acceleration.y;
            linear_acceleration(2) = iterIMU->linear_acceleration.z;
            angular_velocity(0) = iterIMU->angular_velocity.x;
            angular_velocity(1) = iterIMU->angular_velocity.y;
            angular_velocity(2) = iterIMU->angular_velocity.z;

            //linear_acceleration = -linear_acceleration;
            //angular_velocity = -angular_velocity ;

//            double pre_t = iterIMU->header.stamp.toSec();
//            iterIMU = imuQueue.erase(iterIMU);

//            //std::cout << imuQueue.size() <<" "<< iterIMU->header.stamp << std::endl;

//            double next_t = iterIMU->header.stamp.toSec();
//            double dt = next_t - pre_t ;

            double pre_t = iterIMU->header.stamp.toSec();
            iterIMU = imuQueue.erase(iterIMU);
            double next_t = iterIMU->header.stamp.toSec();
            double dt = next_t - pre_t ;

//            std::cout << linear_acceleration.transpose() << std::endl ;
//            std::cout << angular_velocity.transpose() << std::endl ;
            monoOdometry->processIMU( dt, linear_acceleration, angular_velocity );
        }
        imu_queue_mtx.unlock();

        //propagate the last frame info to the current frame
        Frame* lastFrame = monoOdometry->slidingWindow[monoOdometry->tail].get();
        float dt = lastFrame->timeIntegral;

        Vector3d T_bk1_2_b0 = lastFrame->T_bk_2_b0 - 0.5 * gravity_b0 * dt *dt
                + lastFrame->R_bk_2_b0*(lastFrame->v_bk * dt  + lastFrame->alpha_c_k);
        Vector3d v_bk1 = lastFrame->R_k1_k.transpose() *
                (lastFrame->v_bk - lastFrame->R_bk_2_b0.transpose() * gravity_b0 * dt
                 + lastFrame->beta_c_k);
        Matrix3d R_bk1_2_b0 = lastFrame->R_bk_2_b0 * lastFrame->R_k1_k;

        monoOdometry->insertFrame(imageSeqNumber, image1, imageTimeStamp, R_bk1_2_b0, T_bk1_2_b0, v_bk1);
        Frame* currentFrame = monoOdometry->slidingWindow[monoOdometry->tail].get();
        Frame* keyFrame = monoOdometry->currentKeyFrame.get();
        if ( monoOdometry->frameInfoList[monoOdometry->frameInfoListHead].keyFrameFlag )
        {
            //prepare key frame
            cv::Mat disparity, depth ;
            monoOdometry->bm_(image1, image0, disparity, CV_32F);
            calculateDepthImage(disparity, depth, 0.11, fx );
            currentFrame->setDepthFromGroundTruth( (float*)depth.data ) ;

            //pub debugMap
            cv::cvtColor(image1, gradientMapForDebug, CV_GRAY2BGR);
            monoOdometry->generateDubugMap(currentFrame, gradientMapForDebug);
            msg.header.stamp = imageTimeStamp;
            sensor_msgs::fillImage(msg, sensor_msgs::image_encodings::BGR8, height,
                                   width, width*3, gradientMapForDebug.data );
            monoOdometry->pub_gradientMapForDebug.publish(msg) ;

            //set key frame
            monoOdometry->currentKeyFrame = monoOdometry->slidingWindow[monoOdometry->tail] ;
            monoOdometry->currentKeyFrame->keyFrameFlag = true ;
            monoOdometry->currentKeyFrame->cameraLinkList.clear() ;
            //reset the initial guess
            monoOdometry->RefToFrame = Sophus::SE3() ;

            //unlock dense tracking
            monoOdometry->tracking_mtx.lock();
            monoOdometry->lock_densetracking = false;
            monoOdometry->tracking_mtx.unlock();
        }
        if ( monoOdometry->frameInfoList[monoOdometry->frameInfoListHead].trust )
        {
//            cout << "insert camera link" << endl ;
//            cout << monoOdometry->frameInfoList[monoOdometry->frameInfoListHead].T_k_2_c << endl ;

            monoOdometry->insertCameraLink(keyFrame, currentFrame,
                          monoOdometry->frameInfoList[monoOdometry->frameInfoListHead].R_k_2_c,
                          monoOdometry->frameInfoList[monoOdometry->frameInfoListHead].T_k_2_c,
                          monoOdometry->frameInfoList[monoOdometry->frameInfoListHead].lastestATA );
        }

        cout << "[-BA]current Position: " << currentFrame->T_bk_2_b0.transpose() << endl;
        cout << "[-BA]current Velocity: " << currentFrame->v_bk.transpose() << endl;

        //BA
        t = (double)cvGetTickCount()  ;
        monoOdometry->BA();
        printf("BA cost time: %f\n", ((double)cvGetTickCount() - t) / (cvGetTickFrequency() * 1000) );
        t = (double)cvGetTickCount()  ;

        cout << "[BA-]current Position: " << currentFrame->T_bk_2_b0.transpose() << endl;
        cout << "[BA-]current Velocity: " << currentFrame->v_bk.transpose() << endl;

        //marginalization
        monoOdometry->twoWayMarginalize();
        monoOdometry->setNewMarginalzationFlag();

        //    pubOdometry(-T_bk1_2_b0, R_bk1_2_b0, pub_odometry, pub_pose );
        //    pubPath(-T_bk1_2_b0, path_line, pub_path );

        pubOdometry(-monoOdometry->slidingWindow[monoOdometry->tail]->T_bk_2_b0,
                monoOdometry->slidingWindow[monoOdometry->tail]->R_bk_2_b0,
                monoOdometry->pub_odometry, monoOdometry->pub_pose );
        pubPath(-monoOdometry->slidingWindow[monoOdometry->tail]->T_bk_2_b0,
                monoOdometry->frameInfoList[monoOdometry->frameInfoListHead].keyFrameFlag,
                monoOdometry->path_line, monoOdometry->pub_path);
    }
}
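
Both versions of BALoop() drain the IMU queue up to the image timestamp, using the gap to the next queued message as the integration step. Below is a stripped-down, self-contained sketch of that pattern; plain doubles stand in for ros::Time, ImuSample and process_imu are illustrative names, and unlike the original it also guards against running off the end of the queue.

#include <list>

struct ImuSample
{
    double stamp;        // seconds; stands in for header.stamp
    double accel[3];
    double gyro[3];
};

// Hypothetical pre-integration hook; the original calls monoOdometry->processIMU().
void process_imu(double dt, const ImuSample &sample);

void drain_imu(std::list<ImuSample> &queue, double image_stamp)
{
    std::list<ImuSample>::iterator it = queue.begin();
    while (it != queue.end() && it->stamp < image_stamp)
    {
        ImuSample sample = *it;
        double pre_t = it->stamp;
        it = queue.erase(it);            // consume this sample
        if (it == queue.end())
            break;                       // no later message to measure dt against
        double dt = it->stamp - pre_t;   // step to the next queued message
        process_imu(dt, sample);
    }
}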