Example #1
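This paintGL() method, apparently from a Qt OpenGL widget (QuaternionDemo), redraws a colored cube each frame. It rebuilds the projection and modelview matrices with the fixed-function pipeline, then applies two orientations, a reference quaternion ref (as its conjugate) and the current quaternion q, each converted to an Eigen::Transform and multiplied onto the modelview stack before the cube's six faces are drawn as quads.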
void QuaternionDemo::paintGL()
{
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
	// rebuild the projection matrix for the current widget aspect ratio
	glMatrixMode(GL_PROJECTION);
	glLoadIdentity();
	gluPerspective(60.0, (double)width() / height(), 0.1, 10);

	glMatrixMode(GL_MODELVIEW);
	glLoadIdentity();
	glTranslatef(0, 0, -5);

	// fixed change-of-basis rotations applied before the quaternion-driven transforms
	glRotatef(-90, 1, 0, 0);
	glRotatef(90, 0, 0, 1);

	// convert each quaternion to a 4x4 affine transform and multiply it onto the modelview
	// stack; ref is applied as its conjugate (its inverse, for a unit quaternion)
	Eigen::Transform<float, 3, Eigen::Affine> t_ref(ref.conjugate());
	glMultMatrixf(t_ref.data());
	Eigen::Transform<float, 3, Eigen::Affine> t_q(q);
	glMultMatrixf(t_q.data());

	glEnable(GL_DEPTH_TEST);
	glBegin(GL_QUADS);
		glColor3f(1, 1, 1);      // -Z face
		glVertex3f(-1, -1, -1);
		glVertex3f(1, -1, -1);
		glVertex3f(1, 1, -1);
		glVertex3f(-1, 1, -1);

		glColor3f(1, 0, 0);      // +Z face
		glVertex3f(-1, -1, 1);
		glVertex3f(1, -1, 1);
		glVertex3f(1, 1, 1);
		glVertex3f(-1, 1, 1);

		glColor3f(0, 1, 0);      // -X face
		glVertex3f(-1, -1, 1);
		glVertex3f(-1, -1, -1);
		glVertex3f(-1, 1, -1);
		glVertex3f(-1, 1, 1);

		glColor3f(1, 0.5f, 0);   // +X face
		glVertex3f(1, -1, 1);
		glVertex3f(1, -1, -1);
		glVertex3f(1, 1, -1);
		glVertex3f(1, 1, 1);

		glColor3f(0, 0, 1);      // -Y face
		glVertex3f(-1, -1, 1);
		glVertex3f(-1, -1, -1);
		glVertex3f(1, -1, -1);
		glVertex3f(1, -1, 1);

		glColor3f(1, 0, 0.5f);   // +Y face
		glVertex3f(-1, 1, 1);
		glVertex3f(-1, 1, -1);
		glVertex3f(1, 1, -1);
		glVertex3f(1, 1, 1);
	glEnd();
}
Example #2
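This trackball method rotates the stored camera orientation dir by angle a about the axis (x, y, z): it builds an incremental quaternion from the axis-angle pair, composes it with the current orientation via the class's CrossProduct routine (presumably the quaternion product), and renormalizes dir. A standalone sketch of that product follows the listing.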
void               P3DCameraTrackBall::RotateCS
                                      (float               a,
                                       float               x,
                                       float               y,
                                       float               z)
 {
  P3DQuaternionf                       t_q(dir); /* copy of the current orientation */
  P3DQuaternionf                       r_q;      /* incremental rotation            */

  /* build the incremental rotation from the axis-angle pair */
  r_q.FromAxisAndAngle(x,y,z,a);

  /* compose it with the current orientation, then renormalize dir below */
  P3DQuaternionf::CrossProduct(dir.q,r_q.q,t_q.q);

  dir.Normalize();
 }
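For reference, the composition step in RotateCS boils down to multiplying two quaternions. Below is a minimal, self-contained sketch of the Hamilton product and renormalization, using a hypothetical Quat struct rather than P3DQuaternionf, and assuming the usual (w, x, y, z) component convention:

#include <cmath>

struct Quat { float w, x, y, z; };

// Hamilton product r = a * b; as a rotation, r applies b first, then a.
static Quat quatMultiply(const Quat &a, const Quat &b)
{
    return Quat{
        a.w * b.w - a.x * b.x - a.y * b.y - a.z * b.z,
        a.w * b.x + a.x * b.w + a.y * b.z - a.z * b.y,
        a.w * b.y - a.x * b.z + a.y * b.w + a.z * b.x,
        a.w * b.z + a.x * b.y - a.y * b.x + a.z * b.w
    };
}

// Renormalize to counter floating-point drift after repeated products.
static void quatNormalize(Quat &q)
{
    float n = std::sqrt(q.w * q.w + q.x * q.x + q.y * q.y + q.z * q.z);
    q.w /= n; q.x /= n; q.y /= n; q.z /= n;
}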
Example #3
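This is the bundle-adjustment loop of a visual-inertial odometry system built on ROS and OpenCV. Each iteration pops the next processed frame from the circular frameInfoList buffer, fetches the stereo images and IMU measurements matching its timestamp, integrates the IMU data, propagates the previous frame's state into the sliding window, optionally promotes the frame to a keyframe (computing stereo depth and searching for loop-closure links), runs BA(), and finally marginalizes old states and publishes debug output. A standalone sketch of the ring-buffer index arithmetic used for frameInfoList follows the listing.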
void LiveSLAMWrapper::BALoop()
{
    ros::Rate BARate(2000) ;
    list<ImageMeasurement>::iterator iterImage ;
    std::list<visensor_node::visensor_imu>::iterator iterIMU ;
    cv::Mat image0 ;
    cv::Mat image1 ;
    cv::Mat gradientMapForDebug(height, width, CV_8UC3) ;
    sensor_msgs::Image msg;
    double t ;

    while ( nh.ok() )
    {
        monoOdometry->frameInfoList_mtx.lock();
        // number of pending entries in the circular frameInfoList buffer
        int ttt = (monoOdometry->frameInfoListTail - monoOdometry->frameInfoListHead);
        if ( ttt < 0 ){
            ttt += frameInfoListSize ;
        }
        //printf("[BA thread] sz=%d\n", ttt ) ;
        if ( ttt < 1 ){
            monoOdometry->frameInfoList_mtx.unlock();
            BARate.sleep() ;
            continue ;
        }
        // advance frameInfoListHead (with wrap-around) until it reaches the tail
        // or lands on a keyframe entry
        for ( int sz ; ; )
        {
            monoOdometry->frameInfoListHead++ ;
            if ( monoOdometry->frameInfoListHead >= frameInfoListSize ){
                monoOdometry->frameInfoListHead -= frameInfoListSize ;
            }
            sz = monoOdometry->frameInfoListTail - monoOdometry->frameInfoListHead ;
            if ( sz == 0 ){
                break ;
            }
            if ( monoOdometry->frameInfoList[monoOdometry->frameInfoListHead].keyFrameFlag ){
                break ;
            }
        }
        ros::Time imageTimeStamp = monoOdometry->frameInfoList[monoOdometry->frameInfoListHead].t ;
        monoOdometry->frameInfoList_mtx.unlock();

        //pop images older than the current frame timestamp off the image queues
        image1_queue_mtx.lock();
        iterImage = image1Buf.begin() ;
        while ( iterImage->t < imageTimeStamp ){
            iterImage = image1Buf.erase( iterImage ) ;
        }
        image1 = iterImage->image.clone();
        image1_queue_mtx.unlock();

        image0_queue_mtx.lock();
        iterImage = image0Buf.begin() ;
        while ( iterImage->t < imageTimeStamp ){
            iterImage = image0Buf.erase( iterImage ) ;
        }
        image0 = iterImage->image.clone();
        image0_queue_mtx.unlock();

        imu_queue_mtx.lock();
        iterIMU = imuQueue.begin() ;
        Vector3d linear_acceleration;
        Vector3d angular_velocity;

        //std::cout << "imageTime=" << imageTimeStamp << std::endl;
        // integrate every IMU measurement that arrived before the image timestamp
        while ( iterIMU->header.stamp < imageTimeStamp )
        {
            linear_acceleration(0) = iterIMU->linear_acceleration.x;
            linear_acceleration(1) = iterIMU->linear_acceleration.y;
            linear_acceleration(2) = iterIMU->linear_acceleration.z;
            angular_velocity(0) = iterIMU->angular_velocity.x;
            angular_velocity(1) = iterIMU->angular_velocity.y;
            angular_velocity(2) = iterIMU->angular_velocity.z;

//            to_pub_info.x = angular_velocity(0)*180/PI ;
//            to_pub_info.y = angular_velocity(1)*180/PI ;
//            to_pub_info.z = angular_velocity(2)*180/PI ;
//            monoOdometry->pub_angular_velocity.publish( to_pub_info ) ;

//            outFile << to_pub_info.x << " "
//                    << to_pub_info.y << " "
//                    << to_pub_info.z << "\n";


            double pre_t = iterIMU->header.stamp.toSec();
            iterIMU = imuQueue.erase(iterIMU);
            double next_t = iterIMU->header.stamp.toSec();
            double dt = next_t - pre_t ;

//            std::cout << linear_acceleration.transpose() << std::endl ;
//            std::cout << angular_velocity.transpose() << std::endl ;
            monoOdometry->processIMU( dt, linear_acceleration, angular_velocity );
        }
        imu_queue_mtx.unlock();

        //propagate the last frame info to the current frame
        Frame* lastFrame = monoOdometry->slidingWindow[monoOdometry->tail].get();
        float dt = lastFrame->timeIntegral;

        Vector3d T_bk1_2_b0 = lastFrame->T_bk_2_b0 - 0.5 * gravity_b0 * dt *dt
                + lastFrame->R_bk_2_b0*(lastFrame->v_bk * dt  + lastFrame->alpha_c_k);
        Vector3d v_bk1 = lastFrame->R_k1_k.transpose() *
                (lastFrame->v_bk - lastFrame->R_bk_2_b0.transpose() * gravity_b0 * dt
                 + lastFrame->beta_c_k);
        Matrix3d R_bk1_2_b0 = lastFrame->R_bk_2_b0 * lastFrame->R_k1_k;

        monoOdometry->insertFrame(imageSeqNumber, image0, imageTimeStamp, R_bk1_2_b0, T_bk1_2_b0, v_bk1);
        Frame* currentFrame = monoOdometry->slidingWindow[monoOdometry->tail].get();
        Frame* keyFrame = monoOdometry->currentKeyFrame.get();
        if ( monoOdometry->frameInfoList[monoOdometry->frameInfoListHead].keyFrameFlag )
        {
            //prepare key frame
            cv::Mat disparity, depth ;
            monoOdometry->bm_(image0, image1, disparity, CV_32F);
            calculateDepthImage(disparity, depth, 0.11, fx );
            currentFrame->setDepthFromGroundTruth( (float*)depth.data ) ;

#ifdef PRINT_DEBUG_INFO
            //pub debugMap
            cv::cvtColor(image0, gradientMapForDebug, CV_GRAY2BGR);
            monoOdometry->generateDubugMap(currentFrame, gradientMapForDebug);
            msg.header.stamp = imageTimeStamp;
            sensor_msgs::fillImage(msg, sensor_msgs::image_encodings::BGR8, height,
                                   width, width*3, gradientMapForDebug.data );
            monoOdometry->pub_gradientMapForDebug.publish(msg) ;
#endif
            int preKeyFrameID = monoOdometry->currentKeyFrame->id() ;

            //set key frame
            monoOdometry->currentKeyFrame = monoOdometry->slidingWindow[monoOdometry->tail] ;
            monoOdometry->currentKeyFrame->keyFrameFlag = true ;
            monoOdometry->currentKeyFrame->cameraLinkList.clear() ;

            //reset the initial guess
            monoOdometry->RefToFrame = Sophus::SE3() ;

            //update tracking reference
            monoOdometry->updateTrackingReference();

#ifdef PUB_POINT_CLOUD
            pubPointCloud(valid_num, imageTimeStamp, R_vi_2_odometry);
#endif
            //unlock dense tracking
            monoOdometry->tracking_mtx.lock();
            monoOdometry->lock_densetracking = false;
            monoOdometry->tracking_mtx.unlock();

            if ( (imageTimeStamp - lastLoopClorsureTime).toSec() > 0.18 )
            {
                //add possible loop closure link
                t = (double)cvGetTickCount()  ;
                monoOdometry->setReprojectionListRelateToLastestKeyFrame( monoOdometry->head, preKeyFrameID,
                                                                          monoOdometry->slidingWindow[monoOdometry->tail].get(),
                                                                          R_i_2_c, T_i_2_c ) ;
                ROS_WARN("loop closure link cost time: %f", ((double)cvGetTickCount() - t) / (cvGetTickFrequency() * 1000) );
                t = (double)cvGetTickCount()  ;
                lastLoopClorsureTime = imageTimeStamp ;
            }
        }

        if ( monoOdometry->frameInfoList[monoOdometry->frameInfoListHead].trust )
        {
//            cout << "insert camera link" << endl ;
//            cout << monoOdometry->frameInfoList[monoOdometry->frameInfoListHead].T_k_2_c << endl ;

            monoOdometry->insertCameraLink(keyFrame, currentFrame,
                          monoOdometry->frameInfoList[monoOdometry->frameInfoListHead].R_k_2_c,
                          monoOdometry->frameInfoList[monoOdometry->frameInfoListHead].T_k_2_c,
                          monoOdometry->frameInfoList[monoOdometry->frameInfoListHead].lastestATA );
        }

        int control_flag = 0 ;
        Vector3d preBAt = currentFrame->T_bk_2_b0 ;


//        cout << "[-BA]current Position: " << currentFrame->T_bk_2_b0.transpose() << endl;
//        cout << "[-BA]current Velocity: " << currentFrame->v_bk.transpose() << endl;

        //bundle adjustment (BA)
        t = (double)cvGetTickCount()  ;
        monoOdometry->BA();
        printf("BA cost time: %f\n", ((double)cvGetTickCount() - t) / (cvGetTickFrequency() * 1000) );
        t = (double)cvGetTickCount()  ;

        //cout << "[BA-]current Position: " << currentFrame->T_bk_2_b0.transpose() << endl;
        //cout << "[BA-]current Velocity: " << currentFrame->v_bk.transpose() << endl;

        if ( (currentFrame->T_bk_2_b0 - preBAt ).norm() > 0.1 ){
            control_flag = 1 ; //loop_closure or other sudden position change case
        }
        if ( monoOdometry->frameInfoList[monoOdometry->frameInfoListHead].trust == false ){
            control_flag = 2 ; //only IMU link, dense tracking fails
        }

#ifdef PUB_GRAPH
        pubCameraLink();
#endif

        //marginalization
        monoOdometry->twoWayMarginalize();
        monoOdometry->setNewMarginalzationFlag();

//        R_vi_2_odometry.setIdentity() ;
//        if ( onUAV )
//        {
//            pubOdometry(monoOdometry->slidingWindow[monoOdometry->tail]->T_bk_2_b0,
//                    monoOdometry->slidingWindow[monoOdometry->tail]->v_bk,
//                    monoOdometry->slidingWindow[monoOdometry->tail]->R_bk_2_b0,
//                    monoOdometry->pub_odometry, monoOdometry->pub_pose,
//                    control_flag, R_vi_2_odometry,
//                    monoOdometry->frameInfoList[monoOdometry->frameInfoListHead].keyFrameFlag, imageTimeStamp );
//        }
//        else
//        {
//            pubOdometry(monoOdometry->slidingWindow[monoOdometry->tail]->T_bk_2_b0,
//                    monoOdometry->slidingWindow[monoOdometry->tail]->v_bk,
//                    monoOdometry->slidingWindow[monoOdometry->tail]->R_bk_2_b0,
//                    monoOdometry->pub_odometry, monoOdometry->pub_pose,
//                    control_flag, Eigen::Matrix3d::Identity(),
//                    monoOdometry->frameInfoList[monoOdometry->frameInfoListHead].keyFrameFlag, imageTimeStamp );
//        }

#ifdef PRINT_DEBUG_INFO
        int colorFlag = 0 ;
        colorFlag = monoOdometry->frameInfoList[monoOdometry->frameInfoListHead].keyFrameFlag ;
        if (  monoOdometry->frameInfoList[monoOdometry->frameInfoListHead].trust == false ){
            colorFlag = 2 ;
        }
        if ( onUAV ){
            pubPath(monoOdometry->slidingWindow[monoOdometry->tail]->T_bk_2_b0,
                    colorFlag,
                    monoOdometry->path_line, monoOdometry->pub_path, R_vi_2_odometry);
        }
        else{
            pubPath(monoOdometry->slidingWindow[monoOdometry->tail]->T_bk_2_b0,
                    colorFlag,
                    monoOdometry->path_line, monoOdometry->pub_path, Eigen::Matrix3d::Identity() );
        }
#endif

#ifdef  PUB_TF
        static tf::TransformBroadcaster br;
        tf::Transform transform;
        Vector3d t_translation = R_vi_2_odometry * monoOdometry->slidingWindow[monoOdometry->tail]->T_bk_2_b0 ;
        Quaterniond t_q(monoOdometry->slidingWindow[monoOdometry->tail]->R_bk_2_b0) ;
        transform.setOrigin(tf::Vector3(t_translation(0),
                                t_translation(1),
                                t_translation(2)) );
        tf::Quaternion q;
        Quaterniond tt_q(R_vi_2_odometry * monoOdometry->slidingWindow[monoOdometry->tail]->R_bk_2_b0 * R_vi_2_odometry.transpose());
        q.setW(tt_q.w());
        q.setX(tt_q.x());
        q.setY(tt_q.y());
        q.setZ(tt_q.z());
        transform.setRotation(q);
        br.sendTransform(tf::StampedTransform(transform, ros::Time::now(), "world", "densebody"));
#endif
        logCameraPose() ;

//        int preIndex = monoOdometry->tail - 1 ;
//        if ( preIndex < 0 ){
//            preIndex += slidingWindowSize ;
//        }
//        Vector3d tt_dist = (monoOdometry->slidingWindow[monoOdometry->tail]->T_bk_2_b0 -
//                monoOdometry->slidingWindow[preIndex]->T_bk_2_b0) ;
//        Matrix3d tt_rotate = monoOdometry->slidingWindow[monoOdometry->tail]->R_bk_2_b0.transpose() *
//                monoOdometry->slidingWindow[preIndex]->R_bk_2_b0 ;
//        Quaterniond tt_q(tt_rotate) ;

//        to_pub_info.x = monoOdometry->slidingWindow[monoOdometry->tail]->v_bk(0) ;
//        to_pub_info.y = monoOdometry->slidingWindow[monoOdometry->tail]->v_bk(1) ;
//        to_pub_info.z = monoOdometry->slidingWindow[monoOdometry->tail]->v_bk(2) ;
//        monoOdometry->pub_linear_velocity.publish(to_pub_info) ;


    }
}
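The head/tail bookkeeping at the top of BALoop treats frameInfoList as a fixed-size ring buffer. Here is a minimal sketch of that index arithmetic, with hypothetical helper names (ringDistance, ringAdvance) not taken from the project:

// Number of pending entries between head and tail in a ring buffer of the given size.
static int ringDistance(int head, int tail, int size)
{
    int d = tail - head;
    if (d < 0)
        d += size;   // the tail index has wrapped around past the end of the array
    return d;
}

// Advance an index by one slot, wrapping back to 0 at the end.
static int ringAdvance(int index, int size)
{
    return (index + 1) % size;
}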