void visualOdometry::mainloop(){
    FRAME lastFrame;
    std::string file_name_ground = pd_.getData("file_name_ground");

    readFromFile( file_name_ground,ground_truth_list_);
    lastFrame = readFrameFromGroundTruthList(start_index_ ,ground_truth_list_);

    // We always compare currFrame against lastFrame
    string detector = pd_.getData( "detector" );
    string descriptor = pd_.getData( "descriptor" );
    CAMERA_INTRINSIC_PARAMETERS camera = getDefaultCamera();
    computeKeyPointsAndDesp( lastFrame, detector, descriptor );

    // Whether to visualize the point cloud
    bool visualize = pd_.getData("visualize_pointcloud")==string("true");

    int min_inliers = atoi( pd_.getData("min_inliers").c_str() );
    double max_norm = atof( pd_.getData("max_norm").c_str() );

    for (int currIndex=start_index_+1; currIndex<end_index_; currIndex++ )
    {
        current_index_ = currIndex;
        cout<<"Reading files "<<currIndex<<endl;
        FRAME currFrame = readFrameFromGroundTruthList(currIndex, ground_truth_list_); // read currFrame
        computeKeyPointsAndDesp( currFrame, detector, descriptor );
        // compare currFrame with lastFrame
        RESULT_OF_PNP result = estimateMotion( lastFrame, currFrame, camera );
        if ( result.inliers < min_inliers ){ // not enough inliers, discard this frame
            std::cout<<"not enough inliers: "<<result.inliers<<std::endl;
            continue;
        }
        //ROS_INFO_STREAM("trans: "<< result.tvec <<" rot: "<< result.rvec);

        // Convert the PnP rotation vector into an Eigen rotation.
        cv::Mat rotation_matrix;
        cv::Rodrigues(result.rvec, rotation_matrix);
        Eigen::Matrix3d rotation_eigen_temp;
        cv::cv2eigen(rotation_matrix,rotation_eigen_temp);
        Eigen::AngleAxisd rotation_eigen(rotation_eigen_temp);

        // Assemble the incremental transform: rotation block plus translation from tvec.
        Eigen::Affine3d incrementalAffine = Eigen::Affine3d::Identity();
        incrementalAffine = rotation_eigen;
        incrementalAffine(0,3) = result.tvec.ptr<double>(0)[0];
        incrementalAffine(1,3) = result.tvec.ptr<double>(1)[0];
        incrementalAffine(2,3) = result.tvec.ptr<double>(2)[0];

        //pose_camera_ = incrementalAffine * pose_camera_;

        publishTrajectory();
        publishGroundTruth();
        ROS_INFO_STREAM("RT: "<< incrementalAffine.matrix());
        lastFrame = currFrame;
    }
}
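
The rvec/tvec handling inside mainloop() can be isolated into a small standalone helper. The sketch below (the function name cvPnpToAffine and the test data are illustrative, not part of the original project) shows the same Rodrigues-plus-cv2eigen conversion and the pose chaining that the commented-out pose_camera_ line would perform.

#include <iostream>
#include <Eigen/Core>
#include <Eigen/Geometry>
#include <opencv2/core.hpp>
#include <opencv2/calib3d.hpp>      // cv::Rodrigues
#include <opencv2/core/eigen.hpp>   // cv::cv2eigen

// Convert a PnP result (rotation vector + translation vector, both CV_64F)
// into an Eigen::Affine3d transform. The names mirror RESULT_OF_PNP above.
Eigen::Affine3d cvPnpToAffine(const cv::Mat& rvec, const cv::Mat& tvec)
{
    cv::Mat R_cv;
    cv::Rodrigues(rvec, R_cv);                 // 3x3 rotation matrix

    Eigen::Matrix3d R;
    cv::cv2eigen(R_cv, R);

    Eigen::Affine3d T = Eigen::Affine3d::Identity();
    T.linear() = R;                            // rotation block
    T.translation() << tvec.at<double>(0),
                       tvec.at<double>(1),
                       tvec.at<double>(2);     // translation column
    return T;
}

int main()
{
    // Identity rotation, small translation along z: purely illustrative data.
    cv::Mat rvec = (cv::Mat_<double>(3,1) << 0, 0, 0);
    cv::Mat tvec = (cv::Mat_<double>(3,1) << 0, 0, 0.1);

    Eigen::Affine3d incremental = cvPnpToAffine(rvec, tvec);

    // Accumulate the camera pose the same way the commented-out line in
    // mainloop() suggests: new_pose = incremental * old_pose.
    Eigen::Affine3d pose = Eigen::Affine3d::Identity();
    pose = incremental * pose;

    std::cout << "pose:\n" << pose.matrix() << std::endl;
    return 0;
}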
Example #2
int main(int argc,char** argv){
	ParameterReader* pr = new ParameterReader("../parameters.txt");
	CAMERA_INTRINSIC_PARAMETERS camera;
	// Abort if any required parameter is missing (getData returns "NOT_FOUND").
	if(pr->getData("camera.cx")=="NOT_FOUND" || pr->getData("camera.cy")=="NOT_FOUND" ||
	   pr->getData("camera.fx")=="NOT_FOUND" || pr->getData("camera.fy")=="NOT_FOUND" ||
	   pr->getData("camera.scale")=="NOT_FOUND" || pr->getData("detector")=="NOT_FOUND" ||
	   pr->getData("descriptor")=="NOT_FOUND" || pr->getData("good_match_threshold")=="NOT_FOUND" ||
	   pr->getData("min_good_match")=="NOT_FOUND"){
		cout<<"Parameter lost."<<endl;
		return -1;
	}
	camera.cx = atof((pr->getData("camera.cx")).c_str());
	camera.cy = atof((pr->getData("camera.cy")).c_str());
	camera.fx = atof((pr->getData("camera.fx")).c_str());
	camera.fy = atof((pr->getData("camera.fy")).c_str());
	camera.scale = atof((pr->getData("camera.scale")).c_str());
	int min_good_match = atoi((pr->getData("min_good_match")).c_str());
	/*
	cv::Mat rgb,depth;
	rgb = cv::imread("../pic/color.png");
	depth = cv::imread("../pic/depth.png",-1);
	PointCloud::Ptr cloud = image2PointCloud(rgb,depth,camera);
	cloud->points.clear();
	cv::Point3f point;
	point.x = 50;
	point.y = 100;
	point.z = 1200;
	cv::Point3f p = point2dTo3d(point,camera);
	cout<<"2D point ("<<point.x<<","<<point.y<<") to "<<"3D point ("<<p.x<<","<<p.y<<","<<p.z<<")"<<endl;
	*/
	double good_match_threshold = atof((pr->getData("good_match_threshold")).c_str());
	FRAME frame1,frame2;
	frame1.rgb = cv::imread("../pic/color1.png");
	frame1.depth = cv::imread("../pic/depth1.png",-1); // -1 (IMREAD_UNCHANGED) keeps the 16-bit depth values
	frame2.rgb = cv::imread("../pic/color2.png");
	frame2.depth = cv::imread("../pic/depth2.png",-1);
	string detector = pr->getData("detector");
	string descriptor = pr->getData("descriptor");
	computeKeyPointsAndDesp(frame1,detector,descriptor);
	computeKeyPointsAndDesp(frame2,detector,descriptor);
	RESULT_OF_PNP motion = estimateMotion(frame1,frame2,camera,good_match_threshold,min_good_match);
	cout<<"Inliers = "<<motion.inliers<<endl;
	cout<<"R = "<<motion.rvec<<endl;
	cout<<"t = "<<motion.tvec<<endl;
	//joinPointCloud(frame1,frame2,camera,motion);
	delete pr;
	return 0;
}
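
Both examples rely on ParameterReader::getData() returning the sentinel string "NOT_FOUND" when a key is missing. Below is a minimal sketch of such a reader, assuming a plain key=value parameters.txt with '#' comment lines; the class name SimpleParameterReader and the file-format details are assumptions, not the project's actual implementation.

#include <fstream>
#include <iostream>
#include <map>
#include <string>

// A minimal key=value reader in the spirit of ParameterReader: each line is
// "key=value", '#' starts a comment, and unknown keys map to "NOT_FOUND".
class SimpleParameterReader {
public:
    explicit SimpleParameterReader(const std::string& filename) {
        std::ifstream fin(filename.c_str());
        std::string line;
        while (std::getline(fin, line)) {
            if (line.empty() || line[0] == '#') continue;   // skip comments and blanks
            std::size_t pos = line.find('=');
            if (pos == std::string::npos) continue;         // skip malformed lines
            data_[line.substr(0, pos)] = line.substr(pos + 1);
        }
    }

    std::string getData(const std::string& key) const {
        std::map<std::string, std::string>::const_iterator it = data_.find(key);
        return it == data_.end() ? std::string("NOT_FOUND") : it->second;
    }

private:
    std::map<std::string, std::string> data_;
};

int main() {
    SimpleParameterReader pr("../parameters.txt");
    std::cout << "camera.fx = " << pr.getData("camera.fx") << std::endl;
    std::cout << "missing   = " << pr.getData("no_such_key") << std::endl;
    return 0;
}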