// Example #1
// 0
 void SetRotation( const Matx33f &R )
 {
   // Refresh the cached quantities that depend on the rotation:
   // the inverse (== transpose, since R is orthonormal) and the
   // camera centre C = -R^T * t.
   m_inv_R = R.t();
   m_C     = -m_inv_R * m_t;
   m_R     = R;
 }
// Example #2
// 0
// Convert a fundamental matrix into an essential matrix using the
// camera intrinsics of the two views: E = K2^T * F * K1.
//
// f  - 3x3 fundamental matrix relating the two views
// K1 - 3x3 intrinsic matrix of the first camera
// K2 - 3x3 intrinsic matrix of the second camera
// Returns the 3x3 essential matrix.
Matx33f CalculateEssentialMatrix(Matx33f f, Matx33f K1, Matx33f K2)
{
    // The product is already a Matx33f; the original's default-initialized
    // temporary and redundant C-style cast added nothing.
    return K2.t() * f * K1;
}
// Refine the 3D point positions, the camera poses and the shared focal
// length by minimizing the total 2D reprojection error with Ceres
// (bundle adjustment).
//
// pointCloud      - in/out: 3D points; overwritten with optimized positions
// cameraPoses     - in/out: per-view [R|t] poses; overwritten with optimized
//                   values (all-zero-diagonal "empty" poses are skipped)
// intrinsics      - in/out: K(0,0) and K(1,1) are overwritten with the single
//                   optimized focal length shared by all views
// image2dFeatures - per-view 2D keypoints, indexed by each point's
//                   originatingViews (view index -> feature index)
//
// On non-convergence the function prints an error and returns without
// touching any of the in/out parameters.
void SfMBundleAdjustmentUtils::adjustBundle(
        PointCloud&                  pointCloud,
        std::vector<Pose>&           cameraPoses,
        Intrinsics&                  intrinsics,
        const std::vector<Features>& image2dFeatures) {

    std::call_once(initLoggingFlag, initLogging);

    // Create residuals for each observation in the bundle adjustment problem. The
    // parameters for cameras and points are added automatically.
    ceres::Problem problem;

    //Convert camera pose parameters from [R|t] (3x4) to
    //[Angle-Axis (3), Translation (3)] (1x6).
    //NOTE(review): the original comment claimed a 1x7 vector including the
    //focal length, but the typedef below is 1x6 — the focal length is
    //optimized separately as the scalar `focal` shared by all cameras.
    typedef cv::Matx<double, 1, 6> CameraVector;
    vector<CameraVector> cameraPoses6d;
    cameraPoses6d.reserve(cameraPoses.size());
    for (size_t i = 0; i < cameraPoses.size(); i++) {
        const Pose& pose = cameraPoses[i];

        // A pose whose rotation diagonal is all zeros marks a view that was
        // never estimated; push a placeholder so indices stay aligned with
        // cameraPoses.
        if (pose(0, 0) == 0 and pose(1, 1) == 0 and pose(2, 2) == 0) {
            //This camera pose is empty, it should not be used in the optimization
            cameraPoses6d.push_back(CameraVector());
            continue;
        }
        Vec3f t(pose(0, 3), pose(1, 3), pose(2, 3));
        Matx33f R = pose.get_minor<3, 3>(0, 0);
        float angleAxis[3];
        ceres::RotationMatrixToAngleAxis<float>(R.t().val, angleAxis); //Ceres assumes col-major...

        cameraPoses6d.push_back(CameraVector(
                angleAxis[0],
                angleAxis[1],
                angleAxis[2],
                t(0),
                t(1),
                t(2)));
    }

    //focal-length factor for optimization (single focal shared by all views)
    double focal = intrinsics.K.at<float>(0, 0);

    // Double-precision copies of the points: Ceres optimizes these in place.
    vector<cv::Vec3d> points3d(pointCloud.size());

    for (int i = 0; i < pointCloud.size(); i++) {
        const Point3DInMap& p = pointCloud[i];
        points3d[i] = cv::Vec3d(p.p.x, p.p.y, p.p.z);

        for (const auto& kv : p.originatingViews) {
            //kv.first  = camera index
            //kv.second = 2d feature index
            Point2f p2d = image2dFeatures[kv.first].points[kv.second];

            //subtract center of projection, since the optimizer doesn't know what it is
            p2d.x -= intrinsics.K.at<float>(0, 2);
            p2d.y -= intrinsics.K.at<float>(1, 2);

            // Each Residual block takes a point and a camera as input and outputs a 2
            // dimensional residual. Internally, the cost function stores the observed
            // image location and compares the reprojection against the observation.
            ceres::CostFunction* cost_function = SimpleReprojectionError::Create(p2d.x, p2d.y);

            // Parameter blocks: 6-DoF camera, 3D point, shared focal scalar.
            problem.AddResidualBlock(cost_function,
                    NULL /* squared loss */,
                    cameraPoses6d[kv.first].val,
                    points3d[i].val,
					&focal);
        }
    }

    // Make Ceres automatically detect the bundle structure. Note that the
    // standard solver, SPARSE_NORMAL_CHOLESKY, also works fine but it is slower
    // for standard bundle adjustment problems.
    ceres::Solver::Options options;
    options.linear_solver_type = ceres::DENSE_SCHUR;
    options.minimizer_progress_to_stdout = true;
    options.max_num_iterations = 500;
    options.eta = 1e-2;
    options.max_solver_time_in_seconds = 10;
    options.logging_type = ceres::LoggingType::SILENT;
    ceres::Solver::Summary summary;
    ceres::Solve(options, &problem, &summary);
    std::cout << summary.BriefReport() << "\n";

    if (not (summary.termination_type == ceres::CONVERGENCE)) {
        cerr << "Bundle adjustment failed." << endl;
        return;
    }

    //update optimized focal (same value for fx and fy)
    intrinsics.K.at<float>(0, 0) = focal;
    intrinsics.K.at<float>(1, 1) = focal;

    //Implement the optimized camera poses and 3D points back into the reconstruction
    for (size_t i = 0; i < cameraPoses.size(); i++) {
    	Pose& pose = cameraPoses[i];
    	//NOTE(review): poseBefore is never read again — dead variable.
    	Pose poseBefore = pose;

        if (pose(0, 0) == 0 and pose(1, 1) == 0 and pose(2, 2) == 0) {
            //This camera pose is empty, it was not used in the optimization
            continue;
        }

        //Convert optimized Angle-Axis back to rotation matrix
        double rotationMat[9] = { 0 };
        ceres::AngleAxisToRotationMatrix(cameraPoses6d[i].val, rotationMat);

        // Transpose while copying: `rotationMat` is col-major, Pose is
        // row-major, hence the swapped (c, r) indexing.
        for (int r = 0; r < 3; r++) {
            for (int c = 0; c < 3; c++) {
                pose(c, r) = rotationMat[r * 3 + c]; //`rotationMat` is col-major...
            }
        }

        //Translation (elements 3..5 of the 6-DoF camera vector)
        pose(0, 3) = cameraPoses6d[i](3);
        pose(1, 3) = cameraPoses6d[i](4);
        pose(2, 3) = cameraPoses6d[i](5);
    }

    //Copy the optimized 3D positions back into the point cloud.
    for (int i = 0; i < pointCloud.size(); i++) {
        pointCloud[i].p.x = points3d[i](0);
        pointCloud[i].p.y = points3d[i](1);
        pointCloud[i].p.z = points3d[i](2);
    }
}
int main( int argc, char* argv[] )
{
    if( argc != 6)
      {
      std::cerr << "Usage: " << argv[0] << " <InputImage> <Mask> <colorspace> <OutputMu> <OutputSigma>" << std::endl;
      return EXIT_FAILURE;
      }
    cv::Mat image = cv::imread( argv[1] );
    if(!image.data)
      {
      return EXIT_FAILURE;
      }

    cv::Mat mask = cv::imread( argv[2],0 );
    if(!mask.data)
      {
      return EXIT_FAILURE;
      }


    if (strcmp(argv[3],"YCrCb")==0)
    {
       cv::cvtColor( image, image, CV_BGR2YCrCb );
    }
    else if (strcmp(argv[3],"HSV")==0 )
    {
       cv::cvtColor( image, image, CV_BGR2HSV );
    }
    else if (strcmp(argv[3],"RGB")!=0)
    {
        std::cerr<<"Colorspaces: RGB or YCrCb or HSV"<<std::endl;
        return EXIT_FAILURE;
    }

    typedef cv::Matx<float, 1, 3> Matx13f;
    typedef cv::Matx<float, 3, 1> Matx31f;
    typedef cv::Matx<float, 3, 3> Matx33f;

    //Mu
    Matx31f mu;
    mu.zeros();
    int den=0;

    for (int i=0; i<image.rows;i++)
    {
        for (int j=0; j<image.cols; j++)
        {
            if(mask.at<float>(i,j)!=0)
            {
                den++;
                mu+=image.at<cv::Vec3b>(i,j);
            }
        }
    }
    mu(0)/=static_cast<float>(den);
    mu(1)/=static_cast<float>(den);
    mu(2)/=static_cast<float>(den);

    //Sigma
    Matx31f pixel;
    Matx13f transPixel;
    Matx33f sigma;
    sigma.zeros();

    for (int i=0; i<image.rows;i++)
    {
        for (int j=0; j<image.cols; j++)
        {
            if(mask.at<float>(i,j)!=0)
            {
                pixel=image.at<cv::Vec3b>(i,j);
                pixel-=mu;
                transPixel=pixel.t();
                sigma+=pixel*transPixel;
            }
        }
    }

    for(int i=0;i<3;i++)
        for (int j=0;j<3;j++)
        {
            sigma (i,j)/=den-1;
        }

    std::ofstream fileMu(argv[4], std::ios::out | std::ios::trunc);
    if(fileMu)
    {
        for (int i=0; i<3; i++)
        {
            fileMu<<mu(i)<<endl;
        }
        fileMu.close();
    }
    else
    {
        std::cerr << "Can't open file "<<argv[4] << std::endl;
    }

    std::ofstream fileSigma(argv[5], std::ios::out | std::ios::trunc);
    if(fileSigma)
    {
        for (int i=0; i<3; i++)
        {
            for (int j=0; j<3; j++)
            {
                fileSigma<<sigma(i,j)<<endl;
            }
        }
        fileSigma.close();
    }
    else
    {
        std::cerr << "Can't open file "<<argv[5] << std::endl;
    }

    return 0;
}