void doTranslation(){
	cout << "doTranslation begin" << endl;
	//Mat m = (Mat_<float>(3,3) << 1.4572068353989741e+003, 0, 3.1950000000000000e+002,0, 1.4572068353989741e+003, 2.3950000000000000e+002,0, 0, 1);
	cv::Mat tempMat, tempMat2;
	double s, zConst = 0;

	rotationMatrix.convertTo(rotationMatrix, CV_64FC1);
	cameraMatrix.convertTo(cameraMatrix, CV_64FC1);

	// Back-project the pixel ray and the translation into world coordinates.
	tempMat = rotationMatrix.inv() * cameraMatrix.inv() * uvPoint;
	tempMat2 = rotationMatrix.inv() * tvec;

	// Scale factor that places the back-projected ray on the plane Z = zConst.
	s = zConst + tempMat2.at<double>(2,0);
	s /= tempMat.at<double>(2,0);

	cout << "s = " << s << endl;

	cv::Mat wcPoint = rotationMatrix.inv() * (s * cameraMatrix.inv() * uvPoint - tvec);

	Point3f realPoint(wcPoint.at<double>(0, 0), wcPoint.at<double>(1, 0), wcPoint.at<double>(2, 0));

	cout << "image point " << endl << uvPoint << endl << "to real point " << endl << wcPoint << endl << endl;

	// Record the correspondence. Globals used below:
	// vector<Point2d> imagePoints_hand; vector<Point3f> realWorldPoints_hand;
	// vector<int> frameNumList_hand;
	imagePoints_hand.push_back(Point2d(uvPoint.at<double>(0,0), uvPoint.at<double>(1,0)));
	realWorldPoints_hand.push_back(realPoint);
	frameNumList_hand.push_back(frameNum_hand);
}
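Note: the scale factor s above comes straight from the pinhole model. Writing the projection as s [u v 1]^T = K(RX + t) and requiring the recovered point X = R^{-1}(s K^{-1} [u v 1]^T - t) to lie on the plane Z = zConst gives

$$ s = \frac{z_{const} + \left[R^{-1}t\right]_z}{\left[R^{-1}K^{-1}\tilde{u}\right]_z}, $$

which is exactly what tempMat2 and tempMat compute component-wise.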
double searchAngle(const Mat &src, const Mat &dst, double st, double ed, double step, char dir, double &mpsnr) {
    int captureWidth = src.cols, captureHeight = src.rows;
    //const double fuvX = 799, fuvY = 799;
    const double fuvX = 744.5, fuvY = 744.5;
    double cx = captureWidth / 2 - 0.5, cy = captureHeight / 2 - 0.5;
    Mat K = Mat::zeros(3, 3, CV_64F);
    K.at<double>(0, 0) = fuvX;
    K.at<double>(1, 1) = fuvY;
    K.at<double>(0, 2) = cx;
    K.at<double>(1, 2) = cy;
    K.at<double>(2, 2) = 1;

    double max = -100, angle = 0;
    // Grid-search the angle from st down to ed, keeping the rotation whose
    // warp best aligns dst with src (highest PSNR).
    for (double x = st; x >= ed; x -= step) {
        Mat R = VideoStabilization::rotationMat(angle2Quaternion(x * (dir == 0), x * (dir == 1), 0));
        Mat H = K * R.inv() * K.inv(), src2, dst2;
        warpPerspective(dst, dst2, H, Size(src.cols, src.rows), INTER_LINEAR + WARP_INVERSE_MAP);

        src.copyTo(src2, dst2);
        double psnr = getPSNR(src2, dst2);
        if (psnr > max) {
            max = psnr;
            angle = x;
        }
    }
    mpsnr = max;
    return angle;
}
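For context: with a purely rotating camera, pixels in the two frames are related by the infinite homography

$$ \tilde{x}' \sim K R K^{-1} \tilde{x}, $$

so the loop above warps dst through H = K R^{-1} K^{-1} (with WARP_INVERSE_MAP) for each candidate angle about the axis selected by dir, and keeps the angle whose warp maximizes the PSNR against src.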
Example #3
    void rectify(Mat& K1, Mat& R1, Mat& t1,
                 Mat& K2, Mat& R2, Mat& t2,
                 Size size,
                 Mat& T1, Mat& T2) {

        Mat_<double> Kn1(3, 3), Kn2(3, 3);
        Mat_<double> c1(3, 1), c2(3, 1),
                     v1(3, 1), v2(3, 1), v3(3, 1);

        K1.copyTo(Kn1);
        K2.copyTo(Kn2);

        // optical centers
        c1 = - R1.t() * t1;
        c2 = - R2.t() * t2;

        // x axis
        v1 = c2 - c1;
        // y axis
        v2 = Mat(R1.row(2).t()).cross(v1);
        // z axis
        v3 = v1.cross(v2);

        Mat_<double> m_R(3, 3);
        v1 = v1 / norm(v1);
        v2 = v2 / norm(v2);
        v3 = v3 / norm(v3);

        m_R(0, 0) = v1(0), m_R(0, 1) = v1(1), m_R(0, 2) = v1(2);
        m_R(1, 0) = v2(0), m_R(1, 1) = v2(1), m_R(1, 2) = v2(2);
        m_R(2, 0) = v3(0), m_R(2, 1) = v3(1), m_R(2, 2) = v3(2);

        T1 = (Kn1 * m_R) * (R1.t() * K1.inv());
        T2 = (Kn2 * m_R) * (R2.t() * K2.inv());

        // Image center displacement
        Mat_<double> o1(3, 1), o2(3, 1);
        o1(0) = 0.5 * size.width, o1(1) = 0.5 * size.height, o1(2) = 1.0;
        o2(0) = 0.5 * size.width, o2(1) = 0.5 * size.height, o2(2) = 1.0;

        Mat_<double> on1(3, 1), on2(3, 1), d1(2, 1), d2(2, 1);
        on1 = T1 * o1;
        on2 = T2 * o2;

        d1(0) = o1(0) - on1(0) / on1(2);
        d1(1) = o1(1) - on1(1) / on1(2);
        d2(0) = o2(0) - on2(0) / on2(2);
        d2(1) = o2(1) - on2(1) / on2(2);
        d1(1) = d2(1); // use the same vertical shift so rows stay aligned

        Kn1(0, 2) = Kn1(0, 2) + d1(0);
        Kn1(1, 2) = Kn1(1, 2) + d1(1);
        Kn2(0, 2) = Kn2(0, 2) + d2(0);
        Kn2(1, 2) = Kn2(1, 2) + d2(1);

        T1 = (Kn1 * m_R) * (R1.t() * K1.inv());
        T2 = (Kn2 * m_R) * (R2.t() * K2.inv());
    }
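For reference: each rectifying transform built above has the standard form

$$ T_i = K_i^{new}\, R_{rect}\, R_i^{\top} K_i^{-1}, $$

where the rows of R_rect are the new camera axes (the baseline and two vectors orthogonal to it), and the principal points in K_i^new are then shifted so that both image centers stay in place and share the same vertical displacement.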
Example #4
void doTranslation() {
    cout<<"doTranslation begin "<<endl;
    //Mat m = (Mat_<float>(3,3) << 1.4572068353989741e+003, 0, 3.1950000000000000e+002,0, 1.4572068353989741e+003, 2.3950000000000000e+002,0, 0, 1);
    cv::Mat tempMat, tempMat2;
    double s, zConst = 0;

    rotationMatrix.convertTo(rotationMatrix,CV_64FC1);
    cameraMatrix.convertTo(cameraMatrix,CV_64FC1);

    tempMat = rotationMatrix.inv() * cameraMatrix.inv() * uvPoint;
    tempMat2 = rotationMatrix.inv() * tvec;

    s = zConst + tempMat2.at<double>(2,0);
    s /= tempMat.at<double>(2,0);

    /*	double m11,m12,m14,m21,m22,m24,m31,m32,m34 ;
    	double u,v;
    	double ay,ax,by,bx,c ;
    	m11 = rotationMatrix.at<double>(0,0);
    	m12 = rotationMatrix.at<double>(0,1);
    	m21 = rotationMatrix.at<double>(1,0);
    	m22 = rotationMatrix.at<double>(1,1);
    	m31 = rotationMatrix.at<double>(2,0);
    	m32 = rotationMatrix.at<double>(2,1);

    	m14 = tvec.at<double>(0,0);
    	m24 = tvec.at<double>(1,0);
    	m34 = tvec.at<double>(2,0);

    	u = uvPoint.at<double>(0,0);
    	v = uvPoint.at<double>(1,0);

    	c= m12*m21-m11*m22  ;
    	ay = m21*u-m11*v ;
    	by = m14*m21 - m11*m24 ;

    	ax = m22*u-m12*v ;
    	bx = m14*m22-m12*m24 ;


    	s = ((m31*bx-m32*by)/c+m34)/(1+(m31*ax-m32*ay)/c);*/

    cout<<"s"<<s<<endl ;

    cv::Mat wcPoint = rotationMatrix.inv() * (s * cameraMatrix.inv() * uvPoint - tvec);


    Point3f realPoint(wcPoint.at<double>(0, 0), wcPoint.at<double>(1, 0), wcPoint.at<double>(2, 0));

    cout<<"image point "<<endl<<uvPoint<< endl<< "to real point "<<endl <<wcPoint<<endl<<endl ;
}
void adaptXYZToNewWhitePoint(Mat& XYZWhitePoint, const Mat& BGRWhitePoint){
  Mat sRGBChromCoord = (Mat_<float>(3, 3) << 0.6400, 0.3000, 0.1500,
                                             0.3300, 0.6000, 0.0600, 	
                                             0.2126, 0.7152, 0.0722);
  Mat invsRGBChromCoord = sRGBChromCoord.inv();

  Mat XYZD65 = (Mat_<float>(1, 3) << 0.9642, 1.0000, 0.8249);
  Mat xyWhitePoint = Mat(BGRWhitePoint.rows, BGRWhitePoint.cols, CV_32FC3);
  for (int k = 0; k < 3; k++){
    xyWhitePoint.at<Vec3f>(0, 0).val[k] = product(XYZD65, invsRGBChromCoord, k);
  }

  Mat T = (Mat_<float>(3, 3) << xyWhitePoint.at<Vec3f>(0, 0).val[0], 0, 0,
                                0, xyWhitePoint.at<Vec3f>(0, 0).val[1], 0,
                                0, 0, xyWhitePoint.at<Vec3f>(0, 0).val[2]);

  Mat tmpMat = Mat(3, 3, CV_32F);
  dotProduct(sRGBChromCoord, T, tmpMat);
  Mat linBGRWhitePoint = Mat(BGRWhitePoint.rows, BGRWhitePoint.cols, CV_32FC3);

  for (int i = 0; i < 3; i++){
    linBGRWhitePoint.at<Vec3f>(0, 0).val[i] =
      sRGBInverseCompanding(BGRWhitePoint.at<Vec3f>(0, 0).val[i]);
  }
  for (int k = 0; k < 3; k++){
    XYZWhitePoint.at<Vec3f>(0, 0).val[k] =
      linBGRWhitePoint.at<Vec3f>(0, 0).val[2] * tmpMat.at<float>(k, 0) +
      linBGRWhitePoint.at<Vec3f>(0, 0).val[1] * tmpMat.at<float>(k, 1) +
      linBGRWhitePoint.at<Vec3f>(0, 0).val[0] * tmpMat.at<float>(k, 2);
  }
}
bool computeBGRGrayWhitePoint(const Mat& bgrImg, Mat& whitePoint,
                              Mat& prevYUVWhitePoint){
  float sumElementsByChannel[2] = {0, 0};
  int cntElm = 0;
  // Rows: Y, B - Y, R - Y (each chrominance row must sum to zero, hence -0.299).
  Mat matrix = (Mat_<float>(3, 3) << 0.299, 0.587, 0.114,
                                     -0.299, -0.587, 0.886,
                                     0.701, -0.587, -0.114);

  Mat invMatrix = matrix.inv();
  Mat curYUVWhitePoint = Mat(1, 1, CV_32FC3);

  Mat yuvImg = Mat(bgrImg.rows, bgrImg.cols, CV_32FC3);

  convertBGRtoYUV(bgrImg, yuvImg);

  for (int i = 0; i < yuvImg.rows; i++){
    for (int j = 0; j < yuvImg.cols; j++){
      float y = yuvImg.at<Vec3f>(i, j).val[0];
      float u = yuvImg.at<Vec3f>(i, j).val[1];
      float v = yuvImg.at<Vec3f>(i, j).val[2];
      float ratio = (std::abs(u) + std::abs(v)) / y;
      if (ratio < GREY_THRESHOLD){
        sumElementsByChannel[0] += u;
        sumElementsByChannel[1] += v;
        cntElm++;
      }
    }
  }

  curYUVWhitePoint.at<Vec3f>(0, 0).val[0] = 1;
  curYUVWhitePoint.at<Vec3f>(0, 0).val[1] = sumElementsByChannel[0] / cntElm;
  curYUVWhitePoint.at<Vec3f>(0, 0).val[2] = sumElementsByChannel[1] / cntElm;

  // termination criteria
  float curY = curYUVWhitePoint.at<Vec3f>(0, 0).val[0];
  float curU = curYUVWhitePoint.at<Vec3f>(0, 0).val[1];
  float curV = curYUVWhitePoint.at<Vec3f>(0, 0).val[2];
  float prevU = prevYUVWhitePoint.at<Vec3f>(0, 0).val[1];
  float prevV = prevYUVWhitePoint.at<Vec3f>(0, 0).val[2];
  float norm = sqrt(pow(curU - prevU, 2) + pow(curV - prevV, 2));

  if (!std::isnan(prevU) && (norm < pow(10, -6) ||
                             std::max(std::abs(curU), std::abs(curV)) < 0.001)){
    return true;
  }

  prevYUVWhitePoint.at<Vec3f>(0, 0).val[0] = curY;
  prevYUVWhitePoint.at<Vec3f>(0, 0).val[1] = curU;
  prevYUVWhitePoint.at<Vec3f>(0, 0).val[2] = curV;
  
  for (int i = 0; i < 3; i++){
    whitePoint.at<Vec3f>(0, 0).val[2 - i] = invMatrix.at<float>(i, 0) * curY +
                                            invMatrix.at<float>(i, 1) * curU +
                                            invMatrix.at<float>(i, 2) * curV;
    whitePoint.at<Vec3f>(0, 0).val[2 - i] =
      clipImg(whitePoint.at<Vec3f>(0, 0).val[2 - i], 0, 1);
  }

  return false;
}
/// If H is not orientation preserving at the point, the error is infinite.
/// For this test, it is assumed that det(H)>0.
/// \param H The homography matrix.
/// \param index The correspondence index
/// \param side In which image is the error measured?
/// \return The square reprojection error.
double HomographyModel::Error(const Mat &H, size_t index, int* side) const {
    double err = std::numeric_limits<double>::infinity();
    if(side) *side=1;
    libNumerics::vector<double> x(3);
    // Transfer error in image 2
    x(0) = x1_(0,index);
    x(1) = x1_(1,index);
    x(2) = 1.0;
    x = H * x;
    if(x(2)>0) {
        x /= x(2);
        err = sqr(x2_(0,index)-x(0)) + sqr(x2_(1,index)-x(1));
    }
    // Transfer error in image 1
    if(symError_) { // ... but only if requested
        x(0) = x2_(0,index);
        x(1) = x2_(1,index);
        x(2) = 1.0;
        x = H.inv() * x;
        if(x(2)>0) {
            x /= x(2);
            double err1 = sqr(x1_(0,index)-x(0)) + sqr(x1_(1,index)-x(1));
            if(err1>err) { // Keep worse error
                err=err1;
                if(side) *side=0;
            }
        }
    }
    return err;
}
Example #8
// ----------------------------------------------------------------
// Apply the transformation to the frame points
// ----------------------------------------------------------------
void transformPts(vector<Point2f>& src_pts,Mat& Tau,vector<Point2f>& dst_pts)
{
	vector<Point2f> pts(4);
	pts.assign(src_pts.begin(),src_pts.end());
	// Find the center of the quad
	Point2f center=(pts[0]+pts[2])*0.5;
	
	Mat tmp(4,1,CV_64FC2);

	for(int i=0;i<4;i++)
	{
		tmp.at<Vec2d>(i)[0]=pts[i].x-center.x;
		tmp.at<Vec2d>(i)[1]=pts[i].y-center.y;
	}
	Mat Tau_inv=Tau.inv();
	perspectiveTransform(tmp,tmp,Tau_inv);

	for(int i=0;i<4;i++)
	{
		pts[i].x=tmp.at<Vec2d>(i)[0]+center.x;
		pts[i].y=tmp.at<Vec2d>(i)[1]+center.y;
	}
	dst_pts.assign(pts.begin(),pts.end());
}
Example #9
PoseRT PoseRT::inv() const
{
  Mat projectiveMatrix = getProjectiveMatrix();
  Mat invertedProjectiveMatrix = projectiveMatrix.inv(DECOMP_SVD);
  PoseRT invertedPose(invertedProjectiveMatrix);
  return invertedPose;
}
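Since a pose is a rigid transform [R | t], its inverse has the closed form [R^T | -R^T t]. A minimal sketch of that alternative, assuming a 4x4 CV_64F projective matrix laid out as [R t; 0 0 0 1] (an assumption about PoseRT's layout, not part of its API); it avoids the SVD entirely:

#include <opencv2/core.hpp>

// Closed-form inverse of a rigid transform: [R t; 0 1]^-1 = [R^T -R^T t; 0 1].
// Only valid when the upper-left block is a true rotation.
cv::Mat invertRigid(const cv::Mat &P)
{
    cv::Mat R = P(cv::Rect(0, 0, 3, 3));   // rotation block
    cv::Mat t = P(cv::Rect(3, 0, 1, 3));   // translation column
    cv::Mat Pinv = cv::Mat::eye(4, 4, CV_64F);
    cv::Mat Rt = R.t();
    Rt.copyTo(Pinv(cv::Rect(0, 0, 3, 3)));
    cv::Mat ti = -Rt * t;
    ti.copyTo(Pinv(cv::Rect(3, 0, 1, 3)));
    return Pinv;
}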
Example #10
cv::Mat AntiShake::fixPictures(Mat &img_1, Mat &img_2, int loops, double final_pic_size,
		double maxDetDiff, int featurePoints, int coreSize, double absoluteRelation, int matches_type) {
	// Firstly we calculate the Homography matrix and refine it in the FeedbackController function:
	Mat H = calcHomographyFeedbackController(img_1, img_2, loops, final_pic_size, featurePoints, coreSize,
			absoluteRelation, matches_type);
	double det = determinant(H);
	cout << "STEP 11 final homography = " << endl << H << endl << " determinant = " << det << endl;

	//Secondly, let's transform the picture according to the calculated (resultant) matrix.
	//TODO
	/*
	 * Create white Image with the same size as img_1 and img_2
	 * apply Homography on white one
	 * Count histogramas and pixels with value == 0 (black pixels)
	 * if value > 0, crop on both img_1 and img_2
	 *
	 * */
	if (det >= 1.0) {
		applyHomography(H, img_1, img_2);
		cout << "STEP 12 fixed original pictures 1->2" << endl;
		return H;
	} else {
		Mat H2 = H.inv();
		H.release();
		applyHomography(H2, img_2, img_1);
		cout << "STEP 12 fixed original pictures 2->1 " << endl;
		return H2;
	}
}
Example #11
// -----------------------------------------------------------------------
/// \brief Initializes and computes the fundamental matrix.
///
/// @param mLeftIntrinsic: intrinsic matrix of the left camera
/// @param mLeftExtrinsic: extrinsic matrix of the left camera
/// @param mRightIntrinsic: intrinsic matrix of the right camera
/// @param mRightExtrinsic: extrinsic matrix of the right camera
/// @return the fundamental matrix
// -----------------------------------------------------------------------
Mat iviFundamentalMatrix(const Mat& mLeftIntrinsic,
                         const Mat& mLeftExtrinsic,
                         const Mat& mRightIntrinsic,
                         const Mat& mRightExtrinsic) {
    // To be modified!
    // Must use the iviVectorProductMatrix function
    Mat mFundamental = Mat::eye(3, 3, CV_64F);
    Mat tmp = (Mat_<double>(3,4) <<
               1.0, 0.0, 0.0, 0.0,
               0.0, 1.0, 0.0, 0.0,
               0.0, 0.0, 1.0, 0.0
              );
    Mat P1 = mLeftIntrinsic * tmp * mLeftExtrinsic;
    Mat P2 = mRightIntrinsic * tmp * mRightExtrinsic;

    Mat O = mLeftExtrinsic.inv();
    Mat O1 = (Mat_<double>(4,1) <<
              O.at<double>(3),
              O.at<double>(7),
              O.at<double>(11),
              O.at<double>(15)
             );

    Mat Hpi = P2 * P1.inv(DECOMP_SVD);
    mFundamental = iviVectorProductMatrix(P2*O1) * Hpi;

    // Return the fundamental matrix
    return mFundamental;
}
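This implements the textbook construction of F from two projection matrices (Hartley & Zisserman): P1+ is the pseudo-inverse of the left projection (hence inv(DECOMP_SVD) on a 3x4 matrix), O1 is the left camera centre taken from the fourth column of the inverse extrinsic, and P2 O1 is its image (the epipole) in the right view:

$$ F = [P_2 O_1]_{\times}\, P_2 P_1^{+}. $$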
Example #12
void ProjectorBase::setCameraParams(InputArray _K, InputArray _R, InputArray _T)
{
    Mat K = _K.getMat(), R = _R.getMat(), T = _T.getMat();

    CV_Assert(K.size() == Size(3, 3) && K.type() == CV_32F);
    CV_Assert(R.size() == Size(3, 3) && R.type() == CV_32F);
    CV_Assert((T.size() == Size(1, 3) || T.size() == Size(3, 1)) && T.type() == CV_32F);

    Mat_<float> K_(K);
    k[0] = K_(0,0); k[1] = K_(0,1); k[2] = K_(0,2);
    k[3] = K_(1,0); k[4] = K_(1,1); k[5] = K_(1,2);
    k[6] = K_(2,0); k[7] = K_(2,1); k[8] = K_(2,2);

    Mat_<float> Rinv = R.t();
    rinv[0] = Rinv(0,0); rinv[1] = Rinv(0,1); rinv[2] = Rinv(0,2);
    rinv[3] = Rinv(1,0); rinv[4] = Rinv(1,1); rinv[5] = Rinv(1,2);
    rinv[6] = Rinv(2,0); rinv[7] = Rinv(2,1); rinv[8] = Rinv(2,2);

    Mat_<float> R_Kinv = R * K.inv();
    r_kinv[0] = R_Kinv(0,0); r_kinv[1] = R_Kinv(0,1); r_kinv[2] = R_Kinv(0,2);
    r_kinv[3] = R_Kinv(1,0); r_kinv[4] = R_Kinv(1,1); r_kinv[5] = R_Kinv(1,2);
    r_kinv[6] = R_Kinv(2,0); r_kinv[7] = R_Kinv(2,1); r_kinv[8] = R_Kinv(2,2);

    Mat_<float> K_Rinv = K * Rinv;
    k_rinv[0] = K_Rinv(0,0); k_rinv[1] = K_Rinv(0,1); k_rinv[2] = K_Rinv(0,2);
    k_rinv[3] = K_Rinv(1,0); k_rinv[4] = K_Rinv(1,1); k_rinv[5] = K_Rinv(1,2);
    k_rinv[6] = K_Rinv(2,0); k_rinv[7] = K_Rinv(2,1); k_rinv[8] = K_Rinv(2,2);

    Mat_<float> T_(T.reshape(0, 3));
    t[0] = T_(0,0); t[1] = T_(1,0); t[2] = T_(2,0);
}
Example #13
void ProjectorBase::setCameraParams(const Mat &K, const Mat &R, const Mat &T)
{
    CV_Assert(K.size() == Size(3, 3) && K.type() == CV_32F);
    CV_Assert(R.size() == Size(3, 3) && R.type() == CV_32F);
    CV_Assert((T.size() == Size(1, 3) || T.size() == Size(3, 1)) && T.type() == CV_32F);

    Mat_<float> K_(K);
    k[0] = K_(0,0); k[1] = K_(0,1); k[2] = K_(0,2);
    k[3] = K_(1,0); k[4] = K_(1,1); k[5] = K_(1,2);
    k[6] = K_(2,0); k[7] = K_(2,1); k[8] = K_(2,2);

    Mat_<float> Rinv = R.t();
    rinv[0] = Rinv(0,0); rinv[1] = Rinv(0,1); rinv[2] = Rinv(0,2);
    rinv[3] = Rinv(1,0); rinv[4] = Rinv(1,1); rinv[5] = Rinv(1,2);
    rinv[6] = Rinv(2,0); rinv[7] = Rinv(2,1); rinv[8] = Rinv(2,2);

    Mat_<float> R_Kinv = R * K.inv();
    r_kinv[0] = R_Kinv(0,0); r_kinv[1] = R_Kinv(0,1); r_kinv[2] = R_Kinv(0,2);
    r_kinv[3] = R_Kinv(1,0); r_kinv[4] = R_Kinv(1,1); r_kinv[5] = R_Kinv(1,2);
    r_kinv[6] = R_Kinv(2,0); r_kinv[7] = R_Kinv(2,1); r_kinv[8] = R_Kinv(2,2);

    Mat_<float> K_Rinv = K * Rinv;
    k_rinv[0] = K_Rinv(0,0); k_rinv[1] = K_Rinv(0,1); k_rinv[2] = K_Rinv(0,2);
    k_rinv[3] = K_Rinv(1,0); k_rinv[4] = K_Rinv(1,1); k_rinv[5] = K_Rinv(1,2);
    k_rinv[6] = K_Rinv(2,0); k_rinv[7] = K_Rinv(2,1); k_rinv[8] = K_Rinv(2,2);

    Mat_<float> T_(T.reshape(0, 3));
    t[0] = T_(0,0); t[1] = T_(1,0); t[2] = T_(2,0);
}
void denormalize(Mat &H, const Mat &Hn, const Mat &T, const Mat &Tp)
{
	H.create(Hn.rows, Hn.cols, Hn.type());

	H = T.inv()*Hn*Tp;
	H /= H.at<float>(H.rows-1, H.cols-1);
}
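For reference: with the normalized-DLT conventions that T conditions the destination points and Tp the source points, an estimate Hn satisfying T x' ~ Hn (Tp x) denormalizes as

$$ H = T^{-1} \hat{H}\, T_p, $$

which is the line above; the final division just fixes the scale so the bottom-right entry of H is 1.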
Example #15
Mat draw_image_in_quad(Mat& image, vector<Point>& quad, const Mat& imposed, Mat homography = Mat())
{
    vector<Point> src_points;
    src_points.push_back(Point(0, 0));
    src_points.push_back(Point(imposed.cols, 0));
    src_points.push_back(Point(imposed.cols, imposed.rows));
    src_points.push_back(Point(0, imposed.rows));

    if (homography.rows == 0 || homography.cols == 0)
    {
        homography = findHomography(src_points, quad);
    }

    Mat homography_inv = homography.inv();
    Rect r = boundingRect(quad);

    for (int i = 0; i < r.height; ++i)
    {
        for (int j = 0; j < r.width; ++j)
        {
            int x = j + r.x;
            int y = i + r.y;

            // if (x, y) is in the quad
            if (pointPolygonTest(quad, Point(x, y), false) > -0.5)
            {
                // lookup pixel value
                // H * src == dest ->
                // src ~= H^-1 * dest

                Mat p = Mat::zeros(3, 1, CV_64F);
                p.at<double>(0, 0) = x;
                p.at<double>(1, 0) = y;
                p.at<double>(2, 0) = 1;

                Mat q = homography_inv * p;
                double wx = q.at<double>(0, 0);
                double wy = q.at<double>(1, 0);
                double w  = q.at<double>(2, 0);

                assert(w != 0.0f);

                int qx = (int)(round(wx / w));
                int qy = (int)(round(wy / w));

                if (qx >= 0 && qy >= 0 && qx < imposed.cols && qy < imposed.rows)
                {
                    // printf("(%d, %d) -> (%d, %d)\n", y, x, qy, qx);
                    image.at<Vec3b>(y, x) = imposed.at<Vec3b>(qy, qx);
                }
                else
                {
                    // cout << qx << ", " << qy << endl;
                }
            }
        }
    }

    return homography;
}
void InversePerspectiveMapping::updateFrameUsingStandardAssumption(const Mat &pose){
	rCW2CI = pose.inv() * firstFrameParam.rtMat;
	//cout << " pose " << pose << endl;
	rCW2UV = firstFrameParam.intrMat * rCW2CI;
	//cout << " rCW2UV " << rCW2UV << endl;
	calcRemapMat();
}
Example #17
static Mat getFundamentalMat( const Mat& R1, const Mat& t1,
                             const Mat& R2, const Mat& t2,
                             const Mat& cameraMatrix )
{
    Mat_<double> R = R2*R1.t(), t = t2 - R*t1;
    double tx = t.at<double>(0,0), ty = t.at<double>(1,0), tz = t.at<double>(2,0);
    Mat E = (Mat_<double>(3,3) << 0, -tz, ty, tz, 0, -tx, -ty, tx, 0)*R;
    Mat iK = cameraMatrix.inv();
    Mat F = iK.t()*E*iK;
    
#if 0
    static bool checked = false;
    if(!checked)
    {
        vector<Point3f> objpoints(100);
        Mat O(objpoints);
        randu(O, Scalar::all(-10), Scalar::all(10));
        vector<Point2f> imgpoints1, imgpoints2;
        projectPoints(Mat(objpoints), R1, t1, cameraMatrix, Mat(), imgpoints1);
        projectPoints(Mat(objpoints), R2, t2, cameraMatrix, Mat(), imgpoints2);
        double* f = (double*)F.data;
        for( size_t i = 0; i < objpoints.size(); i++ )
        {
            Point2f p1 = imgpoints1[i], p2 = imgpoints2[i];
            double diff = p2.x*(f[0]*p1.x + f[1]*p1.y + f[2]) +
            p2.y*(f[3]*p1.x + f[4]*p1.y + f[5]) +
            f[6]*p1.x + f[7]*p1.y + f[8];
            CV_Assert(fabs(diff) < 1e-3);
        }
        checked = true;
    }
#endif
    return F;
}
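For reference, this is the standard relative-pose construction: with R = R2 R1^T and t = t2 - R t1,

$$ E = [t]_{\times} R, \qquad F = K^{-\top} E\, K^{-1}, $$

and the #if 0 block sanity-checks that x2^T F x1 is approximately 0 on synthetic correspondences projected into both views.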
Point3f AugmentedView::getCameraRotation()
{
	/*Mat invTranslation = Mat::eye(4,4,CV_32F);
	OpenGLHelper::translate(invTranslation,Point3f(((float)position->at<double>(0,0)), ((float)position->at<double>(0,1)),-((float)position->at<double>(0,2))));
	*/
	
	Mat cameraProj = Mat::eye(4,4,CV_32F);
	OpenGLHelper::gluPerspective(cameraProj,fieldOfView,1.7f,20.0f, 600.0f);
	Mat invCamProj = cameraProj.inv();

	//Projection = camera * gyro * translation * rotation
	Mat rotationOnly = projection * invCamProj;
	Mat rotationVector = Mat(1,3,CV_32F);
	rotationOnly *= (1.0f / rotationOnly.at<float>(3,3));
	Mat smallRotation =  rotationOnly(Range(0,3),Range(0,3));
	try
	{
		LOGD_Mat(LOGTAG_POSITION,"smallRotation",&smallRotation);
		cv::Rodrigues(smallRotation,rotationVector);
		LOGD_Mat(LOGTAG_POSITION,"RotationVector",&rotationVector);
	}
	catch (exception & e)
	{
		LOGW(LOGTAG_POSITION,"Error rodriguing: %s",e.what());
		return Point3f(0,0,0);
	}
	return Point3f(-(rotationVector.at<float>(0,0)), -(rotationVector.at<float>(0,1)),-(rotationVector.at<float>(0,2)));
}
Example #19
//------------------------------ Update ---------------------------------
void KalmanFilter::Update(const cv::Mat &z) {
  CHECK(z.rows == H_.rows);
  Mat y = z - H_ * predicted_x_;               // Innovation.
  Mat S = H_ * predicted_P_ * H_.t() + R_;     // Innovation covariance.
  Mat K = predicted_P_ * H_.t() * S.inv();     // Optimal Kalman gain.
  x_ = predicted_x_ + K * y;                   // A posteriori state estimate.
  Mat I = cv::Mat::eye(P_.rows, P_.cols, CV_32F);
  P_ = (I - K * H_) * predicted_P_;            // Updated state covariance.
}
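These are the textbook Kalman update equations:

$$ y = z - H\hat{x}, \quad S = H P H^{\top} + R, \quad K = P H^{\top} S^{-1}, $$
$$ x = \hat{x} + K y, \quad P \leftarrow (I - K H) P. $$

Since S is symmetric positive definite, S.inv(DECOMP_CHOLESKY) would be a cheaper and numerically safer choice than the default LU-based inverse used above.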
Example #20
vector<Point2f> Recognition::projectPointsBack(vector<Point2f> mesh, Mat& originalImage, Mat homography){
	vector<Point2f> reprojectedPoints;
	Mat homoInverse=homography.inv();
	perspectiveTransform(mesh, reprojectedPoints, homoInverse);
	/*	for(unsigned int i=0; i< mesh.size(); i++){
		circle(originalImage, reprojectedPoints[i], 1, Scalar(0,0,255), 2);
	}*/
	return reprojectedPoints;
}
Example #21
//----------------------------------------------------------
// Draw the banner into the image region under the transform Tau
//----------------------------------------------------------
void PutBanner(Mat& img,Mat& banner,Mat& Tau,vector<Point2f>& region_tau)
{		
	Point2f center=(region_tau[0]+region_tau[2])*0.5;

	Rect roi=cv::boundingRect(region_tau);

	double w=roi.width;
	double h=roi.height;
	vector<Point2f> pts(w*h);
	vector<Point2f> dst_pts(w*h);

	Mat tmp(w*h,1,CV_64FC2);
	int ind=0;

	for(int i=roi.y;i<roi.y+roi.height;i++)
	{
		for(int j=roi.x;j<roi.x+roi.width;j++)
		{
			tmp.at<Vec2d>(ind)[0]=j-roi.x-roi.width/2.0;
			tmp.at<Vec2d>(ind)[1]=i-roi.y-roi.height/2.0;
			ind++;
		}
	}
	
	Mat Tau_inv=Tau.inv(); // note: the inverse is computed but unused; the forward map Tau is applied below
	perspectiveTransform(tmp,tmp,Tau);

	ind=0;
	for(int i=roi.y;i<roi.y+roi.height;i++)
	{
		for(int j=roi.x;j<roi.x+roi.width;j++)
		{
			Vec2d val=tmp.at<Vec2d>(ind);
			pts[ind].x=val[0]+banner.cols/2.0;
			pts[ind].y=val[1]+banner.rows/2.0;
			ind++;
		}
	}

	dst_pts.assign(pts.begin(),pts.end());

	ind=0;

	for(int i=roi.y;i<roi.y+roi.height;i++)
	{
		for(int j=roi.x;j<roi.x+roi.width;j++)
		{
			if(dst_pts[ind].y>0 && dst_pts[ind].y<banner.rows && dst_pts[ind].x >0 && dst_pts[ind].x <banner.cols)
			{
				img.at<Vec3b>(i,j)=banner.at<Vec3b>(dst_pts[ind].y,dst_pts[ind].x);
			}
			ind++;
		}
	}

}
Example #22
void calibExt(double *para, vector<Mat> Hset, Mat& K)
{
	int imgNum = Hset.size();
	Mat h1(3, 1, CV_64FC1, Scalar(0)), h2(3, 1, CV_64FC1, Scalar(0)), h3(3, 1, CV_64FC1, Scalar(0)); 
	Mat invK, r1, r2, r3, t;
	Mat Q(3, 3, CV_64FC1, Scalar(0));
	Mat U, V, D, Vt, Ut, R, Rnew(3, 1, CV_64FC1, Scalar(1));
	double norm;
	invK = K.inv();
	cout << "invK: " << invK << endl;
	
	for(int i = 0; i < imgNum; i++)
	{
		for(int j = 0; j < 3; j++){
			h1.at<double>(j, 0) = Hset[i].at<double>(j, 0);
			h2.at<double>(j, 0) = Hset[i].at<double>(j, 1);
			h3.at<double>(j, 0) = Hset[i].at<double>(j, 2);
		}
		//cout << "h1: " << h1 << endl;
		r1 = invK*h1;
		norm = sqrt(r1.dot(r1));
		rvecNorm(r1, norm);
		//cout << "r1: " << r1 << endl;
		r2 = invK*h2;
		rvecNorm(r2, norm);
		//cout << "r2: " << r2 << endl;
		r3 = r1.cross(r2);
		rvecNorm(r3, norm);
		//cout << "r3: " << r3 << endl;
		t = invK*h3;
		rvecNorm(t, norm);
		//cout << "t: " << t << endl;
		// rotation conditioning
		for(int k = 0; k < 3; k++)
		{
			Q.at<double>(k, 0) = r1.at<double>(k, 0);
			Q.at<double>(k, 1) = r2.at<double>(k, 0);
			Q.at<double>(k, 2) = r3.at<double>(k, 0);
		}
		//cout << "Q: " << Q << endl;
		SVD::compute(Q, D, U, Vt, 0);
		//cout << "D: " << D << endl;
		// rotation conditioning
		R = U*Vt;
		//cout << "R: " << R.t()*R << endl;
		// convert to rodrigues representation
		Rodrigues(R, Rnew);
		//cout << "Rnew: " << Rnew << endl;
		para[7 + i*6] = Rnew.at<double>(0);
		para[7 + i*6 + 1] = Rnew.at<double>(1);
		para[7 + i*6 + 2] = Rnew.at<double>(2);
		para[7 + i*6 + 3] = t.at<double>(0);
		para[7 + i*6 + 4] = t.at<double>(1);
		para[7 + i*6 + 5] = t.at<double>(2);
	}
}
Example #23
//TODO this implementation is suboptimal
void Image::invert(Image& dst){
	#if IMAGE_SAFE == 1
		assert(dst.hasSize(cols,rows));
	#endif

	Mat A = this->toMat();
	Mat B = A.inv(DECOMP_SVD);

	memcpy((void*) dst.data.get(),(void*) B.data, sizeof(float)*cols*rows);
}
Example #24
static Mat LieSub(Mat A, Mat B){
    // Validate inputs before converting them.
    CV_Assert(A.size()==Size(6,1) && B.size()==Size(6,1));
    Mat Pa;
    Mat Pb;
    LieToP(A,Pa);
    LieToP(B,Pb);
    Mat out;
    PToLie(Pa*Pb.inv(),out);
    return out;
}
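A hedged reading, since LieToP/PToLie are not shown here: assuming they convert between a 6-vector twist and its 4x4 group element (matrix exponential and logarithm), LieSub returns the relative motion

$$ A \ominus B = \log\!\big(\exp(A)\,\exp(B)^{-1}\big), $$

i.e. the twist taking pose B to pose A.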
Example #25
Mat Assignment2::warpImage(Mat H)
{
	// Apply perspective transformation to see effect of H
	Mat hinv = H.inv(DECOMP_LU);
	Mat translated_image;
	
	warpPerspective(this->image, translated_image, hinv, this->image.size(), CV_INTER_LINEAR, BORDER_CONSTANT);
	
	return translated_image;
}
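As an aside, OpenCV can apply the inverse mapping without materializing hinv: passing the original H together with the WARP_INVERSE_MAP flag produces the same result. A minimal sketch, assuming the same image and H as in warpImage:

cv::Mat warped;
cv::warpPerspective(image, warped, H, image.size(),
                    cv::INTER_LINEAR | cv::WARP_INVERSE_MAP,
                    cv::BORDER_CONSTANT);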
Example #26
void dataProcess(int start, int end, int step)
{
	Mat A[640], B[640];
	double depth[640];
	for (int s = start; s <= end; s += step)
	{
		static ifstream infile;
		if (!infile.is_open()) {
			infile.open("depth2.txt", ios::in);
		}
		for (int i = 0; i < 640; i++)
		{
			double a;
			infile >> a;
			/*if (a<0 || a>5000)
				a = 0;*/
			depth[i] = a;
			//cout << a << " ";
		}

		for (int i = 0; i < 640; i++)
		{
			
			double num = depth[i];
			//double temp[3] = { num*num, num, 1 };
			double temp[2] = {  num, 1 };
			A[i].push_back(Mat(1, 2, CV_64FC1, temp));
			/*double temp[2] = {  num, 1 };
			A[i].push_back(Mat(1, 2, CV_64FC1, temp));*/
			B[i].push_back(Mat(1, 1, CV_64FC1, Scalar(s)));
		}
	}
	/*Mat P = A[140].t()*A[140];
	Mat Q = A[140].t()*B[140];
	cout << P << endl;
	cout << Q << endl;
	cout << P.inv()*Q << endl;*/
	for (int i = 0; i < 640; i++)
	{
		Mat AA = A[i].t()* A[i];
		Mat BB = A[i].t()* B[i];
		Mat x = AA.inv()*BB;
		cout << x << endl;
		a[i] = x.at<double>(0, 0);
		b[i] = x.at<double>(1, 0);
		//c[i] = x.at<double>(2, 0);
	}
}
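Each per-column fit above solves the normal equations x = (A^T A)^{-1} A^T b. A minimal sketch of the sturdier alternative, solving the over-determined system directly with cv::solve instead of forming the explicit inverse:

// Inside the final loop, replacing the explicit normal equations:
cv::Mat x;
cv::solve(A[i], B[i], x, cv::DECOMP_SVD);  // least-squares solution of A[i]*x = B[i]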
static Point3f getObject3DCoord(Point3f objectPosition, Mat projection)
{
	Mat invProjection = projection.inv();

	float data[] = {objectPosition.x,objectPosition.y,objectPosition.z,1.0f};
	Mat cameraPos = Mat(4,1,CV_32F,data);
	cameraPos = invProjection * cameraPos;
	cameraPos *= 1.0f/ cameraPos.at<float>(3,0);
	
	LOG_Mat(ANDROID_LOG_VERBOSE,LOGTAG_ARINPUT,"3D->Screen",&cameraPos);
	return Point3f(cameraPos.at<float>(0,0),cameraPos.at<float>(1,0),cameraPos.at<float>(2,0));

}
Example #28
Point2f backProject(Point2f p, Mat cameraMatrix, Mat rotationMatrix, Mat tvec, float height)
{
	cv::Mat uvPoint = cv::Mat::ones(3, 1, cv::DataType<double>::type); // [u v 1]
	uvPoint.at<double>(0, 0) = p.x;
	uvPoint.at<double>(1, 0) = p.y;

	cv::Mat tempMat, tempMat2;
	double s;

	tempMat = rotationMatrix.inv() * cameraMatrix.inv() * uvPoint;
	tempMat2 = rotationMatrix.inv() * tvec;
	s = height + tempMat2.at<double>(2, 0); // height plays the role of Zconst (zero for the ground plane)
	s /= tempMat.at<double>(2, 0);

	Mat pt = rotationMatrix.inv() * (s * cameraMatrix.inv() * uvPoint - tvec);
	//printf("[%f %f %f]\n", pt.at<double>(0, 0), pt.at<double>(1, 0), pt.at<double>(2, 0));

	//cv::Mat backPt = 1 / s * cameraMatrix * (rotationMatrix * pt + tvec);
	//printf("[%f %f]\n", backPt.at<double>(0, 0), backPt.at<double>(1, 0));

	return Point2f((float)pt.at<double>(0, 0), (float)pt.at<double>(1, 0));
}
static Point3f getCameraPosition(Mat projection)
{
	Mat invProjection = projection.inv();

	float data[] = {0,0,0,1.0f};
	Mat cameraPos = Mat(4,1,CV_32F,data);
	cameraPos = invProjection * cameraPos;
	cameraPos *= 1.0f/ cameraPos.at<float>(3,0);
	
	return Point3f(cameraPos.at<float>(0,0),cameraPos.at<float>(1,0),cameraPos.at<float>(2,0));
	//LOG_Mat(ANDROID_LOG_VERBOSE,LOGTAG_ARINPUT,"CameraPosition",&cameraPos);

}
Point3f doTranslate(Point2d imagePoint){

	uvPoint.at<double>(0,0) = imagePoint.x+0.0; //got this point using mouse callback
	uvPoint.at<double>(1,0) = imagePoint.y+0.0;
	
	cv::Mat tempMat, tempMat2;
	double s, zConst = 0;

	cout<<"DoTranslate ("<<uvPoint.at<double>(0,0)<<","<<uvPoint.at<double>(1,0)<<") ->";
	
	tempMat = rotationMatrix.inv() * cameraMatrix.inv() * uvPoint;
			
	tempMat2 = rotationMatrix.inv() * tvec;
		
	s = zConst + tempMat2.at<double>(2,0);
	s /= tempMat.at<double>(2,0);
	cv::Mat wcPoint = rotationMatrix.inv() * (s * cameraMatrix.inv() * uvPoint - tvec);

	Point3f realPoint(wcPoint.at<double>(0, 0), wcPoint.at<double>(1, 0), wcPoint.at<double>(2, 0));
	cout<<"("<<realPoint.x<<","<<realPoint.y<<","<<realPoint.z<<") with s="<<s <<endl ;
	return realPoint ; 
}