Example #1
template <typename T>
bool isQuadValid(const vector< Point_<T> >& quad) {

	if(quad.size() != 4) return false;
	
	Mat quadMat;
	for(size_t i = 0; i < 4; i++) {
		Mat Z = (Mat_<double>(1,3) << quad[i].x, quad[i].y, 0 );
		if(quadMat.empty()){
			quadMat = Z;
		}
		else{
			quadMat.push_back( Z );
		}
	}
	
	Mat A = quadMat.row(0)-quadMat.row(1);
	Mat B = quadMat.row(2)-quadMat.row(1);
	Mat C = quadMat.row(0)-quadMat.row(2);
	Mat D = quadMat.row(3)-quadMat.row(2);
	Mat E = quadMat.row(3)-quadMat.row(1);
	
	int sign = -1;
	if(A.cross(B).at<double>(0, 2) > 0){
		sign = 1;
	}
	
	return	sign*E.cross(B).at<double>(0, 2) > 0 &&
			sign*C.cross(D).at<double>(0, 2) > 0 &&
			sign*A.cross(E).at<double>(0, 2) > 0;
}
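// A minimal usage sketch (not from the original source): the corner values and the
// demo function name below are made up, just to show the expected input for isQuadValid.
void isQuadValidDemo() {
	vector< Point_<float> > corners;
	corners.push_back(Point_<float>(10.f, 10.f));
	corners.push_back(Point_<float>(200.f, 12.f));
	corners.push_back(Point_<float>(205.f, 180.f));
	corners.push_back(Point_<float>(8.f, 175.f));
	if (isQuadValid(corners)) {
		// the quad is convex and consistently oriented, e.g. safe to warp
	}
}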
// Function for calculating the camera rotation matrix (note: the theta parameter is currently unused)
Mat rotation_matrix(double theta)
{
	/*
	 * Calculating rotation matrix
	 */
	// VUP vector: points opposite to gravity
	Mat vup = Mat::zeros(1, 3, CV_32F);
	vup.at<float>(1) = 1;
	//	cout << "VUP: "<< vup << endl;

	// Look-at point: where the camera is pointing (VPN)
	Mat look_at_point = Mat::zeros(1, 3, CV_32F);
	look_at_point.at<float>(1) = -1;
	look_at_point.at<float>(2) = -1;
	//	cout << "Look-At Point: "<<look_at_point<<endl;
	look_at_point = look_at_point / norm(look_at_point, NORM_L2);
	//	cout << "Normalized Look-At point (n): "<< look_at_point<<endl;


	// The u_axis vector
	Mat u_axis = vup.cross(look_at_point);
	//	cout << "U axis: "<< u_axis<<endl;
	u_axis = u_axis / norm(u_axis, NORM_L2);
	//	cout << "Normalized U axis: "<< u_axis<<endl;

	// The v_axis vector
	Mat v_axis = look_at_point.cross(u_axis);
	//	cout << "V_axis (should be normalized): "<<v_axis<<endl<<endl;

	/*
	 * Rotation Matrix for the camera to get the new axis
	 */
	Mat rotationMatrix;
	rotationMatrix.push_back(u_axis);
	rotationMatrix.push_back(v_axis);
	rotationMatrix.push_back(look_at_point);
	//	cout << "Rotation matrix: "<<endl<< rotationMatrix<<endl<<endl;

	/*
	 * Converting the Rotation Matrix into a Homogeneous Rotation Matrix
	 */
	Mat homogeneousRotationMatrix; //= Mat::eye(4,3, CV_32F);
	homogeneousRotationMatrix.push_back(u_axis);
	homogeneousRotationMatrix.push_back(v_axis);
	homogeneousRotationMatrix.push_back(look_at_point);

	Mat justarow = Mat::zeros(1, 3, CV_32F);
	homogeneousRotationMatrix.push_back(justarow);

	Mat justacolumn = Mat::zeros(4, 1, CV_32F);
	justacolumn.at<float>(3,0) = 1;

	hconcat(homogeneousRotationMatrix, justacolumn, homogeneousRotationMatrix);
	//	cout << "Homogeneous Rotation matrix:"<<endl<<homogeneousRotationMatrix<<endl<<endl;
	return homogeneousRotationMatrix;
}
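// A small usage sketch (assumed, not from the original source): the returned 4x4 matrix
// is CV_32F, so homogeneous points must be float as well. As noted above, theta is not
// actually used by rotation_matrix().
void rotationMatrixDemo() {
	Mat R = rotation_matrix(0.0);
	Mat p = (Mat_<float>(4, 1) << 1.f, 2.f, 3.f, 1.f); // homogeneous world point
	Mat p_cam = R * p;                                 // point expressed in the camera axes
	cout << "p_cam: " << p_cam << endl;
}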
Example #3
Mat getVanishingLine(Mat a, Mat b, Mat c, Mat d){ // find the vanishing line of the plane and build the rectifying homography
  Mat l1 = a.cross(b);
  cout << "a cross b = " << l1 << endl;
  Mat l2 = d.cross(c);
  cout << "d cross c = " << l2 << endl;
  Mat l3 = a.cross(d);
  cout << "a cross d = " << l1 << endl;
  Mat l4 = b.cross(c);
  cout << "b cross c = " << l2 << endl;
  Mat v1 = l1.cross(l2);
  Mat v2 = l3.cross(l4);

  Mat l = v1.cross(v2); // the vanishing line through the two vanishing points
  Mat H = (Mat_<double>(3,3) << 1, 0, 0, 0, 1, 0, l.at<double>(0, 0), l.at<double>(1, 0), l.at<double>(2, 0)); // affine-rectifying homography: last row is the vanishing line
    return H;
  
  //Mat l1 = a.dot(b);
}
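// A hedged usage sketch (corner coordinates and the demo name are illustrative only):
// getVanishingLine expects the four image corners of a world rectangle as 3x1 homogeneous
// double vectors and returns a homography whose last row is the vanishing line.
void vanishingLineDemo(const Mat& src, Mat& dst) {
	Mat a = (Mat_<double>(3, 1) << 100, 120, 1);
	Mat b = (Mat_<double>(3, 1) << 400, 115, 1);
	Mat c = (Mat_<double>(3, 1) << 420, 300, 1);
	Mat d = (Mat_<double>(3, 1) <<  90, 310, 1);
	Mat H = getVanishingLine(a, b, c, d);
	warpPerspective(src, dst, H, src.size()); // removes the projective part of the distortion
}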
Example #4
Mat MakeRotationMatrix(sparseModelPoint smp) {
    cout << "Normal (" << smp.normal[0] << "," << smp.normal[1] << "," << smp.normal[2] << "): \n";
        
    Mat R = Mat::zeros(3,3,CV_64FC1);

    Mat X = Mat(3,1,CV_64FC1,smp.normal);
    Mat Y = Mat::zeros(3,1,CV_64FC1);
    Y.col(0).row(2) = 1;
    double dotProduct = X.at<double>(2,0); // equals X.dot(Y), since Y = (0, 0, 1)^T
    double angle = acos(dotProduct);
    Mat cross = X.cross(Y); // rotation axis (degenerate when X is parallel to Y)
    // cout << "angle: " << angle << "Cros: " << cross << "\n";
    cross = cross/norm(cross);
    // cout << "angle: " << angle << "Cros: " << cross << "\n";
    // U.col(0) = (X.col(0) + 0);
    // U.col(1) = X.cross(Y)/norm(X.cross(Y));
    // U.col(2) = (X.cross(U.col(1))+0);

    // Mat V = Mat::zeros(3,3,CV_64FC1);
    // V.col(0) = (Y.col(0) + 0);
    // V.col(1) = (U.col(1)+0);
    // V.col(2) = Y.cross(V.col(1));


    // U.t();

    double c = cos(angle);
    double s = sin(angle);
    double t = 1.0 - c;

    double x = cross.at<double>(0,0);
    double y = cross.at<double>(1,0);
    double z = cross.at<double>(2,0);

    R.at<double>(0,0) = c + x*x*t;
    R.at<double>(1,1) = c + y*y*t;
    R.at<double>(2,2) = c + z*z*t;


    double tmp1 = x*y*t;
    double tmp2 = z*s;
    R.at<double>(1,0) = tmp1 + tmp2;
    R.at<double>(0,1) = tmp1 - tmp2;
    tmp1 = x*z*t;
    tmp2 = y*s;
    R.at<double>(2,0) = tmp1 - tmp2;
    R.at<double>(0,2) = tmp1 + tmp2;    
    tmp1 = y*z*t;
    tmp2 = x*s;
    R.at<double>(2,1) = tmp1 + tmp2;
    R.at<double>(1,2) = tmp1 - tmp2;

    return R;
}
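// A hedged usage sketch; sparseModelPoint is assumed to expose a double normal[3]
// holding a unit normal, and the demo name is made up. The resulting rotation aligns
// that normal with the +Z axis (it breaks down when the normal is already parallel to Z,
// since the cross product vanishes).
void makeRotationMatrixDemo() {
    sparseModelPoint smp;
    smp.normal[0] = 0.0; smp.normal[1] = 1.0; smp.normal[2] = 0.0; // hypothetical values
    Mat R = MakeRotationMatrix(smp);
    Mat n(3, 1, CV_64FC1, smp.normal);
    cout << "R * n = " << R * n << endl; // expected to be close to (0, 0, 1)^T
}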
Example #5
	/*

	void point3d2Mat(const Point3d& src, Mat& dest)
	{
	dest.create(3,1,CV_64F);
	dest.at<double>(0,0)=src.x;
	dest.at<double>(1,0)=src.y;
	dest.at<double>(2,0)=src.z;
	}

	void setXYZ(Mat& in, double&x, double&y, double&z)
	{
	x=in.at<double>(0,0);
	y=in.at<double>(1,0);
	z=in.at<double>(2,0);

	//	cout<<format("set XYZ: %.04f %.04f %.04f\n",x,y,z);
	}

	void lookatBF(const Point3d& from, const Point3d& to, Mat& destR)
	{
	double x,y,z;

	Mat fromMat;
	Mat toMat;
	point3d2Mat(from,fromMat);
	point3d2Mat(to,toMat);

	Mat fromtoMat;
	add(toMat,fromMat,fromtoMat,Mat(),CV_64F);
	double ndiv = 1.0/norm(fromtoMat);
	fromtoMat*=ndiv;

	setXYZ(fromtoMat,x,y,z);
	destR = Mat::eye(3,3,CV_64F);
	double yaw   =-z/abs(z)*asin(y/sqrt(y*y+z*z))/CV_PI*180.0;

	rotYaw(destR,destR,yaw);

	Mat RfromtoMat = destR*fromtoMat;

	setXYZ(RfromtoMat,x,y,z);
	double pitch =z/abs(z)*asin(x/sqrt(x*x+z*z))/CV_PI*180.0;

	rotPitch(destR,destR,pitch);
	}
	*/
	void lookat(const Point3d& from, const Point3d& to, Mat& destR)
	{
		Mat destMat = Mat(Point3d(0.0, 0.0, 1.0));
		Mat srcMat = Mat(from + to);
		srcMat = srcMat / norm(srcMat);

		Mat rotaxis = srcMat.cross(destMat);
		double angle = acos(srcMat.dot(destMat));
		// normalize the rotation axis and scale it by the rotation angle (axis-angle form)
		rotaxis = rotaxis / norm(rotaxis)*angle;
		Rodrigues(rotaxis, destR);
	}
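	// A hedged usage sketch (values and the demo name are made up): lookat() returns a
	// 3x3 CV_64F rotation via Rodrigues. Note that it uses from + to as the viewing
	// direction, so it only matches a conventional look-at when the camera sits at the origin.
	void lookatDemo()
	{
		Mat R;
		lookat(Point3d(0.0, 0.0, 0.0), Point3d(0.5, 0.2, 2.0), R);
		cout << "look-at rotation: " << endl << R << endl;
	}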
void Camera::pitch(double angle)
{
	Mat temp = center - eye;
	Mat rAxis = temp.cross(up);
	Mat cmat = (Mat_<double>(3, 3) <<
		0, -rAxis.at<double>(2, 0), rAxis.at<double>(1, 0),
		rAxis.at<double>(2, 0), 0, -rAxis.at<double>(0, 0),
		-rAxis.at<double>(1, 0), rAxis.at<double>(0, 0), 0);
	angle = (CV_PI / 180) * angle;
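	// The next line is the Rodrigues formula R = I + sin(angle)*K + (1 - cos(angle))*K^2,
	// where K (cmat here) is the skew-symmetric matrix of the rotation axis; rAxis is
	// assumed to be unit length for pitchingMat to be a proper rotation.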
	Mat pitchingMat = Mat::eye(3, 3, CV_64F) + sin(angle) * cmat + (1 - cos(angle)) * cmat * cmat;
	rmat = pitchingMat * rmat;
}
/*
 * Method for obtaining the rotation matrix given an essential matrix and the vector t
 */
Mat getRotation(Mat essential, Mat t){
	Mat rotation = Mat(3, 3, CV_64F);
	Mat w[3];
	Mat E_norm = essential / sqrt((trace(essential*essential.t())[0]/2));

	for(int i = 0; i < 3; i++){
		Mat row = E_norm.row(i);
		w[i] = row.cross(t);
	}

	for(int i = 0; i < 3; i++){
		rotation.row(i) = w[i] + w[(i+1)%3].cross(w[(i+2)%3]);
	}

	return rotation;
}
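// A minimal call sketch (placeholder values, not a calibrated essential-matrix/translation
// pair, and the demo name is made up): note that Mat::cross requires both operands to have
// the same shape, so t is passed as a 1x3 row here to match E_norm.row(i).
void getRotationDemo() {
	Mat E = (Mat_<double>(3, 3) << 0, -1.0, 0.2,  1.0, 0, -0.5,  -0.2, 0.5, 0);
	Mat t = (Mat_<double>(1, 3) << 0.5, 0.2, 1.0);
	Mat R = getRotation(E, t);
	cout << "R = " << endl << R << endl;
}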
Example #8
#include <opencv2/opencv.hpp>
#include <iostream>

using namespace cv;
using namespace std;

int main(){

    // Matrix initialization
    float a[] = {3,2,5,6,5,8,2,3,4};
    Mat A = Mat(3, 3, CV_32FC1, a);
    cout << "A: " << endl << A << endl << endl;

    float b[] = {7,4,3,5,3,2,1,2,9};
    Mat B = Mat(3, 3, CV_32FC1, b);
    cout << "B: " << endl << B << endl << endl;

    Mat C;

    // Vector initialization
    float d[] = {2,4,1};
    Mat D = Mat(3, 1, CV_32FC1, d);
    cout << "D: " << endl << D << endl << endl;
    float e[] = {5,2,9};
    Mat E = Mat(3, 1, CV_32FC1, e);
    cout << "E: " << endl << E << endl << endl;

    Mat F;


    // Matrix and vector multiplication
    cout << "Matrix-vector multiplication: " << A*D << endl << endl;

    // Matrix zeros
    C = Mat::zeros(3,3,CV_32FC1);
    cout << "Matrix Zeros: " << endl << C << endl << endl;

    // Matrix ones
    C = Mat::ones(3,3,CV_32FC1);
    cout << "Matrix Ones: " << endl << C << endl << endl;

    // Matrix identity
    C = Mat::eye(3,3,CV_32FC1);
    cout << "Matrix Identity: " << endl << C << endl << endl;

    // Matrix addition
    C = A + B;
    cout << "Addition: " << endl << C << endl << endl;

    // Matrix multiplication
    C = A * B;
    cout << "Multiplication: " << endl << C << endl << endl;

    // Matrix multiplication per element
    C = A.mul(B);
    cout << "Multiplication 1 to 1: " << endl << C << endl << endl;

    // Cross product
    F = D.cross(E);
    cout << "Cross product: " << endl << F << endl << endl;

    // Dot product (D.dot(E) returns a scalar, not a Mat)
    double dotProd = D.dot(E);
    cout << "Dot product: " << endl << dotProd << endl << endl;

    // Matrix inverse
    C = A.inv();
    cout << "Inverse: " << endl << C << endl << endl;

    // Matrix transpose
    C = A.t();
    cout << "Transpose: " << endl << C << endl << endl;

    // Matrix determinant
    cout << "Determinant: " << endl << determinant(A) << endl << endl;

    // Vector normalization
    normalize(D, F);
    cout << "Normalization: " << endl << F << endl << endl;

    return 0;
}
Example #9
//FUNCTIONS
static void cameraPoseFromHomography(vector<Point2f> corner, Mat& H, float& ext_camera_height, float& ext_pitch_angle,
		Mat image, Mat &K, const Mat &VP1, const Mat &VP2, Mat & VVP, Mat &P, Mat &camerCenter) {

	Mat NEW_K = K;
	vector<Point3f> po;

	float z = 0.;
	const float unit = HOUSE_SIZE;
	for (int i = -4; i < 5; i++) // the reference frame is taken at the center of the chessboard; alternatively i = 0, i < 9 and j = 0, j < 9
		for (int j = -4; j < 5; j++)
			po.push_back(Point3f((float) (j * unit), (float) (i * unit), (float) (z * unit)));

	//  REFINE THE CALIBRATION
	Mat distCoeffs;
	vector<Mat> rvecsCalib, tvecsCalib;
	vector<vector<Point3f> > obj;
	obj.push_back(po);
	vector<vector<Point2f> > img;
	img.push_back(corner);
	calibrateCamera(obj, img, image.size(), NEW_K, distCoeffs, rvecsCalib, tvecsCalib,
			CV_CALIB_USE_INTRINSIC_GUESS | CV_CALIB_ZERO_TANGENT_DIST | CV_CALIB_FIX_K1 | CV_CALIB_FIX_K2
					| CV_CALIB_FIX_K3 | CV_CALIB_FIX_K4 | CV_CALIB_FIX_K5 | CV_CALIB_FIX_K6);

	// BEGIN::: RECOMPUTE THE VVP
	Mat w_star = NEW_K * NEW_K.t(); // DIAC
//	cout << NEW_K << w_star;
	w_star = normalize3matrix(w_star);

	Mat w = w_star.inv(); // IAC
	w = normalize3matrixf(w);
//	cout << w_star << w;

	Mat vl = VP1.cross(VP2); // line at infinity
	Mat vl_norm = normalizeLine(vl);
//	cout << vl << vl_norm;
	Mat NEW_VVP = w_star * vl_norm; // vertical vanishing point
	NEW_VVP = normalizeVectf(NEW_VVP);
	// END:::

	Mat rvec, tvec, distcoeff;
	solvePnPRansac(po, corner, NEW_K, distcoeff, rvec, tvec, false, 100, 8.0, 32, noArray(), CV_ITERATIVE);

	/*In the derivation of the camera model and its parametrization (6.10) it is assumed that
	 * the coordinate systems used in both the image and the 3D world are right handed systems,
	 * as shown in figure 6.1 (p. 154). However, a common practice in measuring image coordinates is
	 * that the y-coordinate increases in the downwards direction, thus defining a left handed
	 * coordinate system, contrary to figure 6.1 (p. 154). A recommended practice in this case is to
	 * negate the y-coordinate of the image point so that the coordinate system again becomes right
	 * handed. However, if the image coordinate system is left handed, then the consequences are not grave.
	 * The relationship between world and image coordinates is still expressed by a 3 x 4
	 * camera matrix. Decomposition of this camera matrix according to (6.11) with K of the
	 * form (6.10) is still possible with ax and ay positive. The difference is that R now
	 * represents the orientation of the camera with respect to the negative z-axis. In addition,
	 *  the depth of points given by (6.15) will be negative instead of positive for points in
	 *  front of the camera. If this is borne in mind then it is permissible to use left handed
	 *  coordinates in the image.*/

	cv::Mat R;
	cv::Rodrigues(rvec, R); // R is 3x3
	Mat R_orig = R.clone();
	Mat t_orig = tvec.clone();
	R = R.t();  // rotation of inverse
	tvec = -R * tvec; // translation of inverse (this is the CAMERA CENTER)

	cv::Mat T(4, 4, R.type()); // T is 4x4
	T(cv::Range(0, 3), cv::Range(0, 3)) = R * 1; // copies R into T
	T(cv::Range(0, 3), cv::Range(3, 4)) = tvec * 1; // copies tvec into T
	// fill the last row of T (NOTE: depending on your types, use float or double)
	double *p = T.ptr<double>(3);
	p[0] = p[1] = p[2] = 0;
	p[3] = 1;

	Mat trans = tvec;
	ext_camera_height = norm(trans.row(2) * HOUSE_SIZE / unit);

	/* P */

	// first method: explicit computation of the extrinsic parameters
	P = Mat(3, 4, R.type()); // P is 3x4
	Mat KR = NEW_K * R_orig;
	Mat Kt = NEW_K * t_orig;

	P(cv::Range(0, 3), cv::Range(0, 3)) = KR * 1;
	P(cv::Range(0, 3), cv::Range(3, 4)) = Kt * 1;

	for (int pi = 0; pi < 3; pi++)
		for (int pj = 0; pj < 4; pj++)
			P.at<double>(pi, pj) = P.at<double>(pi, pj) / P.at<double>(2, 3);

	for (unsigned int i = 0; i < po.size(); i++) { // the reference frame is taken at the center of the chessboard
		Mat X_i = (Mat_<double>(4, 1) << (float) po[i].x, (float) po[i].y, (float) po[i].z, 1.);
		Mat x_i = P * X_i;
		x_i = normalizeVect(x_i);

		circle(image, Point2f(x_i.at<double>(0, 0), x_i.at<double>(1, 0)), 1, Scalar(0, 0, 255), 2);
	}

	// second method: projectPoints
	vector<Point2f> imageX;
	Mat rvec_t;
	Rodrigues(R.t(), rvec_t);
	projectPoints(po, rvec_t, t_orig, NEW_K, distcoeff, imageX);
	float sum_error = 0;

	int min_error_index = 0;
	double min_error = 9999999.;
	double central_vertex_error = 0.; // error at the central chessboard vertex (i = 40)
	for (unsigned int i = 0; i < imageX.size(); i++) {
		circle(image, imageX[i], 1, Scalar(255, 0, 0), 2);
		double error2 = pow(imageX[i].x - corner[i].x, 2) + pow(imageX[i].y - corner[i].y, 2);
		if (min_error > sqrt(error2)) {
			min_error = sqrtf(error2);
			min_error_index = i;
		}

		if (i == 40)
			central_vertex_error = sqrt(error2); // same scale as min_error
		sum_error += (error2);
	}
	if (min_error == central_vertex_error)
		min_error_index = 40; // if the minimum error also occurs at the central square, prefer placing the frame there

	cout << "Final error :" << sqrt(sum_error / imageX.size()) << " Min error :" << min_error << " @ point "
			<< min_error_index << " : " << po[min_error_index] << endl;

	//Mat M = K * R.t(); // P = M ! p4 // o R?
	Mat C_homo(4, 1, tvec.type()); // camera center in homogeneous world coordinates
	C_homo(cv::Range(0, 3), cv::Range(0, 1)) = tvec * 1;
	C_homo(cv::Range(3, 4), cv::Range(0, 1)) = 1. * 1;

	Mat plane = (Mat_<double>(4, 1) << 0, 0, 1., 0); // chessboard plane Z = 0

	Mat pp = (Mat_<double>(3, 1) << NEW_K.at<double>(0, 2), NEW_K.at<double>(1, 2), 1.);
	Mat P_cross = P.t() * ((P * P.t()).inv());
	pp.convertTo(pp, CV_64F);
	P_cross.convertTo(P_cross, CV_64F);
	Mat X_pp = P_cross * pp;
	X_pp = normalizeVect(X_pp, 3); // a homogeneous 3D point on the line between the principal point (pp) and the camera center (C_homo)

	Mat Plucker_line_XppC = X_pp * C_homo.t() - C_homo * X_pp.t(); // line through pp and C in Plücker coordinates (in P3)

	Mat X_pp_on_chessboard = Plucker_line_XppC * plane; // intersection of the chessboard plane with the line
	X_pp_on_chessboard = normalizeVect(X_pp_on_chessboard, 3);

	//Point3f X_pp_on_chessboard_EU = Point3f(X_pp_on_chessboard.at<double>(0,0),X_pp_on_chessboard.at<double>(0,1),X_pp_on_chessboard.at<double>(0,2));
	double dist_C_X_pp_chess = norm(X_pp_on_chessboard - C_homo);
	double dist_C_X_pp_chess_CM = dist_C_X_pp_chess * HOUSE_SIZE / unit;
	ext_pitch_angle = acos(ext_camera_height / dist_C_X_pp_chess_CM) - M_PI / 2;
//	cout << endl << dist_C_X_pp_chess_CM << endl;

	//assign outputs
	camerCenter = Mat(C_homo);
	K = NEW_K;
	VVP = NEW_VVP;
}
/* Computes a cross-product of two 3-element vectors.  */
int ns__cross(  struct soap *soap, 
			std::string InputMatFilename,
			std::string AnotherMatFilename,
			std::string &OutputMatFilename=ERROR_FILENAME)
{
	bool timeChecking, memoryChecking;
	getConfig(timeChecking, memoryChecking);
	if(timeChecking){
		start = omp_get_wtime();
	}

    /* read from bin */
    Mat src;
	Mat dst;
	if(!readMat(InputMatFilename, src))
    {
		Log(logERROR) << "cross :: can not read bin file for src1" << std::endl;
        return soap_receiver_fault(soap, "cross :: can not read bin file for src1", NULL);
    }
	
	Mat anotherMat;
	if(!readMat(AnotherMatFilename, anotherMat))
    {
		Log(logERROR) << "cross:: can not read bin file for src2" << std::endl;
        return soap_receiver_fault(soap, "cross :: can not read bin file for src2", NULL);
    }
	try{
        dst = src.cross(anotherMat);
    } catch( cv::Exception& e ) {
        Log(logERROR) << e.what() << std::endl;
        return soap_receiver_fault(soap, e.what(), NULL);
    }
    
	std::string toAppend = "_cross";
    getOutputFilename(OutputMatFilename, toAppend);
    if(!saveMat(OutputMatFilename, dst))
    {
        Log(logERROR) << "cross :: can not save mat to binary file" << std::endl;
        return soap_receiver_fault(soap, "cross :: can not save mat to binary file", NULL);
    }

    src.release();
    dst.release();
    anotherMat.release();

	if(timeChecking) 
	{ 
		end = omp_get_wtime();
		Log(logINFO) << "cross :: " << "time elapsed " << end-start << std::endl;
	}
	
	if(memoryChecking)
	{	
		double vm, rss;
		getMemoryUsage(vm, rss);
		Log(logINFO)<< "mul :: VM usage :" << vm << std::endl 
					<< "Resident set size :" << rss << std::endl;
	}

    return SOAP_OK;
}
Example #11
// TODO: I can't math D:
void computeRotation(Camera c, sparseSiftFeature *s, sparseModelPoint smp){
    // Turning quaternion to rotation matrix: https://en.wikipedia.org/wiki/Rotation_matrix#Quaternion
    // Confirmation: https://groups.google.com/forum/#!topic/vsfm/V4lhITH2yHw
    double to_camera[3][3]; // built from the quaternion below, but not used afterwards
    double w = c.quaternion[0];  // quaternion order: w, x, y, z
    double x = c.quaternion[1];
    double y = c.quaternion[2];
    double z = c.quaternion[3];
    to_camera[0][0] = 1 - 2*y*y - 2*z*z;
    to_camera[0][1] = 2*x*y - 2*z*w;
    to_camera[0][2] = 2*x*z + 2*y*w;
    to_camera[1][0] = 2*x*y + 2*z*w;
    to_camera[1][1] = 1 - 2*x*x - 2*z*z;
    to_camera[1][2] = 2*y*z - 2*x*w;
    to_camera[2][0] = 2*x*z - 2*y*w;
    to_camera[2][1] = 2*y*z + 2*x*w;
    to_camera[2][2] = 1 - 2*x*x - 2*y*y;

    Mat vec1;
    Mat vec2;
    Mat X = Mat::zeros(3,1,CV_64FC1);
    X.col(0).row(2) = 1;
    Mat up = Mat::zeros(3,1,CV_64FC1);
    up.col(0).row(0) = 1;
    vec1 = X.cross(up);
    vec1 = vec1/norm(vec1);
    vec2 = vec1.cross(X);
    vec2 = vec2/norm(vec2);

    Mat R_C2W = Mat::zeros(3,3,CV_64FC1);
    R_C2W.row(0) = vec1.t();
    R_C2W.row(1) = vec2.t();
    R_C2W.row(2) = X.t();


    Mat R_C1W = Mat::zeros(3,3,CV_64FC1);
    R_C1W.col(0).row(0) = 1;
    R_C1W.col(1).row(0) = 0;
    R_C1W.col(2).row(0) = 0;

    R_C1W.col(0).row(1) = 0;
    R_C1W.col(1).row(1) = 0.6428;
    R_C1W.col(2).row(1) = -0.7660;

    R_C1W.col(0).row(2) = 0;
    R_C1W.col(1).row(2) = 0.7660;
    R_C1W.col(2).row(2) = 0.6428;


    // TODO: UNDERSTAND WHICH FINAL TRANSPOSE TO USE
    // R_C1W = R_C1W.t();
    // cout << "toCamera:" << XY_to_camera << "\n";
    // Mat trans = Mat(3,1,CV_64FC1,s->translation);
    // trans = XY_to_camera*trans;
    // Mat Normal_to_XY = MakeRotationMatrix(smp);
    // cout << "toXY:" << Normal_to_XY << "\n";
    // Mat rot = XY_to_camera*Normal_to_XY;
    // // rot = rot.inv();
    Mat R = R_C2W*R_C1W.t();

    for (int i = 0; i < 3; i++) {
        for (int j = 0; j < 3; j++){
            s->rotation[i][j] = R.at<double>(i,j);
            s->R_C1W[i][j] = R_C1W.at<double>(i,j);
        }
    }

    //cout << "Rotation: " << rot << "\n";
    
}
Example #12
int main(int argc, char** argv) {

    cout << "Welcome to Le Fancy Vase Drawer.\n" << endl;
    printf("Instructions:\n"\
           "r,g,b - Change colour of mesh to red, green, blue\n"\
           "k     - Colour the mesh black\n"\
           "p     - RAINBOW.\n"\
           "1,3   - Zoom in, zoom out\n"\
           "w,s   - Move camera up, down\n"\
           "a,d   - Rotate mesh left, right\n"\
           "q,e   - Move gaze point up, down\n"\
           "z,c   - Move camera and gaze point up, down\n");

    float viewingAngle = 60.; // degrees
    float aspectRatio = 1;
    float N = 5.;  // near plane
    float F = 30.; // far plane
    float t = N * tan(CV_PI / 360 * viewingAngle); // top
    float b = -t; // bottom
    float r = aspectRatio*t; // right
    float l = -r; // left
    int   w = 512,
          h = 512;

    bool camChanged = true;
    bool rainbow = true;
    int rotationInc = 5;
    int roll = 0;

    namedWindow(wndName, CV_WINDOW_AUTOSIZE);

    // used as row vectors, so they can be appended to Matrix easily
    Mat e = (Mat_<float>(1, 3) << 30., 30., 22.); // camera vector.  15, 15, 10
    Mat g = (Mat_<float>(1, 3) <<  0., 0., 18.); // a point the gaze direction unit vector n points toward
    Mat p = (Mat_<float>(1, 3) <<  0., 0., 1.); // up direction hint (x, y, z)
    Mat n, u, v;
    Mat screen(w, h, CV_8UC3);
    int flip = 1; // -1 to flip along X

    Mat Mv(0, 3, CV_32FC1);

    Mat S1T1Mp = (Mat_<float>(4, 4) <<
                  (2*N)/(r-l), 0, (r+l)/(r-l), 0,
                  0, (2*N)/(t-b), (t+b)/(t-b), 0,
                  0, 0, -(F+N)/(F-N), -2*F*N/(F-N),
                  0, 0, -1, 0
                  );

    Mat WS2T2 = (Mat_<float>(4, 4) <<
                 w/2, 0, 0, w/2,
                 0, flip*h/2, 0, -h/2+h,
                 0, 0, 1, 0,
                 0, 0, 0, 1
                 );

    // container for screen coords
    vector<Point2i> coords;

    // background colour and line colour
    Scalar bgColour(255, 255, 255);
    Scalar lineColour(0);

    // HSV and BGR matrices for colours of the rainbow
    int sat = 200, val = 200;
    Mat hsv(Size(1,1),CV_8UC3, Scalar(10,sat,val)), bgr;

    // the polygonal mesh object
    PolygonalMesh poly;
    poly.readFromFile("PolyVase.xml");

    bool normalChanged = true;

    char c = -1; // input char
    while (true) {
        if (camChanged) {
            if (normalChanged) {
                // normalize vector from camera to gaze point
                normalize(e - g, n);
                // generate vectors describing camera plane
                //u = (getRotationMatrix(n, rotationInc) * u.t()).t();
                //v = (getRotationMatrix(n, rotationInc) * v.t()).t();
                // normalize to keep window same size
                //normalize(u, u);
                //normalize(v, v);
                u = p.cross(n);
                v = u.cross(n);
                u = (getRotationMatrix(n, roll) * u.t()).t();
                v = (getRotationMatrix(n, roll) * v.t()).t();
                normalize(u, u);
                normalize(v, v);

                normalChanged = false;
            }

            // construct matrix for world coords to camera viewing coords
            Mv = Mat(0, 3, CV_32FC1);
            Mv.push_back(u);
            Mv.push_back(v);
            Mv.push_back(n);
            Mv = Mv.t();
            Mv.push_back(Mat((Mat_<float>(1, 3) << -e.dot(u), -e.dot(v), -e.dot(n))));
            Mv = Mv.t();
            Mv.push_back(Mat((Mat_<float>(1, 4) << 0, 0, 0, 1))); // works

            //scale, transformation, and projection matrix
            S1T1Mp = (Mat_<float>(4, 4) <<
                          (2*N)/(r-l), 0, (r+l)/(r-l), 0,
                          0, (2*N)/(t-b), (t+b)/(t-b), 0,
                          0, 0, -(F+N)/(F-N), -2*F*N/(F-N),
                          0, 0, -1, 0
                          );

            // scale and transform from viewing volume to canonical viewing volume.
            // Flip along X-axis if desired
            WS2T2 = (Mat_<float>(4, 4) <<
                         w/2, 0, 0, w/2,
                         0, flip*h/2, 0, -h/2+h,
                         0, 0, 1, 0,
                         0, 0, 0, 1
                         );

            // colour the background
            screen.setTo(bgColour);

            camChanged = false;
        }
        
        coords.clear();
        coords.reserve(poly.vertsH.size());
        for (int i = 0; i < poly.vertsH.size(); i++) {
            // Apply transformations to convert from world coordinates to screen coords
            Mat pt = WS2T2 * (S1T1Mp * (Mv * poly.vertsH[i]));
            // Perspective divide
            pt /= pt.at<float>(3, 0);
            // store generated coordinate in coords container
            coords.push_back(Point2i((int)pt.at<float>(0), (int)pt.at<float>(1)));
        }
        if (rainbow) {
            hsv = Mat(Size(1, 1), CV_8UC3, Scalar(10, sat, val));
        }
        for (int i = 0; i < poly.faces.size(); i++) {
            Face& f           = poly.faces[i];
            Normal faceNormal = poly.norms[f.data[Face::NORM]];
            Mat tv            = poly.vertsH[f.data[Face::PT0]]; //  triangle 1st vertex
            // generate camera to triangle vector
            Vec3f  camToTri(tv.at<float>(X) - e.at<float>(X),
                            tv.at<float>(Y) - e.at<float>(Y),
                            tv.at<float>(Z) - e.at<float>(Z));

            // compute dot product to determine which faces to draw.
            float b = faceNormal.dot(camToTri); // camToTri.dot(faceNormal);

            if (rainbow) {
                // increment hue value, then convert from HSV to BGR
                Vec3b clr = hsv.at<Vec3b>(0, 0);
                clr[0]++;
                hsv.at<Vec3b>(0, 0) = clr;
                cvtColor(hsv, bgr, CV_HSV2BGR);
                Vec3b bgr3 = bgr.at<Vec3b>(0, 0);
                lineColour = Scalar(bgr3[0], bgr3[1], bgr3[2]);
            }
            if (b >= 0) {
                // draw triangle from 3 face coords
                line(screen, coords[f.data[Face::PT0]], coords[f.data[Face::PT1]], lineColour);
                line(screen, coords[f.data[Face::PT0]], coords[f.data[Face::PT2]], lineColour);
                line(screen, coords[f.data[Face::PT1]], coords[f.data[Face::PT2]], lineColour);
            }
        }

        // draw the image to the screen
        imshow(wndName, screen);

        c = waitKey(0);
        switch (c) {
            case 'w':
                // move camera up
                e.at<float>(Z) += 1;
                camChanged = true;
                normalChanged = true;
                break;
            case 's':
                // move camera down
                e.at<float>(Z) -= 1;
                camChanged = true;
                normalChanged = true;
                break;
            case 'a':{
                // rotate by 5 degrees along z axis
                float a = rotationInc/180.*CV_PI;
                e = ((Mat_<float>(3, 3) <<
                    cos(a), sin(a), 0,
                    -sin(a), cos(a), 0,
                    0, 0, 1) * e.t()).t();
                camChanged = true;
                normalChanged = true;
            }
                break;
            case 'd':{
                // rotate by -5 degrees along z axis
                float a = -rotationInc/180.*CV_PI;
                e = ((Mat_<float>(3, 3) <<
                    cos(a), sin(a), 0,
                    -sin(a), cos(a), 0,
                    0, 0, 1) * e.t()).t();
                camChanged = true;
                normalChanged = true;
            }
                break;
            case 'y': {
                roll += rotationInc;
                // rotate camera
                // rotate around unit vector 'n' -- from gaze point to cam
                //u = (getRotationMatrix(n, rotationInc) * u.t()).t();
                //v = (getRotationMatrix(n, rotationInc) * v.t()).t();
                // normalize to keep window same size
                //normalize(u, u);
                //normalize(v, v);
                normalChanged = true;
                camChanged = true;
            }
                break;
            case 'u':
                roll -= rotationInc;
                //u = (getRotationMatrix(n, -rotationInc) * u.t()).t();
                //v = (getRotationMatrix(n, -rotationInc) * v.t()).t();
                //normalize(u, u);
                //normalize(v, v);
                normalChanged = true;
                camChanged = true;
                break;
            case 'q':
                // shift gaze vector down
                g.at<float>(Z) -= 1;
                camChanged = true;
                normalChanged = true;
                break;
            case 'e':
                // shift gaze vector up
                g.at<float>(Z) += 1;
                camChanged = true;
                normalChanged = true;
                break;
            case 'c':
                // move camera and gaze vector up
                g.at<float>(Z) += 1;
                e.at<float>(Z) += 1;
                camChanged = true;
                break;
            case 'z':
                // move camera and gaze vector down
                g.at<float>(Z) -= 1;
                e.at<float>(Z) -= 1;
                camChanged = true;
                break;
            case '1':
                // move camera closer to object by 10%
                e *= 0.9;
                camChanged = true;
                break;
            case '3':
                // move cam further from object by 10%
                e *= 1.1;
                camChanged = true;
                break;
            case 'r':
                // colour the mesh red, green, blue, black (next 4 cases)
                lineColour = Scalar(0, 0, 255);
                rainbow = false;
                break;
            case 'g':
                lineColour = Scalar(0, 150, 0);
                rainbow = false;
                break;
            case 'b':
                lineColour = Scalar(255, 0, 0);
                rainbow = false;
                break;
            case 'k':
                lineColour = Scalar(0, 0, 0);
                rainbow = false;
                break;
            case 'p':
                //set the rainbow flag
                rainbow = true;
                break;
            default:
                break;
        }
    }

    // chill out until the user hits a key
    waitKey();
    getchar();
}