Code example #1
int test_getAffineTransform()
{
	cv::Mat matSrc = cv::imread("E:/GitCode/OpenCV_Test/test_images/lena.png", 1);
	if (!matSrc.data) {
		std::cout << "read image fail" << std::endl;
		return -1;
	}

	fbc::Point2f srcTri[3];
	fbc::Point2f dstTri[3];

	// Set your 3 points to calculate the  Affine Transform
	srcTri[0] = fbc::Point2f(0, 0);
	srcTri[1] = fbc::Point2f(matSrc.cols - 1, 0);
	srcTri[2] = fbc::Point2f(0, matSrc.rows - 1);

	dstTri[0] = fbc::Point2f(matSrc.cols*0.0, matSrc.rows*0.33);
	dstTri[1] = fbc::Point2f(matSrc.cols*0.85, matSrc.rows*0.25);
	dstTri[2] = fbc::Point2f(matSrc.cols*0.15, matSrc.rows*0.7);

	// Get the Affine Transform
	fbc::Mat_<double, 1> warp_mat(2, 3);
	int ret = fbc::getAffineTransform(srcTri, dstTri, warp_mat);
	assert(ret == 0);

	cv::Point2f srcTri_[3];
	cv::Point2f dstTri_[3];

	// Set your 3 points to calculate the  Affine Transform
	srcTri_[0] = cv::Point2f(0, 0);
	srcTri_[1] = cv::Point2f(matSrc.cols - 1, 0);
	srcTri_[2] = cv::Point2f(0, matSrc.rows - 1);

	dstTri_[0] = cv::Point2f(matSrc.cols*0.0, matSrc.rows*0.33);
	dstTri_[1] = cv::Point2f(matSrc.cols*0.85, matSrc.rows*0.25);
	dstTri_[2] = cv::Point2f(matSrc.cols*0.15, matSrc.rows*0.7);

	// Get the Affine Transform
	cv::Mat warp_mat_(2, 3, CV_64FC1);
	warp_mat_ = cv::getAffineTransform(srcTri_, dstTri_);

	assert(warp_mat.cols == warp_mat_.cols && warp_mat.rows == warp_mat_.rows);
	assert(warp_mat.step == warp_mat_.step);
	for (int y = 0; y < warp_mat.rows; y++) {
		const fbc::uchar* p = warp_mat.ptr(y);
		const uchar* p_ = warp_mat_.ptr(y);

		for (int x = 0; x < warp_mat.step; x++) {
			assert(p[x] == p_[x]);
		}
	}

	return 0;
}
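
For reference, what getAffineTransform computes from the three point pairs is the solution of a 6-unknown linear system. Below is a minimal illustrative sketch (the helper name solveAffine3 is made up here and is not part of OpenCV or the fbc library) that builds and solves the same system with cv::solve and returns the familiar 2x3 CV_64F matrix.

#include <opencv2/opencv.hpp>

// Illustrative only: solve the 6-unknown system behind getAffineTransform.
static cv::Mat solveAffine3(const cv::Point2f src[3], const cv::Point2f dst[3])
{
	cv::Mat A = cv::Mat::zeros(6, 6, CV_64F);
	cv::Mat b(6, 1, CV_64F);
	for (int i = 0; i < 3; i++) {
		// x' = a11*x + a12*y + b1
		A.at<double>(2 * i, 0) = src[i].x;
		A.at<double>(2 * i, 1) = src[i].y;
		A.at<double>(2 * i, 2) = 1.0;
		b.at<double>(2 * i, 0) = dst[i].x;
		// y' = a21*x + a22*y + b2
		A.at<double>(2 * i + 1, 3) = src[i].x;
		A.at<double>(2 * i + 1, 4) = src[i].y;
		A.at<double>(2 * i + 1, 5) = 1.0;
		b.at<double>(2 * i + 1, 0) = dst[i].y;
	}
	cv::Mat m;
	cv::solve(A, b, m);      // solve the 6x6 system
	return m.reshape(1, 2);  // reinterpret the 6x1 result as a 2x3 matrix
}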
Code example #2
int main(int argc, const char *argv[])
{
	cv::Mat img;
	cv::Mat warp_mat(3, 3, CV_32FC1);
	const char* source_window = "source";
	const char* dest_window = "destination";
	int interpolation_method = 1;
	char key;

	if (argc < 2) {
		std::cout << "usage: " << argv[0] << " <image>" << std::endl;
		return -1;
	}
	img = cv::imread(argv[1], 1);
	if (img.empty()) {
		std::cout << "read image fail" << std::endl;
		return -1;
	}
	cv::namedWindow(source_window, CV_WINDOW_AUTOSIZE);
	cv::namedWindow(dest_window, CV_WINDOW_AUTOSIZE);
	cv::createTrackbar("interpolation", source_window, &interpolation_method, 4);
	cv::imshow(source_window, img);

	cv::Point2f src_points[4];
	cv::Point2f des_points[4];
	cv::setMouseCallback(dest_window, mousecallback, des_points);

	src_points[0] = cv::Point2f( 0, 0 );
	src_points[1] = cv::Point2f( img.cols-1, 0 );
	src_points[2] = cv::Point2f( 0, img.rows-1 );
	src_points[3] = cv::Point2f( img.cols-1, img.rows-1 );

	des_points[0] = cv::Point2f( img.cols*0.25, img.rows*0.25 );
	des_points[1] = cv::Point2f( img.cols*1.25, img.rows*0.25 );
	des_points[2] = cv::Point2f( img.cols*0.25, img.rows*1.25 );
	des_points[3] = cv::Point2f( img.cols*1.25, img.rows*1.25 );

	cv::Mat desimg = cv::Mat::zeros(static_cast<int>(img.rows * 1.5), static_cast<int>(img.cols * 1.5), img.type());  // rows first, then cols


	for(;;){
		warp_mat = cv::getPerspectiveTransform(src_points, des_points);
		cv::warpPerspective(img, desimg, warp_mat, desimg.size(), interpolation_method);
		cv::circle(desimg, des_points[0], PRADIUS, cv::Scalar(255, 0, 0), -1);
		cv::circle(desimg, des_points[1], PRADIUS, cv::Scalar(0, 255, 0), -1);
		cv::circle(desimg, des_points[2], PRADIUS, cv::Scalar(0, 0, 255), -1);
		cv::circle(desimg, des_points[3], PRADIUS, cv::Scalar(0, 255, 255), -1);
		cv::imshow(dest_window, desimg);
		key = cv::waitKey(30);
		if( key==27 )
			break;
	}
	return 0;
}
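
The example above relies on a PRADIUS constant and a mousecallback handler that are not shown. A minimal sketch of what they might look like follows; the drag logic (move the destination corner nearest to the cursor while the left button is held) is an assumption, not the original code.

#include <cfloat>

#define PRADIUS 5  // assumed marker radius

// Assumed handler: userdata points at the des_points[4] array from main().
static void mousecallback(int event, int x, int y, int flags, void* userdata)
{
	cv::Point2f* pts = static_cast<cv::Point2f*>(userdata);
	if (event == cv::EVENT_MOUSEMOVE && (flags & cv::EVENT_FLAG_LBUTTON)) {
		// Drag whichever of the four destination corners is closest to the cursor.
		int nearest = 0;
		float best = FLT_MAX;
		for (int i = 0; i < 4; i++) {
			float dx = pts[i].x - x, dy = pts[i].y - y;
			float d = dx * dx + dy * dy;
			if (d < best) { best = d; nearest = i; }
		}
		pts[nearest] = cv::Point2f((float)x, (float)y);
	}
}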
Code example #3
void algoOpenCvResizeBorder(imgznd::OpenCvImgRepr &src,double dx,double dy)
{
    cv::Point2f srcTri[3];
    cv::Point2f dstTri[3];
    /// Set your 3 points to calculate the  Affine Transform
    srcTri[0] = cv::Point2f( 0,0 );
    srcTri[1] = cv::Point2f( src.cols - 1, 0 );
    srcTri[2] = cv::Point2f( 0, src.rows - 1 );
    dstTri[0] = cv::Point2f( 0 + dx / 2.0, 0 + dy / 2.0);
    dstTri[1] = cv::Point2f( src.cols - 1 + dx / 2.0, 0 + dy / 2.0);
    dstTri[2] = cv::Point2f( 0 + dx / 2.0, src.rows - 1 + dy / 2.0 );
    /// Get the Affine Transform
    cv::Mat warp_mat( 2, 3, CV_32FC1 );
    warp_mat = cv::getAffineTransform( srcTri, dstTri );
    cv::Size imgSize = src.size();
    imgSize.width += dx;
    imgSize.height += dy;
    cv::warpAffine( src, src, warp_mat, imgSize );
}
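
Since the mapping above is a pure translation by (dx/2, dy/2), the 2x3 matrix can also be written out directly instead of being derived from three point correspondences. A hedged equivalent sketch, assuming OpenCvImgRepr behaves like a cv::Mat (function name is illustrative only):

void algoOpenCvResizeBorderDirect(cv::Mat &src, double dx, double dy)
{
    // Pure translation matrix: [1 0 dx/2; 0 1 dy/2]
    cv::Mat warp_mat = (cv::Mat_<double>(2, 3) << 1, 0, dx / 2.0,
                                                  0, 1, dy / 2.0);
    cv::Size imgSize(src.cols + (int)dx, src.rows + (int)dy);
    cv::Mat dst;
    cv::warpAffine(src, dst, warp_mat, imgSize);
    src = dst;
}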
Code example #4
File: panorama.cpp  Project: AmmarkoV/RGBDAcquisition
int  sift_affine(const char * filenameLeft , const char * filenameRight ,  double SIFTThreshold ,
                 unsigned int RANSACLoops ,
                 unsigned int stitchedBorder ,
                 double reprojectionThresholdX ,
                 double reprojectionThresholdY ,
                 unsigned int useOpenCVEstimator
                 )
{

    fprintf(stderr,"Running SIFT on %s / %s \n" , filenameLeft , filenameRight);

    cv::Mat left = cv::imread(filenameLeft  , CV_LOAD_IMAGE_COLOR);
    if(! left.data ) { fprintf(stderr,"Left Image missing \n"); return 1; }

    cv::Mat right = cv::imread(filenameRight, CV_LOAD_IMAGE_COLOR);
    if(! right.data ) { fprintf(stderr,"Right Image missing \n"); return 1; }


    cv::DescriptorExtractor* extractor = new cv::SiftDescriptorExtractor();
    cv::SiftFeatureDetector detector;
    std::vector<cv::KeyPoint> keypointsLeft;
    cv::Mat descriptorsLeft;
    detector.detect(left, keypointsLeft);
    extractor->compute(left, keypointsLeft, descriptorsLeft);

    // Add results to image and save.
    cv::Mat output;
    cv::drawKeypoints(left, keypointsLeft, output);
    cv::imwrite("sift_features_left.jpg", output);


    std::vector<cv::KeyPoint> keypointsRight;
    cv::Mat descriptorsRight;
    detector.detect(right, keypointsRight);
    extractor->compute(right, keypointsRight, descriptorsRight);
    cv::drawKeypoints(right, keypointsRight, output);
    cv::imwrite("sift_features_right.jpg", output);

    //fprintf(stderr,"SIFT features ready \n");


    std::vector<cv::Point2f> srcRANSACPoints;
    std::vector<cv::Point2f> dstRANSACPoints;


    std::vector<cv::Point2f> srcPoints;
    std::vector<cv::Point2f> dstPoints;
    findPairs( SIFTThreshold , keypointsLeft, descriptorsLeft, keypointsRight, descriptorsRight, srcPoints, dstPoints);
    //printf("%zd keypoints are matched.\n", srcPoints.size());



   visualizeMatches(
                      "sift_initial_match.jpg" ,
                      left ,
                      keypointsLeft,
                      descriptorsLeft,
                      right ,
                      keypointsRight,
                      descriptorsRight,
                      srcPoints,
                      dstPoints,
                      srcRANSACPoints,
                      dstRANSACPoints
                    );


   cv::Mat warp_mat( 2, 3,  CV_64FC1  );
   double M[6]={0};
   fitAffineTransformationMatchesRANSAC( RANSACLoops , reprojectionThresholdX , reprojectionThresholdY , M , warp_mat, srcPoints , dstPoints ,  srcRANSACPoints, dstRANSACPoints);


   stitchAffineMatch(
                     "wrappedAffine.jpg"  ,
                     stitchedBorder,
                     left ,
                     right ,
                     warp_mat
                    );


   visualizeMatches(
                      "sift_affine_match.jpg" ,
                      left ,
                      keypointsLeft,
                      descriptorsLeft,
                      right ,
                      keypointsRight,
                      descriptorsRight,
                      srcPoints,
                      dstPoints ,
                      srcRANSACPoints,
                      dstRANSACPoints
                    );




   cv::Mat homo_mat( 3, 3,  CV_64FC1  );
   double H[9]={0};

   if (useOpenCVEstimator)
   {
    homo_mat = cv::findHomography(srcPoints , dstPoints , CV_RANSAC);
   } else
   {
    fitHomographyTransformationMatchesRANSAC( RANSACLoops , reprojectionThresholdX , reprojectionThresholdY , H , homo_mat, srcPoints , dstPoints ,  srcRANSACPoints, dstRANSACPoints);
   }


   stitchHomographyMatch(
                         "wrappedHomography.jpg"  ,
                         stitchedBorder,
                         left ,
                         right ,
                         homo_mat
                        );

   visualizeMatches(
                      "sift_homography_match.jpg" ,
                      left ,
                      keypointsLeft,
                      descriptorsLeft,
                      right ,
                      keypointsRight,
                      descriptorsRight,
                      srcPoints,
                      dstPoints ,
                      srcRANSACPoints,
                      dstRANSACPoints
                    );

   delete extractor;
   return 0;
}
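
One small addition that would fit this function (not present in the original file): cv::findHomography can also return the RANSAC inlier mask, which could be used to fill the srcRANSACPoints / dstRANSACPoints vectors passed to visualizeMatches. A hedged sketch:

   std::vector<unsigned char> inlierMask;
   cv::Mat H_cv = cv::findHomography(srcPoints, dstPoints, CV_RANSAC, 3.0, inlierMask);
   for (size_t k = 0; k < srcPoints.size(); ++k) {
       if (inlierMask[k]) {                       // keep only RANSAC inliers
           srcRANSACPoints.push_back(srcPoints[k]);
           dstRANSACPoints.push_back(dstPoints[k]);
       }
   }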
Code example #5
File: aam.cpp  Project: magdamagda/mood_recognizer
Mat AAM::findTransformationMatrix(Point2f from[3], Point2f to[3])
{
    // note: getAffineTransform returns a new 2x3 CV_64F matrix, so the
    // CV_32FC1 header allocated below is simply replaced, not filled in place
    Mat warp_mat( 2, 3, CV_32FC1 );
    warp_mat = getAffineTransform( from, to );
    return warp_mat;
}
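
A hedged usage sketch for the helper above; the aam instance, the face image and the two point triples are illustrative names, not taken from the original project:

Mat M = aam.findTransformationMatrix(from, to);  // fitted 2x3 affine matrix
Mat warped;
warpAffine(face, warped, M, face.size());        // apply it to the input image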
Code example #6
int test_warpAffine_uchar()
{
	cv::Mat matSrc = cv::imread("E:/GitCode/OpenCV_Test/test_images/lena.png", 1);
	if (!matSrc.data) {
		std::cout << "read image fail" << std::endl;
		return -1;
	}

	for (int interpolation = 0; interpolation < 5; interpolation++) {
		fbc::Point2f srcTri[3];
		fbc::Point2f dstTri[3];

		// Set your 3 points to calculate the  Affine Transform
		srcTri[0] = fbc::Point2f(0, 0);
		srcTri[1] = fbc::Point2f(matSrc.cols - 1, 0);
		srcTri[2] = fbc::Point2f(0, matSrc.rows - 1);

		dstTri[0] = fbc::Point2f(matSrc.cols*0.0, matSrc.rows*0.33);
		dstTri[1] = fbc::Point2f(matSrc.cols*0.85, matSrc.rows*0.25);
		dstTri[2] = fbc::Point2f(matSrc.cols*0.15, matSrc.rows*0.7);

		// Get the Affine Transform
		fbc::Mat_<double, 1> warp_mat(2, 3);
		int ret = fbc::getAffineTransform(srcTri, dstTri, warp_mat);
		assert(ret == 0);

		fbc::Mat_<uchar, 3> mat(matSrc.rows, matSrc.cols, matSrc.data);
		fbc::Mat_<uchar, 3> warp_dst;
		warp_dst.zeros(mat.rows, mat.cols);

		fbc::warpAffine(mat, warp_dst, warp_mat, interpolation);

		cv::Point2f srcTri_[3];
		cv::Point2f dstTri_[3];

		// Set your 3 points to calculate the  Affine Transform
		srcTri_[0] = cv::Point2f(0, 0);
		srcTri_[1] = cv::Point2f(matSrc.cols - 1, 0);
		srcTri_[2] = cv::Point2f(0, matSrc.rows - 1);

		dstTri_[0] = cv::Point2f(matSrc.cols*0.0, matSrc.rows*0.33);
		dstTri_[1] = cv::Point2f(matSrc.cols*0.85, matSrc.rows*0.25);
		dstTri_[2] = cv::Point2f(matSrc.cols*0.15, matSrc.rows*0.7);

		// Get the Affine Transform
		cv::Mat warp_mat_(2, 3, CV_64FC1);
		warp_mat_ = cv::getAffineTransform(srcTri_, dstTri_);

		// Set the dst image the same type and size as src
		cv::Mat warp_dst_ = cv::Mat::zeros(matSrc.rows, matSrc.cols, matSrc.type());
		cv::Mat mat_;
		matSrc.copyTo(mat_);

		// Apply the Affine Transform just found to the src image
		cv::warpAffine(mat_, warp_dst_, warp_mat_, warp_dst_.size(), interpolation);

		assert(warp_mat.cols == warp_mat_.cols && warp_mat.rows == warp_mat_.rows);
		assert(warp_mat.step == warp_mat_.step);
		for (int y = 0; y < warp_mat.rows; y++) {
			const fbc::uchar* p = warp_mat.ptr(y);
			const uchar* p_ = warp_mat_.ptr(y);

			for (int x = 0; x < warp_mat.step; x++) {
				assert(p[x] == p_[x]);
			}
		}
	}

	return 0;
}
Code example #7
void *affine_loop(void *number)
{

/*
	if(set_single_core_affinity()!=EXIT_SUCCESS)
	{
		perror("Core Affinity");
	}
*/

	int block_size;
	int max_files;
	int i=0;
	int section=0;
	float pos[8]={0,0,1,0,0,1,1,1};

	Point2f srcTri[4];
	Point2f dstTri[4];

	max_files=3601;
	block_size=max_files/4;

	Mat rot_mat( 2, 3, CV_32FC1 );
	Mat warp_mat( 2, 3, CV_32FC1 );


	// Output variables
	Mat src, warp_dst, warp_rotate_dst;

	struct thread_limits *ptr_obj = (struct thread_limits *) number;
	int start_loop = ptr_obj->start_no;
	int stop_loop  = ptr_obj->stop_no;

	/*------------------------- Starting the loop --------------------------*/

	for (i=start_loop; i<=stop_loop; i++)
	{

		/*------------------------- Loading the Image --------------------------*/

		if(option==1)
		{
			// Select the right frame
			sprintf(frame_name2,"Sobel_frame_no_%05u.ppm",i);
			// Load the Image
			src = imread( frame_name2, 1 );
		}

		else
		{
			sprintf(frame_name,"Frame_no_%05u.ppm",i);
			src = imread( frame_name, 1 );
		}

		/*---------------------- Affine Transform : Warp -----------------------*/

		// Setting up the output image parameters

		warp_dst = Mat::zeros( src.rows, src.cols, src.type() );


		/*---------------------- Change the parameter values ----------------------*/

	
	
		switch(section)
		{

			case 0:
			{

				pos[1]=pos[1]+0.001;
				pos[2]=pos[2]-0.001;
				pos[4]=pos[4]+0.001;
				pos[7]=pos[7]-0.001;
		
			
				// Setting parameters for matrix computation

				srcTri[0] = Point2f( 0,0 );
				srcTri[1] = Point2f( src.cols - 1, 0 );
				srcTri[2] = Point2f( 0, src.rows - 1 );
				srcTri[3] = Point2f( src.cols - 1, src.rows - 1 );

				dstTri[0] = Point2f( src.cols*pos[0], src.rows*pos[1] );
				dstTri[1] = Point2f( src.cols*pos[2], src.rows*pos[3] );
				dstTri[2] = Point2f( src.cols*pos[4], src.rows*pos[5] );
				dstTri[3] = Point2f( src.cols*pos[6], src.rows*pos[7] );
			
				section=i/block_size;

				//printf("Case 0: %u\t %f %f %f %f %f %f %f %f\n",i,pos[0],pos[1],pos[2],pos[3],pos[4],pos[5],pos[6],pos[7]);

				break;
			}

			case 1:
			{

				pos[0]=pos[0]+0.001;
				pos[3]=pos[3]+0.001;
				pos[5]=pos[5]-0.001;
				pos[6]=pos[6]-0.001;
		
			
				// Setting parameters for matrix computation

				srcTri[0] = Point2f( 0,0 );
				srcTri[1] = Point2f( src.cols - 1, 0 );
				srcTri[2] = Point2f( 0, src.rows - 1 );
				srcTri[3] = Point2f( src.cols - 1, src.rows - 1 );

				dstTri[0] = Point2f( src.cols*pos[0], src.rows*pos[1] );
				dstTri[1] = Point2f( src.cols*pos[2], src.rows*pos[3] );
				dstTri[2] = Point2f( src.cols*pos[4], src.rows*pos[5] );
				dstTri[3] = Point2f( src.cols*pos[6], src.rows*pos[7] );
			
				section=i/block_size;

				//printf("Case 1: %u\t %f %f %f %f %f %f %f %f\n",i,pos[0],pos[1],pos[2],pos[3],pos[4],pos[5],pos[6],pos[7]);

				break;
			}
		
			case 2:
			{
			
				pos[1]=pos[1]-0.001;
				pos[2]=pos[2]+0.001;
				pos[4]=pos[4]-0.001;
				pos[7]=pos[7]+0.001;
		
			
				// Setting parameters for matrix computation

				srcTri[0] = Point2f( 0,0 );
				srcTri[1] = Point2f( src.cols - 1, 0 );
				srcTri[2] = Point2f( 0, src.rows - 1 );
				srcTri[3] = Point2f( src.cols - 1, src.rows - 1 );

				dstTri[0] = Point2f( src.cols*pos[0], src.rows*pos[1] );
				dstTri[1] = Point2f( src.cols*pos[2], src.rows*pos[3] );
				dstTri[2] = Point2f( src.cols*pos[4], src.rows*pos[5] );
				dstTri[3] = Point2f( src.cols*pos[6], src.rows*pos[7] );
			
				section=i/block_size;

				//printf("Case 2: %u\t %f %f %f %f %f %f %f %f\n",i,pos[0],pos[1],pos[2],pos[3],pos[4],pos[5],pos[6],pos[7]);

				break;
			}
		

			case 3:
			{

				pos[0]=pos[0]-0.001;
				pos[3]=pos[3]-0.001;
				pos[5]=pos[5]+0.001;
				pos[6]=pos[6]+0.001;
		
			
				// Setting parameters for matrix computation

				srcTri[0] = Point2f( 0,0 );
				srcTri[1] = Point2f( src.cols - 1, 0 );
				srcTri[2] = Point2f( 0, src.rows - 1 );
				srcTri[3] = Point2f( src.cols - 1, src.rows - 1 );

				dstTri[0] = Point2f( src.cols*pos[0], src.rows*pos[1] );
				dstTri[1] = Point2f( src.cols*pos[2], src.rows*pos[3] );
				dstTri[2] = Point2f( src.cols*pos[4], src.rows*pos[5] );
				dstTri[3] = Point2f( src.cols*pos[6], src.rows*pos[7] );

			
				section=i/block_size;

				//printf("Case 3: %u\t %f %f %f %f %f %f %f %f\n",i,pos[0],pos[1],pos[2],pos[3],pos[4],pos[5],pos[6],pos[7]);

				break;
			}

			default:
			{
				//printf("Value: %d\n",section);
				//perror("Default switch() case");
				break;
			}
		}
		



		// Calculate the Affine Transform matrix
		// (getAffineTransform uses only the first three of the four points set above)

		warp_mat = getAffineTransform( srcTri, dstTri );


		// Applying the Affine Transform to the src image

		warpAffine( src, warp_dst, warp_mat, warp_dst.size() );



		/*-------------------- Affine Transform : Rotate -----------------------*/

		// Compute the Rotation Matrix Parameters

		Point center = Point( warp_dst.cols/2, warp_dst.rows/2 );
		double angle = ROTATION_ANGLE;
		double scale = ISOTROPIC_SCALE_FACTOR;

		// Generate the Rotation Matrix

		rot_mat = getRotationMatrix2D( center, angle, scale );

		// Rotate the Image

		warpAffine( warp_dst, warp_rotate_dst, rot_mat, warp_dst.size() );


		/*------------------------- Storing the Image ---------------------------*/


		sprintf(frame_name3,"Affine_frame_no_%05u.ppm",i);

		// Storing the Image (note: warp_dst, the warped but not rotated frame,
		// is what gets written; warp_rotate_dst is computed above but not saved)

		imwrite(frame_name3, warp_dst);

	}
	// End of 'for' loop

	return NULL;
}
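
The worker above expects a thread_limits pointer and several globals (option, frame_name, frame_name2, frame_name3, ROTATION_ANGLE, ISOTROPIC_SCALE_FACTOR) defined elsewhere in the original project. The following launcher is an assumption for illustration only; the field names start_no / stop_no match the worker's usage, and the frame split mirrors its max_files = 3601, block_size = 900 values.

#include <pthread.h>

struct thread_limits { int start_no; int stop_no; };

// Illustrative launcher (not from the original source): split 3601 frames
// into four blocks and run affine_loop on each block in its own thread.
int launch_affine_workers(void)
{
	pthread_t workers[4];
	struct thread_limits limits[4];
	for (int t = 0; t < 4; t++) {
		limits[t].start_no = t * 900;
		limits[t].stop_no  = (t == 3) ? 3600 : (t + 1) * 900 - 1;
		pthread_create(&workers[t], NULL, affine_loop, &limits[t]);
	}
	for (int t = 0; t < 4; t++)
		pthread_join(workers[t], NULL);
	return 0;
}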