Example #1
void blur_function(const IplImage *latent_image, IplImage *blur_image, const CvMat *hom1, const CvMat *hom2)
{
	const int T = 20;
	const int tau = 10;
	CvMat *id_mat = cvCreateMat(3, 3, CV_32FC1);
	cvSetIdentity(id_mat, cvRealScalar(1));
	CvMat *invhom1 = cvCreateMat(3, 3, CV_32FC1);
	cvInvert(hom1, invhom1, CV_LU);
	
	CvMat *h1 = cvCreateMat(3, 3, CV_32FC1);
	CvMat *h2 = cvCreateMat(3, 3, CV_32FC1);
	CvSize size = cvSize(latent_image->width, latent_image->height);
	IplImage *temp = cvCreateImage(size, latent_image->depth, latent_image->nChannels);
	IplImage *blur = cvCreateImage(size, IPL_DEPTH_32F, latent_image->nChannels);
	cvSetZero(blur);
	
	for (int i = 1; i <= tau; ++i)
	{
		cvAddWeighted(id_mat, (double)(T-i)/T, invhom1, (double)i/T, 0, h1);
		cvAddWeighted(id_mat, (double)(T-i)/T, hom2, (double)i/T, 0, h2);
		cvWarpPerspective(latent_image, temp, h1, CV_INTER_LINEAR+CV_WARP_FILL_OUTLIERS, cvScalarAll(0));
		cvAdd(blur, temp, blur, NULL);
		cvWarpPerspective(latent_image, temp, h2, CV_INTER_LINEAR+CV_WARP_FILL_OUTLIERS, cvScalarAll(0));
		cvAdd(blur, temp, blur, NULL);
	}
	cvAdd(blur, latent_image, blur, NULL);
	cvConvertScale(blur, blur_image, 1.0/(2*tau+1), 0);
	
	cvReleaseMat(&id_mat);
	cvReleaseMat(&invhom1);
	cvReleaseMat(&h1);
	cvReleaseMat(&h2);
	cvReleaseImage(&temp);
	cvReleaseImage(&blur);
}
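This routine synthesizes motion blur for one frame by averaging 2*tau+1 renderings of the latent image: the original, tau copies warped backwards along invhom1, and tau copies warped forwards along hom2, with each intermediate pose obtained by blending the identity matrix with the homography element-wise. A minimal standalone sketch of that interpolation step (the helper name is hypothetical, and the element-wise blend is only an approximation of the true camera path):

#include <opencv/cv.h>

/* Warp src by the element-wise blend (1-t)*I + t*H, an approximate
   intermediate pose at fraction t of the exposure. */
static void warp_intermediate(const IplImage *src, IplImage *dst,
                              const CvMat *H, double t)
{
	CvMat *id = cvCreateMat(3, 3, CV_32FC1);
	CvMat *Hi = cvCreateMat(3, 3, CV_32FC1);
	cvSetIdentity(id, cvRealScalar(1));
	cvAddWeighted(id, 1.0 - t, H, t, 0, Hi);	/* element-wise blend */
	cvWarpPerspective(src, dst, Hi,
	                  CV_INTER_LINEAR + CV_WARP_FILL_OUTLIERS, cvScalarAll(0));
	cvReleaseMat(&id);
	cvReleaseMat(&Hi);
}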
Example #2
//Input image, edges, output filename
int homography_transformation(CvMat* src, char* out_filename, CvPoint2D32f* srcQuad){
	if(src == NULL || out_filename == NULL || srcQuad == NULL)
		return -1;
	
	CvPoint2D32f dstQuad[4];				//Destination vertices
	CvMat* warp_matrix = cvCreateMat(3,3,CV_32FC1);		//transformation matrix
	CvMat* dst = cvCloneMat(src);				//clone image
	int p[3]={CV_IMWRITE_JPEG_QUALITY,JPEG_QUALITY,0};	//format, quality

	dstQuad[0].x = 0;		//dst top left
	dstQuad[0].y = 0;
	dstQuad[1].x = src->cols;	//dst top right
	dstQuad[1].y = 0;
	dstQuad[2].x = 0;		//dst bottom left
	dstQuad[2].y = src->rows;
	dstQuad[3].x = src->cols;	//dst bottom right
	dstQuad[3].y = src->rows;

	// Compute the 3x3 transformation matrix that maps srcQuad onto dstQuad.
	cvGetPerspectiveTransform(srcQuad, dstQuad, warp_matrix);
	// Apply the perspective transformation. The interpolation flag may be
	// CV_INTER_LINEAR, CV_INTER_AREA, CV_INTER_CUBIC or CV_INTER_LANCZOS4;
	// cvScalarAll(0) fills outlier pixels with black.
	cvWarpPerspective(src, dst, warp_matrix, CV_INTER_LINEAR, cvScalarAll(0));

	cvSaveImage(out_filename, dst, p);
	// release everything

	cvReleaseMat(&warp_matrix);
	cvReleaseMat(&dst);
	return 0;
}
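A minimal caller sketch for the function above, assuming JPEG_QUALITY is a macro defined elsewhere in the original project and using the image's own corners as the source quadrangle (in the same TL, TR, BL, BR order the destination uses):

#include <opencv/cv.h>
#include <opencv/highgui.h>

#define JPEG_QUALITY 95	/* assumed; the real macro is defined elsewhere */

int demo(const char *in_path)
{
	CvMat *img = cvLoadImageM(in_path, CV_LOAD_IMAGE_COLOR);
	if (img == NULL)
		return -1;
	CvPoint2D32f quad[4] = {
		cvPoint2D32f(0, 0),			/* top left */
		cvPoint2D32f(img->cols, 0),		/* top right */
		cvPoint2D32f(0, img->rows),		/* bottom left */
		cvPoint2D32f(img->cols, img->rows)	/* bottom right */
	};
	int rc = homography_transformation(img, "rectified.jpg", quad);
	cvReleaseMat(&img);
	return rc;
}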
Example #3
/**
*    A  +-------------+  B
*      /               \
*     /                 \
*    /                   \
* D +-------------------- +  C
*/
void ofCvImage::warpPerspective( const ofPoint& A, const ofPoint& B,
                                 const ofPoint& C, const ofPoint& D )
{
	// compute matrix for perspectival warping (homography)
	CvPoint2D32f cvsrc[4];
	CvPoint2D32f cvdst[4];
	CvMat* translate = cvCreateMat( 3,3, CV_32FC1 );
	cvSetZero( translate );
    
    cvsrc[0].x = 0;
    cvsrc[0].y = 0;    
    cvsrc[1].x = width;
    cvsrc[1].y = 0;    
    cvsrc[2].x = width;
    cvsrc[2].y = height;    
    cvsrc[3].x = 0;
    cvsrc[3].y = height;    
    
    cvdst[0].x = A.x;
    cvdst[0].y = A.y;    
    cvdst[1].x = B.x;
    cvdst[1].y = B.y;    
    cvdst[2].x = C.x;
    cvdst[2].y = C.y;    
    cvdst[3].x = D.x;
    cvdst[3].y = D.y;    
    
	cvWarpPerspectiveQMatrix( cvsrc, cvdst, translate );  // calculate homography
	cvWarpPerspective( cvImage, cvImageTemp, translate );
    swapTemp();
	cvReleaseMat( &translate );
}
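Note: cvWarpPerspectiveQMatrix is the early OpenCV name for the four-point solver that later became cvGetPerspectiveTransform (used in the examples below); both fill a 3x3 CV_32FC1 matrix from four source/destination correspondences.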
Example #4
static void projectImg(IplImage *src, int64_t TRANS_X, int64_t TRANS_Y,
		IplImage *dst, CvMat *tmatrix) {
	if (tmatrix->rows == 2) {
		// 2x3 affine matrix: copy it and add the shift to the translation column
		CvMat* result = cvCreateMat(2, 3, CV_32FC1);
		cvSetReal2D(result, 0, 0, cvGetReal2D(tmatrix, 0, 0));
		cvSetReal2D(result, 0, 1, cvGetReal2D(tmatrix, 0, 1));
		cvSetReal2D(result, 1, 0, cvGetReal2D(tmatrix, 1, 0));
		cvSetReal2D(result, 1, 1, cvGetReal2D(tmatrix, 1, 1));
		cvSetReal2D(result, 0, 2, cvGetReal2D(tmatrix, 0, 2) + TRANS_X);
		cvSetReal2D(result, 1, 2, cvGetReal2D(tmatrix, 1, 2) + TRANS_Y);
		cvWarpAffine(src, dst, result, CV_INTER_LINEAR, cvScalarAll(0));
		cvReleaseMat(&result);
	} else if (tmatrix->rows == 3) {
		// 3x3 homography: build a pure-translation matrix
		CvMat* offset = cvCreateMat(3, 3, CV_32FC1);
		cvSetReal2D(offset, 0, 0, 1);
		cvSetReal2D(offset, 0, 1, 0);
		cvSetReal2D(offset, 0, 2, TRANS_X);
		cvSetReal2D(offset, 1, 0, 0);
		cvSetReal2D(offset, 1, 1, 1);
		cvSetReal2D(offset, 1, 2, TRANS_Y);
		cvSetReal2D(offset, 2, 0, 0);
		cvSetReal2D(offset, 2, 1, 0);
		cvSetReal2D(offset, 2, 2, 1);
		// left-multiply: result = offset * tmatrix
		CvMat* result = cvCreateMat(3, 3, CV_32FC1);
		cvMatMul(offset, tmatrix, result);
		cvWarpPerspective(src, dst, result, CV_INTER_LINEAR, cvScalarAll(0));
		cvReleaseMat(&offset);
		cvReleaseMat(&result);
	}
}
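In the 3x3 branch the hand-built offset matrix is just a translation homography left-multiplied onto tmatrix. A sketch of an equivalent, more compact construction of the same matrix:

	CvMat *offset = cvCreateMat(3, 3, CV_32FC1);
	cvSetIdentity(offset, cvRealScalar(1));
	cvSetReal2D(offset, 0, 2, (double)TRANS_X);	/* x shift */
	cvSetReal2D(offset, 1, 2, (double)TRANS_Y);	/* y shift */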
Example #5
//--------------------------------------------------------------------------------
void ofxCvImage::warpPerspective( const ofPoint& A, const ofPoint& B, const ofPoint& C, const ofPoint& D ) {
	if( !bAllocated ){
		ofLog(OF_LOG_ERROR, "in warpPerspective, need to allocate image first");
		return;		
	}
    // compute matrix for perspectival warping (homography)
    CvPoint2D32f cvsrc[4];
    CvPoint2D32f cvdst[4];
    CvMat* translate = cvCreateMat( 3,3, CV_32FC1 );
    cvSetZero( translate );

    cvdst[0].x = 0;
    cvdst[0].y = 0;
    cvdst[1].x = width;
    cvdst[1].y = 0;
    cvdst[2].x = width;
    cvdst[2].y = height;
    cvdst[3].x = 0;
    cvdst[3].y = height;

    cvsrc[0].x = A.x;
    cvsrc[0].y = A.y;
    cvsrc[1].x = B.x;
    cvsrc[1].y = B.y;
    cvsrc[2].x = C.x;
    cvsrc[2].y = C.y;
    cvsrc[3].x = D.x;
    cvsrc[3].y = D.y;

    cvWarpPerspectiveQMatrix( cvsrc, cvdst, translate );  // calculate homography
    cvWarpPerspective( cvImage, cvImageTemp, translate );
    swapTemp();
    flagImageChanged();
    cvReleaseMat( &translate );
}
Example #6
//--------------------------------------------------------------------------------
void ofxCvImage::warpIntoMe( ofxCvImage& mom, const ofPoint src[4], const ofPoint dst[4] ){
    
	if( !bAllocated ){
		ofLog(OF_LOG_ERROR, "in warpIntoMe, image not allocated");
		return;		
	}
	if( !mom.bAllocated ){
		ofLog(OF_LOG_ERROR, "in warpIntoMe, mom not allocated");
		return;		
	}
		
	if( mom.getCvImage()->nChannels == cvImage->nChannels &&
        mom.getCvImage()->depth == cvImage->depth ) {

    	// compute matrix for perspectival warping (homography)
    	CvPoint2D32f cvsrc[4];
    	CvPoint2D32f cvdst[4];
    	CvMat* translate = cvCreateMat( 3, 3, CV_32FC1 );
    	cvSetZero( translate );
    	for (int i = 0; i < 4; i++ ) {
    		cvsrc[i].x = src[i].x;
    		cvsrc[i].y = src[i].y;
    		cvdst[i].x = dst[i].x;
    		cvdst[i].y = dst[i].y;
    	}
    	cvWarpPerspectiveQMatrix( cvsrc, cvdst, translate );  // calculate homography
    	cvWarpPerspective( mom.getCvImage(), cvImage, translate);
        flagImageChanged();
    	cvReleaseMat( &translate );

    } else {
        ofLog(OF_LOG_ERROR, "in warpIntoMe: mom image type has to match");
    }
}
Example #7
//--------------------------------------------------------------------------------
void ofxCvImage::warpPerspective( const ofPoint& A, const ofPoint& B, const ofPoint& C, const ofPoint& D ) {
	if( !bAllocated ){
		ofLogError("ofxCvImage") << "warpPerspective(): image not allocated";
		return;		
	}
    // compute matrix for perspectival warping (homography)
    CvPoint2D32f cvsrc[4];
    CvPoint2D32f cvdst[4];
    CvMat* translate = cvCreateMat( 3,3, CV_32FC1 );
    cvSetZero( translate );

    cvdst[0].x = 0;
    cvdst[0].y = 0;
    cvdst[1].x = width;
    cvdst[1].y = 0;
    cvdst[2].x = width;
    cvdst[2].y = height;
    cvdst[3].x = 0;
    cvdst[3].y = height;

    cvsrc[0].x = A.x;
    cvsrc[0].y = A.y;
    cvsrc[1].x = B.x;
    cvsrc[1].y = B.y;
    cvsrc[2].x = C.x;
    cvsrc[2].y = C.y;
    cvsrc[3].x = D.x;
    cvsrc[3].y = D.y;

    cvGetPerspectiveTransform( cvsrc, cvdst, translate );  // calculate homography
    cvWarpPerspective( cvImage, cvImageTemp, translate );
    swapTemp();
    flagImageChanged();
    cvReleaseMat( &translate );
}
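Compare this with Example #3: there the full image rectangle is the source and the quad A..D the destination (the image is pushed into the quad), while here the quad is the source and the rectangle the destination, so the warp rectifies the region A..D to fill the whole image.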
Example #8
//--------------------------------------------------------------------------------
void ofxCvImage::warpIntoMe( ofxCvImage& mom, const ofPoint src[4], const ofPoint dst[4] ){
    
	if( !bAllocated ){
		ofLogError("ofxCvImage") << "warpIntoMe(): image not allocated";
		return;		
	}
	if( !mom.bAllocated ){
		ofLogError("ofxCvImage") << "warpIntoMe(): source image not allocated";
		return;		
	}
		
	if( mom.getCvImage()->nChannels == cvImage->nChannels &&
        mom.getCvImage()->depth == cvImage->depth ) {

    	// compute matrix for perspectival warping (homography)
    	CvPoint2D32f cvsrc[4];
    	CvPoint2D32f cvdst[4];
    	CvMat* translate = cvCreateMat( 3, 3, CV_32FC1 );
    	cvSetZero( translate );
    	for (int i = 0; i < 4; i++ ) {
    		cvsrc[i].x = src[i].x;
    		cvsrc[i].y = src[i].y;
    		cvdst[i].x = dst[i].x;
    		cvdst[i].y = dst[i].y;
    	}
    	cvGetPerspectiveTransform( cvsrc, cvdst, translate );  // calculate homography
    	cvWarpPerspective( mom.getCvImage(), cvImage, translate);
        flagImageChanged();
    	cvReleaseMat( &translate );

    } else {
        ofLogError("ofxCvImage") << "warpIntoMe(): image type mismatch";
    }
}
Example #9
int main(int argc, char** argv)
{
	IplImage* image = cvLoadImage(argv[1], CV_LOAD_IMAGE_COLOR); // cvCreateImage(cvSize(320, 240), IPL_DEPTH_8U, 3);
	cvNamedWindow("result", CV_WINDOW_AUTOSIZE);
/*
	gsl_qrng* q = gsl_qrng_alloc(gsl_qrng_halton, 2);
	for (int i = 0; i < 64; i++)
	{
		double v[2];
		gsl_qrng_get(q, v);
		int x = (int)(v[0] * 319), y = (int)(v[1] * 239);
		image->imageData[x * 3 + y * image->widthStep] = image->imageData[x * 3 + 1 + y * image->widthStep] = image->imageData[x * 3 + 2 + y * image->widthStep] = 255;
	}
	gsl_qrng_free(q);
*/
	gsl_rng_env_setup();

	const gsl_rng_type* T = gsl_rng_default;
	gsl_rng* r = gsl_rng_alloc(T);
	gsl_rng_set(r, time(NULL));

	double sigma = 0.1 * ((image->width < image->height) ? image->width : image->height);

	CvPoint2D32f src[4], dst[4], rds[4];
	src[0].x = 0; src[0].y = 0;
	src[1].x = image->width; src[1].y = 0;
	src[2].x = image->width; src[2].y = image->height;
	src[3].x = 0; src[3].y = image->height;
	dst[0].x = -image->width / 2 + gsl_ran_gaussian(r, sigma); dst[0].y = -image->height / 2 + gsl_ran_gaussian(r, sigma);
	dst[1].x = image->width / 2 + gsl_ran_gaussian(r, sigma); dst[1].y = -image->height / 2 + gsl_ran_gaussian(r, sigma);
	dst[2].x = image->width / 2 + gsl_ran_gaussian(r, sigma); dst[2].y = image->height / 2 + gsl_ran_gaussian(r, sigma);
	dst[3].x = -image->width / 2 + gsl_ran_gaussian(r, sigma); dst[3].y = image->height / 2 + gsl_ran_gaussian(r, sigma);
	double radius = gsl_ran_gaussian(r, 3.1415926 / 8.0);
	double minx = 0, miny = 0, maxx = 0, maxy = 0;
	for (int i = 0; i < 4; i++)
	{
		rds[i].x = dst[i].x * cos(radius) - dst[i].y * sin(radius);
		rds[i].y = dst[i].x * sin(radius) + dst[i].y * cos(radius);
		if (rds[i].x < minx) minx = rds[i].x;
		if (rds[i].y < miny) miny = rds[i].y;
		if (rds[i].x > maxx) maxx = rds[i].x;
		if (rds[i].y > maxy) maxy = rds[i].y;
	}
	for (int i = 0; i < 4; i++)
	{
		rds[i].x -= minx;
		rds[i].y -= miny;
	}
	gsl_rng_free(r);
	float _m[9];
	CvMat m = cvMat(3, 3, CV_32FC1, _m);
	cvGetPerspectiveTransform(src, rds, &m);
	IplImage* transformed = cvCreateImage(cvSize(maxx - minx, maxy - miny), IPL_DEPTH_8U, 3);
	cvWarpPerspective(image, transformed, &m);
	cvShowImage("result", transformed);
	cvWaitKey(0);
	cvDestroyWindow("result");
	return 0;
}
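Two details worth noting: the call to cvWarpPerspective omits the last two arguments because the C API defaults them to CV_INTER_LINEAR + CV_WARP_FILL_OUTLIERS with a black fill, and the min/max pass over the rotated corners both sizes the output image and shifts the quad into positive coordinates so the warped result is fully visible.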
Example #10
void margDisplay::feedImg(ofxCvColorImage& _source) {
	if (image.getWidth() != _source.getWidth()) {
		image.clear();
		image.allocate(_source.getWidth(), _source.getHeight());
	}

	cvWarpPerspective(_source.getCvImage(), image.getCvImage(), translate);
	image.flagImageChanged();
}
Example #11
void SimpleImageProjector::project(IplImage* dst, A4PreciseDetectedRecord dstRecord)
{
	CvPoint2D32f dstCorners[4];
	dstCorners[0] = cvPoint2D32f(dstRecord.UL.x, dstRecord.UL.y);
	dstCorners[1] = cvPoint2D32f(dstRecord.UR.x, dstRecord.UR.y);
	dstCorners[2] = cvPoint2D32f(dstRecord.DL.x, dstRecord.DL.y);
	dstCorners[3] = cvPoint2D32f(dstRecord.DR.x, dstRecord.DR.y);
	cvWarpPerspective(projection, dst, transformMat, CV_INTER_LINEAR);
}
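As posted, dstCorners is populated but never passed anywhere: the warp uses the member transformMat as-is, so presumably the homography is recomputed from the detected record elsewhere in the class.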
Example #12
void ofxImageROI::projectToTarget(ofPoint *dstPoints)
{
	CvPoint2D32f cvsrc[4];
	CvPoint2D32f cvdst[4];

	cvsrc[0].x = cornerPoints[0].x;
	cvsrc[0].y = cornerPoints[0].y;
	cvsrc[1].x = cornerPoints[1].x;
	cvsrc[1].y = cornerPoints[1].y;
	cvsrc[2].x = cornerPoints[2].x;
	cvsrc[2].y = cornerPoints[2].y;
	cvsrc[3].x = cornerPoints[3].x;
	cvsrc[3].y = cornerPoints[3].y;

	cvdst[0].x = dstPoints[0].x;
	cvdst[0].y = dstPoints[0].y;
	cvdst[1].x = dstPoints[1].x;
	cvdst[1].y = dstPoints[1].y;
	cvdst[2].x = dstPoints[2].x;
	cvdst[2].y = dstPoints[2].y;
	cvdst[3].x = dstPoints[3].x;
	cvdst[3].y = dstPoints[3].y;

	CvMat* src_mat = cvCreateMat(4, 2, CV_32FC1);
	CvMat* dst_mat = cvCreateMat(4, 2, CV_32FC1);

	cvSetData(src_mat, cvsrc, sizeof(CvPoint2D32f));
	cvSetData(dst_mat, cvdst, sizeof(CvPoint2D32f));

	CvMat * translate = cvCreateMat(3,3,CV_32FC1); // allocate the matrix
    cvFindHomography(src_mat, dst_mat, translate); // compute the homography

    // (re)allocate the buffers when the image size has changed
    if (this->bAllocated() == true && inputImg.bAllocated == false) {
        projectedImage.allocate(this->width, this->height, OF_IMAGE_COLOR);
        inputImg.allocate(this->width, this->height);
        outputImg.allocate(this->width, this->height);
    }

    inputImg.setFromPixels(this->getPixelsRef()); // load the pixels
    cvWarpPerspective(inputImg.getCvImage(), outputImg.getCvImage(), translate, CV_INTER_LINEAR+CV_WARP_FILL_OUTLIERS, cvScalarAll(255)); // perspective projection

    // convert the output to an ofImage
    projectedImage.setFromPixels(outputImg.getPixels(), outputImg.getWidth(), outputImg.getHeight(), OF_IMAGE_COLOR);

    cvReleaseMat(&translate);
    cvReleaseMat(&src_mat);
    cvReleaseMat(&dst_mat);
}
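With exactly four correspondences cvFindHomography behaves like cvGetPerspectiveTransform, but it also accepts more points and can reject outliers. A sketch of a RANSAC variant (the helper name is hypothetical; the point arrays reuse the same CvPoint2D32f-as-Nx2-float layout as above):

#include <opencv/cv.h>

/* Estimate a homography from n >= 4 noisy correspondences with RANSAC.
   Returns a 3x3 CV_32FC1 matrix owned by the caller. */
CvMat *estimate_homography(const CvPoint2D32f *src, const CvPoint2D32f *dst, int n)
{
	CvMat srcm = cvMat(n, 2, CV_32FC1, (void *)src);
	CvMat dstm = cvMat(n, 2, CV_32FC1, (void *)dst);
	CvMat *H = cvCreateMat(3, 3, CV_32FC1);
	/* 3.0 is the reprojection threshold in pixels */
	cvFindHomography(&srcm, &dstm, H, CV_RANSAC, 3.0, NULL);
	return H;
}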
Example #13
void COpenCVMFCView::OnWarpPerspect()
{
	// TODO: Add your command handler code here

	CvPoint2D32f srcQuad[4], dstQuad[4];
	CvMat* warp_matrix = cvCreateMat(3,3,CV_32FC1);
	IplImage *src=0, *dst=0;

	src = cvCloneImage(workImg);
	cvFlip(src);
	dst = cvCloneImage(src);
	dst->origin = src->origin;
	cvZero(dst);

	srcQuad[0].x = 0;                         //src Top left
	srcQuad[0].y = 0;
	srcQuad[1].x = (float) src->width - 1;    //src Top right
	srcQuad[1].y = 0;
	srcQuad[2].x = 0;                         //src Bottom left
	srcQuad[2].y = (float) src->height - 1;
	srcQuad[3].x = (float) src->width - 1;    //src Bot right
	srcQuad[3].y = (float) src->height - 1;
	//- - - - - - - - - - - - - -//
	dstQuad[0].x = (float)(src->width*0.05);  //dst Top left
	dstQuad[0].y = (float)(src->height*0.33);
	dstQuad[1].x = (float)(src->width*0.9);   //dst Top right
	dstQuad[1].y = (float)(src->height*0.25);
	dstQuad[2].x = (float)(src->width*0.2);   //dst Bottom left
	dstQuad[2].y = (float)(src->height*0.7);      
	dstQuad[3].x = (float)(src->width*0.8);   //dst Bot right
	dstQuad[3].y = (float)(src->height*0.9);

	cvGetPerspectiveTransform(srcQuad,dstQuad,warp_matrix);
	cvWarpPerspective(src,dst,warp_matrix);

	cvNamedWindow( "Perspective_Warp", 1 );
	cvShowImage( "Perspective_Warp", dst );

	m_ImageType=-3;
	cvWaitKey();

	cvDestroyWindow( "Perspective_Warp" );
	cvReleaseImage(&src);
	cvReleaseImage(&dst);
	cvReleaseMat(&warp_matrix);

	m_ImageType=imageType(workImg);
}
Example #14
//-------------------------------------------------------------------------------------
void ofCvColorImage::warpIntoMe(ofCvColorImage &mom, ofPoint2f src[4], ofPoint2f dst[4]){
	// compute matrix for perspectival warping (homography)
	CvPoint2D32f cvsrc[4];
	CvPoint2D32f cvdst[4];
	CvMat *translate = cvCreateMat(3,3,CV_32FC1);
	cvSetZero(translate);
	for (int i = 0; i < 4; i++){
		cvsrc[i].x = src[i].x;
		cvsrc[i].y = src[i].y;
		cvdst[i].x = dst[i].x;
		cvdst[i].y = dst[i].y;
	}
	cvWarpPerspectiveQMatrix(cvsrc, cvdst, translate);  // calculate homography
	cvWarpPerspective( mom.getCvImage(), cvImage, translate);
	cvReleaseMat(&translate);
}
Example #15
void perspectiveTransform() {

    processImage = windowImage;
    
    setVariables();                                                                           // set the four preset destination points
    setPoints();                                                                              // pick the source points on the image
    cvDestroyWindow("monitor");
    windowImage = cvCreateImage(cvSize(squareWindowSize,squareWindowSize), IPL_DEPTH_8U, 3);
    cvGetPerspectiveTransform(originalPoints, transPoints, transmat);                         // compute transmat
    cvWarpPerspective(processImage , windowImage , transmat);                                 // apply the transform using transmat
    cvNamedWindow("control");
    cvMoveWindow("control", middlewindowX, middlewindowY);
    cvShowImage("control", windowImage);
    cvWaitKey();
    cvDestroyWindow("control");
}
Example #16
int main(int argc, const char * argv[]) {
    
    CvPoint2D32f srcQuad[4], dstQuad[4];
    CvMat* warp_matrix = cvCreateMat(3, 3, CV_32FC1);
    IplImage *src, *dst;
    
    if (argc == 2 && ((src = cvLoadImage(argv[1], 1)) != 0)) {
        dst = cvCloneImage( src );
        dst->origin = src->origin;
        cvZero( dst );
        
        // warp matrix
        srcQuad[0].x = 0;
        srcQuad[0].y = 0;
        srcQuad[1].x = src->width - 1;
        srcQuad[1].y = 0;
        srcQuad[2].x = 0;
        srcQuad[2].y = src->height - 1;
        srcQuad[3].x = src->width - 1;
        srcQuad[3].y = src->height - 1;
        
        dstQuad[0].x = src->width * 0.005;
        dstQuad[0].y = src->height * 0.33;
        dstQuad[1].x = src->width * 0.9;
        dstQuad[1].y = src->height * 0.25;
        dstQuad[2].x = src->width * 0.2;
        dstQuad[2].y = src->height * 0.7;
        dstQuad[3].x = src->width * 0.8;
        dstQuad[3].y = src->height * 0.9;
        
        cvGetPerspectiveTransform( srcQuad, dstQuad, warp_matrix );
        cvWarpPerspective( src, dst, warp_matrix );
        
        
        cvNamedWindow( "Perspective_Transform", 1 );
        cvShowImage( "Perspective_Transform" , dst );
        
        cvWaitKey();
        
        cvReleaseImage( &dst );
        cvReleaseImage( &src );

    }
    
    cvReleaseMat( &warp_matrix );
    
    return 0;
}
Example #17
int hamuju_birds_eye(const char * homography_file, hamuju_image_t * in_image, hamuju_image_t * out_image)
{
	CvMat *homography;
	CvSize imgsize;
	IplImage *orig, *birdeye;

	if(homography_file == NULL || in_image == NULL || out_image == NULL)
	{
		fprintf(stderr, "%s: Got a null pointer!\n", __func__);
		return -1;
	}

	printf("Loading Homography: %s\n", homography_file);
	homography = (CvMat *) cvLoad(homography_file, NULL, NULL, NULL);

	printf("Allocating memory for temp IplImage objects\n");
	imgsize = cvSize(in_image->width, in_image->height);
	orig = cvCreateImage(imgsize, 8, 3);
	birdeye = cvCreateImage(imgsize, 8, 3);

	// copy the image to OpenCV format
	printf("Copying original image to IplImage");
	hamuju_image_copy_to_iplimage((hamuju_iplimage_t *)orig, in_image);
	
	// transform the image
	printf("Applying warp transform\n");
	/**
	 * 2D Homography matrix:
	 * [R11, R12, T1]
	 * [R21, R22, T2]
	 * [P  , P  ,  1]
	 */
	CV_MAT_ELEM(*homography, float, 2, 2) = 30; // zoom
	cvWarpPerspective(orig, birdeye, homography,
			CV_INTER_LINEAR | CV_WARP_FILL_OUTLIERS | CV_WARP_INVERSE_MAP,
			cvScalarAll(0));

	// copy bird's eye view to destination image
	printf("Copying transformed image to hamuju_image_t\n");
	hamuju_image_copy_from_iplimage(out_image, (hamuju_iplimage_t *)birdeye);

	// free temp memory
	printf("Releasing IplImage objects\n");
	cvReleaseImage(&orig);
	cvReleaseImage(&birdeye);
	cvReleaseMat(&homography);
	return 0;
}
Example #18
IplImage *square_puzzle(IplImage *in, const CvPoint2D32f *location) {
    int xsize = location[1].x - location[0].x;
    int ysize = xsize;

    CvPoint2D32f warped_coordinates[4];
    warped_coordinates[0] = cvPointTo32f(cvPoint(0,       0));
    warped_coordinates[1] = cvPointTo32f(cvPoint(xsize-1, 0));
    warped_coordinates[2] = cvPointTo32f(cvPoint(xsize-1, ysize-1));
    warped_coordinates[3] = cvPointTo32f(cvPoint(0,       ysize-1));

    CvMat *map_matrix = cvCreateMat(3, 3, CV_64FC1);
    cvGetPerspectiveTransform(location, warped_coordinates, map_matrix);

    IplImage *warped_image = cvCreateImage(cvSize(xsize, ysize), 8, in->nChannels);
    CvScalar fillval=cvScalarAll(0);
    cvWarpPerspective(in, warped_image, map_matrix, CV_WARP_FILL_OUTLIERS, fillval);
    cvReleaseMat(&map_matrix);

    return warped_image;
}
Example #19
void llcv_unwarp(dmz_context *dmz, IplImage *input, const dmz_point source_points[4], const dmz_rect to_rect, IplImage *output) {
	//  dmz_point source_points[4], dest_points[4];

#ifdef IOS_DMZ
	ios_gpu_unwarp(dmz, input, source_points, output);
#else

  dmz_trace_log("pre-warp in.width:%i in.height:%i in.widthStep:%i in.nChannels:%i", input->width, input->height, input->widthStep, input->nChannels);
	
  assert(output != NULL);
  assert(output->imageData != NULL);
  
  dmz_trace_log("expecting out.width:%i out.height:%i out.widthStep:%i out.nChannels:%i", output->width, output->height, output->widthStep, output->nChannels);

#if ANDROID_USE_GLES_WARP
	if (dmz_use_gles_warp()) {
		llcv_gles_warp_perspective(dmz->mz, input, source_points, output);
	}
#endif
	if (!dmz_use_gles_warp()) {
		/* if dmz_use_gles_warp() has changed from above, then we've encountered an error and are falling back to the old way.*/

		// Old-fashioned openCV
		float matrix[16];
		dmz_point dest_points[4];
		dmz_rect_get_points(to_rect, dest_points);

		// Calculate row-major matrix
		llcv_calc_persp_transform(matrix, 9, true, source_points, dest_points);
		CvMat *cv_persp_mat = cvCreateMat(3, 3, CV_32FC1);
		for (int r = 0; r < 3; r++) {
			for (int c = 0; c < 3; c++) {
				CV_MAT_ELEM(*cv_persp_mat, float, r, c) = matrix[3 * r + c];
			}
		}
		cvWarpPerspective(input, output, cv_persp_mat, CV_INTER_LINEAR + CV_WARP_FILL_OUTLIERS, cvScalarAll(0));
		cvReleaseMat(&cv_persp_mat);
	}
#endif
}
Example #20
void margDisplay::feedImg(unsigned char* pixels, int _w, int _h, bool bIsColor) {
	if (_w != image.getWidth()) {
		if(image.getWidth() != 0)image.clear();
		image.allocate(_w, _h);
	}
	if (source.getWidth() != _w) {
		if(source.getWidth() != 0) source.clear();
		source.allocate(_w, _h);
	}
	if (bIsColor) {
		source.setFromPixels(pixels, _w, _h);
	}
	else {
		if (gSource.getWidth() != _w) {
			if(gSource.getWidth() != 0) gSource.clear();
			gSource.allocate(_w, _h);
		}
		gSource.setFromPixels(pixels, _w, _h);
		source = gSource;
	}
	cvWarpPerspective(source.getCvImage(), image.getCvImage(), translate);
	image.flagImageChanged();
}
Example #21
void opencv_birds_eye_remap(ProbabilisticMapParams map, const IplImage *src_image, IplImage *dst_image, double camera_height, double camera_pitch, stereo_util stereo_util_instance)
{
  // coordinates of 4 quadrangle vertices in the source image.
  CvPoint2D32f src_pts[4] = { cvPoint2D32f(180, 400), cvPoint2D32f(180, 320), cvPoint2D32f(520, 320), cvPoint2D32f(520, 400) };

  // coordinates of the 4 corresponding quadrangle vertices in the destination image.
  CvPoint2D32f dst_pts[4];

  for (int i = 0; i < 4; i++)
  {
    carmen_position_t right_point;
    right_point.x = src_pts[i].x;
    right_point.y = src_pts[i].y;

    carmen_vector_3D_t p3D = camera_to_world(right_point, camera_height, camera_pitch, stereo_util_instance);

    int x_map = map_grid_x(map, p3D.x);
    int y_map = map_grid_y(map, p3D.y);
    dst_pts[i] = cvPoint2D32f(x_map, y_map);
  }

  // FIND THE HOMOGRAPHY
  static CvMat *homography_matrix;
  if (homography_matrix == NULL)
    homography_matrix = cvCreateMat(3, 3, CV_32F);
  cvGetPerspectiveTransform(dst_pts, src_pts, homography_matrix);

  float Z = camera_height;

  CV_MAT_ELEM(*homography_matrix, float, 2, 2) = Z;
  cvWarpPerspective(src_image,
      dst_image,
      homography_matrix,
      CV_INTER_LINEAR | CV_WARP_INVERSE_MAP | CV_WARP_FILL_OUTLIERS,
      cvScalarAll(0));
}
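The matrix here is deliberately computed in the dst-to-src direction (map cell to camera pixel), which is why the warp passes CV_WARP_INVERSE_MAP: each destination grid cell samples the source image directly. Overwriting element (2,2) with the camera height rescales the homogeneous coordinate, the same trick as the bird's-eye chessboard example further below.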
Example #22
IplImage* cutSign(IplImage* origImg, CvPoint* corners, int numcorners, bool drawcircles)
{

	// convert corners to CvPoint2D32f.
	CvPoint2D32f cornersf[numcorners];
	for (int i=0; i<numcorners; ++i)
		cornersf[i] = cvPointTo32f(corners[i]);

	if (_debug) printf("Corners: %d,%d %d,%d %d,%d %d,%d\n",corners[0].x,corners[0].y,corners[1].x,corners[1].y,corners[2].x,corners[2].y,corners[3].x,corners[3].y);

	// Create the target image with the right size.
	double xDiffBottom = pointDist(corners[0], corners[1]);
	double yDiffLeft = pointDist(corners[0], corners[3]);
	IplImage* cut = cvCreateImage(cvSize(xDiffBottom,yDiffLeft), IPL_DEPTH_8U, 3);

	// Target points for perspective correction.
	CvPoint2D32f cornerstarget[numcorners];
	cornerstarget[0] = cvPoint2D32f(0,0);
	cornerstarget[1] = cvPoint2D32f(cut->width-1,0);
	cornerstarget[2] = cvPoint2D32f(cut->width-1,cut->height-1);
	cornerstarget[3] = cvPoint2D32f(0,cut->height-1);
	if (_debug) printf("Corners: %f,%f %f,%f %f,%f %f,%f\n",cornerstarget[0].x,cornerstarget[0].y,cornerstarget[1].x,cornerstarget[1].y,cornerstarget[2].x,cornerstarget[2].y,cornerstarget[3].x,cornerstarget[3].y);

	// Apply perspective correction to the image.
	CvMat* transmat = cvCreateMat(3, 3, CV_32FC1); // rows, columns
	transmat = cvGetPerspectiveTransform(cornersf,cornerstarget,transmat);
	cvWarpPerspective(origImg,cut,transmat);
	cvReleaseMat(&transmat);

	// Draw yellow circles around the corners.
	if (drawcircles)
		for (int i=0; i<numcorners; ++i)
			cvCircle(origImg, corners[i],5,CV_RGB(255,255,0),2);

	return cut;
}
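Incidentally, cvGetPerspectiveTransform returns the matrix it was handed, so the reassignment to transmat above is redundant but harmless. The output size is taken from the quad's top and left edge lengths, so strongly skewed signs will be slightly stretched.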
Example #23
// panorama stitching
void SiftMatch::on_mosaicButton_clicked()
{
    // Stitching is only possible if the transform matrix was computed successfully, i.e. the two images share a common region
    if(H)
    {
        // stitch the images: img1 is the left image, img2 the right
        CalcFourCorner();// compute where the four corners of image 2 land after the transform
        // Allocate the mosaic xformed: its height is the smaller of the two image heights, and its width is chosen from the transformed positions of image 2's top-right and bottom-right corners
        xformed = cvCreateImage(cvSize(MIN(rightTop.x,rightBottom.x),MIN(img1->height,img2->height)),IPL_DEPTH_8U,3);
        // Projectively warp the right image img2 with H (the result shifts right) and store it in xformed
        cvWarpPerspective(img2,xformed,H,CV_INTER_LINEAR + CV_WARP_FILL_OUTLIERS,cvScalarAll(0));

        //cvNamedWindow(IMG_MOSAIC_TEMP); // show the intermediate image, i.e. just the warped image 2
        //cvShowImage(IMG_MOSAIC_TEMP,xformed);

        // Naive stitching: simply overlay the left image img1 onto the left side of xformed
        xformed_simple = cvCloneImage(xformed);// naive mosaic, cloned from xformed
        cvSetImageROI(xformed_simple,cvRect(0,0,img1->width,img1->height));
        cvAddWeighted(img1,1,xformed_simple,0,0,xformed_simple);
        cvResetImageROI(xformed_simple);

        //cvNamedWindow(IMG_MOSAIC_SIMPLE);// create the window
        //cvShowImage(IMG_MOSAIC_SIMPLE,xformed_simple);// show the naive mosaic

        // processed mosaic, cloned from xformed
        xformed_proc = cvCloneImage(xformed);

        // the part left of the overlap region is taken entirely from image 1
        cvSetImageROI(img1,cvRect(0,0,MIN(leftTop.x,leftBottom.x),xformed_proc->height));
        cvSetImageROI(xformed,cvRect(0,0,MIN(leftTop.x,leftBottom.x),xformed_proc->height));
        cvSetImageROI(xformed_proc,cvRect(0,0,MIN(leftTop.x,leftBottom.x),xformed_proc->height));
        cvAddWeighted(img1,1,xformed,0,0,xformed_proc);
        cvResetImageROI(img1);
        cvResetImageROI(xformed);
        cvResetImageROI(xformed_proc);

        //cvNamedWindow(IMG_MOSAIC_BEFORE_FUSION);
        //cvShowImage(IMG_MOSAIC_BEFORE_FUSION,xformed_proc);// show the mosaic before fusion

        // fuse the overlap region with a weighted average
        int start = MIN(leftTop.x,leftBottom.x) ;// start position, i.e. the left boundary of the overlap region
        double processWidth = img1->width - start;// width of the overlap region
        double alpha = 1;// weight of the img1 pixel
        for(int i=0; i<xformed_proc->height; i++)// iterate over rows
        {
            const uchar * pixel_img1 = ((uchar *)(img1->imageData + img1->widthStep * i));// pointer to row i of img1
            const uchar * pixel_xformed = ((uchar *)(xformed->imageData + xformed->widthStep * i));// pointer to row i of xformed
            uchar * pixel_xformed_proc = ((uchar *)(xformed_proc->imageData + xformed_proc->widthStep * i));// pointer to row i of xformed_proc
            for(int j=start; j<img1->width; j++)// iterate over the columns of the overlap region
            {
                // If the xformed pixel is a black hole (no data), copy img1's pixel unchanged
                if(pixel_xformed[j*3] < 50 && pixel_xformed[j*3+1] < 50 && pixel_xformed[j*3+2] < 50 )
                {
                    alpha = 1;
                }
                else
                {   // img1's weight falls off linearly with the distance from the left boundary of the overlap
                    alpha = (processWidth-(j-start)) / processWidth ;
                }
                pixel_xformed_proc[j*3] = pixel_img1[j*3] * alpha + pixel_xformed[j*3] * (1-alpha);// B channel
                pixel_xformed_proc[j*3+1] = pixel_img1[j*3+1] * alpha + pixel_xformed[j*3+1] * (1-alpha);// G channel
                pixel_xformed_proc[j*3+2] = pixel_img1[j*3+2] * alpha + pixel_xformed[j*3+2] * (1-alpha);// R channel
            }
        }
        cvNamedWindow(IMG_MOSAIC_PROC);// create the window
        cvShowImage(IMG_MOSAIC_PROC,xformed_proc);// show the processed mosaic

        //* Take the average of the two images over the overlap region; results are poor
        // set ROIs to the rectangle containing the overlap region
        cvSetImageROI(xformed_proc,cvRect(MIN(leftTop.x,leftBottom.x),0,img1->width-MIN(leftTop.x,leftBottom.x),xformed_proc->height));
        cvSetImageROI(img1,cvRect(MIN(leftTop.x,leftBottom.x),0,img1->width-MIN(leftTop.x,leftBottom.x),xformed_proc->height));
        cvSetImageROI(xformed,cvRect(MIN(leftTop.x,leftBottom.x),0,img1->width-MIN(leftTop.x,leftBottom.x),xformed_proc->height));
        cvAddWeighted(img1,0.5,xformed,0.5,0,xformed_proc);
        cvResetImageROI(xformed_proc);
        cvResetImageROI(img1);
        cvResetImageROI(xformed); //*/

        /* Filter the area around the seam to remove it; results are poor
        // set rectangular ROIs straddling the seam on both images
        cvSetImageROI(xformed_proc,cvRect(img1->width-10,0,img1->width+10,xformed->height));
        cvSetImageROI(xformed,cvRect(img1->width-10,0,img1->width+10,xformed->height));
        cvSmooth(xformed,xformed_proc,CV_MEDIAN,5);// median-filter the area around the seam
        cvResetImageROI(xformed);
        cvResetImageROI(xformed_proc);
        cvShowImage(IMG_MOSAIC_PROC,xformed_proc);// show the processed mosaic */

        /* Tried sharpening to fix the distortion of the warped image; for heavily distorted images the results are poor
        double a[]={  0, -1,  0, -1,  5, -1, 0, -1,  0  };// Laplacian kernel data
        CvMat kernel = cvMat(3,3,CV_64FC1,a);// Laplacian kernel
        cvFilter2D(xformed_proc,xformed_proc,&kernel);// filter
        cvShowImage(IMG_MOSAIC_PROC,xformed_proc);// show the processed mosaic*/

        // save the mosaics
        QString name_xformed = name1;// file name: the original name with the index removed and "_Mosaic" appended
        cvSaveImage(name_xformed.replace( name_xformed.lastIndexOf(".",-1)-1 , 1 , "_Mosaic").toAscii().data() , xformed_simple);// save the naive mosaic
        cvSaveImage(name_xformed.insert( name_xformed.lastIndexOf(".",-1) , "_Proc").toAscii().data() , xformed_proc);// save the processed mosaic
        ui->mosaicButton->setEnabled(false);// disable the stitch button
    }
}
Example #24
int main( int argc, char** argv )
{
    IplImage* img1, * img2, * stacked;
    struct feature* feat1, * feat2, * feat;
    struct feature** nbrs;
    struct kd_node* kd_root;
    CvPoint pt1, pt2;
    double d0, d1;
    int n1, n2, k, i, m = 0;

    if( argc != 3 )
        fatal_error( "usage: %s <img1> <img2>", argv[0] );

    img1 = cvLoadImage( argv[1], 1 );
    if( ! img1 )
        fatal_error( "unable to load image from %s", argv[1] );
    img2 = cvLoadImage( argv[2], 1 );
    if( ! img2 )
        fatal_error( "unable to load image from %s", argv[2] );
    stacked = stack_imgs( img1, img2 );

    fprintf( stderr, "Finding features in %s...\n", argv[1] );
    n1 = sift_features( img1, &feat1 );
    fprintf( stderr, "Finding features in %s...\n", argv[2] );
    n2 = sift_features( img2, &feat2 );
    kd_root = kdtree_build( feat2, n2 );
    for( i = 0; i < n1; i++ )
    {
        feat = feat1 + i;
        k = kdtree_bbf_knn( kd_root, feat, 2, &nbrs, KDTREE_BBF_MAX_NN_CHKS );
        if( k == 2 )
        {
            d0 = descr_dist_sq( feat, nbrs[0] );
            d1 = descr_dist_sq( feat, nbrs[1] );
            if( d0 < d1 * NN_SQ_DIST_RATIO_THR )
            {
                pt1 = cvPoint( cvRound( feat->x ), cvRound( feat->y ) );
                pt2 = cvPoint( cvRound( nbrs[0]->x ), cvRound( nbrs[0]->y ) );
                pt2.y += img1->height;
                cvLine( stacked, pt1, pt2, CV_RGB(255,0,255), 1, 8, 0 );
                m++;
                feat1[i].fwd_match = nbrs[0];
            }
        }
        free( nbrs );
    }

    fprintf( stderr, "Found %d total matches\n", m );
    display_big_img( stacked, "Matches" );
    cvWaitKey( 0 );

    /*
       UNCOMMENT BELOW TO SEE HOW RANSAC FUNCTION WORKS

       Note that this line above:

       feat1[i].fwd_match = nbrs[0];

       is important for the RANSAC function to work.
    */
// /*
// {
    CvMat* H;
    IplImage* xformed;
    H = ransac_xform( feat1, n1, FEATURE_FWD_MATCH, lsq_homog, 4, 0.01,
                      homog_xfer_err, 3.0, NULL, NULL );
    if( H )
    {
        xformed = cvCreateImage( cvGetSize( img2 ), IPL_DEPTH_8U, 3 );
        cvWarpPerspective( img1, xformed, H,
                           CV_INTER_LINEAR + CV_WARP_FILL_OUTLIERS,
                           cvScalarAll( 0 ) );
        cvNamedWindow( "Xformed", 1 );
        cvShowImage( "Xformed", xformed );
        cvWaitKey( 0 );
        cvReleaseImage( &xformed );
        cvReleaseMat( &H );
    }
// }
    //*/

    cvReleaseImage( &stacked );
    cvReleaseImage( &img1 );
    cvReleaseImage( &img2 );
    kdtree_release( kd_root );
    free( feat1 );
    free( feat2 );
    return 0;
}
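The RANSAC stage only works because the matching loop stored each accepted nearest neighbour in feat1[i].fwd_match: ransac_xform fits a least-squares homography (lsq_homog) to random 4-match samples, keeps the consensus set under the 3.0-pixel transfer error (homog_xfer_err), and the resulting H warps img1 into img2's coordinate frame for display.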
Example #25
void bird_eye() {
	int board_n = board_w * board_h;
	CvSize board_sz = cvSize(board_w, board_h);
	CvMat *intrinsic = (CvMat*) cvLoad("Intrinsics.xml");
	CvMat *distortion = (CvMat*) cvLoad("Distortion.xml");

	IplImage* image = cvLoadImage("./Resource/bird-eye.jpg", 1);
	IplImage* gray_image = cvCreateImage(cvGetSize(image), 8, 1);
	cvCvtColor(image, gray_image, CV_BGR2GRAY);

	IplImage* mapx = cvCreateImage(cvGetSize(image), IPL_DEPTH_32F, 1);
	IplImage* mapy = cvCreateImage(cvGetSize(image), IPL_DEPTH_32F, 1);
	
	cvInitUndistortMap(
			intrinsic, 
			distortion, 
			mapx, 
			mapy
	);
	
	IplImage* t = cvCloneImage(image);

	cvRemap(t, image, mapx, mapy);
	
	cvNamedWindow("Chessboard");
	cvShowImage("Chessboard", image);
	int c = cvWaitKey(-1);
	CvPoint2D32f* corners = new CvPoint2D32f[board_n];
	int corner_count = 0;
	
	int found = cvFindChessboardCorners(
			image, 
			board_sz, 
			corners, 
			&corner_count, 
			CV_CALIB_CB_ADAPTIVE_THRESH | CV_CALIB_CB_FILTER_QUADS
	);
	
	if(!found){
		printf("couldn't aquire chessboard!\n");
		return;
	}
	
	cvFindCornerSubPix(
			gray_image, 
			corners, 
			corner_count, 
			cvSize(11, 11), 
			cvSize(-1, -1), 
			cvTermCriteria(CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, 30, 0.1)
	);

	CvPoint2D32f objPts[4], imgPts[4];
	objPts[0].x = 0;			objPts[0].y = 0;
	objPts[1].x = board_w - 1;	objPts[1].y = 0;
	objPts[2].x = 0;			objPts[2].y = board_h - 1;
	objPts[3].x = board_w - 1;	objPts[3].y = board_h - 1;
	imgPts[0]   = corners[0];
	imgPts[1]	= corners[board_w - 1];
	imgPts[2]	= corners[(board_h - 1) * board_w];
	imgPts[3]	= corners[(board_h - 1) * board_w + board_w - 1];

	cvCircle(image, cvPointFrom32f(imgPts[0]), 9, CV_RGB(0, 0, 255), 3);
	cvCircle(image, cvPointFrom32f(imgPts[1]), 9, CV_RGB(0, 255, 0), 3);
	cvCircle(image, cvPointFrom32f(imgPts[2]), 9, CV_RGB(255, 0, 0), 3);
	cvCircle(image, cvPointFrom32f(imgPts[3]), 9, CV_RGB(255, 255, 0), 3);

	cvDrawChessboardCorners(
		image,
		board_sz,
		corners,
		corner_count,
		found
	);

	cvShowImage("Chessboard", image);

	CvMat *H = cvCreateMat(3, 3, CV_32F);
	cvGetPerspectiveTransform(objPts, imgPts, H);

	float z = 25;
	int key = 0;
	IplImage * birds_image = cvCloneImage(image);
	cvNamedWindow("Birds_Eye");

	while(key != 27) {
		CV_MAT_ELEM(*H, float, 2, 2) = z;

		cvWarpPerspective(
			image,
			birds_image,
			H,
			CV_INTER_LINEAR| CV_WARP_INVERSE_MAP | CV_WARP_FILL_OUTLIERS
		);

		cvShowImage("Birds_Eye", birds_image);

		key = cvWaitKey();
		if(key == 'u') z += 0.5;
		if(key == 'd') z -= 0.5;
	}

	cvSave("H.xml", H);
}
Example #26
void margModule::update() {
	
	// --
	
	blobFind.feedPixels(originalPix);
	blobFind.findContours();
	blobs = blobFind.getFoundBlobs();
	blobCorr.feedBlobs(blobs);
	blobs = blobCorr.getCorrectBlobs();
	blobInterp.feedBlobs(blobs);
	blobs = blobInterp.getInterpolatedBlobs();
	if(mode == TRAIL_MAP || mode == FINAL_IMAGE || bExhibitionMode) trailMaker.updtMap(blobs);
	
	vidPlayer.idleMovie();
	if (vidPlayer.isFrameNew()) {
		if (mode == FINAL_IMAGE || bExhibitionMode) vidBlender.blendVideo(trailMaker.getMap(), vidPlayer.getPixels());
	}
	
	// --
	
	if (bExhibitionMode) {
		display.feedImg(vidBlender.getPixels(), dispWidth, dispHeight);
	}
	else {
		switch (mode) {
			case SOURCE:
				display.feedImg(vidCapt.getPixels(), camWidth, camHeight, false);
				break;
			case LENSCORRECTED_SOURCE:
				if (correctDisp.getWidth() == 0) correctDisp.allocate(camWidth, camHeight);
				correctDisp.setFromPixels(vidCapt.getPixels(), camWidth, camHeight);
				correctDisp.undistort(correctRdX, correctRdY, correctTgX, correctTgY, correctFX, correctFY, correctCX, correctCY);
				display.feedImg(correctDisp.getPixels(), camWidth, camHeight, false);
				break;
			case WARPED_SOURCE:
				if (correctDisp.getWidth() == 0) correctDisp.allocate(camWidth, camHeight);
				tempSource.allocate(camWidth, camHeight);
				tempSource.setFromPixels(vidCapt.getPixels(), camWidth, camHeight);
				tempSource.undistort(correctRdX, correctRdY, correctTgX, correctTgY, correctFX, correctFY, correctCX, correctCY);
				cvWarpPerspective(tempSource.getCvImage(), correctDisp.getCvImage(), blobCorr.getPerspMat());
				tempSource.clear();
				display.feedImg(correctDisp.getPixels(), camWidth, camHeight, false);
				break;
			case THRESHOLD_SOURCE:
				display.feedImg(blobFind.getInputImg().getPixels(), camWidth, camHeight, false);
				break;
			case TRAIL_MAP:
				display.feedImg(trailMaker.getMap(), dispWidth, dispHeight, false);
				break;
			case FINAL_IMAGE:
				display.feedImg(vidBlender.getPixels(), dispWidth, dispHeight);
				break;
			case BYPASS_VIDEO:
				display.feedImg(vidPlayer.getPixels(), dispWidth, dispHeight);
				break;
			default:
				display.feedImg(vidBlender.getPixels(), dispWidth, dispHeight);
				break;
		}
	}
}
Example #27
// TODO: revise later
void FkPaperKeyboard_TypeA::cornerVerification(IplImage* srcImage){
	CvSize size = cvGetSize(srcImage);
	IplImage* eigImage = cvCreateImage(size, IPL_DEPTH_8U,1);
	IplImage* tempImage = cvCreateImage(size, IPL_DEPTH_8U, 1);
	IplImage* grayImage = cvCreateImage(size, IPL_DEPTH_8U, 1);
	IplImage* veriImage = cvCreateImage(size, IPL_DEPTH_8U, 1);
	IplImage* dstImage = cvCreateImage(size, IPL_DEPTH_8U, 1);
	IplImage* mask = cvCreateImage(size, IPL_DEPTH_8U, 1);
	IplImage* mask2 = cvCreateImage(size, IPL_DEPTH_8U, 1);
	CvRect rect = cvRect(10, 10, 640 - 20, 480 - 20);

	CvPoint2D32f srcQuad[4], dstQuad[4];
	CvMat* warp_matrix = cvCreateMat(3,3, CV_32FC1);
	CvMat* warp_matrix_invert = cvCreateMat(3,3, CV_32FC1);
	CvMat* result = cvCreateMat(3, 1, CV_32FC1);
	CvMat* dst = cvCreateMat(3, 1,CV_32FC1);

	int keyButtonCornerCount = 316;
	
	cvCvtColor(srcImage, grayImage, CV_BGR2GRAY);
	cvSetImageROI(grayImage, rect);
	cvSetImageROI(mask, rect);
	cvSetImageROI(dstImage, rect);
	cvSetImageROI(mask2, rect);

	// Keep only the pixels inside the threshold range in the mask
	cvInRangeS(grayImage, cvScalar(100, 100, 100), cvScalar(255, 255, 255), mask);
	cvCopy(mask, mask2);

	//cvShowImage("mask", mask);
	//cvShowImage("mask2", mask2);

	// Flood-fill the background to zero, then XOR it with the extracted mask2 to drop the below-threshold region
	cvFloodFill(mask, cvPoint(10, 10), cvScalar(0, 0, 0));
	cvXor(mask2, mask, dstImage);
	
	//cvShowImage("mask3", mask);
	//cvShowImage("mask4", mask2);
	//cvShowImage("dstImage", dstImage);

	// Extract corners from the final image (the corners of each key button)
	cvGoodFeaturesToTrack(dstImage, eigImage, tempImage, keyButtonCorner, &keyButtonCornerCount, 0.01, 7, NULL, 7, 0);
	cvFindCornerSubPix (dstImage, keyButtonCorner, keyButtonCornerCount,cvSize (3, 3), cvSize (-1, -1), cvTermCriteria (CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 20, 0.03));
	
	cvResetImageROI(dstImage);
	for(int i =0 ; i < 316 ; i++){
		keyButtonCorner[i].x += rect.x;
		keyButtonCorner[i].y += rect.y;
	}
	
	initKeyButtonCorner();
	
	srcQuad[CLOCKWISE_1].x = keyButtonCorner[315].x+10;
	srcQuad[CLOCKWISE_1].y = keyButtonCorner[315].y-10;
	srcQuad[CLOCKWISE_5].x = keyButtonCorner[31].x + 10;
	srcQuad[CLOCKWISE_5].y = keyButtonCorner[31].y + 10;
	srcQuad[CLOCKWISE_7].x = keyButtonCorner[0].x - 10;
	srcQuad[CLOCKWISE_7].y = keyButtonCorner[0].y + 10;
	srcQuad[CLOCKWISE_11].x = keyButtonCorner[290].x - 10;
	srcQuad[CLOCKWISE_11].y = keyButtonCorner[290].y - 10;
	dstQuad[CLOCKWISE_1].x = 640;
	dstQuad[CLOCKWISE_1].y = 0;
	dstQuad[CLOCKWISE_5].x = 640;
	dstQuad[CLOCKWISE_5].y = 480;
	dstQuad[CLOCKWISE_7].x = 0;
	dstQuad[CLOCKWISE_7].y = 480;
	dstQuad[CLOCKWISE_11].x = 0;
	dstQuad[CLOCKWISE_11].y = 0;
	cvGetPerspectiveTransform(srcQuad, dstQuad, warp_matrix);
	
	cvWarpPerspective(dstImage, veriImage, warp_matrix);
	detectKeyButtonCorner(veriImage);
	cvInvert(warp_matrix, warp_matrix_invert);
	for(int i = 0 ; i < 316 ; i++){	
		cvmSet(dst, 0, 0, keyButtonCorner[i].x);  
		cvmSet(dst, 1, 0, keyButtonCorner[i].y);
		cvmSet(dst, 2, 0, 1);

		cvMatMul(warp_matrix_invert, dst, result);
		float t = cvmGet(result, 2,0);
		keyButtonCorner[i].x = cvmGet(result, 0,0)/t ;
		keyButtonCorner[i].y = cvmGet(result, 1,0)/t ;
	}
	cvResetImageROI(srcImage);
	cvResetImageROI(mask);
	cvReleaseImage(&eigImage);
	cvReleaseImage(&tempImage);
	cvReleaseImage(&grayImage);
	cvReleaseImage(&veriImage);
	cvReleaseImage(&dstImage);
	cvReleaseImage(&mask);
	cvReleaseImage(&mask2);
	cvReleaseMat(&warp_matrix);
	cvReleaseMat(&warp_matrix_invert);
	cvReleaseMat(&result);
	cvReleaseMat(&dst);	
}
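The homogeneous multiply-and-divide loop above (and the identical one in the next function) is what cvPerspectiveTransform does internally. A sketch of the same back-projection using that call, assuming keyButtonCorner is an array of 316 CvPoint2D32f:

	CvPoint2D32f mapped[316];
	CvMat src = cvMat(1, 316, CV_32FC2, keyButtonCorner);
	CvMat dst = cvMat(1, 316, CV_32FC2, mapped);
	cvPerspectiveTransform(&src, &dst, warp_matrix_invert);	/* divides by w internally */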
Example #28
void FkPaperKeyboard_TypeA::setKeyButton(IplImage* srcImage){
	// corner points
	CvPoint2D32f srcQuad[4], dstQuad[4];

	IplImage* perspectiveTransImage = cvCreateImage(cvSize(640,480), IPL_DEPTH_8U, 3);

	CvMat* warp_matrix = cvCreateMat(3,3, CV_32FC1);
	CvMat* warp_matrix_invert = cvCreateMat(3,3, CV_32FC1);
	CvMat* result = cvCreateMat(3, 1, CV_32FC1);
	CvMat* dst = cvCreateMat(3, 1,CV_32FC1);

	sortPaperKeyboardCorner();
	
	srcQuad[CLOCKWISE_1] = keyboardCorner[1];
	srcQuad[CLOCKWISE_5] = keyboardCorner[2];
	srcQuad[CLOCKWISE_7] = keyboardCorner[3];
	srcQuad[CLOCKWISE_11] = keyboardCorner[0];

	dstQuad[CLOCKWISE_1].x = 640;
	dstQuad[CLOCKWISE_1].y = 0;
	dstQuad[CLOCKWISE_5].x = 640;
	dstQuad[CLOCKWISE_5].y = 480;
	dstQuad[CLOCKWISE_7].x = 0;
	dstQuad[CLOCKWISE_7].y = 480;
	dstQuad[CLOCKWISE_11].x = 0;
	dstQuad[CLOCKWISE_11].y = 0;	

	// Rectify to a fronto-parallel view, then verify.
	cvGetPerspectiveTransform(srcQuad, dstQuad, warp_matrix); // compute the transform
	cvWarpPerspective(srcImage, perspectiveTransImage, warp_matrix);	// apply the projective warp
	cornerVerification(perspectiveTransImage);	// validate the extracted corners
	cvInvert(warp_matrix, warp_matrix_invert);
	
	//cvShowImage("srcImage",srcImage);
	//cvShowImage("perspectiveTransImage", perspectiveTransImage);

	for(int i = 0 ; i < 316 ; i++){
		cvmSet(dst, 0, 0, keyButtonCorner[i].x);  
		cvmSet(dst, 1, 0, keyButtonCorner[i].y);
		cvmSet(dst, 2, 0, 1);

		cvMatMul(warp_matrix_invert, dst, result);
		float t = cvmGet(result, 2,0);
		keyButtonCorner[i].x = cvmGet(result, 0,0)/t;
		keyButtonCorner[i].y = cvmGet(result, 1,0)/t;
	}
	
	setKeyButtonArea(keyButtonCorner, 0, 16);
	setKeyButtonArea(keyButtonCorner, 64, 14);
	setKeyButtonArea(keyButtonCorner, 120, 14);
	setKeyButtonArea(keyButtonCorner, 176, 13);
	setKeyButtonArea(keyButtonCorner, 228, 12);
	setKeyButtonArea(keyButtonCorner, 276, 7);
	setDirectionKeyButtonArea(keyButtonCorner, 306, 2, 77);
	setDirectionKeyButtonArea(keyButtonCorner, 304, 4, 76);
	setDirectionKeyButtonArea(keyButtonCorner, 308, 3, 78);
	
	cvReleaseImage(&perspectiveTransImage);

	cvReleaseMat(&warp_matrix);
	cvReleaseMat(&warp_matrix_invert);
	cvReleaseMat(&result);
	cvReleaseMat(&dst);
}
Example #29
void the_project::project_getting()
{
	cout << "Ready to go?\n";
	cin.get();
	get_in = cvCreateImage(image_size, IPL_DEPTH_8U, 3);

	if(v_c_flag==0){
	// grab an image of the path
	///*
		IplImage *new_one = NULL;
		cout << "Adjust the camera position and grab an image. Space to continue:\n";
		cvNamedWindow("Adjust");
		while(1){
			new_one = cvQueryFrame(for_cam);
			cvShowImage("Adjust",new_one);
			if(cvWaitKey(10)==' '){
				cvCopyImage(new_one,get_in);
				cvDestroyWindow("Adjust");
				cvReleaseCapture(&for_cam);
				break;
			}
		}
		cvSaveImage("image_origin.jpg",get_in);
	//*/
	}
	else if(v_c_flag==1){
	// for testing: read a frame from the video instead
		get_in = cvQueryFrame(for_video);
	}

	cvNamedWindow("win1");
	cvShowImage("win1",get_in);
	// transform
	get_change = cvCreateImage(image_size, IPL_DEPTH_8U, 3);
	transmat = cvCreateMat(3, 3, CV_32FC1);

	vector<CvPoint2D32f> points;
	void * p_pointer = 
		reinterpret_cast<void *>(&points);

	cout << "choose four points for transform.\n";
	cvSetMouseCallback("win1", mouse,p_pointer);
	cvWaitKey();

	cvDestroyWindow("win1");
	for(int i=0;i<4;i++)
		originpoints[i] = points[i];

	cvGetPerspectiveTransform(originpoints, newpoints, transmat);
	cvWarpPerspective(get_in, get_change, transmat);

	cvNamedWindow("win1");
	cvShowImage("win1",get_in);
	cvNamedWindow("win2");
	cvShowImage("win2",get_change);

	cvSaveImage("image_square.jpg",get_change);

	get_unchange = cvCreateImage(image_size, IPL_DEPTH_8U, 3);
	get_looking = cvCreateImage(image_size, IPL_DEPTH_8U, 3);
	cvCopyImage(get_change,get_unchange);

	cout << "Press any key to continue...\n";
	cvWaitKey();
}
Example #30
void deblur_image(int image_num, int n, IplImage *result, IplImage *result_luck)
{
	cvSetZero(result);
	cvSetZero(result_luck);
	IplImage *trans[MAX_IMAGE];// warped frames
	IplImage *trans_luck[MAX_IMAGE];
	IplImage *blur[MAX_IMAGE];
	for (int i = 0; i < image_num; ++i)
	{
		trans[i] = cvCreateImage(image_size, IPL_DEPTH_8U, 3);
		cvWarpPerspective(images[i], trans[i], hom[i][n], CV_INTER_LINEAR+CV_WARP_FILL_OUTLIERS, cvScalarAll(0));
		// perspective-warp frame i into reference frame n
		trans_luck[i] = cvCreateImage(image_size, IPL_DEPTH_32F, 4);
		cvWarpPerspective(images_luck[i], trans_luck[i], hom[i][n], CV_INTER_LINEAR+CV_WARP_FILL_OUTLIERS, cvScalarAll(0));
		// images_luck holds the per-pixel "luckiness" score of each frame
		blur[i] = cvCreateImage(image_size, IPL_DEPTH_8U, 3);
		blur_function(trans[i], blur[i], hom[n-1][n], hom[n][n+1]);
	}
	
	
	for (int i = 0; i < image_num; ++i)
	{
		char wname[16];
		sprintf(wname, "Homography%d", i);
		cvNamedWindow(wname, CV_WINDOW_AUTOSIZE);
		cvMoveWindow(wname, i*50, i*50);
		cvShowImage(wname, trans[i]);
		sprintf(wname, "Blurred%d", i);
		cvNamedWindow(wname, CV_WINDOW_AUTOSIZE);
		cvMoveWindow(wname, i*50+100, i*50);
		cvShowImage(wname, blur[i]);
	}
	cvWaitKey(0);
	cvDestroyAllWindows();
	
	int grid_r = (image_size.height-PATCH_SIZE/2-1) / (PATCH_SIZE/2);
	int grid_c = (image_size.width-PATCH_SIZE/2-1) / (PATCH_SIZE/2);
	if (grid_r > 0 && grid_c > 0)
	{
		CvMat *patch = cvCreateMat(grid_r, grid_c, CV_64FC4);
		for (int i = 0; i < grid_r; ++i)
		{
			int y = (i+1)*(PATCH_SIZE/2);
			
			int t1 = clock();
			for (int j = 0; j < grid_c; ++j)
			{
				CvScalar res;
				int x = (j+1)*(PATCH_SIZE/2);
				
				
				if (deblur_patch(blur, trans_luck, image_num, n, x, y, &res) != 0)
				{
					printf("deblur_patch: %d:%d,%d failed.\n", n, x, y);
					res.val[0] = n;
					res.val[1] = x;
					res.val[2] = y;
					res.val[3] = 0;
					//copy_pixel(result, x, y, images[n], x, y);
				}
				
				/*
				res.val[0] = 2;
				res.val[1] = x;
				res.val[2] = y;
				res.val[3] = 100000;*/
				
				res.val[3] = exp(-res.val[3]/(2*SIGMA_W*SIGMA_W));
				
				CV_MAT_ELEM(*patch, CvScalar, i, j) = res;
			}
			int t2 = clock();
			
			printf("y:%d/%d  %d ms\n", y, image_size.height, (t2-t1)*1000/CLOCKS_PER_SEC);
		}
		
		cvNamedWindow("origin", CV_WINDOW_AUTOSIZE);
		cvShowImage("origin", images[n]);
		cvNamedWindow("result", CV_WINDOW_AUTOSIZE);
		
		// interior: blend the four overlapping half-patches
		for (int i = 1; i < grid_r; ++i)
		{
			int miny = i*(PATCH_SIZE/2);
			for (int j = 1; j < grid_c; ++j)
			{
				CvScalar pres1 = CV_MAT_ELEM(*patch, CvScalar, i-1, j-1);
				CvScalar pres2 = CV_MAT_ELEM(*patch, CvScalar, i-1, j);
				CvScalar pres3 = CV_MAT_ELEM(*patch, CvScalar, i, j-1);
				CvScalar pres4 = CV_MAT_ELEM(*patch, CvScalar, i, j);
				int minx = j*(PATCH_SIZE/2);
				for (int y = 0; y < PATCH_SIZE/2; ++y)
					for (int x = 0; x < PATCH_SIZE/2; ++x)
					{
						CvScalar v[4];
						v[0] = cvGet2D(trans[(int)pres1.val[0]], (int)pres1.val[2]+y, (int)pres1.val[1]+x);
						v[1] = cvGet2D(trans[(int)pres2.val[0]], (int)pres2.val[2]+y, (int)pres2.val[1]+x-PATCH_SIZE/2);
						v[2] = cvGet2D(trans[(int)pres3.val[0]], (int)pres3.val[2]+y-PATCH_SIZE/2, (int)pres3.val[1]+x);
						v[3] = cvGet2D(trans[(int)pres4.val[0]], (int)pres4.val[2]+y-PATCH_SIZE/2, (int)pres4.val[1]+x-PATCH_SIZE/2);
						double w[4] = {pres1.val[3], pres2.val[3], pres3.val[3], pres4.val[3]};
						
						CvScalar pv = weighted_average(v, w, 4);
						cvSet2D(result, y+miny, x+minx, pv);
						
						/*
						printf("p %d %d\n", y+miny, x+minx);
						for (int a = 0; a < 4; ++a)
						{
							printf("v%d: %f %f %f w %g\n", a, v[a].val[0], v[a].val[1], v[a].val[2], w[a]);
						}
						printf("pv: %f %f %f\n", pv.val[0], pv.val[1], pv.val[2]);
						*/
					}
			}
			cvShowImage("result", result);
			cvWaitKey(20);
		}
		
		// image borders: special cases
		for (int y = 0; y < PATCH_SIZE/2; ++y)
			for (int x = 0; x < PATCH_SIZE/2; ++x)
			{
				CvScalar pres = CV_MAT_ELEM(*patch, CvScalar, 0, 0);
				CvScalar pv = cvGet2D(trans[(int)pres.val[0]], (int)pres.val[2]+y-PATCH_SIZE/2, (int)pres.val[1]+x-PATCH_SIZE/2);
				cvSet2D(result, y, x, pv);
				
				pres = CV_MAT_ELEM(*patch, CvScalar, 0, grid_c-1);
				pv = cvGet2D(trans[(int)pres.val[0]], (int)pres.val[2]+y-PATCH_SIZE/2, (int)pres.val[1]+x);
				cvSet2D(result, y, grid_c*(PATCH_SIZE/2)+x, pv);
				
				pres = CV_MAT_ELEM(*patch, CvScalar, grid_r-1, 0);
				pv = cvGet2D(trans[(int)pres.val[0]], (int)pres.val[2]+y, (int)pres.val[1]+x-PATCH_SIZE/2);
				cvSet2D(result, grid_r*(PATCH_SIZE/2)+y, x, pv);
				
				pres = CV_MAT_ELEM(*patch, CvScalar, grid_r-1, grid_c-1);
				pv = cvGet2D(trans[(int)pres.val[0]], (int)pres.val[2]+y, (int)pres.val[1]+x);
				cvSet2D(result, grid_r*(PATCH_SIZE/2)+y, grid_c*(PATCH_SIZE/2)+x, pv);
			}
		for (int j = 1; j < grid_c; ++j)
		{
			CvScalar pres1 = CV_MAT_ELEM(*patch, CvScalar, 0, j-1);
			CvScalar pres2 = CV_MAT_ELEM(*patch, CvScalar, 0, j);
			CvScalar pres3 = CV_MAT_ELEM(*patch, CvScalar, grid_r-1, j-1);
			CvScalar pres4 = CV_MAT_ELEM(*patch, CvScalar, grid_r-1, j);
			int minx = j*(PATCH_SIZE/2);
			for (int y = 0; y < PATCH_SIZE/2; ++y)
				for (int x = 0; x < PATCH_SIZE/2; ++x)
				{
					CvScalar v[2];
					v[0] = cvGet2D(trans[(int)pres1.val[0]], (int)pres1.val[2]+y-PATCH_SIZE/2, (int)pres1.val[1]+x);
					v[1] = cvGet2D(trans[(int)pres2.val[0]], (int)pres2.val[2]+y-PATCH_SIZE/2, (int)pres2.val[1]+x-PATCH_SIZE/2);
					double w[2] = {pres1.val[3], pres2.val[3]};
					CvScalar pv = weighted_average(v, w, 2);
					cvSet2D(result, y, minx+x, pv);
					
					v[0] = cvGet2D(trans[(int)pres3.val[0]], (int)pres3.val[2]+y, (int)pres3.val[1]+x);
					v[1] = cvGet2D(trans[(int)pres4.val[0]], (int)pres4.val[2]+y, (int)pres4.val[1]+x-PATCH_SIZE/2);
					w[0] = pres3.val[3];
					w[1] = pres4.val[3];
					pv = weighted_average(v, w, 2);
					cvSet2D(result, grid_r*(PATCH_SIZE/2)+y, minx+x, pv);
				}
		}
		for (int i = 1; i < grid_r; ++i)
		{
			CvScalar pres1 = CV_MAT_ELEM(*patch, CvScalar, i-1, 0);
			CvScalar pres2 = CV_MAT_ELEM(*patch, CvScalar, i, 0);
			CvScalar pres3 = CV_MAT_ELEM(*patch, CvScalar, i-1, grid_c-1);
			CvScalar pres4 = CV_MAT_ELEM(*patch, CvScalar, i, grid_c-1);
			int miny = i*(PATCH_SIZE/2);
			for (int y = 0; y < PATCH_SIZE/2; ++y)
				for (int x = 0; x < PATCH_SIZE/2; ++x)
				{
					CvScalar v[2];
					v[0] = cvGet2D(trans[(int)pres1.val[0]], (int)pres1.val[2]+y, (int)pres1.val[1]+x-PATCH_SIZE/2);
					v[1] = cvGet2D(trans[(int)pres2.val[0]], (int)pres2.val[2]+y-PATCH_SIZE/2, (int)pres2.val[1]+x-PATCH_SIZE/2);
					double w[2] = {pres1.val[3], pres2.val[3]};
					CvScalar pv = weighted_average(v, w, 2);
					cvSet2D(result, miny+y, x, pv);
					
					v[0] = cvGet2D(trans[(int)pres3.val[0]], (int)pres3.val[2]+y, (int)pres3.val[1]+x);
					v[1] = cvGet2D(trans[(int)pres4.val[0]], (int)pres4.val[2]+y-PATCH_SIZE/2, (int)pres4.val[1]+x);
					w[0] = pres3.val[3];
					w[1] = pres4.val[3];
					pv = weighted_average(v, w, 2);
					cvSet2D(result, miny+y, grid_c*(PATCH_SIZE/2)+x, pv);
				}
		}
		cvShowImage("result", result);
		
		/*
		IplImage *res_diff = cvCreateImage(image_size, IPL_DEPTH_8U, 3);
		cvAbsDiff(result, images[n], res_diff);
		cvNamedWindow("diff", CV_WINDOW_AUTOSIZE);
		cvShowImage("diff", res_diff);*/
	
		char name[16];
		sprintf(name, "result%d.png", n);
		cvSaveImage(name, result, NULL);
		sprintf(name, "origin%d.png", n);
		cvSaveImage(name, images[n], NULL);
		
		cvReleaseMat(&patch);
	}
	
	for (int i = 0; i < image_num; ++i)
	{
		cvReleaseImage(&trans[i]);
		cvReleaseImage(&trans_luck[i]);
		cvReleaseImage(&blur[i]);
	}
}
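The stitching pass at the end works on half-patch cells: every cell of the output is covered by up to four overlapping deblurred patches, and weighted_average blends them with the confidences computed earlier (res.val[3] = exp(-err/(2*SIGMA_W*SIGMA_W))), so patches that deblurred badly fade out smoothly instead of leaving visible seams.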